sqoop-commits mailing list archives

From: arv...@apache.org
Subject: svn commit: r1196486 [2/3] - in /incubator/sqoop/trunk: ./ src/docs/dev/ src/docs/man/ src/docs/sip/ src/docs/user/ src/docs/web/ src/java/com/cloudera/sqoop/ src/java/com/cloudera/sqoop/cli/ src/java/com/cloudera/sqoop/hbase/ src/java/com/cloudera/sqo...
Date: Wed, 02 Nov 2011 07:49:08 GMT
Modified: incubator/sqoop/trunk/src/java/com/cloudera/sqoop/mapreduce/DelegatingOutputFormat.java
URL: http://svn.apache.org/viewvc/incubator/sqoop/trunk/src/java/com/cloudera/sqoop/mapreduce/DelegatingOutputFormat.java?rev=1196486&r1=1196485&r2=1196486&view=diff
==============================================================================
--- incubator/sqoop/trunk/src/java/com/cloudera/sqoop/mapreduce/DelegatingOutputFormat.java (original)
+++ incubator/sqoop/trunk/src/java/com/cloudera/sqoop/mapreduce/DelegatingOutputFormat.java Wed Nov  2 07:48:56 2011
@@ -24,7 +24,7 @@ import com.cloudera.sqoop.lib.FieldMappa
  * @deprecated Moving to use org.apache.sqoop namespace.
  */
 public class DelegatingOutputFormat<K extends FieldMappable, V>
-    extends org.apache.sqoop.mapreduce.DelegatingOutputFormat <K, V> {
+    extends org.apache.sqoop.mapreduce.DelegatingOutputFormat<K, V> {
 
   public static final String DELEGATE_CLASS_KEY =
       org.apache.sqoop.mapreduce.DelegatingOutputFormat.DELEGATE_CLASS_KEY;

Modified: incubator/sqoop/trunk/src/java/com/cloudera/sqoop/mapreduce/ExportBatchOutputFormat.java
URL: http://svn.apache.org/viewvc/incubator/sqoop/trunk/src/java/com/cloudera/sqoop/mapreduce/ExportBatchOutputFormat.java?rev=1196486&r1=1196485&r2=1196486&view=diff
==============================================================================
--- incubator/sqoop/trunk/src/java/com/cloudera/sqoop/mapreduce/ExportBatchOutputFormat.java (original)
+++ incubator/sqoop/trunk/src/java/com/cloudera/sqoop/mapreduce/ExportBatchOutputFormat.java Wed Nov  2 07:48:56 2011
@@ -25,5 +25,5 @@ import com.cloudera.sqoop.lib.SqoopRecor
  */
 public class ExportBatchOutputFormat<K extends SqoopRecord, V>
     extends org.apache.sqoop.mapreduce.ExportBatchOutputFormat
-    <SqoopRecord,V> {
+    <SqoopRecord, V> {
 }

Modified: incubator/sqoop/trunk/src/java/com/cloudera/sqoop/mapreduce/db/DBOutputFormat.java
URL: http://svn.apache.org/viewvc/incubator/sqoop/trunk/src/java/com/cloudera/sqoop/mapreduce/db/DBOutputFormat.java?rev=1196486&r1=1196485&r2=1196486&view=diff
==============================================================================
--- incubator/sqoop/trunk/src/java/com/cloudera/sqoop/mapreduce/db/DBOutputFormat.java (original)
+++ incubator/sqoop/trunk/src/java/com/cloudera/sqoop/mapreduce/db/DBOutputFormat.java Wed Nov  2 07:48:56 2011
@@ -21,7 +21,6 @@ import java.sql.Connection;
 import java.sql.PreparedStatement;
 import java.sql.SQLException;
 
-import org.apache.hadoop.mapreduce.RecordWriter;
 import org.apache.hadoop.mapreduce.lib.db.DBWritable;
 
 /**

Modified: incubator/sqoop/trunk/src/java/com/cloudera/sqoop/mapreduce/db/DataDrivenDBInputFormat.java
URL: http://svn.apache.org/viewvc/incubator/sqoop/trunk/src/java/com/cloudera/sqoop/mapreduce/db/DataDrivenDBInputFormat.java?rev=1196486&r1=1196485&r2=1196486&view=diff
==============================================================================
--- incubator/sqoop/trunk/src/java/com/cloudera/sqoop/mapreduce/db/DataDrivenDBInputFormat.java (original)
+++ incubator/sqoop/trunk/src/java/com/cloudera/sqoop/mapreduce/db/DataDrivenDBInputFormat.java Wed Nov  2 07:48:56 2011
@@ -47,7 +47,7 @@ public class DataDrivenDBInputFormat<T e
    * @deprecated use org.apache.sqoop.mapreduce.db.DataDrivenDBInputFormat.
    *   DataDrivenDBInputSplit instead.
    * @see org.apache.sqoop.mapreduce.db.DataDrivenDBInputFormat.
-   * 					DataDrivenDBInputSplit
+   *      DataDrivenDBInputSplit
    */
   public static class DataDrivenDBInputSplit extends
   org.apache.sqoop.mapreduce.db.DataDrivenDBInputFormat.DataDrivenDBInputSplit {

Modified: incubator/sqoop/trunk/src/java/com/cloudera/sqoop/tool/CodeGenTool.java
URL: http://svn.apache.org/viewvc/incubator/sqoop/trunk/src/java/com/cloudera/sqoop/tool/CodeGenTool.java?rev=1196486&r1=1196485&r2=1196486&view=diff
==============================================================================
--- incubator/sqoop/trunk/src/java/com/cloudera/sqoop/tool/CodeGenTool.java (original)
+++ incubator/sqoop/trunk/src/java/com/cloudera/sqoop/tool/CodeGenTool.java Wed Nov  2 07:48:56 2011
@@ -20,6 +20,6 @@ package com.cloudera.sqoop.tool;
 /**
  * @deprecated Moving to use org.apache.sqoop namespace.
  */
-public class CodeGenTool 
+public class CodeGenTool
     extends org.apache.sqoop.tool.CodeGenTool {
 }

Modified: incubator/sqoop/trunk/src/java/com/cloudera/sqoop/tool/SqoopTool.java
URL: http://svn.apache.org/viewvc/incubator/sqoop/trunk/src/java/com/cloudera/sqoop/tool/SqoopTool.java?rev=1196486&r1=1196485&r2=1196486&view=diff
==============================================================================
--- incubator/sqoop/trunk/src/java/com/cloudera/sqoop/tool/SqoopTool.java (original)
+++ incubator/sqoop/trunk/src/java/com/cloudera/sqoop/tool/SqoopTool.java Wed Nov  2 07:48:56 2011
@@ -19,8 +19,6 @@ package com.cloudera.sqoop.tool;
 
 import java.util.Set;
 
-import org.apache.hadoop.util.StringUtils;
-
 /**
  * @deprecated Moving to use org.apache.sqoop namespace.
  */

Modified: incubator/sqoop/trunk/src/java/com/cloudera/sqoop/util/TaskId.java
URL: http://svn.apache.org/viewvc/incubator/sqoop/trunk/src/java/com/cloudera/sqoop/util/TaskId.java?rev=1196486&r1=1196485&r2=1196486&view=diff
==============================================================================
--- incubator/sqoop/trunk/src/java/com/cloudera/sqoop/util/TaskId.java (original)
+++ incubator/sqoop/trunk/src/java/com/cloudera/sqoop/util/TaskId.java Wed Nov  2 07:48:56 2011
@@ -23,8 +23,6 @@ import java.io.IOException;
 
 import org.apache.hadoop.conf.Configuration;
 
-import com.cloudera.sqoop.config.ConfigurationConstants;
-
 /**
  * @deprecated Moving to use org.apache.sqoop namespace.
  */

Added: incubator/sqoop/trunk/src/java/org/apache/sqoop/ConnFactory.java
URL: http://svn.apache.org/viewvc/incubator/sqoop/trunk/src/java/org/apache/sqoop/ConnFactory.java?rev=1196486&view=auto
==============================================================================
--- incubator/sqoop/trunk/src/java/org/apache/sqoop/ConnFactory.java (added)
+++ incubator/sqoop/trunk/src/java/org/apache/sqoop/ConnFactory.java Wed Nov  2 07:48:56 2011
@@ -0,0 +1,232 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.sqoop;
+
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.IOException;
+import java.io.InputStreamReader;
+import java.io.Reader;
+import java.util.Arrays;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Map;
+import java.util.Properties;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.util.ReflectionUtils;
+import org.apache.hadoop.util.StringUtils;
+
+import com.cloudera.sqoop.manager.ConnManager;
+import com.cloudera.sqoop.manager.DefaultManagerFactory;
+import com.cloudera.sqoop.manager.ManagerFactory;
+import com.cloudera.sqoop.metastore.JobData;
+
+import com.cloudera.sqoop.util.ClassLoaderStack;
+
+/**
+ * Factory class to create the ConnManager type required
+ * for the current import job.
+ *
+ * This class delegates the actual responsibility for instantiating
+ * ConnManagers to one or more instances of ManagerFactory. ManagerFactories
+ * are consulted in the order specified in sqoop-site.xml
+ * (sqoop.connection.factories).
+ */
+public class ConnFactory {
+
+  public static final Log LOG = LogFactory.getLog(ConnFactory.class.getName());
+
+  public ConnFactory(Configuration conf) {
+    factories = new LinkedList<ManagerFactory>();
+    instantiateFactories(conf);
+  }
+
+  /** The sqoop-site.xml configuration property used to set the list of
+   * available ManagerFactories.
+   */
+  public static final String FACTORY_CLASS_NAMES_KEY =
+      "sqoop.connection.factories";
+
+  // The default value for sqoop.connection.factories is the
+  // name of the DefaultManagerFactory.
+  public static final String DEFAULT_FACTORY_CLASS_NAMES =
+      DefaultManagerFactory.class.getName();
+
+  /** The list of ManagerFactory instances consulted by getManager().
+   */
+  private List<ManagerFactory> factories;
+
+  /**
+   * Create the ManagerFactory instances that should populate
+   * the factories list.
+   */
+  private void instantiateFactories(Configuration conf) {
+    loadManagersFromConfDir(conf);
+    String [] classNameArray =
+        conf.getStrings(FACTORY_CLASS_NAMES_KEY, DEFAULT_FACTORY_CLASS_NAMES);
+
+    for (String className : classNameArray) {
+      try {
+        className = className.trim(); // Ignore leading/trailing whitespace.
+        ManagerFactory factory = ReflectionUtils.newInstance(
+            (Class<? extends ManagerFactory>)
+            conf.getClassByName(className), conf);
+        LOG.debug("Loaded manager factory: " + className);
+        factories.add(factory);
+      } catch (ClassNotFoundException cnfe) {
+        LOG.error("Could not load ManagerFactory " + className
+            + " (not found)");
+      }
+    }
+  }
+
+  /**
+   * Factory method to get a ConnManager for the given JDBC connect string.
+   * @param data the connection and other configuration arguments.
+   * @return a ConnManager instance for the appropriate database.
+   * @throws IOException if it cannot find a ConnManager for this schema.
+   */
+  public ConnManager getManager(JobData data) throws IOException {
+    // Try all the available manager factories.
+    for (ManagerFactory factory : factories) {
+      LOG.debug("Trying ManagerFactory: " + factory.getClass().getName());
+      ConnManager mgr = factory.accept(data);
+      if (null != mgr) {
+        LOG.debug("Instantiated ConnManager " + mgr.toString());
+        return mgr;
+      }
+    }
+
+    throw new IOException("No manager for connect string: "
+        + data.getSqoopOptions().getConnectString());
+  }
+
+  /**
+   * Add a ManagerFactory class to the list that we instantiate.
+   * @param conf the Configuration to set.
+   * @param factory the ManagerFactory class name to add.
+   */
+  private void addManager(Configuration conf, String factory) {
+    String curVal = conf.get(FACTORY_CLASS_NAMES_KEY);
+    if (null == curVal) {
+      conf.set(FACTORY_CLASS_NAMES_KEY, factory);
+    } else {
+      conf.set(FACTORY_CLASS_NAMES_KEY, curVal + "," + factory);
+    }
+  }
+
+  /**
+   * Read the specified file and extract any ManagerFactory implementation
+   * names from there.
+   * @param conf the configuration to populate.
+   * @param f the file containing the configuration data to add.
+   */
+  private void addManagersFromFile(Configuration conf, File f) {
+    Reader r = null;
+    try {
+      // The file format is actually Java properties-file syntax.
+      r = new InputStreamReader(new FileInputStream(f));
+      Properties props = new Properties();
+      props.load(r);
+
+      for (Map.Entry<Object, Object> entry : props.entrySet()) {
+        // Each key is a ManagerFactory class name.
+        // Each value, if set, is the jar that contains it.
+        String factory = entry.getKey().toString();
+        addManager(conf, factory);
+
+        String jarName = entry.getValue().toString();
+        if (jarName.length() > 0) {
+          ClassLoaderStack.addJarFile(jarName, factory);
+          LOG.debug("Added factory " + factory + " in jar " + jarName
+              + " specified by " + f);
+        } else if (LOG.isDebugEnabled()) {
+          LOG.debug("Added factory " + factory + " specified by " + f);
+        }
+      }
+    } catch (IOException ioe) {
+      LOG.error("Error loading ManagerFactory information from file "
+          + f + ": " + StringUtils.stringifyException(ioe));
+    } finally {
+      if (null != r) {
+        try {
+          r.close();
+        } catch (IOException ioe) {
+          LOG.warn("Error closing file " + f + ": " + ioe);
+        }
+      }
+    }
+  }
+
+  /**
+   * If $SQOOP_CONF_DIR/managers.d/ exists and sqoop.connection.factories is
+   * not set, then we look through the files in that directory; they should
+   * contain lines of the form mgr.class.name[=/path/to/containing.jar].
+   *
+   * <p>
+   * Put all mgr.class.names into the Configuration, and load any specified
+   * jars into the ClassLoader.
+   * </p>
+   *
+   * @param conf the current configuration to populate with class names.
+   * @return conf again, after possibly populating sqoop.connection.factories.
+   */
+  private Configuration loadManagersFromConfDir(Configuration conf) {
+    if (conf.get(FACTORY_CLASS_NAMES_KEY) != null) {
+      LOG.debug(FACTORY_CLASS_NAMES_KEY + " is set; ignoring managers.d");
+      return conf;
+    }
+
+    String confDirName = System.getenv("SQOOP_CONF_DIR");
+    if (null == confDirName) {
+      LOG.warn("$SQOOP_CONF_DIR has not been set in the environment. "
+          + "Cannot check for additional configuration.");
+      return conf;
+    }
+
+    File confDir = new File(confDirName);
+    File mgrDir = new File(confDir, "managers.d");
+
+    if (mgrDir.exists() && mgrDir.isDirectory()) {
+      // We have a managers.d subdirectory. Get the file list, sort it,
+      // and process them in order.
+      String [] fileNames = mgrDir.list();
+      Arrays.sort(fileNames);
+
+      for (String fileName : fileNames) {
+        File f = new File(mgrDir, fileName);
+        if (f.isFile()) {
+          addManagersFromFile(conf, f);
+        }
+      }
+
+      // Add the default ManagerFactory.
+      addManager(conf, DEFAULT_FACTORY_CLASS_NAMES);
+    }
+
+    // Set the classloader in this configuration so that it will use
+    // the jars we just loaded in.
+    conf.setClassLoader(Thread.currentThread().getContextClassLoader());
+    return conf;
+  }
+}
+

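The ConnFactory added above resolves a ConnManager by consulting each configured ManagerFactory in turn; the first factory whose accept() returns non-null wins, and getManager() throws an IOException if every factory declines. A minimal sketch of a pluggable factory follows; the package, class names, and "jdbc:example:" prefix are hypothetical, and registration would happen either through sqoop.connection.factories in sqoop-site.xml or through a $SQOOP_CONF_DIR/managers.d/ file containing a line such as "com.example.sqoop.ExampleManagerFactory=/opt/example/example-sqoop.jar":

    package com.example.sqoop;

    import com.cloudera.sqoop.manager.ConnManager;
    import com.cloudera.sqoop.manager.ManagerFactory;
    import com.cloudera.sqoop.metastore.JobData;

    public class ExampleManagerFactory extends ManagerFactory {
      @Override
      public ConnManager accept(JobData data) {
        String connectStr = data.getSqoopOptions().getConnectString();
        if (connectStr != null && connectStr.startsWith("jdbc:example:")) {
          // ExampleManager is a hypothetical ConnManager implementation
          // that knows how to talk to this database.
          return new ExampleManager(data.getSqoopOptions());
        }
        return null; // Decline, so ConnFactory consults the next factory.
      }
    }
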
Added: incubator/sqoop/trunk/src/java/org/apache/sqoop/Sqoop.java
URL: http://svn.apache.org/viewvc/incubator/sqoop/trunk/src/java/org/apache/sqoop/Sqoop.java?rev=1196486&view=auto
==============================================================================
--- incubator/sqoop/trunk/src/java/org/apache/sqoop/Sqoop.java (added)
+++ incubator/sqoop/trunk/src/java/org/apache/sqoop/Sqoop.java Wed Nov  2 07:48:56 2011
@@ -0,0 +1,242 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.sqoop;
+
+import java.util.Arrays;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.conf.Configured;
+import org.apache.hadoop.util.Tool;
+import org.apache.hadoop.util.ToolRunner;
+
+import com.cloudera.sqoop.SqoopOptions;
+import com.cloudera.sqoop.cli.ToolOptions;
+import com.cloudera.sqoop.tool.SqoopTool;
+import com.cloudera.sqoop.util.OptionsFileUtil;
+
+/**
+ * Main entry-point for Sqoop.
+ * Usage: hadoop jar (this_jar_name) org.apache.sqoop.Sqoop (options)
+ * See the SqoopOptions class for options.
+ */
+public class Sqoop extends Configured implements Tool {
+
+  public static final Log SQOOP_LOG = LogFactory.getLog("com.cloudera.sqoop");
+  public static final Log LOG = LogFactory.getLog(Sqoop.class.getName());
+
+  /**
+   * If this System property is set, always throw an exception, do not just
+   * exit with status 1.
+   */
+  public static final String SQOOP_RETHROW_PROPERTY = "sqoop.throwOnError";
+
+  /**
+   * The option to specify an options file from which other options to the
+   * tool are read.
+   */
+  public static final String SQOOP_OPTIONS_FILE_SPECIFIER = "--options-file";
+
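+  // Register sqoop-site.xml as a default resource so its settings are
+  // picked up by every Configuration instance created hereafter.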
+  static {
+    Configuration.addDefaultResource("sqoop-site.xml");
+  }
+
+  private SqoopTool tool;
+  private SqoopOptions options;
+  private String [] childPrgmArgs;
+
+  /**
+   * Creates a new instance of Sqoop set to run the supplied SqoopTool
+   * with the default configuration.
+   * @param tool the SqoopTool to run in the main body of Sqoop.
+   */
+  public Sqoop(SqoopTool tool) {
+    this(tool, (Configuration) null);
+  }
+
+  /**
+   * Creates a new instance of Sqoop set to run the supplied SqoopTool
+   * with the provided configuration.
+   * @param tool the SqoopTool to run in the main body of Sqoop.
+   * @param conf the Configuration to use (e.g., from ToolRunner).
+   */
+  public Sqoop(SqoopTool tool, Configuration conf) {
+    this(tool, conf, new SqoopOptions());
+  }
+
+  /**
+   * Creates a new instance of Sqoop set to run the supplied SqoopTool
+   * with the provided configuration and SqoopOptions.
+   * @param tool the SqoopTool to run in the main body of Sqoop.
+   * @param conf the Configuration to use (e.g., from ToolRunner).
+   * @param opts the SqoopOptions which control the tool's parameters.
+   */
+  public Sqoop(SqoopTool tool, Configuration conf, SqoopOptions opts) {
+    if (null != conf) {
+      setConf(conf);
+    }
+
+    this.options = opts;
+    this.options.setConf(getConf());
+
+    this.tool = tool;
+  }
+
+  /**
+   * @return the SqoopOptions used in this Sqoop instance.
+   */
+  public SqoopOptions getOptions() {
+    return this.options;
+  }
+
+  /**
+   * @return the SqoopTool used in this Sqoop instance.
+   */
+  public SqoopTool getTool() {
+    return this.tool;
+  }
+
+  /**
+   * Actual main entry-point for the program.
+   */
+  @Override
+  public int run(String [] args) {
+    if (options.getConf() == null) {
+      // Configuration wasn't initialized until after the ToolRunner
+      // got us to this point. ToolRunner gave Sqoop itself a Conf
+      // though.
+      options.setConf(getConf());
+    }
+
+    try {
+      options = tool.parseArguments(args, null, options, false);
+      tool.appendArgs(this.childPrgmArgs);
+      tool.validateOptions(options);
+    } catch (Exception e) {
+      // Couldn't parse arguments.
+      // Log the stack trace for this exception
+      LOG.debug(e.getMessage(), e);
+      // Print exception message.
+      System.err.println(e.getMessage());
+      // Print the tool usage message and exit.
+      ToolOptions toolOpts = new ToolOptions();
+      tool.configureOptions(toolOpts);
+      tool.printHelp(toolOpts);
+      return 1; // Exit on exception here.
+    }
+
+    return tool.run(options);
+  }
+
+  /**
+   * SqoopTools sometimes pass arguments to a child program (e.g., mysqldump).
+   * Users can specify additional args to these programs by preceding the
+   * additional arguments with a standalone '--'; but
+   * ToolRunner/GenericOptionsParser will cull out this argument. We remove
+   * the child-program arguments in advance, and store them to be re-added
+   * later.
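+   * For example, given "sqoop import ... -- --lock-tables", the trailing
+   * "--lock-tables" is stashed and later passed to the child program
+   * (mysqldump, in the direct-mode MySQL case).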
+   * @param argv the argv in to the SqoopTool
+   * @return the argv with a "--" and any subsequent arguments removed.
+   */
+  private String [] stashChildPrgmArgs(String [] argv) {
+    for (int i = 0; i < argv.length; i++) {
+      if ("--".equals(argv[i])) {
+        this.childPrgmArgs = Arrays.copyOfRange(argv, i, argv.length);
+        return Arrays.copyOfRange(argv, 0, i);
+      }
+    }
+
+    // Didn't find child-program arguments.
+    return argv;
+  }
+
+  /**
+   * Given a Sqoop object and a set of arguments to deliver to
+   * its embedded SqoopTool, run the tool, wrapping the call to
+   * ToolRunner.
+   * This entry-point is preferred to ToolRunner.run() because
+   * it has a chance to stash child program arguments before
+   * GenericOptionsParser would remove them.
+   */
+  public static int runSqoop(Sqoop sqoop, String [] args) {
+    try {
+      String [] toolArgs = sqoop.stashChildPrgmArgs(args);
+      return ToolRunner.run(sqoop.getConf(), sqoop, toolArgs);
+    } catch (Exception e) {
+      LOG.error("Got exception running Sqoop: " + e.toString());
+      e.printStackTrace();
+      if (System.getProperty(SQOOP_RETHROW_PROPERTY) != null) {
+        throw new RuntimeException(e);
+      }
+      return 1;
+    }
+  }
+
+  /**
+   * Entry-point that parses the correct SqoopTool to use from the args,
+   * but does not call System.exit() as main() will.
+   */
+  public static int runTool(String [] args, Configuration conf) {
+    // Expand the options
+    String[] expandedArgs = null;
+    try {
+      expandedArgs = OptionsFileUtil.expandArguments(args);
+    } catch (Exception ex) {
+      LOG.error("Error while expanding arguments", ex);
+      System.err.println(ex.getMessage());
+      System.err.println("Try 'sqoop help' for usage.");
+      return 1;
+    }
+
+    String toolName = expandedArgs[0];
+    Configuration pluginConf = SqoopTool.loadPlugins(conf);
+    SqoopTool tool = SqoopTool.getTool(toolName);
+    if (null == tool) {
+      System.err.println("No such sqoop tool: " + toolName
+          + ". See 'sqoop help'.");
+      return 1;
+    }
+
+    Sqoop sqoop = new Sqoop(tool, pluginConf);
+    return runSqoop(sqoop,
+        Arrays.copyOfRange(expandedArgs, 1, expandedArgs.length));
+  }
+
+  /**
+   * Entry-point that parses the correct SqoopTool to use from the args,
+   * but does not call System.exit() as main() will.
+   */
+  public static int runTool(String [] args) {
+    return runTool(args, new Configuration());
+  }
+
+  public static void main(String [] args) {
+    if (args.length == 0) {
+      System.err.println("Try 'sqoop help' for usage.");
+      System.exit(1);
+    }
+
+    int ret = runTool(args);
+    System.exit(ret);
+  }
+}
+

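Sqoop.runTool() is the embeddable entry point shown above: it expands any --options-file arguments, resolves the tool by name, and delegates to runSqoop(), which stashes everything after a standalone "--" before ToolRunner/GenericOptionsParser can discard it. A minimal sketch of a programmatic invocation, assuming the Sqoop and Hadoop jars are on the classpath (the connect string and table name are placeholders):

    import org.apache.sqoop.Sqoop;

    public class SqoopDriver {
      public static void main(String[] args) {
        // Equivalent to the command line: sqoop import --connect ... --table ...
        int ret = Sqoop.runTool(new String[] {
            "import",
            "--connect", "jdbc:mysql://db.example.com/mydb", // placeholder
            "--table", "widgets",                            // placeholder
        });
        System.exit(ret);
      }
    }
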
Added: incubator/sqoop/trunk/src/java/org/apache/sqoop/SqoopOptions.java
URL: http://svn.apache.org/viewvc/incubator/sqoop/trunk/src/java/org/apache/sqoop/SqoopOptions.java?rev=1196486&view=auto
==============================================================================
--- incubator/sqoop/trunk/src/java/org/apache/sqoop/SqoopOptions.java (added)
+++ incubator/sqoop/trunk/src/java/org/apache/sqoop/SqoopOptions.java Wed Nov  2 07:48:56 2011
@@ -0,0 +1,1961 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.sqoop;
+
+import java.io.File;
+import java.lang.reflect.Field;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Iterator;
+import java.util.Map;
+import java.util.Properties;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
+
+import com.cloudera.sqoop.lib.DelimiterSet;
+import com.cloudera.sqoop.lib.LargeObjectLoader;
+import com.cloudera.sqoop.tool.SqoopTool;
+import com.cloudera.sqoop.util.RandomHash;
+import com.cloudera.sqoop.util.StoredAsProperty;
+
+/**
+ * Configurable state used by Sqoop tools.
+ */
+public class SqoopOptions implements Cloneable {
+
+  public static final Log LOG = LogFactory.getLog(SqoopOptions.class.getName());
+
+  /**
+   * Set to true in configuration if you want to put db passwords
+   * in the metastore.
+   */
+  public static final String METASTORE_PASSWORD_KEY =
+      "sqoop.metastore.client.record.password";
+
+  public static final boolean METASTORE_PASSWORD_DEFAULT = false;
+
+  /**
+   * Thrown when invalid cmdline options are given.
+   */
+  @SuppressWarnings("serial")
+  public static class InvalidOptionsException extends Exception {
+
+    private String message;
+
+    public InvalidOptionsException(final String msg) {
+      this.message = msg;
+    }
+
+    public String getMessage() {
+      return message;
+    }
+
+    public String toString() {
+      return getMessage();
+    }
+  }
+
+  /** Selects in-HDFS destination file format. */
+  public enum FileLayout {
+    TextFile,
+    SequenceFile,
+    AvroDataFile
+  }
+
+  /**
+   * Incremental imports support two modes:
+   * <ul>
+   * <li>new rows being appended to the end of a table with an
+   * incrementing id</li>
+   * <li>new data results in a date-last-modified column being
+   * updated to NOW(); Sqoop will pull all dirty rows in the next
+   * incremental import.</li>
+   * </ul>
+   */
+  public enum IncrementalMode {
+    None,
+    AppendRows,
+    DateLastModified,
+  }
+
+  // TODO(aaron): Adding something here? Add a setter and a getter.  Add a
+  // default value in initDefaults() if you need one.  If this value needs to
+  // be serialized in the metastore, it should be marked with
+  // @StoredAsProperty(), if it is an int, long, boolean, String, or Enum.
+  // Arrays and other "special" types should be added directly to the
+  // loadProperties() and writeProperties() methods. Then add command-line
+  // arguments in the appropriate tools. The names of all command-line args
+  // are stored as constants in BaseSqoopTool.
+
+  @StoredAsProperty("db.connect.string") private String connectString;
+  @StoredAsProperty("db.table") private String tableName;
+  private String [] columns; // Array stored as db.column.list.
+  @StoredAsProperty("db.username") private String username;
+  @StoredAsProperty("db.export.staging.table") private String stagingTableName;
+  @StoredAsProperty("db.clear.staging.table") private boolean clearStagingTable;
+  private Properties connectionParams; // Properties stored as db.connect.params
+
+  // May not be serialized, based on configuration.
+  // db.require.password is used to determine whether 'some' password is
+  // used. If so, it is stored as 'db.password'.
+  private String password;
+
+  @StoredAsProperty("null.string") private String nullStringValue;
+  @StoredAsProperty("input.null.string") private String inNullStringValue;
+  @StoredAsProperty("null.non-string") private String nullNonStringValue;
+  @StoredAsProperty("input.null.non-string")
+      private String inNullNonStringValue;
+
+  @StoredAsProperty("codegen.output.dir") private String codeOutputDir;
+  @StoredAsProperty("codegen.compile.dir") private String jarOutputDir;
+  // Boolean specifying whether jarOutputDir is a nonce tmpdir (true), or
+  // explicitly set by the user (false). If the former, disregard any value
+  // for jarOutputDir saved in the metastore.
+  @StoredAsProperty("codegen.auto.compile.dir") private boolean jarDirIsAuto;
+  private String hadoopHome; // not serialized to metastore.
+  @StoredAsProperty("db.split.column") private String splitByCol;
+  @StoredAsProperty("db.where.clause") private String whereClause;
+  @StoredAsProperty("db.query") private String sqlQuery;
+  @StoredAsProperty("db.query.boundary") private String boundaryQuery;
+  @StoredAsProperty("jdbc.driver.class") private String driverClassName;
+  @StoredAsProperty("hdfs.warehouse.dir") private String warehouseDir;
+  @StoredAsProperty("hdfs.target.dir") private String targetDir;
+  @StoredAsProperty("hdfs.append.dir") private boolean append;
+  @StoredAsProperty("hdfs.file.format") private FileLayout layout;
+  @StoredAsProperty("direct.import") private boolean direct; // "direct mode."
+  @StoredAsProperty("db.batch") private boolean batchMode;
+  private String tmpDir; // where temp data goes; usually /tmp; not serialized.
+  private String hiveHome; // not serialized to metastore.
+  @StoredAsProperty("hive.import") private boolean hiveImport;
+  @StoredAsProperty("hive.overwrite.table") private boolean overwriteHiveTable;
+  @StoredAsProperty("hive.fail.table.exists")
+  private boolean failIfHiveTableExists;
+  @StoredAsProperty("hive.table.name") private String hiveTableName;
+  @StoredAsProperty("hive.drop.delims") private boolean hiveDropDelims;
+  @StoredAsProperty("hive.delims.replacement")
+  private String hiveDelimsReplacement;
+  @StoredAsProperty("hive.partition.key") private String hivePartitionKey;
+  @StoredAsProperty("hive.partition.value") private String hivePartitionValue;
+
+  // User explicit mapping of types
+  private Properties mapColumnJava; // stored as map.column.java
+  private Properties mapColumnHive; // stored as map.column.hive
+
+  // An ordered list of column names denoting what order columns are
+  // serialized to a PreparedStatement from a generated record type.
+  // Not serialized to metastore.
+  private String [] dbOutColumns;
+
+  // package to prepend to auto-named classes.
+  @StoredAsProperty("codegen.java.packagename") private String packageName;
+
+  // package+class to apply to individual table import.
+  // also used as an *input* class with existingJarFile.
+  @StoredAsProperty("codegen.java.classname") private String className;
+
+  // Name of a jar containing existing table definition
+  // class to use.
+  @StoredAsProperty("codegen.jar.file") private String existingJarFile;
+
+  @StoredAsProperty("mapreduce.num.mappers") private int numMappers;
+  @StoredAsProperty("enable.compression") private boolean useCompression;
+  @StoredAsProperty("compression.codec") private String compressionCodec;
+
+  // In direct mode, open a new stream every X bytes.
+  @StoredAsProperty("import.direct.split.size") private long directSplitSize;
+
+  // Max size of an inline LOB; larger LOBs are written
+  // to external files on disk.
+  @StoredAsProperty("import.max.inline.lob.size") private long maxInlineLobSize;
+
+  // Max number 'n' of rows to fetch from the
+  // database when more rows are needed.
+  @StoredAsProperty("import.fetch.size") private Integer fetchSize;
+
+  // HDFS path to read from when performing an export
+  @StoredAsProperty("export.source.dir") private String exportDir;
+
+  // Column to use for the WHERE clause in an UPDATE-based export.
+  @StoredAsProperty("export.update.col") private String updateKeyCol;
+
+  /**
+   * Update mode option specifies how updates are performed when
+   * new rows are found with non-matching keys in database.
+   * It supports two modes:
+   * <ul>
+   * <li>UpdateOnly: This is the default. New rows are silently ignored.</li>
+   * <li>AllowInsert: New rows are inserted into the database.</li>
+   * </ul>
+   */
+  public enum UpdateMode {
+    UpdateOnly,
+    AllowInsert
+  }
+
+  @StoredAsProperty("export.new.update") private UpdateMode updateMode;
+
+  private DelimiterSet inputDelimiters; // codegen.input.delimiters.
+  private DelimiterSet outputDelimiters; // codegen.output.delimiters.
+  private boolean areDelimsManuallySet;
+
+  private Configuration conf;
+
+  public static final int DEFAULT_NUM_MAPPERS = 4;
+
+  private String [] extraArgs;
+
+  // HBase table to import into.
+  @StoredAsProperty("hbase.table") private String hbaseTable;
+
+  // Column family to prepend to inserted cols.
+  @StoredAsProperty("hbase.col.family") private String hbaseColFamily;
+
+  // Column of the input to use as the row key.
+  @StoredAsProperty("hbase.row.key.col") private String hbaseRowKeyCol;
+
+  // if true, create tables/col families.
+  @StoredAsProperty("hbase.create.table") private boolean hbaseCreateTable;
+
+  // col to filter on for incremental imports.
+  @StoredAsProperty("incremental.col") private String incrementalTestCol;
+  // incremental import mode we're using.
+  @StoredAsProperty("incremental.mode")
+  private IncrementalMode incrementalMode;
+  // What was the last-imported value of incrementalTestCol?
+  @StoredAsProperty("incremental.last.value")
+  private String incrementalLastValue;
+
+  // HDFS paths for "old" and "new" datasets in merge tool.
+  @StoredAsProperty("merge.old.path") private String mergeOldPath;
+  @StoredAsProperty("merge.new.path") private String mergeNewPath;
+
+  // "key" column for the merge operation.
+  @StoredAsProperty("merge.key.col") private String mergeKeyCol;
+
+  // These next two fields are not serialized to the metastore.
+  // If this SqoopOptions is created by reading a saved job, these will
+  // be populated by the JobStorage to facilitate updating the same
+  // job.
+  private String jobName;
+  private Map<String, String> jobStorageDescriptor;
+
+  // If we restore a job and then allow the user to apply arguments on
+  // top, we retain the version without the arguments in a reference to the
+  // 'parent' SqoopOptions instance, here.
+  private SqoopOptions parent;
+
+  // Nonce directory name. Generate one per process, lazily, if
+  // getNonceJarDir() is called. Not recorded in metadata. This is used as
+  // a temporary holding area for compilation work done by this process.
+  private static String curNonce;
+
+  // the connection manager fully qualified class name
+  @StoredAsProperty("connection.manager") private String connManagerClassName;
+
+  // The currently active tool. (Not saved in properties)
+  // Used to pass the SqoopTool instance in to mapreduce job configuration
+  // (JobBase, etc).
+  private SqoopTool activeSqoopTool;
+
+  public SqoopOptions() {
+    initDefaults(null);
+  }
+
+  public SqoopOptions(Configuration conf) {
+    initDefaults(conf);
+  }
+
+  /**
+   * Alternate SqoopOptions interface used mostly for unit testing.
+   * @param connect JDBC connect string to use
+   * @param table Table to read
+   */
+  public SqoopOptions(final String connect, final String table) {
+    initDefaults(null);
+
+    this.connectString = connect;
+    this.tableName = table;
+  }
+
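+  /**
+   * Read a boolean-valued property; "true", "yes", and "1"
+   * (case-insensitive) are all accepted as true.
+   */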
+  private boolean getBooleanProperty(Properties props, String propName,
+      boolean defaultValue) {
+    String str = props.getProperty(propName,
+        Boolean.toString(defaultValue)).toLowerCase();
+    return "true".equals(str) || "yes".equals(str) || "1".equals(str);
+  }
+
+  private long getLongProperty(Properties props, String propName,
+      long defaultValue) {
+    String str = props.getProperty(propName,
+        Long.toString(defaultValue)).toLowerCase();
+    try {
+      return Long.parseLong(str);
+    } catch (NumberFormatException nfe) {
+      LOG.warn("Could not parse integer value for config parameter "
+          + propName);
+      return defaultValue;
+    }
+  }
+
+  private int getIntProperty(Properties props, String propName,
+      int defaultVal) {
+    long longVal = getLongProperty(props, propName, defaultVal);
+    return (int) longVal;
+  }
+
+  private char getCharProperty(Properties props, String propName,
+      char defaultVal) {
+    int intVal = getIntProperty(props, propName, (int) defaultVal);
+    return (char) intVal;
+  }
+
+  private DelimiterSet getDelimiterProperties(Properties props,
+      String prefix, DelimiterSet defaults) {
+
+    if (null == defaults) {
+      defaults = new DelimiterSet();
+    }
+
+    char field = getCharProperty(props, prefix + ".field",
+        defaults.getFieldsTerminatedBy());
+    char record = getCharProperty(props, prefix + ".record",
+        defaults.getLinesTerminatedBy());
+    char enclose = getCharProperty(props, prefix + ".enclose",
+        defaults.getEnclosedBy());
+    char escape = getCharProperty(props, prefix + ".escape",
+        defaults.getEscapedBy());
+    boolean required = getBooleanProperty(props, prefix + ".enclose.required",
+        defaults.isEncloseRequired());
+
+    return new DelimiterSet(field, record, enclose, escape, required);
+  }
+
+  private void setDelimiterProperties(Properties props,
+      String prefix, DelimiterSet values) {
+    putProperty(props, prefix + ".field",
+        Integer.toString((int) values.getFieldsTerminatedBy()));
+    putProperty(props, prefix + ".record",
+        Integer.toString((int) values.getLinesTerminatedBy()));
+    putProperty(props, prefix + ".enclose",
+        Integer.toString((int) values.getEnclosedBy()));
+    putProperty(props, prefix + ".escape",
+        Integer.toString((int) values.getEscapedBy()));
+    putProperty(props, prefix + ".enclose.required",
+        Boolean.toString(values.isEncloseRequired()));
+  }
+
+  /** Take a comma-delimited list of input and split the elements
+   * into an output array. */
+  private String [] listToArray(String strList) {
+    return strList.split(",");
+  }
+
+  private String arrayToList(String [] array) {
+    if (null == array) {
+      return null;
+    }
+
+    StringBuilder sb = new StringBuilder();
+    boolean first = true;
+    for (String elem : array) {
+      if (!first) {
+        sb.append(",");
+      }
+      sb.append(elem);
+      first = false;
+    }
+
+    return sb.toString();
+  }
+
+  /**
+   * A put() method for Properties that is tolerant of 'null' values.
+   * If a null value is specified, the property is unset.
+   */
+  private void putProperty(Properties props, String k, String v) {
+    if (null == v) {
+      props.remove(k);
+    } else {
+      props.setProperty(k, v);
+    }
+  }
+
+  /**
+   * Given a property prefix that denotes a set of numbered properties,
+   * return an array containing all the properties.
+   *
+   * For instance, if prefix is "foo", then return properties "foo.0",
+   * "foo.1", "foo.2", and so on as an array. If no such properties
+   * exist, return 'defaults'.
+   */
+  private String [] getArgArrayProperty(Properties props, String prefix,
+      String [] defaults) {
+    int cur = 0;
+    ArrayList<String> al = new ArrayList<String>();
+    while (true) {
+      String curProp = prefix + "." + cur;
+      String curStr = props.getProperty(curProp, null);
+      if (null == curStr) {
+        break;
+      }
+
+      al.add(curStr);
+      cur++;
+    }
+
+    if (cur == 0) {
+      // Couldn't find an array here; return the defaults.
+      return defaults;
+    }
+
+    return al.toArray(new String[0]);
+  }
+
+  private void setArgArrayProperties(Properties props, String prefix,
+      String [] values) {
+    if (null == values) {
+      return;
+    }
+
+    for (int i = 0; i < values.length; i++) {
+      putProperty(props, prefix + "." + i, values[i]);
+    }
+  }
+
+  /**
+   * This method encodes the property key values found in the provided
+   * properties instance <tt>values</tt> into another properties instance
+   * <tt>props</tt>. The specified <tt>prefix</tt> is used as a namespace
+   * qualifier for keys when inserting. This allows easy introspection of the
+   * property key values in <tt>props</tt> instance to later separate out all
+   * the properties that belong to the <tt>values</tt> instance.
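+   *
+   * For example, with prefix "db.connect.params", a contained property
+   * "characterEncoding=utf8" is stored as
+   * "db.connect.params.characterEncoding=utf8".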
+   * @param props the container properties instance
+   * @param prefix the prefix for qualifying contained property keys.
+   * @param values the contained properties instance, all of whose elements will
+   *               be added to the container properties instance.
+   *
+   * @see #getPropertiesAsNestedProperties(Properties, String)
+   */
+  private void setPropertiesAsNestedProperties(Properties props,
+          String prefix, Properties values) {
+    String nestedPropertyPrefix = prefix + ".";
+    if (null == values || values.size() == 0) {
+      Iterator<String> it = props.stringPropertyNames().iterator();
+      while (it.hasNext()) {
+        String name = it.next();
+        if (name.startsWith(nestedPropertyPrefix)) {
+          props.remove(name);
+        }
+      }
+    } else {
+      Iterator<String> it = values.stringPropertyNames().iterator();
+      while (it.hasNext()) {
+        String name = it.next();
+        putProperty(props,
+                nestedPropertyPrefix + name, values.getProperty(name));
+      }
+    }
+  }
+
+  /**
+   * This method decodes the property key values found in the provided
+   * properties instance <tt>props</tt> that have keys beginning with the
+   * given prefix. Matching elements are copied into the returned instance
+   * with their prefix dropped; <tt>props</tt> itself is not modified.
+   * @param props the properties container
+   * @param prefix the prefix qualifying properties that need to be removed
+   * @return a new properties instance that contains all matching elements from
+   * the container properties.
+   */
+  private Properties getPropertiesAsNestedProperties(
+          Properties props, String prefix) {
+    Properties nestedProps = new Properties();
+    String nestedPropertyPrefix = prefix + ".";
+    int index = nestedPropertyPrefix.length();
+    if (props != null && props.size() > 0) {
+      Iterator<String> it = props.stringPropertyNames().iterator();
+      while (it.hasNext()) {
+        String name = it.next();
+        if (name.startsWith(nestedPropertyPrefix)) {
+          String shortName = name.substring(index);
+          nestedProps.put(shortName, props.get(name));
+        }
+      }
+    }
+    return nestedProps;
+  }
+
+  /**
+   * Given a set of properties, load this into the current SqoopOptions
+   * instance.
+   */
+  @SuppressWarnings("unchecked")
+  public void loadProperties(Properties props) {
+
+    try {
+      Field [] fields = getClass().getDeclaredFields();
+      for (Field f : fields) {
+        if (f.isAnnotationPresent(StoredAsProperty.class)) {
+          Class typ = f.getType();
+          StoredAsProperty storedAs = f.getAnnotation(StoredAsProperty.class);
+          String propName = storedAs.value();
+
+          if (typ.equals(int.class)) {
+            f.setInt(this,
+                getIntProperty(props, propName, f.getInt(this)));
+          } else if (typ.equals(boolean.class)) {
+            f.setBoolean(this,
+                getBooleanProperty(props, propName, f.getBoolean(this)));
+          } else if (typ.equals(long.class)) {
+            f.setLong(this,
+                getLongProperty(props, propName, f.getLong(this)));
+          } else if (typ.equals(String.class)) {
+            f.set(this, props.getProperty(propName, (String) f.get(this)));
+          } else if (typ.equals(Integer.class)) {
+            String value = props.getProperty(
+                propName,
+                f.get(this) == null ? "null" : f.get(this).toString());
+            f.set(this, value.equals("null") ? null : new Integer(value));
+          } else if (typ.isEnum()) {
+            f.set(this, Enum.valueOf(typ,
+                props.getProperty(propName, f.get(this).toString())));
+          } else {
+            throw new RuntimeException("Could not retrieve property "
+                + propName + " for type: " + typ);
+          }
+        }
+      }
+    } catch (IllegalAccessException iae) {
+      throw new RuntimeException("Illegal access to field in property setter",
+          iae);
+    }
+
+    // Now load properties that were stored with special types, or require
+    // additional logic to set.
+
+    if (getBooleanProperty(props, "db.require.password", false)) {
+      // The user's password was stripped out from the metastore.
+      // Require that the user enter it now.
+      setPasswordFromConsole();
+    } else {
+      this.password = props.getProperty("db.password", this.password);
+    }
+
+    if (this.jarDirIsAuto) {
+      // We memoized a user-specific nonce dir for compilation to the data
+      // store.  Disregard that setting and create a new nonce dir.
+      String localUsername = System.getProperty("user.name", "unknown");
+      this.jarOutputDir = getNonceJarDir(tmpDir + "sqoop-" + localUsername
+          + "/compile");
+    }
+
+    String colListStr = props.getProperty("db.column.list", null);
+    if (null != colListStr) {
+      this.columns = listToArray(colListStr);
+    }
+
+    this.inputDelimiters = getDelimiterProperties(props,
+        "codegen.input.delimiters", this.inputDelimiters);
+    this.outputDelimiters = getDelimiterProperties(props,
+        "codegen.output.delimiters", this.outputDelimiters);
+
+    this.extraArgs = getArgArrayProperty(props, "tool.arguments",
+        this.extraArgs);
+
+    this.connectionParams =
+        getPropertiesAsNestedProperties(props, "db.connect.params");
+
+    // Loading user mapping
+    this.mapColumnHive =
+            getPropertiesAsNestedProperties(props, "map.column.hive");
+    this.mapColumnJava =
+            getPropertiesAsNestedProperties(props, "map.column.java");
+
+    // Delimiters were previously memoized; don't let the tool override
+    // them with defaults.
+    this.areDelimsManuallySet = true;
+  }
+
+  /**
+   * Return a Properties instance that encapsulates all the "sticky"
+   * state of this SqoopOptions that should be written to a metastore
+   * to restore the job later.
+   */
+  public Properties writeProperties() {
+    Properties props = new Properties();
+
+    try {
+      Field [] fields = getClass().getDeclaredFields();
+      for (Field f : fields) {
+        if (f.isAnnotationPresent(StoredAsProperty.class)) {
+          Class typ = f.getType();
+          StoredAsProperty storedAs = f.getAnnotation(StoredAsProperty.class);
+          String propName = storedAs.value();
+
+          if (typ.equals(int.class)) {
+            putProperty(props, propName, Integer.toString(f.getInt(this)));
+          } else if (typ.equals(boolean.class)) {
+            putProperty(props, propName, Boolean.toString(f.getBoolean(this)));
+          } else if (typ.equals(long.class)) {
+            putProperty(props, propName, Long.toString(f.getLong(this)));
+          } else if (typ.equals(String.class)) {
+            putProperty(props, propName, (String) f.get(this));
+          } else if (typ.equals(Integer.class)) {
+            putProperty(
+                props,
+                propName,
+                f.get(this) == null ? "null" : f.get(this).toString());
+          } else if (typ.isEnum()) {
+            putProperty(props, propName, f.get(this).toString());
+          } else {
+            throw new RuntimeException("Could not set property "
+                + propName + " for type: " + typ);
+          }
+        }
+      }
+    } catch (IllegalAccessException iae) {
+      throw new RuntimeException("Illegal access to field in property setter",
+          iae);
+    }
+
+    if (this.getConf().getBoolean(
+        METASTORE_PASSWORD_KEY, METASTORE_PASSWORD_DEFAULT)) {
+      // If the user specifies, we may store the password in the metastore.
+      putProperty(props, "db.password", this.password);
+      putProperty(props, "db.require.password", "false");
+    } else if (this.password != null) {
+      // Otherwise, if the user has set a password, we just record
+      // a flag stating that the password will need to be reentered.
+      putProperty(props, "db.require.password", "true");
+    } else {
+      // No password saved or required.
+      putProperty(props, "db.require.password", "false");
+    }
+
+    putProperty(props, "db.column.list", arrayToList(this.columns));
+    setDelimiterProperties(props, "codegen.input.delimiters",
+        this.inputDelimiters);
+    setDelimiterProperties(props, "codegen.output.delimiters",
+        this.outputDelimiters);
+    setArgArrayProperties(props, "tool.arguments", this.extraArgs);
+
+    setPropertiesAsNestedProperties(props,
+            "db.connect.params", this.connectionParams);
+
+    setPropertiesAsNestedProperties(props,
+            "map.column.hive", this.mapColumnHive);
+    setPropertiesAsNestedProperties(props,
+            "map.column.java", this.mapColumnJava);
+    return props;
+  }
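+  // Note that writeProperties() and loadProperties() are designed to
+  // round-trip: for example, newOpts.loadProperties(oldOpts.writeProperties())
+  // restores the @StoredAsProperty fields of oldOpts onto newOpts (the
+  // password is only carried over when
+  // sqoop.metastore.client.record.password is set).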
+
+  @Override
+  public Object clone() {
+    try {
+      SqoopOptions other = (SqoopOptions) super.clone();
+      if (null != columns) {
+        other.columns = Arrays.copyOf(columns, columns.length);
+      }
+
+      if (null != dbOutColumns) {
+        other.dbOutColumns = Arrays.copyOf(dbOutColumns, dbOutColumns.length);
+      }
+
+      if (null != inputDelimiters) {
+        other.inputDelimiters = (DelimiterSet) inputDelimiters.clone();
+      }
+
+      if (null != outputDelimiters) {
+        other.outputDelimiters = (DelimiterSet) outputDelimiters.clone();
+      }
+
+      if (null != conf) {
+        other.conf = new Configuration(conf);
+      }
+
+      if (null != extraArgs) {
+        other.extraArgs = Arrays.copyOf(extraArgs, extraArgs.length);
+      }
+
+      if (null != connectionParams) {
+        other.setConnectionParams(this.connectionParams);
+      }
+
+      if (null != mapColumnHive) {
+        other.mapColumnHive = (Properties) this.mapColumnHive.clone();
+      }
+
+      if (null != mapColumnJava) {
+        other.mapColumnJava = (Properties) this.mapColumnJava.clone();
+      }
+
+      return other;
+    } catch (CloneNotSupportedException cnse) {
+      // Shouldn't happen.
+      return null;
+    }
+  }
+
+  /**
+   * @return the temp directory to use; this is guaranteed to end with
+   * the file separator character (e.g., '/').
+   */
+  public String getTempDir() {
+    return this.tmpDir;
+  }
+
+  /**
+   * Return the name of a directory that does not exist before
+   * calling this method, and does exist afterward. We should be
+   * the only client of this directory. If this directory is not
+   * used during the lifetime of the JVM, schedule it to be removed
+   * when the JVM exits.
+   */
+  private static String getNonceJarDir(String tmpBase) {
+
+    // Make sure we don't loop forever in the event of a permission error.
+    final int MAX_DIR_CREATE_ATTEMPTS = 32;
+
+    if (null != curNonce) {
+      return curNonce;
+    }
+
+    File baseDir = new File(tmpBase);
+    File hashDir = null;
+
+    for (int attempts = 0; attempts < MAX_DIR_CREATE_ATTEMPTS; attempts++) {
+      hashDir = new File(baseDir, RandomHash.generateMD5String());
+      while (hashDir.exists()) {
+        hashDir = new File(baseDir, RandomHash.generateMD5String());
+      }
+
+      if (hashDir.mkdirs()) {
+        // We created the directory. Use it.
+        // If this directory is not actually filled with files, delete it
+        // when the JVM quits.
+        hashDir.deleteOnExit();
+        break;
+      }
+    }
+
+    if (hashDir == null || !hashDir.exists()) {
+      throw new RuntimeException("Could not create temporary directory: "
+          + hashDir + "; check for a directory permissions issue on /tmp.");
+    }
+
+    LOG.debug("Generated nonce dir: " + hashDir.toString());
+    SqoopOptions.curNonce = hashDir.toString();
+    return SqoopOptions.curNonce;
+  }
+
+  /**
+   * Reset the nonce directory and force a new one to be generated. This
+   * method is intended to be used only by multiple unit tests that want
+   * to isolate themselves from one another. It should not be called
+   * during normal Sqoop execution.
+   */
+  public static void clearNonceDir() {
+    LOG.warn("Clearing nonce directory");
+    SqoopOptions.curNonce = null;
+  }
+
+  private void initDefaults(Configuration baseConfiguration) {
+    // first, set the true defaults if nothing else happens.
+    // default action is to run the full pipeline.
+    this.hadoopHome = System.getenv("HADOOP_HOME");
+
+    // Set this with $HIVE_HOME, but -Dhive.home can override.
+    this.hiveHome = System.getenv("HIVE_HOME");
+    this.hiveHome = System.getProperty("hive.home", this.hiveHome);
+
+    this.inputDelimiters = new DelimiterSet(
+        DelimiterSet.NULL_CHAR, DelimiterSet.NULL_CHAR,
+        DelimiterSet.NULL_CHAR, DelimiterSet.NULL_CHAR, false);
+    this.outputDelimiters = new DelimiterSet();
+
+    // Set this to cwd, but -Dsqoop.src.dir can override.
+    this.codeOutputDir = System.getProperty("sqoop.src.dir", ".");
+
+    String myTmpDir = System.getProperty("test.build.data", "/tmp/");
+    if (!myTmpDir.endsWith(File.separator)) {
+      myTmpDir = myTmpDir + File.separator;
+    }
+
+    this.tmpDir = myTmpDir;
+    String localUsername = System.getProperty("user.name", "unknown");
+    this.jarOutputDir = getNonceJarDir(tmpDir + "sqoop-" + localUsername
+        + "/compile");
+    this.jarDirIsAuto = true;
+    this.layout = FileLayout.TextFile;
+
+    this.areDelimsManuallySet = false;
+
+    this.numMappers = DEFAULT_NUM_MAPPERS;
+    this.useCompression = false;
+    this.compressionCodec = null;
+    this.directSplitSize = 0;
+
+    this.maxInlineLobSize = LargeObjectLoader.DEFAULT_MAX_LOB_LENGTH;
+
+    // Don't set a default value for fetchsize. This allows a JDBCManager to
+    // provide a database-specific default, if no value is provided by the
+    // user.
+    this.fetchSize = null;
+
+    if (null == baseConfiguration) {
+      this.conf = new Configuration();
+    } else {
+      this.conf = baseConfiguration;
+    }
+
+    this.extraArgs = null;
+
+    this.dbOutColumns = null;
+
+    this.incrementalMode = IncrementalMode.None;
+
+    this.updateMode = UpdateMode.UpdateOnly;
+
+    // Creating instances for user specific mapping
+    this.mapColumnHive = new Properties();
+    this.mapColumnJava = new Properties();
+  }
+
+  /**
+   * Given a string containing a single character or an escape sequence
+   * representing a char, return that char itself.
+   *
+   * Normal literal characters return themselves: "x" -&gt; 'x', etc.
+   * Strings containing a '\' followed by one of t, r, n, or b escape to the
+   * usual character as seen in Java: "\n" -&gt; (newline), etc.
+   *
+   * Strings like "\0ooo" return the character specified by the octal sequence
+   * 'ooo'. Strings like "\0xhhh" or "\0Xhhh" return the character specified by
+   * the hex sequence 'hhh'.
+   *
+   * If the input string contains leading or trailing spaces, these are
+   * ignored.
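+   *
+   * For example, toChar("\\t") returns '\t', toChar("\\0x41") returns 'A',
+   * and toChar("\\061") returns '1' (octal 61).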
+   */
+  public static char toChar(String charish) throws InvalidOptionsException {
+    if (null == charish || charish.length() == 0) {
+      throw new InvalidOptionsException("Character argument expected."
+          + "\nTry --help for usage instructions.");
+    }
+
+    if (charish.startsWith("\\0x") || charish.startsWith("\\0X")) {
+      if (charish.length() == 3) {
+        throw new InvalidOptionsException(
+            "Base-16 value expected for character argument."
+            + "\nTry --help for usage instructions.");
+      } else {
+        String valStr = charish.substring(3);
+        int val = Integer.parseInt(valStr, 16);
+        return (char) val;
+      }
+    } else if (charish.startsWith("\\0")) {
+      if (charish.equals("\\0")) {
+        // it's just '\0', which we can take as shorthand for nul.
+        return DelimiterSet.NULL_CHAR;
+      } else {
+        // it's an octal value.
+        String valStr = charish.substring(2);
+        int val = Integer.parseInt(valStr, 8);
+        return (char) val;
+      }
+    } else if (charish.startsWith("\\")) {
+      if (charish.length() == 1) {
+        // it's just a '\'. Keep it literal.
+        return '\\';
+      } else if (charish.length() > 2) {
+        // we don't have any 3+ char escape strings.
+        throw new InvalidOptionsException(
+            "Cannot understand character argument: " + charish
+            + "\nTry --help for usage instructions.");
+      } else {
+        // this is some sort of normal 1-character escape sequence.
+        char escapeWhat = charish.charAt(1);
+        switch(escapeWhat) {
+        case 'b':
+          return '\b';
+        case 'n':
+          return '\n';
+        case 'r':
+          return '\r';
+        case 't':
+          return '\t';
+        case '\"':
+          return '\"';
+        case '\'':
+          return '\'';
+        case '\\':
+          return '\\';
+        default:
+          throw new InvalidOptionsException(
+              "Cannot understand character argument: " + charish
+              + "\nTry --help for usage instructions.");
+        }
+      }
+    } else {
+      // it's a normal character.
+      if (charish.length() > 1) {
+        LOG.warn("Character argument " + charish + " has multiple characters; "
+            + "only the first will be used.");
+      }
+
+      return charish.charAt(0);
+    }
+  }
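+
+  // Editor's sketch (illustrative only, not part of this commit): given
+  // the escape rules documented above, toChar() should behave as follows.
+  //
+  //   toChar(",")      == ','                     // literal character
+  //   toChar("\\t")    == '\t'                    // Java-style escape
+  //   toChar("\\0")    == DelimiterSet.NULL_CHAR  // shorthand for nul
+  //   toChar("\\0101") == 'A'                     // octal 101 == 65
+  //   toChar("\\0x41") == 'A'                     // hex 41 == 65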
+
+  /**
+   * Get the temporary directory; guaranteed to end in File.separator
+   * (e.g., '/').
+   */
+  public String getTmpDir() {
+    return tmpDir;
+  }
+
+  public void setTmpDir(String tmp) {
+    this.tmpDir = tmp;
+  }
+
+  public String getConnectString() {
+    return connectString;
+  }
+
+  public void setConnectString(String connectStr) {
+    this.connectString = connectStr;
+  }
+
+  public String getTableName() {
+    return tableName;
+  }
+
+  public void setTableName(String table) {
+    this.tableName = table;
+  }
+
+  public String getStagingTableName() {
+    return stagingTableName;
+  }
+
+  public void setStagingTableName(String stagingTable) {
+    this.stagingTableName = stagingTable;
+  }
+
+  public boolean doClearStagingTable() {
+    return clearStagingTable;
+  }
+
+  public void setClearStagingTable(boolean clear) {
+    clearStagingTable = clear;
+  }
+
+  public String getExportDir() {
+    return exportDir;
+  }
+
+  public void setExportDir(String dir) {
+    this.exportDir = dir;
+  }
+
+  public String getExistingJarName() {
+    return existingJarFile;
+  }
+
+  public void setExistingJarName(String jarFile) {
+    this.existingJarFile = jarFile;
+  }
+
+  public String[] getColumns() {
+    if (null == columns) {
+      return null;
+    } else {
+      return Arrays.copyOf(columns, columns.length);
+    }
+  }
+
+  public void setColumns(String [] cols) {
+    if (null == cols) {
+      this.columns = null;
+    } else {
+      this.columns = Arrays.copyOf(cols, cols.length);
+    }
+  }
+
+  public String getSplitByCol() {
+    return splitByCol;
+  }
+
+  public void setSplitByCol(String splitBy) {
+    this.splitByCol = splitBy;
+  }
+
+  public String getWhereClause() {
+    return whereClause;
+  }
+
+  public void setWhereClause(String where) {
+    this.whereClause = where;
+  }
+
+  public String getUsername() {
+    return username;
+  }
+
+  public void setUsername(String user) {
+    this.username = user;
+  }
+
+  public String getPassword() {
+    return password;
+  }
+
+  protected void parseColumnMapping(String mapping,
+          Properties output) {
+    output.clear();
+    String[] maps = mapping.split(",");
+    for(String map : maps) {
+      String[] details = map.split("=");
+      output.put(details[0], details[1]);
+    }
+  }
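+
+  // Editor's sketch (illustrative only, not part of this commit): the
+  // parser above assumes well-formed "col=type" pairs and does not
+  // validate them. For example:
+  //
+  //   Properties p = new Properties();
+  //   parseColumnMapping("id=Long,name=String", p);
+  //   p.getProperty("id");    // "Long"
+  //   p.getProperty("name");  // "String"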
+
+  public void setMapColumnHive(String mapColumn) {
+    parseColumnMapping(mapColumn, mapColumnHive);
+  }
+
+  public void setMapColumn(String mapColumn) {
+    parseColumnMapping(mapColumn, mapColumnJava);
+  }
+
+  public Properties getMapColumnHive() {
+    return mapColumnHive;
+  }
+
+  public Properties getMapColumnJava() {
+    return mapColumnJava;
+  }
+
+  /**
+   * Allow the user to enter their password on the console without echoing
+   * characters.
+   * @return the password as a string
+   */
+  private String securePasswordEntry() {
+    return new String(System.console().readPassword("Enter password: "));
+  }
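+
+  // Editor's note (not part of this commit): System.console() returns null
+  // when no interactive console is attached (e.g. stdin is redirected), so
+  // a null-safe variant might look like:
+  //
+  //   java.io.Console console = System.console();
+  //   if (null == console) {
+  //     throw new RuntimeException("No console available for password.");
+  //   }
+  //   return new String(console.readPassword("Enter password: "));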
+
+  /**
+   * Set the password in this SqoopOptions from the console without
+   * echoing characters.
+   */
+  public void setPasswordFromConsole() {
+    this.password = securePasswordEntry();
+  }
+
+  public void setPassword(String pass) {
+    this.password = pass;
+  }
+
+  public boolean isDirect() {
+    return direct;
+  }
+
+  public void setDirectMode(boolean isDirect) {
+    this.direct = isDirect;
+  }
+
+  /**
+   * @return true if underlying statements are to be executed in batch mode,
+   * or false if they are to be executed in a single multirow statement.
+   */
+  public boolean isBatchMode() {
+    return batchMode;
+  }
+
+  public void setBatchMode(boolean mode) {
+    this.batchMode = mode;
+  }
+
+  /**
+   * @return the number of map tasks to use for import.
+   */
+  public int getNumMappers() {
+    return this.numMappers;
+  }
+
+  public void setNumMappers(int m) {
+    this.numMappers = m;
+  }
+
+  /**
+   * @return the user-specified absolute class name for the table.
+   */
+  public String getClassName() {
+    return className;
+  }
+
+  public void setClassName(String name) {
+    this.className = name;
+  }
+
+  /**
+   * @return the user-specified package to prepend to table names via
+   * --package-name.
+   */
+  public String getPackageName() {
+    return packageName;
+  }
+
+  public void setPackageName(String name) {
+    this.packageName = name;
+  }
+
+  public String getHiveHome() {
+    return hiveHome;
+  }
+
+  public void setHiveHome(String home) {
+    this.hiveHome = home;
+  }
+
+  /** @return true if we should import the table into Hive. */
+  public boolean doHiveImport() {
+    return hiveImport;
+  }
+
+  public void setHiveImport(boolean doImport) {
+    this.hiveImport = doImport;
+  }
+
+  /**
+   * @return true if an existing Hive table should be overwritten.
+   */
+  public boolean doOverwriteHiveTable() {
+    return overwriteHiveTable;
+  }
+
+  public void setOverwriteHiveTable(boolean overwrite) {
+    this.overwriteHiveTable = overwrite;
+  }
+
+  /**
+   * @return true if imported string fields should have Hive delimiter
+   * characters dropped.
+   */
+  public boolean doHiveDropDelims() {
+    return hiveDropDelims;
+  }
+
+  public void setHiveDropDelims(boolean dropHiveDelims) {
+    this.hiveDropDelims = dropHiveDelims;
+  }
+
+  /**
+   * @return the user-specified replacement string for Hive delimiters.
+   */
+  public String getHiveDelimsReplacement() {
+    return hiveDelimsReplacement;
+  }
+
+  public void setHiveDelimsReplacement(String replacement) {
+    this.hiveDelimsReplacement = replacement;
+  }
+
+  /**
+   * @return true if Sqoop should fail during Hive table creation when the
+   *         target table already exists.
+   */
+  public boolean doFailIfHiveTableExists() {
+    return failIfHiveTableExists;
+  }
+
+  public void setFailIfHiveTableExists(boolean fail) {
+    this.failIfHiveTableExists = fail;
+  }
+
+  /**
+   * @return location where .java files go; guaranteed to end with '/'.
+   */
+  public String getCodeOutputDir() {
+    if (codeOutputDir.endsWith(File.separator)) {
+      return codeOutputDir;
+    } else {
+      return codeOutputDir + File.separator;
+    }
+  }
+
+  public void setCodeOutputDir(String outputDir) {
+    this.codeOutputDir = outputDir;
+  }
+
+  /**
+   * @return location where .jar and .class files go; guaranteed to end with
+   * '/'.
+   */
+  public String getJarOutputDir() {
+    if (jarOutputDir.endsWith(File.separator)) {
+      return jarOutputDir;
+    } else {
+      return jarOutputDir + File.separator;
+    }
+  }
+
+  public void setJarOutputDir(String outDir) {
+    this.jarOutputDir = outDir;
+    this.jarDirIsAuto = false;
+  }
+
+  /**
+   * Return the value of $HADOOP_HOME.
+   * @return $HADOOP_HOME, or null if it's not set.
+   */
+  public String getHadoopHome() {
+    return hadoopHome;
+  }
+
+  public void setHadoopHome(String home) {
+    this.hadoopHome = home;
+  }
+
+  /**
+   * @return a SQL command to execute and exit with.
+   */
+  public String getSqlQuery() {
+    return sqlQuery;
+  }
+
+  public void setSqlQuery(String sqlStatement) {
+    this.sqlQuery = sqlStatement;
+  }
+
+  public String getBoundaryQuery() {
+    return boundaryQuery;
+  }
+
+  public void setBoundaryQuery(String sqlStatement) {
+    boundaryQuery = sqlStatement;
+  }
+
+  /**
+   * @return The JDBC driver class name specified with --driver.
+   */
+  public String getDriverClassName() {
+    return driverClassName;
+  }
+
+  public void setDriverClassName(String driverClass) {
+    this.driverClassName = driverClass;
+  }
+
+  /**
+   * @return the base destination path for table uploads.
+   */
+  public String getWarehouseDir() {
+    return warehouseDir;
+  }
+
+  public void setWarehouseDir(String warehouse) {
+    this.warehouseDir = warehouse;
+  }
+
+  public String getTargetDir() {
+    return this.targetDir;
+  }
+
+  public void setTargetDir(String dir) {
+    this.targetDir = dir;
+  }
+
+  public void setAppendMode(boolean doAppend) {
+    this.append = doAppend;
+  }
+
+  public boolean isAppendMode() {
+    return this.append;
+  }
+
+  /**
+   * @return the destination file format
+   */
+  public FileLayout getFileLayout() {
+    return this.layout;
+  }
+
+  public void setFileLayout(FileLayout fileLayout) {
+    this.layout = fileLayout;
+  }
+
+  /**
+   * @return the field delimiter to use when parsing lines. Defaults to the
+   * field delim to use when printing lines.
+   */
+  public char getInputFieldDelim() {
+    char f = inputDelimiters.getFieldsTerminatedBy();
+    if (f == DelimiterSet.NULL_CHAR) {
+      return this.outputDelimiters.getFieldsTerminatedBy();
+    } else {
+      return f;
+    }
+  }
+
+  /**
+   * Set the field delimiter to use when parsing lines.
+   */
+  public void setInputFieldsTerminatedBy(char c) {
+    this.inputDelimiters.setFieldsTerminatedBy(c);
+  }
+
+  /**
+   * @return the record delimiter to use when parsing lines. Defaults to the
+   * record delim to use when printing lines.
+   */
+  public char getInputRecordDelim() {
+    char r = inputDelimiters.getLinesTerminatedBy();
+    if (r == DelimiterSet.NULL_CHAR) {
+      return this.outputDelimiters.getLinesTerminatedBy();
+    } else {
+      return r;
+    }
+  }
+
+  /**
+   * Set the record delimiter to use when parsing lines.
+   */
+  public void setInputLinesTerminatedBy(char c) {
+    this.inputDelimiters.setLinesTerminatedBy(c);
+  }
+
+  /**
+   * @return the character that may enclose fields when parsing lines.
+   * Defaults to the enclosing-char to use when printing lines.
+   */
+  public char getInputEnclosedBy() {
+    char c = inputDelimiters.getEnclosedBy();
+    if (c == DelimiterSet.NULL_CHAR) {
+      return this.outputDelimiters.getEnclosedBy();
+    } else {
+      return c;
+    }
+  }
+
+  /**
+   * Set the enclosed-by character to use when parsing lines.
+   */
+  public void setInputEnclosedBy(char c) {
+    this.inputDelimiters.setEnclosedBy(c);
+  }
+
+  /**
+   * @return the escape character to use when parsing lines. Defaults to the
+   * escape character used when printing lines.
+   */
+  public char getInputEscapedBy() {
+    char c = inputDelimiters.getEscapedBy();
+    if (c == DelimiterSet.NULL_CHAR) {
+      return this.outputDelimiters.getEscapedBy();
+    } else {
+      return c;
+    }
+  }
+
+  /**
+   * Set the escaped-by character to use when parsing lines.
+   */
+  public void setInputEscapedBy(char c) {
+    this.inputDelimiters.setEscapedBy(c);
+  }
+
+  /**
+   * @return true if fields must be enclosed by the --enclosed-by character
+   * when parsing.  Defaults to false. Set true when --input-enclosed-by is
+   * used.
+   */
+  public boolean isInputEncloseRequired() {
+    char c = this.inputDelimiters.getEnclosedBy();
+    if (c == DelimiterSet.NULL_CHAR) {
+      return this.outputDelimiters.isEncloseRequired();
+    } else {
+      return this.inputDelimiters.isEncloseRequired();
+    }
+  }
+
+  /**
+   * If true, then all input fields are expected to be enclosed by the
+   * enclosed-by character when parsing.
+   */
+  public void setInputEncloseRequired(boolean required) {
+    this.inputDelimiters.setEncloseRequired(required);
+  }
+
+  /**
+   * @return the character to print between fields when importing them to
+   * text.
+   */
+  public char getOutputFieldDelim() {
+    return this.outputDelimiters.getFieldsTerminatedBy();
+  }
+
+  /**
+   * Set the field delimiter to use when formatting lines.
+   */
+  public void setFieldsTerminatedBy(char c) {
+    this.outputDelimiters.setFieldsTerminatedBy(c);
+  }
+
+
+  /**
+   * @return the character to print between records when importing them to
+   * text.
+   */
+  public char getOutputRecordDelim() {
+    return this.outputDelimiters.getLinesTerminatedBy();
+  }
+
+  /**
+   * Set the record delimiter to use when formatting lines.
+   */
+  public void setLinesTerminatedBy(char c) {
+    this.outputDelimiters.setLinesTerminatedBy(c);
+  }
+
+  /**
+   * @return a character which may enclose the contents of fields when
+   * imported to text.
+   */
+  public char getOutputEnclosedBy() {
+    return this.outputDelimiters.getEnclosedBy();
+  }
+
+  /**
+   * Set the enclosed-by character to use when formatting lines.
+   */
+  public void setEnclosedBy(char c) {
+    this.outputDelimiters.setEnclosedBy(c);
+  }
+
+  /**
+   * @return a character which signifies an escape sequence when importing to
+   * text.
+   */
+  public char getOutputEscapedBy() {
+    return this.outputDelimiters.getEscapedBy();
+  }
+
+  /**
+   * Set the escaped-by character to use when formatting lines.
+   */
+  public void setEscapedBy(char c) {
+    this.outputDelimiters.setEscapedBy(c);
+  }
+
+  /**
+   * @return true if fields imported to text must be enclosed by the
+   * enclosed-by char. Default is false; set to true if --enclosed-by is
+   * used instead of --optionally-enclosed-by.
+   */
+  public boolean isOutputEncloseRequired() {
+    return this.outputDelimiters.isEncloseRequired();
+  }
+
+  /**
+   * If true, then the enclosed-by character will be applied to all fields,
+   * even if internal characters do not need enclosed-by protection.
+   */
+  public void setOutputEncloseRequired(boolean required) {
+    this.outputDelimiters.setEncloseRequired(required);
+  }
+
+  /**
+   * @return the set of delimiters used for formatting output records.
+   */
+  public DelimiterSet getOutputDelimiters() {
+    return this.outputDelimiters.copy();
+  }
+
+  /**
+   * Set the complete set of delimiters to use for output formatting.
+   */
+  public void setOutputDelimiters(DelimiterSet delimiters) {
+    this.outputDelimiters = delimiters.copy();
+  }
+
+  /**
+   * @return the set of delimiters used for parsing the input.
+   * This may include values implicitly set by the output delimiters.
+   */
+  public DelimiterSet getInputDelimiters() {
+    return new DelimiterSet(
+        getInputFieldDelim(),
+        getInputRecordDelim(),
+        getInputEnclosedBy(),
+        getInputEscapedBy(),
+        isInputEncloseRequired());
+  }
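+
+  // Editor's sketch (illustrative only, not part of this commit): input
+  // delimiters fall back to the output delimiters field by field, so with
+  // only output-side settings configured:
+  //
+  //   SqoopOptions opts = new SqoopOptions();
+  //   opts.setFieldsTerminatedBy('\t');      // output-side setting
+  //   opts.getInputFieldDelim();             // '\t', via fallback
+  //   opts.setInputFieldsTerminatedBy(',');
+  //   opts.getInputFieldDelim();             // ',', explicit setting wins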
+
+  /**
+   * @return true if the user wants imported results to be compressed.
+   */
+  public boolean shouldUseCompression() {
+    return this.useCompression || compressionCodec != null;
+  }
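+
+  // Editor's note (not part of this commit): naming a codec implies
+  // compression, so either call below enables compressed output on its own:
+  //
+  //   opts.setUseCompression(true);
+  //   opts.setCompressionCodec("org.apache.hadoop.io.compress.GzipCodec");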
+
+  public void setUseCompression(boolean compress) {
+    this.useCompression = compress;
+  }
+
+  /**
+   * @return the name of the compression codec to use when importing.
+   * E.g. <code>org.apache.hadoop.io.compress.GzipCodec</code>.
+   */
+  public String getCompressionCodec() {
+    return compressionCodec;
+  }
+
+  public void setCompressionCodec(String codec) {
+    this.compressionCodec = codec;
+  }
+
+  /**
+   * @return the name of the destination table when importing to Hive.
+   */
+  public String getHiveTableName() {
+    if (null != this.hiveTableName) {
+      return this.hiveTableName;
+    } else {
+      return this.tableName;
+    }
+  }
+
+  public void setHiveTableName(String name) {
+    this.hiveTableName = name;
+  }
+
+  public String getHivePartitionKey() {
+    return hivePartitionKey;
+  }
+
+  public void setHivePartitionKey(String hpk) {
+    this.hivePartitionKey = hpk;
+  }
+
+  public String getHivePartitionValue() {
+    return hivePartitionValue;
+  }
+
+  public void setHivePartitionValue(String hpv) {
+    this.hivePartitionValue = hpv;
+  }
+
+  /**
+   * @return the file size to split by when using --direct mode.
+   */
+  public long getDirectSplitSize() {
+    return this.directSplitSize;
+  }
+
+  public void setDirectSplitSize(long splitSize) {
+    this.directSplitSize = splitSize;
+  }
+
+  /**
+   * @return the max size of a LOB before we spill to a separate file.
+   */
+  public long getInlineLobLimit() {
+    return this.maxInlineLobSize;
+  }
+
+  public void setInlineLobLimit(long limit) {
+    this.maxInlineLobSize = limit;
+  }
+
+  public Integer getFetchSize() {
+    return this.fetchSize;
+  }
+
+  public void setFetchSize(Integer size) {
+    this.fetchSize = size;
+  }
+
+  /**
+   * @return true if the delimiters have been explicitly set by the user.
+   */
+  public boolean explicitDelims() {
+    return areDelimsManuallySet;
+  }
+
+  /**
+   * Flag the delimiter settings as explicit user settings, or implicit.
+   */
+  public void setExplicitDelims(boolean explicit) {
+    this.areDelimsManuallySet = explicit;
+  }
+
+  public Configuration getConf() {
+    return conf;
+  }
+
+  public void setConf(Configuration config) {
+    this.conf = config;
+  }
+
+  /**
+   * @return command-line arguments after a '-'.
+   */
+  public String [] getExtraArgs() {
+    if (extraArgs == null) {
+      return null;
+    }
+
+    String [] out = new String[extraArgs.length];
+    for (int i = 0; i < extraArgs.length; i++) {
+      out[i] = extraArgs[i];
+    }
+    return out;
+  }
+
+  public void setExtraArgs(String [] args) {
+    if (null == args) {
+      this.extraArgs = null;
+      return;
+    }
+
+    this.extraArgs = new String[args.length];
+    for (int i = 0; i < args.length; i++) {
+      this.extraArgs[i] = args[i];
+    }
+  }
+
+  /**
+   * Set the name of the column to be used in the WHERE clause of an
+   * UPDATE-based export process.
+   */
+  public void setUpdateKeyCol(String colName) {
+    this.updateKeyCol = colName;
+  }
+
+  /**
+   * @return the column which is the key column in a table to be exported
+   * in update mode.
+   */
+  public String getUpdateKeyCol() {
+    return this.updateKeyCol;
+  }
+
+  /**
+   * Set "UpdateOnly" to silently ignore new rows during update export.
+   * Set "AllowInsert" to insert new rows during update export.
+   */
+  public void setUpdateMode(UpdateMode mode) {
+    this.updateMode = mode;
+  }
+
+  /**
+   * @return how to handle new rows found in update export.
+   */
+  public UpdateMode getUpdateMode() {
+    return updateMode;
+  }
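+
+  // Editor's sketch (illustrative only, not part of this commit; the column
+  // name is hypothetical): a typical update-mode export configuration.
+  // With AllowInsert, rows whose key matches nothing are inserted rather
+  // than silently ignored:
+  //
+  //   opts.setUpdateKeyCol("id");
+  //   opts.setUpdateMode(SqoopOptions.UpdateMode.AllowInsert);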
+
+  /**
+   * @return an ordered list of column names. The code generator should
+   * generate the DBWritable.write(PreparedStatement) method with columns
+   * exporting in this order, if it is non-null.
+   */
+  public String [] getDbOutputColumns() {
+    if (null != dbOutColumns) {
+      return Arrays.copyOf(this.dbOutColumns, dbOutColumns.length);
+    } else {
+      return null;
+    }
+  }
+
+  /**
+   * Set the order in which columns should be serialized by the generated
+   * DBWritable.write(PreparedStatement) method. Setting this to null will use
+   * the "natural order" of the database table.
+   *
+   * TODO: Expose this setter via the command-line arguments for the codegen
+   * module. That would allow users to export to tables with columns in a
+   * different physical order than the file layout in HDFS.
+   */
+  public void setDbOutputColumns(String [] outCols) {
+    if (null == outCols) {
+      this.dbOutColumns = null;
+    } else {
+      this.dbOutColumns = Arrays.copyOf(outCols, outCols.length);
+    }
+  }
+
+  /**
+   * Set whether we should create missing HBase tables.
+   */
+  public void setCreateHBaseTable(boolean create) {
+    this.hbaseCreateTable = create;
+  }
+
+  /**
+   * Returns true if we should create HBase tables/column families
+   * that are missing.
+   */
+  public boolean getCreateHBaseTable() {
+    return this.hbaseCreateTable;
+  }
+
+  /**
+   * Sets the HBase target column family.
+   */
+  public void setHBaseColFamily(String colFamily) {
+    this.hbaseColFamily = colFamily;
+  }
+
+  /**
+   * Gets the HBase import target column family.
+   */
+  public String getHBaseColFamily() {
+    return this.hbaseColFamily;
+  }
+
+  /**
+   * Gets the column to use as the row id in an HBase import.
+   * If null, use the primary key column.
+   */
+  public String getHBaseRowKeyColumn() {
+    return this.hbaseRowKeyCol;
+  }
+
+  /**
+   * Sets the column to use as the row id in an HBase import.
+   */
+  public void setHBaseRowKeyColumn(String col) {
+    this.hbaseRowKeyCol = col;
+  }
+
+  /**
+   * Gets the target HBase table name, if any.
+   */
+  public String getHBaseTable() {
+    return this.hbaseTable;
+  }
+
+  /**
+   * Sets the target HBase table name for an import.
+   */
+  public void setHBaseTable(String table) {
+    this.hbaseTable = table;
+  }
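+
+  // Editor's sketch (illustrative only, not part of this commit; table,
+  // family, and column names are hypothetical): a minimal HBase import
+  // configuration using the setters above.
+  //
+  //   opts.setHBaseTable("customers");
+  //   opts.setHBaseColFamily("d");
+  //   opts.setHBaseRowKeyColumn("id");  // null: fall back to primary key
+  //   opts.setCreateHBaseTable(true);   // create table/family if missing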
+
+  /**
+   * Set the column of the import source table to check for incremental import
+   * state.
+   */
+  public void setIncrementalTestColumn(String colName) {
+    this.incrementalTestCol = colName;
+  }
+
+  /**
+   * Return the name of the column of the import source table
+   * to check for incremental import state.
+   */
+  public String getIncrementalTestColumn() {
+    return this.incrementalTestCol;
+  }
+
+  /**
+   * Set the incremental import mode to use.
+   */
+  public void setIncrementalMode(IncrementalMode mode) {
+    this.incrementalMode = mode;
+  }
+
+  /**
+   * Get the incremental import mode to use.
+   */
+  public IncrementalMode getIncrementalMode() {
+    return this.incrementalMode;
+  }
+
+  /**
+   * Set the last imported value of the incremental import test column.
+   */
+  public void setIncrementalLastValue(String lastVal) {
+    this.incrementalLastValue = lastVal;
+  }
+
+  /**
+   * Get the last imported value of the incremental import test column.
+   */
+  public String getIncrementalLastValue() {
+    return this.incrementalLastValue;
+  }
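+
+  // Editor's sketch (illustrative only, not part of this commit; the enum
+  // constant name is assumed from this class's IncrementalMode): an
+  // append-style incremental import over an "id" column.
+  //
+  //   opts.setIncrementalMode(SqoopOptions.IncrementalMode.AppendRows);
+  //   opts.setIncrementalTestColumn("id");
+  //   opts.setIncrementalLastValue("42");  // import rows with id > 42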
+
+  /**
+   * Set the name of the saved job this SqoopOptions belongs to.
+   */
+  public void setJobName(String job) {
+    this.jobName = job;
+  }
+
+  /**
+   * Get the name of the saved job this SqoopOptions belongs to.
+   */
+  public String getJobName() {
+    return this.jobName;
+  }
+
+  /**
+   * Set the JobStorage descriptor used to open the saved job
+   * this SqoopOptions belongs to.
+   */
+  public void setStorageDescriptor(Map<String, String> descriptor) {
+    this.jobStorageDescriptor = descriptor;
+  }
+
+  /**
+   * Get the JobStorage descriptor used to open the saved job
+   * this SqoopOptions belongs to.
+   */
+  public Map<String, String> getStorageDescriptor() {
+    return this.jobStorageDescriptor;
+  }
+
+  /**
+   * Return the parent instance this SqoopOptions is derived from.
+   */
+  public SqoopOptions getParent() {
+    return this.parent;
+  }
+
+  /**
+   * Set the parent instance this SqoopOptions is derived from.
+   */
+  public void setParent(SqoopOptions options) {
+    this.parent = options;
+  }
+
+  /**
+   * Set the path name used to do an incremental import of old data
+   * which will be combined with a "new" dataset.
+   */
+  public void setMergeOldPath(String path) {
+    this.mergeOldPath = path;
+  }
+
+  /**
+   * Return the path name used to do an incremental import of old data
+   * which will be combined with a "new" dataset.
+   */
+  public String getMergeOldPath() {
+    return this.mergeOldPath;
+  }
+
+  /**
+   * Set the path name used to do an incremental import of new data
+   * which will be combined with an "old" dataset.
+   */
+  public void setMergeNewPath(String path) {
+    this.mergeNewPath = path;
+  }
+
+  /**
+   * Return the path name used to do an incremental import of new data
+   * which will be combined with an "old" dataset.
+   */
+  public String getMergeNewPath() {
+    return this.mergeNewPath;
+  }
+
+  /**
+   * Set the name of the column used to merge an old and new dataset.
+   */
+  public void setMergeKeyCol(String col) {
+    this.mergeKeyCol = col;
+  }
+
+  /**
+   * Return the name of the column used to merge an old and new dataset.
+   */
+  public String getMergeKeyCol() {
+    return this.mergeKeyCol;
+  }
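+
+  // Editor's sketch (illustrative only, not part of this commit; paths and
+  // column name are hypothetical): merging two import runs, with rows in
+  // the new path superseding rows in the old path that share a key.
+  //
+  //   opts.setMergeOldPath("/user/foo/import-run-1");
+  //   opts.setMergeNewPath("/user/foo/import-run-2");
+  //   opts.setMergeKeyCol("id");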
+
+  public void setConnManagerClassName(String connManagerClass) {
+    this.connManagerClassName = connManagerClass;
+  }
+
+  public String getConnManagerClassName() {
+    return connManagerClassName;
+  }
+
+  /** @return the SqoopTool that is operating this session. */
+  public SqoopTool getActiveSqoopTool() {
+    return activeSqoopTool;
+  }
+
+  public void setActiveSqoopTool(SqoopTool tool) {
+    activeSqoopTool = tool;
+  }
+
+  public void setNullStringValue(String nullString) {
+    this.nullStringValue = nullString;
+  }
+
+  public String getNullStringValue() {
+    return nullStringValue;
+  }
+
+  public void setInNullStringValue(String inNullString) {
+    this.inNullStringValue = inNullString;
+  }
+
+  public String getInNullStringValue() {
+    return inNullStringValue;
+  }
+
+  public void setNullNonStringValue(String nullNonString) {
+    this.nullNonStringValue = nullNonString;
+  }
+
+  public String getNullNonStringValue() {
+    return nullNonStringValue;
+  }
+
+  public void setInNullNonStringValue(String inNullNonString) {
+    this.inNullNonStringValue = inNullNonString;
+  }
+
+  public String getInNullNonStringValue() {
+    return inNullNonStringValue;
+  }
+
+  public void setConnectionParams(Properties params) {
+    connectionParams = new Properties();
+    connectionParams.putAll(params);
+  }
+
+  public Properties getConnectionParams() {
+    return connectionParams;
+  }
+}
+

Modified: incubator/sqoop/trunk/src/java/org/apache/sqoop/lib/ClobRef.java
URL: http://svn.apache.org/viewvc/incubator/sqoop/trunk/src/java/org/apache/sqoop/lib/ClobRef.java?rev=1196486&r1=1196485&r2=1196486&view=diff
==============================================================================
--- incubator/sqoop/trunk/src/java/org/apache/sqoop/lib/ClobRef.java (original)
+++ incubator/sqoop/trunk/src/java/org/apache/sqoop/lib/ClobRef.java Wed Nov  2 07:48:56 2011
@@ -1,6 +1,4 @@
 /**
- * Copyright 2011 The Apache Software Foundation
- *
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information

Modified: incubator/sqoop/trunk/src/java/org/apache/sqoop/lib/DelimiterSet.java
URL: http://svn.apache.org/viewvc/incubator/sqoop/trunk/src/java/org/apache/sqoop/lib/DelimiterSet.java?rev=1196486&r1=1196485&r2=1196486&view=diff
==============================================================================
--- incubator/sqoop/trunk/src/java/org/apache/sqoop/lib/DelimiterSet.java (original)
+++ incubator/sqoop/trunk/src/java/org/apache/sqoop/lib/DelimiterSet.java Wed Nov  2 07:48:56 2011
@@ -17,7 +17,6 @@
  */
 package org.apache.sqoop.lib;
 
-
 /**
  * Encapsulates a set of delimiters used to encode a record.
  */
@@ -63,6 +62,19 @@ public class DelimiterSet implements Clo
   }
 
   /**
+   * Identical to clone(), but does not declare a spurious checked exception.
+   * @return a new copy of this same set of delimiters.
+   */
+  public DelimiterSet copy() {
+    try {
+      return (DelimiterSet) clone();
+    } catch (CloneNotSupportedException cnse) {
+      // Should never happen for DelimiterSet.
+      return null;
+    }
+  }
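+
+  // Editor's note (not part of this commit): copy() lets callers take a
+  // defensive copy without handling the checked exception that clone()
+  // declares; SqoopOptions.getOutputDelimiters() in this same change, for
+  // example, returns outputDelimiters.copy() rather than the mutable
+  // internal instance.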
+
+  /**
    * Sets the fields-terminated-by character.
    */
   public void setFieldsTerminatedBy(char f) {

Modified: incubator/sqoop/trunk/src/java/org/apache/sqoop/manager/PostgresqlManager.java
URL: http://svn.apache.org/viewvc/incubator/sqoop/trunk/src/java/org/apache/sqoop/manager/PostgresqlManager.java?rev=1196486&r1=1196485&r2=1196486&view=diff
==============================================================================
--- incubator/sqoop/trunk/src/java/org/apache/sqoop/manager/PostgresqlManager.java (original)
+++ incubator/sqoop/trunk/src/java/org/apache/sqoop/manager/PostgresqlManager.java Wed Nov  2 07:48:56 2011
@@ -150,5 +150,11 @@ public class PostgresqlManager
   private String escapeLiteral(String literal) {
     return literal.replace("'", "''");
   }
+
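+  // Editor's note (not part of this commit): the default implementation
+  // presumably issues the parenthesized CURRENT_TIMESTAMP() form, which
+  // PostgreSQL rejects; this override supplies the bare keyword instead.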
+  @Override
+  protected String getCurTimestampQuery() {
+    return "SELECT CURRENT_TIMESTAMP";
+  }
+
 }
 

Modified: incubator/sqoop/trunk/src/java/org/apache/sqoop/mapreduce/AvroExportMapper.java
URL: http://svn.apache.org/viewvc/incubator/sqoop/trunk/src/java/org/apache/sqoop/mapreduce/AvroExportMapper.java?rev=1196486&r1=1196485&r2=1196486&view=diff
==============================================================================
--- incubator/sqoop/trunk/src/java/org/apache/sqoop/mapreduce/AvroExportMapper.java (original)
+++ incubator/sqoop/trunk/src/java/org/apache/sqoop/mapreduce/AvroExportMapper.java Wed Nov  2 07:48:56 2011
@@ -58,7 +58,8 @@ public class AvroExportMapper
 
   private static final String BIG_DECIMAL_TYPE = "java.math.BigDecimal";
 
-  public static final String AVRO_COLUMN_TYPES_MAP = "sqoop.avro.column.types.map";
+  public static final String AVRO_COLUMN_TYPES_MAP =
+      "sqoop.avro.column.types.map";
 
   private MapWritable columnTypes;
   private SqoopRecord recordImpl;

Modified: incubator/sqoop/trunk/src/java/org/apache/sqoop/mapreduce/MySQLDumpInputFormat.java
URL: http://svn.apache.org/viewvc/incubator/sqoop/trunk/src/java/org/apache/sqoop/mapreduce/MySQLDumpInputFormat.java?rev=1196486&r1=1196485&r2=1196486&view=diff
==============================================================================
--- incubator/sqoop/trunk/src/java/org/apache/sqoop/mapreduce/MySQLDumpInputFormat.java (original)
+++ incubator/sqoop/trunk/src/java/org/apache/sqoop/mapreduce/MySQLDumpInputFormat.java Wed Nov  2 07:48:56 2011
@@ -24,7 +24,6 @@ import org.apache.hadoop.io.NullWritable
 import org.apache.hadoop.mapreduce.InputSplit;
 import org.apache.hadoop.mapreduce.RecordReader;
 import org.apache.hadoop.mapreduce.TaskAttemptContext;
-import com.cloudera.sqoop.mapreduce.DataDrivenImportJob;
 import com.cloudera.sqoop.mapreduce.db.DataDrivenDBInputFormat;
 
 /**

Modified: incubator/sqoop/trunk/src/java/org/apache/sqoop/mapreduce/RawKeyTextOutputFormat.java
URL: http://svn.apache.org/viewvc/incubator/sqoop/trunk/src/java/org/apache/sqoop/mapreduce/RawKeyTextOutputFormat.java?rev=1196486&r1=1196485&r2=1196486&view=diff
==============================================================================
--- incubator/sqoop/trunk/src/java/org/apache/sqoop/mapreduce/RawKeyTextOutputFormat.java (original)
+++ incubator/sqoop/trunk/src/java/org/apache/sqoop/mapreduce/RawKeyTextOutputFormat.java Wed Nov  2 07:48:56 2011
@@ -32,11 +32,15 @@ import org.apache.hadoop.mapreduce.Recor
 import org.apache.hadoop.mapreduce.TaskAttemptContext;
 import org.apache.hadoop.util.*;
 
-/** An {@link OutputFormat} that writes plain text files.
+/**
+ * An {@link OutputFormat} that writes plain text files.
  * Only writes the key. Does not write any delimiter/newline after the key.
  */
 public class RawKeyTextOutputFormat<K, V> extends FileOutputFormat<K, V> {
 
+  /**
+   * RecordWriter to write to plain text files.
+   */
   public static class RawKeyRecordWriter<K, V> extends RecordWriter<K, V> {
 
     private static final String UTF8 = "UTF-8";

Modified: incubator/sqoop/trunk/src/java/org/apache/sqoop/mapreduce/db/DBConfiguration.java
URL: http://svn.apache.org/viewvc/incubator/sqoop/trunk/src/java/org/apache/sqoop/mapreduce/db/DBConfiguration.java?rev=1196486&r1=1196485&r2=1196486&view=diff
==============================================================================
--- incubator/sqoop/trunk/src/java/org/apache/sqoop/mapreduce/db/DBConfiguration.java (original)
+++ incubator/sqoop/trunk/src/java/org/apache/sqoop/mapreduce/db/DBConfiguration.java Wed Nov  2 07:48:56 2011
@@ -25,7 +25,6 @@ import org.apache.hadoop.conf.Configurat
 import org.apache.hadoop.mapreduce.lib.db.DBWritable;
 
 import com.cloudera.sqoop.mapreduce.db.DBInputFormat.NullDBWritable;
-import com.cloudera.sqoop.mapreduce.db.DBOutputFormat;
 
 /**
  * A container for configuration property names for jobs with DB input/output.


