http://git-wip-us.apache.org/repos/asf/sqoop/blob/6984a36c/src/java/org/apache/sqoop/tool/ImportTool.java
----------------------------------------------------------------------
diff --git a/src/java/org/apache/sqoop/tool/ImportTool.java b/src/java/org/apache/sqoop/tool/ImportTool.java
index 807ec8c..e992005 100644
--- a/src/java/org/apache/sqoop/tool/ImportTool.java
+++ b/src/java/org/apache/sqoop/tool/ImportTool.java
@@ -38,28 +38,28 @@ import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.util.StringUtils;
 import org.apache.sqoop.avro.AvroSchemaMismatchException;
 
-import com.cloudera.sqoop.SqoopOptions;
-import com.cloudera.sqoop.SqoopOptions.InvalidOptionsException;
-import com.cloudera.sqoop.cli.RelatedOptions;
-import com.cloudera.sqoop.cli.ToolOptions;
-import com.cloudera.sqoop.hive.HiveImport;
-import com.cloudera.sqoop.manager.ImportJobContext;
-import com.cloudera.sqoop.mapreduce.MergeJob;
-import com.cloudera.sqoop.metastore.JobData;
-import com.cloudera.sqoop.metastore.JobStorage;
-import com.cloudera.sqoop.metastore.JobStorageFactory;
+import org.apache.sqoop.SqoopOptions;
+import org.apache.sqoop.SqoopOptions.InvalidOptionsException;
+import org.apache.sqoop.cli.RelatedOptions;
+import org.apache.sqoop.cli.ToolOptions;
+import org.apache.sqoop.hive.HiveImport;
+import org.apache.sqoop.manager.ImportJobContext;
+import org.apache.sqoop.mapreduce.MergeJob;
+import org.apache.sqoop.metastore.JobData;
+import org.apache.sqoop.metastore.JobStorage;
+import org.apache.sqoop.metastore.JobStorageFactory;
 import org.apache.sqoop.orm.ClassWriter;
-import com.cloudera.sqoop.orm.TableClassName;
-import com.cloudera.sqoop.util.AppendUtils;
-import com.cloudera.sqoop.util.ClassLoaderStack;
-import com.cloudera.sqoop.util.ImportException;
+import org.apache.sqoop.orm.TableClassName;
+import org.apache.sqoop.util.AppendUtils;
+import org.apache.sqoop.util.ClassLoaderStack;
+import org.apache.sqoop.util.ImportException;
 
 import static org.apache.sqoop.manager.SupportedManagers.MYSQL;
 
 /**
  * Tool that performs database imports to HDFS.
  */
-public class ImportTool extends com.cloudera.sqoop.tool.BaseSqoopTool {
+public class ImportTool extends BaseSqoopTool {
 
   public static final Log LOG = LogFactory.getLog(ImportTool.class.getName());
 

http://git-wip-us.apache.org/repos/asf/sqoop/blob/6984a36c/src/java/org/apache/sqoop/tool/JobTool.java
----------------------------------------------------------------------
diff --git a/src/java/org/apache/sqoop/tool/JobTool.java b/src/java/org/apache/sqoop/tool/JobTool.java
index 72234ba..cf5f320 100644
--- a/src/java/org/apache/sqoop/tool/JobTool.java
+++ b/src/java/org/apache/sqoop/tool/JobTool.java
@@ -18,9 +18,9 @@
 
 package org.apache.sqoop.tool;
 
-import static com.cloudera.sqoop.metastore.GenericJobStorage.META_CONNECT_KEY;
-import static com.cloudera.sqoop.metastore.GenericJobStorage.META_PASSWORD_KEY;
-import static com.cloudera.sqoop.metastore.GenericJobStorage.META_USERNAME_KEY;
+import static org.apache.sqoop.metastore.GenericJobStorage.META_CONNECT_KEY;
+import static org.apache.sqoop.metastore.GenericJobStorage.META_PASSWORD_KEY;
+import static org.apache.sqoop.metastore.GenericJobStorage.META_USERNAME_KEY;
 
 import java.io.IOException;
 
@@ -39,19 +39,21 @@ import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.util.ToolRunner;
-import com.cloudera.sqoop.SqoopOptions;
-import com.cloudera.sqoop.SqoopOptions.InvalidOptionsException;
-import com.cloudera.sqoop.cli.ToolOptions;
-import com.cloudera.sqoop.metastore.JobData;
-import com.cloudera.sqoop.metastore.JobStorage;
-import com.cloudera.sqoop.metastore.JobStorageFactory;
+import org.apache.sqoop.SqoopOptions;
+import org.apache.sqoop.SqoopOptions.InvalidOptionsException;
+import org.apache.sqoop.cli.ToolOptions;
+import org.apache.sqoop.metastore.GenericJobStorage;
+import org.apache.sqoop.metastore.JobData;
+import org.apache.sqoop.metastore.JobStorage;
+import org.apache.sqoop.metastore.JobStorageFactory;
+import org.apache.sqoop.manager.JdbcDrivers;
 import org.apache.sqoop.metastore.PasswordRedactor;
 import org.apache.sqoop.util.LoggingUtils;
 
 /**
  * Tool that creates and executes saved jobs.
  */
-public class JobTool extends com.cloudera.sqoop.tool.BaseSqoopTool {
+public class JobTool extends BaseSqoopTool {
 
   public static final Log LOG = LogFactory.getLog(
       JobTool.class.getName());
@@ -180,8 +182,7 @@ public class JobTool extends com.cloudera.sqoop.tool.BaseSqoopTool {
 
     // Now that the tool is fully configured, materialize the job.
     //TODO(jarcec): Remove the cast when JobData will be moved to apache package
-    JobData jobData = new JobData(jobOptions,
-            (com.cloudera.sqoop.tool.SqoopTool)jobTool);
+    JobData jobData = new JobData(jobOptions, jobTool);
     this.storage.create(jobName, jobData);
     return 0; // Success.
   }

http://git-wip-us.apache.org/repos/asf/sqoop/blob/6984a36c/src/java/org/apache/sqoop/tool/ListDatabasesTool.java
----------------------------------------------------------------------
diff --git a/src/java/org/apache/sqoop/tool/ListDatabasesTool.java b/src/java/org/apache/sqoop/tool/ListDatabasesTool.java
index 2dfbfb5..86c0444 100644
--- a/src/java/org/apache/sqoop/tool/ListDatabasesTool.java
+++ b/src/java/org/apache/sqoop/tool/ListDatabasesTool.java
@@ -22,14 +22,14 @@ import org.apache.commons.cli.CommandLine;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 
-import com.cloudera.sqoop.SqoopOptions;
-import com.cloudera.sqoop.SqoopOptions.InvalidOptionsException;
-import com.cloudera.sqoop.cli.ToolOptions;
+import org.apache.sqoop.SqoopOptions;
+import org.apache.sqoop.SqoopOptions.InvalidOptionsException;
+import org.apache.sqoop.cli.ToolOptions;
 
 /**
  * Tool that lists available databases on a server.
  */
-public class ListDatabasesTool extends com.cloudera.sqoop.tool.BaseSqoopTool {
+public class ListDatabasesTool extends BaseSqoopTool {
 
   public static final Log LOG = LogFactory.getLog(
       ListDatabasesTool.class.getName());

http://git-wip-us.apache.org/repos/asf/sqoop/blob/6984a36c/src/java/org/apache/sqoop/tool/ListTablesTool.java
----------------------------------------------------------------------
diff --git a/src/java/org/apache/sqoop/tool/ListTablesTool.java b/src/java/org/apache/sqoop/tool/ListTablesTool.java
index 5d47bc9..0441352 100644
--- a/src/java/org/apache/sqoop/tool/ListTablesTool.java
+++ b/src/java/org/apache/sqoop/tool/ListTablesTool.java
@@ -22,14 +22,14 @@ import org.apache.commons.cli.CommandLine;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 
-import com.cloudera.sqoop.SqoopOptions;
-import com.cloudera.sqoop.SqoopOptions.InvalidOptionsException;
-import com.cloudera.sqoop.cli.ToolOptions;
+import org.apache.sqoop.SqoopOptions;
+import org.apache.sqoop.SqoopOptions.InvalidOptionsException;
+import org.apache.sqoop.cli.ToolOptions;
 
 /**
  * Tool that lists available tables in a database.
  */
-public class ListTablesTool extends com.cloudera.sqoop.tool.BaseSqoopTool {
+public class ListTablesTool extends BaseSqoopTool {
 
   public static final Log LOG = LogFactory.getLog(
       ListTablesTool.class.getName());

http://git-wip-us.apache.org/repos/asf/sqoop/blob/6984a36c/src/java/org/apache/sqoop/tool/MainframeImportTool.java
----------------------------------------------------------------------
diff --git a/src/java/org/apache/sqoop/tool/MainframeImportTool.java b/src/java/org/apache/sqoop/tool/MainframeImportTool.java
index 0cb91db..8883301 100644
--- a/src/java/org/apache/sqoop/tool/MainframeImportTool.java
+++ b/src/java/org/apache/sqoop/tool/MainframeImportTool.java
@@ -25,10 +25,10 @@ import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.util.ToolRunner;
 import org.apache.sqoop.mapreduce.mainframe.MainframeConfiguration;
 
-import com.cloudera.sqoop.SqoopOptions;
-import com.cloudera.sqoop.SqoopOptions.InvalidOptionsException;
-import com.cloudera.sqoop.cli.RelatedOptions;
-import com.cloudera.sqoop.cli.ToolOptions;
+import org.apache.sqoop.SqoopOptions;
+import org.apache.sqoop.SqoopOptions.InvalidOptionsException;
+import org.apache.sqoop.cli.RelatedOptions;
+import org.apache.sqoop.cli.ToolOptions;
 
 /**
  * Tool that performs mainframe dataset imports to HDFS.

http://git-wip-us.apache.org/repos/asf/sqoop/blob/6984a36c/src/java/org/apache/sqoop/tool/MergeTool.java
----------------------------------------------------------------------
diff --git a/src/java/org/apache/sqoop/tool/MergeTool.java b/src/java/org/apache/sqoop/tool/MergeTool.java
index a710740..311fee8 100644
--- a/src/java/org/apache/sqoop/tool/MergeTool.java
+++ b/src/java/org/apache/sqoop/tool/MergeTool.java
@@ -25,17 +25,17 @@ import org.apache.commons.cli.OptionBuilder;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.util.StringUtils;
-import com.cloudera.sqoop.SqoopOptions;
-import com.cloudera.sqoop.SqoopOptions.InvalidOptionsException;
-import com.cloudera.sqoop.cli.RelatedOptions;
-import com.cloudera.sqoop.cli.ToolOptions;
-import com.cloudera.sqoop.mapreduce.MergeJob;
+import org.apache.sqoop.SqoopOptions;
+import org.apache.sqoop.SqoopOptions.InvalidOptionsException;
+import org.apache.sqoop.cli.RelatedOptions;
+import org.apache.sqoop.cli.ToolOptions;
+import org.apache.sqoop.mapreduce.MergeJob;
 import org.apache.sqoop.util.LoggingUtils;
 
 /**
  * Tool that merges a more recent dataset on top of an older one.
  */
-public class MergeTool extends com.cloudera.sqoop.tool.BaseSqoopTool {
+public class MergeTool extends BaseSqoopTool {
 
   public static final Log LOG = LogFactory.getLog(MergeTool.class.getName());
 

http://git-wip-us.apache.org/repos/asf/sqoop/blob/6984a36c/src/java/org/apache/sqoop/tool/MetastoreTool.java
----------------------------------------------------------------------
diff --git a/src/java/org/apache/sqoop/tool/MetastoreTool.java b/src/java/org/apache/sqoop/tool/MetastoreTool.java
index 53e56f0..596e911 100644
--- a/src/java/org/apache/sqoop/tool/MetastoreTool.java
+++ b/src/java/org/apache/sqoop/tool/MetastoreTool.java
@@ -23,16 +23,16 @@ import org.apache.commons.cli.OptionBuilder;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 
-import com.cloudera.sqoop.SqoopOptions;
-import com.cloudera.sqoop.SqoopOptions.InvalidOptionsException;
-import com.cloudera.sqoop.cli.RelatedOptions;
-import com.cloudera.sqoop.cli.ToolOptions;
-import com.cloudera.sqoop.metastore.hsqldb.HsqldbMetaStore;
+import org.apache.sqoop.SqoopOptions;
+import org.apache.sqoop.SqoopOptions.InvalidOptionsException;
+import org.apache.sqoop.cli.RelatedOptions;
+import org.apache.sqoop.cli.ToolOptions;
+import org.apache.sqoop.metastore.hsqldb.HsqldbMetaStore;
 
 /**
  * Tool that runs a standalone Sqoop metastore.
  */
-public class MetastoreTool extends com.cloudera.sqoop.tool.BaseSqoopTool {
+public class MetastoreTool extends BaseSqoopTool {
 
   public static final Log LOG = LogFactory.getLog(
       MetastoreTool.class.getName());

http://git-wip-us.apache.org/repos/asf/sqoop/blob/6984a36c/src/java/org/apache/sqoop/tool/SqoopTool.java
----------------------------------------------------------------------
diff --git a/src/java/org/apache/sqoop/tool/SqoopTool.java b/src/java/org/apache/sqoop/tool/SqoopTool.java
index 5b8453d..732cbe9 100644
--- a/src/java/org/apache/sqoop/tool/SqoopTool.java
+++ b/src/java/org/apache/sqoop/tool/SqoopTool.java
@@ -42,11 +42,10 @@ import org.apache.hadoop.util.ToolRunner;
 import org.apache.sqoop.util.ClassLoaderStack;
 import org.apache.sqoop.config.ConfigurationHelper;
 
-import com.cloudera.sqoop.SqoopOptions;
-import com.cloudera.sqoop.SqoopOptions.InvalidOptionsException;
-import com.cloudera.sqoop.cli.SqoopParser;
-import com.cloudera.sqoop.cli.ToolOptions;
-import com.cloudera.sqoop.tool.ToolDesc;
+import org.apache.sqoop.SqoopOptions;
+import org.apache.sqoop.SqoopOptions.InvalidOptionsException;
+import org.apache.sqoop.cli.SqoopParser;
+import org.apache.sqoop.cli.ToolOptions;
 
 /**
  * Base class for Sqoop subprograms (e.g., SqoopImport, SqoopExport, etc.)
@@ -412,7 +411,7 @@ public abstract class SqoopTool {
     // This tool is the "active" tool; bind it in the SqoopOptions.
     //TODO(jarcec): Remove the cast when SqoopOptions will be moved
     //              to apache package
-    out.setActiveSqoopTool((com.cloudera.sqoop.tool.SqoopTool)this);
+    out.setActiveSqoopTool(this);
 
     String [] toolArgs = args; // args after generic parser is done.
     if (useGenericOptions) {

http://git-wip-us.apache.org/repos/asf/sqoop/blob/6984a36c/src/java/org/apache/sqoop/tool/ToolPlugin.java
----------------------------------------------------------------------
diff --git a/src/java/org/apache/sqoop/tool/ToolPlugin.java b/src/java/org/apache/sqoop/tool/ToolPlugin.java
index 5fa5e6f..d5ec199 100644
--- a/src/java/org/apache/sqoop/tool/ToolPlugin.java
+++ b/src/java/org/apache/sqoop/tool/ToolPlugin.java
@@ -20,8 +20,6 @@ package org.apache.sqoop.tool;
 
 import java.util.List;
 
-import com.cloudera.sqoop.tool.ToolDesc;
-
 /**
  * Abstract base class that defines the ToolPlugin API; additional SqoopTool
  * implementations may be registered with the system via ToolPlugin classes.

http://git-wip-us.apache.org/repos/asf/sqoop/blob/6984a36c/src/java/org/apache/sqoop/tool/VersionTool.java
----------------------------------------------------------------------
diff --git a/src/java/org/apache/sqoop/tool/VersionTool.java b/src/java/org/apache/sqoop/tool/VersionTool.java
index bcd824d..aafddb2 100644
--- a/src/java/org/apache/sqoop/tool/VersionTool.java
+++ b/src/java/org/apache/sqoop/tool/VersionTool.java
@@ -18,13 +18,13 @@
 
 package org.apache.sqoop.tool;
 
-import com.cloudera.sqoop.SqoopOptions;
-import com.cloudera.sqoop.cli.ToolOptions;
+import org.apache.sqoop.SqoopOptions;
+import org.apache.sqoop.cli.ToolOptions;
 
 /**
  * Tool that prints Sqoop's version.
  */
-public class VersionTool extends com.cloudera.sqoop.tool.BaseSqoopTool {
+public class VersionTool extends BaseSqoopTool {
 
   public VersionTool() {
     super("version");

http://git-wip-us.apache.org/repos/asf/sqoop/blob/6984a36c/src/java/org/apache/sqoop/util/AppendUtils.java
----------------------------------------------------------------------
diff --git a/src/java/org/apache/sqoop/util/AppendUtils.java b/src/java/org/apache/sqoop/util/AppendUtils.java
index a3082c4..fa85280 100644
--- a/src/java/org/apache/sqoop/util/AppendUtils.java
+++ b/src/java/org/apache/sqoop/util/AppendUtils.java
@@ -27,8 +27,8 @@ import java.util.regex.Pattern;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
-import com.cloudera.sqoop.manager.ImportJobContext;
-import com.cloudera.sqoop.SqoopOptions;
+import org.apache.sqoop.manager.ImportJobContext;
+import org.apache.sqoop.SqoopOptions;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 

http://git-wip-us.apache.org/repos/asf/sqoop/blob/6984a36c/src/java/org/apache/sqoop/util/CredentialsUtil.java
----------------------------------------------------------------------
diff --git a/src/java/org/apache/sqoop/util/CredentialsUtil.java b/src/java/org/apache/sqoop/util/CredentialsUtil.java
index c627b32..fee0bdc 100644
--- a/src/java/org/apache/sqoop/util/CredentialsUtil.java
+++ b/src/java/org/apache/sqoop/util/CredentialsUtil.java
@@ -18,7 +18,7 @@
 
 package org.apache.sqoop.util;
 
-import com.cloudera.sqoop.SqoopOptions;
+import org.apache.sqoop.SqoopOptions;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;

http://git-wip-us.apache.org/repos/asf/sqoop/blob/6984a36c/src/java/org/apache/sqoop/util/DirectImportUtils.java
----------------------------------------------------------------------
diff --git a/src/java/org/apache/sqoop/util/DirectImportUtils.java b/src/java/org/apache/sqoop/util/DirectImportUtils.java
index d801c8f..a0a5efc 100644
--- a/src/java/org/apache/sqoop/util/DirectImportUtils.java
+++ b/src/java/org/apache/sqoop/util/DirectImportUtils.java
@@ -31,13 +31,13 @@ import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.compress.CompressionCodec;
 import org.apache.hadoop.io.compress.GzipCodec;
 import org.apache.hadoop.conf.Configuration;
-import com.cloudera.sqoop.SqoopOptions;
-import com.cloudera.sqoop.io.CodecMap;
-import com.cloudera.sqoop.io.SplittingOutputStream;
-import com.cloudera.sqoop.io.SplittableBufferedWriter;
+import org.apache.sqoop.SqoopOptions;
+import org.apache.sqoop.io.CodecMap;
+import org.apache.sqoop.io.SplittingOutputStream;
+import org.apache.sqoop.io.SplittableBufferedWriter;
 
 import org.apache.hadoop.util.Shell;
-import com.cloudera.sqoop.manager.ImportJobContext;
+import org.apache.sqoop.manager.ImportJobContext;
 
 /**
  * Utility methods that are common to various the direct import managers.

http://git-wip-us.apache.org/repos/asf/sqoop/blob/6984a36c/src/java/org/apache/sqoop/util/ErrorableAsyncSink.java
----------------------------------------------------------------------
diff --git a/src/java/org/apache/sqoop/util/ErrorableAsyncSink.java b/src/java/org/apache/sqoop/util/ErrorableAsyncSink.java
index aaea322..47d2857 100644
--- a/src/java/org/apache/sqoop/util/ErrorableAsyncSink.java
+++ b/src/java/org/apache/sqoop/util/ErrorableAsyncSink.java
@@ -18,8 +18,6 @@
 
 package org.apache.sqoop.util;
 
-import com.cloudera.sqoop.util.AsyncSink;
-
 /**
  * Partial implementation of AsyncSink that relies on ErrorableThread to
  * provide a status bit for the join() method.

http://git-wip-us.apache.org/repos/asf/sqoop/blob/6984a36c/src/java/org/apache/sqoop/util/Jars.java
----------------------------------------------------------------------
diff --git a/src/java/org/apache/sqoop/util/Jars.java b/src/java/org/apache/sqoop/util/Jars.java
index 476d59a..3809ada 100644
--- a/src/java/org/apache/sqoop/util/Jars.java
+++ b/src/java/org/apache/sqoop/util/Jars.java
@@ -26,7 +26,7 @@ import java.util.Enumeration;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 
-import com.cloudera.sqoop.manager.ConnManager;
+import org.apache.sqoop.manager.ConnManager;
 
 /**
  * Utility class; returns the locations of various jars.

http://git-wip-us.apache.org/repos/asf/sqoop/blob/6984a36c/src/java/org/apache/sqoop/util/LoggingAsyncSink.java
----------------------------------------------------------------------
diff --git a/src/java/org/apache/sqoop/util/LoggingAsyncSink.java b/src/java/org/apache/sqoop/util/LoggingAsyncSink.java
index 5f20539..58a3c55 100644
--- a/src/java/org/apache/sqoop/util/LoggingAsyncSink.java
+++ b/src/java/org/apache/sqoop/util/LoggingAsyncSink.java
@@ -26,8 +26,6 @@ import java.io.IOException;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 
-import com.cloudera.sqoop.util.AsyncSink;
-
 /**
  * An AsyncSink that takes the contents of a stream and writes
  * it to log4j.

http://git-wip-us.apache.org/repos/asf/sqoop/blob/6984a36c/src/java/org/apache/sqoop/util/LoggingUtils.java
----------------------------------------------------------------------
diff --git a/src/java/org/apache/sqoop/util/LoggingUtils.java b/src/java/org/apache/sqoop/util/LoggingUtils.java
index 06b012d..c478245 100644
--- a/src/java/org/apache/sqoop/util/LoggingUtils.java
+++ b/src/java/org/apache/sqoop/util/LoggingUtils.java
@@ -52,7 +52,6 @@ public final class LoggingUtils {
 
   public static void setDebugLevel() {
     Logger.getLogger("org.apache.sqoop").setLevel(Level.DEBUG);
-    Logger.getLogger("com.cloudera.apache").setLevel(Level.DEBUG);
   }
 }
 

http://git-wip-us.apache.org/repos/asf/sqoop/blob/6984a36c/src/java/org/apache/sqoop/util/NullAsyncSink.java
----------------------------------------------------------------------
diff --git a/src/java/org/apache/sqoop/util/NullAsyncSink.java b/src/java/org/apache/sqoop/util/NullAsyncSink.java
index a42e4e9..ffe4560 100644
--- a/src/java/org/apache/sqoop/util/NullAsyncSink.java
+++ b/src/java/org/apache/sqoop/util/NullAsyncSink.java
@@ -26,8 +26,6 @@ import java.io.IOException;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 
-import com.cloudera.sqoop.util.AsyncSink;
-
 /**
  * An AsyncSink that takes the contents of a stream and ignores it.
  */

http://git-wip-us.apache.org/repos/asf/sqoop/blob/6984a36c/src/java/org/apache/sqoop/util/OptionsFileUtil.java
----------------------------------------------------------------------
diff --git a/src/java/org/apache/sqoop/util/OptionsFileUtil.java b/src/java/org/apache/sqoop/util/OptionsFileUtil.java
index 993ac1b..b487941 100644
--- a/src/java/org/apache/sqoop/util/OptionsFileUtil.java
+++ b/src/java/org/apache/sqoop/util/OptionsFileUtil.java
@@ -30,7 +30,7 @@ import org.apache.commons.lang.StringUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 
-import com.cloudera.sqoop.Sqoop;
+import org.apache.sqoop.Sqoop;
 
 /**
  * Provides utility functions to read in options file. An options file is a

http://git-wip-us.apache.org/repos/asf/sqoop/blob/6984a36c/src/java/org/apache/sqoop/util/SubprocessSecurityManager.java
----------------------------------------------------------------------
diff --git a/src/java/org/apache/sqoop/util/SubprocessSecurityManager.java b/src/java/org/apache/sqoop/util/SubprocessSecurityManager.java
index 7a861a0..ea39c6c 100644
--- a/src/java/org/apache/sqoop/util/SubprocessSecurityManager.java
+++ b/src/java/org/apache/sqoop/util/SubprocessSecurityManager.java
@@ -80,7 +80,7 @@ public class SubprocessSecurityManager extends SecurityManager {
    */
   public void checkExit(int status) {
     LOG.debug("Rejecting System.exit call with status=" + status);
-    throw new com.cloudera.sqoop.util.ExitSecurityException(status);
+    throw new org.apache.sqoop.util.ExitSecurityException(status);
   }
 
   @Override

http://git-wip-us.apache.org/repos/asf/sqoop/blob/6984a36c/src/java/org/apache/sqoop/util/TaskId.java
----------------------------------------------------------------------
diff --git a/src/java/org/apache/sqoop/util/TaskId.java b/src/java/org/apache/sqoop/util/TaskId.java
index c543754..4caca73 100644
--- a/src/java/org/apache/sqoop/util/TaskId.java
+++ b/src/java/org/apache/sqoop/util/TaskId.java
@@ -23,7 +23,7 @@ import java.io.IOException;
 
 import org.apache.hadoop.conf.Configuration;
 
-import com.cloudera.sqoop.config.ConfigurationConstants;
+import org.apache.sqoop.config.ConfigurationConstants;
 
 /**
  * Utility class; returns task attempt Id of the current job

http://git-wip-us.apache.org/repos/asf/sqoop/blob/6984a36c/src/perftest/ExportStressTest.java
----------------------------------------------------------------------
diff --git a/src/perftest/ExportStressTest.java b/src/perftest/ExportStressTest.java
index b5710e0..8c6f0f3 100644
--- a/src/perftest/ExportStressTest.java
+++ b/src/perftest/ExportStressTest.java
@@ -23,10 +23,10 @@ import org.apache.hadoop.fs.*;
 import org.apache.hadoop.conf.*;
 import org.apache.hadoop.util.*;
 
-import com.cloudera.sqoop.Sqoop;
-import com.cloudera.sqoop.SqoopOptions;
-import com.cloudera.sqoop.tool.ExportTool;
-import com.cloudera.sqoop.tool.SqoopTool;
+import org.apache.sqoop.Sqoop;
+import org.apache.sqoop.SqoopOptions;
+import org.apache.sqoop.tool.ExportTool;
+import org.apache.sqoop.tool.SqoopTool;
 
 /**
  * Stress test export procedure by running a large-scale export to MySQL.

http://git-wip-us.apache.org/repos/asf/sqoop/blob/6984a36c/src/perftest/ExtConnFactoryTest.java
----------------------------------------------------------------------
diff --git a/src/perftest/ExtConnFactoryTest.java b/src/perftest/ExtConnFactoryTest.java
index 6d2dec5..c614011 100644
--- a/src/perftest/ExtConnFactoryTest.java
+++ b/src/perftest/ExtConnFactoryTest.java
@@ -20,13 +20,13 @@ import java.sql.Connection;
 import java.sql.ResultSet;
 import java.sql.SQLException;
 
-import com.cloudera.sqoop.Sqoop;
-import com.cloudera.sqoop.SqoopOptions;
-import com.cloudera.sqoop.manager.ExportJobContext;
-import com.cloudera.sqoop.manager.ImportJobContext;
-import com.cloudera.sqoop.manager.SqlManager;
-import com.cloudera.sqoop.util.ExportException;
-import com.cloudera.sqoop.util.ImportException;
+import org.apache.sqoop.Sqoop;
+import org.apache.sqoop.SqoopOptions;
+import org.apache.sqoop.manager.ExportJobContext;
+import org.apache.sqoop.manager.ImportJobContext;
+import org.apache.sqoop.manager.SqlManager;
+import org.apache.sqoop.util.ExportException;
+import org.apache.sqoop.util.ImportException;
 
 /**
  * Test external connection factory classes on the classpath.

http://git-wip-us.apache.org/repos/asf/sqoop/blob/6984a36c/src/perftest/ExtFactory.java
----------------------------------------------------------------------
diff --git a/src/perftest/ExtFactory.java b/src/perftest/ExtFactory.java
index ecde08c..3e3aec8 100644
--- a/src/perftest/ExtFactory.java
+++ b/src/perftest/ExtFactory.java
@@ -19,9 +19,9 @@
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 
-import com.cloudera.sqoop.SqoopOptions;
-import com.cloudera.sqoop.manager.ConnManager;
-import com.cloudera.sqoop.manager.ManagerFactory;
+import org.apache.sqoop.SqoopOptions;
+import org.apache.sqoop.manager.ConnManager;
+import org.apache.sqoop.manager.ManagerFactory;
 
 /**
  * An external ConnFactory used by ExtConnFactoryTest.

http://git-wip-us.apache.org/repos/asf/sqoop/blob/6984a36c/src/perftest/LobFilePerfTest.java
----------------------------------------------------------------------
diff --git a/src/perftest/LobFilePerfTest.java b/src/perftest/LobFilePerfTest.java
index a16bb5e..f822f16 100644
--- a/src/perftest/LobFilePerfTest.java
+++ b/src/perftest/LobFilePerfTest.java
@@ -19,7 +19,7 @@
 import java.io.*;
 import org.apache.hadoop.fs.*;
 import org.apache.hadoop.conf.*;
-import com.cloudera.sqoop.io.*;
+import org.apache.sqoop.io.*;
 
 /**
  * A simple benchmark to performance test LobFile reader/writer speed.

http://git-wip-us.apache.org/repos/asf/sqoop/blob/6984a36c/src/perftest/LobFileStressTest.java
----------------------------------------------------------------------
diff --git a/src/perftest/LobFileStressTest.java b/src/perftest/LobFileStressTest.java
index dafebb4..eebc0c1 100644
--- a/src/perftest/LobFileStressTest.java
+++ b/src/perftest/LobFileStressTest.java
@@ -20,7 +20,7 @@ import java.io.*;
 import java.util.*;
 import org.apache.hadoop.fs.*;
 import org.apache.hadoop.conf.*;
-import com.cloudera.sqoop.io.*;
+import org.apache.sqoop.io.*;
 
 /**
  * Stress test LobFiles by writing a bunch of different files and reading

http://git-wip-us.apache.org/repos/asf/sqoop/blob/6984a36c/src/scripts/write-version-info.cmd
----------------------------------------------------------------------
diff --git a/src/scripts/write-version-info.cmd b/src/scripts/write-version-info.cmd
index 247f5f3..8b70a68 100644
--- a/src/scripts/write-version-info.cmd
+++ b/src/scripts/write-version-info.cmd
@@ -29,9 +29,6 @@ set buildroot=%1
 set version=%2
 set specifiedgithash=%3
 
-set outputdir=%buildroot%\src\com\cloudera\sqoop
-set outputfile=%outputdir%\SqoopVersion.java
-
 set newoutputdir=%buildroot%\src\org\apache\sqoop
 set newoutputfile=%newoutputdir%\SqoopVersion.java
 
@@ -45,31 +42,6 @@ if "%signature%"=="" (
 set host=%COMPUTERNAME%
 set compiledate=%date%-%time%
 
-mkdir %outputdir%
-
-(
-  echo.// generated by src/scripts/write-version-info.cmd
-  echo.package com.cloudera.sqoop;
-  echo.
-  echo./**
-  echo. * @deprecated use org.apache.sqoop.SqoopVersion instead
-  echo. * @see org.apache.sqoop.SqoopVersion
-  echo. */
-  echo.public final class SqoopVersion extends org.apache.sqoop.SqoopVersion {
-  echo.  public SqoopVersion^(^) {
-  echo.    super^(^);
-  echo.  }
-  echo.  public static final String VERSION =
-  echo.    org.apache.sqoop.SqoopVersion.VERSION;
-  echo.  public static final String GIT_HASH =
-  echo.    org.apache.sqoop.SqoopVersion.GIT_HASH;
-  echo.  public static final String COMPILE_USER =
-  echo.    org.apache.sqoop.SqoopVersion.COMPILE_USER;
-  echo.  public static final String COMPILE_DATE =
-  echo.    org.apache.sqoop.SqoopVersion.COMPILE_DATE;
-  echo.}
-) > %outputfile%
-
 mkdir %newoutputdir%
 
 (

http://git-wip-us.apache.org/repos/asf/sqoop/blob/6984a36c/src/scripts/write-version-info.sh
----------------------------------------------------------------------
diff --git a/src/scripts/write-version-info.sh b/src/scripts/write-version-info.sh
index 70ce3c7..a88ebf8 100755
--- a/src/scripts/write-version-info.sh
+++ b/src/scripts/write-version-info.sh
@@ -30,9 +30,6 @@ buildroot=$1
 version=$2
 specifiedgithash=$3
 
-outputdir="${buildroot}/src/com/cloudera/sqoop"
-outputfile="${outputdir}/SqoopVersion.java"
-
 newoutputdir="${buildroot}/src/org/apache/sqoop"
 newoutputfile="${newoutputdir}/SqoopVersion.java"
 
@@ -44,31 +41,6 @@ fi
 host=`hostname`
 compiledate=`date`
 
-mkdir -p "${outputdir}"
-cat > "${outputfile}" <<EOF
-// generated by src/scripts/write-version-info.sh
-package com.cloudera.sqoop;
-
-/**
- * @deprecated use org.apache.sqoop.SqoopVersion instead
- * @see org.apache.sqoop.SqoopVersion
- */
-public final class SqoopVersion extends org.apache.sqoop.SqoopVersion {
-  public SqoopVersion() {
-    super();
-  }
-  public static final String VERSION =
-    org.apache.sqoop.SqoopVersion.VERSION;
-  public static final String GIT_HASH =
-    org.apache.sqoop.SqoopVersion.GIT_HASH;
-  public static final String COMPILE_USER =
-    org.apache.sqoop.SqoopVersion.COMPILE_USER;
-  public static final String COMPILE_DATE =
-    org.apache.sqoop.SqoopVersion.COMPILE_DATE;
-}
-EOF
-
-
 mkdir -p "${newoutputdir}"
 cat > "${newoutputfile}" <<EOF
 // generated by src/scripts/write-version-info.sh

http://git-wip-us.apache.org/repos/asf/sqoop/blob/6984a36c/src/test/com/cloudera/sqoop/TestAllTables.java
----------------------------------------------------------------------
diff --git a/src/test/com/cloudera/sqoop/TestAllTables.java b/src/test/com/cloudera/sqoop/TestAllTables.java
deleted file mode 100644
index 232b82f..0000000
--- a/src/test/com/cloudera/sqoop/TestAllTables.java
+++ /dev/null
@@ -1,252 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.cloudera.sqoop;
-
-import java.io.*;
-import java.sql.SQLException;
-import java.util.ArrayList;
-import java.util.List;
-
-import org.apache.avro.generic.GenericRecord;
-import org.apache.commons.lang.StringUtils;
-import org.apache.hadoop.fs.FSDataInputStream;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.io.IOUtils;
-import org.junit.Before;
-import org.junit.After;
-
-import com.cloudera.sqoop.testutil.CommonArgs;
-import com.cloudera.sqoop.testutil.ImportJobTestCase;
-import com.cloudera.sqoop.tool.ImportAllTablesTool;
-import org.junit.Test;
-import org.kitesdk.data.Dataset;
-import org.kitesdk.data.DatasetReader;
-import org.kitesdk.data.Datasets;
-
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.fail;
-
-/**
- * Test the --all-tables functionality that can import multiple tables.
- */
-public class TestAllTables extends ImportJobTestCase {
-
-  /**
-   * Create the argv to pass to Sqoop.
-   * @return the argv as an array of strings.
-   */
-  private String [] getArgv(String[] extraArgs, String[] excludeTables) {
-    ArrayList<String> args = new ArrayList<String>();
-
-    CommonArgs.addHadoopFlags(args);
-    args.add("--warehouse-dir");
-    args.add(getWarehouseDir());
-    args.add("--connect");
-    args.add(getConnectString());
-    args.add("--num-mappers");
-    args.add("1");
-    args.add("--escaped-by");
-    args.add("\\");
-    if (excludeTables != null) {
-      args.add("--exclude-tables");
-      args.add(StringUtils.join(excludeTables, ","));
-    }
-    if (extraArgs != null) {
-      for (String arg : extraArgs) {
-        args.add(arg);
-      }
-    }
-
-    return args.toArray(new String[0]);
-  }
-
-  /** the names of the tables we're creating. */
-  private List<String> tableNames;
-
-  /** The strings to inject in the (ordered) tables. */
-  private List<String> expectedStrings;
-
-  @Before
-  public void setUp() {
-    // start the server
-    super.setUp();
-
-    if (useHsqldbTestServer()) {
-      // throw away TWOINTTABLE and things we don't care about.
-      try {
-        this.getTestServer().dropExistingSchema();
-      } catch (SQLException sqlE) {
-        fail(sqlE.toString());
-      }
-    }
-
-    this.tableNames = new ArrayList<String>();
-    this.expectedStrings = new ArrayList<String>();
-
-    // create two tables.
-    this.expectedStrings.add("A winner");
-    this.expectedStrings.add("is you!");
-    this.expectedStrings.add(null);
-
-    int i = 0;
-    for (String expectedStr: this.expectedStrings) {
-      String wrappedStr = null;
-      if (expectedStr != null) {
-        wrappedStr = "'" + expectedStr + "'";
-      }
-
-      String [] types = { "INT NOT NULL PRIMARY KEY", "VARCHAR(32)" };
-      String [] vals = { Integer.toString(i++) , wrappedStr };
-      this.createTableWithColTypes(types, vals);
-      this.tableNames.add(this.getTableName());
-      this.removeTableDir();
-      incrementTableNum();
-    }
-  }
-
-  @After
-  public void tearDown() {
-    try {
-      for (String table : tableNames) {
-        dropTableIfExists(table);
-      }
-    } catch(SQLException e) {
-      LOG.error("Can't clean up the database:", e);
-    }
-    super.tearDown();
-  }
-
-  @Test
-  public void testMultiTableImport() throws IOException {
-    String [] argv = getArgv(null, null);
-    runImport(new ImportAllTablesTool(), argv);
-
-    Path warehousePath = new Path(this.getWarehouseDir());
-    int i = 0;
-    for (String tableName : this.tableNames) {
-      Path tablePath = new Path(warehousePath, tableName);
-      Path filePath = new Path(tablePath, "part-m-00000");
-
-      // dequeue the expected value for this table. This
-      // list has the same order as the tableNames list.
-      String expectedVal = Integer.toString(i++) + ","
-          + this.expectedStrings.get(0);
-      this.expectedStrings.remove(0);
-
-      BufferedReader reader = null;
-      if (!isOnPhysicalCluster()) {
-        reader = new BufferedReader(
-            new InputStreamReader(new FileInputStream(
-                new File(filePath.toString()))));
-      } else {
-        FileSystem dfs = FileSystem.get(getConf());
-        FSDataInputStream dis = dfs.open(filePath);
-        reader = new BufferedReader(new InputStreamReader(dis));
-      }
-      try {
-        String line = reader.readLine();
-        assertEquals("Table " + tableName + " expected a different string",
-            expectedVal, line);
-      } finally {
-        IOUtils.closeStream(reader);
-      }
-    }
-  }
-
-  @Test
-  public void testMultiTableImportAsParquetFormat() throws IOException {
-    String [] argv = getArgv(new String[]{"--as-parquetfile"}, null);
-    runImport(new ImportAllTablesTool(), argv);
-
-    Path warehousePath = new Path(this.getWarehouseDir());
-    int i = 0;
-    for (String tableName : this.tableNames) {
-      Path tablePath = new Path(warehousePath, tableName);
-      Dataset dataset = Datasets.load("dataset:file:" + tablePath);
-
-      // dequeue the expected value for this table. This
-      // list has the same order as the tableNames list.
-      String expectedVal = Integer.toString(i++) + ","
-          + this.expectedStrings.get(0);
-      this.expectedStrings.remove(0);
-
-      DatasetReader<GenericRecord> reader = dataset.newReader();
-      try {
-        GenericRecord record = reader.next();
-        String line = record.get(0) + "," + record.get(1);
-        assertEquals("Table " + tableName + " expected a different string",
-            expectedVal, line);
-        assertFalse(reader.hasNext());
-      } finally {
-        reader.close();
-      }
-    }
-  }
-
-  @Test
-  public void testMultiTableImportWithExclude() throws IOException {
-    String exclude = this.tableNames.get(0);
-    String [] argv = getArgv(null, new String[]{ exclude });
-    runImport(new ImportAllTablesTool(), argv);
-
-    Path warehousePath = new Path(this.getWarehouseDir());
-    int i = 0;
-    for (String tableName : this.tableNames) {
-      Path tablePath = new Path(warehousePath, tableName);
-      Path filePath = new Path(tablePath, "part-m-00000");
-
-      // dequeue the expected value for this table. This
-      // list has the same order as the tableNames list.
-      String expectedVal = Integer.toString(i++) + ","
-          + this.expectedStrings.get(0);
-      this.expectedStrings.remove(0);
-
-      BufferedReader reader = null;
-      if (!isOnPhysicalCluster()) {
-        reader = new BufferedReader(
-            new InputStreamReader(new FileInputStream(
-                new File(filePath.toString()))));
-      } else {
-        FSDataInputStream dis;
-        FileSystem dfs = FileSystem.get(getConf());
-        if (tableName.equals(exclude)) {
-          try {
-            dis = dfs.open(filePath);
-            assertFalse(true);
-          } catch (FileNotFoundException e) {
-            // Success
-            continue;
-          }
-        } else {
-          dis = dfs.open(filePath);
-        }
-        reader = new BufferedReader(new InputStreamReader(dis));
-      }
-      try {
-        String line = reader.readLine();
-        assertEquals("Table " + tableName + " expected a different string",
-            expectedVal, line);
-      } finally {
-        IOUtils.closeStream(reader);
-      }
-    }
-  }
-}

http://git-wip-us.apache.org/repos/asf/sqoop/blob/6984a36c/src/test/com/cloudera/sqoop/TestAppendUtils.java
----------------------------------------------------------------------
diff --git a/src/test/com/cloudera/sqoop/TestAppendUtils.java b/src/test/com/cloudera/sqoop/TestAppendUtils.java
deleted file mode 100644
index 486afee..0000000
--- a/src/test/com/cloudera/sqoop/TestAppendUtils.java
+++ /dev/null
@@ -1,315 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.cloudera.sqoop;
-
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Comparator;
-import java.util.regex.Matcher;
-import java.util.regex.Pattern;
-
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FileStatus;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.util.StringUtils;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-
-import com.cloudera.sqoop.manager.ImportJobContext;
-
-import com.cloudera.sqoop.testutil.CommonArgs;
-import com.cloudera.sqoop.testutil.HsqldbTestServer;
-import com.cloudera.sqoop.testutil.ImportJobTestCase;
-import com.cloudera.sqoop.tool.ImportTool;
-import com.cloudera.sqoop.util.AppendUtils;
-import org.junit.Test;
-
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.fail;
-
-/**
- * Test that --append works.
- */
-public class TestAppendUtils extends ImportJobTestCase {
-
-  private static final int PARTITION_DIGITS = 5;
-  private static final String FILEPART_SEPARATOR = "-";
-
-  public static final Log LOG = LogFactory.getLog(TestAppendUtils.class
-      .getName());
-
-  /**
-   * Create the argv to pass to Sqoop.
-   *
-   * @return the argv as an array of strings.
-   */
-  protected ArrayList getOutputlessArgv(boolean includeHadoopFlags, boolean queryBased,
-      String[] colNames, Configuration conf) {
-    if (null == colNames) {
-      colNames = getColNames();
-    }
-
-    String splitByCol = colNames[0];
-    String columnsString = "";
-    for (String col : colNames) {
-      columnsString += col + ",";
-    }
-
-    ArrayList<String> args = new ArrayList<String>();
-
-    if (includeHadoopFlags) {
-      CommonArgs.addHadoopFlags(args);
-    }
-
-    if(queryBased) {
-      args.add("--query");
-      args.add("SELECT * FROM " + getTableName() + " WHERE $CONDITIONS");
-    } else {
-      args.add("--table");
-      args.add(getTableName());
-    }
-    args.add("--columns");
-    args.add(columnsString);
-    args.add("--split-by");
-    args.add(splitByCol);
-    args.add("--connect");
-    args.add(getConnectString());
-    args.add("--as-sequencefile");
-    args.add("--num-mappers");
-    args.add("1");
-
-    args.addAll(getExtraArgs(conf));
-
-    return args;
-  }
-
-  // this test just uses the two int table.
-  protected String getTableName() {
-    return HsqldbTestServer.getTableName();
-  }
-
-  /** the same than ImportJobTestCase but without removing tabledir. */
-  protected void runUncleanImport(String[] argv) throws IOException {
-    // run the tool through the normal entry-point.
-    int ret;
-    try {
-      Configuration conf = getConf();
-      SqoopOptions opts = getSqoopOptions(conf);
-      Sqoop sqoop = new Sqoop(new ImportTool(), conf, opts);
-      ret = Sqoop.runSqoop(sqoop, argv);
-    } catch (Exception e) {
-      LOG.error("Got exception running Sqoop: " + e.toString());
-      e.printStackTrace();
-      ret = 1;
-    }
-
-    // expect a successful return.
-    if (0 != ret) {
-      throw new IOException("Failure during job; return status " + ret);
-    }
-  }
-
-  /** @return FileStatus for data files only. */
-  private FileStatus[] listFiles(FileSystem fs, Path path) throws IOException {
-    FileStatus[] fileStatuses = fs.listStatus(path);
-    ArrayList files = new ArrayList();
-    Pattern patt = Pattern.compile("part.*-([0-9][0-9][0-9][0-9][0-9]).*");
-    for (FileStatus fstat : fileStatuses) {
-      String fname = fstat.getPath().getName();
-      if (!fstat.isDir()) {
-        Matcher mat = patt.matcher(fname);
-        if (mat.matches()) {
-          files.add(fstat);
-        }
-      }
-    }
-    return (FileStatus[]) files.toArray(new FileStatus[files.size()]);
-  }
-
-  private class StatusPathComparator implements Comparator<FileStatus> {
-
-    @Override
-    public int compare(FileStatus fs1, FileStatus fs2) {
-      return fs1.getPath().toString().compareTo(fs2.getPath().toString());
-    }
-  }
-
-  /** @return a concat. string with file-creation dates excluding folders. */
-  private String getFileCreationTimeImage(FileSystem fs, Path outputPath,
-      int fileCount) throws IOException {
-    // create string image with all file creation dates
-    StringBuffer image = new StringBuffer();
-    FileStatus[] fileStatuses = listFiles(fs, outputPath);
-    // sort the file statuses by path so we have a stable order for
-    // using 'fileCount'.
-    Arrays.sort(fileStatuses, new StatusPathComparator());
-    for (int i = 0; i < fileStatuses.length && i < fileCount; i++) {
-      image.append(fileStatuses[i].getPath() + "="
-          + fileStatuses[i].getModificationTime());
-    }
-    return image.toString();
-  }
-
-  /** @return the number part of a partition */
-  private int getFilePartition(Path file) {
-    String filename = file.getName();
-    int pos = filename.lastIndexOf(FILEPART_SEPARATOR);
-    if (pos != -1) {
-      String part = filename.substring(pos + 1, pos + 1 + PARTITION_DIGITS);
-      return Integer.parseInt(part);
-    } else {
-      return 0;
-    }
-  }
-
-  /**
-   * Test for ouput path file-count increase, current files untouched and new
-   * correct partition number.
-   *
-   * @throws IOException
-   */
-  public void runAppendTest(ArrayList args, Path outputPath)
-      throws IOException {
-
-    try {
-
-      // ensure non-existing output dir for insert phase
-      FileSystem fs = FileSystem.get(getConf());
-      if (fs.exists(outputPath)) {
-        fs.delete(outputPath, true);
-      }
-
-      // run Sqoop in INSERT mode
-      String[] argv = (String[]) args.toArray(new String[0]);
-      runUncleanImport(argv);
-
-      // get current file count
-      FileStatus[] fileStatuses = listFiles(fs, outputPath);
-      Arrays.sort(fileStatuses, new StatusPathComparator());
-      int previousFileCount = fileStatuses.length;
-
-      // get string image with all file creation dates
-      String previousImage = getFileCreationTimeImage(fs, outputPath,
-          previousFileCount);
-
-      // get current last partition number
-      Path lastFile = fileStatuses[fileStatuses.length - 1].getPath();
-      int lastPartition = getFilePartition(lastFile);
-
-      // run Sqoop in APPEND mode
-      args.add("--append");
-      argv = (String[]) args.toArray(new String[0]);
-      runUncleanImport(argv);
-
-      // check directory file increase
-      fileStatuses = listFiles(fs, outputPath);
-      Arrays.sort(fileStatuses, new StatusPathComparator());
-      int currentFileCount = fileStatuses.length;
-      assertTrue("Output directory didn't got increased in file count ",
-          currentFileCount > previousFileCount);
-
-      // check previous files weren't modified, also works for partition
-      // overlapping
-      String currentImage = getFileCreationTimeImage(fs, outputPath,
-          previousFileCount);
-      assertEquals("Previous files to appending operation were modified",
-          currentImage, previousImage);
-
-      // check that exists at least 1 new correlative partition
-      // let's use a different way than the code being tested
-      Path newFile = fileStatuses[previousFileCount].getPath(); // there is a
-                                                                // new bound now
-      int newPartition = getFilePartition(newFile);
-      assertTrue("New partition file isn't correlative",
-          lastPartition + 1 == newPartition);
-
-    } catch (Exception e) {
-      LOG.error("Got Exception: " + StringUtils.stringifyException(e));
-      fail(e.toString());
-    }
-  }
-
-  /** independent to target-dir. */
-  @Test
-  public void testAppend() throws IOException {
-    ArrayList args = getOutputlessArgv(false, false, HsqldbTestServer.getFieldNames(), getConf());
-    args.add("--warehouse-dir");
-    args.add(getWarehouseDir());
-
-    Path output = new Path(getWarehouseDir(), HsqldbTestServer.getTableName());
-    runAppendTest(args, output);
-  }
-
-  /** working with target-dir. */
-  @Test
-  public void testAppendToTargetDir() throws IOException {
-    ArrayList args = getOutputlessArgv(false, false, HsqldbTestServer.getFieldNames(), getConf());
-    String targetDir = getWarehouseDir() + "/tempTargetDir";
-    args.add("--target-dir");
-    args.add(targetDir);
-
-    // there's no need for a new param
-    // in diff. w/--warehouse-dir there will no be $tablename dir
-    Path output = new Path(targetDir);
-    runAppendTest(args, output);
-  }
-
-  /**
-   * Query based import should also work in append mode.
-   *
-   * @throws IOException
-   */
-  @Test
-  public void testAppendWithQuery() throws IOException {
-    ArrayList args = getOutputlessArgv(false, true, HsqldbTestServer.getFieldNames(), getConf());
-    String targetDir = getWarehouseDir() + "/tempTargetDir";
-    args.add("--target-dir");
-    args.add(targetDir);
-
-    Path output = new Path(targetDir);
-    runAppendTest(args, output);
-  }
-
-  /**
-   * If the append source does not exist, don't crash.
-   */
-  @Test
-  public void testAppendSrcDoesNotExist() throws IOException {
-    Configuration conf = new Configuration();
-    if (!isOnPhysicalCluster()) {
-      conf.set(CommonArgs.FS_DEFAULT_NAME, CommonArgs.LOCAL_FS);
-    }
-    SqoopOptions options = new SqoopOptions(conf);
-    options.setTableName("meep");
-    Path missingPath = new Path("doesNotExistForAnyReason");
-    FileSystem local = FileSystem.getLocal(conf);
-    assertFalse(local.exists(missingPath));
-    ImportJobContext importContext = new ImportJobContext("meep", null,
-        options, missingPath);
-    AppendUtils utils = new AppendUtils(importContext);
-    utils.append();
-  }
-
-}
-

http://git-wip-us.apache.org/repos/asf/sqoop/blob/6984a36c/src/test/com/cloudera/sqoop/TestAvroExport.java
----------------------------------------------------------------------
diff --git a/src/test/com/cloudera/sqoop/TestAvroExport.java b/src/test/com/cloudera/sqoop/TestAvroExport.java
deleted file mode 100644
index ea456e2..0000000
--- a/src/test/com/cloudera/sqoop/TestAvroExport.java
+++ /dev/null
@@ -1,536 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.cloudera.sqoop;
-
-import static org.junit.Assert.assertArrayEquals;
-import static org.junit.Assert.assertEquals;
-
-import com.cloudera.sqoop.testutil.BaseSqoopTestCase;
-import com.cloudera.sqoop.testutil.CommonArgs;
-import com.cloudera.sqoop.testutil.ExportJobTestCase;
-import com.google.common.collect.Lists;
-
-import java.io.IOException;
-import java.io.OutputStream;
-import java.math.BigDecimal;
-import java.nio.ByteBuffer;
-import java.sql.Connection;
-import java.sql.PreparedStatement;
-import java.sql.ResultSet;
-import java.sql.SQLException;
-import java.sql.Statement;
-import java.util.ArrayList;
-import java.util.List;
-
-import org.apache.avro.Conversions;
-import org.apache.avro.LogicalTypes;
-import org.apache.avro.Schema;
-import org.apache.avro.Schema.Field;
-import org.apache.avro.file.DataFileWriter;
-import org.apache.avro.generic.GenericData;
-import org.apache.avro.generic.GenericDatumWriter;
-import org.apache.avro.generic.GenericRecord;
-import org.apache.avro.io.DatumWriter;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.junit.Test;
-import org.junit.Rule;
-import org.junit.rules.ExpectedException;
-
-/**
- * Test that we can export Avro Data Files from HDFS into databases.
- */
-
-public class TestAvroExport extends ExportJobTestCase {
-
-  @Rule
-  public ExpectedException thrown = ExpectedException.none();
-
-  /**
-   * @return an argv for the CodeGenTool to use when creating tables to export.
-   */
-  protected String [] getCodeGenArgv(String... extraArgs) {
-    List<String> codeGenArgv = new ArrayList<String>();
-
-    if (null != extraArgs) {
-      for (String arg : extraArgs) {
-        codeGenArgv.add(arg);
-      }
-    }
-
-    codeGenArgv.add("--table");
-    codeGenArgv.add(getTableName());
-    codeGenArgv.add("--connect");
-    codeGenArgv.add(getConnectString());
-
-    return codeGenArgv.toArray(new String[0]);
-  }
-
-  /** When generating data for export tests, each column is generated
-      according to a ColumnGenerator. Methods exist for determining
-      what to put into Avro objects in the files to export, as well
-      as what the object representation of the column as returned by
-      the database should look like.
-    */
-  public interface ColumnGenerator {
-    /** For a row with id rowNum, what should we write into that
-        Avro record to export?
-      */
-    Object getExportValue(int rowNum);
-
-    /** Return the Avro schema for the field. */
-    Schema getColumnAvroSchema();
-
-    /** For a row with id rowNum, what should the database return
-        for the given column's value?
-      */
-    Object getVerifyValue(int rowNum);
-
-    /** Return the column type to put in the CREATE TABLE statement. */
-    String getColumnType();
-  }
-
-  private ColumnGenerator colGenerator(final Object exportValue,
-      final Schema schema, final Object verifyValue,
-      final String columnType) {
-    return new ColumnGenerator() {
-      @Override
-      public Object getVerifyValue(int rowNum) {
-        return verifyValue;
-      }
-      @Override
-      public Object getExportValue(int rowNum) {
-        return exportValue;
-      }
-      @Override
-      public String getColumnType() {
-        return columnType;
-      }
-      @Override
-      public Schema getColumnAvroSchema() {
-        return schema;
-      }
-    };
-  }
-
-  /**
-   * Create a data file that gets exported to the db.
-   * @param fileNum the number of the file (for multi-file export)
-   * @param numRecords how many records to write to the file.
-   */
-  protected void createAvroFile(int fileNum, int numRecords,
-      ColumnGenerator... extraCols) throws IOException {
-
-    Path tablePath = getTablePath();
-    Path filePath = new Path(tablePath, "part" + fileNum);
-
-    Configuration conf = new Configuration();
-    if (!BaseSqoopTestCase.isOnPhysicalCluster()) {
-      conf.set(CommonArgs.FS_DEFAULT_NAME, CommonArgs.LOCAL_FS);
-    }
-    FileSystem fs = FileSystem.get(conf);
-    fs.mkdirs(tablePath);
-    OutputStream os = fs.create(filePath);
-
-    Schema schema = buildAvroSchema(extraCols);
-    DatumWriter<GenericRecord> datumWriter =
-      new GenericDatumWriter<GenericRecord>();
-    DataFileWriter<GenericRecord> dataFileWriter =
-      new DataFileWriter<GenericRecord>(datumWriter);
-    dataFileWriter.create(schema, os);
-
-    for (int i = 0; i < numRecords; i++) {
-      GenericRecord record = new GenericData.Record(schema);
-      record.put("id", i);
-      record.put("msg", getMsgPrefix() + i);
-      addExtraColumns(record, i, extraCols);
-      dataFileWriter.append(record);
-    }
-
-    dataFileWriter.close();
-    os.close();
-  }
-
-  private Schema buildAvroSchema(ColumnGenerator... extraCols) {
-    List<Field> fields = new ArrayList<Field>();
-    fields.add(buildAvroField("id", Schema.Type.INT));
-    fields.add(buildAvroField("msg", Schema.Type.STRING));
-    int colNum = 0;
-    // Issue [SQOOP-2846]
-    if (null != extraCols) {
-      for (ColumnGenerator gen : extraCols) {
-        if (gen.getColumnAvroSchema() != null) {
-          fields.add(buildAvroField(forIdx(colNum++), gen.getColumnAvroSchema()));
-        }
-      }
-    }
-    Schema schema = Schema.createRecord("myschema", null, null, false);
-    schema.setFields(fields);
-    return schema;
-  }
-
-  private void addExtraColumns(GenericRecord record, int rowNum,
-      ColumnGenerator[] extraCols) {
-    int colNum = 0;
-    // Issue [SQOOP-2846]
-    if (null != extraCols) {
-      for (ColumnGenerator gen : extraCols) {
-        if (gen.getColumnAvroSchema() != null) {
-          record.put(forIdx(colNum++), gen.getExportValue(rowNum));
-        }
-      }
-    }
-  }
-
-  private Field buildAvroField(String name, Schema.Type type) {
-    return new Field(name, Schema.create(type), null, null);
-  }
-
-  private Field buildAvroField(String name, Schema schema) {
-    return new Field(name, schema, null, null);
-  }
-
-  /** Return the column name for a column index.
-   *  Each table contains two columns named 'id' and 'msg', and then an
-   *  arbitrary number of additional columns defined by ColumnGenerators.
-   *  These columns are referenced by idx 0, 1, 2...
-   *  @param idx the index of the ColumnGenerator in the array passed to
-   *   createTable().
-   *  @return the name of the column
-   */
-  protected String forIdx(int idx) {
-    return "col" + idx;
-  }
-
-  /**
-   * Return a SQL statement that drops a table, if it exists.
-   * @param tableName the table to drop.
-   * @return the SQL statement to drop that table.
-   */
-  protected String getDropTableStatement(String tableName) {
-    return "DROP TABLE " + tableName + " IF EXISTS";
-  }
-
-  /** Create the table definition to export to, removing any prior table.
-      By specifying ColumnGenerator arguments, you can add extra columns
-      to the table of arbitrary type.
-   */
-  private void createTable(ColumnGenerator... extraColumns)
-      throws SQLException {
-    Connection conn = getConnection();
-    PreparedStatement statement = conn.prepareStatement(
-        getDropTableStatement(getTableName()),
-        ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);
-    try {
-      statement.executeUpdate();
-      conn.commit();
-    } finally {
-      statement.close();
-    }
-
-    StringBuilder sb = new StringBuilder();
-    sb.append("CREATE TABLE ");
-    sb.append(getTableName());
-    sb.append(" (id INT NOT NULL PRIMARY KEY, msg VARCHAR(64)");
-    int colNum = 0;
-    for (ColumnGenerator gen : extraColumns) {
-      if (gen.getColumnType() != null) {
-        sb.append(", " + forIdx(colNum++)  + " " + gen.getColumnType());
-      }
-    }
-    sb.append(")");
-
-    statement = conn.prepareStatement(sb.toString(),
-        ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);
-    try {
-      statement.executeUpdate();
-      conn.commit();
-    } finally {
-      statement.close();
-    }
-  }
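
(For illustration: with a single BIT generator like the one used in testPathPatternInExportDir below, the statement assembled here comes out roughly as CREATE TABLE <table> (id INT NOT NULL PRIMARY KEY, msg VARCHAR(64), col0 BIT); generators whose getColumnType() returns null contribute no column.)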
-
-  /**
-   * Create the table definition to export to, and insert one record that is
-   * used to identify the updates. Issue [SQOOP-2846]
-   */
-  private void createTableWithInsert() throws SQLException {
-    Connection conn = getConnection();
-    PreparedStatement statement = conn.prepareStatement(getDropTableStatement(getTableName()),
-        ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);
-    try {
-      statement.executeUpdate();
-      conn.commit();
-    } finally {
-      statement.close();
-    }
-
-    StringBuilder sb = new StringBuilder();
-    sb.append("CREATE TABLE ");
-    sb.append(getTableName());
-    sb.append(" (id INT NOT NULL PRIMARY KEY, msg VARCHAR(64)");
-    sb.append(")");
-    statement = conn.prepareStatement(sb.toString(), ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);
-    try {
-      statement.executeUpdate();
-      Statement statement2 = conn.createStatement();
-      String insertCmd = "INSERT INTO " + getTableName() + " (ID,MSG) VALUES(" + 0 + ",'testMsg');";
-      statement2.execute(insertCmd);
-      conn.commit();
-    } finally {
-      statement.close();
-    }
-  }
-
-
-  /** Verify that on a given row, a column has a given value.
-   * @param id the id column specifying the row to test.
-   */
-  private void assertColValForRowId(int id, String colName, Object expectedVal)
-      throws SQLException {
-    Connection conn = getConnection();
-    LOG.info("Verifying column " + colName + " has value " + expectedVal);
-
-    PreparedStatement statement = conn.prepareStatement(
-        "SELECT " + colName + " FROM " + getTableName() + " WHERE ID = " + id,
-        ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);
-    Object actualVal = null;
-    try {
-      ResultSet rs = statement.executeQuery();
-      try {
-        rs.next();
-        actualVal = rs.getObject(1);
-      } finally {
-        rs.close();
-      }
-    } finally {
-      statement.close();
-    }
-
-    if (expectedVal != null && expectedVal instanceof byte[]) {
-      assertArrayEquals((byte[]) expectedVal, (byte[]) actualVal);
-    } else {
-      assertEquals("Got unexpected column value", expectedVal, actualVal);
-    }
-  }
-
-  /** Verify that for the max and min values of the 'id' column, the values
-      for a given column meet the expected values.
-   */
-  protected void assertColMinAndMax(String colName, ColumnGenerator generator)
-      throws SQLException {
-    Connection conn = getConnection();
-    int minId = getMinRowId(conn);
-    int maxId = getMaxRowId(conn);
-
-    LOG.info("Checking min/max for column " + colName + " with type "
-        + generator.getColumnType());
-
-    Object expectedMin = generator.getVerifyValue(minId);
-    Object expectedMax = generator.getVerifyValue(maxId);
-
-    assertColValForRowId(minId, colName, expectedMin);
-    assertColValForRowId(maxId, colName, expectedMax);
-  }
-
-  @Test
-  public void testSupportedAvroTypes() throws IOException, SQLException {
-    GenericData.get().addLogicalTypeConversion(new Conversions.DecimalConversion());
-
-    String[] argv = {};
-    final int TOTAL_RECORDS = 1 * 10;
-
-    byte[] b = new byte[] { (byte) 1, (byte) 2 };
-    Schema fixed = Schema.createFixed("myfixed", null, null, 2);
-    Schema enumeration = Schema.createEnum("myenum", null, null,
-        Lists.newArrayList("a", "b"));
-    Schema decimalSchema = LogicalTypes.decimal(3,2)
-        .addToSchema(Schema.createFixed("dec1", null, null, 2));
-
-    ColumnGenerator[] gens = new ColumnGenerator[] {
-      colGenerator(true, Schema.create(Schema.Type.BOOLEAN), true, "BIT"),
-      colGenerator(100, Schema.create(Schema.Type.INT), 100, "INTEGER"),
-      colGenerator(200L, Schema.create(Schema.Type.LONG), 200L, "BIGINT"),
-      // HSQLDB maps REAL to double, not float:
-      colGenerator(1.0f, Schema.create(Schema.Type.FLOAT), 1.0d, "REAL"),
-      colGenerator(2.0d, Schema.create(Schema.Type.DOUBLE), 2.0d, "DOUBLE"),
-      colGenerator("s", Schema.create(Schema.Type.STRING), "s", "VARCHAR(8)"),
-      colGenerator(ByteBuffer.wrap(b), Schema.create(Schema.Type.BYTES),
-          b, "VARBINARY(8)"),
-      colGenerator(new GenericData.Fixed(fixed, b), fixed,
-          b, "BINARY(2)"),
-      colGenerator(new GenericData.EnumSymbol(enumeration, "a"), enumeration,
-          "a", "VARCHAR(8)"),
-      colGenerator(new BigDecimal("2.00"), decimalSchema,
-          new BigDecimal("2.00"), "DECIMAL(3,2)"),
-      colGenerator("22.00", Schema.create(Schema.Type.STRING),
-          new BigDecimal("22.00"), "DECIMAL(4,2)"),
-    };
-    createAvroFile(0, TOTAL_RECORDS, gens);
-    createTable(gens);
-    runExport(getArgv(true, 10, 10, newStrArray(argv, "-m", "" + 1)));
-    verifyExport(TOTAL_RECORDS);
-    for (int i = 0; i < gens.length; i++) {
-      assertColMinAndMax(forIdx(i), gens[i]);
-    }
-  }
-
-  @Test
-  public void testPathPatternInExportDir() throws IOException, SQLException {
-    final int TOTAL_RECORDS = 10;
-
-    ColumnGenerator[] gens = new ColumnGenerator[] {
-      colGenerator(true, Schema.create(Schema.Type.BOOLEAN), true, "BIT"),
-    };
-
-    createAvroFile(0, TOTAL_RECORDS, gens);
-    createTable(gens);
-
-    // Wrap the path in a single-element glob set, preserving the leading '/'
-    // (e.g. /path/to/table becomes /{path/to/table}).
-    String pathPattern = new StringBuilder(getTablePath().toString())
-            .insert(1, "{")
-            .append("}")
-            .toString();
-
-    runExport(getArgv(true, 10, 10, "--export-dir", pathPattern));
-    verifyExport(TOTAL_RECORDS);
-  }
-
-  @Test
-  public void testNullableField() throws IOException, SQLException {
-    String[] argv = {};
-    final int TOTAL_RECORDS = 1 * 10;
-
-    List<Schema> childSchemas = new ArrayList<Schema>();
-    childSchemas.add(Schema.create(Schema.Type.NULL));
-    childSchemas.add(Schema.create(Schema.Type.STRING));
-    Schema schema =  Schema.createUnion(childSchemas);
-    ColumnGenerator gen0 = colGenerator(null, schema, null, "VARCHAR(64)");
-    ColumnGenerator gen1 = colGenerator("s", schema, "s", "VARCHAR(64)");
-    createAvroFile(0, TOTAL_RECORDS, gen0, gen1);
-    createTable(gen0, gen1);
-    runExport(getArgv(true, 10, 10, newStrArray(argv, "-m", "" + 1)));
-    verifyExport(TOTAL_RECORDS);
-    assertColMinAndMax(forIdx(0), gen0);
-    assertColMinAndMax(forIdx(1), gen1);
-  }
-
-  @Test
-  public void testAvroRecordsNotSupported() throws IOException, SQLException {
-    String[] argv = {};
-    final int TOTAL_RECORDS = 1;
-
-    Schema schema =  Schema.createRecord("nestedrecord", null, null, false);
-    schema.setFields(Lists.newArrayList(buildAvroField("myint",
-        Schema.Type.INT)));
-    GenericRecord record = new GenericData.Record(schema);
-    record.put("myint", 100);
-    // DB type is not used so can be anything:
-    ColumnGenerator gen = colGenerator(record, schema, null, "VARCHAR(64)");
-    createAvroFile(0, TOTAL_RECORDS,  gen);
-    createTable(gen);
-
-    thrown.expect(Exception.class);
-    thrown.reportMissingExceptionWithMessage("Expected Exception as Avro 
records are not supported");
-    runExport(getArgv(true, 10, 10, newStrArray(argv, "-m", "" + 1)));
-  }
-
-  @Test
-  public void testMissingDatabaseFields() throws IOException, SQLException {
-    String[] argv = {};
-    final int TOTAL_RECORDS = 1;
-
-    // null column type means don't create a database column
-    // the Avro value will not be exported
-    ColumnGenerator gen = colGenerator(100, Schema.create(Schema.Type.INT),
-        null, null);
-    createAvroFile(0, TOTAL_RECORDS, gen);
-    createTable(gen);
-    runExport(getArgv(true, 10, 10, newStrArray(argv, "-m", "" + 1)));
-    verifyExport(TOTAL_RECORDS);
-  }
-
-  // Test Case for Issue [SQOOP-2846]
-  @Test
-  public void testAvroWithUpsert() throws IOException, SQLException {
-    String[] argv = { "--update-key", "ID", "--update-mode", "allowinsert" };
-    final int TOTAL_RECORDS = 2;
-    // ColumnGenerator gen = colGenerator("100",
-    // Schema.create(Schema.Type.STRING), null, "VARCHAR(64)");
-    createAvroFile(0, TOTAL_RECORDS, null);
-    createTableWithInsert();
-
-    thrown.expect(Exception.class);
-    thrown.reportMissingExceptionWithMessage("Expected Exception during Avro 
export with --update-mode");
-    runExport(getArgv(true, 10, 10, newStrArray(argv, "-m", "" + 1)));
-  }
-
-  // Test Case for Issue [SQOOP-2846]
-  @Test
-  public void testAvroWithUpdateKey() throws IOException, SQLException {
-    String[] argv = { "--update-key", "ID" };
-    final int TOTAL_RECORDS = 1;
-    // ColumnGenerator gen = colGenerator("100",
-    // Schema.create(Schema.Type.STRING), null, "VARCHAR(64)");
-    createAvroFile(0, TOTAL_RECORDS, null);
-    createTableWithInsert();
-    runExport(getArgv(true, 10, 10, newStrArray(argv, "-m", "" + 1)));
-    verifyExport(getMsgPrefix() + "0");
-  }
-
-  @Test
-  public void testMissingAvroFields()  throws IOException, SQLException {
-    String[] argv = {};
-    final int TOTAL_RECORDS = 1;
-
-    // null Avro schema means don't create an Avro field
-    ColumnGenerator gen = colGenerator(null, null, null, "VARCHAR(64)");
-    createAvroFile(0, TOTAL_RECORDS, gen);
-    createTable(gen);
-
-    thrown.expect(Exception.class);
-    thrown.reportMissingExceptionWithMessage("Expected Exception on missing 
Avro fields");
-    runExport(getArgv(true, 10, 10, newStrArray(argv, "-m", "" + 1)));
-  }
-
-  @Test
-  public void testSpecifiedColumnsAsAvroFields()  throws IOException, SQLException {
-    final int TOTAL_RECORDS = 10;
-    ColumnGenerator[] gens = new ColumnGenerator[] {
-      colGenerator(000, Schema.create(Schema.Type.INT), 100, "INTEGER"), //col0
-      colGenerator(111, Schema.create(Schema.Type.INT), 100, "INTEGER"), //col1
-      colGenerator(222, Schema.create(Schema.Type.INT), 100, "INTEGER"), //col2
-      colGenerator(333, Schema.create(Schema.Type.INT), 100, "INTEGER")  //col3
-    };
-    createAvroFile(0, TOTAL_RECORDS, gens);
-    createTable(gens);
-    runExport(getArgv(true, 10, 10, newStrArray(null, "-m", "" + 1, "--columns", "ID,MSG,COL1,COL2")));
-    verifyExport(TOTAL_RECORDS);
-    assertColValForRowId(0, "col0", null);
-    assertColValForRowId(0, "col1", 111);
-    assertColValForRowId(0, "col2", 222);
-    assertColValForRowId(0, "col3", null);
-    assertColValForRowId(9, "col0", null);
-    assertColValForRowId(9, "col1", 111);
-    assertColValForRowId(9, "col2", 222);
-    assertColValForRowId(9, "col3", null);
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/sqoop/blob/6984a36c/src/test/com/cloudera/sqoop/TestAvroImport.java
----------------------------------------------------------------------
diff --git a/src/test/com/cloudera/sqoop/TestAvroImport.java 
b/src/test/com/cloudera/sqoop/TestAvroImport.java
deleted file mode 100644
index da79c7a..0000000
--- a/src/test/com/cloudera/sqoop/TestAvroImport.java
+++ /dev/null
@@ -1,382 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.cloudera.sqoop;
-
-import java.io.File;
-import java.io.IOException;
-import java.nio.ByteBuffer;
-import java.sql.SQLException;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.List;
-
-import org.apache.avro.Schema;
-import org.apache.avro.Schema.Field;
-import org.apache.avro.Schema.Type;
-import org.apache.avro.file.DataFileConstants;
-import org.apache.avro.file.DataFileReader;
-import org.apache.avro.generic.GenericDatumReader;
-import org.apache.avro.generic.GenericRecord;
-import org.apache.avro.io.DatumReader;
-import org.apache.avro.mapred.FsInput;
-import org.apache.avro.util.Utf8;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.Path;
-
-import com.cloudera.sqoop.testutil.BaseSqoopTestCase;
-import com.cloudera.sqoop.testutil.CommonArgs;
-import com.cloudera.sqoop.testutil.HsqldbTestServer;
-import com.cloudera.sqoop.testutil.ImportJobTestCase;
-import org.junit.Test;
-
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertNotNull;
-import static org.junit.Assert.assertNull;
-import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.fail;
-
-/**
- * Tests --as-avrodatafile.
- */
-public class TestAvroImport extends ImportJobTestCase {
-
-  public static final Log LOG = LogFactory
-      .getLog(TestAvroImport.class.getName());
-
-  /**
-   * Create the argv to pass to Sqoop.
-   *
-   * @return the argv as an array of strings.
-   */
-  protected String[] getOutputArgv(boolean includeHadoopFlags,
-          String[] extraArgs) {
-    ArrayList<String> args = new ArrayList<String>();
-
-    if (includeHadoopFlags) {
-      CommonArgs.addHadoopFlags(args);
-    }
-    args.add("-m");
-    args.add("1");
-    args.add("--table");
-    args.add(getTableName());
-    args.add("--connect");
-    args.add(HsqldbTestServer.getUrl());
-    args.add("--warehouse-dir");
-    args.add(getWarehouseDir());
-    args.add("--split-by");
-    args.add("INTFIELD1");
-    args.add("--as-avrodatafile");
-    if (extraArgs != null) {
-      args.addAll(Arrays.asList(extraArgs));
-    }
-
-    return args.toArray(new String[0]);
-  }
-
-  @Test
-  public void testAvroImport() throws IOException {
-    this.setCurTableName("Avro_Import_Test");
-    avroImportTestHelper(null, null);
-  }
-
-  @Test
-  public void testDeflateCompressedAvroImport() throws IOException {
-    this.setCurTableName("Deflate_Compressed_Avro_Import_Test_1");
-    avroImportTestHelper(new String[] {"--compression-codec",
-      "org.apache.hadoop.io.compress.DefaultCodec", }, "deflate");
-  }
-
-  @Test
-  public void testDefaultCompressedAvroImport() throws IOException {
-    this.setCurTableName("Deflate_Compressed_Avro_Import_Test_2");
-    avroImportTestHelper(new String[] {"--compress", }, "deflate");
-  }
-
-  @Test
-  public void testUnsupportedCodec() throws IOException {
-    try {
-      this.setCurTableName("Deflate_Compressed_Avro_Import_Test_3");
-      avroImportTestHelper(new String[] {"--compression-codec", "foobar", },
-        null);
-      fail("Expected IOException");
-    } catch (IOException e) {
-      // Exception is expected
-    }
-  }
-
-  /**
-   * Helper method that runs an import using Avro with optional command line
-   * arguments and checks that the created file matches the expectations.
-   * <p/>
-   * This can be used to test various extra options that are implemented for
-   * the Avro input.
-   *
-   * @param extraArgs extra command line arguments to pass to Sqoop in addition
-   *                  to those that {@link #getOutputArgv(boolean, String[])}
-   *                  returns
-   * @param codec the codec name expected in the Avro file metadata, or null
-   *              if no compression check should be performed
-   */
-  protected void avroImportTestHelper(String[] extraArgs, String codec)
-      throws IOException {
-    String[] types =
-      {"BIT", "INTEGER", "BIGINT", "REAL", "DOUBLE", "VARCHAR(6)",
-        "VARBINARY(2)", "DECIMAL(3,2)"};
-    String[] vals = {"true", "100", "200", "1.0", "2.0", "'s'", "'0102'", 
"'1.00'"};
-    createTableWithColTypes(types, vals);
-
-    runImport(getOutputArgv(true, extraArgs));
-
-    Path outputFile = new Path(getTablePath(), "part-m-00000.avro");
-    DataFileReader<GenericRecord> reader = read(outputFile);
-    Schema schema = reader.getSchema();
-    assertEquals(Schema.Type.RECORD, schema.getType());
-    List<Field> fields = schema.getFields();
-    assertEquals(types.length, fields.size());
-
-    checkField(fields.get(0), "DATA_COL0", Schema.Type.BOOLEAN);
-    checkField(fields.get(1), "DATA_COL1", Schema.Type.INT);
-    checkField(fields.get(2), "DATA_COL2", Schema.Type.LONG);
-    checkField(fields.get(3), "DATA_COL3", Schema.Type.FLOAT);
-    checkField(fields.get(4), "DATA_COL4", Schema.Type.DOUBLE);
-    checkField(fields.get(5), "DATA_COL5", Schema.Type.STRING);
-    checkField(fields.get(6), "DATA_COL6", Schema.Type.BYTES);
-    checkField(fields.get(7), "DATA_COL7", Schema.Type.STRING);
-
-    GenericRecord record1 = reader.next();
-    assertEquals("DATA_COL0", true, record1.get("DATA_COL0"));
-    assertEquals("DATA_COL1", 100, record1.get("DATA_COL1"));
-    assertEquals("DATA_COL2", 200L, record1.get("DATA_COL2"));
-    assertEquals("DATA_COL3", 1.0f, record1.get("DATA_COL3"));
-    assertEquals("DATA_COL4", 2.0, record1.get("DATA_COL4"));
-    assertEquals("DATA_COL5", new Utf8("s"), record1.get("DATA_COL5"));
-    Object object = record1.get("DATA_COL6");
-    assertTrue(object instanceof ByteBuffer);
-    ByteBuffer b = ((ByteBuffer) object);
-    assertEquals((byte) 1, b.get(0));
-    assertEquals((byte) 2, b.get(1));
-    assertEquals("DATA_COL7", "1.00", record1.get("DATA_COL7").toString());
-
-    if (codec != null) {
-      assertEquals(codec, reader.getMetaString(DataFileConstants.CODEC));
-    }
-
-    checkSchemaFile(schema);
-  }
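
(A hedged sketch of reusing the helper for another codec; this test does not exist in the original class and assumes Hadoop's SnappyCodec is usable in the test environment and is recorded as the Avro "snappy" container codec.)

  @Test
  public void testSnappyCompressedAvroImport() throws IOException {
    // Hypothetical case mirroring the deflate tests above with a different codec;
    // requires native Snappy support on the test machine.
    this.setCurTableName("Snappy_Compressed_Avro_Import_Test");
    avroImportTestHelper(new String[] {"--compression-codec",
        "org.apache.hadoop.io.compress.SnappyCodec", }, "snappy");
  }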
-
-  @Test
-  public void testOverrideTypeMapping() throws IOException {
-    String [] types = { "INT" };
-    String [] vals = { "10" };
-    createTableWithColTypes(types, vals);
-
-    String [] extraArgs = { "--map-column-java", "DATA_COL0=String"};
-
-    runImport(getOutputArgv(true, extraArgs));
-
-    Path outputFile = new Path(getTablePath(), "part-m-00000.avro");
-    DataFileReader<GenericRecord> reader = read(outputFile);
-    Schema schema = reader.getSchema();
-    assertEquals(Schema.Type.RECORD, schema.getType());
-    List<Field> fields = schema.getFields();
-    assertEquals(types.length, fields.size());
-
-    checkField(fields.get(0), "DATA_COL0", Schema.Type.STRING);
-
-    GenericRecord record1 = reader.next();
-    assertEquals("DATA_COL0", new Utf8("10"), record1.get("DATA_COL0"));
-  }
-
-  @Test
-  public void testFirstUnderscoreInColumnName() throws IOException {
-    String [] names = { "_NAME" };
-    String [] types = { "INT" };
-    String [] vals = { "1987" };
-    createTableWithColTypesAndNames(names, types, vals);
-
-    runImport(getOutputArgv(true, null));
-
-    Path outputFile = new Path(getTablePath(), "part-m-00000.avro");
-    DataFileReader<GenericRecord> reader = read(outputFile);
-    Schema schema = reader.getSchema();
-    assertEquals(Schema.Type.RECORD, schema.getType());
-    List<Field> fields = schema.getFields();
-    assertEquals(types.length, fields.size());
-
-    checkField(fields.get(0), "__NAME", Type.INT);
-
-    GenericRecord record1 = reader.next();
-    assertEquals("__NAME", 1987, record1.get("__NAME"));
-  }
-
-  @Test
-  public void testNonstandardCharactersInColumnName() throws IOException {
-    String [] names = { "avro\uC3A11" };
-    String [] types = { "INT" };
-    String [] vals = { "1987" };
-    this.setCurTableName("Non_Std_Character_Test");
-    createTableWithColTypesAndNames(names, types, vals);
-
-    runImport(getOutputArgv(true, null));
-
-    Path outputFile = new Path(getTablePath(), "part-m-00000.avro");
-    DataFileReader<GenericRecord> reader = read(outputFile);
-    Schema schema = reader.getSchema();
-    assertEquals(Schema.Type.RECORD, schema.getType());
-    List<Field> fields = schema.getFields();
-    assertEquals(types.length, fields.size());
-
-    checkField(fields.get(0), "AVRO\uC3A11", Type.INT);
-
-    GenericRecord record1 = reader.next();
-    assertEquals("AVRO\uC3A11", 1987, record1.get("AVRO\uC3A11"));
-  }
-
-  @Test
-  public void testNonIdentCharactersInColumnName() throws IOException {
-    String [] names = { "test_a-v+r/o" };
-    String [] types = { "INT" };
-    String [] vals = { "2015" };
-    createTableWithColTypesAndNames(names, types, vals);
-
-    runImport(getOutputArgv(true, null));
-
-    Path outputFile = new Path(getTablePath(), "part-m-00000.avro");
-    DataFileReader<GenericRecord> reader = read(outputFile);
-    Schema schema = reader.getSchema();
-    assertEquals(Schema.Type.RECORD, schema.getType());
-    List<Field> fields = schema.getFields();
-    assertEquals(types.length, fields.size());
-
-    checkField(fields.get(0), "TEST_A_V_R_O", Type.INT);
-
-    GenericRecord record1 = reader.next();
-    assertEquals("TEST_A_V_R_O", 2015, record1.get("TEST_A_V_R_O"));
-  }
-
-  /*
-   * Test case for checking multiple columns whose names contain non-standard characters.
-   */
-  @Test
-  public void testNonstandardCharactersInMultipleColumns() throws IOException {
-    String[] names = { "id$1", "id1$" };
-    String[] types = { "INT", "INT" };
-    String[] vals = { "1987", "1988" };
-    this.setCurTableName("Non_Std_Character_Test_For_Multiple_Columns");
-    createTableWithColTypesAndNames(names, types, vals);
-
-    runImport(getOutputArgv(true, null));
-
-    Path outputFile = new Path(getTablePath(), "part-m-00000.avro");
-    DataFileReader<GenericRecord> reader = read(outputFile);
-    Schema schema = reader.getSchema();
-    assertEquals(Schema.Type.RECORD, schema.getType());
-    List<Field> fields = schema.getFields();
-    assertEquals(types.length, fields.size());
-
-    checkField(fields.get(0), "ID_1", Type.INT);
-
-    GenericRecord record1 = reader.next();
-    assertEquals("ID_1", 1987, record1.get("ID_1"));
-    checkField(fields.get(1), "ID1_", Type.INT);
-    assertEquals("ID1_", 1988, record1.get("ID1_"));
-  }
-
-  protected void checkField(Field field, String name, Type type) {
-    assertEquals(name, field.name());
-    assertEquals(Schema.Type.UNION, field.schema().getType());
-    assertEquals(Schema.Type.NULL, field.schema().getTypes().get(0).getType());
-    assertEquals(type, field.schema().getTypes().get(1).getType());
-  }
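
(The union shape that checkField() asserts, a null branch followed by the concrete type, can be built with the generic API as in this purely illustrative fragment; the real schema comes from Sqoop's Avro schema generation, not from test code like this.)

    // Nullable INT column: union of null and int, with the null branch first.
    List<Schema> branches = new ArrayList<Schema>();
    branches.add(Schema.create(Schema.Type.NULL));
    branches.add(Schema.create(Schema.Type.INT));
    Field nullableIntField =
        new Field("DATA_COL0", Schema.createUnion(branches), null, null);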
-
-  @Test
-  public void testNullableAvroImport() throws IOException, SQLException {
-    String [] types = { "INT" };
-    String [] vals = { null };
-    createTableWithColTypes(types, vals);
-
-    runImport(getOutputArgv(true, null));
-
-    Path outputFile = new Path(getTablePath(), "part-m-00000.avro");
-    DataFileReader<GenericRecord> reader = read(outputFile);
-
-    GenericRecord record1 = reader.next();
-    assertNull(record1.get("DATA_COL0"));
-  }
-
-  @Test
-  public void testSpecialCharactersInColumnMappingWithConvertion() throws IOException, SQLException {
-    // escaping enabled by default
-    String [] extraArgsEscapeColNamesWithMapping = { "--map-column-java",
-        "INTFIELD1=String,DATA_#_COL0=String,DATA#COL1=String,DATA___COL2=String"};
-
-    // disable escaping
-    String [] extraArgsEscapingDisables = {"--escape-mapping-column-names", "false"};
-
-    // escaping enabled but mapping not provided
-    String [] extraArgsEscapingWithoutMapping = {};
-
-    checkRecordWithExtraArgs(extraArgsEscapeColNamesWithMapping, "TABLE1");
-    checkRecordWithExtraArgs(extraArgsEscapingDisables, "TABLE2");
-    checkRecordWithExtraArgs(extraArgsEscapingWithoutMapping, "TABLE3");
-  }
-
-  private void checkRecordWithExtraArgs(String[] extraArgs, String tableName) throws IOException {
-    String date = "2017-01-19";
-    String timeStamp = "2017-01-19 14:47:57.112000";
-
-    String [] names = {"INTFIELD1", "DATA_#_COL0", "DATA#COL1", "DATA___COL2"};
-    String [] types = { "INT", "DATE", "TIMESTAMP", "DECIMAL(2,20)" };
-    String [] vals = {"1", "{ts \'" + date + "\'}", "{ts \'" + timeStamp + 
"\'}", "2e20"};
-
-    String [] checkNames =  {"INTFIELD1", "DATA___COL0", "DATA_COL1", "DATA___COL2"};
-
-    setCurTableName(tableName);
-
-    createTableWithColTypesAndNames(names, types, vals);
-    runImport(getOutputArgv(true, extraArgs));
-
-    Path outputFile = new Path(getTablePath(), "part-m-00000.avro");
-    DataFileReader<GenericRecord> reader = read(outputFile);
-    GenericRecord record = reader.next();
-
-    for (String columnName : checkNames) {
-      assertNotNull(record.get(columnName));
-    }
-
-    removeTableDir();
-  }
-
-  protected DataFileReader<GenericRecord> read(Path filename) throws IOException {
-    Configuration conf = new Configuration();
-    if (!BaseSqoopTestCase.isOnPhysicalCluster()) {
-      conf.set(CommonArgs.FS_DEFAULT_NAME, CommonArgs.LOCAL_FS);
-    }
-    FsInput fsInput = new FsInput(filename, conf);
-    DatumReader<GenericRecord> datumReader =
-      new GenericDatumReader<GenericRecord>();
-    return new DataFileReader<GenericRecord>(fsInput, datumReader);
-  }
-
-  protected void checkSchemaFile(final Schema schema) throws IOException {
-    final File schemaFile = new File(schema.getName() + ".avsc");
-    assertTrue(schemaFile.exists());
-    assertEquals(schema, new Schema.Parser().parse(schemaFile));
-  }
-}
