[ 
https://issues.apache.org/jira/browse/ATLAS-1572?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel&focusedCommentId=15885396#comment-15885396
 ] 

Vimal Sharma commented on ATLAS-1572:
-------------------------------------

Function deserializeClassInstances in GraphHelper.java is the culprit. Added a 
couple of LOG statements to debug the issue:
{code}
                LOG.info("Deserialized JSON {}", entityInstance.toString());
                ITypedReferenceableInstance typedInstrance = 
getTypedReferenceableInstance(typeSystem, entityInstance);
                LOG.info("ITypedReferenceable instance {}", 
((ReferenceableInstance)typedInstrance).toString());
{code}


Got the below logs
{code}
2017-02-27 08:24:11,731 INFO  - [NotificationHookConsumer thread-0:] ~ 
Deserialized JSON {Id='(type: sqoop_process, id: <unassigned>)', traits=[], 
values={outputs={Id='(type: hive_table, id: <unassigned>)', traits=[], 
values={name=sqoopregression, 
qualifiedName=default.sqoopregression@Sqoop_Regression, db={Id='(type: hive_db, 
id: <unassigned>)', traits=[], values={name=default, 
qualifiedName=default@Sqoop_Regression, clusterName=Sqoop_Regression}}}}, 
commandlineOpts={db.clear.staging.table=false, hive.import=true, 
db.require.password=true, codegen.output.delimiters.enclose=0, 
codegen.input.delimiters.field=0, customtool.options.jsonmap={}, 
hive.compute.stats.table=false, 
db.connect.string=jdbc:mysql://sqoop-regression-1.openstacklocal/sqoopTest, 
incremental.mode=None, db.table=sqoopRegression, verbose=false, 
codegen.output.delimiters.enclose.required=false, mapreduce.num.mappers=1, 
hdfs.append.dir=false, direct.import=false, hive.drop.delims=false, 
hive.overwrite.table=false, hbase.bulk.load.enabled=false, 
hive.fail.table.exists=false, relaxed.isolation=false, 
hdfs.delete-target.dir=false, jdbc.driver.class=com.mysql.jdbc.Driver, 
split.limit=null, db.username=sqoop, 
codegen.input.delimiters.enclose.required=false, codegen.output.dir=., 
import.direct.split.size=0, reset.onemapper=false, 
codegen.output.delimiters.record=10, temporary.dirRoot=_sqoop, 
hcatalog.create.table=false, db.batch=false, import.fetch.size=1000, 
accumulo.max.latency=5000, hdfs.file.format=TextFile, 
codegen.output.delimiters.field=1, mainframe.input.dataset.type=p, 
codegen.output.delimiters.escape=0, hcatalog.drop.and.create.table=false, 
import.max.inline.lob.size=16777216, hbase.create.table=false, 
codegen.auto.compile.dir=true, 
codegen.compile.dir=/tmp/sqoop-hdfs/compile/ad793204f04ae1977f4abd2fc0715113, 
codegen.input.delimiters.enclose=0, export.new.update=UpdateOnly, 
enable.compression=false, skip.dist.cache=false, accumulo.batch.size=10240000, 
hdfs.warehouse.dir=/apps/hive/warehouse/, codegen.input.delimiters.record=0, 
codegen.input.delimiters.escape=0, accumulo.create.table=false}, 
inputs={Id='(type: sqoop_dbdatastore, id: <unassigned>)', traits=[], 
values={owner=hdfs, 
storeUri=jdbc:mysql://sqoop-regression-1.openstacklocal/sqoopTest, 
dbStoreType=mysql, qualifiedName=mysql --url 
jdbc:mysql://sqoop-regression-1.openstacklocal/sqoopTest --table 
sqoopRegression, name=mysql --url 
jdbc:mysql://sqoop-regression-1.openstacklocal/sqoopTest --table 
sqoopRegression, description=, source=sqoopRegression, storeUse=TABLE}}, 
qualifiedName=sqoop import --connect 
jdbc:mysql://sqoop-regression-1.openstacklocal/sqoopTest --table 
sqoopRegression --hive-import --hive-database default --hive-table 
sqoopregression --hive-cluster Sqoop_Regression, name=sqoop import --connect 
jdbc:mysql://sqoop-regression-1.openstacklocal/sqoopTest --table 
sqoopRegression --hive-import --hive-database default --hive-table 
sqoopregression --hive-cluster Sqoop_Regression, 
startTime=2017-02-27T08:23:23.538Z, endTime=2017-02-27T08:24:07.007Z, 
userName=hdfs, operation=import}} (GraphHelper:948)

2017-02-27 08:24:12,086 INFO  - [NotificationHookConsumer thread-0:] ~ 
ITypedReferenceable instance {
        id : (type: sqoop_process, id: <unassigned>)
        operation :     import
        commandlineOpts :       {accumulo.batch.size=10240000, 
accumulo.create.table=false, accumulo.max.latency=5000, 
codegen.auto.compile.dir=true, 
codegen.compile.dir=/tmp/sqoop-hdfs/compile/ad793204f04ae1977f4abd2fc0715113, 
codegen.input.delimiters.enclose=0, 
codegen.input.delimiters.enclose.required=false, 
codegen.input.delimiters.escape=0, codegen.input.delimiters.field=0, 
codegen.input.delimiters.record=0, codegen.output.delimiters.enclose=0, 
codegen.output.delimiters.enclose.required=false, 
codegen.output.delimiters.escape=0, codegen.output.delimiters.field=1, 
codegen.output.delimiters.record=10, codegen.output.dir=., 
customtool.options.jsonmap={}, db.batch=false, db.clear.staging.table=false, 
db.connect.string=jdbc:mysql://sqoop-regression-1.openstacklocal/sqoopTest, 
db.require.password=true, db.table=sqoopRegression, db.username=sqoop, 
direct.import=false, enable.compression=false, export.new.update=UpdateOnly, 
hbase.bulk.load.enabled=false, hbase.create.table=false, 
hcatalog.create.table=false, hcatalog.drop.and.create.table=false, 
hdfs.append.dir=false, hdfs.delete-target.dir=false, hdfs.file.format=TextFile, 
hdfs.warehouse.dir=/apps/hive/warehouse/, hive.compute.stats.table=false, 
hive.drop.delims=false, hive.fail.table.exists=false, hive.import=true, 
hive.overwrite.table=false, import.direct.split.size=0, import.fetch.size=1000, 
import.max.inline.lob.size=16777216, incremental.mode=None, 
jdbc.driver.class=com.mysql.jdbc.Driver, mainframe.input.dataset.type=p, 
mapreduce.num.mappers=1, relaxed.isolation=false, reset.onemapper=false, 
skip.dist.cache=false, split.limit=null, temporary.dirRoot=_sqoop, 
verbose=false}
        startTime :     2017-02-27T08:23:23.538Z
        endTime :       2017-02-27T08:24:07.007Z
        userName :      hdfs
        inputs :        [{
        id : (type: sqoop_dbdatastore, id: <unassigned>)
        dbStoreType :   mysql
        storeUse :      TABLE
        storeUri :      jdbc:mysql://sqoop-regression-1.openstacklocal/sqoopTest
        source :        sqoopRegression
        qualifiedName :         mysql --url 
jdbc:mysql://sqoop-regression-1.openstacklocal/sqoopTest --table sqoopRegression
        name :  mysql --url 
jdbc:mysql://sqoop-regression-1.openstacklocal/sqoopTest --table sqoopRegression
        description :   
        owner :         hdfs
}]
        outputs :       [{
        id : (type: hive_table, id: <unassigned>)
        db : (type: hive_db, id: <unassigned>)
        createTime :    <null>
        lastAccessTime :        <null>
        comment :       <null>
        retention :     0
        sd : <null>

        partitionKeys :         <null>
        aliases :       <null>
        columns :       <null>
        parameters :    <null>
        viewOriginalText :      <null>
        viewExpandedText :      <null>
        tableType :     <null>
        temporary :     false
        qualifiedName :         default.sqoopregression@Sqoop_Regression
        name :  sqoopregression
        description :   <null>
        owner :         <null>
}]
        qualifiedName :         sqoop import --connect 
jdbc:mysql://sqoop-regression-1.openstacklocal/sqoopTest --table 
sqoopRegression --hive-import --hive-database default --hive-table 
sqoopregression --hive-cluster Sqoop_Regression
        name :  sqoop import --connect 
jdbc:mysql://sqoop-regression-1.openstacklocal/sqoopTest --table 
sqoopRegression --hive-import --hive-database default --hive-table 
sqoopregression --hive-cluster Sqoop_Regression
        description :   <null>
        owner :         <null>
} (GraphHelper:950)
{code}

The function getTypedReferenceableInstance converts values in attributes 
"inputs" and "outputs" of a "sqoop_process" into a list of Referenceables. For 
this reason, the 'sqoop_process' registration succeeds.

One way to handle this on the server side is to route the 'sqoop_process' 
creation/update request through the old entity store.

> sqoop metadata ingest fails with 
> "org.apache.atlas.exception.AtlasBaseException: expected type List or Set; 
> found org.apache.atlas.typesystem.Referenceable"
> ------------------------------------------------------------------------------------------------------------------------------------------------------------
>
>                 Key: ATLAS-1572
>                 URL: https://issues.apache.org/jira/browse/ATLAS-1572
>             Project: Atlas
>          Issue Type: Bug
>          Components: atlas-intg
>    Affects Versions: trunk, 0.8-incubating
>            Reporter: Ayub Khan
>            Assignee: Vimal Sharma
>            Priority: Blocker
>             Fix For: trunk, 0.8-incubating
>
>         Attachments: ATLAS-1572.patch
>
>
> Sqoop import succeeds but the metadata ingest fails with below error
> {noformat}
> 2017-02-20 11:20:43,179 WARN  - [NotificationHookConsumer thread-0:] ~ Max 
> retries exceeded for message [{Id='(type: sqoop_dbdatastore, id: 
> <unassigned>)', traits=[], values={owner=hrt_qa, 
> storeUri=jdbc:mysql://ctr-e129-1487033772569-11862-01-000002.hwx.site/sqoopTests?zeroDateTimeBehavior=convertToNull,
>  dbStoreType=mysql, qualifiedName=mysql --url 
> jdbc:mysql://ctr-e129-1487033772569-11862-01-000002.hwx.site/sqoopTests?zeroDateTimeBehavior=convertToNull
>  --table createSqoopTablejmnwwf8rji, name=mysql --url 
> jdbc:mysql://ctr-e129-1487033772569-11862-01-000002.hwx.site/sqoopTests?zeroDateTimeBehavior=convertToNull
>  --table createSqoopTablejmnwwf8rji, description=, 
> source=createSqoopTablejmnwwf8rji, storeUse=TABLE}}, {Id='(type: hive_db, id: 
> <unassigned>)', traits=[], values={name=default, qualifiedName=default@cl1, 
> clusterName=cl1}}, {Id='(type: hive_table, id: <unassigned>)', traits=[], 
> values={name=createsqooptablejmnwwf8rji, 
> qualifiedName=default.createsqooptablejmnwwf8rji@cl1, db={Id='(type: hive_db, 
> id: <unassigned>)', traits=[], values={name=default, 
> qualifiedName=default@cl1, clusterName=cl1}}}}, {Id='(type: sqoop_process, 
> id: <unassigned>)', traits=[], values={outputs={Id='(type: hive_table, id: 
> <unassigned>)', traits=[], values={name=createsqooptablejmnwwf8rji, 
> qualifiedName=default.createsqooptablejmnwwf8rji@cl1, db={Id='(type: hive_db, 
> id: <unassigned>)', traits=[], values={name=default, 
> qualifiedName=default@cl1, clusterName=cl1}}}}, 
> commandlineOpts={db.clear.staging.table=false, hive.import=true, 
> db.require.password=true, codegen.output.delimiters.enclose=0, 
> codegen.input.delimiters.field=0, customtool.options.jsonmap={}, 
> hive.compute.stats.table=false, 
> db.connect.string=jdbc:mysql://ctr-e129-1487033772569-11862-01-000002.hwx.site/sqoopTests?zeroDateTimeBehavior=convertToNull,
>  incremental.mode=None, db.table=createSqoopTablejmnwwf8rji, verbose=false, 
> codegen.output.delimiters.enclose.required=false, mapreduce.num.mappers=4, 
> hdfs.append.dir=false, direct.import=false, hive.drop.delims=false, 
> hive.overwrite.table=false, hbase.bulk.load.enabled=false, 
> hive.fail.table.exists=false, relaxed.isolation=false, 
> hdfs.delete-target.dir=false, split.limit=null, db.username=sqoop, 
> codegen.input.delimiters.enclose.required=false, codegen.output.dir=., 
> import.direct.split.size=0, reset.onemapper=false, 
> codegen.output.delimiters.record=10, temporary.dirRoot=_sqoop, 
> hcatalog.create.table=false, db.batch=false, import.fetch.size=-2147483648, 
> accumulo.max.latency=5000, hdfs.file.format=TextFile, 
> codegen.output.delimiters.field=1, mainframe.input.dataset.type=p, 
> codegen.output.delimiters.escape=0, hcatalog.drop.and.create.table=false, 
> import.max.inline.lob.size=16777216, hbase.create.table=false, 
> codegen.auto.compile.dir=true, 
> codegen.compile.dir=/tmp/sqoop-hrt_qa/compile/f7c5181710812973a20a3e4776c5a6ef,
>  codegen.input.delimiters.enclose=0, export.new.update=UpdateOnly, 
> enable.compression=false, skip.dist.cache=false, 
> accumulo.batch.size=10240000, hdfs.warehouse.dir=/user/hrt_qa, 
> codegen.input.delimiters.record=0, codegen.input.delimiters.escape=0, 
> accumulo.create.table=false}, inputs={Id='(type: sqoop_dbdatastore, id: 
> <unassigned>)', traits=[], values={owner=hrt_qa, 
> storeUri=jdbc:mysql://ctr-e129-1487033772569-11862-01-000002.hwx.site/sqoopTests?zeroDateTimeBehavior=convertToNull,
>  dbStoreType=mysql, qualifiedName=mysql --url 
> jdbc:mysql://ctr-e129-1487033772569-11862-01-000002.hwx.site/sqoopTests?zeroDateTimeBehavior=convertToNull
>  --table createSqoopTablejmnwwf8rji, name=mysql --url 
> jdbc:mysql://ctr-e129-1487033772569-11862-01-000002.hwx.site/sqoopTests?zeroDateTimeBehavior=convertToNull
>  --table createSqoopTablejmnwwf8rji, description=, 
> source=createSqoopTablejmnwwf8rji, storeUse=TABLE}}, qualifiedName=sqoop 
> import --connect 
> jdbc:mysql://ctr-e129-1487033772569-11862-01-000002.hwx.site/sqoopTests?zeroDateTimeBehavior=convertToNull
>  --table createSqoopTablejmnwwf8rji --hive-import --hive-database default 
> --hive-table createsqooptablejmnwwf8rji --hive-cluster cl1, name=sqoop import 
> --connect 
> jdbc:mysql://ctr-e129-1487033772569-11862-01-000002.hwx.site/sqoopTests?zeroDateTimeBehavior=convertToNull
>  --table createSqoopTablejmnwwf8rji --hive-import --hive-database default 
> --hive-table createsqooptablejmnwwf8rji --hive-cluster cl1, 
> startTime=2017-02-20T11:20:04.901Z, endTime=2017-02-20T11:20:40.055Z, 
> userName=hrt_qa, operation=import}}] (NotificationHookConsumer:324)
> org.apache.atlas.exception.AtlasBaseException: expected type List or Set; 
> found org.apache.atlas.typesystem.Referenceable
>         at 
> org.apache.atlas.repository.converters.AtlasArrayFormatConverter.fromV1ToV2(AtlasArrayFormatConverter.java:50)
>         at 
> org.apache.atlas.repository.converters.AtlasArrayFormatConverter.fromV1ToV2(AtlasArrayFormatConverter.java:34)
>         at 
> org.apache.atlas.repository.converters.AtlasStructFormatConverter.fromV1ToV2(AtlasStructFormatConverter.java:169)
>         at 
> org.apache.atlas.repository.converters.AtlasEntityFormatConverter.fromV1ToV2(AtlasEntityFormatConverter.java:74)
>         at 
> org.apache.atlas.repository.converters.AtlasInstanceConverter.fromV1toV2Entity(AtlasInstanceConverter.java:222)
>         at 
> org.apache.atlas.repository.converters.AtlasInstanceConverter.toAtlasEntities(AtlasInstanceConverter.java:204)
>         at 
> org.apache.atlas.notification.NotificationHookConsumer$HookConsumer.handleMessage(NotificationHookConsumer.java:256)
>         at 
> org.apache.atlas.notification.NotificationHookConsumer$HookConsumer.run(NotificationHookConsumer.java:230)
>         at 
> java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511)
>         at java.util.concurrent.FutureTask.run(FutureTask.java:266)
>         at 
> java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
>         at 
> java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
>         at java.lang.Thread.run(Thread.java:745)
> 2017-02-20 11:21:06,094 ERROR - [NotificationHookConsumer thread-0:] ~ 
> [DROPPED_NOTIFICATION] 
> {"version":{"version":"1.0.0"},"message":{"entities":[{"jsonClass":"org.apache.atlas.typesystem.json.InstanceSerialization$_Reference","id":{"jsonClass":"org.apache.atlas.typesystem.json.InstanceSerialization$_Id","id":"-16210205233888062","version":0,"typeName":"sqoop_dbdatastore","state":"ACTIVE"},"typeName":"sqoop_dbdatastore","values":{"name":"mysql
>  --url 
> jdbc:mysql://ctr-e129-1487033772569-11862-01-000002.hwx.site/sqoopTests?zeroDateTimeBehavior\u003dconvertToNull
>  --table 
> createSqoopTablejmnwwf8rji","source":"createSqoopTablejmnwwf8rji","storeUse":"TABLE","description":"","storeUri":"jdbc:mysql://ctr-e129-1487033772569-11862-01-000002.hwx.site/sqoopTests?zeroDateTimeBehavior\u003dconvertToNull","qualifiedName":"mysql
>  --url 
> jdbc:mysql://ctr-e129-1487033772569-11862-01-000002.hwx.site/sqoopTests?zeroDateTimeBehavior\u003dconvertToNull
>  --table 
> createSqoopTablejmnwwf8rji","owner":"hrt_qa","dbStoreType":"mysql"},"traitNames":[],"traits":{},"systemAttributes":{}},{"jsonClass":"org.apache.atlas.typesystem.json.InstanceSerialization$_Reference","id":{"jsonClass":"org.apache.atlas.typesystem.json.InstanceSerialization$_Id","id":"-16210205233888061","version":0,"typeName":"hive_db","state":"ACTIVE"},"typeName":"hive_db","values":{"qualifiedName":"default@cl1","clusterName":"cl1","name":"default"},"traitNames":[],"traits":{},"systemAttributes":{}},{"jsonClass":"org.apache.atlas.typesystem.json.InstanceSerialization$_Reference","id":{"jsonClass":"org.apache.atlas.typesystem.json.InstanceSerialization$_Id","id":"-16210205233888060","version":0,"typeName":"hive_table","state":"ACTIVE"},"typeName":"hive_table","values":{"qualifiedName":"default.createsqooptablejmnwwf8rji@cl1","db":{"jsonClass":"org.apache.atlas.typesystem.json.InstanceSerialization$_Reference","id":{"jsonClass":"org.apache.atlas.typesystem.json.InstanceSerialization$_Id","id":"-16210205233888061","version":0,"typeName":"hive_db","state":"ACTIVE"},"typeName":"hive_db","values":{"qualifiedName":"default@cl1","clusterName":"cl1","name":"default"},"traitNames":[],"traits":{},"systemAttributes":{}},"name":"createsqooptablejmnwwf8rji"},"traitNames":[],"traits":{},"systemAttributes":{}},{"jsonClass":"org.apache.atlas.typesystem.json.InstanceSerialization$_Reference","id":{"jsonClass":"org.apache.atlas.typesystem.json.InstanceSerialization$_Id","id":"-16210205233888059","version":0,"typeName":"sqoop_process","state":"ACTIVE"},"typeName":"sqoop_process","values":{"name":"sqoop
>  import --connect 
> jdbc:mysql://ctr-e129-1487033772569-11862-01-000002.hwx.site/sqoopTests?zeroDateTimeBehavior\u003dconvertToNull
>  --table createSqoopTablejmnwwf8rji --hive-import --hive-database default 
> --hive-table createsqooptablejmnwwf8rji --hive-cluster 
> cl1","startTime":"2017-02-20T11:20:04.901Z","outputs":{"jsonClass":"org.apache.atlas.typesystem.json.InstanceSerialization$_Reference","id":{"jsonClass":"org.apache.atlas.typesystem.json.InstanceSerialization$_Id","id":"-16210205233888060","version":0,"typeName":"hive_table","state":"ACTIVE"},"typeName":"hive_table","values":{"qualifiedName":"default.createsqooptablejmnwwf8rji@cl1","db":{"jsonClass":"org.apache.atlas.typesystem.json.InstanceSerialization$_Reference","id":{"jsonClass":"org.apache.atlas.typesystem.json.InstanceSerialization$_Id","id":"-16210205233888061","version":0,"typeName":"hive_db","state":"ACTIVE"},"typeName":"hive_db","values":{"qualifiedName":"default@cl1","clusterName":"cl1","name":"default"},"traitNames":[],"traits":{},"systemAttributes":{}},"name":"createsqooptablejmnwwf8rji"},"traitNames":[],"traits":{},"systemAttributes":{}},"commandlineOpts":{"db.clear.staging.table":"false","hive.import":"true","db.require.password":"true","codegen.output.delimiters.enclose":"0","codegen.input.delimiters.field":"0","customtool.options.jsonmap":"{}","hive.compute.stats.table":"false","db.connect.string":"jdbc:mysql://ctr-e129-1487033772569-11862-01-000002.hwx.site/sqoopTests?zeroDateTimeBehavior\u003dconvertToNull","incremental.mode":"None","db.table":"createSqoopTablejmnwwf8rji","verbose":"false","codegen.output.delimiters.enclose.required":"false","mapreduce.num.mappers":"4","hdfs.append.dir":"false","direct.import":"false","hive.drop.delims":"false","hive.overwrite.table":"false","hbase.bulk.load.enabled":"false","hive.fail.table.exists":"false","relaxed.isolation":"false","hdfs.delete-target.dir":"false","split.limit":"null","db.username":"sqoop","codegen.input.delimiters.enclose.required":"false","codegen.output.dir":".","import.direct.split.size":"0","reset.onemapper":"false","codegen.output.delimiters.record":"10","temporary.dirRoot":"_sqoop","hcatalog.create.table":"false","db.batch":"false","import.fetch.size":"-2147483648","accumulo.max.laten
cy":"5000","hdfs.file.format":"TextFile","codegen.output.delimiters.field":"1","mainframe.input.dataset.type":"p","codegen.output.delimiters.escape":"0","hcatalog.drop.and.create.table":"false","import.max.inline.lob.size":"16777216","hbase.create.table":"false","codegen.auto.compile.dir":"true","codegen.compile.dir":"/tmp/sqoop-hrt_qa/compile/f7c5181710812973a20a3e4776c5a6ef","codegen.input.delimiters.enclose":"0","export.new.update":"UpdateOnly","enable.compression":"false","skip.dist.cache":"false","accumulo.batch.size":"10240000","hdfs.warehouse.dir":"/user/hrt_qa","codegen.input.delimiters.record":"0","codegen.input.delimiters.escape":"0","accumulo.create.table":"false"},"endTime":"2017-02-20T11:20:40.055Z","inputs":{"jsonClass":"org.apache.atlas.typesystem.json.InstanceSerialization$_Reference","id":{"jsonClass":"org.apache.atlas.typesystem.json.InstanceSerialization$_Id","id":"-16210205233888062","version":0,"typeName":"sqoop_dbdatastore","state":"ACTIVE"},"typeName":"sqoop_dbdatastore","values":{"name":"mysql
>  --url 
> jdbc:mysql://ctr-e129-1487033772569-11862-01-000002.hwx.site/sqoopTests?zeroDateTimeBehavior\u003dconvertToNull
>  --table 
> createSqoopTablejmnwwf8rji","source":"createSqoopTablejmnwwf8rji","storeUse":"TABLE","description":"","storeUri":"jdbc:mysql://ctr-e129-1487033772569-11862-01-000002.hwx.site/sqoopTests?zeroDateTimeBehavior\u003dconvertToNull","qualifiedName":"mysql
>  --url 
> jdbc:mysql://ctr-e129-1487033772569-11862-01-000002.hwx.site/sqoopTests?zeroDateTimeBehavior\u003dconvertToNull
>  --table 
> createSqoopTablejmnwwf8rji","owner":"hrt_qa","dbStoreType":"mysql"},"traitNames":[],"traits":{},"systemAttributes":{}},"operation":"import","qualifiedName":"sqoop
>  import --connect 
> jdbc:mysql://ctr-e129-1487033772569-11862-01-000002.hwx.site/sqoopTests?zeroDateTimeBehavior\u003dconvertToNull
>  --table createSqoopTablejmnwwf8rji --hive-import --hive-database default 
> --hive-table createsqooptablejmnwwf8rji --hive-cluster 
> cl1","userName":"hrt_qa"},"traitNames":[],"traits":{},"systemAttributes":{}}],"type":"ENTITY_CREATE","user":"hrt_qa"}}
>  (FAILED:339)
> {noformat}
> Steps to repro:
> Run the below sqoop import command
> {noformat}
>  /usr/hdp/current/sqoop-client/bin/sqoop  import  --connect 
> jdbc:mysql://ctr-e129-1487033772569-11862-01-000002.hwx.site/sqoopTests 
> --username sqoop --password sqoop --hive-import  --warehouse-dir /user/hrt_qa 
> --table createSqoopTablejmnwwf8rji
> {noformat}



--
This message was sent by Atlassian JIRA
(v6.3.15#6346)

Reply via email to