[ https://issues.apache.org/jira/browse/SPARK-20712?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel&focusedCommentId=16502740#comment-16502740 ]

niuhuawei commented on SPARK-20712:
-----------------------------------

I tried to fix this problem.

First, I reproduced it. Run ./bin/pyspark from the root of SPARK_HOME, then type the following:

{code}
>>> spark.range(10).selectExpr(*(map(lambda x: "id as very_long_column_name_id" + str(x), range(200)))).selectExpr("struct(*) as nested").write.saveAsTable("test")
{code}

And the error appears again:

{code}
NestedThrowablesStackTrace: java.sql.SQLDataException: A truncation error was encountered trying to shrink VARCHAR 'struct<very_long_column_name_id0:bigint,very_long_column_nam&' to length 4000.
        at org.apache.derby.impl.jdbc.SQLExceptionFactory.getSQLException(Unknown Source)
        at org.apache.derby.impl.jdbc.Util.generateCsSQLException(Unknown Source)
        at org.apache.derby.impl.jdbc.TransactionResourceImpl.wrapInSQLException(Unknown Source)
        at org.apache.derby.impl.jdbc.TransactionResourceImpl.handleException(Unknown Source)
        at org.apache.derby.impl.jdbc.EmbedConnection.handleException(Unknown Source)
        at org.apache.derby.impl.jdbc.ConnectionChild.handleException(Unknown Source)
        at org.apache.derby.impl.jdbc.EmbedStatement.executeStatement(Unknown Source)
        at org.apache.derby.impl.jdbc.EmbedPreparedStatement.executeStatement(Unknown Source)
        at org.apache.derby.impl.jdbc.EmbedPreparedStatement.executeLargeUpdate(Unknown Source)
        at org.apache.derby.impl.jdbc.EmbedPreparedStatement.executeUpdate(Unknown Source)
        at com.jolbox.bonecp.PreparedStatementHandle.executeUpdate(PreparedStatementHandle.java:205)
        at org.datanucleus.store.rdbms.ParamLoggingPreparedStatement.executeUpdate(ParamLoggingPreparedStatement.java:399)
        at org.datanucleus.store.rdbms.SQLController.executeStatementUpdate(SQLController.java:439)
        at org.datanucleus.store.rdbms.scostore.JoinListStore.internalAdd(JoinListStore.java:304)
        at org.datanucleus.store.rdbms.scostore.AbstractListStore.addAll(AbstractListStore.java:136)
        at org.datanucleus.store.rdbms.mapping.java.CollectionMapping.postInsert(CollectionMapping.java:136)
        at org.datanucleus.store.rdbms.request.InsertRequest.execute(InsertRequest.java:519)
        at org.datanucleus.store.rdbms.RDBMSPersistenceHandler.insertTable(RDBMSPersistenceHandler.java:167)
        at org.datanucleus.store.rdbms.RDBMSPersistenceHandler.insertObject(RDBMSPersistenceHandler.java:143)
        at org.datanucleus.state.JDOStateManager.internalMakePersistent(JDOStateManager.java:3784)
        at org.datanucleus.state.JDOStateManager.makePersistent(JDOStateManager.java:3760)
        at org.datanucleus.ExecutionContextImpl.persistObjectInternal(ExecutionContextImpl.java:2219)
        at org.datanucleus.ExecutionContextImpl.persistObjectInternal(ExecutionContextImpl.java:2314)
        at org.datanucleus.store.rdbms.mapping.java.PersistableMapping.setObjectAsValue(PersistableMapping.java:567)
        at org.datanucleus.store.rdbms.mapping.java.PersistableMapping.setObject(PersistableMapping.java:326)
        at org.datanucleus.store.rdbms.fieldmanager.ParameterSetter.storeObjectField(ParameterSetter.java:193)
        at org.datanucleus.state.JDOStateManager.providedObjectField(JDOStateManager.java:1269)
        at org.apache.hadoop.hive.metastore.model.MStorageDescriptor.jdoProvideField(MStorageDescriptor.java)
        at org.apache.hadoop.hive.metastore.model.MStorageDescriptor.jdoProvideFields(MStorageDescriptor.java)
        at org.datanucleus.state.JDOStateManager.provideFields(JDOStateManager.java:1346)
        at org.datanucleus.store.rdbms.request.InsertRequest.execute(InsertRequest.java:289)
        at org.datanucleus.store.rdbms.RDBMSPersistenceHandler.insertTable(RDBMSPersistenceHandler.java:167)
        at org.datanucleus.store.rdbms.RDBMSPersistenceHandler.insertObject(RDBMSPersistenceHandler.java:143)
        at org.datanucleus.state.JDOStateManager.internalMakePersistent(JDOStateManager.java:3784)
        at org.datanucleus.state.JDOStateManager.makePersistent(JDOStateManager.java:3760)
        at org.datanucleus.ExecutionContextImpl.persistObjectInternal(ExecutionContextImpl.java:2219)
        at org.datanucleus.ExecutionContextImpl.persistObjectInternal(ExecutionContextImpl.java:2314)
        at org.datanucleus.store.rdbms.mapping.java.PersistableMapping.setObjectAsValue(PersistableMapping.java:567)
        at org.datanucleus.store.rdbms.mapping.java.PersistableMapping.setObject(PersistableMapping.java:326)
        at org.datanucleus.store.rdbms.fieldmanager.ParameterSetter.storeObjectField(ParameterSetter.java:193)
        at org.datanucleus.state.JDOStateManager.providedObjectField(JDOStateManager.java:1269)
        at org.apache.hadoop.hive.metastore.model.MTable.jdoProvideField(MTable.java)
        at org.apache.hadoop.hive.metastore.model.MTable.jdoProvideFields(MTable.java)
        at org.datanucleus.state.JDOStateManager.provideFields(JDOStateManager.java:1346)
        at org.datanucleus.store.rdbms.request.InsertRequest.execute(InsertRequest.java:289)
        at org.datanucleus.store.rdbms.RDBMSPersistenceHandler.insertTable(RDBMSPersistenceHandler.java:167)
        at org.datanucleus.store.rdbms.RDBMSPersistenceHandler.insertObject(RDBMSPersistenceHandler.java:143)
        at org.datanucleus.state.JDOStateManager.internalMakePersistent(JDOStateManager.java:3784)
        at org.datanucleus.state.JDOStateManager.makePersistent(JDOStateManager.java:3760)
        at org.datanucleus.ExecutionContextImpl.persistObjectInternal(ExecutionContextImpl.java:2219)
        at org.datanucleus.ExecutionContextImpl.persistObjectWork(ExecutionContextImpl.java:2065)
        at org.datanucleus.ExecutionContextImpl.persistObject(ExecutionContextImpl.java:1913)
        at org.datanucleus.ExecutionContextThreadedImpl.persistObject(ExecutionContextThreadedImpl.java:217)
        at org.datanucleus.api.jdo.JDOPersistenceManager.jdoMakePersistent(JDOPersistenceManager.java:727)
        at org.datanucleus.api.jdo.JDOPersistenceManager.makePersistent(JDOPersistenceManager.java:752)
        at org.apache.hadoop.hive.metastore.ObjectStore.createTable(ObjectStore.java:814)
        at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
        at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
        at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
        at java.lang.reflect.Method.invoke(Method.java:497)
        at org.apache.hadoop.hive.metastore.RawStoreProxy.invoke(RawStoreProxy.java:114)
        at com.sun.proxy.$Proxy12.createTable(Unknown Source)
        at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.create_table_core(HiveMetaStore.java:1416)
        at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.create_table_with_environment_context(HiveMetaStore.java:1449)
        at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
        at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
        at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
        at java.lang.reflect.Method.invoke(Method.java:497)
        at org.apache.hadoop.hive.metastore.RetryingHMSHandler.invoke(RetryingHMSHandler.java:107)
        at com.sun.proxy.$Proxy14.create_table_with_environment_context(Unknown Source)
        at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.create_table_with_environment_context(HiveMetaStoreClient.java:2050)
        at org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient.create_table_with_environment_context(SessionHiveMetaStoreClient.java:97)
        at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.createTable(HiveMetaStoreClient.java:669)
        at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.createTable(HiveMetaStoreClient.java:657)
        at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
        at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
        at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
        at java.lang.reflect.Method.invoke(Method.java:497)
        at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.invoke(RetryingMetaStoreClient.java:156)
        at com.sun.proxy.$Proxy15.createTable(Unknown Source)
        at org.apache.hadoop.hive.ql.metadata.Hive.createTable(Hive.java:714)
        at org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$createTable$1.apply$mcV$sp(HiveClientImpl.scala:404)
        at org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$createTable$1.apply(HiveClientImpl.scala:404)
        at org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$createTable$1.apply(HiveClientImpl.scala:404)
        at org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$withHiveState$1.apply(HiveClientImpl.scala:262)
        at org.apache.spark.sql.hive.client.HiveClientImpl.liftedTree1$1(HiveClientImpl.scala:209)
        at org.apache.spark.sql.hive.client.HiveClientImpl.retryLocked(HiveClientImpl.scala:208)
        at org.apache.spark.sql.hive.client.HiveClientImpl.withHiveState(HiveClientImpl.scala:251)
        at org.apache.spark.sql.hive.client.HiveClientImpl.createTable(HiveClientImpl.scala:403)
        at org.apache.spark.sql.hive.HiveExternalCatalog$$anonfun$createTable$1.apply$mcV$sp(HiveExternalCatalog.scala:188)
        at org.apache.spark.sql.hive.HiveExternalCatalog$$anonfun$createTable$1.apply(HiveExternalCatalog.scala:152)
        at org.apache.spark.sql.hive.HiveExternalCatalog$$anonfun$createTable$1.apply(HiveExternalCatalog.scala:152)
        at org.apache.spark.sql.hive.HiveExternalCatalog.withClient(HiveExternalCatalog.scala:72)
        at org.apache.spark.sql.hive.HiveExternalCatalog.createTable(HiveExternalCatalog.scala:152)
        at org.apache.spark.sql.catalyst.catalog.SessionCatalog.createTable(SessionCatalog.scala:226)
        at org.apache.spark.sql.execution.command.CreateDataSourceTableUtils$.createDataSourceTable(createDataSourceTables.scala:487)
        at org.apache.spark.sql.execution.command.CreateDataSourceTableAsSelectCommand.run(createDataSourceTables.scala:256)
        at org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult$lzycompute(commands.scala:60)
        at org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult(commands.scala:58)
        at org.apache.spark.sql.execution.command.ExecutedCommandExec.doExecute(commands.scala:74)
        at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:115)
        at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:115)
        at org.apache.spark.sql.execution.SparkPlan$$anonfun$executeQuery$1.apply(SparkPlan.scala:136)
        at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
        at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:133)
        at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:114)
        at org.apache.spark.sql.execution.QueryExecution.toRdd$lzycompute(QueryExecution.scala:86)
        at org.apache.spark.sql.execution.QueryExecution.toRdd(QueryExecution.scala:86)
        at org.apache.spark.sql.DataFrameWriter.saveAsTable(DataFrameWriter.scala:378)
        at org.apache.spark.sql.DataFrameWriter.saveAsTable(DataFrameWriter.scala:354)
        at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
        at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
        at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
        at java.lang.reflect.Method.invoke(Method.java:497)
        at py4j.reflection.MethodInvoker.invoke(MethodInvoker.java:237)
        at py4j.reflection.ReflectionEngine.invoke(ReflectionEngine.java:357)
        at py4j.Gateway.invoke(Gateway.java:280)
        at py4j.commands.AbstractCommand.invokeMethod(AbstractCommand.java:128)
        at py4j.commands.CallCommand.execute(CallCommand.java:79)
        at py4j.GatewayConnection.run(GatewayConnection.java:211)
        at java.lang.Thread.run(Thread.java:745)
Caused by: ERROR 22001: A truncation error was encountered trying to shrink VARCHAR 'struct<very_long_column_name_id0:bigint,very_long_column_nam&' to length 4000.
        at org.apache.derby.iapi.error.StandardException.newException(Unknown Source)
        at org.apache.derby.iapi.error.StandardException.newException(Unknown Source)
        at org.apache.derby.iapi.types.SQLChar.hasNonBlankChars(Unknown Source)
        at org.apache.derby.iapi.types.SQLVarchar.normalize(Unknown Source)
        at org.apache.derby.iapi.types.SQLVarchar.normalize(Unknown Source)
        at org.apache.derby.iapi.types.DataTypeDescriptor.normalize(Unknown Source)
        at org.apache.derby.impl.sql.execute.NormalizeResultSet.normalizeColumn(Unknown Source)
        at org.apache.derby.impl.sql.execute.NormalizeResultSet.normalizeRow(Unknown Source)
        at org.apache.derby.impl.sql.execute.NormalizeResultSet.getNextRowCore(Unknown Source)
        at org.apache.derby.impl.sql.execute.DMLWriteResultSet.getNextRowCore(Unknown Source)
        at org.apache.derby.impl.sql.execute.InsertResultSet.getNextRowCore(Unknown Source)
        at org.apache.derby.impl.sql.execute.InsertResultSet.open(Unknown Source)
        at org.apache.derby.impl.sql.GenericPreparedStatement.executeStmt(Unknown Source)
        at org.apache.derby.impl.sql.GenericPreparedStatement.execute(Unknown Source)
        ... 115 more
{code}
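
For reference, here is why reading such a table back then fails at position 4000 (an illustration in plain Python, not part of the trace above): a 4000-character VARCHAR keeps only a prefix of the type string, and that prefix stops in the middle of a field name, so the type parser runs out of input while still expecting a ':'.

{code}
# What a 4000-character VARCHAR hands back to Spark on the read path.
fields = ",".join("very_long_column_name_id%d:bigint" % i for i in range(200))
type_string = "struct<%s>" % fields
truncated = type_string[:4000]
print(truncated[-8:])  # 'very_lon' -- cut mid-name, as in the issue below
# Parsing this as a type hits <EOF> while still expecting ':', which matches
# "mismatched input '<EOF>' expecting ':'(line 1, pos 4000)" in the report.
{code}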

 

> [SPARK 2.1 REGRESSION][SQL] Spark can't read Hive table when column type has 
> length greater than 4000 bytes
> -----------------------------------------------------------------------------------------------------------
>
>                 Key: SPARK-20712
>                 URL: https://issues.apache.org/jira/browse/SPARK-20712
>             Project: Spark
>          Issue Type: Bug
>          Components: SQL
>    Affects Versions: 2.1.1, 2.1.2, 2.2.0, 2.3.0
>            Reporter: Maciej Bryński
>            Priority: Critical
>
> Hi,
> I have the following issue.
> I'm trying to read a table from Hive where one of the columns is nested, so its
> schema is longer than 4000 bytes.
> Everything worked on Spark 2.0.2. On 2.1.1 I get an exception:
> {code}
> >>> spark.read.table("SOME_TABLE")
> Traceback (most recent call last):
>   File "<stdin>", line 1, in <module>
>   File "/opt/spark-2.1.1/python/pyspark/sql/readwriter.py", line 259, in table
>     return self._df(self._jreader.table(tableName))
>   File 
> "/opt/spark-2.1.1/python/lib/py4j-0.10.4-src.zip/py4j/java_gateway.py", line 
> 1133, in __call__
>   File "/opt/spark-2.1.1/python/pyspark/sql/utils.py", line 63, in deco
>     return f(*a, **kw)
>   File "/opt/spark-2.1.1/python/lib/py4j-0.10.4-src.zip/py4j/protocol.py", 
> line 319, in get_return_value
> py4j.protocol.Py4JJavaError: An error occurred while calling o71.table.
> : org.apache.spark.SparkException: Cannot recognize hive type string: 
> SOME_VERY_LONG_FIELD_TYPE
>         at 
> org.apache.spark.sql.hive.client.HiveClientImpl.org$apache$spark$sql$hive$client$HiveClientImpl$$fromHiveColumn(HiveClientImpl.scala:789)
>         at 
> org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$getTableOption$1$$anonfun$apply$11$$anonfun$7.apply(HiveClientImpl.scala:365)
>         at 
> org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$getTableOption$1$$anonfun$apply$11$$anonfun$7.apply(HiveClientImpl.scala:365)
>         at 
> scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234)
>         at 
> scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234)
>         at scala.collection.Iterator$class.foreach(Iterator.scala:893)
>         at scala.collection.AbstractIterator.foreach(Iterator.scala:1336)
>         at scala.collection.IterableLike$class.foreach(IterableLike.scala:72)
>         at scala.collection.AbstractIterable.foreach(Iterable.scala:54)
>         at 
> scala.collection.TraversableLike$class.map(TraversableLike.scala:234)
>         at scala.collection.AbstractTraversable.map(Traversable.scala:104)
>         at 
> org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$getTableOption$1$$anonfun$apply$11.apply(HiveClientImpl.scala:365)
>         at 
> org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$getTableOption$1$$anonfun$apply$11.apply(HiveClientImpl.scala:361)
>         at scala.Option.map(Option.scala:146)
>         at 
> org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$getTableOption$1.apply(HiveClientImpl.scala:361)
>         at 
> org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$getTableOption$1.apply(HiveClientImpl.scala:359)
>         at 
> org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$withHiveState$1.apply(HiveClientImpl.scala:279)
>         at 
> org.apache.spark.sql.hive.client.HiveClientImpl.liftedTree1$1(HiveClientImpl.scala:226)
>         at 
> org.apache.spark.sql.hive.client.HiveClientImpl.retryLocked(HiveClientImpl.scala:225)
>         at 
> org.apache.spark.sql.hive.client.HiveClientImpl.withHiveState(HiveClientImpl.scala:268)
>         at 
> org.apache.spark.sql.hive.client.HiveClientImpl.getTableOption(HiveClientImpl.scala:359)
>         at 
> org.apache.spark.sql.hive.client.HiveClient$class.getTable(HiveClient.scala:74)
>         at 
> org.apache.spark.sql.hive.client.HiveClientImpl.getTable(HiveClientImpl.scala:78)
>         at 
> org.apache.spark.sql.hive.HiveExternalCatalog$$anonfun$org$apache$spark$sql$hive$HiveExternalCatalog$$getRawTable$1.apply(HiveExternalCatalog.scala:118)
>         at 
> org.apache.spark.sql.hive.HiveExternalCatalog$$anonfun$org$apache$spark$sql$hive$HiveExternalCatalog$$getRawTable$1.apply(HiveExternalCatalog.scala:118)
>         at 
> org.apache.spark.sql.hive.HiveExternalCatalog.withClient(HiveExternalCatalog.scala:97)
>         at 
> org.apache.spark.sql.hive.HiveExternalCatalog.org$apache$spark$sql$hive$HiveExternalCatalog$$getRawTable(HiveExternalCatalog.scala:117)
>         at 
> org.apache.spark.sql.hive.HiveExternalCatalog$$anonfun$getTable$1.apply(HiveExternalCatalog.scala:628)
>         at 
> org.apache.spark.sql.hive.HiveExternalCatalog$$anonfun$getTable$1.apply(HiveExternalCatalog.scala:628)
>         at 
> org.apache.spark.sql.hive.HiveExternalCatalog.withClient(HiveExternalCatalog.scala:97)
>         at 
> org.apache.spark.sql.hive.HiveExternalCatalog.getTable(HiveExternalCatalog.scala:627)
>         at 
> org.apache.spark.sql.hive.HiveMetastoreCatalog.lookupRelation(HiveMetastoreCatalog.scala:124)
>         at 
> org.apache.spark.sql.hive.HiveSessionCatalog.lookupRelation(HiveSessionCatalog.scala:70)
>         at 
> org.apache.spark.sql.DataFrameReader.table(DataFrameReader.scala:473)
>         at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
>         at 
> sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
>         at 
> sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
>         at java.lang.reflect.Method.invoke(Method.java:497)
>         at py4j.reflection.MethodInvoker.invoke(MethodInvoker.java:244)
>         at py4j.reflection.ReflectionEngine.invoke(ReflectionEngine.java:357)
>         at py4j.Gateway.invoke(Gateway.java:280)
>         at 
> py4j.commands.AbstractCommand.invokeMethod(AbstractCommand.java:132)
>         at py4j.commands.CallCommand.execute(CallCommand.java:79)
>         at py4j.GatewayConnection.run(GatewayConnection.java:214)
>         at java.lang.Thread.run(Thread.java:745)
> Caused by: org.apache.spark.sql.catalyst.parser.ParseException:
> mismatched input '<EOF>' expecting ':'(line 1, pos 4000)
> {code}
> EDIT: 
> Way to reproduce this error (from pyspark)
> {code}
> >>> spark.range(10).selectExpr(*(map(lambda x: "id as very_long_column_name_id" + str(x), range(200)))).selectExpr("struct(*) as nested").write.saveAsTable("test")
> >>> spark.read.table("test")
> Traceback (most recent call last):
>   File "<stdin>", line 1, in <module>
>   File "/opt/spark-2.1.1/python/pyspark/sql/readwriter.py", line 259, in table
>     return self._df(self._jreader.table(tableName))
>   File 
> "/opt/spark-2.1.1/python/lib/py4j-0.10.4-src.zip/py4j/java_gateway.py", line 
> 1133, in __call__
>   File "/opt/spark-2.1.1/python/pyspark/sql/utils.py", line 63, in deco
>     return f(*a, **kw)
>   File "/opt/spark-2.1.1/python/lib/py4j-0.10.4-src.zip/py4j/protocol.py", 
> line 319, in get_return_value
> py4j.protocol.Py4JJavaError: An error occurred while calling o260.table.
> : org.apache.spark.SparkException: Cannot recognize hive type 
> string:struct<very_long_column_name_id0:bigint,very_long_column_name_id1:bigint,very_long_column_name_id2:bigint,very_long_column_name_id3:bigint,very_long_column_name_id4:bigint,very_long_column_name_id5:bigint,very_long_column_name_id6:bigint,very_long_column_name_id7:bigint,very_long_column_name_id8:bigint,very_long_column_name_id9:bigint,very_long_column_name_id10:bigint,very_long_column_name_id11:bigint,very_long_column_name_id12:bigint,very_long_column_name_id13:bigint,very_long_column_name_id14:bigint,very_long_column_name_id15:bigint,very_long_column_name_id16:bigint,very_long_column_name_id17:bigint,very_long_column_name_id18:bigint,very_long_column_name_id19:bigint,very_long_column_name_id20:bigint,very_long_column_name_id21:bigint,very_long_column_name_id22:bigint,very_long_column_name_id23:bigint,very_long_column_name_id24:bigint,very_long_column_name_id25:bigint,very_long_column_name_id26:bigint,very_long_column_name_id27:bigint,very_long_column_name_id28:bigint,very_long_column_name_id29:bigint,very_long_column_name_id30:bigint,very_long_column_name_id31:bigint,very_long_column_name_id32:bigint,very_long_column_name_id33:bigint,very_long_column_name_id34:bigint,very_long_column_name_id35:bigint,very_long_column_name_id36:bigint,very_long_column_name_id37:bigint,very_long_column_name_id38:bigint,very_long_column_name_id39:bigint,very_long_column_name_id40:bigint,very_long_column_name_id41:bigint,very_long_column_name_id42:bigint,very_long_column_name_id43:bigint,very_long_column_name_id44:bigint,very_long_column_name_id45:bigint,very_long_column_name_id46:bigint,very_long_column_name_id47:bigint,very_long_column_name_id48:bigint,very_long_column_name_id49:bigint,very_long_column_name_id50:bigint,very_long_column_name_id51:bigint,very_long_column_name_id52:bigint,very_long_column_name_id53:bigint,very_long_column_name_id54:bigint,very_long_column_name_id55:bigint,very_long_column_name_id56:bigint,very_long_column_name_id57:bigint,very_long_column_name_id58:bigint,very_long_column_name_id59:bigint,very_long_column_name_id60:bigint,very_long_column_name_id61:bigint,very_long_column_name_id62:bigint,very_long_column_name_id63:bigint,very_long_column_name_id64:bigint,very_long_column_name_id65:bigint,very_long_column_name_id66:bigint,very_long_column_name_id67:bigint,very_long_column_name_id68:bigint,very_long_column_name_id69:bigint,very_long_column_name_id70:bigint,very_long_column_name_id71:bigint,very_long_column_name_id72:bigint,very_long_column_name_id73:bigint,very_long_column_name_id74:bigint,very_long_column_name_id75:bigint,very_long_column_name_id76:bigint,very_long_column_name_id77:bigint,very_long_column_name_id78:bigint,very_long_column_name_id79:bigint,very_long_column_name_id80:bigint,very_long_column_name_id81:bigint,very_long_column_name_id82:bigint,very_long_column_name_id83:bigint,very_long_column_name_id84:bigint,very_long_column_name_id85:bigint,very_long_column_name_id86:bigint,very_long_column_name_id87:bigint,very_long_column_name_id88:bigint,very_long_column_name_id89:bigint,very_long_column_name_id90:bigint,very_long_column_name_id91:bigint,very_long_column_name_id92:bigint,very_long_column_name_id93:bigint,very_long_column_name_id94:bigint,very_long_column_name_id95:bigint,very_long_column_name_id96:bigint,very_long_column_name_id97:bigint,very_long_column_name_id98:bigint,very_long_column_name_id99:bigint,very_long_column_name_id100:bigint,very_long_column_name_id101:bigint,very_long_column_name_id102:bigint,very_long_column_name_id103:bigint,very_long
> _column_name_id104:bigint,very_long_column_name_id105:bigint,very_long_column_name_id106:bigint,very_long_column_name_id107:bigint,very_long_column_name_id108:bigint,very_long_column_name_id109:bigint,very_long_column_name_id110:bigint,very_long_column_name_id111:bigint,very_long_column_name_id112:bigint,very_long_column_name_id113:bigint,very_long_column_name_id114:bigint,very_long_column_name_id115:bigint,very_long_column_name_id116:bigint,very_lon
>         at 
> org.apache.spark.sql.hive.client.HiveClientImpl.org$apache$spark$sql$hive$client$HiveClientImpl$$fromHiveColumn(HiveClientImpl.scala:789)
>         at 
> org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$getTableOption$1$$anonfun$apply$11$$anonfun$7.apply(HiveClientImpl.scala:365)
>         at 
> org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$getTableOption$1$$anonfun$apply$11$$anonfun$7.apply(HiveClientImpl.scala:365)
>         at 
> scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234)
>         at 
> scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234)
>         at scala.collection.Iterator$class.foreach(Iterator.scala:893)
>         at scala.collection.AbstractIterator.foreach(Iterator.scala:1336)
>         at scala.collection.IterableLike$class.foreach(IterableLike.scala:72)
>         at scala.collection.AbstractIterable.foreach(Iterable.scala:54)
>         at 
> scala.collection.TraversableLike$class.map(TraversableLike.scala:234)
>         at scala.collection.AbstractTraversable.map(Traversable.scala:104)
>         at 
> org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$getTableOption$1$$anonfun$apply$11.apply(HiveClientImpl.scala:365)
>         at 
> org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$getTableOption$1$$anonfun$apply$11.apply(HiveClientImpl.scala:361)
>         at scala.Option.map(Option.scala:146)
>         at 
> org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$getTableOption$1.apply(HiveClientImpl.scala:361)
>         at 
> org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$getTableOption$1.apply(HiveClientImpl.scala:359)
>         at 
> org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$withHiveState$1.apply(HiveClientImpl.scala:279)
>         at 
> org.apache.spark.sql.hive.client.HiveClientImpl.liftedTree1$1(HiveClientImpl.scala:226)
>         at 
> org.apache.spark.sql.hive.client.HiveClientImpl.retryLocked(HiveClientImpl.scala:225)
>         at 
> org.apache.spark.sql.hive.client.HiveClientImpl.withHiveState(HiveClientImpl.scala:268)
>         at 
> org.apache.spark.sql.hive.client.HiveClientImpl.getTableOption(HiveClientImpl.scala:359)
>         at 
> org.apache.spark.sql.hive.client.HiveClient$class.getTable(HiveClient.scala:74)
>         at 
> org.apache.spark.sql.hive.client.HiveClientImpl.getTable(HiveClientImpl.scala:78)
>         at 
> org.apache.spark.sql.hive.HiveExternalCatalog$$anonfun$org$apache$spark$sql$hive$HiveExternalCatalog$$getRawTable$1.apply(HiveExternalCatalog.scala:118)
>         at 
> org.apache.spark.sql.hive.HiveExternalCatalog$$anonfun$org$apache$spark$sql$hive$HiveExternalCatalog$$getRawTable$1.apply(HiveExternalCatalog.scala:118)
>         at 
> org.apache.spark.sql.hive.HiveExternalCatalog.withClient(HiveExternalCatalog.scala:97)
>         at 
> org.apache.spark.sql.hive.HiveExternalCatalog.org$apache$spark$sql$hive$HiveExternalCatalog$$getRawTable(HiveExternalCatalog.scala:117)
>         at 
> org.apache.spark.sql.hive.HiveExternalCatalog$$anonfun$getTable$1.apply(HiveExternalCatalog.scala:628)
>         at 
> org.apache.spark.sql.hive.HiveExternalCatalog$$anonfun$getTable$1.apply(HiveExternalCatalog.scala:628)
>         at 
> org.apache.spark.sql.hive.HiveExternalCatalog.withClient(HiveExternalCatalog.scala:97)
>         at 
> org.apache.spark.sql.hive.HiveExternalCatalog.getTable(HiveExternalCatalog.scala:627)
>         at 
> org.apache.spark.sql.hive.HiveMetastoreCatalog.lookupRelation(HiveMetastoreCatalog.scala:124)
>         at 
> org.apache.spark.sql.hive.HiveSessionCatalog.lookupRelation(HiveSessionCatalog.scala:70)
>         at 
> org.apache.spark.sql.DataFrameReader.table(DataFrameReader.scala:473)
>         at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
>         at 
> sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
>         at 
> sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
>         at java.lang.reflect.Method.invoke(Method.java:497)
>         at py4j.reflection.MethodInvoker.invoke(MethodInvoker.java:244)
>         at py4j.reflection.ReflectionEngine.invoke(ReflectionEngine.java:357)
>         at py4j.Gateway.invoke(Gateway.java:280)
>         at 
> py4j.commands.AbstractCommand.invokeMethod(AbstractCommand.java:132)
>         at py4j.commands.CallCommand.execute(CallCommand.java:79)
>         at py4j.GatewayConnection.run(GatewayConnection.java:214)
>         at java.lang.Thread.run(Thread.java:745)
> Caused by: org.apache.spark.sql.catalyst.parser.ParseException:
> mismatched input '<EOF>' expecting ':'(line 1, pos 4000)
> {code}
> From Spark 2.0.2:
> {code}
> >>> spark.read.table("test")
> DataFrame[nested: 
> struct<very_long_column_name_id0:bigint,very_long_column_name_id1:bigint,very_long_column_name_id2:bigint,very_long_column_name_id3:bigint,very_long_column_name_id4:bigint,very_long_column_name_id5:bigint,very_long_column_name_id6:bigint,very_long_column_name_id7:bigint,very_long_column_name_id8:bigint,very_long_column_name_id9:bigint,very_long_column_name_id10:bigint,very_long_column_name_id11:bigint,very_long_column_name_id12:bigint,very_long_column_name_id13:bigint,very_long_column_name_id14:bigint,very_long_column_name_id15:bigint,very_long_column_name_id16:bigint,very_long_column_name_id17:bigint,very_long_column_name_id18:bigint,very_long_column_name_id19:bigint,very_long_column_name_id20:bigint,very_long_column_name_id21:bigint,very_long_column_name_id22:bigint,very_long_column_name_id23:bigint,very_long_column_name_id24:bigint,very_long_column_name_id25:bigint,very_long_column_name_id26:bigint,very_long_column_name_id27:bigint,very_long_column_name_id28:bigint,very_long_column_name_id29:bigint,very_long_column_name_id30:bigint,very_long_column_name_id31:bigint,very_long_column_name_id32:bigint,very_long_column_name_id33:bigint,very_long_column_name_id34:bigint,very_long_column_name_id35:bigint,very_long_column_name_id36:bigint,very_long_column_name_id37:bigint,very_long_column_name_id38:bigint,very_long_column_name_id39:bigint,very_long_column_name_id40:bigint,very_long_column_name_id41:bigint,very_long_column_name_id42:bigint,very_long_column_name_id43:bigint,very_long_column_name_id44:bigint,very_long_column_name_id45:bigint,very_long_column_name_id46:bigint,very_long_column_name_id47:bigint,very_long_column_name_id48:bigint,very_long_column_name_id49:bigint,very_long_column_name_id50:bigint,very_long_column_name_id51:bigint,very_long_column_name_id52:bigint,very_long_column_name_id53:bigint,very_long_column_name_id54:bigint,very_long_column_name_id55:bigint,very_long_column_name_id56:bigint,very_long_column_name_id57:bigint,very_long_column_name_id58:bigint,very_long_column_name_id59:bigint,very_long_column_name_id60:bigint,very_long_column_name_id61:bigint,very_long_column_name_id62:bigint,very_long_column_name_id63:bigint,very_long_column_name_id64:bigint,very_long_column_name_id65:bigint,very_long_column_name_id66:bigint,very_long_column_name_id67:bigint,very_long_column_name_id68:bigint,very_long_column_name_id69:bigint,very_long_column_name_id70:bigint,very_long_column_name_id71:bigint,very_long_column_name_id72:bigint,very_long_column_name_id73:bigint,very_long_column_name_id74:bigint,very_long_column_name_id75:bigint,very_long_column_name_id76:bigint,very_long_column_name_id77:bigint,very_long_column_name_id78:bigint,very_long_column_name_id79:bigint,very_long_column_name_id80:bigint,very_long_column_name_id81:bigint,very_long_column_name_id82:bigint,very_long_column_name_id83:bigint,very_long_column_name_id84:bigint,very_long_column_name_id85:bigint,very_long_column_name_id86:bigint,very_long_column_name_id87:bigint,very_long_column_name_id88:bigint,very_long_column_name_id89:bigint,very_long_column_name_id90:bigint,very_long_column_name_id91:bigint,very_long_column_name_id92:bigint,very_long_column_name_id93:bigint,very_long_column_name_id94:bigint,very_long_column_name_id95:bigint,very_long_column_name_id96:bigint,very_long_column_name_id97:bigint,very_long_column_name_id98:bigint,very_long_column_name_id99:bigint,very_long_column_name_id100:bigint,very_long_column_name_id101:bigint,very_long_column_name_id102:bigint,very_long_column_name_id103:bigint,very_long_column
> _name_id104:bigint,very_long_column_name_id105:bigint,very_long_column_name_id106:bigint,very_long_column_name_id107:bigint,very_long_column_name_id108:bigint,very_long_column_name_id109:bigint,very_long_column_name_id110:bigint,very_long_column_name_id111:bigint,very_long_column_name_id112:bigint,very_long_column_name_id113:bigint,very_long_column_name_id114:bigint,very_long_column_name_id115:bigint,very_long_column_name_id116:bigint,very_long_column_name_id117:bigint,very_long_column_name_id118:bigint,very_long_column_name_id119:bigint,very_long_column_name_id120:bigint,very_long_column_name_id121:bigint,very_long_column_name_id122:bigint,very_long_column_name_id123:bigint,very_long_column_name_id124:bigint,very_long_column_name_id125:bigint,very_long_column_name_id126:bigint,very_long_column_name_id127:bigint,very_long_column_name_id128:bigint,very_long_column_name_id129:bigint,very_long_column_name_id130:bigint,very_long_column_name_id131:bigint,very_long_column_name_id132:bigint,very_long_column_name_id133:bigint,very_long_column_name_id134:bigint,very_long_column_name_id135:bigint,very_long_column_name_id136:bigint,very_long_column_name_id137:bigint,very_long_column_name_id138:bigint,very_long_column_name_id139:bigint,very_long_column_name_id140:bigint,very_long_column_name_id141:bigint,very_long_column_name_id142:bigint,very_long_column_name_id143:bigint,very_long_column_name_id144:bigint,very_long_column_name_id145:bigint,very_long_column_name_id146:bigint,very_long_column_name_id147:bigint,very_long_column_name_id148:bigint,very_long_column_name_id149:bigint,very_long_column_name_id150:bigint,very_long_column_name_id151:bigint,very_long_column_name_id152:bigint,very_long_column_name_id153:bigint,very_long_column_name_id154:bigint,very_long_column_name_id155:bigint,very_long_column_name_id156:bigint,very_long_column_name_id157:bigint,very_long_column_name_id158:bigint,very_long_column_name_id159:bigint,very_long_column_name_id160:bigint,very_long_column_name_id161:bigint,very_long_column_name_id162:bigint,very_long_column_name_id163:bigint,very_long_column_name_id164:bigint,very_long_column_name_id165:bigint,very_long_column_name_id166:bigint,very_long_column_name_id167:bigint,very_long_column_name_id168:bigint,very_long_column_name_id169:bigint,very_long_column_name_id170:bigint,very_long_column_name_id171:bigint,very_long_column_name_id172:bigint,very_long_column_name_id173:bigint,very_long_column_name_id174:bigint,very_long_column_name_id175:bigint,very_long_column_name_id176:bigint,very_long_column_name_id177:bigint,very_long_column_name_id178:bigint,very_long_column_name_id179:bigint,very_long_column_name_id180:bigint,very_long_column_name_id181:bigint,very_long_column_name_id182:bigint,very_long_column_name_id183:bigint,very_long_column_name_id184:bigint,very_long_column_name_id185:bigint,very_long_column_name_id186:bigint,very_long_column_name_id187:bigint,very_long_column_name_id188:bigint,very_long_column_name_id189:bigint,very_long_column_name_id190:bigint,very_long_column_name_id191:bigint,very_long_column_name_id192:bigint,very_long_column_name_id193:bigint,very_long_column_name_id194:bigint,very_long_column_name_id195:bigint,very_long_column_name_id196:bigint,very_long_column_name_id197:bigint,very_long_column_name_id198:bigint,very_long_column_name_id199:bigint>]
> {code}


