[ https://issues.apache.org/jira/browse/CARBONDATA-749?page=com.atlassian.jira.plugin.system.issuetabpanels:all-tabpanel ]

anubhav tarar reassigned CARBONDATA-749:
----------------------------------------

    Assignee: anubhav tarar

> Unexpected error log message while dropping carbon table
> --------------------------------------------------------
>
>                 Key: CARBONDATA-749
>                 URL: https://issues.apache.org/jira/browse/CARBONDATA-749
>             Project: CarbonData
>          Issue Type: Bug
>          Components: sql
>    Affects Versions: 1.0.0-incubating
>            Reporter: Liang Chen
>            Assignee: anubhav tarar
>            Priority: Minor
>
> 1. Create a table with the following script:
> carbon.sql("CREATE TABLE carbontable1 (id int, age string, year string) STORED BY 'carbondata'")
> 2. Drop table "carbontable1" with the following script:
> carbon.sql("drop table carbontable1")
> An unexpected error log message appears, as shown below:
> AUDIT 07-03 07:50:11,944 - [AppledeMacBook-Pro.local][apple][Thread-1]Deleting table [carbontable1] under database [default]
> AUDIT 07-03 07:50:12,086 - [AppledeMacBook-Pro.local][apple][Thread-1]Creating Table with Database name [default] and Table name [carbontable1]
> AUDIT 07-03 07:50:12,095 - [AppledeMacBook-Pro.local][apple][Thread-1]Table creation with Database name [default] and Table name [carbontable1] failed. Table [carbontable1] already exists under database [default]
> WARN  07-03 07:50:12,095 - org.spark_project.guava.util.concurrent.UncheckedExecutionException: java.lang.RuntimeException: Table [carbontable1] already exists under database [default]
> org.spark_project.guava.util.concurrent.UncheckedExecutionException: java.lang.RuntimeException: Table [carbontable1] already exists under database [default]
>       at org.spark_project.guava.cache.LocalCache$Segment.get(LocalCache.java:2263)
>       at org.spark_project.guava.cache.LocalCache.get(LocalCache.java:4000)
>       at org.spark_project.guava.cache.LocalCache.getOrLoad(LocalCache.java:4004)
>       at org.spark_project.guava.cache.LocalCache$LocalLoadingCache.get(LocalCache.java:4874)
>       at org.spark_project.guava.cache.LocalCache$LocalLoadingCache.getUnchecked(LocalCache.java:4880)
>       at org.spark_project.guava.cache.LocalCache$LocalLoadingCache.apply(LocalCache.java:4898)
>       at org.apache.spark.sql.hive.HiveMetastoreCatalog.lookupRelation(HiveMetastoreCatalog.scala:110)
>       at org.apache.spark.sql.hive.HiveSessionCatalog.lookupRelation(HiveSessionCatalog.scala:69)
>       at org.apache.spark.sql.SparkSession.table(SparkSession.scala:578)
>       at org.apache.spark.sql.SparkSession.table(SparkSession.scala:574)
>       at org.apache.spark.sql.execution.command.DropTableCommand.run(ddl.scala:203)
>       at org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult$lzycompute(commands.scala:58)
>       at org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult(commands.scala:56)
>       at org.apache.spark.sql.execution.command.ExecutedCommandExec.doExecute(commands.scala:74)
>       at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:114)
>       at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:114)
>       at org.apache.spark.sql.execution.SparkPlan$$anonfun$executeQuery$1.apply(SparkPlan.scala:135)
>       at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
>       at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:132)
>       at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:113)
>       at org.apache.spark.sql.execution.QueryExecution.toRdd$lzycompute(QueryExecution.scala:87)
>       at org.apache.spark.sql.execution.QueryExecution.toRdd(QueryExecution.scala:87)
>       at org.apache.spark.sql.Dataset.<init>(Dataset.scala:185)
>       at org.apache.spark.sql.Dataset$.ofRows(Dataset.scala:64)
>       at org.apache.spark.sql.SparkSession.sql(SparkSession.scala:592)
>       at org.apache.spark.sql.hive.CarbonHiveMetadataUtil$.invalidateAndDropTable(CarbonHiveMetadataUtil.scala:44)
>       at org.apache.spark.sql.hive.CarbonMetastore.dropTable(CarbonMetastore.scala:435)
>       at org.apache.spark.sql.execution.command.CarbonDropTableCommand.run(carbonTableSchema.scala:665)
>       at org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult$lzycompute(commands.scala:58)
>       at org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult(commands.scala:56)
>       at org.apache.spark.sql.execution.command.ExecutedCommandExec.doExecute(commands.scala:74)
>       at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:114)
>       at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:114)
>       at org.apache.spark.sql.execution.SparkPlan$$anonfun$executeQuery$1.apply(SparkPlan.scala:135)
>       at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
>       at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:132)
>       at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:113)
>       at org.apache.spark.sql.execution.QueryExecution.toRdd$lzycompute(QueryExecution.scala:87)
>       at org.apache.spark.sql.execution.QueryExecution.toRdd(QueryExecution.scala:87)
>       at org.apache.spark.sql.Dataset.<init>(Dataset.scala:185)
>       at org.apache.spark.sql.Dataset$.ofRows(Dataset.scala:64)
>       at org.apache.spark.sql.SparkSession.sql(SparkSession.scala:592)
>       at $line40.$read$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw.<init>(<console>:31)
>       at $line40.$read$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw.<init>(<console>:36)
>       at $line40.$read$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw.<init>(<console>:38)
>       at $line40.$read$$iw$$iw$$iw$$iw$$iw$$iw$$iw.<init>(<console>:40)
>       at $line40.$read$$iw$$iw$$iw$$iw$$iw$$iw.<init>(<console>:42)
>       at $line40.$read$$iw$$iw$$iw$$iw$$iw.<init>(<console>:44)
>       at $line40.$read$$iw$$iw$$iw$$iw.<init>(<console>:46)
>       at $line40.$read$$iw$$iw$$iw.<init>(<console>:48)
>       at $line40.$read$$iw$$iw.<init>(<console>:50)
>       at $line40.$read$$iw.<init>(<console>:52)
>       at $line40.$read.<init>(<console>:54)
>       at $line40.$read$.<init>(<console>:58)
>       at $line40.$read$.<clinit>(<console>)
>       at $line40.$eval$.$print$lzycompute(<console>:7)
>       at $line40.$eval$.$print(<console>:6)
>       at $line40.$eval.$print(<console>)
>       at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
>       at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
>       at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
>       at java.lang.reflect.Method.invoke(Method.java:497)
>       at scala.tools.nsc.interpreter.IMain$ReadEvalPrint.call(IMain.scala:786)
>       at scala.tools.nsc.interpreter.IMain$Request.loadAndRun(IMain.scala:1047)
>       at scala.tools.nsc.interpreter.IMain$WrappedRequest$$anonfun$loadAndRunReq$1.apply(IMain.scala:638)
>       at scala.tools.nsc.interpreter.IMain$WrappedRequest$$anonfun$loadAndRunReq$1.apply(IMain.scala:637)
>       at scala.reflect.internal.util.ScalaClassLoader$class.asContext(ScalaClassLoader.scala:31)
>       at scala.reflect.internal.util.AbstractFileClassLoader.asContext(AbstractFileClassLoader.scala:19)
>       at scala.tools.nsc.interpreter.IMain$WrappedRequest.loadAndRunReq(IMain.scala:637)
>       at scala.tools.nsc.interpreter.IMain.interpret(IMain.scala:569)
>       at scala.tools.nsc.interpreter.IMain.interpret(IMain.scala:565)
>       at scala.tools.nsc.interpreter.ILoop.interpretStartingWith(ILoop.scala:807)
>       at scala.tools.nsc.interpreter.ILoop.command(ILoop.scala:681)
>       at scala.tools.nsc.interpreter.ILoop.processLine(ILoop.scala:395)
>       at scala.tools.nsc.interpreter.ILoop.loop(ILoop.scala:415)
>       at scala.tools.nsc.interpreter.ILoop$$anonfun$process$1.apply$mcZ$sp(ILoop.scala:923)
>       at scala.tools.nsc.interpreter.ILoop$$anonfun$process$1.apply(ILoop.scala:909)
>       at scala.tools.nsc.interpreter.ILoop$$anonfun$process$1.apply(ILoop.scala:909)
>       at scala.reflect.internal.util.ScalaClassLoader$.savingContextLoader(ScalaClassLoader.scala:97)
>       at scala.tools.nsc.interpreter.ILoop.process(ILoop.scala:909)
>       at org.apache.spark.repl.Main$.doMain(Main.scala:68)
>       at org.apache.spark.repl.Main$.main(Main.scala:51)
>       at org.apache.spark.repl.Main.main(Main.scala)
>       at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
>       at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
>       at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
>       at java.lang.reflect.Method.invoke(Method.java:497)
>       at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:738)
>       at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:187)
>       at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:212)
>       at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:126)
>       at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
> Caused by: java.lang.RuntimeException: Table [carbontable1] already exists under database [default]
>       at scala.sys.package$.error(package.scala:27)
>       at org.apache.spark.sql.execution.command.CreateTable.run(carbonTableSchema.scala:156)
>       at org.apache.spark.sql.CarbonSource.createTableIfNotExists(CarbonSource.scala:154)
>       at org.apache.spark.sql.CarbonSource.createRelation(CarbonSource.scala:95)
>       at org.apache.spark.sql.execution.datasources.DataSource.resolveRelation(DataSource.scala:328)
>       at org.apache.spark.sql.hive.HiveMetastoreCatalog$$anon$1.load(HiveMetastoreCatalog.scala:76)
>       at org.apache.spark.sql.hive.HiveMetastoreCatalog$$anon$1.load(HiveMetastoreCatalog.scala:58)
>       at org.spark_project.guava.cache.LocalCache$LoadingValueReference.loadFuture(LocalCache.java:3599)
>       at org.spark_project.guava.cache.LocalCache$Segment.loadSync(LocalCache.java:2379)
>       at org.spark_project.guava.cache.LocalCache$Segment.lockedGetOrLoad(LocalCache.java:2342)
>       at org.spark_project.guava.cache.LocalCache$Segment.get(LocalCache.java:2257)
>       ... 91 more
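> Reading the trace: Spark's DropTableCommand first resolves the table via SparkSession.table, which misses the metastore relation cache and reloads the relation through CarbonSource.createRelation; that load path calls createTableIfNotExists, which logs and throws "Table [carbontable1] already exists" for a table that is about to be dropped. The exception surfaces only as a WARN, so the drop itself appears to complete; the logged error is misleading rather than fatal. A minimal sketch of how to observe this from the same shell (the SHOW TABLES check is my assumption, not part of the original report):
>
> // The WARN above is logged during this call, but no exception reaches the caller.
> carbon.sql("drop table carbontable1")
> // Assumed verification step: carbontable1 should no longer be listed.
> carbon.sql("SHOW TABLES").show()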



