[ https://issues.apache.org/jira/browse/SPARK-6709?page=com.atlassian.jira.plugin.system.issuetabpanels:all-tabpanel ]

Michael Armbrust resolved SPARK-6709.
-------------------------------------
    Resolution: Won't Fix

Use backticks to escape identifiers that are reserved words:

{{SELECT `count` FROM whats}}
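Applied to the repro quoted below, the fix is a one-character change in the query. Here is a minimal spark-shell sketch (it assumes the {{sc}} provided by the shell; the {{whats}} table and {{count}} column are taken from the reporter's steps, and the DataFrame-API variant at the end is an alternative that bypasses the SQL parser entirely):

{code:scala}
// Repro from the issue, rebuilt in spark-shell.
val sqlContext = new org.apache.spark.sql.SQLContext(sc)
import sqlContext.implicits._

case class what(id: Int, count: Int)
val whats = sc.parallelize(0 to 10).map(x => what(x, x * 10)).toDF()
whats.registerTempTable("whats")

// Fails in the 1.x SQL parser: `count` is lexed as the COUNT keyword,
// which is why the error expects a "(" after it.
// sqlContext.sql("select * from whats where count < 20").collect()

// Works: escape the reserved word with backticks.
sqlContext.sql("select * from whats where `count` < 20").collect()

// Alternative: the DataFrame API never goes through the SQL parser,
// so no escaping is needed.
whats.filter($"count" < 20).collect()
{code}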

> SparkSQL cannot parse SQL correctly when the table contains a "count" column.
> -----------------------------------------------------------------------------
>
>                 Key: SPARK-6709
>                 URL: https://issues.apache.org/jira/browse/SPARK-6709
>             Project: Spark
>          Issue Type: Bug
>          Components: SQL
>    Affects Versions: 1.1.0, 1.1.1, 1.2.0, 1.2.1, 1.3.0
>            Reporter: Patrick Liu
>
> bin/spark-shell
> scala> val sqlContext = new org.apache.spark.sql.SQLContext(sc)
> scala> import sqlContext.implicits._
> scala> case class what(id: Int, count: Int)
> scala> val whats = sc.parallelize(0 to 10).map(x => what(x, x*10)).toDF()
> scala> whats.registerTempTable("whats")
> scala> sqlContext.sql("select * from whats where count < 20").collect
> Error Log:
> scala> sqlContext.sql("select * from whats where count < 20").collect
> java.lang.RuntimeException: [1.33] failure: ``('' expected but `<' found
> select * from whats where count < 20
>                                 ^
>       at scala.sys.package$.error(package.scala:27)
>       at org.apache.spark.sql.catalyst.AbstractSparkSQLParser.apply(AbstractSparkSQLParser.scala:40)
>       at org.apache.spark.sql.SQLContext$$anonfun$2.apply(SQLContext.scala:130)
>       at org.apache.spark.sql.SQLContext$$anonfun$2.apply(SQLContext.scala:130)
>       at org.apache.spark.sql.SparkSQLParser$$anonfun$org$apache$spark$sql$SparkSQLParser$$others$1.apply(SparkSQLParser.scala:96)
>       at org.apache.spark.sql.SparkSQLParser$$anonfun$org$apache$spark$sql$SparkSQLParser$$others$1.apply(SparkSQLParser.scala:95)
>       at scala.util.parsing.combinator.Parsers$Success.map(Parsers.scala:136)
>       at scala.util.parsing.combinator.Parsers$Success.map(Parsers.scala:135)
>       at scala.util.parsing.combinator.Parsers$Parser$$anonfun$map$1.apply(Parsers.scala:242)
>       at scala.util.parsing.combinator.Parsers$Parser$$anonfun$map$1.apply(Parsers.scala:242)
>       at scala.util.parsing.combinator.Parsers$$anon$3.apply(Parsers.scala:222)
>       at scala.util.parsing.combinator.Parsers$Parser$$anonfun$append$1$$anonfun$apply$2.apply(Parsers.scala:254)
>       at scala.util.parsing.combinator.Parsers$Parser$$anonfun$append$1$$anonfun$apply$2.apply(Parsers.scala:254)
>       at scala.util.parsing.combinator.Parsers$Failure.append(Parsers.scala:202)
>       at scala.util.parsing.combinator.Parsers$Parser$$anonfun$append$1.apply(Parsers.scala:254)
>       at scala.util.parsing.combinator.Parsers$Parser$$anonfun$append$1.apply(Parsers.scala:254)
>       at scala.util.parsing.combinator.Parsers$$anon$3.apply(Parsers.scala:222)
>       at scala.util.parsing.combinator.Parsers$$anon$2$$anonfun$apply$14.apply(Parsers.scala:891)
>       at scala.util.parsing.combinator.Parsers$$anon$2$$anonfun$apply$14.apply(Parsers.scala:891)
>       at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>       at scala.util.parsing.combinator.Parsers$$anon$2.apply(Parsers.scala:890)
>       at scala.util.parsing.combinator.PackratParsers$$anon$1.apply(PackratParsers.scala:110)
>       at org.apache.spark.sql.catalyst.AbstractSparkSQLParser.apply(AbstractSparkSQLParser.scala:38)
>       at org.apache.spark.sql.SQLContext$$anonfun$parseSql$1.apply(SQLContext.scala:134)
>       at org.apache.spark.sql.SQLContext$$anonfun$parseSql$1.apply(SQLContext.scala:134)
>       at scala.Option.getOrElse(Option.scala:120)
>       at org.apache.spark.sql.SQLContext.parseSql(SQLContext.scala:134)
>       at org.apache.spark.sql.SQLContext.sql(SQLContext.scala:915)
>       at $iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC.<init>(<console>:27)
>       at $iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC.<init>(<console>:32)
>       at $iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC.<init>(<console>:34)
>       at $iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC.<init>(<console>:36)
>       at $iwC$$iwC$$iwC$$iwC$$iwC$$iwC.<init>(<console>:38)
>       at $iwC$$iwC$$iwC$$iwC$$iwC.<init>(<console>:40)
>       at $iwC$$iwC$$iwC$$iwC.<init>(<console>:42)
>       at $iwC$$iwC$$iwC.<init>(<console>:44)
>       at $iwC$$iwC.<init>(<console>:46)
>       at $iwC.<init>(<console>:48)
>       at <init>(<console>:50)
>       at .<init>(<console>:54)
>       at .<clinit>(<console>)
>       at .<init>(<console>:7)
>       at .<clinit>(<console>)
>       at $print(<console>)
>       at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
>       at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
>       at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
>       at java.lang.reflect.Method.invoke(Method.java:597)
>       at org.apache.spark.repl.SparkIMain$ReadEvalPrint.call(SparkIMain.scala:1065)
>       at org.apache.spark.repl.SparkIMain$Request.loadAndRun(SparkIMain.scala:1338)
>       at org.apache.spark.repl.SparkIMain.loadAndRunReq$1(SparkIMain.scala:840)
>       at org.apache.spark.repl.SparkIMain.interpret(SparkIMain.scala:871)
>       at org.apache.spark.repl.SparkIMain.interpret(SparkIMain.scala:819)
>       at org.apache.spark.repl.SparkILoop.reallyInterpret$1(SparkILoop.scala:856)
>       at org.apache.spark.repl.SparkILoop.interpretStartingWith(SparkILoop.scala:901)
>       at org.apache.spark.repl.SparkILoop.command(SparkILoop.scala:813)
>       at org.apache.spark.repl.SparkILoop.processLine$1(SparkILoop.scala:656)
>       at org.apache.spark.repl.SparkILoop.innerLoop$1(SparkILoop.scala:664)
>       at org.apache.spark.repl.SparkILoop.org$apache$spark$repl$SparkILoop$$loop(SparkILoop.scala:669)
>       at org.apache.spark.repl.SparkILoop$$anonfun$org$apache$spark$repl$SparkILoop$$process$1.apply$mcZ$sp(SparkILoop.scala:996)
>       at org.apache.spark.repl.SparkILoop$$anonfun$org$apache$spark$repl$SparkILoop$$process$1.apply(SparkILoop.scala:944)
>       at org.apache.spark.repl.SparkILoop$$anonfun$org$apache$spark$repl$SparkILoop$$process$1.apply(SparkILoop.scala:944)
>       at scala.tools.nsc.util.ScalaClassLoader$.savingContextLoader(ScalaClassLoader.scala:135)
>       at org.apache.spark.repl.SparkILoop.org$apache$spark$repl$SparkILoop$$process(SparkILoop.scala:944)
>       at org.apache.spark.repl.SparkILoop.process(SparkILoop.scala:1058)
>       at org.apache.spark.repl.Main$.main(Main.scala:31)
>       at org.apache.spark.repl.Main.main(Main.scala)
>       at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
>       at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
>       at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
>       at java.lang.reflect.Method.invoke(Method.java:597)
>       at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:569)
>       at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:166)
>       at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:189)
>       at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:110)
>       at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)


