Pravesh Jain created SPARK-3284:
-----------------------------------

             Summary: saveAsParquetFile not working on Windows
                 Key: SPARK-3284
                 URL: https://issues.apache.org/jira/browse/SPARK-3284
             Project: Spark
          Issue Type: Bug
    Affects Versions: 1.0.2
         Environment: Windows
            Reporter: Pravesh Jain
            Priority: Minor


Running the following code:

import org.apache.spark.{SparkConf, SparkContext}

object parquet {

  case class Person(name: String, age: Int)

  def main(args: Array[String]) {

    val sparkConf = new SparkConf().setMaster("local").setAppName("HdfsWordCount")
    val sc = new SparkContext(sparkConf)
    val sqlContext = new org.apache.spark.sql.SQLContext(sc)
    // createSchemaRDD is used to implicitly convert an RDD to a SchemaRDD.
    import sqlContext.createSchemaRDD

    val people = sc.textFile("C:/Users/pravesh.jain/Desktop/people/people.txt")
      .map(_.split(","))
      .map(p => Person(p(0), p(1).trim.toInt))

    people.saveAsParquetFile("C:/Users/pravesh.jain/Desktop/people/people.parquet")

    val parquetFile = sqlContext.parquetFile("C:/Users/pravesh.jain/Desktop/people/people.parquet")
  }
}

gives the error:

    Exception in thread "main" java.lang.NullPointerException
        at org.apache.spark.parquet$.main(parquet.scala:16)

which corresponds to the saveAsParquetFile call.
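
On Windows, an NPE at this point often comes from Hadoop's shell utilities when winutils.exe / HADOOP_HOME is not available, though that has not been confirmed for this report. A minimal sketch of a possible workaround, assuming that is the cause and that winutils.exe has been placed under C:/hadoop/bin (an assumed path):

import org.apache.spark.{SparkConf, SparkContext}

object parquetWorkaround {
  def main(args: Array[String]) {
    // Point Hadoop at a local directory containing bin/winutils.exe before
    // the SparkContext is created; C:/hadoop is only an assumed location.
    System.setProperty("hadoop.home.dir", "C:/hadoop")

    val sparkConf = new SparkConf().setMaster("local").setAppName("HdfsWordCount")
    val sc = new SparkContext(sparkConf)
    // ... same SQLContext / saveAsParquetFile code as above ...
    sc.stop()
  }
}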


