[HOTFIX] Fix streaming test case issue for file input source

Fix streaming test case issue for file input source

This closes #1922


Project: http://git-wip-us.apache.org/repos/asf/carbondata/repo
Commit: http://git-wip-us.apache.org/repos/asf/carbondata/commit/36ff9321
Tree: http://git-wip-us.apache.org/repos/asf/carbondata/tree/36ff9321
Diff: http://git-wip-us.apache.org/repos/asf/carbondata/diff/36ff9321

Branch: refs/heads/branch-1.3
Commit: 36ff93216d7acce7bde7287a285d00944065da3b
Parents: 11f2371
Author: QiangCai <qiang...@qq.com>
Authored: Sat Feb 3 18:04:49 2018 +0800
Committer: chenliang613 <chenliang...@huawei.com>
Committed: Sat Feb 3 21:27:31 2018 +0800

----------------------------------------------------------------------
 .../spark/carbondata/TestStreamingTableOperation.scala   | 11 ++++-------
 1 file changed, 4 insertions(+), 7 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/carbondata/blob/36ff9321/integration/spark2/src/test/scala/org/apache/spark/carbondata/TestStreamingTableOperation.scala
----------------------------------------------------------------------
diff --git 
a/integration/spark2/src/test/scala/org/apache/spark/carbondata/TestStreamingTableOperation.scala
 
b/integration/spark2/src/test/scala/org/apache/spark/carbondata/TestStreamingTableOperation.scala
index e1e41dc..a368cef 100644
--- 
a/integration/spark2/src/test/scala/org/apache/spark/carbondata/TestStreamingTableOperation.scala
+++ 
b/integration/spark2/src/test/scala/org/apache/spark/carbondata/TestStreamingTableOperation.scala
@@ -233,6 +233,9 @@ class TestStreamingTableOperation extends QueryTest with 
BeforeAndAfterAll {
       sql("select count(*) from streaming.stream_table_file"),
       Seq(Row(25))
     )
+
+    val row = sql("select * from streaming.stream_table_file order by 
id").head()
+    assertResult(Row(10, "name_10", "city_10", 100000.0))(row)
   }
 
   // bad records
@@ -875,13 +878,7 @@ class TestStreamingTableOperation extends QueryTest with 
BeforeAndAfterAll {
           .add("file", "string")
         var qry: StreamingQuery = null
         try {
-          val readSocketDF = spark.readStream
-            .format("csv")
-            .option("sep", ",")
-            .schema(inputSchema)
-            .option("path", csvDataDir)
-            .option("header", "false")
-            .load()
+          val readSocketDF = spark.readStream.text(csvDataDir)
 
           // Write data from socket stream to carbondata file
           qry = readSocketDF.writeStream

Reply via email to the mailing list.