[ https://issues.apache.org/jira/browse/SPARK-20698?page=com.atlassian.jira.plugin.system.issuetabpanels:all-tabpanel ]

someshwar kale updated SPARK-20698:
-----------------------------------
    Description: 
I have written the Spark program below; it is not working as expected.

++++++++++++++++++++++++
{code}
package computedBatch;

import org.apache.log4j.Level;
import org.apache.log4j.Logger;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.Function;
import org.apache.spark.sql.DataFrame;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.RowFactory;
import org.apache.spark.sql.SQLContext;
import org.apache.spark.sql.hive.HiveContext;
import org.apache.spark.sql.types.DataTypes;
import org.apache.spark.sql.types.StructField;
import org.apache.spark.sql.types.StructType;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;


public class ArithmeticIssueTest {
    private transient JavaSparkContext javaSparkContext;
    private transient SQLContext sqlContext;

    public ArithmeticIssueTest() {
        Logger.getLogger("org").setLevel(Level.OFF);
        Logger.getLogger("akka").setLevel(Level.OFF);
        SparkConf conf = new SparkConf().setAppName("ArithmeticIssueTest").setMaster("local[4]");
        javaSparkContext = new JavaSparkContext(conf);
        sqlContext = new HiveContext(javaSparkContext);
    }

    public static void main(String[] args) {
        ArithmeticIssueTest arithmeticIssueTest = new ArithmeticIssueTest();
        arithmeticIssueTest.execute();
    }

    private void execute(){
        List<String> data = Arrays.asList(
                "a1,1494389759,99.8793003568,325.389705932",
                "a1,1494389759,99.9472573803,325.27559502",
                "a1,1494389759,99.7887233987,325.334374851",
                "a1,1494389759,99.9547800925,325.371537062",
                "a1,1494389759,99.8039111691,325.305285877",
                "a1,1494389759,99.8342317379,325.24881354",
                "a1,1494389759,99.9849449235,325.396678931",
                "a1,1494389759,99.9396731311,325.336115345",
                "a1,1494389759,99.9320915068,325.242622938",
                "a1,1494389759,99.8943333669,325.320965146",
                "a1,1494389759,99.7735359781,325.345168334",
                "a1,1494389759,99.9698837734,325.352291407",
                "a1,1494389759,99.8418330703,325.296539372",
                "a1,1494389759,99.796315751,325.347570632",
                "a1,1494389759,99.7811931613,325.351137315",
                "a1,1494389759,99.9773765104,325.218131741",
                "a1,1494389759,99.8189825201,325.288197381",
                "a1,1494389759,99.8115005369,325.282327633",
                "a1,1494389759,99.9924539722,325.24048614",
                "a1,1494389759,99.9170191204,325.299431664");
        JavaRDD<String> rawData = javaSparkContext.parallelize(data);
        List<StructField> fields = new ArrayList<>();
        fields.add(DataTypes.createStructField("ASSET_ID", DataTypes.StringType, true));
        fields.add(DataTypes.createStructField("TIMESTAMP", DataTypes.LongType, true));
        fields.add(DataTypes.createStructField("fuel", DataTypes.DoubleType, true));
        fields.add(DataTypes.createStructField("temperature", DataTypes.DoubleType, true));
        StructType schema = DataTypes.createStructType(fields);
        JavaRDD<Row> rowRDD = rawData.map(
                (Function<String, Row>) record -> {
                    String[] fields1 = record.split(",");
                    return RowFactory.create(
                            fields1[0].trim(),
                            Long.parseLong(fields1[1].trim()),
                            Double.parseDouble(fields1[2].trim()),
                            Double.parseDouble(fields1[3].trim()));
                });
        DataFrame df = sqlContext.createDataFrame(rowRDD, schema);
        df.show(false);
        df.registerTempTable("x_linkx1087571272_filtered");

        sqlContext.sql("SELECT x_linkx1087571272_filtered.ASSET_ID, count(case 
when x_linkx1087571272_filtered" +
                ".temperature=325.0 then 1 else 0 end) AS xsumptionx1582594572, 
max(x_linkx1087571272_filtered" +
                ".TIMESTAMP) AS eventTime  FROM x_linkx1087571272_filtered 
GROUP BY x_linkx1087571272_filtered" +
                ".ASSET_ID").show(false);

        sqlContext.sql("SELECT x_linkx1087571272_filtered.ASSET_ID, count(case 
when x_linkx1087571272_filtered" +
                ".fuel>99.8 then 1 else 0 end) AS xnsumptionx352569416, 
max(x_linkx1087571272_filtered.TIMESTAMP) AS " +
                "eventTime  FROM x_linkx1087571272_filtered GROUP BY 
x_linkx1087571272_filtered.ASSET_ID").show(false);

        // same query as the first one, but using "==" instead of "="
        sqlContext.sql("SELECT x_linkx1087571272_filtered.ASSET_ID, " +
                "count(case when x_linkx1087571272_filtered.temperature==325.0 then 1 else 0 end) AS xsumptionx1582594572, " +
                "max(x_linkx1087571272_filtered.TIMESTAMP) AS eventTime " +
                "FROM x_linkx1087571272_filtered " +
                "GROUP BY x_linkx1087571272_filtered.ASSET_ID").show(false);
    }
}
{code}
++++++++++++++++++++++++++++++
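
For cross-checking, the same predicates can be evaluated with the DataFrame API instead of a SQL CASE expression. Below is a sketch against the df built above (Spark 1.6 Java API; the variable names are illustrative):

{code}
// Count the matching rows directly with filter + count, independent
// of the CASE-based aggregation used in the SQL queries above.
long temperatureMatches = df.filter(df.col("temperature").equalTo(325.0)).count();
long fuelMatches = df.filter(df.col("fuel").gt(99.8)).count();
System.out.println("temperature = 325.0 matches: " + temperatureMatches);
System.out.println("fuel > 99.8 matches: " + fuelMatches);
{code}

On the sample data this should print 0 and 16, which is what the conditional counts are expected to be.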

Logs:

{noformat}
+--------+----------+-------------+-------------+
|ASSET_ID|TIMESTAMP |fuel         |temperature  |
+--------+----------+-------------+-------------+
|a1      |1494389759|99.8793003568|325.389705932|
|a1      |1494389759|99.9472573803|325.27559502 |
|a1      |1494389759|99.7887233987|325.334374851|
|a1      |1494389759|99.9547800925|325.371537062|
|a1      |1494389759|99.8039111691|325.305285877|
|a1      |1494389759|99.8342317379|325.24881354 |
|a1      |1494389759|99.9849449235|325.396678931|
|a1      |1494389759|99.9396731311|325.336115345|
|a1      |1494389759|99.9320915068|325.242622938|
|a1      |1494389759|99.8943333669|325.320965146|
|a1      |1494389759|99.7735359781|325.345168334|
|a1      |1494389759|99.9698837734|325.352291407|
|a1      |1494389759|99.8418330703|325.296539372|
|a1      |1494389759|99.796315751 |325.347570632|
|a1      |1494389759|99.7811931613|325.351137315|
|a1      |1494389759|99.9773765104|325.218131741|
|a1      |1494389759|99.8189825201|325.288197381|
|a1      |1494389759|99.8115005369|325.282327633|
|a1      |1494389759|99.9924539722|325.24048614 |
|a1      |1494389759|99.9170191204|325.299431664|
+--------+----------+-------------+-------------+

17/05/11 00:22:08 INFO ParseDriver: Parsing command: SELECT 
x_linkx1087571272_filtered.ASSET_ID, count(case when 
x_linkx1087571272_filtered.temperature=325.0 then 1 else 0 end) AS 
xsumptionx1582594572, max(x_linkx1087571272_filtered.TIMESTAMP) AS eventTime  
FROM x_linkx1087571272_filtered GROUP BY x_linkx1087571272_filtered.ASSET_ID
17/05/11 00:22:09 INFO ParseDriver: Parse Completed
+--------+--------------------+----------+
|ASSET_ID|xsumptionx1582594572|eventTime |
+--------+--------------------+----------+
|a1      |20                  |1494389759|
+--------+--------------------+----------+

17/05/11 00:22:16 INFO ParseDriver: Parsing command: SELECT 
x_linkx1087571272_filtered.ASSET_ID, count(case when 
x_linkx1087571272_filtered.fuel>99.8 then 1 else 0 end) AS 
xnsumptionx352569416, max(x_linkx1087571272_filtered.TIMESTAMP) AS eventTime  
FROM x_linkx1087571272_filtered GROUP BY x_linkx1087571272_filtered.ASSET_ID
17/05/11 00:22:16 INFO ParseDriver: Parse Completed
+--------+--------------------+----------+
|ASSET_ID|xnsumptionx352569416|eventTime |
+--------+--------------------+----------+
|a1      |20                  |1494389759|
+--------+--------------------+----------+

17/05/11 00:22:24 INFO ParseDriver: Parsing command: SELECT 
x_linkx1087571272_filtered.ASSET_ID, count(case when 
x_linkx1087571272_filtered.temperature==325.0 then 1 else 0 end) AS 
xsumptionx1582594572, max(x_linkx1087571272_filtered.TIMESTAMP) AS eventTime  
FROM x_linkx1087571272_filtered GROUP BY x_linkx1087571272_filtered.ASSET_ID
17/05/11 00:22:24 INFO ParseDriver: Parse Completed
+--------+--------------------+----------+
|ASSET_ID|xsumptionx1582594572|eventTime |
+--------+--------------------+----------+
|a1      |20                  |1494389759|
+--------+--------------------+----------+
{noformat}

All three queries return 20 for the conditional count, which is not the expected result: none of the temperature values equals exactly 325.0, and only 16 of the 20 rows have fuel > 99.8.
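
Note: count(expr) counts every row for which expr is non-NULL, and both CASE branches here (1 and 0) are non-NULL, so count(case when ... then 1 else 0 end) always equals the group's row count (20) regardless of the comparison operator. A sketch of the presumably intended aggregation, runnable in place of the queries above (the alias f and the result column names are illustrative):

{code}
// sum(...) adds the 1/0 flags, so only the matching rows are counted;
// count(case when ... then 1 else 0 end) counts every non-NULL value,
// i.e. every row, because both CASE branches are non-NULL.
// An equivalent fix is count(case when ... then 1 end) with no ELSE,
// since non-matching rows then yield NULL and count() skips NULLs.
sqlContext.sql("SELECT f.ASSET_ID, " +
        "sum(case when f.temperature = 325.0 then 1 else 0 end) AS temperatureMatches, " +
        "sum(case when f.fuel > 99.8 then 1 else 0 end) AS fuelMatches, " +
        "max(f.TIMESTAMP) AS eventTime " +
        "FROM x_linkx1087571272_filtered f " +
        "GROUP BY f.ASSET_ID").show(false);
{code}

Against the sample data this should report 0 temperature matches and 16 fuel matches, with = and == behaving the same way.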



> =, ==, > are not working as expected when used in a SQL query
> ----------------------------------------------------------
>
>                 Key: SPARK-20698
>                 URL: https://issues.apache.org/jira/browse/SPARK-20698
>             Project: Spark
>          Issue Type: Bug
>          Components: SQL
>    Affects Versions: 1.6.2
>         Environment: windows
>            Reporter: someshwar kale
>            Priority: Critical
>             Fix For: 1.6.2
>


