GitHub user ChinmaySKulkarni commented on a diff in the pull request:

    https://github.com/apache/phoenix/pull/402#discussion_r229564032
  
    --- Diff: phoenix-spark/src/it/java/org/apache/phoenix/spark/OrderByIT.java ---
    @@ -0,0 +1,444 @@
    +package org.apache.phoenix.spark;
    +
    +import static org.apache.phoenix.util.TestUtil.TEST_PROPERTIES;
    +import static org.junit.Assert.assertEquals;
    +import static org.junit.Assert.assertFalse;
    +import static org.junit.Assert.assertTrue;
    +
    +import java.sql.Connection;
    +import java.sql.Date;
    +import java.sql.DriverManager;
    +import java.sql.PreparedStatement;
    +import java.sql.ResultSet;
    +import java.sql.SQLException;
    +import java.util.List;
    +import java.util.Properties;
    +
    +import org.apache.phoenix.end2end.BaseOrderByIT;
    +import org.apache.phoenix.util.PropertiesUtil;
    +import org.apache.phoenix.util.QueryBuilder;
    +import org.apache.spark.sql.Dataset;
    +import org.apache.spark.sql.Row;
    +import org.apache.spark.sql.SQLContext;
    +import org.junit.Ignore;
    +import org.junit.Test;
    +
    +import com.google.common.collect.Lists;
    +
    +import scala.Option;
    +import scala.collection.JavaConverters;
    +
    +public class OrderByIT extends BaseOrderByIT {
    +
    +    @Override
    +    protected ResultSet executeQuery(Connection conn, QueryBuilder queryBuilder) throws SQLException {
    +        return SparkUtil.executeQuery(conn, queryBuilder, getUrl(), config);
    +    }
    +
    +    @Test
    +    public void testOrderByWithJoin() throws Exception {
    +        Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
    +        try (Connection conn = DriverManager.getConnection(getUrl(), props)) {
    +            conn.setAutoCommit(false);
    +            String tableName1 = generateUniqueName();
    +            String ddl = "CREATE TABLE " + tableName1 +
    +                    "  (a_string varchar not null, cf1.a integer, cf1.b 
varchar, col1 integer, cf2.c varchar, cf2.d integer " +
    +                    "  CONSTRAINT pk PRIMARY KEY (a_string))\n";
    +            createTestTable(getUrl(), ddl);
    +            String dml = "UPSERT INTO " + tableName1 + " 
VALUES(?,?,?,?,?,?)";
    +            PreparedStatement stmt = conn.prepareStatement(dml);
    +            stmt.setString(1, "a");
    +            stmt.setInt(2, 40);
    +            stmt.setString(3, "aa");
    +            stmt.setInt(4, 10);
    +            stmt.setString(5, "bb");
    +            stmt.setInt(6, 20);
    +            stmt.execute();
    +            stmt.setString(1, "c");
    +            stmt.setInt(2, 30);
    +            stmt.setString(3, "cc");
    +            stmt.setInt(4, 50);
    +            stmt.setString(5, "dd");
    +            stmt.setInt(6, 60);
    +            stmt.execute();
    +            stmt.setString(1, "b");
    +            stmt.setInt(2, 40);
    +            stmt.setString(3, "bb");
    +            stmt.setInt(4, 5);
    +            stmt.setString(5, "aa");
    +            stmt.setInt(6, 80);
    +            stmt.execute();
    +            conn.commit();
    +
    +            String tableName2 = generateUniqueName();
    +            ddl = "CREATE TABLE " + tableName2 +
    +                    "  (a_string varchar not null, col1 integer" +
    +                    "  CONSTRAINT pk PRIMARY KEY (a_string))\n";
    +            createTestTable(getUrl(), ddl);
    +
    +            dml = "UPSERT INTO " + tableName2 + " VALUES(?, ?)";
    +            stmt = conn.prepareStatement(dml);
    +            stmt.setString(1, "a");
    +            stmt.setInt(2, 40);
    +            stmt.execute();
    +            stmt.setString(1, "b");
    +            stmt.setInt(2, 20);
    +            stmt.execute();
    +            stmt.setString(1, "c");
    +            stmt.setInt(2, 30);
    +            stmt.execute();
    +            conn.commit();
    +
    +            List<String> table1Columns = Lists.newArrayList("A_STRING", "CF1.A", "CF1.B", "COL1", "CF2.C", "CF2.D");
    +            SQLContext sqlContext = new SQLContext(SparkUtil.getSparkContext());
    +            Dataset phoenixDataSet =
    +                    new PhoenixRDD(SparkUtil.getSparkContext(), tableName1,
    +                            JavaConverters.collectionAsScalaIterableConverter(table1Columns)
    +                                    .asScala().toSeq(),
    --- End diff ---
    
    Can you add some comments here?
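    
    For instance, the Scala interop at the end of the quoted hunk is the kind of
    spot that benefits from a note. A minimal sketch of such comments, reusing the
    names visible in the diff (the local variable scalaColumns is made up for
    illustration, and the constructor's remaining arguments are cut off above, so
    they are not shown):
    
        // PhoenixRDD is implemented in Scala, so its constructor takes a
        // scala.collection.Seq of column names rather than a java.util.List.
        // JavaConverters bridges the Java list to a Scala Iterable, and
        // toSeq() materializes the Seq the constructor expects.
        scala.collection.Seq<String> scalaColumns =
                JavaConverters.collectionAsScalaIterableConverter(table1Columns)
                        .asScala().toSeq();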

