Fix some more test failures

Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/ffb984f4
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/ffb984f4
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/ffb984f4

Branch: refs/heads/encodecolumns2
Commit: ffb984f44f4841314ad2ea2328a9d5b284174929
Parents: 4128b56
Author: Samarth <samarth.j...@salesforce.com>
Authored: Wed Nov 23 00:52:04 2016 -0800
Committer: Thomas D'Silva <tdsi...@salesforce.com>
Committed: Thu Dec 22 13:00:44 2016 -0800

----------------------------------------------------------------------
 .../phoenix/iterate/BaseResultIterators.java    |  2 +-
 .../apache/phoenix/schema/MetaDataClient.java   |  1 +
 .../java/org/apache/phoenix/schema/PTable.java  | 27 ++++++++++++++++-
 .../org/apache/phoenix/schema/PTableImpl.java   |  3 ++
 .../apache/phoenix/util/EncodedColumnsUtil.java | 31 ++++++++++++++++++++
 .../org/apache/phoenix/util/SchemaUtil.java     | 27 +----------------
 6 files changed, 63 insertions(+), 28 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/phoenix/blob/ffb984f4/phoenix-core/src/main/java/org/apache/phoenix/iterate/BaseResultIterators.java
----------------------------------------------------------------------
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/iterate/BaseResultIterators.java
 
b/phoenix-core/src/main/java/org/apache/phoenix/iterate/BaseResultIterators.java
index 96797a9..6357e52 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/iterate/BaseResultIterators.java
+++ 
b/phoenix-core/src/main/java/org/apache/phoenix/iterate/BaseResultIterators.java
@@ -279,7 +279,7 @@ public abstract class BaseResultIterators extends 
ExplainTable implements Result
         }
         Map<byte[], NavigableSet<byte[]>> familyMap = scan.getFamilyMap();
 
-        Map<String, Pair<Integer, Integer>> qualifierRanges = 
SchemaUtil.getQualifierRanges(table);
+        Map<String, Pair<Integer, Integer>> qualifierRanges = 
EncodedColumnsUtil.getQualifierRanges(table);
         for (Entry<byte[], NavigableSet<byte[]>> entry : familyMap.entrySet()) 
{
             if (entry.getValue() != null) {
                 for (byte[] cq : entry.getValue()) {

http://git-wip-us.apache.org/repos/asf/phoenix/blob/ffb984f4/phoenix-core/src/main/java/org/apache/phoenix/schema/MetaDataClient.java
----------------------------------------------------------------------
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/schema/MetaDataClient.java 
b/phoenix-core/src/main/java/org/apache/phoenix/schema/MetaDataClient.java
index d3b9596..8936b9b 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/schema/MetaDataClient.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/schema/MetaDataClient.java
@@ -2076,6 +2076,7 @@ public class MetaDataClient {
                  * then we rely on the PTable, with appropriate storage 
scheme, returned in the MetadataMutationResult to be updated 
                 * in the client cache. If the phoenix table doesn't already 
exist then the non-encoded column qualifier scheme works
                  * because we cannot control the column qualifiers that were 
used when populating the hbase table.
+                 * TODO: samarth add a test case for this
                  */
                 if (parent != null) {
                     storageScheme = parent.getStorageScheme();

http://git-wip-us.apache.org/repos/asf/phoenix/blob/ffb984f4/phoenix-core/src/main/java/org/apache/phoenix/schema/PTable.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/schema/PTable.java 
b/phoenix-core/src/main/java/org/apache/phoenix/schema/PTable.java
index b9565a1..1ee2320 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/schema/PTable.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/schema/PTable.java
@@ -213,6 +213,11 @@ public interface PTable extends PMetaDataEntity {
             public boolean isEncodeable(String value) {
                 return true;
             }
+            
+            @Override
+            public String toString() {
+                return "NON_ENCODED_QUALIFIERS";
+            }
         };
         public static final QualifierEncodingScheme ONE_BYTE_QUALIFIERS = new 
QualifierEncodingScheme<Long>((byte)1, "ONE_BYTE_QUALIFIERS", 255l) {
             @Override
@@ -234,6 +239,11 @@ public interface PTable extends PMetaDataEntity {
             public boolean isEncodeable(Long value) {
                 return true;
             }
+            
+            @Override
+            public String toString() {
+                return "ONE_BYTE_QUALIFIERS";
+            }
         };
         public static final QualifierEncodingScheme TWO_BYTE_QUALIFIERS = new 
QualifierEncodingScheme<Long>((byte)2, "TWO_BYTE_QUALIFIERS", 65535l) {
             @Override
@@ -255,6 +265,11 @@ public interface PTable extends PMetaDataEntity {
             public boolean isEncodeable(Long value) {
                 return true;
             }
+            
+            @Override
+            public String toString() {
+                return "TWO_BYTE_QUALIFIERS";
+            }
         };
         public static final QualifierEncodingScheme THREE_BYTE_QUALIFIERS = 
new QualifierEncodingScheme<Long>((byte)3, "THREE_BYTE_QUALIFIERS", 16777215l) {
             @Override
@@ -276,6 +291,11 @@ public interface PTable extends PMetaDataEntity {
             public boolean isEncodeable(Long value) {
                 return true;
             }
+            
+            @Override
+            public String toString() {
+                return "THREE_BYTE_QUALIFIERS";
+            }
         };
         public static final QualifierEncodingScheme FOUR_BYTE_QUALIFIERS = new 
QualifierEncodingScheme<Long>((byte)4, "FOUR_BYTE_QUALIFIERS", 4294967295l) {
             @Override
@@ -297,6 +317,11 @@ public interface PTable extends PMetaDataEntity {
             public boolean isEncodeable(Long value) {
                 return true;
             }
+            
+            @Override
+            public String toString() {
+                return "FOUR_BYTE_QUALIFIERS";
+            }
         };
         public static final QualifierEncodingScheme[] schemes = 
{NON_ENCODED_QUALIFIERS, ONE_BYTE_QUALIFIERS, TWO_BYTE_QUALIFIERS, 
THREE_BYTE_QUALIFIERS, FOUR_BYTE_QUALIFIERS}; 
         private final byte[] metadataBytes;
@@ -318,7 +343,7 @@ public interface PTable extends PMetaDataEntity {
         }
 
         public static QualifierEncodingScheme fromSerializedValue(byte 
serializedValue) {
-            if (serializedValue < 0 || serializedValue > schemes.length - 1) {
+            if (serializedValue < 0 || serializedValue >= schemes.length) {
                 return null;
             }
             return schemes[serializedValue];

http://git-wip-us.apache.org/repos/asf/phoenix/blob/ffb984f4/phoenix-core/src/main/java/org/apache/phoenix/schema/PTableImpl.java
----------------------------------------------------------------------
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/schema/PTableImpl.java 
b/phoenix-core/src/main/java/org/apache/phoenix/schema/PTableImpl.java
index 98c361e..e225940 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/schema/PTableImpl.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/schema/PTableImpl.java
@@ -1414,6 +1414,9 @@ public class PTableImpl implements PTable {
               builder.addEncodedCQCounters(cqBuilder.build());
           }
       }
+      if (table.getEncodingScheme() != null) {
+          builder.setEncodingScheme(ByteStringer.wrap(new 
byte[]{table.getEncodingScheme().getSerializedMetadataValue()}));
+      }
       return builder.build();
     }
 

http://git-wip-us.apache.org/repos/asf/phoenix/blob/ffb984f4/phoenix-core/src/main/java/org/apache/phoenix/util/EncodedColumnsUtil.java
----------------------------------------------------------------------
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/util/EncodedColumnsUtil.java 
b/phoenix-core/src/main/java/org/apache/phoenix/util/EncodedColumnsUtil.java
index e672aa0..c73e5b0 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/util/EncodedColumnsUtil.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/util/EncodedColumnsUtil.java
@@ -18,6 +18,9 @@
 package org.apache.phoenix.util;
 
 import static com.google.common.base.Preconditions.checkArgument;
+import static 
org.apache.phoenix.schema.PTable.QualifierEncodingScheme.NON_ENCODED_QUALIFIERS;
+
+import java.util.Map;
 
 import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.util.Bytes;
@@ -29,6 +32,9 @@ import org.apache.phoenix.schema.PTable;
 import org.apache.phoenix.schema.PTable.QualifierEncodingScheme;
 import org.apache.phoenix.schema.PTable.StorageScheme;
 
+import com.google.common.base.Preconditions;
+import com.google.common.collect.Maps;
+
 public class EncodedColumnsUtil {
 
     public static boolean usesEncodedColumnNames(PTable table) {
@@ -136,5 +142,30 @@ public class EncodedColumnsUtil {
     public static boolean useQualifierAsIndex(Pair<Integer, Integer> 
minMaxQualifiers) {
         return minMaxQualifiers != null;
     }
+
+    public static Map<String, Pair<Integer, Integer>> 
getQualifierRanges(PTable table) {
+        Preconditions.checkArgument(table.getEncodingScheme() != 
NON_ENCODED_QUALIFIERS,
+            "Use this method only for tables with an encoding scheme other than "
+                    + NON_ENCODED_QUALIFIERS);
+        Map<String, Pair<Integer, Integer>> toReturn = 
Maps.newHashMapWithExpectedSize(table.getColumns().size());
+        for (PColumn column : table.getColumns()) {
+            if (!SchemaUtil.isPKColumn(column)) {
+                String colFamily = column.getFamilyName().getString();
+                Pair<Integer, Integer> minMaxQualifiers = 
toReturn.get(colFamily);
+                Integer encodedColumnQualifier = 
column.getEncodedColumnQualifier();
+                if (minMaxQualifiers == null) {
+                    minMaxQualifiers = new Pair<>(encodedColumnQualifier, 
encodedColumnQualifier);
+                    toReturn.put(colFamily, minMaxQualifiers);
+                } else {
+                    if (encodedColumnQualifier < minMaxQualifiers.getFirst()) {
+                        minMaxQualifiers.setFirst(encodedColumnQualifier);
+                    } else if (encodedColumnQualifier > 
minMaxQualifiers.getSecond()) {
+                        minMaxQualifiers.setSecond(encodedColumnQualifier);
+                    }
+                }
+            }
+        }
+        return toReturn;
+    }
     
 }

http://git-wip-us.apache.org/repos/asf/phoenix/blob/ffb984f4/phoenix-core/src/main/java/org/apache/phoenix/util/SchemaUtil.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/util/SchemaUtil.java 
b/phoenix-core/src/main/java/org/apache/phoenix/util/SchemaUtil.java
index 6081d13..964bacc 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/util/SchemaUtil.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/util/SchemaUtil.java
@@ -47,7 +47,6 @@ import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
 import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
 import org.apache.hadoop.hbase.util.Bytes;
-import org.apache.hadoop.hbase.util.Pair;
 import org.apache.phoenix.exception.DataExceedsCapacityException;
 import org.apache.phoenix.exception.SQLExceptionCode;
 import org.apache.phoenix.exception.SQLExceptionInfo;
@@ -71,6 +70,7 @@ import org.apache.phoenix.schema.PNameFactory;
 import org.apache.phoenix.schema.PTable;
 import org.apache.phoenix.schema.PTableType;
 import org.apache.phoenix.schema.RowKeySchema;
+import org.apache.phoenix.schema.PTable.QualifierEncodingScheme;
 import org.apache.phoenix.schema.PTable.StorageScheme;
 import org.apache.phoenix.schema.RowKeySchema.RowKeySchemaBuilder;
 import org.apache.phoenix.schema.SaltingUtil;
@@ -1094,29 +1094,4 @@ public class SchemaUtil {
             }
         }
     }
-    
-    public static Map<String, Pair<Integer, Integer>> 
getQualifierRanges(PTable table) {
-        Preconditions.checkArgument(table.getStorageScheme() == 
StorageScheme.ONE_CELL_PER_KEYVALUE_COLUMN,
-            "Use this method only for tables with storage scheme "
-                    + StorageScheme.ONE_CELL_PER_KEYVALUE_COLUMN.name());
-        Map<String, Pair<Integer, Integer>> toReturn = 
Maps.newHashMapWithExpectedSize(table.getColumns().size());
-        for (PColumn column : table.getColumns()) {
-            if (!isPKColumn(column)) {
-                String colFamily = column.getFamilyName().getString();
-                Pair<Integer, Integer> minMaxQualifiers = 
toReturn.get(colFamily);
-                Integer encodedColumnQualifier = 
column.getEncodedColumnQualifier();
-                if (minMaxQualifiers == null) {
-                    minMaxQualifiers = new Pair<>(encodedColumnQualifier, 
encodedColumnQualifier);
-                    toReturn.put(colFamily, minMaxQualifiers);
-                } else {
-                    if (encodedColumnQualifier < minMaxQualifiers.getFirst()) {
-                        minMaxQualifiers.setFirst(encodedColumnQualifier);
-                    } else if (encodedColumnQualifier > 
minMaxQualifiers.getSecond()) {
-                        minMaxQualifiers.setSecond(encodedColumnQualifier);
-                    }
-                }
-            }
-        }
-        return toReturn;
-    }
 }

Reply via email to