Repository: hive Updated Branches: refs/heads/branch-3 4397f38c9 -> 818c8cd50
HIVE-19131: DecimalColumnStatsMergerTest comparison review (Laszlo Bodor via Zoltan Haindrich) Signed-off-by: Zoltan Haindrich <k...@rxd.hu> Project: http://git-wip-us.apache.org/repos/asf/hive/repo Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/818c8cd5 Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/818c8cd5 Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/818c8cd5 Branch: refs/heads/branch-3 Commit: 818c8cd50c01e6b2ecac326608cd26226b30b6a4 Parents: 4397f38 Author: Laszlo Bodor <bodorlaszlo0...@gmail.com> Authored: Mon Apr 23 13:14:51 2018 +0200 Committer: Zoltan Haindrich <k...@rxd.hu> Committed: Mon Apr 23 13:26:47 2018 +0200 ---------------------------------------------------------------------- .../hive/ql/exec/ColumnStatsUpdateTask.java | 5 +- .../ql/stats/ColumnStatisticsObjTranslator.java | 3 +- .../stats_analyze_decimal_compare.q | 4 + .../stats_analyze_decimal_compare.q.out | 45 +++++ .../gen/thrift/gen-cpp/hive_metastore_types.cpp | 40 ++--- .../gen/thrift/gen-cpp/hive_metastore_types.h | 12 +- .../hadoop/hive/metastore/api/Decimal.java | 170 +++++++++---------- .../src/gen/thrift/gen-php/metastore/Types.php | 34 ++-- .../gen/thrift/gen-py/hive_metastore/ttypes.py | 24 +-- .../gen/thrift/gen-rb/hive_metastore_types.rb | 8 +- .../hive/metastore/StatObjectConverter.java | 38 ++--- .../hive/metastore/api/utils/DecimalUtils.java | 49 ++++++ .../aggr/DecimalColumnStatsAggregator.java | 5 +- .../src/main/thrift/hive_metastore.thrift | 4 +- .../merge/DecimalColumnStatsMergerTest.java | 23 +-- 15 files changed, 275 insertions(+), 189 deletions(-) ---------------------------------------------------------------------- http://git-wip-us.apache.org/repos/asf/hive/blob/818c8cd5/ql/src/java/org/apache/hadoop/hive/ql/exec/ColumnStatsUpdateTask.java ---------------------------------------------------------------------- diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/ColumnStatsUpdateTask.java 
b/ql/src/java/org/apache/hadoop/hive/ql/exec/ColumnStatsUpdateTask.java index a7465a7..207b66f 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/ColumnStatsUpdateTask.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/ColumnStatsUpdateTask.java @@ -36,6 +36,7 @@ import org.apache.hadoop.hive.metastore.api.Date; import org.apache.hadoop.hive.metastore.api.Decimal; import org.apache.hadoop.hive.metastore.api.MetaException; import org.apache.hadoop.hive.metastore.api.SetPartitionsStatsRequest; +import org.apache.hadoop.hive.metastore.api.utils.DecimalUtils; import org.apache.hadoop.hive.metastore.columnstats.cache.DateColumnStatsDataInspector; import org.apache.hadoop.hive.metastore.columnstats.cache.DecimalColumnStatsDataInspector; import org.apache.hadoop.hive.metastore.columnstats.cache.DoubleColumnStatsDataInspector; @@ -226,11 +227,11 @@ public class ColumnStatsUpdateTask extends Task<ColumnStatsUpdateWork> { decimalStats.setNumDVs(Long.parseLong(value)); } else if (fName.equals("lowValue")) { BigDecimal d = new BigDecimal(value); - decimalStats.setLowValue(new Decimal(ByteBuffer.wrap(d + decimalStats.setLowValue(DecimalUtils.getDecimal(ByteBuffer.wrap(d .unscaledValue().toByteArray()), (short) d.scale())); } else if (fName.equals("highValue")) { BigDecimal d = new BigDecimal(value); - decimalStats.setHighValue(new Decimal(ByteBuffer.wrap(d + decimalStats.setHighValue(DecimalUtils.getDecimal(ByteBuffer.wrap(d .unscaledValue().toByteArray()), (short) d.scale())); } else { throw new SemanticException("Unknown stat"); http://git-wip-us.apache.org/repos/asf/hive/blob/818c8cd5/ql/src/java/org/apache/hadoop/hive/ql/stats/ColumnStatisticsObjTranslator.java ---------------------------------------------------------------------- diff --git a/ql/src/java/org/apache/hadoop/hive/ql/stats/ColumnStatisticsObjTranslator.java b/ql/src/java/org/apache/hadoop/hive/ql/stats/ColumnStatisticsObjTranslator.java index 08cda4a..607545d 100644 --- 
a/ql/src/java/org/apache/hadoop/hive/ql/stats/ColumnStatisticsObjTranslator.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/stats/ColumnStatisticsObjTranslator.java @@ -28,6 +28,7 @@ import org.apache.hadoop.hive.metastore.api.ColumnStatisticsData; import org.apache.hadoop.hive.metastore.api.ColumnStatisticsObj; import org.apache.hadoop.hive.metastore.api.Date; import org.apache.hadoop.hive.metastore.api.Decimal; +import org.apache.hadoop.hive.metastore.api.utils.DecimalUtils; import org.apache.hadoop.hive.metastore.columnstats.cache.DateColumnStatsDataInspector; import org.apache.hadoop.hive.metastore.columnstats.cache.DecimalColumnStatsDataInspector; import org.apache.hadoop.hive.metastore.columnstats.cache.DoubleColumnStatsDataInspector; @@ -130,7 +131,7 @@ public class ColumnStatisticsObjTranslator { } private static Decimal convertToThriftDecimal(HiveDecimal d) { - return new Decimal(ByteBuffer.wrap(d.unscaledValue().toByteArray()), (short) d.scale()); + return DecimalUtils.getDecimal(ByteBuffer.wrap(d.unscaledValue().toByteArray()), (short) d.scale()); } private static void unpackLongStats(ObjectInspector oi, Object o, String fName, ColumnStatisticsObj statsObj) { http://git-wip-us.apache.org/repos/asf/hive/blob/818c8cd5/ql/src/test/queries/clientpositive/stats_analyze_decimal_compare.q ---------------------------------------------------------------------- diff --git a/ql/src/test/queries/clientpositive/stats_analyze_decimal_compare.q b/ql/src/test/queries/clientpositive/stats_analyze_decimal_compare.q new file mode 100644 index 0000000..9e73be5 --- /dev/null +++ b/ql/src/test/queries/clientpositive/stats_analyze_decimal_compare.q @@ -0,0 +1,4 @@ +create table stats_analyze_decimal_compare (a decimal) tblproperties ("transactional"="false"); +insert into stats_analyze_decimal_compare values (5); +insert into stats_analyze_decimal_compare values (10); +desc formatted stats_analyze_decimal_compare a; 
http://git-wip-us.apache.org/repos/asf/hive/blob/818c8cd5/ql/src/test/results/clientpositive/stats_analyze_decimal_compare.q.out ---------------------------------------------------------------------- diff --git a/ql/src/test/results/clientpositive/stats_analyze_decimal_compare.q.out b/ql/src/test/results/clientpositive/stats_analyze_decimal_compare.q.out new file mode 100644 index 0000000..e41a7be --- /dev/null +++ b/ql/src/test/results/clientpositive/stats_analyze_decimal_compare.q.out @@ -0,0 +1,45 @@ +PREHOOK: query: create table stats_analyze_decimal_compare (a decimal) tblproperties ("transactional"="false") +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@stats_analyze_decimal_compare +POSTHOOK: query: create table stats_analyze_decimal_compare (a decimal) tblproperties ("transactional"="false") +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@stats_analyze_decimal_compare +PREHOOK: query: insert into stats_analyze_decimal_compare values (5) +PREHOOK: type: QUERY +PREHOOK: Input: _dummy_database@_dummy_table +PREHOOK: Output: default@stats_analyze_decimal_compare +POSTHOOK: query: insert into stats_analyze_decimal_compare values (5) +POSTHOOK: type: QUERY +POSTHOOK: Input: _dummy_database@_dummy_table +POSTHOOK: Output: default@stats_analyze_decimal_compare +POSTHOOK: Lineage: stats_analyze_decimal_compare.a SCRIPT [] +PREHOOK: query: insert into stats_analyze_decimal_compare values (10) +PREHOOK: type: QUERY +PREHOOK: Input: _dummy_database@_dummy_table +PREHOOK: Output: default@stats_analyze_decimal_compare +POSTHOOK: query: insert into stats_analyze_decimal_compare values (10) +POSTHOOK: type: QUERY +POSTHOOK: Input: _dummy_database@_dummy_table +POSTHOOK: Output: default@stats_analyze_decimal_compare +POSTHOOK: Lineage: stats_analyze_decimal_compare.a SCRIPT [] +PREHOOK: query: desc formatted stats_analyze_decimal_compare a +PREHOOK: type: DESCTABLE +PREHOOK: Input: 
default@stats_analyze_decimal_compare +POSTHOOK: query: desc formatted stats_analyze_decimal_compare a +POSTHOOK: type: DESCTABLE +POSTHOOK: Input: default@stats_analyze_decimal_compare +col_name a +data_type decimal(10,0) +min 5 +max 10 +num_nulls 0 +distinct_count 2 +avg_col_len +max_col_len +num_trues +num_falses +bitVector HL +comment from deserializer +COLUMN_STATS_ACCURATE {\"BASIC_STATS\":\"true\",\"COLUMN_STATS\":{\"a\":\"true\"}} http://git-wip-us.apache.org/repos/asf/hive/blob/818c8cd5/standalone-metastore/src/gen/thrift/gen-cpp/hive_metastore_types.cpp ---------------------------------------------------------------------- diff --git a/standalone-metastore/src/gen/thrift/gen-cpp/hive_metastore_types.cpp b/standalone-metastore/src/gen/thrift/gen-cpp/hive_metastore_types.cpp index 9902427..052b595 100644 --- a/standalone-metastore/src/gen/thrift/gen-cpp/hive_metastore_types.cpp +++ b/standalone-metastore/src/gen/thrift/gen-cpp/hive_metastore_types.cpp @@ -8491,14 +8491,14 @@ Decimal::~Decimal() throw() { } -void Decimal::__set_unscaled(const std::string& val) { - this->unscaled = val; -} - void Decimal::__set_scale(const int16_t val) { this->scale = val; } +void Decimal::__set_unscaled(const std::string& val) { + this->unscaled = val; +} + uint32_t Decimal::read(::apache::thrift::protocol::TProtocol* iprot) { apache::thrift::protocol::TInputRecursionTracker tracker(*iprot); @@ -8511,8 +8511,8 @@ uint32_t Decimal::read(::apache::thrift::protocol::TProtocol* iprot) { using ::apache::thrift::protocol::TProtocolException; - bool isset_unscaled = false; bool isset_scale = false; + bool isset_unscaled = false; while (true) { @@ -8522,14 +8522,6 @@ uint32_t Decimal::read(::apache::thrift::protocol::TProtocol* iprot) { } switch (fid) { - case 1: - if (ftype == ::apache::thrift::protocol::T_STRING) { - xfer += iprot->readBinary(this->unscaled); - isset_unscaled = true; - } else { - xfer += iprot->skip(ftype); - } - break; case 3: if (ftype == 
::apache::thrift::protocol::T_I16) { xfer += iprot->readI16(this->scale); @@ -8538,6 +8530,14 @@ uint32_t Decimal::read(::apache::thrift::protocol::TProtocol* iprot) { xfer += iprot->skip(ftype); } break; + case 1: + if (ftype == ::apache::thrift::protocol::T_STRING) { + xfer += iprot->readBinary(this->unscaled); + isset_unscaled = true; + } else { + xfer += iprot->skip(ftype); + } + break; default: xfer += iprot->skip(ftype); break; @@ -8547,10 +8547,10 @@ uint32_t Decimal::read(::apache::thrift::protocol::TProtocol* iprot) { xfer += iprot->readStructEnd(); - if (!isset_unscaled) - throw TProtocolException(TProtocolException::INVALID_DATA); if (!isset_scale) throw TProtocolException(TProtocolException::INVALID_DATA); + if (!isset_unscaled) + throw TProtocolException(TProtocolException::INVALID_DATA); return xfer; } @@ -8574,24 +8574,24 @@ uint32_t Decimal::write(::apache::thrift::protocol::TProtocol* oprot) const { void swap(Decimal &a, Decimal &b) { using ::std::swap; - swap(a.unscaled, b.unscaled); swap(a.scale, b.scale); + swap(a.unscaled, b.unscaled); } Decimal::Decimal(const Decimal& other310) { - unscaled = other310.unscaled; scale = other310.scale; + unscaled = other310.unscaled; } Decimal& Decimal::operator=(const Decimal& other311) { - unscaled = other311.unscaled; scale = other311.scale; + unscaled = other311.unscaled; return *this; } void Decimal::printTo(std::ostream& out) const { using ::apache::thrift::to_string; out << "Decimal("; - out << "unscaled=" << to_string(unscaled); - out << ", " << "scale=" << to_string(scale); + out << "scale=" << to_string(scale); + out << ", " << "unscaled=" << to_string(unscaled); out << ")"; } http://git-wip-us.apache.org/repos/asf/hive/blob/818c8cd5/standalone-metastore/src/gen/thrift/gen-cpp/hive_metastore_types.h ---------------------------------------------------------------------- diff --git a/standalone-metastore/src/gen/thrift/gen-cpp/hive_metastore_types.h 
b/standalone-metastore/src/gen/thrift/gen-cpp/hive_metastore_types.h index 2c95007..6b872a6 100644 --- a/standalone-metastore/src/gen/thrift/gen-cpp/hive_metastore_types.h +++ b/standalone-metastore/src/gen/thrift/gen-cpp/hive_metastore_types.h @@ -3890,23 +3890,23 @@ class Decimal { Decimal(const Decimal&); Decimal& operator=(const Decimal&); - Decimal() : unscaled(), scale(0) { + Decimal() : scale(0), unscaled() { } virtual ~Decimal() throw(); - std::string unscaled; int16_t scale; - - void __set_unscaled(const std::string& val); + std::string unscaled; void __set_scale(const int16_t val); + void __set_unscaled(const std::string& val); + bool operator == (const Decimal & rhs) const { - if (!(unscaled == rhs.unscaled)) - return false; if (!(scale == rhs.scale)) return false; + if (!(unscaled == rhs.unscaled)) + return false; return true; } bool operator != (const Decimal &rhs) const { http://git-wip-us.apache.org/repos/asf/hive/blob/818c8cd5/standalone-metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/Decimal.java ---------------------------------------------------------------------- diff --git a/standalone-metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/Decimal.java b/standalone-metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/Decimal.java index 895b9e2..361d58a 100644 --- a/standalone-metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/Decimal.java +++ b/standalone-metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/Decimal.java @@ -38,8 +38,8 @@ import org.slf4j.LoggerFactory; @org.apache.hadoop.classification.InterfaceAudience.Public @org.apache.hadoop.classification.InterfaceStability.Stable public class Decimal implements org.apache.thrift.TBase<Decimal, Decimal._Fields>, java.io.Serializable, Cloneable, Comparable<Decimal> { private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new 
org.apache.thrift.protocol.TStruct("Decimal"); - private static final org.apache.thrift.protocol.TField UNSCALED_FIELD_DESC = new org.apache.thrift.protocol.TField("unscaled", org.apache.thrift.protocol.TType.STRING, (short)1); private static final org.apache.thrift.protocol.TField SCALE_FIELD_DESC = new org.apache.thrift.protocol.TField("scale", org.apache.thrift.protocol.TType.I16, (short)3); + private static final org.apache.thrift.protocol.TField UNSCALED_FIELD_DESC = new org.apache.thrift.protocol.TField("unscaled", org.apache.thrift.protocol.TType.STRING, (short)1); private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>(); static { @@ -47,13 +47,13 @@ import org.slf4j.LoggerFactory; schemes.put(TupleScheme.class, new DecimalTupleSchemeFactory()); } - private ByteBuffer unscaled; // required private short scale; // required + private ByteBuffer unscaled; // required /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. 
*/ public enum _Fields implements org.apache.thrift.TFieldIdEnum { - UNSCALED((short)1, "unscaled"), - SCALE((short)3, "scale"); + SCALE((short)3, "scale"), + UNSCALED((short)1, "unscaled"); private static final Map<String, _Fields> byName = new HashMap<String, _Fields>(); @@ -68,10 +68,10 @@ import org.slf4j.LoggerFactory; */ public static _Fields findByThriftId(int fieldId) { switch(fieldId) { - case 1: // UNSCALED - return UNSCALED; case 3: // SCALE return SCALE; + case 1: // UNSCALED + return UNSCALED; default: return null; } @@ -117,10 +117,10 @@ import org.slf4j.LoggerFactory; public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap; static { Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class); - tmpMap.put(_Fields.UNSCALED, new org.apache.thrift.meta_data.FieldMetaData("unscaled", org.apache.thrift.TFieldRequirementType.REQUIRED, - new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING , true))); tmpMap.put(_Fields.SCALE, new org.apache.thrift.meta_data.FieldMetaData("scale", org.apache.thrift.TFieldRequirementType.REQUIRED, new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.I16))); + tmpMap.put(_Fields.UNSCALED, new org.apache.thrift.meta_data.FieldMetaData("unscaled", org.apache.thrift.TFieldRequirementType.REQUIRED, + new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING , true))); metaDataMap = Collections.unmodifiableMap(tmpMap); org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(Decimal.class, metaDataMap); } @@ -129,13 +129,13 @@ import org.slf4j.LoggerFactory; } public Decimal( - ByteBuffer unscaled, - short scale) + short scale, + ByteBuffer unscaled) { this(); - this.unscaled = org.apache.thrift.TBaseHelper.copyBinary(unscaled); this.scale = scale; setScaleIsSet(true); + this.unscaled = 
org.apache.thrift.TBaseHelper.copyBinary(unscaled); } /** @@ -143,10 +143,10 @@ import org.slf4j.LoggerFactory; */ public Decimal(Decimal other) { __isset_bitfield = other.__isset_bitfield; + this.scale = other.scale; if (other.isSetUnscaled()) { this.unscaled = org.apache.thrift.TBaseHelper.copyBinary(other.unscaled); } - this.scale = other.scale; } public Decimal deepCopy() { @@ -155,9 +155,31 @@ import org.slf4j.LoggerFactory; @Override public void clear() { - this.unscaled = null; setScaleIsSet(false); this.scale = 0; + this.unscaled = null; + } + + public short getScale() { + return this.scale; + } + + public void setScale(short scale) { + this.scale = scale; + setScaleIsSet(true); + } + + public void unsetScale() { + __isset_bitfield = EncodingUtils.clearBit(__isset_bitfield, __SCALE_ISSET_ID); + } + + /** Returns true if field scale is set (has been assigned a value) and false otherwise */ + public boolean isSetScale() { + return EncodingUtils.testBit(__isset_bitfield, __SCALE_ISSET_ID); + } + + public void setScaleIsSet(boolean value) { + __isset_bitfield = EncodingUtils.setBit(__isset_bitfield, __SCALE_ISSET_ID, value); } public byte[] getUnscaled() { @@ -192,43 +214,21 @@ import org.slf4j.LoggerFactory; } } - public short getScale() { - return this.scale; - } - - public void setScale(short scale) { - this.scale = scale; - setScaleIsSet(true); - } - - public void unsetScale() { - __isset_bitfield = EncodingUtils.clearBit(__isset_bitfield, __SCALE_ISSET_ID); - } - - /** Returns true if field scale is set (has been assigned a value) and false otherwise */ - public boolean isSetScale() { - return EncodingUtils.testBit(__isset_bitfield, __SCALE_ISSET_ID); - } - - public void setScaleIsSet(boolean value) { - __isset_bitfield = EncodingUtils.setBit(__isset_bitfield, __SCALE_ISSET_ID, value); - } - public void setFieldValue(_Fields field, Object value) { switch (field) { - case UNSCALED: + case SCALE: if (value == null) { - unsetUnscaled(); + unsetScale(); } else 
{ - setUnscaled((ByteBuffer)value); + setScale((Short)value); } break; - case SCALE: + case UNSCALED: if (value == null) { - unsetScale(); + unsetUnscaled(); } else { - setScale((Short)value); + setUnscaled((ByteBuffer)value); } break; @@ -237,12 +237,12 @@ import org.slf4j.LoggerFactory; public Object getFieldValue(_Fields field) { switch (field) { - case UNSCALED: - return getUnscaled(); - case SCALE: return getScale(); + case UNSCALED: + return getUnscaled(); + } throw new IllegalStateException(); } @@ -254,10 +254,10 @@ import org.slf4j.LoggerFactory; } switch (field) { - case UNSCALED: - return isSetUnscaled(); case SCALE: return isSetScale(); + case UNSCALED: + return isSetUnscaled(); } throw new IllegalStateException(); } @@ -275,15 +275,6 @@ import org.slf4j.LoggerFactory; if (that == null) return false; - boolean this_present_unscaled = true && this.isSetUnscaled(); - boolean that_present_unscaled = true && that.isSetUnscaled(); - if (this_present_unscaled || that_present_unscaled) { - if (!(this_present_unscaled && that_present_unscaled)) - return false; - if (!this.unscaled.equals(that.unscaled)) - return false; - } - boolean this_present_scale = true; boolean that_present_scale = true; if (this_present_scale || that_present_scale) { @@ -293,6 +284,15 @@ import org.slf4j.LoggerFactory; return false; } + boolean this_present_unscaled = true && this.isSetUnscaled(); + boolean that_present_unscaled = true && that.isSetUnscaled(); + if (this_present_unscaled || that_present_unscaled) { + if (!(this_present_unscaled && that_present_unscaled)) + return false; + if (!this.unscaled.equals(that.unscaled)) + return false; + } + return true; } @@ -300,16 +300,16 @@ import org.slf4j.LoggerFactory; public int hashCode() { List<Object> list = new ArrayList<Object>(); - boolean present_unscaled = true && (isSetUnscaled()); - list.add(present_unscaled); - if (present_unscaled) - list.add(unscaled); - boolean present_scale = true; list.add(present_scale); if 
(present_scale) list.add(scale); + boolean present_unscaled = true && (isSetUnscaled()); + list.add(present_unscaled); + if (present_unscaled) + list.add(unscaled); + return list.hashCode(); } @@ -321,22 +321,22 @@ import org.slf4j.LoggerFactory; int lastComparison = 0; - lastComparison = Boolean.valueOf(isSetUnscaled()).compareTo(other.isSetUnscaled()); + lastComparison = Boolean.valueOf(isSetScale()).compareTo(other.isSetScale()); if (lastComparison != 0) { return lastComparison; } - if (isSetUnscaled()) { - lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.unscaled, other.unscaled); + if (isSetScale()) { + lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.scale, other.scale); if (lastComparison != 0) { return lastComparison; } } - lastComparison = Boolean.valueOf(isSetScale()).compareTo(other.isSetScale()); + lastComparison = Boolean.valueOf(isSetUnscaled()).compareTo(other.isSetUnscaled()); if (lastComparison != 0) { return lastComparison; } - if (isSetScale()) { - lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.scale, other.scale); + if (isSetUnscaled()) { + lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.unscaled, other.unscaled); if (lastComparison != 0) { return lastComparison; } @@ -361,6 +361,10 @@ import org.slf4j.LoggerFactory; StringBuilder sb = new StringBuilder("Decimal("); boolean first = true; + sb.append("scale:"); + sb.append(this.scale); + first = false; + if (!first) sb.append(", "); sb.append("unscaled:"); if (this.unscaled == null) { sb.append("null"); @@ -368,24 +372,20 @@ import org.slf4j.LoggerFactory; org.apache.thrift.TBaseHelper.toString(this.unscaled, sb); } first = false; - if (!first) sb.append(", "); - sb.append("scale:"); - sb.append(this.scale); - first = false; sb.append(")"); return sb.toString(); } public void validate() throws org.apache.thrift.TException { // check for required fields - if (!isSetUnscaled()) { - throw new 
org.apache.thrift.protocol.TProtocolException("Required field 'unscaled' is unset! Struct:" + toString()); - } - if (!isSetScale()) { throw new org.apache.thrift.protocol.TProtocolException("Required field 'scale' is unset! Struct:" + toString()); } + if (!isSetUnscaled()) { + throw new org.apache.thrift.protocol.TProtocolException("Required field 'unscaled' is unset! Struct:" + toString()); + } + // check for sub-struct validity } @@ -425,14 +425,6 @@ import org.slf4j.LoggerFactory; break; } switch (schemeField.id) { - case 1: // UNSCALED - if (schemeField.type == org.apache.thrift.protocol.TType.STRING) { - struct.unscaled = iprot.readBinary(); - struct.setUnscaledIsSet(true); - } else { - org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); - } - break; case 3: // SCALE if (schemeField.type == org.apache.thrift.protocol.TType.I16) { struct.scale = iprot.readI16(); @@ -441,6 +433,14 @@ import org.slf4j.LoggerFactory; org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } break; + case 1: // UNSCALED + if (schemeField.type == org.apache.thrift.protocol.TType.STRING) { + struct.unscaled = iprot.readBinary(); + struct.setUnscaledIsSet(true); + } else { + org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); + } + break; default: org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } @@ -479,17 +479,17 @@ import org.slf4j.LoggerFactory; @Override public void write(org.apache.thrift.protocol.TProtocol prot, Decimal struct) throws org.apache.thrift.TException { TTupleProtocol oprot = (TTupleProtocol) prot; - oprot.writeBinary(struct.unscaled); oprot.writeI16(struct.scale); + oprot.writeBinary(struct.unscaled); } @Override public void read(org.apache.thrift.protocol.TProtocol prot, Decimal struct) throws org.apache.thrift.TException { TTupleProtocol iprot = (TTupleProtocol) prot; - struct.unscaled = iprot.readBinary(); - struct.setUnscaledIsSet(true); struct.scale = iprot.readI16(); 
struct.setScaleIsSet(true); + struct.unscaled = iprot.readBinary(); + struct.setUnscaledIsSet(true); } } http://git-wip-us.apache.org/repos/asf/hive/blob/818c8cd5/standalone-metastore/src/gen/thrift/gen-php/metastore/Types.php ---------------------------------------------------------------------- diff --git a/standalone-metastore/src/gen/thrift/gen-php/metastore/Types.php b/standalone-metastore/src/gen/thrift/gen-php/metastore/Types.php index c4969d5..1625788 100644 --- a/standalone-metastore/src/gen/thrift/gen-php/metastore/Types.php +++ b/standalone-metastore/src/gen/thrift/gen-php/metastore/Types.php @@ -8647,34 +8647,34 @@ class Decimal { static $_TSPEC; /** - * @var string - */ - public $unscaled = null; - /** * @var int */ public $scale = null; + /** + * @var string + */ + public $unscaled = null; public function __construct($vals=null) { if (!isset(self::$_TSPEC)) { self::$_TSPEC = array( - 1 => array( - 'var' => 'unscaled', - 'type' => TType::STRING, - ), 3 => array( 'var' => 'scale', 'type' => TType::I16, ), + 1 => array( + 'var' => 'unscaled', + 'type' => TType::STRING, + ), ); } if (is_array($vals)) { - if (isset($vals['unscaled'])) { - $this->unscaled = $vals['unscaled']; - } if (isset($vals['scale'])) { $this->scale = $vals['scale']; } + if (isset($vals['unscaled'])) { + $this->unscaled = $vals['unscaled']; + } } } @@ -8697,16 +8697,16 @@ class Decimal { } switch ($fid) { - case 1: - if ($ftype == TType::STRING) { - $xfer += $input->readString($this->unscaled); + case 3: + if ($ftype == TType::I16) { + $xfer += $input->readI16($this->scale); } else { $xfer += $input->skip($ftype); } break; - case 3: - if ($ftype == TType::I16) { - $xfer += $input->readI16($this->scale); + case 1: + if ($ftype == TType::STRING) { + $xfer += $input->readString($this->unscaled); } else { $xfer += $input->skip($ftype); } http://git-wip-us.apache.org/repos/asf/hive/blob/818c8cd5/standalone-metastore/src/gen/thrift/gen-py/hive_metastore/ttypes.py 
---------------------------------------------------------------------- diff --git a/standalone-metastore/src/gen/thrift/gen-py/hive_metastore/ttypes.py b/standalone-metastore/src/gen/thrift/gen-py/hive_metastore/ttypes.py index 9bf9843..1d09cb8 100644 --- a/standalone-metastore/src/gen/thrift/gen-py/hive_metastore/ttypes.py +++ b/standalone-metastore/src/gen/thrift/gen-py/hive_metastore/ttypes.py @@ -5954,8 +5954,8 @@ class BinaryColumnStatsData: class Decimal: """ Attributes: - - unscaled - scale + - unscaled """ thrift_spec = ( @@ -5965,9 +5965,9 @@ class Decimal: (3, TType.I16, 'scale', None, None, ), # 3 ) - def __init__(self, unscaled=None, scale=None,): - self.unscaled = unscaled + def __init__(self, scale=None, unscaled=None,): self.scale = scale + self.unscaled = unscaled def read(self, iprot): if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None: @@ -5978,16 +5978,16 @@ class Decimal: (fname, ftype, fid) = iprot.readFieldBegin() if ftype == TType.STOP: break - if fid == 1: - if ftype == TType.STRING: - self.unscaled = iprot.readString() - else: - iprot.skip(ftype) - elif fid == 3: + if fid == 3: if ftype == TType.I16: self.scale = iprot.readI16() else: iprot.skip(ftype) + elif fid == 1: + if ftype == TType.STRING: + self.unscaled = iprot.readString() + else: + iprot.skip(ftype) else: iprot.skip(ftype) iprot.readFieldEnd() @@ -6010,17 +6010,17 @@ class Decimal: oprot.writeStructEnd() def validate(self): - if self.unscaled is None: - raise TProtocol.TProtocolException(message='Required field unscaled is unset!') if self.scale is None: raise TProtocol.TProtocolException(message='Required field scale is unset!') + if self.unscaled is None: + raise TProtocol.TProtocolException(message='Required field unscaled is unset!') return def __hash__(self): value = 17 - value = (value * 31) ^ hash(self.unscaled) value = (value * 31) ^ 
hash(self.scale) + value = (value * 31) ^ hash(self.unscaled) return value def __repr__(self): http://git-wip-us.apache.org/repos/asf/hive/blob/818c8cd5/standalone-metastore/src/gen/thrift/gen-rb/hive_metastore_types.rb ---------------------------------------------------------------------- diff --git a/standalone-metastore/src/gen/thrift/gen-rb/hive_metastore_types.rb b/standalone-metastore/src/gen/thrift/gen-rb/hive_metastore_types.rb index 3dbe4d8..47e8d0f 100644 --- a/standalone-metastore/src/gen/thrift/gen-rb/hive_metastore_types.rb +++ b/standalone-metastore/src/gen/thrift/gen-rb/hive_metastore_types.rb @@ -1320,19 +1320,19 @@ end class Decimal include ::Thrift::Struct, ::Thrift::Struct_Union - UNSCALED = 1 SCALE = 3 + UNSCALED = 1 FIELDS = { - UNSCALED => {:type => ::Thrift::Types::STRING, :name => 'unscaled', :binary => true}, - SCALE => {:type => ::Thrift::Types::I16, :name => 'scale'} + SCALE => {:type => ::Thrift::Types::I16, :name => 'scale'}, + UNSCALED => {:type => ::Thrift::Types::STRING, :name => 'unscaled', :binary => true} } def struct_fields; FIELDS; end def validate - raise ::Thrift::ProtocolException.new(::Thrift::ProtocolException::UNKNOWN, 'Required field unscaled is unset!') unless @unscaled raise ::Thrift::ProtocolException.new(::Thrift::ProtocolException::UNKNOWN, 'Required field scale is unset!') unless @scale + raise ::Thrift::ProtocolException.new(::Thrift::ProtocolException::UNKNOWN, 'Required field unscaled is unset!') unless @unscaled end ::Thrift::Struct.generate_accessors self http://git-wip-us.apache.org/repos/asf/hive/blob/818c8cd5/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/StatObjectConverter.java ---------------------------------------------------------------------- diff --git a/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/StatObjectConverter.java b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/StatObjectConverter.java index 0074e01..ef2d670 100644 --- 
a/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/StatObjectConverter.java +++ b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/StatObjectConverter.java @@ -38,6 +38,7 @@ import org.apache.hadoop.hive.metastore.api.LongColumnStatsData; import org.apache.hadoop.hive.metastore.api.MetaException; import org.apache.hadoop.hive.metastore.api.NoSuchObjectException; import org.apache.hadoop.hive.metastore.api.StringColumnStatsData; +import org.apache.hadoop.hive.metastore.api.utils.DecimalUtils; import org.apache.hadoop.hive.metastore.columnstats.cache.DateColumnStatsDataInspector; import org.apache.hadoop.hive.metastore.columnstats.cache.DecimalColumnStatsDataInspector; import org.apache.hadoop.hive.metastore.columnstats.cache.DoubleColumnStatsDataInspector; @@ -97,8 +98,8 @@ public class StatObjectConverter { doubleStats.isSetHighValue() ? doubleStats.getHighValue() : null); } else if (statsObj.getStatsData().isSetDecimalStats()) { DecimalColumnStatsData decimalStats = statsObj.getStatsData().getDecimalStats(); - String low = decimalStats.isSetLowValue() ? createJdoDecimalString(decimalStats.getLowValue()) : null; - String high = decimalStats.isSetHighValue() ? createJdoDecimalString(decimalStats.getHighValue()) : null; + String low = decimalStats.isSetLowValue() ? DecimalUtils.createJdoDecimalString(decimalStats.getLowValue()) : null; + String high = decimalStats.isSetHighValue() ? DecimalUtils.createJdoDecimalString(decimalStats.getHighValue()) : null; mColStats.setDecimalStats( decimalStats.isSetNumNulls() ? decimalStats.getNumNulls() : null, decimalStats.isSetNumDVs() ? 
decimalStats.getNumDVs() : null, @@ -282,11 +283,11 @@ public class StatObjectConverter { decimalStats.setNumNulls(mStatsObj.getNumNulls()); String decimalHighValue = mStatsObj.getDecimalHighValue(); if (decimalHighValue != null) { - decimalStats.setHighValue(createThriftDecimal(decimalHighValue)); + decimalStats.setHighValue(DecimalUtils.createThriftDecimal(decimalHighValue)); } String decimalLowValue = mStatsObj.getDecimalLowValue(); if (decimalLowValue != null) { - decimalStats.setLowValue(createThriftDecimal(decimalLowValue)); + decimalStats.setLowValue(DecimalUtils.createThriftDecimal(decimalLowValue)); } decimalStats.setNumDVs(mStatsObj.getNumDVs()); decimalStats.setBitVectors((mStatsObj.getBitVector()==null||!enableBitVector)? null : mStatsObj.getBitVector()); @@ -362,8 +363,8 @@ public class StatObjectConverter { doubleStats.isSetHighValue() ? doubleStats.getHighValue() : null); } else if (statsObj.getStatsData().isSetDecimalStats()) { DecimalColumnStatsData decimalStats = statsObj.getStatsData().getDecimalStats(); - String low = decimalStats.isSetLowValue() ? createJdoDecimalString(decimalStats.getLowValue()) : null; - String high = decimalStats.isSetHighValue() ? createJdoDecimalString(decimalStats.getHighValue()) : null; + String low = decimalStats.isSetLowValue() ? DecimalUtils.createJdoDecimalString(decimalStats.getLowValue()) : null; + String high = decimalStats.isSetHighValue() ? DecimalUtils.createJdoDecimalString(decimalStats.getHighValue()) : null; mColStats.setDecimalStats( decimalStats.isSetNumNulls() ? decimalStats.getNumNulls() : null, decimalStats.isSetNumDVs() ? 
decimalStats.getNumDVs() : null, @@ -454,10 +455,10 @@ public class StatObjectConverter { DecimalColumnStatsDataInspector decimalStats = new DecimalColumnStatsDataInspector(); decimalStats.setNumNulls(mStatsObj.getNumNulls()); if (mStatsObj.getDecimalHighValue() != null) { - decimalStats.setHighValue(createThriftDecimal(mStatsObj.getDecimalHighValue())); + decimalStats.setHighValue(DecimalUtils.createThriftDecimal(mStatsObj.getDecimalHighValue())); } if (mStatsObj.getDecimalLowValue() != null) { - decimalStats.setLowValue(createThriftDecimal(mStatsObj.getDecimalLowValue())); + decimalStats.setLowValue(DecimalUtils.createThriftDecimal(mStatsObj.getDecimalLowValue())); } decimalStats.setNumDVs(mStatsObj.getNumDVs()); decimalStats.setBitVectors((mStatsObj.getBitVector()==null||!enableBitVector)? null : mStatsObj.getBitVector()); @@ -543,10 +544,10 @@ public class StatObjectConverter { DecimalColumnStatsDataInspector decimalStats = new DecimalColumnStatsDataInspector(); decimalStats.setNumNulls(MetaStoreDirectSql.extractSqlLong(nulls)); if (dechigh != null) { - decimalStats.setHighValue(createThriftDecimal((String)dechigh)); + decimalStats.setHighValue(DecimalUtils.createThriftDecimal((String)dechigh)); } if (declow != null) { - decimalStats.setLowValue(createThriftDecimal((String)declow)); + decimalStats.setLowValue(DecimalUtils.createThriftDecimal((String)declow)); } decimalStats.setNumDVs(MetaStoreDirectSql.extractSqlLong(dist)); decimalStats.setBitVectors(MetaStoreDirectSql.extractSqlBlob(bitVector)); @@ -700,19 +701,19 @@ public class StatObjectConverter { BigDecimal bhigh = null; if (dechigh instanceof BigDecimal) { bhigh = (BigDecimal) dechigh; - high = new Decimal(ByteBuffer.wrap(bhigh.unscaledValue().toByteArray()), + high = DecimalUtils.getDecimal(ByteBuffer.wrap(bhigh.unscaledValue().toByteArray()), (short) bhigh.scale()); } else if (dechigh instanceof String) { bhigh = new BigDecimal((String) dechigh); - high = createThriftDecimal((String) dechigh); + high 
= DecimalUtils.createThriftDecimal((String) dechigh); } decimalStats.setHighValue(high); if (declow instanceof BigDecimal) { blow = (BigDecimal) declow; - low = new Decimal(ByteBuffer.wrap(blow.unscaledValue().toByteArray()), (short) blow.scale()); + low = DecimalUtils.getDecimal(ByteBuffer.wrap(blow.unscaledValue().toByteArray()), (short) blow.scale()); } else if (dechigh instanceof String) { blow = new BigDecimal((String) declow); - low = createThriftDecimal((String) declow); + low = DecimalUtils.createThriftDecimal((String) declow); } decimalStats.setLowValue(low); long lowerBound = MetaStoreDirectSql.extractSqlLong(dist); @@ -735,15 +736,6 @@ public class StatObjectConverter { } } - public static Decimal createThriftDecimal(String s) { - BigDecimal d = new BigDecimal(s); - return new Decimal(ByteBuffer.wrap(d.unscaledValue().toByteArray()), (short)d.scale()); - } - - private static String createJdoDecimalString(Decimal d) { - return new BigDecimal(new BigInteger(d.getUnscaled()), d.getScale()).toString(); - } - /** * Set field values in oldStatObj from newStatObj * @param oldStatObj http://git-wip-us.apache.org/repos/asf/hive/blob/818c8cd5/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/api/utils/DecimalUtils.java ---------------------------------------------------------------------- diff --git a/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/api/utils/DecimalUtils.java b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/api/utils/DecimalUtils.java new file mode 100644 index 0000000..e5d8b0b --- /dev/null +++ b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/api/utils/DecimalUtils.java @@ -0,0 +1,49 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. 
The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.metastore.api.utils; + +import java.nio.ByteBuffer; +import java.math.BigDecimal; +import java.math.BigInteger; +import org.apache.hadoop.hive.metastore.api.Decimal; + +/** + * This class contains helper methods for handling the Thrift API's Decimal type. + */ +public class DecimalUtils { + + public static Decimal getDecimal(int number, int scale) { + ByteBuffer bb = ByteBuffer.allocate(4); + bb.asIntBuffer().put(number); + return new Decimal((short) scale, bb); + } + + public static Decimal getDecimal(ByteBuffer unscaled, short scale) { + return new Decimal((short) scale, unscaled); + } + + public static Decimal createThriftDecimal(String s) { + BigDecimal d = new BigDecimal(s); + return new Decimal((short) d.scale(), ByteBuffer.wrap(d.unscaledValue().toByteArray())); + } + + public static String createJdoDecimalString(Decimal d) { + return new BigDecimal(new BigInteger(d.getUnscaled()), d.getScale()).toString(); + } +} http://git-wip-us.apache.org/repos/asf/hive/blob/818c8cd5/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/columnstats/aggr/DecimalColumnStatsAggregator.java ---------------------------------------------------------------------- diff --git a/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/columnstats/aggr/DecimalColumnStatsAggregator.java
b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/columnstats/aggr/DecimalColumnStatsAggregator.java index 2715599..ac7e8e3 100644 --- a/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/columnstats/aggr/DecimalColumnStatsAggregator.java +++ b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/columnstats/aggr/DecimalColumnStatsAggregator.java @@ -33,6 +33,7 @@ import org.apache.hadoop.hive.metastore.api.ColumnStatisticsData; import org.apache.hadoop.hive.metastore.api.ColumnStatisticsObj; import org.apache.hadoop.hive.metastore.api.DecimalColumnStatsData; import org.apache.hadoop.hive.metastore.api.MetaException; +import org.apache.hadoop.hive.metastore.api.utils.DecimalUtils; import org.apache.hadoop.hive.metastore.columnstats.cache.DecimalColumnStatsDataInspector; import org.apache.hadoop.hive.metastore.utils.MetaStoreUtils; import org.apache.hadoop.hive.metastore.utils.MetaStoreUtils.ColStatsObjWithSourceInfo; @@ -363,9 +364,9 @@ public class DecimalColumnStatsAggregator extends ColumnStatsAggregator implemen ndv = (long) (ndvMin + (ndvMax - ndvMin) * minInd / (minInd - maxInd)); } } - extrapolateDecimalData.setLowValue(StatObjectConverter.createThriftDecimal(String + extrapolateDecimalData.setLowValue(DecimalUtils.createThriftDecimal(String .valueOf(lowValue))); - extrapolateDecimalData.setHighValue(StatObjectConverter.createThriftDecimal(String + extrapolateDecimalData.setHighValue(DecimalUtils.createThriftDecimal(String .valueOf(highValue))); extrapolateDecimalData.setNumNulls(numNulls); extrapolateDecimalData.setNumDVs(ndv); http://git-wip-us.apache.org/repos/asf/hive/blob/818c8cd5/standalone-metastore/src/main/thrift/hive_metastore.thrift ---------------------------------------------------------------------- diff --git a/standalone-metastore/src/main/thrift/hive_metastore.thrift b/standalone-metastore/src/main/thrift/hive_metastore.thrift index 5bba329..12e4e40 100644 --- 
a/standalone-metastore/src/main/thrift/hive_metastore.thrift +++ b/standalone-metastore/src/main/thrift/hive_metastore.thrift @@ -506,8 +506,8 @@ struct BinaryColumnStatsData { struct Decimal { -1: required binary unscaled, -3: required i16 scale +3: required i16 scale, // force using scale first in Decimal.compareTo +1: required binary unscaled } struct DecimalColumnStatsData { http://git-wip-us.apache.org/repos/asf/hive/blob/818c8cd5/standalone-metastore/src/test/java/org/apache/hadoop/hive/metastore/columnstats/merge/DecimalColumnStatsMergerTest.java ---------------------------------------------------------------------- diff --git a/standalone-metastore/src/test/java/org/apache/hadoop/hive/metastore/columnstats/merge/DecimalColumnStatsMergerTest.java b/standalone-metastore/src/test/java/org/apache/hadoop/hive/metastore/columnstats/merge/DecimalColumnStatsMergerTest.java index 3b74d1e..8ec3a2f 100644 --- a/standalone-metastore/src/test/java/org/apache/hadoop/hive/metastore/columnstats/merge/DecimalColumnStatsMergerTest.java +++ b/standalone-metastore/src/test/java/org/apache/hadoop/hive/metastore/columnstats/merge/DecimalColumnStatsMergerTest.java @@ -25,6 +25,7 @@ import org.apache.hadoop.hive.metastore.annotation.MetastoreUnitTest; import org.apache.hadoop.hive.metastore.api.ColumnStatisticsData; import org.apache.hadoop.hive.metastore.api.ColumnStatisticsObj; import org.apache.hadoop.hive.metastore.api.Decimal; +import org.apache.hadoop.hive.metastore.api.utils.DecimalUtils; import org.apache.hadoop.hive.metastore.columnstats.cache.DecimalColumnStatsDataInspector; import org.junit.Assert; import org.junit.Test; @@ -33,9 +34,9 @@ import org.junit.experimental.categories.Category; @Category(MetastoreUnitTest.class) public class DecimalColumnStatsMergerTest { - private static final Decimal DECIMAL_3 = getDecimal(3, 0); - private static final Decimal DECIMAL_5 = getDecimal(5, 0); - private static final Decimal DECIMAL_20 = getDecimal(2, 1); + private static final 
Decimal DECIMAL_3 = DecimalUtils.getDecimal(3, 0); + private static final Decimal DECIMAL_5 = DecimalUtils.getDecimal(5, 0); + private static final Decimal DECIMAL_20 = DecimalUtils.getDecimal(2, 1); private DecimalColumnStatsMerger merger = new DecimalColumnStatsMerger(); @@ -184,12 +185,10 @@ public class DecimalColumnStatsMergerTest { Assert.assertEquals(DECIMAL_3, merger.getMin(DECIMAL_5, DECIMAL_3)); } - /* - * it should pass, but fails because of HIVE-19131, get back to this later! - * - * @Test public void testCompareUnscaledValue() { Assert.assertEquals(DECIMAL_20, - * merger.compareValues(DECIMAL_3, DECIMAL_20)); } - */ + @Test + public void testCompareUnscaledValue() { + Assert.assertEquals(DECIMAL_20, merger.getMax(DECIMAL_3, DECIMAL_20)); + } @Test public void testCompareNullsMin() { @@ -221,12 +220,6 @@ public class DecimalColumnStatsMergerTest { Assert.assertEquals(DECIMAL_3, merger.getMax(DECIMAL_3, null)); } - private static Decimal getDecimal(int number, int scale) { - ByteBuffer bb = ByteBuffer.allocate(4); - bb.asIntBuffer().put(number); - return new Decimal(bb, (short) scale); - } - private DecimalColumnStatsDataInspector createData(ColumnStatisticsObj objNulls, Decimal lowValue, Decimal highValue) { ColumnStatisticsData statisticsData = new ColumnStatisticsData();