[hive] branch master updated (989e72a -> c63d58d)
This is an automated email from the ASF dual-hosted git repository. dkuzmenko pushed a change to branch master in repository https://gitbox.apache.org/repos/asf/hive.git. from 989e72a HIVE-24390: Spelling fixes - serde (#2802) (Josh Soref reviewed by Zoltan Haindrich) add c63d58d HIVE-25115: Addendum: Cleaner queue order change (Denys Kuzmenko, reviewed by Krisztian Kasa) No new revisions were added by this update. Summary of changes: .../java/org/apache/hadoop/hive/metastore/txn/CompactionTxnHandler.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-)
[hive] branch master updated: HIVE-24390: Spelling fixes - serde (#2802) (Josh Soref reviewed by Zoltan Haindrich)
This is an automated email from the ASF dual-hosted git repository. kgyrtkirk pushed a commit to branch master in repository https://gitbox.apache.org/repos/asf/hive.git The following commit(s) were added to refs/heads/master by this push: new 989e72a HIVE-24390: Spelling fixes - serde (#2802) (Josh Soref reviewed by Zoltan Haindrich) 989e72a is described below commit 989e72a393356c5f91f96d1bab6455a4c75c77a7 Author: Josh Soref <2119212+jso...@users.noreply.github.com> AuthorDate: Tue Nov 23 03:14:51 2021 -0500 HIVE-24390: Spelling fixes - serde (#2802) (Josh Soref reviewed by Zoltan Haindrich) --- .../src/java/org/apache/hadoop/hive/serde2/RandomTypeUtil.java | 6 +++--- .../java/org/apache/hadoop/hive/serde2/SerDeStatsStruct.java | 2 +- serde/src/java/org/apache/hadoop/hive/serde2/SerDeUtils.java | 2 +- .../org/apache/hadoop/hive/serde2/avro/AvroDeserializer.java | 8 .../org/apache/hadoop/hive/serde2/avro/AvroSerializer.java | 2 +- .../org/apache/hadoop/hive/serde2/columnar/ColumnarSerDe.java | 2 +- .../org/apache/hadoop/hive/serde2/io/HiveCharWritable.java | 2 +- .../org/apache/hadoop/hive/serde2/json/HiveJsonReader.java | 2 +- .../org/apache/hadoop/hive/serde2/json/HiveJsonWriter.java | 8 .../java/org/apache/hadoop/hive/serde2/lazy/LazyFactory.java | 2 +- .../org/apache/hadoop/hive/serde2/lazy/LazyHiveDecimal.java| 2 +- .../java/org/apache/hadoop/hive/serde2/lazy/LazyTimestamp.java | 2 +- .../apache/hadoop/hive/serde2/lazy/fast/StringToDouble.java| 2 +- .../apache/hadoop/hive/serde2/lazybinary/LazyBinaryArray.java | 6 +++--- .../apache/hadoop/hive/serde2/lazybinary/LazyBinarySerDe2.java | 2 +- .../hive/serde2/objectinspector/ListObjectsEqualComparer.java | 2 +- .../hive/serde2/objectinspector/ObjectInspectorConverters.java | 2 +- .../hive/serde2/objectinspector/ObjectInspectorUtils.java | 4 ++-- .../primitive/PrimitiveObjectInspectorUtils.java | 2 +- .../hive/serde2/teradata/TeradataBinaryDataInputStream.java| 2 +- 
.../hive/serde2/teradata/TeradataBinaryDataOutputStream.java | 2 +- .../hadoop/hive/serde2/teradata/TeradataBinarySerde.java | 4 ++-- .../hadoop/hive/serde2/thrift/TCTLSeparatedProtocol.java | 2 +- .../hadoop/hive/serde2/typeinfo/TimestampLocalTZTypeInfo.java | 2 +- .../org/apache/hadoop/hive/serde2/typeinfo/TypeInfoUtils.java | 6 +++--- .../org/apache/hadoop/hive/serde2/SerdeRandomRowSource.java| 2 +- .../src/test/org/apache/hadoop/hive/serde2/TestJsonSerDe.java | 4 ++-- .../apache/hadoop/hive/serde2/TestTCTLSeparatedProtocol.java | 10 +- .../apache/hadoop/hive/serde2/avro/TestAvroDeserializer.java | 2 +- .../hive/serde2/avro/TestAvroObjectInspectorGenerator.java | 8 .../org/apache/hadoop/hive/serde2/avro/TestAvroSerdeUtils.java | 4 ++-- .../hive/serde2/binarysortable/TestBinarySortableFast.java | 2 +- .../org/apache/hadoop/hive/serde2/lazy/TestLazySimpleFast.java | 2 +- .../hadoop/hive/serde2/lazybinary/TestLazyBinaryFast.java | 2 +- .../serde2/objectinspector/TestObjectInspectorConverters.java | 4 ++-- 35 files changed, 59 insertions(+), 59 deletions(-) diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/RandomTypeUtil.java b/serde/src/java/org/apache/hadoop/hive/serde2/RandomTypeUtil.java index ad9de4c..c0e0583 100644 --- a/serde/src/java/org/apache/hadoop/hive/serde2/RandomTypeUtil.java +++ b/serde/src/java/org/apache/hadoop/hive/serde2/RandomTypeUtil.java @@ -131,7 +131,7 @@ public class RandomTypeUtil { public static final long NANOSECONDS_PER_SECOND = TimeUnit.SECONDS.toNanos(1); public static final long MILLISECONDS_PER_SECOND = TimeUnit.SECONDS.toMillis(1); - public static final long NANOSECONDS_PER_MILLISSECOND = TimeUnit.MILLISECONDS.toNanos(1); + public static final long NANOSECONDS_PER_MILLISECOND = TimeUnit.MILLISECONDS.toNanos(1); private static final ThreadLocal DATE_FORMAT = new ThreadLocal() { @@ -172,12 +172,12 @@ public class RandomTypeUtil { case 2: // Limit to milliseconds only... 
optionalNanos = String.format(".%09d", - Integer.valueOf(r.nextInt((int) MILLISECONDS_PER_SECOND)) * NANOSECONDS_PER_MILLISSECOND); + Integer.valueOf(r.nextInt((int) MILLISECONDS_PER_SECOND)) * NANOSECONDS_PER_MILLISECOND); break; case 3: // Limit to below milliseconds only... optionalNanos = String.format(".%09d", - Integer.valueOf(r.nextInt((int) NANOSECONDS_PER_MILLISSECOND))); + Integer.valueOf(r.nextInt((int) NANOSECONDS_PER_MILLISECOND))); break; } String timestampStr = String.format("%04d-%02d-%02d %02d:%02d:%02d%s", diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/SerDeStatsStruct.java b/serde/src/java/org/apache/hadoop/hive/serde2/SerDeStatsStruct.java index cf86e5e..8f0fa9d 100644 --- a/serde/src/java/org/apache/hadoop/hive/serd
[hive] branch master updated: HIVE-25582: Empty result when using offset limit with MR (#2693) (Zhihua Deng reviewed by Laszlo Bodor and Zoltan Haindrich)
This is an automated email from the ASF dual-hosted git repository. kgyrtkirk pushed a commit to branch master in repository https://gitbox.apache.org/repos/asf/hive.git The following commit(s) were added to refs/heads/master by this push: new cb23045 HIVE-25582: Empty result when using offset limit with MR (#2693) (Zhihua Deng reviewed by Laszlo Bodor and Zoltan Haindrich) cb23045 is described below commit cb23045f92c62bc43ef5739532b486b524d99e03 Author: dengzh AuthorDate: Tue Nov 23 16:02:58 2021 +0800 HIVE-25582: Empty result when using offset limit with MR (#2693) (Zhihua Deng reviewed by Laszlo Bodor and Zoltan Haindrich) --- .../test/resources/testconfiguration.properties| 1 + .../apache/hadoop/hive/ql/exec/ObjectCache.java| 1 - .../apache/hadoop/hive/ql/exec/mr/ObjectCache.java | 23 -- .../test/queries/clientpositive/offset_limit_mr.q | 12 +++ .../results/clientpositive/offset_limit_mr.q.out | 88 ++ 5 files changed, 118 insertions(+), 7 deletions(-) diff --git a/itests/src/test/resources/testconfiguration.properties b/itests/src/test/resources/testconfiguration.properties index 638af07..6b887f5 100644 --- a/itests/src/test/resources/testconfiguration.properties +++ b/itests/src/test/resources/testconfiguration.properties @@ -239,6 +239,7 @@ mr.query.files=\ masking_5.q,\ nonmr_fetch.q,\ nonreserved_keywords_input37.q,\ + offset_limit_mr.q,\ parenthesis_star_by.q,\ partition_vs_table_metadata.q,\ row__id.q,\ diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/ObjectCache.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/ObjectCache.java index cf04e1d..c9282b3 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/ObjectCache.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/ObjectCache.java @@ -48,7 +48,6 @@ public interface ObjectCache { * * @param * @param key - * function to generate the object if it's not there * @return the last cached object with the key, null if none. 
*/ public T retrieve(String key) throws HiveException; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ObjectCache.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ObjectCache.java index 5bb96e3..0acf6d7 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ObjectCache.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ObjectCache.java @@ -18,7 +18,9 @@ package org.apache.hadoop.hive.ql.exec.mr; +import java.util.Map; import java.util.concurrent.Callable; +import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ExecutionException; import java.util.concurrent.Future; import java.util.concurrent.TimeUnit; @@ -26,36 +28,45 @@ import java.util.concurrent.TimeoutException; import org.slf4j.Logger; import org.slf4j.LoggerFactory; + import org.apache.hadoop.hive.ql.metadata.HiveException; /** - * ObjectCache. No-op implementation on MR we don't have a means to reuse - * Objects between runs of the same task. + * ObjectCache. Simple implementation on MR we don't have a means to reuse + * Objects between runs of the same task, this acts as a local cache. * */ public class ObjectCache implements org.apache.hadoop.hive.ql.exec.ObjectCache { private static final Logger LOG = LoggerFactory.getLogger(ObjectCache.class.getName()); + private final Map cache = new ConcurrentHashMap<>(); + @Override public void release(String key) { -// nothing to do LOG.debug("{} no longer needed", key); +cache.remove(key); } @Override public T retrieve(String key) throws HiveException { -return retrieve(key, null); +return (T) cache.get(key); } @Override public T retrieve(String key, Callable fn) throws HiveException { +T value = (T) cache.get(key); +if (value != null || fn == null) { + return value; +} try { LOG.debug("Creating {}", key); - return fn.call(); + value = fn.call(); } catch (Exception e) { throw new HiveException(e); } +T previous = (T) cache.putIfAbsent(key, value); +return previous != null ? 
previous : value; } @Override @@ -94,6 +105,6 @@ public class ObjectCache implements org.apache.hadoop.hive.ql.exec.ObjectCache { @Override public void remove(String key) { -// nothing to do +cache.remove(key); } } diff --git a/ql/src/test/queries/clientpositive/offset_limit_mr.q b/ql/src/test/queries/clientpositive/offset_limit_mr.q new file mode 100644 index 000..caba496 --- /dev/null +++ b/ql/src/test/queries/clientpositive/offset_limit_mr.q @@ -0,0 +1,12 @@ +--! qt:dataset:src + +SELECT src.key, sum(substr(src.value,5)) FROM src GROUP BY src.key ORDER BY src.key LIMIT 10,10; + +SELECT src.key, sum(substr(src.value,5)) FROM src GROUP BY src.key ORDER BY src.key LIMIT 0,10; + +SELECT src.key, sum(substr(src.value,5)) FROM src GROUP BY src.key ORDER BY src.key LIMIT 1,10; + +SELECT src.key, sum(substr(src.v
[hive] branch master updated: HIVE-25680 : Authorize #get_table_meta HiveMetastore Server API to use any of the HiveMetastore Authorization model (#2770) (Syed Shameerur Rahman reviewed by Zoltan Haindrich)
This is an automated email from the ASF dual-hosted git repository. kgyrtkirk pushed a commit to branch master in repository https://gitbox.apache.org/repos/asf/hive.git The following commit(s) were added to refs/heads/master by this push: new 78b20c8 HIVE-25680 : Authorize #get_table_meta HiveMetastore Server API to use any of the HiveMetastore Authorization model (#2770) (Syed Shameerur Rahman reviewed by Zoltan Haindrich) 78b20c8 is described below commit 78b20c803ef2e75a7fe830325df2c19c15b203a3 Author: Syed Shameerur Rahman AuthorDate: Tue Nov 23 13:31:36 2021 +0530 HIVE-25680 : Authorize #get_table_meta HiveMetastore Server API to use any of the HiveMetastore Authorization model (#2770) (Syed Shameerur Rahman reviewed by Zoltan Haindrich) --- ...stMetastoreClientSideAuthorizationProvider.java | 145 + .../apache/hadoop/hive/metastore/HMSHandler.java | 52 2 files changed, 197 insertions(+) diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestMetastoreClientSideAuthorizationProvider.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestMetastoreClientSideAuthorizationProvider.java new file mode 100644 index 000..dbd71cb --- /dev/null +++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestMetastoreClientSideAuthorizationProvider.java @@ -0,0 +1,145 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.ql.security; + +import com.google.common.collect.Lists; +import org.apache.hadoop.hive.cli.CliSessionState; +import org.apache.hadoop.hive.conf.HiveConf; +import org.apache.hadoop.hive.metastore.HiveMetaStoreClient; +import org.apache.hadoop.hive.metastore.MetaStoreTestUtils; +import org.apache.hadoop.hive.metastore.api.TableMeta; +import org.apache.hadoop.hive.ql.DriverFactory; +import org.apache.hadoop.hive.ql.IDriver; +import org.apache.hadoop.hive.ql.security.authorization.DefaultHiveAuthorizationProvider; +import org.apache.hadoop.hive.ql.session.SessionState; +import org.apache.hadoop.hive.shims.Utils; +import org.apache.hadoop.security.UserGroupInformation; +import static org.junit.Assert.assertEquals; + +import org.junit.Before; +import org.junit.Test; + +import java.util.List; + +/** + * TestMetastoreClientSideAuthorizationProvider : Simple base test for Metastore client side + * Authorization Providers. 
By default, tests DefaultHiveAuthorizationProvider + */ +public class TestMetastoreClientSideAuthorizationProvider { +private HiveConf clientHiveConf; +private HiveMetaStoreClient msc; +private IDriver driver; +private UserGroupInformation ugi; + +@Before +public void setUp() throws Exception { + System.setProperty(HiveConf.ConfVars.METASTORE_PRE_EVENT_LISTENERS.varname, + "org.apache.hadoop.hive.ql.security.authorization.AuthorizationPreEventListener"); + +int port = MetaStoreTestUtils.startMetaStoreWithRetry(); + +clientHiveConf = new HiveConf(this.getClass()); + +// Turn on client-side authorization + clientHiveConf.setBoolVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_ENABLED,true); + clientHiveConf.set(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER.varname, +getAuthorizationProvider()); + clientHiveConf.set(HiveConf.ConfVars.HIVE_AUTHENTICATOR_MANAGER.varname, +InjectableDummyAuthenticator.class.getName()); + clientHiveConf.set(HiveConf.ConfVars.HIVE_AUTHORIZATION_TABLE_OWNER_GRANTS.varname, ""); +clientHiveConf.setVar(HiveConf.ConfVars.HIVEMAPREDMODE, "nonstrict"); +clientHiveConf.setVar(HiveConf.ConfVars.METASTOREURIS, "thrift://localhost:" + port); + clientHiveConf.setIntVar(HiveConf.ConfVars.METASTORETHRIFTCONNECTIONRETRIES, 3); +clientHiveConf.set(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false"); + +clientHiveConf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, ""); +clientHiveConf.set(HiveConf.ConfVars.POSTEXECHOOKS.varname, ""); + +ugi = Utils.getUGI(); + +SessionState.start(new CliSessionState(clientHiveConf)); +msc = new HiveMetaStoreClient(clientHive