[hive] branch master updated: HIVE-25738: NullIf doesn't support complex types (#2816) (Zoltan Haindrich reviewed by Zhihua Deng and Stamatis Zampetakis)
This is an automated email from the ASF dual-hosted git repository. kgyrtkirk pushed a commit to branch master in repository https://gitbox.apache.org/repos/asf/hive.git The following commit(s) were added to refs/heads/master by this push: new 1cafaef HIVE-25738: NullIf doesn't support complex types (#2816) (Zoltan Haindrich reviewed by Zhihua Deng and Stamatis Zampetakis) 1cafaef is described below commit 1cafaef0456fd2714c20f57b7c81a4968f425d44 Author: Zoltan Haindrich AuthorDate: Wed Dec 1 17:25:57 2021 +0100 HIVE-25738: NullIf doesn't support complex types (#2816) (Zoltan Haindrich reviewed by Zhihua Deng and Stamatis Zampetakis) --- .../hive/ql/udf/generic/GenericUDFNullif.java | 24 ++ .../hive/ql/udf/generic/TestGenericUDFNullif.java | 47 +++ ql/src/test/queries/clientnegative/nullif_union.q | 3 ++ ql/src/test/queries/clientpositive/udf_nullif.q| 9 .../test/results/clientnegative/nullif_union.q.out | 1 + .../results/clientpositive/llap/udf_nullif.q.out | 54 ++ 6 files changed, 130 insertions(+), 8 deletions(-) diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFNullif.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFNullif.java index a47882e..b99efa1 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFNullif.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFNullif.java @@ -23,6 +23,7 @@ import org.apache.hadoop.hive.ql.exec.UDFArgumentException; import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException; import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; +import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils; import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils; import 
org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils.PrimitiveGrouping; @@ -49,6 +50,17 @@ public class GenericUDFNullif extends GenericUDF { returnOIResolver = new GenericUDFUtils.ReturnObjectInspectorResolver(true); returnOIResolver.update(arguments[0]); +switch (arguments[0].getCategory()) { +case LIST: +case MAP: +case STRUCT: +case PRIMITIVE: + break; +case UNION: +default: + throw new UDFArgumentTypeException(0, "Unsupported Argument type category: " + arguments[0].getCategory()); +} + boolean isPrimitive = (arguments[0] instanceof PrimitiveObjectInspector); if (isPrimitive) { @@ -86,17 +98,13 @@ public class GenericUDFNullif extends GenericUDF { public Object evaluate(DeferredObject[] arguments) throws HiveException { Object arg0 = arguments[0].get(); Object arg1 = arguments[1].get(); -Object value0 = null; -if (arg0 != null) { - value0 = returnOIResolver.convertIfNecessary(arg0, argumentOIs[0], false); -} +Object value0 = returnOIResolver.convertIfNecessary(arg0, argumentOIs[0], false); if (arg0 == null || arg1 == null) { return value0; } -PrimitiveObjectInspector compareOI = (PrimitiveObjectInspector) returnOIResolver.get(); -if (PrimitiveObjectInspectorUtils.comparePrimitiveObjects( -value0, compareOI, -returnOIResolver.convertIfNecessary(arg1, argumentOIs[1], false), compareOI)) { +Object value1 = returnOIResolver.convertIfNecessary(arg1, argumentOIs[1], false); +ObjectInspector compareOI = returnOIResolver.get(); +if (ObjectInspectorUtils.compare(value0, compareOI, value1, compareOI) == 0) { return null; } return value0; diff --git a/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFNullif.java b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFNullif.java index 281b0d5..2ff4408 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFNullif.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFNullif.java @@ -18,6 +18,8 @@ package 
org.apache.hadoop.hive.ql.udf.generic; +import static java.util.Arrays.asList; + import org.apache.hadoop.hive.ql.exec.UDFArgumentException; import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject; @@ -27,7 +29,9 @@ import org.apache.hadoop.hive.serde2.io.DateWritableV2; import org.apache.hadoop.hive.serde2.lazy.LazyInteger; import org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive.LazyPrimitiveObjectInspectorFactory; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; +import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory; import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector; +import org.apache.hadoop.hive.serde2.objectinspector.StandardListObjectInspector;
[hive] branch master updated (52a0399 -> 1a6414a)
This is an automated email from the ASF dual-hosted git repository. ychena pushed a change to branch master in repository https://gitbox.apache.org/repos/asf/hive.git. from 52a0399 HIVE-25609: Preserve XAttrs in normal file copy case. (Haymant Mangla, reviewed by Ayush Saxena) add 1a6414a HIVE-21075 : Metastore: Drop partition performance downgrade with Postgres (#2826) (Oleksiy Sayankin, reviewed by Peter Vary, Yongzhi Chen) No new revisions were added by this update. Summary of changes: .../apache/hadoop/hive/metastore/ObjectStore.java | 67 ++ 1 file changed, 55 insertions(+), 12 deletions(-)
[hive] branch master updated: HIVE-25609: Preserve XAttrs in normal file copy case. (Haymant Mangla, reviewed by Ayush Saxena)
This is an automated email from the ASF dual-hosted git repository. pravin pushed a commit to branch master in repository https://gitbox.apache.org/repos/asf/hive.git The following commit(s) were added to refs/heads/master by this push: new 52a0399 HIVE-25609: Preserve XAttrs in normal file copy case. (Haymant Mangla, reviewed by Ayush Saxena) 52a0399 is described below commit 52a0399b146814e8a6a7c38b1c5f6e215b7851fb Author: Haymant Mangla <79496857+hmangl...@users.noreply.github.com> AuthorDate: Wed Dec 1 14:44:18 2021 +0530 HIVE-25609: Preserve XAttrs in normal file copy case. (Haymant Mangla, reviewed by Ayush Saxena) --- .../org/apache/hadoop/hive/common/FileUtils.java | 145 - .../apache/hadoop/hive/common/TestFileUtils.java | 81 .../parse/TestReplicationScenariosAcidTables.java | 77 +++ .../hadoop/hive/ql/parse/repl/CopyUtils.java | 11 +- .../apache/hadoop/hive/shims/Hadoop23Shims.java| 17 +-- .../java/org/apache/hadoop/hive/shims/Utils.java | 14 ++ 6 files changed, 323 insertions(+), 22 deletions(-) diff --git a/common/src/java/org/apache/hadoop/hive/common/FileUtils.java b/common/src/java/org/apache/hadoop/hive/common/FileUtils.java index fdd78cb..d5cf3d6 100644 --- a/common/src/java/org/apache/hadoop/hive/common/FileUtils.java +++ b/common/src/java/org/apache/hadoop/hive/common/FileUtils.java @@ -37,14 +37,13 @@ import java.util.HashSet; import java.util.List; import java.util.Random; import java.util.Set; +import java.util.Map; import com.google.common.annotations.VisibleForTesting; - import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.ContentSummary; import org.apache.hadoop.fs.FileStatus; -import org.apache.hadoop.fs.FileSystem; -import org.apache.hadoop.fs.FileUtil; import org.apache.hadoop.fs.GlobFilter; import org.apache.hadoop.fs.LocalFileSystem; import org.apache.hadoop.fs.LocatedFileStatus; @@ -53,10 +52,14 @@ import org.apache.hadoop.fs.PathFilter; import 
org.apache.hadoop.fs.RemoteIterator; import org.apache.hadoop.fs.Trash; import org.apache.hadoop.fs.permission.FsAction; +import org.apache.hadoop.fs.FSDataOutputStream; +import org.apache.hadoop.fs.PathExistsException; +import org.apache.hadoop.fs.PathIsDirectoryException; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.shims.HadoopShims; import org.apache.hadoop.hive.shims.ShimLoader; import org.apache.hadoop.hive.shims.Utils; +import org.apache.hadoop.io.IOUtils; import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.util.StringUtils; import org.apache.hive.common.util.ShutdownHookManager; @@ -661,11 +664,145 @@ public final class FileUtils { // is tried and it fails. We depend upon that behaviour in cases like replication, // wherein if distcp fails, there is good reason to not plod along with a trivial // implementation, and fail instead. - copied = FileUtil.copy(srcFS, src, dstFS, dst, deleteSource, overwrite, conf); + copied = copy(srcFS, srcFS.getFileStatus(src), dstFS, dst, deleteSource, overwrite, shouldPreserveXAttrs(conf, srcFS, dstFS), conf); } return copied; } + public static boolean copy(FileSystem srcFS, FileStatus srcStatus, FileSystem dstFS, Path dst, boolean deleteSource, + boolean overwrite, boolean preserveXAttrs, Configuration conf) throws IOException { +Path src = srcStatus.getPath(); +dst = checkDest(src.getName(), dstFS, dst, overwrite); +if (srcStatus.isDirectory()) { + checkDependencies(srcFS, src, dstFS, dst); + if (!dstFS.mkdirs(dst)) { +return false; + } + + RemoteIterator fileIterator = srcFS.listStatusIterator(src); + while(fileIterator.hasNext()) { +FileStatus file = fileIterator.next(); +copy(srcFS, file, dstFS, new Path(dst, file.getPath().getName()), deleteSource, overwrite, preserveXAttrs, conf); + } + if (preserveXAttrs) { +preserveXAttr(srcFS, src, dstFS, dst); + } +} else { + InputStream in = null; + FSDataOutputStream out = null; + + try { +in = srcFS.open(src); +out = 
dstFS.create(dst, overwrite); +IOUtils.copyBytes(in, out, conf, true); +if (preserveXAttrs) { + preserveXAttr(srcFS, src, dstFS, dst); +} + } catch (IOException var11) { +IOUtils.closeStream(in); +IOUtils.closeStream(out); +throw var11; + } +} + +return deleteSource ? srcFS.delete(src, true) : true; + } + + public static boolean copy(FileSystem srcFS, Path[] srcs, FileSystem dstFS, Path dst, boolean deleteSource, boolean overwrite, boolean preserveXAttr, Configuration conf) throws IOException { +boolean gotException = false; +boolean returnVal = true; +StringBuilder exceptions = new StringBuilder(); +if (srcs.length == 1