http://git-wip-us.apache.org/repos/asf/hive/blob/f9517efd/pom.xml
----------------------------------------------------------------------
diff --git a/pom.xml b/pom.xml
index a400f78..3b3303c 100644
--- a/pom.xml
+++ b/pom.xml
@@ -123,11 +123,9 @@
     <dropwizard.version>3.1.0</dropwizard.version>
     <guava.version>14.0.1</guava.version>
     <groovy.version>2.4.4</groovy.version>
-    <hadoop-20S.version>1.2.1</hadoop-20S.version>
-    <hadoop-23.version>2.6.0</hadoop-23.version>
+    <hadoop.version>2.6.0</hadoop.version>
     <hadoop.bin.path>${basedir}/${hive.path.to.root}/testutils/hadoop</hadoop.bin.path>
-    <hbase.hadoop1.version>0.98.9-hadoop1</hbase.hadoop1.version>
-    <hbase.hadoop2.version>1.1.1</hbase.hadoop2.version>
+    <hbase.version>1.1.1</hbase.version>
     <!-- httpcomponents are not always in version sync -->
     <httpcomponents.client.version>4.4</httpcomponents.client.version>
     <httpcomponents.core.version>4.4</httpcomponents.core.version>
@@ -236,7 +234,6 @@
     </repository>
   </repositories>
 
-  <!-- Hadoop dependency management is done at the bottom under profiles -->
   <dependencyManagement>
     <dependencies>
       <!-- dependencies are always listed in sorted order by groupId, artifectId -->
@@ -599,6 +596,87 @@
         <artifactId>xercesImpl</artifactId>
         <version>${xerces.version}</version>
       </dependency>
+      <dependency>
+        <groupId>org.apache.hadoop</groupId>
+        <artifactId>hadoop-client</artifactId>
+        <version>${hadoop.version}</version>
+      </dependency>
+      <dependency>
+        <groupId>org.apache.hadoop</groupId>
+        <artifactId>hadoop-common</artifactId>
+        <version>${hadoop.version}</version>
+        <exclusions>
+          <exclusion>
+            <groupId>org.apache.httpcomponents</groupId>
+            <artifactId>httpcore</artifactId>
+          </exclusion>
+          <exclusion>
+            <groupId>org.apache.httpcomponents</groupId>
+            <artifactId>httpclient</artifactId>
+          </exclusion>
+        </exclusions>
+      </dependency>
+      <dependency>
+        <groupId>org.apache.hadoop</groupId>
+        <artifactId>hadoop-hdfs</artifactId>
+        <version>${hadoop.version}</version>
+      </dependency>
+      <dependency>
+        <groupId>org.apache.hadoop</groupId>
+        <artifactId>hadoop-mapreduce-client-jobclient</artifactId>
+        <version>${hadoop.version}</version>
+      </dependency>
+      <dependency>
+        <groupId>org.apache.hadoop</groupId>
+        <artifactId>hadoop-mapreduce-client-core</artifactId>
+        <version>${hadoop.version}</version>
+      </dependency>
+      <dependency>
+        <groupId>org.apache.hadoop</groupId>
+        <artifactId>hadoop-minikdc</artifactId>
+        <version>${hadoop.version}</version>
+      </dependency>
+      <dependency>
+        <groupId>org.apache.hbase</groupId>
+        <artifactId>hbase-common</artifactId>
+        <version>${hbase.version}</version>
+      </dependency>
+      <dependency>
+        <groupId>org.apache.hbase</groupId>
+        <artifactId>hbase-hadoop-compat</artifactId>
+        <version>${hbase.version}</version>
+      </dependency>
+      <dependency>
+        <groupId>org.apache.hbase</groupId>
+        <artifactId>hbase-hadoop2-compat</artifactId>
+        <version>${hbase.version}</version>
+      </dependency>
+      <dependency>
+        <groupId>org.apache.hbase</groupId>
+        <artifactId>hbase-server</artifactId>
+        <version>${hbase.version}</version>
+      </dependency>
+      <dependency>
+        <groupId>org.apache.hadoop</groupId>
+        <artifactId>hadoop-minicluster</artifactId>
+        <version>${hadoop.version}</version>
+      </dependency>
+      <dependency>
+        <groupId>org.scala-lang</groupId>
+        <artifactId>scala-library</artifactId>
+        <version>${scala.version}</version>
+      </dependency>
+      <dependency>
+        <groupId>org.apache.spark</groupId>
+        <artifactId>spark-core_${scala.binary.version}</artifactId>
+        <version>${spark.version}</version>
+        <exclusions>
+          <exclusion>
+            <groupId>org.apache.hadoop</groupId>
+            <artifactId>hadoop-core</artifactId>
+          </exclusion>
+        </exclusions>
+      </dependency>
     </dependencies>
   </dependencyManagement>
 
@@ -1061,146 +1139,6 @@
         </plugins>
       </reporting>
     </profile>
-
-    <!-- hadoop profiles in the root pom are only used for dependency management -->
-    <profile>
-      <id>hadoop-1</id>
-      <dependencyManagement>
-        <dependencies>
-          <dependency>
-            <groupId>org.apache.hadoop</groupId>
-            <artifactId>hadoop-client</artifactId>
-            <version>${hadoop-20S.version}</version>
-          </dependency>
-          <dependency>
-            <groupId>org.apache.hadoop</groupId>
-            <artifactId>hadoop-core</artifactId>
-            <version>${hadoop-20S.version}</version>
-          </dependency>
-          <dependency>
-            <groupId>org.apache.hadoop</groupId>
-            <artifactId>hadoop-test</artifactId>
-            <version>${hadoop-20S.version}</version>
-          </dependency>
-          <dependency>
-            <groupId>org.apache.hadoop</groupId>
-            <artifactId>hadoop-tools</artifactId>
-            <version>${hadoop-20S.version}</version>
-          </dependency>
-          <dependency>
-            <groupId>org.apache.hbase</groupId>
-            <artifactId>hbase-common</artifactId>
-            <version>${hbase.hadoop1.version}</version>
-          </dependency>
-          <dependency>
-            <groupId>org.apache.hbase</groupId>
-            <artifactId>hbase-hadoop-compat</artifactId>
-            <version>${hbase.hadoop1.version}</version>
-          </dependency>
-          <dependency>
-            <groupId>org.apache.hbase</groupId>
-            <artifactId>hbase-hadoop1-compat</artifactId>
-            <version>${hbase.hadoop1.version}</version>
-          </dependency>
-          <dependency>
-            <groupId>org.apache.hbase</groupId>
-            <artifactId>hbase-server</artifactId>
-            <version>${hbase.hadoop1.version}</version>
-          </dependency>
-        </dependencies>
-      </dependencyManagement>
-    </profile>
-    <profile>
-      <id>hadoop-2</id>
-      <modules>
-        <module>llap-server</module>
-      </modules>
-      <dependencyManagement>
-        <dependencies>
-          <dependency>
-            <groupId>org.apache.hadoop</groupId>
-            <artifactId>hadoop-client</artifactId>
-            <version>${hadoop-23.version}</version>
-          </dependency>
-          <dependency>
-            <groupId>org.apache.hadoop</groupId>
-            <artifactId>hadoop-common</artifactId>
-            <version>${hadoop-23.version}</version>
-            <exclusions>
-              <exclusion>
-                <groupId>org.apache.httpcomponents</groupId>
-                <artifactId>httpcore</artifactId>
-              </exclusion>
-              <exclusion>
-                <groupId>org.apache.httpcomponents</groupId>
-                <artifactId>httpclient</artifactId>
-              </exclusion>
-            </exclusions>
-          </dependency>
-          <dependency>
-            <groupId>org.apache.hadoop</groupId>
-            <artifactId>hadoop-hdfs</artifactId>
-            <version>${hadoop-23.version}</version>
-          </dependency>
-          <dependency>
-            <groupId>org.apache.hadoop</groupId>
-            <artifactId>hadoop-mapreduce-client-jobclient</artifactId>
-            <version>${hadoop-23.version}</version>
-          </dependency>
-          <dependency>
-            <groupId>org.apache.hadoop</groupId>
-            <artifactId>hadoop-mapreduce-client-core</artifactId>
-            <version>${hadoop-23.version}</version>
-          </dependency>
-          <dependency>
-            <groupId>org.apache.hadoop</groupId>
-            <artifactId>hadoop-minikdc</artifactId>
-            <version>${hadoop-23.version}</version>
-          </dependency>
-          <dependency>
-            <groupId>org.apache.hbase</groupId>
-            <artifactId>hbase-common</artifactId>
-            <version>${hbase.hadoop2.version}</version>
-          </dependency>
-          <dependency>
-            <groupId>org.apache.hbase</groupId>
-            <artifactId>hbase-hadoop-compat</artifactId>
-            <version>${hbase.hadoop2.version}</version>
-          </dependency>
-          <dependency>
-            <groupId>org.apache.hbase</groupId>
-            <artifactId>hbase-hadoop2-compat</artifactId>
-            <version>${hbase.hadoop2.version}</version>
-          </dependency>
-          <dependency>
-            <groupId>org.apache.hbase</groupId>
-            <artifactId>hbase-server</artifactId>
-            <version>${hbase.hadoop2.version}</version>
-          </dependency>
-          <dependency>
-            <groupId>org.apache.hadoop</groupId>
-            <artifactId>hadoop-minicluster</artifactId>
-            <version>${hadoop-23.version}</version>
-          </dependency>
-          <dependency>
-            <groupId>org.scala-lang</groupId>
-            <artifactId>scala-library</artifactId>
-            <version>${scala.version}</version>
-          </dependency>
-          <dependency>
-            <groupId>org.apache.spark</groupId>
-            <artifactId>spark-core_${scala.binary.version}</artifactId>
-            <version>${spark.version}</version>
-            <exclusions>
-              <exclusion>
-                <groupId>org.apache.hadoop</groupId>
-                <artifactId>hadoop-core</artifactId>
-              </exclusion>
-            </exclusions>
-          </dependency>
-        </dependencies>
-      </dependencyManagement>
-    </profile>
     <profile>
       <!-- Windows-specific settings to allow unit tests to work -->
       <id>windows-test</id>
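Note on the root-pom change above: the per-profile Hadoop/HBase version properties are replaced by single ${hadoop.version} and ${hbase.version} properties whose artifacts are managed unconditionally in <dependencyManagement>. The module poms patched below still spell out <version>${hadoop.version}</version> explicitly, but standard Maven semantics would also allow a child module to omit the version once the parent manages it. A minimal illustrative fragment (not part of the patch itself):

    <!-- In a child pom inheriting from the Hive root pom: the <version>
         element may be omitted; Maven resolves it to ${hadoop.version}
         (2.6.0) from the parent's dependencyManagement shown above. -->
    <dependency>
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-common</artifactId>
    </dependency>
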
http://git-wip-us.apache.org/repos/asf/hive/blob/f9517efd/ql/pom.xml
----------------------------------------------------------------------
diff --git a/ql/pom.xml b/ql/pom.xml
index 83b9ebf..8ac13a6 100644
--- a/ql/pom.xml
+++ b/ql/pom.xml
@@ -174,6 +174,74 @@
       <version>${libfb303.version}</version>
     </dependency>
     <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-common</artifactId>
+      <version>${hadoop.version}</version>
+      <exclusions>
+        <exclusion>
+          <groupId>javax.servlet</groupId>
+          <artifactId>servlet-api</artifactId>
+        </exclusion>
+      </exclusions>
+      <optional>true</optional>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-archives</artifactId>
+      <version>${hadoop.version}</version>
+      <optional>true</optional>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-mapreduce-client-core</artifactId>
+      <version>${hadoop.version}</version>
+      <optional>true</optional>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-mapreduce-client-common</artifactId>
+      <version>${hadoop.version}</version>
+      <optional>true</optional>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-hdfs</artifactId>
+      <version>${hadoop.version}</version>
+      <exclusions>
+        <exclusion>
+          <groupId>javax.servlet</groupId>
+          <artifactId>servlet-api</artifactId>
+        </exclusion>
+      </exclusions>
+      <optional>true</optional>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-yarn-api</artifactId>
+      <version>${hadoop.version}</version>
+      <optional>true</optional>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-yarn-common</artifactId>
+      <version>${hadoop.version}</version>
+      <exclusions>
+        <exclusion>
+          <groupId>javax.servlet</groupId>
+          <artifactId>servlet-api</artifactId>
+        </exclusion>
+      </exclusions>
+      <optional>true</optional>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-yarn-client</artifactId>
+      <version>${hadoop.version}</version>
+      <optional>true</optional>
+    </dependency>
+
+    <dependency>
       <groupId>org.apache.ivy</groupId>
       <artifactId>ivy</artifactId>
       <version>${ivy.version}</version>
@@ -494,91 +562,6 @@
 
   <profiles>
     <profile>
-      <id>hadoop-1</id>
-      <build>
-        <plugins>
-          <plugin>
-            <groupId>org.apache.maven.plugins</groupId>
-            <artifactId>maven-compiler-plugin</artifactId>
-            <version>2.3.2</version>
-            <configuration>
-              <excludes>
-                <exclude>**/ATSHook.java</exclude>
-              </excludes>
-            </configuration>
-          </plugin>
-        </plugins>
-      </build>
-      <dependencies>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-core</artifactId>
-          <version>${hadoop-20S.version}</version>
-          <optional>true</optional>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-tools</artifactId>
-          <version>${hadoop-20S.version}</version>
-          <optional>true</optional>
-        </dependency>
-      </dependencies>
-    </profile>
-    <profile>
-      <id>hadoop-2</id>
-      <dependencies>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-common</artifactId>
-          <version>${hadoop-23.version}</version>
-          <optional>true</optional>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-archives</artifactId>
-          <version>${hadoop-23.version}</version>
-          <optional>true</optional>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-mapreduce-client-core</artifactId>
-          <version>${hadoop-23.version}</version>
-          <optional>true</optional>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-mapreduce-client-common</artifactId>
-          <version>${hadoop-23.version}</version>
-          <optional>true</optional>
-          <scope>test</scope>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-hdfs</artifactId>
-          <version>${hadoop-23.version}</version>
-          <optional>true</optional>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-yarn-api</artifactId>
-          <version>${hadoop-23.version}</version>
-          <optional>true</optional>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-yarn-common</artifactId>
-          <version>${hadoop-23.version}</version>
-          <optional>true</optional>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-yarn-client</artifactId>
-          <version>${hadoop-23.version}</version>
-          <optional>true</optional>
-        </dependency>
-      </dependencies>
-    </profile>
-    <profile>
       <id>protobuf</id>
       <build>
         <plugins>
@@ -722,7 +705,6 @@
             <include>org.json:json</include>
             <include>org.apache.avro:avro</include>
             <include>org.apache.avro:avro-mapred</include>
-            <include>org.apache.hive.shims:hive-shims-0.20S</include>
             <include>org.apache.hive.shims:hive-shims-0.23</include>
             <include>org.apache.hive.shims:hive-shims-0.23</include>
             <include>org.apache.hive.shims:hive-shims-common</include>

http://git-wip-us.apache.org/repos/asf/hive/blob/f9517efd/serde/pom.xml
----------------------------------------------------------------------
diff --git a/serde/pom.xml b/serde/pom.xml
index b6c0d0c..99c89ed 100644
--- a/serde/pom.xml
+++ b/serde/pom.xml
@@ -85,6 +85,18 @@
       <artifactId>parquet-hadoop-bundle</artifactId>
       <version>${parquet.version}</version>
     </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-common</artifactId>
+      <version>${hadoop.version}</version>
+      <optional>true</optional>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-mapreduce-client-core</artifactId>
+      <version>${hadoop.version}</version>
+      <optional>true</optional>
+    </dependency>
 
     <!-- test inter-project -->
     <dependency>
@@ -111,66 +123,28 @@
       <version>${jersey.version}</version>
       <scope>test</scope>
     </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-common</artifactId>
+      <version>${hadoop.version}</version>
+      <classifier>tests</classifier>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-hdfs</artifactId>
+      <version>${hadoop.version}</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-hdfs</artifactId>
+      <version>${hadoop.version}</version>
+      <classifier>tests</classifier>
+      <scope>test</scope>
+    </dependency>
   </dependencies>
 
-  <profiles>
-    <profile>
-      <id>hadoop-1</id>
-      <dependencies>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-core</artifactId>
-          <version>${hadoop-20S.version}</version>
-          <optional>true</optional>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-test</artifactId>
-          <version>${hadoop-20S.version}</version>
-          <scope>test</scope>
-        </dependency>
-      </dependencies>
-    </profile>
-    <profile>
-      <id>hadoop-2</id>
-      <dependencies>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-common</artifactId>
-          <version>${hadoop-23.version}</version>
-          <optional>true</optional>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-common</artifactId>
-          <version>${hadoop-23.version}</version>
-          <classifier>tests</classifier>
-          <scope>test</scope>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-mapreduce-client-core</artifactId>
-          <version>${hadoop-23.version}</version>
-          <optional>true</optional>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-hdfs</artifactId>
-          <version>${hadoop-23.version}</version>
-          <scope>test</scope>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-hdfs</artifactId>
-          <version>${hadoop-23.version}</version>
-          <classifier>tests</classifier>
-          <scope>test</scope>
-        </dependency>
-      </dependencies>
-    </profile>
-  </profiles>
-
-
   <build>
     <sourceDirectory>${basedir}/src/java</sourceDirectory>
     <testSourceDirectory>${basedir}/src/test</testSourceDirectory>

http://git-wip-us.apache.org/repos/asf/hive/blob/f9517efd/service/pom.xml
----------------------------------------------------------------------
diff --git a/service/pom.xml b/service/pom.xml
index 07eeb9a..d7ab5bf 100644
--- a/service/pom.xml
+++ b/service/pom.xml
@@ -96,7 +96,19 @@
       <artifactId>curator-recipes</artifactId>
       <version>${curator.version}</version>
     </dependency>
-    <!-- intra-project -->
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-common</artifactId>
+      <version>${hadoop.version}</version>
+      <optional>true</optional>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-mapreduce-client-core</artifactId>
+      <version>${hadoop.version}</version>
+      <optional>true</optional>
+    </dependency>
+    <!-- intra-project -->
     <dependency>
       <groupId>org.apache.hive</groupId>
       <artifactId>hive-exec</artifactId>
@@ -113,37 +125,6 @@
     </dependency>
   </dependencies>
 
-  <profiles>
-    <profile>
-      <id>hadoop-1</id>
-      <dependencies>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-core</artifactId>
-          <version>${hadoop-20S.version}</version>
-          <optional>true</optional>
-        </dependency>
-      </dependencies>
-    </profile>
-    <profile>
-      <id>hadoop-2</id>
-      <dependencies>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-common</artifactId>
-          <version>${hadoop-23.version}</version>
-          <optional>true</optional>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-mapreduce-client-core</artifactId>
-          <version>${hadoop-23.version}</version>
-          <optional>true</optional>
-        </dependency>
-      </dependencies>
-    </profile>
-  </profiles>
-
   <build>
     <sourceDirectory>${basedir}/src/java</sourceDirectory>
     <testSourceDirectory>${basedir}/src/test</testSourceDirectory>

http://git-wip-us.apache.org/repos/asf/hive/blob/f9517efd/shims/0.20S/pom.xml
----------------------------------------------------------------------
diff --git a/shims/0.20S/pom.xml b/shims/0.20S/pom.xml
deleted file mode 100644
index 565dd5e..0000000
--- a/shims/0.20S/pom.xml
+++ /dev/null
@@ -1,63 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!--
-  Licensed under the Apache License, Version 2.0 (the "License");
-  you may not use this file except in compliance with the License.
-  You may obtain a copy of the License at
-
-      http://www.apache.org/licenses/LICENSE-2.0
-
-  Unless required by applicable law or agreed to in writing, software
-  distributed under the License is distributed on an "AS IS" BASIS,
-  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  See the License for the specific language governing permissions and
-  limitations under the License.
--->
-<project xmlns="http://maven.apache.org/POM/4.0.0"
-         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
-         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
-  <modelVersion>4.0.0</modelVersion>
-  <parent>
-    <groupId>org.apache.hive</groupId>
-    <artifactId>hive</artifactId>
-    <version>2.0.0-SNAPSHOT</version>
-    <relativePath>../../pom.xml</relativePath>
-  </parent>
-
-  <groupId>org.apache.hive.shims</groupId>
-  <artifactId>hive-shims-0.20S</artifactId>
-  <packaging>jar</packaging>
-  <name>Hive Shims 0.20S</name>
-
-  <properties>
-    <hive.path.to.root>../..</hive.path.to.root>
-  </properties>
-
-  <dependencies>
-    <!-- dependencies are always listed in sorted order by groupId, artifectId -->
-    <!-- intra-project -->
-    <dependency>
-      <groupId>org.apache.hive.shims</groupId>
-      <artifactId>hive-shims-common</artifactId>
-      <version>${project.version}</version>
-    </dependency>
-    <!-- inter-project -->
-    <dependency>
-      <groupId>org.apache.hadoop</groupId>
-      <artifactId>hadoop-core</artifactId>
-      <version>${hadoop-20S.version}</version>
-      <optional>true</optional>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.hadoop</groupId>
-      <artifactId>hadoop-test</artifactId>
-      <version>${hadoop-20S.version}</version>
-      <optional>true</optional>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.hadoop</groupId>
-      <artifactId>hadoop-tools</artifactId>
-      <version>${hadoop-20S.version}</version>
-      <scope>provided</scope>
-    </dependency>
-  </dependencies>
-</project>

http://git-wip-us.apache.org/repos/asf/hive/blob/f9517efd/shims/0.20S/src/main/java/org/apache/hadoop/hive/shims/Hadoop20SShims.java
----------------------------------------------------------------------
diff --git a/shims/0.20S/src/main/java/org/apache/hadoop/hive/shims/Hadoop20SShims.java b/shims/0.20S/src/main/java/org/apache/hadoop/hive/shims/Hadoop20SShims.java
deleted file mode 100644
index f60e8f0..0000000
--- a/shims/0.20S/src/main/java/org/apache/hadoop/hive/shims/Hadoop20SShims.java
+++ /dev/null
@@ -1,734 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hive.shims;
-
-import java.io.IOException;
-import java.net.InetSocketAddress;
-import java.net.MalformedURLException;
-import java.net.URI;
-import java.net.URL;
-import java.nio.ByteBuffer;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collections;
-import java.util.Comparator;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.TreeMap;
-
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.filecache.DistributedCache;
-import org.apache.hadoop.fs.BlockLocation;
-import org.apache.hadoop.fs.FSDataInputStream;
-import org.apache.hadoop.fs.FSDataOutputStream;
-import org.apache.hadoop.fs.FileStatus;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.FsShell;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.fs.PathFilter;
-import org.apache.hadoop.fs.ProxyFileSystem;
-import org.apache.hadoop.fs.Trash;
-import org.apache.hadoop.fs.permission.FsPermission;
-import org.apache.hadoop.hdfs.MiniDFSCluster;
-import org.apache.hadoop.io.LongWritable;
-import org.apache.hadoop.mapred.ClusterStatus;
-import org.apache.hadoop.mapred.InputSplit;
-import org.apache.hadoop.mapred.JobConf;
-import org.apache.hadoop.mapred.JobInProgress;
-import org.apache.hadoop.mapred.JobTracker;
-import org.apache.hadoop.mapred.MiniMRCluster;
-import org.apache.hadoop.mapred.RecordReader;
-import org.apache.hadoop.mapred.Reporter;
-import org.apache.hadoop.mapred.TaskLogServlet;
-import org.apache.hadoop.mapred.WebHCatJTShim20S;
-import org.apache.hadoop.mapred.lib.TotalOrderPartitioner;
-import org.apache.hadoop.mapreduce.Job;
-import org.apache.hadoop.mapreduce.JobContext;
-import org.apache.hadoop.mapreduce.JobID;
-import org.apache.hadoop.mapreduce.JobStatus;
-import org.apache.hadoop.mapreduce.OutputFormat;
-import org.apache.hadoop.mapreduce.TaskAttemptContext;
-import org.apache.hadoop.mapreduce.TaskAttemptID;
-import org.apache.hadoop.mapreduce.TaskID;
-import org.apache.hadoop.security.Credentials;
-import org.apache.hadoop.security.KerberosName;
-import org.apache.hadoop.security.UserGroupInformation;
-import org.apache.hadoop.security.token.Token;
-import org.apache.hadoop.tools.distcp2.DistCp;
-import org.apache.hadoop.tools.distcp2.DistCpOptions;
-import org.apache.hadoop.tools.distcp2.DistCpOptions.FileAttribute;
-
-import org.apache.hadoop.util.Progressable;
-import org.apache.hadoop.util.VersionInfo;
-
-
-/**
- * Implemention of shims against Hadoop 0.20 with Security.
- */
-public class Hadoop20SShims extends HadoopShimsSecure {
-
-  @Override
-  public HadoopShims.CombineFileInputFormatShim getCombineFileInputFormat() {
-    return new CombineFileInputFormatShim() {
-      @Override
-      public RecordReader getRecordReader(InputSplit split,
-          JobConf job, Reporter reporter) throws IOException {
-        throw new IOException("CombineFileInputFormat.getRecordReader not needed.");
-      }
-
-      @Override
-      protected FileStatus[] listStatus(JobConf job) throws IOException {
-        FileStatus[] result = super.listStatus(job);
-        boolean foundDir = false;
-        for (FileStatus stat: result) {
-          if (stat.isDir()) {
-            foundDir = true;
-            break;
-          }
-        }
-        if (!foundDir) {
-          return result;
-        }
-        ArrayList<FileStatus> files = new ArrayList<FileStatus>();
-        for (FileStatus stat: result) {
-          if (!stat.isDir()) {
-            files.add(stat);
-          }
-        }
-        return files.toArray(new FileStatus[files.size()]);
-      }
-    };
-  }
-
-  @Override
-  public String getTaskAttemptLogUrl(JobConf conf,
-    String taskTrackerHttpAddress, String taskAttemptId)
-    throws MalformedURLException {
-    URL taskTrackerHttpURL = new URL(taskTrackerHttpAddress);
-    return TaskLogServlet.getTaskLogUrl(
-        taskTrackerHttpURL.getHost(),
-        Integer.toString(taskTrackerHttpURL.getPort()),
-        taskAttemptId);
-  }
-
-  @Override
-  public JobTrackerState getJobTrackerState(ClusterStatus clusterStatus) throws Exception {
-    switch (clusterStatus.getJobTrackerState()) {
-    case INITIALIZING:
-      return JobTrackerState.INITIALIZING;
-    case RUNNING:
-      return JobTrackerState.RUNNING;
-    default:
-      String errorMsg = "Unrecognized JobTracker state: " + clusterStatus.getJobTrackerState();
-      throw new Exception(errorMsg);
-    }
-  }
-
-  @Override
-  public org.apache.hadoop.mapreduce.TaskAttemptContext newTaskAttemptContext(Configuration conf, final Progressable progressable) {
-    return new org.apache.hadoop.mapreduce.TaskAttemptContext(conf, new TaskAttemptID()) {
-      @Override
-      public void progress() {
-        progressable.progress();
-      }
-    };
-  }
-
-  @Override
-  public TaskAttemptID newTaskAttemptID(JobID jobId, boolean isMap, int taskId, int id) {
-    return new TaskAttemptID(jobId.getJtIdentifier(), jobId.getId(), isMap, taskId, id);
-  }
-
-  @Override
-  public org.apache.hadoop.mapreduce.JobContext newJobContext(Job job) {
-    return new org.apache.hadoop.mapreduce.JobContext(job.getConfiguration(), job.getJobID());
-  }
-
-  @Override
-  public boolean isLocalMode(Configuration conf) {
-    return "local".equals(getJobLauncherRpcAddress(conf));
-  }
-
-  @Override
-  public String getJobLauncherRpcAddress(Configuration conf) {
-    return conf.get("mapred.job.tracker");
-  }
-
-  @Override
-  public void setJobLauncherRpcAddress(Configuration conf, String val) {
-    conf.set("mapred.job.tracker", val);
-  }
-
-  @Override
-  public String getJobLauncherHttpAddress(Configuration conf) {
-    return conf.get("mapred.job.tracker.http.address");
-  }
-
-  @Override
-  public boolean moveToAppropriateTrash(FileSystem fs, Path path, Configuration conf)
-      throws IOException {
-    // older versions of Hadoop don't have a Trash constructor based on the
-    // Path or FileSystem. So need to achieve this by creating a dummy conf.
-    // this needs to be filtered out based on version
-
-    Configuration dupConf = new Configuration(conf);
-    FileSystem.setDefaultUri(dupConf, fs.getUri());
-    Trash trash = new Trash(dupConf);
-    return trash.moveToTrash(path);
-  }
-
-  @Override
-  public long getDefaultBlockSize(FileSystem fs, Path path) {
-    return fs.getDefaultBlockSize();
-  }
-
-  @Override
-  public short getDefaultReplication(FileSystem fs, Path path) {
-    return fs.getDefaultReplication();
-  }
-
-  @Override
-  public void refreshDefaultQueue(Configuration conf, String userName) {
-    // MR1 does not expose API required to set MR queue mapping for user
-  }
-
-  @Override
-  public void setTotalOrderPartitionFile(JobConf jobConf, Path partitionFile){
-    TotalOrderPartitioner.setPartitionFile(jobConf, partitionFile);
-  }
-
-  @Override
-  public Comparator<LongWritable> getLongComparator() {
-    return new Comparator<LongWritable>() {
-      @Override
-      public int compare(LongWritable o1, LongWritable o2) {
-        return o1.compareTo(o2);
-      }
-    };
-  }
-
-  /**
-   * Returns a shim to wrap MiniMrCluster
-   */
-  @Override
-  public MiniMrShim getMiniMrCluster(Configuration conf, int numberOfTaskTrackers,
-      String nameNode, int numDir) throws IOException {
-    return new MiniMrShim(conf, numberOfTaskTrackers, nameNode, numDir);
-  }
-
-  @Override
-  public MiniMrShim getMiniTezCluster(Configuration conf, int numberOfTaskTrackers,
-      String nameNode, boolean isLlap) throws IOException {
-    throw new IOException("Cannot run tez on current hadoop, Version: " + VersionInfo.getVersion());
-  }
-
-  @Override
-  public MiniMrShim getMiniSparkCluster(Configuration conf, int numberOfTaskTrackers,
-      String nameNode, int numDir) throws IOException {
-    throw new IOException("Cannot run Spark on YARN on current Hadoop, Version: " + VersionInfo.getVersion());
-  }
-
-  /**
-   * Shim for MiniMrCluster
-   */
-  public class MiniMrShim implements HadoopShims.MiniMrShim {
-
-    private final MiniMRCluster mr;
-
-    public MiniMrShim(Configuration conf, int numberOfTaskTrackers,
-        String nameNode, int numDir) throws IOException {
-      this.mr = new MiniMRCluster(numberOfTaskTrackers, nameNode, numDir);
-    }
-
-    @Override
-    public int getJobTrackerPort() throws UnsupportedOperationException {
-      return mr.getJobTrackerPort();
-    }
-
-    @Override
-    public void shutdown() throws IOException {
-      MiniMRCluster.JobTrackerRunner runner = mr.getJobTrackerRunner();
-      JobTracker tracker = runner.getJobTracker();
-      if (tracker != null) {
-        for (JobInProgress running : tracker.getRunningJobs()) {
-          try {
-            running.kill();
-          } catch (Exception e) {
-            // ignore
-          }
-        }
-      }
-      runner.shutdown();
-    }
-
-    @Override
-    public void setupConfiguration(Configuration conf) {
-      setJobLauncherRpcAddress(conf, "localhost:" + mr.getJobTrackerPort());
-    }
-  }
-
-  // Don't move this code to the parent class. There's a binary
-  // incompatibility between hadoop 1 and 2 wrt MiniDFSCluster and we
-  // need to have two different shim classes even though they are
-  // exactly the same.
-  @Override
-  public HadoopShims.MiniDFSShim getMiniDfs(Configuration conf,
-      int numDataNodes,
-      boolean format,
-      String[] racks) throws IOException {
-    return new MiniDFSShim(new MiniDFSCluster(conf, numDataNodes, format, racks));
-  }
-
-  /**
-   * MiniDFSShim.
-   *
-   */
-  public class MiniDFSShim implements HadoopShims.MiniDFSShim {
-    private final MiniDFSCluster cluster;
-
-    public MiniDFSShim(MiniDFSCluster cluster) {
-      this.cluster = cluster;
-    }
-
-    @Override
-    public FileSystem getFileSystem() throws IOException {
-      return cluster.getFileSystem();
-    }
-
-    @Override
-    public void shutdown() {
-      cluster.shutdown();
-    }
-  }
-
-  private volatile HCatHadoopShims hcatShimInstance;
-
-  @Override
-  public HCatHadoopShims getHCatShim() {
-    if(hcatShimInstance == null) {
-      hcatShimInstance = new HCatHadoopShims20S();
-    }
-    return hcatShimInstance;
-  }
-
-  private final class HCatHadoopShims20S implements HCatHadoopShims {
-    @Override
-    public TaskID createTaskID() {
-      return new TaskID();
-    }
-
-    @Override
-    public TaskAttemptID createTaskAttemptID() {
-      return new TaskAttemptID();
-    }
-
-    @Override
-    public TaskAttemptContext createTaskAttemptContext(Configuration conf, TaskAttemptID taskId) {
-      return new TaskAttemptContext(conf, taskId);
-    }
-
-    @Override
-    public org.apache.hadoop.mapred.TaskAttemptContext createTaskAttemptContext(org.apache.hadoop.mapred.JobConf conf,
-        org.apache.hadoop.mapred.TaskAttemptID taskId, Progressable progressable) {
-      org.apache.hadoop.mapred.TaskAttemptContext newContext = null;
-      try {
-        java.lang.reflect.Constructor construct = org.apache.hadoop.mapred.TaskAttemptContext.class.getDeclaredConstructor(
-            org.apache.hadoop.mapred.JobConf.class, org.apache.hadoop.mapred.TaskAttemptID.class,
-            Progressable.class);
-        construct.setAccessible(true);
-        newContext = (org.apache.hadoop.mapred.TaskAttemptContext)construct.newInstance(conf, taskId, progressable);
-      } catch (Exception e) {
-        throw new RuntimeException(e);
-      }
-      return newContext;
-    }
-
-    @Override
-    public JobContext createJobContext(Configuration conf,
-        JobID jobId) {
-      return new JobContext(conf, jobId);
-    }
-
-    @Override
-    public org.apache.hadoop.mapred.JobContext createJobContext(org.apache.hadoop.mapred.JobConf conf,
-        org.apache.hadoop.mapreduce.JobID jobId, Progressable progressable) {
-      org.apache.hadoop.mapred.JobContext newContext = null;
-      try {
-        java.lang.reflect.Constructor construct = org.apache.hadoop.mapred.JobContext.class.getDeclaredConstructor(
-            org.apache.hadoop.mapred.JobConf.class, org.apache.hadoop.mapreduce.JobID.class,
-            Progressable.class);
-        construct.setAccessible(true);
-        newContext = (org.apache.hadoop.mapred.JobContext)construct.newInstance(conf, jobId, progressable);
-      } catch (Exception e) {
-        throw new RuntimeException(e);
-      }
-      return newContext;
-    }
-
-    @Override
-    public void commitJob(OutputFormat outputFormat, Job job) throws IOException {
-      if( job.getConfiguration().get("mapred.job.tracker", "").equalsIgnoreCase("local") ) {
-        try {
-          //In local mode, mapreduce will not call OutputCommitter.cleanupJob.
-          //Calling it from here so that the partition publish happens.
-          //This call needs to be removed after MAPREDUCE-1447 is fixed.
-          outputFormat.getOutputCommitter(createTaskAttemptContext(
-              job.getConfiguration(), createTaskAttemptID())).commitJob(job);
-        } catch (IOException e) {
-          throw new IOException("Failed to cleanup job",e);
-        } catch (InterruptedException e) {
-          throw new IOException("Failed to cleanup job",e);
-        }
-      }
-    }
-
-    @Override
-    public void abortJob(OutputFormat outputFormat, Job job) throws IOException {
-      if (job.getConfiguration().get("mapred.job.tracker", "")
-          .equalsIgnoreCase("local")) {
-        try {
-          // This call needs to be removed after MAPREDUCE-1447 is fixed.
-          outputFormat.getOutputCommitter(createTaskAttemptContext(
-              job.getConfiguration(), new TaskAttemptID())).abortJob(job, JobStatus.State.FAILED);
-        } catch (IOException e) {
-          throw new IOException("Failed to abort job", e);
-        } catch (InterruptedException e) {
-          throw new IOException("Failed to abort job", e);
-        }
-      }
-    }
-
-    @Override
-    public InetSocketAddress getResourceManagerAddress(Configuration conf)
-    {
-      return JobTracker.getAddress(conf);
-    }
-
-    @Override
-    public String getPropertyName(PropertyName name) {
-      switch (name) {
-      case CACHE_ARCHIVES:
-        return DistributedCache.CACHE_ARCHIVES;
-      case CACHE_FILES:
-        return DistributedCache.CACHE_FILES;
-      case CACHE_SYMLINK:
-        return DistributedCache.CACHE_SYMLINK;
-      case CLASSPATH_ARCHIVES:
-        return "mapred.job.classpath.archives";
-      case CLASSPATH_FILES:
-        return "mapred.job.classpath.files";
-      }
-
-      return "";
-    }
-
-    @Override
-    public boolean isFileInHDFS(FileSystem fs, Path path) throws IOException {
-      // In hadoop 1.x.x the file system URI is sufficient to determine the uri of the file
-      return "hdfs".equals(fs.getUri().getScheme());
-    }
-  }
-
-  @Override
-  public WebHCatJTShim getWebHCatShim(Configuration conf, UserGroupInformation ugi) throws IOException {
-    return new WebHCatJTShim20S(conf, ugi);//this has state, so can't be cached
-  }
-
-  @Override
-  public List<FileStatus> listLocatedStatus(final FileSystem fs,
-                                            final Path path,
-                                            final PathFilter filter
-                                            ) throws IOException {
-    return Arrays.asList(fs.listStatus(path, filter));
-  }
-
-  @Override
-  public BlockLocation[] getLocations(FileSystem fs,
-                                      FileStatus status) throws IOException {
-    return fs.getFileBlockLocations(status, 0, status.getLen());
-  }
-
-  @Override
-  public TreeMap<Long, BlockLocation> getLocationsWithOffset(FileSystem fs,
-                                                             FileStatus status) throws IOException {
-    TreeMap<Long, BlockLocation> offsetBlockMap = new TreeMap<Long, BlockLocation>();
-    BlockLocation[] locations = getLocations(fs, status);
-    for (BlockLocation location : locations) {
-      offsetBlockMap.put(location.getOffset(), location);
-    }
-    return offsetBlockMap;
-  }
-
-  @Override
-  public void hflush(FSDataOutputStream stream) throws IOException {
-    stream.sync();
-  }
-
-  @Override
-  public HdfsFileStatus getFullFileStatus(Configuration conf, FileSystem fs, Path file)
-      throws IOException {
-    return new Hadoop20SFileStatus(fs.getFileStatus(file));
-  }
-
-  @Override
-  public void setFullFileStatus(Configuration conf, HdfsFileStatus sourceStatus,
-      FileSystem fs, Path target) throws IOException {
-    String group = sourceStatus.getFileStatus().getGroup();
-    String permission = Integer.toString(sourceStatus.getFileStatus().getPermission().toShort(), 8);
-    //use FsShell to change group and permissions recursively
-    try {
-      FsShell fshell = new FsShell();
-      fshell.setConf(conf);
-      run(fshell, new String[]{"-chgrp", "-R", group, target.toString()});
-      run(fshell, new String[]{"-chmod", "-R", permission, target.toString()});
-    } catch (Exception e) {
-      throw new IOException("Unable to set permissions of " + target, e);
-    }
-    try {
-      if (LOG.isDebugEnabled()) {  //some trace logging
-        getFullFileStatus(conf, fs, target).debugLog();
-      }
-    } catch (Exception e) {
-      //ignore.
-    }
-  }
-
-  public class Hadoop20SFileStatus implements HdfsFileStatus {
-    private final FileStatus fileStatus;
-    public Hadoop20SFileStatus(FileStatus fileStatus) {
-      this.fileStatus = fileStatus;
-    }
-    @Override
-    public FileStatus getFileStatus() {
-      return fileStatus;
-    }
-    @Override
-    public void debugLog() {
-      if (fileStatus != null) {
-        LOG.debug(fileStatus.toString());
-      }
-    }
-  }
-
-  @Override
-  public FileSystem createProxyFileSystem(FileSystem fs, URI uri) {
-    return new ProxyFileSystem(fs, uri);
-  }
-
-  @Override
-  public Map<String, String> getHadoopConfNames() {
-    Map<String, String> ret = new HashMap<String, String>();
-    ret.put("HADOOPFS", "fs.default.name");
-    ret.put("HADOOPMAPFILENAME", "map.input.file");
-    ret.put("HADOOPMAPREDINPUTDIR", "mapred.input.dir");
-    ret.put("HADOOPMAPREDINPUTDIRRECURSIVE", "mapred.input.dir.recursive");
-    ret.put("MAPREDMAXSPLITSIZE", "mapred.max.split.size");
-    ret.put("MAPREDMINSPLITSIZE", "mapred.min.split.size");
-    ret.put("MAPREDMINSPLITSIZEPERNODE", "mapred.min.split.size.per.node");
-    ret.put("MAPREDMINSPLITSIZEPERRACK", "mapred.min.split.size.per.rack");
-    ret.put("HADOOPNUMREDUCERS", "mapred.reduce.tasks");
-    ret.put("HADOOPJOBNAME", "mapred.job.name");
-    ret.put("HADOOPSPECULATIVEEXECREDUCERS", "mapred.reduce.tasks.speculative.execution");
-    ret.put("MAPREDSETUPCLEANUPNEEDED", "mapred.committer.job.setup.cleanup.needed");
-    ret.put("MAPREDTASKCLEANUPNEEDED", "mapreduce.job.committer.task.cleanup.needed");
-    return ret;
-  }
-
-  @Override
-  public ZeroCopyReaderShim getZeroCopyReader(FSDataInputStream in, ByteBufferPoolShim pool) throws IOException {
-    /* not supported */
-    return null;
-  }
-
-  @Override
-  public DirectDecompressorShim getDirectDecompressor(DirectCompressionType codec) {
-    /* not supported */
-    return null;
-  }
-
-  @Override
-  public Configuration getConfiguration(org.apache.hadoop.mapreduce.JobContext context) {
-    return context.getConfiguration();
-  }
-
-  @Override
-  public JobConf getJobConf(org.apache.hadoop.mapred.JobContext context) {
-    return context.getJobConf();
-  }
-
-  @Override
-  public FileSystem getNonCachedFileSystem(URI uri, Configuration conf) throws IOException {
-    boolean origDisableHDFSCache =
-        conf.getBoolean("fs." + uri.getScheme() + ".impl.disable.cache", false);
-    // hadoop-1 compatible flag.
-    conf.setBoolean("fs." + uri.getScheme() + ".impl.disable.cache", true);
-    FileSystem fs = FileSystem.get(uri, conf);
-    conf.setBoolean("fs."
-        + uri.getScheme() + ".impl.disable.cache", origDisableHDFSCache);
-    return fs;
-  }
-
-  @Override
-  public void getMergedCredentials(JobConf jobConf) throws IOException {
-    throw new IOException("Merging of credentials not supported in this version of hadoop");
-  }
-
-  @Override
-  public void mergeCredentials(JobConf dest, JobConf src) throws IOException {
-    throw new IOException("Merging of credentials not supported in this version of hadoop");
-  }
-
-  @Override
-  public String getPassword(Configuration conf, String name) {
-    // No password API, just retrieve value from conf
-    return conf.get(name);
-  }
-
-  @Override
-  public boolean supportStickyBit() {
-    return false;
-  }
-
-  @Override
-  public boolean hasStickyBit(FsPermission permission) {
-    return false;
-  }
-
-  @Override
-  public boolean supportTrashFeature() {
-    return false;
-  }
-
-  @Override
-  public Path getCurrentTrashPath(Configuration conf, FileSystem fs) {
-    return null;
-  }
-
-  @Override
-  public boolean isDirectory(FileStatus fileStatus) {
-    return fileStatus.isDir();
-  }
-
-  /**
-   * Returns a shim to wrap KerberosName
-   */
-  @Override
-  public KerberosNameShim getKerberosNameShim(String name) throws IOException {
-    return new KerberosNameShim(name);
-  }
-
-  /**
-   * Shim for KerberosName
-   */
-  public class KerberosNameShim implements HadoopShimsSecure.KerberosNameShim {
-
-    private final KerberosName kerberosName;
-
-    public KerberosNameShim(String name) {
-      kerberosName = new KerberosName(name);
-    }
-
-    @Override
-    public String getDefaultRealm() {
-      return kerberosName.getDefaultRealm();
-    }
-
-    @Override
-    public String getServiceName() {
-      return kerberosName.getServiceName();
-    }
-
-    @Override
-    public String getHostName() {
-      return kerberosName.getHostName();
-    }
-
-    @Override
-    public String getRealm() {
-      return kerberosName.getRealm();
-    }
-
-    @Override
-    public String getShortName() throws IOException {
-      return kerberosName.getShortName();
-    }
-  }
-
-  @Override
-  public StoragePolicyShim getStoragePolicyShim(FileSystem fs) {
-    return null;
-  }
-
-  @Override
-  public boolean runDistCp(Path src, Path dst, Configuration conf) throws IOException {
-
-    DistCpOptions options = new DistCpOptions(Collections.singletonList(src), dst);
-    options.setSyncFolder(true);
-    options.setSkipCRC(true);
-    options.preserve(FileAttribute.BLOCKSIZE);
-    try {
-      DistCp distcp = new DistCp(conf, options);
-      distcp.execute();
-      return true;
-    } catch (Exception e) {
-      throw new IOException("Cannot execute DistCp process: " + e, e);
-    }
-  }
-
-  @Override
-  public HdfsEncryptionShim createHdfsEncryptionShim(FileSystem fs, Configuration conf) throws IOException {
-    return new HadoopShims.NoopHdfsEncryptionShim();
-  }
-
-  @Override
-  public Path getPathWithoutSchemeAndAuthority(Path path) {
-    return path;
-  }
-
-  @Override
-  public List<HdfsFileStatusWithId> listLocatedHdfsStatus(
-      FileSystem fs, Path path, PathFilter filter) throws IOException {
-    throw new UnsupportedOperationException("Not supported on old version");
-  }
-
-  @Override
-  public int readByteBuffer(FSDataInputStream file, ByteBuffer dest) throws IOException {
-    // Inefficient for direct buffers; only here for compat.
-    int pos = dest.position();
-    if (dest.hasArray()) {
-      int result = file.read(dest.array(), dest.arrayOffset(), dest.remaining());
-      if (result > 0) {
-        dest.position(pos + result);
-      }
-      return result;
-    } else {
-      byte[] arr = new byte[dest.remaining()];
-      int result = file.read(arr, 0, arr.length);
-      if (result > 0) {
-        dest.put(arr, 0, result);
-        dest.position(pos + result);
-      }
-      return result;
-    }
-  }
-
-  @Override
-  public void addDelegationTokens(FileSystem fs, Credentials cred, String uname) throws IOException {
-    Token<?> fsToken = fs.getDelegationToken(uname);
-    cred.addToken(fsToken.getService(), fsToken);
-  }
-
-  @Override
-  public long getFileId(FileSystem fs, String path) throws IOException {
-    throw new UnsupportedOperationException("Not supported on old version");
-  }
-}

http://git-wip-us.apache.org/repos/asf/hive/blob/f9517efd/shims/0.20S/src/main/java/org/apache/hadoop/hive/shims/Jetty20SShims.java
----------------------------------------------------------------------
diff --git a/shims/0.20S/src/main/java/org/apache/hadoop/hive/shims/Jetty20SShims.java b/shims/0.20S/src/main/java/org/apache/hadoop/hive/shims/Jetty20SShims.java
deleted file mode 100644
index 75659ff..0000000
--- a/shims/0.20S/src/main/java/org/apache/hadoop/hive/shims/Jetty20SShims.java
+++ /dev/null
@@ -1,53 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hive.shims;
-
-
-import org.mortbay.jetty.bio.SocketConnector;
-import org.mortbay.jetty.handler.RequestLogHandler;
-import org.mortbay.jetty.webapp.WebAppContext;
-
-import java.io.IOException;
-
-public class Jetty20SShims implements JettyShims {
-  public Server startServer(String listen, int port) throws IOException {
-    Server s = new Server();
-    s.setupListenerHostPort(listen, port);
-    return s;
-  }
-
-  private static class Server extends org.mortbay.jetty.Server implements JettyShims.Server {
-    public void addWar(String war, String contextPath) {
-      WebAppContext wac = new WebAppContext();
-      wac.setContextPath(contextPath);
-      wac.setWar(war);
-      RequestLogHandler rlh = new RequestLogHandler();
-      rlh.setHandler(wac);
-      this.addHandler(rlh);
-    }
-
-    public void setupListenerHostPort(String listen, int port)
-        throws IOException {
-
-      SocketConnector connector = new SocketConnector();
-      connector.setPort(port);
-      connector.setHost(listen);
-      this.addConnector(connector);
-    }
-  }
-}

http://git-wip-us.apache.org/repos/asf/hive/blob/f9517efd/shims/0.20S/src/main/java/org/apache/hadoop/mapred/WebHCatJTShim20S.java
----------------------------------------------------------------------
diff --git a/shims/0.20S/src/main/java/org/apache/hadoop/mapred/WebHCatJTShim20S.java b/shims/0.20S/src/main/java/org/apache/hadoop/mapred/WebHCatJTShim20S.java
deleted file mode 100644
index 367ea60..0000000
--- a/shims/0.20S/src/main/java/org/apache/hadoop/mapred/WebHCatJTShim20S.java
+++ /dev/null
@@ -1,123 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.mapred;
-
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.filecache.DistributedCache;
-import org.apache.hadoop.hive.shims.HadoopShims.WebHCatJTShim;
-import org.apache.hadoop.ipc.RPC;
-import org.apache.hadoop.mapreduce.Job;
-import org.apache.hadoop.net.NetUtils;
-import org.apache.hadoop.security.UserGroupInformation;
-
-import java.io.IOException;
-import java.net.InetSocketAddress;
-import java.net.URI;
-import java.util.HashSet;
-import java.util.Set;
-
-/**
- * This is in org.apache.hadoop.mapred package because it relies on
- * JobSubmissionProtocol which is package private
- */
-public class WebHCatJTShim20S implements WebHCatJTShim {
-  private JobSubmissionProtocol cnx;
-
-  /**
-   * Create a connection to the Job Tracker.
-   */
-  public WebHCatJTShim20S(Configuration conf, UserGroupInformation ugi)
-          throws IOException {
-    cnx = (JobSubmissionProtocol)
-            RPC.getProxy(JobSubmissionProtocol.class,
-                    JobSubmissionProtocol.versionID,
-                    getAddress(conf),
-                    ugi,
-                    conf,
-                    NetUtils.getSocketFactory(conf,
-                            JobSubmissionProtocol.class));
-  }
-
-  /**
-   * Grab a handle to a job that is already known to the JobTracker.
-   *
-   * @return Profile of the job, or null if not found.
-   */
-  public JobProfile getJobProfile(org.apache.hadoop.mapred.JobID jobid)
-          throws IOException {
-    return cnx.getJobProfile(jobid);
-  }
-
-  /**
-   * Grab a handle to a job that is already known to the JobTracker.
-   *
-   * @return Status of the job, or null if not found.
-   */
-  public org.apache.hadoop.mapred.JobStatus getJobStatus(org.apache.hadoop.mapred.JobID jobid)
-          throws IOException {
-    return cnx.getJobStatus(jobid);
-  }
-
-
-  /**
-   * Kill a job.
-   */
-  public void killJob(org.apache.hadoop.mapred.JobID jobid)
-          throws IOException {
-    cnx.killJob(jobid);
-  }
-
-  /**
-   * Get all the jobs submitted.
-   */
-  public org.apache.hadoop.mapred.JobStatus[] getAllJobs()
-          throws IOException {
-    return cnx.getAllJobs();
-  }
-
-  /**
-   * Close the connection to the Job Tracker.
-   */
-  public void close() {
-    RPC.stopProxy(cnx);
-  }
-
-  private InetSocketAddress getAddress(Configuration conf) {
-    String jobTrackerStr = conf.get("mapred.job.tracker", "localhost:8012");
-    return NetUtils.createSocketAddr(jobTrackerStr);
-  }
-
-  @Override
-  public void addCacheFile(URI uri, Job job) {
-    DistributedCache.addCacheFile(uri, job.getConfiguration());
-  }
-
-  /**
-   * Kill jobs is only supported on hadoop 2.0+.
-   */
-  @Override
-  public void killJobs(String tag, long timestamp) {
-    return;
-  }
-
-  /**
-   * Get jobs is only supported on hadoop 2.0+.
-   */
-  @Override
-  public Set<String> getJobs(String tag, long timestamp)
-  {
-    return new HashSet<String>();
-  }
-}
-

http://git-wip-us.apache.org/repos/asf/hive/blob/f9517efd/shims/0.23/pom.xml
----------------------------------------------------------------------
diff --git a/shims/0.23/pom.xml b/shims/0.23/pom.xml
index 3b1fb97..eee594e 100644
--- a/shims/0.23/pom.xml
+++ b/shims/0.23/pom.xml
@@ -54,31 +54,32 @@
     <dependency>
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-common</artifactId>
-     <version>${hadoop-23.version}</version>
+     <version>${hadoop.version}</version>
      <optional>true</optional>
    </dependency>
    <dependency>
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-hdfs</artifactId>
-     <version>${hadoop-23.version}</version>
+     <version>${hadoop.version}</version>
+     <optional>true</optional>
    </dependency>
    <dependency>
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-hdfs</artifactId>
-     <version>${hadoop-23.version}</version>
+     <version>${hadoop.version}</version>
      <type>test-jar</type>
      <optional>true</optional>
    </dependency>
    <dependency>
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-mapreduce-client-core</artifactId>
-     <version>${hadoop-23.version}</version>
+     <version>${hadoop.version}</version>
      <optional>true</optional>
    </dependency>
    <dependency>
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-mapreduce-client-jobclient</artifactId>
-     <version>${hadoop-23.version}</version>
+     <version>${hadoop.version}</version>
      <type>test-jar</type>
      <optional>true</optional>
    </dependency>
@@ -103,25 +104,25 @@
    <dependency>
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-yarn-api</artifactId>
-     <version>${hadoop-23.version}</version>
+     <version>${hadoop.version}</version>
      <optional>true</optional>
    </dependency>
    <dependency>
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-yarn-common</artifactId>
-     <version>${hadoop-23.version}</version>
+     <version>${hadoop.version}</version>
      <optional>true</optional>
    </dependency>
    <dependency>
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-yarn-client</artifactId>
-     <version>${hadoop-23.version}</version>
+     <version>${hadoop.version}</version>
      <optional>true</optional>
    </dependency>
    <dependency>
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-yarn-server-resourcemanager</artifactId>
-     <version>${hadoop-23.version}</version>
+     <version>${hadoop.version}</version>
      <exclusions>
        <exclusion>
          <groupId>javax.servlet</groupId>
@@ -139,15 +140,16 @@
    <dependency>
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-yarn-server-tests</artifactId>
-     <version>${hadoop-23.version}</version>
+     <version>${hadoop.version}</version>
      <optional>true</optional>
      <type>test-jar</type>
    </dependency>
    <dependency>
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-distcp</artifactId>
-     <version>${hadoop-23.version}</version>
+     <version>${hadoop.version}</version>
      <scope>provided</scope>
    </dependency>
-  </dependencies>
+  </dependencies>
+
 </project>

http://git-wip-us.apache.org/repos/asf/hive/blob/f9517efd/shims/aggregator/pom.xml
----------------------------------------------------------------------
diff --git a/shims/aggregator/pom.xml b/shims/aggregator/pom.xml
index 07f6d1b..d8c39a2 100644
--- a/shims/aggregator/pom.xml
+++ b/shims/aggregator/pom.xml
@@ -41,12 +41,6 @@
     </dependency>
     <dependency>
       <groupId>org.apache.hive.shims</groupId>
-      <artifactId>hive-shims-0.20S</artifactId>
-      <version>${project.version}</version>
-      <scope>runtime</scope>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.hive.shims</groupId>
       <artifactId>hive-shims-0.23</artifactId>
       <version>${project.version}</version>
       <scope>runtime</scope>

http://git-wip-us.apache.org/repos/asf/hive/blob/f9517efd/shims/common/pom.xml
----------------------------------------------------------------------
diff --git a/shims/common/pom.xml b/shims/common/pom.xml
index dfdec2b..76d8da5 100644
--- a/shims/common/pom.xml
+++ b/shims/common/pom.xml
@@ -62,8 +62,8 @@
     </dependency>
     <dependency>
       <groupId>org.apache.hadoop</groupId>
-      <artifactId>hadoop-core</artifactId>
-      <version>${hadoop-20S.version}</version>
+      <artifactId>hadoop-client</artifactId>
+      <version>${hadoop.version}</version>
       <optional>true</optional>
     </dependency>
     <dependency>

http://git-wip-us.apache.org/repos/asf/hive/blob/f9517efd/shims/common/src/main/java/org/apache/hadoop/hive/shims/ShimLoader.java
----------------------------------------------------------------------
diff --git a/shims/common/src/main/java/org/apache/hadoop/hive/shims/ShimLoader.java b/shims/common/src/main/java/org/apache/hadoop/hive/shims/ShimLoader.java
index c7fa11b..0fe3169 100644
--- a/shims/common/src/main/java/org/apache/hadoop/hive/shims/ShimLoader.java
+++ b/shims/common/src/main/java/org/apache/hadoop/hive/shims/ShimLoader.java
@@ -17,19 +17,18 @@
  */
 package org.apache.hadoop.hive.shims;
 
-import java.util.HashMap;
-import java.util.Map;
-
 import org.apache.hadoop.hive.thrift.HadoopThriftAuthBridge;
 import org.apache.hadoop.util.VersionInfo;
 import org.apache.log4j.AppenderSkeleton;
 
+import java.util.HashMap;
+import java.util.Map;
+
 /**
  * ShimLoader.
  *
  */
 public abstract class ShimLoader {
-  public static String HADOOP20SVERSIONNAME = "0.20S";
   public static String HADOOP23VERSIONNAME = "0.23";
 
   private static HadoopShims hadoopShims;
@@ -45,7 +44,6 @@
       new HashMap<String, String>();
 
   static {
-    HADOOP_SHIM_CLASSES.put(HADOOP20SVERSIONNAME, "org.apache.hadoop.hive.shims.Hadoop20SShims");
     HADOOP_SHIM_CLASSES.put(HADOOP23VERSIONNAME, "org.apache.hadoop.hive.shims.Hadoop23Shims");
   }
 
@@ -57,7 +55,6 @@
       new HashMap<String, String>();
 
   static {
-    JETTY_SHIM_CLASSES.put(HADOOP20SVERSIONNAME, "org.apache.hadoop.hive.shims.Jetty20SShims");
     JETTY_SHIM_CLASSES.put(HADOOP23VERSIONNAME, "org.apache.hadoop.hive.shims.Jetty23Shims");
   }
 
@@ -68,21 +65,17 @@
       new HashMap<String, String>();
 
   static {
-    EVENT_COUNTER_SHIM_CLASSES.put(HADOOP20SVERSIONNAME, "org.apache.hadoop.log.metrics" +
-        ".EventCounter");
     EVENT_COUNTER_SHIM_CLASSES.put(HADOOP23VERSIONNAME, "org.apache.hadoop.log.metrics" +
         ".EventCounter");
   }
 
   /**
-   * The names of the classes for shimming {@link HadoopThriftAuthBridge}
+   * The names of the classes for shimming HadoopThriftAuthBridge
    */
   private static final HashMap<String, String> HADOOP_THRIFT_AUTH_BRIDGE_CLASSES =
       new HashMap<String, String>();
 
   static {
-    HADOOP_THRIFT_AUTH_BRIDGE_CLASSES.put(HADOOP20SVERSIONNAME,
-        "org.apache.hadoop.hive.thrift.HadoopThriftAuthBridge");
     HADOOP_THRIFT_AUTH_BRIDGE_CLASSES.put(HADOOP23VERSIONNAME,
         "org.apache.hadoop.hive.thrift.HadoopThriftAuthBridge23");
   }
@@ -166,8 +159,6 @@
     }
     switch (Integer.parseInt(parts[0])) {
-    case 1:
-      return HADOOP20SVERSIONNAME;
     case 2:
       return HADOOP23VERSIONNAME;
     default:

http://git-wip-us.apache.org/repos/asf/hive/blob/f9517efd/shims/common/src/main/java/org/apache/hadoop/hive/thrift/HadoopThriftAuthBridge.java
----------------------------------------------------------------------
diff --git a/shims/common/src/main/java/org/apache/hadoop/hive/thrift/HadoopThriftAuthBridge.java b/shims/common/src/main/java/org/apache/hadoop/hive/thrift/HadoopThriftAuthBridge.java
index 7ed7265..6b0bd10 100644
--- a/shims/common/src/main/java/org/apache/hadoop/hive/thrift/HadoopThriftAuthBridge.java
+++ b/shims/common/src/main/java/org/apache/hadoop/hive/thrift/HadoopThriftAuthBridge.java
@@ -71,8 +71,11 @@
 import org.apache.thrift.transport.TTransportFactory;
 
 /**
  * Functions that bridge Thrift's SASL transports to Hadoop's
  * SASL callback handlers and authentication classes.
+ * HIVE-11378 This class is not directly used anymore. It now exists only as a shell to be
+ * extended by HadoopThriftAuthBridge23 in 0.23 shims. I have made it abstract
+ * to avoid maintenance errors.
  */
-public class HadoopThriftAuthBridge {
+public abstract class HadoopThriftAuthBridge {
   private static final Log LOG = LogFactory.getLog(HadoopThriftAuthBridge.class);
 
   public Client createClient() {
@@ -164,11 +167,7 @@
    * @return Hadoop SASL configuration
    */
 
-  public Map<String, String> getHadoopSaslProperties(Configuration conf) {
-    // Initialize the SaslRpcServer to ensure QOP parameters are read from conf
-    SaslRpcServer.init(conf);
-    return SaslRpcServer.SASL_PROPS;
-  }
+  public abstract Map<String, String> getHadoopSaslProperties(Configuration conf);
 
   public static class Client {
     /**

http://git-wip-us.apache.org/repos/asf/hive/blob/f9517efd/shims/pom.xml
----------------------------------------------------------------------
diff --git a/shims/pom.xml b/shims/pom.xml
index 12113d5..ffacf75 100644
--- a/shims/pom.xml
+++ b/shims/pom.xml
@@ -33,7 +33,6 @@
   <modules>
     <module>common</module>
-    <module>0.20S</module>
    <module>0.23</module>
    <module>scheduler</module>
    <module>aggregator</module>

http://git-wip-us.apache.org/repos/asf/hive/blob/f9517efd/shims/scheduler/pom.xml
----------------------------------------------------------------------
diff --git a/shims/scheduler/pom.xml b/shims/scheduler/pom.xml
index 407d57d..276b6cb 100644
--- a/shims/scheduler/pom.xml
+++ b/shims/scheduler/pom.xml
@@ -49,43 +49,43 @@
    <dependency>
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-common</artifactId>
-     <version>${hadoop-23.version}</version>
+     <version>${hadoop.version}</version>
      <optional>true</optional>
    </dependency>
    <dependency>
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-mapreduce-client-core</artifactId>
-     <version>${hadoop-23.version}</version>
+     <version>${hadoop.version}</version>
      <optional>true</optional>
    </dependency>
    <dependency>
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-yarn-api</artifactId>
-     <version>${hadoop-23.version}</version>
+     <version>${hadoop.version}</version>
      <optional>true</optional>
    </dependency>
    <dependency>
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-yarn-common</artifactId>
-     <version>${hadoop-23.version}</version>
+     <version>${hadoop.version}</version>
      <optional>true</optional>
    </dependency>
    <dependency>
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-yarn-client</artifactId>
-     <version>${hadoop-23.version}</version>
+     <version>${hadoop.version}</version>
      <optional>true</optional>
    </dependency>
    <dependency>
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-yarn-server-resourcemanager</artifactId>
-     <version>${hadoop-23.version}</version>
+     <version>${hadoop.version}</version>
      <optional>true</optional>
    </dependency>
    <dependency>
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-yarn-server-tests</artifactId>
-     <version>${hadoop-23.version}</version>
+     <version>${hadoop.version}</version>
      <optional>true</optional>
      <type>test-jar</type>
    </dependency>

http://git-wip-us.apache.org/repos/asf/hive/blob/f9517efd/storage-api/pom.xml
----------------------------------------------------------------------
diff --git a/storage-api/pom.xml b/storage-api/pom.xml
index 71b79f1..0af0d27 100644
--- a/storage-api/pom.xml
+++ b/storage-api/pom.xml
@@ -34,6 +34,12 @@
   <dependencies>
     <!-- test inter-project -->
     <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-common</artifactId>
+      <version>${hadoop.version}</version>
+      <optional>true</optional>
+    </dependency>
+    <dependency>
       <groupId>junit</groupId>
       <artifactId>junit</artifactId>
       <version>${junit.version}</version>
@@ -41,31 +47,6 @@
     </dependency>
   </dependencies>
 
-  <profiles>
-    <profile>
-      <id>hadoop-1</id>
-      <dependencies>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-core</artifactId>
-          <version>${hadoop-20S.version}</version>
-          <optional>true</optional>
-        </dependency>
-      </dependencies>
-    </profile>
-    <profile>
-      <id>hadoop-2</id>
-      <dependencies>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-common</artifactId>
-          <version>${hadoop-23.version}</version>
-          <optional>true</optional>
-        </dependency>
-      </dependencies>
-    </profile>
-  </profiles>
-
   <build>
     <sourceDirectory>${basedir}/src/java</sourceDirectory>
     <testSourceDirectory>${basedir}/src/test</testSourceDirectory>
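
With the 0.20S entries deleted from ShimLoader above, shim resolution now recognizes only Hadoop 2.x. A minimal sketch of the resulting lookup logic, simplified from the ShimLoader diff (ShimResolutionSketch and resolveShimClass are illustrative names, not Hive API):

    import java.util.HashMap;
    import java.util.Map;

    public class ShimResolutionSketch {
      // Only the 0.23 (Hadoop 2.x) shim remains after HIVE-11378.
      private static final Map<String, String> HADOOP_SHIM_CLASSES =
          new HashMap<String, String>();
      static {
        HADOOP_SHIM_CLASSES.put("0.23", "org.apache.hadoop.hive.shims.Hadoop23Shims");
      }

      // Maps a full Hadoop version string such as "2.6.0" to a shim key,
      // mirroring the trimmed switch statement in ShimLoader.getMajorVersion.
      static String getMajorVersion(String version) {
        String[] parts = version.split("\\.");
        if (parts.length < 2) {
          throw new RuntimeException("Illegal Hadoop version: " + version
              + " (expected A.B.* format)");
        }
        switch (Integer.parseInt(parts[0])) {
        case 2:
          return "0.23";
        default:
          // Hadoop 1.x no longer maps to "0.20S"; it is simply unsupported.
          throw new IllegalArgumentException("Unrecognized Hadoop major version: " + version);
        }
      }

      static String resolveShimClass(String version) {
        return HADOOP_SHIM_CLASSES.get(getMajorVersion(version));
      }
    }

For example, resolveShimClass("2.6.0") yields "org.apache.hadoop.hive.shims.Hadoop23Shims", while any 1.x version now fails fast instead of loading the deleted Hadoop20SShims.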