http://git-wip-us.apache.org/repos/asf/ambari/blob/ba3d6926/ambari-metrics/ambari-metrics-hadoop-timelineservice/src/test/resources/hbase-default.xml ---------------------------------------------------------------------- diff --git a/ambari-metrics/ambari-metrics-hadoop-timelineservice/src/test/resources/hbase-default.xml b/ambari-metrics/ambari-metrics-hadoop-timelineservice/src/test/resources/hbase-default.xml deleted file mode 100644 index 7502346..0000000 --- a/ambari-metrics/ambari-metrics-hadoop-timelineservice/src/test/resources/hbase-default.xml +++ /dev/null @@ -1,36 +0,0 @@ -<?xml version="1.0"?> -<?xml-stylesheet type="text/xsl" href="configuration.xsl"?> -<!-- -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ ---> -<configuration> - <property> - <name>hbase.defaults.for.version.skip</name> - <value>true</value> - <description> - Set to true to skip the 'hbase.defaults.for.version' check. - Setting this to true can be useful in contexts other than - the other side of a maven generation; i.e. running in an - ide. You'll want to set this boolean to true to avoid - seeing the RuntimException complaint: "hbase-default.xml file - seems to be for and old version of HBase (@@@VERSION@@@), this - version is X.X.X-SNAPSHOT" - </description> - </property> -</configuration>
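The deleted test hbase-default.xml above existed only to set hbase.defaults.for.version.skip, so the embedded HBase used by the old timeline-service tests would not fail the hbase-default.xml version check when run outside a Maven build. The same effect can be had without shipping a test hbase-default.xml by setting the flag on the configuration object directly. The sketch below is illustrative only and not part of this commit; it assumes the standard Hadoop/HBase Configuration API:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;

    public final class TestHBaseConf {
        public static Configuration create() {
            // Same intent as the <property> block in the removed file:
            // skip the hbase-default.xml version check (useful when running in an IDE).
            Configuration conf = HBaseConfiguration.create();
            conf.setBoolean("hbase.defaults.for.version.skip", true);
            return conf;
        }
    }
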
http://git-wip-us.apache.org/repos/asf/ambari/blob/ba3d6926/ambari-metrics/ambari-metrics-hadoop-timelineservice/src/test/resources/lib/org/apache/phoenix/phoenix-core-tests/4.2.0/phoenix-core-tests-4.2.0.jar ---------------------------------------------------------------------- diff --git a/ambari-metrics/ambari-metrics-hadoop-timelineservice/src/test/resources/lib/org/apache/phoenix/phoenix-core-tests/4.2.0/phoenix-core-tests-4.2.0.jar b/ambari-metrics/ambari-metrics-hadoop-timelineservice/src/test/resources/lib/org/apache/phoenix/phoenix-core-tests/4.2.0/phoenix-core-tests-4.2.0.jar deleted file mode 100644 index 468409a..0000000 Binary files a/ambari-metrics/ambari-metrics-hadoop-timelineservice/src/test/resources/lib/org/apache/phoenix/phoenix-core-tests/4.2.0/phoenix-core-tests-4.2.0.jar and /dev/null differ http://git-wip-us.apache.org/repos/asf/ambari/blob/ba3d6926/ambari-metrics/ambari-metrics-hadoop-timelineservice/src/test/resources/lib/org/apache/phoenix/phoenix-core-tests/4.2.0/phoenix-core-tests-4.2.0.pom ---------------------------------------------------------------------- diff --git a/ambari-metrics/ambari-metrics-hadoop-timelineservice/src/test/resources/lib/org/apache/phoenix/phoenix-core-tests/4.2.0/phoenix-core-tests-4.2.0.pom b/ambari-metrics/ambari-metrics-hadoop-timelineservice/src/test/resources/lib/org/apache/phoenix/phoenix-core-tests/4.2.0/phoenix-core-tests-4.2.0.pom deleted file mode 100644 index 83c7106..0000000 --- a/ambari-metrics/ambari-metrics-hadoop-timelineservice/src/test/resources/lib/org/apache/phoenix/phoenix-core-tests/4.2.0/phoenix-core-tests-4.2.0.pom +++ /dev/null @@ -1,9 +0,0 @@ -<?xml version="1.0" encoding="UTF-8"?> -<project xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd" xmlns="http://maven.apache.org/POM/4.0.0" - xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"> - <modelVersion>4.0.0</modelVersion> - <groupId>org.apache.phoenix</groupId> - <artifactId>phoenix-core-tests</artifactId> - <version>4.2.0</version> - <description>POM was created from install:install-file</description> -</project> http://git-wip-us.apache.org/repos/asf/ambari/blob/ba3d6926/ambari-metrics/ambari-metrics-hadoop-timelineservice/src/test/resources/lib/org/apache/phoenix/phoenix-core-tests/maven-metadata-local.xml ---------------------------------------------------------------------- diff --git a/ambari-metrics/ambari-metrics-hadoop-timelineservice/src/test/resources/lib/org/apache/phoenix/phoenix-core-tests/maven-metadata-local.xml b/ambari-metrics/ambari-metrics-hadoop-timelineservice/src/test/resources/lib/org/apache/phoenix/phoenix-core-tests/maven-metadata-local.xml deleted file mode 100644 index 53bbfc2..0000000 --- a/ambari-metrics/ambari-metrics-hadoop-timelineservice/src/test/resources/lib/org/apache/phoenix/phoenix-core-tests/maven-metadata-local.xml +++ /dev/null @@ -1,12 +0,0 @@ -<?xml version="1.0" encoding="UTF-8"?> -<metadata> - <groupId>org.apache.phoenix</groupId> - <artifactId>phoenix-core-tests</artifactId> - <versioning> - <release>4.2.0</release> - <versions> - <version>4.2.0</version> - </versions> - <lastUpdated>20141103224551</lastUpdated> - </versioning> -</metadata> http://git-wip-us.apache.org/repos/asf/ambari/blob/ba3d6926/ambari-metrics/ambari-metrics-timelineservice/src/main/assemblies/ats.xml ---------------------------------------------------------------------- diff --git a/ambari-metrics/ambari-metrics-timelineservice/src/main/assemblies/ats.xml 
b/ambari-metrics/ambari-metrics-timelineservice/src/main/assemblies/ats.xml new file mode 100644 index 0000000..21a6b36 --- /dev/null +++ b/ambari-metrics/ambari-metrics-timelineservice/src/main/assemblies/ats.xml @@ -0,0 +1,34 @@ +<?xml version="1.0"?> +<!-- + Licensed to the Apache Software Foundation (ASF) under one or more + contributor license agreements. See the NOTICE file distributed with + this work for additional information regarding copyright ownership. + The ASF licenses this file to You under the Apache License, Version 2.0 + (the "License"); you may not use this file except in compliance with + the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +--> +<assembly> + <!--This 'all' id is not appended to the produced bundle because we do this: + http://maven.apache.org/plugins/maven-assembly-plugin/faq.html#required-classifiers + --> + <id>dist</id> + <formats> + <format>dir</format> + <format>tar.gz</format> + </formats> + <includeBaseDirectory>false</includeBaseDirectory> + <files> + <file> + <source>${project.build.directory}/${artifact.artifactId}-${artifact.version}.jar</source> + <outputDirectory>ambari-metrics-${project.version}/lib/ambari-metrics</outputDirectory> + </file> + </files> +</assembly> http://git-wip-us.apache.org/repos/asf/ambari/blob/ba3d6926/ambari-metrics/ambari-metrics-timelineservice/src/main/assemblies/empty.xml ---------------------------------------------------------------------- diff --git a/ambari-metrics/ambari-metrics-timelineservice/src/main/assemblies/empty.xml b/ambari-metrics/ambari-metrics-timelineservice/src/main/assemblies/empty.xml new file mode 100644 index 0000000..35738b1 --- /dev/null +++ b/ambari-metrics/ambari-metrics-timelineservice/src/main/assemblies/empty.xml @@ -0,0 +1,21 @@ +<!-- + Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
+--> +<assembly> + <id>empty</id> + <formats/> +</assembly> http://git-wip-us.apache.org/repos/asf/ambari/blob/ba3d6926/ambari-metrics/ambari-metrics-timelineservice/src/main/assemblies/phoenix-client.xml ---------------------------------------------------------------------- diff --git a/ambari-metrics/ambari-metrics-timelineservice/src/main/assemblies/phoenix-client.xml b/ambari-metrics/ambari-metrics-timelineservice/src/main/assemblies/phoenix-client.xml new file mode 100644 index 0000000..beca5bd --- /dev/null +++ b/ambari-metrics/ambari-metrics-timelineservice/src/main/assemblies/phoenix-client.xml @@ -0,0 +1,62 @@ +<?xml version='1.0'?> +<!-- + ~ Licensed to the Apache Software Foundation (ASF) under one + ~ or more contributor license agreements. See the NOTICE file + ~ distributed with this work for additional information + ~ regarding copyright ownership. The ASF licenses this file + ~ to you under the Apache License, Version 2.0 (the + ~ "License"); you may not use this file except in compliance + ~ with the License. You may obtain a copy of the License at + ~ + ~ http://www.apache.org/licenses/LICENSE-2.0 + ~ + ~ Unless required by applicable law or agreed to in writing, software + ~ distributed under the License is distributed on an "AS IS" BASIS, + ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + ~ See the License for the specific language governing permissions and + ~ limitations under the License. + --> + +<assembly xmlns="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.0" + xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" + xsi:schemaLocation="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.0 http://maven.apache.org/xsd/assembly-1.1.0.xsd"> + <id>client</id> + <!-- All the dependencies (unpacked) necessary to run phoenix from a single, stand-alone jar --> + <formats> + <format>jar</format> + </formats> + <includeBaseDirectory>false</includeBaseDirectory> + + <componentDescriptors> + <componentDescriptor>src/main/assemblies/phoenix-components-minimal.xml</componentDescriptor> + <componentDescriptor>src/main/assemblies/phoenix-components-major-client.xml</componentDescriptor> + </componentDescriptors> + + <dependencySets> + <dependencySet> + <!-- Unpack all the dependencies to class files, since java doesn't support + jar of jars for running --> + <unpack>true</unpack> + <!-- save these dependencies to the top-level --> + <outputDirectory>/</outputDirectory> + <includes> + <include>jline:jline</include> + <include>sqlline:sqlline</include> + <include>org.apache.hbase:hbase*</include> + <include>org.cloudera.htrace:htrace-core</include> + <include>io.netty:netty</include> + <include>commons-codec:commons-codec</include> + </includes> + </dependencySet> + + <!-- Make sure we get all the components, not just the minimal client ones (e.g. 
+ phoenix-flume, phoenix-pig, etc) --> + <dependencySet> + <outputDirectory>/</outputDirectory> + <unpack>true</unpack> + <includes> + <include>org.apache.phoenix:phoenix-*</include> + </includes> + </dependencySet> + </dependencySets> +</assembly> http://git-wip-us.apache.org/repos/asf/ambari/blob/ba3d6926/ambari-metrics/ambari-metrics-timelineservice/src/main/assemblies/phoenix-components-major-client.xml ---------------------------------------------------------------------- diff --git a/ambari-metrics/ambari-metrics-timelineservice/src/main/assemblies/phoenix-components-major-client.xml b/ambari-metrics/ambari-metrics-timelineservice/src/main/assemblies/phoenix-components-major-client.xml new file mode 100644 index 0000000..13692fe --- /dev/null +++ b/ambari-metrics/ambari-metrics-timelineservice/src/main/assemblies/phoenix-components-major-client.xml @@ -0,0 +1,53 @@ +<?xml version='1.0'?> +<!-- + ~ Licensed to the Apache Software Foundation (ASF) under one + ~ or more contributor license agreements. See the NOTICE file + ~ distributed with this work for additional information + ~ regarding copyright ownership. The ASF licenses this file + ~ to you under the Apache License, Version 2.0 (the + ~ "License"); you may not use this file except in compliance + ~ with the License. You may obtain a copy of the License at + ~ + ~ http://www.apache.org/licenses/LICENSE-2.0 + ~ + ~ Unless required by applicable law or agreed to in writing, software + ~ distributed under the License is distributed on an "AS IS" BASIS, + ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + ~ See the License for the specific language governing permissions and + ~ limitations under the License. + --> + +<component> + <!-- Components that the client needs (except for HBase) --> + <dependencySets> + <dependencySet> + <!-- Unpack all the dependencies to class files, since java doesn't support + jar of jars for running --> + <unpack>true</unpack> + <!-- save these dependencies to the top-level --> + <outputDirectory>/</outputDirectory> + <!-- Maybe a blacklist is easier? --> + <includes> + <!-- We use a newer version of guava than HBase - this might be an issue? --> + <include>com.google.guava:guava</include> + <!-- HBase also pulls in these dependencies on its own, should we include-them? 
--> + <include>com.google.protobuf:protobuf-java</include> + <include>org.slf4j:slf4j-api</include> + <include>org.slf4j:slf4j-log4j12</include> + <include>org.apache.zookeeper:zookeeper</include> + <include>log4j:log4j</include> + <include>org.apache.hadoop:hadoop*</include> + <include>commons-configuration:commons-configuration</include> + <include>commons-io:commons-io</include> + <include>commons-logging:commons-logging</include> + <include>commons-lang:commons-lang</include> + <include>commons-cli:commons-cli</include> + <include>org.apache.commons:commons-csv</include> + <include>org.codehaus.jackson:jackson-mapper-asl</include> + <include>org.codehaus.jackson:jackson-core-asl</include> + <include>org.xerial.snappy:snappy-java</include> + <include>commons-collections:commons-collections</include> + </includes> + </dependencySet> + </dependencySets> +</component> http://git-wip-us.apache.org/repos/asf/ambari/blob/ba3d6926/ambari-metrics/ambari-metrics-timelineservice/src/main/assemblies/phoenix-components-minimal.xml ---------------------------------------------------------------------- diff --git a/ambari-metrics/ambari-metrics-timelineservice/src/main/assemblies/phoenix-components-minimal.xml b/ambari-metrics/ambari-metrics-timelineservice/src/main/assemblies/phoenix-components-minimal.xml new file mode 100644 index 0000000..bf7de85 --- /dev/null +++ b/ambari-metrics/ambari-metrics-timelineservice/src/main/assemblies/phoenix-components-minimal.xml @@ -0,0 +1,71 @@ +<?xml version='1.0'?> +<!-- + ~ Licensed to the Apache Software Foundation (ASF) under one + ~ or more contributor license agreements. See the NOTICE file + ~ distributed with this work for additional information + ~ regarding copyright ownership. The ASF licenses this file + ~ to you under the Apache License, Version 2.0 (the + ~ "License"); you may not use this file except in compliance + ~ with the License. You may obtain a copy of the License at + ~ + ~ http://www.apache.org/licenses/LICENSE-2.0 + ~ + ~ Unless required by applicable law or agreed to in writing, software + ~ distributed under the License is distributed on an "AS IS" BASIS, + ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + ~ See the License for the specific language governing permissions and + ~ limitations under the License. 
+ --> + +<component> + <!-- Just the basic components that Phoenix pulls in, that is not a transitive dependency from Hadoop/HBase/Pig --> + <dependencySets> + <dependencySet> + <!-- Unpack all the dependencies to class files, since java doesn't support + jar of jars for running --> + <unpack>true</unpack> + <!-- save these dependencies to the top-level --> + <outputDirectory>/</outputDirectory> + <!-- Just include the extra things that phoenix needs --> + <includes> + <include>org.antlr:antlr*</include> + </includes> + </dependencySet> + + <dependencySet> + <outputDirectory>/</outputDirectory> + <unpack>true</unpack> + <includes> + <include>org.apache.phoenix:phoenix-*</include> + </includes> + <excludes> + <exclude>org.apache.phoenix:phoenix-flume</exclude> + <exclude>org.apache.phoenix:phoenix-pig</exclude> + </excludes> + </dependencySet> + </dependencySets> + + <fileSets> + <fileSet> + <!--Get misc project files --> + <directory>${project.basedir}/..</directory> + <outputDirectory>/</outputDirectory> + <includes> + <include>*.txt*</include> + <include>*.md</include> + <include>NOTICE*</include> + </includes> + <excludes> + <exclude>build.txt</exclude> + </excludes> + </fileSet> + <fileSet> + <!--Get map-red-config properties files --> + <directory>${project.basedir}/../config</directory> + <outputDirectory>/</outputDirectory> + <includes> + <include>csv-bulk-load-config.properties</include> + </includes> + </fileSet> + </fileSets> +</component> http://git-wip-us.apache.org/repos/asf/ambari/blob/ba3d6926/ambari-metrics/ambari-metrics-timelineservice/src/main/assemblies/phoenix-server.xml ---------------------------------------------------------------------- diff --git a/ambari-metrics/ambari-metrics-timelineservice/src/main/assemblies/phoenix-server.xml b/ambari-metrics/ambari-metrics-timelineservice/src/main/assemblies/phoenix-server.xml new file mode 100644 index 0000000..be8a516 --- /dev/null +++ b/ambari-metrics/ambari-metrics-timelineservice/src/main/assemblies/phoenix-server.xml @@ -0,0 +1,46 @@ +<?xml version='1.0'?> +<!-- + ~ Licensed to the Apache Software Foundation (ASF) under one + ~ or more contributor license agreements. See the NOTICE file + ~ distributed with this work for additional information + ~ regarding copyright ownership. The ASF licenses this file + ~ to you under the Apache License, Version 2.0 (the + ~ "License"); you may not use this file except in compliance + ~ with the License. You may obtain a copy of the License at + ~ + ~ http://www.apache.org/licenses/LICENSE-2.0 + ~ + ~ Unless required by applicable law or agreed to in writing, software + ~ distributed under the License is distributed on an "AS IS" BASIS, + ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + ~ See the License for the specific language governing permissions and + ~ limitations under the License. 
+ --> + +<assembly xmlns="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.0" + xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" + xsi:schemaLocation="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.0 http://maven.apache.org/xsd/assembly-1.1.0.xsd"> + <!-- build the phoenix server side jar, that includes phoenix-hadoopX-compat, phoenix-hadoop-compat and antlr --> + <id>server</id> + <formats> + <format>jar</format> + </formats> + <includeBaseDirectory>false</includeBaseDirectory> + <dependencySets> + <dependencySet> + <outputDirectory>/</outputDirectory> + <unpack>true</unpack> + <includes> + <include>org.apache.phoenix:phoenix-core</include> + <include>org.apache.phoenix:phoenix-hadoop*</include> + </includes> + </dependencySet> + <dependencySet> + <unpack>true</unpack> + <outputDirectory>/</outputDirectory> + <includes> + <include>org.antlr:antlr*</include> + </includes> + </dependencySet> + </dependencySets> +</assembly> http://git-wip-us.apache.org/repos/asf/ambari/blob/ba3d6926/ambari-metrics/ambari-metrics-timelineservice/src/main/assemblies/simulator.xml ---------------------------------------------------------------------- diff --git a/ambari-metrics/ambari-metrics-timelineservice/src/main/assemblies/simulator.xml b/ambari-metrics/ambari-metrics-timelineservice/src/main/assemblies/simulator.xml new file mode 100644 index 0000000..0f77976 --- /dev/null +++ b/ambari-metrics/ambari-metrics-timelineservice/src/main/assemblies/simulator.xml @@ -0,0 +1,68 @@ +<?xml version="1.0"?> +<!-- + Licensed to the Apache Software Foundation (ASF) under one or more + contributor license agreements. See the NOTICE file distributed with + this work for additional information regarding copyright ownership. + The ASF licenses this file to You under the Apache License, Version 2.0 + (the "License"); you may not use this file except in compliance with + the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +--> +<assembly> + <id>dist</id> + <formats> + <format>dir</format> + <format>tar.gz</format> + </formats> + <includeBaseDirectory>false</includeBaseDirectory> + <files> + <file> + <source>${project.build.directory}/${artifact.artifactId}-simulator-${artifact.version}.jar</source> + <outputDirectory>ambari-metrics-${project.version}/lib/ambari-metrics</outputDirectory> + </file> + </files> + <fileSets> + <!-- + Adds startup scripts to the root directory of zip package. The startup + scripts are located to src/main/scripts directory as stated by Maven + conventions. 
+ --> + <fileSet> + <directory>${basedir}/src/main/resources/scripts</directory> + <outputDirectory>ambari-metrics-${project.version}/bin</outputDirectory> + <includes> + <include>*.sh</include> + </includes> + <fileMode>0755</fileMode> + </fileSet> + <!-- adds jar package to the root directory of zip package --> +<!-- <fileSet> + <directory>${project.build.directory}</directory> + <outputDirectory></outputDirectory> + <includes> + <include>*.jar</include> + </includes> + </fileSet>--> + </fileSets> + + + <dependencySets> + <dependencySet> + <outputDirectory>ambari-metrics-${project.version}/lib/ambari-metrics</outputDirectory> +<!-- + <useProjectArtifact>false</useProjectArtifact> +--> + <unpack>false</unpack> + <scope>compile</scope> + + </dependencySet> + </dependencySets> + +</assembly> \ No newline at end of file http://git-wip-us.apache.org/repos/asf/ambari/blob/ba3d6926/ambari-metrics/ambari-metrics-timelineservice/src/main/conf/hbase-site-metrics-service.xml ---------------------------------------------------------------------- diff --git a/ambari-metrics/ambari-metrics-timelineservice/src/main/conf/hbase-site-metrics-service.xml b/ambari-metrics/ambari-metrics-timelineservice/src/main/conf/hbase-site-metrics-service.xml new file mode 100644 index 0000000..dabef50 --- /dev/null +++ b/ambari-metrics/ambari-metrics-timelineservice/src/main/conf/hbase-site-metrics-service.xml @@ -0,0 +1,80 @@ +<?xml version="1.0"?> +<?xml-stylesheet type="text/xsl" href="configuration.xsl"?> +<!-- +/** + * + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +--> +<configuration> + <property> + <name>hbase.rootdir</name> + <value>file:///grid/0/hbase</value> + </property> + <property> + <name>hbase.tmp.dir</name> + <value>/grid/0/hbase-tmp</value> + </property> + <property> + <name>hbase.cluster.distributed</name> + <value>true</value> + </property> + <property> + <name>hbase.master.wait.on.regionservers.mintostart</name> + <value>1</value> + </property> + <property> + <name>hbase.zookeeper.quorum</name> + <value>localhost</value> + </property> + <property> + <name>phoenix.query.spoolThresholdBytes</name> + <value>12582912</value> + </property> + <property> + <name>hbase.zookeeper.property.dataDir</name> + <value>/grid/0/zookeeper</value> + </property> + <property> + <name>hbase.client.scanner.caching</name> + <value>10000</value> + </property> + <property> + <name>hfile.block.cache.size</name> + <value>0.3</value> + </property> + <property> + <name>hbase.regionserver.global.memstore.upperLimit</name> + <value>0.5</value> + </property> + <property> + <name>hbase.regionserver.global.memstore.lowerLimit</name> + <value>0.4</value> + </property> + <property> + <name>phoenix.groupby.maxCacheSize</name> + <value>307200000</value> + </property> + <property> + <name>hbase.hregion.memstore.block.multiplier</name> + <value>4</value> + </property> + <property> + <name>hbase.hstore.flusher.count</name> + <value>2</value> + </property> +</configuration> \ No newline at end of file http://git-wip-us.apache.org/repos/asf/ambari/blob/ba3d6926/ambari-metrics/ambari-metrics-timelineservice/src/main/conf/simulator-log4j.xml ---------------------------------------------------------------------- diff --git a/ambari-metrics/ambari-metrics-timelineservice/src/main/conf/simulator-log4j.xml b/ambari-metrics/ambari-metrics-timelineservice/src/main/conf/simulator-log4j.xml new file mode 100644 index 0000000..ac505f6 --- /dev/null +++ b/ambari-metrics/ambari-metrics-timelineservice/src/main/conf/simulator-log4j.xml @@ -0,0 +1,45 @@ +<?xml version="1.0"?> +<!-- + Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
+--> +<!DOCTYPE log4j:configuration SYSTEM "log4j.dtd"> + +<log4j:configuration debug="false"> + + <appender name="console" class="org.apache.log4j.ConsoleAppender"> + <param name="target" value="System.out" /> + <param name="threshold" value="debug" /> + <layout class="org.apache.log4j.PatternLayout"> + <param name="ConversionPattern" value="%d{ISO8601} %-5p [%c{1}] - %m%n" /> + </layout> + </appender> + + <appender name="fileAppender" class="org.apache.log4j.RollingFileAppender"> + <param name="Threshold" value="INFO" /> + <param name="File" value="loadsimulator.log"/> + <layout class="org.apache.log4j.PatternLayout"> + <param name="ConversionPattern" value="%d %-5p [%c{1}] %m %n" /> + </layout> + </appender> + + <root> + <priority value ="info" /> + <!-- we may want async appender--> + <appender-ref ref="fileAppender" /> + </root> + +</log4j:configuration> \ No newline at end of file http://git-wip-us.apache.org/repos/asf/ambari/blob/ba3d6926/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryClientService.java ---------------------------------------------------------------------- diff --git a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryClientService.java b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryClientService.java new file mode 100644 index 0000000..e15198b --- /dev/null +++ b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryClientService.java @@ -0,0 +1,211 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.hadoop.yarn.server.applicationhistoryservice; + +import java.io.IOException; +import java.net.InetSocketAddress; +import java.util.ArrayList; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.apache.hadoop.classification.InterfaceAudience.Private; +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.ipc.Server; +import org.apache.hadoop.service.AbstractService; +import org.apache.hadoop.yarn.api.ApplicationHistoryProtocol; +import org.apache.hadoop.yarn.api.protocolrecords.CancelDelegationTokenRequest; +import org.apache.hadoop.yarn.api.protocolrecords.CancelDelegationTokenResponse; +import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationAttemptReportRequest; +import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationAttemptReportResponse; +import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationAttemptsRequest; +import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationAttemptsResponse; +import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationReportRequest; +import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationReportResponse; +import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationsRequest; +import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationsResponse; +import org.apache.hadoop.yarn.api.protocolrecords.GetContainerReportRequest; +import org.apache.hadoop.yarn.api.protocolrecords.GetContainerReportResponse; +import org.apache.hadoop.yarn.api.protocolrecords.GetContainersRequest; +import org.apache.hadoop.yarn.api.protocolrecords.GetContainersResponse; +import org.apache.hadoop.yarn.api.protocolrecords.GetDelegationTokenRequest; +import org.apache.hadoop.yarn.api.protocolrecords.GetDelegationTokenResponse; +import org.apache.hadoop.yarn.api.protocolrecords.RenewDelegationTokenRequest; +import org.apache.hadoop.yarn.api.protocolrecords.RenewDelegationTokenResponse; +import org.apache.hadoop.yarn.api.records.ApplicationAttemptReport; +import org.apache.hadoop.yarn.api.records.ApplicationId; +import org.apache.hadoop.yarn.api.records.ApplicationReport; +import org.apache.hadoop.yarn.api.records.ContainerReport; +import org.apache.hadoop.yarn.conf.YarnConfiguration; +import org.apache.hadoop.yarn.exceptions.ApplicationAttemptNotFoundException; +import org.apache.hadoop.yarn.exceptions.ApplicationNotFoundException; +import org.apache.hadoop.yarn.exceptions.ContainerNotFoundException; +import org.apache.hadoop.yarn.exceptions.YarnException; +import org.apache.hadoop.yarn.ipc.YarnRPC; + +public class ApplicationHistoryClientService extends AbstractService { + private static final Log LOG = LogFactory + .getLog(ApplicationHistoryClientService.class); + private ApplicationHistoryManager history; + private ApplicationHistoryProtocol protocolHandler; + private Server server; + private InetSocketAddress bindAddress; + + public ApplicationHistoryClientService(ApplicationHistoryManager history) { + super("ApplicationHistoryClientService"); + this.history = history; + this.protocolHandler = new ApplicationHSClientProtocolHandler(); + } + + protected void serviceStart() throws Exception { + Configuration conf = getConfig(); + YarnRPC rpc = YarnRPC.create(conf); + InetSocketAddress address = + conf.getSocketAddr(YarnConfiguration.TIMELINE_SERVICE_ADDRESS, + YarnConfiguration.DEFAULT_TIMELINE_SERVICE_ADDRESS, + YarnConfiguration.DEFAULT_TIMELINE_SERVICE_PORT); + + server = + rpc.getServer(ApplicationHistoryProtocol.class, protocolHandler, + 
address, conf, null, conf.getInt( + YarnConfiguration.TIMELINE_SERVICE_HANDLER_THREAD_COUNT, + YarnConfiguration.DEFAULT_TIMELINE_SERVICE_CLIENT_THREAD_COUNT)); + + server.start(); + this.bindAddress = + conf.updateConnectAddr(YarnConfiguration.TIMELINE_SERVICE_ADDRESS, + server.getListenerAddress()); + LOG.info("Instantiated ApplicationHistoryClientService at " + + this.bindAddress); + + super.serviceStart(); + } + + @Override + protected void serviceStop() throws Exception { + if (server != null) { + server.stop(); + } + super.serviceStop(); + } + + @Private + public ApplicationHistoryProtocol getClientHandler() { + return this.protocolHandler; + } + + @Private + public InetSocketAddress getBindAddress() { + return this.bindAddress; + } + + private class ApplicationHSClientProtocolHandler implements + ApplicationHistoryProtocol { + + @Override + public CancelDelegationTokenResponse cancelDelegationToken( + CancelDelegationTokenRequest request) throws YarnException, IOException { + // TODO Auto-generated method stub + return null; + } + + @Override + public GetApplicationAttemptReportResponse getApplicationAttemptReport( + GetApplicationAttemptReportRequest request) throws YarnException, + IOException { + try { + GetApplicationAttemptReportResponse response = + GetApplicationAttemptReportResponse.newInstance(history + .getApplicationAttempt(request.getApplicationAttemptId())); + return response; + } catch (IOException e) { + throw new ApplicationAttemptNotFoundException(e.getMessage()); + } + } + + @Override + public GetApplicationAttemptsResponse getApplicationAttempts( + GetApplicationAttemptsRequest request) throws YarnException, + IOException { + GetApplicationAttemptsResponse response = + GetApplicationAttemptsResponse + .newInstance(new ArrayList<ApplicationAttemptReport>(history + .getApplicationAttempts(request.getApplicationId()).values())); + return response; + } + + @Override + public GetApplicationReportResponse getApplicationReport( + GetApplicationReportRequest request) throws YarnException, IOException { + try { + ApplicationId applicationId = request.getApplicationId(); + GetApplicationReportResponse response = + GetApplicationReportResponse.newInstance(history + .getApplication(applicationId)); + return response; + } catch (IOException e) { + throw new ApplicationNotFoundException(e.getMessage()); + } + } + + @Override + public GetApplicationsResponse getApplications( + GetApplicationsRequest request) throws YarnException, IOException { + GetApplicationsResponse response = + GetApplicationsResponse.newInstance(new ArrayList<ApplicationReport>( + history.getAllApplications().values())); + return response; + } + + @Override + public GetContainerReportResponse getContainerReport( + GetContainerReportRequest request) throws YarnException, IOException { + try { + GetContainerReportResponse response = + GetContainerReportResponse.newInstance(history.getContainer(request + .getContainerId())); + return response; + } catch (IOException e) { + throw new ContainerNotFoundException(e.getMessage()); + } + } + + @Override + public GetContainersResponse getContainers(GetContainersRequest request) + throws YarnException, IOException { + GetContainersResponse response = + GetContainersResponse.newInstance(new ArrayList<ContainerReport>( + history.getContainers(request.getApplicationAttemptId()).values())); + return response; + } + + @Override + public GetDelegationTokenResponse getDelegationToken( + GetDelegationTokenRequest request) throws YarnException, IOException { + // TODO 
Auto-generated method stub + return null; + } + + @Override + public RenewDelegationTokenResponse renewDelegationToken( + RenewDelegationTokenRequest request) throws YarnException, IOException { + // TODO Auto-generated method stub + return null; + } + } +} http://git-wip-us.apache.org/repos/asf/ambari/blob/ba3d6926/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryManager.java ---------------------------------------------------------------------- diff --git a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryManager.java b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryManager.java new file mode 100644 index 0000000..db25d29 --- /dev/null +++ b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryManager.java @@ -0,0 +1,28 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.yarn.server.applicationhistoryservice; + +import org.apache.hadoop.classification.InterfaceAudience; +import org.apache.hadoop.classification.InterfaceStability; +import org.apache.hadoop.yarn.server.api.ApplicationContext; + +@InterfaceAudience.Public +@InterfaceStability.Unstable +public interface ApplicationHistoryManager extends ApplicationContext { +} http://git-wip-us.apache.org/repos/asf/ambari/blob/ba3d6926/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryManagerImpl.java ---------------------------------------------------------------------- diff --git a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryManagerImpl.java b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryManagerImpl.java new file mode 100644 index 0000000..85a5e3a --- /dev/null +++ b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryManagerImpl.java @@ -0,0 +1,250 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.yarn.server.applicationhistoryservice; + +import java.io.IOException; +import java.util.HashMap; +import java.util.Map; +import java.util.Map.Entry; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.apache.hadoop.classification.InterfaceAudience.Private; +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.service.AbstractService; +import org.apache.hadoop.util.ReflectionUtils; +import org.apache.hadoop.yarn.api.records.ApplicationAttemptId; +import org.apache.hadoop.yarn.api.records.ApplicationAttemptReport; +import org.apache.hadoop.yarn.api.records.ApplicationId; +import org.apache.hadoop.yarn.api.records.ApplicationReport; +import org.apache.hadoop.yarn.api.records.ContainerId; +import org.apache.hadoop.yarn.api.records.ContainerReport; +import org.apache.hadoop.yarn.conf.YarnConfiguration; +import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ApplicationAttemptHistoryData; +import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ApplicationHistoryData; +import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ContainerHistoryData; +import org.apache.hadoop.yarn.server.applicationhistoryservice.timeline.MemoryTimelineStore; +import org.apache.hadoop.yarn.webapp.util.WebAppUtils; + +import com.google.common.annotations.VisibleForTesting; + +import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.TimelineMetricConfiguration.DISABLE_APPLICATION_TIMELINE_STORE; + +public class ApplicationHistoryManagerImpl extends AbstractService implements + ApplicationHistoryManager { + private static final Log LOG = LogFactory + .getLog(ApplicationHistoryManagerImpl.class); + private static final String UNAVAILABLE = "N/A"; + + private ApplicationHistoryStore historyStore; + private String serverHttpAddress; + + public ApplicationHistoryManagerImpl() { + super(ApplicationHistoryManagerImpl.class.getName()); + } + + @Override + protected void serviceInit(Configuration conf) throws Exception { + LOG.info("ApplicationHistory Init"); + historyStore = createApplicationHistoryStore(conf); + historyStore.init(conf); + serverHttpAddress = WebAppUtils.getHttpSchemePrefix(conf) + + WebAppUtils.getAHSWebAppURLWithoutScheme(conf); + super.serviceInit(conf); + } + + @Override + protected void serviceStart() throws Exception { + LOG.info("Starting ApplicationHistory"); + historyStore.start(); + super.serviceStart(); + } + + @Override + protected void serviceStop() throws Exception { + LOG.info("Stopping ApplicationHistory"); + historyStore.stop(); + super.serviceStop(); + } + + protected ApplicationHistoryStore createApplicationHistoryStore( + Configuration conf) { + if (conf.getBoolean(DISABLE_APPLICATION_TIMELINE_STORE, true)) { + LOG.info("Explicitly disabled application timeline store."); + return new NullApplicationHistoryStore(); + } + return ReflectionUtils.newInstance(conf.getClass( + YarnConfiguration.APPLICATION_HISTORY_STORE, + NullApplicationHistoryStore.class, + ApplicationHistoryStore.class), conf); + } + + 
@Override + public ContainerReport getAMContainer(ApplicationAttemptId appAttemptId) + throws IOException { + ApplicationReport app = + getApplication(appAttemptId.getApplicationId()); + return convertToContainerReport(historyStore.getAMContainer(appAttemptId), + app == null ? null : app.getUser()); + } + + @Override + public Map<ApplicationId, ApplicationReport> getAllApplications() + throws IOException { + Map<ApplicationId, ApplicationHistoryData> histData = + historyStore.getAllApplications(); + HashMap<ApplicationId, ApplicationReport> applicationsReport = + new HashMap<ApplicationId, ApplicationReport>(); + for (Entry<ApplicationId, ApplicationHistoryData> entry : histData + .entrySet()) { + applicationsReport.put(entry.getKey(), + convertToApplicationReport(entry.getValue())); + } + return applicationsReport; + } + + @Override + public ApplicationReport getApplication(ApplicationId appId) + throws IOException { + return convertToApplicationReport(historyStore.getApplication(appId)); + } + + private ApplicationReport convertToApplicationReport( + ApplicationHistoryData appHistory) throws IOException { + ApplicationAttemptId currentApplicationAttemptId = null; + String trackingUrl = UNAVAILABLE; + String host = UNAVAILABLE; + int rpcPort = -1; + + ApplicationAttemptHistoryData lastAttempt = + getLastAttempt(appHistory.getApplicationId()); + if (lastAttempt != null) { + currentApplicationAttemptId = lastAttempt.getApplicationAttemptId(); + trackingUrl = lastAttempt.getTrackingURL(); + host = lastAttempt.getHost(); + rpcPort = lastAttempt.getRPCPort(); + } + return ApplicationReport.newInstance(appHistory.getApplicationId(), + currentApplicationAttemptId, appHistory.getUser(), appHistory.getQueue(), + appHistory.getApplicationName(), host, rpcPort, null, + appHistory.getYarnApplicationState(), appHistory.getDiagnosticsInfo(), + trackingUrl, appHistory.getStartTime(), appHistory.getFinishTime(), + appHistory.getFinalApplicationStatus(), null, "", 100, + appHistory.getApplicationType(), null); + } + + private ApplicationAttemptHistoryData getLastAttempt(ApplicationId appId) + throws IOException { + Map<ApplicationAttemptId, ApplicationAttemptHistoryData> attempts = + historyStore.getApplicationAttempts(appId); + ApplicationAttemptId prevMaxAttemptId = null; + for (ApplicationAttemptId attemptId : attempts.keySet()) { + if (prevMaxAttemptId == null) { + prevMaxAttemptId = attemptId; + } else { + if (prevMaxAttemptId.getAttemptId() < attemptId.getAttemptId()) { + prevMaxAttemptId = attemptId; + } + } + } + return attempts.get(prevMaxAttemptId); + } + + private ApplicationAttemptReport convertToApplicationAttemptReport( + ApplicationAttemptHistoryData appAttemptHistory) { + return ApplicationAttemptReport.newInstance( + appAttemptHistory.getApplicationAttemptId(), appAttemptHistory.getHost(), + appAttemptHistory.getRPCPort(), appAttemptHistory.getTrackingURL(), + appAttemptHistory.getDiagnosticsInfo(), + appAttemptHistory.getYarnApplicationAttemptState(), + appAttemptHistory.getMasterContainerId()); + } + + @Override + public ApplicationAttemptReport getApplicationAttempt( + ApplicationAttemptId appAttemptId) throws IOException { + return convertToApplicationAttemptReport(historyStore + .getApplicationAttempt(appAttemptId)); + } + + @Override + public Map<ApplicationAttemptId, ApplicationAttemptReport> + getApplicationAttempts(ApplicationId appId) throws IOException { + Map<ApplicationAttemptId, ApplicationAttemptHistoryData> histData = + historyStore.getApplicationAttempts(appId); + 
HashMap<ApplicationAttemptId, ApplicationAttemptReport> applicationAttemptsReport = + new HashMap<ApplicationAttemptId, ApplicationAttemptReport>(); + for (Entry<ApplicationAttemptId, ApplicationAttemptHistoryData> entry : histData + .entrySet()) { + applicationAttemptsReport.put(entry.getKey(), + convertToApplicationAttemptReport(entry.getValue())); + } + return applicationAttemptsReport; + } + + @Override + public ContainerReport getContainer(ContainerId containerId) + throws IOException { + ApplicationReport app = + getApplication(containerId.getApplicationAttemptId().getApplicationId()); + return convertToContainerReport(historyStore.getContainer(containerId), + app == null ? null: app.getUser()); + } + + private ContainerReport convertToContainerReport( + ContainerHistoryData containerHistory, String user) { + // If the container has the aggregated log, add the server root url + String logUrl = WebAppUtils.getAggregatedLogURL( + serverHttpAddress, + containerHistory.getAssignedNode().toString(), + containerHistory.getContainerId().toString(), + containerHistory.getContainerId().toString(), + user); + return ContainerReport.newInstance(containerHistory.getContainerId(), + containerHistory.getAllocatedResource(), + containerHistory.getAssignedNode(), containerHistory.getPriority(), + containerHistory.getStartTime(), containerHistory.getFinishTime(), + containerHistory.getDiagnosticsInfo(), logUrl, + containerHistory.getContainerExitStatus(), + containerHistory.getContainerState()); + } + + @Override + public Map<ContainerId, ContainerReport> getContainers( + ApplicationAttemptId appAttemptId) throws IOException { + ApplicationReport app = + getApplication(appAttemptId.getApplicationId()); + Map<ContainerId, ContainerHistoryData> histData = + historyStore.getContainers(appAttemptId); + HashMap<ContainerId, ContainerReport> containersReport = + new HashMap<ContainerId, ContainerReport>(); + for (Entry<ContainerId, ContainerHistoryData> entry : histData.entrySet()) { + containersReport.put(entry.getKey(), + convertToContainerReport(entry.getValue(), + app == null ? null : app.getUser())); + } + return containersReport; + } + + @Private + @VisibleForTesting + public ApplicationHistoryStore getHistoryStore() { + return this.historyStore; + } +} http://git-wip-us.apache.org/repos/asf/ambari/blob/ba3d6926/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryReader.java ---------------------------------------------------------------------- diff --git a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryReader.java b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryReader.java new file mode 100644 index 0000000..590853a --- /dev/null +++ b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryReader.java @@ -0,0 +1,117 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.yarn.server.applicationhistoryservice; + +import java.io.IOException; +import java.util.Map; + +import org.apache.hadoop.classification.InterfaceAudience; +import org.apache.hadoop.classification.InterfaceStability; +import org.apache.hadoop.yarn.api.records.ApplicationAttemptId; +import org.apache.hadoop.yarn.api.records.ApplicationId; +import org.apache.hadoop.yarn.api.records.ContainerId; +import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ApplicationAttemptHistoryData; +import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ApplicationHistoryData; +import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ContainerHistoryData; + +@InterfaceAudience.Public +@InterfaceStability.Unstable +public interface ApplicationHistoryReader { + + /** + * This method returns Application {@link ApplicationHistoryData} for the + * specified {@link ApplicationId}. + * + * @param appId + * + * @return {@link ApplicationHistoryData} for the ApplicationId. + * @throws IOException + */ + ApplicationHistoryData getApplication(ApplicationId appId) throws IOException; + + /** + * This method returns all Application {@link ApplicationHistoryData}s + * + * @return map of {@link ApplicationId} to {@link ApplicationHistoryData}s. + * @throws IOException + */ + Map<ApplicationId, ApplicationHistoryData> getAllApplications() + throws IOException; + + /** + * Application can have multiple application attempts + * {@link ApplicationAttemptHistoryData}. This method returns the all + * {@link ApplicationAttemptHistoryData}s for the Application. + * + * @param appId + * + * @return all {@link ApplicationAttemptHistoryData}s for the Application. + * @throws IOException + */ + Map<ApplicationAttemptId, ApplicationAttemptHistoryData> + getApplicationAttempts(ApplicationId appId) throws IOException; + + /** + * This method returns {@link ApplicationAttemptHistoryData} for specified + * {@link ApplicationId}. + * + * @param appAttemptId + * {@link ApplicationAttemptId} + * @return {@link ApplicationAttemptHistoryData} for ApplicationAttemptId + * @throws IOException + */ + ApplicationAttemptHistoryData getApplicationAttempt( + ApplicationAttemptId appAttemptId) throws IOException; + + /** + * This method returns {@link ContainerHistoryData} for specified + * {@link ContainerId}. + * + * @param containerId + * {@link ContainerId} + * @return {@link ContainerHistoryData} for ContainerId + * @throws IOException + */ + ContainerHistoryData getContainer(ContainerId containerId) throws IOException; + + /** + * This method returns {@link ContainerHistoryData} for specified + * {@link ApplicationAttemptId}. + * + * @param appAttemptId + * {@link ApplicationAttemptId} + * @return {@link ContainerHistoryData} for ApplicationAttemptId + * @throws IOException + */ + ContainerHistoryData getAMContainer(ApplicationAttemptId appAttemptId) + throws IOException; + + /** + * This method returns Map{@link ContainerId} to {@link ContainerHistoryData} + * for specified {@link ApplicationAttemptId}. 
+ * + * @param appAttemptId + * {@link ApplicationAttemptId} + * @return Map{@link ContainerId} to {@link ContainerHistoryData} for + * ApplicationAttemptId + * @throws IOException + */ + Map<ContainerId, ContainerHistoryData> getContainers( + ApplicationAttemptId appAttemptId) throws IOException; +} http://git-wip-us.apache.org/repos/asf/ambari/blob/ba3d6926/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryServer.java ---------------------------------------------------------------------- diff --git a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryServer.java b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryServer.java new file mode 100644 index 0000000..3adb3b8 --- /dev/null +++ b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryServer.java @@ -0,0 +1,197 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.hadoop.yarn.server.applicationhistoryservice; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.apache.hadoop.classification.InterfaceAudience.Private; +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem; +import org.apache.hadoop.metrics2.source.JvmMetrics; +import org.apache.hadoop.service.CompositeService; +import org.apache.hadoop.service.Service; +import org.apache.hadoop.util.ExitUtil; +import org.apache.hadoop.util.ReflectionUtils; +import org.apache.hadoop.util.ShutdownHookManager; +import org.apache.hadoop.util.StringUtils; +import org.apache.hadoop.yarn.YarnUncaughtExceptionHandler; +import org.apache.hadoop.yarn.conf.YarnConfiguration; +import org.apache.hadoop.yarn.exceptions.YarnRuntimeException; +import org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.HBaseTimelineMetricStore; +import org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.TimelineMetricConfiguration; +import org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.TimelineMetricStore; +import org.apache.hadoop.yarn.server.applicationhistoryservice.timeline.MemoryTimelineStore; +import org.apache.hadoop.yarn.server.applicationhistoryservice.timeline.TimelineStore; +import org.apache.hadoop.yarn.server.applicationhistoryservice.timeline.LeveldbTimelineStore; +import org.apache.hadoop.yarn.server.applicationhistoryservice.webapp.AHSWebApp; +import org.apache.hadoop.yarn.webapp.WebApp; +import org.apache.hadoop.yarn.webapp.WebApps; +import org.apache.hadoop.yarn.webapp.util.WebAppUtils; + +import com.google.common.annotations.VisibleForTesting; + +import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.TimelineMetricConfiguration.*; + +/** + * History server that keeps track of all types of history in the cluster. + * Application specific history to start with. 
+ */ +public class ApplicationHistoryServer extends CompositeService { + + public static final int SHUTDOWN_HOOK_PRIORITY = 30; + private static final Log LOG = LogFactory + .getLog(ApplicationHistoryServer.class); + + ApplicationHistoryClientService ahsClientService; + ApplicationHistoryManager historyManager; + TimelineStore timelineStore; + TimelineMetricStore timelineMetricStore; + private WebApp webApp; + + public ApplicationHistoryServer() { + super(ApplicationHistoryServer.class.getName()); + } + + @Override + protected void serviceInit(Configuration conf) throws Exception { + historyManager = createApplicationHistory(); + ahsClientService = createApplicationHistoryClientService(historyManager); + addService(ahsClientService); + addService((Service) historyManager); + timelineStore = createTimelineStore(conf); + timelineMetricStore = createTimelineMetricStore(conf); + addIfService(timelineStore); + addIfService(timelineMetricStore); + super.serviceInit(conf); + } + + @Override + protected void serviceStart() throws Exception { + DefaultMetricsSystem.initialize("ApplicationHistoryServer"); + JvmMetrics.initSingleton("ApplicationHistoryServer", null); + + startWebApp(); + super.serviceStart(); + } + + @Override + protected void serviceStop() throws Exception { + if (webApp != null) { + webApp.stop(); + } + + DefaultMetricsSystem.shutdown(); + super.serviceStop(); + } + + @Private + @VisibleForTesting + public ApplicationHistoryClientService getClientService() { + return this.ahsClientService; + } + + protected ApplicationHistoryClientService + createApplicationHistoryClientService( + ApplicationHistoryManager historyManager) { + return new ApplicationHistoryClientService(historyManager); + } + + protected ApplicationHistoryManager createApplicationHistory() { + return new ApplicationHistoryManagerImpl(); + } + + protected ApplicationHistoryManager getApplicationHistory() { + return this.historyManager; + } + + static ApplicationHistoryServer launchAppHistoryServer(String[] args) { + Thread + .setDefaultUncaughtExceptionHandler(new YarnUncaughtExceptionHandler()); + StringUtils.startupShutdownMessage(ApplicationHistoryServer.class, args, + LOG); + ApplicationHistoryServer appHistoryServer = null; + try { + appHistoryServer = new ApplicationHistoryServer(); + ShutdownHookManager.get().addShutdownHook( + new CompositeServiceShutdownHook(appHistoryServer), + SHUTDOWN_HOOK_PRIORITY); + YarnConfiguration conf = new YarnConfiguration(); + appHistoryServer.init(conf); + appHistoryServer.start(); + } catch (Throwable t) { + LOG.fatal("Error starting ApplicationHistoryServer", t); + ExitUtil.terminate(-1, "Error starting ApplicationHistoryServer"); + } + return appHistoryServer; + } + + public static void main(String[] args) { + launchAppHistoryServer(args); + } + + protected ApplicationHistoryManager createApplicationHistoryManager( + Configuration conf) { + return new ApplicationHistoryManagerImpl(); + } + + protected TimelineStore createTimelineStore(Configuration conf) { + if (conf.getBoolean(DISABLE_APPLICATION_TIMELINE_STORE, true)) { + LOG.info("Explicitly disabled application timeline store."); + return new MemoryTimelineStore(); + } + return ReflectionUtils.newInstance(conf.getClass( + YarnConfiguration.TIMELINE_SERVICE_STORE, LeveldbTimelineStore.class, + TimelineStore.class), conf); + } + + protected TimelineMetricStore createTimelineMetricStore(Configuration conf) { + LOG.info("Creating metrics store."); + return ReflectionUtils.newInstance(HBaseTimelineMetricStore.class, conf); + } 
+ + protected void startWebApp() { + String bindAddress = WebAppUtils.getAHSWebAppURLWithoutScheme(getConfig()); + LOG.info("Instantiating AHSWebApp at " + bindAddress); + try { + webApp = + WebApps + .$for("applicationhistory", ApplicationHistoryClientService.class, + ahsClientService, "ws") + .with(getConfig()) + .withHttpSpnegoPrincipalKey( + YarnConfiguration.TIMELINE_SERVICE_WEBAPP_SPNEGO_USER_NAME_KEY) + .withHttpSpnegoKeytabKey( + YarnConfiguration.TIMELINE_SERVICE_WEBAPP_SPNEGO_KEYTAB_FILE_KEY) + .at(bindAddress) + .start(new AHSWebApp(historyManager, timelineStore, timelineMetricStore)); + } catch (Exception e) { + String msg = "AHSWebApp failed to start."; + LOG.error(msg, e); + throw new YarnRuntimeException(msg, e); + } + } + /** + * @return ApplicationTimelineStore + */ + @Private + @VisibleForTesting + public TimelineStore getTimelineStore() { + return timelineStore; + } +} http://git-wip-us.apache.org/repos/asf/ambari/blob/ba3d6926/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryStore.java ---------------------------------------------------------------------- diff --git a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryStore.java b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryStore.java new file mode 100644 index 0000000..c26faef --- /dev/null +++ b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryStore.java @@ -0,0 +1,37 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.yarn.server.applicationhistoryservice; + +import org.apache.hadoop.classification.InterfaceAudience; +import org.apache.hadoop.classification.InterfaceStability; +import org.apache.hadoop.service.Service; + +/** + * This class is the abstraction of the storage of the application history data. It + * is a {@link Service}, such that the implementation of this class can make use + * of the service life cycle to initialize and clean up the storage. Users can + * access the storage via {@link ApplicationHistoryReader} and + * {@link ApplicationHistoryWriter} interfaces.
+ * + */ +@InterfaceAudience.Public +@InterfaceStability.Unstable +public interface ApplicationHistoryStore extends Service, + ApplicationHistoryReader, ApplicationHistoryWriter { +} http://git-wip-us.apache.org/repos/asf/ambari/blob/ba3d6926/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryWriter.java ---------------------------------------------------------------------- diff --git a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryWriter.java b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryWriter.java new file mode 100644 index 0000000..09ba36d --- /dev/null +++ b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryWriter.java @@ -0,0 +1,112 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.yarn.server.applicationhistoryservice; + +import java.io.IOException; + +import org.apache.hadoop.classification.InterfaceAudience.Private; +import org.apache.hadoop.classification.InterfaceStability.Unstable; +import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ApplicationAttemptFinishData; +import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ApplicationAttemptStartData; +import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ApplicationFinishData; +import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ApplicationStartData; +import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ContainerFinishData; +import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ContainerStartData; + +/** + * It is the interface of writing the application history, exposing the methods + * of writing {@link ApplicationStartData}, {@link ApplicationFinishData} + * {@link ApplicationAttemptStartData}, {@link ApplicationAttemptFinishData}, + * {@link ContainerStartData} and {@link ContainerFinishData}. + */ +@Private +@Unstable +public interface ApplicationHistoryWriter { + + /** + * This method writes the information of <code>RMApp</code> that is available + * when it starts. + * + * @param appStart + * the record of the information of <code>RMApp</code> that is + * available when it starts + * @throws IOException + */ + void applicationStarted(ApplicationStartData appStart) throws IOException; + + /** + * This method writes the information of <code>RMApp</code> that is available + * when it finishes. 
+ * + * @param appFinish + * the record of the information of <code>RMApp</code> that is + * available when it finishes + * @throws IOException + */ + void applicationFinished(ApplicationFinishData appFinish) throws IOException; + + /** + * This method writes the information of <code>RMAppAttempt</code> that is + * available when it starts. + * + * @param appAttemptStart + * the record of the information of <code>RMAppAttempt</code> that is + * available when it starts + * @throws IOException + */ + void applicationAttemptStarted(ApplicationAttemptStartData appAttemptStart) + throws IOException; + + /** + * This method writes the information of <code>RMAppAttempt</code> that is + * available when it finishes. + * + * @param appAttemptFinish + * the record of the information of <code>RMAppAttempt</code> that is + * available when it finishes + * @throws IOException + */ + void + applicationAttemptFinished(ApplicationAttemptFinishData appAttemptFinish) + throws IOException; + + /** + * This method writes the information of <code>RMContainer</code> that is + * available when it starts. + * + * @param containerStart + * the record of the information of <code>RMContainer</code> that is + * available when it starts + * @throws IOException + */ + void containerStarted(ContainerStartData containerStart) throws IOException; + + /** + * This method writes the information of <code>RMContainer</code> that is + * available when it finishes. + * + * @param containerFinish + * the record of the information of <code>RMContainer</code> that is + * available when it finishes + * @throws IOException + */ + void containerFinished(ContainerFinishData containerFinish) + throws IOException; + +}
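Taken together, the classes added above compose into a single CompositeService: ApplicationHistoryServer wires the client service, the history manager, the timeline store and the timeline metric store together, while the ApplicationHistoryStore, ApplicationHistoryReader and ApplicationHistoryWriter interfaces define how history records reach and leave that storage. Below is a minimal, illustrative sketch (not part of this patch) of driving the server in-process, for example from a test; it relies only on the public methods shown in this commit, and the class name HistoryServerSmokeTest is hypothetical.

import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.server.applicationhistoryservice.ApplicationHistoryServer;

public class HistoryServerSmokeTest {

  public static void main(String[] args) throws Exception {
    YarnConfiguration conf = new YarnConfiguration();
    ApplicationHistoryServer server = new ApplicationHistoryServer();
    try {
      // serviceInit() builds the client service, the history manager, the
      // timeline store (MemoryTimelineStore while the application timeline
      // store is left disabled, which is the default in createTimelineStore())
      // and the HBase-backed timeline metric store.
      server.init(conf);
      // serviceStart() initializes the metrics system and brings up the AHS
      // web app at the address resolved from the configuration.
      server.start();
      // History written through the ApplicationHistoryWriter side of the store
      // is served back through the client service and the web app.
      System.out.println("Timeline store in use: "
          + server.getTimelineStore().getClass().getName());
    } finally {
      // serviceStop() stops the web app and shuts the metrics system down.
      server.stop();
    }
  }
}

Note that createTimelineMetricStore() always returns the HBase/Phoenix-backed store, so running a sketch like this for real would need a reachable backend (or an overridden factory method); the intent here is only to show how the composite service's init/start/stop lifecycle ties the pieces added in this commit together.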