Repository: ambari
Updated Branches:
  refs/heads/trunk b04fb769a -> b0634464e


http://git-wip-us.apache.org/repos/asf/ambari/blob/b0634464/contrib/views/hive/src/main/resources/view.xml
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/resources/view.xml b/contrib/views/hive/src/main/resources/view.xml
index 56967d2..8e1b4c5 100644
--- a/contrib/views/hive/src/main/resources/view.xml
+++ b/contrib/views/hive/src/main/resources/view.xml
@@ -23,19 +23,106 @@
 
     <validator-class>org.apache.ambari.view.hive.PropertyValidator</validator-class>
 
+    <!-- Hive Configs -->
+    <parameter>
+      <name>hive.host</name>
+      <description>Enter the HiveServer2 host. Host must be accessible from Ambari Server.</description>
+      <label>HiveServer2 Host</label>
+      <placeholder>127.0.0.1</placeholder>
+      <cluster-config>fake</cluster-config>
+      <required>true</required>
+    </parameter>
+
+    <parameter>
+      <name>hive.port</name>
+      <description>HiveServer2 Thrift port (example: 10000).</description>
+      <label>HiveServer2 Thrift port</label>
+      <placeholder>10000</placeholder>
+      <default-value>10000</default-value>
+      <cluster-config>hive-site/hive.server2.thrift.port</cluster-config>
+      <required>true</required>
+    </parameter>
+
+    <parameter>
+      <name>hive.auth</name>
+      <description>Semicolon-separated authentication configs.</description>
+      <label>Hive Authentication</label>
+      <placeholder>auth=NONE</placeholder>
+      <default-value>auth=NONE</default-value>
+      <required>false</required>
+    </parameter>
+
     <!-- HDFS Configs -->
     <parameter>
         <name>webhdfs.url</name>
-        <description>Enter the WebHDFS FileSystem URI. Typically this is the dfs.namenode.http-address property in the hdfs-site.xml configuration. URL must be accessible from Ambari Server.</description>
+        <description>Enter the WebHDFS FileSystem URI. Typically this is the dfs.namenode.http-address
+            property in the hdfs-site.xml configuration. URL must be accessible from Ambari Server.</description>
         <label>WebHDFS FileSystem URI</label>
         <placeholder>webhdfs://namenode:50070</placeholder>
         <required>true</required>
+        <cluster-config>core-site/fs.defaultFS</cluster-config>
+    </parameter>
+    <parameter>
+        <name>webhdfs.nameservices</name>
+        <description>Comma-separated list of nameservices. Value of the hdfs-site/dfs.nameservices property</description>
+        <label>Logical name of the NameNode cluster</label>
+        <required>false</required>
+        <cluster-config>hdfs-site/dfs.nameservices</cluster-config>
+    </parameter>
+    <parameter>
+        <name>webhdfs.ha.namenodes.list</name>
+        <description>Comma-separated list of namenodes for a given nameservice.
+          Value of the hdfs-site/dfs.ha.namenodes.[nameservice] property</description>
+        <label>List of NameNodes</label>
+        <required>false</required>
+        <cluster-config>fake</cluster-config>
+    </parameter>
+    <parameter>
+        <name>webhdfs.ha.namenode.rpc-address.nn1</name>
+        <description>RPC address for the first NameNode.
+          Value of the hdfs-site/dfs.namenode.rpc-address.[nameservice].[namenode1] property</description>
+        <label>First NameNode RPC Address</label>
+        <required>false</required>
+        <cluster-config>fake</cluster-config>
+    </parameter>
+    <parameter>
+        <name>webhdfs.ha.namenode.rpc-address.nn2</name>
+        <description>RPC address for the second NameNode.
+          Value of the hdfs-site/dfs.namenode.rpc-address.[nameservice].[namenode2] property</description>
+        <label>Second NameNode RPC Address</label>
+        <required>false</required>
+        <cluster-config>fake</cluster-config>
+    </parameter>
+    <parameter>
+        <name>webhdfs.ha.namenode.http-address.nn1</name>
+        <description>WebHDFS address for the first NameNode.
+          Value of the hdfs-site/dfs.namenode.http-address.[nameservice].[namenode1] property</description>
+        <label>First NameNode HTTP (WebHDFS) Address</label>
+        <required>false</required>
+        <cluster-config>fake</cluster-config>
+    </parameter>
+    <parameter>
+        <name>webhdfs.ha.namenode.http-address.nn2</name>
+        <description>WebHDFS address for the second NameNode.
+          Value of the hdfs-site/dfs.namenode.http-address.[nameservice].[namenode2] property</description>
+        <label>Second NameNode HTTP (WebHDFS) Address</label>
+        <required>false</required>
+        <cluster-config>fake</cluster-config>
+    </parameter>
+    <parameter>
+        <name>webhdfs.client.failover.proxy.provider</name>
+        <description>The Java class that HDFS clients use to contact the Active NameNode.
+          Value of the hdfs-site/dfs.client.failover.proxy.provider.[nameservice] property</description>
+        <label>Failover Proxy Provider</label>
+        <required>false</required>
+        <cluster-config>fake</cluster-config>
     </parameter>
 
     <parameter>
         <name>webhdfs.username</name>
        <description>doAs for proxy user for HDFS. By default, uses the currently logged-in Ambari user.</description>
         <label>WebHDFS Username</label>
+        <default-value>${username}</default-value>
         <required>false</required>
     </parameter>
 
@@ -44,7 +131,6 @@
         <description>Semicolon-separated authentication configs.</description>
         <label>WebHDFS Authentication</label>
         <placeholder>auth=SIMPLE</placeholder>
-        <default-value>auth=SIMPLE</default-value>
         <required>false</required>
     </parameter>
 
@@ -84,36 +170,11 @@
     </parameter>
 
     <parameter>
-        <name>hive.host</name>
-        <description>Enter the HiveServer2 host. Host must be accessible from Ambari Server.</description>
-        <label>HiveServer2 Host</label>
-        <placeholder>127.0.0.1</placeholder>
-        <required>true</required>
-    </parameter>
-
-    <parameter>
-        <name>hive.port</name>
-        <description>HiveServer2 Thrift port (example: 10000).</description>
-        <label>HiveServer2 Thrift port</label>
-        <placeholder>10000</placeholder>
-        <default-value>10000</default-value>
-        <required>true</required>
-    </parameter>
-
-    <parameter>
-        <name>hive.auth</name>
-        <description>Semicolon-separated authentication configs.</description>
-        <label>Hive Authentication</label>
-        <placeholder>auth=NONE</placeholder>
-        <default-value>auth=NONE</default-value>
-        <required>false</required>
-    </parameter>
-
-    <parameter>
         <name>yarn.ats.url</name>
        <description>The URL to the YARN Application Timeline Server, used to provide Jobs information, typically, this is the yarn.timeline-service.webapp.address property in the yarn-site.xml configuration.</description>
         <label>YARN Application Timeline Server URL</label>
         <placeholder>http://yarn.ats.address:8188</placeholder>
+        <cluster-config>yarn-site/yarn.timeline-service.webapp.address</cluster-config>
         <required>true</required>
     </parameter>
 
@@ -122,6 +183,7 @@
        <description>The URL to the YARN ResourceManager, used to provide YARN Application data.</description>
         <label>YARN ResourceManager URL</label>
         <placeholder>http://yarn.resourcemanager.address:8088</placeholder>
+        <cluster-config>yarn-site/yarn.resourcemanager.webapp.address</cluster-config>
         <required>true</required>
     </parameter>
 
@@ -202,4 +264,14 @@
             <id-property>id</id-property>
         </entity>
     </persistence>
+
+    <auto-instance>
+        <name>AUTO_INSTANCE</name>
+        <label>Auto Create instance for the Hive view</label>
+        <description>This view instance is auto-created when the Hive service is added to a cluster.</description>
+        <stack-id>HDP-2.*</stack-id>
+        <services>
+           <service>Hive</service>
+        </services>
+    </auto-instance>
 </view>
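
The notable change above is the <cluster-config> element: when a view instance is associated with a cluster, Ambari fills the parameter from the named cluster config (e.g. hive-site/hive.server2.thrift.port), while "fake" appears to act as a placeholder where no single property maps directly and the value is resolved in view code. Inside the view, these parameters surface through the standard ViewContext properties map. A minimal sketch of consuming them — the helper class and the connect string format are illustrative assumptions; only ViewContext.getProperties() and the parameter names come from the diff:

    import org.apache.ambari.view.ViewContext;

    import java.util.Map;

    // Hypothetical helper, not the view's actual connection code.
    public class HiveViewSettings {
      private final ViewContext context;

      public HiveViewSettings(ViewContext context) {
        this.context = context;
      }

      public String connectString() {
        Map<String, String> props = context.getProperties();
        String host = props.get("hive.host"); // cluster-config: fake (resolved by the view)
        String port = props.get("hive.port"); // cluster-config: hive-site/hive.server2.thrift.port
        String auth = props.get("hive.auth"); // defaults to auth=NONE
        return host + ":" + port + ";" + auth;
      }
    }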

http://git-wip-us.apache.org/repos/asf/ambari/blob/b0634464/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/BaseHiveTest.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/BaseHiveTest.java b/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/BaseHiveTest.java
index f38d0e9..2b915f0 100644
--- a/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/BaseHiveTest.java
+++ b/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/BaseHiveTest.java
@@ -69,6 +69,8 @@ public abstract class BaseHiveTest {
     properties.put("dataworker.storagePath", hiveStorageFile.toString());
     properties.put("scripts.dir", "/tmp/.hiveQueries");
     properties.put("jobs.dir", "/tmp/.hiveJobs");
+    properties.put("yarn.ats.url", "127.0.0.1:8188");
+    properties.put("yarn.resourcemanager.url", "127.0.0.1:8088");
 
     setupProperties(properties, baseDir);
 

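The two YARN endpoints stubbed here back the view's Jobs UI through the Application Timeline Server and ResourceManager REST APIs. A rough sketch of a call against the stubbed yarn.ats.url value — /ws/v1/timeline is the standard ATS REST base, but the HIVE_QUERY_ID entity type and the plain-HTTP handling are assumptions for illustration, not the view's actual ATS client:

    import java.io.BufferedReader;
    import java.io.InputStreamReader;
    import java.net.HttpURLConnection;
    import java.net.URL;

    public class AtsClientSketch {
      // Fetches Hive query entities from the YARN Application Timeline Server,
      // e.g. fetchHiveQueries("127.0.0.1:8188") against the test value above.
      public static String fetchHiveQueries(String atsUrl) throws Exception {
        URL url = new URL("http://" + atsUrl + "/ws/v1/timeline/HIVE_QUERY_ID");
        HttpURLConnection conn = (HttpURLConnection) url.openConnection();
        conn.setRequestMethod("GET");
        StringBuilder body = new StringBuilder();
        try (BufferedReader reader =
                 new BufferedReader(new InputStreamReader(conn.getInputStream()))) {
          String line;
          while ((line = reader.readLine()) != null) {
            body.append(line);
          }
        }
        return body.toString(); // JSON payload with the timeline entities
      }
    }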
http://git-wip-us.apache.org/repos/asf/ambari/blob/b0634464/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/files/FileServiceTest.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/files/FileServiceTest.java b/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/files/FileServiceTest.java
index a5a0f48..d6f640c 100644
--- a/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/files/FileServiceTest.java
+++ b/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/files/FileServiceTest.java
@@ -21,7 +21,7 @@ package org.apache.ambari.view.hive.resources.files;
 import org.apache.ambari.view.hive.ServiceTestUtils;
 import org.apache.ambari.view.hive.HDFSTest;
 import org.apache.ambari.view.hive.utils.*;
-import org.apache.ambari.view.hive.utils.HdfsApi;
+import org.apache.ambari.view.utils.hdfs.HdfsApi;
 import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.fs.Path;
 import org.json.simple.JSONObject;

http://git-wip-us.apache.org/repos/asf/ambari/blob/b0634464/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/jobs/AggregatorTest.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/jobs/AggregatorTest.java b/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/jobs/AggregatorTest.java
index 08c6d00..3347d15 100644
--- a/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/jobs/AggregatorTest.java
+++ b/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/jobs/AggregatorTest.java
@@ -261,8 +261,9 @@ public class AggregatorTest {
     @Override
     public Job read(Object id) throws ItemNotFound {
       for(Job job : jobs) {
-        if (job.getId().equals(id))
+        if (job.getId().equals(id)) {
           return job;
+        }
       }
       throw new ItemNotFound();
     }

http://git-wip-us.apache.org/repos/asf/ambari/blob/b0634464/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/jobs/JobServiceTest.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/jobs/JobServiceTest.java b/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/jobs/JobServiceTest.java
index 3c4202a..897d951 100644
--- a/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/jobs/JobServiceTest.java
+++ b/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/jobs/JobServiceTest.java
@@ -27,7 +27,8 @@ import org.apache.ambari.view.hive.client.HiveClientException;
 import org.apache.ambari.view.hive.resources.savedQueries.SavedQuery;
 import org.apache.ambari.view.hive.resources.savedQueries.SavedQueryService;
 import org.apache.ambari.view.hive.utils.BadRequestFormattedException;
-import org.apache.ambari.view.hive.utils.HdfsApi;
+import org.apache.ambari.view.utils.hdfs.HdfsApi;
+import org.apache.ambari.view.utils.hdfs.HdfsApiException;
 import org.apache.hive.service.cli.thrift.*;
 import org.json.simple.JSONObject;
 import org.junit.*;
@@ -80,7 +81,7 @@ public class JobServiceTest extends BaseHiveTest {
   }
 
   @Test
-  public void createJobFromQuery() throws IOException, InterruptedException {
+  public void createJobFromQuery() throws IOException, InterruptedException, HdfsApiException {
     setupHdfsApiMock();
 
     SavedQuery savedQueryForJob = createSavedQuery("Test", null);
@@ -99,7 +100,7 @@ public class JobServiceTest extends BaseHiveTest {
   }
 
   @Test
-  public void createJobForcedContent() throws IOException, InterruptedException {
+  public void createJobForcedContent() throws IOException, InterruptedException, HdfsApiException {
     HdfsApiMock hdfsApiMock = setupHdfsApiMock();
 
     JobService.JobRequest request = new JobService.JobRequest();
@@ -206,7 +207,7 @@ public class JobServiceTest extends BaseHiveTest {
     return ((Map) jobObj.get("job")).get(field);
   }
 
-  private HdfsApiMock setupHdfsApiMock() throws IOException, InterruptedException {
+  private HdfsApiMock setupHdfsApiMock() throws IOException, InterruptedException, HdfsApiException {
     HdfsApiMock hdfsApiMock = new HdfsApiMock("select * from Z");
     HdfsApi hdfsApi = hdfsApiMock.getHdfsApi();
     jobService.getSharedObjectsFactory().setInstance(HdfsApi.class, hdfsApi);

http://git-wip-us.apache.org/repos/asf/ambari/blob/b0634464/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/savedQueries/SavedQueryResourceManagerTest.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/savedQueries/SavedQueryResourceManagerTest.java b/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/savedQueries/SavedQueryResourceManagerTest.java
new file mode 100644
index 0000000..b9fac9f
--- /dev/null
+++ b/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/savedQueries/SavedQueryResourceManagerTest.java
@@ -0,0 +1,55 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive.resources.savedQueries;
+
+import org.junit.Assert;
+import org.junit.Test;
+
+public class SavedQueryResourceManagerTest {
+
+  @Test
+  public void testMakeShortQuery() throws Exception {
+    String query = "select * from table;";
+    String shortQuery = SavedQueryResourceManager.makeShortQuery(query);
+    Assert.assertEquals(query, shortQuery);
+  }
+
+  @Test
+  public void testMakeShortQuery42Trim() throws Exception {
+    String str50 = "12345678901234567890123456789012345678901234567890";
+    String str42 = "123456789012345678901234567890123456789012";
+    String shortQuery = SavedQueryResourceManager.makeShortQuery(str50);
+    Assert.assertEquals(str42, shortQuery);
+  }
+
+  @Test
+  public void testMakeShortQueryRemoveSet() throws Exception {
+    String str50 = "set hive.execution.engine=tez;\nselect * from table;";
+    String shortQuery = SavedQueryResourceManager.makeShortQuery(str50);
+    Assert.assertEquals("select * from table;", shortQuery);
+
+    str50 = "set hive.execution.engine = tez;  \n select * from table;";
+    shortQuery = SavedQueryResourceManager.makeShortQuery(str50);
+    Assert.assertEquals("select * from table;", shortQuery);
+
+    str50 = "SET  property=value;\nselect * from table;";
+    shortQuery = SavedQueryResourceManager.makeShortQuery(str50);
+    Assert.assertEquals("select * from table;", shortQuery);
+  }
+}
\ No newline at end of file
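
Taken together, the assertions above pin down makeShortQuery's contract: leading "set key=value;" statements are stripped and the remainder is truncated to 42 characters. A sketch consistent with those expectations — the regex and the exact cap are inferred from the tests, not copied from SavedQueryResourceManager:

    // Inferred from the tests above; not the actual implementation in
    // SavedQueryResourceManager.
    public class MakeShortQuerySketch {
      public static String makeShortQuery(String query) {
        // Strip leading "set key=value;" statements, case-insensitively.
        String cleaned = query
            .replaceAll("(?i)set\\s+[\\w.\\-]+\\s*=\\s*[\\w.\\-]+\\s*;", "")
            .trim();
        // Cap the preview at 42 characters, as the trim test expects.
        return cleaned.substring(0, Math.min(cleaned.length(), 42));
      }
    }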

http://git-wip-us.apache.org/repos/asf/ambari/blob/b0634464/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/savedQueries/SavedQueryServiceTest.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/savedQueries/SavedQueryServiceTest.java b/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/savedQueries/SavedQueryServiceTest.java
index 9b26a5b..d55858f 100644
--- a/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/savedQueries/SavedQueryServiceTest.java
+++ b/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/savedQueries/SavedQueryServiceTest.java
@@ -19,9 +19,7 @@
 package org.apache.ambari.view.hive.resources.savedQueries;
 
 import org.apache.ambari.view.hive.HDFSTest;
-import org.apache.ambari.view.hive.utils.HdfsApi;
 import org.apache.ambari.view.hive.utils.NotFoundFormattedException;
-import org.apache.ambari.view.hive.utils.SharedObjectsFactory;
 import org.json.simple.JSONObject;
 import org.junit.*;
 import org.junit.rules.ExpectedException;

http://git-wip-us.apache.org/repos/asf/ambari/blob/b0634464/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/utils/HdfsApiMock.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/utils/HdfsApiMock.java b/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/utils/HdfsApiMock.java
index bb922e2..8eae827 100644
--- a/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/utils/HdfsApiMock.java
+++ b/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/utils/HdfsApiMock.java
@@ -18,6 +18,8 @@
 
 package org.apache.ambari.view.hive.utils;
 
+import org.apache.ambari.view.utils.hdfs.HdfsApi;
+import org.apache.ambari.view.utils.hdfs.HdfsApiException;
 import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.fs.FSDataOutputStream;
 
@@ -34,11 +36,11 @@ public class HdfsApiMock {
   private ByteArrayOutputStream fsLogsOutputStream;
   private HdfsApi hdfsApi;
 
-  public HdfsApiMock(String inputFileContent) throws IOException, InterruptedException {
+  public HdfsApiMock(String inputFileContent) throws IOException, InterruptedException, HdfsApiException {
     setupHdfsApi(inputFileContent);
   }
 
-  protected void setupHdfsApi(String inputFileContent) throws IOException, InterruptedException {
+  protected void setupHdfsApi(String inputFileContent) throws IOException, InterruptedException, HdfsApiException {
     hdfsApi = createNiceMock(HdfsApi.class);
 
     hdfsApi.copy(anyString(), anyString());

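Since HdfsApi moved to org.apache.ambari.view.utils.hdfs and its methods now declare HdfsApiException, test code that stubs it must declare the new checked exception as well. A condensed sketch of the EasyMock pattern HdfsApiMock uses — only createNiceMock() and copy() appear in this diff; the expectLastCall()/replay() calls are the usual EasyMock completion steps, shown here as an assumed continuation:

    import static org.easymock.EasyMock.*;

    import org.apache.ambari.view.utils.hdfs.HdfsApi;

    public class HdfsApiStubSketch {
      // Declares Exception to cover IOException, InterruptedException,
      // and HdfsApiException thrown by the stubbed calls.
      public static HdfsApi stubHdfsApi() throws Exception {
        HdfsApi hdfsApi = createNiceMock(HdfsApi.class);
        hdfsApi.copy(anyString(), anyString()); // record the expected call
        expectLastCall().anyTimes();
        replay(hdfsApi);                        // switch the mock to replay mode
        return hdfsApi;
      }
    }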