http://git-wip-us.apache.org/repos/asf/ambari/blob/c0f9621f/contrib/views/hive/src/main/resources/view.log4j.properties ---------------------------------------------------------------------- diff --git a/contrib/views/hive/src/main/resources/view.log4j.properties b/contrib/views/hive/src/main/resources/view.log4j.properties deleted file mode 100644 index 03c3e93..0000000 --- a/contrib/views/hive/src/main/resources/view.log4j.properties +++ /dev/null @@ -1,27 +0,0 @@ -# Copyright 2011 The Apache Software Foundation -# -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -log4j.appender.hiveView=org.apache.log4j.RollingFileAppender -log4j.appender.hiveView.File=${ambari.log.dir}/hive-view/hive-view.log -log4j.appender.hiveView.MaxFileSize=80MB -log4j.appender.hiveView.MaxBackupIndex=60 -log4j.appender.hiveView.layout=org.apache.log4j.PatternLayout -log4j.appender.hiveView.layout.ConversionPattern=%d{DATE} %5p [%t] [%X{viewName} %X{viewVersion} %X{viewInstanceName}] %c{1}:%L - %m%n - -log4j.logger.org.apache.ambari.view.hive=INFO,hiveView -log4j.additivity.org.apache.ambari.view.hive = false
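The ConversionPattern in the appender above interpolates viewName, viewVersion and viewInstanceName from the log4j mapped diagnostic context (the %X{...} fields). As a rough illustration of where those values would come from (not part of this commit; only the MDC key names are carried over from the pattern, the class and method below are hypothetical), a component running under log4j 1.x could populate the MDC before logging:

    import org.apache.log4j.Logger;
    import org.apache.log4j.MDC;

    public class ViewMdcExample {
      private static final Logger LOG = Logger.getLogger("org.apache.ambari.view.hive.example");

      // Hypothetical helper: copy the view identity into the MDC so the
      // %X{viewName} / %X{viewVersion} / %X{viewInstanceName} fields in the
      // ConversionPattern above are filled in for every log line.
      public static void logWithViewIdentity(String viewName, String viewVersion,
                                             String instanceName, String message) {
        MDC.put("viewName", viewName);
        MDC.put("viewVersion", viewVersion);
        MDC.put("viewInstanceName", instanceName);
        try {
          LOG.info(message);
        } finally {
          MDC.remove("viewName");
          MDC.remove("viewVersion");
          MDC.remove("viewInstanceName");
        }
      }
    }

The logger name used here falls under org.apache.ambari.view.hive, which the deleted configuration routed to the hiveView appender with additivity disabled.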
http://git-wip-us.apache.org/repos/asf/ambari/blob/c0f9621f/contrib/views/hive/src/main/resources/view.xml ---------------------------------------------------------------------- diff --git a/contrib/views/hive/src/main/resources/view.xml b/contrib/views/hive/src/main/resources/view.xml deleted file mode 100644 index 36b43d5..0000000 --- a/contrib/views/hive/src/main/resources/view.xml +++ /dev/null @@ -1,347 +0,0 @@ -<!-- - Licensed to the Apache Software Foundation (ASF) under one or more - contributor license agreements. See the NOTICE file distributed with - this work for additional information regarding copyright ownership. - The ASF licenses this file to You under the Apache License, Version 2.0 - (the "License"); you may not use this file except in compliance with - the License. You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. ---> -<view> - <name>HIVE</name> - <label>Hive</label> - <version>1.0.0</version> - <build>${env.BUILD_NUMBER}</build> - - <min-ambari-version>2.0.*</min-ambari-version> - - <validator-class>org.apache.ambari.view.hive.PropertyValidator</validator-class> - <view-class>org.apache.ambari.view.hive.HiveViewImpl</view-class> - - <!-- Hive Configs --> - <parameter> - <name>hive.host</name> - <description>Enter the HiveServer2 host. Host must be accessible from Ambari Server.</description> - <label>HiveServer2 Host</label> - <placeholder>127.0.0.1</placeholder> - <cluster-config>fake</cluster-config> - <required>true</required> - </parameter> - - <parameter> - <name>hive.port</name> - <description>HiveServer2 Thrift port (example: 10000).</description> - <label>HiveServer2 Thrift port</label> - <placeholder>10000</placeholder> - <default-value>10000</default-value> - <cluster-config>hive-site/hive.server2.thrift.port</cluster-config> - <required>true</required> - </parameter> - - <parameter> - <name>hive.http.port</name> - <description>HiveServer2 Http port (example: 10001).</description> - <label>HiveServer2 Http port</label> - <placeholder>10001</placeholder> - <default-value>10001</default-value> - <cluster-config>hive-site/hive.server2.thrift.http.port</cluster-config> - <required>true</required> - </parameter> - - <parameter> - <name>hive.http.path</name> - <description>HiveServer2 Http path (example: cliservice).</description> - <label>HiveServer2 Http path</label> - <placeholder>cliservice</placeholder> - <default-value>cliservice</default-value> - <cluster-config>hive-site/hive.server2.thrift.http.path</cluster-config> - <required>true</required> - </parameter> - - <parameter> - <name>hive.transport.mode</name> - <description>HiveServer2 Transport Mode (example: http/binary).</description> - <label>HiveServer2 Transport Mode</label> - <placeholder>binary</placeholder> - <default-value>binary</default-value> - <cluster-config>hive-site/hive.server2.transport.mode</cluster-config> - <required>true</required> - </parameter> - - <parameter> - <name>hive.auth</name> - <description>Semicolon-separated authentication configs.</description> - <label>Hive Authentication</label> - <placeholder>auth=NONE</placeholder> - <required>false</required> - </parameter> - - <parameter> - <name>hive.metastore.warehouse.dir</name> 
- <description>Hive Metastore directory (example: /apps/hive/warehouse)</description> - <label>Hive Metastore directory</label> - <placeholder>/apps/hive/warehouse</placeholder> - <default-value>/apps/hive/warehouse</default-value> - <cluster-config>hive-site/hive.metastore.warehouse.dir</cluster-config> - <required>false</required> - </parameter> - - <!-- HDFS Configs --> - <parameter> - <name>webhdfs.url</name> - <description>Enter the WebHDFS FileSystem URI. Typically this is the dfs.namenode.http-address - property in the hdfs-site.xml configuration. URL must be accessible from Ambari Server.</description> - <label>WebHDFS FileSystem URI</label> - <placeholder>webhdfs://namenode:50070</placeholder> - <required>true</required> - <cluster-config>core-site/fs.defaultFS</cluster-config> - </parameter> - <parameter> - <name>webhdfs.nameservices</name> - <description>Comma-separated list of nameservices. Value of hdfs-site/dfs.nameservices property</description> - <label>Logical name of the NameNode cluster</label> - <required>false</required> - <cluster-config>hdfs-site/dfs.nameservices</cluster-config> - </parameter> - <parameter> - <name>webhdfs.ha.namenodes.list</name> - <description>Comma-separated list of namenodes for a given nameservice. - Value of hdfs-site/dfs.ha.namenodes.[nameservice] property</description> - <label>List of NameNodes</label> - <required>false</required> - <cluster-config>fake</cluster-config> - </parameter> - <parameter> - <name>webhdfs.ha.namenode.rpc-address.nn1</name> - <description>RPC address for first name node. - Value of hdfs-site/dfs.namenode.rpc-address.[nameservice].[namenode1] property</description> - <label>First NameNode RPC Address</label> - <required>false</required> - <cluster-config>fake</cluster-config> - </parameter> - <parameter> - <name>webhdfs.ha.namenode.rpc-address.nn2</name> - <description>RPC address for second name node. - Value of hdfs-site/dfs.namenode.rpc-address.[nameservice].[namenode2] property</description> - <label>Second NameNode RPC Address</label> - <required>false</required> - <cluster-config>fake</cluster-config> - </parameter> - <parameter> - <name>webhdfs.ha.namenode.http-address.nn1</name> - <description>WebHDFS address for first name node. - Value of hdfs-site/dfs.namenode.http-address.[nameservice].[namenode1] property</description> - <label>First NameNode HTTP (WebHDFS) Address</label> - <required>false</required> - <cluster-config>fake</cluster-config> - </parameter> - <parameter> - <name>webhdfs.ha.namenode.http-address.nn2</name> - <description>WebHDFS address for second name node. - Value of hdfs-site/dfs.namenode.http-address.[nameservice].[namenode2] property</description> - <label>Second NameNode HTTP (WebHDFS) Address</label> - <required>false</required> - <cluster-config>fake</cluster-config> - </parameter> - <parameter> - <name>webhdfs.ha.namenode.https-address.nn1</name> - <description>WebHDFS Https address for first name node. - Value of hdfs-site/dfs.namenode.https-address.[nameservice].[namenode1] property</description> - <label>First NameNode HTTPS (WebHDFS) Address</label> - <required>false</required> - <cluster-config>fake</cluster-config> - </parameter> - <parameter> - <name>webhdfs.ha.namenode.https-address.nn2</name> - <description>WebHDFS Https address for second name node. 
- Value of hdfs-site/dfs.namenode.https-address.[nameservice].[namenode2] property</description> - <label>Second NameNode HTTPS (WebHDFS) Address</label> - <required>false</required> - <cluster-config>fake</cluster-config> - </parameter> - <parameter> - <name>webhdfs.client.failover.proxy.provider</name> - <description>The Java class that HDFS clients use to contact the Active NameNode - Value of hdfs-site/dfs.client.failover.proxy.provider.[nameservice] property</description> - <label>Failover Proxy Provider</label> - <required>false</required> - <cluster-config>fake</cluster-config> - </parameter> - - <parameter> - <name>webhdfs.username</name> - <description>doAs for proxy user for HDFS. By default, uses the currently logged-in Ambari user.</description> - <label>WebHDFS Username</label> - <default-value>${username}</default-value> - <required>false</required> - </parameter> - - <parameter> - <name>webhdfs.auth</name> - <description>Semicolon-separated authentication configs.</description> - <label>WebHDFS Authentication</label> - <placeholder>auth=SIMPLE</placeholder> - <required>false</required> - </parameter> - - <parameter> - <name>hdfs.umask-mode</name> - <description>The umask used when creating files and directories. Defaults to 022</description> - <label>Umask</label> - <default-value>022</default-value> - <required>false</required> - <cluster-config>hdfs-site/fs.permissions.umask-mode</cluster-config> - </parameter> - - <parameter> - <name>hdfs.auth_to_local</name> - <description>Auth to Local Configuration</description> - <label>Auth To Local</label> - <required>false</required> - <cluster-config>core-site/hadoop.security.auth_to_local</cluster-config> - </parameter> - - <!-- General Configs --> - - <parameter> - <name>views.tez.instance</name> - <description>Instance name of Tez view.</description> - <label>Instance name of Tez view</label> - <required>false</required> - </parameter> - - <parameter> - <name>scripts.dir</name> - <description>HDFS directory path to store Hive scripts.</description> - <label>Scripts HDFS Directory</label> - <placeholder>/user/${username}/hive/scripts</placeholder> - <default-value>/user/${username}/hive/scripts</default-value> - <required>true</required> - </parameter> - - <parameter> - <name>jobs.dir</name> - <description>HDFS directory path to store Hive job status.</description> - <label>Jobs HDFS Directory</label> - <placeholder>/user/${username}/hive/jobs</placeholder> - <default-value>/user/${username}/hive/jobs</default-value> - <required>true</required> - </parameter> - - <parameter> - <name>scripts.settings.defaults-file</name> - <description>File path for saving default settings for query</description> - <label>Default script settings file</label> - <default-value>/user/${username}/.${instanceName}.defaultSettings</default-value> - <required>true</required> - </parameter> - - <parameter> - <name>yarn.ats.url</name> - <description>The URL to the YARN Application Timeline Server, used to provide Jobs information, typically, this is the yarn.timeline-service.webapp.address property in the yarn-site.xml configuration.</description> - <label>YARN Application Timeline Server URL</label> - <placeholder>http://yarn.ats.address:8188</placeholder> - <cluster-config>yarn-site/yarn.timeline-service.webapp.address</cluster-config> - <required>true</required> - </parameter> - - <parameter> - <name>yarn.resourcemanager.url</name> - <description>The URL to the YARN ResourceManager, used to provide YARN Application data. 
If YARN ResourceManager HA is enabled, provide a comma separated list of URLs for all the Resource Managers.</description> - <label>YARN ResourceManager URL</label> - <placeholder>http://yarn.resourcemanager.address:8088</placeholder> - <cluster-config>yarn-site/yarn.resourcemanager.webapp.address</cluster-config> - <required>true</required> - </parameter> - - <resource> - <name>savedQuery</name> - <plural-name>savedQueries</plural-name> - <id-property>id</id-property> - <resource-class>org.apache.ambari.view.hive.resources.savedQueries.SavedQuery</resource-class> - <provider-class>org.apache.ambari.view.hive.resources.savedQueries.SavedQueryResourceProvider</provider-class> - <service-class>org.apache.ambari.view.hive.resources.savedQueries.SavedQueryService</service-class> - </resource> - - <resource> - <name>fileResource</name> - <plural-name>fileResources</plural-name> - <id-property>id</id-property> - <resource-class>org.apache.ambari.view.hive.resources.resources.FileResourceItem</resource-class> - <provider-class>org.apache.ambari.view.hive.resources.resources.FileResourceResourceProvider</provider-class> - <service-class>org.apache.ambari.view.hive.resources.resources.FileResourceService</service-class> - </resource> - - <resource> - <name>udf</name> - <plural-name>udfs</plural-name> - <id-property>id</id-property> - <resource-class>org.apache.ambari.view.hive.resources.udfs.UDF</resource-class> - <provider-class>org.apache.ambari.view.hive.resources.udfs.UDFResourceProvider</provider-class> - <service-class>org.apache.ambari.view.hive.resources.udfs.UDFService</service-class> - </resource> - - <resource> - <name>job</name> - <plural-name>jobs</plural-name> - <id-property>id</id-property> - <resource-class>org.apache.ambari.view.hive.resources.jobs.viewJobs.JobImpl</resource-class> - <provider-class>org.apache.ambari.view.hive.resources.jobs.JobResourceProvider</provider-class> - <service-class>org.apache.ambari.view.hive.resources.jobs.JobService</service-class> - </resource> - - <resource> - <name>upload</name> - <plural-name>uploads</plural-name> - <service-class>org.apache.ambari.view.hive.resources.uploads.UploadService</service-class> - </resource> - - <resource> - <name>file</name> - <service-class>org.apache.ambari.view.hive.resources.files.FileService</service-class> - </resource> - - <resource> - <name>ddl</name> - <service-class>org.apache.ambari.view.hive.resources.browser.HiveBrowserService</service-class> - </resource> - - <resource> - <name>hive</name> - <service-class>org.apache.ambari.view.hive.HelpService</service-class> - </resource> - - <persistence> - <entity> - <class>org.apache.ambari.view.hive.resources.jobs.viewJobs.JobImpl</class> - <id-property>id</id-property> - </entity> - <entity> - <class>org.apache.ambari.view.hive.resources.jobs.StoredOperationHandle</class> - <id-property>id</id-property> - </entity> - <entity> - <class>org.apache.ambari.view.hive.resources.savedQueries.SavedQuery</class> - <id-property>id</id-property> - </entity> - <entity> - <class>org.apache.ambari.view.hive.resources.udfs.UDF</class> - <id-property>id</id-property> - </entity> - <entity> - <class>org.apache.ambari.view.hive.resources.resources.FileResourceItem</class> - <id-property>id</id-property> - </entity> - <entity> - <class>org.apache.ambari.view.hive.TestBean</class> - <id-property>id</id-property> - </entity> - </persistence> -</view> 
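At runtime each <parameter> declared in the view.xml above is exposed to the view through the instance property map returned by org.apache.ambari.view.ViewContext.getProperties() (the same map the tests below stub out with EasyMock). A minimal sketch of reading the HiveServer2 connection settings from that map; the helper class and its fallback logic are illustrative assumptions, while the property names and default values come from the descriptor above:

    import java.util.Map;
    import org.apache.ambari.view.ViewContext;

    public class HiveConnectionSettings {
      // Hypothetical helper: pull the hive.* parameters declared in view.xml out of
      // the instance properties supplied by Ambari, falling back to the defaults the
      // descriptor declares when a value is missing.
      public static String describe(ViewContext context) {
        Map<String, String> props = context.getProperties();
        String host = props.get("hive.host");                      // required, no default
        String port = valueOr(props, "hive.port", "10000");
        String transport = valueOr(props, "hive.transport.mode", "binary");
        return host + ":" + port + " via " + transport;
      }

      private static String valueOr(Map<String, String> props, String key, String fallback) {
        String value = props.get(key);
        return (value == null || value.isEmpty()) ? fallback : value;
      }
    }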
http://git-wip-us.apache.org/repos/asf/ambari/blob/c0f9621f/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/BaseHiveTest.java ---------------------------------------------------------------------- diff --git a/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/BaseHiveTest.java b/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/BaseHiveTest.java deleted file mode 100644 index adcd988..0000000 --- a/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/BaseHiveTest.java +++ /dev/null @@ -1,116 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.ambari.view.hive; - -import com.google.inject.AbstractModule; -import com.google.inject.Guice; -import com.google.inject.Injector; -import org.apache.ambari.view.ViewContext; -import org.apache.ambari.view.ViewResourceHandler; -import org.apache.hadoop.fs.FileUtil; -import org.junit.After; -import org.junit.AfterClass; -import org.junit.Before; -import org.junit.BeforeClass; - -import java.io.File; -import java.util.HashMap; -import java.util.Map; - -import static org.easymock.EasyMock.*; - -public abstract class BaseHiveTest { - protected ViewResourceHandler handler; - protected ViewContext context; - protected static File hiveStorageFile; - protected static File baseDir; - protected Map<String, String> properties; - - protected static String DATA_DIRECTORY = "./target/HiveTest"; - - @BeforeClass - public static void startUp() throws Exception { - File baseDir = new File(DATA_DIRECTORY) - .getAbsoluteFile(); - FileUtil.fullyDelete(baseDir); - } - - @AfterClass - public static void shutDown() throws Exception { - } - - @Before - public void setUp() throws Exception { - handler = createNiceMock(ViewResourceHandler.class); - - properties = new HashMap<String, String>(); - baseDir = new File(DATA_DIRECTORY) - .getAbsoluteFile(); - hiveStorageFile = new File("./target/HiveTest/storage.dat") - .getAbsoluteFile(); - - setupDefaultContextProperties(properties); - setupProperties(properties, baseDir); - - context = makeContext(properties, "ambari-qa", "MyHive"); - - replay(handler, context); - } - - public void setupDefaultContextProperties(Map<String, String> properties) { - properties.put("dataworker.storagePath", hiveStorageFile.toString()); - properties.put("scripts.dir", "/tmp/.hiveQueries"); - properties.put("jobs.dir", "/tmp/.hiveJobs"); - properties.put("yarn.ats.url", "http://127.0.0.1:8188"); - properties.put("yarn.resourcemanager.url", "http://127.0.0.1:8088"); - } - - public ViewContext makeContext(Map<String, String> properties, String username, String instanceName) throws Exception { - setupDefaultContextProperties(properties); - setupProperties(properties, baseDir); - - ViewContext context = 
createNiceMock(ViewContext.class); - expect(context.getProperties()).andReturn(properties).anyTimes(); - expect(context.getUsername()).andReturn(username).anyTimes(); - expect(context.getInstanceName()).andReturn(instanceName).anyTimes(); - return context; - } - - protected void setupProperties(Map<String, String> properties, File baseDir) throws Exception { - - } - - @After - public void tearDown() throws Exception { - - } - - protected static <T> T getService(Class<T> clazz, - final ViewResourceHandler viewResourceHandler, - final ViewContext viewInstanceContext) { - Injector viewInstanceInjector = Guice.createInjector(new AbstractModule() { - @Override - protected void configure() { - bind(ViewResourceHandler.class).toInstance(viewResourceHandler); - bind(ViewContext.class).toInstance(viewInstanceContext); - } - }); - return viewInstanceInjector.getInstance(clazz); - } -} http://git-wip-us.apache.org/repos/asf/ambari/blob/c0f9621f/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/HDFSTest.java ---------------------------------------------------------------------- diff --git a/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/HDFSTest.java b/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/HDFSTest.java deleted file mode 100644 index 0ee8eb3..0000000 --- a/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/HDFSTest.java +++ /dev/null @@ -1,64 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.ambari.view.hive; - -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.fs.FileUtil; -import org.apache.hadoop.hdfs.MiniDFSCluster; -import org.junit.AfterClass; -import org.junit.BeforeClass; - -import java.io.File; -import java.util.Map; - -public abstract class HDFSTest extends BaseHiveTest { - protected static MiniDFSCluster hdfsCluster; - protected static String hdfsURI; - - @BeforeClass - public static void startUp() throws Exception { - BaseHiveTest.startUp(); // super - File hdfsDir = new File("./target/HiveTest/hdfs/") - .getAbsoluteFile(); - FileUtil.fullyDelete(hdfsDir); - - Configuration conf = new Configuration(); - conf.set(MiniDFSCluster.HDFS_MINIDFS_BASEDIR, hdfsDir.getAbsolutePath()); - conf.set("hadoop.proxyuser." + System.getProperty("user.name") + ".groups", "*"); - conf.set("hadoop.proxyuser." 
+ System.getProperty("user.name") + ".hosts", "*"); - - MiniDFSCluster.Builder builder = new MiniDFSCluster.Builder(conf); - hdfsCluster = builder.build(); - hdfsURI = hdfsCluster.getURI().toString(); - } - - @AfterClass - public static void shutDown() throws Exception { - BaseHiveTest.shutDown(); - hdfsCluster.shutdown(); - hdfsCluster = null; - } - - @Override - protected void setupProperties(Map<String, String> properties, File baseDir) throws Exception { - super.setupProperties(properties, baseDir); - properties.put("webhdfs.url", hdfsURI); - properties.put("webhdfs.username", System.getProperty("user.name")); - } -} http://git-wip-us.apache.org/repos/asf/ambari/blob/c0f9621f/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/PropertyValidatorTest.java ---------------------------------------------------------------------- diff --git a/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/PropertyValidatorTest.java b/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/PropertyValidatorTest.java deleted file mode 100644 index 84caae9..0000000 --- a/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/PropertyValidatorTest.java +++ /dev/null @@ -1,112 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.ambari.view.hive; - -import org.apache.ambari.view.ViewInstanceDefinition; -import org.apache.ambari.view.validation.Validator; -import org.easymock.EasyMock; -import org.junit.Test; - -import java.util.HashMap; -import java.util.Map; - -import static org.easymock.EasyMock.expect; -import static org.easymock.EasyMock.replay; -import static org.junit.Assert.*; - -public class PropertyValidatorTest { - - @Test - public void testValidatePropertyWebHDFSCom() throws Exception { - PropertyValidator validator = new PropertyValidator(); - ViewInstanceDefinition definition = getViewInstanceDefinition(); - - definition.getPropertyMap().put(PropertyValidator.WEBHDFS_URL, "hdfs://hostname.com:8020"); - - assertTrue(validator.validateProperty(PropertyValidator.WEBHDFS_URL, - definition, Validator.ValidationContext.PRE_CREATE).isValid()); - - definition.getPropertyMap().put(PropertyValidator.WEBHDFS_URL, "webhdfs://hostname.com:50070"); - - assertTrue(validator.validateProperty(PropertyValidator.WEBHDFS_URL, - definition, Validator.ValidationContext.PRE_CREATE).isValid()); - - definition.getPropertyMap().put(PropertyValidator.WEBHDFS_URL, "http://hostname.com:50070"); - - assertFalse(validator.validateProperty(PropertyValidator.WEBHDFS_URL, - definition, Validator.ValidationContext.PRE_CREATE).isValid()); - } - - @Test - public void testValidatePropertyWebHDFSInternal() throws Exception { - PropertyValidator validator = new PropertyValidator(); - ViewInstanceDefinition definition = getViewInstanceDefinition(); - - definition.getPropertyMap().put(PropertyValidator.WEBHDFS_URL, "hdfs://hostname.internal:8020"); - - assertTrue(validator.validateProperty(PropertyValidator.WEBHDFS_URL, - definition, Validator.ValidationContext.PRE_CREATE).isValid()); - - definition.getPropertyMap().put(PropertyValidator.WEBHDFS_URL, "webhdfs://hostname.internal:50070"); - - assertTrue(validator.validateProperty(PropertyValidator.WEBHDFS_URL, - definition, Validator.ValidationContext.PRE_CREATE).isValid()); - - definition.getPropertyMap().put(PropertyValidator.WEBHDFS_URL, "swebhdfs://hostname.internal:50070"); - - assertTrue(validator.validateProperty(PropertyValidator.WEBHDFS_URL, - definition, Validator.ValidationContext.PRE_CREATE).isValid()); - - definition.getPropertyMap().put(PropertyValidator.WEBHDFS_URL, "http://hostname.internal:50070"); - - assertFalse(validator.validateProperty(PropertyValidator.WEBHDFS_URL, - definition, Validator.ValidationContext.PRE_CREATE).isValid()); - } - - @Test - public void testValidatePropertyATSCom() throws Exception { - PropertyValidator validator = new PropertyValidator(); - ViewInstanceDefinition definition = getViewInstanceDefinition(); - - definition.getPropertyMap().put(PropertyValidator.YARN_ATS_URL, "http://hostname.com:8088"); - - assertTrue(validator.validateProperty(PropertyValidator.YARN_ATS_URL, - definition, Validator.ValidationContext.PRE_CREATE).isValid()); - } - - @Test - public void testValidatePropertyATSInternal() throws Exception { - PropertyValidator validator = new PropertyValidator(); - ViewInstanceDefinition definition = getViewInstanceDefinition(); - - definition.getPropertyMap().put(PropertyValidator.YARN_ATS_URL, "http://hostname.internal:8088"); - - assertTrue(validator.validateProperty(PropertyValidator.YARN_ATS_URL, - definition, Validator.ValidationContext.PRE_CREATE).isValid()); - } - - private ViewInstanceDefinition getViewInstanceDefinition() { - ViewInstanceDefinition definition = 
EasyMock.createNiceMock(ViewInstanceDefinition.class); - expect(definition.getClusterHandle()).andReturn(null).anyTimes(); - Map<String, String> properties = new HashMap<String, String>(); - expect(definition.getPropertyMap()).andReturn(properties).anyTimes(); - replay(definition); - return definition; - } -} \ No newline at end of file http://git-wip-us.apache.org/repos/asf/ambari/blob/c0f9621f/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/ServiceTestUtils.java ---------------------------------------------------------------------- diff --git a/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/ServiceTestUtils.java b/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/ServiceTestUtils.java deleted file mode 100644 index ac913a9..0000000 --- a/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/ServiceTestUtils.java +++ /dev/null @@ -1,63 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.ambari.view.hive; - -import org.junit.Assert; - -import javax.servlet.http.HttpServletResponse; -import javax.ws.rs.core.Response; -import javax.ws.rs.core.UriBuilder; -import javax.ws.rs.core.UriInfo; - -import java.net.URI; - -import static org.easymock.EasyMock.*; - -public class ServiceTestUtils { - public static void assertHTTPResponseOK(Response response) { - Assert.assertEquals(200, response.getStatus()); - } - - public static void assertHTTPResponseCreated(Response response) { - Assert.assertEquals(201, response.getStatus()); - } - - public static void assertHTTPResponseNoContent(Response response) { - Assert.assertEquals(204, response.getStatus()); - } - - public static void expectLocationHeaderInResponse(HttpServletResponse resp_obj) { - resp_obj.setHeader(eq("Location"), anyString()); - } - - public static UriInfo getDefaultUriInfo() { - UriInfo uriInfo = createNiceMock(UriInfo.class); - URI uri = UriBuilder.fromUri("http://host/a/b").build(); - expect(uriInfo.getAbsolutePath()).andReturn(uri); - replay(uriInfo); - return uriInfo; - } - - public static HttpServletResponse getResponseWithLocation() { - HttpServletResponse resp_obj = createNiceMock(HttpServletResponse.class); - expectLocationHeaderInResponse(resp_obj); - replay(resp_obj); - return resp_obj; - } -} http://git-wip-us.apache.org/repos/asf/ambari/blob/c0f9621f/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/backgroundjobs/BackgroundJobControllerTest.java ---------------------------------------------------------------------- diff --git a/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/backgroundjobs/BackgroundJobControllerTest.java b/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/backgroundjobs/BackgroundJobControllerTest.java deleted file mode 100644 index 
ceb3677..0000000 --- a/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/backgroundjobs/BackgroundJobControllerTest.java +++ /dev/null @@ -1,77 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.ambari.view.hive.backgroundjobs; - -import org.apache.ambari.view.hive.BaseHiveTest; -import org.junit.Assert; -import org.junit.Test; - -public class BackgroundJobControllerTest extends BaseHiveTest { - - private static final long MAX_WAIT_TIME = 2000; - - @Test - public void testStartJob() throws Exception { - BackgroundJobController backgroundJobController = new BackgroundJobController(context); - - HangingRunnable runnable = new HangingRunnable(); - backgroundJobController.startJob("key", runnable); - - assertStateIs(backgroundJobController, "key", Thread.State.RUNNABLE); - - runnable.goOn(); - assertStateIs(backgroundJobController, "key", Thread.State.TERMINATED); - } - - @Test - public void testInterrupt() throws Exception { - BackgroundJobController backgroundJobController = new BackgroundJobController(context); - - HangingRunnable runnable = new HangingRunnable(); - backgroundJobController.startJob("key", runnable); - - assertStateIs(backgroundJobController, "key", Thread.State.RUNNABLE); - - backgroundJobController.interrupt("key"); - assertStateIs(backgroundJobController, "key", Thread.State.TERMINATED); - } - - private void assertStateIs(BackgroundJobController backgroundJobController, String key, Thread.State state) throws InterruptedException { - long start = System.currentTimeMillis(); - while (backgroundJobController.state(key) != state) { - Thread.sleep(100); - if (System.currentTimeMillis() - start > MAX_WAIT_TIME) - break; - } - Assert.assertEquals(state, backgroundJobController.state(key)); - } - - private static class HangingRunnable implements Runnable { - private boolean waitMe = true; - - @Override - public void run() { - while(waitMe && !Thread.interrupted()); - } - - public void goOn() { - this.waitMe = false; - } - } -} \ No newline at end of file http://git-wip-us.apache.org/repos/asf/ambari/blob/c0f9621f/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/client/ConnectionTest.java ---------------------------------------------------------------------- diff --git a/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/client/ConnectionTest.java b/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/client/ConnectionTest.java deleted file mode 100644 index 0b57b6a..0000000 --- a/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/client/ConnectionTest.java +++ /dev/null @@ -1,73 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. 
See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.ambari.view.hive.client; - -import org.junit.Rule; -import org.junit.Test; -import org.junit.rules.ExpectedException; - -import java.util.HashMap; - -import static org.junit.Assert.*; - -public class ConnectionTest { - @Rule - public ExpectedException thrown = ExpectedException.none(); - - @Test - public void testOpenConnection() throws Exception { - HashMap<String, String> auth = new HashMap<String, String>(); - auth.put("auth", "NONE"); - - thrown.expect(HiveClientException.class); - thrown.expectMessage("Connection refused"); - new Connection("127.0.0.1", 42420, auth, "ambari-qa", null); - } - - @Test - public void testOpenConnectionMessage() throws Exception { - HashMap<String, String> auth = new HashMap<String, String>(); - auth.put("auth", "NONE"); - - thrown.expect(HiveClientException.class); - thrown.expectMessage("H020 Could not establish connection to"); - new Connection("127.0.0.1", 42420, auth, "ambari-qa", null); - } - - @Test - public void testAskPasswordWithoutPassword() throws Exception { - HashMap<String, String> auth = new HashMap<String, String>(); - auth.put("auth", "NONE"); - auth.put("password", "${ask_password}"); - - thrown.expect(HiveAuthRequiredException.class); - new Connection("127.0.0.1", 42420, auth, "ambari-qa", null); - } - - @Test - public void testAskPasswordWithPassword() throws Exception { - HashMap<String, String> auth = new HashMap<String, String>(); - auth.put("auth", "NONE"); - auth.put("password", "${ask_password}"); - - thrown.expect(HiveClientException.class); - thrown.expectMessage("Connection refused"); - new Connection("127.0.0.1", 42420, auth, "ambari-qa", "password"); - } -} http://git-wip-us.apache.org/repos/asf/ambari/blob/c0f9621f/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/client/UtilsTest.java ---------------------------------------------------------------------- diff --git a/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/client/UtilsTest.java b/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/client/UtilsTest.java deleted file mode 100644 index 0dafcb1..0000000 --- a/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/client/UtilsTest.java +++ /dev/null @@ -1,78 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.apache.ambari.view.hive.client; - -import org.apache.hive.service.cli.thrift.TStatus; -import org.apache.hive.service.cli.thrift.TStatusCode; -import org.junit.Rule; -import org.junit.Test; -import org.junit.rules.ExpectedException; - -import static org.easymock.EasyMock.createNiceMock; -import static org.easymock.EasyMock.expect; -import static org.easymock.EasyMock.replay; -import static org.junit.Assert.*; - -public class UtilsTest { - - @Rule - public ExpectedException thrown = ExpectedException.none(); - - @Test - public void testRemoveEmptyStrings() throws Exception { - String[] arrayWithSomeEmptyStrings = new String[] { "", null, "string1", null, "", "string2", "" }; - String[] expectedStrings = Utils.removeEmptyStrings(arrayWithSomeEmptyStrings); - - assertEquals(2, expectedStrings.length); - assertEquals("string1", expectedStrings[0]); - assertEquals("string2", expectedStrings[1]); - } - - @Test - public void testVerifySuccessWithHiveInvalidQueryException() throws Exception{ - String msg = "Error in compiling"; - String comment = "H110 Unable to submit statement"; - - TStatus status = createMockTStatus(10000,msg,TStatusCode.ERROR_STATUS); - thrown.expect(HiveInvalidQueryException.class); - thrown.expectMessage(msg); - - Utils.verifySuccess(status,comment); - } - - @Test - public void testVerifySuccessWithHiveErrorStatusException() throws Exception{ - String msg = "Error in compiling"; - String comment = "H110 Unable to submit statement"; - - TStatus status = createMockTStatus(40000,msg,TStatusCode.ERROR_STATUS); - thrown.expect(HiveErrorStatusException.class); - thrown.expectMessage(String.format("%s. %s",comment,msg)); - - Utils.verifySuccess(status,comment); - } - - private TStatus createMockTStatus(int errorCode,String msg,TStatusCode tStatusCode){ - TStatus status = createNiceMock(TStatus.class); - expect(status.getErrorCode()).andReturn(errorCode).anyTimes(); - expect(status.getStatusCode()).andReturn(tStatusCode).anyTimes(); - expect(status.getErrorMessage()).andReturn(msg).anyTimes(); - replay(status); - return status; - } -} \ No newline at end of file http://git-wip-us.apache.org/repos/asf/ambari/blob/c0f9621f/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/files/FileServiceTest.java ---------------------------------------------------------------------- diff --git a/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/files/FileServiceTest.java b/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/files/FileServiceTest.java deleted file mode 100644 index c57d2b4..0000000 --- a/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/files/FileServiceTest.java +++ /dev/null @@ -1,273 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.ambari.view.hive.resources.files; - -import org.apache.ambari.view.URLStreamProvider; -import org.apache.ambari.view.hive.ServiceTestUtils; -import org.apache.ambari.view.hive.HDFSTest; -import org.apache.ambari.view.hive.utils.*; -import org.apache.ambari.view.utils.hdfs.HdfsApi; -import org.apache.commons.codec.binary.Base64; -import org.apache.commons.io.IOUtils; -import org.apache.hadoop.fs.FSDataInputStream; -import org.apache.hadoop.fs.Path; -import org.easymock.EasyMock; -import org.json.simple.JSONObject; -import org.junit.*; -import org.junit.rules.ExpectedException; - -import javax.ws.rs.core.Response; -import java.io.IOException; -import java.io.InputStream; -import java.nio.charset.Charset; -import java.util.Arrays; -import java.util.Map; - -import static org.easymock.EasyMock.*; -import static org.easymock.EasyMock.expect; - -public class FileServiceTest extends HDFSTest { - private final static int PAGINATOR_PAGE_SIZE = 4; //4 bytes - private FileService fileService; - - @Rule public ExpectedException thrown = ExpectedException.none(); - - @Override - @Before - public void setUp() throws Exception { - super.setUp(); - fileService = getService(FileService.class, handler, context); - FilePaginator.setPageSize(PAGINATOR_PAGE_SIZE); - } - - @BeforeClass - public static void startUp() throws Exception { - HDFSTest.startUp(); // super - } - - @AfterClass - public static void shutDown() throws Exception { - HDFSTest.shutDown(); // super - } - - @Override - @After - public void tearDown() throws Exception { - fileService.getSharedObjectsFactory().clear(HdfsApi.class); - } - - @Test - public void testCreateFile() throws IOException, InterruptedException { - Response response = createFile("/tmp/testCreateFile", "testCreateFile content"); - - ServiceTestUtils.assertHTTPResponseNoContent(response); - assertHDFSFileContains("/tmp/testCreateFile", "testCreateFile content"); - } - - @Test - public void testCreateExistingFileForbidden() throws IOException, InterruptedException { - createFile("/tmp/testOverwriteFile", "original content"); - thrown.expect(ServiceFormattedException.class); - createFile("/tmp/testOverwriteFile", "new content"); - } - - @Test - public void testCreateFilePathNotExists() throws IOException, InterruptedException { - Response response = createFile("/non/existent/path/Luke", null); - ServiceTestUtils.assertHTTPResponseNoContent(response); - - Response response2 = createFile("/tmp/Leia", null); - ServiceTestUtils.assertHTTPResponseNoContent(response2); - - thrown.expect(ServiceFormattedException.class); - Response response3 = createFile("/tmp/Leia", null); // file already exists - Assert.assertEquals(400, response3.getStatus()); - } - - @Test - public void testUpdateFileContent() throws Exception { - createFile("/tmp/testUpdateFileContent", "some content"); - - FileService.FileResourceRequest updateRequest = new FileService.FileResourceRequest(); - updateRequest.file 
= new FileResource(); - updateRequest.file.setFileContent("new content"); - - Response response = fileService.updateFile(updateRequest, "/tmp/testUpdateFileContent"); - - ServiceTestUtils.assertHTTPResponseNoContent(response); - assertHDFSFileContains("/tmp/testUpdateFileContent", "new content"); - } - - @Test - public void testPagination() throws Exception { - createFile("/tmp/testPagination", "1234567890"); // 10 bytes, 3 pages if 1 page is 4 bytes - - Response response = fileService.getFilePage("/tmp/testPagination", 0L); - ServiceTestUtils.assertHTTPResponseOK(response); - - JSONObject obj = ((JSONObject) response.getEntity()); - assertFileJsonResponseSanity(obj); - - FileResource firstPage = (FileResource) obj.get("file"); - Assert.assertEquals("1234", firstPage.getFileContent()); - Assert.assertEquals(3, firstPage.getPageCount()); - Assert.assertEquals(0, firstPage.getPage()); - Assert.assertTrue(firstPage.isHasNext()); - Assert.assertEquals("/tmp/testPagination", firstPage.getFilePath()); - - - response = fileService.getFilePage("/tmp/testPagination", 1L); - ServiceTestUtils.assertHTTPResponseOK(response); - - FileResource secondPage = (FileResource) ((JSONObject) response.getEntity()).get("file"); - Assert.assertEquals("5678", secondPage.getFileContent()); - Assert.assertEquals(1, secondPage.getPage()); - Assert.assertTrue(secondPage.isHasNext()); - - - response = fileService.getFilePage("/tmp/testPagination", 2L); - ServiceTestUtils.assertHTTPResponseOK(response); - - FileResource thirdPage = (FileResource) ((JSONObject) response.getEntity()).get("file"); - Assert.assertEquals("90", thirdPage.getFileContent()); - Assert.assertEquals(2, thirdPage.getPage()); - Assert.assertFalse(thirdPage.isHasNext()); - - - thrown.expect(BadRequestFormattedException.class); - fileService.getFilePage("/tmp/testPagination", 3L); - } - - @Test - public void testZeroLengthFile() throws Exception { - createFile("/tmp/testZeroLengthFile", ""); - - Response response = fileService.getFilePage("/tmp/testZeroLengthFile", 0L); - - ServiceTestUtils.assertHTTPResponseOK(response); - JSONObject obj = ((JSONObject) response.getEntity()); - assertFileJsonResponseSanity(obj); - - FileResource fileResource = (FileResource) obj.get("file"); - Assert.assertEquals("", fileResource.getFileContent()); - Assert.assertEquals(0, fileResource.getPage()); - Assert.assertFalse(fileResource.isHasNext()); - } - - @Test - public void testFileNotFound() throws IOException, InterruptedException { - assertHDFSFileNotExists("/tmp/notExistentFile"); - - thrown.expect(NotFoundFormattedException.class); - fileService.getFilePage("/tmp/notExistentFile", 2L); - } - - @Test - public void testDeleteFile() throws IOException, InterruptedException { - createFile("/tmp/testDeleteFile", "some content"); - - assertHDFSFileExists("/tmp/testDeleteFile"); - - Response response = fileService.deleteFile("/tmp/testDeleteFile"); - ServiceTestUtils.assertHTTPResponseNoContent(response); - - assertHDFSFileNotExists("/tmp/testDeleteFile"); - } - - @Test - public void testFakeFile() throws IOException, InterruptedException { - String content = "Fake file content"; - String encodedContent = Base64.encodeBase64String(content.getBytes()); - String filepath = "fakefile://"+encodedContent; - Response response = fileService.getFilePage(filepath,0l); - - ServiceTestUtils.assertHTTPResponseOK(response); - JSONObject obj = ((JSONObject) response.getEntity()); - assertFileJsonResponseSanity(obj); - - FileResource fileResource = (FileResource) obj.get("file"); - 
Assert.assertEquals(content, fileResource.getFileContent()); - Assert.assertEquals(0, fileResource.getPage()); - Assert.assertFalse(fileResource.isHasNext()); - } - - @Test - public void testJsonFakeFile() throws IOException, InterruptedException,Exception { - String content = "{\"queryText\":\"Query Content\"}"; - String url = "http://fileurl/content#queryText"; - String filepath = "jsonpath:"+url; - - URLStreamProvider urlStreamProvider = createNiceMock(URLStreamProvider.class); - InputStream inputStream = IOUtils.toInputStream(content); - reset(context); - expect(context.getProperties()).andReturn(properties).anyTimes(); - expect(context.getURLStreamProvider()).andReturn(urlStreamProvider); - expect(urlStreamProvider.readFrom(eq(url),eq("GET"),anyString(), EasyMock.<Map<String, String>>anyObject())).andReturn(inputStream); - - fileService = getService(FileService.class, handler, context); - replay(context,urlStreamProvider); - - Response response = fileService.getFilePage(filepath,0l); - - ServiceTestUtils.assertHTTPResponseOK(response); - JSONObject obj = ((JSONObject) response.getEntity()); - assertFileJsonResponseSanity(obj); - - FileResource fileResource = (FileResource) obj.get("file"); - Assert.assertEquals("Query Content", fileResource.getFileContent()); - Assert.assertEquals(0, fileResource.getPage()); - Assert.assertFalse(fileResource.isHasNext()); - } - - - private Response createFile(String filePath, String content) throws IOException, InterruptedException { - FileService.FileResourceRequest request = new FileService.FileResourceRequest(); - request.file = new FileResource(); - request.file.setFilePath(filePath); - request.file.setFileContent(content); - - return fileService.createFile(request, - ServiceTestUtils.getResponseWithLocation(), ServiceTestUtils.getDefaultUriInfo()); - } - - - private void assertFileJsonResponseSanity(JSONObject obj) { - Assert.assertTrue(obj.containsKey("file")); - } - - private void assertHDFSFileContains(String filePath, String expectedContent) throws IOException { - FSDataInputStream fileInputStream = hdfsCluster.getFileSystem().open(new Path(filePath)); - byte[] buffer = new byte[256]; - int read = fileInputStream.read(buffer); - - byte[] readData = Arrays.copyOfRange(buffer, 0, read); - String actualContent = new String(readData, Charset.forName("UTF-8")); - - Assert.assertEquals(expectedContent, actualContent); - } - - private void assertHDFSFileExists(String filePath) throws IOException { - Assert.assertTrue( hdfsCluster.getFileSystem().exists(new Path(filePath)) ); - } - - private void assertHDFSFileNotExists(String filePath) throws IOException { - Assert.assertFalse(hdfsCluster.getFileSystem().exists(new Path(filePath)) ); - } - -}
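The pagination assertions in testPagination above (a 10-byte file read with a 4-byte page size yields the pages "1234", "5678" and "90", i.e. three pages) and the single empty page asserted in testZeroLengthFile follow from plain ceiling arithmetic. A standalone sketch of that calculation, offered as an illustration of the asserted behaviour rather than the FilePaginator implementation itself (the class below is hypothetical and works on a String instead of HDFS bytes):

    public class PaginationSketch {
      // Illustrative paging maths: pageCount = ceil(length / pageSize), with a
      // minimum of one page so an empty file still yields page 0 with "" content.
      public static String page(String content, int pageSize, int page) {
        int pageCount = Math.max(1, (content.length() + pageSize - 1) / pageSize);
        if (page < 0 || page >= pageCount) {
          throw new IllegalArgumentException("page out of range: " + page);
        }
        int from = page * pageSize;
        int to = Math.min(from + pageSize, content.length());
        return content.substring(from, to);
      }

      public static void main(String[] args) {
        // "1234567890" with 4-byte pages -> "1234", "5678", "90"
        System.out.println(page("1234567890", 4, 0));
        System.out.println(page("1234567890", 4, 1));
        System.out.println(page("1234567890", 4, 2));
        System.out.println(page("", 4, 0).isEmpty());              // true
      }
    }

A request for page 3 of the 10-byte file throws here, which lines up with the BadRequestFormattedException the deleted test expects for the same out-of-range page.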