http://git-wip-us.apache.org/repos/asf/knox/blob/416ee7c1/gateway-test/src/test/java/org/apache/knox/gateway/GatewayPortMappingDisableFeatureTest.java ---------------------------------------------------------------------- diff --cc gateway-test/src/test/java/org/apache/knox/gateway/GatewayPortMappingDisableFeatureTest.java index db1dbe0,0000000..a4d8166 mode 100644,000000..100644 --- a/gateway-test/src/test/java/org/apache/knox/gateway/GatewayPortMappingDisableFeatureTest.java +++ b/gateway-test/src/test/java/org/apache/knox/gateway/GatewayPortMappingDisableFeatureTest.java @@@ -1,252 -1,0 +1,252 @@@ +package org.apache.knox.gateway; + +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * <p> + * http://www.apache.org/licenses/LICENSE-2.0 + * <p> + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import com.mycila.xmltool.XMLDoc; +import com.mycila.xmltool.XMLTag; +import org.apache.hadoop.test.TestUtils; +import org.apache.hadoop.test.category.ReleaseTest; +import org.apache.hadoop.test.mock.MockServer; +import org.apache.http.HttpStatus; +import org.junit.After; +import org.junit.Before; +import org.junit.Rule; +import org.junit.Test; +import org.junit.experimental.categories.Category; +import org.junit.rules.ExpectedException; + +import java.io.IOException; +import java.net.ConnectException; +import java.util.concurrent.ConcurrentHashMap; + +import static io.restassured.RestAssured.given; +import static org.apache.hadoop.test.TestUtils.LOG_ENTER; +import static org.apache.hadoop.test.TestUtils.LOG_EXIT; +import static org.hamcrest.CoreMatchers.is; + +/** + * Test that the Gateway Topology Port Mapping feature is disabled properly. + * + */ +@Category(ReleaseTest.class) +public class GatewayPortMappingDisableFeatureTest { + + // Specifies if the test requests should go through the gateway or directly to the services. + // This is frequently used to verify the behavior of the test both with and without the gateway. + private static final boolean USE_GATEWAY = true; + + // Specifies if the test requests should be sent to mock services or the real services. + // This is frequently used to verify the behavior of the test both with and without mock services. + private static final boolean USE_MOCK_SERVICES = true; + + private static GatewayTestDriver driver = new GatewayTestDriver(); + + private static MockServer masterServer; + + private int eeriePort; + + @Rule + public ExpectedException exception = ExpectedException.none(); + + + public GatewayPortMappingDisableFeatureTest() { + super(); + } + + /** + * Creates a deployment of a gateway instance that all test methods will share. This method also creates a + * registry of sorts for all of the services that will be used by the test methods. + * The createTopology method is used to create the topology file that would normally be read from disk. 
+ * The driver.setupGateway invocation is where the creation of GATEWAY_HOME occurs.
+ * <p/>
+ * This would normally be done once for this suite but the failure tests start affecting each other depending
+ * on the state of the last 'active' URL.
+ *
+ * @throws Exception Thrown if any failure occurs.
+ */
+ @Before
+ public void setup() throws Exception {
+ LOG_ENTER();
+
+ eeriePort = getAvailablePort(1240, 49151);
+
+ ConcurrentHashMap<String, Integer> topologyPortMapping = new ConcurrentHashMap<String, Integer>();
+ topologyPortMapping.put("eerie", eeriePort);
+
+ masterServer = new MockServer("master", true);
+ GatewayTestConfig config = new GatewayTestConfig();
+ config.setGatewayPath("gateway");
+ config.setTopologyPortMapping(topologyPortMapping);
+ // disable the feature
+ config.setGatewayPortMappingEnabled(false);
+
+ driver.setResourceBase(WebHdfsHaFuncTest.class);
+ driver.setupLdap(0);
+
+ driver.setupService("WEBHDFS", "http://vm.local:50070/webhdfs", "/eerie/webhdfs", USE_MOCK_SERVICES);
+
+ driver.setupGateway(config, "eerie", createTopology("WEBHDFS"), USE_GATEWAY);
+
+ LOG_EXIT();
+ }
+
+ @After
+ public void cleanup() throws Exception {
+ LOG_ENTER();
+ driver.cleanup();
+ driver.reset();
+ masterServer.reset();
+ LOG_EXIT();
+ }
+
+ /**
+ * Test the standard case.
+ *
+ * @throws IOException
+ */
+ @Test(timeout = TestUtils.MEDIUM_TIMEOUT )
+ public void testBasicListOperation() throws IOException {
+ LOG_ENTER();
+ test(driver.getUrl("WEBHDFS") );
+ LOG_EXIT();
+ }
+
+ /**
+ * Test the multi port fail scenario when the feature is disabled.
+ * @throws IOException
+ */
+ @Test(timeout = TestUtils.MEDIUM_TIMEOUT )
+ public void testMultiPortFailOperation() throws IOException {
+ LOG_ENTER();
+ exception.expect(ConnectException.class);
+ exception.expectMessage("Connection refused");
+
+ test("http://localhost:" + eeriePort + "/webhdfs" );
+ LOG_EXIT();
+ }
+
+
+ private void test (final String url) throws IOException {
+ String password = "hdfs-password";
+ String username = "hdfs";
+
+ masterServer.expect()
+ .method("GET")
+ .pathInfo("/webhdfs/v1/")
+ .queryParam("op", "LISTSTATUS")
+ .queryParam("user.name", username)
+ .respond()
+ .status(HttpStatus.SC_OK)
+ .content(driver.getResourceBytes("webhdfs-liststatus-success.json"))
+ .contentType("application/json");
+
+ given()
+ .auth().preemptive().basic(username, password)
+ .header("X-XSRF-Header", "jksdhfkhdsf")
+ .queryParam("op", "LISTSTATUS")
- .expect()
++ .then()
+ .log().ifError()
+ .statusCode(HttpStatus.SC_OK)
- .content("FileStatuses.FileStatus[0].pathSuffix", is("app-logs"))
++ .body("FileStatuses.FileStatus[0].pathSuffix", is("app-logs"))
+ .when().get(url + "/v1/");
+ masterServer.isEmpty();
+ }
+
+
+ /**
+ * Creates a topology that is deployed to the gateway instance for the test suite.
+ * Note that this topology is shared by all of the test methods in this suite.
+ *
+ * @return A populated XML structure for a topology file.
+ */ + private static XMLTag createTopology(final String role) { + XMLTag xml = XMLDoc.newDocument(true) + .addRoot("topology") + .addTag("gateway") + .addTag("provider") + .addTag("role").addText("webappsec") + .addTag("name").addText("WebAppSec") + .addTag("enabled").addText("true") + .addTag("param") + .addTag("name").addText("csrf.enabled") + .addTag("value").addText("true").gotoParent().gotoParent() + .addTag("provider") + .addTag("role").addText("authentication") + .addTag("name").addText("ShiroProvider") + .addTag("enabled").addText("true") + .addTag("param") + .addTag("name").addText("main.ldapRealm") + .addTag("value").addText("org.apache.knox.gateway.shirorealm.KnoxLdapRealm").gotoParent() + .addTag("param") + .addTag("name").addText("main.ldapRealm.userDnTemplate") + .addTag("value").addText("uid={0},ou=people,dc=hadoop,dc=apache,dc=org").gotoParent() + .addTag("param") + .addTag("name").addText("main.ldapRealm.contextFactory.url") + .addTag("value").addText(driver.getLdapUrl()).gotoParent() + .addTag("param") + .addTag("name").addText("main.ldapRealm.contextFactory.authenticationMechanism") + .addTag("value").addText("simple").gotoParent() + .addTag("param") + .addTag("name").addText("urls./**") + .addTag("value").addText("authcBasic").gotoParent().gotoParent() + .addTag("provider") + .addTag("role").addText("identity-assertion") + .addTag("enabled").addText("true") + .addTag("name").addText("Default").gotoParent() + .addTag("provider") + .addTag("role").addText("authorization") + .addTag("enabled").addText("true") + .addTag("name").addText("AclsAuthz").gotoParent() + .addTag("param") + .addTag("name").addText("webhdfs-acl") + .addTag("value").addText("hdfs;*;*").gotoParent() + .addTag("provider") + .addTag("role").addText("ha") + .addTag("enabled").addText("true") + .addTag("name").addText("HaProvider") + .addTag("param") + .addTag("name").addText("WEBHDFS") + .addTag("value").addText("maxFailoverAttempts=3;failoverSleep=15;maxRetryAttempts=3;retrySleep=10;enabled=true").gotoParent() + .gotoRoot() + .addTag("service") + .addTag("role").addText(role) + .addTag("url").addText("http://localhost:" + masterServer.getPort() + "/webhdfs") + .gotoRoot(); + return xml; + } + + /** + * This utility method will return the next available port + * that can be used. + * + * @return Port that is available. + */ + public static int getAvailablePort(final int min, final int max) { + + for (int i = min; i <= max; i++) { + + if (!GatewayServer.isPortInUse(i)) { + return i; + } + } + // too bad + return -1; + } + +}
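A note on the getAvailablePort(min, max) helper above: it walks a fixed range and returns -1 when nothing is free, and the port it reports can still be claimed by another process before the test binds it. An alternative sketch using plain JDK sockets (the findFreePort name is illustrative, not part of this commit) asks the OS for an ephemeral port instead:

    import java.io.IOException;
    import java.net.ServerSocket;

    // Bind to port 0 so the OS assigns a free ephemeral port, then release it
    // and return the number. The same check-then-use race as the range scan
    // still applies, but no fixed range has to be walked.
    public static int findFreePort() throws IOException {
      try (ServerSocket socket = new ServerSocket(0)) {
        return socket.getLocalPort();
      }
    }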
http://git-wip-us.apache.org/repos/asf/knox/blob/416ee7c1/gateway-test/src/test/java/org/apache/knox/gateway/GatewayPortMappingFailTest.java
----------------------------------------------------------------------
diff --cc gateway-test/src/test/java/org/apache/knox/gateway/GatewayPortMappingFailTest.java
index ee9e802,0000000..bc01c86
mode 100644,000000..100644
--- a/gateway-test/src/test/java/org/apache/knox/gateway/GatewayPortMappingFailTest.java
+++ b/gateway-test/src/test/java/org/apache/knox/gateway/GatewayPortMappingFailTest.java
@@@ -1,150 -1,0 +1,150 @@@
+package org.apache.knox.gateway;
+
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import org.apache.hadoop.test.TestUtils;
+import org.apache.hadoop.test.category.ReleaseTest;
+import org.apache.hadoop.test.mock.MockServer;
+import org.apache.http.HttpStatus;
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+
+import java.io.IOException;
+import java.util.concurrent.ConcurrentHashMap;
+
+import static io.restassured.RestAssured.given;
+import static org.apache.hadoop.test.TestUtils.LOG_ENTER;
+import static org.apache.hadoop.test.TestUtils.LOG_EXIT;
+
+/**
+ * Test the fail cases for the Port Mapping Feature.
+ */
+@Category(ReleaseTest.class)
+public class GatewayPortMappingFailTest {
+
+ // Specifies if the test requests should go through the gateway or directly to the services.
+ // This is frequently used to verify the behavior of the test both with and without the gateway.
+ private static final boolean USE_GATEWAY = true;
+
+ // Specifies if the test requests should be sent to mock services or the real services.
+ // This is frequently used to verify the behavior of the test both with and without mock services.
+ private static final boolean USE_MOCK_SERVICES = true;
+
+ private static GatewayTestDriver driver = new GatewayTestDriver();
+
+ private static MockServer masterServer;
+
+ private static int eeriePort;
+
+ /**
+ * Create an instance.
+ */
+ public GatewayPortMappingFailTest() {
+ super();
+ }
+
+ /**
+ * Creates a deployment of a gateway instance that all test methods will share. This method also creates a
+ * registry of sorts for all of the services that will be used by the test methods.
+ * The createTopology method is used to create the topology file that would normally be read from disk.
+ * The driver.setupGateway invocation is where the creation of GATEWAY_HOME occurs.
+ * <p/>
+ * This would normally be done once for this suite but the failure tests start affecting each other depending
+ * on the state of the last 'active' URL.
+ *
+ * @throws Exception Thrown if any failure occurs.
+ */
+ @BeforeClass
+ public static void setup() throws Exception {
+ LOG_ENTER();
+
+ eeriePort = GatewayPortMappingFuncTest.getAvailablePort(1240, 49151);
+
+ ConcurrentHashMap<String, Integer> topologyPortMapping = new ConcurrentHashMap<String, Integer>();
+ topologyPortMapping.put("eerie", eeriePort);
+
+ masterServer = new MockServer("master", true);
+ GatewayTestConfig config = new GatewayTestConfig();
+ config.setGatewayPath("gateway");
+ config.setTopologyPortMapping(topologyPortMapping);
+
+ driver.setResourceBase(WebHdfsHaFuncTest.class);
+ driver.setupLdap(0);
+
+ driver.setupService("WEBHDFS", "http://vm.local:50070/webhdfs", "/eerie/webhdfs", USE_MOCK_SERVICES);
+
+ driver.setupGateway(config, "eerie", GatewayPortMappingFuncTest.createTopology("WEBHDFS", driver.getLdapUrl(), masterServer.getPort()), USE_GATEWAY);
+
+ LOG_EXIT();
+ }
+
+ @AfterClass
+ public static void cleanup() throws Exception {
+ LOG_ENTER();
+ driver.cleanup();
+ driver.reset();
+ masterServer.reset();
+ LOG_EXIT();
+ }
+
+
+ /**
+ * Fail when trying to use this feature on the standard port.
+ * Here the Default Topology Feature is not enabled.
+ *
+ * http://localhost:{gatewayPort}/webhdfs/v1
+ *
+ * @throws IOException
+ */
+ @Test(timeout = TestUtils.MEDIUM_TIMEOUT )
+ public void testMultiPortOperationFail() throws IOException {
+ LOG_ENTER();
+ final String url = "http://localhost:" + driver.getGatewayPort() + "/webhdfs" ;
+
+ String password = "hdfs-password";
+ String username = "hdfs";
+
+ masterServer.expect()
+ .method("GET")
+ .pathInfo("/webhdfs/v1/")
+ .queryParam("op", "LISTSTATUS")
+ .queryParam("user.name", username)
+ .respond()
+ .status(HttpStatus.SC_OK)
+ .content(driver.getResourceBytes("webhdfs-liststatus-success.json"))
+ .contentType("application/json");
+
+ given()
+ .auth().preemptive().basic(username, password)
+ .header("X-XSRF-Header", "jksdhfkhdsf")
+ .queryParam("op", "LISTSTATUS")
- .expect()
++ .then()
+ //.log().ifError()
+ .statusCode(HttpStatus.SC_NOT_FOUND)
+ //.content("FileStatuses.FileStatus[0].pathSuffix", is("app-logs"))
+ .when().get(url + "/v1/");
+ masterServer.isEmpty();
+
+ LOG_EXIT();
+ }
+
+
+}
http://git-wip-us.apache.org/repos/asf/knox/blob/416ee7c1/gateway-test/src/test/java/org/apache/knox/gateway/GatewayPortMappingFuncTest.java
----------------------------------------------------------------------
diff --cc gateway-test/src/test/java/org/apache/knox/gateway/GatewayPortMappingFuncTest.java
index b597f41,0000000..cbf138b
mode 100644,000000..100644
--- a/gateway-test/src/test/java/org/apache/knox/gateway/GatewayPortMappingFuncTest.java
+++ b/gateway-test/src/test/java/org/apache/knox/gateway/GatewayPortMappingFuncTest.java
@@@ -1,276 -1,0 +1,276 @@@
+package org.apache.knox.gateway;
+
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import com.mycila.xmltool.XMLDoc;
+import com.mycila.xmltool.XMLTag;
+import org.apache.hadoop.test.TestUtils;
+import org.apache.hadoop.test.category.ReleaseTest;
+import org.apache.hadoop.test.mock.MockServer;
+import org.apache.http.HttpStatus;
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+
+import java.io.IOException;
+import java.util.concurrent.ConcurrentHashMap;
+
+import static io.restassured.RestAssured.given;
+import static org.apache.hadoop.test.TestUtils.LOG_ENTER;
+import static org.apache.hadoop.test.TestUtils.LOG_EXIT;
+import static org.hamcrest.CoreMatchers.is;
+
+/**
+ * Test the Gateway Topology Port Mapping functionality.
+ *
+ */
+@Category(ReleaseTest.class)
+public class GatewayPortMappingFuncTest {
+
+ // Specifies if the test requests should go through the gateway or directly to the services.
+ // This is frequently used to verify the behavior of the test both with and without the gateway.
+ private static final boolean USE_GATEWAY = true;
+
+ // Specifies if the test requests should be sent to mock services or the real services.
+ // This is frequently used to verify the behavior of the test both with and without mock services.
+ private static final boolean USE_MOCK_SERVICES = true;
+
+ private static GatewayTestDriver driver = new GatewayTestDriver();
+
+ private static MockServer masterServer;
+
+ private static int eeriePort;
+
+ public GatewayPortMappingFuncTest() {
+ super();
+ }
+
+ /**
+ * Creates a deployment of a gateway instance that all test methods will share. This method also creates a
+ * registry of sorts for all of the services that will be used by the test methods.
+ * The createTopology method is used to create the topology file that would normally be read from disk.
+ * The driver.setupGateway invocation is where the creation of GATEWAY_HOME occurs.
+ * <p/>
+ * This would normally be done once for this suite but the failure tests start affecting each other depending
+ * on the state of the last 'active' URL.
+ *
+ * @throws Exception Thrown if any failure occurs.
+ */
+ @BeforeClass
+ public static void setup() throws Exception {
+ LOG_ENTER();
+
+ eeriePort = getAvailablePort(1240, 49151);
+
+ ConcurrentHashMap<String, Integer> topologyPortMapping = new ConcurrentHashMap<String, Integer>();
+ topologyPortMapping.put("eerie", eeriePort);
+
+ masterServer = new MockServer("master", true);
+ GatewayTestConfig config = new GatewayTestConfig();
+ config.setGatewayPath("gateway");
+ config.setTopologyPortMapping(topologyPortMapping);
+
+ // Enable default topology
+ config.setDefaultTopologyName("eerie");
+
+ driver.setResourceBase(WebHdfsHaFuncTest.class);
+ driver.setupLdap(0);
+
+ driver.setupService("WEBHDFS", "http://vm.local:50070/webhdfs", "/eerie/webhdfs", USE_MOCK_SERVICES);
+
+ driver.setupGateway(config, "eerie", createTopology("WEBHDFS", driver.getLdapUrl(), masterServer.getPort()), USE_GATEWAY);
+
+ LOG_EXIT();
+ }
+
+ @AfterClass
+ public static void cleanup() throws Exception {
+ LOG_ENTER();
+ driver.cleanup();
+ driver.reset();
+ masterServer.reset();
+ LOG_EXIT();
+ }
+
+ /**
+ * Test the standard case:
+ * http://localhost:{gatewayPort}/gateway/eerie/webhdfs/v1
+ *
+ * @throws IOException
+ */
+ @Test(timeout = TestUtils.MEDIUM_TIMEOUT )
+ public void testBasicListOperation() throws IOException {
+ LOG_ENTER();
+ test("http://localhost:" + driver.getGatewayPort() + "/gateway/eerie" + "/webhdfs" );
+ LOG_EXIT();
+ }
+
+ /**
+ * Test the Default Topology Feature, activated by property
+ * "default.app.topology.name"
+ *
+ * http://localhost:{gatewayPort}/webhdfs/v1
+ *
+ * @throws IOException
+ */
+ @Test(timeout = TestUtils.MEDIUM_TIMEOUT )
+ public void testDefaultTopologyFeature() throws IOException {
+ LOG_ENTER();
+ test("http://localhost:" + driver.getGatewayPort() + "/webhdfs" );
+ LOG_EXIT();
+ }
+
+ /**
+ * Test the multi port scenario.
+ *
+ * http://localhost:{eeriePort}/webhdfs/v1
+ *
+ * @throws IOException
+ */
+ @Test(timeout = TestUtils.MEDIUM_TIMEOUT )
+ public void testMultiPortOperation() throws IOException {
+ LOG_ENTER();
+ test("http://localhost:" + eeriePort + "/webhdfs" );
+ LOG_EXIT();
+ }
+
+ /**
+ * Test the multi port scenario when gateway path is included.
+ *
+ * http://localhost:{eeriePort}/gateway/eerie/webhdfs/v1
+ *
+ * @throws IOException
+ */
+ @Test(timeout = TestUtils.MEDIUM_TIMEOUT )
+ public void testMultiPortWithGatewayPath() throws IOException {
+ LOG_ENTER();
+ test("http://localhost:" + eeriePort + "/gateway/eerie" + "/webhdfs" );
+ LOG_EXIT();
+ }
+
+
+ private void test (final String url) throws IOException {
+ String password = "hdfs-password";
+ String username = "hdfs";
+
+ masterServer.expect()
+ .method("GET")
+ .pathInfo("/webhdfs/v1/")
+ .queryParam("op", "LISTSTATUS")
+ .queryParam("user.name", username)
+ .respond()
+ .status(HttpStatus.SC_OK)
+ .content(driver.getResourceBytes("webhdfs-liststatus-success.json"))
+ .contentType("application/json");
+
+ given()
+ .auth().preemptive().basic(username, password)
+ .header("X-XSRF-Header", "jksdhfkhdsf")
+ .queryParam("op", "LISTSTATUS")
- .expect()
++ .then()
+ .log().ifError()
+ .statusCode(HttpStatus.SC_OK)
- .content("FileStatuses.FileStatus[0].pathSuffix", is("app-logs"))
++ .body("FileStatuses.FileStatus[0].pathSuffix", is("app-logs"))
+ .when().get(url + "/v1/");
+ masterServer.isEmpty();
+ }
+
+
+ /**
+ * Creates a topology that is deployed to the gateway instance for the test suite.
+ * Note that this topology is shared by all of the test methods in this suite.
+ * + * @return A populated XML structure for a topology file. + */ + public static XMLTag createTopology(final String role, final String ldapURL, final int gatewayPort ) { + XMLTag xml = XMLDoc.newDocument(true) + .addRoot("topology") + .addTag("gateway") + .addTag("provider") + .addTag("role").addText("webappsec") + .addTag("name").addText("WebAppSec") + .addTag("enabled").addText("true") + .addTag("param") + .addTag("name").addText("csrf.enabled") + .addTag("value").addText("true").gotoParent().gotoParent() + .addTag("provider") + .addTag("role").addText("authentication") + .addTag("name").addText("ShiroProvider") + .addTag("enabled").addText("true") + .addTag("param") + .addTag("name").addText("main.ldapRealm") + .addTag("value").addText("org.apache.knox.gateway.shirorealm.KnoxLdapRealm").gotoParent() + .addTag("param") + .addTag("name").addText("main.ldapRealm.userDnTemplate") + .addTag("value").addText("uid={0},ou=people,dc=hadoop,dc=apache,dc=org").gotoParent() + .addTag("param") + .addTag("name").addText("main.ldapRealm.contextFactory.url") + .addTag("value").addText(ldapURL).gotoParent() + .addTag("param") + .addTag("name").addText("main.ldapRealm.contextFactory.authenticationMechanism") + .addTag("value").addText("simple").gotoParent() + .addTag("param") + .addTag("name").addText("urls./**") + .addTag("value").addText("authcBasic").gotoParent().gotoParent() + .addTag("provider") + .addTag("role").addText("identity-assertion") + .addTag("enabled").addText("true") + .addTag("name").addText("Default").gotoParent() + .addTag("provider") + .addTag("role").addText("authorization") + .addTag("enabled").addText("true") + .addTag("name").addText("AclsAuthz").gotoParent() + .addTag("param") + .addTag("name").addText("webhdfs-acl") + .addTag("value").addText("hdfs;*;*").gotoParent() + .addTag("provider") + .addTag("role").addText("ha") + .addTag("enabled").addText("true") + .addTag("name").addText("HaProvider") + .addTag("param") + .addTag("name").addText("WEBHDFS") + .addTag("value").addText("maxFailoverAttempts=3;failoverSleep=15;maxRetryAttempts=3;retrySleep=10;enabled=true").gotoParent() + .gotoRoot() + .addTag("service") + .addTag("role").addText(role) + .addTag("url").addText("http://localhost:" + gatewayPort + "/webhdfs") + .gotoRoot(); + return xml; + } + + /** + * This utility method will return the next available port + * that can be used. + * + * @return Port that is available. + */ + public static int getAvailablePort(final int min, final int max) { + + for (int i = min; i <= max; i++) { + + if (!GatewayServer.isPortInUse(i)) { + return i; + } + } + // too bad + return -1; + } + +} http://git-wip-us.apache.org/repos/asf/knox/blob/416ee7c1/gateway-test/src/test/java/org/apache/knox/gateway/GatewaySampleFuncTest.java ---------------------------------------------------------------------- diff --cc gateway-test/src/test/java/org/apache/knox/gateway/GatewaySampleFuncTest.java index 4c87a90,0000000..b146972 mode 100644,000000..100644 --- a/gateway-test/src/test/java/org/apache/knox/gateway/GatewaySampleFuncTest.java +++ b/gateway-test/src/test/java/org/apache/knox/gateway/GatewaySampleFuncTest.java @@@ -1,180 -1,0 +1,180 @@@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. 
The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.knox.gateway; + +import com.mycila.xmltool.XMLDoc; +import com.mycila.xmltool.XMLTag; +import org.apache.directory.server.protocol.shared.transport.TcpTransport; +import org.apache.knox.gateway.config.GatewayConfig; +import org.apache.knox.gateway.security.ldap.SimpleLdapDirectoryServer; +import org.apache.knox.gateway.services.DefaultGatewayServices; +import org.apache.knox.gateway.services.ServiceLifecycleException; +import org.apache.hadoop.test.TestUtils; +import org.apache.http.HttpStatus; +import org.apache.log4j.Appender; +import org.hamcrest.MatcherAssert; +import org.junit.AfterClass; +import org.junit.BeforeClass; +import org.junit.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.File; +import java.io.FileOutputStream; +import java.io.IOException; +import java.util.Enumeration; +import java.util.HashMap; +import java.util.Map; +import java.util.UUID; + +import static io.restassured.RestAssured.given; +import static org.apache.hadoop.test.TestUtils.LOG_ENTER; +import static org.apache.hadoop.test.TestUtils.LOG_EXIT; +import static org.hamcrest.CoreMatchers.is; +import static org.hamcrest.CoreMatchers.notNullValue; + +public class GatewaySampleFuncTest { + + private static Logger LOG = LoggerFactory.getLogger( GatewaySampleFuncTest.class ); + + public static Enumeration<Appender> appenders; + public static GatewayConfig config; + public static GatewayServer gateway; + public static String gatewayUrl; + public static String clusterUrl; + private static GatewayTestDriver driver = new GatewayTestDriver(); + + @BeforeClass + public static void setupSuite() throws Exception { + LOG_ENTER(); + //appenders = NoOpAppender.setUp(); + driver.setupLdap(0); + setupGateway(); + LOG_EXIT(); + } + + @AfterClass + public static void cleanupSuite() throws Exception { + LOG_ENTER(); + gateway.stop(); + driver.cleanup(); + //FileUtils.deleteQuietly( new File( config.getGatewayHomeDir() ) ); + //NoOpAppender.tearDown( appenders ); + LOG_EXIT(); + } + + public static void setupGateway() throws Exception { + + File targetDir = new File( System.getProperty( "user.dir" ), "target" ); + File gatewayDir = new File( targetDir, "gateway-home-" + UUID.randomUUID() ); + gatewayDir.mkdirs(); + + GatewayTestConfig testConfig = new GatewayTestConfig(); + config = testConfig; + testConfig.setGatewayHomeDir( gatewayDir.getAbsolutePath() ); + + File topoDir = new File( testConfig.getGatewayTopologyDir() ); + topoDir.mkdirs(); + + File deployDir = new File( testConfig.getGatewayDeploymentDir() ); + deployDir.mkdirs(); + + File descriptor = new File( topoDir, "test-cluster.xml" ); + FileOutputStream stream = new FileOutputStream( descriptor ); + createTopology().toStream( stream ); + stream.close(); + + DefaultGatewayServices srvcs = new DefaultGatewayServices(); + Map<String,String> options = new HashMap<>(); + options.put( "persist-master", "false" ); + options.put( "master", 
"password" ); + try { + srvcs.init( testConfig, options ); + } catch ( ServiceLifecycleException e ) { + e.printStackTrace(); // I18N not required. + } + + gateway = GatewayServer.startGateway( testConfig, srvcs ); + MatcherAssert.assertThat( "Failed to start gateway.", gateway, notNullValue() ); + + LOG.info( "Gateway port = " + gateway.getAddresses()[ 0 ].getPort() ); + + gatewayUrl = "http://localhost:" + gateway.getAddresses()[0].getPort() + "/" + config.getGatewayPath(); + clusterUrl = gatewayUrl + "/test-cluster"; + } + + private static XMLTag createTopology() { + XMLTag xml = XMLDoc.newDocument( true ) + .addRoot( "topology" ) + .addTag( "gateway" ) + .addTag( "provider" ) + .addTag( "role" ).addText( "authentication" ) + .addTag( "name" ).addText( "ShiroProvider" ) + .addTag( "enabled" ).addText( "true" ) + .addTag( "param" ) + .addTag( "name" ).addText( "main.ldapRealm" ) + .addTag( "value" ).addText( "org.apache.knox.gateway.shirorealm.KnoxLdapRealm" ).gotoParent() + .addTag( "param" ) + .addTag( "name" ).addText( "main.ldapRealm.userDnTemplate" ) + .addTag( "value" ).addText( "uid={0},ou=people,dc=hadoop,dc=apache,dc=org" ).gotoParent() + .addTag( "param" ) + .addTag( "name" ).addText( "main.ldapRealm.contextFactory.url" ) + .addTag( "value" ).addText( driver.getLdapUrl() ).gotoParent() + .addTag( "param" ) + .addTag( "name" ).addText( "main.ldapRealm.contextFactory.authenticationMechanism" ) + .addTag( "value" ).addText( "simple" ).gotoParent() + .addTag( "param" ) + .addTag( "name" ).addText( "urls./**" ) + .addTag( "value" ).addText( "authcBasic" ).gotoParent().gotoParent() + .addTag( "provider" ) + .addTag( "role" ).addText( "identity-assertion" ) + .addTag( "enabled" ).addText( "true" ) + .addTag( "name" ).addText( "Default" ).gotoParent() + .addTag( "provider" ) + .gotoRoot() + .addTag( "service" ) + .addTag( "role" ).addText( "test-service-role" ) + .gotoRoot(); + // System.out.println( "GATEWAY=" + xml.toString() ); + return xml; + } + + //@Test + public void waitForManualTesting() throws IOException { + System.in.read(); + } + + @Test( timeout = TestUtils.MEDIUM_TIMEOUT ) + public void testTestService() throws ClassNotFoundException { + LOG_ENTER(); + String username = "guest"; + String password = "guest-password"; + String serviceUrl = clusterUrl + "/test-service-path/test-service-resource"; + given() + //.log().all() + .auth().preemptive().basic( username, password ) - .expect() ++ .then() + //.log().all() + .statusCode( HttpStatus.SC_OK ) + .contentType( "text/plain" ) + .body( is( "test-service-response" ) ) + .when().get( serviceUrl ); + LOG_EXIT(); + } + +} http://git-wip-us.apache.org/repos/asf/knox/blob/416ee7c1/gateway-test/src/test/java/org/apache/knox/gateway/Knox242FuncTest.java ---------------------------------------------------------------------- diff --cc gateway-test/src/test/java/org/apache/knox/gateway/Knox242FuncTest.java index 7aaf056,0000000..cd30311 mode 100755,000000..100755 --- a/gateway-test/src/test/java/org/apache/knox/gateway/Knox242FuncTest.java +++ b/gateway-test/src/test/java/org/apache/knox/gateway/Knox242FuncTest.java @@@ -1,284 -1,0 +1,284 @@@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.knox.gateway; + +import static io.restassured.RestAssured.given; +import static org.apache.hadoop.test.TestUtils.LOG_ENTER; +import static org.apache.hadoop.test.TestUtils.LOG_EXIT; +import static org.hamcrest.CoreMatchers.is; +import static org.hamcrest.CoreMatchers.notNullValue; + +import java.io.File; +import java.io.FileOutputStream; +import java.io.IOException; +import java.io.InputStream; +import java.net.InetSocketAddress; +import java.net.URL; +import java.nio.file.FileSystems; +import java.nio.file.Path; +import java.util.Enumeration; +import java.util.HashMap; +import java.util.Map; +import java.util.UUID; + +import org.apache.knox.gateway.config.GatewayConfig; +import org.apache.knox.gateway.services.DefaultGatewayServices; +import org.apache.knox.gateway.services.GatewayServices; +import org.apache.knox.gateway.services.ServiceLifecycleException; +import org.apache.knox.gateway.services.security.AliasService; +import org.apache.hadoop.test.TestUtils; +import org.apache.http.HttpStatus; +import org.apache.log4j.Appender; +import org.hamcrest.MatcherAssert; +import org.junit.AfterClass; +import org.junit.BeforeClass; +import org.junit.Ignore; +import org.junit.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import com.mycila.xmltool.XMLDoc; +import com.mycila.xmltool.XMLTag; + +/** + * Functional test to verify : KNOX-242 LDAP Enhancements + * Please see + * https://issues.apache.org/jira/browse/KNOX-242 + * + */ +public class Knox242FuncTest { + + private static Logger LOG = LoggerFactory.getLogger( Knox242FuncTest.class ); + + public static Enumeration<Appender> appenders; + public static GatewayConfig config; + public static GatewayServer gateway; + public static String gatewayUrl; + public static String clusterUrl; + public static String serviceUrl; + private static GatewayTestDriver driver = new GatewayTestDriver(); + + @BeforeClass + public static void setupSuite() throws Exception { + LOG_ENTER(); + //appenders = NoOpAppender.setUp(); + String basedir = System.getProperty("basedir"); + if (basedir == null) { + basedir = new File(".").getCanonicalPath(); + } + Path path = FileSystems.getDefault().getPath(basedir, "/src/test/resources/users-dynamic.ldif"); + driver.setupLdap( 0 , path.toFile() ); + setupGateway(); + TestUtils.awaitNon404HttpStatus( new URL( serviceUrl ), 10000, 100 ); + LOG_EXIT(); + } + + @AfterClass + public static void cleanupSuite() throws Exception { + LOG_ENTER(); + gateway.stop(); + driver.cleanup(); + //FileUtils.deleteQuietly( new File( config.getGatewayHomeDir() ) ); + //NoOpAppender.tearDown( appenders ); + LOG_EXIT(); + } + + public static void setupGateway() throws IOException, Exception { + + File targetDir = new File( System.getProperty( "user.dir" ), "target" ); + File gatewayDir = new File( targetDir, "gateway-home-" + UUID.randomUUID() ); + gatewayDir.mkdirs(); + + GatewayTestConfig testConfig = new GatewayTestConfig(); + config = testConfig; + testConfig.setGatewayHomeDir( gatewayDir.getAbsolutePath() ); + + File topoDir = new File( testConfig.getGatewayTopologyDir() ); + 
topoDir.mkdirs(); + + File deployDir = new File( testConfig.getGatewayDeploymentDir() ); + deployDir.mkdirs(); + + DefaultGatewayServices srvcs = new DefaultGatewayServices(); + Map<String,String> options = new HashMap<>(); + options.put( "persist-master", "false" ); + options.put( "master", "password" ); + try { + srvcs.init( testConfig, options ); + } catch ( ServiceLifecycleException e ) { + e.printStackTrace(); // I18N not required. + } + + gateway = GatewayServer.startGateway( testConfig, srvcs ); + MatcherAssert.assertThat( "Failed to start gateway.", gateway, notNullValue() ); + + LOG.info( "Gateway port = " + gateway.getAddresses()[ 0 ].getPort() ); + + gatewayUrl = "http://localhost:" + gateway.getAddresses()[0].getPort() + "/" + config.getGatewayPath(); + clusterUrl = gatewayUrl + "/testdg-cluster"; + serviceUrl = clusterUrl + "/test-service-path/test-service-resource"; + + GatewayServices services = GatewayServer.getGatewayServices(); + AliasService aliasService = (AliasService)services.getService(GatewayServices.ALIAS_SERVICE); + aliasService.addAliasForCluster("testdg-cluster", "ldcSystemPassword", "guest-password"); + - char[] password1 = aliasService.getPasswordFromAliasForCluster( "testdg-cluster", "ldcSystemPassword"); ++ // char[] password1 = aliasService.getPasswordFromAliasForCluster( "testdg-cluster", "ldcSystemPassword"); + //System.err.println("SETUP password 10: " + ((password1 == null) ? "NULL" : new String(password1))); + + File descriptor = new File( topoDir, "testdg-cluster.xml" ); + FileOutputStream stream = new FileOutputStream( descriptor ); + createTopology().toStream( stream ); + stream.close(); + } + + private static XMLTag createTopology() { + XMLTag xml = XMLDoc.newDocument( true ) + .addRoot( "topology" ) + .addTag( "gateway" ) + + .addTag( "provider" ) + .addTag( "role" ).addText( "authentication" ) + .addTag( "name" ).addText( "ShiroProvider" ) + .addTag( "enabled" ).addText( "true" ) + .addTag( "param" ) + .addTag( "name" ).addText( "main.ldapRealm" ) + .addTag( "value" ).addText( "org.apache.knox.gateway.shirorealm.KnoxLdapRealm" ) + .gotoParent().addTag( "param" ) + .addTag( "name" ).addText( "main.ldapGroupContextFactory" ) + .addTag( "value" ).addText( "org.apache.knox.gateway.shirorealm.KnoxLdapContextFactory" ) + .gotoParent().addTag( "param" ) + .addTag( "name" ).addText( "main.ldapRealm.contextFactory" ) + .addTag( "value" ).addText( "$ldapGroupContextFactory" ) + .gotoParent().addTag( "param" ) + .addTag( "name" ).addText( "main.ldapRealm.contextFactory.authenticationMechanism" ) + .addTag( "value" ).addText( "simple" ) + .gotoParent().addTag( "param" ) + .addTag( "name" ).addText( "main.ldapRealm.contextFactory.url" ) + .addTag( "value" ).addText( driver.getLdapUrl()) + .gotoParent().addTag( "param" ) + .addTag( "name" ).addText( "main.ldapRealm.userDnTemplate" ) + .addTag( "value" ).addText( "uid={0},ou=people,dc=hadoop,dc=apache,dc=org" ) + + .gotoParent().addTag( "param" ) + .addTag( "name" ).addText( "main.ldapRealm.searchBase" ) + .addTag( "value" ).addText( "dc=hadoop,dc=apache,dc=org" ) + + .gotoParent().addTag( "param" ) + .addTag( "name" ).addText( "main.ldapRealm.userSearchAttributeName" ) + .addTag( "value" ).addText( "uid" ) + .gotoParent().addTag( "param" ) + .addTag( "name" ).addText( "main.ldapRealm.userObjectClass" ) + .addTag( "value" ).addText( "person" ) + .gotoParent().addTag( "param" ) + .addTag( "name" ).addText( "main.ldapRealm.userSearchBase" ) + .addTag( "value" ).addText( "dc=hadoop,dc=apache,dc=org" ) + 
.gotoParent().addTag( "param" ) + .addTag( "name" ).addText( "main.ldapRealm.groupSearchBase" ) + .addTag( "value" ).addText( "ou=groups,dc=hadoop,dc=apache,dc=org" ) + + .gotoParent().addTag( "param" ) + .addTag( "name" ).addText( "main.ldapRealm.authorizationEnabled" ) + .addTag( "value" ).addText( "true" ) + .gotoParent().addTag( "param" ) + .addTag( "name" ).addText( "main.ldapRealm.contextFactory.systemAuthenticationMechanism" ) + .addTag( "value" ).addText( "simple" ) + .gotoParent().addTag( "param" ) + .addTag( "name" ).addText( "main.ldapRealm.groupObjectClass" ) + .addTag( "value" ).addText( "groupofurls" ) + .gotoParent().addTag( "param" ) + .addTag( "name" ).addText( "main.ldapRealm.memberAttribute" ) + .addTag( "value" ).addText( "memberurl" ) + .gotoParent().addTag( "param" ) + .addTag( "name" ).addText( "main.ldapRealm.memberAttributeValueTemplate" ) + .addTag( "value" ).addText( "uid={0},ou=people,dc=hadoop,dc=apache,dc=org" ) + .gotoParent().addTag( "param" ) + .addTag( "name" ).addText( "main.ldapRealm.contextFactory.systemUsername" ) + .addTag( "value" ).addText( "uid=guest,ou=people,dc=hadoop,dc=apache,dc=org" ) + .gotoParent().addTag( "param" ) + .addTag( "name" ).addText( "main.ldapRealm.contextFactory.clusterName" ) + .addTag( "value" ).addText( "testdg-cluster" ) + .gotoParent().addTag( "param" ) + .addTag( "name" ).addText( "main.ldapRealm.contextFactory.systemPassword" ) + .addTag( "value" ).addText( "S{ALIAS=ldcSystemPassword}" ) + // .addTag( "value" ).addText( "guest-password" ) + .gotoParent().addTag( "param" ) + .addTag( "name" ).addText( "urls./**" ) + .addTag( "value" ).addText( "authcBasic" ) + + .gotoParent().gotoParent().addTag( "provider" ) + .addTag( "role" ).addText( "authorization" ) + .addTag( "name" ).addText( "AclsAuthz" ) + .addTag( "enabled" ).addText( "true" ) + .addTag( "param" ) + .addTag( "name" ).addText( "test-service-role.acl" ) + .addTag( "value" ).addText( "*;directors;*" ) + + .gotoParent().gotoParent().addTag( "provider" ) + .addTag( "role" ).addText( "identity-assertion" ) + .addTag( "enabled" ).addText( "true" ) + .addTag( "name" ).addText( "Default" ).gotoParent() + + .gotoRoot() + .addTag( "service" ) + .addTag( "role" ).addText( "test-service-role" ) + .gotoRoot(); + // System.out.println( "GATEWAY=" + xml.toString() ); + return xml; + } + + @Ignore + // @Test + public void waitForManualTesting() throws IOException { + System.in.read(); + } + + @Test( timeout = TestUtils.MEDIUM_TIMEOUT ) + public void testGroupMember() throws ClassNotFoundException, Exception { + LOG_ENTER(); + String username = "joe"; + String password = "joe-password"; + String serviceUrl = clusterUrl + "/test-service-path/test-service-resource"; + given() + //.log().all() + .auth().preemptive().basic( username, password ) - .expect() ++ .then() + //.log().all() + .statusCode( HttpStatus.SC_OK ) + .contentType( "text/plain" ) + .body( is( "test-service-response" ) ) + .when().get( serviceUrl ); + LOG_EXIT(); + } + + @Test( timeout = TestUtils.MEDIUM_TIMEOUT ) + public void testNonGroupMember() throws ClassNotFoundException { + LOG_ENTER(); + String username = "guest"; + String password = "guest-password"; + String serviceUrl = clusterUrl + "/test-service-path/test-service-resource"; + given() + //.log().all() + .auth().preemptive().basic( username, password ) - .expect() ++ .then() + //.log().all() + .statusCode( HttpStatus.SC_FORBIDDEN ) + .when().get( serviceUrl ); + LOG_EXIT(); + } + +} 
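A note on the alias wiring in setupGateway() above: the topology never embeds the LDAP system password; it carries the alias token (the "S{ALIAS=ldcSystemPassword}" value) and the secret is registered out of band. Below is a minimal sketch of that round trip, using only the AliasService calls already shown in this file; the wrapping method and the assertion are illustrative, not part of this commit:

    import org.apache.knox.gateway.services.GatewayServices;
    import org.apache.knox.gateway.services.security.AliasService;

    // Register a cluster-scoped alias, then resolve it back through the AliasService,
    // mirroring the commented-out lookup in setupGateway() above.
    static void verifyAliasRoundTrip() throws Exception {
      GatewayServices services = GatewayServer.getGatewayServices();
      AliasService aliasService =
          (AliasService) services.getService(GatewayServices.ALIAS_SERVICE);
      aliasService.addAliasForCluster("testdg-cluster", "ldcSystemPassword", "guest-password");
      char[] resolved =
          aliasService.getPasswordFromAliasForCluster("testdg-cluster", "ldcSystemPassword");
      // Expected: the secret registered above comes back intact.
      assert resolved != null && "guest-password".equals(new String(resolved));
    }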
http://git-wip-us.apache.org/repos/asf/knox/blob/416ee7c1/gateway-test/src/test/java/org/apache/knox/gateway/WebHdfsHaFuncTest.java
----------------------------------------------------------------------
diff --cc gateway-test/src/test/java/org/apache/knox/gateway/WebHdfsHaFuncTest.java
index d7496b6,0000000..98739a1
mode 100644,000000..100644
--- a/gateway-test/src/test/java/org/apache/knox/gateway/WebHdfsHaFuncTest.java
+++ b/gateway-test/src/test/java/org/apache/knox/gateway/WebHdfsHaFuncTest.java
@@@ -1,466 -1,0 +1,466 @@@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.knox.gateway;
+
+import com.mycila.xmltool.XMLDoc;
+import com.mycila.xmltool.XMLTag;
+import org.apache.hadoop.test.TestUtils;
+import org.apache.hadoop.test.category.ReleaseTest;
+import org.apache.hadoop.test.mock.MockServer;
+import org.apache.http.HttpStatus;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Ignore;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+
+import java.io.IOException;
+
+import static io.restassured.RestAssured.given;
+import static org.apache.hadoop.test.TestUtils.LOG_ENTER;
+import static org.apache.hadoop.test.TestUtils.LOG_EXIT;
+import static org.hamcrest.CoreMatchers.is;
+
+@Category(ReleaseTest.class)
+public class WebHdfsHaFuncTest {
+
+ // Specifies if the test requests should go through the gateway or directly to the services.
+ // This is frequently used to verify the behavior of the test both with and without the gateway.
+ private static final boolean USE_GATEWAY = true;
+
+ // Specifies if the test requests should be sent to mock services or the real services.
+ // This is frequently used to verify the behavior of the test both with and without mock services.
+ private static final boolean USE_MOCK_SERVICES = true;
+
+ private static GatewayTestDriver driver = new GatewayTestDriver();
+
+ private static MockServer masterServer;
+
+ private static MockServer standbyServer;
+
+ /**
+ * Creates a deployment of a gateway instance that all test methods will share. This method also creates a
+ * registry of sorts for all of the services that will be used by the test methods.
+ * The createTopology method is used to create the topology file that would normally be read from disk.
+ * The driver.setupGateway invocation is where the creation of GATEWAY_HOME occurs.
+ * <p/>
+ * This would normally be done once for this suite but the failure tests start affecting each other depending
+ * on the state of the last 'active' URL.
+ *
+ * @throws Exception Thrown if any failure occurs.
+ */ + @Before + public void setup() throws Exception { + LOG_ENTER(); + //Log.setLog(new NoOpLogger()); + masterServer = new MockServer("master", true); + standbyServer = new MockServer("standby", true); + GatewayTestConfig config = new GatewayTestConfig(); + config.setGatewayPath("gateway"); + driver.setResourceBase(WebHdfsHaFuncTest.class); + driver.setupLdap(0); + driver.setupService("WEBHDFS", "http://vm.local:50070/webhdfs", "/cluster/webhdfs", USE_MOCK_SERVICES); + driver.setupGateway(config, "cluster", createTopology(), USE_GATEWAY); + LOG_EXIT(); + } + + @After + public void cleanup() throws Exception { + LOG_ENTER(); + driver.cleanup(); + driver.reset(); + masterServer.reset(); + standbyServer.reset(); + LOG_EXIT(); + } + + /** + * Creates a topology that is deployed to the gateway instance for the test suite. + * Note that this topology is shared by all of the test methods in this suite. + * + * @return A populated XML structure for a topology file. + */ + private static XMLTag createTopology() { + XMLTag xml = XMLDoc.newDocument(true) + .addRoot("topology") + .addTag("gateway") + .addTag("provider") + .addTag("role").addText("webappsec") + .addTag("name").addText("WebAppSec") + .addTag("enabled").addText("true") + .addTag("param") + .addTag("name").addText("csrf.enabled") + .addTag("value").addText("true").gotoParent().gotoParent() + .addTag("provider") + .addTag("role").addText("authentication") + .addTag("name").addText("ShiroProvider") + .addTag("enabled").addText("true") + .addTag("param") + .addTag("name").addText("main.ldapRealm") + .addTag("value").addText("org.apache.knox.gateway.shirorealm.KnoxLdapRealm").gotoParent() + .addTag("param") + .addTag("name").addText("main.ldapRealm.userDnTemplate") + .addTag("value").addText("uid={0},ou=people,dc=hadoop,dc=apache,dc=org").gotoParent() + .addTag("param") + .addTag("name").addText("main.ldapRealm.contextFactory.url") + .addTag("value").addText(driver.getLdapUrl()).gotoParent() + .addTag("param") + .addTag("name").addText("main.ldapRealm.contextFactory.authenticationMechanism") + .addTag("value").addText("simple").gotoParent() + .addTag("param") + .addTag("name").addText("urls./**") + .addTag("value").addText("authcBasic").gotoParent().gotoParent() + .addTag("provider") + .addTag("role").addText("identity-assertion") + .addTag("enabled").addText("true") + .addTag("name").addText("Default").gotoParent() + .addTag("provider") + .addTag("role").addText("authorization") + .addTag("enabled").addText("true") + .addTag("name").addText("AclsAuthz").gotoParent() + .addTag("param") + .addTag("name").addText("webhdfs-acl") + .addTag("value").addText("hdfs;*;*").gotoParent() + .addTag("provider") + .addTag("role").addText("ha") + .addTag("enabled").addText("true") + .addTag("name").addText("HaProvider") + .addTag("param") + .addTag("name").addText("WEBHDFS") + .addTag("value").addText("maxFailoverAttempts=3;failoverSleep=15;maxRetryAttempts=3;retrySleep=10;enabled=true").gotoParent() + .gotoRoot() + .addTag("service") + .addTag("role").addText("WEBHDFS") + .addTag("url").addText("http://localhost:" + masterServer.getPort() + "/webhdfs") + .addTag("url").addText("http://localhost:" + standbyServer.getPort() + "/webhdfs").gotoParent() + .gotoRoot(); +// System.out.println( "GATEWAY=" + xml.toString() ); + return xml; + } + + @Test( timeout = TestUtils.MEDIUM_TIMEOUT ) + public void testBasicListOperation() throws IOException { + LOG_ENTER(); + String username = "hdfs"; + String password = "hdfs-password"; + masterServer.expect() + 
.method("GET")
+ .pathInfo("/webhdfs/v1/")
+ .queryParam("op", "LISTSTATUS")
+ .queryParam("user.name", username)
+ .respond()
+ .status(HttpStatus.SC_OK)
+ .content(driver.getResourceBytes("webhdfs-liststatus-success.json"))
+ .contentType("application/json");
+ given()
+ .auth().preemptive().basic(username, password)
+ .header("X-XSRF-Header", "jksdhfkhdsf")
+ .queryParam("op", "LISTSTATUS")
- .expect()
++ .then()
+ .log().ifError()
+ .statusCode(HttpStatus.SC_OK)
- .content("FileStatuses.FileStatus[0].pathSuffix", is("app-logs"))
++ .body("FileStatuses.FileStatus[0].pathSuffix", is("app-logs"))
+ .when().get(driver.getUrl("WEBHDFS") + "/v1/");
+ masterServer.isEmpty();
+ LOG_EXIT();
+ }
+
+ @Test( timeout = TestUtils.MEDIUM_TIMEOUT )
+ @Ignore( "KNOX-446" )
+ public void testFailoverListOperation() throws Exception {
+ LOG_ENTER();
+ String username = "hdfs";
+ String password = "hdfs-password";
- //Shutdown master and expect standby to serve the list response
++ //Shutdown master; the standby should serve the list response
+ masterServer.stop();
+ standbyServer.expect()
+ .method("GET")
+ .pathInfo("/webhdfs/v1/")
+ .queryParam("op", "LISTSTATUS")
+ .queryParam("user.name", username)
+ .respond()
+ .status(HttpStatus.SC_OK)
+ .content(driver.getResourceBytes("webhdfs-liststatus-success.json"))
+ .contentType("application/json");
+ given()
+ .auth().preemptive().basic(username, password)
+ .header("X-XSRF-Header", "jksdhfkhdsf")
+ .queryParam("op", "LISTSTATUS")
- .expect()
++ .then()
+ .log().ifError()
+ .statusCode(HttpStatus.SC_OK)
- .content("FileStatuses.FileStatus[0].pathSuffix", is("app-logs"))
++ .body("FileStatuses.FileStatus[0].pathSuffix", is("app-logs"))
+ .when().get(driver.getUrl("WEBHDFS") + "/v1/");
+ standbyServer.isEmpty();
+ masterServer.start();
+ LOG_EXIT();
+ }
+
+ @Test( timeout = TestUtils.MEDIUM_TIMEOUT )
+ public void testFailoverLimit() throws Exception {
+ LOG_ENTER();
+ String username = "hdfs";
+ String password = "hdfs-password";
- //Shutdown master and expect standby to serve the list response
++ //Shutdown both master and standby so the request exhausts the failover limit
+ masterServer.stop();
+ standbyServer.stop();
+ given()
+ .auth().preemptive().basic(username, password)
+ .header("X-XSRF-Header", "jksdhfkhdsf")
+ .queryParam("op", "LISTSTATUS")
- .expect()
++ .then()
+// .log().ifError()
+ .statusCode(HttpStatus.SC_INTERNAL_SERVER_ERROR)
+ .when().get(driver.getUrl("WEBHDFS") + "/v1/");
+ standbyServer.start();
+ masterServer.start();
+ LOG_EXIT();
+ }
+
+
+ @Test( timeout = TestUtils.MEDIUM_TIMEOUT )
+ @Ignore( "KNOX-446" )
+ public void testServerInStandby() throws IOException {
+ LOG_ENTER();
+ String username = "hdfs";
+ String password = "hdfs-password";
+ //make master the server that is in standby
+ masterServer.expect()
+ .method("GET")
+ .pathInfo("/webhdfs/v1/")
+ .queryParam("op", "LISTSTATUS")
+ .queryParam("user.name", username)
+ .respond()
+ .status(HttpStatus.SC_FORBIDDEN)
+ .content(driver.getResourceBytes("webhdfs-liststatus-standby.json"))
+ .contentType("application/json");
+ //standby server is 'active' in this test case and serves the list response
+ standbyServer.expect()
+ .method("GET")
+ .pathInfo("/webhdfs/v1/")
+ .queryParam("op", "LISTSTATUS")
+ .queryParam("user.name", username)
+ .respond()
+ .status(HttpStatus.SC_OK)
+ .content(driver.getResourceBytes("webhdfs-liststatus-success.json"))
+ .contentType("application/json");
+ given()
+ .auth().preemptive().basic(username, password)
+ .header("X-XSRF-Header", "jksdhfkhdsf")
+
.queryParam("op", "LISTSTATUS") - .expect() ++ .then() + .log().ifError() + .statusCode(HttpStatus.SC_OK) - .content("FileStatuses.FileStatus[0].pathSuffix", is("app-logs")) ++ .body("FileStatuses.FileStatus[0].pathSuffix", is("app-logs")) + .when().get(driver.getUrl("WEBHDFS") + "/v1/"); + masterServer.isEmpty(); + standbyServer.isEmpty(); + LOG_EXIT(); + } + + @Test( timeout = TestUtils.MEDIUM_TIMEOUT ) + public void testServerInStandbyFailoverLimit() throws IOException { + LOG_ENTER(); + String username = "hdfs"; + String password = "hdfs-password"; + //make master the server that is in standby + masterServer.expect() + .method("GET") + .pathInfo("/webhdfs/v1/") + .queryParam("op", "LISTSTATUS") + .queryParam("user.name", username) + .respond() + .status(HttpStatus.SC_FORBIDDEN) + .content(driver.getResourceBytes("webhdfs-liststatus-standby.json")) + .contentType("application/json"); + standbyServer.expect() + .method("GET") + .pathInfo("/webhdfs/v1/") + .queryParam("op", "LISTSTATUS") + .queryParam("user.name", username) + .respond() + .status(HttpStatus.SC_FORBIDDEN) + .content(driver.getResourceBytes("webhdfs-liststatus-standby.json")) + .contentType("application/json"); + masterServer.expect() + .method("GET") + .pathInfo("/webhdfs/v1/") + .queryParam("op", "LISTSTATUS") + .queryParam("user.name", username) + .respond() + .status(HttpStatus.SC_FORBIDDEN) + .content(driver.getResourceBytes("webhdfs-liststatus-standby.json")) + .contentType("application/json"); + standbyServer.expect() + .method("GET") + .pathInfo("/webhdfs/v1/") + .queryParam("op", "LISTSTATUS") + .queryParam("user.name", username) + .respond() + .status(HttpStatus.SC_FORBIDDEN) + .content(driver.getResourceBytes("webhdfs-liststatus-standby.json")) + .contentType("application/json"); + given() + .auth().preemptive().basic(username, password) + .header("X-XSRF-Header", "jksdhfkhdsf") + .queryParam("op", "LISTSTATUS") - .expect() ++ .then() +// .log().ifError() + .statusCode(HttpStatus.SC_INTERNAL_SERVER_ERROR) + .when().get(driver.getUrl("WEBHDFS") + "/v1/"); + masterServer.isEmpty(); + standbyServer.isEmpty(); + LOG_EXIT(); + } + + @Test( timeout = TestUtils.MEDIUM_TIMEOUT ) + public void testServerInSafeMode() throws IOException { + LOG_ENTER(); + String username = "hdfs"; + String password = "hdfs-password"; + //master is in safe mode + masterServer.expect() + .method("POST") + .pathInfo("/webhdfs/v1/user/hdfs/foo.txt") + .queryParam("op", "RENAME") + .queryParam("destination", "/user/hdfs/foo.txt") + .queryParam("user.name", username) + .respond() + .status(HttpStatus.SC_FORBIDDEN) + .content(driver.getResourceBytes("webhdfs-rename-safemode.json")) + .contentType("application/json"); + masterServer.expect() + .method("POST") + .pathInfo("/webhdfs/v1/user/hdfs/foo.txt") + .queryParam("op", "RENAME") + .queryParam("destination", "/user/hdfs/foo.txt") + .queryParam("user.name", username) + .respond() + .status(HttpStatus.SC_OK) + .content(driver.getResourceBytes("webhdfs-rename-safemode-off.json")) + .contentType("application/json"); + given() + .auth().preemptive().basic(username, password) + .header("X-XSRF-Header", "jksdhfkhdsf") + .queryParam("op", "RENAME") + .queryParam("destination", "/user/hdfs/foo.txt") - .expect() ++ .then() + .log().ifError() + .statusCode(HttpStatus.SC_OK) - .content("boolean", is(true)) ++ .body("boolean", is(true)) + .when().post(driver.getUrl("WEBHDFS") + "/v1/user/hdfs/foo.txt"); + masterServer.isEmpty(); + LOG_EXIT(); + } + + @Test( timeout = TestUtils.MEDIUM_TIMEOUT ) + public void 
testServerInSafeModeRetriableException() throws IOException { + LOG_ENTER(); + String username = "hdfs"; + String password = "hdfs-password"; + //master is in safe mode + masterServer.expect() + .method("POST") + .pathInfo("/webhdfs/v1/user/hdfs/new") + .queryParam("op", "MKDIRS") + .queryParam("user.name", username) + .respond() + .status(HttpStatus.SC_FORBIDDEN) + .content(driver.getResourceBytes("webhdfs-mkdirs-safemode.json")) + .contentType("application/json"); + masterServer.expect() + .method("POST") + .pathInfo("/webhdfs/v1/user/hdfs/new") + .queryParam("op", "MKDIRS") + .queryParam("user.name", username) + .respond() + .status(HttpStatus.SC_OK) + .content(driver.getResourceBytes("webhdfs-rename-safemode-off.json")) + .contentType("application/json"); + given() + .auth().preemptive().basic(username, password) + .header("X-XSRF-Header", "jksdhfkhdsf") + .queryParam("op", "MKDIRS") - .expect() ++ .then() + .log().ifError() + .statusCode(HttpStatus.SC_OK) - .content("boolean", is(true)) ++ .body("boolean", is(true)) + .when().post(driver.getUrl("WEBHDFS") + "/v1/user/hdfs/new"); + masterServer.isEmpty(); + LOG_EXIT(); + } + + @Test( timeout = TestUtils.MEDIUM_TIMEOUT ) + public void testServerInSafeModeRetryLimit() throws IOException { + LOG_ENTER(); + String username = "hdfs"; + String password = "hdfs-password"; + //master is in safe mode + masterServer.expect() + .method("POST") + .pathInfo("/webhdfs/v1/user/hdfs/foo.txt") + .queryParam("op", "RENAME") + .queryParam("destination", "/user/hdfs/foo.txt") + .queryParam("user.name", username) + .respond() + .status(HttpStatus.SC_FORBIDDEN) + .content(driver.getResourceBytes("webhdfs-rename-safemode.json")) + .contentType("application/json"); + masterServer.expect() + .method("POST") + .pathInfo("/webhdfs/v1/user/hdfs/foo.txt") + .queryParam("op", "RENAME") + .queryParam("destination", "/user/hdfs/foo.txt") + .queryParam("user.name", username) + .respond() + .status(HttpStatus.SC_FORBIDDEN) + .content(driver.getResourceBytes("webhdfs-rename-safemode.json")) + .contentType("application/json"); + masterServer.expect() + .method("POST") + .pathInfo("/webhdfs/v1/user/hdfs/foo.txt") + .queryParam("op", "RENAME") + .queryParam("destination", "/user/hdfs/foo.txt") + .queryParam("user.name", username) + .respond() + .status(HttpStatus.SC_FORBIDDEN) + .content(driver.getResourceBytes("webhdfs-rename-safemode.json")) + .contentType("application/json"); + masterServer.expect() + .method("POST") + .pathInfo("/webhdfs/v1/user/hdfs/foo.txt") + .queryParam("op", "RENAME") + .queryParam("destination", "/user/hdfs/foo.txt") + .queryParam("user.name", username) + .respond() + .status(HttpStatus.SC_FORBIDDEN) + .content(driver.getResourceBytes("webhdfs-rename-safemode.json")) + .contentType("application/json"); + given() + .auth().preemptive().basic(username, password) + .header("X-XSRF-Header", "jksdhfkhdsf") + .queryParam("op", "RENAME") + .queryParam("destination", "/user/hdfs/foo.txt") - .expect() ++ .then() +// .log().ifError() + .statusCode(HttpStatus.SC_INTERNAL_SERVER_ERROR) + .when().post(driver.getUrl("WEBHDFS") + "/v1/user/hdfs/foo.txt"); + masterServer.isEmpty(); + LOG_EXIT(); + } +}
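The recurring hunks across these files — ".expect()" replaced by ".then()" and ".content(path, matcher)" by ".body(path, matcher)" — track REST Assured's move from the legacy com.jayway expect-style DSL to the io.restassured given/when/then style. The converted tests keep the old ordering, splicing the validation in before ".when().get(...)"; the same LISTSTATUS assertion can also be written in the canonical request/call/validation order. A minimal sketch follows, where url stands in for any of the gateway URLs used above and the credentials are the test values from this commit:

    import static io.restassured.RestAssured.given;
    import static org.hamcrest.CoreMatchers.is;
    import org.apache.http.HttpStatus;

    // Build the request, perform the call, then validate the response.
    given()
        .auth().preemptive().basic("hdfs", "hdfs-password")
        .header("X-XSRF-Header", "jksdhfkhdsf")
        .queryParam("op", "LISTSTATUS")
    .when()
        .get(url + "/v1/")
    .then()
        .log().ifError()
        .statusCode(HttpStatus.SC_OK)
        .body("FileStatuses.FileStatus[0].pathSuffix", is("app-logs"));

Either ordering validates the same response; chaining .then() after the call simply reads in execution order.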