http://git-wip-us.apache.org/repos/asf/hadoop/blob/5877f20f/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMS.java ---------------------------------------------------------------------- diff --git a/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMS.java b/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMS.java index d8755ec..271c339 100644 --- a/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMS.java +++ b/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMS.java @@ -24,6 +24,7 @@ import org.apache.hadoop.crypto.key.KeyProvider.KeyVersion; import org.apache.hadoop.crypto.key.KeyProviderCryptoExtension; import org.apache.hadoop.crypto.key.KeyProviderCryptoExtension.EncryptedKeyVersion; import org.apache.hadoop.crypto.key.kms.KMSRESTConstants; +import org.apache.hadoop.http.JettyUtils; import org.apache.hadoop.security.AccessControlException; import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.crypto.key.kms.KMSClientProvider; @@ -101,7 +102,7 @@ public class KMS { @POST @Path(KMSRESTConstants.KEYS_RESOURCE) @Consumes(MediaType.APPLICATION_JSON) - @Produces(MediaType.APPLICATION_JSON) + @Produces(MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8) @SuppressWarnings("unchecked") public Response createKey(Map jsonKey) throws Exception { try{ @@ -204,7 +205,7 @@ public class KMS { @POST @Path(KMSRESTConstants.KEY_RESOURCE + "/{name:.*}") @Consumes(MediaType.APPLICATION_JSON) - @Produces(MediaType.APPLICATION_JSON) + @Produces(MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8) public Response rolloverKey(@PathParam("name") final String name, Map jsonMaterial) throws Exception { try { @@ -254,7 +255,7 @@ public class KMS { @GET @Path(KMSRESTConstants.KEYS_METADATA_RESOURCE) - @Produces(MediaType.APPLICATION_JSON) + 
@Produces(MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8) public Response getKeysMetadata(@QueryParam(KMSRESTConstants.KEY) List<String> keyNamesList) throws Exception { try { @@ -287,7 +288,7 @@ public class KMS { @GET @Path(KMSRESTConstants.KEYS_NAMES_RESOURCE) - @Produces(MediaType.APPLICATION_JSON) + @Produces(MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8) public Response getKeyNames() throws Exception { try { LOG.trace("Entering getKeyNames method."); @@ -332,7 +333,7 @@ public class KMS { @GET @Path(KMSRESTConstants.KEY_RESOURCE + "/{name:.*}/" + KMSRESTConstants.METADATA_SUB_RESOURCE) - @Produces(MediaType.APPLICATION_JSON) + @Produces(MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8) public Response getMetadata(@PathParam("name") final String name) throws Exception { try { @@ -366,7 +367,7 @@ public class KMS { @GET @Path(KMSRESTConstants.KEY_RESOURCE + "/{name:.*}/" + KMSRESTConstants.CURRENT_VERSION_SUB_RESOURCE) - @Produces(MediaType.APPLICATION_JSON) + @Produces(MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8) public Response getCurrentVersion(@PathParam("name") final String name) throws Exception { try { @@ -399,7 +400,7 @@ public class KMS { @GET @Path(KMSRESTConstants.KEY_VERSION_RESOURCE + "/{versionName:.*}") - @Produces(MediaType.APPLICATION_JSON) + @Produces(MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8) public Response getKeyVersion( @PathParam("versionName") final String versionName) throws Exception { try { @@ -436,7 +437,7 @@ public class KMS { @GET @Path(KMSRESTConstants.KEY_RESOURCE + "/{name:.*}/" + KMSRESTConstants.EEK_SUB_RESOURCE) - @Produces(MediaType.APPLICATION_JSON) + @Produces(MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8) public Response generateEncryptedKeys( @PathParam("name") final String name, @QueryParam(KMSRESTConstants.EEK_OP) String edekOp, @@ -508,7 +509,7 @@ public class KMS { @POST @Path(KMSRESTConstants.KEY_VERSION_RESOURCE + "/{versionName:.*}/" + KMSRESTConstants.EEK_SUB_RESOURCE) 
- @Produces(MediaType.APPLICATION_JSON) + @Produces(MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8) public Response decryptEncryptedKey( @PathParam("versionName") final String versionName, @QueryParam(KMSRESTConstants.EEK_OP) String eekOp, @@ -577,7 +578,7 @@ public class KMS { @GET @Path(KMSRESTConstants.KEY_RESOURCE + "/{name:.*}/" + KMSRESTConstants.VERSIONS_SUB_RESOURCE) - @Produces(MediaType.APPLICATION_JSON) + @Produces(MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8) public Response getKeyVersions(@PathParam("name") final String name) throws Exception { try {
http://git-wip-us.apache.org/repos/asf/hadoop/blob/5877f20f/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSAuthenticationFilter.java ---------------------------------------------------------------------- diff --git a/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSAuthenticationFilter.java b/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSAuthenticationFilter.java index 928a8aa..8efef73 100644 --- a/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSAuthenticationFilter.java +++ b/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSAuthenticationFilter.java @@ -114,7 +114,19 @@ public class KMSAuthenticationFilter super.sendError(sc); } + /** + * Calls setStatus(int sc, String msg) on the wrapped + * {@link HttpServletResponseWrapper} object. + * + * @param sc the status code + * @param sm the status message + * @deprecated {@link HttpServletResponseWrapper#setStatus(int, String)} is + * deprecated. 
To set a status code use {@link #setStatus(int)}, to send an + * error with a description use {@link #sendError(int, String)} + */ @Override + @Deprecated + @SuppressWarnings("deprecation") public void setStatus(int sc, String sm) { statusCode = sc; msg = sm; http://git-wip-us.apache.org/repos/asf/hadoop/blob/5877f20f/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSJSONWriter.java ---------------------------------------------------------------------- diff --git a/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSJSONWriter.java b/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSJSONWriter.java index 31fac9f..4b8a5be 100644 --- a/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSJSONWriter.java +++ b/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSJSONWriter.java @@ -18,6 +18,7 @@ package org.apache.hadoop.crypto.key.kms.server; import org.apache.hadoop.classification.InterfaceAudience; +import org.apache.hadoop.http.JettyUtils; import org.codehaus.jackson.map.ObjectMapper; import javax.ws.rs.Produces; @@ -41,7 +42,7 @@ import java.util.Map; * to their JSON representation. 
*/ @Provider -@Produces(MediaType.APPLICATION_JSON) +@Produces(MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8) @InterfaceAudience.Private public class KMSJSONWriter implements MessageBodyWriter<Object> { http://git-wip-us.apache.org/repos/asf/hadoop/blob/5877f20f/hadoop-common-project/hadoop-kms/src/test/java/org/apache/hadoop/crypto/key/kms/server/MiniKMS.java ---------------------------------------------------------------------- diff --git a/hadoop-common-project/hadoop-kms/src/test/java/org/apache/hadoop/crypto/key/kms/server/MiniKMS.java b/hadoop-common-project/hadoop-kms/src/test/java/org/apache/hadoop/crypto/key/kms/server/MiniKMS.java index 292d731..8b181ad 100644 --- a/hadoop-common-project/hadoop-kms/src/test/java/org/apache/hadoop/crypto/key/kms/server/MiniKMS.java +++ b/hadoop-common-project/hadoop-kms/src/test/java/org/apache/hadoop/crypto/key/kms/server/MiniKMS.java @@ -21,14 +21,19 @@ import com.google.common.base.Preconditions; import org.apache.commons.io.IOUtils; import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.crypto.key.kms.KMSRESTConstants; import org.apache.hadoop.fs.Path; +import org.apache.hadoop.http.JettyUtils; import org.apache.hadoop.util.ThreadUtil; -import org.apache.hadoop.security.ssl.SslSelectChannelConnectorSecure; -import org.mortbay.jetty.Connector; -import org.mortbay.jetty.Server; -import org.mortbay.jetty.security.SslSelectChannelConnector; -import org.mortbay.jetty.webapp.WebAppContext; +import org.eclipse.jetty.http.HttpVersion; +import org.eclipse.jetty.server.ConnectionFactory; +import org.eclipse.jetty.server.HttpConfiguration; +import org.eclipse.jetty.server.HttpConnectionFactory; +import org.eclipse.jetty.server.SecureRequestCustomizer; +import org.eclipse.jetty.server.Server; +import org.eclipse.jetty.server.ServerConnector; +import org.eclipse.jetty.server.SslConnectionFactory; +import org.eclipse.jetty.util.ssl.SslContextFactory; +import org.eclipse.jetty.webapp.WebAppContext; import 
java.io.File; import java.io.FileOutputStream; @@ -37,11 +42,7 @@ import java.io.InputStream; import java.io.OutputStream; import java.io.Writer; import java.io.IOException; -import java.net.InetAddress; import java.net.MalformedURLException; -import java.net.ServerSocket; -import java.net.URI; -import java.net.URISyntaxException; import java.net.URL; import java.util.UUID; @@ -51,18 +52,28 @@ public class MiniKMS { try { boolean ssl = keyStore != null; String host = "localhost"; - Server server = new Server(inPort); - if (!ssl) { - server.getConnectors()[0].setHost(host); - } else { - SslSelectChannelConnector c = new SslSelectChannelConnectorSecure(); - c.setHost(host); - c.setNeedClientAuth(false); - c.setKeystore(keyStore); - c.setKeystoreType("jks"); - c.setKeyPassword(password); - server.setConnectors(new Connector[]{c}); + Server server = new Server(); + ServerConnector conn = new ServerConnector(server); + HttpConfiguration httpConfig = new HttpConfiguration(); + httpConfig.setRequestHeaderSize(JettyUtils.HEADER_SIZE); + httpConfig.setResponseHeaderSize(JettyUtils.HEADER_SIZE); + httpConfig.setSecureScheme("https"); + httpConfig.addCustomizer(new SecureRequestCustomizer()); + ConnectionFactory connFactory = new HttpConnectionFactory(httpConfig); + conn.addConnectionFactory(connFactory); + conn.setHost(host); + conn.setPort(inPort); + if (ssl) { + SslContextFactory sslContextFactory = new SslContextFactory(); + sslContextFactory.setNeedClientAuth(false); + sslContextFactory.setKeyStorePath(keyStore); + sslContextFactory.setKeyStoreType("jks"); + sslContextFactory.setKeyStorePassword(password); + conn.addFirstConnectionFactory( + new SslConnectionFactory(sslContextFactory, + HttpVersion.HTTP_1_1.asString())); } + server.addConnector(conn); return server; } catch (Exception ex) { throw new RuntimeException("Could not start embedded servlet container, " @@ -71,13 +82,13 @@ public class MiniKMS { } private static URL getJettyURL(Server server) { - boolean ssl = 
server.getConnectors()[0].getClass() - == SslSelectChannelConnectorSecure.class; + boolean ssl = server.getConnectors()[0] + .getConnectionFactory(SslConnectionFactory.class) != null; try { String scheme = (ssl) ? "https" : "http"; return new URL(scheme + "://" + - server.getConnectors()[0].getHost() + ":" + - server.getConnectors()[0].getLocalPort()); + ((ServerConnector)server.getConnectors()[0]).getHost() + ":" + + ((ServerConnector)server.getConnectors()[0]).getLocalPort()); } catch (MalformedURLException ex) { throw new RuntimeException("It should never happen, " + ex.getMessage(), ex); @@ -217,7 +228,7 @@ public class MiniKMS { if (webXmlInJar) { context.setClassLoader(cl); } - jetty.addHandler(context); + jetty.setHandler(context); jetty.start(); kmsURL = new URL(getJettyURL(jetty), "kms"); } http://git-wip-us.apache.org/repos/asf/hadoop/blob/5877f20f/hadoop-common-project/hadoop-nfs/pom.xml ---------------------------------------------------------------------- diff --git a/hadoop-common-project/hadoop-nfs/pom.xml b/hadoop-common-project/hadoop-nfs/pom.xml index c943f3c..027cf34 100644 --- a/hadoop-common-project/hadoop-nfs/pom.xml +++ b/hadoop-common-project/hadoop-nfs/pom.xml @@ -63,7 +63,7 @@ </dependency> <dependency> <groupId>javax.servlet</groupId> - <artifactId>servlet-api</artifactId> + <artifactId>javax.servlet-api</artifactId> <scope>provided</scope> </dependency> <dependency> http://git-wip-us.apache.org/repos/asf/hadoop/blob/5877f20f/hadoop-hdfs-project/hadoop-hdfs-httpfs/pom.xml ---------------------------------------------------------------------- diff --git a/hadoop-hdfs-project/hadoop-hdfs-httpfs/pom.xml b/hadoop-hdfs-project/hadoop-hdfs-httpfs/pom.xml index 0aa5fc1..928ada9 100644 --- a/hadoop-hdfs-project/hadoop-hdfs-httpfs/pom.xml +++ b/hadoop-hdfs-project/hadoop-hdfs-httpfs/pom.xml @@ -75,7 +75,7 @@ </dependency> <dependency> <groupId>javax.servlet</groupId> - <artifactId>servlet-api</artifactId> + 
<artifactId>javax.servlet-api</artifactId> <scope>provided</scope> </dependency> <dependency> @@ -89,8 +89,8 @@ <scope>compile</scope> </dependency> <dependency> - <groupId>org.mortbay.jetty</groupId> - <artifactId>jetty</artifactId> + <groupId>org.eclipse.jetty</groupId> + <artifactId>jetty-server</artifactId> <scope>test</scope> </dependency> <dependency> @@ -108,22 +108,22 @@ </exclusion> <exclusion> <groupId>javax.servlet</groupId> - <artifactId>servlet-api</artifactId> + <artifactId>javax.servlet-api</artifactId> </exclusion> <exclusion> <groupId>javax.servlet.jsp</groupId> <artifactId>jsp-api</artifactId> </exclusion> <exclusion> - <groupId>org.mortbay.jetty</groupId> - <artifactId>jetty</artifactId> + <groupId>org.eclipse.jetty</groupId> + <artifactId>jetty-server</artifactId> </exclusion> <exclusion> - <groupId>org.mortbay.jetty</groupId> + <groupId>org.eclipse.jetty</groupId> <artifactId>jetty-util</artifactId> </exclusion> <exclusion> - <groupId>org.mortbay.jetty</groupId> + <groupId>org.eclipse.jetty</groupId> <artifactId>servlet-api-2.5</artifactId> </exclusion> <exclusion> @@ -155,18 +155,18 @@ </exclusion> <exclusion> <groupId>javax.servlet</groupId> - <artifactId>servlet-api</artifactId> + <artifactId>javax.servlet-api</artifactId> </exclusion> <exclusion> - <groupId>org.mortbay.jetty</groupId> - <artifactId>jetty</artifactId> + <groupId>org.eclipse.jetty</groupId> + <artifactId>jetty-server</artifactId> </exclusion> <exclusion> - <groupId>org.mortbay.jetty</groupId> + <groupId>org.eclipse.jetty</groupId> <artifactId>jetty-util</artifactId> </exclusion> <exclusion> - <groupId>org.mortbay.jetty</groupId> + <groupId>org.eclipse.jetty</groupId> <artifactId>servlet-api-2.5</artifactId> </exclusion> <exclusion> http://git-wip-us.apache.org/repos/asf/hadoop/blob/5877f20f/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSServer.java ---------------------------------------------------------------------- diff --git 
a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSServer.java b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSServer.java index a4db124..677bca7 100644 --- a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSServer.java +++ b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSServer.java @@ -48,6 +48,7 @@ import org.apache.hadoop.fs.http.server.HttpFSParametersProvider.XAttrEncodingPa import org.apache.hadoop.fs.http.server.HttpFSParametersProvider.XAttrNameParam; import org.apache.hadoop.fs.http.server.HttpFSParametersProvider.XAttrSetFlagParam; import org.apache.hadoop.fs.http.server.HttpFSParametersProvider.XAttrValueParam; +import org.apache.hadoop.http.JettyUtils; import org.apache.hadoop.lib.service.FileSystemAccess; import org.apache.hadoop.lib.service.FileSystemAccessException; import org.apache.hadoop.lib.service.Groups; @@ -168,7 +169,7 @@ public class HttpFSServer { * {@link HttpFSExceptionProvider}. 
*/ @GET - @Produces(MediaType.APPLICATION_JSON) + @Produces(MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8) public Response getRoot(@QueryParam(OperationParam.NAME) OperationParam op, @Context Parameters params, @Context HttpServletRequest request) @@ -197,7 +198,8 @@ public class HttpFSServer { */ @GET @Path("{path:.*}") - @Produces({MediaType.APPLICATION_OCTET_STREAM, MediaType.APPLICATION_JSON}) + @Produces({MediaType.APPLICATION_OCTET_STREAM + "; " + JettyUtils.UTF_8, + MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8}) public Response get(@PathParam("path") String path, @QueryParam(OperationParam.NAME) OperationParam op, @Context Parameters params, @@ -363,7 +365,7 @@ public class HttpFSServer { */ @DELETE @Path("{path:.*}") - @Produces(MediaType.APPLICATION_JSON) + @Produces(MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8) public Response delete(@PathParam("path") String path, @QueryParam(OperationParam.NAME) OperationParam op, @Context Parameters params, @@ -414,7 +416,7 @@ public class HttpFSServer { @POST @Path("{path:.*}") @Consumes({"*/*"}) - @Produces({MediaType.APPLICATION_JSON}) + @Produces({MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8}) public Response post(InputStream is, @Context UriInfo uriInfo, @PathParam("path") String path, @@ -509,7 +511,7 @@ public class HttpFSServer { @PUT @Path("{path:.*}") @Consumes({"*/*"}) - @Produces({MediaType.APPLICATION_JSON}) + @Produces({MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8}) public Response put(InputStream is, @Context UriInfo uriInfo, @PathParam("path") String path, http://git-wip-us.apache.org/repos/asf/hadoop/blob/5877f20f/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/JSONMapProvider.java ---------------------------------------------------------------------- diff --git a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/JSONMapProvider.java 
b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/JSONMapProvider.java index 05bb9a1..a1525a2 100644 --- a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/JSONMapProvider.java +++ b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/JSONMapProvider.java @@ -19,6 +19,7 @@ package org.apache.hadoop.lib.wsrs; import org.apache.hadoop.classification.InterfaceAudience; +import org.apache.hadoop.http.JettyUtils; import org.json.simple.JSONObject; import javax.ws.rs.Produces; @@ -37,7 +38,7 @@ import java.nio.charset.StandardCharsets; import java.util.Map; @Provider -@Produces(MediaType.APPLICATION_JSON) +@Produces(MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8) @InterfaceAudience.Private public class JSONMapProvider implements MessageBodyWriter<Map> { private static final String ENTER = System.getProperty("line.separator"); http://git-wip-us.apache.org/repos/asf/hadoop/blob/5877f20f/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/JSONProvider.java ---------------------------------------------------------------------- diff --git a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/JSONProvider.java b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/JSONProvider.java index 7e1f98b..8ae9145 100644 --- a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/JSONProvider.java +++ b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/JSONProvider.java @@ -19,6 +19,7 @@ package org.apache.hadoop.lib.wsrs; import org.apache.hadoop.classification.InterfaceAudience; +import org.apache.hadoop.http.JettyUtils; import org.json.simple.JSONStreamAware; import javax.ws.rs.Produces; @@ -36,7 +37,7 @@ import java.lang.reflect.Type; import java.nio.charset.StandardCharsets; @Provider -@Produces(MediaType.APPLICATION_JSON) 
+@Produces(MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8) @InterfaceAudience.Private public class JSONProvider implements MessageBodyWriter<JSONStreamAware> { private static final String ENTER = System.getProperty("line.separator"); http://git-wip-us.apache.org/repos/asf/hadoop/blob/5877f20f/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/client/BaseTestHttpFSWith.java ---------------------------------------------------------------------- diff --git a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/client/BaseTestHttpFSWith.java b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/client/BaseTestHttpFSWith.java index e475803..4e25e56 100644 --- a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/client/BaseTestHttpFSWith.java +++ b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/client/BaseTestHttpFSWith.java @@ -49,8 +49,8 @@ import org.junit.Assume; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.Parameterized; -import org.mortbay.jetty.Server; -import org.mortbay.jetty.webapp.WebAppContext; +import org.eclipse.jetty.server.Server; +import org.eclipse.jetty.webapp.WebAppContext; import com.google.common.collect.Lists; @@ -127,7 +127,7 @@ public abstract class BaseTestHttpFSWith extends HFSTestCase { URL url = cl.getResource("webapp"); WebAppContext context = new WebAppContext(url.getPath(), "/webhdfs"); Server server = TestJettyHelper.getJettyServer(); - server.addHandler(context); + server.setHandler(context); server.start(); } http://git-wip-us.apache.org/repos/asf/hadoop/blob/5877f20f/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/server/TestHttpFSServer.java ---------------------------------------------------------------------- diff --git 
a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/server/TestHttpFSServer.java b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/server/TestHttpFSServer.java index c6a7a9d..a9c36b0 100644 --- a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/server/TestHttpFSServer.java +++ b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/server/TestHttpFSServer.java @@ -64,8 +64,8 @@ import org.apache.hadoop.test.TestJettyHelper; import org.json.simple.JSONObject; import org.json.simple.parser.JSONParser; import org.junit.Test; -import org.mortbay.jetty.Server; -import org.mortbay.jetty.webapp.WebAppContext; +import org.eclipse.jetty.server.Server; +import org.eclipse.jetty.webapp.WebAppContext; import com.google.common.collect.Maps; import java.util.Properties; @@ -171,7 +171,7 @@ public class TestHttpFSServer extends HFSTestCase { URL url = cl.getResource("webapp"); WebAppContext context = new WebAppContext(url.getPath(), "/webhdfs"); Server server = TestJettyHelper.getJettyServer(); - server.addHandler(context); + server.setHandler(context); server.start(); if (addDelegationTokenAuthHandler) { HttpFSServerWebApp.get().setAuthority(TestJettyHelper.getAuthority()); http://git-wip-us.apache.org/repos/asf/hadoop/blob/5877f20f/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/server/TestHttpFSServerNoACLs.java ---------------------------------------------------------------------- diff --git a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/server/TestHttpFSServerNoACLs.java b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/server/TestHttpFSServerNoACLs.java index cadec2e..289ddc4 100644 --- a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/server/TestHttpFSServerNoACLs.java +++ 
b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/server/TestHttpFSServerNoACLs.java @@ -31,8 +31,8 @@ import org.apache.hadoop.test.TestJetty; import org.apache.hadoop.test.TestJettyHelper; import org.junit.Assert; import org.junit.Test; -import org.mortbay.jetty.Server; -import org.mortbay.jetty.webapp.WebAppContext; +import org.eclipse.jetty.server.Server; +import org.eclipse.jetty.webapp.WebAppContext; import java.io.BufferedReader; import java.io.File; @@ -151,7 +151,7 @@ public class TestHttpFSServerNoACLs extends HTestCase { } WebAppContext context = new WebAppContext(url.getPath(), "/webhdfs"); Server server = TestJettyHelper.getJettyServer(); - server.addHandler(context); + server.setHandler(context); server.start(); } http://git-wip-us.apache.org/repos/asf/hadoop/blob/5877f20f/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/server/TestHttpFSServerNoXAttrs.java ---------------------------------------------------------------------- diff --git a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/server/TestHttpFSServerNoXAttrs.java b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/server/TestHttpFSServerNoXAttrs.java index 951b3fb..7571125 100644 --- a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/server/TestHttpFSServerNoXAttrs.java +++ b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/server/TestHttpFSServerNoXAttrs.java @@ -32,8 +32,8 @@ import org.apache.hadoop.test.TestJetty; import org.apache.hadoop.test.TestJettyHelper; import org.junit.Assert; import org.junit.Test; -import org.mortbay.jetty.Server; -import org.mortbay.jetty.webapp.WebAppContext; +import org.eclipse.jetty.server.Server; +import org.eclipse.jetty.webapp.WebAppContext; import java.io.BufferedReader; import java.io.File; @@ -152,7 +152,7 @@ public class TestHttpFSServerNoXAttrs extends HTestCase { } 
WebAppContext context = new WebAppContext(url.getPath(), "/webhdfs"); Server server = TestJettyHelper.getJettyServer(); - server.addHandler(context); + server.setHandler(context); server.start(); } http://git-wip-us.apache.org/repos/asf/hadoop/blob/5877f20f/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/server/TestHttpFSWithKerberos.java ---------------------------------------------------------------------- diff --git a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/server/TestHttpFSWithKerberos.java b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/server/TestHttpFSWithKerberos.java index 757e3fd..fafeff0 100644 --- a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/server/TestHttpFSWithKerberos.java +++ b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/server/TestHttpFSWithKerberos.java @@ -41,8 +41,8 @@ import org.json.simple.parser.JSONParser; import org.junit.After; import org.junit.Assert; import org.junit.Test; -import org.mortbay.jetty.Server; -import org.mortbay.jetty.webapp.WebAppContext; +import org.eclipse.jetty.server.Server; +import org.eclipse.jetty.webapp.WebAppContext; import java.io.File; import java.io.FileOutputStream; @@ -105,7 +105,7 @@ public class TestHttpFSWithKerberos extends HFSTestCase { URL url = cl.getResource("webapp"); WebAppContext context = new WebAppContext(url.getPath(), "/webhdfs"); Server server = TestJettyHelper.getJettyServer(); - server.addHandler(context); + server.setHandler(context); server.start(); HttpFSServerWebApp.get().setAuthority(TestJettyHelper.getAuthority()); } http://git-wip-us.apache.org/repos/asf/hadoop/blob/5877f20f/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/test/TestHFSTestCase.java ---------------------------------------------------------------------- diff --git 
a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/test/TestHFSTestCase.java b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/test/TestHFSTestCase.java index eb2cdc6..2d09b80 100644 --- a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/test/TestHFSTestCase.java +++ b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/test/TestHFSTestCase.java @@ -38,9 +38,9 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.util.Time; +import org.eclipse.jetty.servlet.ServletContextHandler; import org.junit.Test; -import org.mortbay.jetty.Server; -import org.mortbay.jetty.servlet.Context; +import org.eclipse.jetty.server.Server; public class TestHFSTestCase extends HFSTestCase { @@ -165,11 +165,11 @@ public class TestHFSTestCase extends HFSTestCase { @Test @TestJetty public void testJetty() throws Exception { - Context context = new Context(); + ServletContextHandler context = new ServletContextHandler(); context.setContextPath("/"); context.addServlet(MyServlet.class, "/bar"); Server server = TestJettyHelper.getJettyServer(); - server.addHandler(context); + server.setHandler(context); server.start(); URL url = new URL(TestJettyHelper.getJettyURL(), "/bar"); HttpURLConnection conn = (HttpURLConnection) url.openConnection(); http://git-wip-us.apache.org/repos/asf/hadoop/blob/5877f20f/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/test/TestHTestCase.java ---------------------------------------------------------------------- diff --git a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/test/TestHTestCase.java b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/test/TestHTestCase.java index 74d34ec..be01285 100644 --- a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/test/TestHTestCase.java +++ 
b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/test/TestHTestCase.java @@ -33,9 +33,9 @@ import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import org.apache.hadoop.util.Time; +import org.eclipse.jetty.servlet.ServletContextHandler; import org.junit.Test; -import org.mortbay.jetty.Server; -import org.mortbay.jetty.servlet.Context; +import org.eclipse.jetty.server.Server; public class TestHTestCase extends HTestCase { @@ -132,11 +132,11 @@ public class TestHTestCase extends HTestCase { @Test @TestJetty public void testJetty() throws Exception { - Context context = new Context(); + ServletContextHandler context = new ServletContextHandler(); context.setContextPath("/"); context.addServlet(MyServlet.class, "/bar"); Server server = TestJettyHelper.getJettyServer(); - server.addHandler(context); + server.setHandler(context); server.start(); URL url = new URL(TestJettyHelper.getJettyURL(), "/bar"); HttpURLConnection conn = (HttpURLConnection) url.openConnection(); http://git-wip-us.apache.org/repos/asf/hadoop/blob/5877f20f/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/test/TestJettyHelper.java ---------------------------------------------------------------------- diff --git a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/test/TestJettyHelper.java b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/test/TestJettyHelper.java index a9b661e..1da3901 100644 --- a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/test/TestJettyHelper.java +++ b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/test/TestJettyHelper.java @@ -24,14 +24,19 @@ import java.net.ServerSocket; import java.net.URL; import java.net.UnknownHostException; -import org.apache.hadoop.security.ssl.SslSelectChannelConnectorSecure; -import org.junit.Test; +import org.apache.hadoop.http.JettyUtils; +import 
org.eclipse.jetty.http.HttpVersion; +import org.eclipse.jetty.server.ConnectionFactory; +import org.eclipse.jetty.server.HttpConfiguration; +import org.eclipse.jetty.server.HttpConnectionFactory; +import org.eclipse.jetty.server.SecureRequestCustomizer; +import org.eclipse.jetty.server.Server; +import org.eclipse.jetty.server.ServerConnector; +import org.eclipse.jetty.server.SslConnectionFactory; +import org.eclipse.jetty.util.ssl.SslContextFactory; import org.junit.rules.MethodRule; import org.junit.runners.model.FrameworkMethod; import org.junit.runners.model.Statement; -import org.mortbay.jetty.Connector; -import org.mortbay.jetty.Server; -import org.mortbay.jetty.security.SslSelectChannelConnector; public class TestJettyHelper implements MethodRule { private boolean ssl; @@ -88,20 +93,27 @@ public class TestJettyHelper implements MethodRule { ServerSocket ss = new ServerSocket(0, 50, localhost); int port = ss.getLocalPort(); ss.close(); - Server server = new Server(0); - if (!ssl) { - server.getConnectors()[0].setHost(host); - server.getConnectors()[0].setPort(port); - } else { - SslSelectChannelConnector c = new SslSelectChannelConnectorSecure(); - c.setHost(host); - c.setPort(port); - c.setNeedClientAuth(false); - c.setKeystore(keyStore); - c.setKeystoreType(keyStoreType); - c.setKeyPassword(keyStorePassword); - server.setConnectors(new Connector[] {c}); + Server server = new Server(); + ServerConnector conn = new ServerConnector(server); + HttpConfiguration http_config = new HttpConfiguration(); + http_config.setRequestHeaderSize(JettyUtils.HEADER_SIZE); + http_config.setResponseHeaderSize(JettyUtils.HEADER_SIZE); + http_config.setSecureScheme("https"); + http_config.addCustomizer(new SecureRequestCustomizer()); + ConnectionFactory connFactory = new HttpConnectionFactory(http_config); + conn.addConnectionFactory(connFactory); + conn.setHost(host); + conn.setPort(port); + if (ssl) { + SslContextFactory sslContextFactory = new SslContextFactory(); + 
sslContextFactory.setNeedClientAuth(false); + sslContextFactory.setKeyStorePath(keyStore); + sslContextFactory.setKeyStoreType(keyStoreType); + sslContextFactory.setKeyStorePassword(keyStorePassword); + conn.addFirstConnectionFactory(new SslConnectionFactory(sslContextFactory, + HttpVersion.HTTP_1_1.asString())); } + server.addConnector(conn); return server; } catch (Exception ex) { throw new RuntimeException("Could not start embedded servlet container, " + ex.getMessage(), ex); @@ -117,8 +129,8 @@ public class TestJettyHelper implements MethodRule { Server server = getJettyServer(); try { InetAddress add = - InetAddress.getByName(server.getConnectors()[0].getHost()); - int port = server.getConnectors()[0].getPort(); + InetAddress.getByName(((ServerConnector)server.getConnectors()[0]).getHost()); + int port = ((ServerConnector)server.getConnectors()[0]).getPort(); return new InetSocketAddress(add, port); } catch (UnknownHostException ex) { throw new RuntimeException(ex); @@ -157,8 +169,8 @@ public class TestJettyHelper implements MethodRule { try { String scheme = (helper.ssl) ? 
"https" : "http"; return new URL(scheme + "://" + - helper.server.getConnectors()[0].getHost() + ":" + - helper.server.getConnectors()[0].getPort()); + ((ServerConnector)helper.server.getConnectors()[0]).getHost() + ":" + + ((ServerConnector)helper.server.getConnectors()[0]).getPort()); } catch (MalformedURLException ex) { throw new RuntimeException("It should never happen, " + ex.getMessage(), ex); } http://git-wip-us.apache.org/repos/asf/hadoop/blob/5877f20f/hadoop-hdfs-project/hadoop-hdfs-nfs/pom.xml ---------------------------------------------------------------------- diff --git a/hadoop-hdfs-project/hadoop-hdfs-nfs/pom.xml b/hadoop-hdfs-project/hadoop-hdfs-nfs/pom.xml index 7bbfe87..5746814 100644 --- a/hadoop-hdfs-project/hadoop-hdfs-nfs/pom.xml +++ b/hadoop-hdfs-project/hadoop-hdfs-nfs/pom.xml @@ -85,12 +85,12 @@ http://maven.apache.org/xsd/maven-4.0.0.xsd"> <scope>compile</scope> </dependency> <dependency> - <groupId>org.mortbay.jetty</groupId> - <artifactId>jetty</artifactId> + <groupId>org.eclipse.jetty</groupId> + <artifactId>jetty-server</artifactId> <scope>compile</scope> </dependency> <dependency> - <groupId>org.mortbay.jetty</groupId> + <groupId>org.eclipse.jetty</groupId> <artifactId>jetty-util</artifactId> <scope>compile</scope> </dependency> @@ -146,7 +146,7 @@ http://maven.apache.org/xsd/maven-4.0.0.xsd"> </dependency> <dependency> <groupId>javax.servlet</groupId> - <artifactId>servlet-api</artifactId> + <artifactId>javax.servlet-api</artifactId> <scope>compile</scope> </dependency> <dependency> http://git-wip-us.apache.org/repos/asf/hadoop/blob/5877f20f/hadoop-hdfs-project/hadoop-hdfs/pom.xml ---------------------------------------------------------------------- diff --git a/hadoop-hdfs-project/hadoop-hdfs/pom.xml b/hadoop-hdfs-project/hadoop-hdfs/pom.xml index b06cd4c..36db4d8 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/pom.xml +++ b/hadoop-hdfs-project/hadoop-hdfs/pom.xml @@ -75,16 +75,21 @@ http://maven.apache.org/xsd/maven-4.0.0.xsd"> 
<scope>compile</scope> </dependency> <dependency> - <groupId>org.mortbay.jetty</groupId> - <artifactId>jetty</artifactId> + <groupId>org.eclipse.jetty</groupId> + <artifactId>jetty-server</artifactId> <scope>compile</scope> </dependency> <dependency> - <groupId>org.mortbay.jetty</groupId> + <groupId>org.eclipse.jetty</groupId> <artifactId>jetty-util</artifactId> <scope>compile</scope> </dependency> <dependency> + <groupId>org.eclipse.jetty</groupId> + <artifactId>jetty-util-ajax</artifactId> + <scope>compile</scope> + </dependency> + <dependency> <groupId>com.sun.jersey</groupId> <artifactId>jersey-core</artifactId> <scope>compile</scope> @@ -136,7 +141,7 @@ http://maven.apache.org/xsd/maven-4.0.0.xsd"> </dependency> <dependency> <groupId>javax.servlet</groupId> - <artifactId>servlet-api</artifactId> + <artifactId>javax.servlet-api</artifactId> <scope>compile</scope> </dependency> <dependency> @@ -356,6 +361,18 @@ http://maven.apache.org/xsd/maven-4.0.0.xsd"> <output>${project.build.directory}/generated-sources/java</output> </configuration> </execution> + <execution> + <id>resource-gz</id> + <phase>generate-resources</phase> + <goals> + <goal>resource-gz</goal> + </goals> + <configuration> + <inputDirectory>${basedir}/src/main/webapps/static</inputDirectory> + <outputDirectory>${basedir}/target/webapps/static</outputDirectory> + <extensions>js,css</extensions> + </configuration> + </execution> </executions> </plugin> <plugin> http://git-wip-us.apache.org/repos/asf/hadoop/blob/5877f20f/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/qjournal/server/JournalNode.java ---------------------------------------------------------------------- diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/qjournal/server/JournalNode.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/qjournal/server/JournalNode.java index d08b40f..cde0112 100644 --- 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/qjournal/server/JournalNode.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/qjournal/server/JournalNode.java @@ -49,7 +49,7 @@ import org.apache.hadoop.util.StringUtils; import org.apache.hadoop.util.Tool; import org.apache.hadoop.util.ToolRunner; import org.apache.htrace.core.Tracer; -import org.mortbay.util.ajax.JSON; +import org.eclipse.jetty.util.ajax.JSON; import com.google.common.base.Preconditions; import com.google.common.collect.Maps; http://git-wip-us.apache.org/repos/asf/hadoop/blob/5877f20f/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java ---------------------------------------------------------------------- diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java index 9ceffc2..f89d38c 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java @@ -212,7 +212,7 @@ import org.apache.hadoop.util.StringUtils; import org.apache.hadoop.util.Time; import org.apache.hadoop.util.VersionInfo; import org.apache.htrace.core.Tracer; -import org.mortbay.util.ajax.JSON; +import org.eclipse.jetty.util.ajax.JSON; import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Joiner; http://git-wip-us.apache.org/repos/asf/hadoop/blob/5877f20f/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java ---------------------------------------------------------------------- diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java index 88c7681..3c782e2 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java @@ -283,7 +283,7 @@ import org.apache.hadoop.util.VersionInfo; import org.apache.log4j.Appender; import org.apache.log4j.AsyncAppender; import org.apache.log4j.Logger; -import org.mortbay.util.ajax.JSON; +import org.eclipse.jetty.util.ajax.JSON; import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Charsets; http://git-wip-us.apache.org/repos/asf/hadoop/blob/5877f20f/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NNStorage.java ---------------------------------------------------------------------- diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NNStorage.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NNStorage.java index 5135838..a846f60 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NNStorage.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NNStorage.java @@ -53,7 +53,7 @@ import org.apache.hadoop.hdfs.util.PersistentLongFile; import org.apache.hadoop.io.IOUtils; import org.apache.hadoop.net.DNS; import org.apache.hadoop.util.Time; -import org.mortbay.util.ajax.JSON; +import org.eclipse.jetty.util.ajax.JSON; import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Preconditions; http://git-wip-us.apache.org/repos/asf/hadoop/blob/5877f20f/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/TransferFsImage.java ---------------------------------------------------------------------- diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/TransferFsImage.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/TransferFsImage.java index 0186d8b..e4b95ee 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/TransferFsImage.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/TransferFsImage.java @@ -64,7 +64,7 @@ import org.apache.http.client.utils.URIBuilder; import com.google.common.annotations.VisibleForTesting; import com.google.common.collect.Lists; -import org.mortbay.jetty.EofException; +import org.eclipse.jetty.io.EofException; /** * This class provides fetching a specified file from the NameNode. http://git-wip-us.apache.org/repos/asf/hadoop/blob/5877f20f/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/web/resources/NamenodeWebHdfsMethods.java ---------------------------------------------------------------------- diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/web/resources/NamenodeWebHdfsMethods.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/web/resources/NamenodeWebHdfsMethods.java index 4247a67..cea3339 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/web/resources/NamenodeWebHdfsMethods.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/web/resources/NamenodeWebHdfsMethods.java @@ -84,6 +84,7 @@ import org.apache.hadoop.hdfs.web.ParamFilter; import org.apache.hadoop.hdfs.web.WebHdfsConstants; import org.apache.hadoop.hdfs.web.WebHdfsFileSystem; import org.apache.hadoop.hdfs.web.resources.*; +import org.apache.hadoop.http.JettyUtils; import org.apache.hadoop.io.Text; import org.apache.hadoop.ipc.ExternalCall; import org.apache.hadoop.ipc.RetriableException; @@ 
-355,7 +356,8 @@ public class NamenodeWebHdfsMethods { @PUT @Path("/") @Consumes({"*/*"}) - @Produces({MediaType.APPLICATION_OCTET_STREAM, MediaType.APPLICATION_JSON}) + @Produces({MediaType.APPLICATION_OCTET_STREAM + "; " + JettyUtils.UTF_8, + MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8}) public Response putRoot( @Context final UserGroupInformation ugi, @QueryParam(DelegationParam.NAME) @DefaultValue(DelegationParam.DEFAULT) @@ -395,13 +397,13 @@ public class NamenodeWebHdfsMethods { final CreateParentParam createParent, @QueryParam(TokenArgumentParam.NAME) @DefaultValue(TokenArgumentParam.DEFAULT) final TokenArgumentParam delegationTokenArgument, - @QueryParam(AclPermissionParam.NAME) @DefaultValue(AclPermissionParam.DEFAULT) + @QueryParam(AclPermissionParam.NAME) @DefaultValue(AclPermissionParam.DEFAULT) final AclPermissionParam aclPermission, - @QueryParam(XAttrNameParam.NAME) @DefaultValue(XAttrNameParam.DEFAULT) + @QueryParam(XAttrNameParam.NAME) @DefaultValue(XAttrNameParam.DEFAULT) final XAttrNameParam xattrName, - @QueryParam(XAttrValueParam.NAME) @DefaultValue(XAttrValueParam.DEFAULT) + @QueryParam(XAttrValueParam.NAME) @DefaultValue(XAttrValueParam.DEFAULT) final XAttrValueParam xattrValue, - @QueryParam(XAttrSetFlagParam.NAME) @DefaultValue(XAttrSetFlagParam.DEFAULT) + @QueryParam(XAttrSetFlagParam.NAME) @DefaultValue(XAttrSetFlagParam.DEFAULT) final XAttrSetFlagParam xattrSetFlag, @QueryParam(SnapshotNameParam.NAME) @DefaultValue(SnapshotNameParam.DEFAULT) final SnapshotNameParam snapshotName, @@ -426,7 +428,8 @@ public class NamenodeWebHdfsMethods { @PUT @Path("{" + UriFsPathParam.NAME + ":.*}") @Consumes({"*/*"}) - @Produces({MediaType.APPLICATION_OCTET_STREAM, MediaType.APPLICATION_JSON}) + @Produces({MediaType.APPLICATION_OCTET_STREAM + "; " + JettyUtils.UTF_8, + MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8}) public Response put( @Context final UserGroupInformation ugi, @QueryParam(DelegationParam.NAME) 
@DefaultValue(DelegationParam.DEFAULT) @@ -532,7 +535,7 @@ public class NamenodeWebHdfsMethods { final TokenArgumentParam delegationTokenArgument, final AclPermissionParam aclPermission, final XAttrNameParam xattrName, - final XAttrValueParam xattrValue, + final XAttrValueParam xattrValue, final XAttrSetFlagParam xattrSetFlag, final SnapshotNameParam snapshotName, final OldSnapshotNameParam oldSnapshotName, @@ -690,7 +693,8 @@ public class NamenodeWebHdfsMethods { @POST @Path("/") @Consumes({"*/*"}) - @Produces({MediaType.APPLICATION_OCTET_STREAM, MediaType.APPLICATION_JSON}) + @Produces({MediaType.APPLICATION_OCTET_STREAM + "; " + JettyUtils.UTF_8, + MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8}) public Response postRoot( @Context final UserGroupInformation ugi, @QueryParam(DelegationParam.NAME) @DefaultValue(DelegationParam.DEFAULT) @@ -720,7 +724,8 @@ public class NamenodeWebHdfsMethods { @POST @Path("{" + UriFsPathParam.NAME + ":.*}") @Consumes({"*/*"}) - @Produces({MediaType.APPLICATION_OCTET_STREAM, MediaType.APPLICATION_JSON}) + @Produces({MediaType.APPLICATION_OCTET_STREAM + "; " + JettyUtils.UTF_8, + MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8}) public Response post( @Context final UserGroupInformation ugi, @QueryParam(DelegationParam.NAME) @DefaultValue(DelegationParam.DEFAULT) @@ -799,7 +804,7 @@ public class NamenodeWebHdfsMethods { "newLength parameter is Missing"); } // We treat each rest request as a separate client. - final boolean b = np.truncate(fullpath, newLength.getValue(), + final boolean b = np.truncate(fullpath, newLength.getValue(), "DFSClient_" + DFSUtil.getSecureRandom().nextLong()); final String js = JsonUtil.toJsonString("boolean", b); return Response.ok(js).type(MediaType.APPLICATION_JSON).build(); @@ -812,7 +817,8 @@ public class NamenodeWebHdfsMethods { /** Handle HTTP GET request for the root. 
*/ @GET @Path("/") - @Produces({MediaType.APPLICATION_OCTET_STREAM, MediaType.APPLICATION_JSON}) + @Produces({MediaType.APPLICATION_OCTET_STREAM + "; " + JettyUtils.UTF_8, + MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8}) public Response getRoot( @Context final UserGroupInformation ugi, @QueryParam(DelegationParam.NAME) @DefaultValue(DelegationParam.DEFAULT) @@ -831,9 +837,9 @@ public class NamenodeWebHdfsMethods { final RenewerParam renewer, @QueryParam(BufferSizeParam.NAME) @DefaultValue(BufferSizeParam.DEFAULT) final BufferSizeParam bufferSize, - @QueryParam(XAttrNameParam.NAME) @DefaultValue(XAttrNameParam.DEFAULT) + @QueryParam(XAttrNameParam.NAME) @DefaultValue(XAttrNameParam.DEFAULT) final List<XAttrNameParam> xattrNames, - @QueryParam(XAttrEncodingParam.NAME) @DefaultValue(XAttrEncodingParam.DEFAULT) + @QueryParam(XAttrEncodingParam.NAME) @DefaultValue(XAttrEncodingParam.DEFAULT) final XAttrEncodingParam xattrEncoding, @QueryParam(ExcludeDatanodesParam.NAME) @DefaultValue(ExcludeDatanodesParam.DEFAULT) final ExcludeDatanodesParam excludeDatanodes, @@ -856,7 +862,8 @@ public class NamenodeWebHdfsMethods { /** Handle HTTP GET request. */ @GET @Path("{" + UriFsPathParam.NAME + ":.*}") - @Produces({MediaType.APPLICATION_OCTET_STREAM, MediaType.APPLICATION_JSON}) + @Produces({MediaType.APPLICATION_OCTET_STREAM + "; " + JettyUtils.UTF_8, + MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8}) public Response get( @Context final UserGroupInformation ugi, @QueryParam(DelegationParam.NAME) @DefaultValue(DelegationParam.DEFAULT) @@ -1131,7 +1138,7 @@ public class NamenodeWebHdfsMethods { /** Handle HTTP DELETE request for the root. 
*/ @DELETE @Path("/") - @Produces(MediaType.APPLICATION_JSON) + @Produces(MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8) public Response deleteRoot( @Context final UserGroupInformation ugi, @QueryParam(DelegationParam.NAME) @DefaultValue(DelegationParam.DEFAULT) @@ -1154,7 +1161,7 @@ public class NamenodeWebHdfsMethods { /** Handle HTTP DELETE request. */ @DELETE @Path("{" + UriFsPathParam.NAME + ":.*}") - @Produces(MediaType.APPLICATION_JSON) + @Produces(MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8) public Response delete( @Context final UserGroupInformation ugi, @QueryParam(DelegationParam.NAME) @DefaultValue(DelegationParam.DEFAULT) http://git-wip-us.apache.org/repos/asf/hadoop/blob/5877f20f/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDecommission.java ---------------------------------------------------------------------- diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDecommission.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDecommission.java index 6ca1e79..34ae12c 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDecommission.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDecommission.java @@ -64,7 +64,7 @@ import org.apache.log4j.Level; import org.junit.Assert; import org.junit.Ignore; import org.junit.Test; -import org.mortbay.util.ajax.JSON; +import org.eclipse.jetty.util.ajax.JSON; import org.slf4j.Logger; import org.slf4j.LoggerFactory; http://git-wip-us.apache.org/repos/asf/hadoop/blob/5877f20f/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/qjournal/server/TestJournalNodeMXBean.java ---------------------------------------------------------------------- diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/qjournal/server/TestJournalNodeMXBean.java 
b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/qjournal/server/TestJournalNodeMXBean.java index 45b245a..1de37a4 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/qjournal/server/TestJournalNodeMXBean.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/qjournal/server/TestJournalNodeMXBean.java @@ -35,7 +35,7 @@ import org.apache.hadoop.hdfs.server.protocol.NamespaceInfo; import org.junit.After; import org.junit.Before; import org.junit.Test; -import org.mortbay.util.ajax.JSON; +import org.eclipse.jetty.util.ajax.JSON; /** * Test {@link JournalNodeMXBean} http://git-wip-us.apache.org/repos/asf/hadoop/blob/5877f20f/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/blockmanagement/TestBlockStatsMXBean.java ---------------------------------------------------------------------- diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/blockmanagement/TestBlockStatsMXBean.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/blockmanagement/TestBlockStatsMXBean.java index 9e3112e..476565dc 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/blockmanagement/TestBlockStatsMXBean.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/blockmanagement/TestBlockStatsMXBean.java @@ -40,7 +40,7 @@ import org.apache.hadoop.hdfs.server.datanode.DataNodeTestUtils; import org.junit.After; import org.junit.Before; import org.junit.Test; -import org.mortbay.util.ajax.JSON; +import org.eclipse.jetty.util.ajax.JSON; /** * Class for testing {@link BlockStatsMXBean} implementation http://git-wip-us.apache.org/repos/asf/hadoop/blob/5877f20f/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestDataNodeMXBean.java ---------------------------------------------------------------------- diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestDataNodeMXBean.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestDataNodeMXBean.java index a77c943..6933c3e 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestDataNodeMXBean.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestDataNodeMXBean.java @@ -40,7 +40,7 @@ import org.codehaus.jackson.map.ObjectMapper; import org.codehaus.jackson.type.TypeReference; import org.junit.Assert; import org.junit.Test; -import org.mortbay.util.ajax.JSON; +import org.eclipse.jetty.util.ajax.JSON; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; http://git-wip-us.apache.org/repos/asf/hadoop/blob/5877f20f/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestFSNamesystemMBean.java ---------------------------------------------------------------------- diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestFSNamesystemMBean.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestFSNamesystemMBean.java index 3c0d54a..4687cfd 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestFSNamesystemMBean.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestFSNamesystemMBean.java @@ -35,7 +35,7 @@ import org.apache.hadoop.fs.Path; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hdfs.MiniDFSCluster; import org.junit.Test; -import org.mortbay.util.ajax.JSON; +import org.eclipse.jetty.util.ajax.JSON; /** * Class for testing {@link NameNodeMXBean} implementation 
http://git-wip-us.apache.org/repos/asf/hadoop/blob/5877f20f/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestNameNodeMXBean.java ---------------------------------------------------------------------- diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestNameNodeMXBean.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestNameNodeMXBean.java index 47f1c85..659a1d2 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestNameNodeMXBean.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestNameNodeMXBean.java @@ -53,7 +53,7 @@ import org.apache.hadoop.util.VersionInfo; import org.codehaus.jackson.map.ObjectMapper; import org.junit.Assert; import org.junit.Test; -import org.mortbay.util.ajax.JSON; +import org.eclipse.jetty.util.ajax.JSON; import javax.management.MBeanServer; import javax.management.ObjectName; http://git-wip-us.apache.org/repos/asf/hadoop/blob/5877f20f/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestStartupProgressServlet.java ---------------------------------------------------------------------- diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestStartupProgressServlet.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestStartupProgressServlet.java index 0f22e9a..bff549a 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestStartupProgressServlet.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestStartupProgressServlet.java @@ -36,7 +36,7 @@ import com.google.common.collect.ImmutableMap; import org.apache.hadoop.hdfs.server.namenode.startupprogress.StartupProgress; import org.junit.Before; import 
org.junit.Test; -import org.mortbay.util.ajax.JSON; +import org.eclipse.jetty.util.ajax.JSON; public class TestStartupProgressServlet { http://git-wip-us.apache.org/repos/asf/hadoop/blob/5877f20f/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestTransferFsImage.java ---------------------------------------------------------------------- diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestTransferFsImage.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestTransferFsImage.java index ca97e32..df02b35 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestTransferFsImage.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestTransferFsImage.java @@ -120,7 +120,7 @@ public class TestTransferFsImage { /** * Test to verify the read timeout */ - @Test(timeout = 5000) + @Test(timeout = 10000) public void testGetImageTimeout() throws Exception { HttpServer2 testServer = HttpServerFunctionalTest.createServer("hdfs"); try { http://git-wip-us.apache.org/repos/asf/hadoop/blob/5877f20f/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHDFSForHA.java ---------------------------------------------------------------------- diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHDFSForHA.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHDFSForHA.java index 9f78548..4e1ceed 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHDFSForHA.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHDFSForHA.java @@ -56,7 +56,7 @@ import org.apache.hadoop.security.token.Token; import org.junit.Assert; import org.junit.Test; import org.mockito.internal.util.reflection.Whitebox; -import 
org.mortbay.util.ajax.JSON; +import org.eclipse.jetty.util.ajax.JSON; import javax.servlet.http.HttpServletResponse; import javax.ws.rs.core.Response; http://git-wip-us.apache.org/repos/asf/hadoop/blob/5877f20f/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/test/MiniDFSClusterManager.java ---------------------------------------------------------------------- diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/test/MiniDFSClusterManager.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/test/MiniDFSClusterManager.java index f8e931a..1373891 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/test/MiniDFSClusterManager.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/test/MiniDFSClusterManager.java @@ -38,7 +38,7 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hdfs.HdfsConfiguration; import org.apache.hadoop.hdfs.MiniDFSCluster; import org.apache.hadoop.hdfs.server.common.HdfsServerConstants.StartupOption; -import org.mortbay.util.ajax.JSON; +import org.eclipse.jetty.util.ajax.JSON; /** * This class drives the creation of a mini-cluster on the local machine. 
By http://git-wip-us.apache.org/repos/asf/hadoop/blob/5877f20f/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/JobEndNotifier.java ---------------------------------------------------------------------- diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/JobEndNotifier.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/JobEndNotifier.java index 05bb40b..836fad5 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/JobEndNotifier.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/JobEndNotifier.java @@ -30,7 +30,7 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.mapred.JobContext; import org.apache.hadoop.mapreduce.MRJobConfig; import org.apache.hadoop.mapreduce.v2.api.records.JobReport; -import org.mortbay.log.Log; +import org.eclipse.jetty.util.log.Log; /** * <p>This class handles job end notification. Submitters of jobs can choose to @@ -101,11 +101,12 @@ public class JobEndNotifier implements Configurable { int port = Integer.parseInt(portConf); proxyToUse = new Proxy(proxyType, new InetSocketAddress(hostname, port)); - Log.info("Job end notification using proxy type \"" + proxyType + - "\" hostname \"" + hostname + "\" and port \"" + port + "\""); + Log.getLog().info("Job end notification using proxy type \"" + + proxyType + "\" hostname \"" + hostname + "\" and port \"" + port + + "\""); } catch(NumberFormatException nfe) { - Log.warn("Job end notification couldn't parse configured proxy's port " - + portConf + ". 
Not going to use a proxy"); + Log.getLog().warn("Job end notification couldn't parse configured " + + "proxy's port " + portConf + ". Not going to use a proxy"); } } @@ -121,23 +122,25 @@ public class JobEndNotifier implements Configurable { protected boolean notifyURLOnce() { boolean success = false; try { - Log.info("Job end notification trying " + urlToNotify); + Log.getLog().info("Job end notification trying " + urlToNotify); HttpURLConnection conn = (HttpURLConnection) urlToNotify.openConnection(proxyToUse); conn.setConnectTimeout(timeout); conn.setReadTimeout(timeout); conn.setAllowUserInteraction(false); if(conn.getResponseCode() != HttpURLConnection.HTTP_OK) { - Log.warn("Job end notification to " + urlToNotify +" failed with code: " - + conn.getResponseCode() + " and message \"" + conn.getResponseMessage() - +"\""); + Log.getLog().warn("Job end notification to " + urlToNotify + + " failed with code: " + conn.getResponseCode() + " and message \"" + + conn.getResponseMessage() + "\""); } else { success = true; - Log.info("Job end notification to " + urlToNotify + " succeeded"); + Log.getLog().info("Job end notification to " + urlToNotify + + " succeeded"); } } catch(IOException ioe) { - Log.warn("Job end notification to " + urlToNotify + " failed", ioe); + Log.getLog().warn("Job end notification to " + urlToNotify + " failed", + ioe); } return success; } @@ -152,7 +155,7 @@ public class JobEndNotifier implements Configurable { throws InterruptedException { // Do we need job-end notification?
if (userUrl == null) { - Log.info("Job end notification URL not set, skipping."); + Log.getLog().info("Job end notification URL not set, skipping."); return; } @@ -168,23 +171,25 @@ public class JobEndNotifier implements Configurable { try { urlToNotify = new URL(userUrl); } catch (MalformedURLException mue) { - Log.warn("Job end notification couldn't parse " + userUrl, mue); + Log.getLog().warn("Job end notification couldn't parse " + userUrl, mue); return; } // Send notification boolean success = false; while (numTries-- > 0 && !success) { - Log.info("Job end notification attempts left " + numTries); + Log.getLog().info("Job end notification attempts left " + numTries); success = notifyURLOnce(); if (!success) { Thread.sleep(waitInterval); } } if (!success) { - Log.warn("Job end notification failed to notify : " + urlToNotify); + Log.getLog().warn("Job end notification failed to notify : " + + urlToNotify); } else { - Log.info("Job end notification succeeded for " + jobReport.getJobId()); + Log.getLog().info("Job end notification succeeded for " + + jobReport.getJobId()); } } } http://git-wip-us.apache.org/repos/asf/hadoop/blob/5877f20f/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/AMWebServices.java ---------------------------------------------------------------------- diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/AMWebServices.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/AMWebServices.java index 5d50db7..f477d31 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/AMWebServices.java +++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/AMWebServices.java @@ -38,6 +38,7 @@ import javax.ws.rs.core.MediaType; import javax.ws.rs.core.Response; import javax.ws.rs.core.Response.Status; +import org.apache.hadoop.http.JettyUtils; import org.apache.hadoop.mapreduce.JobACL; import org.apache.hadoop.mapreduce.v2.api.protocolrecords.KillTaskAttemptRequest; import org.apache.hadoop.mapreduce.v2.api.protocolrecords.KillTaskAttemptResponse; @@ -221,14 +222,16 @@ public class AMWebServices { } @GET - @Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML }) + @Produces({ MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8, + MediaType.APPLICATION_XML + "; " + JettyUtils.UTF_8 }) public AppInfo get() { return getAppInfo(); } @GET @Path("/info") - @Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML }) + @Produces({ MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8, + MediaType.APPLICATION_XML + "; " + JettyUtils.UTF_8 }) public AppInfo getAppInfo() { init(); return new AppInfo(this.app, this.app.context); @@ -236,7 +239,8 @@ public class AMWebServices { @GET @Path("/blacklistednodes") - @Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML }) + @Produces({ MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8, + MediaType.APPLICATION_XML + "; " + JettyUtils.UTF_8 }) public BlacklistedNodesInfo getBlacklistedNodes() { init(); return new BlacklistedNodesInfo(this.app.context); @@ -244,7 +248,8 @@ public class AMWebServices { @GET @Path("/jobs") - @Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML }) + @Produces({ MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8, + MediaType.APPLICATION_XML + "; " + JettyUtils.UTF_8 }) public JobsInfo getJobs(@Context HttpServletRequest hsr) { init(); JobsInfo allJobs = new JobsInfo(); @@ -261,7 +266,8 @@ public class AMWebServices { @GET @Path("/jobs/{jobid}") - @Produces({ 
MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML }) + @Produces({ MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8, + MediaType.APPLICATION_XML + "; " + JettyUtils.UTF_8 }) public JobInfo getJob(@Context HttpServletRequest hsr, @PathParam("jobid") String jid) { init(); @@ -271,7 +277,8 @@ public class AMWebServices { @GET @Path("/jobs/{jobid}/jobattempts") - @Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML }) + @Produces({ MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8, + MediaType.APPLICATION_XML + "; " + JettyUtils.UTF_8 }) public AMAttemptsInfo getJobAttempts(@PathParam("jobid") String jid) { init(); Job job = getJobFromJobIdString(jid, appCtx); @@ -286,7 +293,8 @@ public class AMWebServices { @GET @Path("/jobs/{jobid}/counters") - @Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML }) + @Produces({ MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8, + MediaType.APPLICATION_XML + "; " + JettyUtils.UTF_8 }) public JobCounterInfo getJobCounters(@Context HttpServletRequest hsr, @PathParam("jobid") String jid) { init(); @@ -297,7 +305,8 @@ public class AMWebServices { @GET @Path("/jobs/{jobid}/conf") - @Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML }) + @Produces({ MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8, + MediaType.APPLICATION_XML + "; " + JettyUtils.UTF_8 }) public ConfInfo getJobConf(@Context HttpServletRequest hsr, @PathParam("jobid") String jid) { @@ -316,7 +325,8 @@ public class AMWebServices { @GET @Path("/jobs/{jobid}/tasks") - @Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML }) + @Produces({ MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8, + MediaType.APPLICATION_XML + "; " + JettyUtils.UTF_8 }) public TasksInfo getJobTasks(@Context HttpServletRequest hsr, @PathParam("jobid") String jid, @QueryParam("type") String type) { @@ -343,7 +353,8 @@ public class AMWebServices { @GET @Path("/jobs/{jobid}/tasks/{taskid}") - @Produces({ MediaType.APPLICATION_JSON, 
MediaType.APPLICATION_XML }) + @Produces({ MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8, + MediaType.APPLICATION_XML + "; " + JettyUtils.UTF_8 }) public TaskInfo getJobTask(@Context HttpServletRequest hsr, @PathParam("jobid") String jid, @PathParam("taskid") String tid) { @@ -356,7 +367,8 @@ public class AMWebServices { @GET @Path("/jobs/{jobid}/tasks/{taskid}/counters") - @Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML }) + @Produces({ MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8, + MediaType.APPLICATION_XML + "; " + JettyUtils.UTF_8 }) public JobTaskCounterInfo getSingleTaskCounters( @Context HttpServletRequest hsr, @PathParam("jobid") String jid, @PathParam("taskid") String tid) { @@ -370,7 +382,8 @@ public class AMWebServices { @GET @Path("/jobs/{jobid}/tasks/{taskid}/attempts") - @Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML }) + @Produces({ MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8, + MediaType.APPLICATION_XML + "; " + JettyUtils.UTF_8 }) public TaskAttemptsInfo getJobTaskAttempts(@Context HttpServletRequest hsr, @PathParam("jobid") String jid, @PathParam("taskid") String tid) { @@ -394,7 +407,8 @@ public class AMWebServices { @GET @Path("/jobs/{jobid}/tasks/{taskid}/attempts/{attemptid}") - @Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML }) + @Produces({ MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8, + MediaType.APPLICATION_XML + "; " + JettyUtils.UTF_8 }) public TaskAttemptInfo getJobTaskAttemptId(@Context HttpServletRequest hsr, @PathParam("jobid") String jid, @PathParam("taskid") String tid, @PathParam("attemptid") String attId) { @@ -413,7 +427,8 @@ public class AMWebServices { @GET @Path("/jobs/{jobid}/tasks/{taskid}/attempts/{attemptid}/state") - @Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML }) + @Produces({ MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8, + MediaType.APPLICATION_XML + "; " + JettyUtils.UTF_8 }) public JobTaskAttemptState 
getJobTaskAttemptState( @Context HttpServletRequest hsr, @PathParam("jobid") String jid, @PathParam("taskid") String tid, @@ -429,7 +444,8 @@ public class AMWebServices { @PUT @Path("/jobs/{jobid}/tasks/{taskid}/attempts/{attemptid}/state") - @Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML }) + @Produces({ MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8, + MediaType.APPLICATION_XML + "; " + JettyUtils.UTF_8 }) @Consumes({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML }) public Response updateJobTaskAttemptState(JobTaskAttemptState targetState, @Context HttpServletRequest hsr, @PathParam("jobid") String jid, @@ -466,7 +482,8 @@ public class AMWebServices { @GET @Path("/jobs/{jobid}/tasks/{taskid}/attempts/{attemptid}/counters") - @Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML }) + @Produces({ MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8, + MediaType.APPLICATION_XML + "; " + JettyUtils.UTF_8 }) public JobTaskAttemptCounterInfo getJobTaskAttemptIdCounters( @Context HttpServletRequest hsr, @PathParam("jobid") String jid, @PathParam("taskid") String tid, @PathParam("attemptid") String attId) { http://git-wip-us.apache.org/repos/asf/hadoop/blob/5877f20f/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/webapp/TestAMWebServices.java ---------------------------------------------------------------------- diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/webapp/TestAMWebServices.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/webapp/TestAMWebServices.java index 7138196..c5dc290 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/webapp/TestAMWebServices.java +++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/webapp/TestAMWebServices.java @@ -31,6 +31,7 @@ import javax.xml.parsers.DocumentBuilder; import javax.xml.parsers.DocumentBuilderFactory; import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.http.JettyUtils; import org.apache.hadoop.mapreduce.v2.app.AppContext; import org.apache.hadoop.mapreduce.v2.app.MockAppContext; import org.apache.hadoop.yarn.webapp.GenericExceptionHandler; @@ -112,7 +113,8 @@ public class TestAMWebServices extends JerseyTestBase { WebResource r = resource(); ClientResponse response = r.path("ws").path("v1").path("mapreduce") .accept(MediaType.APPLICATION_JSON).get(ClientResponse.class); - assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType()); + assertEquals(MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8, + response.getType().toString()); JSONObject json = response.getEntity(JSONObject.class); assertEquals("incorrect number of elements", 1, json.length()); verifyAMInfo(json.getJSONObject("info"), appContext); @@ -123,7 +125,8 @@ public class TestAMWebServices extends JerseyTestBase { WebResource r = resource(); ClientResponse response = r.path("ws").path("v1").path("mapreduce/") .accept(MediaType.APPLICATION_JSON).get(ClientResponse.class); - assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType()); + assertEquals(MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8, + response.getType().toString()); JSONObject json = response.getEntity(JSONObject.class); assertEquals("incorrect number of elements", 1, json.length()); verifyAMInfo(json.getJSONObject("info"), appContext); @@ -134,7 +137,8 @@ public class TestAMWebServices extends JerseyTestBase { WebResource r = resource(); ClientResponse response = r.path("ws").path("v1").path("mapreduce/") .get(ClientResponse.class); - assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType()); + 
assertEquals(MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8, + response.getType().toString()); JSONObject json = response.getEntity(JSONObject.class); assertEquals("incorrect number of elements", 1, json.length()); verifyAMInfo(json.getJSONObject("info"), appContext); @@ -145,7 +149,8 @@ public class TestAMWebServices extends JerseyTestBase { WebResource r = resource(); ClientResponse response = r.path("ws").path("v1").path("mapreduce") .accept(MediaType.APPLICATION_XML).get(ClientResponse.class); - assertEquals(MediaType.APPLICATION_XML_TYPE, response.getType()); + assertEquals(MediaType.APPLICATION_XML + "; " + JettyUtils.UTF_8, + response.getType().toString()); String xml = response.getEntity(String.class); verifyAMInfoXML(xml, appContext); } @@ -156,7 +161,8 @@ public class TestAMWebServices extends JerseyTestBase { ClientResponse response = r.path("ws").path("v1").path("mapreduce") .path("info").accept(MediaType.APPLICATION_JSON) .get(ClientResponse.class); - assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType()); + assertEquals(MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8, + response.getType().toString()); JSONObject json = response.getEntity(JSONObject.class); assertEquals("incorrect number of elements", 1, json.length()); verifyAMInfo(json.getJSONObject("info"), appContext); @@ -168,7 +174,8 @@ public class TestAMWebServices extends JerseyTestBase { ClientResponse response = r.path("ws").path("v1").path("mapreduce") .path("info/").accept(MediaType.APPLICATION_JSON) .get(ClientResponse.class); - assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType()); + assertEquals(MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8, + response.getType().toString()); JSONObject json = response.getEntity(JSONObject.class); assertEquals("incorrect number of elements", 1, json.length()); verifyAMInfo(json.getJSONObject("info"), appContext); @@ -179,7 +186,8 @@ public class TestAMWebServices extends JerseyTestBase { WebResource r = 
resource(); ClientResponse response = r.path("ws").path("v1").path("mapreduce") .path("info/").get(ClientResponse.class); - assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType()); + assertEquals(MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8, + response.getType().toString()); JSONObject json = response.getEntity(JSONObject.class); assertEquals("incorrect number of elements", 1, json.length()); verifyAMInfo(json.getJSONObject("info"), appContext); @@ -191,7 +199,8 @@ public class TestAMWebServices extends JerseyTestBase { ClientResponse response = r.path("ws").path("v1").path("mapreduce") .path("info/").accept(MediaType.APPLICATION_XML) .get(ClientResponse.class); - assertEquals(MediaType.APPLICATION_XML_TYPE, response.getType()); + assertEquals(MediaType.APPLICATION_XML + "; " + JettyUtils.UTF_8, + response.getType().toString()); String xml = response.getEntity(String.class); verifyAMInfoXML(xml, appContext); } @@ -251,7 +260,8 @@ public class TestAMWebServices extends JerseyTestBase { ClientResponse response = r.path("ws").path("v1").path("mapreduce") .path("blacklistednodes").accept(MediaType.APPLICATION_JSON) .get(ClientResponse.class); - assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType()); + assertEquals(MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8, + response.getType().toString()); JSONObject json = response.getEntity(JSONObject.class); assertEquals("incorrect number of elements", 1, json.length()); verifyBlacklistedNodesInfo(json, appContext); @@ -263,7 +273,8 @@ public class TestAMWebServices extends JerseyTestBase { ClientResponse response = r.path("ws").path("v1").path("mapreduce") .path("blacklistednodes").accept(MediaType.APPLICATION_XML) .get(ClientResponse.class); - assertEquals(MediaType.APPLICATION_XML_TYPE, response.getType()); + assertEquals(MediaType.APPLICATION_XML + "; " + JettyUtils.UTF_8, + response.getType().toString()); String xml = response.getEntity(String.class); verifyBlacklistedNodesInfoXML(xml, 
appContext); } http://git-wip-us.apache.org/repos/asf/hadoop/blob/5877f20f/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/webapp/TestAMWebServicesAttempt.java ---------------------------------------------------------------------- diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/webapp/TestAMWebServicesAttempt.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/webapp/TestAMWebServicesAttempt.java index f2e6d63..f20ac6f 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/webapp/TestAMWebServicesAttempt.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/webapp/TestAMWebServicesAttempt.java @@ -32,6 +32,7 @@ import javax.xml.parsers.DocumentBuilder; import javax.xml.parsers.DocumentBuilderFactory; import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.http.JettyUtils; import org.apache.hadoop.mapreduce.v2.api.records.JobId; import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId; import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptState; @@ -152,7 +153,8 @@ public class TestAMWebServicesAttempt extends JerseyTestBase { .path("attempts").path(attid).path("state") .queryParam("user.name", webserviceUserName) .accept(MediaType.APPLICATION_JSON).get(ClientResponse.class); - assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType()); + assertEquals(MediaType.APPLICATION_JSON_TYPE + "; " + + JettyUtils.UTF_8, response.getType().toString()); JSONObject json = response.getEntity(JSONObject.class); assertEquals("incorrect number of elements", 1, json.length()); assertEquals(att.getState().toString(), json.get("state")); @@ 
-180,7 +182,8 @@ public class TestAMWebServicesAttempt extends JerseyTestBase { .queryParam("user.name", webserviceUserName) .accept(MediaType.APPLICATION_XML).get(ClientResponse.class); - assertEquals(MediaType.APPLICATION_XML_TYPE, response.getType()); + assertEquals(MediaType.APPLICATION_XML_TYPE + "; " + JettyUtils.UTF_8, + response.getType().toString()); String xml = response.getEntity(String.class); DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance(); DocumentBuilder db = dbf.newDocumentBuilder(); @@ -219,7 +222,8 @@ public class TestAMWebServicesAttempt extends JerseyTestBase { .accept(MediaType.APPLICATION_JSON) .type(MediaType.APPLICATION_JSON) .put(ClientResponse.class, "{\"state\":\"KILLED\"}"); - assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType()); + assertEquals(MediaType.APPLICATION_JSON_TYPE + "; " + + JettyUtils.UTF_8, response.getType().toString()); JSONObject json = response.getEntity(JSONObject.class); assertEquals("incorrect number of elements", 1, json.length()); assertEquals(TaskAttemptState.KILLED.toString(), json.get("state")); @@ -252,7 +256,8 @@ public class TestAMWebServicesAttempt extends JerseyTestBase { .put(ClientResponse.class, "<jobTaskAttemptState><state>KILLED" + "</state></jobTaskAttemptState>"); - assertEquals(MediaType.APPLICATION_XML_TYPE, response.getType()); + assertEquals(MediaType.APPLICATION_XML_TYPE + "; " + JettyUtils.UTF_8, + response.getType().toString()); String xml = response.getEntity(String.class); DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance(); DocumentBuilder db = dbf.newDocumentBuilder(); --------------------------------------------------------------------- To unsubscribe, e-mail: common-commits-unsubscr...@hadoop.apache.org For additional commands, e-mail: common-commits-h...@hadoop.apache.org