http://git-wip-us.apache.org/repos/asf/hadoop/blob/8b7fcb09/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/core/launch/CredentialUtils.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/core/launch/CredentialUtils.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/core/launch/CredentialUtils.java
deleted file mode 100644
index 1fd49ab..0000000
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/core/launch/CredentialUtils.java
+++ /dev/null
@@ -1,379 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.slider.core.launch;
-
-import com.google.common.base.Preconditions;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.io.DataOutputBuffer;
-import org.apache.hadoop.io.Text;
-import org.apache.hadoop.security.Credentials;
-import org.apache.hadoop.security.SecurityUtil;
-import org.apache.hadoop.security.UserGroupInformation;
-import org.apache.hadoop.security.token.Token;
-import org.apache.hadoop.security.token.TokenIdentifier;
-import org.apache.hadoop.security.token.delegation.AbstractDelegationTokenIdentifier;
-import org.apache.hadoop.yarn.client.ClientRMProxy;
-import org.apache.hadoop.yarn.client.api.TimelineClient;
-import org.apache.hadoop.yarn.client.api.YarnClient;
-import org.apache.hadoop.yarn.conf.HAUtil;
-import org.apache.hadoop.yarn.conf.YarnConfiguration;
-import org.apache.hadoop.yarn.exceptions.YarnException;
-import org.apache.hadoop.yarn.security.client.TimelineDelegationTokenIdentifier;
-import org.apache.hadoop.yarn.util.ConverterUtils;
-import org.apache.hadoop.yarn.service.conf.SliderXmlConfKeys;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.io.DataOutputStream;
-import java.io.File;
-import java.io.FileNotFoundException;
-import java.io.FileOutputStream;
-import java.io.IOException;
-import java.io.Serializable;
-import java.nio.ByteBuffer;
-import java.text.DateFormat;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.Comparator;
-import java.util.Date;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Map;
-
-import static org.apache.hadoop.security.UserGroupInformation.HADOOP_TOKEN_FILE_LOCATION;
-import static org.apache.hadoop.yarn.conf.YarnConfiguration.*;
-
-/**
- * Utils to work with credentials and tokens.
- *
- * Designed to be movable to Hadoop core
- */
-public final class CredentialUtils {
-
-  public static final String JOB_CREDENTIALS_BINARY
-      = SliderXmlConfKeys.MAPREDUCE_JOB_CREDENTIALS_BINARY;
-
-  private CredentialUtils() {
-  }
-
-  private static final Logger LOG =
-      LoggerFactory.getLogger(CredentialUtils.class);
-
-  /**
-   * Save credentials to a byte buffer. Returns null if there were no
-   * credentials to save
-   * @param credentials credential set
-   * @return a byte buffer of serialized tokens
-   * @throws IOException if the credentials could not be written to the stream
-   */
-  public static ByteBuffer marshallCredentials(Credentials credentials) throws IOException {
-    ByteBuffer buffer = null;
-    if (!credentials.getAllTokens().isEmpty()) {
-      DataOutputBuffer dob = new DataOutputBuffer();
-      try {
-        credentials.writeTokenStorageToStream(dob);
-      } finally {
-        dob.close();
-      }
-      buffer = ByteBuffer.wrap(dob.getData(), 0, dob.getLength());
-    }
-    return buffer;
-  }
-
-  public static File locateEnvCredentials(Map<String, String> env,
-      Configuration conf,
-      StringBuffer sourceTextOut) throws FileNotFoundException {
-    String tokenFilename = env.get(HADOOP_TOKEN_FILE_LOCATION);
-    String source = "environment variable " + HADOOP_TOKEN_FILE_LOCATION;
-    if (tokenFilename == null) {
-      tokenFilename = conf.get(JOB_CREDENTIALS_BINARY);
-      source = "configuration option " + JOB_CREDENTIALS_BINARY;
-    }
-    if (tokenFilename != null) {
-      // use delegation tokens, i.e. from Oozie
-      File file = new File(tokenFilename.trim());
-      String details = String.format(
-          "Token File %s from %s",
-          file,
-          source);
-      if (!file.exists()) {
-        throw new FileNotFoundException("No " + details);
-      }
-      if (!file.isFile() && !file.canRead()) {
-        throw new FileNotFoundException("Cannot read " + details);
-      }
-      sourceTextOut.append(details);
-      return file;
-    } else {
-      return null;
-    }
-  }
-
-  /**
-   * Load the credentials from the environment. This looks at
-   * the value of {@link UserGroupInformation#HADOOP_TOKEN_FILE_LOCATION}
-   * and attempts to read in the value
-   * @param env environment to resolve the variable from
-   * @param conf configuration use when reading the tokens
-   * @return a set of credentials, or null if the environment did not
-   * specify any
-   * @throws IOException if a location for credentials was defined, but
-   * the credentials could not be loaded.
-   */
-  public static Credentials loadTokensFromEnvironment(Map<String, String> env,
-      Configuration conf)
-      throws IOException {
-    StringBuffer origin = new StringBuffer();
-    File file = locateEnvCredentials(env, conf, origin);
-    if (file != null) {
-      LOG.debug("Using {}", origin);
-      return Credentials.readTokenStorageFile(file, conf);
-    } else {
-      return null;
-    }
-  }
-
-  /**
-   * Save credentials to a file
-   * @param file file to save to (will be overwritten)
-   * @param credentials credentials to write
-   * @throws IOException
-   */
-  public static void saveTokens(File file,
-      Credentials credentials) throws IOException {
-    try(DataOutputStream daos = new DataOutputStream(
-        new FileOutputStream(file))) {
-      credentials.writeTokenStorageToStream(daos);
-    }
-  }
-
-  /**
-   * Look up and return the resource manager's principal. This method
-   * automatically does the <code>_HOST</code> replacement in the principal and
-   * correctly handles HA resource manager configurations.
-   *
-   * From: YARN-4629
-   * @param conf the {@link Configuration} file from which to read the
-   * principal
-   * @return the resource manager's principal string
-   * @throws IOException thrown if there's an error replacing the host name
-   */
-  public static String getRMPrincipal(Configuration conf) throws IOException {
-    String principal = conf.get(RM_PRINCIPAL, "");
-    String hostname;
-    Preconditions.checkState(!principal.isEmpty(), "Not set: " + RM_PRINCIPAL);
-
-    if (HAUtil.isHAEnabled(conf)) {
-      YarnConfiguration yarnConf = new YarnConfiguration(conf);
-      if (yarnConf.get(RM_HA_ID) == null) {
-        // If RM_HA_ID is not configured, use the first of RM_HA_IDS.
-        // Any valid RM HA ID should work.
-        String[] rmIds = yarnConf.getStrings(RM_HA_IDS);
-        Preconditions.checkState((rmIds != null) && (rmIds.length > 0),
-            "Not set " + RM_HA_IDS);
-        yarnConf.set(RM_HA_ID, rmIds[0]);
-      }
-
-      hostname = yarnConf.getSocketAddr(
-          RM_ADDRESS,
-          DEFAULT_RM_ADDRESS,
-          DEFAULT_RM_PORT).getHostName();
-    } else {
-      hostname = conf.getSocketAddr(
-          RM_ADDRESS,
-          DEFAULT_RM_ADDRESS,
-          DEFAULT_RM_PORT).getHostName();
-    }
-    return SecurityUtil.getServerPrincipal(principal, hostname);
-  }
-
-  /**
-   * Create and add any filesystem delegation tokens with
-   * the RM(s) configured to be able to renew them. Returns null
-   * on an insecure cluster (i.e. harmless)
-   * @param conf configuration
-   * @param fs filesystem
-   * @param credentials credentials to update
-   * @return a list of all added tokens.
-   * @throws IOException
-   */
-  public static Token<?>[] addRMRenewableFSDelegationTokens(Configuration conf,
-      FileSystem fs,
-      Credentials credentials) throws IOException {
-    Preconditions.checkArgument(conf != null);
-    Preconditions.checkArgument(credentials != null);
-    if (UserGroupInformation.isSecurityEnabled()) {
-      return fs.addDelegationTokens(CredentialUtils.getRMPrincipal(conf),
-          credentials);
-    }
-    return null;
-  }
-
-  /**
-   * Add an FS delegation token which can be renewed by the current user
-   * @param fs filesystem
-   * @param credentials credentials to update
-   * @throws IOException problems.
-   */
-  public static void addSelfRenewableFSDelegationTokens(
-      FileSystem fs,
-      Credentials credentials) throws IOException {
-    Preconditions.checkArgument(fs != null);
-    Preconditions.checkArgument(credentials != null);
-    fs.addDelegationTokens(
-        getSelfRenewer(),
-        credentials);
-  }
-
-  public static String getSelfRenewer() throws IOException {
-    return UserGroupInformation.getLoginUser().getShortUserName();
-  }
-
-  /**
-   * Create and add an RM delegation token to the credentials
-   * @param yarnClient Yarn Client
-   * @param credentials to add token to
-   * @return the token which was added
-   * @throws IOException
-   * @throws YarnException
-   */
-  public static Token<TokenIdentifier> addRMDelegationToken(YarnClient yarnClient,
-      Credentials credentials)
-      throws IOException, YarnException {
-    Configuration conf = yarnClient.getConfig();
-    Text rmPrincipal = new Text(CredentialUtils.getRMPrincipal(conf));
-    Text rmDTService = ClientRMProxy.getRMDelegationTokenService(conf);
-    Token<TokenIdentifier> rmDelegationToken =
-        ConverterUtils.convertFromYarn(
-            yarnClient.getRMDelegationToken(rmPrincipal),
-            rmDTService);
-    credentials.addToken(rmDelegationToken.getService(), rmDelegationToken);
-    return rmDelegationToken;
-  }
-
-  public static Token<TimelineDelegationTokenIdentifier> maybeAddTimelineToken(
-      Configuration conf,
-      Credentials credentials)
-      throws IOException, YarnException {
-    if (conf.getBoolean(YarnConfiguration.TIMELINE_SERVICE_ENABLED, false)) {
-      LOG.debug("Timeline service enabled -fetching token");
-
-      try(TimelineClient timelineClient = TimelineClient.createTimelineClient()) {
-        timelineClient.init(conf);
-        timelineClient.start();
-        Token<TimelineDelegationTokenIdentifier> token =
-            timelineClient.getDelegationToken(
-                CredentialUtils.getRMPrincipal(conf));
-        credentials.addToken(token.getService(), token);
-        return token;
-      }
-    } else {
-      LOG.debug("Timeline service is disabled");
-      return null;
-    }
-  }
-
-  /**
-   * Filter a list of tokens from a set of credentials
-   * @param credentials credential source (a new credential set is returned; the source is not modified)
-   * @param filter List of tokens to strip out
-   * @return a new, filtered, set of credentials
-   */
-  public static Credentials filterTokens(Credentials credentials,
-      List<Text> filter) {
-    Credentials result = new Credentials(credentials);
-    Iterator<Token<? extends TokenIdentifier>> iter =
-        result.getAllTokens().iterator();
-    while (iter.hasNext()) {
-      Token<? extends TokenIdentifier> token = iter.next();
-      LOG.debug("Token {}", token.getKind());
-      if (filter.contains(token.getKind())) {
-        LOG.debug("Filtering token {}", token.getKind());
-        iter.remove();
-      }
-    }
-    return result;
-  }
-
-  public static String dumpTokens(Credentials credentials, String separator) {
-    ArrayList<Token<? extends TokenIdentifier>> sorted =
-        new ArrayList<>(credentials.getAllTokens());
-    Collections.sort(sorted, new TokenComparator());
-    StringBuilder buffer = new StringBuilder(sorted.size()* 128);
-    for (Token<? extends TokenIdentifier> token : sorted) {
-      buffer.append(tokenToString(token)).append(separator);
-    }
-    return buffer.toString();
-  }
-
-  /**
-   * Create a string for people to look at
-   * @param token token to convert to a string form
-   * @return a printable view of the token
-   */
-  public static String tokenToString(Token<? extends TokenIdentifier> token) {
-    DateFormat df = DateFormat.getDateTimeInstance(
-        DateFormat.SHORT, DateFormat.SHORT);
-    StringBuilder buffer = new StringBuilder(128);
-    buffer.append(token.toString());
-    try {
-      TokenIdentifier ti = token.decodeIdentifier();
-      buffer.append("; ").append(ti);
-      if (ti instanceof AbstractDelegationTokenIdentifier) {
-        // details in human readable form, and compensate for information HDFS DT omits
-        AbstractDelegationTokenIdentifier dt = (AbstractDelegationTokenIdentifier) ti;
-        buffer.append("; Renewer: ").append(dt.getRenewer());
-        buffer.append("; Issued: ")
-            .append(df.format(new Date(dt.getIssueDate())));
-        buffer.append("; Max Date: ")
-            .append(df.format(new Date(dt.getMaxDate())));
-      }
-    } catch (IOException e) {
-      //marshall problem; not ours
-      LOG.debug("Failed to decode {}: {}", token, e, e);
-    }
-    return buffer.toString();
-  }
-
-  /**
-   * Get the expiry time of a token.
-   * @param token token to examine
-   * @return the time in milliseconds after which the token is invalid.
-   * @throws IOException
-   */
-  public static long getTokenExpiryTime(Token token) throws IOException {
-    TokenIdentifier identifier = token.decodeIdentifier();
-    Preconditions.checkState(identifier instanceof AbstractDelegationTokenIdentifier,
-        "Token %s of type: %s has an identifier which cannot be examined: %s",
-        token, token.getClass(), identifier);
-    AbstractDelegationTokenIdentifier id =
-        (AbstractDelegationTokenIdentifier) identifier;
-    return id.getMaxDate();
-  }
-
-  private static class TokenComparator
-      implements Comparator<Token<? extends TokenIdentifier>>, Serializable {
-    @Override
-    public int compare(Token<? extends TokenIdentifier> left,
-        Token<? extends TokenIdentifier> right) {
-      return left.getKind().toString().compareTo(right.getKind().toString());
-    }
-  }
-}
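
For reference, a minimal usage sketch of the CredentialUtils class deleted above (not part of this patch; the wrapper class and method names are placeholders):

    import java.nio.ByteBuffer;
    import java.util.Map;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.security.Credentials;
    import org.apache.slider.core.launch.CredentialUtils;

    public class CredentialSketch {
      public static ByteBuffer buildTokensForLaunch(Configuration conf,
          Map<String, String> env) throws Exception {
        // Pick up any tokens referenced by HADOOP_TOKEN_FILE_LOCATION or the
        // job credentials file; null means nothing was supplied.
        Credentials credentials = CredentialUtils.loadTokensFromEnvironment(env, conf);
        if (credentials == null) {
          credentials = new Credentials();
        }
        // On a secure cluster, add filesystem delegation tokens renewable by the RM.
        FileSystem fs = FileSystem.get(conf);
        CredentialUtils.addRMRenewableFSDelegationTokens(conf, fs, credentials);
        // Serialize for a ContainerLaunchContext; returns null if there are no tokens.
        return CredentialUtils.marshallCredentials(credentials);
      }
    }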

http://git-wip-us.apache.org/repos/asf/hadoop/blob/8b7fcb09/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/core/launch/JavaCommandLineBuilder.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/core/launch/JavaCommandLineBuilder.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/core/launch/JavaCommandLineBuilder.java
deleted file mode 100644
index b8aa4c6..0000000
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/core/launch/JavaCommandLineBuilder.java
+++ /dev/null
@@ -1,182 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.slider.core.launch;
-
-
-import com.google.common.base.Preconditions;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.yarn.api.ApplicationConstants;
-import org.apache.slider.common.tools.SliderUtils;
-import org.apache.slider.core.exceptions.BadConfigException;
-
-import java.util.Iterator;
-import java.util.Map;
-
-/**
- * Command line builder purely for the Java CLI.
- * Some of the <code>define</code> methods are designed to work with Hadoop tool and
- * Slider launcher applications.
- */
-public class JavaCommandLineBuilder extends CommandLineBuilder {
-
-  public JavaCommandLineBuilder() {
-    add(getJavaBinary());
-  }
-
-  /**
-   * Get the java binary. This is called in the constructor so don't try and
-   * do anything other than return a constant.
-   * @return the path to the Java binary
-   */
-  protected String getJavaBinary() {
-    return ApplicationConstants.Environment.JAVA_HOME.$$() + "/bin/java";
-  }
-
-  /**
-   * Set the size of the heap if a non-empty heap is passed in. 
-   * @param heap empty string or something like "128M" ,"1G" etc. The value is
-   * trimmed.
-   */
-  public void setJVMHeap(String heap) {
-    if (SliderUtils.isSet(heap)) {
-      add("-Xmx" + heap.trim());
-    }
-  }
-
-  /**
-   * Turn Java assertions on
-   */
-  public void enableJavaAssertions() {
-    add("-ea");
-    add("-esa");
-  }
-
-  /**
-   * Add a system property definition -must be used before setting the main entry point
-   * @param property
-   * @param value
-   */
-  public void sysprop(String property, String value) {
-    Preconditions.checkArgument(property != null, "null property name");
-    Preconditions.checkArgument(value != null, "null value");
-    add("-D" + property + "=" + value);
-  }
-  
-  public JavaCommandLineBuilder forceIPv4() {
-    sysprop("java.net.preferIPv4Stack", "true");
-    return this;
-  }
-  
-  public JavaCommandLineBuilder headless() {
-    sysprop("java.awt.headless", "true");
-    return this;
-  }
-
-  public boolean addConfOption(Configuration conf, String key) {
-    return defineIfSet(key, conf.get(key));
-  }
-
-  /**
-   * Add a varargs list of configuration parameters —if they are present
-   * @param conf configuration source
-   * @param keys keys
-   */
-  public void addConfOptions(Configuration conf, String... keys) {
-    for (String key : keys) {
-      addConfOption(conf, key);
-    }
-  }
-
-  /**
-   * Add all configuration options which match the prefix
-   * @param conf configuration
-   * @param prefix prefix, e.g {@code "slider."}
-   * @return the number of entries copied
-   */
-  public int addPrefixedConfOptions(Configuration conf, String prefix) {
-    int copied = 0;
-    for (Map.Entry<String, String> entry : conf) {
-      if (entry.getKey().startsWith(prefix)) {
-        define(entry.getKey(), entry.getValue());
-        copied++;
-      }
-    }
-    return copied;
-  }
-
-  /**
-   * Add a configuration option to the command line of the application
-   * @param conf configuration
-   * @param key key
-   * @param defVal default value
-   * @return the resolved configuration option
-   * @throws IllegalArgumentException if key is null or the looked up value
-   * is null (that is: the argument is missing and defVal was null).
-   */
-  public String addConfOptionToCLI(Configuration conf,
-      String key,
-      String defVal) {
-    Preconditions.checkArgument(key != null, "null key");
-    String val = conf.get(key, defVal);
-    define(key, val);
-    return val;
-  }
-
-  /**
-   * Add a <code>-D key=val</code> command to the CLI, mirroring the Hadoop generic options syntax
-   * @param key key
-   * @param val value
-   * @throws IllegalArgumentException if either argument is null
-   */
-  public void define(String key, String val) {
-    Preconditions.checkArgument(key != null, "null key");
-    Preconditions.checkArgument(val != null, "null value");
-    add("-D", key + "=" + val);
-  }
-
-  /**
-   * Add a <code>-D key=val</code> command to the CLI if <code>val</code>
-   * is not null
-   * @param key key
-   * @param val value
-   */
-  public boolean defineIfSet(String key, String val) {
-    Preconditions.checkArgument(key != null, "null key");
-    if (val != null) {
-      define(key, val);
-      return true;
-    } else {
-      return false;
-    }
-  }
-
-  /**
-   * Add a mandatory config option
-   * @param conf configuration
-   * @param key key
-   * @throws BadConfigException if the key is missing
-   */
-  public void addMandatoryConfOption(Configuration conf,
-      String key) throws BadConfigException {
-    if (!addConfOption(conf, key)) {
-      throw new BadConfigException("Missing configuration option: " + key);
-    }
-  }
-
-}
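
For reference, a sketch of how the deleted JavaCommandLineBuilder was typically driven (not part of this patch; the main class and config keys are illustrative, and add() plus the final command assembly come from the parent CommandLineBuilder, which is not shown in this diff):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.slider.core.exceptions.BadConfigException;
    import org.apache.slider.core.launch.JavaCommandLineBuilder;

    public class LaunchCommandSketch {
      public static JavaCommandLineBuilder amCommand(Configuration conf)
          throws BadConfigException {
        JavaCommandLineBuilder cli = new JavaCommandLineBuilder(); // starts with $JAVA_HOME/bin/java
        cli.setJVMHeap("256M");                  // ignored if the heap string is empty
        cli.forceIPv4().headless();              // common JVM system properties
        cli.sysprop("java.security.krb5.conf", "/etc/krb5.conf");
        cli.addConfOption(conf, "yarn.resourcemanager.address");   // added only if set
        cli.addMandatoryConfOption(conf, "fs.defaultFS");          // BadConfigException if missing
        cli.add("org.example.Main");             // hypothetical main class; add() is inherited
        return cli;
      }
    }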

http://git-wip-us.apache.org/repos/asf/hadoop/blob/8b7fcb09/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/core/launch/SerializedApplicationReport.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/core/launch/SerializedApplicationReport.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/core/launch/SerializedApplicationReport.java
deleted file mode 100644
index 8e0ef5a..0000000
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/core/launch/SerializedApplicationReport.java
+++ /dev/null
@@ -1,98 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.slider.core.launch;
-
-import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
-import org.apache.hadoop.yarn.api.records.ApplicationReport;
-import org.apache.hadoop.yarn.api.records.FinalApplicationStatus;
-import org.apache.slider.core.persist.ApplicationReportSerDeser;
-import org.codehaus.jackson.annotate.JsonIgnoreProperties;
-import org.codehaus.jackson.map.annotate.JsonSerialize;
-
-import java.io.IOException;
-
-/**
- * Serialized form of an application report which can be persisted
- * and then parsed. It can not be converted back into a
- * real YARN application report
- * 
- * Useful for testing
- */
-
-@JsonIgnoreProperties(ignoreUnknown = true)
-@JsonSerialize(include = JsonSerialize.Inclusion.NON_NULL)
-
-public class SerializedApplicationReport {
-
-  public String applicationId;
-  public String applicationAttemptId;
-  public String name;
-  public String applicationType;
-  public String user;
-  public String queue;
-  public String host;
-  public Integer rpcPort;
-  public String state;
-  public String diagnostics;
-  public String url;
-  /**
-   * This value is non-null only when a report is generated from a submission context.
-   * The YARN {@link ApplicationReport} structure does not propagate this value
-   * from the RM.
-   */
-  public Long submitTime;
-  public Long startTime;
-  public Long finishTime;
-  public String finalStatus;
-  public String origTrackingUrl;
-  public Float progress;
-  
-  public SerializedApplicationReport() {
-  }
-  
-  public SerializedApplicationReport(ApplicationReport report) {
-    this.applicationId = report.getApplicationId().toString();
-    ApplicationAttemptId attemptId = report.getCurrentApplicationAttemptId();
-    this.applicationAttemptId = attemptId != null ? attemptId.toString() : "N/A";
-    this.name = report.getName();
-    this.applicationType = report.getApplicationType();
-    this.user = report.getUser();
-    this.queue = report.getQueue();
-    this.host = report.getHost();
-    this.rpcPort = report.getRpcPort();
-    this.state = report.getYarnApplicationState().toString();
-    this.diagnostics = report.getDiagnostics();
-    this.startTime = report.getStartTime();
-    this.finishTime = report.getFinishTime();
-    FinalApplicationStatus appStatus = report.getFinalApplicationStatus();
-    this.finalStatus = appStatus == null ? "" : appStatus.toString();
-    this.progress = report.getProgress();
-    this.url = report.getTrackingUrl();
-    this.origTrackingUrl= report.getOriginalTrackingUrl();
-  }
-
-  @Override
-  public String toString() {
-    try {
-      return ApplicationReportSerDeser.toString(this);
-    } catch (IOException e) {
-      return super.toString();
-    }
-  }
-}
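
For reference, a small sketch of converting a live report into the deleted POJO form (not part of this patch; the wrapper class name is a placeholder):

    import org.apache.hadoop.yarn.api.records.ApplicationId;
    import org.apache.hadoop.yarn.client.api.YarnClient;
    import org.apache.slider.core.launch.SerializedApplicationReport;

    public class ReportSketch {
      public static void printReport(YarnClient yarnClient, ApplicationId appId)
          throws Exception {
        // Snapshot the live RM report into the persistable POJO form.
        SerializedApplicationReport serialized =
            new SerializedApplicationReport(yarnClient.getApplicationReport(appId));
        // toString() renders JSON via ApplicationReportSerDeser, falling back to
        // Object.toString() if serialization fails.
        System.out.println(serialized);
      }
    }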

http://git-wip-us.apache.org/repos/asf/hadoop/blob/8b7fcb09/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/core/main/ExitCodeProvider.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/core/main/ExitCodeProvider.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/core/main/ExitCodeProvider.java
deleted file mode 100644
index 503b9b9..0000000
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/core/main/ExitCodeProvider.java
+++ /dev/null
@@ -1,32 +0,0 @@
-/*
- *  Licensed to the Apache Software Foundation (ASF) under one
- *  or more contributor license agreements.  See the NOTICE file
- *  distributed with this work for additional information
- *  regarding copyright ownership.  The ASF licenses this file
- *  to you under the Apache License, Version 2.0 (the
- *  "License"); you may not use this file except in compliance
- *  with the License.  You may obtain a copy of the License at
- *
- *       http://www.apache.org/licenses/LICENSE-2.0
- *
- *  Unless required by applicable law or agreed to in writing, software
- *  distributed under the License is distributed on an "AS IS" BASIS,
- *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- *  See the License for the specific language governing permissions and
- *  limitations under the License.
- */
-
-package org.apache.slider.core.main;
-
-/**
- * Get the exit code of an exception. Making it an interface allows
- * us to retrofit exit codes onto existing classes
- */
-public interface ExitCodeProvider {
-
-  /**
-   * Method to get the exit code
-   * @return the exit code
-   */
-  int  getExitCode();
-}
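
For reference, a hypothetical example of the retrofit the javadoc above describes, attaching an exit code to an existing exception type (not part of this patch):

    import java.io.FileNotFoundException;
    import org.apache.slider.core.main.ExitCodeProvider;

    // Hypothetical exception type: an existing checked exception gains an exit code.
    public class MissingClusterException extends FileNotFoundException
        implements ExitCodeProvider {

      public MissingClusterException(String message) {
        super(message);
      }

      @Override
      public int getExitCode() {
        return 44; // the CLI analogue of HTTP 404, per LauncherExitCodes.EXIT_NOT_FOUND below
      }
    }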

http://git-wip-us.apache.org/repos/asf/hadoop/blob/8b7fcb09/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/core/main/IrqHandler.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/core/main/IrqHandler.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/core/main/IrqHandler.java
deleted file mode 100644
index 42442d1..0000000
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/core/main/IrqHandler.java
+++ /dev/null
@@ -1,103 +0,0 @@
-/*
- *  Licensed to the Apache Software Foundation (ASF) under one
- *  or more contributor license agreements.  See the NOTICE file
- *  distributed with this work for additional information
- *  regarding copyright ownership.  The ASF licenses this file
- *  to you under the Apache License, Version 2.0 (the
- *  "License"); you may not use this file except in compliance
- *  with the License.  You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- *  Unless required by applicable law or agreed to in writing, software
- *  distributed under the License is distributed on an "AS IS" BASIS,
- *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- *  See the License for the specific language governing permissions and
- *  limitations under the License.
- */
-
-package org.apache.slider.core.main;
-
-import sun.misc.Signal;
-import sun.misc.SignalHandler;
-
-import java.io.IOException;
-
-/**
- * This class bundles up all the compiler warnings about abuse of sun.misc
- * interrupt handling code
- * into one place.
- */
-@SuppressWarnings("UseOfSunClasses")
-public final class IrqHandler implements SignalHandler {
-
-  public static final String CONTROL_C = "INT";
-  public static final String SIGTERM = "TERM";
-
-  private final String name;
-  private final Interrupted handler;
-
-  /**
-   * Create an IRQ handler bound to the specific interrupt
-   * @param name signal name
-   * @param handler handler
-   * @throws IOException
-   */
-  public IrqHandler(String name, Interrupted handler) throws IOException {
-    this.handler = handler;
-    this.name = name;
-    try {
-      Signal.handle(new Signal(name), this);
-    } catch (IllegalArgumentException e) {
-      throw new IOException(
-        "Could not set handler for signal \"" + name + "\"."
-        + "This can happen if the JVM has the -Xrs set.",
-        e);
-    }
-  }
-
-  @Override
-  public String toString() {
-    return "IrqHandler for signal " + name ;
-  }
-
-  /**
-   * Handler for the JVM API for signal handling
-   * @param signal signal raised
-   */
-//  @Override
-  public void handle(Signal signal) {
-    InterruptData data = new InterruptData(signal.getName(), signal.getNumber());
-    handler.interrupted(data);
-  }
-
-  /**
-   * Interrupt data to pass on.
-   */
-  public static class InterruptData {
-    public final String name;
-    public final int number;
-
-    public InterruptData(String name, int number) {
-      this.name = name;
-      this.number = number;
-    }
-
-    @Override
-    public String toString() {
-      return "signal " + name + '(' + number + ')';
-    }
-  }
-
-  /**
-   * Callback on interruption
-   */
-  public interface Interrupted {
-
-    /**
-     * Handle an interrupt
-     * @param interruptData data
-     */
-    void interrupted(InterruptData interruptData);
-  }
-}
\ No newline at end of file
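
For reference, a minimal sketch of registering the deleted IrqHandler for INT and TERM (not part of this patch; the wrapper class name is a placeholder):

    import java.io.IOException;
    import org.apache.slider.core.main.IrqHandler;

    public class SignalSketch {
      public static void registerHandlers() throws IOException {
        // Interrupted is a single-method interface, so a lambda works as the callback.
        IrqHandler.Interrupted onSignal = interruptData ->
            System.err.println("Shutting down on " + interruptData);
        // The constructor registers the handler as a side effect and throws
        // IOException if the JVM was started with -Xrs.
        new IrqHandler(IrqHandler.CONTROL_C, onSignal);
        new IrqHandler(IrqHandler.SIGTERM, onSignal);
      }
    }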

http://git-wip-us.apache.org/repos/asf/hadoop/blob/8b7fcb09/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/core/main/LauncherExitCodes.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/core/main/LauncherExitCodes.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/core/main/LauncherExitCodes.java
deleted file mode 100644
index 83e89f0..0000000
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/core/main/LauncherExitCodes.java
+++ /dev/null
@@ -1,196 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- *  or more contributor license agreements.  See the NOTICE file
- *  distributed with this work for additional information
- *  regarding copyright ownership.  The ASF licenses this file
- *  to you under the Apache License, Version 2.0 (the
- *  "License"); you may not use this file except in compliance
- *  with the License.  You may obtain a copy of the License at
- *
- *       http://www.apache.org/licenses/LICENSE-2.0
- *
- *  Unless required by applicable law or agreed to in writing, software
- *  distributed under the License is distributed on an "AS IS" BASIS,
- *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- *  See the License for the specific language governing permissions and
- *  limitations under the License.
- */
-
-package org.apache.slider.core.main;
-
-/*
- * Common Exit codes
- * <p>
- * Exit codes from 64 up are application specific.
- * <p>
- * Many of the exit codes are designed to resemble HTTP error codes,
- * squashed into a single byte. e.g 44 , "not found" is the equivalent
- * of 404
- * <pre>
- *    0-10: general command issues
- *   30-39: equivalent to the 3XX responses, where those responses are
- *          considered errors by the application.
- *   40-49: request-related errors
- *   50-59: server-side problems. These may be triggered by the request.
- *   64-  : application specific error codes
- * </pre>
- */
-public interface LauncherExitCodes {
-  
-  /**
-   * 0: success
-   */
-  int EXIT_SUCCESS                    =  0;
-
-  /**
-   * -1: generic "false" response. The operation worked but
-   * the result was not true
-   */
-  int EXIT_FALSE                      = -1;
-
-  /**
-   * Exit code when a client requested service termination: {@value}
-   */
-  int EXIT_CLIENT_INITIATED_SHUTDOWN  =  1;
-
-  /**
-   * Exit code when targets could not be launched: {@value}
-   */
-  int EXIT_TASK_LAUNCH_FAILURE        =  2;
-
-  /**
-   * Exit code when a control-C, kill -3, signal was picked up: {@value}
-   */
-  int EXIT_INTERRUPTED                = 3;
-
-  /**
-   * Exit code when a usage message was printed: {@value}
-   */
-  int EXIT_USAGE                      = 4;
-
-  /**
-   * Exit code when something happened but we can't be specific: {@value}
-   */
-  int EXIT_OTHER_FAILURE               = 5;
-
-  /**
-   * Exit code on connectivity problems: {@value}
-   */
-  int EXIT_MOVED                      = 31;
-  
-  /**
-   * found: {@value}.
-   * <p>
-   * This is low value as in HTTP it is normally a success/redirect;
-   * whereas on the command line 0 is the sole success code.
-   * <p>
-   * <code>302 Found</code>
-   */
-  int EXIT_FOUND                      = 32;
-
-  /**
-   * Exit code on a request where the destination has not changed
-   * and (somehow) the command specified that this is an error.
-   * That is, this exit code is somehow different from a "success"
-   * : {@value}
-   * <p>
-   * <code>304 Not Modified </code>
-  */
-  int EXIT_NOT_MODIFIED               = 34;
-
-  /**
-   * Exit code when the command line doesn't parse: {@value}, or
-   * when it is otherwise invalid.
-   * <p>
-   * <code>400 BAD REQUEST</code>
-   */
-  int EXIT_COMMAND_ARGUMENT_ERROR     = 40;
-
-  /**
-   * The request requires user authentication: {@value}
-   * <p>
-   * <code>401 Unauthorized</code>
-   */
-  int EXIT_UNAUTHORIZED               = 41;
-  
-  /**
-   * Forbidden action: {@value}
-   * <p>
-   * <code>403: Forbidden</code>
-   */
-  int EXIT_FORBIDDEN                  = 43;
-  
-  /**
-   * Something was not found: {@value}
-   * <p>
-   * <code>404: NOT FOUND</code>
-   */
-  int EXIT_NOT_FOUND                  = 44;
-
-  /**
-   * The operation is not allowed: {@value}
-   * <p>
-   * <code>405: NOT ALLOWED</code>
-   */
-  int EXIT_OPERATION_NOT_ALLOWED       = 45;
-
-  /**
-   * The command is somehow not acceptable: {@value}
-   * <p>
-   * <code>406: NOT ACCEPTABLE</code>
-   */
-  int EXIT_NOT_ACCEPTABLE            = 46;
-
-  /**
-   * Exit code on connectivity problems: {@value}
-   * <p>
-   * <code>408: Request Timeout</code>
-   */
-  int EXIT_CONNECTIVITY_PROBLEM       = 48;
-
-  /**
-   * The request could not be completed due to a conflict with the current
-   * state of the resource.  {@value}
-   * <p>
-   * <code>409: conflict</code>
-   */
-  int EXIT_CONFLICT                   = 49;
-
-  /**
-   * internal error: {@value}
-   * <p>
-   * <code>500 Internal Server Error</code>
-   */
-  int EXIT_INTERNAL_ERROR             = 50;
-
-  /**
-   * Unimplemented feature: {@value}
-   * <p>
-   * <code>501: Not Implemented</code>
-   */
-  int EXIT_UNIMPLEMENTED              = 51;
-
-  /**
-   * Service Unavailable; it may be available later: {@value}
-   * <p>
-   * <code>503 Service Unavailable</code>
-   */
-  int EXIT_SERVICE_UNAVAILABLE        = 53;
-
-  /**
-   * The application does not support, or refuses to support this version: {@value}.
-   * If raised, this is expected to be raised server-side and likely due
-   * to client/server version incompatibilities.
-   * <p>
-   * <code> 505: Version Not Supported</code>
-   */
-  int EXIT_UNSUPPORTED_VERSION        = 55;
-
-  /**
-   * Exit code when an exception was thrown from the service: {@value}
-   * <p>
-   * <code>5XX</code>
-   */
-  int EXIT_EXCEPTION_THROWN           = 56;
-
-}
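
For reference, a hypothetical extension following the numbering convention described above, with application-specific codes starting at 64 (not part of this patch):

    import org.apache.slider.core.main.LauncherExitCodes;

    // Hypothetical application-specific codes; values below 64 keep the shared,
    // HTTP-inspired meanings defined above.
    public interface MyAppExitCodes extends LauncherExitCodes {
      int EXIT_CLUSTER_ALREADY_EXISTS = 64;
      int EXIT_CLUSTER_IN_USE         = 65;
    }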

http://git-wip-us.apache.org/repos/asf/hadoop/blob/8b7fcb09/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/core/main/RunService.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/core/main/RunService.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/core/main/RunService.java
deleted file mode 100644
index c3a1d0e..0000000
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/core/main/RunService.java
+++ /dev/null
@@ -1,62 +0,0 @@
-/*
- *  Licensed to the Apache Software Foundation (ASF) under one
- *  or more contributor license agreements.  See the NOTICE file
- *  distributed with this work for additional information
- *  regarding copyright ownership.  The ASF licenses this file
- *  to you under the Apache License, Version 2.0 (the
- *  "License"); you may not use this file except in compliance
- *  with the License.  You may obtain a copy of the License at
- *
- *       http://www.apache.org/licenses/LICENSE-2.0
- *
- *  Unless required by applicable law or agreed to in writing, software
- *  distributed under the License is distributed on an "AS IS" BASIS,
- *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- *  See the License for the specific language governing permissions and
- *  limitations under the License.
- */
-
-package org.apache.slider.core.main;
-
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.service.Service;
-
-/**
- * An interface which services can implement to have their
- * execution managed by the ServiceLauncher.
- * The command line options will be passed down before the 
- * {@link Service#init(Configuration)} operation is invoked via an
- * invocation of {@link RunService#bindArgs(Configuration, String...)}
- * After the service has been successfully started via {@link Service#start()}
- * the {@link RunService#runService()} method is called to execute the 
- * service. When this method returns, the service launcher will exit, using
- * the return code from the method as its exit option.
- */
-public interface RunService extends Service {
-
-  /**
-   * Propagate the command line arguments.
-   * This method is called before {@link Service#init(Configuration)};
-   * the configuration that is returned from this operation
-   * is the one that is passed on to the init operation.
-   * This permits implementations to change the configuration before
-   * the init operation.
-   * 
-   *
-   * @param config the initial configuration build up by the
-   * service launcher.
-   * @param args argument list: the arguments passed on the command line
-   * after any launcher-specific commands have been stripped.
-   * @return the configuration to init the service with. This MUST NOT be null.
-   * Recommended: pass down the config parameter with any changes
-   * @throws Exception any problem
-   */
-  Configuration bindArgs(Configuration config, String... args) throws Exception;
-  
-  /**
-   * Run a service. This is called after {@link Service#start()}
-   * @return the exit code
-   * @throws Throwable any exception to report
-   */
-  int runService() throws Throwable ;
-}
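
For reference, a hypothetical minimal implementation of the deleted RunService contract (not part of this patch):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.service.AbstractService;
    import org.apache.slider.core.main.LauncherExitCodes;
    import org.apache.slider.core.main.RunService;

    // Hypothetical service: bindArgs() sees the CLI before init(), and the value
    // returned by runService() becomes the launcher's exit code.
    public class EchoService extends AbstractService implements RunService {

      private String[] arguments = new String[0];

      public EchoService() {
        super("EchoService");
      }

      @Override
      public Configuration bindArgs(Configuration config, String... args) {
        this.arguments = args;
        return config;    // must not be null
      }

      @Override
      public int runService() {
        for (String arg : arguments) {
          System.out.println(arg);
        }
        return LauncherExitCodes.EXIT_SUCCESS;
      }
    }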

http://git-wip-us.apache.org/repos/asf/hadoop/blob/8b7fcb09/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/core/main/ServiceLaunchException.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/core/main/ServiceLaunchException.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/core/main/ServiceLaunchException.java
deleted file mode 100644
index 27813b7..0000000
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/core/main/ServiceLaunchException.java
+++ /dev/null
@@ -1,73 +0,0 @@
-/*
- *  Licensed to the Apache Software Foundation (ASF) under one
- *  or more contributor license agreements.  See the NOTICE file
- *  distributed with this work for additional information
- *  regarding copyright ownership.  The ASF licenses this file
- *  to you under the Apache License, Version 2.0 (the
- *  "License"); you may not use this file except in compliance
- *  with the License.  You may obtain a copy of the License at
- *
- *       http://www.apache.org/licenses/LICENSE-2.0
- *
- *  Unless required by applicable law or agreed to in writing, software
- *  distributed under the License is distributed on an "AS IS" BASIS,
- *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- *  See the License for the specific language governing permissions and
- *  limitations under the License.
- */
-
-package org.apache.slider.core.main;
-
-
-import org.apache.hadoop.yarn.exceptions.YarnException;
-
-/**
- * A service launch exception that includes an exit code;
- * when caught by the ServiceLauncher, it will convert that
- * into a process exit code.
- */
-public class ServiceLaunchException extends YarnException
-  implements ExitCodeProvider, LauncherExitCodes {
-
-  private final int exitCode;
-
-  /**
-   * Create an exception with the specific exit code
-   * @param exitCode exit code
-   * @param cause cause of the exception
-   */
-  public ServiceLaunchException(int exitCode, Throwable cause) {
-    super(cause);
-    this.exitCode = exitCode;
-  }
-
-  /**
-   * Create an exception with the specific exit code and text
-   * @param exitCode exit code
-   * @param message message to use in exception
-   */
-  public ServiceLaunchException(int exitCode, String message) {
-    super(message);
-    this.exitCode = exitCode;
-  }
-
-  /**
-   * Create an exception with the specific exit code, text and cause
-   * @param exitCode exit code
-   * @param message message to use in exception
-   * @param cause cause of the exception
-   */
-  public ServiceLaunchException(int exitCode, String message, Throwable cause) {
-    super(message, cause);
-    this.exitCode = exitCode;
-  }
-
-  /**
-   * Get the exit code
-   * @return the exit code
-   */
-  @Override
-  public int getExitCode() {
-    return exitCode;
-  }
-}
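
For reference, a hypothetical helper showing how the deleted exception carries an exit code back to the launcher (not part of this patch):

    import org.apache.slider.core.main.LauncherExitCodes;
    import org.apache.slider.core.main.ServiceLaunchException;

    public class ArgumentChecks {
      // Hypothetical validation helper: when the exception propagates, the
      // ServiceLauncher maps the embedded exit code onto the process exit status.
      public static void requireClusterName(String[] args) throws ServiceLaunchException {
        if (args.length == 0) {
          throw new ServiceLaunchException(
              LauncherExitCodes.EXIT_COMMAND_ARGUMENT_ERROR,
              "missing cluster name argument");
        }
      }
    }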

http://git-wip-us.apache.org/repos/asf/hadoop/blob/8b7fcb09/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/core/main/ServiceLauncher.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/core/main/ServiceLauncher.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/core/main/ServiceLauncher.java
deleted file mode 100644
index f192ec8..0000000
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/core/main/ServiceLauncher.java
+++ /dev/null
@@ -1,642 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.slider.core.main;
-
-import com.google.common.base.Preconditions;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.net.NetUtils;
-import org.apache.hadoop.service.Service;
-import org.apache.hadoop.util.ExitUtil;
-import org.apache.hadoop.util.ShutdownHookManager;
-import org.apache.hadoop.util.VersionInfo;
-import org.apache.hadoop.yarn.YarnUncaughtExceptionHandler;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.io.File;
-import java.io.IOException;
-import java.lang.reflect.InvocationTargetException;
-import java.net.MalformedURLException;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.List;
-import java.util.ListIterator;
-import java.util.concurrent.atomic.AtomicBoolean;
-
-/**
- * A class to launch any service by name.
- * 
- * It's designed to be subclassed for custom entry points.
- * 
- * 
- * Workflow
- * <ol>
- *   <li>An instance of the class is created</li>
- *   <li>If it implements RunService, it is given the binding args off the CLI</li>
- *   <li>Its service.init() and service.start() methods are called.</li>
- *   <li>If it implements RunService, runService() is called and its return
- *   code used as the exit code.</li>
- *   <li>Otherwise: it waits for the service to stop, assuming in its start() method
- *   it begins work</li>
- *   <li>If an exception returned an exit code, that becomes the exit code of the
- *   command.</li>
- * </ol>
- * Error and warning messages are logged to stderr. Why? If the classpath
- * is wrong & logger configurations not on it, then no error messages by
- * the started app will be seen and the caller is left trying to debug
- * using exit codes. 
- * 
- */
-@SuppressWarnings("UseOfSystemOutOrSystemErr")
-public class ServiceLauncher<S extends Service>
-  implements LauncherExitCodes, IrqHandler.Interrupted,
-    Thread.UncaughtExceptionHandler {
-  private static final Logger LOG = LoggerFactory.getLogger(
-      ServiceLauncher.class);
-
-  protected static final int PRIORITY = 30;
-
-  public static final String NAME = "ServiceLauncher";
-
-  /**
-   * Name of the "--conf" argument. 
-   */
-  public static final String ARG_CONF = "--conf";
-
-  public static final String USAGE_MESSAGE =
-      "Usage: " + NAME + " classname [" + ARG_CONF +
-      "<conf file>] <service arguments> | ";
-  static final int SHUTDOWN_TIME_ON_INTERRUPT = 30 * 1000;
-
-  private volatile S service;
-  private int serviceExitCode;
-  @SuppressWarnings("MismatchedQueryAndUpdateOfCollection")
-  private final List<IrqHandler> interruptHandlers = new ArrayList<>(1);
-  private Configuration configuration;
-  private String serviceClassName;
-  private static AtomicBoolean signalAlreadyReceived = new AtomicBoolean(false);
-
-
-  /**
-   * Create an instance of the launcher
-   * @param serviceClassName classname of the service
-   */
-  public ServiceLauncher(String serviceClassName) {
-    this.serviceClassName = serviceClassName;
-  }
-
-  /**
-   * Get the service. Null until and unless
-   * {@link #launchService(Configuration, String[], boolean)} has completed
-   * @return the service
-   */
-  public S getService() {
-    return service;
-  }
-
-  /**
-   * Get the configuration constructed from the command line arguments
-   * @return the configuration used to create the service
-   */
-  public Configuration getConfiguration() {
-    return configuration;
-  }
-
-  /**
-   * The exit code from a successful service execution
-   * @return the exit code. 
-   */
-  public int getServiceExitCode() {
-    return serviceExitCode;
-  }
-
-  @Override
-  public String toString() {
-    return "ServiceLauncher for " + serviceClassName;
-  }
-
-  /**
-   * Launch the service, by creating it, initing it, starting it and then
-   * maybe running it. {@link RunService#bindArgs(Configuration, String...)} is invoked
-   * on the service between creation and init.
-   *
-   * All exceptions that occur are propagated upwards.
-   *
-   * If the method returns a status code, it means that it got as far as starting
-   * the service, and if it implements {@link RunService}, that the 
-   * method {@link RunService#runService()} has completed. 
-   *
-   * At this point, the service is returned by {@link #getService()}.
-   *
-   * @param conf configuration
-   * @param processedArgs arguments after the configuration parameters
-   * have been stripped out.
-   * @param addProcessHooks should process failure handlers be added to
-   * terminate this service on shutdown. Tests should set this to false.
-   * @throws ClassNotFoundException classname not on the classpath
-   * @throws IllegalAccessException not allowed at the class
-   * @throws InstantiationException not allowed to instantiate it
-   * @throws InterruptedException thread interrupted
-   * @throws Throwable any other failure
-   */
-  public int launchService(Configuration conf,
-      String[] processedArgs,
-      boolean addProcessHooks)
-    throws Throwable {
-
-    instantiateService(conf);
-
-    // add any process shutdown hooks
-    if (addProcessHooks) {
-      ServiceShutdownHook shutdownHook = new ServiceShutdownHook(service);
-      ShutdownHookManager.get().addShutdownHook(shutdownHook, PRIORITY);
-    }
-    RunService runService = null;
-
-    if (service instanceof RunService) {
-      //if its a runService, pass in the conf and arguments before init)
-      runService = (RunService) service;
-      configuration = runService.bindArgs(configuration, processedArgs);
-      Preconditions.checkNotNull(configuration,
-          "null configuration returned by bindArgs()");
-    }
-
-    //some class constructors init; here this is picked up on.
-    if (!service.isInState(Service.STATE.INITED)) {
-      service.init(configuration);
-    }
-    service.start();
-    int exitCode = EXIT_SUCCESS;
-    if (runService != null) {
-      //assume that runnable services are meant to run from here
-      exitCode = runService.runService();
-      LOG.debug("Service exited with exit code {}", exitCode);
-
-    } else {
-      //run the service until it stops or an interrupt happens on a different thread.
-      LOG.debug("waiting for service threads to terminate");
-      service.waitForServiceToStop(0);
-    }
-    //exit
-    serviceExitCode = exitCode;
-    return serviceExitCode;
-  }
-
-  /**
-   * Instantiate the service defined in <code>serviceClassName</code>
-   * . Sets the <code>configuration</code> field
-   * to the configuration, and <code>service</code> to the service.
-   *
-   * @param conf configuration to use
-   * @throws ClassNotFoundException classname not on the classpath
-   * @throws IllegalAccessException not allowed at the class
-   * @throws InstantiationException not allowed to instantiate it
-   */
-  @SuppressWarnings("unchecked")
-  public Service instantiateService(Configuration conf)
-      throws ClassNotFoundException, InstantiationException, IllegalAccessException,
-      ExitUtil.ExitException, NoSuchMethodException, InvocationTargetException {
-    Preconditions.checkArgument(conf != null, "null conf");
-    configuration = conf;
-
-    //Instantiate the class -this requires the service to have a public
-    // zero-argument constructor
-    Class<?> serviceClass =
-        this.getClass().getClassLoader().loadClass(serviceClassName);
-    Object instance = serviceClass.getConstructor().newInstance();
-    if (!(instance instanceof Service)) {
-      //not a service
-      throw new ExitUtil.ExitException(EXIT_COMMAND_ARGUMENT_ERROR,
-          "Not a Service class: " + serviceClassName);
-    }
-
-    service = (S) instance;
-    return service;
-  }
-
-  /**
-   * Register this class as the handler for the control-C interrupt.
-   * Can be overridden for testing.
-   */
-  protected void registerInterruptHandler() {
-    try {
-      interruptHandlers.add(new IrqHandler(IrqHandler.CONTROL_C, this));
-      interruptHandlers.add(new IrqHandler(IrqHandler.SIGTERM, this));
-    } catch (IOException e) {
-      error("Signal handler setup failed : {}" + e, e);
-    }
-  }
-
-  /**
-   * The service has been interrupted -try to shut down the service.
-   * Give the service time to do this before the exit operation is called 
-   * @param interruptData the interrupted data.
-   */
-  @Override
-  public void interrupted(IrqHandler.InterruptData interruptData) {
-    String message = "Service interrupted by " + interruptData.toString();
-    warn(message);
-    if (!signalAlreadyReceived.compareAndSet(false, true)) {
-      warn("Repeated interrupt: escalating to a JVM halt");
-      // signal already received. On a second request to a hard JVM
-      // halt and so bypass any blocking shutdown hooks.
-      ExitUtil.halt(EXIT_INTERRUPTED, message);
-    }
-    int shutdownTimeMillis = SHUTDOWN_TIME_ON_INTERRUPT;
-    //start an async shutdown thread with a timeout
-    ServiceForcedShutdown forcedShutdown =
-      new ServiceForcedShutdown(shutdownTimeMillis);
-    Thread thread = new Thread(forcedShutdown);
-    thread.setDaemon(true);
-    thread.start();
-    //wait for that thread to finish
-    try {
-      thread.join(shutdownTimeMillis);
-    } catch (InterruptedException ignored) {
-      //ignored
-    }
-    if (!forcedShutdown.isServiceStopped()) {
-      warn("Service did not shut down in time");
-    }
-    exit(EXIT_INTERRUPTED, message);
-  }
-
-  /**
-   * Uncaught exception handler.
-   * If an error is raised: shutdown
-   * The state of the system is unknown at this point -attempting
-   * a clean shutdown is dangerous. Instead: exit
-   * @param thread thread that failed
-   * @param exception exception
-   */
-  @Override
-  public void uncaughtException(Thread thread, Throwable exception) {
-    if (ShutdownHookManager.get().isShutdownInProgress()) {
-      LOG.error("Thread {} threw an error during shutdown: {}.",
-          thread.toString(),
-          exception,
-          exception);
-    } else if (exception instanceof Error) {
-      try {
-        LOG.error("Thread {} threw an error: {}. Shutting down",
-            thread.toString(),
-            exception,
-            exception);
-      } catch (Throwable err) {
-        // Don't let a logging problem prevent the exit.
-      }
-      if (exception instanceof OutOfMemoryError) {
-        // After an OutOfMemoryError the JVM's behavior is undefined, so don't
-        // even try to clean up or we can get stuck on shutdown.
-        try {
-          System.err.println("Halting due to Out Of Memory Error...");
-        } catch (Throwable err) {
-          // Again, don't let logging problems prevent the halt.
-        }
-        ExitUtil.halt(EXIT_EXCEPTION_THROWN);
-      } else {
-        // error other than OutOfMemory
-        exit(convertToExitException(exception));
-      }
-    } else {
-      // simple exception in a thread. There's a policy decision here:
-      // terminate the service vs. keep going after a thread has failed
-      LOG.error("Thread {} threw an exception: {}",
-          thread.toString(),
-          exception,
-          exception);
-    }
-  }
-
-  /**
-   * Print a warning: currently this goes to stderr.
-   * @param text the warning text
-   */
-  protected void warn(String text) {
-    System.err.println(text);
-  }
-
-  /**
-   * Report an error. The message is printed to stderr; the exception
-   * is logged via the current logger.
-   * @param message message for the user
-   * @param thrown the exception thrown
-   */
-  protected void error(String message, Throwable thrown) {
-    String text = "Exception: " + message;
-    warn(text);
-    LOG.error(text, thrown);
-  }
-
-  /**
-   * Exit with the given exit code and message.
-   * This method can be overridden for testing, throwing an
-   * exception instead. Any subclassed method MUST raise an
-   * <code>ExitUtil.ExitException</code> instance.
-   * The service launcher code assumes that after this method is invoked,
-   * no other code in the same method is called.
-   * @param exitCode code to exit with
-   * @param message message to print on exit
-   */
-  protected void exit(int exitCode, String message) {
-    ExitUtil.terminate(exitCode, message);
-  }
-
-  /**
-   * Exit from an exception. This can be overridden for testing.
-   * @param ee exit exception
-   */
-  protected void exit(ExitUtil.ExitException ee) {
-    ExitUtil.terminate(ee.status, ee);
-  }
-
-  /**
-   * Get the service name via {@link Service#getName()}.
-   * If the service is not instantiated, the classname is returned instead.
-   * @return the service name
-   */
-  public String getServiceName() {
-    Service s = service;
-    String name = null;
-    if (s != null) {
-      try {
-        name = s.getName();
-      } catch (Exception ignored) {
-        // ignored
-      }
-    }
-    if (name != null) {
-      return "service " + name;
-    } else {
-      return "service classname " + serviceClassName;
-    }
-  }
-
-  /**
-   * Parse the command line, building a configuration from it, then
-   * launch the service and wait for it to finish. Finally, exit,
-   * passing the outcome to the #exit(ExitUtil.ExitException) method.
-   * @param args arguments to the service. arg[0] is
-   * assumed to be the service classname and is automatically stripped out.
-   */
-  public void launchServiceAndExit(List<String> args) {
-
-    registerInterruptHandler();
-    //Currently the config is just the default
-    Configuration conf = new Configuration();
-    String[] processedArgs = extractConfigurationArgs(conf, args);
-    ExitUtil.ExitException ee = launchServiceRobustly(conf, processedArgs);
-    System.out.flush();
-    System.err.flush();
-    exit(ee);
-  }
-
-  /**
-   * Extract the configuration arguments and apply them to the configuration,
-   * building an array of processed arguments to hand down to the service.
-   *
-   * @param conf configuration to update
-   * @param args main arguments. args[0] is assumed to be the service
-   * classname and is skipped
-   * @return the processed list.
-   */
-  public static String[] extractConfigurationArgs(Configuration conf,
-                                                  List<String> args) {
-
-    //build the list of processed arguments
-    int argCount = args.size();
-    if (argCount <= 1 ) {
-      return new String[0];
-    }
-    List<String> argsList = new ArrayList<String>(argCount);
-    ListIterator<String> arguments = args.listIterator();
-    //skip that first entry
-    arguments.next();
-    while (arguments.hasNext()) {
-      String arg = arguments.next();
-      if (arg.equals(ARG_CONF)) {
-        //the argument is a --conf file tuple: extract the path and load
-        //it in as a configuration resource.
-
-        //advance to the configuration file argument
-        if (!arguments.hasNext()) {
-          //ran off the end of the argument list
-          exitWithMessage(EXIT_COMMAND_ARGUMENT_ERROR,
-              ARG_CONF + ": missing configuration file");
-        }
-        File file = new File(arguments.next());
-        if (!file.exists()) {
-          exitWithMessage(EXIT_COMMAND_ARGUMENT_ERROR,
-              ARG_CONF + ": configuration file not found: " + file);
-        }
-        try {
-          conf.addResource(file.toURI().toURL());
-        } catch (MalformedURLException e) {
-          LOG.debug("File {} cannot be converted to URL", file, e);
-          exitWithMessage(EXIT_COMMAND_ARGUMENT_ERROR,
-              ARG_CONF + ": configuration file path invalid: " + file);
-        }
-      } else {
-        argsList.add(arg);
-      }
-    }
-    String[] processedArgs = new String[argsList.size()];
-    argsList.toArray(processedArgs);
-    return processedArgs;
-  }
-
-  /**
-   * Launch a service catching all exceptions and downgrading them to exit codes
-   * after logging.
-   * @param conf configuration to use
-   * @param processedArgs command line after the launcher-specific arguments have
-   * been stripped out
-   * @return an exit exception, which will have a status code of 0 if it worked
-   */
-  public ExitUtil.ExitException launchServiceRobustly(Configuration conf,
-                                   String[] processedArgs) {
-    ExitUtil.ExitException exitException;
-    try {
-      int exitCode = launchService(conf, processedArgs, true);
-      if (service != null) {
-        Throwable failure = service.getFailureCause();
-        if (failure != null) {
-          //the service exited with a failure.
-          //check what state it is in
-          Service.STATE failureState = service.getFailureState();
-          if (failureState == Service.STATE.STOPPED) {
-            //the failure occurred during shutdown, not important enough to bother
-            //the user as it may just scare them
-            LOG.debug("Failure during shutdown:{} ", failure, failure);
-          } else {
-            //throw it for the catch handlers to deal with
-            throw failure;
-          }
-        }
-      }
-      exitException = new ExitUtil.ExitException(exitCode,
-                                     "In " + serviceClassName);
-      // either the service succeeded, or an error was raised during shutdown,
-      // which we don't worry that much about
-    } catch (ExitUtil.ExitException ee) {
-      exitException = ee;
-    } catch (Throwable thrown) {
-      exitException = convertToExitException(thrown);
-    }
-    return exitException;
-  }
-
-  /**
-   * Convert the exception to one that can be handed off to ExitUtil;
-   * if it is of the right type it is passed through as is. If not, a
-   * new exception with the exit code {@link #EXIT_EXCEPTION_THROWN}
-   * is created, with the argument <code>thrown</code> as the inner cause
-   * @param thrown the exception thrown
-   * @return an exception to terminate the process with
-   */
-  protected ExitUtil.ExitException convertToExitException(Throwable thrown) {
-    ExitUtil.ExitException exitException;
-    int exitCode;
-    String message = thrown.getMessage();
-    if (message == null) {
-      message = thrown.toString();
-    }
-    if (thrown instanceof ExitCodeProvider) {
-      exitCode = ((ExitCodeProvider) thrown).getExitCode();
-      if (LOG.isDebugEnabled()) {
-        LOG.debug("While running {}: {}", getServiceName(), message, thrown);
-      }
-      LOG.error(message);
-    } else {
-      // not any of the service launcher exceptions -assume something worse
-      error(message, thrown);
-      exitCode = EXIT_EXCEPTION_THROWN;
-    }
-    exitException = new ExitUtil.ExitException(exitCode, message);
-    exitException.initCause(thrown);
-    return exitException;
-  }
-
-
-  /**
-   * Build a log message for starting up and shutting down. 
-   * This was grabbed from the ToolRunner code.
-   * @param classname the class of the server
-   * @param args arguments
-   */
-  public static String startupShutdownMessage(String classname,
-                                              List<String> args) {
-    final String hostname = NetUtils.getHostname();
-
-    return toStartupShutdownString("STARTUP_MSG: ", new String[]{
-      "Starting " + classname,
-      "  host = " + hostname,
-      "  args = " + args,
-      "  version = " + VersionInfo.getVersion(),
-      "  classpath = " + System.getProperty("java.class.path"),
-      "  build = " + VersionInfo.getUrl() + " -r "
-      + VersionInfo.getRevision()
-      + "; compiled by '" + VersionInfo.getUser()
-      + "' on " + VersionInfo.getDate(),
-      "  java = " + System.getProperty("java.version")
-    });
-  }
-
-  /**
-   * Exit with a printed message
-   * @param status status code
-   * @param message message
-   */
-  private static void exitWithMessage(int status, String message) {
-    System.err.println(message);
-    ExitUtil.terminate(status);
-  }
-
-  private static String toStartupShutdownString(String prefix, String[] msg) {
-    StringBuilder b = new StringBuilder(prefix);
-    b.append("\n/************************************************************");
-    for (String s : msg) {
-      b.append("\n").append(prefix).append(s);
-    }
-    b.append("\n************************************************************/");
-    return b.toString();
-  }
-
-  /**
-   * forced shutdown runnable.
-   */
-  protected class ServiceForcedShutdown implements Runnable {
-
-    private final int shutdownTimeMillis;
-    private boolean serviceStopped;
-
-    public ServiceForcedShutdown(int shutdownTimeoutMillis) {
-      this.shutdownTimeMillis = shutdownTimeoutMillis;
-    }
-
-    @Override
-    public void run() {
-      if (service != null) {
-        service.stop();
-        serviceStopped = service.waitForServiceToStop(shutdownTimeMillis);
-      } else {
-        serviceStopped = true;
-      }
-    }
-
-    private boolean isServiceStopped() {
-      return serviceStopped;
-    }
-  }
-
-  /**
-   * The real main function, which takes the arguments as a list.
-   * arg[0] must be the service classname.
-   * @param argsList the list of arguments
-   */
-  public static void serviceMain(List<String> argsList) {
-    if (argsList.isEmpty()) {
-      exitWithMessage(EXIT_USAGE, USAGE_MESSAGE);
-    } else {
-      String serviceClassName = argsList.get(0);
-
-      if (LOG.isDebugEnabled()) {
-        LOG.debug(startupShutdownMessage(serviceClassName, argsList));
-        StringBuilder builder = new StringBuilder();
-        for (String arg : argsList) {
-          builder.append('"').append(arg).append("\" ");
-        }
-        LOG.debug(builder.toString());
-      }
-      Thread.setDefaultUncaughtExceptionHandler(
-        new YarnUncaughtExceptionHandler());
-
-      ServiceLauncher serviceLauncher = new ServiceLauncher<>(serviceClassName);
-      serviceLauncher.launchServiceAndExit(argsList);
-    }
-  }
-
-  /**
-   * This is the main entry point for the service launcher.
-   * @param args command line arguments.
-   */
-  public static void main(String[] args) {
-    List<String> argsList = Arrays.asList(args);
-    serviceMain(argsList);
-  }
-}
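
For context on the ServiceLauncher removed above: it loads a Service subclass
by name through a public zero-argument constructor, registers interrupt
handlers, and converts the outcome into an exit code. Below is a minimal
sketch of how such a launcher is typically driven; the ExampleService class,
its package, the configuration file path, and the launcher's package are
illustrative assumptions, not part of this patch.

    package org.example.demo; // hypothetical package, illustration only

    import org.apache.hadoop.service.AbstractService;
    // Launcher package assumed from the sibling classes in this module.
    import org.apache.slider.core.main.ServiceLauncher;

    public class ExampleService extends AbstractService {

      // The launcher's instantiateService() requires a public
      // zero-argument constructor.
      public ExampleService() {
        super("ExampleService");
      }

      @Override
      protected void serviceStart() throws Exception {
        System.out.println("ExampleService started");
      }

      public static void main(String[] args) {
        // arg[0] is the service classname; a "--conf <file>" tuple is
        // loaded as a configuration resource before launch.
        ServiceLauncher.main(new String[] {
            ExampleService.class.getName(),
            "--conf", "/tmp/example-site.xml" // hypothetical path
        });
      }
    }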

http://git-wip-us.apache.org/repos/asf/hadoop/blob/8b7fcb09/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/core/main/ServiceShutdownHook.java
----------------------------------------------------------------------
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/core/main/ServiceShutdownHook.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/core/main/ServiceShutdownHook.java
deleted file mode 100644
index de55789..0000000
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/core/main/ServiceShutdownHook.java
+++ /dev/null
@@ -1,80 +0,0 @@
-/*
- *  Licensed to the Apache Software Foundation (ASF) under one
- *  or more contributor license agreements.  See the NOTICE file
- *  distributed with this work for additional information
- *  regarding copyright ownership.  The ASF licenses this file
- *  to you under the Apache License, Version 2.0 (the
- *  "License"); you may not use this file except in compliance
- *  with the License.  You may obtain a copy of the License at
- *
- *       http://www.apache.org/licenses/LICENSE-2.0
- *
- *  Unless required by applicable law or agreed to in writing, software
- *  distributed under the License is distributed on an "AS IS" BASIS,
- *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- *  See the License for the specific language governing permissions and
- *  limitations under the License.
- */
-
-package org.apache.slider.core.main;
-
-import org.apache.hadoop.service.Service;
-import org.apache.hadoop.util.ShutdownHookManager;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.lang.ref.WeakReference;
-
-/**
- * JVM Shutdown hook for Service which will stop the
- * Service gracefully in case of JVM shutdown.
- * This hook uses a weak reference to the service, so
- * does not cause services to be retained after they have
- * been stopped and dereferenced elsewhere.
- */
-public class ServiceShutdownHook implements Runnable {
-  private static final Logger LOG = LoggerFactory.getLogger(
-      ServiceShutdownHook.class);
-
-  private final WeakReference<Service> serviceRef;
-  private Runnable hook;
-
-  public ServiceShutdownHook(Service service) {
-    serviceRef = new WeakReference<>(service);
-  }
-
-  public void register(int priority) {
-    unregister();
-    hook = this;
-    ShutdownHookManager.get().addShutdownHook(hook, priority);
-  }
-
-  public synchronized void unregister() {
-    if (hook != null) {
-      try {
-        ShutdownHookManager.get().removeShutdownHook(hook);
-      } catch (IllegalStateException e) {
-        LOG.info("Failed to unregister shutdown hook: {}", e, e);
-      }
-      hook = null;
-    }
-  }
-
-  @Override
-  public void run() {
-    Service service;
-    synchronized (this) {
-      service = serviceRef.get();
-      serviceRef.clear();
-    }
-    if (service == null) {
-      return;
-    }
-    try {
-      // Stop the service
-      service.stop();
-    } catch (Throwable t) {
-      LOG.info("Error stopping {}", service.getName(), t);
-    }
-  }
-}
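
A short usage sketch for the hook above; the demo class and the priority
value are assumptions for illustration, and any component that owns a
Service could do the same wiring.

    import org.apache.hadoop.service.Service;
    import org.apache.slider.core.main.ServiceShutdownHook;

    public final class ShutdownHookDemo {

      // Attach a JVM shutdown hook that stops the service; the weak
      // reference inside the hook means the service is not retained
      // once it has been stopped and dropped elsewhere.
      public static ServiceShutdownHook attach(Service service, int priority) {
        ServiceShutdownHook hook = new ServiceShutdownHook(service);
        hook.register(priority); // priority is caller-chosen
        return hook;
      }
    }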

http://git-wip-us.apache.org/repos/asf/hadoop/blob/8b7fcb09/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/core/persist/ApplicationReportSerDeser.java
----------------------------------------------------------------------
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/core/persist/ApplicationReportSerDeser.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/core/persist/ApplicationReportSerDeser.java
deleted file mode 100644
index a8c72ce..0000000
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/core/persist/ApplicationReportSerDeser.java
+++ /dev/null
@@ -1,57 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.slider.core.persist;
-
-import org.apache.slider.core.launch.SerializedApplicationReport;
-import org.codehaus.jackson.JsonGenerationException;
-import org.codehaus.jackson.JsonParseException;
-import org.codehaus.jackson.map.JsonMappingException;
-
-import java.io.IOException;
-
-/**
- * Persistence of {@link SerializedApplicationReport}
- * 
- */
-public class ApplicationReportSerDeser
-    extends JsonSerDeser<SerializedApplicationReport> {
-  public ApplicationReportSerDeser() {
-    super(SerializedApplicationReport.class);
-  }
-
-
-  private static final ApplicationReportSerDeser
-      staticinstance = new ApplicationReportSerDeser();
-
-  /**
-   * Convert an instance to a JSON string -sync access to a shared ser/deser
-   * object instance
-   * @param instance object to convert
-   * @return a JSON string description
-   * @throws JsonGenerationException JSON generation problems
-   * @throws JsonMappingException O/J mapping problems
-   */
-  public static String toString(SerializedApplicationReport instance)
-      throws IOException, JsonGenerationException, JsonMappingException {
-    synchronized (staticinstance) {
-      return staticinstance.toJson(instance);
-    }
-  }
- 
-}
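
A sketch of the intended use of the serializer above; how the
SerializedApplicationReport is produced is outside this file, so the demo
class (an assumption) only renders an existing report.

    import java.io.IOException;

    import org.apache.slider.core.launch.SerializedApplicationReport;
    import org.apache.slider.core.persist.ApplicationReportSerDeser;

    public final class ReportJsonDemo {

      // Render an already-built report as JSON through the shared,
      // synchronized static instance.
      public static String render(SerializedApplicationReport report)
          throws IOException {
        return ApplicationReportSerDeser.toString(report);
      }
    }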

http://git-wip-us.apache.org/repos/asf/hadoop/blob/8b7fcb09/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/core/persist/Filenames.java
----------------------------------------------------------------------
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/core/persist/Filenames.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/core/persist/Filenames.java
deleted file mode 100644
index 06ecc51..0000000
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/core/persist/Filenames.java
+++ /dev/null
@@ -1,28 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.slider.core.persist;
-
-public interface Filenames {
-
-  String RESOURCES = "resources.json";
-  String APPCONF = "app_config.json";
-  String INTERNAL = "internal.json";
-  String WRITELOCK = "writelock";
-  String READLOCK = "readlock";
-}
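
The interface above only names the persisted files; here is a small sketch
of resolving them against a persistence directory. The directory layout
itself is an assumption, not something this interface defines.

    import org.apache.hadoop.fs.Path;
    import org.apache.slider.core.persist.Filenames;

    public final class PersistedPathsDemo {

      // Resolve the well-known filenames under a persistence directory.
      public static Path resources(Path dir) {
        return new Path(dir, Filenames.RESOURCES);
      }

      public static Path appConf(Path dir) {
        return new Path(dir, Filenames.APPCONF);
      }
    }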

http://git-wip-us.apache.org/repos/asf/hadoop/blob/8b7fcb09/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/core/persist/JsonSerDeser.java
----------------------------------------------------------------------
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/core/persist/JsonSerDeser.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/core/persist/JsonSerDeser.java
deleted file mode 100644
index 8fe2549..0000000
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/core/persist/JsonSerDeser.java
+++ /dev/null
@@ -1,249 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.slider.core.persist;
-
-import org.apache.hadoop.fs.FSDataInputStream;
-import org.apache.hadoop.fs.FSDataOutputStream;
-import org.apache.hadoop.fs.FileStatus;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.io.IOUtils;
-import org.codehaus.jackson.JsonGenerationException;
-import org.codehaus.jackson.JsonParseException;
-import org.codehaus.jackson.map.DeserializationConfig;
-import org.codehaus.jackson.map.JsonMappingException;
-import org.codehaus.jackson.map.ObjectMapper;
-import org.codehaus.jackson.map.PropertyNamingStrategy;
-import org.codehaus.jackson.map.SerializationConfig;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.io.EOFException;
-import java.io.File;
-import java.io.FileNotFoundException;
-import java.io.FileOutputStream;
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.OutputStream;
-
-/**
- * Support for marshalling objects to and from JSON.
- * This class is NOT thread safe; it constructs an object mapper
- * as an instance field.
- * @param <T> the type to marshal and unmarshal
- */
-public class JsonSerDeser<T> {
-
-  private static final Logger log = LoggerFactory.getLogger(JsonSerDeser.class);
-  private static final String UTF_8 = "UTF-8";
-
-  private final Class<T> classType;
-  private final ObjectMapper mapper;
-
-  /**
-   * Create an instance bound to a specific type
-   * @param classType class type
-   */
-  public JsonSerDeser(Class<T> classType) {
-    this.classType = classType;
-    this.mapper = new ObjectMapper();
-    mapper.configure(DeserializationConfig.Feature.FAIL_ON_UNKNOWN_PROPERTIES, false);
-  }
-
-  public JsonSerDeser(Class<T> classType, PropertyNamingStrategy namingStrategy) {
-    this(classType);
-    mapper.setPropertyNamingStrategy(namingStrategy);
-  }
-
-  /**
-   * Convert from JSON
-   * @param json input
-   * @return the parsed JSON
-   * @throws IOException IO problems
-   * @throws JsonMappingException failure to map from the JSON to this class
-   */
-  public T fromJson(String json)
-    throws IOException, JsonParseException, JsonMappingException {
-    try {
-      return mapper.readValue(json, classType);
-    } catch (IOException e) {
-      log.error("Exception while parsing json : " + e + "\n" + json, e);
-      throw e;
-    }
-  }
-
-  /**
-   * Convert from a JSON file
-   * @param jsonFile input file
-   * @return the parsed JSON
-   * @throws IOException IO problems
-   * @throws JsonMappingException failure to map from the JSON to this class
-   */
-  public T fromFile(File jsonFile)
-    throws IOException, JsonParseException, JsonMappingException {
-    File absoluteFile = jsonFile.getAbsoluteFile();
-    try {
-      return mapper.readValue(absoluteFile, classType);
-    } catch (IOException e) {
-      log.error("Exception while parsing json file {}", absoluteFile, e);
-      throw e;
-    }
-  }
-
-  /**
-   * Convert from a JSON resource on the classpath
-   * @param resource resource name
-   * @return the parsed JSON
-   * @throws IOException IO problems
-   * @throws JsonMappingException failure to map from the JSON to this class
-   */
-  public T fromResource(String resource)
-    throws IOException, JsonParseException, JsonMappingException {
-    try (InputStream resStream = this.getClass().getResourceAsStream(resource)) {
-      if (resStream == null) {
-        throw new FileNotFoundException(resource);
-      }
-      return (T) (mapper.readValue(resStream, classType));
-    } catch (IOException e) {
-      log.error("Exception while parsing json resource {}", resource, e);
-      throw e;
-    }
-  }
-
-  /**
-   * Convert from an input stream, closing the stream afterwards.
-   * @param stream input stream to read and close
-   * @return the parsed JSON
-   * @throws IOException IO problems
-   */
-  public T fromStream(InputStream stream) throws IOException {
-    try {
-      return (T) (mapper.readValue(stream, classType));
-    } catch (IOException e) {
-      log.error("Exception while parsing json input stream", e);
-      throw e;
-    } finally {
-      IOUtils.closeStream(stream);
-    }
-  }
-
-  /**
-   * Clone by converting to JSON and back again.
-   * This is much less efficient than a direct Java clone.
-   * @param instance instance to duplicate
-   * @return a new instance
-   * @throws IOException problems.
-   */
-  public T fromInstance(T instance) throws IOException {
-    return fromJson(toJson(instance));
-  }
-
-  /**
-   * Deserialize from a byte array
-   * @param b the byte array to deserialize from
-   * @return the deserialized value
-   * @throws IOException parse problems
-   */
-  public T fromBytes(byte[] b) throws IOException {
-    String json = new String(b, 0, b.length, UTF_8);
-    return fromJson(json);
-  }
-  
-  /**
-   * Load from a Hadoop filesystem
-   * @param fs filesystem
-   * @param path path
-   * @return the loaded instance
-   * @throws IOException IO problems
-   * @throws JsonParseException parse problems
-   * @throws JsonMappingException O/J mapping problems
-   */
-  public T load(FileSystem fs, Path path)
-    throws IOException, JsonParseException, JsonMappingException {
-    FileStatus status = fs.getFileStatus(path);
-    long len = status.getLen();
-    byte[] b = new byte[(int) len];
-    FSDataInputStream dataInputStream = fs.open(path);
-    int count = dataInputStream.read(b);
-    if (count != len) {
-      throw new EOFException("Read of " + path +" finished prematurely");
-    }
-    return fromBytes(b);
-  }
-
-
-  /**
-   * Save to a Hadoop filesystem
-   * @param fs filesystem
-   * @param path path
-   * @param instance instance to save
-   * @param overwrite should any existing file be overwritten
-   * @throws IOException IO exception
-   */
-  public void save(FileSystem fs, Path path, T instance,
-                   boolean overwrite) throws
-                                      IOException {
-    FSDataOutputStream dataOutputStream = fs.create(path, overwrite);
-    writeJsonAsBytes(instance, dataOutputStream);
-  }
-
-  /**
-   * Save an instance to a file
-   * @param instance instance to save
-   * @param file file
-   * @throws IOException
-   */
-  public void save(T instance, File file) throws
-      IOException {
-    writeJsonAsBytes(instance, new FileOutputStream(file.getAbsoluteFile()));
-  }
-
-  /**
-   * Write the JSON as bytes -then close the stream
-   * @param instance instance to write
-   * @param dataOutputStream an output stream that will always be closed
-   * @throws IOException on any failure
-   */
-  private void writeJsonAsBytes(T instance,
-      OutputStream dataOutputStream) throws IOException {
-    try {
-      String json = toJson(instance);
-      byte[] b = json.getBytes(UTF_8);
-      dataOutputStream.write(b);
-      dataOutputStream.flush();
-      dataOutputStream.close();
-    } finally {
-      IOUtils.closeStream(dataOutputStream);
-    }
-  }
-
-  /**
-   * Convert an object to a JSON string
-   * @param instance instance to convert
-   * @return a JSON string description
-   * @throws JsonGenerationException JSON generation problems
-   * @throws JsonMappingException O/J mapping problems
-   */
-  public String toJson(T instance) throws IOException,
-                                               JsonGenerationException,
-                                               JsonMappingException {
-    mapper.configure(SerializationConfig.Feature.INDENT_OUTPUT, true);
-    return mapper.writeValueAsString(instance);
-  }
-
-}
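
A round-trip sketch for the JsonSerDeser above. The Sample bean, the demo
class, and the local file path are assumptions for illustration; the Jackson
defaults configured in the class (ignore unknown properties, indent output)
apply as-is.

    import java.io.File;
    import java.io.IOException;

    import org.apache.slider.core.persist.JsonSerDeser;

    public final class JsonSerDeserDemo {

      // A simple bean Jackson can map: no-arg constructor plus getters/setters.
      public static class Sample {
        private String name;
        private int count;
        public String getName() { return name; }
        public void setName(String name) { this.name = name; }
        public int getCount() { return count; }
        public void setCount(int count) { this.count = count; }
      }

      public static void main(String[] args) throws IOException {
        JsonSerDeser<Sample> serDeser = new JsonSerDeser<>(Sample.class);

        Sample in = new Sample();
        in.setName("demo");
        in.setCount(3);

        // String round trip
        String json = serDeser.toJson(in);
        Sample fromJson = serDeser.fromJson(json);

        // File round trip; the path is arbitrary
        File file = new File("/tmp/sample.json");
        serDeser.save(in, file);
        Sample fromFile = serDeser.fromFile(file);

        System.out.println(fromJson.getName() + " " + fromFile.getCount());
      }
    }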

http://git-wip-us.apache.org/repos/asf/hadoop/blob/8b7fcb09/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/core/persist/LockHeldAction.java
----------------------------------------------------------------------
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/core/persist/LockHeldAction.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/core/persist/LockHeldAction.java
deleted file mode 100644
index 6659687..0000000
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/core/persist/LockHeldAction.java
+++ /dev/null
@@ -1,38 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.slider.core.persist;
-
-import org.apache.slider.core.exceptions.SliderException;
-
-import java.io.IOException;
-
-/**
- * Optional action to execute while the lock is held; this makes it possible
- * to run other persistence operations within the scope of the same lock
- * without inserting too much code into the persister.
- */
-public interface LockHeldAction {
-
-  /**
-   * Execute the action
-   * @throws IOException on any IO failure
-   * @throws SliderException on any other failure
-   */
-  public void execute() throws IOException, SliderException;
-  
-}
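
The interface above is a single-method callback, so a sketch of an
implementation is enough. Whichever persister accepts a LockHeldAction is
not shown in this file, so the demo class (an assumption) only builds the
callback that would run while the lock is held.

    import java.io.IOException;

    import org.apache.slider.core.exceptions.SliderException;
    import org.apache.slider.core.persist.LockHeldAction;

    public final class LockHeldActionDemo {

      // Build an action; the body runs only while the persistence lock is held.
      public static LockHeldAction loggingAction() {
        return new LockHeldAction() {
          @Override
          public void execute() throws IOException, SliderException {
            System.out.println("running under the persistence lock");
          }
        };
      }
    }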

