http://git-wip-us.apache.org/repos/asf/hadoop/blob/40ab068e/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/monitor/probe/MonitorUtils.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/monitor/probe/MonitorUtils.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/monitor/probe/MonitorUtils.java
new file mode 100644
index 0000000..684f655
--- /dev/null
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/monitor/probe/MonitorUtils.java
@@ -0,0 +1,84 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.yarn.service.monitor.probe;
+
+import org.apache.hadoop.yarn.service.api.records.ReadinessCheck;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.Formatter;
+import java.util.Locale;
+
+/**
+ * Various utilities for working with the monitor.
+ */
+public final class MonitorUtils {
+  protected static final Logger LOG = LoggerFactory.getLogger(MonitorUtils
+      .class);
+
+  private MonitorUtils() {
+  }
+
+  public static String toPlural(int val) {
+    return val != 1 ? "s" : "";
+  }
+
+  /**
+   * Convert milliseconds to a human-readable time; the exact format is unspecified.
+   * @param milliseconds a time in milliseconds
+   * @return a time that is converted to human intervals
+   */
+  public static String millisToHumanTime(long milliseconds) {
+    StringBuilder sb = new StringBuilder();
+    // Send all output to the Appendable object sb
+    Formatter formatter = new Formatter(sb, Locale.US);
+
+    long s = Math.abs(milliseconds / 1000);
+    long m = Math.abs(milliseconds % 1000);
+    if (milliseconds > 0) {
+      formatter.format("%d.%03ds", s, m);
+    } else if (milliseconds == 0) {
+      formatter.format("0");
+    } else {
+      formatter.format("-%d.%03ds", s, m);
+    }
+    return sb.toString();
+  }
+
+  public static Probe getProbe(ReadinessCheck readinessCheck) {
+    if (readinessCheck == null) {
+      return null;
+    }
+    if (readinessCheck.getType() == null) {
+      return null;
+    }
+    try {
+      switch (readinessCheck.getType()) {
+      case HTTP:
+        return HttpProbe.create(readinessCheck.getProps());
+      case PORT:
+        return PortProbe.create(readinessCheck.getProps());
+      default:
+        return null;
+      }
+    } catch (Throwable t) {
+      throw new IllegalArgumentException("Error creating readiness check " +
+          t);
+    }
+  }
+}
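
A minimal usage sketch (not part of the patch) of the utilities above; it only
assumes the classes added in this commit are on the classpath:

import org.apache.hadoop.yarn.service.monitor.probe.MonitorUtils;
import org.apache.hadoop.yarn.service.monitor.probe.Probe;

public class MonitorUtilsSketch {
  public static void main(String[] args) {
    // "1.500s" -- seconds plus zero-padded milliseconds
    System.out.println(MonitorUtils.millisToHumanTime(1500));

    // Pluralisation helper: "1 probe", "2 probes"
    for (int n : new int[] {1, 2}) {
      System.out.println(n + " probe" + MonitorUtils.toPlural(n));
    }

    // getProbe returns null when no readiness check (or no type) is configured
    Probe probe = MonitorUtils.getProbe(null);
    System.out.println("probe configured: " + (probe != null));
  }
}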

http://git-wip-us.apache.org/repos/asf/hadoop/blob/40ab068e/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/monitor/probe/PortProbe.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/monitor/probe/PortProbe.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/monitor/probe/PortProbe.java
new file mode 100644
index 0000000..aba5859
--- /dev/null
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/monitor/probe/PortProbe.java
@@ -0,0 +1,98 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.yarn.service.monitor.probe;
+
+import org.apache.hadoop.io.IOUtils;
+import org.apache.hadoop.yarn.service.component.instance.ComponentInstance;
+import org.apache.hadoop.yarn.service.utils.SliderUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.IOException;
+import java.net.InetSocketAddress;
+import java.net.Socket;
+import java.util.Map;
+
+/**
+ * Probe for a port being open.
+ */
+public class PortProbe extends Probe {
+  protected static final Logger log = LoggerFactory.getLogger(PortProbe.class);
+  private final int port;
+  private final int timeout;
+
+  public PortProbe(int port, int timeout) {
+    super("Port probe of " + port + " for " + timeout + "ms", null);
+    this.port = port;
+    this.timeout = timeout;
+  }
+
+  public static PortProbe create(Map<String, String> props)
+      throws IOException {
+    int port = getPropertyInt(props, PORT_PROBE_PORT, null);
+
+    if (port >= 65536) {
+      throw new IOException(PORT_PROBE_PORT + " " + port + " is out of " +
+          "range");
+    }
+
+    int timeout = getPropertyInt(props, PORT_PROBE_CONNECT_TIMEOUT,
+        PORT_PROBE_CONNECT_TIMEOUT_DEFAULT);
+
+    return new PortProbe(port, timeout);
+  }
+
+  /**
+   * Try to connect to the (host, port); failing to connect within
+   * the specified timeout is reported as a probe failure.
+   * @param instance role instance
+   * @return the outcome
+   */
+  @Override
+  public ProbeStatus ping(ComponentInstance instance) {
+    ProbeStatus status = new ProbeStatus();
+
+    if (instance.getContainerStatus() == null || SliderUtils
+        .isEmpty(instance.getContainerStatus().getIPs())) {
+      status.fail(this, new IOException(
+          instance.getCompInstanceName() + ": IP is not available yet"));
+      return status;
+    }
+
+    String ip = instance.getContainerStatus().getIPs().get(0);
+    InetSocketAddress sockAddr = new InetSocketAddress(ip, port);
+    Socket socket = new Socket();
+    try {
+      if (log.isDebugEnabled()) {
+        log.debug(instance.getCompInstanceName() + ": Connecting " + sockAddr
+            .toString() + ", timeout=" + MonitorUtils
+            .millisToHumanTime(timeout));
+      }
+      socket.connect(sockAddr, timeout);
+      status.succeed(this);
+    } catch (Throwable e) {
+      String error =
+          instance.getCompInstanceName() + ": Probe " + sockAddr + " failed";
+      log.debug(error, e);
+      status.fail(this, new IOException(error, e));
+    } finally {
+      IOUtils.closeSocket(socket);
+    }
+    return status;
+  }
+}
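
For reference, a hedged sketch (not part of the patch) of driving PortProbe.create
directly; it assumes PORT_PROBE_PORT and PORT_PROBE_CONNECT_TIMEOUT are constants
on the MonitorKeys interface referenced above (MonitorKeys itself is not in this
diff):

import java.io.IOException;
import java.util.HashMap;
import java.util.Map;

import org.apache.hadoop.yarn.service.monitor.probe.MonitorKeys;
import org.apache.hadoop.yarn.service.monitor.probe.PortProbe;

public class PortProbeSketch {
  public static void main(String[] args) throws IOException {
    Map<String, String> props = new HashMap<>();
    props.put(MonitorKeys.PORT_PROBE_PORT, "8080");
    props.put(MonitorKeys.PORT_PROBE_CONNECT_TIMEOUT, "1000"); // milliseconds

    // create() throws IOException if the port property is missing or >= 65536
    PortProbe probe = PortProbe.create(props);
    System.out.println("created: " + probe.getName());
    // probe.ping(instance) then succeeds once the container's first IP
    // accepts connections on port 8080 within the timeout.
  }
}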

http://git-wip-us.apache.org/repos/asf/hadoop/blob/40ab068e/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/monitor/probe/Probe.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/monitor/probe/Probe.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/monitor/probe/Probe.java
new file mode 100644
index 0000000..3237a2b
--- /dev/null
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/monitor/probe/Probe.java
@@ -0,0 +1,100 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.yarn.service.monitor.probe;
+
+import org.apache.commons.lang.StringUtils;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.yarn.service.component.instance.ComponentInstance;
+
+import java.io.IOException;
+import java.util.Map;
+
+/**
+ * Base class of all probes.
+ */
+public abstract class Probe implements MonitorKeys {
+
+  protected final Configuration conf;
+  private String name;
+
+  /**
+   * Create a probe with a specific name.
+   *
+   * @param name probe name
+   * @param conf configuration being stored.
+   */
+  public Probe(String name, Configuration conf) {
+    this.name = name;
+    this.conf = conf;
+  }
+
+
+  protected void setName(String name) {
+    this.name = name;
+  }
+
+  public String getName() {
+    return name;
+  }
+
+
+  @Override
+  public String toString() {
+    return getName();
+  }
+
+  public static String getProperty(Map<String, String> props, String name,
+      String defaultValue) throws IOException {
+    String value = props.get(name);
+    if (StringUtils.isEmpty(value)) {
+      if (defaultValue == null) {
+        throw new IOException(name + " not specified");
+      }
+      return defaultValue;
+    }
+    return value;
+  }
+
+  public static int getPropertyInt(Map<String, String> props, String name,
+      Integer defaultValue) throws IOException {
+    String value = props.get(name);
+    if (StringUtils.isEmpty(value)) {
+      if (defaultValue == null) {
+        throw new IOException(name + " not specified");
+      }
+      return defaultValue;
+    }
+    return Integer.parseInt(value);
+  }
+
+  /**
+   * Perform any pre-launch initialization.
+   */
+  public void init() throws IOException {
+
+  }
+
+  /**
+   * Ping the endpoint. All exceptions must be caught and included in the
+   * (failure) status.
+   *
+   * @param instance instance to ping
+   * @return the status
+   */
+  public abstract ProbeStatus ping(ComponentInstance instance);
+}
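
A small sketch (not part of the patch) of the property-lookup contract above: a
null default makes the key mandatory, otherwise the default is returned when the
key is absent or empty. The key names here are purely illustrative:

import java.io.IOException;
import java.util.Collections;
import java.util.Map;

import org.apache.hadoop.yarn.service.monitor.probe.Probe;

public class ProbePropertySketch {
  public static void main(String[] args) {
    Map<String, String> props = Collections.singletonMap("probe.timeout", "500");
    try {
      System.out.println(Probe.getPropertyInt(props, "probe.timeout", null)); // 500
      System.out.println(Probe.getPropertyInt(props, "probe.port", 8080));    // default used
      Probe.getPropertyInt(props, "probe.port", null);  // mandatory -> IOException
    } catch (IOException e) {
      System.out.println("missing mandatory key: " + e.getMessage());
    }
  }
}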

http://git-wip-us.apache.org/repos/asf/hadoop/blob/40ab068e/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/monitor/probe/ProbeStatus.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/monitor/probe/ProbeStatus.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/monitor/probe/ProbeStatus.java
new file mode 100644
index 0000000..bc62dcd
--- /dev/null
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/monitor/probe/ProbeStatus.java
@@ -0,0 +1,160 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.yarn.service.monitor.probe;
+
+import java.io.Serializable;
+import java.util.Date;
+
+/**
+ * Status message of a probe. This is designed to be sent over the wire,
+ * though the exception had better be serializable at the far end if that is
+ * to work.
+ */
+public final class ProbeStatus implements Serializable {
+  private static final long serialVersionUID = 165468L;
+
+  private long timestamp;
+  private String timestampText;
+  private boolean success;
+  private boolean realOutcome;
+  private String message;
+  private Throwable thrown;
+  private transient Probe originator;
+
+  public ProbeStatus() {
+  }
+
+  public ProbeStatus(long timestamp, String message, Throwable thrown) {
+    this.success = false;
+    this.message = message;
+    this.thrown = thrown;
+    setTimestamp(timestamp);
+  }
+
+  public ProbeStatus(long timestamp, String message) {
+    this.success = true;
+    setTimestamp(timestamp);
+    this.message = message;
+    this.thrown = null;
+  }
+
+  public long getTimestamp() {
+    return timestamp;
+  }
+
+  public void setTimestamp(long timestamp) {
+    this.timestamp = timestamp;
+    timestampText = new Date(timestamp).toString();
+  }
+
+  public boolean isSuccess() {
+    return success;
+  }
+
+  /**
+   * Set both the success and the real outcome bits to the same value
+   * @param success the new value
+   */
+  public void setSuccess(boolean success) {
+    this.success = success;
+    realOutcome = success;
+  }
+
+  public String getTimestampText() {
+    return timestampText;
+  }
+
+  public boolean getRealOutcome() {
+    return realOutcome;
+  }
+
+  public String getMessage() {
+    return message;
+  }
+
+  public void setMessage(String message) {
+    this.message = message;
+  }
+
+  public Throwable getThrown() {
+    return thrown;
+  }
+
+  public void setThrown(Throwable thrown) {
+    this.thrown = thrown;
+  }
+
+  /**
+   * Get the probe that generated this result. May be null
+   * @return a possibly null reference to a probe
+   */
+  public Probe getOriginator() {
+    return originator;
+  }
+
+  /**
+   * The probe has succeeded: capture the current timestamp, set
+   * success to true, and record any other data needed.
+   * @param probe probe
+   */
+  public void succeed(Probe probe) {
+    finish(probe, true, probe.getName(), null);
+  }
+
+  /**
+   * A probe has failed either because the test returned false, or an exception
+   * was thrown. The {@link #success} field is set to false, any exception 
+   * thrown is recorded.
+   * @param probe probe that failed
+   * @param thrown an exception that was thrown.
+   */
+  public void fail(Probe probe, Throwable thrown) {
+    finish(probe, false, "Failure in " + probe, thrown);
+  }
+
+  public void finish(Probe probe, boolean succeeded, String text, Throwable thrown) {
+    setTimestamp(System.currentTimeMillis());
+    setSuccess(succeeded);
+    originator = probe;
+    message = text;
+    this.thrown = thrown;
+  }
+
+  @Override
+  public String toString() {
+    LogEntryBuilder builder = new LogEntryBuilder("Probe Status");
+    builder.elt("time", timestampText)
+           .elt("outcome", (success ? "success" : "failure"));
+
+    if (success != realOutcome) {
+      builder.elt("originaloutcome", (realOutcome ? "success" : "failure"));
+    }
+    builder.elt("message", message);
+    if (thrown != null) {
+      builder.elt("exception", thrown);
+    }
+
+    return builder.toString();
+  }
+
+  /**
+   * Flip the success bit on while leaving the real outcome bit untouched.
+   */
+  public void markAsSuccessful() {
+    success = true;
+  }
+}
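
A sketch (not part of the patch) of the status lifecycle implied above:
succeed()/fail() stamp the timestamp and originator, and markAsSuccessful()
overrides the outcome while getRealOutcome() preserves what actually happened.
The always-failing probe below is purely illustrative:

import java.io.IOException;

import org.apache.hadoop.yarn.service.component.instance.ComponentInstance;
import org.apache.hadoop.yarn.service.monitor.probe.Probe;
import org.apache.hadoop.yarn.service.monitor.probe.ProbeStatus;

public class ProbeStatusSketch {
  // Trivial probe that always fails, just to drive the status object
  static class AlwaysFailProbe extends Probe {
    AlwaysFailProbe() {
      super("always-fail", null);
    }
    @Override
    public ProbeStatus ping(ComponentInstance instance) {
      ProbeStatus status = new ProbeStatus();
      status.fail(this, new IOException("simulated failure"));
      return status;
    }
  }

  public static void main(String[] args) {
    ProbeStatus status = new AlwaysFailProbe().ping(null);
    System.out.println(status.isSuccess());      // false
    status.markAsSuccessful();                   // e.g. ignore failures during boot
    System.out.println(status.isSuccess());      // true
    System.out.println(status.getRealOutcome()); // still false
  }
}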

http://git-wip-us.apache.org/repos/asf/hadoop/blob/40ab068e/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/provider/AbstractClientProvider.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/provider/AbstractClientProvider.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/provider/AbstractClientProvider.java
new file mode 100644
index 0000000..0d11be2
--- /dev/null
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/provider/AbstractClientProvider.java
@@ -0,0 +1,122 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.yarn.service.provider;
+
+import org.apache.commons.lang.StringUtils;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.yarn.service.api.records.Artifact;
+import org.apache.hadoop.yarn.service.api.records.ConfigFile;
+import org.apache.hadoop.yarn.service.utils.SliderUtils;
+
+import java.io.IOException;
+import java.nio.file.Paths;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Set;
+
+public abstract class AbstractClientProvider {
+
+  public AbstractClientProvider() {
+  }
+
+  /**
+   * Generates a fixed format of application tags given one or more of
+   * application name, version and description. This allows a subsequent query
+   * for an application with a name only, version only, or description only, or
+   * any combination of those as filters.
+   *
+   * @param appName name of the application
+   * @param appVersion version of the application
+   * @param appDescription brief description of the application
+   * @return the set of application tags
+   */
+  public static final Set<String> createApplicationTags(String appName,
+      String appVersion, String appDescription) {
+    Set<String> tags = new HashSet<>();
+    tags.add(SliderUtils.createNameTag(appName));
+    if (appVersion != null) {
+      tags.add(SliderUtils.createVersionTag(appVersion));
+    }
+    if (appDescription != null) {
+      tags.add(SliderUtils.createDescriptionTag(appDescription));
+    }
+    return tags;
+  }
+
+  /**
+   * Validate the artifact.
+   * @param artifact artifact to validate
+   * @param fileSystem file system to use during validation
+   */
+  public abstract void validateArtifact(Artifact artifact, FileSystem
+      fileSystem) throws IOException;
+
+  protected abstract void validateConfigFile(ConfigFile configFile, FileSystem
+      fileSystem) throws IOException;
+
+  /**
+   * Validate the config files.
+   * @param configFiles config file list
+   * @param fs file system
+   */
+  public void validateConfigFiles(List<ConfigFile> configFiles,
+      FileSystem fs) throws IOException {
+    Set<String> destFileSet = new HashSet<>();
+
+    for (ConfigFile file : configFiles) {
+      if (file.getType() == null) {
+        throw new IllegalArgumentException("File type is empty");
+      }
+
+      if (file.getType().equals(ConfigFile.TypeEnum.TEMPLATE) && StringUtils
+          .isEmpty(file.getSrcFile())) {
+        throw new IllegalArgumentException(
+            "Src_file is empty for " + ConfigFile.TypeEnum.TEMPLATE);
+
+      }
+      if (!StringUtils.isEmpty(file.getSrcFile())) {
+        Path p = new Path(file.getSrcFile());
+        if (!fs.exists(p)) {
+          throw new IllegalArgumentException(
+              "Src_file does not exist for config file: " + file
+                  .getSrcFile());
+        }
+      }
+
+      if (StringUtils.isEmpty(file.getDestFile())) {
+        throw new IllegalArgumentException("Dest_file is empty.");
+      }
+
+      if (destFileSet.contains(file.getDestFile())) {
+        throw new IllegalArgumentException(
+            "Duplicated ConfigFile exists: " + file.getDestFile());
+      }
+      destFileSet.add(file.getDestFile());
+
+      java.nio.file.Path destPath = Paths.get(file.getDestFile());
+      if (!destPath.isAbsolute() && destPath.getNameCount() > 1) {
+        throw new IllegalArgumentException("Non-absolute dest_file has more " +
+            "than one path element");
+      }
+
+      // provider-specific validation
+      validateConfigFile(file, fs);
+    }
+  }
+}
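
A short sketch (not part of the patch) of the tag helper above; the name and
version values are made up for illustration:

import java.util.Set;

import org.apache.hadoop.yarn.service.provider.AbstractClientProvider;

public class TagSketch {
  public static void main(String[] args) {
    // Name is required; a null version or description is simply skipped
    Set<String> tags = AbstractClientProvider.createApplicationTags(
        "sleeper-service", "1.0.0", null);
    tags.forEach(System.out::println);
  }
}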

http://git-wip-us.apache.org/repos/asf/hadoop/blob/40ab068e/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/provider/AbstractProviderService.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/provider/AbstractProviderService.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/provider/AbstractProviderService.java
new file mode 100644
index 0000000..8d607ab
--- /dev/null
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/provider/AbstractProviderService.java
@@ -0,0 +1,109 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.yarn.service.provider;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.yarn.api.ApplicationConstants;
+import org.apache.hadoop.yarn.service.api.records.Service;
+import org.apache.hadoop.yarn.service.conf.YarnServiceConf;
+import org.apache.hadoop.yarn.service.api.records.Component;
+import org.apache.hadoop.yarn.service.conf.YarnServiceConstants;
+import org.apache.hadoop.yarn.service.utils.SliderFileSystem;
+import org.apache.hadoop.yarn.service.utils.SliderUtils;
+import org.apache.hadoop.yarn.service.exceptions.SliderException;
+import org.apache.hadoop.yarn.service.containerlaunch.AbstractLauncher;
+import org.apache.hadoop.yarn.service.containerlaunch.CommandLineBuilder;
+import org.apache.hadoop.yarn.service.component.instance.ComponentInstance;
+import org.apache.hadoop.yarn.service.ServiceContext;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.IOException;
+import java.util.Map;
+import java.util.Map.Entry;
+
+import static org.apache.hadoop.yarn.service.conf.YarnServiceConf.CONTAINER_RETRY_INTERVAL;
+import static org.apache.hadoop.yarn.service.conf.YarnServiceConf.CONTAINER_RETRY_MAX;
+import static org.apache.hadoop.yarn.service.utils.ServiceApiUtil.$;
+
+public abstract class AbstractProviderService implements ProviderService,
+    YarnServiceConstants {
+
+  protected static final Logger log =
+      LoggerFactory.getLogger(AbstractProviderService.class);
+
+  public abstract void processArtifact(AbstractLauncher launcher,
+      ComponentInstance compInstance, SliderFileSystem fileSystem,
+      Service service)
+      throws IOException;
+
+  public void buildContainerLaunchContext(AbstractLauncher launcher,
+      Service service, ComponentInstance instance,
+      SliderFileSystem fileSystem, Configuration yarnConf)
+      throws IOException, SliderException {
+    Component component = instance.getComponent().getComponentSpec();
+    processArtifact(launcher, instance, fileSystem, service);
+
+    ServiceContext context =
+        instance.getComponent().getScheduler().getContext();
+    // Generate tokens (key-value pair) for config substitution.
+    // Get pre-defined tokens
+    Map<String, String> globalTokens =
+        instance.getComponent().getScheduler().globalTokens;
+    Map<String, String> tokensForSubstitution = ProviderUtils
+        .initCompTokensForSubstitute(instance);
+    tokensForSubstitution.putAll(globalTokens);
+    // Set the environment variables in launcher
+    launcher.putEnv(SliderUtils
+        .buildEnvMap(component.getConfiguration(), tokensForSubstitution));
+    launcher.setEnv("WORK_DIR", ApplicationConstants.Environment.PWD.$());
+    launcher.setEnv("LOG_DIR", ApplicationConstants.LOG_DIR_EXPANSION_VAR);
+    if (System.getenv(HADOOP_USER_NAME) != null) {
+      launcher.setEnv(HADOOP_USER_NAME, System.getenv(HADOOP_USER_NAME));
+    }
+    launcher.setEnv("LANG", "en_US.UTF-8");
+    launcher.setEnv("LC_ALL", "en_US.UTF-8");
+    launcher.setEnv("LANGUAGE", "en_US.UTF-8");
+
+    for (Entry<String, String> entry : launcher.getEnv().entrySet()) {
+      tokensForSubstitution.put($(entry.getKey()), entry.getValue());
+    }
+    //TODO add component host tokens?
+//    ProviderUtils.addComponentHostTokens(tokensForSubstitution, amState);
+
+    // create config file on hdfs and add local resource
+    ProviderUtils.createConfigFileAndAddLocalResource(launcher, fileSystem,
+        component, tokensForSubstitution, instance, context);
+
+    // substitute launch command
+    String launchCommand = ProviderUtils
+        .substituteStrWithTokens(component.getLaunchCommand(),
+            tokensForSubstitution);
+    CommandLineBuilder operation = new CommandLineBuilder();
+    operation.add(launchCommand);
+    operation.addOutAndErrFiles(OUT_FILE, ERR_FILE);
+    launcher.addCommand(operation.build());
+
+    // By default retry forever every 30 seconds
+    launcher.setRetryContext(YarnServiceConf
+        .getInt(CONTAINER_RETRY_MAX, -1, service.getConfiguration(),
+            yarnConf), YarnServiceConf
+        .getInt(CONTAINER_RETRY_INTERVAL, 30000, service.getConfiguration(),
+            yarnConf));
+  }
+}

http://git-wip-us.apache.org/repos/asf/hadoop/blob/40ab068e/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/provider/ProviderFactory.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/provider/ProviderFactory.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/provider/ProviderFactory.java
new file mode 100644
index 0000000..0f949e0
--- /dev/null
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/provider/ProviderFactory.java
@@ -0,0 +1,76 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.yarn.service.provider;
+
+import org.apache.hadoop.yarn.service.provider.defaultImpl.DefaultProviderFactory;
+import org.apache.hadoop.yarn.service.api.records.Artifact;
+import org.apache.hadoop.yarn.service.provider.docker.DockerProviderFactory;
+import org.apache.hadoop.yarn.service.provider.tarball.TarballProviderFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * Base class for factories.
+ */
+public abstract class ProviderFactory {
+  protected static final Logger LOG =
+      LoggerFactory.getLogger(ProviderFactory.class);
+
+  protected ProviderFactory() {}
+
+  public abstract AbstractClientProvider createClientProvider();
+
+  public abstract ProviderService createServerProvider();
+
+  public static synchronized ProviderService getProviderService(Artifact
+      artifact) {
+    return createServiceProviderFactory(artifact).createServerProvider();
+  }
+
+  public static synchronized AbstractClientProvider getClientProvider(Artifact
+      artifact) {
+    return createServiceProviderFactory(artifact).createClientProvider();
+  }
+
+  /**
+   * Create a provider factory for the given artifact type.
+   * @param artifact artifact
+   * @return provider factory
+   */
+  public static synchronized ProviderFactory createServiceProviderFactory(
+      Artifact artifact) {
+    if (artifact == null || artifact.getType() == null) {
+      LOG.debug("Loading service provider type default");
+      return DefaultProviderFactory.getInstance();
+    }
+    LOG.debug("Loading service provider type {}", artifact.getType());
+    switch (artifact.getType()) {
+      // TODO add handling for custom types?
+      // TODO handle service
+      case DOCKER:
+        return DockerProviderFactory.getInstance();
+      case TARBALL:
+        return TarballProviderFactory.getInstance();
+      default:
+        throw new IllegalArgumentException(String.format("Resolution error, " +
+                "%s should not be passed to createServiceProviderFactory",
+            artifact.getType()));
+    }
+  }
+}
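
A hedged sketch (not part of the patch) of the factory selection above. It
assumes the Artifact record exposes the fluent type(...) setter and a nested
TypeEnum, which are not shown in this diff:

import org.apache.hadoop.yarn.service.api.records.Artifact;
import org.apache.hadoop.yarn.service.provider.AbstractClientProvider;
import org.apache.hadoop.yarn.service.provider.ProviderFactory;
import org.apache.hadoop.yarn.service.provider.ProviderService;

public class ProviderSelectionSketch {
  public static void main(String[] args) {
    // A missing artifact (or missing type) falls back to the default provider
    ProviderService defaultService = ProviderFactory.getProviderService(null);

    // A DOCKER artifact selects the docker client/server provider pair
    Artifact docker = new Artifact().type(Artifact.TypeEnum.DOCKER);
    AbstractClientProvider client = ProviderFactory.getClientProvider(docker);
    ProviderService server = ProviderFactory.getProviderService(docker);

    System.out.println(defaultService.getClass().getSimpleName());
    System.out.println(client.getClass().getSimpleName());
    System.out.println(server.getClass().getSimpleName());
  }
}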

http://git-wip-us.apache.org/repos/asf/hadoop/blob/40ab068e/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/provider/ProviderService.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/provider/ProviderService.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/provider/ProviderService.java
new file mode 100644
index 0000000..eb721b4
--- /dev/null
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/provider/ProviderService.java
@@ -0,0 +1,39 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.yarn.service.provider;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.yarn.service.api.records.Service;
+import org.apache.hadoop.yarn.service.utils.SliderFileSystem;
+import org.apache.hadoop.yarn.service.exceptions.SliderException;
+import org.apache.hadoop.yarn.service.containerlaunch.AbstractLauncher;
+import org.apache.hadoop.yarn.service.component.instance.ComponentInstance;
+
+import java.io.IOException;
+
+public interface ProviderService {
+
+  /**
+   * Set up the entire container launch context
+   */
+  void buildContainerLaunchContext(AbstractLauncher containerLauncher,
+      Service service, ComponentInstance instance,
+      SliderFileSystem sliderFileSystem, Configuration yarnConf)
+      throws IOException, SliderException;
+}

http://git-wip-us.apache.org/repos/asf/hadoop/blob/40ab068e/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/provider/ProviderUtils.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/provider/ProviderUtils.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/provider/ProviderUtils.java
new file mode 100644
index 0000000..ec0c2ca
--- /dev/null
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/provider/ProviderUtils.java
@@ -0,0 +1,402 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.yarn.service.provider;
+
+import org.apache.hadoop.fs.FSDataOutputStream;
+import org.apache.hadoop.fs.FileStatus;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.permission.FsAction;
+import org.apache.hadoop.fs.permission.FsPermission;
+import org.apache.hadoop.yarn.api.records.LocalResource;
+import org.apache.hadoop.yarn.api.records.LocalResourceType;
+import org.apache.hadoop.yarn.service.ServiceContext;
+import org.apache.hadoop.yarn.service.api.records.Service;
+import org.apache.hadoop.yarn.service.api.records.Component;
+import org.apache.hadoop.yarn.service.api.records.ConfigFile;
+import org.apache.hadoop.yarn.service.api.records.ConfigFormat;
+import org.apache.hadoop.yarn.service.api.records.Configuration;
+import org.apache.hadoop.yarn.service.component.instance.ComponentInstance;
+import org.apache.hadoop.yarn.service.conf.YarnServiceConstants;
+import org.apache.hadoop.yarn.service.conf.YarnServiceConf;
+import org.apache.hadoop.yarn.service.containerlaunch.AbstractLauncher;
+import org.apache.hadoop.yarn.service.exceptions.BadCommandArgumentsException;
+import org.apache.hadoop.yarn.service.exceptions.SliderException;
+import org.apache.hadoop.yarn.service.utils.PublishedConfiguration;
+import org.apache.hadoop.yarn.service.utils.PublishedConfigurationOutputter;
+import org.apache.hadoop.yarn.service.utils.SliderFileSystem;
+import org.apache.hadoop.yarn.service.utils.SliderUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.File;
+import java.io.FileNotFoundException;
+import java.io.IOException;
+import java.io.OutputStream;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.concurrent.ExecutionException;
+import java.util.regex.Pattern;
+
+import static org.apache.hadoop.yarn.service.api.ServiceApiConstants.*;
+
+/**
+ * A collection of utility methods handy for providers; logging goes to a
+ * shared class-level logger.
+ */
+public class ProviderUtils implements YarnServiceConstants {
+
+  protected static final Logger log =
+      LoggerFactory.getLogger(ProviderUtils.class);
+
+
+  /**
+   * Add the provider JAR to the local resources. This does not work
+   * on minicluster test runs where the JAR has not been built.
+   * @param providerResources map of provider resources to add these entries to
+   * @param providerClass provider to add
+   * @param jarName name of the jar to use
+   * @param sliderFileSystem target filesystem
+   * @param tempPath path in the cluster FS for temp files
+   * @param libdir relative directory to place resources
+   * @param miniClusterTestRun true if minicluster is being used
+   * @return true if the class was found in a JAR
+   * 
+   * @throws FileNotFoundException if the JAR was not found and this is NOT
+   * a mini cluster test run
+   * @throws IOException IO problems
+   * @throws SliderException any Slider problem
+   */
+  public static boolean addProviderJar(
+      Map<String, LocalResource> providerResources,
+      Class providerClass,
+      String jarName,
+      SliderFileSystem sliderFileSystem,
+      Path tempPath,
+      String libdir,
+      boolean miniClusterTestRun) throws
+      IOException,
+      SliderException {
+    try {
+      SliderUtils.putJar(providerResources,
+          sliderFileSystem,
+          providerClass,
+          tempPath,
+          libdir,
+          jarName);
+      return true;
+    } catch (FileNotFoundException e) {
+      if (miniClusterTestRun) {
+        return false;
+      } else {
+        throw e;
+      }
+    }
+  }
+  
+  /**
+   * Loads all dependency jars from the default path.
+   * @param providerResources map of provider resources to add these entries to
+   * @param sliderFileSystem target filesystem
+   * @param tempPath path in the cluster FS for temp files
+   * @param libDir relative directory to place resources
+   * @param libLocalSrcDir explicitly supplied local libs dir
+   * @throws IOException trouble copying to HDFS
+   * @throws SliderException trouble copying to HDFS
+   */
+  public static void addAllDependencyJars(
+      Map<String, LocalResource> providerResources,
+      SliderFileSystem sliderFileSystem,
+      Path tempPath,
+      String libDir,
+      String libLocalSrcDir)
+      throws IOException, SliderException {
+    if (SliderUtils.isSet(libLocalSrcDir)) {
+      File file = new File(libLocalSrcDir);
+      if (!file.exists() || !file.isDirectory()) {
+        throw new BadCommandArgumentsException(
+            "Supplied lib src dir %s is not valid", libLocalSrcDir);
+      }
+    }
+    SliderUtils.putAllJars(providerResources, sliderFileSystem, tempPath,
+        libDir, libLocalSrcDir);
+  }
+
+  public static String substituteStrWithTokens(String content,
+      Map<String, String> tokensForSubstitution) {
+    for (Map.Entry<String, String> token : tokensForSubstitution.entrySet()) {
+      content =
+          content.replaceAll(Pattern.quote(token.getKey()), token.getValue());
+    }
+    return content;
+  }
+
+  // values in configs are substituted with the corresponding tokens in tokenMap
+  public static void substituteMapWithTokens(Map<String, String> configs,
+      Map<String, String> tokenMap) {
+    for (Map.Entry<String, String> entry : configs.entrySet()) {
+      String value = entry.getValue();
+      if (tokenMap != null) {
+        for (Map.Entry<String, String> token : tokenMap.entrySet()) {
+          value =
+              value.replaceAll(Pattern.quote(token.getKey()), token.getValue());
+        }
+      }
+      entry.setValue(value);
+    }
+  }
+
+  /**
+   * Localize the service keytabs for the service.
+   * @param launcher container launcher
+   * @param fileSystem file system
+   * @throws IOException trouble uploading to HDFS
+   */
+  public void localizeServiceKeytabs(AbstractLauncher launcher,
+      SliderFileSystem fileSystem, Service service) throws IOException {
+
+    Configuration conf = service.getConfiguration();
+    String keytabPathOnHost =
+        conf.getProperty(YarnServiceConf.KEY_AM_KEYTAB_LOCAL_PATH);
+    if (SliderUtils.isUnset(keytabPathOnHost)) {
+      String amKeytabName =
+          conf.getProperty(YarnServiceConf.KEY_AM_LOGIN_KEYTAB_NAME);
+      String keytabDir =
+          conf.getProperty(YarnServiceConf.KEY_HDFS_KEYTAB_DIR);
+      // we need to localize the keytab files in the directory
+      Path keytabDirPath = fileSystem.buildKeytabPath(keytabDir, null,
+          service.getName());
+      boolean serviceKeytabsDeployed = false;
+      if (fileSystem.getFileSystem().exists(keytabDirPath)) {
+        FileStatus[] keytabs = fileSystem.getFileSystem().listStatus(
+            keytabDirPath);
+        LocalResource keytabRes;
+        for (FileStatus keytab : keytabs) {
+          if (!amKeytabName.equals(keytab.getPath().getName())
+              && keytab.getPath().getName().endsWith(".keytab")) {
+            serviceKeytabsDeployed = true;
+            log.info("Localizing keytab {}", keytab.getPath().getName());
+            keytabRes = fileSystem.createAmResource(keytab.getPath(),
+                LocalResourceType.FILE);
+            launcher.addLocalResource(KEYTAB_DIR + "/" +
+                    keytab.getPath().getName(),
+                keytabRes);
+          }
+        }
+      }
+      if (!serviceKeytabsDeployed) {
+        log.warn("No service keytabs for the service have been localized.  "
+            + "If the service requires keytabs for secure operation, "
+            + "please ensure that the required keytabs have been uploaded "
+            + "to the folder {}", keytabDirPath);
+      }
+    }
+  }
+
+  // 1. Create all config files for a component on hdfs for localization
+  // 2. Add the config file to localResource
+  public static synchronized void createConfigFileAndAddLocalResource(
+      AbstractLauncher launcher, SliderFileSystem fs, Component component,
+      Map<String, String> tokensForSubstitution, ComponentInstance instance,
+      ServiceContext context) throws IOException {
+    Path compDir =
+        new Path(new Path(fs.getAppDir(), "components"), component.getName());
+    Path compInstanceDir =
+        new Path(compDir, instance.getCompInstanceName());
+    if (!fs.getFileSystem().exists(compInstanceDir)) {
+      log.info(instance.getCompInstanceId() + ": Creating dir on hdfs: " + compInstanceDir);
+      fs.getFileSystem().mkdirs(compInstanceDir,
+          new FsPermission(FsAction.ALL, FsAction.NONE, FsAction.NONE));
+      instance.setCompInstanceDir(compInstanceDir);
+    } else {
+      log.info("Component instance conf dir already exists: " + compInstanceDir);
+    }
+
+    if (log.isDebugEnabled()) {
+      log.debug("Tokens substitution for component instance: " + instance
+          .getCompInstanceName() + System.lineSeparator()
+          + tokensForSubstitution);
+    }
+
+    for (ConfigFile originalFile : component.getConfiguration().getFiles()) {
+      ConfigFile configFile = originalFile.copy();
+      String fileName = new Path(configFile.getDestFile()).getName();
+
+      // substitute file name
+      for (Map.Entry<String, String> token : tokensForSubstitution.entrySet()) {
+        configFile.setDestFile(configFile.getDestFile()
+            .replaceAll(Pattern.quote(token.getKey()), token.getValue()));
+      }
+
+      Path remoteFile = new Path(compInstanceDir, fileName);
+      if (!fs.getFileSystem().exists(remoteFile)) {
+        log.info("Saving config file on hdfs for component " + instance
+            .getCompInstanceName() + ": " + configFile);
+
+        if (configFile.getSrcFile() != null) {
+          // Load config file template
+          switch (configFile.getType()) {
+          case HADOOP_XML:
+            // Hadoop_xml_template
+            resolveHadoopXmlTemplateAndSaveOnHdfs(fs.getFileSystem(),
+                tokensForSubstitution, configFile, remoteFile, context);
+            break;
+          case TEMPLATE:
+            // plain-template
+            resolvePlainTemplateAndSaveOnHdfs(fs.getFileSystem(),
+                tokensForSubstitution, configFile, remoteFile, context);
+            break;
+          default:
+            log.info("Not supporting loading src_file for " + configFile);
+            break;
+          }
+        } else {
+          // non-template
+          resolveNonTemplateConfigsAndSaveOnHdfs(fs, tokensForSubstitution,
+              instance, configFile, fileName, remoteFile);
+        }
+      }
+
+      // Add resource for localization
+      LocalResource configResource =
+          fs.createAmResource(remoteFile, LocalResourceType.FILE);
+      File destFile = new File(configFile.getDestFile());
+      String symlink = APP_CONF_DIR + "/" + fileName;
+      if (destFile.isAbsolute()) {
+        launcher.addLocalResource(symlink, configResource,
+            configFile.getDestFile());
+        log.info("Add config file for localization: " + symlink + " -> "
+            + configResource.getResource().getFile() + ", dest mount path: "
+            + configFile.getDestFile());
+      } else {
+        launcher.addLocalResource(symlink, configResource);
+        log.info("Add config file for localization: " + symlink + " -> "
+            + configResource.getResource().getFile());
+      }
+    }
+  }
+
+  private static void resolveNonTemplateConfigsAndSaveOnHdfs(SliderFileSystem fs,
+      Map<String, String> tokensForSubstitution, ComponentInstance instance,
+      ConfigFile configFile, String fileName, Path remoteFile)
+      throws IOException {
+    // substitute non-template configs
+    substituteMapWithTokens(configFile.getProps(), tokensForSubstitution);
+
+    // write configs onto hdfs
+    PublishedConfiguration publishedConfiguration =
+        new PublishedConfiguration(fileName,
+            configFile.getProps().entrySet());
+    if (!fs.getFileSystem().exists(remoteFile)) {
+      PublishedConfigurationOutputter configurationOutputter =
+          PublishedConfigurationOutputter.createOutputter(
+              ConfigFormat.resolve(configFile.getType().toString()),
+              publishedConfiguration);
+      try (FSDataOutputStream os = fs.getFileSystem().create(remoteFile)) {
+        configurationOutputter.save(os);
+        os.flush();
+      }
+    } else {
+      log.info("Component instance = " + instance.getCompInstanceName()
+              + ", config file already exists: " + remoteFile);
+    }
+  }
+
+  // 1. substitute config template - only handle hadoop_xml format
+  // 2. save on hdfs
+  @SuppressWarnings("unchecked")
+  private static void resolveHadoopXmlTemplateAndSaveOnHdfs(FileSystem fs,
+      Map<String, String> tokensForSubstitution, ConfigFile configFile,
+      Path remoteFile, ServiceContext context) throws IOException {
+    Map<String, String> conf;
+    try {
+      conf = (Map<String, String>) context.configCache.get(configFile);
+    } catch (ExecutionException e) {
+      log.info("Failed to load config file: " + configFile, e);
+      return;
+    }
+    // make a copy for substitution
+    org.apache.hadoop.conf.Configuration confCopy =
+        new org.apache.hadoop.conf.Configuration(false);
+    for (Map.Entry<String, String> entry : conf.entrySet()) {
+      confCopy.set(entry.getKey(), entry.getValue());
+    }
+    // substitute properties
+    for (Map.Entry<String, String> entry : configFile.getProps().entrySet()) {
+      confCopy.set(entry.getKey(), entry.getValue());
+    }
+    // substitute env variables
+    for (Map.Entry<String, String> entry : confCopy) {
+      String val = entry.getValue();
+      if (val != null) {
+        for (Map.Entry<String, String> token : tokensForSubstitution
+            .entrySet()) {
+          val = val.replaceAll(Pattern.quote(token.getKey()), token.getValue());
+          confCopy.set(entry.getKey(), val);
+        }
+      }
+    }
+    // save on hdfs
+    try (OutputStream output = fs.create(remoteFile)) {
+      confCopy.writeXml(output);
+      log.info("Reading config from: " + configFile.getSrcFile()
+          + ", writing to: " + remoteFile);
+    }
+  }
+
+  // 1) read the template as a string
+  // 2) do token substitution
+  // 3) save on hdfs
+  private static void resolvePlainTemplateAndSaveOnHdfs(FileSystem fs,
+      Map<String, String> tokensForSubstitution, ConfigFile configFile,
+      Path remoteFile, ServiceContext context) {
+    String content;
+    try {
+      content = (String) context.configCache.get(configFile);
+    } catch (ExecutionException e) {
+      log.info("Failed to load config file: " + configFile, e);
+      return;
+    }
+    // substitute tokens
+    content = substituteStrWithTokens(content, tokensForSubstitution);
+
+    try (OutputStream output = fs.create(remoteFile)) {
+      org.apache.commons.io.IOUtils.write(content, output);
+    } catch (IOException e) {
+      log.info("Failed to create " + remoteFile);
+    }
+  }
+
+  /**
+   * Get initial component token map to be substituted into config values.
+   * @return tokens to replace
+   */
+  public static Map<String, String> initCompTokensForSubstitute(
+      ComponentInstance instance) {
+    Map<String, String> tokens = new HashMap<>();
+    tokens.put(COMPONENT_NAME, instance.getCompSpec().getName());
+    tokens
+        .put(COMPONENT_NAME_LC, instance.getCompSpec().getName().toLowerCase());
+    tokens.put(COMPONENT_INSTANCE_NAME, instance.getCompInstanceName());
+    tokens.put(CONTAINER_ID, instance.getContainer().getId().toString());
+    tokens.put(COMPONENT_ID,
+        String.valueOf(instance.getCompInstanceId().getId()));
+    return tokens;
+  }
+}
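
A brief sketch (not part of the patch) of the token substitution helpers above;
the ${...} token names and values are made up for illustration:

import java.util.HashMap;
import java.util.Map;

import org.apache.hadoop.yarn.service.provider.ProviderUtils;

public class TokenSubstitutionSketch {
  public static void main(String[] args) {
    Map<String, String> tokens = new HashMap<>();
    tokens.put("${COMPONENT_INSTANCE_NAME}", "httpd-0");
    tokens.put("${CONTAINER_ID}", "container_e01_0001_01_000002");

    // Substitute tokens in a launch command
    String cmd = ProviderUtils.substituteStrWithTokens(
        "start.sh --name ${COMPONENT_INSTANCE_NAME}", tokens);
    System.out.println(cmd); // start.sh --name httpd-0

    // Substitute tokens in config values, in place
    Map<String, String> configs = new HashMap<>();
    configs.put("log.file", "${CONTAINER_ID}.log");
    ProviderUtils.substituteMapWithTokens(configs, tokens);
    System.out.println(configs.get("log.file")); // container_e01_0001_01_000002.log
  }
}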

http://git-wip-us.apache.org/repos/asf/hadoop/blob/40ab068e/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/provider/defaultImpl/DefaultClientProvider.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/provider/defaultImpl/DefaultClientProvider.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/provider/defaultImpl/DefaultClientProvider.java
new file mode 100644
index 0000000..0920a9c
--- /dev/null
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/provider/defaultImpl/DefaultClientProvider.java
@@ -0,0 +1,46 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.yarn.service.provider.defaultImpl;
+
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.yarn.service.provider.AbstractClientProvider;
+import org.apache.hadoop.yarn.service.api.records.Artifact;
+import org.apache.hadoop.yarn.service.api.records.ConfigFile;
+
+import java.io.IOException;
+import java.nio.file.Paths;
+
+public class DefaultClientProvider extends AbstractClientProvider {
+
+  public DefaultClientProvider() {
+  }
+
+  @Override
+  public void validateArtifact(Artifact artifact, FileSystem fileSystem) {
+  }
+
+  @Override
+  protected void validateConfigFile(ConfigFile configFile, FileSystem
+      fileSystem) throws IOException {
+    // validate dest_file is not absolute
+    if (Paths.get(configFile.getDestFile()).isAbsolute()) {
+      throw new IllegalArgumentException(
+          "Dest_file must not be absolute path: " + configFile.getDestFile());
+    }
+  }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/hadoop/blob/40ab068e/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/provider/defaultImpl/DefaultProviderFactory.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/provider/defaultImpl/DefaultProviderFactory.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/provider/defaultImpl/DefaultProviderFactory.java
new file mode 100644
index 0000000..868bba8
--- /dev/null
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/provider/defaultImpl/DefaultProviderFactory.java
@@ -0,0 +1,51 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.yarn.service.provider.defaultImpl;
+
+import org.apache.hadoop.yarn.service.provider.AbstractClientProvider;
+import org.apache.hadoop.yarn.service.provider.ProviderService;
+import org.apache.hadoop.yarn.service.provider.ProviderFactory;
+
+public final class DefaultProviderFactory extends ProviderFactory {
+  private static final ProviderFactory FACTORY = new
+      DefaultProviderFactory();
+
+  private DefaultProviderFactory() {}
+
+  private static class Client {
+    static final AbstractClientProvider PROVIDER = new DefaultClientProvider();
+  }
+
+  private static class Server {
+    static final ProviderService PROVIDER = new DefaultProviderService();
+  }
+
+  @Override
+  public AbstractClientProvider createClientProvider() {
+    return Client.PROVIDER;
+  }
+
+  @Override
+  public ProviderService createServerProvider() {
+    return Server.PROVIDER;
+  }
+
+  public static ProviderFactory getInstance() {
+    return FACTORY;
+  }
+}
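
For reference, a minimal usage sketch (not part of the patch) showing how the
singleton factory above hands out its client- and server-side providers; the
factory is obtained directly here rather than through any lookup logic the base
ProviderFactory may provide:

  // Illustrative only: resolve the default provider factory and ask it for
  // the client-side and server-side providers held in its nested holder
  // classes, which are initialized on first access.
  ProviderFactory factory = DefaultProviderFactory.getInstance();
  AbstractClientProvider clientProvider = factory.createClientProvider();
  ProviderService serverProvider = factory.createServerProvider();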

http://git-wip-us.apache.org/repos/asf/hadoop/blob/40ab068e/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/provider/defaultImpl/DefaultProviderService.java
----------------------------------------------------------------------
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/provider/defaultImpl/DefaultProviderService.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/provider/defaultImpl/DefaultProviderService.java
new file mode 100644
index 0000000..a3a0c1f
--- /dev/null
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/provider/defaultImpl/DefaultProviderService.java
@@ -0,0 +1,36 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.yarn.service.provider.defaultImpl;
+
+import org.apache.hadoop.yarn.service.api.records.Service;
+import org.apache.hadoop.yarn.service.component.instance.ComponentInstance;
+import org.apache.hadoop.yarn.service.provider.AbstractProviderService;
+import org.apache.hadoop.yarn.service.utils.SliderFileSystem;
+import org.apache.hadoop.yarn.service.containerlaunch.AbstractLauncher;
+
+import java.io.IOException;
+
+public class DefaultProviderService extends AbstractProviderService {
+
+  @Override
+  public void processArtifact(AbstractLauncher launcher,
+      ComponentInstance compInstance, SliderFileSystem fileSystem,
+      Service service)
+      throws IOException {
+  }
+}

http://git-wip-us.apache.org/repos/asf/hadoop/blob/40ab068e/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/provider/docker/DockerClientProvider.java
----------------------------------------------------------------------
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/provider/docker/DockerClientProvider.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/provider/docker/DockerClientProvider.java
new file mode 100644
index 0000000..d4a2254
--- /dev/null
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/provider/docker/DockerClientProvider.java
@@ -0,0 +1,53 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.yarn.service.provider.docker;
+
+import org.apache.commons.lang.StringUtils;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.yarn.service.api.records.Artifact;
+import org.apache.hadoop.yarn.service.api.records.ConfigFile;
+import org.apache.hadoop.yarn.service.conf.YarnServiceConstants;
+import org.apache.hadoop.yarn.service.provider.AbstractClientProvider;
+import org.apache.hadoop.yarn.service.exceptions.RestApiErrorMessages;
+
+import java.io.IOException;
+
+public class DockerClientProvider extends AbstractClientProvider
+    implements YarnServiceConstants {
+
+  public DockerClientProvider() {
+    super();
+  }
+
+  @Override
+  public void validateArtifact(Artifact artifact, FileSystem fileSystem) {
+    if (artifact == null) {
+      throw new IllegalArgumentException(
+          RestApiErrorMessages.ERROR_ARTIFACT_INVALID);
+    }
+    if (StringUtils.isEmpty(artifact.getId())) {
+      throw new IllegalArgumentException(
+          RestApiErrorMessages.ERROR_ARTIFACT_ID_INVALID);
+    }
+  }
+
+  @Override
+  protected void validateConfigFile(ConfigFile configFile, FileSystem
+      fileSystem) throws IOException {
+  }
+}

http://git-wip-us.apache.org/repos/asf/hadoop/blob/40ab068e/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/provider/docker/DockerKeys.java
----------------------------------------------------------------------
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/provider/docker/DockerKeys.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/provider/docker/DockerKeys.java
new file mode 100644
index 0000000..f30c002
--- /dev/null
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/provider/docker/DockerKeys.java
@@ -0,0 +1,30 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.yarn.service.provider.docker;
+
+public interface DockerKeys {
+  String PROVIDER_DOCKER = "docker";
+  String DOCKER_PREFIX = "docker.";
+  String DOCKER_IMAGE = DOCKER_PREFIX + "image";
+  String DOCKER_NETWORK = DOCKER_PREFIX + "network";
+  String DOCKER_USE_PRIVILEGED = DOCKER_PREFIX + "usePrivileged";
+  String DOCKER_START_COMMAND = DOCKER_PREFIX + "startCommand";
+
+  String DEFAULT_DOCKER_NETWORK = "bridge";
+  Boolean DEFAULT_DOCKER_USE_PRIVILEGED = false;
+}
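
For reference, a hedged sketch (not part of the patch) of how one of these keys
is read back together with its default; the key names and the
getProperty(key, default) call mirror the DockerProviderService file later in
this commit, while the configuration object stands in for a component's
resolved configuration:

  // Illustrative only: look up the docker network for a component,
  // falling back to the "bridge" default declared in DockerKeys.
  String network = configuration.getProperty(DockerKeys.DOCKER_NETWORK,
      DockerKeys.DEFAULT_DOCKER_NETWORK);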

http://git-wip-us.apache.org/repos/asf/hadoop/blob/40ab068e/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/provider/docker/DockerProviderFactory.java
----------------------------------------------------------------------
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/provider/docker/DockerProviderFactory.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/provider/docker/DockerProviderFactory.java
new file mode 100644
index 0000000..57330ab
--- /dev/null
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/provider/docker/DockerProviderFactory.java
@@ -0,0 +1,52 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.yarn.service.provider.docker;
+
+import org.apache.hadoop.yarn.service.provider.AbstractClientProvider;
+import org.apache.hadoop.yarn.service.provider.ProviderService;
+import org.apache.hadoop.yarn.service.provider.ProviderFactory;
+
+public class DockerProviderFactory extends ProviderFactory {
+  private static final ProviderFactory FACTORY = new
+      DockerProviderFactory();
+
+  private DockerProviderFactory() {
+  }
+
+  private static class Client {
+    static final AbstractClientProvider PROVIDER = new DockerClientProvider();
+  }
+
+  private static class Server {
+    static final ProviderService PROVIDER = new DockerProviderService();
+  }
+
+  @Override
+  public AbstractClientProvider createClientProvider() {
+    return Client.PROVIDER;
+  }
+
+  @Override
+  public ProviderService createServerProvider() {
+    return Server.PROVIDER;
+  }
+
+  public static ProviderFactory getInstance() {
+    return FACTORY;
+  }
+}

http://git-wip-us.apache.org/repos/asf/hadoop/blob/40ab068e/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/provider/docker/DockerProviderService.java
----------------------------------------------------------------------
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/provider/docker/DockerProviderService.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/provider/docker/DockerProviderService.java
new file mode 100644
index 0000000..0741947
--- /dev/null
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/provider/docker/DockerProviderService.java
@@ -0,0 +1,57 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.yarn.service.provider.docker;
+
+import org.apache.hadoop.registry.client.api.RegistryConstants;
+import org.apache.hadoop.registry.client.binding.RegistryUtils;
+import org.apache.hadoop.yarn.service.component.instance.ComponentInstance;
+import org.apache.hadoop.yarn.service.provider.AbstractProviderService;
+import org.apache.hadoop.yarn.service.api.records.Service;
+import org.apache.hadoop.yarn.service.utils.SliderFileSystem;
+import org.apache.hadoop.yarn.service.containerlaunch.AbstractLauncher;
+
+import java.io.IOException;
+import java.text.MessageFormat;
+
+public class DockerProviderService extends AbstractProviderService
+    implements DockerKeys {
+
+  @Override
+  public void processArtifact(AbstractLauncher launcher,
+      ComponentInstance compInstance, SliderFileSystem fileSystem,
+      Service service) throws IOException {
+    launcher.setYarnDockerMode(true);
+    launcher.setDockerImage(compInstance.getCompSpec().getArtifact().getId());
+    launcher.setDockerNetwork(compInstance.getCompSpec().getConfiguration()
+        .getProperty(DOCKER_NETWORK, DEFAULT_DOCKER_NETWORK));
+    String domain = compInstance.getComponent().getScheduler().getConfig()
+        .get(RegistryConstants.KEY_DNS_DOMAIN);
+    String hostname;
+    if (domain == null || domain.isEmpty()) {
+      hostname = MessageFormat
+          .format("{0}.{1}.{2}", compInstance.getCompInstanceName(),
+              service.getName(), RegistryUtils.currentUser());
+    } else {
+      hostname = MessageFormat
+          .format("{0}.{1}.{2}.{3}", compInstance.getCompInstanceName(),
+              service.getName(), RegistryUtils.currentUser(), domain);
+    }
+    launcher.setDockerHostname(hostname);
+    launcher.setRunPrivilegedContainer(
+        compInstance.getCompSpec().getRunPrivilegedContainer());
+  }
+}
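
As a worked illustration of the hostname logic above (component, service, and
user names are hypothetical): an instance "web-0" of service "my-app" run by
user "devuser" gets the Docker hostname "web-0.my-app.devuser" when no
RegistryConstants.KEY_DNS_DOMAIN is configured, and
"web-0.my-app.devuser.example.com" when the domain is set to "example.com".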

http://git-wip-us.apache.org/repos/asf/hadoop/blob/40ab068e/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/provider/tarball/TarballClientProvider.java
----------------------------------------------------------------------
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/provider/tarball/TarballClientProvider.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/provider/tarball/TarballClientProvider.java
new file mode 100644
index 0000000..01f7b20
--- /dev/null
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/provider/tarball/TarballClientProvider.java
@@ -0,0 +1,65 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.yarn.service.provider.tarball;
+
+import org.apache.commons.lang.StringUtils;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.yarn.service.api.records.Artifact;
+import org.apache.hadoop.yarn.service.api.records.ConfigFile;
+import org.apache.hadoop.yarn.service.conf.YarnServiceConstants;
+import org.apache.hadoop.yarn.service.provider.AbstractClientProvider;
+import org.apache.hadoop.yarn.service.exceptions.RestApiErrorMessages;
+
+import java.io.IOException;
+import java.nio.file.Paths;
+
+public class TarballClientProvider extends AbstractClientProvider
+    implements YarnServiceConstants {
+
+  public TarballClientProvider() {
+  }
+
+  @Override
+  public void validateArtifact(Artifact artifact, FileSystem fs)
+      throws IOException {
+    if (artifact == null) {
+      throw new IllegalArgumentException(
+          RestApiErrorMessages.ERROR_ARTIFACT_INVALID);
+    }
+    if (StringUtils.isEmpty(artifact.getId())) {
+      throw new IllegalArgumentException(
+          RestApiErrorMessages.ERROR_ARTIFACT_ID_INVALID);
+    }
+    Path p = new Path(artifact.getId());
+    if (!fs.exists(p)) {
+      throw new IllegalArgumentException("Artifact tarball does not exist: "
+          + artifact.getId());
+    }
+  }
+
+  @Override
+  protected void validateConfigFile(ConfigFile configFile, FileSystem
+      fileSystem) throws IOException {
+    // validate dest_file is not absolute
+    if (Paths.get(configFile.getDestFile()).isAbsolute()) {
+      throw new IllegalArgumentException(
+          "Dest_file must not be absolute path: " + configFile.getDestFile());
+    }
+  }
+}
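
As a hedged illustration of the validation above (the paths are hypothetical):
an artifact id such as "hdfs:///user/yarn/packages/app.tar.gz" passes only if
the tarball already exists on the target FileSystem, while a ConfigFile whose
dest_file is an absolute path such as "/etc/app/app-site.xml" is rejected;
relative destinations such as "conf/app-site.xml" are accepted.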

http://git-wip-us.apache.org/repos/asf/hadoop/blob/40ab068e/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/provider/tarball/TarballProviderFactory.java
----------------------------------------------------------------------
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/provider/tarball/TarballProviderFactory.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/provider/tarball/TarballProviderFactory.java
new file mode 100644
index 0000000..9d81f66
--- /dev/null
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/provider/tarball/TarballProviderFactory.java
@@ -0,0 +1,52 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.yarn.service.provider.tarball;
+
+import org.apache.hadoop.yarn.service.provider.AbstractClientProvider;
+import org.apache.hadoop.yarn.service.provider.ProviderService;
+import org.apache.hadoop.yarn.service.provider.ProviderFactory;
+
+public class TarballProviderFactory extends ProviderFactory {
+  private static final ProviderFactory FACTORY = new
+      TarballProviderFactory();
+
+  private TarballProviderFactory() {
+  }
+
+  private static class Client {
+    static final AbstractClientProvider PROVIDER = new TarballClientProvider();
+  }
+
+  private static class Server {
+    static final ProviderService PROVIDER = new TarballProviderService();
+  }
+
+  @Override
+  public AbstractClientProvider createClientProvider() {
+    return Client.PROVIDER;
+  }
+
+  @Override
+  public ProviderService createServerProvider() {
+    return Server.PROVIDER;
+  }
+
+  public static ProviderFactory getInstance() {
+    return FACTORY;
+  }
+}

http://git-wip-us.apache.org/repos/asf/hadoop/blob/40ab068e/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/provider/tarball/TarballProviderService.java
----------------------------------------------------------------------
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/provider/tarball/TarballProviderService.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/provider/tarball/TarballProviderService.java
new file mode 100644
index 0000000..9f29c8b
--- /dev/null
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/provider/tarball/TarballProviderService.java
@@ -0,0 +1,48 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.yarn.service.provider.tarball;
+
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.yarn.api.records.LocalResource;
+import org.apache.hadoop.yarn.api.records.LocalResourceType;
+import org.apache.hadoop.yarn.service.api.records.Service;
+import org.apache.hadoop.yarn.service.component.instance.ComponentInstance;
+import org.apache.hadoop.yarn.service.provider.AbstractProviderService;
+import org.apache.hadoop.yarn.service.utils.SliderFileSystem;
+import org.apache.hadoop.yarn.service.containerlaunch.AbstractLauncher;
+
+import java.io.IOException;
+
+public class TarballProviderService extends AbstractProviderService {
+
+  @Override
+  public void processArtifact(AbstractLauncher launcher,
+      ComponentInstance instance, SliderFileSystem fileSystem,
+      Service service)
+      throws IOException {
+    Path artifact = new Path(instance.getCompSpec().getArtifact().getId());
+    if (!fileSystem.isFile(artifact)) {
+      throw new IOException(
+          "Package doesn't exist as a resource: " + artifact.toString());
+    }
+    log.info("Adding resource {}", artifact.toString());
+    LocalResourceType type = LocalResourceType.ARCHIVE;
+    LocalResource packageResource = fileSystem.createAmResource(artifact,
+        type);
+    launcher.addLocalResource(APP_LIB_DIR, packageResource);
+  }
+}

http://git-wip-us.apache.org/repos/asf/hadoop/blob/40ab068e/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/registry/CustomRegistryConstants.java
----------------------------------------------------------------------
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/registry/CustomRegistryConstants.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/registry/CustomRegistryConstants.java
new file mode 100644
index 0000000..56634f6
--- /dev/null
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/registry/CustomRegistryConstants.java
@@ -0,0 +1,57 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.yarn.service.registry;
+
+/**
+ * These are constants unique to the Slider AM
+ */
+public class CustomRegistryConstants {
+
+  public static final String MANAGEMENT_REST_API =
+      "classpath:org.apache.slider.management";
+  
+  public static final String REGISTRY_REST_API =
+      "classpath:org.apache.slider.registry";
+  
+  public static final String PUBLISHER_REST_API =
+      "classpath:org.apache.slider.publisher";
+
+  public static final String PUBLISHER_CONFIGURATIONS_API =
+      "classpath:org.apache.slider.publisher.configurations";
+
+  public static final String PUBLISHER_EXPORTS_API =
+      "classpath:org.apache.slider.publisher.exports";
+
+  public static final String PUBLISHER_DOCUMENTS_API =
+      "classpath:org.apache.slider.publisher.documents";
+
+  public static final String AGENT_SECURE_REST_API =
+      "classpath:org.apache.slider.agents.secure";
+
+  public static final String AGENT_ONEWAY_REST_API =
+      "classpath:org.apache.slider.agents.oneway";
+
+  public static final String AM_IPC_PROTOCOL =
+      "classpath:org.apache.slider.appmaster.ipc";
+
+  public static final String AM_REST_BASE =
+      "classpath:org.apache.slider.client.rest";
+
+  public static final String WEB_UI = "http://";
+}

