http://git-wip-us.apache.org/repos/asf/incubator-airflow-site/blob/11437c14/_modules/airflow/contrib/hooks/spark_submit_hook.html
----------------------------------------------------------------------
diff --git a/_modules/airflow/contrib/hooks/spark_submit_hook.html b/_modules/airflow/contrib/hooks/spark_submit_hook.html
new file mode 100644
index 0000000..6903f5f
--- /dev/null
+++ b/_modules/airflow/contrib/hooks/spark_submit_hook.html
@@ -0,0 +1,799 @@
Source code for airflow.contrib.hooks.spark_submit_hook

# -*- coding: utf-8 -*-
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements.  See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership.  The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License.  You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied.  See the License for the
# specific language governing permissions and limitations
# under the License.
#
+<span class="kn">import</span> <span class="nn">os</span>
+<span class="kn">import</span> <span class="nn">subprocess</span>
+<span class="kn">import</span> <span class="nn">re</span>
+<span class="kn">import</span> <span class="nn">time</span>
+
+<span class="kn">from</span> <span class="nn">airflow.hooks.base_hook</span>
<span class="k">import</span> <span class="n">BaseHook</span>
+<span class="kn">from</span> <span class="nn">airflow.exceptions</span> <span
class="k">import</span> <span class="n">AirflowException</span>
+<span class="kn">from</span> <span
class="nn">airflow.utils.log.logging_mixin</span> <span class="k">import</span>
<span class="n">LoggingMixin</span>
+<span class="kn">from</span> <span
class="nn">airflow.contrib.kubernetes</span> <span class="k">import</span>
<span class="n">kube_client</span>
+<span class="kn">from</span> <span class="nn">kubernetes.client.rest</span>
<span class="k">import</span> <span class="n">ApiException</span>
+
+
+<div class="viewcode-block" id="SparkSubmitHook"><a class="viewcode-back"
href="../../../../code.html#airflow.contrib.hooks.spark_submit_hook.SparkSubmitHook">[docs]</a><span
class="k">class</span> <span class="nc">SparkSubmitHook</span><span
class="p">(</span><span class="n">BaseHook</span><span class="p">,</span> <span
class="n">LoggingMixin</span><span class="p">):</span>
+ <span class="sd">"""</span>
+<span class="sd"> This hook is a wrapper around the spark-submit binary to
kick off a spark-submit job.</span>
+<span class="sd"> It requires that the "spark-submit" binary is
in the PATH or the spark_home to be</span>
+<span class="sd"> supplied.</span>
+<span class="sd"> :param conf: Arbitrary Spark configuration
properties</span>
+<span class="sd"> :type conf: dict</span>
+<span class="sd"> :param conn_id: The connection id as configured in
Airflow administration. When an</span>
+<span class="sd"> invalid connection_id is supplied, it
will default to yarn.</span>
+<span class="sd"> :type conn_id: str</span>
+<span class="sd"> :param files: Upload additional files to the executor
running the job, separated by a</span>
+<span class="sd"> comma. Files will be placed in the working
directory of each executor.</span>
+<span class="sd"> For example, serialized objects.</span>
+<span class="sd"> :type files: str</span>
+<span class="sd"> :param py_files: Additional python files used by the job,
can be .zip, .egg or .py.</span>
+<span class="sd"> :type py_files: str</span>
+<span class="sd"> :param driver_classpath: Additional, driver-specific,
classpath settings.</span>
+<span class="sd"> :type driver_classpath: str</span>
+<span class="sd"> :param jars: Submit additional jars to upload and place
them in executor classpath.</span>
+<span class="sd"> :type jars: str</span>
+<span class="sd"> :param java_class: the main class of the Java
application</span>
+<span class="sd"> :type java_class: str</span>
+<span class="sd"> :param packages: Comma-separated list of maven
coordinates of jars to include on the</span>
+<span class="sd"> driver and executor classpaths</span>
+<span class="sd"> :type packages: str</span>
+<span class="sd"> :param exclude_packages: Comma-separated list of maven
coordinates of jars to exclude</span>
+<span class="sd"> while resolving the dependencies provided in
'packages'</span>
+<span class="sd"> :type exclude_packages: str</span>
+<span class="sd"> :param repositories: Comma-separated list of additional
remote repositories to search</span>
+<span class="sd"> for the maven coordinates given with
'packages'</span>
+<span class="sd"> :type repositories: str</span>
+<span class="sd"> :param total_executor_cores: (Standalone & Mesos
only) Total cores for all executors</span>
+<span class="sd"> (Default: all the available cores on the worker)</span>
+<span class="sd"> :type total_executor_cores: int</span>
+<span class="sd"> :param executor_cores: (Standalone, YARN and Kubernetes
only) Number of cores per</span>
+<span class="sd"> executor (Default: 2)</span>
+<span class="sd"> :type executor_cores: int</span>
+<span class="sd"> :param executor_memory: Memory per executor (e.g. 1000M,
2G) (Default: 1G)</span>
+<span class="sd"> :type executor_memory: str</span>
+<span class="sd"> :param driver_memory: Memory allocated to the driver
(e.g. 1000M, 2G) (Default: 1G)</span>
+<span class="sd"> :type driver_memory: str</span>
+<span class="sd"> :param keytab: Full path to the file that contains the
keytab</span>
+<span class="sd"> :type keytab: str</span>
+<span class="sd"> :param principal: The name of the kerberos principal used
for keytab</span>
+<span class="sd"> :type principal: str</span>
+<span class="sd"> :param name: Name of the job (default
airflow-spark)</span>
+<span class="sd"> :type name: str</span>
+<span class="sd"> :param num_executors: Number of executors to launch</span>
+<span class="sd"> :type num_executors: int</span>
+<span class="sd"> :param application_args: Arguments for the application
being submitted</span>
+<span class="sd"> :type application_args: list</span>
+<span class="sd"> :param env_vars: Environment variables for spark-submit.
It</span>
+<span class="sd"> supports yarn and k8s mode too.</span>
+<span class="sd"> :type env_vars: dict</span>
+<span class="sd"> :param verbose: Whether to pass the verbose flag to
spark-submit process for debugging</span>
+<span class="sd"> :type verbose: bool</span>
+<span class="sd"> """</span>
    def __init__(self,
                 conf=None,
                 conn_id='spark_default',
                 files=None,
                 py_files=None,
                 driver_classpath=None,
                 jars=None,
                 java_class=None,
                 packages=None,
                 exclude_packages=None,
                 repositories=None,
                 total_executor_cores=None,
                 executor_cores=None,
                 executor_memory=None,
                 driver_memory=None,
                 keytab=None,
                 principal=None,
                 name='default-name',
                 num_executors=None,
                 application_args=None,
                 env_vars=None,
                 verbose=False):
        self._conf = conf
        self._conn_id = conn_id
        self._files = files
        self._py_files = py_files
        self._driver_classpath = driver_classpath
        self._jars = jars
        self._java_class = java_class
        self._packages = packages
        self._exclude_packages = exclude_packages
        self._repositories = repositories
        self._total_executor_cores = total_executor_cores
        self._executor_cores = executor_cores
        self._executor_memory = executor_memory
        self._driver_memory = driver_memory
        self._keytab = keytab
        self._principal = principal
        self._name = name
        self._num_executors = num_executors
        self._application_args = application_args
        self._env_vars = env_vars
        self._verbose = verbose
        self._submit_sp = None
        self._yarn_application_id = None
        self._kubernetes_driver_pod = None

        self._connection = self._resolve_connection()
        self._is_yarn = 'yarn' in self._connection['master']
        self._is_kubernetes = 'k8s' in self._connection['master']

        self._should_track_driver_status = self._resolve_should_track_driver_status()
        self._driver_id = None
        self._driver_status = None
        self._spark_exit_code = None

    def _resolve_should_track_driver_status(self):
        """
        Determines whether or not this hook should poll the spark driver status through
        subsequent spark-submit status requests after the initial spark-submit request
        :return: if the driver status should be tracked
        """
        return ('spark://' in self._connection['master'] and
                self._connection['deploy_mode'] == 'cluster')

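Driver-status polling therefore only kicks in for a Spark standalone master in
cluster deploy mode; a few illustrative combinations (all values hypothetical):

# master='spark://spark-master:7077', deploy_mode='cluster'  -> status is tracked
# master='spark://spark-master:7077', deploy_mode='client'   -> not tracked
# master='yarn' or master='k8s://https://host:443'           -> not tracked
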
    def _resolve_connection(self):
        # Build from connection master or default to yarn if not available
        conn_data = {'master': 'yarn',
                     'queue': None,
                     'deploy_mode': None,
                     'spark_home': None,
                     'spark_binary': 'spark-submit',
                     'namespace': 'default'}

        try:
            # Master can be local, yarn, spark://HOST:PORT, mesos://HOST:PORT and
            # k8s://https://<HOST>:<PORT>
            conn = self.get_connection(self._conn_id)
            if conn.port:
                conn_data['master'] = "{}:{}".format(conn.host, conn.port)
            else:
                conn_data['master'] = conn.host

            # Determine optional yarn queue from the extra field
            extra = conn.extra_dejson
            conn_data['queue'] = extra.get('queue', None)
            conn_data['deploy_mode'] = extra.get('deploy-mode', None)
            conn_data['spark_home'] = extra.get('spark-home', None)
            conn_data['spark_binary'] = extra.get('spark-binary', 'spark-submit')
            conn_data['namespace'] = extra.get('namespace', 'default')
        except AirflowException:
            self.log.debug(
                "Could not load connection string %s, defaulting to %s",
                self._conn_id, conn_data['master']
            )

        return conn_data

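As a sketch of the input this method consumes, a 'spark_default' connection shaped
like the following (host, port, and extra values are hypothetical) resolves to a
standalone master, with the queue and deploy mode taken from the extra JSON:

# Hypothetical Airflow connection:
#   host:  spark://spark-master        port: 7077
#   extra: {"queue": "root.default", "deploy-mode": "cluster",
#           "spark-home": "/opt/spark", "namespace": "default"}
# _resolve_connection() then yields:
#   {'master': 'spark://spark-master:7077', 'queue': 'root.default',
#    'deploy_mode': 'cluster', 'spark_home': '/opt/spark',
#    'spark_binary': 'spark-submit', 'namespace': 'default'}
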
    def get_conn(self):
        pass

    def _get_spark_binary_path(self):
        # If the spark_home is passed then build the spark-submit executable path using
        # the spark_home; otherwise assume that spark-submit is present in the path to
        # the executing user
        if self._connection['spark_home']:
            connection_cmd = [os.path.join(self._connection['spark_home'], 'bin',
                                           self._connection['spark_binary'])]
        else:
            connection_cmd = [self._connection['spark_binary']]

        return connection_cmd

    def _build_spark_submit_command(self, application):
        """
        Construct the spark-submit command to execute.
        :param application: command to append to the spark-submit command
        :type application: str
        :return: full command to be executed
        """
        connection_cmd = self._get_spark_binary_path()

        # The URL of the spark master
        connection_cmd += ["--master", self._connection['master']]

        if self._conf:
            for key in self._conf:
                connection_cmd += ["--conf", "{}={}".format(key, str(self._conf[key]))]
        if self._env_vars and (self._is_kubernetes or self._is_yarn):
            if self._is_yarn:
                tmpl = "spark.yarn.appMasterEnv.{}={}"
            else:
                tmpl = "spark.kubernetes.driverEnv.{}={}"
            for key in self._env_vars:
                connection_cmd += [
                    "--conf",
                    tmpl.format(key, str(self._env_vars[key]))]
        elif self._env_vars and self._connection['deploy_mode'] != "cluster":
            self._env = self._env_vars  # Do it on Popen of the process
        elif self._env_vars and self._connection['deploy_mode'] == "cluster":
            raise AirflowException(
                "SparkSubmitHook env_vars is not supported in standalone-cluster mode.")
        if self._is_kubernetes:
            connection_cmd += ["--conf", "spark.kubernetes.namespace={}".format(
                self._connection['namespace'])]
        if self._files:
            connection_cmd += ["--files", self._files]
        if self._py_files:
            connection_cmd += ["--py-files", self._py_files]
        if self._driver_classpath:
            connection_cmd += ["--driver-classpath", self._driver_classpath]
        if self._jars:
            connection_cmd += ["--jars", self._jars]
        if self._packages:
            connection_cmd += ["--packages", self._packages]
        if self._exclude_packages:
            connection_cmd += ["--exclude-packages", self._exclude_packages]
        if self._repositories:
            connection_cmd += ["--repositories", self._repositories]
        if self._num_executors:
            connection_cmd += ["--num-executors", str(self._num_executors)]
        if self._total_executor_cores:
            connection_cmd += ["--total-executor-cores", str(self._total_executor_cores)]
        if self._executor_cores:
            connection_cmd += ["--executor-cores", str(self._executor_cores)]
        if self._executor_memory:
            connection_cmd += ["--executor-memory", self._executor_memory]
        if self._driver_memory:
            connection_cmd += ["--driver-memory", self._driver_memory]
        if self._keytab:
            connection_cmd += ["--keytab", self._keytab]
        if self._principal:
            connection_cmd += ["--principal", self._principal]
        if self._name:
            connection_cmd += ["--name", self._name]
        if self._java_class:
            connection_cmd += ["--class", self._java_class]
        if self._verbose:
            connection_cmd += ["--verbose"]
        if self._connection['queue']:
            connection_cmd += ["--queue", self._connection['queue']]
        if self._connection['deploy_mode']:
            connection_cmd += ["--deploy-mode", self._connection['deploy_mode']]

        # The actual script to execute
        connection_cmd += [application]

        # Append any application arguments
        if self._application_args:
            connection_cmd += self._application_args

        self.log.info("Spark-Submit cmd: %s", connection_cmd)

        return connection_cmd

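For a sense of the result, a hook configured with a handful of the options above
would log a command list along these lines (every value hypothetical):

# ['spark-submit', '--master', 'yarn',
#  '--conf', 'spark.eventLog.enabled=true',
#  '--num-executors', '4',
#  '--executor-memory', '2G',
#  '--name', 'airflow-spark',
#  '--queue', 'root.default',
#  '--deploy-mode', 'cluster',
#  '/path/to/app.py', 'arg1']
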
    def _build_track_driver_status_command(self):
        """
        Construct the command to poll the driver status.

        :return: full command to be executed
        """
        connection_cmd = self._get_spark_binary_path()

        # The URL of the spark master
        connection_cmd += ["--master", self._connection['master']]

        # The driver id so we can poll for its status
        if self._driver_id:
            connection_cmd += ["--status", self._driver_id]
        else:
            raise AirflowException(
                "Invalid status: attempted to poll driver " +
                "status but no driver id is known. Giving up.")

        self.log.debug("Poll driver status cmd: %s", connection_cmd)

        return connection_cmd

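Assuming a standalone cluster master and a driver id already scraped from the
submit logs, the resulting poll command would look like this sketch (values
hypothetical):

# ['spark-submit', '--master', 'spark://spark-master:7077',
#  '--status', 'driver-20180101123456-0001']
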
+<div class="viewcode-block" id="SparkSubmitHook.submit"><a
class="viewcode-back"
href="../../../../code.html#airflow.contrib.hooks.spark_submit_hook.SparkSubmitHook.submit">[docs]</a>
<span class="k">def</span> <span class="nf">submit</span><span
class="p">(</span><span class="bp">self</span><span class="p">,</span> <span
class="n">application</span><span class="o">=</span><span
class="s2">""</span><span class="p">,</span> <span
class="o">**</span><span class="n">kwargs</span><span class="p">):</span>
+ <span class="sd">"""</span>
+<span class="sd"> Remote Popen to execute the spark-submit job</span>
+
+<span class="sd"> :param application: Submitted application, jar or py
file</span>
+<span class="sd"> :type application: str</span>
+<span class="sd"> :param kwargs: extra arguments to Popen (see
subprocess.Popen)</span>
+<span class="sd"> """</span>
+ <span class="n">spark_submit_cmd</span> <span class="o">=</span> <span
class="bp">self</span><span class="o">.</span><span
class="n">_build_spark_submit_command</span><span class="p">(</span><span
class="n">application</span><span class="p">)</span>
+
+ <span class="k">if</span> <span class="nb">hasattr</span><span
class="p">(</span><span class="bp">self</span><span class="p">,</span> <span
class="s1">'_env'</span><span class="p">):</span>
+ <span class="n">env</span> <span class="o">=</span> <span
class="n">os</span><span class="o">.</span><span class="n">environ</span><span
class="o">.</span><span class="n">copy</span><span class="p">()</span>
+ <span class="n">env</span><span class="o">.</span><span
class="n">update</span><span class="p">(</span><span
class="bp">self</span><span class="o">.</span><span class="n">_env</span><span
class="p">)</span>
+ <span class="n">kwargs</span><span class="p">[</span><span
class="s2">"env"</span><span class="p">]</span> <span
class="o">=</span> <span class="n">env</span>
+
+ <span class="bp">self</span><span class="o">.</span><span
class="n">_submit_sp</span> <span class="o">=</span> <span
class="n">subprocess</span><span class="o">.</span><span
class="n">Popen</span><span class="p">(</span><span
class="n">spark_submit_cmd</span><span class="p">,</span>
+ <span class="n">stdout</span><span
class="o">=</span><span class="n">subprocess</span><span
class="o">.</span><span class="n">PIPE</span><span class="p">,</span>
+ <span class="n">stderr</span><span
class="o">=</span><span class="n">subprocess</span><span
class="o">.</span><span class="n">STDOUT</span><span class="p">,</span>
+ <span class="n">bufsize</span><span
class="o">=-</span><span class="mi">1</span><span class="p">,</span>
+ <span
class="n">universal_newlines</span><span class="o">=</span><span
class="kc">True</span><span class="p">,</span>
+ <span class="o">**</span><span
class="n">kwargs</span><span class="p">)</span>
+
+ <span class="bp">self</span><span class="o">.</span><span
class="n">_process_spark_submit_log</span><span class="p">(</span><span
class="nb">iter</span><span class="p">(</span><span class="bp">self</span><span
class="o">.</span><span class="n">_submit_sp</span><span
class="o">.</span><span class="n">stdout</span><span class="o">.</span><span
class="n">readline</span><span class="p">,</span> <span
class="s1">''</span><span class="p">))</span>
+ <span class="n">returncode</span> <span class="o">=</span> <span
class="bp">self</span><span class="o">.</span><span
class="n">_submit_sp</span><span class="o">.</span><span
class="n">wait</span><span class="p">()</span>
+
+ <span class="c1"># Check spark-submit return code. In Kubernetes mode,
also check the value</span>
+ <span class="c1"># of exit code in the log, as it may differ.</span>
+ <span class="k">if</span> <span class="n">returncode</span> <span
class="ow">or</span> <span class="p">(</span><span class="bp">self</span><span
class="o">.</span><span class="n">_is_kubernetes</span> <span
class="ow">and</span> <span class="bp">self</span><span class="o">.</span><span
class="n">_spark_exit_code</span> <span class="o">!=</span> <span
class="mi">0</span><span class="p">):</span>
+ <span class="k">raise</span> <span
class="n">AirflowException</span><span class="p">(</span>
+ <span class="s2">"Cannot execute: </span><span
class="si">{}</span><span class="s2">. Error code is: </span><span
class="si">{}</span><span class="s2">."</span><span
class="o">.</span><span class="n">format</span><span class="p">(</span>
+ <span class="n">spark_submit_cmd</span><span
class="p">,</span> <span class="n">returncode</span>
+ <span class="p">)</span>
+ <span class="p">)</span>
+
+ <span class="bp">self</span><span class="o">.</span><span
class="n">log</span><span class="o">.</span><span class="n">debug</span><span
class="p">(</span><span class="s2">"Should track driver: </span><span
class="si">{}</span><span class="s2">"</span><span class="o">.</span><span
class="n">format</span><span class="p">(</span><span
class="bp">self</span><span class="o">.</span><span
class="n">_should_track_driver_status</span><span class="p">))</span>
+
+ <span class="c1"># We want the Airflow job to wait until the Spark
driver is finished</span>
+ <span class="k">if</span> <span class="bp">self</span><span
class="o">.</span><span class="n">_should_track_driver_status</span><span
class="p">:</span>
+ <span class="k">if</span> <span class="bp">self</span><span
class="o">.</span><span class="n">_driver_id</span> <span class="ow">is</span>
<span class="kc">None</span><span class="p">:</span>
+ <span class="k">raise</span> <span
class="n">AirflowException</span><span class="p">(</span>
+ <span class="s2">"No driver id is known: something
went wrong when executing "</span> <span class="o">+</span>
+ <span class="s2">"the spark submit
command"</span>
+ <span class="p">)</span>
+
+ <span class="c1"># We start with the SUBMITTED status as initial
status</span>
+ <span class="bp">self</span><span class="o">.</span><span
class="n">_driver_status</span> <span class="o">=</span> <span
class="s2">"SUBMITTED"</span>
+
+ <span class="c1"># Start tracking the driver status (blocking
function)</span>
+ <span class="bp">self</span><span class="o">.</span><span
class="n">_start_driver_status_tracking</span><span class="p">()</span>
+
+ <span class="k">if</span> <span class="bp">self</span><span
class="o">.</span><span class="n">_driver_status</span> <span
class="o">!=</span> <span class="s2">"FINISHED"</span><span
class="p">:</span>
+ <span class="k">raise</span> <span
class="n">AirflowException</span><span class="p">(</span>
+ <span class="s2">"ERROR : Driver </span><span
class="si">{}</span><span class="s2"> badly exited with status </span><span
class="si">{}</span><span class="s2">"</span>
+ <span class="o">.</span><span class="n">format</span><span
class="p">(</span><span class="bp">self</span><span class="o">.</span><span
class="n">_driver_id</span><span class="p">,</span> <span
class="bp">self</span><span class="o">.</span><span
class="n">_driver_status</span><span class="p">)</span>
+ <span class="p">)</span></div>
+
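A minimal usage sketch, assuming a 'spark_default' connection exists; the
application path and arguments below are hypothetical:

from airflow.contrib.hooks.spark_submit_hook import SparkSubmitHook

hook = SparkSubmitHook(
    conn_id='spark_default',
    name='airflow-spark',
    num_executors=4,
    executor_memory='2G',
    application_args=['arg1'])
hook.submit(application='/path/to/app.py')
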
+ <span class="k">def</span> <span
class="nf">_process_spark_submit_log</span><span class="p">(</span><span
class="bp">self</span><span class="p">,</span> <span class="n">itr</span><span
class="p">):</span>
+ <span class="sd">"""</span>
+<span class="sd"> Processes the log files and extracts useful
information out of it.</span>
+
+<span class="sd"> If the deploy-mode is 'client', log the
output of the submit command as those</span>
+<span class="sd"> are the output logs of the Spark worker
directly.</span>
+
+<span class="sd"> Remark: If the driver needs to be tracked for its
status, the log-level of the</span>
+<span class="sd"> spark deploy needs to be at least INFO
(log4j.logger.org.apache.spark.deploy=INFO)</span>
+
+<span class="sd"> :param itr: An iterator which iterates over the input
of the subprocess</span>
+<span class="sd"> """</span>
+ <span class="c1"># Consume the iterator</span>
+ <span class="k">for</span> <span class="n">line</span> <span
class="ow">in</span> <span class="n">itr</span><span class="p">:</span>
+ <span class="n">line</span> <span class="o">=</span> <span
class="n">line</span><span class="o">.</span><span class="n">strip</span><span
class="p">()</span>
+ <span class="c1"># If we run yarn cluster mode, we want to extract
the application id from</span>
+ <span class="c1"># the logs so we can kill the application when we
stop it unexpectedly</span>
+ <span class="k">if</span> <span class="bp">self</span><span
class="o">.</span><span class="n">_is_yarn</span> <span class="ow">and</span>
<span class="bp">self</span><span class="o">.</span><span
class="n">_connection</span><span class="p">[</span><span
class="s1">'deploy_mode'</span><span class="p">]</span> <span
class="o">==</span> <span class="s1">'cluster'</span><span
class="p">:</span>
+ <span class="n">match</span> <span class="o">=</span> <span
class="n">re</span><span class="o">.</span><span class="n">search</span><span
class="p">(</span><span class="s1">'(application[0-9_]+)'</span><span
class="p">,</span> <span class="n">line</span><span class="p">)</span>
+ <span class="k">if</span> <span class="n">match</span><span
class="p">:</span>
+ <span class="bp">self</span><span class="o">.</span><span
class="n">_yarn_application_id</span> <span class="o">=</span> <span
class="n">match</span><span class="o">.</span><span
class="n">groups</span><span class="p">()[</span><span class="mi">0</span><span
class="p">]</span>
+ <span class="bp">self</span><span class="o">.</span><span
class="n">log</span><span class="o">.</span><span class="n">info</span><span
class="p">(</span><span class="s2">"Identified spark driver id:
</span><span class="si">%s</span><span class="s2">"</span><span
class="p">,</span>
+ <span class="bp">self</span><span
class="o">.</span><span class="n">_yarn_application_id</span><span
class="p">)</span>
+
+ <span class="c1"># If we run Kubernetes cluster mode, we want to
extract the driver pod id</span>
+ <span class="c1"># from the logs so we can kill the application
when we stop it unexpectedly</span>
+ <span class="k">elif</span> <span class="bp">self</span><span
class="o">.</span><span class="n">_is_kubernetes</span><span class="p">:</span>
+ <span class="n">match</span> <span class="o">=</span> <span
class="n">re</span><span class="o">.</span><span class="n">search</span><span
class="p">(</span><span class="s1">'\s*pod name:
((.+?)-([a-z0-9]+)-driver)'</span><span class="p">,</span> <span
class="n">line</span><span class="p">)</span>
+ <span class="k">if</span> <span class="n">match</span><span
class="p">:</span>
+ <span class="bp">self</span><span class="o">.</span><span
class="n">_kubernetes_driver_pod</span> <span class="o">=</span> <span
class="n">match</span><span class="o">.</span><span
class="n">groups</span><span class="p">()[</span><span class="mi">0</span><span
class="p">]</span>
+ <span class="bp">self</span><span class="o">.</span><span
class="n">log</span><span class="o">.</span><span class="n">info</span><span
class="p">(</span><span class="s2">"Identified spark driver pod:
</span><span class="si">%s</span><span class="s2">"</span><span
class="p">,</span>
+ <span class="bp">self</span><span
class="o">.</span><span class="n">_kubernetes_driver_pod</span><span
class="p">)</span>
+
+ <span class="c1"># Store the Spark Exit code</span>
+ <span class="n">match_exit_code</span> <span
class="o">=</span> <span class="n">re</span><span class="o">.</span><span
class="n">search</span><span class="p">(</span><span class="s1">'\s*Exit
code: (\d+)'</span><span class="p">,</span> <span
class="n">line</span><span class="p">)</span>
+ <span class="k">if</span> <span
class="n">match_exit_code</span><span class="p">:</span>
+ <span class="bp">self</span><span class="o">.</span><span
class="n">_spark_exit_code</span> <span class="o">=</span> <span
class="nb">int</span><span class="p">(</span><span
class="n">match_exit_code</span><span class="o">.</span><span
class="n">groups</span><span class="p">()[</span><span class="mi">0</span><span
class="p">])</span>
+
+ <span class="c1"># if we run in standalone cluster mode and we
want to track the driver status</span>
+ <span class="c1"># we need to extract the driver id from the logs.
This allows us to poll for</span>
+ <span class="c1"># the status using the driver id. Also, we can
kill the driver when needed.</span>
+ <span class="k">elif</span> <span class="bp">self</span><span
class="o">.</span><span class="n">_should_track_driver_status</span> <span
class="ow">and</span> <span class="ow">not</span> <span
class="bp">self</span><span class="o">.</span><span
class="n">_driver_id</span><span class="p">:</span>
+ <span class="n">match_driver_id</span> <span
class="o">=</span> <span class="n">re</span><span class="o">.</span><span
class="n">search</span><span class="p">(</span><span
class="s1">'(driver-[0-9\-]+)'</span><span class="p">,</span> <span
class="n">line</span><span class="p">)</span>
+ <span class="k">if</span> <span
class="n">match_driver_id</span><span class="p">:</span>
+ <span class="bp">self</span><span class="o">.</span><span
class="n">_driver_id</span> <span class="o">=</span> <span
class="n">match_driver_id</span><span class="o">.</span><span
class="n">groups</span><span class="p">()[</span><span class="mi">0</span><span
class="p">]</span>
+ <span class="bp">self</span><span class="o">.</span><span
class="n">log</span><span class="o">.</span><span class="n">info</span><span
class="p">(</span><span class="s2">"identified spark driver id:
</span><span class="si">{}</span><span class="s2">"</span>
+ <span class="o">.</span><span
class="n">format</span><span class="p">(</span><span
class="bp">self</span><span class="o">.</span><span
class="n">_driver_id</span><span class="p">))</span>
+
+ <span class="k">else</span><span class="p">:</span>
+ <span class="bp">self</span><span class="o">.</span><span
class="n">log</span><span class="o">.</span><span class="n">info</span><span
class="p">(</span><span class="n">line</span><span class="p">)</span>
+
+ <span class="bp">self</span><span class="o">.</span><span
class="n">log</span><span class="o">.</span><span class="n">debug</span><span
class="p">(</span><span class="s2">"spark submit log: </span><span
class="si">{}</span><span class="s2">"</span><span class="o">.</span><span
class="n">format</span><span class="p">(</span><span class="n">line</span><span
class="p">))</span>
+
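# ----------------------------------------------------------------------
# Aside: a minimal, self-contained sketch of the log parsing above. The
# sample log lines are invented for illustration; only the regular
# expressions mirror the hook's code.
import re

yarn_line = "INFO Client: Submitted application application_1518023507302_0001"
k8s_line = "  pod name: spark-pi-1518023507302-driver"
standalone_line = "Driver successfully submitted as driver-20180123123456-0001"

print(re.search('(application[0-9_]+)', yarn_line).groups()[0])
# -> application_1518023507302_0001
print(re.search(r'\s*pod name: ((.+?)-([a-z0-9]+)-driver)', k8s_line).groups()[0])
# -> spark-pi-1518023507302-driver
print(re.search(r'(driver-[0-9\-]+)', standalone_line).groups()[0])
# -> driver-20180123123456-0001
# ----------------------------------------------------------------------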
+ <span class="k">def</span> <span
class="nf">_process_spark_status_log</span><span class="p">(</span><span
class="bp">self</span><span class="p">,</span> <span class="n">itr</span><span
class="p">):</span>
+ <span class="sd">"""</span>
+<span class="sd"> parses the logs of the spark driver status query
process</span>
+
+<span class="sd"> :param itr: An iterator which iterates over the input
of the subprocess</span>
+<span class="sd"> """</span>
+ <span class="c1"># Consume the iterator</span>
+ <span class="k">for</span> <span class="n">line</span> <span
class="ow">in</span> <span class="n">itr</span><span class="p">:</span>
+ <span class="n">line</span> <span class="o">=</span> <span
class="n">line</span><span class="o">.</span><span class="n">strip</span><span
class="p">()</span>
+
+ <span class="c1"># Check if the log line is about the driver
status and extract the status.</span>
+ <span class="k">if</span> <span
class="s2">"driverState"</span> <span class="ow">in</span> <span
class="n">line</span><span class="p">:</span>
+ <span class="bp">self</span><span class="o">.</span><span
class="n">_driver_status</span> <span class="o">=</span> <span
class="n">line</span><span class="o">.</span><span class="n">split</span><span
class="p">(</span><span class="s1">' : '</span><span
class="p">)[</span><span class="mi">1</span><span class="p">]</span> \
+ <span class="o">.</span><span
class="n">replace</span><span class="p">(</span><span
class="s1">','</span><span class="p">,</span> <span
class="s1">''</span><span class="p">)</span><span
class="o">.</span><span class="n">replace</span><span class="p">(</span><span
class="s1">'</span><span class="se">\"</span><span
class="s1">'</span><span class="p">,</span> <span
class="s1">''</span><span class="p">)</span><span
class="o">.</span><span class="n">strip</span><span class="p">()</span>
+
+ <span class="bp">self</span><span class="o">.</span><span
class="n">log</span><span class="o">.</span><span class="n">debug</span><span
class="p">(</span><span class="s2">"spark driver status log: </span><span
class="si">{}</span><span class="s2">"</span><span class="o">.</span><span
class="n">format</span><span class="p">(</span><span class="n">line</span><span
class="p">))</span>
+
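# ----------------------------------------------------------------------
# Aside: what the status parsing above does to a single line of the
# JSON-ish output of a driver status query (example line invented for
# illustration):
line = '"driverState" : "RUNNING",'
status = line.split(' : ')[1].replace(',', '').replace('"', '').strip()
print(status)  # -> RUNNING
# ----------------------------------------------------------------------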
+ <span class="k">def</span> <span
class="nf">_start_driver_status_tracking</span><span class="p">(</span><span
class="bp">self</span><span class="p">):</span>
+ <span class="sd">"""</span>
+<span class="sd"> Polls the driver based on self._driver_id to get the
status.</span>
+<span class="sd"> Finish successfully when the status is
FINISHED.</span>
+<span class="sd"> Finish failed when the status is
ERROR/UNKNOWN/KILLED/FAILED.</span>
+
+<span class="sd"> Possible status:</span>
+<span class="sd"> SUBMITTED: Submitted but not yet scheduled on a
worker</span>
+<span class="sd"> RUNNING: Has been allocated to a worker to
run</span>
+<span class="sd"> FINISHED: Previously ran and exited cleanly</span>
+<span class="sd"> RELAUNCHING: Exited non-zero or due to worker
failure, but has not yet</span>
+<span class="sd"> started running again</span>
+<span class="sd"> UNKNOWN: The status of the driver is temporarily
not known due to</span>
+<span class="sd"> master failure recovery</span>
+<span class="sd"> KILLED: A user manually killed this driver</span>
+<span class="sd"> FAILED: The driver exited non-zero and was not
supervised</span>
+<span class="sd"> ERROR: Unable to run or restart due to an
unrecoverable error</span>
+<span class="sd"> (e.g. missing jar file)</span>
+<span class="sd"> """</span>
+
+ <span class="c1"># When your Spark Standalone cluster is not
performing well</span>
+ <span class="c1"># due to misconfiguration or heavy loads.</span>
+ <span class="c1"># it is possible that the polling request will
timeout.</span>
+ <span class="c1"># Therefore we use a simple retry mechanism.</span>
+ <span class="n">missed_job_status_reports</span> <span
class="o">=</span> <span class="mi">0</span>
+ <span class="n">max_missed_job_status_reports</span> <span
class="o">=</span> <span class="mi">10</span>
+
+ <span class="c1"># Keep polling as long as the driver is
processing</span>
+ <span class="k">while</span> <span class="bp">self</span><span
class="o">.</span><span class="n">_driver_status</span> <span
class="ow">not</span> <span class="ow">in</span> <span class="p">[</span><span
class="s2">"FINISHED"</span><span class="p">,</span> <span
class="s2">"UNKNOWN"</span><span class="p">,</span>
+ <span
class="s2">"KILLED"</span><span class="p">,</span> <span
class="s2">"FAILED"</span><span class="p">,</span> <span
class="s2">"ERROR"</span><span class="p">]:</span>
+
+ <span class="c1"># Sleep for 1 second as we do not want to spam
the cluster</span>
+ <span class="n">time</span><span class="o">.</span><span
class="n">sleep</span><span class="p">(</span><span class="mi">1</span><span
class="p">)</span>
+
+ <span class="bp">self</span><span class="o">.</span><span
class="n">log</span><span class="o">.</span><span class="n">debug</span><span
class="p">(</span><span class="s2">"polling status of spark driver with id
</span><span class="si">{}</span><span class="s2">"</span>
+ <span class="o">.</span><span
class="n">format</span><span class="p">(</span><span
class="bp">self</span><span class="o">.</span><span
class="n">_driver_id</span><span class="p">))</span>
+
+ <span class="n">poll_drive_status_cmd</span> <span
class="o">=</span> <span class="bp">self</span><span class="o">.</span><span
class="n">_build_track_driver_status_command</span><span class="p">()</span>
+ <span class="n">status_process</span> <span class="o">=</span>
<span class="n">subprocess</span><span class="o">.</span><span
class="n">Popen</span><span class="p">(</span><span
class="n">poll_drive_status_cmd</span><span class="p">,</span>
+ <span
class="n">stdout</span><span class="o">=</span><span
class="n">subprocess</span><span class="o">.</span><span
class="n">PIPE</span><span class="p">,</span>
+ <span
class="n">stderr</span><span class="o">=</span><span
class="n">subprocess</span><span class="o">.</span><span
class="n">STDOUT</span><span class="p">,</span>
+ <span
class="n">bufsize</span><span class="o">=-</span><span class="mi">1</span><span
class="p">,</span>
+ <span
class="n">universal_newlines</span><span class="o">=</span><span
class="kc">True</span><span class="p">)</span>
+
+ <span class="bp">self</span><span class="o">.</span><span
class="n">_process_spark_status_log</span><span class="p">(</span><span
class="nb">iter</span><span class="p">(</span><span
class="n">status_process</span><span class="o">.</span><span
class="n">stdout</span><span class="o">.</span><span
class="n">readline</span><span class="p">,</span> <span
class="s1">''</span><span class="p">))</span>
+ <span class="n">returncode</span> <span class="o">=</span> <span
class="n">status_process</span><span class="o">.</span><span
class="n">wait</span><span class="p">()</span>
+
+ <span class="k">if</span> <span class="n">returncode</span><span
class="p">:</span>
+ <span class="k">if</span> <span
class="n">missed_job_status_reports</span> <span class="o"><</span> <span
class="n">max_missed_job_status_reports</span><span class="p">:</span>
+ <span class="n">missed_job_status_reports</span> <span
class="o">=</span> <span class="n">missed_job_status_reports</span> <span
class="o">+</span> <span class="mi">1</span>
+ <span class="k">else</span><span class="p">:</span>
+ <span class="k">raise</span> <span
class="n">AirflowException</span><span class="p">(</span>
+ <span class="s2">"Failed to poll for the driver
status </span><span class="si">{}</span><span class="s2"> times: returncode =
</span><span class="si">{}</span><span class="s2">"</span>
+ <span class="o">.</span><span
class="n">format</span><span class="p">(</span><span
class="n">max_missed_job_status_reports</span><span class="p">,</span> <span
class="n">returncode</span><span class="p">)</span>
+ <span class="p">)</span>
+
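# ----------------------------------------------------------------------
# Aside: the retry pattern above in isolation -- poll until a terminal
# state, tolerating a bounded number of failed polls before giving up.
# `run_status_cmd` is a hypothetical stand-in for invoking the status
# command; it returns (returncode, status), with status None on failure.
import time

def poll_until_done(run_status_cmd, max_misses=10, interval=1):
    misses = 0
    status = "SUBMITTED"
    while status not in ["FINISHED", "UNKNOWN", "KILLED", "FAILED", "ERROR"]:
        time.sleep(interval)
        returncode, new_status = run_status_cmd()
        if returncode:
            misses += 1
            if misses > max_misses:
                raise RuntimeError(
                    "Failed to poll for the driver status %d times" % max_misses)
        elif new_status:
            status = new_status
    return status

# e.g. poll_until_done(lambda: (0, "FINISHED"))  # -> "FINISHED"
# ----------------------------------------------------------------------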
    def _build_spark_driver_kill_command(self):
        """
        Construct the spark-submit command to kill a driver.

        :return: full command to kill a driver
        """
        # If the spark_home is passed then build the spark-submit executable path
        # using the spark_home; otherwise assume that spark-submit is present in
        # the PATH of the executing user
        if self._connection['spark_home']:
            connection_cmd = [os.path.join(self._connection['spark_home'],
                                           'bin',
                                           self._connection['spark_binary'])]
        else:
            connection_cmd = [self._connection['spark_binary']]

        # The URL to the spark master
        connection_cmd += ["--master", self._connection['master']]

        # The actual kill command
        connection_cmd += ["--kill", self._driver_id]

        self.log.debug("Spark-Kill cmd: %s", connection_cmd)

        return connection_cmd
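# ----------------------------------------------------------------------
# Aside: a sketch of what the method above produces for a standalone
# cluster (spark_home, master URL, and driver id are invented values):
import os

spark_home = '/opt/spark'
connection_cmd = [os.path.join(spark_home, 'bin', 'spark-submit')]
connection_cmd += ["--master", "spark://master:6066"]
connection_cmd += ["--kill", "driver-20180123123456-0001"]
print(connection_cmd)
# -> ['/opt/spark/bin/spark-submit', '--master', 'spark://master:6066',
#     '--kill', 'driver-20180123123456-0001']
# ----------------------------------------------------------------------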
    def on_kill(self):

        self.log.debug("Kill Command is being called")

        if self._should_track_driver_status:
            if self._driver_id:
                self.log.info('Killing driver {} on cluster'
                              .format(self._driver_id))

                kill_cmd = self._build_spark_driver_kill_command()
                driver_kill = subprocess.Popen(kill_cmd,
                                               stdout=subprocess.PIPE,
                                               stderr=subprocess.PIPE)

                self.log.info("Spark driver {} killed with return code: {}"
                              .format(self._driver_id, driver_kill.wait()))

        if self._submit_sp and self._submit_sp.poll() is None:
            self.log.info('Sending kill signal to %s',
                          self._connection['spark_binary'])
            self._submit_sp.kill()

            if self._yarn_application_id:
                self.log.info('Killing application {} on YARN'
                              .format(self._yarn_application_id))

                kill_cmd = "yarn application -kill {}" \
                    .format(self._yarn_application_id).split()
                yarn_kill = subprocess.Popen(kill_cmd,
                                             stdout=subprocess.PIPE,
                                             stderr=subprocess.PIPE)

                self.log.info("YARN killed with return code: %s",
                              yarn_kill.wait())

            if self._kubernetes_driver_pod:
                self.log.info('Killing pod %s on Kubernetes',
                              self._kubernetes_driver_pod)

                # Currently only instantiate Kubernetes client for killing a
                # spark pod.
                try:
                    client = kube_client.get_kube_client()
                    api_response = client.delete_namespaced_pod(
                        self._kubernetes_driver_pod,
                        self._connection['namespace'],
                        body=client.V1DeleteOptions(),
                        pretty=True)

                    self.log.info("Spark on K8s killed with response: %s",
                                  api_response)

                except ApiException as e:
                    self.log.info("Exception when attempting to kill Spark on K8s:")
                    self.log.exception(e)