http://git-wip-us.apache.org/repos/asf/incubator-airflow-site/blob/9c75ee9e/code.html
----------------------------------------------------------------------
diff --git a/code.html b/code.html
index 91192d3..732db2e 100644
--- a/code.html
+++ b/code.html
@@ -30,6 +30,9 @@
   
 
   
+        <link rel="index" title="Index"
+              href="genindex.html"/>
+        <link rel="search" title="Search" href="search.html"/>
     <link rel="top" title="Airflow Documentation" href="index.html"/>
         <link rel="prev" title="FAQ" href="faq.html"/> 
 
@@ -40,6 +43,7 @@
 
 <body class="wy-body-for-nav" role="document">
 
+   
   <div class="wy-grid-for-nav">
 
     
@@ -90,13 +94,15 @@
 <li class="toctree-l1"><a class="reference internal" 
href="scheduler.html">Scheduling &amp; Triggers</a></li>
 <li class="toctree-l1"><a class="reference internal" 
href="plugins.html">Plugins</a></li>
 <li class="toctree-l1"><a class="reference internal" 
href="security.html">Security</a></li>
+<li class="toctree-l1"><a class="reference internal" 
href="api.html">Experimental Rest API</a></li>
+<li class="toctree-l1"><a class="reference internal" 
href="integration.html">Integration</a></li>
 <li class="toctree-l1"><a class="reference internal" 
href="faq.html">FAQ</a></li>
 <li class="toctree-l1 current"><a class="current reference internal" 
href="#">API Reference</a><ul>
 <li class="toctree-l2"><a class="reference internal" 
href="#operators">Operators</a><ul>
 <li class="toctree-l3"><a class="reference internal" 
href="#baseoperator">BaseOperator</a></li>
 <li class="toctree-l3"><a class="reference internal" 
href="#basesensoroperator">BaseSensorOperator</a></li>
-<li class="toctree-l3"><a class="reference internal" 
href="#module-airflow.operators">Operator API</a></li>
-<li class="toctree-l3"><a class="reference internal" 
href="#module-airflow.contrib.operators">Community-contributed 
Operators</a></li>
+<li class="toctree-l3"><a class="reference internal" 
href="#operator-api">Operator API</a></li>
+<li class="toctree-l3"><a class="reference internal" 
href="#community-contributed-operators">Community-contributed Operators</a></li>
 </ul>
 </li>
 <li class="toctree-l2"><a class="reference internal" 
href="#macros">Macros</a><ul>
@@ -105,8 +111,8 @@
 </ul>
 </li>
 <li class="toctree-l2"><a class="reference internal" 
href="#models">Models</a></li>
-<li class="toctree-l2"><a class="reference internal" 
href="#module-airflow.hooks">Hooks</a><ul>
-<li class="toctree-l3"><a class="reference internal" 
href="#module-airflow.contrib.hooks">Community contributed hooks</a></li>
+<li class="toctree-l2"><a class="reference internal" 
href="#hooks">Hooks</a><ul>
+<li class="toctree-l3"><a class="reference internal" 
href="#community-contributed-hooks">Community contributed hooks</a></li>
 </ul>
 </li>
 <li class="toctree-l2"><a class="reference internal" 
href="#executors">Executors</a><ul>
@@ -127,8 +133,10 @@
 
       
       <nav class="wy-nav-top" role="navigation" aria-label="top navigation">
-        <i data-toggle="wy-nav-top" class="fa fa-bars"></i>
-        <a href="index.html">Airflow</a>
+        
+          <i data-toggle="wy-nav-top" class="fa fa-bars"></i>
+          <a href="index.html">Airflow</a>
+        
       </nav>
 
 
@@ -137,23 +145,40 @@
         <div class="rst-content">
           
 
- 
+
+
+
+
+
+
+
+
+
+
+
 
 
 
 <div role="navigation" aria-label="breadcrumbs navigation">
+
   <ul class="wy-breadcrumbs">
-    <li><a href="index.html">Docs</a> &raquo;</li>
-      
-    <li>API Reference</li>
+    
+      <li><a href="index.html">Docs</a> &raquo;</li>
+        
+      <li>API Reference</li>
+    
+    
       <li class="wy-breadcrumbs-aside">
         
-          
-            <a href="_sources/code.txt" rel="nofollow"> View page source</a>
+            
+            <a href="_sources/code.rst.txt" rel="nofollow"> View page 
source</a>
           
         
       </li>
+    
   </ul>
+
+  
   <hr/>
 </div>
           <div role="main" class="document" itemscope="itemscope" 
itemtype="http://schema.org/Article";>
@@ -187,12 +212,12 @@ to understand the primitive features that can be 
leveraged in your
 DAGs.</p>
 <dl class="class">
 <dt id="airflow.models.BaseOperator">
-<em class="property">class </em><code 
class="descclassname">airflow.models.</code><code 
class="descname">BaseOperator</code><span 
class="sig-paren">(</span><em>task_id</em>, <em>owner='airflow'</em>, 
<em>email=None</em>, <em>email_on_retry=True</em>, 
<em>email_on_failure=True</em>, <em>retries=0</em>, 
<em>retry_delay=datetime.timedelta(0</em>, <em>300)</em>, 
<em>retry_exponential_backoff=False</em>, <em>max_retry_delay=None</em>, 
<em>start_date=None</em>, <em>end_date=None</em>, 
<em>schedule_interval=None</em>, <em>depends_on_past=False</em>, 
<em>wait_for_downstream=False</em>, <em>dag=None</em>, <em>params=None</em>, 
<em>default_args=None</em>, <em>adhoc=False</em>, <em>priority_weight=1</em>, 
<em>queue='default'</em>, <em>pool=None</em>, <em>sla=None</em>, 
<em>execution_timeout=None</em>, <em>on_failure_callback=None</em>, 
<em>on_success_callback=None</em>, <em>on_retry_callback=None</em>, 
<em>trigger_rule=u'all_success'</em>, <em>resources=None</em>, <em>*args</em>, 
<em>**kwargs</em><span class="sig-paren">)</span><a class="reference internal" 
href="_modules/airflow/models.html#BaseOperator"><span 
class="viewcode-link">[source]</span></a><a class="headerlink" 
href="#airflow.models.BaseOperator" title="Permalink to this 
definition">¶</a></dt>
+<em class="property">class </em><code 
class="descclassname">airflow.models.</code><code 
class="descname">BaseOperator</code><span 
class="sig-paren">(</span><em>task_id</em>, <em>owner='Airflow'</em>, 
<em>email=None</em>, <em>email_on_retry=True</em>, 
<em>email_on_failure=True</em>, <em>retries=0</em>, 
<em>retry_delay=datetime.timedelta(0</em>, <em>300)</em>, 
<em>retry_exponential_backoff=False</em>, <em>max_retry_delay=None</em>, 
<em>start_date=None</em>, <em>end_date=None</em>, 
<em>schedule_interval=None</em>, <em>depends_on_past=False</em>, 
<em>wait_for_downstream=False</em>, <em>dag=None</em>, <em>params=None</em>, 
<em>default_args=None</em>, <em>adhoc=False</em>, <em>priority_weight=1</em>, 
<em>queue='default'</em>, <em>pool=None</em>, <em>sla=None</em>, 
<em>execution_timeout=None</em>, <em>on_failure_callback=None</em>, 
<em>on_success_callback=None</em>, <em>on_retry_callback=None</em>, 
<em>trigger_rule=u'all_success'</em>, <em>resources=None</em>, 
<em>run_as_user=None</em>, <em>*args</em>, <em>**kwargs</em><span class="sig-paren">)</span><a 
class="reference internal" 
href="_modules/airflow/models.html#BaseOperator"><span 
class="viewcode-link">[source]</span></a><a class="headerlink" 
href="#airflow.models.BaseOperator" title="Permalink to this 
definition">¶</a></dt>
 <dd><p>Abstract base class for all operators. Since operators create objects 
that
become nodes in the dag, BaseOperator contains many recursive methods for
 dag crawling behavior. To derive this class, you are expected to override
 the constructor as well as the &#8216;execute&#8217; method.</p>
-<p>Operators derived from this task should perform or trigger certain tasks
+<p>Operators derived from this class should perform or trigger certain tasks
 synchronously (wait for completion). Example of operators could be an
operator that runs a Pig job (PigOperator), a sensor operator that
 waits for a partition to land in Hive (HiveSensorOperator), or one that
@@ -261,7 +286,7 @@ way to limit concurrency for certain tasks</li>
 this represents the <code class="docutils literal"><span 
class="pre">timedelta</span></code> after the period is closed. For
example if you set an SLA of 1 hour, the scheduler would send an email
 soon after 1:00AM on the <code class="docutils literal"><span 
class="pre">2016-01-02</span></code> if the <code class="docutils 
literal"><span class="pre">2016-01-01</span></code> instance
-has not succeede yet.
+has not succeeded yet.
 The scheduler pays special attention for jobs with an SLA and
 sends alert
 emails for sla misses. SLA misses are also recorded in the database
@@ -288,6 +313,7 @@ using the constants defined in the static class
 <code class="docutils literal"><span 
class="pre">airflow.utils.TriggerRule</span></code></li>
 <li><strong>resources</strong> (<em>dict</em>) &#8211; A map of resource 
parameter names (the argument names of the
 Resources constructor) to their values.</li>
+<li><strong>run_as_user</strong> (<em>str</em>) &#8211; unix username to 
impersonate while running the task</li>
 </ul>
 </td>
 </tr>
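
The run_as_user argument documented above is passed like any other BaseOperator
keyword argument. A minimal sketch, assuming a DAG object named dag already
exists and that the unix account used here ("etl", hypothetical) is present on
the worker:

    from airflow.operators.bash_operator import BashOperator

    # run_as_user asks the worker to impersonate the given unix user while
    # running the task; the remaining keywords are ordinary BaseOperator ones.
    whoami = BashOperator(
        task_id='print_effective_user',
        bash_command='whoami',
        run_as_user='etl',   # hypothetical service account
        retries=1,
        dag=dag,
    )
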
@@ -326,9 +352,9 @@ between each tries</li>
 </dd></dl>
 
 </div>
-<div class="section" id="module-airflow.operators">
-<span id="operator-api"></span><h3>Operator API<a class="headerlink" 
href="#module-airflow.operators" title="Permalink to this headline">¶</a></h3>
-<p>Importer that dynamically loads a class and module from its parent. This
+<div class="section" id="operator-api">
+<h3>Operator API<a class="headerlink" href="#operator-api" title="Permalink to 
this headline">¶</a></h3>
+<span class="target" id="module-airflow.operators"></span><p>Importer that 
dynamically loads a class and module from its parent. This
 allows Airflow to support <code class="docutils literal"><span 
class="pre">from</span> <span class="pre">airflow.operators</span> <span 
class="pre">import</span> <span class="pre">BashOperator</span></code>
 even though BashOperator is actually in
 <code class="docutils literal"><span 
class="pre">airflow.operators.bash_operator</span></code>.</p>
@@ -427,7 +453,7 @@ DAG.</p>
 
 <dl class="class">
 <dt id="airflow.operators.EmailOperator">
-<em class="property">class </em><code 
class="descclassname">airflow.operators.</code><code 
class="descname">EmailOperator</code><span 
class="sig-paren">(</span><em>to</em>, <em>subject</em>, <em>html_content</em>, 
<em>files=None</em>, <em>cc=None</em>, <em>bcc=None</em>, <em>*args</em>, 
<em>**kwargs</em><span class="sig-paren">)</span><a class="reference internal" 
href="_modules/email_operator.html#EmailOperator"><span 
class="viewcode-link">[source]</span></a><a class="headerlink" 
href="#airflow.operators.EmailOperator" title="Permalink to this 
definition">¶</a></dt>
+<em class="property">class </em><code 
class="descclassname">airflow.operators.</code><code 
class="descname">EmailOperator</code><span 
class="sig-paren">(</span><em>to</em>, <em>subject</em>, <em>html_content</em>, 
<em>files=None</em>, <em>cc=None</em>, <em>bcc=None</em>, 
<em>mime_subtype='mixed'</em>, <em>*args</em>, <em>**kwargs</em><span 
class="sig-paren">)</span><a class="reference internal" 
href="_modules/email_operator.html#EmailOperator"><span 
class="viewcode-link">[source]</span></a><a class="headerlink" 
href="#airflow.operators.EmailOperator" title="Permalink to this 
definition">¶</a></dt>
 <dd><p>Bases: <a class="reference internal" 
href="#airflow.models.BaseOperator" title="airflow.models.BaseOperator"><code 
class="xref py py-class docutils literal"><span 
class="pre">airflow.models.BaseOperator</span></code></a></p>
 <p>Sends an email.</p>
 <table class="docutils field-list" frame="void" rules="none">
@@ -435,13 +461,13 @@ DAG.</p>
 <col class="field-body" />
 <tbody valign="top">
 <tr class="field-odd field"><th class="field-name">Parameters:</th><td 
class="field-body"><ul class="first last simple">
-<li><strong>to</strong> (<em>list or string (comma or semicolon 
delimited)</em>) &#8211; list of emails to send the email to</li>
+<li><strong>to</strong> (<em>list</em><em> or </em><em>string</em><em> 
(</em><em>comma</em><em> or </em><em>semicolon 
delimited</em><em>)</em><em></em>) &#8211; list of emails to send the email 
to</li>
 <li><strong>subject</strong> (<em>string</em>) &#8211; subject line for the 
email (templated)</li>
 <li><strong>html_content</strong> (<em>string</em>) &#8211; content of the 
email (templated), html markup
 is allowed</li>
 <li><strong>files</strong> (<em>list</em>) &#8211; file names to attach in 
email</li>
-<li><strong>cc</strong> (<em>list or string (comma or semicolon 
delimited)</em>) &#8211; list of recipients to be added in CC field</li>
-<li><strong>bcc</strong> (<em>list or string (comma or semicolon 
delimited)</em>) &#8211; list of recipients to be added in BCC field</li>
+<li><strong>cc</strong> (<em>list</em><em> or </em><em>string</em><em> 
(</em><em>comma</em><em> or </em><em>semicolon 
delimited</em><em>)</em><em></em>) &#8211; list of recipients to be added in CC 
field</li>
+<li><strong>bcc</strong> (<em>list</em><em> or </em><em>string</em><em> 
(</em><em>comma</em><em> or </em><em>semicolon 
delimited</em><em>)</em><em></em>) &#8211; list of recipients to be added in 
BCC field</li>
 </ul>
 </td>
 </tr>
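
A minimal EmailOperator sketch based on the signature above, assuming a DAG
object named dag exists and SMTP is configured for the deployment;
mime_subtype is the newly documented argument and 'mixed' is its default.

    from airflow.operators import EmailOperator

    notify = EmailOperator(
        task_id='notify_team',
        to='data-team@example.com',        # string (comma/semicolon delimited) or list
        subject='Run {{ ds }} finished',   # templated
        html_content='<p>All tasks for {{ ds }} completed.</p>',
        mime_subtype='mixed',              # default per the signature above
        dag=dag,
    )
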
@@ -497,7 +523,7 @@ needs to expose a <cite>get_records</cite> method, and the 
destination a
 <li><strong>destination_table</strong> (<em>str</em>) &#8211; target table</li>
 <li><strong>source_conn_id</strong> (<em>str</em>) &#8211; source 
connection</li>
 <li><strong>destination_conn_id</strong> (<em>str</em>) &#8211; source 
connection</li>
-<li><strong>preoperator</strong> (<em>str or list of str</em>) &#8211; sql 
statement or list of statements to be
+<li><strong>preoperator</strong> (<em>str</em><em> or </em><em>list of 
str</em>) &#8211; sql statement or list of statements to be
 executed prior to loading the data</li>
 </ul>
 </td>
@@ -508,61 +534,28 @@ executed prior to loading the data</li>
 
 <dl class="class">
 <dt id="airflow.operators.HdfsSensor">
-<em class="property">class </em><code 
class="descclassname">airflow.operators.</code><code 
class="descname">HdfsSensor</code><span 
class="sig-paren">(</span><em>filepath</em>, 
<em>hdfs_conn_id='hdfs_default'</em>, <em>*args</em>, <em>**kwargs</em><span 
class="sig-paren">)</span><a class="reference internal" 
href="_modules/sensors.html#HdfsSensor"><span 
class="viewcode-link">[source]</span></a><a class="headerlink" 
href="#airflow.operators.HdfsSensor" title="Permalink to this 
definition">¶</a></dt>
+<em class="property">class </em><code 
class="descclassname">airflow.operators.</code><code 
class="descname">HdfsSensor</code><span class="sig-paren">(</span><em>filepath, 
hdfs_conn_id='hdfs_default', ignored_ext=['_COPYING_'], ignore_copying=True, 
file_size=None, hook=&lt;class 'airflow.hooks.hdfs_hook.HDFSHook'&gt;, *args, 
**kwargs</em><span class="sig-paren">)</span><a class="reference internal" 
href="_modules/sensors.html#HdfsSensor"><span 
class="viewcode-link">[source]</span></a><a class="headerlink" 
href="#airflow.operators.HdfsSensor" title="Permalink to this 
definition">¶</a></dt>
 <dd><p>Bases: <a class="reference internal" 
href="#airflow.operators.sensors.BaseSensorOperator" 
title="airflow.operators.sensors.BaseSensorOperator"><code class="xref py 
py-class docutils literal"><span 
class="pre">sensors.BaseSensorOperator</span></code></a></p>
 <p>Waits for a file or folder to land in HDFS</p>
+<dl class="staticmethod">
+<dt id="airflow.operators.HdfsSensor.filter_for_filesize">
+<em class="property">static </em><code 
class="descname">filter_for_filesize</code><span 
class="sig-paren">(</span><em>result</em>, <em>size=None</em><span 
class="sig-paren">)</span><a class="reference internal" 
href="_modules/sensors.html#HdfsSensor.filter_for_filesize"><span 
class="viewcode-link">[source]</span></a><a class="headerlink" 
href="#airflow.operators.HdfsSensor.filter_for_filesize" title="Permalink to 
this definition">¶</a></dt>
+<dd><p>Will test the filepath result and test if its size is at least 
self.filesize
+:param result: a list of dicts returned by Snakebite ls
+:param size: the file size in MB a file should be at least to trigger True
+:return: (bool) depending on the matching criteria</p>
 </dd></dl>
 
-<dl class="class">
-<dt id="airflow.operators.Hive2SambaOperator">
-<em class="property">class </em><code 
class="descclassname">airflow.operators.</code><code 
class="descname">Hive2SambaOperator</code><span 
class="sig-paren">(</span><em>hql</em>, <em>destination_filepath</em>, 
<em>samba_conn_id='samba_default'</em>, 
<em>hiveserver2_conn_id='hiveserver2_default'</em>, <em>*args</em>, 
<em>**kwargs</em><span class="sig-paren">)</span><a class="reference internal" 
href="_modules/hive_to_samba_operator.html#Hive2SambaOperator"><span 
class="viewcode-link">[source]</span></a><a class="headerlink" 
href="#airflow.operators.Hive2SambaOperator" title="Permalink to this 
definition">¶</a></dt>
-<dd><p>Bases: <a class="reference internal" 
href="#airflow.models.BaseOperator" title="airflow.models.BaseOperator"><code 
class="xref py py-class docutils literal"><span 
class="pre">airflow.models.BaseOperator</span></code></a></p>
-<p>Executes hql code in a specific Hive database and loads the
-results of the query as a csv to a Samba location.</p>
-<table class="docutils field-list" frame="void" rules="none">
-<col class="field-name" />
-<col class="field-body" />
-<tbody valign="top">
-<tr class="field-odd field"><th class="field-name">Parameters:</th><td 
class="field-body"><ul class="first last simple">
-<li><strong>hql</strong> (<em>string</em>) &#8211; the hql to be exported</li>
-<li><strong>hiveserver2_conn_id</strong> (<em>string</em>) &#8211; reference 
to the hiveserver2 service</li>
-<li><strong>samba_conn_id</strong> (<em>string</em>) &#8211; reference to the 
samba destination</li>
-</ul>
-</td>
-</tr>
-</tbody>
-</table>
+<dl class="staticmethod">
+<dt id="airflow.operators.HdfsSensor.filter_for_ignored_ext">
+<em class="property">static </em><code 
class="descname">filter_for_ignored_ext</code><span 
class="sig-paren">(</span><em>result</em>, <em>ignored_ext</em>, 
<em>ignore_copying</em><span class="sig-paren">)</span><a class="reference 
internal" href="_modules/sensors.html#HdfsSensor.filter_for_ignored_ext"><span 
class="viewcode-link">[source]</span></a><a class="headerlink" 
href="#airflow.operators.HdfsSensor.filter_for_ignored_ext" title="Permalink to 
this definition">¶</a></dt>
+<dd><p>Will filter if instructed to do so the result to remove matching 
criteria
+:param result: (list) of dicts returned by Snakebite ls
+:param ignored_ext: (list) of ignored extensions
+:param ignore_copying: (bool) shall we ignore ?
+:return:</p>
 </dd></dl>
 
-<dl class="class">
-<dt id="airflow.operators.HiveOperator">
-<em class="property">class </em><code 
class="descclassname">airflow.operators.</code><code 
class="descname">HiveOperator</code><span 
class="sig-paren">(</span><em>hql</em>, 
<em>hive_cli_conn_id='hive_cli_default'</em>, <em>schema='default'</em>, 
<em>hiveconf_jinja_translate=False</em>, <em>script_begin_tag=None</em>, 
<em>run_as_owner=False</em>, <em>mapred_queue=None</em>, 
<em>mapred_queue_priority=None</em>, <em>mapred_job_name=None</em>, 
<em>*args</em>, <em>**kwargs</em><span class="sig-paren">)</span><a 
class="reference internal" 
href="_modules/hive_operator.html#HiveOperator"><span 
class="viewcode-link">[source]</span></a><a class="headerlink" 
href="#airflow.operators.HiveOperator" title="Permalink to this 
definition">¶</a></dt>
-<dd><p>Bases: <a class="reference internal" 
href="#airflow.models.BaseOperator" title="airflow.models.BaseOperator"><code 
class="xref py py-class docutils literal"><span 
class="pre">airflow.models.BaseOperator</span></code></a></p>
-<p>Executes hql code in a specific Hive database.</p>
-<table class="docutils field-list" frame="void" rules="none">
-<col class="field-name" />
-<col class="field-body" />
-<tbody valign="top">
-<tr class="field-odd field"><th class="field-name">Parameters:</th><td 
class="field-body"><ul class="first last simple">
-<li><strong>hql</strong> (<em>string</em>) &#8211; the hql to be executed</li>
-<li><strong>hive_cli_conn_id</strong> (<em>string</em>) &#8211; reference to 
the Hive database</li>
-<li><strong>hiveconf_jinja_translate</strong> (<em>boolean</em>) &#8211; when 
True, hiveconf-type templating
-${var} gets translated into jinja-type templating {{ var }}. Note that
-you may want to use this along with the
-<code class="docutils literal"><span 
class="pre">DAG(user_defined_macros=myargs)</span></code> parameter. View the 
DAG
-object documentation for more details.</li>
-<li><strong>script_begin_tag</strong> (<em>str</em>) &#8211; If defined, the 
operator will get rid of the
-part of the script before the first occurrence of 
<cite>script_begin_tag</cite></li>
-<li><strong>mapred_queue</strong> (<em>string</em>) &#8211; queue used by the 
Hadoop CapacityScheduler</li>
-<li><strong>mapred_queue_priority</strong> (<em>string</em>) &#8211; priority 
within CapacityScheduler queue.
-Possible settings include: VERY_HIGH, HIGH, NORMAL, LOW, VERY_LOW</li>
-<li><strong>mapred_job_name</strong> (<em>string</em>) &#8211; This name will 
appear in the jobtracker.
-This can make monitoring easier.</li>
-</ul>
-</td>
-</tr>
-</tbody>
-</table>
 </dd></dl>
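
A sketch of the extended HdfsSensor constructor shown above, assuming a DAG
object named dag and an 'hdfs_default' connection; ignored_ext and file_size
feed the filter_for_ignored_ext and filter_for_filesize helpers documented
above, and the path is purely illustrative.

    from airflow.operators import HdfsSensor

    wait_for_part = HdfsSensor(
        task_id='wait_for_part',
        filepath='/data/incoming/part-00000',   # hypothetical HDFS path
        hdfs_conn_id='hdfs_default',
        ignored_ext=['_COPYING_'],              # skip files still being copied
        ignore_copying=True,
        file_size=1,                            # require at least 1 MB
        dag=dag,
    )
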
 
 <dl class="class">
@@ -594,73 +587,8 @@ connection id</li>
 </dd></dl>
 
 <dl class="class">
-<dt id="airflow.operators.HiveToDruidTransfer">
-<em class="property">class </em><code 
class="descclassname">airflow.operators.</code><code 
class="descname">HiveToDruidTransfer</code><span 
class="sig-paren">(</span><em>sql</em>, <em>druid_datasource</em>, 
<em>ts_dim</em>, <em>metric_spec=None</em>, 
<em>hive_cli_conn_id='hive_cli_default'</em>, 
<em>druid_ingest_conn_id='druid_ingest_default'</em>, 
<em>metastore_conn_id='metastore_default'</em>, 
<em>hadoop_dependency_coordinates=None</em>, <em>intervals=None</em>, 
<em>num_shards=-1</em>, <em>target_partition_size=-1</em>, <em>*args</em>, 
<em>**kwargs</em><span class="sig-paren">)</span><a class="reference internal" 
href="_modules/hive_to_druid.html#HiveToDruidTransfer"><span 
class="viewcode-link">[source]</span></a><a class="headerlink" 
href="#airflow.operators.HiveToDruidTransfer" title="Permalink to this 
definition">¶</a></dt>
-<dd><p>Bases: <a class="reference internal" 
href="#airflow.models.BaseOperator" title="airflow.models.BaseOperator"><code 
class="xref py py-class docutils literal"><span 
class="pre">airflow.models.BaseOperator</span></code></a></p>
-<p>Moves data from Hive to Druid, [del]note that for now the data is loaded
-into memory before being pushed to Druid, so this operator should
-be used for smallish amount of data.[/del]</p>
-<table class="docutils field-list" frame="void" rules="none">
-<col class="field-name" />
-<col class="field-body" />
-<tbody valign="top">
-<tr class="field-odd field"><th class="field-name">Parameters:</th><td 
class="field-body"><ul class="first last simple">
-<li><strong>sql</strong> (<em>str</em>) &#8211; SQL query to execute against 
the Druid database</li>
-<li><strong>druid_datasource</strong> (<em>str</em>) &#8211; the datasource 
you want to ingest into in druid</li>
-<li><strong>ts_dim</strong> (<em>str</em>) &#8211; the timestamp dimension</li>
-<li><strong>metric_spec</strong> (<em>list</em>) &#8211; the metrics you want 
to define for your data</li>
-<li><strong>hive_cli_conn_id</strong> (<em>str</em>) &#8211; the hive 
connection id</li>
-<li><strong>druid_ingest_conn_id</strong> (<em>str</em>) &#8211; the druid 
ingest connection id</li>
-<li><strong>metastore_conn_id</strong> (<em>str</em>) &#8211; the metastore 
connection id</li>
-<li><strong>hadoop_dependency_coordinates</strong> (<em>list of str</em>) 
&#8211; list of coordinates to squeeze
-int the ingest json</li>
-<li><strong>intervals</strong> (<em>list</em>) &#8211; list of time intervals 
that defines segments, this
-is passed as is to the json object</li>
-</ul>
-</td>
-</tr>
-</tbody>
-</table>
-</dd></dl>
-
-<dl class="class">
-<dt id="airflow.operators.HiveToMySqlTransfer">
-<em class="property">class </em><code 
class="descclassname">airflow.operators.</code><code 
class="descname">HiveToMySqlTransfer</code><span 
class="sig-paren">(</span><em>sql</em>, <em>mysql_table</em>, 
<em>hiveserver2_conn_id='hiveserver2_default'</em>, 
<em>mysql_conn_id='mysql_default'</em>, <em>mysql_preoperator=None</em>, 
<em>mysql_postoperator=None</em>, <em>bulk_load=False</em>, <em>*args</em>, 
<em>**kwargs</em><span class="sig-paren">)</span><a class="reference internal" 
href="_modules/hive_to_mysql.html#HiveToMySqlTransfer"><span 
class="viewcode-link">[source]</span></a><a class="headerlink" 
href="#airflow.operators.HiveToMySqlTransfer" title="Permalink to this 
definition">¶</a></dt>
-<dd><p>Bases: <a class="reference internal" 
href="#airflow.models.BaseOperator" title="airflow.models.BaseOperator"><code 
class="xref py py-class docutils literal"><span 
class="pre">airflow.models.BaseOperator</span></code></a></p>
-<p>Moves data from Hive to MySQL, note that for now the data is loaded
-into memory before being pushed to MySQL, so this operator should
-be used for smallish amount of data.</p>
-<table class="docutils field-list" frame="void" rules="none">
-<col class="field-name" />
-<col class="field-body" />
-<tbody valign="top">
-<tr class="field-odd field"><th class="field-name">Parameters:</th><td 
class="field-body"><ul class="first last simple">
-<li><strong>sql</strong> (<em>str</em>) &#8211; SQL query to execute against 
the MySQL database</li>
-<li><strong>mysql_table</strong> (<em>str</em>) &#8211; target MySQL table, 
use dot notation to target a
-specific database</li>
-<li><strong>mysql_conn_id</strong> (<em>str</em>) &#8211; source mysql 
connection</li>
-<li><strong>hiveserver2_conn_id</strong> (<em>str</em>) &#8211; destination 
hive connection</li>
-<li><strong>mysql_preoperator</strong> (<em>str</em>) &#8211; sql statement to 
run against mysql prior to
-import, typically use to truncate of delete in place of the data
-coming in, allowing the task to be idempotent (running the task
-twice won&#8217;t double load data)</li>
-<li><strong>mysql_postoperator</strong> (<em>str</em>) &#8211; sql statement 
to run against mysql after the
-import, typically used to move data from staging to production
-and issue cleanup commands.</li>
-<li><strong>bulk_load</strong> (<em>bool</em>) &#8211; flag to use bulk_load 
option.  This loads mysql directly
-from a tab-delimited text file using the LOAD DATA LOCAL INFILE command.
-This option requires an extra connection parameter for the
-destination MySQL connection: {&#8216;local_infile&#8217;: true}.</li>
-</ul>
-</td>
-</tr>
-</tbody>
-</table>
-</dd></dl>
-
-<dl class="class">
 <dt id="airflow.operators.SimpleHttpOperator">
-<em class="property">class </em><code 
class="descclassname">airflow.operators.</code><code 
class="descname">SimpleHttpOperator</code><span 
class="sig-paren">(</span><em>endpoint</em>, <em>method='POST'</em>, 
<em>data=None</em>, <em>headers=None</em>, <em>response_check=None</em>, 
<em>extra_options=None</em>, <em>http_conn_id='http_default'</em>, 
<em>*args</em>, <em>**kwargs</em><span class="sig-paren">)</span><a 
class="reference internal" 
href="_modules/http_operator.html#SimpleHttpOperator"><span 
class="viewcode-link">[source]</span></a><a class="headerlink" 
href="#airflow.operators.SimpleHttpOperator" title="Permalink to this 
definition">¶</a></dt>
+<em class="property">class </em><code 
class="descclassname">airflow.operators.</code><code 
class="descname">SimpleHttpOperator</code><span 
class="sig-paren">(</span><em>endpoint</em>, <em>method='POST'</em>, 
<em>data=None</em>, <em>headers=None</em>, <em>response_check=None</em>, 
<em>extra_options=None</em>, <em>xcom_push=False</em>, 
<em>http_conn_id='http_default'</em>, <em>*args</em>, <em>**kwargs</em><span 
class="sig-paren">)</span><a class="reference internal" 
href="_modules/http_operator.html#SimpleHttpOperator"><span 
class="viewcode-link">[source]</span></a><a class="headerlink" 
href="#airflow.operators.SimpleHttpOperator" title="Permalink to this 
definition">¶</a></dt>
 <dd><p>Bases: <a class="reference internal" 
href="#airflow.models.BaseOperator" title="airflow.models.BaseOperator"><code 
class="xref py py-class docutils literal"><span 
class="pre">airflow.models.BaseOperator</span></code></a></p>
 <p>Calls an endpoint on an HTTP system to execute an action</p>
 <table class="docutils field-list" frame="void" rules="none">
@@ -671,13 +599,13 @@ destination MySQL connection: 
{&#8216;local_infile&#8217;: true}.</li>
 <li><strong>http_conn_id</strong> (<em>string</em>) &#8211; The connection to 
run the sensor against</li>
 <li><strong>endpoint</strong> (<em>string</em>) &#8211; The relative part of 
the full url</li>
 <li><strong>method</strong> (<em>string</em>) &#8211; The HTTP method to use, 
default = &#8220;POST&#8221;</li>
-<li><strong>data</strong> (<em>For POST/PUT, depends on the content-type 
parameter,
-for GET a dictionary of key/value string pairs</em>) &#8211; The data to pass. 
POST-data in POST/PUT and params
+<li><strong>data</strong> (<em>For POST/PUT</em><em>, </em><em>depends on the 
content-type parameter</em><em>,
+</em><em>for GET a dictionary of key/value string pairs</em>) &#8211; The data 
to pass. POST-data in POST/PUT and params
 in the URL for a GET request.</li>
 <li><strong>headers</strong> (<em>a dictionary of string key/value pairs</em>) 
&#8211; The HTTP headers to be added to the GET request</li>
-<li><strong>response_check</strong> (<em>A lambda or defined function.</em>) 
&#8211; A check against the &#8216;requests&#8217; response object.
+<li><strong>response_check</strong> (<em>A lambda</em><em> or </em><em>defined 
function.</em>) &#8211; A check against the &#8216;requests&#8217; response 
object.
 Returns True for &#8216;pass&#8217; and False otherwise.</li>
-<li><strong>extra_options</strong> (<em>A dictionary of options, where key is 
string and value
+<li><strong>extra_options</strong> (<em>A dictionary of options</em><em>, 
</em><em>where key is string and value
 depends on the option that's being modified.</em>) &#8211; Extra options for 
the &#8216;requests&#8217; library, see the
 &#8216;requests&#8217; documentation (options to modify timeout, ssl, 
etc.)</li>
 </ul>
@@ -704,9 +632,9 @@ depends on the option that's being modified.</em>) &#8211; 
Extra options for the
 <li><strong>endpoint</strong> (<em>string</em>) &#8211; The relative part of 
the full url</li>
 <li><strong>params</strong> (<em>a dictionary of string key/value pairs</em>) 
&#8211; The parameters to be added to the GET url</li>
 <li><strong>headers</strong> (<em>a dictionary of string key/value pairs</em>) 
&#8211; The HTTP headers to be added to the GET request</li>
-<li><strong>response_check</strong> (<em>A lambda or defined function.</em>) 
&#8211; A check against the &#8216;requests&#8217; response object.
+<li><strong>response_check</strong> (<em>A lambda</em><em> or </em><em>defined 
function.</em>) &#8211; A check against the &#8216;requests&#8217; response 
object.
 Returns True for &#8216;pass&#8217; and False otherwise.</li>
-<li><strong>extra_options</strong> (<em>A dictionary of options, where key is 
string and value
+<li><strong>extra_options</strong> (<em>A dictionary of options</em><em>, 
</em><em>where key is string and value
 depends on the option that's being modified.</em>) &#8211; Extra options for 
the &#8216;requests&#8217; library, see the
 &#8216;requests&#8217; documentation (options to modify timeout, ssl, 
etc.)</li>
 </ul>
@@ -745,55 +673,8 @@ Examples: <code class="docutils literal"><span 
class="pre">ds=2016-01-01</span><
 </dd></dl>
 
 <dl class="class">
-<dt id="airflow.operators.MsSqlOperator">
-<em class="property">class </em><code 
class="descclassname">airflow.operators.</code><code 
class="descname">MsSqlOperator</code><span 
class="sig-paren">(</span><em>sql</em>, <em>mssql_conn_id='mssql_default'</em>, 
<em>parameters=None</em>, <em>*args</em>, <em>**kwargs</em><span 
class="sig-paren">)</span><a class="reference internal" 
href="_modules/mssql_operator.html#MsSqlOperator"><span 
class="viewcode-link">[source]</span></a><a class="headerlink" 
href="#airflow.operators.MsSqlOperator" title="Permalink to this 
definition">¶</a></dt>
-<dd><p>Bases: <a class="reference internal" 
href="#airflow.models.BaseOperator" title="airflow.models.BaseOperator"><code 
class="xref py py-class docutils literal"><span 
class="pre">airflow.models.BaseOperator</span></code></a></p>
-<p>Executes sql code in a specific Microsoft SQL database
-:param mssql_conn_id: reference to a specific mssql database
-:type mssql_conn_id: string
-:param sql: the sql code to be executed
-:type sql: string or string pointing to a template file.
-File must have a &#8216;.sql&#8217; extensions.</p>
-</dd></dl>
-
-<dl class="class">
-<dt id="airflow.operators.MsSqlToHiveTransfer">
-<em class="property">class </em><code 
class="descclassname">airflow.operators.</code><code 
class="descname">MsSqlToHiveTransfer</code><span 
class="sig-paren">(</span><em>sql</em>, <em>hive_table</em>, 
<em>create=True</em>, <em>recreate=False</em>, <em>partition=None</em>, 
<em>delimiter=u'\x01'</em>, <em>mssql_conn_id='mssql_default'</em>, 
<em>hive_cli_conn_id='hive_cli_default'</em>, <em>*args</em>, 
<em>**kwargs</em><span class="sig-paren">)</span><a class="reference internal" 
href="_modules/mssql_to_hive.html#MsSqlToHiveTransfer"><span 
class="viewcode-link">[source]</span></a><a class="headerlink" 
href="#airflow.operators.MsSqlToHiveTransfer" title="Permalink to this 
definition">¶</a></dt>
-<dd><p>Bases: <a class="reference internal" 
href="#airflow.models.BaseOperator" title="airflow.models.BaseOperator"><code 
class="xref py py-class docutils literal"><span 
class="pre">airflow.models.BaseOperator</span></code></a></p>
-<p>Moves data from Microsoft SQL Server to Hive. The operator runs
-your query against Microsoft SQL Server, stores the file locally
-before loading it into a Hive table. If the <code class="docutils 
literal"><span class="pre">create</span></code> or
-<code class="docutils literal"><span class="pre">recreate</span></code> 
arguments are set to <code class="docutils literal"><span 
class="pre">True</span></code>,
-a <code class="docutils literal"><span class="pre">CREATE</span> <span 
class="pre">TABLE</span></code> and <code class="docutils literal"><span 
class="pre">DROP</span> <span class="pre">TABLE</span></code> statements are 
generated.
-Hive data types are inferred from the cursor&#8217;s metadata.
-Note that the table generated in Hive uses <code class="docutils 
literal"><span class="pre">STORED</span> <span class="pre">AS</span> <span 
class="pre">textfile</span></code>
-which isn&#8217;t the most efficient serialization format. If a
-large amount of data is loaded and/or if the table gets
-queried considerably, you may want to use this operator only to
-stage the data into a temporary table before loading it into its
-final destination using a <code class="docutils literal"><span 
class="pre">HiveOperator</span></code>.
-:param sql: SQL query to execute against the Microsoft SQL Server database
-:type sql: str
-:param hive_table: target Hive table, use dot notation to target a
-specific database
-:type hive_table: str
-:param create: whether to create the table if it doesn&#8217;t exist
-:type create: bool
-:param recreate: whether to drop and recreate the table at every execution
-:type recreate: bool
-:param partition: target partition as a dict of partition columns and values
-:type partition: dict
-:param delimiter: field delimiter in the file
-:type delimiter: str
-:param mssql_conn_id: source Microsoft SQL Server connection
-:type mssql_conn_id: str
-:param hive_conn_id: destination hive connection
-:type hive_conn_id: str</p>
-</dd></dl>
-
-<dl class="class">
 <dt id="airflow.operators.MySqlOperator">
-<em class="property">class </em><code 
class="descclassname">airflow.operators.</code><code 
class="descname">MySqlOperator</code><span 
class="sig-paren">(</span><em>sql</em>, <em>mysql_conn_id='mysql_default'</em>, 
<em>parameters=None</em>, <em>autocommit=False</em>, <em>*args</em>, 
<em>**kwargs</em><span class="sig-paren">)</span><a class="reference internal" 
href="_modules/mysql_operator.html#MySqlOperator"><span 
class="viewcode-link">[source]</span></a><a class="headerlink" 
href="#airflow.operators.MySqlOperator" title="Permalink to this 
definition">¶</a></dt>
+<em class="property">class </em><code 
class="descclassname">airflow.operators.</code><code 
class="descname">MySqlOperator</code><span 
class="sig-paren">(</span><em>sql</em>, <em>mysql_conn_id='mysql_default'</em>, 
<em>parameters=None</em>, <em>autocommit=False</em>, <em>database=None</em>, 
<em>*args</em>, <em>**kwargs</em><span class="sig-paren">)</span><a 
class="reference internal" 
href="_modules/mysql_operator.html#MySqlOperator"><span 
class="viewcode-link">[source]</span></a><a class="headerlink" 
href="#airflow.operators.MySqlOperator" title="Permalink to this 
definition">¶</a></dt>
 <dd><p>Bases: <a class="reference internal" 
href="#airflow.models.BaseOperator" title="airflow.models.BaseOperator"><code 
class="xref py py-class docutils literal"><span 
class="pre">airflow.models.BaseOperator</span></code></a></p>
 <p>Executes sql code in a specific MySQL database</p>
 <table class="docutils field-list" frame="void" rules="none">
@@ -802,47 +683,10 @@ specific database
 <tbody valign="top">
 <tr class="field-odd field"><th class="field-name">Parameters:</th><td 
class="field-body"><ul class="first last simple">
 <li><strong>mysql_conn_id</strong> (<em>string</em>) &#8211; reference to a 
specific mysql database</li>
-<li><strong>sql</strong> (<em>Can receive a str representing a sql statement,
-a list of str (sql statements), or reference to a template file.
+<li><strong>sql</strong> (<em>Can receive a str representing a sql 
statement</em><em>,
+</em><em>a list of str</em><em> (</em><em>sql 
statements</em><em>)</em><em></em><em>, or </em><em>reference to a template 
file.
 Template reference are recognized by str ending in '.sql'</em>) &#8211; the 
sql code to be executed</li>
-</ul>
-</td>
-</tr>
-</tbody>
-</table>
-</dd></dl>
-
-<dl class="class">
-<dt id="airflow.operators.MySqlToHiveTransfer">
-<em class="property">class </em><code 
class="descclassname">airflow.operators.</code><code 
class="descname">MySqlToHiveTransfer</code><span 
class="sig-paren">(</span><em>sql</em>, <em>hive_table</em>, 
<em>create=True</em>, <em>recreate=False</em>, <em>partition=None</em>, 
<em>delimiter=u'\x01'</em>, <em>mysql_conn_id='mysql_default'</em>, 
<em>hive_cli_conn_id='hive_cli_default'</em>, <em>*args</em>, 
<em>**kwargs</em><span class="sig-paren">)</span><a class="reference internal" 
href="_modules/mysql_to_hive.html#MySqlToHiveTransfer"><span 
class="viewcode-link">[source]</span></a><a class="headerlink" 
href="#airflow.operators.MySqlToHiveTransfer" title="Permalink to this 
definition">¶</a></dt>
-<dd><p>Bases: <a class="reference internal" 
href="#airflow.models.BaseOperator" title="airflow.models.BaseOperator"><code 
class="xref py py-class docutils literal"><span 
class="pre">airflow.models.BaseOperator</span></code></a></p>
-<p>Moves data from MySql to Hive. The operator runs your query against
-MySQL, stores the file locally before loading it into a Hive table.
-If the <code class="docutils literal"><span class="pre">create</span></code> 
or <code class="docutils literal"><span class="pre">recreate</span></code> 
arguments are set to <code class="docutils literal"><span 
class="pre">True</span></code>,
-a <code class="docutils literal"><span class="pre">CREATE</span> <span 
class="pre">TABLE</span></code> and <code class="docutils literal"><span 
class="pre">DROP</span> <span class="pre">TABLE</span></code> statements are 
generated.
-Hive data types are inferred from the cursor&#8217;s metadata. Note that the
-table generated in Hive uses <code class="docutils literal"><span 
class="pre">STORED</span> <span class="pre">AS</span> <span 
class="pre">textfile</span></code>
-which isn&#8217;t the most efficient serialization format. If a
-large amount of data is loaded and/or if the table gets
-queried considerably, you may want to use this operator only to
-stage the data into a temporary table before loading it into its
-final destination using a <code class="docutils literal"><span 
class="pre">HiveOperator</span></code>.</p>
-<table class="docutils field-list" frame="void" rules="none">
-<col class="field-name" />
-<col class="field-body" />
-<tbody valign="top">
-<tr class="field-odd field"><th class="field-name">Parameters:</th><td 
class="field-body"><ul class="first last simple">
-<li><strong>sql</strong> (<em>str</em>) &#8211; SQL query to execute against 
the MySQL database</li>
-<li><strong>hive_table</strong> (<em>str</em>) &#8211; target Hive table, use 
dot notation to target a
-specific database</li>
-<li><strong>create</strong> (<em>bool</em>) &#8211; whether to create the 
table if it doesn&#8217;t exist</li>
-<li><strong>recreate</strong> (<em>bool</em>) &#8211; whether to drop and 
recreate the table at every
-execution</li>
-<li><strong>partition</strong> (<em>dict</em>) &#8211; target partition as a 
dict of partition columns
-and values</li>
-<li><strong>delimiter</strong> (<em>str</em>) &#8211; field delimiter in the 
file</li>
-<li><strong>mysql_conn_id</strong> (<em>str</em>) &#8211; source mysql 
connection</li>
-<li><strong>hive_conn_id</strong> (<em>str</em>) &#8211; destination hive 
connection</li>
+<li><strong>database</strong> (<em>string</em>) &#8211; name of database which 
overwrite defined one in connection</li>
 </ul>
 </td>
 </tr>
@@ -877,27 +721,6 @@ connection id</li>
 </dd></dl>
 
 <dl class="class">
-<dt id="airflow.operators.PostgresOperator">
-<em class="property">class </em><code 
class="descclassname">airflow.operators.</code><code 
class="descname">PostgresOperator</code><span 
class="sig-paren">(</span><em>sql</em>, 
<em>postgres_conn_id='postgres_default'</em>, <em>autocommit=False</em>, 
<em>parameters=None</em>, <em>*args</em>, <em>**kwargs</em><span 
class="sig-paren">)</span><a class="reference internal" 
href="_modules/postgres_operator.html#PostgresOperator"><span 
class="viewcode-link">[source]</span></a><a class="headerlink" 
href="#airflow.operators.PostgresOperator" title="Permalink to this 
definition">¶</a></dt>
-<dd><p>Bases: <a class="reference internal" 
href="#airflow.models.BaseOperator" title="airflow.models.BaseOperator"><code 
class="xref py py-class docutils literal"><span 
class="pre">airflow.models.BaseOperator</span></code></a></p>
-<p>Executes sql code in a specific Postgres database</p>
-<table class="docutils field-list" frame="void" rules="none">
-<col class="field-name" />
-<col class="field-body" />
-<tbody valign="top">
-<tr class="field-odd field"><th class="field-name">Parameters:</th><td 
class="field-body"><ul class="first last simple">
-<li><strong>postgres_conn_id</strong> (<em>string</em>) &#8211; reference to a 
specific postgres database</li>
-<li><strong>sql</strong> (<em>Can receive a str representing a sql statement,
-a list of str (sql statements), or reference to a template file.
-Template reference are recognized by str ending in '.sql'</em>) &#8211; the 
sql code to be executed</li>
-</ul>
-</td>
-</tr>
-</tbody>
-</table>
-</dd></dl>
-
-<dl class="class">
 <dt id="airflow.operators.PrestoCheckOperator">
 <em class="property">class </em><code 
class="descclassname">airflow.operators.</code><code 
class="descname">PrestoCheckOperator</code><span 
class="sig-paren">(</span><em>sql</em>, 
<em>presto_conn_id='presto_default'</em>, <em>*args</em>, 
<em>**kwargs</em><span class="sig-paren">)</span><a class="reference internal" 
href="_modules/presto_check_operator.html#PrestoCheckOperator"><span 
class="viewcode-link">[source]</span></a><a class="headerlink" 
href="#airflow.operators.PrestoCheckOperator" title="Permalink to this 
definition">¶</a></dt>
 <dd><p>Bases: <code class="xref py py-class docutils literal"><span 
class="pre">airflow.operators.check_operator.CheckOperator</span></code></p>
@@ -1039,52 +862,6 @@ Unix wildcard pattern</li>
 </dd></dl>
 
 <dl class="class">
-<dt id="airflow.operators.S3ToHiveTransfer">
-<em class="property">class </em><code 
class="descclassname">airflow.operators.</code><code 
class="descname">S3ToHiveTransfer</code><span 
class="sig-paren">(</span><em>s3_key</em>, <em>field_dict</em>, 
<em>hive_table</em>, <em>delimiter='</em>, <em>'</em>, <em>create=True</em>, 
<em>recreate=False</em>, <em>partition=None</em>, <em>headers=False</em>, 
<em>check_headers=False</em>, <em>wildcard_match=False</em>, 
<em>s3_conn_id='s3_default'</em>, <em>hive_cli_conn_id='hive_cli_default'</em>, 
<em>*args</em>, <em>**kwargs</em><span class="sig-paren">)</span><a 
class="reference internal" 
href="_modules/s3_to_hive_operator.html#S3ToHiveTransfer"><span 
class="viewcode-link">[source]</span></a><a class="headerlink" 
href="#airflow.operators.S3ToHiveTransfer" title="Permalink to this 
definition">¶</a></dt>
-<dd><p>Bases: <a class="reference internal" 
href="#airflow.models.BaseOperator" title="airflow.models.BaseOperator"><code 
class="xref py py-class docutils literal"><span 
class="pre">airflow.models.BaseOperator</span></code></a></p>
-<p>Moves data from S3 to Hive. The operator downloads a file from S3,
-stores the file locally before loading it into a Hive table.
-If the <code class="docutils literal"><span class="pre">create</span></code> 
or <code class="docutils literal"><span class="pre">recreate</span></code> 
arguments are set to <code class="docutils literal"><span 
class="pre">True</span></code>,
-a <code class="docutils literal"><span class="pre">CREATE</span> <span 
class="pre">TABLE</span></code> and <code class="docutils literal"><span 
class="pre">DROP</span> <span class="pre">TABLE</span></code> statements are 
generated.
-Hive data types are inferred from the cursor&#8217;s metadata from.</p>
-<p>Note that the table generated in Hive uses <code class="docutils 
literal"><span class="pre">STORED</span> <span class="pre">AS</span> <span 
class="pre">textfile</span></code>
-which isn&#8217;t the most efficient serialization format. If a
-large amount of data is loaded and/or if the tables gets
-queried considerably, you may want to use this operator only to
-stage the data into a temporary table before loading it into its
-final destination using a <code class="docutils literal"><span 
class="pre">HiveOperator</span></code>.</p>
-<table class="docutils field-list" frame="void" rules="none">
-<col class="field-name" />
-<col class="field-body" />
-<tbody valign="top">
-<tr class="field-odd field"><th class="field-name">Parameters:</th><td 
class="field-body"><ul class="first last simple">
-<li><strong>s3_key</strong> (<em>str</em>) &#8211; The key to be retrieved 
from S3</li>
-<li><strong>field_dict</strong> (<em>dict</em>) &#8211; A dictionary of the 
fields name in the file
-as keys and their Hive types as values</li>
-<li><strong>hive_table</strong> (<em>str</em>) &#8211; target Hive table, use 
dot notation to target a
-specific database</li>
-<li><strong>create</strong> (<em>bool</em>) &#8211; whether to create the 
table if it doesn&#8217;t exist</li>
-<li><strong>recreate</strong> (<em>bool</em>) &#8211; whether to drop and 
recreate the table at every
-execution</li>
-<li><strong>partition</strong> (<em>dict</em>) &#8211; target partition as a 
dict of partition columns
-and values</li>
-<li><strong>headers</strong> (<em>bool</em>) &#8211; whether the file contains 
column names on the first
-line</li>
-<li><strong>check_headers</strong> (<em>bool</em>) &#8211; whether the column 
names on the first line should be
-checked against the keys of field_dict</li>
-<li><strong>wildcard_match</strong> (<em>bool</em>) &#8211; whether the s3_key 
should be interpreted as a Unix
-wildcard pattern</li>
-<li><strong>delimiter</strong> (<em>str</em>) &#8211; field delimiter in the 
file</li>
-<li><strong>s3_conn_id</strong> (<em>str</em>) &#8211; source s3 
connection</li>
-<li><strong>hive_conn_id</strong> (<em>str</em>) &#8211; destination hive 
connection</li>
-</ul>
-</td>
-</tr>
-</tbody>
-</table>
-</dd></dl>
-
-<dl class="class">
 <dt id="airflow.operators.ShortCircuitOperator">
 <em class="property">class </em><code 
class="descclassname">airflow.operators.</code><code 
class="descname">ShortCircuitOperator</code><span 
class="sig-paren">(</span><em>python_callable</em>, <em>op_args=None</em>, 
<em>op_kwargs=None</em>, <em>provide_context=False</em>, 
<em>templates_dict=None</em>, <em>templates_exts=None</em>, <em>*args</em>, 
<em>**kwargs</em><span class="sig-paren">)</span><a class="reference internal" 
href="_modules/python_operator.html#ShortCircuitOperator"><span 
class="viewcode-link">[source]</span></a><a class="headerlink" 
href="#airflow.operators.ShortCircuitOperator" title="Permalink to this 
definition">¶</a></dt>
 <dd><p>Bases: <code class="xref py py-class docutils literal"><span 
class="pre">python_operator.PythonOperator</span></code></p>
@@ -1098,66 +875,6 @@ True, downstream tasks proceed as normal.</p>
 </dd></dl>
 
 <dl class="class">
-<dt id="airflow.operators.SlackAPIOperator">
-<em class="property">class </em><code 
class="descclassname">airflow.operators.</code><code 
class="descname">SlackAPIOperator</code><span 
class="sig-paren">(</span><em>token='unset'</em>, <em>method='unset'</em>, 
<em>api_params=None</em>, <em>*args</em>, <em>**kwargs</em><span 
class="sig-paren">)</span><a class="reference internal" 
href="_modules/slack_operator.html#SlackAPIOperator"><span 
class="viewcode-link">[source]</span></a><a class="headerlink" 
href="#airflow.operators.SlackAPIOperator" title="Permalink to this 
definition">¶</a></dt>
-<dd><p>Bases: <a class="reference internal" 
href="#airflow.models.BaseOperator" title="airflow.models.BaseOperator"><code 
class="xref py py-class docutils literal"><span 
class="pre">airflow.models.BaseOperator</span></code></a></p>
-<p>Base Slack Operator
-The SlackAPIPostOperator is derived from this operator.
-In the future additional Slack API Operators will be derived from this class 
as well</p>
-<table class="docutils field-list" frame="void" rules="none">
-<col class="field-name" />
-<col class="field-body" />
-<tbody valign="top">
-<tr class="field-odd field"><th class="field-name">Parameters:</th><td 
class="field-body"><ul class="first last simple">
-<li><strong>token</strong> (<em>string</em>) &#8211; Slack API token (<a 
class="reference external" 
href="https://api.slack.com/web";>https://api.slack.com/web</a>)</li>
-<li><strong>method</strong> (<em>string</em>) &#8211; The Slack API Method to 
Call (<a class="reference external" 
href="https://api.slack.com/methods";>https://api.slack.com/methods</a>)</li>
-<li><strong>api_params</strong> (<em>dict</em>) &#8211; API Method call 
parameters (<a class="reference external" 
href="https://api.slack.com/methods";>https://api.slack.com/methods</a>)</li>
-</ul>
-</td>
-</tr>
-</tbody>
-</table>
-<dl class="method">
-<dt id="airflow.operators.SlackAPIOperator.construct_api_call_params">
-<code class="descname">construct_api_call_params</code><span 
class="sig-paren">(</span><span class="sig-paren">)</span><a class="reference 
internal" 
href="_modules/slack_operator.html#SlackAPIOperator.construct_api_call_params"><span
 class="viewcode-link">[source]</span></a><a class="headerlink" 
href="#airflow.operators.SlackAPIOperator.construct_api_call_params" 
title="Permalink to this definition">¶</a></dt>
-<dd><p>Used by the execute function. Allows templating on the source fields of 
the api_call_params dict before construction</p>
-<p>Override in child classes.
-Each SlackAPIOperator child class is responsible for having a 
construct_api_call_params function
-which sets self.api_call_params with a dict of API call parameters (<a 
class="reference external" 
href="https://api.slack.com/methods";>https://api.slack.com/methods</a>)</p>
-</dd></dl>
-
-<dl class="method">
-<dt id="airflow.operators.SlackAPIOperator.execute">
-<code class="descname">execute</code><span 
class="sig-paren">(</span><em>**kwargs</em><span class="sig-paren">)</span><a 
class="reference internal" 
href="_modules/slack_operator.html#SlackAPIOperator.execute"><span 
class="viewcode-link">[source]</span></a><a class="headerlink" 
href="#airflow.operators.SlackAPIOperator.execute" title="Permalink to this 
definition">¶</a></dt>
-<dd><p>SlackAPIOperator calls will not fail even if the call is not 
unsuccessful.
-It should not prevent a DAG from completing in success</p>
-</dd></dl>
-
-</dd></dl>
-
-<dl class="class">
-<dt id="airflow.operators.SlackAPIPostOperator">
-<em class="property">class </em><code 
class="descclassname">airflow.operators.</code><code 
class="descname">SlackAPIPostOperator</code><span 
class="sig-paren">(</span><em>channel='#general'</em>, 
<em>username='Airflow'</em>, <em>text='No message has been set.\nHere is a cat 
video instead\nhttps://www.youtube.com/watch?v=J---aiyznGQ'</em>, 
<em>icon_url='https://raw.githubusercontent.com/airbnb/airflow/master/airflow/www/static/pin_100.png'</em>,
 <em>attachments=None</em>, <em>*args</em>, <em>**kwargs</em><span 
class="sig-paren">)</span><a class="reference internal" 
href="_modules/slack_operator.html#SlackAPIPostOperator"><span 
class="viewcode-link">[source]</span></a><a class="headerlink" 
href="#airflow.operators.SlackAPIPostOperator" title="Permalink to this 
definition">¶</a></dt>
-<dd><p>Bases: <code class="xref py py-class docutils literal"><span 
class="pre">slack_operator.SlackAPIOperator</span></code></p>
-<p>Posts messages to a slack channel</p>
-<table class="docutils field-list" frame="void" rules="none">
-<col class="field-name" />
-<col class="field-body" />
-<tbody valign="top">
-<tr class="field-odd field"><th class="field-name">Parameters:</th><td 
class="field-body"><ul class="first last simple">
-<li><strong>channel</strong> (<em>string</em>) &#8211; channel in which to 
post message on slack name (#general) or ID (C12318391)</li>
-<li><strong>username</strong> (<em>string</em>) &#8211; Username that airflow 
will be posting to Slack as</li>
-<li><strong>text</strong> (<em>string</em>) &#8211; message to send to 
slack</li>
-<li><strong>icon_url</strong> (<em>string</em>) &#8211; url to icon used for 
this message</li>
-<li><strong>attachments</strong> (<em>array of hashes</em>) &#8211; extra 
formatting details - see <a class="reference external" 
href="https://api.slack.com/docs/attachments";>https://api.slack.com/docs/attachments</a></li>
-</ul>
-</td>
-</tr>
-</tbody>
-</table>
-</dd></dl>
-
-<dl class="class">
 <dt id="airflow.operators.SqlSensor">
 <em class="property">class </em><code 
class="descclassname">airflow.operators.</code><code 
class="descname">SqlSensor</code><span 
class="sig-paren">(</span><em>conn_id</em>, <em>sql</em>, <em>*args</em>, 
<em>**kwargs</em><span class="sig-paren">)</span><a class="reference internal" 
href="_modules/sensors.html#SqlSensor"><span 
class="viewcode-link">[source]</span></a><a class="headerlink" 
href="#airflow.operators.SqlSensor" title="Permalink to this 
definition">¶</a></dt>
 <dd><p>Bases: <a class="reference internal" 
href="#airflow.operators.sensors.BaseSensorOperator" 
title="airflow.operators.sensors.BaseSensorOperator"><code class="xref py 
py-class docutils literal"><span 
class="pre">sensors.BaseSensorOperator</span></code></a></p>
@@ -1200,56 +917,10 @@ that contains a non-zero / empty string value.</li>
 <p>Waits for a file or folder to land in HDFS</p>
 </dd></dl>
 
-<dl class="class">
-<dt id="airflow.operators.docker_operator.DockerOperator">
-<em class="property">class </em><code 
class="descclassname">airflow.operators.docker_operator.</code><code 
class="descname">DockerOperator</code><span 
class="sig-paren">(</span><em>image</em>, <em>api_version=None</em>, 
<em>command=None</em>, <em>cpus=1.0</em>, 
<em>docker_url='unix://var/run/docker.sock'</em>, <em>environment=None</em>, 
<em>force_pull=False</em>, <em>mem_limit=None</em>, <em>network_mode=None</em>, 
<em>tls_ca_cert=None</em>, <em>tls_client_cert=None</em>, 
<em>tls_client_key=None</em>, <em>tls_hostname=None</em>, 
<em>tls_ssl_version=None</em>, <em>tmp_dir='/tmp/airflow'</em>, 
<em>user=None</em>, <em>volumes=None</em>, <em>xcom_push=False</em>, 
<em>xcom_all=False</em>, <em>*args</em>, <em>**kwargs</em><span 
class="sig-paren">)</span><a class="reference internal" 
href="_modules/airflow/operators/docker_operator.html#DockerOperator"><span 
class="viewcode-link">[source]</span></a><a class="headerlink" 
href="#airflow.operators.docker_operator.DockerOperator" title="Permal
 ink to this definition">¶</a></dt>
-<dd><p>Execute a command inside a docker container.</p>
-<p>A temporary directory is created on the host and mounted into a container 
to allow storing files
-that together exceed the default disk size of 10GB in a container. The path to 
the mounted
-directory can be accessed via the environment variable <code class="docutils 
literal"><span class="pre">AIRFLOW_TMP_DIR</span></code>.</p>
-<table class="docutils field-list" frame="void" rules="none">
-<col class="field-name" />
-<col class="field-body" />
-<tbody valign="top">
-<tr class="field-odd field"><th class="field-name">Parameters:</th><td 
class="field-body"><ul class="first last simple">
-<li><strong>image</strong> (<em>str</em>) &#8211; Docker image from which to 
create the container.</li>
-<li><strong>api_version</strong> (<em>str</em>) &#8211; Remote API 
version.</li>
-<li><strong>command</strong> (<em>str or list</em>) &#8211; Command to be run 
in the container.</li>
-<li><strong>cpus</strong> (<em>float</em>) &#8211; Number of CPUs to assign to 
the container.
-This value gets multiplied with 1024. See
-<a class="reference external" 
href="https://docs.docker.com/engine/reference/run/#cpu-share-constraint";>https://docs.docker.com/engine/reference/run/#cpu-share-constraint</a></li>
-<li><strong>docker_url</strong> (<em>str</em>) &#8211; URL of the host running 
the docker daemon.</li>
-<li><strong>environment</strong> (<em>dict</em>) &#8211; Environment variables 
to set in the container.</li>
-<li><strong>force_pull</strong> (<em>bool</em>) &#8211; Pull the docker image 
on every run.</li>
-<li><strong>mem_limit</strong> (<em>float or str</em>) &#8211; Maximum amount 
of memory the container can use. Either a float value, which
-represents the limit in bytes, or a string like <code class="docutils 
literal"><span class="pre">128m</span></code> or <code class="docutils 
literal"><span class="pre">1g</span></code>.</li>
-<li><strong>network_mode</strong> (<em>str</em>) &#8211; Network mode for the 
container.</li>
-<li><strong>tls_ca_cert</strong> (<em>str</em>) &#8211; Path to a PEM-encoded 
certificate authority to secure the docker connection.</li>
-<li><strong>tls_client_cert</strong> (<em>str</em>) &#8211; Path to the 
PEM-encoded certificate used to authenticate docker client.</li>
-<li><strong>tls_client_key</strong> (<em>str</em>) &#8211; Path to the 
PEM-encoded key used to authenticate docker client.</li>
-<li><strong>tls_hostname</strong> (<em>str or bool</em>) &#8211; Hostname to 
match against the docker server certificate or False to
-disable the check.</li>
-<li><strong>tls_ssl_version</strong> (<em>str</em>) &#8211; Version of SSL to 
use when communicating with docker daemon.</li>
-<li><strong>tmp_dir</strong> (<em>str</em>) &#8211; Mount point inside the 
container to a temporary directory created on the host by
-the operator. The path is also made available via the environment variable
-<code class="docutils literal"><span class="pre">AIRFLOW_TMP_DIR</span></code> 
inside the container.</li>
-<li><strong>user</strong> (<em>int or str</em>) &#8211; Default user inside 
the docker container.</li>
-<li><strong>volumes</strong> &#8211; List of volumes to mount into the 
container, e.g.
-<code class="docutils literal"><span 
class="pre">['/host/path:/container/path',</span> <span 
class="pre">'/host/path2:/container/path2:ro']</span></code>.</li>
-<li><strong>xcom_push</strong> (<em>bool</em>) &#8211; Whether the stdout will be pushed to the next step using XCom.
-The default is False.</li>
-<li><strong>xcom_all</strong> (<em>bool</em>) &#8211; Push all the stdout or 
just the last line. The default is False (last line).</li>
-</ul>
-</td>
-</tr>
-</tbody>
-</table>
-</dd></dl>
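<p>A minimal, hypothetical DAG snippet showing how the <code class="docutils literal"><span class="pre">DockerOperator</span></code> parameters above might be combined; the image, command and connection settings are illustrative placeholders:</p>
<div class="highlight-python"><div class="highlight"><pre>from datetime import datetime

from airflow import DAG
from airflow.operators.docker_operator import DockerOperator

dag = DAG('docker_example', start_date=datetime(2016, 1, 1), schedule_interval='@daily')

# Run a one-off command inside a container; files written under
# AIRFLOW_TMP_DIR live in the temporary host directory described above.
docker_task = DockerOperator(
    task_id='docker_task',
    image='ubuntu:latest',
    api_version='auto',
    command='echo "hello from $AIRFLOW_TMP_DIR"',
    docker_url='unix://var/run/docker.sock',
    network_mode='bridge',
    xcom_push=True,  # push the last line of stdout to XCom
    dag=dag)
</pre></div></div>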
-
 </div>
-<div class="section" id="module-airflow.contrib.operators">
-<span id="community-contributed-operators"></span><h3>Community-contributed 
Operators<a class="headerlink" href="#module-airflow.contrib.operators" 
title="Permalink to this headline">¶</a></h3>
-<p>Importer that dynamically loads a class and module from its parent. This
+<div class="section" id="community-contributed-operators">
+<h3>Community-contributed Operators<a class="headerlink" 
href="#community-contributed-operators" title="Permalink to this 
headline">¶</a></h3>
+<span class="target" id="module-airflow.contrib.operators"></span><p>Importer 
that dynamically loads a class and module from its parent. This
 allows Airflow to support <code class="docutils literal"><span 
class="pre">from</span> <span class="pre">airflow.operators</span> <span 
class="pre">import</span> <span class="pre">BashOperator</span></code>
 even though BashOperator is actually in
 <code class="docutils literal"><span 
class="pre">airflow.operators.bash_operator</span></code>.</p>
@@ -1286,371 +957,73 @@ behavior.</li>
 </dd></dl>
 
 <dl class="class">
-<dt id="airflow.contrib.operators.VerticaOperator">
-<em class="property">class </em><code 
class="descclassname">airflow.contrib.operators.</code><code 
class="descname">VerticaOperator</code><span 
class="sig-paren">(</span><em>sql</em>, 
<em>vertica_conn_id='vertica_default'</em>, <em>*args</em>, 
<em>**kwargs</em><span class="sig-paren">)</span><a class="reference internal" 
href="_modules/vertica_operator.html#VerticaOperator"><span 
class="viewcode-link">[source]</span></a><a class="headerlink" 
href="#airflow.contrib.operators.VerticaOperator" title="Permalink to this 
definition">¶</a></dt>
-<dd><p>Bases: <a class="reference internal" 
href="#airflow.models.BaseOperator" title="airflow.models.BaseOperator"><code 
class="xref py py-class docutils literal"><span 
class="pre">airflow.models.BaseOperator</span></code></a></p>
-<p>Executes sql code in a specific Vertica database</p>
-<table class="docutils field-list" frame="void" rules="none">
-<col class="field-name" />
-<col class="field-body" />
-<tbody valign="top">
-<tr class="field-odd field"><th class="field-name">Parameters:</th><td 
class="field-body"><ul class="first last simple">
-<li><strong>vertica_conn_id</strong> (<em>string</em>) &#8211; reference to a 
specific Vertica database</li>
-<li><strong>sql</strong> (<em>Can receive a str representing a sql statement,
-a list of str (sql statements), or reference to a template file.
-Template references are recognized by str ending in '.sql'</em>) &#8211; the sql code to be executed</li>
-</ul>
-</td>
-</tr>
-</tbody>
-</table>
-</dd></dl>
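<p>A hypothetical example of the <code class="docutils literal"><span class="pre">VerticaOperator</span></code> in a DAG file; the connection id and SQL are placeholders, and a <code class="docutils literal"><span class="pre">.sql</span></code> template file would also be accepted:</p>
<div class="highlight-python"><div class="highlight"><pre>from datetime import datetime

from airflow import DAG
from airflow.contrib.operators import VerticaOperator

dag = DAG('vertica_example', start_date=datetime(2016, 1, 1), schedule_interval='@daily')

# Run a templated statement against the Vertica connection 'vertica_default'.
cleanup = VerticaOperator(
    task_id='cleanup_staging',
    vertica_conn_id='vertica_default',
    sql="DELETE FROM staging.events WHERE ds = '{{ ds }}'",
    dag=dag)
</pre></div></div>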
-
-<dl class="class">
-<dt id="airflow.contrib.operators.VerticaToHiveTransfer">
-<em class="property">class </em><code 
class="descclassname">airflow.contrib.operators.</code><code 
class="descname">VerticaToHiveTransfer</code><span 
class="sig-paren">(</span><em>sql</em>, <em>hive_table</em>, 
<em>create=True</em>, <em>recreate=False</em>, <em>partition=None</em>, 
<em>delimiter=u'x01'</em>, <em>vertica_conn_id='vertica_default'</em>, 
<em>hive_cli_conn_id='hive_cli_default'</em>, <em>*args</em>, 
<em>**kwargs</em><span class="sig-paren">)</span><a class="reference internal" 
href="_modules/vertica_to_hive.html#VerticaToHiveTransfer"><span 
class="viewcode-link">[source]</span></a><a class="headerlink" 
href="#airflow.contrib.operators.VerticaToHiveTransfer" title="Permalink to 
this definition">¶</a></dt>
-<dd><p>Bases: <a class="reference internal" 
href="#airflow.models.BaseOperator" title="airflow.models.BaseOperator"><code 
class="xref py py-class docutils literal"><span 
class="pre">airflow.models.BaseOperator</span></code></a></p>
-<p>Moves data from Vertia to Hive. The operator runs
-your query against Vertia, stores the file locally
-before loading it into a Hive table. If the <code class="docutils 
literal"><span class="pre">create</span></code> or
-<code class="docutils literal"><span class="pre">recreate</span></code> 
arguments are set to <code class="docutils literal"><span 
class="pre">True</span></code>,
-a <code class="docutils literal"><span class="pre">CREATE</span> <span 
class="pre">TABLE</span></code> and <code class="docutils literal"><span 
class="pre">DROP</span> <span class="pre">TABLE</span></code> statements are 
generated.
-Hive data types are inferred from the cursor&#8217;s metadata.
-Note that the table generated in Hive uses <code class="docutils 
literal"><span class="pre">STORED</span> <span class="pre">AS</span> <span 
class="pre">textfile</span></code>
-which isn&#8217;t the most efficient serialization format. If a
-large amount of data is loaded and/or if the table gets
-queried considerably, you may want to use this operator only to
-stage the data into a temporary table before loading it into its
-final destination using a <code class="docutils literal"><span 
class="pre">HiveOperator</span></code>.</p>
-<table class="docutils field-list" frame="void" rules="none">
-<col class="field-name" />
-<col class="field-body" />
-<tbody valign="top">
-<tr class="field-odd field"><th class="field-name">Parameters:</th><td 
class="field-body"><ul class="first last simple">
-<li><strong>sql</strong> (<em>str</em>) &#8211; SQL query to execute against the Vertica database</li>
-<li><strong>hive_table</strong> (<em>str</em>) &#8211; target Hive table, use 
dot notation to target a
-specific database</li>
-<li><strong>create</strong> (<em>bool</em>) &#8211; whether to create the 
table if it doesn&#8217;t exist</li>
-<li><strong>recreate</strong> (<em>bool</em>) &#8211; whether to drop and 
recreate the table at every execution</li>
-<li><strong>partition</strong> (<em>dict</em>) &#8211; target partition as a 
dict of partition columns and values</li>
-<li><strong>delimiter</strong> (<em>str</em>) &#8211; field delimiter in the 
file</li>
-<li><strong>vertica_conn_id</strong> (<em>str</em>) &#8211; source Vertica 
connection</li>
-<li><strong>hive_cli_conn_id</strong> (<em>str</em>) &#8211; destination hive connection</li>
-</ul>
-</td>
-</tr>
-</tbody>
-</table>
-</dd></dl>
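<p>A sketch of staging data with <code class="docutils literal"><span class="pre">VerticaToHiveTransfer</span></code>, assuming the referenced connections exist; table and query names are placeholders:</p>
<div class="highlight-python"><div class="highlight"><pre>from datetime import datetime

from airflow import DAG
from airflow.contrib.operators import VerticaToHiveTransfer

dag = DAG('vertica_to_hive_example', start_date=datetime(2016, 1, 1), schedule_interval='@daily')

# Pull one day of rows out of Vertica and load them into a (re)created
# Hive staging table partitioned by ds.
stage_events = VerticaToHiveTransfer(
    task_id='stage_events',
    sql="SELECT * FROM events WHERE ds = '{{ ds }}'",
    hive_table='staging.events',
    create=True,
    recreate=False,
    partition={'ds': '{{ ds }}'},
    vertica_conn_id='vertica_default',
    hive_cli_conn_id='hive_cli_default',
    dag=dag)
</pre></div></div>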
-
-<dl class="class">
-<dt id="airflow.contrib.operators.bigquery_operator.BigQueryOperator">
-<em class="property">class </em><code 
class="descclassname">airflow.contrib.operators.bigquery_operator.</code><code 
class="descname">BigQueryOperator</code><span 
class="sig-paren">(</span><em>bql</em>, 
<em>destination_dataset_table=False</em>, 
<em>write_disposition='WRITE_EMPTY'</em>, <em>allow_large_results=False</em>, 
<em>bigquery_conn_id='bigquery_default'</em>, <em>delegate_to=None</em>, 
<em>udf_config=False</em>, <em>*args</em>, <em>**kwargs</em><span 
class="sig-paren">)</span><a class="reference internal" 
href="_modules/airflow/contrib/operators/bigquery_operator.html#BigQueryOperator"><span
 class="viewcode-link">[source]</span></a><a class="headerlink" 
href="#airflow.contrib.operators.bigquery_operator.BigQueryOperator" 
title="Permalink to this definition">¶</a></dt>
-<dd><p>Executes BigQuery SQL queries in a specific BigQuery database</p>
-</dd></dl>
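<p>Illustrative only (dataset, table and connection names are placeholders), a <code class="docutils literal"><span class="pre">BigQueryOperator</span></code> task writing query results to a destination table might look like:</p>
<div class="highlight-python"><div class="highlight"><pre>from datetime import datetime

from airflow import DAG
from airflow.contrib.operators.bigquery_operator import BigQueryOperator

dag = DAG('bigquery_example', start_date=datetime(2016, 1, 1), schedule_interval='@daily')

# Run the BQL statement and write the result into a day-stamped table.
daily_rollup = BigQueryOperator(
    task_id='daily_rollup',
    bql="SELECT ds, COUNT(*) AS events FROM [my_dataset.events] GROUP BY ds",
    destination_dataset_table='my_dataset.daily_rollup_{{ ds_nodash }}',
    write_disposition='WRITE_TRUNCATE',
    bigquery_conn_id='bigquery_default',
    dag=dag)
</pre></div></div>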
-
-<dl class="class">
-<dt 
id="airflow.contrib.operators.bigquery_to_gcs.BigQueryToCloudStorageOperator">
-<em class="property">class </em><code 
class="descclassname">airflow.contrib.operators.bigquery_to_gcs.</code><code 
class="descname">BigQueryToCloudStorageOperator</code><span 
class="sig-paren">(</span><em>source_project_dataset_table</em>, 
<em>destination_cloud_storage_uris</em>, <em>compression='NONE'</em>, 
<em>export_format='CSV'</em>, <em>field_delimiter='</em>, <em>'</em>, 
<em>print_header=True</em>, <em>bigquery_conn_id='bigquery_default'</em>, 
<em>delegate_to=None</em>, <em>*args</em>, <em>**kwargs</em><span 
class="sig-paren">)</span><a class="reference internal" 
href="_modules/airflow/contrib/operators/bigquery_to_gcs.html#BigQueryToCloudStorageOperator"><span
 class="viewcode-link">[source]</span></a><a class="headerlink" 
href="#airflow.contrib.operators.bigquery_to_gcs.BigQueryToCloudStorageOperator"
 title="Permalink to this definition">¶</a></dt>
-<dd><p>Transfers a BigQuery table to a Google Cloud Storage bucket.</p>
+<dt id="airflow.contrib.operators.hipchat_operator.HipChatAPIOperator">
+<em class="property">class </em><code 
class="descclassname">airflow.contrib.operators.hipchat_operator.</code><code 
class="descname">HipChatAPIOperator</code><span 
class="sig-paren">(</span><em>token</em>, 
<em>base_url='https://api.hipchat.com/v2'</em>, <em>*args</em>, 
<em>**kwargs</em><span class="sig-paren">)</span><a class="reference internal" 
href="_modules/airflow/contrib/operators/hipchat_operator.html#HipChatAPIOperator"><span
 class="viewcode-link">[source]</span></a><a class="headerlink" 
href="#airflow.contrib.operators.hipchat_operator.HipChatAPIOperator" 
title="Permalink to this definition">¶</a></dt>
+<dd><p>Base HipChat Operator.
+All derived HipChat operators reference HipChat&#8217;s official REST API documentation
+at <a class="reference external" href="https://www.hipchat.com/docs/apiv2";>https://www.hipchat.com/docs/apiv2</a>.
 Before using any HipChat API operators you need
+to get an authentication token at <a class="reference external" href="https://www.hipchat.com/docs/apiv2/auth";>https://www.hipchat.com/docs/apiv2/auth</a>.
+In the future additional HipChat operators will be derived from this class as well.</p>
+<table class="docutils field-list" frame="void" rules="none">
+<col class="field-name" />
+<col class="field-body" />
+<tbody valign="top">
+<tr class="field-odd field"><th class="field-name">Parameters:</th><td class="field-body"><ul class="first last simple">
+<li><strong>token</strong> (<em>str</em>) &#8211; HipChat REST API authentication token</li>
+<li><strong>base_url</strong> (<em>str</em>) &#8211; HipChat REST API base url.</li>
+</ul>
+</td>
+</tr>
+</tbody>
+</table>
 </dd></dl>
 
 <dl class="class">
-<dt 
id="airflow.contrib.operators.gcs_download_operator.GoogleCloudStorageDownloadOperator">
-<em class="property">class </em><code 
class="descclassname">airflow.contrib.operators.gcs_download_operator.</code><code
 class="descname">GoogleCloudStorageDownloadOperator</code><span 
class="sig-paren">(</span><em>bucket</em>, <em>object</em>, 
<em>filename=False</em>, <em>store_to_xcom_key=False</em>, 
<em>google_cloud_storage_conn_id='google_cloud_storage_default'</em>, 
<em>delegate_to=None</em>, <em>*args</em>, <em>**kwargs</em><span 
class="sig-paren">)</span><a class="reference internal" 
href="_modules/airflow/contrib/operators/gcs_download_operator.html#GoogleCloudStorageDownloadOperator"><span
 class="viewcode-link">[source]</span></a><a class="headerlink" 
href="#airflow.contrib.operators.gcs_download_operator.GoogleCloudStorageDownloadOperator"
 title="Permalink to this definition">¶</a></dt>
-<dd><p>Downloads a file from Google Cloud Storage.</p>
+<dt 
id="airflow.contrib.operators.hipchat_operator.HipChatAPISendRoomNotificationOperator">
+<em class="property">class </em><code 
class="descclassname">airflow.contrib.operators.hipchat_operator.</code><code 
class="descname">HipChatAPISendRoomNotificationOperator</code><span 
class="sig-paren">(</span><em>room_id</em>, <em>message</em>, <em>*args</em>, 
<em>**kwargs</em><span class="sig-paren">)</span><a class="reference internal" 
href="_modules/airflow/contrib/operators/hipchat_operator.html#HipChatAPISendRoomNotificationOperator"><span
 class="viewcode-link">[source]</span></a><a class="headerlink" 
href="#airflow.contrib.operators.hipchat_operator.HipChatAPISendRoomNotificationOperator"
 title="Permalink to this definition">¶</a></dt>
+<dd><p>Send notification to a specific HipChat room.
+More info: <a class="reference external" href="https://www.hipchat.com/docs/apiv2/method/send_room_notification";>https://www.hipchat.com/docs/apiv2/method/send_room_notification</a></p>
+<table class="docutils field-list" frame="void" rules="none">
+<col class="field-name" />
+<col class="field-body" />
+<tbody valign="top">
+<tr class="field-odd field"><th class="field-name">Parameters:</th><td class="field-body"><ul class="first last simple">
+<li><strong>room_id</strong> (<em>str</em>) &#8211; Room in which to send notification on HipChat</li>
+<li><strong>message</strong> (<em>str</em>) &#8211; The message body</li>
+<li><strong>frm</strong> (<em>str</em>) &#8211; Label to be shown in addition to sender&#8217;s name</li>
+<li><strong>message_format</strong> (<em>str</em>) &#8211; How the notification is rendered: html or text</li>
+<li><strong>color</strong> (<em>str</em>) &#8211; Background color of the msg: yellow, green, red, purple, gray, or random</li>
+<li><strong>attach_to</strong> (<em>str</em>) &#8211; The message id to attach this notification to</li>
+<li><strong>notify</strong> (<em>bool</em>) &#8211; Whether this message should trigger a user notification</li>
+<li><strong>card</strong> (<em>dict</em>) &#8211; HipChat-defined card object</li>
+</ul>
+</td>
+</tr>
+</tbody>
+</table>
 </dd></dl>
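<p>A hedged usage sketch for the operator above; the token, room id and message are placeholders:</p>
<div class="highlight-python"><div class="highlight"><pre>from datetime import datetime

from airflow import DAG
from airflow.contrib.operators.hipchat_operator import (
    HipChatAPISendRoomNotificationOperator)

dag = DAG('hipchat_example', start_date=datetime(2016, 1, 1), schedule_interval='@daily')

# Send a green notification to a HipChat room when the task runs;
# the token and room_id values are placeholders.
notify_room = HipChatAPISendRoomNotificationOperator(
    task_id='notify_room',
    token='YOUR_HIPCHAT_TOKEN',
    room_id='123456',
    message='Airflow run for {{ ds }} completed',
    message_format='text',
    color='green',
    notify=True,
    dag=dag)
</pre></div></div>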
 
-<dl class="class">
-<dt id="airflow.contrib.operators.QuboleOperator">
-<em class="property">class </em><code 
class="descclassname">airflow.contrib.operators.</code><code 
class="descname">QuboleOperator</code><span 
class="sig-paren">(</span><em>qubole_conn_id='qubole_default'</em>, 
<em>*args</em>, <em>**kwargs</em><span class="sig-paren">)</span><a 
class="reference internal" 
href="_modules/qubole_operator.html#QuboleOperator"><span 
class="viewcode-link">[source]</span></a><a class="headerlink" 
href="#airflow.contrib.operators.QuboleOperator" title="Permalink to this 
definition">¶</a></dt>
-<dd><p>Execute tasks (commands) on QDS (<a class="reference external" 
href="https://qubole.com";>https://qubole.com</a>).</p>
-<table class="docutils field-list" frame="void" rules="none">
-<col class="field-name" />
-<col class="field-body" />
-<tbody valign="top">
-<tr class="field-odd field"><th class="field-name">Parameters:</th><td 
class="field-body"><strong>qubole_conn_id</strong> (<em>str</em>) &#8211; 
Connection id which consists of qds auth_token</td>
+</div>
+</div>
+<div class="section" id="macros">
+<span id="id1"></span><h2>Macros<a class="headerlink" href="#macros" 
title="Permalink to this headline">¶</a></h2>
+<p>Here&#8217;s a list of variables and macros that can be used in 
templates</p>
+<div class="section" id="default-variables">
+<h3>Default Variables<a class="headerlink" href="#default-variables" 
title="Permalink to this headline">¶</a></h3>
+<p>The Airflow engine passes a few variables by default that are accessible
+in all templates</p>
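<p>As a small, hypothetical illustration, the variables listed in the table below can be referenced directly in any templated field, for example a BashOperator's <code class="docutils literal"><span class="pre">bash_command</span></code>:</p>
<div class="highlight-python"><div class="highlight"><pre>from datetime import datetime

from airflow import DAG
from airflow.operators import BashOperator

dag = DAG('macro_example', start_date=datetime(2016, 1, 1), schedule_interval='@daily')

# ds / yesterday_ds (see the table below) are rendered by Jinja
# before the bash command is executed.
print_dates = BashOperator(
    task_id='print_dates',
    bash_command='echo "running for {{ ds }} (yesterday was {{ yesterday_ds }})"',
    dag=dag)
</pre></div></div>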
+<table border="1" class="docutils">
+<colgroup>
+<col width="35%" />
+<col width="65%" />
+</colgroup>
+<thead valign="bottom">
+<tr class="row-odd"><th class="head">Variable</th>
+<th class="head">Description</th>
 </tr>
-</tbody>
-</table>
-<dl class="docutils">
-<dt>kwargs:</dt>
-<dd><table class="first docutils field-list" frame="void" rules="none">
-<col class="field-name" />
-<col class="field-body" />
+</thead>
 <tbody valign="top">
-<tr class="field-odd field"><th class="field-name">command_type:</th><td 
class="field-body">type of command to be executed, e.g. hivecmd, shellcmd, 
hadoopcmd</td>
+<tr class="row-even"><td><code class="docutils literal"><span 
class="pre">{{</span> <span class="pre">ds</span> <span 
class="pre">}}</span></code></td>
+<td>the execution date as <code class="docutils literal"><span 
class="pre">YYYY-MM-DD</span></code></td>
 </tr>
-<tr class="field-even field"><th class="field-name">tags:</th><td 
class="field-body">array of tags to be assigned with the command</td>
+<tr class="row-odd"><td><code class="docutils literal"><span 
class="pre">{{</span> <span class="pre">ds_nodash</span> <span 
class="pre">}}</span></code></td>
+<td>the execution date as <code class="docutils literal"><span 
class="pre">YYYYMMDD</span></code></td>
 </tr>
-<tr class="field-odd field"><th class="field-name">cluster_label:</th><td 
class="field-body">cluster label on which the command will be executed</td>
+<tr class="row-even"><td><code class="docutils literal"><span 
class="pre">{{</span> <span class="pre">yesterday_ds</span> <span 
class="pre">}}</span></code></td>
+<td>yesterday&#8217;s date as <code class="docutils literal"><span 
class="pre">YYYY-MM-DD</span></code></td>
 </tr>
-<tr class="field-even field"><th class="field-name">name:</th><td 
class="field-body">name to be given to command</td>
-</tr>
-</tbody>
-</table>
-<p><strong>Arguments specific to command types</strong></p>
-<dl class="last docutils">
-<dt>hivecmd:</dt>
-<dd><table class="first last docutils field-list" frame="void" rules="none">
-<col class="field-name" />
-<col class="field-body" />
-<tbody valign="top">
-<tr class="field-odd field"><th class="field-name">query:</th><td 
class="field-body">inline query statement</td>
-</tr>
-<tr class="field-even field"><th class="field-name" 
colspan="2">script_location:</th></tr>
-<tr class="field-even field"><td>&nbsp;</td><td class="field-body">s3 location 
containing query statement</td>
-</tr>
-<tr class="field-odd field"><th class="field-name">sample_size:</th><td 
class="field-body">size of sample in bytes on which to run query</td>
-</tr>
-<tr class="field-even field"><th class="field-name">macros:</th><td 
class="field-body">macro values which were used in query</td>
-</tr>
-</tbody>
-</table>
-</dd>
-<dt>prestocmd:</dt>
-<dd><table class="first last docutils field-list" frame="void" rules="none">
-<col class="field-name" />
-<col class="field-body" />
-<tbody valign="top">
-<tr class="field-odd field"><th class="field-name">query:</th><td 
class="field-body">inline query statement</td>
-</tr>
-<tr class="field-even field"><th class="field-name" 
colspan="2">script_location:</th></tr>
-<tr class="field-even field"><td>&nbsp;</td><td class="field-body">s3 location 
containing query statement</td>
-</tr>
-<tr class="field-odd field"><th class="field-name">macros:</th><td 
class="field-body">macro values which were used in query</td>
-</tr>
-</tbody>
-</table>
-</dd>
-<dt>hadoopcmd:</dt>
-<dd><table class="first last docutils field-list" frame="void" rules="none">
-<col class="field-name" />
-<col class="field-body" />
-<tbody valign="top">
-<tr class="field-odd field"><th class="field-name">sub_commnad:</th><td 
class="field-body">must be one these [&#8220;jar&#8221;, 
&#8220;s3distcp&#8221;, &#8220;streaming&#8221;] followed by 1 or more args</td>
-</tr>
-</tbody>
-</table>
-</dd>
-<dt>shellcmd:</dt>
-<dd><table class="first last docutils field-list" frame="void" rules="none">
-<col class="field-name" />
-<col class="field-body" />
-<tbody valign="top">
-<tr class="field-odd field"><th class="field-name">script:</th><td 
class="field-body">inline command with args</td>
-</tr>
-<tr class="field-even field"><th class="field-name" 
colspan="2">script_location:</th></tr>
-<tr class="field-even field"><td>&nbsp;</td><td class="field-body">s3 location 
containing query statement</td>
-</tr>
-<tr class="field-odd field"><th class="field-name">files:</th><td 
class="field-body">list of files in s3 bucket as file1,file2 format. These 
files will be copied into the working directory where the qubole command is 
being executed.</td>
-</tr>
-<tr class="field-even field"><th class="field-name">archives:</th><td 
class="field-body">list of archives in s3 bucket as archive1,archive2 format. 
These will be unarchived intothe working directory where the qubole command is 
being executed</td>
-</tr>
-<tr class="field-odd field"><th class="field-name">parameters:</th><td 
class="field-body">any extra args which need to be passed to script (only when 
script_location is supplied)</td>
-</tr>
-</tbody>
-</table>
-</dd>
-<dt>pigcmd:</dt>
-<dd><table class="first last docutils field-list" frame="void" rules="none">
-<col class="field-name" />
-<col class="field-body" />
-<tbody valign="top">
-<tr class="field-odd field"><th class="field-name">script:</th><td 
class="field-body">inline query statement (latin_statements)</td>
-</tr>
-<tr class="field-even field"><th class="field-name" 
colspan="2">script_location:</th></tr>
-<tr class="field-even field"><td>&nbsp;</td><td class="field-body">s3 location 
containing pig query</td>
-</tr>
-<tr class="field-odd field"><th class="field-name">parameters:</th><td 
class="field-body">any extra args which need to be passed to script (only when 
script_location is supplied</td>
-</tr>
-</tbody>
-</table>
-</dd>
-<dt>sparkcmd:</dt>
-<dd><table class="first last docutils field-list" frame="void" rules="none">
-<col class="field-name" />
-<col class="field-body" />
-<tbody valign="top">
-<tr class="field-odd field"><th class="field-name">program:</th><td 
class="field-body">the complete Spark Program in Scala, SQL, Command, R, or 
Python</td>
-</tr>
-<tr class="field-even field"><th class="field-name">cmdline:</th><td 
class="field-body">spark-submit command line, all required information must be 
specify in cmdline itself.</td>
-</tr>
-<tr class="field-odd field"><th class="field-name">sql:</th><td 
class="field-body">inline sql query</td>
-</tr>
-<tr class="field-even field"><th class="field-name" 
colspan="2">script_location:</th></tr>
-<tr class="field-even field"><td>&nbsp;</td><td class="field-body">s3 location 
containing query statement</td>
-</tr>
-<tr class="field-odd field"><th class="field-name">language:</th><td 
class="field-body">language of the program, Scala, SQL, Command, R, or 
Python</td>
-</tr>
-<tr class="field-even field"><th class="field-name">app_id:</th><td 
class="field-body">ID of an Spark job server app</td>
-</tr>
-<tr class="field-odd field"><th class="field-name">arguments:</th><td 
class="field-body">spark-submit command line arguments</td>
-</tr>
-<tr class="field-even field"><th class="field-name" 
colspan="2">user_program_arguments:</th></tr>
-<tr class="field-even field"><td>&nbsp;</td><td class="field-body">arguments 
that the user program takes in</td>
-</tr>
-<tr class="field-odd field"><th class="field-name">macros:</th><td 
class="field-body">macro values which were used in query</td>
-</tr>
-</tbody>
-</table>
-</dd>
-<dt>dbtapquerycmd:</dt>
-<dd><table class="first last docutils field-list" frame="void" rules="none">
-<col class="field-name" />
-<col class="field-body" />
-<tbody valign="top">
-<tr class="field-odd field"><th class="field-name">db_tap_id:</th><td 
class="field-body">data store ID of the target database, in Qubole.</td>
-</tr>
-<tr class="field-even field"><th class="field-name">query:</th><td 
class="field-body">inline query statement</td>
-</tr>
-<tr class="field-odd field"><th class="field-name">macros:</th><td 
class="field-body">macro values which were used in query</td>
-</tr>
-</tbody>
-</table>
-</dd>
-<dt>dbexportcmd:</dt>
-<dd><table class="first last docutils field-list" frame="void" rules="none">
-<col class="field-name" />
-<col class="field-body" />
-<tbody valign="top">
-<tr class="field-odd field"><th class="field-name">mode:</th><td 
class="field-body">1 (simple), 2 (advance)</td>
-</tr>
-<tr class="field-even field"><th class="field-name">hive_table:</th><td 
class="field-body">Name of the hive table</td>
-</tr>
-<tr class="field-odd field"><th class="field-name">partition_spec:</th><td 
class="field-body">partition specification for Hive table.</td>
-</tr>
-<tr class="field-even field"><th class="field-name">dbtap_id:</th><td 
class="field-body">data store ID of the target database, in Qubole.</td>
-</tr>
-<tr class="field-odd field"><th class="field-name">db_table:</th><td 
class="field-body">name of the db table</td>
-</tr>
-<tr class="field-even field"><th class="field-name">db_update_mode:</th><td 
class="field-body">allowinsert or updateonly</td>
-</tr>
-<tr class="field-odd field"><th class="field-name">db_update_keys:</th><td 
class="field-body">columns used to determine the uniqueness of rows</td>
-</tr>
-<tr class="field-even field"><th class="field-name">export_dir:</th><td 
class="field-body">HDFS/S3 location from which data will be exported.</td>
-</tr>
-<tr class="field-odd field"><th class="field-name" 
colspan="2">fields_terminated_by:</th></tr>
-<tr class="field-odd field"><td>&nbsp;</td><td class="field-body">hex of the 
char used as column separator in the dataset.</td>
-</tr>
-</tbody>
-</table>
-</dd>
-<dt>dbimportcmd:</dt>
-<dd><table class="first last docutils field-list" frame="void" rules="none">
-<col class="field-name" />
-<col class="field-body" />
-<tbody valign="top">
-<tr class="field-odd field"><th class="field-name">mode:</th><td 
class="field-body">1 (simple), 2 (advance)</td>
-</tr>
-<tr class="field-even field"><th class="field-name">hive_table:</th><td 
class="field-body">Name of the hive table</td>
-</tr>
-<tr class="field-odd field"><th class="field-name">dbtap_id:</th><td 
class="field-body">data store ID of the target database, in Qubole.</td>
-</tr>
-<tr class="field-even field"><th class="field-name">db_table:</th><td 
class="field-body">name of the db table</td>
-</tr>
-<tr class="field-odd field"><th class="field-name">where_clause:</th><td 
class="field-body">where clause, if any</td>
-</tr>
-<tr class="field-even field"><th class="field-name">parallelism:</th><td 
class="field-body">number of parallel db connections to use for extracting 
data</td>
-</tr>
-<tr class="field-odd field"><th class="field-name">extract_query:</th><td 
class="field-body">SQL query to extract data from db. $CONDITIONS must be part 
of the where clause.</td>
-</tr>
-<tr class="field-even field"><th class="field-name">boundary_query:</th><td 
class="field-body">Query to be used get range of row IDs to be extracted</td>
-</tr>
-<tr class="field-odd field"><th class="field-name">split_column:</th><td 
class="field-body">Column used as row ID to split data into ranges (mode 2)</td>
-</tr>
-</tbody>
-</table>
-</dd>
-</dl>
-</dd>
-</dl>
-<div class="admonition note">
-<p class="first admonition-title">Note</p>
-<p class="last">Following fields are template-supported : <code 
class="docutils literal"><span class="pre">query</span></code>, <code 
class="docutils literal"><span class="pre">script_location</span></code>, <code 
class="docutils literal"><span class="pre">sub_command</span></code>, <code 
class="docutils literal"><span class="pre">script</span></code>, <code 
class="docutils literal"><span class="pre">files</span></code>,
-<code class="docutils literal"><span class="pre">archives</span></code>, <code 
class="docutils literal"><span class="pre">program</span></code>, <code 
class="docutils literal"><span class="pre">cmdline</span></code>, <code 
class="docutils literal"><span class="pre">sql</span></code>, <code 
class="docutils literal"><span class="pre">where_clause</span></code>, <code 
class="docutils literal"><span class="pre">extract_query</span></code>, <code 
class="docutils literal"><span class="pre">boundary_query</span></code>, <code 
class="docutils literal"><span class="pre">macros</span></code>, <code 
class="docutils literal"><span class="pre">tags</span></code>,
-<code class="docutils literal"><span class="pre">name</span></code>, <code 
class="docutils literal"><span class="pre">parameters</span></code>. You can 
also use <code class="docutils literal"><span class="pre">.txt</span></code> 
files for template driven use cases.</p>
-</div>
-</dd></dl>
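<p>To make the kwargs-driven interface above concrete, here is a hypothetical hivecmd task; the connection id, query and tags are placeholders:</p>
<div class="highlight-python"><div class="highlight"><pre>from datetime import datetime

from airflow import DAG
from airflow.contrib.operators import QuboleOperator

dag = DAG('qubole_example', start_date=datetime(2016, 1, 1), schedule_interval='@daily')

# command_type selects the QDS command class; the remaining kwargs are the
# hivecmd-specific arguments listed above.
hive_on_qds = QuboleOperator(
    task_id='hive_on_qds',
    qubole_conn_id='qubole_default',
    command_type='hivecmd',
    query="SELECT COUNT(*) FROM events WHERE ds = '{{ ds }}'",
    cluster_label='default',
    tags=['airflow', 'example'],
    dag=dag)
</pre></div></div>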
-
-<dl class="class">
-<dt id="airflow.contrib.operators.hipchat_operator.HipChatAPIOperator">
-<em class="property">class </em><code 
class="descclassname">airflow.contrib.operators.hipchat_operator.</code><code 
class="descname">HipChatAPIOperator</code><span 
class="sig-paren">(</span><em>token</em>, 
<em>base_url='https://api.hipchat.com/v2'</em>, <em>*args</em>, 
<em>**kwargs</em><span class="sig-paren">)</span><a class="reference internal" 
href="_modules/airflow/contrib/operators/hipchat_operator.html#HipChatAPIOperator"><span
 class="viewcode-link">[source]</span></a><a class="headerlink" 
href="#airflow.contrib.operators.hipchat_operator.HipChatAPIOperator" 
title="Permalink to this definition">¶</a></dt>
-<dd><p>Base HipChat Operator.
-All derived HipChat operators reference from HipChat&#8217;s official REST API 
documentation
-at <a class="reference external" 
href="https://www.hipchat.com/docs/apiv2";>https://www.hipchat.com/docs/apiv2</a>.
 Before using any HipChat API operators you need
-to get an authentication token at <a class="reference external" 
href="https://www.hipchat.com/docs/apiv2/auth";>https://www.hipchat.com/docs/apiv2/auth</a>.
-In the future additional HipChat operators will be derived from this class as 
well.</p>
-<table class="docutils field-list" frame="void" rules="none">
-<col class="field-name" />
-<col class="field-body" />
-<tbody valign="top">
-<tr class="field-odd field"><th class="field-name">Parameters:</th><td 
class="field-body"><ul class="first last simple">
-<li><strong>token</strong> (<em>str</em>) &#8211; HipChat REST API 
authentication token</li>
-<li><strong>base_url</strong> (<em>str</em>) &#8211; HipChat REST API base 
url.</li>
-</ul>
-</td>
-</tr>
-</tbody>
-</table>
-</dd></dl>
-
-<dl class="class">
-<dt 
id="airflow.contrib.operators.hipchat_operator.HipChatAPISendRoomNotificationOperator">
-<em class="property">class </em><code 
class="descclassname">airflow.contrib.operators.hipchat_operator.</code><code 
class="descname">HipChatAPISendRoomNotificationOperator</code><span 
class="sig-paren">(</span><em>room_id</em>, <em>message</em>, <em>*args</em>, 
<em>**kwargs</em><span class="sig-paren">)</span><a class="reference internal" 
href="_modules/airflow/contrib/operators/hipchat_operator.html#HipChatAPISendRoomNotificationOperator"><span
 class="viewcode-link">[source]</span></a><a class="headerlink" 
href="#airflow.contrib.operators.hipchat_operator.HipChatAPISendRoomNotificationOperator"
 title="Permalink to this definition">¶</a></dt>
-<dd><p>Send notification to a specific HipChat room.
-More info: <a class="reference external" 
href="https://www.hipchat.com/docs/apiv2/method/send_room_notification";>https://www.hipchat.com/docs/apiv2/method/send_room_notification</a></p>
-<table class="docutils field-list" frame="void" rules="none">
-<col class="field-name" />
-<col class="field-body" />
-<tbody valign="top">
-<tr class="field-odd field"><th class="field-name">Parameters:</th><td 
class="field-body"><ul class="first last simple">
-<li><strong>room_id</strong> (<em>str</em>) &#8211; Room in which to send 
notification on HipChat</li>
-<li><strong>message</strong> (<em>str</em>) &#8211; The message body</li>
-<li><strong>frm</stro

<TRUNCATED>
