http://git-wip-us.apache.org/repos/asf/spark-website/blob/52917ac4/site/docs/2.4.0/api/R/unpersist.html
----------------------------------------------------------------------
diff --git a/site/docs/2.4.0/api/R/unpersist.html 
b/site/docs/2.4.0/api/R/unpersist.html
new file mode 100644
index 0000000..1463c71
--- /dev/null
+++ b/site/docs/2.4.0/api/R/unpersist.html
@@ -0,0 +1,118 @@
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" 
"http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd";><html 
xmlns="http://www.w3.org/1999/xhtml";><head><title>R: Unpersist</title>
+<meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
+<link rel="stylesheet" type="text/css" href="R.css" />
+
+<link rel="stylesheet" 
href="https://cdnjs.cloudflare.com/ajax/libs/highlight.js/8.3/styles/github.min.css";>
+<script 
src="https://cdnjs.cloudflare.com/ajax/libs/highlight.js/8.3/highlight.min.js";></script>
+<script 
src="https://cdnjs.cloudflare.com/ajax/libs/highlight.js/8.3/languages/r.min.js";></script>
+<script>hljs.initHighlightingOnLoad();</script>
+</head><body>
+
+<table width="100%" summary="page for unpersist {SparkR}"><tr><td>unpersist 
{SparkR}</td><td style="text-align: right;">R Documentation</td></tr></table>
+
+<h2>Unpersist</h2>
+
+<h3>Description</h3>
+
+<p>Mark this SparkDataFrame as non-persistent, and remove all blocks for it 
from memory and
+disk.
+</p>
+
+
+<h3>Usage</h3>
+
+<pre>
+unpersist(x, ...)
+
+## S4 method for signature 'SparkDataFrame'
+unpersist(x, blocking = TRUE)
+</pre>
+
+
+<h3>Arguments</h3>
+
+<table summary="R argblock">
+<tr valign="top"><td><code>x</code></td>
+<td>
+<p>the SparkDataFrame to unpersist.</p>
+</td></tr>
+<tr valign="top"><td><code>...</code></td>
+<td>
+<p>further arguments to be passed to or from other methods.</p>
+</td></tr>
+<tr valign="top"><td><code>blocking</code></td>
+<td>
+<p>whether to block until all blocks are deleted.</p>
+</td></tr>
+</table>
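+
+<p>As a quick illustration of the <code>blocking</code> flag, here is a minimal
+sketch (not part of the generated reference; the cached data is only an
+assumption): <code>blocking = FALSE</code> requests removal of the cached
+blocks without waiting for it to finish.</p>
+
+<pre><code class="r">sparkR.session()
+df &lt;- createDataFrame(faithful)
+persist(df, &quot;MEMORY_ONLY&quot;)      # cache the SparkDataFrame
+count(df)                            # an action that materializes the cache
+unpersist(df, blocking = FALSE)      # drop the cache without blocking
+</code></pre>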
+
+
+<h3>Note</h3>
+
+<p>unpersist since 1.4.0
+</p>
+
+
+<h3>See Also</h3>
+
+<p>Other SparkDataFrame functions: <code><a 
href="SparkDataFrame.html">SparkDataFrame-class</a></code>,
+<code><a href="summarize.html">agg</a></code>, <code><a 
href="alias.html">alias</a></code>,
+<code><a href="arrange.html">arrange</a></code>, <code><a 
href="as.data.frame.html">as.data.frame</a></code>,
+<code><a href="attach.html">attach,SparkDataFrame-method</a></code>,
+<code><a href="broadcast.html">broadcast</a></code>, <code><a 
href="cache.html">cache</a></code>,
+<code><a href="checkpoint.html">checkpoint</a></code>, <code><a 
href="coalesce.html">coalesce</a></code>,
+<code><a href="collect.html">collect</a></code>, <code><a 
href="columns.html">colnames</a></code>,
+<code><a href="coltypes.html">coltypes</a></code>,
+<code><a 
href="createOrReplaceTempView.html">createOrReplaceTempView</a></code>,
+<code><a href="crossJoin.html">crossJoin</a></code>, <code><a 
href="cube.html">cube</a></code>,
+<code><a href="dapplyCollect.html">dapplyCollect</a></code>, <code><a 
href="dapply.html">dapply</a></code>,
+<code><a href="describe.html">describe</a></code>, <code><a 
href="dim.html">dim</a></code>,
+<code><a href="distinct.html">distinct</a></code>, <code><a 
href="dropDuplicates.html">dropDuplicates</a></code>,
+<code><a href="nafunctions.html">dropna</a></code>, <code><a 
href="drop.html">drop</a></code>,
+<code><a href="dtypes.html">dtypes</a></code>, <code><a 
href="exceptAll.html">exceptAll</a></code>,
+<code><a href="except.html">except</a></code>, <code><a 
href="explain.html">explain</a></code>,
+<code><a href="filter.html">filter</a></code>, <code><a 
href="first.html">first</a></code>,
+<code><a href="gapplyCollect.html">gapplyCollect</a></code>, <code><a 
href="gapply.html">gapply</a></code>,
+<code><a href="getNumPartitions.html">getNumPartitions</a></code>, <code><a 
href="groupBy.html">group_by</a></code>,
+<code><a href="head.html">head</a></code>, <code><a 
href="hint.html">hint</a></code>,
+<code><a href="histogram.html">histogram</a></code>, <code><a 
href="insertInto.html">insertInto</a></code>,
+<code><a href="intersectAll.html">intersectAll</a></code>, <code><a 
href="intersect.html">intersect</a></code>,
+<code><a href="isLocal.html">isLocal</a></code>, <code><a 
href="isStreaming.html">isStreaming</a></code>,
+<code><a href="join.html">join</a></code>, <code><a 
href="limit.html">limit</a></code>,
+<code><a href="localCheckpoint.html">localCheckpoint</a></code>, <code><a 
href="merge.html">merge</a></code>,
+<code><a href="mutate.html">mutate</a></code>, <code><a 
href="ncol.html">ncol</a></code>,
+<code><a href="nrow.html">nrow</a></code>, <code><a 
href="persist.html">persist</a></code>,
+<code><a href="printSchema.html">printSchema</a></code>, <code><a 
href="randomSplit.html">randomSplit</a></code>,
+<code><a href="rbind.html">rbind</a></code>, <code><a 
href="rename.html">rename</a></code>,
+<code><a href="repartitionByRange.html">repartitionByRange</a></code>,
+<code><a href="repartition.html">repartition</a></code>, <code><a 
href="rollup.html">rollup</a></code>,
+<code><a href="sample.html">sample</a></code>, <code><a 
href="saveAsTable.html">saveAsTable</a></code>,
+<code><a href="schema.html">schema</a></code>, <code><a 
href="selectExpr.html">selectExpr</a></code>,
+<code><a href="select.html">select</a></code>, <code><a 
href="showDF.html">showDF</a></code>,
+<code><a href="show.html">show</a></code>, <code><a 
href="storageLevel.html">storageLevel</a></code>,
+<code><a href="str.html">str</a></code>, <code><a 
href="subset.html">subset</a></code>,
+<code><a href="summary.html">summary</a></code>, <code><a 
href="take.html">take</a></code>,
+<code><a href="toJSON.html">toJSON</a></code>, <code><a 
href="unionByName.html">unionByName</a></code>,
+<code><a href="union.html">union</a></code>, <code><a 
href="withColumn.html">withColumn</a></code>,
+<code><a href="withWatermark.html">withWatermark</a></code>, <code><a 
href="with.html">with</a></code>,
+<code><a href="write.df.html">write.df</a></code>, <code><a 
href="write.jdbc.html">write.jdbc</a></code>,
+<code><a href="write.json.html">write.json</a></code>, <code><a 
href="write.orc.html">write.orc</a></code>,
+<code><a href="write.parquet.html">write.parquet</a></code>, <code><a 
href="write.stream.html">write.stream</a></code>,
+<code><a href="write.text.html">write.text</a></code>
+</p>
+
+
+<h3>Examples</h3>
+
+<pre><code class="r">## Not run: 
+##D sparkR.session()
+##D path &lt;- &quot;path/to/file.json&quot;
+##D df &lt;- read.json(path)
+##D persist(df, &quot;MEMORY_AND_DISK&quot;)
+##D unpersist(df)
+## End(Not run)
+</code></pre>
+
+
+<hr /><div style="text-align: center;">[Package <em>SparkR</em> version 2.4.0 
<a href="00Index.html">Index</a>]</div>
+</body></html>

http://git-wip-us.apache.org/repos/asf/spark-website/blob/52917ac4/site/docs/2.4.0/api/R/windowOrderBy.html
----------------------------------------------------------------------
diff --git a/site/docs/2.4.0/api/R/windowOrderBy.html 
b/site/docs/2.4.0/api/R/windowOrderBy.html
new file mode 100644
index 0000000..7576ade
--- /dev/null
+++ b/site/docs/2.4.0/api/R/windowOrderBy.html
@@ -0,0 +1,71 @@
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" 
"http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd";><html 
xmlns="http://www.w3.org/1999/xhtml";><head><title>R: windowOrderBy</title>
+<meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
+<link rel="stylesheet" type="text/css" href="R.css" />
+
+<link rel="stylesheet" 
href="https://cdnjs.cloudflare.com/ajax/libs/highlight.js/8.3/styles/github.min.css";>
+<script 
src="https://cdnjs.cloudflare.com/ajax/libs/highlight.js/8.3/highlight.min.js";></script>
+<script 
src="https://cdnjs.cloudflare.com/ajax/libs/highlight.js/8.3/languages/r.min.js";></script>
+<script>hljs.initHighlightingOnLoad();</script>
+</head><body>
+
+<table width="100%" summary="page for windowOrderBy 
{SparkR}"><tr><td>windowOrderBy {SparkR}</td><td style="text-align: right;">R 
Documentation</td></tr></table>
+
+<h2>windowOrderBy</h2>
+
+<h3>Description</h3>
+
+<p>Creates a WindowSpec with the ordering defined.
+</p>
+
+
+<h3>Usage</h3>
+
+<pre>
+windowOrderBy(col, ...)
+
+## S4 method for signature 'character'
+windowOrderBy(col, ...)
+
+## S4 method for signature 'Column'
+windowOrderBy(col, ...)
+</pre>
+
+
+<h3>Arguments</h3>
+
+<table summary="R argblock">
+<tr valign="top"><td><code>col</code></td>
+<td>
+<p>A column name or Column by which rows are ordered within
+windows.</p>
+</td></tr>
+<tr valign="top"><td><code>...</code></td>
+<td>
+<p>Optional column names or Columns in addition to col, by
+which rows are ordered within windows.</p>
+</td></tr>
+</table>
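+
+<p>A small hedged sketch of the two signatures (a column name versus a Column
+expression); the SparkDataFrame <code>df</code> and its columns are assumptions,
+not taken from this page:</p>
+
+<pre><code class="r"># order rows within each window by a column name ...
+ws1 &lt;- windowOrderBy(&quot;value&quot;)
+# ... or by a Column expression, here in descending order
+ws2 &lt;- windowOrderBy(desc(df$value))
+previous &lt;- select(df, over(lag(df$value, 1), ws2))
+</code></pre>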
+
+
+<h3>Note</h3>
+
+<p>windowOrderBy(character) since 2.0.0
+</p>
+<p>windowOrderBy(Column) since 2.0.0
+</p>
+
+
+<h3>Examples</h3>
+
+<pre><code class="r">## Not run: 
+##D   ws &lt;- windowOrderBy(&quot;key1&quot;, &quot;key2&quot;)
+##D   df1 &lt;- select(df, over(lead(&quot;value&quot;, 1), ws))
+##D 
+##D   ws &lt;- windowOrderBy(df$key1, df$key2)
+##D   df1 &lt;- select(df, over(lead(&quot;value&quot;, 1), ws))
+## End(Not run)
+</code></pre>
+
+
+<hr /><div style="text-align: center;">[Package <em>SparkR</em> version 2.4.0 
<a href="00Index.html">Index</a>]</div>
+</body></html>

http://git-wip-us.apache.org/repos/asf/spark-website/blob/52917ac4/site/docs/2.4.0/api/R/windowPartitionBy.html
----------------------------------------------------------------------
diff --git a/site/docs/2.4.0/api/R/windowPartitionBy.html 
b/site/docs/2.4.0/api/R/windowPartitionBy.html
new file mode 100644
index 0000000..6e897d5
--- /dev/null
+++ b/site/docs/2.4.0/api/R/windowPartitionBy.html
@@ -0,0 +1,71 @@
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" 
"http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd";><html 
xmlns="http://www.w3.org/1999/xhtml";><head><title>R: windowPartitionBy</title>
+<meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
+<link rel="stylesheet" type="text/css" href="R.css" />
+
+<link rel="stylesheet" 
href="https://cdnjs.cloudflare.com/ajax/libs/highlight.js/8.3/styles/github.min.css";>
+<script 
src="https://cdnjs.cloudflare.com/ajax/libs/highlight.js/8.3/highlight.min.js";></script>
+<script 
src="https://cdnjs.cloudflare.com/ajax/libs/highlight.js/8.3/languages/r.min.js";></script>
+<script>hljs.initHighlightingOnLoad();</script>
+</head><body>
+
+<table width="100%" summary="page for windowPartitionBy 
{SparkR}"><tr><td>windowPartitionBy {SparkR}</td><td style="text-align: 
right;">R Documentation</td></tr></table>
+
+<h2>windowPartitionBy</h2>
+
+<h3>Description</h3>
+
+<p>Creates a WindowSpec with the partitioning defined.
+</p>
+
+
+<h3>Usage</h3>
+
+<pre>
+windowPartitionBy(col, ...)
+
+## S4 method for signature 'character'
+windowPartitionBy(col, ...)
+
+## S4 method for signature 'Column'
+windowPartitionBy(col, ...)
+</pre>
+
+
+<h3>Arguments</h3>
+
+<table summary="R argblock">
+<tr valign="top"><td><code>col</code></td>
+<td>
+<p>A column name or Column by which rows are partitioned to
+windows.</p>
+</td></tr>
+<tr valign="top"><td><code>...</code></td>
+<td>
+<p>Optional column names or Columns in addition to col, by
+which rows are partitioned to windows.</p>
+</td></tr>
+</table>
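+
+<p>The partitioning is typically combined with an ordering and a frame. A
+hedged sketch of a per-key moving average (the columns <code>key</code>,
+<code>ts</code> and <code>value</code> are assumptions, and <code>rowsBetween</code>
+is assumed to behave as documented for WindowSpec):</p>
+
+<pre><code class="r"># per-key window ordered by time, covering the current and two preceding rows
+ws &lt;- rowsBetween(orderBy(windowPartitionBy(df$key), df$ts), -2, 0)
+movingAvg &lt;- select(df, over(avg(df$value), ws))
+</code></pre>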
+
+
+<h3>Note</h3>
+
+<p>windowPartitionBy(character) since 2.0.0
+</p>
+<p>windowPartitionBy(Column) since 2.0.0
+</p>
+
+
+<h3>Examples</h3>
+
+<pre><code class="r">## Not run: 
+##D   ws &lt;- orderBy(windowPartitionBy(&quot;key1&quot;, &quot;key2&quot;), 
&quot;key3&quot;)
+##D   df1 &lt;- select(df, over(lead(&quot;value&quot;, 1), ws))
+##D 
+##D   ws &lt;- orderBy(windowPartitionBy(df$key1, df$key2), df$key3)
+##D   df1 &lt;- select(df, over(lead(&quot;value&quot;, 1), ws))
+## End(Not run)
+</code></pre>
+
+
+<hr /><div style="text-align: center;">[Package <em>SparkR</em> version 2.4.0 
<a href="00Index.html">Index</a>]</div>
+</body></html>

http://git-wip-us.apache.org/repos/asf/spark-website/blob/52917ac4/site/docs/2.4.0/api/R/with.html
----------------------------------------------------------------------
diff --git a/site/docs/2.4.0/api/R/with.html b/site/docs/2.4.0/api/R/with.html
new file mode 100644
index 0000000..2687287
--- /dev/null
+++ b/site/docs/2.4.0/api/R/with.html
@@ -0,0 +1,119 @@
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" 
"http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd";><html 
xmlns="http://www.w3.org/1999/xhtml";><head><title>R: Evaluate a R expression in 
an environment constructed from a...</title>
+<meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
+<link rel="stylesheet" type="text/css" href="R.css" />
+
+<link rel="stylesheet" 
href="https://cdnjs.cloudflare.com/ajax/libs/highlight.js/8.3/styles/github.min.css";>
+<script 
src="https://cdnjs.cloudflare.com/ajax/libs/highlight.js/8.3/highlight.min.js";></script>
+<script 
src="https://cdnjs.cloudflare.com/ajax/libs/highlight.js/8.3/languages/r.min.js";></script>
+<script>hljs.initHighlightingOnLoad();</script>
+</head><body>
+
+<table width="100%" summary="page for with {SparkR}"><tr><td>with 
{SparkR}</td><td style="text-align: right;">R Documentation</td></tr></table>
+
+<h2>Evaluate an R expression in an environment constructed from a 
SparkDataFrame</h2>
+
+<h3>Description</h3>
+
+<p>Evaluate an R expression in an environment constructed from a SparkDataFrame.
+with() allows access to the columns of a SparkDataFrame simply by referring to
+their names: it adds every column of the SparkDataFrame to a new environment,
+and then evaluates the given expression in that environment.
+</p>
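+
+<p>For instance (a hedged sketch, assuming <code>irisDf</code> is a
+SparkDataFrame built with <code>createDataFrame(iris)</code>, as in the example
+further down), a column can be referred to by its bare name inside
+<code>with()</code>:</p>
+
+<pre><code class="r">irisDf &lt;- createDataFrame(iris)
+# without with(): the column must be qualified with the SparkDataFrame
+count(where(irisDf, irisDf$Sepal_Width &gt; 3))
+# with(): the bare column name resolves to the corresponding Column
+with(irisDf, count(where(irisDf, Sepal_Width &gt; 3)))
+</code></pre>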
+
+
+<h3>Usage</h3>
+
+<pre>
+with(data, expr, ...)
+
+## S4 method for signature 'SparkDataFrame'
+with(data, expr, ...)
+</pre>
+
+
+<h3>Arguments</h3>
+
+<table summary="R argblock">
+<tr valign="top"><td><code>data</code></td>
+<td>
+<p>(SparkDataFrame) SparkDataFrame to use for constructing an environment.</p>
+</td></tr>
+<tr valign="top"><td><code>expr</code></td>
+<td>
+<p>(expression) Expression to evaluate.</p>
+</td></tr>
+<tr valign="top"><td><code>...</code></td>
+<td>
+<p>arguments to be passed to future methods.</p>
+</td></tr>
+</table>
+
+
+<h3>Note</h3>
+
+<p>with since 1.6.0
+</p>
+
+
+<h3>See Also</h3>
+
+<p><a href="attach.html">attach</a>
+</p>
+<p>Other SparkDataFrame functions: <code><a 
href="SparkDataFrame.html">SparkDataFrame-class</a></code>,
+<code><a href="summarize.html">agg</a></code>, <code><a 
href="alias.html">alias</a></code>,
+<code><a href="arrange.html">arrange</a></code>, <code><a 
href="as.data.frame.html">as.data.frame</a></code>,
+<code><a href="attach.html">attach,SparkDataFrame-method</a></code>,
+<code><a href="broadcast.html">broadcast</a></code>, <code><a 
href="cache.html">cache</a></code>,
+<code><a href="checkpoint.html">checkpoint</a></code>, <code><a 
href="coalesce.html">coalesce</a></code>,
+<code><a href="collect.html">collect</a></code>, <code><a 
href="columns.html">colnames</a></code>,
+<code><a href="coltypes.html">coltypes</a></code>,
+<code><a 
href="createOrReplaceTempView.html">createOrReplaceTempView</a></code>,
+<code><a href="crossJoin.html">crossJoin</a></code>, <code><a 
href="cube.html">cube</a></code>,
+<code><a href="dapplyCollect.html">dapplyCollect</a></code>, <code><a 
href="dapply.html">dapply</a></code>,
+<code><a href="describe.html">describe</a></code>, <code><a 
href="dim.html">dim</a></code>,
+<code><a href="distinct.html">distinct</a></code>, <code><a 
href="dropDuplicates.html">dropDuplicates</a></code>,
+<code><a href="nafunctions.html">dropna</a></code>, <code><a 
href="drop.html">drop</a></code>,
+<code><a href="dtypes.html">dtypes</a></code>, <code><a 
href="exceptAll.html">exceptAll</a></code>,
+<code><a href="except.html">except</a></code>, <code><a 
href="explain.html">explain</a></code>,
+<code><a href="filter.html">filter</a></code>, <code><a 
href="first.html">first</a></code>,
+<code><a href="gapplyCollect.html">gapplyCollect</a></code>, <code><a 
href="gapply.html">gapply</a></code>,
+<code><a href="getNumPartitions.html">getNumPartitions</a></code>, <code><a 
href="groupBy.html">group_by</a></code>,
+<code><a href="head.html">head</a></code>, <code><a 
href="hint.html">hint</a></code>,
+<code><a href="histogram.html">histogram</a></code>, <code><a 
href="insertInto.html">insertInto</a></code>,
+<code><a href="intersectAll.html">intersectAll</a></code>, <code><a 
href="intersect.html">intersect</a></code>,
+<code><a href="isLocal.html">isLocal</a></code>, <code><a 
href="isStreaming.html">isStreaming</a></code>,
+<code><a href="join.html">join</a></code>, <code><a 
href="limit.html">limit</a></code>,
+<code><a href="localCheckpoint.html">localCheckpoint</a></code>, <code><a 
href="merge.html">merge</a></code>,
+<code><a href="mutate.html">mutate</a></code>, <code><a 
href="ncol.html">ncol</a></code>,
+<code><a href="nrow.html">nrow</a></code>, <code><a 
href="persist.html">persist</a></code>,
+<code><a href="printSchema.html">printSchema</a></code>, <code><a 
href="randomSplit.html">randomSplit</a></code>,
+<code><a href="rbind.html">rbind</a></code>, <code><a 
href="rename.html">rename</a></code>,
+<code><a href="repartitionByRange.html">repartitionByRange</a></code>,
+<code><a href="repartition.html">repartition</a></code>, <code><a 
href="rollup.html">rollup</a></code>,
+<code><a href="sample.html">sample</a></code>, <code><a 
href="saveAsTable.html">saveAsTable</a></code>,
+<code><a href="schema.html">schema</a></code>, <code><a 
href="selectExpr.html">selectExpr</a></code>,
+<code><a href="select.html">select</a></code>, <code><a 
href="showDF.html">showDF</a></code>,
+<code><a href="show.html">show</a></code>, <code><a 
href="storageLevel.html">storageLevel</a></code>,
+<code><a href="str.html">str</a></code>, <code><a 
href="subset.html">subset</a></code>,
+<code><a href="summary.html">summary</a></code>, <code><a 
href="take.html">take</a></code>,
+<code><a href="toJSON.html">toJSON</a></code>, <code><a 
href="unionByName.html">unionByName</a></code>,
+<code><a href="union.html">union</a></code>, <code><a 
href="unpersist.html">unpersist</a></code>,
+<code><a href="withColumn.html">withColumn</a></code>, <code><a 
href="withWatermark.html">withWatermark</a></code>,
+<code><a href="write.df.html">write.df</a></code>, <code><a 
href="write.jdbc.html">write.jdbc</a></code>,
+<code><a href="write.json.html">write.json</a></code>, <code><a 
href="write.orc.html">write.orc</a></code>,
+<code><a href="write.parquet.html">write.parquet</a></code>, <code><a 
href="write.stream.html">write.stream</a></code>,
+<code><a href="write.text.html">write.text</a></code>
+</p>
+
+
+<h3>Examples</h3>
+
+<pre><code class="r">## Not run: 
+##D with(irisDf, nrow(Sepal_Width))
+## End(Not run)
+</code></pre>
+
+
+<hr /><div style="text-align: center;">[Package <em>SparkR</em> version 2.4.0 
<a href="00Index.html">Index</a>]</div>
+</body></html>

http://git-wip-us.apache.org/repos/asf/spark-website/blob/52917ac4/site/docs/2.4.0/api/R/withColumn.html
----------------------------------------------------------------------
diff --git a/site/docs/2.4.0/api/R/withColumn.html 
b/site/docs/2.4.0/api/R/withColumn.html
new file mode 100644
index 0000000..9652796
--- /dev/null
+++ b/site/docs/2.4.0/api/R/withColumn.html
@@ -0,0 +1,133 @@
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" 
"http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd";><html 
xmlns="http://www.w3.org/1999/xhtml";><head><title>R: WithColumn</title>
+<meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
+<link rel="stylesheet" type="text/css" href="R.css" />
+
+<link rel="stylesheet" 
href="https://cdnjs.cloudflare.com/ajax/libs/highlight.js/8.3/styles/github.min.css";>
+<script 
src="https://cdnjs.cloudflare.com/ajax/libs/highlight.js/8.3/highlight.min.js";></script>
+<script 
src="https://cdnjs.cloudflare.com/ajax/libs/highlight.js/8.3/languages/r.min.js";></script>
+<script>hljs.initHighlightingOnLoad();</script>
+</head><body>
+
+<table width="100%" summary="page for withColumn {SparkR}"><tr><td>withColumn 
{SparkR}</td><td style="text-align: right;">R Documentation</td></tr></table>
+
+<h2>WithColumn</h2>
+
+<h3>Description</h3>
+
+<p>Return a new SparkDataFrame by adding a column or replacing the existing 
column
+that has the same name.
+</p>
+
+
+<h3>Usage</h3>
+
+<pre>
+withColumn(x, colName, col)
+
+## S4 method for signature 'SparkDataFrame,character'
+withColumn(x, colName, col)
+</pre>
+
+
+<h3>Arguments</h3>
+
+<table summary="R argblock">
+<tr valign="top"><td><code>x</code></td>
+<td>
+<p>a SparkDataFrame.</p>
+</td></tr>
+<tr valign="top"><td><code>colName</code></td>
+<td>
+<p>a column name.</p>
+</td></tr>
+<tr valign="top"><td><code>col</code></td>
+<td>
+<p>a Column expression (which must refer only to this SparkDataFrame), or an 
atomic
+vector of length 1 to be used as a literal value.</p>
+</td></tr>
+</table>
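+
+<p>The two accepted forms of <code>col</code>, as a hedged sketch (the column
+names are assumptions, not taken from this page):</p>
+
+<pre><code class="r"># a Column expression derived from the same SparkDataFrame ...
+df &lt;- withColumn(df, &quot;weight_kg&quot;, df$weight_lb * 0.4536)
+# ... or an atomic vector of length 1, used as a literal for every row
+df &lt;- withColumn(df, &quot;source&quot;, &quot;survey&quot;)
+</code></pre>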
+
+
+<h3>Value</h3>
+
+<p>A SparkDataFrame with the new column added or the existing column replaced.
+</p>
+
+
+<h3>Note</h3>
+
+<p>withColumn since 1.4.0
+</p>
+
+
+<h3>See Also</h3>
+
+<p><a href="rename.html">rename</a> <a href="mutate.html">mutate</a> <a 
href="subset.html">subset</a>
+</p>
+<p>Other SparkDataFrame functions: <code><a 
href="SparkDataFrame.html">SparkDataFrame-class</a></code>,
+<code><a href="summarize.html">agg</a></code>, <code><a 
href="alias.html">alias</a></code>,
+<code><a href="arrange.html">arrange</a></code>, <code><a 
href="as.data.frame.html">as.data.frame</a></code>,
+<code><a href="attach.html">attach,SparkDataFrame-method</a></code>,
+<code><a href="broadcast.html">broadcast</a></code>, <code><a 
href="cache.html">cache</a></code>,
+<code><a href="checkpoint.html">checkpoint</a></code>, <code><a 
href="coalesce.html">coalesce</a></code>,
+<code><a href="collect.html">collect</a></code>, <code><a 
href="columns.html">colnames</a></code>,
+<code><a href="coltypes.html">coltypes</a></code>,
+<code><a 
href="createOrReplaceTempView.html">createOrReplaceTempView</a></code>,
+<code><a href="crossJoin.html">crossJoin</a></code>, <code><a 
href="cube.html">cube</a></code>,
+<code><a href="dapplyCollect.html">dapplyCollect</a></code>, <code><a 
href="dapply.html">dapply</a></code>,
+<code><a href="describe.html">describe</a></code>, <code><a 
href="dim.html">dim</a></code>,
+<code><a href="distinct.html">distinct</a></code>, <code><a 
href="dropDuplicates.html">dropDuplicates</a></code>,
+<code><a href="nafunctions.html">dropna</a></code>, <code><a 
href="drop.html">drop</a></code>,
+<code><a href="dtypes.html">dtypes</a></code>, <code><a 
href="exceptAll.html">exceptAll</a></code>,
+<code><a href="except.html">except</a></code>, <code><a 
href="explain.html">explain</a></code>,
+<code><a href="filter.html">filter</a></code>, <code><a 
href="first.html">first</a></code>,
+<code><a href="gapplyCollect.html">gapplyCollect</a></code>, <code><a 
href="gapply.html">gapply</a></code>,
+<code><a href="getNumPartitions.html">getNumPartitions</a></code>, <code><a 
href="groupBy.html">group_by</a></code>,
+<code><a href="head.html">head</a></code>, <code><a 
href="hint.html">hint</a></code>,
+<code><a href="histogram.html">histogram</a></code>, <code><a 
href="insertInto.html">insertInto</a></code>,
+<code><a href="intersectAll.html">intersectAll</a></code>, <code><a 
href="intersect.html">intersect</a></code>,
+<code><a href="isLocal.html">isLocal</a></code>, <code><a 
href="isStreaming.html">isStreaming</a></code>,
+<code><a href="join.html">join</a></code>, <code><a 
href="limit.html">limit</a></code>,
+<code><a href="localCheckpoint.html">localCheckpoint</a></code>, <code><a 
href="merge.html">merge</a></code>,
+<code><a href="mutate.html">mutate</a></code>, <code><a 
href="ncol.html">ncol</a></code>,
+<code><a href="nrow.html">nrow</a></code>, <code><a 
href="persist.html">persist</a></code>,
+<code><a href="printSchema.html">printSchema</a></code>, <code><a 
href="randomSplit.html">randomSplit</a></code>,
+<code><a href="rbind.html">rbind</a></code>, <code><a 
href="rename.html">rename</a></code>,
+<code><a href="repartitionByRange.html">repartitionByRange</a></code>,
+<code><a href="repartition.html">repartition</a></code>, <code><a 
href="rollup.html">rollup</a></code>,
+<code><a href="sample.html">sample</a></code>, <code><a 
href="saveAsTable.html">saveAsTable</a></code>,
+<code><a href="schema.html">schema</a></code>, <code><a 
href="selectExpr.html">selectExpr</a></code>,
+<code><a href="select.html">select</a></code>, <code><a 
href="showDF.html">showDF</a></code>,
+<code><a href="show.html">show</a></code>, <code><a 
href="storageLevel.html">storageLevel</a></code>,
+<code><a href="str.html">str</a></code>, <code><a 
href="subset.html">subset</a></code>,
+<code><a href="summary.html">summary</a></code>, <code><a 
href="take.html">take</a></code>,
+<code><a href="toJSON.html">toJSON</a></code>, <code><a 
href="unionByName.html">unionByName</a></code>,
+<code><a href="union.html">union</a></code>, <code><a 
href="unpersist.html">unpersist</a></code>,
+<code><a href="withWatermark.html">withWatermark</a></code>, <code><a 
href="with.html">with</a></code>,
+<code><a href="write.df.html">write.df</a></code>, <code><a 
href="write.jdbc.html">write.jdbc</a></code>,
+<code><a href="write.json.html">write.json</a></code>, <code><a 
href="write.orc.html">write.orc</a></code>,
+<code><a href="write.parquet.html">write.parquet</a></code>, <code><a 
href="write.stream.html">write.stream</a></code>,
+<code><a href="write.text.html">write.text</a></code>
+</p>
+
+
+<h3>Examples</h3>
+
+<pre><code class="r">## Not run: 
+##D sparkR.session()
+##D path &lt;- &quot;path/to/file.json&quot;
+##D df &lt;- read.json(path)
+##D newDF &lt;- withColumn(df, &quot;newCol&quot;, df$col1 * 5)
+##D # Replace an existing column
+##D newDF2 &lt;- withColumn(newDF, &quot;newCol&quot;, newDF$col1)
+##D newDF3 &lt;- withColumn(newDF, &quot;newCol&quot;, 42)
+##D # Use extract operator to set an existing or new column
+##D df[[&quot;age&quot;]] &lt;- 23
+##D df[[2]] &lt;- df$col1
+##D df[[2]] &lt;- NULL # drop column
+## End(Not run)
+</code></pre>
+
+
+<hr /><div style="text-align: center;">[Package <em>SparkR</em> version 2.4.0 
<a href="00Index.html">Index</a>]</div>
+</body></html>

http://git-wip-us.apache.org/repos/asf/spark-website/blob/52917ac4/site/docs/2.4.0/api/R/withWatermark.html
----------------------------------------------------------------------
diff --git a/site/docs/2.4.0/api/R/withWatermark.html 
b/site/docs/2.4.0/api/R/withWatermark.html
new file mode 100644
index 0000000..8cc9ac6
--- /dev/null
+++ b/site/docs/2.4.0/api/R/withWatermark.html
@@ -0,0 +1,149 @@
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" 
"http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd";><html 
xmlns="http://www.w3.org/1999/xhtml";><head><title>R: withWatermark</title>
+<meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
+<link rel="stylesheet" type="text/css" href="R.css" />
+
+<link rel="stylesheet" 
href="https://cdnjs.cloudflare.com/ajax/libs/highlight.js/8.3/styles/github.min.css";>
+<script 
src="https://cdnjs.cloudflare.com/ajax/libs/highlight.js/8.3/highlight.min.js";></script>
+<script 
src="https://cdnjs.cloudflare.com/ajax/libs/highlight.js/8.3/languages/r.min.js";></script>
+<script>hljs.initHighlightingOnLoad();</script>
+</head><body>
+
+<table width="100%" summary="page for withWatermark 
{SparkR}"><tr><td>withWatermark {SparkR}</td><td style="text-align: right;">R 
Documentation</td></tr></table>
+
+<h2>withWatermark</h2>
+
+<h3>Description</h3>
+
+<p>Defines an event time watermark for this streaming SparkDataFrame. A 
watermark tracks a point in
+time before which we assume no more late data is going to arrive.
+</p>
+
+
+<h3>Usage</h3>
+
+<pre>
+withWatermark(x, eventTime, delayThreshold)
+
+## S4 method for signature 'SparkDataFrame,character,character'
+withWatermark(x, eventTime,
+  delayThreshold)
+</pre>
+
+
+<h3>Arguments</h3>
+
+<table summary="R argblock">
+<tr valign="top"><td><code>x</code></td>
+<td>
+<p>a streaming SparkDataFrame</p>
+</td></tr>
+<tr valign="top"><td><code>eventTime</code></td>
+<td>
+<p>a string specifying the name of the Column that contains the event time of 
the
+row.</p>
+</td></tr>
+<tr valign="top"><td><code>delayThreshold</code></td>
+<td>
+<p>a string specifying the minimum delay to wait for late data to arrive,
+relative to the latest record that has been processed, in the form of an
+interval (e.g. &quot;1 minute&quot; or &quot;5 hours&quot;). NOTE: This should 
not be negative.</p>
+</td></tr>
+</table>
+
+
+<h3>Details</h3>
+
+<p>Spark will use this watermark for several purposes:
+</p>
+
+<ul>
+<li><p> To know when a given time window aggregation can be finalized and thus 
can be emitted
+when using output modes that do not allow updates.
+</p>
+</li>
+<li><p> To minimize the amount of state that we need to keep for on-going 
aggregations.
+</p>
+</li></ul>
+
+<p>The current watermark is computed by looking at the 
<code>MAX(eventTime)</code> seen across
+all of the partitions in the query minus a user specified 
<code>delayThreshold</code>. Due to the cost
+of coordinating this value across partitions, the actual watermark used is 
only guaranteed
+to be at least <code>delayThreshold</code> behind the actual event time.  In 
some cases we may still
+process records that arrive more than <code>delayThreshold</code> late.
+</p>
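+
+<p>A typical use, sketched under the assumption that <code>df</code> is the
+streaming SparkDataFrame from the example at the end of this page: the watermark
+allows a windowed count to be emitted in <code>append</code> output mode once a
+window can no longer receive late rows.</p>
+
+<pre><code class="r">df &lt;- withWatermark(df, &quot;time&quot;, &quot;10 minutes&quot;)
+counts &lt;- count(groupBy(df, window(df$time, &quot;5 minutes&quot;)))
+q &lt;- write.stream(counts, &quot;console&quot;, outputMode = &quot;append&quot;)
+</code></pre>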
+
+
+<h3>Value</h3>
+
+<p>a SparkDataFrame.
+</p>
+
+
+<h3>Note</h3>
+
+<p>withWatermark since 2.3.0
+</p>
+
+
+<h3>See Also</h3>
+
+<p>Other SparkDataFrame functions: <code><a 
href="SparkDataFrame.html">SparkDataFrame-class</a></code>,
+<code><a href="summarize.html">agg</a></code>, <code><a 
href="alias.html">alias</a></code>,
+<code><a href="arrange.html">arrange</a></code>, <code><a 
href="as.data.frame.html">as.data.frame</a></code>,
+<code><a href="attach.html">attach,SparkDataFrame-method</a></code>,
+<code><a href="broadcast.html">broadcast</a></code>, <code><a 
href="cache.html">cache</a></code>,
+<code><a href="checkpoint.html">checkpoint</a></code>, <code><a 
href="coalesce.html">coalesce</a></code>,
+<code><a href="collect.html">collect</a></code>, <code><a 
href="columns.html">colnames</a></code>,
+<code><a href="coltypes.html">coltypes</a></code>,
+<code><a 
href="createOrReplaceTempView.html">createOrReplaceTempView</a></code>,
+<code><a href="crossJoin.html">crossJoin</a></code>, <code><a 
href="cube.html">cube</a></code>,
+<code><a href="dapplyCollect.html">dapplyCollect</a></code>, <code><a 
href="dapply.html">dapply</a></code>,
+<code><a href="describe.html">describe</a></code>, <code><a 
href="dim.html">dim</a></code>,
+<code><a href="distinct.html">distinct</a></code>, <code><a 
href="dropDuplicates.html">dropDuplicates</a></code>,
+<code><a href="nafunctions.html">dropna</a></code>, <code><a 
href="drop.html">drop</a></code>,
+<code><a href="dtypes.html">dtypes</a></code>, <code><a 
href="exceptAll.html">exceptAll</a></code>,
+<code><a href="except.html">except</a></code>, <code><a 
href="explain.html">explain</a></code>,
+<code><a href="filter.html">filter</a></code>, <code><a 
href="first.html">first</a></code>,
+<code><a href="gapplyCollect.html">gapplyCollect</a></code>, <code><a 
href="gapply.html">gapply</a></code>,
+<code><a href="getNumPartitions.html">getNumPartitions</a></code>, <code><a 
href="groupBy.html">group_by</a></code>,
+<code><a href="head.html">head</a></code>, <code><a 
href="hint.html">hint</a></code>,
+<code><a href="histogram.html">histogram</a></code>, <code><a 
href="insertInto.html">insertInto</a></code>,
+<code><a href="intersectAll.html">intersectAll</a></code>, <code><a 
href="intersect.html">intersect</a></code>,
+<code><a href="isLocal.html">isLocal</a></code>, <code><a 
href="isStreaming.html">isStreaming</a></code>,
+<code><a href="join.html">join</a></code>, <code><a 
href="limit.html">limit</a></code>,
+<code><a href="localCheckpoint.html">localCheckpoint</a></code>, <code><a 
href="merge.html">merge</a></code>,
+<code><a href="mutate.html">mutate</a></code>, <code><a 
href="ncol.html">ncol</a></code>,
+<code><a href="nrow.html">nrow</a></code>, <code><a 
href="persist.html">persist</a></code>,
+<code><a href="printSchema.html">printSchema</a></code>, <code><a 
href="randomSplit.html">randomSplit</a></code>,
+<code><a href="rbind.html">rbind</a></code>, <code><a 
href="rename.html">rename</a></code>,
+<code><a href="repartitionByRange.html">repartitionByRange</a></code>,
+<code><a href="repartition.html">repartition</a></code>, <code><a 
href="rollup.html">rollup</a></code>,
+<code><a href="sample.html">sample</a></code>, <code><a 
href="saveAsTable.html">saveAsTable</a></code>,
+<code><a href="schema.html">schema</a></code>, <code><a 
href="selectExpr.html">selectExpr</a></code>,
+<code><a href="select.html">select</a></code>, <code><a 
href="showDF.html">showDF</a></code>,
+<code><a href="show.html">show</a></code>, <code><a 
href="storageLevel.html">storageLevel</a></code>,
+<code><a href="str.html">str</a></code>, <code><a 
href="subset.html">subset</a></code>,
+<code><a href="summary.html">summary</a></code>, <code><a 
href="take.html">take</a></code>,
+<code><a href="toJSON.html">toJSON</a></code>, <code><a 
href="unionByName.html">unionByName</a></code>,
+<code><a href="union.html">union</a></code>, <code><a 
href="unpersist.html">unpersist</a></code>,
+<code><a href="withColumn.html">withColumn</a></code>, <code><a 
href="with.html">with</a></code>,
+<code><a href="write.df.html">write.df</a></code>, <code><a 
href="write.jdbc.html">write.jdbc</a></code>,
+<code><a href="write.json.html">write.json</a></code>, <code><a 
href="write.orc.html">write.orc</a></code>,
+<code><a href="write.parquet.html">write.parquet</a></code>, <code><a 
href="write.stream.html">write.stream</a></code>,
+<code><a href="write.text.html">write.text</a></code>
+</p>
+
+
+<h3>Examples</h3>
+
+<pre><code class="r">## Not run: 
+##D sparkR.session()
+##D schema &lt;- structType(structField(&quot;time&quot;, 
&quot;timestamp&quot;), structField(&quot;value&quot;, &quot;double&quot;))
+##D df &lt;- read.stream(&quot;json&quot;, path = jsonDir, schema = schema, 
maxFilesPerTrigger = 1)
+##D df &lt;- withWatermark(df, &quot;time&quot;, &quot;10 minutes&quot;)
+## End(Not run)
+</code></pre>
+
+
+<hr /><div style="text-align: center;">[Package <em>SparkR</em> version 2.4.0 
<a href="00Index.html">Index</a>]</div>
+</body></html>

http://git-wip-us.apache.org/repos/asf/spark-website/blob/52917ac4/site/docs/2.4.0/api/R/write.df.html
----------------------------------------------------------------------
diff --git a/site/docs/2.4.0/api/R/write.df.html 
b/site/docs/2.4.0/api/R/write.df.html
new file mode 100644
index 0000000..2320abc
--- /dev/null
+++ b/site/docs/2.4.0/api/R/write.df.html
@@ -0,0 +1,163 @@
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" 
"http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd";><html 
xmlns="http://www.w3.org/1999/xhtml";><head><title>R: Save the contents of 
SparkDataFrame to a data source.</title>
+<meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
+<link rel="stylesheet" type="text/css" href="R.css" />
+
+<link rel="stylesheet" 
href="https://cdnjs.cloudflare.com/ajax/libs/highlight.js/8.3/styles/github.min.css";>
+<script 
src="https://cdnjs.cloudflare.com/ajax/libs/highlight.js/8.3/highlight.min.js";></script>
+<script 
src="https://cdnjs.cloudflare.com/ajax/libs/highlight.js/8.3/languages/r.min.js";></script>
+<script>hljs.initHighlightingOnLoad();</script>
+</head><body>
+
+<table width="100%" summary="page for write.df {SparkR}"><tr><td>write.df 
{SparkR}</td><td style="text-align: right;">R Documentation</td></tr></table>
+
+<h2>Save the contents of SparkDataFrame to a data source.</h2>
+
+<h3>Description</h3>
+
+<p>The data source is specified by the <code>source</code> and a set of 
options (...).
+If <code>source</code> is not specified, the default data source configured by
+spark.sql.sources.default will be used.
+</p>
+
+
+<h3>Usage</h3>
+
+<pre>
+write.df(df, path = NULL, ...)
+
+saveDF(df, path, source = NULL, mode = "error", ...)
+
+write.df(df, path = NULL, ...)
+
+## S4 method for signature 'SparkDataFrame'
+write.df(df, path = NULL, source = NULL,
+  mode = "error", ...)
+
+## S4 method for signature 'SparkDataFrame,character'
+saveDF(df, path, source = NULL,
+  mode = "error", ...)
+</pre>
+
+
+<h3>Arguments</h3>
+
+<table summary="R argblock">
+<tr valign="top"><td><code>df</code></td>
+<td>
+<p>a SparkDataFrame.</p>
+</td></tr>
+<tr valign="top"><td><code>path</code></td>
+<td>
+<p>the path where the contents of the SparkDataFrame are saved.</p>
+</td></tr>
+<tr valign="top"><td><code>...</code></td>
+<td>
+<p>additional argument(s) passed to the method.</p>
+</td></tr>
+<tr valign="top"><td><code>source</code></td>
+<td>
+<p>a name for external data source.</p>
+</td></tr>
+<tr valign="top"><td><code>mode</code></td>
+<td>
+<p>one of 'append', 'overwrite', 'error', 'errorifexists', 'ignore';
+the save mode to use (defaults to 'error')</p>
+</td></tr>
+</table>
+
+
+<h3>Details</h3>
+
+<p>Additionally, mode is used to specify the behavior of the save operation 
when data already
+exists in the data source. There are four modes:
+</p>
+
+<ul>
+<li><p> 'append': Contents of this SparkDataFrame are expected to be appended 
to existing data.
+</p>
+</li>
+<li><p> 'overwrite': Existing data is expected to be overwritten by the 
contents of this
+SparkDataFrame.
+</p>
+</li>
+<li><p> 'error' or 'errorifexists': An exception is expected to be thrown.
+</p>
+</li>
+<li><p> 'ignore': The save operation is expected to not save the contents of 
the SparkDataFrame
+and to not change the existing data.
+</p>
+</li></ul>
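+
+<p>A short hedged sketch of how the modes behave on a repeated save (the output
+path is an assumption):</p>
+
+<pre><code class="r">write.df(df, &quot;/tmp/people&quot;, source = &quot;parquet&quot;, mode = &quot;overwrite&quot;)  # replace existing data
+write.df(df, &quot;/tmp/people&quot;, source = &quot;parquet&quot;, mode = &quot;append&quot;)     # add to it
+write.df(df, &quot;/tmp/people&quot;, source = &quot;parquet&quot;, mode = &quot;ignore&quot;)     # no-op, data already exists
+# mode = &quot;error&quot; (the default) would raise an error here instead
+</code></pre>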
+
+
+
+<h3>Note</h3>
+
+<p>write.df since 1.4.0
+</p>
+<p>saveDF since 1.4.0
+</p>
+
+
+<h3>See Also</h3>
+
+<p>Other SparkDataFrame functions: <code><a 
href="SparkDataFrame.html">SparkDataFrame-class</a></code>,
+<code><a href="summarize.html">agg</a></code>, <code><a 
href="alias.html">alias</a></code>,
+<code><a href="arrange.html">arrange</a></code>, <code><a 
href="as.data.frame.html">as.data.frame</a></code>,
+<code><a href="attach.html">attach,SparkDataFrame-method</a></code>,
+<code><a href="broadcast.html">broadcast</a></code>, <code><a 
href="cache.html">cache</a></code>,
+<code><a href="checkpoint.html">checkpoint</a></code>, <code><a 
href="coalesce.html">coalesce</a></code>,
+<code><a href="collect.html">collect</a></code>, <code><a 
href="columns.html">colnames</a></code>,
+<code><a href="coltypes.html">coltypes</a></code>,
+<code><a 
href="createOrReplaceTempView.html">createOrReplaceTempView</a></code>,
+<code><a href="crossJoin.html">crossJoin</a></code>, <code><a 
href="cube.html">cube</a></code>,
+<code><a href="dapplyCollect.html">dapplyCollect</a></code>, <code><a 
href="dapply.html">dapply</a></code>,
+<code><a href="describe.html">describe</a></code>, <code><a 
href="dim.html">dim</a></code>,
+<code><a href="distinct.html">distinct</a></code>, <code><a 
href="dropDuplicates.html">dropDuplicates</a></code>,
+<code><a href="nafunctions.html">dropna</a></code>, <code><a 
href="drop.html">drop</a></code>,
+<code><a href="dtypes.html">dtypes</a></code>, <code><a 
href="exceptAll.html">exceptAll</a></code>,
+<code><a href="except.html">except</a></code>, <code><a 
href="explain.html">explain</a></code>,
+<code><a href="filter.html">filter</a></code>, <code><a 
href="first.html">first</a></code>,
+<code><a href="gapplyCollect.html">gapplyCollect</a></code>, <code><a 
href="gapply.html">gapply</a></code>,
+<code><a href="getNumPartitions.html">getNumPartitions</a></code>, <code><a 
href="groupBy.html">group_by</a></code>,
+<code><a href="head.html">head</a></code>, <code><a 
href="hint.html">hint</a></code>,
+<code><a href="histogram.html">histogram</a></code>, <code><a 
href="insertInto.html">insertInto</a></code>,
+<code><a href="intersectAll.html">intersectAll</a></code>, <code><a 
href="intersect.html">intersect</a></code>,
+<code><a href="isLocal.html">isLocal</a></code>, <code><a 
href="isStreaming.html">isStreaming</a></code>,
+<code><a href="join.html">join</a></code>, <code><a 
href="limit.html">limit</a></code>,
+<code><a href="localCheckpoint.html">localCheckpoint</a></code>, <code><a 
href="merge.html">merge</a></code>,
+<code><a href="mutate.html">mutate</a></code>, <code><a 
href="ncol.html">ncol</a></code>,
+<code><a href="nrow.html">nrow</a></code>, <code><a 
href="persist.html">persist</a></code>,
+<code><a href="printSchema.html">printSchema</a></code>, <code><a 
href="randomSplit.html">randomSplit</a></code>,
+<code><a href="rbind.html">rbind</a></code>, <code><a 
href="rename.html">rename</a></code>,
+<code><a href="repartitionByRange.html">repartitionByRange</a></code>,
+<code><a href="repartition.html">repartition</a></code>, <code><a 
href="rollup.html">rollup</a></code>,
+<code><a href="sample.html">sample</a></code>, <code><a 
href="saveAsTable.html">saveAsTable</a></code>,
+<code><a href="schema.html">schema</a></code>, <code><a 
href="selectExpr.html">selectExpr</a></code>,
+<code><a href="select.html">select</a></code>, <code><a 
href="showDF.html">showDF</a></code>,
+<code><a href="show.html">show</a></code>, <code><a 
href="storageLevel.html">storageLevel</a></code>,
+<code><a href="str.html">str</a></code>, <code><a 
href="subset.html">subset</a></code>,
+<code><a href="summary.html">summary</a></code>, <code><a 
href="take.html">take</a></code>,
+<code><a href="toJSON.html">toJSON</a></code>, <code><a 
href="unionByName.html">unionByName</a></code>,
+<code><a href="union.html">union</a></code>, <code><a 
href="unpersist.html">unpersist</a></code>,
+<code><a href="withColumn.html">withColumn</a></code>, <code><a 
href="withWatermark.html">withWatermark</a></code>,
+<code><a href="with.html">with</a></code>, <code><a 
href="write.jdbc.html">write.jdbc</a></code>,
+<code><a href="write.json.html">write.json</a></code>, <code><a 
href="write.orc.html">write.orc</a></code>,
+<code><a href="write.parquet.html">write.parquet</a></code>, <code><a 
href="write.stream.html">write.stream</a></code>,
+<code><a href="write.text.html">write.text</a></code>
+</p>
+
+
+<h3>Examples</h3>
+
+<pre><code class="r">## Not run: 
+##D sparkR.session()
+##D path &lt;- &quot;path/to/file.json&quot;
+##D df &lt;- read.json(path)
+##D write.df(df, &quot;myfile&quot;, &quot;parquet&quot;, 
&quot;overwrite&quot;)
+##D saveDF(df, parquetPath2, &quot;parquet&quot;, mode = &quot;append&quot;, 
mergeSchema = TRUE)
+## End(Not run)
+</code></pre>
+
+
+<hr /><div style="text-align: center;">[Package <em>SparkR</em> version 2.4.0 
<a href="00Index.html">Index</a>]</div>
+</body></html>

http://git-wip-us.apache.org/repos/asf/spark-website/blob/52917ac4/site/docs/2.4.0/api/R/write.jdbc.html
----------------------------------------------------------------------
diff --git a/site/docs/2.4.0/api/R/write.jdbc.html 
b/site/docs/2.4.0/api/R/write.jdbc.html
new file mode 100644
index 0000000..22ff2b8
--- /dev/null
+++ b/site/docs/2.4.0/api/R/write.jdbc.html
@@ -0,0 +1,150 @@
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" 
"http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd";><html 
xmlns="http://www.w3.org/1999/xhtml";><head><title>R: Save the content of 
SparkDataFrame to an external database...</title>
+<meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
+<link rel="stylesheet" type="text/css" href="R.css" />
+
+<link rel="stylesheet" 
href="https://cdnjs.cloudflare.com/ajax/libs/highlight.js/8.3/styles/github.min.css";>
+<script 
src="https://cdnjs.cloudflare.com/ajax/libs/highlight.js/8.3/highlight.min.js";></script>
+<script 
src="https://cdnjs.cloudflare.com/ajax/libs/highlight.js/8.3/languages/r.min.js";></script>
+<script>hljs.initHighlightingOnLoad();</script>
+</head><body>
+
+<table width="100%" summary="page for write.jdbc {SparkR}"><tr><td>write.jdbc 
{SparkR}</td><td style="text-align: right;">R Documentation</td></tr></table>
+
+<h2>Save the content of SparkDataFrame to an external database table via 
JDBC.</h2>
+
+<h3>Description</h3>
+
+<p>Save the content of the SparkDataFrame to an external database table via 
JDBC. Additional JDBC
+database connection properties can be set via <code>...</code>.
+</p>
+
+
+<h3>Usage</h3>
+
+<pre>
+write.jdbc(x, url, tableName, mode = "error", ...)
+
+## S4 method for signature 'SparkDataFrame,character,character'
+write.jdbc(x, url,
+  tableName, mode = "error", ...)
+</pre>
+
+
+<h3>Arguments</h3>
+
+<table summary="R argblock">
+<tr valign="top"><td><code>x</code></td>
+<td>
+<p>a SparkDataFrame.</p>
+</td></tr>
+<tr valign="top"><td><code>url</code></td>
+<td>
+<p>JDBC database url of the form <code>jdbc:subprotocol:subname</code>.</p>
+</td></tr>
+<tr valign="top"><td><code>tableName</code></td>
+<td>
+<p>the name of the table in the external database.</p>
+</td></tr>
+<tr valign="top"><td><code>mode</code></td>
+<td>
+<p>one of 'append', 'overwrite', 'error', 'errorifexists', 'ignore';
+the save mode to use (defaults to 'error')</p>
+</td></tr>
+<tr valign="top"><td><code>...</code></td>
+<td>
+<p>additional JDBC database connection properties.</p>
+</td></tr>
+</table>
+
+
+<h3>Details</h3>
+
+<p>Also, mode is used to specify the behavior of the save operation when
+data already exists in the data source. There are four modes:
+</p>
+
+<ul>
+<li><p> 'append': Contents of this SparkDataFrame are expected to be appended 
to existing data.
+</p>
+</li>
+<li><p> 'overwrite': Existing data is expected to be overwritten by the 
contents of this
+SparkDataFrame.
+</p>
+</li>
+<li><p> 'error' or 'errorifexists': An exception is expected to be thrown.
+</p>
+</li>
+<li><p> 'ignore': The save operation is expected to not save the contents of 
the SparkDataFrame
+and to not change the existing data.
+</p>
+</li></ul>
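+
+<p>For example (a hedged sketch; the URL, table name and the <code>driver</code>
+connection property are assumptions, not taken from this page):</p>
+
+<pre><code class="r">jdbcUrl &lt;- &quot;jdbc:postgresql://localhost:5432/mydb&quot;
+# append to an existing table, passing connection properties through ...
+write.jdbc(df, jdbcUrl, &quot;people&quot;, mode = &quot;append&quot;,
+           user = &quot;username&quot;, password = &quot;password&quot;,
+           driver = &quot;org.postgresql.Driver&quot;)
+</code></pre>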
+
+
+
+<h3>Note</h3>
+
+<p>write.jdbc since 2.0.0
+</p>
+
+
+<h3>See Also</h3>
+
+<p>Other SparkDataFrame functions: <code><a 
href="SparkDataFrame.html">SparkDataFrame-class</a></code>,
+<code><a href="summarize.html">agg</a></code>, <code><a 
href="alias.html">alias</a></code>,
+<code><a href="arrange.html">arrange</a></code>, <code><a 
href="as.data.frame.html">as.data.frame</a></code>,
+<code><a href="attach.html">attach,SparkDataFrame-method</a></code>,
+<code><a href="broadcast.html">broadcast</a></code>, <code><a 
href="cache.html">cache</a></code>,
+<code><a href="checkpoint.html">checkpoint</a></code>, <code><a 
href="coalesce.html">coalesce</a></code>,
+<code><a href="collect.html">collect</a></code>, <code><a 
href="columns.html">colnames</a></code>,
+<code><a href="coltypes.html">coltypes</a></code>,
+<code><a 
href="createOrReplaceTempView.html">createOrReplaceTempView</a></code>,
+<code><a href="crossJoin.html">crossJoin</a></code>, <code><a 
href="cube.html">cube</a></code>,
+<code><a href="dapplyCollect.html">dapplyCollect</a></code>, <code><a 
href="dapply.html">dapply</a></code>,
+<code><a href="describe.html">describe</a></code>, <code><a 
href="dim.html">dim</a></code>,
+<code><a href="distinct.html">distinct</a></code>, <code><a 
href="dropDuplicates.html">dropDuplicates</a></code>,
+<code><a href="nafunctions.html">dropna</a></code>, <code><a 
href="drop.html">drop</a></code>,
+<code><a href="dtypes.html">dtypes</a></code>, <code><a 
href="exceptAll.html">exceptAll</a></code>,
+<code><a href="except.html">except</a></code>, <code><a 
href="explain.html">explain</a></code>,
+<code><a href="filter.html">filter</a></code>, <code><a 
href="first.html">first</a></code>,
+<code><a href="gapplyCollect.html">gapplyCollect</a></code>, <code><a 
href="gapply.html">gapply</a></code>,
+<code><a href="getNumPartitions.html">getNumPartitions</a></code>, <code><a 
href="groupBy.html">group_by</a></code>,
+<code><a href="head.html">head</a></code>, <code><a 
href="hint.html">hint</a></code>,
+<code><a href="histogram.html">histogram</a></code>, <code><a 
href="insertInto.html">insertInto</a></code>,
+<code><a href="intersectAll.html">intersectAll</a></code>, <code><a 
href="intersect.html">intersect</a></code>,
+<code><a href="isLocal.html">isLocal</a></code>, <code><a 
href="isStreaming.html">isStreaming</a></code>,
+<code><a href="join.html">join</a></code>, <code><a 
href="limit.html">limit</a></code>,
+<code><a href="localCheckpoint.html">localCheckpoint</a></code>, <code><a 
href="merge.html">merge</a></code>,
+<code><a href="mutate.html">mutate</a></code>, <code><a 
href="ncol.html">ncol</a></code>,
+<code><a href="nrow.html">nrow</a></code>, <code><a 
href="persist.html">persist</a></code>,
+<code><a href="printSchema.html">printSchema</a></code>, <code><a 
href="randomSplit.html">randomSplit</a></code>,
+<code><a href="rbind.html">rbind</a></code>, <code><a 
href="rename.html">rename</a></code>,
+<code><a href="repartitionByRange.html">repartitionByRange</a></code>,
+<code><a href="repartition.html">repartition</a></code>, <code><a 
href="rollup.html">rollup</a></code>,
+<code><a href="sample.html">sample</a></code>, <code><a 
href="saveAsTable.html">saveAsTable</a></code>,
+<code><a href="schema.html">schema</a></code>, <code><a 
href="selectExpr.html">selectExpr</a></code>,
+<code><a href="select.html">select</a></code>, <code><a 
href="showDF.html">showDF</a></code>,
+<code><a href="show.html">show</a></code>, <code><a 
href="storageLevel.html">storageLevel</a></code>,
+<code><a href="str.html">str</a></code>, <code><a 
href="subset.html">subset</a></code>,
+<code><a href="summary.html">summary</a></code>, <code><a 
href="take.html">take</a></code>,
+<code><a href="toJSON.html">toJSON</a></code>, <code><a 
href="unionByName.html">unionByName</a></code>,
+<code><a href="union.html">union</a></code>, <code><a 
href="unpersist.html">unpersist</a></code>,
+<code><a href="withColumn.html">withColumn</a></code>, <code><a 
href="withWatermark.html">withWatermark</a></code>,
+<code><a href="with.html">with</a></code>, <code><a 
href="write.df.html">write.df</a></code>,
+<code><a href="write.json.html">write.json</a></code>, <code><a 
href="write.orc.html">write.orc</a></code>,
+<code><a href="write.parquet.html">write.parquet</a></code>, <code><a 
href="write.stream.html">write.stream</a></code>,
+<code><a href="write.text.html">write.text</a></code>
+</p>
+
+
+<h3>Examples</h3>
+
+<pre><code class="r">## Not run: 
+##D sparkR.session()
+##D jdbcUrl &lt;- &quot;jdbc:mysql://localhost:3306/databasename&quot;
+##D write.jdbc(df, jdbcUrl, &quot;table&quot;, user = &quot;username&quot;, 
password = &quot;password&quot;)
+## End(Not run)
+</code></pre>
+
+
+<hr /><div style="text-align: center;">[Package <em>SparkR</em> version 2.4.0 
<a href="00Index.html">Index</a>]</div>
+</body></html>

http://git-wip-us.apache.org/repos/asf/spark-website/blob/52917ac4/site/docs/2.4.0/api/R/write.json.html
----------------------------------------------------------------------
diff --git a/site/docs/2.4.0/api/R/write.json.html 
b/site/docs/2.4.0/api/R/write.json.html
new file mode 100644
index 0000000..f4ac4b6
--- /dev/null
+++ b/site/docs/2.4.0/api/R/write.json.html
@@ -0,0 +1,124 @@
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" 
"http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd";><html 
xmlns="http://www.w3.org/1999/xhtml";><head><title>R: Save the contents of 
SparkDataFrame as a JSON file</title>
+<meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
+<link rel="stylesheet" type="text/css" href="R.css" />
+
+<link rel="stylesheet" 
href="https://cdnjs.cloudflare.com/ajax/libs/highlight.js/8.3/styles/github.min.css";>
+<script 
src="https://cdnjs.cloudflare.com/ajax/libs/highlight.js/8.3/highlight.min.js";></script>
+<script 
src="https://cdnjs.cloudflare.com/ajax/libs/highlight.js/8.3/languages/r.min.js";></script>
+<script>hljs.initHighlightingOnLoad();</script>
+</head><body>
+
+<table width="100%" summary="page for write.json {SparkR}"><tr><td>write.json 
{SparkR}</td><td style="text-align: right;">R Documentation</td></tr></table>
+
+<h2>Save the contents of SparkDataFrame as a JSON file</h2>
+
+<h3>Description</h3>
+
+<p>Save the contents of a SparkDataFrame as a JSON file (<a 
href="http://jsonlines.org/";>
+JSON Lines text format or newline-delimited JSON</a>). Files written out
+with this method can be read back in as a SparkDataFrame using read.json().
+</p>
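+
+<p>A minimal round trip, as a hedged sketch (the output directory is an
+assumption):</p>
+
+<pre><code class="r">write.json(df, &quot;/tmp/people-json&quot;, mode = &quot;overwrite&quot;)
+df2 &lt;- read.json(&quot;/tmp/people-json&quot;)   # reads the JSON Lines files back
+</code></pre>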
+
+
+<h3>Usage</h3>
+
+<pre>
+write.json(x, path, ...)
+
+## S4 method for signature 'SparkDataFrame,character'
+write.json(x, path, mode = "error",
+  ...)
+</pre>
+
+
+<h3>Arguments</h3>
+
+<table summary="R argblock">
+<tr valign="top"><td><code>x</code></td>
+<td>
+<p>A SparkDataFrame</p>
+</td></tr>
+<tr valign="top"><td><code>path</code></td>
+<td>
+<p>The directory where the file is saved</p>
+</td></tr>
+<tr valign="top"><td><code>...</code></td>
+<td>
+<p>additional argument(s) passed to the method.</p>
+</td></tr>
+<tr valign="top"><td><code>mode</code></td>
+<td>
+<p>one of 'append', 'overwrite', 'error', 'errorifexists', 'ignore';
+the save mode to use (defaults to 'error')</p>
+</td></tr>
+</table>
+
+
+<h3>Note</h3>
+
+<p>write.json since 1.6.0
+</p>
+
+
+<h3>See Also</h3>
+
+<p>Other SparkDataFrame functions: <code><a 
href="SparkDataFrame.html">SparkDataFrame-class</a></code>,
+<code><a href="summarize.html">agg</a></code>, <code><a 
href="alias.html">alias</a></code>,
+<code><a href="arrange.html">arrange</a></code>, <code><a 
href="as.data.frame.html">as.data.frame</a></code>,
+<code><a href="attach.html">attach,SparkDataFrame-method</a></code>,
+<code><a href="broadcast.html">broadcast</a></code>, <code><a 
href="cache.html">cache</a></code>,
+<code><a href="checkpoint.html">checkpoint</a></code>, <code><a 
href="coalesce.html">coalesce</a></code>,
+<code><a href="collect.html">collect</a></code>, <code><a 
href="columns.html">colnames</a></code>,
+<code><a href="coltypes.html">coltypes</a></code>,
+<code><a 
href="createOrReplaceTempView.html">createOrReplaceTempView</a></code>,
+<code><a href="crossJoin.html">crossJoin</a></code>, <code><a 
href="cube.html">cube</a></code>,
+<code><a href="dapplyCollect.html">dapplyCollect</a></code>, <code><a 
href="dapply.html">dapply</a></code>,
+<code><a href="describe.html">describe</a></code>, <code><a 
href="dim.html">dim</a></code>,
+<code><a href="distinct.html">distinct</a></code>, <code><a 
href="dropDuplicates.html">dropDuplicates</a></code>,
+<code><a href="nafunctions.html">dropna</a></code>, <code><a 
href="drop.html">drop</a></code>,
+<code><a href="dtypes.html">dtypes</a></code>, <code><a 
href="exceptAll.html">exceptAll</a></code>,
+<code><a href="except.html">except</a></code>, <code><a 
href="explain.html">explain</a></code>,
+<code><a href="filter.html">filter</a></code>, <code><a 
href="first.html">first</a></code>,
+<code><a href="gapplyCollect.html">gapplyCollect</a></code>, <code><a 
href="gapply.html">gapply</a></code>,
+<code><a href="getNumPartitions.html">getNumPartitions</a></code>, <code><a 
href="groupBy.html">group_by</a></code>,
+<code><a href="head.html">head</a></code>, <code><a 
href="hint.html">hint</a></code>,
+<code><a href="histogram.html">histogram</a></code>, <code><a 
href="insertInto.html">insertInto</a></code>,
+<code><a href="intersectAll.html">intersectAll</a></code>, <code><a 
href="intersect.html">intersect</a></code>,
+<code><a href="isLocal.html">isLocal</a></code>, <code><a 
href="isStreaming.html">isStreaming</a></code>,
+<code><a href="join.html">join</a></code>, <code><a 
href="limit.html">limit</a></code>,
+<code><a href="localCheckpoint.html">localCheckpoint</a></code>, <code><a 
href="merge.html">merge</a></code>,
+<code><a href="mutate.html">mutate</a></code>, <code><a 
href="ncol.html">ncol</a></code>,
+<code><a href="nrow.html">nrow</a></code>, <code><a 
href="persist.html">persist</a></code>,
+<code><a href="printSchema.html">printSchema</a></code>, <code><a 
href="randomSplit.html">randomSplit</a></code>,
+<code><a href="rbind.html">rbind</a></code>, <code><a 
href="rename.html">rename</a></code>,
+<code><a href="repartitionByRange.html">repartitionByRange</a></code>,
+<code><a href="repartition.html">repartition</a></code>, <code><a 
href="rollup.html">rollup</a></code>,
+<code><a href="sample.html">sample</a></code>, <code><a 
href="saveAsTable.html">saveAsTable</a></code>,
+<code><a href="schema.html">schema</a></code>, <code><a 
href="selectExpr.html">selectExpr</a></code>,
+<code><a href="select.html">select</a></code>, <code><a 
href="showDF.html">showDF</a></code>,
+<code><a href="show.html">show</a></code>, <code><a 
href="storageLevel.html">storageLevel</a></code>,
+<code><a href="str.html">str</a></code>, <code><a 
href="subset.html">subset</a></code>,
+<code><a href="summary.html">summary</a></code>, <code><a 
href="take.html">take</a></code>,
+<code><a href="toJSON.html">toJSON</a></code>, <code><a 
href="unionByName.html">unionByName</a></code>,
+<code><a href="union.html">union</a></code>, <code><a 
href="unpersist.html">unpersist</a></code>,
+<code><a href="withColumn.html">withColumn</a></code>, <code><a 
href="withWatermark.html">withWatermark</a></code>,
+<code><a href="with.html">with</a></code>, <code><a 
href="write.df.html">write.df</a></code>,
+<code><a href="write.jdbc.html">write.jdbc</a></code>, <code><a 
href="write.orc.html">write.orc</a></code>,
+<code><a href="write.parquet.html">write.parquet</a></code>, <code><a 
href="write.stream.html">write.stream</a></code>,
+<code><a href="write.text.html">write.text</a></code>
+</p>
+
+
+<h3>Examples</h3>
+
+<pre><code class="r">## Not run: 
+##D sparkR.session()
+##D path &lt;- &quot;path/to/file.json&quot;
+##D df &lt;- read.json(path)
+##D write.json(df, &quot;/tmp/sparkr-tmp/&quot;)
+## End(Not run)
+</code></pre>
+
+
+<hr /><div style="text-align: center;">[Package <em>SparkR</em> version 2.4.0 
<a href="00Index.html">Index</a>]</div>
+</body></html>

http://git-wip-us.apache.org/repos/asf/spark-website/blob/52917ac4/site/docs/2.4.0/api/R/write.ml.html
----------------------------------------------------------------------
diff --git a/site/docs/2.4.0/api/R/write.ml.html 
b/site/docs/2.4.0/api/R/write.ml.html
new file mode 100644
index 0000000..44695c9
--- /dev/null
+++ b/site/docs/2.4.0/api/R/write.ml.html
@@ -0,0 +1,62 @@
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" 
"http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd"><html 
xmlns="http://www.w3.org/1999/xhtml"><head><title>R: Saves the MLlib model to 
the input path</title>
+<meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
+<link rel="stylesheet" type="text/css" href="R.css" />
+</head><body>
+
+<table width="100%" summary="page for write.ml {SparkR}"><tr><td>write.ml 
{SparkR}</td><td style="text-align: right;">R Documentation</td></tr></table>
+
+<h2>Saves the MLlib model to the input path</h2>
+
+<h3>Description</h3>
+
+<p>Saves the MLlib model to the input path. For more information, see the 
specific
+MLlib model below.
+</p>
+
+
+<h3>Usage</h3>
+
+<pre>
+write.ml(object, path, ...)
+</pre>
+
+
+<h3>Arguments</h3>
+
+<table summary="R argblock">
+<tr valign="top"><td><code>object</code></td>
+<td>
+<p>a fitted ML model object.</p>
+</td></tr>
+<tr valign="top"><td><code>path</code></td>
+<td>
+<p>the directory where the model is saved.</p>
+</td></tr>
+<tr valign="top"><td><code>...</code></td>
+<td>
+<p>additional argument(s) passed to the method.</p>
+</td></tr>
+</table>
+
+
+<h3>See Also</h3>
+
+<p><a href="spark.als.html">spark.als</a>, <a 
href="spark.bisectingKmeans.html">spark.bisectingKmeans</a>, <a 
href="spark.decisionTree.html">spark.decisionTree</a>,
+</p>
+<p><a href="spark.gaussianMixture.html">spark.gaussianMixture</a>, <a 
href="spark.gbt.html">spark.gbt</a>,
+</p>
+<p><a href="spark.glm.html">spark.glm</a>, <a href="glm.html">glm</a>, <a 
href="spark.isoreg.html">spark.isoreg</a>,
+</p>
+<p><a href="spark.kmeans.html">spark.kmeans</a>,
+</p>
+<p><a href="spark.lda.html">spark.lda</a>, <a 
href="spark.logit.html">spark.logit</a>,
+</p>
+<p><a href="spark.mlp.html">spark.mlp</a>, <a 
href="spark.naiveBayes.html">spark.naiveBayes</a>,
+</p>
+<p><a href="spark.randomForest.html">spark.randomForest</a>, <a 
href="spark.survreg.html">spark.survreg</a>, <a 
href="spark.svmLinear.html">spark.svmLinear</a>,
+</p>
+<p><a href="read.ml.html">read.ml</a>
+</p>
+
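+<h3>Examples</h3>
+
+<p>The generated page stops at the cross references above, so the block below is only an
+illustrative sketch, not part of the shipped documentation: it assumes a
+<code>spark.glm</code> model fitted on the built-in Titanic data and an output directory
+&quot;/tmp/sparkr-glm-model&quot; chosen for the example, and shows the save/load round
+trip with <code>read.ml</code>.
+</p>
+
+<pre><code class="r">## Not run: 
+##D sparkR.session()
+##D # fit a generalized linear model on an illustrative SparkDataFrame
+##D t &lt;- as.data.frame(Titanic)
+##D training &lt;- createDataFrame(t)
+##D model &lt;- spark.glm(training, Freq ~ Sex + Age, family = &quot;gaussian&quot;)
+##D # save the fitted model to a directory, then load it back with read.ml
+##D write.ml(model, &quot;/tmp/sparkr-glm-model&quot;)
+##D restored &lt;- read.ml(&quot;/tmp/sparkr-glm-model&quot;)
+##D summary(restored)
+## End(Not run)
+</code></pre>
+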
+<hr /><div style="text-align: center;">[Package <em>SparkR</em> version 2.4.0 
<a href="00Index.html">Index</a>]</div>
+</body></html>

http://git-wip-us.apache.org/repos/asf/spark-website/blob/52917ac4/site/docs/2.4.0/api/R/write.orc.html
----------------------------------------------------------------------
diff --git a/site/docs/2.4.0/api/R/write.orc.html 
b/site/docs/2.4.0/api/R/write.orc.html
new file mode 100644
index 0000000..259cacc
--- /dev/null
+++ b/site/docs/2.4.0/api/R/write.orc.html
@@ -0,0 +1,123 @@
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" 
"http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd"><html 
xmlns="http://www.w3.org/1999/xhtml"><head><title>R: Save the contents of 
SparkDataFrame as an ORC file,...</title>
+<meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
+<link rel="stylesheet" type="text/css" href="R.css" />
+
+<link rel="stylesheet" 
href="https://cdnjs.cloudflare.com/ajax/libs/highlight.js/8.3/styles/github.min.css";>
+<script 
src="https://cdnjs.cloudflare.com/ajax/libs/highlight.js/8.3/highlight.min.js";></script>
+<script 
src="https://cdnjs.cloudflare.com/ajax/libs/highlight.js/8.3/languages/r.min.js";></script>
+<script>hljs.initHighlightingOnLoad();</script>
+</head><body>
+
+<table width="100%" summary="page for write.orc {SparkR}"><tr><td>write.orc 
{SparkR}</td><td style="text-align: right;">R Documentation</td></tr></table>
+
+<h2>Save the contents of a SparkDataFrame as an ORC file, preserving the 
schema.</h2>
+
+<h3>Description</h3>
+
+<p>Save the contents of a SparkDataFrame as an ORC file, preserving the 
schema. Files written out
+with this method can be read back in as a SparkDataFrame using read.orc().
+</p>
+
+
+<h3>Usage</h3>
+
+<pre>
+write.orc(x, path, ...)
+
+## S4 method for signature 'SparkDataFrame,character'
+write.orc(x, path, mode = "error",
+  ...)
+</pre>
+
+
+<h3>Arguments</h3>
+
+<table summary="R argblock">
+<tr valign="top"><td><code>x</code></td>
+<td>
+<p>A SparkDataFrame</p>
+</td></tr>
+<tr valign="top"><td><code>path</code></td>
+<td>
+<p>The directory where the file is saved</p>
+</td></tr>
+<tr valign="top"><td><code>...</code></td>
+<td>
+<p>additional argument(s) passed to the method.</p>
+</td></tr>
+<tr valign="top"><td><code>mode</code></td>
+<td>
+<p>the save mode: one of 'append', 'overwrite', 'error', 'errorifexists', or 'ignore'
+('error' by default)</p>
+</td></tr>
+</table>
+
+
+<h3>Note</h3>
+
+<p>write.orc since 2.0.0
+</p>
+
+
+<h3>See Also</h3>
+
+<p>Other SparkDataFrame functions: <code><a 
href="SparkDataFrame.html">SparkDataFrame-class</a></code>,
+<code><a href="summarize.html">agg</a></code>, <code><a 
href="alias.html">alias</a></code>,
+<code><a href="arrange.html">arrange</a></code>, <code><a 
href="as.data.frame.html">as.data.frame</a></code>,
+<code><a href="attach.html">attach,SparkDataFrame-method</a></code>,
+<code><a href="broadcast.html">broadcast</a></code>, <code><a 
href="cache.html">cache</a></code>,
+<code><a href="checkpoint.html">checkpoint</a></code>, <code><a 
href="coalesce.html">coalesce</a></code>,
+<code><a href="collect.html">collect</a></code>, <code><a 
href="columns.html">colnames</a></code>,
+<code><a href="coltypes.html">coltypes</a></code>,
+<code><a 
href="createOrReplaceTempView.html">createOrReplaceTempView</a></code>,
+<code><a href="crossJoin.html">crossJoin</a></code>, <code><a 
href="cube.html">cube</a></code>,
+<code><a href="dapplyCollect.html">dapplyCollect</a></code>, <code><a 
href="dapply.html">dapply</a></code>,
+<code><a href="describe.html">describe</a></code>, <code><a 
href="dim.html">dim</a></code>,
+<code><a href="distinct.html">distinct</a></code>, <code><a 
href="dropDuplicates.html">dropDuplicates</a></code>,
+<code><a href="nafunctions.html">dropna</a></code>, <code><a 
href="drop.html">drop</a></code>,
+<code><a href="dtypes.html">dtypes</a></code>, <code><a 
href="exceptAll.html">exceptAll</a></code>,
+<code><a href="except.html">except</a></code>, <code><a 
href="explain.html">explain</a></code>,
+<code><a href="filter.html">filter</a></code>, <code><a 
href="first.html">first</a></code>,
+<code><a href="gapplyCollect.html">gapplyCollect</a></code>, <code><a 
href="gapply.html">gapply</a></code>,
+<code><a href="getNumPartitions.html">getNumPartitions</a></code>, <code><a 
href="groupBy.html">group_by</a></code>,
+<code><a href="head.html">head</a></code>, <code><a 
href="hint.html">hint</a></code>,
+<code><a href="histogram.html">histogram</a></code>, <code><a 
href="insertInto.html">insertInto</a></code>,
+<code><a href="intersectAll.html">intersectAll</a></code>, <code><a 
href="intersect.html">intersect</a></code>,
+<code><a href="isLocal.html">isLocal</a></code>, <code><a 
href="isStreaming.html">isStreaming</a></code>,
+<code><a href="join.html">join</a></code>, <code><a 
href="limit.html">limit</a></code>,
+<code><a href="localCheckpoint.html">localCheckpoint</a></code>, <code><a 
href="merge.html">merge</a></code>,
+<code><a href="mutate.html">mutate</a></code>, <code><a 
href="ncol.html">ncol</a></code>,
+<code><a href="nrow.html">nrow</a></code>, <code><a 
href="persist.html">persist</a></code>,
+<code><a href="printSchema.html">printSchema</a></code>, <code><a 
href="randomSplit.html">randomSplit</a></code>,
+<code><a href="rbind.html">rbind</a></code>, <code><a 
href="rename.html">rename</a></code>,
+<code><a href="repartitionByRange.html">repartitionByRange</a></code>,
+<code><a href="repartition.html">repartition</a></code>, <code><a 
href="rollup.html">rollup</a></code>,
+<code><a href="sample.html">sample</a></code>, <code><a 
href="saveAsTable.html">saveAsTable</a></code>,
+<code><a href="schema.html">schema</a></code>, <code><a 
href="selectExpr.html">selectExpr</a></code>,
+<code><a href="select.html">select</a></code>, <code><a 
href="showDF.html">showDF</a></code>,
+<code><a href="show.html">show</a></code>, <code><a 
href="storageLevel.html">storageLevel</a></code>,
+<code><a href="str.html">str</a></code>, <code><a 
href="subset.html">subset</a></code>,
+<code><a href="summary.html">summary</a></code>, <code><a 
href="take.html">take</a></code>,
+<code><a href="toJSON.html">toJSON</a></code>, <code><a 
href="unionByName.html">unionByName</a></code>,
+<code><a href="union.html">union</a></code>, <code><a 
href="unpersist.html">unpersist</a></code>,
+<code><a href="withColumn.html">withColumn</a></code>, <code><a 
href="withWatermark.html">withWatermark</a></code>,
+<code><a href="with.html">with</a></code>, <code><a 
href="write.df.html">write.df</a></code>,
+<code><a href="write.jdbc.html">write.jdbc</a></code>, <code><a 
href="write.json.html">write.json</a></code>,
+<code><a href="write.parquet.html">write.parquet</a></code>, <code><a 
href="write.stream.html">write.stream</a></code>,
+<code><a href="write.text.html">write.text</a></code>
+</p>
+
+
+<h3>Examples</h3>
+
+<pre><code class="r">## Not run: 
+##D sparkR.session()
+##D path &lt;- &quot;path/to/file.json&quot;
+##D df &lt;- read.json(path)
+##D write.orc(df, &quot;/tmp/sparkr-tmp1/&quot;)
+## End(Not run)
+</code></pre>
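+
+<p>A minimal additional sketch (not from the generated page) of the <code>mode</code>
+argument described above, reusing <code>df</code> and the illustrative
+&quot;/tmp/sparkr-tmp1/&quot; directory from the example: with the default
+&quot;error&quot; mode a second write to an existing directory fails, while
+&quot;overwrite&quot; replaces it, and the result can be read back with
+<code>read.orc</code>.
+</p>
+
+<pre><code class="r">## Not run: 
+##D # second write to the same directory: &quot;overwrite&quot; replaces the previous files
+##D write.orc(df, &quot;/tmp/sparkr-tmp1/&quot;, mode = &quot;overwrite&quot;)
+##D # read the ORC output back in as a SparkDataFrame
+##D df2 &lt;- read.orc(&quot;/tmp/sparkr-tmp1/&quot;)
+##D printSchema(df2)
+## End(Not run)
+</code></pre>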
+
+
+<hr /><div style="text-align: center;">[Package <em>SparkR</em> version 2.4.0 
<a href="00Index.html">Index</a>]</div>
+</body></html>

http://git-wip-us.apache.org/repos/asf/spark-website/blob/52917ac4/site/docs/2.4.0/api/R/write.parquet.html
----------------------------------------------------------------------
diff --git a/site/docs/2.4.0/api/R/write.parquet.html 
b/site/docs/2.4.0/api/R/write.parquet.html
new file mode 100644
index 0000000..f3fc770
--- /dev/null
+++ b/site/docs/2.4.0/api/R/write.parquet.html
@@ -0,0 +1,131 @@
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" 
"http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd"><html 
xmlns="http://www.w3.org/1999/xhtml"><head><title>R: Save the contents of 
SparkDataFrame as a Parquet file,...</title>
+<meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
+<link rel="stylesheet" type="text/css" href="R.css" />
+
+<link rel="stylesheet" 
href="https://cdnjs.cloudflare.com/ajax/libs/highlight.js/8.3/styles/github.min.css";>
+<script 
src="https://cdnjs.cloudflare.com/ajax/libs/highlight.js/8.3/highlight.min.js";></script>
+<script 
src="https://cdnjs.cloudflare.com/ajax/libs/highlight.js/8.3/languages/r.min.js";></script>
+<script>hljs.initHighlightingOnLoad();</script>
+</head><body>
+
+<table width="100%" summary="page for write.parquet 
{SparkR}"><tr><td>write.parquet {SparkR}</td><td style="text-align: right;">R 
Documentation</td></tr></table>
+
+<h2>Save the contents of a SparkDataFrame as a Parquet file, preserving the 
schema.</h2>
+
+<h3>Description</h3>
+
+<p>Save the contents of a SparkDataFrame as a Parquet file, preserving the 
schema. Files written out
+with this method can be read back in as a SparkDataFrame using read.parquet().
+</p>
+
+
+<h3>Usage</h3>
+
+<pre>
+write.parquet(x, path, ...)
+
+saveAsParquetFile(x, path)
+
+## S4 method for signature 'SparkDataFrame,character'
+write.parquet(x, path,
+  mode = "error", ...)
+
+## S4 method for signature 'SparkDataFrame,character'
+saveAsParquetFile(x, path)
+</pre>
+
+
+<h3>Arguments</h3>
+
+<table summary="R argblock">
+<tr valign="top"><td><code>x</code></td>
+<td>
+<p>A SparkDataFrame</p>
+</td></tr>
+<tr valign="top"><td><code>path</code></td>
+<td>
+<p>The directory where the file is saved</p>
+</td></tr>
+<tr valign="top"><td><code>...</code></td>
+<td>
+<p>additional argument(s) passed to the method.</p>
+</td></tr>
+<tr valign="top"><td><code>mode</code></td>
+<td>
+<p>the save mode: one of 'append', 'overwrite', 'error', 'errorifexists', or 'ignore'
+('error' by default)</p>
+</td></tr>
+</table>
+
+
+<h3>Note</h3>
+
+<p>write.parquet since 1.6.0
+</p>
+<p>saveAsParquetFile since 1.4.0
+</p>
+
+
+<h3>See Also</h3>
+
+<p>Other SparkDataFrame functions: <code><a 
href="SparkDataFrame.html">SparkDataFrame-class</a></code>,
+<code><a href="summarize.html">agg</a></code>, <code><a 
href="alias.html">alias</a></code>,
+<code><a href="arrange.html">arrange</a></code>, <code><a 
href="as.data.frame.html">as.data.frame</a></code>,
+<code><a href="attach.html">attach,SparkDataFrame-method</a></code>,
+<code><a href="broadcast.html">broadcast</a></code>, <code><a 
href="cache.html">cache</a></code>,
+<code><a href="checkpoint.html">checkpoint</a></code>, <code><a 
href="coalesce.html">coalesce</a></code>,
+<code><a href="collect.html">collect</a></code>, <code><a 
href="columns.html">colnames</a></code>,
+<code><a href="coltypes.html">coltypes</a></code>,
+<code><a 
href="createOrReplaceTempView.html">createOrReplaceTempView</a></code>,
+<code><a href="crossJoin.html">crossJoin</a></code>, <code><a 
href="cube.html">cube</a></code>,
+<code><a href="dapplyCollect.html">dapplyCollect</a></code>, <code><a 
href="dapply.html">dapply</a></code>,
+<code><a href="describe.html">describe</a></code>, <code><a 
href="dim.html">dim</a></code>,
+<code><a href="distinct.html">distinct</a></code>, <code><a 
href="dropDuplicates.html">dropDuplicates</a></code>,
+<code><a href="nafunctions.html">dropna</a></code>, <code><a 
href="drop.html">drop</a></code>,
+<code><a href="dtypes.html">dtypes</a></code>, <code><a 
href="exceptAll.html">exceptAll</a></code>,
+<code><a href="except.html">except</a></code>, <code><a 
href="explain.html">explain</a></code>,
+<code><a href="filter.html">filter</a></code>, <code><a 
href="first.html">first</a></code>,
+<code><a href="gapplyCollect.html">gapplyCollect</a></code>, <code><a 
href="gapply.html">gapply</a></code>,
+<code><a href="getNumPartitions.html">getNumPartitions</a></code>, <code><a 
href="groupBy.html">group_by</a></code>,
+<code><a href="head.html">head</a></code>, <code><a 
href="hint.html">hint</a></code>,
+<code><a href="histogram.html">histogram</a></code>, <code><a 
href="insertInto.html">insertInto</a></code>,
+<code><a href="intersectAll.html">intersectAll</a></code>, <code><a 
href="intersect.html">intersect</a></code>,
+<code><a href="isLocal.html">isLocal</a></code>, <code><a 
href="isStreaming.html">isStreaming</a></code>,
+<code><a href="join.html">join</a></code>, <code><a 
href="limit.html">limit</a></code>,
+<code><a href="localCheckpoint.html">localCheckpoint</a></code>, <code><a 
href="merge.html">merge</a></code>,
+<code><a href="mutate.html">mutate</a></code>, <code><a 
href="ncol.html">ncol</a></code>,
+<code><a href="nrow.html">nrow</a></code>, <code><a 
href="persist.html">persist</a></code>,
+<code><a href="printSchema.html">printSchema</a></code>, <code><a 
href="randomSplit.html">randomSplit</a></code>,
+<code><a href="rbind.html">rbind</a></code>, <code><a 
href="rename.html">rename</a></code>,
+<code><a href="repartitionByRange.html">repartitionByRange</a></code>,
+<code><a href="repartition.html">repartition</a></code>, <code><a 
href="rollup.html">rollup</a></code>,
+<code><a href="sample.html">sample</a></code>, <code><a 
href="saveAsTable.html">saveAsTable</a></code>,
+<code><a href="schema.html">schema</a></code>, <code><a 
href="selectExpr.html">selectExpr</a></code>,
+<code><a href="select.html">select</a></code>, <code><a 
href="showDF.html">showDF</a></code>,
+<code><a href="show.html">show</a></code>, <code><a 
href="storageLevel.html">storageLevel</a></code>,
+<code><a href="str.html">str</a></code>, <code><a 
href="subset.html">subset</a></code>,
+<code><a href="summary.html">summary</a></code>, <code><a 
href="take.html">take</a></code>,
+<code><a href="toJSON.html">toJSON</a></code>, <code><a 
href="unionByName.html">unionByName</a></code>,
+<code><a href="union.html">union</a></code>, <code><a 
href="unpersist.html">unpersist</a></code>,
+<code><a href="withColumn.html">withColumn</a></code>, <code><a 
href="withWatermark.html">withWatermark</a></code>,
+<code><a href="with.html">with</a></code>, <code><a 
href="write.df.html">write.df</a></code>,
+<code><a href="write.jdbc.html">write.jdbc</a></code>, <code><a 
href="write.json.html">write.json</a></code>,
+<code><a href="write.orc.html">write.orc</a></code>, <code><a 
href="write.stream.html">write.stream</a></code>,
+<code><a href="write.text.html">write.text</a></code>
+</p>
+
+
+<h3>Examples</h3>
+
+<pre><code class="r">## Not run: 
+##D sparkR.session()
+##D path &lt;- &quot;path/to/file.json&quot;
+##D df &lt;- read.json(path)
+##D write.parquet(df, &quot;/tmp/sparkr-tmp1/&quot;)
+##D saveAsParquetFile(df, &quot;/tmp/sparkr-tmp2/&quot;)
+## End(Not run)
+</code></pre>
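+
+<p>A minimal follow-up sketch (not from the generated page) of the read-back mentioned in
+the Description, reusing the illustrative &quot;/tmp/sparkr-tmp2/&quot; directory written
+by <code>saveAsParquetFile</code> above: the Parquet files are loaded again with
+<code>read.parquet</code> and keep the original schema.
+</p>
+
+<pre><code class="r">## Not run: 
+##D # files written above can be read back in as a SparkDataFrame
+##D df2 &lt;- read.parquet(&quot;/tmp/sparkr-tmp2/&quot;)
+##D printSchema(df2)
+## End(Not run)
+</code></pre>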
+
+
+<hr /><div style="text-align: center;">[Package <em>SparkR</em> version 2.4.0 
<a href="00Index.html">Index</a>]</div>
+</body></html>

http://git-wip-us.apache.org/repos/asf/spark-website/blob/52917ac4/site/docs/2.4.0/api/R/write.stream.html
----------------------------------------------------------------------
diff --git a/site/docs/2.4.0/api/R/write.stream.html 
b/site/docs/2.4.0/api/R/write.stream.html
new file mode 100644
index 0000000..5657d9c
--- /dev/null
+++ b/site/docs/2.4.0/api/R/write.stream.html
@@ -0,0 +1,183 @@
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" 
"http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd"><html 
xmlns="http://www.w3.org/1999/xhtml"><head><title>R: Write the streaming 
SparkDataFrame to a data source.</title>
+<meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
+<link rel="stylesheet" type="text/css" href="R.css" />
+
+<link rel="stylesheet" 
href="https://cdnjs.cloudflare.com/ajax/libs/highlight.js/8.3/styles/github.min.css";>
+<script 
src="https://cdnjs.cloudflare.com/ajax/libs/highlight.js/8.3/highlight.min.js";></script>
+<script 
src="https://cdnjs.cloudflare.com/ajax/libs/highlight.js/8.3/languages/r.min.js";></script>
+<script>hljs.initHighlightingOnLoad();</script>
+</head><body>
+
+<table width="100%" summary="page for write.stream 
{SparkR}"><tr><td>write.stream {SparkR}</td><td style="text-align: right;">R 
Documentation</td></tr></table>
+
+<h2>Write the streaming SparkDataFrame to a data source.</h2>
+
+<h3>Description</h3>
+
+<p>The data source is specified by the <code>source</code> and a set of 
options (...).
+If <code>source</code> is not specified, the default data source configured by
+spark.sql.sources.default will be used.
+</p>
+
+
+<h3>Usage</h3>
+
+<pre>
+write.stream(df, source = NULL, outputMode = NULL, ...)
+
+## S4 method for signature 'SparkDataFrame'
+write.stream(df, source = NULL,
+  outputMode = NULL, partitionBy = NULL,
+  trigger.processingTime = NULL, trigger.once = NULL, ...)
+</pre>
+
+
+<h3>Arguments</h3>
+
+<table summary="R argblock">
+<tr valign="top"><td><code>df</code></td>
+<td>
+<p>a streaming SparkDataFrame.</p>
+</td></tr>
+<tr valign="top"><td><code>source</code></td>
+<td>
+<p>a name for an external data source.</p>
+</td></tr>
+<tr valign="top"><td><code>outputMode</code></td>
+<td>
+<p>one of 'append', 'complete', 'update'.</p>
+</td></tr>
+<tr valign="top"><td><code>...</code></td>
+<td>
+<p>additional named options specific to the external data source.</p>
+</td></tr>
+<tr valign="top"><td><code>partitionBy</code></td>
+<td>
+<p>a name or a list of names of columns to partition the output by on the file
+system. If specified, the output is laid out on the file system similarly to Hive's
+partitioning scheme.</p>
+</td></tr>
+<tr valign="top"><td><code>trigger.processingTime</code></td>
+<td>
+<p>a processing time interval as a string, e.g. '5 seconds',
+'1 minute'. This is a trigger that runs a query periodically based on the 
processing
+time. If the value is '0 seconds', the query will run as fast as possible; this is the
+default. Only one trigger can be set.</p>
+</td></tr>
+<tr valign="top"><td><code>trigger.once</code></td>
+<td>
+<p>a logical; must be set to <code>TRUE</code>. This is a trigger that processes only
+one batch of data in a streaming query and then terminates the query. Only one
+trigger can be set.</p>
+</td></tr>
+</table>
+
+
+<h3>Details</h3>
+
+<p>Additionally, <code>outputMode</code> specifies how the data of a streaming 
SparkDataFrame is written to an
+output data source. There are three modes:
+</p>
+
+<ul>
+<li><p> append: Only the new rows in the streaming SparkDataFrame will be 
written out. This
+output mode can only be used in queries that do not contain any aggregation.
+</p>
+</li>
+<li><p> complete: All the rows in the streaming SparkDataFrame will be written 
out every time
+there are some updates. This output mode can only be used in queries that
+contain aggregations.
+</p>
+</li>
+<li><p> update: Only the rows that were updated in the streaming 
SparkDataFrame will be written
+out every time there are some updates. If the query doesn't contain 
aggregations,
+it will be equivalent to <code>append</code> mode.
+</p>
+</li></ul>
+
+
+
+<h3>Note</h3>
+
+<p>write.stream since 2.2.0
+</p>
+<p>experimental
+</p>
+
+
+<h3>See Also</h3>
+
+<p><a href="read.stream.html">read.stream</a>
+</p>
+<p>Other SparkDataFrame functions: <code><a 
href="SparkDataFrame.html">SparkDataFrame-class</a></code>,
+<code><a href="summarize.html">agg</a></code>, <code><a 
href="alias.html">alias</a></code>,
+<code><a href="arrange.html">arrange</a></code>, <code><a 
href="as.data.frame.html">as.data.frame</a></code>,
+<code><a href="attach.html">attach,SparkDataFrame-method</a></code>,
+<code><a href="broadcast.html">broadcast</a></code>, <code><a 
href="cache.html">cache</a></code>,
+<code><a href="checkpoint.html">checkpoint</a></code>, <code><a 
href="coalesce.html">coalesce</a></code>,
+<code><a href="collect.html">collect</a></code>, <code><a 
href="columns.html">colnames</a></code>,
+<code><a href="coltypes.html">coltypes</a></code>,
+<code><a 
href="createOrReplaceTempView.html">createOrReplaceTempView</a></code>,
+<code><a href="crossJoin.html">crossJoin</a></code>, <code><a 
href="cube.html">cube</a></code>,
+<code><a href="dapplyCollect.html">dapplyCollect</a></code>, <code><a 
href="dapply.html">dapply</a></code>,
+<code><a href="describe.html">describe</a></code>, <code><a 
href="dim.html">dim</a></code>,
+<code><a href="distinct.html">distinct</a></code>, <code><a 
href="dropDuplicates.html">dropDuplicates</a></code>,
+<code><a href="nafunctions.html">dropna</a></code>, <code><a 
href="drop.html">drop</a></code>,
+<code><a href="dtypes.html">dtypes</a></code>, <code><a 
href="exceptAll.html">exceptAll</a></code>,
+<code><a href="except.html">except</a></code>, <code><a 
href="explain.html">explain</a></code>,
+<code><a href="filter.html">filter</a></code>, <code><a 
href="first.html">first</a></code>,
+<code><a href="gapplyCollect.html">gapplyCollect</a></code>, <code><a 
href="gapply.html">gapply</a></code>,
+<code><a href="getNumPartitions.html">getNumPartitions</a></code>, <code><a 
href="groupBy.html">group_by</a></code>,
+<code><a href="head.html">head</a></code>, <code><a 
href="hint.html">hint</a></code>,
+<code><a href="histogram.html">histogram</a></code>, <code><a 
href="insertInto.html">insertInto</a></code>,
+<code><a href="intersectAll.html">intersectAll</a></code>, <code><a 
href="intersect.html">intersect</a></code>,
+<code><a href="isLocal.html">isLocal</a></code>, <code><a 
href="isStreaming.html">isStreaming</a></code>,
+<code><a href="join.html">join</a></code>, <code><a 
href="limit.html">limit</a></code>,
+<code><a href="localCheckpoint.html">localCheckpoint</a></code>, <code><a 
href="merge.html">merge</a></code>,
+<code><a href="mutate.html">mutate</a></code>, <code><a 
href="ncol.html">ncol</a></code>,
+<code><a href="nrow.html">nrow</a></code>, <code><a 
href="persist.html">persist</a></code>,
+<code><a href="printSchema.html">printSchema</a></code>, <code><a 
href="randomSplit.html">randomSplit</a></code>,
+<code><a href="rbind.html">rbind</a></code>, <code><a 
href="rename.html">rename</a></code>,
+<code><a href="repartitionByRange.html">repartitionByRange</a></code>,
+<code><a href="repartition.html">repartition</a></code>, <code><a 
href="rollup.html">rollup</a></code>,
+<code><a href="sample.html">sample</a></code>, <code><a 
href="saveAsTable.html">saveAsTable</a></code>,
+<code><a href="schema.html">schema</a></code>, <code><a 
href="selectExpr.html">selectExpr</a></code>,
+<code><a href="select.html">select</a></code>, <code><a 
href="showDF.html">showDF</a></code>,
+<code><a href="show.html">show</a></code>, <code><a 
href="storageLevel.html">storageLevel</a></code>,
+<code><a href="str.html">str</a></code>, <code><a 
href="subset.html">subset</a></code>,
+<code><a href="summary.html">summary</a></code>, <code><a 
href="take.html">take</a></code>,
+<code><a href="toJSON.html">toJSON</a></code>, <code><a 
href="unionByName.html">unionByName</a></code>,
+<code><a href="union.html">union</a></code>, <code><a 
href="unpersist.html">unpersist</a></code>,
+<code><a href="withColumn.html">withColumn</a></code>, <code><a 
href="withWatermark.html">withWatermark</a></code>,
+<code><a href="with.html">with</a></code>, <code><a 
href="write.df.html">write.df</a></code>,
+<code><a href="write.jdbc.html">write.jdbc</a></code>, <code><a 
href="write.json.html">write.json</a></code>,
+<code><a href="write.orc.html">write.orc</a></code>, <code><a 
href="write.parquet.html">write.parquet</a></code>,
+<code><a href="write.text.html">write.text</a></code>
+</p>
+
+
+<h3>Examples</h3>
+
+<pre><code class="r">## Not run: 
+##D sparkR.session()
+##D df &lt;- read.stream(&quot;socket&quot;, host = &quot;localhost&quot;, 
port = 9999)
+##D isStreaming(df)
+##D wordCounts &lt;- count(group_by(df, &quot;value&quot;))
+##D 
+##D # console
+##D q &lt;- write.stream(wordCounts, &quot;console&quot;, outputMode = 
&quot;complete&quot;)
+##D # text stream
+##D q &lt;- write.stream(df, &quot;text&quot;, path = 
&quot;/home/user/out&quot;, checkpointLocation = &quot;/home/user/cp&quot;,
+##D                   partitionBy = c(&quot;year&quot;, &quot;month&quot;), 
trigger.processingTime = &quot;30 seconds&quot;)
+##D # memory stream
+##D q &lt;- write.stream(wordCounts, &quot;memory&quot;, queryName = 
&quot;outs&quot;, outputMode = &quot;complete&quot;)
+##D head(sql(&quot;SELECT * from outs&quot;))
+##D queryName(q)
+##D 
+##D stopQuery(q)
+## End(Not run)
+</code></pre>
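+
+<p>A minimal additional sketch (not from the generated page) of the
+<code>trigger.once</code> argument described above, reusing <code>wordCounts</code> from
+the example: the query processes a single batch and then terminates, after which
+<code>awaitTermination</code> returns.
+</p>
+
+<pre><code class="r">## Not run: 
+##D # process exactly one batch of the aggregation and then stop
+##D q &lt;- write.stream(wordCounts, &quot;console&quot;, outputMode = &quot;complete&quot;,
+##D                   trigger.once = TRUE)
+##D awaitTermination(q)
+## End(Not run)
+</code></pre>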
+
+
+<hr /><div style="text-align: center;">[Package <em>SparkR</em> version 2.4.0 
<a href="00Index.html">Index</a>]</div>
+</body></html>

