Repository: spark
Updated Branches:
  refs/heads/master f1e7361f6 -> 55ab77707


[SPARK-3870] EOL character enforcement

We have both shell scripts and Windows batch files, so we should enforce the
proper EOL character for each file type.

Author: Kousuke Saruta <saru...@oss.nttdata.co.jp>

Closes #2726 from sarutak/eol-enforcement and squashes the following commits:

9748c3f [Kousuke Saruta] Fixed make.bat
252de89 [Kousuke Saruta] Removed extra characters from make.bat
5b81c00 [Kousuke Saruta] Merge branch 'master' of git://git.apache.org/spark into eol-enforcement
8633ed2 [Kousuke Saruta] merge branch 'master' of git://git.apache.org/spark into eol-enforcement
5d630d8 [Kousuke Saruta] Merged
ba10797 [Kousuke Saruta] Merge branch 'master' of git://git.apache.org/spark into eol-enforcement
7407515 [Kousuke Saruta] Merge branch 'master' of git://git.apache.org/spark into eol-enforcement
772fd4e [Kousuke Saruta] Normized EOL character in make.bat and compute-classpath.cmd
ac7f873 [Kousuke Saruta] Added an entry for .gitattributes to .rat-excludes
1570e77 [Kousuke Saruta] Added .gitattributes


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/55ab7770
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/55ab7770
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/55ab7770

Branch: refs/heads/master
Commit: 55ab77707891408e635843cf80079747625bd28e
Parents: f1e7361
Author: Kousuke Saruta <saru...@oss.nttdata.co.jp>
Authored: Fri Oct 31 12:39:52 2014 -0700
Committer: Josh Rosen <joshro...@databricks.com>
Committed: Fri Oct 31 12:39:52 2014 -0700

----------------------------------------------------------------------
 .gitattributes            |   2 +
 .rat-excludes             |   1 +
 bin/compute-classpath.cmd | 234 ++++++++++----------
 python/docs/make.bat      |  12 +-
 python/docs/make2.bat     | 486 ++++++++++++++++++++---------------------
 5 files changed, 369 insertions(+), 366 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/55ab7770/.gitattributes
----------------------------------------------------------------------
diff --git a/.gitattributes b/.gitattributes
new file mode 100644
index 0000000..2b65f6f
--- /dev/null
+++ b/.gitattributes
@@ -0,0 +1,2 @@
+*.bat text eol=crlf
+*.cmd text eol=crlf
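
The two attribute lines above mark *.bat and *.cmd files as text and force CRLF line
endings on checkout, which is what actually enforces the EOL character for the Windows
scripts. As a side note (not part of this patch), a contributor on Windows could
sanity-check the rule and renormalize files already in the index roughly as sketched
below; the path is only an example, and "git add --renormalize" needs Git 2.16 or later:

  rem Show which text/eol attributes apply to a batch file (example path)
  git check-attr text eol -- python/docs/make.bat

  rem Re-apply .gitattributes to tracked files and stage any line-ending fixes (Git 2.16+)
  git add --renormalize .
  git status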

http://git-wip-us.apache.org/repos/asf/spark/blob/55ab7770/.rat-excludes
----------------------------------------------------------------------
diff --git a/.rat-excludes b/.rat-excludes
index ae97456..20e3372 100644
--- a/.rat-excludes
+++ b/.rat-excludes
@@ -1,5 +1,6 @@
 target
 .gitignore
+.gitattributes
 .project
 .classpath
 .mima-excludes

http://git-wip-us.apache.org/repos/asf/spark/blob/55ab7770/bin/compute-classpath.cmd
----------------------------------------------------------------------
diff --git a/bin/compute-classpath.cmd b/bin/compute-classpath.cmd
index 3cd0579..a4c099f 100644
--- a/bin/compute-classpath.cmd
+++ b/bin/compute-classpath.cmd
@@ -1,117 +1,117 @@
-@echo off
-
-rem
-rem Licensed to the Apache Software Foundation (ASF) under one or more
-rem contributor license agreements.  See the NOTICE file distributed with
-rem this work for additional information regarding copyright ownership.
-rem The ASF licenses this file to You under the Apache License, Version 2.0
-rem (the "License"); you may not use this file except in compliance with
-rem the License.  You may obtain a copy of the License at
-rem
-rem    http://www.apache.org/licenses/LICENSE-2.0
-rem
-rem Unless required by applicable law or agreed to in writing, software
-rem distributed under the License is distributed on an "AS IS" BASIS,
-rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-rem See the License for the specific language governing permissions and
-rem limitations under the License.
-rem
-
-rem This script computes Spark's classpath and prints it to stdout; it's used by both the "run"
-rem script and the ExecutorRunner in standalone cluster mode.
-
-rem If we're called from spark-class2.cmd, it already set enabledelayedexpansion and setting
-rem it here would stop us from affecting its copy of the CLASSPATH variable; otherwise we
-rem need to set it here because we use !datanucleus_jars! below.
-if "%DONT_PRINT_CLASSPATH%"=="1" goto skip_delayed_expansion
-setlocal enabledelayedexpansion
-:skip_delayed_expansion
-
-set SCALA_VERSION=2.10
-
-rem Figure out where the Spark framework is installed
-set FWDIR=%~dp0..\
-
-rem Load environment variables from conf\spark-env.cmd, if it exists
-if exist "%FWDIR%conf\spark-env.cmd" call "%FWDIR%conf\spark-env.cmd"
-
-rem Build up classpath
-set CLASSPATH=%SPARK_CLASSPATH%;%SPARK_SUBMIT_CLASSPATH%
-
-if not "x%SPARK_CONF_DIR%"=="x" (
-  set CLASSPATH=%CLASSPATH%;%SPARK_CONF_DIR%
-) else (
-  set CLASSPATH=%CLASSPATH%;%FWDIR%conf
-)
-
-if exist "%FWDIR%RELEASE" (
-  for %%d in ("%FWDIR%lib\spark-assembly*.jar") do (
-    set ASSEMBLY_JAR=%%d
-  )
-) else (
-  for %%d in ("%FWDIR%assembly\target\scala-%SCALA_VERSION%\spark-assembly*hadoop*.jar") do (
-    set ASSEMBLY_JAR=%%d
-  )
-)
-
-set CLASSPATH=%CLASSPATH%;%ASSEMBLY_JAR%
-
-rem When Hive support is needed, Datanucleus jars must be included on the classpath.
-rem Datanucleus jars do not work if only included in the uber jar as plugin.xml metadata is lost.
-rem Both sbt and maven will populate "lib_managed/jars/" with the datanucleus jars when Spark is
-rem built with Hive, so look for them there.
-if exist "%FWDIR%RELEASE" (
-  set datanucleus_dir=%FWDIR%lib
-) else (
-  set datanucleus_dir=%FWDIR%lib_managed\jars
-)
-set "datanucleus_jars="
-for %%d in ("%datanucleus_dir%\datanucleus-*.jar") do (
-  set datanucleus_jars=!datanucleus_jars!;%%d
-)
-set CLASSPATH=%CLASSPATH%;%datanucleus_jars%
-
-set SPARK_CLASSES=%FWDIR%core\target\scala-%SCALA_VERSION%\classes
-set SPARK_CLASSES=%SPARK_CLASSES%;%FWDIR%repl\target\scala-%SCALA_VERSION%\classes
-set SPARK_CLASSES=%SPARK_CLASSES%;%FWDIR%mllib\target\scala-%SCALA_VERSION%\classes
-set SPARK_CLASSES=%SPARK_CLASSES%;%FWDIR%bagel\target\scala-%SCALA_VERSION%\classes
-set SPARK_CLASSES=%SPARK_CLASSES%;%FWDIR%graphx\target\scala-%SCALA_VERSION%\classes
-set SPARK_CLASSES=%SPARK_CLASSES%;%FWDIR%streaming\target\scala-%SCALA_VERSION%\classes
-set SPARK_CLASSES=%SPARK_CLASSES%;%FWDIR%tools\target\scala-%SCALA_VERSION%\classes
-set SPARK_CLASSES=%SPARK_CLASSES%;%FWDIR%sql\catalyst\target\scala-%SCALA_VERSION%\classes
-set SPARK_CLASSES=%SPARK_CLASSES%;%FWDIR%sql\core\target\scala-%SCALA_VERSION%\classes
-set SPARK_CLASSES=%SPARK_CLASSES%;%FWDIR%sql\hive\target\scala-%SCALA_VERSION%\classes
-
-set SPARK_TEST_CLASSES=%FWDIR%core\target\scala-%SCALA_VERSION%\test-classes
-set SPARK_TEST_CLASSES=%SPARK_TEST_CLASSES%;%FWDIR%repl\target\scala-%SCALA_VERSION%\test-classes
-set SPARK_TEST_CLASSES=%SPARK_TEST_CLASSES%;%FWDIR%mllib\target\scala-%SCALA_VERSION%\test-classes
-set SPARK_TEST_CLASSES=%SPARK_TEST_CLASSES%;%FWDIR%bagel\target\scala-%SCALA_VERSION%\test-classes
-set SPARK_TEST_CLASSES=%SPARK_TEST_CLASSES%;%FWDIR%graphx\target\scala-%SCALA_VERSION%\test-classes
-set SPARK_TEST_CLASSES=%SPARK_TEST_CLASSES%;%FWDIR%streaming\target\scala-%SCALA_VERSION%\test-classes
-set SPARK_TEST_CLASSES=%SPARK_TEST_CLASSES%;%FWDIR%sql\catalyst\target\scala-%SCALA_VERSION%\test-classes
-set SPARK_TEST_CLASSES=%SPARK_TEST_CLASSES%;%FWDIR%sql\core\target\scala-%SCALA_VERSION%\test-classes
-set SPARK_TEST_CLASSES=%SPARK_TEST_CLASSES%;%FWDIR%sql\hive\target\scala-%SCALA_VERSION%\test-classes
-
-if "x%SPARK_TESTING%"=="x1" (
-  rem Add test clases to path - note, add SPARK_CLASSES and SPARK_TEST_CLASSES before CLASSPATH
-  rem so that local compilation takes precedence over assembled jar
-  set CLASSPATH=%SPARK_CLASSES%;%SPARK_TEST_CLASSES%;%CLASSPATH%
-)
-
-rem Add hadoop conf dir - else FileSystem.*, etc fail
-rem Note, this assumes that there is either a HADOOP_CONF_DIR or YARN_CONF_DIR which hosts
-rem the configurtion files.
-if "x%HADOOP_CONF_DIR%"=="x" goto no_hadoop_conf_dir
-  set CLASSPATH=%CLASSPATH%;%HADOOP_CONF_DIR%
-:no_hadoop_conf_dir
-
-if "x%YARN_CONF_DIR%"=="x" goto no_yarn_conf_dir
-  set CLASSPATH=%CLASSPATH%;%YARN_CONF_DIR%
-:no_yarn_conf_dir
-
-rem A bit of a hack to allow calling this script within run2.cmd without seeing output
-if "%DONT_PRINT_CLASSPATH%"=="1" goto exit
-
-echo %CLASSPATH%
-
-:exit
+@echo off
+
+rem
+rem Licensed to the Apache Software Foundation (ASF) under one or more
+rem contributor license agreements.  See the NOTICE file distributed with
+rem this work for additional information regarding copyright ownership.
+rem The ASF licenses this file to You under the Apache License, Version 2.0
+rem (the "License"); you may not use this file except in compliance with
+rem the License.  You may obtain a copy of the License at
+rem
+rem    http://www.apache.org/licenses/LICENSE-2.0
+rem
+rem Unless required by applicable law or agreed to in writing, software
+rem distributed under the License is distributed on an "AS IS" BASIS,
+rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+rem See the License for the specific language governing permissions and
+rem limitations under the License.
+rem
+
+rem This script computes Spark's classpath and prints it to stdout; it's used by both the "run"
+rem script and the ExecutorRunner in standalone cluster mode.
+
+rem If we're called from spark-class2.cmd, it already set enabledelayedexpansion and setting
+rem it here would stop us from affecting its copy of the CLASSPATH variable; otherwise we
+rem need to set it here because we use !datanucleus_jars! below.
+if "%DONT_PRINT_CLASSPATH%"=="1" goto skip_delayed_expansion
+setlocal enabledelayedexpansion
+:skip_delayed_expansion
+
+set SCALA_VERSION=2.10
+
+rem Figure out where the Spark framework is installed
+set FWDIR=%~dp0..\
+
+rem Load environment variables from conf\spark-env.cmd, if it exists
+if exist "%FWDIR%conf\spark-env.cmd" call "%FWDIR%conf\spark-env.cmd"
+
+rem Build up classpath
+set CLASSPATH=%SPARK_CLASSPATH%;%SPARK_SUBMIT_CLASSPATH%
+
+if not "x%SPARK_CONF_DIR%"=="x" (
+  set CLASSPATH=%CLASSPATH%;%SPARK_CONF_DIR%
+) else (
+  set CLASSPATH=%CLASSPATH%;%FWDIR%conf
+)
+
+if exist "%FWDIR%RELEASE" (
+  for %%d in ("%FWDIR%lib\spark-assembly*.jar") do (
+    set ASSEMBLY_JAR=%%d
+  )
+) else (
+  for %%d in ("%FWDIR%assembly\target\scala-%SCALA_VERSION%\spark-assembly*hadoop*.jar") do (
+    set ASSEMBLY_JAR=%%d
+  )
+)
+
+set CLASSPATH=%CLASSPATH%;%ASSEMBLY_JAR%
+
+rem When Hive support is needed, Datanucleus jars must be included on the classpath.
+rem Datanucleus jars do not work if only included in the uber jar as plugin.xml metadata is lost.
+rem Both sbt and maven will populate "lib_managed/jars/" with the datanucleus jars when Spark is
+rem built with Hive, so look for them there.
+if exist "%FWDIR%RELEASE" (
+  set datanucleus_dir=%FWDIR%lib
+) else (
+  set datanucleus_dir=%FWDIR%lib_managed\jars
+)
+set "datanucleus_jars="
+for %%d in ("%datanucleus_dir%\datanucleus-*.jar") do (
+  set datanucleus_jars=!datanucleus_jars!;%%d
+)
+set CLASSPATH=%CLASSPATH%;%datanucleus_jars%
+
+set SPARK_CLASSES=%FWDIR%core\target\scala-%SCALA_VERSION%\classes
+set SPARK_CLASSES=%SPARK_CLASSES%;%FWDIR%repl\target\scala-%SCALA_VERSION%\classes
+set SPARK_CLASSES=%SPARK_CLASSES%;%FWDIR%mllib\target\scala-%SCALA_VERSION%\classes
+set SPARK_CLASSES=%SPARK_CLASSES%;%FWDIR%bagel\target\scala-%SCALA_VERSION%\classes
+set SPARK_CLASSES=%SPARK_CLASSES%;%FWDIR%graphx\target\scala-%SCALA_VERSION%\classes
+set SPARK_CLASSES=%SPARK_CLASSES%;%FWDIR%streaming\target\scala-%SCALA_VERSION%\classes
+set SPARK_CLASSES=%SPARK_CLASSES%;%FWDIR%tools\target\scala-%SCALA_VERSION%\classes
+set SPARK_CLASSES=%SPARK_CLASSES%;%FWDIR%sql\catalyst\target\scala-%SCALA_VERSION%\classes
+set SPARK_CLASSES=%SPARK_CLASSES%;%FWDIR%sql\core\target\scala-%SCALA_VERSION%\classes
+set SPARK_CLASSES=%SPARK_CLASSES%;%FWDIR%sql\hive\target\scala-%SCALA_VERSION%\classes
+
+set SPARK_TEST_CLASSES=%FWDIR%core\target\scala-%SCALA_VERSION%\test-classes
+set SPARK_TEST_CLASSES=%SPARK_TEST_CLASSES%;%FWDIR%repl\target\scala-%SCALA_VERSION%\test-classes
+set SPARK_TEST_CLASSES=%SPARK_TEST_CLASSES%;%FWDIR%mllib\target\scala-%SCALA_VERSION%\test-classes
+set SPARK_TEST_CLASSES=%SPARK_TEST_CLASSES%;%FWDIR%bagel\target\scala-%SCALA_VERSION%\test-classes
+set SPARK_TEST_CLASSES=%SPARK_TEST_CLASSES%;%FWDIR%graphx\target\scala-%SCALA_VERSION%\test-classes
+set SPARK_TEST_CLASSES=%SPARK_TEST_CLASSES%;%FWDIR%streaming\target\scala-%SCALA_VERSION%\test-classes
+set SPARK_TEST_CLASSES=%SPARK_TEST_CLASSES%;%FWDIR%sql\catalyst\target\scala-%SCALA_VERSION%\test-classes
+set SPARK_TEST_CLASSES=%SPARK_TEST_CLASSES%;%FWDIR%sql\core\target\scala-%SCALA_VERSION%\test-classes
+set SPARK_TEST_CLASSES=%SPARK_TEST_CLASSES%;%FWDIR%sql\hive\target\scala-%SCALA_VERSION%\test-classes
+
+if "x%SPARK_TESTING%"=="x1" (
+  rem Add test clases to path - note, add SPARK_CLASSES and SPARK_TEST_CLASSES before CLASSPATH
+  rem so that local compilation takes precedence over assembled jar
+  set CLASSPATH=%SPARK_CLASSES%;%SPARK_TEST_CLASSES%;%CLASSPATH%
+)
+
+rem Add hadoop conf dir - else FileSystem.*, etc fail
+rem Note, this assumes that there is either a HADOOP_CONF_DIR or YARN_CONF_DIR which hosts
+rem the configurtion files.
+if "x%HADOOP_CONF_DIR%"=="x" goto no_hadoop_conf_dir
+  set CLASSPATH=%CLASSPATH%;%HADOOP_CONF_DIR%
+:no_hadoop_conf_dir
+
+if "x%YARN_CONF_DIR%"=="x" goto no_yarn_conf_dir
+  set CLASSPATH=%CLASSPATH%;%YARN_CONF_DIR%
+:no_yarn_conf_dir
+
+rem A bit of a hack to allow calling this script within run2.cmd without seeing output
+if "%DONT_PRINT_CLASSPATH%"=="1" goto exit
+
+echo %CLASSPATH%
+
+:exit
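
For context, compute-classpath.cmd simply echoes the assembled CLASSPATH to stdout
unless DONT_PRINT_CLASSPATH=1, so once the file carries proper CRLF endings a quick
manual check on Windows might look like the sketch below; the checkout path is
hypothetical and an assembly jar is assumed to have been built already:

  rem From the root of a Spark checkout (hypothetical location)
  cd C:\work\spark
  bin\compute-classpath.cmd
  rem Prints the computed classpath: conf dir, assembly jar, datanucleus jars,
  rem and HADOOP_CONF_DIR / YARN_CONF_DIR if those variables are set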

http://git-wip-us.apache.org/repos/asf/spark/blob/55ab7770/python/docs/make.bat
----------------------------------------------------------------------
diff --git a/python/docs/make.bat b/python/docs/make.bat
index c011e82..cc29acd 100644
--- a/python/docs/make.bat
+++ b/python/docs/make.bat
@@ -1,6 +1,6 @@
-@ECHO OFF
-
-rem This is the entry point for running Sphinx documentation. To avoid polluting the
-rem environment, it just launches a new cmd to do the real work.
-
-cmd /V /E /C %~dp0make2.bat %*
+@ECHO OFF
+
+rem This is the entry point for running Sphinx documentation. To avoid polluting the
+rem environment, it just launches a new cmd to do the real work.
+
+cmd /V /E /C %~dp0make2.bat %*

http://git-wip-us.apache.org/repos/asf/spark/blob/55ab7770/python/docs/make2.bat
----------------------------------------------------------------------
diff --git a/python/docs/make2.bat b/python/docs/make2.bat
index 7bcaeaf..05d22eb 100644
--- a/python/docs/make2.bat
+++ b/python/docs/make2.bat
@@ -1,243 +1,243 @@
-@ECHO OFF
-
-REM Command file for Sphinx documentation
-
-
-if "%SPHINXBUILD%" == "" (
-       set SPHINXBUILD=sphinx-build
-)
-set BUILDDIR=_build
-set ALLSPHINXOPTS=-d %BUILDDIR%/doctrees %SPHINXOPTS% .
-set I18NSPHINXOPTS=%SPHINXOPTS% .
-if NOT "%PAPER%" == "" (
-       set ALLSPHINXOPTS=-D latex_paper_size=%PAPER% %ALLSPHINXOPTS%
-       set I18NSPHINXOPTS=-D latex_paper_size=%PAPER% %I18NSPHINXOPTS%
-)
-
-if "%1" == "" goto help
-
-if "%1" == "help" (
-       :help
-       echo.Please use `make ^<target^>` where ^<target^> is one of
-       echo.  html       to make standalone HTML files
-       echo.  dirhtml    to make HTML files named index.html in directories
-       echo.  singlehtml to make a single large HTML file
-       echo.  pickle     to make pickle files
-       echo.  json       to make JSON files
-       echo.  htmlhelp   to make HTML files and a HTML help project
-       echo.  qthelp     to make HTML files and a qthelp project
-       echo.  devhelp    to make HTML files and a Devhelp project
-       echo.  epub       to make an epub
-       echo.  latex      to make LaTeX files, you can set PAPER=a4 or PAPER=letter
-       echo.  text       to make text files
-       echo.  man        to make manual pages
-       echo.  texinfo    to make Texinfo files
-       echo.  gettext    to make PO message catalogs
-       echo.  changes    to make an overview over all changed/added/deprecated items
-       echo.  xml        to make Docutils-native XML files
-       echo.  pseudoxml  to make pseudoxml-XML files for display purposes
-       echo.  linkcheck  to check all external links for integrity
-       echo.  doctest    to run all doctests embedded in the documentation if enabled
-       goto end
-)
-
-if "%1" == "clean" (
-       for /d %%i in (%BUILDDIR%\*) do rmdir /q /s %%i
-       del /q /s %BUILDDIR%\*
-       goto end
-)
-
-
-%SPHINXBUILD% 2> nul
-if errorlevel 9009 (
-       echo.
-       echo.The 'sphinx-build' command was not found. Make sure you have Sphinx
-       echo.installed, then set the SPHINXBUILD environment variable to point
-       echo.to the full path of the 'sphinx-build' executable. Alternatively you
-       echo.may add the Sphinx directory to PATH.
-       echo.
-       echo.If you don't have Sphinx installed, grab it from
-       echo.http://sphinx-doc.org/
-       exit /b 1
-)
-
-if "%1" == "html" (
-       %SPHINXBUILD% -b html %ALLSPHINXOPTS% %BUILDDIR%/html
-       if errorlevel 1 exit /b 1
-       echo.
-       echo.Build finished. The HTML pages are in %BUILDDIR%/html.
-       goto end
-)
-
-if "%1" == "dirhtml" (
-       %SPHINXBUILD% -b dirhtml %ALLSPHINXOPTS% %BUILDDIR%/dirhtml
-       if errorlevel 1 exit /b 1
-       echo.
-       echo.Build finished. The HTML pages are in %BUILDDIR%/dirhtml.
-       goto end
-)
-
-if "%1" == "singlehtml" (
-       %SPHINXBUILD% -b singlehtml %ALLSPHINXOPTS% %BUILDDIR%/singlehtml
-       if errorlevel 1 exit /b 1
-       echo.
-       echo.Build finished. The HTML pages are in %BUILDDIR%/singlehtml.
-       goto end
-)
-
-if "%1" == "pickle" (
-       %SPHINXBUILD% -b pickle %ALLSPHINXOPTS% %BUILDDIR%/pickle
-       if errorlevel 1 exit /b 1
-       echo.
-       echo.Build finished; now you can process the pickle files.
-       goto end
-)
-
-if "%1" == "json" (
-       %SPHINXBUILD% -b json %ALLSPHINXOPTS% %BUILDDIR%/json
-       if errorlevel 1 exit /b 1
-       echo.
-       echo.Build finished; now you can process the JSON files.
-       goto end
-)
-
-if "%1" == "htmlhelp" (
-       %SPHINXBUILD% -b htmlhelp %ALLSPHINXOPTS% %BUILDDIR%/htmlhelp
-       if errorlevel 1 exit /b 1
-       echo.
-       echo.Build finished; now you can run HTML Help Workshop with the ^
-.hhp project file in %BUILDDIR%/htmlhelp.
-       goto end
-)
-
-if "%1" == "qthelp" (
-       %SPHINXBUILD% -b qthelp %ALLSPHINXOPTS% %BUILDDIR%/qthelp
-       if errorlevel 1 exit /b 1
-       echo.
-       echo.Build finished; now you can run "qcollectiongenerator" with the ^
-.qhcp project file in %BUILDDIR%/qthelp, like this:
-       echo.^> qcollectiongenerator %BUILDDIR%\qthelp\pyspark.qhcp
-       echo.To view the help file:
-       echo.^> assistant -collectionFile %BUILDDIR%\qthelp\pyspark.ghc
-       goto end
-)
-
-if "%1" == "devhelp" (
-       %SPHINXBUILD% -b devhelp %ALLSPHINXOPTS% %BUILDDIR%/devhelp
-       if errorlevel 1 exit /b 1
-       echo.
-       echo.Build finished.
-       goto end
-)
-
-if "%1" == "epub" (
-       %SPHINXBUILD% -b epub %ALLSPHINXOPTS% %BUILDDIR%/epub
-       if errorlevel 1 exit /b 1
-       echo.
-       echo.Build finished. The epub file is in %BUILDDIR%/epub.
-       goto end
-)
-
-if "%1" == "latex" (
-       %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex
-       if errorlevel 1 exit /b 1
-       echo.
-       echo.Build finished; the LaTeX files are in %BUILDDIR%/latex.
-       goto end
-)
-
-if "%1" == "latexpdf" (
-       %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex
-       cd %BUILDDIR%/latex
-       make all-pdf
-       cd %BUILDDIR%/..
-       echo.
-       echo.Build finished; the PDF files are in %BUILDDIR%/latex.
-       goto end
-)
-
-if "%1" == "latexpdfja" (
-       %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex
-       cd %BUILDDIR%/latex
-       make all-pdf-ja
-       cd %BUILDDIR%/..
-       echo.
-       echo.Build finished; the PDF files are in %BUILDDIR%/latex.
-       goto end
-)
-
-if "%1" == "text" (
-       %SPHINXBUILD% -b text %ALLSPHINXOPTS% %BUILDDIR%/text
-       if errorlevel 1 exit /b 1
-       echo.
-       echo.Build finished. The text files are in %BUILDDIR%/text.
-       goto end
-)
-
-if "%1" == "man" (
-       %SPHINXBUILD% -b man %ALLSPHINXOPTS% %BUILDDIR%/man
-       if errorlevel 1 exit /b 1
-       echo.
-       echo.Build finished. The manual pages are in %BUILDDIR%/man.
-       goto end
-)
-
-if "%1" == "texinfo" (
-       %SPHINXBUILD% -b texinfo %ALLSPHINXOPTS% %BUILDDIR%/texinfo
-       if errorlevel 1 exit /b 1
-       echo.
-       echo.Build finished. The Texinfo files are in %BUILDDIR%/texinfo.
-       goto end
-)
-
-if "%1" == "gettext" (
-       %SPHINXBUILD% -b gettext %I18NSPHINXOPTS% %BUILDDIR%/locale
-       if errorlevel 1 exit /b 1
-       echo.
-       echo.Build finished. The message catalogs are in %BUILDDIR%/locale.
-       goto end
-)
-
-if "%1" == "changes" (
-       %SPHINXBUILD% -b changes %ALLSPHINXOPTS% %BUILDDIR%/changes
-       if errorlevel 1 exit /b 1
-       echo.
-       echo.The overview file is in %BUILDDIR%/changes.
-       goto end
-)
-
-if "%1" == "linkcheck" (
-       %SPHINXBUILD% -b linkcheck %ALLSPHINXOPTS% %BUILDDIR%/linkcheck
-       if errorlevel 1 exit /b 1
-       echo.
-       echo.Link check complete; look for any errors in the above output ^
-or in %BUILDDIR%/linkcheck/output.txt.
-       goto end
-)
-
-if "%1" == "doctest" (
-       %SPHINXBUILD% -b doctest %ALLSPHINXOPTS% %BUILDDIR%/doctest
-       if errorlevel 1 exit /b 1
-       echo.
-       echo.Testing of doctests in the sources finished, look at the ^
-results in %BUILDDIR%/doctest/output.txt.
-       goto end
-)
-
-if "%1" == "xml" (
-       %SPHINXBUILD% -b xml %ALLSPHINXOPTS% %BUILDDIR%/xml
-       if errorlevel 1 exit /b 1
-       echo.
-       echo.Build finished. The XML files are in %BUILDDIR%/xml.
-       goto end
-)
-
-if "%1" == "pseudoxml" (
-       %SPHINXBUILD% -b pseudoxml %ALLSPHINXOPTS% %BUILDDIR%/pseudoxml
-       if errorlevel 1 exit /b 1
-       echo.
-       echo.Build finished. The pseudo-XML files are in %BUILDDIR%/pseudoxml.
-       goto end
-)
-
-:end
+@ECHO OFF
+
+REM Command file for Sphinx documentation
+
+
+if "%SPHINXBUILD%" == "" (
+       set SPHINXBUILD=sphinx-build
+)
+set BUILDDIR=_build
+set ALLSPHINXOPTS=-d %BUILDDIR%/doctrees %SPHINXOPTS% .
+set I18NSPHINXOPTS=%SPHINXOPTS% .
+if NOT "%PAPER%" == "" (
+       set ALLSPHINXOPTS=-D latex_paper_size=%PAPER% %ALLSPHINXOPTS%
+       set I18NSPHINXOPTS=-D latex_paper_size=%PAPER% %I18NSPHINXOPTS%
+)
+
+if "%1" == "" goto help
+
+if "%1" == "help" (
+       :help
+       echo.Please use `make ^<target^>` where ^<target^> is one of
+       echo.  html       to make standalone HTML files
+       echo.  dirhtml    to make HTML files named index.html in directories
+       echo.  singlehtml to make a single large HTML file
+       echo.  pickle     to make pickle files
+       echo.  json       to make JSON files
+       echo.  htmlhelp   to make HTML files and a HTML help project
+       echo.  qthelp     to make HTML files and a qthelp project
+       echo.  devhelp    to make HTML files and a Devhelp project
+       echo.  epub       to make an epub
+       echo.  latex      to make LaTeX files, you can set PAPER=a4 or PAPER=letter
+       echo.  text       to make text files
+       echo.  man        to make manual pages
+       echo.  texinfo    to make Texinfo files
+       echo.  gettext    to make PO message catalogs
+       echo.  changes    to make an overview over all changed/added/deprecated items
+       echo.  xml        to make Docutils-native XML files
+       echo.  pseudoxml  to make pseudoxml-XML files for display purposes
+       echo.  linkcheck  to check all external links for integrity
+       echo.  doctest    to run all doctests embedded in the documentation if enabled
+       goto end
+)
+
+if "%1" == "clean" (
+       for /d %%i in (%BUILDDIR%\*) do rmdir /q /s %%i
+       del /q /s %BUILDDIR%\*
+       goto end
+)
+
+
+%SPHINXBUILD% 2> nul
+if errorlevel 9009 (
+       echo.
+       echo.The 'sphinx-build' command was not found. Make sure you have Sphinx
+       echo.installed, then set the SPHINXBUILD environment variable to point
+       echo.to the full path of the 'sphinx-build' executable. Alternatively you
+       echo.may add the Sphinx directory to PATH.
+       echo.
+       echo.If you don't have Sphinx installed, grab it from
+       echo.http://sphinx-doc.org/
+       exit /b 1
+)
+
+if "%1" == "html" (
+       %SPHINXBUILD% -b html %ALLSPHINXOPTS% %BUILDDIR%/html
+       if errorlevel 1 exit /b 1
+       echo.
+       echo.Build finished. The HTML pages are in %BUILDDIR%/html.
+       goto end
+)
+
+if "%1" == "dirhtml" (
+       %SPHINXBUILD% -b dirhtml %ALLSPHINXOPTS% %BUILDDIR%/dirhtml
+       if errorlevel 1 exit /b 1
+       echo.
+       echo.Build finished. The HTML pages are in %BUILDDIR%/dirhtml.
+       goto end
+)
+
+if "%1" == "singlehtml" (
+       %SPHINXBUILD% -b singlehtml %ALLSPHINXOPTS% %BUILDDIR%/singlehtml
+       if errorlevel 1 exit /b 1
+       echo.
+       echo.Build finished. The HTML pages are in %BUILDDIR%/singlehtml.
+       goto end
+)
+
+if "%1" == "pickle" (
+       %SPHINXBUILD% -b pickle %ALLSPHINXOPTS% %BUILDDIR%/pickle
+       if errorlevel 1 exit /b 1
+       echo.
+       echo.Build finished; now you can process the pickle files.
+       goto end
+)
+
+if "%1" == "json" (
+       %SPHINXBUILD% -b json %ALLSPHINXOPTS% %BUILDDIR%/json
+       if errorlevel 1 exit /b 1
+       echo.
+       echo.Build finished; now you can process the JSON files.
+       goto end
+)
+
+if "%1" == "htmlhelp" (
+       %SPHINXBUILD% -b htmlhelp %ALLSPHINXOPTS% %BUILDDIR%/htmlhelp
+       if errorlevel 1 exit /b 1
+       echo.
+       echo.Build finished; now you can run HTML Help Workshop with the ^
+.hhp project file in %BUILDDIR%/htmlhelp.
+       goto end
+)
+
+if "%1" == "qthelp" (
+       %SPHINXBUILD% -b qthelp %ALLSPHINXOPTS% %BUILDDIR%/qthelp
+       if errorlevel 1 exit /b 1
+       echo.
+       echo.Build finished; now you can run "qcollectiongenerator" with the ^
+.qhcp project file in %BUILDDIR%/qthelp, like this:
+       echo.^> qcollectiongenerator %BUILDDIR%\qthelp\pyspark.qhcp
+       echo.To view the help file:
+       echo.^> assistant -collectionFile %BUILDDIR%\qthelp\pyspark.ghc
+       goto end
+)
+
+if "%1" == "devhelp" (
+       %SPHINXBUILD% -b devhelp %ALLSPHINXOPTS% %BUILDDIR%/devhelp
+       if errorlevel 1 exit /b 1
+       echo.
+       echo.Build finished.
+       goto end
+)
+
+if "%1" == "epub" (
+       %SPHINXBUILD% -b epub %ALLSPHINXOPTS% %BUILDDIR%/epub
+       if errorlevel 1 exit /b 1
+       echo.
+       echo.Build finished. The epub file is in %BUILDDIR%/epub.
+       goto end
+)
+
+if "%1" == "latex" (
+       %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex
+       if errorlevel 1 exit /b 1
+       echo.
+       echo.Build finished; the LaTeX files are in %BUILDDIR%/latex.
+       goto end
+)
+
+if "%1" == "latexpdf" (
+       %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex
+       cd %BUILDDIR%/latex
+       make all-pdf
+       cd %BUILDDIR%/..
+       echo.
+       echo.Build finished; the PDF files are in %BUILDDIR%/latex.
+       goto end
+)
+
+if "%1" == "latexpdfja" (
+       %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex
+       cd %BUILDDIR%/latex
+       make all-pdf-ja
+       cd %BUILDDIR%/..
+       echo.
+       echo.Build finished; the PDF files are in %BUILDDIR%/latex.
+       goto end
+)
+
+if "%1" == "text" (
+       %SPHINXBUILD% -b text %ALLSPHINXOPTS% %BUILDDIR%/text
+       if errorlevel 1 exit /b 1
+       echo.
+       echo.Build finished. The text files are in %BUILDDIR%/text.
+       goto end
+)
+
+if "%1" == "man" (
+       %SPHINXBUILD% -b man %ALLSPHINXOPTS% %BUILDDIR%/man
+       if errorlevel 1 exit /b 1
+       echo.
+       echo.Build finished. The manual pages are in %BUILDDIR%/man.
+       goto end
+)
+
+if "%1" == "texinfo" (
+       %SPHINXBUILD% -b texinfo %ALLSPHINXOPTS% %BUILDDIR%/texinfo
+       if errorlevel 1 exit /b 1
+       echo.
+       echo.Build finished. The Texinfo files are in %BUILDDIR%/texinfo.
+       goto end
+)
+
+if "%1" == "gettext" (
+       %SPHINXBUILD% -b gettext %I18NSPHINXOPTS% %BUILDDIR%/locale
+       if errorlevel 1 exit /b 1
+       echo.
+       echo.Build finished. The message catalogs are in %BUILDDIR%/locale.
+       goto end
+)
+
+if "%1" == "changes" (
+       %SPHINXBUILD% -b changes %ALLSPHINXOPTS% %BUILDDIR%/changes
+       if errorlevel 1 exit /b 1
+       echo.
+       echo.The overview file is in %BUILDDIR%/changes.
+       goto end
+)
+
+if "%1" == "linkcheck" (
+       %SPHINXBUILD% -b linkcheck %ALLSPHINXOPTS% %BUILDDIR%/linkcheck
+       if errorlevel 1 exit /b 1
+       echo.
+       echo.Link check complete; look for any errors in the above output ^
+or in %BUILDDIR%/linkcheck/output.txt.
+       goto end
+)
+
+if "%1" == "doctest" (
+       %SPHINXBUILD% -b doctest %ALLSPHINXOPTS% %BUILDDIR%/doctest
+       if errorlevel 1 exit /b 1
+       echo.
+       echo.Testing of doctests in the sources finished, look at the ^
+results in %BUILDDIR%/doctest/output.txt.
+       goto end
+)
+
+if "%1" == "xml" (
+       %SPHINXBUILD% -b xml %ALLSPHINXOPTS% %BUILDDIR%/xml
+       if errorlevel 1 exit /b 1
+       echo.
+       echo.Build finished. The XML files are in %BUILDDIR%/xml.
+       goto end
+)
+
+if "%1" == "pseudoxml" (
+       %SPHINXBUILD% -b pseudoxml %ALLSPHINXOPTS% %BUILDDIR%/pseudoxml
+       if errorlevel 1 exit /b 1
+       echo.
+       echo.Build finished. The pseudo-XML files are in %BUILDDIR%/pseudoxml.
+       goto end
+)
+
+:end
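
make.bat above is only a thin wrapper: it re-runs make2.bat in a fresh cmd with delayed
expansion (/V) and command extensions (/E) enabled so the parent environment is not
polluted. A typical docs build on Windows would therefore be driven as sketched below,
assuming Sphinx is installed and sphinx-build is on PATH (or SPHINXBUILD points at it):

  rem Build the PySpark HTML docs (run from python\docs, since make2.bat uses "." as the source dir)
  cd python\docs
  make.bat html
  rem Output lands in _build\html, per the BUILDDIR setting in make2.bat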

