GitHub user vanzin commented on a diff in the pull request:

    https://github.com/apache/spark/pull/3916#discussion_r26079464
  
    --- Diff: bin/pyspark2.cmd ---
    @@ -17,59 +17,22 @@ rem See the License for the specific language governing permissions and
     rem limitations under the License.
     rem
     
    -set SCALA_VERSION=2.10
    -
     rem Figure out where the Spark framework is installed
    -set FWDIR=%~dp0..\
    -
    -rem Export this as SPARK_HOME
    -set SPARK_HOME=%FWDIR%
    -
    -rem Test whether the user has built Spark
    -if exist "%FWDIR%RELEASE" goto skip_build_test
    -set FOUND_JAR=0
    -for %%d in ("%FWDIR%assembly\target\scala-%SCALA_VERSION%\spark-assembly*hadoop*.jar") do (
    -  set FOUND_JAR=1
    -)
    -if [%FOUND_JAR%] == [0] (
    -  echo Failed to find Spark assembly JAR.
    -  echo You need to build Spark before running this program.
    -  goto exit
    -)
    -:skip_build_test
    +set SPARK_HOME=%~dp0..
     
     rem Load environment variables from conf\spark-env.cmd, if it exists
    -if exist "%FWDIR%conf\spark-env.cmd" call "%FWDIR%conf\spark-env.cmd"
    +if exist "%SPARK_HOME%\conf\spark-env.cmd" call "%SPARK_HOME%\conf\spark-env.cmd"
     
     rem Figure out which Python to use.
    -if [%PYSPARK_PYTHON%] == [] set PYSPARK_PYTHON=python
    +if "x%PYSPARK_DRIVER_PYTHON%"=="x" (
    +  set PYSPARK_DRIVER_PYTHON=python
    +  if not [%PYSPARK_PYTHON%] == [] set PYSPARK_DRIVER_PYTHON=%PYSPARK_PYTHON%
    +)
     
    -set PYTHONPATH=%FWDIR%python;%PYTHONPATH%
    -set PYTHONPATH=%FWDIR%python\lib\py4j-0.8.2.1-src.zip;%PYTHONPATH%
    +set PYTHONPATH=%SPARK_HOME%\python;%PYTHONPATH%
    +set PYTHONPATH=%SPARK_HOME%\python\lib\py4j-0.8.2.1-src.zip;%PYTHONPATH%
     
     set OLD_PYTHONSTARTUP=%PYTHONSTARTUP%
    -set PYTHONSTARTUP=%FWDIR%python\pyspark\shell.py
    -set PYSPARK_SUBMIT_ARGS=%*
    -
    -echo Running %PYSPARK_PYTHON% with PYTHONPATH=%PYTHONPATH%
    -
    -rem Check whether the argument is a file
    -for /f %%i in ('echo %1^| findstr /R "\.py"') do (
    -  set PYTHON_FILE=%%i
    -)
    -
    -if [%PYTHON_FILE%] == [] (
    -  if [%IPYTHON%] == [1] (
    -   ipython %IPYTHON_OPTS%
    -  ) else (
    -   %PYSPARK_PYTHON%
    -  ) 
    -) else (
    -  echo.
    -  echo WARNING: Running python applications through ./bin/pyspark.cmd is deprecated as of Spark 1.0.
    -  echo Use ./bin/spark-submit ^<python file^>
    -  echo.
    -  "%FWDIR%\bin\spark-submit.cmd" %PYSPARK_SUBMIT_ARGS%
    -)
    +set PYTHONSTARTUP=%SPARK_HOME%\python\pyspark\shell.py
    --- End diff ---
    
    Windows has all sorts of weird rules about spaces, and they have absolutely no correlation to what happens in the bash world. This line works correctly as is.
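    
    To illustrate (a minimal sketch of my own, not part of the PR): assume a hypothetical SPARK_HOME under a path containing a space.
    
        rem Hypothetical value; any path with an embedded space shows the point.
        set "SPARK_HOME=C:\Program Files\spark"
    
        rem Quoted, "if exist" and "call" each receive the whole path as a
        rem single token, so the embedded space is harmless:
        if exist "%SPARK_HOME%\conf\spark-env.cmd" call "%SPARK_HOME%\conf\spark-env.cmd"
    
        rem Unquoted, cmd ends the path token at the first space: the test
        rem applies to "C:\Program" and the remainder is parsed as the command
        rem to run if that check passes, which no bash word-splitting intuition
        rem predicts. (Kept commented out so the sketch runs cleanly.)
        rem if exist %SPARK_HOME%\conf\spark-env.cmd echo found
    
    Quoting rules in cmd are per-construct, so each line has to be judged on its own rather than by analogy to bash.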

