Repository: spark
Updated Branches:
  refs/heads/master efd0036ec -> a1877f45c


[SPARK-22597][SQL] Add spark-sql cmd script for Windows users

## What changes were proposed in this pull request?

This PR proposes to add cmd scripts so that Windows users can also run the `spark-sql` script.

## How was this patch tested?

Manually tested on Windows.

**Before**

```cmd
C:\...\spark>.\bin\spark-sql
'.\bin\spark-sql' is not recognized as an internal or external command,
operable program or batch file.

C:\...\spark>.\bin\spark-sql.cmd
'.\bin\spark-sql.cmd' is not recognized as an internal or external command,
operable program or batch file.
```

**After**

```cmd
C:\...\spark>.\bin\spark-sql
...
spark-sql> SELECT 'Hello World !!';
...
Hello World !!
```
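
The new script simply forwards its arguments to `SparkSQLCLIDriver`, so the usual CLI options also work from cmd; for instance, a one-off query can be run non-interactively with `-e` (shown here only as an illustrative example, not part of the manual test above):

```cmd
C:\...\spark>.\bin\spark-sql -e "SELECT 'Hello World !!'"
```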

Author: hyukjinkwon <gurwls...@gmail.com>

Closes #19808 from HyukjinKwon/spark-sql-cmd.


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/a1877f45
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/a1877f45
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/a1877f45

Branch: refs/heads/master
Commit: a1877f45c3451d18879083ed9b71dd9d5f583f1c
Parents: efd0036
Author: hyukjinkwon <gurwls...@gmail.com>
Authored: Fri Nov 24 19:55:26 2017 +0100
Committer: Wenchen Fan <wenc...@databricks.com>
Committed: Fri Nov 24 19:55:26 2017 +0100

----------------------------------------------------------------------
 bin/find-spark-home.cmd |  2 +-
 bin/run-example.cmd     |  2 +-
 bin/spark-sql.cmd       | 25 +++++++++++++++++++++++++
 bin/spark-sql2.cmd      | 25 +++++++++++++++++++++++++
 bin/sparkR2.cmd         |  3 +--
 5 files changed, 53 insertions(+), 4 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/a1877f45/bin/find-spark-home.cmd
----------------------------------------------------------------------
diff --git a/bin/find-spark-home.cmd b/bin/find-spark-home.cmd
index c75e7ee..6025f67 100644
--- a/bin/find-spark-home.cmd
+++ b/bin/find-spark-home.cmd
@@ -32,7 +32,7 @@ if not "x%PYSPARK_PYTHON%"=="x" (
 )
 
 rem If there is python installed, trying to use the root dir as SPARK_HOME
-where %PYTHON_RUNNER% > nul 2>$1
+where %PYTHON_RUNNER% > nul 2>&1
 if %ERRORLEVEL% neq 0 (
   if not exist %PYTHON_RUNNER% (
     if "x%SPARK_HOME%"=="x" (

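The hunk above fixes a typo in the stderr redirection: in cmd, `2>&1` sends stderr to the same place as stdout, whereas the previous `2>$1` would redirect stderr to a file literally named `$1`. A minimal stand-alone illustration of the corrected pattern (hypothetical snippet, not from this patch):

```cmd
rem Discard both stdout and stderr; only the exit code matters.
where python > nul 2>&1
if %ERRORLEVEL% neq 0 echo python was not found on PATH
```
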
http://git-wip-us.apache.org/repos/asf/spark/blob/a1877f45/bin/run-example.cmd
----------------------------------------------------------------------
diff --git a/bin/run-example.cmd b/bin/run-example.cmd
index cc6b234..2dd396e 100644
--- a/bin/run-example.cmd
+++ b/bin/run-example.cmd
@@ -20,7 +20,7 @@ rem
 rem Figure out where the Spark framework is installed
 call "%~dp0find-spark-home.cmd"
 
-set _SPARK_CMD_USAGE=Usage: ./bin/run-example [options] example-class [example args]
+set _SPARK_CMD_USAGE=Usage: .\bin\run-example [options] example-class [example args]
 
 rem The outermost quotes are used to prevent Windows command line parse error
 rem when there are some quotes in parameters, see SPARK-21877.

http://git-wip-us.apache.org/repos/asf/spark/blob/a1877f45/bin/spark-sql.cmd
----------------------------------------------------------------------
diff --git a/bin/spark-sql.cmd b/bin/spark-sql.cmd
new file mode 100644
index 0000000..919e321
--- /dev/null
+++ b/bin/spark-sql.cmd
@@ -0,0 +1,25 @@
+@echo off
+
+rem
+rem Licensed to the Apache Software Foundation (ASF) under one or more
+rem contributor license agreements.  See the NOTICE file distributed with
+rem this work for additional information regarding copyright ownership.
+rem The ASF licenses this file to You under the Apache License, Version 2.0
+rem (the "License"); you may not use this file except in compliance with
+rem the License.  You may obtain a copy of the License at
+rem
+rem    http://www.apache.org/licenses/LICENSE-2.0
+rem
+rem Unless required by applicable law or agreed to in writing, software
+rem distributed under the License is distributed on an "AS IS" BASIS,
+rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+rem See the License for the specific language governing permissions and
+rem limitations under the License.
+rem
+
+rem This is the entry point for running SparkSQL. To avoid polluting the
+rem environment, it just launches a new cmd to do the real work.
+
+rem The outermost quotes are used to prevent Windows command line parse error
+rem when there are some quotes in parameters, see SPARK-21877.
+cmd /V /E /C ""%~dp0spark-sql2.cmd" %*"

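The nested quoting on the last line follows the pattern already used by the other Windows launcher scripts: the inner quotes protect the path expanded from `%~dp0` (which may contain spaces), and the outermost pair keeps `cmd /C` from mangling quotes inside the forwarded arguments (see SPARK-21877). A sketch of the effect, assuming a hypothetical install path with a space in it:

```cmd
rem With Spark installed under "C:\Program Files\spark" (hypothetical path),
rem invoking  .\bin\spark-sql -e "SELECT 1"  effectively executes:
cmd /V /E /C ""C:\Program Files\spark\bin\spark-sql2.cmd" -e "SELECT 1""
```
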
http://git-wip-us.apache.org/repos/asf/spark/blob/a1877f45/bin/spark-sql2.cmd
----------------------------------------------------------------------
diff --git a/bin/spark-sql2.cmd b/bin/spark-sql2.cmd
new file mode 100644
index 0000000..c34a3c5
--- /dev/null
+++ b/bin/spark-sql2.cmd
@@ -0,0 +1,25 @@
+@echo off
+
+rem
+rem Licensed to the Apache Software Foundation (ASF) under one or more
+rem contributor license agreements.  See the NOTICE file distributed with
+rem this work for additional information regarding copyright ownership.
+rem The ASF licenses this file to You under the Apache License, Version 2.0
+rem (the "License"); you may not use this file except in compliance with
+rem the License.  You may obtain a copy of the License at
+rem
+rem    http://www.apache.org/licenses/LICENSE-2.0
+rem
+rem Unless required by applicable law or agreed to in writing, software
+rem distributed under the License is distributed on an "AS IS" BASIS,
+rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+rem See the License for the specific language governing permissions and
+rem limitations under the License.
+rem
+
+rem Figure out where the Spark framework is installed
+call "%~dp0find-spark-home.cmd"
+
+set _SPARK_CMD_USAGE=Usage: .\bin\spark-sql [options] [cli option]
+
+call "%SPARK_HOME%\bin\spark-submit2.cmd" --class 
org.apache.spark.sql.hive.thriftserver.SparkSQLCLIDriver %*

http://git-wip-us.apache.org/repos/asf/spark/blob/a1877f45/bin/sparkR2.cmd
----------------------------------------------------------------------
diff --git a/bin/sparkR2.cmd b/bin/sparkR2.cmd
index b48bea3..446f0c3 100644
--- a/bin/sparkR2.cmd
+++ b/bin/sparkR2.cmd
@@ -21,6 +21,5 @@ rem Figure out where the Spark framework is installed
 call "%~dp0find-spark-home.cmd"
 
 call "%SPARK_HOME%\bin\load-spark-env.cmd"
-
-
+set _SPARK_CMD_USAGE=Usage: .\bin\sparkR [options]
 call "%SPARK_HOME%\bin\spark-submit2.cmd" sparkr-shell-main %*

