Repository: spark
Updated Branches:
  refs/heads/master 1b9001f78 -> 0c681dd6b


[EC2] don't duplicate default values

This PR makes two minor changes to the `spark-ec2` script:

1. The script's input parameter default values are duplicated in the help
text, which is unnecessary. This PR replaces the duplicated values with the
`%default` placeholder, which `optparse` expands automatically when
rendering help.
2. The default Spark version currently has to be updated by hand during each
release, which is an error-prone process. This PR moves that default into a
single, easy-to-spot module-level constant, `DEFAULT_SPARK_VERSION`. (A
minimal sketch of both techniques follows below.)
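
As a standalone illustration (not part of the patch itself), here is a
minimal `optparse` sketch combining both techniques; the option name and
default value are placeholders chosen to mirror the diff:

    from optparse import OptionParser

    # Single place to bump the default at release time.
    DEFAULT_SPARK_VERSION = "1.0.0"

    parser = OptionParser()
    parser.add_option(
        "-v", "--spark-version", default=DEFAULT_SPARK_VERSION,
        # optparse replaces the literal string "%default" in help text
        # with the option's default value, so the value is never
        # duplicated by hand.
        help="Version of Spark to use (default: %default)")

    (opts, args) = parser.parse_args()

Running this with --help would print "Version of Spark to use
(default: 1.0.0)", picking up whatever DEFAULT_SPARK_VERSION is set to.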

Author: Nicholas Chammas <nicholas.cham...@gmail.com>

Closes #2290 from nchammas/spark-ec2-default-version and squashes the following 
commits:

0c6d3bb [Nicholas Chammas] don't duplicate default values


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/0c681dd6
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/0c681dd6
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/0c681dd6

Branch: refs/heads/master
Commit: 0c681dd6b24431eb35770884e50f22ebaeaade33
Parents: 1b9001f
Author: Nicholas Chammas <nicholas.cham...@gmail.com>
Authored: Sat Sep 6 14:39:29 2014 -0700
Committer: Josh Rosen <joshro...@apache.org>
Committed: Sat Sep 6 14:39:29 2014 -0700

----------------------------------------------------------------------
 ec2/spark_ec2.py | 24 +++++++++++++-----------
 1 file changed, 13 insertions(+), 11 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/0c681dd6/ec2/spark_ec2.py
----------------------------------------------------------------------
diff --git a/ec2/spark_ec2.py b/ec2/spark_ec2.py
index 8ec88d9..bfd0759 100755
--- a/ec2/spark_ec2.py
+++ b/ec2/spark_ec2.py
@@ -38,6 +38,8 @@ import boto
 from boto.ec2.blockdevicemapping import BlockDeviceMapping, BlockDeviceType, EBSBlockDeviceType
 from boto import ec2
 
+DEFAULT_SPARK_VERSION = "1.0.0"
+
 # A URL prefix from which to fetch AMI information
 AMI_PREFIX = "https://raw.github.com/mesos/spark-ec2/v2/ami-list"
 
@@ -57,10 +59,10 @@ def parse_args():
         help="Show this help message and exit")
     parser.add_option(
         "-s", "--slaves", type="int", default=1,
-        help="Number of slaves to launch (default: 1)")
+        help="Number of slaves to launch (default: %default)")
     parser.add_option(
         "-w", "--wait", type="int", default=120,
-        help="Seconds to wait for nodes to start (default: 120)")
+        help="Seconds to wait for nodes to start (default: %default)")
     parser.add_option(
         "-k", "--key-pair",
         help="Key pair to use on instances")
@@ -69,7 +71,7 @@ def parse_args():
         help="SSH private key file to use for logging into instances")
     parser.add_option(
         "-t", "--instance-type", default="m1.large",
-        help="Type of instance to launch (default: m1.large). " +
+        help="Type of instance to launch (default: %default). " +
              "WARNING: must be 64-bit; small instances won't work")
     parser.add_option(
         "-m", "--master-instance-type", default="",
@@ -84,15 +86,15 @@ def parse_args():
              "between zones applies)")
     parser.add_option("-a", "--ami", help="Amazon Machine Image ID to use")
     parser.add_option(
-        "-v", "--spark-version", default="1.0.0",
-        help="Version of Spark to use: 'X.Y.Z' or a specific git hash")
+        "-v", "--spark-version", default=DEFAULT_SPARK_VERSION,
+        help="Version of Spark to use: 'X.Y.Z' or a specific git hash 
(default: %default)")
     parser.add_option(
         "--spark-git-repo",
         default="https://github.com/apache/spark";,
         help="Github repo from which to checkout supplied commit hash")
     parser.add_option(
         "--hadoop-major-version", default="1",
-        help="Major version of Hadoop (default: 1)")
+        help="Major version of Hadoop (default: %default)")
     parser.add_option(
         "-D", metavar="[ADDRESS:]PORT", dest="proxy_port",
         help="Use SSH dynamic port forwarding to create a SOCKS proxy at " +
@@ -116,21 +118,21 @@ def parse_args():
              "Only support up to 8 EBS volumes.")
     parser.add_option(
         "--swap", metavar="SWAP", type="int", default=1024,
-        help="Swap space to set up per node, in MB (default: 1024)")
+        help="Swap space to set up per node, in MB (default: %default)")
     parser.add_option(
         "--spot-price", metavar="PRICE", type="float",
         help="If specified, launch slaves as spot instances with the given " +
              "maximum price (in dollars)")
     parser.add_option(
         "--ganglia", action="store_true", default=True,
-        help="Setup Ganglia monitoring on cluster (default: on). NOTE: " +
+        help="Setup Ganglia monitoring on cluster (default: %default). NOTE: " 
+
              "the Ganglia page will be publicly accessible")
     parser.add_option(
         "--no-ganglia", action="store_false", dest="ganglia",
         help="Disable Ganglia monitoring for the cluster")
     parser.add_option(
         "-u", "--user", default="root",
-        help="The SSH user you want to connect as (default: root)")
+        help="The SSH user you want to connect as (default: %default)")
     parser.add_option(
         "--delete-groups", action="store_true", default=False,
         help="When destroying a cluster, delete the security groups that were 
created.")
@@ -139,7 +141,7 @@ def parse_args():
         help="Launch fresh slaves, but use an existing stopped master if 
possible")
     parser.add_option(
         "--worker-instances", type="int", default=1,
-        help="Number of instances per worker: variable SPARK_WORKER_INSTANCES 
(default: 1)")
+        help="Number of instances per worker: variable SPARK_WORKER_INSTANCES 
(default: %default)")
     parser.add_option(
         "--master-opts", type="string", default="",
         help="Extra options to give to master through SPARK_MASTER_OPTS 
variable " +
@@ -152,7 +154,7 @@ def parse_args():
         help="Use this prefix for the security group rather than the cluster 
name.")
     parser.add_option(
         "--authorized-address", type="string", default="0.0.0.0/0",
-        help="Address to authorize on created security groups (default: 
0.0.0.0/0)")
+        help="Address to authorize on created security groups (default: 
%default)")
     parser.add_option(
         "--additional-security-group", type="string", default="",
         help="Additional security group to place the machines in")

