@@ -66,13 +66,13 @@ class SparkContext(config: SparkConf)
6666 * <span class="badge" style="float: right; background-color: #44751E;">DEVELOPER API</span>
6767 * Alternative constructor for setting preferred locations where Spark will create executors.
6868 *
69- * @param preferredNodeLocationData used in YARN mode to select nodes to launch containers on. Can
69+ * @param preferredNodeLocationData used in YARN mode to select nodes to launch containers on. Can
7070 * be generated using [[org.apache.spark.scheduler.InputFormatInfo.computePreferredLocations]]
7171 * from a list of input files or InputFormats for the application.
7272 */
73- def this (config : SparkConf , preferredNodeLocationData : Map [String , Set [SplitInfo ]]) = {
74- this (config)
75- this .preferredNodeLocationData = preferredNodeLocationData
73+ def this (config : SparkConf , preferredNodeLocationData : Map [String , Set [SplitInfo ]]) = {
74+ this (config)
75+ this .preferredNodeLocationData = preferredNodeLocationData
7676 }
7777
7878 /**
@@ -85,6 +85,10 @@ class SparkContext(config: SparkConf)
8585 def this (master : String , appName : String , conf : SparkConf ) =
8686 this (SparkContext .updatedConf(conf, master, appName))
8787
88+ // NOTE: The below constructors could be consolidated using default arguments. Due to
89+ // Scala bug SI-8479, however, this causes the compile step to fail when generating docs.
90+ // Until we have a good workaround for that bug the constructors remain broken out.
91+
8892 /**
8993 * Alternative constructor that allows setting common Spark properties directly
9094 *
@@ -100,13 +104,42 @@ class SparkContext(config: SparkConf)
100104 appName : String ,
101105 sparkHome : String = null ,
102106 jars : Seq [String ] = Nil ,
103- environment : Map [String , String ] = Map (),
104- preferredNodeLocationData : Map [String , Set [SplitInfo ]] = Map ()) =
107+ environment : Map [String , String ] = Map ()) =
105108 {
106- this (SparkContext .updatedConf(new SparkConf (), master, appName, sparkHome, jars, environment),
107- preferredNodeLocationData)
109+ this (SparkContext .updatedConf(new SparkConf (), master, appName, sparkHome, jars, environment))
108110 }
109111
112+ /**
113+ * Alternative constructor that allows setting common Spark properties directly
114+ *
115+ * @param master Cluster URL to connect to (e.g. mesos://host:port, spark://host:port, local[4]).
116+ * @param appName A name for your application, to display on the cluster web UI.
117+ */
118+ def this (master : String , appName : String ) =
119+ this (master, appName, null , Nil , Map ())
120+
121+ /**
122+ * Alternative constructor that allows setting common Spark properties directly
123+ *
124+ * @param master Cluster URL to connect to (e.g. mesos://host:port, spark://host:port, local[4]).
125+ * @param appName A name for your application, to display on the cluster web UI.
126+ * @param sparkHome Location where Spark is installed on cluster nodes.
127+ */
128+ def this (master : String , appName : String , sparkHome : String ) =
129+ this (master, appName, sparkHome, Nil , Map ())
130+
131+ /**
132+ * Alternative constructor that allows setting common Spark properties directly
133+ *
134+ * @param master Cluster URL to connect to (e.g. mesos://host:port, spark://host:port, local[4]).
135+ * @param appName A name for your application, to display on the cluster web UI.
136+ * @param sparkHome Location where Spark is installed on cluster nodes.
137+ * @param jars Collection of JARs to send to the cluster. These can be paths on the local file
138+ * system or HDFS, HTTP, HTTPS, or FTP URLs.
139+ */
140+ def this (master : String , appName : String , sparkHome : String , jars : Seq [String ]) =
141+ this (master, appName, sparkHome, jars, Map ())
142+
110143 private [spark] val conf = config.clone()
111144
112145 /**
0 commit comments