@@ -223,26 +223,7 @@ class SparkContext(config: SparkConf) extends Logging {
223 223   private[spark] val ui = new SparkUI(this)
224224
225225 /** A default Hadoop Configuration for the Hadoop code (e.g. file systems) that we reuse. */
226 -    val hadoopConfiguration: Configuration = {
227 -      val hadoopConf = SparkHadoopUtil.get.newConfiguration()
228 -      // Explicitly check for S3 environment variables
229 -      if (System.getenv("AWS_ACCESS_KEY_ID") != null &&
230 -          System.getenv("AWS_SECRET_ACCESS_KEY") != null) {
231 -        hadoopConf.set("fs.s3.awsAccessKeyId", System.getenv("AWS_ACCESS_KEY_ID"))
232 -        hadoopConf.set("fs.s3n.awsAccessKeyId", System.getenv("AWS_ACCESS_KEY_ID"))
233 -        hadoopConf.set("fs.s3.awsSecretAccessKey", System.getenv("AWS_SECRET_ACCESS_KEY"))
234 -        hadoopConf.set("fs.s3n.awsSecretAccessKey", System.getenv("AWS_SECRET_ACCESS_KEY"))
235 -      }
236 -      // Copy any "spark.hadoop.foo=bar" system properties into conf as "foo=bar"
237 -      conf.getAll.foreach { case (key, value) =>
238 -        if (key.startsWith("spark.hadoop.")) {
239 -          hadoopConf.set(key.substring("spark.hadoop.".length), value)
240 -        }
241 -      }
242 -      val bufferSize = conf.get("spark.buffer.size", "65536")
243 -      hadoopConf.set("io.file.buffer.size", bufferSize)
244 -      hadoopConf
245 -    }
226 +    val hadoopConfiguration = SparkHadoopUtil.get.newConfiguration(conf)
246 227
247228 // Optionally log Spark events
248 229   private[spark] val eventLogger: Option[EventLoggingListener] = {
0 commit comments