4 changes: 2 additions & 2 deletions core/src/main/scala/org/apache/spark/SparkContext.scala
@@ -836,7 +836,7 @@ class SparkContext(config: SparkConf) extends Logging with ExecutorAllocationCli
       minPartitions: Int = defaultMinPartitions): RDD[String] = withScope {
     assertNotStopped()
     hadoopFile(path, classOf[TextInputFormat], classOf[LongWritable], classOf[Text],
-      minPartitions).map(pair => pair._2.toString)
+      minPartitions).map(pair => pair._2.toString).setName(path)
   }

/**
@@ -885,7 +885,7 @@ class SparkContext(config: SparkConf) extends Logging with ExecutorAllocationCli
       classOf[Text],
       classOf[Text],
       updateConf,
-      minPartitions).setName(path).map(record => (record._1.toString, record._2.toString))
+      minPartitions).map(record => (record._1.toString, record._2.toString)).setName(path)
   }

/**
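The ordering matters because every transformation returns a new, unnamed RDD: a name set on the intermediate hadoopFile RDD does not carry over to the map result that the caller actually receives, so setName(path) has to come last. A minimal standalone sketch of that behavior (the input path is hypothetical and never read, since no action is triggered):

import org.apache.spark.{SparkConf, SparkContext}

object SetNameOrderingSketch {
  def main(args: Array[String]): Unit = {
    val sc = new SparkContext(new SparkConf().setAppName("sketch").setMaster("local"))
    val path = "data/example.txt" // hypothetical path, used only as a name

    // Naming before map: the name sticks to the intermediate RDD,
    // while the RDD handed back to the caller stays unnamed (null).
    val namedTooEarly = sc.textFile(path).setName(path).map(_.length)
    assert(namedTooEarly.name == null)

    // Naming after map: the caller-facing RDD carries the path.
    val namedLast = sc.textFile(path).map(_.length).setName(path)
    assert(namedLast.name == path)

    sc.stop()
  }
}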
25 changes: 25 additions & 0 deletions core/src/test/scala/org/apache/spark/SparkContextSuite.scala
@@ -274,6 +274,31 @@ class SparkContextSuite extends SparkFunSuite with LocalSparkContext {
}
}

test("Default path for file based RDDs is properly set (SPARK-12517)") {
sc = new SparkContext(new SparkConf().setAppName("test").setMaster("local"))

// Test filetextFile, wholeTextFiles, binaryFiles, hadoopFile and
// newAPIHadoopFile for setting the default path as the RDD name
val mockPath = "default/path/for/"

var targetPath = mockPath + "textFile"
assert(sc.textFile(targetPath).name === targetPath)

Review comment (Member): Could you compare with === instead of ==? We get better error messages from the === operator.

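For context (not part of the diff): with ScalaTest's ===, a failure message reports both operands, which is the reviewer's rationale. A hypothetical standalone suite showing the kind of message === produces:

import org.scalatest.FunSuite

// Hypothetical suite; the test fails on purpose to surface the message,
// e.g.: null did not equal "expected/path"
class EqualityOperatorSuite extends FunSuite {
  test("=== reports both operands on failure") {
    val name: String = null
    assert(name === "expected/path")
  }
}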
    targetPath = mockPath + "wholeTextFiles"
    assert(sc.wholeTextFiles(targetPath).name === targetPath)

    targetPath = mockPath + "binaryFiles"
    assert(sc.binaryFiles(targetPath).name === targetPath)

    targetPath = mockPath + "hadoopFile"
    assert(sc.hadoopFile(targetPath).name === targetPath)

    targetPath = mockPath + "newAPIHadoopFile"
    assert(sc.newAPIHadoopFile(targetPath).name === targetPath)

    sc.stop()
  }

test("calling multiple sc.stop() must not throw any exception") {
noException should be thrownBy {
sc = new SparkContext(new SparkConf().setAppName("test").setMaster("local"))
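After this change, the default name is visible anywhere RDD names surface: via rdd.name directly, and on the Storage tab of the web UI for persisted RDDs. A hypothetical spark-shell check (the HDFS path is illustrative):

scala> val rdd = sc.textFile("hdfs:///logs/2015-12-24")
scala> rdd.name
res0: String = hdfs:///logs/2015-12-24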