
Commit fec43fe

dongjoon-hyun authored and HyukjinKwon committed
[SPARK-19613][SS][TEST] Random.nextString is not safe for directory namePrefix
## What changes were proposed in this pull request?

`Random.nextString` is good for generating random string data, but it is not appropriate as a directory name prefix in `Utils.createDirectory(tempDir, Random.nextString(10))`. This PR uses a safer directory namePrefix.

```scala
scala> scala.util.Random.nextString(10)
res0: String = 馨쭔ᎰႻ穚䃈兩㻞藑並
```

```scala
StateStoreRDDSuite:
- versioning and immutability
- recovering from files
- usage with iterators - only gets and only puts
- preferred locations using StateStoreCoordinator *** FAILED ***
  java.io.IOException: Failed to create a temp directory (under /.../spark/sql/core/target/tmp/StateStoreRDDSuite8712796397908632676) after 10 attempts!
  at org.apache.spark.util.Utils$.createDirectory(Utils.scala:295)
  at org.apache.spark.sql.execution.streaming.state.StateStoreRDDSuite$$anonfun$13$$anonfun$apply$6.apply(StateStoreRDDSuite.scala:152)
  at org.apache.spark.sql.execution.streaming.state.StateStoreRDDSuite$$anonfun$13$$anonfun$apply$6.apply(StateStoreRDDSuite.scala:149)
  at org.apache.spark.sql.catalyst.util.package$.quietly(package.scala:42)
  at org.apache.spark.sql.execution.streaming.state.StateStoreRDDSuite$$anonfun$13.apply(StateStoreRDDSuite.scala:149)
  at org.apache.spark.sql.execution.streaming.state.StateStoreRDDSuite$$anonfun$13.apply(StateStoreRDDSuite.scala:149)
  ...
- distributed test *** FAILED ***
  java.io.IOException: Failed to create a temp directory (under /.../spark/sql/core/target/tmp/StateStoreRDDSuite8712796397908632676) after 10 attempts!
  at org.apache.spark.util.Utils$.createDirectory(Utils.scala:295)
```

## How was this patch tested?

Pass the existing tests in `StateStoreRDDSuite`.

Author: Dongjoon Hyun <[email protected]>

Closes #21446 from dongjoon-hyun/SPARK-19613.

(cherry picked from commit b31b587)
Signed-off-by: hyukjinkwon <[email protected]>
1 parent a9700cb commit fec43fe
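
For illustration only (not part of the commit), here is a minimal Scala sketch of why `Random.nextString` is risky for directory names and what filesystem-friendly prefixes look like. `SafePrefixSketch` is a hypothetical name used just for this example:

```scala
import java.util.UUID
import scala.util.Random

// Hypothetical demo object, not part of Spark.
object SafePrefixSketch {
  def main(args: Array[String]): Unit = {
    // Random.nextString draws arbitrary Unicode characters, many of which
    // filesystems reject or mangle when used in directory names.
    val unsafePrefix = Random.nextString(10)

    // Filesystem-friendly alternatives restricted to plain ASCII:
    val floatPrefix = Random.nextFloat.toString              // the approach taken by this patch
    val alnumPrefix = Random.alphanumeric.take(10).mkString  // random ASCII letters and digits
    val uuidPrefix  = UUID.randomUUID().toString             // random UUID

    println(s"unsafe prefix: $unsafePrefix")
    println(s"safe prefixes: $floatPrefix / $alnumPrefix / $uuidPrefix")
  }
}
```

The patch settles on `Random.nextFloat.toString`, which always renders as plain ASCII (digits, `.`, and occasionally `E`/`-`), so it is always a legal directory name component.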

File tree

1 file changed (+5 lines, -5 lines)

sql/core/src/test/scala/org/apache/spark/sql/execution/streaming/state/StateStoreRDDSuite.scala

Lines changed: 5 additions & 5 deletions
```diff
@@ -55,7 +55,7 @@ class StateStoreRDDSuite extends SparkFunSuite with BeforeAndAfter with BeforeAn
 
   test("versioning and immutability") {
     withSparkSession(SparkSession.builder.config(sparkConf).getOrCreate()) { spark =>
-      val path = Utils.createDirectory(tempDir, Random.nextString(10)).toString
+      val path = Utils.createDirectory(tempDir, Random.nextFloat.toString).toString
       val rdd1 = makeRDD(spark.sparkContext, Seq("a", "b", "a")).mapPartitionsWithStateStore(
         spark.sqlContext, operatorStateInfo(path, version = 0), keySchema, valueSchema, None)(
         increment)
@@ -73,7 +73,7 @@ class StateStoreRDDSuite extends SparkFunSuite with BeforeAndAfter with BeforeAn
   }
 
   test("recovering from files") {
-    val path = Utils.createDirectory(tempDir, Random.nextString(10)).toString
+    val path = Utils.createDirectory(tempDir, Random.nextFloat.toString).toString
 
     def makeStoreRDD(
         spark: SparkSession,
@@ -101,7 +101,7 @@ class StateStoreRDDSuite extends SparkFunSuite with BeforeAndAfter with BeforeAn
   test("usage with iterators - only gets and only puts") {
     withSparkSession(SparkSession.builder.config(sparkConf).getOrCreate()) { spark =>
       implicit val sqlContext = spark.sqlContext
-      val path = Utils.createDirectory(tempDir, Random.nextString(10)).toString
+      val path = Utils.createDirectory(tempDir, Random.nextFloat.toString).toString
       val opId = 0
 
       // Returns an iterator of the incremented value made into the store
@@ -149,7 +149,7 @@ class StateStoreRDDSuite extends SparkFunSuite with BeforeAndAfter with BeforeAn
     quietly {
       val queryRunId = UUID.randomUUID
       val opId = 0
-      val path = Utils.createDirectory(tempDir, Random.nextString(10)).toString
+      val path = Utils.createDirectory(tempDir, Random.nextFloat.toString).toString
 
       withSparkSession(SparkSession.builder.config(sparkConf).getOrCreate()) { spark =>
         implicit val sqlContext = spark.sqlContext
@@ -189,7 +189,7 @@ class StateStoreRDDSuite extends SparkFunSuite with BeforeAndAfter with BeforeAn
         .config(sparkConf.setMaster("local-cluster[2, 1, 1024]"))
         .getOrCreate()) { spark =>
       implicit val sqlContext = spark.sqlContext
-      val path = Utils.createDirectory(tempDir, Random.nextString(10)).toString
+      val path = Utils.createDirectory(tempDir, Random.nextFloat.toString).toString
       val opId = 0
       val rdd1 = makeRDD(spark.sparkContext, Seq("a", "b", "a")).mapPartitionsWithStateStore(
         sqlContext, operatorStateInfo(path, version = 0), keySchema, valueSchema, None)(increment)
```
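
To see why every affected test fails with "after 10 attempts", here is a simplified sketch of a bounded-retry temp-directory helper. It is an assumption-laden illustration, not Spark's actual `Utils.createDirectory` implementation, and `CreateDirectorySketch` is a hypothetical name:

```scala
import java.io.{File, IOException}
import java.util.UUID

// Simplified sketch of the failure mode (NOT Spark's actual Utils.createDirectory):
// retry a bounded number of times, appending a random suffix to the caller's prefix.
// If the prefix contains characters the filesystem rejects, every attempt fails and
// the loop gives up with the "after N attempts" error seen in the stack traces above.
object CreateDirectorySketch {
  def createDirectory(root: File, namePrefix: String, maxAttempts: Int = 10): File = {
    var attempts = 0
    while (attempts < maxAttempts) {
      attempts += 1
      val dir = new File(root, s"$namePrefix-${UUID.randomUUID()}")
      if (dir.mkdirs()) {
        return dir
      }
    }
    throw new IOException(
      s"Failed to create a temp directory (under $root) after $maxAttempts attempts!")
  }
}
```

With a prefix containing characters the local filesystem rejects, `mkdirs()` returns false on every attempt, so the loop exhausts its budget and throws the same kind of `IOException` reported in the test output above.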
