
Commit 1412abd (1 parent: a46dc70)

Commit message: fix

1 file changed: +5 -5 lines


sql/core/src/test/scala/org/apache/spark/sql/execution/streaming/state/StateStoreRDDSuite.scala

Lines changed: 5 additions & 5 deletions
@@ -55,7 +55,7 @@ class StateStoreRDDSuite extends SparkFunSuite with BeforeAndAfter with BeforeAn
 
   test("versioning and immutability") {
     withSparkSession(SparkSession.builder.config(sparkConf).getOrCreate()) { spark =>
-      val path = Utils.createDirectory(tempDir, Random.nextInt.abs.toString).toString
+      val path = Utils.createDirectory(tempDir, Random.nextFloat.toString).toString
       val rdd1 = makeRDD(spark.sparkContext, Seq("a", "b", "a")).mapPartitionsWithStateStore(
         spark.sqlContext, operatorStateInfo(path, version = 0), keySchema, valueSchema, None)(
         increment)
@@ -73,7 +73,7 @@ class StateStoreRDDSuite extends SparkFunSuite with BeforeAndAfter with BeforeAn
   }
 
   test("recovering from files") {
-    val path = Utils.createDirectory(tempDir, Random.nextInt.abs.toString).toString
+    val path = Utils.createDirectory(tempDir, Random.nextFloat.toString).toString
 
     def makeStoreRDD(
         spark: SparkSession,
@@ -101,7 +101,7 @@ class StateStoreRDDSuite extends SparkFunSuite with BeforeAndAfter with BeforeAn
   test("usage with iterators - only gets and only puts") {
     withSparkSession(SparkSession.builder.config(sparkConf).getOrCreate()) { spark =>
       implicit val sqlContext = spark.sqlContext
-      val path = Utils.createDirectory(tempDir, Random.nextInt.abs.toString).toString
+      val path = Utils.createDirectory(tempDir, Random.nextFloat.toString).toString
       val opId = 0
 
       // Returns an iterator of the incremented value made into the store
@@ -149,7 +149,7 @@ class StateStoreRDDSuite extends SparkFunSuite with BeforeAndAfter with BeforeAn
     quietly {
       val queryRunId = UUID.randomUUID
       val opId = 0
-      val path = Utils.createDirectory(tempDir, Random.nextInt.abs.toString).toString
+      val path = Utils.createDirectory(tempDir, Random.nextFloat.toString).toString
 
       withSparkSession(SparkSession.builder.config(sparkConf).getOrCreate()) { spark =>
         implicit val sqlContext = spark.sqlContext
@@ -189,7 +189,7 @@ class StateStoreRDDSuite extends SparkFunSuite with BeforeAndAfter with BeforeAn
         .config(sparkConf.setMaster("local-cluster[2, 1, 1024]"))
         .getOrCreate()) { spark =>
       implicit val sqlContext = spark.sqlContext
-      val path = Utils.createDirectory(tempDir, Random.nextInt.abs.toString).toString
+      val path = Utils.createDirectory(tempDir, Random.nextFloat.toString).toString
       val opId = 0
       val rdd1 = makeRDD(spark.sparkContext, Seq("a", "b", "a")).mapPartitionsWithStateStore(
         sqlContext, operatorStateInfo(path, version = 0), keySchema, valueSchema, None)(increment)
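
Note on the change itself: every hunk replaces the random suffix used for the test's checkpoint directory, switching from Random.nextInt.abs.toString to Random.nextFloat.toString. The commit message ("fix") does not state the motivation, so the snippet below is only an illustrative sketch, not part of the patch: it compares the two expressions using plain scala.util.Random. One known quirk of the old form is that math.abs(Int.MinValue) is still Int.MinValue, so the old suffix could, very rarely, still be negative, while the new one is always a decimal in [0, 1).

    import scala.util.Random

    // Standalone sketch (not from the patch): compare the directory-name
    // suffixes produced by the old and new expressions in the test suite.
    object SuffixDemo {
      def main(args: Array[String]): Unit = {
        // Old expression: a random Int made non-negative, e.g. "1834729384".
        // Corner case: Int.MinValue.abs == Int.MinValue (about 1 in 2^32 draws),
        // so the suffix could still come out as "-2147483648".
        val oldSuffix = Random.nextInt.abs.toString

        // New expression: a random Float in [0, 1), e.g. "0.7356211".
        val newSuffix = Random.nextFloat.toString

        println(s"old-style suffix: $oldSuffix")
        println(s"new-style suffix: $newSuffix")
      }
    }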
