@@ -48,7 +48,7 @@ class BlockManagerInfoSuite extends SparkFunSuite {
     assert(bmInfo.blocks.asScala ===
       Map(broadcastId -> BlockStatus(StorageLevel.MEMORY_AND_DISK, 0, 100)))
     assert(bmInfo.exclusiveCachedBlocks.isEmpty)
-    assert(bmInfo.remainingMem == 29800)
+    assert(bmInfo.remainingMem === 29800)
   }
 
   testWithShuffleServiceOnOff("RDD block with MEMORY_ONLY") { (svcEnabled, bmInfo) =>
@@ -57,7 +57,7 @@ class BlockManagerInfoSuite extends SparkFunSuite {
     assert(bmInfo.blocks.asScala ===
       Map(rddId -> BlockStatus(StorageLevel.MEMORY_ONLY, 200, 0)))
     assert(bmInfo.exclusiveCachedBlocks === Set(rddId))
-    assert(bmInfo.remainingMem == 29800)
+    assert(bmInfo.remainingMem === 29800)
     if (svcEnabled) {
       assert(bmInfo.externalShuffleServiceBlockStatus.get.isEmpty)
     }
@@ -71,7 +71,7 @@ class BlockManagerInfoSuite extends SparkFunSuite {
       Map(rddId -> BlockStatus(StorageLevel.MEMORY_AND_DISK, 0, 400)))
     val exclusiveCachedBlocksForOneMemoryOnly = if (svcEnabled) Set() else Set(rddId)
     assert(bmInfo.exclusiveCachedBlocks === exclusiveCachedBlocksForOneMemoryOnly)
-    assert(bmInfo.remainingMem == 29800)
+    assert(bmInfo.remainingMem === 29800)
     if (svcEnabled) {
       assert(bmInfo.externalShuffleServiceBlockStatus.get.asScala ===
         Map(rddId -> BlockStatus(StorageLevel.MEMORY_AND_DISK, 0, 400)))
@@ -85,7 +85,7 @@ class BlockManagerInfoSuite extends SparkFunSuite {
       Map(rddId -> BlockStatus(StorageLevel.DISK_ONLY, 0, 200)))
     val exclusiveCachedBlocksForOneMemoryOnly = if (svcEnabled) Set() else Set(rddId)
     assert(bmInfo.exclusiveCachedBlocks === exclusiveCachedBlocksForOneMemoryOnly)
-    assert(bmInfo.remainingMem == 30000)
+    assert(bmInfo.remainingMem === 30000)
     if (svcEnabled) {
       assert(bmInfo.externalShuffleServiceBlockStatus.get.asScala ===
         Map(rddId -> BlockStatus(StorageLevel.DISK_ONLY, 0, 200)))
@@ -97,16 +97,16 @@ class BlockManagerInfoSuite extends SparkFunSuite {
     bmInfo.updateBlockInfo(rddId, StorageLevel.MEMORY_ONLY, memSize = 200, 0)
     assert(bmInfo.blocks.asScala === Map(rddId -> BlockStatus(StorageLevel.MEMORY_ONLY, 200, 0)))
     assert(bmInfo.exclusiveCachedBlocks === Set(rddId))
-    assert(bmInfo.remainingMem == 29800)
+    assert(bmInfo.remainingMem === 29800)
     if (svcEnabled) {
-      assert(bmInfo.externalShuffleServiceBlockStatus.get.asScala == Map())
+      assert(bmInfo.externalShuffleServiceBlockStatus.get.isEmpty)
     }
 
     bmInfo.updateBlockInfo(rddId, StorageLevel.DISK_ONLY, memSize = 0, diskSize = 200)
     assert(bmInfo.blocks.asScala === Map(rddId -> BlockStatus(StorageLevel.DISK_ONLY, 0, 200)))
     val exclusiveCachedBlocksForNoMemoryOnly = if (svcEnabled) Set() else Set(rddId)
     assert(bmInfo.exclusiveCachedBlocks === exclusiveCachedBlocksForNoMemoryOnly)
-    assert(bmInfo.remainingMem == 30000)
+    assert(bmInfo.remainingMem === 30000)
     if (svcEnabled) {
       assert(bmInfo.externalShuffleServiceBlockStatus.get.asScala ===
         Map(rddId -> BlockStatus(StorageLevel.DISK_ONLY, 0, 200)))
@@ -116,11 +116,10 @@ class BlockManagerInfoSuite extends SparkFunSuite {
   testWithShuffleServiceOnOff("using invalid StorageLevel") { (svcEnabled, bmInfo) =>
     val rddId: BlockId = RDDBlockId(0, 0)
     bmInfo.updateBlockInfo(rddId, StorageLevel.DISK_ONLY, memSize = 0, diskSize = 200)
-    assert(bmInfo.blocks.asScala
-      === Map(rddId -> BlockStatus(StorageLevel.DISK_ONLY, 0, 200)))
+    assert(bmInfo.blocks.asScala === Map(rddId -> BlockStatus(StorageLevel.DISK_ONLY, 0, 200)))
     val exclusiveCachedBlocksForOneMemoryOnly = if (svcEnabled) Set() else Set(rddId)
     assert(bmInfo.exclusiveCachedBlocks === exclusiveCachedBlocksForOneMemoryOnly)
-    assert(bmInfo.remainingMem == 30000)
+    assert(bmInfo.remainingMem === 30000)
     if (svcEnabled) {
       assert(bmInfo.externalShuffleServiceBlockStatus.get.asScala ===
         Map(rddId -> BlockStatus(StorageLevel.DISK_ONLY, 0, 200)))
@@ -129,20 +128,19 @@ class BlockManagerInfoSuite extends SparkFunSuite {
     bmInfo.updateBlockInfo(rddId, StorageLevel.NONE, memSize = 0, diskSize = 200)
     assert(bmInfo.blocks.isEmpty)
     assert(bmInfo.exclusiveCachedBlocks.isEmpty)
-    assert(bmInfo.remainingMem == 30000)
+    assert(bmInfo.remainingMem === 30000)
     if (svcEnabled) {
-      assert(bmInfo.externalShuffleServiceBlockStatus.get.asScala === Map())
+      assert(bmInfo.externalShuffleServiceBlockStatus.get.isEmpty)
     }
   }
 
   testWithShuffleServiceOnOff("remove block") { (svcEnabled, bmInfo) =>
     val rddId: BlockId = RDDBlockId(0, 0)
     bmInfo.updateBlockInfo(rddId, StorageLevel.DISK_ONLY, memSize = 0, diskSize = 200)
-    assert(bmInfo.blocks.asScala
-      === Map(rddId -> BlockStatus(StorageLevel.DISK_ONLY, 0, 200)))
+    assert(bmInfo.blocks.asScala === Map(rddId -> BlockStatus(StorageLevel.DISK_ONLY, 0, 200)))
    val exclusiveCachedBlocksForOneMemoryOnly = if (svcEnabled) Set() else Set(rddId)
     assert(bmInfo.exclusiveCachedBlocks === exclusiveCachedBlocksForOneMemoryOnly)
-    assert(bmInfo.remainingMem == 30000)
+    assert(bmInfo.remainingMem === 30000)
     if (svcEnabled) {
       assert(bmInfo.externalShuffleServiceBlockStatus.get.asScala ===
         Map(rddId -> BlockStatus(StorageLevel.DISK_ONLY, 0, 200)))
@@ -151,7 +149,7 @@ class BlockManagerInfoSuite extends SparkFunSuite {
     bmInfo.removeBlock(rddId)
     assert(bmInfo.blocks.asScala.isEmpty)
     assert(bmInfo.exclusiveCachedBlocks.isEmpty)
-    assert(bmInfo.remainingMem == 30000)
+    assert(bmInfo.remainingMem === 30000)
     if (svcEnabled) {
       assert(bmInfo.externalShuffleServiceBlockStatus.get.isEmpty)
     }
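For context, the diff consistently switches plain == comparisons to ScalaTest's === and replaces comparisons against Map() with .isEmpty. Below is a minimal, standalone Scala sketch of that assertion style; the class and value names are illustrative only and are not part of the Spark suite.

import org.scalatest.funsuite.AnyFunSuite

// Illustrative sketch of the assertion style adopted in the diff above.
// `===` comes from org.scalatest.Assertions (mixed into AnyFunSuite); inside
// assert(...) it typically yields a failure message that shows both operands
// and it also allows a custom Equality or numeric tolerance if needed.
class AssertionStyleSketch extends AnyFunSuite {

  test("prefer === and isEmpty in assertions") {
    val remainingMem = 29800L
    val blocks = Map.empty[String, Long]

    assert(remainingMem === 29800L) // failure would report the actual value
    assert(blocks.isEmpty)          // clearer intent than comparing with Map()
  }
}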