
Commit 6a22c1a

fix scalastyle
1 parent: 2825a13 · commit: 6a22c1a


2 files changed: 15 additions, 15 deletions

Note: the removed (−) and added (+) lines in the diffs below carry identical visible text because the changes appear to be whitespace-only (parameter indentation and trailing whitespace), which this text rendering of the diff does not preserve.


core/src/main/scala/org/apache/spark/storage/BlockManagerMasterActor.scala

Lines changed: 10 additions & 10 deletions
@@ -313,8 +313,8 @@ class BlockManagerMasterActor(val isLocal: Boolean, conf: SparkConf, listenerBus
 }

 private[spark] case class BlockStatus(
-    storageLevel: StorageLevel,
-    memSize: Long,
+    storageLevel: StorageLevel,
+    memSize: Long,
     diskSize: Long,
     tachyonSize: Long)

@@ -339,14 +339,14 @@ private[spark] class BlockManagerInfo(
   }

   def updateBlockInfo(
-      blockId: BlockId,
-      storageLevel: StorageLevel,
+      blockId: BlockId,
+      storageLevel: StorageLevel,
       memSize: Long,
-      diskSize: Long,
+      diskSize: Long,
       tachyonSize: Long) {
-
+
     updateLastSeenMs()
-
+
     if (_blocks.containsKey(blockId)) {
       // The block exists on the slave already.
       val originalLevel: StorageLevel = _blocks.get(blockId).storageLevel
@@ -355,7 +355,7 @@ private[spark] class BlockManagerInfo(
         _remainingMem += memSize
       }
     }
-
+
     if (storageLevel.isValid) {
       /* isValid means it is either stored in-memory or on-disk.
        * But the memSize here indicates the data size in or dropped from memory,
@@ -391,8 +391,8 @@ private[spark] class BlockManagerInfo(
           Utils.bytesToString(_remainingMem)))
       }
       if (blockStatus.storageLevel.useDisk) {
-        logInfo("Removed %s on %s on disk (size: %s)".format(
-          blockId, blockManagerId.hostPort, Utils.bytesToString(blockStatus.diskSize)))
+        logInfo("Removed %s on %s on disk (size: %s)".format(
+          blockId, blockManagerId.hostPort, Utils.bytesToString(blockStatus.diskSize)))
       }
       if (blockStatus.storageLevel.useTachyon) {
         logInfo("Removed %s on %s on tachyon (size: %s)".format(

core/src/main/scala/org/apache/spark/storage/StorageUtils.scala

Lines changed: 5 additions & 5 deletions
@@ -49,10 +49,10 @@ class StorageStatus(

 private[spark]
 class RDDInfo(
-    val id: Int,
-    val name: String,
-    val numPartitions: Int,
-    val storageLevel: StorageLevel) extends Ordered[RDDInfo] {
+    val id: Int,
+    val name: String,
+    val numPartitions: Int,
+    val storageLevel: StorageLevel) extends Ordered[RDDInfo] {

   var numCachedPartitions = 0
   var memSize = 0L
@@ -112,7 +112,7 @@ object StorageUtils {

     val rddStorageInfos = blockStatusMap.flatMap { case (rddId, blocks) =>
       // Add up memory, disk and Tachyon sizes
-      val persistedBlocks =
+      val persistedBlocks =
        blocks.filter { status => status.memSize + status.diskSize + status.tachyonSize > 0 }
       val memSize = persistedBlocks.map(_.memSize).reduceOption(_ + _).getOrElse(0L)
       val diskSize = persistedBlocks.map(_.diskSize).reduceOption(_ + _).getOrElse(0L)
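The hunk above touches the aggregation that sums per-block sizes with reduceOption(_ + _).getOrElse(0L), a pattern that returns 0 instead of throwing when no blocks survive the filter. A standalone sketch of that pattern follows, using a simplified stand-in type (SimpleBlockStatus is hypothetical; the real code works on Spark's BlockStatus values):

// Simplified stand-in for the per-block status records summed in StorageUtils;
// the field names mirror the diff above, but the class itself is made up for this sketch.
case class SimpleBlockStatus(memSize: Long, diskSize: Long, tachyonSize: Long)

object StorageSumExample extends App {
  val blocks = Seq(
    SimpleBlockStatus(memSize = 100L, diskSize = 0L, tachyonSize = 0L),
    SimpleBlockStatus(memSize = 0L, diskSize = 0L, tachyonSize = 0L), // not persisted anywhere
    SimpleBlockStatus(memSize = 50L, diskSize = 200L, tachyonSize = 0L))

  // Keep only blocks that are actually persisted somewhere, as the filter line above does.
  val persistedBlocks =
    blocks.filter { status => status.memSize + status.diskSize + status.tachyonSize > 0 }

  // reduceOption(_ + _).getOrElse(0L) sums safely: an empty collection yields 0L
  // rather than the exception a bare reduce would throw.
  val memSize = persistedBlocks.map(_.memSize).reduceOption(_ + _).getOrElse(0L)
  val diskSize = persistedBlocks.map(_.diskSize).reduceOption(_ + _).getOrElse(0L)

  println(s"memSize=$memSize diskSize=$diskSize") // memSize=150 diskSize=200
}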

0 commit comments
