
Commit 090544a

Privatize methods
1 parent 13920c9 commit 090544a
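
This commit narrows six helper members of ExternalAppendOnlyMap from the default public visibility to private. The enclosing class is already `private[spark]`, so none of these were user-facing API, but marking them `private` also hides them from the rest of the org.apache.spark package. A minimal sketch of that distinction (class and method names here are illustrative, not from the Spark codebase):

package org.apache.spark.example

// `private[spark]` makes the class visible anywhere under org.apache.spark,
// while a plain `private` member is visible only inside the class itself.
private[spark] class Widget {
  private def helper(): Int = 42     // internal to Widget only
  def total(): Int = helper() + 1    // callable from other spark-internal code
}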

File tree

1 file changed: +6 -6 lines changed

core/src/main/scala/org/apache/spark/util/collection/ExternalAppendOnlyMap.scala

Lines changed: 6 additions & 6 deletions
@@ -242,7 +242,7 @@ private[spark] class ExternalAppendOnlyMap[K, V, C](
      * In the event of key hash collisions, this ensures no pairs are hidden from being merged.
      * Assume the given iterator is in sorted order.
      */
-    def getMorePairs(it: Iterator[(K, C)]): ArrayBuffer[(K, C)] = {
+    private def getMorePairs(it: Iterator[(K, C)]): ArrayBuffer[(K, C)] = {
       val kcPairs = new ArrayBuffer[(K, C)]
       if (it.hasNext) {
         var kc = it.next()
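
For context, getMorePairs drains every pair whose key hash matches the hash of the first pair, so pairs that collide on hash are never split across reads and none is hidden from a merge. A standalone sketch of that pattern, assuming a hash-sorted BufferedIterator (names are illustrative, not the Spark internals):

import scala.collection.mutable.ArrayBuffer

// Collect all leading pairs sharing the first pair's key hash. Because the
// iterator is sorted by key hash, equal hashes are contiguous.
def pairsWithLeadingHash[K, C](it: BufferedIterator[(K, C)]): ArrayBuffer[(K, C)] = {
  val pairs = new ArrayBuffer[(K, C)]
  if (it.hasNext) {
    val minHash = it.head._1.hashCode()
    while (it.hasNext && it.head._1.hashCode() == minHash) {
      pairs += it.next()
    }
  }
  pairs
}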
@@ -260,7 +260,7 @@ private[spark] class ExternalAppendOnlyMap[K, V, C](
      * If the given buffer contains a value for the given key, merge that value into
      * baseCombiner and remove the corresponding (K, C) pair from the buffer
      */
-    def mergeIfKeyExists(key: K, baseCombiner: C, buffer: StreamBuffer): C = {
+    private def mergeIfKeyExists(key: K, baseCombiner: C, buffer: StreamBuffer): C = {
       var i = 0
       while (i < buffer.pairs.size) {
         val (k, c) = buffer.pairs(i)
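
mergeIfKeyExists then handles exact key equality within a buffer of hash-colliding pairs: if the buffer holds a combiner for the key, it is merged into the running combiner and removed so it cannot be merged twice. A hedged sketch, with mergeCombiners standing in for the user-supplied merge function:

import scala.collection.mutable.ArrayBuffer

// Illustrative: fold the buffered combiner for `key` (if any) into `base`.
def mergeIfKeyExistsSketch[K, C](
    key: K,
    base: C,
    pairs: ArrayBuffer[(K, C)],
    mergeCombiners: (C, C) => C): C = {
  var i = 0
  while (i < pairs.size) {
    val (k, c) = pairs(i)
    if (k == key) {
      pairs.remove(i)              // drop the pair so it is merged only once
      return mergeCombiners(base, c)
    }
    i += 1
  }
  base                             // key not present; combiner unchanged
}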
@@ -320,7 +320,7 @@ private[spark] class ExternalAppendOnlyMap[K, V, C](
      *
      * StreamBuffers are ordered by the minimum key hash found across all of their own pairs.
      */
-    case class StreamBuffer(iterator: Iterator[(K, C)], pairs: ArrayBuffer[(K, C)])
+    private case class StreamBuffer(iterator: Iterator[(K, C)], pairs: ArrayBuffer[(K, C)])
       extends Comparable[StreamBuffer] {

       def minKeyHash: Int = {
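
Implementing Comparable lets StreamBuffers sit in a JVM priority queue ordered by their smallest key hash, so the buffer holding the globally smallest hash is always dequeued and merged next. A minimal sketch of that ordering (the class here is illustrative):

import scala.collection.mutable.ArrayBuffer

// Buffers compare by the minimum key hash among their pairs, so a
// java.util.PriorityQueue polls the buffer with the next hash to merge.
case class HashBufferSketch[K, C](pairs: ArrayBuffer[(K, C)])
    extends Comparable[HashBufferSketch[K, C]] {
  def minKeyHash: Int = pairs.map(_._1.hashCode()).min
  override def compareTo(other: HashBufferSketch[K, C]): Int =
    java.lang.Integer.compare(minKeyHash, other.minKeyHash)
}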
@@ -358,7 +358,7 @@ private[spark] class ExternalAppendOnlyMap[K, V, C](
     /**
      * Construct a stream that reads only from the next batch
      */
-    def nextBatchStream(): InputStream = {
+    private def nextBatchStream(): InputStream = {
       if (batchSizes.length > 0) {
         ByteStreams.limit(bufferedStream, batchSizes.remove(0))
       } else {
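
nextBatchStream uses Guava's ByteStreams.limit to expose only the next batch's bytes from the shared spill-file stream, based on the batch sizes recorded when the map was spilled; this keeps the deserializer from reading past a batch boundary. A hedged sketch of the idea (parameter names are illustrative):

import java.io.InputStream
import scala.collection.mutable.ArrayBuffer
import com.google.common.io.ByteStreams

// Illustrative: cap reads at the next recorded batch boundary.
def nextBatchStreamSketch(
    underlying: InputStream,
    batchSizes: ArrayBuffer[Long]): InputStream = {
  if (batchSizes.nonEmpty) {
    ByteStreams.limit(underlying, batchSizes.remove(0))
  } else {
    underlying   // no batches left; callers will simply hit end-of-stream
  }
}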
@@ -373,7 +373,7 @@ private[spark] class ExternalAppendOnlyMap[K, V, C](
      * If the current batch is drained, construct a stream for the next batch and read from it.
      * If no more pairs are left, return null.
      */
-    def readNextItem(): (K, C) = {
+    private def readNextItem(): (K, C) = {
       try {
         val item = deserializeStream.readObject().asInstanceOf[(K, C)]
         objectsRead += 1
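
readNextItem deserializes one pair at a time and, once the current batch is drained, constructs a stream for the next batch and reads from it; hitting end-of-file means no pairs are left. A self-contained sketch of that control flow using plain Java serialization in place of Spark's serializer (all names are illustrative):

import java.io.{EOFException, InputStream, ObjectInputStream}

// Illustrative reader: `newBatchStream` yields a stream per batch, and
// `batchObjectCount` is the number of objects assumed written per batch.
class BatchReaderSketch[K, C](newBatchStream: () => InputStream, batchObjectCount: Int) {
  private var objectsRead = 0
  private var in = new ObjectInputStream(newBatchStream())

  def readNextItem(): (K, C) = {
    try {
      val item = in.readObject().asInstanceOf[(K, C)]
      objectsRead += 1
      if (objectsRead == batchObjectCount) {
        objectsRead = 0
        in = new ObjectInputStream(newBatchStream())  // roll to the next batch
      }
      item
    } catch {
      case _: EOFException => null  // no more pairs, as in the real method
    }
  }
}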
@@ -408,7 +408,7 @@ private[spark] class ExternalAppendOnlyMap[K, V, C](
     }

     // TODO: Ensure this gets called even if the iterator isn't drained.
-    def cleanup() {
+    private def cleanup() {
       deserializeStream.close()
       file.delete()
     }
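
The TODO above flags a real hazard: cleanup only runs on the end-of-file path, so an iterator abandoned early would leak a file handle and an on-disk spill file. One common guard is to make cleanup idempotent so it can also be invoked from an external safety net, sketched here with hypothetical names:

import java.io.{Closeable, File}

// Illustrative: idempotent cleanup callable from both the EOF path and an
// outer callback (e.g. on task completion) without double-closing.
class SpillHandleSketch(stream: Closeable, file: File) {
  private var cleaned = false
  def cleanup(): Unit = {
    if (!cleaned) {
      cleaned = true
      stream.close()
      file.delete()
    }
  }
}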
