Skip to content

Commit 9de1368

Browse files
uncleGencmonkey
authored and committed
[SPARK-19227][SPARK-19251] remove unused imports and outdated comments
## What changes were proposed in this pull request? Remove unused imports and outdated comments, and fix some minor code style issues. ## How was this patch tested? Existing unit tests. Author: uncleGen <[email protected]> Closes apache#16591 from uncleGen/SPARK-19227.
1 parent d74b3fc commit 9de1368

File tree

47 files changed

+25
-79
lines changed

Some content is hidden

Large Commits have some content hidden by default. Use the searchbox below for content that may be hidden.

47 files changed

+25
-79
lines changed

core/src/main/scala/org/apache/spark/SecurityManager.scala

Lines changed: 0 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -26,11 +26,9 @@ import javax.net.ssl._
2626
import com.google.common.hash.HashCodes
2727
import com.google.common.io.Files
2828
import org.apache.hadoop.io.Text
29-
import org.apache.hadoop.security.Credentials
3029

3130
import org.apache.spark.deploy.SparkHadoopUtil
3231
import org.apache.spark.internal.Logging
33-
import org.apache.spark.internal.config._
3432
import org.apache.spark.network.sasl.SecretKeyHolder
3533
import org.apache.spark.util.Utils
3634

core/src/main/scala/org/apache/spark/SparkContext.scala

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -19,7 +19,7 @@ package org.apache.spark
1919

2020
import java.io._
2121
import java.lang.reflect.Constructor
22-
import java.net.{URI}
22+
import java.net.URI
2323
import java.util.{Arrays, Locale, Properties, ServiceLoader, UUID}
2424
import java.util.concurrent.{ConcurrentHashMap, ConcurrentMap}
2525
import java.util.concurrent.atomic.{AtomicBoolean, AtomicInteger, AtomicReference}

core/src/main/scala/org/apache/spark/deploy/ExternalShuffleServiceSource.scala

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -19,7 +19,7 @@ package org.apache.spark.deploy
1919

2020
import javax.annotation.concurrent.ThreadSafe
2121

22-
import com.codahale.metrics.{Gauge, MetricRegistry}
22+
import com.codahale.metrics.MetricRegistry
2323

2424
import org.apache.spark.metrics.source.Source
2525
import org.apache.spark.network.shuffle.ExternalShuffleBlockHandler

core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -17,7 +17,7 @@
1717

1818
package org.apache.spark.deploy
1919

20-
import java.io.{File, IOException, PrintStream}
20+
import java.io.{File, IOException}
2121
import java.lang.reflect.{InvocationTargetException, Modifier, UndeclaredThrowableException}
2222
import java.net.URL
2323
import java.security.PrivilegedExceptionAction

core/src/main/scala/org/apache/spark/deploy/worker/ui/LogPage.scala

Lines changed: 0 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -22,8 +22,6 @@ import javax.servlet.http.HttpServletRequest
2222

2323
import scala.xml.{Node, Unparsed}
2424

25-
import com.google.common.cache.{CacheBuilder, CacheLoader, LoadingCache}
26-
2725
import org.apache.spark.internal.Logging
2826
import org.apache.spark.ui.{UIUtils, WebUIPage}
2927
import org.apache.spark.util.Utils

core/src/main/scala/org/apache/spark/internal/config/ConfigEntry.scala

Lines changed: 5 additions & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -17,12 +17,6 @@
1717

1818
package org.apache.spark.internal.config
1919

20-
import java.util.{Map => JMap}
21-
22-
import scala.util.matching.Regex
23-
24-
import org.apache.spark.SparkConf
25-
2620
/**
2721
* An entry contains all meta information for a configuration.
2822
*
@@ -34,7 +28,6 @@ import org.apache.spark.SparkConf
3428
* value declared as a string.
3529
*
3630
* @param key the key for the configuration
37-
* @param defaultValue the default value for the configuration
3831
* @param valueConverter how to convert a string to the value. It should throw an exception if the
3932
* string does not have the required format.
4033
* @param stringConverter how to convert a value to a string that the user can use it as a valid
@@ -76,7 +69,7 @@ private class ConfigEntryWithDefault[T] (
7669
stringConverter: T => String,
7770
doc: String,
7871
isPublic: Boolean)
79-
extends ConfigEntry(key, valueConverter, stringConverter, doc, isPublic) {
72+
extends ConfigEntry(key, valueConverter, stringConverter, doc, isPublic) {
8073

8174
override def defaultValue: Option[T] = Some(_defaultValue)
8275

@@ -95,7 +88,7 @@ private class ConfigEntryWithDefaultString[T] (
9588
stringConverter: T => String,
9689
doc: String,
9790
isPublic: Boolean)
98-
extends ConfigEntry(key, valueConverter, stringConverter, doc, isPublic) {
91+
extends ConfigEntry(key, valueConverter, stringConverter, doc, isPublic) {
9992

10093
override def defaultValue: Option[T] = Some(valueConverter(_defaultValue))
10194

@@ -118,8 +111,8 @@ private[spark] class OptionalConfigEntry[T](
118111
val rawStringConverter: T => String,
119112
doc: String,
120113
isPublic: Boolean)
121-
extends ConfigEntry[Option[T]](key, s => Some(rawValueConverter(s)),
122-
v => v.map(rawStringConverter).orNull, doc, isPublic) {
114+
extends ConfigEntry[Option[T]](key, s => Some(rawValueConverter(s)),
115+
v => v.map(rawStringConverter).orNull, doc, isPublic) {
123116

124117
override def defaultValueString: String = "<undefined>"
125118

@@ -137,7 +130,7 @@ private class FallbackConfigEntry[T] (
137130
doc: String,
138131
isPublic: Boolean,
139132
private[config] val fallback: ConfigEntry[T])
140-
extends ConfigEntry[T](key, fallback.valueConverter, fallback.stringConverter, doc, isPublic) {
133+
extends ConfigEntry[T](key, fallback.valueConverter, fallback.stringConverter, doc, isPublic) {
141134

142135
override def defaultValueString: String = s"<value of ${fallback.key}>"
143136

core/src/main/scala/org/apache/spark/internal/config/ConfigReader.scala

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -18,7 +18,6 @@
1818
package org.apache.spark.internal.config
1919

2020
import java.util.{Map => JMap}
21-
import java.util.regex.Pattern
2221

2322
import scala.collection.mutable.HashMap
2423
import scala.util.matching.Regex

core/src/main/scala/org/apache/spark/rpc/RpcTimeout.scala

Lines changed: 2 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -19,11 +19,10 @@ package org.apache.spark.rpc
1919

2020
import java.util.concurrent.TimeoutException
2121

22-
import scala.concurrent.{Await, Future}
22+
import scala.concurrent.Future
2323
import scala.concurrent.duration._
24-
import scala.util.control.NonFatal
2524

26-
import org.apache.spark.{SparkConf, SparkException}
25+
import org.apache.spark.SparkConf
2726
import org.apache.spark.util.{ThreadUtils, Utils}
2827

2928
/**

core/src/main/scala/org/apache/spark/scheduler/ResultTask.scala

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -24,7 +24,6 @@ import java.util.Properties
2424

2525
import org.apache.spark._
2626
import org.apache.spark.broadcast.Broadcast
27-
import org.apache.spark.executor.TaskMetrics
2827
import org.apache.spark.rdd.RDD
2928

3029
/**

core/src/main/scala/org/apache/spark/scheduler/ShuffleMapTask.scala

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -25,7 +25,6 @@ import scala.language.existentials
2525

2626
import org.apache.spark._
2727
import org.apache.spark.broadcast.Broadcast
28-
import org.apache.spark.executor.TaskMetrics
2928
import org.apache.spark.internal.Logging
3029
import org.apache.spark.rdd.RDD
3130
import org.apache.spark.shuffle.ShuffleWriter

0 commit comments

Comments
 (0)