Commit c5b0b29
[SPARK-6765] Enable scalastyle on test code.
Turn scalastyle on for all test code. Most of the violations have been resolved in my previous pull requests:

Core: #5484
SQL: #5412
MLlib: #5411
GraphX: #5410
Streaming: #5409

Author: Reynold Xin <[email protected]>

Closes #5486 from rxin/test-style-enable and squashes the following commits:

01683de [Reynold Xin] Fixed new code.
a4ab46e [Reynold Xin] Fixed tests.
20adbc8 [Reynold Xin] Missed one violation.
5e36521 [Reynold Xin] [SPARK-6765] Enable scalastyle on test code.
1 parent 77620be

10 files changed, +52 −40 lines changed
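Most of the substantive fixes below fall into two buckets: adding explicit result types to public test members, and wrapping lines that exceed the 100-character limit. A minimal before/after sketch of the return-type rule (hypothetical code, not taken from this commit):

object ReturnTypeRuleExample {
  // Before: the result type is inferred, which the enforced rule rejects
  // for public members.
  //   def mode = "binary"

  // After: the result type is spelled out.
  def mode: String = "binary"
}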

core/src/test/scala/org/apache/spark/deploy/ClientSuite.scala

Lines changed: 0 additions & 1 deletion
@@ -46,5 +46,4 @@ class ClientSuite extends FunSuite with Matchers {
     // Invalid syntax.
     ClientArguments.isValidJarUrl("hdfs:") should be (false)
   }
-
 }

dev/scalastyle

Lines changed: 3 additions & 2 deletions
@@ -18,9 +18,10 @@
 #
 
 echo -e "q\n" | build/sbt -Phive -Phive-thriftserver scalastyle > scalastyle.txt
+echo -e "q\n" | build/sbt -Phive -Phive-thriftserver test:scalastyle >> scalastyle.txt
 # Check style with YARN built too
-echo -e "q\n" | build/sbt -Pyarn -Phadoop-2.2 -Dhadoop.version=2.2.0 scalastyle \
-  >> scalastyle.txt
+echo -e "q\n" | build/sbt -Pyarn -Phadoop-2.2 -Dhadoop.version=2.2.0 scalastyle >> scalastyle.txt
+echo -e "q\n" | build/sbt -Pyarn -Phadoop-2.2 -Dhadoop.version=2.2.0 test:scalastyle >> scalastyle.txt
 
 ERRORS=$(cat scalastyle.txt | awk '{if($1~/error/)print}')
 rm scalastyle.txt
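For context, every sbt invocation above appends its report to scalastyle.txt, and the awk filter keeps lines whose first field contains "error"; the build then fails if any such lines exist. A rough Scala equivalent of that filter, as a sketch only (the real gate is the shell pipeline above):

import scala.io.Source

object StyleGateSketch {
  def main(args: Array[String]): Unit = {
    // Mirrors awk '{if($1~/error/)print}': keep lines whose first
    // whitespace-separated field contains "error".
    val errors = Source.fromFile("scalastyle.txt").getLines()
      .filter(_.trim.split("\\s+").headOption.exists(_.contains("error")))
      .toList
    errors.foreach(println)
    if (errors.nonEmpty) sys.exit(1)
  }
}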

mllib/src/test/scala/org/apache/spark/ml/feature/VectorIndexerSuite.scala

Lines changed: 2 additions & 1 deletion
@@ -150,7 +150,8 @@ class VectorIndexerSuite extends FunSuite with MLlibTestSparkContext {
       val vectorIndexer = getIndexer.setMaxCategories(maxCategories)
       val model = vectorIndexer.fit(data)
       val categoryMaps = model.categoryMaps
-      assert(categoryMaps.keys.toSet === categoricalFeatures) // Chose correct categorical features
+      // Chose correct categorical features
+      assert(categoryMaps.keys.toSet === categoricalFeatures)
       val transformed = model.transform(data).select("indexed")
       val indexedRDD: RDD[Vector] = transformed.map(_.getAs[Vector](0))
       val featureAttrs = AttributeGroup.fromStructField(transformed.schema("indexed"))

sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/CliSuite.scala

Lines changed: 7 additions & 8 deletions
@@ -1,13 +1,12 @@
 /*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
 *
- *     http://www.apache.org/licenses/LICENSE-2.0
+ *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,

sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/HiveThriftServer2Suites.scala

Lines changed: 4 additions & 3 deletions
@@ -18,6 +18,7 @@
 package org.apache.spark.sql.hive.thriftserver
 
 import java.io.File
+import java.net.URL
 import java.sql.{Date, DriverManager, Statement}
 
 import scala.collection.mutable.ArrayBuffer
@@ -41,7 +42,7 @@ import org.apache.spark.sql.hive.HiveShim
 import org.apache.spark.util.Utils
 
 object TestData {
-  def getTestDataFilePath(name: String) = {
+  def getTestDataFilePath(name: String): URL = {
     Thread.currentThread().getContextClassLoader.getResource(s"data/files/$name")
  }
 
@@ -50,7 +51,7 @@
 }
 
 class HiveThriftBinaryServerSuite extends HiveThriftJdbcTest {
-  override def mode = ServerMode.binary
+  override def mode: ServerMode.Value = ServerMode.binary
 
   private def withCLIServiceClient(f: ThriftCLIServiceClient => Unit): Unit = {
     // Transport creation logics below mimics HiveConnection.createBinaryTransport
@@ -337,7 +338,7 @@
 }
 
 class HiveThriftHttpServerSuite extends HiveThriftJdbcTest {
-  override def mode = ServerMode.http
+  override def mode: ServerMode.Value = ServerMode.http
 
   test("JDBC query execution") {
     withJdbcStatement { statement =>
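The `ServerMode.Value` annotations look unusual but follow from `ServerMode` being a Scala `Enumeration`, whose members all have type `Value`. A self-contained sketch of the pattern, assuming the enumeration is defined roughly like this (the definition itself is not part of this diff):

object ServerMode extends Enumeration {
  val binary, http = Value
}

abstract class JdbcTestSketch {
  // With the rule enabled, the member type must be written out; the
  // inferred type of ServerMode.binary is ServerMode.Value.
  def mode: ServerMode.Value
}

class BinarySuiteSketch extends JdbcTestSketch {
  override def mode: ServerMode.Value = ServerMode.binary
}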

sql/hive/src/test/scala/org/apache/spark/sql/hive/QueryPartitionSuite.scala

Lines changed: 16 additions & 14 deletions
@@ -17,15 +17,12 @@
 
 package org.apache.spark.sql.hive
 
-import java.io.File
-
 import com.google.common.io.Files
+
 import org.apache.spark.sql.{QueryTest, _}
 import org.apache.spark.sql.hive.test.TestHive
-import org.apache.spark.util.Utils
-/* Implicits */
 import org.apache.spark.sql.hive.test.TestHive._
-
+import org.apache.spark.util.Utils
 
 
 class QueryPartitionSuite extends QueryTest {
@@ -37,23 +34,28 @@ class QueryPartitionSuite extends QueryTest {
     testData.registerTempTable("testData")
 
     val tmpDir = Files.createTempDir()
-    //create the table for test
-    sql(s"CREATE TABLE table_with_partition(key int,value string) PARTITIONED by (ds string) location '${tmpDir.toURI.toString}' ")
-    sql("INSERT OVERWRITE TABLE table_with_partition partition (ds='1') SELECT key,value FROM testData")
-    sql("INSERT OVERWRITE TABLE table_with_partition partition (ds='2') SELECT key,value FROM testData")
-    sql("INSERT OVERWRITE TABLE table_with_partition partition (ds='3') SELECT key,value FROM testData")
-    sql("INSERT OVERWRITE TABLE table_with_partition partition (ds='4') SELECT key,value FROM testData")
+    // create the table for test
+    sql(s"CREATE TABLE table_with_partition(key int,value string) " +
+      s"PARTITIONED by (ds string) location '${tmpDir.toURI.toString}' ")
+    sql("INSERT OVERWRITE TABLE table_with_partition partition (ds='1') " +
+      "SELECT key,value FROM testData")
+    sql("INSERT OVERWRITE TABLE table_with_partition partition (ds='2') " +
+      "SELECT key,value FROM testData")
+    sql("INSERT OVERWRITE TABLE table_with_partition partition (ds='3') " +
+      "SELECT key,value FROM testData")
+    sql("INSERT OVERWRITE TABLE table_with_partition partition (ds='4') " +
+      "SELECT key,value FROM testData")
 
-    //test for the exist path
+    // test for the exist path
     checkAnswer(sql("select key,value from table_with_partition"),
       testData.toSchemaRDD.collect ++ testData.toSchemaRDD.collect
       ++ testData.toSchemaRDD.collect ++ testData.toSchemaRDD.collect)
 
-    //delect the path of one partition
+    // delete the path of one partition
    val folders = tmpDir.listFiles.filter(_.isDirectory)
    Utils.deleteRecursively(folders(0))
 
-    //test for affter delete the path
+    // test for after delete the path
     checkAnswer(sql("select key,value from table_with_partition"),
       testData.toSchemaRDD.collect ++ testData.toSchemaRDD.collect
       ++ testData.toSchemaRDD.collect)
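Apart from the comment typo fixes, these edits exist purely to satisfy the 100-character line limit: each long SQL literal is split into two concatenated string literals, which read the same to the SQL parser and change no behavior. A minimal sketch of the pattern (table and values are illustrative):

object LongLineSketch {
  val ds = "1" // hypothetical partition value
  // Two shorter literals joined with + produce the same string as one
  // long literal, keeping each source line under the limit.
  val insert: String =
    s"INSERT OVERWRITE TABLE table_with_partition partition (ds='$ds') " +
      "SELECT key,value FROM testData"
}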

streaming/src/test/scala/org/apache/spark/streaming/ui/StreamingJobProgressListenerSuite.scala

Lines changed: 2 additions & 2 deletions
@@ -21,14 +21,14 @@ import org.scalatest.Matchers
 
 import org.apache.spark.streaming.dstream.DStream
 import org.apache.spark.streaming.scheduler._
-import org.apache.spark.streaming.{Time, Milliseconds, TestSuiteBase}
+import org.apache.spark.streaming.{Duration, Time, Milliseconds, TestSuiteBase}
 
 class StreamingJobProgressListenerSuite extends TestSuiteBase with Matchers {
 
   val input = (1 to 4).map(Seq(_)).toSeq
   val operation = (d: DStream[Int]) => d.map(x => x)
 
-  override def batchDuration = Milliseconds(100)
+  override def batchDuration: Duration = Milliseconds(100)
 
   test("onBatchSubmitted, onBatchStarted, onBatchCompleted, " +
     "onReceiverStarted, onReceiverError, onReceiverStopped") {

yarn/src/test/scala/org/apache/spark/deploy/yarn/ClientSuite.scala

Lines changed: 13 additions & 6 deletions
@@ -232,19 +232,26 @@ class ClientSuite extends FunSuite with Matchers with BeforeAndAfterAll {
     testCode(conf)
   }
 
-  def newEnv = MutableHashMap[String, String]()
+  def newEnv: MutableHashMap[String, String] = MutableHashMap[String, String]()
 
-  def classpath(env: MutableHashMap[String, String]) = env(Environment.CLASSPATH.name).split(":|;|<CPS>")
+  def classpath(env: MutableHashMap[String, String]): Array[String] =
+    env(Environment.CLASSPATH.name).split(":|;|<CPS>")
 
-  def flatten(a: Option[Seq[String]], b: Option[Seq[String]]) = (a ++ b).flatten.toArray
+  def flatten(a: Option[Seq[String]], b: Option[Seq[String]]): Array[String] =
+    (a ++ b).flatten.toArray
 
-  def getFieldValue[A, B](clazz: Class[_], field: String, defaults: => B)(mapTo: A => B): B =
-    Try(clazz.getField(field)).map(_.get(null).asInstanceOf[A]).toOption.map(mapTo).getOrElse(defaults)
+  def getFieldValue[A, B](clazz: Class[_], field: String, defaults: => B)(mapTo: A => B): B = {
+    Try(clazz.getField(field))
+      .map(_.get(null).asInstanceOf[A])
+      .toOption
+      .map(mapTo)
+      .getOrElse(defaults)
+  }
 
   def getFieldValue2[A: ClassTag, A1: ClassTag, B](
       clazz: Class[_],
       field: String,
-      defaults: => B)(mapTo: A => B)(mapTo1: A1 => B) : B = {
+      defaults: => B)(mapTo: A => B)(mapTo1: A1 => B): B = {
     Try(clazz.getField(field)).map(_.get(null)).map {
       case v: A => mapTo(v)
       case v1: A1 => mapTo1(v1)
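`getFieldValue` reads a public static field reflectively and falls back to a default when the field is absent; the rewrite only breaks the call chain across lines for the length limit. A self-contained sketch of the same Try-based lookup, using `java.lang.Integer` as a stand-in target class:

import scala.util.Try

object FieldLookupSketch {
  // Read a static field by name, map it, or fall back to a default;
  // a missing field makes Try a Failure, so toOption yields None.
  def getFieldValue[A, B](clazz: Class[_], field: String, default: => B)(mapTo: A => B): B = {
    Try(clazz.getField(field))
      .map(_.get(null).asInstanceOf[A])
      .toOption
      .map(mapTo)
      .getOrElse(default)
  }

  def main(args: Array[String]): Unit = {
    // MAX_VALUE is a public static field, so the mapping applies.
    println(getFieldValue[Int, String](classOf[Integer], "MAX_VALUE", "missing")(_.toString))
    // A nonexistent field falls back to the default.
    println(getFieldValue[Int, String](classOf[Integer], "NO_SUCH_FIELD", "missing")(_.toString))
  }
}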

yarn/src/test/scala/org/apache/spark/deploy/yarn/YarnAllocatorSuite.scala

Lines changed: 4 additions & 2 deletions
@@ -79,7 +79,7 @@ class YarnAllocatorSuite extends FunSuite with Matchers with BeforeAndAfterEach
 }
 
 class MockSplitInfo(host: String) extends SplitInfo(null, host, null, 1, null) {
-  override def equals(other: Any) = false
+  override def equals(other: Any): Boolean = false
 }
 
 def createAllocator(maxExecutors: Int = 5): YarnAllocator = {
@@ -118,7 +118,9 @@
     handler.getNumExecutorsRunning should be (1)
     handler.allocatedContainerToHostMap.get(container.getId).get should be ("host1")
     handler.allocatedHostToContainersMap.get("host1").get should contain (container.getId)
-    rmClient.getMatchingRequests(container.getPriority, "host1", containerResource).size should be (0)
+
+    val size = rmClient.getMatchingRequests(container.getPriority, "host1", containerResource).size
+    size should be (0)
   }
 
   test("some containers allocated") {

yarn/src/test/scala/org/apache/spark/deploy/yarn/YarnSparkHadoopUtilSuite.scala

Lines changed: 1 addition & 1 deletion
@@ -46,7 +46,7 @@ class YarnSparkHadoopUtilSuite extends FunSuite with Matchers with Logging {
     logWarning("Cannot execute bash, skipping bash tests.")
   }
 
-  def bashTest(name: String)(fn: => Unit) =
+  def bashTest(name: String)(fn: => Unit): Unit =
     if (hasBash) test(name)(fn) else ignore(name)(fn)
 
   bashTest("shell script escaping") {

0 commit comments