Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -1717,7 +1717,7 @@ package object config {
"the codec")
.version("0.8.0")
.stringConf
.createWithDefaultString("lz4")
.createWithDefaultString("zstd")

private[spark] val IO_COMPRESSION_ZSTD_BUFFERSIZE =
ConfigBuilder("spark.io.compression.zstd.bufferSize")
Expand All @@ -1734,7 +1734,7 @@ package object config {
.doc("If true, enable buffer pool of ZSTD JNI library.")
.version("3.2.0")
.booleanConf
.createWithDefault(true)
.createWithDefault(false)

private[spark] val IO_COMPRESSION_ZSTD_LEVEL =
ConfigBuilder("spark.io.compression.zstd.level")
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -47,7 +47,7 @@ class CompressionCodecSuite extends SparkFunSuite {

test("default compression codec") {
val codec = CompressionCodec.createCodec(conf)
assert(codec.getClass === classOf[LZ4CompressionCodec])
assert(codec.getClass === classOf[ZStdCompressionCodec])
testCodec(codec)
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -20,7 +20,7 @@ package org.apache.spark.sql.execution
import org.scalatest.BeforeAndAfterAll

import org.apache.spark.{SparkConf, SparkFunSuite}
import org.apache.spark.internal.config.IO_ENCRYPTION_ENABLED
import org.apache.spark.internal.config.{IO_COMPRESSION_CODEC, IO_ENCRYPTION_ENABLED}
import org.apache.spark.internal.config.UI.UI_ENABLED
import org.apache.spark.sql._
import org.apache.spark.sql.execution.adaptive._
Expand Down Expand Up @@ -66,6 +66,7 @@ class CoalesceShufflePartitionsSuite extends SparkFunSuite with BeforeAndAfterAl
.setAppName("test")
.set(UI_ENABLED, false)
.set(IO_ENCRYPTION_ENABLED, enableIOEncryption)
.set(IO_COMPRESSION_CODEC.key, "lz4")
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Curious, does this test require it to be lz4?
The same applies to AdaptiveQueryExecSuite below — why not rely on the new default value of zstd?

.set(SQLConf.SHUFFLE_PARTITIONS.key, "5")
.set(SQLConf.COALESCE_PARTITIONS_INITIAL_PARTITION_NUM.key, "5")
.set(SQLConf.ADAPTIVE_EXECUTION_ENABLED.key, "true")
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -52,6 +52,8 @@ class AdaptiveQueryExecSuite

import testImplicits._

override protected def sparkConf = super.sparkConf.set("spark.io.compression.codec", "lz4")
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Should this be defined more like a method with braces, etc., or is that how similar code does it?

Copy link
Member Author

@dongjoon-hyun dongjoon-hyun Jun 17, 2021

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Sorry for the late reply.

For this one, we can use this simple form like the other places.

$ git grep 'override protected def sparkConf' | grep super
sql/core/src/test/scala/org/apache/spark/sql/AggregateHashMapSuite.scala:  override protected def sparkConf: SparkConf = super.sparkConf
sql/core/src/test/scala/org/apache/spark/sql/AggregateHashMapSuite.scala:  override protected def sparkConf: SparkConf = super.sparkConf
sql/core/src/test/scala/org/apache/spark/sql/AggregateHashMapSuite.scala:  override protected def sparkConf: SparkConf = super.sparkConf
sql/core/src/test/scala/org/apache/spark/sql/connector/FileDataSourceV2FallBackSuite.scala:  override protected def sparkConf: SparkConf = super.sparkConf.set(SQLConf.USE_V1_SOURCE_LIST, "")
sql/core/src/test/scala/org/apache/spark/sql/execution/DataSourceScanExecRedactionSuite.scala:  override protected def sparkConf: SparkConf = super.sparkConf
sql/core/src/test/scala/org/apache/spark/sql/execution/DataSourceScanExecRedactionSuite.scala:  override protected def sparkConf: SparkConf = super.sparkConf
sql/core/src/test/scala/org/apache/spark/sql/execution/DataSourceScanExecRedactionSuite.scala:  override protected def sparkConf: SparkConf = super.sparkConf
sql/core/src/test/scala/org/apache/spark/sql/execution/adaptive/AdaptiveQueryExecSuite.scala:  override protected def sparkConf = super.sparkConf.set("spark.io.compression.codec", "lz4")
sql/core/src/test/scala/org/apache/spark/sql/execution/command/v1/AlterTableRecoverPartitionsSuite.scala:  override protected def sparkConf = super.sparkConf
sql/core/src/test/scala/org/apache/spark/sql/execution/command/v1/AlterTableRecoverPartitionsSuite.scala:  override protected def sparkConf = super.sparkConf
sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/orc/OrcPartitionDiscoverySuite.scala:  override protected def sparkConf: SparkConf = super.sparkConf.set(SQLConf.USE_V1_SOURCE_LIST, "")
sql/core/src/test/scala/org/apache/spark/sql/streaming/StreamSuite.scala:  override protected def sparkConf: SparkConf = super.sparkConf

Or, like the following~

  override protected def sparkConf: SparkConf =
    super
      .sparkConf
      .set(SQLConf.USE_V1_SOURCE_LIST, "parquet")


setupTestData()

private def runAdaptiveAndVerifyResult(query: String): (SparkPlan, SparkPlan) = {
Expand Down