KryoSerializer.scala
@@ -384,9 +384,16 @@ private[serializer] object KryoSerializer {
    classOf[HighlyCompressedMapStatus],
    classOf[CompactBuffer[_]],
    classOf[BlockManagerId],
    classOf[Array[Boolean]],
    classOf[Array[Byte]],
    classOf[Array[Short]],
    classOf[Array[Int]],
    classOf[Array[Long]],
    classOf[Array[Float]],
    classOf[Array[Double]],
    classOf[Array[Char]],
    classOf[Array[String]],
    classOf[Array[Array[String]]],
    classOf[BoundedPriorityQueue[_]],
    classOf[SparkConf]
  )
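Not part of the patch, but as a minimal sketch of why the primitive-array classes are added to this list: when `spark.kryo.registrationRequired` is set, Kryo rejects any class that has not been registered, so even a plain `Array[Int]` round trip through `KryoSerializer` only works once the array classes above are registered. The object name below is made up for illustration.

```scala
import org.apache.spark.SparkConf
import org.apache.spark.serializer.KryoSerializer

// Illustrative only; not part of the diff above.
object PrimitiveArraySketch {
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf(false)
      .set("spark.kryo.registrationRequired", "true")
    val ser = new KryoSerializer(conf).newInstance()

    // With Array[Int] on KryoSerializer's registration list (as added above),
    // this round trip succeeds; without it, Kryo would refuse to serialize
    // the array because registration is required.
    val restored = ser.deserialize[Array[Int]](ser.serialize(Array(1, 2, 3)))
    assert(restored.sameElements(Array(1, 2, 3)))
  }
}
```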
KryoSerializerSuite.scala
@@ -76,6 +76,9 @@ class KryoSerializerSuite extends SparkFunSuite with SharedSparkContext {
  }

  test("basic types") {
    val conf = new SparkConf(false)
Member:
You could do this in the next test too, and then I think you'd have to register Tuple2, but that sounds like a good idea.

And then the next one for Scala Map, Seq, List. Again, probably a good idea, as in the JIRA. I actually don't know why this isn't done already.

Author:

Unit tests have been updated; nothing new was found.

Tuples are registered starting from line 149 in KryoSerializer.scala.

    conf.set("spark.kryo.registrationRequired", "true")

    val ser = new KryoSerializer(conf).newInstance()
    def check[T: ClassTag](t: T) {
      assert(ser.deserialize[T](ser.serialize(t)) === t)
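A hedged sketch of what the registration discussed in the review thread above could look like if done in user code rather than inside `KryoSerializer`: a custom `KryoRegistrator` that registers `Tuple2` and a few common Scala collection classes. The class name and the exact set of registrations here are assumptions for illustration, not what the patch does (per the author's reply, tuples are already registered inside `KryoSerializer`).

```scala
import com.esotericsoftware.kryo.Kryo
import org.apache.spark.SparkConf
import org.apache.spark.serializer.KryoRegistrator

// Hypothetical registrator; name and registrations are illustrative.
class CollectionsRegistrator extends KryoRegistrator {
  override def registerClasses(kryo: Kryo): Unit = {
    kryo.register(classOf[Tuple2[_, _]])
    kryo.register(classOf[scala.collection.immutable.::[_]]) // non-empty List cells
    kryo.register(Nil.getClass)                              // the empty-List singleton
    kryo.register(classOf[scala.collection.immutable.HashMap[_, _]])
  }
}

object RegistratorWiring {
  // Keep registration required and point Spark at the registrator above.
  val conf = new SparkConf(false)
    .set("spark.kryo.registrationRequired", "true")
    .set("spark.kryo.registrator", classOf[CollectionsRegistrator].getName)
}
```

Alternatively, classes can be registered through `SparkConf.registerKryoClasses(...)` or the `spark.kryo.classesToRegister` setting; the registrator approach is just one option.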
@@ -106,6 +109,9 @@ class KryoSerializerSuite extends SparkFunSuite with SharedSparkContext {
  }

  test("pairs") {
    val conf = new SparkConf(false)
    conf.set("spark.kryo.registrationRequired", "true")

    val ser = new KryoSerializer(conf).newInstance()
    def check[T: ClassTag](t: T) {
      assert(ser.deserialize[T](ser.serialize(t)) === t)
@@ -130,12 +136,16 @@ class KryoSerializerSuite extends SparkFunSuite with SharedSparkContext {
  }

  test("Scala data structures") {
    val conf = new SparkConf(false)
    conf.set("spark.kryo.registrationRequired", "true")

    val ser = new KryoSerializer(conf).newInstance()
    def check[T: ClassTag](t: T) {
      assert(ser.deserialize[T](ser.serialize(t)) === t)
    }
    check(List[Int]())
    check(List[Int](1, 2, 3))
    check(Seq[Int](1, 2, 3))
    check(List[String]())
    check(List[String]("x", "y", "z"))
    check(None)
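For contrast, a minimal sketch (assumed, not taken from the patch) of the failure mode these `registrationRequired` tests are meant to surface: serializing a class that `KryoSerializer` has not registered fails fast instead of silently falling back to writing full class names.

```scala
import org.apache.spark.SparkConf
import org.apache.spark.serializer.KryoSerializer

// Hypothetical class; anything absent from KryoSerializer's registration list works here.
case class Unregistered(x: Int)

object RegistrationRequiredFailureSketch {
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf(false).set("spark.kryo.registrationRequired", "true")
    val ser = new KryoSerializer(conf).newInstance()

    try {
      ser.serialize(Unregistered(1))
      println("unexpected: serialization succeeded")
    } catch {
      // Kryo typically reports this as an IllegalArgumentException
      // ("Class is not registered: ...").
      case e: Exception => println(s"rejected as expected: ${e.getMessage}")
    }
  }
}
```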