Skip to content

Commit 40e6b2f

Browse files
committed
Fix the issue where Iterator.map().toSeq is not Serializable
1 parent 0784e02 commit 40e6b2f

File tree

2 files changed

+18
-1
lines changed

2 files changed

+18
-1
lines changed

sql/core/src/main/scala/org/apache/spark/sql/SQLContext.scala

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -604,7 +604,7 @@ class SQLContext private[sql](
604604
val className = beanClass.getName
605605
val beanInfo = Introspector.getBeanInfo(beanClass)
606606
val rows = SQLContext.beansToRows(data.asScala.iterator, beanInfo, attrSeq)
607-
DataFrame(self, LocalRelation(attrSeq, rows.toSeq))
607+
DataFrame(self, LocalRelation(attrSeq, rows.toArray))
608608
}
609609

610610

sql/core/src/test/scala/org/apache/spark/sql/SQLContextSuite.scala

Lines changed: 17 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -65,4 +65,21 @@ class SQLContextSuite extends SparkFunSuite with SharedSparkContext{
6565
session2.sql("select myadd(1, 2)").explain()
6666
}
6767
}
68+
69+
test("SPARK-13390: createDataFrame(java.util.List[_],Class[_]) NotSerializableException") {
70+
val rows = new java.util.ArrayList[IntJavaBean]()
71+
rows.add(new IntJavaBean(1))
72+
val sqlContext = SQLContext.getOrCreate(sc)
73+
// Without the fix for SPARK-13390, this will throw NotSerializableException
74+
sqlContext.createDataFrame(rows, classOf[IntJavaBean]).groupBy("int").count().collect()
75+
}
76+
}
77+
78+
class IntJavaBean(private var i: Int) extends Serializable {
79+
80+
def getInt(): Int = i
81+
82+
def setInt(i: Int): Unit = {
83+
this.i = i
84+
}
6885
}

0 commit comments

Comments
 (0)