@@ -42,6 +42,20 @@ import org.apache.spark.sql.test.TestSQLContext._
4242
/** Simple key/value row type used as the element of test RDDs. */
case class TestRDDEntry(key: Int, value: String)
4444
/**
 * Row type whose columns are boxed Java primitives, so each field can hold
 * `null`. Used to check that null values survive a Parquet write/read
 * round trip.
 */
case class NullReflectData(
    intField: java.lang.Integer,
    longField: java.lang.Long,
    floatField: java.lang.Float,
    doubleField: java.lang.Double,
    booleanField: java.lang.Boolean)
51+
/**
 * Row type whose columns are `Option`-wrapped primitives. Used to check
 * that `None` values survive a Parquet write/read round trip.
 */
case class OptionalReflectData(
    intField: Option[Int],
    longField: Option[Long],
    floatField: Option[Float],
    doubleField: Option[Double],
    booleanField: Option[Boolean])
58+
4559class ParquetQuerySuite extends QueryTest with FunSuite with BeforeAndAfterAll {
4660 import TestData ._
4761 TestData // Load test data tables.
@@ -195,5 +209,35 @@ class ParquetQuerySuite extends QueryTest with FunSuite with BeforeAndAfterAll {
195209 Utils .deleteRecursively(ParquetTestData .testDir)
196210 ParquetTestData .writeFile()
197211 }
212+
213+ test(" save and load case class RDD with nulls as parquet" ) {
214+ val data = NullReflectData (null , null , null , null , null )
215+ val rdd = sparkContext.parallelize(data :: Nil )
216+
217+ val file = getTempFilePath(" parquet" )
218+ val path = file.toString
219+ rdd.saveAsParquetFile(path)
220+ val readFile = parquetFile(path)
221+
222+ val rdd_saved = readFile.collect()
223+ assert(rdd_saved(0 ) === Seq .fill(5 )(null ))
224+ Utils .deleteRecursively(file)
225+ assert(true )
226+ }
227+
228+ test(" save and load case class RDD with Nones as parquet" ) {
229+ val data = OptionalReflectData (null , null , null , null , null )
230+ val rdd = sparkContext.parallelize(data :: Nil )
231+
232+ val file = getTempFilePath(" parquet" )
233+ val path = file.toString
234+ rdd.saveAsParquetFile(path)
235+ val readFile = parquetFile(path)
236+
237+ val rdd_saved = readFile.collect()
238+ assert(rdd_saved(0 ) === Seq .fill(5 )(null ))
239+ Utils .deleteRecursively(file)
240+ assert(true )
241+ }
198242}
199243
0 commit comments