Commit fceacd6

Check if a value is null when the field has a complex type.
1 parent: e8422c5
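
For context, SPARK-5284 reports a NullPointerException when inserting into a Hive table a row whose complex-typed field (struct, array, or map) is null: the wrappers built by HiveInspectors.wrapperFor called soi.create(), .map, and mapAsJavaMap on the incoming value without checking it first. The failure mode is ordinary Scala behaviour, as in this two-line illustration (plain Scala, not Spark code):

val xs: Seq[Int] = null
xs.map(_ + 1)  // throws java.lang.NullPointerException at runtime

The change below makes each complex-type wrapper short-circuit to null before touching the value.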

2 files changed: +54, -9 lines

sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveInspectors.scala

Lines changed: 18 additions & 8 deletions
@@ -346,16 +346,20 @@ private[hive] trait HiveInspectors {
     case soi: StandardStructObjectInspector =>
       val wrappers = soi.getAllStructFieldRefs.map(ref => wrapperFor(ref.getFieldObjectInspector))
       (o: Any) => {
-        val struct = soi.create()
-        (soi.getAllStructFieldRefs, wrappers, o.asInstanceOf[Row]).zipped.foreach {
-          (field, wrapper, data) => soi.setStructFieldData(struct, field, wrapper(data))
+        if (o != null) {
+          val struct = soi.create()
+          (soi.getAllStructFieldRefs, wrappers, o.asInstanceOf[Row]).zipped.foreach {
+            (field, wrapper, data) => soi.setStructFieldData(struct, field, wrapper(data))
+          }
+          struct
+        } else {
+          null
         }
-        struct
       }

     case loi: ListObjectInspector =>
       val wrapper = wrapperFor(loi.getListElementObjectInspector)
-      (o: Any) => seqAsJavaList(o.asInstanceOf[Seq[_]].map(wrapper))
+      (o: Any) => if (o != null) seqAsJavaList(o.asInstanceOf[Seq[_]].map(wrapper)) else null

     case moi: MapObjectInspector =>
       // The Predef.Map is scala.collection.immutable.Map.
@@ -364,9 +368,15 @@ private[hive] trait HiveInspectors {

       val keyWrapper = wrapperFor(moi.getMapKeyObjectInspector)
       val valueWrapper = wrapperFor(moi.getMapValueObjectInspector)
-      (o: Any) => mapAsJavaMap(o.asInstanceOf[Map[_, _]].map { case (key, value) =>
-        keyWrapper(key) -> valueWrapper(value)
-      })
+      (o: Any) => {
+        if (o != null) {
+          mapAsJavaMap(o.asInstanceOf[Map[_, _]].map { case (key, value) =>
+            keyWrapper(key) -> valueWrapper(value)
+          })
+        } else {
+          null
+        }
+      }

     case _ =>
       identity[Any]
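
Each guarded branch above follows the same shape: build the child wrappers once, then return a function that short-circuits to null before touching the incoming value. A minimal, self-contained sketch of that shape in plain Scala, with toy stand-ins for Hive's object-inspector types (the Kind/Wrapper names below are illustrative, not Spark's or Hive's API):

object NullSafeWrapperSketch {
  // A wrapper converts one field value into the representation the sink expects.
  type Wrapper = Any => Any

  // Toy stand-ins for Hive's object-inspector hierarchy (illustrative only).
  sealed trait Kind
  case object AtomKind extends Kind
  case class SeqKind(element: Kind) extends Kind
  case class MapKind(key: Kind, value: Kind) extends Kind

  // Mirrors the shape of wrapperFor above: build the child wrappers once,
  // then guard every complex case against a null input value.
  def wrapperFor(kind: Kind): Wrapper = kind match {
    case SeqKind(element) =>
      val elementWrapper = wrapperFor(element)
      (o: Any) => if (o != null) o.asInstanceOf[Seq[_]].map(elementWrapper) else null
    case MapKind(key, value) =>
      val keyWrapper = wrapperFor(key)
      val valueWrapper = wrapperFor(value)
      (o: Any) => {
        if (o != null) {
          o.asInstanceOf[Map[_, _]].map { case (k, v) => keyWrapper(k) -> valueWrapper(v) }
        } else {
          null
        }
      }
    case AtomKind => identity[Any]
  }

  def main(args: Array[String]): Unit = {
    val wrap = wrapperFor(SeqKind(AtomKind))
    println(wrap(Seq(1, 2, 3)))  // List(1, 2, 3)
    println(wrap(null))          // null -- without the guard, .map would throw an NPE
  }
}

Building the child wrappers outside the returned closure keeps the per-value path cheap, which is why the null check sits inside the closure rather than around the wrapper construction itself.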

sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLQuerySuite.scala

Lines changed: 36 additions & 1 deletion
@@ -21,7 +21,7 @@ import org.apache.spark.sql.QueryTest

 import org.apache.spark.sql.Row
 import org.apache.spark.sql.hive.test.TestHive._
-import org.apache.spark.util.Utils
+import org.apache.spark.sql.types._

 case class Nested1(f1: Nested2)
 case class Nested2(f2: Nested3)
@@ -214,4 +214,39 @@ class SQLQuerySuite extends QueryTest {
         Seq.empty[Row])
     }
   }
+
+  test("SPARK-5284 Insert into Hive throws NPE when a inner complex type field has a null value") {
+    val schema = StructType(
+      StructField("s",
+        StructType(
+          StructField("innerStruct", StructType(StructField("s1", StringType, true) :: Nil)) ::
+          StructField("innerArray", ArrayType(IntegerType), true) ::
+          StructField("innerMap", MapType(StringType, IntegerType)) :: Nil), true) :: Nil)
+    val row = Row(Row(null, null, null))
+
+    val rowRdd = sparkContext.parallelize(row :: Nil)
+
+    applySchema(rowRdd, schema).registerTempTable("testTable")
+
+    sql(
+      """CREATE TABLE nullValuesInInnerComplexTypes
+        |  (s struct<innerStruct: struct<s1:string>,
+        |            innerArray:array<int>,
+        |            innerMap: map<string, int>>)
+      """.stripMargin).collect
+
+    sql(
+      """
+        |INSERT OVERWRITE TABLE nullValuesInInnerComplexTypes
+        |SELECT * FROM testTable
+      """.stripMargin)
+
+    checkAnswer(
+      sql("SELECT * FROM nullValuesInInnerComplexTypes"),
+      Seq(Seq(Seq(null, null, null)))
+    )
+
+    sql("DROP TABLE nullValuesInInnerComplexTypes")
+    dropTempTable("testTable")
+  }
 }
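
A hedged follow-up, not part of the commit: with the guards in place, the struct column written by the test is non-null while its three inner fields are null, so those fields can also be projected individually before the table is dropped. The identifiers below are the ones the test creates; the commented result describes the expected shape rather than captured output.

// Each projected field of the struct column s comes back as NULL instead of the INSERT failing.
sql("SELECT s.innerStruct, s.innerArray, s.innerMap FROM nullValuesInInnerComplexTypes").collect()
// expected: a single row whose three columns are all null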
