Diff view — 1 file changed, 3 insertions(+), 12 deletions(−):
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions (generators.scala)
1818package org .apache .spark .sql .catalyst .expressions
1919
20- import java .io .{ObjectInputStream , IOException }
21-
2220import scala .collection .Map
2321
2422import org .apache .spark .sql .catalyst .{CatalystTypeConverters , trees }
2523import org .apache .spark .sql .types ._
26- import org .apache .spark .util .Utils
2724
2825/**
2926 * An expression that produces zero or more rows given a single input row.
@@ -85,16 +82,10 @@ case class UserDefinedGenerator(
8582 }.asInstanceOf [(Row => Row )]
8683 }
8784
88- initializeConverters()
89-
90- @ throws(classOf [IOException ])
91- private def readObject (ois : ObjectInputStream ): Unit = Utils .tryOrIOException {
92- ois.defaultReadObject()
93- initializeConverters()
94- }
95-
/**
 * Evaluates the user-defined generator for one input row.
 *
 * Catalyst values are converted to Scala types before the user function is
 * invoked; the conversion needs the schema so that UDTs are handled correctly.
 */
override def eval(input: Row): TraversableOnce[Row] = {
  // Converters are built lazily on first use: `inputRow` is null until
  // `initializeConverters()` has run (presumably also after
  // deserialization, which is why no readObject hook is needed — verify).
  if (inputRow == null) initializeConverters()
  val scalaRow = convertToScala(inputRow(input))
  function(scalaRow)
}
(End of diff view — 0 commit comments.)