
Commit 2b820f2

viirya authored and liancheng committed
[MINOR] [SQL] Minor fix for CatalystSchemaConverter
ping liancheng

Author: Liang-Chi Hsieh <[email protected]>

Closes apache#7224 from viirya/few_fix_catalystschema and squashes the following commits:

d994330 [Liang-Chi Hsieh] Minor fix for CatalystSchemaConverter.
1 parent c991ef5 commit 2b820f2

File tree

2 files changed: 7 additions & 7 deletions


sql/core/src/main/scala/org/apache/spark/sql/SQLConf.scala

Lines changed: 1 addition & 1 deletion
@@ -273,7 +273,7 @@ private[spark] object SQLConf {
   val PARQUET_FOLLOW_PARQUET_FORMAT_SPEC = booleanConf(
     key = "spark.sql.parquet.followParquetFormatSpec",
     defaultValue = Some(false),
-    doc = "Wether to stick to Parquet format specification when converting Parquet schema to " +
+    doc = "Whether to stick to Parquet format specification when converting Parquet schema to " +
       "Spark SQL schema and vice versa. Sticks to the specification if set to true; falls back " +
       "to compatible mode if set to false.",
     isPublic = false)
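
For context, the flag this doc string describes is an internal SQL conf (isPublic = false), so it is not advertised in user-facing documentation, but it can still be set explicitly. Below is a minimal sketch of toggling it, assuming the Spark 1.x SQLContext API; the usage is illustrative only and not part of this patch.

import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.sql.SQLContext

// Illustrative only (assumes Spark 1.x APIs): the flag defaults to false,
// i.e. the backwards-compatible Parquet schema conversion; setting it to
// "true" makes the converter stick to the Parquet format specification.
val sc = new SparkContext(
  new SparkConf().setAppName("parquet-spec-demo").setMaster("local[*]"))
val sqlContext = new SQLContext(sc)

sqlContext.setConf("spark.sql.parquet.followParquetFormatSpec", "true")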

sql/core/src/main/scala/org/apache/spark/sql/parquet/CatalystSchemaConverter.scala

Lines changed: 6 additions & 6 deletions
@@ -142,15 +142,15 @@ private[parquet] class CatalystSchemaConverter(
       DecimalType(precision, scale)
     }
 
-    field.getPrimitiveTypeName match {
+    typeName match {
       case BOOLEAN => BooleanType
 
       case FLOAT => FloatType
 
       case DOUBLE => DoubleType
 
       case INT32 =>
-        field.getOriginalType match {
+        originalType match {
           case INT_8 => ByteType
           case INT_16 => ShortType
           case INT_32 | null => IntegerType
@@ -161,7 +161,7 @@ private[parquet] class CatalystSchemaConverter(
         }
 
       case INT64 =>
-        field.getOriginalType match {
+        originalType match {
           case INT_64 | null => LongType
           case DECIMAL => makeDecimalType(maxPrecisionForBytes(8))
           case TIMESTAMP_MILLIS => typeNotImplemented()
@@ -176,7 +176,7 @@ private[parquet] class CatalystSchemaConverter(
         TimestampType
 
       case BINARY =>
-        field.getOriginalType match {
+        originalType match {
           case UTF8 | ENUM => StringType
           case null if assumeBinaryIsString => StringType
           case null => BinaryType
@@ -185,7 +185,7 @@ private[parquet] class CatalystSchemaConverter(
         }
 
       case FIXED_LEN_BYTE_ARRAY =>
-        field.getOriginalType match {
+        originalType match {
           case DECIMAL => makeDecimalType(maxPrecisionForBytes(field.getTypeLength))
           case INTERVAL => typeNotImplemented()
           case _ => illegalType()
@@ -261,7 +261,7 @@ private[parquet] class CatalystSchemaConverter(
   // Here we implement Parquet LIST backwards-compatibility rules.
   // See: https://github.com/apache/parquet-format/blob/master/LogicalTypes.md#backward-compatibility-rules
   // scalastyle:on
-  private def isElementType(repeatedType: Type, parentName: String) = {
+  private def isElementType(repeatedType: Type, parentName: String): Boolean = {
     {
       // For legacy 2-level list types with primitive element type, e.g.:
       //
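
The substantive change above is a small cleanup: the primitive-type match now reads the already-defined typeName and originalType values instead of calling field.getPrimitiveTypeName and field.getOriginalType in every branch, and isElementType gains an explicit Boolean result type. Below is a hedged sketch of the same pattern using hypothetical, simplified names; it is not the actual CatalystSchemaConverter code.

// Hypothetical, simplified sketch of the pattern; not the actual converter code.
final case class Field(primitiveTypeName: String, originalType: String)

// Before: every branch reaches back into the field for its getters.
def describeBefore(field: Field): String = field.primitiveTypeName match {
  case "INT32"  => "int32 with original type " + field.originalType
  case "BINARY" => "binary with original type " + field.originalType
  case other    => "unsupported: " + other
}

// After: read each getter once into a local val and match on the locals,
// mirroring the typeName/originalType values used in the patch; the explicit
// ": String" result type parallels adding ": Boolean" to isElementType.
def describeAfter(field: Field): String = {
  val typeName = field.primitiveTypeName
  val originalType = field.originalType
  typeName match {
    case "INT32"  => "int32 with original type " + originalType
    case "BINARY" => "binary with original type " + originalType
    case other    => "unsupported: " + other
  }
}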
