diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/HiveShim.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/HiveShim.scala
index 18f8c53609812..2362a34c09c44 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/HiveShim.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/HiveShim.scala
@@ -675,12 +675,21 @@ private[client] class Shim_v0_13 extends Shim_v0_12 {
     val useAdvanced = SQLConf.get.advancedPartitionPredicatePushdownEnabled
 
     object ExtractAttribute {
+      var castToStr = false
+
       def unapply(expr: Expression): Option[Attribute] = {
         expr match {
-          case attr: Attribute => Some(attr)
+          case attr: Attribute
+            if (!castToStr || attr.dataType == StringType) =>
+            castToStr = false
+            Some(attr)
           case Cast(child @ AtomicType(), dt: AtomicType, _)
-            if Cast.canUpCast(child.dataType.asInstanceOf[AtomicType], dt) => unapply(child)
-          case _ => None
+            if Cast.canUpCast(child.dataType.asInstanceOf[AtomicType], dt) =>
+            castToStr = (castToStr || dt == StringType)
+            unapply(child)
+          case _ =>
+            castToStr = false
+            None
         }
       }
     }
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLQuerySuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLQuerySuite.scala
index 29de55f7040f1..d7abc3f0e9e0c 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLQuerySuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLQuerySuite.scala
@@ -2384,4 +2384,13 @@ class SQLQuerySuite extends QueryTest with SQLTestUtils with TestHiveSingleton {
       }
     }
   }
+
+  test("SPARK-27814: test cast operation for partition key") {
+    withTable("t1") {
+      sql("CREATE TABLE t1(c1 INT, c2 STRING) PARTITIONED BY (p1 INT)")
+      sql("INSERT INTO TABLE t1 PARTITION (p1 = 5) values(1, 'str')")
+      checkAnswer(sql("SELECT c1 FROM t1 WHERE CAST(p1 as STRING) = '5'"), Row(1))
+      checkAnswer(sql("SELECT c1 FROM t1 WHERE CAST( CAST(p1 AS BIGINT) AS STRING) = '5'"), Row(1))
+    }
+  }
 }
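
The patch above carries no description, so here is a minimal, self-contained sketch of how the modified ExtractAttribute extractor appears intended to behave: once a cast to StringType is seen while unwrapping nested casts, the underlying attribute is only extracted (and therefore only eligible for metastore partition-predicate pushdown) if the column is itself a string; otherwise the predicate is left for Spark to evaluate. Everything in the sketch (ExtractorSketch, Expr, Attr, CastExpr, and the simplified canUpCast) is a hypothetical stand-in for illustration, not the real Catalyst classes.

// Minimal, self-contained sketch of the patched extractor's behaviour.
// Expr, Attr, CastExpr, and canUpCast below are simplified stand-ins for
// Catalyst's Expression, Attribute, Cast, and Cast.canUpCast.
object ExtractorSketch {
  sealed trait DataType
  case object IntType  extends DataType
  case object LongType extends DataType
  case object StrType  extends DataType

  sealed trait Expr { def dataType: DataType }
  case class Attr(name: String, dataType: DataType) extends Expr
  case class CastExpr(child: Expr, dataType: DataType) extends Expr

  // Stand-in for Cast.canUpCast: treat widening numeric casts and casts to
  // string as "safe" for the purpose of this illustration.
  private def canUpCast(from: DataType, to: DataType): Boolean = (from, to) match {
    case (IntType, LongType) => true
    case (_, StrType)        => true
    case (a, b)              => a == b
  }

  object ExtractAttribute {
    // Mirrors the mutable flag added by the patch: it remembers whether a
    // cast-to-string was seen while recursing, and the terminating branches
    // reset it before returning.
    var castToStr = false

    def unapply(expr: Expr): Option[Attr] = expr match {
      case attr: Attr if !castToStr || attr.dataType == StrType =>
        castToStr = false
        Some(attr)
      case CastExpr(child, dt) if canUpCast(child.dataType, dt) =>
        castToStr = castToStr || dt == StrType
        unapply(child)
      case _ =>
        castToStr = false
        None
    }
  }

  def main(args: Array[String]): Unit = {
    val p1 = Attr("p1", IntType)

    // CAST(p1 AS BIGINT): a plain widening cast, the attribute is still extracted.
    println(ExtractAttribute.unapply(CastExpr(p1, LongType)))                 // Some(Attr(p1,IntType))

    // CAST(p1 AS STRING) on an INT partition column: the new guard refuses to
    // extract the attribute, so the predicate is not turned into a metastore
    // filter with mismatched types.
    println(ExtractAttribute.unapply(CastExpr(p1, StrType)))                  // None

    // CAST(p2 AS STRING) where p2 is already a STRING column: still extracted.
    println(ExtractAttribute.unapply(CastExpr(Attr("p2", StrType), StrType))) // Some(Attr(p2,StrType))
  }
}

As in the patch itself, castToStr is shared mutable state on the extractor object, which is why every terminating branch of the match resets it before returning a result.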