From 0c929be1e800352c6452dd11282ff0aa5900d533 Mon Sep 17 00:00:00 2001 From: chenzhx Date: Wed, 10 Aug 2022 23:51:47 +0800 Subject: [PATCH] [SPARK-38899][SQL][FOLLOWUP] Fix bug extract datetime in DS V2 pushdown --- .../apache/spark/sql/connector/expressions/Extract.java | 7 +++++++ .../scala/org/apache/spark/sql/jdbc/JDBCV2Suite.scala | 9 +++++++++ 2 files changed, 16 insertions(+) diff --git a/sql/catalyst/src/main/java/org/apache/spark/sql/connector/expressions/Extract.java b/sql/catalyst/src/main/java/org/apache/spark/sql/connector/expressions/Extract.java index a925f1ee31a98..ed9f4415f7da1 100644 --- a/sql/catalyst/src/main/java/org/apache/spark/sql/connector/expressions/Extract.java +++ b/sql/catalyst/src/main/java/org/apache/spark/sql/connector/expressions/Extract.java @@ -18,6 +18,7 @@ package org.apache.spark.sql.connector.expressions; import org.apache.spark.annotation.Evolving; +import org.apache.spark.sql.internal.connector.ToStringSQLBuilder; import java.io.Serializable; @@ -59,4 +60,10 @@ public Extract(String field, Expression source) { @Override public Expression[] children() { return new Expression[]{ source() }; } + + @Override + public String toString() { + ToStringSQLBuilder builder = new ToStringSQLBuilder(); + return builder.build(this); + } } diff --git a/sql/core/src/test/scala/org/apache/spark/sql/jdbc/JDBCV2Suite.scala b/sql/core/src/test/scala/org/apache/spark/sql/jdbc/JDBCV2Suite.scala index a5ea2589b6303..f47efae88c865 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/jdbc/JDBCV2Suite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/jdbc/JDBCV2Suite.scala @@ -1374,6 +1374,15 @@ class JDBCV2Suite extends QueryTest with SharedSparkSession with ExplainSuiteHel "PushedFilters: [DATE1 IS NOT NULL, ((EXTRACT(DAY_OF_WEEK FROM DATE1) % 7) + 1) = 4]" checkPushedInfo(df8, expectedPlanFragment8) checkAnswer(df8, Seq(Row("alex"))) + + val df9 = sql("SELECT name FROM h2.test.datetime WHERE " + + "dayofyear(date1) > 100 order by 
dayofyear(date1) limit 1") + checkFiltersRemoved(df9) + val expectedPlanFragment9 = + "PushedFilters: [DATE1 IS NOT NULL, EXTRACT(DAY_OF_YEAR FROM DATE1) > 100], " + + "PushedTopN: ORDER BY [EXTRACT(DAY_OF_YEAR FROM DATE1) ASC NULLS FIRST] LIMIT 1," + checkPushedInfo(df9, expectedPlanFragment9) + checkAnswer(df9, Seq(Row("alex"))) } test("scan with filter push-down with misc functions") {