[SPARK-33393][SQL] Support SHOW TABLE EXTENDED in v2 #37588
Changes from all commits
Changes to `QueryCompilationErrors`:

    @@ -23,7 +23,7 @@ import org.apache.hadoop.fs.Path
     import org.apache.spark.{SPARK_DOC_ROOT, SparkException, SparkThrowable, SparkThrowableHelper, SparkUnsupportedOperationException}
     import org.apache.spark.sql.AnalysisException
    -import org.apache.spark.sql.catalyst.{ExtendedAnalysisException, FunctionIdentifier, QualifiedTableName, TableIdentifier}
    +import org.apache.spark.sql.catalyst.{ExtendedAnalysisException, FunctionIdentifier, InternalRow, QualifiedTableName, TableIdentifier}
     import org.apache.spark.sql.catalyst.analysis.{CannotReplaceMissingTableException, FunctionAlreadyExistsException, NamespaceAlreadyExistsException, NoSuchFunctionException, NoSuchNamespaceException, NoSuchPartitionException, NoSuchTableException, ResolvedTable, Star, TableAlreadyExistsException, UnresolvedRegex}
     import org.apache.spark.sql.catalyst.catalog.{CatalogTable, InvalidUDFClassException}
     import org.apache.spark.sql.catalyst.catalog.CatalogTypes.TablePartitionSpec

    @@ -2139,12 +2139,6 @@ private[sql] object QueryCompilationErrors extends QueryErrorsBase with Compilat
           "inputTypesLen" -> bound.inputTypes().length.toString))
       }

    -  def commandUnsupportedInV2TableError(name: String): Throwable = {
    -    new AnalysisException(
    -      errorClass = "_LEGACY_ERROR_TEMP_1200",
    -      messageParameters = Map("name" -> name))
    -  }
    -
       def cannotResolveColumnNameAmongAttributesError(
           colName: String, fieldNames: String): Throwable = {
         new AnalysisException(

Review discussion on the removal of `commandUnsupportedInV2TableError`:

- "We can remove the error class too."
- "To reduce the complexity of this PR, renaming or removing the error-class-related entries will be done in a separate PR."
- "In fact, just removing the error class will not increase the complexity."
- "Yes, I misunderstood your meaning. That's right, we can remove it from …"
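For readers skimming the diff, the removed helper is the legacy "not supported for v2 tables" error that this PR makes unnecessary for `SHOW TABLE EXTENDED`. A minimal sketch of the user-visible effect, assuming a v2 catalog registered as `testcat` and backed by the in-memory catalog from Spark's test sources (both the catalog name and that implementation class are assumptions for illustration, not part of the PR):

```scala
// Sketch only: user-visible effect of supporting SHOW TABLE EXTENDED for v2 catalogs.
// The catalog name `testcat` and the InMemoryTableCatalog class are assumptions for
// illustration (the latter ships with Spark's test sources, not the runtime jar).
import org.apache.spark.sql.SparkSession

object ShowTableExtendedV2Sketch {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .master("local[1]")
      .config("spark.sql.catalog.testcat",
        "org.apache.spark.sql.connector.catalog.InMemoryTableCatalog")
      .getOrCreate()

    spark.sql("CREATE TABLE testcat.ns.tbl (id INT, data STRING) USING foo")

    // Before this PR, analysis of this statement against a v2 catalog ended in the
    // legacy error path removed above; with the new v2 command it returns extended
    // table information instead.
    spark.sql("SHOW TABLE EXTENDED IN testcat.ns LIKE 'tbl'").show(truncate = false)

    spark.stop()
  }
}
```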
|
    @@ -2471,7 +2465,7 @@ private[sql] object QueryCompilationErrors extends QueryErrorsBase with Compilat
           errorClass = "_LEGACY_ERROR_TEMP_1231",
           messageParameters = Map(
             "key" -> key,
    -        "tblName" -> tblName))
    +        "tblName" -> toSQLId(tblName)))
       }

       def invalidPartitionSpecError(

Review discussion:

- "It seems this change is not related to this PR."
- "We can change it if we rename the error class."
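For context on what the wrapping changes: `toSQLId` quotes each part of an identifier so error messages print table names uniformly (e.g. `` `ns1`.`tbl` `` rather than ns1.tbl). A simplified, standalone approximation of that quoting — not the actual Spark helper, which lives in `QueryErrorsBase` and handles more cases:

```scala
// Simplified stand-in for the identifier quoting that toSQLId applies in error
// messages; an approximation for illustration, not Spark's implementation.
object SqlIdQuotingSketch {
  // Escape embedded backticks and wrap each name part in backticks.
  private def quotePart(part: String): String =
    "`" + part.replace("`", "``") + "`"

  def toSqlId(name: String): String =
    name.split('.').map(quotePart).mkString(".")

  def main(args: Array[String]): Unit = {
    println(toSqlId("ns1.tbl"))   // prints: `ns1`.`tbl`
    println(toSqlId("plain"))     // prints: `plain`
  }
}
```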
    @@ -2483,7 +2477,7 @@ private[sql] object QueryCompilationErrors extends QueryErrorsBase with Compilat
           messageParameters = Map(
             "specKeys" -> specKeys,
             "partitionColumnNames" -> partitionColumnNames.mkString(", "),
    -        "tableName" -> tableName))
    +        "tableName" -> toSQLId(tableName)))
       }

       def columnAlreadyExistsError(columnName: String): Throwable = {

Review discussion:

- "Ditto — this change also looks unrelated to the PR."
- "If this is not modified, the following issues will occur" (with references to spark/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalog.scala, lines 1343 to 1346 in efa891c, and spark/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/ResolvePartitionSpec.scala, line 66 in efa891c). "The reason is that there are two entrances to calling the method, and the format of the parameter …"
- "It seems there are two …"
- "Hmm... this is a newly added UT. 😄"
- "I got it now. In fact, you added the test cases into the base class … After this PR, you can create another PR to fix the error class."
- "Let's avoid the unrelated change, as I said."
- "@cloud-fan …"
- "This change is necessary as it helps to unify the v1/v2 command behavior, which is an important goal of adding new v2 commands."
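To make the "two entrances" point concrete: the same invalid partition spec can be rejected by `SessionCatalog` on the v1 path and by `ResolvePartitionSpec` on the v2 path, so quoting the table name inside `invalidPartitionSpecError` keeps the two messages consistent. A hedged illustration (all identifiers are made up, and an existing SparkSession named `spark` is assumed):

```scala
// Illustration only: a statement that can reach invalidPartitionSpecError via
// either the v1 (SessionCatalog) or the v2 (ResolvePartitionSpec) path.
// Identifiers are made up; assumes an existing SparkSession named `spark`.
spark.sql("CREATE TABLE tbl (id INT, data STRING) USING parquet PARTITIONED BY (id)")

// `data` is not a partition column, so the spec does not match the table's
// partition columns and analysis is expected to fail; with toSQLId applied,
// both code paths quote the table name the same way in the error message.
spark.sql("SHOW TABLE EXTENDED LIKE 'tbl' PARTITION (data = 'x')")
```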
    @@ -2541,6 +2535,13 @@ private[sql] object QueryCompilationErrors extends QueryErrorsBase with Compilat
         new NoSuchPartitionException(db, table, partition)
       }

    +  def notExistPartitionError(
    +      table: Identifier,
    +      partitionIdent: InternalRow,
    +      partitionSchema: StructType): Throwable = {
    +    new NoSuchPartitionException(table.toString, partitionIdent, partitionSchema)
    +  }
    +
       def analyzingColumnStatisticsNotSupportedForColumnTypeError(
           name: String,
           dataType: DataType): Throwable = {
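As a rough sketch of where the new overload fits: a v2 command that looks up a partition via `SupportsPartitionManagement` can raise it when the requested partition is missing. The method name `checkPartitionExists` and the call shape below are assumptions, not the PR's exact call site; note also that `QueryCompilationErrors` is `private[sql]`, so real callers must live inside the `org.apache.spark.sql` package:

```scala
// Sketch (assumed call shape, not the PR's exact call site) of raising the new
// notExistPartitionError from a v2 command when a partition lookup fails.
import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.sql.connector.catalog.{Identifier, SupportsPartitionManagement}
import org.apache.spark.sql.errors.QueryCompilationErrors
import org.apache.spark.sql.types.StructType

object PartitionCheckSketch {
  def checkPartitionExists(
      ident: Identifier,
      table: SupportsPartitionManagement,
      partitionIdent: InternalRow): Unit = {
    val partitionSchema: StructType = table.partitionSchema()
    if (!table.partitionExists(partitionIdent)) {
      // Builds a NoSuchPartitionException from the v2 identifier, the resolved
      // partition values, and the table's partition schema.
      throw QueryCompilationErrors.notExistPartitionError(ident, partitionIdent, partitionSchema)
    }
  }
}
```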

Review discussion on the parser rule in SqlBaseParser.g4:

- "Because we changed `ShowTableExtended` to `ShowTablesExtended`, shall we update the rule name in the g4 file?"
- "@cloud-fan Do we need this related modification? Renaming it to `ShowTablesExtended` or `ShowTablePartition` does not seem to convey the meaning of both commands at the same time." (referring to spark/sql/api/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBaseParser.g4, lines 176 to 177 in 57e73da)
- "Let's not rename the parser rule now. It matches both commands."
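For reference, the single parser rule covers both shapes of the statement, which is why neither `ShowTablesExtended` nor `ShowTablePartition` alone would describe it. A hedged pair of examples (namespace, table, and partition column names are made up; an existing SparkSession named `spark` is assumed):

```scala
// The two statement shapes matched by the single SHOW TABLE EXTENDED parser rule.
// Namespace, table, and partition column names are made up; assumes an existing
// SparkSession named `spark`.

// 1) Pattern form: extended information for every table matching the pattern.
spark.sql("SHOW TABLE EXTENDED IN ns LIKE 'tbl*'").show(truncate = false)

// 2) Partition form: extended information for one partition of a single table.
spark.sql("SHOW TABLE EXTENDED IN ns LIKE 'tbl' PARTITION (dt = '2023-01-01')")
  .show(truncate = false)
```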