diff --git a/sql/core/src/test/resources/sql-tests/results/describe.sql.out b/sql/core/src/test/resources/sql-tests/results/describe.sql.out
index 422d548ea8de8..ac6c0ca2e5202 100644
--- a/sql/core/src/test/resources/sql-tests/results/describe.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/describe.sql.out
@@ -23,13 +23,13 @@ DESCRIBE t
 -- !query 2 schema
 struct
 -- !query 2 output
-# Partition Information
-# col_name data_type comment
 a string
 b int
 c string
-c string
 d string
+# Partition Information
+# col_name data_type comment
+c string
 d string
@@ -38,13 +38,13 @@ DESC t
 -- !query 3 schema
 struct
 -- !query 3 output
-# Partition Information
-# col_name data_type comment
 a string
 b int
 c string
-c string
 d string
+# Partition Information
+# col_name data_type comment
+c string
 d string
@@ -53,13 +53,13 @@ DESC TABLE t
 -- !query 4 schema
 struct
 -- !query 4 output
-# Partition Information
-# col_name data_type comment
 a string
 b int
 c string
-c string
 d string
+# Partition Information
+# col_name data_type comment
+c string
 d string
@@ -68,27 +68,29 @@ DESC FORMATTED t
 -- !query 5 schema
 struct
 -- !query 5 output
-# Detailed Table Information
+a string
+b int
+c string
+d string
 # Partition Information
-# Storage Information
 # col_name data_type comment
-Comment: table_comment
-Compressed: No
-Created:
+c string
+d string
+
+# Detailed Table Information
 Database: default
+Owner:
+Created:
 Last Access:
 Location: sql/core/spark-warehouse/t
-Owner:
-Partition Provider: Catalog
-Storage Desc Parameters:
-Table Parameters:
 Table Type: MANAGED
-a string
-b int
-c string
-c string
-d string
-d string
+Comment: table_comment
+Table Parameters:
+
+# Storage Information
+Compressed: No
+Storage Desc Parameters:
+Partition Provider: Catalog


 -- !query 6
@@ -96,6 +98,15 @@ DESC EXTENDED t
 -- !query 6 schema
 struct
 -- !query 6 output
+a string
+b int
+c string
+d string
+# Partition Information
+# col_name data_type comment
+c string
+d string
+
 # Detailed Table Information CatalogTable(
   Table: `default`.`t`
   Created:
@@ -106,15 +117,7 @@ struct
   Partition Columns: [`c`, `d`]
   Comment: table_comment
   Storage(Location: sql/core/spark-warehouse/t)
-  Partition Provider: Catalog)
-# Partition Information
-# col_name data_type comment
-a string
-b int
-c string
-c string
-d string
-d string
+  Partition Provider: Catalog)


 -- !query 7
@@ -122,13 +125,13 @@ DESC t PARTITION (c='Us', d=1)
 -- !query 7 schema
 struct
 -- !query 7 output
-# Partition Information
-# col_name data_type comment
 a string
 b int
 c string
-c string
 d string
+# Partition Information
+# col_name data_type comment
+c string
 d string
@@ -137,18 +140,19 @@ DESC EXTENDED t PARTITION (c='Us', d=1)
 -- !query 8 schema
 struct
 -- !query 8 output
-# Partition Information
-# col_name data_type comment
-Detailed Partition Information CatalogPartition(
-  Partition Values: [c=Us, d=1]
-  Storage(Location: sql/core/spark-warehouse/t/c=Us/d=1)
-  Partition Parameters:{})
 a string
 b int
 c string
+d string
+# Partition Information
+# col_name data_type comment
 c string
 d string
-d string
+
+Detailed Partition Information CatalogPartition(
+  Partition Values: [c=Us, d=1]
+  Storage(Location: sql/core/spark-warehouse/t/c=Us/d=1)
+  Partition Parameters:{})


 -- !query 9
@@ -156,23 +160,25 @@ DESC FORMATTED t PARTITION (c='Us', d=1)
 -- !query 9 schema
 struct
 -- !query 9 output
-# Detailed Partition Information
-# Partition Information
-# Storage Information
-# col_name data_type comment
-Compressed: No
-Database: default
-Location: sql/core/spark-warehouse/t/c=Us/d=1
-Partition Parameters:
-Partition Value: [Us, 1]
-Storage Desc Parameters:
-Table: t
 a string
 b int
 c string
+d string
+# Partition Information
+# col_name data_type comment
 c string
 d string
-d string
+
+# Detailed Partition Information
+Partition Value: [Us, 1]
+Database: default
+Table: t
+Location: sql/core/spark-warehouse/t/c=Us/d=1
+Partition Parameters:
+
+# Storage Information
+Compressed: No
+Storage Desc Parameters:


 -- !query 10
diff --git a/sql/core/src/test/resources/sql-tests/results/json-functions.sql.out b/sql/core/src/test/resources/sql-tests/results/json-functions.sql.out
index 315e1730ce7df..f1013b434b516 100644
--- a/sql/core/src/test/resources/sql-tests/results/json-functions.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/json-functions.sql.out
@@ -7,8 +7,8 @@ describe function to_json
 -- !query 0 schema
 struct
 -- !query 0 output
-Class: org.apache.spark.sql.catalyst.expressions.StructsToJson
 Function: to_json
+Class: org.apache.spark.sql.catalyst.expressions.StructsToJson
 Usage: to_json(expr[, options]) - Returns a json string with a given struct value
@@ -17,7 +17,9 @@ describe function extended to_json
 -- !query 1 schema
 struct
 -- !query 1 output
+Function: to_json
 Class: org.apache.spark.sql.catalyst.expressions.StructsToJson
+Usage: to_json(expr[, options]) - Returns a json string with a given struct value
 Extended Usage:
     Examples:
       > SELECT to_json(named_struct('a', 1, 'b', 2));
@@ -26,9 +28,6 @@ Extended Usage:
       {"time":"26/08/2015"}
       > SELECT to_json(array(named_struct('a', 1, 'b', 2));
       [{"a":1,"b":2}]
-
-Function: to_json
-Usage: to_json(expr[, options]) - Returns a json string with a given struct value


 -- !query 2
@@ -87,8 +86,8 @@ describe function from_json
 -- !query 8 schema
 struct
 -- !query 8 output
-Class: org.apache.spark.sql.catalyst.expressions.JsonToStructs
 Function: from_json
+Class: org.apache.spark.sql.catalyst.expressions.JsonToStructs
 Usage: from_json(jsonStr, schema[, options]) - Returns a struct value with the given `jsonStr` and `schema`.
@@ -97,16 +96,15 @@ describe function extended from_json
 -- !query 9 schema
 struct
 -- !query 9 output
+Function: from_json
 Class: org.apache.spark.sql.catalyst.expressions.JsonToStructs
+Usage: from_json(jsonStr, schema[, options]) - Returns a struct value with the given `jsonStr` and `schema`.
 Extended Usage:
     Examples:
       > SELECT from_json('{"a":1, "b":0.8}', 'a INT, b DOUBLE');
       {"a":1, "b":0.8}
       > SELECT from_json('{"time":"26/08/2015"}', 'time Timestamp', map('timestampFormat', 'dd/MM/yyyy'));
       {"time":"2015-08-26 00:00:00.0"}
-
-Function: from_json
-Usage: from_json(jsonStr, schema[, options]) - Returns a struct value with the given `jsonStr` and `schema`.

 -- !query 10
diff --git a/sql/core/src/test/resources/sql-tests/results/show-tables.sql.out b/sql/core/src/test/resources/sql-tests/results/show-tables.sql.out
index 6d62e6092147b..0fc28e7d6b597 100644
--- a/sql/core/src/test/resources/sql-tests/results/show-tables.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/show-tables.sql.out
@@ -63,10 +63,10 @@ SHOW TABLES
 -- !query 7 schema
 struct
 -- !query 7 output
-arraydata
-mapdata
 show_t1
 show_t2
+arraydata
+mapdata
 show_t3
 testdata
@@ -76,10 +76,10 @@ SHOW TABLES IN showdb
 -- !query 8 schema
 struct
 -- !query 8 output
-arraydata
-mapdata
 show_t1
 show_t2
+arraydata
+mapdata
 show_t3
 testdata
@@ -118,14 +118,6 @@ SHOW TABLE EXTENDED LIKE 'show_t*'
 -- !query 12 schema
 struct
 -- !query 12 output
-show_t3 true CatalogTable(
-  Table: `show_t3`
-  Created:
-  Last Access:
-  Type: VIEW
-  Schema: [StructField(e,IntegerType,true)]
-  Storage())
-
 showdb show_t1 false CatalogTable(
   Table: `showdb`.`show_t1`
   Created:
@@ -146,6 +138,14 @@ showdb show_t2 false CatalogTable(
   Provider: parquet
   Storage(Location: sql/core/spark-warehouse/showdb.db/show_t2))
+show_t3 true CatalogTable(
+  Table: `show_t3`
+  Created:
+  Last Access:
+  Type: VIEW
+  Schema: [StructField(e,IntegerType,true)]
+  Storage())
+


 -- !query 13
 SHOW TABLE EXTENDED
diff --git a/sql/core/src/test/resources/sql-tests/results/show_columns.sql.out b/sql/core/src/test/resources/sql-tests/results/show_columns.sql.out
index 05c3a083ee3b3..5d779df8d79c5 100644
--- a/sql/core/src/test/resources/sql-tests/results/show_columns.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/show_columns.sql.out
@@ -55,8 +55,8 @@ SHOW COLUMNS IN showcolumn1
 -- !query 6 schema
 struct
 -- !query 6 output
-col 2
 col1
+col 2


 -- !query 7
@@ -64,8 +64,8 @@ SHOW COLUMNS IN showdb.showcolumn1
 -- !query 7 schema
 struct
 -- !query 7 output
-col 2
 col1
+col 2


 -- !query 8
@@ -73,8 +73,8 @@ SHOW COLUMNS IN showcolumn1 FROM showdb
 -- !query 8 schema
 struct
 -- !query 8 output
-col 2
 col1
+col 2


 -- !query 9
@@ -82,10 +82,10 @@ SHOW COLUMNS IN showcolumn2 IN showdb
 -- !query 9 schema
 struct
 -- !query 9 output
-month
 price
 qty
 year
+month


 -- !query 10
@@ -102,8 +102,8 @@ SHOW COLUMNS IN showdb.showcolumn1 from SHOWDB
 -- !query 11 schema
 struct
 -- !query 11 output
-col 2
 col1
+col 2


 -- !query 12
@@ -120,8 +120,8 @@ SHOW COLUMNS IN showcolumn3
 -- !query 13 schema
 struct
 -- !query 13 output
-col 4
 col3
+col 4


 -- !query 14
@@ -156,8 +156,8 @@ SHOW COLUMNS IN global_temp.showcolumn4
 -- !query 17 schema
 struct
 -- !query 17 output
-col 5
 col1
+col 5


 -- !query 18
@@ -165,8 +165,8 @@ SHOW COLUMNS IN showcolumn4 FROM global_temp
 -- !query 18 schema
 struct
 -- !query 18 output
-col 5
 col1
+col 5


 -- !query 19
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/SQLQueryTestSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/SQLQueryTestSuite.scala
index 4092862c430b1..44fb0ea40362b 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/SQLQueryTestSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/SQLQueryTestSuite.scala
@@ -165,8 +165,8 @@ class SQLQueryTestSuite extends QueryTest with SharedSQLContext {
         s"-- Number of queries: ${outputs.size}\n\n\n" +
         outputs.zipWithIndex.map{case (qr, i) => qr.toString(i)}.mkString("\n\n\n") + "\n"
       }
-      val resultFile = new File(testCase.resultFile);
-      val parent = resultFile.getParentFile();
+      val resultFile = new File(testCase.resultFile)
+      val parent = resultFile.getParentFile
       if (!parent.exists()) {
         assert(parent.mkdirs(), "Could not create directory: " + parent)
       }
@@ -214,6 +214,7 @@ class SQLQueryTestSuite extends QueryTest with SharedSQLContext {
     // Returns true if the plan is supposed to be sorted.
     def isSorted(plan: LogicalPlan): Boolean = plan match {
      case _: Join | _: Aggregate | _: Generate | _: Sample | _: Distinct => false
+      case _: Command => true
       case PhysicalOperation(_, _, Sort(_, true, _)) => true
       case _ => plan.children.iterator.exists(isSorted)
     }