@@ -20,14 +20,16 @@ package org.apache.spark.sql.hive.client
 import java.io.File
 
 import org.apache.hadoop.util.VersionInfo
-
+import org.apache.spark.sql.Row
 import org.apache.spark.sql.hive.HiveContext
 import org.apache.spark.{Logging, SparkFunSuite}
-import org.apache.spark.sql.catalyst.expressions.{NamedExpression, Literal, AttributeReference, EqualTo}
+import org.apache.spark.sql.catalyst.expressions.{AttributeReference, EqualTo, Literal, NamedExpression}
 import org.apache.spark.sql.catalyst.util.quietly
 import org.apache.spark.sql.types.IntegerType
 import org.apache.spark.tags.ExtendedHiveTest
 import org.apache.spark.util.Utils
+import org.apache.spark.sql.test.SQLTestUtils
+import org.apache.spark.sql.hive.test.TestHiveSingleton
 
 /**
  * A simple set of tests that call the methods of a hive ClientInterface, loading different version
@@ -36,7 +38,7 @@ import org.apache.spark.util.Utils
  * is not fully tested.
  */
 @ExtendedHiveTest
-class VersionsSuite extends SparkFunSuite with Logging {
+class VersionsSuite extends SparkFunSuite with SQLTestUtils with TestHiveSingleton with Logging {
 
   // In order to speed up test execution during development or in Jenkins, you can specify the path
   // of an existing Ivy cache:
@@ -216,5 +218,41 @@ class VersionsSuite extends SparkFunSuite with Logging {
216218 " as 'COMPACT' WITH DEFERRED REBUILD" )
217219 client.reset()
218220 }
+
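+    // Verify that a basic CREATE TABLE AS SELECT works against this Hive version.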
+    test(s"$version: CREATE TABLE AS SELECT") {
+      withTable("tbl") {
+        sqlContext.sql("CREATE TABLE tbl AS SELECT 1 AS a")
+        assert(sqlContext.table("tbl").collect().toSeq == Seq(Row(1)))
+      }
+    }
+
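+    // INSERT should remove its temporary staging directory and files under the table location.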
+    test(s"$version: Delete the temporary staging directory and files after each insert") {
+      withTempDir { tmpDir =>
+        withTable("tab", "tbl") {
+          sqlContext.sql(
+            s"""
+               |CREATE TABLE tab(c1 string)
+               |location '${tmpDir.toURI.toString}'
+             """.stripMargin)
+
+          sqlContext.sql("CREATE TABLE tbl AS SELECT 1 AS a")
+
+          sqlContext.sql(s"INSERT OVERWRITE TABLE tab SELECT * from tbl")
+
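+          // Recursively collect the names of all entries under the given directory.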
+          def listFiles(path: File): List[String] = {
+            val dir = path.listFiles()
+            val folders = dir.filter(_.isDirectory).toList
+            val filePaths = dir.map(_.getName).toList
+            folders.flatMap(listFiles) ++: filePaths
+          }
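+          // Only the written data file and its checksum should remain, with no leftover staging dirs.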
+          val expectedFiles = ".part-00000.crc" :: "part-00000" :: Nil
+          assert(listFiles(tmpDir).sorted == expectedFiles)
+        }
+      }
+    }
   }
 }