4 files changed: +13 -13 lines changed

Changed paths:
main/scala/org/apache/spark/sql/sources
test/scala/org/apache/spark/sql/sources

@@ -209,12 +209,15 @@ private[sql] case class CreateTableUsing(
             sys.error(s"Failed to load class for data source: $provider")
         }
     }
-    val dataSource =
-      clazz.newInstance().asInstanceOf[org.apache.spark.sql.sources.SchemaRelationProvider]
     val relation = if (tableCols.isEmpty) {
-      dataSource.createRelation(
-        sqlContext, new CaseInsensitiveMap(options))
+      val dataSource =
+        clazz.newInstance().asInstanceOf[org.apache.spark.sql.sources.RelationProvider]
+
+      dataSource.createRelation(sqlContext, new CaseInsensitiveMap(options))
     } else {
+      val dataSource =
+        clazz.newInstance().asInstanceOf[org.apache.spark.sql.sources.SchemaRelationProvider]
+
       dataSource.createRelation(
         sqlContext, new CaseInsensitiveMap(options), Some(StructType(tableCols)))
     }
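The dispatch added above only casts to SchemaRelationProvider when the DDL actually supplies column definitions; otherwise it casts to RelationProvider, which lets the data source infer its own schema. Below is a minimal, self-contained sketch of that logic, assuming stub types in place of the real Spark classes (SQLContext, StructType, and BaseRelation here are placeholders, and resolveRelation is a hypothetical helper, not the actual Spark code):

// Stub types standing in for the real Spark classes; only the names mirror the diff.
class SQLContext
case class StructType(fields: Seq[String])
trait BaseRelation

// Provider that builds a relation from options alone (as RelationProvider is used above).
trait RelationProvider {
  def createRelation(sqlContext: SQLContext, parameters: Map[String, String]): BaseRelation
}

// Provider that additionally accepts an optional user-specified schema
// (as SchemaRelationProvider is used above).
trait SchemaRelationProvider {
  def createRelation(
      sqlContext: SQLContext,
      parameters: Map[String, String],
      schema: Option[StructType]): BaseRelation
}

object ProviderDispatchSketch {
  // Mirrors the if/else introduced by the patch: cast to RelationProvider when no
  // columns were given in the DDL, otherwise cast to SchemaRelationProvider.
  def resolveRelation(
      clazz: Class[_],
      sqlContext: SQLContext,
      options: Map[String, String],
      tableCols: Seq[String]): BaseRelation = {
    if (tableCols.isEmpty) {
      clazz.newInstance().asInstanceOf[RelationProvider]
        .createRelation(sqlContext, options)
    } else {
      clazz.newInstance().asInstanceOf[SchemaRelationProvider]
        .createRelation(sqlContext, options, Some(StructType(tableCols)))
    }
  }
}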
@@ -21,11 +21,10 @@ import scala.language.existentials
 
 import org.apache.spark.sql._
 
-class FilteredScanSource extends SchemaRelationProvider {
+class FilteredScanSource extends RelationProvider {
   override def createRelation(
       sqlContext: SQLContext,
-      parameters: Map[String, String],
-      schema: Option[StructType] = None): BaseRelation = {
+      parameters: Map[String, String]): BaseRelation = {
     SimpleFilteredScan(parameters("from").toInt, parameters("to").toInt)(sqlContext)
   }
 }
@@ -19,11 +19,10 @@ package org.apache.spark.sql.sources
 
 import org.apache.spark.sql._
 
-class PrunedScanSource extends SchemaRelationProvider {
+class PrunedScanSource extends RelationProvider {
   override def createRelation(
       sqlContext: SQLContext,
-      parameters: Map[String, String],
-      schema: Option[StructType] = None): BaseRelation = {
+      parameters: Map[String, String]): BaseRelation = {
     SimplePrunedScan(parameters("from").toInt, parameters("to").toInt)(sqlContext)
   }
 }
@@ -21,11 +21,10 @@ import org.apache.spark.sql._
 
 class DefaultSource extends SimpleScanSource
 
-class SimpleScanSource extends SchemaRelationProvider {
+class SimpleScanSource extends RelationProvider {
   override def createRelation(
       sqlContext: SQLContext,
-      parameters: Map[String, String],
-      schema: Option[StructType] = None): BaseRelation = {
+      parameters: Map[String, String]): BaseRelation = {
     SimpleScan(parameters("from").toInt, parameters("TO").toInt)(sqlContext)
   }
 }
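For context, a test source like the ones above is registered through the data source DDL and then queried as an ordinary table. The following is a hedged sketch of how SimpleScanSource might be exercised from a suite; the table name and exact statement shape are assumptions (and a SQLContext named sqlContext is assumed to be in scope), but the from/to options match what the source reads from its parameters map:

// Register the test source via the data source DDL; "oneToTen" is an illustrative name.
// Assumes an org.apache.spark.sql.SQLContext called sqlContext is already in scope.
sqlContext.sql(
  """
    |CREATE TEMPORARY TABLE oneToTen
    |USING org.apache.spark.sql.sources.SimpleScanSource
    |OPTIONS (from '1', to '10')
  """.stripMargin)

// The relation returned by SimpleScanSource.createRelation is then queryable like any table.
sqlContext.sql("SELECT * FROM oneToTen").collect()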