Skip to content

Commit c025a46

Browse files
yhuai authored and marmbrus committed
[SQL] Move SaveMode to SQL package.
Author: Yin Huai <[email protected]>. Closes #4542 from yhuai/moveSaveMode and squashes the following commits: 65a4425 [Yin Huai] Move SaveMode to sql package.
1 parent ada993e commit c025a46

File tree

12 files changed

+9
-14
lines changed

12 files changed

+9
-14
lines changed

python/pyspark/sql/dataframe.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -149,7 +149,7 @@ def insertInto(self, tableName, overwrite=False):
149149
def _java_save_mode(self, mode):
150150
"""Returns the Java save mode based on the Python save mode represented by a string.
151151
"""
152-
jSaveMode = self._sc._jvm.org.apache.spark.sql.sources.SaveMode
152+
jSaveMode = self._sc._jvm.org.apache.spark.sql.SaveMode
153153
jmode = jSaveMode.ErrorIfExists
154154
mode = mode.lower()
155155
if mode == "append":

sql/core/src/main/java/org/apache/spark/sql/sources/SaveMode.java renamed to sql/core/src/main/java/org/apache/spark/sql/SaveMode.java

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -14,7 +14,7 @@
1414
* See the License for the specific language governing permissions and
1515
* limitations under the License.
1616
*/
17-
package org.apache.spark.sql.sources;
17+
package org.apache.spark.sql;
1818

1919
/**
2020
* SaveMode is used to specify the expected behavior of saving a DataFrame to a data source.

sql/core/src/main/scala/org/apache/spark/sql/DataFrame.scala

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -27,7 +27,6 @@ import org.apache.spark.api.java.JavaRDD
2727
import org.apache.spark.rdd.RDD
2828
import org.apache.spark.storage.StorageLevel
2929
import org.apache.spark.sql.catalyst.plans.logical._
30-
import org.apache.spark.sql.sources.SaveMode
3130
import org.apache.spark.sql.types.StructType
3231
import org.apache.spark.util.Utils
3332

sql/core/src/main/scala/org/apache/spark/sql/IncomputableColumn.scala

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -26,7 +26,6 @@ import org.apache.spark.sql.catalyst.analysis.{UnresolvedAttribute, UnresolvedSt
2626
import org.apache.spark.sql.catalyst.expressions._
2727
import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
2828
import org.apache.spark.storage.StorageLevel
29-
import org.apache.spark.sql.sources.SaveMode
3029
import org.apache.spark.sql.types.StructType
3130

3231
private[sql] class IncomputableColumn(protected[sql] val expr: Expression) extends Column {

sql/core/src/main/scala/org/apache/spark/sql/json/JSONRelation.scala

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -21,7 +21,7 @@ import java.io.IOException
2121

2222
import org.apache.hadoop.fs.Path
2323

24-
import org.apache.spark.sql.{DataFrame, SQLContext}
24+
import org.apache.spark.sql.{SaveMode, DataFrame, SQLContext}
2525
import org.apache.spark.sql.sources._
2626
import org.apache.spark.sql.types.StructType
2727

sql/core/src/main/scala/org/apache/spark/sql/parquet/newParquet.scala

Lines changed: 1 addition & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -44,9 +44,8 @@ import org.apache.spark.rdd.{NewHadoopPartition, NewHadoopRDD, RDD}
4444
import org.apache.spark.sql.catalyst.expressions._
4545
import org.apache.spark.sql.parquet.ParquetTypesConverter._
4646
import org.apache.spark.sql.sources._
47+
import org.apache.spark.sql.{DataFrame, Row, SaveMode, SQLConf, SQLContext}
4748
import org.apache.spark.sql.types.{IntegerType, StructField, StructType, _}
48-
import org.apache.spark.sql.types.StructType._
49-
import org.apache.spark.sql.{DataFrame, Row, SQLConf, SQLContext}
5049
import org.apache.spark.{Partition => SparkPartition, TaskContext, SerializableWritable, Logging, SparkException}
5150

5251

sql/core/src/main/scala/org/apache/spark/sql/sources/ddl.scala

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -20,7 +20,7 @@ package org.apache.spark.sql.sources
2020
import scala.language.implicitConversions
2121

2222
import org.apache.spark.Logging
23-
import org.apache.spark.sql.{DataFrame, SQLContext}
23+
import org.apache.spark.sql.{SaveMode, DataFrame, SQLContext}
2424
import org.apache.spark.sql.catalyst.plans.logical._
2525
import org.apache.spark.sql.catalyst.AbstractSparkSQLParser
2626
import org.apache.spark.sql.catalyst.analysis.UnresolvedRelation

sql/core/src/main/scala/org/apache/spark/sql/sources/interfaces.scala

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -18,7 +18,7 @@ package org.apache.spark.sql.sources
1818

1919
import org.apache.spark.annotation.{Experimental, DeveloperApi}
2020
import org.apache.spark.rdd.RDD
21-
import org.apache.spark.sql.{DataFrame, Row, SQLContext}
21+
import org.apache.spark.sql.{SaveMode, DataFrame, Row, SQLContext}
2222
import org.apache.spark.sql.catalyst.expressions.{Expression, Attribute}
2323
import org.apache.spark.sql.types.StructType
2424

sql/core/src/test/scala/org/apache/spark/sql/sources/SaveLoadSuite.scala

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -22,7 +22,7 @@ import java.io.File
2222
import org.scalatest.BeforeAndAfterAll
2323

2424
import org.apache.spark.sql.catalyst.util
25-
import org.apache.spark.sql.{SQLConf, DataFrame}
25+
import org.apache.spark.sql.{SaveMode, SQLConf, DataFrame}
2626
import org.apache.spark.sql.types._
2727
import org.apache.spark.util.Utils
2828

sql/hive/src/main/scala/org/apache/spark/sql/hive/execution/commands.scala

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -21,7 +21,7 @@ import org.apache.spark.annotation.DeveloperApi
2121
import org.apache.spark.sql.catalyst.analysis.EliminateSubQueries
2222
import org.apache.spark.sql.catalyst.util._
2323
import org.apache.spark.sql.sources._
24-
import org.apache.spark.sql.{DataFrame, SQLContext}
24+
import org.apache.spark.sql.{SaveMode, DataFrame, SQLContext}
2525
import org.apache.spark.sql.catalyst.expressions.Row
2626
import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
2727
import org.apache.spark.sql.execution.RunnableCommand

0 commit comments

Comments (0)