Skip to content

Commit 55cf0c3

Browse files
committed
fix python api
1 parent b0c252a commit 55cf0c3

File tree

1 file changed

+24
-3
lines changed

1 file changed

+24
-3
lines changed

python/pyspark/sql/catalog.py

Lines changed: 24 additions & 3 deletions
Original file line number | Diff line number | Diff line change
@@ -15,6 +15,7 @@
1515
# limitations under the License.
1616
#
1717

18+
import warnings
1819
from collections import namedtuple
1920

2021
from pyspark import since
@@ -138,7 +139,27 @@ def listColumns(self, tableName, dbName=None):
138139

139140
@since(2.0)
def createExternalTable(self, tableName, path=None, source=None, schema=None, **options):
    """Creates a table based on the dataset in a data source.

    It returns the DataFrame associated with the external table.

    The data source is specified by the ``source`` and a set of ``options``.
    If ``source`` is not specified, the default data source configured by
    ``spark.sql.sources.default`` will be used.

    Optionally, a schema can be provided as the schema of the returned :class:`DataFrame` and
    created external table.

    .. deprecated:: 2.2
        Use :func:`createTable` instead.

    :param tableName: name of the table to create.
    :param path: optional path of the data backing the table.
    :param source: optional data source name; defaults to
        ``spark.sql.sources.default`` when omitted.
    :param schema: optional :class:`StructType` schema for the table.
    :return: :class:`DataFrame`
    """
    # Deprecated shim kept for backward compatibility: emit a warning and
    # forward unchanged to createTable so existing callers keep working.
    warnings.warn(
        "createExternalTable is deprecated since Spark 2.2, please use createTable instead.",
        DeprecationWarning,
        # stacklevel=2 attributes the warning to the caller's line rather
        # than to this wrapper, which is what users need to act on it.
        stacklevel=2)
    return self.createTable(tableName, path, source, schema, **options)
159+
160+
@since(2.2)
161+
def createTable(self, tableName, path=None, source=None, schema=None, **options):
162+
"""Creates a table based on the dataset in a data source.
142163
143164
It returns the DataFrame associated with the external table.
144165
@@ -157,12 +178,12 @@ def createExternalTable(self, tableName, path=None, source=None, schema=None, **
157178
source = self._sparkSession.conf.get(
158179
"spark.sql.sources.default", "org.apache.spark.sql.parquet")
159180
if schema is None:
160-
df = self._jcatalog.createExternalTable(tableName, source, options)
181+
df = self._jcatalog.createTable(tableName, source, options)
161182
else:
162183
if not isinstance(schema, StructType):
163184
raise TypeError("schema should be StructType")
164185
scala_datatype = self._jsparkSession.parseDataType(schema.json())
165-
df = self._jcatalog.createExternalTable(tableName, source, scala_datatype, options)
186+
df = self._jcatalog.createTable(tableName, source, scala_datatype, options)
166187
return DataFrame(df, self._sparkSession._wrapped)
167188

168189
@since(2.0)

0 commit comments

Comments
 (0)