python/pyspark/serializers.py (10 changes: 8 additions & 2 deletions)
@@ -216,9 +216,10 @@ def _create_batch(series, timezone):
     :param timezone: A timezone to respect when handling timestamp values
     :return: Arrow RecordBatch
     """
-
-    from pyspark.sql.types import _check_series_convert_timestamps_internal
+    import decimal
+    from distutils.version import LooseVersion
     import pyarrow as pa
+    from pyspark.sql.types import _check_series_convert_timestamps_internal
     # Make input conform to [(series1, type1), (series2, type2), ...]
     if not isinstance(series, (list, tuple)) or \
             (len(series) == 2 and isinstance(series[1], pa.DataType)):
@@ -236,6 +237,11 @@ def create_array(s, t):
             # TODO: need decode before converting to Arrow in Python 2
             return pa.Array.from_pandas(s.apply(
                 lambda v: v.decode("utf-8") if isinstance(v, str) else v), mask=mask, type=t)
+        elif t is not None and pa.types.is_decimal(t) and \
+                LooseVersion("0.9.0") <= LooseVersion(pa.__version__) < LooseVersion("0.10.0"):
Member: consider a single place to check pyarrow versions?

Member Author: Yea, but I am not aware of other issues specific to PyArrow versions at the moment. I will make a single place for these checks if I end up fixing more PyArrow-version-specific things.
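As an illustration of the suggestion above, a consolidated version check could look roughly like the sketch below; the helper names are hypothetical and this is not code from the PR:

```python
from distutils.version import LooseVersion


def _pyarrow_version():
    """Return the installed PyArrow version, importing pyarrow lazily."""
    import pyarrow as pa
    return LooseVersion(pa.__version__)


def _needs_decimal_nan_workaround():
    """True on PyArrow 0.9.x, where None entries in a decimal series must be
    replaced with Decimal('NaN') before pa.Array.from_pandas (see ARROW-2432)."""
    return LooseVersion("0.9.0") <= _pyarrow_version() < LooseVersion("0.10.0")
```

Callers such as create_array could then branch on _needs_decimal_nan_workaround() instead of repeating the version comparison inline.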

+            # TODO: see ARROW-2432. Remove when the minimum PyArrow version becomes 0.10.0.
+            return pa.Array.from_pandas(s.apply(
+                lambda v: decimal.Decimal('NaN') if v is None else v), mask=mask, type=t)
Member: add test?

Member Author (@HyukjinKwon, Jul 31, 2018): The existing test test_vectorized_udf_null_decimal should already cover this; it fails without the current change when PyArrow 0.9.0 is used.
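For reference on the testing discussion, a null-decimal pandas UDF test looks roughly like the sketch below; this is illustrative only, not the actual test_vectorized_udf_null_decimal source, and it assumes an active SparkSession named spark with Arrow-backed pandas UDFs available:

```python
from decimal import Decimal

from pyspark.sql.functions import col, pandas_udf
from pyspark.sql.types import DecimalType

# A decimal column containing a null; the identity pandas UDF must round-trip it.
df = spark.createDataFrame([(Decimal("3.14"),), (None,)], schema="v decimal(38, 18)")

identity = pandas_udf(lambda s: s, DecimalType(38, 18))
result = [row.v for row in df.select(identity(col("v")).alias("v")).collect()]

assert result[0] == Decimal("3.14")
assert result[1] is None
```

Per the comment above, the None row is what fails during Arrow conversion on PyArrow 0.9.0 without this change.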

         return pa.Array.from_pandas(s, mask=mask, type=t)
 
     arrs = [create_array(s, t) for s, t in series]
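To see the version-gated branch above in isolation, here is a minimal standalone sketch of the same workaround applied to a plain pandas series; the values and the decimal type are made up for illustration:

```python
import decimal
from distutils.version import LooseVersion

import pandas as pd
import pyarrow as pa

# A decimal series with a missing value, similar to what a pandas_udf may produce.
s = pd.Series([decimal.Decimal("1.50"), None, decimal.Decimal("2.25")])
mask = s.isnull()
t = pa.decimal128(38, 18)

if LooseVersion("0.9.0") <= LooseVersion(pa.__version__) < LooseVersion("0.10.0"):
    # ARROW-2432 workaround: replace None with Decimal('NaN') so conversion
    # succeeds; the mask still marks the original None positions as null.
    s = s.apply(lambda v: decimal.Decimal("NaN") if v is None else v)

arr = pa.Array.from_pandas(s, mask=mask, type=t)
print(arr)  # the second slot is null in the resulting Arrow array
```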