Commit 56de571
fix style
1 parent d0c5bb8
File tree: 2 files changed, +10 -10 lines

examples/src/main/python/ml/simple_text_classification_pipeline.py
9 additions, 9 deletions

@@ -29,28 +29,28 @@
         sc.parallelize([(0L, "a b c d e spark", 1.0),
                         (1L, "b d", 0.0),
                         (2L, "spark f g h", 1.0),
-                        (3L, "hadoop mapreduce", 0.0)]) \
+                        (3L, "hadoop mapreduce", 0.0)])
         .map(lambda x: Row(id=x[0], text=x[1], label=x[2])))

     tokenizer = Tokenizer() \
-      .setInputCol("text") \
-      .setOutputCol("words")
+        .setInputCol("text") \
+        .setOutputCol("words")
     hashingTF = HashingTF() \
-      .setInputCol(tokenizer.getOutputCol()) \
-      .setOutputCol("features")
+        .setInputCol(tokenizer.getOutputCol()) \
+        .setOutputCol("features")
     lr = LogisticRegression() \
-      .setMaxIter(10) \
-      .setRegParam(0.01)
+        .setMaxIter(10) \
+        .setRegParam(0.01)
     pipeline = Pipeline() \
-      .setStages([tokenizer, hashingTF, lr])
+        .setStages([tokenizer, hashingTF, lr])

     model = pipeline.fit(training)

     test = sqlCtx.inferSchema(
         sc.parallelize([(4L, "spark i j k"),
                         (5L, "l m n"),
                         (6L, "mapreduce spark"),
-                        (7L, "apache hadoop")]) \
+                        (7L, "apache hadoop")])
         .map(lambda x: Row(id=x[0], text=x[1])))

     for row in model.transform(test).collect():
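
For reference, here is a self-contained sketch of the example as it reads after this commit. It assumes the Spark 1.2-era Python 2 API that the diff uses (long literals such as 0L, SQLContext.inferSchema); the import lines and the print statement in the final loop fall outside the diff and are reconstructed from context. The trailing backslashes removed by this commit were unnecessary because those continuations already sit inside parentheses; the backslashes kept on the setter chains are still required, since those lines are not bracketed.

from pyspark import SparkContext
from pyspark.sql import SQLContext, Row
from pyspark.ml import Pipeline
from pyspark.ml.feature import HashingTF, Tokenizer
from pyspark.ml.classification import LogisticRegression

sc = SparkContext(appName="SimpleTextClassificationPipeline")
sqlCtx = SQLContext(sc)

# Prepare training documents, labeled spark-related (1.0) or not (0.0).
training = sqlCtx.inferSchema(
    sc.parallelize([(0L, "a b c d e spark", 1.0),
                    (1L, "b d", 0.0),
                    (2L, "spark f g h", 1.0),
                    (3L, "hadoop mapreduce", 0.0)])
    .map(lambda x: Row(id=x[0], text=x[1], label=x[2])))

# Configure a three-stage pipeline: tokenize text, hash tokens into
# term-frequency feature vectors, then fit logistic regression.
tokenizer = Tokenizer() \
    .setInputCol("text") \
    .setOutputCol("words")
hashingTF = HashingTF() \
    .setInputCol(tokenizer.getOutputCol()) \
    .setOutputCol("features")
lr = LogisticRegression() \
    .setMaxIter(10) \
    .setRegParam(0.01)
pipeline = Pipeline() \
    .setStages([tokenizer, hashingTF, lr])

# Fitting runs the feature stages and trains the classifier in one call.
model = pipeline.fit(training)

# Unlabeled test documents; transform() applies the whole fitted pipeline.
test = sqlCtx.inferSchema(
    sc.parallelize([(4L, "spark i j k"),
                    (5L, "l m n"),
                    (6L, "mapreduce spark"),
                    (7L, "apache hadoop")])
    .map(lambda x: Row(id=x[0], text=x[1])))

for row in model.transform(test).collect():
    print row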

python/pyspark/ml/__init__.py
1 addition, 1 deletion

@@ -18,7 +18,7 @@
 from abc import ABCMeta, abstractmethod, abstractproperty

 from pyspark import SparkContext
-from pyspark.sql import SchemaRDD, inherit_doc # TODO: move inherit_doc to Spark Core
+from pyspark.sql import SchemaRDD, inherit_doc  # TODO: move inherit_doc to Spark Core
 from pyspark.ml.param import Param, Params
 from pyspark.ml.util import Identifiable
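
The changed line here is a whitespace-only style fix (PEP 8 asks for two spaces before an inline comment). The TODO refers to inherit_doc, a helper that at this commit still lives in pyspark.sql: a class decorator that copies docstrings from base-class methods onto overriding methods that lack their own. A minimal sketch of that pattern follows; it is an illustration of the idea, not the actual PySpark implementation.

def inherit_doc(cls):
    """Class decorator: give any method of `cls` that lacks a docstring
    the docstring of the same-named method on the nearest base class.

    Sketch of the pattern only; PySpark's real helper may differ.
    """
    for name, func in vars(cls).items():
        if callable(func) and func.__doc__ is None:
            # Walk the MRO past `cls` itself to find an inherited docstring.
            for parent in cls.__mro__[1:]:
                parent_func = getattr(parent, name, None)
                if parent_func is not None and getattr(parent_func, "__doc__", None):
                    func.__doc__ = parent_func.__doc__
                    break
    return cls

Applied as @inherit_doc on a subclass, an override such as Estimator.fit would pick up the base class's docstring automatically, which keeps API docs consistent across the ml package.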
