
Commit 3f780dc: resolve conflict

2 parents: 1225496 + 123b4fb

746 files changed: +22939 / -8048 lines

R/check-cran.sh

Lines changed: 10 additions & 10 deletions

@@ -20,18 +20,18 @@
 set -o pipefail
 set -e
 
-FWDIR="$(cd `dirname "${BASH_SOURCE[0]}"`; pwd)"
-pushd $FWDIR > /dev/null
+FWDIR="$(cd "`dirname "${BASH_SOURCE[0]}"`"; pwd)"
+pushd "$FWDIR" > /dev/null
 
-. $FWDIR/find-r.sh
+. "$FWDIR/find-r.sh"
 
 # Install the package (this is required for code in vignettes to run when building it later)
 # Build the latest docs, but not vignettes, which is built with the package next
-. $FWDIR/install-dev.sh
+. "$FWDIR/install-dev.sh"
 
 # Build source package with vignettes
 SPARK_HOME="$(cd "${FWDIR}"/..; pwd)"
-. "${SPARK_HOME}"/bin/load-spark-env.sh
+. "${SPARK_HOME}/bin/load-spark-env.sh"
 if [ -f "${SPARK_HOME}/RELEASE" ]; then
   SPARK_JARS_DIR="${SPARK_HOME}/jars"
 else
@@ -40,16 +40,16 @@ fi
 
 if [ -d "$SPARK_JARS_DIR" ]; then
   # Build a zip file containing the source package with vignettes
-  SPARK_HOME="${SPARK_HOME}" "$R_SCRIPT_PATH/"R CMD build $FWDIR/pkg
+  SPARK_HOME="${SPARK_HOME}" "$R_SCRIPT_PATH/R" CMD build "$FWDIR/pkg"
 
   find pkg/vignettes/. -not -name '.' -not -name '*.Rmd' -not -name '*.md' -not -name '*.pdf' -not -name '*.html' -delete
 else
-  echo "Error Spark JARs not found in $SPARK_HOME"
+  echo "Error Spark JARs not found in '$SPARK_HOME'"
   exit 1
 fi
 
 # Run check as-cran.
-VERSION=`grep Version $FWDIR/pkg/DESCRIPTION | awk '{print $NF}'`
+VERSION=`grep Version "$FWDIR/pkg/DESCRIPTION" | awk '{print $NF}'`
 
 CRAN_CHECK_OPTIONS="--as-cran"
 
@@ -67,10 +67,10 @@ echo "Running CRAN check with $CRAN_CHECK_OPTIONS options"
 
 if [ -n "$NO_TESTS" ] && [ -n "$NO_MANUAL" ]
 then
-  "$R_SCRIPT_PATH/"R CMD check $CRAN_CHECK_OPTIONS SparkR_"$VERSION".tar.gz
+  "$R_SCRIPT_PATH/R" CMD check $CRAN_CHECK_OPTIONS "SparkR_$VERSION.tar.gz"
else
   # This will run tests and/or build vignettes, and require SPARK_HOME
-  SPARK_HOME="${SPARK_HOME}" "$R_SCRIPT_PATH/"R CMD check $CRAN_CHECK_OPTIONS SparkR_"$VERSION".tar.gz
+  SPARK_HOME="${SPARK_HOME}" "$R_SCRIPT_PATH/R" CMD check $CRAN_CHECK_OPTIONS "SparkR_$VERSION.tar.gz"
 fi
 
 popd > /dev/null
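The recurring fix across these script changes is quoting variable expansions. An unquoted expansion such as pushd $FWDIR undergoes word splitting, so the build scripts break when Spark is checked out under a path containing spaces. A minimal sketch of the failure mode (the directory name below is purely illustrative):

  mkdir -p "/tmp/spark src/R"   # hypothetical checkout path with a space
  FWDIR="/tmp/spark src/R"
  pushd $FWDIR                  # splits into two words; pushd errors out
  pushd "$FWDIR"                # quoted expansion keeps the path intact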

R/create-docs.sh

Lines changed: 5 additions & 5 deletions

@@ -33,23 +33,23 @@ export FWDIR="$(cd "`dirname "${BASH_SOURCE[0]}"`"; pwd)"
 export SPARK_HOME="$(cd "`dirname "${BASH_SOURCE[0]}"`"/..; pwd)"
 
 # Required for setting SPARK_SCALA_VERSION
-. "${SPARK_HOME}"/bin/load-spark-env.sh
+. "${SPARK_HOME}/bin/load-spark-env.sh"
 
 echo "Using Scala $SPARK_SCALA_VERSION"
 
-pushd $FWDIR > /dev/null
-. $FWDIR/find-r.sh
+pushd "$FWDIR" > /dev/null
+. "$FWDIR/find-r.sh"
 
 # Install the package (this will also generate the Rd files)
-. $FWDIR/install-dev.sh
+. "$FWDIR/install-dev.sh"
 
 # Now create HTML files
 
 # knit_rd puts html in current working directory
 mkdir -p pkg/html
 pushd pkg/html
 
-"$R_SCRIPT_PATH/"Rscript -e 'libDir <- "../../lib"; library(SparkR, lib.loc=libDir); library(knitr); knit_rd("SparkR", links = tools::findHTMLlinks(paste(libDir, "SparkR", sep="/")))'
+"$R_SCRIPT_PATH/Rscript" -e 'libDir <- "../../lib"; library(SparkR, lib.loc=libDir); library(knitr); knit_rd("SparkR", links = tools::findHTMLlinks(paste(libDir, "SparkR", sep="/")))'
 
 popd

R/create-rd.sh

Lines changed: 4 additions & 4 deletions

@@ -29,9 +29,9 @@
 set -o pipefail
 set -e
 
-FWDIR="$(cd `dirname "${BASH_SOURCE[0]}"`; pwd)"
-pushd $FWDIR > /dev/null
-. $FWDIR/find-r.sh
+FWDIR="$(cd "`dirname "${BASH_SOURCE[0]}"`"; pwd)"
+pushd "$FWDIR" > /dev/null
+. "$FWDIR/find-r.sh"
 
 # Generate Rd files if devtools is installed
-"$R_SCRIPT_PATH/"Rscript -e ' if("devtools" %in% rownames(installed.packages())) { library(devtools); devtools::document(pkg="./pkg", roclets=c("rd")) }'
+"$R_SCRIPT_PATH/Rscript" -e ' if("devtools" %in% rownames(installed.packages())) { library(devtools); devtools::document(pkg="./pkg", roclets=c("rd")) }'

R/install-dev.sh

Lines changed: 7 additions & 7 deletions

@@ -29,21 +29,21 @@
 set -o pipefail
 set -e
 
-FWDIR="$(cd `dirname "${BASH_SOURCE[0]}"`; pwd)"
+FWDIR="$(cd "`dirname "${BASH_SOURCE[0]}"`"; pwd)"
 LIB_DIR="$FWDIR/lib"
 
-mkdir -p $LIB_DIR
+mkdir -p "$LIB_DIR"
 
-pushd $FWDIR > /dev/null
-. $FWDIR/find-r.sh
+pushd "$FWDIR" > /dev/null
+. "$FWDIR/find-r.sh"
 
-. $FWDIR/create-rd.sh
+. "$FWDIR/create-rd.sh"
 
 # Install SparkR to $LIB_DIR
-"$R_SCRIPT_PATH/"R CMD INSTALL --library=$LIB_DIR $FWDIR/pkg/
+"$R_SCRIPT_PATH/R" CMD INSTALL --library="$LIB_DIR" "$FWDIR/pkg/"
 
 # Zip the SparkR package so that it can be distributed to worker nodes on YARN
-cd $LIB_DIR
+cd "$LIB_DIR"
 jar cfM "$LIB_DIR/sparkr.zip" SparkR
 
 popd > /dev/null
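As the comment in the script notes, the installed SparkR library is zipped so YARN can ship it to worker nodes. The -M flag tells jar not to add a manifest, so sparkr.zip is a plain archive whose entries sit under a top-level SparkR/ directory. A quick sanity check of the archive, assuming the same JDK jar tool the script already relies on:

  cd "$LIB_DIR"
  jar tf sparkr.zip | head -n 3   # expect entries like SparkR/DESCRIPTION
  unzip -l sparkr.zip             # equivalent listing, if unzip is installed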

R/install-source-package.sh

Lines changed: 10 additions & 10 deletions

@@ -29,28 +29,28 @@
 set -o pipefail
 set -e
 
-FWDIR="$(cd `dirname "${BASH_SOURCE[0]}"`; pwd)"
-pushd $FWDIR > /dev/null
-. $FWDIR/find-r.sh
+FWDIR="$(cd "`dirname "${BASH_SOURCE[0]}"`"; pwd)"
+pushd "$FWDIR" > /dev/null
+. "$FWDIR/find-r.sh"
 
 if [ -z "$VERSION" ]; then
-  VERSION=`grep Version $FWDIR/pkg/DESCRIPTION | awk '{print $NF}'`
+  VERSION=`grep Version "$FWDIR/pkg/DESCRIPTION" | awk '{print $NF}'`
 fi
 
-if [ ! -f "$FWDIR"/SparkR_"$VERSION".tar.gz ]; then
-  echo -e "R source package file $FWDIR/SparkR_$VERSION.tar.gz is not found."
+if [ ! -f "$FWDIR/SparkR_$VERSION.tar.gz" ]; then
+  echo -e "R source package file '$FWDIR/SparkR_$VERSION.tar.gz' is not found."
   echo -e "Please build R source package with check-cran.sh"
   exit -1;
 fi
 
 echo "Removing lib path and installing from source package"
 LIB_DIR="$FWDIR/lib"
-rm -rf $LIB_DIR
-mkdir -p $LIB_DIR
-"$R_SCRIPT_PATH/"R CMD INSTALL SparkR_"$VERSION".tar.gz --library=$LIB_DIR
+rm -rf "$LIB_DIR"
+mkdir -p "$LIB_DIR"
+"$R_SCRIPT_PATH/R" CMD INSTALL "SparkR_$VERSION.tar.gz" --library="$LIB_DIR"
 
 # Zip the SparkR package so that it can be distributed to worker nodes on YARN
-pushd $LIB_DIR > /dev/null
+pushd "$LIB_DIR" > /dev/null
 jar cfM "$LIB_DIR/sparkr.zip" SparkR
 popd > /dev/null
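The VERSION fallback above scrapes the package version from the DESCRIPTION file: grep selects the Version line and awk prints its last whitespace-separated field. A minimal sketch with an illustrative version number and a throwaway file:

  printf 'Package: SparkR\nVersion: 2.2.0\n' > /tmp/DESCRIPTION   # toy DESCRIPTION
  VERSION=`grep Version /tmp/DESCRIPTION | awk '{print $NF}'`
  echo "$VERSION"   # prints 2.2.0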

R/pkg/DESCRIPTION

Lines changed: 3 additions & 0 deletions

@@ -35,6 +35,7 @@ Collate:
     'WindowSpec.R'
     'backend.R'
     'broadcast.R'
+    'catalog.R'
     'client.R'
     'context.R'
     'deserialize.R'
@@ -43,6 +44,7 @@ Collate:
     'jvm.R'
     'mllib_classification.R'
     'mllib_clustering.R'
+    'mllib_fpm.R'
     'mllib_recommendation.R'
     'mllib_regression.R'
     'mllib_stat.R'
@@ -51,6 +53,7 @@ Collate:
     'serialize.R'
     'sparkR.R'
     'stats.R'
+    'streaming.R'
     'types.R'
     'utils.R'
     'window.R'

R/pkg/NAMESPACE

Lines changed: 29 additions & 1 deletion

@@ -66,7 +66,10 @@ exportMethods("glm",
               "spark.randomForest",
               "spark.gbt",
               "spark.bisectingKmeans",
-              "spark.svmLinear")
+              "spark.svmLinear",
+              "spark.fpGrowth",
+              "spark.freqItemsets",
+              "spark.associationRules")
 
 # Job group lifecycle management methods
 export("setJobGroup",
@@ -82,6 +85,7 @@ exportMethods("arrange",
               "as.data.frame",
               "attach",
               "cache",
+              "checkpoint",
               "coalesce",
               "collect",
               "colnames",
@@ -121,6 +125,7 @@ exportMethods("arrange",
               "insertInto",
               "intersect",
               "isLocal",
+              "isStreaming",
               "join",
               "limit",
               "merge",
@@ -169,6 +174,7 @@ exportMethods("arrange",
               "write.json",
               "write.orc",
               "write.parquet",
+              "write.stream",
               "write.text",
               "write.ml")
@@ -355,17 +361,29 @@ export("as.DataFrame",
        "clearCache",
        "createDataFrame",
        "createExternalTable",
+       "createTable",
+       "currentDatabase",
        "dropTempTable",
        "dropTempView",
        "jsonFile",
+       "listColumns",
+       "listDatabases",
+       "listFunctions",
+       "listTables",
        "loadDF",
        "parquetFile",
        "read.df",
        "read.jdbc",
        "read.json",
        "read.orc",
        "read.parquet",
+       "read.stream",
        "read.text",
+       "recoverPartitions",
+       "refreshByPath",
+       "refreshTable",
+       "setCheckpointDir",
+       "setCurrentDatabase",
        "spark.lapply",
        "spark.addFile",
        "spark.getSparkFilesRootDirectory",
@@ -402,6 +420,16 @@ export("partitionBy",
 export("windowPartitionBy",
        "windowOrderBy")
 
+exportClasses("StreamingQuery")
+
+export("awaitTermination",
+       "isActive",
+       "lastProgress",
+       "queryName",
+       "status",
+       "stopQuery")
+
+
 S3method(print, jobj)
 S3method(print, structField)
 S3method(print, structType)
