
Commit f421a1c ("doc fix")
Parent: 9a49f9a

3 files changed: 20 additions, 9 deletions


R/pkg/R/DataFrame.R

Lines changed: 13 additions & 2 deletions
@@ -936,7 +936,7 @@ setMethod("unique",
 
 #' Sample
 #'
-#' Return a sampled subset of this SparkDataFrame using a random seed.
+#' Return a sampled subset of this SparkDataFrame using a random seed.
 #' Note: this is not guaranteed to provide exactly the fraction specified
 #' of the total count of of the given SparkDataFrame.
 #'
@@ -1825,6 +1825,8 @@ setMethod("[", signature(x = "SparkDataFrame"),
 #' Return subsets of SparkDataFrame according to given conditions
 #' @param x a SparkDataFrame.
 #' @param i,subset (Optional) a logical expression to filter on rows.
+#' For extract operator [[ and replacement operator [[<-, the indexing parameter for
+#' a single Column.
 #' @param j,select expression for the single Column or a list of columns to select from the SparkDataFrame.
 #' @param drop if TRUE, a Column will be returned if the resulting dataset has only one column.
 #' Otherwise, a SparkDataFrame will always be returned.
@@ -1835,6 +1837,7 @@ setMethod("[", signature(x = "SparkDataFrame"),
 #' @export
 #' @family SparkDataFrame functions
 #' @aliases subset,SparkDataFrame-method
+#' @seealso \link{withColumn}
 #' @rdname subset
 #' @name subset
 #' @family subsetting functions
@@ -1852,6 +1855,10 @@ setMethod("[", signature(x = "SparkDataFrame"),
 #' subset(df, df$age %in% c(19, 30), 1:2)
 #' subset(df, df$age %in% c(19), select = c(1,2))
 #' subset(df, select = c(1,2))
+#' # Columns can be selected and set
+#' df[["age"]] <- 23
+#' df[[1]] <- df$age
+#' df[[2]] <- NULL # drop column
 #' }
 #' @note subset since 1.5.0
 setMethod("subset", signature(x = "SparkDataFrame"),
@@ -1976,7 +1983,7 @@ setMethod("selectExpr",
 #' @aliases withColumn,SparkDataFrame,character-method
 #' @rdname withColumn
 #' @name withColumn
-#' @seealso \link{rename} \link{mutate}
+#' @seealso \link{rename} \link{mutate} \link{subset}
 #' @export
 #' @examples
 #'\dontrun{
@@ -1987,6 +1994,10 @@ setMethod("selectExpr",
 #' # Replace an existing column
 #' newDF2 <- withColumn(newDF, "newCol", newDF$col1)
 #' newDF3 <- withColumn(newDF, "newCol", 42)
+#' # Use extract operator to set an existing or new column
+#' df[["age"]] <- 23
+#' df[[2]] <- df$col1
+#' df[[2]] <- NULL # drop column
 #' }
 #' @note withColumn since 1.4.0
 setMethod("withColumn",

R/pkg/R/mllib.R

Lines changed: 5 additions & 5 deletions
@@ -173,7 +173,7 @@ predict_internal <- function(object, newData) {
 
 #' Generalized Linear Models
 #'
-#' Fits generalized linear model against a Spark DataFrame.
+#' Fits generalized linear model against a SparkDataFrame.
 #' Users can call \code{summary} to print a summary of the fitted model, \code{predict} to make
 #' predictions on new data, and \code{write.ml}/\code{read.ml} to save/load fitted models.
 #'
@@ -499,7 +499,7 @@ setMethod("write.ml", signature(object = "LDAModel", path = "character"),
 
 #' Isotonic Regression Model
 #'
-#' Fits an Isotonic Regression model against a Spark DataFrame, similarly to R's isoreg().
+#' Fits an Isotonic Regression model against a SparkDataFrame, similarly to R's isoreg().
 #' Users can print, make predictions on the produced model and save the model to the input path.
 #'
 #' @param data SparkDataFrame for training.
@@ -588,7 +588,7 @@ setMethod("summary", signature(object = "IsotonicRegressionModel"),
 
 #' K-Means Clustering Model
 #'
-#' Fits a k-means clustering model against a Spark DataFrame, similarly to R's kmeans().
+#' Fits a k-means clustering model against a SparkDataFrame, similarly to R's kmeans().
 #' Users can call \code{summary} to print a summary of the fitted model, \code{predict} to make
 #' predictions on new data, and \code{write.ml}/\code{read.ml} to save/load fitted models.
 #'
@@ -712,7 +712,7 @@ setMethod("predict", signature(object = "KMeansModel"),
 
 #' Logistic Regression Model
 #'
-#' Fits an logistic regression model against a Spark DataFrame. It supports "binomial": Binary logistic regression
+#' Fits an logistic regression model against a SparkDataFrame. It supports "binomial": Binary logistic regression
 #' with pivoting; "multinomial": Multinomial logistic (softmax) regression without pivoting, similar to glmnet.
 #' Users can print, make predictions on the produced model and save the model to the input path.
 #'
@@ -1321,7 +1321,7 @@ setMethod("predict", signature(object = "AFTSurvivalRegressionModel"),
 
 #' Multivariate Gaussian Mixture Model (GMM)
 #'
-#' Fits multivariate gaussian mixture model against a Spark DataFrame, similarly to R's
+#' Fits multivariate gaussian mixture model against a SparkDataFrame, similarly to R's
 #' mvnormalmixEM(). Users can call \code{summary} to print a summary of the fitted model,
 #' \code{predict} to make predictions on new data, and \code{write.ml}/\code{read.ml}
 #' to save/load fitted models.
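
The doc strings corrected above all describe the same usage pattern: fit against a SparkDataFrame, inspect with summary, predict, and persist with write.ml/read.ml. A hedged sketch for two of the touched APIs (spark.glm and spark.kmeans), assuming a running SparkR session; the data, formula, and output path are illustrative only:

library(SparkR)
sparkR.session()

training <- createDataFrame(iris)   # dots in column names become underscores

# Generalized linear model against a SparkDataFrame
glmModel <- spark.glm(training, Sepal_Length ~ Sepal_Width + Species, family = "gaussian")
summary(glmModel)
head(predict(glmModel, training))

# K-means clustering against a SparkDataFrame, similarly to R's kmeans()
kmModel <- spark.kmeans(training, ~ Sepal_Length + Sepal_Width, k = 3)
summary(kmModel)

# Save and reload a fitted model (path is hypothetical)
write.ml(glmModel, "/tmp/sparkr-glm-example")
glmModel2 <- read.ml("/tmp/sparkr-glm-example")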

R/pkg/vignettes/sparkr-vignettes.Rmd

Lines changed: 2 additions & 2 deletions
@@ -923,9 +923,9 @@ The main method calls of actual computation happen in the Spark JVM of the drive
 
 Two kinds of RPCs are supported in the SparkR JVM backend: method invocation and creating new objects. Method invocation can be done in two ways.
 
-* `sparkR.invokeJMethod` takes a reference to an existing Java object and a list of arguments to be passed on to the method.
+* `sparkR.callJMethod` takes a reference to an existing Java object and a list of arguments to be passed on to the method.
 
-* `sparkR.invokeJStatic` takes a class name for static method and a list of arguments to be passed on to the method.
+* `sparkR.callJStatic` takes a class name for static method and a list of arguments to be passed on to the method.
 
 The arguments are serialized using our custom wire format which is then deserialized on the JVM side. We then use Java reflection to invoke the appropriate method.
 
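
The renamed helpers above are part of SparkR's JVM backend API. A small sketch of the two invocation styles described in the vignette, assuming a running SparkR session; the Java class and methods are arbitrary illustrations:

library(SparkR)
sparkR.session()

# Static method invocation: class name plus method name and arguments
now <- sparkR.callJStatic("java.lang.System", "currentTimeMillis")

# Instance method invocation on a Java object reference
jobj <- sparkR.newJObject("java.lang.Integer", "42")
sparkR.callJMethod(jobj, "toString")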
