@@ -58,14 +58,23 @@ setMethod("initialize", "SparkDataFrame", function(.Object, sdf, isCached) {
#' Apply the save mode and caller-supplied options to a write object.
#'
#' @param write a Java DataFrameWriter reference to configure.
#' @param path optional output path; when given it is stored as the "path" option.
#' @param mode save mode string; 'error' by default (validated by setWriteMode).
#' @param ... further name/value pairs forwarded as writer options.
#' @return the configured write object.
#' @noRd
setWriteOptions <- function(write, path = NULL, mode = "error", ...) {
  # Collect the caller-supplied options into a string environment for the JVM side.
  writeOptions <- varargsToStrEnv(...)
  if (!is.null(path)) {
    writeOptions[["path"]] <- path
  }
  # Validate and set the save mode first, then attach the remaining options.
  write <- setWriteMode(write, mode)
  callJMethod(write, "options", writeOptions)
}
69+ 
#' Validate the save mode and set it on the write object.
#'
#' @param write a Java DataFrameWriter reference.
#' @param mode the save mode; must be a character value.
#' @return the write object with the mode applied.
#' @noRd
setWriteMode <- function(write, mode) {
  # Guard clause: the JVM side expects a save-mode string, so reject anything else.
  if (!is.character(mode)) {
    stop("mode should be character or omitted. It is 'error' by default.")
  }
  # handledCallJMethod surfaces JVM-side errors (e.g. an unknown mode) as R errors.
  handledCallJMethod(write, "mode", mode)
}
7079
7180# ' @export
@@ -556,9 +565,8 @@ setMethod("registerTempTable",
setMethod("insertInto",
          signature(x = "SparkDataFrame", tableName = "character"),
          function(x, tableName, overwrite = FALSE) {
            # `overwrite` is a scalar flag, so a plain if/else is preferred over
            # ifelse(), which is vectorized and strips attributes.
            mode <- if (overwrite) "overwrite" else "append"
            write <- callJMethod(x@sdf, "write")
            # setWriteMode validates the mode and applies it on the JVM writer.
            write <- setWriteMode(write, mode)
            # Returns invisibly: insertInto is called for its side effect only.
            invisible(callJMethod(write, "insertInto", tableName))
          })
564572
@@ -810,7 +818,8 @@ setMethod("toJSON",
810818# '
811819# ' @param x A SparkDataFrame
812820# ' @param path The directory where the file is saved
#' @param mode one of 'append', 'overwrite', 'error', 'errorifexists', 'ignore'
#'             save mode (it is 'error' by default)
814823# ' @param ... additional argument(s) passed to the method.
815824# '
816825# ' @family SparkDataFrame functions
@@ -841,7 +850,8 @@ setMethod("write.json",
841850# '
842851# ' @param x A SparkDataFrame
843852# ' @param path The directory where the file is saved
#' @param mode one of 'append', 'overwrite', 'error', 'errorifexists', 'ignore'
#'             save mode (it is 'error' by default)
845855# ' @param ... additional argument(s) passed to the method.
846856# '
847857# ' @family SparkDataFrame functions
@@ -872,7 +882,8 @@ setMethod("write.orc",
872882# '
873883# ' @param x A SparkDataFrame
874884# ' @param path The directory where the file is saved
#' @param mode one of 'append', 'overwrite', 'error', 'errorifexists', 'ignore'
#'             save mode (it is 'error' by default)
876887# ' @param ... additional argument(s) passed to the method.
877888# '
878889# ' @family SparkDataFrame functions
@@ -917,7 +928,8 @@ setMethod("saveAsParquetFile",
917928# '
918929# ' @param x A SparkDataFrame
919930# ' @param path The directory where the file is saved
#' @param mode one of 'append', 'overwrite', 'error', 'errorifexists', 'ignore'
#'             save mode (it is 'error' by default)
921933# ' @param ... additional argument(s) passed to the method.
922934# '
923935# ' @family SparkDataFrame functions
@@ -1191,6 +1203,9 @@ setMethod("collect",
11911203                    vec  <-  do.call(c , col )
11921204                    stopifnot(class(vec ) !=  " list"  )
11931205                    class(vec ) <-  PRIMITIVE_TYPES [[colType ]]
1206+                     if  (is.character(vec ) &&  stringsAsFactors ) {
1207+                       vec  <-  as.factor(vec )
1208+                     }
11941209                    df [[colIndex ]] <-  vec 
11951210                  } else  {
11961211                    df [[colIndex ]] <-  col 
@@ -2868,18 +2883,19 @@ setMethod("except",
28682883# ' Additionally, mode is used to specify the behavior of the save operation when data already
28692884# ' exists in the data source. There are four modes:
28702885# ' \itemize{
#'   \item 'append': Contents of this SparkDataFrame are expected to be appended to existing data.
#'   \item 'overwrite': Existing data is expected to be overwritten by the contents of this
#'         SparkDataFrame.
#'   \item 'error' or 'errorifexists': An exception is expected to be thrown.
#'   \item 'ignore': The save operation is expected to not save the contents of the SparkDataFrame
#'         and to not change the existing data.
28772892# ' }
28782893# '
28792894# ' @param df a SparkDataFrame.
28802895# ' @param path a name for the table.
28812896# ' @param source a name for external data source.
#' @param mode one of 'append', 'overwrite', 'error', 'errorifexists', 'ignore'
#'             save mode (it is 'error' by default)
28832899# ' @param ... additional argument(s) passed to the method.
28842900# '
28852901# ' @family SparkDataFrame functions
@@ -2937,17 +2953,18 @@ setMethod("saveDF",
29372953# '
29382954# ' Additionally, mode is used to specify the behavior of the save operation when
29392955# ' data already exists in the data source. There are four modes: \cr
#'  'append': Contents of this SparkDataFrame are expected to be appended to existing data. \cr
#'  'overwrite': Existing data is expected to be overwritten by the contents of this
#'     SparkDataFrame. \cr
#'  'error' or 'errorifexists': An exception is expected to be thrown. \cr
#'  'ignore': The save operation is expected to not save the contents of the SparkDataFrame
#'     and to not change the existing data. \cr
29462962# '
29472963# ' @param df a SparkDataFrame.
29482964# ' @param tableName a name for the table.
29492965# ' @param source a name for external data source.
#' @param mode one of 'append', 'overwrite', 'error', 'errorifexists', 'ignore'
#'             save mode (it is 'error' by default)
29512968# ' @param ... additional option(s) passed to the method.
29522969# '
29532970# ' @family SparkDataFrame functions
@@ -2969,12 +2986,11 @@ setMethod("saveAsTable",
29692986            if  (is.null(source )) {
29702987              source  <-  getDefaultSqlSource()
29712988            }
2972-             jmode  <-  convertToJSaveMode(mode )
29732989            options  <-  varargsToStrEnv(... )
29742990
29752991            write  <-  callJMethod(df @ sdf , " write"  )
29762992            write  <-  callJMethod(write , " format"  , source )
2977-             write  <-  callJMethod (write , " mode" ,  jmode )
2993+             write  <-  setWriteMode (write , mode )
29782994            write  <-  callJMethod(write , " options"  , options )
29792995            invisible (callJMethod(write , " saveAsTable"  , tableName ))
29802996          })
@@ -3233,7 +3249,7 @@ setMethod("as.data.frame",
32333249# '
32343250# ' @family SparkDataFrame functions
32353251# ' @rdname attach
3236- # ' @aliases attach,SparkDataFrame-method
#' @aliases attach attach,SparkDataFrame-method
32373253# ' @param what (SparkDataFrame) The SparkDataFrame to attach
32383254# ' @param pos (integer) Specify position in search() where to attach.
32393255# ' @param name (character) Name to use for the attached SparkDataFrame. Names
@@ -3249,9 +3265,12 @@ setMethod("as.data.frame",
32493265# ' @note attach since 1.6.0
setMethod("attach",
          signature(what = "SparkDataFrame"),
          # backtick = FALSE keeps the deparsed default name free of backquotes
          # when the caller's expression is non-syntactic.
          function(what, pos = 2L, name = deparse(substitute(what), backtick = FALSE),
                   warn.conflicts = TRUE) {
            # Snapshot every argument before touching `what`: the default for
            # `name` deparses the caller's expression for `what`, so it must be
            # forced while that promise is still intact.
            args <- as.list(environment()) # capture all parameters - this must be the first line
            # assignNewEnv presumably builds an environment exposing the
            # SparkDataFrame's columns -- TODO(review): confirm against its definition.
            newEnv <- assignNewEnv(args$what)
            # Attach the environment in place of the SparkDataFrame itself,
            # forwarding pos/name/warn.conflicts unchanged to base::attach().
            args$what <- newEnv
            do.call(attach, args)
          })
32563275
32573276# ' Evaluate a R expression in an environment constructed from a SparkDataFrame
@@ -3538,18 +3557,19 @@ setMethod("histogram",
35383557# ' Also, mode is used to specify the behavior of the save operation when
35393558# ' data already exists in the data source. There are four modes:
35403559# ' \itemize{
#'   \item 'append': Contents of this SparkDataFrame are expected to be appended to existing data.
#'   \item 'overwrite': Existing data is expected to be overwritten by the contents of this
#'         SparkDataFrame.
#'   \item 'error' or 'errorifexists': An exception is expected to be thrown.
#'   \item 'ignore': The save operation is expected to not save the contents of the SparkDataFrame
#'         and to not change the existing data.
35473566# ' }
35483567# '
35493568# ' @param x a SparkDataFrame.
35503569# ' @param url JDBC database url of the form \code{jdbc:subprotocol:subname}.
#' @param tableName the name of the table in the external database.
#' @param mode one of 'append', 'overwrite', 'error', 'errorifexists', 'ignore'
#'             save mode (it is 'error' by default)
35533573# ' @param ... additional JDBC database connection properties.
35543574# ' @family SparkDataFrame functions
35553575# ' @rdname write.jdbc
@@ -3566,10 +3586,9 @@ setMethod("histogram",
setMethod("write.jdbc",
          signature(x = "SparkDataFrame", url = "character", tableName = "character"),
          function(x, url, tableName, mode = "error", ...) {
            # Convert the extra arguments into JDBC connection properties for
            # the JVM side (see varargsToJProperties).
            connProps <- varargsToJProperties(...)
            writer <- callJMethod(x@sdf, "write")
            # Validate and apply the save mode via the shared helper.
            writer <- setWriteMode(writer, mode)
            # handledCallJMethod surfaces JVM errors; result is returned invisibly
            # since write.jdbc is called for its side effect.
            invisible(handledCallJMethod(writer, "jdbc", url, tableName, connProps))
          })
35753594
0 commit comments