@@ -18,6 +18,8 @@
 context("test functions in sparkR.R")

 test_that("Check masked functions", {
+  skip_on_cran()
+
   # Check that we are not masking any new function from base, stats, testthat unexpectedly
   # NOTE: We should avoid adding entries to *namesOfMaskedCompletely* as masked functions make it
   # hard for users to use base R functions. Please check when in doubt.
@@ -55,6 +57,8 @@ test_that("Check masked functions", {
 })

 test_that("repeatedly starting and stopping SparkR", {
+  skip_on_cran()
+
   for (i in 1:4) {
     sc <- suppressWarnings(sparkR.init())
     rdd <- parallelize(sc, 1:20, 2L)
@@ -73,6 +77,8 @@ test_that("repeatedly starting and stopping SparkSession", {
 })

 test_that("rdd GC across sparkR.stop", {
+  skip_on_cran()
+
   sc <- sparkR.sparkContext() # sc should get id 0
   rdd1 <- parallelize(sc, 1:20, 2L) # rdd1 should get id 1
   rdd2 <- parallelize(sc, 1:10, 2L) # rdd2 should get id 2
@@ -96,6 +102,8 @@ test_that("rdd GC across sparkR.stop", {
 })

 test_that("job group functions can be called", {
+  skip_on_cran()
+
   sc <- sparkR.sparkContext()
   setJobGroup("groupId", "job description", TRUE)
   cancelJobGroup("groupId")
@@ -108,12 +116,16 @@ test_that("job group functions can be called", {
 })

 test_that("utility function can be called", {
+  skip_on_cran()
+
   sparkR.sparkContext()
   setLogLevel("ERROR")
   sparkR.session.stop()
 })

 test_that("getClientModeSparkSubmitOpts() returns spark-submit args from whitelist", {
+  skip_on_cran()
+
   e <- new.env()
   e[["spark.driver.memory"]] <- "512m"
   ops <- getClientModeSparkSubmitOpts("sparkrmain", e)
@@ -141,6 +153,8 @@ test_that("getClientModeSparkSubmitOpts() returns spark-submit args from whiteli
 })

 test_that("sparkJars sparkPackages as comma-separated strings", {
+  skip_on_cran()
+
   expect_warning(processSparkJars(" a, b "))
   jars <- suppressWarnings(processSparkJars(" a, b "))
   expect_equal(lapply(jars, basename), list("a", "b"))
@@ -168,6 +182,8 @@ test_that("spark.lapply should perform simple transforms", {
 })

 test_that("add and get file to be downloaded with Spark job on every node", {
+  skip_on_cran()
+
   sparkR.sparkContext()
   # Test add file.
   path <- tempfile(pattern = "hello", fileext = ".txt")
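Note on the added guard: skip_on_cran() is testthat's standard way to bypass expensive tests during CRAN checks while still running them locally and in CI. Below is a minimal sketch of the idea, assuming only base R and testthat; the helper name skip_on_cran_sketch and the sample test are illustrative, not part of SparkR.

# A minimal sketch, not the SparkR or testthat source: skip a heavy test
# unless the NOT_CRAN environment variable marks a full (non-CRAN) run.
skip_on_cran_sketch <- function() {
  # Local and CI tooling conventionally sets NOT_CRAN=true; CRAN does not.
  if (!identical(Sys.getenv("NOT_CRAN"), "true")) {
    testthat::skip("Skipping on CRAN")
  }
}

testthat::test_that("example guarded test", {
  skip_on_cran_sketch()   # hypothetical stand-in for skip_on_cran()
  testthat::expect_equal(1 + 1, 2)
})

With the guard as the first statement of each test_that() block, CRAN check runs report the test as skipped rather than spending time starting a Spark context.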