
Commit fc4965a

skip tests
1 parent 80e9cf1 · commit fc4965a

19 files changed (+306, -3 lines)
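Every hunk below applies the same guard: a call to testthat's skip_on_cran() is added as the first statement of each test body, so these tests are skipped when the package is checked on CRAN but still run locally and on CI. The sketch below (hypothetical test name and assertion, not from this commit) illustrates the pattern; it assumes testthat's documented behavior of skipping unless the NOT_CRAN environment variable is set to "true".

library(testthat)

test_that("expensive Spark test runs only off-CRAN", {
  # skip_on_cran() is a no-op when NOT_CRAN == "true" (the value devtools
  # and most CI setups export); on CRAN the variable is unset, so the
  # test is skipped before any expensive setup runs.
  skip_on_cran()

  expect_equal(1L + 1L, 2L)  # reached only when the test is not skipped
})

Placing the guard first means a skipped test never starts a Spark session, which keeps CRAN check time and resource use down.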

R/pkg/inst/tests/testthat/test_Serde.R

Lines changed: 6 additions & 0 deletions
@@ -20,6 +20,8 @@ context("SerDe functionality")
 sparkSession <- sparkR.session(enableHiveSupport = FALSE)

 test_that("SerDe of primitive types", {
+  skip_on_cran()
+
   x <- callJStatic("SparkRHandler", "echo", 1L)
   expect_equal(x, 1L)
   expect_equal(class(x), "integer")
@@ -38,6 +40,8 @@ test_that("SerDe of primitive types", {
 })

 test_that("SerDe of list of primitive types", {
+  skip_on_cran()
+
   x <- list(1L, 2L, 3L)
   y <- callJStatic("SparkRHandler", "echo", x)
   expect_equal(x, y)
@@ -65,6 +69,8 @@ test_that("SerDe of list of primitive types", {
 })

 test_that("SerDe of list of lists", {
+  skip_on_cran()
+
   x <- list(list(1L, 2L, 3L), list(1, 2, 3),
             list(TRUE, FALSE), list("a", "b", "c"))
   y <- callJStatic("SparkRHandler", "echo", x)
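To run the guarded tests locally, one can simulate a non-CRAN environment before invoking testthat; a minimal sketch, assuming the standard testthat workflow (the file path is the one shown above):

# Hypothetical local invocation: mark the session as non-CRAN so the
# guarded tests execute, then run a single SparkR test file.
Sys.setenv(NOT_CRAN = "true")
testthat::test_file("R/pkg/inst/tests/testthat/test_Serde.R")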

R/pkg/inst/tests/testthat/test_Windows.R

Lines changed: 2 additions & 0 deletions
@@ -17,6 +17,8 @@
 context("Windows-specific tests")

 test_that("sparkJars tag in SparkContext", {
+  skip_on_cran()
+
   if (.Platform$OS.type != "windows") {
     skip("This test is only for Windows, skipped")
   }

R/pkg/inst/tests/testthat/test_binaryFile.R

Lines changed: 8 additions & 0 deletions
@@ -24,6 +24,8 @@ sc <- callJStatic("org.apache.spark.sql.api.r.SQLUtils", "getJavaSparkContext",
 mockFile <- c("Spark is pretty.", "Spark is awesome.")

 test_that("saveAsObjectFile()/objectFile() following textFile() works", {
+  skip_on_cran()
+
   fileName1 <- tempfile(pattern = "spark-test", fileext = ".tmp")
   fileName2 <- tempfile(pattern = "spark-test", fileext = ".tmp")
   writeLines(mockFile, fileName1)
@@ -38,6 +40,8 @@ test_that("saveAsObjectFile()/objectFile() following textFile() works", {
 })

 test_that("saveAsObjectFile()/objectFile() works on a parallelized list", {
+  skip_on_cran()
+
   fileName <- tempfile(pattern = "spark-test", fileext = ".tmp")

   l <- list(1, 2, 3)
@@ -50,6 +54,8 @@ test_that("saveAsObjectFile()/objectFile() works on a parallelized list", {
 })

 test_that("saveAsObjectFile()/objectFile() following RDD transformations works", {
+  skip_on_cran()
+
   fileName1 <- tempfile(pattern = "spark-test", fileext = ".tmp")
   fileName2 <- tempfile(pattern = "spark-test", fileext = ".tmp")
   writeLines(mockFile, fileName1)
@@ -74,6 +80,8 @@ test_that("saveAsObjectFile()/objectFile() following RDD transformations works", {
 })

 test_that("saveAsObjectFile()/objectFile() works with multiple paths", {
+  skip_on_cran()
+
   fileName1 <- tempfile(pattern = "spark-test", fileext = ".tmp")
   fileName2 <- tempfile(pattern = "spark-test", fileext = ".tmp")


R/pkg/inst/tests/testthat/test_binary_function.R

Lines changed: 6 additions & 0 deletions
@@ -29,6 +29,8 @@ rdd <- parallelize(sc, nums, 2L)
 mockFile <- c("Spark is pretty.", "Spark is awesome.")

 test_that("union on two RDDs", {
+  skip_on_cran()
+
   actual <- collectRDD(unionRDD(rdd, rdd))
   expect_equal(actual, as.list(rep(nums, 2)))

@@ -51,6 +53,8 @@ test_that("union on two RDDs", {
 })

 test_that("cogroup on two RDDs", {
+  skip_on_cran()
+
   rdd1 <- parallelize(sc, list(list(1, 1), list(2, 4)))
   rdd2 <- parallelize(sc, list(list(1, 2), list(1, 3)))
   cogroup.rdd <- cogroup(rdd1, rdd2, numPartitions = 2L)
@@ -69,6 +73,8 @@ test_that("cogroup on two RDDs", {
 })

 test_that("zipPartitions() on RDDs", {
+  skip_on_cran()
+
   rdd1 <- parallelize(sc, 1:2, 2L) # 1, 2
   rdd2 <- parallelize(sc, 1:4, 2L) # 1:2, 3:4
   rdd3 <- parallelize(sc, 1:6, 2L) # 1:3, 4:6

R/pkg/inst/tests/testthat/test_broadcast.R

Lines changed: 4 additions & 0 deletions
@@ -26,6 +26,8 @@ nums <- 1:2
 rrdd <- parallelize(sc, nums, 2L)

 test_that("using broadcast variable", {
+  skip_on_cran()
+
   randomMat <- matrix(nrow = 10, ncol = 10, data = rnorm(100))
   randomMatBr <- broadcast(sc, randomMat)

@@ -38,6 +40,8 @@ test_that("using broadcast variable", {
 })

 test_that("without using broadcast variable", {
+  skip_on_cran()
+
   randomMat <- matrix(nrow = 10, ncol = 10, data = rnorm(100))

   useBroadcast <- function(x) {

R/pkg/inst/tests/testthat/test_client.R

Lines changed: 8 additions & 0 deletions
@@ -18,6 +18,8 @@
 context("functions in client.R")

 test_that("adding spark-testing-base as a package works", {
+  skip_on_cran()
+
   args <- generateSparkSubmitArgs("", "", "", "",
                                   "holdenk:spark-testing-base:1.3.0_0.0.5")
   expect_equal(gsub("[[:space:]]", "", args),
@@ -26,16 +28,22 @@ test_that("adding spark-testing-base as a package works", {
 })

 test_that("no package specified doesn't add packages flag", {
+  skip_on_cran()
+
   args <- generateSparkSubmitArgs("", "", "", "", "")
   expect_equal(gsub("[[:space:]]", "", args),
                "")
 })

 test_that("multiple packages don't produce a warning", {
+  skip_on_cran()
+
   expect_warning(generateSparkSubmitArgs("", "", "", "", c("A", "B")), NA)
 })

 test_that("sparkJars sparkPackages as character vectors", {
+  skip_on_cran()
+
   args <- generateSparkSubmitArgs("", "", c("one.jar", "two.jar", "three.jar"), "",
                                   c("com.databricks:spark-avro_2.10:2.0.1"))
   expect_match(args, "--jars one.jar,two.jar,three.jar")

R/pkg/inst/tests/testthat/test_context.R

Lines changed: 16 additions & 0 deletions
@@ -18,6 +18,8 @@
 context("test functions in sparkR.R")

 test_that("Check masked functions", {
+  skip_on_cran()
+
   # Check that we are not masking any new function from base, stats, testthat unexpectedly
   # NOTE: We should avoid adding entries to *namesOfMaskedCompletely* as masked functions make it
   # hard for users to use base R functions. Please check when in doubt.
@@ -55,6 +57,8 @@ test_that("Check masked functions", {
 })

 test_that("repeatedly starting and stopping SparkR", {
+  skip_on_cran()
+
   for (i in 1:4) {
     sc <- suppressWarnings(sparkR.init())
     rdd <- parallelize(sc, 1:20, 2L)
@@ -73,6 +77,8 @@ test_that("repeatedly starting and stopping SparkSession", {
 })

 test_that("rdd GC across sparkR.stop", {
+  skip_on_cran()
+
   sc <- sparkR.sparkContext() # sc should get id 0
   rdd1 <- parallelize(sc, 1:20, 2L) # rdd1 should get id 1
   rdd2 <- parallelize(sc, 1:10, 2L) # rdd2 should get id 2
@@ -96,6 +102,8 @@ test_that("rdd GC across sparkR.stop", {
 })

 test_that("job group functions can be called", {
+  skip_on_cran()
+
   sc <- sparkR.sparkContext()
   setJobGroup("groupId", "job description", TRUE)
   cancelJobGroup("groupId")
@@ -108,12 +116,16 @@ test_that("job group functions can be called", {
 })

 test_that("utility function can be called", {
+  skip_on_cran()
+
   sparkR.sparkContext()
   setLogLevel("ERROR")
   sparkR.session.stop()
 })

 test_that("getClientModeSparkSubmitOpts() returns spark-submit args from whitelist", {
+  skip_on_cran()
+
   e <- new.env()
   e[["spark.driver.memory"]] <- "512m"
   ops <- getClientModeSparkSubmitOpts("sparkrmain", e)
@@ -141,6 +153,8 @@ test_that("getClientModeSparkSubmitOpts() returns spark-submit args from whitelist", {
 })

 test_that("sparkJars sparkPackages as comma-separated strings", {
+  skip_on_cran()
+
   expect_warning(processSparkJars(" a, b "))
   jars <- suppressWarnings(processSparkJars(" a, b "))
   expect_equal(lapply(jars, basename), list("a", "b"))
@@ -168,6 +182,8 @@ test_that("spark.lapply should perform simple transforms", {
 })

 test_that("add and get file to be downloaded with Spark job on every node", {
+  skip_on_cran()
+
   sparkR.sparkContext()
   # Test add file.
   path <- tempfile(pattern = "hello", fileext = ".txt")

R/pkg/inst/tests/testthat/test_includePackage.R

Lines changed: 4 additions & 0 deletions
@@ -26,6 +26,8 @@ nums <- 1:2
 rdd <- parallelize(sc, nums, 2L)

 test_that("include inside function", {
+  skip_on_cran()
+
   # Only run the test if plyr is installed.
   if ("plyr" %in% rownames(installed.packages())) {
     suppressPackageStartupMessages(library(plyr))
@@ -42,6 +44,8 @@ test_that("include inside function", {
 })

 test_that("use include package", {
+  skip_on_cran()
+
   # Only run the test if plyr is installed.
   if ("plyr" %in% rownames(installed.packages())) {
     suppressPackageStartupMessages(library(plyr))

R/pkg/inst/tests/testthat/test_mllib_clustering.R

Lines changed: 4 additions & 0 deletions
@@ -255,6 +255,8 @@ test_that("spark.lda with libsvm", {
 })

 test_that("spark.lda with text input", {
+  skip_on_cran()
+
   text <- read.text(absoluteSparkPath("data/mllib/sample_lda_data.txt"))
   model <- spark.lda(text, optimizer = "online", features = "value")

@@ -297,6 +299,8 @@ test_that("spark.lda with text input", {
 })

 test_that("spark.posterior and spark.perplexity", {
+  skip_on_cran()
+
   text <- read.text(absoluteSparkPath("data/mllib/sample_lda_data.txt"))
   model <- spark.lda(text, features = "value", k = 3)


R/pkg/inst/tests/testthat/test_mllib_regression.R

Lines changed: 12 additions & 0 deletions
@@ -23,6 +23,8 @@ context("MLlib regression algorithms, except for tree-based algorithms")
 sparkSession <- sparkR.session(enableHiveSupport = FALSE)

 test_that("formula of spark.glm", {
+  skip_on_cran()
+
   training <- suppressWarnings(createDataFrame(iris))
   # directly calling the spark API
   # dot minus and intercept vs native glm
@@ -195,6 +197,8 @@ test_that("spark.glm summary", {
 })

 test_that("spark.glm save/load", {
+  skip_on_cran()
+
   training <- suppressWarnings(createDataFrame(iris))
   m <- spark.glm(training, Sepal_Width ~ Sepal_Length + Species)
   s <- summary(m)
@@ -222,6 +226,8 @@ test_that("spark.glm save/load", {
 })

 test_that("formula of glm", {
+  skip_on_cran()
+
   training <- suppressWarnings(createDataFrame(iris))
   # dot minus and intercept vs native glm
   model <- glm(Sepal_Width ~ . - Species + 0, data = training)
@@ -248,6 +254,8 @@ test_that("formula of glm", {
 })

 test_that("glm and predict", {
+  skip_on_cran()
+
   training <- suppressWarnings(createDataFrame(iris))
   # gaussian family
   model <- glm(Sepal_Width ~ Sepal_Length + Species, data = training)
@@ -292,6 +300,8 @@ test_that("glm and predict", {
 })

 test_that("glm summary", {
+  skip_on_cran()
+
   # gaussian family
   training <- suppressWarnings(createDataFrame(iris))
   stats <- summary(glm(Sepal_Width ~ Sepal_Length + Species, data = training))
@@ -341,6 +351,8 @@ test_that("glm summary", {
 })

 test_that("glm save/load", {
+  skip_on_cran()
+
   training <- suppressWarnings(createDataFrame(iris))
   m <- glm(Sepal_Width ~ Sepal_Length + Species, data = training)
   s <- summary(m)
