
Commit 6d7b2fd

Speed up running the kubernetes integration tests locally by allowing folks to skip the tgz dist build and extraction

1 parent: 57d6fbf

File tree

2 files changed: +16 -8 lines changed


resource-managers/kubernetes/integration-tests/scripts/setup-integration-test-env.sh

Lines changed: 6 additions & 6 deletions
```diff
@@ -58,15 +58,15 @@ while (( "$#" )); do
   shift
 done
 
-if [[ $SPARK_TGZ == "N/A" ]];
+rm -rf $UNPACKED_SPARK_TGZ
+if [[ $SPARK_TGZ == "N/A" && $IMAGE_TAG == "N/A" ]];
 then
-  echo "Must specify a Spark tarball to build Docker images against with --spark-tgz." && exit 1;
+  echo "Must specify a Spark tarball to build Docker images against with --spark-tgz OR image with --image-tag." && exit 1;
+else
+  mkdir -p $UNPACKED_SPARK_TGZ
+  tar -xzvf $SPARK_TGZ --strip-components=1 -C $UNPACKED_SPARK_TGZ;
 fi
 
-rm -rf $UNPACKED_SPARK_TGZ
-mkdir -p $UNPACKED_SPARK_TGZ
-tar -xzvf $SPARK_TGZ --strip-components=1 -C $UNPACKED_SPARK_TGZ;
-
 if [[ $IMAGE_TAG == "N/A" ]];
 then
   IMAGE_TAG=$(uuidgen);
```
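
A rough usage sketch of the two paths this change supports (the `--spark-tgz` and `--image-tag` flags come from the script above; the tarball path and image tag are placeholders, and the exact invocation may differ in your environment):

```bash
# Existing path: hand the script a Spark dist tarball, which it unpacks into $UNPACKED_SPARK_TGZ.
./resource-managers/kubernetes/integration-tests/scripts/setup-integration-test-env.sh \
  --spark-tgz /path/to/spark-dist.tgz

# Faster local loop per the commit message: reuse an image tag that was already built,
# so no dist tarball has to be produced first. The KubernetesSuite change below then
# falls back to locating bin/spark-submit relative to the working directory.
./resource-managers/kubernetes/integration-tests/scripts/setup-integration-test-env.sh \
  --image-tag local-dev  # placeholder tag for an image you already built
```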

resource-managers/kubernetes/integration-tests/src/test/scala/org/apache/spark/deploy/k8s/integrationtest/KubernetesSuite.scala

Lines changed: 10 additions & 2 deletions
```diff
@@ -103,8 +103,16 @@ class KubernetesSuite extends SparkFunSuite
       System.clearProperty(key)
     }
 
-    val sparkDirProp = System.getProperty(CONFIG_KEY_UNPACK_DIR)
-    require(sparkDirProp != null, "Spark home directory must be provided in system properties.")
+    val possible_spark_dirs = List(
+      // If someone specified the tgz for the tests look at the extraction dir
+      System.getProperty(CONFIG_KEY_UNPACK_DIR),
+      // If otherwise use my working dir + 3 up
+      new File(Paths.get(System.getProperty("user.dir")).toFile, ("../" * 3)).getAbsolutePath()
+    )
+    val sparkDirProp = possible_spark_dirs.filter(x =>
+      new File(Paths.get(x).toFile, "bin/spark-submit").exists).headOption.getOrElse(null)
+    require(sparkDirProp != null,
+      s"Spark home directory must be provided in system properties tested $possible_spark_dirs")
     sparkHomeDir = Paths.get(sparkDirProp)
     require(sparkHomeDir.toFile.isDirectory,
       s"No directory found for spark home specified at $sparkHomeDir.")
```
