Commit c47c9c5

Small fixes to --packages
1 parent b14cd23 commit c47c9c5

2 files changed: +40 -21 lines

core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala

Lines changed: 26 additions & 8 deletions
@@ -722,13 +722,31 @@ private[deploy] object SparkSubmitUtils {
   /**
    * Extracts maven coordinates from a comma-delimited string
    * @param remoteRepos Comma-delimited string of remote repositories
+   * @param ivySettings The Ivy settings for this session
    * @return A ChainResolver used by Ivy to search for and resolve dependencies.
    */
-  def createRepoResolvers(remoteRepos: Option[String]): ChainResolver = {
+  def createRepoResolvers(remoteRepos: Option[String], ivySettings: IvySettings): ChainResolver = {
     // We need a chain resolver if we want to check multiple repositories
     val cr = new ChainResolver
     cr.setName("list")
 
+    val localM2 = new IBiblioResolver
+    localM2.setM2compatible(true)
+    val m2Path = ".m2" + File.separator + "repository" + File.separator
+    localM2.setRoot(new File(System.getProperty("user.home"), m2Path).toURI.toString)
+    localM2.setUsepoms(true)
+    localM2.setName("local-m2-cache")
+    cr.add(localM2)
+
+    val localIvy = new IBiblioResolver
+    localIvy.setRoot(new File(ivySettings.getDefaultIvyUserDir,
+      "local" + File.separator).toURI.toString)
+    val ivyPattern = Seq("[organisation]", "[module]", "[revision]", "[type]s",
+      "[artifact](-[classifier]).[ext]").mkString(File.separator)
+    localIvy.setPattern(ivyPattern)
+    localIvy.setName("local-ivy-cache")
+    cr.add(localIvy)
+
     // the biblio resolver resolves POM declared dependencies
     val br: IBiblioResolver = new IBiblioResolver
     br.setM2compatible(true)
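
Net effect of this hunk: the local Maven cache (~/.m2/repository) and the local Ivy repository are placed at the head of the chain, so artifacts already present on the machine resolve without a network round trip. A minimal, self-contained sketch of the same chain assembly (illustrative only, not part of the patch; assumes Ivy 2.x on the classpath):

import java.io.File
import org.apache.ivy.core.settings.IvySettings
import org.apache.ivy.plugins.resolver.{ChainResolver, DependencyResolver, IBiblioResolver}

object ResolverChainSketch {
  def main(args: Array[String]): Unit = {
    val ivySettings = new IvySettings
    val cr = new ChainResolver
    cr.setName("list")

    // Highest priority: the user's local ~/.m2/repository, laid out Maven-style.
    val localM2 = new IBiblioResolver
    localM2.setM2compatible(true)
    val m2Path = ".m2" + File.separator + "repository" + File.separator
    localM2.setRoot(new File(System.getProperty("user.home"), m2Path).toURI.toString)
    localM2.setUsepoms(true)
    localM2.setName("local-m2-cache")
    cr.add(localM2)

    // Next: the "local" Ivy repository under the default Ivy user dir.
    val localIvy = new IBiblioResolver
    localIvy.setRoot(new File(ivySettings.getDefaultIvyUserDir,
      "local" + File.separator).toURI.toString)
    localIvy.setPattern(Seq("[organisation]", "[module]", "[revision]", "[type]s",
      "[artifact](-[classifier]).[ext]").mkString(File.separator))
    localIvy.setName("local-ivy-cache")
    cr.add(localIvy)

    // Remote resolvers (central, spark-packages, user repositories) are appended
    // after the local caches, so they are only consulted on a local miss.
    cr.getResolvers.toArray.foreach { r =>
      println(r.asInstanceOf[DependencyResolver].getName)
    }
  }
}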
@@ -761,8 +779,7 @@ private[deploy] object SparkSubmitUtils {
 
   /**
    * Output a comma-delimited list of paths for the downloaded jars to be added to the classpath
-   * (will append to jars in SparkSubmit). The name of the jar is given
-   * after a '!' by Ivy. It also sometimes contains '(bundle)' after '.jar'. Remove that as well.
+   * (will append to jars in SparkSubmit).
    * @param artifacts Sequence of dependencies that were resolved and retrieved
    * @param cacheDirectory directory where jars are cached
    * @return a comma-delimited list of paths for the dependencies
@@ -771,10 +788,9 @@ private[deploy] object SparkSubmitUtils {
       artifacts: Array[AnyRef],
       cacheDirectory: File): String = {
     artifacts.map { artifactInfo =>
-      val artifactString = artifactInfo.toString
-      val jarName = artifactString.drop(artifactString.lastIndexOf("!") + 1)
+      val artifact = artifactInfo.asInstanceOf[Artifact].getModuleRevisionId
       cacheDirectory.getAbsolutePath + File.separator +
-        jarName.substring(0, jarName.lastIndexOf(".jar") + 4)
+        s"${artifact.getOrganisation}_${artifact.getName}-${artifact.getRevision}.jar"
     }.mkString(",")
   }
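
The rewritten helper derives the jar name from the artifact's ModuleRevisionId instead of parsing Ivy's toString output (which put the name after a '!' and sometimes appended '(bundle)'). A quick sketch of the resulting path scheme, using a made-up coordinate com.example:my-lib:1.0.0:

import java.io.File
import org.apache.ivy.core.module.id.ModuleRevisionId

// Hypothetical coordinate, purely for illustration.
val mrid = ModuleRevisionId.newInstance("com.example", "my-lib", "1.0.0")
val cacheDirectory = new File("/tmp/jars")
val path = cacheDirectory.getAbsolutePath + File.separator +
  s"${mrid.getOrganisation}_${mrid.getName}-${mrid.getRevision}.jar"
println(path) // /tmp/jars/com.example_my-lib-1.0.0.jar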

@@ -856,6 +872,7 @@ private[deploy] object SparkSubmitUtils {
       if (alternateIvyCache.trim.isEmpty) {
         new File(ivySettings.getDefaultIvyUserDir, "jars")
       } else {
+        ivySettings.setDefaultIvyUserDir(new File(alternateIvyCache))
         ivySettings.setDefaultCache(new File(alternateIvyCache, "cache"))
         new File(alternateIvyCache, "jars")
       }
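
Redirecting the default Ivy user dir here also moves the root of the local-ivy-cache resolver built in createRepoResolvers (called later in this method), since that resolver is derived from ivySettings.getDefaultIvyUserDir. A small sketch of that interaction (path illustrative):

import java.io.File
import org.apache.ivy.core.settings.IvySettings

val ivySettings = new IvySettings
ivySettings.setDefaultIvyUserDir(new File("/tmp/alt-ivy"))
ivySettings.setDefaultCache(new File("/tmp/alt-ivy", "cache"))
// A resolver rooted at the default user dir now points under /tmp/alt-ivy.
println(ivySettings.getDefaultIvyUserDir) // /tmp/alt-ivy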
@@ -865,7 +882,7 @@ private[deploy] object SparkSubmitUtils {
     // create a pattern matcher
     ivySettings.addMatcher(new GlobPatternMatcher)
     // create the dependency resolvers
-    val repoResolver = createRepoResolvers(remoteRepos)
+    val repoResolver = createRepoResolvers(remoteRepos, ivySettings)
     ivySettings.addResolver(repoResolver)
     ivySettings.setDefaultResolver(repoResolver.getName)

@@ -899,7 +916,8 @@ private[deploy] object SparkSubmitUtils {
     }
     // retrieve all resolved dependencies
     ivy.retrieve(rr.getModuleDescriptor.getModuleRevisionId,
-      packagesDirectory.getAbsolutePath + File.separator + "[artifact](-[classifier]).[ext]",
+      packagesDirectory.getAbsolutePath + File.separator +
+        "[organization]_[artifact]-[revision].[ext]",
       retrieveOptions.setConfs(Array(ivyConfName)))
     System.setOut(sysOut)
     resolveDependencyPaths(rr.getArtifacts.toArray, packagesDirectory)
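
The retrieve pattern now mirrors the names that resolveDependencyPaths constructs, so the files Ivy writes and the classpath entries Spark reports stay in agreement, and the organisation prefix keeps same-named artifacts from different groups from colliding. For context, this code path backs spark-submit's --packages flag; a typical invocation (coordinate purely illustrative) looks like:

spark-submit --packages com.example:my-lib:1.0.0 --class org.example.MyApp my-app.jar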

core/src/test/scala/org/apache/spark/deploy/SparkSubmitUtilsSuite.scala

Lines changed: 14 additions & 13 deletions
@@ -19,6 +19,8 @@ package org.apache.spark.deploy
 
 import java.io.{PrintStream, OutputStream, File}
 
+import org.apache.ivy.core.settings.IvySettings
+
 import scala.collection.mutable.ArrayBuffer
 
 import org.scalatest.{BeforeAndAfterAll, FunSuite}
@@ -56,24 +58,23 @@ class SparkSubmitUtilsSuite extends FunSuite with BeforeAndAfterAll {
   }
 
   test("create repo resolvers") {
-    val resolver1 = SparkSubmitUtils.createRepoResolvers(None)
+    val settings = new IvySettings
+    val res1 = SparkSubmitUtils.createRepoResolvers(None, settings)
     // should have central and spark-packages by default
-    assert(resolver1.getResolvers.size() === 2)
-    assert(resolver1.getResolvers.get(0).asInstanceOf[IBiblioResolver].getName === "central")
-    assert(resolver1.getResolvers.get(1).asInstanceOf[IBiblioResolver].getName === "spark-packages")
+    assert(res1.getResolvers.size() === 4)
+    assert(res1.getResolvers.get(0).asInstanceOf[IBiblioResolver].getName === "local-m2-cache")
+    assert(res1.getResolvers.get(1).asInstanceOf[IBiblioResolver].getName === "local-ivy-cache")
+    assert(res1.getResolvers.get(2).asInstanceOf[IBiblioResolver].getName === "central")
+    assert(res1.getResolvers.get(3).asInstanceOf[IBiblioResolver].getName === "spark-packages")
 
     val repos = "a/1,b/2,c/3"
-    val resolver2 = SparkSubmitUtils.createRepoResolvers(Option(repos))
-    assert(resolver2.getResolvers.size() === 5)
+    val resolver2 = SparkSubmitUtils.createRepoResolvers(Option(repos), settings)
+    assert(resolver2.getResolvers.size() === 7)
     val expected = repos.split(",").map(r => s"$r/")
     resolver2.getResolvers.toArray.zipWithIndex.foreach { case (resolver: IBiblioResolver, i) =>
-      if (i == 0) {
-        assert(resolver.getName === "central")
-      } else if (i == 1) {
-        assert(resolver.getName === "spark-packages")
-      } else {
-        assert(resolver.getName === s"repo-${i - 1}")
-        assert(resolver.getRoot === expected(i - 2))
+      if (i > 3) {
+        assert(resolver.getName === s"repo-${i - 3}")
+        assert(resolver.getRoot === expected(i - 4))
       }
     }
   }
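
The index arithmetic follows from the four fixed resolvers now at the head of the chain; reconstructed from the assertions above (not new behavior), the order for createRepoResolvers(Option("a/1,b/2,c/3"), settings) is:

// Resolver order implied by the test:
val order = Seq(
  "local-m2-cache",  // index 0
  "local-ivy-cache", // index 1
  "central",         // index 2
  "spark-packages",  // index 3
  "repo-1",          // index 4, root "a/1/"
  "repo-2",          // index 5, root "b/2/"
  "repo-3"           // index 6, root "c/3/"
)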
