8 changes: 8 additions & 0 deletions core/src/main/scala/org/apache/spark/SparkConf.scala
@@ -508,6 +508,14 @@ class SparkConf(loadDefaults: Boolean)
.map { case (k, v) => (k.substring(prefix.length), v) }
}

/**
 * Get all parameters that start with `prefix`, applying `f` to transform each matching key.
 */
def getAllWithPrefix[K](prefix: String, f: String => K): Array[(K, String)] = {
getAll.filter { case (k, _) => k.startsWith(prefix) }
.map { case (k, v) => (f(k), v) }
}

/** Get all executor environment variables set on this SparkConf */
def getExecutorEnv: Seq[(String, String)] = {
getAllWithPrefix("spark.executorEnv.").toImmutableArraySeq
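For context, a minimal usage sketch of the two overloads (the existing one-argument getAllWithPrefix and the new two-argument one added above); the configuration keys here are illustrative, not taken from the PR:

import org.apache.spark.SparkConf

object PrefixExample {
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf(false)
      .set("spark.fs.s3a.endpoint", "http://localhost:9000")   // illustrative key
      .set("spark.fs.s3a.path.style.access", "true")           // illustrative key

    // Existing one-argument overload: returns each matching key with the prefix stripped.
    val stripped = conf.getAllWithPrefix("spark.fs.s3a.")
    // e.g. Array(("endpoint", "http://localhost:9000"), ("path.style.access", "true"))

    // New two-argument overload: applies an arbitrary key transformation instead,
    // here remapping spark.fs.s3a.* to spark.hadoop.fs.s3a.* as in the test below.
    val remapped = conf.getAllWithPrefix("spark.fs.s3a.",
      (k: String) => "spark.hadoop.fs.s3a." + k.stripPrefix("spark.fs.s3a."))
    remapped.foreach { case (k, v) => println(s"$k -> $v") }
  }
}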
20 changes: 20 additions & 0 deletions core/src/test/scala/org/apache/spark/SparkConfSuite.scala
@@ -127,6 +127,26 @@ class SparkConfSuite extends SparkFunSuite with LocalSparkContext with ResetSystemProperties
Set(("main.suffix", "v1"), ("main2.suffix", "v2"), ("main3.extra1.suffix", "v3")))
}

test("more flexible getAllWithPrefix") {
val prefix = "spark.fs.s3a."
val newPrefix = "spark.hadoop.fs.s3a."
val conf = new SparkConf(false)
conf.set("spark.fs.s3a.config1", "v1")
val f = (k: String) => {
val keyWithoutPrefix = k.substring(prefix.length)
newPrefix + keyWithoutPrefix
}
assert(conf.getAllWithPrefix(prefix, f).toSet ===
Set(("spark.hadoop.fs.s3a.config1", "v1")))

conf.set("spark.fs.s3a.config1.suffix", "v2")
conf.set("spark.fs.s3a.config1.extra.suffix", "v3")
conf.set("spark.notMatching.main4", "v4")

assert(conf.getAllWithPrefix(prefix).toSet ===
Set(("config1", "v1"), ("config1.suffix", "v2"), ("config1.extra.suffix", "v3")))
}

test("creating SparkContext without master and app name") {
val conf = new SparkConf(false)
intercept[SparkException] { sc = new SparkContext(conf) }
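As a design note, the one-argument overload is the special case of the new one where `f` strips the prefix, so the existing method could be rewritten in terms of it; a hypothetical sketch, not part of this diff:

// Hypothetical refactoring (not in this PR): the existing one-argument overload
// expressed through the new two-argument one; f simply strips the prefix.
def getAllWithPrefix(prefix: String): Array[(String, String)] =
  getAllWithPrefix(prefix, (k: String) => k.substring(prefix.length))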