This series is based on Spark 2.1.1.

The Spark Driver is the process that submits and runs the user's application.
1. SparkConf is responsible for loading the configuration parameters for SparkContext; it maintains the various spark.* configuration properties mainly through a ConcurrentHashMap.
class SparkConf(loadDefaults: Boolean) extends Cloneable with Logging with Serializable {

  import SparkConf._

  /** Create a SparkConf that loads defaults from system properties and the classpath */
  def this() = this(true)

  /** A ConcurrentHashMap that stores the Spark configuration */
  private val settings = new ConcurrentHashMap[String, String]()

  if (loadDefaults) {
    loadFromSystemProperties(false)
  }

  /**
   * Load the spark.* configuration properties.
   */
  private[spark] def loadFromSystemProperties(silent: Boolean): SparkConf = {
    // Load any spark.* system properties; keys that do not start with "spark." are skipped
    for ((key, value) <- Utils.getSystemProperties if key.startsWith("spark.")) {
      set(key, value, silent)
    }
    this
  }
  // ... remaining members omitted
}
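To see the effect of loadFromSystemProperties, here is a minimal usage sketch (my own illustration, not from the Spark source; the SparkConfDemo object name is made up): only system properties whose keys start with spark. are copied into the SparkConf, and every other property is ignored.

import org.apache.spark.SparkConf

object SparkConfDemo {
  def main(args: Array[String]): Unit = {
    // System properties must be set before the SparkConf is constructed
    System.setProperty("spark.app.name", "conf-demo")  // loaded: key starts with "spark."
    System.setProperty("my.custom.prop", "ignored")    // skipped: key does not start with "spark."

    val conf = new SparkConf()  // loadDefaults = true, so loadFromSystemProperties(false) runs

    println(conf.get("spark.app.name"))      // prints: conf-demo
    println(conf.contains("my.custom.prop")) // prints: false
  }
}

Backing the settings with a ConcurrentHashMap makes concurrent reads and writes safe without locking the whole SparkConf, which matters because the same conf object is read from many places in the driver.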