     'spark.kubernetes.executor.label.paasta.yelp.com/instance',
     'spark.kubernetes.executor.label.paasta.yelp.com/cluster',
 }
-K8S_AUTH_FOLDER = '/etc/spark_k8s_secrets'
+K8S_AUTH_FOLDER = '/etc/pki/spark'
+DEFAULT_SPARK_K8S_SECRET_VOLUME = {
+    'hostPath': K8S_AUTH_FOLDER,
+    'containerPath': K8S_AUTH_FOLDER,
+    'mode': 'RO',
+}
+
 log = logging.Logger(__name__)
 
 
@@ -163,6 +169,23 @@ def _get_mesos_docker_volumes_conf(
     return {'spark.mesos.executor.docker.volumes': volume_str}
 
 
+def _get_k8s_docker_volumes_conf(
+    volumes: Optional[List[Mapping[str, str]]] = None,
+):
+    env = {}
+    k8s_volumes = volumes or []
+    k8s_volumes.append(DEFAULT_SPARK_K8S_SECRET_VOLUME)
+    k8s_volumes.append({'containerPath': '/etc/passwd', 'hostPath': '/etc/passwd', 'mode': 'RO'})
+    k8s_volumes.append({'containerPath': '/etc/group', 'hostPath': '/etc/group', 'mode': 'RO'})
+    for volume_name, volume in enumerate(k8s_volumes):
+        env[f'spark.kubernetes.executor.volumes.hostPath.{volume_name}.mount.path'] = volume['containerPath']
+        env[f'spark.kubernetes.executor.volumes.hostPath.{volume_name}.mount.readOnly'] = (
+            'true' if volume['mode'].lower() == 'ro' else 'false'
+        )
+        env[f'spark.kubernetes.executor.volumes.hostPath.{volume_name}.options.path'] = volume['hostPath']
+    return env
+
+
 def _append_sql_shuffle_partitions_conf(spark_opts: Dict[str, str]) -> Dict[str, str]:
     if 'spark.sql.shuffle.partitions' in spark_opts:
         return spark_opts
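A minimal sketch of what the new helper returns for one caller-supplied volume; the sample path and mode below are illustrative, not part of the change. Each volume is flattened into the standard Spark-on-Kubernetes hostPath properties, and the secrets folder, /etc/passwd, and /etc/group mounts are appended after the caller's entries:

volumes = [{'hostPath': '/nail/srv', 'containerPath': '/nail/srv', 'mode': 'RW'}]  # hypothetical volume
conf = _get_k8s_docker_volumes_conf(volumes)
# conf includes, among other keys:
#   'spark.kubernetes.executor.volumes.hostPath.0.mount.path': '/nail/srv'
#   'spark.kubernetes.executor.volumes.hostPath.0.mount.readOnly': 'false'
#   'spark.kubernetes.executor.volumes.hostPath.0.options.path': '/nail/srv'
#   'spark.kubernetes.executor.volumes.hostPath.1.mount.path': '/etc/pki/spark'  # DEFAULT_SPARK_K8S_SECRET_VOLUME
#   'spark.kubernetes.executor.volumes.hostPath.2.mount.path': '/etc/passwd'
#   'spark.kubernetes.executor.volumes.hostPath.3.mount.path': '/etc/group'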
@@ -409,7 +432,7 @@ def _get_k8s_spark_env(
     paasta_pool: str,
 ) -> Dict[str, str]:
     spark_env = {
-        'spark.master': f'k8s://https://k8s.paasta-{paasta_cluster}.yelp:16443',
+        'spark.master': f'k8s://https://k8s.{paasta_cluster}.paasta:6443',
         'spark.executorEnv.PAASTA_SERVICE': paasta_service,
         'spark.executorEnv.PAASTA_INSTANCE': paasta_instance,
         'spark.executorEnv.PAASTA_CLUSTER': paasta_cluster,
@@ -430,14 +453,8 @@ def _get_k8s_spark_env(
         'spark.kubernetes.executor.label.paasta.yelp.com/cluster': paasta_cluster,
         'spark.kubernetes.node.selector.yelp.com/pool': paasta_pool,
         'spark.kubernetes.executor.label.yelp.com/pool': paasta_pool,
+        **_get_k8s_docker_volumes_conf(volumes),
     }
-    for i, volume in enumerate(volumes or []):
-        volume_name = i
-        spark_env[f'spark.kubernetes.executor.volumes.hostPath.{volume_name}.mount.path'] = volume['containerPath']
-        spark_env[f'spark.kubernetes.executor.volumes.hostPath.{volume_name}.mount.readOnly'] = (
-            'true' if volume['mode'].lower() == 'ro' else 'false'
-        )
-        spark_env[f'spark.kubernetes.executor.volumes.hostPath.{volume_name}.options.path'] = volume['hostPath']
     return spark_env
 
 
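As a quick sanity check on the rewritten master URL, with a placeholder cluster name (illustrative only, not a real cluster):

paasta_cluster = 'example-cluster'  # placeholder name, for illustration only
# old: f'k8s://https://k8s.paasta-{paasta_cluster}.yelp:16443'  -> 'k8s://https://k8s.paasta-example-cluster.yelp:16443'
# new: f'k8s://https://k8s.{paasta_cluster}.paasta:6443'        -> 'k8s://https://k8s.example-cluster.paasta:6443'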