Skip to content

Commit 1bbd10b

Browse files
author
Giulio Collura
authored
Merge pull request #74 from Yelp/gcoll_COREML-2697_infer_mesos_options_into_k8s
[COREML-2697] handle missing kubernetes config values for mass migration
2 parents ba4c4ca + ff07d78 commit 1bbd10b

File tree

3 files changed

+33
-1
lines changed

3 files changed

+33
-1
lines changed

service_configuration_lib/spark_config.py

Lines changed: 11 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -285,6 +285,17 @@ def _adjust_spark_requested_resources(
285285
max_cores = int(user_spark_opts.setdefault('spark.cores.max', str(DEFAULT_MAX_CORES)))
286286
executor_instances = max_cores / executor_cores
287287
elif cluster_manager == 'kubernetes':
288+
# TODO(gcoll|COREML-2697): Consider cleaning this part of the code up
289+
# once mesos is no longer around at Yelp.
290+
if 'spark.executor.instances' not in user_spark_opts:
291+
executor_instances = int(user_spark_opts.get('spark.cores.max', str(DEFAULT_MAX_CORES))) // executor_cores
292+
user_spark_opts['spark.executor.instances'] = str(executor_instances)
293+
if (
294+
'spark.mesos.executor.memoryOverhead' in user_spark_opts and
295+
'spark.executor.memoryOverhead' not in user_spark_opts
296+
):
297+
user_spark_opts['spark.executor.memoryOverhead'] = user_spark_opts['spark.mesos.executor.memoryOverhead']
298+
288299
executor_instances = int(
289300
user_spark_opts.setdefault('spark.executor.instances', str(DEFAULT_EXECUTOR_INSTANCES)),
290301
)

setup.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -17,7 +17,7 @@
1717

1818
setup(
1919
name='service-configuration-lib',
20-
version='2.9.0',
20+
version='2.10.0',
2121
provides=['service_configuration_lib'],
2222
description='Start, stop, and inspect Yelp SOA services',
2323
url='https://github.com/Yelp/service_configuration_lib',

tests/spark_config_test.py

Lines changed: 21 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -325,6 +325,27 @@ def gpu_pool(self, tmpdir, monkeypatch):
325325
'spark.scheduler.maxRegisteredResourcesWaitingTime': '35min',
326326
},
327327
),
328+
# kubernetes migration
329+
(
330+
'kubernetes',
331+
{
332+
'spark.executor.memory': '2g',
333+
'spark.executor.cores': '4',
334+
'spark.cores.max': '12',
335+
'spark.mesos.executor.memoryOverhead': '4096',
336+
},
337+
{
338+
'spark.executor.memory': '2g',
339+
'spark.executor.cores': '4',
340+
'spark.executor.instances': '3',
341+
'spark.cores.max': '12',
342+
'spark.kubernetes.executor.limit.cores': '4',
343+
'spark.kubernetes.allocation.batch.size': '3',
344+
'spark.scheduler.maxRegisteredResourcesWaitingTime': '15min',
345+
'spark.executor.memoryOverhead': '4096',
346+
'spark.mesos.executor.memoryOverhead': '4096',
347+
},
348+
),
328349
# use default mesos settings
329350
(
330351
'mesos',

0 commit comments

Comments (0)