Skip to content

Commit acfe14d

Browse files
Sameer Sharma
authored
Coreml 3238 | set k8s allocation batch size accurately (#79)
* COREML-3238 | set spark.kubernetes.allocation.batch.size accurately
* COREML-3238 | add spark config tests

Co-authored-by: Sameer Sharma <[email protected]>
Co-authored-by: Sameer Sharma <[email protected]>
1 parent 22dd4b2 commit acfe14d

File tree

3 files changed

+30
-7
lines changed

3 files changed

+30
-7
lines changed

service_configuration_lib/spark_config.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -368,7 +368,7 @@ def _adjust_spark_requested_resources(
         max_cores = executor_instances * executor_cores
         user_spark_opts.setdefault(
             'spark.kubernetes.allocation.batch.size',
-            str(min(executor_instances, DEFAULT_K8S_BATCH_SIZE)),
+            str(DEFAULT_K8S_BATCH_SIZE),
         )
         user_spark_opts.setdefault('spark.kubernetes.executor.limit.cores', str(executor_cores))
         waiting_time = (

setup.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -17,7 +17,7 @@

 setup(
     name='service-configuration-lib',
-    version='2.10.4',
+    version='2.10.5',
     provides=['service_configuration_lib'],
     description='Start, stop, and inspect Yelp SOA services',
     url='https://github.com/Yelp/service_configuration_lib',

tests/spark_config_test.py

Lines changed: 28 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -309,7 +309,7 @@ def gpu_pool(self, tmpdir, monkeypatch):
                 'spark.executor.cores': '4',
                 'spark.executor.instances': '2',
                 'spark.kubernetes.executor.limit.cores': '4',
-                'spark.kubernetes.allocation.batch.size': '2',
+                'spark.kubernetes.allocation.batch.size': '512',
                 'spark.scheduler.maxRegisteredResourcesWaitingTime': '15min',
             },
         ),
@@ -327,7 +327,7 @@ def gpu_pool(self, tmpdir, monkeypatch):
                 'spark.executor.cores': '4',
                 'spark.executor.instances': '2',
                 'spark.kubernetes.executor.limit.cores': '4',
-                'spark.kubernetes.allocation.batch.size': '2',
+                'spark.kubernetes.allocation.batch.size': '512',
                 'spark.scheduler.maxRegisteredResourcesWaitingTime': '15min',
             },
         ),
@@ -359,10 +359,33 @@ def gpu_pool(self, tmpdir, monkeypatch):
                 'spark.executor.cores': '4',
                 'spark.executor.instances': '32',
                 'spark.kubernetes.executor.limit.cores': '4',
-                'spark.kubernetes.allocation.batch.size': '32',
+                'spark.kubernetes.allocation.batch.size': '512',
                 'spark.scheduler.maxRegisteredResourcesWaitingTime': '16min',
             },
         ),
+        # k8s allocation batch size not specified
+        (
+            'kubernetes',
+            {
+                'spark.executor.cores': '4',
+                'spark.cores.max': '128',
+            },
+            {
+                'spark.kubernetes.allocation.batch.size': '512',
+            },
+        ),
+        # k8s allocation batch size specified
+        (
+            'kubernetes',
+            {
+                'spark.executor.cores': '4',
+                'spark.cores.max': '128',
+                'spark.kubernetes.allocation.batch.size': '151',
+            },
+            {
+                'spark.kubernetes.allocation.batch.size': '151',
+            },
+        ),
         # use default k8s settings
         (
             'kubernetes',
@@ -372,7 +395,7 @@ def gpu_pool(self, tmpdir, monkeypatch):
                 'spark.executor.cores': '2',
                 'spark.executor.instances': '2',
                 'spark.kubernetes.executor.limit.cores': '2',
-                'spark.kubernetes.allocation.batch.size': '2',
+                'spark.kubernetes.allocation.batch.size': '512',
                 'spark.scheduler.maxRegisteredResourcesWaitingTime': '15min',
             },
         ),
@@ -407,7 +430,7 @@ def gpu_pool(self, tmpdir, monkeypatch):
                 'spark.executor.instances': '3',
                 'spark.cores.max': '12',
                 'spark.kubernetes.executor.limit.cores': '4',
-                'spark.kubernetes.allocation.batch.size': '3',
+                'spark.kubernetes.allocation.batch.size': '512',
                 'spark.scheduler.maxRegisteredResourcesWaitingTime': '15min',
                 'spark.executor.memoryOverhead': '4096',
                 'spark.mesos.executor.memoryOverhead': '4096',

0 commit comments

Comments (0)