# Fixed identifiers used by the AWS-credential tests.
TEST_ACCOUNT_ID = '123456789'
TEST_USER = 'UNIT_TEST_USER'

# Canned return values shared by the mocking fixtures below, so tests
# assert against one source of truth instead of repeated inline literals.
# NOTE(review): UI_PORT_RETURN_VALUE is an int while
# EPHEMERAL_PORT_RETURN_VALUE is a str — callers stringify the former
# explicitly (see the ui_port fixture), so the mixed types are deliberate.
UI_PORT_RETURN_VALUE = 65432
EPHEMERAL_PORT_RETURN_VALUE = '12345'
TIME_RETURN_VALUE = 123.456
RANDOM_STRING_RETURN_VALUE = 'do1re2mi3fa4sol4'
2025@pytest .fixture
2126def mock_log (monkeypatch ):
@@ -26,8 +31,14 @@ def mock_log(monkeypatch):
2631
@pytest.fixture
def mock_time():
    """Freeze spark_config's clock at TIME_RETURN_VALUE for the test."""
    patcher = mock.patch.object(
        spark_config.time, 'time', return_value=TIME_RETURN_VALUE,
    )
    with patcher:
        yield TIME_RETURN_VALUE
@pytest.fixture
def mock_get_random_string():
    """Pin utils.get_random_string to a deterministic token."""
    patcher = mock.patch.object(
        utils, 'get_random_string', return_value=RANDOM_STRING_RETURN_VALUE,
    )
    with patcher:
        yield RANDOM_STRING_RETURN_VALUE
3142
3243
3344class TestGetAWSCredentials :
@@ -1083,11 +1094,10 @@ def test_convert_user_spark_opts_value_str(self):
10831094
10841095 @pytest .fixture
10851096 def mock_ephemeral_port_reserve_range (self ):
1086- port = '12345'
1087- with mock .patch .object (utils , 'ephemeral_port_reserve_range' , return_value = port ):
1088- yield port
1097+ with mock .patch .object (utils , 'ephemeral_port_reserve_range' , return_value = EPHEMERAL_PORT_RETURN_VALUE ):
1098+ yield EPHEMERAL_PORT_RETURN_VALUE
10891099
1090- @pytest .fixture (params = [None , '23456' ])
1100+ @pytest .fixture (params = [None , str ( UI_PORT_RETURN_VALUE ) ])
10911101 def ui_port (self , request ):
10921102 return request .param
10931103
@@ -1115,13 +1125,21 @@ def user_spark_opts(self, request):
11151125 return request .param
11161126
11171127 @pytest .fixture
1118- def assert_app_name (self , spark_opts_from_env , user_spark_opts , ui_port , mock_ephemeral_port_reserve_range ):
1128+ def assert_app_name (
1129+ self ,
1130+ spark_opts_from_env ,
1131+ user_spark_opts ,
1132+ ui_port ,
1133+ mock_ephemeral_port_reserve_range ,
1134+ mock_get_random_string ,
1135+ ):
11191136 expected_output = (spark_opts_from_env or {}).get ('spark.app.name' )
1137+
11201138 if not expected_output :
1121- expected_output = (
1122- ( user_spark_opts or {}). get ( 'spark.app.name' ) or
1123- self . spark_app_base_name
1124- ) + '_' + ( ui_port or mock_ephemeral_port_reserve_range ) + '_123 '
1139+ base_name = (user_spark_opts or {}). get ( 'spark.app.name' ) or self . spark_app_base_name
1140+ port = ui_port or mock_ephemeral_port_reserve_range
1141+ time_int = int ( TIME_RETURN_VALUE )
1142+ expected_output = f' { base_name } _ { port } _ { time_int } _ { mock_get_random_string } '
11251143
11261144 def verify (output ):
11271145 key = 'spark.app.name'
@@ -1477,13 +1495,6 @@ def test_adjust_cpu_mem_ratio_thresh_non_regular_pool(
14771495 assert int (result_dict ['spark.task.cpus' ]) == 1
14781496
14791497
1480- def test_stringify_spark_env ():
1481- conf = {'spark.mesos.leader' : '1234' , 'spark.mesos.principal' : 'spark' }
1482- assert spark_config .stringify_spark_env (conf ) == (
1483- '--conf spark.mesos.leader=1234 --conf spark.mesos.principal=spark'
1484- )
1485-
1486-
14871498@pytest .mark .parametrize (
14881499 'memory_string,expected_output' , [
14891500 ('1g' , 1024 ),
0 commit comments