@@ -28,7 +28,7 @@ async def read_response(response):
         jsonMessage = json.loads(message)
         print(jsonMessage)

-async def send_request(session, url, exclusions, json):
+async def send_request(session, url, exclusions, json, force):
     if (json.get('inetOrgPerson', {}).get('employeeNumber') == None and json.get('$setOnInsert', {}).get('inetOrgPerson', {}).get('employeeNumber') == None):
         print(f"MISSING employeeNumber -> $set: {json.get('inetOrgPerson', {})}, $setOnInsert: {json.get('$setOnInsert', {}).get('inetOrgPerson', {})}")
         return
@@ -40,6 +40,7 @@ async def send_request(session, url, exclusions, json):
     params = {
         # "filters[inetOrgPerson.employeeNumber]": f"{json.get('inetOrgPerson', {}).get('employeeNumber')}",
         "filters[inetOrgPerson.employeeType]": "TAIGA",
+        "force": force,
     }

     employeeNumber = json.get('inetOrgPerson', {}).get('employeeNumber') or json.get('$setOnInsert', {}).get('inetOrgPerson', {}).get('employeeNumber')
@@ -91,21 +92,21 @@ async def get_data(data, config):



-async def process_data(data, config, file, session):
+async def process_data(data, config, file, session, force):
     print(f"Processing {file}")
     result = await get_data(data, config)
     with open(f'./data/{file}', 'w', encoding='utf-8') as fichier:
         json.dump(result, fichier, ensure_ascii=False, indent=4)
     exclude = config.get('exclude', [])
-    tasks = [send_request(session, f'{sesame_api_baseurl}/management/identities/upsert', config.get('exclude', []), entry) for entry in result]
+    tasks = [send_request(session, f'{sesame_api_baseurl}/management/identities/upsert', config.get('exclude', []), entry, force) for entry in result]
     await gather_with_concurrency(sesame_import_parallels_files, tasks)
     print(f"Processed {file}")

 async def load_config():
     with open('./config.yml', 'r', encoding='utf-8') as fichier:
         return yaml.load(fichier, Loader=yaml.FullLoader)

-async def import_ind():
+async def import_ind(force: bool):
     configs = await load_config()
     cache_files = os.listdir('./cache')
     datas = {}
@@ -116,5 +117,5 @@ async def import_ind():
         datas[file] = json.load(fichier).get('data')

     async with aiohttp.ClientSession() as session:
-        tasks = [process_data(datas[file], configs[file], file, session) for file in cache_files if file in configs.keys()]
+        tasks = [process_data(datas[file], configs[file], file, session, force) for file in cache_files if file in configs.keys()]
         await gather_with_concurrency(sesame_import_parallels_files, tasks)
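
A minimal usage sketch for the new `force` flag (the argparse entry point below is illustrative only, not part of this commit). Note that, depending on the aiohttp/yarl version, a bool query value may be rejected, so stringifying it before putting it in `params` (e.g. `"force": str(force).lower()`) may be safer.

```python
# Illustrative entry point (assumption: not part of this commit); shows how the
# new `force` parameter could be threaded in from the command line.
import argparse
import asyncio


def main():
    parser = argparse.ArgumentParser(description="Sesame identity import")
    parser.add_argument("--force", action="store_true",
                        help="forwarded as the 'force' query parameter on upsert")
    args = parser.parse_args()
    # import_ind(force) fans the flag out to process_data() and send_request().
    asyncio.run(import_ind(args.force))


if __name__ == "__main__":
    main()
```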