Skip to content

Commit 86145b7

Browse files
Mise à jour de l'importation des données
1 parent 8f5e49b commit 86145b7

File tree

2 files changed

+15
-10
lines changed

2 files changed

+15
-10
lines changed

main.py

Lines changed: 10 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -66,14 +66,18 @@
6666

6767

6868
async def main():
69-
logger.info("Starting Taiga crawler...")
70-
await a_moins_b(url, 0, -1, headers)
71-
collection_tasks = [col.get('function')(url, col, headers) for col in collections]
72-
await asyncio.gather(*collection_tasks)
73-
print("Taiga crawler ended successful !!!")
69+
# logger.info("Starting Taiga crawler...")
70+
# await a_moins_b(url, 0, -1, headers)
71+
# collection_tasks = [col.get('function')(url, col, headers) for col in collections]
72+
# await asyncio.gather(*collection_tasks)
73+
# print("Taiga crawler ended successful !!!")
7474

75-
print("Process Data")
75+
print("Starting import_ind...")
76+
start_time = datetime.now()
7677
await import_ind()
78+
end_time = datetime.now()
79+
execution_time = end_time - start_time
80+
print(f"import_ind completed in {execution_time}")
7781

7882
if __name__ == '__main__':
7983
loop = asyncio.get_event_loop()

src/import_ind.py

Lines changed: 5 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -7,7 +7,6 @@
77

88
dotenv.load_dotenv()
99
sesame_api_baseurl = os.getenv('SESAME_API_BASEURL')
10-
sem = asyncio.Semaphore(50)
1110

1211
configs = {
1312
"taiga_etd.json": {
@@ -134,7 +133,7 @@ async def process_data(data, config, file, session):
134133
with open(f'./data/{file}', 'w', encoding='utf-8') as fichier:
135134
json.dump(result, fichier, ensure_ascii=False)
136135
tasks = [send_request(session, f'{sesame_api_baseurl}/management/identities/upsert', entry) for entry in result]
137-
await gather_with_concurrency(10, tasks)
136+
await gather_with_concurrency(25, tasks)
138137
print(f"Processed {file}")
139138

140139
async def import_ind():
@@ -145,5 +144,7 @@ async def import_ind():
145144
datas[file] = json.load(fichier).get('data')
146145

147146
async with aiohttp.ClientSession() as session:
148-
for file in cache_files:
149-
await process_data(datas[file], configs[file], file, session)
147+
# for file in cache_files:
148+
# await process_data(datas[file], configs[file], file, session)
149+
tasks = [process_data(datas[file], configs[file], file, session) for file in cache_files]
150+
await gather_with_concurrency(10, tasks)

0 commit comments

Comments (0)