44import networkx as nx
55import json
66import boto3
7+ import logging
78from redis import Redis
89from rq import Queue
910from dotenv import load_dotenv
1617
load_dotenv()

# Module-level logger, named after this module per stdlib convention.
logger = logging.getLogger(__name__)

# Redis is mandatory: the worker's job queue lives there, so we connect
# (and ping) eagerly at import time and fail fast if it is unreachable.
REDIS_URL = os.getenv("REDIS_URL", "redis://localhost:6379/0")
try:
    # decode_responses=False: RQ requires raw bytes payloads.
    connection = Redis.from_url(REDIS_URL, decode_responses=False)
    connection.ping()
    logger.info("[Worker] Connected to Redis successfully")
except Exception as e:
    # Lazy %-style args so formatting only happens if the record is emitted.
    logger.error("[Worker] Redis connection failed: %s", e)
    # Re-raise: without Redis this worker cannot do anything useful.
    raise

# Queue that downstream code enqueues PR-context jobs onto.
queue = Queue("pr_context_queue", connection=connection)
3032
31-
3233s3_client = boto3 .client (
3334 "s3" ,
3435 aws_access_key_id = os .getenv ("AWS_ACCESS_KEY_ID" ),
@@ -47,24 +48,25 @@ def upload_to_s3(file_path, key_prefix):
4748 return f"s3://{ bucket_name } /{ s3_key } "
4849
4950def process_pr (pr_data ):
50- print ("pr_data" ,pr_data )
51+ logger .info (f"pr_data { pr_data } " )
52+
5153 repo_url = pr_data ["clone_url" ]
5254 pr_number = pr_data ["pr_number" ]
5355 base_branch = pr_data ["base_branch" ]
5456 head_branch = pr_data ["head_branch" ]
5557 repo_name = pr_data .get ("repo_name" , os .path .basename (repo_url ).replace (".git" , "" ))
5658 commit_sha = pr_data .get ("commit_sha" )
57-
59+
5860 progress_comment_id = pr_data .get ("progress_comment_id" )
5961 installation_id = pr_data .get ("installation_id" )
60- print ( "installationid" , installation_id )
62+ logger . info ( f "installationid { installation_id } " )
6163 owner = pr_data .get ("owner" )
6264 repo = pr_data .get ("repo" )
63-
65+
6466 safe_repo_name = repo_name .replace ("/" , "_" )
6567 s3_prefix = f"pr_contexts/{ safe_repo_name } _{ pr_number } "
6668
67- print (f"[Worker] Reviewing PR #{ pr_number } from { repo_name } " )
69+ logger . info (f"[Worker] Reviewing PR #{ pr_number } from { repo_name } " )
6870
6971 temp_dir = tempfile .mkdtemp ()
7072
@@ -133,7 +135,7 @@ def parse_file_if_needed(file_path, file_name):
133135
134136 s3_json_uri = upload_to_s3 (context_json_path , s3_prefix )
135137 s3_txt_uri = upload_to_s3 (context_txt_path , s3_prefix )
136- print (f"[Worker] Uploaded context files to S3: { s3_json_uri } , { s3_txt_uri } " )
138+ logger . info (f"[Worker] Uploaded context files to S3: { s3_json_uri } , { s3_txt_uri } " )
137139
138140 queue_data = {
139141 "pr_number" : pr_number ,
@@ -148,7 +150,8 @@ def parse_file_if_needed(file_path, file_name):
148150 "owner" : owner ,
149151 "repo" : repo
150152 }
151- print ("queue" ,queue_data )
153+ logger .info (f"queue { queue_data } " )
154+
152155 queue .enqueue (process_ai_job , queue_data )
153156
154157 return {
@@ -162,8 +165,8 @@ def parse_file_if_needed(file_path, file_name):
162165 }
163166
164167 except Exception as e :
165- print (f"[Worker] Error: { e } " )
168+ logger . error (f"[Worker] Error: { e } " )
166169 return {"error" : str (e )}
167170
168171 finally :
169- shutil .rmtree (temp_dir )
172+ shutil .rmtree (temp_dir )
0 commit comments