-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathget_refRange.py
More file actions
88 lines (66 loc) · 3.2 KB
/
get_refRange.py
File metadata and controls
88 lines (66 loc) · 3.2 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
import os
import json
import requests
import logging
from pathlib import Path
from dotenv import load_dotenv
# --- Paths, logging, and configuration -------------------------------------
BASE_DIR = Path(__file__).resolve().parent
load_dotenv(BASE_DIR / '.env')

# Create the log directory up front: logging.basicConfig(filename=...) opens
# the file immediately and raises FileNotFoundError if the directory is missing.
log_dir = BASE_DIR / "logs"
log_dir.mkdir(parents=True, exist_ok=True)
log_file_path = log_dir / 'subtests_and_refRange.log'

logger = logging.getLogger(__name__)
logging.basicConfig(
    filename=log_file_path,
    format='%(asctime)s - %(levelname)s - %(message)s',
    datefmt='%m/%d/%Y %I:%M:%S %p',
    level=logging.INFO
)

# Input directory holding the *.json files to enrich (kept as str, matching
# the original os.path.join return type, since it is joined with os.path later).
path_to_json_files = str(BASE_DIR / "from_parameters")
json_files = [pos_json for pos_json in os.listdir(path_to_json_files) if pos_json.endswith('.json')]

# Endpoints and credentials come from the .env file loaded above; any of these
# may be None if the variable is missing — login will then fail and be logged.
login_url = os.getenv('LOGIN_URL')
login_success_url = os.getenv('LOGIN_SUCCESS_URL')
protected_url = os.getenv('PROTECTED_URL_BASE')
username = os.getenv('LOGIN_USERNAME')
password = os.getenv('LOGIN_PASSWORD')

# Form payload for the login POST.
payload = {
    'usernameOrEmail': username,
    'password': password
}
# --- Scraping logic ---------------------------------------------------------
# Log in once, then for each input JSON file fetch the test results for every
# record and write an enriched copy of that file's records to
# from_parameters/subTests_and_refRange/<same filename>.
output_dir = BASE_DIR / "from_parameters" / "subTests_and_refRange"
output_dir.mkdir(parents=True, exist_ok=True)  # ensure the output dir exists before writing

session = requests.Session()
try:
    login_response = session.post(login_url, data=payload)
    # The site signals a successful login by redirecting to login_success_url.
    if login_response.url == login_success_url:
        logger.info(f"Login was successful. This was the login response URL: {login_response.url}")
        for json_file in json_files:
            json_file_path = os.path.join(path_to_json_files, json_file)
            with open(json_file_path, 'r') as file:
                data = json.load(file)
            records = data.get("testnames", [])
            logger.info(f"Processing file: {json_file} with {len(records)} records.")

            # Collect enriched records for THIS file only. (Previously the list
            # was created once before the loop, so each output file also
            # contained every record from the files processed before it.)
            subTests_and_refRange = []
            for record in records:
                test_id = record.get("id")
                url = f"{protected_url}testresult/showall/{test_id}?test_id={test_id}"
                response = session.get(url)
                if response.status_code != 200:
                    # Previously non-200 responses were skipped silently.
                    logger.error(f"Request for TestID[{test_id}] failed with status {response.status_code}")
                    continue
                logger.info(f"Fetched data for TestID[{test_id}] from {url}")
                # Separate name from the input-file `data` to avoid shadowing.
                result_payload = response.json()
                if not result_payload:
                    logger.warning(f"No data found for TestID[{test_id}]")
                    continue
                record["testResults"] = result_payload.get("testResults", [])
                subTests_and_refRange.append(record)

            with open(output_dir / json_file, 'w') as file:
                json.dump(
                    subTests_and_refRange,
                    file,
                    indent=4
                )
    else:
        # "succes" typo fixed in the log message.
        logger.error(f"Login failed. Please check your credentials and try again. This was the success url: {login_response.url}")
except Exception:
    # logger.exception records the traceback; no f-string needed for a constant message.
    logger.exception("An error occurred during the scraping process:")
finally:
    # Release the pooled HTTP connections even on failure.
    session.close()