From 2683c9fb59cf72b2ba6487accc091e23c39009a1 Mon Sep 17 00:00:00 2001 From: psyray Date: Mon, 3 Mar 2025 23:52:50 +0100 Subject: [PATCH] refactor: improve logging and task categorization This commit refactors the logging system and task categorization within the application. The changes improve code organization, readability, and provide more context in log messages. Specifically, the ANSI color codes are moved to a dedicated Colors class, and task logging now includes color-coded task categories for better visual distinction. Additionally, several log messages have been adjusted to provide more relevant information and use more appropriate log levels. Finally, the docker-compose file is updated to improve container behavior. --- docker/docker-compose.yml | 6 + web/api/views.py | 8 +- web/dashboard/views.py | 3 +- web/reNgine/celery_custom_task.py | 19 +-- web/reNgine/common_views.py | 5 +- web/reNgine/context_processors.py | 5 +- web/reNgine/gpt.py | 5 +- web/reNgine/init.py | 13 +- web/reNgine/settings.py | 2 + web/reNgine/tasks/command.py | 3 +- web/reNgine/tasks/detect.py | 15 +- web/reNgine/tasks/dns.py | 11 +- web/reNgine/tasks/fuzzing.py | 7 +- web/reNgine/tasks/geo.py | 3 +- web/reNgine/tasks/http.py | 7 +- web/reNgine/tasks/llm.py | 4 +- web/reNgine/tasks/notification.py | 4 +- web/reNgine/tasks/osint.py | 12 +- web/reNgine/tasks/port_scan.py | 16 +- web/reNgine/tasks/reporting.py | 4 +- web/reNgine/tasks/scan.py | 15 +- web/reNgine/tasks/screenshot.py | 8 +- web/reNgine/tasks/subdomain.py | 11 +- web/reNgine/tasks/url.py | 13 +- web/reNgine/tasks/vulnerability.py | 16 +- web/reNgine/utils/colors.py | 57 +++++++ web/reNgine/utils/command_builder.py | 6 +- web/reNgine/utils/command_executor.py | 19 +-- web/reNgine/utils/db.py | 34 ++--- web/reNgine/utils/debug.py | 6 +- web/reNgine/utils/dns.py | 11 +- web/reNgine/utils/formatters.py | 6 +- web/reNgine/utils/http.py | 3 +- web/reNgine/utils/ip.py | 9 +- web/reNgine/utils/logger.py | 139 ++++++++++-------- web/reNgine/utils/mock.py | 15 +- web/reNgine/utils/nmap.py | 3 +- web/reNgine/utils/nmap_service.py | 7 +- web/reNgine/utils/notifications.py | 3 +- web/reNgine/utils/parsers.py | 32 ++-- web/reNgine/utils/scan_helpers.py | 7 +- web/reNgine/utils/task_config.py | 3 +- web/reNgine/utils/utils.py | 5 +- web/recon_note/views.py | 8 +- web/scanEngine/fixtures/scanEngine.json | 6 +- .../migrations/0059_auto_20250216_1450.py | 4 +- web/startScan/signals.py | 3 +- web/startScan/views.py | 3 +- web/targetApp/views.py | 24 ++- web/tests/test_nmap.py | 3 +- web/tests/test_scan.py | 4 - 51 files changed, 320 insertions(+), 315 deletions(-) create mode 100644 web/reNgine/utils/colors.py diff --git a/docker/docker-compose.yml b/docker/docker-compose.yml index 6685a62d2..8d5703861 100644 --- a/docker/docker-compose.yml +++ b/docker/docker-compose.yml @@ -16,6 +16,7 @@ services: retries: 5 networks: - rengine_network + tty: true redis: image: ghcr.io/security-tools-alliance/rengine-ng:rengine-redis-v${RENGINE_VERSION} @@ -31,6 +32,7 @@ services: retries: 5 networks: - rengine_network + tty: true celery: image: ghcr.io/security-tools-alliance/rengine-ng:rengine-celery-v${RENGINE_VERSION} @@ -64,6 +66,7 @@ services: condition: service_healthy networks: - rengine_network + tty: true celery-beat: image: ghcr.io/security-tools-alliance/rengine-ng:rengine-celery-v${RENGINE_VERSION} @@ -90,6 +93,7 @@ services: - wordlist:/home/rengine/wordlists networks: - rengine_network + tty: true web: image: 
ghcr.io/security-tools-alliance/rengine-ng:rengine-web-v${RENGINE_VERSION} @@ -126,6 +130,7 @@ services: rengine_network: aliases: - rengine + tty: true proxy: image: ghcr.io/security-tools-alliance/rengine-ng:rengine-proxy-v${RENGINE_VERSION} @@ -156,6 +161,7 @@ services: ports: - 8082:8082/tcp - 443:443/tcp + tty: true ollama: image: ghcr.io/security-tools-alliance/rengine-ng:rengine-ollama-v${RENGINE_VERSION} diff --git a/web/api/views.py b/web/api/views.py index a57e93d99..1a26ff1f6 100644 --- a/web/api/views.py +++ b/web/api/views.py @@ -1,4 +1,3 @@ -import logging import re import os.path from pathlib import Path @@ -25,6 +24,7 @@ from recon_note.models import TodoNote from reNgine.celery import app +from reNgine.utils.logger import default_logger as logger from reNgine.utils.db import ( get_lookup_keywords, ) @@ -126,8 +126,6 @@ VulnerabilitySerializer ) -logger = logging.getLogger(__name__) - class OllamaManager(APIView): def get(self, request): @@ -848,7 +846,7 @@ def post(self, request): SUCCESS_TASK) response['status'] = True except Exception as e: - logging.error(e) + logger.error(e) response = {'status': False, 'message': str(e)} elif scan_id: try: @@ -864,7 +862,7 @@ def post(self, request): SUCCESS_TASK) response['status'] = True except Exception as e: - logging.error(e) + logger.error(e) response = {'status': False, 'message': str(e)} logger.warning(f'Revoking tasks {task_ids}') diff --git a/web/dashboard/views.py b/web/dashboard/views.py index 157a624ca..3ca5058ab 100644 --- a/web/dashboard/views.py +++ b/web/dashboard/views.py @@ -1,5 +1,4 @@ import json -import logging from datetime import timedelta @@ -25,8 +24,8 @@ from dashboard.models import Project, OpenAiAPIKey, NetlasAPIKey from dashboard.forms import ProjectForm from reNgine.definitions import PERM_MODIFY_SYSTEM_CONFIGURATIONS, FOUR_OH_FOUR_URL +from reNgine.utils.logger import default_logger as logger -logger = logging.getLogger(__name__) def index(request, slug): try: diff --git a/web/reNgine/celery_custom_task.py b/web/reNgine/celery_custom_task.py index f2abb0607..bdf30fa4c 100644 --- a/web/reNgine/celery_custom_task.py +++ b/web/reNgine/celery_custom_task.py @@ -2,7 +2,6 @@ import json from celery import Task -from celery.utils.log import get_task_logger from celery.worker.request import Request from django.utils import timezone from redis import Redis @@ -25,13 +24,12 @@ get_task_cache_key, get_traceback_path ) +from reNgine.utils.logger import default_logger as logger from reNgine.utils.utils import format_json_output from scanEngine.models import EngineType from startScan.models import ScanActivity, ScanHistory, SubScan -logger = get_task_logger(__name__) - cache = None if 'CELERY_BROKER' in os.environ: cache = Redis.from_url(os.environ['CELERY_BROKER']) @@ -75,7 +73,6 @@ def __call__(self, *args, **kwargs): # Get task info self.task_name = self.name.split('.')[-1] self.description = kwargs.get('description') or ' '.join(self.task_name.split('_')).capitalize() - logger = get_task_logger(self.task_name) # Get reNgine context ctx = kwargs.get('ctx', {}) @@ -135,9 +132,9 @@ def __call__(self, *args, **kwargs): # Create ScanActivity for this task and send start scan notifs if self.track: if self.domain: - logger.warning(f'Task {self.task_name} for {self.subdomain.name if self.subdomain else self.domain.name} is RUNNING') + logger.info(f'Task {self.task_name} for {self.subdomain.name if self.subdomain else self.domain.name} is RUNNING') else: - logger.warning(f'Task {self.task_name} is RUNNING') + 
logger.info(f'Task {self.task_name} is RUNNING') self.create_scan_activity() if RENGINE_CACHE_ENABLED: @@ -148,9 +145,9 @@ def __call__(self, *args, **kwargs): self.status = SUCCESS_TASK if RENGINE_RECORD_ENABLED and self.track: if self.domain: - logger.warning(f'Task {self.task_name} for {self.subdomain.name if self.subdomain else self.domain.name} status is SUCCESS (CACHED)') + logger.info(f'Task {self.task_name} for {self.subdomain.name if self.subdomain else self.domain.name} status is SUCCESS (CACHED)') else: - logger.warning(f'Task {self.task_name} status is SUCCESS (CACHED)') + logger.info(f'Task {self.task_name} status is SUCCESS (CACHED)') self.update_scan_activity() return json.loads(result) @@ -186,7 +183,7 @@ def __call__(self, *args, **kwargs): else: msg = f'Task {self.task_name} status is {self.status_str}' msg += f' | Error: {self.error}' if self.error else '' - logger.warning(msg) + logger.info(msg) self.update_scan_activity() # Set task result in cache if task was successful @@ -208,7 +205,7 @@ def write_results(self): json.dump(self.result, f, indent=4) else: f.write(self.result) - logger.warning(f'Wrote {self.task_name} results to {self.output_path}') + logger.info(f'Wrote {self.task_name} results to {self.output_path}') def create_scan_activity(self): if not self.track: @@ -292,7 +289,7 @@ def get_from_cache(self, *args, **kwargs): if target: msg += f' for {target}' msg += ' status is SUCCESS (CACHED)' - logger.warning(msg) + logger.info(msg) self.update_scan_activity() return json.loads(result) return None diff --git a/web/reNgine/common_views.py b/web/reNgine/common_views.py index 54978aa61..14cf38f43 100644 --- a/web/reNgine/common_views.py +++ b/web/reNgine/common_views.py @@ -3,9 +3,8 @@ from django.template import RequestContext from django.utils.module_loading import import_string -import logging - -logger = logging.getLogger(__name__) +from reNgine.utils.logger import default_logger as logger + def permission_denied(request): logger.warning(f"Permission denied for user {request.user}") diff --git a/web/reNgine/context_processors.py b/web/reNgine/context_processors.py index f9f08c6de..6a166988e 100644 --- a/web/reNgine/context_processors.py +++ b/web/reNgine/context_processors.py @@ -1,9 +1,8 @@ from . 
import settings import requests from django.core.cache import cache -import logging +from reNgine.utils.logger import default_logger as logger -logger = logging.getLogger(__name__) def version(request): return {"RENGINE_CURRENT_VERSION": settings.RENGINE_CURRENT_VERSION} @@ -22,7 +21,7 @@ def misc(request): # Handle the exception if the request fails external_ip = 'Unable to retrieve IP' # Default value in case of error # You can also log the error if necessary - logger.error(f"Error retrieving external IP: {e}") + logger.exception(f"Error retrieving external IP: {e}") return { 'external_ip': external_ip diff --git a/web/reNgine/gpt.py b/web/reNgine/gpt.py index 973739489..28e9e49da 100644 --- a/web/reNgine/gpt.py +++ b/web/reNgine/gpt.py @@ -10,9 +10,8 @@ from langchain_community.llms import Ollama from dashboard.models import OllamaSettings -import logging +from reNgine.utils.logger import default_logger as logger -logger = logging.getLogger(__name__) class GPTVulnerabilityReportGenerator: @@ -131,7 +130,7 @@ def get_attack_suggestion(self, input): 'input': input } except ValueError as e: - logger.error("Error in get_attack_suggestion: %s", str(e), exc_info=True) + logger.exception(f"Error in get_attack_suggestion: {str(e)}") return { 'status': False, 'error': "An error occurred while processing your request.", diff --git a/web/reNgine/init.py b/web/reNgine/init.py index 023a6b768..99e534be4 100644 --- a/web/reNgine/init.py +++ b/web/reNgine/init.py @@ -1,8 +1,7 @@ -import logging import secrets import os +from reNgine.utils.logger import default_logger as logger -logger = logging.getLogger(__name__) ''' @@ -19,11 +18,11 @@ def first_run(secret_file, base_dir): else: try: secret_key = get_random() - secret = open(secret_file, 'w') - secret.write(secret_key) - secret.close() - except OSError: - raise Exception(f'Secret file generation failed. Path: {secret_file}') + with open(secret_file, 'w') as secret: + secret.write(secret_key) + except OSError as e: + logger.exception(f'Secret file generation failed. Path: {secret_file}') + raise Exception(f'Secret file generation failed. 
Path: {secret_file}') from e return secret_key diff --git a/web/reNgine/settings.py b/web/reNgine/settings.py index 8930b67e9..6b116de91 100644 --- a/web/reNgine/settings.py +++ b/web/reNgine/settings.py @@ -237,6 +237,8 @@ class SensitiveDataFilter(logging.Filter): def filter(self, record): sensitive_keys = [ NETLAS_API_KEY, + os.environ.get('AWS_ACCESS_KEY_ID'), + os.environ.get('AWS_SECRET_ACCESS_KEY') ] for key in filter(None, sensitive_keys): diff --git a/web/reNgine/tasks/command.py b/web/reNgine/tasks/command.py index 708b01802..780fdf6e4 100644 --- a/web/reNgine/tasks/command.py +++ b/web/reNgine/tasks/command.py @@ -1,8 +1,7 @@ from reNgine.celery import app from reNgine.utils.command_executor import run_command -from reNgine.utils.logger import Logger +from reNgine.utils.logger import default_logger as logger -logger = Logger(True) @app.task(name='run_command_line', bind=False, queue='run_command_queue') def run_command_line(cmd, **kwargs): diff --git a/web/reNgine/tasks/detect.py b/web/reNgine/tasks/detect.py index 31a1d3671..c087146fd 100644 --- a/web/reNgine/tasks/detect.py +++ b/web/reNgine/tasks/detect.py @@ -12,7 +12,7 @@ from reNgine.settings import RENGINE_TOOL_PATH from reNgine.tasks.command import run_command_line from reNgine.utils.command_builder import CommandBuilder, build_cmsseek_cmd, build_wafw00f_cmd -from reNgine.utils.logger import Logger +from reNgine.utils.logger import default_logger as logger from reNgine.utils.http import get_subdomain_from_url, prepare_urls_with_fallback from reNgine.utils.task_config import TaskConfig @@ -22,9 +22,6 @@ Celery tasks. """ -logger = Logger(is_task_logger=True) # Use task logger for Celery tasks - - @app.task(name='waf_detection', queue='io_queue', base=RengineTask, bind=True) def waf_detection(self, ctx=None, description=None): """ @@ -50,7 +47,7 @@ def waf_detection(self, ctx=None, description=None): ctx=ctx ) if not urls: - logger.error('🛡️ No URLs to check for WAF. Skipping.') + logger.warning('🛡️ No URLs to check for WAF. 
Skipping.') return run_command_line.delay( @@ -92,7 +89,7 @@ def waf_detection(self, ctx=None, description=None): subdomain.waf.add(waf) subdomain.save() except Subdomain.DoesNotExist: - logger.warning(f'🛡️ Subdomain {subdomain_name} was not found in the db, skipping waf detection.') + logger.info(f'🛡️ Subdomain {subdomain_name} was not found in the db, skipping waf detection.') return wafs @@ -123,12 +120,12 @@ def run_cmseek(url): try: shutil.rmtree(os.path.dirname(json_path)) except Exception as e: - logger.error(f"Error cleaning up CMSeeK results: {e}") + logger.exception(f"Error cleaning up CMSeeK results: {e}") return result except Exception as e: - logger.error(f"Error running CMSeeK: {e}") + logger.exception(f"Error running CMSeeK: {e}") return {'status': False, 'message': str(e)} @app.task(name='run_wafw00f', bind=False, queue='run_command_queue') @@ -153,5 +150,5 @@ def run_wafw00f(url): logger.info("No WAF detected") return "No WAF detected" except Exception as e: - logger.error(f"Unexpected error: {e}") + logger.exception(f"Unexpected error: {e}") return f"Unexpected error: {str(e)}" diff --git a/web/reNgine/tasks/dns.py b/web/reNgine/tasks/dns.py index d62f5a10d..8e8b206e6 100644 --- a/web/reNgine/tasks/dns.py +++ b/web/reNgine/tasks/dns.py @@ -2,7 +2,7 @@ from django.utils import timezone from reNgine.celery import app -from reNgine.utils.logger import Logger +from reNgine.utils.logger import default_logger as logger from reNgine.utils.dns import ( get_domain_info_from_db, get_domain_historical_ip_address, @@ -17,7 +17,6 @@ Registrar, ) -logger = Logger(True) @app.task(name='query_whois', bind=False, queue='io_queue') def query_whois(ip_domain, force_reload_whois=False): @@ -51,7 +50,7 @@ def query_whois(ip_domain, force_reload_whois=False): historical_ips = get_domain_historical_ip_address(ip_domain) domain_info.historical_ips = historical_ips except Exception as e: - logger.error(f'HistoricalIP for {ip_domain} not found!\nError: {str(e)}') + logger.exception(f'HistoricalIP for {ip_domain} not found!\nError: {str(e)}') historical_ips = [] # Step 2: Find associated domains using reverse whois try: related_domains = reverse_whois(ip_domain.split('.')[0]) domain_info.related_domains = [domain['name'] for domain in related_domains] except Exception as e: - logger.error(f'Associated domain not found for {ip_domain}\nError: {str(e)}') + logger.exception(f'Associated domain not found for {ip_domain}\nError: {str(e)}') domain_info.related_domains = [] # Step 3: Find related TLDs try: related_tlds = find_related_tlds(ip_domain) domain_info.related_tlds = related_tlds except Exception as e: - logger.error(f'Related TLDs not found for {ip_domain}\nError: {str(e)}') + logger.exception(f'Related TLDs not found for {ip_domain}\nError: {str(e)}') domain_info.related_tlds = [] # Step 4: Execute WHOIS try: whois_data = execute_whois(ip_domain) # Update domain_info with whois data domain_info.update(whois_data) except Exception as e: - logger.error(f'Error executing whois for {ip_domain}\nError: {str(e)}') + logger.exception(f'Error executing whois for {ip_domain}\nError: {str(e)}') # Step 5: Save information to database if we have a domain object if domain: diff --git a/web/reNgine/tasks/fuzzing.py b/web/reNgine/tasks/fuzzing.py index e2acbd835..2cd168cd6 100644 --- a/web/reNgine/tasks/fuzzing.py +++ 
b/web/reNgine/tasks/fuzzing.py @@ -7,7 +7,7 @@ from reNgine.celery import app from reNgine.celery_custom_task import RengineTask from reNgine.utils.command_executor import stream_command -from reNgine.utils.logger import Logger +from reNgine.utils.logger import default_logger as logger from reNgine.utils.http import get_subdomain_from_url, prepare_urls_with_fallback from startScan.models import DirectoryScan, Subdomain from reNgine.utils.command_builder import build_ffuf_cmd @@ -18,9 +18,6 @@ Celery tasks. """ -logger = Logger(is_task_logger=True) # Use task logger for Celery tasks - - @app.task(name='dir_file_fuzz', queue='io_queue', base=RengineTask, bind=True) def dir_file_fuzz(self, ctx=None, description=None): """Perform directory scan, and currently uses `ffuf` as a default tool. @@ -181,7 +178,7 @@ def process_ffuf_result(parsed_result, subdomain, dirscan, ctx, crawl_urls, subs # Log newly created file or directory if debug activated if created and CELERY_DEBUG: - logger.warning(f'🔨 Found new directory or file {url}') + logger.info(f'🔨 Found new directory or file {url}') # Add file to current dirscan dirscan.directory_files.add(dfile) diff --git a/web/reNgine/tasks/geo.py b/web/reNgine/tasks/geo.py index 469f5f198..32f84f678 100644 --- a/web/reNgine/tasks/geo.py +++ b/web/reNgine/tasks/geo.py @@ -1,8 +1,7 @@ from reNgine.celery import app -from reNgine.utils.logger import Logger +from reNgine.utils.logger import default_logger as logger from reNgine.utils.ip import geo_localize_ip -logger = Logger(True) @app.task(name='geo_localize', bind=False, queue='io_queue') def geo_localize(host, ip_id=None): diff --git a/web/reNgine/tasks/http.py b/web/reNgine/tasks/http.py index 6cd3a8fa4..9652293e1 100644 --- a/web/reNgine/tasks/http.py +++ b/web/reNgine/tasks/http.py @@ -5,7 +5,7 @@ from reNgine.celery_custom_task import RengineTask from reNgine.utils.command_builder import build_httpx_cmd from reNgine.utils.command_executor import stream_command -from reNgine.utils.logger import Logger +from reNgine.utils.logger import default_logger as logger from reNgine.utils.http import get_subdomain_from_url, prepare_urls_for_http_scan from reNgine.utils.parsers import parse_httpx_result from reNgine.utils.task_config import TaskConfig @@ -15,7 +15,6 @@ from startScan.models import Subdomain -logger = Logger(True) @app.task(name='http_crawl', queue='io_queue', base=RengineTask, bind=True) def http_crawl(self, urls=None, method=None, recrawl=False, ctx=None, track=True, description=None, update_subdomain_metadatas=False, @@ -79,7 +78,7 @@ def initialize_http_crawl(self, urls, ctx, duplicate_removal_fields, recrawl): ) if not urls: - logger.error('🌐 No URLs to crawl. Skipping.') + logger.warning('🌐 No URLs to crawl. 
Skipping.') return None return config, task_config, input_path, urls, subdomain_metadata_update @@ -142,7 +141,7 @@ def process_http_line(self, line, cmd, follow_redirect, update_subdomain_metadat return None # Log and notify about the endpoint - logger.warning(f'🌐 {endpoint_str}') + logger.info(f'🌐 {endpoint_str}') notify_findings(self, endpoint, endpoint_str, result_data) diff --git a/web/reNgine/tasks/llm.py b/web/reNgine/tasks/llm.py index 7ed711968..91c491d01 100644 --- a/web/reNgine/tasks/llm.py +++ b/web/reNgine/tasks/llm.py @@ -1,13 +1,11 @@ from urllib.parse import urlparse from django.db import transaction from reNgine.celery import app -from reNgine.utils.logger import Logger +from reNgine.utils.logger import default_logger as logger from reNgine.gpt import GPTVulnerabilityReportGenerator from reNgine.utils.utils import get_gpt_vuln_input_description from startScan.models import GPTVulnerabilityReport, Vulnerability, VulnerabilityReference -logger = Logger(True) - @app.task(name='llm_vulnerability_description', bind=False, queue='cpu_queue') def llm_vulnerability_description(vulnerability_id): """Generate and store Vulnerability Description using GPT. diff --git a/web/reNgine/tasks/notification.py b/web/reNgine/tasks/notification.py index 95023ae64..2d3341601 100644 --- a/web/reNgine/tasks/notification.py +++ b/web/reNgine/tasks/notification.py @@ -6,7 +6,7 @@ from reNgine.definitions import NUCLEI_SEVERITY_MAP, STATUS_TO_SEVERITIES from reNgine.celery import app -from reNgine.utils.logger import Logger +from reNgine.utils.logger import default_logger as logger from reNgine.utils.formatters import ( get_scan_url, get_task_title, @@ -32,8 +32,6 @@ ) from django.core.cache import cache -logger = Logger(True) - @app.task(name='send_notif', bind=False, queue='send_notif_queue') def send_notif( message, diff --git a/web/reNgine/tasks/osint.py b/web/reNgine/tasks/osint.py index a9d908ce3..992f9d59b 100644 --- a/web/reNgine/tasks/osint.py +++ b/web/reNgine/tasks/osint.py @@ -15,7 +15,7 @@ from reNgine.celery_custom_task import RengineTask from reNgine.utils.command_builder import build_gofuzz_cmd, build_harvester_cmd, build_h8mail_cmd, build_infoga_cmd from reNgine.utils.http import get_subdomain_from_url -from reNgine.utils.logger import Logger +from reNgine.utils.logger import default_logger as logger from reNgine.utils.task_config import TaskConfig from reNgine.tasks.command import run_command_line from reNgine.tasks.http import http_crawl @@ -23,8 +23,6 @@ from scanEngine.models import Proxy from startScan.models import Dork, MetaFinderDocument, ScanHistory, Subdomain -logger = Logger(True) - @app.task(name='osint', bind=True, base=RengineTask) def osint(self, host=None, ctx=None, description=None): """Run Open-Source Intelligence tools on selected domain. @@ -308,7 +306,7 @@ def h8mail(self, ctx, host, scan_history_id, activity_id, results_dir): if ctx is None: ctx = {} - logger.warning('Getting leaked credentials') + logger.info('Getting leaked credentials') config = TaskConfig(ctx, OSINT) @@ -330,7 +328,7 @@ def h8mail(self, ctx, host, scan_history_id, activity_id, results_dir): # TODO: go through h8mail output and save emails to DB scan_history = ScanHistory.objects.get(pk=scan_history_id) for cred in creds: - logger.warning(cred) + logger.info(cred) email_address = cred['target'] pwn_num = cred['pwn_num'] pwn_data = cred.get('data', []) @@ -448,7 +446,7 @@ def save_metadata_info(meta_dict): Returns: list: List of startScan.MetaFinderDocument objects. 
""" - logger.warning(f'Getting metadata for {meta_dict.osint_target}') + logger.info(f'Getting metadata for {meta_dict.osint_target}') scan_history = ScanHistory.objects.get(id=meta_dict.scan_id) @@ -456,7 +454,7 @@ def save_metadata_info(meta_dict): #result = extract_metadata_from_google_search(meta_dict.osint_target, meta_dict.documents_limit) result=[] if not result: - logger.error(f'No metadata result from Google Search for {meta_dict.osint_target}.') + logger.warning(f'No metadata result from Google Search for {meta_dict.osint_target}.') return [] # Add metadata info to DB diff --git a/web/reNgine/tasks/port_scan.py b/web/reNgine/tasks/port_scan.py index 38de6eeb2..8479cf03a 100644 --- a/web/reNgine/tasks/port_scan.py +++ b/web/reNgine/tasks/port_scan.py @@ -12,7 +12,7 @@ from reNgine.utils.command_builder import build_naabu_cmd from reNgine.utils.command_executor import stream_command from reNgine.utils.formatters import SafePath, get_task_title -from reNgine.utils.logger import Logger +from reNgine.utils.logger import default_logger as logger from reNgine.utils.nmap import parse_http_ports_data from reNgine.utils.nmap_service import process_nmap_service_results from reNgine.utils.parsers import parse_nmap_results, parse_naabu_result @@ -21,8 +21,6 @@ from scanEngine.models import Notification -logger = Logger(True) - @app.task(name='port_scan', queue='io_queue', base=RengineTask, bind=True) def port_scan(self, hosts=None, ctx=None, description=None): """Run port scan. @@ -113,8 +111,8 @@ def port_scan(self, hosts=None, ctx=None, description=None): logger.info('πŸ”Œ Finished running naabu port scan.') if task_config['nmap_enabled']: - logger.warning('πŸ”Œ Starting nmap scans ...') - logger.warning(ports_data) + logger.info('πŸ”Œ Starting nmap scans ...') + logger.info(ports_data) # Process nmap results: 1 process per host sigs = [] for host, port_list in ports_data.items(): @@ -192,7 +190,7 @@ def nmap(self, args=None, ports=None, host=None, input_file=None, script=None, s output_file = self.output_path output_file_xml = f'{self.results_dir}/{host}_{self.filename}' vulns_file = f'{self.results_dir}/{host}_{filename_vulns}' - logger.warning(f'Running nmap on {host}') + logger.info(f'Running nmap on {host}') # Build cmd nmap_cmd = build_nmap_cmd( @@ -266,7 +264,7 @@ def scan_http_ports(self, host, ctx=None, description=None): create_dir=False ) except (ValueError, OSError) as e: - logger.error(f"Failed to create safe path for XML file: {str(e)}") + logger.exception(f"Failed to create safe path for XML file: {str(e)}") return None # Configure ports to scan @@ -297,7 +295,7 @@ def scan_http_ports(self, host, ctx=None, description=None): time.sleep(retry_delay) except Exception as e: - logger.error(f"Attempt {attempt + 1}/{max_retries}: Nmap scan failed: {str(e)}") + logger.exception(f"Attempt {attempt + 1}/{max_retries}: Nmap scan failed: {str(e)}") if attempt == max_retries - 1: return None time.sleep(retry_delay) @@ -377,5 +375,5 @@ def process_port_scan_result(parsed_result, domain, scan, urls, ports_data, ctx, ports_data[host] = [port_number] # Send notification - logger.warning(f'πŸ”Œ Found opened port {port_number} on {ip_address} ({host})') + logger.info(f'πŸ”Œ Found opened port {port_number} on {ip_address} ({host})') return True diff --git a/web/reNgine/tasks/reporting.py b/web/reNgine/tasks/reporting.py index 44c377bb6..949f9de1f 100644 --- a/web/reNgine/tasks/reporting.py +++ b/web/reNgine/tasks/reporting.py @@ -2,12 +2,10 @@ from reNgine.definitions import SUCCESS_TASK, 
FAILED_TASK from reNgine.celery import app -from reNgine.utils.logger import Logger +from reNgine.utils.logger import default_logger as logger from startScan.models import ScanActivity, ScanHistory, SubScan from reNgine.tasks.notification import send_scan_notif -logger = Logger(True) - @app.task(name='report', bind=False, queue='report_queue') def report(ctx=None, description=None): """Report task running after all other tasks. diff --git a/web/reNgine/tasks/scan.py b/web/reNgine/tasks/scan.py index b19a562fe..91330bd98 100644 --- a/web/reNgine/tasks/scan.py +++ b/web/reNgine/tasks/scan.py @@ -11,7 +11,7 @@ from reNgine.celery import app from reNgine.utils.debug import debug from reNgine.utils.formatters import SafePath, fmt_traceback -from reNgine.utils.logger import Logger +from reNgine.utils.logger import default_logger as logger from reNgine.utils.scan_helpers import ( get_scan_engine, handle_ip_scan, @@ -31,9 +31,6 @@ Celery tasks. """ -logger = Logger(is_task_logger=True) # Use task logger for Celery tasks - - @app.task(name='initiate_scan', queue='orchestrator_queue', bind=True) def initiate_scan(self, scan_history_id, domain_id, engine_id=None, scan_type=LIVE_SCAN, results_dir=RENGINE_RESULTS, imported_subdomains=None, @@ -73,7 +70,7 @@ def initiate_scan(self, scan_history_id, domain_id, engine_id=None, scan_type=LI raise ValueError("🚫 Failed to initialize scan") # Send start notification - logger.warning(f'🚀 Starting scan {scan_history_id}') + logger.info(f'🚀 Starting scan {scan_history_id}') send_scan_notif.apply_async( kwargs={ 'scan_history_id': scan.id, @@ -100,7 +97,7 @@ def initiate_scan(self, scan_history_id, domain_id, engine_id=None, scan_type=LI except (ValidationError, ScanHistory.DoesNotExist, Domain.DoesNotExist) as e: # Manage expected errors error_msg = str(e) - logger.error(f"🚫 Validation/DB error: {error_msg}") + logger.exception(f"🚫 Validation/DB error: {error_msg}") if scan: scan.scan_status = FAILED_TASK @@ -112,7 +109,7 @@ def initiate_scan(self, scan_history_id, domain_id, engine_id=None, scan_type=LI except Exception as e: # Manage unexpected errors error_msg = str(e) - logger.error(f"🚫 Unexpected error: {error_msg} {fmt_traceback(e)}") + logger.exception(f"🚫 Unexpected error: {error_msg} {fmt_traceback(e)}") if scan: scan.scan_status = FAILED_TASK @@ -178,7 +175,7 @@ def initiate_subscan( components=[domain.name, 'subscans', str(uuid_scan)] ) except (ValueError, OSError) as e: - logger.error(f"Failed to create results directory: {str(e)}") + logger.exception(f"Failed to create results directory: {str(e)}") subscan.scan_status = FAILED_TASK subscan.error_message = "Failed to create results directory, scan failed" subscan.save() @@ -216,7 +213,7 @@ def initiate_subscan( } ctx_str = json.dumps(ctx, indent=2) - logger.warning(f'Starting subscan {subscan.id} with context:\n{ctx_str}') + logger.info(f'Starting subscan {subscan.id} with context:\n{ctx_str}') if enable_http_crawl: results = http_crawl( diff --git a/web/reNgine/tasks/screenshot.py b/web/reNgine/tasks/screenshot.py index caa370850..faf808f57 100644 --- a/web/reNgine/tasks/screenshot.py +++ b/web/reNgine/tasks/screenshot.py @@ -7,7 +7,7 @@ from reNgine.utils.command_builder import build_eyewitness_cmd from reNgine.utils.formatters import get_output_file_name from reNgine.utils.http import prepare_urls_with_fallback -from reNgine.utils.logger import Logger +from reNgine.utils.logger import default_logger as logger from reNgine.utils.task_config import TaskConfig from reNgine.utils.utils import 
extract_columns, remove_file_or_pattern from scanEngine.models import Notification @@ -15,8 +15,6 @@ from reNgine.tasks.command import run_command_line from reNgine.tasks.notification import send_file_to_discord -logger = Logger(True) - @app.task(name='screenshot', queue='io_queue', base=RengineTask, bind=True) def screenshot(self, ctx=None, description=None): """Uses EyeWitness to gather screenshot of a domain and/or url. @@ -43,7 +41,7 @@ def screenshot(self, ctx=None, description=None): ctx=ctx ) if not urls: - logger.error('📸 No URLs to take screenshot of. Skipping.') + logger.warning('📸 No URLs to take screenshot of. Skipping.') return # Send start notif @@ -80,7 +78,7 @@ def screenshot(self, ctx=None, description=None): screenshot_paths.append(screenshot_path) subdomain.screenshot_path = screenshot_path.replace(RENGINE_RESULTS, '') subdomain.save() - logger.warning(f'📸 Added screenshot for {protocol}://{subdomain.name}:{port} to DB') + logger.info(f'📸 Added screenshot for {protocol}://{subdomain.name}:{port} to DB') # Remove all db, html extra files in screenshot results diff --git a/web/reNgine/tasks/subdomain.py b/web/reNgine/tasks/subdomain.py index fab3bf22e..ab491283a 100644 --- a/web/reNgine/tasks/subdomain.py +++ b/web/reNgine/tasks/subdomain.py @@ -9,7 +9,7 @@ from reNgine.celery import app from reNgine.celery_custom_task import RengineTask from reNgine.utils.command_builder import CommandBuilder, build_piped_command, build_subdomain_tool_commands -from reNgine.utils.logger import Logger +from reNgine.utils.logger import default_logger as logger from reNgine.utils.mock import prepare_subdomain_mock from reNgine.utils.task_config import TaskConfig from reNgine.tasks.command import run_command_line @@ -18,7 +18,6 @@ from scanEngine.models import Notification from startScan.models import Subdomain -logger = Logger(True) @app.task(name='subdomain_discovery', queue='io_queue', base=RengineTask, bind=True) @@ -49,7 +48,7 @@ def subdomain_discovery( host = self.subdomain.name if self.subdomain else self.domain.name if self.url_filter: - logger.warning(f'🌍 Ignoring subdomains scan as an URL path filter was passed ({self.url_filter}).') + logger.info(f'🌍 Ignoring subdomains scan as an URL path filter was passed ({self.url_filter}).') return # Check if dry run mode is enabled @@ -93,7 +92,7 @@ def subdomain_discovery( activity_id=self.activity_id ) except Exception as e: - logger.error(f'🌍 Error running command: {cmd}, error: {e}') + logger.exception(f'🌍 Error running command: {cmd}, error: {e}') continue # Gather all the tools' results in one single file. Write subdomains into @@ -160,14 +159,14 @@ def subdomain_discovery( valid_url ) if not valid_domain: - logger.error(f'Subdomain {subdomain_name} is not a valid domain, IP or URL. Skipping.') + logger.warning(f'Subdomain {subdomain_name} is not a valid domain, IP or URL. Skipping.') continue if valid_url: subdomain_name = urlparse(subdomain_name).netloc if subdomain_name in self.out_of_scope_subdomains: - logger.error(f'Subdomain {subdomain_name} is out of scope. Skipping.') + logger.warning(f'Subdomain {subdomain_name} is out of scope. 
Skipping.') continue # Add subdomain diff --git a/web/reNgine/tasks/url.py b/web/reNgine/tasks/url.py index ac42a320a..ae997dab0 100644 --- a/web/reNgine/tasks/url.py +++ b/web/reNgine/tasks/url.py @@ -14,7 +14,7 @@ from reNgine.settings import DELETE_DUPLICATES_THRESHOLD from reNgine.celery import app from reNgine.celery_custom_task import RengineTask -from reNgine.utils.logger import Logger +from reNgine.utils.logger import default_logger as logger from reNgine.tasks.command import run_command_line from reNgine.utils.http import ( get_subdomain_from_url, @@ -25,7 +25,6 @@ from reNgine.utils.command_builder import CommandBuilder, build_piped_command, build_fetch_url_commands from reNgine.utils.task_config import TaskConfig -logger = Logger(True) @app.task(name='fetch_url', queue='io_queue', base=RengineTask, bind=True) def fetch_url(self, urls=None, ctx=None, description=None): @@ -86,7 +85,7 @@ def fetch_url(self, urls=None, ctx=None, description=None): logger.warning(f'Tool {tool} not supported. Skipping.') continue - logger.warning(f'Running {tool} for URL discovery') + logger.info(f'Running {tool} for URL discovery') # Prepare output path for this tool self.output_path = config.get_working_dir(filename=f'urls_{tool}.txt') @@ -208,7 +207,7 @@ def _run_gf_patterns(self, gf_patterns, ctx): continue # Run gf on current pattern - logger.warning(f'Running gf on pattern "{gf_pattern}"') + logger.info(f'Running gf on pattern "{gf_pattern}"') config = TaskConfig(ctx, FETCH_URL) gf_output_file = config.get_working_dir(filename=f'gf_patterns_{gf_pattern}.txt') @@ -305,7 +304,7 @@ def run_gf_list(): } except Exception as e: - logger.error(f"Error running GF list: {e}") + logger.exception(f"Error running GF list: {e}") return { 'status': False, 'message': str(e) @@ -376,11 +375,11 @@ def remove_duplicate_endpoints(scan_history_id, domain_id, subdomain_id=None, fi continue msg += f'\n\t {ep.http_url} [{ep.http_status}] {filter_criteria}' ep.delete() - logger.warning(msg) + logger.info(msg) return len(eps_to_delete) return 0 except Exception as e: - logger.error(f'Error removing duplicate endpoints: {str(e)}') + logger.exception(f'Error removing duplicate endpoints: {str(e)}') return 0 diff --git a/web/reNgine/tasks/vulnerability.py b/web/reNgine/tasks/vulnerability.py index 50c3111bd..8aceee474 100644 --- a/web/reNgine/tasks/vulnerability.py +++ b/web/reNgine/tasks/vulnerability.py @@ -18,7 +18,7 @@ from reNgine.celery_custom_task import RengineTask from reNgine.tasks.notification import send_hackerone_report from reNgine.tasks.command import run_command_line -from reNgine.utils.logger import Logger +from reNgine.utils.logger import default_logger as logger from reNgine.utils.command_executor import stream_command from reNgine.utils.command_builder import ( CommandBuilder, @@ -51,8 +51,6 @@ Vulnerability, ) -logger = Logger(True) - @app.task(name='vulnerability_scan', queue='io_queue', base=RengineTask, bind=True) def vulnerability_scan(self, urls=None, ctx=None, description=None): """ @@ -154,7 +152,7 @@ def nuclei_scan(self, urls=None, ctx=None, description=None): ) if not urls: - logger.error('🔒 No URLs to scan for Nuclei. Skipping.') + logger.warning('🔒 No URLs to scan for Nuclei. 
Skipping.') return if task_config['intensity'] == 'normal': # reduce number of endpoints to scan @@ -362,7 +360,7 @@ def nuclei_individual_severity_module(self, cmd, severity, enable_http_crawl, sh # Print vuln severity = line['info'].get('severity', 'unknown') - logger.warning(str(vuln)) + logger.info(str(vuln)) send_vulnerability_notification( self, @@ -416,7 +414,7 @@ def dalfox_scan(self, urls=None, ctx=None, description=None): ) if not urls: - logger.error('No URLs to scan for XSS. Skipping.') + logger.warning('No URLs to scan for XSS. Skipping.') return # command builder @@ -529,7 +527,7 @@ def crlfuzz_scan(self, urls=None, ctx=None, description=None): ) if not urls: - logger.error('No URLs to scan for CRLF. Skipping.') + logger.warning('No URLs to scan for CRLF. Skipping.') return # command builder @@ -625,7 +623,7 @@ def s3scanner(self, ctx=None, description=None): subdomains = Subdomain.objects.filter(scan_history=self.scan) if not subdomains: - logger.error('No subdomains found for S3Scanner. Skipping.') + logger.warning('No subdomains found for S3Scanner. Skipping.') return with open(input_path, 'w') as f: @@ -703,4 +701,4 @@ def process_vulnerability_gpt_reports(self, source, log_message=None, task_confi try: future.result() except Exception as e: - logger.error(f"Exception for Vulnerability {vuln}: {e}") + logger.exception(f"Exception for Vulnerability {vuln}: {e}") diff --git a/web/reNgine/utils/colors.py b/web/reNgine/utils/colors.py new file mode 100644 index 000000000..725c5eea5 --- /dev/null +++ b/web/reNgine/utils/colors.py @@ -0,0 +1,57 @@ +class Colors: + """ ANSI color codes """ + # System + RED = "\033[0;31m" + BLUE = "\033[0;34m" + YELLOW = "\x1b[33m" + + # Custom + BLACK = "\033[0;30m" + BROWN = "\033[0;33m" + CYAN = "\033[0;36m" + GRAY = "\x1b[38;20m" + GREEN = "\033[0;32m" + MAGENTA = "\x1b[35;20m" + ORANGE = "\x1b[38;5;214m" + PURPLE = "\033[0;35m" + WHITE = "\x1b[37;20m" + BOLD_RED = "\x1b[31;1m" + + # Light/Dark + LIGHT_GRAY = "\033[0;37m" + LIGHT_ORANGE = "\x1b[38;5;215m" + LIGHT_RED = "\033[1;31m" + LIGHT_GREEN = "\033[1;32m" + LIGHT_BLUE = "\033[1;34m" + LIGHT_PURPLE = "\033[1;35m" + LIGHT_CYAN = "\033[1;36m" + LIGHT_WHITE = "\033[1;37m" + DARK_GRAY = "\033[1;30m" + + # Formatting + BOLD = "\033[1m" + FAINT = "\033[2m" + ITALIC = "\033[3m" + UNDERLINE = "\033[4m" + BLINK = "\033[5m" + NEGATIVE = "\033[7m" + CROSSED = "\033[9m" + END = "\033[0m" + RESET = "\x1b[0m" + + # cancel SGR codes if we don't write to a terminal + if not __import__("sys").stdout.isatty(): + for _ in dir(): + if isinstance(_, str) and _[0] != "_": + locals()[_] = "" + elif __import__("platform").system() == "Windows": + kernel32 = __import__("ctypes").windll.kernel32 + kernel32.SetConsoleMode(kernel32.GetStdHandle(-11), 7) + del kernel32 + + + +if __name__ == '__main__': + for i in dir(Colors): + if i[:1] != "_" and i != "END": + print("{:>16} {}".format(i, getattr(Colors, i) + i + Colors.END)) \ No newline at end of file diff --git a/web/reNgine/utils/command_builder.py b/web/reNgine/utils/command_builder.py index a762f76a3..0e451cf21 100644 --- a/web/reNgine/utils/command_builder.py +++ b/web/reNgine/utils/command_builder.py @@ -2,11 +2,9 @@ import shlex from reNgine.definitions import USE_SUBFINDER_CONFIG -from reNgine.utils.logger import Logger +from reNgine.utils.logger import default_logger as logger from reNgine.utils.api import get_netlas_key -logger = Logger(True) - class CommandBuilder: """Secure command builder that prevents shell injection""" @@ -196,7 +194,7 @@ def 
generate_header_param(custom_header, tool_name=None): if not parsed_header: return '' except ValueError as e: - logger.error(f"🚨 Header parsing failed: {str(e)}") + logger.exception(f"🚨 Header parsing failed: {str(e)}") return '' # Common formats diff --git a/web/reNgine/utils/command_executor.py b/web/reNgine/utils/command_executor.py index 8efa49bea..5fe92b043 100644 --- a/web/reNgine/utils/command_executor.py +++ b/web/reNgine/utils/command_executor.py @@ -7,7 +7,6 @@ import time import select import subprocess -import logging from django.utils import timezone from django.apps import apps import shlex @@ -19,8 +18,7 @@ from reNgine.utils.mock import generate_mock_crlfuzz_vulnerabilities, generate_mock_dalfox_vulnerabilities, generate_mock_nuclei_vulnerabilities, generate_mock_s3scanner_vulnerabilities from reNgine.settings import COMMAND_EXECUTOR_DRY_RUN from reNgine.utils.utils import format_json_output - -logger = logging.getLogger(__name__) +from reNgine.utils.logger import default_logger as logger class CommandExecutor: """Unified command execution handler with streaming capabilities""" @@ -43,14 +41,14 @@ def execute(self, stream=False): """Main execution entry point""" logger.debug(f"🔧 Starting command execution in {'STREAM' if stream else 'BUFFER'} mode") logger.debug(f"🔧 Command: {self.cmd}") - logger.debug(f"🔧 Context: {self.context}") + logger.debug(f"🔧 Context: {format_json_output(self.context)}") self.stream_mode = stream self._pre_execution_setup() try: return self._handle_execution(stream) except Exception as e: - logger.error(f"🔥 Critical execution error: {str(e)}", exc_info=True) + logger.exception(f"🔥 Critical execution error: {str(e)}") self._handle_execution_error(e) finally: if not stream: @@ -149,7 +147,7 @@ def _buffer_output(self): try: return self._format_buffer_output() except Exception as e: - logger.error(f"🔥 Buffer processing failed: {str(e)}") + logger.exception(f"🔥 Buffer processing failed: {str(e)}") return self.process.returncode, '' def _format_buffer_output(self): @@ -237,7 +235,7 @@ def _process_json_line(self, line): break except Exception as e: - logger.error(f"❌ JSON processing failed: {str(e)}") + logger.exception(f"❌ JSON processing failed: {str(e)}") logger.debug(f"❌ Problematic content: {line[:200]}...") def _check_timeout(self): @@ -284,7 +282,7 @@ def _read_ready_stream(self, ready): return decoded except Exception as e: - logger.error(f"🚨 Stream read failed: {str(e)}") + logger.exception(f"🚨 Stream read failed: {str(e)}") return None def _update_command_object(self, data, is_stream=False): @@ -321,14 +319,13 @@ def _update_command_object(self, data, is_stream=False): "✅ Command object updated successfully with stream mode", ) except Exception as e: - logger.error(f"❌ Output update failed: {str(e)}") + logger.exception(f"❌ Output update failed: {str(e)}") self.command_obj.output = f"Error: {str(e)}" self.command_obj.save(update_fields=['output']) def _save_command_object(self, current_output, arg1, arg2): self.command_obj.output = current_output + arg1 self.command_obj.save(update_fields=['output']) - logger.debug(arg2) def _handle_execution_error(self, error): """Handle execution errors""" @@ -390,7 +387,7 @@ def _save_return_code(self): logger.debug(f"💾 Saved return code: {return_code}") except Exception as e: - logger.error(f"❌ Failed to save return code: {str(e)}") + logger.exception(f"❌ Failed to save return code: {str(e)}") self.command_obj.return_code = -1 self.command_obj.save(update_fields=['return_code']) 
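The logging migration above follows one uniform pattern across every module this commit touches: the per-module `logging.getLogger(__name__)` and `Logger(True)` instances are removed in favor of the shared `default_logger`, and `logger.error()` calls inside `except` blocks become `logger.exception()` so the traceback is recorded alongside the message. A minimal sketch of the pattern (the `run_tool` body is a hypothetical stand-in; `default_logger` is referenced throughout the patch but defined outside these hunks):

    # Before: every module built its own logger
    #   import logging
    #   logger = logging.getLogger(__name__)   # or: logger = Logger(True)

    # After: one shared, color-aware logger instance
    from reNgine.utils.logger import default_logger as logger

    def run_tool(url):
        try:
            ...  # tool invocation goes here
        except Exception as e:
            # exception() logs at ERROR level and attaches the traceback
            # automatically, which the old error(e) calls did not
            logger.exception(f"Unexpected error: {e}")
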
diff --git a/web/reNgine/utils/db.py b/web/reNgine/utils/db.py index 6c2c4031a..79fa74d03 100644 --- a/web/reNgine/utils/db.py +++ b/web/reNgine/utils/db.py @@ -51,12 +51,10 @@ is_valid_url, sanitize_url, ) -from reNgine.utils.logger import Logger +from reNgine.utils.logger import default_logger as logger from reNgine.gpt import GPTVulnerabilityReportGenerator -logger = Logger(True) - def save_vulns(self, notif, vulns_file, vulns): with open(vulns_file, 'w') as f: json.dump(vulns, f, indent=4) @@ -80,11 +78,11 @@ def save_vulns(self, notif, vulns_file, vulns): **vuln_data) vulns_str += f'• {str(vuln)}\n' if created: - logger.warning(str(vuln)) + logger.info(str(vuln)) # Send only 1 notif for all vulns to reduce number of notifs if notif and notif.send_vuln_notif and vulns_str: - logger.warning(vulns_str) + logger.info(vulns_str) self.notify(fields={'CVEs': vulns_str}) def save_vulnerability(**vuln_data): @@ -149,11 +147,11 @@ def save_endpoint(http_url, ctx=None, crawl=False, is_default=False, http_status scheme = urlparse(http_url).scheme if not scheme: - logger.error(f'{http_url} is missing scheme (http or https). Skipping.') + logger.warning(f'{http_url} is missing scheme (http or https). Skipping.') return None, False if not is_valid_url(http_url): - logger.error(f'{http_url} is not a valid URL. Skipping.') + logger.warning(f'{http_url} is not a valid URL. Skipping.') return None, False # Get required objects @@ -170,7 +168,7 @@ def save_endpoint(http_url, ctx=None, crawl=False, is_default=False, http_status # For regular domain scans, validate URL belongs to domain if not is_ip_scan and domain.name not in http_url: - logger.error(f"{http_url} is not a URL of domain {domain.name}. Skipping.") + logger.warning(f"{http_url} is not a URL of domain {domain.name}. Skipping.") return None, False http_url = sanitize_url(http_url) @@ -197,7 +195,7 @@ def save_endpoint(http_url, ctx=None, crawl=False, is_default=False, http_status custom_ctx['track'] = False results = http_crawl(urls=[http_url], ctx=custom_ctx) if not results or results[0]['failed']: - logger.error(f'Endpoint for {http_url} does not seem to be up. Skipping.') + logger.warning(f'Endpoint for {http_url} does not seem to be up. Skipping.') return None, False endpoint_data = results[0] @@ -246,12 +244,12 @@ def save_subdomain(subdomain_name, ctx=None): validators.ipv6(subdomain_name) ) if not valid_domain: - logger.error(f'{subdomain_name} is not a valid domain/IP. Skipping.') + logger.warning(f'{subdomain_name} is not a valid domain/IP. Skipping.') return None, False # Check if subdomain is in scope if subdomain_name in out_of_scope_subdomains: - logger.error(f'{subdomain_name} is out-of-scope. Skipping.') + logger.warning(f'{subdomain_name} is out-of-scope. Skipping.') return None, False # Get domain object and check if we're scanning an IP @@ -266,7 +264,7 @@ def save_subdomain(subdomain_name, ctx=None): # For regular domain scans, validate subdomain belongs to domain if not is_ip_scan and ctx.get('domain_id') and domain.name not in subdomain_name: - logger.error(f"{subdomain_name} is not a subdomain of domain {domain.name}. Skipping.") + logger.warning(f"{subdomain_name} is not a subdomain of domain {domain.name}. Skipping.") return None, False # Create or get subdomain object @@ -293,7 +291,7 @@ def save_subdomain_metadata(subdomain, endpoint, extra_datas=None): subdomain.http_url = http_url subdomain.save() else: - logger.error(f'No HTTP URL found for {subdomain.name}. 
Skipping.') + logger.warning(f'No HTTP URL found for {subdomain.name}. Skipping.') def _save_alive_subdomain_metadata(endpoint, subdomain, extra_datas): @@ -336,7 +334,7 @@ def save_employee(name, designation, scan_history=None): name=name, designation=designation) if created: - logger.warning(f'💾 Found new employee {name}') + logger.info(f'💾 Found new employee {name}') # Add employee to ScanHistory if scan_history: @@ -371,7 +369,7 @@ def save_imported_subdomains(subdomains, ctx=None): if not subdomains: return - logger.warning(f'Found {len(subdomains)} imported subdomains.') + logger.info(f'Found {len(subdomains)} imported subdomains.') with open(f'{results_dir}/from_imported.txt', 'w+') as output_file: url_filter = ctx.get('url_filter') enable_http_crawl = ctx.get('yaml_configuration').get(ENABLE_HTTP_CRAWL, DEFAULT_ENABLE_HTTP_CRAWL) @@ -711,7 +709,7 @@ def get_random_proxy(): if not proxy.use_proxy: return '' proxy_name = random.choice(proxy.proxies.splitlines()) - logger.warning(f'🌐 Using proxy: {proxy_name}') + logger.info(f'🌐 Using proxy: {proxy_name}') # os.environ['HTTP_PROXY'] = proxy_name # os.environ['HTTPS_PROXY'] = proxy_name return proxy_name @@ -757,7 +755,7 @@ def get_subdomains(write_filepath=None, exclude_subdomains=False, ctx=None): if subdomain.name ] if not subdomains: - logger.error('💾 No subdomains were found in query !') + logger.warning('💾 No subdomains were found in query !') if url_filter: subdomains = [f'{subdomain}/{url_filter}' for subdomain in subdomains] @@ -966,7 +964,7 @@ def get_http_urls(is_alive=False, is_uncrawled=False, strict=False, ignore_files endpoints = [e for e in endpoints if not urlparse(e).path.endswith(extensions)] if not endpoints: - logger.error('💾 No endpoints were found in query !') + logger.warning('💾 No endpoints were found in query !') if write_filepath: with open(write_filepath, 'w') as f: diff --git a/web/reNgine/utils/debug.py b/web/reNgine/utils/debug.py index 95ced18f0..b93d04a97 100644 --- a/web/reNgine/utils/debug.py +++ b/web/reNgine/utils/debug.py @@ -1,11 +1,9 @@ import os import threading import debugpy -from reNgine.utils.logger import Logger +from reNgine.utils.logger import default_logger as logger from reNgine.settings import CELERY_REMOTE_DEBUG, CELERY_REMOTE_DEBUG_PORT -logger = Logger(True) - def debug(): try: # Activate remote debug for scan worker @@ -27,4 +25,4 @@ def wait_for_client_with_timeout(): except Exception as e: - logger.error(e) \ No newline at end of file + logger.exception(e) \ No newline at end of file diff --git a/web/reNgine/utils/dns.py b/web/reNgine/utils/dns.py index c0abe998d..5f18a1f85 100644 --- a/web/reNgine/utils/dns.py +++ b/web/reNgine/utils/dns.py @@ -3,7 +3,7 @@ import subprocess import tldextract -from reNgine.utils.logger import Logger +from reNgine.utils.logger import default_logger as logger from reNgine.utils.command_builder import build_tlsx_cmd, build_whois_cmd from reNgine.common_serializers import ( DomainDNSRecordSerializer, @@ -14,7 +14,6 @@ ) from dotted_dict import DottedDict -logger = Logger(True) def reverse_whois(lookup_keyword): domains = [] @@ -119,9 +118,9 @@ def get_domain_historical_ip_address(domain): }) except requests.RequestException as e: - logger.error(f"Error retrieving historical IP data for {domain}: {str(e)}") + logger.exception(f"Error retrieving historical IP data for {domain}: {str(e)}") except Exception as e: - logger.error(f"Error parsing historical IP data for {domain}: {str(e)}") + logger.exception(f"Error parsing historical IP 
data for {domain}: {str(e)}") return ips @@ -285,7 +284,7 @@ def find_related_tlds(domain): related_tlds = list(set(related_tlds)) except Exception as e: - logger.error(f"Error finding related TLDs for {domain}: {str(e)}") + logger.exception(f"Error finding related TLDs for {domain}: {str(e)}") return related_tlds @@ -421,5 +420,5 @@ def execute_whois(domain): return whois_data except Exception as e: - logger.error(f"Error executing whois for {domain}: {str(e)}") + logger.exception(f"Error executing whois for {domain}: {str(e)}") return None diff --git a/web/reNgine/utils/formatters.py b/web/reNgine/utils/formatters.py index 1fa99f137..ed931c75e 100644 --- a/web/reNgine/utils/formatters.py +++ b/web/reNgine/utils/formatters.py @@ -4,11 +4,9 @@ from pathlib import Path from typing import List, Union from django.utils import timezone -from reNgine.utils.logger import Logger +from reNgine.utils.logger import default_logger as logger from reNgine.settings import DOMAIN_NAME, RENGINE_TASK_IGNORE_CACHE_KWARGS -logger = Logger(__name__) - class SafePath: """Utility class for safe path handling and directory creation.""" @@ -75,7 +73,7 @@ def create_safe_path( return str(abs_path) except Exception as e: - logger.error(f"Error creating safe path: {str(e)}") + logger.exception(f"Error creating safe path: {str(e)}") raise @classmethod diff --git a/web/reNgine/utils/http.py b/web/reNgine/utils/http.py index 7f7fe3221..02cf7d7d8 100644 --- a/web/reNgine/utils/http.py +++ b/web/reNgine/utils/http.py @@ -8,11 +8,10 @@ from django.core.validators import URLValidator from django.core.exceptions import ValidationError -from reNgine.utils.logger import Logger +from reNgine.utils.logger import default_logger as logger from reNgine.definitions import ENABLE_HTTP_CRAWL from reNgine.settings import DEFAULT_ENABLE_HTTP_CRAWL -logger = Logger(True) def get_subdomain_from_url(url): """Get subdomain from HTTP URL. diff --git a/web/reNgine/utils/ip.py b/web/reNgine/utils/ip.py index b6ae45c10..af24ef177 100644 --- a/web/reNgine/utils/ip.py +++ b/web/reNgine/utils/ip.py @@ -1,11 +1,10 @@ import ipaddress import validators from reNgine.utils.command_executor import run_command -from reNgine.utils.logger import Logger +from reNgine.utils.logger import default_logger as logger from startScan.models import IpAddress, CountryISO from django.db import transaction -logger = Logger(True) def get_ip_info(ip_address): """ @@ -44,7 +43,7 @@ def get_ips_from_cidr_range(target): try: return [str(ip) for ip in ipaddress.IPv4Network(target)] except ValueError: - logger.error(f'🌍 {target} is not a valid CIDR range. Skipping.') + logger.exception(f'🌍 {target} is not a valid CIDR range. 
Skipping.') return [] def save_ip_address(ip_address, subdomain=None, subscan=None, **kwargs): @@ -122,7 +121,7 @@ def geo_localize_ip(host, ip_id=None): ip.geo_iso = geo_object ip.save() except IpAddress.DoesNotExist: - logger.error(f"🌍 IP address with id {ip_id} not found") + logger.exception(f"🌍 IP address with id {ip_id} not found") return None # Return geo data @@ -132,5 +131,5 @@ def geo_localize_ip(host, ip_id=None): } except Exception as e: - logger.error(f"🌍 Error during geolocation of {host}: {str(e)}") + logger.exception(f"🌍 Error during geolocation of {host}: {str(e)}") return None diff --git a/web/reNgine/utils/logger.py b/web/reNgine/utils/logger.py index db3ebc667..ece3f52ca 100644 --- a/web/reNgine/utils/logger.py +++ b/web/reNgine/utils/logger.py @@ -2,19 +2,7 @@ import os from celery.utils.log import get_task_logger from celery import current_task - -# ANSI color codes -GREY = "\x1b[38;20m" -BLUE = "\x1b[34;20m" -YELLOW = "\x1b[33;20m" -RED = "\x1b[31;20m" -BOLD_RED = "\x1b[31;1m" -RESET = "\x1b[0m" -BOLD = "\x1b[1m" -GREEN = "\x1b[32;20m" -CYAN = "\x1b[36;20m" -MAGENTA = "\x1b[35;20m" -WHITE = "\x1b[37;20m" +from .colors import Colors # Force colors even in Docker FORCE_COLOR = os.environ.get('FORCE_COLOR', 'true').lower() != 'false' @@ -25,11 +13,11 @@ class CustomFormatter(logging.Formatter): FORMAT = "%(levelname)s | %(message)s" FORMATS = { - logging.DEBUG: BLUE + FORMAT + RESET, - logging.INFO: GREY + FORMAT + RESET, - logging.WARNING: YELLOW + FORMAT + RESET, - logging.ERROR: RED + FORMAT + RESET, - logging.CRITICAL: BOLD_RED + FORMAT + RESET + logging.DEBUG: Colors.BLUE + FORMAT + Colors.RESET, + logging.INFO: Colors.GRAY + FORMAT + Colors.RESET, + logging.WARNING: Colors.YELLOW + FORMAT + Colors.RESET, + logging.ERROR: Colors.RED + FORMAT + Colors.RESET, + logging.CRITICAL: Colors.BOLD_RED + FORMAT + Colors.RESET } def format(self, record): @@ -59,62 +47,75 @@ def __init__(self, is_task_logger=False): name = is_task_logger self.is_task_logger = True + # Colors for different task categories + base_task_colors = Colors.GRAY + scan_management_colors = Colors.WHITE + detection_analysis_colors = Colors.PURPLE + web_discovery_colors = Colors.GREEN + reconnaissance_colors = Colors.BLUE + osint_colors = Colors.LIGHT_CYAN + vulnerability_colors = Colors.LIGHT_ORANGE + notification_colors = Colors.WHITE + system_colors = Colors.GRAY + # Map of task colors self.task_colors = { # Base tasks - 'default': WHITE, - 'run_command_line': YELLOW, + 'default': base_task_colors, + 'run_command_line': Colors.YELLOW, + # Scan management + 'initiate_scan': scan_management_colors, + 'initiate_subscan': scan_management_colors, + 'post_process': scan_management_colors, + 'remove_duplicate_endpoints': scan_management_colors, + # Detection and analysis - 'waf_detection': YELLOW, - 'vulnerability_scan': RED, - 'port_scan': BLUE, - 'nmap': CYAN, - 'scan_http_ports': MAGENTA, + 'waf_detection': detection_analysis_colors, + 'port_scan': detection_analysis_colors, + 'nmap': detection_analysis_colors, + 'scan_http_ports': detection_analysis_colors, + # Web discovery + 'http_crawl': web_discovery_colors, + 'fetch_url': web_discovery_colors, + 'run_cmseek': web_discovery_colors, + 'screenshot': web_discovery_colors, + # Reconnaissance - 'subdomain_discovery': BLUE, - 'osint_discovery': CYAN, - 'theHarvester': CYAN, - 'find_subdomains': BLUE, - 'query_whois': GREEN, - 'query_reverse_whois': GREEN, - 'query_ip_history': GREEN, - - # Fuzzing and exploration - 'dir_file_fuzz': YELLOW, - 'http_crawl': 
diff --git a/web/reNgine/utils/logger.py b/web/reNgine/utils/logger.py
index db3ebc667..ece3f52ca 100644
--- a/web/reNgine/utils/logger.py
+++ b/web/reNgine/utils/logger.py
@@ -2,19 +2,7 @@
 import os
 from celery.utils.log import get_task_logger
 from celery import current_task
-
-# ANSI color codes
-GREY = "\x1b[38;20m"
-BLUE = "\x1b[34;20m"
-YELLOW = "\x1b[33;20m"
-RED = "\x1b[31;20m"
-BOLD_RED = "\x1b[31;1m"
-RESET = "\x1b[0m"
-BOLD = "\x1b[1m"
-GREEN = "\x1b[32;20m"
-CYAN = "\x1b[36;20m"
-MAGENTA = "\x1b[35;20m"
-WHITE = "\x1b[37;20m"
+from .colors import Colors

 # Force colors even in Docker
 FORCE_COLOR = os.environ.get('FORCE_COLOR', 'true').lower() != 'false'
@@ -25,11 +13,11 @@ class CustomFormatter(logging.Formatter):
     FORMAT = "%(levelname)s | %(message)s"

     FORMATS = {
-        logging.DEBUG: BLUE + FORMAT + RESET,
-        logging.INFO: GREY + FORMAT + RESET,
-        logging.WARNING: YELLOW + FORMAT + RESET,
-        logging.ERROR: RED + FORMAT + RESET,
-        logging.CRITICAL: BOLD_RED + FORMAT + RESET
+        logging.DEBUG: Colors.BLUE + FORMAT + Colors.RESET,
+        logging.INFO: Colors.GRAY + FORMAT + Colors.RESET,
+        logging.WARNING: Colors.YELLOW + FORMAT + Colors.RESET,
+        logging.ERROR: Colors.RED + FORMAT + Colors.RESET,
+        logging.CRITICAL: Colors.BOLD_RED + FORMAT + Colors.RESET
     }

     def format(self, record):
@@ -59,62 +47,75 @@ def __init__(self, is_task_logger=False):
             name = is_task_logger
             self.is_task_logger = True

+        # Colors for different task categories
+        base_task_colors = Colors.GRAY
+        scan_management_colors = Colors.WHITE
+        detection_analysis_colors = Colors.PURPLE
+        web_discovery_colors = Colors.GREEN
+        reconnaissance_colors = Colors.BLUE
+        osint_colors = Colors.LIGHT_CYAN
+        vulnerability_colors = Colors.LIGHT_ORANGE
+        notification_colors = Colors.WHITE
+        system_colors = Colors.GRAY
+
         # Map of task colors
         self.task_colors = {
             # Base tasks
-            'default': WHITE,
-            'run_command_line': YELLOW,
+            'default': base_task_colors,
+            'run_command_line': Colors.YELLOW,
+
+            # Scan management
+            'initiate_scan': scan_management_colors,
+            'initiate_subscan': scan_management_colors,
+            'post_process': scan_management_colors,
+            'remove_duplicate_endpoints': scan_management_colors,

             # Detection and analysis
-            'waf_detection': YELLOW,
-            'vulnerability_scan': RED,
-            'port_scan': BLUE,
-            'nmap': CYAN,
-            'scan_http_ports': MAGENTA,
+            'waf_detection': detection_analysis_colors,
+            'port_scan': detection_analysis_colors,
+            'nmap': detection_analysis_colors,
+            'scan_http_ports': detection_analysis_colors,
+
+            # Web discovery
+            'http_crawl': web_discovery_colors,
+            'fetch_url': web_discovery_colors,
+            'run_cmseek': web_discovery_colors,
+            'screenshot': web_discovery_colors,

             # Reconnaissance
-            'subdomain_discovery': BLUE,
-            'osint_discovery': CYAN,
-            'theHarvester': CYAN,
-            'find_subdomains': BLUE,
-            'query_whois': GREEN,
-            'query_reverse_whois': GREEN,
-            'query_ip_history': GREEN,
-
-            # Fuzzing and exploration
-            'dir_file_fuzz': YELLOW,
-            'http_crawl': MAGENTA,
-            'dalfox_scan': RED,
-            'crlfuzz_scan': RED,
-            's3scanner': RED,
+            'subdomain_discovery': reconnaissance_colors,
+            'geo_localize': reconnaissance_colors,

-            # Screenshots and visual
-            'screenshot': GREY,
-            'fetch_url': GREY,
+            # OSINT
+            'osint': Colors.CYAN,
+            'osint_discovery': osint_colors,
+            'dorking': osint_colors,
+            'theHarvester': osint_colors,
+            'h8mail': osint_colors,
+            'find_subdomains': osint_colors,
+            'query_whois': osint_colors,
+            'query_reverse_whois': osint_colors,
+            'query_ip_history': osint_colors,

             # Vulnerability analysis
-            'nuclei_scan': BOLD_RED,
-            'llm_vulnerability_description': RED,
+            'vulnerability_scan': Colors.ORANGE,
+            'nuclei_scan': vulnerability_colors,
+            'nuclei_individual_severity_module': vulnerability_colors,
+            'dir_file_fuzz': vulnerability_colors,
+            'dalfox_scan': vulnerability_colors,
+            'crlfuzz_scan': vulnerability_colors,
+            's3scanner': vulnerability_colors,
+            'llm_vulnerability_description': vulnerability_colors,

             # Notifications
-            'send_scan_notif': GREEN,
-            'send_task_notif': GREEN,
-            'send_file_to_discord': GREEN,
-            'send_hackerone_report': GREEN,
-
-            # Scan management
-            'initiate_scan': MAGENTA,
-            'initiate_subscan': MAGENTA,
-            'post_process': CYAN,
-            'remove_duplicate_endpoints': BLUE,
-
-            # Infrastructure
-            'geo_localize': CYAN,
-            'run_cmseek': YELLOW,
+            'send_scan_notif': notification_colors,
+            'send_task_notif': notification_colors,
+            'send_file_to_discord': notification_colors,
+            'send_hackerone_report': notification_colors,

             # System tasks
-            'report': GREEN,
-            'scan_activity': WHITE
+            'report': system_colors,
+            'scan_activity': system_colors
         }

         self.logger = logging.getLogger(name)
@@ -132,6 +133,16 @@ def __init__(self, is_task_logger=False):
             task_handler = logging.StreamHandler()
             task_handler.setFormatter(CustomFormatter())
             self.task_logger.addHandler(task_handler)
+
+        # Initialization of colors for log levels
+        self.level_colors = {
+            'DEBUG': Colors.BLUE,
+            'INFO': Colors.GRAY,
+            'WARNING': Colors.YELLOW,
+            'ERROR': Colors.RED,
+            'CRITICAL': Colors.BOLD_RED
+        }
+
         self._initialized = True

     def info(self, message):
@@ -169,12 +180,12 @@ def _log(self, message, level):
         for line in lines:
             if FORCE_COLOR:
                 color = self.task_colors.get(task_name.split('.')[-1], self.task_colors['default'])
-                level_color = self.level_colors.get(level, GREY)
-                bold = BOLD if level != 'INFO' else ''
-                colored_line = f"{color}{task_name:<35}{RESET} | {level_color}{bold}{level:<8}{RESET} | {color}{line}{RESET}"
+                level_color = self.level_colors.get(level, Colors.GRAY) if level != 'INFO' else color
+                bold = Colors.BOLD if level != 'INFO' else ''
+                colored_line = f"{color}{task_name:<20}{Colors.RESET} | {level_color}{bold}{level:<8}{Colors.RESET} | {level_color}{line}{Colors.RESET}"
                 colored_lines.append(colored_line)
             else:
-                colored_lines.append(f"{task_name:<35} | {level:<8} | {line}")
+                colored_lines.append(f"{task_name:<20} | {level:<8} | {line}")

         # Join lines and print
         formatted_message = '\n'.join(colored_lines)
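The per-level coloring above follows a common pattern: a FORMATS dict keyed by level number, consulted in format(). A self-contained sketch of the idea; the Colors stand-in below is illustrative and not the project's full colors.py:

    import logging

    class Colors:  # illustrative stand-in, not the project's colors.py
        BLUE, GRAY = "\x1b[34;20m", "\x1b[38;20m"
        YELLOW, RED = "\x1b[33;20m", "\x1b[31;20m"
        BOLD_RED, RESET = "\x1b[31;1m", "\x1b[0m"

    class ColorFormatter(logging.Formatter):
        FORMAT = "%(levelname)s | %(message)s"
        FORMATS = {
            logging.DEBUG: Colors.BLUE + FORMAT + Colors.RESET,
            logging.INFO: Colors.GRAY + FORMAT + Colors.RESET,
            logging.WARNING: Colors.YELLOW + FORMAT + Colors.RESET,
            logging.ERROR: Colors.RED + FORMAT + Colors.RESET,
            logging.CRITICAL: Colors.BOLD_RED + FORMAT + Colors.RESET,
        }

        def format(self, record):
            # Wrap the base format string in the color for this record's level
            fmt = self.FORMATS.get(record.levelno, self.FORMAT)
            return logging.Formatter(fmt).format(record)

    handler = logging.StreamHandler()
    handler.setFormatter(ColorFormatter())
    demo = logging.getLogger("demo")
    demo.addHandler(handler)
    demo.setLevel(logging.DEBUG)
    demo.warning("colored output")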
diff --git a/web/reNgine/utils/mock.py b/web/reNgine/utils/mock.py
index e6c4f6452..8d4ebf877 100644
--- a/web/reNgine/utils/mock.py
+++ b/web/reNgine/utils/mock.py
@@ -4,10 +4,9 @@

 import random

-from reNgine.utils.logger import Logger
+from reNgine.utils.logger import default_logger as logger
 from reNgine.utils.parsers import parse_dalfox_result, parse_s3scanner_result

-logger = Logger(True)

 def generate_mock_urls(count=10, base_domains=None, subdomains=True, paths=True, params=False):
     """Generate mock URLs for dry run testing
@@ -545,7 +544,7 @@ def prepare_port_scan_mock(host, results_dir, context=None):
         return parse_http_ports_data(xml_file) if Path(xml_file).exists() else None
     except Exception as e:
-        logger.error(f"Failed to prepare mock port scan data: {str(e)}")
+        logger.exception(f"Failed to prepare mock port scan data: {str(e)}")
         return None

 def prepare_subdomain_mock(host, context=None):
@@ -567,7 +566,7 @@ def prepare_subdomain_mock(host, context=None):
             host, context, SubdomainSerializer
         )
     except Exception as e:
-        logger.error(f"Failed to prepare mock subdomain data: {str(e)}")
+        logger.exception(f"Failed to prepare mock subdomain data: {str(e)}")
         return []

@@ -648,7 +647,7 @@ def prepare_nuclei_vulnerability_mock(urls, context=None):
             ),
         }
     except Exception as e:
-        logger.error(f"Failed to prepare mock vulnerability data: {str(e)}")
+        logger.exception(f"Failed to prepare mock vulnerability data: {str(e)}")
         return {'status': 'error', 'message': str(e)}

 def prepare_dalfox_vulnerability_mock(urls, context=None):
@@ -678,7 +677,7 @@ def prepare_dalfox_vulnerability_mock(urls, context=None):
             'total_count': len(parsed_vulnerabilities)
         }
     except Exception as e:
-        logger.error(f"Failed to prepare mock Dalfox vulnerability data: {str(e)}")
+        logger.exception(f"Failed to prepare mock Dalfox vulnerability data: {str(e)}")
         return {'status': 'error', 'message': str(e)}

 def prepare_crlfuzz_vulnerability_mock(urls, context=None):
@@ -705,7 +704,7 @@ def prepare_crlfuzz_vulnerability_mock(urls, context=None):
             'total_count': len(parsed_vulnerabilities)
         }
     except Exception as e:
-        logger.error(f"Failed to prepare mock CRLFUZZ vulnerability data: {str(e)}")
+        logger.exception(f"Failed to prepare mock CRLFUZZ vulnerability data: {str(e)}")
         return {'status': 'error', 'message': str(e)}

 def prepare_s3scanner_vulnerability_mock(context=None):
@@ -746,5 +745,5 @@ def prepare_s3scanner_vulnerability_mock(context=None):
             ),
         }
     except Exception as e:
-        logger.error(f"Failed to prepare mock S3Scanner bucket data: {str(e)}")
+        logger.exception(f"Failed to prepare mock S3Scanner bucket data: {str(e)}")
         return {'status': 'error', 'message': str(e)}
diff --git a/web/reNgine/utils/nmap.py b/web/reNgine/utils/nmap.py
index bafefdb0d..f28d92cee 100644
--- a/web/reNgine/utils/nmap.py
+++ b/web/reNgine/utils/nmap.py
@@ -3,13 +3,12 @@
 from django.db import transaction

 from reNgine.definitions import UNCOMMON_WEB_PORTS
-from reNgine.utils.logger import Logger
+from reNgine.utils.logger import default_logger as logger
 from reNgine.utils.formatters import SafePath
 from reNgine.utils.ip import save_ip_address
 from reNgine.utils.nmap_service import create_or_update_port_with_service
 from reNgine.utils.parsers import parse_nmap_results

-logger = Logger(True)

 def get_nmap_http_datas(host, ctx):
     """Check if standard and non-standard HTTP ports are open for given hosts.
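These mock helpers appear to back reNgine's dry-run mode (see COMMAND_EXECUTOR_DRY_RUN among the scan_helpers.py imports below): tasks receive canned, correctly shaped results instead of invoking external tools. A rough sketch of the pattern, with hypothetical names throughout:

    import random

    DRY_RUN = True  # stand-in for a setting like COMMAND_EXECUTOR_DRY_RUN

    def prepare_subdomain_mock_demo(host):
        # Fake data shaped like real tool output
        prefixes = random.sample(['www', 'api', 'mail', 'dev', 'staging'], k=3)
        return [f'{p}.{host}' for p in prefixes]

    def discover_subdomains(host):
        if DRY_RUN:
            return prepare_subdomain_mock_demo(host)
        raise NotImplementedError('real tool execution elided')

    print(discover_subdomains('example.com'))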
diff --git a/web/reNgine/utils/nmap_service.py b/web/reNgine/utils/nmap_service.py
index a7d114dc5..63ecca44e 100644
--- a/web/reNgine/utils/nmap_service.py
+++ b/web/reNgine/utils/nmap_service.py
@@ -1,11 +1,10 @@
 import xml.etree.ElementTree as ET

 from reNgine.definitions import UNCOMMON_WEB_PORTS
-from reNgine.utils.logger import Logger
+from reNgine.utils.logger import default_logger as logger
 from reNgine.utils.parsers import parse_nmap_results
 from startScan.models import IpAddress, Port

-logger = Logger(True)

 def create_or_update_port_with_service(port_number, service_info, ip_address=None):
     """Create or update port with service information from nmap for specific IP."""
@@ -51,7 +50,7 @@ def process_nmap_service_results(xml_file):
                 ip_address=ip_address
             )
         except Exception as e:
-            logger.error(f"Failed to process port {service['port']}: {str(e)}")
+            logger.exception(f"Failed to process port {service['port']}: {str(e)}")

 def update_port_service_info(port, service_info):
     """Update port service information consistently."""
@@ -71,7 +70,7 @@ def update_port_service_info(port, service_info):
         port.save(update_fields=['service_name', 'description'])

     except Exception as e:
-        logger.error(f"Error updating port {port.number}: {str(e)}")
+        logger.exception(f"Error updating port {port.number}: {str(e)}")
         raise

 def get_or_create_port(ip_address, port_number, service_info=None):
diff --git a/web/reNgine/utils/notifications.py b/web/reNgine/utils/notifications.py
index 13a2d6f53..625c807a2 100644
--- a/web/reNgine/utils/notifications.py
+++ b/web/reNgine/utils/notifications.py
@@ -12,11 +12,10 @@
 from reNgine.settings import CELERY_BROKER_URL
 from scanEngine.models import Notification, EngineType
 from startScan.models import ScanHistory, SubScan, ScanActivity
-from reNgine.utils.logger import Logger
+from reNgine.utils.logger import default_logger as logger
 from reNgine.utils.formatters import get_scan_fields, get_scan_title, get_scan_url
 from reNgine.utils.utils import format_json_output

-logger = Logger(True)

 DISCORD_WEBHOOKS_CACHE = redis.Redis.from_url(CELERY_BROKER_URL)
diff --git a/web/reNgine/utils/parsers.py b/web/reNgine/utils/parsers.py
index 168c110e8..2b536dfbb 100644
--- a/web/reNgine/utils/parsers.py
+++ b/web/reNgine/utils/parsers.py
@@ -14,9 +14,7 @@
     NUCLEI_DEFAULT_TEMPLATES_PATH,
 )
 from reNgine.utils.http import extract_httpx_url
-from reNgine.utils.logger import Logger
-
-logger = Logger(__name__)
+from reNgine.utils.logger import default_logger as logger

 def parse_httpx_result(line, subdomain, ctx, follow_redirect, update_subdomain_metadatas, subscan=None):
     """Process a single line from httpx output.
@@ -343,8 +341,8 @@ def parse_nmap_results(xml_file, output_file=None, parse_type='vulnerabilities')
     try:
         nmap_results = xmltodict.parse(content)
     except Exception as e:
-        logger.warning(e)
-        logger.error(f'Cannot parse {xml_file} to valid JSON. Skipping.')
+        logger.debug(e)
+        logger.exception(f'Cannot parse {xml_file} to valid JSON. Skipping.')
         return []

     if output_file:
@@ -492,30 +490,30 @@ def parse_nmap_vulscan_output(script_output):
             entry = {'id': id, 'title': title}
             data[provider_name]['entries'].append(entry)

-    logger.warning('Vulscan parsed output:')
-    logger.warning(pprint.pformat(data))
+    logger.info('Vulscan parsed output:')
+    logger.info(pprint.pformat(data))

     for provider_name in data:
         if provider_name == 'Exploit-DB':
-            logger.error(f'Provider {provider_name} is not supported YET.')
+            logger.warning(f'Provider {provider_name} is not supported YET.')
         elif provider_name == 'IBM X-Force':
-            logger.error(f'Provider {provider_name} is not supported YET.')
+            logger.warning(f'Provider {provider_name} is not supported YET.')
         elif provider_name == 'MITRE CVE':
-            logger.error(f'Provider {provider_name} is not supported YET.')
+            logger.warning(f'Provider {provider_name} is not supported YET.')
             for entry in data[provider_name]['entries']:
                 cve_id = entry['id']
                 vuln = cve_to_vuln(cve_id)
                 vulns.append(vuln)
         elif provider_name == 'OSVDB':
-            logger.error(f'Provider {provider_name} is not supported YET.')
+            logger.warning(f'Provider {provider_name} is not supported YET.')
         elif provider_name == 'OpenVAS (Nessus)':
-            logger.error(f'Provider {provider_name} is not supported YET.')
+            logger.warning(f'Provider {provider_name} is not supported YET.')
         elif provider_name == 'SecurityFocus':
-            logger.error(f'Provider {provider_name} is not supported YET.')
+            logger.warning(f'Provider {provider_name} is not supported YET.')
         elif provider_name == 'VulDB':
-            logger.error(f'Provider {provider_name} is not supported YET.')
+            logger.warning(f'Provider {provider_name} is not supported YET.')
         else:
-            logger.error(f'Provider {provider_name} is not supported.')
+            logger.warning(f'Provider {provider_name} is not supported.')

     return vulns

@@ -553,7 +551,7 @@ def cve_to_vuln(cve_id, vuln_type=''):
     """
     cve_info = CVESearch('https://cve.circl.lu').id(cve_id)
     if not cve_info:
-        logger.error(f'Could not fetch CVE info for cve {cve_id}. Skipping.')
+        logger.warning(f'Could not fetch CVE info for cve {cve_id}. Skipping.')
         return None

     vuln_cve_id = cve_info['id']
     vuln_name = vuln_cve_id
@@ -588,7 +586,7 @@ def cve_to_vuln(cve_id, vuln_type=''):
             msg += f'\n\tOSVDB: {id}'
     for exploit_id in exploit_ids:
         msg += f'\n\tEXPLOITDB: {exploit_id}'
-    logger.warning(msg)
+    logger.info(msg)

     return {
         'name': vuln_name,
         'type': vuln_type,
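On the parse_nmap_results hunk above: the raw parser exception is demoted to debug while the user-facing message keeps the traceback via exception(). A standalone sketch of that guard around xmltodict.parse (a real third-party call; the helper name is hypothetical):

    import logging
    import xmltodict  # third-party: pip install xmltodict

    logger = logging.getLogger(__name__)

    def load_nmap_xml(xml_content):  # hypothetical helper name
        """Parse raw nmap XML into a dict; return [] on malformed input."""
        try:
            return xmltodict.parse(xml_content)
        except Exception as e:
            logger.debug(e)  # raw exception detail only at debug level
            logger.exception('Cannot parse nmap XML. Skipping.')
            return []

    print(load_nmap_xml('<nmaprun><host/></nmaprun>'))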
diff --git a/web/reNgine/utils/scan_helpers.py b/web/reNgine/utils/scan_helpers.py
index 50909eda2..8881c3e6b 100644
--- a/web/reNgine/utils/scan_helpers.py
+++ b/web/reNgine/utils/scan_helpers.py
@@ -11,7 +11,7 @@
 from reNgine.definitions import DEFAULT_GF_PATTERNS, FAILED_TASK, FETCH_URL, GF_PATTERNS, RUNNING_TASK, SCHEDULED_SCAN, LIVE_SCAN
 from reNgine.settings import COMMAND_EXECUTOR_DRY_RUN, YAML_CACHE_TIMEOUT
 from reNgine.utils.db import create_scan_object
-from reNgine.utils.logger import Logger
+from reNgine.utils.logger import default_logger as logger
 from reNgine.utils.utils import format_json_output, is_iterable
 from reNgine.utils.formatters import SafePath, fmt_traceback
 from reNgine.utils.task_config import TaskConfig
@@ -29,7 +29,6 @@
 from scanEngine.models import EngineType

-logger = Logger(True)

 def get_scan_engine(engine_id, scan):
     """Get scan engine and log available engines."""
@@ -110,7 +109,7 @@ def initialize_scan_history(scan, domain, engine, scan_type, initiated_by_id, re
         return scan, ctx

     except Exception as e:
-        logger.error(f"Failed to initialize scan: {str(e)} {fmt_traceback(e)}")
+        logger.exception(f"Failed to initialize scan: {str(e)} {fmt_traceback(e)}")

         if scan:
             scan.scan_status = FAILED_TASK
@@ -675,6 +674,6 @@ def execute_grouped_tasks(task_instance, grouped_tasks, task_name="unnamed_task"
         # The post_process callback will handle completion
         return result, result.id
     except Exception as e:
-        logger.error(f'❌ Error executing tasks for {task_name}: {str(e)}')
+        logger.exception(f'❌ Error executing tasks for {task_name}: {str(e)}')
         # Re-raise to let Celery handle the error
         raise
\ No newline at end of file
diff --git a/web/reNgine/utils/task_config.py b/web/reNgine/utils/task_config.py
index 5832aeb06..88fb197b5 100644
--- a/web/reNgine/utils/task_config.py
+++ b/web/reNgine/utils/task_config.py
@@ -97,11 +97,10 @@
 from reNgine.utils.parsers import parse_custom_header
 from reNgine.utils.db import get_random_proxy
 from reNgine.utils.utils import format_json_output, return_iterable
-from reNgine.utils.logger import Logger
+from reNgine.utils.logger import default_logger as logger
 from scanEngine.models import InstalledExternalTool

-logger = Logger(True)

 class TaskConfig:
     """Helper class to manage configuration for scan tasks"""
diff --git a/web/reNgine/utils/utils.py b/web/reNgine/utils/utils.py
index 850d07151..1dd313335 100644
--- a/web/reNgine/utils/utils.py
+++ b/web/reNgine/utils/utils.py
@@ -1,10 +1,9 @@
 import json
 import os
 import glob
-from reNgine.utils.logger import Logger
+from reNgine.utils.logger import default_logger as logger
 import shutil

-logger = Logger(True)

 def is_safe_path(basedir, path, follow_symlinks=True):
     # Source: https://security.openstack.org/guidelines/dg_using-file-paths.html
@@ -113,7 +112,7 @@ def remove_file_or_pattern(path, pattern=None, history_file=None, scan_id=None,
         return True
     except Exception as e:
         full_path = os.path.join(path, pattern) if pattern else path
-        logger.error(f"📁 Failed to delete {full_path}: {str(e)}")
+        logger.exception(f"📁 Failed to delete {full_path}: {str(e)}")
         return False

 def check_process_status(pid):
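All of these files converge on one shared default_logger instead of per-module Logger(...) instances. One common way to expose such a shared instance is a module-level singleton; the sketch below only illustrates the idea, since logger.py's _initialized flag suggests but does not fully show the mechanism:

    # One way to expose a process-wide logger instance (sketch only):
    class Logger:
        _instance = None

        def __new__(cls, *args, **kwargs):
            # Reuse a single instance so every module shares the same handlers
            if cls._instance is None:
                cls._instance = super().__new__(cls)
            return cls._instance

        def info(self, message):
            print(f'INFO     | {message}')

    default_logger = Logger()
    # consumers: from reNgine.utils.logger import default_logger as logger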
diff --git a/web/recon_note/views.py b/web/recon_note/views.py
index 19f0082f5..4de80ee1a 100644
--- a/web/recon_note/views.py
+++ b/web/recon_note/views.py
@@ -5,12 +5,12 @@
 the management of todo notesand related operations.
 """
 import json
-import logging

 from django.http import JsonResponse
 from django.shortcuts import render

 from recon_note.models import TodoNote
+from reNgine.utils.logger import default_logger as logger

 def list_note(request, slug):
     """
@@ -51,7 +51,7 @@ def flip_todo_status(request):
         body_unicode = request.body.decode('utf-8')
         body = json.loads(body_unicode)
     except json.JSONDecodeError as e:
-        logging.error('JSON decode error: %s', e)
+        logger.error(f'JSON decode error: {e}')
         return JsonResponse({'status': False, 'error': 'Invalid JSON.'}, status=400)

     note_id = body.get('id')
@@ -91,7 +91,7 @@ def flip_important_status(request):
         body_unicode = request.body.decode('utf-8')
         body = json.loads(body_unicode)
     except json.JSONDecodeError as e:
-        logging.error('JSON decode error: %s', e)
+        logger.error(f'JSON decode error: {e}')
         return JsonResponse({'status': False, 'error': 'Invalid JSON.'}, status=400)

     note_id = body.get('id')
@@ -130,7 +130,7 @@ def delete_note(request):
         body_unicode = request.body.decode('utf-8')
         body = json.loads(body_unicode)
     except json.JSONDecodeError as e:
-        logging.error('JSON decode error: %s', e)
+        logger.error(f'JSON decode error: {e}')
         return JsonResponse({'status': False, 'error': 'Invalid JSON.'}, status=400)

     note_id = body.get('id')
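Note that these hunks (and the targetApp ones below) also switch from lazy %-style logging to f-strings. The tradeoff: %-style defers string formatting until a handler actually emits the record, while f-strings format eagerly at call time. A two-line illustration:

    import logging

    logging.basicConfig(level=logging.INFO)
    logger = logging.getLogger(__name__)
    domain = 'example.com'

    logger.info('Added new domain %s', domain)  # formatted only if emitted
    logger.info(f'Added new domain {domain}')   # formatted eagerly, always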
diff --git a/web/scanEngine/fixtures/scanEngine.json b/web/scanEngine/fixtures/scanEngine.json
index 030b7b148..bfce7b172 100644
--- a/web/scanEngine/fixtures/scanEngine.json
+++ b/web/scanEngine/fixtures/scanEngine.json
@@ -4,7 +4,7 @@
     "pk": 1,
     "fields": {
       "engine_name": "Full Scan",
-      "yaml_configuration": "subdomain_discovery: {\r\n 'uses_tools': ['subfinder', 'ctfr', 'sublist3r', 'tlsx', 'oneforall', 'netlas'],\r\n 'enable_http_crawl': true,\r\n 'threads': 30,\r\n 'timeout': 5,\r\n}\r\nhttp_crawl: {}\r\nport_scan: {\r\n 'enable_http_crawl': true,\r\n 'timeout': 5,\r\n # 'exclude_ports': [],\r\n # 'exclude_subdomains': [],\r\n 'ports': ['top-100'],\r\n 'rate_limit': 150,\r\n 'threads': 30,\r\n 'passive': false,\r\n # 'use_naabu_config': false,\r\n # 'enable_nmap': true,\r\n # 'nmap_cmd': '',\r\n # 'nmap_script': '',\r\n # 'nmap_script_args': ''\r\n}\r\nosint: {\r\n 'discover': [\r\n 'emails',\r\n 'metainfo',\r\n 'employees'\r\n ],\r\n 'dorks': [\r\n 'login_pages',\r\n 'admin_panels',\r\n 'dashboard_pages',\r\n 'stackoverflow',\r\n 'social_media',\r\n 'project_management',\r\n 'code_sharing',\r\n 'config_files',\r\n 'jenkins',\r\n 'wordpress_files',\r\n 'php_error',\r\n 'exposed_documents',\r\n 'db_files',\r\n 'git_exposed'\r\n ],\r\n 'intensity': 'normal',\r\n 'documents_limit': 50\r\n}\r\ndir_file_fuzz: {\r\n 'auto_calibration': true,\r\n 'enable_http_crawl': true,\r\n 'rate_limit': 150,\r\n 'extensions': [],\r\n 'follow_redirect': false,\r\n 'max_time': 0,\r\n 'match_http_status': [200, 204],\r\n 'recursive_level': 0,\r\n 'stop_on_error': false,\r\n 'timeout': 5,\r\n 'threads': 30,\r\n 'wordlist_name': 'default', # fuzz-Bo0oM\r\n}\r\nfetch_url: {\r\n 'uses_tools': ['gospider', 'hakrawler', 'waybackurls', 'katana', 'gau'],\r\n 'remove_duplicate_endpoints': true,\r\n 'duplicate_fields': ['content_length', 'page_title'],\r\n 'follow_redirect': false,\r\n 'enable_http_crawl': true,\r\n 'gf_patterns': ['debug_logic', 'idor', 'interestingEXT', 'interestingparams', 'interestingsubs', 'lfi', 'rce', 'redirect', 'sqli', 'ssrf', 'ssti', 'xss'],\r\n 'ignore_file_extensions': ['png', 'jpg', 'jpeg', 'gif', 'mp4', 'mpeg', 'mp3'],\r\n 'threads': 30\r\n}\r\nvulnerability_scan: {\r\n 'run_nuclei': true,\r\n 'run_dalfox': true,\r\n 'run_crlfuzz': true,\r\n 'enable_http_crawl': true,\r\n 'concurrency': 50,\r\n 'intensity': 'normal',\r\n 'rate_limit': 150,\r\n 'retries': 1,\r\n 'timeout': 5,\r\n 'fetch_gpt_report': true,\r\n 'nuclei': {\r\n 'use_nuclei_config': false,\r\n 'severities': ['unknown', 'info', 'low', 'medium', 'high', 'critical']\r\n }\r\n}\r\nwaf_detection: {\r\n\r\n}\r\nscreenshot: {\r\n 'enable_http_crawl': true,\r\n 'intensity': 'normal',\r\n 'timeout': 10,\r\n 'threads': 40\r\n}\r\n\r\n# custom_header: \"Cookie: Test\"",
+      "yaml_configuration": "subdomain_discovery: {\r\n 'uses_tools': ['subfinder', 'ctfr', 'sublist3r', 'tlsx', 'oneforall', 'netlas'],\r\n 'enable_http_crawl': true,\r\n 'threads': 30,\r\n 'timeout': 5,\r\n}\r\nhttp_crawl: {}\r\nport_scan: {\r\n 'enable_http_crawl': true,\r\n 'timeout': 5,\r\n # 'exclude_ports': [],\r\n # 'exclude_subdomains': [],\r\n 'ports': ['top-100'],\r\n 'rate_limit': 150,\r\n 'threads': 30,\r\n 'passive': false,\r\n # 'use_naabu_config': false,\r\n # 'nmap_enabled': true,\r\n # 'nmap_cmd': '',\r\n # 'nmap_script': '',\r\n # 'nmap_script_args': ''\r\n}\r\nosint: {\r\n 'discover': [\r\n 'emails',\r\n 'metainfo',\r\n 'employees'\r\n ],\r\n 'dorks': [\r\n 'login_pages',\r\n 'admin_panels',\r\n 'dashboard_pages',\r\n 'stackoverflow',\r\n 'social_media',\r\n 'project_management',\r\n 'code_sharing',\r\n 'config_files',\r\n 'jenkins',\r\n 'wordpress_files',\r\n 'php_error',\r\n 'exposed_documents',\r\n 'db_files',\r\n 'git_exposed'\r\n ],\r\n 'intensity': 'normal',\r\n 'documents_limit': 50\r\n}\r\ndir_file_fuzz: {\r\n 'auto_calibration': true,\r\n 'enable_http_crawl': true,\r\n 'rate_limit': 150,\r\n 'extensions': [],\r\n 'follow_redirect': false,\r\n 'max_time': 0,\r\n 'match_http_status': [200, 204],\r\n 'recursive_level': 0,\r\n 'stop_on_error': false,\r\n 'timeout': 5,\r\n 'threads': 30,\r\n 'wordlist_name': 'default', # fuzz-Bo0oM\r\n}\r\nfetch_url: {\r\n 'uses_tools': ['gospider', 'hakrawler', 'waybackurls', 'katana', 'gau'],\r\n 'remove_duplicate_endpoints': true,\r\n 'duplicate_fields': ['content_length', 'page_title'],\r\n 'follow_redirect': false,\r\n 'enable_http_crawl': true,\r\n 'gf_patterns': ['debug_logic', 'idor', 'interestingEXT', 'interestingparams', 'interestingsubs', 'lfi', 'rce', 'redirect', 'sqli', 'ssrf', 'ssti', 'xss'],\r\n 'ignore_file_extensions': ['png', 'jpg', 'jpeg', 'gif', 'mp4', 'mpeg', 'mp3'],\r\n 'threads': 30\r\n}\r\nvulnerability_scan: {\r\n 'run_nuclei': true,\r\n 'run_dalfox': true,\r\n 'run_crlfuzz': true,\r\n 'enable_http_crawl': true,\r\n 'concurrency': 50,\r\n 'intensity': 'normal',\r\n 'rate_limit': 150,\r\n 'retries': 1,\r\n 'timeout': 5,\r\n 'fetch_gpt_report': true,\r\n 'nuclei': {\r\n 'use_nuclei_config': false,\r\n 'severities': ['unknown', 'info', 'low', 'medium', 'high', 'critical']\r\n }\r\n}\r\nwaf_detection: {\r\n\r\n}\r\nscreenshot: {\r\n 'enable_http_crawl': true,\r\n 'intensity': 'normal',\r\n 'timeout': 10,\r\n 'threads': 40\r\n}\r\n\r\n# custom_header: \"Cookie: Test\"",
       "default_engine": true
     }
   },
@@ -40,7 +40,7 @@
     "pk": 5,
     "fields": {
       "engine_name": "Port Scan",
-      "yaml_configuration": "http_crawl: {}\r\nport_scan: {\r\n 'enable_http_crawl': true,\r\n 'timeout': 5,\r\n # 'exclude_ports': [],\r\n # 'exclude_subdomains': [],\r\n 'ports': ['top-100'],\r\n 'rate_limit': 150,\r\n 'threads': 30,\r\n 'passive': false,\r\n # 'use_naabu_config': false,\r\n # 'enable_nmap': true,\r\n # 'nmap_cmd': '',\r\n # 'nmap_script': '',\r\n # 'nmap_script_args': ''\r\n}",
+      "yaml_configuration": "http_crawl: {}\r\nport_scan: {\r\n 'enable_http_crawl': true,\r\n 'timeout': 5,\r\n # 'exclude_ports': [],\r\n # 'exclude_subdomains': [],\r\n 'ports': ['top-100'],\r\n 'rate_limit': 150,\r\n 'threads': 30,\r\n 'passive': false,\r\n # 'use_naabu_config': false,\r\n # 'nmap_enabled': true,\r\n # 'nmap_cmd': '',\r\n # 'nmap_script': '',\r\n # 'nmap_script_args': ''\r\n}",
       "default_engine": true
     }
   },
@@ -58,7 +58,7 @@
     "pk": 7,
     "fields": {
       "engine_name": "Full (perso)",
-      "yaml_configuration": "# Global vars for all tools\r\n#\r\n# Custom header - FFUF, Nuclei, Dalfox, CRL Fuzz, HTTPx, Fetch URL (Hakrawler, Katana, Gospider)\r\n# custom_header: {\r\n# 'Cookie':'Test',\r\n# 'User-Agent': 'Mozilla/5.0',\r\n# 'Custom-Header': 'My custom header'\r\n# }\r\n# 'user_agent': '' # Dalfox only\r\n# 'enable_http_crawl': true # All tools\r\n# 'timeout': 10 # Subdomain discovery, Screenshot, Port scan, FFUF, Nuclei \r\n# 'threads': 30 # All tools\r\n# 'rate_limit': 150 # Port scan, FFUF, Nuclei\r\n# 'intensity': 'normal' # Screenshot (grab only the root endpoints of each subdomain), Nuclei (reduce number of endpoints to scan), OSINT (not implemented yet)\r\n# 'retries': 1 # Nuclei\r\n\r\nsubdomain_discovery: {\r\n 'uses_tools': ['subfinder', 'ctfr', 'sublist3r', 'tlsx', 'oneforall', 'netlas'], # amass-passive, amass-active, All\r\n 'enable_http_crawl': true,\r\n 'threads': 30,\r\n 'timeout': 5,\r\n # 'use_subfinder_config': false,\r\n # 'use_amass_config': false,\r\n # 'amass_wordlist': 'deepmagic.com-prefixes-top50000'\r\n}\r\nhttp_crawl: {\r\n # 'custom_header': {\r\n # 'Cookie':'Test',\r\n # 'User-Agent': 'Mozilla/5.0'\r\n # },\r\n # 'threads': 30,\r\n # 'follow_redirect': false\r\n}\r\nport_scan: {\r\n 'enable_http_crawl': true,\r\n 'timeout': 5,\r\n # 'exclude_ports': [],\r\n # 'exclude_subdomains': [],\r\n 'ports': ['top-100'],\r\n 'rate_limit': 150,\r\n 'threads': 30,\r\n 'passive': false,\r\n # 'use_naabu_config': false,\r\n # 'enable_nmap': true,\r\n # 'nmap_cmd': '',\r\n # 'nmap_script': '',\r\n # 'nmap_script_args': ''\r\n}\r\nosint: {\r\n 'discover': [\r\n 'emails',\r\n 'metainfo',\r\n 'employees'\r\n ],\r\n 'dorks': [\r\n 'login_pages',\r\n 'admin_panels',\r\n 'dashboard_pages',\r\n 'stackoverflow',\r\n 'social_media',\r\n 'project_management',\r\n 'code_sharing',\r\n 'config_files',\r\n 'jenkins',\r\n 'wordpress_files',\r\n 'php_error',\r\n 'exposed_documents',\r\n 'db_files',\r\n 'git_exposed'\r\n ],\r\n # 'custom_dorks': [],\r\n 'intensity': 'normal',\r\n 'documents_limit': 50\r\n}\r\ndir_file_fuzz: {\r\n # 'custom_header': {\r\n # 'Cookie':'Test',\r\n # 'User-Agent': 'Mozilla/5.0',\r\n # 'Custom-Header': 'My custom header'\r\n # },\r\n 'auto_calibration': true,\r\n 'enable_http_crawl': true,\r\n 'rate_limit': 150,\r\n 'extensions': [],\r\n 'follow_redirect': false,\r\n 'max_time': 0,\r\n 'match_http_status': [200, 204],\r\n 'recursive_level': 0,\r\n 'stop_on_error': false,\r\n 'timeout': 5,\r\n 'threads': 30,\r\n 'wordlist_name': 'default', # fuzz-Bo0oM,\r\n}\r\nfetch_url: {\r\n # 'custom_header': {\r\n # 'Cookie':'Test',\r\n # 'User-Agent': 'Mozilla/5.0',\r\n # 'Custom-Header': 'My custom header'\r\n # },\r\n 'uses_tools': ['gospider', 'hakrawler', 'waybackurls', 'katana', 'gau'],\r\n 'remove_duplicate_endpoints': true,\r\n 'duplicate_fields': ['content_length', 'page_title'],\r\n 'follow_redirect': false,\r\n 'enable_http_crawl': true,\r\n 'gf_patterns': ['debug_logic', 'idor', 'interestingEXT', 'interestingparams', 'interestingsubs', 'lfi', 'rce', 'redirect', 'sqli', 'ssrf', 'ssti', 'xss'],\r\n 'ignore_file_extensions': ['png', 'jpg', 'jpeg', 'gif', 'mp4', 'mpeg', 'mp3'],\r\n 'threads': 30,\r\n # 'exclude_subdomains': false\r\n}\r\nvulnerability_scan: {\r\n # 'custom_header': {\r\n # 'Cookie':'Test',\r\n # 'User-Agent': 'Mozilla/5.0',\r\n # 'Custom-Header': 'My custom header'\r\n # },\r\n 'run_nuclei': true,\r\n 'run_dalfox': false,\r\n 'run_crlfuzz': false,\r\n 'run_s3scanner': false,\r\n 'enable_http_crawl': true,\r\n 'concurrency': 50,\r\n 'intensity': 'normal',\r\n 'rate_limit': 150,\r\n 'retries': 1,\r\n 'timeout': 5,\r\n 'fetch_gpt_report': true,\r\n 'nuclei': {\r\n 'use_nuclei_config': false,\r\n 'severities': ['unknown', 'info', 'low', 'medium', 'high', 'critical'],\r\n # 'tags': [], # Nuclei tags (https://github.com/projectdiscovery/nuclei-templates)\r\n # 'templates': [], # Nuclei templates (https://github.com/projectdiscovery/nuclei-templates)\r\n # 'custom_templates': [] # Nuclei custom templates uploaded in reNgine\r\n }\r\n}\r\nwaf_detection: {\r\n 'enable_http_crawl': true\r\n}\r\nscreenshot: {\r\n 'enable_http_crawl': true,\r\n 'intensity': 'normal',\r\n 'timeout': 10,\r\n 'threads': 40\r\n}",
+      "yaml_configuration": "# Global vars for all tools\r\n#\r\n# Custom header - FFUF, Nuclei, Dalfox, CRL Fuzz, HTTPx, Fetch URL (Hakrawler, Katana, Gospider)\r\n# custom_header: {\r\n# 'Cookie':'Test',\r\n# 'User-Agent': 'Mozilla/5.0',\r\n# 'Custom-Header': 'My custom header'\r\n# }\r\n# 'user_agent': '' # Dalfox only\r\n# 'enable_http_crawl': true # All tools\r\n# 'timeout': 10 # Subdomain discovery, Screenshot, Port scan, FFUF, Nuclei \r\n# 'threads': 30 # All tools\r\n# 'rate_limit': 150 # Port scan, FFUF, Nuclei\r\n# 'intensity': 'normal' # Screenshot (grab only the root endpoints of each subdomain), Nuclei (reduce number of endpoints to scan), OSINT (not implemented yet)\r\n# 'retries': 1 # Nuclei\r\n\r\nsubdomain_discovery: {\r\n 'uses_tools': ['subfinder', 'ctfr', 'sublist3r', 'tlsx', 'oneforall', 'netlas'], # amass-passive, amass-active, All\r\n 'enable_http_crawl': true,\r\n 'threads': 30,\r\n 'timeout': 5,\r\n # 'use_subfinder_config': false,\r\n # 'use_amass_config': false,\r\n # 'amass_wordlist': 'deepmagic.com-prefixes-top50000'\r\n}\r\nhttp_crawl: {\r\n # 'custom_header': {\r\n # 'Cookie':'Test',\r\n # 'User-Agent': 'Mozilla/5.0'\r\n # },\r\n # 'threads': 30,\r\n # 'follow_redirect': false\r\n}\r\nport_scan: {\r\n 'enable_http_crawl': true,\r\n 'timeout': 5,\r\n # 'exclude_ports': [],\r\n # 'exclude_subdomains': [],\r\n 'ports': ['top-100'],\r\n 'rate_limit': 150,\r\n 'threads': 30,\r\n 'passive': false,\r\n # 'use_naabu_config': false,\r\n # 'nmap_enabled': true,\r\n # 'nmap_cmd': '',\r\n # 'nmap_script': '',\r\n # 'nmap_script_args': ''\r\n}\r\nosint: {\r\n 'discover': [\r\n 'emails',\r\n 'metainfo',\r\n 'employees'\r\n ],\r\n 'dorks': [\r\n 'login_pages',\r\n 'admin_panels',\r\n 'dashboard_pages',\r\n 'stackoverflow',\r\n 'social_media',\r\n 'project_management',\r\n 'code_sharing',\r\n 'config_files',\r\n 'jenkins',\r\n 'wordpress_files',\r\n 'php_error',\r\n 'exposed_documents',\r\n 'db_files',\r\n 'git_exposed'\r\n ],\r\n # 'custom_dorks': [],\r\n 'intensity': 'normal',\r\n 'documents_limit': 50\r\n}\r\ndir_file_fuzz: {\r\n # 'custom_header': {\r\n # 'Cookie':'Test',\r\n # 'User-Agent': 'Mozilla/5.0',\r\n # 'Custom-Header': 'My custom header'\r\n # },\r\n 'auto_calibration': true,\r\n 'enable_http_crawl': true,\r\n 'rate_limit': 150,\r\n 'extensions': [],\r\n 'follow_redirect': false,\r\n 'max_time': 0,\r\n 'match_http_status': [200, 204],\r\n 'recursive_level': 0,\r\n 'stop_on_error': false,\r\n 'timeout': 5,\r\n 'threads': 30,\r\n 'wordlist_name': 'default', # fuzz-Bo0oM,\r\n}\r\nfetch_url: {\r\n # 'custom_header': {\r\n # 'Cookie':'Test',\r\n # 'User-Agent': 'Mozilla/5.0',\r\n # 'Custom-Header': 'My custom header'\r\n # },\r\n 'uses_tools': ['gospider', 'hakrawler', 'waybackurls', 'katana', 'gau'],\r\n 'remove_duplicate_endpoints': true,\r\n 'duplicate_fields': ['content_length', 'page_title'],\r\n 'follow_redirect': false,\r\n 'enable_http_crawl': true,\r\n 'gf_patterns': ['debug_logic', 'idor', 'interestingEXT', 'interestingparams', 'interestingsubs', 'lfi', 'rce', 'redirect', 'sqli', 'ssrf', 'ssti', 'xss'],\r\n 'ignore_file_extensions': ['png', 'jpg', 'jpeg', 'gif', 'mp4', 'mpeg', 'mp3'],\r\n 'threads': 30,\r\n # 'exclude_subdomains': false\r\n}\r\nvulnerability_scan: {\r\n # 'custom_header': {\r\n # 'Cookie':'Test',\r\n # 'User-Agent': 'Mozilla/5.0',\r\n # 'Custom-Header': 'My custom header'\r\n # },\r\n 'run_nuclei': true,\r\n 'run_dalfox': false,\r\n 'run_crlfuzz': false,\r\n 'run_s3scanner': false,\r\n 'enable_http_crawl': true,\r\n 'concurrency': 50,\r\n 'intensity': 'normal',\r\n 'rate_limit': 150,\r\n 'retries': 1,\r\n 'timeout': 5,\r\n 'fetch_gpt_report': true,\r\n 'nuclei': {\r\n 'use_nuclei_config': false,\r\n 'severities': ['unknown', 'info', 'low', 'medium', 'high', 'critical'],\r\n # 'tags': [], # Nuclei tags (https://github.com/projectdiscovery/nuclei-templates)\r\n # 'templates': [], # Nuclei templates (https://github.com/projectdiscovery/nuclei-templates)\r\n # 'custom_templates': [] # Nuclei custom templates uploaded in reNgine\r\n }\r\n}\r\nwaf_detection: {\r\n 'enable_http_crawl': true\r\n}\r\nscreenshot: {\r\n 'enable_http_crawl': true,\r\n 'intensity': 'normal',\r\n 'timeout': 10,\r\n 'threads': 40\r\n}",
       "default_engine": false
     }
   },
diff --git a/web/startScan/migrations/0059_auto_20250216_1450.py b/web/startScan/migrations/0059_auto_20250216_1450.py
index da4a431ad..9a568bbc4 100644
--- a/web/startScan/migrations/0059_auto_20250216_1450.py
+++ b/web/startScan/migrations/0059_auto_20250216_1450.py
@@ -2,7 +2,6 @@

 from django.db import migrations, models
 import django.db.models.deletion
-import logging


 def migrate_ports_to_ip(apps, schema_editor):
@@ -11,7 +10,8 @@ def migrate_ports_to_ip(apps, schema_editor):
     IPAddress = apps.get_model('startScan', 'IPAddress')
     # Explicit retrieval of the intermediate model
     IPPortM2M = apps.get_model('startScan', 'ipaddress_ports')
-    logger = logging.getLogger(__name__)
+    from reNgine.utils.logger import default_logger as logger
+

     logger.info(f"Starting port migration for {IPAddress.objects.count()} IP addresses")
diff --git a/web/startScan/signals.py b/web/startScan/signals.py
index 8dc793c52..74361ce0a 100644
--- a/web/startScan/signals.py
+++ b/web/startScan/signals.py
@@ -2,9 +2,8 @@
 from django.dispatch import receiver
 from .models import Subdomain, IpAddress
 from django.db import transaction
-import logging
+from reNgine.utils.logger import default_logger as logger

-logger = logging.getLogger(__name__)

 @receiver(pre_delete, sender=Subdomain)
 def handle_subdomain_deletion(sender, instance, **kwargs):
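The migration hunk pulls its logger in locally, inside the RunPython function body. For reference, data migrations must fetch historical models through apps.get_model() rather than importing them directly; a minimal skeleton of that pattern (the dependency name below is hypothetical):

    from django.db import migrations

    def migrate_ports_to_ip(apps, schema_editor):
        # Historical models must come from the app registry, not direct
        # imports, so the code sees the schema as of this migration.
        IPAddress = apps.get_model('startScan', 'IPAddress')
        for ip in IPAddress.objects.all():
            pass  # per-row data fixes would go here

    class Migration(migrations.Migration):
        dependencies = [('startScan', '0058_previous')]  # hypothetical name
        operations = [
            migrations.RunPython(migrate_ports_to_ip, migrations.RunPython.noop),
        ]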
diff --git a/web/startScan/views.py b/web/startScan/views.py
index 5d8467fc7..a9a170196 100644
--- a/web/startScan/views.py
+++ b/web/startScan/views.py
@@ -19,7 +19,7 @@
 from api.serializers import IpSerializer
 from reNgine.celery import app
 from reNgine.utils.db import create_scan_object, create_scan_activity, get_interesting_subdomains
-from reNgine.utils.logger import Logger
+from reNgine.utils.logger import default_logger as logger
 from reNgine.utils.utils import format_json_output, safe_int_cast
 from reNgine.settings import RENGINE_RESULTS
 from reNgine.definitions import ABORTED_TASK, SUCCESS_TASK, RUNNING_TASK, LIVE_SCAN, SCHEDULED_SCAN, PERM_INITATE_SCANS_SUBSCANS, PERM_MODIFY_SCAN_RESULTS, PERM_MODIFY_SCAN_REPORT, PERM_MODIFY_SYSTEM_CONFIGURATIONS, FOUR_OH_FOUR_URL
@@ -30,7 +30,6 @@
 from targetApp.models import Domain, Organization
 from reNgine.utils.command_builder import CommandBuilder

-logger = Logger(True)

 def scan_history(request, slug):
     host = ScanHistory.objects.filter(domain__project__slug=slug).order_by('-start_scan_date')
diff --git a/web/targetApp/views.py b/web/targetApp/views.py
index b46007826..35dd62613 100644
--- a/web/targetApp/views.py
+++ b/web/targetApp/views.py
@@ -1,6 +1,5 @@
 import csv
 import io
-import logging
 from datetime import timedelta
 from urllib.parse import urlparse
 import validators
@@ -58,8 +57,8 @@
     UpdateOrganizationForm,
 )
 from reNgine.utils.utils import format_json_output
+from reNgine.utils.logger import default_logger as logger

-logger = logging.getLogger(__name__)

 def index(request):
     """
@@ -94,7 +93,7 @@ def add_target(request, slug):
     if multiple_targets:
         bulk_targets = [t.rstrip() for t in request.POST['addTargets'].split('\n') if t]
         sanitized_targets = [target if isinstance(target, str) and validators.domain(target) else 'Invalid target' for target in bulk_targets]
-        logger.info('Adding multiple targets: %s', sanitized_targets)
+        logger.info(f'Adding multiple targets: {sanitized_targets}')
         description = request.POST.get('targetDescription', '')
         h1_team_handle = request.POST.get('targetH1TeamHandle')
         organization_name = request.POST.get('targetOrganization')
@@ -157,8 +156,7 @@ def add_target(request, slug):
             sanitized_domains = [domain if isinstance(domain, str) and validators.domain(domain) else 'Invalid Domain' for domain in domains]
             sanitized_http_urls = [url if validators.url(url) else 'Invalid URL' for url in http_urls]
             sanitized_ports = [port if isinstance(port, int) else 'Invalid Port' for port in ports]
-            logger.info('IPs: %s | Domains: %s | URLs: %s | Ports: %s',
-                sanitized_ips, sanitized_domains, sanitized_http_urls, sanitized_ports)
+            logger.info(f'IPs: {sanitized_ips} | Domains: {sanitized_domains} | URLs: {sanitized_http_urls} | Ports: {sanitized_ports}')

             for domain_name in domains:
                 if not Domain.objects.filter(name=domain_name).exists():
@@ -172,7 +170,7 @@ def add_target(request, slug):
                     domain.save()
                     added_target_count += 1
                     if created:
-                        logger.info('Added new domain %s', domain.name)
+                        logger.info(f'Added new domain {domain.name}')

                     if organization_name:
                         organization = None
@@ -193,7 +191,7 @@ def add_target(request, slug):
                         target_domain=domain,
                         http_url=http_url)
                     if created:
-                        logger.info('Added new endpoint %s', endpoint.http_url)
+                        logger.info(f'Added new endpoint {endpoint.http_url}')

             for ip_address in ips:
                 ip_data = get_ip_info(ip_address)
@@ -203,7 +201,7 @@
                     ip.version = ip_data.version
                 ip.save()
                 if created:
-                    logger.warning('Added new IP %s', ip)
+                    logger.warning(f'Added new IP {ip}')

             for port_number in ports:
                 port, created = Port.objects.get_or_create(
@@ -211,7 +209,7 @@
                     number=port_number,
                     defaults={'is_uncommon': port_number not in [80, 443, 8080, 8443]}
                 )
                 if created:
-                    logger.warning('Added new port %s', port.number)
+                    logger.warning(f'Added new port {port.number}')

     # Import from txt / csv
     elif 'import-txt-target' in request.POST or 'import-csv-target' in request.POST:
@@ -316,7 +314,7 @@ def add_target(request, slug):
                     domain.save()
                     added_target_count += 1
                     if created:
-                        logger.info('Added new domain %s', domain.name)
+                        logger.info(f'Added new domain {domain.name}')
                 if is_ip:
                     ip_data = get_ip_info(ip)
                     ip, created = IpAddress.objects.get_or_create(address=ip)
@@ -325,7 +323,7 @@ def add_target(request, slug):
                     ip.version = ip_data.version
                     ip.save()
                     if created:
-                        logger.info('Added new IP %s', ip)
+                        logger.info(f'Added new IP {ip}')

     except (Http404, ValueError) as e:
         logger.exception(e)
@@ -388,7 +386,7 @@ def delete_target(request, slug, id):
         )
     except Http404:
         if isinstance(id, int):  # Ensure id is an integer
-            logger.error('Domain not found: %d', id)
+            logger.error(f'Domain not found: {id}')
         else:
             logger.error('Domain not found: Invalid ID provided')
         messages.add_message(
@@ -399,7 +397,7 @@ def delete_target(request, slug, id):
     else:
         valid_methods = ['GET', 'POST', 'PUT', 'DELETE', 'PATCH', 'OPTIONS', 'HEAD']
         if request.method in valid_methods:
-            logger.error('Invalid request method: %s', request.method)
+            logger.error(f'Invalid request method: {request.method}')
         else:
             logger.error('Invalid request method: Unknown method provided')
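The targetApp hunks lean on Django's get_or_create with defaults, so derived fields such as is_uncommon are computed only when the row is inserted, never on lookup. A sketch of that pattern, assuming a configured Django environment and the Port model as used above:

    from startScan.models import Port  # as imported by targetApp/views.py

    def add_port(port_number):
        port, created = Port.objects.get_or_create(
            number=port_number,
            # defaults apply only when the row is inserted, not on lookup
            defaults={'is_uncommon': port_number not in [80, 443, 8080, 8443]},
        )
        if created:
            print(f'Added new port {port.number}')
        return port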
diff --git a/web/tests/test_nmap.py b/web/tests/test_nmap.py
index 6b1f7c298..f3cd6bea7 100644
--- a/web/tests/test_nmap.py
+++ b/web/tests/test_nmap.py
@@ -8,11 +8,10 @@
 os.environ['RENGINE_SECRET_KEY'] = 'secret'
 os.environ['CELERY_ALWAYS_EAGER'] = 'True'

-from celery.utils.log import get_task_logger
+from reNgine.utils.logger import default_logger as logger
 from reNgine.settings import CELERY_DEBUG
 from reNgine.utils.parsers import parse_nmap_results

-logger = get_task_logger(__name__)

 DOMAIN_NAME = os.environ['DOMAIN_NAME']
 FIXTURES_DIR = pathlib.Path().absolute() / 'fixtures' / 'nmap_xml'
diff --git a/web/tests/test_scan.py b/web/tests/test_scan.py
index 2aa654251..fc3c1bd82 100644
--- a/web/tests/test_scan.py
+++ b/web/tests/test_scan.py
@@ -1,10 +1,7 @@
-import json
 import os
 import unittest
 import yaml
 from dotenv import load_dotenv
-from reNgine.settings import CELERY_DEBUG
-from celery.utils.log import get_task_logger
 from scanEngine.models import EngineType
 from django.utils import timezone
 from reNgine.tasks.url import fetch_url
@@ -21,7 +18,6 @@
 os.environ.setdefault('CELERY_ALWAYS_EAGER', os.getenv('CELERY_ALWAYS_EAGER', 'True'))

-logger = get_task_logger(__name__)
 # To pass the DOMAIN_NAME variable when running tests, you can use:
 # DOMAIN_NAME=example.com python3 manage.py test
 # Or set a default value if the environment variable is not defined
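Both test modules set their environment variables before importing anything from reNgine, because settings are read at import time. A sketch of that ordering constraint:

    import os

    # Must happen before importing anything that reads settings at import time
    os.environ['RENGINE_SECRET_KEY'] = 'secret'   # placeholder value
    os.environ['CELERY_ALWAYS_EAGER'] = 'True'    # run Celery tasks inline

    # Only now is it safe to do e.g.:
    # from reNgine.utils.parsers import parse_nmap_results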