refactor: improve logging and task categorization
This commit refactors the logging system and task categorization within the application. The changes improve code organization and readability and add more context to log messages. Specifically, the ANSI color codes are moved to a dedicated Colors class, and task logging now includes color-coded task categories for better visual distinction. Several log messages have also been adjusted to carry more relevant information and to use more appropriate levels: routine task status drops from warning to info, and handlers inside except blocks switch from error to exception. Finally, the docker-compose file is updated to improve container behavior by enabling a TTY (tty: true) on the services.
psyray committed Mar 3, 2025
1 parent 0242c45 commit 2683c9f
Showing 51 changed files with 320 additions and 315 deletions.
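The dedicated Colors class and the shared default_logger described in the commit message live in the reNgine.utils.logger module, which is not among the diffs shown below. A minimal sketch of what such a helper might look like (class layout, color names, and escape codes are assumptions for illustration, not the actual implementation):

import logging

class Colors:
    # ANSI escape codes used to color-code task categories in log output
    # (hypothetical values; the real module may differ).
    RESET = "\033[0m"
    RED = "\033[31m"
    GREEN = "\033[32m"
    YELLOW = "\033[33m"
    CYAN = "\033[36m"

def colorize(text, color):
    # Wrap text in an ANSI color code and reset the terminal color afterwards.
    return f"{color}{text}{Colors.RESET}"

# A single shared logger that modules import instead of calling
# logging.getLogger(__name__) themselves.
default_logger = logging.getLogger("reNgine")

if __name__ == "__main__":
    logging.basicConfig(level=logging.INFO)
    default_logger.info("%s Task waf_detection is RUNNING", colorize("[detect]", Colors.CYAN))

Most of the Python diffs below replace a per-module logger with the shared import, from reNgine.utils.logger import default_logger as logger.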
6 changes: 6 additions & 0 deletions docker/docker-compose.yml
@@ -16,6 +16,7 @@ services:
retries: 5
networks:
- rengine_network
tty: true

redis:
image: ghcr.io/security-tools-alliance/rengine-ng:rengine-redis-v${RENGINE_VERSION}
@@ -31,6 +32,7 @@ services:
retries: 5
networks:
- rengine_network
tty: true

celery:
image: ghcr.io/security-tools-alliance/rengine-ng:rengine-celery-v${RENGINE_VERSION}
@@ -64,6 +66,7 @@ services:
condition: service_healthy
networks:
- rengine_network
tty: true

celery-beat:
image: ghcr.io/security-tools-alliance/rengine-ng:rengine-celery-v${RENGINE_VERSION}
@@ -90,6 +93,7 @@ services:
- wordlist:/home/rengine/wordlists
networks:
- rengine_network
tty: true

web:
image: ghcr.io/security-tools-alliance/rengine-ng:rengine-web-v${RENGINE_VERSION}
@@ -126,6 +130,7 @@ services:
rengine_network:
aliases:
- rengine
tty: true

proxy:
image: ghcr.io/security-tools-alliance/rengine-ng:rengine-proxy-v${RENGINE_VERSION}
@@ -156,6 +161,7 @@ services:
ports:
- 8082:8082/tcp
- 443:443/tcp
tty: true

ollama:
image: ghcr.io/security-tools-alliance/rengine-ng:rengine-ollama-v${RENGINE_VERSION}
8 changes: 3 additions & 5 deletions web/api/views.py
@@ -1,4 +1,3 @@
import logging
import re
import os.path
from pathlib import Path
@@ -25,6 +24,7 @@

from recon_note.models import TodoNote
from reNgine.celery import app
from reNgine.utils.logger import default_logger as logger
from reNgine.utils.db import (
get_lookup_keywords,
)
@@ -126,8 +126,6 @@
VulnerabilitySerializer
)

logger = logging.getLogger(__name__)


class OllamaManager(APIView):
def get(self, request):
@@ -848,7 +846,7 @@ def post(self, request):
SUCCESS_TASK)
response['status'] = True
except Exception as e:
logging.error(e)
logger.error(e)
response = {'status': False, 'message': str(e)}
elif scan_id:
try:
@@ -864,7 +862,7 @@ def post(self, request):
SUCCESS_TASK)
response['status'] = True
except Exception as e:
logging.error(e)
logger.error(e)
response = {'status': False, 'message': str(e)}

logger.warning(f'Revoking tasks {task_ids}')
3 changes: 1 addition & 2 deletions web/dashboard/views.py
@@ -1,5 +1,4 @@
import json
import logging

from datetime import timedelta

@@ -25,8 +24,8 @@
from dashboard.models import Project, OpenAiAPIKey, NetlasAPIKey
from dashboard.forms import ProjectForm
from reNgine.definitions import PERM_MODIFY_SYSTEM_CONFIGURATIONS, FOUR_OH_FOUR_URL
from reNgine.utils.logger import default_logger as logger

logger = logging.getLogger(__name__)

def index(request, slug):
try:
19 changes: 8 additions & 11 deletions web/reNgine/celery_custom_task.py
@@ -2,7 +2,6 @@
import json

from celery import Task
from celery.utils.log import get_task_logger
from celery.worker.request import Request
from django.utils import timezone
from redis import Redis
@@ -25,13 +24,12 @@
get_task_cache_key,
get_traceback_path
)
from reNgine.utils.logger import default_logger as logger
from reNgine.utils.utils import format_json_output

from scanEngine.models import EngineType
from startScan.models import ScanActivity, ScanHistory, SubScan

logger = get_task_logger(__name__)

cache = None
if 'CELERY_BROKER' in os.environ:
cache = Redis.from_url(os.environ['CELERY_BROKER'])
@@ -75,7 +73,6 @@ def __call__(self, *args, **kwargs):
# Get task info
self.task_name = self.name.split('.')[-1]
self.description = kwargs.get('description') or ' '.join(self.task_name.split('_')).capitalize()
logger = get_task_logger(self.task_name)

# Get reNgine context
ctx = kwargs.get('ctx', {})
@@ -135,9 +132,9 @@ def __call__(self, *args, **kwargs):
# Create ScanActivity for this task and send start scan notifs
if self.track:
if self.domain:
logger.warning(f'Task {self.task_name} for {self.subdomain.name if self.subdomain else self.domain.name} is RUNNING')
logger.info(f'Task {self.task_name} for {self.subdomain.name if self.subdomain else self.domain.name} is RUNNING')
else:
logger.warning(f'Task {self.task_name} is RUNNING')
logger.info(f'Task {self.task_name} is RUNNING')
self.create_scan_activity()

if RENGINE_CACHE_ENABLED:
@@ -148,9 +145,9 @@ def __call__(self, *args, **kwargs):
self.status = SUCCESS_TASK
if RENGINE_RECORD_ENABLED and self.track:
if self.domain:
logger.warning(f'Task {self.task_name} for {self.subdomain.name if self.subdomain else self.domain.name} status is SUCCESS (CACHED)')
logger.info(f'Task {self.task_name} for {self.subdomain.name if self.subdomain else self.domain.name} status is SUCCESS (CACHED)')
else:
logger.warning(f'Task {self.task_name} status is SUCCESS (CACHED)')
logger.info(f'Task {self.task_name} status is SUCCESS (CACHED)')
self.update_scan_activity()
return json.loads(result)

@@ -186,7 +183,7 @@ def __call__(self, *args, **kwargs):
else:
msg = f'Task {self.task_name} status is {self.status_str}'
msg += f' | Error: {self.error}' if self.error else ''
logger.warning(msg)
logger.info(msg)
self.update_scan_activity()

# Set task result in cache if task was successful
@@ -208,7 +205,7 @@ def write_results(self):
json.dump(self.result, f, indent=4)
else:
f.write(self.result)
logger.warning(f'Wrote {self.task_name} results to {self.output_path}')
logger.info(f'Wrote {self.task_name} results to {self.output_path}')

def create_scan_activity(self):
if not self.track:
@@ -292,7 +289,7 @@ def get_from_cache(self, *args, **kwargs):
if target:
msg += f' for {target}'
msg += ' status is SUCCESS (CACHED)'
logger.warning(msg)
logger.info(msg)
self.update_scan_activity()
return json.loads(result)
return None
5 changes: 2 additions & 3 deletions web/reNgine/common_views.py
@@ -3,9 +3,8 @@
from django.template import RequestContext
from django.utils.module_loading import import_string

import logging

logger = logging.getLogger(__name__)
from reNgine.utils.logger import default_logger as logger


def permission_denied(request):
logger.warning(f"Permission denied for user {request.user}")
5 changes: 2 additions & 3 deletions web/reNgine/context_processors.py
@@ -1,9 +1,8 @@
from . import settings
import requests
from django.core.cache import cache
import logging
from reNgine.utils.logger import default_logger as logger

logger = logging.getLogger(__name__)

def version(request):
return {"RENGINE_CURRENT_VERSION": settings.RENGINE_CURRENT_VERSION}
@@ -22,7 +21,7 @@ def misc(request):
# Handle the exception if the request fails
external_ip = 'Unable to retrieve IP' # Default value in case of error
# You can also log the error if necessary
logger.error(f"Error retrieving external IP: {e}")
logger.exception(f"Error retrieving external IP: {e}")

return {
'external_ip': external_ip
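This hunk, and several later ones (gpt.py, detect.py, dns.py), switches the handler from logger.error to logger.exception inside an except block. A quick illustration of the difference, using the plain standard-library logger rather than reNgine's wrapper:

import logging

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)

try:
    1 / 0
except ZeroDivisionError as e:
    # error() records only the message...
    logger.error(f"Error retrieving external IP: {e}")
    # ...while exception() records the same message at ERROR level and
    # appends the active traceback, so it belongs inside an except block.
    logger.exception(f"Error retrieving external IP: {e}")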
5 changes: 2 additions & 3 deletions web/reNgine/gpt.py
@@ -10,9 +10,8 @@
from langchain_community.llms import Ollama

from dashboard.models import OllamaSettings
import logging
from reNgine.utils.logger import default_logger as logger

logger = logging.getLogger(__name__)

class GPTVulnerabilityReportGenerator:

@@ -131,7 +130,7 @@ def get_attack_suggestion(self, input):
'input': input
}
except ValueError as e:
logger.error("Error in get_attack_suggestion: %s", str(e), exc_info=True)
logger.exception(f"Error in get_attack_suggestion: {str(e)}")
return {
'status': False,
'error': "An error occurred while processing your request.",
13 changes: 6 additions & 7 deletions web/reNgine/init.py
@@ -1,8 +1,7 @@
import logging
import secrets
import os
from reNgine.utils.logger import default_logger as logger

logger = logging.getLogger(__name__)


'''
@@ -19,11 +18,11 @@ def first_run(secret_file, base_dir):
else:
try:
secret_key = get_random()
secret = open(secret_file, 'w')
secret.write(secret_key)
secret.close()
except OSError:
raise Exception(f'Secret file generation failed. Path: {secret_file}')
with open(secret_file, 'w') as secret:
secret.write(secret_key)

Check failure (Code scanning / CodeQL): Clear-text storage of sensitive information [High]. This expression stores sensitive data (secret) as clear text.
except OSError as e:
logger.exception(f'Secret file generation failed. Path: {secret_file}')
raise Exception(f'Secret file generation failed. Path: {secret_file}') from e
return secret_key


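The rewritten handler above both logs the failure and re-raises with "raise ... from e", chaining the original OSError onto the new exception. A standalone illustration of that pattern, simplified from the hunk above:

def write_secret(path, value):
    try:
        with open(path, 'w') as secret:
            secret.write(value)
    except OSError as e:
        # The OSError becomes __cause__ of the new exception, so the traceback
        # shows both the low-level failure and this higher-level one.
        raise Exception(f'Secret file generation failed. Path: {path}') from e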
2 changes: 2 additions & 0 deletions web/reNgine/settings.py
@@ -237,6 +237,8 @@ class SensitiveDataFilter(logging.Filter):
def filter(self, record):
sensitive_keys = [
NETLAS_API_KEY,
os.environ.get('AWS_ACCESS_KEY_ID'),
os.environ.get('AWS_SECRET_ACCESS_KEY')
]

for key in filter(None, sensitive_keys):
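The hunk above only adds the two AWS keys to sensitive_keys; the rest of SensitiveDataFilter.filter is cut off. A minimal sketch of how such a filter might mask those values in log records (the masking logic below is an assumption for illustration, not the code in settings.py):

import logging
import os

# Stand-in for the settings constant; in settings.py this is defined elsewhere.
NETLAS_API_KEY = os.environ.get('NETLAS_API_KEY')

class SensitiveDataFilter(logging.Filter):
    def filter(self, record):
        sensitive_keys = [
            NETLAS_API_KEY,
            os.environ.get('AWS_ACCESS_KEY_ID'),
            os.environ.get('AWS_SECRET_ACCESS_KEY')
        ]
        # Mask every non-empty secret that appears in the rendered message.
        for key in filter(None, sensitive_keys):
            if key in record.getMessage():
                record.msg = record.getMessage().replace(key, '***REDACTED***')
                record.args = None
        return True  # keep the record; only its content is rewritten

A handler or logger would pick this up with addFilter(SensitiveDataFilter()), typically wired through the LOGGING configuration.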
3 changes: 1 addition & 2 deletions web/reNgine/tasks/command.py
@@ -1,8 +1,7 @@
from reNgine.celery import app
from reNgine.utils.command_executor import run_command

Check notice (Code scanning / CodeQL): Cyclic import [Note]. Import of module reNgine.utils.command_executor begins an import cycle.
from reNgine.utils.logger import Logger
from reNgine.utils.logger import default_logger as logger

logger = Logger(True)

@app.task(name='run_command_line', bind=False, queue='run_command_queue')
def run_command_line(cmd, **kwargs):

Check notice (Code scanning / CodeQL): Explicit returns mixed with implicit (fall through) returns [Note]. Mixing implicit and explicit returns may indicate an error as implicit returns always return None.
15 changes: 6 additions & 9 deletions web/reNgine/tasks/detect.py
@@ -12,7 +12,7 @@
from reNgine.settings import RENGINE_TOOL_PATH
from reNgine.tasks.command import run_command_line
from reNgine.utils.command_builder import CommandBuilder, build_cmsseek_cmd, build_wafw00f_cmd
from reNgine.utils.logger import Logger
from reNgine.utils.logger import default_logger as logger
from reNgine.utils.http import get_subdomain_from_url, prepare_urls_with_fallback
from reNgine.utils.task_config import TaskConfig

@@ -22,9 +22,6 @@
Celery tasks.
"""

logger = Logger(is_task_logger=True) # Use task logger for Celery tasks


@app.task(name='waf_detection', queue='io_queue', base=RengineTask, bind=True)
def waf_detection(self, ctx=None, description=None):

Check notice (Code scanning / CodeQL): Explicit returns mixed with implicit (fall through) returns [Note]. Mixing implicit and explicit returns may indicate an error as implicit returns always return None.
"""
@@ -50,7 +47,7 @@ def waf_detection(self, ctx=None, description=None):
ctx=ctx
)
if not urls:
logger.error('🛡️ No URLs to check for WAF. Skipping.')
logger.warning('🛡️ No URLs to check for WAF. Skipping.')
return

run_command_line.delay(
@@ -92,7 +89,7 @@ def waf_detection(self, ctx=None, description=None):
subdomain.waf.add(waf)
subdomain.save()
except Subdomain.DoesNotExist:
logger.warning(f'🛡️ Subdomain {subdomain_name} was not found in the db, skipping waf detection.')
logger.info(f'🛡️ Subdomain {subdomain_name} was not found in the db, skipping waf detection.')

return wafs

@@ -123,12 +120,12 @@ def run_cmseek(url):
try:
shutil.rmtree(os.path.dirname(json_path))
except Exception as e:
logger.error(f"Error cleaning up CMSeeK results: {e}")
logger.exception(f"Error cleaning up CMSeeK results: {e}")

return result

except Exception as e:
logger.error(f"Error running CMSeeK: {e}")
logger.exception(f"Error running CMSeeK: {e}")
return {'status': False, 'message': str(e)}

@app.task(name='run_wafw00f', bind=False, queue='run_command_queue')
@@ -153,5 +150,5 @@ def run_wafw00f(url):
logger.info("No WAF detected")
return "No WAF detected"
except Exception as e:
logger.error(f"Unexpected error: {e}")
logger.exception(f"Unexpected error: {e}")
return f"Unexpected error: {str(e)}"
11 changes: 5 additions & 6 deletions web/reNgine/tasks/dns.py
@@ -2,7 +2,7 @@
from django.utils import timezone

from reNgine.celery import app
from reNgine.utils.logger import Logger
from reNgine.utils.logger import default_logger as logger
from reNgine.utils.dns import (
get_domain_info_from_db,
get_domain_historical_ip_address,
@@ -17,7 +17,6 @@
Registrar,
)

logger = Logger(True)

@app.task(name='query_whois', bind=False, queue='io_queue')
def query_whois(ip_domain, force_reload_whois=False):
@@ -51,23 +50,23 @@ def query_whois(ip_domain, force_reload_whois=False):
historical_ips = get_domain_historical_ip_address(ip_domain)
domain_info.historical_ips = historical_ips
except Exception as e:
logger.error(f'HistoricalIP for {ip_domain} not found!\nError: {str(e)}')
logger.exception(f'HistoricalIP for {ip_domain} not found!\nError: {str(e)}')
historical_ips = []

Check notice (Code scanning / CodeQL): Unused local variable [Note]. Variable historical_ips is not used.

# Step 2: Find associated domains using reverse whois
try:
related_domains = reverse_whois(ip_domain.split('.')[0])
domain_info.related_domains = [domain['name'] for domain in related_domains]
except Exception as e:
logger.error(f'Associated domain not found for {ip_domain}\nError: {str(e)}')
logger.exception(f'Associated domain not found for {ip_domain}\nError: {str(e)}')
domain_info.related_domains = []

# Step 3: Find related TLDs
try:
related_tlds = find_related_tlds(ip_domain)
domain_info.related_tlds = related_tlds
except Exception as e:
logger.error(f'Related TLDs not found for {ip_domain}\nError: {str(e)}')
logger.exception(f'Related TLDs not found for {ip_domain}\nError: {str(e)}')
domain_info.related_tlds = []

# Step 4: Execute WHOIS
@@ -76,7 +75,7 @@ def query_whois(ip_domain, force_reload_whois=False):
# Update domain_info with whois data
domain_info.update(whois_data)
except Exception as e:
logger.error(f'Error executing whois for {ip_domain}\nError: {str(e)}')
logger.exception(f'Error executing whois for {ip_domain}\nError: {str(e)}')

# Step 5: Save information to database if we have a domain object
if domain:
