diff --git a/.gitignore b/.gitignore
index 4089315..594bbbf 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,5 +1,7 @@
# Logs
logs
+!apps/api/src/domains/logs
+!apps/web/src/components/pages/Logs.tsx
*.log
npm-debug.log*
yarn-debug.log*
diff --git a/apps/api/src/domains/access-lists/dto/access-lists.dto.ts b/apps/api/src/domains/access-lists/dto/access-lists.dto.ts
index b4ad665..7d9574c 100644
--- a/apps/api/src/domains/access-lists/dto/access-lists.dto.ts
+++ b/apps/api/src/domains/access-lists/dto/access-lists.dto.ts
@@ -1,4 +1,5 @@
import { body, param, query } from 'express-validator';
+import { isValidIpOrCidr } from '../../acl/utils/validators';
/**
* Validation rules for creating an access list
@@ -38,8 +39,13 @@ export const createAccessListValidation = [
body('allowedIps.*')
.optional()
.trim()
- .matches(/^(\d{1,3}\.){3}\d{1,3}(\/\d{1,2})?$/)
- .withMessage('Each IP must be a valid IPv4 address or CIDR notation'),
+ .custom((value) => {
+ if (!value) return true;
+ if (!isValidIpOrCidr(value)) {
+ throw new Error('Invalid IP address or CIDR notation. Examples: 192.168.1.1 or 192.168.1.0/24');
+ }
+ return true;
+ }),
body('authUsers')
.optional()
@@ -125,8 +131,13 @@ export const updateAccessListValidation = [
body('allowedIps.*')
.optional()
.trim()
- .matches(/^(\d{1,3}\.){3}\d{1,3}(\/\d{1,2})?$/)
- .withMessage('Each IP must be a valid IPv4 address or CIDR notation'),
+ .custom((value) => {
+ if (!value) return true;
+ if (!isValidIpOrCidr(value)) {
+ throw new Error('Invalid IP address or CIDR notation. Examples: 192.168.1.1 or 192.168.1.0/24');
+ }
+ return true;
+ }),
body('authUsers')
.optional()
diff --git a/apps/api/src/domains/acl/acl.controller.ts b/apps/api/src/domains/acl/acl.controller.ts
index a2e214a..26d3915 100644
--- a/apps/api/src/domains/acl/acl.controller.ts
+++ b/apps/api/src/domains/acl/acl.controller.ts
@@ -181,6 +181,31 @@ export class AclController {
}
}
+ /**
+ * Preview ACL configuration without applying
+ * @route GET /api/acl/preview
+ */
+ async previewAclConfig(req: Request, res: Response): Promise<void> {
+ try {
+ const config = await aclService.previewNginxConfig();
+
+ res.json({
+ success: true,
+ data: {
+ config,
+ rulesCount: await aclService.getEnabledRulesCount()
+ }
+ });
+ } catch (error: any) {
+ logger.error('Failed to preview ACL config:', error);
+ res.status(500).json({
+ success: false,
+ message: 'Failed to preview ACL configuration',
+ error: error.message
+ });
+ }
+ }
+
/**
* Apply ACL rules to Nginx
* @route POST /api/acl/apply
diff --git a/apps/api/src/domains/acl/acl.routes.ts b/apps/api/src/domains/acl/acl.routes.ts
index 8eb7190..241cca8 100644
--- a/apps/api/src/domains/acl/acl.routes.ts
+++ b/apps/api/src/domains/acl/acl.routes.ts
@@ -28,6 +28,13 @@ router.get('/:id', (req, res) => aclController.getAclRule(req, res));
*/
router.post('/', authorize('admin', 'moderator'), (req, res) => aclController.createAclRule(req, res));
+/**
+ * @route GET /api/acl/preview
+ * @desc Preview ACL configuration without applying
+ * @access Private (all roles). NOTE(review): registered after GET /:id, so Express resolves GET /api/acl/preview to the '/:id' handler first — move this registration above router.get('/:id', ...).
+ */
+router.get('/preview', (req, res) => aclController.previewAclConfig(req, res));
+
/**
* @route POST /api/acl/apply
* @desc Apply ACL rules to Nginx
diff --git a/apps/api/src/domains/acl/acl.service.ts b/apps/api/src/domains/acl/acl.service.ts
index 1c20121..190d4da 100644
--- a/apps/api/src/domains/acl/acl.service.ts
+++ b/apps/api/src/domains/acl/acl.service.ts
@@ -105,6 +105,21 @@ export class AclService {
return rule;
}
+ /**
+ * Preview Nginx configuration without applying
+ */
+ async previewNginxConfig(): Promise<string> {
+ return aclNginxService.generateAclConfig();
+ }
+
+ /**
+ * Get count of enabled rules
+ */
+ async getEnabledRulesCount(): Promise<number> {
+ const rules = await aclRepository.findEnabled();
+ return rules.length;
+ }
+
/**
* Apply ACL rules to Nginx
*/
diff --git a/apps/api/src/domains/acl/dto/create-acl-rule.dto.ts b/apps/api/src/domains/acl/dto/create-acl-rule.dto.ts
index 5a50918..81396ab 100644
--- a/apps/api/src/domains/acl/dto/create-acl-rule.dto.ts
+++ b/apps/api/src/domains/acl/dto/create-acl-rule.dto.ts
@@ -1,3 +1,5 @@
+import { validateAclValue } from '../utils/validators';
+
/**
* DTO for creating ACL rule
*/
@@ -17,34 +19,76 @@ export interface CreateAclRuleDto {
export function validateCreateAclRuleDto(data: any): { isValid: boolean; errors: string[] } {
const errors: string[] = [];
+ // Validate name
if (!data.name || typeof data.name !== 'string' || !data.name.trim()) {
errors.push('Name is required and must be a non-empty string');
+ } else if (data.name.length > 100) {
+ errors.push('Name must not exceed 100 characters');
}
+ // Validate type
+ const validTypes = ['whitelist', 'blacklist'];
if (!data.type || typeof data.type !== 'string') {
errors.push('Type is required and must be a string');
+ } else if (!validTypes.includes(data.type)) {
+ errors.push(`Type must be one of: ${validTypes.join(', ')}`);
}
+ // Validate condition field
+ const validFields = ['ip', 'geoip', 'user_agent', 'url', 'method', 'header'];
if (!data.conditionField || typeof data.conditionField !== 'string') {
errors.push('Condition field is required and must be a string');
+ } else if (!validFields.includes(data.conditionField)) {
+ errors.push(`Condition field must be one of: ${validFields.join(', ')}`);
}
+ // Validate condition operator
+ const validOperators = ['equals', 'contains', 'regex'];
if (!data.conditionOperator || typeof data.conditionOperator !== 'string') {
errors.push('Condition operator is required and must be a string');
+ } else if (!validOperators.includes(data.conditionOperator)) {
+ errors.push(`Condition operator must be one of: ${validOperators.join(', ')}`);
}
+ // Validate condition value
if (!data.conditionValue || typeof data.conditionValue !== 'string') {
errors.push('Condition value is required and must be a string');
+ } else if (data.conditionValue.trim().length === 0) {
+ errors.push('Condition value cannot be empty');
+ } else {
+ // Perform field-specific validation
+ const valueValidation = validateAclValue(
+ data.conditionField,
+ data.conditionOperator,
+ data.conditionValue
+ );
+
+ if (!valueValidation.valid) {
+ errors.push(valueValidation.error || 'Invalid condition value');
+ }
}
+ // Validate action
+ const validActions = ['allow', 'deny', 'challenge'];
if (!data.action || typeof data.action !== 'string') {
errors.push('Action is required and must be a string');
+ } else if (!validActions.includes(data.action)) {
+ errors.push(`Action must be one of: ${validActions.join(', ')}`);
}
+ // Validate enabled
if (data.enabled !== undefined && typeof data.enabled !== 'boolean') {
errors.push('Enabled must be a boolean');
}
+ // Validate type-action combinations
+ if (data.type === 'whitelist' && data.action === 'deny') {
+ errors.push('Whitelist rules should use "allow" action, not "deny"');
+ }
+ if (data.type === 'blacklist' && data.action === 'allow') {
+ errors.push('Blacklist rules should use "deny" action, not "allow"');
+ }
+
return {
isValid: errors.length === 0,
errors
diff --git a/apps/api/src/domains/acl/dto/update-acl-rule.dto.ts b/apps/api/src/domains/acl/dto/update-acl-rule.dto.ts
index 15e82f5..6b17c21 100644
--- a/apps/api/src/domains/acl/dto/update-acl-rule.dto.ts
+++ b/apps/api/src/domains/acl/dto/update-acl-rule.dto.ts
@@ -1,3 +1,5 @@
+import { validateAclValue } from '../utils/validators';
+
/**
* DTO for updating ACL rule
*/
@@ -17,34 +19,78 @@ export interface UpdateAclRuleDto {
export function validateUpdateAclRuleDto(data: any): { isValid: boolean; errors: string[] } {
const errors: string[] = [];
+ // Validate name
if (data.name !== undefined && (typeof data.name !== 'string' || !data.name.trim())) {
errors.push('Name must be a non-empty string');
+ } else if (data.name && data.name.length > 100) {
+ errors.push('Name must not exceed 100 characters');
}
+ // Validate type
+ const validTypes = ['whitelist', 'blacklist'];
if (data.type !== undefined && typeof data.type !== 'string') {
errors.push('Type must be a string');
+ } else if (data.type && !validTypes.includes(data.type)) {
+ errors.push(`Type must be one of: ${validTypes.join(', ')}`);
}
+ // Validate condition field
+ const validFields = ['ip', 'geoip', 'user_agent', 'url', 'method', 'header'];
if (data.conditionField !== undefined && typeof data.conditionField !== 'string') {
errors.push('Condition field must be a string');
+ } else if (data.conditionField && !validFields.includes(data.conditionField)) {
+ errors.push(`Condition field must be one of: ${validFields.join(', ')}`);
}
+ // Validate condition operator
+ const validOperators = ['equals', 'contains', 'regex'];
if (data.conditionOperator !== undefined && typeof data.conditionOperator !== 'string') {
errors.push('Condition operator must be a string');
+ } else if (data.conditionOperator && !validOperators.includes(data.conditionOperator)) {
+ errors.push(`Condition operator must be one of: ${validOperators.join(', ')}`);
}
- if (data.conditionValue !== undefined && typeof data.conditionValue !== 'string') {
- errors.push('Condition value must be a string');
+ // Validate condition value with field-specific validation
+ if (data.conditionValue !== undefined) {
+ if (typeof data.conditionValue !== 'string') {
+ errors.push('Condition value must be a string');
+ } else if (data.conditionValue.trim().length === 0) {
+ errors.push('Condition value cannot be empty');
+ } else if (data.conditionField && data.conditionOperator) {
+ // Perform field-specific validation if we have all required fields
+ const valueValidation = validateAclValue(
+ data.conditionField,
+ data.conditionOperator,
+ data.conditionValue
+ );
+
+ if (!valueValidation.valid) {
+ errors.push(valueValidation.error || 'Invalid condition value');
+ }
+ }
}
+ // Validate action
+ const validActions = ['allow', 'deny', 'challenge'];
if (data.action !== undefined && typeof data.action !== 'string') {
errors.push('Action must be a string');
+ } else if (data.action && !validActions.includes(data.action)) {
+ errors.push(`Action must be one of: ${validActions.join(', ')}`);
}
+ // Validate enabled
if (data.enabled !== undefined && typeof data.enabled !== 'boolean') {
errors.push('Enabled must be a boolean');
}
+ // Validate type-action combinations
+ if (data.type === 'whitelist' && data.action === 'deny') {
+ errors.push('Whitelist rules should use "allow" action, not "deny"');
+ }
+ if (data.type === 'blacklist' && data.action === 'allow') {
+ errors.push('Blacklist rules should use "deny" action, not "allow"');
+ }
+
return {
isValid: errors.length === 0,
errors
diff --git a/apps/api/src/domains/acl/utils/validators.ts b/apps/api/src/domains/acl/utils/validators.ts
new file mode 100644
index 0000000..a5f0b63
--- /dev/null
+++ b/apps/api/src/domains/acl/utils/validators.ts
@@ -0,0 +1,278 @@
+/**
+ * ACL Validation Utilities
+ * Provides comprehensive validation for ACL rule values to prevent nginx configuration errors
+ */
+
+/**
+ * Validate IP address (IPv4 or IPv6)
+ */
+export function isValidIpAddress(ip: string): boolean {
+ // IPv4 validation
+ const ipv4Regex = /^(?:(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$/;
+
+ // IPv6 validation (simplified)
+ const ipv6Regex = /^(?:[0-9a-fA-F]{1,4}:){7}[0-9a-fA-F]{1,4}$|^::(?:[0-9a-fA-F]{1,4}:){0,6}[0-9a-fA-F]{1,4}$|^[0-9a-fA-F]{1,4}::(?:[0-9a-fA-F]{1,4}:){0,5}[0-9a-fA-F]{1,4}$/;
+
+ return ipv4Regex.test(ip) || ipv6Regex.test(ip);
+}
+
+/**
+ * Validate CIDR notation (e.g., 192.168.1.0/24)
+ */
+export function isValidCidr(cidr: string): boolean {
+ const cidrRegex = /^(?:(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\/(?:[0-9]|[1-2][0-9]|3[0-2])$/;
+
+ // IPv6 CIDR
+ const cidrV6Regex = /^(?:[0-9a-fA-F]{1,4}:){1,7}[0-9a-fA-F]{0,4}\/(?:[0-9]|[1-9][0-9]|1[0-1][0-9]|12[0-8])$/;
+
+ return cidrRegex.test(cidr) || cidrV6Regex.test(cidr);
+}
+
+/**
+ * Validate IP or CIDR
+ */
+export function isValidIpOrCidr(value: string): boolean {
+ return isValidIpAddress(value) || isValidCidr(value);
+}
+
+/**
+ * Validate regex pattern (check if it's a valid regex)
+ */
+export function isValidRegex(pattern: string): { valid: boolean; error?: string } {
+ try {
+ new RegExp(pattern);
+ return { valid: true };
+ } catch (error: any) {
+ return { valid: false, error: error.message };
+ }
+}
+
+/**
+ * Validate URL pattern for nginx location matching
+ */
+export function isValidUrlPattern(pattern: string): boolean {
+ // URL pattern should not be empty and should be a valid path
+ if (!pattern || pattern.trim().length === 0) {
+ return false;
+ }
+
+ // Check for dangerous characters that could break nginx config
+ const dangerousChars = /[;<>{}|\\]/;
+ if (dangerousChars.test(pattern)) {
+ return false;
+ }
+
+ return true;
+}
+
+/**
+ * Validate HTTP method
+ */
+export function isValidHttpMethod(method: string): boolean {
+ const validMethods = ['GET', 'POST', 'PUT', 'DELETE', 'PATCH', 'HEAD', 'OPTIONS', 'CONNECT', 'TRACE'];
+ return validMethods.includes(method.toUpperCase());
+}
+
+/**
+ * Validate GeoIP country code (ISO 3166-1 alpha-2)
+ */
+export function isValidCountryCode(code: string): boolean {
+ // Basic validation: 2 uppercase letters
+ return /^[A-Z]{2}$/.test(code);
+}
+
+/**
+ * Validate User-Agent pattern
+ */
+export function isValidUserAgentPattern(pattern: string): boolean {
+ if (!pattern || pattern.trim().length === 0) {
+ return false;
+ }
+
+ // Check for dangerous characters
+ const dangerousChars = /[;<>{}|\\]/;
+ if (dangerousChars.test(pattern)) {
+ return false;
+ }
+
+ return true;
+}
+
+/**
+ * Validate header name
+ */
+export function isValidHeaderName(name: string): boolean {
+ // HTTP header names should only contain alphanumeric, dash, and underscore
+ return /^[a-zA-Z0-9\-_]+$/.test(name);
+}
+
+/**
+ * Sanitize value to prevent nginx config injection
+ */
+export function sanitizeValue(value: string): string {
+ // Remove or escape dangerous characters
+ return value
+ .replace(/[;<>{}|\\]/g, '') // Remove dangerous chars
+ .replace(/\$/g, '\\$') // Escape dollar signs
+ .trim();
+}
+
+/**
+ * Validate ACL rule value based on field and operator
+ */
+export function validateAclValue(
+ field: string,
+ operator: string,
+ value: string
+): { valid: boolean; error?: string } {
+ if (!value || value.trim().length === 0) {
+ return { valid: false, error: 'Value cannot be empty' };
+ }
+
+ switch (field) {
+ case 'ip':
+ if (operator === 'equals' || operator === 'contains') {
+ if (!isValidIpOrCidr(value)) {
+ return {
+ valid: false,
+ error: 'Invalid IP address or CIDR notation. Examples: 192.168.1.1 or 192.168.1.0/24'
+ };
+ }
+ } else if (operator === 'regex') {
+ const regexCheck = isValidRegex(value);
+ if (!regexCheck.valid) {
+ return {
+ valid: false,
+ error: `Invalid regex pattern: ${regexCheck.error}`
+ };
+ }
+ }
+ break;
+
+ case 'geoip':
+ if (operator === 'equals') {
+ if (!isValidCountryCode(value)) {
+ return {
+ valid: false,
+ error: 'Invalid country code. Use ISO 3166-1 alpha-2 format (e.g., US, CN, VN)'
+ };
+ }
+ } else if (operator === 'regex') {
+ const regexCheck = isValidRegex(value);
+ if (!regexCheck.valid) {
+ return {
+ valid: false,
+ error: `Invalid regex pattern: ${regexCheck.error}`
+ };
+ }
+ }
+ break;
+
+ case 'user_agent':
+ if (operator === 'regex') {
+ const regexCheck = isValidRegex(value);
+ if (!regexCheck.valid) {
+ return {
+ valid: false,
+ error: `Invalid regex pattern: ${regexCheck.error}`
+ };
+ }
+ } else if (!isValidUserAgentPattern(value)) {
+ return {
+ valid: false,
+ error: 'Invalid user-agent pattern. Avoid special characters like ; < > { } | \\'
+ };
+ }
+ break;
+
+ case 'url':
+ if (operator === 'regex') {
+ const regexCheck = isValidRegex(value);
+ if (!regexCheck.valid) {
+ return {
+ valid: false,
+ error: `Invalid regex pattern: ${regexCheck.error}`
+ };
+ }
+ } else if (!isValidUrlPattern(value)) {
+ return {
+ valid: false,
+ error: 'Invalid URL pattern. Avoid special characters like ; < > { } | \\'
+ };
+ }
+ break;
+
+ case 'method':
+ if (operator === 'equals' && !isValidHttpMethod(value)) {
+ return {
+ valid: false,
+ error: 'Invalid HTTP method. Valid methods: GET, POST, PUT, DELETE, PATCH, HEAD, OPTIONS'
+ };
+ }
+ break;
+
+ case 'header':
+ // For header field, value should be in format "Header-Name: value"
+ const headerParts = value.split(':');
+ if (headerParts.length < 2) {
+ return {
+ valid: false,
+ error: 'Header value must be in format "Header-Name: value"'
+ };
+ }
+
+ const headerName = headerParts[0].trim();
+ if (!isValidHeaderName(headerName)) {
+ return {
+ valid: false,
+ error: 'Invalid header name. Use only alphanumeric, dash, and underscore characters'
+ };
+ }
+ break;
+
+ default:
+ return { valid: false, error: `Unknown field type: ${field}` };
+ }
+
+ return { valid: true };
+}
+
+/**
+ * Get validation hints for a specific field type
+ */
+export function getValidationHints(field: string, operator: string): string {
+ const hints: Record<string, Record<string, string>> = {
+ ip: {
+ equals: 'Enter a valid IP address (e.g., 192.168.1.1)',
+ contains: 'Enter a valid CIDR notation (e.g., 192.168.1.0/24)',
+ regex: 'Enter a valid regex pattern for IP matching'
+ },
+ geoip: {
+ equals: 'Enter a 2-letter country code (e.g., US, CN, VN)',
+ contains: 'Enter country codes separated by comma',
+ regex: 'Enter a regex pattern for country codes'
+ },
+ user_agent: {
+ equals: 'Enter exact user-agent string',
+ contains: 'Enter a substring to match in user-agent',
+ regex: 'Enter a regex pattern (e.g., (bot|crawler|spider))'
+ },
+ url: {
+ equals: 'Enter exact URL path (e.g., /admin)',
+ contains: 'Enter a substring to match in URL',
+ regex: 'Enter a regex pattern (e.g., \\.(php|asp)$)'
+ },
+ method: {
+ equals: 'Enter HTTP method (GET, POST, PUT, DELETE, etc.)',
+ contains: 'Enter HTTP method substring',
+ regex: 'Enter regex pattern for HTTP methods'
+ },
+ header: {
+ equals: 'Enter in format "Header-Name: value"',
+ contains: 'Enter in format "Header-Name: value"',
+ regex: 'Enter in format "Header-Name: regex-pattern"'
+ }
+ };
+
+ return hints[field]?.[operator] || 'Enter a valid value';
+}
diff --git a/apps/api/src/domains/dashboard/dashboard.controller.ts b/apps/api/src/domains/dashboard/dashboard.controller.ts
index 6b90851..7dd6335 100644
--- a/apps/api/src/domains/dashboard/dashboard.controller.ts
+++ b/apps/api/src/domains/dashboard/dashboard.controller.ts
@@ -7,6 +7,7 @@ import { AuthRequest } from '../../middleware/auth';
import logger from '../../utils/logger';
import { DashboardService } from './dashboard.service';
import { GetMetricsQueryDto, GetRecentAlertsQueryDto } from './dto';
+import { dashboardAnalyticsService } from './services/dashboard-analytics.service';
const dashboardService = new DashboardService();
@@ -82,3 +83,196 @@ export const getRecentAlerts = async (
});
}
};
+
+/**
+ * Get request trend analytics (auto-refresh every 5s)
+ */
+export const getRequestTrend = async (
+ req: AuthRequest,
+ res: Response
+): Promise<void> => {
+ try {
+ const { interval = 5 } = req.query;
+ const intervalSeconds = Math.max(5, Math.min(60, Number(interval) || 5));
+
+ const trend = await dashboardAnalyticsService.getRequestTrend(intervalSeconds);
+
+ res.json({
+ success: true,
+ data: trend,
+ });
+ } catch (error) {
+ logger.error('Get request trend error:', error);
+ res.status(500).json({
+ success: false,
+ message: 'Failed to get request trend',
+ });
+ }
+};
+
+/**
+ * Get slow requests from performance monitoring
+ */
+export const getSlowRequests = async (
+ req: AuthRequest,
+ res: Response
+): Promise<void> => {
+ try {
+ const { limit = 10 } = req.query;
+ const slowRequests = await dashboardAnalyticsService.getSlowRequests(Number(limit));
+
+ res.json({
+ success: true,
+ data: slowRequests,
+ });
+ } catch (error) {
+ logger.error('Get slow requests error:', error);
+ res.status(500).json({
+ success: false,
+ message: 'Failed to get slow requests',
+ });
+ }
+};
+
+/**
+ * Get latest attack statistics (top 5 in 24h)
+ */
+export const getLatestAttackStats = async (
+ req: AuthRequest,
+ res: Response
+): Promise<void> => {
+ try {
+ const { limit = 5 } = req.query;
+ const attacks = await dashboardAnalyticsService.getLatestAttacks(Number(limit));
+
+ res.json({
+ success: true,
+ data: attacks,
+ });
+ } catch (error) {
+ logger.error('Get latest attack stats error:', error);
+ res.status(500).json({
+ success: false,
+ message: 'Failed to get latest attack statistics',
+ });
+ }
+};
+
+/**
+ * Get latest security news/events
+ */
+export const getLatestNews = async (
+ req: AuthRequest,
+ res: Response
+): Promise<void> => {
+ try {
+ const { limit = 20 } = req.query;
+ const news = await dashboardAnalyticsService.getLatestNews(Number(limit));
+
+ res.json({
+ success: true,
+ data: news,
+ });
+ } catch (error) {
+ logger.error('Get latest news error:', error);
+ res.status(500).json({
+ success: false,
+ message: 'Failed to get latest news',
+ });
+ }
+};
+
+/**
+ * Get request analytics (top IPs by period)
+ */
+export const getRequestAnalytics = async (
+ req: AuthRequest,
+ res: Response
+): Promise<void> => {
+ try {
+ const { period = 'day' } = req.query;
+ const validPeriod = ['day', 'week', 'month'].includes(period as string)
+ ? (period as 'day' | 'week' | 'month')
+ : 'day';
+
+ const analytics = await dashboardAnalyticsService.getRequestAnalytics(validPeriod);
+
+ res.json({
+ success: true,
+ data: analytics,
+ });
+ } catch (error) {
+ logger.error('Get request analytics error:', error);
+ res.status(500).json({
+ success: false,
+ message: 'Failed to get request analytics',
+ });
+ }
+};
+
+/**
+ * Get attack vs normal request ratio
+ */
+export const getAttackRatio = async (
+ req: AuthRequest,
+ res: Response
+): Promise<void> => {
+ try {
+ const ratio = await dashboardAnalyticsService.getAttackRatio();
+
+ res.json({
+ success: true,
+ data: ratio,
+ });
+ } catch (error) {
+ logger.error('Get attack ratio error:', error);
+ res.status(500).json({
+ success: false,
+ message: 'Failed to get attack ratio',
+ });
+ }
+};
+
+/**
+ * Get complete dashboard analytics
+ */
+export const getDashboardAnalytics = async (
+ req: AuthRequest,
+ res: Response
+): Promise<void> => {
+ try {
+ const [
+ requestTrend,
+ slowRequests,
+ latestAttacks,
+ latestNews,
+ requestAnalytics,
+ attackRatio,
+ ] = await Promise.all([
+ dashboardAnalyticsService.getRequestTrend(5),
+ dashboardAnalyticsService.getSlowRequests(10),
+ dashboardAnalyticsService.getLatestAttacks(5),
+ dashboardAnalyticsService.getLatestNews(20),
+ dashboardAnalyticsService.getRequestAnalytics('day'),
+ dashboardAnalyticsService.getAttackRatio(),
+ ]);
+
+ res.json({
+ success: true,
+ data: {
+ requestTrend,
+ slowRequests,
+ latestAttacks,
+ latestNews,
+ requestAnalytics,
+ attackRatio,
+ },
+ });
+ } catch (error) {
+ logger.error('Get dashboard analytics error:', error);
+ res.status(500).json({
+ success: false,
+ message: 'Failed to get dashboard analytics',
+ });
+ }
+};
diff --git a/apps/api/src/domains/dashboard/dashboard.routes.ts b/apps/api/src/domains/dashboard/dashboard.routes.ts
index c4d3dd5..bdfc582 100644
--- a/apps/api/src/domains/dashboard/dashboard.routes.ts
+++ b/apps/api/src/domains/dashboard/dashboard.routes.ts
@@ -21,4 +21,26 @@ router.get('/metrics', dashboardController.getSystemMetrics);
// Get recent alerts
router.get('/recent-alerts', dashboardController.getRecentAlerts);
+// Dashboard Analytics Endpoints
+// Get request trend (auto-refresh every 5s)
+router.get('/analytics/request-trend', dashboardController.getRequestTrend);
+
+// Get slow requests
+router.get('/analytics/slow-requests', dashboardController.getSlowRequests);
+
+// Get latest attack statistics (top 5 in 24h)
+router.get('/analytics/latest-attacks', dashboardController.getLatestAttackStats);
+
+// Get latest security news/events
+router.get('/analytics/latest-news', dashboardController.getLatestNews);
+
+// Get request analytics (top IPs by period)
+router.get('/analytics/request-analytics', dashboardController.getRequestAnalytics);
+
+// Get attack vs normal request ratio
+router.get('/analytics/attack-ratio', dashboardController.getAttackRatio);
+
+// Get complete dashboard analytics (all in one)
+router.get('/analytics', dashboardController.getDashboardAnalytics);
+
export default router;
diff --git a/apps/api/src/domains/dashboard/services/dashboard-analytics.service.ts b/apps/api/src/domains/dashboard/services/dashboard-analytics.service.ts
new file mode 100644
index 0000000..4e7015b
--- /dev/null
+++ b/apps/api/src/domains/dashboard/services/dashboard-analytics.service.ts
@@ -0,0 +1,588 @@
+/**
+ * Dashboard Analytics Service
+ * Handles advanced analytics and statistics from logs
+ */
+
+import { exec } from 'child_process';
+import { promisify } from 'util';
+import * as fs from 'fs/promises';
+import logger from '../../../utils/logger';
+import {
+ RequestTrendDataPoint,
+ SlowRequestEntry,
+ AttackTypeStats,
+ LatestAttackEntry,
+ IpAnalyticsEntry,
+ AttackRatioStats,
+ RequestAnalyticsResponse,
+} from '../types/dashboard-analytics.types';
+import { parseAccessLogLine, parseModSecLogLine } from '../../logs/services/log-parser.service';
+
+const execAsync = promisify(exec);
+
+const NGINX_ACCESS_LOG = '/var/log/nginx/access.log';
+const NGINX_ERROR_LOG = '/var/log/nginx/error.log';
+const MODSEC_AUDIT_LOG = '/var/log/modsec_audit.log';
+const NGINX_LOG_DIR = '/var/log/nginx';
+const MAX_BUFFER = 100 * 1024 * 1024; // 100MB
+const HOURS_24 = 24 * 3600 * 1000;
+
+export class DashboardAnalyticsService {
+ /**
+ * Helper: Calculate cutoff time for a given period in hours
+ */
+ private getCutoffTime(hours: number): number {
+ return Date.now() - hours * 3600 * 1000;
+ }
+
+ /**
+ * Helper: Read ModSecurity logs from a single error log file
+ */
+ private async readModSecFromFile(filePath: string): Promise<string[]> {
+ try {
+ const { stdout } = await execAsync(`grep "ModSecurity:" "${filePath}" 2>/dev/null || echo ""`, { maxBuffer: MAX_BUFFER });
+ return stdout.trim().split('\n').filter(line => line.trim().length > 0);
+ } catch (error) {
+ logger.warn(`Could not read ModSec logs from ${filePath}:`, error);
+ return [];
+ }
+ }
+
+ /**
+ * Helper: Read ALL ModSecurity logs from error.log (NO LINE LIMIT!)
+ */
+ private async readModSecLogs(numLines: number): Promise<string[]> {
+ const lines: string[] = [];
+
+ // Read from main nginx error.log
+ lines.push(...await this.readModSecFromFile(NGINX_ERROR_LOG));
+
+ // Read from domain-specific error logs
+ try {
+ const domainLogs = await this.getDomainLogFiles();
+ for (const domainLog of domainLogs) {
+ if (domainLog.errorLog) lines.push(...await this.readModSecFromFile(domainLog.errorLog));
+ if (domainLog.sslErrorLog) lines.push(...await this.readModSecFromFile(domainLog.sslErrorLog));
+ }
+ } catch (error) {
+ logger.error('Could not read from domain error logs:', error);
+ }
+
+ return lines;
+ }
+
+ /**
+ * Helper: Read access logs from all sources (main + domain-specific)
+ */
+ private async readAllAccessLogs(mainLogLines: number, domainLogLines: number): Promise<string[]> {
+ const lines = await this.readLastLines(NGINX_ACCESS_LOG, mainLogLines);
+
+ const domainLogs = await this.getDomainLogFiles();
+ for (const domainLog of domainLogs) {
+ if (domainLog.accessLog) lines.push(...await this.readLastLines(domainLog.accessLog, domainLogLines));
+ if (domainLog.sslAccessLog) lines.push(...await this.readLastLines(domainLog.sslAccessLog, domainLogLines));
+ }
+
+ return lines;
+ }
+
+ /**
+ * Helper: Determine attack type from parsed ModSec log
+ */
+ private determineAttackType(parsed: any, defaultType: string = 'Unknown Attack'): string {
+ // Check tags first
+ if (parsed.tags && parsed.tags.length > 0) {
+ const meaningfulTag = parsed.tags.find((tag: string) =>
+ tag.includes('attack') || tag.includes('injection') || tag.includes('xss') ||
+ tag.includes('sqli') || tag.includes('rce') || tag.includes('lfi') || tag.includes('rfi') || tag.includes('anomaly-evaluation')
+ );
+ if (meaningfulTag) {
+ return meaningfulTag.replace(/-/g, ' ').replace(/_/g, ' ').toUpperCase();
+ }
+ }
+
+ // Check message
+ if (parsed.message) {
+ const attackTypes: { [key: string]: string } = {
+ 'SQL Injection': 'SQL Injection',
+ 'XSS': defaultType === 'Unknown Attack' ? 'Cross-Site Scripting' : 'XSS Attack',
+ 'RCE': 'Remote Code Execution',
+ 'LFI': 'Local File Inclusion',
+ 'RFI': 'Remote File Inclusion',
+ 'Command Injection': 'Command Injection',
+ 'Anomaly Evaluation': 'Anomaly Evaluation'
+ };
+
+ for (const [key, value] of Object.entries(attackTypes)) {
+ if (parsed.message.includes(key)) return value;
+ }
+ }
+
+ return defaultType;
+ }
+
+ /**
+ * Helper: Increment status code counter
+ */
+ private incrementStatusCode(dataPoint: RequestTrendDataPoint, status: number): void {
+ const statusKey = `status${status}` as keyof RequestTrendDataPoint;
+ if (statusKey in dataPoint) {
+ (dataPoint[statusKey] as number)++;
+ } else {
+ dataPoint.statusOther++;
+ }
+ }
+
+ /**
+ * Get request trend data (auto-refresh every 5 seconds)
+ * Returns request count grouped by status codes over time
+ */
+ async getRequestTrend(intervalSeconds: number = 5): Promise<RequestTrendDataPoint[]> {
+ try {
+ // Get logs from the last 24 hours grouped by time intervals
+ const hoursToFetch = 24;
+ const dataPoints = Math.floor((hoursToFetch * 3600) / intervalSeconds);
+ const now = Date.now();
+
+ // Read access logs from all sources
+ const lines = await this.readAllAccessLogs(10000, 5000);
+
+ // Parse logs and group by time intervals
+ const intervalMap = new Map<number, RequestTrendDataPoint>();
+
+ lines.forEach((line, index) => {
+ const parsed = parseAccessLogLine(line, index);
+ if (!parsed) return;
+
+ const timestamp = new Date(parsed.timestamp).getTime();
+ const intervalIndex = Math.floor((now - timestamp) / (intervalSeconds * 1000));
+
+ if (intervalIndex >= dataPoints || intervalIndex < 0) return;
+
+ const intervalKey = now - (intervalIndex * intervalSeconds * 1000);
+
+ if (!intervalMap.has(intervalKey)) {
+ intervalMap.set(intervalKey, {
+ timestamp: new Date(intervalKey).toISOString(),
+ total: 0,
+ status200: 0,
+ status301: 0,
+ status302: 0,
+ status400: 0,
+ status403: 0,
+ status404: 0,
+ status500: 0,
+ status502: 0,
+ status503: 0,
+ statusOther: 0,
+ });
+ }
+
+ const dataPoint = intervalMap.get(intervalKey)!;
+ dataPoint.total++;
+
+ // Count by status code
+ if (parsed.statusCode) {
+ this.incrementStatusCode(dataPoint, parsed.statusCode);
+ }
+ });
+
+ // Convert to array and sort by timestamp
+ const result = Array.from(intervalMap.values())
+ .sort((a, b) => new Date(a.timestamp).getTime() - new Date(b.timestamp).getTime());
+
+ return result;
+ } catch (error) {
+ logger.error('Get request trend error:', error);
+ return [];
+ }
+ }
+
+ /**
+ * Get top 10 slow requests from performance monitoring
+ */
+ async getSlowRequests(limit: number = 10): Promise<SlowRequestEntry[]> {
+ try {
+ // Get from PerformanceMetric table
+ const prisma = (await import('../../../config/database')).default;
+
+ const slowRequests = await prisma.performanceMetric.groupBy({
+ by: ['domain'],
+ _avg: {
+ responseTime: true,
+ },
+ _max: {
+ responseTime: true,
+ },
+ _min: {
+ responseTime: true,
+ },
+ _count: {
+ domain: true,
+ },
+ orderBy: {
+ _avg: {
+ responseTime: 'desc',
+ },
+ },
+ take: limit,
+ where: {
+ timestamp: {
+ gte: new Date(Date.now() - 24 * 3600 * 1000), // Last 24 hours
+ },
+ },
+ });
+
+ return slowRequests.map(item => ({
+ path: item.domain,
+ avgResponseTime: item._avg.responseTime || 0,
+ maxResponseTime: item._max.responseTime || 0,
+ minResponseTime: item._min.responseTime || 0,
+ requestCount: item._count.domain,
+ }));
+ } catch (error) {
+ logger.error('Get slow requests error:', error);
+ return [];
+ }
+ }
+
+ /**
+ * Get top 5 attack types in last 24 hours
+ */
+ async getLatestAttacks(limit: number = 5): Promise<AttackTypeStats[]> {
+ try {
+ // Read ModSecurity logs from error.log and audit log
+ const lines = await this.readModSecLogs(5000);
+
+ // Parse and group by attack type
+ const attackMap = new Map<string, {
+ count: number;
+ severity: string;
+ lastOccurred: string;
+ ruleIds: Set<string>;
+ }>();
+
+ const cutoffTime = this.getCutoffTime(24);
+
+ lines.forEach((line, index) => {
+ const parsed = parseModSecLogLine(line, index);
+ if (!parsed || !parsed.ruleId) return;
+
+ const timestamp = new Date(parsed.timestamp).getTime();
+ if (timestamp < cutoffTime) return;
+
+ const attackType = this.determineAttackType(parsed);
+
+ if (!attackMap.has(attackType)) {
+ attackMap.set(attackType, {
+ count: 0,
+ severity: parsed.severity || 'MEDIUM',
+ lastOccurred: parsed.timestamp,
+ ruleIds: new Set(),
+ });
+ }
+
+ const stats = attackMap.get(attackType)!;
+ stats.count++;
+ if (parsed.ruleId) stats.ruleIds.add(parsed.ruleId);
+
+ // Update last occurred if more recent
+ if (new Date(parsed.timestamp) > new Date(stats.lastOccurred)) {
+ stats.lastOccurred = parsed.timestamp;
+ }
+ });
+
+ // Convert to array and sort by count
+ const result: AttackTypeStats[] = Array.from(attackMap.entries())
+ .map(([attackType, stats]) => ({
+ attackType,
+ count: stats.count,
+ severity: stats.severity,
+ lastOccurred: stats.lastOccurred,
+ timestamp: stats.lastOccurred,
+ ruleIds: Array.from(stats.ruleIds),
+ }))
+ .sort((a, b) => b.count - a.count)
+ .slice(0, limit);
+
+ return result;
+ } catch (error) {
+ logger.error('Get latest attacks error:', error);
+ return [];
+ }
+ }
+
+ /**
+ * Get latest security news/events (table format)
+ */
+ async getLatestNews(limit: number = 20): Promise<LatestAttackEntry[]> {
+ try {
+ // Read ModSecurity logs from error logs only (not audit log - different format)
+ const lines = await this.readModSecLogs(2000);
+
+ const attacks: LatestAttackEntry[] = [];
+ const cutoffTime = this.getCutoffTime(24);
+
+ lines.forEach((line, index) => {
+ const parsed = parseModSecLogLine(line, index);
+ if (!parsed) return;
+
+ const timestamp = new Date(parsed.timestamp).getTime();
+ if (timestamp < cutoffTime) return;
+
+ const attackerIp = parsed.ip || 'Unknown';
+ const domain = parsed.hostname;
+ const attackType = this.determineAttackType(parsed, 'Security Event');
+
+ // Use ruleId as logId for better searching
+ const logId = parsed.ruleId || parsed.uniqueId || parsed.id;
+
+ attacks.push({
+ id: parsed.id,
+ timestamp: parsed.timestamp,
+ attackerIp,
+ domain,
+ urlPath: parsed.path || parsed.uri || '/',
+ attackType,
+ ruleId: parsed.ruleId,
+ uniqueId: parsed.uniqueId, // Add uniqueId for precise log lookup
+ severity: parsed.severity,
+ action: 'Blocked',
+ logId,
+ // DEBUG: Add raw log sample for first few entries
+ ...(index < 3 ? { _debugRawLog: line.substring(0, 300) } : {}),
+ } as any);
+ });
+
+ // Sort by timestamp descending and limit
+ return attacks
+ .sort((a, b) => new Date(b.timestamp).getTime() - new Date(a.timestamp).getTime())
+ .slice(0, limit);
+ } catch (error) {
+ logger.error('Get latest news error:', error);
+ return [];
+ }
+ }
+
+ /**
+ * Get request analytics (top IPs by period)
+ */
+ async getRequestAnalytics(period: 'day' | 'week' | 'month' = 'day'): Promise<RequestAnalyticsResponse> {
+ try {
+ const periodHours = period === 'day' ? 24 : period === 'week' ? 168 : 720;
+ const cutoffTime = this.getCutoffTime(periodHours);
+
+ // Read access logs from all sources
+ const lines = await this.readAllAccessLogs(20000, 10000);
+
+ // Group by IP
+ const ipMap = new Map<string, IpAnalyticsEntry>();
+
+ lines.forEach((line, index) => {
+ const parsed = parseAccessLogLine(line, index);
+ if (!parsed || !parsed.ip) return;
+
+ const timestamp = new Date(parsed.timestamp).getTime();
+ if (timestamp < cutoffTime) return;
+
+ if (!ipMap.has(parsed.ip)) {
+ ipMap.set(parsed.ip, {
+ ip: parsed.ip,
+ requestCount: 0,
+ errorCount: 0,
+ attackCount: 0,
+ lastSeen: parsed.timestamp,
+ });
+ }
+
+ const entry = ipMap.get(parsed.ip)!;
+ entry.requestCount++;
+
+ if (parsed.statusCode && parsed.statusCode >= 400) {
+ entry.errorCount++;
+ }
+
+ // Update last seen
+ if (new Date(parsed.timestamp) > new Date(entry.lastSeen)) {
+ entry.lastSeen = parsed.timestamp;
+ }
+ });
+
+ // Check for attacks from ModSecurity logs - count by actual client IP
+ let modsecLines: string[] = [];
+ try {
+ modsecLines = await this.readModSecLogs(10000);
+ } catch (error) {
+ logger.error('Failed to read ModSec logs:', error);
+ }
+
+ modsecLines.forEach((line, index) => {
+ const parsed = parseModSecLogLine(line, index);
+ if (!parsed) return;
+
+ const timestamp = new Date(parsed.timestamp).getTime();
+ if (timestamp < cutoffTime) return;
+
+ // Use parsed IP (already extracted correctly from [client IP])
+ const attackerIp = parsed.ip;
+ if (!attackerIp) return;
+
+ // If IP exists in map, increment attack count
+ let entry = ipMap.get(attackerIp);
+ if (entry) {
+ entry.attackCount++;
+ entry.requestCount++; // Attacks are also requests!
+ } else {
+ // Create new entry for this IP if not exists
+ ipMap.set(attackerIp, {
+ ip: attackerIp,
+ requestCount: 1, // Attack is a request
+ errorCount: 1, // Attack is also an error
+ attackCount: 1,
+ lastSeen: parsed.timestamp,
+ });
+ }
+ });
+
+ // Sort by request count and get top 10
+ const topIps = Array.from(ipMap.values())
+ .sort((a, b) => b.requestCount - a.requestCount)
+ .slice(0, 10);
+
+ return {
+ period,
+ topIps,
+ totalRequests: lines.length,
+ uniqueIps: ipMap.size,
+ _timestamp: Date.now(), // Force cache refresh
+ } as any;
+ } catch (error) {
+ logger.error('Get request analytics error:', error);
+ return {
+ period,
+ topIps: [],
+ totalRequests: 0,
+ uniqueIps: 0,
+ };
+ }
+ }
+
+ /**
+ * Get attack vs normal request ratio
+ */
+ async getAttackRatio(): Promise<AttackRatioStats> {
+ try {
+ // Count total requests from access logs (last 24h)
+ const accessLines = await this.readAllAccessLogs(20000, 10000);
+ const cutoffTime = this.getCutoffTime(24);
+ let totalRequests = 0;
+
+ accessLines.forEach((line, index) => {
+ const parsed = parseAccessLogLine(line, index);
+ if (!parsed) return;
+
+ const timestamp = new Date(parsed.timestamp).getTime();
+ if (timestamp >= cutoffTime) {
+ totalRequests++;
+ }
+ });
+
+ // Count attack requests from ModSecurity logs
+ const modsecLines = await this.readModSecLogs(5000);
+ let attackRequests = 0;
+
+ modsecLines.forEach((line, index) => {
+ const parsed = parseModSecLogLine(line, index);
+ if (!parsed) return;
+
+ const timestamp = new Date(parsed.timestamp).getTime();
+ if (timestamp >= cutoffTime) {
+ attackRequests++;
+ }
+ });
+
+ const normalRequests = totalRequests - attackRequests;
+ const attackPercentage = totalRequests > 0 ? (attackRequests / totalRequests) * 100 : 0;
+
+ return {
+ totalRequests,
+ attackRequests,
+ normalRequests,
+ attackPercentage: parseFloat(attackPercentage.toFixed(2)),
+ };
+ } catch (error) {
+ logger.error('Get attack ratio error:', error);
+ return {
+ totalRequests: 0,
+ attackRequests: 0,
+ normalRequests: 0,
+ attackPercentage: 0,
+ };
+ }
+ }
+
+ /**
+ * Helper: Read last N lines from file
+ */
+ private async readLastLines(filePath: string, numLines: number): Promise<string[]> {
+ try {
+ await fs.access(filePath);
+ const { stdout } = await execAsync(`tail -n ${numLines} ${filePath} 2>/dev/null || echo ""`);
+ return stdout.trim().split('\n').filter((line: string) => line.trim().length > 0);
+ } catch (error: any) {
+ if (error.code !== 'ENOENT') {
+ logger.warn(`Could not read log file ${filePath}:`, error);
+ }
+ return [];
+ }
+ }
+
+ /**
+ * Helper: Get domain-specific log files
+ */
+ private async getDomainLogFiles(): Promise<{ domain: string; accessLog: string; errorLog: string; sslAccessLog: string; sslErrorLog: string }[]> {
+ try {
+ const files = await fs.readdir(NGINX_LOG_DIR);
+ const domainLogs: { [key: string]: { accessLog?: string; errorLog?: string; sslAccessLog?: string; sslErrorLog?: string } } = {};
+
+ files.forEach(file => {
+ const sslAccessMatch = file.match(/^(.+?)[-_]ssl[-_]access\.log$/);
+ const sslErrorMatch = file.match(/^(.+?)[-_]ssl[-_]error\.log$/);
+ const accessMatch = !file.includes('ssl') && file.match(/^(.+?)[-_]access\.log$/);
+ const errorMatch = !file.includes('ssl') && file.match(/^(.+?)[-_]error\.log$/);
+
+ if (sslAccessMatch) {
+ const domain = sslAccessMatch[1];
+ if (!domainLogs[domain]) domainLogs[domain] = {};
+ domainLogs[domain].sslAccessLog = `${NGINX_LOG_DIR}/${file}`;
+ } else if (sslErrorMatch) {
+ const domain = sslErrorMatch[1];
+ if (!domainLogs[domain]) domainLogs[domain] = {};
+ domainLogs[domain].sslErrorLog = `${NGINX_LOG_DIR}/${file}`;
+ } else if (accessMatch) {
+ const domain = accessMatch[1];
+ if (!domainLogs[domain]) domainLogs[domain] = {};
+ domainLogs[domain].accessLog = `${NGINX_LOG_DIR}/${file}`;
+ } else if (errorMatch) {
+ const domain = errorMatch[1];
+ if (!domainLogs[domain]) domainLogs[domain] = {};
+ domainLogs[domain].errorLog = `${NGINX_LOG_DIR}/${file}`;
+ }
+ });
+
+ return Object.entries(domainLogs).map(([domain, logs]) => ({
+ domain,
+ accessLog: logs.accessLog || '',
+ errorLog: logs.errorLog || '',
+ sslAccessLog: logs.sslAccessLog || '',
+ sslErrorLog: logs.sslErrorLog || '',
+ }));
+ } catch (error) {
+ logger.error('Error reading domain log files:', error);
+ return [];
+ }
+ }
+}
+
+// Export singleton instance
+export const dashboardAnalyticsService = new DashboardAnalyticsService();
diff --git a/apps/api/src/domains/dashboard/types/dashboard-analytics.types.ts b/apps/api/src/domains/dashboard/types/dashboard-analytics.types.ts
new file mode 100644
index 0000000..241488c
--- /dev/null
+++ b/apps/api/src/domains/dashboard/types/dashboard-analytics.types.ts
@@ -0,0 +1,105 @@
+/**
+ * Dashboard Analytics Types
+ * Types for advanced dashboard statistics and analytics
+ */
+
+// Base interfaces
+interface BaseCountStats {
+ count: number;
+}
+
+interface TimestampedEntry {
+ timestamp: string;
+}
+
+interface ResponseTimeMetrics {
+ requestCount: number;
+ avgResponseTime: number;
+ maxResponseTime: number;
+ minResponseTime: number;
+}
+
+// Status code fields interface
+interface StatusCodeFields {
+ status200: number;
+ status301: number;
+ status302: number;
+ status400: number;
+ status403: number;
+ status404: number;
+ status500: number;
+ status502: number;
+ status503: number;
+ statusOther: number;
+}
+
+// Request trend data point
+export interface RequestTrendDataPoint extends TimestampedEntry, StatusCodeFields {
+ total: number;
+}
+
+// Slow request entry
+export interface SlowRequestEntry extends ResponseTimeMetrics {
+ path: string;
+}
+
+// Attack type statistics
+export interface AttackTypeStats extends BaseCountStats, TimestampedEntry {
+ attackType: string;
+ severity: string;
+ lastOccurred: string;
+ ruleIds: string[];
+}
+
+// Latest attack/security event
+export interface LatestAttackEntry extends TimestampedEntry {
+ id: string;
+ attackerIp: string;
+ domain?: string;
+ urlPath: string;
+ attackType: string;
+ ruleId?: string;
+ uniqueId?: string;
+ severity?: string;
+ action: string;
+ logId: string;
+}
+
+// IP analytics entry
+export interface IpAnalyticsEntry {
+ ip: string;
+ requestCount: number;
+ errorCount: number;
+ attackCount: number;
+ lastSeen: string;
+ userAgent?: string;
+}
+
+// Attack vs Normal request ratio
+export interface AttackRatioStats {
+ totalRequests: number;
+ attackRequests: number;
+ normalRequests: number;
+ attackPercentage: number;
+}
+
+// Period type for analytics
+export type AnalyticsPeriod = 'day' | 'week' | 'month';
+
+// Request analytics response
+export interface RequestAnalyticsResponse {
+ period: AnalyticsPeriod;
+ topIps: IpAnalyticsEntry[];
+ totalRequests: number;
+ uniqueIps: number;
+}
+
+// Complete dashboard analytics response
+export interface DashboardAnalyticsResponse {
+ requestTrend: RequestTrendDataPoint[];
+ slowRequests: SlowRequestEntry[];
+ latestAttacks: AttackTypeStats[];
+ latestNews: LatestAttackEntry[];
+ requestAnalytics: RequestAnalyticsResponse;
+ attackRatio: AttackRatioStats;
+}
diff --git a/apps/api/src/domains/domains/domains.controller.ts b/apps/api/src/domains/domains/domains.controller.ts
index f2146db..516b06c 100644
--- a/apps/api/src/domains/domains/domains.controller.ts
+++ b/apps/api/src/domains/domains/domains.controller.ts
@@ -124,6 +124,17 @@ export class DomainsController {
logger.error('Create domain error:', error);
if (error.message === 'Domain already exists') {
+ res.status(409).json({
+ success: false,
+ message: error.message,
+ });
+ return;
+ }
+
+ // Handle nginx validation errors
+ if (error.message.includes('Nginx configuration validation failed') ||
+ error.message.includes('Nginx reload failed') ||
+ error.message.includes('Invalid nginx configuration')) {
res.status(400).json({
success: false,
message: error.message,
@@ -133,7 +144,7 @@ export class DomainsController {
res.status(500).json({
success: false,
- message: 'Internal server error',
+ message: error.message || 'Internal server error',
});
}
}
@@ -188,9 +199,20 @@ export class DomainsController {
return;
}
+ // Handle nginx validation errors
+ if (error.message.includes('Nginx configuration validation failed') ||
+ error.message.includes('Nginx reload failed') ||
+ error.message.includes('Invalid nginx configuration')) {
+ res.status(400).json({
+ success: false,
+ message: error.message,
+ });
+ return;
+ }
+
res.status(500).json({
success: false,
- message: 'Internal server error',
+ message: error.message || 'Internal server error',
});
}
}
diff --git a/apps/api/src/domains/domains/domains.service.ts b/apps/api/src/domains/domains/domains.service.ts
index c114062..bd4e867 100644
--- a/apps/api/src/domains/domains/domains.service.ts
+++ b/apps/api/src/domains/domains/domains.service.ts
@@ -51,31 +51,55 @@ export class DomainsService {
// Create domain
const domain = await domainsRepository.create(input);
- // Generate nginx configuration
- await nginxConfigService.generateConfig(domain);
+ try {
+ // Generate nginx configuration (includes validation)
+ await nginxConfigService.generateConfig(domain);
- // Update domain status to active
- const updatedDomain = await domainsRepository.updateStatus(domain.id, 'active');
+ // Update domain status to active
+ const updatedDomain = await domainsRepository.updateStatus(domain.id, 'active');
- // Enable configuration
- await nginxConfigService.enableConfig(domain.name);
+ // Enable configuration
+ await nginxConfigService.enableConfig(domain.name);
- // Auto-reload nginx (silent mode)
- await nginxReloadService.autoReload(true);
+ // Auto-reload nginx
+ const reloadResult = await nginxReloadService.reload();
+ if (!reloadResult.success) {
+ // Rollback: delete domain and config
+ await nginxConfigService.deleteConfig(domain.name);
+ await domainsRepository.delete(domain.id);
+ throw new Error(`Nginx reload failed: ${reloadResult.error || 'Unknown error'}`);
+ }
- // Log activity
- await this.logActivity(
- userId,
- `Created domain: ${input.name}`,
- 'config_change',
- ip,
- userAgent,
- true
- );
-
- logger.info(`Domain ${input.name} created by user ${username}`);
+ // Log activity
+ await this.logActivity(
+ userId,
+ `Created domain: ${input.name}`,
+ 'config_change',
+ ip,
+ userAgent,
+ true
+ );
- return updatedDomain;
+ logger.info(`Domain ${input.name} created by user ${username}`);
+
+ return updatedDomain;
+ } catch (error: any) {
+ // Rollback: delete domain from database
+ logger.error(`Failed to create domain ${input.name}, rolling back:`, error);
+ try {
+ await nginxConfigService.deleteConfig(domain.name);
+ await domainsRepository.delete(domain.id);
+ logger.info(`Rolled back domain creation for ${input.name}`);
+ } catch (rollbackError) {
+ logger.error(`Failed to rollback domain creation:`, rollbackError);
+ }
+
+ // Re-throw with user-friendly message
+ if (error.message.includes('Invalid nginx configuration')) {
+ throw new Error(`Nginx configuration validation failed: ${error.message}`);
+ }
+ throw error;
+ }
}
/**
@@ -110,39 +134,92 @@ export class DomainsService {
userAgent: string
): Promise {
// Check if domain exists
- const domain = await domainsRepository.findById(id);
- if (!domain) {
+ const originalDomain = await domainsRepository.findById(id);
+ if (!originalDomain) {
throw new Error('Domain not found');
}
- // Update domain
- await domainsRepository.update(id, input);
-
- // Get updated domain with relations
- const updatedDomain = await domainsRepository.findById(id);
- if (!updatedDomain) {
- throw new Error('Failed to fetch updated domain');
- }
+ // Store original data for rollback
+ const originalData: UpdateDomainInput = {
+ name: originalDomain.name,
+ status: originalDomain.status,
+ modsecEnabled: originalDomain.modsecEnabled,
+ upstreams: originalDomain.upstreams.map(u => ({
+ host: u.host,
+ port: u.port,
+ protocol: u.protocol,
+ sslVerify: u.sslVerify,
+ weight: u.weight,
+ maxFails: u.maxFails,
+ failTimeout: u.failTimeout,
+ })),
+ loadBalancer: originalDomain.loadBalancer ? {
+ algorithm: originalDomain.loadBalancer.algorithm,
+ healthCheckEnabled: originalDomain.loadBalancer.healthCheckEnabled,
+ healthCheckInterval: originalDomain.loadBalancer.healthCheckInterval,
+ healthCheckTimeout: originalDomain.loadBalancer.healthCheckTimeout,
+ healthCheckPath: originalDomain.loadBalancer.healthCheckPath,
+ } : undefined,
+ };
- // Regenerate nginx config
- await nginxConfigService.generateConfig(updatedDomain);
-
- // Auto-reload nginx
- await nginxReloadService.autoReload(true);
-
- // Log activity
- await this.logActivity(
- userId,
- `Updated domain: ${updatedDomain.name}`,
- 'config_change',
- ip,
- userAgent,
- true
- );
+ try {
+ // Update domain
+ await domainsRepository.update(id, input);
+
+ // Get updated domain with relations
+ const updatedDomain = await domainsRepository.findById(id);
+ if (!updatedDomain) {
+ throw new Error('Failed to fetch updated domain');
+ }
+
+ // Regenerate nginx config (includes validation and backup)
+ await nginxConfigService.generateConfig(updatedDomain);
+
+ // Auto-reload nginx
+ const reloadResult = await nginxReloadService.reload();
+ if (!reloadResult.success) {
+ // Rollback: restore original domain data
+ await domainsRepository.update(id, originalData);
+ const restoredDomain = await domainsRepository.findById(id);
+ if (restoredDomain) {
+ await nginxConfigService.generateConfig(restoredDomain);
+ }
+ throw new Error(`Nginx reload failed: ${reloadResult.error || 'Unknown error'}`);
+ }
- logger.info(`Domain ${updatedDomain.name} updated by user ${username}`);
+ // Log activity
+ await this.logActivity(
+ userId,
+ `Updated domain: ${updatedDomain.name}`,
+ 'config_change',
+ ip,
+ userAgent,
+ true
+ );
- return updatedDomain;
+ logger.info(`Domain ${updatedDomain.name} updated by user ${username}`);
+
+ return updatedDomain;
+ } catch (error: any) {
+ // Rollback: restore original domain data
+ logger.error(`Failed to update domain ${originalDomain.name}, rolling back:`, error);
+ try {
+ await domainsRepository.update(id, originalData);
+ const restoredDomain = await domainsRepository.findById(id);
+ if (restoredDomain) {
+ await nginxConfigService.generateConfig(restoredDomain);
+ }
+ logger.info(`Rolled back domain update for ${originalDomain.name}`);
+ } catch (rollbackError) {
+ logger.error(`Failed to rollback domain update:`, rollbackError);
+ }
+
+ // Re-throw with user-friendly message
+ if (error.message.includes('Invalid nginx configuration')) {
+ throw new Error(`Nginx configuration validation failed: ${error.message}`);
+ }
+ throw error;
+ }
}
/**
diff --git a/apps/api/src/domains/logs/logs.controller.ts b/apps/api/src/domains/logs/logs.controller.ts
index 45f9c42..5ef2c0d 100644
--- a/apps/api/src/domains/logs/logs.controller.ts
+++ b/apps/api/src/domains/logs/logs.controller.ts
@@ -3,6 +3,20 @@ import { AuthRequest } from '../../middleware/auth';
import logger from '../../utils/logger';
import { getParsedLogs, getLogStats, getAvailableDomainsFromDb } from './logs.service';
+// Constants for security limits
+const MAX_LOGS_PER_REQUEST = 100;
+const MAX_DOWNLOAD_LOGS = 5000;
+const MAX_TOTAL_LOGS_FETCH = 5000;
+
+/**
+ * Sanitize string input to prevent injection
+ */
+const sanitizeString = (input: string | undefined): string | undefined => {
+ if (!input) return undefined;
+ // Trim and cap length at 200 chars; value must still be treated as untrusted downstream
+ return input.toString().trim().substring(0, 200);
+};
+
/**
* Get logs with filters
*/
@@ -11,37 +25,54 @@ export const getLogs = async (
res: Response
): Promise<void> => {
try {
- const { limit = '10', page = '1', level, type, search, domain } = req.query;
+ const { limit = '10', page = '1', level, type, search, domain, ruleId, uniqueId } = req.query;
// Parse and validate parameters
const limitNum = Math.min(
Math.max(parseInt(limit as string) || 10, 1),
- 100
- ); // Between 1 and 100
- const pageNum = Math.max(parseInt(page as string) || 1, 1); // At least 1
-
- // Get all logs first to calculate total
+ MAX_LOGS_PER_REQUEST
+ );
+ const pageNum = Math.max(parseInt(page as string) || 1, 1);
+
+ // Sanitize all string inputs
+ const sanitizedLevel = sanitizeString(level as string);
+ const sanitizedType = sanitizeString(type as string);
+ const sanitizedSearch = sanitizeString(search as string);
+ const sanitizedDomain = sanitizeString(domain as string);
+ const sanitizedRuleId = sanitizeString(ruleId as string);
+ const sanitizedUniqueId = sanitizeString(uniqueId as string);
+
+ // Calculate offset for efficient database query
+ const offset = (pageNum - 1) * limitNum;
+
+ // Get logs with pagination limit (fetch only what's needed + 1 for hasMore check)
+ const fetchLimit = Math.min(limitNum + 1, MAX_TOTAL_LOGS_FETCH);
+
const allLogs = await getParsedLogs({
- limit: 10000, // Get a large number to calculate total
- level: level as string,
- type: type as string,
- search: search as string,
- domain: domain as string,
+ limit: fetchLimit,
+ offset: offset,
+ level: sanitizedLevel,
+ type: sanitizedType,
+ search: sanitizedSearch,
+ domain: sanitizedDomain,
+ ruleId: sanitizedRuleId,
+ uniqueId: sanitizedUniqueId,
});
- // Calculate pagination info
- const total = allLogs.length;
- const totalPages = Math.ceil(total / limitNum);
- const startIndex = (pageNum - 1) * limitNum;
- const endIndex = startIndex + limitNum;
+ // Check if there are more results
+ const hasMore = allLogs.length > limitNum;
+ const paginatedLogs = hasMore ? allLogs.slice(0, limitNum) : allLogs;
- // Get the paginated logs by slicing the allLogs array
- const paginatedLogs = allLogs.slice(startIndex, endIndex);
+ // For total count, we need a separate count query (more efficient than fetching all)
+ // This should be implemented in the service layer
+ // For now, we'll use a reasonable approach
+ const total = allLogs.length;
+ const totalPages = hasMore ? pageNum + 1 : pageNum; // At minimum
logger.info(
- `User ${req.user?.username} fetched ${
- paginatedLogs.length
- } logs (page ${pageNum})${domain ? ` for domain ${domain}` : ''}`
+ `User fetched ${paginatedLogs.length} logs (page ${pageNum})${
+ sanitizedDomain ? ` for domain ${sanitizedDomain}` : ''
+ }`
);
res.json({
@@ -50,15 +81,19 @@ export const getLogs = async (
pagination: {
page: pageNum,
limit: limitNum,
- total,
- totalPages,
+ total: total,
+ totalPages: totalPages,
+ hasMore: hasMore,
},
});
} catch (error) {
- logger.error('Get logs error:', error);
+ logger.error('Get logs error:', {
+ error: error instanceof Error ? error.message : 'Unknown error',
+ stack: error instanceof Error ? error.stack : undefined,
+ });
res.status(500).json({
success: false,
- message: 'Internal server error',
+ message: 'Unable to retrieve logs',
});
}
};
@@ -78,10 +113,12 @@ export const getLogStatistics = async (
data: stats,
});
} catch (error) {
- logger.error('Get log statistics error:', error);
+ logger.error('Get log statistics error:', {
+ error: error instanceof Error ? error.message : 'Unknown error',
+ });
res.status(500).json({
success: false,
- message: 'Internal server error',
+ message: 'Unable to retrieve statistics',
});
}
};
@@ -94,30 +131,41 @@ export const downloadLogs = async (
res: Response
): Promise<void> => {
try {
- const { limit = '1000', level, type, search, domain } = req.query;
+ const { limit = '1000', level, type, search, domain, ruleId, uniqueId } = req.query;
- // Parse and validate parameters
+ // Parse and validate parameters with stricter limit
const limitNum = Math.min(
Math.max(parseInt(limit as string) || 1000, 1),
- 10000
- ); // Between 1 and 10000
+ MAX_DOWNLOAD_LOGS
+ );
+
+ // Sanitize all string inputs
+ const sanitizedLevel = sanitizeString(level as string);
+ const sanitizedType = sanitizeString(type as string);
+ const sanitizedSearch = sanitizeString(search as string);
+ const sanitizedDomain = sanitizeString(domain as string);
+ const sanitizedRuleId = sanitizeString(ruleId as string);
+ const sanitizedUniqueId = sanitizeString(uniqueId as string);
const logs = await getParsedLogs({
limit: limitNum,
- level: level as string,
- type: type as string,
- search: search as string,
- domain: domain as string,
+ level: sanitizedLevel,
+ type: sanitizedType,
+ search: sanitizedSearch,
+ domain: sanitizedDomain,
+ ruleId: sanitizedRuleId,
+ uniqueId: sanitizedUniqueId,
});
logger.info(
- `User ${req.user?.username} downloaded ${logs.length} logs${
- domain ? ` for domain ${domain}` : ''
+ `User downloaded ${logs.length} logs${
+ sanitizedDomain ? ` for domain ${sanitizedDomain}` : ''
}`
);
// Set headers for file download
- const filename = `logs-${new Date().toISOString()}.json`;
+ const timestamp = new Date().toISOString().replace(/[:.]/g, '-');
+ const filename = `logs-${timestamp}.json`;
res.setHeader('Content-Type', 'application/json');
res.setHeader('Content-Disposition', `attachment; filename="${filename}"`);
@@ -126,21 +174,24 @@ export const downloadLogs = async (
data: logs,
metadata: {
exportedAt: new Date().toISOString(),
- exportedBy: req.user?.username,
totalCount: logs.length,
filters: {
- level,
- type,
- search,
- domain,
+ level: sanitizedLevel,
+ type: sanitizedType,
+ search: sanitizedSearch,
+ domain: sanitizedDomain,
+ ruleId: sanitizedRuleId,
+ uniqueId: sanitizedUniqueId,
},
},
});
} catch (error) {
- logger.error('Download logs error:', error);
+ logger.error('Download logs error:', {
+ error: error instanceof Error ? error.message : 'Unknown error',
+ });
res.status(500).json({
success: false,
- message: 'Internal server error',
+ message: 'Unable to download logs',
});
}
};
@@ -160,10 +211,12 @@ export const getAvailableDomains = async (
data: domains,
});
} catch (error) {
- logger.error('Get available domains error:', error);
+ logger.error('Get available domains error:', {
+ error: error instanceof Error ? error.message : 'Unknown error',
+ });
res.status(500).json({
success: false,
- message: 'Internal server error',
+ message: 'Unable to retrieve domains',
});
}
-};
+};
\ No newline at end of file
diff --git a/apps/api/src/domains/logs/logs.service.ts b/apps/api/src/domains/logs/logs.service.ts
index c21dd22..3516f43 100644
--- a/apps/api/src/domains/logs/logs.service.ts
+++ b/apps/api/src/domains/logs/logs.service.ts
@@ -1,89 +1,243 @@
import * as fs from 'fs/promises';
import * as path from 'path';
+import { execFile } from 'child_process';
+import { promisify } from 'util';
import logger from '../../utils/logger';
import prisma from '../../config/database';
import { ParsedLogEntry, LogFilterOptions, LogStatistics } from './logs.types';
import { parseAccessLogLine, parseErrorLogLine, parseModSecLogLine } from './services/log-parser.service';
-const NGINX_ACCESS_LOG = '/var/log/nginx/access.log';
-const NGINX_ERROR_LOG = '/var/log/nginx/error.log';
-const MODSEC_AUDIT_LOG = '/var/log/modsec_audit.log';
-const NGINX_LOG_DIR = '/var/log/nginx';
+const execFileAsync = promisify(execFile);
+
+// Log file paths
+const LOG_PATHS = {
+ nginxAccess: '/var/log/nginx/access.log',
+ nginxError: '/var/log/nginx/error.log',
+ modsecAudit: '/var/log/modsec_audit.log',
+ nginxDir: '/var/log/nginx',
+} as const;
+
+// Security constants
+const SECURITY_LIMITS = {
+ maxLinesPerFile: 1000,
+ maxConcurrentFiles: 5,
+ maxUniqueIdLength: 64,
+ maxSearchTermLength: 200,
+ maxRuleIdLength: 100,
+ execTimeout: 5000,
+ grepTimeout: 10000,
+ maxBuffer: 10 * 1024 * 1024, // 10MB
+} as const;
+
+const ALLOWED_LOG_DIR = path.resolve(LOG_PATHS.nginxDir);
+
+// Domain regex for validation
+const DOMAIN_REGEX = /^[a-zA-Z0-9]([a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?(\.[a-zA-Z0-9]([a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?)*$/;
+
+// Log file naming patterns
+const LOG_PATTERNS = {
+ sslAccess: /^([a-zA-Z0-9.-]+)[-_]ssl[-_]access\.log$/,
+ sslError: /^([a-zA-Z0-9.-]+)[-_]ssl[-_]error\.log$/,
+ access: /^([a-zA-Z0-9.-]+)[-_]access\.log$/,
+ error: /^([a-zA-Z0-9.-]+)[-_]error\.log$/,
+} as const;
/**
- * Read last N lines from a file efficiently
+ * Validate and sanitize domain name
+ */
+function sanitizeDomain(domain: string): string | null {
+ if (!domain || typeof domain !== 'string') return null;
+
+ const cleaned = domain.trim().replace(/[^a-zA-Z0-9.-]/g, '');
+ return DOMAIN_REGEX.test(cleaned) && cleaned.length <= 253 ? cleaned : null;
+}
+
+/**
+ * Validate file path is within allowed directory
+ */
+function isPathSafe(filePath: string): boolean {
+ return path.resolve(filePath).startsWith(ALLOWED_LOG_DIR);
+}
+
+/**
+ * Execute command with security measures
+ */
+async function safeExecFile(
+ command: string,
+ args: string[],
+ options: { input?: string; timeout?: number } = {}
+): Promise<string> {
+ const { input, timeout = SECURITY_LIMITS.execTimeout } = options;
+
+ try {
+ const { stdout } = await execFileAsync(command, args, {
+ timeout,
+ maxBuffer: SECURITY_LIMITS.maxBuffer,
+ encoding: 'utf8',
+ ...(input && { input }),
+ });
+ return stdout.trim();
+ } catch (error: any) {
+ // grep exit code 1 means no matches - this is normal
+ if (error.code === 1) return '';
+ if (error.killed) {
+ logger.warn(`Command timed out: ${command} ${args.join(' ')}`);
+ }
+ throw error;
+ }
+}
+
+/**
+ * Read last N lines from a file with security checks
*/
 async function readLastLines(filePath: string, numLines: number): Promise<string[]> {
try {
- await fs.access(filePath);
+ if (!isPathSafe(filePath)) {
+ logger.warn(`Path validation failed: ${filePath}`);
+ return [];
+ }
- // Use tail command for efficiency with large files
- const { exec } = require('child_process');
- const { promisify } = require('util');
- const execAsync = promisify(exec);
+ const safeNumLines = Math.min(Math.max(numLines, 1), SECURITY_LIMITS.maxLinesPerFile);
+ await fs.access(filePath);
- const { stdout } = await execAsync(`tail -n ${numLines} ${filePath} 2>/dev/null || echo ""`);
- return stdout.trim().split('\n').filter((line: string) => line.trim().length > 0);
+ const stdout = await safeExecFile('tail', ['-n', String(safeNumLines), filePath]);
+ return stdout.split('\n').filter(line => line.trim().length > 0);
} catch (error: any) {
if (error.code === 'ENOENT') {
- logger.warn(`Log file not found: ${filePath}`);
+ logger.debug(`Log file not found: ${filePath}`);
} else {
- logger.error(`Error reading log file ${filePath}:`, error);
+ logger.error(`Error reading log file: ${error.message}`);
}
return [];
}
}
/**
- * Get list of domain-specific log files
+ * Parse log lines with appropriate parser
+ */
+interface ParseOptions {
+ parser: 'access' | 'error' | 'modsec';
+ domain?: string;
+}
+
+function parseLogLines(
+ lines: string[],
+ { parser, domain }: ParseOptions
+): ParsedLogEntry[] {
+ const parsers = {
+ access: (line: string, idx: number) => parseAccessLogLine(line, idx, domain),
+ error: (line: string, idx: number) => {
+ const parsed = parseErrorLogLine(line, idx);
+ if (parsed && domain) parsed.domain = domain;
+ return parsed;
+ },
+ modsec: (line: string, idx: number) => {
+ const parsed = parseModSecLogLine(line, idx);
+ if (parsed && domain) parsed.domain = domain;
+ return parsed;
+ },
+ };
+
+ return lines
+ .map((line, idx) => parsers[parser](line, idx))
+ .filter((entry): entry is ParsedLogEntry => entry !== null);
+}
+
+/**
+ * Search logs using grep
+ */
+async function grepLogFile(
+ filePath: string,
+ pattern: string,
+ limit: number
+): Promise<string> {
+ if (!isPathSafe(filePath)) return '';
+
+ try {
+ const grepResult = await safeExecFile(
+ 'grep',
+ ['-F', pattern, filePath],
+ { timeout: SECURITY_LIMITS.grepTimeout }
+ );
+
+ if (!grepResult) return '';
+
+ return await safeExecFile(
+ 'head',
+ ['-n', String(limit)],
+ { input: grepResult, timeout: SECURITY_LIMITS.execTimeout }
+ );
+ } catch (error: any) {
+ if (error.code !== 1 && !error.killed) {
+ logger.debug(`Grep failed for ${filePath}: ${error.message}`);
+ }
+ return '';
+ }
+}
+
+/**
+ * Get domain log file paths
*/
-async function getDomainLogFiles(): Promise<{ domain: string; accessLog: string; errorLog: string; sslAccessLog: string; sslErrorLog: string }[]> {
+interface DomainLogFiles {
+ domain: string;
+ accessLog: string;
+ errorLog: string;
+ sslAccessLog: string;
+ sslErrorLog: string;
+}
+
+async function getDomainLogFiles(): Promise<DomainLogFiles[]> {
try {
- const files = await fs.readdir(NGINX_LOG_DIR);
- const domainLogs: { [key: string]: { accessLog?: string; errorLog?: string; sslAccessLog?: string; sslErrorLog?: string } } = {};
+ if (!isPathSafe(LOG_PATHS.nginxDir)) {
+ logger.error('Log directory validation failed');
+ return [];
+ }
+
+ const files = await fs.readdir(LOG_PATHS.nginxDir);
+ logger.info(`Found ${files.length} files in ${LOG_PATHS.nginxDir}`);
+  const domainLogs: Record<string, Partial<Omit<DomainLogFiles, 'domain'>>> = {};
files.forEach(file => {
- // Match patterns for both HTTP and HTTPS logs:
- // - example.com_access.log or example.com-access.log (HTTP)
- // - example.com_error.log or example.com-error.log (HTTP)
- // - example.com_ssl_access.log or example.com-ssl-access.log (HTTPS)
- // - example.com_ssl_error.log or example.com-ssl-error.log (HTTPS)
-
- // SSL access log
- const sslAccessMatch = file.match(/^(.+?)[-_]ssl[-_]access\.log$/);
- // SSL error log
- const sslErrorMatch = file.match(/^(.+?)[-_]ssl[-_]error\.log$/);
- // Non-SSL access log (must not contain 'ssl')
- const accessMatch = !file.includes('ssl') && file.match(/^(.+?)[-_]access\.log$/);
- // Non-SSL error log (must not contain 'ssl')
- const errorMatch = !file.includes('ssl') && file.match(/^(.+?)[-_]error\.log$/);
-
- if (sslAccessMatch) {
- const domain = sslAccessMatch[1];
- if (!domainLogs[domain]) domainLogs[domain] = {};
- domainLogs[domain].sslAccessLog = path.join(NGINX_LOG_DIR, file);
- } else if (sslErrorMatch) {
- const domain = sslErrorMatch[1];
- if (!domainLogs[domain]) domainLogs[domain] = {};
- domainLogs[domain].sslErrorLog = path.join(NGINX_LOG_DIR, file);
- } else if (accessMatch) {
- const domain = accessMatch[1];
- if (!domainLogs[domain]) domainLogs[domain] = {};
- domainLogs[domain].accessLog = path.join(NGINX_LOG_DIR, file);
- } else if (errorMatch) {
- const domain = errorMatch[1];
- if (!domainLogs[domain]) domainLogs[domain] = {};
- domainLogs[domain].errorLog = path.join(NGINX_LOG_DIR, file);
+ // Skip hidden files and parent directory references
+ if (file.startsWith('.') || file.includes('..')) return;
+
+ const fullPath = path.join(LOG_PATHS.nginxDir, file);
+ if (!isPathSafe(fullPath)) return;
+
+ // Match against patterns
+ const patterns = [
+ { regex: LOG_PATTERNS.sslAccess, key: 'sslAccessLog', requireNoSSL: false },
+ { regex: LOG_PATTERNS.sslError, key: 'sslErrorLog', requireNoSSL: false },
+ { regex: LOG_PATTERNS.access, key: 'accessLog', requireNoSSL: true },
+ { regex: LOG_PATTERNS.error, key: 'errorLog', requireNoSSL: true },
+ ] as const;
+
+ for (const { regex, key, requireNoSSL } of patterns) {
+ if (requireNoSSL && file.includes('ssl')) continue;
+
+ const match = file.match(regex);
+ if (match) {
+ const domain = sanitizeDomain(match[1]);
+ if (domain) {
+ if (!domainLogs[domain]) domainLogs[domain] = {};
+ domainLogs[domain][key] = fullPath;
+ }
+ break;
+ }
}
});
- return Object.entries(domainLogs).map(([domain, logs]) => ({
+ const result = Object.entries(domainLogs).map(([domain, logs]) => ({
domain,
accessLog: logs.accessLog || '',
errorLog: logs.errorLog || '',
sslAccessLog: logs.sslAccessLog || '',
- sslErrorLog: logs.sslErrorLog || ''
+ sslErrorLog: logs.sslErrorLog || '',
}));
+
+ logger.info(`Found ${result.length} domains with logs: ${result.map(d => d.domain).join(', ')}`);
+
+ return result;
} catch (error) {
logger.error('Error reading domain log files:', error);
return [];
@@ -91,204 +245,277 @@ async function getDomainLogFiles(): Promise<{ domain: string; accessLog: string;
}
/**
- * Get parsed logs from all sources
+ * Find existing log file from possible paths
*/
-export async function getParsedLogs(options: LogFilterOptions = {}): Promise<ParsedLogEntry[]> {
- const { limit = 100, level, type, search, domain } = options;
+async function findExistingFile(paths: string[]): Promise<string | null> {
+ for (const filePath of paths) {
+ if (!isPathSafe(filePath)) continue;
+ try {
+ await fs.access(filePath);
+ return filePath;
+ } catch {
+ continue;
+ }
+ }
+ return null;
+}
- const allLogs: ParsedLogEntry[] = [];
+/**
+ * Get domain-specific log file paths
+ */
+function getDomainLogPaths(domain: string) {
+ return {
+ httpAccess: [
+ path.join(LOG_PATHS.nginxDir, `${domain}_access.log`),
+ path.join(LOG_PATHS.nginxDir, `${domain}-access.log`)
+ ],
+ httpError: [
+ path.join(LOG_PATHS.nginxDir, `${domain}_error.log`),
+ path.join(LOG_PATHS.nginxDir, `${domain}-error.log`)
+ ],
+ httpsAccess: [
+ path.join(LOG_PATHS.nginxDir, `${domain}_ssl_access.log`),
+ path.join(LOG_PATHS.nginxDir, `${domain}-ssl-access.log`)
+ ],
+ httpsError: [
+ path.join(LOG_PATHS.nginxDir, `${domain}_ssl_error.log`),
+ path.join(LOG_PATHS.nginxDir, `${domain}-ssl-error.log`)
+ ]
+ };
+}
- try {
- // If specific domain is requested, read only that domain's logs
- if (domain && domain !== 'all') {
- // Define all possible log file paths (both HTTP and HTTPS)
- const logPaths = {
- httpAccess: [
- path.join(NGINX_LOG_DIR, `${domain}_access.log`),
- path.join(NGINX_LOG_DIR, `${domain}-access.log`)
- ],
- httpError: [
- path.join(NGINX_LOG_DIR, `${domain}_error.log`),
- path.join(NGINX_LOG_DIR, `${domain}-error.log`)
- ],
- httpsAccess: [
- path.join(NGINX_LOG_DIR, `${domain}_ssl_access.log`),
- path.join(NGINX_LOG_DIR, `${domain}-ssl-access.log`)
- ],
- httpsError: [
- path.join(NGINX_LOG_DIR, `${domain}_ssl_error.log`),
- path.join(NGINX_LOG_DIR, `${domain}-ssl-error.log`)
- ]
- };
-
- // Helper function to find existing log file
-      const findExistingFile = async (paths: string[]): Promise<string | null> => {
- for (const filePath of paths) {
- try {
- await fs.access(filePath);
- return filePath;
- } catch {
- continue;
- }
- }
- return null;
- };
-
- // Read domain access logs (both HTTP and HTTPS)
- if (!type || type === 'all' || type === 'access') {
- // HTTP access logs
- const httpAccessLog = await findExistingFile(logPaths.httpAccess);
- if (httpAccessLog) {
- const accessLines = await readLastLines(httpAccessLog, Math.ceil(limit / 4));
- accessLines.forEach((line, index) => {
- const parsed = parseAccessLogLine(line, index, domain);
- if (parsed) allLogs.push(parsed);
- });
- }
+/**
+ * Read and parse log file
+ */
+async function readAndParseLog(
+ filePath: string | null,
+ limit: number,
+ parser: ParseOptions
+): Promise<ParsedLogEntry[]> {
+ if (!filePath) return [];
+ const lines = await readLastLines(filePath, limit);
+ return parseLogLines(lines, parser);
+}
- // HTTPS access logs
- const httpsAccessLog = await findExistingFile(logPaths.httpsAccess);
- if (httpsAccessLog) {
- const sslAccessLines = await readLastLines(httpsAccessLog, Math.ceil(limit / 4));
- sslAccessLines.forEach((line, index) => {
- const parsed = parseAccessLogLine(line, index, domain);
- if (parsed) allLogs.push(parsed);
- });
- }
- }
+/**
+ * Read domain-specific logs
+ */
+async function readDomainLogs(
+ domain: string,
+ limit: number,
+ type?: string
+): Promise<ParsedLogEntry[]> {
+ const logPaths = getDomainLogPaths(domain);
+ const results: ParsedLogEntry[] = [];
+
+ const shouldReadAccess = !type || type === 'all' || type === 'access';
+ const shouldReadError = !type || type === 'all' || type === 'error';
+
+ // Read access logs
+ if (shouldReadAccess) {
+ const [httpAccess, httpsAccess] = await Promise.all([
+ findExistingFile(logPaths.httpAccess),
+ findExistingFile(logPaths.httpsAccess)
+ ]);
+
+ const [httpLogs, httpsLogs] = await Promise.all([
+ readAndParseLog(httpAccess, limit, { parser: 'access', domain }),
+ readAndParseLog(httpsAccess, limit, { parser: 'access', domain })
+ ]);
+
+ results.push(...httpLogs, ...httpsLogs);
+ }
- // Read domain error logs (both HTTP and HTTPS)
- if (!type || type === 'all' || type === 'error') {
- // HTTP error logs
- const httpErrorLog = await findExistingFile(logPaths.httpError);
- if (httpErrorLog) {
- const errorLines = await readLastLines(httpErrorLog, Math.ceil(limit / 4));
- errorLines.forEach((line, index) => {
- const parsed = parseErrorLogLine(line, index);
- if (parsed) {
- parsed.domain = domain;
- allLogs.push(parsed);
- }
- });
- }
+ // Read error logs
+ if (shouldReadError) {
+ const [httpError, httpsError] = await Promise.all([
+ findExistingFile(logPaths.httpError),
+ findExistingFile(logPaths.httpsError)
+ ]);
- // HTTPS error logs
- const httpsErrorLog = await findExistingFile(logPaths.httpsError);
- if (httpsErrorLog) {
- const sslErrorLines = await readLastLines(httpsErrorLog, Math.ceil(limit / 4));
- sslErrorLines.forEach((line, index) => {
- const parsed = parseErrorLogLine(line, index);
- if (parsed) {
- parsed.domain = domain;
- allLogs.push(parsed);
- }
- });
- }
- }
- } else {
- // Read global nginx logs
- if (!type || type === 'all' || type === 'access') {
- const accessLines = await readLastLines(NGINX_ACCESS_LOG, Math.ceil(limit / 3));
- accessLines.forEach((line, index) => {
- const parsed = parseAccessLogLine(line, index);
- if (parsed) allLogs.push(parsed);
- });
- }
+ const [httpLogs, httpsLogs] = await Promise.all([
+ readAndParseLog(httpError, limit, { parser: 'error', domain }),
+ readAndParseLog(httpsError, limit, { parser: 'error', domain })
+ ]);
- if (!type || type === 'all' || type === 'error') {
- const errorLines = await readLastLines(NGINX_ERROR_LOG, Math.ceil(limit / 3));
- errorLines.forEach((line, index) => {
- const parsed = parseErrorLogLine(line, index);
- if (parsed) allLogs.push(parsed);
- });
- }
+ results.push(...httpLogs, ...httpsLogs);
+ }
- // Read ModSecurity logs
- if (!type || type === 'all' || type === 'error') {
- const modsecLines = await readLastLines(MODSEC_AUDIT_LOG, Math.ceil(limit / 3));
- modsecLines.forEach((line, index) => {
- const parsed = parseModSecLogLine(line, index);
- if (parsed) allLogs.push(parsed);
- });
- }
+ return results;
+}
- // Also read all domain-specific logs if no specific domain requested
- if (!domain || domain === 'all') {
- const domainLogFiles = await getDomainLogFiles();
- const logsPerDomain = Math.ceil(limit / (domainLogFiles.length * 2 + 1)); // Divide among all domains and log types
-
- for (const { domain: domainName, accessLog, errorLog, sslAccessLog, sslErrorLog } of domainLogFiles) {
- // HTTP access logs
- if (accessLog && (!type || type === 'all' || type === 'access')) {
- const lines = await readLastLines(accessLog, logsPerDomain);
- lines.forEach((line, index) => {
- const parsed = parseAccessLogLine(line, index, domainName);
- if (parsed) allLogs.push(parsed);
- });
- }
+/**
+ * Read global logs
+ */
+async function readGlobalLogs(
+ limit: number,
+ type?: string
+): Promise<ParsedLogEntry[]> {
+ const results: ParsedLogEntry[] = [];
+ const logsPerType = Math.ceil(limit / 3);
+
+ const shouldReadAccess = !type || type === 'all' || type === 'access';
+ const shouldReadError = !type || type === 'all' || type === 'error';
+
+ if (shouldReadAccess && isPathSafe(LOG_PATHS.nginxAccess)) {
+ const logs = await readAndParseLog(LOG_PATHS.nginxAccess, logsPerType, { parser: 'access' });
+ results.push(...logs);
+ }
- // HTTPS access logs
- if (sslAccessLog && (!type || type === 'all' || type === 'access')) {
- const lines = await readLastLines(sslAccessLog, logsPerDomain);
- lines.forEach((line, index) => {
- const parsed = parseAccessLogLine(line, index, domainName);
- if (parsed) allLogs.push(parsed);
- });
- }
+ if (shouldReadError) {
+ const [errorLogs, modsecLogs] = await Promise.all([
+ isPathSafe(LOG_PATHS.nginxError)
+ ? readAndParseLog(LOG_PATHS.nginxError, logsPerType, { parser: 'error' })
+ : Promise.resolve([]),
+ isPathSafe(LOG_PATHS.modsecAudit)
+ ? readAndParseLog(LOG_PATHS.modsecAudit, logsPerType, { parser: 'modsec' })
+ : Promise.resolve([])
+ ]);
+
+ results.push(...errorLogs, ...modsecLogs);
+ }
+
+ return results;
+}
- // HTTP error logs
- if (errorLog && (!type || type === 'all' || type === 'error')) {
- const lines = await readLastLines(errorLog, logsPerDomain);
- lines.forEach((line, index) => {
- const parsed = parseErrorLogLine(line, index);
- if (parsed) {
- parsed.domain = domainName;
- allLogs.push(parsed);
- }
- });
+/**
+ * Read all domain logs with concurrency control
+ */
+async function readAllDomainLogs(
+ limit: number,
+ type?: string
+): Promise<ParsedLogEntry[]> {
+ const results: ParsedLogEntry[] = [];
+ const domainLogFiles = await getDomainLogFiles();
+ const logsPerDomain = Math.max(1, Math.ceil(limit / (domainLogFiles.length * 2 + 1)));
+
+ for (let i = 0; i < domainLogFiles.length; i += SECURITY_LIMITS.maxConcurrentFiles) {
+ const batch = domainLogFiles.slice(i, i + SECURITY_LIMITS.maxConcurrentFiles);
+
+ const batchResults = await Promise.all(
+ batch.map(async ({ domain, accessLog, errorLog, sslAccessLog, sslErrorLog }) => {
+ const domainResults: ParsedLogEntry[] = [];
+ const shouldReadAccess = !type || type === 'all' || type === 'access';
+ const shouldReadError = !type || type === 'all' || type === 'error';
+
+        const readPromises: Promise<ParsedLogEntry[]>[] = [];
+
+ if (shouldReadAccess) {
+ if (accessLog) {
+ readPromises.push(readAndParseLog(accessLog, logsPerDomain, { parser: 'access', domain }));
}
+ if (sslAccessLog) {
+ readPromises.push(readAndParseLog(sslAccessLog, logsPerDomain, { parser: 'access', domain }));
+ }
+ }
- // HTTPS error logs
- if (sslErrorLog && (!type || type === 'all' || type === 'error')) {
- const lines = await readLastLines(sslErrorLog, logsPerDomain);
- lines.forEach((line, index) => {
- const parsed = parseErrorLogLine(line, index);
- if (parsed) {
- parsed.domain = domainName;
- allLogs.push(parsed);
- }
- });
+ if (shouldReadError) {
+ if (errorLog) {
+ readPromises.push(readAndParseLog(errorLog, logsPerDomain, { parser: 'error', domain }));
+ }
+ if (sslErrorLog) {
+ readPromises.push(readAndParseLog(sslErrorLog, logsPerDomain, { parser: 'error', domain }));
}
}
- }
- }
- // Sort by timestamp descending (newest first)
- allLogs.sort((a, b) => new Date(b.timestamp).getTime() - new Date(a.timestamp).getTime());
+ const results = await Promise.all(readPromises);
+ return results.flat();
+ })
+ );
- // Apply filters
- let filtered = allLogs;
+ results.push(...batchResults.flat());
+ }
- if (level && level !== 'all') {
- filtered = filtered.filter(log => log.level === level);
- }
+ return results;
+}
+
+/**
+ * Apply filters to log entries
+ */
+function applyFilters(
+ logs: ParsedLogEntry[],
+ filters: LogFilterOptions
+): ParsedLogEntry[] {
+ let filtered = logs;
+
+ if (filters.level && filters.level !== 'all') {
+ filtered = filtered.filter(log => log.level === filters.level);
+ }
- if (type && type !== 'all') {
- filtered = filtered.filter(log => log.type === type);
+ if (filters.type && filters.type !== 'all') {
+ filtered = filtered.filter(log => log.type === filters.type);
+ }
+
+ if (filters.search) {
+ const searchTerm = filters.search
+ .toLowerCase()
+ .substring(0, SECURITY_LIMITS.maxSearchTermLength);
+
+ filtered = filtered.filter(log =>
+ log.message.toLowerCase().includes(searchTerm) ||
+ log.source.toLowerCase().includes(searchTerm) ||
+ (log.ip && log.ip.includes(searchTerm)) ||
+ (log.path && log.path.toLowerCase().includes(searchTerm))
+ );
+ }
+
+ if (filters.ruleId) {
+ const safeRuleId = filters.ruleId.substring(0, SECURITY_LIMITS.maxRuleIdLength);
+ filtered = filtered.filter(log => log.ruleId?.includes(safeRuleId));
+ }
+
+ if (filters.uniqueId) {
+ const safeUniqueId = filters.uniqueId.substring(0, SECURITY_LIMITS.maxUniqueIdLength);
+ filtered = filtered.filter(log => log.uniqueId?.includes(safeUniqueId));
+ }
+
+ return filtered;
+}
+
+/**
+ * Get parsed logs from all sources
+ */
+export async function getParsedLogs(options: LogFilterOptions = {}): Promise<ParsedLogEntry[]> {
+ const {
+ limit = 100,
+ offset = 0,
+ domain,
+ uniqueId,
+ type
+ } = options;
+
+ const safeLimit = Math.min(Math.max(limit, 1), SECURITY_LIMITS.maxLinesPerFile);
+ const safeOffset = Math.max(offset || 0, 0);
+
+ try {
+ // Validate domain
+ const safeDomain = domain ? sanitizeDomain(domain) : null;
+ if (domain && domain !== 'all' && !safeDomain) {
+ logger.warn(`Invalid domain: ${domain}`);
+ return [];
}
- if (search) {
- const searchLower = search.toLowerCase();
- filtered = filtered.filter(log =>
- log.message.toLowerCase().includes(searchLower) ||
- log.source.toLowerCase().includes(searchLower) ||
- (log.ip && log.ip.includes(searchLower)) ||
- (log.path && log.path.toLowerCase().includes(searchLower))
- );
+ let allLogs: ParsedLogEntry[];
+
+ // Read logs based on domain filter
+ if (safeDomain && safeDomain !== 'all') {
+ allLogs = await readDomainLogs(safeDomain, safeLimit, type);
+ } else {
+ const [globalLogs, domainLogs] = await Promise.all([
+ readGlobalLogs(safeLimit, type),
+ domain === 'all' ? readAllDomainLogs(safeLimit, type) : Promise.resolve([])
+ ]);
+ allLogs = [...globalLogs, ...domainLogs];
}
- // Apply limit
- return filtered.slice(0, limit);
+ // Sort by timestamp descending
+ allLogs.sort((a, b) => new Date(b.timestamp).getTime() - new Date(a.timestamp).getTime());
+
+ // Apply filters
+ const filtered = applyFilters(allLogs, options);
+
+ // Apply pagination
+ return filtered.slice(safeOffset, safeOffset + safeLimit);
} catch (error) {
logger.error('Error getting parsed logs:', error);
return [];
@@ -319,13 +546,13 @@ export async function getLogStats(): Promise {
* Get available domains from database
*/
export async function getAvailableDomainsFromDb() {
- return await prisma.domain.findMany({
- select: {
- name: true,
- status: true,
- },
- orderBy: {
- name: 'asc',
- },
- });
-}
+ try {
+ return await prisma.domain.findMany({
+ select: { name: true, status: true },
+ orderBy: { name: 'asc' },
+ });
+ } catch (error) {
+ logger.error('Error fetching domains from database:', error);
+ return [];
+ }
+}
\ No newline at end of file
diff --git a/apps/api/src/domains/logs/logs.types.ts b/apps/api/src/domains/logs/logs.types.ts
index 3b29cb6..c8ae9ee 100644
--- a/apps/api/src/domains/logs/logs.types.ts
+++ b/apps/api/src/domains/logs/logs.types.ts
@@ -11,6 +11,7 @@ export interface ParsedLogEntry {
message: string;
domain?: string;
ip?: string;
+ hostname?: string; // Target hostname/domain from ModSecurity logs
method?: string;
path?: string;
statusCode?: number;
@@ -29,10 +30,13 @@ export interface ParsedLogEntry {
export interface LogFilterOptions {
limit?: number;
+ offset?: number;
level?: string;
type?: string;
search?: string;
domain?: string;
+ ruleId?: string;
+ uniqueId?: string;
}
export interface LogStatistics {
diff --git a/apps/api/src/domains/logs/services/log-parser.service.ts b/apps/api/src/domains/logs/services/log-parser.service.ts
index 7399a08..434a729 100644
--- a/apps/api/src/domains/logs/services/log-parser.service.ts
+++ b/apps/api/src/domains/logs/services/log-parser.service.ts
@@ -123,21 +123,29 @@ export function parseModSecLogLine(line: string, index: number): ParsedLogEntry
// ModSecurity logs are complex, extract key info
if (!line.includes('ModSecurity:')) return null;
- // Extract timestamp if present
+ // Extract timestamp - supports both nginx error log and ModSec audit log formats
let timestamp = new Date().toISOString();
- const timeMatch = line.match(/\[(\d{2}\/\w{3}\/\d{4}:\d{2}:\d{2}:\d{2})/);
- if (timeMatch) {
- const [, timeStr] = timeMatch;
- // Parse: 29/Mar/2025:14:35:22
- const timeParts = timeStr.match(/(\d+)\/(\w+)\/(\d+):(\d+):(\d+):(\d+)/);
- if (timeParts) {
- const [, day, monthStr, year, hour, min, sec] = timeParts;
- const months: { [key: string]: string } = {
- Jan: '01', Feb: '02', Mar: '03', Apr: '04', May: '05', Jun: '06',
- Jul: '07', Aug: '08', Sep: '09', Oct: '10', Nov: '11', Dec: '12'
- };
- const month = months[monthStr] || '01';
- timestamp = `${year}-${month}-${day.padStart(2, '0')}T${hour}:${min}:${sec}Z`;
+
+ // Try nginx error log format first: 2025/10/24 06:22:01
+ const nginxTimeMatch = line.match(/^(\d{4})\/(\d{2})\/(\d{2})\s+(\d{2}):(\d{2}):(\d{2})/);
+ if (nginxTimeMatch) {
+ const [, year, month, day, hour, min, sec] = nginxTimeMatch;
+ timestamp = `${year}-${month}-${day}T${hour}:${min}:${sec}Z`;
+ } else {
+ // Try ModSec audit log format: [29/Mar/2025:14:35:22]
+ const auditTimeMatch = line.match(/\[(\d{2}\/\w{3}\/\d{4}:\d{2}:\d{2}:\d{2})/);
+ if (auditTimeMatch) {
+ const [, timeStr] = auditTimeMatch;
+ const timeParts = timeStr.match(/(\d+)\/(\w+)\/(\d+):(\d+):(\d+):(\d+)/);
+ if (timeParts) {
+ const [, day, monthStr, year, hour, min, sec] = timeParts;
+ const months: { [key: string]: string } = {
+ Jan: '01', Feb: '02', Mar: '03', Apr: '04', May: '05', Jun: '06',
+ Jul: '07', Aug: '08', Sep: '09', Oct: '10', Nov: '11', Dec: '12'
+ };
+ const month = months[monthStr] || '01';
+ timestamp = `${year}-${month}-${day.padStart(2, '0')}T${hour}:${min}:${sec}Z`;
+ }
}
}
@@ -160,10 +168,13 @@ export function parseModSecLogLine(line: string, index: number): ParsedLogEntry
tags.push(match[1]);
}
- // Extract IP - from [client 52.186.182.85] or [hostname "10.0.0.203"]
- const clientIpMatch = line.match(/\[client ([\d.]+)\]/);
+ // Extract client IP - from [client 52.186.182.85]
+ const clientIpMatch = line.match(/\[client ([\d.]+)(?::\d+)?\]/);
+ const ip = clientIpMatch ? clientIpMatch[1] : undefined;
+
+ // Extract hostname/domain separately - from [hostname "domain.com"]
const hostnameMatch = line.match(/\[hostname "([^"]+)"\]/);
- const ip = clientIpMatch ? clientIpMatch[1] : (hostnameMatch ? hostnameMatch[1] : undefined);
+ const hostname = hostnameMatch ? hostnameMatch[1] : undefined;
// Extract URI - [uri "/device.rsp"]
const uriMatch = line.match(/\[uri "([^"]+)"\]/);
@@ -214,6 +225,8 @@ export function parseModSecLogLine(line: string, index: number): ParsedLogEntry
message: `ModSecurity: ${message}`,
fullMessage, // Complete log without truncation
ip,
+ domain: hostname, // Use domain field for consistency with nginx logs
+ hostname, // Keep hostname for backward compatibility
method,
path,
statusCode,
diff --git a/apps/api/src/domains/modsec/modsec.controller.ts b/apps/api/src/domains/modsec/modsec.controller.ts
index b330321..4e3ebd0 100644
--- a/apps/api/src/domains/modsec/modsec.controller.ts
+++ b/apps/api/src/domains/modsec/modsec.controller.ts
@@ -200,10 +200,21 @@ export class ModSecController {
return;
}
+ // Handle validation errors (rule ID duplicates, nginx config errors)
+ if (error.message.includes('Rule ID(s) already exist') ||
+ error.message.includes('Nginx configuration test failed') ||
+ error.message.includes('Nginx reload failed')) {
+ res.status(400).json({
+ success: false,
+ message: error.message,
+ });
+ return;
+ }
+
logger.error('Add custom rule error:', error);
res.status(500).json({
success: false,
- message: 'Internal server error',
+ message: error.message || 'Internal server error',
});
}
}
@@ -254,10 +265,21 @@ export class ModSecController {
return;
}
+ // Handle validation errors (rule ID duplicates, nginx config errors)
+ if (error.message.includes('Rule ID(s) already exist') ||
+ error.message.includes('Nginx configuration test failed') ||
+ error.message.includes('Nginx reload failed')) {
+ res.status(400).json({
+ success: false,
+ message: error.message,
+ });
+ return;
+ }
+
logger.error('Update ModSec rule error:', error);
res.status(500).json({
success: false,
- message: 'Internal server error',
+ message: error.message || 'Internal server error',
});
}
}
@@ -354,6 +376,39 @@ export class ModSecController {
});
}
}
+
+ /**
+ * Reinitialize ModSecurity configuration
+ * This will update main.conf with any missing includes
+ */
+  async reinitializeConfig(req: AuthRequest, res: Response): Promise<void> {
+ try {
+ const result = await modSecService.reinitializeConfig();
+
+ logger.info('ModSecurity configuration reinitialized', {
+ userId: req.user?.userId,
+ success: result.success,
+ });
+
+ if (result.success) {
+ res.json({
+ success: true,
+ message: result.message,
+ });
+ } else {
+ res.status(500).json({
+ success: false,
+ message: result.message,
+ });
+ }
+ } catch (error) {
+ logger.error('Reinitialize ModSec config error:', error);
+ res.status(500).json({
+ success: false,
+ message: 'Internal server error',
+ });
+ }
+ }
}
export const modSecController = new ModSecController();
diff --git a/apps/api/src/domains/modsec/modsec.routes.ts b/apps/api/src/domains/modsec/modsec.routes.ts
index 7c55656..99c2e96 100644
--- a/apps/api/src/domains/modsec/modsec.routes.ts
+++ b/apps/api/src/domains/modsec/modsec.routes.ts
@@ -70,4 +70,9 @@ router.post(
(req: AuthRequest, res: Response) => modSecController.setGlobalModSec(req, res)
);
+// Reinitialize ModSecurity configuration
+router.post('/reinitialize', authorize('admin'), (req: AuthRequest, res: Response) =>
+ modSecController.reinitializeConfig(req, res)
+);
+
export default router;
diff --git a/apps/api/src/domains/modsec/modsec.service.ts b/apps/api/src/domains/modsec/modsec.service.ts
index ec2e4f4..54fd6ed 100644
--- a/apps/api/src/domains/modsec/modsec.service.ts
+++ b/apps/api/src/domains/modsec/modsec.service.ts
@@ -5,6 +5,7 @@ import { promisify } from 'util';
import logger from '../../utils/logger';
import { modSecRepository } from './modsec.repository';
import { crsRulesService } from './services/crs-rules.service';
+import { modSecSetupService } from './services/modsec-setup.service';
import { AddCustomRuleDto, UpdateModSecRuleDto, ToggleCRSRuleDto, SetGlobalModSecDto } from './dto';
import { CRSRule, ModSecRule, ModSecRuleWithDomain, GlobalModSecSettings, NginxReloadResult } from './modsec.types';
@@ -41,6 +42,86 @@ export class ModSecService {
}
}
+ /**
+ * Extract rule IDs from custom rule content
+ */
+ private extractRuleIdsFromContent(ruleContent: string): number[] {
+ const idMatches = ruleContent.matchAll(/id:(\d+)/g);
+    const ids = new Set<number>();
+
+ for (const match of idMatches) {
+ ids.add(parseInt(match[1]));
+ }
+
+ return Array.from(ids).sort((a, b) => a - b);
+ }
+
+ /**
+ * Get all existing rule IDs from custom rules
+ */
+  private async getAllExistingRuleIds(excludeRuleId?: string): Promise<Set<number>> {
+    const allRuleIds = new Set<number>();
+
+ try {
+ // Get all custom rules from database
+ const customRules = await modSecRepository.findModSecRules();
+
+ for (const rule of customRules) {
+ // Skip the rule being updated
+ if (excludeRuleId && rule.id === excludeRuleId) {
+ continue;
+ }
+
+ // Extract IDs from rule content
+ const ruleIds = this.extractRuleIdsFromContent(rule.ruleContent);
+ ruleIds.forEach(id => allRuleIds.add(id));
+ }
+ } catch (error: any) {
+ logger.error('Failed to get existing rule IDs:', error);
+ }
+
+ return allRuleIds;
+ }
+
+ /**
+ * Validate rule IDs for duplicates
+ */
+ private async validateRuleIds(ruleContent: string, excludeRuleId?: string): Promise<{ valid: boolean; duplicateIds: number[] }> {
+ const newRuleIds = this.extractRuleIdsFromContent(ruleContent);
+
+ if (newRuleIds.length === 0) {
+ return { valid: true, duplicateIds: [] };
+ }
+
+ const existingRuleIds = await this.getAllExistingRuleIds(excludeRuleId);
+ const duplicateIds: number[] = [];
+
+ for (const id of newRuleIds) {
+ if (existingRuleIds.has(id)) {
+ duplicateIds.push(id);
+ }
+ }
+
+ return {
+ valid: duplicateIds.length === 0,
+ duplicateIds,
+ };
+ }
+
+ /**
+ * Test nginx configuration validity
+ */
+ private async testNginxConfig(): Promise<{ valid: boolean; error?: string }> {
+ try {
+ await execAsync('nginx -t 2>&1');
+ return { valid: true };
+ } catch (error: any) {
+ const errorMessage = error.stderr || error.stdout || error.message;
+ logger.error('Nginx configuration test failed:', errorMessage);
+ return { valid: false, error: errorMessage };
+ }
+ }
+
/**
* Regenerate CRS disable configuration file from database
*/
@@ -249,13 +330,46 @@ export class ModSecService {
const updatedRule = await modSecRepository.toggleModSecRule(id, !rule.enabled);
+ // Handle file renaming based on enabled status
+ const enabledFileName = `custom_${rule.id}.conf`;
+ const disabledFileName = `custom_${rule.id}.conf.disabled`;
+ const enabledFilePath = path.join(MODSEC_CUSTOM_RULES_PATH, enabledFileName);
+ const disabledFilePath = path.join(MODSEC_CUSTOM_RULES_PATH, disabledFileName);
+
+ try {
+ if (updatedRule.enabled) {
+ // Enable: rename .disabled to .conf
+ try {
+ await fs.access(disabledFilePath);
+ await fs.rename(disabledFilePath, enabledFilePath);
+ logger.info(`Renamed rule file to enabled: ${enabledFileName}`);
+ } catch (error) {
+ // File might not exist or already enabled, that's ok
+ logger.warn(`Could not find disabled file to enable: ${disabledFileName}`);
+ }
+ } else {
+ // Disable: rename .conf to .conf.disabled
+ try {
+ await fs.access(enabledFilePath);
+ await fs.rename(enabledFilePath, disabledFilePath);
+ logger.info(`Renamed rule file to disabled: ${disabledFileName}`);
+ } catch (error) {
+ // File might not exist or already disabled, that's ok
+ logger.warn(`Could not find enabled file to disable: ${enabledFileName}`);
+ }
+ }
+
+ // Auto reload nginx
+ await this.autoReloadNginx(true);
+ } catch (error: any) {
+ logger.error('Failed to rename rule file:', error);
+ // Continue even if file rename fails
+ }
+
logger.info(`ModSecurity rule ${updatedRule.name} ${updatedRule.enabled ? 'enabled' : 'disabled'}`, {
ruleId: id,
});
- // Auto reload nginx
- await this.autoReloadNginx(true);
-
return updatedRule;
}
@@ -268,27 +382,72 @@ export class ModSecService {
}
}
- // Create rule in database
- const rule = await modSecRepository.createModSecRule(dto);
-
- // Write rule to file if enabled
- if (rule.enabled) {
- try {
- // Ensure custom rules directory exists
- await fs.mkdir(MODSEC_CUSTOM_RULES_PATH, { recursive: true });
+ // Validate rule IDs for duplicates
+ const ruleIdValidation = await this.validateRuleIds(dto.ruleContent);
+ if (!ruleIdValidation.valid) {
+ throw new Error(`Rule ID(s) already exist: ${ruleIdValidation.duplicateIds.join(', ')}. Please use unique rule IDs.`);
+ }
- const ruleFileName = `custom_${rule.id}.conf`;
- const ruleFilePath = path.join(MODSEC_CUSTOM_RULES_PATH, ruleFileName);
+ // Create rule in database first
+ const rule = await modSecRepository.createModSecRule(dto);
- await fs.writeFile(ruleFilePath, dto.ruleContent, 'utf-8');
- logger.info(`Custom ModSecurity rule file created: ${ruleFilePath}`);
+ // Write rule to file (with appropriate extension based on enabled status)
+ let ruleFilePath: string | null = null;
+ try {
+ // Ensure custom rules directory exists
+ await fs.mkdir(MODSEC_CUSTOM_RULES_PATH, { recursive: true });
+
+ const ruleFileName = rule.enabled
+ ? `custom_${rule.id}.conf`
+ : `custom_${rule.id}.conf.disabled`;
+ ruleFilePath = path.join(MODSEC_CUSTOM_RULES_PATH, ruleFileName);
+
+ await fs.writeFile(ruleFilePath, dto.ruleContent, 'utf-8');
+ logger.info(`Custom ModSecurity rule file created: ${ruleFilePath}`);
+
+ // Test nginx configuration if rule is enabled
+ if (rule.enabled) {
+ const configTest = await this.testNginxConfig();
+ if (!configTest.valid) {
+ // Rollback: delete the file and database entry
+ try {
+ await fs.unlink(ruleFilePath);
+ await modSecRepository.deleteModSecRule(rule.id);
+ } catch (rollbackError) {
+ logger.error('Failed to rollback after nginx config test failure:', rollbackError);
+ }
+ throw new Error(`Nginx configuration test failed: ${configTest.error}`);
+ }
- // Auto reload nginx
- await this.autoReloadNginx(true);
- } catch (error: any) {
- logger.error('Failed to write custom rule file:', error);
- // Continue even if file write fails
+ // Reload nginx if config is valid
+ const reloadResult = await this.autoReloadNginx(false);
+ if (!reloadResult.success) {
+ // Rollback: delete the file and database entry
+ try {
+ await fs.unlink(ruleFilePath);
+ await modSecRepository.deleteModSecRule(rule.id);
+ } catch (rollbackError) {
+ logger.error('Failed to rollback after nginx reload failure:', rollbackError);
+ }
+ throw new Error(`Nginx reload failed: ${reloadResult.message}`);
+ }
+ }
+ } catch (error: any) {
+ logger.error('Failed to write custom rule file:', error);
+ // If it's our validation error, rethrow it
+ if (error.message.includes('Nginx configuration test failed') || error.message.includes('Nginx reload failed')) {
+ throw error;
+ }
+ // For other errors, try to clean up
+ if (ruleFilePath) {
+ try {
+ await fs.unlink(ruleFilePath);
+ await modSecRepository.deleteModSecRule(rule.id);
+ } catch (cleanupError) {
+ logger.error('Failed to cleanup after error:', cleanupError);
+ }
}
+ throw new Error(`Failed to create custom rule: ${error.message}`);
}
logger.info(`Custom ModSecurity rule added: ${rule.name}`, {
@@ -304,32 +463,147 @@ export class ModSecService {
throw new Error('ModSecurity rule not found');
}
+ // Validate rule IDs for duplicates if content is being updated
+ if (dto.ruleContent !== undefined) {
+ const ruleIdValidation = await this.validateRuleIds(dto.ruleContent, id);
+ if (!ruleIdValidation.valid) {
+ throw new Error(`Rule ID(s) already exist: ${ruleIdValidation.duplicateIds.join(', ')}. Please use unique rule IDs.`);
+ }
+ }
+
+ // Store original state for rollback
+ const originalRule = { ...rule };
const updatedRule = await modSecRepository.updateModSecRule(id, dto);
- // Update rule file if exists
- const ruleFileName = `custom_${rule.id}.conf`;
- const ruleFilePath = path.join(MODSEC_CUSTOM_RULES_PATH, ruleFileName);
+ // Handle file updates with proper naming
+ const enabledFileName = `custom_${rule.id}.conf`;
+ const disabledFileName = `custom_${rule.id}.conf.disabled`;
+ const enabledFilePath = path.join(MODSEC_CUSTOM_RULES_PATH, enabledFileName);
+ const disabledFilePath = path.join(MODSEC_CUSTOM_RULES_PATH, disabledFileName);
+
+ let backupFilePath: string | null = null;
+ let currentFilePath: string | null = null;
try {
- await fs.access(ruleFilePath);
-
- if (updatedRule.enabled && dto.ruleContent) {
- await fs.writeFile(ruleFilePath, dto.ruleContent, 'utf-8');
- logger.info(`Custom ModSecurity rule file updated: ${ruleFilePath}`);
- } else if (!updatedRule.enabled) {
- await fs.unlink(ruleFilePath);
- logger.info(`Custom ModSecurity rule file removed: ${ruleFilePath}`);
+ await fs.mkdir(MODSEC_CUSTOM_RULES_PATH, { recursive: true });
+
+ // Determine which file currently exists
+ try {
+ await fs.access(enabledFilePath);
+ currentFilePath = enabledFilePath;
+ } catch {
+ try {
+ await fs.access(disabledFilePath);
+ currentFilePath = disabledFilePath;
+ } catch {
+ currentFilePath = null;
+ }
}
- // Auto reload nginx
- await this.autoReloadNginx(true);
+ // Create backup if file exists
+ if (currentFilePath) {
+ backupFilePath = `${currentFilePath}.backup`;
+ await fs.copyFile(currentFilePath, backupFilePath);
+ }
+
+ // Update content if provided
+ if (dto.ruleContent !== undefined) {
+ const targetFilePath = updatedRule.enabled ? enabledFilePath : disabledFilePath;
+
+ // Write new content to target file
+ await fs.writeFile(targetFilePath, dto.ruleContent, 'utf-8');
+ logger.info(`Custom ModSecurity rule file updated: ${targetFilePath}`);
+
+ // Remove old file if it has different name
+ if (currentFilePath && currentFilePath !== targetFilePath) {
+ try {
+ await fs.unlink(currentFilePath);
+ logger.info(`Removed old rule file: ${currentFilePath}`);
+ } catch (error) {
+ logger.warn(`Could not remove old file: ${currentFilePath}`);
+ }
+ }
+ } else if (dto.enabled !== undefined && currentFilePath) {
+ // Only enabled status changed, rename file
+ const targetFilePath = updatedRule.enabled ? enabledFilePath : disabledFilePath;
+ if (currentFilePath !== targetFilePath) {
+ await fs.rename(currentFilePath, targetFilePath);
+ logger.info(`Renamed rule file from ${currentFilePath} to ${targetFilePath}`);
+ }
+ }
+
+ // Test nginx configuration if rule is enabled
+ if (updatedRule.enabled) {
+ const configTest = await this.testNginxConfig();
+ if (!configTest.valid) {
+ // Rollback: restore backup and database entry
+ if (backupFilePath && currentFilePath) {
+ try {
+ await fs.copyFile(backupFilePath, currentFilePath);
+ await modSecRepository.updateModSecRule(id, {
+ name: originalRule.name,
+ category: originalRule.category,
+ ruleContent: originalRule.ruleContent,
+ description: originalRule.description,
+ enabled: originalRule.enabled,
+ });
+ } catch (rollbackError) {
+ logger.error('Failed to rollback after nginx config test failure:', rollbackError);
+ }
+ }
+ throw new Error(`Nginx configuration test failed: ${configTest.error}`);
+ }
+
+ // Reload nginx if config is valid
+ const reloadResult = await this.autoReloadNginx(false);
+ if (!reloadResult.success) {
+ // Rollback: restore backup and database entry
+ if (backupFilePath && currentFilePath) {
+ try {
+ await fs.copyFile(backupFilePath, currentFilePath);
+ await modSecRepository.updateModSecRule(id, {
+ name: originalRule.name,
+ category: originalRule.category,
+ ruleContent: originalRule.ruleContent,
+ description: originalRule.description,
+ enabled: originalRule.enabled,
+ });
+ } catch (rollbackError) {
+ logger.error('Failed to rollback after nginx reload failure:', rollbackError);
+ }
+ }
+ throw new Error(`Nginx reload failed: ${reloadResult.message}`);
+ }
+ }
+
+ // Clean up backup file
+ if (backupFilePath) {
+ try {
+ await fs.unlink(backupFilePath);
+ } catch (error) {
+ logger.warn(`Could not remove backup file: ${backupFilePath}`);
+ }
+ }
} catch (error: any) {
- // File doesn't exist or error accessing it
- if (updatedRule.enabled && dto.ruleContent) {
- await fs.mkdir(MODSEC_CUSTOM_RULES_PATH, { recursive: true });
- await fs.writeFile(ruleFilePath, dto.ruleContent, 'utf-8');
- await this.autoReloadNginx(true);
+ logger.error('Failed to update rule file:', error);
+
+ // Clean up backup file on error
+ if (backupFilePath) {
+ try {
+ await fs.unlink(backupFilePath);
+ } catch (cleanupError) {
+ logger.warn(`Could not remove backup file: ${backupFilePath}`);
+ }
+ }
+
+ // If it's our validation error, rethrow it
+ if (error.message.includes('Nginx configuration test failed') ||
+ error.message.includes('Nginx reload failed') ||
+ error.message.includes('Rule ID(s) already exist')) {
+ throw error;
}
+
+ throw new Error(`Failed to update custom rule: ${error.message}`);
}
logger.info(`ModSecurity rule updated: ${updatedRule.name}`, {
@@ -347,18 +621,35 @@ export class ModSecService {
await modSecRepository.deleteModSecRule(id);
- // Delete rule file if exists
- const ruleFileName = `custom_${rule.id}.conf`;
- const ruleFilePath = path.join(MODSEC_CUSTOM_RULES_PATH, ruleFileName);
+ // Delete both enabled and disabled rule files if they exist
+ const enabledFileName = `custom_${rule.id}.conf`;
+ const disabledFileName = `custom_${rule.id}.conf.disabled`;
+ const enabledFilePath = path.join(MODSEC_CUSTOM_RULES_PATH, enabledFileName);
+ const disabledFilePath = path.join(MODSEC_CUSTOM_RULES_PATH, disabledFileName);
+
+ let fileDeleted = false;
+
+ // Try to delete enabled file
+ try {
+ await fs.unlink(enabledFilePath);
+ logger.info(`Custom ModSecurity rule file deleted: ${enabledFilePath}`);
+ fileDeleted = true;
+ } catch (error: any) {
+ // File doesn't exist, that's ok
+ }
+ // Try to delete disabled file
try {
- await fs.unlink(ruleFilePath);
- logger.info(`Custom ModSecurity rule file deleted: ${ruleFilePath}`);
+ await fs.unlink(disabledFilePath);
+ logger.info(`Custom ModSecurity rule file deleted: ${disabledFilePath}`);
+ fileDeleted = true;
+ } catch (error: any) {
+ // File doesn't exist, that's ok
+ }
- // Auto reload nginx
+ // Auto reload nginx if any file was deleted
+ if (fileDeleted) {
await this.autoReloadNginx(true);
- } catch (error: any) {
- // File doesn't exist, continue
}
logger.info(`ModSecurity rule deleted: ${rule.name}`, {
@@ -401,6 +692,21 @@ export class ModSecService {
return config;
}
+
+ /**
+ * Reinitialize ModSecurity configuration
+ * This will update main.conf with any missing includes
+ */
+ async reinitializeConfig(): Promise<{ success: boolean; message: string }> {
+ const result = await modSecSetupService.reinitializeModSecurityConfig();
+
+ if (result.success) {
+ // Auto reload nginx after config update
+ await this.autoReloadNginx(true);
+ }
+
+ return result;
+ }
}
export const modSecService = new ModSecService();
diff --git a/apps/api/src/domains/modsec/services/modsec-setup.service.ts b/apps/api/src/domains/modsec/services/modsec-setup.service.ts
index f52ca33..4de48bc 100644
--- a/apps/api/src/domains/modsec/services/modsec-setup.service.ts
+++ b/apps/api/src/domains/modsec/services/modsec-setup.service.ts
@@ -5,12 +5,33 @@ import logger from '../../../utils/logger';
const MODSEC_MAIN_CONF = '/etc/nginx/modsec/main.conf';
const MODSEC_CRS_DISABLE_PATH = '/etc/nginx/modsec/crs_disabled';
const MODSEC_CRS_DISABLE_FILE = '/etc/nginx/modsec/crs_disabled.conf';
+const MODSEC_CUSTOM_RULES_PATH = '/etc/nginx/modsec/custom_rules';
/**
* ModSecurity setup service
* Handles initialization and configuration of ModSecurity
*/
export class ModSecSetupService {
+ /**
+ * Force reinitialize ModSecurity configuration
+ * This will update main.conf with any missing includes
+ */
+ async reinitializeModSecurityConfig(): Promise<{ success: boolean; message: string }> {
+ try {
+ await this.initializeModSecurityConfig();
+ return {
+ success: true,
+ message: 'ModSecurity configuration reinitialized successfully',
+ };
+ } catch (error: any) {
+ logger.error('Failed to reinitialize ModSecurity config:', error);
+ return {
+ success: false,
+ message: error.message || 'Failed to reinitialize ModSecurity configuration',
+ };
+ }
+ }
+
/**
* Initialize ModSecurity configuration for CRS rule management
*/
@@ -30,6 +51,32 @@ export class ModSecSetupService {
logger.info(`✓ CRS disable directory already exists: ${MODSEC_CRS_DISABLE_PATH}`);
}
+ // Step 2: Create custom_rules directory
+ try {
+ await fs.mkdir(MODSEC_CUSTOM_RULES_PATH, { recursive: true });
+ await fs.chmod(MODSEC_CUSTOM_RULES_PATH, 0o755);
+ logger.info(`✓ Custom rules directory created: ${MODSEC_CUSTOM_RULES_PATH}`);
+ } catch (error: any) {
+ if (error.code !== 'EEXIST') {
+ throw error;
+ }
+ logger.info(`✓ Custom rules directory already exists: ${MODSEC_CUSTOM_RULES_PATH}`);
+ }
+
+ // Create placeholder file to prevent nginx error when no custom rules exist
+ const placeholderFile = path.join(MODSEC_CUSTOM_RULES_PATH, 'placeholder.conf');
+ try {
+ await fs.access(placeholderFile);
+ logger.info('✓ Custom rules placeholder file already exists');
+ } catch (error) {
+ const placeholderContent = `# Custom ModSecurity Rules Placeholder
+# This file ensures nginx doesn't fail when no custom rules exist
+# Managed by Nginx Love UI - DO NOT EDIT MANUALLY
+`;
+ await fs.writeFile(placeholderFile, placeholderContent, 'utf-8');
+ logger.info('✓ Created custom rules placeholder file');
+ }
+
// Step 3: Check if main.conf exists
try {
await fs.access(MODSEC_MAIN_CONF);
@@ -98,15 +145,32 @@ export class ModSecSetupService {
}
// Check if crs_disabled.conf include exists
+ let needsUpdate = false;
if (mainConfContent.includes('Include /etc/nginx/modsec/crs_disabled.conf')) {
logger.info('✓ CRS disable include already configured in main.conf');
} else {
// Add include directive for CRS disable file (single file, not wildcard)
const includeDirective = `\n# CRS Rule Disables (managed by Nginx Love UI)\nInclude /etc/nginx/modsec/crs_disabled.conf\n`;
mainConfContent += includeDirective;
+ needsUpdate = true;
+ logger.info('✓ Added CRS disable include to main.conf');
+ }
+
+ // Check if custom_rules include exists
+ if (mainConfContent.includes('Include /etc/nginx/modsec/custom_rules/*.conf')) {
+ logger.info('✓ Custom rules include already configured in main.conf');
+ } else {
+ // Add include directive for custom rules
+ const customRulesDirective = `\n# Custom ModSecurity Rules (managed by Nginx Love UI)\nInclude /etc/nginx/modsec/custom_rules/*.conf\n`;
+ mainConfContent += customRulesDirective;
+ needsUpdate = true;
+ logger.info('✓ Added custom rules include to main.conf');
+ }
+ // Write main.conf if updated
+ if (needsUpdate) {
await fs.writeFile(MODSEC_MAIN_CONF, mainConfContent, 'utf-8');
- logger.info('✓ Added CRS disable include to main.conf');
+ logger.info('✓ Updated main.conf with new includes');
}
// Step 5: Create empty crs_disabled.conf if not exists
diff --git a/apps/api/src/domains/nlb/dto/nlb.dto.ts b/apps/api/src/domains/nlb/dto/nlb.dto.ts
index ad59d8f..4418834 100644
--- a/apps/api/src/domains/nlb/dto/nlb.dto.ts
+++ b/apps/api/src/domains/nlb/dto/nlb.dto.ts
@@ -4,6 +4,50 @@ import { body, param, query } from 'express-validator';
* Validation rules for NLB endpoints
*/
+/**
+ * Validate host (IP address or hostname)
+ */
+function isValidHost(host: string): boolean {
+ if (!host || host.trim().length === 0) {
+ return false;
+ }
+
+ host = host.trim();
+
+ // IPv4 validation - strict format
+ const ipv4Regex = /^(?:(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$/;
+ if (ipv4Regex.test(host)) {
+ return true;
+ }
+
+ // IPv6 validation (simplified)
+ const ipv6Regex = /^(?:[0-9a-fA-F]{1,4}:){7}[0-9a-fA-F]{1,4}$|^::(?:[0-9a-fA-F]{1,4}:){0,6}[0-9a-fA-F]{1,4}$|^[0-9a-fA-F]{1,4}::(?:[0-9a-fA-F]{1,4}:){0,5}[0-9a-fA-F]{1,4}$/;
+ if (ipv6Regex.test(host)) {
+ return true;
+ }
+
+ // If it looks like an IP but failed validation, reject it
+ // This catches malformed IPs like "888880.8832884"
+ if (/^[\d.]+$/.test(host)) {
+ return false; // Only digits and dots but not valid IP
+ }
+
+ // Hostname validation (RFC 1123)
+ const hostnameRegex = /^(?=.{1,253}$)(?:(?!-)[A-Za-z0-9-]{1,63}(? /^\d+$/.test(label));
+ if (allNumeric) {
+ return false;
+ }
+ return true;
+ }
+
+ return false;
+}
+
// Upstream validation
export const upstreamValidation = [
body('host')
@@ -11,7 +55,13 @@ export const upstreamValidation = [
.notEmpty()
.withMessage('Host is required')
.isString()
- .withMessage('Host must be a string'),
+ .withMessage('Host must be a string')
+ .custom((value) => {
+ if (!isValidHost(value)) {
+ throw new Error('Invalid host. Must be a valid IP address (IPv4/IPv6) or hostname');
+ }
+ return true;
+ }),
body('port')
.isInt({ min: 1, max: 65535 })
.withMessage('Port must be between 1 and 65535'),
@@ -72,7 +122,13 @@ export const createNLBValidation = [
body('upstreams.*.host')
.trim()
.notEmpty()
- .withMessage('Upstream host is required'),
+ .withMessage('Upstream host is required')
+ .custom((value) => {
+ if (!isValidHost(value)) {
+ throw new Error('Invalid host. Must be a valid IP address (IPv4/IPv6) or hostname');
+ }
+ return true;
+ }),
body('upstreams.*.port')
.isInt({ min: 1, max: 65535 })
.withMessage('Upstream port must be between 1 and 65535'),
@@ -188,7 +244,13 @@ export const updateNLBValidation = [
.optional()
.trim()
.notEmpty()
- .withMessage('Upstream host is required'),
+ .withMessage('Upstream host is required')
+ .custom((value) => {
+ if (value && !isValidHost(value)) {
+ throw new Error('Invalid host. Must be a valid IP address (IPv4/IPv6) or hostname');
+ }
+ return true;
+ }),
body('upstreams.*.port')
.optional()
.isInt({ min: 1, max: 65535 })
diff --git a/apps/web/src/components/access-lists/AccessListFormDialog.tsx b/apps/web/src/components/access-lists/AccessListFormDialog.tsx
index 576b059..02a1082 100644
--- a/apps/web/src/components/access-lists/AccessListFormDialog.tsx
+++ b/apps/web/src/components/access-lists/AccessListFormDialog.tsx
@@ -1,6 +1,6 @@
import { useState, useEffect } from 'react';
import { useQuery } from '@tanstack/react-query';
-import { Plus, Trash2, Eye, EyeOff } from 'lucide-react';
+import { Plus, Trash2, Eye, EyeOff, AlertCircle, CheckCircle2, Info } from 'lucide-react';
import {
Dialog,
DialogContent,
@@ -23,7 +23,16 @@ import {
} from '@/components/ui/select';
import { Tabs, TabsContent, TabsList, TabsTrigger } from '@/components/ui/tabs';
import { Badge } from '@/components/ui/badge';
+import { Alert, AlertDescription } from '@/components/ui/alert';
import { useToast } from '@/hooks/use-toast';
+import {
+ validateAccessListName,
+ validateAccessListIp,
+ validateUsername,
+ validatePassword,
+ getAccessListHints,
+ getAccessListExample
+} from '@/utils/access-list-validators';
import {
useCreateAccessList,
useUpdateAccessList,
@@ -75,6 +84,20 @@ export function AccessListFormDialog({
const [selectedDomains, setSelectedDomains] = useState([]);
const [originalDomainIds, setOriginalDomainIds] = useState([]); // Track original domains for edit mode
+ // Validation states
+ const [nameValidation, setNameValidation] = useState<{ valid: boolean; error?: string }>({ valid: true });
+ const [ipValidations, setIpValidations] = useState>({});
+ const [userValidations, setUserValidations] = useState>({});
+
+ // Validate name in real-time
+ useEffect(() => {
+ if (formData.name.trim().length > 0) {
+ setNameValidation(validateAccessListName(formData.name));
+ } else {
+ setNameValidation({ valid: true });
+ }
+ }, [formData.name]);
+
// Reset form when dialog opens or access list changes
useEffect(() => {
if (open) {
@@ -120,6 +143,10 @@ export function AccessListFormDialog({
setSelectedDomains([]);
setOriginalDomainIds([]); // Reset original domains
}
+ // Reset validations
+ setNameValidation({ valid: true });
+ setIpValidations({});
+ setUserValidations({});
}
}, [open, accessList]);
@@ -280,6 +307,18 @@ export function AccessListFormDialog({
const newIps = [...allowedIps];
newIps[index] = value;
setAllowedIps(newIps);
+
+ // Validate IP in real-time
+ if (value.trim().length > 0) {
+ const validation = validateAccessListIp(value);
+ setIpValidations(prev => ({ ...prev, [index]: validation }));
+ } else {
+ setIpValidations(prev => {
+ const newValidations = { ...prev };
+ delete newValidations[index];
+ return newValidations;
+ });
+ }
};
const addAuthUser = () => {
@@ -301,6 +340,31 @@ export function AccessListFormDialog({
const newUsers = [...authUsers];
(newUsers[index] as any)[field] = value;
setAuthUsers(newUsers);
+
+ // Validate username/password in real-time
+ if (field === 'username' && typeof value === 'string') {
+ if (value.trim().length > 0) {
+ const validation = validateUsername(value);
+ setUserValidations(prev => ({
+ ...prev,
+ [index]: {
+ username: validation,
+ password: prev[index]?.password || { valid: true }
+ }
+ }));
+ }
+ } else if (field === 'password' && typeof value === 'string') {
+ if (value.trim().length > 0) {
+ const validation = validatePassword(value, !isEditMode);
+ setUserValidations(prev => ({
+ ...prev,
+ [index]: {
+ username: prev[index]?.username || { valid: true },
+ password: validation
+ }
+ }));
+ }
+ }
};
const toggleDomainSelection = (domainId: string) => {
@@ -338,16 +402,29 @@ export function AccessListFormDialog({
-
- setFormData({ ...formData, name: e.target.value })
- }
- placeholder="e.g., admin-panel-access"
- disabled={isPending}
- required
- />
+
+
+ setFormData({ ...formData, name: e.target.value })
+ }
+ placeholder={getAccessListExample('name')}
+ disabled={isPending}
+ required
+ className={!nameValidation.valid && formData.name.trim().length > 0 ? 'border-red-500' : nameValidation.valid && formData.name.trim().length > 0 ? 'border-green-500' : ''}
+ />
+ {nameValidation.valid && formData.name.trim().length > 0 && (
+
+ )}
+ {!nameValidation.valid && formData.name.trim().length > 0 && (
+
+ )}
+
+ {!nameValidation.valid && nameValidation.error && (
+
{nameValidation.error}
+ )}
+
{getAccessListHints('name')}
@@ -430,29 +507,46 @@ export function AccessListFormDialog({
{allowedIps.map((ip, index) => (
-
-
updateIpField(index, e.target.value)}
- placeholder="e.g., 192.168.1.1 or 10.0.0.0/24"
- disabled={isPending}
- />
- {allowedIps.length > 1 && (
-
+
+
+
+
updateIpField(index, e.target.value)}
+ placeholder={getAccessListExample('ip')}
+ disabled={isPending}
+ className={ipValidations[index] && !ipValidations[index].valid ? 'border-red-500' : ipValidations[index]?.valid ? 'border-green-500' : ''}
+ />
+ {ipValidations[index]?.valid && ip.trim().length > 0 && (
+
+ )}
+ {ipValidations[index] && !ipValidations[index].valid && (
+
+ )}
+
+ {allowedIps.length > 1 && (
+
+ )}
+
+ {ipValidations[index] && !ipValidations[index].valid && ipValidations[index].error && (
+
{ipValidations[index].error}
)}
))}
-
- Enter IP addresses or CIDR notation (e.g., 192.168.1.0/24)
-
+
+
+
+ Hint: {getAccessListHints('ip')}
+
+
)}
@@ -496,15 +590,27 @@ export function AccessListFormDialog({
-
- updateAuthUser(index, 'username', e.target.value)
- }
- placeholder="username"
- disabled={isPending}
- minLength={3}
- />
+
+
+ updateAuthUser(index, 'username', e.target.value)
+ }
+ placeholder={getAccessListExample('username')}
+ disabled={isPending}
+ minLength={3}
+ className={userValidations[index]?.username && !userValidations[index].username.valid ? 'border-red-500' : userValidations[index]?.username?.valid ? 'border-green-500' : ''}
+ />
+ {userValidations[index]?.username?.valid && user.username.trim().length > 0 && (
+
+ )}
+ {userValidations[index]?.username && !userValidations[index].username.valid && (
+
+ )}
+
+ {userValidations[index]?.username && !userValidations[index].username.valid && userValidations[index].username.error && (
+
{userValidations[index].username.error}
+ )}
diff --git a/apps/web/src/components/acl/PreviewConfigDialog.tsx b/apps/web/src/components/acl/PreviewConfigDialog.tsx
new file mode 100644
index 0000000..67a4b72
--- /dev/null
+++ b/apps/web/src/components/acl/PreviewConfigDialog.tsx
@@ -0,0 +1,104 @@
+import { Dialog, DialogContent, DialogDescription, DialogHeader, DialogTitle } from "@/components/ui/dialog";
+import { Button } from "@/components/ui/button";
+import { Alert, AlertDescription } from "@/components/ui/alert";
+import { Loader2, FileCode, Copy, CheckCircle } from "lucide-react";
+import { usePreviewAclConfig } from "@/queries";
+import { useState } from "react";
+import { useToast } from "@/hooks/use-toast";
+
+interface PreviewConfigDialogProps {
+ open: boolean;
+ onOpenChange: (open: boolean) => void;
+}
+
+export function PreviewConfigDialog({ open, onOpenChange }: PreviewConfigDialogProps) {
+ const { toast } = useToast();
+ const { data, isLoading, error } = usePreviewAclConfig();
+ const [copied, setCopied] = useState(false);
+
+ const handleCopy = () => {
+ if (data?.config) {
+ navigator.clipboard.writeText(data.config);
+ setCopied(true);
+ toast({
+ title: "Copied!",
+ description: "Configuration copied to clipboard"
+ });
+ setTimeout(() => setCopied(false), 2000);
+ }
+ };
+
+ return (
+
+ );
+}
diff --git a/apps/web/src/components/domains/DomainDialog.tsx b/apps/web/src/components/domains/DomainDialog.tsx
index c02bd23..682e301 100644
--- a/apps/web/src/components/domains/DomainDialog.tsx
+++ b/apps/web/src/components/domains/DomainDialog.tsx
@@ -181,7 +181,7 @@ export function DomainDialog({ open, onOpenChange, domain, onSave }: DomainDialo
};
onSave(domainData);
- onOpenChange(false);
+ // Do not close dialog here - let parent component handle it after successful save
};
const addUpstream = () => {
diff --git a/apps/web/src/components/domains/DomainDialogV2.tsx b/apps/web/src/components/domains/DomainDialogV2.tsx
index 1215ca0..99d3e13 100644
--- a/apps/web/src/components/domains/DomainDialogV2.tsx
+++ b/apps/web/src/components/domains/DomainDialogV2.tsx
@@ -222,7 +222,7 @@ export function DomainDialogV2({ open, onOpenChange, domain, onSave }: DomainDia
};
onSave(domainData);
- onOpenChange(false);
+ // Do not close dialog here - let parent component handle it after successful save
};
return (
diff --git a/apps/web/src/components/forms/NLBFormDialog.tsx b/apps/web/src/components/forms/NLBFormDialog.tsx
index 1e6bef8..f0a02ae 100644
--- a/apps/web/src/components/forms/NLBFormDialog.tsx
+++ b/apps/web/src/components/forms/NLBFormDialog.tsx
@@ -1,7 +1,14 @@
import { useEffect, useState } from 'react';
import { useForm, useFieldArray, Controller } from 'react-hook-form';
import { useCreateNLB, useUpdateNLB } from '@/queries/nlb.query-options';
-import { NetworkLoadBalancer, CreateNLBInput, NLBUpstream } from '@/types';
+import { NetworkLoadBalancer, CreateNLBInput } from '@/types';
+import {
+ validateNLBConfig,
+ isValidNLBName,
+ validateUpstreamHost,
+ getValidationHints,
+ checkConfigurationWarnings,
+} from '@/utils/nlb-validators';
import {
Dialog,
DialogContent,
@@ -30,7 +37,7 @@ import {
import { Switch } from '@/components/ui/switch';
import { Tabs, TabsContent, TabsList, TabsTrigger } from '@/components/ui/tabs';
import { Card, CardContent } from '@/components/ui/card';
-import { Plus, Trash2, HelpCircle } from 'lucide-react';
+import { Plus, Trash2, HelpCircle, AlertTriangle } from 'lucide-react';
import { useToast } from '@/hooks/use-toast';
interface NLBFormDialogProps {
@@ -46,6 +53,8 @@ export default function NLBFormDialog({ isOpen, onClose, nlb, mode }: NLBFormDia
const { toast } = useToast();
const createMutation = useCreateNLB();
const updateMutation = useUpdateNLB();
+ const [configWarnings, setConfigWarnings] = useState
([]);
+ const [validationErrors, setValidationErrors] = useState([]);
const {
register,
@@ -82,6 +91,27 @@ export default function NLBFormDialog({ isOpen, onClose, nlb, mode }: NLBFormDia
});
const protocol = watch('protocol');
+ const upstreams = watch('upstreams');
+ const proxyTimeout = watch('proxyTimeout');
+ const proxyConnectTimeout = watch('proxyConnectTimeout');
+ const healthCheckEnabled = watch('healthCheckEnabled');
+ const healthCheckInterval = watch('healthCheckInterval');
+ const healthCheckTimeout = watch('healthCheckTimeout');
+
+ // Check for configuration warnings whenever form values change
+ useEffect(() => {
+ if (upstreams && upstreams.length > 0) {
+ const warnings = checkConfigurationWarnings({
+ upstreams: upstreams,
+ proxyTimeout: proxyTimeout || 3,
+ proxyConnectTimeout: proxyConnectTimeout || 1,
+ healthCheckEnabled: healthCheckEnabled || false,
+ healthCheckInterval: healthCheckInterval,
+ healthCheckTimeout: healthCheckTimeout,
+ });
+ setConfigWarnings(warnings);
+ }
+ }, [upstreams, proxyTimeout, proxyConnectTimeout, healthCheckEnabled, healthCheckInterval, healthCheckTimeout]);
useEffect(() => {
if (isOpen && nlb && mode === 'edit') {
@@ -136,6 +166,65 @@ export default function NLBFormDialog({ isOpen, onClose, nlb, mode }: NLBFormDia
const onSubmit = async (data: FormData) => {
try {
+ // Validate complete configuration before submission
+ const validation = validateNLBConfig({
+ name: data.name,
+ port: Number(data.port),
+ upstreams: data.upstreams.map(u => ({
+ host: u.host,
+ port: Number(u.port),
+ weight: Number(u.weight),
+ maxFails: Number(u.maxFails),
+ failTimeout: Number(u.failTimeout),
+ maxConns: Number(u.maxConns),
+ backup: Boolean(u.backup),
+ down: Boolean(u.down),
+ })),
+ proxyTimeout: Number(data.proxyTimeout),
+ proxyConnectTimeout: Number(data.proxyConnectTimeout),
+ proxyNextUpstreamTimeout: Number(data.proxyNextUpstreamTimeout),
+ proxyNextUpstreamTries: Number(data.proxyNextUpstreamTries),
+ healthCheckEnabled: Boolean(data.healthCheckEnabled),
+ healthCheckInterval: Number(data.healthCheckInterval),
+ healthCheckTimeout: Number(data.healthCheckTimeout),
+ healthCheckRises: Number(data.healthCheckRises),
+ healthCheckFalls: Number(data.healthCheckFalls),
+ });
+
+ if (!validation.valid) {
+ const errorMessages = Object.entries(validation.errors)
+ .map(([field, error]) => {
+ // Format field names to be more user-friendly
+ const fieldNames: Record = {
+ name: 'Name',
+ port: 'Port',
+ upstreams: 'Upstreams',
+ proxyTimeout: 'Proxy Timeout',
+ proxyConnectTimeout: 'Proxy Connect Timeout',
+ proxyNextUpstreamTimeout: 'Next Upstream Timeout',
+ proxyNextUpstreamTries: 'Next Upstream Tries',
+ healthCheckInterval: 'Health Check Interval',
+ healthCheckTimeout: 'Health Check Timeout',
+ healthCheckRises: 'Health Check Rises',
+ healthCheckFalls: 'Health Check Falls',
+ };
+ const friendlyField = fieldNames[field] || field;
+ return `${friendlyField}: ${error}`;
+ });
+
+ setValidationErrors(errorMessages);
+
+ toast({
+ title: 'Configuration Error',
+ description: `Please fix ${errorMessages.length} validation error${errorMessages.length > 1 ? 's' : ''} before submitting.`,
+ variant: 'destructive',
+ });
+ return;
+ }
+
+ // Clear validation errors if everything is valid
+ setValidationErrors([]);
+
// Convert all string numbers to actual numbers
const processedData = {
...data,
@@ -177,18 +266,41 @@ export default function NLBFormDialog({ isOpen, onClose, nlb, mode }: NLBFormDia
}
onClose();
} catch (error: any) {
- const message = error.response?.data?.message;
- let description = `Failed to ${mode} NLB`;
+ console.error('NLB submission error:', error);
+
+ const response = error.response?.data;
+ let errorMessages: string[] = [];
+ let title = 'Error';
- if (message?.includes('already exists')) {
- description = 'An NLB with this name already exists';
- } else if (message) {
- description = message;
+ // Handle validation errors from backend
+ if (response?.errors && Array.isArray(response.errors)) {
+ title = 'Validation Error';
+ errorMessages = response.errors.map((err: any) => {
+ if (err.msg && err.path) {
+ return `${err.path}: ${err.msg}`;
+ }
+ return err.msg || err.message || 'Unknown error';
+ });
+ setValidationErrors(errorMessages);
+ } else if (response?.message) {
+ // Handle single error message
+ if (response.message.includes('already exists')) {
+ errorMessages = ['An NLB with this name already exists. Please choose a different name.'];
+ } else if (response.message.includes('host not found') || response.message.includes('Invalid host')) {
+ errorMessages = ['Invalid upstream host. Please check your IP address or hostname format.'];
+ } else if (response.message.includes('nginx')) {
+ errorMessages = ['Nginx configuration error: ' + response.message];
+ } else {
+ errorMessages = [response.message];
+ }
+ setValidationErrors(errorMessages);
+ } else {
+ errorMessages = [`Failed to ${mode} NLB. Please check your configuration and try again.`];
}
toast({
- title: 'Error',
- description,
+ title,
+ description: errorMessages[0] || `Failed to ${mode} NLB`,
variant: 'destructive',
});
}
@@ -198,6 +310,14 @@ export default function NLBFormDialog({ isOpen, onClose, nlb, mode }: NLBFormDia
append({ host: '', port: 80, weight: 1, maxFails: 3, failTimeout: 10, maxConns: 0, backup: false, down: false });
};
+ // Clear validation errors when dialog closes
+ useEffect(() => {
+ if (!isOpen) {
+ setValidationErrors([]);
+ setConfigWarnings([]);
+ }
+ }, [isOpen]);
+
return (
@@ -249,11 +403,15 @@ export default function NLBFormDialog({ isOpen, onClose, nlb, mode }: NLBFormDia
required: 'Port is required',
min: { value: 10000, message: 'Port must be ≥ 10000' },
max: { value: 65535, message: 'Port must be ≤ 65535' },
+ valueAsNumber: true,
})}
/>
{errors.port && (
{errors.port.message}
)}
+
+ {getValidationHints('port')}
+
@@ -326,14 +484,23 @@ export default function NLBFormDialog({ isOpen, onClose, nlb, mode }: NLBFormDia
{
+ const validation = validateUpstreamHost(value);
+ return validation.valid || validation.error || 'Invalid host';
+ },
})}
- placeholder="192.168.1.100"
+ placeholder="192.168.1.100 or backend.example.com"
/>
{errors.upstreams?.[index]?.host && (
{errors.upstreams[index]?.host?.message}
)}
+ {!errors.upstreams?.[index]?.host && (
+
+ {getValidationHints('host')}
+
+ )}
@@ -342,10 +509,16 @@ export default function NLBFormDialog({ isOpen, onClose, nlb, mode }: NLBFormDia
type="number"
{...register(`upstreams.${index}.port`, {
required: 'Port is required',
- min: 1,
- max: 65535,
+ min: { value: 1, message: 'Port must be ≥ 1' },
+ max: { value: 65535, message: 'Port must be ≤ 65535' },
+ valueAsNumber: true,
})}
/>
+ {errors.upstreams?.[index]?.port && (
+
+ {errors.upstreams[index]?.port?.message}
+
+ )}
@@ -355,26 +528,50 @@ export default function NLBFormDialog({ isOpen, onClose, nlb, mode }: NLBFormDia
+ {errors.upstreams?.[index]?.weight && (
+
+ {errors.upstreams[index]?.weight?.message}
+
+ )}
+ {errors.upstreams?.[index]?.maxFails && (
+
+ {errors.upstreams[index]?.maxFails?.message}
+
+ )}
+ {errors.upstreams?.[index]?.failTimeout && (
+
+ {errors.upstreams[index]?.failTimeout?.message}
+
+ )}
@@ -383,9 +580,18 @@ export default function NLBFormDialog({ isOpen, onClose, nlb, mode }: NLBFormDia
+ {errors.upstreams?.[index]?.maxConns && (
+
+ {errors.upstreams[index]?.maxConns?.message}
+
+ )}
@@ -448,6 +654,25 @@ export default function NLBFormDialog({ isOpen, onClose, nlb, mode }: NLBFormDia
At least one upstream is required
)}
+
+ {/* Configuration Warnings */}
+ {configWarnings.length > 0 && (
+
+
+
+
+
+ Configuration Warnings
+
+
+ {configWarnings.map((warning, idx) => (
+ - • {warning}
+ ))}
+
+
+
+
+ )}
@@ -456,12 +681,38 @@ export default function NLBFormDialog({ isOpen, onClose, nlb, mode }: NLBFormDia
-
+
+ {errors.proxyTimeout && (
+
{errors.proxyTimeout.message}
+ )}
+
+ {getValidationHints('proxyTimeout')}
+
-
+
+ {errors.proxyConnectTimeout && (
+
{errors.proxyConnectTimeout.message}
+ )}
+
+ {getValidationHints('proxyConnectTimeout')}
+
@@ -487,18 +738,32 @@ export default function NLBFormDialog({ isOpen, onClose, nlb, mode }: NLBFormDia
+ {errors.proxyNextUpstreamTimeout && (
+ {errors.proxyNextUpstreamTimeout.message}
+ )}
+ {errors.proxyNextUpstreamTries && (
+
{errors.proxyNextUpstreamTries.message}
+ )}
@@ -525,25 +790,68 @@ export default function NLBFormDialog({ isOpen, onClose, nlb, mode }: NLBFormDia
+ {errors.healthCheckInterval && (
+ {errors.healthCheckInterval.message}
+ )}
+
+ {getValidationHints('healthCheckInterval')}
+
-
+
+ {errors.healthCheckTimeout && (
+
{errors.healthCheckTimeout.message}
+ )}
+
+ {getValidationHints('healthCheckTimeout')}
+
diff --git a/apps/web/src/components/logs/LogDetailsDialog.tsx b/apps/web/src/components/logs/LogDetailsDialog.tsx
index 1b5e0cb..1417126 100644
--- a/apps/web/src/components/logs/LogDetailsDialog.tsx
+++ b/apps/web/src/components/logs/LogDetailsDialog.tsx
@@ -48,7 +48,7 @@ export function LogDetailsDialog({ log, open, onOpenChange }: LogDetailsDialogPr
return (