Start log ingestion and analysis

This commit is contained in:
Jake Kasper
2026-04-24 14:15:58 -04:00
parent c2537dd955
commit 9ac96cee9a
27 changed files with 1368 additions and 179 deletions

37
app/services/log_rules.py Normal file
View File

@@ -0,0 +1,37 @@
"""JSON-backed log rule loading for runtime-editable log analysis."""
from __future__ import annotations
import json
from pathlib import Path
RULES_PATH = Path(__file__).resolve().parents[2] / "config" / "log_rules.json"
_rules_cache: dict[str, list[dict]] | None = None
_rules_cache_mtime: float | None = None
def load_category_patterns() -> dict[str, list[dict]]:
global _rules_cache, _rules_cache_mtime
try:
stat = RULES_PATH.stat()
if _rules_cache is not None and _rules_cache_mtime == stat.st_mtime:
return _rules_cache
data = json.loads(RULES_PATH.read_text())
categories = data.get("categories", {})
loaded: dict[str, list[dict]] = {}
for category, rules in categories.items():
loaded[category] = []
for rule in rules:
if not all(
key in rule
for key in ("pattern", "nf", "severity", "description", "remediation")
):
continue
loaded[category].append(rule)
_rules_cache = loaded
_rules_cache_mtime = stat.st_mtime
return loaded
except Exception:
return {}