deeper log model
This commit is contained in:
@@ -9,6 +9,7 @@ from datetime import datetime
|
||||
import re
|
||||
from app.config import (
|
||||
AI_MODE,
|
||||
ALL_NFS,
|
||||
CONTAINER_RUNTIME,
|
||||
OPENAI_API_KEY,
|
||||
OPENAI_MODEL,
|
||||
@@ -19,6 +20,9 @@ from app.config import (
|
||||
|
||||
|
||||
async def answer(query: str, network_state: dict, alerts: list, logs: list[dict] | None = None) -> str:
|
||||
special = await _handle_log_queries(query, network_state, alerts, logs or [])
|
||||
if special:
|
||||
return special
|
||||
if AI_MODE == "openai":
|
||||
return await _call_openai(query, network_state, alerts, logs or [])
|
||||
if AI_MODE == "ollama":
|
||||
@@ -53,7 +57,6 @@ def _rule_based(query: str, network_state: dict, alerts: list, logs: list[dict])
|
||||
)
|
||||
|
||||
# Specific NF query
|
||||
from app.config import ALL_NFS
|
||||
for nf_name in ALL_NFS:
|
||||
if nf_name.lower() in q:
|
||||
return _nf_detail(nf_name, nfs, alerts, log_hits)
|
||||
@@ -74,6 +77,60 @@ def _rule_based(query: str, network_state: dict, alerts: list, logs: list[dict])
|
||||
return _health_summary(up, down, alerts, cluster, log_hits)
|
||||
|
||||
|
||||
async def _handle_log_queries(query: str, network_state: dict, alerts: list, logs: list[dict]) -> str | None:
    """Intercept chat queries that are really log/trace requests.

    Checks, in order: stop-trace commands, start-trace commands, subscriber
    log-slice requests, and NF process log-slice requests. Returns a formatted
    markdown response for the first match, or None so the caller can fall
    through to the generic answer path.

    ``network_state`` and ``alerts`` are accepted for signature parity with
    the other handlers but are not consulted here.
    """
    from app.services import log_analyzer, log_ingest

    text = query.strip()
    lc = text.lower()

    # 1) Stop an active subscriber trace ("stop trace", "end the trace", ...).
    if "trace" in lc and any(w in lc for w in ("stop", "end", "disable", "finish")):
        summary = await log_ingest.stop_subscriber_trace()
        if not summary.get("started_at"):
            return "ℹ️ No subscriber trace is currently active."
        return (
            f"🛑 **Subscriber trace stopped** for `{summary.get('filter')}`\n\n"
            f"Started: {summary.get('started_at')}\n"
            f"Matched events: **{summary.get('matched_events', 0)}**\n"
            f"Restored nodes: {', '.join(summary.get('restored_nodes', [])) or 'none'}"
        )

    # 2) Start a subscriber trace for an explicit SUPI/IMSI target.
    target = _extract_trace_target(text)
    if target:
        state = await log_ingest.start_subscriber_trace(target)
        recent = log_ingest.get_subscriber_events(target, limit=20)
        diagnosis = log_analyzer.summarize_event_slice(recent)
        return _format_trace_response(target, state, recent, diagnosis)

    # 3) Buffered log-slice requests, keyed by subscriber or by NF process.
    supi = _extract_supi_query(text)
    wants_logs = any(
        phrase in lc
        for phrase in ("show me the logs", "show logs", "logs for", "what do the logs show", "trace output", "recent logs")
    )
    nf = _extract_nf_query(text)

    subscriber_intent = _is_bare_supi(text) or "subscriber" in lc or "supi" in lc or "imsi" in lc or wants_logs
    if supi and subscriber_intent:
        events = log_ingest.get_subscriber_events(supi, limit=500)
        diagnosis = log_analyzer.summarize_event_slice(events)
        return _format_log_slice(
            title=f"Subscriber logs for `{supi}`",
            events=events,
            findings=diagnosis,
            empty_message=f"ℹ️ No recent logs matched subscriber `{supi}`.",
        )

    if nf and ("process" in lc or wants_logs or "show me" in lc):
        events = log_ingest.get_process_events(nf, limit=500)
        diagnosis = log_analyzer.summarize_event_slice(events)
        return _format_log_slice(
            title=f"Process logs for `{nf}`",
            events=events,
            findings=diagnosis,
            empty_message=f"ℹ️ No recent logs are buffered for process `{nf}`.",
        )

    # Not a log/trace query.
    return None
|
||||
|
||||
|
||||
def _health_summary(up: list, down: list, alerts: list, cluster: dict, log_hits: list[dict]) -> str:
|
||||
ts = datetime.now().strftime("%H:%M:%S")
|
||||
crit = [a for a in alerts if a.get("severity") == "critical"]
|
||||
@@ -237,6 +294,90 @@ def _log_summary(log_hits: list[dict], logs: list[dict]) -> str:
|
||||
return "\n".join(lines)
|
||||
|
||||
|
||||
def _extract_supi_query(query: str) -> str:
|
||||
lowered = query.lower()
|
||||
match = re.search(r"(imsi-\d{6,20}|\b\d{6,20}\b)", lowered)
|
||||
if not match:
|
||||
return ""
|
||||
token = match.group(1)
|
||||
if token.startswith("imsi-"):
|
||||
return token
|
||||
return f"imsi-{token}"
|
||||
|
||||
|
||||
def _is_bare_supi(query: str) -> bool:
|
||||
cleaned = query.strip().lower()
|
||||
return bool(re.fullmatch(r"(imsi-\d{6,20}|\d{6,20})", cleaned))
|
||||
|
||||
|
||||
def _extract_nf_query(query: str) -> str:
    """Return the first configured NF name mentioned in *query*, or "".

    Matching is a case-insensitive substring test against ALL_NFS, in the
    order the NFs are configured.
    """
    haystack = query.upper()
    return next((candidate for candidate in ALL_NFS if candidate in haystack), "")
|
||||
|
||||
|
||||
def _extract_trace_target(query: str) -> str:
|
||||
lowered = query.lower()
|
||||
if "trace" not in lowered:
|
||||
return ""
|
||||
if not any(word in lowered for word in ["start", "run", "begin", "trace"]):
|
||||
return ""
|
||||
return _extract_supi_query(query)
|
||||
|
||||
|
||||
def _format_log_slice(*, title: str, events: list[dict], findings: list[dict], empty_message: str) -> str:
|
||||
if not events:
|
||||
return empty_message
|
||||
lines = [f"🧾 **{title}**", f"Buffered lines: **{len(events)}**\n"]
|
||||
if findings:
|
||||
lines.append("Rule hits:")
|
||||
for finding in findings[:6]:
|
||||
lines.append(
|
||||
f"• **{finding['severity'].upper()}** {finding['nf']} on {finding.get('node','unknown')}: "
|
||||
f"{finding['description']}"
|
||||
)
|
||||
lines.append(f" Fix: {finding['remediation']}")
|
||||
lines.append("")
|
||||
lines.append("Recent log lines:")
|
||||
for event in events[-12:]:
|
||||
lines.append(
|
||||
f"• {event.get('timestamp','')} — {event.get('node','unknown')} {event.get('nf','SYSTEM')}: "
|
||||
f"{_trim_message(event.get('message',''), 220)}"
|
||||
)
|
||||
return "\n".join(lines)
|
||||
|
||||
|
||||
def _format_trace_response(target: str, state: dict, events: list[dict], findings: list[dict]) -> str:
|
||||
lines = [
|
||||
f"🔎 **Subscriber trace active** for `{target}`",
|
||||
f"Level override: **{state.get('level', 'debug')}**",
|
||||
f"Nodes updated: {', '.join(state.get('nodes', [])) or 'none'}",
|
||||
f"Matched events so far: **{state.get('matched_events', 0)}**\n",
|
||||
]
|
||||
if findings:
|
||||
lines.append("Current rule-based diagnosis:")
|
||||
for finding in findings[:5]:
|
||||
lines.append(
|
||||
f"• **{finding['severity'].upper()}** {finding['nf']} on {finding.get('node','unknown')}: "
|
||||
f"{finding['description']}"
|
||||
)
|
||||
lines.append(f" Fix: {finding['remediation']}")
|
||||
lines.append("")
|
||||
if events:
|
||||
lines.append("Current trace lines:")
|
||||
for event in events[-10:]:
|
||||
lines.append(
|
||||
f"• {event.get('timestamp','')} — {event.get('node','unknown')} {event.get('nf','SYSTEM')}: "
|
||||
f"{_trim_message(event.get('message',''), 220)}"
|
||||
)
|
||||
else:
|
||||
lines.append("No matching subscriber logs have arrived yet.")
|
||||
lines.append("\nUse `stop trace` when the attach/session test is complete.")
|
||||
return "\n".join(lines)
|
||||
|
||||
|
||||
def _nf_label(nf: dict) -> str:
|
||||
placements = nf.get("nodes", [])
|
||||
if not placements:
|
||||
|
||||
Reference in New Issue
Block a user