started log ingestion and analysis
This commit is contained in:
BIN
app/routers/__pycache__/logs.cpython-314.pyc
Normal file
BIN
app/routers/__pycache__/logs.cpython-314.pyc
Normal file
Binary file not shown.
Binary file not shown.
36
app/routers/logs.py
Normal file
36
app/routers/logs.py
Normal file
@@ -0,0 +1,36 @@
|
||||
from fastapi import APIRouter, HTTPException, Query
|
||||
|
||||
from app.services import log_ingest
|
||||
|
||||
router = APIRouter()
|
||||
|
||||
|
||||
@router.get("/logs/status")
async def get_log_status():
    """Return the log receiver's current status payload."""
    status = log_ingest.receiver_status()
    return status
|
||||
|
||||
|
||||
@router.get("/logs/events")
async def get_log_events(
    limit: int = Query(default=200, ge=1, le=5000),
    node: str | None = None,
    nf: str | None = None,
    imsi: str | None = None,
):
    """List recent log events, optionally filtered by node, NF, or IMSI.

    Returns the matching events together with the receiver status so the
    caller can tell whether the ingestion feed is currently live.
    """
    events = log_ingest.get_events(limit=limit, node=node, nf=nf, imsi=imsi)
    return {"events": events, "status": log_ingest.receiver_status()}
|
||||
|
||||
|
||||
@router.get("/logs/contexts")
async def get_log_contexts(limit: int = Query(default=20, ge=1, le=200)):
    """Return up to *limit* recently assembled alert contexts."""
    contexts = log_ingest.recent_alert_context(limit=limit)
    return {"contexts": contexts}
|
||||
|
||||
|
||||
@router.post("/logs/configure")
async def configure_log_shipping():
    """Apply site log-output configuration, surfacing any failure as HTTP 500."""
    try:
        result = await log_ingest.configure_site_output()
    except Exception as exc:  # API boundary: translate every failure into a 500
        raise HTTPException(status_code=500, detail=str(exc)) from exc
    return result
|
||||
@@ -1,6 +1,6 @@
|
||||
from fastapi import APIRouter
|
||||
from pydantic import BaseModel
|
||||
from app.services import cluster_inventory, alertmanager, ai
|
||||
from app.services import cluster_inventory, alertmanager, ai, log_ingest
|
||||
|
||||
router = APIRouter()
|
||||
|
||||
@@ -11,14 +11,15 @@ class QueryRequest(BaseModel):
|
||||
|
||||
@router.post("/query")
async def query(req: QueryRequest):
    """Answer a user query using live network state, alerts, and recent logs.

    Gathers context concurrently via ``_gather``, asks the AI service for an
    answer, and returns the answer alongside the raw context for the client.
    """
    # The diff view fused the pre- and post-commit bodies; this is the
    # committed (post-change) version, which also passes logs to the AI.
    network_state, alerts, logs = await _gather(req.query)
    response = await ai.answer(req.query, network_state, alerts, logs)
    return {"response": response, "network_state": network_state, "alerts": alerts, "logs": logs}
|
||||
|
||||
|
||||
async def _gather(query_text: str):
    """Concurrently fetch network state, active alerts, and recent log events.

    ``query_text`` is currently unused — presumably reserved for
    query-scoped filtering; confirm against callers before removing.
    Returns a ``(network_state, alerts, logs)`` tuple.
    """
    # Local import mirrors the original code's function-scoped import.
    import asyncio

    nfs_task = asyncio.create_task(cluster_inventory.get_network_status())
    alerts_task = asyncio.create_task(alertmanager.get_alerts())
    # log_ingest.get_events is called off the event loop via a worker
    # thread; 200 is the number of most-recent events to pull.
    logs_task = asyncio.to_thread(log_ingest.get_events, 200)
    network_state, alerts, logs = await asyncio.gather(nfs_task, alerts_task, logs_task)
    return network_state, alerts, logs
|
||||
|
||||
Reference in New Issue
Block a user