File size: 16,176 Bytes
2d521fd
afa4de7
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
2d521fd
 
 
 
 
afa4de7
 
2d521fd
 
 
 
 
 
afa4de7
2d521fd
 
 
 
 
 
 
afa4de7
2d521fd
 
 
 
 
 
 
 
 
 
 
afa4de7
2d521fd
 
 
 
 
 
 
 
 
 
 
 
 
 
afa4de7
2d521fd
 
 
 
 
 
 
 
 
 
 
 
deb6676
afa4de7
2d521fd
 
 
 
 
 
 
 
 
 
 
 
 
 
afa4de7
 
 
 
 
 
 
 
 
 
 
 
 
 
 
2d521fd
 
 
afa4de7
2d521fd
 
 
afa4de7
 
2d521fd
 
 
afa4de7
 
2d521fd
 
 
 
 
 
 
 
 
 
 
 
afa4de7
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
2d521fd
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
afa4de7
 
2d521fd
 
 
 
afa4de7
 
2d521fd
 
 
 
 
 
 
 
afa4de7
 
2d521fd
 
 
 
afa4de7
 
2d521fd
afa4de7
 
 
 
 
2d521fd
 
afa4de7
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
2d521fd
afa4de7
2d521fd
 
afa4de7
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
2d521fd
 
 
 
 
afa4de7
 
 
 
 
 
 
 
 
 
 
 
 
2d521fd
 
 
 
 
 
 
 
 
afa4de7
2d521fd
 
 
 
 
 
 
 
 
 
afa4de7
2d521fd
 
 
afa4de7
 
2d521fd
 
 
 
 
afa4de7
2d521fd
 
 
 
afa4de7
2d521fd
afa4de7
2d521fd
afa4de7
 
2d521fd
 
afa4de7
 
2d521fd
afa4de7
 
2d521fd
afa4de7
 
2d521fd
afa4de7
 
2d521fd
afa4de7
 
2d521fd
afa4de7
 
2d521fd
afa4de7
 
2d521fd
afa4de7
 
 
 
 
2d521fd
 
 
 
afa4de7
2d521fd
 
 
 
 
afa4de7
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
"""
ARF API Control Plane β€” Main Application Entry Point
====================================================

The control plane serves as the HTTP layer between the **Agentic Reliability
Framework (ARF)** core engine and external consumers (front‑end dashboard,
enterprise clients, and monitoring infrastructure).

It is responsible for:

* **Lifetime management** of the Bayesian risk engine, policy engine,
  semantic memory (RAG graph), and epistemic models.
* **Observability** via optional OpenTelemetry tracing and Prometheus metrics
  (the latter exposed automatically by ``prometheus-fastapi-instrumentator``
  on ``/metrics``).
* **Rate limiting** and **usage tracking** with atomic quota consumption.
* **CORS** configuration for the public ARF front‑end.
* **Database‑backed persistence** of the conjugate Bayesian posteriors so
  that online learning survives restarts.
* **Automated Rust enforcer canary promotion** via Wilson confidence interval
  monitoring of the agreement counters.

All heavy components are loaded **lazily and best‑effort** – if a dependency
is missing the API continues to serve health‑check and status endpoints,
degrading gracefully rather than crashing.
"""
import logging
import os
import sys
import json
import threading
import time as _time
from contextlib import asynccontextmanager
from typing import Dict

from fastapi import FastAPI
from fastapi.middleware.cors import CORSMiddleware

# ── Optional: Prometheus metrics ─────────────────────────────
try:
    from prometheus_fastapi_instrumentator import Instrumentator
    PROMETHEUS_AVAILABLE = True
except ImportError:
    PROMETHEUS_AVAILABLE = False
    Instrumentator = None

# ── Optional: rate‑limiting (slowapi) ────────────────────────
try:
    from slowapi import _rate_limit_exceeded_handler
    from slowapi.errors import RateLimitExceeded
    from slowapi.middleware import SlowAPIMiddleware
    SLOWAPI_AVAILABLE = True
except ImportError:
    SLOWAPI_AVAILABLE = False
    _rate_limit_exceeded_handler = None
    RateLimitExceeded = None
    SlowAPIMiddleware = None

# ── Core ARF engine (optional but essential for governance) ──
try:
    from agentic_reliability_framework.core.governance.risk_engine import RiskEngine
    from agentic_reliability_framework.core.governance.policy_engine import PolicyEngine
    from agentic_reliability_framework.runtime.memory import create_faiss_index, RAGGraphMemory
    from agentic_reliability_framework.runtime.memory.constants import MemoryConstants
    ARF_AVAILABLE = True
except ImportError:
    ARF_AVAILABLE = False
    RiskEngine = None
    PolicyEngine = None
    create_faiss_index = None
    RAGGraphMemory = None
    MemoryConstants = None

# ── Usage tracker ────────────────────────────────────────────
from app.core.usage_tracker import init_tracker, tracker, Tier

from app.api import (
    routes_governance,
    routes_history,
    routes_incidents,
    routes_intents,
    routes_risk,
    routes_memory,
    routes_admin,
    routes_payments,
    webhooks,
    routes_users,
    routes_pricing,
)
from app.api.deps import limiter
from app.core.config import settings

# Module-wide logger plus process-level logging configuration.
# All log output is routed to stdout (not stderr) at INFO level;
# presumably so container/log-aggregation platforms pick it up — confirm.
logger = logging.getLogger("arf.api")
logging.basicConfig(
    level=logging.INFO,
    format="%(asctime)s - %(name)s - %(levelname)s - %(message)s",
    handlers=[logging.StreamHandler(sys.stdout)],  # explicit stdout handler
)


@asynccontextmanager
async def lifespan(app: FastAPI):
    """
    Application lifespan manager.

    All initialisation that requires a running event loop (database
    connections, model loading, etc.) happens **before** the ``yield``.
    Cleanup (if any) happens after the ``yield``.

    Initialisation order:
        1. Risk engine (Bayesian scoring + HMC).
        2. Load persisted conjugate posterior state (``beta_state`` table).
        3. OpenTelemetry tracing (console exporter by default).
        4. Policy engine, RAG memory, and epistemic model.
        5. Usage tracker (SQLite / Redis).
        6. Wilson confidence monitor for Rust enforcer canary promotion.

    Failure policy visible in this body: steps 1 and 5 are fatal (they
    re-raise as ``RuntimeError``); every other step logs a warning and
    degrades gracefully (attribute set to ``None`` or feature skipped).
    """
    logger.info("πŸš€ Starting ARF API Control Plane")
    logger.debug(f"Python path: {sys.path}")

    # ── 1. Risk engine ────────────────────────────────────────
    if ARF_AVAILABLE:
        hmc_model_path = os.getenv("ARF_HMC_MODEL", "models/hmc_model.json")
        use_hyperpriors = os.getenv(
            "ARF_USE_HYPERPRIORS", "false"
        ).lower() == "true"
        logger.info(
            "Initializing RiskEngine – HMC model: %s, hyperpriors: %s",
            hmc_model_path,
            use_hyperpriors,
        )
        try:
            # NOTE(review): n0 and hyperprior_weight are hard-coded tuning
            # values — confirm they are intentional rather than env-driven.
            app.state.risk_engine = RiskEngine(
                hmc_model_path=hmc_model_path,
                use_hyperpriors=use_hyperpriors,
                n0=1000,
                hyperprior_weight=0.3,
            )
            logger.info("βœ… RiskEngine initialized successfully.")
        except Exception as e:
            # Risk engine is essential: abort startup rather than serve
            # ungoverned traffic.
            logger.exception("πŸ’₯ Fatal error initializing RiskEngine")
            raise RuntimeError("RiskEngine initialization failed") from e

        # ── 2. Persisted Bayesian state ───────────────────────
        # Best-effort restore of the conjugate posteriors so online
        # learning survives restarts; any failure falls back to priors.
        try:
            from app.database.session import SessionLocal
            from app.database.models_intents import BetaStateDB
            from agentic_reliability_framework.core.governance.risk_engine import ActionCategory

            db = SessionLocal()
            try:
                rows = db.query(BetaStateDB).all()
                if rows:
                    # One (alpha, beta) pair per action category.
                    state = {
                        ActionCategory(row.category): (row.alpha, row.beta)
                        for row in rows
                    }
                    app.state.risk_engine.beta_store.load_state(state)
                    logger.info(
                        "Loaded Bayesian posterior state from database (%d categories).",
                        len(state),
                    )
                else:
                    logger.info(
                        "No persisted Bayesian state found; using default priors."
                    )
            finally:
                db.close()
        except Exception as e:
            logger.warning(
                "Could not load Bayesian state from database: %s", e
            )

        # ── 3. Tracing (OpenTelemetry) ─────────────────────────
        # Optional: imports happen here so a missing opentelemetry package
        # only skips tracing instead of failing module import.
        try:
            from opentelemetry import trace
            from opentelemetry.sdk.resources import SERVICE_NAME, Resource
            from opentelemetry.sdk.trace import TracerProvider
            from opentelemetry.sdk.trace.export import SimpleSpanProcessor, ConsoleSpanExporter
            from opentelemetry.instrumentation.fastapi import FastAPIInstrumentor

            resource = Resource.create({SERVICE_NAME: "arf-api"})
            provider = TracerProvider(resource=resource)
            provider.add_span_processor(SimpleSpanProcessor(ConsoleSpanExporter()))
            trace.set_tracer_provider(provider)

            FastAPIInstrumentor.instrument_app(app)
            logger.info("βœ… Tracing initialized (console exporter).")
        except Exception as e:
            logger.warning("Tracing initialization skipped: %s", e)

        # ── 4. Policy engine, RAG, epistemic model ─────────────
        # Each component is independent: failure sets its attribute to None
        # so downstream handlers can feature-detect.
        try:
            app.state.policy_engine = PolicyEngine()
            logger.info("βœ… PolicyEngine initialized successfully.")
        except Exception as e:
            logger.warning(f"PolicyEngine initialization failed: {e}")
            app.state.policy_engine = None

        try:
            faiss_index = create_faiss_index(dim=MemoryConstants.VECTOR_DIM)
            app.state.rag_graph = RAGGraphMemory(faiss_index)
            logger.info("βœ… RAGGraphMemory initialized successfully.")
        except Exception as e:
            logger.warning(f"RAGGraphMemory initialization failed: {e}")
            app.state.rag_graph = None

        # Epistemic model is opt-in via EPISTEMIC_MODEL; unset or failed
        # loading leaves both model and tokenizer as None.
        epistemic_model_name = os.getenv("EPISTEMIC_MODEL", "")
        if epistemic_model_name:
            try:
                from sentence_transformers import SentenceTransformer
                logger.info(f"Loading epistemic model: {epistemic_model_name}")
                app.state.epistemic_model = SentenceTransformer(
                    epistemic_model_name
                )
                app.state.epistemic_tokenizer = app.state.epistemic_model.tokenizer
                logger.info("βœ… Epistemic model loaded.")
            except ImportError:
                logger.warning(
                    "sentence-transformers not installed; epistemic signals will be zeros."
                )
                app.state.epistemic_model = None
                app.state.epistemic_tokenizer = None
            except Exception as e:
                logger.warning(f"Failed to load epistemic model: {e}")
                app.state.epistemic_model = None
                app.state.epistemic_tokenizer = None
        else:
            logger.info(
                "EPISTEMIC_MODEL not set; epistemic signals will be zeros."
            )
            app.state.epistemic_model = None
            app.state.epistemic_tokenizer = None
    else:
        logger.warning(
            "agentic_reliability_framework not installed; risk engine, policy engine, RAG disabled."
        )

    # ── 5. Usage tracker ──────────────────────────────────────
    # Enabled unless ARF_USAGE_TRACKING is exactly "false" (case-insensitive).
    usage_tracking_disabled = (
        os.getenv("ARF_USAGE_TRACKING", "true").lower() == "false"
    )
    if not usage_tracking_disabled:
        logger.info("Initialising usage tracker...")
        try:
            init_tracker(
                db_path=os.getenv("ARF_USAGE_DB_PATH", "arf_usage.db"),
                redis_url=os.getenv("ARF_REDIS_URL"),
            )
            # Seed initial API keys from environment variable (for testing / demo)
            api_keys_json = os.getenv("ARF_API_KEYS", "{}")
            try:
                # Expected shape: {"<api-key>": "<tier-name>", ...};
                # unknown tier names are skipped with a warning.
                api_keys = json.loads(api_keys_json)
                for key, tier_str in api_keys.items():
                    try:
                        tier = Tier(tier_str.lower())
                        tracker.get_or_create_api_key(key, tier)
                        logger.info(f"Seeded API key for tier {tier.value}")
                    except ValueError:
                        logger.warning(
                            f"Invalid tier '{tier_str}' for key {key}, skipping"
                        )
            except json.JSONDecodeError:
                logger.warning(
                    "ARF_API_KEYS environment variable is not valid JSON; skipping seeding."
                )
            app.state.usage_tracker = tracker
            logger.info("βœ… Usage tracker ready.")
        except Exception as e:
            # Quota enforcement depends on the tracker, so treat this as fatal.
            logger.critical(f"Failed to initialise usage tracker: {e}")
            raise RuntimeError("Usage tracker initialisation failed") from e
    else:
        logger.info("Usage tracking disabled by ARF_USAGE_TRACKING=false.")
        app.state.usage_tracker = None

    # ── 6. Wilson confidence monitor ──────────────────────────
    try:
        from app.services.wilson_monitor import update as wilson_update
        from prometheus_client import REGISTRY

        def _wilson_updater():
            # Periodically feed the Rust-enforcer agreement counters from the
            # Prometheus registry into the Wilson monitor. Missing samples
            # read as 0.0 via the `or` fallback.
            while True:
                try:
                    agreed = REGISTRY.get_sample_value(
                        'arf_rust_agreement_total', {'result': 'agreed'}
                    ) or 0.0
                    diverged = REGISTRY.get_sample_value(
                        'arf_rust_agreement_total', {'result': 'diverged'}
                    ) or 0.0
                    wilson_update(int(agreed), int(diverged))
                except Exception as e:
                    logger.debug("Wilson updater error: %s", e)
                _time.sleep(300)  # every 5 minutes

        # Daemon thread: never joined, so it cannot block process shutdown.
        threading.Thread(target=_wilson_updater, daemon=True).start()
        logger.info("βœ… Wilson monitor background updater started.")
    except Exception as e:
        logger.warning("Wilson monitor initialization skipped: %s", e)

    yield
    logger.info("πŸ›‘ Shutting down ARF API")


def create_app() -> FastAPI:
    """
    Construct and configure the FastAPI application.

    Middleware is attached in this order:
        1. CORS (restricted to the public front‑end origin).
        2. Rate limiting (only when slowapi is importable).
        3. Prometheus metrics exposition (only when available).

    Every API router is mounted under ``/api/v1`` except the memory
    router (``/v1/memory``) and webhooks (mounted at the root).

    A minimal ``/health`` endpoint is provided for liveness probes.
    """
    application = FastAPI(
        title=settings.app_name,
        version="0.5.0",
        lifespan=lifespan,
        docs_url="/docs",
        redoc_url="/redoc",
        description="Agentic Reliability Framework (ARF) API",
    )

    # ── CORS ──────────────────────────────────────────────────
    application.add_middleware(
        CORSMiddleware,
        allow_origins=["https://arf-frontend-sandy.vercel.app"],
        allow_credentials=True,
        allow_methods=["*"],
        allow_headers=["*"],
    )
    logger.debug("CORS middleware configured")

    # ── Rate limiter ──────────────────────────────────────────
    if SLOWAPI_AVAILABLE:
        application.state.limiter = limiter
        application.add_exception_handler(
            RateLimitExceeded, _rate_limit_exceeded_handler
        )
        application.add_middleware(SlowAPIMiddleware)
        logger.debug("Rate limiter middleware configured")
    else:
        logger.debug("Rate limiter disabled (slowapi not installed)")

    # ── Prometheus ────────────────────────────────────────────
    if PROMETHEUS_AVAILABLE:
        Instrumentator().instrument(application).expose(application)
        logger.debug("Prometheus instrumentator configured")
    else:
        logger.debug("Prometheus instrumentator disabled (module not installed)")

    # ── API Routers ───────────────────────────────────────────
    # (router, url prefix, OpenAPI tag) — registration order matters and
    # mirrors the mount order of the public API surface.
    router_table = (
        (routes_incidents.router, "/api/v1", "incidents"),
        (routes_risk.router, "/api/v1", "risk"),
        (routes_intents.router, "/api/v1", "intents"),
        (routes_history.router, "/api/v1", "history"),
        (routes_governance.router, "/api/v1", "governance"),
        (routes_memory.router, "/v1/memory", "memory"),
        (routes_admin.router, "/api/v1", "admin"),
        (routes_payments.router, "/api/v1", "payments"),
        (webhooks.router, "", "webhooks"),  # webhooks live at the root
        (routes_users.router, "/api/v1", "users"),
        (routes_pricing.router, "/api/v1", "pricing"),
    )
    for router, url_prefix, tag in router_table:
        application.include_router(router, prefix=url_prefix, tags=[tag])
    logger.debug("All API routers included")

    @application.get("/health", tags=["health"])
    async def health() -> Dict[str, str]:
        """Liveness probe – returns 200 when the application is running."""
        return {"status": "ok"}

    return application


app = create_app()