"""FastAPI application for Getaround pricing API."""
import logging
from contextlib import asynccontextmanager
from fastapi import FastAPI
from fastapi.middleware.cors import CORSMiddleware
from src.api.routers.predict import router as predict_router
from src.config.settings import configure_logging, get_settings
from src.ml.predict import get_predictor
settings = get_settings()
configure_logging(settings)
logger = logging.getLogger(__name__)
@asynccontextmanager
async def lifespan(app: FastAPI):
    """Run startup and shutdown logic around the application's lifetime.

    On startup the predictor is eagerly instantiated so the first request
    does not pay the model-loading cost; a load failure is logged as a
    warning rather than aborting the server.

    Args:
        app: The FastAPI application instance.
    """
    # --- startup ---
    logger.info("Starting up - preloading model")
    try:
        get_predictor()
    except Exception as exc:
        # Best-effort preload: the API still starts without the model.
        logger.warning("Model not available at startup: %s", exc)
    else:
        logger.info("Model loaded successfully")
    yield
    # --- shutdown ---
    logger.info("Shutting down")
# Application instance. The triple-quoted description below is rendered as
# Markdown on the /docs (Swagger UI) and /redoc pages; it is runtime text,
# not a comment. The lifespan handler defined above preloads the model.
app = FastAPI(
    title="Getaround Pricing API",
    description="""
API for predicting optimal rental prices for cars.
## Endpoints
- **POST /predict**: Predict rental prices based on car features
- **GET /health**: Health check endpoint
## Usage
Send a POST request to `/predict` with car features:
```json
{
"cars": [{
"model_key": "Citroen",
"mileage": 100000,
"engine_power": 120,
"fuel": "diesel",
"paint_color": "black",
"car_type": "sedan",
"private_parking_available": true,
"has_gps": true,
"has_air_conditioning": true,
"automatic_car": false,
"has_getaround_connect": false,
"has_speed_regulator": true,
"winter_tires": false
}]
}
```
Response:
```json
{
"prediction": [124]
}
```
""",
    version="1.0.0",
    docs_url="/docs",
    redoc_url="/redoc",
    lifespan=lifespan,
)
# NOTE(review): wildcard origins combined with allow_credentials=True is
# disallowed by the CORS spec; Starlette compensates by echoing the request
# origin, which effectively trusts every site. Confirm whether credentialed
# cross-origin access is needed, and restrict allow_origins if possible.
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)
# Mount the /predict endpoints defined in src.api.routers.predict.
app.include_router(predict_router)
@app.get("/health", tags=["health"])
async def health_check() -> dict:
    """Report service liveness and whether the predictor is in memory.

    Returns:
        Status dictionary with model availability.
    """
    # Import deferred to avoid a hard dependency at module import time;
    # a non-None singleton means the model has been loaded.
    from src.ml.predict import _predictor_instance

    return {
        "status": "healthy",
        "model_loaded": _predictor_instance is not None,
    }
|