Spaces:
Running
Running
| """ | |
| Structured JSON artifact export with complete run metadata. | |
| Produces a self-contained JSON document suitable for programmatic | |
| consumption, API integration, and archival. | |
| """ | |
| import json | |
| import os | |
| from datetime import datetime, timezone | |
def export_json(data, video_meta, engine_config, out_dir):
    """Generate a comprehensive JSON export of the analysis run.

    Builds a self-contained document (version, engine config, performance,
    per-class counts, PCU, speed distribution, congestion stats, flow times,
    and the full per-vehicle event log) and writes it to
    ``<out_dir>/analysis.json`` encoded as UTF-8.

    Args:
        data: Engine result dict (counts, congestion, events, etc.)
        video_meta: dict with filename, fps, frames, duration, resolution, pixels
        engine_config: dict with imgsz, conf, iou, stride, etc.
        out_dir: Output directory path (must already exist)

    Returns:
        The output filename ("analysis.json").
    """
    class_in = data.get("class_in", {})
    class_out = data.get("class_out", {})
    congestion = data.get("congestion", [])
    raw_events = data.get("raw_events", [])
    pcu_data = data.get("pcu", {})
    speed_data = data.get("speed", {})

    # Build events list from raw_events; row 0 is assumed to be a header row
    # (frame, timestamp, id, class, direction) — TODO confirm against producer.
    events = [
        {
            "frame": row[0],
            "timestamp_sec": row[1],
            "vehicle_id": row[2],
            "class_name": row[3],
            "direction": row[4],
        }
        for row in raw_events[1:]
    ]

    total_in = sum(class_in.values())
    total_out = sum(class_out.values())

    doc = {
        "urbanflow_version": "1.1",
        "generated_at": datetime.now(timezone.utc).isoformat(),
        "video": video_meta,
        "engine": {
            "model": "VehicleNet-Y26s (OpenVINO INT8)",
            "imgsz": engine_config.get("imgsz", 736),
            "conf": engine_config.get("conf", 0.12),
            "iou": engine_config.get("iou", 0.6),
            "stride": engine_config.get("detect_stride", 2),
            "tracker": "ByteTrack (custom)",
            "batch": 2,
        },
        "performance": {
            "processing_time_sec": data.get("processing_time", 0),
            "actual_fps": data.get("actual_fps", 0),
            "speed_vs_realtime": data.get("speed_vs_realtime", 0),
        },
        "counts": {
            "total_in": total_in,
            "total_out": total_out,
            "total": total_in + total_out,
            # Keys are stringified so non-string class keys (e.g. int class
            # ids) serialize as stable JSON object keys.
            "per_class_in": {str(k): v for k, v in class_in.items()},
            "per_class_out": {str(k): v for k, v in class_out.items()},
        },
        "pcu": pcu_data,
        "speed_distribution": speed_data.get("distribution", {}),
        "congestion": {
            # Guard against an empty timeline: peak/average default to 0.
            "peak": max(congestion) if congestion else 0,
            "average": round(sum(congestion) / len(congestion), 1) if congestion else 0,
            "timeline": congestion,
        },
        "flow_times": data.get("flow_times", []),
        "events": events,
    }

    path = os.path.join(out_dir, "analysis.json")
    # UTF-8 explicitly: default encoding is platform-dependent, and with it
    # pinned we can emit non-ASCII metadata (e.g. filenames) verbatim.
    # default=str is a deliberate catch-all so unexpected types (numpy
    # scalars, datetimes) degrade to strings instead of crashing the export.
    with open(path, "w", encoding="utf-8") as f:
        json.dump(doc, f, indent=2, ensure_ascii=False, default=str)
    return "analysis.json"