|
|
@@ -1,10 +1,15 @@
|
|
|
import os
|
|
|
import time
|
|
|
import threading
|
|
|
+import json
|
|
|
+import logging
|
|
|
+from datetime import datetime
|
|
|
+from logging.handlers import RotatingFileHandler
|
|
|
|
|
|
import cv2
|
|
|
import numpy as np
|
|
|
-from fastapi import FastAPI, HTTPException
|
|
|
+from fastapi import FastAPI, HTTPException, Request
|
|
|
+from starlette.concurrency import iterate_in_threadpool
|
|
|
import uvicorn
|
|
|
|
|
|
from depth_common import (
|
|
|
@@ -27,6 +32,97 @@ MAX_SAVED_IMAGES = int(os.getenv("MAX_SAVED_IMAGES", "1000"))
|
|
|
SETTINGS = Settings.from_env()
|
|
|
|
|
|
app = FastAPI(title="Cargo Height API")
|
|
|
request_logger = logging.getLogger("cargo_height.request")

# Response bodies longer than this many characters are truncated in the log.
MAX_LOG_RESPONSE_LEN = 1000
# Rotation policy for Log/request.log, overridable via environment.
REQUEST_LOG_MAX_BYTES = int(os.getenv("REQUEST_LOG_MAX_BYTES", str(20 * 1024 * 1024)))
REQUEST_LOG_BACKUP_COUNT = int(os.getenv("REQUEST_LOG_BACKUP_COUNT", "10"))


def _setup_request_logger():
    """Attach a rotating file handler and a console handler to ``request_logger``.

    Idempotent: when the logger already carries handlers, no duplicates are
    added — but the log directory is still created and the level/propagation
    settings are refreshed on every call.
    """
    log_dir = os.path.join(os.getcwd(), "Log")
    os.makedirs(log_dir, exist_ok=True)

    request_logger.setLevel(logging.INFO)
    # Keep request entries from also flowing into the root logger's handlers.
    request_logger.propagate = False

    if request_logger.handlers:  # already configured — don't stack handlers
        return

    fmt = logging.Formatter(
        fmt="%(asctime)s [%(levelname)s] %(name)s - %(message)s",
        datefmt="%Y-%m-%d %H:%M:%S",
    )

    for handler in (
        RotatingFileHandler(
            os.path.join(log_dir, "request.log"),
            maxBytes=REQUEST_LOG_MAX_BYTES,
            backupCount=REQUEST_LOG_BACKUP_COUNT,
            encoding="utf-8",
        ),
        logging.StreamHandler(),
    ):
        handler.setLevel(logging.INFO)
        handler.setFormatter(fmt)
        request_logger.addHandler(handler)


_setup_request_logger()
|
|
|
+
|
|
|
+
|
|
|
@app.middleware("http")
async def request_log_middleware(request: Request, call_next):
    """Log every HTTP request: timestamp, method, path, status, duration,
    and a bounded snapshot of the response body.

    The streamed response body is drained once to build the log entry and
    then re-wrapped in a fresh iterator, so the client receives it unchanged.
    On an unhandled exception the request is logged as status 500 with the
    traceback and the exception is re-raised for FastAPI's error handling.
    """
    # Naive local timestamp with millisecond precision, matching the log format.
    request_time = datetime.now().strftime("%Y-%m-%d %H:%M:%S.%f")[:-3]
    start = time.perf_counter()
    try:
        response = await call_next(request)
    except Exception:
        elapsed_ms = (time.perf_counter() - start) * 1000
        # .exception() appends the traceback to the log entry.
        request_logger.exception(
            "request_time=%s method=%s path=%s status=%s duration_ms=%.2f response=%s",
            request_time,
            request.method,
            request.url.path,
            500,
            elapsed_ms,
            "internal_error",
        )
        raise

    # Drain the body in O(n) via join (the original `body += chunk` loop is
    # quadratic for many chunks), then hand back a single-chunk iterator.
    chunks = [chunk async for chunk in response.body_iterator]
    body = b"".join(chunks)
    response.body_iterator = iterate_in_threadpool(iter([body]))

    response_text = _summarize_body(body, response.headers.get("content-type", ""))

    elapsed_ms = (time.perf_counter() - start) * 1000
    request_logger.info(
        "request_time=%s method=%s path=%s status=%s duration_ms=%.2f response=%s",
        request_time,
        request.method,
        request.url.path,
        response.status_code,
        elapsed_ms,
        response_text,
    )
    return response


def _summarize_body(body: bytes, content_type: str) -> str:
    """Render a response body as a bounded, log-safe string.

    JSON is re-serialized compactly; other textual types are decoded with
    replacement for invalid UTF-8; binary payloads (e.g. image responses
    from this camera service) are summarized as ``<N bytes of TYPE>`` rather
    than dumped as replacement-character noise. Results longer than
    MAX_LOG_RESPONSE_LEN characters are truncated.
    """
    if not body:
        return ""
    if "application/json" in content_type:
        try:
            text = json.dumps(json.loads(body), ensure_ascii=False)
        except Exception:
            # Declared JSON but unparseable — fall back to a raw decode.
            text = body.decode("utf-8", errors="replace")
    elif content_type.startswith("text/") or any(
        marker in content_type for marker in ("xml", "javascript", "urlencoded")
    ):
        text = body.decode("utf-8", errors="replace")
    else:
        # Binary (images, octet-stream, ...): log size and type, not the bytes.
        text = "<%d bytes of %s>" % (len(body), content_type or "unknown content-type")
    if len(text) > MAX_LOG_RESPONSE_LEN:
        text = text[:MAX_LOG_RESPONSE_LEN] + "...(truncated)"
    return text
|
|
|
|
|
|
|
|
|
# Camera-related global state (protected by a lock)
|