fix: disk leak, upload limit, thread safety, pydantic defaults

- #4: background thread deletes export ZIPs older than 1h
- #5: 50 MB per-file upload limit via read(MAX+1) guard
- #6: replace _jobs[job_id].update() with atomic per-key assignments
- #7: ExportRequest mutable dict fields use Field(default_factory=dict)

Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
2026-04-23 12:24:42 +00:00
parent be0a79095f
commit 69adfe6abb
+29 -10
@@ -8,13 +8,13 @@ import threading
 import zipfile
 from datetime import date
 from time import time
-from typing import List
+from typing import List, Optional
 from dotenv import load_dotenv
 from fastapi import FastAPI, File, Form, HTTPException, Request, UploadFile
 from fastapi.responses import FileResponse, JSONResponse, Response, StreamingResponse
 from fastapi.middleware.cors import CORSMiddleware
-from pydantic import BaseModel
+from pydantic import BaseModel, Field
 import secrets
 import uvicorn
 from PIL import Image
@@ -116,9 +116,9 @@ class ExportRequest(BaseModel):
     brightness: float = 1.0
     contrast: float = 1.0
     saturation: float = 1.0
-    text_watermark: dict = {}
+    text_watermark: dict = Field(default_factory=dict)
     image_watermark_path: str = ""
-    image_watermark_settings: dict = {}
+    image_watermark_settings: dict = Field(default_factory=dict)
 
 
 @app.get("/")
@@ -128,6 +128,7 @@ def serve_frontend():
 UPLOAD_ALLOWED_EXTENSIONS = {".jpg", ".jpeg", ".png", ".webp", ".heic", ".heif"}
 _HEIC_EXTS = {".heic", ".heif"}
+MAX_UPLOAD_BYTES = 50 * 1024 * 1024  # 50 MB per file
 
 
 @app.post("/upload")
@@ -143,7 +144,9 @@ async def upload_files(files: List[UploadFile] = File(...), folder: str = Form("
         ext = os.path.splitext(file.filename or "")[1].lower()
         if ext not in UPLOAD_ALLOWED_EXTENSIONS:
             continue
-        raw = await file.read()
+        raw = await file.read(MAX_UPLOAD_BYTES + 1)
+        if len(raw) > MAX_UPLOAD_BYTES:
+            continue
         if ext in _HEIC_EXTS and _HEIF_SUPPORTED:
             # Convert HEIC/HEIF → JPEG so cv2 and browsers can handle it
             safe_name = os.path.splitext(os.path.basename(file.filename))[0] + ".jpg"
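Note on #5 (not part of the diff): Starlette's UploadFile.read accepts a size argument, so requesting MAX_UPLOAD_BYTES + 1 bytes is enough to detect an oversized upload without buffering the whole payload. A hedged sketch of the guard in isolation, with an illustrative helper name:

    from typing import Optional

    MAX_UPLOAD_BYTES = 50 * 1024 * 1024  # mirrors the constant added above

    async def read_bounded(upload) -> Optional[bytes]:
        raw = await upload.read(MAX_UPLOAD_BYTES + 1)  # one extra byte proves oversize
        if len(raw) > MAX_UPLOAD_BYTES:
            return None  # caller skips the file, like the `continue` in the diff
        return raw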
@@ -243,11 +246,9 @@ _PHASE_LABELS = {
 def _run_analyze_job(job_id: str, req: AnalyzeRequest):
     try:
         def on_progress(done, total, phase):
-            _jobs[job_id].update({
-                "done": done,
-                "total": total,
-                "phase": _PHASE_LABELS.get(phase, phase),
-            })
+            _jobs[job_id]["done"] = done
+            _jobs[job_id]["total"] = total
+            _jobs[job_id]["phase"] = _PHASE_LABELS.get(phase, phase)
 
         api_key = os.getenv("ANTHROPIC_API_KEY") if req.use_ai else None
         results = analyze_folder(
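Note on #6 (not part of the diff): each per-key write into the builtin _jobs dict is a single atomic operation under CPython's GIL, so a reader polling the job from another thread never sees a torn value (it may still observe done/total/phase from adjacent progress ticks). A small illustrative sketch, not the repo's progress endpoint:

    import threading
    import time as _t

    _jobs = {"job-1": {"done": 0, "total": 10, "phase": "queued"}}

    def _poll(job_id: str) -> None:
        for _ in range(10):
            job = _jobs[job_id]
            print(f'{job["done"]}/{job["total"]} {job["phase"]}')
            _t.sleep(0.05)

    threading.Thread(target=_poll, args=("job-1",), daemon=True).start()
    for i in range(1, 11):
        _jobs["job-1"]["done"] = i            # atomic per-key write
        _jobs["job-1"]["phase"] = "analyzing"
        _t.sleep(0.05)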
@@ -352,6 +353,24 @@ def move_files(req: MoveRequest):
 _zip_store: dict = {}  # zip_id -> path
+_ZIP_TTL = 3600  # ZIPs older than 1 h are deleted automatically
+
+
+def _cleanup_zips():
+    while True:
+        import time as _time
+        _time.sleep(_ZIP_TTL)
+        cutoff = time() - _ZIP_TTL
+        for zip_id, path in list(_zip_store.items()):
+            try:
+                if os.path.getmtime(path) < cutoff:
+                    os.unlink(path)
+                    _zip_store.pop(zip_id, None)
+            except OSError:
+                _zip_store.pop(zip_id, None)
+
+
+threading.Thread(target=_cleanup_zips, daemon=True).start()
 
 
 def _run_export_job(job_id: str, req: ExportRequest):
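Note on #4 (not part of the diff): the sweep only needs file mtimes, so its decision logic can be exercised without waiting an hour by back-dating a temp file. A hedged sketch with an assumed sweep_once helper that mirrors the loop body above:

    import os
    import tempfile
    from time import time

    _ZIP_TTL = 3600

    def sweep_once(store: dict) -> None:
        cutoff = time() - _ZIP_TTL
        for zip_id, path in list(store.items()):
            try:
                if os.path.getmtime(path) < cutoff:
                    os.unlink(path)
                    store.pop(zip_id, None)
            except OSError:
                store.pop(zip_id, None)

    fd, path = tempfile.mkstemp(suffix=".zip")
    os.close(fd)
    os.utime(path, (time() - 7200, time() - 7200))  # pretend the ZIP is 2 h old
    store = {"stale": path}
    sweep_once(store)
    assert store == {} and not os.path.exists(path)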