update: upload fix

This commit is contained in:
2026-03-10 14:25:21 +08:00
parent a3d928e697
commit fd2b574d5a
19 changed files with 575 additions and 156 deletions

View File

@@ -20,18 +20,30 @@ class Settings(BaseSettings):
# Upload constraints: maximum accepted file size (MB) and image extensions.
max_upload_size_mb: int = 20
allowed_extensions: set[str] = {"png", "jpg", "jpeg", "webp"}
# LLM
# NOTE(review): `llm_provider` is declared twice in this class (again below as
# `str | None = None`). The later declaration wins at class-creation time, so
# this line is effectively dead — looks like a merge/diff leftover; confirm.
llm_provider: str = "openai" # openai | anthropic | deepseek | custom_openai
# --- OCR (vision) model ---
ocr_provider: str = "openai" # openai | anthropic | deepseek | custom_openai
ocr_model: str | None = None # if None, falls back to provider default
# --- Inference (reasoning) model ---
inference_provider: str = "openai"
inference_model: str | None = None
# --- Provider credentials (shared between OCR and inference) ---
openai_api_key: str | None = None
anthropic_api_key: str | None = None
deepseek_api_key: str | None = None
custom_openai_api_key: str | None = None
custom_openai_base_url: str | None = None
# Provider default model names (used when ocr_model / inference_model is None)
openai_model: str = "gpt-4o"
anthropic_model: str = "claude-3-5-sonnet-20241022"
deepseek_model: str = "deepseek-chat"
custom_openai_model: str = "gpt-4o-mini"
# Legacy compat: llm_provider maps to ocr_provider on load
# (this re-declaration overrides the earlier `llm_provider` field above)
llm_provider: str | None = None
# Pydantic v1-style model config: load field values from a local .env file.
class Config:
    env_file = ".env"
    env_file_encoding = "utf-8"
@@ -40,8 +52,24 @@ class Settings(BaseSettings):
# Process-wide overrides layered on top of env-loaded Settings.
# Written by update_runtime_settings(); consumed by _apply_overrides().
_runtime_overrides: dict[str, str | None] = {}
# Whitelist of Settings fields that callers may change at runtime.
# Keys outside this set are silently ignored by update_runtime_settings().
_ALLOWED_RUNTIME_KEYS = {
    "ocr_provider",
    "ocr_model",
    "inference_provider",
    "inference_model",
    "openai_api_key",
    "anthropic_api_key",
    "deepseek_api_key",
    "custom_openai_api_key",
    "custom_openai_base_url",
    "custom_openai_model",
}
def _apply_overrides(settings: Settings) -> Settings:
    """Apply legacy mapping and runtime overrides to a freshly built Settings."""
    # Legacy: if llm_provider is set but ocr_provider is default, use it
    if settings.llm_provider and settings.ocr_provider == "openai":
        settings.ocr_provider = settings.llm_provider
    for key, value in _runtime_overrides.items():
        # Only assign fields that actually exist on Settings; unknown override
        # keys are skipped rather than raising.
        if hasattr(settings, key):
            setattr(settings, key, value)
    # NOTE(review): no `return settings` is visible in this diff hunk even though
    # the signature promises Settings — the view may be truncated; confirm the
    # full source ends with `return settings`.
@@ -53,19 +81,32 @@ def get_settings() -> Settings:
return _apply_overrides(Settings())
def update_runtime_settings(payload: dict[str, str | None]) -> Settings:
"""Update runtime settings and refresh cached Settings object."""
allowed = {
"llm_provider",
"openai_api_key",
"anthropic_api_key",
"deepseek_api_key",
"custom_openai_api_key",
"custom_openai_base_url",
"custom_openai_model",
def _resolve_model(provider: str, explicit_model: str | None, settings: Settings) -> str:
    """Resolve the effective model name for *provider*.

    An explicitly configured model always wins. Otherwise fall back to the
    provider's default model from *settings*; unrecognised providers fall
    back to the OpenAI default.
    """
    if explicit_model:
        return explicit_model
    if provider == "anthropic":
        return settings.anthropic_model
    if provider == "deepseek":
        return settings.deepseek_model
    if provider == "custom_openai":
        return settings.custom_openai_model
    # "openai" — and any unknown provider — uses the OpenAI default model.
    return settings.openai_model
def get_ocr_model() -> str:
    """Return the model name to use for OCR (vision) calls."""
    settings = get_settings()
    return _resolve_model(settings.ocr_provider, settings.ocr_model, settings)
def get_inference_model() -> str:
    """Return the model name to use for inference (reasoning) calls."""
    settings = get_settings()
    return _resolve_model(settings.inference_provider, settings.inference_model, settings)
def update_runtime_settings(payload: dict[str, str | None]) -> Settings:
    """Apply runtime overrides from *payload* and return refreshed settings.

    Only keys listed in _ALLOWED_RUNTIME_KEYS are accepted; unknown keys are
    silently ignored. The cached Settings object is invalidated so the next
    get_settings() call reflects the new overrides.
    """
    # Fix: the previous body contained a stray leftover condition referencing
    # an undefined local `allowed` (a diff residue of the old whitelist dict),
    # which would raise NameError; filter against _ALLOWED_RUNTIME_KEYS only.
    for key, value in payload.items():
        if key in _ALLOWED_RUNTIME_KEYS:
            _runtime_overrides[key] = value
    get_settings.cache_clear()
    return get_settings()
@@ -74,9 +115,12 @@ def update_runtime_settings(payload: dict[str, str | None]) -> Settings:
def public_settings() -> dict:
s = get_settings()
return {
"llm_provider": s.llm_provider,
"ocr_provider": s.ocr_provider,
"ocr_model": get_ocr_model(),
"inference_provider": s.inference_provider,
"inference_model": get_inference_model(),
"providers": ["openai", "anthropic", "deepseek", "custom_openai"],
"models": {
"provider_defaults": {
"openai": s.openai_model,
"anthropic": s.anthropic_model,
"deepseek": s.deepseek_model,