# NOTE(review): the text below was pasted at the top of this module (it looks
# like a commit message plus file-viewer residue). Kept as a comment so the
# module stays importable; the duplicated "123 lines / 4.1 KiB / Python"
# viewer stats were dropped.
# - 新增 app/baked/algorithm|pipeline,非部署参数不再走 env;Settings 保留 DB/HTTP/RTSP/海康/百度/MinIO/Demo - 移除 init_db_schema 与 reload 配置;main 仅 check_database;start*.sh 在 uvicorn 前执行 alembic upgrade head - 依赖 psycopg[binary] 供 Alembic 同步 URL;alembic/env 注释与预发清单更新 - 撕段门控消费管线、各视频/语音/归档调用改为 baked - 百度环境变量仅 BAIDU_APP_ID、BAIDU_API_KEY、BAIDU_SECRET_KEY 与 BAIDU_* 超时/ASR;人脸脚本与 baidu_speech 文案同步 - 全量单测与 .env.example 更新;.gitignore 忽略 refs/(本地权重/视频不入库) Made-with: Cursor
"""进程重启后的归档恢复集成测试。
|
||
|
||
场景:某次手术结束后写库失败 → ArchivePersister 将明细写入 durable fallback 目录。
|
||
之后 API 进程重启(相当于重新 ``create_app()``)时,``AppContainer.start()`` 会调用
|
||
``camera_session_manager.start_archive_retry_loop()`` → ``recover_from_durable_fallback()``,
|
||
把磁盘上的待落库归档读回内存;随后走真实 DB 写入路径将其成功持久化。
|
||
"""
|
||
|
||
from __future__ import annotations

import asyncio
import json
from collections.abc import AsyncGenerator
from datetime import datetime, timezone

import pytest
import pytest_asyncio
from fastapi.testclient import TestClient
from sqlalchemy.ext.asyncio import AsyncSession, async_sessionmaker, create_async_engine

import app.db.models  # noqa: F401 register ORM tables
import main as main_module
from app.baked import pipeline as bp
from app.db.base import Base
from app.dependencies import AppContainer, build_container
from app.domain.consumption import SurgeryConsumptionStored
from app.services.video.archive_persister import _serialize_details


@pytest_asyncio.fixture
async def sqlite_factory() -> AsyncGenerator[async_sessionmaker[AsyncSession], None]:
    """Yield a session factory bound to a fresh in-memory SQLite database.

    The full ORM schema (everything registered on ``Base.metadata``) is
    created up front so the test can exercise the real DB persistence path.

    Fix: the engine is now disposed in a ``finally`` block, so the aiosqlite
    connection pool is released even if an exception is thrown into the
    fixture generator during teardown (the original skipped ``dispose()``
    in that case).
    """
    engine = create_async_engine("sqlite+aiosqlite:///:memory:")
    async with engine.begin() as conn:
        await conn.run_sync(Base.metadata.create_all)
    try:
        yield async_sessionmaker(
            engine,
            class_=AsyncSession,
            expire_on_commit=False,
            autoflush=False,
            # autobegin=False: sessions must open transactions explicitly,
            # matching how the production factory is configured.
            autobegin=False,
        )
    finally:
        await engine.dispose()
def _seed_durable_fallback(directory, surgery_id: str) -> None:
    """Write one pending-archive JSON file for *surgery_id* into *directory*.

    Mirrors the on-disk format ArchivePersister leaves behind when the
    post-surgery DB write fails: a single ``<surgery_id>.json`` document
    holding the serialized consumption details.
    """
    directory.mkdir(parents=True, exist_ok=True)
    stored = SurgeryConsumptionStored(
        item_id="item-1",
        item_name="纱布",
        qty=2,
        doctor_id="voice",
        timestamp=datetime(2026, 4, 23, 12, 0, tzinfo=timezone.utc),
        source="voice",
    )
    document = {
        "surgery_id": surgery_id,
        "saved_at": datetime.now(timezone.utc).isoformat(),
        "details": _serialize_details([stored]),
    }
    target = directory / f"{surgery_id}.json"
    target.write_text(
        json.dumps(document, ensure_ascii=False, indent=2),
        encoding="utf-8",
    )
def test_durable_fallback_recovers_on_startup_and_persists(
    monkeypatch: pytest.MonkeyPatch,
    sqlite_factory: async_sessionmaker[AsyncSession],
    tmp_path,
) -> None:
    """End-to-end: fallback file on disk → startup recovery → DB row → API.

    Steps:
      1. Seed ``tmp_path/pending_archive`` with one pending archive file.
      2. Boot the app (``create_app`` + TestClient lifespan) with the DB
         connectivity check, module-level engine and container factory
         monkeypatched to sqlite-backed stand-ins.
      3. Assert startup recovery loaded the archive into memory, that an
         explicit retry persists it (removing the fallback file), and that
         the result endpoint then serves the persisted details.

    Fix: the explicit retry previously used
    ``asyncio.get_event_loop().run_until_complete(...)`` — deprecated since
    Python 3.10 and unreliable when no loop is running in the calling
    thread (TestClient runs the app's loop in a separate portal thread).
    It now drives the coroutine on an explicitly created, properly closed
    private loop.
    """
    durable_dir = tmp_path / "pending_archive"
    surgery_id = "200001"
    _seed_durable_fallback(durable_dir, surgery_id)
    assert (durable_dir / f"{surgery_id}.json").exists()

    async def _noop() -> None:
        return None

    # Skip the real database connectivity check during app startup.
    monkeypatch.setattr(main_module, "check_database", _noop)

    class _FakeEngine:
        async def dispose(self) -> None:
            return None

    # App shutdown calls dispose() on main's module-level engine —
    # presumably the real Postgres engine; stub it so teardown is a no-op.
    monkeypatch.setattr(main_module, "engine", _FakeEngine())

    from app.config import settings as real_settings

    # Point the baked pipeline config at the seeded fallback directory; the
    # 5 s retry interval presumably keeps the background retry loop from
    # racing the explicit try_persist_archive() call below — confirm.
    monkeypatch.setattr(
        bp,
        "ARCHIVE_PERSIST_DURABLE_FALLBACK_DIR",
        str(durable_dir),
    )
    monkeypatch.setattr(bp, "ARCHIVE_PERSIST_RETRY_INTERVAL_SECONDS", 5.0)

    def _build(*_a, **_kw) -> AppContainer:
        # Real settings, but DB sessions come from the in-memory sqlite factory.
        return build_container(real_settings, session_factory=sqlite_factory)

    monkeypatch.setattr(main_module, "build_container", _build)

    app = main_module.create_app()
    with TestClient(app) as client:
        container: AppContainer = client.app.state.container
        archive = container.camera_session_manager._archive
        # Startup recovery must have read the fallback file back into memory.
        assert archive.archived_details(surgery_id) is not None

        # Drive the retry coroutine on a private loop (see docstring).
        loop = asyncio.new_event_loop()
        try:
            ok = loop.run_until_complete(archive.try_persist_archive(surgery_id))
        finally:
            loop.close()
        assert ok, "Expected immediate retry to persist against sqlite"
        # Successful persistence clears both the in-memory entry and the
        # durable fallback file.
        assert archive.archived_details(surgery_id) is None
        assert not (durable_dir / f"{surgery_id}.json").exists()

        # The persisted details are now served by the real DB read path.
        r = client.get(f"/client/surgeries/{surgery_id}/result")
        assert r.status_code == 200, r.text
        body = r.json()
        assert len(body["details"]) == 1
        assert body["details"][0]["item_name"] == "纱布"
        assert body["details"][0]["qty"] == 2