Files
ATOCore/tests/test_api_storage.py
Anto01 be4099486c deploy: add build_sha visibility for precise drift detection
Make /health report the precise git SHA the container was built from,
so 'is the live service current?' can be answered without ambiguity.
0.2.0 was too coarse to trust as a 'live is current' signal — many
commits share the same __version__.

Three layers:

1. /health endpoint (src/atocore/api/routes.py)
   - Reads ATOCORE_BUILD_SHA, ATOCORE_BUILD_TIME, ATOCORE_BUILD_BRANCH
     from environment, defaults to 'unknown'
   - Reports them alongside existing code_version field

2. docker-compose.yml
   - Forwards the three env vars from the host into the container
   - Defaults to 'unknown' so that direct `docker compose up` runs
     (without deploy.sh) start cleanly and clearly signal missing
     build provenance

3. deploy.sh
   - Step 2 captures git SHA + UTC timestamp + branch and exports them
     as env vars before `docker compose up -d --build`
   - Step 6 reads /health post-deploy and compares the reported
     build_sha against the freshly-built one. Mismatch exits non-zero
     (exit code 6) with a remediation hint covering cached image,
     env propagation, and concurrent restart cases

Tests (tests/test_api_storage.py):
- test_health_endpoint_reports_code_version_from_module
- test_health_endpoint_reports_build_metadata_from_env
- test_health_endpoint_reports_unknown_when_build_env_unset

Docs (docs/dalidou-deployment.md):
- Three-level drift detection table (code_version coarse,
  build_sha precise, build_time/branch forensic)
- Canonical drift check script using LIVE_SHA vs EXPECTED_SHA
- Note that running deploy.sh is itself the simplest drift check

219/219 tests passing.

Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
2026-04-08 20:25:32 -04:00

637 lines
20 KiB
Python

"""Tests for storage-related API readiness endpoints."""
from contextlib import contextmanager
from fastapi.testclient import TestClient
import atocore.config as config
from atocore.main import app
def test_sources_endpoint_reports_configured_sources(tmp_data_dir, monkeypatch):
    """Both configured sources show up in /sources and are marked read-only."""
    vault = tmp_data_dir / "vault-source"
    drive = tmp_data_dir / "drive-source"
    for directory in (vault, drive):
        directory.mkdir()
    monkeypatch.setenv("ATOCORE_VAULT_SOURCE_DIR", str(vault))
    monkeypatch.setenv("ATOCORE_DRIVE_SOURCE_DIR", str(drive))
    config.settings = config.Settings()

    response = TestClient(app).get("/sources")
    assert response.status_code == 200

    payload = response.json()
    assert payload["vault_enabled"] is True
    assert payload["drive_enabled"] is True
    assert len(payload["sources"]) == 2
    assert all(entry["read_only"] for entry in payload["sources"])
def test_health_endpoint_exposes_machine_paths_and_source_readiness(tmp_data_dir, monkeypatch):
    """/health reports sources_ready and the machine-local db/run paths."""
    vault = tmp_data_dir / "vault-source"
    drive = tmp_data_dir / "drive-source"
    vault.mkdir()
    drive.mkdir()
    monkeypatch.setenv("ATOCORE_VAULT_SOURCE_DIR", str(vault))
    monkeypatch.setenv("ATOCORE_DRIVE_SOURCE_DIR", str(drive))
    config.settings = config.Settings()

    response = TestClient(app).get("/health")
    assert response.status_code == 200

    payload = response.json()
    assert payload["status"] == "ok"
    assert payload["sources_ready"] is True
    machine_paths = payload["machine_paths"]
    assert "db_path" in machine_paths
    assert "run_dir" in machine_paths
def test_health_endpoint_reports_code_version_from_module(tmp_data_dir):
    """/health must expose code_version mirroring atocore.__version__,
    which is what coarse deployment-drift detection relies on."""
    from atocore import __version__

    response = TestClient(app).get("/health")
    assert response.status_code == 200

    payload = response.json()
    assert payload["version"] == __version__
    assert payload["code_version"] == __version__
def test_health_endpoint_reports_build_metadata_from_env(tmp_data_dir, monkeypatch):
    """/health must surface build_sha, build_time, and build_branch from
    the ATOCORE_BUILD_* environment variables so deploy.sh can detect
    precise drift by SHA comparison rather than via the coarse
    code_version field.

    Regression test for the codex finding from 2026-04-08:
    code_version 0.2.0 is too coarse to trust as a 'live is current'
    signal because it only changes on manual bumps. The build_sha
    field changes per commit and is set by deploy.sh.
    """
    build_env = {
        "ATOCORE_BUILD_SHA": "abc1234567890fedcba0987654321",
        "ATOCORE_BUILD_TIME": "2026-04-09T01:23:45Z",
        "ATOCORE_BUILD_BRANCH": "main",
    }
    for key, value in build_env.items():
        monkeypatch.setenv(key, value)

    response = TestClient(app).get("/health")
    assert response.status_code == 200

    payload = response.json()
    assert payload["build_sha"] == build_env["ATOCORE_BUILD_SHA"]
    assert payload["build_time"] == build_env["ATOCORE_BUILD_TIME"]
    assert payload["build_branch"] == build_env["ATOCORE_BUILD_BRANCH"]
def test_health_endpoint_reports_unknown_when_build_env_unset(tmp_data_dir, monkeypatch):
    """With the deploy.sh build env vars absent (e.g. a direct
    `docker compose up`), all three build fields read 'unknown' —
    a clear signal to the operator that deploy provenance is missing
    and the service should be redeployed via deploy.sh."""
    for var in ("ATOCORE_BUILD_SHA", "ATOCORE_BUILD_TIME", "ATOCORE_BUILD_BRANCH"):
        monkeypatch.delenv(var, raising=False)

    response = TestClient(app).get("/health")
    assert response.status_code == 200

    payload = response.json()
    assert payload["build_sha"] == "unknown"
    assert payload["build_time"] == "unknown"
    assert payload["build_branch"] == "unknown"
def test_projects_endpoint_reports_registered_projects(tmp_data_dir, monkeypatch):
    """GET /projects lists registry entries and flags existing ingest roots."""
    vault = tmp_data_dir / "vault-source"
    drive = tmp_data_dir / "drive-source"
    conf_dir = tmp_data_dir / "config"
    # Create the staged project directory so the root's exists flag is True.
    (vault / "incoming" / "projects" / "p04-gigabit").mkdir(parents=True)
    drive.mkdir()
    conf_dir.mkdir()

    registry_path = conf_dir / "project-registry.json"
    registry_path.write_text(
        '{"projects": [{"id": "p04-gigabit", "aliases": ["p04"], '
        '"description": "P04 docs", "ingest_roots": '
        '[{"source": "vault", "subpath": "incoming/projects/p04-gigabit"}]}]}',
        encoding="utf-8",
    )

    monkeypatch.setenv("ATOCORE_VAULT_SOURCE_DIR", str(vault))
    monkeypatch.setenv("ATOCORE_DRIVE_SOURCE_DIR", str(drive))
    monkeypatch.setenv("ATOCORE_PROJECT_REGISTRY_PATH", str(registry_path))
    config.settings = config.Settings()

    response = TestClient(app).get("/projects")
    assert response.status_code == 200

    first_project = response.json()["projects"][0]
    assert first_project["id"] == "p04-gigabit"
    assert first_project["ingest_roots"][0]["exists"] is True
def test_project_refresh_endpoint_uses_registered_roots(tmp_data_dir, monkeypatch):
    """POST /projects/{alias}/refresh resolves the alias and delegates to
    refresh_registered_project with purge_deleted defaulting to False."""
    vault = tmp_data_dir / "vault-source"
    drive = tmp_data_dir / "drive-source"
    conf_dir = tmp_data_dir / "config"
    project_dir = vault / "incoming" / "projects" / "p05-interferometer"
    project_dir.mkdir(parents=True)
    drive.mkdir()
    conf_dir.mkdir()

    registry_path = conf_dir / "project-registry.json"
    registry_path.write_text(
        '{"projects": [{"id": "p05-interferometer", "aliases": ["p05"], '
        '"description": "P05 docs", "ingest_roots": '
        '[{"source": "vault", "subpath": "incoming/projects/p05-interferometer"}]}]}',
        encoding="utf-8",
    )

    recorded = []

    def fake_refresh_registered_project(project_name, purge_deleted=False):
        # Record how the route invoked us, then return a canned summary.
        recorded.append((project_name, purge_deleted))
        return {
            "project": "p05-interferometer",
            "aliases": ["p05"],
            "description": "P05 docs",
            "purge_deleted": purge_deleted,
            "status": "ingested",
            "roots_ingested": 1,
            "roots_skipped": 0,
            "roots": [
                {
                    "source": "vault",
                    "subpath": "incoming/projects/p05-interferometer",
                    "path": str(project_dir),
                    "status": "ingested",
                    "results": [],
                }
            ],
        }

    monkeypatch.setenv("ATOCORE_VAULT_SOURCE_DIR", str(vault))
    monkeypatch.setenv("ATOCORE_DRIVE_SOURCE_DIR", str(drive))
    monkeypatch.setenv("ATOCORE_PROJECT_REGISTRY_PATH", str(registry_path))
    config.settings = config.Settings()
    monkeypatch.setattr(
        "atocore.api.routes.refresh_registered_project",
        fake_refresh_registered_project,
    )

    response = TestClient(app).post("/projects/p05/refresh")
    assert response.status_code == 200
    assert recorded == [("p05", False)]
    assert response.json()["project"] == "p05-interferometer"
def test_project_refresh_endpoint_serializes_ingestion(tmp_data_dir, monkeypatch):
    """The refresh route must run entirely inside the exclusive ingestion
    lock: enter, refresh, exit — in that order."""
    config.settings = config.Settings()
    timeline = []

    @contextmanager
    def fake_lock():
        timeline.append("enter")
        try:
            yield
        finally:
            timeline.append("exit")

    def fake_refresh_registered_project(project_name, purge_deleted=False):
        timeline.append(("refresh", project_name, purge_deleted))
        return {
            "project": "p05-interferometer",
            "aliases": ["p05"],
            "description": "P05 docs",
            "purge_deleted": purge_deleted,
            "status": "nothing_to_ingest",
            "roots_ingested": 0,
            "roots_skipped": 0,
            "roots": [],
        }

    monkeypatch.setattr("atocore.api.routes.exclusive_ingestion", fake_lock)
    monkeypatch.setattr(
        "atocore.api.routes.refresh_registered_project",
        fake_refresh_registered_project,
    )

    response = TestClient(app).post("/projects/p05/refresh")
    assert response.status_code == 200
    assert timeline == ["enter", ("refresh", "p05", False), "exit"]
def test_projects_template_endpoint_returns_template(tmp_data_dir, monkeypatch):
    """GET /projects/template serves the example registry template."""
    config.settings = config.Settings()

    response = TestClient(app).get("/projects/template")
    assert response.status_code == 200

    payload = response.json()
    assert payload["allowed_sources"] == ["vault", "drive"]
    assert payload["template"]["projects"][0]["id"] == "p07-example"
def test_project_proposal_endpoint_returns_normalized_preview(tmp_data_dir, monkeypatch):
    """POST /projects/proposal dedupes aliases, resolves ingest roots
    against the configured sources, and reports overall validity."""
    vault = tmp_data_dir / "vault-source"
    drive = tmp_data_dir / "drive-source"
    conf_dir = tmp_data_dir / "config"
    (vault / "incoming" / "projects" / "p07-example").mkdir(parents=True)
    drive.mkdir()
    conf_dir.mkdir()

    registry_path = conf_dir / "project-registry.json"
    registry_path.write_text('{"projects": []}', encoding="utf-8")

    monkeypatch.setenv("ATOCORE_VAULT_SOURCE_DIR", str(vault))
    monkeypatch.setenv("ATOCORE_DRIVE_SOURCE_DIR", str(drive))
    monkeypatch.setenv("ATOCORE_PROJECT_REGISTRY_PATH", str(registry_path))
    config.settings = config.Settings()

    proposal = {
        "project_id": "p07-example",
        # "p07" appears twice on purpose: the preview must deduplicate it.
        "aliases": ["p07", "example-project", "p07"],
        "description": "Example project",
        "ingest_roots": [
            {
                "source": "vault",
                "subpath": "incoming/projects/p07-example",
                "label": "Primary docs",
            }
        ],
    }
    response = TestClient(app).post("/projects/proposal", json=proposal)
    assert response.status_code == 200

    payload = response.json()
    assert payload["project"]["aliases"] == ["p07", "example-project"]
    assert payload["resolved_ingest_roots"][0]["exists"] is True
    assert payload["valid"] is True
def test_project_register_endpoint_persists_entry(tmp_data_dir, monkeypatch):
    """POST /projects/register writes the new entry into the registry file."""
    vault = tmp_data_dir / "vault-source"
    drive = tmp_data_dir / "drive-source"
    conf_dir = tmp_data_dir / "config"
    (vault / "incoming" / "projects" / "p07-example").mkdir(parents=True)
    drive.mkdir()
    conf_dir.mkdir()

    registry_path = conf_dir / "project-registry.json"
    registry_path.write_text('{"projects": []}', encoding="utf-8")

    monkeypatch.setenv("ATOCORE_VAULT_SOURCE_DIR", str(vault))
    monkeypatch.setenv("ATOCORE_DRIVE_SOURCE_DIR", str(drive))
    monkeypatch.setenv("ATOCORE_PROJECT_REGISTRY_PATH", str(registry_path))
    config.settings = config.Settings()

    registration = {
        "project_id": "p07-example",
        "aliases": ["p07", "example-project"],
        "description": "Example project",
        "ingest_roots": [
            {
                "source": "vault",
                "subpath": "incoming/projects/p07-example",
                "label": "Primary docs",
            }
        ],
    }
    response = TestClient(app).post("/projects/register", json=registration)
    assert response.status_code == 200

    payload = response.json()
    assert payload["status"] == "registered"
    assert payload["project"]["id"] == "p07-example"
    # The registry file on disk must now contain the new project id.
    assert '"p07-example"' in registry_path.read_text(encoding="utf-8")
def test_project_register_endpoint_rejects_collisions(tmp_data_dir, monkeypatch):
    """Registering a project whose alias collides with an existing entry
    must fail with a 400 that mentions the collision."""
    vault = tmp_data_dir / "vault-source"
    drive = tmp_data_dir / "drive-source"
    conf_dir = tmp_data_dir / "config"
    for directory in (vault, drive, conf_dir):
        directory.mkdir()

    registry_path = conf_dir / "project-registry.json"
    registry_path.write_text(
        '{"projects": [{"id": "p05-interferometer", '
        '"aliases": ["p05", "interferometer"], "ingest_roots": '
        '[{"source": "vault", "subpath": "incoming/projects/p05-interferometer"}]}]}',
        encoding="utf-8",
    )

    monkeypatch.setenv("ATOCORE_VAULT_SOURCE_DIR", str(vault))
    monkeypatch.setenv("ATOCORE_DRIVE_SOURCE_DIR", str(drive))
    monkeypatch.setenv("ATOCORE_PROJECT_REGISTRY_PATH", str(registry_path))
    config.settings = config.Settings()

    response = TestClient(app).post(
        "/projects/register",
        json={
            "project_id": "p07-example",
            # "interferometer" is already claimed by p05-interferometer.
            "aliases": ["interferometer"],
            "ingest_roots": [
                {
                    "source": "vault",
                    "subpath": "incoming/projects/p07-example",
                }
            ],
        },
    )
    assert response.status_code == 400
    assert "collisions" in response.json()["detail"]
def test_project_update_endpoint_persists_changes(tmp_data_dir, monkeypatch):
    """PUT /projects/{alias} updates aliases and description of an entry."""
    vault = tmp_data_dir / "vault-source"
    drive = tmp_data_dir / "drive-source"
    conf_dir = tmp_data_dir / "config"
    (vault / "incoming" / "projects" / "p04-gigabit").mkdir(parents=True)
    drive.mkdir()
    conf_dir.mkdir()

    registry_path = conf_dir / "project-registry.json"
    registry_path.write_text(
        '{"projects": [{"id": "p04-gigabit", "aliases": ["p04", "gigabit"], '
        '"description": "Old description", "ingest_roots": '
        '[{"source": "vault", "subpath": "incoming/projects/p04-gigabit"}]}]}',
        encoding="utf-8",
    )

    monkeypatch.setenv("ATOCORE_VAULT_SOURCE_DIR", str(vault))
    monkeypatch.setenv("ATOCORE_DRIVE_SOURCE_DIR", str(drive))
    monkeypatch.setenv("ATOCORE_PROJECT_REGISTRY_PATH", str(registry_path))
    config.settings = config.Settings()

    updated_aliases = ["p04", "gigabit", "gigabit-project"]
    response = TestClient(app).put(
        "/projects/p04",
        json={
            "aliases": updated_aliases,
            "description": "Updated P04 docs",
        },
    )
    assert response.status_code == 200

    payload = response.json()
    assert payload["status"] == "updated"
    assert payload["project"]["aliases"] == updated_aliases
    assert payload["project"]["description"] == "Updated P04 docs"
def test_project_update_endpoint_rejects_collisions(tmp_data_dir, monkeypatch):
    """Updating a project so that an alias collides with a different
    project's alias must fail with a 400 mentioning the collision."""
    vault = tmp_data_dir / "vault-source"
    drive = tmp_data_dir / "drive-source"
    conf_dir = tmp_data_dir / "config"
    for directory in (vault, drive, conf_dir):
        directory.mkdir()

    registry_path = conf_dir / "project-registry.json"
    registry_path.write_text(
        '{"projects": ['
        '{"id": "p04-gigabit", "aliases": ["p04", "gigabit"], "ingest_roots": '
        '[{"source": "vault", "subpath": "incoming/projects/p04-gigabit"}]}, '
        '{"id": "p05-interferometer", "aliases": ["p05", "interferometer"], "ingest_roots": '
        '[{"source": "vault", "subpath": "incoming/projects/p05-interferometer"}]}'
        ']}',
        encoding="utf-8",
    )

    monkeypatch.setenv("ATOCORE_VAULT_SOURCE_DIR", str(vault))
    monkeypatch.setenv("ATOCORE_DRIVE_SOURCE_DIR", str(drive))
    monkeypatch.setenv("ATOCORE_PROJECT_REGISTRY_PATH", str(registry_path))
    config.settings = config.Settings()

    # "interferometer" belongs to p05-interferometer, so p04 may not claim it.
    response = TestClient(app).put(
        "/projects/p04",
        json={"aliases": ["p04", "interferometer"]},
    )
    assert response.status_code == 400
    assert "collisions" in response.json()["detail"]
def test_admin_backup_create_without_chroma(tmp_data_dir, monkeypatch):
    """POST /admin/backup with an empty body defaults to include_chroma=False."""
    config.settings = config.Settings()
    seen = {}

    def fake_create_runtime_backup(timestamp=None, include_chroma=False):
        seen["include_chroma"] = include_chroma
        return {
            "created_at": "2026-04-06T23:00:00+00:00",
            "backup_root": "/tmp/fake",
            "db_snapshot_path": "/tmp/fake/db/atocore.db",
            "db_size_bytes": 0,
            "registry_snapshot_path": "",
            "chroma_snapshot_path": "",
            "chroma_snapshot_bytes": 0,
            "chroma_snapshot_files": 0,
            "chroma_snapshot_included": False,
            "vector_store_note": "skipped",
        }

    monkeypatch.setattr(
        "atocore.api.routes.create_runtime_backup", fake_create_runtime_backup
    )

    response = TestClient(app).post("/admin/backup", json={})
    assert response.status_code == 200
    assert seen == {"include_chroma": False}
    assert response.json()["chroma_snapshot_included"] is False
def test_admin_backup_create_with_chroma_holds_lock(tmp_data_dir, monkeypatch):
    """A chroma-inclusive backup must run inside the exclusive ingestion
    lock: enter, backup, exit — in that order."""
    config.settings = config.Settings()
    timeline = []

    @contextmanager
    def fake_lock():
        timeline.append("enter")
        try:
            yield
        finally:
            timeline.append("exit")

    def fake_create_runtime_backup(timestamp=None, include_chroma=False):
        timeline.append(("backup", include_chroma))
        return {
            "created_at": "2026-04-06T23:30:00+00:00",
            "backup_root": "/tmp/fake",
            "db_snapshot_path": "/tmp/fake/db/atocore.db",
            "db_size_bytes": 0,
            "registry_snapshot_path": "",
            "chroma_snapshot_path": "/tmp/fake/chroma",
            "chroma_snapshot_bytes": 4,
            "chroma_snapshot_files": 1,
            "chroma_snapshot_included": True,
            "vector_store_note": "included",
        }

    monkeypatch.setattr("atocore.api.routes.exclusive_ingestion", fake_lock)
    monkeypatch.setattr(
        "atocore.api.routes.create_runtime_backup", fake_create_runtime_backup
    )

    response = TestClient(app).post("/admin/backup", json={"include_chroma": True})
    assert response.status_code == 200
    assert timeline == ["enter", ("backup", True), "exit"]
    assert response.json()["chroma_snapshot_included"] is True
def test_admin_backup_list_and_validate_endpoints(tmp_data_dir, monkeypatch):
    """GET /admin/backup lists snapshots; the validate sub-route returns
    200 for a valid stamp and 404 when the snapshot directory is missing."""
    config.settings = config.Settings()

    def fake_list_runtime_backups():
        return [
            {
                "stamp": "20260406T220000Z",
                "path": "/tmp/fake/snapshots/20260406T220000Z",
                "has_metadata": True,
                "metadata": {
                    "db_snapshot_path": "/tmp/fake/snapshots/20260406T220000Z/db/atocore.db"
                },
            }
        ]

    def fake_validate_backup(stamp):
        # The special "missing" stamp simulates a deleted snapshot directory.
        if stamp == "missing":
            return {
                "stamp": stamp,
                "path": f"/tmp/fake/snapshots/{stamp}",
                "exists": False,
                "errors": ["snapshot_directory_missing"],
            }
        return {
            "stamp": stamp,
            "path": f"/tmp/fake/snapshots/{stamp}",
            "exists": True,
            "db_ok": True,
            "registry_ok": True,
            "chroma_ok": None,
            "valid": True,
            "errors": [],
        }

    monkeypatch.setattr(
        "atocore.api.routes.list_runtime_backups", fake_list_runtime_backups
    )
    monkeypatch.setattr("atocore.api.routes.validate_backup", fake_validate_backup)
    client = TestClient(app)

    listing = client.get("/admin/backup")
    assert listing.status_code == 200
    listing_payload = listing.json()
    assert "backup_dir" in listing_payload
    assert listing_payload["backups"][0]["stamp"] == "20260406T220000Z"

    valid = client.get("/admin/backup/20260406T220000Z/validate")
    assert valid.status_code == 200
    assert valid.json()["valid"] is True

    missing = client.get("/admin/backup/missing/validate")
    assert missing.status_code == 404
def test_query_endpoint_accepts_project_hint(monkeypatch):
    """POST /query forwards the optional project field as project_hint."""

    def fake_retrieve(prompt, top_k=10, filter_tags=None, project_hint=None):
        # Assert inside the stub: the route must pass the request fields through.
        assert prompt == "architecture"
        assert top_k == 3
        assert project_hint == "p04-gigabit"
        return []

    monkeypatch.setattr("atocore.api.routes.retrieve", fake_retrieve)

    query = {
        "prompt": "architecture",
        "top_k": 3,
        "project": "p04-gigabit",
    }
    response = TestClient(app).post("/query", json=query)
    assert response.status_code == 200
    assert response.json()["results"] == []