fix(retrieval): fail open on registry resolution errors

This commit is contained in:
2026-04-24 11:32:46 -04:00
parent ce6ffdbb63
commit 05c11fd4fb
7 changed files with 121 additions and 6 deletions

View File

@@ -163,6 +163,45 @@ def test_ingest_file_derives_project_id_from_registry_root(tmp_data_dir, tmp_pat
assert all(meta["project_id"] == "p04-gigabit" for meta in fake_store.metadatas)
def test_ingest_file_logs_and_fails_open_when_project_derivation_fails(
    tmp_data_dir,
    sample_markdown,
    monkeypatch,
):
    """Ingestion must proceed (fail open) when the project registry is broken,
    while emitting a structured warning so the breakage stays visible."""
    init_db()

    captured_warnings = []

    class RecordingVectorStore:
        """Minimal vector-store stand-in that records the metadata it receives."""

        def __init__(self):
            self.metadatas = []

        def add(self, ids, documents, metadatas):
            self.metadatas.extend(metadatas)

        def delete(self, ids):
            return None

    store = RecordingVectorStore()

    def raise_registry_error(path):
        # Simulates a corrupt/unreadable registry during project-id derivation.
        raise ValueError("registry broken")

    monkeypatch.setattr("atocore.ingestion.pipeline.get_vector_store", lambda: store)
    monkeypatch.setattr(
        "atocore.projects.registry.derive_project_id_for_path",
        raise_registry_error,
    )
    monkeypatch.setattr(
        "atocore.ingestion.pipeline.log.warning",
        lambda event, **kwargs: captured_warnings.append((event, kwargs)),
    )

    result = ingest_file(sample_markdown)

    # Ingestion succeeds despite the registry failure (fail-open behaviour).
    assert result["status"] == "ingested"
    assert store.metadatas
    # Every chunk falls back to an empty project id.
    assert all(meta["project_id"] == "" for meta in store.metadatas)
    # The failure was surfaced through the structured log.
    event, payload = captured_warnings[0]
    assert event == "project_id_derivation_failed"
    assert "registry broken" in payload["error"]
def test_ingest_project_folder_passes_project_id_to_files(tmp_data_dir, sample_folder, monkeypatch):
seen = []

View File

@@ -566,6 +566,59 @@ def test_retrieve_unknown_project_hint_does_not_widen_or_filter(monkeypatch):
assert [r.chunk_id for r in results] == ["chunk-a", "chunk-b"]
def test_retrieve_fails_open_when_project_scope_resolution_fails(monkeypatch):
    """Retrieval must return unscoped results — and warn — when resolving the
    project hint against the registry raises."""
    captured = []

    class UnfilteredStore:
        """Vector-store double returning two hits from different projects."""

        def query(self, query_embedding, top_k=10, where=None):
            # Fail-open must not widen the requested top_k.
            assert top_k == 2
            metadata_a = {
                "heading_path": "Overview",
                "source_file": "p04-gigabit/file.md",
                "tags": "[]",
                "title": "A",
                "document_id": "doc-a",
            }
            metadata_b = {
                "heading_path": "Overview",
                "source_file": "p05-interferometer/file.md",
                "tags": "[]",
                "title": "B",
                "document_id": "doc-b",
            }
            return {
                "ids": [["chunk-a", "chunk-b"]],
                "documents": [["doc a", "doc b"]],
                "metadatas": [[metadata_a, metadata_b]],
                "distances": [[0.2, 0.21]],
            }

    def raise_overlap(project_name):
        # Simulates an ambiguous/overlapping registry lookup.
        raise ValueError("registry overlap")

    monkeypatch.setattr("atocore.retrieval.retriever.get_vector_store", lambda: UnfilteredStore())
    monkeypatch.setattr("atocore.retrieval.retriever.embed_query", lambda query: [0.0, 0.1])
    monkeypatch.setattr(
        "atocore.retrieval.retriever._existing_chunk_ids",
        lambda chunk_ids: set(chunk_ids),
    )
    monkeypatch.setattr(
        "atocore.retrieval.retriever.get_registered_project",
        raise_overlap,
    )
    monkeypatch.setattr(
        "atocore.retrieval.retriever.log.warning",
        lambda event, **kwargs: captured.append((event, kwargs)),
    )

    results = retrieve("overview", top_k=2, project_hint="p04")

    # Both chunks come back in ranked order — no project filter was applied.
    assert [hit.chunk_id for hit in results] == ["chunk-a", "chunk-b"]
    # Both registry-dependent call sites logged the failure.
    assert {event for event, _ in captured} == {
        "project_scope_resolution_failed",
        "project_match_boost_resolution_failed",
    }
    assert all("registry overlap" in payload["error"] for _, payload in captured)
def test_retrieve_downranks_archive_noise_and_prefers_high_signal_paths(monkeypatch):
class FakeStore:
def query(self, query_embedding, top_k=10, where=None):