Add project registry refresh foundation

2026-04-06 08:02:13 -04:00
parent 0f95415530
commit 8293099025
12 changed files with 526 additions and 0 deletions

View File

@@ -13,6 +13,7 @@ ATOCORE_SOURCE_DRIVE_ENABLED=true
ATOCORE_LOG_DIR=./logs
ATOCORE_BACKUP_DIR=./backups
ATOCORE_RUN_DIR=./run
ATOCORE_PROJECT_REGISTRY_PATH=./config/project-registry.json
ATOCORE_HOST=127.0.0.1
ATOCORE_PORT=8100
ATOCORE_EMBEDDING_MODEL=sentence-transformers/paraphrase-multilingual-MiniLM-L12-v2

View File

@@ -0,0 +1,52 @@
{
"projects": [
{
"id": "atocore",
"aliases": ["ato core"],
"description": "AtoCore platform docs and trusted project materials.",
"ingest_roots": [
{
"source": "drive",
"subpath": "atocore",
"label": "AtoCore drive docs"
}
]
},
{
"id": "p04-gigabit",
"aliases": ["p04", "gigabit", "gigaBIT"],
"description": "Curated staged docs for the P04 GigaBIT project.",
"ingest_roots": [
{
"source": "vault",
"subpath": "incoming/projects/p04-gigabit",
"label": "P04 staged project docs"
}
]
},
{
"id": "p05-interferometer",
"aliases": ["p05", "interferometer"],
"description": "Curated staged docs for the P05 interferometer project.",
"ingest_roots": [
{
"source": "vault",
"subpath": "incoming/projects/p05-interferometer",
"label": "P05 staged project docs"
}
]
},
{
"id": "p06-polisher",
"aliases": ["p06", "polisher"],
"description": "Curated staged docs for the P06 polisher project.",
"ingest_roots": [
{
"source": "vault",
"subpath": "incoming/projects/p06-polisher",
"label": "P06 staged project docs"
}
]
}
]
}
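Each entry under "ingest_roots" resolves against the base directory for its "source" key ("vault" or "drive"), joined with "subpath". A minimal sketch of that resolution, mirroring the lookup in atocore.projects.registry._resolve_ingest_root added later in this commit (the base paths here are hypothetical examples standing in for resolved_vault_source_dir and resolved_drive_source_dir):

from pathlib import Path

# Hypothetical base dirs; the real values come from Settings.
base_dirs = {
    "vault": Path("/srv/atocore/vault-source"),
    "drive": Path("/srv/atocore/drive-source"),
}
root = {"source": "vault", "subpath": "incoming/projects/p04-gigabit"}
resolved = (base_dirs[root["source"]] / root["subpath"]).resolve(strict=False)
# -> /srv/atocore/vault-source/incoming/projects/p04-gigabit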

View File

@@ -38,6 +38,7 @@ now includes a first curated ingestion batch for the active projects.
- vector retrieval
- context builder
- API routes for query, context, health, and source status
- project registry and per-project refresh foundation
- env-driven storage and deployment paths
- Dalidou Docker deployment foundation
- initial AtoCore self-knowledge corpus ingested on Dalidou
@@ -144,6 +145,12 @@ In `source_documents` / retrieval corpus:
- the current corpus is still selective rather than exhaustive
- that selectivity is intentional at this stage
The source refresh model now has a concrete foundation in code:
- a project registry file defines known project ids, aliases, and ingest roots
- the API can list registered projects
- the API can refresh one registered project at a time
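A minimal client-side sketch of the listing endpoint, assuming the service runs at the ATOCORE_HOST/ATOCORE_PORT values from the env file and using only the standard library:

import json
from urllib.request import urlopen

# GET /projects returns registered projects with their resolved ingest roots.
with urlopen("http://127.0.0.1:8100/projects") as resp:
    payload = json.load(resp)
for project in payload["projects"]:
    print(project["id"], [root["path"] for root in project["ingest_roots"]])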
In `Trusted Project State`:
- each active seeded project now has a conservative trusted-state set

View File

@@ -30,6 +30,7 @@ AtoCore now has:
- make the difference between source truth, staged inputs, and machine store
explicit
- move toward a project source registry and refresh workflow
- foundation now exists via project registry + per-project refresh API
5. Define backup and export procedures for Dalidou
- SQLite snapshot/backup strategy
- Chroma backup or rebuild policy

View File

@@ -78,6 +78,17 @@ The long-run workflow should become much more natural:
- AtoCore refreshes the machine representation cleanly
- OpenClaw consumes the improved context over API
## Current Foundation
The first concrete foundation for this now exists in AtoCore:
- a project registry file records known project ids, aliases, and ingest roots
- the API can list those registered projects
- the API can refresh a single registered project from its configured roots
This is not full source automation yet, but it gives the refresh model a real
home in the system.
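As a minimal client-side sketch of that refresh call (assuming the service at the ATOCORE_HOST/ATOCORE_PORT from the env config; the project name may be an id or an alias, and purge_deleted defaults to false):

import json
from urllib.request import Request, urlopen

# POST /projects/{name}/refresh ingests every configured root for that project.
req = Request("http://127.0.0.1:8100/projects/p04/refresh", method="POST")
with urlopen(req) as resp:
    result = json.load(resp)
print(result["project"], [root["status"] for root in result["roots"]])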
## Healthy Mental Model
Use this distinction:

View File

@@ -33,6 +33,10 @@ from atocore.memory.service import (
update_memory,
)
from atocore.observability.logger import get_logger
from atocore.projects.registry import (
list_registered_projects,
refresh_registered_project,
)
from atocore.retrieval.retriever import retrieve
from atocore.retrieval.vector_store import get_vector_store
@@ -55,6 +59,14 @@ class IngestSourcesResponse(BaseModel):
results: list[dict]
class ProjectRefreshResponse(BaseModel):
project: str
aliases: list[str]
description: str
purge_deleted: bool
roots: list[dict]
class QueryRequest(BaseModel):
prompt: str
top_k: int = 10
@@ -148,6 +160,28 @@ def api_ingest_sources() -> IngestSourcesResponse:
return IngestSourcesResponse(results=results)
@router.get("/projects")
def api_projects() -> dict:
"""Return registered projects and their resolved ingest roots."""
return {
"projects": list_registered_projects(),
"registry_path": str(_config.settings.resolved_project_registry_path),
}
@router.post("/projects/{project_name}/refresh", response_model=ProjectRefreshResponse)
def api_refresh_project(project_name: str, purge_deleted: bool = False) -> ProjectRefreshResponse:
"""Refresh one registered project from its configured ingest roots."""
try:
result = refresh_registered_project(project_name, purge_deleted=purge_deleted)
except ValueError as e:
raise HTTPException(status_code=404, detail=str(e))
except Exception as e:
log.error("project_refresh_failed", project=project_name, error=str(e))
raise HTTPException(status_code=500, detail=f"Project refresh failed: {e}")
return ProjectRefreshResponse(**result)
@router.post("/query", response_model=QueryResponse)
def api_query(req: QueryRequest) -> QueryResponse:
"""Retrieve relevant chunks for a prompt."""

View File

@@ -21,6 +21,7 @@ class Settings(BaseSettings):
log_dir: Path = Path("./logs")
backup_dir: Path = Path("./backups")
run_dir: Path = Path("./run")
project_registry_path: Path = Path("./config/project-registry.json")
host: str = "127.0.0.1"
port: int = 8100
@@ -91,6 +92,10 @@ class Settings(BaseSettings):
return self._resolve_path(self.resolved_data_dir.parent / "run")
return self._resolve_path(self.run_dir)
@property
def resolved_project_registry_path(self) -> Path:
return self._resolve_path(self.project_registry_path)
@property
def machine_dirs(self) -> list[Path]:
return [

View File

@@ -0,0 +1 @@
"""Project registry and source refresh helpers."""

View File

@@ -0,0 +1,152 @@
"""Registered project source metadata and refresh helpers."""
from __future__ import annotations
import json
from dataclasses import asdict, dataclass
from pathlib import Path
import atocore.config as _config
from atocore.ingestion.pipeline import ingest_folder
@dataclass(frozen=True)
class ProjectSourceRef:
source: str
subpath: str
label: str = ""
@dataclass(frozen=True)
class RegisteredProject:
project_id: str
aliases: tuple[str, ...]
description: str
ingest_roots: tuple[ProjectSourceRef, ...]
def load_project_registry() -> list[RegisteredProject]:
"""Load project registry entries from JSON config."""
registry_path = _config.settings.resolved_project_registry_path
if not registry_path.exists():
return []
payload = json.loads(registry_path.read_text(encoding="utf-8"))
entries = payload.get("projects", [])
projects: list[RegisteredProject] = []
for entry in entries:
project_id = str(entry["id"]).strip()
aliases = tuple(
alias.strip()
for alias in entry.get("aliases", [])
if isinstance(alias, str) and alias.strip()
)
description = str(entry.get("description", "")).strip()
ingest_roots = tuple(
ProjectSourceRef(
source=str(root["source"]).strip(),
subpath=str(root["subpath"]).strip(),
label=str(root.get("label", "")).strip(),
)
for root in entry.get("ingest_roots", [])
if str(root.get("source", "")).strip()
and str(root.get("subpath", "")).strip()
)
projects.append(
RegisteredProject(
project_id=project_id,
aliases=aliases,
description=description,
ingest_roots=ingest_roots,
)
)
return projects
def list_registered_projects() -> list[dict]:
"""Return registry entries with resolved source readiness."""
return [_project_to_dict(project) for project in load_project_registry()]
def get_registered_project(project_name: str) -> RegisteredProject | None:
"""Resolve a registry entry by id or alias."""
needle = project_name.strip().lower()
if not needle:
return None
for project in load_project_registry():
candidates = {project.project_id.lower(), *(alias.lower() for alias in project.aliases)}
if needle in candidates:
return project
return None
def refresh_registered_project(project_name: str, purge_deleted: bool = False) -> dict:
"""Ingest all configured source roots for a registered project."""
project = get_registered_project(project_name)
if project is None:
raise ValueError(f"Unknown project: {project_name}")
roots = []
for source_ref in project.ingest_roots:
resolved = _resolve_ingest_root(source_ref)
root_result = {
"source": source_ref.source,
"subpath": source_ref.subpath,
"label": source_ref.label,
"path": str(resolved),
}
if not resolved.exists():
roots.append({**root_result, "status": "missing"})
continue
if not resolved.is_dir():
roots.append({**root_result, "status": "not_directory"})
continue
roots.append(
{
**root_result,
"status": "ingested",
"results": ingest_folder(resolved, purge_deleted=purge_deleted),
}
)
return {
"project": project.project_id,
"aliases": list(project.aliases),
"description": project.description,
"purge_deleted": purge_deleted,
"roots": roots,
}
def _project_to_dict(project: RegisteredProject) -> dict:
return {
"id": project.project_id,
"aliases": list(project.aliases),
"description": project.description,
"ingest_roots": [
{
**asdict(source_ref),
"path": str(_resolve_ingest_root(source_ref)),
"exists": _resolve_ingest_root(source_ref).exists(),
"is_dir": _resolve_ingest_root(source_ref).is_dir(),
}
for source_ref in project.ingest_roots
],
}
def _resolve_ingest_root(source_ref: ProjectSourceRef) -> Path:
base_map = {
"vault": _config.settings.resolved_vault_source_dir,
"drive": _config.settings.resolved_drive_source_dir,
}
try:
base_dir = base_map[source_ref.source]
except KeyError as exc:
raise ValueError(f"Unsupported source root: {source_ref.source}") from exc
return (base_dir / source_ref.subpath).resolve(strict=False)
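As a usage sketch of these helpers, assuming settings point at a registry file like the one added in this commit:

from atocore.projects.registry import (
    get_registered_project,
    refresh_registered_project,
)

# Aliases resolve case-insensitively to the canonical project id.
project = get_registered_project("P05")
assert project is not None and project.project_id == "p05-interferometer"

# Refresh ingests each configured root; missing roots are reported with
# status "missing" rather than raising.
result = refresh_registered_project("interferometer", purge_deleted=False)
for root in result["roots"]:
    print(root["path"], root["status"])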

View File

@@ -46,3 +46,107 @@ def test_health_endpoint_exposes_machine_paths_and_source_readiness(tmp_data_dir
assert body["sources_ready"] is True
assert "db_path" in body["machine_paths"]
assert "run_dir" in body["machine_paths"]
def test_projects_endpoint_reports_registered_projects(tmp_data_dir, monkeypatch):
vault_dir = tmp_data_dir / "vault-source"
drive_dir = tmp_data_dir / "drive-source"
config_dir = tmp_data_dir / "config"
project_dir = vault_dir / "incoming" / "projects" / "p04-gigabit"
project_dir.mkdir(parents=True)
drive_dir.mkdir()
config_dir.mkdir()
registry_path = config_dir / "project-registry.json"
registry_path.write_text(
"""
{
"projects": [
{
"id": "p04-gigabit",
"aliases": ["p04"],
"description": "P04 docs",
"ingest_roots": [
{"source": "vault", "subpath": "incoming/projects/p04-gigabit"}
]
}
]
}
""".strip(),
encoding="utf-8",
)
monkeypatch.setenv("ATOCORE_VAULT_SOURCE_DIR", str(vault_dir))
monkeypatch.setenv("ATOCORE_DRIVE_SOURCE_DIR", str(drive_dir))
monkeypatch.setenv("ATOCORE_PROJECT_REGISTRY_PATH", str(registry_path))
config.settings = config.Settings()
client = TestClient(app)
response = client.get("/projects")
assert response.status_code == 200
body = response.json()
assert body["projects"][0]["id"] == "p04-gigabit"
assert body["projects"][0]["ingest_roots"][0]["exists"] is True
def test_project_refresh_endpoint_uses_registered_roots(tmp_data_dir, monkeypatch):
vault_dir = tmp_data_dir / "vault-source"
drive_dir = tmp_data_dir / "drive-source"
config_dir = tmp_data_dir / "config"
project_dir = vault_dir / "incoming" / "projects" / "p05-interferometer"
project_dir.mkdir(parents=True)
drive_dir.mkdir()
config_dir.mkdir()
registry_path = config_dir / "project-registry.json"
registry_path.write_text(
"""
{
"projects": [
{
"id": "p05-interferometer",
"aliases": ["p05"],
"description": "P05 docs",
"ingest_roots": [
{"source": "vault", "subpath": "incoming/projects/p05-interferometer"}
]
}
]
}
""".strip(),
encoding="utf-8",
)
calls = []
def fake_refresh_registered_project(project_name, purge_deleted=False):
calls.append((project_name, purge_deleted))
return {
"project": "p05-interferometer",
"aliases": ["p05"],
"description": "P05 docs",
"purge_deleted": purge_deleted,
"roots": [
{
"source": "vault",
"subpath": "incoming/projects/p05-interferometer",
"path": str(project_dir),
"status": "ingested",
"results": [],
}
],
}
monkeypatch.setenv("ATOCORE_VAULT_SOURCE_DIR", str(vault_dir))
monkeypatch.setenv("ATOCORE_DRIVE_SOURCE_DIR", str(drive_dir))
monkeypatch.setenv("ATOCORE_PROJECT_REGISTRY_PATH", str(registry_path))
config.settings = config.Settings()
monkeypatch.setattr("atocore.api.routes.refresh_registered_project", fake_refresh_registered_project)
client = TestClient(app)
response = client.post("/projects/p05/refresh")
assert response.status_code == 200
assert calls == [("p05", False)]
assert response.json()["project"] == "p05-interferometer"

View File

@@ -12,6 +12,9 @@ def test_settings_resolve_canonical_directories(tmp_path, monkeypatch):
monkeypatch.setenv("ATOCORE_DRIVE_SOURCE_DIR", str(tmp_path / "drive-source"))
monkeypatch.setenv("ATOCORE_LOG_DIR", str(tmp_path / "logs"))
monkeypatch.setenv("ATOCORE_BACKUP_DIR", str(tmp_path / "backups"))
monkeypatch.setenv(
"ATOCORE_PROJECT_REGISTRY_PATH", str(tmp_path / "config" / "project-registry.json")
)
settings = config.Settings()
@@ -24,6 +27,9 @@ def test_settings_resolve_canonical_directories(tmp_path, monkeypatch):
assert settings.resolved_log_dir == (tmp_path / "logs").resolve()
assert settings.resolved_backup_dir == (tmp_path / "backups").resolve()
assert settings.resolved_run_dir == (tmp_path / "run").resolve()
assert settings.resolved_project_registry_path == (
tmp_path / "config" / "project-registry.json"
).resolve()
def test_settings_keep_legacy_db_path_when_present(tmp_path, monkeypatch):

View File

@@ -0,0 +1,152 @@
"""Tests for project registry resolution and refresh behavior."""
import json
import atocore.config as config
from atocore.projects.registry import (
get_registered_project,
list_registered_projects,
refresh_registered_project,
)
def test_project_registry_lists_projects_with_resolved_roots(tmp_path, monkeypatch):
vault_dir = tmp_path / "vault"
drive_dir = tmp_path / "drive"
config_dir = tmp_path / "config"
vault_dir.mkdir()
drive_dir.mkdir()
config_dir.mkdir()
(vault_dir / "incoming" / "projects" / "p04-gigabit").mkdir(parents=True)
registry_path = config_dir / "project-registry.json"
registry_path.write_text(
json.dumps(
{
"projects": [
{
"id": "p04-gigabit",
"aliases": ["p04", "gigabit"],
"description": "P04 docs",
"ingest_roots": [
{
"source": "vault",
"subpath": "incoming/projects/p04-gigabit",
"label": "P04 staged docs",
}
],
}
]
}
),
encoding="utf-8",
)
monkeypatch.setenv("ATOCORE_VAULT_SOURCE_DIR", str(vault_dir))
monkeypatch.setenv("ATOCORE_DRIVE_SOURCE_DIR", str(drive_dir))
monkeypatch.setenv("ATOCORE_PROJECT_REGISTRY_PATH", str(registry_path))
original_settings = config.settings
try:
config.settings = config.Settings()
projects = list_registered_projects()
finally:
config.settings = original_settings
assert len(projects) == 1
assert projects[0]["id"] == "p04-gigabit"
assert projects[0]["ingest_roots"][0]["exists"] is True
def test_project_registry_resolves_alias(tmp_path, monkeypatch):
vault_dir = tmp_path / "vault"
drive_dir = tmp_path / "drive"
config_dir = tmp_path / "config"
vault_dir.mkdir()
drive_dir.mkdir()
config_dir.mkdir()
registry_path = config_dir / "project-registry.json"
registry_path.write_text(
json.dumps(
{
"projects": [
{
"id": "p05-interferometer",
"aliases": ["p05", "interferometer"],
"ingest_roots": [
{"source": "vault", "subpath": "incoming/projects/p05-interferometer"}
],
}
]
}
),
encoding="utf-8",
)
monkeypatch.setenv("ATOCORE_VAULT_SOURCE_DIR", str(vault_dir))
monkeypatch.setenv("ATOCORE_DRIVE_SOURCE_DIR", str(drive_dir))
monkeypatch.setenv("ATOCORE_PROJECT_REGISTRY_PATH", str(registry_path))
original_settings = config.settings
try:
config.settings = config.Settings()
project = get_registered_project("p05")
finally:
config.settings = original_settings
assert project is not None
assert project.project_id == "p05-interferometer"
def test_refresh_registered_project_ingests_registered_roots(tmp_path, monkeypatch):
vault_dir = tmp_path / "vault"
drive_dir = tmp_path / "drive"
config_dir = tmp_path / "config"
project_dir = vault_dir / "incoming" / "projects" / "p06-polisher"
project_dir.mkdir(parents=True)
drive_dir.mkdir()
config_dir.mkdir()
registry_path = config_dir / "project-registry.json"
registry_path.write_text(
json.dumps(
{
"projects": [
{
"id": "p06-polisher",
"aliases": ["p06", "polisher"],
"description": "P06 docs",
"ingest_roots": [
{"source": "vault", "subpath": "incoming/projects/p06-polisher"}
],
}
]
}
),
encoding="utf-8",
)
calls = []
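# The default is deliberately True here: the assertion below proves that
# refresh_registered_project forwards purge_deleted=False explicitly.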
def fake_ingest_folder(path, purge_deleted=True):
calls.append((str(path), purge_deleted))
return [{"file": str(path / "README.md"), "status": "ingested"}]
monkeypatch.setenv("ATOCORE_VAULT_SOURCE_DIR", str(vault_dir))
monkeypatch.setenv("ATOCORE_DRIVE_SOURCE_DIR", str(drive_dir))
monkeypatch.setenv("ATOCORE_PROJECT_REGISTRY_PATH", str(registry_path))
original_settings = config.settings
try:
config.settings = config.Settings()
monkeypatch.setattr("atocore.projects.registry.ingest_folder", fake_ingest_folder)
result = refresh_registered_project("polisher")
finally:
config.settings = original_settings
assert result["project"] == "p06-polisher"
assert len(calls) == 1
assert calls[0][0].endswith("p06-polisher")
assert calls[0][1] is False
assert result["roots"][0]["status"] == "ingested"