Files
ATOCore/tests/test_memory.py
Anto01 88f2f7c4e1 feat: Phase 4 V1 — Robustness Hardening
Adds the observability + safety layer that turns AtoCore from
"works until something silently breaks" into "every mutation is
traceable, drift is detected, failures raise alerts."

1. Audit log (memory_audit table):
   - New table with id, memory_id, action, actor, before/after JSON,
     note, timestamp; 3 indexes for memory_id/timestamp/action
   - _audit_memory() helper called from every mutation:
     create_memory, update_memory, promote_memory,
     reject_candidate_memory, invalidate_memory, supersede_memory,
     reinforce_memory, auto_promote_reinforced, expire_stale_candidates
   - Action verb auto-selected: promoted/rejected/invalidated/
     superseded/updated based on state transition
   - "actor" threaded through: api-http, human-triage, phase10-auto-
     promote, candidate-expiry, reinforcement, etc.
   - Fail-open: audit write failure logs but never breaks the mutation
   - GET /memory/{id}/audit: full history for one memory
   - GET /admin/audit/recent: last 50 mutations across the system

2. Alerts framework (src/atocore/observability/alerts.py):
   - emit_alert(severity, title, message, context) fans out to:
     - structlog logger (always)
     - ~/atocore-logs/alerts.log append (configurable via
       ATOCORE_ALERT_LOG)
     - project_state atocore/alert/last_{severity} (dashboard surface)
     - ATOCORE_ALERT_WEBHOOK POST if set (auto-detects Discord webhook
       format for nice embeds; generic JSON otherwise)
   - Every sink fails open — one failure doesn't prevent the others
   - Pipeline alert step in nightly cron: harness < 85% → warning;
     candidate queue > 200 → warning

3. Integrity checks (scripts/integrity_check.py):
   - Nightly scan for drift:
     - Memories → missing source_chunk_id references
     - Duplicate active memories (same type+content+project)
     - project_state → missing projects
     - Orphaned source_chunks (no parent document)
   - Results persisted to atocore/status/integrity_check_result
   - Any finding emits a warning alert
   - Added as Step G in deploy/dalidou/batch-extract.sh nightly cron

4. Dashboard surfaces it all:
   - integrity (findings + details)
   - alerts (last info/warning/critical per severity)
   - recent_audit (last 10 mutations with actor + action + preview)

Tests: 308 → 317 (9 new):
  - test_audit_create_logs_entry
  - test_audit_promote_logs_entry
  - test_audit_reject_logs_entry
  - test_audit_update_captures_before_after
  - test_audit_reinforce_logs_entry
  - test_recent_audit_returns_cross_memory_entries
  - test_emit_alert_writes_log_file
  - test_emit_alert_invalid_severity_falls_back_to_info
  - test_emit_alert_fails_open_on_log_write_error

Deferred: formal migration framework with rollback (current additive
pattern is fine for V1); memory detail wiki page with audit view
(quick follow-up).

To enable Discord alerts: set ATOCORE_ALERT_WEBHOOK to a Discord
webhook URL in Dalidou's environment. Default = log-only.

Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
2026-04-16 21:54:10 -04:00

448 lines
16 KiB
Python

"""Tests for Memory Core."""
import os
import tempfile
import pytest
import atocore.config as _config
from atocore.models.database import init_db
@pytest.fixture(autouse=True)
def isolated_db():
    """Give each test a completely isolated database.

    Creates a fresh temp directory, points ATOCORE_DATA_DIR at it,
    rebuilds the global Settings so every module sees the new path,
    initializes the schema, and removes the directory after the test.

    Yields:
        str: path to the per-test data directory.
    """
    import shutil

    tmpdir = tempfile.mkdtemp()
    os.environ["ATOCORE_DATA_DIR"] = tmpdir
    # Replace the global settings so all modules see the new data_dir.
    # database.py reads _config.settings dynamically, so no further
    # patching of module-level references is needed.
    _config.settings = _config.Settings()
    init_db()
    yield tmpdir
    # Teardown: the previous version leaked one temp dir per test run.
    # ignore_errors keeps cleanup best-effort (e.g. files still held open).
    shutil.rmtree(tmpdir, ignore_errors=True)
def test_create_memory(isolated_db):
    """A freshly created memory is active with full confidence."""
    from atocore.memory.service import create_memory

    created = create_memory("identity", "User is a mechanical engineer specializing in optics")
    assert created.memory_type == "identity"
    assert created.status == "active"
    assert created.confidence == 1.0
def test_create_memory_invalid_type(isolated_db):
    """Unknown memory types are rejected with a ValueError."""
    from atocore.memory.service import create_memory

    with pytest.raises(ValueError, match="Invalid memory type"):
        create_memory("invalid_type", "some content")
def test_create_memory_dedup(isolated_db):
    """Creating identical content twice returns the existing memory."""
    from atocore.memory.service import create_memory

    first = create_memory("identity", "User is an engineer")
    second = create_memory("identity", "User is an engineer")
    assert first.id == second.id
def test_create_memory_dedup_is_project_scoped(isolated_db):
    """Identical content under different projects yields distinct memories."""
    from atocore.memory.service import create_memory

    in_atocore = create_memory("project", "Uses SQLite for local state", project="atocore")
    in_openclaw = create_memory("project", "Uses SQLite for local state", project="openclaw")
    assert in_atocore.id != in_openclaw.id
def test_project_is_persisted_and_filterable(isolated_db):
    """The project field round-trips and acts as a query filter."""
    from atocore.memory.service import create_memory, get_memories

    create_memory("project", "Uses SQLite for local state", project="atocore")
    create_memory("project", "Uses Postgres in production", project="openclaw")
    filtered = get_memories(memory_type="project", project="atocore")
    assert len(filtered) == 1
    assert filtered[0].project == "atocore"
def test_get_memories_all(isolated_db):
    """With no filters, every stored memory is returned."""
    from atocore.memory.service import create_memory, get_memories

    seeds = [
        ("identity", "User is an engineer"),
        ("preference", "Prefers Python with type hints"),
        ("knowledge", "Zerodur has near-zero thermal expansion"),
    ]
    for mem_type, content in seeds:
        create_memory(mem_type, content)
    assert len(get_memories()) == 3
def test_get_memories_by_type(isolated_db):
    """memory_type narrows results to that type only."""
    from atocore.memory.service import create_memory, get_memories

    create_memory("identity", "User is an engineer")
    create_memory("preference", "Prefers concise code")
    create_memory("preference", "Uses FastAPI for APIs")
    preferences = get_memories(memory_type="preference")
    assert len(preferences) == 2
def test_get_memories_active_only(isolated_db):
    """Invalidated memories are hidden unless active_only=False."""
    from atocore.memory.service import create_memory, get_memories, invalidate_memory

    mem = create_memory("knowledge", "Fact about optics")
    invalidate_memory(mem.id)
    assert not get_memories(active_only=True)
    assert len(get_memories(active_only=False)) == 1
def test_get_memories_min_confidence(isolated_db):
    """min_confidence filters out memories below the threshold."""
    from atocore.memory.service import create_memory, get_memories

    create_memory("knowledge", "High confidence fact", confidence=0.9)
    create_memory("knowledge", "Low confidence fact", confidence=0.3)
    confident = get_memories(min_confidence=0.5)
    # Only the 0.9 memory survives the cut.
    assert [m.confidence for m in confident] == [0.9]
def test_update_memory(isolated_db):
    """update_memory rewrites content and confidence in place."""
    from atocore.memory.service import create_memory, get_memories, update_memory

    mem = create_memory("knowledge", "Initial fact")
    update_memory(mem.id, content="Updated fact", confidence=0.8)
    stored = get_memories()
    assert len(stored) == 1
    updated = stored[0]
    assert updated.content == "Updated fact"
    assert updated.confidence == 0.8
def test_update_memory_rejects_duplicate_active_memory(isolated_db):
    """Updating a memory to duplicate another active one must fail.

    pytest is already imported at module level, so the redundant
    function-local import was dropped; the unused `first` binding
    was removed as well.
    """
    from atocore.memory.service import create_memory, update_memory

    create_memory("knowledge", "Canonical fact", project="atocore")
    second = create_memory("knowledge", "Different fact", project="atocore")
    with pytest.raises(ValueError, match="duplicate active memory"):
        update_memory(second.id, content="Canonical fact")
def test_create_memory_validates_confidence(isolated_db):
    """Confidence outside [0.0, 1.0] raises ValueError.

    pytest is already imported at module level, so the redundant
    function-local import was dropped.
    """
    from atocore.memory.service import create_memory

    with pytest.raises(ValueError, match="Confidence must be between 0.0 and 1.0"):
        create_memory("knowledge", "Out of range", confidence=1.5)
def test_invalidate_memory(isolated_db):
    """An invalidated memory disappears from the active view."""
    from atocore.memory.service import create_memory, get_memories, invalidate_memory

    mem = create_memory("knowledge", "Wrong fact")
    invalidate_memory(mem.id)
    assert not get_memories(active_only=True)
def test_supersede_memory(isolated_db):
    """Superseding flips status but keeps the row queryable."""
    from atocore.memory.service import create_memory, get_memories, supersede_memory

    mem = create_memory("knowledge", "Old fact")
    supersede_memory(mem.id)
    everything = get_memories(active_only=False)
    assert len(everything) == 1
    assert everything[0].status == "superseded"
def test_memories_for_context(isolated_db):
    """The context pack includes the header plus one section per type."""
    from atocore.memory.service import create_memory, get_memories_for_context

    create_memory("identity", "User is a senior mechanical engineer")
    create_memory("preference", "Prefers Python with type hints")
    rendered, char_count = get_memories_for_context(memory_types=["identity", "preference"], budget=500)
    for marker in ("--- AtoCore Memory ---", "[identity]", "[preference]"):
        assert marker in rendered
    assert char_count > 0
def test_memories_for_context_reserves_room_for_each_type(isolated_db):
    """A long entry of one type must not starve other types of budget."""
    from atocore.memory.service import create_memory, get_memories_for_context

    create_memory("identity", "Identity entry that is intentionally long so it could consume the whole budget on its own")
    create_memory("preference", "Preference entry that should still appear")
    rendered, _ = get_memories_for_context(memory_types=["identity", "preference"], budget=120)
    assert "[preference]" in rendered
def test_memories_for_context_respects_actual_serialized_budget(isolated_db):
    """Reported char count matches the serialized text and stays in budget."""
    from atocore.memory.service import create_memory, get_memories_for_context

    create_memory("identity", "Identity text that should fit the wrapper-aware memory budget calculation")
    create_memory("preference", "Preference text that should also fit")
    rendered, used = get_memories_for_context(memory_types=["identity", "preference"], budget=140)
    assert used == len(rendered)
    assert used <= 140
def test_memories_for_context_empty(isolated_db):
    """No stored memories yields an empty pack with zero chars."""
    from atocore.memory.service import get_memories_for_context

    rendered, used = get_memories_for_context()
    assert rendered == ""
    assert used == 0
# --- Phase 10: auto-promotion + candidate expiry ---
def _get_memory_by_id(memory_id):
    """Helper: fetch one memory row by ID as a dict, or None if missing."""
    from atocore.models.database import get_connection

    with get_connection() as conn:
        cursor = conn.execute("SELECT * FROM memories WHERE id = ?", (memory_id,))
        row = cursor.fetchone()
        return dict(row) if row is not None else None
def test_auto_promote_reinforced_basic(isolated_db):
    """Candidates with enough references and confidence get promoted.

    The original imported reinforce_memory but never called it (the
    comment below explains why); the unused import was removed.
    """
    from datetime import datetime, timezone

    from atocore.memory.service import auto_promote_reinforced, create_memory
    from atocore.models.database import get_connection

    candidate = create_memory("knowledge", "Zerodur has near-zero CTE", status="candidate", confidence=0.7)
    # reinforce_memory only touches active memories, so instead of
    # promote -> reinforce -> demote, bump reference_count and
    # last_referenced_at directly on the row.
    stamp = datetime.now(timezone.utc).strftime("%Y-%m-%d %H:%M:%S")
    with get_connection() as conn:
        conn.execute(
            "UPDATE memories SET reference_count = 3, last_referenced_at = ? WHERE id = ?",
            (stamp, candidate.id),
        )
    promoted = auto_promote_reinforced(min_reference_count=3, min_confidence=0.7)
    assert candidate.id in promoted
    assert _get_memory_by_id(candidate.id)["status"] == "active"
def test_auto_promote_reinforced_ignores_low_refs(isolated_db):
    """A candidate below the reference threshold stays a candidate."""
    from datetime import datetime, timezone

    from atocore.memory.service import auto_promote_reinforced, create_memory
    from atocore.models.database import get_connection

    candidate = create_memory("knowledge", "Some knowledge", status="candidate", confidence=0.7)
    stamp = datetime.now(timezone.utc).strftime("%Y-%m-%d %H:%M:%S")
    with get_connection() as conn:
        conn.execute(
            "UPDATE memories SET reference_count = 1, last_referenced_at = ? WHERE id = ?",
            (stamp, candidate.id),
        )
    promoted = auto_promote_reinforced(min_reference_count=3, min_confidence=0.7)
    assert candidate.id not in promoted
    assert _get_memory_by_id(candidate.id)["status"] == "candidate"
def test_expire_stale_candidates(isolated_db):
    """Unreferenced candidates older than the cutoff are invalidated."""
    from atocore.memory.service import create_memory, expire_stale_candidates
    from atocore.models.database import get_connection

    stale = create_memory("knowledge", "Old unreferenced fact", status="candidate")
    # Backdate creation well past the 14-day window.
    with get_connection() as conn:
        conn.execute(
            "UPDATE memories SET created_at = datetime('now', '-30 days') WHERE id = ?",
            (stale.id,),
        )
    assert stale.id in expire_stale_candidates(max_age_days=14)
    assert _get_memory_by_id(stale.id)["status"] == "invalid"
# --- Phase 4: memory_audit log ---
def test_audit_create_logs_entry(isolated_db):
    """Creating a memory writes a 'created' audit entry with the actor."""
    from atocore.memory.service import create_memory, get_memory_audit

    mem = create_memory("knowledge", "test content for audit", actor="test-harness")
    trail = get_memory_audit(mem.id)
    assert trail
    newest = trail[0]
    assert newest["action"] == "created"
    assert newest["actor"] == "test-harness"
    assert newest["after"]["content"] == "test content for audit"
def test_audit_promote_logs_entry(isolated_db):
    """Promotion records before/after status and the promoting actor."""
    from atocore.memory.service import create_memory, get_memory_audit, promote_memory

    mem = create_memory("knowledge", "candidate for promote", status="candidate")
    promote_memory(mem.id, actor="test-triage")
    promotions = [e for e in get_memory_audit(mem.id) if e["action"] == "promoted"]
    assert promotions
    entry = promotions[0]
    assert entry["actor"] == "test-triage"
    assert entry["before"]["status"] == "candidate"
    assert entry["after"]["status"] == "active"
def test_audit_reject_logs_entry(isolated_db):
    """Rejecting a candidate logs a 'rejected' entry carrying the note."""
    from atocore.memory.service import create_memory, get_memory_audit, reject_candidate_memory

    mem = create_memory("knowledge", "candidate for reject", status="candidate")
    reject_candidate_memory(mem.id, actor="test-triage", note="stale")
    rejections = [e for e in get_memory_audit(mem.id) if e["action"] == "rejected"]
    assert rejections
    assert rejections[0]["note"] == "stale"
def test_audit_update_captures_before_after(isolated_db):
    """Updates snapshot both the prior and the new content/confidence."""
    from atocore.memory.service import create_memory, get_memory_audit, update_memory

    mem = create_memory("knowledge", "original content", confidence=0.5)
    update_memory(mem.id, content="updated content", confidence=0.9, actor="human-edit")
    updates = [e for e in get_memory_audit(mem.id) if e["action"] == "updated"]
    assert updates
    entry = updates[0]
    assert entry["before"]["content"] == "original content"
    assert entry["after"]["content"] == "updated content"
    assert entry["before"]["confidence"] == 0.5
    assert entry["after"]["confidence"] == 0.9
def test_audit_reinforce_logs_entry(isolated_db):
    """Reinforcement shows up in the memory's audit trail."""
    from atocore.memory.service import create_memory, get_memory_audit, reinforce_memory

    mem = create_memory("knowledge", "reinforced mem", confidence=0.5)
    reinforce_memory(mem.id, confidence_delta=0.02)
    assert any(e["action"] == "reinforced" for e in get_memory_audit(mem.id))
def test_recent_audit_returns_cross_memory_entries(isolated_db):
    """get_recent_audit spans mutations across multiple memories."""
    from atocore.memory.service import create_memory, get_recent_audit

    first = create_memory("knowledge", "mem one content", actor="harness")
    second = create_memory("knowledge", "mem two content", actor="harness")
    seen = {entry["memory_id"] for entry in get_recent_audit(limit=10)}
    assert first.id in seen
    assert second.id in seen
# --- Phase 3: domain_tags + valid_until ---
def test_create_memory_with_tags_and_valid_until(isolated_db):
    """domain_tags and valid_until are stored and exposed on the model."""
    from atocore.memory.service import create_memory

    mem = create_memory(
        "knowledge",
        "CTE gradient dominates WFE at F/1.2",
        domain_tags=["optics", "thermal", "materials"],
        valid_until="2027-01-01",
    )
    assert mem.domain_tags == ["optics", "thermal", "materials"]
    assert mem.valid_until == "2027-01-01"
def test_create_memory_normalizes_tags(isolated_db):
    """Tags are stripped, lowercased, deduplicated; empty entries dropped."""
    from atocore.memory.service import create_memory

    mem = create_memory(
        "knowledge",
        "some content here",
        domain_tags=[" Optics ", "OPTICS", "Thermal", ""],
    )
    assert mem.domain_tags == ["optics", "thermal"]
def test_update_memory_sets_tags_and_valid_until(isolated_db):
    """update_memory persists tag and expiry changes to the database row."""
    import json

    from atocore.memory.service import create_memory, update_memory
    from atocore.models.database import get_connection

    mem = create_memory("knowledge", "some content for update test")
    assert update_memory(
        mem.id,
        domain_tags=["controls", "firmware"],
        valid_until="2026-12-31",
    )
    with get_connection() as conn:
        row = conn.execute(
            "SELECT domain_tags, valid_until FROM memories WHERE id = ?", (mem.id,)
        ).fetchone()
        # Tags are stored as a JSON array in the column.
        assert json.loads(row["domain_tags"]) == ["controls", "firmware"]
        assert row["valid_until"] == "2026-12-31"
def test_get_memories_for_context_excludes_expired(isolated_db):
    """Expired active memories must not land in context packs."""
    from atocore.memory.service import create_memory, get_memories_for_context

    # One active-but-expired memory...
    create_memory(
        "knowledge",
        "stale snapshot from long ago period",
        valid_until="2020-01-01",
        confidence=1.0,
    )
    # ...and one active, still-valid memory.
    create_memory(
        "knowledge",
        "durable engineering insight stays valid forever",
        confidence=1.0,
    )
    rendered, _ = get_memories_for_context(memory_types=["knowledge"], budget=600)
    assert "durable engineering" in rendered
    assert "stale snapshot" not in rendered
def test_context_builder_tag_boost_orders_results(isolated_db):
    """Memories whose tags match the query should rank higher."""
    from atocore.memory.service import create_memory, get_memories_for_context

    create_memory("knowledge", "generic content has no obvious overlap with topic", confidence=0.8, domain_tags=[])
    create_memory("knowledge", "generic content has no obvious overlap topic here", confidence=0.8, domain_tags=["optics"])
    rendered, _ = get_memories_for_context(
        memory_types=["knowledge"],
        budget=2000,
        query="tell me about optics",
    )
    # Both must be present, with the tag-matched one rendered first.
    tagged_pos = rendered.find("overlap topic here")
    untagged_pos = rendered.find("overlap with topic")
    assert tagged_pos != -1
    assert untagged_pos != -1
    assert tagged_pos < untagged_pos
def test_expire_stale_candidates_keeps_reinforced(isolated_db):
    """Old candidates that have been referenced are spared from expiry."""
    from atocore.memory.service import create_memory, expire_stale_candidates
    from atocore.models.database import get_connection

    referenced = create_memory("knowledge", "Referenced fact", status="candidate")
    # Backdate the row past the window but record a single reference.
    with get_connection() as conn:
        conn.execute(
            "UPDATE memories SET reference_count = 1, "
            "created_at = datetime('now', '-30 days') WHERE id = ?",
            (referenced.id,),
        )
    assert referenced.id not in expire_stale_candidates(max_age_days=14)
    assert _get_memory_by_id(referenced.id)["status"] == "candidate"