feat: "Make It Actually Useful" sprint — observability + Phase 10
Pipeline observability: - Retrieval harness runs nightly (Step E in batch-extract.sh) - Pipeline summary persisted to project state after each run (pipeline_last_run, pipeline_summary, retrieval_harness_result) - Dashboard enhanced: interaction total + by_client, pipeline health (last_run, hours_since, harness results, triage stats), dynamic project list from registry Phase 10 — reinforcement-based auto-promotion: - auto_promote_reinforced(): candidates with reference_count >= 3 and confidence >= 0.7 auto-graduate to active - expire_stale_candidates(): candidates unreinforced for 14+ days auto-rejected to prevent unbounded queue growth - Both wired into nightly cron (Step B2) - Batch script: scripts/auto_promote_reinforced.py (--dry-run support) Knowledge seeding: - scripts/seed_project_state.py: 26 curated Trusted Project State entries across p04-gigabit, p05-interferometer, p06-polisher, atomizer-v2, abb-space, atocore (decisions, requirements, facts, contacts, milestones) Tests: 299 → 303 (4 new Phase 10 tests) Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
This commit is contained in:
@@ -34,22 +34,36 @@ export PYTHONPATH="$APP_DIR/src:${PYTHONPATH:-}"
|
||||
log "=== AtoCore batch extraction + triage starting ==="
|
||||
log "URL=$ATOCORE_URL LIMIT=$LIMIT"
|
||||
|
||||
# --- Pipeline stats accumulator ---
|
||||
EXTRACT_OUT=""
|
||||
TRIAGE_OUT=""
|
||||
HARNESS_OUT=""
|
||||
|
||||
# Step A: Extract candidates from recent interactions
|
||||
log "Step A: LLM extraction"
|
||||
python3 "$APP_DIR/scripts/batch_llm_extract_live.py" \
|
||||
EXTRACT_OUT=$(python3 "$APP_DIR/scripts/batch_llm_extract_live.py" \
|
||||
--base-url "$ATOCORE_URL" \
|
||||
--limit "$LIMIT" \
|
||||
2>&1 || {
|
||||
2>&1) || {
|
||||
log "WARN: batch extraction failed (non-blocking)"
|
||||
}
|
||||
echo "$EXTRACT_OUT"
|
||||
|
||||
# Step B: Auto-triage candidates in the queue
|
||||
log "Step B: auto-triage"
|
||||
python3 "$APP_DIR/scripts/auto_triage.py" \
|
||||
TRIAGE_OUT=$(python3 "$APP_DIR/scripts/auto_triage.py" \
|
||||
--base-url "$ATOCORE_URL" \
|
||||
2>&1 || {
|
||||
2>&1) || {
|
||||
log "WARN: auto-triage failed (non-blocking)"
|
||||
}
|
||||
echo "$TRIAGE_OUT"
|
||||
|
||||
# Step B2: Auto-promote reinforced candidates + expire stale ones
|
||||
log "Step B2: auto-promote + expire"
|
||||
python3 "$APP_DIR/scripts/auto_promote_reinforced.py" \
|
||||
2>&1 || {
|
||||
log "WARN: auto-promote/expire failed (non-blocking)"
|
||||
}
|
||||
|
||||
# Step C: Weekly synthesis (Sundays only)
|
||||
if [[ "$(date -u +%u)" == "7" ]]; then
|
||||
@@ -66,4 +80,73 @@ if [[ "$(date -u +%u)" == "7" ]]; then
|
||||
2>&1 || true
|
||||
fi
|
||||
|
||||
# Step E: Retrieval harness (daily)
|
||||
log "Step E: retrieval harness"
|
||||
HARNESS_OUT=$(python3 "$APP_DIR/scripts/retrieval_eval.py" \
|
||||
--json \
|
||||
--base-url "$ATOCORE_URL" \
|
||||
2>&1) || {
|
||||
log "WARN: retrieval harness failed (non-blocking)"
|
||||
}
|
||||
echo "$HARNESS_OUT"
|
||||
|
||||
# Step F: Persist pipeline summary to project state
|
||||
log "Step F: pipeline summary"
|
||||
python3 -c "
|
||||
import json, urllib.request, re, sys
|
||||
|
||||
base = '$ATOCORE_URL'
|
||||
ts = '$TIMESTAMP'
|
||||
|
||||
def post_state(key, value):
|
||||
body = json.dumps({
|
||||
'project': 'atocore', 'category': 'status',
|
||||
'key': key, 'value': value, 'source': 'nightly pipeline',
|
||||
}).encode()
|
||||
req = urllib.request.Request(
|
||||
f'{base}/project/state', data=body,
|
||||
headers={'Content-Type': 'application/json'}, method='POST',
|
||||
)
|
||||
try:
|
||||
urllib.request.urlopen(req, timeout=10)
|
||||
except Exception as e:
|
||||
print(f'WARN: failed to persist {key}: {e}', file=sys.stderr)
|
||||
|
||||
# Parse harness JSON
|
||||
harness = {}
|
||||
try:
|
||||
harness = json.loads('''$HARNESS_OUT''')
|
||||
post_state('retrieval_harness_result', json.dumps({
|
||||
'passed': harness.get('passed', 0),
|
||||
'total': harness.get('total', 0),
|
||||
'failures': [f['name'] for f in harness.get('fixtures', []) if not f.get('ok')],
|
||||
'run_at': ts,
|
||||
}))
|
||||
p, t = harness.get('passed', '?'), harness.get('total', '?')
|
||||
print(f'Harness: {p}/{t}')
|
||||
except Exception:
|
||||
print('WARN: could not parse harness output')
|
||||
|
||||
# Parse triage counts from stdout
|
||||
triage_out = '''$TRIAGE_OUT'''
|
||||
promoted = len(re.findall(r'promoted', triage_out, re.IGNORECASE))
|
||||
rejected = len(re.findall(r'rejected', triage_out, re.IGNORECASE))
|
||||
needs_human = len(re.findall(r'needs.human', triage_out, re.IGNORECASE))
|
||||
|
||||
# Build summary
|
||||
summary = {
|
||||
'run_at': ts,
|
||||
'harness_passed': harness.get('passed', -1),
|
||||
'harness_total': harness.get('total', -1),
|
||||
'triage_promoted': promoted,
|
||||
'triage_rejected': rejected,
|
||||
'triage_needs_human': needs_human,
|
||||
}
|
||||
post_state('pipeline_last_run', ts)
|
||||
post_state('pipeline_summary', json.dumps(summary))
|
||||
print(f'Pipeline summary persisted: {json.dumps(summary)}')
|
||||
" 2>&1 || {
|
||||
log "WARN: pipeline summary persistence failed (non-blocking)"
|
||||
}
|
||||
|
||||
log "=== AtoCore batch extraction + triage complete ==="
|
||||
|
||||
Reference in New Issue
Block a user