Files
Atomizer/atomizer-dashboard/backend/api/routes/claude.py
Anto01 ac5e9b4054 docs: Comprehensive documentation update for Dashboard V3 and Canvas
## Documentation Updates
- DASHBOARD.md: Updated to V3.0 with Canvas V3 features, file browser, introspection
- DASHBOARD_IMPLEMENTATION_STATUS.md: Marked Canvas V3 features as COMPLETE
- CANVAS.md: New comprehensive guide for Canvas Builder V3 with all features
- CLAUDE.md: Added dashboard quick reference and Canvas V3 features

## Canvas V3 Features Documented
- File Browser: Browse studies directory for model files
- Model Introspection: Auto-discover expressions, solver type, dependencies
- One-Click Add: Add expressions as design variables instantly
- Claude Bug Fixes: WebSocket reconnection, SQL errors resolved
- Health Check: /api/health endpoint for monitoring

## Backend Services
- NX introspection service with expression discovery
- File browser API with type filtering
- Claude session management improvements
- Context builder enhancements

## Frontend Components
- FileBrowser: Modal for file selection with search
- IntrospectionPanel: View discovered model information
- ExpressionSelector: Dropdown for design variable configuration
- Improved chat hooks with reconnection logic

## Plan Documents
- Added RALPH_LOOP_CANVAS_V2/V3 implementation records
- Added ATOMIZER_DASHBOARD_V2_MASTER_PLAN
- Added investigation and sync documentation

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
2026-01-16 20:48:58 -05:00

442 lines
13 KiB
Python

"""
Claude Chat API Routes
Provides endpoints for AI-powered chat within the Atomizer dashboard.
Two approaches:
1. Session-based: Persistent sessions with MCP tools (new)
2. Legacy: Stateless CLI calls (backwards compatible)
"""
from fastapi import APIRouter, HTTPException, WebSocket, WebSocketDisconnect
from fastapi.responses import StreamingResponse
from pydantic import BaseModel
from typing import Optional, List, Dict, Any, Literal
import json
router = APIRouter()
# ========== Request/Response Models ==========
class ChatMessage(BaseModel):
    """A single conversational turn exchanged with Claude."""

    role: str  # "user" or "assistant"
    content: str  # plain-text message body
class ChatRequest(BaseModel):
    """Payload for the legacy stateless chat endpoints (/chat, /chat/stream)."""

    message: str  # the user's prompt
    study_id: Optional[str] = None  # optional study to provide context
    # Prior turns; entries with non-string "content" are filtered out by handlers.
    conversation_history: Optional[List[Dict[str, Any]]] = None
class ChatResponse(BaseModel):
    """Response body returned by the legacy /chat endpoint."""

    response: str  # Claude's textual reply
    tool_calls: Optional[List[Dict[str, Any]]] = None  # tool invocations, if any
    study_id: Optional[str] = None  # echoed study context from the request
class CreateSessionRequest(BaseModel):
    """Payload for POST /sessions: create or resume a Claude session."""

    mode: Literal["user", "power"] = "user"  # "user" = safe ops, "power" = full access
    study_id: Optional[str] = None  # optional study to provide context
    resume_session_id: Optional[str] = None  # resume an existing session if given
class SwitchModeRequest(BaseModel):
    """Payload for POST /sessions/{id}/mode: the mode to switch into."""

    mode: Literal["user", "power"]
# Store active conversations (legacy, in production use database)
_conversations: Dict[str, List[Dict[str, Any]]] = {}

# ========== Session Manager Access ==========

# Process-wide SessionManager singleton; created lazily by get_session_manager().
_session_manager = None
def get_session_manager():
    """Return the process-wide SessionManager, creating it on first use.

    The import happens inside the function to avoid a circular dependency
    between this module and api.services.session_manager.
    """
    global _session_manager
    if _session_manager is not None:
        return _session_manager
    from api.services.session_manager import SessionManager
    _session_manager = SessionManager()
    return _session_manager
# ========== NEW: Session-based Endpoints ==========
@router.post("/sessions")
async def create_session(request: CreateSessionRequest):
    """
    Create or resume a Claude session with MCP tools.

    Args:
        request: carries mode ("user" for safe operations, "power" for full
            access), an optional study_id for context, and an optional
            resume_session_id to resume an existing session.

    Returns:
        dict with session_id, mode, study_id, and an is_alive flag.

    Raises:
        HTTPException: 500 with the exception class and message on any failure.
    """
    try:
        manager = get_session_manager()
        new_session = await manager.create_session(
            mode=request.mode,
            study_id=request.study_id,
            resume_session_id=request.resume_session_id,
        )
        return {
            "session_id": new_session.session_id,
            "mode": new_session.mode,
            "study_id": new_session.study_id,
            "is_alive": new_session.is_alive(),
        }
    except Exception as exc:
        import traceback

        # Surface the full traceback in server logs; clients get a compact detail.
        traceback.print_exc()
        detail = f"{type(exc).__name__}: {str(exc) or 'No message'}"
        raise HTTPException(status_code=500, detail=detail)
@router.get("/sessions/{session_id}")
async def get_session(session_id: str):
    """Return stored info for *session_id*; 404 when nothing is found."""
    session_info = get_session_manager().get_session_info(session_id)
    if not session_info:
        raise HTTPException(status_code=404, detail="Session not found")
    return session_info
@router.post("/sessions/{session_id}/mode")
async def switch_session_mode(session_id: str, request: SwitchModeRequest):
    """
    Switch a session's mode (requires session restart).

    Args:
        session_id: Session to update.
        request: carries the new mode ("user" or "power").

    Raises:
        HTTPException: 404 when the session id is unknown (ValueError from
            the manager), 500 for any other failure.
    """
    manager = get_session_manager()
    try:
        updated = await manager.switch_mode(session_id, request.mode)
        return {
            "session_id": updated.session_id,
            "mode": updated.mode,
            "message": f"Mode switched to {request.mode}",
        }
    except ValueError as exc:
        raise HTTPException(status_code=404, detail=str(exc))
    except Exception as exc:
        raise HTTPException(status_code=500, detail=str(exc))
@router.post("/sessions/{session_id}/study")
async def set_session_study(session_id: str, study_id: str):
    """Attach (or replace) the study context on an existing session.

    NOTE(review): study_id is a bare function parameter, so FastAPI reads it
    from the query string rather than a JSON body — confirm the frontend
    sends it that way.
    """
    try:
        manager = get_session_manager()
        await manager.set_study_context(session_id, study_id)
        return {"message": f"Study context updated to {study_id}"}
    except Exception as exc:
        raise HTTPException(status_code=500, detail=str(exc))
@router.websocket("/sessions/{session_id}/ws")
async def session_websocket(websocket: WebSocket, session_id: str):
    """
    WebSocket for real-time chat with a session.

    Message formats (client -> server):
        {"type": "message", "content": "user message", "canvas_state": {...}}
        {"type": "set_study", "study_id": "study_name"}
        {"type": "set_canvas", "canvas_state": {...}}
        {"type": "ping"}

    Message formats (server -> client):
        {"type": "text", "content": "..."}
        {"type": "tool_call", "tool": {...}}
        {"type": "tool_result", "result": {...}}
        {"type": "done", "tool_calls": [...]}
        {"type": "error", "message": "..."}
        {"type": "pong"}
        {"type": "context_updated", "study_id": "..."}
        {"type": "canvas_updated", "canvas_state": {...}}
    """
    await websocket.accept()

    manager = get_session_manager()
    session = manager.get_session(session_id)
    if not session:
        await websocket.send_json({"type": "error", "message": "Session not found"})
        await websocket.close()
        return

    # Canvas state is tracked per-connection: "set_canvas" updates it, and
    # later "message" frames fall back to it when they carry no state.
    current_canvas_state: Dict[str, Any] = {}

    try:
        while True:
            data = await websocket.receive_json()

            if data.get("type") == "message":
                content = data.get("content", "")
                if not content:
                    continue  # ignore empty messages
                # Prefer canvas state attached to the message itself.
                canvas_state = data.get("canvas_state") or current_canvas_state
                # Relay each streamed chunk from the session straight to
                # the client (text / tool_call / tool_result / done / ...).
                async for chunk in manager.send_message(
                    session_id,
                    content,
                    canvas_state=canvas_state if canvas_state else None,
                ):
                    await websocket.send_json(chunk)

            elif data.get("type") == "set_study":
                study_id = data.get("study_id")
                if study_id:
                    await manager.set_study_context(session_id, study_id)
                    await websocket.send_json({
                        "type": "context_updated",
                        "study_id": study_id,
                    })

            elif data.get("type") == "set_canvas":
                # Update canvas state for this connection
                current_canvas_state = data.get("canvas_state", {})
                await websocket.send_json({
                    "type": "canvas_updated",
                    "canvas_state": current_canvas_state,
                })

            elif data.get("type") == "ping":
                await websocket.send_json({"type": "pong"})

    except WebSocketDisconnect:
        pass  # normal client disconnect
    except Exception as e:
        # Best-effort error notification; the socket may already be closed.
        try:
            await websocket.send_json({"type": "error", "message": str(e)})
        except Exception:
            # FIX: was a bare `except:`, which would also swallow
            # KeyboardInterrupt/SystemExit; narrowed to Exception.
            pass
# ========== LEGACY: Stateless Endpoints (backwards compatible) ==========
@router.get("/status")
async def get_claude_status():
    """
    Check if the Claude CLI is available.

    Returns:
        JSON with availability flag, human-readable message, and mode.
    """
    import shutil

    found = shutil.which("claude") is not None
    message = (
        "Claude CLI is available" if found else "Claude CLI not found in PATH"
    )
    return {
        "available": found,
        "message": message,
        "mode": "cli",  # Indicate we're using CLI mode
    }
@router.post("/chat", response_model=ChatResponse)
async def chat_with_claude(request: ChatRequest):
    """
    Send a message to Claude via CLI with Atomizer context.

    Args:
        request: message, optional study_id, and optional conversation history.

    Returns:
        ChatResponse with Claude's reply and any tool calls.

    Raises:
        HTTPException: 500 wrapping any underlying failure.
    """
    try:
        from api.services.claude_cli_agent import AtomizerCLIAgent

        agent = AtomizerCLIAgent(study_id=request.study_id)

        # Keep only history entries whose content is a plain string.
        prior_turns = [
            msg
            for msg in (request.conversation_history or [])
            if isinstance(msg.get('content'), str)
        ]

        result = await agent.chat(request.message, prior_turns)
        return ChatResponse(
            response=result["response"],
            tool_calls=result.get("tool_calls"),
            study_id=request.study_id,
        )
    except Exception as e:
        raise HTTPException(
            status_code=500,
            detail=f"Chat error: {str(e)}",
        )
@router.post("/chat/stream")
async def chat_stream(request: ChatRequest):
    """
    Stream a response from Claude CLI token by token.

    Args:
        request: message plus optional study_id / conversation history.

    Returns:
        StreamingResponse (text/event-stream) emitting {"token": ...}
        frames, then {"done": true}; failures emit {"error": ...}.
    """

    async def generate():
        try:
            from api.services.claude_cli_agent import AtomizerCLIAgent

            agent = AtomizerCLIAgent(study_id=request.study_id)

            # Keep only history entries whose content is a plain string.
            prior_turns = [
                msg
                for msg in (request.conversation_history or [])
                if isinstance(msg.get('content'), str)
            ]

            async for token in agent.chat_stream(request.message, prior_turns):
                yield f"data: {json.dumps({'token': token})}\n\n"
            yield f"data: {json.dumps({'done': True})}\n\n"
        except Exception as e:
            yield f"data: {json.dumps({'error': str(e)})}\n\n"

    return StreamingResponse(
        generate(),
        media_type="text/event-stream",
        headers={
            "Cache-Control": "no-cache",
            "Connection": "keep-alive",
        },
    )
@router.websocket("/chat/ws")
async def websocket_chat(websocket: WebSocket):
    """
    WebSocket endpoint for real-time chat (legacy, stateless agent per message).

    Message format (client -> server):
        {"type": "message", "content": "user message", "study_id": "optional"}
        {"type": "clear"}

    Message format (server -> client):
        {"type": "response", "content": "...", "tool_calls": [...]}
        {"type": "error", "message": "..."}
        {"type": "cleared"}

    (FIX: docstring previously advertised "token"/"done" frames, but this
    handler uses non-streaming chat and sends a single "response" frame.)
    """
    await websocket.accept()
    conversation_history = []

    try:
        from api.services.claude_cli_agent import AtomizerCLIAgent

        while True:
            # Receive message from client
            data = await websocket.receive_json()

            if data.get("type") == "message":
                content = data.get("content", "")
                study_id = data.get("study_id")
                if not content:
                    continue  # ignore empty messages

                # Fresh agent per message; continuity comes from the
                # accumulated conversation_history.
                agent = AtomizerCLIAgent(study_id=study_id)
                try:
                    # Use non-streaming chat
                    result = await agent.chat(content, conversation_history)
                    await websocket.send_json({
                        "type": "response",
                        "content": result["response"],
                        "tool_calls": result.get("tool_calls", [])
                    })
                    # Record the turn only after a successful reply.
                    conversation_history.append({"role": "user", "content": content})
                    conversation_history.append({"role": "assistant", "content": result["response"]})
                except Exception as e:
                    await websocket.send_json({
                        "type": "error",
                        "message": str(e)
                    })

            elif data.get("type") == "clear":
                # Clear conversation history
                conversation_history = []
                await websocket.send_json({"type": "cleared"})

    except WebSocketDisconnect:
        pass  # normal client disconnect
    except Exception as e:
        # Best-effort error notification; the socket may already be closed.
        try:
            await websocket.send_json({
                "type": "error",
                "message": str(e)
            })
        except Exception:
            # FIX: was a bare `except:`, which would also swallow
            # KeyboardInterrupt/SystemExit; narrowed to Exception.
            pass
@router.get("/suggestions")
async def get_chat_suggestions(study_id: Optional[str] = None):
    """
    Get contextual chat suggestions based on the current study.

    Args:
        study_id: Optional study to tailor suggestions toward.

    Returns:
        {"suggestions": [...]} — five study-specific prompts when study_id
        is given, otherwise three generic prompts, in both cases padded
        with the first three base suggestions.
    """
    base_suggestions = [
        "What's the status of my optimization?",
        "Show me the best designs found",
        "Compare the top 3 trials",
        "What parameters have the most impact?",
        "Explain the convergence behavior"
    ]
    common_tail = base_suggestions[:3]

    if study_id:
        # Study-specific suggestions come first.
        tailored = [
            f"Summarize the {study_id} study",
            "What's the current best objective value?",
            "Are there any failed trials? Why?",
            "Show parameter sensitivity analysis",
            "What should I try next to improve results?"
        ]
        return {"suggestions": tailored + common_tail}

    generic = [
        "List all available studies",
        "Help me create a new study",
        "What can you help me with?"
    ]
    return {"suggestions": generic + common_tail}