Files
Atomizer/atomizer-dashboard/backend/api/routes/claude.py
Anto01 73a7b9d9f1 feat: Add dashboard chat integration and MCP server
Major changes:
- Dashboard: WebSocket-based chat with session management
- Dashboard: New chat components (ChatPane, ChatInput, ModeToggle)
- Dashboard: Enhanced UI with parallel coordinates chart
- MCP Server: New atomizer-tools server for Claude integration
- Extractors: Enhanced Zernike OPD extractor
- Reports: Improved report generator

New studies (configs and scripts only):
- M1 Mirror: Cost reduction campaign studies
- Simple Beam, Simple Bracket, UAV Arm studies

Note: Large iteration data (2_iterations/, best_design_archive/)
excluded via .gitignore - kept on local Gitea only.

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
2026-01-13 15:53:55 -05:00

419 lines
12 KiB
Python

"""
Claude Chat API Routes
Provides endpoints for AI-powered chat within the Atomizer dashboard.
Two approaches:
1. Session-based: Persistent sessions with MCP tools (new)
2. Legacy: Stateless CLI calls (backwards compatible)
"""
from fastapi import APIRouter, HTTPException, WebSocket, WebSocketDisconnect
from fastapi.responses import StreamingResponse
from pydantic import BaseModel
from typing import Optional, List, Dict, Any, Literal
import json
# Router for the Claude chat endpoints; mounted by the application elsewhere
# (URL prefix not visible in this file).
router = APIRouter()
# ========== Request/Response Models ==========
class ChatMessage(BaseModel):
    """One turn of a chat conversation."""
    # NOTE(review): not referenced by any endpoint in this file — presumably
    # kept for schema/OpenAPI consumers; confirm before removing.
    role: str  # "user" or "assistant"
    content: str  # message text
class ChatRequest(BaseModel):
    """Request body for the legacy stateless /chat and /chat/stream endpoints."""
    message: str  # the user's message
    study_id: Optional[str] = None  # optional study providing context
    # Prior turns; entries with a non-string "content" are dropped by the handlers.
    conversation_history: Optional[List[Dict[str, Any]]] = None
class ChatResponse(BaseModel):
    """Response body for the legacy /chat endpoint."""
    response: str  # Claude's reply text
    tool_calls: Optional[List[Dict[str, Any]]] = None  # tool invocations, if any
    study_id: Optional[str] = None  # echoed back from the request
class CreateSessionRequest(BaseModel):
    """Request body for POST /sessions."""
    mode: Literal["user", "power"] = "user"  # "user" = safe ops, "power" = full access
    study_id: Optional[str] = None  # optional study to provide context
    resume_session_id: Optional[str] = None  # resume an existing session instead of creating
class SwitchModeRequest(BaseModel):
    """Request body for POST /sessions/{session_id}/mode."""
    mode: Literal["user", "power"]  # target mode for the session
# Store active conversations (legacy, in production use database)
# NOTE(review): not read or written by any endpoint visible in this file —
# candidate for removal once confirmed unused elsewhere.
_conversations: Dict[str, List[Dict[str, Any]]] = {}
# ========== Session Manager Access ==========
# Process-wide singleton; populated on first access.
_session_manager = None


def get_session_manager():
    """Return the shared SessionManager, creating it on first call.

    The import is deliberately deferred into the function body to break a
    circular dependency with the services package.
    """
    global _session_manager
    if _session_manager is not None:
        return _session_manager
    from api.services.session_manager import SessionManager
    _session_manager = SessionManager()
    return _session_manager
# ========== NEW: Session-based Endpoints ==========
@router.post("/sessions")
async def create_session(request: CreateSessionRequest):
    """
    Create or resume a Claude session with MCP tools.

    Args:
        request: carries mode ("user" for safe operations, "power" for full
            access), an optional study_id for context, and an optional
            resume_session_id to pick an existing session back up.

    Returns:
        dict with session_id, mode, study_id and an is_alive flag.

    Raises:
        HTTPException: 500 wrapping any failure from the session manager.
    """
    try:
        manager = get_session_manager()
        session = await manager.create_session(
            mode=request.mode,
            study_id=request.study_id,
            resume_session_id=request.resume_session_id,
        )
    except Exception as exc:
        raise HTTPException(status_code=500, detail=str(exc))
    return {
        "session_id": session.session_id,
        "mode": session.mode,
        "study_id": session.study_id,
        "is_alive": session.is_alive(),
    }
@router.get("/sessions/{session_id}")
async def get_session(session_id: str):
    """Return stored info for *session_id*; 404 when nothing is found."""
    info = get_session_manager().get_session_info(session_id)
    if not info:
        raise HTTPException(status_code=404, detail="Session not found")
    return info
@router.post("/sessions/{session_id}/mode")
async def switch_session_mode(session_id: str, request: SwitchModeRequest):
    """
    Switch session mode (requires session restart).

    Args:
        session_id: Session to update.
        request: carries the new mode ("user" or "power").

    Raises:
        HTTPException: 404 when the manager reports an unknown session
            (ValueError), 500 for any other failure.
    """
    try:
        manager = get_session_manager()
        session = await manager.switch_mode(session_id, request.mode)
    except ValueError as exc:
        raise HTTPException(status_code=404, detail=str(exc))
    except Exception as exc:
        raise HTTPException(status_code=500, detail=str(exc))
    return {
        "session_id": session.session_id,
        "mode": session.mode,
        "message": f"Mode switched to {request.mode}",
    }
@router.post("/sessions/{session_id}/study")
async def set_session_study(session_id: str, study_id: str):
    """
    Update the study context for an existing session.

    Args:
        session_id: Target session (path parameter).
        study_id: Study to attach (query parameter under FastAPI defaults).

    Raises:
        HTTPException: 404 when the session is unknown, 500 otherwise.
    """
    try:
        manager = get_session_manager()
        await manager.set_study_context(session_id, study_id)
        return {"message": f"Study context updated to {study_id}"}
    except ValueError as e:
        # Consistency fix: switch_session_mode maps ValueError (unknown
        # session) to 404; this endpoint previously collapsed it into a
        # generic 500.
        raise HTTPException(status_code=404, detail=str(e))
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e))
@router.websocket("/sessions/{session_id}/ws")
async def session_websocket(websocket: WebSocket, session_id: str):
    """
    WebSocket for real-time chat with a session.

    Message formats (client -> server):
        {"type": "message", "content": "user message"}
        {"type": "set_study", "study_id": "study_name"}
        {"type": "ping"}

    Message formats (server -> client):
        {"type": "text", "content": "..."}
        {"type": "tool_call", "tool": {...}}
        {"type": "tool_result", "result": {...}}
        {"type": "done", "tool_calls": [...]}
        {"type": "error", "message": "..."}
        {"type": "pong"}
        {"type": "context_updated", "study_id": "..."}
    """
    await websocket.accept()
    manager = get_session_manager()
    session = manager.get_session(session_id)
    if not session:
        await websocket.send_json({"type": "error", "message": "Session not found"})
        await websocket.close()
        return
    try:
        while True:
            data = await websocket.receive_json()
            msg_type = data.get("type")
            if msg_type == "message":
                content = data.get("content", "")
                if not content:
                    continue
                # Relay every chunk (text / tool_call / tool_result / done)
                # from the session straight to the client.
                async for chunk in manager.send_message(session_id, content):
                    await websocket.send_json(chunk)
            elif msg_type == "set_study":
                study_id = data.get("study_id")
                if study_id:
                    await manager.set_study_context(session_id, study_id)
                    await websocket.send_json({
                        "type": "context_updated",
                        "study_id": study_id,
                    })
            elif msg_type == "ping":
                await websocket.send_json({"type": "pong"})
            # Unknown message types are silently ignored.
    except WebSocketDisconnect:
        pass  # client went away; nothing to clean up
    except Exception as e:
        # Best-effort error report — the socket may already be dead. The
        # inner handler was a bare `except:`, which also swallowed
        # SystemExit/KeyboardInterrupt/CancelledError; narrowed to Exception.
        try:
            await websocket.send_json({"type": "error", "message": str(e)})
        except Exception:
            pass
# ========== LEGACY: Stateless Endpoints (backwards compatible) ==========
@router.get("/status")
async def get_claude_status():
    """
    Check whether the Claude CLI binary is available on PATH.

    Returns:
        dict with an "available" flag, a human-readable "message",
        and "mode" (always "cli" for this implementation).
    """
    import shutil

    found = shutil.which("claude") is not None
    message = "Claude CLI is available" if found else "Claude CLI not found in PATH"
    return {
        "available": found,
        "message": message,
        "mode": "cli",  # we shell out to the CLI rather than calling an API
    }
@router.post("/chat", response_model=ChatResponse)
async def chat_with_claude(request: ChatRequest):
    """
    Send a message to Claude via CLI with Atomizer context.

    Args:
        request: ChatRequest with message, optional study_id, and
            conversation history.

    Returns:
        ChatResponse with Claude's reply and any tool calls.

    Raises:
        HTTPException: 500 wrapping any failure from the CLI agent.
    """
    try:
        from api.services.claude_cli_agent import AtomizerCLIAgent

        agent = AtomizerCLIAgent(study_id=request.study_id)
        # Keep only history entries whose content is a plain string;
        # anything else is dropped.
        history = [
            msg
            for msg in (request.conversation_history or [])
            if isinstance(msg.get('content'), str)
        ]
        result = await agent.chat(request.message, history)
        return ChatResponse(
            response=result["response"],
            tool_calls=result.get("tool_calls"),
            study_id=request.study_id
        )
    except Exception as e:
        raise HTTPException(
            status_code=500,
            detail=f"Chat error: {str(e)}"
        )
@router.post("/chat/stream")
async def chat_stream(request: ChatRequest):
    """
    Stream a response from Claude CLI token by token.

    Args:
        request: ChatRequest with message and optional context.

    Returns:
        StreamingResponse emitting text/event-stream "data:" events — one
        per token, then {"done": true}; failures yield {"error": "..."}.
    """
    async def generate():
        try:
            from api.services.claude_cli_agent import AtomizerCLIAgent

            agent = AtomizerCLIAgent(study_id=request.study_id)
            # Only string-content history entries are forwarded.
            history = [
                msg
                for msg in (request.conversation_history or [])
                if isinstance(msg.get('content'), str)
            ]
            async for token in agent.chat_stream(request.message, history):
                yield f"data: {json.dumps({'token': token})}\n\n"
            yield f"data: {json.dumps({'done': True})}\n\n"
        except Exception as e:
            yield f"data: {json.dumps({'error': str(e)})}\n\n"

    return StreamingResponse(
        generate(),
        media_type="text/event-stream",
        headers={
            "Cache-Control": "no-cache",
            "Connection": "keep-alive",
        }
    )
@router.websocket("/chat/ws")
async def websocket_chat(websocket: WebSocket):
    """
    WebSocket endpoint for real-time chat (legacy; one stateless agent per
    message, history kept on the connection).

    Message format (client -> server):
        {"type": "message", "content": "user message", "study_id": "optional"}
        {"type": "clear"}

    Message format (server -> client):
        {"type": "response", "content": "...", "tool_calls": [...]}
        {"type": "cleared"}
        {"type": "error", "message": "..."}

    (Docstring corrected: the previous version documented "token"/"done"
    events, but this handler uses non-streaming chat and sends a single
    "response" event.)
    """
    await websocket.accept()
    conversation_history = []
    try:
        from api.services.claude_cli_agent import AtomizerCLIAgent
        while True:
            data = await websocket.receive_json()
            msg_type = data.get("type")
            if msg_type == "message":
                content = data.get("content", "")
                study_id = data.get("study_id")
                if not content:
                    continue
                # Fresh agent per message; continuity comes from the
                # conversation_history we accumulate below.
                agent = AtomizerCLIAgent(study_id=study_id)
                try:
                    result = await agent.chat(content, conversation_history)
                    await websocket.send_json({
                        "type": "response",
                        "content": result["response"],
                        "tool_calls": result.get("tool_calls", [])
                    })
                    # Only record the exchange after a successful reply.
                    conversation_history.append({"role": "user", "content": content})
                    conversation_history.append(
                        {"role": "assistant", "content": result["response"]})
                except Exception as e:
                    await websocket.send_json({
                        "type": "error",
                        "message": str(e)
                    })
            elif msg_type == "clear":
                conversation_history = []
                await websocket.send_json({"type": "cleared"})
    except WebSocketDisconnect:
        pass  # normal disconnect
    except Exception as e:
        # Best-effort error report; the socket may already be closed. Was a
        # bare `except:` which also swallowed SystemExit/KeyboardInterrupt/
        # CancelledError; narrowed to Exception.
        try:
            await websocket.send_json({
                "type": "error",
                "message": str(e)
            })
        except Exception:
            pass
@router.get("/suggestions")
async def get_chat_suggestions(study_id: Optional[str] = None):
    """
    Get contextual chat suggestions based on the current study.

    Args:
        study_id: Optional study to tailor suggestions for.

    Returns:
        {"suggestions": [...]} — five study-specific prompts (or three
        generic starters) followed by the first three general prompts.
    """
    general = [
        "What's the status of my optimization?",
        "Show me the best designs found",
        "Compare the top 3 trials",
        "What parameters have the most impact?",
        "Explain the convergence behavior"
    ]
    if study_id:
        leading = [
            f"Summarize the {study_id} study",
            "What's the current best objective value?",
            "Are there any failed trials? Why?",
            "Show parameter sensitivity analysis",
            "What should I try next to improve results?"
        ]
    else:
        leading = [
            "List all available studies",
            "Help me create a new study",
            "What can you help me with?"
        ]
    return {"suggestions": leading + general[:3]}