Major changes: - Dashboard: WebSocket-based chat with session management - Dashboard: New chat components (ChatPane, ChatInput, ModeToggle) - Dashboard: Enhanced UI with parallel coordinates chart - MCP Server: New atomizer-tools server for Claude integration - Extractors: Enhanced Zernike OPD extractor - Reports: Improved report generator New studies (configs and scripts only): - M1 Mirror: Cost reduction campaign studies - Simple Beam, Simple Bracket, UAV Arm studies Note: Large iteration data (2_iterations/, best_design_archive/) excluded via .gitignore - kept on local Gitea only. Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
203 lines
7.2 KiB
Python
203 lines
7.2 KiB
Python
"""
|
|
Atomizer Claude CLI Agent Service
|
|
|
|
Uses Claude Code CLI (subprocess) instead of direct API calls.
|
|
This leverages the user's Claude Code subscription.
|
|
"""
|
|
|
|
import asyncio
import codecs
import json
import os
import subprocess

from pathlib import Path
from typing import Optional, List, Dict, Any, AsyncGenerator
|
|
|
|
# Repo root: this file lives five directory levels below it.
ATOMIZER_ROOT = Path(__file__).parent.parent.parent.parent.parent
# Base studies directory
STUDIES_DIR = ATOMIZER_ROOT / "studies"


class AtomizerCLIAgent:
    """Claude agent that runs the Claude Code CLI as a subprocess.

    Instead of calling an API directly, this shells out to the locally
    installed ``claude`` binary, so requests run under the user's
    Claude Code subscription.
    """

    def __init__(self, study_id: Optional[str] = None):
        """
        Args:
            study_id: Optional name of a study folder under ``studies/``.
                When given (and the folder exists on disk), study details
                are folded into the system context.
        """
        self.study_id = study_id
        self.study_dir = STUDIES_DIR / study_id if study_id else None
        self.system_context = self._build_system_context()

    def _build_system_context(self) -> str:
        """Build the context string prepended to every prompt."""
        context = """You are the Atomizer Assistant - an expert in FEA optimization built into the Atomizer dashboard.

Your role:
- Help engineers with structural optimization using NX Nastran simulations
- Analyze optimization results, identify trends, explain convergence
- Query trial data, compare configurations, find best designs
- Explain FEA concepts, Zernike polynomials, wavefront error, stress analysis

Important:
- Be concise and professional
- Use technical language appropriate for engineers
- Never mention being an AI or Claude - you are "Atomizer Assistant"
"""

        # Add study-specific context if available
        if self.study_id and self.study_dir and self.study_dir.exists():
            context += f"\n\nCurrent Study: {self.study_id}\n"

            # Config may live under 1_setup/ or directly in the study dir.
            config_path = self.study_dir / "1_setup" / "optimization_config.json"
            if not config_path.exists():
                config_path = self.study_dir / "optimization_config.json"

            if config_path.exists():
                try:
                    with open(config_path) as f:
                        config = json.load(f)

                    dvs = config.get('design_variables', [])
                    objs = config.get('objectives', [])

                    if dvs:
                        context += f"Design Variables: {', '.join(dv['name'] for dv in dvs[:5])}\n"
                    if objs:
                        context += f"Objectives: {', '.join(o['name'] for o in objs)}\n"
                except Exception:
                    # Best effort: a missing or malformed config must never
                    # break chat, so fall back to the generic context.
                    pass

        return context

    async def chat(self, message: str, conversation_history: Optional[List[Dict]] = None) -> Dict[str, Any]:
        """
        Process a chat message using Claude Code CLI

        Args:
            message: User's message
            conversation_history: Previous messages for context

        Returns:
            Dict with keys ``response``, ``tool_calls``, ``conversation``.
            Failures are reported in-band as an "Error: ..." response
            rather than raised, so callers always receive a dict.
        """
        # Build the full prompt with context
        full_prompt = self._build_prompt(message, conversation_history)

        try:
            # Run Claude Code CLI
            result = await self._run_claude_cli(full_prompt)

            return {
                "response": result,
                "tool_calls": [],
                "conversation": []
            }
        except Exception as e:
            # Top-level boundary: surface the failure as a chat response.
            return {
                "response": f"Error: {str(e)}",
                "tool_calls": [],
                "conversation": []
            }

    async def chat_stream(self, message: str, conversation_history: Optional[List[Dict]] = None) -> AsyncGenerator[str, None]:
        """
        Stream a chat response using Claude Code CLI

        Args:
            message: User's message
            conversation_history: Previous messages

        Yields:
            Response tokens as they arrive
        """
        full_prompt = self._build_prompt(message, conversation_history)

        try:
            async for chunk in self._run_claude_cli_stream(full_prompt):
                yield chunk
        except Exception as e:
            # Surface failures in-band; the stream itself never raises.
            yield f"Error: {str(e)}"

    def _build_prompt(self, message: str, conversation_history: Optional[List[Dict]] = None) -> str:
        """Build full prompt with context and history"""
        parts = [self.system_context]

        # Add conversation history (last few messages for context)
        if conversation_history:
            parts.append("\n--- Previous conversation ---")
            for msg in conversation_history[-6:]:  # Last 3 exchanges
                role = "User" if msg.get("role") == "user" else "Assistant"
                parts.append(f"{role}: {msg.get('content', '')}")
            parts.append("--- End of history ---\n")

        parts.append(f"User question: {message}")
        parts.append("\nRespond concisely and helpfully:")

        return "\n".join(parts)

    async def _run_claude_cli(self, prompt: str) -> str:
        """Run Claude Code CLI once and return its full stdout.

        Raises:
            RuntimeError: If the CLI exits non-zero, times out, or is not
                installed. (RuntimeError is a subclass of Exception, so
                existing ``except Exception`` callers are unaffected.)
        """
        try:
            # Use claude CLI with --print flag for non-interactive output
            # The prompt is passed as the last positional argument
            process = await asyncio.create_subprocess_exec(
                "claude",
                "--print",
                prompt,
                stdout=asyncio.subprocess.PIPE,
                stderr=asyncio.subprocess.PIPE,
                cwd=str(ATOMIZER_ROOT)
            )

            try:
                stdout, stderr = await asyncio.wait_for(
                    process.communicate(),
                    timeout=120.0  # 120 second timeout for longer responses
                )
            except asyncio.TimeoutError:
                # The original code raised without reaping the child, leaking
                # the subprocess; kill and reap it before reporting.
                process.kill()
                await process.communicate()
                raise RuntimeError("Request timed out") from None

            if process.returncode != 0:
                error_msg = stderr.decode() if stderr else "Unknown error"
                raise RuntimeError(f"Claude CLI error: {error_msg}")

            return stdout.decode().strip()

        except FileNotFoundError:
            raise RuntimeError("Claude CLI not found. Make sure 'claude' is in PATH.") from None

    async def _run_claude_cli_stream(self, prompt: str) -> AsyncGenerator[str, None]:
        """Run Claude Code CLI and stream its stdout character by character.

        Decodes incrementally so multi-byte UTF-8 characters split across
        read chunks are not corrupted, and drains stderr in the background
        so the child can never deadlock on a full stderr pipe.
        """
        try:
            # Use claude CLI with --print flag
            process = await asyncio.create_subprocess_exec(
                "claude",
                "--print",
                prompt,
                stdout=asyncio.subprocess.PIPE,
                stderr=asyncio.subprocess.PIPE,
                cwd=str(ATOMIZER_ROOT)
            )

            # Drain stderr concurrently: if the child writes a lot to stderr
            # while we only read stdout, its stderr pipe would fill and block.
            stderr_task = asyncio.create_task(process.stderr.read())

            # Incremental decoder: a fixed-size read can end mid-character,
            # and plain bytes.decode() would raise UnicodeDecodeError there.
            decoder = codecs.getincrementaldecoder("utf-8")(errors="replace")

            while True:
                chunk = await process.stdout.read(100)  # Read in small chunks
                if not chunk:
                    break

                # Yield character by character for smooth streaming effect
                for char in decoder.decode(chunk):
                    yield char
                    await asyncio.sleep(0.01)  # Small delay for streaming effect

            # Flush any incomplete trailing bytes held by the decoder.
            for char in decoder.decode(b"", final=True):
                yield char

            await process.wait()
            stderr_bytes = await stderr_task

            if process.returncode != 0:
                error_msg = stderr_bytes.decode() if stderr_bytes else "Unknown error"
                yield f"\n\nError: {error_msg}"

        except asyncio.TimeoutError:
            yield "\n\nError: Request timed out"
        except FileNotFoundError:
            yield "\n\nError: Claude CLI not found. Make sure 'claude' is in PATH."