feat(dashboard): Enhanced chat, spec management, and Claude integration
Backend: - spec.py: New AtomizerSpec REST API endpoints - spec_manager.py: SpecManager service for unified config - interview_engine.py: Study creation interview logic - claude.py: Enhanced Claude API with context - optimization.py: Extended optimization endpoints - context_builder.py, session_manager.py: Improved services Frontend: - Chat components: Enhanced message rendering, tool call cards - Hooks: useClaudeCode, useSpecWebSocket, improved useChat - Pages: Updated Dashboard, Analysis, Insights, Setup, Home - Components: ParallelCoordinatesPlot, ParetoPlot improvements - App.tsx: Route updates for canvas/studio Infrastructure: - vite.config.ts: Build configuration updates - start/stop-dashboard.bat: Script improvements
This commit is contained in:
@@ -38,16 +38,30 @@ def resolve_study_path(study_id: str) -> Path:
|
||||
"""Find study folder by scanning all topic directories.
|
||||
|
||||
Supports nested folder structure: studies/Topic/study_name/
|
||||
Study ID is the short name (e.g., 'm1_mirror_adaptive_V14')
|
||||
Study ID can be:
|
||||
- Short name (e.g., 'm1_mirror_adaptive_V14') - scans all topic folders
|
||||
- Full nested path (e.g., 'M1_Mirror/m1_mirror_cost_reduction_lateral')
|
||||
|
||||
Returns the full path to the study directory.
|
||||
Raises HTTPException 404 if not found.
|
||||
"""
|
||||
# Handle nested path format (e.g., "M1_Mirror/m1_mirror_cost_reduction_lateral")
|
||||
if "/" in study_id:
|
||||
# Try with forward slashes
|
||||
nested_path = STUDIES_DIR / study_id
|
||||
if nested_path.exists() and nested_path.is_dir():
|
||||
if _is_valid_study_dir(nested_path):
|
||||
return nested_path
|
||||
# Try with backslashes (Windows path)
|
||||
nested_path = STUDIES_DIR / study_id.replace("/", "\\")
|
||||
if nested_path.exists() and nested_path.is_dir():
|
||||
if _is_valid_study_dir(nested_path):
|
||||
return nested_path
|
||||
|
||||
# First check direct path (backwards compatibility for flat structure)
|
||||
direct_path = STUDIES_DIR / study_id
|
||||
if direct_path.exists() and direct_path.is_dir():
|
||||
# Verify it's actually a study (has 1_setup or config)
|
||||
if (direct_path / "1_setup").exists() or (direct_path / "optimization_config.json").exists():
|
||||
if _is_valid_study_dir(direct_path):
|
||||
return direct_path
|
||||
|
||||
# Scan topic folders for nested structure
|
||||
@@ -55,13 +69,21 @@ def resolve_study_path(study_id: str) -> Path:
|
||||
if topic_dir.is_dir() and not topic_dir.name.startswith('.'):
|
||||
study_dir = topic_dir / study_id
|
||||
if study_dir.exists() and study_dir.is_dir():
|
||||
# Verify it's actually a study
|
||||
if (study_dir / "1_setup").exists() or (study_dir / "optimization_config.json").exists():
|
||||
if _is_valid_study_dir(study_dir):
|
||||
return study_dir
|
||||
|
||||
raise HTTPException(status_code=404, detail=f"Study not found: {study_id}")
|
||||
|
||||
|
||||
def _is_valid_study_dir(study_dir: Path) -> bool:
|
||||
"""Check if a directory is a valid study directory."""
|
||||
return (
|
||||
(study_dir / "1_setup").exists() or
|
||||
(study_dir / "optimization_config.json").exists() or
|
||||
(study_dir / "atomizer_spec.json").exists()
|
||||
)
|
||||
|
||||
|
||||
def get_study_topic(study_dir: Path) -> Optional[str]:
|
||||
"""Get the topic folder name for a study, or None if in root."""
|
||||
# Check if parent is a topic folder (not the root studies dir)
|
||||
@@ -1542,16 +1564,17 @@ async def get_study_image(study_id: str, image_path: str):
|
||||
raise HTTPException(status_code=500, detail=f"Failed to serve image: {str(e)}")
|
||||
|
||||
|
||||
@router.get("/studies/{study_id}/config")
|
||||
@router.get("/studies/{study_id:path}/config")
|
||||
async def get_study_config(study_id: str):
|
||||
"""
|
||||
Get the full optimization_config.json for a study
|
||||
Get the study configuration - reads from atomizer_spec.json (v2.0) first,
|
||||
falls back to legacy optimization_config.json if not found.
|
||||
|
||||
Args:
|
||||
study_id: Study identifier
|
||||
|
||||
Returns:
|
||||
JSON with the complete configuration
|
||||
JSON with the complete configuration in a unified format
|
||||
"""
|
||||
try:
|
||||
study_dir = resolve_study_path(study_id)
|
||||
@@ -1559,7 +1582,22 @@ async def get_study_config(study_id: str):
|
||||
if not study_dir.exists():
|
||||
raise HTTPException(status_code=404, detail=f"Study {study_id} not found")
|
||||
|
||||
# Look for config in various locations
|
||||
# Priority 1: atomizer_spec.json (v2.0 unified format)
|
||||
spec_file = study_dir / "atomizer_spec.json"
|
||||
if spec_file.exists():
|
||||
with open(spec_file) as f:
|
||||
spec = json.load(f)
|
||||
|
||||
# Transform AtomizerSpec to the expected config format
|
||||
config = _transform_spec_to_config(spec, study_id)
|
||||
return {
|
||||
"config": config,
|
||||
"path": str(spec_file),
|
||||
"study_id": study_id,
|
||||
"source": "atomizer_spec"
|
||||
}
|
||||
|
||||
# Priority 2: Legacy optimization_config.json
|
||||
config_file = study_dir / "1_setup" / "optimization_config.json"
|
||||
if not config_file.exists():
|
||||
config_file = study_dir / "optimization_config.json"
|
||||
@@ -1573,7 +1611,8 @@ async def get_study_config(study_id: str):
|
||||
return {
|
||||
"config": config,
|
||||
"path": str(config_file),
|
||||
"study_id": study_id
|
||||
"study_id": study_id,
|
||||
"source": "legacy_config"
|
||||
}
|
||||
|
||||
except HTTPException:
|
||||
@@ -1582,6 +1621,118 @@ async def get_study_config(study_id: str):
|
||||
raise HTTPException(status_code=500, detail=f"Failed to read config: {str(e)}")
|
||||
|
||||
|
||||
def _transform_spec_to_config(spec: dict, study_id: str) -> dict:
|
||||
"""Transform AtomizerSpec v2.0 format to legacy config format for backwards compatibility."""
|
||||
meta = spec.get("meta", {})
|
||||
model = spec.get("model", {})
|
||||
optimization = spec.get("optimization", {})
|
||||
|
||||
# Transform design variables
|
||||
design_variables = []
|
||||
for dv in spec.get("design_variables", []):
|
||||
bounds = dv.get("bounds", {})
|
||||
design_variables.append({
|
||||
"name": dv.get("name"),
|
||||
"expression_name": dv.get("expression_name"),
|
||||
"type": "float" if dv.get("type") == "continuous" else dv.get("type", "float"),
|
||||
"min": bounds.get("min"),
|
||||
"max": bounds.get("max"),
|
||||
"low": bounds.get("min"), # Alias for compatibility
|
||||
"high": bounds.get("max"), # Alias for compatibility
|
||||
"baseline": dv.get("baseline"),
|
||||
"unit": dv.get("units"),
|
||||
"units": dv.get("units"),
|
||||
"enabled": dv.get("enabled", True)
|
||||
})
|
||||
|
||||
# Transform objectives
|
||||
objectives = []
|
||||
for obj in spec.get("objectives", []):
|
||||
source = obj.get("source", {})
|
||||
objectives.append({
|
||||
"name": obj.get("name"),
|
||||
"direction": obj.get("direction", "minimize"),
|
||||
"weight": obj.get("weight", 1.0),
|
||||
"target": obj.get("target"),
|
||||
"unit": obj.get("units"),
|
||||
"units": obj.get("units"),
|
||||
"extractor_id": source.get("extractor_id"),
|
||||
"output_key": source.get("output_key")
|
||||
})
|
||||
|
||||
# Transform constraints
|
||||
constraints = []
|
||||
for con in spec.get("constraints", []):
|
||||
constraints.append({
|
||||
"name": con.get("name"),
|
||||
"type": _operator_to_type(con.get("operator", "<=")),
|
||||
"operator": con.get("operator"),
|
||||
"max_value": con.get("threshold") if con.get("operator") in ["<=", "<"] else None,
|
||||
"min_value": con.get("threshold") if con.get("operator") in [">=", ">"] else None,
|
||||
"bound": con.get("threshold"),
|
||||
"unit": con.get("units"),
|
||||
"units": con.get("units")
|
||||
})
|
||||
|
||||
# Transform extractors
|
||||
extractors = []
|
||||
for ext in spec.get("extractors", []):
|
||||
extractors.append({
|
||||
"name": ext.get("name"),
|
||||
"type": ext.get("type"),
|
||||
"builtin": ext.get("builtin", True),
|
||||
"config": ext.get("config", {}),
|
||||
"outputs": ext.get("outputs", [])
|
||||
})
|
||||
|
||||
# Get algorithm info
|
||||
algorithm = optimization.get("algorithm", {})
|
||||
budget = optimization.get("budget", {})
|
||||
|
||||
# Build the config in legacy format
|
||||
config = {
|
||||
"study_name": meta.get("study_name", study_id),
|
||||
"description": meta.get("description", ""),
|
||||
"version": meta.get("version", "2.0"),
|
||||
"design_variables": design_variables,
|
||||
"objectives": objectives,
|
||||
"constraints": constraints,
|
||||
"extractors": extractors,
|
||||
"optimization": {
|
||||
"algorithm": algorithm.get("type", "TPE"),
|
||||
"n_trials": budget.get("max_trials", 100),
|
||||
"max_time_hours": budget.get("max_time_hours"),
|
||||
"convergence_patience": budget.get("convergence_patience")
|
||||
},
|
||||
"optimization_settings": {
|
||||
"sampler": algorithm.get("type", "TPE"),
|
||||
"n_trials": budget.get("max_trials", 100)
|
||||
},
|
||||
"algorithm": {
|
||||
"name": "Optuna",
|
||||
"sampler": algorithm.get("type", "TPE"),
|
||||
"n_trials": budget.get("max_trials", 100)
|
||||
},
|
||||
"model": model,
|
||||
"sim_file": model.get("sim", {}).get("path") if isinstance(model.get("sim"), dict) else None
|
||||
}
|
||||
|
||||
return config
|
||||
|
||||
|
||||
def _operator_to_type(operator: str) -> str:
|
||||
"""Convert constraint operator to legacy type string."""
|
||||
mapping = {
|
||||
"<=": "le",
|
||||
"<": "le",
|
||||
">=": "ge",
|
||||
">": "ge",
|
||||
"==": "eq",
|
||||
"=": "eq"
|
||||
}
|
||||
return mapping.get(operator, "le")
|
||||
|
||||
|
||||
# ============================================================================
|
||||
# Process Control Endpoints
|
||||
# ============================================================================
|
||||
@@ -2851,7 +3002,162 @@ async def get_study_runs(study_id: str):
|
||||
|
||||
|
||||
class UpdateConfigRequest(BaseModel):
|
||||
config: dict
|
||||
config: Optional[dict] = None
|
||||
intent: Optional[dict] = None
|
||||
|
||||
|
||||
def intent_to_config(intent: dict, existing_config: Optional[dict] = None) -> dict:
    """
    Convert canvas intent format to optimization_config.json format.

    Preserves existing config fields that aren't in the intent. The input
    ``existing_config`` is never mutated: a deep copy is taken before any
    nested sections (e.g. ``simulation``) are updated.

    Args:
        intent: Canvas intent payload; all sections (model, solver,
            design_variables, objectives, constraints, extractors,
            optimization, surrogate) are optional.
        existing_config: Previously stored config to merge into, or None.

    Returns:
        A config dict in the legacy optimization_config.json format.
    """
    from copy import deepcopy

    # Deep-copy so nested dicts (e.g. config['simulation']) belonging to the
    # caller's existing_config are not mutated as a side effect (the previous
    # shallow .copy() leaked in-place updates back to the caller).
    config = deepcopy(existing_config) if existing_config else {}

    # Model file → simulation section; infer fem/sim companion files from the
    # model name unless the existing config already names them.
    if intent.get('model', {}).get('path'):
        model_path = Path(intent['model']['path']).name
        if 'simulation' not in config:
            config['simulation'] = {}
        config['simulation']['model_file'] = model_path
        base_name = model_path.replace('.prt', '')
        if not config['simulation'].get('fem_file'):
            config['simulation']['fem_file'] = f"{base_name}_fem1.fem"
        if not config['simulation'].get('sim_file'):
            config['simulation']['sim_file'] = f"{base_name}_sim1.sim"

    # Solver: map Nastran SOL types to analysis_types (unknown → static).
    if intent.get('solver', {}).get('type'):
        solver_type = intent['solver']['type']
        if 'simulation' not in config:
            config['simulation'] = {}
        config['simulation']['solver'] = 'nastran'
        sol_to_analysis = {
            'SOL101': ['static'],
            'SOL103': ['modal'],
            'SOL105': ['buckling'],
            'SOL106': ['nonlinear'],
            'SOL111': ['modal', 'frequency_response'],
            'SOL112': ['modal', 'transient'],
        }
        config['simulation']['analysis_types'] = sol_to_analysis.get(solver_type, ['static'])

    # Design variables
    if intent.get('design_variables'):
        config['design_variables'] = [
            {
                'parameter': dv.get('name', dv.get('expression_name', '')),
                'bounds': [dv.get('min', 0), dv.get('max', 100)],
                'description': dv.get('description', f"Design variable: {dv.get('name', '')}"),
            }
            for dv in intent['design_variables']
        ]

    # Extractors, keyed by name; referenced from objectives/constraints below.
    extractor_map = {}
    if intent.get('extractors'):
        for ext in intent['extractors']:
            extractor_map[ext.get('name', '')] = ext

    def _extraction(ext: dict) -> dict:
        # Shared extraction payload shape for objectives and constraints.
        return {
            'action': _extractor_id_to_action(ext.get('id', '')),
            'domain': 'result_extraction',
            'params': ext.get('config', {}),
        }

    # Objectives
    if intent.get('objectives'):
        config['objectives'] = []
        for obj in intent['objectives']:
            obj_config = {
                'name': obj.get('name', 'objective'),
                'goal': obj.get('direction', 'minimize'),
                'weight': obj.get('weight', 1.0),
                'description': obj.get('description', f"Objective: {obj.get('name', '')}"),
            }
            extractor_name = obj.get('extractor')
            if extractor_name and extractor_name in extractor_map:
                obj_config['extraction'] = _extraction(extractor_map[extractor_name])
            config['objectives'].append(obj_config)

    # Constraints
    if intent.get('constraints'):
        config['constraints'] = []
        for con in intent['constraints']:
            op = con.get('operator', '<=')
            con_type = 'less_than' if '<' in op else 'greater_than' if '>' in op else 'equal_to'
            con_config = {
                'name': con.get('name', 'constraint'),
                'type': con_type,
                'threshold': con.get('value', 0),
                'description': con.get('description', f"Constraint: {con.get('name', '')}"),
            }
            extractor_name = con.get('extractor')
            if extractor_name and extractor_name in extractor_map:
                con_config['extraction'] = _extraction(extractor_map[extractor_name])
            config['constraints'].append(con_config)

    # Optimization settings: trial budget and Optuna sampler name.
    if intent.get('optimization'):
        opt = intent['optimization']
        if 'optimization_settings' not in config:
            config['optimization_settings'] = {}
        if opt.get('max_trials'):
            config['optimization_settings']['n_trials'] = opt['max_trials']
        if opt.get('method'):
            # Map method names to Optuna sampler names; unknown names pass through.
            method_map = {
                'TPE': 'TPESampler',
                'CMA-ES': 'CmaEsSampler',
                'NSGA-II': 'NSGAIISampler',
                'RandomSearch': 'RandomSampler',
                'GP-BO': 'GPSampler',
            }
            config['optimization_settings']['sampler'] = method_map.get(opt['method'], opt['method'])

    # Surrogate (only written when explicitly enabled)
    if intent.get('surrogate', {}).get('enabled'):
        config['surrogate'] = {
            'type': intent['surrogate'].get('type', 'MLP'),
            'min_trials': intent['surrogate'].get('min_trials', 20),
        }

    return config
|
||||
|
||||
|
||||
def _extractor_id_to_action(ext_id: str) -> str:
|
||||
"""Map extractor IDs (E1, E2, etc.) to extraction action names."""
|
||||
action_map = {
|
||||
'E1': 'extract_displacement',
|
||||
'E2': 'extract_frequency',
|
||||
'E3': 'extract_stress',
|
||||
'E4': 'extract_mass',
|
||||
'E5': 'extract_mass',
|
||||
'E8': 'extract_zernike',
|
||||
'E9': 'extract_zernike',
|
||||
'E10': 'extract_zernike',
|
||||
'displacement': 'extract_displacement',
|
||||
'frequency': 'extract_frequency',
|
||||
'stress': 'extract_stress',
|
||||
'mass': 'extract_mass',
|
||||
'mass_bdf': 'extract_mass',
|
||||
'mass_cad': 'extract_mass',
|
||||
'zernike': 'extract_zernike',
|
||||
'zernike_opd': 'extract_zernike',
|
||||
}
|
||||
return action_map.get(ext_id, 'extract_displacement')
|
||||
|
||||
|
||||
@router.put("/studies/{study_id}/config")
|
||||
@@ -2859,9 +3165,13 @@ async def update_study_config(study_id: str, request: UpdateConfigRequest):
|
||||
"""
|
||||
Update the optimization_config.json for a study
|
||||
|
||||
Accepts either:
|
||||
- {"config": {...}} - Direct config object (overwrites)
|
||||
- {"intent": {...}} - Canvas intent (converted and merged with existing)
|
||||
|
||||
Args:
|
||||
study_id: Study identifier
|
||||
request: New configuration data
|
||||
request: New configuration data (config or intent)
|
||||
|
||||
Returns:
|
||||
JSON with success status
|
||||
@@ -2891,9 +3201,24 @@ async def update_study_config(study_id: str, request: UpdateConfigRequest):
|
||||
backup_file = config_file.with_suffix('.json.backup')
|
||||
shutil.copy(config_file, backup_file)
|
||||
|
||||
# Determine which format was provided
|
||||
if request.config is not None:
|
||||
# Direct config update
|
||||
new_config = request.config
|
||||
elif request.intent is not None:
|
||||
# Convert intent to config, merging with existing
|
||||
with open(config_file, 'r') as f:
|
||||
existing_config = json.load(f)
|
||||
new_config = intent_to_config(request.intent, existing_config)
|
||||
else:
|
||||
raise HTTPException(
|
||||
status_code=400,
|
||||
detail="Request must include either 'config' or 'intent' field"
|
||||
)
|
||||
|
||||
# Write new config
|
||||
with open(config_file, 'w') as f:
|
||||
json.dump(request.config, f, indent=2)
|
||||
json.dump(new_config, f, indent=2)
|
||||
|
||||
return {
|
||||
"success": True,
|
||||
|
||||
Reference in New Issue
Block a user