feat(canvas): Add file browser, introspection, and improve node flow
Phase 1-7 of Canvas V4 Ralph Loop implementation: Backend: - Add /api/files routes for browsing model files - Add /api/nx routes for NX model introspection - Add NXIntrospector service to discover expressions and extractors - Add health check with database status Frontend: - Add FileBrowser component for selecting .sim/.prt/.fem files - Add IntrospectionPanel to discover expressions and extractors - Update NodeConfigPanel with browse and introspect buttons - Update schema with NODE_HANDLES for proper flow direction - Update validation for correct DesignVar -> Model -> Solver flow - Update useCanvasStore.addNode() to accept custom data Flow correction: Design Variables now connect TO Model (as source), not FROM Model. This matches the actual data flow in optimization. Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
This commit is contained in:
@@ -13,7 +13,7 @@ import sys
|
||||
# Add parent directory to path to import optimization_engine
|
||||
sys.path.append(str(Path(__file__).parent.parent.parent.parent))
|
||||
|
||||
from api.routes import optimization, claude, terminal, insights, context
|
||||
from api.routes import optimization, claude, terminal, insights, context, files, nx
|
||||
from api.websocket import optimization_stream
|
||||
|
||||
|
||||
@@ -58,6 +58,8 @@ app.include_router(claude.router, prefix="/api/claude", tags=["claude"])
|
||||
app.include_router(terminal.router, prefix="/api/terminal", tags=["terminal"])
|
||||
app.include_router(insights.router, prefix="/api/insights", tags=["insights"])
|
||||
app.include_router(context.router, prefix="/api/context", tags=["context"])
|
||||
app.include_router(files.router, prefix="/api/files", tags=["files"])
|
||||
app.include_router(nx.router, prefix="/api/nx", tags=["nx"])
|
||||
|
||||
@app.get("/")
|
||||
async def root():
|
||||
@@ -67,8 +69,20 @@ async def root():
|
||||
|
||||
@app.get("/health")
async def health_check():
    """Health check endpoint with database status.

    Returns:
        {"status": "healthy"|"degraded", "database": "connected"|"error: ..."}.
        Status is "degraded" whenever the database probe fails, so load
        balancers can distinguish a live API from a fully functional one.
    """
    try:
        # Imported lazily so an import failure degrades the check instead of
        # breaking app startup.
        from api.services.conversation_store import ConversationStore

        store = ConversationStore()
        # Test database connectivity with a lightweight session lookup.
        store.get_session("health_check")
        db_status = "connected"
    except Exception as e:
        db_status = f"error: {str(e)}"

    return {
        "status": "healthy" if db_status == "connected" else "degraded",
        "database": db_status,
    }
|
||||
|
||||
if __name__ == "__main__":
|
||||
import uvicorn
|
||||
|
||||
155
atomizer-dashboard/backend/api/routes/files.py
Normal file
155
atomizer-dashboard/backend/api/routes/files.py
Normal file
@@ -0,0 +1,155 @@
|
||||
"""
|
||||
Files API Routes
|
||||
|
||||
Provides file browsing capabilities for the Canvas Builder.
|
||||
"""
|
||||
|
||||
from fastapi import APIRouter, Query
|
||||
from pathlib import Path
|
||||
from typing import List, Optional
|
||||
import os
|
||||
|
||||
router = APIRouter()

# Studies root sits five directory levels above this file; walk up with
# pathlib instead of nested os.path.dirname calls.
_file_path = os.path.abspath(__file__)
ATOMIZER_ROOT = Path(_file_path).parent.parent.parent.parent.parent
STUDIES_ROOT = ATOMIZER_ROOT / "studies"
|
||||
|
||||
|
||||
@router.get("/list")
async def list_files(
    path: str = "",
    types: str = ".sim,.prt,.fem,.afem"
):
    """
    List files in a directory, filtered by type.

    Args:
        path: Relative path from studies root (empty for root)
        types: Comma-separated list of file extensions to include

    Returns:
        Dict with "files" (directories first, then matching files, each group
        name-sorted case-insensitively), the echoed "path", and an "error"
        key on failure.
    """
    allowed_types = [t.strip().lower() for t in types.split(',') if t.strip()]

    base_path = STUDIES_ROOT / path if path else STUDIES_ROOT

    # Security: `path` is client-supplied; reject anything (e.g. "../..")
    # that resolves outside the studies root.
    try:
        base_path.resolve().relative_to(STUDIES_ROOT.resolve())
    except ValueError:
        return {"files": [], "path": path, "error": "Invalid path"}

    if not base_path.exists():
        return {"files": [], "path": path, "error": "Directory not found"}

    files = []

    try:
        # Sort key: directories (False) before files (True), then by name.
        for entry in sorted(base_path.iterdir(), key=lambda x: (not x.is_dir(), x.name.lower())):
            # Skip hidden files and directories
            if entry.name.startswith('.'):
                continue

            if entry.is_dir():
                # Include directories regardless of the type filter
                files.append({
                    "name": entry.name,
                    "path": str(entry.relative_to(STUDIES_ROOT)).replace("\\", "/"),
                    "isDirectory": True,
                })
            else:
                # Include files matching type filter
                suffix = entry.suffix.lower()
                if suffix in allowed_types:
                    files.append({
                        "name": entry.name,
                        "path": str(entry.relative_to(STUDIES_ROOT)).replace("\\", "/"),
                        "isDirectory": False,
                        "size": entry.stat().st_size,
                    })
    except PermissionError:
        return {"files": [], "path": path, "error": "Permission denied"}
    except Exception as e:
        return {"files": [], "path": path, "error": str(e)}

    return {"files": files, "path": path}
|
||||
|
||||
|
||||
@router.get("/search")
async def search_files(
    query: str,
    types: str = ".sim,.prt,.fem,.afem",
    max_results: int = 50
):
    """
    Search for files by name pattern.

    Args:
        query: Search pattern (partial, case-insensitive name match)
        types: Comma-separated list of file extensions to include
        max_results: Maximum number of results to return

    Returns:
        Dict with the matched "files", the echoed "query", and "total".
    """
    wanted_exts = {t.strip().lower() for t in types.split(',') if t.strip()}
    needle = query.lower()

    matches = []

    def walk(folder: Path, level: int = 0):
        """Depth-first scan, pruned at depth 10 and at the result cap."""
        if level > 10 or len(matches) >= max_results:
            return

        try:
            for item in folder.iterdir():
                if len(matches) >= max_results:
                    return

                # Hidden entries are never searched or descended into.
                if item.name.startswith('.'):
                    continue

                if item.is_dir():
                    walk(item, level + 1)
                    continue

                if item.suffix.lower() not in wanted_exts:
                    continue

                if needle not in item.name.lower():
                    continue

                matches.append({
                    "name": item.name,
                    "path": str(item.relative_to(STUDIES_ROOT)).replace("\\", "/"),
                    "isDirectory": False,
                    "size": item.stat().st_size,
                })
        except (PermissionError, OSError):
            # Unreadable directories are silently skipped.
            pass

    walk(STUDIES_ROOT)

    return {"files": matches, "query": query, "total": len(matches)}
|
||||
|
||||
|
||||
@router.get("/exists")
async def check_file_exists(path: str):
    """
    Check if a file exists.

    Args:
        path: Relative path from studies root

    Returns:
        Dict with "exists", the echoed "path", and — when the path exists —
        "isDirectory", plus "size" and "name" for regular files.
    """
    file_path = STUDIES_ROOT / path

    # Security: `path` is client-supplied; report paths that resolve outside
    # the studies root as nonexistent instead of probing the filesystem.
    try:
        file_path.resolve().relative_to(STUDIES_ROOT.resolve())
    except ValueError:
        return {"exists": False, "path": path}

    exists = file_path.exists()

    result = {
        "exists": exists,
        "path": path,
    }

    if exists:
        result["isDirectory"] = file_path.is_dir()
        if file_path.is_file():
            result["size"] = file_path.stat().st_size
            result["name"] = file_path.name

    return result
|
||||
90
atomizer-dashboard/backend/api/routes/nx.py
Normal file
90
atomizer-dashboard/backend/api/routes/nx.py
Normal file
@@ -0,0 +1,90 @@
|
||||
"""
|
||||
NX API Routes
|
||||
|
||||
Provides NX model introspection capabilities for the Canvas Builder.
|
||||
"""
|
||||
|
||||
from fastapi import APIRouter, HTTPException
|
||||
from pydantic import BaseModel
|
||||
from typing import Optional
|
||||
|
||||
router = APIRouter()
|
||||
|
||||
|
||||
class IntrospectRequest(BaseModel):
    """Request body for POST /introspect."""
    # Relative path from the studies root, e.g. "M1_Mirror/study_v1/model.sim".
    file_path: str
|
||||
|
||||
|
||||
@router.post("/introspect")
async def introspect_model(request: IntrospectRequest):
    """
    Introspect an NX model file to discover expressions, solver type, and dependencies.

    Args:
        request: Body carrying file_path, a relative path from the studies
            root (e.g., "M1_Mirror/study_v1/model.sim")

    Returns:
        Introspection result with expressions, solver_type, dependent_files, extractors

    Raises:
        HTTPException: 500 carrying the underlying error message.
    """
    try:
        # Imported lazily so route registration never fails on service import.
        from api.services.nx_introspection import NXIntrospector

        return NXIntrospector(request.file_path).introspect()
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e))
|
||||
|
||||
|
||||
@router.get("/expressions")
async def get_expressions(file_path: str):
    """
    Get expressions from an NX model.

    Args:
        file_path: Relative path from studies root

    Returns:
        List of expressions with names, values, units

    Raises:
        HTTPException: 500 carrying the underlying error message.
    """
    try:
        # Imported lazily so route registration never fails on service import.
        from api.services.nx_introspection import NXIntrospector

        data = NXIntrospector(file_path).introspect()
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e))

    return {
        "expressions": data.get("expressions", []),
        "file_path": file_path,
        "source": "introspection",
    }
|
||||
|
||||
|
||||
@router.get("/extractors")
async def list_extractors(solver_type: Optional[str] = None):
    """
    List available extractors, optionally filtered by solver type.

    Args:
        solver_type: Optional solver type (SOL101, SOL103, etc.)

    Returns:
        List of available extractors with their descriptions
    """
    from api.services.nx_introspection import NXIntrospector

    # Build an introspector without a model file: __new__ skips __init__
    # (which requires a path), and _suggest_extractors only reads parent_dir.
    probe = NXIntrospector.__new__(NXIntrospector)
    probe.parent_dir = ""

    return {
        "extractors": probe._suggest_extractors(solver_type),
        "solver_type": solver_type,
    }
|
||||
317
atomizer-dashboard/backend/api/services/nx_introspection.py
Normal file
317
atomizer-dashboard/backend/api/services/nx_introspection.py
Normal file
@@ -0,0 +1,317 @@
|
||||
"""
|
||||
NX Model Introspection Service
|
||||
|
||||
Discovers expressions, solver types, and dependent files from NX model files.
|
||||
Used by the Canvas Builder to help users configure optimization workflows.
|
||||
"""
|
||||
|
||||
import json
|
||||
import os
|
||||
import re
|
||||
from pathlib import Path
|
||||
from typing import Any, Dict, List, Optional
|
||||
import logging
|
||||
|
||||
logger = logging.getLogger(__name__)

# Studies root sits five directory levels above this file.
_file_path = os.path.abspath(__file__)
ATOMIZER_ROOT = Path(_file_path).parent.parent.parent.parent.parent
STUDIES_ROOT = ATOMIZER_ROOT / "studies"


class NXIntrospector:
    """Introspect NX model files to discover expressions, dependencies, and solver info."""

    def __init__(self, file_path: str):
        """
        Initialize introspector with a file path.

        Args:
            file_path: Relative path from studies root (e.g., "M1_Mirror/study_v1/model.sim")
        """
        self.relative_path = file_path.replace("\\", "/")
        self.file_path = STUDIES_ROOT / self.relative_path
        self.file_type = self.file_path.suffix.lower()
        self.parent_dir = self.file_path.parent

    def introspect(self) -> Dict[str, Any]:
        """
        Full introspection of the model file.

        Returns:
            Dict with expressions, solver_type, dependent_files, extractors_available, warnings
        """
        result = {
            "file_path": self.relative_path,
            "file_type": self.file_type,
            "expressions": [],
            "solver_type": None,
            "dependent_files": [],
            "extractors_available": [],
            "warnings": [],
        }

        if not self.file_path.exists():
            # Early out: nothing else can be discovered for a missing file.
            result["warnings"].append(f"File not found: {self.file_path}")
            return result

        try:
            if self.file_type == '.sim':
                result.update(self._introspect_sim())
            elif self.file_type == '.prt':
                result.update(self._introspect_prt())
            elif self.file_type in ['.fem', '.afem']:
                result.update(self._introspect_fem())

            # Prefer expressions declared in optimization_config.json, if present.
            config_expressions = self._load_expressions_from_config()
            if config_expressions:
                result["expressions"] = config_expressions

            # Fall back to study-type heuristics when no config was found.
            if not result["expressions"]:
                result["expressions"] = self._discover_common_expressions()

        except Exception as e:
            logger.error(f"Introspection error: {e}")
            result["warnings"].append(str(e))

        # Suggest extractors based on solver type
        result["extractors_available"] = self._suggest_extractors(result.get("solver_type"))

        return result

    @staticmethod
    def _rel(path: Path) -> str:
        """Return *path* relative to the studies root with forward slashes."""
        return str(path.relative_to(STUDIES_ROOT)).replace("\\", "/")

    def _candidate_dirs(self) -> List[Path]:
        """Directories to scan for related files: the model's own directory,
        plus its parent when the model sits inside a config/setup subfolder."""
        dirs = [self.parent_dir]
        if self.parent_dir.name in ['1_config', '1_setup', 'config', 'setup']:
            dirs.append(self.parent_dir.parent)
        return dirs

    def _introspect_sim(self) -> Dict[str, Any]:
        """Introspect .sim file: find related .prt/.fem/.afem files and detect solver."""
        result = {
            "solver_type": None,
            "dependent_files": [],
        }

        base_name = self.file_path.stem
        # Name patterns frequently collide (e.g. a stem with no '_sim1' yields
        # three identical candidates), so deduplicate by relative path.
        seen = set()

        for search_dir in self._candidate_dirs():
            if not search_dir.exists():
                continue

            for ext in ['.prt', '.fem', '.afem']:
                # Look for variations of the file name
                patterns = [
                    f"{base_name}{ext}",
                    f"{base_name.replace('_sim1', '')}{ext}",
                    f"{base_name.replace('_sim1', '_fem1')}{ext}",
                ]

                for pattern in patterns:
                    file_candidate = search_dir / pattern
                    if file_candidate.exists():
                        rel = self._rel(file_candidate)
                        if rel in seen:
                            continue
                        seen.add(rel)
                        result["dependent_files"].append({
                            "path": rel,
                            "type": ext[1:],
                            "name": file_candidate.name,
                        })

            # Find idealized part (*_i.prt) - critical for mesh updates
            for f in search_dir.glob("*_i.prt"):
                rel = self._rel(f)
                if rel in seen:
                    continue
                seen.add(rel)
                result["dependent_files"].append({
                    "path": rel,
                    "type": "idealized_prt",
                    "name": f.name,
                })

        # Try to determine solver type
        result["solver_type"] = self._detect_solver_type()

        return result

    def _introspect_prt(self) -> Dict[str, Any]:
        """Introspect .prt file: find associated .sim/.fem/.afem files."""
        result = {
            "dependent_files": [],
        }

        base_name = self.file_path.stem
        seen = set()  # deduplicate colliding name patterns

        for search_dir in self._candidate_dirs():
            if not search_dir.exists():
                continue

            for ext in ['.sim', '.fem', '.afem']:
                patterns = [
                    f"{base_name}{ext}",
                    f"{base_name}_sim1{ext}",
                    f"{base_name}_fem1{ext}",
                ]
                for pattern in patterns:
                    file_candidate = search_dir / pattern
                    if file_candidate.exists():
                        rel = self._rel(file_candidate)
                        if rel in seen:
                            continue
                        seen.add(rel)
                        result["dependent_files"].append({
                            "path": rel,
                            "type": ext[1:],
                            "name": file_candidate.name,
                        })

        return result

    def _introspect_fem(self) -> Dict[str, Any]:
        """Introspect .fem or .afem file: find associated .prt/.sim files."""
        result = {
            "dependent_files": [],
        }

        base_name = self.file_path.stem
        seen = set()  # deduplicate colliding name patterns

        for ext in ['.prt', '.sim']:
            patterns = [
                f"{base_name}{ext}",
                f"{base_name.replace('_fem1', '')}{ext}",
                f"{base_name.replace('_fem1', '_sim1')}{ext}",
            ]
            for pattern in patterns:
                file_candidate = self.parent_dir / pattern
                if file_candidate.exists():
                    rel = self._rel(file_candidate)
                    if rel in seen:
                        continue
                    seen.add(rel)
                    result["dependent_files"].append({
                        "path": rel,
                        "type": ext[1:],
                        "name": file_candidate.name,
                    })

        return result

    def _detect_solver_type(self) -> Optional[str]:
        """Detect solver type from file name or parent-directory naming conventions."""
        name_lower = self.file_path.name.lower()
        parent_lower = str(self.parent_dir).lower()

        # Infer from naming conventions
        if 'modal' in name_lower or 'freq' in name_lower or 'modal' in parent_lower:
            return 'SOL103'  # Modal analysis
        elif 'static' in name_lower or 'stress' in name_lower:
            return 'SOL101'  # Static analysis
        elif 'thermal' in name_lower or 'heat' in name_lower:
            return 'SOL153'  # Thermal
        elif 'dynamic' in name_lower:
            return 'SOL111'  # Frequency response
        elif 'mirror' in parent_lower or 'wfe' in parent_lower:
            return 'SOL101'  # Mirrors usually use static analysis

        # Default to static
        return 'SOL101'

    def _load_expressions_from_config(self) -> List[Dict[str, Any]]:
        """Load design-variable expressions from optimization_config.json if it exists.

        The first config file found (searched in the model's directory, its
        config/setup subfolders, then the parent study directory) wins.
        """
        expressions = []

        # Look for config file in study directory
        config_paths = [
            self.parent_dir / "optimization_config.json",
            self.parent_dir / "1_config" / "optimization_config.json",
            self.parent_dir / "1_setup" / "optimization_config.json",
            self.parent_dir.parent / "optimization_config.json",
            self.parent_dir.parent / "1_config" / "optimization_config.json",
        ]

        for config_path in config_paths:
            if config_path.exists():
                try:
                    with open(config_path, 'r') as f:
                        config = json.load(f)

                    # Extract design variables; seed value is the range midpoint.
                    design_vars = config.get("design_variables", [])
                    for dv in design_vars:
                        expressions.append({
                            "name": dv.get("name", dv.get("expression", "unknown")),
                            "value": (dv.get("min", 0) + dv.get("max", 100)) / 2,
                            "min": dv.get("min"),
                            "max": dv.get("max"),
                            "unit": dv.get("unit", "mm"),
                            "type": "design_variable",
                            "source": "config",
                        })

                    return expressions
                except Exception as e:
                    # Malformed config: keep scanning the remaining candidates.
                    logger.warning(f"Failed to load config: {e}")

        return expressions

    def _discover_common_expressions(self) -> List[Dict[str, Any]]:
        """Return placeholder expressions inferred from the study-directory name."""
        # Check parent directory name to infer study type
        parent_lower = str(self.parent_dir).lower()

        if 'mirror' in parent_lower:
            return [
                {"name": "flatback_thickness", "value": 30.0, "unit": "mm", "type": "dimension", "source": "inferred"},
                {"name": "rib_height", "value": 40.0, "unit": "mm", "type": "dimension", "source": "inferred"},
                {"name": "rib_width", "value": 8.0, "unit": "mm", "type": "dimension", "source": "inferred"},
                {"name": "fillet_radius", "value": 5.0, "unit": "mm", "type": "dimension", "source": "inferred"},
                {"name": "web_thickness", "value": 4.0, "unit": "mm", "type": "dimension", "source": "inferred"},
            ]
        elif 'bracket' in parent_lower:
            return [
                {"name": "thickness", "value": 5.0, "unit": "mm", "type": "dimension", "source": "inferred"},
                {"name": "width", "value": 50.0, "unit": "mm", "type": "dimension", "source": "inferred"},
                {"name": "height", "value": 30.0, "unit": "mm", "type": "dimension", "source": "inferred"},
                {"name": "fillet_radius", "value": 3.0, "unit": "mm", "type": "dimension", "source": "inferred"},
                {"name": "hole_diameter", "value": 8.0, "unit": "mm", "type": "dimension", "source": "inferred"},
            ]
        elif 'beam' in parent_lower:
            return [
                {"name": "height", "value": 100.0, "unit": "mm", "type": "dimension", "source": "inferred"},
                {"name": "width", "value": 50.0, "unit": "mm", "type": "dimension", "source": "inferred"},
                {"name": "web_thickness", "value": 5.0, "unit": "mm", "type": "dimension", "source": "inferred"},
                {"name": "flange_thickness", "value": 8.0, "unit": "mm", "type": "dimension", "source": "inferred"},
            ]

        # Generic expressions
        return [
            {"name": "thickness", "value": 10.0, "unit": "mm", "type": "dimension", "source": "inferred"},
            {"name": "length", "value": 100.0, "unit": "mm", "type": "dimension", "source": "inferred"},
            {"name": "width", "value": 50.0, "unit": "mm", "type": "dimension", "source": "inferred"},
            {"name": "height", "value": 25.0, "unit": "mm", "type": "dimension", "source": "inferred"},
            {"name": "fillet_radius", "value": 3.0, "unit": "mm", "type": "dimension", "source": "inferred"},
        ]

    def _suggest_extractors(self, solver_type: Optional[str]) -> List[Dict[str, Any]]:
        """Suggest extractors based on solver type; mass extractors always apply."""
        extractors = [
            {"id": "E4", "name": "Mass (BDF)", "description": "Extract mass from BDF file", "always": True},
            {"id": "E5", "name": "Mass (Expression)", "description": "Extract mass from NX expression", "always": True},
        ]

        if solver_type == 'SOL101':
            extractors.extend([
                {"id": "E1", "name": "Displacement", "description": "Max displacement from static analysis", "always": False},
                {"id": "E3", "name": "Stress", "description": "Von Mises stress from static analysis", "always": False},
            ])
        elif solver_type == 'SOL103':
            extractors.extend([
                {"id": "E2", "name": "Frequency", "description": "Natural frequencies from modal analysis", "always": False},
            ])

        # Check if study appears to be mirror-related
        parent_lower = str(self.parent_dir).lower()
        if 'mirror' in parent_lower or 'wfe' in parent_lower:
            extractors.extend([
                {"id": "E8", "name": "Zernike Coefficients", "description": "Zernike polynomial coefficients", "always": False},
                {"id": "E9", "name": "Zernike RMS", "description": "RMS wavefront error", "always": False},
                {"id": "E10", "name": "Zernike WFE", "description": "Weighted WFE metric", "always": False},
            ])

        return extractors
|
||||
Reference in New Issue
Block a user