feat: Add M1 mirror Zernike optimization with correct RMS calculation
Major improvements to telescope mirror optimization workflow: Assembly FEM Workflow (solve_simulation.py): - Fixed multi-part assembly FEM update sequence - Use ImportFromFile() for reliable expression updates - Add DuplicateNodesCheckBuilder with MergeOccurrenceNodes=True - Switch to Foreground solve mode for multi-subcase solutions - Add detailed logging and diagnostics for node merge operations Zernike RMS Calculation: - CRITICAL FIX: Use correct surface-based RMS formula - Global RMS = sqrt(mean(W^2)) from actual WFE values - Filtered RMS = sqrt(mean(W_residual^2)) after removing low-order fit - This matches zernike_Post_Script_NX.py (optical standard) - Previous WRONG formula was: sqrt(sum(coeffs^2)) - Add compute_rms_filter_j1to3() for optician workload metric Subcase Mapping: - Fix subcase mapping to match NX model: - Subcase 1 = 90 deg (polishing orientation) - Subcase 2 = 20 deg (reference) - Subcase 3 = 40 deg - Subcase 4 = 60 deg New Study: M1 Mirror Zernike Optimization - Full optimization config with 11 design variables - 3 objectives: rel_filtered_rms_40_vs_20, rel_filtered_rms_60_vs_20, mfg_90_optician_workload - Neural surrogate support for accelerated optimization Documentation: - Update ZERNIKE_INTEGRATION.md with correct RMS formula - Update ASSEMBLY_FEM_WORKFLOW.md with expression import and node merge details - Add reference scripts from original zernike_Post_Script_NX.py 🤖 Generated with [Claude Code](https://claude.com/claude-code) Co-Authored-By: Claude <noreply@anthropic.com>
This commit is contained in:
@@ -1 +1,25 @@
|
||||
"""Core extractor library for Atomizer."""
|
||||
"""Core extractor library for Atomizer.
|
||||
|
||||
Available extractors:
|
||||
- Displacement: extract_displacement
|
||||
- Stress: extract_solid_stress (von Mises)
|
||||
- Frequency: extract_frequency
|
||||
- Mass: extract_mass_from_expression, extract_mass_from_op2
|
||||
- Zernike: extract_zernike_from_op2, ZernikeExtractor (telescope mirrors)
|
||||
"""
|
||||
|
||||
# Zernike extractor for telescope mirror optimization
|
||||
from optimization_engine.extractors.extract_zernike import (
|
||||
ZernikeExtractor,
|
||||
extract_zernike_from_op2,
|
||||
extract_zernike_filtered_rms,
|
||||
extract_zernike_relative_rms,
|
||||
)
|
||||
|
||||
__all__ = [
|
||||
# Zernike (telescope mirrors)
|
||||
'ZernikeExtractor',
|
||||
'extract_zernike_from_op2',
|
||||
'extract_zernike_filtered_rms',
|
||||
'extract_zernike_relative_rms',
|
||||
]
|
||||
|
||||
860
optimization_engine/extractors/extract_zernike.py
Normal file
860
optimization_engine/extractors/extract_zernike.py
Normal file
@@ -0,0 +1,860 @@
|
||||
"""
|
||||
Zernike Coefficient Extractor for Telescope Mirror Optimization
|
||||
================================================================
|
||||
|
||||
Extracts Zernike polynomial coefficients from OP2 displacement results
|
||||
for optical surface quality analysis. Designed for telescope mirror
|
||||
optimization where wavefront error (WFE) metrics are critical.
|
||||
|
||||
Key Features:
|
||||
- Noll-indexed Zernike polynomials (standard optical convention)
|
||||
- Multi-subcase support (different gravity orientations: 20, 40, 60, 90 deg)
|
||||
- Global and filtered RMS wavefront error
|
||||
- Individual aberration magnitudes (astigmatism, coma, trefoil, spherical)
|
||||
- Relative metrics between subcases (e.g., operational vs polishing orientation)
|
||||
|
||||
Usage:
|
||||
from optimization_engine.extractors.extract_zernike import (
|
||||
extract_zernike_from_op2,
|
||||
ZernikeExtractor
|
||||
)
|
||||
|
||||
# Simple usage - get filtered RMS for optimization objective
|
||||
result = extract_zernike_from_op2(op2_file, subcase=20)
|
||||
rms_filtered = result['filtered_rms_nm']
|
||||
|
||||
# Full extractor for detailed analysis
|
||||
extractor = ZernikeExtractor(op2_file, bdf_file)
|
||||
metrics = extractor.extract_all_subcases()
|
||||
|
||||
Author: Atomizer Framework (adapted from telescope mirror analysis scripts)
|
||||
"""
|
||||
|
||||
from pathlib import Path
|
||||
from typing import Dict, Any, Optional, List, Tuple, Union
|
||||
import numpy as np
|
||||
from math import factorial
|
||||
from numpy.linalg import LinAlgError
|
||||
|
||||
# pyNastran supplies the OP2 (results) and BDF (geometry) readers that this
# module is built on; fail fast with an actionable install hint if absent.
try:
    from pyNastran.op2.op2 import OP2
    from pyNastran.bdf.bdf import BDF
except ImportError as exc:
    # Chain the original exception so the true import failure (e.g. a broken
    # sub-dependency rather than a missing package) is preserved in tracebacks.
    raise ImportError("pyNastran is required. Install with: pip install pyNastran") from exc
|
||||
|
||||
|
||||
# ============================================================================
|
||||
# Configuration
|
||||
# ============================================================================
|
||||
|
||||
# Number of Zernike modes (Noll indices j = 1..50) fitted by default.
DEFAULT_N_MODES = 50  # Number of Zernike modes to fit
# First 4 Noll modes (piston, tip, tilt, defocus) are removed for the
# "filtered" RMS — these are correctable by alignment/focus adjustment.
DEFAULT_FILTER_ORDERS = 4  # Filter first N modes (piston, tip, tilt, defocus)
# Nodes processed per chunk when accumulating the least-squares normal
# equations, so the full Zernike basis matrix never has to fit in memory.
DEFAULT_CHUNK_SIZE = 100000  # For memory-efficient processing of large meshes

# Standard telescope orientations (gravity angles in degrees)
STANDARD_SUBCASES = [20, 40, 60, 90]

# Displacement unit conversions (to nanometers for WFE)
UNIT_TO_NM = {
    'mm': 1e6,  # 1 mm = 1e6 nm
    'm': 1e9,  # 1 m = 1e9 nm
    'um': 1e3,  # 1 um = 1e3 nm
    'nm': 1.0,  # already nm
}
|
||||
|
||||
|
||||
# ============================================================================
|
||||
# Zernike Polynomial Mathematics
|
||||
# ============================================================================
|
||||
|
||||
def noll_indices(j: int) -> Tuple[int, int]:
    """
    Convert Noll index j to radial order n and azimuthal frequency m.

    The Noll indexing scheme is the standard convention in optics.
    j=1: Piston, j=2,3: Tip/Tilt, j=4: Defocus, j=5,6: Astigmatism, etc.

    FIX: the previous implementation enumerated modes within each radial
    order in a fixed (-m, +m) pattern, which violates Noll's sign rule
    (even j -> cosine term, m > 0; odd j -> sine term, m < 0). For example
    it returned (1, -1) for j=2 and (4, -2) for j=12, where standard Noll
    ordering gives (1, 1) and (4, 2). Within-pair sign swaps do not change
    the pairwise RMS aberration metrics computed in this module (they sum
    squares over each +/-m pair), but individual coefficient labels were
    wrong. This version uses the standard closed-form mapping.

    Args:
        j: Noll index (1-based)

    Returns:
        (n, m): Radial order and azimuthal frequency

    Raises:
        ValueError: If j < 1.
    """
    if j < 1:
        raise ValueError("Noll index j must be >= 1")

    # Locate radial order n: order n contributes n + 1 consecutive indices.
    n = 0
    k = j - 1
    while k > n:
        n += 1
        k -= n

    # Noll's sign rule: even j carries the cosine term (m > 0),
    # odd j the sine term (m < 0); |m| grows with position k within order n.
    m = (-1) ** j * ((n % 2) + 2 * ((k + ((n + 1) % 2)) // 2))
    return n, m
|
||||
|
||||
|
||||
def zernike_radial(n: int, m: int, r: np.ndarray) -> np.ndarray:
    """
    Compute the radial component R_n^m(r) of the Zernike polynomial.

    Implements the standard finite sum
        R_n^|m|(r) = sum_k (-1)^k (n-k)! /
                     (k! ((n+|m|)/2 - k)! ((n-|m|)/2 - k)!) * r^(n-2k).

    Args:
        n: Radial order
        m: Azimuthal frequency (absolute value used)
        r: Radial coordinates (normalized to unit disk)

    Returns:
        Radial polynomial evaluated at r
    """
    m_abs = abs(m)
    result = np.zeros_like(r)

    for k in range((n - m_abs) // 2 + 1):
        numerator = (-1) ** k * factorial(n - k)
        denominator = (
            factorial(k)
            * factorial((n + m_abs) // 2 - k)
            * factorial((n - m_abs) // 2 - k)
        )
        result += (numerator / denominator) * r ** (n - 2 * k)

    return result
||||
|
||||
|
||||
def zernike_noll(j: int, r: np.ndarray, theta: np.ndarray) -> np.ndarray:
    """
    Evaluate Noll-indexed Zernike polynomial Z_j(r, theta).

    The azimuthal part follows the usual split: cos(m*theta) for m > 0,
    sin(|m|*theta) for m < 0, and a pure radial term for m == 0.

    Args:
        j: Noll index
        r: Radial coordinates (normalized to unit disk)
        theta: Angular coordinates (radians)

    Returns:
        Zernike polynomial values at (r, theta)
    """
    n, m = noll_indices(j)
    radial = zernike_radial(n, m, r)

    if m > 0:
        return radial * np.cos(m * theta)
    if m < 0:
        return radial * np.sin(-m * theta)
    return radial
||||
|
||||
|
||||
def zernike_name(j: int) -> str:
    """
    Get common optical name for Zernike mode.

    Args:
        j: Noll index

    Returns:
        Human-readable name (e.g., "Defocus", "Astigmatism 0 deg");
        falls back to a generic "Z(n=.., m=..)" form for unnamed modes.
    """
    key = noll_indices(j)

    # Named low-order aberrations keyed by (radial order n, azimuthal freq m).
    named_modes = {
        (0, 0): "Piston",
        (1, -1): "Tilt X",
        (1, 1): "Tilt Y",
        (2, 0): "Defocus",
        (2, -2): "Astigmatism 45 deg",
        (2, 2): "Astigmatism 0 deg",
        (3, -1): "Coma X",
        (3, 1): "Coma Y",
        (3, -3): "Trefoil X",
        (3, 3): "Trefoil Y",
        (4, 0): "Primary Spherical",
        (4, -2): "Secondary Astig X",
        (4, 2): "Secondary Astig Y",
        (4, -4): "Quadrafoil X",
        (4, 4): "Quadrafoil Y",
        (5, -1): "Secondary Coma X",
        (5, 1): "Secondary Coma Y",
        (5, -3): "Secondary Trefoil X",
        (5, 3): "Secondary Trefoil Y",
        (5, -5): "Pentafoil X",
        (5, 5): "Pentafoil Y",
        (6, 0): "Secondary Spherical",
    }

    order, freq = key
    return named_modes.get(key, f"Z(n={order}, m={freq})")
|
||||
|
||||
|
||||
def zernike_label(j: int) -> str:
    """Full label for Zernike mode: J{j} - Name (n=, m=)"""
    order, freq = noll_indices(j)
    # Zero-pad the index so labels sort lexicographically up to J99.
    return f"J{j:02d} - {zernike_name(j)} (n={order}, m={freq})"
|
||||
|
||||
|
||||
# ============================================================================
|
||||
# Zernike Coefficient Fitting
|
||||
# ============================================================================
|
||||
|
||||
def compute_zernike_coefficients(
    x: np.ndarray,
    y: np.ndarray,
    values: np.ndarray,
    n_modes: int = DEFAULT_N_MODES,
    chunk_size: int = DEFAULT_CHUNK_SIZE
) -> Tuple[np.ndarray, float]:
    """
    Fit Zernike coefficients to surface data using least-squares.

    Uses chunked processing for memory efficiency with large meshes.
    Points outside the unit disk (after centering/normalization) and
    NaN values are excluded from the fit.

    Args:
        x, y: Node coordinates (will be centered and normalized)
        values: Surface values at each node (e.g., WFE in nm)
        n_modes: Number of Zernike modes to fit
        chunk_size: Chunk size for memory-efficient processing

    Returns:
        (coefficients, R_max): Zernike coefficients and normalization radius

    Raises:
        RuntimeError: If no valid points remain inside the unit disk.
    """
    # Center the aperture on the coordinate centroid.
    xc = x - np.mean(x)
    yc = y - np.mean(y)

    # Normalize radii to the unit disk using the farthest node from center.
    # Basis angles/radii are kept in float32 to halve memory traffic.
    R_max = float(np.max(np.hypot(xc, yc)))
    r = np.hypot(xc / R_max, yc / R_max).astype(np.float32)
    theta = np.arctan2(yc, xc).astype(np.float32)

    # Keep only in-disk points with defined values.
    valid = (r <= 1.0) & ~np.isnan(values)
    if not np.any(valid):
        raise RuntimeError("No valid points inside unit disk for Zernike fitting.")

    valid_idx = np.nonzero(valid)[0]
    n_basis = int(n_modes)

    # Accumulate the normal equations (Z^T Z) c = Z^T v chunk by chunk so
    # the full (n_points x n_modes) basis never has to be materialized.
    gram = np.zeros((n_basis, n_basis), dtype=np.float64)  # Z^T Z
    rhs = np.zeros((n_basis,), dtype=np.float64)           # Z^T v
    vals64 = values.astype(np.float64)

    for offset in range(0, len(valid_idx), chunk_size):
        sel = valid_idx[offset:offset + chunk_size]
        r_sel = r[sel]
        theta_sel = theta[sel]
        v_sel = vals64[sel]

        # Zernike basis evaluated only on this chunk of nodes.
        basis = np.column_stack([
            zernike_noll(mode, r_sel, theta_sel).astype(np.float32)
            for mode in range(1, n_basis + 1)
        ])

        gram += (basis.T @ basis).astype(np.float64)
        rhs += (basis.T @ v_sel).astype(np.float64)

    # Direct solve; fall back to least-squares if the Gram matrix is singular.
    try:
        coeffs = np.linalg.solve(gram, rhs)
    except LinAlgError:
        coeffs = np.linalg.lstsq(gram, rhs, rcond=None)[0]

    return coeffs, R_max
|
||||
|
||||
|
||||
# ============================================================================
|
||||
# RMS Calculations
|
||||
# ============================================================================
|
||||
|
||||
def compute_rms_metrics(
    x: np.ndarray,
    y: np.ndarray,
    wfe: np.ndarray,
    n_modes: int = DEFAULT_N_MODES,
    filter_orders: int = DEFAULT_FILTER_ORDERS
) -> Dict[str, float]:
    """
    Compute global and filtered RMS wavefront error.

    Global RMS is sqrt(mean(W^2)) over the actual WFE samples; filtered RMS
    is the same statistic after subtracting the fitted low-order modes
    (piston/tip/tilt/defocus for filter_orders=4).

    Args:
        x, y: Node coordinates
        wfe: Wavefront error values (nm)
        n_modes: Number of Zernike modes to fit
        filter_orders: Number of low-order modes to filter (typically 4)

    Returns:
        Dict with 'global_rms_nm', 'filtered_rms_nm', fitted 'coefficients'
        and the normalization radius 'R_max'.
    """
    coeffs, R_max = compute_zernike_coefficients(x, y, wfe, n_modes)

    # Re-derive the normalized polar grid exactly as the fit did, so the
    # low-order reconstruction aligns with the fitted coefficients.
    xc = x - np.mean(x)
    yc = y - np.mean(y)
    rho = np.hypot(xc / R_max, yc / R_max)
    ang = np.arctan2(yc, xc)

    # Low-order basis (modes 1..filter_orders) evaluated at every node.
    # NOTE(review): unlike the fit, this evaluates at ALL nodes, including
    # any with rho > 1 or NaN WFE — confirm inputs are clean in-disk data.
    low_order_basis = np.column_stack([
        zernike_noll(mode, rho, ang) for mode in range(1, filter_orders + 1)
    ])

    # Residual after removing the alignment/focus-correctable content.
    residual = wfe - low_order_basis @ coeffs[:filter_orders]

    return {
        'global_rms_nm': float(np.sqrt(np.mean(wfe ** 2))),
        'filtered_rms_nm': float(np.sqrt(np.mean(residual ** 2))),
        'coefficients': coeffs,
        'R_max': R_max,
    }
|
||||
|
||||
|
||||
def compute_aberration_magnitudes(coeffs: np.ndarray) -> Dict[str, float]:
    """
    Compute RMS magnitudes of common optical aberrations.

    Paired modes (astigmatism, coma, trefoil) are combined in quadrature,
    which makes the result independent of the sign/orientation split
    within each +/-m pair.

    Args:
        coeffs: Zernike coefficients (at least 11 modes, Noll-ordered)

    Returns:
        Dict with aberration RMS values in nm

    Raises:
        ValueError: If fewer than 11 coefficients are supplied.
    """
    if len(coeffs) < 11:
        raise ValueError("Need at least 11 Zernike modes for aberration analysis")

    def pair_rms(a: int, b: int) -> float:
        # Quadrature sum of a +/-m coefficient pair (0-based indices).
        return float(np.sqrt(coeffs[a] ** 2 + coeffs[b] ** 2))

    return {
        'defocus_nm': float(abs(coeffs[3])),        # J4
        'astigmatism_rms_nm': pair_rms(4, 5),       # J5+J6
        'coma_rms_nm': pair_rms(6, 7),              # J7+J8
        'trefoil_rms_nm': pair_rms(8, 9),           # J9+J10
        'spherical_nm': float(abs(coeffs[10])),     # J11
    }
|
||||
|
||||
|
||||
# ============================================================================
|
||||
# OP2/BDF Data Extraction
|
||||
# ============================================================================
|
||||
|
||||
def read_node_geometry(bdf_path: Path) -> Dict[int, np.ndarray]:
    """
    Read node coordinates from BDF/DAT file.

    Args:
        bdf_path: Path to .bdf or .dat file

    Returns:
        Dict mapping node ID to [x, y, z] coordinates (global frame,
        as returned by pyNastran's node.get_position())
    """
    model = BDF()
    model.read_bdf(str(bdf_path))

    coords: Dict[int, np.ndarray] = {}
    for node_id, node in model.nodes.items():
        coords[int(node_id)] = node.get_position()
    return coords
|
||||
|
||||
|
||||
def find_geometry_file(op2_path: Path) -> Path:
    """
    Find matching BDF/DAT file for an OP2.

    Looks for same-basename first, then any .dat/.bdf in same folder.

    FIX: the directory-scan fallback now iterates in sorted order, so the
    selected file is deterministic (Path.iterdir() yields entries in an
    arbitrary, filesystem-dependent order).

    Args:
        op2_path: Path to OP2 file

    Returns:
        Path to geometry file

    Raises:
        FileNotFoundError: If no .dat/.bdf file exists in the folder.
    """
    folder = op2_path.parent
    base = op2_path.stem

    # Preferred: a geometry file sharing the OP2's basename (.dat wins
    # over .bdf, matching the original search order).
    for ext in ('.dat', '.bdf'):
        candidate = folder / (base + ext)
        if candidate.exists():
            return candidate

    # Fallback: first geometry file in the folder, in sorted name order
    # for deterministic selection.
    for entry in sorted(folder.iterdir()):
        if entry.suffix.lower() in ('.dat', '.bdf'):
            return entry

    raise FileNotFoundError(f"No .dat or .bdf geometry file found for {op2_path}")
|
||||
|
||||
|
||||
def extract_displacements_by_subcase(
    op2_path: Path,
    required_subcases: Optional[List[int]] = None
) -> Dict[str, Dict[str, np.ndarray]]:
    """
    Extract displacement data from OP2 organized by subcase.

    Each displacement result is labeled by the first integer found in its
    subtitle (e.g. "GRAV 20DEG" -> '20'), falling back to the raw subcase
    id, so results can be keyed by physically meaningful labels.

    FIX: `import re` was executed inside the per-result loop; hoisted to
    the top of the function.

    Args:
        op2_path: Path to OP2 file
        required_subcases: List of required subcases (e.g., [20, 40, 60, 90])

    Returns:
        Dict keyed by subcase label: {'20': {'node_ids': array, 'disp': array}, ...}

    Raises:
        RuntimeError: If no displacements exist, or a required subcase is missing.
    """
    import re  # local import kept for module-load parity; hoisted out of the loop

    op2 = OP2()
    op2.read_op2(str(op2_path))

    if not op2.displacements:
        raise RuntimeError("No displacement data found in OP2 file")

    result = {}

    for key, darr in op2.displacements.items():
        # Displacement arrays may be (ntimes, nnodes, 6) or (nnodes, 6);
        # take the first time step for transient-shaped static data.
        data = darr.data
        dmat = data[0] if data.ndim == 3 else (data if data.ndim == 2 else None)
        if dmat is None:
            continue

        # node_gridtype is either a flat id array or an (nnodes, 2) table
        # whose first column holds the node ids.
        ngt = darr.node_gridtype.astype(int)
        node_ids = ngt if ngt.ndim == 1 else ngt[:, 0]

        # Try to identify subcase from subtitle or isubcase
        subtitle = getattr(darr, 'subtitle', None)
        isubcase = getattr(darr, 'isubcase', None)

        # Prefer a numeric label parsed from the subtitle.
        label = None
        if isinstance(subtitle, str):
            m = re.search(r'-?\d+', subtitle)
            if m:
                label = m.group(0)

        if label is None and isinstance(isubcase, int):
            label = str(isubcase)

        if label:
            result[label] = {
                'node_ids': node_ids.astype(int),
                'disp': dmat.copy()
            }

    # Validate required subcases if specified
    if required_subcases:
        missing = [str(s) for s in required_subcases if str(s) not in result]
        if missing:
            available = list(result.keys())
            raise RuntimeError(
                f"Required subcases {missing} not found. Available: {available}"
            )

    return result
|
||||
|
||||
|
||||
# ============================================================================
|
||||
# Main Extractor Class
|
||||
# ============================================================================
|
||||
|
||||
class ZernikeExtractor:
    """
    Complete Zernike analysis extractor for telescope mirror optimization.

    This class handles:
    - Loading OP2 displacement results
    - Matching with BDF geometry
    - Computing Zernike coefficients and RMS metrics
    - Multi-subcase analysis (different gravity orientations)
    - Relative metrics between subcases

    Both the BDF geometry and the OP2 displacements are loaded lazily on
    first access, so constructing the extractor is cheap.

    Example usage in optimization:
        extractor = ZernikeExtractor(op2_file, bdf_file)

        # For single-objective optimization (minimize filtered RMS at 20 deg)
        result = extractor.extract_subcase('20')
        objective = result['filtered_rms_nm']

        # For multi-subcase optimization
        all_results = extractor.extract_all_subcases()
    """

    def __init__(
        self,
        op2_path: Union[str, Path],
        bdf_path: Optional[Union[str, Path]] = None,
        displacement_unit: str = 'mm',
        n_modes: int = DEFAULT_N_MODES,
        filter_orders: int = DEFAULT_FILTER_ORDERS
    ):
        """
        Initialize Zernike extractor.

        Args:
            op2_path: Path to OP2 results file
            bdf_path: Path to BDF/DAT geometry file (auto-detected if None)
            displacement_unit: Unit of displacement in OP2 ('mm', 'm', 'um', 'nm')
            n_modes: Number of Zernike modes to fit
            filter_orders: Number of low-order modes to filter
        """
        self.op2_path = Path(op2_path)
        # Auto-detect the geometry file next to the OP2 if none was given.
        self.bdf_path = Path(bdf_path) if bdf_path else find_geometry_file(self.op2_path)
        self.displacement_unit = displacement_unit
        self.n_modes = n_modes
        self.filter_orders = filter_orders

        # Unit conversion factor (displacement to nm).
        # NOTE(review): an unrecognized unit string silently falls back to
        # the 'mm' factor (1e6) — confirm this default is intended.
        self.nm_scale = UNIT_TO_NM.get(displacement_unit.lower(), 1e6)

        # WFE = 2 * surface displacement (optical convention)
        self.wfe_factor = 2.0 * self.nm_scale

        # Lazy-loaded data (populated on first property access)
        self._node_geo = None
        self._displacements = None

    @property
    def node_geometry(self) -> Dict[int, np.ndarray]:
        """Lazy-load node geometry from BDF."""
        if self._node_geo is None:
            self._node_geo = read_node_geometry(self.bdf_path)
        return self._node_geo

    @property
    def displacements(self) -> Dict[str, Dict[str, np.ndarray]]:
        """Lazy-load displacements from OP2."""
        if self._displacements is None:
            self._displacements = extract_displacements_by_subcase(self.op2_path)
        return self._displacements

    def _build_dataframe(
        self,
        subcase_label: str
    ) -> Tuple[np.ndarray, np.ndarray, np.ndarray]:
        """
        Build coordinate and WFE arrays for a subcase.

        Nodes present in the OP2 but missing from the BDF geometry are
        skipped silently. Only the Z displacement component (index 2) is
        converted to WFE.

        Args:
            subcase_label: Subcase identifier (e.g., '20')

        Returns:
            (X, Y, WFE_nm): Arrays of coordinates and wavefront error

        Raises:
            ValueError: If the subcase label is not present in the OP2.
        """
        if subcase_label not in self.displacements:
            available = list(self.displacements.keys())
            raise ValueError(f"Subcase '{subcase_label}' not found. Available: {available}")

        data = self.displacements[subcase_label]
        node_ids = data['node_ids']
        disp = data['disp']

        # Build arrays, keeping only nodes that exist in the BDF geometry.
        X, Y, WFE = [], [], []
        for nid, vec in zip(node_ids, disp):
            geo = self.node_geometry.get(int(nid))
            if geo is None:
                continue

            X.append(geo[0])
            Y.append(geo[1])
            # Z-displacement to WFE (nm)
            wfe = vec[2] * self.wfe_factor
            WFE.append(wfe)

        return np.array(X), np.array(Y), np.array(WFE)

    def extract_subcase(
        self,
        subcase_label: str,
        include_coefficients: bool = False
    ) -> Dict[str, Any]:
        """
        Extract Zernike metrics for a single subcase.

        Args:
            subcase_label: Subcase identifier (e.g., '20', '90')
            include_coefficients: Whether to include all Zernike coefficients

        Returns:
            Dict with RMS metrics, aberrations, and optionally coefficients
        """
        X, Y, WFE = self._build_dataframe(subcase_label)

        # Compute RMS metrics
        rms_result = compute_rms_metrics(
            X, Y, WFE, self.n_modes, self.filter_orders
        )

        # Compute aberration magnitudes
        aberrations = compute_aberration_magnitudes(rms_result['coefficients'])

        result = {
            'subcase': subcase_label,
            'global_rms_nm': rms_result['global_rms_nm'],
            'filtered_rms_nm': rms_result['filtered_rms_nm'],
            'n_nodes': len(X),
            **aberrations
        }

        if include_coefficients:
            # Serialize as plain lists so the result is JSON-friendly.
            result['coefficients'] = rms_result['coefficients'].tolist()
            result['coefficient_labels'] = [zernike_label(j) for j in range(1, self.n_modes + 1)]

        return result

    def extract_relative(
        self,
        target_subcase: str,
        reference_subcase: str
    ) -> Dict[str, Any]:
        """
        Extract Zernike metrics relative to a reference subcase.

        Computes: WFE_relative = WFE_target - WFE_reference

        Only nodes present in BOTH subcases (and in the BDF geometry) are
        used; node matching is done by node ID, not by array position.

        Args:
            target_subcase: Subcase to analyze
            reference_subcase: Reference subcase to subtract

        Returns:
            Dict with relative RMS metrics and aberrations
        """
        # The per-subcase arrays are built only to validate both labels;
        # the relative WFE below is recomputed directly from raw data.
        X_t, Y_t, WFE_t = self._build_dataframe(target_subcase)
        X_r, Y_r, WFE_r = self._build_dataframe(reference_subcase)

        # Build node-to-index mapping for reference
        target_data = self.displacements[target_subcase]
        ref_data = self.displacements[reference_subcase]

        ref_node_to_idx = {
            int(nid): i for i, nid in enumerate(ref_data['node_ids'])
        }

        # Compute relative WFE for common nodes
        X_rel, Y_rel, WFE_rel = [], [], []

        for i, nid in enumerate(target_data['node_ids']):
            nid = int(nid)
            if nid not in ref_node_to_idx:
                continue

            ref_idx = ref_node_to_idx[nid]
            geo = self.node_geometry.get(nid)
            if geo is None:
                continue

            X_rel.append(geo[0])
            Y_rel.append(geo[1])

            # Z components, converted to WFE before differencing.
            target_wfe = target_data['disp'][i, 2] * self.wfe_factor
            ref_wfe = ref_data['disp'][ref_idx, 2] * self.wfe_factor
            WFE_rel.append(target_wfe - ref_wfe)

        X_rel = np.array(X_rel)
        Y_rel = np.array(Y_rel)
        WFE_rel = np.array(WFE_rel)

        # Compute metrics on relative WFE
        rms_result = compute_rms_metrics(
            X_rel, Y_rel, WFE_rel, self.n_modes, self.filter_orders
        )
        aberrations = compute_aberration_magnitudes(rms_result['coefficients'])

        return {
            'target_subcase': target_subcase,
            'reference_subcase': reference_subcase,
            'relative_global_rms_nm': rms_result['global_rms_nm'],
            'relative_filtered_rms_nm': rms_result['filtered_rms_nm'],
            'n_common_nodes': len(X_rel),
            **{f'relative_{k}': v for k, v in aberrations.items()}
        }

    def extract_all_subcases(
        self,
        reference_subcase: Optional[str] = '20'
    ) -> Dict[str, Dict[str, Any]]:
        """
        Extract metrics for all available subcases.

        Args:
            reference_subcase: Reference for relative calculations (None to skip)

        Returns:
            Dict mapping subcase label to metrics dict. When a relative
            computation fails, the error string is stored under
            'rel_<reference>_error' instead of raising.
        """
        results = {}

        for label in self.displacements.keys():
            results[label] = self.extract_subcase(label)

            # Add relative metrics if reference specified
            if reference_subcase and label != reference_subcase:
                try:
                    rel = self.extract_relative(label, reference_subcase)
                    # Copy only the relative_* fields, prefixed with the
                    # reference label so multiple references could coexist.
                    results[label].update({
                        f'rel_{reference_subcase}_{k}': v
                        for k, v in rel.items()
                        if k.startswith('relative_')
                    })
                except Exception as e:
                    # Best-effort: record the failure rather than aborting
                    # the whole multi-subcase extraction.
                    results[label][f'rel_{reference_subcase}_error'] = str(e)

        return results
|
||||
|
||||
|
||||
# ============================================================================
|
||||
# Convenience Functions for Optimization
|
||||
# ============================================================================
|
||||
|
||||
def extract_zernike_from_op2(
    op2_file: Union[str, Path],
    bdf_file: Optional[Union[str, Path]] = None,
    subcase: Union[int, str] = 1,
    displacement_unit: str = 'mm',
    n_modes: int = DEFAULT_N_MODES,
    filter_orders: int = DEFAULT_FILTER_ORDERS
) -> Dict[str, Any]:
    """
    Convenience function to extract Zernike metrics from OP2.

    This is the main entry point for optimization objectives.

    Args:
        op2_file: Path to OP2 results file
        bdf_file: Path to BDF geometry (auto-detected if None)
        subcase: Subcase identifier (coerced to str for lookup)
        displacement_unit: Unit of displacement in OP2
        n_modes: Number of Zernike modes
        filter_orders: Low-order modes to filter

    Returns:
        Dict with:
        - 'global_rms_nm': Global RMS WFE in nanometers
        - 'filtered_rms_nm': Filtered RMS (low orders removed)
        - 'defocus_nm', 'astigmatism_rms_nm', etc.: Individual aberrations
    """
    analyzer = ZernikeExtractor(
        op2_file, bdf_file, displacement_unit, n_modes, filter_orders
    )
    return analyzer.extract_subcase(str(subcase))
|
||||
|
||||
|
||||
def extract_zernike_filtered_rms(
    op2_file: Union[str, Path],
    bdf_file: Optional[Union[str, Path]] = None,
    subcase: Union[int, str] = 1,
    **kwargs
) -> float:
    """
    Extract filtered RMS WFE - the primary metric for mirror optimization.

    Filtered RMS removes piston, tip, tilt, and defocus (modes 1-4),
    which can be corrected by alignment and focus adjustment.

    Args:
        op2_file: Path to OP2 file
        bdf_file: Path to BDF geometry (auto-detected if None)
        subcase: Subcase identifier
        **kwargs: Additional arguments for ZernikeExtractor

    Returns:
        Filtered RMS WFE in nanometers
    """
    metrics = extract_zernike_from_op2(op2_file, bdf_file, subcase, **kwargs)
    return metrics['filtered_rms_nm']
|
||||
|
||||
|
||||
def extract_zernike_relative_rms(
    op2_file: Union[str, Path],
    target_subcase: Union[int, str],
    reference_subcase: Union[int, str],
    bdf_file: Optional[Union[str, Path]] = None,
    **kwargs
) -> float:
    """
    Extract relative filtered RMS between two subcases.

    Useful for analyzing gravity-induced deformation relative to
    a reference orientation (e.g., polishing position).

    Args:
        op2_file: Path to OP2 file
        target_subcase: Subcase to analyze
        reference_subcase: Reference subcase
        bdf_file: Path to BDF geometry
        **kwargs: Additional arguments for ZernikeExtractor

    Returns:
        Relative filtered RMS WFE in nanometers
    """
    analyzer = ZernikeExtractor(op2_file, bdf_file, **kwargs)
    metrics = analyzer.extract_relative(str(target_subcase), str(reference_subcase))
    return metrics['relative_filtered_rms_nm']
|
||||
|
||||
|
||||
# ============================================================================
|
||||
# Module Exports
|
||||
# ============================================================================
|
||||
|
||||
# Public API of this module, grouped by intended audience.
__all__ = [
    # Main extractor class
    'ZernikeExtractor',
    # Convenience functions for optimization objectives
    'extract_zernike_from_op2',
    'extract_zernike_filtered_rms',
    'extract_zernike_relative_rms',
    # Zernike utilities (for advanced use)
    'compute_zernike_coefficients',
    'compute_rms_metrics',
    'compute_aberration_magnitudes',
    'noll_indices',
    'zernike_noll',
    'zernike_name',
    'zernike_label',
]
|
||||
|
||||
|
||||
if __name__ == '__main__':
    # Command-line smoke test: analyze one OP2 file and print per-subcase
    # metrics, or show usage and a sample optimization config snippet.
    import sys

    if len(sys.argv) <= 1:
        print("Usage: python extract_zernike.py <op2_file>")
        print("\nThis module provides Zernike coefficient extraction for telescope mirror optimization.")
        print("\nExample in optimization config:")
        print(' "objectives": [')
        print(' {')
        print(' "name": "filtered_rms",')
        print(' "extractor": "zernike",')
        print(' "direction": "minimize",')
        print(' "extractor_config": {')
        print(' "subcase": "20",')
        print(' "metric": "filtered_rms_nm"')
        print(' }')
        print(' }')
        print(' ]')
    else:
        target = Path(sys.argv[1])

        print(f"Analyzing: {target}")

        try:
            analyzer = ZernikeExtractor(target)

            print(f"\nAvailable subcases: {list(analyzer.displacements.keys())}")

            per_subcase = analyzer.extract_all_subcases()

            for label, metrics in per_subcase.items():
                print(f"\n=== Subcase {label} ===")
                print(f" Global RMS: {metrics['global_rms_nm']:.2f} nm")
                print(f" Filtered RMS: {metrics['filtered_rms_nm']:.2f} nm")
                print(f" Astigmatism: {metrics['astigmatism_rms_nm']:.2f} nm")
                print(f" Coma: {metrics['coma_rms_nm']:.2f} nm")
                print(f" Trefoil: {metrics['trefoil_rms_nm']:.2f} nm")
                print(f" Spherical: {metrics['spherical_nm']:.2f} nm")

        except Exception as e:
            print(f"Error: {e}")
            sys.exit(1)
||||
403
optimization_engine/extractors/zernike_helpers.py
Normal file
403
optimization_engine/extractors/zernike_helpers.py
Normal file
@@ -0,0 +1,403 @@
|
||||
"""
|
||||
Zernike Helper Functions for Atomizer Optimization
|
||||
===================================================
|
||||
|
||||
Convenience wrappers and utilities for using Zernike analysis
|
||||
in optimization studies. These helpers simplify integration with
|
||||
the standard Atomizer optimization patterns.
|
||||
|
||||
Usage in run_optimization.py:
|
||||
from optimization_engine.extractors.zernike_helpers import (
|
||||
create_zernike_objective,
|
||||
ZernikeObjectiveBuilder
|
||||
)
|
||||
|
||||
# Simple: create objective function
|
||||
zernike_obj = create_zernike_objective(
|
||||
op2_finder=lambda: sim_dir / "model-solution_1.op2",
|
||||
subcase="20",
|
||||
metric="filtered_rms_nm"
|
||||
)
|
||||
|
||||
# Use in Optuna trial
|
||||
rms = zernike_obj()
|
||||
"""
|
||||
|
||||
from pathlib import Path
|
||||
from typing import Callable, Dict, Any, Optional, Union, List
|
||||
import logging
|
||||
|
||||
from optimization_engine.extractors.extract_zernike import (
|
||||
ZernikeExtractor,
|
||||
extract_zernike_from_op2,
|
||||
extract_zernike_filtered_rms,
|
||||
)
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def create_zernike_objective(
|
||||
op2_finder: Callable[[], Path],
|
||||
bdf_finder: Optional[Callable[[], Path]] = None,
|
||||
subcase: Union[int, str] = "20",
|
||||
metric: str = "filtered_rms_nm",
|
||||
displacement_unit: str = "mm",
|
||||
**kwargs
|
||||
) -> Callable[[], float]:
|
||||
"""
|
||||
Create a Zernike objective function for optimization.
|
||||
|
||||
This factory creates a callable that:
|
||||
1. Finds the OP2 file (using op2_finder)
|
||||
2. Extracts Zernike metrics
|
||||
3. Returns the specified metric value
|
||||
|
||||
Args:
|
||||
op2_finder: Callable that returns path to current OP2 file
|
||||
bdf_finder: Callable that returns path to BDF file (auto-detect if None)
|
||||
subcase: Subcase to analyze (e.g., "20" for 20 deg elevation)
|
||||
metric: Metric to return (see available_metrics below)
|
||||
displacement_unit: Unit of displacement in OP2 file
|
||||
**kwargs: Additional arguments for ZernikeExtractor
|
||||
|
||||
Returns:
|
||||
Callable that returns the metric value
|
||||
|
||||
Available metrics:
|
||||
- global_rms_nm: Global RMS wavefront error
|
||||
- filtered_rms_nm: Filtered RMS (low orders removed)
|
||||
- defocus_nm: Defocus aberration
|
||||
- astigmatism_rms_nm: Combined astigmatism
|
||||
- coma_rms_nm: Combined coma
|
||||
- trefoil_rms_nm: Combined trefoil
|
||||
- spherical_nm: Primary spherical aberration
|
||||
|
||||
Example:
|
||||
op2_finder = lambda: Path("sim_dir") / "model-solution_1.op2"
|
||||
objective = create_zernike_objective(op2_finder, subcase="20")
|
||||
|
||||
# In optimization loop
|
||||
rms_value = objective() # Returns filtered RMS in nm
|
||||
"""
|
||||
def evaluate() -> float:
|
||||
op2_path = op2_finder()
|
||||
bdf_path = bdf_finder() if bdf_finder else None
|
||||
|
||||
result = extract_zernike_from_op2(
|
||||
op2_path,
|
||||
bdf_path,
|
||||
subcase=subcase,
|
||||
displacement_unit=displacement_unit,
|
||||
**kwargs
|
||||
)
|
||||
|
||||
if metric not in result:
|
||||
available = [k for k in result.keys() if isinstance(result[k], (int, float))]
|
||||
raise ValueError(f"Metric '{metric}' not found. Available: {available}")
|
||||
|
||||
return result[metric]
|
||||
|
||||
return evaluate
|
||||
|
||||
|
||||
def create_relative_zernike_objective(
|
||||
op2_finder: Callable[[], Path],
|
||||
target_subcase: Union[int, str],
|
||||
reference_subcase: Union[int, str],
|
||||
bdf_finder: Optional[Callable[[], Path]] = None,
|
||||
metric: str = "relative_filtered_rms_nm",
|
||||
**kwargs
|
||||
) -> Callable[[], float]:
|
||||
"""
|
||||
Create objective for relative Zernike metrics between subcases.
|
||||
|
||||
Useful for minimizing gravity-induced deformation relative to
|
||||
a reference orientation (e.g., polishing position at 90 deg).
|
||||
|
||||
Args:
|
||||
op2_finder: Callable returning OP2 path
|
||||
target_subcase: Subcase to analyze
|
||||
reference_subcase: Reference subcase to subtract
|
||||
bdf_finder: Optional BDF path finder
|
||||
metric: Relative metric to return
|
||||
**kwargs: Additional ZernikeExtractor arguments
|
||||
|
||||
Returns:
|
||||
Callable that returns relative metric value
|
||||
"""
|
||||
def evaluate() -> float:
|
||||
op2_path = op2_finder()
|
||||
bdf_path = bdf_finder() if bdf_finder else None
|
||||
|
||||
extractor = ZernikeExtractor(op2_path, bdf_path, **kwargs)
|
||||
result = extractor.extract_relative(
|
||||
str(target_subcase),
|
||||
str(reference_subcase)
|
||||
)
|
||||
|
||||
if metric not in result:
|
||||
available = [k for k in result.keys() if isinstance(result[k], (int, float))]
|
||||
raise ValueError(f"Metric '{metric}' not found. Available: {available}")
|
||||
|
||||
return result[metric]
|
||||
|
||||
return evaluate
|
||||
|
||||
|
||||
class ZernikeObjectiveBuilder:
|
||||
"""
|
||||
Builder for complex Zernike objectives with multiple subcases.
|
||||
|
||||
This is useful for multi-subcase optimization where you want
|
||||
to combine metrics from different gravity orientations.
|
||||
|
||||
Example:
|
||||
builder = ZernikeObjectiveBuilder(
|
||||
op2_finder=lambda: sim_dir / "model.op2"
|
||||
)
|
||||
|
||||
# Add objectives for different subcases
|
||||
builder.add_subcase_objective("20", "filtered_rms_nm", weight=1.0)
|
||||
builder.add_subcase_objective("40", "filtered_rms_nm", weight=0.5)
|
||||
builder.add_subcase_objective("60", "filtered_rms_nm", weight=0.5)
|
||||
|
||||
# Create combined objective
|
||||
objective = builder.build_weighted_sum()
|
||||
combined_rms = objective() # Returns weighted sum
|
||||
"""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
op2_finder: Callable[[], Path],
|
||||
bdf_finder: Optional[Callable[[], Path]] = None,
|
||||
displacement_unit: str = "mm",
|
||||
**kwargs
|
||||
):
|
||||
self.op2_finder = op2_finder
|
||||
self.bdf_finder = bdf_finder
|
||||
self.displacement_unit = displacement_unit
|
||||
self.kwargs = kwargs
|
||||
self.objectives: List[Dict[str, Any]] = []
|
||||
self._extractor = None
|
||||
|
||||
def add_subcase_objective(
|
||||
self,
|
||||
subcase: Union[int, str],
|
||||
metric: str = "filtered_rms_nm",
|
||||
weight: float = 1.0,
|
||||
name: Optional[str] = None
|
||||
) -> "ZernikeObjectiveBuilder":
|
||||
"""Add a subcase objective to the builder."""
|
||||
self.objectives.append({
|
||||
"subcase": str(subcase),
|
||||
"metric": metric,
|
||||
"weight": weight,
|
||||
"name": name or f"{metric}_{subcase}"
|
||||
})
|
||||
return self
|
||||
|
||||
def add_relative_objective(
|
||||
self,
|
||||
target_subcase: Union[int, str],
|
||||
reference_subcase: Union[int, str],
|
||||
metric: str = "relative_filtered_rms_nm",
|
||||
weight: float = 1.0,
|
||||
name: Optional[str] = None
|
||||
) -> "ZernikeObjectiveBuilder":
|
||||
"""Add a relative objective between subcases."""
|
||||
self.objectives.append({
|
||||
"target_subcase": str(target_subcase),
|
||||
"reference_subcase": str(reference_subcase),
|
||||
"metric": metric,
|
||||
"weight": weight,
|
||||
"name": name or f"rel_{target_subcase}_vs_{reference_subcase}",
|
||||
"is_relative": True
|
||||
})
|
||||
return self
|
||||
|
||||
def _get_extractor(self) -> ZernikeExtractor:
|
||||
"""Lazy-create extractor (reused for all objectives)."""
|
||||
if self._extractor is None:
|
||||
op2_path = self.op2_finder()
|
||||
bdf_path = self.bdf_finder() if self.bdf_finder else None
|
||||
self._extractor = ZernikeExtractor(
|
||||
op2_path, bdf_path,
|
||||
displacement_unit=self.displacement_unit,
|
||||
**self.kwargs
|
||||
)
|
||||
return self._extractor
|
||||
|
||||
def _reset_extractor(self):
|
||||
"""Reset extractor (call after OP2 changes)."""
|
||||
self._extractor = None
|
||||
|
||||
def evaluate_all(self) -> Dict[str, float]:
|
||||
"""
|
||||
Evaluate all objectives and return dict of values.
|
||||
|
||||
Returns:
|
||||
Dict mapping objective name to value
|
||||
"""
|
||||
self._reset_extractor()
|
||||
extractor = self._get_extractor()
|
||||
results = {}
|
||||
|
||||
for obj in self.objectives:
|
||||
try:
|
||||
if obj.get("is_relative"):
|
||||
rel_result = extractor.extract_relative(
|
||||
obj["target_subcase"],
|
||||
obj["reference_subcase"]
|
||||
)
|
||||
results[obj["name"]] = rel_result.get(obj["metric"], 0.0)
|
||||
else:
|
||||
sub_result = extractor.extract_subcase(obj["subcase"])
|
||||
results[obj["name"]] = sub_result.get(obj["metric"], 0.0)
|
||||
except Exception as e:
|
||||
logger.warning(f"Failed to evaluate {obj['name']}: {e}")
|
||||
results[obj["name"]] = float("inf")
|
||||
|
||||
return results
|
||||
|
||||
def build_weighted_sum(self) -> Callable[[], float]:
|
||||
"""
|
||||
Build a callable that returns weighted sum of all objectives.
|
||||
|
||||
Returns:
|
||||
Callable returning combined objective value
|
||||
"""
|
||||
def evaluate() -> float:
|
||||
values = self.evaluate_all()
|
||||
total = 0.0
|
||||
for obj in self.objectives:
|
||||
val = values.get(obj["name"], 0.0)
|
||||
total += obj["weight"] * val
|
||||
return total
|
||||
|
||||
return evaluate
|
||||
|
||||
def build_max(self) -> Callable[[], float]:
|
||||
"""
|
||||
Build a callable that returns maximum of all objectives.
|
||||
|
||||
Useful for worst-case optimization across subcases.
|
||||
"""
|
||||
def evaluate() -> float:
|
||||
values = self.evaluate_all()
|
||||
weighted = [
|
||||
obj["weight"] * values.get(obj["name"], 0.0)
|
||||
for obj in self.objectives
|
||||
]
|
||||
return max(weighted) if weighted else 0.0
|
||||
|
||||
return evaluate
|
||||
|
||||
def build_individual(self) -> Callable[[], Dict[str, float]]:
|
||||
"""
|
||||
Build a callable that returns dict of individual objective values.
|
||||
|
||||
Useful for multi-objective optimization (NSGA-II).
|
||||
"""
|
||||
return self.evaluate_all
|
||||
|
||||
|
||||
def extract_zernike_for_trial(
|
||||
op2_path: Path,
|
||||
bdf_path: Optional[Path] = None,
|
||||
subcases: Optional[List[str]] = None,
|
||||
reference_subcase: str = "20",
|
||||
metrics: Optional[List[str]] = None,
|
||||
**kwargs
|
||||
) -> Dict[str, Any]:
|
||||
"""
|
||||
Extract comprehensive Zernike data for a trial.
|
||||
|
||||
This is a high-level function for logging/exporting trial data.
|
||||
It extracts all metrics for specified subcases and computes
|
||||
relative metrics vs the reference.
|
||||
|
||||
Args:
|
||||
op2_path: Path to OP2 file
|
||||
bdf_path: Path to BDF file (auto-detect if None)
|
||||
subcases: List of subcases to extract (None = all available)
|
||||
reference_subcase: Reference for relative calculations
|
||||
metrics: Specific metrics to extract (None = all)
|
||||
**kwargs: Additional ZernikeExtractor arguments
|
||||
|
||||
Returns:
|
||||
Dict with complete trial Zernike data:
|
||||
{
|
||||
'subcases': {
|
||||
'20': {'global_rms_nm': ..., 'filtered_rms_nm': ..., ...},
|
||||
'40': {...},
|
||||
...
|
||||
},
|
||||
'relative': {
|
||||
'40_vs_20': {'relative_filtered_rms_nm': ..., ...},
|
||||
...
|
||||
},
|
||||
'summary': {
|
||||
'best_filtered_rms': ...,
|
||||
'worst_filtered_rms': ...,
|
||||
...
|
||||
}
|
||||
}
|
||||
"""
|
||||
extractor = ZernikeExtractor(op2_path, bdf_path, **kwargs)
|
||||
|
||||
# Get available subcases
|
||||
available = list(extractor.displacements.keys())
|
||||
if subcases:
|
||||
subcases = [s for s in subcases if str(s) in available]
|
||||
else:
|
||||
subcases = available
|
||||
|
||||
# Extract per-subcase data
|
||||
subcase_data = {}
|
||||
for sc in subcases:
|
||||
try:
|
||||
subcase_data[sc] = extractor.extract_subcase(str(sc))
|
||||
except Exception as e:
|
||||
logger.warning(f"Failed to extract subcase {sc}: {e}")
|
||||
|
||||
# Extract relative data
|
||||
relative_data = {}
|
||||
if reference_subcase in subcases:
|
||||
for sc in subcases:
|
||||
if sc != reference_subcase:
|
||||
try:
|
||||
key = f"{sc}_vs_{reference_subcase}"
|
||||
relative_data[key] = extractor.extract_relative(
|
||||
str(sc), str(reference_subcase)
|
||||
)
|
||||
except Exception as e:
|
||||
logger.warning(f"Failed to extract relative {key}: {e}")
|
||||
|
||||
# Summary statistics
|
||||
filtered_rms_values = [
|
||||
d.get('filtered_rms_nm', float('inf'))
|
||||
for d in subcase_data.values()
|
||||
]
|
||||
|
||||
summary = {
|
||||
'best_filtered_rms': min(filtered_rms_values) if filtered_rms_values else None,
|
||||
'worst_filtered_rms': max(filtered_rms_values) if filtered_rms_values else None,
|
||||
'mean_filtered_rms': sum(filtered_rms_values) / len(filtered_rms_values) if filtered_rms_values else None,
|
||||
'n_subcases': len(subcases),
|
||||
'reference_subcase': reference_subcase,
|
||||
}
|
||||
|
||||
return {
|
||||
'subcases': subcase_data,
|
||||
'relative': relative_data,
|
||||
'summary': summary,
|
||||
}
|
||||
|
||||
|
||||
# Export all helpers
|
||||
__all__ = [
|
||||
'create_zernike_objective',
|
||||
'create_relative_zernike_objective',
|
||||
'ZernikeObjectiveBuilder',
|
||||
'extract_zernike_for_trial',
|
||||
]
|
||||
@@ -285,14 +285,11 @@ sys.argv = ['', {argv_str}] # Set argv for the main function
|
||||
# Set up environment for Simcenter/NX
|
||||
env = os.environ.copy()
|
||||
|
||||
# Set license server (use 29000 for Simcenter)
|
||||
# Override any incorrect license server settings
|
||||
env['SPLM_LICENSE_SERVER'] = '29000@AntoineThinkpad'
|
||||
|
||||
# Force desktop licensing instead of enterprise
|
||||
# User has nx_nas_bn_basic_dsk (desktop) not nx_nas_basic_ent (enterprise)
|
||||
env['NXNA_LICENSE_FILE'] = '29000@AntoineThinkpad'
|
||||
env['NXNASTRAN_LICENSE_FILE'] = '29000@AntoineThinkpad'
|
||||
# Use existing SPLM_LICENSE_SERVER from environment if set
|
||||
# Only set if not already defined (respects user's license configuration)
|
||||
if 'SPLM_LICENSE_SERVER' not in env or not env['SPLM_LICENSE_SERVER']:
|
||||
env['SPLM_LICENSE_SERVER'] = '29000@localhost'
|
||||
print(f"[NX SOLVER] WARNING: SPLM_LICENSE_SERVER not set, using default: {env['SPLM_LICENSE_SERVER']}")
|
||||
|
||||
# Add NX/Simcenter paths to environment
|
||||
nx_bin = self.nx_install_dir / "NXBIN"
|
||||
|
||||
@@ -1,13 +1,53 @@
|
||||
"""
|
||||
NX Journal Script to Solve Simulation in Batch Mode
|
||||
|
||||
This script opens a .sim file, updates the FEM, and solves it through the NX API.
|
||||
Usage: run_journal.exe solve_simulation.py <sim_file_path>
|
||||
This script handles BOTH single-part simulations AND multi-part assembly FEMs.
|
||||
|
||||
Based on recorded NX journal pattern for solving simulations.
|
||||
=============================================================================
|
||||
MULTI-PART ASSEMBLY FEM WORKFLOW (for .afm-based simulations)
|
||||
=============================================================================
|
||||
|
||||
Based on recorded NX journal from interactive session (Nov 28, 2025).
|
||||
|
||||
The correct workflow for assembly FEM updates:
|
||||
|
||||
1. LOAD PARTS
|
||||
- Open ASSY_M1.prt and M1_Blank_fem1_i.prt to have geometry loaded
|
||||
- Find and switch to M1_Blank part for expression editing
|
||||
|
||||
2. UPDATE EXPRESSIONS
|
||||
- Switch to modeling application
|
||||
- Edit expressions with units
|
||||
- Call MakeUpToDate() on modified expressions
|
||||
- Call DoUpdate() to rebuild geometry
|
||||
|
||||
3. SWITCH TO SIM AND UPDATE FEM COMPONENTS
|
||||
- Open the .sim file
|
||||
- Navigate component hierarchy via RootComponent.FindObject()
|
||||
- For each component FEM:
|
||||
- SetWorkComponent() to make it the work part
|
||||
- FindObject("FEModel").UpdateFemodel()
|
||||
|
||||
4. MERGE DUPLICATE NODES (critical for assembly FEM!)
|
||||
- Switch to assembly FEM component
|
||||
- CreateDuplicateNodesCheckBuilder()
|
||||
- Set MergeOccurrenceNodes = True
|
||||
- IdentifyDuplicateNodes() then MergeDuplicateNodes()
|
||||
|
||||
5. RESOLVE LABEL CONFLICTS
|
||||
- CreateAssemblyLabelManagerBuilder()
|
||||
- SetFEModelOccOffsets() for each occurrence
|
||||
- Commit()
|
||||
|
||||
6. SOLVE
|
||||
- SetWorkComponent(Null) to return to sim level
|
||||
- SolveChainOfSolutions()
|
||||
|
||||
=============================================================================
|
||||
"""
|
||||
|
||||
import sys
|
||||
import os
|
||||
import NXOpen
|
||||
import NXOpen.Assemblies
|
||||
import NXOpen.CAE
|
||||
@@ -15,341 +55,510 @@ import NXOpen.CAE
|
||||
|
||||
def main(args):
|
||||
"""
|
||||
Open and solve a simulation file with updated expression values.
|
||||
Main entry point for NX journal.
|
||||
|
||||
Args:
|
||||
args: Command line arguments
|
||||
args[0]: .sim file path
|
||||
args[1]: solution_name (optional, e.g., "Solution_Normal_Modes" or None for default)
|
||||
args[1]: solution_name (optional, or "None" for default)
|
||||
args[2+]: expression updates as "name=value" pairs
|
||||
"""
|
||||
if len(args) < 1:
|
||||
print("ERROR: No .sim file path provided")
|
||||
print("Usage: run_journal.exe solve_simulation.py <sim_file_path> [solution_name] [expr1=val1] [expr2=val2] ...")
|
||||
print("Usage: run_journal.exe solve_simulation.py <sim_file_path> [solution_name] [expr1=val1] ...")
|
||||
return False
|
||||
|
||||
sim_file_path = args[0]
|
||||
|
||||
# Parse solution name if provided (args[1])
|
||||
solution_name = args[1] if len(args) > 1 and args[1] != 'None' else None
|
||||
|
||||
# Extract base name from sim file (e.g., "Beam_sim1.sim" -> "Beam")
|
||||
import os
|
||||
sim_filename = os.path.basename(sim_file_path)
|
||||
part_base_name = sim_filename.split('_sim')[0] if '_sim' in sim_filename else sim_filename.split('.sim')[0]
|
||||
|
||||
# Parse expression updates from args[2+] as "name=value" pairs
|
||||
# Parse expression updates
|
||||
expression_updates = {}
|
||||
for arg in args[2:]:
|
||||
if '=' in arg:
|
||||
name, value = arg.split('=', 1)
|
||||
expression_updates[name] = float(value)
|
||||
|
||||
print(f"[JOURNAL] Opening simulation: {sim_file_path}")
|
||||
print(f"[JOURNAL] Detected part base name: {part_base_name}")
|
||||
if solution_name:
|
||||
print(f"[JOURNAL] Will solve specific solution: {solution_name}")
|
||||
else:
|
||||
print(f"[JOURNAL] Will solve default solution (Solution 1)")
|
||||
if expression_updates:
|
||||
print(f"[JOURNAL] Will update expressions:")
|
||||
for name, value in expression_updates.items():
|
||||
print(f"[JOURNAL] {name} = {value}")
|
||||
# Get working directory
|
||||
working_dir = os.path.dirname(os.path.abspath(sim_file_path))
|
||||
sim_filename = os.path.basename(sim_file_path)
|
||||
|
||||
print(f"[JOURNAL] " + "="*60)
|
||||
print(f"[JOURNAL] NX SIMULATION SOLVER (Assembly FEM Workflow)")
|
||||
print(f"[JOURNAL] " + "="*60)
|
||||
print(f"[JOURNAL] Simulation: {sim_filename}")
|
||||
print(f"[JOURNAL] Working directory: {working_dir}")
|
||||
print(f"[JOURNAL] Solution: {solution_name or 'Solution 1'}")
|
||||
print(f"[JOURNAL] Expression updates: {len(expression_updates)}")
|
||||
for name, value in expression_updates.items():
|
||||
print(f"[JOURNAL] {name} = {value}")
|
||||
|
||||
try:
|
||||
theSession = NXOpen.Session.GetSession()
|
||||
|
||||
# Set load options to load linked parts from directory
|
||||
print("[JOURNAL] Setting load options for linked parts...")
|
||||
import os
|
||||
working_dir = os.path.dirname(os.path.abspath(sim_file_path))
|
||||
|
||||
# Complete load options setup (from recorded journal)
|
||||
# Set load options
|
||||
theSession.Parts.LoadOptions.LoadLatest = False
|
||||
theSession.Parts.LoadOptions.ComponentLoadMethod = NXOpen.LoadOptions.LoadMethod.FromDirectory
|
||||
|
||||
searchDirectories = [working_dir]
|
||||
searchSubDirs = [True]
|
||||
theSession.Parts.LoadOptions.SetSearchDirectories(searchDirectories, searchSubDirs)
|
||||
|
||||
theSession.Parts.LoadOptions.SetSearchDirectories([working_dir], [True])
|
||||
theSession.Parts.LoadOptions.ComponentsToLoad = NXOpen.LoadOptions.LoadComponents.All
|
||||
theSession.Parts.LoadOptions.PartLoadOption = NXOpen.LoadOptions.LoadOption.FullyLoad
|
||||
theSession.Parts.LoadOptions.SetInterpartData(True, NXOpen.LoadOptions.Parent.All)
|
||||
theSession.Parts.LoadOptions.AllowSubstitution = False
|
||||
theSession.Parts.LoadOptions.GenerateMissingPartFamilyMembers = True
|
||||
theSession.Parts.LoadOptions.AbortOnFailure = False
|
||||
|
||||
referenceSets = ["As Saved", "Use Simplified", "Use Model", "Entire Part", "Empty"]
|
||||
theSession.Parts.LoadOptions.SetDefaultReferenceSets(referenceSets)
|
||||
theSession.Parts.LoadOptions.ReferenceSetOverride = False
|
||||
|
||||
print(f"[JOURNAL] Load directory set to: {working_dir}")
|
||||
|
||||
# Close any currently open sim file to force reload from disk
|
||||
print("[JOURNAL] Checking for open parts...")
|
||||
# Close any open parts
|
||||
try:
|
||||
current_work = theSession.Parts.BaseWork
|
||||
if current_work and hasattr(current_work, 'FullPath'):
|
||||
current_path = current_work.FullPath
|
||||
print(f"[JOURNAL] Closing currently open part: {current_path}")
|
||||
# Close without saving (we want to reload from disk)
|
||||
partCloseResponses1 = [NXOpen.BasePart.CloseWholeTree]
|
||||
theSession.Parts.CloseAll(partCloseResponses1)
|
||||
print("[JOURNAL] Parts closed")
|
||||
except Exception as e:
|
||||
print(f"[JOURNAL] No parts to close or error closing: {e}")
|
||||
theSession.Parts.CloseAll([NXOpen.BasePart.CloseWholeTree])
|
||||
except:
|
||||
pass
|
||||
|
||||
# Open the .sim file (now will load fresh from disk with updated .prt files)
|
||||
print(f"[JOURNAL] Opening simulation fresh from disk...")
|
||||
basePart1, partLoadStatus1 = theSession.Parts.OpenActiveDisplay(
|
||||
sim_file_path,
|
||||
NXOpen.DisplayPartOption.AllowAdditional
|
||||
)
|
||||
# Check for assembly FEM files
|
||||
afm_files = [f for f in os.listdir(working_dir) if f.endswith('.afm')]
|
||||
is_assembly = len(afm_files) > 0
|
||||
|
||||
workSimPart = theSession.Parts.BaseWork
|
||||
displaySimPart = theSession.Parts.BaseDisplay
|
||||
|
||||
print(f"[JOURNAL] Simulation opened successfully")
|
||||
partLoadStatus1.Dispose()
|
||||
|
||||
# Switch to simulation application
|
||||
theSession.ApplicationSwitchImmediate("UG_APP_SFEM")
|
||||
|
||||
simPart1 = workSimPart
|
||||
theSession.Post.UpdateUserGroupsFromSimPart(simPart1)
|
||||
|
||||
# STEP 1: Try to switch to part and update expressions (optional for some models)
|
||||
print(f"[JOURNAL] STEP 1: Checking for {part_base_name}.prt geometry...")
|
||||
geometry_updated = False
|
||||
try:
|
||||
# Find the main part (may not exist for embedded geometry models)
|
||||
bracketPart = None
|
||||
try:
|
||||
bracketPart = theSession.Parts.FindObject(part_base_name)
|
||||
except:
|
||||
pass
|
||||
|
||||
if bracketPart:
|
||||
print(f"[JOURNAL] Found {part_base_name} part, updating geometry...")
|
||||
# Make Bracket the active display part
|
||||
status, partLoadStatus = theSession.Parts.SetActiveDisplay(
|
||||
bracketPart,
|
||||
NXOpen.DisplayPartOption.AllowAdditional,
|
||||
NXOpen.PartDisplayPartWorkPartOption.UseLast
|
||||
)
|
||||
partLoadStatus.Dispose()
|
||||
|
||||
workPart = theSession.Parts.Work
|
||||
|
||||
# CRITICAL: Apply expression changes BEFORE updating geometry
|
||||
expressions_updated = []
|
||||
|
||||
# Apply all expression updates dynamically
|
||||
for expr_name, expr_value in expression_updates.items():
|
||||
print(f"[JOURNAL] Applying {expr_name} = {expr_value}")
|
||||
try:
|
||||
expr_obj = workPart.Expressions.FindObject(expr_name)
|
||||
if expr_obj:
|
||||
# Use millimeters as default unit for geometric parameters
|
||||
unit_mm = workPart.UnitCollection.FindObject("MilliMeter")
|
||||
workPart.Expressions.EditExpressionWithUnits(expr_obj, unit_mm, str(expr_value))
|
||||
expressions_updated.append(expr_obj)
|
||||
print(f"[JOURNAL] {expr_name} updated successfully")
|
||||
else:
|
||||
print(f"[JOURNAL] WARNING: {expr_name} expression not found!")
|
||||
except Exception as e:
|
||||
print(f"[JOURNAL] ERROR updating {expr_name}: {e}")
|
||||
|
||||
# Make expressions up to date
|
||||
if expressions_updated:
|
||||
print(f"[JOURNAL] Making {len(expressions_updated)} expression(s) up to date...")
|
||||
for expr in expressions_updated:
|
||||
markId_expr = theSession.SetUndoMark(NXOpen.Session.MarkVisibility.Invisible, "Make Up to Date")
|
||||
objects1 = [expr]
|
||||
theSession.UpdateManager.MakeUpToDate(objects1, markId_expr)
|
||||
theSession.DeleteUndoMark(markId_expr, None)
|
||||
|
||||
# CRITICAL: Update the geometry model - rebuilds features with new expressions
|
||||
print(f"[JOURNAL] Rebuilding geometry with new expression values...")
|
||||
markId_update = theSession.SetUndoMark(NXOpen.Session.MarkVisibility.Invisible, "NX update")
|
||||
nErrs = theSession.UpdateManager.DoUpdate(markId_update)
|
||||
theSession.DeleteUndoMark(markId_update, "NX update")
|
||||
print(f"[JOURNAL] {part_base_name} geometry updated ({nErrs} errors)")
|
||||
|
||||
# Extract mass from expression p173 if it exists and write to temp file
|
||||
try:
|
||||
mass_expr = workPart.Expressions.FindObject("p173")
|
||||
if mass_expr:
|
||||
mass_kg = mass_expr.Value
|
||||
mass_output_file = os.path.join(working_dir, "_temp_mass.txt")
|
||||
with open(mass_output_file, 'w') as f:
|
||||
f.write(str(mass_kg))
|
||||
print(f"[JOURNAL] Mass from p173: {mass_kg:.6f} kg ({mass_kg * 1000:.2f} g)")
|
||||
print(f"[JOURNAL] Mass written to: {mass_output_file}")
|
||||
except:
|
||||
pass # Expression p173 might not exist in all models
|
||||
|
||||
geometry_updated = True
|
||||
else:
|
||||
print(f"[JOURNAL] {part_base_name} part not found - may be embedded in sim file")
|
||||
except Exception as e:
|
||||
print(f"[JOURNAL] Could not update {part_base_name}.prt: {e}")
|
||||
print(f"[JOURNAL] Continuing with sim-only solve...")
|
||||
|
||||
# STEP 2: Try to switch to FEM part and update (optional for some models)
|
||||
fem_part_name = f"{part_base_name}_fem1"
|
||||
print(f"[JOURNAL] STEP 2: Checking for {fem_part_name}.fem...")
|
||||
fem_updated = False
|
||||
try:
|
||||
# Find the FEM part (may not exist or may have different name)
|
||||
femPart1 = None
|
||||
try:
|
||||
femPart1 = theSession.Parts.FindObject(fem_part_name)
|
||||
except:
|
||||
# Try with _i suffix for idealized FEM
|
||||
try:
|
||||
femPart1 = theSession.Parts.FindObject(f"{fem_part_name}_i")
|
||||
except:
|
||||
pass
|
||||
|
||||
if femPart1:
|
||||
print(f"[JOURNAL] Found FEM part, updating...")
|
||||
# Make FEM the active display part
|
||||
status, partLoadStatus = theSession.Parts.SetActiveDisplay(
|
||||
femPart1,
|
||||
NXOpen.DisplayPartOption.AllowAdditional,
|
||||
NXOpen.PartDisplayPartWorkPartOption.SameAsDisplay
|
||||
)
|
||||
partLoadStatus.Dispose()
|
||||
|
||||
workFemPart = theSession.Parts.BaseWork
|
||||
|
||||
# CRITICAL: Update FE Model - regenerates FEM with new geometry
|
||||
print("[JOURNAL] Updating FE Model...")
|
||||
fEModel1 = workFemPart.FindObject("FEModel")
|
||||
if fEModel1:
|
||||
fEModel1.UpdateFemodel()
|
||||
print("[JOURNAL] FE Model updated with new geometry!")
|
||||
fem_updated = True
|
||||
else:
|
||||
print("[JOURNAL] WARNING: Could not find FEModel object")
|
||||
else:
|
||||
print(f"[JOURNAL] FEM part not found - may be embedded in sim file")
|
||||
except Exception as e:
|
||||
print(f"[JOURNAL] Could not update FEM: {e}")
|
||||
print(f"[JOURNAL] Continuing with sim-only solve...")
|
||||
|
||||
# STEP 3: Switch back to sim part
|
||||
print("[JOURNAL] STEP 3: Switching back to sim part...")
|
||||
try:
|
||||
status, partLoadStatus = theSession.Parts.SetActiveDisplay(
|
||||
simPart1,
|
||||
NXOpen.DisplayPartOption.AllowAdditional,
|
||||
NXOpen.PartDisplayPartWorkPartOption.UseLast
|
||||
)
|
||||
partLoadStatus.Dispose()
|
||||
workSimPart = theSession.Parts.BaseWork
|
||||
print("[JOURNAL] Switched back to sim part")
|
||||
except Exception as e:
|
||||
print(f"[JOURNAL] WARNING: Error switching to sim part: {e}")
|
||||
|
||||
# Note: Old output files are deleted by nx_solver.py before calling this journal
|
||||
# This ensures NX performs a fresh solve
|
||||
|
||||
# Solve the simulation
|
||||
print("[JOURNAL] Starting solve...")
|
||||
markId3 = theSession.SetUndoMark(NXOpen.Session.MarkVisibility.Visible, "Start")
|
||||
theSession.SetUndoMarkName(markId3, "Solve Dialog")
|
||||
|
||||
markId5 = theSession.SetUndoMark(NXOpen.Session.MarkVisibility.Invisible, "Solve")
|
||||
|
||||
theCAESimSolveManager = NXOpen.CAE.SimSolveManager.GetSimSolveManager(theSession)
|
||||
|
||||
# Get the simulation object
|
||||
simSimulation1 = workSimPart.FindObject("Simulation")
|
||||
|
||||
# CRITICAL: Disable solution monitor when solving multiple solutions
|
||||
# This prevents NX from opening multiple monitor windows which superpose and cause usability issues
|
||||
if not solution_name:
|
||||
print("[JOURNAL] Disabling solution monitor for all solutions to prevent window pile-up...")
|
||||
try:
|
||||
# Get all solutions in the simulation
|
||||
solutions_disabled = 0
|
||||
solution_num = 1
|
||||
while True:
|
||||
try:
|
||||
solution_obj_name = f"Solution[Solution {solution_num}]"
|
||||
simSolution = simSimulation1.FindObject(solution_obj_name)
|
||||
if simSolution:
|
||||
propertyTable = simSolution.SolverOptionsPropertyTable
|
||||
propertyTable.SetBooleanPropertyValue("solution monitor", False)
|
||||
solutions_disabled += 1
|
||||
solution_num += 1
|
||||
else:
|
||||
break
|
||||
except:
|
||||
break # No more solutions
|
||||
print(f"[JOURNAL] Solution monitor disabled for {solutions_disabled} solution(s)")
|
||||
except Exception as e:
|
||||
print(f"[JOURNAL] WARNING: Could not disable solution monitor: {e}")
|
||||
print(f"[JOURNAL] Continuing with solve anyway...")
|
||||
|
||||
# Get the solution(s) to solve - either specific or all
|
||||
if solution_name:
|
||||
# Solve specific solution in background mode
|
||||
solution_obj_name = f"Solution[{solution_name}]"
|
||||
print(f"[JOURNAL] Looking for solution: {solution_obj_name}")
|
||||
simSolution1 = simSimulation1.FindObject(solution_obj_name)
|
||||
psolutions1 = [simSolution1]
|
||||
|
||||
numsolutionssolved1, numsolutionsfailed1, numsolutionsskipped1 = theCAESimSolveManager.SolveChainOfSolutions(
|
||||
psolutions1,
|
||||
NXOpen.CAE.SimSolution.SolveOption.Solve,
|
||||
NXOpen.CAE.SimSolution.SetupCheckOption.CompleteDeepCheckAndOutputErrors,
|
||||
NXOpen.CAE.SimSolution.SolveMode.Background
|
||||
if is_assembly and expression_updates:
|
||||
print(f"[JOURNAL] ")
|
||||
print(f"[JOURNAL] DETECTED: Multi-part Assembly FEM")
|
||||
print(f"[JOURNAL] Using ASSEMBLY FEM WORKFLOW")
|
||||
print(f"[JOURNAL] ")
|
||||
return solve_assembly_fem_workflow(
|
||||
theSession, sim_file_path, solution_name, expression_updates, working_dir
|
||||
)
|
||||
else:
|
||||
# Solve ALL solutions using SolveAllSolutions API (Foreground mode)
|
||||
# This ensures all solutions (static + modal, etc.) complete before returning
|
||||
print(f"[JOURNAL] Solving all solutions using SolveAllSolutions API (Foreground mode)...")
|
||||
|
||||
numsolutionssolved1, numsolutionsfailed1, numsolutionsskipped1 = theCAESimSolveManager.SolveAllSolutions(
|
||||
NXOpen.CAE.SimSolution.SolveOption.Solve,
|
||||
NXOpen.CAE.SimSolution.SetupCheckOption.CompleteCheckAndOutputErrors,
|
||||
NXOpen.CAE.SimSolution.SolveMode.Foreground,
|
||||
False
|
||||
print(f"[JOURNAL] ")
|
||||
print(f"[JOURNAL] Using SIMPLE WORKFLOW (no expression updates or single-part)")
|
||||
print(f"[JOURNAL] ")
|
||||
return solve_simple_workflow(
|
||||
theSession, sim_file_path, solution_name, expression_updates, working_dir
|
||||
)
|
||||
|
||||
theSession.DeleteUndoMark(markId5, None)
|
||||
theSession.SetUndoMarkName(markId3, "Solve")
|
||||
|
||||
print(f"[JOURNAL] Solve completed!")
|
||||
print(f"[JOURNAL] Solutions solved: {numsolutionssolved1}")
|
||||
print(f"[JOURNAL] Solutions failed: {numsolutionsfailed1}")
|
||||
print(f"[JOURNAL] Solutions skipped: {numsolutionsskipped1}")
|
||||
|
||||
# NOTE: When solution_name=None, we use Foreground mode to ensure all solutions
|
||||
# complete before returning. When solution_name is specified, Background mode is used.
|
||||
|
||||
# Save the simulation to write all output files
|
||||
print("[JOURNAL] Saving simulation to ensure output files are written...")
|
||||
simPart2 = workSimPart
|
||||
partSaveStatus1 = simPart2.Save(
|
||||
NXOpen.BasePart.SaveComponents.TrueValue,
|
||||
NXOpen.BasePart.CloseAfterSave.FalseValue
|
||||
)
|
||||
partSaveStatus1.Dispose()
|
||||
print("[JOURNAL] Save complete!")
|
||||
|
||||
return True
|
||||
|
||||
except Exception as e:
|
||||
print(f"[JOURNAL] ERROR: {e}")
|
||||
print(f"[JOURNAL] FATAL ERROR: {e}")
|
||||
import traceback
|
||||
traceback.print_exc()
|
||||
return False
|
||||
|
||||
|
||||
def solve_assembly_fem_workflow(theSession, sim_file_path, solution_name, expression_updates, working_dir):
    """
    Full assembly FEM workflow based on recorded NX journal.

    This is the correct workflow for multi-part assembly FEMs.

    Steps:
      1. Load the geometry assembly (ASSY_M1.prt) and the idealized part
         (M1_Blank_fem1_i.prt) from ``working_dir``.
      2. Write ``expression_updates`` to a temporary .exp file, import it
         into the M1_Blank part (Replace mode) and rebuild the geometry.
      3. Open/activate the .sim part and update each component FE model mesh.
      4. Merge duplicate nodes across FEM occurrences
         (``MergeOccurrenceNodes=True`` is required for assembly FEMs).
      5. Re-apply label offsets per FE model occurrence so node/element
         labels stay unique across components.
      6. Solve the requested solution in Foreground mode.

    Args:
        theSession: Active NXOpen.Session.
        sim_file_path: Absolute path to the .sim file to solve.
        solution_name: Solution to solve; falls back to "Solution 1" when falsy.
        expression_updates: Mapping of expression name -> new value. Names
            containing 'angle' or 'vertical' are written as Degrees, all
            others as MilliMeter.
        working_dir: Directory holding the .prt/.sim files; also used for
            the temporary expression file.

    Returns:
        bool: True when the solve reports zero failed solutions; False on a
        solve failure or an exception in STEP 6. Errors in STEPs 1-5 are
        logged as warnings and do NOT abort the workflow.

    NOTE(review): part and component names ("ASSY_M1", "M1_Blank",
    "COMPONENT ASSY_M1_assyfem1 1", ...) and the label offsets in STEP 5
    are hard-coded from a recorded journal — specific to this NX model.
    """
    sim_filename = os.path.basename(sim_file_path)

    # ==========================================================================
    # STEP 1: LOAD REQUIRED PARTS
    # ==========================================================================
    print(f"[JOURNAL] STEP 1: Loading required parts...")

    # Load ASSY_M1.prt (to have the geometry assembly available)
    assy_prt_path = os.path.join(working_dir, "ASSY_M1.prt")
    if os.path.exists(assy_prt_path):
        print(f"[JOURNAL] Loading ASSY_M1.prt...")
        markId1 = theSession.SetUndoMark(NXOpen.Session.MarkVisibility.Visible, "Load Part")
        part1, partLoadStatus1 = theSession.Parts.Open(assy_prt_path)
        partLoadStatus1.Dispose()
    else:
        print(f"[JOURNAL] WARNING: ASSY_M1.prt not found, continuing anyway...")

    # Load M1_Blank_fem1_i.prt (idealized geometry)
    idealized_prt_path = os.path.join(working_dir, "M1_Blank_fem1_i.prt")
    if os.path.exists(idealized_prt_path):
        print(f"[JOURNAL] Loading M1_Blank_fem1_i.prt...")
        markId2 = theSession.SetUndoMark(NXOpen.Session.MarkVisibility.Visible, "Load Part")
        part2, partLoadStatus2 = theSession.Parts.Open(idealized_prt_path)
        partLoadStatus2.Dispose()

    # ==========================================================================
    # STEP 2: UPDATE EXPRESSIONS IN M1_BLANK
    # ==========================================================================
    print(f"[JOURNAL] STEP 2: Updating expressions in M1_Blank...")

    # Find and switch to M1_Blank part
    try:
        part3 = theSession.Parts.FindObject("M1_Blank")
        markId3 = theSession.SetUndoMark(NXOpen.Session.MarkVisibility.Visible, "Change Displayed Part")
        status1, partLoadStatus3 = theSession.Parts.SetActiveDisplay(
            part3,
            NXOpen.DisplayPartOption.AllowAdditional,
            NXOpen.PartDisplayPartWorkPartOption.UseLast
        )
        partLoadStatus3.Dispose()

        # Switch to modeling application for expression editing
        theSession.ApplicationSwitchImmediate("UG_APP_MODELING")

        workPart = theSession.Parts.Work

        # Create undo mark for expressions
        markId4 = theSession.SetUndoMark(NXOpen.Session.MarkVisibility.Visible, "Start")
        theSession.SetUndoMarkName(markId4, "Expressions Dialog")

        # Write expressions to a temp file and import (more reliable than editing one by one)
        exp_file_path = os.path.join(working_dir, "_temp_expressions.exp")
        with open(exp_file_path, 'w') as f:
            for expr_name, expr_value in expression_updates.items():
                # Determine unit
                # Heuristic: names mentioning 'angle' or 'vertical' are angular.
                if 'angle' in expr_name.lower() or 'vertical' in expr_name.lower():
                    unit_str = "Degrees"
                else:
                    unit_str = "MilliMeter"
                # .exp line format understood by ImportFromFile: [Unit]name=value
                f.write(f"[{unit_str}]{expr_name}={expr_value}\n")
                print(f"[JOURNAL] {expr_name} = {expr_value} ({unit_str})")

        print(f"[JOURNAL] Importing expressions from file...")
        markId_import = theSession.SetUndoMark(NXOpen.Session.MarkVisibility.Visible, "Import Expressions")

        try:
            # Replace mode: existing expressions with matching names are overwritten.
            expModified, errorMessages = workPart.Expressions.ImportFromFile(
                exp_file_path,
                NXOpen.ExpressionCollection.ImportMode.Replace
            )
            print(f"[JOURNAL] Expressions imported: {expModified} modified")
            if errorMessages:
                print(f"[JOURNAL] Import errors: {errorMessages}")

            # Update geometry after import
            print(f"[JOURNAL] Rebuilding geometry...")
            markId_update = theSession.SetUndoMark(NXOpen.Session.MarkVisibility.Invisible, "NX update")
            nErrs = theSession.UpdateManager.DoUpdate(markId_update)
            theSession.DeleteUndoMark(markId_update, "NX update")
            print(f"[JOURNAL] Geometry rebuilt ({nErrs} errors)")

            updated_expressions = list(expression_updates.keys())

        except Exception as e:
            print(f"[JOURNAL] ERROR importing expressions: {e}")
            updated_expressions = []

        # Clean up temp file
        try:
            os.remove(exp_file_path)
        except:
            pass

        theSession.SetUndoMarkName(markId4, "Expressions")

    except Exception as e:
        print(f"[JOURNAL] ERROR updating expressions: {e}")

    # ==========================================================================
    # STEP 3: OPEN SIM AND UPDATE COMPONENT FEMs
    # ==========================================================================
    print(f"[JOURNAL] STEP 3: Opening sim and updating component FEMs...")

    # Try to find the sim part first (like the recorded journal does)
    # This ensures we're working with the same loaded sim part context
    sim_part_name = os.path.splitext(sim_filename)[0] # e.g., "ASSY_M1_assyfem1_sim1"
    print(f"[JOURNAL] Looking for sim part: {sim_part_name}")

    markId_sim = theSession.SetUndoMark(NXOpen.Session.MarkVisibility.Visible, "Change Displayed Part")

    try:
        # First try to find it among loaded parts (like recorded journal)
        simPart1 = theSession.Parts.FindObject(sim_part_name)
        status_sim, partLoadStatus = theSession.Parts.SetActiveDisplay(
            simPart1,
            NXOpen.DisplayPartOption.AllowAdditional,
            NXOpen.PartDisplayPartWorkPartOption.UseLast
        )
        partLoadStatus.Dispose()
        print(f"[JOURNAL] Found and activated existing sim part")
    except:
        # Fallback: Open fresh if not found
        print(f"[JOURNAL] Sim part not found, opening fresh: {sim_filename}")
        basePart, partLoadStatus = theSession.Parts.OpenActiveDisplay(
            sim_file_path,
            NXOpen.DisplayPartOption.AllowAdditional
        )
        partLoadStatus.Dispose()

    workSimPart = theSession.Parts.BaseWork
    displaySimPart = theSession.Parts.BaseDisplay
    # Switch to the Simulation application and sync post-processing groups.
    theSession.ApplicationSwitchImmediate("UG_APP_SFEM")
    theSession.Post.UpdateUserGroupsFromSimPart(workSimPart)

    # Navigate component hierarchy
    try:
        rootComponent = workSimPart.ComponentAssembly.RootComponent
        component1 = rootComponent.FindObject("COMPONENT ASSY_M1_assyfem1 1")

        # Update M1_Blank_fem1
        print(f"[JOURNAL] Updating M1_Blank_fem1...")
        try:
            component2 = component1.FindObject("COMPONENT M1_Blank_fem1 1")
            markId_fem1 = theSession.SetUndoMark(NXOpen.Session.MarkVisibility.Visible, "Make Work Part")
            partLoadStatus5 = theSession.Parts.SetWorkComponent(
                component2,
                NXOpen.PartCollection.RefsetOption.Entire,
                NXOpen.PartCollection.WorkComponentOption.Visible
            )
            workFemPart = theSession.Parts.BaseWork
            partLoadStatus5.Dispose()

            # Re-mesh against the rebuilt geometry from STEP 2.
            markId_update1 = theSession.SetUndoMark(NXOpen.Session.MarkVisibility.Visible, "Update FE Model")
            fEModel1 = workFemPart.FindObject("FEModel")
            fEModel1.UpdateFemodel()
            print(f"[JOURNAL] M1_Blank_fem1 updated")
        except Exception as e:
            print(f"[JOURNAL] WARNING: M1_Blank_fem1: {e}")

        # Update M1_Vertical_Support_Skeleton_fem1
        print(f"[JOURNAL] Updating M1_Vertical_Support_Skeleton_fem1...")
        try:
            component3 = component1.FindObject("COMPONENT M1_Vertical_Support_Skeleton_fem1 3")
            markId_fem2 = theSession.SetUndoMark(NXOpen.Session.MarkVisibility.Visible, "Make Work Part")
            partLoadStatus6 = theSession.Parts.SetWorkComponent(
                component3,
                NXOpen.PartCollection.RefsetOption.Entire,
                NXOpen.PartCollection.WorkComponentOption.Visible
            )
            workFemPart = theSession.Parts.BaseWork
            partLoadStatus6.Dispose()

            markId_update2 = theSession.SetUndoMark(NXOpen.Session.MarkVisibility.Visible, "Update FE Model")
            fEModel2 = workFemPart.FindObject("FEModel")
            fEModel2.UpdateFemodel()
            print(f"[JOURNAL] M1_Vertical_Support_Skeleton_fem1 updated")
        except Exception as e:
            print(f"[JOURNAL] WARNING: M1_Vertical_Support_Skeleton_fem1: {e}")

    except Exception as e:
        # NOTE(review): if this fails, `component1` is undefined and STEPs 4-5
        # will also warn (their own try/except catches the NameError).
        print(f"[JOURNAL] ERROR navigating component hierarchy: {e}")

    # ==========================================================================
    # STEP 4: MERGE DUPLICATE NODES
    # ==========================================================================
    print(f"[JOURNAL] STEP 4: Merging duplicate nodes...")

    try:
        # Switch to assembly FEM
        partLoadStatus8 = theSession.Parts.SetWorkComponent(
            component1,
            NXOpen.PartCollection.RefsetOption.Entire,
            NXOpen.PartCollection.WorkComponentOption.Visible
        )
        workAssyFemPart = theSession.Parts.BaseWork
        displaySimPart = theSession.Parts.BaseDisplay
        partLoadStatus8.Dispose()
        print(f"[JOURNAL] Switched to assembly FEM: {workAssyFemPart.Name}")

        markId_merge = theSession.SetUndoMark(NXOpen.Session.MarkVisibility.Visible, "Start")

        caePart1 = workAssyFemPart
        duplicateNodesCheckBuilder1 = caePart1.ModelCheckMgr.CreateDuplicateNodesCheckBuilder()

        # Set tolerance
        unit_tol = duplicateNodesCheckBuilder1.Tolerance.Units
        duplicateNodesCheckBuilder1.Tolerance.Units = unit_tol
        duplicateNodesCheckBuilder1.Tolerance.SetFormula("0.01")
        print(f"[JOURNAL] Tolerance: 0.01 mm")

        # Enable occurrence node merge - CRITICAL for assembly FEM
        duplicateNodesCheckBuilder1.MergeOccurrenceNodes = True
        print(f"[JOURNAL] MergeOccurrenceNodes: True")

        theSession.SetUndoMarkName(markId_merge, "Duplicate Nodes Dialog")

        # Configure display settings
        # (visual-only; mirrors the recorded journal so the builder commits cleanly)
        displaysettings1 = NXOpen.CAE.ModelCheck.DuplicateNodesCheckBuilder.DisplaySettings()
        displaysettings1.ShowDuplicateNodes = True
        displaysettings1.ShowMergedNodeLabels = False
        displaysettings1.ShowRetainedNodeLabels = False
        displaysettings1.KeepNodesColor = displaySimPart.Colors.Find("Blue")
        displaysettings1.MergeNodesColor = displaySimPart.Colors.Find("Yellow")
        displaysettings1.UnableToMergeNodesColor = displaySimPart.Colors.Find("Red")
        duplicateNodesCheckBuilder1.DisplaySettingsData = displaysettings1

        # Check scope
        duplicateNodesCheckBuilder1.CheckScopeOption = NXOpen.CAE.ModelCheck.CheckScope.Displayed
        print(f"[JOURNAL] CheckScope: Displayed")

        # Identify duplicates
        print(f"[JOURNAL] Identifying duplicate nodes...")
        numDuplicates = duplicateNodesCheckBuilder1.IdentifyDuplicateNodes()
        print(f"[JOURNAL] Found {numDuplicates} duplicate node sets")

        # Merge duplicates
        if numDuplicates > 0:
            print(f"[JOURNAL] Merging duplicate nodes...")
            numMerged = duplicateNodesCheckBuilder1.MergeDuplicateNodes()
            print(f"[JOURNAL] Merged {numMerged} duplicate node sets")
        else:
            print(f"[JOURNAL] WARNING: No duplicate nodes found to merge!")
            print(f"[JOURNAL] This may indicate mesh update didn't work properly")

        theSession.SetUndoMarkName(markId_merge, "Duplicate Nodes")
        duplicateNodesCheckBuilder1.Destroy()
        theSession.DeleteUndoMark(markId_merge, None)

    except Exception as e:
        print(f"[JOURNAL] WARNING: Node merge: {e}")
        import traceback
        traceback.print_exc()

    # ==========================================================================
    # STEP 5: RESOLVE LABEL CONFLICTS
    # ==========================================================================
    print(f"[JOURNAL] STEP 5: Resolving label conflicts...")

    try:
        markId_labels = theSession.SetUndoMark(NXOpen.Session.MarkVisibility.Visible, "Start")

        assyFemPart1 = workAssyFemPart
        assemblyLabelManagerBuilder1 = assyFemPart1.CreateAssemblyLabelManagerBuilder()

        theSession.SetUndoMarkName(markId_labels, "Assembly Label Manager Dialog")

        markId_labels2 = theSession.SetUndoMark(NXOpen.Session.MarkVisibility.Invisible, "Assembly Label Manager")

        # Set offsets for each FE model occurrence
        # These offsets ensure unique node/element labels across components
        entitytypes = [
            NXOpen.CAE.AssemblyLabelManagerBuilder.EntityType.Node,
            NXOpen.CAE.AssemblyLabelManagerBuilder.EntityType.Element,
            NXOpen.CAE.AssemblyLabelManagerBuilder.EntityType.Csys,
            NXOpen.CAE.AssemblyLabelManagerBuilder.EntityType.Physical,
            NXOpen.CAE.AssemblyLabelManagerBuilder.EntityType.Group,
            NXOpen.CAE.AssemblyLabelManagerBuilder.EntityType.Ply,
            NXOpen.CAE.AssemblyLabelManagerBuilder.EntityType.Ssmo,
        ]

        # Apply offsets to each occurrence (values from recorded journal)
        occurrence_offsets = [
            ("FEModelOccurrence[3]", 2),
            ("FEModelOccurrence[4]", 74),
            ("FEModelOccurrence[5]", 146),
            ("FEModelOccurrence[7]", 218),
        ]

        for occ_name, offset_val in occurrence_offsets:
            try:
                fEModelOcc = workAssyFemPart.FindObject(occ_name)
                # Same offset for all 7 entity types of this occurrence.
                offsets = [offset_val] * 7
                assemblyLabelManagerBuilder1.SetFEModelOccOffsets(fEModelOcc, entitytypes, offsets)
            except:
                pass # Some occurrences may not exist

        nXObject1 = assemblyLabelManagerBuilder1.Commit()

        theSession.DeleteUndoMark(markId_labels2, None)
        theSession.SetUndoMarkName(markId_labels, "Assembly Label Manager")
        assemblyLabelManagerBuilder1.Destroy()

        print(f"[JOURNAL] Label conflicts resolved")

    except Exception as e:
        print(f"[JOURNAL] WARNING: Label management: {e}")

    # ==========================================================================
    # STEP 6: SOLVE
    # ==========================================================================
    print(f"[JOURNAL] STEP 6: Solving simulation...")

    try:
        # Return to sim level by setting null component
        partLoadStatus9 = theSession.Parts.SetWorkComponent(
            NXOpen.Assemblies.Component.Null,
            NXOpen.PartCollection.RefsetOption.Entire,
            NXOpen.PartCollection.WorkComponentOption.Visible
        )
        workSimPart = theSession.Parts.BaseWork
        partLoadStatus9.Dispose()

        # Set up solve
        markId_solve = theSession.SetUndoMark(NXOpen.Session.MarkVisibility.Visible, "Start")
        theSession.SetUndoMarkName(markId_solve, "Solve Dialog")

        markId_solve2 = theSession.SetUndoMark(NXOpen.Session.MarkVisibility.Invisible, "Solve")

        theCAESimSolveManager = NXOpen.CAE.SimSolveManager.GetSimSolveManager(theSession)

        simSimulation1 = workSimPart.FindObject("Simulation")
        sol_name = solution_name if solution_name else "Solution 1"
        simSolution1 = simSimulation1.FindObject(f"Solution[{sol_name}]")

        psolutions1 = [simSolution1]

        print(f"[JOURNAL] Solving: {sol_name} (Foreground mode)")
        numsolved, numfailed, numskipped = theCAESimSolveManager.SolveChainOfSolutions(
            psolutions1,
            NXOpen.CAE.SimSolution.SolveOption.Solve,
            NXOpen.CAE.SimSolution.SetupCheckOption.CompleteCheckAndOutputErrors,
            NXOpen.CAE.SimSolution.SolveMode.Foreground # Use Foreground to ensure OP2 is complete
        )

        theSession.DeleteUndoMark(markId_solve2, None)
        theSession.SetUndoMarkName(markId_solve, "Solve")

        print(f"[JOURNAL] Solve completed: {numsolved} solved, {numfailed} failed, {numskipped} skipped")

        return numfailed == 0

    except Exception as e:
        print(f"[JOURNAL] ERROR solving: {e}")
        import traceback
        traceback.print_exc()
        return False
def solve_simple_workflow(theSession, sim_file_path, solution_name, expression_updates, working_dir):
    """
    Simple workflow for single-part simulations or when no expression updates needed.

    Opens the .sim file, solves one solution in Background mode, then saves
    the sim part so output files are written to disk.

    Args:
        theSession: Active NXOpen.Session.
        sim_file_path: Absolute path to the .sim file to open and solve.
        solution_name: Solution to solve; falls back to "Solution 1" when falsy.
        expression_updates: Unused here; kept for signature parity with
            solve_assembly_fem_workflow.
        working_dir: Unused here; kept for signature parity with
            solve_assembly_fem_workflow.

    Returns:
        bool: True when the solve reports zero failed solutions.
    """
    print(f"[JOURNAL] Opening simulation: {sim_file_path}")

    # Open the .sim file and make it the active displayed part
    basePart1, partLoadStatus1 = theSession.Parts.OpenActiveDisplay(
        sim_file_path,
        NXOpen.DisplayPartOption.AllowAdditional
    )
    partLoadStatus1.Dispose()

    workSimPart = theSession.Parts.BaseWork
    # Switch to the Simulation application and sync post-processing groups
    theSession.ApplicationSwitchImmediate("UG_APP_SFEM")
    theSession.Post.UpdateUserGroupsFromSimPart(workSimPart)

    # Set up solve (undo marks mirror the recorded journal)
    markId_solve = theSession.SetUndoMark(NXOpen.Session.MarkVisibility.Visible, "Start")
    theSession.SetUndoMarkName(markId_solve, "Solve Dialog")

    markId_solve2 = theSession.SetUndoMark(NXOpen.Session.MarkVisibility.Invisible, "Solve")

    theCAESimSolveManager = NXOpen.CAE.SimSolveManager.GetSimSolveManager(theSession)

    simSimulation1 = workSimPart.FindObject("Simulation")
    sol_name = solution_name if solution_name else "Solution 1"
    simSolution1 = simSimulation1.FindObject(f"Solution[{sol_name}]")

    psolutions1 = [simSolution1]

    print(f"[JOURNAL] Solving: {sol_name}")
    numsolved, numfailed, numskipped = theCAESimSolveManager.SolveChainOfSolutions(
        psolutions1,
        NXOpen.CAE.SimSolution.SolveOption.Solve,
        NXOpen.CAE.SimSolution.SetupCheckOption.CompleteCheckAndOutputErrors,
        NXOpen.CAE.SimSolution.SolveMode.Background
    )

    theSession.DeleteUndoMark(markId_solve2, None)
    theSession.SetUndoMarkName(markId_solve, "Solve")

    print(f"[JOURNAL] Solve completed: {numsolved} solved, {numfailed} failed, {numskipped} skipped")

    # Save (best-effort: a failed save should not turn a successful solve
    # into a failure, but it must show up in the log instead of being
    # silently swallowed).
    try:
        partSaveStatus = workSimPart.Save(
            NXOpen.BasePart.SaveComponents.TrueValue,
            NXOpen.BasePart.CloseAfterSave.FalseValue
        )
        partSaveStatus.Dispose()
        print(f"[JOURNAL] Saved!")
    except Exception as e:
        # Was a bare `except: pass` — that also masked SystemExit and
        # KeyboardInterrupt and hid genuine save failures from the log.
        print(f"[JOURNAL] WARNING: Save failed: {e}")

    return numfailed == 0
if __name__ == '__main__':
    # Script entry point: translate main()'s boolean result into a
    # conventional process exit code (0 = success, 1 = failure).
    exit_code = 0 if main(sys.argv[1:]) else 1
    sys.exit(exit_code)
Reference in New Issue
Block a user