feat: Add OPD method support to Zernike visualization with Standard/OPD toggle
Major improvements to Zernike WFE visualization: - Add ZernikeDashboardInsight: Unified dashboard with all orientations (40°, 60°, 90°) on one page with light theme and executive summary - Add OPD method toggle: Switch between Standard (Z-only) and OPD (X,Y,Z) methods in ZernikeWFEInsight with interactive buttons - Add lateral displacement maps: Visualize X,Y displacement for each orientation - Add displacement component views: Toggle between WFE, ΔX, ΔY, ΔZ in relative views - Add metrics comparison table showing both methods side-by-side New extractors: - extract_zernike_figure.py: ZernikeOPDExtractor using BDF geometry interpolation - extract_zernike_opd.py: Parabola-based OPD with focal length Key finding: OPD method gives 8-11% higher WFE values than Standard method (more conservative/accurate for surfaces with lateral displacement under gravity) Documentation updates: - SYS_12: Added E22 ZernikeOPD as recommended method - SYS_16: Added ZernikeDashboard, updated ZernikeWFE with OPD features - Cheatsheet: Added Zernike method comparison table 🤖 Generated with [Claude Code](https://claude.com/claude-code) Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
This commit is contained in:
875
optimization_engine/insights/msf_zernike.py
Normal file
875
optimization_engine/insights/msf_zernike.py
Normal file
@@ -0,0 +1,875 @@
|
||||
"""
|
||||
MSF Zernike Insight - Mid-Spatial Frequency Analysis for Telescope Mirrors
|
||||
|
||||
Provides detailed analysis of mid-spatial frequency (MSF) content in mirror
|
||||
wavefront error, specifically for gravity-induced support print-through.
|
||||
|
||||
Key Features:
|
||||
- Band decomposition: LSF (n≤10), MSF (n=11-50), HSF (n>50)
|
||||
- RSS (Root Sum Square) metrics per band
|
||||
- Relative analysis vs reference orientations (20°, 90°)
|
||||
- Higher-order Zernike fitting (100 modes by default)
|
||||
- PSD (Power Spectral Density) visualization
|
||||
- Support print-through identification
|
||||
|
||||
For GigaBIT and similar telescope mirrors where MSF errors are dominated by
|
||||
gravity-induced support deformation rather than polishing residual.
|
||||
|
||||
Author: Atomizer Framework
|
||||
"""
|
||||
|
||||
from pathlib import Path
|
||||
from datetime import datetime
|
||||
from typing import Dict, Any, List, Optional, Tuple
|
||||
import numpy as np
|
||||
from math import factorial
|
||||
from numpy.linalg import LinAlgError
|
||||
|
||||
from .base import StudyInsight, InsightConfig, InsightResult, register_insight
|
||||
|
||||
# Lazy imports: heavy third-party modules are loaded on first use via
# _load_dependencies() so that merely importing this module stays cheap.
_plotly_loaded = False
_go = None             # plotly.graph_objects
_make_subplots = None  # plotly.subplots.make_subplots
_Triangulation = None  # matplotlib.tri.Triangulation
_OP2 = None            # pyNastran.op2.op2.OP2
_BDF = None            # pyNastran.bdf.bdf.BDF
|
||||
|
||||
|
||||
def _load_dependencies():
    """Lazy load heavy dependencies.

    Imports plotly, matplotlib's Triangulation, and pyNastran's OP2/BDF
    readers once, caching them in module-level globals.  Subsequent calls
    are no-ops (guarded by ``_plotly_loaded``).
    """
    global _plotly_loaded, _go, _make_subplots, _Triangulation, _OP2, _BDF
    if not _plotly_loaded:
        import plotly.graph_objects as go
        from plotly.subplots import make_subplots
        from matplotlib.tri import Triangulation
        from pyNastran.op2.op2 import OP2
        from pyNastran.bdf.bdf import BDF
        _go = go
        _make_subplots = make_subplots
        _Triangulation = Triangulation
        _OP2 = OP2
        _BDF = BDF
        _plotly_loaded = True
|
||||
|
||||
|
||||
# ============================================================================
# Band Definitions
# ============================================================================

# Default band boundaries (Zernike radial order n)
DEFAULT_LSF_MAX = 10   # LSF: n = 1-10 (M2 hexapod correctable)
DEFAULT_MSF_MAX = 50   # MSF: n = 11-50 (support print-through)
DEFAULT_N_MODES = 100  # Total modes to fit (J100 lies in radial order n = 13,
                       # since the first Noll index of order 13 is 92)

# Band colors for visualization
BAND_COLORS = {
    'lsf': '#3b82f6',  # Blue - low spatial frequency
    'msf': '#f59e0b',  # Amber - mid spatial frequency
    'hsf': '#ef4444',  # Red - high spatial frequency
}
|
||||
|
||||
|
||||
# ============================================================================
|
||||
# Zernike Mathematics (same as zernike_wfe.py)
|
||||
# ============================================================================
|
||||
|
||||
def noll_indices(j: int) -> Tuple[int, int]:
    """Convert Noll index to (n, m) radial/azimuthal orders.

    Walks radial orders in increasing n; within each order the azimuthal
    frequencies are enumerated as 0 (even n only), then (-|m|, +|m|)
    pairs of increasing |m|, matching this module's Zernike convention.
    """
    if j < 1:
        raise ValueError("Noll index j must be >= 1")
    seen = 0
    order = 0
    while True:
        if order == 0:
            azimuthals = [0]
        else:
            # |m| runs over 2,4,... for even n and 1,3,... for odd n.
            pairs = []
            start = 2 if order % 2 == 0 else 1
            for az in range(start, order + 1, 2):
                pairs.extend((-az, az))
            azimuthals = ([0] + pairs) if order % 2 == 0 else pairs
        for az in azimuthals:
            seen += 1
            if seen == j:
                return order, az
        order += 1
|
||||
|
||||
|
||||
def noll_to_radial_order(j: int) -> int:
    """Get radial order n for Noll index j."""
    # Only the radial component of the (n, m) pair is needed here.
    return noll_indices(j)[0]
|
||||
|
||||
|
||||
def radial_order_to_first_noll(n: int) -> int:
    """Get first Noll index for radial order n.

    Orders 0..n-1 contain 1 + 2 + ... + n = n*(n+1)/2 modes in total,
    so the first index belonging to order n is that count plus one.
    """
    modes_below = n * (n + 1) // 2
    return modes_below + 1
|
||||
|
||||
|
||||
def zernike_noll(j: int, r: np.ndarray, th: np.ndarray) -> np.ndarray:
    """Evaluate Zernike polynomial j at (r, theta).

    Computes the explicit radial polynomial R_n^{|m|}(r) by its factorial
    sum, then attaches the azimuthal factor: cos(m*th) for m > 0,
    sin(|m|*th) for m < 0, none for m == 0.  No RMS normalization factor
    is applied (unit-amplitude polynomials).

    Args:
        j: Noll index (>= 1), interpreted via this module's noll_indices().
        r: normalized radii (expected in [0, 1]).
        th: azimuthal angles in radians.

    Returns:
        Array of polynomial values, same shape as r.
    """
    n, m = noll_indices(j)
    R = np.zeros_like(r)
    # Standard Zernike radial polynomial sum over s.
    for s in range((n - abs(m)) // 2 + 1):
        c = ((-1)**s * factorial(n - s) /
             (factorial(s) *
              factorial((n + abs(m)) // 2 - s) *
              factorial((n - abs(m)) // 2 - s)))
        R += c * r**(n - 2*s)
    if m == 0:
        return R
    # sin(-m*th) == sin(|m|*th) because m < 0 in that branch.
    return R * (np.cos(m * th) if m > 0 else np.sin(-m * th))
|
||||
|
||||
|
||||
def zernike_common_name(n: int, m: int) -> str:
    """Get common name for Zernike mode.

    Falls back to a generic "Z(n=.., m=..)" label for modes without an
    established optical name.
    """
    known = {
        (0, 0): "Piston",
        (1, -1): "Tilt X",
        (1, 1): "Tilt Y",
        (2, 0): "Defocus",
        (2, -2): "Astig 45°",
        (2, 2): "Astig 0°",
        (3, -1): "Coma X",
        (3, 1): "Coma Y",
        (3, -3): "Trefoil X",
        (3, 3): "Trefoil Y",
        (4, 0): "Primary Spherical",
        (4, -2): "Sec Astig X",
        (4, 2): "Sec Astig Y",
        (4, -4): "Quadrafoil X",
        (4, 4): "Quadrafoil Y",
        (5, -1): "Sec Coma X",
        (5, 1): "Sec Coma Y",
        (5, -3): "Sec Trefoil X",
        (5, 3): "Sec Trefoil Y",
        (5, -5): "Pentafoil X",
        (5, 5): "Pentafoil Y",
        (6, 0): "Sec Spherical",
    }
    try:
        return known[(n, m)]
    except KeyError:
        return f"Z(n={n}, m={m})"
|
||||
|
||||
|
||||
def zernike_label(j: int) -> str:
    """Get label for Zernike coefficient J{j}."""
    order, azim = noll_indices(j)
    name = zernike_common_name(order, azim)
    return f"J{j:02d} - {name} (n={order}, m={azim})"
|
||||
|
||||
|
||||
def compute_zernike_coeffs(
    X: np.ndarray,
    Y: np.ndarray,
    vals: np.ndarray,
    n_modes: int,
    chunk_size: int = 100000
) -> Tuple[np.ndarray, float]:
    """Fit Zernike coefficients to WFE data.

    Least-squares fit of the first ``n_modes`` Noll-indexed Zernike modes
    over the unit disk.  The normal equations (Gram matrix G, RHS h) are
    accumulated in chunks of ``chunk_size`` points to bound peak memory,
    then solved directly, with an lstsq fallback if G is singular.

    Args:
        X, Y: node coordinates.
        vals: WFE values per node (NaNs are masked out).
        n_modes: number of modes to fit (J1..Jn).
        chunk_size: points per accumulation chunk.

    Returns:
        (coeffs, R): fitted coefficients (length n_modes, same unit as
        ``vals``) and the normalization radius R of the unit disk.

    Raises:
        RuntimeError: if no valid points fall inside the unit disk.
    """
    # Center coordinates and scale by the max radius so r spans [0, 1].
    Xc, Yc = X - np.mean(X), Y - np.mean(Y)
    R = float(np.max(np.hypot(Xc, Yc)))
    # float32 basis keeps the per-chunk design matrix small.
    r = np.hypot(Xc / R, Yc / R).astype(np.float32)
    th = np.arctan2(Yc, Xc).astype(np.float32)
    mask = (r <= 1.0) & ~np.isnan(vals)
    if not np.any(mask):
        raise RuntimeError("No valid points inside unit disk.")
    idx = np.nonzero(mask)[0]
    m = int(n_modes)
    G = np.zeros((m, m), dtype=np.float64)
    h = np.zeros((m,), dtype=np.float64)
    v = vals.astype(np.float64)

    # Accumulate the normal equations chunk-by-chunk.
    for start in range(0, len(idx), chunk_size):
        sl = idx[start:start + chunk_size]
        r_b, th_b, v_b = r[sl], th[sl], v[sl]
        Zb = np.column_stack([zernike_noll(j, r_b, th_b).astype(np.float32)
                              for j in range(1, m + 1)])
        G += (Zb.T @ Zb).astype(np.float64)
        h += (Zb.T @ v_b).astype(np.float64)
    try:
        coeffs = np.linalg.solve(G, h)
    except LinAlgError:
        # Singular Gram matrix (e.g. too few valid points for n_modes).
        coeffs = np.linalg.lstsq(G, h, rcond=None)[0]
    return coeffs, R
|
||||
|
||||
|
||||
# ============================================================================
|
||||
# Band Analysis
|
||||
# ============================================================================
|
||||
|
||||
def classify_modes_by_band(
    n_modes: int,
    lsf_max: int = DEFAULT_LSF_MAX,
    msf_max: int = DEFAULT_MSF_MAX
) -> Dict[str, List[int]]:
    """
    Classify Zernike modes into LSF/MSF/HSF bands.

    Returns:
        Dict with 'lsf', 'msf', 'hsf' keys containing lists of Noll indices
    """
    result: Dict[str, List[int]] = {'lsf': [], 'msf': [], 'hsf': []}
    for noll in range(1, n_modes + 1):
        order = noll_to_radial_order(noll)
        # Band boundaries are inclusive on the upper edge.
        if order <= lsf_max:
            band = 'lsf'
        elif order <= msf_max:
            band = 'msf'
        else:
            band = 'hsf'
        result[band].append(noll)
    return result
|
||||
|
||||
|
||||
def compute_band_rss(
    coeffs: np.ndarray,
    bands: Dict[str, List[int]],
    filter_modes: int = 4
) -> Dict[str, float]:
    """
    Compute RSS (Root Sum Square) of coefficients in each band.

    RSS = sqrt(sum(c_j^2)) for j in band

    Args:
        coeffs: Zernike coefficients
        bands: Dict with band name -> list of Noll indices
        filter_modes: Number of low-order modes to filter for 'filtered' metric (default 4 = J1-J4)

    Returns:
        Dict with 'lsf', 'msf', 'hsf', 'total', and 'filtered' (J5+) RSS values
    """
    n_coeffs = len(coeffs)
    out: Dict[str, float] = {}
    for name, noll_list in bands.items():
        # Noll indices are 1-based; ignore any index beyond the fitted modes.
        selected = np.array([coeffs[j - 1] for j in noll_list if j - 1 < n_coeffs])
        out[name] = float(np.sqrt(np.sum(selected ** 2)))

    out['total'] = float(np.sqrt(np.sum(coeffs ** 2)))

    # Filtered RSS excludes J1..J{filter_modes} (piston/tip/tilt/defocus):
    # the engineering-relevant residual after M2 alignment.
    if n_coeffs > filter_modes:
        tail = coeffs[filter_modes:]
    else:
        tail = np.array([])
    out['filtered'] = float(np.sqrt(np.sum(tail ** 2)))

    return out
|
||||
|
||||
|
||||
def compute_band_residual(
    X: np.ndarray,
    Y: np.ndarray,
    W_nm: np.ndarray,
    coeffs: np.ndarray,
    R: float,
    bands: Dict[str, List[int]],
    keep_bands: List[str]
) -> np.ndarray:
    """
    Compute WFE residual keeping only specified bands.

    Reconstructs the Zernike expansion from only the modes belonging to
    ``keep_bands``.  Coordinates are re-normalized with the same centroid
    convention and radius R used by the fit; W_nm is used only for its
    shape (the reconstruction does not subtract from it).

    Args:
        keep_bands: List of band names to keep (e.g., ['msf'] for MSF-only)

    Returns:
        WFE array with only specified band content
    """
    Xc = X - np.mean(X)
    Yc = Y - np.mean(Y)
    r = np.hypot(Xc / R, Yc / R)
    th = np.arctan2(Yc, Xc)

    # Build Zernike basis for modes to keep
    keep_indices = []
    for band in keep_bands:
        keep_indices.extend(bands.get(band, []))

    if not keep_indices:
        return np.zeros_like(W_nm)

    # Reconstruct only the kept bands (indices beyond the fit are skipped)
    W_band = np.zeros_like(W_nm)
    for j in keep_indices:
        if j - 1 < len(coeffs):
            Z_j = zernike_noll(j, r, th)
            W_band += coeffs[j-1] * Z_j

    return W_band
|
||||
|
||||
|
||||
def find_dominant_msf_modes(
    coeffs: np.ndarray,
    bands: Dict[str, List[int]],
    top_n: int = 5
) -> List[Dict[str, Any]]:
    """
    Find the dominant MSF modes by coefficient magnitude.

    Returns:
        List of dicts with 'j', 'label', 'coeff_nm', 'n', 'm'
    """
    records: List[Dict[str, Any]] = []
    for noll in bands.get('msf', []):
        if noll - 1 >= len(coeffs):
            continue
        order, azim = noll_indices(noll)
        records.append({
            'j': noll,
            'label': zernike_label(noll),
            'coeff_nm': float(abs(coeffs[noll - 1])),
            'n': order,
            'm': azim,
        })

    # Largest-magnitude modes first; keep only the top_n entries.
    records.sort(key=lambda rec: rec['coeff_nm'], reverse=True)
    return records[:top_n]
|
||||
|
||||
|
||||
def estimate_mesh_resolution(X: np.ndarray, Y: np.ndarray) -> Dict[str, float]:
    """
    Estimate mesh spatial resolution.

    Returns:
        Dict with 'node_count', 'diameter_mm', 'avg_spacing_mm',
        'max_resolvable_order', 'min_feature_mm'
    """
    count = len(X)

    # Radius of the point cloud about its centroid.
    dx = X - np.mean(X)
    dy = Y - np.mean(Y)
    radius = np.max(np.hypot(dx, dy))
    dia = 2 * radius

    # Mean node spacing, assuming roughly uniform coverage of the disk.
    spacing = np.sqrt(np.pi * radius**2 / count)

    # Nyquist-style limit: features below two spacings are unresolved.
    feature = 2 * spacing

    return {
        'node_count': count,
        'diameter_mm': float(dia),
        'avg_spacing_mm': float(spacing),
        'min_feature_mm': float(feature),
        'max_resolvable_order': int(dia / feature),
    }
|
||||
|
||||
|
||||
# ============================================================================
# Default Configuration
# ============================================================================

DEFAULT_CONFIG = {
    'n_modes': 100,            # Zernike modes to fit (J1..J100)
    'lsf_max': 10,             # n ≤ 10 is LSF
    'msf_max': 50,             # n = 11-50 is MSF
    'amp': 0.5,                # Visual deformation scale
    'pancake': 3.0,            # Z-axis range multiplier
    'plot_downsample': 15000,  # Max points rendered in the 3D surface
    'colorscale': 'Turbo',     # Plotly colorscale name
    'disp_unit': 'mm',         # 'mm' converts by 1e6 nm/unit; anything else by 1e9
    'show_psd': True,          # NOTE(review): not read by this module's visible code
    'show_all_orientations': True,
}
|
||||
|
||||
|
||||
@register_insight
class MSFZernikeInsight(StudyInsight):
    """
    Mid-Spatial Frequency Zernike analysis for telescope mirror optimization.

    Provides detailed breakdown of spatial frequency content:
    - LSF (Low): n ≤ 10, correctable by M2 hexapod
    - MSF (Mid): n = 11-50, support print-through
    - HSF (High): n > 50, near mesh resolution limit

    Generates:
    - Band decomposition table with RSS metrics
    - MSF-only 3D surface visualization
    - Coefficient bar chart color-coded by band
    - Dominant MSF mode identification
    - Mesh resolution analysis
    """

    # Registry/dispatch metadata consumed by the insight framework
    # (presumably read by register_insight / the base class — confirm there).
    insight_type = "msf_zernike"
    name = "MSF Zernike Analysis"
    description = "Mid-spatial frequency analysis with band decomposition for support print-through"
    category = "optical"
    applicable_to = ["mirror", "optics", "wfe", "msf"]
    required_files = ["*.op2"]
|
||||
    def __init__(self, study_path: Path):
        """Initialize the insight; heavy data is loaded lazily by _load_data()."""
        super().__init__(study_path)
        self.op2_path: Optional[Path] = None        # resolved by can_generate()
        self.geo_path: Optional[Path] = None        # BDF/DAT matching the OP2
        self._node_geo: Optional[Dict] = None       # node id -> xyz position cache
        self._displacements: Optional[Dict] = None  # subcase label -> arrays
|
||||
|
||||
    def can_generate(self) -> bool:
        """Check if OP2 and geometry files exist.

        Searches results, iteration, and model folders for the newest OP2
        (preferring files named '*solution*.op2'), then looks for a
        sibling BDF/DAT geometry file.  Side effect: sets self.op2_path
        and self.geo_path on success.
        """
        search_paths = [
            self.results_path,
            self.study_path / "2_iterations",
            self.setup_path / "model",
        ]

        for search_path in search_paths:
            if not search_path.exists():
                continue
            # Prefer solver output explicitly named 'solution'; fall back
            # to any OP2 in the tree.
            op2_files = list(search_path.glob("**/*solution*.op2"))
            if not op2_files:
                op2_files = list(search_path.glob("**/*.op2"))
            if op2_files:
                # Newest file wins (most recent solve).
                self.op2_path = max(op2_files, key=lambda p: p.stat().st_mtime)
                break

        if self.op2_path is None:
            return False

        try:
            self.geo_path = self._find_geometry_file(self.op2_path)
            return True
        except FileNotFoundError:
            return False
|
||||
|
||||
def _find_geometry_file(self, op2_path: Path) -> Path:
|
||||
"""Find BDF/DAT geometry file for OP2."""
|
||||
folder = op2_path.parent
|
||||
base = op2_path.stem
|
||||
|
||||
for ext in ['.dat', '.bdf']:
|
||||
cand = folder / (base + ext)
|
||||
if cand.exists():
|
||||
return cand
|
||||
|
||||
for f in folder.iterdir():
|
||||
if f.suffix.lower() in ['.dat', '.bdf']:
|
||||
return f
|
||||
|
||||
raise FileNotFoundError(f"No geometry file found for {op2_path}")
|
||||
|
||||
    def _load_data(self):
        """Load geometry and displacement data.

        Reads node positions from the BDF into self._node_geo and
        per-subcase translational displacements from the OP2 into
        self._displacements.  Idempotent: returns immediately if the
        geometry cache is already populated.

        Raises:
            RuntimeError: if the OP2 contains no displacement results.
        """
        if self._node_geo is not None:
            return

        _load_dependencies()

        bdf = _BDF()
        bdf.read_bdf(str(self.geo_path))
        self._node_geo = {int(nid): node.get_position()
                          for nid, node in bdf.nodes.items()}

        op2 = _OP2()
        op2.read_op2(str(self.op2_path))

        if not op2.displacements:
            raise RuntimeError("No displacement data in OP2")

        self._displacements = {}
        for key, darr in op2.displacements.items():
            data = darr.data
            # Displacement data may be (ntimes, nnodes, 6) or (nnodes, 6);
            # take the first step of a 3-D array, skip anything else.
            dmat = data[0] if data.ndim == 3 else (data if data.ndim == 2 else None)
            if dmat is None:
                continue

            ngt = darr.node_gridtype.astype(int)
            node_ids = ngt if ngt.ndim == 1 else ngt[:, 0]
            # Key subcases by their isubcase number when available,
            # otherwise by the raw OP2 result key.
            isubcase = getattr(darr, 'isubcase', None)
            label = str(isubcase) if isubcase else str(key)

            self._displacements[label] = {
                'node_ids': node_ids.astype(int),
                'disp': dmat.copy()
            }
|
||||
|
||||
    def _build_wfe_arrays(
        self,
        label: str,
        disp_unit: str = 'mm'
    ) -> Tuple[np.ndarray, np.ndarray, np.ndarray, np.ndarray]:
        """Build X, Y, WFE arrays for a subcase.

        WFE is 2x the Z displacement (reflection doubling for a mirror
        surface), converted to nanometres.  Nodes missing from the BDF
        geometry are skipped.

        Args:
            label: subcase key in self._displacements.
            disp_unit: 'mm' -> 1e6 nm/unit; anything else scales by 1e9
                (presumably metres — confirm against the FE model units).

        Returns:
            (X, Y, WFE_nm, node_ids) arrays for the valid nodes.
        """
        nm_per_unit = 1e6 if disp_unit == 'mm' else 1e9

        data = self._displacements[label]
        node_ids = data['node_ids']
        dmat = data['disp']

        X, Y, WFE = [], [], []
        valid_nids = []
        for nid, vec in zip(node_ids, dmat):
            geo = self._node_geo.get(int(nid))
            if geo is None:
                continue
            X.append(geo[0])
            Y.append(geo[1])
            # vec[2] is the Z translation; factor 2.0 is reflection doubling.
            wfe = vec[2] * 2.0 * nm_per_unit
            WFE.append(wfe)
            valid_nids.append(nid)

        return (np.array(X), np.array(Y), np.array(WFE), np.array(valid_nids))
|
||||
|
||||
def _compute_relative_wfe(
|
||||
self,
|
||||
X1, Y1, WFE1, nids1,
|
||||
X2, Y2, WFE2, nids2
|
||||
) -> Tuple[np.ndarray, np.ndarray, np.ndarray]:
|
||||
"""Compute WFE1 - WFE2 for common nodes."""
|
||||
ref_map = {int(nid): (x, y, w) for nid, x, y, w in zip(nids2, X2, Y2, WFE2)}
|
||||
|
||||
X_rel, Y_rel, WFE_rel = [], [], []
|
||||
for nid, x, y, w in zip(nids1, X1, Y1, WFE1):
|
||||
nid = int(nid)
|
||||
if nid in ref_map:
|
||||
_, _, w_ref = ref_map[nid]
|
||||
X_rel.append(x)
|
||||
Y_rel.append(y)
|
||||
WFE_rel.append(w - w_ref)
|
||||
|
||||
return np.array(X_rel), np.array(Y_rel), np.array(WFE_rel)
|
||||
|
||||
    def _analyze_subcase(
        self,
        X: np.ndarray,
        Y: np.ndarray,
        W_nm: np.ndarray,
        cfg: Dict
    ) -> Dict[str, Any]:
        """
        Full MSF analysis for a single subcase.

        Fits cfg['n_modes'] Zernike modes to the WFE, classifies them into
        LSF/MSF/HSF bands, and derives RSS metrics, band percentages,
        the dominant MSF modes, the MSF-only residual map, and a mesh
        resolution estimate.

        Returns:
            Dict with coefficients, bands, RSS, dominant modes, mesh info
        """
        n_modes = cfg['n_modes']
        lsf_max = cfg['lsf_max']
        msf_max = cfg['msf_max']

        # Fit Zernike
        coeffs, R = compute_zernike_coeffs(X, Y, W_nm, n_modes)

        # Band classification
        bands = classify_modes_by_band(n_modes, lsf_max, msf_max)

        # RSS per band
        rss = compute_band_rss(coeffs, bands)

        # Band percentages (guarded against an all-zero fit)
        total_rss = rss['total']
        pct = {
            'lsf': 100 * rss['lsf'] / total_rss if total_rss > 0 else 0,
            'msf': 100 * rss['msf'] / total_rss if total_rss > 0 else 0,
            'hsf': 100 * rss['hsf'] / total_rss if total_rss > 0 else 0,
        }

        # Dominant MSF modes
        dominant_msf = find_dominant_msf_modes(coeffs, bands, top_n=5)

        # MSF-only residual
        W_msf = compute_band_residual(X, Y, W_nm, coeffs, R, bands, ['msf'])

        # Mesh resolution
        mesh_info = estimate_mesh_resolution(X, Y)

        return {
            'coefficients': coeffs,
            'R': R,
            'bands': bands,
            'rss': rss,
            'rss_pct': pct,
            'dominant_msf': dominant_msf,
            'W_msf': W_msf,
            'mesh_info': mesh_info,
            'n_modes': n_modes,
            'lsf_max': lsf_max,
            'msf_max': msf_max,
        }
|
||||
|
||||
    def _generate_html(
        self,
        analyses: Dict[str, Dict],
        cfg: Dict,
        X: np.ndarray,
        Y: np.ndarray
    ) -> str:
        """Generate comprehensive HTML report.

        Builds one Plotly figure with four stacked panels: a 3D surface of
        the MSF-only residual, a band-decomposition RSS table (one column
        per analysis), a dominant-MSF-modes table, and a per-mode
        coefficient bar chart.

        Args:
            analyses: label -> result dict from _analyze_subcase().  The
                FIRST entry drives the 3D surface, bar chart, and mode
                table; its 'W_msf' must be index-aligned with X/Y.
            cfg: merged configuration (DEFAULT_CONFIG + user extras).
            X, Y: node coordinates for the 3D surface.

        Returns:
            Self-contained HTML string (Plotly loaded from CDN).
        """
        _load_dependencies()

        n_modes = cfg['n_modes']
        amp = cfg.get('amp', 0.5)
        pancake = cfg.get('pancake', 3.0)
        downsample = cfg.get('plot_downsample', 15000)
        colorscale = cfg.get('colorscale', 'Turbo')

        # Use first available analysis for visualization
        primary_key = list(analyses.keys())[0]
        primary = analyses[primary_key]

        coeffs = primary['coefficients']
        bands = primary['bands']
        W_msf = primary['W_msf']
        mesh_info = primary['mesh_info']

        # Prepare band-colored bar chart data
        bar_colors = []
        for j in range(1, n_modes + 1):
            if j in bands['lsf']:
                bar_colors.append(BAND_COLORS['lsf'])
            elif j in bands['msf']:
                bar_colors.append(BAND_COLORS['msf'])
            else:
                bar_colors.append(BAND_COLORS['hsf'])

        labels = [zernike_label(j) for j in range(1, n_modes + 1)]
        coeff_abs = np.abs(coeffs)

        # Downsample for 3D plot (fixed seed keeps the report reproducible)
        n = len(X)
        if n > downsample:
            rng = np.random.default_rng(42)
            sel = rng.choice(n, size=downsample, replace=False)
            Xp, Yp, Wp = X[sel], Y[sel], W_msf[sel]
        else:
            Xp, Yp, Wp = X, Y, W_msf

        res_amp = amp * Wp
        max_amp = float(np.max(np.abs(res_amp))) if res_amp.size else 1.0

        # Create figure with subplots
        fig = _make_subplots(
            rows=4, cols=1,
            specs=[[{"type": "scene"}], [{"type": "table"}],
                   [{"type": "table"}], [{"type": "xy"}]],
            row_heights=[0.40, 0.20, 0.15, 0.25],
            vertical_spacing=0.03,
            subplot_titles=[
                "<b>MSF Content Only (n=11-50)</b>",
                "<b>Band Decomposition (RSS)</b>",
                "<b>Dominant MSF Modes</b>",
                "<b>Coefficient Magnitude by Band</b>"
            ]
        )

        # 3D MSF surface; fall back to a raw point cloud if Delaunay
        # triangulation of the node layout fails.
        try:
            tri = _Triangulation(Xp, Yp)
            if tri.triangles is not None and len(tri.triangles) > 0:
                i, j, k = tri.triangles.T
                fig.add_trace(_go.Mesh3d(
                    x=Xp, y=Yp, z=res_amp,
                    i=i, j=j, k=k,
                    intensity=res_amp,
                    colorscale=colorscale,
                    opacity=1.0,
                    flatshading=False,
                    lighting=dict(ambient=0.4, diffuse=0.8, specular=0.3,
                                  roughness=0.5, fresnel=0.2),
                    lightposition=dict(x=100, y=200, z=300),
                    showscale=True,
                    colorbar=dict(title=dict(text="MSF WFE (nm)", side="right"),
                                  thickness=15, len=0.5, tickformat=".1f"),
                    hovertemplate="X: %{x:.1f}<br>Y: %{y:.1f}<br>MSF: %{z:.2f} nm<extra></extra>"
                ), row=1, col=1)
        except Exception:
            fig.add_trace(_go.Scatter3d(
                x=Xp, y=Yp, z=res_amp,
                mode='markers',
                marker=dict(size=2, color=res_amp, colorscale=colorscale, showscale=True),
                showlegend=False
            ), row=1, col=1)

        # Configure 3D scene ('pancake' stretches the Z range for visibility)
        fig.update_scenes(
            camera=dict(eye=dict(x=1.2, y=1.2, z=0.8), up=dict(x=0, y=0, z=1)),
            xaxis=dict(title="X (mm)", showgrid=True,
                       gridcolor='rgba(128,128,128,0.3)',
                       showbackground=True, backgroundcolor='rgba(240,240,240,0.9)'),
            yaxis=dict(title="Y (mm)", showgrid=True,
                       gridcolor='rgba(128,128,128,0.3)',
                       showbackground=True, backgroundcolor='rgba(240,240,240,0.9)'),
            zaxis=dict(title="MSF WFE (nm)",
                       range=[-max_amp * pancake, max_amp * pancake],
                       showgrid=True, gridcolor='rgba(128,128,128,0.3)',
                       showbackground=True, backgroundcolor='rgba(230,230,250,0.9)'),
            aspectmode='manual',
            aspectratio=dict(x=1, y=1, z=0.4)
        )

        # Band decomposition table: three static columns, then one RSS
        # column per analysis.  Feature sizes approximate diameter/order.
        band_headers = ["<b>Band</b>", "<b>Order Range</b>", "<b>Feature Size</b>"]
        band_values = [
            ["LSF (Low)", "MSF (Mid)", "HSF (High)", "<b>Total</b>", "<b>Filtered (J5+)</b>"],
            [f"n ≤ {cfg['lsf_max']}", f"n = {cfg['lsf_max']+1}-{cfg['msf_max']}",
             f"n > {cfg['msf_max']}", "All", "J5+ (excl. piston/tip/tilt/defocus)"],
            [f"> {int(mesh_info['diameter_mm']/cfg['lsf_max'])} mm",
             f"{int(mesh_info['diameter_mm']/cfg['msf_max'])}-{int(mesh_info['diameter_mm']/(cfg['lsf_max']+1))} mm",
             f"< {int(mesh_info['diameter_mm']/cfg['msf_max'])} mm", "-", "M2 correctable removed"],
        ]

        # Add columns for each analysis
        for key, analysis in analyses.items():
            band_headers.append(f"<b>{key} RSS (nm)</b>")
            rss = analysis['rss']
            pct = analysis['rss_pct']
            # Calculate filtered percentage
            filtered_pct = 100 * rss['filtered'] / rss['total'] if rss['total'] > 0 else 0
            band_values.append([
                f"{rss['lsf']:.2f} ({pct['lsf']:.1f}%)",
                f"{rss['msf']:.2f} ({pct['msf']:.1f}%)",
                f"{rss['hsf']:.2f} ({pct['hsf']:.1f}%)",
                f"<b>{rss['total']:.2f}</b>",
                f"<b>{rss['filtered']:.2f}</b> ({filtered_pct:.1f}%)",
            ])

        fig.add_trace(_go.Table(
            header=dict(values=band_headers,
                        align="left", fill_color='#1f2937', font=dict(color='white')),
            cells=dict(values=band_values,
                       align="left", fill_color='#374151', font=dict(color='white'))
        ), row=2, col=1)

        # Dominant MSF modes table (from the primary analysis only)
        dom_modes = primary['dominant_msf']
        if dom_modes:
            fig.add_trace(_go.Table(
                header=dict(values=["<b>Rank</b>", "<b>Mode</b>", "<b>|Coeff| (nm)</b>", "<b>Order n</b>"],
                            align="left", fill_color='#1f2937', font=dict(color='white')),
                cells=dict(values=[
                    [f"#{i+1}" for i in range(len(dom_modes))],
                    [m['label'] for m in dom_modes],
                    [f"{m['coeff_nm']:.3f}" for m in dom_modes],
                    [str(m['n']) for m in dom_modes],
                ], align="left", fill_color='#374151', font=dict(color='white'))
            ), row=3, col=1)

        # Bar chart with band colors
        fig.add_trace(
            _go.Bar(
                x=coeff_abs.tolist(), y=labels,
                orientation='h',
                marker_color=bar_colors,
                hovertemplate="%{y}<br>|Coeff| = %{x:.3f} nm<extra></extra>",
                showlegend=False
            ),
            row=4, col=1
        )

        # Add legend for bands (invisible marker traces used as legend keys)
        for band_name, color in BAND_COLORS.items():
            fig.add_trace(_go.Scatter(
                x=[None], y=[None],
                mode='markers',
                marker=dict(size=10, color=color),
                name=band_name.upper(),
                showlegend=True
            ))

        # Layout (dark theme)
        fig.update_layout(
            width=1400, height=1600,
            margin=dict(t=60, b=20, l=20, r=20),
            paper_bgcolor='#111827', plot_bgcolor='#1f2937',
            font=dict(color='white'),
            title=dict(
                text=f"<b>Atomizer MSF Analysis</b> | {n_modes} modes | Mesh: {mesh_info['node_count']} nodes, {mesh_info['avg_spacing_mm']:.1f}mm spacing",
                x=0.5, font=dict(size=16)
            ),
            legend=dict(
                orientation="h",
                yanchor="bottom",
                y=1.02,
                xanchor="right",
                x=1
            )
        )

        return fig.to_html(include_plotlyjs='cdn', full_html=True)
|
||||
|
||||
def _generate(self, config: InsightConfig) -> InsightResult:
|
||||
"""Generate MSF analysis."""
|
||||
self._load_data()
|
||||
|
||||
cfg = {**DEFAULT_CONFIG, **config.extra}
|
||||
disp_unit = cfg['disp_unit']
|
||||
|
||||
# Map subcases
|
||||
disps = self._displacements
|
||||
if '1' in disps and '2' in disps:
|
||||
sc_map = {'90°': '1', '20°': '2', '40°': '3', '60°': '4'}
|
||||
elif '90' in disps and '20' in disps:
|
||||
sc_map = {'90°': '90', '20°': '20', '40°': '40', '60°': '60'}
|
||||
else:
|
||||
available = sorted(disps.keys(), key=lambda x: int(x) if x.isdigit() else 0)
|
||||
if len(available) >= 4:
|
||||
sc_map = {'90°': available[0], '20°': available[1],
|
||||
'40°': available[2], '60°': available[3]}
|
||||
else:
|
||||
return InsightResult(success=False,
|
||||
error=f"Need 4 subcases, found: {available}")
|
||||
|
||||
# Validate subcases
|
||||
for angle, label in sc_map.items():
|
||||
if label not in disps:
|
||||
return InsightResult(success=False,
|
||||
error=f"Subcase '{label}' (angle {angle}) not found")
|
||||
|
||||
# Load reference data
|
||||
X_20, Y_20, WFE_20, nids_20 = self._build_wfe_arrays(sc_map['20°'], disp_unit)
|
||||
X_90, Y_90, WFE_90, nids_90 = self._build_wfe_arrays(sc_map['90°'], disp_unit)
|
||||
|
||||
analyses = {}
|
||||
|
||||
# Analyze each key orientation
|
||||
for angle in ['40°', '60°']:
|
||||
label = sc_map[angle]
|
||||
X, Y, WFE, nids = self._build_wfe_arrays(label, disp_unit)
|
||||
|
||||
# Absolute
|
||||
analyses[f"{angle} Abs"] = self._analyze_subcase(X, Y, WFE, cfg)
|
||||
|
||||
# Relative to 20°
|
||||
X_rel, Y_rel, WFE_rel = self._compute_relative_wfe(
|
||||
X, Y, WFE, nids, X_20, Y_20, WFE_20, nids_20)
|
||||
analyses[f"{angle} vs 20°"] = self._analyze_subcase(X_rel, Y_rel, WFE_rel, cfg)
|
||||
|
||||
# Relative to 90° (manufacturing)
|
||||
X_rel90, Y_rel90, WFE_rel90 = self._compute_relative_wfe(
|
||||
X, Y, WFE, nids, X_90, Y_90, WFE_90, nids_90)
|
||||
analyses[f"{angle} vs 90°"] = self._analyze_subcase(X_rel90, Y_rel90, WFE_rel90, cfg)
|
||||
|
||||
# Also analyze 90° absolute (manufacturing baseline)
|
||||
analyses["90° Mfg"] = self._analyze_subcase(X_90, Y_90, WFE_90, cfg)
|
||||
|
||||
# Generate HTML using 40° vs 20° as primary visualization
|
||||
primary_analysis = analyses["40° vs 20°"]
|
||||
X_40, Y_40, _, _ = self._build_wfe_arrays(sc_map['40°'], disp_unit)
|
||||
X_rel40, Y_rel40, _ = self._compute_relative_wfe(
|
||||
X_40, Y_40, analyses["40° Abs"]['W_msf'], np.arange(len(X_40)),
|
||||
X_20, Y_20, WFE_20, nids_20)
|
||||
|
||||
html = self._generate_html(analyses, cfg, X_40, Y_40)
|
||||
|
||||
timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
|
||||
output_dir = config.output_dir or self.insights_path
|
||||
output_dir.mkdir(parents=True, exist_ok=True)
|
||||
|
||||
html_path = output_dir / f"msf_zernike_{timestamp}.html"
|
||||
html_path.write_text(html, encoding='utf-8')
|
||||
|
||||
# Build summary
|
||||
summary = {
|
||||
'n_modes': cfg['n_modes'],
|
||||
'lsf_max_order': cfg['lsf_max'],
|
||||
'msf_max_order': cfg['msf_max'],
|
||||
'mesh_nodes': primary_analysis['mesh_info']['node_count'],
|
||||
'mesh_spacing_mm': primary_analysis['mesh_info']['avg_spacing_mm'],
|
||||
'max_resolvable_order': primary_analysis['mesh_info']['max_resolvable_order'],
|
||||
}
|
||||
|
||||
# Add key metrics for each analysis
|
||||
for key, analysis in analyses.items():
|
||||
safe_key = key.replace('°', 'deg').replace(' ', '_')
|
||||
summary[f'{safe_key}_lsf_rss'] = analysis['rss']['lsf']
|
||||
summary[f'{safe_key}_msf_rss'] = analysis['rss']['msf']
|
||||
summary[f'{safe_key}_hsf_rss'] = analysis['rss']['hsf']
|
||||
summary[f'{safe_key}_total_rss'] = analysis['rss']['total']
|
||||
summary[f'{safe_key}_filtered_rss'] = analysis['rss']['filtered']
|
||||
summary[f'{safe_key}_msf_pct'] = analysis['rss_pct']['msf']
|
||||
|
||||
return InsightResult(
|
||||
success=True,
|
||||
html_path=html_path,
|
||||
summary=summary
|
||||
)
|
||||
Reference in New Issue
Block a user