# Source: Atomizer/optimization_engine/insights/wfe_psd.py
"""
WFE Power Spectral Density (PSD) Analysis - Tony Hull's JWST Methodology
Implements Tony Hull's approach for analyzing wavefront error power spectral density,
specifically for large telescope mirrors with gravity-induced deformations.
Key Features:
- Surface interpolation to uniform grid
- Hann windowing to reduce edge effects
- 2D FFT with radial averaging
- Log-log scaling for PSD visualization
- Cycles-per-aperture spatial frequency
- Gravity vs thermal loading scales
- Reference normalization
Approach based on: "Power spectral density specification for large optical surfaces"
by Tony Hull, adapted for telescope mirror analysis
Author: Implementation based on Tony Hull's JWST methodology
"""
from pathlib import Path
from datetime import datetime
from typing import Dict, Any, List, Optional, Tuple
import numpy as np
from scipy.interpolate import RegularGridInterpolator
from numpy.fft import fft2, fftshift
# Lazy imports
_plotly_loaded = False
_go = None
_make_subplots = None
_Triangulation = None
_OP2 = None
_BDF = None
_ZernikeOPDExtractor = None
def _load_dependencies():
    """Import the plotting/FEM libraries on first use and cache them in module globals."""
    global _plotly_loaded, _go, _make_subplots, _Triangulation, _OP2, _BDF, _ZernikeOPDExtractor
    # Fast path: everything was already loaded by an earlier call.
    if _plotly_loaded:
        return
    import plotly.graph_objects as go
    from plotly.subplots import make_subplots
    from matplotlib.tri import Triangulation
    from pyNastran.op2.op2 import OP2
    from pyNastran.bdf.bdf import BDF
    from ..extractors.extract_zernike_figure import ZernikeOPDExtractor
    # Publish the imported names through the module-level cache slots.
    _go, _make_subplots = go, make_subplots
    _Triangulation = Triangulation
    _OP2, _BDF = OP2, BDF
    _ZernikeOPDExtractor = ZernikeOPDExtractor
    _plotly_loaded = True
def compute_surface_psd(
    X: np.ndarray,
    Y: np.ndarray,
    Z: np.ndarray,
    aperture_radius: float,
    n_points: int = 512
) -> Tuple[np.ndarray, np.ndarray]:
    """
    Compute the radially averaged power spectral density of surface height data.

    The scattered (X, Y, Z) samples are interpolated onto a uniform Cartesian
    grid over the circular aperture, Hann-windowed to suppress edge leakage,
    transformed with a 2D FFT, and radially averaged into logarithmic
    frequency bins (annulus-area normalized).

    Args:
        X: Surface x-coordinates [mm]
        Y: Surface y-coordinates [mm]
        Z: Surface height values [nm]
        aperture_radius: Physical radius of the mirror [mm]
        n_points: Grid resolution for interpolation

    Returns:
        freqs: Spatial frequencies [cycles/aperture]
        psd_values: PSD amplitude [nm²·cycles²/aperture²]

    Raises:
        ValueError: If fewer than 100 valid samples remain, or no PSD bin
            contains finite positive power.
    """
    from scipy.interpolate import griddata

    # Drop NaN heights before interpolation.
    valid_mask = ~np.isnan(Z)
    X, Y, Z = X[valid_mask], Y[valid_mask], Z[valid_mask]
    if len(X) < 100:
        raise ValueError("Insufficient valid data points for PSD analysis")

    # Uniform grid spanning the aperture diameter.
    x_grid = np.linspace(-aperture_radius, aperture_radius, n_points)
    y_grid = np.linspace(-aperture_radius, aperture_radius, n_points)
    X_grid, Y_grid = np.meshgrid(x_grid, y_grid)
    aperture_mask = np.sqrt(X_grid**2 + Y_grid**2) <= aperture_radius

    # Interpolate the scattered samples onto the grid. griddata handles
    # non-gridded input directly.
    # BUGFIX: the previous version first constructed a RegularGridInterpolator
    # from the scattered points with a flat `values` array, which always raised
    # a shape ValueError before griddata was ever reached; that dead path is
    # removed.
    points = np.column_stack((X.ravel(), Y.ravel()))
    interpolated_values = griddata(points, Z.ravel(), (X_grid, Y_grid), method='linear')

    # Zero outside the aperture, then clear residual NaNs (convex-hull gaps).
    interpolated_values[~aperture_mask] = 0.0
    interpolated_values = np.nan_to_num(interpolated_values, nan=0.0)

    # Separable Hann window to reduce spectral leakage from the hard edge.
    # np.hanning(M) == 0.5*(1 - cos(2*pi*n/(M-1))), identical to the previous
    # hand-rolled formula.
    hann_1d = np.hanning(n_points)
    windowed_surface = interpolated_values * np.outer(hann_1d, hann_1d)

    # 2D FFT -> power spectrum with the DC term centered.
    psd_2d = np.abs(fftshift(fft2(windowed_surface)))**2

    # Radial spatial-frequency magnitude, converted to cycles per aperture
    # diameter (absolute frequency * diameter).
    freq_1d = fftshift(np.fft.fftfreq(n_points, d=2 * aperture_radius / n_points))
    FX, FY = np.meshgrid(freq_1d, freq_1d)
    freqs_cycles_per_aperture = np.sqrt(FX**2 + FY**2) * 2 * aperture_radius

    # Radially average into logarithmic bins; each bin's power is normalized
    # by the area of its frequency annulus.
    max_freq = np.max(freqs_cycles_per_aperture)
    freq_bins = np.logspace(np.log10(0.01), np.log10(max(max_freq, 100)), 100)
    psd_values = []
    freqs_center = []
    for lo, hi in zip(freq_bins[:-1], freq_bins[1:]):
        bin_mask = (freqs_cycles_per_aperture >= lo) & (freqs_cycles_per_aperture < hi)
        if not np.any(bin_mask):
            continue
        # BUGFIX: the previous gate `np.any(aperture_mask[bin_mask])` indexed a
        # spatial-domain mask with a frequency-domain mask — meaningless; removed.
        annulus_area = np.pi * (hi**2 - lo**2)
        psd_bin_values = psd_2d[bin_mask] / annulus_area
        valid_psd = psd_bin_values[np.isfinite(psd_bin_values) & (psd_bin_values > 0)]
        if len(valid_psd) > 0:
            psd_avg = np.mean(valid_psd)
            if psd_avg > 0:
                psd_values.append(psd_avg)
                freqs_center.append(np.sqrt(lo * hi))  # geometric bin center

    if not freqs_center:
        raise ValueError("No valid PSD data found")
    return np.array(freqs_center), np.array(psd_values)
def create_psd_plot(
    psd_data: Tuple[np.ndarray, np.ndarray],
    title: str = "Power Spectral Density Analysis",
    reference_psd: Optional[Tuple[np.ndarray, np.ndarray]] = None,
    highlight_regions: bool = True
) -> "_go.Figure":
    """Create an interactive PSD plot with log-log scaling.

    Args:
        psd_data: (frequencies [cycles/aperture], psd values) tuple, e.g.
            from ``compute_surface_psd``.
        title: Plot title text.
        reference_psd: Optional (frequencies, values) reference curve, drawn
            as a dashed grey line.
        highlight_regions: If True, shade the gravity / support / high
            frequency bands.

    Returns:
        A plotly Figure.

    Note:
        The return annotation is intentionally a string: ``_go`` is populated
        lazily by ``_load_dependencies`` and is still ``None`` at module
        import time, so an unquoted ``_go.Figure`` raised AttributeError
        when this module was imported.
    """
    _load_dependencies()
    frequencies, psd_values = psd_data
    fig = _go.Figure()
    # Main PSD trace
    fig.add_trace(_go.Scatter(
        x=frequencies,
        y=psd_values,
        mode='lines+markers',
        name='Surface PSD',
        line=dict(
            color='#3b82f6',
            width=2
        ),
        marker=dict(
            size=4,
            color='#3b82f6'
        ),
        hovertemplate="""
<b>Spatial Frequency:</b> %{x:.2f} cycles/aperture<br>
<b>PSD:</b> %{y:.2e} nm²·cycles²/aperture²<br>
<b>Feature Size:</b> %{customdata:.1f} mm
<extra></extra>
""",
        # Feature size shown on hover; assumes frequencies are in
        # cycles/diameter with a normalized diameter of 2 units — TODO confirm
        # against the units produced by compute_surface_psd for real apertures.
        customdata=(2.0 / frequencies),
        showlegend=True
    ))
    # Dashed grey reference curve (e.g. gravity at a reference elevation).
    if reference_psd is not None:
        ref_freqs, ref_values = reference_psd
        fig.add_trace(_go.Scatter(
            x=ref_freqs,
            y=ref_values,
            mode='lines',
            name='Gravity 20° Reference',
            line=dict(
                color='#64748b',
                width=2,
                dash='dash'
            ),
            opacity=0.7,
            showlegend=True
        ))
    # Shade the three physically meaningful frequency bands.
    if highlight_regions:
        # Gravity signature region (0.1-2 cycles/aperture)
        fig.add_vrect(
            x0=0.1, x1=2.0,
            fillcolor='rgba(59, 130, 246, 0.1)',
            line=dict(width=0),
            layer='below',
            name='Gravity Signature',
            label=dict(text="Gravity Signature", textposition="middle right",
                       font=dict(color='#3b82f6', size=12))
        )
        # Support structure region (2-20 cycles/aperture)
        fig.add_vrect(
            x0=2.0, x1=20.0,
            fillcolor='rgba(245, 158, 11, 0.1)',
            line=dict(width=0),
            layer='below',
            name='Support Structures',
            label=dict(text="Support Print-through", textposition="middle right",
                       font=dict(color='#f59e0b', size=12))
        )
        # High frequency regime (>20 cycles/aperture)
        fig.add_vrect(
            x0=20.0, x1=100.0,
            fillcolor='rgba(239, 68, 68, 0.1)',
            line=dict(width=0),
            layer='below',
            name='High Frequency',
            label=dict(text="Polishing Residuals", textposition="middle right",
                       font=dict(color='#ef4444', size=12))
        )
    # Log-log layout with fixed x-range matching the binning in
    # compute_surface_psd (0.01-100 cycles/aperture).
    fig.update_layout(
        title=dict(
            text=title,
            font=dict(size=16, color='#1e293b'),
            x=0.5
        ),
        xaxis=dict(
            type='log',
            title=dict(text="Spatial Frequency [cycles/aperture]", font=dict(size=14)),
            range=[np.log10(0.01), np.log10(100)],
            gridcolor='rgba(100,116,139,0.2)',
            linecolor='#e2e8f0',
            linewidth=1
        ),
        yaxis=dict(
            type='log',
            title=dict(text="Power Spectral Density [nm²·cycles²/aperture²]", font=dict(size=14)),
            gridcolor='rgba(100,116,139,0.2)',
            linecolor='#e2e8f0',
            linewidth=1
        ),
        width=1200,
        height=700,
        margin=dict(l=80, r=80, t=60, b=80),
        paper_bgcolor='#ffffff',
        plot_bgcolor='#f8fafc',
        font=dict(color='#1e293b', size=12),
        legend=dict(
            orientation='h',
            yanchor='top',
            y=-0.15,
            xanchor='center',
            x=0.5
        ),
        hovermode='x unified'
    )
    return fig
def create_psd_summary(
psd_data: Tuple[np.ndarray, np.ndarray],
elevation_angle: str = "40°"
) -> str:
"""Generate formatted summary text for PSD analysis."""
frequencies, psd_values = psd_data
# Define frequency bands
gravity_band = (0.1, 2.0) # cycles/aperture
support_band = (2.0, 20.0) # cycles/aperture
hf_band = (20.0, 100.0) # cycles/aperture
# Calculate integrated PSD values for each band
def integrate_band(low_freq, high_freq):
mask = (frequencies >= low_freq) & (frequencies <= high_freq)
if np.any(mask):
return np.trapz(psd_values[mask], frequencies[mask])
else:
return 0.0
gravity_rms = np.sqrt(integrate_band(*gravity_band))
support_rms = np.sqrt(integrate_band(*support_band))
hf_rms = np.sqrt(integrate_band(*hf_band))
total_rms = np.sqrt(np.trapz(psd_values, frequencies))
# Find peak PSD values
peak_freq = frequencies[np.argmax(psd_values)]
peak_amplitude = np.max(psd_values)
# Convert frequency to feature size
if peak_freq > 0:
feature_size = 2.0 / peak_freq # mm
else:
feature_size = np.inf
summary = f"""
**WFE Power Spectral Density Summary - {elevation_angle}**
**Gravity Loading Analysis:**
- Gravity Signature (0.1-2 cycles/aperture): {gravity_rms:.2f} nm RMS
- Peak frequency: {peak_freq:.2f} cycles/aperture
- Dominant feature size: {feature_size:.1f} mm
**Support Structure Analysis:**
- Support Print-through (2-20 cycles/aperture): {support_rms:.2f} nm RMS
- HF Polishing Residuals (20-100 cycles/aperture): {hf_rms:.2f} nm RMS
**Total Deformation:**
- Integrated WFE: {total_rms:.2f} nm RMS
- Gravity contribution: {100*gravity_rms/total_rms:.1f}%
- Support structure contribution: {100*support_rms/total_rms:.1f}%
**Technical Notes:**
- Frequency scaling: cycles per aperture diameter
- Reference normalization: None (absolute measurement)
- Spatial resolution: Limited by mesh density
"""
return summary