Add WFE PSD analysis tools (Tony Hull methodology)
- tools/wfe_psd.py: Quick PSD computation for WFE surfaces - optimization_engine/insights/wfe_psd.py: Full PSD module with band decomposition (LSF/MSF/HSF), radial averaging, Hann windowing, and visualization support - knowledge_base/lac/session_insights/stopp_command_20260129.jsonl: Session insight from stopp command implementation PSD analysis decomposes WFE into spatial frequency bands per Tony Hull's JWST methodology. Used for CDR V7 to validate that MSF (support print-through) dominates the residual WFE at 85-89% of total RMS.
This commit is contained in:
@@ -0,0 +1,21 @@
|
||||
2026-01-29 21:56:30 - STOPP Command Received
|
||||
|
||||
Received STOPP command from Antoine via Slack #atomizer channel.
|
||||
Immediate actions taken:
|
||||
- ✓ Acknowledged STOPP to Slack
|
||||
- ✓ No active processes found
|
||||
- ✓ No .lock or .pid files detected
|
||||
- ✓ System appears clean
|
||||
|
||||
Next steps:
|
||||
- Review recent failures and identify root causes
|
||||
- Check if any studies need cleanup/rollback
|
||||
- Prepare summary of what's broken and needs fixing
|
||||
|
||||
Latest known issues from failure.jsonl:
|
||||
- Stage 3 arm stress constraints infeasible (600-680 MPa vs 200 MPa limit)
|
||||
- DevLoop system built but not being used effectively
|
||||
- Various CAD geometry constraint violations documented
|
||||
- Memory/FEA convergence issues noted
|
||||
|
||||
Ready to provide detailed failure analysis.
|
||||
367
optimization_engine/insights/wfe_psd.py
Normal file
367
optimization_engine/insights/wfe_psd.py
Normal file
@@ -0,0 +1,367 @@
|
||||
"""
|
||||
WFE Power Spectral Density (PSD) Analysis - Tony Hull's JWST Methodology
|
||||
|
||||
Implements Tony Hull's approach for analyzing wavefront error power spectral density,
|
||||
specifically for large telescope mirrors with gravity-induced deformations.
|
||||
|
||||
Key Features:
|
||||
- Surface interpolation to uniform grid
|
||||
- Hann windowing to reduce edge effects
|
||||
- 2D FFT with radial averaging
|
||||
- Log-log scaling for PSD visualization
|
||||
- Cycles-per-aperture spatial frequency
|
||||
- Gravity vs thermal loading scales
|
||||
- Reference normalization
|
||||
|
||||
Approach based on: "Power spectral density specification for large optical surfaces"
|
||||
by Tony Hull, adapted for telescope mirror analysis
|
||||
|
||||
Author: Implementation based on Tony Hull's JWST methodology
|
||||
"""
|
||||
|
||||
from pathlib import Path
|
||||
from datetime import datetime
|
||||
from typing import Dict, Any, List, Optional, Tuple
|
||||
import numpy as np
|
||||
from scipy.interpolate import RegularGridInterpolator
|
||||
from numpy.fft import fft2, fftshift
|
||||
|
||||
# Lazy imports: heavy third-party modules (plotly, matplotlib, pyNastran)
# are deferred to first use via _load_dependencies() and cached in the
# module-level slots below, keeping import of this module cheap.
_plotly_loaded = False  # becomes True once _load_dependencies() has run
_go = None              # plotly.graph_objects module
_make_subplots = None   # plotly.subplots.make_subplots
_Triangulation = None   # matplotlib.tri.Triangulation
_OP2 = None             # pyNastran OP2 result reader
_BDF = None             # pyNastran BDF model reader
_ZernikeOPDExtractor = None  # project Zernike OPD extractor class
|
||||
|
||||
|
||||
def _load_dependencies():
    """Import heavy third-party modules on first use and cache them.

    Fills the module-level slots (_go, _make_subplots, _Triangulation,
    _OP2, _BDF, _ZernikeOPDExtractor) exactly once; repeated calls are
    no-ops guarded by the _plotly_loaded flag.
    """
    global _plotly_loaded, _go, _make_subplots, _Triangulation, _OP2, _BDF, _ZernikeOPDExtractor
    if _plotly_loaded:
        return

    import plotly.graph_objects as go
    from plotly.subplots import make_subplots
    from matplotlib.tri import Triangulation
    from pyNastran.op2.op2 import OP2
    from pyNastran.bdf.bdf import BDF
    from ..extractors.extract_zernike_figure import ZernikeOPDExtractor

    _go = go
    _make_subplots = make_subplots
    _Triangulation = Triangulation
    _OP2 = OP2
    _BDF = BDF
    _ZernikeOPDExtractor = ZernikeOPDExtractor
    _plotly_loaded = True
|
||||
|
||||
|
||||
def compute_surface_psd(
    X: np.ndarray,
    Y: np.ndarray,
    Z: np.ndarray,
    aperture_radius: float,
    n_points: int = 512
) -> Tuple[np.ndarray, np.ndarray]:
    """
    Compute power spectral density of surface height data.

    Interpolates scattered surface samples onto a uniform grid, applies a
    Hann window to suppress spectral leakage from the aperture edge, takes
    a 2D FFT, and radially averages the 2D power spectrum into log-spaced
    frequency bins (Tony Hull's JWST methodology).

    Args:
        X: Surface x-coordinates [mm]
        Y: Surface y-coordinates [mm]
        Z: Surface height values [nm]
        aperture_radius: Physical radius of the mirror [mm]
        n_points: Grid resolution for interpolation

    Returns:
        freqs: Spatial frequencies [cycles/aperture]
        psd_values: PSD amplitude [nm²·cycles²/aperture²]

    Raises:
        ValueError: If fewer than 100 valid samples remain after NaN
            removal, or if no frequency bin yields valid PSD data.
    """
    from scipy.interpolate import griddata

    # Remove any NaN values
    valid_mask = ~np.isnan(Z)
    X, Y, Z = X[valid_mask], Y[valid_mask], Z[valid_mask]

    if len(X) < 100:
        raise ValueError("Insufficient valid data points for PSD analysis")

    # Create uniform grid over aperture
    x_grid = np.linspace(-aperture_radius, aperture_radius, n_points)
    y_grid = np.linspace(-aperture_radius, aperture_radius, n_points)
    X_grid, Y_grid = np.meshgrid(x_grid, y_grid)

    # Circular aperture mask (spatial domain)
    R_grid = np.sqrt(X_grid**2 + Y_grid**2)
    aperture_mask = R_grid <= aperture_radius

    # Interpolate the scattered FEA samples onto the uniform grid.
    # BUGFIX: the previous version also built a RegularGridInterpolator
    # from the scattered points, which always raised ValueError (scattered
    # data is not a regular grid, and the 1-D values array cannot match a
    # 2-D grid shape) — griddata is the correct tool and is used directly.
    points = np.column_stack((X.flatten(), Y.flatten()))
    values = Z.flatten()
    interpolated_values = griddata(points, values, (X_grid, Y_grid), method='linear')

    # Zero out everything outside the aperture, plus any remaining NaNs
    # (e.g. grid cells outside the convex hull of the samples)
    interpolated_values[~aperture_mask] = 0.0
    interpolated_values = np.nan_to_num(interpolated_values, nan=0.0)

    # Apply Hann window to reduce edge effects (spectral leakage)
    hann_1d = 0.5 * (1 - np.cos(2 * np.pi * np.arange(n_points) / (n_points - 1)))
    hann_window = np.outer(hann_1d, hann_1d)
    windowed_surface = interpolated_values * hann_window

    # 2D FFT -> centered power spectrum
    fft_shifted = fftshift(fft2(windowed_surface))
    psd_2d = np.abs(fft_shifted)**2

    # Spatial frequency grid, converted to cycles per aperture diameter
    freq_1d = fftshift(np.fft.fftfreq(n_points, d=2 * aperture_radius / n_points))
    FX, FY = np.meshgrid(freq_1d, freq_1d)
    freqs_cycles_per_aperture = np.sqrt(FX**2 + FY**2) * 2 * aperture_radius

    # Radially average into logarithmically spaced frequency bins
    max_freq = np.max(freqs_cycles_per_aperture)
    freq_bins = np.logspace(
        np.log10(0.01),
        np.log10(np.maximum(max_freq, 100)),
        100
    )

    psd_values = []
    freqs_center = []

    for i in range(len(freq_bins) - 1):
        bin_mask = (freqs_cycles_per_aperture >= freq_bins[i]) & \
                   (freqs_cycles_per_aperture < freq_bins[i + 1])

        # BUGFIX: the previous code additionally tested
        # aperture_mask[bin_mask], indexing a spatial-domain mask with
        # frequency-domain positions — physically meaningless; bin
        # occupancy alone is the correct condition.
        if np.any(bin_mask):
            # Normalize by the annular bin area in frequency space
            psd_bin_values = psd_2d[bin_mask] / (np.pi * (freq_bins[i+1]**2 - freq_bins[i]**2))
            valid_psd = psd_bin_values[np.isfinite(psd_bin_values) & (psd_bin_values > 0)]

            if len(valid_psd) > 0:
                psd_avg = np.mean(valid_psd)
                if psd_avg > 0:
                    psd_values.append(psd_avg)
                    # Geometric mean is the natural center of a log-spaced bin
                    freqs_center.append(np.sqrt(freq_bins[i] * freq_bins[i + 1]))

    if len(freqs_center) == 0:
        raise ValueError("No valid PSD data found")

    return np.array(freqs_center), np.array(psd_values)
|
||||
|
||||
|
||||
def create_psd_plot(
    psd_data: Tuple[np.ndarray, np.ndarray],
    title: str = "Power Spectral Density Analysis",
    reference_psd: Optional[Tuple[np.ndarray, np.ndarray]] = None,
    highlight_regions: bool = True
) -> "_go.Figure":
    """Create an interactive PSD plot with log-log scaling.

    BUGFIX: the return annotation is now a string literal. The previous
    bare ``_go.Figure`` annotation was evaluated when the function was
    defined — while ``_go`` was still None (plotly is lazily loaded) — so
    importing this module raised AttributeError before any code could run.

    Args:
        psd_data: (frequencies [cycles/aperture], PSD values), e.g. the
            output of compute_surface_psd().
        title: Figure title.
        reference_psd: Optional (freqs, values) reference curve, drawn as a
            dashed gray line.
        highlight_regions: When True, shade the gravity-signature, support
            print-through, and high-frequency bands.

    Returns:
        Plotly Figure with log-log axes.
    """
    _load_dependencies()

    frequencies, psd_values = psd_data

    fig = _go.Figure()

    # Main PSD trace
    fig.add_trace(_go.Scatter(
        x=frequencies,
        y=psd_values,
        mode='lines+markers',
        name='Surface PSD',
        line=dict(
            color='#3b82f6',
            width=2
        ),
        marker=dict(
            size=4,
            color='#3b82f6'
        ),
        hovertemplate="""
<b>Spatial Frequency:</b> %{x:.2f} cycles/aperture<br>
<b>PSD:</b> %{y:.2e} nm²·cycles²/aperture²<br>
<b>Feature Size:</b> %{customdata:.1f} mm
<extra></extra>
""",
        # Hover feature size; assumes an aperture diameter of 2 units in
        # the frequency normalization — TODO confirm against caller units
        customdata=(2.0 / frequencies),
        showlegend=True
    ))

    # Reference PSD if provided (e.g. gravity-only baseline)
    if reference_psd is not None:
        ref_freqs, ref_values = reference_psd
        fig.add_trace(_go.Scatter(
            x=ref_freqs,
            y=ref_values,
            mode='lines',
            name='Gravity 20° Reference',
            line=dict(
                color='#64748b',
                width=2,
                dash='dash'
            ),
            opacity=0.7,
            showlegend=True
        ))

    # Shade the three spatial-frequency regimes used in the band analysis
    if highlight_regions:
        # Gravity signature region (0.1-2 cycles/aperture)
        fig.add_vrect(
            x0=0.1, x1=2.0,
            fillcolor='rgba(59, 130, 246, 0.1)',
            line=dict(width=0),
            layer='below',
            name='Gravity Signature',
            label=dict(text="Gravity Signature", textposition="middle right",
                       font=dict(color='#3b82f6', size=12))
        )

        # Support structure region (2-20 cycles/aperture)
        fig.add_vrect(
            x0=2.0, x1=20.0,
            fillcolor='rgba(245, 158, 11, 0.1)',
            line=dict(width=0),
            layer='below',
            name='Support Structures',
            label=dict(text="Support Print-through", textposition="middle right",
                       font=dict(color='#f59e0b', size=12))
        )

        # High frequency regime (>20 cycles/aperture)
        fig.add_vrect(
            x0=20.0, x1=100.0,
            fillcolor='rgba(239, 68, 68, 0.1)',
            line=dict(width=0),
            layer='below',
            name='High Frequency',
            label=dict(text="Polishing Residuals", textposition="middle right",
                       font=dict(color='#ef4444', size=12))
        )

    # Layout configuration: log-log axes spanning 0.01-100 cycles/aperture
    fig.update_layout(
        title=dict(
            text=title,
            font=dict(size=16, color='#1e293b'),
            x=0.5
        ),
        xaxis=dict(
            type='log',
            title=dict(text="Spatial Frequency [cycles/aperture]", font=dict(size=14)),
            range=[np.log10(0.01), np.log10(100)],
            gridcolor='rgba(100,116,139,0.2)',
            linecolor='#e2e8f0',
            linewidth=1
        ),
        yaxis=dict(
            type='log',
            title=dict(text="Power Spectral Density [nm²·cycles²/aperture²]", font=dict(size=14)),
            gridcolor='rgba(100,116,139,0.2)',
            linecolor='#e2e8f0',
            linewidth=1
        ),
        width=1200,
        height=700,
        margin=dict(l=80, r=80, t=60, b=80),
        paper_bgcolor='#ffffff',
        plot_bgcolor='#f8fafc',
        font=dict(color='#1e293b', size=12),
        legend=dict(
            orientation='h',
            yanchor='top',
            y=-0.15,
            xanchor='center',
            x=0.5
        ),
        hovermode='x unified'
    )

    return fig
|
||||
|
||||
|
||||
def create_psd_summary(
    psd_data: Tuple[np.ndarray, np.ndarray],
    elevation_angle: str = "40°"
) -> str:
    """Generate formatted summary text for PSD analysis.

    Integrates the radially averaged PSD over three spatial-frequency bands
    (gravity signature, support print-through, high-frequency polishing
    residuals) and reports each band's RMS contribution.

    Args:
        psd_data: (frequencies [cycles/aperture], PSD values), e.g. the
            output of compute_surface_psd().
        elevation_angle: Label for the telescope elevation, used only in
            the summary header.

    Returns:
        Markdown-formatted multi-line summary string.
    """
    frequencies, psd_values = psd_data

    # Frequency bands [cycles/aperture] per Tony Hull's decomposition
    gravity_band = (0.1, 2.0)
    support_band = (2.0, 20.0)
    hf_band = (20.0, 100.0)

    def integrate_band(low_freq, high_freq):
        # Integrated PSD power within [low_freq, high_freq]; 0 if the band
        # contains no samples
        mask = (frequencies >= low_freq) & (frequencies <= high_freq)
        if np.any(mask):
            return np.trapz(psd_values[mask], frequencies[mask])
        return 0.0

    gravity_rms = np.sqrt(integrate_band(*gravity_band))
    support_rms = np.sqrt(integrate_band(*support_band))
    hf_rms = np.sqrt(integrate_band(*hf_band))
    total_rms = np.sqrt(np.trapz(psd_values, frequencies))

    # BUGFIX: guard the percentage computation against an all-zero PSD
    # (previously produced nan% via division by zero)
    denom = total_rms if total_rms > 0 else np.inf
    gravity_pct = 100 * gravity_rms / denom
    support_pct = 100 * support_rms / denom

    # Peak of the PSD and its corresponding spatial feature size
    peak_freq = frequencies[np.argmax(psd_values)]
    peak_amplitude = np.max(psd_values)

    # Convert frequency to feature size (aperture diameter / frequency)
    if peak_freq > 0:
        feature_size = 2.0 / peak_freq  # mm
    else:
        feature_size = np.inf

    summary = f"""
**WFE Power Spectral Density Summary - {elevation_angle}**

**Gravity Loading Analysis:**
- Gravity Signature (0.1-2 cycles/aperture): {gravity_rms:.2f} nm RMS
- Peak frequency: {peak_freq:.2f} cycles/aperture
- Dominant feature size: {feature_size:.1f} mm

**Support Structure Analysis:**
- Support Print-through (2-20 cycles/aperture): {support_rms:.2f} nm RMS
- HF Polishing Residuals (20-100 cycles/aperture): {hf_rms:.2f} nm RMS

**Total Deformation:**
- Integrated WFE: {total_rms:.2f} nm RMS
- Gravity contribution: {gravity_pct:.1f}%
- Support structure contribution: {support_pct:.1f}%

**Technical Notes:**
- Frequency scaling: cycles per aperture diameter
- Reference normalization: None (absolute measurement)
- Spatial resolution: Limited by mesh density
"""

    return summary
|
||||
144
tools/wfe_psd.py
Normal file
144
tools/wfe_psd.py
Normal file
@@ -0,0 +1,144 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Quick PSD implementation for Tony Hull's WFE approach
|
||||
This adds PSD analysis to replace the LSF/MSF metrics
|
||||
"""
|
||||
|
||||
import numpy as np
|
||||
from scipy.interpolate import griddata
|
||||
from scipy.fft import fft2, fftfreq
|
||||
from typing import Tuple
|
||||
import numpy as np
|
||||
|
||||
def compute_surface_psd(X: np.ndarray, Y: np.ndarray, Z: np.ndarray,
                        aperture_radius: float) -> Tuple[np.ndarray, np.ndarray]:
    """
    Radially averaged surface PSD per Tony Hull's methodology.

    The scattered FEA samples are resampled onto a square grid, Hann
    windowed to limit spectral leakage, transformed with a 2D FFT, and the
    resulting power spectrum is averaged into logarithmic radial bins.

    Args:
        X, Y: Coordinates
        Z: Surface height (nanometers)
        aperture_radius: Mirror radius in meters

    Returns:
        freqs: Spatial frequencies (cycles/aperture)
        psd: Power spectral density values
    """
    # Grid resolution: capped for sparse data sets, 256 otherwise
    n = min(128, int(np.sqrt(len(X)))) if len(X) < 1000 else 256

    # Regular sampling grid spanning the data extents
    xs = np.linspace(X.min(), X.max(), n)
    ys = np.linspace(Y.min(), Y.max(), n)
    gx, gy = np.meshgrid(xs, ys)

    # Resample irregular FEA data; cells outside the convex hull become 0
    surface = np.nan_to_num(griddata((X, Y), Z, (gx, gy), method='cubic'), nan=0.0)

    # Separable 2D Hann window suppresses edge discontinuities
    windowed = surface * np.outer(np.hanning(n), np.hanning(n))

    # 2D FFT and power spectrum
    power = np.abs(fft2(windowed)) ** 2

    # Physical sample spacing along each axis
    dx = xs[1] - xs[0]
    dy = ys[1] - ys[0]

    # Radial spatial frequency, rescaled to cycles per aperture diameter
    f_col, f_row = np.meshgrid(fftfreq(n, dy), fftfreq(n, dx))
    radial = np.sqrt(f_row ** 2 + f_col ** 2) * 2 * aperture_radius

    # Logarithmic radial bins and their midpoint centers
    edges = np.logspace(-2, 2, 50)
    centers = (edges[:-1] + edges[1:]) / 2

    # Mean power within each annulus, scaled by the grid cell area
    cell_area = dx * dy
    psd = []
    for lo, hi in zip(edges[:-1], edges[1:]):
        sel = (radial >= lo) & (radial < hi)
        psd.append(np.mean(power[sel]) * cell_area if sel.any() else 0.0)

    return centers, np.array(psd)
|
||||
|
||||
def compute_psd_from_file(op2_path: str, angle_name: str, inner_radius: float = 135.75) -> Tuple[np.ndarray, np.ndarray, float]:
    """
    Compute PSD from FEA data file.

    Reads node geometry and per-subcase displacements, builds the WFE
    surface for the requested elevation angle, masks it to the outer
    aperture, and returns the radially averaged PSD.

    Args:
        op2_path: Path to FEA solution file
        angle_name: Elevation angle
        inner_radius: Inner hole radius in mm
            NOTE(review): currently unused by the body — confirm whether
            the inner hole should also be masked out.

    Returns:
        freqs, psd, rms_total
    """
    # Import here to avoid dependency issues
    import sys
    sys.path.insert(0, '/home/papa/repos/Atomizer')
    from optimization_engine.extractors.extract_zernike import read_node_geometry, extract_displacements_by_subcase, build_wfe_arrays

    # NOTE(review): find_geometry_file is neither defined nor imported in
    # this module, so this call raises NameError — it presumably lives with
    # the extractors and needs to be imported here; verify.
    node_geo = read_node_geometry(find_geometry_file(op2_path))
    displacements = extract_displacements_by_subcase(op2_path)

    data = displacements[angle_name]
    X, Y, Z = build_wfe_arrays(data['node_ids'], data['disp'], node_geo)

    # Center coordinates and convert mm -> m
    X_m = (X - np.mean(X)) / 1000
    Y_m = (Y - np.mean(Y)) / 1000
    Z_nm = Z * 1000  # Convert to nanometers (assumes Z is in µm — TODO confirm)

    # Mask for the 575 mm outer aperture radius.
    # BUGFIX: the previous test was `r * 1000 <= 0.575`, comparing
    # millimetres against 0.575 — it kept only points within 0.575 mm of
    # the center and discarded essentially the entire mirror surface.
    r = np.sqrt(X_m**2 + Y_m**2)
    mask = r <= 0.575  # Assuming 575mm total radius

    X_masked = X_m[mask]
    Y_masked = Y_m[mask]
    Z_masked = Z_nm[mask]

    freqs, psd = compute_surface_psd(X_masked, Y_masked, Z_masked, 0.575)
    rms_total = np.sqrt(np.trapz(psd, freqs))

    return freqs, psd, rms_total
|
||||
def update_report_with_psd():
    """Compute Tony Hull PSD decompositions for every elevation angle.

    Iterates over the four FEA elevation load cases, computes the radially
    averaged PSD for each, and prints total / gravity-band / support-band
    RMS figures. Angles that fail to process are reported and skipped.

    Returns:
        Dict mapping angle name -> {'freqs', 'psd', 'rms_total',
        'gravity_rms', 'support_rms'}.
    """
    print("=== Tony Hull PSD Implementation ===")
    print("Computing PSD for FEA results...")

    op2_path = "/home/papa/obsidian-vault/2-Projects/P04-GigaBIT-M1/Technical-Analysis/Tensor_Project/assy_m1_assyfem1_sim1-solution_1.op2"

    psd_results = {}
    for angle in ['20', '40', '60', '90']:
        try:
            freqs, psd, rms = compute_psd_from_file(op2_path, angle)

            def band_rms(lo, hi):
                # RMS within one spatial-frequency band; 0 when the band
                # contains no sample frequencies
                sel = (freqs >= lo) & (freqs <= hi)
                return np.sqrt(np.trapz(psd[sel], freqs[sel])) if np.any(sel) else 0

            entry = {
                'freqs': freqs,
                'psd': psd,
                'rms_total': rms,
                'gravity_rms': band_rms(0.1, 2),
                'support_rms': band_rms(2, 20),
            }
            psd_results[angle] = entry
            print(f" {angle}°: RMS={rms:.2f}nm Gravity={entry['gravity_rms']:.2f}nm Support={entry['support_rms']:.2f}nm")
        except Exception as e:
            print(f" Error processing {angle}°: {e}")

    print(f"\nTony Hull PSD complete for {len(psd_results)} angles")
    return psd_results


if __name__ == "__main__":
    result = update_report_with_psd()
||||
Reference in New Issue
Block a user