# Source: Atomizer/projects/isogrid-dev-plate/studies/01_v1_tpe/run_optimization.py
"""
Isogrid Dev Plate Mass Minimization Study 01 (TPE v1)
========================================================
Objective: Minimize total plate mass
Constraint: max von Mises stress σ_allow = 100.6 MPa (SF = 5)
No displacement constraint confirmed 2026-02-18
Material: AL7075-T6 (ρ = 2810 kg/m³, σ_yield = 503 MPa)
8 design variables (see PARAM_SPACE in optimization_engine/isogrid/study.py):
η₀, α, β, γ_stress, R₀, R_edge, s_min, s_max
Pipeline per trial:
1. Python Brain: params → rib profiles for sandbox_1 and sandbox_2
2. NX journal: import_profile.py — update sketch in-place
3. NX journal: solve_simulation.py — remesh + solve + write mass JSON
4. Extract: mass from _temp_part_properties.json (written by solve journal)
5. Extract: max von Mises stress from OP2
6. Objective: mass_kg + stress_penalty
Model files (working copies in 1_setup/model/):
1_setup/model/ACS_Stack_Main_Plate_Iso_Project.prt
1_setup/model/ACS_Stack_Main_Plate_Iso_project_fem2_i.prt CRITICAL: must exist!
1_setup/model/ACS_Stack_Main_Plate_Iso_project_fem2.fem
1_setup/model/ACS_Stack_Main_Plate_Iso_project_sim2.sim
1_setup/model/adaptive_isogrid_data/geometry_sandbox_1.json
1_setup/model/adaptive_isogrid_data/geometry_sandbox_2.json
"""
from __future__ import annotations
import json
import re
import shutil
import subprocess
import sys
import time
from datetime import datetime
from pathlib import Path
import optuna
# ─── Project root + study directory on path ──────────────────────────────────
PROJECT_ROOT = Path(__file__).resolve().parents[4] # .../Atomizer
STUDY_DIR_EARLY = Path(__file__).resolve().parent # studies/01_v1_tpe/
sys.path.insert(0, str(PROJECT_ROOT))
sys.path.insert(0, str(STUDY_DIR_EARLY)) # makes plot_trial / trial_retention importable
# ─── Python Brain imports ─────────────────────────────────────────────────────
from optimization_engine.isogrid import (
generate_triangulation,
generate_pockets,
assemble_profile,
profile_to_json,
validate_profile,
normalize_geometry_schema,
)
from optimization_engine.isogrid.study import PARAM_SPACE, MANUFACTURING_CONSTRAINTS, MATH_CONSTANTS, MATERIAL
# ─── Extractor imports ────────────────────────────────────────────────────────
from optimization_engine.extractors.extract_part_mass_material import extract_part_mass_material
from optimization_engine.extractors.extract_mass_from_expression import extract_mass_from_expression
from optimization_engine.extractors.extract_von_mises_stress import extract_solid_stress
# ─── NX solver ───────────────────────────────────────────────────────────────
from optimization_engine.nx.solver import NXSolver
# ─── Local study utilities ───────────────────────────────────────────────────
from plot_trial import plot_trial_figures, plot_stress_figures
from trial_retention import TrialRetentionManager
from extract_sandbox_stress import extract_sandbox_stress_field
# =============================================================================
# Constants
# =============================================================================
# Study directory (not resolved — same folder as STUDY_DIR_EARLY above).
STUDY_DIR = Path(__file__).parent
def _pick_model_dir(study_dir: Path) -> Path:
"""Pick the model directory that actually has the required NX files."""
candidates = [
study_dir / "model",
study_dir / "1_setup" / "model",
]
required = [
"ACS_Stack_Main_Plate_Iso_project_sim2.sim",
"ACS_Stack_Main_Plate_Iso_project_fem2_i.prt",
]
for cand in candidates:
if cand.exists() and all((cand / name).exists() for name in required):
return cand
# fallback to legacy default (keeps preflight behavior explicit)
return study_dir / "1_setup" / "model"
def _pick_results_dir(study_dir: Path) -> Path:
"""Prefer modern 3_results, but stay compatible with legacy results/."""
modern = study_dir / "3_results"
legacy = study_dir / "results"
if modern.exists() or not legacy.exists():
return modern
return legacy
# Resolved directories (see the _pick_* helpers above).
MODEL_DIR = _pick_model_dir(STUDY_DIR)
DATA_DIR = MODEL_DIR / "adaptive_isogrid_data"  # sandbox geometry + rib-profile JSONs
RESULTS_DIR = _pick_results_dir(STUDY_DIR)
ITER_DIR = STUDY_DIR / "2_iterations"  # per-trial folders (trial_NNNN)
# NX model files
SIM_FILE = MODEL_DIR / "ACS_Stack_Main_Plate_Iso_project_sim2.sim"
PRT_I_FILE = MODEL_DIR / "ACS_Stack_Main_Plate_Iso_project_fem2_i.prt"  # idealized part — required for mesh update
FEM_FILE = MODEL_DIR / "ACS_Stack_Main_Plate_Iso_project_fem2.fem"
# NX import journal
IMPORT_JOURNAL = PROJECT_ROOT / "tools" / "adaptive-isogrid" / "src" / "nx" / "import_profile.py"
# NX runner — DesigncenterNX2512 (production install)
NX_VERSION = "2512"
# Material: AL7075-T6 (values read from the shared MATERIAL table)
SIGMA_ALLOW = MATERIAL["sigma_allow_MPa"]  # 100.6 MPa
SIGMA_YIELD = MATERIAL["sigma_yield_MPa"]  # 503.0 MPa
# Optuna
N_TRIALS = 200  # total target trial count (resume-aware; see main())
STUDY_NAME = "isogrid_01_v1_tpe"
DB_PATH = RESULTS_DIR / "study.db"  # sqlite storage backing the Optuna study
# =============================================================================
# Parameter helpers
# =============================================================================
def build_full_params(trial_params: dict) -> dict:
    """Merge sampled variables with fixed manufacturing constraints and math constants.

    Returns a new dict; *trial_params* is not mutated. Fixed entries take
    their "value" field from the shared configuration tables.
    """
    merged = {**trial_params}
    fixed_tables = {**MANUFACTURING_CONSTRAINTS, **MATH_CONSTANTS}
    for key, cfg in fixed_tables.items():
        merged[key] = cfg["value"]
    return merged
# =============================================================================
# NX journal runner
# =============================================================================
def find_run_journal_exe() -> Path:
    """Locate run_journal.exe — DesigncenterNX only (production install).

    Raises FileNotFoundError when no known install path contains the exe.
    """
    search_paths = [
        Path(f"C:/Program Files/Siemens/DesigncenterNX{NX_VERSION}/NXBIN/run_journal.exe"),
        Path(f"C:/Program Files/Siemens/Simcenter3D_{NX_VERSION}/NXBIN/run_journal.exe"),
    ]
    for exe in search_paths:
        if exe.exists():
            return exe
    raise FileNotFoundError(
        f"run_journal.exe not found. Checked: {[str(p) for p in search_paths]}"
    )
def run_nx_journal(journal_path: Path, model_dir: Path, timeout: int = 300) -> bool:
    """Execute an NX journal via run_journal.exe.

    The subprocess runs with *model_dir* as its working directory, so the
    journal opens files relative to that directory.

    Returns True on success, False on any failure (non-zero exit code,
    timeout, or unexpected error).
    """
    exe = find_run_journal_exe()
    print(f" [NX] Running journal: {journal_path.name}")
    started = time.time()
    try:
        proc = subprocess.run(
            [str(exe), str(journal_path)],
            cwd=str(model_dir),
            capture_output=True,
            text=True,
            timeout=timeout,
        )
        elapsed = time.time() - started
    except subprocess.TimeoutExpired:
        print(f" [NX] TIMEOUT after {timeout}s")
        return False
    except Exception as exc:
        print(f" [NX] ERROR: {exc}")
        return False
    if proc.returncode != 0:
        print(f" [NX] FAILED (exit {proc.returncode}) in {elapsed:.1f}s")
        if proc.stderr:
            print(f" [NX] stderr: {proc.stderr[:500]}")
        return False
    print(f" [NX] OK in {elapsed:.1f}s")
    return True
def _extract_mass_robust(solve_result: dict, model_dir: Path, prt_i_file: Path) -> float:
"""
Robust mass extraction 3-step fallback chain.
1. _temp_part_properties.json (full JSON from solve_simulation journal preferred)
2. _temp_mass.txt (lightweight expression dump fallback)
3. journal stdout (parse [JOURNAL] Mass ... = N lines last resort)
Temp files are cleared before each NX run (see step 4 in objective), so any
file that exists here is guaranteed to be from the current trial's solve.
"""
props_file = model_dir / "_temp_part_properties.json"
mass_file = model_dir / "_temp_mass.txt"
# 1) Full JSON written by NXOpen MeasureManager in solve_simulation journal
if props_file.exists() and prt_i_file.exists():
try:
result = extract_part_mass_material(prt_i_file, properties_file=props_file)
return float(result["mass_kg"])
except Exception as e:
print(f" [Mass] Fallback 1 failed ({e}), trying _temp_mass.txt …")
# 2) Lightweight mass dump — expression p173 written by journal
if mass_file.exists() and prt_i_file.exists():
try:
return float(extract_mass_from_expression(prt_i_file, expression_name="p173"))
except Exception as e:
print(f" [Mass] Fallback 2 failed ({e}), trying stdout parse …")
# 3) Parse journal stdout for any [JOURNAL] mass line
stdout = solve_result.get("stdout", "") or ""
m = re.search(
r"\[JOURNAL\]\s+(?:Mass extracted|MeasureManager mass|Mass expression p173)\s*=\s*([0-9.eE+-]+)",
stdout,
)
if m:
return float(m.group(1))
raise FileNotFoundError(
"Mass extraction failed: all 3 fallbacks exhausted "
"(missing _temp_part_properties.json, _temp_mass.txt, and no mass in journal stdout)"
)
# =============================================================================
# Trial numbering (filesystem-based — no separate DB needed)
# =============================================================================
def _next_trial_number(iter_dir: Path) -> int:
"""Next trial number — max of existing trial_NNNN folders + 1 (1-based)."""
max_n = 0
for p in iter_dir.glob("trial_????"):
try:
max_n = max(max_n, int(p.name.split("_")[1]))
except (IndexError, ValueError):
pass
return max_n + 1
# =============================================================================
# Objective function
# =============================================================================
def make_objective(rm: TrialRetentionManager):
    """Return the Optuna objective closure, capturing the RetentionManager.

    The closure runs one full pipeline iteration per trial:
    Brain profile generation → NX import journal → NX solve → mass/stress
    extraction → figures → objective = mass_kg + stress penalty.
    Any failure stage returns the 1e6 penalty and records the cause in
    the trial's "error" user attribute.
    """
    def objective(trial: optuna.Trial) -> float:
        """
        Optuna objective: minimize mass + stress penalty.
        Returns float (the combined objective). Infeasible or failed trials
        return a large penalty to steer the sampler away.
        """
        optuna_num = trial.number
        print(f"\n--- Trial {optuna_num} ---")
        # ── 1. Sample parameters ──────────────────────────────────────────────
        sampled = {}
        for name, cfg in PARAM_SPACE.items():
            sampled[name] = trial.suggest_float(name, cfg["low"], cfg["high"])
        params = build_full_params(sampled)
        print(f" η₀={params['eta_0']:.3f} α={params['alpha']:.3f} β={params['beta']:.3f} "
              f"γ_s={params['gamma_stress']:.3f} R₀={params['R_0']:.1f} "
              f"R_e={params['R_edge']:.1f} s_min={params['s_min']:.1f} s_max={params['s_max']:.1f}")
        # ── 2. Reserve trial folder (filesystem-based numbering) ──────────────
        trial_number = _next_trial_number(ITER_DIR)
        trial_dir = ITER_DIR / f"trial_{trial_number:04d}"
        trial_dir.mkdir(parents=True, exist_ok=True)
        # Write params immediately (before NX, so folder exists even on failure)
        (trial_dir / "params.json").write_text(json.dumps(sampled, indent=2))
        # ── 3. Python Brain: generate rib profiles ───────────────────────────
        n_pockets_total = 0
        sb_data: list[dict] = []  # accumulated for plotting
        for sb_id in ["sandbox_1", "sandbox_2"]:
            geom_path = DATA_DIR / f"geometry_{sb_id}.json"
            if not geom_path.exists():
                print(f" [Brain] MISSING: {geom_path.name} — skipping sandbox")
                continue
            with open(geom_path) as f:
                geometry = normalize_geometry_schema(json.load(f))
            try:
                triangulation = generate_triangulation(geometry, params)
                pockets = generate_pockets(triangulation, geometry, params)
                ribbed_plate = assemble_profile(geometry, pockets, params)
                is_valid, checks = validate_profile(ribbed_plate, params)
                n_pockets = len(pockets)
                n_pockets_total += n_pockets
                print(f" [Brain] {sb_id}: {n_pockets} pockets "
                      f"valid={is_valid} "
                      f"mass_est≈{checks.get('mass_estimate_g', 0):.0f}g")
                profile_data = profile_to_json(ribbed_plate, pockets, geometry, params)
                profile_path = DATA_DIR / f"rib_profile_{sb_id}.json"
                with open(profile_path, "w") as f:
                    json.dump(profile_data, f, indent=2)
                # Copy rib profile to trial folder for reproducibility
                shutil.copy2(profile_path, trial_dir / f"rib_profile_{sb_id}.json")
                # Accumulate for plotting
                sb_data.append({
                    "sandbox_id": sb_id,
                    "geometry": geometry,
                    "params": params,
                    "triangulation": triangulation,
                    "pockets": pockets,
                    "ribbed_plate": ribbed_plate,
                })
            except Exception as exc:
                print(f" [Brain] ERROR on {sb_id}: {exc}")
                trial.set_user_attr("error", f"Brain:{exc}")
                return 1e6
        print(f" [Brain] Total pockets: {n_pockets_total}")
        # ── 3b. Degenerate check — prune near-solid trials early ─────────────
        # If fewer than N_MIN_POCKETS are generated, the design is essentially a
        # solid plate (likely all triangles rejected by area/fillet filter).
        # Return a large penalty immediately — no point burning an NX license.
        N_MIN_POCKETS = 8
        if n_pockets_total < N_MIN_POCKETS:
            print(f" [Brain] Degenerate trial: only {n_pockets_total} pockets "
                  f"(min={N_MIN_POCKETS}) — pruning before NX")
            trial.set_user_attr("error", f"degenerate:{n_pockets_total}_pockets")
            return 1e6
        # ── 3d. Save per-trial figures (density, mesh, rib pattern) ──────────
        t_fig = time.time()
        n_figs = len(plot_trial_figures(sb_data, trial_dir))
        print(f" [Plot] {n_figs} figures → trial_{trial_number:04d}/ ({time.time()-t_fig:.1f}s)")
        # ── 4. Clear stale mass temp files, then import rib profiles ─────────
        # Delete temp files from any previous trial so we KNOW the ones written
        # after this solve are fresh — prevents silent stale-read across trials.
        for _tmp in ("_temp_part_properties.json", "_temp_mass.txt"):
            _p = MODEL_DIR / _tmp
            try:
                _p.unlink(missing_ok=True)
            except Exception:
                pass  # best-effort cleanup; extraction guards against stale reads
        ok = run_nx_journal(IMPORT_JOURNAL, MODEL_DIR, timeout=120)
        if not ok:
            trial.set_user_attr("error", "NX import journal failed")
            return 1e6
        # ── 5. NX: remesh + solve + extract mass ─────────────────────────────
        solver = NXSolver(nastran_version=NX_VERSION, use_journal=True, study_name=STUDY_NAME)
        try:
            solve_result = solver.run_simulation(SIM_FILE)
        except Exception as exc:
            print(f" [NX] Solve ERROR: {exc}")
            trial.set_user_attr("error", f"Solve:{exc}")
            return 1e6
        if not solve_result.get("success"):
            errors = solve_result.get("errors", [])
            print(f" [NX] Solve FAILED: {errors[:2]}")
            trial.set_user_attr("error", f"SolveFailed:{errors[:1]}")
            return 1e6
        op2_file = solve_result.get("op2_file")
        if not op2_file or not Path(op2_file).exists():
            print(" [NX] OP2 not found after solve")
            trial.set_user_attr("error", "OP2 missing")
            return 1e6
        # ── 5b. Archive model + solver outputs to trial folder (heavy — subject to retention)
        # NX model copies (.prt, .fem, .sim, .afm/.afem) + Nastran results (.op2, .f06, .dat, .log)
        _HEAVY_SUFFIXES = (".prt", ".fem", ".sim", ".afm", ".afem", ".op2", ".f06", ".dat", ".log")
        for suffix in _HEAVY_SUFFIXES:
            for src in MODEL_DIR.glob(f"*{suffix}"):
                try:
                    shutil.copy2(src, trial_dir / src.name)
                except Exception:
                    pass  # archival is best-effort; never fail the trial over a copy
        # ── 6. Extract mass (robust fallback chain) ─────────────────────────
        try:
            mass_kg = _extract_mass_robust(solve_result, MODEL_DIR, PRT_I_FILE)
            print(f" [Extract] Mass: {mass_kg:.4f} kg ({mass_kg * 1000:.1f} g)")
        except Exception as exc:
            print(f" [Extract] Mass ERROR: {exc}")
            trial.set_user_attr("error", f"Mass:{exc}")
            return 1e6
        # ── 7. Extract max von Mises stress ──────────────────────────────────
        try:
            stress_result = extract_solid_stress(op2_file, subcase=1)
            max_stress = stress_result["max_von_mises"]  # MPa (auto-converted by extractor)
            print(f" [Extract] Max stress: {max_stress:.2f} MPa "
                  f"(allow={SIGMA_ALLOW:.1f} SF={SIGMA_YIELD/max(max_stress, 0.001):.2f})")
        except Exception as exc:
            print(f" [Extract] Stress ERROR: {exc}")
            trial.set_user_attr("error", f"Stress:{exc}")
            return 1e6
        # ── 7b. Extract per-sandbox spatial stress field → stress heatmap PNG ──
        # FEM from trial folder (trial copy — mesh matches this trial's solve)
        fem_copy = trial_dir / FEM_FILE.name
        fem_for_stress = fem_copy if fem_copy.exists() else FEM_FILE
        stress_fields: dict = {}
        for sbd in sb_data:
            sb_id = sbd["sandbox_id"]
            try:
                stress_fields[sb_id] = extract_sandbox_stress_field(
                    op2_file=Path(op2_file),
                    fem_file=fem_for_stress,
                    sandbox_geometry=sbd["geometry"],
                    subcase=1,
                )
            except Exception as exc:
                print(f" [StressField] {sb_id} failed: {exc}")
                stress_fields[sb_id] = {"nodes_xy": [], "stress_values": [], "n_elements": 0}
        t_sfig = time.time()
        n_sfigs = len(plot_stress_figures(sb_data, stress_fields, trial_dir, sigma_allow=SIGMA_ALLOW))
        if n_sfigs:
            print(f" [Plot] {n_sfigs} stress figures → trial_{trial_number:04d}/ "
                  f"({time.time()-t_sfig:.1f}s)")
        # ── 8. Compute objective (stress-only constraint) ─────────────────────
        penalty = 0.0
        if max_stress > SIGMA_ALLOW:
            penalty = 1e4 * ((max_stress / SIGMA_ALLOW) - 1.0) ** 2
        objective_value = mass_kg + penalty
        sf = SIGMA_YIELD / max(max_stress, 0.001)
        feasible = max_stress <= SIGMA_ALLOW
        print(f" [Obj] mass={mass_kg:.4f} kg penalty={penalty:.2f} "
              f"obj={objective_value:.4f} feasible={feasible}")
        # ── 9. Write results to trial folder ──────────────────────────────────
        results = {
            "mass_kg": round(mass_kg, 4),
            "max_stress_mpa": round(max_stress, 3),
            "safety_factor": round(sf, 3),
            "penalty": round(penalty, 4),
            "objective": round(objective_value, 4),
            "feasible": feasible,
            "n_pockets": n_pockets_total,
        }
        (trial_dir / "results.json").write_text(json.dumps(results, indent=2))
        # ── 10. Log to Optuna user attrs ──────────────────────────────────────
        trial.set_user_attr("mass_kg", round(mass_kg, 4))
        trial.set_user_attr("max_stress_MPa", round(max_stress, 3))
        trial.set_user_attr("safety_factor", round(sf, 3))
        trial.set_user_attr("penalty", round(penalty, 4))
        trial.set_user_attr("n_pockets", n_pockets_total)
        trial.set_user_attr("feasible", feasible)
        trial.set_user_attr("trial_folder", f"trial_{trial_number:04d}")
        # ── 11. File retention: keep last 10 + best 5 heavy files ─────────────
        rm.register(
            trial_number=trial_number,
            trial_dir=trial_dir,
            objective=objective_value,
            mass_kg=mass_kg,
            feasible=feasible,
        )
        stripped = rm.apply()
        if stripped:
            print(f" [Retain] Stripped heavy files from trials: {stripped}")
        return objective_value
    return objective
# =============================================================================
# Pre-flight checks
# =============================================================================
def check_prerequisites():
    """Verify all required files exist before starting optimization.

    Prints one status line per item and returns True only when every
    required file and run_journal.exe are present.
    """
    print("Pre-flight checks...")
    missing = []
    checklist = [
        (SIM_FILE, "Simulation file"),
        (PRT_I_FILE, "Idealized part (CRITICAL for mesh update)"),
        (IMPORT_JOURNAL, "import_profile.py journal"),
        (DATA_DIR / "geometry_sandbox_1.json", "Sandbox 1 geometry"),
        (DATA_DIR / "geometry_sandbox_2.json", "Sandbox 2 geometry"),
    ]
    for path, label in checklist:
        item = Path(path)
        if item.exists():
            print(f" [OK] {label}: {item.name}")
        else:
            print(f" [MISSING] {label}: {path}")
            missing.append(str(path))
    # run_journal.exe must be locatable too.
    try:
        exe = find_run_journal_exe()
        print(f" [OK] run_journal.exe: {exe}")
    except FileNotFoundError as exc:
        print(f" [MISSING] {exc}")
        missing.append("run_journal.exe")
    if missing:
        print(f"\nPre-flight FAILED — {len(missing)} missing item(s).")
        print("Model files should be in: 1_setup/model/")
        print("Geometry JSONs should be in: 1_setup/model/adaptive_isogrid_data/")
        return False
    print("Pre-flight OK.\n")
    return True
# =============================================================================
# Main
# =============================================================================
def main():
    """Run pre-flight checks, then drive the Optuna study up to N_TRIALS trials."""
    banner = "=" * 70
    print(banner)
    print(" Isogrid Dev Plate — Mass Minimization Study 01 (TPE v1)")
    print(banner)
    print(f" Material: {MATERIAL['name']}")
    print(f" σ_yield: {SIGMA_YIELD} MPa")
    print(f" σ_allow: {SIGMA_ALLOW:.1f} MPa (SF = {MATERIAL['safety_factor']})")
    print(f" Trials: {N_TRIALS}")
    print(f" DB: {DB_PATH}")
    print()
    for directory in (RESULTS_DIR, ITER_DIR):
        directory.mkdir(parents=True, exist_ok=True)
    if not check_prerequisites():
        sys.exit(1)
    # Optuna study — must be created BEFORE any other DB operations
    study = optuna.create_study(
        study_name=STUDY_NAME,
        direction="minimize",
        storage=f"sqlite:///{DB_PATH}",
        load_if_exists=True,
        sampler=optuna.samplers.TPESampler(seed=42),
    )
    retention = TrialRetentionManager(ITER_DIR, keep_recent=10, keep_best=5)
    completed = len(study.trials)
    if completed > 0:
        print(f"Resuming study: {completed} trials already complete.")
        best = study.best_trial
        print(f"Current best: trial {best.number} obj={best.value:.4f} kg "
              f"mass={best.user_attrs.get('mass_kg', '?')} kg "
              f"SF={best.user_attrs.get('safety_factor', '?')}")
        print()
    remaining = N_TRIALS - completed
    if remaining <= 0:
        print(f"Study already complete ({completed}/{N_TRIALS} trials).")
        _print_summary(study)
        return
    print(f"Running {remaining} more trial(s)...\n")
    t_start = datetime.now()
    study.optimize(
        make_objective(retention),
        n_trials=remaining,
        show_progress_bar=True,
    )
    elapsed = (datetime.now() - t_start).total_seconds()
    print(f"\nDone — {remaining} trials in {elapsed/60:.1f} min "
          f"({elapsed/max(remaining,1):.0f}s/trial)")
    _print_summary(study)
def _print_summary(study: optuna.Study):
    """Print the best trial's metrics and its sampled parameters."""
    header = "=" * 70
    print("\n" + header)
    print(" BEST RESULT")
    print(header)
    best = study.best_trial
    attrs = best.user_attrs
    print(f" Trial: {best.number}")
    print(f" Objective: {best.value:.4f}")
    print(f" Mass: {attrs.get('mass_kg', '?')} kg")
    print(f" Max stress: {attrs.get('max_stress_MPa', '?')} MPa")
    print(f" Safety factor: {attrs.get('safety_factor', '?')}")
    print(f" Feasible: {attrs.get('feasible', '?')}")
    print()
    print(" Best parameters:")
    for pname, pval in best.params.items():
        print(f" {pname:14s} = {pval:.4f} # {PARAM_SPACE[pname]['desc']}")
    print()
    print(f" DB: {DB_PATH}")
    print(f" Trial folders: {ITER_DIR}")
# Script entry point — run the optimization study.
if __name__ == "__main__":
    main()