Smart iteration management: full model copies + retention policy
- Each iteration gets full model files in iterations/iterNNN/ (openable in NX)
- Retention: keep the last 10 and the best 3 iterations with full models; strip the rest
- Stripped iterations keep solver outputs (OP2, F06, params, results)
- All paths are resolved to absolute before being passed to NX (fixes the file-reference issue)
- iteration_manager.py is reusable for future studies
This commit is contained in:
@@ -0,0 +1,249 @@
|
||||
"""Smart iteration folder management for Hydrotech Beam optimization.
|
||||
|
||||
Manages iteration folders with intelligent retention:
|
||||
- Each iteration gets a full copy of model files (openable in NX for debug)
|
||||
- Last N iterations: keep full model files (rolling window)
|
||||
- Best K iterations: keep full model files (by objective value)
|
||||
- All others: strip model files, keep only solver outputs + params
|
||||
|
||||
This gives debuggability (open any recent/best iteration in NX) while
|
||||
keeping disk usage bounded.
|
||||
|
||||
References:
|
||||
CEO design brief (2026-02-11): "all models properly saved in their
|
||||
iteration folder, keep last 10, keep best 3, delete stacking models"
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import json
|
||||
import logging
|
||||
import shutil
|
||||
from dataclasses import dataclass, field
|
||||
from pathlib import Path
|
||||
from typing import Optional
|
||||
|
||||
logger = logging.getLogger(__name__)

# NX model file extensions (copied to each iteration, removed when stripped)
MODEL_EXTENSIONS = {".prt", ".fem", ".sim"}

# Solver output extensions (always kept, even after stripping)
KEEP_EXTENSIONS = {".op2", ".f06", ".dat", ".log", ".json", ".txt", ".csv"}

# Default retention policy
DEFAULT_KEEP_RECENT = 10  # keep last N iterations with full models
DEFAULT_KEEP_BEST = 3  # keep best K iterations with full models

# Feasibility limits (previously duplicated as bare literals in two methods)
MAX_TIP_DISPLACEMENT_MM = 10.0
MAX_VON_MISES_STRESS_MPA = 130.0


def _is_feasible(displacement: float, stress: float) -> bool:
    """Return True when both constraint responses are within their limits."""
    return (
        displacement <= MAX_TIP_DISPLACEMENT_MM
        and stress <= MAX_VON_MISES_STRESS_MPA
    )


@dataclass
class IterationInfo:
    """Metadata for a single iteration folder."""

    number: int  # 1-indexed trial number
    path: Path  # iteration folder (iterations/iterNNN)
    mass: float = float("inf")  # kg; inf until results are recorded
    displacement: float = float("inf")  # tip displacement, mm
    stress: float = float("inf")  # max von Mises stress, MPa
    feasible: bool = False  # both constraints satisfied
    has_models: bool = True  # False after model files are stripped


def _quality_key(info: IterationInfo) -> tuple[int, float]:
    """Ranking key shared by retention and reporting: feasible first, then lowest mass."""
    return (0 if info.feasible else 1, info.mass)


@dataclass
class IterationManager:
    """Manages iteration folders with smart retention.

    Usage:
        mgr = IterationManager(study_dir, master_model_dir)

        # Before each trial:
        iter_dir = mgr.prepare_iteration(iteration_number)

        # After trial completes:
        mgr.record_result(iteration_number, mass=..., displacement=..., stress=...)

        # Periodically or at study end:
        mgr.apply_retention()
    """

    study_dir: Path  # root of the optimization study
    master_model_dir: Path  # pristine NX model files to copy from
    keep_recent: int = DEFAULT_KEEP_RECENT  # rolling window of full-model iterations
    keep_best: int = DEFAULT_KEEP_BEST  # best-by-objective full-model iterations
    _iterations: dict[int, IterationInfo] = field(default_factory=dict, repr=False)

    def __post_init__(self) -> None:
        """Create iterations/ and re-index any existing folders (resume support)."""
        self.iterations_dir = self.study_dir / "iterations"
        self.iterations_dir.mkdir(parents=True, exist_ok=True)

        for d in sorted(self.iterations_dir.iterdir()):
            if not (d.is_dir() and d.name.startswith("iter")):
                continue
            try:
                num = int(d.name.replace("iter", ""))
            except ValueError:
                continue  # unrelated folder, not iterNNN

            info = IterationInfo(number=num, path=d)

            # Load recorded results if present. A corrupt results.json must not
            # make the whole iteration invisible (the old broad except dropped
            # it entirely, so retention could never strip it) — fall back to
            # "no results" instead.
            results_file = d / "results.json"
            if results_file.exists():
                try:
                    data = json.loads(results_file.read_text())
                except json.JSONDecodeError:
                    logger.warning("Corrupt results.json in %s; ignoring", d.name)
                    data = {}
                info.mass = data.get("mass_kg", float("inf"))
                info.displacement = data.get("tip_displacement_mm", float("inf"))
                info.stress = data.get("max_von_mises_mpa", float("inf"))
                info.feasible = _is_feasible(info.displacement, info.stress)

            # Detect whether this iteration still has full model files
            info.has_models = any(f.suffix in MODEL_EXTENSIONS for f in d.iterdir())
            self._iterations[num] = info

        if self._iterations:
            logger.info(
                "Loaded %d existing iterations (resume support)",
                len(self._iterations),
            )

    def prepare_iteration(self, iteration_number: int) -> Path:
        """Set up an iteration folder with fresh model copies.

        Copies all model files from master_model_dir to the iteration folder.
        All paths are resolved to absolute to avoid NX reference issues.

        Args:
            iteration_number: Trial number (1-indexed).

        Returns:
            Absolute path to the iteration folder.
        """
        iter_dir = (self.iterations_dir / f"iter{iteration_number:03d}").resolve()

        # Clean up if the folder exists (failed previous run)
        if iter_dir.exists():
            shutil.rmtree(iter_dir)
        iter_dir.mkdir(parents=True)

        # Copy ALL model files so NX can resolve references within the folder
        master = self.master_model_dir.resolve()
        copied = 0
        for ext in MODEL_EXTENSIONS:
            for src in master.glob(f"*{ext}"):
                shutil.copy2(src, iter_dir / src.name)
                copied += 1

        logger.info(
            "Prepared iter%03d: copied %d model files to %s",
            iteration_number, copied, iter_dir,
        )

        self._iterations[iteration_number] = IterationInfo(
            number=iteration_number,
            path=iter_dir,
            has_models=True,
        )
        return iter_dir

    def record_result(
        self,
        iteration_number: int,
        mass: float,
        displacement: float,
        stress: float,
    ) -> None:
        """Record results for an iteration and periodically run retention.

        Args:
            iteration_number: Trial number.
            mass: Extracted mass in kg.
            displacement: Tip displacement in mm.
            stress: Max von Mises stress in MPa.
        """
        info = self._iterations.get(iteration_number)
        if info is None:
            # Previously this was silently ignored; surface it, as the
            # results would be lost without a trace.
            logger.warning("record_result for unknown iteration %d", iteration_number)
        else:
            info.mass = mass
            info.displacement = displacement
            info.stress = stress
            info.feasible = _is_feasible(displacement, stress)

        # Apply retention every 5 iterations to keep disk usage in check
        if iteration_number % 5 == 0:
            self.apply_retention()

    def apply_retention(self) -> None:
        """Apply the smart retention policy.

        Keep full model files for:
          1. Last `keep_recent` iterations (rolling window)
          2. Best `keep_best` iterations (feasible first, then lowest mass)

        Strip model files from everything else (solver outputs are kept).
        """
        if not self._iterations:
            return

        all_nums = sorted(self._iterations)

        # Set 1: last N iterations. NOTE: all_nums[-0:] would be the WHOLE
        # list, so keep_recent <= 0 must be special-cased to keep nothing.
        if self.keep_recent > 0:
            recent_set = set(all_nums[-self.keep_recent:])
        else:
            recent_set = set()

        # Set 2: best K by objective (same key as get_best_iterations)
        ranked = sorted(self._iterations.values(), key=_quality_key)
        best_set = {info.number for info in ranked[:max(self.keep_best, 0)]}

        # Keep set = recent ∪ best
        keep_set = recent_set | best_set

        # Strip model files from everything NOT in the keep set
        stripped = 0
        for num, info in self._iterations.items():
            if num not in keep_set and info.has_models:
                self._strip_models(info)
                stripped += 1

        if stripped > 0:
            logger.info(
                "Retention: kept %d recent + %d best, stripped %d iterations",
                len(recent_set), len(best_set), stripped,
            )

    def _strip_models(self, info: IterationInfo) -> None:
        """Remove model files from an iteration folder, keep solver outputs."""
        if not info.path.exists():
            return

        removed = 0
        for f in info.path.iterdir():
            if f.is_file() and f.suffix in MODEL_EXTENSIONS:
                f.unlink()
                removed += 1

        info.has_models = False
        if removed > 0:
            logger.debug(
                "Stripped %d model files from iter%03d",
                removed, info.number,
            )

    def get_best_iterations(self, n: int = 3) -> list[IterationInfo]:
        """Return the N best iterations (feasible first, then lowest mass)."""
        return sorted(self._iterations.values(), key=_quality_key)[:n]
|
||||
@@ -159,25 +159,13 @@ class AtomizerNXSolver:
|
||||
self.use_iteration_folders = use_iteration_folders
|
||||
self._iteration = 0
|
||||
|
||||
# Iteration outputs go inside the study folder
|
||||
# Smart iteration manager — handles folder creation, model copies, retention
|
||||
from iteration_manager import IterationManager
|
||||
self.study_dir = Path(__file__).parent.resolve()
|
||||
self.iterations_dir = self.study_dir / "iterations"
|
||||
self.iterations_dir.mkdir(parents=True, exist_ok=True)
|
||||
logger.info("Iterations dir: %s", self.iterations_dir)
|
||||
|
||||
# Create one-time backup of master model files (clean state)
|
||||
# Restored before each trial to ensure isolation
|
||||
import shutil
|
||||
self._backup_dir = self.study_dir / "_model_backup"
|
||||
if not self._backup_dir.exists():
|
||||
logger.info("Creating master model backup at %s", self._backup_dir)
|
||||
self._backup_dir.mkdir(parents=True)
|
||||
for ext in ("*.prt", "*.fem", "*.sim"):
|
||||
for f in model_dir.glob(ext):
|
||||
shutil.copy2(f, self._backup_dir / f.name)
|
||||
logger.info("Backed up %d model files", len(list(self._backup_dir.iterdir())))
|
||||
else:
|
||||
logger.info("Using existing model backup at %s", self._backup_dir)
|
||||
self._iter_mgr = IterationManager(
|
||||
study_dir=self.study_dir,
|
||||
master_model_dir=self.model_dir,
|
||||
)
|
||||
|
||||
# Find the .sim file
|
||||
sim_files = list(model_dir.glob("*.sim"))
|
||||
@@ -293,25 +281,14 @@ class AtomizerNXSolver:
|
||||
trial.hole_count,
|
||||
)
|
||||
|
||||
# Create iteration output folder inside the study
|
||||
iter_dir = self.iterations_dir / f"iter{self._iteration:03d}"
|
||||
iter_dir.mkdir(parents=True, exist_ok=True)
|
||||
|
||||
try:
|
||||
# Step 0: Restore master model from backup (clean state each trial)
|
||||
# Files stay in models/ so NX references are intact, but content is fresh
|
||||
import shutil
|
||||
restored = 0
|
||||
for backup_file in self._backup_dir.iterdir():
|
||||
dest = self.model_dir / backup_file.name
|
||||
shutil.copy2(backup_file, dest)
|
||||
restored += 1
|
||||
logger.info("Restored %d model files from backup", restored)
|
||||
# Step 0: Prepare iteration folder with fresh model copies
|
||||
# All paths resolved to absolute — fixes NX reference issues
|
||||
iter_dir = self._iter_mgr.prepare_iteration(self._iteration)
|
||||
|
||||
# Step 1: Solve directly on master model
|
||||
# NX file references stay intact — expressions updated in-place by journal
|
||||
sim_file = self.sim_file
|
||||
prt_file = self.prt_file
|
||||
# Sim and prt files are now in the iteration folder
|
||||
sim_file = iter_dir / self.sim_file.name
|
||||
prt_file = iter_dir / self.prt_file.name
|
||||
|
||||
# Save trial params to iteration folder
|
||||
import json
|
||||
@@ -327,10 +304,11 @@ class AtomizerNXSolver:
|
||||
},
|
||||
}, indent=2))
|
||||
|
||||
# Step 2: Run NX journal (update expressions + solve) on master model
|
||||
# Step 2: Run NX journal (update expressions + solve) in iteration folder
|
||||
# All paths are absolute — critical for NX to resolve file references
|
||||
solve_result = self._nx_solver.run_simulation(
|
||||
sim_file=sim_file,
|
||||
working_dir=self.model_dir,
|
||||
working_dir=iter_dir,
|
||||
expression_updates=expressions,
|
||||
)
|
||||
|
||||
@@ -383,8 +361,24 @@ class AtomizerNXSolver:
|
||||
logger.warning("Stress extraction failed: %s", e)
|
||||
max_vm_stress = float("nan")
|
||||
|
||||
# Step 6: Copy solver outputs to iteration folder for archival
|
||||
self._archive_iteration(iter_dir, op2_path, mass_kg, tip_displacement, max_vm_stress)
|
||||
# Step 6: Record results + write summary to iteration folder
|
||||
self._iter_mgr.record_result(
|
||||
self._iteration,
|
||||
mass=mass_kg,
|
||||
displacement=tip_displacement,
|
||||
stress=max_vm_stress,
|
||||
)
|
||||
|
||||
# Write results summary JSON
|
||||
results_file = iter_dir / "results.json"
|
||||
results_file.write_text(json.dumps({
|
||||
"iteration": self._iteration,
|
||||
"mass_kg": mass_kg,
|
||||
"tip_displacement_mm": tip_displacement,
|
||||
"max_von_mises_mpa": max_vm_stress,
|
||||
"feasible": tip_displacement <= 10.0 and max_vm_stress <= 130.0,
|
||||
"op2_file": op2_path.name if op2_path else None,
|
||||
}, indent=2))
|
||||
|
||||
elapsed = time.time() - start_time
|
||||
logger.info(
|
||||
@@ -411,54 +405,14 @@ class AtomizerNXSolver:
|
||||
iteration_dir=str(iter_dir) if 'iter_dir' in locals() else None,
|
||||
)
|
||||
|
||||
def _archive_iteration(
    self,
    iter_dir: Path,
    op2_path: Path,
    mass: float,
    displacement: float,
    stress: float,
) -> None:
    """Copy solver outputs to iteration folder for archival.

    Keeps the models/ directory clean — solver outputs go to the study's
    iterations/ folder. Each iteration gets: OP2, F06, mass file, and
    a results summary JSON.

    Args:
        iter_dir: Destination iteration folder (assumed to exist).
        op2_path: Path to the solver's .op2 result file; sibling .f06/.log
            files are derived from it via with_suffix.
        mass: Extracted mass in kg.
        displacement: Tip displacement in mm.
        stress: Max von Mises stress in MPa.
    """
    import json
    import shutil

    # Copy OP2 and F06 (and solver log) files. Best-effort: a locked or
    # missing file must not fail the trial, so failures are only logged.
    for suffix in [".op2", ".f06", ".log"]:
        src = op2_path.with_suffix(suffix)
        if src.exists():
            try:
                shutil.copy2(src, iter_dir / src.name)
            except Exception as e:
                logger.warning("Could not copy %s: %s", src.name, e)

    # Copy mass temp file if it exists (written by the NX journal into
    # model_dir — presumably per-trial scratch output; verify against journal)
    for fname in ["_temp_mass.txt", "_temp_part_properties.json"]:
        src = self.model_dir / fname
        if src.exists():
            try:
                shutil.copy2(src, iter_dir / fname)
            except Exception as e:
                logger.warning("Could not copy %s: %s", fname, e)

    # Write results summary JSON (consumed by IterationManager on resume)
    results_file = iter_dir / "results.json"
    results_file.write_text(json.dumps({
        "mass_kg": mass,
        "tip_displacement_mm": displacement,
        "max_von_mises_mpa": stress,
        "op2_file": op2_path.name,
    }, indent=2))

    logger.info("Archived iteration %d to %s", self._iteration, iter_dir.name)
|
||||
|
||||
def close(self) -> None:
    """Clean up NX solver resources and run final retention.

    Runs one last retention pass so the study folder ends in its bounded
    state, then logs the best iterations and the completed-trial count.
    """
    # NOTE: the original contained two consecutive docstring literals (a
    # merge/diff artifact); the second was a dead expression statement.
    self._iter_mgr.apply_retention()
    best = self._iter_mgr.get_best_iterations(3)
    if best:
        logger.info("Best iterations: %s",
                    [(f"iter{b.number:03d}", f"{b.mass:.1f}kg",
                      "✓" if b.feasible else "✗") for b in best])
    logger.info("AtomizerNXSolver closed. %d iterations completed.", self._iteration)
|
||||
|
||||
|
||||
|
||||
Reference in New Issue
Block a user