auto: daily sync

This commit is contained in:
2026-02-19 08:00:15 +00:00
parent 6658de02f4
commit 7eb3d11f02
33 changed files with 88439 additions and 0 deletions

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,795 @@
{
"schema_version": "2.0",
"units": "mm",
"sandbox_id": "sandbox_1",
"outer_boundary": [
{
"type": "line",
"start": [
381.787159,
14.92177
],
"end": [
132.687159,
14.92177
]
},
{
"type": "line",
"start": [
132.687159,
14.92177
],
"end": [
132.687159,
-13.57823
]
},
{
"type": "line",
"start": [
132.687159,
-13.57823
],
"end": [
88.687159,
-13.57823
]
},
{
"type": "line",
"start": [
88.687159,
-13.57823
],
"end": [
88.687159,
14.92177
]
},
{
"type": "line",
"start": [
88.687159,
14.92177
],
"end": [
-13.412841,
14.92177
]
},
{
"type": "line",
"start": [
-13.412841,
14.92177
],
"end": [
-13.412841,
0.02177
]
},
{
"type": "line",
"start": [
-13.412841,
0.02177
],
"end": [
-30.812841,
0.02177
]
},
{
"type": "line",
"start": [
-30.812841,
0.02177
],
"end": [
-30.812841,
-254.17823
]
},
{
"type": "line",
"start": [
-30.812841,
-254.17823
],
"end": [
169.435852,
-254.17823
]
},
{
"type": "line",
"start": [
169.435852,
-254.17823
],
"end": [
169.435852,
-417.57823
]
},
{
"type": "line",
"start": [
169.435852,
-417.57823
],
"end": [
197.121675,
-417.57823
]
},
{
"type": "line",
"start": [
197.121675,
-417.57823
],
"end": [
197.121675,
-401.57823
]
},
{
"type": "line",
"start": [
197.121675,
-401.57823
],
"end": [
212.121675,
-401.57823
]
},
{
"type": "line",
"start": [
212.121675,
-401.57823
],
"end": [
212.121675,
-417.57823
]
},
{
"type": "line",
"start": [
212.121675,
-417.57823
],
"end": [
289.687159,
-417.57823
]
},
{
"type": "line",
"start": [
289.687159,
-417.57823
],
"end": [
304.687159,
-406.57823
]
},
{
"type": "line",
"start": [
304.687159,
-406.57823
],
"end": [
317.687159,
-406.57823
]
},
{
"type": "line",
"start": [
317.687159,
-406.57823
],
"end": [
332.687159,
-417.57823
]
},
{
"type": "line",
"start": [
332.687159,
-417.57823
],
"end": [
381.787159,
-417.57823
]
},
{
"type": "line",
"start": [
381.787159,
-417.57823
],
"end": [
381.787159,
-395.17823
]
},
{
"type": "line",
"start": [
381.787159,
-395.17823
],
"end": [
404.187159,
-395.17823
]
},
{
"type": "line",
"start": [
404.187159,
-395.17823
],
"end": [
404.187159,
-322.57823
]
},
{
"type": "line",
"start": [
404.187159,
-322.57823
],
"end": [
352.787159,
-322.57823
]
},
{
"type": "line",
"start": [
352.787159,
-322.57823
],
"end": [
352.787159,
-304.17823
]
},
{
"type": "line",
"start": [
352.787159,
-304.17823
],
"end": [
361.187159,
-304.17823
]
},
{
"type": "line",
"start": [
361.187159,
-304.17823
],
"end": [
361.187159,
-24.57823
]
},
{
"type": "line",
"start": [
361.187159,
-24.57823
],
"end": [
404.187159,
-24.57823
]
},
{
"type": "line",
"start": [
404.187159,
-24.57823
],
"end": [
404.187159,
0.02177
]
},
{
"type": "line",
"start": [
404.187159,
0.02177
],
"end": [
381.787159,
0.02177
]
},
{
"type": "line",
"start": [
381.787159,
0.02177
],
"end": [
381.787159,
14.92177
]
}
],
"inner_boundaries": [
{
"index": 0,
"segments": [
{
"type": "arc",
"start": [
0.0,
0.0
],
"end": [
0.0,
0.0
],
"center": [
0.0,
-3.07823
],
"radius": 3.07823,
"mid": [
0.0,
-6.15646
],
"clockwise": false
}
],
"num_segments": 1
},
{
"index": 1,
"segments": [
{
"type": "arc",
"start": [
366.187159,
1.02177
],
"end": [
366.187159,
1.02177
],
"center": [
366.187159,
-3.07823
],
"radius": 4.1,
"mid": [
366.187159,
-7.17823
],
"clockwise": false
}
],
"num_segments": 1
},
{
"index": 2,
"segments": [
{
"type": "arc",
"start": [
44.987159,
0.0
],
"end": [
44.987159,
0.0
],
"center": [
44.987159,
-3.07823
],
"radius": 3.07823,
"mid": [
44.987159,
-6.15646
],
"clockwise": false
}
],
"num_segments": 1
},
{
"index": 3,
"segments": [
{
"type": "arc",
"start": [
250.707159,
-272.32823
],
"end": [
250.707159,
-272.32823
],
"center": [
250.707159,
-275.57823
],
"radius": 3.25,
"mid": [
250.707159,
-278.82823
],
"clockwise": false
}
],
"num_segments": 1
},
{
"index": 4,
"segments": [
{
"type": "arc",
"start": [
44.987159,
-155.5
],
"end": [
44.987159,
-155.5
],
"center": [
44.987159,
-158.57823
],
"radius": 3.07823,
"mid": [
44.987159,
-161.65646
],
"clockwise": false
}
],
"num_segments": 1
},
{
"index": 5,
"segments": [
{
"type": "arc",
"start": [
125.187159,
-232.47823
],
"end": [
125.187159,
-232.47823
],
"center": [
125.187159,
-236.57823
],
"radius": 4.1,
"mid": [
125.187159,
-240.67823
],
"clockwise": false
}
],
"num_segments": 1
},
{
"index": 6,
"segments": [
{
"type": "arc",
"start": [
-9.812841,
-67.82823
],
"end": [
-9.812841,
-67.82823
],
"center": [
-9.812841,
-71.07823
],
"radius": 3.25,
"mid": [
-9.812841,
-74.32823
],
"clockwise": false
}
],
"num_segments": 1
},
{
"index": 7,
"segments": [
{
"type": "arc",
"start": [
362.787159,
-372.9
],
"end": [
362.787159,
-372.9
],
"center": [
362.787159,
-375.97823
],
"radius": 3.07823,
"mid": [
362.787159,
-379.05646
],
"clockwise": false
}
],
"num_segments": 1
},
{
"index": 8,
"segments": [
{
"type": "arc",
"start": [
250.707159,
-372.72823
],
"end": [
250.707159,
-372.72823
],
"center": [
250.707159,
-375.97823
],
"radius": 3.25,
"mid": [
250.707159,
-379.22823
],
"clockwise": false
}
],
"num_segments": 1
},
{
"index": 9,
"segments": [
{
"type": "arc",
"start": [
291.457159,
-311.1
],
"end": [
291.457159,
-311.1
],
"center": [
291.457159,
-314.17823
],
"radius": 3.07823,
"mid": [
291.457159,
-317.25646
],
"clockwise": false
}
],
"num_segments": 1
},
{
"index": 10,
"segments": [
{
"type": "arc",
"start": [
44.987159,
-68.0
],
"end": [
44.987159,
-68.0
],
"center": [
44.987159,
-71.07823
],
"radius": 3.07823,
"mid": [
44.987159,
-74.15646
],
"clockwise": false
}
],
"num_segments": 1
},
{
"index": 11,
"segments": [
{
"type": "arc",
"start": [
194.447159,
-372.72823
],
"end": [
194.447159,
-372.72823
],
"center": [
194.447159,
-375.97823
],
"radius": 3.25,
"mid": [
194.447159,
-379.22823
],
"clockwise": false
}
],
"num_segments": 1
},
{
"index": 12,
"segments": [
{
"type": "arc",
"start": [
291.457159,
-372.9
],
"end": [
291.457159,
-372.9
],
"center": [
291.457159,
-375.97823
],
"radius": 3.07823,
"mid": [
291.457159,
-379.05646
],
"clockwise": false
}
],
"num_segments": 1
},
{
"index": 13,
"segments": [
{
"type": "arc",
"start": [
125.187159,
-154.47823
],
"end": [
125.187159,
-154.47823
],
"center": [
125.187159,
-158.57823
],
"radius": 4.1,
"mid": [
125.187159,
-162.67823
],
"clockwise": false
}
],
"num_segments": 1
},
{
"index": 14,
"segments": [
{
"type": "arc",
"start": [
125.187159,
-66.97823
],
"end": [
125.187159,
-66.97823
],
"center": [
125.187159,
-71.07823
],
"radius": 4.1,
"mid": [
125.187159,
-75.17823
],
"clockwise": false
}
],
"num_segments": 1
},
{
"index": 15,
"segments": [
{
"type": "arc",
"start": [
194.447159,
-272.32823
],
"end": [
194.447159,
-272.32823
],
"center": [
194.447159,
-275.57823
],
"radius": 3.25,
"mid": [
194.447159,
-278.82823
],
"clockwise": false
}
],
"num_segments": 1
}
],
"num_inner_boundaries": 16,
"thickness": null,
"transform": {
"origin": [
197.57823,
184.187159,
6.35
],
"x_axis": [
0.0,
-1.0,
0.0
],
"y_axis": [
1.0,
0.0,
-0.0
],
"normal": [
0.0,
0.0,
1.0
]
}
}

View File

@@ -0,0 +1,314 @@
{
"schema_version": "2.0",
"units": "mm",
"sandbox_id": "sandbox_2",
"outer_boundary": [
{
"type": "arc",
"start": [
0.0,
0.0
],
"end": [
7.5,
-7.5
],
"center": [
0.0,
-7.5
],
"radius": 7.5,
"mid": [
5.303301,
-2.196699
],
"clockwise": true
},
{
"type": "line",
"start": [
7.5,
-7.5
],
"end": [
7.5,
-22.6
]
},
{
"type": "line",
"start": [
7.5,
-22.6
],
"end": [
22.5,
-22.6
]
},
{
"type": "line",
"start": [
22.5,
-22.6
],
"end": [
22.5,
-13.496098
]
},
{
"type": "line",
"start": [
22.5,
-13.496098
],
"end": [
74.5,
-13.496098
]
},
{
"type": "line",
"start": [
74.5,
-13.496098
],
"end": [
74.5,
-22.6
]
},
{
"type": "line",
"start": [
74.5,
-22.6
],
"end": [
102.5,
-22.6
]
},
{
"type": "line",
"start": [
102.5,
-22.6
],
"end": [
102.5,
-7.5
]
},
{
"type": "arc",
"start": [
102.5,
-7.5
],
"end": [
117.5,
-7.5
],
"center": [
110.0,
-7.5
],
"radius": 7.5,
"mid": [
110.0,
0.0
],
"clockwise": false
},
{
"type": "line",
"start": [
117.5,
-7.5
],
"end": [
117.5,
-22.6
]
},
{
"type": "line",
"start": [
117.5,
-22.6
],
"end": [
140.748693,
-22.6
]
},
{
"type": "line",
"start": [
140.748693,
-22.6
],
"end": [
140.748693,
124.4
]
},
{
"type": "line",
"start": [
140.748693,
124.4
],
"end": [
117.5,
124.4
]
},
{
"type": "line",
"start": [
117.5,
124.4
],
"end": [
117.5,
102.5
]
},
{
"type": "arc",
"start": [
117.5,
102.5
],
"end": [
102.5,
102.5
],
"center": [
110.0,
102.5
],
"radius": 7.5,
"mid": [
110.0,
95.0
],
"clockwise": true
},
{
"type": "line",
"start": [
102.5,
102.5
],
"end": [
102.5,
124.4
]
},
{
"type": "line",
"start": [
102.5,
124.4
],
"end": [
7.5,
124.4
]
},
{
"type": "line",
"start": [
7.5,
124.4
],
"end": [
7.5,
102.5
]
},
{
"type": "arc",
"start": [
7.5,
102.5
],
"end": [
0.0,
95.0
],
"center": [
0.0,
102.5
],
"radius": 7.5,
"mid": [
5.303301,
97.196699
],
"clockwise": true
},
{
"type": "line",
"start": [
0.0,
95.0
],
"end": [
-13.5,
95.0
]
},
{
"type": "line",
"start": [
-13.5,
95.0
],
"end": [
-13.5,
0.0
]
},
{
"type": "line",
"start": [
-13.5,
0.0
],
"end": [
0.0,
0.0
]
}
],
"inner_boundaries": [],
"num_inner_boundaries": 0,
"thickness": null,
"transform": {
"origin": [
-196.0,
175.5,
4.35
],
"x_axis": [
0.0,
-1.0,
0.0
],
"y_axis": [
1.0,
0.0,
-0.0
],
"normal": [
0.0,
0.0,
1.0
]
}
}

View File

@@ -0,0 +1,205 @@
# Study 01 — TPE v1: Isogrid Mass Minimization (Campaign 01)
**Parent project:** [isogrid-dev-plate](../../README.md)
**Context:** [../../CONTEXT.md](../../CONTEXT.md)
**Status:** Ready to run — not yet started
**Created:** 2026-02-18
---
## 1. Overview
This is the first optimization campaign for the ACS Stack Main Plate isogrid lightweighting project.
It uses Optuna TPE (Tree-structured Parzen Estimator) with a budget of 200 trials.
**Goal:** Find the set of 8 density-field parameters that minimizes total plate mass subject to a
stress constraint (σ_max ≤ 100.6 MPa, AL7075-T6, SF=5).
Each trial: Python Brain generates a triangular isogrid rib pattern → NX imports it into the sketch
→ NX Nastran solves SOL 101 → extractors pull mass from the idealized part and max stress from the OP2.
---
## 2. Engineering Problem
**Client:** ACS — Attitude Control System, spacecraft structural assembly
**Part:** Main structural plate (AL7075-T6)
**Challenge:** Remove as much material as possible from the plate interior via an isogrid rib pattern,
while keeping peak stress within the allowable under the primary axial load case.
**Load case:** FZ = 1,372.9 N, linear static (SOL 101), Subcase 1
**Material:** AL7075-T6 — σ_yield = 503 MPa, ρ = 2810 kg/m³, SF = 5 → σ_allow = 100.6 MPa
The rib pattern is generated in 2 sandbox regions. Ribs automatically cluster around bolt holes
and the plate perimeter based on physics-inspired density field parameters.
---
## 3. Mathematical Formulation
### Objective
```
minimize mass_kg(η₀, α, β, γ_stress, R₀, R_edge, s_min, s_max)
```
### Constraint
```
subject to σ_max ≤ 100.6 MPa (stress only — no displacement constraint)
```
### Penalty
```
objective_value = mass_kg + penalty
penalty = 1e4 × ((σ_max / σ_allow) − 1)² if σ_max > σ_allow
= 0 otherwise
```
### Design Variables
| Variable | Low | High | Units | Description |
|----------|-----|------|-------|-------------|
| `eta_0` | 0.0 | 0.4 | — | Baseline density offset |
| `alpha` | 0.3 | 2.0 | — | Hole influence scale |
| `beta` | 0.0 | 1.0 | — | Edge influence scale |
| `gamma_stress` | 0.0 | 1.5 | — | FEA stress feedback gain |
| `R_0` | 10 | 100 | mm | Base hole influence radius |
| `R_edge` | 5 | 40 | mm | Edge influence radius |
| `s_min` | 8 | 20 | mm | Min cell size (densest) |
| `s_max` | 25 | 60 | mm | Max cell size (sparsest) |
Fixed parameters (manufacturing constraints + math constants): see `optimization_engine/isogrid/study.py`.
---
## 4. Optimization Algorithm
| Property | Value |
|----------|-------|
| Algorithm | Optuna TPE (Tree-structured Parzen Estimator) |
| Sampler seed | 42 |
| Direction | Minimize |
| N startup trials | 10 (random exploration before TPE kicks in) |
| Trial budget | 200 |
| Storage | SQLite — `3_results/study.db` |
| Study name | `isogrid_01_v1_tpe` |
TPE splits past trials at each parameter into "good" (low objective) and "bad" (high objective) groups,
then samples from the "good" region. With 8 continuous variables and 200 trials this is well within
TPE's effective range.
---
## 5. Result Extraction
| Quantity | Extractor | Source |
|----------|-----------|--------|
| Mass | `extract_part_mass_material(_i.prt)` | `_temp_part_properties.json` written by `solve_simulation.py` journal |
| Max von Mises | `extract_solid_stress(op2_file, subcase=1)` | OP2 binary from Nastran solve |
Mass is extracted from the **idealized part** (`_fem2_i.prt`), not estimated by the Brain.
The NX journal writes a JSON temp file after each solve; the extractor reads it back.
> ⚠️ NX FEM model uses AL6061 material properties (E≈68.98 GPa, ρ=2.711e-6 kg/mm³).
> Mass extraction is therefore slightly low (~4% underestimate vs AL7075-T6).
> Tracked as Gap G-01 in CONTEXT.md.
---
## 6. Study Structure
```
studies/01_v1_tpe/
├── README.md ← You are here
├── STUDY_REPORT.md ← Post-run results (fill after campaign)
├── check_preflight.py ← Quick validation before running
├── run_optimization.py ← Main optimization loop
├── 1_setup/ ← Model files (working copies — modified by NX each trial)
│ └── model/
│ ├── ACS_Stack_Main_Plate_Iso_Project.prt
│ ├── ACS_Stack_Main_Plate_Iso_project_fem2_i.prt ← CRITICAL
│ ├── ACS_Stack_Main_Plate_Iso_project_fem2.fem
│ ├── ACS_Stack_Main_Plate_Iso_project_sim2.sim
│ └── adaptive_isogrid_data/
│ ├── geometry_sandbox_1.json ← Sandbox boundary + holes (from NX)
│ ├── geometry_sandbox_2.json
│ ├── rib_profile_sandbox_1.json ← Written per trial (current)
│ └── rib_profile_sandbox_2.json
├── 2_iterations/ ← Per-trial logs (auto-created at runtime)
│ ├── trial_0001/
│ │ ├── params.json ← Sampled design variables
│ │ ├── results.json ← mass, stress, SF, objective
│ │ ├── rib_profile_sandbox_1.json ← Rib geometry (copy)
│ │ └── rib_profile_sandbox_2.json
│ └── trial_NNNN/
│ └── ...
└── 3_results/ ← Optimization database + summary outputs
└── study.db ← Optuna SQLite (created on first run)
```
---
## 7. Quick Start
```bash
# Step 1: Verify everything is ready
C:\Users\antoi\anaconda3\envs\atomizer\python.exe \
projects/isogrid-dev-plate/studies/01_v1_tpe/check_preflight.py
# Step 2: Launch
C:\Users\antoi\anaconda3\envs\atomizer\python.exe \
projects/isogrid-dev-plate/studies/01_v1_tpe/run_optimization.py
```
See [../../playbooks/01_FIRST_RUN.md](../../playbooks/01_FIRST_RUN.md) for full step-by-step including
monitoring, failure handling, and post-run analysis.
---
## 8. Expected Runtime
| Component | Estimate |
|-----------|----------|
| Brain (triangulation + pockets) | ~3–10 s |
| NX import journal | ~15–30 s |
| NX remesh + Nastran solve | ~60–90 s |
| Extraction (mass + stress) | ~1–3 s |
| **Total per trial** | **~90–120 s** |
| **200 trials** | **~8–10 hours** |
Actual per-trial time will vary with mesh complexity (pocket count affects remesh time).
---
## 9. Success Criteria
| Criterion | Target |
|-----------|--------|
| Mass reduction vs baseline | > 10% (aspirational: > 20%) |
| Feasibility rate | > 80% of trials (σ ≤ 100.6 MPa) |
| Best trial SF | ≥ 5.0 (σ_max ≤ 100.6 MPa) |
| Convergence | Best mass stable over last 50 trials |
Baseline solid plate mass: TBD (Gap G-02 — run `extract_part_mass_material` on unmodified model).
---
## 10. Results
> **[Campaign 01 not yet started]**
>
> Fill in after run. See [STUDY_REPORT.md](STUDY_REPORT.md) for the result template.
| Metric | Value |
|--------|-------|
| Status | 🔴 Not started |
| Trials completed | 0 / 200 |
| Best mass | — |
| Best trial | — |
| Best σ_max | — |
| Best SF | — |
| Feasibility rate | — |
| Runtime | — |

View File

@@ -0,0 +1,147 @@
# Study Report — 01_v1_tpe: Isogrid Mass Minimization
**Study:** isogrid_01_v1_tpe
**Project:** Isogrid Dev Plate (ACS Stack Main Plate)
**Algorithm:** Optuna TPE, seed=42
**Budget:** 200 trials
**Date started:** [TBD]
**Date completed:** [TBD]
**Author:** Antoine + Atomizer
---
## Executive Summary
| Metric | Value |
|--------|-------|
| Status | [TBD — Not started / Running / Complete] |
| Trials completed | [TBD] / 200 |
| Best mass | [TBD] kg |
| Best trial # | [TBD] |
| Convergence trial | [TBD] (when best was first found) |
| Best σ_max | [TBD] MPa |
| Best SF | [TBD] (target ≥ 5.0) |
| Feasibility rate | [TBD] % |
| Total runtime | [TBD] h |
| Avg time/trial | [TBD] s |
---
## Best Design Found
### Parameters
| Variable | Value | Range |
|----------|-------|-------|
| `eta_0` | [TBD] | [0.0, 0.4] |
| `alpha` | [TBD] | [0.3, 2.0] |
| `beta` | [TBD] | [0.0, 1.0] |
| `gamma_stress` | [TBD] | [0.0, 1.5] |
| `R_0` | [TBD] mm | [10, 100] |
| `R_edge` | [TBD] mm | [5, 40] |
| `s_min` | [TBD] mm | [8, 20] |
| `s_max` | [TBD] mm | [25, 60] |
### Results
| Metric | Value |
|--------|-------|
| Mass | [TBD] kg ([TBD] g) |
| Max von Mises | [TBD] MPa |
| Safety factor | [TBD] |
| Feasible | [TBD] |
| Total pockets | [TBD] (sandbox_1 + sandbox_2) |
### Trial folder
```
2_iterations/trial_[TBD]/
├── params.json
├── results.json
├── rib_profile_sandbox_1.json
└── rib_profile_sandbox_2.json
```
---
## Optimization Progress
### Trial Statistics
| Metric | Value |
|--------|-------|
| Total trials | [TBD] |
| Feasible trials | [TBD] ([TBD] %) |
| Failed trials (error) | [TBD] |
| Min objective | [TBD] |
| Median objective | [TBD] |
| Max objective | [TBD] |
### Best History (top 10 improvements)
| Trial # | Mass (kg) | σ_max (MPa) | SF | Feasible |
|---------|-----------|-------------|-----|----------|
| [TBD] | [TBD] | [TBD] | [TBD] | [TBD] |
| ... | ... | ... | ... | ... |
---
## Comparison vs Baseline
| Metric | Baseline (solid) | Best isogrid | Reduction |
|--------|-----------------|--------------|-----------|
| Mass | [TBD] kg | [TBD] kg | [TBD] % |
| Max σ | [TBD] MPa | [TBD] MPa | — |
| SF | [TBD] | [TBD] | — |
> Baseline solid plate mass: TBD (Gap G-02 — run `extract_part_mass_material` on unmodified model).
---
## Runtime Performance
| Metric | Value |
|--------|-------|
| Total wall time | [TBD] h |
| Average per trial | [TBD] s |
| Brain (avg) | [TBD] s |
| NX import (avg) | [TBD] s |
| NX solve (avg) | [TBD] s |
| Extraction (avg) | [TBD] s |
| Fastest trial | [TBD] s |
| Slowest trial | [TBD] s |
---
## Failure Analysis
| Failure type | Count | Notes |
|-------------|-------|-------|
| Brain error | [TBD] | [TBD] |
| NX import failed | [TBD] | [TBD] |
| NX solve failed | [TBD] | [TBD] |
| OP2 missing | [TBD] | [TBD] |
| Extractor error | [TBD] | [TBD] |
---
## Recommendations
> Fill in after campaign completes.
- [ ] If feasibility rate < 80%: increase `s_min` lower bound (too many dense patterns stress-out)
- [ ] If convergence flat after trial 100: consider Campaign 02 with tighter bounds around best region
- [ ] If `gamma_stress` best value > 0.5: stress feedback is helping — keep it in next campaign
- [ ] If best SF >> 5.0: more aggressive lightweighting possible — widen s_max bound
---
## Configuration Reference
**Study name:** `isogrid_01_v1_tpe`
**Constraint:** σ_max ≤ 100.6 MPa (stress only)
**Material:** AL7075-T6, σ_yield = 503 MPa, SF = 5
**DB:** `3_results/study.db`
**Run script:** `run_optimization.py`
**NX version:** DesigncenterNX2512
**NX model:** `1_setup/model/ACS_Stack_Main_Plate_Iso_project_sim2.sim`

View File

@@ -0,0 +1,87 @@
"""Quick pre-flight check — run this before run_optimization.py."""
import sys
from pathlib import Path
sys.path.insert(0, str(Path(__file__).resolve().parents[4]))
STUDY_DIR = Path(__file__).parent
MODEL_DIR = STUDY_DIR / "1_setup" / "model"
DATA_DIR = MODEL_DIR / "adaptive_isogrid_data"
NX_VERSION = "2512" # DesigncenterNX2512 (production)
required = [
(MODEL_DIR / "ACS_Stack_Main_Plate_Iso_Project.prt", "Geometry part"),
(MODEL_DIR / "ACS_Stack_Main_Plate_Iso_project_fem2_i.prt", "Idealized part (CRITICAL)"),
(MODEL_DIR / "ACS_Stack_Main_Plate_Iso_project_fem2.fem", "FEM file"),
(MODEL_DIR / "ACS_Stack_Main_Plate_Iso_project_sim2.sim", "Simulation file"),
(DATA_DIR / "geometry_sandbox_1.json", "Sandbox 1 geometry"),
(DATA_DIR / "geometry_sandbox_2.json", "Sandbox 2 geometry"),
]
nx_candidates = [
Path(f"C:/Program Files/Siemens/DesigncenterNX{NX_VERSION}/NXBIN/run_journal.exe"),
Path(f"C:/Program Files/Siemens/Simcenter3D_{NX_VERSION}/NXBIN/run_journal.exe"),
]
print("Pre-flight checks")
print("=" * 60)
all_ok = True
# Model files
print("\nModel files:")
for path, label in required:
if path.exists():
mb = round(path.stat().st_size / 1_048_576, 1)
print(f" [OK] {label} ({mb} MB)")
else:
print(f" [MISSING] {label}")
print(f" -> {path}")
all_ok = False
# run_journal.exe
print("\nNX:")
rj_found = None
for c in nx_candidates:
if c.exists():
rj_found = c
break
if rj_found:
print(f" [OK] run_journal.exe: {rj_found}")
else:
print(f" [MISSING] run_journal.exe — NX {NX_VERSION} not found")
print(f" Checked: {[str(c) for c in nx_candidates]}")
all_ok = False
# Python Brain imports
print("\nPython Brain:")
try:
from optimization_engine.isogrid import (
generate_triangulation, generate_pockets,
assemble_profile, profile_to_json, validate_profile,
normalize_geometry_schema,
)
from optimization_engine.isogrid.study import PARAM_SPACE, MATERIAL
print(" [OK] optimization_engine.isogrid")
print(f" Material: {MATERIAL['name']} sigma_allow={MATERIAL['sigma_allow_MPa']:.1f} MPa")
except ImportError as e:
print(f" [FAIL] Brain import: {e}")
all_ok = False
# Extractor imports
print("\nExtractors:")
try:
from optimization_engine.extractors.extract_part_mass_material import extract_part_mass_material
from optimization_engine.extractors.extract_von_mises_stress import extract_solid_stress
print(" [OK] extract_part_mass_material + extract_solid_stress")
except ImportError as e:
print(f" [FAIL] Extractor import: {e}")
all_ok = False
print("\n" + "=" * 60)
if all_ok:
print("All checks PASSED — ready to run run_optimization.py")
else:
print("FAILED — fix the issues above before running")
sys.exit(0 if all_ok else 1)

View File

@@ -0,0 +1,163 @@
"""
Extract per-element von Mises stress within each sandbox region.
Reads from the Nastran OP2 (stress per element) + NX FEM file (node coordinates),
then filters to elements whose centroids fall inside the sandbox boundary polygon.
This gives the spatial stress field in sandbox 2D coordinates — the same schema
that solve_and_extract.py would produce via NXOpen once wired:
{
"nodes_xy": [[x, y], ...], # element centroids (mm)
"stress_values": [...], # von Mises per element (MPa)
"n_elements": int,
}
The sandbox polygon filter (Shapely point-in-polygon) is what maps "whole plate"
OP2 stress back to each individual sandbox region.
"""
from __future__ import annotations
from pathlib import Path
from typing import Dict, Any, List
import numpy as np
from shapely.geometry import Polygon, Point
def extract_sandbox_stress_field(
    op2_file: Path,
    fem_file: Path,
    sandbox_geometry: dict,
    subcase: int = 1,
    convert_to_mpa: bool = True,
) -> Dict[str, Any]:
    """
    Extract the per-element Von Mises stress field for one sandbox region.

    Pipeline: read node coordinates from the FEM (BDF) file, read per-element
    von Mises stress for solid elements from the Nastran OP2, average each
    element's integration points, compute element centroids from node
    positions, then keep only elements whose centroid lies inside the sandbox
    outer-boundary polygon.

    Args:
        op2_file: Nastran OP2 results file
        fem_file: NX FEM file (BDF format) — for node coordinates
        sandbox_geometry: Geometry dict with 'outer_boundary' polygon.
            NOTE(review): Shapely's Polygon(...) requires a sequence of
            [x, y] points here, but the on-disk geometry_sandbox_*.json
            stores a list of segment dicts — presumably a normalized
            point-list form is passed in; confirm against the caller.
        subcase: Nastran subcase ID (default 1)
        convert_to_mpa: Divide by 1000 (NX kg-mm-s outputs kPa → MPa)
    Returns:
        Dict with 'nodes_xy' (element centroids, mm), 'stress_values'
        (von Mises per element), 'n_elements'.
        Returns empty result (n_elements=0) on any failure.
    """
    _empty = {"nodes_xy": [], "stress_values": [], "n_elements": 0}
    # pyNastran is an optional dependency — degrade gracefully without it.
    try:
        from pyNastran.op2.op2 import OP2
        from pyNastran.bdf.bdf import BDF
    except ImportError:
        print(" [StressField] pyNastran not available — skipping")
        return _empty
    # ── 1. Read FEM for node positions ───────────────────────────────────────
    try:
        bdf = BDF(debug=False)
        # xref=True resolves coordinate systems so node.get_position() works.
        bdf.read_bdf(str(fem_file), xref=True)
    except Exception as e:
        print(f" [StressField] FEM read failed: {e}")
        return _empty
    # node id → [x, y, z] position (mm); unresolvable nodes are skipped and
    # simply excluded from the centroid averaging below.
    node_pos: Dict[int, np.ndarray] = {}
    for nid, node in bdf.nodes.items():
        try:
            node_pos[nid] = node.get_position()  # [x, y, z] in mm
        except Exception:
            pass
    # ── 2. Read OP2 for per-element stress ────────────────────────────────────
    try:
        model = OP2(debug=False, log=None)
        model.read_op2(str(op2_file))
    except Exception as e:
        print(f" [StressField] OP2 read failed: {e}")
        return _empty
    if not hasattr(model, "op2_results") or not hasattr(model.op2_results, "stress"):
        print(" [StressField] No stress results in OP2")
        return _empty
    stress_container = model.op2_results.stress
    # Accumulate per-element von Mises (average over integration points)
    eid_vm_lists: Dict[int, List[float]] = {}
    # Only solid element types are scanned (plate is solid-meshed).
    for etype in ("ctetra", "chexa", "cpenta", "cpyram"):
        attr = f"{etype}_stress"
        if not hasattr(stress_container, attr):
            continue
        stress_dict = getattr(stress_container, attr)
        if not stress_dict:
            continue
        available = list(stress_dict.keys())
        # Fall back to the first available subcase when the requested one is
        # absent (single-subcase OP2s may be keyed differently).
        actual_sub = subcase if subcase in available else available[0]
        stress = stress_dict[actual_sub]
        if not stress.is_von_mises:
            continue
        # The von Mises column position depends on the output-format width;
        # index 9 for the >=10-column solid layout — TODO confirm the
        # 8-column fallback against the pyNastran data layout.
        ncols = stress.data.shape[2]
        vm_col = 9 if ncols >= 10 else (7 if ncols == 8 else ncols - 1)
        itime = 0  # linear static solve → single result step
        for row_idx, (eid, _nid) in enumerate(stress.element_node):
            vm = float(stress.data[itime, row_idx, vm_col])
            eid_vm_lists.setdefault(eid, []).append(vm)
    if not eid_vm_lists:
        print(" [StressField] No solid element stress found in OP2")
        return _empty
    # Average integration points → one value per element
    eid_vm = {eid: float(np.mean(vals)) for eid, vals in eid_vm_lists.items()}
    # ── 3. Compute element centroids + filter to sandbox ─────────────────────
    sandbox_polygon = Polygon(sandbox_geometry["outer_boundary"])
    nodes_xy: List[List[float]] = []
    stress_vals: List[float] = []
    for eid, vm in eid_vm.items():
        if eid not in bdf.elements:
            continue
        try:
            nids = bdf.elements[eid].node_ids
        except Exception:
            continue
        # Centroid from the nodes we could position; skip if none resolved.
        pts = [node_pos[nid] for nid in nids if nid in node_pos]
        if not pts:
            continue
        centroid = np.mean(pts, axis=0)  # [x, y, z] 3D
        x, y = float(centroid[0]), float(centroid[1])  # flat plate → Z constant
        if not sandbox_polygon.contains(Point(x, y)):
            continue
        # Convert kPa → MPa (NX kg-mm-s unit system)
        val = vm / 1000.0 if convert_to_mpa else vm
        nodes_xy.append([x, y])
        stress_vals.append(val)
    n = len(stress_vals)
    if n > 0:
        print(f" [StressField] sandbox '{sandbox_geometry.get('sandbox_id', '?')}': "
              f"{n} elements max={max(stress_vals):.1f} MPa")
    else:
        # Zero matches usually means the OP2 coordinates are in a different
        # frame than the sandbox polygon (see module docstring).
        print(f" [StressField] sandbox '{sandbox_geometry.get('sandbox_id', '?')}': "
              f"0 elements in polygon (check coordinate frame)")
    return {
        "nodes_xy": nodes_xy,
        "stress_values": stress_vals,
        "n_elements": n,
    }

View File

@@ -0,0 +1,404 @@
"""
Per-trial figure generation for isogrid optimization.
Saves 4 PNG figures per sandbox per trial into the trial folder:
{sandbox_id}_density.png — density field heatmap η(x,y) [after Brain]
{sandbox_id}_mesh.png — Gmsh triangulation overlaid on density [after Brain]
{sandbox_id}_ribs.png — final rib profile (pockets) [after Brain]
{sandbox_id}_stress.png — Von Mises stress field (per-element) [after NX solve]
The stress figure overlays FEA results onto the rib pattern so you can see
which triangles/pockets are over/under-stressed — the key diagnostic for tuning
density field parameters (η₀, α, β, R₀, s_min, s_max).
These PNGs are NEVER deleted by the retention policy — full history is preserved.
Usage:
from plot_trial import plot_trial_figures, plot_stress_figures
# After Brain (density, mesh, ribs):
plot_trial_figures(sb_data, trial_dir)
# After NX solve (stress):
stress_fields = {
"sandbox_1": {"nodes_xy": [...], "stress_values": [...], "n_elements": N},
"sandbox_2": {...},
}
plot_stress_figures(sb_data, stress_fields, trial_dir, sigma_allow=100.6)
"""
from __future__ import annotations
import sys
from pathlib import Path
import matplotlib
matplotlib.use("Agg") # headless — required when NX session may own the display
import matplotlib.pyplot as plt
import matplotlib.patches as mpatches
import numpy as np
# Project root on path (run_optimization.py sets sys.path already)
sys.path.insert(0, str(Path(__file__).resolve().parents[4]))
from optimization_engine.isogrid.density_field import evaluate_density_grid
# ─── Resolution for density grid (mm). 5mm is fast enough for plotting. ────
_DENSITY_RESOLUTION = 5.0
_DPI = 150
def plot_trial_figures(sb_data: list[dict], trial_dir: Path) -> list[Path]:
    """
    Generate and save the density/mesh/rib figures for one trial.

    Parameters
    ----------
    sb_data : list of dicts, one per sandbox.
        Each dict must have keys:
        sandbox_id, geometry, params, triangulation, pockets, ribbed_plate
    trial_dir : Path
        The trial folder where PNGs will be saved.

    Returns
    -------
    List of Path objects for the files that were written (figures that
    could not be rendered are silently dropped from the list).
    """
    outputs: list[Path] = []
    for sandbox in sb_data:
        try:
            sid = sandbox["sandbox_id"]
            geometry = sandbox["geometry"]
            parameters = sandbox["params"]
            triangulation = sandbox["triangulation"]
            pocket_list = sandbox["pockets"]
            ribbed = sandbox["ribbed_plate"]
            # Appended one at a time so earlier figures survive a later failure.
            outputs.append(_plot_density(geometry, parameters, trial_dir / f"{sid}_density.png"))
            outputs.append(_plot_mesh(geometry, parameters, triangulation, trial_dir / f"{sid}_mesh.png"))
            outputs.append(_plot_ribs(geometry, pocket_list, ribbed, trial_dir / f"{sid}_ribs.png"))
        except Exception as exc:
            # A broken sandbox must not abort plotting for the other sandboxes.
            print(f" [Plot] WARNING: could not save figures for {sandbox.get('sandbox_id', '?')}: {exc}")
    return [path for path in outputs if path is not None]
# =============================================================================
# Figure 1 — Density heatmap
# =============================================================================
def _plot_density(geometry: dict, params: dict, out_path: Path) -> Path | None:
"""Save density field heatmap with boundary and hole outlines."""
try:
X, Y, eta = evaluate_density_grid(geometry, params, resolution=_DENSITY_RESOLUTION)
except Exception:
return None
fig, ax = plt.subplots(figsize=(9, 5), dpi=_DPI)
m = ax.pcolormesh(X, Y, eta, shading="auto", cmap="viridis", vmin=0.0, vmax=1.0)
fig.colorbar(m, ax=ax, label="Density η (0 = sparse · 1 = dense)", shrink=0.8)
# Outer boundary
_draw_boundary(ax, geometry["outer_boundary"], color="white", lw=1.5, alpha=0.85)
# Holes
for hole in geometry.get("holes", []):
_draw_hole(ax, hole, color="#ff6b6b", lw=1.2)
ax.set_aspect("equal")
sb_id = geometry.get("sandbox_id", "?")
ax.set_title(
f"Density Field — {sb_id}\n"
f"η₀={params['eta_0']:.2f} α={params['alpha']:.2f} β={params['beta']:.2f} "
f"γ_s={params['gamma_stress']:.2f} R₀={params['R_0']:.0f} R_e={params['R_edge']:.0f}",
fontsize=8,
)
ax.set_xlabel("x (mm)", fontsize=8)
ax.set_ylabel("y (mm)", fontsize=8)
ax.tick_params(labelsize=7)
fig.tight_layout()
fig.savefig(out_path, dpi=_DPI, bbox_inches="tight")
plt.close(fig)
return out_path
# =============================================================================
# Figure 2 — Gmsh triangulation overlay
# =============================================================================
def _plot_mesh(geometry: dict, params: dict, triangulation: dict, out_path: Path) -> Path | None:
"""Save triangulation overlaid on a translucent density background."""
vertices = triangulation.get("vertices")
triangles = triangulation.get("triangles")
if vertices is None or len(vertices) == 0:
return None
try:
X, Y, eta = evaluate_density_grid(geometry, params, resolution=_DENSITY_RESOLUTION)
except Exception:
eta = None
fig, ax = plt.subplots(figsize=(9, 5), dpi=_DPI)
# Density background (translucent)
if eta is not None:
ax.pcolormesh(X, Y, eta, shading="auto", cmap="viridis",
vmin=0.0, vmax=1.0, alpha=0.35)
# Triangle edges
if triangles is not None and len(triangles) > 0:
ax.triplot(
vertices[:, 0], vertices[:, 1], triangles,
"k-", lw=0.35, alpha=0.75,
)
# Outer boundary
_draw_boundary(ax, geometry["outer_boundary"], color="#00cc66", lw=1.5)
# Holes (keepout rings)
for hole in geometry.get("holes", []):
_draw_hole(ax, hole, color="#4488ff", lw=1.2)
# Keepout ring (d_keep × hole_radius)
d_keep = params.get("d_keep", 1.2)
r_hole = hole.get("radius", 0) or hole.get("diameter", 0) / 2.0
if r_hole > 0:
keepout = plt.Circle(
hole["center"], r_hole * (1.0 + d_keep),
color="#4488ff", fill=False, lw=0.8, ls="--", alpha=0.5,
)
ax.add_patch(keepout)
ax.set_aspect("equal")
n_tri = len(triangles) if triangles is not None else 0
n_pts = len(vertices)
sb_id = geometry.get("sandbox_id", "?")
ax.set_title(
f"Triangulation — {sb_id} ({n_tri} triangles · {n_pts} vertices)\n"
f"s_min={params['s_min']:.1f} mm s_max={params['s_max']:.1f} mm",
fontsize=8,
)
ax.set_xlabel("x (mm)", fontsize=8)
ax.set_ylabel("y (mm)", fontsize=8)
ax.tick_params(labelsize=7)
fig.tight_layout()
fig.savefig(out_path, dpi=_DPI, bbox_inches="tight")
plt.close(fig)
return out_path
# =============================================================================
# Figure 3 — Final rib profile
# =============================================================================
def _plot_ribs(geometry: dict, pockets: list, ribbed_plate, out_path: Path) -> Path | None:
    """Render the final rib layout: grey plate, pink pockets, blue bolt holes."""
    fig, ax = plt.subplots(figsize=(9, 5), dpi=_DPI)
    # Start from a solid grey plate (all material present), outlined in green.
    outline = np.array(geometry["outer_boundary"])
    ax.fill(outline[:, 0], outline[:, 1], color="#e8e8e8", zorder=0)
    _draw_boundary(ax, geometry["outer_boundary"], color="#00cc66", lw=1.5, zorder=3)
    # Each pocket dict (from generate_pockets()) carries a 'polyline' ring;
    # fill it pink = material the isogrid removes.
    for pocket in pockets:
        try:
            ring = pocket.get("polyline", [])
            if len(ring) < 3:
                continue  # degenerate — not a polygon
            ax.add_patch(mpatches.Polygon(
                np.array(ring), closed=True,
                facecolor="#ffaaaa", edgecolor="#cc4444",
                lw=0.5, alpha=0.85, zorder=2,
            ))
        except Exception:
            # One malformed pocket must not kill the whole figure.
            pass
    # Bolt holes stay solid (keep zones) — outlined in blue on top.
    for hole in geometry.get("holes", []):
        _draw_hole(ax, hole, color="#2255cc", lw=1.2, zorder=4)
    ax.set_aspect("equal")
    ax.set_title(
        f"Rib Profile — {geometry.get('sandbox_id', '?')} ({len(pockets)} pockets)\n"
        f"pink = material removed · grey = rib material · blue = bolt holes",
        fontsize=8,
    )
    ax.set_xlabel("x (mm)", fontsize=8)
    ax.set_ylabel("y (mm)", fontsize=8)
    ax.tick_params(labelsize=7)
    # Zoom to the sandbox extents with a small margin.
    pad = 5.0
    ax.set_xlim(outline[:, 0].min() - pad, outline[:, 0].max() + pad)
    ax.set_ylim(outline[:, 1].min() - pad, outline[:, 1].max() + pad)
    fig.tight_layout()
    fig.savefig(out_path, dpi=_DPI, bbox_inches="tight")
    plt.close(fig)
    return out_path
# =============================================================================
# Geometry helpers
# =============================================================================
def _draw_boundary(ax, outer_boundary, color, lw, alpha=1.0, zorder=2):
"""Draw a closed polygon boundary."""
pts = np.array(outer_boundary)
x = np.append(pts[:, 0], pts[0, 0])
y = np.append(pts[:, 1], pts[0, 1])
ax.plot(x, y, color=color, lw=lw, alpha=alpha, zorder=zorder)
def _draw_hole(ax, hole: dict, color, lw, zorder=3):
    """Outline a circular hole.

    geometry_schema normalizes inner boundaries to dicts keyed by
    'diameter'; raw dicts may instead carry 'radius' — both are accepted.
    """
    cx, cy = hole["center"]
    radius = hole.get("radius", 0) or hole.get("diameter", 0) / 2.0
    if radius > 0:
        ring = plt.Circle((cx, cy), radius, color=color, fill=False, lw=lw, zorder=zorder)
        ax.add_patch(ring)
# =============================================================================
# Figure 4 — Von Mises stress field (post-NX-solve)
# =============================================================================
def plot_stress_figures(
    sb_data: list[dict],
    stress_fields: dict,
    trial_dir: Path,
    sigma_allow: float = 100.6,
) -> list[Path]:
    """
    Generate and save stress heatmap figures for one trial.

    Must be called AFTER the NX solve (stress_fields comes from
    extract_sandbox_stress_field()).

    Parameters
    ----------
    sb_data : list of dicts (same as plot_trial_figures)
    stress_fields : dict keyed by sandbox_id — each value holds
        {"nodes_xy": [...], "stress_values": [...], "n_elements": int}
    trial_dir : Path where PNGs are saved
    sigma_allow : allowable stress in MPa — shown as reference line on colorbar
    """
    saved: list[Path] = []
    for entry in sb_data:
        sandbox = entry["sandbox_id"]
        field_data = stress_fields.get(sandbox, {})
        # Skip sandboxes whose extraction produced no elements
        if not field_data.get("n_elements", 0):
            continue
        try:
            png = _plot_stress(
                geometry=entry["geometry"],
                pockets=entry["pockets"],
                stress_field=field_data,
                sigma_allow=sigma_allow,
                out_path=trial_dir / f"{sandbox}_stress.png",
            )
        except Exception as exc:
            print(f" [Plot] WARNING: stress figure failed for {sandbox}: {exc}")
        else:
            if png:
                saved.append(png)
    return saved
def _plot_stress(
    geometry: dict,
    pockets: list,
    stress_field: dict,
    sigma_allow: float,
    out_path: Path,
) -> Path | None:
    """
    Von Mises stress heatmap overlaid with rib pocket outlines.

    Shows which triangles/pockets are over-stressed vs under-stressed.
    White pocket outlines make the rib pattern visible against the stress field.

    Returns the written path, or None when fewer than 3 nodes are available.
    """
    nodes_xy = np.array(stress_field.get("nodes_xy", []))
    stress_vals = np.array(stress_field.get("stress_values", []))
    if len(nodes_xy) < 3:
        return None
    fig, ax = plt.subplots(figsize=(9, 5), dpi=_DPI)
    cmap = "RdYlGn_r"  # green = low stress, red = high/overloaded
    # Stretch the colour range slightly past the limit so violations stand out
    vmax = max(float(np.max(stress_vals)), sigma_allow * 1.05)

    def _scatter_with_colorbar():
        # Point-cloud fallback used when contouring is not possible/failed
        sc = ax.scatter(nodes_xy[:, 0], nodes_xy[:, 1], c=stress_vals,
                        cmap=cmap, s=10, vmin=0, vmax=vmax, alpha=0.85)
        return fig.colorbar(sc, ax=ax, label="Von Mises (MPa)", shrink=0.8)

    # Stress field — tricontourf when enough points, scatter otherwise
    if len(nodes_xy) >= 6:
        from matplotlib.tri import Triangulation  # (LinearTriInterpolator was imported but never used)
        try:
            triang = Triangulation(nodes_xy[:, 0], nodes_xy[:, 1])
            tc = ax.tricontourf(triang, stress_vals, levels=20,
                                cmap=cmap, vmin=0, vmax=vmax)
            cb = fig.colorbar(tc, ax=ax, label="Von Mises (MPa)", shrink=0.8)
        except Exception:
            cb = _scatter_with_colorbar()
    else:
        cb = _scatter_with_colorbar()
    # Mark σ_allow on colorbar
    cb.ax.axhline(y=sigma_allow, color="black", lw=1.2, ls="--")
    cb.ax.text(1.05, sigma_allow / vmax, f"σ_allow\n{sigma_allow:.0f}",
               transform=cb.ax.transAxes, fontsize=6, va="center", ha="left")
    # Rib pocket outlines (white) — so we can visually correlate stress with pockets
    for pocket in pockets:
        polyline = pocket.get("polyline", [])
        if len(polyline) >= 3:
            coords = np.array(polyline)
            patch = mpatches.Polygon(coords, closed=True,
                                     facecolor="none", edgecolor="white",
                                     lw=0.6, alpha=0.75, zorder=3)
            ax.add_patch(patch)
    # Outer boundary + holes
    _draw_boundary(ax, geometry["outer_boundary"], color="#00cc66", lw=1.5, zorder=4)
    for hole in geometry.get("holes", []):
        _draw_hole(ax, hole, color="#4488ff", lw=1.2, zorder=4)
    ax.set_aspect("equal")
    sb_id = geometry.get("sandbox_id", "?")
    n_el = stress_field.get("n_elements", 0)
    max_s = float(np.max(stress_vals))
    feasible = max_s <= sigma_allow
    status = "OK" if feasible else f"OVER by {max_s - sigma_allow:.1f} MPa"
    ax.set_title(
        f"Von Mises Stress — {sb_id} ({n_el} elements) [{status}]\n"
        f"max = {max_s:.1f} MPa · σ_allow = {sigma_allow:.0f} MPa "
        f"· dashed line = limit",
        fontsize=8,
    )
    ax.set_xlabel("x (mm)", fontsize=8)
    ax.set_ylabel("y (mm)", fontsize=8)
    ax.tick_params(labelsize=7)
    boundary = np.array(geometry["outer_boundary"])
    margin = 5.0
    ax.set_xlim(boundary[:, 0].min() - margin, boundary[:, 0].max() + margin)
    ax.set_ylim(boundary[:, 1].min() - margin, boundary[:, 1].max() + margin)
    fig.tight_layout()
    fig.savefig(out_path, dpi=_DPI, bbox_inches="tight")
    plt.close(fig)
    return out_path

View File

@@ -0,0 +1,630 @@
"""
Isogrid Dev Plate — Mass Minimization Study 01 (TPE v1)
========================================================
Objective: Minimize total plate mass
Constraint: max von Mises stress ≤ σ_allow = 100.6 MPa (SF = 5)
No displacement constraint — confirmed 2026-02-18
Material: AL7075-T6 (ρ = 2810 kg/m³, σ_yield = 503 MPa)
8 design variables (see PARAM_SPACE in optimization_engine/isogrid/study.py):
η₀, α, β, γ_stress, R₀, R_edge, s_min, s_max
Pipeline per trial:
1. Python Brain: params → rib profiles for sandbox_1 and sandbox_2
2. NX journal: import_profile.py → update sketch in-place
3. NX journal: solve_simulation.py → remesh + solve + write mass JSON
4. Extract: mass from _temp_part_properties.json (written by solve journal)
5. Extract: max von Mises stress from OP2
6. Objective: mass_kg + stress_penalty
Model files (working copies in 1_setup/model/):
1_setup/model/ACS_Stack_Main_Plate_Iso_Project.prt
1_setup/model/ACS_Stack_Main_Plate_Iso_project_fem2_i.prt ← CRITICAL: must exist!
1_setup/model/ACS_Stack_Main_Plate_Iso_project_fem2.fem
1_setup/model/ACS_Stack_Main_Plate_Iso_project_sim2.sim
1_setup/model/adaptive_isogrid_data/geometry_sandbox_1.json
1_setup/model/adaptive_isogrid_data/geometry_sandbox_2.json
"""
from __future__ import annotations
import json
import re
import shutil
import subprocess
import sys
import time
from datetime import datetime
from pathlib import Path
import optuna
# ─── Project root + study directory on path ──────────────────────────────────
PROJECT_ROOT = Path(__file__).resolve().parents[4] # .../Atomizer
STUDY_DIR_EARLY = Path(__file__).resolve().parent # studies/01_v1_tpe/
sys.path.insert(0, str(PROJECT_ROOT))
sys.path.insert(0, str(STUDY_DIR_EARLY)) # makes plot_trial / trial_retention importable
# ─── Python Brain imports ─────────────────────────────────────────────────────
from optimization_engine.isogrid import (
generate_triangulation,
generate_pockets,
assemble_profile,
profile_to_json,
validate_profile,
normalize_geometry_schema,
)
from optimization_engine.isogrid.study import PARAM_SPACE, MANUFACTURING_CONSTRAINTS, MATH_CONSTANTS, MATERIAL
# ─── Extractor imports ────────────────────────────────────────────────────────
from optimization_engine.extractors.extract_part_mass_material import extract_part_mass_material
from optimization_engine.extractors.extract_mass_from_expression import extract_mass_from_expression
from optimization_engine.extractors.extract_von_mises_stress import extract_solid_stress
# ─── NX solver ───────────────────────────────────────────────────────────────
from optimization_engine.nx.solver import NXSolver
# ─── Local study utilities ───────────────────────────────────────────────────
from plot_trial import plot_trial_figures, plot_stress_figures
from trial_retention import TrialRetentionManager
from extract_sandbox_stress import extract_sandbox_stress_field
# =============================================================================
# Constants
# =============================================================================
STUDY_DIR = Path(__file__).parent  # studies/01_v1_tpe/ (note: not resolve()d, unlike STUDY_DIR_EARLY)
def _pick_model_dir(study_dir: Path) -> Path:
"""Pick the model directory that actually has the required NX files."""
candidates = [
study_dir / "model",
study_dir / "1_setup" / "model",
]
required = [
"ACS_Stack_Main_Plate_Iso_project_sim2.sim",
"ACS_Stack_Main_Plate_Iso_project_fem2_i.prt",
]
for cand in candidates:
if cand.exists() and all((cand / name).exists() for name in required):
return cand
# fallback to legacy default (keeps preflight behavior explicit)
return study_dir / "1_setup" / "model"
def _pick_results_dir(study_dir: Path) -> Path:
"""Prefer modern 3_results, but stay compatible with legacy results/."""
modern = study_dir / "3_results"
legacy = study_dir / "results"
if modern.exists() or not legacy.exists():
return modern
return legacy
MODEL_DIR = _pick_model_dir(STUDY_DIR)  # directory holding the NX working copies
DATA_DIR = MODEL_DIR / "adaptive_isogrid_data"  # geometry / rib-profile JSON exchange dir
RESULTS_DIR = _pick_results_dir(STUDY_DIR)  # study.db lives here
ITER_DIR = STUDY_DIR / "2_iterations"  # one trial_NNNN folder per trial
# NX model files
SIM_FILE = MODEL_DIR / "ACS_Stack_Main_Plate_Iso_project_sim2.sim"
PRT_I_FILE = MODEL_DIR / "ACS_Stack_Main_Plate_Iso_project_fem2_i.prt"  # idealized part — required for mesh update
FEM_FILE = MODEL_DIR / "ACS_Stack_Main_Plate_Iso_project_fem2.fem"
# NX import journal
IMPORT_JOURNAL = PROJECT_ROOT / "tools" / "adaptive-isogrid" / "src" / "nx" / "import_profile.py"
# NX runner — DesigncenterNX2512 (production install)
NX_VERSION = "2512"
# Material: AL7075-T6
SIGMA_ALLOW = MATERIAL["sigma_allow_MPa"]  # 100.6 MPa
SIGMA_YIELD = MATERIAL["sigma_yield_MPa"]  # 503.0 MPa
# Optuna
N_TRIALS = 200  # total trial budget (resume-aware — see main())
STUDY_NAME = "isogrid_01_v1_tpe"
DB_PATH = RESULTS_DIR / "study.db"  # sqlite storage for optuna.create_study
# =============================================================================
# Parameter helpers
# =============================================================================
def build_full_params(trial_params: dict) -> dict:
    """Merge sampled vars with fixed manufacturing constraints and math constants."""
    merged = dict(trial_params)
    merged.update({name: cfg["value"] for name, cfg in MANUFACTURING_CONSTRAINTS.items()})
    merged.update({name: cfg["value"] for name, cfg in MATH_CONSTANTS.items()})
    return merged
# =============================================================================
# NX journal runner
# =============================================================================
def find_run_journal_exe() -> Path:
    """Locate run_journal.exe — DesigncenterNX only (production install).

    Raises FileNotFoundError when neither known install path exists.
    """
    candidates = [
        Path(f"C:/Program Files/Siemens/DesigncenterNX{NX_VERSION}/NXBIN/run_journal.exe"),
        Path(f"C:/Program Files/Siemens/Simcenter3D_{NX_VERSION}/NXBIN/run_journal.exe"),
    ]
    found = next((exe for exe in candidates if exe.exists()), None)
    if found is not None:
        return found
    raise FileNotFoundError(
        f"run_journal.exe not found. Checked: {[str(p) for p in candidates]}"
    )
def run_nx_journal(journal_path: Path, model_dir: Path, timeout: int = 300) -> bool:
    """
    Run an NX journal via run_journal.exe.

    The journal is executed with model_dir as the working directory,
    so NX will open files relative to that directory.

    Returns True on success, False on failure (non-zero exit, timeout, or error).
    """
    exe = find_run_journal_exe()
    print(f" [NX] Running journal: {journal_path.name}")
    started = time.time()
    try:
        proc = subprocess.run(
            [str(exe), str(journal_path)],
            cwd=str(model_dir),
            capture_output=True,
            text=True,
            timeout=timeout,
        )
    except subprocess.TimeoutExpired:
        print(f" [NX] TIMEOUT after {timeout}s")
        return False
    except Exception as exc:
        print(f" [NX] ERROR: {exc}")
        return False
    took = time.time() - started
    if proc.returncode != 0:
        print(f" [NX] FAILED (exit {proc.returncode}) in {took:.1f}s")
        if proc.stderr:
            print(f" [NX] stderr: {proc.stderr[:500]}")
        return False
    print(f" [NX] OK in {took:.1f}s")
    return True
def _extract_mass_robust(solve_result: dict, model_dir: Path, prt_i_file: Path) -> float:
    """
    Robust mass extraction — 3-step fallback chain.

    1. _temp_part_properties.json (full JSON from solve_simulation journal — preferred)
    2. _temp_mass.txt (lightweight expression dump — fallback)
    3. journal stdout (parse [JOURNAL] Mass ... = N lines — last resort)

    Temp files are cleared before each NX run (see step 4 in objective), so any
    file that exists here is guaranteed to be from the current trial's solve.

    Raises FileNotFoundError when every fallback fails.
    """
    props_file = model_dir / "_temp_part_properties.json"
    mass_file = model_dir / "_temp_mass.txt"
    # 1) Full JSON written by NXOpen MeasureManager in solve_simulation journal
    if props_file.exists() and prt_i_file.exists():
        try:
            props = extract_part_mass_material(prt_i_file, properties_file=props_file)
            return float(props["mass_kg"])
        except Exception as err:
            print(f" [Mass] Fallback 1 failed ({err}), trying _temp_mass.txt …")
    # 2) Lightweight mass dump — expression p173 written by journal
    if mass_file.exists() and prt_i_file.exists():
        try:
            return float(extract_mass_from_expression(prt_i_file, expression_name="p173"))
        except Exception as err:
            print(f" [Mass] Fallback 2 failed ({err}), trying stdout parse …")
    # 3) Parse journal stdout for any [JOURNAL] mass line
    journal_out = solve_result.get("stdout", "") or ""
    match = re.search(
        r"\[JOURNAL\]\s+(?:Mass extracted|MeasureManager mass|Mass expression p173)\s*=\s*([0-9.eE+-]+)",
        journal_out,
    )
    if match is not None:
        return float(match.group(1))
    raise FileNotFoundError(
        "Mass extraction failed: all 3 fallbacks exhausted "
        "(missing _temp_part_properties.json, _temp_mass.txt, and no mass in journal stdout)"
    )
# =============================================================================
# Trial numbering (filesystem-based — no separate DB needed)
# =============================================================================
def _next_trial_number(iter_dir: Path) -> int:
"""Next trial number — max of existing trial_NNNN folders + 1 (1-based)."""
max_n = 0
for p in iter_dir.glob("trial_????"):
try:
max_n = max(max_n, int(p.name.split("_")[1]))
except (IndexError, ValueError):
pass
return max_n + 1
# =============================================================================
# Objective function
# =============================================================================
def make_objective(rm: TrialRetentionManager):
    """Return the Optuna objective closure, capturing the RetentionManager."""
    def objective(trial: optuna.Trial) -> float:
        """
        Optuna objective: minimize mass + stress penalty.
        Returns float (the combined objective). Infeasible or failed trials
        return a large penalty to steer the sampler away.
        """
        optuna_num = trial.number
        print(f"\n--- Trial {optuna_num} ---")
        # ── 1. Sample parameters ──────────────────────────────────────────────
        sampled = {}
        for name, cfg in PARAM_SPACE.items():
            sampled[name] = trial.suggest_float(name, cfg["low"], cfg["high"])
        params = build_full_params(sampled)
        print(f" η₀={params['eta_0']:.3f} α={params['alpha']:.3f} β={params['beta']:.3f} "
              f"γ_s={params['gamma_stress']:.3f} R₀={params['R_0']:.1f} "
              f"R_e={params['R_edge']:.1f} s_min={params['s_min']:.1f} s_max={params['s_max']:.1f}")
        # ── 2. Reserve trial folder (filesystem-based numbering) ──────────────
        # NOTE(review): numbering scans the filesystem without locking — safe for
        # the single-process runner used here; confirm before parallel optimize().
        trial_number = _next_trial_number(ITER_DIR)
        trial_dir = ITER_DIR / f"trial_{trial_number:04d}"
        trial_dir.mkdir(parents=True, exist_ok=True)
        # Write params immediately (before NX, so folder exists even on failure)
        (trial_dir / "params.json").write_text(json.dumps(sampled, indent=2))
        # ── 3. Python Brain: generate rib profiles ───────────────────────────
        n_pockets_total = 0
        sb_data: list[dict] = []  # accumulated for plotting
        for sb_id in ["sandbox_1", "sandbox_2"]:
            geom_path = DATA_DIR / f"geometry_{sb_id}.json"
            if not geom_path.exists():
                # NOTE(review): a missing geometry file only skips that sandbox —
                # if BOTH are missing the trial still proceeds to NX with 0 pockets.
                print(f" [Brain] MISSING: {geom_path.name} — skipping sandbox")
                continue
            with open(geom_path) as f:
                geometry = normalize_geometry_schema(json.load(f))
            try:
                triangulation = generate_triangulation(geometry, params)
                pockets = generate_pockets(triangulation, geometry, params)
                ribbed_plate = assemble_profile(geometry, pockets, params)
                is_valid, checks = validate_profile(ribbed_plate, params)
                n_pockets = len(pockets)
                n_pockets_total += n_pockets
                print(f" [Brain] {sb_id}: {n_pockets} pockets "
                      f"valid={is_valid} "
                      f"mass_est≈{checks.get('mass_estimate_g', 0):.0f}g")
                profile_data = profile_to_json(ribbed_plate, pockets, geometry, params)
                profile_path = DATA_DIR / f"rib_profile_{sb_id}.json"
                with open(profile_path, "w") as f:
                    json.dump(profile_data, f, indent=2)
                # Copy rib profile to trial folder for reproducibility
                shutil.copy2(profile_path, trial_dir / f"rib_profile_{sb_id}.json")
                # Accumulate for plotting
                sb_data.append({
                    "sandbox_id": sb_id,
                    "geometry": geometry,
                    "params": params,
                    "triangulation": triangulation,
                    "pockets": pockets,
                    "ribbed_plate": ribbed_plate,
                })
            except Exception as exc:
                print(f" [Brain] ERROR on {sb_id}: {exc}")
                trial.set_user_attr("error", f"Brain:{exc}")
                return 1e6
        print(f" [Brain] Total pockets: {n_pockets_total}")
        # ── 3b. Save per-trial figures (density, mesh, rib pattern) ──────────
        t_fig = time.time()
        n_figs = len(plot_trial_figures(sb_data, trial_dir))
        print(f" [Plot] {n_figs} figures → trial_{trial_number:04d}/ ({time.time()-t_fig:.1f}s)")
        # ── 4. Clear stale mass temp files, then import rib profiles ─────────
        # Delete temp files from any previous trial so we KNOW the ones written
        # after this solve are fresh — prevents silent stale-read across trials.
        for _tmp in ("_temp_part_properties.json", "_temp_mass.txt"):
            _p = MODEL_DIR / _tmp
            try:
                _p.unlink(missing_ok=True)
            except Exception:
                pass
        ok = run_nx_journal(IMPORT_JOURNAL, MODEL_DIR, timeout=120)
        if not ok:
            trial.set_user_attr("error", "NX import journal failed")
            return 1e6
        # ── 5. NX: remesh + solve + extract mass ─────────────────────────────
        solver = NXSolver(nastran_version=NX_VERSION, use_journal=True, study_name=STUDY_NAME)
        try:
            solve_result = solver.run_simulation(SIM_FILE)
        except Exception as exc:
            print(f" [NX] Solve ERROR: {exc}")
            trial.set_user_attr("error", f"Solve:{exc}")
            return 1e6
        if not solve_result.get("success"):
            errors = solve_result.get("errors", [])
            print(f" [NX] Solve FAILED: {errors[:2]}")
            trial.set_user_attr("error", f"SolveFailed:{errors[:1]}")
            return 1e6
        op2_file = solve_result.get("op2_file")
        if not op2_file or not Path(op2_file).exists():
            print(" [NX] OP2 not found after solve")
            trial.set_user_attr("error", "OP2 missing")
            return 1e6
        # ── 5b. Archive model + solver outputs to trial folder (heavy — subject to retention)
        # NX model copies (.prt, .fem, .sim, .afm/.afem) + Nastran results (.op2, .f06, .dat, .log)
        _HEAVY_SUFFIXES = (".prt", ".fem", ".sim", ".afm", ".afem", ".op2", ".f06", ".dat", ".log")
        for suffix in _HEAVY_SUFFIXES:
            for src in MODEL_DIR.glob(f"*{suffix}"):
                try:
                    shutil.copy2(src, trial_dir / src.name)
                except Exception:
                    pass
        # ── 6. Extract mass (robust fallback chain) ─────────────────────────
        try:
            mass_kg = _extract_mass_robust(solve_result, MODEL_DIR, PRT_I_FILE)
            print(f" [Extract] Mass: {mass_kg:.4f} kg ({mass_kg * 1000:.1f} g)")
        except Exception as exc:
            print(f" [Extract] Mass ERROR: {exc}")
            trial.set_user_attr("error", f"Mass:{exc}")
            return 1e6
        # ── 7. Extract max von Mises stress ──────────────────────────────────
        try:
            stress_result = extract_solid_stress(op2_file, subcase=1)
            max_stress = stress_result["max_von_mises"]  # MPa (auto-converted by extractor)
            print(f" [Extract] Max stress: {max_stress:.2f} MPa "
                  f"(allow={SIGMA_ALLOW:.1f} SF={SIGMA_YIELD/max(max_stress, 0.001):.2f})")
        except Exception as exc:
            print(f" [Extract] Stress ERROR: {exc}")
            trial.set_user_attr("error", f"Stress:{exc}")
            return 1e6
        # ── 7b. Extract per-sandbox spatial stress field → stress heatmap PNG ──
        # FEM from trial folder (trial copy — mesh matches this trial's solve)
        fem_copy = trial_dir / FEM_FILE.name
        fem_for_stress = fem_copy if fem_copy.exists() else FEM_FILE
        stress_fields: dict = {}
        for sbd in sb_data:
            sb_id = sbd["sandbox_id"]
            try:
                stress_fields[sb_id] = extract_sandbox_stress_field(
                    op2_file=Path(op2_file),
                    fem_file=fem_for_stress,
                    sandbox_geometry=sbd["geometry"],
                    subcase=1,
                )
            except Exception as exc:
                print(f" [StressField] {sb_id} failed: {exc}")
                stress_fields[sb_id] = {"nodes_xy": [], "stress_values": [], "n_elements": 0}
        t_sfig = time.time()
        n_sfigs = len(plot_stress_figures(sb_data, stress_fields, trial_dir, sigma_allow=SIGMA_ALLOW))
        if n_sfigs:
            print(f" [Plot] {n_sfigs} stress figures → trial_{trial_number:04d}/ "
                  f"({time.time()-t_sfig:.1f}s)")
        # ── 8. Compute objective (stress-only constraint) ─────────────────────
        # Quadratic penalty on the relative violation: 1e4 · ((σ/σ_allow) − 1)²
        penalty = 0.0
        if max_stress > SIGMA_ALLOW:
            penalty = 1e4 * ((max_stress / SIGMA_ALLOW) - 1.0) ** 2
        objective_value = mass_kg + penalty
        sf = SIGMA_YIELD / max(max_stress, 0.001)
        feasible = max_stress <= SIGMA_ALLOW
        print(f" [Obj] mass={mass_kg:.4f} kg penalty={penalty:.2f} "
              f"obj={objective_value:.4f} feasible={feasible}")
        # ── 9. Write results to trial folder ──────────────────────────────────
        results = {
            "mass_kg": round(mass_kg, 4),
            "max_stress_mpa": round(max_stress, 3),
            "safety_factor": round(sf, 3),
            "penalty": round(penalty, 4),
            "objective": round(objective_value, 4),
            "feasible": feasible,
            "n_pockets": n_pockets_total,
        }
        (trial_dir / "results.json").write_text(json.dumps(results, indent=2))
        # ── 10. Log to Optuna user attrs ──────────────────────────────────────
        trial.set_user_attr("mass_kg", round(mass_kg, 4))
        trial.set_user_attr("max_stress_MPa", round(max_stress, 3))
        trial.set_user_attr("safety_factor", round(sf, 3))
        trial.set_user_attr("penalty", round(penalty, 4))
        trial.set_user_attr("n_pockets", n_pockets_total)
        trial.set_user_attr("feasible", feasible)
        trial.set_user_attr("trial_folder", f"trial_{trial_number:04d}")
        # ── 11. File retention: keep last 10 + best 5 heavy files ─────────────
        rm.register(
            trial_number=trial_number,
            trial_dir=trial_dir,
            objective=objective_value,
            mass_kg=mass_kg,
            feasible=feasible,
        )
        stripped = rm.apply()
        if stripped:
            print(f" [Retain] Stripped heavy files from trials: {stripped}")
        return objective_value
    return objective
# =============================================================================
# Pre-flight checks
# =============================================================================
def check_prerequisites():
    """Verify all required files exist before starting optimization."""
    print("Pre-flight checks...")
    missing = []
    required = [
        (SIM_FILE, "Simulation file"),
        (PRT_I_FILE, "Idealized part (CRITICAL for mesh update)"),
        (IMPORT_JOURNAL, "import_profile.py journal"),
        (DATA_DIR / "geometry_sandbox_1.json", "Sandbox 1 geometry"),
        (DATA_DIR / "geometry_sandbox_2.json", "Sandbox 2 geometry"),
    ]
    for path, label in required:
        entry = Path(path)
        if entry.exists():
            print(f" [OK] {label}: {entry.name}")
        else:
            print(f" [MISSING] {label}: {path}")
            missing.append(str(path))
    # Verify run_journal.exe is findable
    try:
        rj = find_run_journal_exe()
        print(f" [OK] run_journal.exe: {rj}")
    except FileNotFoundError as exc:
        print(f" [MISSING] {exc}")
        missing.append("run_journal.exe")
    if missing:
        print(f"\nPre-flight FAILED — {len(missing)} missing item(s).")
        print("Model files should be in: 1_setup/model/")
        print("Geometry JSONs should be in: 1_setup/model/adaptive_isogrid_data/")
        return False
    print("Pre-flight OK.\n")
    return True
# =============================================================================
# Main
# =============================================================================
def main():
    """Entry point: banner, preflight, create/resume the Optuna study, run trials."""
    print("=" * 70)
    print(" Isogrid Dev Plate — Mass Minimization Study 01 (TPE v1)")
    print("=" * 70)
    print(f" Material: {MATERIAL['name']}")
    print(f" σ_yield: {SIGMA_YIELD} MPa")
    print(f" σ_allow: {SIGMA_ALLOW:.1f} MPa (SF = {MATERIAL['safety_factor']})")
    print(f" Trials: {N_TRIALS}")
    print(f" DB: {DB_PATH}")
    print()
    RESULTS_DIR.mkdir(parents=True, exist_ok=True)
    ITER_DIR.mkdir(parents=True, exist_ok=True)
    if not check_prerequisites():
        sys.exit(1)
    # Optuna study — must be created BEFORE any other DB operations
    # load_if_exists=True + the trial-count check below make re-runs resumable.
    study = optuna.create_study(
        study_name=STUDY_NAME,
        direction="minimize",
        storage=f"sqlite:///{DB_PATH}",
        load_if_exists=True,
        sampler=optuna.samplers.TPESampler(seed=42),
    )
    rm = TrialRetentionManager(ITER_DIR, keep_recent=10, keep_best=5)
    n_done = len(study.trials)
    if n_done > 0:
        print(f"Resuming study: {n_done} trials already complete.")
        best = study.best_trial
        print(f"Current best: trial {best.number} obj={best.value:.4f} kg "
              f"mass={best.user_attrs.get('mass_kg', '?')} kg "
              f"SF={best.user_attrs.get('safety_factor', '?')}")
        print()
    # Only run the remainder of the N_TRIALS budget
    remaining = N_TRIALS - n_done
    if remaining <= 0:
        print(f"Study already complete ({n_done}/{N_TRIALS} trials).")
        _print_summary(study)
        return
    print(f"Running {remaining} more trial(s)...\n")
    t_start = datetime.now()
    study.optimize(
        make_objective(rm),
        n_trials=remaining,
        show_progress_bar=True,
    )
    elapsed = (datetime.now() - t_start).total_seconds()
    print(f"\nDone — {remaining} trials in {elapsed/60:.1f} min "
          f"({elapsed/max(remaining,1):.0f}s/trial)")
    _print_summary(study)
def _print_summary(study: optuna.Study):
    """Pretty-print the best trial (objective, user attrs, parameters) to stdout."""
    rule = "=" * 70
    best = study.best_trial
    attrs = best.user_attrs
    print("\n" + rule)
    print(" BEST RESULT")
    print(rule)
    print(f" Trial: {best.number}")
    print(f" Objective: {best.value:.4f}")
    print(f" Mass: {attrs.get('mass_kg', '?')} kg")
    print(f" Max stress: {attrs.get('max_stress_MPa', '?')} MPa")
    print(f" Safety factor: {attrs.get('safety_factor', '?')}")
    print(f" Feasible: {attrs.get('feasible', '?')}")
    print()
    print(" Best parameters:")
    for pname, pval in best.params.items():
        desc = PARAM_SPACE[pname]["desc"]
        print(f" {pname:14s} = {pval:.4f} # {desc}")
    print()
    print(f" DB: {DB_PATH}")
    print(f" Trial folders: {ITER_DIR}")
if __name__ == "__main__":
main()

View File

@@ -0,0 +1,159 @@
"""
Trial file retention policy for isogrid optimization.
Rules:
- NEVER delete: *.png (density, mesh, rib figures — full history always kept)
- NEVER delete: *.json (params, results, rib profiles)
- HEAVY files: NX model copies (.prt, .fem, .sim, .afm, .afem)
+ Nastran outputs (.op2, .f06, .dat, .log)
- KEEP heavy: last KEEP_RECENT trials + best KEEP_BEST trials (by objective)
- STRIP heavy: all other trial folders
Usage in run_optimization.py:
rm = TrialRetentionManager(ITER_DIR, keep_recent=10, keep_best=5)
# after each trial:
rm.register(trial_number, trial_dir, objective=obj, mass_kg=mass, feasible=ok)
rm.apply()
"""
from __future__ import annotations
from dataclasses import dataclass, field
from pathlib import Path
# Extensions considered "heavy" (copied once, stripped when not in keep set)
# NX model copies + Nastran outputs — everything needed to reproduce / re-open a trial
HEAVY_EXTENSIONS = {".prt", ".fem", ".sim", ".afm", ".afem", ".op2", ".f06", ".dat", ".log"}
# Extensions that are NEVER deleted regardless of retention policy
SAFE_EXTENSIONS = {".png", ".json"}


@dataclass
class _TrialRecord:
    """Bookkeeping for one completed trial (folder + outcome metrics)."""
    number: int  # trial number (folder suffix NNNN)
    path: Path  # trial_NNNN directory
    objective: float = float("inf")  # combined objective — lower is better
    mass_kg: float = float("inf")
    feasible: bool = False  # stress constraint satisfied
    has_heavy: bool = True  # heavy files still present on disk


class TrialRetentionManager:
    """
    Manages heavy-file retention across trial folders.

    After each trial:
      1. Call register() with the trial's outcome
      2. Call apply() to enforce the keep-recent + keep-best policy

    PNGs and JSONs are never deleted; only HEAVY_EXTENSIONS files are stripped.
    """

    def __init__(
        self,
        iter_dir: Path,
        keep_recent: int = 10,
        keep_best: int = 5,
    ):
        """
        Parameters
        ----------
        iter_dir : directory containing the trial_NNNN folders
        keep_recent : how many of the most recent trials keep their heavy files
        keep_best : how many of the best trials keep their heavy files
        """
        self.iter_dir = iter_dir
        self.keep_recent = keep_recent
        self.keep_best = keep_best
        self._records: dict[int, _TrialRecord] = {}

    def _keep_set(self) -> set[int]:
        """Trial numbers whose heavy files must be kept (recent ∪ best).

        Shared by apply() and summary() so both always agree on the policy.
        """
        all_nums = sorted(self._records.keys())
        # Set 1: most recent N trials
        recent_set = set(all_nums[-self.keep_recent:])
        # Set 2: best K trials — feasible first, then lowest objective
        sorted_by_quality = sorted(
            self._records.values(),
            key=lambda r: (0 if r.feasible else 1, r.objective),
        )
        best_set = {r.number for r in sorted_by_quality[: self.keep_best]}
        return recent_set | best_set

    def register(
        self,
        trial_number: int,
        trial_dir: Path,
        objective: float,
        mass_kg: float,
        feasible: bool,
    ) -> None:
        """Register a completed trial so the retention policy can track it."""
        # Detect whether any heavy files currently exist on disk
        has_heavy = False
        if trial_dir.exists():
            has_heavy = any(
                f.is_file() and f.suffix in HEAVY_EXTENSIONS
                for f in trial_dir.iterdir()
            )
        self._records[trial_number] = _TrialRecord(
            number=trial_number,
            path=trial_dir,
            objective=objective,
            mass_kg=mass_kg,
            feasible=feasible,
            has_heavy=has_heavy,
        )

    def apply(self) -> list[int]:
        """
        Enforce retention policy.

        Returns list of trial numbers whose heavy files were stripped.
        """
        if not self._records:
            return []
        keep_set = self._keep_set()
        stripped: list[int] = []
        for num, record in self._records.items():
            if num not in keep_set and record.has_heavy:
                if self._strip_heavy(record) > 0:
                    stripped.append(num)
        return stripped

    def _strip_heavy(self, record: _TrialRecord) -> int:
        """
        Remove heavy files from a trial folder.

        PNGs and JSONs are NEVER touched.
        Returns the number of files removed.
        """
        if not record.path.exists():
            record.has_heavy = False
            return 0
        removed = 0
        for f in list(record.path.iterdir()):
            if f.is_file() and f.suffix in HEAVY_EXTENSIONS:
                f.unlink()
                removed += 1
        record.has_heavy = False
        return removed

    def summary(self) -> dict:
        """Return a brief status summary of the retention state."""
        keep_set = self._keep_set()
        return {
            "total_trials": len(self._records),
            "keep_recent": self.keep_recent,
            "keep_best": self.keep_best,
            "currently_kept": sorted(keep_set),
            "stripped": sorted(
                n for n, r in self._records.items()
                if not r.has_heavy and n not in keep_set
            ),
        }