# --- Export header (was non-Python viewer residue; kept as a comment) ---
# Original path: Atomizer/tools/adaptive-isogrid/src/nx/extract_sandbox.py
# Original size: 844 lines, 30 KiB, Python

"""
NXOpen script — Extract sandbox face geometry for Adaptive Isogrid.
Runs from the .sim file context. Navigates:
SIM → FEM → Idealized Part → find bodies with ISOGRID_SANDBOX attribute
For each sandbox body, exports `geometry_<sandbox_id>.json` containing:
- outer_boundary: 2D polyline of the sandbox outline
- inner_boundaries: 2D polylines of cutouts (reserved cylinder intersections, etc.)
- transform: 3D <-> 2D mapping for reimporting geometry
- thickness: from NX midsurface (if available)
Inner loops are treated as boundary constraints (edges), NOT as holes to rib around,
because hole reservations are handled by separate solid cylinders in the fixed geometry.
Usage (NX Journal — just run it, no args needed):
File > Execute > NX Journal > extract_sandbox.py
Author: Atomizer / Adaptive Isogrid
Created: 2026-02-16
"""
from __future__ import annotations
import json
import math
import os
import sys
from dataclasses import dataclass
from pathlib import Path
from typing import Any, Dict, List, Sequence, Tuple
# ---------------------------------------------------------------------------
# Geometry helpers (pure math, no NX dependency)
# ---------------------------------------------------------------------------
# Type aliases: plain tuples keep the pure-math helpers JSON-friendly and NX-free.
Point3D = Tuple[float, float, float]  # (x, y, z) in model space, mm
Point2D = Tuple[float, float]  # (u, v) in a face's local frame, mm
@dataclass
class LocalFrame:
    """Orthonormal frame on a planar face: maps between 3D model space and
    2D in-plane coordinates (x_axis/y_axis span the plane)."""
    origin: Point3D  # 3D anchor point of the frame
    x_axis: Point3D  # in-plane X direction (unit vector)
    y_axis: Point3D  # in-plane Y direction (unit vector)
    normal: Point3D  # face normal (unit vector)
def _norm(v: Sequence[float]) -> float:
return math.sqrt(sum(c * c for c in v))
def _normalize(v: Sequence[float]) -> Tuple[float, float, float]:
n = _norm(v)
if n < 1e-12:
return (0.0, 0.0, 1.0)
return (v[0] / n, v[1] / n, v[2] / n)
def _dot(a: Sequence[float], b: Sequence[float]) -> float:
return a[0] * b[0] + a[1] * b[1] + a[2] * b[2]
def _cross(a: Sequence[float], b: Sequence[float]) -> Tuple[float, float, float]:
return (
a[1] * b[2] - a[2] * b[1],
a[2] * b[0] - a[0] * b[2],
a[0] * b[1] - a[1] * b[0],
)
def _sub(a: Sequence[float], b: Sequence[float]) -> Tuple[float, float, float]:
return (a[0] - b[0], a[1] - b[1], a[2] - b[2])
def project_to_2d(points3d: Sequence[Point3D], frame: LocalFrame) -> List[Point2D]:
    """Project 3D points into (u, v) coordinates of the frame's plane."""
    ox, oy, oz = frame.origin[0], frame.origin[1], frame.origin[2]
    ux, uy, uz = frame.x_axis[0], frame.x_axis[1], frame.x_axis[2]
    wx, wy, wz = frame.y_axis[0], frame.y_axis[1], frame.y_axis[2]
    projected: List[Point2D] = []
    for p in points3d:
        dx, dy, dz = p[0] - ox, p[1] - oy, p[2] - oz
        projected.append((dx * ux + dy * uy + dz * uz, dx * wx + dy * wy + dz * wz))
    return projected
def unproject_to_3d(points2d: Sequence[Point2D], frame: LocalFrame) -> List[Point3D]:
    """Inverse of project_to_2d — lift local (u, v) coordinates back to 3D."""
    origin, xa, ya = frame.origin, frame.x_axis, frame.y_axis
    restored: List[Point3D] = []
    for u, w in points2d:
        restored.append((
            origin[0] + u * xa[0] + w * ya[0],
            origin[1] + u * xa[1] + w * ya[1],
            origin[2] + u * xa[2] + w * ya[2],
        ))
    return restored
# ---------------------------------------------------------------------------
# NX edge sampling
# ---------------------------------------------------------------------------
def _sample_edge_polyline(edge: Any, chord_tol_mm: float, lister: Any = None) -> List[Point3D]:
    """
    Sample an NX edge as a polyline.

    Args:
        edge: NX Edge object; must support GetVertices(), other APIs are probed.
        chord_tol_mm: target chord spacing in mm (tighter tol => more points).
        lister: optional NX ListingWindow for diagnostics.

    Returns:
        List of 3D points along the edge (at minimum the two end vertices).

    Raises:
        RuntimeError: if Edge.GetVertices() fails — the edge is unusable.

    Strategy (in order):
    1) Linear edges -> vertices only
    2) UF_EVAL sampling (robust parsing for NX Python return variants)
    3) IBaseCurve.Evaluate fallback (if available)
    4) UF arc analytic fallback for circular edges
    5) Last resort -> vertices only
    """
    def _log(msg: str) -> None:
        # Diagnostics only when a listing window was supplied.
        if lister:
            lister.WriteLine(msg)

    def _parse_eval_point(result: Any) -> Point3D | None:
        """Parse NX UF_EVAL/IBaseCurve return variants into a 3D point."""
        # Direct NXOpen.Point3d-like object
        if hasattr(result, "X") and hasattr(result, "Y") and hasattr(result, "Z"):
            return (float(result.X), float(result.Y), float(result.Z))
        # Flat numeric array [x,y,z,...]
        if isinstance(result, (list, tuple)):
            if len(result) >= 3 and all(isinstance(v, (int, float)) for v in result[:3]):
                return (float(result[0]), float(result[1]), float(result[2]))
            # Nested tuple patterns, e.g. (point,), (point, deriv), etc. — recurse.
            for item in result:
                p = _parse_eval_point(item)
                if p is not None:
                    return p
        return None

    # Get start and end vertices
    try:
        v1, v2 = edge.GetVertices()
        p1 = (float(v1.X), float(v1.Y), float(v1.Z))
        p2 = (float(v2.X), float(v2.Y), float(v2.Z))
    except Exception as exc:
        raise RuntimeError(f"Edge.GetVertices() failed: {exc}")
    is_linear = False
    is_circular = False
    # Coincident endpoints (within 1 micron) => closed edge (e.g. full circle).
    is_closed = (_norm(_sub(p1, p2)) < 0.001)
    edge_type_str = "?"
    try:
        edge_type_str = str(edge.SolidEdgeType)
        is_linear = "Linear" in edge_type_str
        is_circular = "Circular" in edge_type_str
    except Exception:
        pass
    # Point density driven by chord_tol_mm (tighter tol => more points)
    try:
        length = float(edge.GetLength())
    except Exception:
        # No length API: approximate with endpoint distance; closed edges get a guess.
        length = _norm(_sub(p2, p1)) if not is_closed else 50.0
    tol = max(float(chord_tol_mm), 0.01)
    n_pts = max(8, int(math.ceil(length / tol)))
    if is_circular or is_closed:
        n_pts = max(24, n_pts)  # curved/closed edges need a denser minimum
    if is_linear and not is_closed:
        return [p1, p2]  # straight segment: two vertices are exact
    _log(
        f"[edge] type={edge_type_str} closed={is_closed} circular={is_circular} "
        f"len={length:.3f} tol={tol:.3f} n_pts={n_pts}"
    )
    # 1) Primary: UF_EVAL sampling
    try:
        import NXOpen
        import NXOpen.UF
        uf = NXOpen.UF.UFSession.GetUFSession()
        evaluator = uf.Eval.Initialize2(edge.Tag)
        limits = uf.Eval.AskLimits(evaluator)
        t0, t1 = float(limits[0]), float(limits[1])
        pts: List[Point3D] = []
        parse_failures = 0
        try:
            for i in range(n_pts + 1):
                t = t0 + (t1 - t0) * (i / n_pts)
                result = uf.Eval.Evaluate(evaluator, 0, t)
                p = _parse_eval_point(result)
                if p is None:
                    parse_failures += 1
                    if parse_failures <= 3:  # cap log noise
                        _log(f"[edge] UF_EVAL parse miss at t={t:.6g}, raw={repr(result)}")
                    continue
                pts.append(p)
        finally:
            # Always release the UF evaluator, even if sampling raised.
            try:
                uf.Eval.Free(evaluator)
            except Exception:
                pass
        if len(pts) >= 2:
            _log(f"[edge] sampled via UF_EVAL ({len(pts)} pts)")
            return pts
        _log(f"[edge] UF_EVAL insufficient points ({len(pts)}), falling back")
    except Exception as exc:
        _log(f"[edge] UF_EVAL failed: {exc}")
    # 2) Fallback: IBaseCurve.Evaluate (signature differs by NX versions)
    try:
        pts: List[Point3D] = []
        # Some NX APIs expose parameter limits directly on curve objects.
        t0 = 0.0
        t1 = 1.0
        for lim_name in ("GetLimits", "AskLimits"):
            lim_fn = getattr(edge, lim_name, None)
            if callable(lim_fn):
                try:
                    lim = lim_fn()
                    if isinstance(lim, (list, tuple)) and len(lim) >= 2:
                        t0, t1 = float(lim[0]), float(lim[1])
                        break
                except Exception:
                    pass
        for i in range(n_pts + 1):
            t = t0 + (t1 - t0) * (i / n_pts)
            result = edge.Evaluate(t)
            p = _parse_eval_point(result)
            if p is not None:
                pts.append(p)
        if len(pts) >= 2:
            _log(f"[edge] sampled via IBaseCurve.Evaluate ({len(pts)} pts)")
            return pts
        _log(f"[edge] IBaseCurve.Evaluate insufficient points ({len(pts)})")
    except Exception as exc:
        _log(f"[edge] IBaseCurve.Evaluate failed: {exc}")
    # 3) Circular analytic fallback using UF arc data
    if is_circular:
        try:
            import NXOpen
            import NXOpen.UF
            uf = NXOpen.UF.UFSession.GetUFSession()
            arc_data = uf.Curve.AskArcData(edge.Tag)
            # Robust extraction from varying arc_data layouts
            center = None
            radius = None
            start_angle = None
            end_angle = None
            if hasattr(arc_data, "arc_center") and hasattr(arc_data, "radius"):
                c = arc_data.arc_center
                center = (float(c[0]), float(c[1]), float(c[2]))
                radius = float(arc_data.radius)
                start_angle = float(getattr(arc_data, "start_angle", 0.0))
                end_angle = float(getattr(arc_data, "end_angle", 2.0 * math.pi))
            elif isinstance(arc_data, (list, tuple)):
                # Look for center candidate [x,y,z] and a scalar radius
                for item in arc_data:
                    if center is None and isinstance(item, (list, tuple)) and len(item) >= 3:
                        if all(isinstance(v, (int, float)) for v in item[:3]):
                            center = (float(item[0]), float(item[1]), float(item[2]))
                    if radius is None and isinstance(item, (int, float)) and abs(float(item)) > 1e-9:
                        # Keep first non-zero scalar as probable radius only if still missing
                        radius = float(item) if radius is None else radius
                nums = [float(x) for x in arc_data if isinstance(x, (int, float))]
                if len(nums) >= 2:
                    # Heuristic: last two scalars taken as the angle pair — TODO confirm layout.
                    start_angle = nums[-2]
                    end_angle = nums[-1]
            if center is not None and radius is not None and radius > 0.0:
                # Build local basis in edge plane from endpoints + center
                r1 = _sub(p1, center)
                r2 = _sub(p2, center)
                if _norm(r1) < 1e-9:
                    r1 = (radius, 0.0, 0.0)  # degenerate: arbitrary radial direction
                x_axis = _normalize(r1)
                normal = _normalize(_cross(r1, r2)) if _norm(_cross(r1, r2)) > 1e-9 else (0.0, 0.0, 1.0)
                y_axis = _normalize(_cross(normal, x_axis))
                a0 = 0.0
                a1 = math.atan2(_dot(r2, y_axis), _dot(r2, x_axis))
                if is_closed and abs(a1) < 1e-9:
                    a1 = 2.0 * math.pi  # full circle
                elif a1 <= 0.0:
                    a1 += 2.0 * math.pi  # keep sweep positive
                # If UF supplied angles, prefer them when they look valid
                if start_angle is not None and end_angle is not None:
                    da = end_angle - start_angle
                    if abs(da) > 1e-9:
                        a0, a1 = start_angle, end_angle
                pts = []
                for i in range(n_pts + 1):
                    a = a0 + (a1 - a0) * (i / n_pts)
                    ca, sa = math.cos(a), math.sin(a)
                    px = center[0] + radius * (ca * x_axis[0] + sa * y_axis[0])
                    py = center[1] + radius * (ca * x_axis[1] + sa * y_axis[1])
                    pz = center[2] + radius * (ca * x_axis[2] + sa * y_axis[2])
                    pts.append((px, py, pz))
                if len(pts) >= 2:
                    _log(f"[edge] sampled via UF arc analytic ({len(pts)} pts)")
                    return pts
            _log(f"[edge] UF arc fallback could not decode arc_data: {repr(arc_data)}")
        except Exception as exc:
            _log(f"[edge] UF arc fallback failed: {exc}")
    _log("[edge] fallback to vertices only")
    return [p1, p2]
def _close_polyline(points: List[Point3D]) -> List[Point3D]:
if not points:
return points
if _norm(_sub(points[0], points[-1])) > 1e-6:
points.append(points[0])
return points
# ---------------------------------------------------------------------------
# Face local frame
# ---------------------------------------------------------------------------
def _chain_edges_into_loops(
    edges: List[Any],
    lister: Any = None,
    tol: float = 0.01,
    chord_tol_mm: float = 0.1,
) -> List[Tuple[bool, List[Point3D]]]:
    """
    Chain edges into closed loops by matching vertex endpoints.

    Args:
        edges: NX Edge objects belonging to one face.
        lister: optional NX ListingWindow for diagnostics.
        tol: endpoint-matching tolerance (mm) used to decide edge adjacency.
        chord_tol_mm: sampling tolerance forwarded to _sample_edge_polyline.

    Returns list of (is_outer, points_3d) tuples.
    The largest loop (by area/perimeter) is assumed to be the outer loop.
    """
    def _log(msg):
        if lister:
            lister.WriteLine(msg)
    if not edges:
        return []
    # Build edge segments as (start_pt, end_pt, edge_ref)
    segments = []
    for edge in edges:
        try:
            v1, v2 = edge.GetVertices()
            p1 = (float(v1.X), float(v1.Y), float(v1.Z))
            p2 = (float(v2.X), float(v2.Y), float(v2.Z))
            segments.append((p1, p2, edge))
        except Exception as exc:
            # Unreadable edges are skipped; remaining edges may still chain.
            _log(f"[chain] Edge.GetVertices failed: {exc}")
            continue
    _log(f"[chain] {len(segments)} edge segments to chain")
    # Chain into loops
    used = [False] * len(segments)
    loops_points: List[List[Point3D]] = []
    loops_edges: List[List[Any]] = []
    def pts_match(a: Point3D, b: Point3D) -> bool:
        # Endpoints count as the same vertex when within the chaining tolerance.
        return _norm(_sub(a, b)) < tol
    while True:
        # Find first unused segment
        start_idx = None
        for i, u in enumerate(used):
            if not u:
                start_idx = i
                break
        if start_idx is None:
            break  # all segments consumed
        # Start a new loop
        chain_pts: List[Point3D] = []
        chain_edges: List[Any] = []
        used[start_idx] = True
        p_start, p_end, edge = segments[start_idx]
        # Sample this edge
        edge_pts = _sample_edge_polyline(edge, chord_tol_mm=chord_tol_mm, lister=lister)
        chain_pts.extend(edge_pts)
        chain_edges.append(edge)
        current_end = p_end
        loop_start = p_start
        # Follow the chain
        max_iters = len(segments) + 1  # each iteration consumes at most one segment
        for _ in range(max_iters):
            if pts_match(current_end, loop_start) and len(chain_edges) > 1:
                # Loop closed
                break
            # Find next segment connecting to current_end
            found = False
            for i, (s1, s2, e) in enumerate(segments):
                if used[i]:
                    continue
                if pts_match(current_end, s1):
                    used[i] = True
                    edge_pts = _sample_edge_polyline(e, chord_tol_mm=chord_tol_mm, lister=lister)
                    chain_pts.extend(edge_pts[1:])  # skip duplicate junction point
                    chain_edges.append(e)
                    current_end = s2
                    found = True
                    break
                elif pts_match(current_end, s2):
                    # Edge is reversed — traverse backward
                    used[i] = True
                    edge_pts = _sample_edge_polyline(e, chord_tol_mm=chord_tol_mm, lister=lister)
                    edge_pts.reverse()
                    chain_pts.extend(edge_pts[1:])
                    chain_edges.append(e)
                    current_end = s1
                    found = True
                    break
            if not found:
                # Gap larger than tol: keep the open chain as-is.
                _log(f"[chain] Warning: could not continue chain at {current_end}")
                break
        loops_points.append(chain_pts)
        loops_edges.append(chain_edges)
    _log(f"[chain] Built {len(loops_points)} loop(s)")
    if not loops_points:
        return []
    # Determine which loop is outer (largest perimeter)
    def _perimeter(pts: List[Point3D]) -> float:
        # Sum of consecutive segment lengths along the sampled polyline.
        total = 0.0
        for i in range(len(pts) - 1):
            total += _norm(_sub(pts[i + 1], pts[i]))
        return total
    perimeters = [_perimeter(pts) for pts in loops_points]
    outer_idx = perimeters.index(max(perimeters))
    result: List[Tuple[bool, List[Point3D]]] = []
    for i, pts in enumerate(loops_points):
        is_outer = (i == outer_idx)
        result.append((is_outer, pts))
        _log(f"[chain] loop {i}: {len(pts)} pts, perimeter={perimeters[i]:.1f} mm {'(OUTER)' if is_outer else '(inner)'}")
    return result
def _face_local_frame(face: Any, lister: Any = None) -> LocalFrame:
    """
    Build a stable local frame on a planar face.

    The frame is anchored at the first vertex of the face's first edge; the
    normal comes from NX (with a positional-argument fallback for older API
    signatures), defaulting to +Z if both queries fail.
    """
    first_vertex, _ = face.GetEdges()[0].GetVertices()
    anchor = (float(first_vertex.X), float(first_vertex.Y), float(first_vertex.Z))
    face_normal = (0.0, 0.0, 1.0)
    try:
        import NXOpen
        probe = NXOpen.Point3d(anchor[0], anchor[1], anchor[2])
        nrm = face.GetFaceNormal(probe)
        face_normal = _normalize((float(nrm.X), float(nrm.Y), float(nrm.Z)))
    except Exception:
        try:
            nrm = face.GetFaceNormal(anchor[0], anchor[1], anchor[2])
            face_normal = _normalize((float(nrm.X), float(nrm.Y), float(nrm.Z)))
        except Exception:
            pass
    # Pick a reference axis not parallel to the normal, then complete the basis.
    reference = (1.0, 0.0, 0.0) if abs(face_normal[0]) < 0.95 else (0.0, 1.0, 0.0)
    u_axis = _normalize(_cross(reference, face_normal))
    v_axis = _normalize(_cross(face_normal, u_axis))
    return LocalFrame(origin=anchor, x_axis=u_axis, y_axis=v_axis, normal=face_normal)
# ---------------------------------------------------------------------------
# Attribute reading
# ---------------------------------------------------------------------------
def _get_string_attribute(obj: Any, title: str) -> str | None:
"""Try multiple NX API patterns to read a string attribute."""
for method_name in ("GetStringUserAttribute", "GetUserAttributeAsString"):
try:
method = getattr(obj, method_name)
val = method(title, -1)
if val:
return str(val)
except Exception:
continue
return None
# ---------------------------------------------------------------------------
# SIM -> Idealized Part navigation
# ---------------------------------------------------------------------------
def _navigate_sim_to_idealized(session: Any) -> Any:
"""
From the active .sim work part, navigate to the idealized part (_i.prt).
Sets idealized part as work part and returns it.
"""
work_part = session.Parts.Work
part_name = work_part.Name if hasattr(work_part, "Name") else ""
lister = session.ListingWindow
lister.Open()
lister.WriteLine(f"[extract_sandbox] Starting from: {part_name}")
# Check if already in idealized part
if part_name.endswith("_i"):
lister.WriteLine("[extract_sandbox] Already in idealized part.")
return work_part
# Search loaded parts for the idealized part
idealized_part = None
for part in session.Parts:
pname = part.Name if hasattr(part, "Name") else ""
if pname.endswith("_i"):
idealized_part = part
lister.WriteLine(f"[extract_sandbox] Found idealized part: {pname}")
break
if idealized_part is None:
raise RuntimeError(
"Could not find idealized part (*_i.prt). "
"Ensure the SIM is open with FEM + idealized part loaded."
)
# Set as work part
try:
session.Parts.SetWork(idealized_part)
lister.WriteLine(f"[extract_sandbox] Set work part to: {idealized_part.Name}")
except Exception as exc:
lister.WriteLine(f"[extract_sandbox] Warning: SetWork failed: {exc}")
return idealized_part
# ---------------------------------------------------------------------------
# Sandbox discovery
# ---------------------------------------------------------------------------
def find_sandbox_bodies(
    part: Any,
    lister: Any,
    attr_name: str = "ISOGRID_SANDBOX",
) -> List[Tuple[str, Any, Any]]:
    """
    Find bodies tagged with ISOGRID_SANDBOX attribute.

    Args:
        part: NX part to scan (normally the idealized part).
        lister: NX ListingWindow for progress output.
        attr_name: attribute title that marks a sandbox body/face/feature.

    Search order:
    1. Body-level attributes (part.Bodies)
    2. Face-level attributes
    3. Feature-level attributes (part history — Promote Body features)
    4. Feature name matching (e.g. 'Sandbox_1' in feature name)
    5. Body name matching
    Returns list of (sandbox_id, body, face) tuples.
    """
    tagged: List[Tuple[str, Any, Any]] = []
    found_ids: set = set()  # prevents duplicate sandbox ids across passes
    bodies = []
    try:
        # ToArray() exists on NX collections; plain iteration is the fallback.
        bodies = list(part.Bodies.ToArray()) if hasattr(part.Bodies, "ToArray") else list(part.Bodies)
    except Exception:
        bodies = list(part.Bodies)
    lister.WriteLine(f"[extract_sandbox] Scanning {len(bodies)} bodies...")
    # --- Pass 1: body-level and face-level attributes ---
    for body in bodies:
        body_name = ""
        try:
            body_name = body.Name if hasattr(body, "Name") else str(body)
        except Exception:
            pass
        sandbox_id = _get_string_attribute(body, attr_name)
        if sandbox_id and sandbox_id not in found_ids:
            faces = body.GetFaces()
            if faces:
                tagged.append((sandbox_id, body, faces[0]))
                found_ids.add(sandbox_id)
                lister.WriteLine(f"[extract_sandbox] Found: {sandbox_id} (body attr on '{body_name}')")
            # Body carried the attribute: skip the per-face scan for this body.
            continue
        for face in body.GetFaces():
            sandbox_id = _get_string_attribute(face, attr_name)
            if sandbox_id and sandbox_id not in found_ids:
                tagged.append((sandbox_id, body, face))
                found_ids.add(sandbox_id)
                lister.WriteLine(f"[extract_sandbox] Found: {sandbox_id} (face attr on '{body_name}')")
    if tagged:
        return tagged
    # --- Pass 2: feature-level attributes (Promote Body features) ---
    lister.WriteLine("[extract_sandbox] No body/face attrs found, scanning features...")
    try:
        features = part.Features.ToArray() if hasattr(part.Features, "ToArray") else list(part.Features)
        lister.WriteLine(f"[extract_sandbox] Found {len(features)} features")
        for feat in features:
            feat_name = ""
            try:
                feat_name = feat.Name if hasattr(feat, "Name") else str(feat)
            except Exception:
                pass
            # Check feature attribute
            sandbox_id = _get_string_attribute(feat, attr_name)
            if sandbox_id and sandbox_id not in found_ids:
                # Get the body produced by this feature
                try:
                    feat_bodies = feat.GetBodies()
                    if feat_bodies:
                        body = feat_bodies[0]
                        faces = body.GetFaces()
                        if faces:
                            tagged.append((sandbox_id, body, faces[0]))
                            found_ids.add(sandbox_id)
                            lister.WriteLine(f"[extract_sandbox] Found: {sandbox_id} (feature attr on '{feat_name}')")
                except Exception as exc:
                    lister.WriteLine(f"[extract_sandbox] Feature '{feat_name}' has attr but GetBodies failed: {exc}")
    except Exception as exc:
        lister.WriteLine(f"[extract_sandbox] Feature scan error: {exc}")
    if tagged:
        return tagged
    # --- Pass 3: feature name matching (e.g. "Sandbox_1" in name) ---
    lister.WriteLine("[extract_sandbox] No feature attrs found, trying feature name matching...")
    try:
        features = part.Features.ToArray() if hasattr(part.Features, "ToArray") else list(part.Features)
        for feat in features:
            feat_name = ""
            try:
                feat_name = feat.Name if hasattr(feat, "Name") else str(feat)
            except Exception:
                continue
            if "sandbox" in feat_name.lower():
                try:
                    feat_bodies = feat.GetBodies()
                    if feat_bodies:
                        body = feat_bodies[0]
                        faces = body.GetFaces()
                        if faces:
                            # Derive a stable id from the feature name.
                            sid = feat_name.lower().replace(" ", "_")
                            if sid not in found_ids:
                                tagged.append((sid, body, faces[0]))
                                found_ids.add(sid)
                                lister.WriteLine(f"[extract_sandbox] Found by feature name: {sid} ('{feat_name}')")
                except Exception as exc:
                    lister.WriteLine(f"[extract_sandbox] Feature '{feat_name}' name match but GetBodies failed: {exc}")
    except Exception:
        pass
    if tagged:
        return tagged
    # --- Pass 4: body name matching ---
    lister.WriteLine("[extract_sandbox] No features matched, trying body name matching...")
    for body in bodies:
        bname = ""
        try:
            bname = body.Name if hasattr(body, "Name") else str(body)
        except Exception:
            continue
        if "sandbox" in bname.lower():
            faces = body.GetFaces()
            if faces:
                sid = bname.lower().replace(" ", "_")
                if sid not in found_ids:
                    tagged.append((sid, body, faces[0]))
                    found_ids.add(sid)
                    lister.WriteLine(f"[extract_sandbox] Found by body name: {sid}")
    return tagged
# ---------------------------------------------------------------------------
# Core extraction
# ---------------------------------------------------------------------------
def extract_sandbox_geometry(
    face: Any,
    body: Any,
    sandbox_id: str,
    lister: Any,
    chord_tol_mm: float = 0.1,
) -> Dict[str, Any]:
    """
    Serialize one sandbox face into a JSON-ready dict.

    The largest loop becomes `outer_boundary`; every other loop is recorded in
    `inner_boundaries` as a boundary constraint (reserved geometry edge), not a
    hole. A 3D<->2D transform and the body thickness (when available) are
    included so geometry can be reimported later.
    """
    frame = _face_local_frame(face, lister)
    face_edges = list(face.GetEdges())
    lister.WriteLine(f"[extract_sandbox] {sandbox_id}: {len(face_edges)} edges on face")
    loops = _chain_edges_into_loops(face_edges, lister, chord_tol_mm=chord_tol_mm)
    lister.WriteLine(f"[extract_sandbox] {sandbox_id}: {len(loops)} loop(s) built")
    outer_2d: List[List[float]] = []
    inner_boundaries: List[Dict[str, Any]] = []
    for is_outer, pts3d in loops:
        # Close the loop, flatten into the face frame, round for stable JSON.
        pts2d = project_to_2d(_close_polyline(pts3d), frame)
        rounded = [[round(u, 6), round(v, 6)] for u, v in pts2d]
        if is_outer:
            outer_2d = rounded
            lister.WriteLine(f"[extract_sandbox] outer loop: {len(outer_2d)} pts")
        else:
            inner_boundaries.append({
                "index": len(inner_boundaries),
                "boundary": rounded,
                "num_points": len(rounded),
            })
            lister.WriteLine(f"[extract_sandbox] inner loop {len(inner_boundaries)}: {len(rounded)} pts")
    # Thickness is optional — not every NX body exposes GetThickness().
    thickness = None
    try:
        thickness = float(body.GetThickness())
    except Exception:
        pass
    return {
        "schema_version": "1.0",
        "units": "mm",
        "sandbox_id": sandbox_id,
        "outer_boundary": outer_2d,
        "inner_boundaries": inner_boundaries,
        "num_inner_boundaries": len(inner_boundaries),
        "thickness": thickness,
        "transform": {
            "origin": [round(c, 6) for c in frame.origin],
            "x_axis": [round(c, 6) for c in frame.x_axis],
            "y_axis": [round(c, 6) for c in frame.y_axis],
            "normal": [round(c, 6) for c in frame.normal],
        },
    }
# ---------------------------------------------------------------------------
# Main — NX Journal entry point
# ---------------------------------------------------------------------------
def main():
    """NX Journal entry point: locate tagged sandbox bodies in the idealized
    part and export one geometry_<sandbox_id>.json per sandbox."""
    import NXOpen
    session = NXOpen.Session.GetSession()
    lister = session.ListingWindow
    lister.Open()
    lister.WriteLine("=" * 60)
    lister.WriteLine(" Adaptive Isogrid — Sandbox Geometry Extraction")
    lister.WriteLine("=" * 60)
    # Navigate to idealized part
    idealized_part = _navigate_sim_to_idealized(session)
    # Find sandboxes
    sandbox_entries = find_sandbox_bodies(idealized_part, lister)
    if not sandbox_entries:
        lister.WriteLine("[extract_sandbox] ERROR: No sandbox bodies found!")
        lister.WriteLine("Ensure bodies have ISOGRID_SANDBOX attribute set.")
        return
    lister.WriteLine(f"[extract_sandbox] Found {len(sandbox_entries)} sandbox(es)")
    # Output directory: next to the .sim file (or idealized part)
    try:
        part_dir = os.path.dirname(idealized_part.FullPath)
    except Exception:
        part_dir = os.getcwd()  # fallback when the part has no path on disk
    output_dir = os.path.join(part_dir, "adaptive_isogrid_data")
    os.makedirs(output_dir, exist_ok=True)
    lister.WriteLine(f"[extract_sandbox] Output dir: {output_dir}")
    # Extract each sandbox
    for sandbox_id, body, face in sandbox_entries:
        lister.WriteLine(f"\n--- Extracting {sandbox_id} ---")
        try:
            # Debug: print face info
            lister.WriteLine(f"[extract_sandbox] Face type: {type(face).__name__}")
            try:
                all_edges = face.GetEdges()
                lister.WriteLine(f"[extract_sandbox] Total edges on face: {len(all_edges)}")
            except Exception as exc:
                lister.WriteLine(f"[extract_sandbox] GetEdges failed: {exc}")
            geom = extract_sandbox_geometry(
                face=face,
                body=body,
                sandbox_id=sandbox_id,
                lister=lister,
                chord_tol_mm=0.1,
            )
            out_path = os.path.join(output_dir, f"geometry_{sandbox_id}.json")
            with open(out_path, "w") as f:
                json.dump(geom, f, indent=2)
            lister.WriteLine(f"[extract_sandbox] Wrote: {out_path}")
            # Summary
            lister.WriteLine(f" outer_boundary: {len(geom['outer_boundary'])} points")
            lister.WriteLine(f" inner_boundaries: {geom['num_inner_boundaries']}")
            lister.WriteLine(f" thickness: {geom['thickness']}")
        except Exception as exc:
            # One failing sandbox must not abort the remaining exports.
            import traceback
            lister.WriteLine(f"[extract_sandbox] ERROR extracting {sandbox_id}: {exc}")
            lister.WriteLine(traceback.format_exc())
    lister.WriteLine("\n" + "=" * 60)
    lister.WriteLine(f" Done — {len(sandbox_entries)} sandbox(es) exported")
    lister.WriteLine(f" Output: {output_dir}")
    lister.WriteLine("=" * 60)
# Guarded entry point: NX executes journals with __name__ == "__main__", so the
# journal still runs unchanged, but importing this module for reuse/testing no
# longer triggers an NX session side effect.
if __name__ == "__main__":
    main()