feat: Implement Study Interview Mode as default study creation method

Study Interview Mode is now the DEFAULT for all study creation requests.
This intelligent Q&A system guides users through optimization setup with:

- 7-phase interview flow: introspection → objectives → constraints → design_variables → validation → review → complete
- Material-aware validation with 12 materials and fuzzy name matching
- Anti-pattern detection for 12 common mistakes (mass-no-constraint, stress-over-yield, etc.)
- Auto extractor mapping E1-E24 based on goal keywords
- State persistence with JSON serialization and backup rotation
- StudyBlueprint generation with full validation

Triggers: "create a study", "new study", "optimize this", any study creation intent
Skip with: "skip interview", "quick setup", "manual config"

Components:
- StudyInterviewEngine: Main orchestrator
- QuestionEngine: Conditional logic evaluation
- EngineeringValidator: MaterialsDatabase + AntiPatternDetector
- InterviewPresenter: Markdown formatting for Claude
- StudyBlueprint: Validated configuration output
- InterviewState: Persistent state management

All 129 tests passing.

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
Commit 32caa5d05c (parent b1ffc64407), authored 2026-01-03 11:06:07 -05:00.
27 changed files with 9737 additions and 11 deletions.

View File

@@ -0,0 +1,287 @@
"""Tests for InterviewPresenter classes."""
import pytest
from optimization_engine.interview.interview_presenter import (
InterviewPresenter,
ClaudePresenter,
DashboardPresenter,
CLIPresenter,
)
from optimization_engine.interview.question_engine import Question, QuestionOption
from optimization_engine.interview.study_blueprint import (
StudyBlueprint,
DesignVariable,
Objective,
Constraint
)
class TestClaudePresenter:
    """Unit tests for ClaudePresenter question rendering and answer parsing."""

    def test_present_choice_question(self):
        """A choice question renders number, total, prompt text, and option labels."""
        q = Question(
            id="obj_01",
            category="objectives",
            text="What is your primary optimization goal?",
            question_type="choice",
            maps_to="objectives[0].goal",
            options=[
                QuestionOption(value="minimize_mass", label="Minimize mass/weight"),
                QuestionOption(value="minimize_displacement", label="Minimize displacement"),
            ],
            help_text="Choose what you want to optimize for.",
        )
        rendered = ClaudePresenter().present_question(
            q,
            question_number=1,
            total_questions=10,
            category_name="Objectives",
        )
        # Question number, total count, prompt, and an option label must all appear.
        for fragment in (
            "1",
            "10",
            "What is your primary optimization goal?",
            "Minimize mass/weight",
        ):
            assert fragment in rendered

    def test_present_numeric_question(self):
        """A numeric question renders its prompt text."""
        q = Question(
            id="con_01",
            category="constraints",
            text="What is the maximum allowable stress (MPa)?",
            question_type="numeric",
            maps_to="constraints[0].threshold",
        )
        rendered = ClaudePresenter().present_question(
            q,
            question_number=3,
            total_questions=8,
            category_name="Constraints",
        )
        assert "maximum allowable stress" in rendered

    def test_present_text_question(self):
        """A free-text question renders its prompt text."""
        q = Question(
            id="pd_01",
            category="problem_definition",
            text="Describe your study in a few words.",
            question_type="text",
            maps_to="study_description",
        )
        rendered = ClaudePresenter().present_question(
            q,
            question_number=1,
            total_questions=10,
            category_name="Problem Definition",
        )
        assert "Describe your study" in rendered

    def test_present_confirm_question(self):
        """A confirmation question renders its prompt text."""
        q = Question(
            id="val_01",
            category="validation",
            text="Would you like to run a baseline validation?",
            question_type="confirm",
            maps_to="run_baseline",
        )
        rendered = ClaudePresenter().present_question(
            q,
            question_number=8,
            total_questions=8,
            category_name="Validation",
        )
        assert "baseline validation" in rendered

    def test_parse_choice_response_by_number(self):
        """Numeric replies ('1', '2') map to the corresponding option values."""
        q = Question(
            id="obj_01",
            category="objectives",
            text="Choose goal",
            question_type="choice",
            maps_to="objective",
            options=[
                QuestionOption(value="minimize_mass", label="Minimize mass"),
                QuestionOption(value="minimize_stress", label="Minimize stress"),
            ],
        )
        p = ClaudePresenter()
        assert p.parse_response("1", q) == "minimize_mass"
        assert p.parse_response("2", q) == "minimize_stress"

    def test_parse_numeric_response(self):
        """Numbers are extracted from bare and free-form replies as floats."""
        q = Question(
            id="con_01",
            category="constraints",
            text="Max stress?",
            question_type="numeric",
            maps_to="threshold",
        )
        p = ClaudePresenter()
        assert p.parse_response("200", q) == 200.0
        # A number embedded in surrounding words is still picked up.
        assert p.parse_response("about 150 MPa", q) == 150.0

    def test_parse_confirm_response(self):
        """Common affirmative/negative phrasings map to True/False."""
        q = Question(
            id="val_01",
            category="validation",
            text="Run validation?",
            question_type="confirm",
            maps_to="run_baseline",
        )
        p = ClaudePresenter()
        for affirmative in ("yes", "Yeah", "y"):
            assert p.parse_response(affirmative, q) is True
        for negative in ("no", "Nope", "n"):
            assert p.parse_response(negative, q) is False

    def test_show_progress(self):
        """Progress output mentions the step (or percentage) and category name."""
        rendered = ClaudePresenter().show_progress(5, 10, "Objectives")
        # The presenter may show the raw count or a percentage — accept either.
        assert "5" in rendered or "50%" in rendered
        assert "Objectives" in rendered

    def test_show_summary(self):
        """The blueprint summary includes the study name and variable names."""
        bp = StudyBlueprint(
            study_name="test_study",
            study_description="A test study",
            model_path="/path/to/model.prt",
            sim_path="/path/to/sim.sim",
            design_variables=[
                DesignVariable(parameter="thickness", current_value=5.0, min_value=1.0, max_value=10.0)
            ],
            objectives=[
                Objective(name="mass", goal="minimize", extractor="E4")
            ],
            constraints=[
                Constraint(name="stress", constraint_type="max", threshold=200, extractor="E3")
            ],
            protocol="protocol_10_single",
            n_trials=100,
            sampler="TPE",
        )
        rendered = ClaudePresenter().show_summary(bp)
        assert "test_study" in rendered
        assert "thickness" in rendered

    def test_show_warning(self):
        """Warning output echoes the warning message."""
        rendered = ClaudePresenter().show_warning("Stress limit is close to yield")
        assert "yield" in rendered
class TestDashboardPresenter:
    """Unit tests for the structured-output DashboardPresenter."""

    def test_present_question_returns_structured_data(self):
        """The dashboard presenter yields machine-readable question fields."""
        q = Question(
            id="obj_01",
            category="objectives",
            text="What is your goal?",
            question_type="choice",
            maps_to="objective",
            options=[QuestionOption(value="mass", label="Minimize mass")],
        )
        raw = DashboardPresenter().present_question(
            q,
            question_number=1,
            total_questions=10,
            category_name="Objectives",
        )
        import json
        # Output may be a JSON string or an already-decoded structure.
        payload = json.loads(raw) if isinstance(raw, str) else raw
        # Question fields may sit at the top level or be nested under 'data'.
        record = payload["data"] if "data" in payload else payload
        assert "question_id" in record
        assert "text" in record
class TestCLIPresenter:
    """Unit tests for the plain-text CLIPresenter."""

    def test_present_question_plain_text(self):
        """The CLI presenter includes the question text in its plain output."""
        q = Question(
            id="obj_01",
            category="objectives",
            text="What is your goal?",
            question_type="choice",
            maps_to="objective",
            options=[
                QuestionOption(value="mass", label="Minimize mass"),
                QuestionOption(value="stress", label="Minimize stress"),
            ],
        )
        rendered = CLIPresenter().present_question(
            q,
            question_number=1,
            total_questions=10,
            category_name="Objectives",
        )
        assert "What is your goal?" in rendered