:: (removed non-executable web-viewer header: "Files / Atomizer/run_tests.bat / 189 lines / 5.8 KiB / Batchfile / Raw Normal View History")
@echo off
REM ============================================================================
REM Atomizer Test Runner — Tier 2 Dev Workflow
REM ============================================================================
REM Double-click this to run tests. Results sync back to Mario via Syncthing.
REM
REM Usage:
REM run_tests.bat — run all tests
REM run_tests.bat test_spec_api — run specific test file
REM run_tests.bat unit — run unit tests folder
REM run_tests.bat --quick — fast smoke test (no slow/NX tests)
REM run_tests.bat --nx — NX-dependent tests only
REM ============================================================================
setlocal enabledelayedexpansion
REM === CONFIG ===
set "ATOMIZER_ROOT=%~dp0"
set "RESULTS_DIR=%ATOMIZER_ROOT%test_results"
set "PYTHON=python"
REM Timestamp for this run. Parsing %date%/%time% directly is locale-dependent
REM (field order, separators, and the leading space on single-digit hours all
REM vary), so ask Python for a stable YYYY-MM-DD_HH-MM-SS instead. The %% in
REM the strftime pattern escapes the percent signs from cmd's expansion.
set "TIMESTAMP="
for /f "delims=" %%i in ('python -c "from datetime import datetime; print(datetime.now().strftime('%%Y-%%m-%%d_%%H-%%M-%%S'))"') do set "TIMESTAMP=%%i"
REM If Python is not on PATH the loop body never ran; fall back to a unique name
REM so result files cannot silently overwrite each other.
if not defined TIMESTAMP set "TIMESTAMP=unknown_%RANDOM%"
set "RUN_FILE=%RESULTS_DIR%\run_%TIMESTAMP%.json"
set "LOG_FILE=%RESULTS_DIR%\run_%TIMESTAMP%.log"
REM Create results dir if needed
if not exist "%RESULTS_DIR%" mkdir "%RESULTS_DIR%"
echo.
echo ============================================================================
echo ATOMIZER TEST RUNNER
echo %date% %time%
echo ============================================================================
echo.
REM === Gather system info ===
echo Gathering environment info...
REM Capture the "Python X.Y.Z" banner; 2^>^&1 (carets escape the redirect for
REM the for /f sub-shell) merges stderr because old interpreters print there.
for /f "delims=" %%v in ('python --version 2^>^&1') do set "PYTHON_VER=%%v"
REM Absolute path of whichever interpreter is actually first on PATH.
for /f "delims=" %%v in ('python -c "import sys; print(sys.executable)"') do set "PYTHON_EXE=%%v"
REM Check if NX is available
REM The NXOpen import only succeeds inside an NX-provided Python environment;
REM && flips the flag to true only when that import exits 0.
set "NX_AVAILABLE=false"
python -c "import NXOpen" 2>nul && set "NX_AVAILABLE=true"
REM === Determine what to run ===
REM Defaults: whole test tree, verbose output, short tracebacks.
set "TEST_TARGET=tests/"
set "PYTEST_ARGS=-v --tb=short"
set "TEST_MODE=all"
if "%~1"=="--quick" (
    REM cmd has no backslash escaping: a literal \" would reach pytest and
    REM split the -m expression into several argv words. Use plain nested
    REM quotes with an unquoted set, which cmd passes through intact.
    set PYTEST_ARGS=-v --tb=short -m "not slow and not nx"
    set "TEST_MODE=quick"
) else if "%~1"=="--nx" (
    set "PYTEST_ARGS=-v --tb=short -m nx"
    set "TEST_MODE=nx-only"
) else if not "%~1"=="" (
    REM Treat the argument as a folder under tests/; if no such folder exists,
    REM assume it names a test module and append .py.
    set "TEST_TARGET=tests/%~1"
    if not exist "%ATOMIZER_ROOT%tests\%~1" (
        set "TEST_TARGET=tests/%~1.py"
    )
    set "TEST_MODE=targeted"
)
echo Mode: %TEST_MODE%
echo Target: %TEST_TARGET%
echo Python: %PYTHON_VER%
echo NX: %NX_AVAILABLE%
echo Results: %RUN_FILE%
echo.
REM === Run tests ===
echo Running tests...
echo ============================================================================
cd /d "%ATOMIZER_ROOT%"
REM The previous version ran pytest twice (a probe run, then a logging run) and
REM used "if errorlevel 1" to detect a missing json-report plugin — but pytest
REM also exits non-zero when tests fail, so failing suites were always run
REM twice. Probe for the plugin directly and run pytest exactly once, with the
REM JSON report and the log produced in the same run.
set "JSON_REPORT_ARGS="
python -c "import pytest_jsonreport" >nul 2>&1 && set JSON_REPORT_ARGS=--json-report --json-report-file="%RESULTS_DIR%\_pytest_report.json"
python -m pytest %TEST_TARGET% %PYTEST_ARGS% %JSON_REPORT_ARGS% > "%LOG_FILE%" 2>&1
set "EXIT_CODE=%errorlevel%"
REM Replay the captured log so the console still shows the test output.
type "%LOG_FILE%"
echo.
echo ============================================================================
REM === Generate results JSON ===
REM cmd.exe cannot continue a quoted "python -c" string across lines — the old
REM multi-line form executed every Python line as a (failing) batch command.
REM Write the generator to a temp .py file instead, and hand it its inputs via
REM environment variables rather than interpolating batch values into Python
REM source (which broke on %NX_AVAILABLE% = false, not a Python literal, and
REM was injection-prone for paths containing quotes).
if not defined EXIT_CODE set "EXIT_CODE=1"
set "ATM_LOG_FILE=%LOG_FILE%"
set "ATM_REPORT_PATH=%RESULTS_DIR%\_pytest_report.json"
set "ATM_RUN_FILE=%RUN_FILE%"
set "ATM_EXIT_CODE=%EXIT_CODE%"
set "ATM_TEST_MODE=%TEST_MODE%"
set "ATM_TEST_TARGET=%TEST_TARGET%"
set "ATM_PYTHON_VER=%PYTHON_VER%"
set "ATM_PYTHON_EXE=%PYTHON_EXE%"
set "ATM_NX_AVAILABLE=%NX_AVAILABLE%"
set "GEN_PY=%RESULTS_DIR%\_gen_results.py"
REM echo( (instead of "echo ") preserves leading spaces and cannot collide
REM with echo's own on/off keywords.
>"%GEN_PY%" echo(import json, os, platform
>>"%GEN_PY%" echo(from datetime import datetime
>>"%GEN_PY%" echo(log_path = os.environ['ATM_LOG_FILE']
>>"%GEN_PY%" echo(report_path = os.environ['ATM_REPORT_PATH']
>>"%GEN_PY%" echo(run_file = os.environ['ATM_RUN_FILE']
>>"%GEN_PY%" echo(exit_code = int(os.environ.get('ATM_EXIT_CODE', '1') or '1')
>>"%GEN_PY%" echo(# Read log
>>"%GEN_PY%" echo(with open(log_path, 'r', encoding='utf-8', errors='replace') as f:
>>"%GEN_PY%" echo(    log_content = f.read()
>>"%GEN_PY%" echo(# Last pass/fail/error line in the log is pytest's summary
>>"%GEN_PY%" echo(lines = log_content.split('\n')
>>"%GEN_PY%" echo(summary_line = ''
>>"%GEN_PY%" echo(for line in reversed(lines):
>>"%GEN_PY%" echo(    if 'passed' in line or 'failed' in line or 'error' in line:
>>"%GEN_PY%" echo(        summary_line = line.strip()
>>"%GEN_PY%" echo(        break
>>"%GEN_PY%" echo(# Optional pytest-json-report output; best-effort, log still rules
>>"%GEN_PY%" echo(json_report = None
>>"%GEN_PY%" echo(if os.path.exists(report_path):
>>"%GEN_PY%" echo(    try:
>>"%GEN_PY%" echo(        with open(report_path) as f:
>>"%GEN_PY%" echo(            json_report = json.load(f)
>>"%GEN_PY%" echo(    except Exception:
>>"%GEN_PY%" echo(        pass
>>"%GEN_PY%" echo(# Collect FAILED/ERROR summary lines plus the detailed failure section
>>"%GEN_PY%" echo(failures = []
>>"%GEN_PY%" echo(in_failure = False
>>"%GEN_PY%" echo(current_failure = []
>>"%GEN_PY%" echo(for line in lines:
>>"%GEN_PY%" echo(    if line.startswith('FAILED ') or line.startswith('ERROR '):
>>"%GEN_PY%" echo(        failures.append(line.strip())
>>"%GEN_PY%" echo(    elif '_ FAILURES _' in line or '_ ERRORS _' in line:
>>"%GEN_PY%" echo(        in_failure = True
>>"%GEN_PY%" echo(    elif in_failure and line.startswith('='):
>>"%GEN_PY%" echo(        if current_failure:
>>"%GEN_PY%" echo(            failures.append('\n'.join(current_failure))
>>"%GEN_PY%" echo(            current_failure = []
>>"%GEN_PY%" echo(        in_failure = False
>>"%GEN_PY%" echo(    elif in_failure:
>>"%GEN_PY%" echo(        current_failure.append(line)
>>"%GEN_PY%" echo(result = {
>>"%GEN_PY%" echo(    'timestamp': datetime.now().isoformat(),
>>"%GEN_PY%" echo(    'exit_code': exit_code,
>>"%GEN_PY%" echo(    'mode': os.environ.get('ATM_TEST_MODE', ''),
>>"%GEN_PY%" echo(    'target': os.environ.get('ATM_TEST_TARGET', ''),
>>"%GEN_PY%" echo(    'python': os.environ.get('ATM_PYTHON_VER', ''),
>>"%GEN_PY%" echo(    'python_exe': os.environ.get('ATM_PYTHON_EXE', ''),
>>"%GEN_PY%" echo(    'nx_available': os.environ.get('ATM_NX_AVAILABLE', 'false') == 'true',
>>"%GEN_PY%" echo(    'platform': platform.platform(),
>>"%GEN_PY%" echo(    'summary': summary_line,
>>"%GEN_PY%" echo(    'failures': failures[:20],
>>"%GEN_PY%" echo(    'log_file': os.path.basename(log_path),
>>"%GEN_PY%" echo(    'status': 'PASS' if exit_code == 0 else 'FAIL',
>>"%GEN_PY%" echo(}
>>"%GEN_PY%" echo(if json_report and 'summary' in json_report:
>>"%GEN_PY%" echo(    result['pytest_summary'] = json_report['summary']
>>"%GEN_PY%" echo(with open(run_file, 'w') as f:
>>"%GEN_PY%" echo(    json.dump(result, f, indent=2)
>>"%GEN_PY%" echo(print()
>>"%GEN_PY%" echo(print('Status: ' + result['status'])
>>"%GEN_PY%" echo(print('Summary: ' + summary_line)
>>"%GEN_PY%" echo(print('Results saved to: ' + os.path.basename(run_file))
python "%GEN_PY%"
del "%GEN_PY%" 2>nul
echo.
REM === Also write a latest.json pointer so Mario can find the newest run ===
REM Redirect-first form avoids a trailing space creeping into the JSON.
>"%RESULTS_DIR%\latest.json" echo {"latest": "run_%TIMESTAMP%.json", "timestamp": "%TIMESTAMP%"}
REM === Cleanup old pytest report ===
if exist "%RESULTS_DIR%\_pytest_report.json" del "%RESULTS_DIR%\_pytest_report.json"
echo.
REM Guard: if the run step failed before setting EXIT_CODE, "%EXIT_CODE% EQU 0"
REM would expand to a syntax error; default to failure instead.
if not defined EXIT_CODE set "EXIT_CODE=1"
if %EXIT_CODE% EQU 0 (
    echo ALL TESTS PASSED
) else (
    REM Plain ASCII hyphen: an em dash renders as mojibake under the console's
    REM OEM codepage.
    echo SOME TESTS FAILED - check results
)
echo.
echo Results will sync to Mario via Syncthing.
echo ============================================================================
pause