Unraid deployment fixes and generator improvements
- Add Next.js rewrites to proxy API calls through same origin (fixes login/media on werkout.treytartt.com) - Fix mediaUrl() in DayCard and ExerciseRow to use relative paths in production - Add proxyTimeout for long-running workout generation endpoints - Add CSRF trusted origin for treytartt.com - Split docker-compose into production (Unraid) and dev configs - Show display_name and descriptions on workout type cards - Generator: rules engine improvements, movement enforcement, exercise selector updates - Add new test files for rules drift, workout research generation Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
56
docker-compose.dev.yml
Normal file
56
docker-compose.dev.yml
Normal file
@@ -0,0 +1,56 @@
|
||||
services:
|
||||
db:
|
||||
image: postgres:14
|
||||
volumes:
|
||||
- database:/var/lib/postgresql/data
|
||||
environment:
|
||||
- POSTGRES_DB=werkout
|
||||
- POSTGRES_USER=postgres
|
||||
- POSTGRES_PASSWORD=postgres
|
||||
healthcheck:
|
||||
test: ["CMD-SHELL", "pg_isready -U postgres"]
|
||||
interval: 5s
|
||||
timeout: 5s
|
||||
retries: 5
|
||||
|
||||
web:
|
||||
build: .
|
||||
volumes:
|
||||
- .:/code
|
||||
- /code/werkout-frontend/node_modules
|
||||
- /code/werkout-frontend/.next
|
||||
ports:
|
||||
- "8001:8000"
|
||||
- "3010:3000"
|
||||
environment:
|
||||
- POSTGRES_NAME=werkout
|
||||
- POSTGRES_USER=postgres
|
||||
- POSTGRES_PASSWORD=postgres
|
||||
depends_on:
|
||||
db:
|
||||
condition: service_healthy
|
||||
links:
|
||||
- db
|
||||
|
||||
redis:
|
||||
image: redis:alpine
|
||||
|
||||
celery:
|
||||
restart: always
|
||||
build:
|
||||
context: .
|
||||
command: celery -A werkout_api worker -l info
|
||||
volumes:
|
||||
- .:/code
|
||||
environment:
|
||||
- DB_HOST=db
|
||||
- DB_NAME=werkout
|
||||
- DB_USER=postgres
|
||||
- DB_PASS=postgres
|
||||
depends_on:
|
||||
- db
|
||||
- redis
|
||||
- web
|
||||
|
||||
volumes:
|
||||
database:
|
||||
@@ -1,8 +1,9 @@
|
||||
services:
|
||||
db:
|
||||
image: postgres:14
|
||||
restart: unless-stopped
|
||||
volumes:
|
||||
- database:/var/lib/postgresql/data
|
||||
- /mnt/user/downloads/werkout_api/postgres:/var/lib/postgresql/data
|
||||
environment:
|
||||
- POSTGRES_DB=werkout
|
||||
- POSTGRES_USER=postgres
|
||||
@@ -15,42 +16,34 @@ services:
|
||||
|
||||
web:
|
||||
build: .
|
||||
restart: unless-stopped
|
||||
volumes:
|
||||
- .:/code
|
||||
- /code/werkout-frontend/node_modules
|
||||
- /code/werkout-frontend/.next
|
||||
- /mnt/user/downloads/werkout_api/media:/code/media
|
||||
ports:
|
||||
- "8001:8000"
|
||||
- "3010:3000"
|
||||
environment:
|
||||
- POSTGRES_NAME=werkout
|
||||
- POSTGRES_USER=postgres
|
||||
- POSTGRES_PASSWORD=postgres
|
||||
- DATABASE_URL=postgres://postgres:postgres@db:5432/werkout
|
||||
- REDIS_URL=redis://redis:6379
|
||||
depends_on:
|
||||
db:
|
||||
condition: service_healthy
|
||||
links:
|
||||
- db
|
||||
redis:
|
||||
condition: service_started
|
||||
|
||||
redis:
|
||||
image: redis:alpine
|
||||
restart: unless-stopped
|
||||
|
||||
celery:
|
||||
restart: always
|
||||
build:
|
||||
context: .
|
||||
restart: unless-stopped
|
||||
command: celery -A werkout_api worker -l info
|
||||
volumes:
|
||||
- .:/code
|
||||
environment:
|
||||
- DB_HOST=db
|
||||
- DB_NAME=werkout
|
||||
- DB_USER=postgres
|
||||
- DB_PASS=postgres
|
||||
- DATABASE_URL=postgres://postgres:postgres@db:5432/werkout
|
||||
- REDIS_URL=redis://redis:6379
|
||||
depends_on:
|
||||
- db
|
||||
- redis
|
||||
- web
|
||||
|
||||
volumes:
|
||||
database:
|
||||
|
||||
@@ -18,7 +18,8 @@ from generator.rules_engine import DB_CALIBRATION
|
||||
class Command(BaseCommand):
|
||||
help = (
|
||||
'Check for drift between research doc calibration values '
|
||||
'and WorkoutType DB records. Exits 1 if mismatches found.'
|
||||
'and WorkoutType DB records. Exits 1 if mismatches, missing '
|
||||
'types, or zero fields checked.'
|
||||
)
|
||||
|
||||
# Fields to compare between DB_CALIBRATION and WorkoutType model
|
||||
@@ -73,14 +74,32 @@ class Command(BaseCommand):
|
||||
self.stdout.write('')
|
||||
|
||||
if missing_in_db:
|
||||
self.stdout.write(self.style.WARNING(
|
||||
self.stdout.write(self.style.ERROR(
|
||||
f'Missing from DB ({len(missing_in_db)}):'
|
||||
))
|
||||
for name in missing_in_db:
|
||||
self.stdout.write(f' - {name}')
|
||||
self.stdout.write('')
|
||||
|
||||
has_errors = False
|
||||
|
||||
if checked == 0:
|
||||
has_errors = True
|
||||
self.stdout.write(self.style.ERROR(
|
||||
'No calibration fields were checked. '
|
||||
'DB_CALIBRATION keys likely do not match WorkoutType.name values.'
|
||||
))
|
||||
self.stdout.write('')
|
||||
|
||||
if missing_in_db:
|
||||
has_errors = True
|
||||
self.stdout.write(self.style.ERROR(
|
||||
'Missing workout types prevent full drift validation.'
|
||||
))
|
||||
self.stdout.write('')
|
||||
|
||||
if mismatches:
|
||||
has_errors = True
|
||||
self.stdout.write(self.style.ERROR(
|
||||
f'DRIFT DETECTED: {len(mismatches)} mismatch(es)'
|
||||
))
|
||||
@@ -98,8 +117,9 @@ class Command(BaseCommand):
|
||||
'To fix: update WorkoutType records in the DB or '
|
||||
'update DB_CALIBRATION in generator/rules_engine.py.'
|
||||
))
|
||||
if has_errors:
|
||||
sys.exit(1)
|
||||
else:
|
||||
|
||||
self.stdout.write(self.style.SUCCESS(
|
||||
'No drift detected. DB values match research calibration.'
|
||||
))
|
||||
|
||||
@@ -0,0 +1,123 @@
|
||||
from django.core.management import call_command
|
||||
from django.db import migrations
|
||||
|
||||
|
||||
WORKOUT_TYPE_CALIBRATION = {
|
||||
'functional_strength_training': {
|
||||
'typical_rest_between_sets': 60,
|
||||
'typical_intensity': 'medium',
|
||||
'rep_range_min': 8,
|
||||
'rep_range_max': 15,
|
||||
'round_range_min': 3,
|
||||
'round_range_max': 4,
|
||||
'duration_bias': 0.15,
|
||||
'superset_size_min': 2,
|
||||
'superset_size_max': 4,
|
||||
},
|
||||
'traditional_strength_training': {
|
||||
'typical_rest_between_sets': 120,
|
||||
'typical_intensity': 'high',
|
||||
'rep_range_min': 4,
|
||||
'rep_range_max': 8,
|
||||
'round_range_min': 3,
|
||||
'round_range_max': 5,
|
||||
'duration_bias': 0.1,
|
||||
'superset_size_min': 1,
|
||||
'superset_size_max': 3,
|
||||
},
|
||||
'high_intensity_interval_training': {
|
||||
'typical_rest_between_sets': 30,
|
||||
'typical_intensity': 'high',
|
||||
'rep_range_min': 10,
|
||||
'rep_range_max': 20,
|
||||
'round_range_min': 3,
|
||||
'round_range_max': 5,
|
||||
'duration_bias': 0.7,
|
||||
'superset_size_min': 3,
|
||||
'superset_size_max': 6,
|
||||
},
|
||||
'cross_training': {
|
||||
'typical_rest_between_sets': 45,
|
||||
'typical_intensity': 'high',
|
||||
'rep_range_min': 8,
|
||||
'rep_range_max': 15,
|
||||
'round_range_min': 3,
|
||||
'round_range_max': 5,
|
||||
'duration_bias': 0.4,
|
||||
'superset_size_min': 3,
|
||||
'superset_size_max': 5,
|
||||
},
|
||||
'core_training': {
|
||||
'typical_rest_between_sets': 30,
|
||||
'typical_intensity': 'medium',
|
||||
'rep_range_min': 10,
|
||||
'rep_range_max': 20,
|
||||
'round_range_min': 2,
|
||||
'round_range_max': 4,
|
||||
'duration_bias': 0.5,
|
||||
'superset_size_min': 3,
|
||||
'superset_size_max': 5,
|
||||
},
|
||||
'flexibility': {
|
||||
'typical_rest_between_sets': 15,
|
||||
'typical_intensity': 'low',
|
||||
'rep_range_min': 1,
|
||||
'rep_range_max': 5,
|
||||
'round_range_min': 1,
|
||||
'round_range_max': 2,
|
||||
'duration_bias': 0.9,
|
||||
'superset_size_min': 3,
|
||||
'superset_size_max': 6,
|
||||
},
|
||||
'cardio': {
|
||||
'typical_rest_between_sets': 30,
|
||||
'typical_intensity': 'medium',
|
||||
'rep_range_min': 1,
|
||||
'rep_range_max': 1,
|
||||
'round_range_min': 1,
|
||||
'round_range_max': 3,
|
||||
'duration_bias': 1.0,
|
||||
'superset_size_min': 1,
|
||||
'superset_size_max': 3,
|
||||
},
|
||||
'hypertrophy': {
|
||||
'typical_rest_between_sets': 90,
|
||||
'typical_intensity': 'high',
|
||||
'rep_range_min': 8,
|
||||
'rep_range_max': 15,
|
||||
'round_range_min': 3,
|
||||
'round_range_max': 4,
|
||||
'duration_bias': 0.2,
|
||||
'superset_size_min': 2,
|
||||
'superset_size_max': 4,
|
||||
},
|
||||
}
|
||||
|
||||
|
||||
def apply_calibration(apps, schema_editor):
|
||||
WorkoutType = apps.get_model('generator', 'WorkoutType')
|
||||
|
||||
for type_name, fields in WORKOUT_TYPE_CALIBRATION.items():
|
||||
defaults = dict(fields)
|
||||
defaults.setdefault('display_name', type_name.replace('_', ' ').title())
|
||||
defaults.setdefault('description', '')
|
||||
WorkoutType.objects.update_or_create(name=type_name, defaults=defaults)
|
||||
|
||||
# Ensure the full 8 x 5 x 3 = 120 structure-rule matrix is present and calibrated.
|
||||
call_command('calibrate_structure_rules')
|
||||
|
||||
|
||||
def noop_reverse(apps, schema_editor):
|
||||
# Intentionally no-op: this migration normalizes live calibration data.
|
||||
pass
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('generator', '0005_add_periodization_fields'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.RunPython(apply_calibration, noop_reverse),
|
||||
]
|
||||
121
generator/migrations/0007_force_recalibration.py
Normal file
121
generator/migrations/0007_force_recalibration.py
Normal file
@@ -0,0 +1,121 @@
|
||||
from django.core.management import call_command
|
||||
from django.db import migrations
|
||||
|
||||
|
||||
WORKOUT_TYPE_CALIBRATION = {
|
||||
'functional_strength_training': {
|
||||
'typical_rest_between_sets': 60,
|
||||
'typical_intensity': 'medium',
|
||||
'rep_range_min': 8,
|
||||
'rep_range_max': 15,
|
||||
'round_range_min': 3,
|
||||
'round_range_max': 4,
|
||||
'duration_bias': 0.15,
|
||||
'superset_size_min': 2,
|
||||
'superset_size_max': 4,
|
||||
},
|
||||
'traditional_strength_training': {
|
||||
'typical_rest_between_sets': 120,
|
||||
'typical_intensity': 'high',
|
||||
'rep_range_min': 4,
|
||||
'rep_range_max': 8,
|
||||
'round_range_min': 3,
|
||||
'round_range_max': 5,
|
||||
'duration_bias': 0.1,
|
||||
'superset_size_min': 1,
|
||||
'superset_size_max': 3,
|
||||
},
|
||||
'high_intensity_interval_training': {
|
||||
'typical_rest_between_sets': 30,
|
||||
'typical_intensity': 'high',
|
||||
'rep_range_min': 10,
|
||||
'rep_range_max': 20,
|
||||
'round_range_min': 3,
|
||||
'round_range_max': 5,
|
||||
'duration_bias': 0.7,
|
||||
'superset_size_min': 3,
|
||||
'superset_size_max': 6,
|
||||
},
|
||||
'cross_training': {
|
||||
'typical_rest_between_sets': 45,
|
||||
'typical_intensity': 'high',
|
||||
'rep_range_min': 8,
|
||||
'rep_range_max': 15,
|
||||
'round_range_min': 3,
|
||||
'round_range_max': 5,
|
||||
'duration_bias': 0.4,
|
||||
'superset_size_min': 3,
|
||||
'superset_size_max': 5,
|
||||
},
|
||||
'core_training': {
|
||||
'typical_rest_between_sets': 30,
|
||||
'typical_intensity': 'medium',
|
||||
'rep_range_min': 10,
|
||||
'rep_range_max': 20,
|
||||
'round_range_min': 2,
|
||||
'round_range_max': 4,
|
||||
'duration_bias': 0.5,
|
||||
'superset_size_min': 3,
|
||||
'superset_size_max': 5,
|
||||
},
|
||||
'flexibility': {
|
||||
'typical_rest_between_sets': 15,
|
||||
'typical_intensity': 'low',
|
||||
'rep_range_min': 1,
|
||||
'rep_range_max': 5,
|
||||
'round_range_min': 1,
|
||||
'round_range_max': 2,
|
||||
'duration_bias': 0.9,
|
||||
'superset_size_min': 3,
|
||||
'superset_size_max': 6,
|
||||
},
|
||||
'cardio': {
|
||||
'typical_rest_between_sets': 30,
|
||||
'typical_intensity': 'medium',
|
||||
'rep_range_min': 1,
|
||||
'rep_range_max': 1,
|
||||
'round_range_min': 1,
|
||||
'round_range_max': 3,
|
||||
'duration_bias': 1.0,
|
||||
'superset_size_min': 1,
|
||||
'superset_size_max': 3,
|
||||
},
|
||||
'hypertrophy': {
|
||||
'typical_rest_between_sets': 90,
|
||||
'typical_intensity': 'high',
|
||||
'rep_range_min': 8,
|
||||
'rep_range_max': 15,
|
||||
'round_range_min': 3,
|
||||
'round_range_max': 4,
|
||||
'duration_bias': 0.2,
|
||||
'superset_size_min': 2,
|
||||
'superset_size_max': 4,
|
||||
},
|
||||
}
|
||||
|
||||
|
||||
def apply_calibration(apps, schema_editor):
|
||||
WorkoutType = apps.get_model('generator', 'WorkoutType')
|
||||
|
||||
for type_name, fields in WORKOUT_TYPE_CALIBRATION.items():
|
||||
defaults = dict(fields)
|
||||
defaults.setdefault('display_name', type_name.replace('_', ' ').title())
|
||||
defaults.setdefault('description', '')
|
||||
WorkoutType.objects.update_or_create(name=type_name, defaults=defaults)
|
||||
|
||||
call_command('calibrate_structure_rules')
|
||||
|
||||
|
||||
def noop_reverse(apps, schema_editor):
|
||||
pass
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('generator', '0006_calibrate_workout_types_and_structure_rules'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.RunPython(apply_calibration, noop_reverse),
|
||||
]
|
||||
@@ -11,6 +11,8 @@ from typing import List, Optional, Dict, Any, Tuple
|
||||
|
||||
import logging
|
||||
|
||||
from generator.services.exercise_selector import extract_movement_families
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@@ -88,7 +90,7 @@ WORKOUT_TYPE_RULES: Dict[str, Dict[str, Any]] = {
|
||||
# ------------------------------------------------------------------
|
||||
# 3. HIIT
|
||||
# ------------------------------------------------------------------
|
||||
'hiit': {
|
||||
'high_intensity_interval_training': {
|
||||
'rep_ranges': {
|
||||
'primary': (10, 20),
|
||||
'secondary': (10, 20),
|
||||
@@ -275,7 +277,7 @@ UNIVERSAL_RULES: Dict[str, Any] = {
|
||||
# ======================================================================
|
||||
|
||||
DB_CALIBRATION: Dict[str, Dict[str, Any]] = {
|
||||
'Functional Strength Training': {
|
||||
'functional_strength_training': {
|
||||
'duration_bias': 0.15,
|
||||
'typical_rest_between_sets': 60,
|
||||
'typical_intensity': 'medium',
|
||||
@@ -286,7 +288,7 @@ DB_CALIBRATION: Dict[str, Dict[str, Any]] = {
|
||||
'superset_size_min': 2,
|
||||
'superset_size_max': 4,
|
||||
},
|
||||
'Traditional Strength Training': {
|
||||
'traditional_strength_training': {
|
||||
'duration_bias': 0.1,
|
||||
'typical_rest_between_sets': 120,
|
||||
'typical_intensity': 'high',
|
||||
@@ -297,7 +299,7 @@ DB_CALIBRATION: Dict[str, Dict[str, Any]] = {
|
||||
'superset_size_min': 1,
|
||||
'superset_size_max': 3,
|
||||
},
|
||||
'HIIT': {
|
||||
'high_intensity_interval_training': {
|
||||
'duration_bias': 0.7,
|
||||
'typical_rest_between_sets': 30,
|
||||
'typical_intensity': 'high',
|
||||
@@ -308,7 +310,7 @@ DB_CALIBRATION: Dict[str, Dict[str, Any]] = {
|
||||
'superset_size_min': 3,
|
||||
'superset_size_max': 6,
|
||||
},
|
||||
'Cross Training': {
|
||||
'cross_training': {
|
||||
'duration_bias': 0.4,
|
||||
'typical_rest_between_sets': 45,
|
||||
'typical_intensity': 'high',
|
||||
@@ -319,7 +321,7 @@ DB_CALIBRATION: Dict[str, Dict[str, Any]] = {
|
||||
'superset_size_min': 3,
|
||||
'superset_size_max': 5,
|
||||
},
|
||||
'Core Training': {
|
||||
'core_training': {
|
||||
'duration_bias': 0.5,
|
||||
'typical_rest_between_sets': 30,
|
||||
'typical_intensity': 'medium',
|
||||
@@ -330,7 +332,7 @@ DB_CALIBRATION: Dict[str, Dict[str, Any]] = {
|
||||
'superset_size_min': 3,
|
||||
'superset_size_max': 5,
|
||||
},
|
||||
'Flexibility': {
|
||||
'flexibility': {
|
||||
'duration_bias': 0.9,
|
||||
'typical_rest_between_sets': 15,
|
||||
'typical_intensity': 'low',
|
||||
@@ -341,7 +343,7 @@ DB_CALIBRATION: Dict[str, Dict[str, Any]] = {
|
||||
'superset_size_min': 3,
|
||||
'superset_size_max': 6,
|
||||
},
|
||||
'Cardio': {
|
||||
'cardio': {
|
||||
'duration_bias': 1.0,
|
||||
'typical_rest_between_sets': 30,
|
||||
'typical_intensity': 'medium',
|
||||
@@ -352,7 +354,7 @@ DB_CALIBRATION: Dict[str, Dict[str, Any]] = {
|
||||
'superset_size_min': 1,
|
||||
'superset_size_max': 3,
|
||||
},
|
||||
'Hypertrophy': {
|
||||
'hypertrophy': {
|
||||
'duration_bias': 0.2,
|
||||
'typical_rest_between_sets': 90,
|
||||
'typical_intensity': 'high',
|
||||
@@ -366,13 +368,24 @@ DB_CALIBRATION: Dict[str, Dict[str, Any]] = {
|
||||
}
|
||||
|
||||
|
||||
# Canonical key aliases for workout type names. This lets callers pass
|
||||
# legacy/short names while still resolving to DB-style identifiers.
|
||||
WORKOUT_TYPE_KEY_ALIASES: Dict[str, str] = {
|
||||
'hiit': 'high_intensity_interval_training',
|
||||
}
|
||||
|
||||
|
||||
# ======================================================================
|
||||
# Validation helpers
|
||||
# ======================================================================
|
||||
|
||||
def _normalize_type_key(name: str) -> str:
|
||||
"""Convert a workout type name to the underscore key used in WORKOUT_TYPE_RULES."""
|
||||
return name.strip().lower().replace(' ', '_')
|
||||
"""Convert a workout type name to the canonical key in WORKOUT_TYPE_RULES."""
|
||||
if not name:
|
||||
return ''
|
||||
normalized = name.strip().lower().replace('-', '_').replace(' ', '_')
|
||||
normalized = '_'.join(part for part in normalized.split('_') if part)
|
||||
return WORKOUT_TYPE_KEY_ALIASES.get(normalized, normalized)
|
||||
|
||||
|
||||
def _classify_rep_weight(reps: int) -> str:
|
||||
@@ -457,6 +470,21 @@ def _check_compound_before_isolation(supersets: list) -> bool:
|
||||
return not compound_after_isolation
|
||||
|
||||
|
||||
def _focus_key_for_entry(entry: dict) -> Optional[str]:
|
||||
"""Derive a coarse focus key from an entry's exercise."""
|
||||
ex = entry.get('exercise')
|
||||
if ex is None:
|
||||
return None
|
||||
families = sorted(extract_movement_families(getattr(ex, 'name', '') or ''))
|
||||
if families:
|
||||
return families[0]
|
||||
patterns = (getattr(ex, 'movement_patterns', '') or '').lower()
|
||||
for token in ('upper pull', 'upper push', 'hip hinge', 'squat', 'lunge', 'core', 'carry'):
|
||||
if token in patterns:
|
||||
return token
|
||||
return None
|
||||
|
||||
|
||||
# ======================================================================
|
||||
# Main validation function
|
||||
# ======================================================================
|
||||
@@ -623,7 +651,53 @@ def validate_workout(
|
||||
))
|
||||
|
||||
# ------------------------------------------------------------------
|
||||
# 5. Compound before isolation ordering
|
||||
# 5. Focus spread across working supersets
|
||||
# ------------------------------------------------------------------
|
||||
if working:
|
||||
for ss in working:
|
||||
seen_focus = set()
|
||||
duplicate_focus = set()
|
||||
for entry in ss.get('exercises', []):
|
||||
focus_key = _focus_key_for_entry(entry)
|
||||
if not focus_key:
|
||||
continue
|
||||
if focus_key in seen_focus:
|
||||
duplicate_focus.add(focus_key)
|
||||
seen_focus.add(focus_key)
|
||||
if duplicate_focus:
|
||||
violations.append(RuleViolation(
|
||||
rule_id='superset_focus_repetition',
|
||||
severity='error',
|
||||
message=(
|
||||
f"Superset '{ss.get('name')}' repeats focus area(s): "
|
||||
f"{', '.join(sorted(duplicate_focus))}."
|
||||
),
|
||||
actual_value=sorted(duplicate_focus),
|
||||
))
|
||||
|
||||
# Advisory: same dominant focus in adjacent working supersets.
|
||||
previous_focus = None
|
||||
for ss in working:
|
||||
focus_keys = {
|
||||
_focus_key_for_entry(entry)
|
||||
for entry in ss.get('exercises', [])
|
||||
}
|
||||
focus_keys.discard(None)
|
||||
if previous_focus is not None and focus_keys and focus_keys == previous_focus:
|
||||
violations.append(RuleViolation(
|
||||
rule_id='adjacent_superset_focus_repetition',
|
||||
severity='info',
|
||||
message=(
|
||||
f"Adjacent supersets reuse the same focus profile "
|
||||
f"({', '.join(sorted(focus_keys))}); spread focus when possible."
|
||||
),
|
||||
actual_value=sorted(focus_keys),
|
||||
))
|
||||
if focus_keys:
|
||||
previous_focus = focus_keys
|
||||
|
||||
# ------------------------------------------------------------------
|
||||
# 6. Compound before isolation ordering
|
||||
# ------------------------------------------------------------------
|
||||
if UNIVERSAL_RULES['compound_before_isolation']:
|
||||
if not _check_compound_before_isolation(supersets):
|
||||
@@ -634,7 +708,7 @@ def validate_workout(
|
||||
))
|
||||
|
||||
# ------------------------------------------------------------------
|
||||
# 6. Warmup check
|
||||
# 7. Warmup check
|
||||
# ------------------------------------------------------------------
|
||||
if UNIVERSAL_RULES['warmup_mandatory']:
|
||||
if not _has_warmup(supersets):
|
||||
@@ -645,7 +719,7 @@ def validate_workout(
|
||||
))
|
||||
|
||||
# ------------------------------------------------------------------
|
||||
# 7. Cooldown check
|
||||
# 8. Cooldown check
|
||||
# ------------------------------------------------------------------
|
||||
if not _has_cooldown(supersets):
|
||||
violations.append(RuleViolation(
|
||||
@@ -655,9 +729,9 @@ def validate_workout(
|
||||
))
|
||||
|
||||
# ------------------------------------------------------------------
|
||||
# 8. HIIT duration cap
|
||||
# 9. HIIT duration cap
|
||||
# ------------------------------------------------------------------
|
||||
if wt_key == 'hiit':
|
||||
if wt_key == 'high_intensity_interval_training':
|
||||
max_hiit_min = UNIVERSAL_RULES.get('max_hiit_duration_min', 30)
|
||||
# Estimate total working time from working supersets
|
||||
total_working_exercises = sum(
|
||||
@@ -683,7 +757,7 @@ def validate_workout(
|
||||
))
|
||||
|
||||
# ------------------------------------------------------------------
|
||||
# 9. Total exercise count cap
|
||||
# 10. Total exercise count cap
|
||||
# ------------------------------------------------------------------
|
||||
max_exercises = wt_rules.get(
|
||||
'max_exercises_per_session',
|
||||
@@ -706,7 +780,7 @@ def validate_workout(
|
||||
))
|
||||
|
||||
# ------------------------------------------------------------------
|
||||
# 10. Workout type match percentage (refactored from _validate_workout_type_match)
|
||||
# 11. Workout type match percentage (refactored from _validate_workout_type_match)
|
||||
# ------------------------------------------------------------------
|
||||
_STRENGTH_TYPES = {
|
||||
'traditional_strength_training', 'functional_strength_training',
|
||||
|
||||
@@ -889,6 +889,8 @@ class ExerciseSelector:
|
||||
selected_names = set()
|
||||
# Intra-superset family tracking
|
||||
selected_family_groups = set() # group names used in this superset
|
||||
selected_families = set() # exact families used in this superset
|
||||
selected_family_counts = Counter() # exact family counts in this superset
|
||||
|
||||
# Shuffle to break any ordering bias
|
||||
random.shuffle(pool)
|
||||
@@ -910,8 +912,14 @@ class ExerciseSelector:
|
||||
|
||||
for fam in candidate_families:
|
||||
# Cross-workout: check family count limit
|
||||
total_count = self.used_movement_families.get(fam, 0)
|
||||
if total_count >= self._get_family_limit(fam):
|
||||
historical_count = self.used_movement_families.get(fam, 0)
|
||||
in_superset_count = selected_family_counts.get(fam, 0)
|
||||
if historical_count + in_superset_count >= self._get_family_limit(fam):
|
||||
blocked = True
|
||||
break
|
||||
|
||||
# Intra-superset: avoid exact family duplicates entirely.
|
||||
if fam in selected_families:
|
||||
blocked = True
|
||||
break
|
||||
|
||||
@@ -930,6 +938,8 @@ class ExerciseSelector:
|
||||
selected_names.add(candidate_name)
|
||||
# Track family groups for intra-superset blocking
|
||||
for fam in candidate_families:
|
||||
selected_families.add(fam)
|
||||
selected_family_counts[fam] += 1
|
||||
group = _FAMILY_TO_GROUP.get(fam)
|
||||
if group:
|
||||
selected_family_groups.add(group)
|
||||
|
||||
@@ -3,10 +3,12 @@ import math
|
||||
import random
|
||||
import time
|
||||
import uuid
|
||||
from collections import Counter
|
||||
from datetime import timedelta
|
||||
|
||||
from django.db.models import Q
|
||||
|
||||
from equipment.models import WorkoutEquipment
|
||||
from generator.models import (
|
||||
GeneratedWeeklyPlan,
|
||||
GeneratedWorkout,
|
||||
@@ -16,10 +18,17 @@ from generator.models import (
|
||||
WorkoutStructureRule,
|
||||
WorkoutType,
|
||||
)
|
||||
from generator.rules_engine import validate_workout, RuleViolation
|
||||
from generator.services.exercise_selector import ExerciseSelector
|
||||
from generator.rules_engine import (
|
||||
RuleViolation,
|
||||
UNIVERSAL_RULES,
|
||||
WORKOUT_TYPE_RULES,
|
||||
_normalize_type_key,
|
||||
validate_workout,
|
||||
)
|
||||
from generator.services.exercise_selector import ExerciseSelector, extract_movement_families
|
||||
from generator.services.plan_builder import PlanBuilder
|
||||
from generator.services.muscle_normalizer import normalize_muscle_name
|
||||
from muscle.models import ExerciseMuscle
|
||||
from workout.models import CompletedWorkout
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
@@ -137,6 +146,10 @@ STRENGTH_WORKOUT_TYPES = {
|
||||
'hypertrophy', 'strength',
|
||||
}
|
||||
|
||||
# Final pass retries after full assembly (warmup + working + cooldown)
|
||||
# to guarantee conformance before returning a workout.
|
||||
FINAL_CONFORMANCE_MAX_RETRIES = 4
|
||||
|
||||
|
||||
# ======================================================================
|
||||
# Default fallback data used when ML pattern tables are empty
|
||||
@@ -825,7 +838,9 @@ class WorkoutGenerator:
|
||||
|
||||
return plan
|
||||
|
||||
def generate_single_workout(self, muscle_split, workout_type, scheduled_date):
|
||||
def generate_single_workout(
|
||||
self, muscle_split, workout_type, scheduled_date, _final_attempt=0,
|
||||
):
|
||||
"""
|
||||
Generate one workout specification dict.
|
||||
|
||||
@@ -847,6 +862,7 @@ class WorkoutGenerator:
|
||||
-------
|
||||
dict (workout_spec)
|
||||
"""
|
||||
warnings_checkpoint = len(self.warnings)
|
||||
target_muscles = list(muscle_split.get('muscles', []))
|
||||
label = muscle_split.get('label', 'Workout')
|
||||
duration_minutes = self.duration_override or self.preference.preferred_workout_duration
|
||||
@@ -854,6 +870,14 @@ class WorkoutGenerator:
|
||||
# Clamp duration to valid range (15-120 minutes)
|
||||
max_duration_sec = max(15 * 60, min(120 * 60, max_duration_sec))
|
||||
|
||||
# Respect workout-type hard duration ceilings (e.g. HIIT <= 30 min).
|
||||
if workout_type:
|
||||
wt_key = _normalize_type_key(getattr(workout_type, 'name', '') or '')
|
||||
wt_rules = WORKOUT_TYPE_RULES.get(wt_key, {})
|
||||
max_minutes_for_type = wt_rules.get('max_duration_minutes')
|
||||
if max_minutes_for_type:
|
||||
max_duration_sec = min(max_duration_sec, int(max_minutes_for_type) * 60)
|
||||
|
||||
# Apply volume adjustment from CompletedWorkout feedback loop
|
||||
volume_adj = getattr(self, '_volume_adjustment', 0.0)
|
||||
if volume_adj:
|
||||
@@ -925,7 +949,6 @@ class WorkoutGenerator:
|
||||
violations = self._check_quality_gates(working_supersets, workout_type, wt_params)
|
||||
blocking = [v for v in violations if v.severity == 'error']
|
||||
if not blocking or attempt == MAX_RETRIES:
|
||||
self.warnings.extend([v.message for v in violations])
|
||||
break
|
||||
logger.info(
|
||||
"Quality gate: %d blocking violation(s) on attempt %d, retrying",
|
||||
@@ -973,41 +996,90 @@ class WorkoutGenerator:
|
||||
)
|
||||
|
||||
# Hard cap total working exercises to prevent bloated workouts
|
||||
MAX_WORKING_EXERCISES = 30
|
||||
is_strength_workout = False
|
||||
if workout_type:
|
||||
wt_name_lower = workout_type.name.strip().lower()
|
||||
is_strength_workout = wt_name_lower in STRENGTH_WORKOUT_TYPES
|
||||
|
||||
MAX_WORKING_EXERCISES = self._max_working_exercises_for_type(workout_type)
|
||||
working_supersets = [
|
||||
ss for ss in workout_spec.get('supersets', [])
|
||||
if ss.get('name', '').startswith('Working')
|
||||
]
|
||||
first_working_superset = working_supersets[0] if working_supersets else None
|
||||
|
||||
def _min_working_exercises_for_ss(ss):
|
||||
# Allow a first straight set (single main lift) for strength workouts.
|
||||
if is_strength_workout and first_working_superset is not None and ss is first_working_superset:
|
||||
return 1
|
||||
return 2
|
||||
|
||||
total_working_ex = sum(len(ss['exercises']) for ss in working_supersets)
|
||||
if total_working_ex > MAX_WORKING_EXERCISES:
|
||||
# Trim from back working supersets
|
||||
excess = total_working_ex - MAX_WORKING_EXERCISES
|
||||
for ss in reversed(working_supersets):
|
||||
while excess > 0 and len(ss['exercises']) > 2:
|
||||
min_ex_for_ss = _min_working_exercises_for_ss(ss)
|
||||
while excess > 0 and len(ss['exercises']) > min_ex_for_ss:
|
||||
ss['exercises'].pop()
|
||||
excess -= 1
|
||||
if excess <= 0:
|
||||
break
|
||||
# Remove empty working supersets
|
||||
# If everything is already at min size, remove trailing supersets.
|
||||
if excess > 0:
|
||||
for ss in reversed(list(working_supersets)):
|
||||
current_working = [
|
||||
candidate for candidate in workout_spec.get('supersets', [])
|
||||
if candidate.get('name', '').startswith('Working')
|
||||
]
|
||||
if len(current_working) <= 1 or excess <= 0:
|
||||
break
|
||||
if is_strength_workout and ss is first_working_superset:
|
||||
# Preserve straight-set anchor for strength unless it's the last one left.
|
||||
continue
|
||||
removed_count = len(ss.get('exercises', []))
|
||||
if removed_count <= 0:
|
||||
continue
|
||||
try:
|
||||
workout_spec['supersets'].remove(ss)
|
||||
except ValueError:
|
||||
continue
|
||||
excess -= removed_count
|
||||
# Remove undersized working supersets.
|
||||
workout_spec['supersets'] = [
|
||||
ss for ss in workout_spec['supersets']
|
||||
if not ss.get('name', '').startswith('Working') or len(ss['exercises']) >= 2
|
||||
if (
|
||||
not ss.get('name', '').startswith('Working')
|
||||
or len(ss['exercises']) >= _min_working_exercises_for_ss(ss)
|
||||
)
|
||||
]
|
||||
|
||||
# Enforce min 2 exercises per working superset; merge undersized ones
|
||||
# Enforce minimum exercises per working superset; merge undersized ones.
|
||||
# First strength working set is allowed to be a straight set (1 exercise).
|
||||
all_supersets = workout_spec.get('supersets', [])
|
||||
working_indices = [
|
||||
i for i, ss in enumerate(all_supersets)
|
||||
if ss.get('name', '').startswith('Working')
|
||||
]
|
||||
undersized = [i for i in working_indices if len(all_supersets[i]['exercises']) < 2]
|
||||
first_working_index = working_indices[0] if working_indices else None
|
||||
|
||||
def _min_working_exercises_for_index(idx):
|
||||
if is_strength_workout and first_working_index is not None and idx == first_working_index:
|
||||
return 1
|
||||
return 2
|
||||
|
||||
undersized = [
|
||||
i for i in working_indices
|
||||
if len(all_supersets[i]['exercises']) < _min_working_exercises_for_index(i)
|
||||
]
|
||||
if undersized:
|
||||
# Try to redistribute exercises from undersized into adjacent supersets
|
||||
for idx in reversed(undersized):
|
||||
if len(all_supersets[idx]['exercises']) >= _min_working_exercises_for_index(idx):
|
||||
continue
|
||||
ss = all_supersets[idx]
|
||||
orphan_exercises = ss['exercises']
|
||||
# Find next working superset to absorb orphans
|
||||
absorbed = False
|
||||
for other_idx in working_indices:
|
||||
if other_idx == idx:
|
||||
continue
|
||||
@@ -1017,7 +1089,6 @@ class WorkoutGenerator:
|
||||
if len(other_ss['exercises']) < 6:
|
||||
ex_entry['order'] = len(other_ss['exercises']) + 1
|
||||
other_ss['exercises'].append(ex_entry)
|
||||
absorbed = True
|
||||
break
|
||||
# Remove the undersized superset
|
||||
all_supersets.pop(idx)
|
||||
@@ -1026,6 +1097,7 @@ class WorkoutGenerator:
|
||||
i for i, ss in enumerate(all_supersets)
|
||||
if ss.get('name', '').startswith('Working')
|
||||
]
|
||||
first_working_index = working_indices[0] if working_indices else None
|
||||
|
||||
# Post-build modality validation: ensure each working superset
|
||||
# has consistent modality (all reps or all duration)
|
||||
@@ -1044,10 +1116,50 @@ class WorkoutGenerator:
|
||||
logger.debug("Corrected reps->duration for modality consistency in %s", ss.get('name'))
|
||||
else:
|
||||
if entry.get('duration') and not entry.get('reps'):
|
||||
ex = entry.get('exercise')
|
||||
if ex is not None:
|
||||
entry['reps'] = self._pick_reps_for_exercise(ex, wt_params, workout_type)
|
||||
else:
|
||||
entry['reps'] = random.randint(wt_params['rep_min'], wt_params['rep_max'])
|
||||
entry.pop('duration', None)
|
||||
logger.debug("Corrected duration->reps for modality consistency in %s", ss.get('name'))
|
||||
|
||||
# Deterministic final-shaping fixes before strict conformance validation.
|
||||
self._enforce_compound_first_order(workout_spec, is_strength_workout=is_strength_workout)
|
||||
self._rebalance_push_pull(
|
||||
workout_spec, target_muscles, wt_params, is_strength_workout,
|
||||
workout_type=workout_type,
|
||||
)
|
||||
|
||||
final_violations = self._get_final_conformance_violations(
|
||||
workout_spec, workout_type, target_muscles,
|
||||
)
|
||||
blocking_final = [
|
||||
v for v in final_violations if self._is_blocking_final_violation(v)
|
||||
]
|
||||
if blocking_final:
|
||||
if _final_attempt < FINAL_CONFORMANCE_MAX_RETRIES:
|
||||
logger.info(
|
||||
"Final conformance failed (%d issues) on attempt %d for %s; regenerating",
|
||||
len(blocking_final), _final_attempt + 1, label,
|
||||
)
|
||||
self.warnings = self.warnings[:warnings_checkpoint]
|
||||
self.exercise_selector.reset()
|
||||
return self.generate_single_workout(
|
||||
muscle_split=muscle_split,
|
||||
workout_type=workout_type,
|
||||
scheduled_date=scheduled_date,
|
||||
_final_attempt=_final_attempt + 1,
|
||||
)
|
||||
|
||||
details = '; '.join(
|
||||
f'[{v.severity}] {v.rule_id}: {v.message}'
|
||||
for v in blocking_final[:5]
|
||||
)
|
||||
raise ValueError(
|
||||
f'Unable to generate a compliant workout for {label}: {details}'
|
||||
)
|
||||
|
||||
# Collect warnings from exercise selector
|
||||
if self.exercise_selector.warnings:
|
||||
self.warnings.extend(self.exercise_selector.warnings)
|
||||
@@ -1122,6 +1234,7 @@ class WorkoutGenerator:
|
||||
|
||||
splits.sort(key=_target_overlap, reverse=True)
|
||||
|
||||
splits = self._diversify_split_days(splits, clamped_days)
|
||||
rest_days = chosen.rest_day_positions or []
|
||||
return splits, rest_days
|
||||
|
||||
@@ -1140,8 +1253,85 @@ class WorkoutGenerator:
|
||||
|
||||
splits.sort(key=_target_overlap, reverse=True)
|
||||
|
||||
splits = self._diversify_split_days(splits, clamped_days)
|
||||
return splits, []
|
||||
|
||||
@staticmethod
def _split_signature(split_day):
    """Stable signature for duplicate detection across a week.

    Returns a ``(split_type, muscles)`` tuple where the type is
    lower-cased/trimmed and the muscles are normalized and sorted, so
    two equivalent split days always compare equal.
    """
    raw_type = split_day.get('split_type') or 'full_body'
    normalized_muscles = []
    for muscle in split_day.get('muscles', []):
        if muscle:
            normalized_muscles.append(normalize_muscle_name(muscle))
    return raw_type.strip().lower(), tuple(sorted(normalized_muscles))
|
||||
|
||||
def _diversify_split_days(self, splits, clamped_days):
    """
    Reduce repetitive split allocation (for example 3+ lower-body days
    in a 4-day plan) while preserving day count and user constraints.

    Operates on shallow copies of the incoming split dicts; the input
    list is never mutated.  Limits: at most 2 days with the same
    split_type and at most 1 day with the exact same signature
    (type + normalized muscle set).  Replacement candidates come from
    DEFAULT_SPLITS for this day count, then the 50 most frequent
    MuscleGroupSplit rows.
    """
    # Diversification only matters for 4+ training days.
    if len(splits) < 4:
        return splits

    result = [dict(s) for s in splits]
    max_same_type = 2
    max_same_signature = 1

    # Candidate pool: defaults first, then common DB splits.
    candidates = [dict(s) for s in DEFAULT_SPLITS.get(clamped_days, [])]
    db_candidates = list(
        MuscleGroupSplit.objects.order_by('-frequency', 'id')[:50]
    )
    for mgs in db_candidates:
        candidates.append({
            'label': mgs.label or 'Workout',
            'muscles': list(mgs.muscle_names or []),
            'split_type': mgs.split_type or 'full_body',
        })

    def _first_violation_index():
        # Recompute counts each call so replacements made earlier in the
        # repair loop are reflected.  Returns (index-or-None, counts, counts).
        type_counts = Counter((d.get('split_type') or 'full_body').strip().lower() for d in result)
        sig_counts = Counter(self._split_signature(d) for d in result)
        for idx, day in enumerate(result):
            split_type = (day.get('split_type') or 'full_body').strip().lower()
            sig = self._split_signature(day)
            if type_counts[split_type] > max_same_type or sig_counts[sig] > max_same_signature:
                return idx, type_counts, sig_counts
        return None, type_counts, sig_counts

    # A small bounded repair loop prevents pathological endless churn.
    for _ in range(len(result) * 3):
        idx, type_counts, sig_counts = _first_violation_index()
        if idx is None:
            # No remaining violations; done.
            break

        replaced = False
        for candidate in candidates:
            candidate_type = (candidate.get('split_type') or 'full_body').strip().lower()
            candidate_sig = self._split_signature(candidate)
            current_sig = self._split_signature(result[idx])
            # Swapping in an identical day would not reduce duplication.
            if candidate_sig == current_sig:
                continue

            # Only count the candidate's type as "new" when it differs
            # from the day being replaced.
            new_type_count = type_counts[candidate_type] + (0 if candidate_type == (result[idx].get('split_type') or 'full_body').strip().lower() else 1)
            if new_type_count > max_same_type:
                continue

            # Candidate signature must still be unused this week.
            if sig_counts[candidate_sig] >= max_same_signature:
                continue

            result[idx] = dict(candidate)
            replaced = True
            break

        if not replaced:
            # No safe replacement found; keep original to avoid invalid state.
            break

    return result
|
||||
|
||||
def _assign_workout_types(self, split_days):
|
||||
"""
|
||||
Distribute the user's preferred WorkoutTypes across the training
|
||||
@@ -1529,6 +1719,7 @@ class WorkoutGenerator:
|
||||
secondary_bias = GOAL_DURATION_BIAS[secondary_goal]
|
||||
goal_bias = (goal_bias * 0.7) + (secondary_bias * 0.3)
|
||||
duration_bias = (duration_bias * 0.7) + (goal_bias * 0.3)
|
||||
duration_bias = self._clamp_duration_bias_for_type(duration_bias, workout_type)
|
||||
|
||||
# Apply secondary goal influence on rep ranges (30% weight)
|
||||
if secondary_goal:
|
||||
@@ -1621,6 +1812,13 @@ class WorkoutGenerator:
|
||||
if wt_name_lower in STRENGTH_WORKOUT_TYPES:
|
||||
is_strength_workout = True
|
||||
|
||||
modality_plan = self._plan_superset_modalities(
|
||||
num_supersets=num_supersets,
|
||||
duration_bias=duration_bias,
|
||||
workout_type=workout_type,
|
||||
is_strength_workout=is_strength_workout,
|
||||
)
|
||||
|
||||
min_duration = GENERATION_RULES['min_duration']['value']
|
||||
duration_mult = GENERATION_RULES['duration_multiple']['value']
|
||||
min_volume = GENERATION_RULES['min_volume']['value']
|
||||
@@ -1628,6 +1826,8 @@ class WorkoutGenerator:
|
||||
|
||||
supersets = []
|
||||
|
||||
previous_focus_keys = set()
|
||||
|
||||
for ss_idx in range(num_supersets):
|
||||
rounds = random.randint(*wt_params['rounds'])
|
||||
ex_count = random.randint(*exercises_per_superset)
|
||||
@@ -1673,11 +1873,9 @@ class WorkoutGenerator:
|
||||
muscle_subset = target_muscles
|
||||
|
||||
# R9: Decide modality once per superset (all reps or all duration)
|
||||
# R5/R7: For strength workouts, force rep-based in working sets
|
||||
if is_strength_workout:
|
||||
superset_is_duration = False
|
||||
else:
|
||||
superset_is_duration = random.random() < duration_bias
|
||||
superset_is_duration = (
|
||||
modality_plan[ss_idx] if ss_idx < len(modality_plan) else False
|
||||
)
|
||||
|
||||
# R6: For strength workouts, prefer weighted exercises
|
||||
prefer_weighted = is_strength_workout
|
||||
@@ -1692,6 +1890,9 @@ class WorkoutGenerator:
|
||||
else:
|
||||
position_str = 'middle'
|
||||
|
||||
exercises = []
|
||||
selected_focus_keys = set()
|
||||
for _attempt in range(4):
|
||||
# Select exercises
|
||||
exercises = self.exercise_selector.select_exercises(
|
||||
muscle_groups=muscle_subset,
|
||||
@@ -1736,6 +1937,25 @@ class WorkoutGenerator:
|
||||
exercises, muscle_groups=muscle_subset, fitness_level=fitness_level,
|
||||
)
|
||||
|
||||
if self._has_duplicate_focus_in_superset(exercises):
|
||||
continue
|
||||
|
||||
selected_focus_keys = self._superset_focus_keys(exercises)
|
||||
if previous_focus_keys and selected_focus_keys and selected_focus_keys == previous_focus_keys:
|
||||
continue
|
||||
|
||||
break
|
||||
|
||||
if not exercises:
|
||||
continue
|
||||
if self._has_duplicate_focus_in_superset(exercises):
|
||||
logger.warning(
|
||||
"Allowing unresolved duplicate exercise focus in superset %d after retries",
|
||||
ss_idx + 1,
|
||||
)
|
||||
if not selected_focus_keys:
|
||||
selected_focus_keys = self._superset_focus_keys(exercises)
|
||||
|
||||
# Build exercise entries with reps/duration
|
||||
exercise_entries = []
|
||||
for i, ex in enumerate(exercises, start=1):
|
||||
@@ -1760,10 +1980,7 @@ class WorkoutGenerator:
|
||||
else:
|
||||
# R9: When superset is rep-based, always assign reps
|
||||
# even if the exercise is duration-capable
|
||||
entry['reps'] = random.randint(
|
||||
wt_params['rep_min'],
|
||||
wt_params['rep_max'],
|
||||
)
|
||||
entry['reps'] = self._pick_reps_for_exercise(ex, wt_params, workout_type)
|
||||
if ex.is_weight:
|
||||
entry['weight'] = None # user fills in their weight
|
||||
|
||||
@@ -1785,6 +2002,8 @@ class WorkoutGenerator:
|
||||
'modality': 'duration' if superset_is_duration else 'reps',
|
||||
'exercises': exercise_entries,
|
||||
})
|
||||
if selected_focus_keys:
|
||||
previous_focus_keys = selected_focus_keys
|
||||
|
||||
# Item #6: Modality consistency check
|
||||
if wt_params.get('duration_bias', 0) >= 0.6:
|
||||
@@ -1820,6 +2039,42 @@ class WorkoutGenerator:
|
||||
|
||||
return supersets
|
||||
|
||||
@staticmethod
def _exercise_focus_key(exercise):
    """Classify an exercise into a coarse focus key for variety checks."""
    if exercise is None:
        return None
    # Movement families derived from the exercise name take precedence;
    # the alphabetically-first family is the canonical key.
    exercise_name = getattr(exercise, 'name', '') or ''
    families = sorted(extract_movement_families(exercise_name))
    if families:
        return families[0]

    # Otherwise scan the recorded movement-pattern string for a known token.
    patterns = (getattr(exercise, 'movement_patterns', '') or '').lower()
    known_tokens = ('upper pull', 'upper push', 'hip hinge', 'squat', 'lunge', 'core', 'carry')
    return next((token for token in known_tokens if token in patterns), None)
|
||||
|
||||
def _superset_focus_keys(self, exercises):
    """Return a set of coarse focus keys for a superset."""
    # Unclassifiable exercises (key of None/'') are simply omitted.
    return {
        focus_key
        for focus_key in (self._exercise_focus_key(ex) for ex in exercises or [])
        if focus_key
    }
|
||||
|
||||
def _has_duplicate_focus_in_superset(self, exercises):
    """Prevent same focus from being repeated inside one working superset."""
    # Collect every classifiable focus key; a duplicate exists exactly
    # when the list is longer than its set of distinct values.
    focus_keys = [
        focus_key
        for focus_key in (self._exercise_focus_key(ex) for ex in exercises or [])
        if focus_key
    ]
    return len(focus_keys) != len(set(focus_keys))
|
||||
|
||||
def _build_cooldown(self, target_muscles, workout_type=None):
|
||||
"""
|
||||
Build a cool-down superset spec: duration-based, 1 round.
|
||||
@@ -2026,6 +2281,7 @@ class WorkoutGenerator:
|
||||
target_muscles = muscle_split.get('muscles', [])
|
||||
supersets = workout_spec.get('supersets', [])
|
||||
duration_bias = wt_params.get('duration_bias', 0.3)
|
||||
duration_bias = self._clamp_duration_bias_for_type(duration_bias, workout_type)
|
||||
|
||||
# Derive strength context for workout-type-aware padding
|
||||
is_strength_workout = False
|
||||
@@ -2036,6 +2292,14 @@ class WorkoutGenerator:
|
||||
min_duration = GENERATION_RULES['min_duration']['value']
|
||||
duration_mult = GENERATION_RULES['duration_multiple']['value']
|
||||
min_volume = GENERATION_RULES['min_volume']['value']
|
||||
max_working_exercises = self._max_working_exercises_for_type(workout_type)
|
||||
|
||||
def _total_working_exercises():
|
||||
return sum(
|
||||
len(ss.get('exercises', []))
|
||||
for ss in supersets
|
||||
if ss.get('name', '').startswith('Working')
|
||||
)
|
||||
|
||||
# Find the insertion point: before Cool Down if it exists, else at end
|
||||
insert_idx = len(supersets)
|
||||
@@ -2052,6 +2316,8 @@ class WorkoutGenerator:
|
||||
self._estimate_total_time(workout_spec) < max_duration_sec * 0.9
|
||||
and pad_attempts < max_pad_attempts
|
||||
):
|
||||
if _total_working_exercises() >= max_working_exercises:
|
||||
break
|
||||
pad_attempts += 1
|
||||
|
||||
# Try adding exercises to existing working supersets first
|
||||
@@ -2061,6 +2327,8 @@ class WorkoutGenerator:
|
||||
continue
|
||||
if len(ss['exercises']) >= MAX_EXERCISES_PER_SUPERSET:
|
||||
continue
|
||||
if _total_working_exercises() >= max_working_exercises:
|
||||
break
|
||||
|
||||
# R9: Use stored modality from superset spec
|
||||
ss_is_duration = ss.get('modality') == 'duration'
|
||||
@@ -2088,10 +2356,7 @@ class WorkoutGenerator:
|
||||
# Skip non-duration exercise in duration superset (R9)
|
||||
continue
|
||||
else:
|
||||
entry['reps'] = random.randint(
|
||||
wt_params['rep_min'],
|
||||
wt_params['rep_max'],
|
||||
)
|
||||
entry['reps'] = self._pick_reps_for_exercise(ex, wt_params, workout_type)
|
||||
if ex.is_weight:
|
||||
entry['weight'] = None
|
||||
# R10: Volume floor
|
||||
@@ -2111,14 +2376,39 @@ class WorkoutGenerator:
|
||||
|
||||
# If we couldn't add to existing, create a new working superset
|
||||
if not added:
|
||||
remaining_capacity = max_working_exercises - _total_working_exercises()
|
||||
if remaining_capacity <= 0:
|
||||
break
|
||||
rounds = random.randint(*wt_params['rounds'])
|
||||
ex_count = random.randint(*wt_params['exercises_per_superset'])
|
||||
min_for_new_superset = GENERATION_RULES['min_exercises_per_superset']['value']
|
||||
if remaining_capacity < min_for_new_superset:
|
||||
break
|
||||
# R8: Min 2 exercises
|
||||
ex_count = max(GENERATION_RULES['min_exercises_per_superset']['value'], ex_count)
|
||||
ex_count = max(min_for_new_superset, ex_count)
|
||||
ex_count = min(ex_count, remaining_capacity)
|
||||
if ex_count <= 0:
|
||||
break
|
||||
# R9: Decide modality once for the new superset
|
||||
# R5/R7: For strength workouts, force rep-based
|
||||
if is_strength_workout:
|
||||
ss_is_duration = False
|
||||
else:
|
||||
working = [
|
||||
current for current in supersets
|
||||
if current.get('name', '').startswith('Working')
|
||||
]
|
||||
total_entries = sum(len(current.get('exercises', [])) for current in working)
|
||||
duration_entries = sum(
|
||||
len(current.get('exercises', []))
|
||||
for current in working
|
||||
if current.get('modality') == 'duration'
|
||||
)
|
||||
current_ratio = (duration_entries / total_entries) if total_entries else duration_bias
|
||||
if current_ratio < duration_bias - 0.05:
|
||||
ss_is_duration = True
|
||||
elif current_ratio > duration_bias + 0.05:
|
||||
ss_is_duration = False
|
||||
else:
|
||||
ss_is_duration = random.random() < duration_bias
|
||||
|
||||
@@ -2146,10 +2436,7 @@ class WorkoutGenerator:
|
||||
# Skip non-duration exercise in duration superset (R9)
|
||||
continue
|
||||
else:
|
||||
entry['reps'] = random.randint(
|
||||
wt_params['rep_min'],
|
||||
wt_params['rep_max'],
|
||||
)
|
||||
entry['reps'] = self._pick_reps_for_exercise(ex, wt_params, workout_type)
|
||||
if ex.is_weight:
|
||||
entry['weight'] = None
|
||||
exercise_entries.append(entry)
|
||||
@@ -2157,6 +2444,8 @@ class WorkoutGenerator:
|
||||
# Re-number orders after filtering
|
||||
for idx, entry in enumerate(exercise_entries, start=1):
|
||||
entry['order'] = idx
|
||||
if not exercise_entries:
|
||||
continue
|
||||
|
||||
# R10: Volume floor for new superset
|
||||
for entry in exercise_entries:
|
||||
@@ -2183,6 +2472,397 @@ class WorkoutGenerator:
|
||||
|
||||
return workout_spec
|
||||
|
||||
def _max_working_exercises_for_type(self, workout_type):
    """Return the calibrated max working-exercise cap for this workout type."""
    # Universal ceiling applies when no type or no per-type calibration exists.
    default_cap = UNIVERSAL_RULES.get('max_exercises_per_workout', 30)
    if not workout_type:
        return default_cap
    type_key = _normalize_type_key(getattr(workout_type, 'name', '') or '')
    return WORKOUT_TYPE_RULES.get(type_key, {}).get('max_exercises_per_session', default_cap)
|
||||
|
||||
@staticmethod
|
||||
def _workout_type_rules(workout_type):
|
||||
if not workout_type:
|
||||
return {}
|
||||
wt_key = _normalize_type_key(getattr(workout_type, 'name', '') or '')
|
||||
return WORKOUT_TYPE_RULES.get(wt_key, {})
|
||||
|
||||
def _clamp_duration_bias_for_type(self, duration_bias, workout_type):
    """Clamp a duration bias into the type's calibrated range, else into [0, 1]."""
    bias_range = self._workout_type_rules(workout_type).get('duration_bias_range')
    if bias_range:
        lower, upper = float(bias_range[0]), float(bias_range[1])
    else:
        # No calibration for this type: just keep the bias a valid probability.
        lower, upper = 0.0, 1.0
    return min(max(duration_bias, lower), upper)
|
||||
|
||||
def _pick_reps_for_exercise(self, exercise, wt_params, workout_type):
    """Choose a rep count, honoring per-tier calibrated rep ranges when present.

    Falls back to the workout-type default rep_min/rep_max when the
    exercise's tier has no calibrated range.  Bounds are normalized so an
    inverted (high, low) pair cannot crash randint.
    """
    tier = (getattr(exercise, 'exercise_tier', None) or 'accessory').lower()
    tier_ranges = self._workout_type_rules(workout_type).get('rep_ranges', {})
    rep_range = tier_ranges.get(tier)
    if rep_range is None:
        rep_range = (wt_params['rep_min'], wt_params['rep_max'])

    lower, upper = sorted((int(rep_range[0]), int(rep_range[1])))
    return random.randint(lower, upper)
|
||||
|
||||
def _plan_superset_modalities(self, num_supersets, duration_bias, workout_type, is_strength_workout):
|
||||
if num_supersets <= 0:
|
||||
return []
|
||||
if is_strength_workout:
|
||||
return [False] * num_supersets
|
||||
|
||||
wt_rules = self._workout_type_rules(workout_type)
|
||||
bias_range = wt_rules.get('duration_bias_range')
|
||||
if bias_range:
|
||||
low, high = bias_range
|
||||
target_bias = (float(low) + float(high)) / 2.0
|
||||
min_duration_sets = max(0, math.ceil(num_supersets * float(low)))
|
||||
max_duration_sets = min(num_supersets, math.floor(num_supersets * float(high)))
|
||||
else:
|
||||
target_bias = max(0.0, min(1.0, duration_bias))
|
||||
min_duration_sets = max(0, math.floor(num_supersets * max(0.0, target_bias - 0.15)))
|
||||
max_duration_sets = min(num_supersets, math.ceil(num_supersets * min(1.0, target_bias + 0.15)))
|
||||
|
||||
duration_sets = int(round(num_supersets * target_bias))
|
||||
duration_sets = max(min_duration_sets, min(max_duration_sets, duration_sets))
|
||||
|
||||
if num_supersets > 1 and duration_sets == num_supersets and max_duration_sets < num_supersets:
|
||||
duration_sets = max_duration_sets
|
||||
if num_supersets > 1 and duration_sets == 0 and min_duration_sets > 0:
|
||||
duration_sets = min_duration_sets
|
||||
|
||||
modalities = [False] * num_supersets
|
||||
if duration_sets > 0:
|
||||
positions = list(range(num_supersets))
|
||||
random.shuffle(positions)
|
||||
for idx in positions[:duration_sets]:
|
||||
modalities[idx] = True
|
||||
return modalities
|
||||
|
||||
@staticmethod
|
||||
def _entry_has_push(entry):
|
||||
ex = entry.get('exercise')
|
||||
if ex is None:
|
||||
return False
|
||||
patterns = (getattr(ex, 'movement_patterns', '') or '').lower()
|
||||
return 'push' in patterns
|
||||
|
||||
@staticmethod
|
||||
def _entry_has_pull(entry):
|
||||
ex = entry.get('exercise')
|
||||
if ex is None:
|
||||
return False
|
||||
patterns = (getattr(ex, 'movement_patterns', '') or '').lower()
|
||||
return 'pull' in patterns
|
||||
|
||||
def _enforce_compound_first_order(self, workout_spec, is_strength_workout=False):
    """Sort working supersets so compound-dominant work appears first.

    Mutates ``workout_spec`` in place: within each working superset,
    compound primary/secondary exercises are moved ahead of accessories
    (ties broken by prior 'order'), then whole working supersets are
    reordered by their compound count.  For strength workouts the first
    working superset (the straight set) keeps its position.  Superset
    names and exercise 'order' fields are renumbered afterwards.
    """
    supersets = workout_spec.get('supersets', [])
    working_indices = [
        i for i, ss in enumerate(supersets)
        if ss.get('name', '').startswith('Working')
    ]
    if not working_indices:
        return

    def _is_compound_entry(entry):
        # "Compound" here means flagged is_compound AND in a top tier.
        ex = entry.get('exercise')
        if ex is None:
            return False
        tier = getattr(ex, 'exercise_tier', None)
        return bool(getattr(ex, 'is_compound', False) and tier in ('primary', 'secondary'))

    working_sets = [supersets[i] for i in working_indices]

    # Phase 1: within each superset, compounds first; stable on old order.
    for ss in working_sets:
        exercises = ss.get('exercises', [])
        exercises.sort(
            key=lambda entry: (
                0 if _is_compound_entry(entry) else 1,
                entry.get('order', 0),
            )
        )
        for idx, entry in enumerate(exercises, start=1):
            entry['order'] = idx

    # Phase 2: reorder whole supersets by compound count (descending).
    pinned_first = None
    sortable_sets = working_sets
    if is_strength_workout and working_sets:
        # Preserve the first straight set for strength workouts.
        pinned_first = working_sets[0]
        sortable_sets = working_sets[1:]

    sortable_sets.sort(
        key=lambda ss: sum(
            1 for entry in ss.get('exercises', [])
            if _is_compound_entry(entry)
        ),
        reverse=True,
    )
    if pinned_first is not None:
        working_sets = [pinned_first] + sortable_sets
    else:
        working_sets = sortable_sets
    # Rename to keep 'Working Set N' labels sequential after reordering.
    for idx, ss in enumerate(working_sets, start=1):
        ss['name'] = f'Working Set {idx}'

    # Write the reordered supersets back into their original slots so
    # non-working supersets (warm-up, cool-down) stay where they were.
    for idx, original_idx in enumerate(working_indices):
        supersets[original_idx] = working_sets[idx]
|
||||
|
||||
def _select_pull_replacement(self, target_muscles, is_duration_based, prefer_weighted):
    """Pick a pull-pattern replacement that still respects user constraints.

    Queries the selector's filtered queryset (equipment/exclusions/level)
    restricted to pull-pattern exercises matching the superset modality.
    Falls back to an unrestricted muscle pool if the target-muscle pool
    is empty.  Returns a random candidate or None when nothing matches.
    """
    fitness_level = getattr(self.preference, 'fitness_level', None)

    def _candidate_pool(muscle_groups):
        # NOTE(review): reaches into the selector's private
        # _get_filtered_queryset; keeps user constraint filtering in one
        # place but couples this method to selector internals.
        qs = self.exercise_selector._get_filtered_queryset(
            muscle_groups=muscle_groups,
            is_duration_based=is_duration_based,
            fitness_level=fitness_level,
        ).filter(movement_patterns__icontains='pull')
        # Match the superset's modality so the entry stays consistent (R9).
        if is_duration_based:
            qs = qs.filter(is_duration=True)
        else:
            qs = qs.filter(is_reps=True)
        return list(qs[:50])

    candidates = _candidate_pool(target_muscles)
    if not candidates and target_muscles:
        # Relax the muscle restriction rather than failing outright.
        candidates = _candidate_pool([])
    if not candidates:
        return None

    if prefer_weighted:
        # Strength workouts prefer weighted pulls, but only when available.
        weighted = [c for c in candidates if getattr(c, 'is_weight', False)]
        if weighted:
            candidates = weighted

    return random.choice(candidates)
|
||||
|
||||
def _rebalance_push_pull(
    self, workout_spec, target_muscles, wt_params, is_strength_workout, workout_type=None,
):
    """Replace push-only entries with pull entries until ratio is compliant.

    Mutates ``workout_spec`` in place and keeps the exercise selector's
    used-id/used-name bookkeeping consistent with each swap.  Early
    exits: no working supersets, no pushes, a small all-push workout
    (<= 2 pushes, no pulls), or pulls already >= pushes.
    """
    working = [
        ss for ss in workout_spec.get('supersets', [])
        if ss.get('name', '').startswith('Working')
    ]
    if not working:
        return

    # Tally pushes/pulls and collect push-only entries as swap targets.
    # An entry with both patterns counts toward both tallies.
    push_count = 0
    pull_count = 0
    replaceable = []
    for ss in working:
        for entry in ss.get('exercises', []):
            has_push = self._entry_has_push(entry)
            has_pull = self._entry_has_pull(entry)
            if has_push:
                push_count += 1
            if has_pull:
                pull_count += 1
            if has_push and not has_pull:
                replaceable.append((ss, entry))

    if push_count == 0:
        return
    if pull_count == 0 and push_count <= 2:
        # A couple of pushes with no pulls is tolerated as-is.
        return
    if pull_count >= push_count:
        return

    # Each swap removes one push and adds one pull, closing the gap by 2.
    replacements_needed = max(1, math.ceil((push_count - pull_count) / 2))
    if not replaceable:
        return

    min_duration = GENERATION_RULES['min_duration']['value']
    duration_mult = GENERATION_RULES['duration_multiple']['value']
    prefer_weighted = is_strength_workout

    # Swap from the end first so early (compound) work is preserved.
    for ss, entry in reversed(replaceable):
        if replacements_needed <= 0:
            break

        is_duration_based = ss.get('modality') == 'duration'
        replacement = self._select_pull_replacement(
            target_muscles=target_muscles,
            is_duration_based=is_duration_based,
            prefer_weighted=prefer_weighted,
        )
        if replacement is None:
            continue

        old_ex = entry.get('exercise')
        entry['exercise'] = replacement

        # Rewrite the entry's prescription to match the superset modality.
        if is_duration_based:
            entry.pop('reps', None)
            entry.pop('weight', None)
            if entry.get('duration') is None:
                duration = random.randint(
                    wt_params['duration_min'],
                    wt_params['duration_max'],
                )
                # Snap to the configured multiple, respecting the floor.
                entry['duration'] = max(
                    min_duration, round(duration / duration_mult) * duration_mult,
                )
        else:
            entry.pop('duration', None)
            if entry.get('reps') is None:
                entry['reps'] = self._pick_reps_for_exercise(
                    replacement, wt_params, workout_type,
                )
            if getattr(replacement, 'is_weight', False):
                entry['weight'] = None
            else:
                entry.pop('weight', None)

        # Release the replaced exercise so later selection may reuse it.
        if old_ex is not None:
            self.exercise_selector.used_exercise_ids.discard(old_ex.pk)
            old_name = (getattr(old_ex, 'name', '') or '').lower().strip()
            if old_name:
                self.exercise_selector.used_exercise_names.discard(old_name)

        # Register the replacement to keep dedupe bookkeeping accurate.
        self.exercise_selector.used_exercise_ids.add(replacement.pk)
        replacement_name = (replacement.name or '').lower().strip()
        if replacement_name:
            self.exercise_selector.used_exercise_names.add(replacement_name)

        replacements_needed -= 1
|
||||
|
||||
def _get_final_conformance_violations(self, workout_spec, workout_type, target_muscles):
    """Validate final workout against rules + user-preference conformance.

    Combines the rules-engine violations with the user-preference
    alignment checks and returns them as a single list.
    """
    type_name = workout_type.name if workout_type else 'unknown_type'
    goal = getattr(self.preference, 'primary_goal', 'general_fitness')

    all_violations = list(validate_workout(workout_spec, type_name, goal))
    preference_violations = self._validate_user_preference_alignment(
        workout_spec, target_muscles
    )
    all_violations.extend(preference_violations)
    return all_violations
|
||||
|
||||
def _validate_user_preference_alignment(self, workout_spec, target_muscles):
    """Validate that final selections still honor explicit user preferences.

    Runs three checks over the built spec and returns RuleViolation
    objects (all severity 'error'):
      1. no excluded exercises anywhere in the workout;
      2. every exercise's required equipment is within the user's
         available equipment;
      3. at least 70% of working exercises hit a target muscle.
    Returns an empty list when the spec has no exercises.
    """
    violations = []
    supersets = workout_spec.get('supersets', [])

    # Flatten the spec: every exercise, plus the working-set subset.
    all_exercises = []
    working_exercises = []
    for ss in supersets:
        is_working = ss.get('name', '').startswith('Working')
        for entry in ss.get('exercises', []):
            ex = entry.get('exercise')
            if ex is None:
                continue
            all_exercises.append(ex)
            if is_working:
                working_exercises.append(ex)

    if not all_exercises:
        return violations

    exercise_ids = {ex.pk for ex in all_exercises}
    ex_name_map = {ex.pk: (ex.name or f'Exercise {ex.pk}') for ex in all_exercises}

    # 1) Excluded exercises must never appear.
    excluded_ids = set(
        self.preference.excluded_exercises.values_list('pk', flat=True)
    )
    excluded_present = sorted(exercise_ids & excluded_ids)
    if excluded_present:
        # Name at most three offenders to keep the message readable.
        names = ', '.join(ex_name_map.get(ex_id, str(ex_id)) for ex_id in excluded_present[:3])
        violations.append(RuleViolation(
            rule_id='preference_excluded_exercise',
            severity='error',
            message=f'Workout includes excluded exercise(s): {names}.',
            actual_value=len(excluded_present),
        ))

    # 2) Equipment requirements must stay within user-available equipment.
    available_equipment_ids = set(
        self.preference.available_equipment.values_list('pk', flat=True)
    )
    # Map exercise id -> set of required equipment ids (one query).
    equipment_requirements = {}
    for ex_id, eq_id in WorkoutEquipment.objects.filter(
        exercise_id__in=exercise_ids,
    ).values_list('exercise_id', 'equipment_id'):
        equipment_requirements.setdefault(ex_id, set()).add(eq_id)

    equipment_mismatch = []
    for ex_id, required_equipment in equipment_requirements.items():
        # A user with no declared equipment cannot satisfy any requirement.
        if not available_equipment_ids:
            equipment_mismatch.append(ex_id)
            continue
        if not required_equipment.issubset(available_equipment_ids):
            equipment_mismatch.append(ex_id)

    if equipment_mismatch:
        names = ', '.join(ex_name_map.get(ex_id, str(ex_id)) for ex_id in equipment_mismatch[:3])
        violations.append(RuleViolation(
            rule_id='preference_equipment_mismatch',
            severity='error',
            message=f'Workout includes equipment beyond user preference: {names}.',
            actual_value=len(equipment_mismatch),
        ))

    # 3) Working exercises should mostly align with target muscles.
    normalized_targets = {
        normalize_muscle_name(m)
        for m in (target_muscles or [])
        if m
    }
    if normalized_targets and working_exercises:
        working_ids = {ex.pk for ex in working_exercises}
        # Map exercise id -> normalized muscle names (one query).
        exercise_muscles = {}
        for ex_id, muscle_name in ExerciseMuscle.objects.filter(
            exercise_id__in=working_ids,
        ).values_list('exercise_id', 'muscle__name'):
            exercise_muscles.setdefault(ex_id, set()).add(
                normalize_muscle_name(muscle_name),
            )

        evaluated = 0
        matched = 0
        for ex in working_exercises:
            ex_muscles = exercise_muscles.get(ex.pk)
            if not ex_muscles:
                # Fall back to the denormalized CSV field on the exercise.
                raw = getattr(ex, 'muscle_groups', '') or ''
                ex_muscles = {
                    normalize_muscle_name(part.strip())
                    for part in raw.split(',')
                    if part.strip()
                }
            if not ex_muscles:
                # Exercises with no muscle data are excluded from the ratio.
                continue
            evaluated += 1
            if ex_muscles & normalized_targets:
                matched += 1

        if evaluated > 0:
            alignment = matched / evaluated
            min_alignment = 0.7
            if alignment < min_alignment:
                violations.append(RuleViolation(
                    rule_id='preference_target_muscle_alignment',
                    severity='error',
                    message=(
                        f'Target-muscle alignment {alignment:.0%} is below '
                        f'required {min_alignment:.0%}.'
                    ),
                    actual_value=alignment,
                    expected_range=(min_alignment, 1.0),
                ))

    return violations
|
||||
|
||||
@staticmethod
|
||||
def _is_blocking_final_violation(violation):
|
||||
"""Block only hard failures and warnings; keep info-level rules advisory."""
|
||||
return violation.severity in {'error', 'warning'}
|
||||
|
||||
def _check_quality_gates(self, working_supersets, workout_type, wt_params):
|
||||
"""Run quality gate validation on working supersets.
|
||||
|
||||
|
||||
56
generator/tests/test_check_rules_drift.py
Normal file
56
generator/tests/test_check_rules_drift.py
Normal file
@@ -0,0 +1,56 @@
|
||||
from django.core.management import call_command
|
||||
from django.test import TestCase
|
||||
|
||||
from generator.models import WorkoutType
|
||||
from generator.rules_engine import DB_CALIBRATION
|
||||
|
||||
|
||||
class TestCheckRulesDriftCommand(TestCase):
    """Tests for the strict drift-check command behavior."""

    @staticmethod
    def _sync_workout_type(name, values):
        """Create or update a WorkoutType row so it matches the calibration values."""
        workout_type, _created = WorkoutType.objects.get_or_create(
            name=name,
            defaults={
                'display_name': name.replace('_', ' ').title(),
                'description': f'Calibrated {name}',
                **values,
            },
        )
        # For pre-existing rows, only persist fields that actually drifted.
        changed_fields = [
            field for field, expected in values.items()
            if getattr(workout_type, field) != expected
        ]
        for field in changed_fields:
            setattr(workout_type, field, values[field])
        if changed_fields:
            workout_type.save(update_fields=changed_fields)
        return workout_type

    def _sync_all_calibrated_types(self):
        """Bring every calibrated workout type in line with DB_CALIBRATION."""
        for type_name, values in DB_CALIBRATION.items():
            self._sync_workout_type(type_name, values)

    def test_passes_when_all_types_match(self):
        self._sync_all_calibrated_types()

        # Should not raise SystemExit when everything matches.
        call_command('check_rules_drift', verbosity=0)

    def test_fails_when_type_missing(self):
        self._sync_all_calibrated_types()
        WorkoutType.objects.filter(name='cardio').delete()

        with self.assertRaises(SystemExit) as ctx:
            call_command('check_rules_drift', verbosity=0)
        self.assertEqual(ctx.exception.code, 1)

    def test_fails_when_value_mismatch(self):
        self._sync_all_calibrated_types()

        target = WorkoutType.objects.get(name='hypertrophy')
        target.typical_rest_between_sets = 999
        target.save(update_fields=['typical_rest_between_sets'])

        with self.assertRaises(SystemExit) as ctx:
            call_command('check_rules_drift', verbosity=0)
        self.assertEqual(ctx.exception.code, 1)
|
||||
@@ -4,6 +4,7 @@ Tests for _build_working_supersets() — Items #4, #6, #7:
|
||||
- Modality consistency check (duration_bias warning)
|
||||
- Straight-set strength (first superset = single main lift)
|
||||
"""
|
||||
from datetime import date
|
||||
from django.contrib.auth import get_user_model
|
||||
from django.test import TestCase
|
||||
from unittest.mock import patch, MagicMock, PropertyMock
|
||||
@@ -16,10 +17,12 @@ from generator.models import (
|
||||
WorkoutType,
|
||||
)
|
||||
from generator.services.workout_generator import (
|
||||
FINAL_CONFORMANCE_MAX_RETRIES,
|
||||
WorkoutGenerator,
|
||||
STRENGTH_WORKOUT_TYPES,
|
||||
WORKOUT_TYPE_DEFAULTS,
|
||||
)
|
||||
from generator.rules_engine import RuleViolation, validate_workout
|
||||
from registered_user.models import RegisteredUser
|
||||
|
||||
User = get_user_model()
|
||||
@@ -58,6 +61,18 @@ class MovementEnforcementTestBase(TestCase):
|
||||
superset_size_min=3,
|
||||
superset_size_max=6,
|
||||
)
|
||||
cls.core_type = WorkoutType.objects.filter(name='core_training').first()
|
||||
if cls.core_type is None:
|
||||
cls.core_type = WorkoutType.objects.create(
|
||||
name='core_training',
|
||||
typical_rest_between_sets=30,
|
||||
typical_intensity='medium',
|
||||
rep_range_min=10,
|
||||
rep_range_max=20,
|
||||
duration_bias=0.5,
|
||||
superset_size_min=3,
|
||||
superset_size_max=5,
|
||||
)
|
||||
|
||||
# Create MovementPatternOrder records
|
||||
MovementPatternOrder.objects.create(
|
||||
@@ -169,6 +184,58 @@ class TestMovementPatternEnforcement(MovementEnforcementTestBase):
|
||||
|
||||
pref.delete()
|
||||
|
||||
def test_retries_when_superset_has_duplicate_focus(self):
|
||||
"""Generator should retry when a working superset repeats focus family."""
|
||||
pref = self._make_preference()
|
||||
gen = self._make_generator(pref)
|
||||
|
||||
curl_a = self._create_mock_exercise(
|
||||
'Alternating Bicep Curls',
|
||||
movement_patterns='upper pull',
|
||||
is_compound=False,
|
||||
exercise_tier='accessory',
|
||||
)
|
||||
curl_b = self._create_mock_exercise(
|
||||
'Bicep Curls',
|
||||
movement_patterns='upper pull',
|
||||
is_compound=False,
|
||||
exercise_tier='accessory',
|
||||
)
|
||||
pull = self._create_mock_exercise('Bent Over Row', movement_patterns='upper pull')
|
||||
hinge = self._create_mock_exercise('Romanian Deadlift', movement_patterns='hip hinge')
|
||||
|
||||
gen.exercise_selector.select_exercises.side_effect = [
|
||||
[curl_a, curl_b], # rejected: duplicate focus
|
||||
[pull, hinge], # accepted
|
||||
]
|
||||
gen.exercise_selector.balance_stretch_positions.side_effect = lambda exs, **_: exs
|
||||
|
||||
muscle_split = {
|
||||
'muscles': ['upper back', 'biceps'],
|
||||
'split_type': 'pull',
|
||||
'label': 'Pull',
|
||||
}
|
||||
wt_params = dict(WORKOUT_TYPE_DEFAULTS['hiit'])
|
||||
wt_params['num_supersets'] = (1, 1)
|
||||
wt_params['exercises_per_superset'] = (2, 2)
|
||||
wt_params['duration_bias'] = 0.0
|
||||
|
||||
supersets = gen._build_working_supersets(muscle_split, self.hiit_type, wt_params)
|
||||
self.assertEqual(len(supersets), 1)
|
||||
self.assertGreaterEqual(gen.exercise_selector.select_exercises.call_count, 2)
|
||||
|
||||
names = [
|
||||
entry['exercise'].name
|
||||
for entry in supersets[0].get('exercises', [])
|
||||
]
|
||||
self.assertNotEqual(
|
||||
set(names),
|
||||
{'Alternating Bicep Curls', 'Bicep Curls'},
|
||||
f'Expected duplicate-focus superset to be retried, got {names}',
|
||||
)
|
||||
|
||||
pref.delete()
|
||||
|
||||
|
||||
class TestStrengthStraightSets(MovementEnforcementTestBase):
|
||||
"""Item #7: First working superset in strength = single main lift."""
|
||||
@@ -288,13 +355,19 @@ class TestStrengthStraightSets(MovementEnforcementTestBase):
|
||||
|
||||
# Should have multiple supersets
|
||||
if len(supersets) >= 2:
|
||||
# Check that the second superset's select_exercises call
|
||||
# requested count >= 2 (min_ex_per_ss)
|
||||
second_call = gen.exercise_selector.select_exercises.call_args_list[1]
|
||||
count_arg = second_call.kwargs.get('count')
|
||||
if count_arg is None and len(second_call.args) > 1:
|
||||
count_arg = second_call.args[1]
|
||||
self.assertGreaterEqual(count_arg, 2)
|
||||
# Retries may add extra calls; assert at least one non-first
|
||||
# working-superset request asks for 2+ exercises.
|
||||
observed_counts = []
|
||||
for call in gen.exercise_selector.select_exercises.call_args_list:
|
||||
count_arg = call.kwargs.get('count')
|
||||
if count_arg is None and len(call.args) > 1:
|
||||
count_arg = call.args[1]
|
||||
if count_arg is not None:
|
||||
observed_counts.append(count_arg)
|
||||
self.assertTrue(
|
||||
any(c >= 2 for c in observed_counts),
|
||||
f"Expected at least one accessory superset request >=2 exercises, got {observed_counts}",
|
||||
)
|
||||
|
||||
pref.delete()
|
||||
|
||||
@@ -330,6 +403,68 @@ class TestStrengthStraightSets(MovementEnforcementTestBase):
|
||||
|
||||
pref.delete()
|
||||
|
||||
def test_strength_first_superset_survives_post_processing(self):
|
||||
"""generate_single_workout should preserve first strength straight set."""
|
||||
pref = self._make_preference(primary_goal='strength')
|
||||
gen = self._make_generator(pref)
|
||||
|
||||
main_lift = self._create_mock_exercise('Back Squat', exercise_tier='primary')
|
||||
accessory_1 = self._create_mock_exercise('DB Row', exercise_tier='secondary')
|
||||
accessory_2 = self._create_mock_exercise('RDL', exercise_tier='secondary')
|
||||
accessory_3 = self._create_mock_exercise('Lat Pulldown', exercise_tier='accessory')
|
||||
|
||||
gen._build_warmup = MagicMock(return_value=None)
|
||||
gen._build_cooldown = MagicMock(return_value=None)
|
||||
gen._check_quality_gates = MagicMock(return_value=[])
|
||||
gen._get_final_conformance_violations = MagicMock(return_value=[])
|
||||
gen._adjust_to_time_target = MagicMock(side_effect=lambda spec, *_args, **_kwargs: spec)
|
||||
gen._build_working_supersets = MagicMock(return_value=[
|
||||
{
|
||||
'name': 'Working Set 1',
|
||||
'rounds': 5,
|
||||
'rest_between_rounds': 120,
|
||||
'modality': 'reps',
|
||||
'exercises': [
|
||||
{'exercise': main_lift, 'reps': 5, 'order': 1},
|
||||
],
|
||||
},
|
||||
{
|
||||
'name': 'Working Set 2',
|
||||
'rounds': 3,
|
||||
'rest_between_rounds': 90,
|
||||
'modality': 'reps',
|
||||
'exercises': [
|
||||
{'exercise': accessory_1, 'reps': 10, 'order': 1},
|
||||
{'exercise': accessory_2, 'reps': 10, 'order': 2},
|
||||
{'exercise': accessory_3, 'reps': 12, 'order': 3},
|
||||
],
|
||||
},
|
||||
])
|
||||
|
||||
muscle_split = {
|
||||
'muscles': ['quads', 'hamstrings'],
|
||||
'split_type': 'lower',
|
||||
'label': 'Lower',
|
||||
}
|
||||
workout_spec = gen.generate_single_workout(
|
||||
muscle_split=muscle_split,
|
||||
workout_type=self.strength_type,
|
||||
scheduled_date=date(2026, 3, 2),
|
||||
)
|
||||
|
||||
working = [
|
||||
ss for ss in workout_spec.get('supersets', [])
|
||||
if ss.get('name', '').startswith('Working')
|
||||
]
|
||||
self.assertGreaterEqual(len(working), 1)
|
||||
self.assertEqual(
|
||||
len(working[0].get('exercises', [])),
|
||||
1,
|
||||
f'Expected first strength working set to stay at 1 exercise, got: {working[0]}',
|
||||
)
|
||||
|
||||
pref.delete()
|
||||
|
||||
|
||||
class TestModalityConsistency(MovementEnforcementTestBase):
|
||||
"""Item #6: Modality consistency warning for duration-dominant workouts."""
|
||||
@@ -503,3 +638,357 @@ class TestModalityConsistency(MovementEnforcementTestBase):
|
||||
)
|
||||
|
||||
pref.delete()
|
||||
|
||||
|
||||
class TestFinalConformance(MovementEnforcementTestBase):
|
||||
"""Strict final conformance enforcement for assembled workouts."""
|
||||
|
||||
def test_core_workout_respects_type_max_exercise_cap(self):
|
||||
"""Core workouts should be trimmed to the calibrated max (8 working exercises)."""
|
||||
pref = self._make_preference(primary_goal='general_fitness')
|
||||
gen = self._make_generator(pref)
|
||||
|
||||
gen._build_warmup = MagicMock(return_value=None)
|
||||
gen._build_cooldown = MagicMock(return_value=None)
|
||||
gen._check_quality_gates = MagicMock(return_value=[])
|
||||
gen._get_final_conformance_violations = MagicMock(return_value=[])
|
||||
gen._adjust_to_time_target = MagicMock(side_effect=lambda spec, *_args, **_kwargs: spec)
|
||||
|
||||
working_exercises = [
|
||||
{'exercise': self._create_mock_exercise(f'Core Push {i}', movement_patterns='upper push, core'), 'reps': 12, 'order': i + 1}
|
||||
for i in range(6)
|
||||
]
|
||||
more_working_exercises = [
|
||||
{'exercise': self._create_mock_exercise(f'Core Pull {i}', movement_patterns='upper pull, core'), 'reps': 12, 'order': i + 1}
|
||||
for i in range(6)
|
||||
]
|
||||
|
||||
gen._build_working_supersets = MagicMock(return_value=[
|
||||
{
|
||||
'name': 'Working Set 1',
|
||||
'rounds': 3,
|
||||
'rest_between_rounds': 30,
|
||||
'modality': 'reps',
|
||||
'exercises': working_exercises,
|
||||
},
|
||||
{
|
||||
'name': 'Working Set 2',
|
||||
'rounds': 3,
|
||||
'rest_between_rounds': 30,
|
||||
'modality': 'reps',
|
||||
'exercises': more_working_exercises,
|
||||
},
|
||||
])
|
||||
|
||||
workout_spec = gen.generate_single_workout(
|
||||
muscle_split={
|
||||
'muscles': ['core', 'abs', 'obliques'],
|
||||
'split_type': 'core',
|
||||
'label': 'Core Day',
|
||||
},
|
||||
workout_type=self.core_type,
|
||||
scheduled_date=date(2026, 3, 2),
|
||||
)
|
||||
|
||||
working = [
|
||||
ss for ss in workout_spec.get('supersets', [])
|
||||
if ss.get('name', '').startswith('Working')
|
||||
]
|
||||
total_working = sum(len(ss.get('exercises', [])) for ss in working)
|
||||
self.assertLessEqual(
|
||||
total_working, 8,
|
||||
f'Expected core workout to cap at 8 working exercises, got {total_working}',
|
||||
)
|
||||
|
||||
pref.delete()
|
||||
|
||||
def test_core_cap_removes_extra_minimum_supersets(self):
|
||||
"""When all sets are already at minimum size, remove trailing sets to hit cap."""
|
||||
pref = self._make_preference(primary_goal='general_fitness')
|
||||
gen = self._make_generator(pref)
|
||||
|
||||
gen._build_warmup = MagicMock(return_value=None)
|
||||
gen._build_cooldown = MagicMock(return_value=None)
|
||||
gen._check_quality_gates = MagicMock(return_value=[])
|
||||
gen._get_final_conformance_violations = MagicMock(return_value=[])
|
||||
gen._adjust_to_time_target = MagicMock(side_effect=lambda spec, *_args, **_kwargs: spec)
|
||||
|
||||
working_supersets = []
|
||||
for idx in range(6):
|
||||
push = self._create_mock_exercise(
|
||||
f'Push {idx}',
|
||||
movement_patterns='upper push',
|
||||
)
|
||||
pull = self._create_mock_exercise(
|
||||
f'Pull {idx}',
|
||||
movement_patterns='upper pull',
|
||||
)
|
||||
working_supersets.append({
|
||||
'name': f'Working Set {idx + 1}',
|
||||
'rounds': 3,
|
||||
'rest_between_rounds': 30,
|
||||
'modality': 'reps',
|
||||
'exercises': [
|
||||
{'exercise': push, 'reps': 12, 'order': 1},
|
||||
{'exercise': pull, 'reps': 12, 'order': 2},
|
||||
],
|
||||
})
|
||||
|
||||
gen._build_working_supersets = MagicMock(return_value=working_supersets)
|
||||
|
||||
workout_spec = gen.generate_single_workout(
|
||||
muscle_split={
|
||||
'muscles': ['core', 'abs', 'obliques'],
|
||||
'split_type': 'core',
|
||||
'label': 'Core Day',
|
||||
},
|
||||
workout_type=self.core_type,
|
||||
scheduled_date=date(2026, 3, 2),
|
||||
)
|
||||
|
||||
working = [
|
||||
ss for ss in workout_spec.get('supersets', [])
|
||||
if ss.get('name', '').startswith('Working')
|
||||
]
|
||||
total_working = sum(len(ss.get('exercises', [])) for ss in working)
|
||||
self.assertLessEqual(total_working, 8)
|
||||
self.assertLessEqual(len(working), 4)
|
||||
|
||||
pref.delete()
|
||||
|
||||
def test_pad_to_fill_respects_type_cap(self):
|
||||
"""Padding should stop when workout-type max working-exercise cap is reached."""
|
||||
pref = self._make_preference(primary_goal='general_fitness')
|
||||
gen = self._make_generator(pref)
|
||||
|
||||
gen._estimate_total_time = MagicMock(return_value=0)
|
||||
gen.exercise_selector.select_exercises.return_value = [
|
||||
self._create_mock_exercise('Pad Exercise', movement_patterns='upper pull')
|
||||
]
|
||||
|
||||
base_ex_a = self._create_mock_exercise('Base A', movement_patterns='upper push')
|
||||
base_ex_b = self._create_mock_exercise('Base B', movement_patterns='upper pull')
|
||||
workout_spec = {
|
||||
'supersets': [
|
||||
{
|
||||
'name': 'Working Set 1',
|
||||
'rounds': 3,
|
||||
'rest_between_rounds': 30,
|
||||
'modality': 'reps',
|
||||
'exercises': [
|
||||
{'exercise': base_ex_a, 'reps': 12, 'order': 1},
|
||||
{'exercise': base_ex_b, 'reps': 12, 'order': 2},
|
||||
{'exercise': base_ex_a, 'reps': 12, 'order': 3},
|
||||
],
|
||||
},
|
||||
{
|
||||
'name': 'Working Set 2',
|
||||
'rounds': 3,
|
||||
'rest_between_rounds': 30,
|
||||
'modality': 'reps',
|
||||
'exercises': [
|
||||
{'exercise': base_ex_b, 'reps': 12, 'order': 1},
|
||||
{'exercise': base_ex_a, 'reps': 12, 'order': 2},
|
||||
{'exercise': base_ex_b, 'reps': 12, 'order': 3},
|
||||
],
|
||||
},
|
||||
],
|
||||
}
|
||||
wt_params = dict(WORKOUT_TYPE_DEFAULTS['core'])
|
||||
wt_params['duration_bias'] = 0.0
|
||||
|
||||
padded = gen._pad_to_fill(
|
||||
workout_spec=workout_spec,
|
||||
max_duration_sec=3600,
|
||||
muscle_split={
|
||||
'muscles': ['core', 'abs'],
|
||||
'split_type': 'core',
|
||||
'label': 'Core Day',
|
||||
},
|
||||
wt_params=wt_params,
|
||||
workout_type=self.core_type,
|
||||
)
|
||||
|
||||
total_working = sum(
|
||||
len(ss.get('exercises', []))
|
||||
for ss in padded.get('supersets', [])
|
||||
if ss.get('name', '').startswith('Working')
|
||||
)
|
||||
self.assertLessEqual(total_working, 8)
|
||||
|
||||
pref.delete()
|
||||
|
||||
def test_compound_ordering_uses_validator_definition(self):
|
||||
"""Accessory-tagged entries should not be treated as compounds in ordering."""
|
||||
pref = self._make_preference(primary_goal='general_fitness')
|
||||
gen = self._make_generator(pref)
|
||||
|
||||
accessory_flagged_compound = self._create_mock_exercise(
|
||||
'Accessory Marked Compound',
|
||||
is_compound=True,
|
||||
exercise_tier='accessory',
|
||||
movement_patterns='upper push',
|
||||
)
|
||||
true_compound = self._create_mock_exercise(
|
||||
'Primary Compound',
|
||||
is_compound=True,
|
||||
exercise_tier='secondary',
|
||||
movement_patterns='upper pull',
|
||||
)
|
||||
workout_spec = {
|
||||
'supersets': [
|
||||
{
|
||||
'name': 'Working Set 1',
|
||||
'rounds': 3,
|
||||
'rest_between_rounds': 45,
|
||||
'modality': 'reps',
|
||||
'exercises': [
|
||||
{'exercise': accessory_flagged_compound, 'reps': 10, 'order': 1},
|
||||
{'exercise': true_compound, 'reps': 8, 'order': 2},
|
||||
],
|
||||
},
|
||||
],
|
||||
}
|
||||
|
||||
gen._enforce_compound_first_order(workout_spec, is_strength_workout=False)
|
||||
violations = validate_workout(workout_spec, 'hiit', 'general_fitness')
|
||||
compound_order_violations = [
|
||||
v for v in violations
|
||||
if v.rule_id == 'compound_before_isolation'
|
||||
]
|
||||
self.assertEqual(len(compound_order_violations), 0)
|
||||
|
||||
pref.delete()
|
||||
|
||||
def test_final_warning_triggers_regeneration(self):
|
||||
"""A final warning should trigger full regeneration before returning."""
|
||||
pref = self._make_preference()
|
||||
gen = self._make_generator(pref)
|
||||
|
||||
gen._build_warmup = MagicMock(return_value=None)
|
||||
gen._build_cooldown = MagicMock(return_value=None)
|
||||
gen._check_quality_gates = MagicMock(return_value=[])
|
||||
gen._adjust_to_time_target = MagicMock(side_effect=lambda spec, *_args, **_kwargs: spec)
|
||||
|
||||
ex = self._create_mock_exercise('Balanced Pull', movement_patterns='upper pull')
|
||||
gen._build_working_supersets = MagicMock(return_value=[
|
||||
{
|
||||
'name': 'Working Set 1',
|
||||
'rounds': 3,
|
||||
'rest_between_rounds': 45,
|
||||
'modality': 'reps',
|
||||
'exercises': [{'exercise': ex, 'reps': 10, 'order': 1}],
|
||||
},
|
||||
])
|
||||
|
||||
gen._get_final_conformance_violations = MagicMock(side_effect=[
|
||||
[RuleViolation(
|
||||
rule_id='exercise_count_cap',
|
||||
severity='warning',
|
||||
message='Too many exercises',
|
||||
)],
|
||||
[],
|
||||
])
|
||||
|
||||
gen.generate_single_workout(
|
||||
muscle_split={
|
||||
'muscles': ['upper back', 'lats'],
|
||||
'split_type': 'pull',
|
||||
'label': 'Pull Day',
|
||||
},
|
||||
workout_type=self.hiit_type,
|
||||
scheduled_date=date(2026, 3, 3),
|
||||
)
|
||||
|
||||
self.assertEqual(
|
||||
gen._build_working_supersets.call_count, 2,
|
||||
'Expected regeneration after final warning.',
|
||||
)
|
||||
pref.delete()
|
||||
|
||||
def test_unresolved_final_violations_raise_error(self):
|
||||
"""Generator should fail fast when conformance cannot be achieved."""
|
||||
pref = self._make_preference()
|
||||
gen = self._make_generator(pref)
|
||||
|
||||
gen._build_warmup = MagicMock(return_value=None)
|
||||
gen._build_cooldown = MagicMock(return_value=None)
|
||||
gen._check_quality_gates = MagicMock(return_value=[])
|
||||
gen._adjust_to_time_target = MagicMock(side_effect=lambda spec, *_args, **_kwargs: spec)
|
||||
|
||||
ex = self._create_mock_exercise('Push Only', movement_patterns='upper push')
|
||||
gen._build_working_supersets = MagicMock(return_value=[
|
||||
{
|
||||
'name': 'Working Set 1',
|
||||
'rounds': 3,
|
||||
'rest_between_rounds': 45,
|
||||
'modality': 'reps',
|
||||
'exercises': [{'exercise': ex, 'reps': 10, 'order': 1}],
|
||||
},
|
||||
])
|
||||
gen._get_final_conformance_violations = MagicMock(return_value=[
|
||||
RuleViolation(
|
||||
rule_id='push_pull_ratio',
|
||||
severity='warning',
|
||||
message='Pull:push ratio too low',
|
||||
),
|
||||
])
|
||||
|
||||
with self.assertRaises(ValueError):
|
||||
gen.generate_single_workout(
|
||||
muscle_split={
|
||||
'muscles': ['chest', 'triceps'],
|
||||
'split_type': 'push',
|
||||
'label': 'Push Day',
|
||||
},
|
||||
workout_type=self.hiit_type,
|
||||
scheduled_date=date(2026, 3, 4),
|
||||
)
|
||||
|
||||
self.assertEqual(
|
||||
gen._build_working_supersets.call_count,
|
||||
FINAL_CONFORMANCE_MAX_RETRIES + 1,
|
||||
)
|
||||
pref.delete()
|
||||
|
||||
def test_info_violation_is_not_blocking(self):
|
||||
"""Info-level rules should not fail generation in strict mode."""
|
||||
pref = self._make_preference()
|
||||
gen = self._make_generator(pref)
|
||||
|
||||
gen._build_warmup = MagicMock(return_value=None)
|
||||
gen._build_cooldown = MagicMock(return_value=None)
|
||||
gen._check_quality_gates = MagicMock(return_value=[])
|
||||
gen._adjust_to_time_target = MagicMock(side_effect=lambda spec, *_args, **_kwargs: spec)
|
||||
|
||||
ex = self._create_mock_exercise('Compound Lift', movement_patterns='upper pull')
|
||||
gen._build_working_supersets = MagicMock(return_value=[
|
||||
{
|
||||
'name': 'Working Set 1',
|
||||
'rounds': 3,
|
||||
'rest_between_rounds': 45,
|
||||
'modality': 'reps',
|
||||
'exercises': [{'exercise': ex, 'reps': 8, 'order': 1}],
|
||||
},
|
||||
])
|
||||
gen._get_final_conformance_violations = MagicMock(return_value=[
|
||||
RuleViolation(
|
||||
rule_id='compound_before_isolation',
|
||||
severity='info',
|
||||
message='Compound exercises should generally appear before isolation.',
|
||||
),
|
||||
])
|
||||
|
||||
workout = gen.generate_single_workout(
|
||||
muscle_split={
|
||||
'muscles': ['upper back'],
|
||||
'split_type': 'pull',
|
||||
'label': 'Pull Day',
|
||||
},
|
||||
workout_type=self.strength_type,
|
||||
scheduled_date=date(2026, 3, 5),
|
||||
)
|
||||
|
||||
self.assertIsInstance(workout, dict)
|
||||
self.assertEqual(gen._build_working_supersets.call_count, 1)
|
||||
pref.delete()
|
||||
|
||||
@@ -73,7 +73,7 @@ class TestWorkoutTypeRulesCoverage(TestCase):
|
||||
expected_types = [
|
||||
'traditional_strength_training',
|
||||
'hypertrophy',
|
||||
'hiit',
|
||||
'high_intensity_interval_training',
|
||||
'functional_strength_training',
|
||||
'cross_training',
|
||||
'core_training',
|
||||
@@ -116,14 +116,14 @@ class TestDBCalibrationCoverage(TestCase):
|
||||
|
||||
def test_all_8_types_in_calibration(self):
|
||||
expected_names = [
|
||||
'Functional Strength Training',
|
||||
'Traditional Strength Training',
|
||||
'HIIT',
|
||||
'Cross Training',
|
||||
'Core Training',
|
||||
'Flexibility',
|
||||
'Cardio',
|
||||
'Hypertrophy',
|
||||
'functional_strength_training',
|
||||
'traditional_strength_training',
|
||||
'high_intensity_interval_training',
|
||||
'cross_training',
|
||||
'core_training',
|
||||
'flexibility',
|
||||
'cardio',
|
||||
'hypertrophy',
|
||||
]
|
||||
for name in expected_names:
|
||||
self.assertIn(name, DB_CALIBRATION, f"Missing {name} in DB_CALIBRATION")
|
||||
@@ -137,7 +137,11 @@ class TestHelperFunctions(TestCase):
|
||||
_normalize_type_key('Traditional Strength Training'),
|
||||
'traditional_strength_training',
|
||||
)
|
||||
self.assertEqual(_normalize_type_key('HIIT'), 'hiit')
|
||||
self.assertEqual(_normalize_type_key('HIIT'), 'high_intensity_interval_training')
|
||||
self.assertEqual(
|
||||
_normalize_type_key('high intensity interval training'),
|
||||
'high_intensity_interval_training',
|
||||
)
|
||||
self.assertEqual(_normalize_type_key('cardio'), 'cardio')
|
||||
|
||||
def test_classify_rep_weight(self):
|
||||
@@ -500,6 +504,86 @@ class TestValidateWorkout(TestCase):
|
||||
"Expected superset size warning for 8-exercise superset in strength",
|
||||
)
|
||||
|
||||
def test_superset_focus_repetition_error(self):
|
||||
"""Two curl-family exercises in one superset should produce an error."""
|
||||
curl_a = _make_exercise(
|
||||
name='Alternating Bicep Curls',
|
||||
movement_patterns='upper pull',
|
||||
is_compound=False,
|
||||
exercise_tier='accessory',
|
||||
)
|
||||
curl_b = _make_exercise(
|
||||
name='Bicep Curls',
|
||||
movement_patterns='upper pull',
|
||||
is_compound=False,
|
||||
exercise_tier='accessory',
|
||||
)
|
||||
workout_spec = {
|
||||
'supersets': [
|
||||
_make_superset(name='Warm Up', exercises=[
|
||||
_make_entry(exercise=_make_exercise(is_reps=False), duration=30),
|
||||
], rounds=1),
|
||||
_make_superset(
|
||||
name='Working Set 1',
|
||||
exercises=[
|
||||
_make_entry(exercise=curl_a, reps=10, order=1),
|
||||
_make_entry(exercise=curl_b, reps=10, order=2),
|
||||
],
|
||||
rounds=3,
|
||||
),
|
||||
_make_superset(name='Cool Down', exercises=[
|
||||
_make_entry(exercise=_make_exercise(is_reps=False), duration=30),
|
||||
], rounds=1),
|
||||
],
|
||||
}
|
||||
violations = validate_workout(
|
||||
workout_spec, 'functional_strength_training', 'general_fitness',
|
||||
)
|
||||
repetition_errors = [
|
||||
v for v in violations
|
||||
if v.rule_id == 'superset_focus_repetition' and v.severity == 'error'
|
||||
]
|
||||
self.assertTrue(
|
||||
repetition_errors,
|
||||
f"Expected superset focus repetition error, got {[v.rule_id for v in violations]}",
|
||||
)
|
||||
|
||||
def test_adjacent_focus_repetition_info(self):
|
||||
"""Adjacent working supersets with same focus profile should be advisory."""
|
||||
pull_a = _make_exercise(name='Bicep Curl', movement_patterns='upper pull')
|
||||
pull_b = _make_exercise(name='Hammer Curl', movement_patterns='upper pull')
|
||||
workout_spec = {
|
||||
'supersets': [
|
||||
_make_superset(name='Warm Up', exercises=[
|
||||
_make_entry(exercise=_make_exercise(is_reps=False), duration=30),
|
||||
], rounds=1),
|
||||
_make_superset(
|
||||
name='Working Set 1',
|
||||
exercises=[_make_entry(exercise=pull_a, reps=10, order=1)],
|
||||
rounds=3,
|
||||
),
|
||||
_make_superset(
|
||||
name='Working Set 2',
|
||||
exercises=[_make_entry(exercise=pull_b, reps=10, order=1)],
|
||||
rounds=3,
|
||||
),
|
||||
_make_superset(name='Cool Down', exercises=[
|
||||
_make_entry(exercise=_make_exercise(is_reps=False), duration=30),
|
||||
], rounds=1),
|
||||
],
|
||||
}
|
||||
violations = validate_workout(
|
||||
workout_spec, 'functional_strength_training', 'general_fitness',
|
||||
)
|
||||
adjacent_infos = [
|
||||
v for v in violations
|
||||
if v.rule_id == 'adjacent_superset_focus_repetition' and v.severity == 'info'
|
||||
]
|
||||
self.assertTrue(
|
||||
adjacent_infos,
|
||||
"Expected adjacent superset focus repetition advisory info.",
|
||||
)
|
||||
|
||||
def test_compound_before_isolation_info(self):
|
||||
"""Isolation before compound should produce info violation."""
|
||||
isolation = _make_exercise(
|
||||
|
||||
@@ -210,3 +210,42 @@ class TestWeeklySplit(TestCase):
|
||||
|
||||
bad_pattern.delete()
|
||||
pref.delete()
|
||||
|
||||
@patch('generator.services.workout_generator.random.random', return_value=0.0)
|
||||
def test_diversifies_repetitive_four_day_pattern(self, _mock_random):
|
||||
"""
|
||||
A 4-day DB pattern with 3 lower-body days should be diversified so
|
||||
split_type repetition does not dominate the week.
|
||||
"""
|
||||
lower_a = MuscleGroupSplit.objects.create(
|
||||
muscle_names=['glutes', 'hamstrings', 'core'],
|
||||
label='Lower A',
|
||||
split_type='lower',
|
||||
frequency=9,
|
||||
)
|
||||
lower_b = MuscleGroupSplit.objects.create(
|
||||
muscle_names=['quads', 'glutes', 'calves'],
|
||||
label='Lower B',
|
||||
split_type='lower',
|
||||
frequency=9,
|
||||
)
|
||||
WeeklySplitPattern.objects.create(
|
||||
days_per_week=4,
|
||||
pattern=[self.lower.pk, lower_a.pk, lower_b.pk, self.full_body.pk],
|
||||
pattern_labels=['Lower', 'Lower A', 'Lower B', 'Full Body'],
|
||||
frequency=50,
|
||||
)
|
||||
|
||||
pref = self._make_preference(days_per_week=4)
|
||||
gen = self._make_generator(pref)
|
||||
|
||||
splits, _ = gen._pick_weekly_split()
|
||||
self.assertEqual(len(splits), 4)
|
||||
|
||||
split_type_counts = Counter(s['split_type'] for s in splits)
|
||||
self.assertLessEqual(
|
||||
split_type_counts.get('lower', 0), 2,
|
||||
f"Expected diversification to avoid 3+ lower days, got: {split_type_counts}",
|
||||
)
|
||||
|
||||
pref.delete()
|
||||
|
||||
430
generator/tests/test_workout_research_generation.py
Normal file
430
generator/tests/test_workout_research_generation.py
Normal file
@@ -0,0 +1,430 @@
|
||||
"""
|
||||
Integration tests for research-backed workout generation.
|
||||
|
||||
These tests validate generated workouts against the expectations encoded from
|
||||
workout_research.md in generator.rules_engine.
|
||||
"""
|
||||
|
||||
import random
|
||||
from contextlib import contextmanager
|
||||
from datetime import date, timedelta
|
||||
from itertools import combinations
|
||||
|
||||
from django.contrib.auth import get_user_model
|
||||
from django.core.management import call_command
|
||||
from django.test import TestCase
|
||||
|
||||
from equipment.models import Equipment
|
||||
from equipment.models import WorkoutEquipment
|
||||
from exercise.models import Exercise
|
||||
from generator.models import UserPreference, WorkoutType
|
||||
from generator.rules_engine import DB_CALIBRATION, validate_workout
|
||||
from generator.services.workout_generator import WorkoutGenerator
|
||||
from muscle.models import ExerciseMuscle, Muscle
|
||||
from registered_user.models import RegisteredUser
|
||||
|
||||
|
||||
@contextmanager
|
||||
def seeded_random(seed):
|
||||
"""Use a deterministic random seed without leaking global random state."""
|
||||
state = random.getstate()
|
||||
random.seed(seed)
|
||||
try:
|
||||
yield
|
||||
finally:
|
||||
random.setstate(state)
|
||||
|
||||
|
||||
class TestWorkoutResearchGeneration(TestCase):
|
||||
"""
|
||||
TDD coverage for end-to-end generated workout quality:
|
||||
1) One workout per workout type
|
||||
2) Workouts for deterministic random workout-type pairs
|
||||
"""
|
||||
|
||||
MUSCLE_NAMES = [
|
||||
'chest',
|
||||
'upper back',
|
||||
'lats',
|
||||
'deltoids',
|
||||
'quads',
|
||||
'hamstrings',
|
||||
'glutes',
|
||||
'core',
|
||||
'biceps',
|
||||
'triceps',
|
||||
'calves',
|
||||
'forearms',
|
||||
'abs',
|
||||
'obliques',
|
||||
]
|
||||
|
||||
SPLITS_BY_TYPE = {
|
||||
'traditional_strength_training': {
|
||||
'label': 'Strength Day',
|
||||
'muscles': ['quads', 'hamstrings', 'glutes', 'core'],
|
||||
'split_type': 'lower',
|
||||
},
|
||||
'hypertrophy': {
|
||||
'label': 'Hypertrophy Day',
|
||||
'muscles': ['chest', 'upper back', 'deltoids', 'biceps', 'triceps'],
|
||||
'split_type': 'upper',
|
||||
},
|
||||
'high_intensity_interval_training': {
|
||||
'label': 'HIIT Day',
|
||||
'muscles': ['chest', 'upper back', 'quads', 'core'],
|
||||
'split_type': 'full_body',
|
||||
},
|
||||
'functional_strength_training': {
|
||||
'label': 'Functional Day',
|
||||
'muscles': ['chest', 'upper back', 'quads', 'hamstrings', 'core'],
|
||||
'split_type': 'full_body',
|
||||
},
|
||||
'cross_training': {
|
||||
'label': 'Cross Day',
|
||||
'muscles': ['chest', 'upper back', 'quads', 'core'],
|
||||
'split_type': 'full_body',
|
||||
},
|
||||
'core_training': {
|
||||
'label': 'Core Day',
|
||||
'muscles': ['abs', 'obliques', 'core'],
|
||||
'split_type': 'core',
|
||||
},
|
||||
'flexibility': {
|
||||
'label': 'Mobility Day',
|
||||
'muscles': ['hamstrings', 'glutes', 'core'],
|
||||
'split_type': 'full_body',
|
||||
},
|
||||
'cardio': {
|
||||
'label': 'Cardio Day',
|
||||
'muscles': ['quads', 'calves', 'core'],
|
||||
'split_type': 'cardio',
|
||||
},
|
||||
}
|
||||
|
||||
@classmethod
|
||||
def setUpTestData(cls):
|
||||
User = get_user_model()
|
||||
auth_user = User.objects.create_user(
|
||||
username='research_gen',
|
||||
password='testpass123',
|
||||
)
|
||||
cls.registered_user = RegisteredUser.objects.create(
|
||||
first_name='Research',
|
||||
last_name='Generator',
|
||||
user=auth_user,
|
||||
)
|
||||
|
||||
# Keep equipment filtering permissive without triggering "no equipment" fallback warnings.
|
||||
cls.bodyweight = Equipment.objects.create(
|
||||
name='Bodyweight',
|
||||
category='none',
|
||||
is_weight=False,
|
||||
)
|
||||
|
||||
cls.preference = UserPreference.objects.create(
|
||||
registered_user=cls.registered_user,
|
||||
days_per_week=5,
|
||||
fitness_level=2,
|
||||
primary_goal='general_fitness',
|
||||
secondary_goal='',
|
||||
preferred_workout_duration=90,
|
||||
)
|
||||
cls.preference.available_equipment.add(cls.bodyweight)
|
||||
|
||||
cls.muscles = {}
|
||||
for name in cls.MUSCLE_NAMES:
|
||||
cls.muscles[name] = Muscle.objects.create(name=name)
|
||||
|
||||
cls.workout_types = {}
|
||||
for wt_name, fields in DB_CALIBRATION.items():
|
||||
wt, _ = WorkoutType.objects.get_or_create(
|
||||
name=wt_name,
|
||||
defaults={
|
||||
'display_name': wt_name.replace('_', ' ').title(),
|
||||
'description': f'Calibrated {wt_name}',
|
||||
**fields,
|
||||
},
|
||||
)
|
||||
# Keep DB values aligned with calibration regardless of fixtures/migrations.
|
||||
update_fields = []
|
||||
for field_name, field_value in fields.items():
|
||||
if getattr(wt, field_name) != field_value:
|
||||
setattr(wt, field_name, field_value)
|
||||
update_fields.append(field_name)
|
||||
if update_fields:
|
||||
wt.save(update_fields=update_fields)
|
||||
cls.workout_types[wt_name] = wt
|
||||
cls.preference.preferred_workout_types.add(wt)
|
||||
|
||||
# Populate all workout-structure expectations for all goals/sections.
|
||||
call_command('calibrate_structure_rules')
|
||||
|
||||
cls._seed_exercise_pool()
|
||||
|
||||
@classmethod
def _create_exercise(
    cls,
    name,
    movement_patterns,
    *,
    is_weight,
    is_duration,
    is_reps,
    is_compound,
    exercise_tier='secondary',
    hr_elevation_rating=6,
    complexity_rating=3,
    difficulty_level='intermediate',
    stretch_position='mid',
):
    """Create a single Exercise covering every seeded muscle and return it.

    `muscle_groups` lists all of MUSCLE_NAMES so any split filter matches,
    and one ExerciseMuscle row is attached per seeded muscle.
    """
    attrs = {
        'name': name,
        'movement_patterns': movement_patterns,
        'muscle_groups': ', '.join(cls.MUSCLE_NAMES),
        'is_weight': is_weight,
        'is_duration': is_duration,
        'is_reps': is_reps,
        'is_compound': is_compound,
        'exercise_tier': exercise_tier,
        'hr_elevation_rating': hr_elevation_rating,
        'complexity_rating': complexity_rating,
        'difficulty_level': difficulty_level,
        'stretch_position': stretch_position,
        'estimated_rep_duration': 3.0,
    }
    exercise = Exercise.objects.create(**attrs)
    # Attach broad muscle mappings so split filtering has high coverage.
    for muscle in cls.muscles.values():
        ExerciseMuscle.objects.create(exercise=exercise, muscle=muscle)
    return exercise
|
||||
|
||||
@classmethod
def _seed_exercise_pool(cls):
    """Seed the deterministic exercise pool used by every test.

    Creation order is fixed (60 weighted working moves, then 40 duration
    intervals, then 14 warmups, then 14 cooldowns) so primary keys — and
    therefore seeded selection — stay stable across runs.
    """
    strength_patterns = [
        'lower push - squat, lower push, upper push, upper pull, core',
        'lower pull - hip hinge, lower pull, upper push, upper pull, core',
        'upper push - horizontal, upper push, upper pull, core',
        'upper pull - horizontal, upper pull, upper push, core',
        'upper push - vertical, upper push, upper pull, core',
        'upper pull - vertical, upper pull, upper push, core',
        'carry, core, lower push, upper pull',
        'cardio/locomotion, upper push, upper pull, core',
        'plyometric, lower push, upper pull, upper push, core',
        'arms, upper push, upper pull, core',
    ]

    interval_patterns = [
        'cardio/locomotion, upper push, upper pull, core',
        'plyometric, upper push, upper pull, lower push, core',
        'core - anti-extension, cardio/locomotion, upper push, upper pull',
        'core - anti-rotation, cardio/locomotion, upper push, upper pull',
        'core - anti-lateral flexion, cardio/locomotion, upper push, upper pull',
    ]

    # Weighted rep-based compound movements for working supersets.
    for i in range(60):
        cls._create_exercise(
            name=f'Engine Move {i + 1:02d}',
            movement_patterns=strength_patterns[i % len(strength_patterns)],
            is_weight=True,
            is_duration=False,
            is_reps=True,
            is_compound=True,
            exercise_tier='secondary',
            hr_elevation_rating=6,
        )

    # Duration-capable interval movements with a higher HR rating.
    for i in range(40):
        cls._create_exercise(
            name=f'Interval Move {i + 1:02d}',
            movement_patterns=interval_patterns[i % len(interval_patterns)],
            is_weight=False,
            is_duration=True,
            is_reps=True,
            is_compound=True,
            exercise_tier='secondary',
            hr_elevation_rating=8,
        )

    # Warmups first, then cooldowns: duration-only accessory movements.
    recovery_specs = (
        ('Warmup Flow', 'dynamic stretch, activation, mobility, warm up', 3),
        ('Cooldown Stretch', 'static stretch, mobility, yoga, cool down', 2),
    )
    for prefix, patterns, hr_rating in recovery_specs:
        for i in range(14):
            cls._create_exercise(
                name=f'{prefix} {i + 1:02d}',
                movement_patterns=patterns,
                is_weight=False,
                is_duration=True,
                is_reps=False,
                is_compound=False,
                exercise_tier='accessory',
                hr_elevation_rating=hr_rating,
                complexity_rating=2,
                stretch_position='lengthened',
            )
|
||||
|
||||
def _set_goal(self, goal):
|
||||
self.preference.primary_goal = goal
|
||||
self.preference.secondary_goal = ''
|
||||
self.preference.save(update_fields=['primary_goal', 'secondary_goal'])
|
||||
|
||||
def _generate_workout_for_type(self, wt_name, *, seed, goal='general_fitness', day_offset=0):
    """Generate one seeded 90-minute workout for `wt_name` and return (spec, warnings).

    The goal is applied to the shared preference first; randomness is pinned
    via seeded_random so each call is reproducible.
    """
    self._set_goal(goal)
    scheduled = date(2026, 3, 2) + timedelta(days=day_offset)
    target_split = dict(self.SPLITS_BY_TYPE[wt_name])
    engine = WorkoutGenerator(self.preference, duration_override=90)
    with seeded_random(seed):
        spec = engine.generate_single_workout(
            muscle_split=target_split,
            workout_type=self.workout_types[wt_name],
            scheduled_date=scheduled,
        )
    return spec, list(engine.warnings)
|
||||
|
||||
def _assert_research_alignment(self, workout_spec, wt_name, goal, context, generation_warnings=None):
    """Fail unless `workout_spec` passes strict research validation.

    Blocks on any error/warning-severity violation, requires at least one
    'Working' superset, and — when `generation_warnings` is provided —
    requires that list to be empty.
    """
    violations = validate_workout(workout_spec, wt_name, goal)
    blocking = [v for v in violations if v.severity in {'error', 'warning'}]

    rendered = [f'[{v.severity}] {v.rule_id}: {v.message}' for v in violations]
    self.assertEqual(
        len(blocking),
        0,
        f'{context} failed strict research validation for {wt_name}/{goal}. '
        f'Violations: {rendered}',
    )

    working_sets = [
        superset
        for superset in workout_spec.get('supersets', [])
        if superset.get('name', '').startswith('Working')
    ]
    self.assertGreaterEqual(
        len(working_sets),
        1,
        f'{context} should have at least one working superset.',
    )

    if generation_warnings is not None:
        self.assertEqual(
            generation_warnings,
            [],
            f'{context} emitted generation warnings: {generation_warnings}',
        )
|
||||
|
||||
def test_generate_one_workout_for_each_type_matches_research(self):
    """
    Generate one workout per workout type and ensure each passes
    research-backed rules validation.
    """
    for offset, type_name in enumerate(DB_CALIBRATION, start=1):
        spec, warnings = self._generate_workout_for_type(
            type_name,
            seed=7000 + offset,
            goal='general_fitness',
            day_offset=offset,
        )
        self._assert_research_alignment(
            spec,
            type_name,
            'general_fitness',
            context='single-type generation',
            generation_warnings=warnings,
        )
|
||||
|
||||
def test_generate_deterministic_random_workout_type_pairs(self):
    """
    Generate workouts for deterministic random pairs of workout types.
    Each workout in every pair must satisfy research-backed rules.
    """
    all_pairs = list(combinations(DB_CALIBRATION.keys(), 2))
    rng = random.Random(20260223)
    sampled_pairs = rng.sample(all_pairs, 8)

    for pair_idx, pair in enumerate(sampled_pairs):
        # position 0 -> "first", position 1 -> "second"; seeds and day
        # offsets advance by one between the two members of each pair.
        for position, type_name in enumerate(pair):
            label = 'first' if position == 0 else 'second'
            spec, warnings = self._generate_workout_for_type(
                type_name,
                seed=8100 + pair_idx * 10 + position,
                goal='general_fitness',
                day_offset=pair_idx * 2 + position,
            )
            self._assert_research_alignment(
                spec,
                type_name,
                'general_fitness',
                context=f'random-pair[{pair_idx}] {label}',
                generation_warnings=warnings,
            )
|
||||
|
||||
def test_generation_honors_exclusions_and_equipment_preferences(self):
    """Generated workouts should not include excluded exercises or unavailable equipment."""
    # NOTE: the original assigned `wt = self.workout_types[wt_name]` and never
    # used it; the unused local has been removed.
    wt_name = 'functional_strength_training'

    # Restrict the user to Bodyweight-only equipment and exclude one candidate exercise.
    self.preference.available_equipment.clear()
    self.preference.available_equipment.add(self.bodyweight)
    excluded = Exercise.objects.filter(name='Engine Move 01').first()
    self.assertIsNotNone(excluded)
    self.preference.excluded_exercises.add(excluded)

    workout, generation_warnings = self._generate_workout_for_type(
        wt_name,
        seed=9401,
        goal='general_fitness',
        day_offset=10,
    )

    # Collect every selected exercise across all supersets.
    all_exercises = [
        entry['exercise']
        for ss in workout.get('supersets', [])
        for entry in ss.get('exercises', [])
        if entry.get('exercise') is not None
    ]

    self.assertTrue(all_exercises, 'Expected at least one exercise in generated workout.')
    self.assertNotIn(
        excluded.pk,
        {ex.pk for ex in all_exercises},
        'Excluded exercise was found in generated workout.',
    )

    # Every exercise that declares equipment requirements must be satisfiable
    # by the single available item (Bodyweight); exercises with no declared
    # requirements are intentionally exempt.
    ex_ids = [ex.pk for ex in all_exercises]
    available_equipment_ids = {self.bodyweight.pk}
    requirements = {}
    for ex_id, eq_id in WorkoutEquipment.objects.filter(
        exercise_id__in=ex_ids,
    ).values_list('exercise_id', 'equipment_id'):
        requirements.setdefault(ex_id, set()).add(eq_id)
    bad_equipment = [
        ex_id for ex_id, required_ids in requirements.items()
        if required_ids and not required_ids.issubset(available_equipment_ids)
    ]
    self.assertEqual(
        bad_equipment,
        [],
        f'Found exercises requiring unavailable equipment: {bad_equipment}',
    )
    self.assertEqual(generation_warnings, [])
|
||||
@@ -938,6 +938,16 @@ def preview_day(request):
|
||||
|
||||
# Optional plan_id: exclude exercises from sibling workouts in the same plan (Item #9)
|
||||
plan_id = request.data.get('plan_id')
|
||||
if plan_id in ('', None):
|
||||
plan_id = None
|
||||
elif not isinstance(plan_id, int):
|
||||
try:
|
||||
plan_id = int(plan_id)
|
||||
except (TypeError, ValueError):
|
||||
return Response(
|
||||
{'error': 'plan_id must be an integer.'},
|
||||
status=status.HTTP_400_BAD_REQUEST,
|
||||
)
|
||||
|
||||
try:
|
||||
from generator.services.workout_generator import WorkoutGenerator
|
||||
@@ -945,7 +955,7 @@ def preview_day(request):
|
||||
generator = WorkoutGenerator(preference)
|
||||
|
||||
# If plan_id is provided, exclude sibling workout exercises
|
||||
if plan_id:
|
||||
if plan_id is not None:
|
||||
try:
|
||||
plan = GeneratedWeeklyPlan.objects.get(
|
||||
pk=plan_id,
|
||||
@@ -974,6 +984,8 @@ def preview_day(request):
|
||||
workout_type=workout_type,
|
||||
scheduled_date=scheduled_date,
|
||||
)
|
||||
if plan_id is not None:
|
||||
day_preview['plan_id'] = plan_id
|
||||
except Exception as e:
|
||||
return Response(
|
||||
{'error': f'Day preview generation failed: {str(e)}'},
|
||||
|
||||
@@ -84,14 +84,14 @@ export function WorkoutTypesStep({
|
||||
isSelected ? "text-accent" : "text-zinc-100"
|
||||
}`}
|
||||
>
|
||||
{wt.name}
|
||||
{wt.display_name || wt.name}
|
||||
</span>
|
||||
<Badge variant={intensityVariant[wt.typical_intensity] || "default"}>
|
||||
{wt.typical_intensity}
|
||||
</Badge>
|
||||
</div>
|
||||
{wt.description && (
|
||||
<p className="text-sm text-zinc-400 line-clamp-2">
|
||||
<p className="text-sm text-zinc-400">
|
||||
{wt.description}
|
||||
</p>
|
||||
)}
|
||||
|
||||
@@ -63,7 +63,10 @@ function XIcon({ className = "" }: { className?: string }) {
|
||||
|
||||
// Resolve a Django-served media path to a fetchable URL.
// In local dev the browser talks to Next while Django is exposed on :8001
// (docker-compose maps 8001 -> 8000), so the URL must be absolutized; in
// production the relative path is served same-origin via Next rewrites.
function mediaUrl(path: string): string {
  // During SSR there is no window; leave the path relative.
  if (typeof window === "undefined") return path;
  if (window.location.hostname === "localhost" || window.location.hostname === "127.0.0.1") {
    // Dev: point directly at the Django port on the same host.
    return `${window.location.protocol}//${window.location.hostname}:8001${path}`;
  }
  // Production: relative path, proxied through the same origin.
  return path;
}
|
||||
|
||||
function PlayIcon({ className = "" }: { className?: string }) {
|
||||
@@ -301,6 +304,7 @@ export function DayCard({
|
||||
focus_area: previewDay.focus_area,
|
||||
workout_type_id: previewDay.workout_type_id,
|
||||
date: previewDay.date,
|
||||
plan_id: previewDay.plan_id,
|
||||
});
|
||||
onPreviewDayChange(previewDayIndex, newDay);
|
||||
} catch (err) {
|
||||
@@ -421,6 +425,17 @@ export function DayCard({
|
||||
)}
|
||||
</div>
|
||||
|
||||
{previewDay.warnings && previewDay.warnings.length > 0 && (
|
||||
<div className="rounded-lg border border-yellow-500/30 bg-yellow-500/10 p-2 text-xs text-yellow-200">
|
||||
<p className="font-semibold mb-1">Warnings</p>
|
||||
<ul className="list-disc list-inside space-y-0.5">
|
||||
{previewDay.warnings.map((w, idx) => (
|
||||
<li key={idx}>{w}</li>
|
||||
))}
|
||||
</ul>
|
||||
</div>
|
||||
)}
|
||||
|
||||
{/* Supersets */}
|
||||
{spec && spec.supersets.length > 0 && (
|
||||
<div className="flex flex-col gap-2">
|
||||
|
||||
@@ -4,7 +4,10 @@ import type { SupersetExercise } from "@/lib/types";
|
||||
|
||||
// Resolve a Django-served media path to a fetchable URL.
// In local dev the browser talks to Next while Django is exposed on :8001
// (docker-compose maps 8001 -> 8000), so the URL must be absolutized; in
// production the relative path is served same-origin via Next rewrites.
function mediaUrl(path: string): string {
  // During SSR there is no window; leave the path relative.
  if (typeof window === "undefined") return path;
  if (window.location.hostname === "localhost" || window.location.hostname === "127.0.0.1") {
    // Dev: point directly at the Django port on the same host.
    return `${window.location.protocol}//${window.location.hostname}:8001${path}`;
  }
  // Production: relative path, proxied through the same origin.
  return path;
}
|
||||
|
||||
interface ExerciseRowProps {
|
||||
|
||||
7
werkout-frontend/eslint.config.mjs
Normal file
7
werkout-frontend/eslint.config.mjs
Normal file
@@ -0,0 +1,7 @@
|
||||
// Flat ESLint config: apply the Next.js core-web-vitals rule set as-is.
import nextVitals from "eslint-config-next/core-web-vitals";

export default [...nextVitals];
|
||||
@@ -1,6 +1,10 @@
|
||||
/** @type {import('next').NextConfig} */
|
||||
// v2
|
||||
const nextConfig = {
|
||||
skipTrailingSlashRedirect: true,
|
||||
experimental: {
|
||||
proxyTimeout: 120000, // 2 minutes for long-running workout generation
|
||||
},
|
||||
images: {
|
||||
remotePatterns: [
|
||||
{
|
||||
@@ -16,12 +20,24 @@ const nextConfig = {
|
||||
],
|
||||
},
|
||||
async rewrites() {
|
||||
return [
|
||||
{
|
||||
source: "/media/:path*",
|
||||
destination: "http://localhost:8000/media/:path*",
|
||||
},
|
||||
const djangoUrl = process.env.DJANGO_INTERNAL_URL || "http://localhost:8000";
|
||||
// Helper: for each Django prefix, create two rewrites:
|
||||
// 1. with trailing slash preserved
|
||||
// 2. without trailing slash → add it (Django requires trailing slashes)
|
||||
const djangoPrefixes = [
|
||||
"media", "registered_user", "exercise", "muscle",
|
||||
"equipment", "workout", "generator", "videos", "admin",
|
||||
];
|
||||
return djangoPrefixes.flatMap((prefix) => [
|
||||
{
|
||||
source: `/${prefix}/:path*/`,
|
||||
destination: `${djangoUrl}/${prefix}/:path*/`,
|
||||
},
|
||||
{
|
||||
source: `/${prefix}/:path*`,
|
||||
destination: `${djangoUrl}/${prefix}/:path*/`,
|
||||
},
|
||||
]);
|
||||
},
|
||||
};
|
||||
|
||||
|
||||
@@ -6,7 +6,7 @@
|
||||
"dev": "next dev",
|
||||
"build": "next build",
|
||||
"start": "next start",
|
||||
"lint": "next lint"
|
||||
"lint": "eslint ."
|
||||
},
|
||||
"keywords": [],
|
||||
"author": "",
|
||||
|
||||
@@ -156,7 +156,7 @@ if os.environ.get("DATABASE_URL"):
|
||||
# "APNS_USE_SANDBOX": False
|
||||
# }
|
||||
|
||||
CSRF_TRUSTED_ORIGINS = ['https://*.werkout.fitness']
|
||||
CSRF_TRUSTED_ORIGINS = ['https://*.werkout.fitness', 'https://*.treytartt.com']
|
||||
SECRET_KEY = os.environ.get("SECRET_KEY", 'secret')
|
||||
|
||||
# Parse the DATABASE_URL env var.
|
||||
|
||||
Reference in New Issue
Block a user