Compare commits
10 Commits
898160aa46
...
dev
| Author | SHA1 | Date |
|---|---|---|
|
|
3ffabf35e2 | ||
|
|
a1f458ea2b | ||
|
|
8e14fd5774 | ||
|
|
c80c66c2e5 | ||
|
|
63b57a83ab | ||
|
|
909c75d8ee | ||
|
|
03681c532d | ||
|
|
1c61b80731 | ||
|
|
2a16b75c4b | ||
|
|
fdebf7e499 |
13
.dockerignore
Normal file
13
.dockerignore
Normal file
@@ -0,0 +1,13 @@
|
||||
__pycache__
|
||||
*.pyc
|
||||
*.pyo
|
||||
.git
|
||||
.gitignore
|
||||
*.sqlite3
|
||||
werkout-frontend/node_modules
|
||||
werkout-frontend/.next
|
||||
media/
|
||||
AI/
|
||||
*.mp4
|
||||
*.m3u8
|
||||
media/**/*.ts
|
||||
25
.env.example
Normal file
25
.env.example
Normal file
@@ -0,0 +1,25 @@
|
||||
# Django core
|
||||
DEBUG=false
|
||||
SECRET_KEY=your-secret-key-here
|
||||
|
||||
# Allowed hosts (comma-separated)
|
||||
ALLOWED_HOSTS=yourdomain.com
|
||||
|
||||
# CORS (comma-separated origins)
|
||||
CORS_ALLOWED_ORIGINS=https://yourdomain.com
|
||||
|
||||
# Database (used when DATABASE_URL is not set)
|
||||
DB_NAME=werkout
|
||||
DB_USER=werkout
|
||||
DB_PASSWORD=your-db-password
|
||||
DB_HOST=db
|
||||
DB_PORT=5432
|
||||
|
||||
# Database URL (overrides individual DB_* vars when set)
|
||||
DATABASE_URL=postgres://user:password@db:5432/werkout
|
||||
|
||||
# Redis
|
||||
REDIS_URL=redis://redis:6379
|
||||
|
||||
# HTTPS redirect (set to false if behind a reverse proxy that handles SSL)
|
||||
SECURE_SSL_REDIRECT=true
|
||||
19
.gitignore
vendored
19
.gitignore
vendored
@@ -138,3 +138,22 @@ GitHub.sublime-settings
|
||||
!.vscode/extensions.json
|
||||
.history
|
||||
*.sqlite3
|
||||
|
||||
# Claude Code
|
||||
.claude/
|
||||
|
||||
# Media uploads
|
||||
media/
|
||||
|
||||
# Node / Next.js
|
||||
node_modules/
|
||||
.next/
|
||||
werkout-frontend/node_modules/
|
||||
werkout-frontend/.next/
|
||||
|
||||
# Supervisor
|
||||
supervisord.pid
|
||||
supervisord.log
|
||||
|
||||
# AI training data (contains PII)
|
||||
AI/
|
||||
|
||||
43
Dockerfile
43
Dockerfile
@@ -1,17 +1,50 @@
|
||||
# syntax=docker/dockerfile:1
|
||||
FROM python:3.9.13
|
||||
|
||||
# ---- Stage 1: Build Next.js frontend ----
|
||||
FROM node:20-slim AS frontend-build
|
||||
WORKDIR /frontend
|
||||
COPY werkout-frontend/package.json werkout-frontend/package-lock.json ./
|
||||
RUN npm ci
|
||||
COPY werkout-frontend/ ./
|
||||
ENV NEXT_PUBLIC_API_URL=
|
||||
RUN rm -rf .next && npm run build
|
||||
|
||||
# ---- Stage 2: Final image (Python + Node runtime) ----
|
||||
FROM python:3.12-slim
|
||||
ENV PYTHONDONTWRITEBYTECODE=1
|
||||
ENV PYTHONUNBUFFERED=1
|
||||
|
||||
RUN apt-get update
|
||||
# System deps
|
||||
RUN apt-get update && apt-get install -y \
|
||||
build-essential libpq-dev \
|
||||
swig libssl-dev dpkg-dev netcat-openbsd ffmpeg \
|
||||
supervisor curl \
|
||||
&& rm -rf /var/lib/apt/lists/*
|
||||
|
||||
RUN apt-get install -y swig libssl-dev dpkg-dev netcat ffmpeg
|
||||
# Install Node.js 20 for Next.js runtime
|
||||
RUN curl -fsSL https://deb.nodesource.com/setup_20.x | bash - \
|
||||
&& apt-get install -y nodejs \
|
||||
&& rm -rf /var/lib/apt/lists/*
|
||||
|
||||
# Python deps
|
||||
RUN pip install -U pip
|
||||
|
||||
WORKDIR /code
|
||||
COPY requirements.txt /code/
|
||||
RUN pip install -r requirements.txt
|
||||
|
||||
# Copy Django project
|
||||
COPY . /code/
|
||||
|
||||
RUN /code/manage.py collectstatic --noinput
|
||||
# Copy built frontend (overwrite source with built version)
|
||||
COPY --from=frontend-build /frontend/.next /code/werkout-frontend/.next
|
||||
COPY --from=frontend-build /frontend/node_modules /code/werkout-frontend/node_modules
|
||||
|
||||
# Collect static files
|
||||
RUN /code/manage.py collectstatic --noinput || true
|
||||
|
||||
# Supervisor config
|
||||
COPY supervisord.conf /etc/supervisor/conf.d/supervisord.conf
|
||||
|
||||
EXPOSE 8000 3000
|
||||
|
||||
CMD ["supervisord", "-c", "/etc/supervisor/conf.d/supervisord.conf"]
|
||||
|
||||
726
IMPLEMENTATION_PLAN.md
Normal file
726
IMPLEMENTATION_PLAN.md
Normal file
@@ -0,0 +1,726 @@
|
||||
# Werkout Generator - Complete Fix Plan
|
||||
|
||||
## Scope
|
||||
|
||||
Fix every issue found in the end-to-end audit: safety bugs, unused data, preference gaps, data consistency, calibration conflicts, and variety enforcement. 14 phases, ordered by dependency and impact.
|
||||
|
||||
---
|
||||
|
||||
## Phase 1: Consolidate WorkoutType Defaults (Single Source of Truth)
|
||||
|
||||
**Problem:** `calibrate_workout_types.py` and `DEFAULT_WORKOUT_TYPES` in `workout_analyzer.py` conflict. Running `analyze_workouts` after calibration overwrites calibrated values.
|
||||
|
||||
**Files:**
|
||||
- `generator/services/workout_analyzer.py` (~lines 58-195) - Update `DEFAULT_WORKOUT_TYPES` to use calibrated (research-backed) values directly
|
||||
- `generator/management/commands/calibrate_workout_types.py` - Delete this file (no longer needed)
|
||||
- `generator/management/commands/analyze_workouts.py` - Remove any reference to calibration
|
||||
|
||||
**Changes:**
|
||||
1. In `workout_analyzer.py`, update `DEFAULT_WORKOUT_TYPES` dict to merge in all calibrated values from `CALIBRATIONS` in `calibrate_workout_types.py`:
|
||||
- `functional_strength_training`: rep_min=6, rep_max=15, rest=90, intensity=high, duration_bias=0.15, superset_size_min=2, superset_size_max=4
|
||||
- `traditional_strength_training`: rep_min=3, rep_max=8, rest=150, intensity=high, duration_bias=0.0, round_min=4, round_max=6
|
||||
- `hypertrophy`: rep_min=6, rep_max=15, rest=90, intensity=high, duration_bias=0.1
|
||||
- `cross_training`: duration_bias=0.5
|
||||
- `core_training`: duration_bias=0.6
|
||||
- `flexibility`: duration_bias=1.0
|
||||
2. Populate `display_name` for all 8 types:
|
||||
- `functional_strength_training` -> "Functional Strength"
|
||||
- `traditional_strength_training` -> "Traditional Strength"
|
||||
- `high_intensity_interval_training` -> "HIIT"
|
||||
- `cross_training` -> "Cross Training"
|
||||
- `core_training` -> "Core Training"
|
||||
- `flexibility` -> "Flexibility"
|
||||
- `cardio` -> "Cardio"
|
||||
- `hypertrophy` -> "Hypertrophy"
|
||||
3. Change `get_or_create` in analyzer to use `update_or_create` so re-running always applies the latest defaults without needing a separate calibration step.
|
||||
4. Delete `calibrate_workout_types.py`.
|
||||
|
||||
**Test:** Run `python manage.py analyze_workouts --dry-run` and verify all 8 types show correct calibrated values and display_names.
|
||||
|
||||
---
|
||||
|
||||
## Phase 2: Fix Bodyweight Fallback Safety Gap
|
||||
|
||||
**Problem:** `_get_bodyweight_queryset()` (exercise_selector.py:400-446) skips fitness level and injury filters. A beginner with a knee injury could get advanced plyometric bodyweight exercises.
|
||||
|
||||
**Files:**
|
||||
- `generator/services/exercise_selector.py` - lines 400-446
|
||||
|
||||
**Changes:**
|
||||
1. Add `fitness_level` parameter to `_get_bodyweight_queryset()` signature (currently only takes `muscle_groups` and `is_duration_based`).
|
||||
2. After the muscle group filtering (line ~440), add the same fitness level filtering that exists in `_get_filtered_queryset`:
|
||||
```python
|
||||
# Fitness level safety (same as main queryset lines 367-376)
|
||||
if fitness_level is not None and fitness_level <= 1:
|
||||
for pattern in self.ADVANCED_PATTERNS:
|
||||
qs = qs.exclude(movement_patterns__icontains=pattern)
|
||||
qs = qs.exclude(difficulty_level='advanced')
|
||||
```
|
||||
3. Add injury filtering (same logic as lines 379-397):
|
||||
```python
|
||||
injuries = (self.user_preference.injuries_limitations or '').lower()
|
||||
if injuries:
|
||||
# Apply same keyword-based filtering as _get_filtered_queryset
|
||||
...
|
||||
```
|
||||
4. Update the call site at line ~172 to pass `fitness_level=self.user_preference.fitness_level`.
|
||||
|
||||
**Test:** Create a test with a beginner user who has knee injury + limited equipment. Verify bodyweight fallback never returns advanced or high-impact exercises.
|
||||
|
||||
---
|
||||
|
||||
## Phase 3: Use `hr_elevation_rating` for Warmup & Cooldown Quality
|
||||
|
||||
**Problem:** `hr_elevation_rating` (1-10) is populated on every exercise but never used. Warmup could select high-HR exercises, cooldown could select activation exercises.
|
||||
|
||||
**Files:**
|
||||
- `generator/services/exercise_selector.py` - `select_warmup_exercises()` (lines 193-230) and `select_cooldown_exercises()` (lines 231-279)
|
||||
|
||||
**Changes:**
|
||||
|
||||
### Warmup (lines 193-230):
|
||||
1. After the existing filtering and before `_weighted_pick` (line ~210), add HR-based preference:
|
||||
```python
|
||||
# Prefer moderate HR for warmup (gradual elevation)
|
||||
warmup_hr_preferred = preferred_qs.filter(hr_elevation_rating__gte=2, hr_elevation_rating__lte=5)
|
||||
warmup_hr_other = preferred_qs.filter(
|
||||
Q(hr_elevation_rating__isnull=True) | Q(hr_elevation_rating__lt=2) | Q(hr_elevation_rating__gt=5)
|
||||
)
|
||||
```
|
||||
2. Pass `warmup_hr_preferred` as the preferred queryset to `_weighted_pick`, with `warmup_hr_other` as other.
|
||||
3. In the fallback path (lines 214-224), also apply the HR preference.
|
||||
|
||||
### Cooldown (lines 231-279):
|
||||
1. After the R11 weight filtering (line ~252), add HR ceiling:
|
||||
```python
|
||||
# Hard filter: cooldown exercises should have low HR elevation
|
||||
qs = qs.filter(Q(hr_elevation_rating__lte=3) | Q(hr_elevation_rating__isnull=True))
|
||||
```
|
||||
2. Apply the same filter in the fallback path (lines 260-273).
|
||||
|
||||
**Test:** Generate workouts and verify warmup exercises have HR ratings 2-5, cooldown exercises have HR ratings <= 3.
|
||||
|
||||
---
|
||||
|
||||
## Phase 4: Use `complexity_rating` for Beginner Safety
|
||||
|
||||
**Problem:** `complexity_rating` (1-5) is populated but never used. Beginners can get complexity-5 exercises (Olympic variations) that passed the pattern filter.
|
||||
|
||||
**Files:**
|
||||
- `generator/services/exercise_selector.py` - `_get_filtered_queryset()` (lines 367-376)
|
||||
|
||||
**Changes:**
|
||||
1. After the existing fitness level filtering (line 376), add complexity cap:
|
||||
```python
|
||||
# Complexity cap by fitness level
|
||||
if fitness_level is not None:
|
||||
complexity_caps = {1: 3, 2: 4, 3: 5, 4: 5} # Beginner max 3, Intermediate max 4
|
||||
max_complexity = complexity_caps.get(fitness_level, 5)
|
||||
qs = qs.filter(
|
||||
Q(complexity_rating__lte=max_complexity) | Q(complexity_rating__isnull=True)
|
||||
)
|
||||
```
|
||||
2. Also add this to `_get_bodyweight_queryset()` (from Phase 2 changes).
|
||||
|
||||
**Test:** Generate workouts for a beginner. Verify no exercises with complexity_rating > 3 appear.
|
||||
|
||||
---
|
||||
|
||||
## Phase 5: Use `stretch_position` for Hypertrophy Workouts
|
||||
|
||||
**Problem:** `stretch_position` (lengthened/mid/shortened) is populated but unused. Hypertrophy workouts should balance stretch positions per muscle group for optimal stimulus.
|
||||
|
||||
**Files:**
|
||||
- `generator/services/exercise_selector.py` - `select_exercises()` (lines 55-192)
|
||||
- `generator/services/workout_generator.py` - `_build_working_supersets()` (lines 1176-1398)
|
||||
|
||||
**Changes:**
|
||||
|
||||
### In exercise_selector.py:
|
||||
1. Add a new method `_balance_stretch_positions(self, selected, muscle_groups)`:
|
||||
```python
|
||||
def _balance_stretch_positions(self, selected, muscle_groups, base_qs):
|
||||
"""For hypertrophy, ensure we don't have all exercises in the same stretch position for a muscle."""
|
||||
if len(selected) < 3:
|
||||
return selected
|
||||
|
||||
positions = [ex.stretch_position for ex in selected if ex.stretch_position]
|
||||
if not positions:
|
||||
return selected
|
||||
|
||||
# If >66% of exercises have the same stretch position, try to swap one
|
||||
from collections import Counter
|
||||
counts = Counter(positions)
|
||||
most_common_pos, most_common_count = counts.most_common(1)[0]
|
||||
if most_common_count / len(positions) <= 0.66:
|
||||
return selected
|
||||
|
||||
# Find underrepresented position
|
||||
all_positions = {'lengthened', 'mid', 'shortened'}
|
||||
missing = all_positions - set(positions)
|
||||
target_position = missing.pop() if missing else None
|
||||
if not target_position:
|
||||
return selected
|
||||
|
||||
# Try to swap last accessory with an exercise of the missing position
|
||||
replacement_qs = base_qs.filter(stretch_position=target_position).exclude(
|
||||
pk__in=self.used_exercise_ids
|
||||
)
|
||||
replacement = replacement_qs.first()
|
||||
if replacement:
|
||||
# Swap last non-primary exercise
|
||||
for i in range(len(selected) - 1, -1, -1):
|
||||
if selected[i].exercise_tier != 'primary':
|
||||
selected[i] = replacement
|
||||
break
|
||||
|
||||
return selected
|
||||
```
|
||||
|
||||
### In workout_generator.py:
|
||||
2. In `_build_working_supersets()`, after exercise selection (line ~1319), add:
|
||||
```python
|
||||
# Balance stretch positions for hypertrophy goals
|
||||
if self.preference.primary_goal == 'hypertrophy':
|
||||
exercises = self.exercise_selector._balance_stretch_positions(
|
||||
exercises, superset_muscles, base_qs
|
||||
)
|
||||
```
|
||||
|
||||
**Test:** Generate hypertrophy workouts. Verify exercises for the same muscle don't all share the same stretch position.
|
||||
|
||||
---
|
||||
|
||||
## Phase 6: Strengthen Recently-Used Exclusion
|
||||
|
||||
**Problem:** Recently used exercises are only down-weighted (3x to 1x in `_weighted_pick`), not excluded. Users can get the same exercises repeatedly.
|
||||
|
||||
**Files:**
|
||||
- `generator/services/exercise_selector.py` - `_weighted_pick()` (lines 447-496) and `_get_filtered_queryset()` (lines 284-399)
|
||||
- `generator/services/workout_generator.py` - lines 577-589
|
||||
|
||||
**Changes:**
|
||||
|
||||
### In exercise_selector.py:
|
||||
1. Add a `hard_exclude_ids` set to `__init__` (line 42-46):
|
||||
```python
|
||||
def __init__(self, user_preference, recently_used_ids=None, hard_exclude_ids=None):
|
||||
self.user_preference = user_preference
|
||||
self.used_exercise_ids = set()
|
||||
self.recently_used_ids = recently_used_ids or set()
|
||||
self.hard_exclude_ids = hard_exclude_ids or set() # Exercises from last 3 workouts
|
||||
```
|
||||
|
||||
2. In `_get_filtered_queryset()`, after excluding `used_exercise_ids` (line 304-305), add:
|
||||
```python
|
||||
# Hard exclude exercises from very recent workouts (last 3)
|
||||
if self.hard_exclude_ids:
|
||||
qs = qs.exclude(pk__in=self.hard_exclude_ids)
|
||||
```
|
||||
|
||||
3. Keep the existing soft penalty in `_weighted_pick` (lines 467-468) for exercises from workouts 4-7.
|
||||
|
||||
### In workout_generator.py:
|
||||
4. Split recently used into two tiers (around line 577-589):
|
||||
```python
|
||||
# Last 3 workouts: hard exclude
|
||||
very_recent_workout_ids = list(
|
||||
GeneratedWorkout.objects.filter(...)
|
||||
.order_by('-scheduled_date')[:3]
|
||||
.values_list('workout_id', flat=True)
|
||||
)
|
||||
hard_exclude_ids = set(
|
||||
SupersetExercise.objects.filter(superset__workout_id__in=very_recent_workout_ids)
|
||||
.values_list('exercise_id', flat=True)
|
||||
) if very_recent_workout_ids else set()
|
||||
|
||||
# Workouts 4-7: soft penalty (existing behavior)
|
||||
older_recent_ids = list(
|
||||
GeneratedWorkout.objects.filter(...)
|
||||
.order_by('-scheduled_date')[3:7]
|
||||
.values_list('workout_id', flat=True)
|
||||
)
|
||||
soft_penalty_ids = set(
|
||||
SupersetExercise.objects.filter(superset__workout_id__in=older_recent_ids)
|
||||
.values_list('exercise_id', flat=True)
|
||||
) if older_recent_ids else set()
|
||||
|
||||
self.exercise_selector = ExerciseSelector(
|
||||
self.preference,
|
||||
recently_used_ids=soft_penalty_ids,
|
||||
hard_exclude_ids=hard_exclude_ids
|
||||
)
|
||||
```
|
||||
|
||||
**Test:** Generate 4 weekly plans in sequence. Verify exercises from the most recent 3 workouts never appear in the next plan (unless the pool is too small and fallback kicks in).
|
||||
|
||||
---
|
||||
|
||||
## Phase 7: Apply Target Muscles to ALL Split Types
|
||||
|
||||
**Problem:** User's `target_muscle_groups` are only injected on `full_body` days (workout_generator.py:681-694). On push/pull/legs days, they're completely ignored.
|
||||
|
||||
**Files:**
|
||||
- `generator/services/workout_generator.py` - `generate_single_workout()` (lines 681-694)
|
||||
|
||||
**Changes:**
|
||||
1. Replace the full_body-only logic with universal target muscle integration:
|
||||
```python
|
||||
# Get user's target muscle groups
|
||||
user_target_muscles = list(
|
||||
self.preference.target_muscle_groups.values_list('name', flat=True)
|
||||
)
|
||||
|
||||
if user_target_muscles:
|
||||
normalized_targets = [normalize_muscle_name(m) for m in user_target_muscles]
|
||||
|
||||
if split_type == 'full_body':
|
||||
# Full body: inject all target muscles
|
||||
for m in normalized_targets:
|
||||
if m not in target_muscles:
|
||||
target_muscles.append(m)
|
||||
else:
|
||||
# Other splits: inject target muscles that are RELEVANT to this split type
|
||||
split_relevant_muscles = set()
|
||||
categories = MUSCLE_GROUP_CATEGORIES # from muscle_normalizer
|
||||
|
||||
# Map split_type to relevant muscle categories
|
||||
split_muscle_map = {
|
||||
'push': categories.get('upper_push', []),
|
||||
'pull': categories.get('upper_pull', []),
|
||||
'upper': categories.get('upper_push', []) + categories.get('upper_pull', []),
|
||||
'lower': categories.get('lower_push', []) + categories.get('lower_pull', []),
|
||||
'legs': categories.get('lower_push', []) + categories.get('lower_pull', []),
|
||||
'core': categories.get('core', []),
|
||||
}
|
||||
relevant = set(split_muscle_map.get(split_type, []))
|
||||
|
||||
# Add user targets that overlap with this split's muscle domain
|
||||
for m in normalized_targets:
|
||||
if m in relevant and m not in target_muscles:
|
||||
target_muscles.append(m)
|
||||
```
|
||||
|
||||
**Test:** Set target muscles to ["biceps", "glutes"]. Generate a PPL plan. Verify "biceps" appears in pull day targets and "glutes" appears in legs day targets. Neither should appear in push day targets.
|
||||
|
||||
---
|
||||
|
||||
## Phase 8: Expand Injury Filtering
|
||||
|
||||
**Problem:** Only 3 injury types (knee/back/shoulder) with freeform text matching. No support for wrist, ankle, hip, elbow, or neck.
|
||||
|
||||
**Files:**
|
||||
- `generator/services/exercise_selector.py` - `_get_filtered_queryset()` (lines 379-397)
|
||||
- `generator/models.py` - `UserPreference` model (line 112)
|
||||
|
||||
**Changes:**
|
||||
|
||||
### Model change (generator/models.py):
|
||||
1. Add structured injury field alongside the existing freeform field:
|
||||
```python
|
||||
INJURY_CHOICES = [
|
||||
('knee', 'Knee'),
|
||||
('back', 'Back'),
|
||||
('shoulder', 'Shoulder'),
|
||||
('wrist', 'Wrist'),
|
||||
('ankle', 'Ankle'),
|
||||
('hip', 'Hip'),
|
||||
('elbow', 'Elbow'),
|
||||
('neck', 'Neck'),
|
||||
]
|
||||
|
||||
injury_types = JSONField(default=list, blank=True, help_text='List of injury type strings')
|
||||
```
|
||||
|
||||
2. Create migration for the new field.
|
||||
|
||||
### Serializer change (generator/serializers.py):
|
||||
3. Add `injury_types` to `UserPreferenceSerializer` and `UserPreferenceUpdateSerializer` fields lists.
|
||||
|
||||
### Filtering logic (exercise_selector.py):
|
||||
4. Refactor injury filtering at lines 379-397 to use both old text field AND new structured field:
|
||||
```python
|
||||
# Structured injury types (new)
|
||||
injury_types = set(self.user_preference.injury_types or [])
|
||||
|
||||
# Also parse freeform text for backward compatibility
|
||||
injuries_text = (self.user_preference.injuries_limitations or '').lower()
|
||||
keyword_map = {
|
||||
'knee': ['knee', 'acl', 'mcl', 'meniscus', 'patella'],
|
||||
'back': ['back', 'spine', 'spinal', 'disc', 'herniat'],
|
||||
'shoulder': ['shoulder', 'rotator', 'labrum', 'impingement'],
|
||||
'wrist': ['wrist', 'carpal'],
|
||||
'ankle': ['ankle', 'achilles', 'plantar'],
|
||||
'hip': ['hip', 'labral', 'hip flexor'],
|
||||
'elbow': ['elbow', 'tennis elbow', 'golfer'],
|
||||
'neck': ['neck', 'cervical'],
|
||||
}
|
||||
for injury_type, keywords in keyword_map.items():
|
||||
if any(kw in injuries_text for kw in keywords):
|
||||
injury_types.add(injury_type)
|
||||
|
||||
# Apply filters per injury type
|
||||
if 'knee' in injury_types:
|
||||
qs = qs.exclude(impact_level='high')
|
||||
if 'back' in injury_types:
|
||||
qs = qs.exclude(impact_level='high')
|
||||
qs = qs.exclude(
|
||||
Q(movement_patterns__icontains='hip hinge') &
|
||||
Q(is_weight=True) &
|
||||
Q(difficulty_level='advanced')
|
||||
)
|
||||
if 'shoulder' in injury_types:
|
||||
qs = qs.exclude(movement_patterns__icontains='upper push - vertical')
|
||||
if 'wrist' in injury_types:
|
||||
qs = qs.exclude(
|
||||
Q(movement_patterns__icontains='olympic') |
|
||||
Q(name__icontains='wrist curl') |
|
||||
Q(name__icontains='handstand')
|
||||
)
|
||||
if 'ankle' in injury_types:
|
||||
qs = qs.exclude(impact_level__in=['high', 'medium'])
|
||||
if 'hip' in injury_types:
|
||||
qs = qs.exclude(
|
||||
Q(movement_patterns__icontains='hip hinge') &
|
||||
Q(difficulty_level='advanced')
|
||||
)
|
||||
qs = qs.exclude(impact_level='high')
|
||||
if 'elbow' in injury_types:
|
||||
qs = qs.exclude(
|
||||
Q(movement_patterns__icontains='arms') &
|
||||
Q(is_weight=True) &
|
||||
Q(difficulty_level='advanced')
|
||||
)
|
||||
if 'neck' in injury_types:
|
||||
qs = qs.exclude(name__icontains='neck')
|
||||
qs = qs.exclude(
|
||||
Q(movement_patterns__icontains='olympic') &
|
||||
Q(difficulty_level='advanced')
|
||||
)
|
||||
```
|
||||
|
||||
**Test:** Set injury_types=["knee", "wrist"]. Verify no high-impact or Olympic/handstand exercises appear.
|
||||
|
||||
---
|
||||
|
||||
## Phase 9: Fix Cardio Data & Ensure Full Rule Coverage
|
||||
|
||||
**Problem:** ML extraction can produce broken cardio rules (23-25 rounds). Some workout types may have no rules at all after analysis.
|
||||
|
||||
**Files:**
|
||||
- `generator/services/workout_analyzer.py` - `_step5_extract_workout_structure_rules()` (~lines 818-1129)
|
||||
- `generator/management/commands/calibrate_structure_rules.py` - Merge into analyzer
|
||||
|
||||
**Changes:**
|
||||
|
||||
### In workout_analyzer.py:
|
||||
1. In `_step5_extract_workout_structure_rules()`, add sanity bounds for per-superset rounds:
|
||||
```python
|
||||
# Clamp rounds to per-superset range (not total workout rounds)
|
||||
typical_rounds = max(1, min(8, typical_rounds)) # Was min(50)
|
||||
```
|
||||
|
||||
2. Add a validation pass after rule extraction:
|
||||
```python
|
||||
def _ensure_full_rule_coverage(self):
|
||||
"""Ensure every WorkoutType has at least one rule per (section, goal) combo."""
|
||||
for wt in WorkoutType.objects.all():
|
||||
for section in ['warm_up', 'working', 'cool_down']:
|
||||
for goal, _ in GOAL_CHOICES:
|
||||
exists = WorkoutStructureRule.objects.filter(
|
||||
workout_type=wt, section_type=section, goal_type=goal
|
||||
).exists()
|
||||
if not exists:
|
||||
self._create_default_rule(wt, section, goal)
|
||||
```
|
||||
|
||||
3. Merge the essential fixes from `calibrate_structure_rules.py` into the analyzer's default rule creation so they're always applied.
|
||||
|
||||
### In calibrate_structure_rules.py:
|
||||
4. Keep this command but change it to only fix known data issues (rep floor clamping, cardio round clamping) rather than creating rules from scratch. Add a check: if rules already have sane values, skip.
|
||||
|
||||
**Test:** Run `analyze_workouts`. Verify all 8 workout types x 3 sections x 5 goals = 120 rules exist, and no rule has rounds > 8 or rep_min < 1.
|
||||
|
||||
---
|
||||
|
||||
## Phase 10: Fix WeeklySplitPattern PK Stability
|
||||
|
||||
**Problem:** `WeeklySplitPattern.pattern` stores raw `MuscleGroupSplit` PKs. Re-running the analyzer creates new splits with new PKs, making old pattern references stale.
|
||||
|
||||
**Files:**
|
||||
- `generator/models.py` - `WeeklySplitPattern` (lines 196-204)
|
||||
- `generator/services/workout_analyzer.py` - `_step4_extract_weekly_split_patterns()` (~lines 650-812)
|
||||
- `generator/services/workout_generator.py` - `_pick_weekly_split()` (lines 739-796)
|
||||
|
||||
**Changes:**
|
||||
|
||||
### Option: Store labels instead of PKs
|
||||
1. In `WeeklySplitPattern`, the `pattern` field currently stores `[5, 12, 5, 8]` (MuscleGroupSplit PKs). Change the analyzer to store split_type strings instead: `["push", "pull", "push", "lower"]`.
|
||||
|
||||
2. In the analyzer's `_step4`, when building patterns:
|
||||
```python
|
||||
# Instead of storing PKs
|
||||
pattern_entry = {
|
||||
'split_types': [split.split_type for split in matched_splits],
|
||||
'labels': [split.label for split in matched_splits],
|
||||
'muscle_sets': [split.muscle_names for split in matched_splits],
|
||||
}
|
||||
```
|
||||
|
||||
3. Update `WeeklySplitPattern` model:
|
||||
```python
|
||||
pattern = JSONField(default=list) # Now stores split_type strings
|
||||
pattern_muscles = JSONField(default=list) # Stores muscle name lists per day
|
||||
```
|
||||
Create migration.
|
||||
|
||||
4. In `_pick_weekly_split()` (workout_generator.py:739-796), resolve patterns using `split_type` + `muscle_names` instead of PK lookups:
|
||||
```python
|
||||
# Instead of MuscleGroupSplit.objects.get(pk=split_id)
|
||||
# Use the pattern's stored muscle lists directly
|
||||
for i, split_type in enumerate(pattern.pattern):
|
||||
muscles = pattern.pattern_muscles[i] if pattern.pattern_muscles else []
|
||||
split_days.append({
|
||||
'label': pattern.pattern_labels[i],
|
||||
'muscles': muscles,
|
||||
'split_type': split_type,
|
||||
})
|
||||
```
|
||||
|
||||
**Test:** Run `analyze_workouts` twice. Verify the second run doesn't break weekly patterns from the first.
|
||||
|
||||
---
|
||||
|
||||
## Phase 11: Add Movement Pattern Variety Tracking
|
||||
|
||||
**Problem:** No tracking of movement patterns within a workout. Could get 3 horizontal presses in one session.
|
||||
|
||||
**Files:**
|
||||
- `generator/services/exercise_selector.py` - Add tracking
|
||||
- `generator/services/workout_generator.py` - `_build_working_supersets()` (lines 1176-1398)
|
||||
|
||||
**Changes:**
|
||||
|
||||
### In exercise_selector.py:
|
||||
1. Add pattern tracking to `__init__`:
|
||||
```python
|
||||
self.used_movement_patterns = Counter() # Track patterns used in current workout
|
||||
```
|
||||
|
||||
2. Add to `reset()`:
|
||||
```python
|
||||
self.used_movement_patterns = Counter()
|
||||
```
|
||||
|
||||
3. In `select_exercises()`, after exercises are selected (line ~122), update tracking:
|
||||
```python
|
||||
for ex in selected:
|
||||
patterns = get_movement_patterns_for_exercise(ex)
|
||||
for p in patterns:
|
||||
self.used_movement_patterns[p] += 1
|
||||
```
|
||||
|
||||
4. Add a new method to penalize overused patterns in `_weighted_pick`:
|
||||
```python
|
||||
# In _weighted_pick, when building the pool (lines 461-474):
|
||||
for ex in preferred_list:
|
||||
patterns = get_movement_patterns_for_exercise(ex)
|
||||
# If any pattern already used 2+ times, downweight
|
||||
if any(self.used_movement_patterns.get(p, 0) >= 2 for p in patterns):
|
||||
pool.extend([ex] * 1) # Reduced weight
|
||||
else:
|
||||
pool.extend([ex] * weight_preferred)
|
||||
```
|
||||
|
||||
**Test:** Generate a full-body workout. Verify no single movement pattern (e.g., "upper push - horizontal") appears more than twice in working supersets.
|
||||
|
||||
---
|
||||
|
||||
## Phase 12: Add Minimum Working Superset Validation After Trimming
|
||||
|
||||
**Problem:** Aggressive trimming could remove all working supersets, leaving only warmup + cooldown.
|
||||
|
||||
**Files:**
|
||||
- `generator/services/workout_generator.py` - `_trim_to_fit()` (lines 1537-1575)
|
||||
|
||||
**Changes:**
|
||||
1. After the trimming loop, add a minimum check:
|
||||
```python
|
||||
# Ensure at least 1 working superset remains
|
||||
working_supersets = [
|
||||
ss for ss in workout_spec['supersets']
|
||||
if ss['name'] not in ('Warm Up', 'Cool Down')
|
||||
]
|
||||
if not working_supersets:
|
||||
# Re-add the last removed working superset with minimal config
|
||||
# (2 exercises, 2 rounds - absolute minimum)
|
||||
if removed_supersets:
|
||||
minimal = removed_supersets[-1]
|
||||
minimal['exercises'] = minimal['exercises'][:2]
|
||||
minimal['rounds'] = 2
|
||||
# Insert before cooldown
|
||||
cooldown_idx = next(
|
||||
(i for i, ss in enumerate(workout_spec['supersets']) if ss['name'] == 'Cool Down'),
|
||||
len(workout_spec['supersets'])
|
||||
)
|
||||
workout_spec['supersets'].insert(cooldown_idx, minimal)
|
||||
```
|
||||
|
||||
2. Track removed supersets during trimming by adding a `removed_supersets` list.
|
||||
|
||||
**Test:** Set `preferred_workout_duration=15` (minimum). Generate a workout. Verify it has at least 1 working superset.
|
||||
|
||||
---
|
||||
|
||||
## Phase 13: Add Generation Warnings to API Response
|
||||
|
||||
**Problem:** Users never know when their preferences can't be honored (equipment fallback, target muscle ignored, etc.).
|
||||
|
||||
**Files:**
|
||||
- `generator/services/workout_generator.py` - Multiple methods
|
||||
- `generator/services/exercise_selector.py` - Fallback paths
|
||||
- `generator/views.py` - Response construction
|
||||
- `generator/serializers.py` - Add warnings field
|
||||
|
||||
**Changes:**
|
||||
|
||||
### In workout_generator.py:
|
||||
1. Add a warnings list to `__init__`:
|
||||
```python
|
||||
self.warnings = []
|
||||
```
|
||||
|
||||
2. Add warnings at each fallback point:
|
||||
- `_build_working_supersets()` line ~1327 (broader muscles fallback):
|
||||
```python
|
||||
self.warnings.append(f"Not enough {', '.join(superset_muscles)} exercises with your equipment. Used broader muscle group.")
|
||||
```
|
||||
- `_build_working_supersets()` line ~1338 (unfiltered fallback):
|
||||
```python
|
||||
self.warnings.append(f"Very few exercises available for {', '.join(superset_muscles)}. Some exercises may not match your muscle targets.")
|
||||
```
|
||||
- `generate_single_workout()` line ~681 (target muscles full_body only - with Phase 7 this becomes the "no relevant overlap" case):
|
||||
```python
|
||||
if user_targets_not_in_split:
|
||||
self.warnings.append(f"Target muscles {', '.join(user_targets_not_in_split)} don't apply to {split_type} day.")
|
||||
```
|
||||
|
||||
### In exercise_selector.py:
|
||||
3. Add a warnings list and populate it:
|
||||
- In bodyweight fallback (line ~172):
|
||||
```python
|
||||
self.warnings.append("Equipment constraints too restrictive. Using bodyweight alternatives.")
|
||||
```
|
||||
|
||||
### In views.py:
|
||||
4. Include warnings in generation response (line ~148):
|
||||
```python
|
||||
response_data = serializer.data
|
||||
response_data['warnings'] = generator.warnings + generator.exercise_selector.warnings
|
||||
```
|
||||
|
||||
### In serializers.py:
|
||||
5. No model change needed - warnings are transient, added to the response dict only.
|
||||
|
||||
**Test:** Set equipment to only "resistance band" and target muscles to "chest". Generate a plan. Verify response includes warnings about equipment fallback.
|
||||
|
||||
---
|
||||
|
||||
## Phase 14: Validate Preference Consistency
|
||||
|
||||
**Problem:** Users can set contradictory preferences (e.g., 4 days/week but only 2 preferred days).
|
||||
|
||||
**Files:**
|
||||
- `generator/serializers.py` - `UserPreferenceUpdateSerializer` (lines 79-144)
|
||||
|
||||
**Changes:**
|
||||
1. Add validation to `UserPreferenceUpdateSerializer.validate()`:
|
||||
```python
|
||||
def validate(self, attrs):
|
||||
errors = {}
|
||||
|
||||
# Preferred days vs days_per_week
|
||||
preferred_days = attrs.get('preferred_days')
|
||||
days_per_week = attrs.get('days_per_week')
|
||||
if preferred_days and days_per_week:
|
||||
if len(preferred_days) > 0 and len(preferred_days) < days_per_week:
|
||||
errors['preferred_days'] = (
|
||||
f"You selected {len(preferred_days)} preferred days "
|
||||
f"but plan to train {days_per_week} days/week. "
|
||||
f"Select at least {days_per_week} days or clear preferred days for auto-scheduling."
|
||||
)
|
||||
|
||||
# Validate preferred_days are valid weekday indices
|
||||
if preferred_days:
|
||||
invalid = [d for d in preferred_days if d < 0 or d > 6]
|
||||
if invalid:
|
||||
errors['preferred_days'] = f"Invalid day indices: {invalid}. Must be 0 (Mon) - 6 (Sun)."
|
||||
|
||||
# Duration sanity
|
||||
duration = attrs.get('preferred_workout_duration')
|
||||
if duration is not None and (duration < 15 or duration > 120):
|
||||
errors['preferred_workout_duration'] = "Duration must be between 15 and 120 minutes."
|
||||
|
||||
if errors:
|
||||
raise serializers.ValidationError(errors)
|
||||
|
||||
return attrs
|
||||
```
|
||||
|
||||
**Test:** Try to update preferences with `days_per_week=5, preferred_days=[0, 1]`. Verify it returns a validation error.
|
||||
|
||||
---
|
||||
|
||||
## Execution Order & Dependencies
|
||||
|
||||
```
|
||||
Phase 1 (Consolidate defaults) - Independent, do first
|
||||
Phase 2 (Bodyweight safety) - Independent
|
||||
Phase 3 (HR warmup/cooldown) - Independent
|
||||
Phase 4 (Complexity cap) - Depends on Phase 2 (same file area)
|
||||
Phase 5 (Stretch position) - Independent
|
||||
Phase 6 (Recently-used exclusion) - Independent
|
||||
Phase 7 (Target muscles all splits)- Independent
|
||||
Phase 8 (Injury filtering) - Requires migration, do after Phase 2
|
||||
Phase 9 (Cardio rules/coverage) - Depends on Phase 1
|
||||
Phase 10 (Pattern PK stability) - Requires migration
|
||||
Phase 11 (Movement pattern variety) - Depends on Phase 6 (same tracking area)
|
||||
Phase 12 (Min working superset) - Independent
|
||||
Phase 13 (Generation warnings) - Do last (touches all files modified above)
|
||||
Phase 14 (Preference validation) - Independent
|
||||
```
|
||||
|
||||
## Recommended Batch Order
|
||||
|
||||
**Batch 1** (Foundation - no migrations): Phases 1, 2, 3, 4, 12
|
||||
**Batch 2** (Selection quality): Phases 5, 6, 7, 11
|
||||
**Batch 3** (Schema changes - requires migrations): Phases 8, 10
|
||||
**Batch 4** (Integration): Phases 9, 13, 14
|
||||
|
||||
## Files Modified Summary
|
||||
|
||||
| File | Phases |
|
||||
|------|--------|
|
||||
| `generator/services/exercise_selector.py` | 2, 3, 4, 5, 6, 11, 13 |
|
||||
| `generator/services/workout_generator.py` | 5, 6, 7, 9, 12, 13 |
|
||||
| `generator/services/workout_analyzer.py` | 1, 9 |
|
||||
| `generator/services/muscle_normalizer.py` | (read-only, imported) |
|
||||
| `generator/services/plan_builder.py` | (no changes) |
|
||||
| `generator/models.py` | 8, 10 |
|
||||
| `generator/serializers.py` | 8, 13, 14 |
|
||||
| `generator/views.py` | 13 |
|
||||
| `generator/management/commands/calibrate_workout_types.py` | 1 (delete) |
|
||||
| `generator/management/commands/calibrate_structure_rules.py` | 9 |
|
||||
| `generator/management/commands/analyze_workouts.py` | 1 |
|
||||
|
||||
## New Migrations Required
|
||||
|
||||
1. Phase 8: Add `injury_types` JSONField to UserPreference
|
||||
2. Phase 10: Add `pattern_muscles` JSONField to WeeklySplitPattern, change `pattern` semantics
|
||||
|
||||
## Management Commands to Run After
|
||||
|
||||
1. `python manage.py migrate`
|
||||
2. `python manage.py analyze_workouts` (picks up Phase 1 + 9 changes)
|
||||
3. `python manage.py populate_exercise_fields` (ensure all exercise fields populated)
|
||||
4. `python manage.py calibrate_structure_rules` (Phase 9 rep floor + cardio fixes)
|
||||
56
docker-compose.dev.yml
Normal file
56
docker-compose.dev.yml
Normal file
@@ -0,0 +1,56 @@
|
||||
services:
|
||||
db:
|
||||
image: postgres:14
|
||||
volumes:
|
||||
- database:/var/lib/postgresql/data
|
||||
environment:
|
||||
- POSTGRES_DB=werkout
|
||||
- POSTGRES_USER=postgres
|
||||
- POSTGRES_PASSWORD=postgres
|
||||
healthcheck:
|
||||
test: ["CMD-SHELL", "pg_isready -U postgres"]
|
||||
interval: 5s
|
||||
timeout: 5s
|
||||
retries: 5
|
||||
|
||||
web:
|
||||
build: .
|
||||
volumes:
|
||||
- .:/code
|
||||
- /code/werkout-frontend/node_modules
|
||||
- /code/werkout-frontend/.next
|
||||
ports:
|
||||
- "8001:8000"
|
||||
- "3010:3000"
|
||||
environment:
|
||||
- POSTGRES_NAME=werkout
|
||||
- POSTGRES_USER=postgres
|
||||
- POSTGRES_PASSWORD=postgres
|
||||
depends_on:
|
||||
db:
|
||||
condition: service_healthy
|
||||
links:
|
||||
- db
|
||||
|
||||
redis:
|
||||
image: redis:alpine
|
||||
|
||||
celery:
|
||||
restart: always
|
||||
build:
|
||||
context: .
|
||||
command: celery -A werkout_api worker -l info
|
||||
volumes:
|
||||
- .:/code
|
||||
environment:
|
||||
- DB_HOST=db
|
||||
- DB_NAME=werkout
|
||||
- DB_USER=postgres
|
||||
- DB_PASS=postgres
|
||||
depends_on:
|
||||
- db
|
||||
- redis
|
||||
- web
|
||||
|
||||
volumes:
|
||||
database:
|
||||
@@ -1,10 +1,9 @@
|
||||
version: "3.9"
|
||||
|
||||
services:
|
||||
db:
|
||||
image: postgres
|
||||
image: postgres:14
|
||||
restart: unless-stopped
|
||||
volumes:
|
||||
- database:/var/lib/postgresql/data
|
||||
- /mnt/user/downloads/werkout_api/postgres:/var/lib/postgresql/data
|
||||
environment:
|
||||
- POSTGRES_DB=werkout
|
||||
- POSTGRES_USER=postgres
|
||||
@@ -17,41 +16,40 @@ services:
|
||||
|
||||
web:
|
||||
build: .
|
||||
command: >
|
||||
sh -c "python manage.py collectstatic --noinput && python manage.py migrate && python manage.py runserver 0.0.0.0:8000"
|
||||
restart: unless-stopped
|
||||
volumes:
|
||||
- .:/code
|
||||
- /mnt/user/downloads/werkout_api/media:/code/media
|
||||
ports:
|
||||
- "8000:8000"
|
||||
- "8001:8000"
|
||||
- "3010:3000"
|
||||
environment:
|
||||
- POSTGRES_NAME=werkout
|
||||
- POSTGRES_USER=postgres
|
||||
- POSTGRES_PASSWORD=postgres
|
||||
- DATABASE_URL=postgres://postgres:postgres@db:5432/werkout
|
||||
- REDIS_URL=redis://redis:6379
|
||||
- SECRET_KEY=${SECRET_KEY:-insecure-dev-secret-key-change-in-production}
|
||||
- DEBUG=${DEBUG:-true}
|
||||
- ALLOWED_HOSTS=${ALLOWED_HOSTS:-*}
|
||||
- CORS_ALLOWED_ORIGINS=${CORS_ALLOWED_ORIGINS:-}
|
||||
depends_on:
|
||||
db:
|
||||
condition: service_healthy
|
||||
links:
|
||||
- db
|
||||
redis:
|
||||
condition: service_started
|
||||
|
||||
redis:
|
||||
image: redis:alpine
|
||||
restart: unless-stopped
|
||||
|
||||
celery:
|
||||
restart: always
|
||||
build:
|
||||
context: .
|
||||
restart: unless-stopped
|
||||
command: celery -A werkout_api worker -l info
|
||||
volumes:
|
||||
- .:/code
|
||||
environment:
|
||||
- DB_HOST=db
|
||||
- DB_NAME=werkout
|
||||
- DB_USER=postgres
|
||||
- DB_PASS=postgres
|
||||
- DATABASE_URL=postgres://postgres:postgres@db:5432/werkout
|
||||
- REDIS_URL=redis://redis:6379
|
||||
- SECRET_KEY=${SECRET_KEY:-insecure-dev-secret-key-change-in-production}
|
||||
- DEBUG=${DEBUG:-true}
|
||||
depends_on:
|
||||
- db
|
||||
- redis
|
||||
- web
|
||||
|
||||
volumes:
|
||||
database:
|
||||
@@ -0,0 +1,34 @@
|
||||
# Generated by Django 5.1.4 on 2026-02-21 05:06
|
||||
|
||||
from django.db import migrations
|
||||
|
||||
|
||||
def deduplicate_workout_equipment(apps, schema_editor):
|
||||
"""Remove duplicate WorkoutEquipment rows before adding unique constraint."""
|
||||
WorkoutEquipment = apps.get_model('equipment', 'WorkoutEquipment')
|
||||
seen = set()
|
||||
to_delete = []
|
||||
for we in WorkoutEquipment.objects.all().order_by('id'):
|
||||
key = (we.exercise_id, we.equipment_id)
|
||||
if key in seen:
|
||||
to_delete.append(we.id)
|
||||
else:
|
||||
seen.add(key)
|
||||
if to_delete:
|
||||
WorkoutEquipment.objects.filter(id__in=to_delete).delete()
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('equipment', '0002_workoutequipment'),
|
||||
('exercise', '0010_alter_exercise_complexity_rating_and_more'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.RunPython(deduplicate_workout_equipment, migrations.RunPython.noop),
|
||||
migrations.AlterUniqueTogether(
|
||||
name='workoutequipment',
|
||||
unique_together={('exercise', 'equipment')},
|
||||
),
|
||||
]
|
||||
@@ -10,7 +10,7 @@ class Equipment(models.Model):
|
||||
name = models.CharField(null=True, blank=True, max_length=64)
|
||||
|
||||
def __str__(self):
|
||||
return self.category + " : " + self.name
|
||||
return f"{self.category or ''} : {self.name or ''}"
|
||||
|
||||
class WorkoutEquipment(models.Model):
|
||||
created_at = models.DateTimeField(auto_now_add=True)
|
||||
@@ -26,5 +26,8 @@ class WorkoutEquipment(models.Model):
|
||||
related_name='workout_exercise_workout'
|
||||
)
|
||||
|
||||
class Meta:
|
||||
unique_together = ('exercise', 'equipment')
|
||||
|
||||
def __str__(self):
|
||||
return self.exercise.name + " : " + self.equipment.name
|
||||
@@ -2,7 +2,6 @@ from django.shortcuts import render
|
||||
from .models import *
|
||||
from .serializers import *
|
||||
|
||||
from django.shortcuts import render
|
||||
from rest_framework.decorators import api_view
|
||||
from rest_framework.response import Response
|
||||
from rest_framework import status
|
||||
@@ -21,8 +20,8 @@ def all_equipment(request):
|
||||
data = cache.get('all_equipment')
|
||||
return Response(data=data, status=status.HTTP_200_OK)
|
||||
|
||||
users = Equipment.objects.all()
|
||||
serializer = EquipmentSerializer(users, many=True)
|
||||
equipment = Equipment.objects.all().select_related()
|
||||
serializer = EquipmentSerializer(equipment, many=True)
|
||||
data = serializer.data
|
||||
cache.set('all_equipment', data, timeout=None)
|
||||
return Response(data=data, status=status.HTTP_200_OK)
|
||||
@@ -0,0 +1,54 @@
|
||||
# Generated by Django 5.1.4 on 2026-02-20 22:03
|
||||
|
||||
import django.db.models.deletion
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('exercise', '0008_exercise_video_override'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='exercise',
|
||||
name='complexity_rating',
|
||||
field=models.IntegerField(blank=True, null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='exercise',
|
||||
name='difficulty_level',
|
||||
field=models.CharField(blank=True, choices=[('beginner', 'Beginner'), ('intermediate', 'Intermediate'), ('advanced', 'Advanced')], max_length=16, null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='exercise',
|
||||
name='exercise_tier',
|
||||
field=models.CharField(blank=True, choices=[('primary', 'Primary'), ('secondary', 'Secondary'), ('accessory', 'Accessory')], max_length=16, null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='exercise',
|
||||
name='hr_elevation_rating',
|
||||
field=models.IntegerField(blank=True, null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='exercise',
|
||||
name='impact_level',
|
||||
field=models.CharField(blank=True, choices=[('none', 'None'), ('low', 'Low'), ('medium', 'Medium'), ('high', 'High')], max_length=8, null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='exercise',
|
||||
name='is_compound',
|
||||
field=models.BooleanField(default=False),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='exercise',
|
||||
name='progression_of',
|
||||
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='progressions', to='exercise.exercise'),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='exercise',
|
||||
name='stretch_position',
|
||||
field=models.CharField(blank=True, choices=[('lengthened', 'Lengthened'), ('mid', 'Mid-range'), ('shortened', 'Shortened')], max_length=16, null=True),
|
||||
),
|
||||
]
|
||||
@@ -0,0 +1,24 @@
|
||||
# Generated by Django 5.1.4 on 2026-02-21 05:06
|
||||
|
||||
import django.core.validators
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('exercise', '0009_exercise_complexity_rating_exercise_difficulty_level_and_more'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name='exercise',
|
||||
name='complexity_rating',
|
||||
field=models.IntegerField(blank=True, null=True, validators=[django.core.validators.MinValueValidator(1), django.core.validators.MaxValueValidator(5)]),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='exercise',
|
||||
name='hr_elevation_rating',
|
||||
field=models.IntegerField(blank=True, null=True, validators=[django.core.validators.MinValueValidator(1), django.core.validators.MaxValueValidator(10)]),
|
||||
),
|
||||
]
|
||||
18
exercise/migrations/0011_fix_related_names_and_nullable.py
Normal file
18
exercise/migrations/0011_fix_related_names_and_nullable.py
Normal file
@@ -0,0 +1,18 @@
|
||||
# Generated by Django 5.1.4 on 2026-02-21 05:32
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('exercise', '0010_alter_exercise_complexity_rating_and_more'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name='exercise',
|
||||
name='name',
|
||||
field=models.CharField(default='', max_length=512),
|
||||
),
|
||||
]
|
||||
@@ -1,12 +1,39 @@
|
||||
from django.db import models
|
||||
from django.conf import settings
|
||||
from random import randrange
|
||||
from django.core.validators import MinValueValidator, MaxValueValidator
|
||||
|
||||
|
||||
DIFFICULTY_CHOICES = [
|
||||
('beginner', 'Beginner'),
|
||||
('intermediate', 'Intermediate'),
|
||||
('advanced', 'Advanced'),
|
||||
]
|
||||
|
||||
TIER_CHOICES = [
|
||||
('primary', 'Primary'),
|
||||
('secondary', 'Secondary'),
|
||||
('accessory', 'Accessory'),
|
||||
]
|
||||
|
||||
IMPACT_CHOICES = [
|
||||
('none', 'None'),
|
||||
('low', 'Low'),
|
||||
('medium', 'Medium'),
|
||||
('high', 'High'),
|
||||
]
|
||||
|
||||
STRETCH_POSITION_CHOICES = [
|
||||
('lengthened', 'Lengthened'),
|
||||
('mid', 'Mid-range'),
|
||||
('shortened', 'Shortened'),
|
||||
]
|
||||
|
||||
|
||||
# Create your models here.
|
||||
class Exercise(models.Model):
|
||||
created_at = models.DateTimeField(auto_now_add=True)
|
||||
updated_at = models.DateTimeField(auto_now=True)
|
||||
name = models.CharField(null=True, blank=True, max_length=512)
|
||||
name = models.CharField(max_length=512, default='')
|
||||
description = models.CharField(null=True, blank=True, max_length=1024)
|
||||
side = models.CharField(null=True, blank=True, max_length=64)
|
||||
is_two_dumbbells = models.BooleanField(default=False)
|
||||
@@ -21,29 +48,52 @@ class Exercise(models.Model):
|
||||
equipment_required = models.CharField(null=True, blank=True, max_length=255)
|
||||
muscle_groups = models.CharField(null=True, blank=True, max_length=255)
|
||||
synonyms = models.CharField(null=True, blank=True, max_length=255)
|
||||
estimated_rep_duration = models.FloatField(null=True, blank=True, max_length=255)
|
||||
estimated_rep_duration = models.FloatField(null=True, blank=True)
|
||||
video_override = models.CharField(null=True, blank=True, max_length=255)
|
||||
|
||||
# New fields for workout generation quality
|
||||
is_compound = models.BooleanField(default=False)
|
||||
difficulty_level = models.CharField(
|
||||
max_length=16, choices=DIFFICULTY_CHOICES, null=True, blank=True
|
||||
)
|
||||
exercise_tier = models.CharField(
|
||||
max_length=16, choices=TIER_CHOICES, null=True, blank=True
|
||||
)
|
||||
complexity_rating = models.IntegerField(
|
||||
null=True, blank=True,
|
||||
validators=[MinValueValidator(1), MaxValueValidator(5)]
|
||||
)
|
||||
hr_elevation_rating = models.IntegerField(
|
||||
null=True, blank=True,
|
||||
validators=[MinValueValidator(1), MaxValueValidator(10)]
|
||||
)
|
||||
impact_level = models.CharField(
|
||||
max_length=8, choices=IMPACT_CHOICES, null=True, blank=True
|
||||
)
|
||||
stretch_position = models.CharField(
|
||||
max_length=16, choices=STRETCH_POSITION_CHOICES, null=True, blank=True
|
||||
)
|
||||
progression_of = models.ForeignKey(
|
||||
'self', null=True, blank=True, on_delete=models.SET_NULL,
|
||||
related_name='progressions'
|
||||
)
|
||||
|
||||
class Meta:
|
||||
ordering = ('name',)
|
||||
|
||||
def __str__(self):
|
||||
return self.name + " --------- " + self.description or "NA"
|
||||
return (self.name or 'Unnamed') + " --------- " + (self.description or "NA")
|
||||
|
||||
def video_url(self):
|
||||
if self.video_override is not None and len(self.video_override) > 0:
|
||||
#return "/videos/hls_video?video_name="+self.video_override+".mp4&video_type=exercise_videos"
|
||||
return '/media/hls/'+ self.video_override +'_720p.m3u8'
|
||||
# return "/media/exercise_videos/" + self.video_override
|
||||
return '/media/videos/'+ self.video_override +'_720p.m3u8'
|
||||
else:
|
||||
#return "/videos/hls_video?video_name="+self.name.replace(" ", "_")+".mp4&video_type=exercise_videos"
|
||||
name = self.name.replace(" ", "_")
|
||||
name = (self.name or '').replace(" ", "_")
|
||||
name = name.replace("'", "")
|
||||
return '/media/hls/'+ name + ".mp4" +'_720p.m3u8'
|
||||
#return "/media/exercise_videos/" + self.name.replace(" ", "_") + ".mp4"
|
||||
|
||||
def audio_url(self):
|
||||
return "/media/exercise_audio/" + self.name.replace(" ", "_") + ".m4a"
|
||||
return "/media/exercise_audio/" + (self.name or '').replace(" ", "_") + ".m4a"
|
||||
|
||||
def transition_url(self):
|
||||
return "/media/transitions_audio/" + self.name.replace(" ", "_") + ".m4a"
|
||||
@@ -1,8 +1,6 @@
|
||||
from rest_framework import serializers
|
||||
from .models import *
|
||||
from muscle.models import ExerciseMuscle
|
||||
from equipment.models import WorkoutEquipment
|
||||
from muscle.serializers import ExerciseMuscleSerializer
|
||||
from equipment.serializers import WorkoutEquipmentSerializer
|
||||
|
||||
class ExerciseMuscleSerializer(serializers.ModelSerializer):
|
||||
@@ -27,11 +25,13 @@ class ExerciseSerializer(serializers.ModelSerializer):
|
||||
fields = '__all__'
|
||||
|
||||
def get_muscles(self, obj):
|
||||
objs = ExerciseMuscle.objects.filter(exercise=obj)
|
||||
data = ExerciseMuscleSerializer(objs, many=True).data
|
||||
return data
|
||||
# Use prefetched related manager if available (avoids N+1 queries)
|
||||
# Callers should use .prefetch_related('exercise_muscle_exercise__muscle')
|
||||
objs = obj.exercise_muscle_exercise.all()
|
||||
return ExerciseMuscleSerializer(objs, many=True).data
|
||||
|
||||
def get_equipment(self, obj):
|
||||
objs = WorkoutEquipment.objects.filter(exercise=obj)
|
||||
data = WorkoutEquipmentSerializer(objs, many=True).data
|
||||
return data
|
||||
# Use prefetched related manager if available (avoids N+1 queries)
|
||||
# Callers should use .prefetch_related('workout_exercise_workout__equipment')
|
||||
objs = obj.workout_exercise_workout.all()
|
||||
return WorkoutEquipmentSerializer(objs, many=True).data
|
||||
|
||||
@@ -2,7 +2,6 @@ from django.shortcuts import render
|
||||
from .models import *
|
||||
from .serializers import *
|
||||
|
||||
from django.shortcuts import render
|
||||
from rest_framework.decorators import api_view
|
||||
from rest_framework.response import Response
|
||||
from rest_framework import status
|
||||
@@ -21,8 +20,11 @@ def all_exercises(request):
|
||||
data = cache.get('all_exercises')
|
||||
return Response(data=data, status=status.HTTP_200_OK)
|
||||
|
||||
users = Exercise.objects.all()
|
||||
serializer = ExerciseSerializer(users, many=True)
|
||||
exercises = Exercise.objects.all().prefetch_related(
|
||||
'exercise_muscle_exercise__muscle',
|
||||
'workout_exercise_workout__equipment',
|
||||
)
|
||||
serializer = ExerciseSerializer(exercises, many=True)
|
||||
data = serializer.data
|
||||
cache.set('all_exercises', data, timeout=None)
|
||||
return Response(data=data, status=status.HTTP_200_OK)
|
||||
0
generator/__init__.py
Normal file
0
generator/__init__.py
Normal file
49
generator/admin.py
Normal file
49
generator/admin.py
Normal file
@@ -0,0 +1,49 @@
|
||||
from django.contrib import admin
|
||||
from .models import (
|
||||
WorkoutType, UserPreference, GeneratedWeeklyPlan, GeneratedWorkout,
|
||||
MuscleGroupSplit, WeeklySplitPattern, WorkoutStructureRule, MovementPatternOrder
|
||||
)
|
||||
|
||||
|
||||
@admin.register(WorkoutType)
|
||||
class WorkoutTypeAdmin(admin.ModelAdmin):
|
||||
list_display = ('name', 'typical_intensity', 'rep_range_min', 'rep_range_max', 'duration_bias')
|
||||
|
||||
|
||||
@admin.register(UserPreference)
|
||||
class UserPreferenceAdmin(admin.ModelAdmin):
|
||||
list_display = ('registered_user', 'fitness_level', 'primary_goal', 'days_per_week', 'preferred_workout_duration')
|
||||
|
||||
|
||||
@admin.register(GeneratedWeeklyPlan)
|
||||
class GeneratedWeeklyPlanAdmin(admin.ModelAdmin):
|
||||
list_display = ('id', 'registered_user', 'week_start_date', 'week_end_date', 'status', 'generation_time_ms')
|
||||
list_filter = ('status',)
|
||||
|
||||
|
||||
@admin.register(GeneratedWorkout)
|
||||
class GeneratedWorkoutAdmin(admin.ModelAdmin):
|
||||
list_display = ('id', 'plan', 'scheduled_date', 'is_rest_day', 'focus_area', 'workout_type', 'status')
|
||||
list_filter = ('is_rest_day', 'status')
|
||||
|
||||
|
||||
@admin.register(MuscleGroupSplit)
|
||||
class MuscleGroupSplitAdmin(admin.ModelAdmin):
|
||||
list_display = ('label', 'split_type', 'frequency', 'typical_exercise_count', 'muscle_names')
|
||||
|
||||
|
||||
@admin.register(WeeklySplitPattern)
|
||||
class WeeklySplitPatternAdmin(admin.ModelAdmin):
|
||||
list_display = ('days_per_week', 'frequency', 'pattern_labels')
|
||||
|
||||
|
||||
@admin.register(WorkoutStructureRule)
|
||||
class WorkoutStructureRuleAdmin(admin.ModelAdmin):
|
||||
list_display = ('workout_type', 'section_type', 'goal_type', 'typical_rounds', 'typical_exercises_per_superset')
|
||||
list_filter = ('section_type', 'goal_type')
|
||||
|
||||
|
||||
@admin.register(MovementPatternOrder)
|
||||
class MovementPatternOrderAdmin(admin.ModelAdmin):
|
||||
list_display = ('movement_pattern', 'position', 'frequency', 'section_type')
|
||||
list_filter = ('position', 'section_type')
|
||||
6
generator/apps.py
Normal file
6
generator/apps.py
Normal file
@@ -0,0 +1,6 @@
|
||||
from django.apps import AppConfig
|
||||
|
||||
|
||||
class GeneratorConfig(AppConfig):
|
||||
default_auto_field = 'django.db.models.BigAutoField'
|
||||
name = 'generator'
|
||||
0
generator/management/__init__.py
Normal file
0
generator/management/__init__.py
Normal file
0
generator/management/commands/__init__.py
Normal file
0
generator/management/commands/__init__.py
Normal file
115
generator/management/commands/analyze_workouts.py
Normal file
115
generator/management/commands/analyze_workouts.py
Normal file
@@ -0,0 +1,115 @@
|
||||
"""
|
||||
Django management command to analyze existing workouts and extract ML patterns.
|
||||
|
||||
Usage:
|
||||
python manage.py analyze_workouts
|
||||
python manage.py analyze_workouts --dry-run
|
||||
python manage.py analyze_workouts --verbosity 2
|
||||
"""
|
||||
|
||||
import time
|
||||
|
||||
from django.core.management.base import BaseCommand
|
||||
|
||||
from generator.services.workout_analyzer import WorkoutAnalyzer
|
||||
from generator.models import (
|
||||
MuscleGroupSplit,
|
||||
MovementPatternOrder,
|
||||
WeeklySplitPattern,
|
||||
WorkoutStructureRule,
|
||||
WorkoutType,
|
||||
)
|
||||
|
||||
|
||||
class Command(BaseCommand):
|
||||
help = (
|
||||
'Analyze existing workouts in the database and extract ML patterns '
|
||||
'into WorkoutType, MuscleGroupSplit, WeeklySplitPattern, '
|
||||
'WorkoutStructureRule, and MovementPatternOrder models.'
|
||||
)
|
||||
|
||||
def add_arguments(self, parser):
|
||||
parser.add_argument(
|
||||
'--dry-run',
|
||||
action='store_true',
|
||||
default=False,
|
||||
help='Print what would be done without writing to the database.',
|
||||
)
|
||||
|
||||
def handle(self, *args, **options):
|
||||
dry_run = options.get('dry_run', False)
|
||||
|
||||
if dry_run:
|
||||
self.stdout.write(self.style.WARNING(
|
||||
'DRY RUN mode - no changes will be written to the database.\n'
|
||||
'Remove --dry-run to actually run the analysis.\n'
|
||||
))
|
||||
self._print_current_state()
|
||||
return
|
||||
|
||||
start_time = time.time()
|
||||
|
||||
analyzer = WorkoutAnalyzer()
|
||||
analyzer.analyze()
|
||||
|
||||
elapsed = time.time() - start_time
|
||||
|
||||
self.stdout.write('')
|
||||
self._print_current_state()
|
||||
|
||||
self.stdout.write(self.style.SUCCESS(
|
||||
f'\nAnalysis complete in {elapsed:.2f}s!'
|
||||
))
|
||||
|
||||
def _print_current_state(self):
|
||||
"""Print a summary of the current state of all ML pattern models."""
|
||||
self.stdout.write(self.style.MIGRATE_HEADING('\nCurrent ML Pattern Model State:'))
|
||||
self.stdout.write(f' WorkoutType: {WorkoutType.objects.count()} records')
|
||||
self.stdout.write(f' MuscleGroupSplit: {MuscleGroupSplit.objects.count()} records')
|
||||
self.stdout.write(f' WeeklySplitPattern: {WeeklySplitPattern.objects.count()} records')
|
||||
self.stdout.write(f' WorkoutStructureRule: {WorkoutStructureRule.objects.count()} records')
|
||||
self.stdout.write(f' MovementPatternOrder: {MovementPatternOrder.objects.count()} records')
|
||||
|
||||
# List WorkoutTypes
|
||||
wts = WorkoutType.objects.all().order_by('name')
|
||||
if wts.exists():
|
||||
self.stdout.write(self.style.MIGRATE_HEADING('\n WorkoutTypes:'))
|
||||
for wt in wts:
|
||||
self.stdout.write(
|
||||
f' - {wt.name}: reps {wt.rep_range_min}-{wt.rep_range_max}, '
|
||||
f'rounds {wt.round_range_min}-{wt.round_range_max}, '
|
||||
f'intensity={wt.typical_intensity}'
|
||||
)
|
||||
|
||||
# List MuscleGroupSplits
|
||||
splits = MuscleGroupSplit.objects.all().order_by('-frequency')
|
||||
if splits.exists():
|
||||
self.stdout.write(self.style.MIGRATE_HEADING('\n Top MuscleGroupSplits:'))
|
||||
for s in splits[:10]:
|
||||
muscles_str = ', '.join(s.muscle_names[:5])
|
||||
if len(s.muscle_names) > 5:
|
||||
muscles_str += f' (+{len(s.muscle_names) - 5} more)'
|
||||
self.stdout.write(
|
||||
f' - [{s.split_type}] {s.label} | '
|
||||
f'freq={s.frequency}, ex_count={s.typical_exercise_count} | '
|
||||
f'{muscles_str}'
|
||||
)
|
||||
|
||||
# List WeeklySplitPatterns
|
||||
patterns = WeeklySplitPattern.objects.all().order_by('-frequency')
|
||||
if patterns.exists():
|
||||
self.stdout.write(self.style.MIGRATE_HEADING('\n Top WeeklySplitPatterns:'))
|
||||
for p in patterns[:10]:
|
||||
self.stdout.write(
|
||||
f' - {p.days_per_week}-day: {p.pattern_labels} '
|
||||
f'(freq={p.frequency}, rest_days={p.rest_day_positions})'
|
||||
)
|
||||
|
||||
# List WorkoutStructureRule goal distribution
|
||||
rules = WorkoutStructureRule.objects.all()
|
||||
if rules.exists():
|
||||
from collections import Counter
|
||||
goal_counts = Counter(rules.values_list('goal_type', flat=True))
|
||||
self.stdout.write(self.style.MIGRATE_HEADING('\n WorkoutStructureRule by goal:'))
|
||||
for goal, count in sorted(goal_counts.items()):
|
||||
self.stdout.write(f' - {goal}: {count} rules')
|
||||
202
generator/management/commands/audit_exercise_data.py
Normal file
202
generator/management/commands/audit_exercise_data.py
Normal file
@@ -0,0 +1,202 @@
|
||||
"""
|
||||
Comprehensive audit of exercise data quality.
|
||||
|
||||
Checks for:
|
||||
1. Null estimated_rep_duration on rep-based exercises
|
||||
2. is_weight false positives (bodyweight exercises marked as weighted)
|
||||
3. Exercises with no muscle assignments
|
||||
4. "horizonal" typo in movement_patterns
|
||||
5. Null metadata fields summary (difficulty_level, exercise_tier, etc.)
|
||||
|
||||
Exits with code 1 if any CRITICAL issues are found.
|
||||
|
||||
Usage:
|
||||
python manage.py audit_exercise_data
|
||||
"""
|
||||
|
||||
import re
|
||||
import sys
|
||||
|
||||
from django.core.management.base import BaseCommand
|
||||
|
||||
from exercise.models import Exercise
|
||||
from muscle.models import ExerciseMuscle
|
||||
|
||||
|
||||
# Same bodyweight patterns as fix_exercise_flags for consistency
|
||||
BODYWEIGHT_PATTERNS = [
|
||||
r'\bwall sit\b',
|
||||
r'\bplank\b',
|
||||
r'\bmountain climber\b',
|
||||
r'\bburpee\b',
|
||||
r'\bpush ?up\b',
|
||||
r'\bpushup\b',
|
||||
r'\bpull ?up\b',
|
||||
r'\bpullup\b',
|
||||
r'\bchin ?up\b',
|
||||
r'\bchinup\b',
|
||||
r'\bdips?\b',
|
||||
r'\bpike\b',
|
||||
r'\bhandstand\b',
|
||||
r'\bl sit\b',
|
||||
r'\bv sit\b',
|
||||
r'\bhollow\b',
|
||||
r'\bsuperman\b',
|
||||
r'\bbird dog\b',
|
||||
r'\bdead bug\b',
|
||||
r'\bbear crawl\b',
|
||||
r'\bcrab walk\b',
|
||||
r'\binchworm\b',
|
||||
r'\bjumping jack\b',
|
||||
r'\bhigh knee\b',
|
||||
r'\bbutt kick\b',
|
||||
r'\bskater\b',
|
||||
r'\blunge jump\b',
|
||||
r'\bjump lunge\b',
|
||||
r'\bsquat jump\b',
|
||||
r'\bjump squat\b',
|
||||
r'\bbox jump\b',
|
||||
r'\btuck jump\b',
|
||||
r'\bbroad jump\b',
|
||||
r'\bsprinter\b',
|
||||
r'\bagility ladder\b',
|
||||
r'\bbody ?weight\b',
|
||||
r'\bbodyweight\b',
|
||||
r'\bcalisthenics?\b',
|
||||
r'\bflutter kick\b',
|
||||
r'\bleg raise\b',
|
||||
r'\bsit ?up\b',
|
||||
r'\bcrunch\b',
|
||||
r'\bstretch\b',
|
||||
r'\byoga\b',
|
||||
r'\bfoam roll\b',
|
||||
r'\bjump rope\b',
|
||||
r'\bspider crawl\b',
|
||||
]
|
||||
|
||||
|
||||
class Command(BaseCommand):
    """Audit exercise data quality (read-only) and report issues.

    Checks performed:
      1. Rep-based exercises with null estimated_rep_duration -> CRITICAL
      2. Bodyweight exercises still flagged is_weight=True    -> WARNING
      3. Exercises with no muscle assignments                 -> CRITICAL
      4. "horizonal" typo in movement_patterns                -> WARNING
      5. Metadata fields missing on >50% of exercises         -> WARNING

    Exits with status 1 when any CRITICAL issue is found, so the command
    can gate CI pipelines.
    """

    help = 'Audit exercise data quality -- exits 1 if critical issues found'

    def handle(self, *args, **options):
        issues = []

        # 1. Null estimated_rep_duration on rep-based exercises.
        #    Duration-only exercises (is_duration=True, is_reps=False) are
        #    already excluded by the is_reps=True filter, so the former
        #    .exclude(is_duration=True, is_reps=False) clause was a no-op
        #    and has been removed.
        null_duration = Exercise.objects.filter(
            estimated_rep_duration__isnull=True,
            is_reps=True,
        ).count()
        if null_duration > 0:
            issues.append(
                f"CRITICAL: {null_duration} rep-based exercises have null estimated_rep_duration"
            )
        else:
            self.stdout.write(self.style.SUCCESS(
                'OK: All rep-based exercises have estimated_rep_duration'
            ))

        # 2. is_weight false positives -- bodyweight exercises marked as weighted.
        #    Compile the shared patterns once up front instead of passing raw
        #    strings to re.search() inside the per-exercise loop.
        compiled_bodyweight = [re.compile(p) for p in BODYWEIGHT_PATTERNS]
        weight_false_positives = 0
        for ex in Exercise.objects.filter(is_weight=True):
            if not ex.name:
                continue
            name_lower = ex.name.lower()
            if any(pat.search(name_lower) for pat in compiled_bodyweight):
                weight_false_positives += 1

        if weight_false_positives > 0:
            issues.append(
                f"WARNING: {weight_false_positives} bodyweight exercises still have is_weight=True"
            )
        else:
            self.stdout.write(self.style.SUCCESS(
                'OK: No bodyweight exercises incorrectly marked as weighted'
            ))

        # 3. Exercises with no muscle assignments.
        exercises_with_muscles = set(
            ExerciseMuscle.objects.values_list('exercise_id', flat=True).distinct()
        )
        exercises_no_muscles = Exercise.objects.exclude(
            pk__in=exercises_with_muscles
        ).count()
        if exercises_no_muscles > 0:
            issues.append(
                f"CRITICAL: {exercises_no_muscles} exercises have no muscle assignments"
            )
        else:
            self.stdout.write(self.style.SUCCESS(
                'OK: All exercises have muscle assignments'
            ))

        # 4. "horizonal" typo in movement_patterns (known data-entry typo).
        typo_count = Exercise.objects.filter(
            movement_patterns__icontains='horizonal'
        ).count()
        if typo_count > 0:
            issues.append(
                f'WARNING: {typo_count} exercises have "horizonal" typo in movement_patterns'
            )
        else:
            self.stdout.write(self.style.SUCCESS(
                'OK: No "horizonal" typos in movement_patterns'
            ))

        # 5. Null metadata fields summary. "Missing" means NULL or ''.
        total = Exercise.objects.count()
        if total > 0:
            # Base field always present on the model.
            metadata_fields = {
                'movement_patterns': Exercise.objects.filter(
                    movement_patterns__isnull=True
                ).count() + Exercise.objects.filter(movement_patterns='').count(),
            }

            # Optional fields that may not exist in all environments.
            optional_fields = ['difficulty_level', 'exercise_tier']
            for field_name in optional_fields:
                if hasattr(Exercise, field_name):
                    try:
                        null_count = Exercise.objects.filter(
                            **{f'{field_name}__isnull': True}
                        ).count() + Exercise.objects.filter(
                            **{field_name: ''}
                        ).count()
                        metadata_fields[field_name] = null_count
                    except Exception:
                        pass  # Field doesn't exist in DB schema yet

            self.stdout.write(f'\nMetadata coverage ({total} total exercises):')
            for field, null_count in metadata_fields.items():
                filled = total - null_count
                pct = (filled / total) * 100
                self.stdout.write(f' {field}: {filled}/{total} ({pct:.1f}%)')
                if null_count > total * 0.5:  # More than 50% missing
                    issues.append(
                        f"WARNING: {field} is missing on {null_count}/{total} exercises ({100-pct:.1f}%)"
                    )

        # Report: print every issue, then exit non-zero iff any is CRITICAL.
        self.stdout.write('')  # blank line
        if not issues:
            self.stdout.write(self.style.SUCCESS('All exercise data checks passed!'))
        else:
            for issue in issues:
                if issue.startswith('CRITICAL'):
                    self.stdout.write(self.style.ERROR(issue))
                else:
                    self.stdout.write(self.style.WARNING(issue))

            critical = [i for i in issues if i.startswith('CRITICAL')]
            if critical:
                self.stdout.write(self.style.ERROR(
                    f'\n{len(critical)} critical issue(s) found. Run fix commands to resolve.'
                ))
                sys.exit(1)
            else:
                self.stdout.write(self.style.WARNING(
                    f'\n{len(issues)} non-critical warning(s) found.'
                ))
|
||||
1375
generator/management/commands/calibrate_structure_rules.py
Normal file
1375
generator/management/commands/calibrate_structure_rules.py
Normal file
File diff suppressed because it is too large
Load Diff
125
generator/management/commands/check_rules_drift.py
Normal file
125
generator/management/commands/check_rules_drift.py
Normal file
@@ -0,0 +1,125 @@
|
||||
"""
|
||||
CI management command: check for drift between workout_research.md
|
||||
calibration values and WorkoutType DB records.
|
||||
|
||||
Usage:
|
||||
python manage.py check_rules_drift
|
||||
python manage.py check_rules_drift --verbosity 2
|
||||
"""
|
||||
|
||||
import sys
|
||||
|
||||
from django.core.management.base import BaseCommand
|
||||
|
||||
from generator.models import WorkoutType
|
||||
from generator.rules_engine import DB_CALIBRATION
|
||||
|
||||
|
||||
class Command(BaseCommand):
    """CI drift check: compare DB_CALIBRATION against WorkoutType rows.

    For every workout type named in DB_CALIBRATION, each field listed in
    FIELDS_TO_CHECK is compared against the corresponding WorkoutType
    record. Exits with status 1 when any mismatch is found, when a type
    is missing from the DB, or when zero fields end up being checked
    (which indicates the calibration keys no longer match DB names).
    """

    help = (
        'Check for drift between research doc calibration values '
        'and WorkoutType DB records. Exits 1 if mismatches, missing '
        'types, or zero fields checked.'
    )

    # Fields to compare between DB_CALIBRATION and WorkoutType model.
    # A field absent from a DB_CALIBRATION entry is simply skipped.
    FIELDS_TO_CHECK = [
        'duration_bias',
        'typical_rest_between_sets',
        'typical_intensity',
        'rep_range_min',
        'rep_range_max',
        'round_range_min',
        'round_range_max',
        'superset_size_min',
        'superset_size_max',
    ]

    def handle(self, *args, **options):
        """Run the drift comparison and print a report.

        Exits via sys.exit(1) on any error condition; otherwise prints a
        success message and returns normally.
        """
        verbosity = options.get('verbosity', 1)
        mismatches = []      # dicts: {type, field, expected, actual}
        missing_in_db = []   # DB_CALIBRATION keys with no WorkoutType row
        checked = 0          # total (type, field) pairs actually compared

        for type_name, expected_values in DB_CALIBRATION.items():
            try:
                wt = WorkoutType.objects.get(name=type_name)
            except WorkoutType.DoesNotExist:
                missing_in_db.append(type_name)
                continue

            for field_name in self.FIELDS_TO_CHECK:
                # Skip fields the calibration doc doesn't specify for this type.
                if field_name not in expected_values:
                    continue

                expected = expected_values[field_name]
                # getattr default None: a model missing the field counts as
                # a mismatch rather than an AttributeError.
                actual = getattr(wt, field_name, None)
                checked += 1

                if actual != expected:
                    mismatches.append({
                        'type': type_name,
                        'field': field_name,
                        'expected': expected,
                        'actual': actual,
                    })
                elif verbosity >= 2:
                    # Per-field OK lines only at -v 2 to keep default output short.
                    self.stdout.write(
                        f" OK {type_name}.{field_name} = {actual}"
                    )

        # Report results
        self.stdout.write('')
        self.stdout.write(f'Checked {checked} field(s) across {len(DB_CALIBRATION)} workout types.')
        self.stdout.write('')

        # List the missing types first so they appear near the summary line.
        if missing_in_db:
            self.stdout.write(self.style.ERROR(
                f'Missing from DB ({len(missing_in_db)}):'
            ))
            for name in missing_in_db:
                self.stdout.write(f' - {name}')
            self.stdout.write('')

        has_errors = False

        # Zero checked fields means the comparison never ran meaningfully.
        if checked == 0:
            has_errors = True
            self.stdout.write(self.style.ERROR(
                'No calibration fields were checked. '
                'DB_CALIBRATION keys likely do not match WorkoutType.name values.'
            ))
            self.stdout.write('')

        if missing_in_db:
            has_errors = True
            self.stdout.write(self.style.ERROR(
                'Missing workout types prevent full drift validation.'
            ))
            self.stdout.write('')

        if mismatches:
            has_errors = True
            self.stdout.write(self.style.ERROR(
                f'DRIFT DETECTED: {len(mismatches)} mismatch(es)'
            ))
            self.stdout.write('')
            # Fixed-width table of every mismatching (type, field) pair.
            header = f'{"Workout Type":<35} {"Field":<30} {"Expected":<15} {"Actual":<15}'
            self.stdout.write(header)
            self.stdout.write('-' * len(header))
            for m in mismatches:
                self.stdout.write(
                    f'{m["type"]:<35} {m["field"]:<30} '
                    f'{str(m["expected"]):<15} {str(m["actual"]):<15}'
                )
            self.stdout.write('')
            self.stdout.write(self.style.ERROR(
                'To fix: update WorkoutType records in the DB or '
                'update DB_CALIBRATION in generator/rules_engine.py.'
            ))
        if has_errors:
            sys.exit(1)

        self.stdout.write(self.style.SUCCESS(
            'No drift detected. DB values match research calibration.'
        ))
|
||||
798
generator/management/commands/classify_exercises.py
Normal file
798
generator/management/commands/classify_exercises.py
Normal file
@@ -0,0 +1,798 @@
|
||||
"""
|
||||
Classifies all Exercise records by difficulty_level and complexity_rating
|
||||
using name-based keyword matching and movement_patterns fallback rules.
|
||||
|
||||
difficulty_level: 'beginner', 'intermediate', 'advanced'
|
||||
complexity_rating: 1-5 integer scale
|
||||
|
||||
Classification strategy (applied in order, first match wins):
|
||||
|
||||
1. **Name-based keyword rules** -- regex patterns matched against exercise.name
|
||||
- ADVANCED_NAME_PATTERNS -> 'advanced'
|
||||
- BEGINNER_NAME_PATTERNS -> 'beginner'
|
||||
- Unmatched -> 'intermediate' (default)
|
||||
|
||||
2. **Name-based complexity rules** -- regex patterns matched against exercise.name
|
||||
- COMPLEXITY_5_PATTERNS -> 5 (Olympic lifts, advanced gymnastics)
|
||||
- COMPLEXITY_4_PATTERNS -> 4 (complex multi-joint, unilateral loaded)
|
||||
- COMPLEXITY_1_PATTERNS -> 1 (single-joint isolation, simple stretches)
|
||||
- COMPLEXITY_2_PATTERNS -> 2 (basic compound or standard bodyweight)
|
||||
- Unmatched -> movement_patterns fallback -> default 3
|
||||
|
||||
3. **Movement-pattern fallback** for exercises not caught by name rules,
|
||||
using the exercise's movement_patterns CharField.
|
||||
|
||||
Usage:
|
||||
python manage.py classify_exercises
|
||||
python manage.py classify_exercises --dry-run
|
||||
python manage.py classify_exercises --dry-run --verbose
|
||||
"""
|
||||
|
||||
import re
|
||||
from django.core.management.base import BaseCommand
|
||||
from exercise.models import Exercise
|
||||
|
||||
|
||||
# ============================================================================
|
||||
# DIFFICULTY LEVEL RULES (name-based)
|
||||
# ============================================================================
|
||||
# Each entry: (compiled_regex, difficulty_level)
|
||||
# Matched against exercise.name.lower(). First match wins.
|
||||
# Patterns use word boundaries (\b) where appropriate to avoid false positives.
|
||||
|
||||
ADVANCED_NAME_PATTERNS = [
|
||||
# --- Olympic lifts & derivatives ---
|
||||
r'\bsnatch\b',
|
||||
r'\bclean and jerk\b',
|
||||
r'\bclean & jerk\b',
|
||||
r'\bpower clean\b',
|
||||
r'\bhang clean\b',
|
||||
r'\bsquat clean\b',
|
||||
r'\bclean pull\b',
|
||||
r'\bcluster\b.*\bclean\b',
|
||||
r'\bclean\b.*\bto\b.*\bpress\b', # clean to press / clean to push press
|
||||
r'\bclean\b.*\bto\b.*\bjerk\b',
|
||||
r'\bpush jerk\b',
|
||||
r'\bsplit jerk\b',
|
||||
r'\bjerk\b(?!.*chicken)', # jerk but not "chicken jerk" type food
|
||||
r'\bthruster\b',
|
||||
r'\bwall ball\b', # high coordination + explosive
|
||||
|
||||
# --- Advanced gymnastics / calisthenics ---
|
||||
r'\bpistol\b.*\bsquat\b',
|
||||
r'\bpistol squat\b',
|
||||
r'\bmuscle.?up\b',
|
||||
r'\bhandstand\b',
|
||||
r'\bhand\s*stand\b',
|
||||
r'\bdragon flag\b',
|
||||
r'\bplanche\b',
|
||||
r'\bl.?sit\b',
|
||||
r'\bhuman flag\b',
|
||||
r'\bfront lever\b',
|
||||
r'\bback lever\b',
|
||||
r'\biron cross\b',
|
||||
r'\bmaltese\b',
|
||||
r'\bstrict press.*handstand\b',
|
||||
r'\bskin the cat\b',
|
||||
r'\bwindshield wiper\b(?!.*stretch)', # weighted windshield wipers, not stretch
|
||||
|
||||
# --- Advanced barbell lifts ---
|
||||
r'\bturkish get.?up\b',
|
||||
r'\bturkish getup\b',
|
||||
r'\btgu\b',
|
||||
r'\bzercher\b', # zercher squat/carry
|
||||
r'\bdeficit deadlift\b',
|
||||
r'\bsnatch.?grip deadlift\b',
|
||||
r'\bsumo deadlift\b', # wider stance = more mobility demand
|
||||
r'\bhack squat\b.*\bbarbell\b', # barbell hack squat (not machine)
|
||||
r'\boverhead squat\b',
|
||||
r'\bsingle.?leg deadlift\b.*\bbarbell\b',
|
||||
r'\bbarbell\b.*\bsingle.?leg deadlift\b',
|
||||
r'\bscorpion\b', # scorpion press
|
||||
|
||||
# --- Plyometric / explosive ---
|
||||
r'\bbox jump\b',
|
||||
r'\bdepth jump\b',
|
||||
r'\btuck jump\b',
|
||||
r'\bbroad jump\b',
|
||||
r'\bclap push.?up\b',
|
||||
r'\bclapping push.?up\b',
|
||||
r'\bplyometric push.?up\b',
|
||||
r'\bplyo push.?up\b',
|
||||
r'\bexplosive\b',
|
||||
r'\bkipping\b',
|
||||
|
||||
# --- Advanced core ---
|
||||
r'\bab.?wheel\b',
|
||||
r'\bab roller\b',
|
||||
r'\btoes.?to.?bar\b',
|
||||
r'\bknees.?to.?elbow\b',
|
||||
r'\bhanging.?leg.?raise\b',
|
||||
r'\bhanging.?knee.?raise\b',
|
||||
]
|
||||
|
||||
BEGINNER_NAME_PATTERNS = [
|
||||
# --- Simple machine isolation ---
|
||||
r'\bleg press\b',
|
||||
r'\bleg extension\b',
|
||||
r'\bleg curl\b',
|
||||
r'\bhamstring curl\b.*\bmachine\b',
|
||||
r'\bmachine\b.*\bhamstring curl\b',
|
||||
r'\bcalf raise\b.*\bmachine\b',
|
||||
r'\bmachine\b.*\bcalf raise\b',
|
||||
r'\bseated calf raise\b',
|
||||
r'\bchest fly\b.*\bmachine\b',
|
||||
r'\bmachine\b.*\bchest fly\b',
|
||||
r'\bpec.?deck\b',
|
||||
r'\bpec fly\b.*\bmachine\b',
|
||||
r'\bcable\b.*\bcurl\b',
|
||||
r'\bcable\b.*\btricep\b',
|
||||
r'\bcable\b.*\bpushdown\b',
|
||||
r'\btricep.?pushdown\b',
|
||||
r'\blat pulldown\b',
|
||||
r'\bseated row\b.*\bmachine\b',
|
||||
r'\bmachine\b.*\brow\b',
|
||||
r'\bsmith machine\b',
|
||||
|
||||
# --- Basic bodyweight ---
|
||||
r'\bwall sit\b',
|
||||
r'\bwall push.?up\b',
|
||||
r'\bincline push.?up\b',
|
||||
r'\bdead hang\b',
|
||||
r'\bplank\b(?!.*\bjack\b)(?!.*\bup\b.*\bdown\b)', # plank but not plank jacks or up-down planks
|
||||
r'\bside plank\b',
|
||||
r'\bglute bridge\b',
|
||||
r'\bhip bridge\b',
|
||||
r'\bbird.?dog\b',
|
||||
r'\bsuperman\b(?!.*\bpush.?up\b)',
|
||||
r'\bcrunches?\b',
|
||||
r'\bsit.?up\b',
|
||||
r'\bbicycle\b.*\bcrunch\b',
|
||||
r'\bflutter kick\b',
|
||||
r'\bleg raise\b(?!.*\bhanging\b)', # lying leg raise (not hanging)
|
||||
r'\blying\b.*\bleg raise\b',
|
||||
r'\bcalf raise\b(?!.*\bbarbell\b)(?!.*\bsingle\b)', # basic standing calf raise
|
||||
r'\bstanding calf raise\b',
|
||||
|
||||
# --- Stretches and foam rolling ---
|
||||
r'\bstretch\b',
|
||||
r'\bstretching\b',
|
||||
r'\bfoam roll\b',
|
||||
r'\bfoam roller\b',
|
||||
r'\blacrosse ball\b',
|
||||
r'\bmyofascial\b',
|
||||
r'\bself.?massage\b',
|
||||
|
||||
# --- Breathing ---
|
||||
r'\bbreathing\b',
|
||||
r'\bbreathe\b',
|
||||
r'\bdiaphragmatic\b',
|
||||
r'\bbox breathing\b',
|
||||
r'\bbreath\b',
|
||||
|
||||
# --- Basic mobility ---
|
||||
r'\bneck\b.*\bcircle\b',
|
||||
r'\barm\b.*\bcircle\b',
|
||||
r'\bshoulder\b.*\bcircle\b',
|
||||
r'\bankle\b.*\bcircle\b',
|
||||
r'\bhip\b.*\bcircle\b',
|
||||
r'\bwrist\b.*\bcircle\b',
|
||||
r'\bcat.?cow\b',
|
||||
r'\bchild.?s?\s*pose\b',
|
||||
|
||||
# --- Simple cardio ---
|
||||
r'\bwalking\b(?!.*\blunge\b)', # walking but not walking lunges
|
||||
r'\bwalk\b(?!.*\bout\b)(?!.*\blunge\b)', # walk but not walkouts or walk lunges
|
||||
r'\bjogging\b',
|
||||
r'\bjog\b',
|
||||
r'\bstepping\b',
|
||||
r'\bstep.?up\b(?!.*\bweighted\b)(?!.*\bbarbell\b)(?!.*\bdumbbell\b)',
|
||||
r'\bjumping jack\b',
|
||||
r'\bhigh knee\b',
|
||||
r'\bbutt kick\b',
|
||||
r'\bbutt kicker\b',
|
||||
r'\bmountain climber\b',
|
||||
|
||||
# --- Simple yoga poses ---
|
||||
r'\bdownward.?dog\b',
|
||||
r'\bupward.?dog\b',
|
||||
r'\bwarrior\b.*\bpose\b',
|
||||
r'\btree\b.*\bpose\b',
|
||||
r'\bcorpse\b.*\bpose\b',
|
||||
r'\bsavasana\b',
|
||||
r'\bchild.?s?\s*pose\b',
|
||||
]
|
||||
|
||||
# Compile the name rules once at import time so per-exercise classification
# only pays regex search cost, never compilation. Each entry pairs a
# compiled pattern with the difficulty level it assigns.
_ADVANCED_NAME_RE = [(re.compile(p, re.IGNORECASE), 'advanced') for p in ADVANCED_NAME_PATTERNS]
_BEGINNER_NAME_RE = [(re.compile(p, re.IGNORECASE), 'beginner') for p in BEGINNER_NAME_PATTERNS]
|
||||
|
||||
|
||||
# ============================================================================
|
||||
# COMPLEXITY RATING RULES (name-based, 1-5 scale)
|
||||
# ============================================================================
|
||||
# 1 = Single-joint, simple movement (curls, calf raises, stretches)
|
||||
# 2 = Basic compound or standard bodyweight
|
||||
# 3 = Standard compound with moderate coordination (bench press, squat, row)
|
||||
# 4 = Complex multi-joint, unilateral loaded, high coordination demand
|
||||
# 5 = Highly technical (Olympic lifts, advanced gymnastics)
|
||||
|
||||
COMPLEXITY_5_PATTERNS = [
|
||||
# --- Olympic lifts ---
|
||||
r'\bsnatch\b',
|
||||
r'\bclean and jerk\b',
|
||||
r'\bclean & jerk\b',
|
||||
r'\bpower clean\b',
|
||||
r'\bhang clean\b',
|
||||
r'\bsquat clean\b',
|
||||
r'\bclean pull\b',
|
||||
r'\bclean\b.*\bto\b.*\bpress\b',
|
||||
r'\bclean\b.*\bto\b.*\bjerk\b',
|
||||
r'\bpush jerk\b',
|
||||
r'\bsplit jerk\b',
|
||||
r'\bjerk\b(?!.*chicken)',
|
||||
|
||||
# --- Advanced gymnastics ---
|
||||
r'\bmuscle.?up\b',
|
||||
r'\bhandstand\b.*\bpush.?up\b',
|
||||
r'\bplanche\b',
|
||||
r'\bhuman flag\b',
|
||||
r'\bfront lever\b',
|
||||
r'\bback lever\b',
|
||||
r'\biron cross\b',
|
||||
r'\bmaltese\b',
|
||||
r'\bskin the cat\b',
|
||||
|
||||
# --- Complex loaded movements ---
|
||||
r'\bturkish get.?up\b',
|
||||
r'\bturkish getup\b',
|
||||
r'\btgu\b',
|
||||
r'\boverhead squat\b',
|
||||
]
|
||||
|
||||
COMPLEXITY_4_PATTERNS = [
|
||||
# --- Complex compound ---
|
||||
r'\bthruster\b',
|
||||
r'\bwall ball\b',
|
||||
r'\bzercher\b',
|
||||
r'\bdeficit deadlift\b',
|
||||
r'\bsumo deadlift\b',
|
||||
r'\bsnatch.?grip deadlift\b',
|
||||
r'\bpistol\b.*\bsquat\b',
|
||||
r'\bpistol squat\b',
|
||||
r'\bdragon flag\b',
|
||||
r'\bl.?sit\b',
|
||||
r'\bhandstand\b(?!.*\bpush.?up\b)', # handstand hold (not HSPU, that's 5)
|
||||
r'\bwindshield wiper\b',
|
||||
r'\btoes.?to.?bar\b',
|
||||
r'\bknees.?to.?elbow\b',
|
||||
r'\bkipping\b',
|
||||
|
||||
# --- Single-leg loaded (barbell/dumbbell) ---
|
||||
r'\bsingle.?leg deadlift\b',
|
||||
r'\bsingle.?leg rdl\b',
|
||||
r'\bsingle.?leg squat\b(?!.*\bpistol\b)',
|
||||
r'\bbulgarian split squat\b',
|
||||
r'\brear.?foot.?elevated\b.*\bsplit\b',
|
||||
|
||||
# --- Explosive / plyometric ---
|
||||
r'\bbox jump\b',
|
||||
r'\bdepth jump\b',
|
||||
r'\btuck jump\b',
|
||||
r'\bbroad jump\b',
|
||||
r'\bclap push.?up\b',
|
||||
r'\bclapping push.?up\b',
|
||||
r'\bplyometric push.?up\b',
|
||||
r'\bplyo push.?up\b',
|
||||
r'\bexplosive\b',
|
||||
|
||||
# --- Advanced core ---
|
||||
r'\bab.?wheel\b',
|
||||
r'\bab roller\b',
|
||||
r'\bhanging.?leg.?raise\b',
|
||||
r'\bhanging.?knee.?raise\b',
|
||||
|
||||
# --- Complex upper body ---
|
||||
r'\barcher\b.*\bpush.?up\b',
|
||||
r'\bdiamond push.?up\b',
|
||||
r'\bpike push.?up\b',
|
||||
r'\bmilitary press\b',
|
||||
r'\bstrict press\b',
|
||||
|
||||
# --- Carries (unilateral loaded / coordination) ---
|
||||
r'\bfarmer.?s?\s*carry\b',
|
||||
r'\bfarmer.?s?\s*walk\b',
|
||||
r'\bsuitcase carry\b',
|
||||
r'\boverhead carry\b',
|
||||
r'\brack carry\b',
|
||||
r'\bwaiter.?s?\s*carry\b',
|
||||
r'\bwaiter.?s?\s*walk\b',
|
||||
r'\bcross.?body carry\b',
|
||||
]
|
||||
|
||||
COMPLEXITY_1_PATTERNS = [
|
||||
# --- Single-joint isolation ---
|
||||
r'\bbicep curl\b',
|
||||
r'\bcurl\b(?!.*\bleg\b)(?!.*\bhamstring\b)(?!.*\bnordic\b)',
|
||||
r'\btricep extension\b',
|
||||
r'\btricep kickback\b',
|
||||
r'\btricep.?pushdown\b',
|
||||
r'\bskull.?crusher\b',
|
||||
r'\bcable\b.*\bfly\b',
|
||||
r'\bcable\b.*\bpushdown\b',
|
||||
r'\bcable\b.*\bcurl\b',
|
||||
r'\bleg extension\b',
|
||||
r'\bleg curl\b',
|
||||
r'\bhamstring curl\b',
|
||||
r'\bcalf raise\b',
|
||||
r'\blateral raise\b',
|
||||
r'\bfront raise\b',
|
||||
r'\brear delt fly\b',
|
||||
r'\breverse fly\b',
|
||||
r'\bpec.?deck\b',
|
||||
r'\bchest fly\b.*\bmachine\b',
|
||||
r'\bmachine\b.*\bchest fly\b',
|
||||
r'\bshrug\b',
|
||||
r'\bwrist curl\b',
|
||||
r'\bforearm curl\b',
|
||||
r'\bconcentration curl\b',
|
||||
r'\bhammer curl\b',
|
||||
r'\bpreacher curl\b',
|
||||
r'\bincline curl\b',
|
||||
|
||||
# --- Stretches / foam rolling ---
|
||||
r'\bstretch\b',
|
||||
r'\bstretching\b',
|
||||
r'\bfoam roll\b',
|
||||
r'\bfoam roller\b',
|
||||
r'\blacrosse ball\b',
|
||||
r'\bmyofascial\b',
|
||||
r'\bself.?massage\b',
|
||||
|
||||
# --- Breathing ---
|
||||
r'\bbreathing\b',
|
||||
r'\bbreathe\b',
|
||||
r'\bdiaphragmatic\b',
|
||||
r'\bbox breathing\b',
|
||||
r'\bbreath\b',
|
||||
|
||||
# --- Simple isolation machines ---
|
||||
r'\bpec fly\b',
|
||||
r'\bseated calf raise\b',
|
||||
|
||||
# --- Simple mobility ---
|
||||
r'\bneck\b.*\bcircle\b',
|
||||
r'\barm\b.*\bcircle\b',
|
||||
r'\bshoulder\b.*\bcircle\b',
|
||||
r'\bankle\b.*\bcircle\b',
|
||||
r'\bhip\b.*\bcircle\b',
|
||||
r'\bwrist\b.*\bcircle\b',
|
||||
r'\bcat.?cow\b',
|
||||
r'\bchild.?s?\s*pose\b',
|
||||
r'\bcorpse\b.*\bpose\b',
|
||||
r'\bsavasana\b',
|
||||
]
|
||||
|
||||
COMPLEXITY_2_PATTERNS = [
|
||||
# --- Basic bodyweight compound ---
|
||||
r'\bpush.?up\b(?!.*\bclap\b)(?!.*\bplyometric\b)(?!.*\bplyo\b)(?!.*\bpike\b)(?!.*\bdiamond\b)(?!.*\barcher\b)(?!.*\bexplosive\b)',
|
||||
r'\bsit.?up\b',
|
||||
r'\bcrunches?\b',
|
||||
r'\bbicycle\b.*\bcrunch\b',
|
||||
r'\bflutter kick\b',
|
||||
r'\bplank\b',
|
||||
r'\bside plank\b',
|
||||
r'\bglute bridge\b',
|
||||
r'\bhip bridge\b',
|
||||
r'\bbird.?dog\b',
|
||||
r'\bsuperman\b',
|
||||
r'\bwall sit\b',
|
||||
r'\bdead hang\b',
|
||||
r'\bbodyweight squat\b',
|
||||
r'\bair squat\b',
|
||||
r'\blying\b.*\bleg raise\b',
|
||||
r'\bleg raise\b(?!.*\bhanging\b)',
|
||||
r'\bjumping jack\b',
|
||||
r'\bhigh knee\b',
|
||||
r'\bbutt kick\b',
|
||||
r'\bbutt kicker\b',
|
||||
r'\bmountain climber\b',
|
||||
r'\bstep.?up\b(?!.*\bweighted\b)(?!.*\bbarbell\b)',
|
||||
|
||||
# --- Basic machine compound ---
|
||||
r'\bleg press\b',
|
||||
r'\blat pulldown\b',
|
||||
r'\bseated row\b.*\bmachine\b',
|
||||
r'\bmachine\b.*\brow\b',
|
||||
r'\bchest press\b.*\bmachine\b',
|
||||
r'\bmachine\b.*\bchest press\b',
|
||||
r'\bsmith machine\b',
|
||||
|
||||
# --- Cardio / locomotion ---
|
||||
r'\bwalking\b',
|
||||
r'\bwalk\b(?!.*\bout\b)',
|
||||
r'\bjogging\b',
|
||||
r'\bjog\b',
|
||||
r'\brunning\b',
|
||||
r'\bsprinting\b',
|
||||
r'\browing\b.*\bmachine\b',
|
||||
r'\bassault bike\b',
|
||||
r'\bstationary bike\b',
|
||||
r'\belliptical\b',
|
||||
r'\bjump rope\b',
|
||||
r'\bskipping\b',
|
||||
|
||||
# --- Simple yoga poses ---
|
||||
r'\bdownward.?dog\b',
|
||||
r'\bupward.?dog\b',
|
||||
r'\bwarrior\b.*\bpose\b',
|
||||
r'\btree\b.*\bpose\b',
|
||||
|
||||
# --- Basic combat ---
|
||||
r'\bjab\b',
|
||||
r'\bcross\b(?!.*\bbody\b.*\bcarry\b)',
|
||||
r'\bshadow\s*box\b',
|
||||
|
||||
# --- Basic resistance band ---
|
||||
r'\bband\b.*\bpull.?apart\b',
|
||||
r'\bband\b.*\bface pull\b',
|
||||
]
|
||||
|
||||
# Compile the complexity rules once at import time; each entry pairs a
# compiled pattern with the 1-5 rating it assigns. classify_complexity()
# consults these in the order 5, 4, 1, 2 (1 before 2 -- overlapping patterns).
_COMPLEXITY_5_RE = [(re.compile(p, re.IGNORECASE), 5) for p in COMPLEXITY_5_PATTERNS]
_COMPLEXITY_4_RE = [(re.compile(p, re.IGNORECASE), 4) for p in COMPLEXITY_4_PATTERNS]
_COMPLEXITY_1_RE = [(re.compile(p, re.IGNORECASE), 1) for p in COMPLEXITY_1_PATTERNS]
_COMPLEXITY_2_RE = [(re.compile(p, re.IGNORECASE), 2) for p in COMPLEXITY_2_PATTERNS]
|
||||
|
||||
|
||||
# ============================================================================
|
||||
# MOVEMENT PATTERN -> DIFFICULTY FALLBACK
|
||||
# ============================================================================
|
||||
# When name-based rules don't match, use movement_patterns field.
|
||||
# Keys are substring matches against movement_patterns (lowercased).
|
||||
# Order matters: first match wins.
|
||||
|
||||
MOVEMENT_PATTERN_DIFFICULTY = [
|
||||
# --- Advanced patterns ---
|
||||
('plyometric', 'advanced'),
|
||||
('olympic', 'advanced'),
|
||||
|
||||
# --- Beginner patterns ---
|
||||
('massage', 'beginner'),
|
||||
('breathing', 'beginner'),
|
||||
('mobility - static', 'beginner'),
|
||||
('yoga', 'beginner'),
|
||||
('stretch', 'beginner'),
|
||||
|
||||
# --- Intermediate (default for all loaded / compound patterns) ---
|
||||
('upper push - vertical', 'intermediate'),
|
||||
('upper push - horizontal', 'intermediate'),
|
||||
('upper pull - vertical', 'intermediate'),
|
||||
('upper pull - horizonal', 'intermediate'), # note: typo matches DB
|
||||
('upper pull - horizontal', 'intermediate'),
|
||||
('upper push', 'intermediate'),
|
||||
('upper pull', 'intermediate'),
|
||||
('lower push - squat', 'intermediate'),
|
||||
('lower push - lunge', 'intermediate'),
|
||||
('lower pull - hip hinge', 'intermediate'),
|
||||
('lower push', 'intermediate'),
|
||||
('lower pull', 'intermediate'),
|
||||
('core - anti-extension', 'intermediate'),
|
||||
('core - rotational', 'intermediate'),
|
||||
('core - anti-rotation', 'intermediate'),
|
||||
('core - carry', 'intermediate'),
|
||||
('core', 'intermediate'),
|
||||
('arms', 'intermediate'),
|
||||
('machine', 'intermediate'),
|
||||
('balance', 'intermediate'),
|
||||
('mobility - dynamic', 'intermediate'),
|
||||
('mobility', 'intermediate'),
|
||||
('combat', 'intermediate'),
|
||||
('cardio/locomotion', 'intermediate'),
|
||||
('cardio', 'intermediate'),
|
||||
]
|
||||
|
||||
|
||||
# ============================================================================
|
||||
# MOVEMENT PATTERN -> COMPLEXITY FALLBACK
|
||||
# ============================================================================
|
||||
# When name-based rules don't match, use movement_patterns field.
|
||||
# Order matters: first match wins.
|
||||
|
||||
MOVEMENT_PATTERN_COMPLEXITY = [
|
||||
# --- Complexity 5 ---
|
||||
('olympic', 5),
|
||||
|
||||
# --- Complexity 4 ---
|
||||
('plyometric', 4),
|
||||
('core - carry', 4),
|
||||
|
||||
# --- Complexity 3 (standard compound) ---
|
||||
('upper push - vertical', 3),
|
||||
('upper push - horizontal', 3),
|
||||
('upper pull - vertical', 3),
|
||||
('upper pull - horizonal', 3), # typo matches DB
|
||||
('upper pull - horizontal', 3),
|
||||
('upper push', 3),
|
||||
('upper pull', 3),
|
||||
('lower push - squat', 3),
|
||||
('lower push - lunge', 3),
|
||||
('lower pull - hip hinge', 3),
|
||||
('lower push', 3),
|
||||
('lower pull', 3),
|
||||
('core - anti-extension', 3),
|
||||
('core - rotational', 3),
|
||||
('core - anti-rotation', 3),
|
||||
('balance', 3),
|
||||
('combat', 3),
|
||||
|
||||
# --- Complexity 2 ---
|
||||
('core', 2),
|
||||
('machine', 2),
|
||||
('arms', 2),
|
||||
('mobility - dynamic', 2),
|
||||
('cardio/locomotion', 2),
|
||||
('cardio', 2),
|
||||
('yoga', 2),
|
||||
|
||||
# --- Complexity 1 ---
|
||||
('mobility - static', 1),
|
||||
('massage', 1),
|
||||
('stretch', 1),
|
||||
('breathing', 1),
|
||||
('mobility', 1), # generic mobility fallback
|
||||
]
|
||||
|
||||
|
||||
# ============================================================================
|
||||
# EQUIPMENT-BASED ADJUSTMENTS
|
||||
# ============================================================================
|
||||
# Some exercises can be bumped up or down based on equipment context.
|
||||
# These are applied AFTER name + movement_pattern rules as modifiers.
|
||||
|
||||
def _apply_equipment_adjustments(exercise, difficulty, complexity):
|
||||
"""
|
||||
Apply equipment-based adjustments to difficulty and complexity.
|
||||
|
||||
- Barbell compound lifts: ensure at least intermediate / 3
|
||||
- Kettlebell: bump complexity +1 for most movements (unstable load)
|
||||
- Stability ball: bump complexity +1 (balance demand)
|
||||
- Suspension trainer (TRX): bump complexity +1 (instability)
|
||||
- Machine: cap complexity at 2 (guided path, low coordination)
|
||||
- Resistance band: no change
|
||||
"""
|
||||
name_lower = (exercise.name or '').lower()
|
||||
equip = (exercise.equipment_required or '').lower()
|
||||
patterns = (exercise.movement_patterns or '').lower()
|
||||
|
||||
# --- Machine cap: complexity should not exceed 2 ---
|
||||
is_machine = (
|
||||
'machine' in equip
|
||||
or 'machine' in name_lower
|
||||
or 'smith' in name_lower
|
||||
or 'machine' in patterns
|
||||
)
|
||||
# But only if it's truly a guided-path machine, not cable
|
||||
is_cable = 'cable' in equip or 'cable' in name_lower
|
||||
if is_machine and not is_cable:
|
||||
complexity = min(complexity, 2)
|
||||
|
||||
# --- Kettlebell bump: +1 complexity (unstable center of mass) ---
|
||||
is_kettlebell = 'kettlebell' in equip or 'kettlebell' in name_lower
|
||||
if is_kettlebell and complexity < 5:
|
||||
# Only bump for compound movements, not simple swings etc.
|
||||
if any(kw in patterns for kw in ['upper push', 'upper pull', 'lower push', 'lower pull', 'core - carry']):
|
||||
complexity = min(complexity + 1, 5)
|
||||
|
||||
# --- Stability ball bump: +1 complexity ---
|
||||
is_stability_ball = 'stability ball' in equip or 'stability ball' in name_lower
|
||||
if is_stability_ball and complexity < 5:
|
||||
complexity = min(complexity + 1, 5)
|
||||
|
||||
# --- Suspension trainer (TRX) bump: +1 complexity ---
|
||||
is_suspension = (
|
||||
'suspension' in equip or 'trx' in name_lower
|
||||
or 'suspension' in name_lower
|
||||
)
|
||||
if is_suspension and complexity < 5:
|
||||
complexity = min(complexity + 1, 5)
|
||||
|
||||
# --- Barbell floor: ensure at least intermediate / 3 for big lifts ---
|
||||
is_barbell = 'barbell' in equip or 'barbell' in name_lower
|
||||
if is_barbell:
|
||||
for lift in ['squat', 'deadlift', 'bench', 'press', 'row', 'lunge']:
|
||||
if lift in name_lower:
|
||||
if difficulty == 'beginner':
|
||||
difficulty = 'intermediate'
|
||||
complexity = max(complexity, 3)
|
||||
break
|
||||
|
||||
return difficulty, complexity
|
||||
|
||||
|
||||
# ============================================================================
|
||||
# CLASSIFICATION FUNCTIONS
|
||||
# ============================================================================
|
||||
|
||||
def classify_difficulty(exercise):
    """Return the difficulty_level for an exercise; first matching rule wins.

    Rule order: advanced name patterns, then beginner name patterns, then
    the movement_patterns fallback table, then the 'intermediate' default.
    """
    lowered = (exercise.name or '').lower()

    # Name rules: advanced patterns take priority over beginner patterns.
    for regex, level in _ADVANCED_NAME_RE + _BEGINNER_NAME_RE:
        if regex.search(lowered):
            return level

    # Fallback on the movement_patterns field (substring match, in order).
    pattern_text = (exercise.movement_patterns or '').lower()
    if pattern_text:
        matched = next(
            (level for keyword, level in MOVEMENT_PATTERN_DIFFICULTY
             if keyword in pattern_text),
            None,
        )
        if matched is not None:
            return matched

    # Nothing matched: assume a middle-of-the-road exercise.
    return 'intermediate'
|
||||
|
||||
|
||||
def classify_complexity(exercise):
    """Return the complexity_rating (1-5) for an exercise; first match wins.

    Name rule sets are consulted in the order 5, 4, 1, 2 -- tier 1 is
    checked before tier 2 because some of their patterns overlap. When no
    name rule matches, the movement_patterns fallback table is used, and
    the final default is 3 (moderate).
    """
    lowered = (exercise.name or '').lower()

    # Name-based rule sets, highest-signal first (1 before 2 deliberately).
    for rule_set in (_COMPLEXITY_5_RE, _COMPLEXITY_4_RE, _COMPLEXITY_1_RE, _COMPLEXITY_2_RE):
        for regex, rating in rule_set:
            if regex.search(lowered):
                return rating

    # Movement-pattern fallback (ordered substring match).
    pattern_text = (exercise.movement_patterns or '').lower()
    if pattern_text:
        matched = next(
            (rating for keyword, rating in MOVEMENT_PATTERN_COMPLEXITY
             if keyword in pattern_text),
            None,
        )
        if matched is not None:
            return matched

    # Default: moderate complexity.
    return 3
|
||||
|
||||
|
||||
def classify_exercise(exercise):
    """Classify one exercise.

    Returns a ``(difficulty_level, complexity_rating)`` tuple computed
    from the name/pattern rules and then adjusted for equipment.
    """
    base_difficulty = classify_difficulty(exercise)
    base_complexity = classify_complexity(exercise)

    # Equipment may raise or lower either value; the helper returns the
    # (possibly adjusted) pair, which is handed back to the caller as-is.
    return _apply_equipment_adjustments(exercise, base_difficulty, base_complexity)
|
||||
|
||||
|
||||
# ============================================================================
|
||||
# MANAGEMENT COMMAND
|
||||
# ============================================================================
|
||||
|
||||
class Command(BaseCommand):
    """Classify all exercises by difficulty_level and complexity_rating.

    DEPRECATED in favour of ``populate_exercise_fields``; kept for
    backwards compatibility and emits a DeprecationWarning when run.
    """

    help = (
        'Classify all exercises by difficulty_level and complexity_rating '
        'using name-based keyword rules and movement_patterns fallback.'
    )

    def add_arguments(self, parser):
        """Register --dry-run, --verbose and --only-unset flags."""
        parser.add_argument(
            '--dry-run',
            action='store_true',
            help='Show what would change without saving.',
        )
        parser.add_argument(
            '--verbose',
            action='store_true',
            help='Print each exercise classification.',
        )
        parser.add_argument(
            '--only-unset',
            action='store_true',
            help='Only classify exercises that have NULL difficulty/complexity.',
        )

    def handle(self, *args, **options):
        """Classify every exercise, print a summary, and (unless --dry-run) save changes."""
        # Warn both programmatically and on stderr: this command is superseded.
        import warnings
        warnings.warn(
            "classify_exercises is deprecated. Use 'populate_exercise_fields' instead, "
            "which populates all 8 exercise fields including difficulty and complexity.",
            DeprecationWarning,
            stacklevel=2,
        )
        self.stderr.write(self.style.WARNING(
            "DEPRECATED: Use 'python manage.py populate_exercise_fields' instead. "
            "This command only sets difficulty_level and complexity_rating, while "
            "populate_exercise_fields sets all 8 fields."
        ))

        dry_run = options['dry_run']
        verbose = options['verbose']
        only_unset = options['only_unset']

        exercises = Exercise.objects.all().order_by('name')
        if only_unset:
            # Union of rows missing either field; distinct() guards against
            # duplicates from the OR-combined querysets.
            exercises = exercises.filter(
                difficulty_level__isnull=True
            ) | exercises.filter(
                complexity_rating__isnull=True
            )
            exercises = exercises.distinct().order_by('name')

        total = exercises.count()
        updated = 0
        unchanged = 0

        # Counters for summary
        difficulty_counts = {'beginner': 0, 'intermediate': 0, 'advanced': 0}
        complexity_counts = {1: 0, 2: 0, 3: 0, 4: 0, 5: 0}

        for ex in exercises:
            difficulty, complexity = classify_exercise(ex)

            # Distribution counters reflect the *computed* classification,
            # whether or not the stored row actually changes.
            difficulty_counts[difficulty] += 1
            complexity_counts[complexity] += 1

            changed = (
                ex.difficulty_level != difficulty
                or ex.complexity_rating != complexity
            )

            if verbose:
                # '*' marks rows whose stored values differ from the new ones.
                marker = '*' if changed else ' '
                self.stdout.write(
                    f' {marker} {ex.name:<55} '
                    f'difficulty={difficulty:<14} '
                    f'complexity={complexity} '
                    f'patterns="{ex.movement_patterns or ""}"'
                )

            if changed:
                updated += 1
                if not dry_run:
                    ex.difficulty_level = difficulty
                    ex.complexity_rating = complexity
                    ex.save(update_fields=['difficulty_level', 'complexity_rating'])
            else:
                unchanged += 1

        # Summary
        prefix = '[DRY RUN] ' if dry_run else ''
        self.stdout.write('')
        self.stdout.write(f'{prefix}Processed {total} exercises:')
        self.stdout.write(f' {updated} updated, {unchanged} unchanged')
        self.stdout.write('')
        self.stdout.write('Difficulty distribution:')
        for level, count in difficulty_counts.items():
            # Guard against division by zero when no exercises matched.
            pct = (count / total * 100) if total else 0
            self.stdout.write(f' {level:<14} {count:>5} ({pct:.1f}%)')
        self.stdout.write('')
        self.stdout.write('Complexity distribution:')
        for rating in sorted(complexity_counts.keys()):
            count = complexity_counts[rating]
            pct = (count / total * 100) if total else 0
            self.stdout.write(f' {rating} {count:>5} ({pct:.1f}%)')
|
||||
222
generator/management/commands/fix_exercise_flags.py
Normal file
222
generator/management/commands/fix_exercise_flags.py
Normal file
@@ -0,0 +1,222 @@
|
||||
"""
|
||||
Fix exercise flags and assign missing muscle associations.
|
||||
|
||||
1. Fix is_weight flags on exercises that are bodyweight but incorrectly marked
|
||||
is_weight=True (wall sits, agility ladder, planks, bodyweight exercises, etc.)
|
||||
|
||||
2. Assign muscle groups to exercises that have no ExerciseMuscle rows, using
|
||||
name keyword matching.
|
||||
|
||||
Known false positives: wall sits, agility ladder, planks, body weight exercises,
|
||||
and similar movements that use no external resistance.
|
||||
|
||||
Usage:
|
||||
python manage.py fix_exercise_flags
|
||||
python manage.py fix_exercise_flags --dry-run
|
||||
"""
|
||||
|
||||
import re
|
||||
from django.core.management.base import BaseCommand
|
||||
from exercise.models import Exercise
|
||||
from muscle.models import Muscle, ExerciseMuscle
|
||||
|
||||
try:
|
||||
from equipment.models import WorkoutEquipment
|
||||
except ImportError:
|
||||
WorkoutEquipment = None
|
||||
|
||||
|
||||
# Patterns that indicate bodyweight exercises (no external weight).
# Uses word boundary matching to avoid substring issues (e.g. "l sit" in "wall sit").
# Applied with re.search against the lowercased exercise name.
BODYWEIGHT_PATTERNS = [
    r'\bwall sit\b',
    r'\bplank\b',
    r'\bmountain climber\b',
    r'\bburpee\b',
    r'\bpush ?up\b',
    r'\bpushup\b',
    r'\bpull ?up\b',
    r'\bpullup\b',
    r'\bchin ?up\b',
    r'\bchinup\b',
    r'\bdips?\b',
    r'\bpike\b',
    r'\bhandstand\b',
    r'\bl sit\b',
    r'\bv sit\b',
    r'\bhollow\b',
    r'\bsuperman\b',
    r'\bbird dog\b',
    r'\bdead bug\b',
    r'\bbear crawl\b',
    r'\bcrab walk\b',
    r'\binchworm\b',
    r'\bjumping jack\b',
    r'\bhigh knee\b',
    r'\bbutt kick\b',
    r'\bskater\b',
    r'\blunge jump\b',
    r'\bjump lunge\b',
    r'\bsquat jump\b',
    r'\bjump squat\b',
    r'\bbox jump\b',
    r'\btuck jump\b',
    r'\bbroad jump\b',
    r'\bsprinter\b',
    r'\bagility ladder\b',
    r'\bbody ?weight\b',
    r'\bbodyweight\b',
    r'\bcalisthenics?\b',
    r'\bflutter kick\b',
    r'\bleg raise\b',
    r'\bsit ?up\b',
    r'\bcrunch\b',
    r'\bstretch\b',
    r'\byoga\b',
    r'\bfoam roll\b',
    r'\bjump rope\b',
    r'\bspider crawl\b',
]

# Keywords for assigning muscles to exercises with no ExerciseMuscle rows.
# Each muscle name maps to a list of name keywords to match against exercise name.
# NOTE(review): some keywords deliberately appear under more than one muscle
# ('curl' -> hamstrings and biceps, 'extension' -> quads and triceps), so a
# single exercise name can be assigned to several groups -- confirm intended.
EXERCISE_MUSCLE_KEYWORDS = {
    'chest': ['chest', 'pec', 'bench press', 'push up', 'fly'],
    'back': ['back', 'lat', 'row', 'pull up', 'pulldown'],
    'shoulders': ['shoulder', 'delt', 'press', 'raise', 'shrug'],
    'quads': ['quad', 'squat', 'leg press', 'lunge', 'extension'],
    'hamstrings': ['hamstring', 'curl', 'deadlift', 'rdl'],
    'glutes': ['glute', 'hip thrust', 'bridge'],
    'biceps': ['bicep', 'curl'],
    'triceps': ['tricep', 'pushdown', 'extension', 'dip'],
    'core': ['core', 'ab', 'crunch', 'plank', 'sit up'],
    'calves': ['calf', 'calves', 'calf raise'],
}
|
||||
|
||||
|
||||
class Command(BaseCommand):
    """Repair exercise metadata in two passes.

    Pass 1 clears ``is_weight`` on exercises whose names match known
    bodyweight movements and that have no equipment rows.
    Pass 2 assigns muscle groups (via ExerciseMuscle) to exercises that
    currently have none, using keyword matching on the exercise name.
    """

    help = 'Fix is_weight flags and assign missing muscle associations'

    def add_arguments(self, parser):
        """Register the --dry-run flag."""
        parser.add_argument(
            '--dry-run',
            action='store_true',
            help='Show what would change without writing to DB',
        )

    def handle(self, *args, **options):
        """Run both repair passes and print a combined summary."""
        dry_run = options['dry_run']

        self.stdout.write(self.style.MIGRATE_HEADING('Step 1: Fix is_weight false positives'))
        weight_fixed = self._fix_is_weight_false_positives(dry_run)

        self.stdout.write(self.style.MIGRATE_HEADING('\nStep 2: Assign missing muscles'))
        muscle_assigned = self._assign_missing_muscles(dry_run)

        prefix = '[DRY RUN] ' if dry_run else ''
        self.stdout.write(self.style.SUCCESS(
            f'\n{prefix}Summary: Fixed {weight_fixed} is_weight flags, '
            f'assigned muscles to {muscle_assigned} exercises'
        ))

    def _fix_is_weight_false_positives(self, dry_run):
        """Clear is_weight on bodyweight-named exercises with no equipment.

        Returns the number of exercises fixed (or that would be fixed in
        dry-run mode).
        """
        weighted_exercises = Exercise.objects.filter(is_weight=True)

        # Equipment linkage, when the equipment app is installed; an
        # exercise with equipment rows is never treated as bodyweight.
        exercises_with_equipment = set()
        if WorkoutEquipment is not None:
            exercises_with_equipment = set(
                WorkoutEquipment.objects.values_list('exercise_id', flat=True).distinct()
            )

        # Compile once instead of re-compiling every pattern per exercise.
        bodyweight_res = [re.compile(pat) for pat in BODYWEIGHT_PATTERNS]

        fixed = 0
        for ex in weighted_exercises:
            if not ex.name:
                continue

            name_lower = ex.name.lower()

            # Name matches a bodyweight movement pattern?
            is_bodyweight_name = any(
                rx.search(name_lower) for rx in bodyweight_res
            )

            # Only flip the flag when no equipment row contradicts it.
            has_no_equipment = ex.pk not in exercises_with_equipment

            if is_bodyweight_name and has_no_equipment:
                if dry_run:
                    self.stdout.write(f' Would fix: {ex.name} (id={ex.pk})')
                else:
                    ex.is_weight = False
                    ex.save(update_fields=['is_weight'])
                    self.stdout.write(f' Fixed: {ex.name} (id={ex.pk})')
                fixed += 1

        prefix = '[DRY RUN] ' if dry_run else ''
        self.stdout.write(self.style.SUCCESS(
            f'{prefix}Fixed {fixed} exercises from is_weight=True to is_weight=False'
        ))
        return fixed

    def _assign_missing_muscles(self, dry_run):
        """Assign muscle groups to exercises that have no ExerciseMuscle rows.

        Fix over the previous version: keywords are matched on word
        boundaries (with an optional plural suffix) instead of as raw
        substrings, so e.g. the 'ab' keyword still matches 'ab wheel' and
        'abs' but no longer matches 'stability' or 'crab walk'.

        Returns the number of exercises that received at least one muscle.
        """
        # Find exercises with no muscle associations
        exercises_with_muscles = set(
            ExerciseMuscle.objects.values_list('exercise_id', flat=True).distinct()
        )
        orphan_exercises = Exercise.objects.exclude(pk__in=exercises_with_muscles)

        if not orphan_exercises.exists():
            self.stdout.write(' No exercises without muscle assignments found.')
            return 0

        self.stdout.write(f' Found {orphan_exercises.count()} exercises without muscle assignments')

        # Build a cache of muscle objects by name (case-insensitive)
        muscle_cache = {}
        for muscle in Muscle.objects.all():
            muscle_cache[muscle.name.lower()] = muscle

        # Pre-compile one word-boundary regex per keyword. The optional
        # 'e?s?' suffix keeps plural/variant names matching ('dips',
        # 'crunches', 'flyes') while the \b anchors stop substring false
        # positives ('ab' inside 'stability').
        keyword_res = {
            muscle_name: [
                re.compile(r'\b%se?s?\b' % re.escape(kw)) for kw in keywords
            ]
            for muscle_name, keywords in EXERCISE_MUSCLE_KEYWORDS.items()
        }

        assigned_count = 0
        for ex in orphan_exercises:
            if not ex.name:
                continue

            name_lower = ex.name.lower()
            matched_muscles = []

            for muscle_name, regexes in keyword_res.items():
                for rx in regexes:
                    if rx.search(name_lower):
                        # Find the muscle in the cache
                        muscle_obj = muscle_cache.get(muscle_name)
                        if muscle_obj and muscle_obj not in matched_muscles:
                            matched_muscles.append(muscle_obj)
                        break  # One keyword match per muscle group is enough

            if matched_muscles:
                if dry_run:
                    muscle_names = ', '.join(m.name for m in matched_muscles)
                    self.stdout.write(
                        f' Would assign: {ex.name} (id={ex.pk}) -> [{muscle_names}]'
                    )
                else:
                    for muscle_obj in matched_muscles:
                        # get_or_create keeps the pass idempotent.
                        ExerciseMuscle.objects.get_or_create(
                            exercise=ex,
                            muscle=muscle_obj,
                        )
                    muscle_names = ', '.join(m.name for m in matched_muscles)
                    self.stdout.write(
                        f' Assigned: {ex.name} (id={ex.pk}) -> [{muscle_names}]'
                    )
                assigned_count += 1

        prefix = '[DRY RUN] ' if dry_run else ''
        self.stdout.write(self.style.SUCCESS(
            f'{prefix}Assigned muscles to {assigned_count} exercises'
        ))
        return assigned_count
|
||||
109
generator/management/commands/fix_movement_pattern_typo.py
Normal file
109
generator/management/commands/fix_movement_pattern_typo.py
Normal file
@@ -0,0 +1,109 @@
|
||||
"""
|
||||
Fix the "horizonal" typo in movement_patterns fields.
|
||||
|
||||
The database has "horizonal" (missing 't') instead of "horizontal" in
|
||||
both Exercise.movement_patterns and MovementPatternOrder.movement_pattern.
|
||||
|
||||
This command is idempotent -- running it multiple times is safe.
|
||||
|
||||
Usage:
|
||||
python manage.py fix_movement_pattern_typo --dry-run
|
||||
python manage.py fix_movement_pattern_typo
|
||||
"""
|
||||
|
||||
from django.core.management.base import BaseCommand
|
||||
from django.db import transaction
|
||||
|
||||
from exercise.models import Exercise
|
||||
|
||||
# Import MovementPatternOrder if available (may not exist in test environments)
|
||||
try:
|
||||
from generator.models import MovementPatternOrder
|
||||
except ImportError:
|
||||
MovementPatternOrder = None
|
||||
|
||||
|
||||
class Command(BaseCommand):
    """Fix the "horizonal" -> "horizontal" typo in movement-pattern fields.

    Touches both Exercise.movement_patterns and (when the model is
    importable) MovementPatternOrder.movement_pattern. Idempotent: exits
    early when no typo remains.
    """

    help = 'Fix "horizonal" -> "horizontal" typo in movement_patterns'

    def add_arguments(self, parser):
        """Register the --dry-run flag."""
        parser.add_argument(
            '--dry-run',
            action='store_true',
            help='Show what would change without writing to DB',
        )

    def handle(self, *args, **options):
        """Replace the typo in both models inside one transaction and report counts."""
        dry_run = options['dry_run']

        # Idempotency guard: check if the typo still exists
        # NOTE(review): the lookup is case-insensitive (icontains) but the
        # str.replace below is case-sensitive, so a row containing
        # "Horizonal" would be found yet left unchanged -- confirm the DB
        # only ever holds the lowercase form.
        exercises_with_typo = Exercise.objects.filter(movement_patterns__icontains='horizonal')
        has_pattern_typo = False
        if MovementPatternOrder is not None:
            patterns_with_typo = MovementPatternOrder.objects.filter(
                movement_pattern__icontains='horizonal'
            )
            has_pattern_typo = patterns_with_typo.exists()

        if not exercises_with_typo.exists() and not has_pattern_typo:
            self.stdout.write(self.style.SUCCESS(
                'No "horizonal" typos found -- already fixed.'
            ))
            return

        exercise_fixed = 0
        pattern_fixed = 0

        # All writes happen atomically; dry-run rolls the whole thing back.
        with transaction.atomic():
            # Fix Exercise.movement_patterns
            for ex in exercises_with_typo:
                old = ex.movement_patterns
                new = old.replace('horizonal', 'horizontal')
                if old != new:
                    if dry_run:
                        self.stdout.write(f' Exercise {ex.pk} "{ex.name}": "{old}" -> "{new}"')
                    else:
                        ex.movement_patterns = new
                        ex.save(update_fields=['movement_patterns'])
                    # Counted in dry-run too, so the summary reflects what
                    # *would* change.
                    exercise_fixed += 1

            # Fix MovementPatternOrder.movement_pattern
            if MovementPatternOrder is not None:
                patterns = MovementPatternOrder.objects.filter(
                    movement_pattern__icontains='horizonal'
                )
                for mp in patterns:
                    old = mp.movement_pattern
                    new = old.replace('horizonal', 'horizontal')
                    if old != new:
                        if dry_run:
                            self.stdout.write(
                                f' MovementPatternOrder {mp.pk}: "{old}" -> "{new}"'
                            )
                        else:
                            mp.movement_pattern = new
                            mp.save(update_fields=['movement_pattern'])
                        pattern_fixed += 1

            if dry_run:
                # Discard any writes made inside this atomic block.
                transaction.set_rollback(True)

        prefix = '[DRY RUN] ' if dry_run else ''
        self.stdout.write(self.style.SUCCESS(
            f'\n{prefix}Fixed {exercise_fixed} Exercise records and '
            f'{pattern_fixed} MovementPatternOrder records'
        ))

        # Verify
        if not dry_run:
            remaining = Exercise.objects.filter(
                movement_patterns__icontains='horizonal'
            ).count()
            if remaining:
                self.stdout.write(self.style.WARNING(
                    f' WARNING: {remaining} exercises still have "horizonal"'
                ))
            else:
                self.stdout.write(self.style.SUCCESS(
                    ' No "horizonal" typos remain.'
                ))
|
||||
463
generator/management/commands/fix_rep_durations.py
Normal file
463
generator/management/commands/fix_rep_durations.py
Normal file
@@ -0,0 +1,463 @@
|
||||
"""
|
||||
Fixes estimated_rep_duration on all Exercise records using three sources:
|
||||
|
||||
1. **Exact match** from JSON workout files (AI/all_workouts_data/ and AI/cho/workouts/)
|
||||
Each set has `estimated_duration` (total seconds) and `reps`.
|
||||
We compute per_rep = estimated_duration / reps, averaged across all
|
||||
appearances of each exercise.
|
||||
|
||||
2. **Fuzzy match** from the same JSON data for exercises whose DB name
|
||||
doesn't match exactly. Uses name normalization (strip parentheticals,
|
||||
punctuation, plurals) + difflib with a 0.85 cutoff, rejecting matches
|
||||
where the equipment type differs (e.g. barbell vs dumbbell).
|
||||
|
||||
3. **Movement-pattern lookup** for exercises not found by either method.
|
||||
Uses the exercise's `movement_patterns` field against PATTERN_DURATIONS.
|
||||
|
||||
4. **Category-based defaults** for exercises that don't match any pattern.
|
||||
Falls back to DEFAULT_DURATION (3.0s).
|
||||
|
||||
Duration-only exercises (is_duration=True AND is_reps=False) are skipped
|
||||
since they use the `duration` field instead.
|
||||
|
||||
Usage:
|
||||
python manage.py fix_rep_durations
|
||||
python manage.py fix_rep_durations --dry-run
|
||||
"""
|
||||
|
||||
import difflib
|
||||
import glob
|
||||
import json
|
||||
import os
|
||||
import re
|
||||
import statistics
|
||||
from collections import defaultdict
|
||||
|
||||
from django.conf import settings
|
||||
from django.core.management.base import BaseCommand
|
||||
|
||||
from exercise.models import Exercise
|
||||
|
||||
|
||||
# Movement-pattern lookup table: maps movement pattern keywords to per-rep durations.
# NOTE(review): these keys use underscore style ('compound_push') while
# CATEGORY_DEFAULTS below uses the space/hyphen style seen in the DB
# ('upper push - horizontal') -- confirm these keys actually occur in
# movement_patterns, otherwise this table never matches.
PATTERN_DURATIONS = {
    'compound_push': 3.0,
    'compound_pull': 3.0,
    'squat': 3.0,
    'hinge': 3.0,
    'lunge': 3.0,
    'isolation_push': 2.5,
    'isolation_pull': 2.5,
    'isolation': 2.5,
    'olympic': 2.0,
    'explosive': 2.0,
    'plyometric': 2.0,
    'carry': 1.0,
    'core': 2.5,
}

# Category defaults keyed by substring match on movement_patterns.
# Order matters: first match wins. More specific patterns go first.
CATEGORY_DEFAULTS = [
    # Explosive / ballistic -- fast reps
    ('plyometric', 1.5),
    ('combat', 1.0),
    ('cardio/locomotion', 1.0),

    # Compound lower -- heavy, slower
    ('lower pull - hip hinge', 5.0),
    ('lower push - squat', 4.5),
    ('lower push - lunge', 4.0),
    ('lower pull', 4.5),
    ('lower push', 4.0),

    # Compound upper
    ('upper push - horizontal', 3.5),
    ('upper push - vertical', 3.5),
    ('upper pull - vertical', 4.0),
    ('upper pull - horizonal', 3.5),  # note: typo is in DB
    ('upper pull - horizontal', 3.5),  # also match corrected version
    ('upper push', 3.5),
    ('upper pull', 3.5),

    # Isolation / machine
    ('machine', 2.5),
    ('arms', 2.5),

    # Core
    ('core - anti-extension', 3.5),
    ('core - carry', 3.0),
    ('core', 3.0),

    # Mobility / yoga -- slow, controlled
    ('yoga', 5.0),
    ('mobility - static', 5.0),
    ('mobility - dynamic', 4.0),
    ('mobility', 4.0),

    # Olympic lifts -- explosive, technical
    ('olympic', 4.0),

    # Isolation
    ('isolation', 2.5),

    # Carry / farmer walk
    ('carry', 3.0),

    # Agility
    ('agility', 1.5),

    # Stretch / activation
    ('stretch', 5.0),
    ('activation', 3.0),
    ('warm up', 3.0),
    ('warmup', 3.0),
]

# Fallback if nothing matches
DEFAULT_DURATION = 3.0

# For backwards compat, also expose as DEFAULT_PER_REP
DEFAULT_PER_REP = DEFAULT_DURATION

# Equipment words -- if these differ between DB and JSON name, reject the match
# NOTE(review): this is a set, so when a name contains two equipment words the
# one returned by _extract_equipment is iteration-order dependent.
EQUIPMENT_WORDS = {
    'barbell', 'dumbbell', 'kettlebell', 'cable', 'band', 'machine',
    'smith', 'trx', 'ez-bar', 'ez bar', 'landmine', 'medicine ball',
    'resistance band', 'bodyweight',
}
|
||||
|
||||
|
||||
def _normalize_name(name):
|
||||
"""Normalize an exercise name for fuzzy comparison."""
|
||||
n = name.lower().strip()
|
||||
# Remove parenthetical content: "Squat (Back)" -> "Squat"
|
||||
n = re.sub(r'\([^)]*\)', '', n)
|
||||
# Remove common suffixes/noise
|
||||
n = re.sub(r'\b(each side|per side|each leg|per leg|each arm|per arm)\b', '', n)
|
||||
# Remove direction words (forward/backward variants are same exercise)
|
||||
n = re.sub(r'\b(forward|backward|forwards|backwards)\b', '', n)
|
||||
# Normalize punctuation and whitespace
|
||||
n = re.sub(r'[^\w\s]', ' ', n)
|
||||
n = re.sub(r'\s+', ' ', n).strip()
|
||||
# De-pluralize each word (handles "lunges"->"lunge", "curls"->"curl")
|
||||
words = []
|
||||
for w in n.split():
|
||||
if w.endswith('s') and not w.endswith('ss') and len(w) > 2:
|
||||
w = w[:-1]
|
||||
words.append(w)
|
||||
return ' '.join(words)
|
||||
|
||||
|
||||
def _extract_equipment(name):
    """Return an equipment keyword found in *name*, or None.

    NOTE(review): EQUIPMENT_WORDS is a set, so if a name mentions two
    equipment words the one returned depends on iteration order --
    confirm callers only rely on presence, not on which word.
    """
    haystack = name.lower()
    found = (eq for eq in EQUIPMENT_WORDS if eq in haystack)
    return next(found, None)
|
||||
|
||||
|
||||
class Command(BaseCommand):
    """Populate estimated_rep_duration for every Exercise.

    Sources, in priority order: exact JSON timing match, fuzzy JSON
    match, PATTERN_DURATIONS lookup, CATEGORY_DEFAULTS lookup (which
    itself falls back to DEFAULT_DURATION). Duration-only exercises get
    their estimated_rep_duration cleared instead.
    """

    help = 'Fix estimated_rep_duration using JSON workout data + pattern/category defaults'

    def add_arguments(self, parser):
        """Register the --dry-run flag."""
        parser.add_argument(
            '--dry-run',
            action='store_true',
            help='Show what would change without writing to DB',
        )

    def handle(self, *args, **options):
        """Run the full update pipeline and print coverage statistics."""
        dry_run = options['dry_run']

        # -- Step 1: Parse JSON files for real per-rep timing --
        json_durations = self._parse_json_files()
        self.stdout.write(
            f'Parsed JSON: {len(json_durations)} exercises with real timing data'
        )

        # -- Step 1b: Build fuzzy lookup from normalized JSON names --
        fuzzy_index = self._build_fuzzy_index(json_durations)

        # -- Step 2: Update exercises --
        exercises = Exercise.objects.all()
        # Per-source counters for the summary line.
        from_json_exact = 0
        from_json_fuzzy = 0
        from_pattern = 0
        from_category = 0
        skipped_duration_only = 0
        set_null = 0
        unchanged = 0
        fuzzy_matches = []

        for ex in exercises:
            # Skip duration-only exercises (is_duration=True AND is_reps=False)
            # -- they use the `duration` field, so any stale per-rep value
            # is cleared.
            if ex.is_duration and not ex.is_reps:
                if ex.estimated_rep_duration is not None:
                    if not dry_run:
                        ex.estimated_rep_duration = None
                        ex.save(update_fields=['estimated_rep_duration'])
                    set_null += 1
                else:
                    skipped_duration_only += 1
                continue

            # Duration-only exercises that aren't reps-based
            if not ex.is_reps and not ex.is_duration:
                # Edge case: neither reps nor duration -- skip
                unchanged += 1
                continue

            # Try exact match first
            name_lower = ex.name.lower().strip()
            if name_lower in json_durations:
                new_val = json_durations[name_lower]
                source = 'json-exact'
                from_json_exact += 1
            else:
                # Try fuzzy match
                fuzzy_result = self._fuzzy_match(ex.name, json_durations, fuzzy_index)
                if fuzzy_result is not None:
                    new_val, matched_name = fuzzy_result
                    source = 'json-fuzzy'
                    from_json_fuzzy += 1
                    # Recorded so fuzzy decisions can be reviewed in the output.
                    fuzzy_matches.append((ex.name, matched_name, new_val))
                else:
                    # Try movement-pattern lookup
                    pattern_val = self._get_pattern_duration(ex)
                    if pattern_val is not None:
                        new_val = pattern_val
                        source = 'pattern'
                        from_pattern += 1
                    else:
                        # Fall back to category defaults
                        new_val = self._get_category_default(ex)
                        source = 'category'
                        from_category += 1

            old_val = ex.estimated_rep_duration

            if dry_run:
                # Only report rows whose value would actually change.
                if old_val != new_val:
                    self.stdout.write(
                        f' [{source}] {ex.name}: {old_val:.2f}s -> {new_val:.2f}s'
                        if old_val else
                        f' [{source}] {ex.name}: None -> {new_val:.2f}s'
                    )
            else:
                ex.estimated_rep_duration = new_val
                ex.save(update_fields=['estimated_rep_duration'])

        self.stdout.write(self.style.SUCCESS(
            f'\n{"[DRY RUN] " if dry_run else ""}'
            f'Updated {from_json_exact + from_json_fuzzy + from_pattern + from_category + set_null} exercises: '
            f'{from_json_exact} from JSON (exact), {from_json_fuzzy} from JSON (fuzzy), '
            f'{from_pattern} from pattern lookup, {from_category} from category defaults, '
            f'{set_null} set to null (duration-only), '
            f'{skipped_duration_only} already null (duration-only), '
            f'{unchanged} unchanged'
        ))

        # Show fuzzy matches for review
        if fuzzy_matches:
            self.stdout.write(f'\nFuzzy matches ({len(fuzzy_matches)}):')
            for db_name, json_name, val in sorted(fuzzy_matches):
                self.stdout.write(f' {db_name:50s} -> {json_name} ({val:.2f}s)')

        # -- Step 3: Show summary stats --
        reps_exercises = Exercise.objects.filter(is_reps=True)
        total_reps = reps_exercises.count()
        with_duration = reps_exercises.exclude(estimated_rep_duration__isnull=True).count()
        without_duration = reps_exercises.filter(estimated_rep_duration__isnull=True).count()

        coverage_pct = (with_duration / total_reps * 100) if total_reps > 0 else 0
        self.stdout.write(
            f'\nCoverage: {with_duration}/{total_reps} rep-based exercises '
            f'have estimated_rep_duration ({coverage_pct:.1f}%)'
        )
        if without_duration > 0:
            self.stdout.write(
                f' {without_duration} exercises still missing estimated_rep_duration'
            )

        if not dry_run:
            durations = list(
                reps_exercises
                .exclude(estimated_rep_duration__isnull=True)
                .values_list('estimated_rep_duration', flat=True)
            )
            if durations:
                self.stdout.write(
                    f'\nNew stats for rep-based exercises ({len(durations)}):'
                    f'\n Min: {min(durations):.2f}s'
                    f'\n Max: {max(durations):.2f}s'
                    f'\n Mean: {statistics.mean(durations):.2f}s'
                    f'\n Median: {statistics.median(durations):.2f}s'
                )

    def _build_fuzzy_index(self, json_durations):
        """
        Build a dict of {normalized_name: original_name} for fuzzy matching.
        """
        index = {}
        for original_name in json_durations:
            norm = _normalize_name(original_name)
            # Keep the first occurrence if duplicates after normalization
            if norm not in index:
                index[norm] = original_name
        return index

    def _fuzzy_match(self, db_name, json_durations, fuzzy_index):
        """
        Try to fuzzy-match a DB exercise name to a JSON exercise name.

        Strategy:
        1. Exact match on normalized names
        2. Containment match: all words of the shorter name appear in the longer
        3. High-cutoff difflib (0.88) with word overlap >= 75%

        Equipment must match in all cases.

        Returns (duration_value, matched_json_name) or None.
        """
        db_norm = _normalize_name(db_name)
        db_equipment = _extract_equipment(db_name)
        db_words = set(db_norm.split())

        # First try: exact match on normalized names
        if db_norm in fuzzy_index:
            original = fuzzy_index[db_norm]
            json_equipment = _extract_equipment(original)
            # Conflicting equipment words mean different exercises.
            if db_equipment and json_equipment and db_equipment != json_equipment:
                return None
            return json_durations[original], original

        # Second try: containment match -- shorter name's words are a
        # subset of the longer name's words (e.g. "barbell good morning"
        # is contained in "barbell russian good morning")
        for json_norm, original in fuzzy_index.items():
            json_words = set(json_norm.split())
            shorter, longer = (
                (db_words, json_words) if len(db_words) <= len(json_words)
                else (json_words, db_words)
            )
            # All words of the shorter must appear in the longer
            if shorter.issubset(longer) and len(shorter) >= 2:
                # But names shouldn't differ by too many words (max 2 extra)
                if len(longer) - len(shorter) > 2:
                    continue
                json_equipment = _extract_equipment(original)
                if db_equipment and json_equipment and db_equipment != json_equipment:
                    continue
                # Reject when exactly one side names equipment.
                if (db_equipment is None) != (json_equipment is None):
                    continue
                return json_durations[original], original

        # Third try: high-cutoff difflib with strict word overlap
        normalized_json_names = list(fuzzy_index.keys())
        matches = difflib.get_close_matches(
            db_norm, normalized_json_names, n=3, cutoff=0.88,
        )

        for match_norm in matches:
            original = fuzzy_index[match_norm]
            json_equipment = _extract_equipment(original)
            if db_equipment and json_equipment and db_equipment != json_equipment:
                continue
            if (db_equipment is None) != (json_equipment is None):
                continue
            # Require >= 75% word overlap
            match_words = set(match_norm.split())
            overlap = len(db_words & match_words)
            total = max(len(db_words), len(match_words))
            if total > 0 and overlap / total < 0.75:
                continue
            return json_durations[original], original

        return None

    def _parse_json_files(self):
        """
        Parse all workout JSON files and compute average per-rep duration
        for each exercise. Returns {lowercase_name: avg_seconds_per_rep}.
        """
        base = settings.BASE_DIR
        patterns = [
            os.path.join(base, 'AI', 'all_workouts_data', '*.json'),
            os.path.join(base, 'AI', 'cho', 'workouts', '*.json'),
        ]
        files = []
        for pat in patterns:
            files.extend(sorted(glob.glob(pat)))

        # name -> list of observed seconds-per-rep samples
        exercise_samples = defaultdict(list)

        for fpath in files:
            with open(fpath) as f:
                try:
                    data = json.load(f)
                except (json.JSONDecodeError, UnicodeDecodeError):
                    # Best effort: skip unreadable/malformed files.
                    continue

            # A file may hold a single workout dict or a list of them.
            workouts = [data] if isinstance(data, dict) else data

            for workout in workouts:
                if not isinstance(workout, dict):
                    continue
                for section in workout.get('sections', []):
                    for s in section.get('sets', []):
                        if not isinstance(s, dict):
                            continue
                        ex = s.get('exercise', {})
                        if not isinstance(ex, dict):
                            continue
                        name = ex.get('name', '').strip()
                        if not name:
                            continue

                        reps = s.get('reps', 0) or 0
                        est_dur = s.get('estimated_duration', 0) or 0
                        set_type = s.get('type', '')

                        if set_type == 'reps' and reps > 0 and est_dur > 0:
                            per_rep = est_dur / reps
                            # Sanity: ignore outliers (< 0.5s or > 20s per rep)
                            if 0.5 <= per_rep <= 20.0:
                                exercise_samples[name.lower()].append(per_rep)

        # Average across all samples per exercise
        result = {}
        for name, samples in exercise_samples.items():
            result[name] = round(statistics.mean(samples), 2)

        return result

    def _get_pattern_duration(self, exercise):
        """
        Return a per-rep duration based on the PATTERN_DURATIONS lookup table.
        Checks the exercise's movement_patterns field for matching patterns.
        Returns the first match, or None if no match.

        NOTE(review): PATTERN_DURATIONS keys use underscore style
        ('compound_push'); confirm those strings actually occur in
        movement_patterns, otherwise this step never matches.
        """
        patterns_str = (exercise.movement_patterns or '').lower()
        if not patterns_str:
            return None

        for pattern_key, duration in PATTERN_DURATIONS.items():
            if pattern_key in patterns_str:
                return duration

        return None

    def _get_category_default(self, exercise):
        """
        Return a per-rep duration based on the exercise's movement_patterns
        using the more detailed CATEGORY_DEFAULTS table.

        Always returns a value: falls back to DEFAULT_DURATION when no
        keyword matches.
        """
        patterns = (exercise.movement_patterns or '').lower()

        # First matching keyword wins; CATEGORY_DEFAULTS is ordered
        # most-specific-first.
        for keyword, duration in CATEGORY_DEFAULTS:
            if keyword in patterns:
                return duration

        return DEFAULT_DURATION
|
||||
116
generator/management/commands/normalize_muscle_names.py
Normal file
116
generator/management/commands/normalize_muscle_names.py
Normal file
@@ -0,0 +1,116 @@
|
||||
"""
|
||||
Normalize muscle names in the database and merge duplicates.
|
||||
|
||||
Uses the MUSCLE_NORMALIZATION_MAP from muscle_normalizer.py to:
|
||||
1. Rename each Muscle record to its canonical lowercase form
|
||||
2. Merge duplicates by updating ExerciseMuscle FKs to point to the canonical Muscle
|
||||
3. Delete orphaned duplicate Muscle records
|
||||
|
||||
Usage:
|
||||
python manage.py normalize_muscle_names --dry-run
|
||||
python manage.py normalize_muscle_names
|
||||
"""
|
||||
|
||||
from collections import defaultdict
|
||||
|
||||
from django.core.management.base import BaseCommand
|
||||
from django.db import transaction
|
||||
|
||||
from muscle.models import Muscle, ExerciseMuscle
|
||||
from generator.services.muscle_normalizer import normalize_muscle_name
|
||||
|
||||
|
||||
class Command(BaseCommand):
|
||||
help = 'Normalize muscle names and merge duplicates using MUSCLE_NORMALIZATION_MAP'
|
||||
|
||||
def add_arguments(self, parser):
|
||||
parser.add_argument(
|
||||
'--dry-run',
|
||||
action='store_true',
|
||||
help='Show what would change without writing to DB',
|
||||
)
|
||||
|
||||
def handle(self, *args, **options):
|
||||
dry_run = options['dry_run']
|
||||
|
||||
all_muscles = Muscle.objects.all().order_by('id')
|
||||
self.stdout.write(f'Found {all_muscles.count()} muscle records')
|
||||
|
||||
# Group muscles by their canonical name
|
||||
canonical_groups = defaultdict(list)
|
||||
for muscle in all_muscles:
|
||||
canonical = normalize_muscle_name(muscle.name)
|
||||
if canonical:
|
||||
canonical_groups[canonical].append(muscle)
|
||||
|
||||
renamed = 0
|
||||
merged = 0
|
||||
deleted = 0
|
||||
|
||||
with transaction.atomic():
|
||||
for canonical_name, muscles in canonical_groups.items():
|
||||
# Pick the keeper: prefer the one with the lowest ID (oldest)
|
||||
keeper = muscles[0]
|
||||
|
||||
# Rename keeper if needed
|
||||
if keeper.name != canonical_name:
|
||||
if dry_run:
|
||||
self.stdout.write(f' Rename: "{keeper.name}" -> "{canonical_name}" (id={keeper.pk})')
|
||||
else:
|
||||
keeper.name = canonical_name
|
||||
keeper.save(update_fields=['name'])
|
||||
renamed += 1
|
||||
|
||||
# Merge duplicates into keeper
|
||||
for dup in muscles[1:]:
|
||||
# Count affected ExerciseMuscle rows
|
||||
em_count = ExerciseMuscle.objects.filter(muscle=dup).count()
|
||||
|
||||
if dry_run:
|
||||
self.stdout.write(
|
||||
f' Merge: "{dup.name}" (id={dup.pk}) -> "{canonical_name}" '
|
||||
f'(id={keeper.pk}), {em_count} ExerciseMuscle rows'
|
||||
)
|
||||
else:
|
||||
# Update ExerciseMuscle FKs, handling unique_together conflicts
|
||||
for em in ExerciseMuscle.objects.filter(muscle=dup):
|
||||
# Check if keeper already has this exercise
|
||||
existing = ExerciseMuscle.objects.filter(
|
||||
exercise=em.exercise, muscle=keeper
|
||||
).exists()
|
||||
if existing:
|
||||
em.delete()
|
||||
else:
|
||||
em.muscle = keeper
|
||||
em.save(update_fields=['muscle'])
|
||||
|
||||
dup.delete()
|
||||
|
||||
merged += em_count
|
||||
deleted += 1
|
||||
|
||||
if dry_run:
|
||||
# Roll back the transaction for dry run
|
||||
transaction.set_rollback(True)
|
||||
|
||||
prefix = '[DRY RUN] ' if dry_run else ''
|
||||
self.stdout.write(self.style.SUCCESS(
|
||||
f'\n{prefix}Results:'
|
||||
f'\n Renamed: {renamed} muscles'
|
||||
f'\n Merged: {merged} ExerciseMuscle references'
|
||||
f'\n Deleted: {deleted} duplicate Muscle records'
|
||||
))
|
||||
|
||||
# Verify
|
||||
if not dry_run:
|
||||
dupes = (
|
||||
Muscle.objects.values('name')
|
||||
.annotate(c=__import__('django.db.models', fromlist=['Count']).Count('id'))
|
||||
.filter(c__gt=1)
|
||||
)
|
||||
if dupes.exists():
|
||||
self.stdout.write(self.style.WARNING(
|
||||
f' WARNING: {dupes.count()} duplicate names still exist!'
|
||||
))
|
||||
else:
|
||||
self.stdout.write(self.style.SUCCESS(' No duplicate muscle names remain.'))
|
||||
130
generator/management/commands/normalize_muscles.py
Normal file
130
generator/management/commands/normalize_muscles.py
Normal file
@@ -0,0 +1,130 @@
|
||||
"""
|
||||
Management command to normalize muscle names in the database.
|
||||
|
||||
Fixes casing duplicates (e.g. "Quads" vs "quads") and updates
|
||||
ExerciseMuscle records to point to the canonical muscle entries.
|
||||
|
||||
Usage:
|
||||
python manage.py normalize_muscles # apply changes
|
||||
python manage.py normalize_muscles --dry-run # preview only
|
||||
"""
|
||||
|
||||
from django.core.management.base import BaseCommand
|
||||
|
||||
from muscle.models import Muscle, ExerciseMuscle
|
||||
from generator.services.muscle_normalizer import normalize_muscle_name
|
||||
|
||||
|
||||
class Command(BaseCommand):
|
||||
help = 'Normalize muscle names (fix casing duplicates) and consolidate ExerciseMuscle records.'
|
||||
|
||||
def add_arguments(self, parser):
|
||||
parser.add_argument(
|
||||
'--dry-run',
|
||||
action='store_true',
|
||||
help='Preview changes without modifying the database.',
|
||||
)
|
||||
|
||||
def handle(self, *args, **options):
|
||||
dry_run = options['dry_run']
|
||||
if dry_run:
|
||||
self.stdout.write(self.style.WARNING('DRY RUN - no changes will be made.\n'))
|
||||
|
||||
muscles = Muscle.objects.all().order_by('name')
|
||||
self.stdout.write(f'Total muscles in DB: {muscles.count()}\n')
|
||||
|
||||
# Build a mapping: canonical_name -> list of Muscle objects with that canonical name
|
||||
canonical_map = {}
|
||||
for m in muscles:
|
||||
canonical = normalize_muscle_name(m.name)
|
||||
if canonical is None:
|
||||
canonical = m.name.strip().lower()
|
||||
canonical_map.setdefault(canonical, []).append(m)
|
||||
|
||||
# Identify duplicates (canonical names with > 1 Muscle record)
|
||||
duplicates = {k: v for k, v in canonical_map.items() if len(v) > 1}
|
||||
|
||||
if not duplicates:
|
||||
self.stdout.write(self.style.SUCCESS('No duplicate muscles found. Nothing to normalize.'))
|
||||
return
|
||||
|
||||
self.stdout.write(f'Found {len(duplicates)} canonical names with duplicates:\n')
|
||||
|
||||
merged_count = 0
|
||||
reassigned_count = 0
|
||||
|
||||
for canonical, muscle_list in sorted(duplicates.items()):
|
||||
names = [m.name for m in muscle_list]
|
||||
self.stdout.write(f'\n "{canonical}" <- {names}')
|
||||
|
||||
# Keep the first one (or the one whose name already matches canonical)
|
||||
keep = None
|
||||
for m in muscle_list:
|
||||
if m.name == canonical:
|
||||
keep = m
|
||||
break
|
||||
if keep is None:
|
||||
keep = muscle_list[0]
|
||||
|
||||
to_merge = [m for m in muscle_list if m.pk != keep.pk]
|
||||
|
||||
for old_muscle in to_merge:
|
||||
# Reassign ExerciseMuscle records from old_muscle to keep
|
||||
em_records = ExerciseMuscle.objects.filter(muscle=old_muscle)
|
||||
count = em_records.count()
|
||||
|
||||
if count > 0:
|
||||
self.stdout.write(f' Reassigning {count} ExerciseMuscle records: '
|
||||
f'"{old_muscle.name}" (id={old_muscle.pk}) -> '
|
||||
f'"{keep.name}" (id={keep.pk})')
|
||||
if not dry_run:
|
||||
# Check for conflicts (same exercise already linked to keep)
|
||||
for em in em_records:
|
||||
existing = ExerciseMuscle.objects.filter(
|
||||
exercise=em.exercise, muscle=keep
|
||||
).exists()
|
||||
if existing:
|
||||
em.delete()
|
||||
else:
|
||||
em.muscle = keep
|
||||
em.save()
|
||||
reassigned_count += count
|
||||
|
||||
# Rename keep to canonical if needed
|
||||
if keep.name != canonical and not dry_run:
|
||||
keep.name = canonical
|
||||
keep.save()
|
||||
|
||||
# Delete the duplicate
|
||||
self.stdout.write(f' Deleting duplicate: "{old_muscle.name}" (id={old_muscle.pk})')
|
||||
if not dry_run:
|
||||
old_muscle.delete()
|
||||
merged_count += 1
|
||||
|
||||
# Also fix names that aren't duplicates but have wrong casing
|
||||
rename_count = 0
|
||||
for canonical, muscle_list in canonical_map.items():
|
||||
if len(muscle_list) == 1:
|
||||
m = muscle_list[0]
|
||||
if m.name != canonical:
|
||||
self.stdout.write(f'\n Renaming: "{m.name}" -> "{canonical}"')
|
||||
if not dry_run:
|
||||
m.name = canonical
|
||||
m.save()
|
||||
rename_count += 1
|
||||
|
||||
self.stdout.write('\n')
|
||||
if dry_run:
|
||||
self.stdout.write(self.style.WARNING(
|
||||
f'DRY RUN complete. Would merge {merged_count} duplicates, '
|
||||
f'reassign {reassigned_count} ExerciseMuscle records, '
|
||||
f'rename {rename_count} muscles.'
|
||||
))
|
||||
else:
|
||||
remaining = Muscle.objects.count()
|
||||
self.stdout.write(self.style.SUCCESS(
|
||||
f'Done. Merged {merged_count} duplicates, '
|
||||
f'reassigned {reassigned_count} ExerciseMuscle records, '
|
||||
f'renamed {rename_count} muscles. '
|
||||
f'{remaining} muscles remaining.'
|
||||
))
|
||||
1054
generator/management/commands/populate_exercise_fields.py
Normal file
1054
generator/management/commands/populate_exercise_fields.py
Normal file
File diff suppressed because it is too large
Load Diff
107
generator/management/commands/recalculate_workout_times.py
Normal file
107
generator/management/commands/recalculate_workout_times.py
Normal file
@@ -0,0 +1,107 @@
|
||||
"""
|
||||
Recalculates estimated_time on all Workout and Superset records using
|
||||
the corrected estimated_rep_duration values + rest between rounds.
|
||||
|
||||
Formula per superset:
|
||||
active_time = sum(reps * exercise.estimated_rep_duration) + sum(durations)
|
||||
rest_time = rest_between_rounds * (rounds - 1)
|
||||
superset.estimated_time = active_time (stores single-round active time)
|
||||
|
||||
Formula per workout:
|
||||
workout.estimated_time = sum(superset_active_time * rounds + rest_time)
|
||||
|
||||
Usage:
|
||||
python manage.py recalculate_workout_times
|
||||
python manage.py recalculate_workout_times --dry-run
|
||||
python manage.py recalculate_workout_times --rest=45
|
||||
"""
|
||||
|
||||
from django.core.management.base import BaseCommand
|
||||
|
||||
from workout.models import Workout
|
||||
from superset.models import Superset, SupersetExercise
|
||||
|
||||
|
||||
DEFAULT_REST_BETWEEN_ROUNDS = 45 # seconds
|
||||
DEFAULT_REP_DURATION = 3.0 # fallback if null
|
||||
|
||||
|
||||
class Command(BaseCommand):
|
||||
help = 'Recalculate estimated_time on all Workouts and Supersets'
|
||||
|
||||
def add_arguments(self, parser):
|
||||
parser.add_argument(
|
||||
'--dry-run',
|
||||
action='store_true',
|
||||
help='Show changes without writing to DB',
|
||||
)
|
||||
parser.add_argument(
|
||||
'--rest',
|
||||
type=int,
|
||||
default=DEFAULT_REST_BETWEEN_ROUNDS,
|
||||
help=f'Rest between rounds in seconds (default: {DEFAULT_REST_BETWEEN_ROUNDS})',
|
||||
)
|
||||
|
||||
def handle(self, *args, **options):
|
||||
dry_run = options['dry_run']
|
||||
rest_between_rounds = options['rest']
|
||||
|
||||
workouts = Workout.objects.prefetch_related(
|
||||
'superset_workout__superset_exercises__exercise'
|
||||
).all()
|
||||
total = workouts.count()
|
||||
updated = 0
|
||||
|
||||
for workout in workouts:
|
||||
supersets = workout.superset_workout.all().order_by('order')
|
||||
workout_total_time = 0
|
||||
|
||||
for ss in supersets:
|
||||
exercises = ss.superset_exercises.all()
|
||||
active_time = 0.0
|
||||
|
||||
for se in exercises:
|
||||
if se.reps and se.reps > 0:
|
||||
rep_dur = se.exercise.estimated_rep_duration or DEFAULT_REP_DURATION
|
||||
active_time += se.reps * rep_dur
|
||||
elif se.duration and se.duration > 0:
|
||||
active_time += se.duration
|
||||
|
||||
# Rest between rounds (not after the last round)
|
||||
rest_time = rest_between_rounds * max(0, ss.rounds - 1)
|
||||
|
||||
# Superset stores single-round active time
|
||||
old_ss_time = ss.estimated_time
|
||||
ss.estimated_time = active_time
|
||||
if not dry_run:
|
||||
ss.save(update_fields=['estimated_time'])
|
||||
|
||||
# Workout accumulates: active per round * rounds + rest
|
||||
workout_total_time += (active_time * ss.rounds) + rest_time
|
||||
|
||||
old_time = workout.estimated_time
|
||||
new_time = workout_total_time
|
||||
|
||||
if not dry_run:
|
||||
workout.estimated_time = new_time
|
||||
workout.save(update_fields=['estimated_time'])
|
||||
|
||||
updated += 1
|
||||
|
||||
self.stdout.write(self.style.SUCCESS(
|
||||
f'{"[DRY RUN] " if dry_run else ""}'
|
||||
f'Recalculated {updated}/{total} workouts '
|
||||
f'(rest between rounds: {rest_between_rounds}s)'
|
||||
))
|
||||
|
||||
# Show some examples
|
||||
if not dry_run:
|
||||
self.stdout.write('\nSample workouts:')
|
||||
for w in Workout.objects.order_by('-id')[:5]:
|
||||
mins = w.estimated_time / 60 if w.estimated_time else 0
|
||||
ss_count = Superset.objects.filter(workout=w).count()
|
||||
ex_count = SupersetExercise.objects.filter(superset__workout=w).count()
|
||||
self.stdout.write(
|
||||
f' #{w.id} "{w.name}": {mins:.0f}m '
|
||||
f'({ss_count} supersets, {ex_count} exercises)'
|
||||
)
|
||||
142
generator/migrations/0001_initial.py
Normal file
142
generator/migrations/0001_initial.py
Normal file
@@ -0,0 +1,142 @@
|
||||
# Generated by Django 5.1.4 on 2026-02-11 16:54
|
||||
|
||||
import django.db.models.deletion
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
initial = True
|
||||
|
||||
dependencies = [
|
||||
('equipment', '0002_workoutequipment'),
|
||||
('exercise', '0008_exercise_video_override'),
|
||||
('muscle', '0002_exercisemuscle'),
|
||||
('registered_user', '0003_registereduser_has_nsfw_toggle'),
|
||||
('workout', '0015_alter_completedworkout_difficulty'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.CreateModel(
|
||||
name='MovementPatternOrder',
|
||||
fields=[
|
||||
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
|
||||
('position', models.CharField(choices=[('early', 'Early'), ('middle', 'Middle'), ('late', 'Late')], max_length=10)),
|
||||
('movement_pattern', models.CharField(max_length=100)),
|
||||
('frequency', models.IntegerField(default=0)),
|
||||
('section_type', models.CharField(choices=[('warm_up', 'Warm Up'), ('working', 'Working'), ('cool_down', 'Cool Down')], default='working', max_length=10)),
|
||||
],
|
||||
options={
|
||||
'ordering': ['position', '-frequency'],
|
||||
},
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name='MuscleGroupSplit',
|
||||
fields=[
|
||||
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
|
||||
('muscle_names', models.JSONField(default=list, help_text='List of muscle group names')),
|
||||
('frequency', models.IntegerField(default=0, help_text='How often this combo appeared')),
|
||||
('label', models.CharField(blank=True, default='', max_length=100)),
|
||||
('typical_exercise_count', models.IntegerField(default=6)),
|
||||
('split_type', models.CharField(choices=[('push', 'Push'), ('pull', 'Pull'), ('legs', 'Legs'), ('upper', 'Upper'), ('lower', 'Lower'), ('full_body', 'Full Body'), ('core', 'Core'), ('cardio', 'Cardio')], default='full_body', max_length=20)),
|
||||
],
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name='WeeklySplitPattern',
|
||||
fields=[
|
||||
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
|
||||
('days_per_week', models.IntegerField()),
|
||||
('pattern', models.JSONField(default=list, help_text='Ordered list of MuscleGroupSplit IDs')),
|
||||
('pattern_labels', models.JSONField(default=list, help_text='Ordered list of split labels')),
|
||||
('frequency', models.IntegerField(default=0)),
|
||||
('rest_day_positions', models.JSONField(default=list, help_text='Day indices that are rest days')),
|
||||
],
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name='WorkoutType',
|
||||
fields=[
|
||||
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
|
||||
('name', models.CharField(max_length=100, unique=True)),
|
||||
('description', models.TextField(blank=True, default='')),
|
||||
('typical_rest_between_sets', models.IntegerField(default=60, help_text='Seconds')),
|
||||
('typical_intensity', models.CharField(choices=[('low', 'Low'), ('medium', 'Medium'), ('high', 'High')], default='medium', max_length=10)),
|
||||
('rep_range_min', models.IntegerField(default=8)),
|
||||
('rep_range_max', models.IntegerField(default=12)),
|
||||
('round_range_min', models.IntegerField(default=3)),
|
||||
('round_range_max', models.IntegerField(default=4)),
|
||||
('duration_bias', models.FloatField(default=0.5, help_text='0.0=all rep-based, 1.0=all duration-based')),
|
||||
('superset_size_min', models.IntegerField(default=2)),
|
||||
('superset_size_max', models.IntegerField(default=4)),
|
||||
],
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name='GeneratedWeeklyPlan',
|
||||
fields=[
|
||||
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
|
||||
('created_at', models.DateTimeField(auto_now_add=True)),
|
||||
('week_start_date', models.DateField()),
|
||||
('week_end_date', models.DateField()),
|
||||
('status', models.CharField(choices=[('pending', 'Pending'), ('completed', 'Completed'), ('failed', 'Failed')], default='pending', max_length=10)),
|
||||
('preferences_snapshot', models.JSONField(blank=True, default=dict)),
|
||||
('generation_time_ms', models.IntegerField(blank=True, null=True)),
|
||||
('registered_user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='generated_plans', to='registered_user.registereduser')),
|
||||
],
|
||||
options={
|
||||
'ordering': ['-created_at'],
|
||||
},
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name='WorkoutStructureRule',
|
||||
fields=[
|
||||
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
|
||||
('section_type', models.CharField(choices=[('warm_up', 'Warm Up'), ('working', 'Working'), ('cool_down', 'Cool Down')], max_length=10)),
|
||||
('movement_patterns', models.JSONField(default=list)),
|
||||
('typical_rounds', models.IntegerField(default=3)),
|
||||
('typical_exercises_per_superset', models.IntegerField(default=3)),
|
||||
('typical_rep_range_min', models.IntegerField(default=8)),
|
||||
('typical_rep_range_max', models.IntegerField(default=12)),
|
||||
('typical_duration_range_min', models.IntegerField(default=30, help_text='Seconds')),
|
||||
('typical_duration_range_max', models.IntegerField(default=45, help_text='Seconds')),
|
||||
('goal_type', models.CharField(choices=[('strength', 'Strength'), ('hypertrophy', 'Hypertrophy'), ('endurance', 'Endurance'), ('weight_loss', 'Weight Loss'), ('general_fitness', 'General Fitness')], default='general_fitness', max_length=20)),
|
||||
('workout_type', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='structure_rules', to='generator.workouttype')),
|
||||
],
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name='UserPreference',
|
||||
fields=[
|
||||
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
|
||||
('fitness_level', models.IntegerField(choices=[(1, 'Beginner'), (2, 'Intermediate'), (3, 'Advanced'), (4, 'Elite')], default=2)),
|
||||
('primary_goal', models.CharField(choices=[('strength', 'Strength'), ('hypertrophy', 'Hypertrophy'), ('endurance', 'Endurance'), ('weight_loss', 'Weight Loss'), ('general_fitness', 'General Fitness')], default='general_fitness', max_length=20)),
|
||||
('secondary_goal', models.CharField(blank=True, choices=[('strength', 'Strength'), ('hypertrophy', 'Hypertrophy'), ('endurance', 'Endurance'), ('weight_loss', 'Weight Loss'), ('general_fitness', 'General Fitness')], default='', max_length=20)),
|
||||
('days_per_week', models.IntegerField(default=4)),
|
||||
('preferred_workout_duration', models.IntegerField(default=45, help_text='Minutes')),
|
||||
('preferred_days', models.JSONField(blank=True, default=list, help_text='List of weekday ints (0=Mon, 6=Sun)')),
|
||||
('injuries_limitations', models.TextField(blank=True, default='')),
|
||||
('available_equipment', models.ManyToManyField(blank=True, related_name='user_preferences', to='equipment.equipment')),
|
||||
('excluded_exercises', models.ManyToManyField(blank=True, related_name='excluded_by_users', to='exercise.exercise')),
|
||||
('registered_user', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, related_name='generator_preference', to='registered_user.registereduser')),
|
||||
('target_muscle_groups', models.ManyToManyField(blank=True, related_name='user_preferences', to='muscle.muscle')),
|
||||
('preferred_workout_types', models.ManyToManyField(blank=True, related_name='user_preferences', to='generator.workouttype')),
|
||||
],
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name='GeneratedWorkout',
|
||||
fields=[
|
||||
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
|
||||
('scheduled_date', models.DateField()),
|
||||
('day_of_week', models.IntegerField(help_text='0=Monday, 6=Sunday')),
|
||||
('is_rest_day', models.BooleanField(default=False)),
|
||||
('status', models.CharField(choices=[('pending', 'Pending'), ('accepted', 'Accepted'), ('rejected', 'Rejected'), ('completed', 'Completed')], default='pending', max_length=10)),
|
||||
('focus_area', models.CharField(blank=True, default='', max_length=255)),
|
||||
('target_muscles', models.JSONField(blank=True, default=list)),
|
||||
('user_rating', models.IntegerField(blank=True, null=True)),
|
||||
('user_feedback', models.TextField(blank=True, default='')),
|
||||
('plan', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='generated_workouts', to='generator.generatedweeklyplan')),
|
||||
('workout', models.OneToOneField(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='generated_from', to='workout.workout')),
|
||||
('workout_type', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='generated_workouts', to='generator.workouttype')),
|
||||
],
|
||||
options={
|
||||
'ordering': ['scheduled_date'],
|
||||
},
|
||||
),
|
||||
]
|
||||
18
generator/migrations/0002_add_display_name_to_workouttype.py
Normal file
18
generator/migrations/0002_add_display_name_to_workouttype.py
Normal file
@@ -0,0 +1,18 @@
|
||||
# Generated by Django 4.2.2 on 2026-02-20 20:08
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('generator', '0001_initial'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='workouttype',
|
||||
name='display_name',
|
||||
field=models.CharField(blank=True, default='', max_length=100),
|
||||
),
|
||||
]
|
||||
@@ -0,0 +1,19 @@
|
||||
# Generated by Django 5.1.4 on 2026-02-20 22:55
|
||||
|
||||
import django.core.validators
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('generator', '0002_add_display_name_to_workouttype'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name='userpreference',
|
||||
name='preferred_workout_duration',
|
||||
field=models.IntegerField(default=45, help_text='Minutes', validators=[django.core.validators.MinValueValidator(15), django.core.validators.MaxValueValidator(120)]),
|
||||
),
|
||||
]
|
||||
18
generator/migrations/0004_add_injury_types.py
Normal file
18
generator/migrations/0004_add_injury_types.py
Normal file
@@ -0,0 +1,18 @@
|
||||
# Generated by Django 5.1.4 on 2026-02-21 03:52
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('generator', '0003_alter_userpreference_preferred_workout_duration'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='userpreference',
|
||||
name='injury_types',
|
||||
field=models.JSONField(blank=True, default=list, help_text='Structured injury types: knee, lower_back, upper_back, shoulder, hip, wrist, ankle, neck'),
|
||||
),
|
||||
]
|
||||
28
generator/migrations/0005_add_periodization_fields.py
Normal file
28
generator/migrations/0005_add_periodization_fields.py
Normal file
@@ -0,0 +1,28 @@
|
||||
# Generated by Django 5.1.4 on 2026-02-21 05:39
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('generator', '0004_add_injury_types'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='generatedweeklyplan',
|
||||
name='cycle_id',
|
||||
field=models.CharField(blank=True, help_text='Groups weeks into training cycles', max_length=64, null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='generatedweeklyplan',
|
||||
name='is_deload',
|
||||
field=models.BooleanField(default=False, help_text='Whether this is a recovery/deload week'),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='generatedweeklyplan',
|
||||
name='week_number',
|
||||
field=models.IntegerField(default=1, help_text='Position in training cycle (1-based)'),
|
||||
),
|
||||
]
|
||||
@@ -0,0 +1,123 @@
|
||||
from django.core.management import call_command
|
||||
from django.db import migrations
|
||||
|
||||
|
||||
WORKOUT_TYPE_CALIBRATION = {
|
||||
'functional_strength_training': {
|
||||
'typical_rest_between_sets': 60,
|
||||
'typical_intensity': 'medium',
|
||||
'rep_range_min': 8,
|
||||
'rep_range_max': 15,
|
||||
'round_range_min': 3,
|
||||
'round_range_max': 4,
|
||||
'duration_bias': 0.15,
|
||||
'superset_size_min': 2,
|
||||
'superset_size_max': 4,
|
||||
},
|
||||
'traditional_strength_training': {
|
||||
'typical_rest_between_sets': 120,
|
||||
'typical_intensity': 'high',
|
||||
'rep_range_min': 4,
|
||||
'rep_range_max': 8,
|
||||
'round_range_min': 3,
|
||||
'round_range_max': 5,
|
||||
'duration_bias': 0.1,
|
||||
'superset_size_min': 1,
|
||||
'superset_size_max': 3,
|
||||
},
|
||||
'high_intensity_interval_training': {
|
||||
'typical_rest_between_sets': 30,
|
||||
'typical_intensity': 'high',
|
||||
'rep_range_min': 10,
|
||||
'rep_range_max': 20,
|
||||
'round_range_min': 3,
|
||||
'round_range_max': 5,
|
||||
'duration_bias': 0.7,
|
||||
'superset_size_min': 3,
|
||||
'superset_size_max': 6,
|
||||
},
|
||||
'cross_training': {
|
||||
'typical_rest_between_sets': 45,
|
||||
'typical_intensity': 'high',
|
||||
'rep_range_min': 8,
|
||||
'rep_range_max': 15,
|
||||
'round_range_min': 3,
|
||||
'round_range_max': 5,
|
||||
'duration_bias': 0.4,
|
||||
'superset_size_min': 3,
|
||||
'superset_size_max': 5,
|
||||
},
|
||||
'core_training': {
|
||||
'typical_rest_between_sets': 30,
|
||||
'typical_intensity': 'medium',
|
||||
'rep_range_min': 10,
|
||||
'rep_range_max': 20,
|
||||
'round_range_min': 2,
|
||||
'round_range_max': 4,
|
||||
'duration_bias': 0.5,
|
||||
'superset_size_min': 3,
|
||||
'superset_size_max': 5,
|
||||
},
|
||||
'flexibility': {
|
||||
'typical_rest_between_sets': 15,
|
||||
'typical_intensity': 'low',
|
||||
'rep_range_min': 1,
|
||||
'rep_range_max': 5,
|
||||
'round_range_min': 1,
|
||||
'round_range_max': 2,
|
||||
'duration_bias': 0.9,
|
||||
'superset_size_min': 3,
|
||||
'superset_size_max': 6,
|
||||
},
|
||||
'cardio': {
|
||||
'typical_rest_between_sets': 30,
|
||||
'typical_intensity': 'medium',
|
||||
'rep_range_min': 1,
|
||||
'rep_range_max': 1,
|
||||
'round_range_min': 1,
|
||||
'round_range_max': 3,
|
||||
'duration_bias': 1.0,
|
||||
'superset_size_min': 1,
|
||||
'superset_size_max': 3,
|
||||
},
|
||||
'hypertrophy': {
|
||||
'typical_rest_between_sets': 90,
|
||||
'typical_intensity': 'high',
|
||||
'rep_range_min': 8,
|
||||
'rep_range_max': 15,
|
||||
'round_range_min': 3,
|
||||
'round_range_max': 4,
|
||||
'duration_bias': 0.2,
|
||||
'superset_size_min': 2,
|
||||
'superset_size_max': 4,
|
||||
},
|
||||
}
|
||||
|
||||
|
||||
def apply_calibration(apps, schema_editor):
|
||||
WorkoutType = apps.get_model('generator', 'WorkoutType')
|
||||
|
||||
for type_name, fields in WORKOUT_TYPE_CALIBRATION.items():
|
||||
defaults = dict(fields)
|
||||
defaults.setdefault('display_name', type_name.replace('_', ' ').title())
|
||||
defaults.setdefault('description', '')
|
||||
WorkoutType.objects.update_or_create(name=type_name, defaults=defaults)
|
||||
|
||||
# Ensure the full 8 x 5 x 3 = 120 structure-rule matrix is present and calibrated.
|
||||
call_command('calibrate_structure_rules')
|
||||
|
||||
|
||||
def noop_reverse(apps, schema_editor):
|
||||
# Intentionally no-op: this migration normalizes live calibration data.
|
||||
pass
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('generator', '0005_add_periodization_fields'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.RunPython(apply_calibration, noop_reverse),
|
||||
]
|
||||
121
generator/migrations/0007_force_recalibration.py
Normal file
121
generator/migrations/0007_force_recalibration.py
Normal file
@@ -0,0 +1,121 @@
|
||||
from django.core.management import call_command
|
||||
from django.db import migrations
|
||||
|
||||
|
||||
WORKOUT_TYPE_CALIBRATION = {
|
||||
'functional_strength_training': {
|
||||
'typical_rest_between_sets': 60,
|
||||
'typical_intensity': 'medium',
|
||||
'rep_range_min': 8,
|
||||
'rep_range_max': 15,
|
||||
'round_range_min': 3,
|
||||
'round_range_max': 4,
|
||||
'duration_bias': 0.15,
|
||||
'superset_size_min': 2,
|
||||
'superset_size_max': 4,
|
||||
},
|
||||
'traditional_strength_training': {
|
||||
'typical_rest_between_sets': 120,
|
||||
'typical_intensity': 'high',
|
||||
'rep_range_min': 4,
|
||||
'rep_range_max': 8,
|
||||
'round_range_min': 3,
|
||||
'round_range_max': 5,
|
||||
'duration_bias': 0.1,
|
||||
'superset_size_min': 1,
|
||||
'superset_size_max': 3,
|
||||
},
|
||||
'high_intensity_interval_training': {
|
||||
'typical_rest_between_sets': 30,
|
||||
'typical_intensity': 'high',
|
||||
'rep_range_min': 10,
|
||||
'rep_range_max': 20,
|
||||
'round_range_min': 3,
|
||||
'round_range_max': 5,
|
||||
'duration_bias': 0.7,
|
||||
'superset_size_min': 3,
|
||||
'superset_size_max': 6,
|
||||
},
|
||||
'cross_training': {
|
||||
'typical_rest_between_sets': 45,
|
||||
'typical_intensity': 'high',
|
||||
'rep_range_min': 8,
|
||||
'rep_range_max': 15,
|
||||
'round_range_min': 3,
|
||||
'round_range_max': 5,
|
||||
'duration_bias': 0.4,
|
||||
'superset_size_min': 3,
|
||||
'superset_size_max': 5,
|
||||
},
|
||||
'core_training': {
|
||||
'typical_rest_between_sets': 30,
|
||||
'typical_intensity': 'medium',
|
||||
'rep_range_min': 10,
|
||||
'rep_range_max': 20,
|
||||
'round_range_min': 2,
|
||||
'round_range_max': 4,
|
||||
'duration_bias': 0.5,
|
||||
'superset_size_min': 3,
|
||||
'superset_size_max': 5,
|
||||
},
|
||||
'flexibility': {
|
||||
'typical_rest_between_sets': 15,
|
||||
'typical_intensity': 'low',
|
||||
'rep_range_min': 1,
|
||||
'rep_range_max': 5,
|
||||
'round_range_min': 1,
|
||||
'round_range_max': 2,
|
||||
'duration_bias': 0.9,
|
||||
'superset_size_min': 3,
|
||||
'superset_size_max': 6,
|
||||
},
|
||||
'cardio': {
|
||||
'typical_rest_between_sets': 30,
|
||||
'typical_intensity': 'medium',
|
||||
'rep_range_min': 1,
|
||||
'rep_range_max': 1,
|
||||
'round_range_min': 1,
|
||||
'round_range_max': 3,
|
||||
'duration_bias': 1.0,
|
||||
'superset_size_min': 1,
|
||||
'superset_size_max': 3,
|
||||
},
|
||||
'hypertrophy': {
|
||||
'typical_rest_between_sets': 90,
|
||||
'typical_intensity': 'high',
|
||||
'rep_range_min': 8,
|
||||
'rep_range_max': 15,
|
||||
'round_range_min': 3,
|
||||
'round_range_max': 4,
|
||||
'duration_bias': 0.2,
|
||||
'superset_size_min': 2,
|
||||
'superset_size_max': 4,
|
||||
},
|
||||
}
|
||||
|
||||
|
||||
def apply_calibration(apps, schema_editor):
|
||||
WorkoutType = apps.get_model('generator', 'WorkoutType')
|
||||
|
||||
for type_name, fields in WORKOUT_TYPE_CALIBRATION.items():
|
||||
defaults = dict(fields)
|
||||
defaults.setdefault('display_name', type_name.replace('_', ' ').title())
|
||||
defaults.setdefault('description', '')
|
||||
WorkoutType.objects.update_or_create(name=type_name, defaults=defaults)
|
||||
|
||||
call_command('calibrate_structure_rules')
|
||||
|
||||
|
||||
def noop_reverse(apps, schema_editor):
|
||||
pass
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('generator', '0006_calibrate_workout_types_and_structure_rules'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.RunPython(apply_calibration, noop_reverse),
|
||||
]
|
||||
0
generator/migrations/__init__.py
Normal file
0
generator/migrations/__init__.py
Normal file
249
generator/models.py
Normal file
249
generator/models.py
Normal file
@@ -0,0 +1,249 @@
|
||||
from django.core.validators import MinValueValidator, MaxValueValidator
|
||||
from django.db import models
|
||||
from registered_user.models import RegisteredUser
|
||||
from workout.models import Workout
|
||||
from exercise.models import Exercise
|
||||
from equipment.models import Equipment
|
||||
from muscle.models import Muscle
|
||||
|
||||
|
||||
INTENSITY_CHOICES = (
|
||||
('low', 'Low'),
|
||||
('medium', 'Medium'),
|
||||
('high', 'High'),
|
||||
)
|
||||
|
||||
GOAL_CHOICES = (
|
||||
('strength', 'Strength'),
|
||||
('hypertrophy', 'Hypertrophy'),
|
||||
('endurance', 'Endurance'),
|
||||
('weight_loss', 'Weight Loss'),
|
||||
('general_fitness', 'General Fitness'),
|
||||
)
|
||||
|
||||
FITNESS_LEVEL_CHOICES = (
|
||||
(1, 'Beginner'),
|
||||
(2, 'Intermediate'),
|
||||
(3, 'Advanced'),
|
||||
(4, 'Elite'),
|
||||
)
|
||||
|
||||
PLAN_STATUS_CHOICES = (
|
||||
('pending', 'Pending'),
|
||||
('completed', 'Completed'),
|
||||
('failed', 'Failed'),
|
||||
)
|
||||
|
||||
WORKOUT_STATUS_CHOICES = (
|
||||
('pending', 'Pending'),
|
||||
('accepted', 'Accepted'),
|
||||
('rejected', 'Rejected'),
|
||||
('completed', 'Completed'),
|
||||
)
|
||||
|
||||
SPLIT_TYPE_CHOICES = (
|
||||
('push', 'Push'),
|
||||
('pull', 'Pull'),
|
||||
('legs', 'Legs'),
|
||||
('upper', 'Upper'),
|
||||
('lower', 'Lower'),
|
||||
('full_body', 'Full Body'),
|
||||
('core', 'Core'),
|
||||
('cardio', 'Cardio'),
|
||||
)
|
||||
|
||||
SECTION_TYPE_CHOICES = (
|
||||
('warm_up', 'Warm Up'),
|
||||
('working', 'Working'),
|
||||
('cool_down', 'Cool Down'),
|
||||
)
|
||||
|
||||
POSITION_CHOICES = (
|
||||
('early', 'Early'),
|
||||
('middle', 'Middle'),
|
||||
('late', 'Late'),
|
||||
)
|
||||
|
||||
|
||||
# ============================================================
|
||||
# Reference / Config Models
|
||||
# ============================================================
|
||||
|
||||
class WorkoutType(models.Model):
|
||||
name = models.CharField(max_length=100, unique=True)
|
||||
display_name = models.CharField(max_length=100, blank=True, default='')
|
||||
description = models.TextField(blank=True, default='')
|
||||
typical_rest_between_sets = models.IntegerField(default=60, help_text='Seconds')
|
||||
typical_intensity = models.CharField(max_length=10, choices=INTENSITY_CHOICES, default='medium')
|
||||
rep_range_min = models.IntegerField(default=8)
|
||||
rep_range_max = models.IntegerField(default=12)
|
||||
round_range_min = models.IntegerField(default=3)
|
||||
round_range_max = models.IntegerField(default=4)
|
||||
duration_bias = models.FloatField(default=0.5, help_text='0.0=all rep-based, 1.0=all duration-based')
|
||||
superset_size_min = models.IntegerField(default=2)
|
||||
superset_size_max = models.IntegerField(default=4)
|
||||
|
||||
def __str__(self):
|
||||
return self.name
|
||||
|
||||
|
||||
# ============================================================
|
||||
# User Preference Model
|
||||
# ============================================================
|
||||
|
||||
class UserPreference(models.Model):
|
||||
registered_user = models.OneToOneField(
|
||||
RegisteredUser,
|
||||
on_delete=models.CASCADE,
|
||||
related_name='generator_preference'
|
||||
)
|
||||
available_equipment = models.ManyToManyField(Equipment, blank=True, related_name='user_preferences')
|
||||
target_muscle_groups = models.ManyToManyField(Muscle, blank=True, related_name='user_preferences')
|
||||
preferred_workout_types = models.ManyToManyField(WorkoutType, blank=True, related_name='user_preferences')
|
||||
fitness_level = models.IntegerField(choices=FITNESS_LEVEL_CHOICES, default=2)
|
||||
primary_goal = models.CharField(max_length=20, choices=GOAL_CHOICES, default='general_fitness')
|
||||
secondary_goal = models.CharField(max_length=20, choices=GOAL_CHOICES, blank=True, default='')
|
||||
days_per_week = models.IntegerField(default=4)
|
||||
preferred_workout_duration = models.IntegerField(
|
||||
default=45, help_text='Minutes',
|
||||
validators=[MinValueValidator(15), MaxValueValidator(120)],
|
||||
)
|
||||
preferred_days = models.JSONField(default=list, blank=True, help_text='List of weekday ints (0=Mon, 6=Sun)')
|
||||
injuries_limitations = models.TextField(blank=True, default='')
|
||||
injury_types = models.JSONField(
|
||||
default=list, blank=True,
|
||||
help_text='Structured injury types: knee, lower_back, upper_back, shoulder, hip, wrist, ankle, neck',
|
||||
)
|
||||
excluded_exercises = models.ManyToManyField(Exercise, blank=True, related_name='excluded_by_users')
|
||||
|
||||
def __str__(self):
|
||||
return f"Preferences for {self.registered_user.first_name}"
|
||||
|
||||
|
||||
# ============================================================
|
||||
# Generated Plan / Workout Models
|
||||
# ============================================================
|
||||
|
||||
class GeneratedWeeklyPlan(models.Model):
|
||||
created_at = models.DateTimeField(auto_now_add=True)
|
||||
registered_user = models.ForeignKey(
|
||||
RegisteredUser,
|
||||
on_delete=models.CASCADE,
|
||||
related_name='generated_plans'
|
||||
)
|
||||
week_start_date = models.DateField()
|
||||
week_end_date = models.DateField()
|
||||
status = models.CharField(max_length=10, choices=PLAN_STATUS_CHOICES, default='pending')
|
||||
preferences_snapshot = models.JSONField(default=dict, blank=True)
|
||||
generation_time_ms = models.IntegerField(null=True, blank=True)
|
||||
|
||||
# Periodization fields
|
||||
week_number = models.IntegerField(default=1, help_text='Position in training cycle (1-based)')
|
||||
is_deload = models.BooleanField(default=False, help_text='Whether this is a recovery/deload week')
|
||||
cycle_id = models.CharField(max_length=64, null=True, blank=True, help_text='Groups weeks into training cycles')
|
||||
|
||||
class Meta:
|
||||
ordering = ['-created_at']
|
||||
|
||||
def __str__(self):
|
||||
return f"Plan {self.id} for {self.registered_user.first_name} ({self.week_start_date})"
|
||||
|
||||
|
||||
class GeneratedWorkout(models.Model):
|
||||
plan = models.ForeignKey(
|
||||
GeneratedWeeklyPlan,
|
||||
on_delete=models.CASCADE,
|
||||
related_name='generated_workouts'
|
||||
)
|
||||
workout = models.OneToOneField(
|
||||
Workout,
|
||||
on_delete=models.SET_NULL,
|
||||
null=True,
|
||||
blank=True,
|
||||
related_name='generated_from'
|
||||
)
|
||||
workout_type = models.ForeignKey(
|
||||
WorkoutType,
|
||||
on_delete=models.SET_NULL,
|
||||
null=True,
|
||||
blank=True,
|
||||
related_name='generated_workouts'
|
||||
)
|
||||
scheduled_date = models.DateField()
|
||||
day_of_week = models.IntegerField(help_text='0=Monday, 6=Sunday')
|
||||
is_rest_day = models.BooleanField(default=False)
|
||||
status = models.CharField(max_length=10, choices=WORKOUT_STATUS_CHOICES, default='pending')
|
||||
focus_area = models.CharField(max_length=255, blank=True, default='')
|
||||
target_muscles = models.JSONField(default=list, blank=True)
|
||||
user_rating = models.IntegerField(null=True, blank=True)
|
||||
user_feedback = models.TextField(blank=True, default='')
|
||||
|
||||
class Meta:
|
||||
ordering = ['scheduled_date']
|
||||
|
||||
def __str__(self):
|
||||
if self.is_rest_day:
|
||||
return f"Rest Day - {self.scheduled_date}"
|
||||
return f"{self.focus_area} - {self.scheduled_date}"
|
||||
|
||||
|
||||
# ============================================================
|
||||
# ML Pattern Models (populated by analyze_workouts command)
|
||||
# ============================================================
|
||||
|
||||
class MuscleGroupSplit(models.Model):
|
||||
muscle_names = models.JSONField(default=list, help_text='List of muscle group names')
|
||||
frequency = models.IntegerField(default=0, help_text='How often this combo appeared')
|
||||
label = models.CharField(max_length=100, blank=True, default='')
|
||||
typical_exercise_count = models.IntegerField(default=6)
|
||||
split_type = models.CharField(max_length=20, choices=SPLIT_TYPE_CHOICES, default='full_body')
|
||||
|
||||
def __str__(self):
|
||||
return f"{self.label} ({self.split_type}) - freq: {self.frequency}"
|
||||
|
||||
|
||||
class WeeklySplitPattern(models.Model):
|
||||
days_per_week = models.IntegerField()
|
||||
pattern = models.JSONField(default=list, help_text='Ordered list of MuscleGroupSplit IDs')
|
||||
pattern_labels = models.JSONField(default=list, help_text='Ordered list of split labels')
|
||||
frequency = models.IntegerField(default=0)
|
||||
rest_day_positions = models.JSONField(default=list, help_text='Day indices that are rest days')
|
||||
|
||||
def __str__(self):
|
||||
return f"{self.days_per_week}-day split (freq: {self.frequency}): {self.pattern_labels}"
|
||||
|
||||
|
||||
class WorkoutStructureRule(models.Model):
|
||||
workout_type = models.ForeignKey(
|
||||
WorkoutType,
|
||||
on_delete=models.CASCADE,
|
||||
null=True,
|
||||
blank=True,
|
||||
related_name='structure_rules'
|
||||
)
|
||||
section_type = models.CharField(max_length=10, choices=SECTION_TYPE_CHOICES)
|
||||
movement_patterns = models.JSONField(default=list)
|
||||
typical_rounds = models.IntegerField(default=3)
|
||||
typical_exercises_per_superset = models.IntegerField(default=3)
|
||||
typical_rep_range_min = models.IntegerField(default=8)
|
||||
typical_rep_range_max = models.IntegerField(default=12)
|
||||
typical_duration_range_min = models.IntegerField(default=30, help_text='Seconds')
|
||||
typical_duration_range_max = models.IntegerField(default=45, help_text='Seconds')
|
||||
goal_type = models.CharField(max_length=20, choices=GOAL_CHOICES, default='general_fitness')
|
||||
|
||||
def __str__(self):
|
||||
wt = self.workout_type.name if self.workout_type else 'Any'
|
||||
return f"{wt} - {self.section_type} ({self.goal_type})"
|
||||
|
||||
|
||||
class MovementPatternOrder(models.Model):
|
||||
position = models.CharField(max_length=10, choices=POSITION_CHOICES)
|
||||
movement_pattern = models.CharField(max_length=100)
|
||||
frequency = models.IntegerField(default=0)
|
||||
section_type = models.CharField(max_length=10, choices=SECTION_TYPE_CHOICES, default='working')
|
||||
|
||||
class Meta:
|
||||
ordering = ['position', '-frequency']
|
||||
|
||||
def __str__(self):
|
||||
return f"{self.movement_pattern} @ {self.position} (freq: {self.frequency})"
|
||||
923
generator/rules_engine.py
Normal file
923
generator/rules_engine.py
Normal file
@@ -0,0 +1,923 @@
|
||||
"""
|
||||
Rules Engine for workout validation.
|
||||
|
||||
Structured registry of quantitative workout rules extracted from
|
||||
workout_research.md. Used by the quality gates in WorkoutGenerator
|
||||
and the check_rules_drift management command.
|
||||
"""
|
||||
|
||||
import re
|
||||
from dataclasses import dataclass, field
|
||||
from typing import List, Optional, Dict, Any, Tuple
|
||||
|
||||
import logging
|
||||
|
||||
from generator.services.exercise_selector import extract_movement_families
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@dataclass
|
||||
class RuleViolation:
|
||||
"""Represents a single rule violation found during workout validation."""
|
||||
rule_id: str
|
||||
severity: str # 'error', 'warning', 'info'
|
||||
message: str
|
||||
actual_value: Any = None
|
||||
expected_range: Any = None
|
||||
|
||||
|
||||
# ======================================================================
|
||||
# Per-workout-type rules — keyed by workout type name (lowercase, underscored)
|
||||
# Values sourced from workout_research.md "DB Calibration Summary" table
|
||||
# and the detailed sections for each workout type.
|
||||
# ======================================================================
|
||||
|
||||
WORKOUT_TYPE_RULES: Dict[str, Dict[str, Any]] = {
|
||||
|
||||
# ------------------------------------------------------------------
|
||||
# 1. Traditional Strength Training
|
||||
# ------------------------------------------------------------------
|
||||
'traditional_strength_training': {
|
||||
'rep_ranges': {
|
||||
'primary': (3, 6),
|
||||
'secondary': (6, 8),
|
||||
'accessory': (8, 12),
|
||||
},
|
||||
'rest_periods': { # seconds
|
||||
'heavy': (180, 300), # 1-5 reps: 3-5 min
|
||||
'moderate': (120, 180), # 6-8 reps: 2-3 min
|
||||
'light': (60, 90), # 8-12 reps: 60-90s
|
||||
},
|
||||
'duration_bias_range': (0.0, 0.1),
|
||||
'superset_size_range': (1, 3),
|
||||
'round_range': (4, 6),
|
||||
'typical_rest': 120,
|
||||
'typical_intensity': 'high',
|
||||
'movement_pattern_order': [
|
||||
'compound_heavy', 'compound_secondary', 'isolation',
|
||||
],
|
||||
'max_exercises_per_session': 6,
|
||||
'compound_pct_min': 0.6, # 70% compounds, allow some slack
|
||||
},
|
||||
|
||||
# ------------------------------------------------------------------
|
||||
# 2. Hypertrophy
|
||||
# ------------------------------------------------------------------
|
||||
'hypertrophy': {
|
||||
'rep_ranges': {
|
||||
'primary': (6, 10),
|
||||
'secondary': (8, 12),
|
||||
'accessory': (10, 15),
|
||||
},
|
||||
'rest_periods': {
|
||||
'heavy': (120, 180), # compounds: 2-3 min
|
||||
'moderate': (60, 120), # moderate: 60-120s
|
||||
'light': (45, 90), # isolation: 45-90s
|
||||
},
|
||||
'duration_bias_range': (0.1, 0.2),
|
||||
'superset_size_range': (2, 4),
|
||||
'round_range': (3, 4),
|
||||
'typical_rest': 90,
|
||||
'typical_intensity': 'high',
|
||||
'movement_pattern_order': [
|
||||
'compound_heavy', 'compound_secondary',
|
||||
'lengthened_isolation', 'shortened_isolation',
|
||||
],
|
||||
'max_exercises_per_session': 8,
|
||||
'compound_pct_min': 0.4,
|
||||
},
|
||||
|
||||
# ------------------------------------------------------------------
|
||||
# 3. HIIT
|
||||
# ------------------------------------------------------------------
|
||||
'high_intensity_interval_training': {
|
||||
'rep_ranges': {
|
||||
'primary': (10, 20),
|
||||
'secondary': (10, 20),
|
||||
'accessory': (10, 20),
|
||||
},
|
||||
'rest_periods': {
|
||||
'heavy': (20, 40), # work:rest based
|
||||
'moderate': (20, 30),
|
||||
'light': (10, 20),
|
||||
},
|
||||
'duration_bias_range': (0.6, 0.8),
|
||||
'superset_size_range': (3, 6),
|
||||
'round_range': (3, 5),
|
||||
'typical_rest': 30,
|
||||
'typical_intensity': 'high',
|
||||
'movement_pattern_order': [
|
||||
'posterior_chain', 'upper_push', 'core_explosive',
|
||||
'upper_pull', 'lower_body', 'finisher',
|
||||
],
|
||||
'max_duration_minutes': 30,
|
||||
'max_exercises_per_session': 12,
|
||||
'compound_pct_min': 0.3,
|
||||
},
|
||||
|
||||
# ------------------------------------------------------------------
|
||||
# 4. Functional Strength Training
|
||||
# ------------------------------------------------------------------
|
||||
'functional_strength_training': {
|
||||
'rep_ranges': {
|
||||
'primary': (3, 6),
|
||||
'secondary': (6, 10),
|
||||
'accessory': (8, 12),
|
||||
},
|
||||
'rest_periods': {
|
||||
'heavy': (180, 300), # 3-5 min for heavy
|
||||
'moderate': (120, 180), # 2-3 min
|
||||
'light': (45, 90), # 45-90s for carries/circuits
|
||||
},
|
||||
'duration_bias_range': (0.1, 0.2),
|
||||
'superset_size_range': (2, 3),
|
||||
'round_range': (3, 5),
|
||||
'typical_rest': 60,
|
||||
'typical_intensity': 'medium',
|
||||
'movement_pattern_order': [
|
||||
'squat', 'hinge', 'horizontal_push', 'horizontal_pull',
|
||||
'vertical_push', 'vertical_pull', 'carry',
|
||||
],
|
||||
'max_exercises_per_session': 6,
|
||||
'compound_pct_min': 0.7, # 70% compounds, 30% accessories
|
||||
},
|
||||
|
||||
# ------------------------------------------------------------------
|
||||
# 5. Cross Training
|
||||
# ------------------------------------------------------------------
|
||||
'cross_training': {
|
||||
'rep_ranges': {
|
||||
'primary': (1, 5),
|
||||
'secondary': (6, 15),
|
||||
'accessory': (15, 30),
|
||||
},
|
||||
'rest_periods': {
|
||||
'heavy': (120, 300), # strength portions
|
||||
'moderate': (45, 120),
|
||||
'light': (30, 60),
|
||||
},
|
||||
'duration_bias_range': (0.3, 0.5),
|
||||
'superset_size_range': (3, 5),
|
||||
'round_range': (3, 5),
|
||||
'typical_rest': 45,
|
||||
'typical_intensity': 'high',
|
||||
'movement_pattern_order': [
|
||||
'complex_cns', 'moderate_complexity', 'simple_repetitive',
|
||||
],
|
||||
'max_exercises_per_session': 10,
|
||||
'compound_pct_min': 0.5,
|
||||
'pull_press_ratio_min': 1.5, # Cross training specific
|
||||
},
|
||||
|
||||
# ------------------------------------------------------------------
|
||||
# 6. Core Training
|
||||
# ------------------------------------------------------------------
|
||||
'core_training': {
|
||||
'rep_ranges': {
|
||||
'primary': (10, 20),
|
||||
'secondary': (10, 20),
|
||||
'accessory': (10, 20),
|
||||
},
|
||||
'rest_periods': {
|
||||
'heavy': (30, 90),
|
||||
'moderate': (30, 60),
|
||||
'light': (30, 45),
|
||||
},
|
||||
'duration_bias_range': (0.5, 0.6),
|
||||
'superset_size_range': (3, 5),
|
||||
'round_range': (2, 4),
|
||||
'typical_rest': 30,
|
||||
'typical_intensity': 'medium',
|
||||
'movement_pattern_order': [
|
||||
'anti_extension', 'anti_rotation', 'anti_lateral_flexion',
|
||||
'hip_flexion', 'rotation',
|
||||
],
|
||||
'max_exercises_per_session': 8,
|
||||
'compound_pct_min': 0.0,
|
||||
'required_anti_movement_patterns': [
|
||||
'anti_extension', 'anti_rotation', 'anti_lateral_flexion',
|
||||
],
|
||||
},
|
||||
|
||||
# ------------------------------------------------------------------
|
||||
# 7. Flexibility
|
||||
# ------------------------------------------------------------------
|
||||
'flexibility': {
|
||||
'rep_ranges': {
|
||||
'primary': (1, 3),
|
||||
'secondary': (1, 3),
|
||||
'accessory': (1, 5),
|
||||
},
|
||||
'rest_periods': {
|
||||
'heavy': (10, 15),
|
||||
'moderate': (10, 15),
|
||||
'light': (10, 15),
|
||||
},
|
||||
'duration_bias_range': (0.9, 1.0),
|
||||
'superset_size_range': (3, 6),
|
||||
'round_range': (1, 2),
|
||||
'typical_rest': 15,
|
||||
'typical_intensity': 'low',
|
||||
'movement_pattern_order': [
|
||||
'dynamic_warmup', 'static_stretches', 'pnf', 'cooldown_flow',
|
||||
],
|
||||
'max_exercises_per_session': 12,
|
||||
'compound_pct_min': 0.0,
|
||||
},
|
||||
|
||||
# ------------------------------------------------------------------
|
||||
# 8. Cardio
|
||||
# ------------------------------------------------------------------
|
||||
'cardio': {
|
||||
'rep_ranges': {
|
||||
'primary': (1, 1),
|
||||
'secondary': (1, 1),
|
||||
'accessory': (1, 1),
|
||||
},
|
||||
'rest_periods': {
|
||||
'heavy': (120, 180), # between hard intervals
|
||||
'moderate': (60, 120),
|
||||
'light': (30, 60),
|
||||
},
|
||||
'duration_bias_range': (0.9, 1.0),
|
||||
'superset_size_range': (1, 3),
|
||||
'round_range': (1, 3),
|
||||
'typical_rest': 30,
|
||||
'typical_intensity': 'medium',
|
||||
'movement_pattern_order': [
|
||||
'warmup', 'steady_state', 'intervals', 'cooldown',
|
||||
],
|
||||
'max_exercises_per_session': 6,
|
||||
'compound_pct_min': 0.0,
|
||||
},
|
||||
}
|
||||
|
||||
|
||||
# ======================================================================
|
||||
# Universal Rules — apply regardless of workout type
|
||||
# ======================================================================
|
||||
|
||||
UNIVERSAL_RULES: Dict[str, Any] = {
|
||||
'push_pull_ratio_min': 1.0, # pull:push >= 1:1
|
||||
'deload_every_weeks': (4, 6),
|
||||
'compound_before_isolation': True,
|
||||
'warmup_mandatory': True,
|
||||
'cooldown_stretch_only': True,
|
||||
'max_hiit_duration_min': 30,
|
||||
'core_anti_movement_patterns': [
|
||||
'anti_extension', 'anti_rotation', 'anti_lateral_flexion',
|
||||
],
|
||||
'max_exercises_per_workout': 30,
|
||||
}
|
||||
|
||||
|
||||
# ======================================================================
|
||||
# DB Calibration reference — expected values for WorkoutType model
|
||||
# Sourced from workout_research.md Section 9.
|
||||
# ======================================================================
|
||||
|
||||
DB_CALIBRATION: Dict[str, Dict[str, Any]] = {
|
||||
'functional_strength_training': {
|
||||
'duration_bias': 0.15,
|
||||
'typical_rest_between_sets': 60,
|
||||
'typical_intensity': 'medium',
|
||||
'rep_range_min': 8,
|
||||
'rep_range_max': 15,
|
||||
'round_range_min': 3,
|
||||
'round_range_max': 4,
|
||||
'superset_size_min': 2,
|
||||
'superset_size_max': 4,
|
||||
},
|
||||
'traditional_strength_training': {
|
||||
'duration_bias': 0.1,
|
||||
'typical_rest_between_sets': 120,
|
||||
'typical_intensity': 'high',
|
||||
'rep_range_min': 4,
|
||||
'rep_range_max': 8,
|
||||
'round_range_min': 3,
|
||||
'round_range_max': 5,
|
||||
'superset_size_min': 1,
|
||||
'superset_size_max': 3,
|
||||
},
|
||||
'high_intensity_interval_training': {
|
||||
'duration_bias': 0.7,
|
||||
'typical_rest_between_sets': 30,
|
||||
'typical_intensity': 'high',
|
||||
'rep_range_min': 10,
|
||||
'rep_range_max': 20,
|
||||
'round_range_min': 3,
|
||||
'round_range_max': 5,
|
||||
'superset_size_min': 3,
|
||||
'superset_size_max': 6,
|
||||
},
|
||||
'cross_training': {
|
||||
'duration_bias': 0.4,
|
||||
'typical_rest_between_sets': 45,
|
||||
'typical_intensity': 'high',
|
||||
'rep_range_min': 8,
|
||||
'rep_range_max': 15,
|
||||
'round_range_min': 3,
|
||||
'round_range_max': 5,
|
||||
'superset_size_min': 3,
|
||||
'superset_size_max': 5,
|
||||
},
|
||||
'core_training': {
|
||||
'duration_bias': 0.5,
|
||||
'typical_rest_between_sets': 30,
|
||||
'typical_intensity': 'medium',
|
||||
'rep_range_min': 10,
|
||||
'rep_range_max': 20,
|
||||
'round_range_min': 2,
|
||||
'round_range_max': 4,
|
||||
'superset_size_min': 3,
|
||||
'superset_size_max': 5,
|
||||
},
|
||||
'flexibility': {
|
||||
'duration_bias': 0.9,
|
||||
'typical_rest_between_sets': 15,
|
||||
'typical_intensity': 'low',
|
||||
'rep_range_min': 1,
|
||||
'rep_range_max': 5,
|
||||
'round_range_min': 1,
|
||||
'round_range_max': 2,
|
||||
'superset_size_min': 3,
|
||||
'superset_size_max': 6,
|
||||
},
|
||||
'cardio': {
|
||||
'duration_bias': 1.0,
|
||||
'typical_rest_between_sets': 30,
|
||||
'typical_intensity': 'medium',
|
||||
'rep_range_min': 1,
|
||||
'rep_range_max': 1,
|
||||
'round_range_min': 1,
|
||||
'round_range_max': 3,
|
||||
'superset_size_min': 1,
|
||||
'superset_size_max': 3,
|
||||
},
|
||||
'hypertrophy': {
|
||||
'duration_bias': 0.2,
|
||||
'typical_rest_between_sets': 90,
|
||||
'typical_intensity': 'high',
|
||||
'rep_range_min': 8,
|
||||
'rep_range_max': 15,
|
||||
'round_range_min': 3,
|
||||
'round_range_max': 4,
|
||||
'superset_size_min': 2,
|
||||
'superset_size_max': 4,
|
||||
},
|
||||
}
|
||||
|
||||
|
||||
# Canonical key aliases for workout type names. This lets callers pass
|
||||
# legacy/short names while still resolving to DB-style identifiers.
|
||||
WORKOUT_TYPE_KEY_ALIASES: Dict[str, str] = {
|
||||
'hiit': 'high_intensity_interval_training',
|
||||
}
|
||||
|
||||
|
||||
# ======================================================================
|
||||
# Validation helpers
|
||||
# ======================================================================
|
||||
|
||||
def _normalize_type_key(name: str) -> str:
|
||||
"""Convert a workout type name to the canonical key in WORKOUT_TYPE_RULES."""
|
||||
if not name:
|
||||
return ''
|
||||
normalized = name.strip().lower().replace('-', '_').replace(' ', '_')
|
||||
normalized = '_'.join(part for part in normalized.split('_') if part)
|
||||
return WORKOUT_TYPE_KEY_ALIASES.get(normalized, normalized)
|
||||
|
||||
|
||||
def _classify_rep_weight(reps: int) -> str:
|
||||
"""Classify rep count into heavy/moderate/light for rest period lookup."""
|
||||
if reps <= 5:
|
||||
return 'heavy'
|
||||
elif reps <= 10:
|
||||
return 'moderate'
|
||||
return 'light'
|
||||
|
||||
|
||||
def _has_warmup(supersets: list) -> bool:
|
||||
"""Check if the workout spec contains a warm-up superset."""
|
||||
for ss in supersets:
|
||||
name = (ss.get('name') or '').lower()
|
||||
if 'warm' in name:
|
||||
return True
|
||||
return False
|
||||
|
||||
|
||||
def _has_cooldown(supersets: list) -> bool:
|
||||
"""Check if the workout spec contains a cool-down superset."""
|
||||
for ss in supersets:
|
||||
name = (ss.get('name') or '').lower()
|
||||
if 'cool' in name:
|
||||
return True
|
||||
return False
|
||||
|
||||
|
||||
def _get_working_supersets(supersets: list) -> list:
|
||||
"""Extract only working (non warmup/cooldown) supersets."""
|
||||
working = []
|
||||
for ss in supersets:
|
||||
name = (ss.get('name') or '').lower()
|
||||
if 'warm' not in name and 'cool' not in name:
|
||||
working.append(ss)
|
||||
return working
|
||||
|
||||
|
||||
def _count_push_pull(supersets: list) -> Tuple[int, int]:
|
||||
"""Count push and pull exercises across working supersets.
|
||||
|
||||
Exercises with BOTH push AND pull patterns are counted as neutral
|
||||
(neither push nor pull) to avoid double-counting.
|
||||
|
||||
Returns (push_count, pull_count).
|
||||
"""
|
||||
push_count = 0
|
||||
pull_count = 0
|
||||
for ss in _get_working_supersets(supersets):
|
||||
for entry in ss.get('exercises', []):
|
||||
ex = entry.get('exercise')
|
||||
if ex is None:
|
||||
continue
|
||||
patterns = getattr(ex, 'movement_patterns', '') or ''
|
||||
patterns_lower = patterns.lower()
|
||||
is_push = 'push' in patterns_lower
|
||||
is_pull = 'pull' in patterns_lower
|
||||
if is_push and is_pull:
|
||||
# Dual pattern — count as neutral to avoid double-counting
|
||||
pass
|
||||
elif is_push:
|
||||
push_count += 1
|
||||
elif is_pull:
|
||||
pull_count += 1
|
||||
return push_count, pull_count
|
||||
|
||||
|
||||
def _check_compound_before_isolation(supersets: list) -> bool:
|
||||
"""Check that compound exercises appear before isolation in working supersets.
|
||||
|
||||
Returns True if ordering is correct (or no mix), False if isolation
|
||||
appears before compound.
|
||||
"""
|
||||
working = _get_working_supersets(supersets)
|
||||
seen_isolation = False
|
||||
compound_after_isolation = False
|
||||
for ss in working:
|
||||
for entry in ss.get('exercises', []):
|
||||
ex = entry.get('exercise')
|
||||
if ex is None:
|
||||
continue
|
||||
is_compound = getattr(ex, 'is_compound', False)
|
||||
tier = getattr(ex, 'exercise_tier', None)
|
||||
if tier == 'accessory' or (not is_compound and tier != 'primary'):
|
||||
seen_isolation = True
|
||||
elif is_compound and tier in ('primary', 'secondary'):
|
||||
if seen_isolation:
|
||||
compound_after_isolation = True
|
||||
return not compound_after_isolation
|
||||
|
||||
|
||||
def _focus_key_for_entry(entry: dict) -> Optional[str]:
|
||||
"""Derive a coarse focus key from an entry's exercise."""
|
||||
ex = entry.get('exercise')
|
||||
if ex is None:
|
||||
return None
|
||||
families = sorted(extract_movement_families(getattr(ex, 'name', '') or ''))
|
||||
if families:
|
||||
return families[0]
|
||||
patterns = (getattr(ex, 'movement_patterns', '') or '').lower()
|
||||
for token in ('upper pull', 'upper push', 'hip hinge', 'squat', 'lunge', 'core', 'carry'):
|
||||
if token in patterns:
|
||||
return token
|
||||
return None
|
||||
|
||||
|
||||
def _is_recovery_entry(entry: dict) -> bool:
|
||||
"""Return True when an entry is a recovery/stretch movement."""
|
||||
ex = entry.get('exercise')
|
||||
if ex is None:
|
||||
return False
|
||||
|
||||
name = (getattr(ex, 'name', '') or '').lower()
|
||||
# Use word boundary check to avoid over-matching (e.g. "Stretch Band Row"
|
||||
# should not be flagged as recovery).
|
||||
if re.search(r'\bstretch(ing|es|ed)?\b', name):
|
||||
return True
|
||||
|
||||
patterns = (getattr(ex, 'movement_patterns', '') or '').lower()
|
||||
recovery_tokens = (
|
||||
'mobility - static',
|
||||
'static stretch',
|
||||
'cool down',
|
||||
'cooldown',
|
||||
'yoga',
|
||||
'breathing',
|
||||
'massage',
|
||||
)
|
||||
return any(token in patterns for token in recovery_tokens)
|
||||
|
||||
|
||||
# ======================================================================
|
||||
# Main validation function
|
||||
# ======================================================================
|
||||
|
||||
def validate_workout(
    workout_spec: dict,
    workout_type_name: str,
    goal: str = 'general_fitness',
) -> List[RuleViolation]:
    """Validate a workout spec against all applicable rules.

    Runs twelve checks in order: per-tier rep ranges, duration bias,
    superset size, push:pull balance, working-set guardrails, focus
    spread, compound-before-isolation ordering, warm-up presence,
    cool-down presence, HIIT duration cap, total exercise count, and
    workout-type character match. Violations accumulate; only an empty
    'supersets' list short-circuits.

    Parameters
    ----------
    workout_spec : dict
        Must contain 'supersets' key with list of superset dicts.
        Each superset dict has 'name', 'exercises' (list of entry dicts
        with 'exercise', 'reps'/'duration', 'order'), 'rounds'.
    workout_type_name : str
        e.g. 'Traditional Strength Training' or 'hiit'
    goal : str
        User's primary goal.
        NOTE(review): currently unused by this implementation; kept for
        interface compatibility with callers that pass it.

    Returns
    -------
    List[RuleViolation]
    """
    violations: List[RuleViolation] = []
    supersets = workout_spec.get('supersets', [])
    if not supersets:
        violations.append(RuleViolation(
            rule_id='empty_workout',
            severity='error',
            message='Workout has no supersets.',
        ))
        return violations

    # Normalize the display name into the key used by WORKOUT_TYPE_RULES;
    # unknown types get an empty rule dict, which skips checks 1-3.
    wt_key = _normalize_type_key(workout_type_name)
    wt_rules = WORKOUT_TYPE_RULES.get(wt_key, {})

    # Working supersets exclude warm-up/cool-down sections.
    working = _get_working_supersets(supersets)

    # ------------------------------------------------------------------
    # 1. Rep range checks per exercise tier
    # ------------------------------------------------------------------
    rep_ranges = wt_rules.get('rep_ranges', {})
    if rep_ranges:
        for ss in working:
            for entry in ss.get('exercises', []):
                ex = entry.get('exercise')
                reps = entry.get('reps')
                if ex is None or reps is None:
                    continue
                # Only check rep-based exercises
                is_reps = getattr(ex, 'is_reps', True)
                if not is_reps:
                    continue
                tier = getattr(ex, 'exercise_tier', 'accessory') or 'accessory'
                expected = rep_ranges.get(tier)
                if expected is None:
                    continue
                low, high = expected
                # Allow a small tolerance for fitness scaling
                tolerance = 2
                if reps < low - tolerance or reps > high + tolerance:
                    violations.append(RuleViolation(
                        rule_id=f'rep_range_{tier}',
                        severity='error',
                        message=(
                            f'{tier.title()} exercise has {reps} reps, '
                            f'expected {low}-{high} for {workout_type_name}.'
                        ),
                        actual_value=reps,
                        expected_range=(low, high),
                    ))

    # ------------------------------------------------------------------
    # 2. Duration bias check
    # ------------------------------------------------------------------
    # Bias = fraction of working entries that are duration-only (no reps).
    duration_bias_range = wt_rules.get('duration_bias_range')
    if duration_bias_range and working:
        total_exercises = 0
        duration_exercises = 0
        for ss in working:
            for entry in ss.get('exercises', []):
                total_exercises += 1
                if entry.get('duration') and not entry.get('reps'):
                    duration_exercises += 1
        if total_exercises > 0:
            actual_bias = duration_exercises / total_exercises
            low, high = duration_bias_range
            # Allow generous tolerance for bias (it's a guideline);
            # 0.3 here is an absolute fraction, i.e. 30 percentage points.
            if actual_bias > high + 0.3:
                violations.append(RuleViolation(
                    rule_id='duration_bias_high',
                    severity='warning',
                    message=(
                        f'Duration bias {actual_bias:.1%} exceeds expected '
                        f'range {low:.0%}-{high:.0%} for {workout_type_name}.'
                    ),
                    actual_value=actual_bias,
                    expected_range=duration_bias_range,
                ))
            elif actual_bias < low - 0.3 and low > 0:
                violations.append(RuleViolation(
                    rule_id='duration_bias_low',
                    severity='warning',
                    message=(
                        f'Duration bias {actual_bias:.1%} below expected '
                        f'range {low:.0%}-{high:.0%} for {workout_type_name}.'
                    ),
                    actual_value=actual_bias,
                    expected_range=duration_bias_range,
                ))

    # ------------------------------------------------------------------
    # 3. Superset size check
    # ------------------------------------------------------------------
    ss_range = wt_rules.get('superset_size_range')
    if ss_range and working:
        low, high = ss_range
        for ss in working:
            ex_count = len(ss.get('exercises', []))
            # Allow 1 extra for sided pairs
            if ex_count > high + 1:
                violations.append(RuleViolation(
                    rule_id='superset_size',
                    severity='warning',
                    message=(
                        f"Superset '{ss.get('name')}' has {ex_count} exercises, "
                        f"expected {low}-{high} for {workout_type_name}."
                    ),
                    actual_value=ex_count,
                    expected_range=ss_range,
                ))

    # ------------------------------------------------------------------
    # 4. Push:Pull ratio (universal rule)
    # ------------------------------------------------------------------
    # Counts come from ALL supersets (not just working sets).
    push_count, pull_count = _count_push_pull(supersets)
    if push_count > 0 and pull_count > 0:
        ratio = pull_count / push_count
        min_ratio = UNIVERSAL_RULES['push_pull_ratio_min']
        if ratio < min_ratio - 0.2:  # Allow slight slack
            violations.append(RuleViolation(
                rule_id='push_pull_ratio',
                severity='warning',
                message=(
                    f'Pull:push ratio {ratio:.2f} below minimum {min_ratio}. '
                    f'({pull_count} pull, {push_count} push exercises)'
                ),
                actual_value=ratio,
                expected_range=(min_ratio, None),
            ))
    elif pull_count == 0 and push_count > 0:
        # All-push workouts get a dedicated message instead of a ratio.
        violations.append(RuleViolation(
            rule_id='push_pull_ratio',
            severity='warning',
            message=(
                f'Workout has {push_count} push exercises and 0 pull exercises. '
                f'Consider adding pull movements for balance.'
            ),
            actual_value=0,
            expected_range=(UNIVERSAL_RULES['push_pull_ratio_min'], None),
        ))

    # ------------------------------------------------------------------
    # 5. Working-set guardrails (no recovery movements, non-zero rest)
    # ------------------------------------------------------------------
    for ss in working:
        ss_name = ss.get('name') or 'Working set'
        rest_between_rounds = ss.get('rest_between_rounds')
        if rest_between_rounds is None or rest_between_rounds <= 0:
            violations.append(RuleViolation(
                rule_id='working_rest_missing',
                severity='warning',
                message=(
                    f"{ss_name} is missing rest_between_rounds "
                    "(expected a positive value)."
                ),
                actual_value=rest_between_rounds,
                expected_range=(15, None),
            ))

        # Recovery/stretch movements belong in warm-up/cool-down, not here.
        recovery_names = []
        for entry in ss.get('exercises', []):
            if _is_recovery_entry(entry):
                ex = entry.get('exercise')
                recovery_names.append(getattr(ex, 'name', 'Unknown Exercise'))
        if recovery_names:
            violations.append(RuleViolation(
                rule_id='working_contains_recovery',
                severity='error',
                message=(
                    f"{ss_name} contains recovery/stretch movement(s): "
                    f"{', '.join(sorted(set(recovery_names)))}."
                ),
                actual_value=sorted(set(recovery_names)),
            ))

    # ------------------------------------------------------------------
    # 6. Focus spread across working supersets
    # ------------------------------------------------------------------
    if working:
        # Hard rule: the same focus key must not repeat within one superset.
        for ss in working:
            seen_focus = set()
            duplicate_focus = set()
            for entry in ss.get('exercises', []):
                focus_key = _focus_key_for_entry(entry)
                if not focus_key:
                    continue
                if focus_key in seen_focus:
                    duplicate_focus.add(focus_key)
                seen_focus.add(focus_key)
            if duplicate_focus:
                violations.append(RuleViolation(
                    rule_id='superset_focus_repetition',
                    severity='error',
                    message=(
                        f"Superset '{ss.get('name')}' repeats focus area(s): "
                        f"{', '.join(sorted(duplicate_focus))}."
                    ),
                    actual_value=sorted(duplicate_focus),
                ))

        # Advisory: same dominant focus in adjacent working supersets.
        # Only supersets with a non-empty focus set advance the comparison,
        # so an untagged superset between two identical ones still triggers.
        previous_focus = None
        for ss in working:
            focus_keys = {
                _focus_key_for_entry(entry)
                for entry in ss.get('exercises', [])
            }
            focus_keys.discard(None)
            if previous_focus is not None and focus_keys and focus_keys == previous_focus:
                violations.append(RuleViolation(
                    rule_id='adjacent_superset_focus_repetition',
                    severity='info',
                    message=(
                        f"Adjacent supersets reuse the same focus profile "
                        f"({', '.join(sorted(focus_keys))}); spread focus when possible."
                    ),
                    actual_value=sorted(focus_keys),
                ))
            if focus_keys:
                previous_focus = focus_keys

    # ------------------------------------------------------------------
    # 7. Compound before isolation ordering
    # ------------------------------------------------------------------
    if UNIVERSAL_RULES['compound_before_isolation']:
        if not _check_compound_before_isolation(supersets):
            violations.append(RuleViolation(
                rule_id='compound_before_isolation',
                severity='info',
                message='Compound exercises should generally appear before isolation.',
            ))

    # ------------------------------------------------------------------
    # 8. Warmup check
    # ------------------------------------------------------------------
    if UNIVERSAL_RULES['warmup_mandatory']:
        if not _has_warmup(supersets):
            violations.append(RuleViolation(
                rule_id='warmup_missing',
                severity='error',
                message='Workout is missing a warm-up section.',
            ))

    # ------------------------------------------------------------------
    # 9. Cooldown check
    # ------------------------------------------------------------------
    # Unlike warm-up, a missing cool-down is unconditional and only a warning.
    if not _has_cooldown(supersets):
        violations.append(RuleViolation(
            rule_id='cooldown_missing',
            severity='warning',
            message='Workout is missing a cool-down section.',
        ))

    # ------------------------------------------------------------------
    # 10. HIIT duration cap
    # ------------------------------------------------------------------
    if wt_key == 'high_intensity_interval_training':
        max_hiit_min = UNIVERSAL_RULES.get('max_hiit_duration_min', 30)
        # Estimate total working time from working supersets
        total_working_exercises = sum(
            len(ss.get('exercises', []))
            for ss in working
        )
        total_working_rounds = sum(
            ss.get('rounds', 1)
            for ss in working
        )
        # Rough estimate: each exercise ~30-45s of work per round
        # (37.5 s is the midpoint of that band).
        est_working_min = (total_working_exercises * total_working_rounds * 37.5) / 60
        if est_working_min > max_hiit_min * 1.5:
            violations.append(RuleViolation(
                rule_id='hiit_duration_cap',
                severity='warning',
                message=(
                    f'HIIT workout estimated at ~{est_working_min:.0f} min working time, '
                    f'exceeding recommended {max_hiit_min} min cap.'
                ),
                actual_value=est_working_min,
                expected_range=(0, max_hiit_min),
            ))

    # ------------------------------------------------------------------
    # 11. Total exercise count cap
    # ------------------------------------------------------------------
    # Type-specific cap wins; otherwise fall back to the universal cap (30).
    max_exercises = wt_rules.get(
        'max_exercises_per_session',
        UNIVERSAL_RULES.get('max_exercises_per_workout', 30),
    )
    total_working_ex = sum(
        len(ss.get('exercises', []))
        for ss in working
    )
    if total_working_ex > max_exercises + 4:
        violations.append(RuleViolation(
            rule_id='exercise_count_cap',
            severity='warning',
            message=(
                f'Workout has {total_working_ex} working exercises, '
                f'recommended max is {max_exercises} for {workout_type_name}.'
            ),
            actual_value=total_working_ex,
            expected_range=(0, max_exercises),
        ))

    # ------------------------------------------------------------------
    # 12. Workout type match percentage (refactored from _validate_workout_type_match)
    # ------------------------------------------------------------------
    _STRENGTH_TYPES = {
        'traditional_strength_training', 'functional_strength_training',
        'hypertrophy',
    }
    _HIIT_TYPES = {'high_intensity_interval_training'}
    _CARDIO_TYPES = {'cardio'}
    _CORE_TYPES = {'core_training'}
    _FLEXIBILITY_TYPES = {'flexibility'}

    is_strength = wt_key in _STRENGTH_TYPES
    is_hiit = wt_key in _HIIT_TYPES
    is_cardio = wt_key in _CARDIO_TYPES
    is_core = wt_key in _CORE_TYPES
    is_flexibility = wt_key in _FLEXIBILITY_TYPES

    if working:
        total_ex = 0
        matching_ex = 0
        for ss in working:
            for entry in ss.get('exercises', []):
                # Entries with a missing exercise count toward the total
                # but can never match — they dilute the percentage.
                total_ex += 1
                ex = entry.get('exercise')
                if ex is None:
                    continue
                if is_strength:
                    if getattr(ex, 'is_weight', False) or getattr(ex, 'is_compound', False):
                        matching_ex += 1
                elif is_hiit:
                    # HIIT: favor high HR, compound, or duration-capable exercises
                    hr = getattr(ex, 'hr_elevation_rating', None) or 0
                    if hr >= 5 or getattr(ex, 'is_compound', False) or getattr(ex, 'is_duration', False):
                        matching_ex += 1
                elif is_cardio:
                    # Cardio: favor duration-capable or high-HR exercises
                    hr = getattr(ex, 'hr_elevation_rating', None) or 0
                    if getattr(ex, 'is_duration', False) or hr >= 5:
                        matching_ex += 1
                elif is_core:
                    # Core: check if exercise targets core muscles
                    muscles = (getattr(ex, 'muscle_groups', '') or '').lower()
                    patterns = (getattr(ex, 'movement_patterns', '') or '').lower()
                    if any(tok in muscles for tok in ('core', 'abs', 'oblique')):
                        matching_ex += 1
                    elif 'core' in patterns or 'anti' in patterns:
                        matching_ex += 1
                elif is_flexibility:
                    # Flexibility: favor duration-based, stretch/mobility exercises
                    patterns = (getattr(ex, 'movement_patterns', '') or '').lower()
                    if getattr(ex, 'is_duration', False) or any(
                        tok in patterns for tok in ('stretch', 'mobility', 'yoga', 'flexibility')
                    ):
                        matching_ex += 1
                else:
                    # Unknown type — count all as matching (no false negatives)
                    matching_ex += 1
        if total_ex > 0:
            match_pct = matching_ex / total_ex
            threshold = 0.6
            if match_pct < threshold:
                violations.append(RuleViolation(
                    rule_id='workout_type_match',
                    severity='error',
                    message=(
                        f'Only {match_pct:.0%} of exercises match '
                        f'{workout_type_name} character (threshold: {threshold:.0%}).'
                    ),
                    actual_value=match_pct,
                    expected_range=(threshold, 1.0),
                ))

    return violations
|
||||
391
generator/serializers.py
Normal file
391
generator/serializers.py
Normal file
@@ -0,0 +1,391 @@
|
||||
from rest_framework import serializers
|
||||
from .models import (
|
||||
WorkoutType,
|
||||
UserPreference,
|
||||
GeneratedWeeklyPlan,
|
||||
GeneratedWorkout,
|
||||
MuscleGroupSplit,
|
||||
)
|
||||
from muscle.models import Muscle
|
||||
from equipment.models import Equipment
|
||||
from exercise.models import Exercise
|
||||
from workout.serializers import WorkoutDetailSerializer
|
||||
from superset.serializers import SupersetSerializer
|
||||
from superset.models import Superset
|
||||
|
||||
|
||||
# ============================================================
|
||||
# Reference Serializers (for preference UI dropdowns)
|
||||
# ============================================================
|
||||
|
||||
class MuscleSerializer(serializers.ModelSerializer):
    """Minimal muscle representation (id + name) for preference dropdowns."""

    class Meta:
        model = Muscle
        fields = ('id', 'name')
|
||||
|
||||
|
||||
class EquipmentSerializer(serializers.ModelSerializer):
    """Minimal equipment representation (id, name, category) for dropdowns."""

    class Meta:
        model = Equipment
        fields = ('id', 'name', 'category')
|
||||
|
||||
|
||||
# ============================================================
|
||||
# WorkoutType Serializer
|
||||
# ============================================================
|
||||
|
||||
class WorkoutTypeSerializer(serializers.ModelSerializer):
    """Full WorkoutType representation -- exposes every model field."""

    class Meta:
        model = WorkoutType
        fields = '__all__'
|
||||
|
||||
|
||||
# ============================================================
|
||||
# UserPreference Serializers
|
||||
# ============================================================
|
||||
|
||||
class UserPreferenceSerializer(serializers.ModelSerializer):
    """Read-side serializer for user preferences.

    M2M relations are rendered as nested objects (equipment, muscles,
    workout types); excluded exercises are rendered as a flat list of IDs.
    """

    available_equipment = EquipmentSerializer(many=True, read_only=True)
    target_muscle_groups = MuscleSerializer(many=True, read_only=True)
    preferred_workout_types = WorkoutTypeSerializer(many=True, read_only=True)
    excluded_exercises = serializers.SerializerMethodField()

    class Meta:
        model = UserPreference
        fields = (
            'id',
            'registered_user',
            'available_equipment',
            'target_muscle_groups',
            'preferred_workout_types',
            'fitness_level',
            'primary_goal',
            'secondary_goal',
            'days_per_week',
            'preferred_workout_duration',
            'preferred_days',
            'injuries_limitations',
            'injury_types',
            'excluded_exercises',
        )
        read_only_fields = ('id', 'registered_user')

    def get_excluded_exercises(self, obj):
        # IDs only -- the client does not need full exercise payloads here.
        return [pk for pk in obj.excluded_exercises.values_list('id', flat=True)]
|
||||
|
||||
|
||||
class UserPreferenceUpdateSerializer(serializers.ModelSerializer):
    """Write serializer -- accepts IDs for M2M fields.

    Each ``*_ids`` field maps onto the corresponding model relation via
    ``source=...``, so validated_data is keyed by the relation name
    (e.g. 'available_equipment'), not the wire name ('equipment_ids').
    Cross-field checks in validate() mostly produce soft warnings, which
    are stashed under '_validation_warnings' in attrs.
    """
    equipment_ids = serializers.PrimaryKeyRelatedField(
        queryset=Equipment.objects.all(),
        many=True,
        required=False,
        source='available_equipment',
    )
    muscle_ids = serializers.PrimaryKeyRelatedField(
        queryset=Muscle.objects.all(),
        many=True,
        required=False,
        source='target_muscle_groups',
    )
    workout_type_ids = serializers.PrimaryKeyRelatedField(
        queryset=WorkoutType.objects.all(),
        many=True,
        required=False,
        source='preferred_workout_types',
    )
    excluded_exercise_ids = serializers.PrimaryKeyRelatedField(
        queryset=Exercise.objects.all(),
        many=True,
        required=False,
        source='excluded_exercises',
    )

    class Meta:
        model = UserPreference
        fields = (
            'equipment_ids',
            'muscle_ids',
            'workout_type_ids',
            'fitness_level',
            'primary_goal',
            'secondary_goal',
            'days_per_week',
            'preferred_workout_duration',
            'preferred_days',
            'injuries_limitations',
            'injury_types',
            'excluded_exercise_ids',
        )

    # Allowed vocabulary for injury entries; anything else is rejected.
    VALID_INJURY_TYPES = {
        'knee', 'lower_back', 'upper_back', 'shoulder',
        'hip', 'wrist', 'ankle', 'neck',
    }
    VALID_SEVERITY_LEVELS = {'mild', 'moderate', 'severe'}

    def validate_injury_types(self, value):
        """Normalize injury_types to a de-duplicated list of
        {'type': str, 'severity': str} dicts.

        Accepts both the legacy plain-string form and the dict form.
        Duplicated injury types keep the FIRST occurrence's severity;
        later duplicates are dropped silently.

        Raises serializers.ValidationError for non-list input, malformed
        items, unknown injury types, or unknown severity levels.
        """
        if not isinstance(value, list):
            raise serializers.ValidationError('injury_types must be a list.')

        normalized = []
        seen = set()
        for item in value:
            # Backward compat: plain string -> {"type": str, "severity": "moderate"}
            if isinstance(item, str):
                injury_type = item
                severity = 'moderate'
            elif isinstance(item, dict):
                injury_type = item.get('type', '')
                severity = item.get('severity', 'moderate')
            else:
                raise serializers.ValidationError(
                    'Each injury must be a string or {"type": str, "severity": str}.'
                )

            if injury_type not in self.VALID_INJURY_TYPES:
                raise serializers.ValidationError(
                    f'Invalid injury type: {injury_type}. '
                    f'Valid types: {sorted(self.VALID_INJURY_TYPES)}'
                )
            if severity not in self.VALID_SEVERITY_LEVELS:
                raise serializers.ValidationError(
                    f'Invalid severity: {severity}. '
                    f'Valid levels: {sorted(self.VALID_SEVERITY_LEVELS)}'
                )

            # First occurrence of a type wins; duplicates are ignored.
            if injury_type not in seen:
                normalized.append({'type': injury_type, 'severity': severity})
                seen.add(injury_type)

        return normalized

    def validate(self, attrs):
        """Cross-field sanity checks.

        Hard error: secondary_goal equal to primary_goal. Everything else
        is a soft warning collected into attrs['_validation_warnings'].
        For partial updates, missing fields fall back to the current
        instance's values (or a sensible default when creating).
        """
        instance = self.instance
        days_per_week = attrs.get('days_per_week', getattr(instance, 'days_per_week', 4))
        preferred_days = attrs.get('preferred_days', getattr(instance, 'preferred_days', []))
        primary_goal = attrs.get('primary_goal', getattr(instance, 'primary_goal', ''))
        secondary_goal = attrs.get('secondary_goal', getattr(instance, 'secondary_goal', ''))
        fitness_level = attrs.get('fitness_level', getattr(instance, 'fitness_level', 2))
        duration = attrs.get(
            'preferred_workout_duration',
            getattr(instance, 'preferred_workout_duration', 45),
        )

        warnings = []

        if preferred_days and len(preferred_days) < days_per_week:
            warnings.append(
                f'You selected {days_per_week} days/week but only '
                f'{len(preferred_days)} preferred days. Some days will be auto-assigned.'
            )

        if primary_goal and secondary_goal and primary_goal == secondary_goal:
            raise serializers.ValidationError({
                'secondary_goal': 'Secondary goal must differ from primary goal.',
            })

        if primary_goal == 'strength' and duration < 30:
            warnings.append(
                'Strength workouts under 30 minutes may not have enough volume for progress.'
            )

        if days_per_week > 6:
            warnings.append(
                'Training 7 days/week with no rest days increases injury risk.'
            )

        # Beginner overtraining risk
        if days_per_week >= 6 and fitness_level <= 1:
            warnings.append(
                'Training 6+ days/week as a beginner significantly increases injury risk. '
                'Consider starting with 3-4 days/week.'
            )

        # Duration too long for fitness level
        if duration > 90 and fitness_level <= 2:
            warnings.append(
                'Workouts over 90 minutes may be too long for your fitness level. '
                'Consider 45-60 minutes for best results.'
            )

        # Strength goal without equipment
        # (only when equipment was explicitly provided as an empty list)
        equipment = attrs.get('available_equipment', None)
        if equipment is not None and len(equipment) == 0 and primary_goal == 'strength':
            warnings.append(
                'Strength training without equipment limits heavy loading. '
                'Consider adding equipment for better strength gains.'
            )

        # Hypertrophy with short duration
        if primary_goal == 'hypertrophy' and duration < 30:
            warnings.append(
                'Hypertrophy workouts under 30 minutes may not provide enough volume '
                'for muscle growth. Consider at least 45 minutes.'
            )

        # Stash warnings in attrs so callers can surface them after
        # is_valid(); update() pops the key before touching the model.
        # NOTE(review): presumably the view reads these before save --
        # verify, since update() discards them.
        attrs['_validation_warnings'] = warnings
        return attrs

    def update(self, instance, validated_data):
        # Pop internal metadata
        validated_data.pop('_validation_warnings', [])

        # Pop M2M fields so we can set them separately
        equipment = validated_data.pop('available_equipment', None)
        muscles = validated_data.pop('target_muscle_groups', None)
        workout_types = validated_data.pop('preferred_workout_types', None)
        excluded = validated_data.pop('excluded_exercises', None)

        # Update scalar fields
        for attr, value in validated_data.items():
            setattr(instance, attr, value)
        instance.save()

        # Update M2M fields only when they are explicitly provided
        # (None means "not in the payload"; an empty list clears the relation)
        if equipment is not None:
            instance.available_equipment.set(equipment)
        if muscles is not None:
            instance.target_muscle_groups.set(muscles)
        if workout_types is not None:
            instance.preferred_workout_types.set(workout_types)
        if excluded is not None:
            instance.excluded_exercises.set(excluded)

        return instance
|
||||
|
||||
|
||||
# ============================================================
|
||||
# GeneratedWorkout Serializers
|
||||
# ============================================================
|
||||
|
||||
class GeneratedWorkoutSerializer(serializers.ModelSerializer):
    """List-level serializer: scheduling fields plus resolved display names
    for the linked workout type and workout."""

    workout_type_name = serializers.SerializerMethodField()
    workout_name = serializers.SerializerMethodField()

    class Meta:
        model = GeneratedWorkout
        fields = (
            'id',
            'plan',
            'workout',
            'workout_type',
            'workout_type_name',
            'workout_name',
            'scheduled_date',
            'day_of_week',
            'is_rest_day',
            'status',
            'focus_area',
            'target_muscles',
            'user_rating',
            'user_feedback',
        )

    def get_workout_type_name(self, obj):
        # Prefer the human-facing display_name; fall back to the raw name.
        wt = obj.workout_type
        return (wt.display_name or wt.name) if wt else None

    def get_workout_name(self, obj):
        return obj.workout.name if obj.workout else None
|
||||
|
||||
|
||||
class GeneratedWorkoutDetailSerializer(serializers.ModelSerializer):
    """Full detail serializer: everything in the list view plus a compact
    workout summary and the complete superset breakdown."""

    workout_type_name = serializers.SerializerMethodField()
    workout_detail = serializers.SerializerMethodField()
    supersets = serializers.SerializerMethodField()

    class Meta:
        model = GeneratedWorkout
        fields = (
            'id',
            'plan',
            'workout',
            'workout_type',
            'workout_type_name',
            'scheduled_date',
            'day_of_week',
            'is_rest_day',
            'status',
            'focus_area',
            'target_muscles',
            'user_rating',
            'user_feedback',
            'workout_detail',
            'supersets',
        )

    def get_workout_type_name(self, obj):
        wt = obj.workout_type
        return (wt.display_name or wt.name) if wt else None

    def get_workout_detail(self, obj):
        """Compact summary of the linked workout, or None when unset."""
        workout = obj.workout
        if not workout:
            return None
        return {
            'id': workout.id,
            'name': workout.name,
            'description': workout.description,
            'estimated_time': workout.estimated_time,
        }

    def get_supersets(self, obj):
        """Serialize the workout's supersets in order.

        Reuses rows already loaded via a 'superset_workout' prefetch when
        present; otherwise issues a fresh query with its own prefetch.
        """
        workout = obj.workout
        if not workout:
            return []
        prefetch_cache = getattr(workout, '_prefetched_objects_cache', None)
        if prefetch_cache is not None and 'superset_workout' in prefetch_cache:
            ordered = sorted(workout.superset_workout.all(), key=lambda s: s.order)
        else:
            ordered = (
                Superset.objects.filter(workout=workout)
                .prefetch_related('superset_exercises__exercise')
                .order_by('order')
            )
        return SupersetSerializer(ordered, many=True).data
|
||||
|
||||
|
||||
# ============================================================
|
||||
# GeneratedWeeklyPlan Serializer
|
||||
# ============================================================
|
||||
|
||||
class GeneratedWeeklyPlanSerializer(serializers.ModelSerializer):
    """Weekly plan with its generated workouts nested read-only."""

    generated_workouts = GeneratedWorkoutSerializer(many=True, read_only=True)

    class Meta:
        model = GeneratedWeeklyPlan
        fields = (
            'id',
            'created_at',
            'registered_user',
            'week_start_date',
            'week_end_date',
            'status',
            'preferences_snapshot',
            'generation_time_ms',
            'week_number',
            'is_deload',
            'cycle_id',
            'generated_workouts',
        )
        read_only_fields = ('id', 'created_at', 'registered_user')
|
||||
|
||||
|
||||
# ============================================================
|
||||
# MuscleGroupSplit Serializer
|
||||
# ============================================================
|
||||
|
||||
class MuscleGroupSplitSerializer(serializers.ModelSerializer):
    """Exposes every MuscleGroupSplit field as-is."""

    class Meta:
        model = MuscleGroupSplit
        fields = '__all__'
|
||||
0
generator/services/__init__.py
Normal file
0
generator/services/__init__.py
Normal file
1678
generator/services/exercise_selector.py
Normal file
1678
generator/services/exercise_selector.py
Normal file
File diff suppressed because it is too large
Load Diff
366
generator/services/muscle_normalizer.py
Normal file
366
generator/services/muscle_normalizer.py
Normal file
@@ -0,0 +1,366 @@
|
||||
"""
|
||||
Muscle name normalization and split classification.
|
||||
|
||||
The DB contains ~38 muscle entries with casing duplicates (e.g. "Quads" vs "quads",
|
||||
"Abs" vs "abs", "Core" vs "core"). This module provides a single source of truth
|
||||
for mapping raw muscle names to canonical lowercase names, organizing them into
|
||||
split categories, and classifying a set of muscles into a split type.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import Set, List, Optional
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Raw name -> canonical name
|
||||
# Keys are lowercased for lookup; values are the canonical form we store.
|
||||
# ---------------------------------------------------------------------------
|
||||
# Exhaustive synonym table. Every key must already be lowercase because
# lookups are exact-match on the stripped, lowercased input; values are
# the canonical names used throughout the generator.
MUSCLE_NORMALIZATION_MAP: dict[str, str] = {
    # --- quads ---
    'quads': 'quads',
    'quadriceps': 'quads',
    'quad': 'quads',

    # --- hamstrings ---
    'hamstrings': 'hamstrings',
    'hamstring': 'hamstrings',
    'hams': 'hamstrings',

    # --- glutes ---
    'glutes': 'glutes',
    'glute': 'glutes',
    'gluteus': 'glutes',
    'gluteus maximus': 'glutes',

    # --- calves ---
    'calves': 'calves',
    'calf': 'calves',
    'gastrocnemius': 'calves',
    'soleus': 'calves',

    # --- chest ---
    'chest': 'chest',
    'pecs': 'chest',
    'pectorals': 'chest',

    # --- deltoids / shoulders ---
    # Generic shoulder names collapse to 'deltoids'; head-specific names
    # keep their front/rear/side distinction.
    'deltoids': 'deltoids',
    'deltoid': 'deltoids',
    'shoulders': 'deltoids',
    'shoulder': 'deltoids',
    'front deltoids': 'front deltoids',
    'front deltoid': 'front deltoids',
    'front delts': 'front deltoids',
    'rear deltoids': 'rear deltoids',
    'rear deltoid': 'rear deltoids',
    'rear delts': 'rear deltoids',
    'side deltoids': 'side deltoids',
    'side deltoid': 'side deltoids',
    'side delts': 'side deltoids',
    'lateral deltoids': 'side deltoids',
    'medial deltoids': 'side deltoids',

    # --- triceps ---
    'triceps': 'triceps',
    'tricep': 'triceps',

    # --- biceps ---
    'biceps': 'biceps',
    'bicep': 'biceps',

    # --- upper back ---
    'upper back': 'upper back',
    'rhomboids': 'upper back',

    # --- lats ---
    'lats': 'lats',
    'latissimus dorsi': 'lats',
    'lat': 'lats',

    # --- middle back ---
    'middle back': 'middle back',
    'mid back': 'middle back',

    # --- lower back ---
    'lower back': 'lower back',
    'erector spinae': 'lower back',
    'spinal erectors': 'lower back',

    # --- traps ---
    'traps': 'traps',
    'trapezius': 'traps',

    # --- abs ---
    'abs': 'abs',
    'abdominals': 'abs',
    'rectus abdominis': 'abs',

    # --- obliques ---
    'obliques': 'obliques',
    'oblique': 'obliques',
    'external obliques': 'obliques',
    'internal obliques': 'obliques',

    # --- core (general) ---
    # 'core' is intentionally kept distinct from 'abs'/'obliques'.
    'core': 'core',

    # --- intercostals ---
    'intercostals': 'intercostals',

    # --- hip flexor ---
    'hip flexor': 'hip flexors',
    'hip flexors': 'hip flexors',
    'iliopsoas': 'hip flexors',
    'psoas': 'hip flexors',

    # --- hip abductors ---
    'hip abductors': 'hip abductors',
    'hip abductor': 'hip abductors',

    # --- hip adductors ---
    'hip adductors': 'hip adductors',
    'hip adductor': 'hip adductors',
    'adductors': 'hip adductors',
    'groin': 'hip adductors',

    # --- rotator cuff ---
    'rotator cuff': 'rotator cuff',

    # --- forearms ---
    'forearms': 'forearms',
    'forearm': 'forearms',
    'wrist flexors': 'forearms',
    'wrist extensors': 'forearms',

    # --- arms (general) ---
    'arms': 'arms',

    # --- feet ---
    'feet': 'feet',
    'foot': 'feet',

    # --- it band ---
    'it band': 'it band',
    'iliotibial band': 'it band',
}
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Muscles grouped by functional split category.
|
||||
# Used to classify a workout's primary split type.
|
||||
# ---------------------------------------------------------------------------
|
||||
# Canonical muscle names (see MUSCLE_NORMALIZATION_MAP) grouped into
# functional split categories. A muscle may appear in more than one
# category (e.g. 'hip flexors' is in both 'lower_pull' and 'core').
MUSCLE_GROUP_CATEGORIES: dict[str, list[str]] = {
    'upper_push': [
        'chest', 'front deltoids', 'deltoids', 'triceps', 'side deltoids',
    ],
    'upper_pull': [
        'upper back', 'lats', 'biceps', 'rear deltoids', 'middle back',
        'traps', 'forearms', 'rotator cuff',
    ],
    'lower_push': [
        'quads', 'calves', 'hip abductors', 'hip adductors',
    ],
    'lower_pull': [
        'hamstrings', 'glutes', 'lower back', 'hip flexors',
    ],
    'core': [
        'abs', 'obliques', 'core', 'intercostals', 'hip flexors',
    ],
}

# Reverse lookup: canonical muscle -> list of categories it belongs to.
# Built once at import time from MUSCLE_GROUP_CATEGORIES above.
_MUSCLE_TO_CATEGORIES: dict[str, list[str]] = {}
for _cat, _muscles in MUSCLE_GROUP_CATEGORIES.items():
    for _m in _muscles:
        _MUSCLE_TO_CATEGORIES.setdefault(_m, []).append(_cat)

# Broader split groupings for classifying entire workouts:
# push/pull category -> coarse 'upper' / 'lower' / 'core' bucket.
SPLIT_CATEGORY_MAP: dict[str, str] = {
    'upper_push': 'upper',
    'upper_pull': 'upper',
    'lower_push': 'lower',
    'lower_pull': 'lower',
    'core': 'core',
}
|
||||
|
||||
|
||||
def normalize_muscle_name(name: Optional[str]) -> Optional[str]:
    """
    Map a raw muscle name string to its canonical lowercase form.

    Returns None when the name is None, empty, or whitespace-only.
    Unrecognized (but non-empty) names are NOT dropped: they are returned
    lowered/stripped so the analyzer can decide what to do with them.
    (The previous docstring incorrectly claimed None for unrecognized names.)
    """
    if not name:
        return None
    key = name.strip().lower()
    if not key:
        return None
    canonical = MUSCLE_NORMALIZATION_MAP.get(key)
    if canonical:
        return canonical
    # Fallback: return the lowered/stripped version so we don't silently
    # drop unknown muscles -- the analyzer can decide what to do.
    return key
|
||||
|
||||
|
||||
# Per-process memo of Exercise.id -> normalized muscle-name set.
# Populated by get_muscles_for_exercise(); reset via clear_muscle_cache().
_muscle_cache: dict[int, Set[str]] = {}
|
||||
|
||||
|
||||
def get_muscles_for_exercise(exercise) -> Set[str]:
    """
    Return the set of normalized muscle names for a given Exercise instance.

    Uses the ExerciseMuscle join table (exercise.exercise_muscle_exercise).
    Falls back to the comma-separated Exercise.muscle_groups field if no
    ExerciseMuscle rows exist.

    Results are cached per exercise ID to avoid repeated DB queries.
    """
    if exercise.id in _muscle_cache:
        return _muscle_cache[exercise.id]

    # Function-scope import; NOTE(review): presumably done to avoid a
    # circular import between this module and muscle.models -- confirm.
    from muscle.models import ExerciseMuscle

    muscles: Set[str] = set()

    # Primary source: ExerciseMuscle join table
    # select_related('muscle') avoids one extra query per row below.
    em_qs = ExerciseMuscle.objects.filter(exercise=exercise).select_related('muscle')
    for em in em_qs:
        if em.muscle and em.muscle.name:
            normalized = normalize_muscle_name(em.muscle.name)
            if normalized:
                muscles.add(normalized)

    # Fallback: comma-separated muscle_groups CharField on Exercise
    if not muscles and exercise.muscle_groups:
        for raw in exercise.muscle_groups.split(','):
            normalized = normalize_muscle_name(raw)
            if normalized:
                muscles.add(normalized)

    # Cache even an empty result so repeated lookups stay cheap.
    _muscle_cache[exercise.id] = muscles
    return muscles
|
||||
|
||||
|
||||
def clear_muscle_cache() -> None:
    """Clear the per-exercise muscle cache (useful for testing or re-analysis)."""
    _muscle_cache.clear()
|
||||
|
||||
|
||||
def get_movement_patterns_for_exercise(exercise) -> List[str]:
    """
    Split the comma-separated movement_patterns CharField on Exercise into
    a list of normalized (lowered, stripped) pattern strings.

    Empty segments and a missing/blank field both yield no entries.
    """
    raw_field = exercise.movement_patterns
    if not raw_field:
        return []
    normalized = (chunk.strip().lower() for chunk in raw_field.split(','))
    return [pattern for pattern in normalized if pattern]
|
||||
|
||||
|
||||
def classify_split_type(muscle_names: set[str] | list[str]) -> str:
    """
    Given a set/list of canonical muscle names from a workout, return the
    best-fit split_type string.

    Returns one of: 'push', 'pull', 'legs', 'upper', 'lower', 'full_body',
    'core'.

    Note: This function intentionally does not return 'cardio' because split
    classification is muscle-based and cardio is not a muscle group. Cardio
    workout detection happens via ``WorkoutAnalyzer._infer_workout_type()``
    which examines movement patterns (cardio/locomotion) rather than muscles.
    """
    if not muscle_names:
        return 'full_body'

    muscle_set = set(muscle_names) if not isinstance(muscle_names, set) else muscle_names

    # Count how many muscles fall into each category
    category_scores: dict[str, int] = {
        'upper_push': 0,
        'upper_pull': 0,
        'lower_push': 0,
        'lower_pull': 0,
        'core': 0,
    }
    # A muscle can belong to more than one category (e.g. 'hip flexors' is
    # in both 'lower_pull' and 'core'), so `total` may exceed len(muscle_set).
    for m in muscle_set:
        cats = _MUSCLE_TO_CATEGORIES.get(m, [])
        for cat in cats:
            category_scores[cat] += 1

    total = sum(category_scores.values())
    if total == 0:
        # None of the muscles mapped to a known category.
        return 'full_body'

    upper_push = category_scores['upper_push']
    upper_pull = category_scores['upper_pull']
    lower_push = category_scores['lower_push']
    lower_pull = category_scores['lower_pull']
    core_score = category_scores['core']

    upper_total = upper_push + upper_pull
    lower_total = lower_push + lower_pull

    # -- Core dominant: core is at least 60% of all category points --
    if core_score > 0 and core_score >= total * 0.6:
        return 'core'

    # -- Full body: both upper and lower have meaningful representation --
    if upper_total > 0 and lower_total > 0:
        upper_ratio = upper_total / total
        lower_ratio = lower_total / total
        # If neither upper nor lower dominates heavily, it's full body
        if 0.2 <= upper_ratio <= 0.8 and 0.2 <= lower_ratio <= 0.8:
            return 'full_body'

    # -- Upper dominant --
    if upper_total > lower_total and upper_total >= total * 0.5:
        # Pure push/pull first, then a 2:1 skew, else generic 'upper'.
        if upper_push > 0 and upper_pull == 0:
            return 'push'
        if upper_pull > 0 and upper_push == 0:
            return 'pull'
        if upper_push > upper_pull * 2:
            return 'push'
        if upper_pull > upper_push * 2:
            return 'pull'
        return 'upper'

    # -- Lower dominant --
    if lower_total > upper_total and lower_total >= total * 0.5:
        # Both pure-push and pure-pull lower days read as 'legs'.
        if lower_push > 0 and lower_pull == 0:
            return 'legs'
        if lower_pull > 0 and lower_push == 0:
            return 'legs'
        return 'lower'

    # -- Push dominant (upper push + lower push) --
    push_total = upper_push + lower_push
    pull_total = upper_pull + lower_pull
    if push_total > pull_total * 2:
        return 'push'
    if pull_total > push_total * 2:
        return 'pull'

    return 'full_body'
|
||||
|
||||
|
||||
def get_broad_split_category(split_type: str) -> str:
    """
    Simplify a split type for weekly-pattern analysis.
    Returns one of: 'upper', 'lower', 'push', 'pull', 'core', 'full_body', 'cardio'.
    """
    # 'legs' folds into 'lower'; every other known type maps to itself.
    if split_type == 'legs':
        return 'lower'
    passthrough = ('push', 'pull', 'upper', 'lower', 'full_body', 'core', 'cardio')
    return split_type if split_type in passthrough else 'full_body'
|
||||
151
generator/services/plan_builder.py
Normal file
151
generator/services/plan_builder.py
Normal file
@@ -0,0 +1,151 @@
|
||||
import logging
|
||||
|
||||
from django.db import transaction
|
||||
|
||||
from workout.models import Workout
|
||||
from superset.models import Superset, SupersetExercise
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class PlanBuilder:
    """
    Creates Django ORM objects (Workout, Superset, SupersetExercise) from
    a workout specification dict. Follows the exact same creation pattern
    used by the existing ``add_workout`` view.
    """

    def __init__(self, registered_user):
        # Owner of every Workout this builder persists.
        self.registered_user = registered_user

    def create_workout_from_spec(self, workout_spec):
        """
        Create a full Workout with Supersets and SupersetExercises.

        Parameters
        ----------
        workout_spec : dict
            Expected shape::

                {
                    'name': 'Upper Push + Core',
                    'description': 'Generated workout targeting chest ...',
                    'supersets': [
                        {
                            'name': 'Warm Up',
                            'rounds': 1,
                            'exercises': [
                                {
                                    'exercise': <Exercise instance>,
                                    'duration': 30,
                                    'order': 1,
                                },
                                {
                                    'exercise': <Exercise instance>,
                                    'reps': 10,
                                    'weight': 50,
                                    'order': 2,
                                },
                            ],
                        },
                        ...
                    ],
                }

        Returns
        -------
        Workout
            The fully-persisted Workout instance with all child objects.

        Notes
        -----
        - Time estimation: a rep entry contributes reps * estimated_rep_duration
          (3.0s/rep when the exercise has no estimate); a duration entry
          contributes its duration. An entry carrying both contributes both.
        - 'weight' only sets the SupersetExercise field; it never affects
          the time estimate.
        """
        # All-or-nothing: a failure part-way through leaves no orphaned rows.
        with transaction.atomic():
            # ---- 1. Create the Workout ----
            workout = Workout.objects.create(
                name=workout_spec.get('name', 'Generated Workout'),
                description=workout_spec.get('description', ''),
                registered_user=self.registered_user,
            )

            workout_total_time = 0
            superset_order = 1

            # ---- 2. Create each Superset ----
            for ss_spec in workout_spec.get('supersets', []):
                ss_name = ss_spec.get('name', f'Set {superset_order}')
                rounds = ss_spec.get('rounds', 1)
                exercises = ss_spec.get('exercises', [])

                superset = Superset.objects.create(
                    workout=workout,
                    name=ss_name,
                    rounds=rounds,
                    order=superset_order,
                    rest_between_rounds=ss_spec.get('rest_between_rounds', 45),
                )

                # Time for ONE round of this superset (seconds).
                superset_total_time = 0

                # ---- 3. Create each SupersetExercise ----
                for ex_spec in exercises:
                    exercise_obj = ex_spec.get('exercise')
                    if exercise_obj is None:
                        # Malformed entry: warn and skip rather than abort
                        # the whole transaction.
                        logger.warning(
                            "Skipping exercise entry with no exercise object in "
                            "superset '%s'", ss_name,
                        )
                        continue

                    order = ex_spec.get('order', 1)

                    # Build kwargs for create, including optional fields,
                    # so we don't need a separate .save() after .create().
                    create_kwargs = {
                        'superset': superset,
                        'exercise': exercise_obj,
                        'order': order,
                    }

                    if ex_spec.get('weight') is not None:
                        create_kwargs['weight'] = ex_spec['weight']

                    if ex_spec.get('reps') is not None:
                        create_kwargs['reps'] = ex_spec['reps']
                        # Default 3.0s per rep when the exercise has no estimate.
                        rep_duration = exercise_obj.estimated_rep_duration or 3.0
                        superset_total_time += ex_spec['reps'] * rep_duration

                    if ex_spec.get('duration') is not None:
                        create_kwargs['duration'] = ex_spec['duration']
                        superset_total_time += ex_spec['duration']

                    SupersetExercise.objects.create(**create_kwargs)

                # ---- 4. Update superset estimated_time ----
                # Store total time including all rounds and rest between rounds
                rest_between_rounds = ss_spec.get('rest_between_rounds', 45)
                # No rest after the final round, hence rounds - 1.
                rest_time = rest_between_rounds * max(0, rounds - 1)
                superset.estimated_time = (superset_total_time * rounds) + rest_time
                superset.save()

                # Accumulate into workout total (use the already-calculated superset time)
                workout_total_time += superset.estimated_time
                superset_order += 1

            # Add transition time between supersets
            # (matches GENERATION_RULES['rest_between_supersets'] in workout_generator)
            superset_count = superset_order - 1
            if superset_count > 1:
                rest_between_supersets = 30
                workout_total_time += rest_between_supersets * (superset_count - 1)

            # ---- 5. Update workout estimated_time ----
            workout.estimated_time = workout_total_time
            workout.save()

            logger.info(
                "Created workout '%s' (id=%s) with %d supersets, est. %ds",
                workout.name,
                workout.pk,
                superset_order - 1,
                workout_total_time,
            )

            return workout
|
||||
1371
generator/services/workout_analyzer.py
Normal file
1371
generator/services/workout_analyzer.py
Normal file
File diff suppressed because it is too large
Load Diff
6
generator/services/workout_generation/__init__.py
Normal file
6
generator/services/workout_generation/__init__.py
Normal file
@@ -0,0 +1,6 @@
|
||||
"""Pure workout generation utilities.
|
||||
|
||||
These helpers are intentionally side-effect free so they can be tested
|
||||
independently from Django models and service orchestration.
|
||||
"""
|
||||
|
||||
39
generator/services/workout_generation/entry_rules.py
Normal file
39
generator/services/workout_generation/entry_rules.py
Normal file
@@ -0,0 +1,39 @@
|
||||
import math
|
||||
import random
|
||||
|
||||
|
||||
def pick_reps_for_exercise(exercise, wt_params: dict, tier_ranges: dict, rng=random) -> int:
    """Pick a rep count from the tier-specific range, falling back to the
    generic workout-type (rep_min, rep_max) when the tier has no range.

    Bounds are normalized so an inverted range still works.
    """
    tier_name = (getattr(exercise, 'exercise_tier', None) or 'accessory').lower()
    rep_range = tier_ranges.get(tier_name)
    if not rep_range:
        rep_range = (wt_params['rep_min'], wt_params['rep_max'])
    bounds = sorted((int(rep_range[0]), int(rep_range[1])))
    return rng.randint(bounds[0], bounds[1])
|
||||
|
||||
|
||||
def apply_rep_volume_floor(entries: list[dict], rounds: int, min_volume: int) -> None:
    """Raise per-entry reps in place so reps * rounds reaches min_volume.

    Entries without reps (e.g. duration-based) are left untouched; a
    non-positive round count disables the floor entirely.
    """
    if rounds <= 0:
        return
    floor_reps = math.ceil(min_volume / rounds)
    for entry in entries:
        current = entry.get('reps')
        if not current:
            continue
        if current * rounds < min_volume:
            entry['reps'] = max(current, floor_reps)
|
||||
|
||||
|
||||
def working_rest_seconds(rest_override, default_rest: int, minimum_rest: int = 15) -> int:
    """Resolve working rest in seconds: override wins, then default, then
    45s; the result is clamped to at least ``minimum_rest``."""
    if rest_override:
        chosen = rest_override
    elif default_rest:
        chosen = default_rest
    else:
        chosen = 45
    return max(minimum_rest, int(chosen))
|
||||
|
||||
|
||||
def sort_entries_by_hr(entries: list[dict], is_early_block: bool) -> None:
    """Order entries by heart-rate elevation (descending for early blocks,
    ascending otherwise) and rewrite each entry's 1-based 'order' field.

    Missing or falsy ratings default to 5.
    """
    def _rating(entry: dict) -> int:
        value = getattr(entry.get('exercise'), 'hr_elevation_rating', 5)
        return value or 5

    entries.sort(key=_rating, reverse=is_early_block)
    position = 1
    for entry in entries:
        entry['order'] = position
        position += 1
|
||||
|
||||
41
generator/services/workout_generation/focus.py
Normal file
41
generator/services/workout_generation/focus.py
Normal file
@@ -0,0 +1,41 @@
|
||||
from typing import Optional
|
||||
|
||||
from generator.services.exercise_selector import extract_movement_families
|
||||
|
||||
|
||||
def focus_key_for_exercise(exercise) -> Optional[str]:
    """Map an exercise to a coarse focus key used for variety checks.

    Prefers the alphabetically-first movement family derived from the
    exercise name; otherwise scans movement_patterns for a known token.
    Returns None for None input or when nothing matches.
    """
    if exercise is None:
        return None
    display_name = getattr(exercise, 'name', '') or ''
    families = extract_movement_families(display_name)
    if families:
        # min() == sorted(...)[0]: pick the alphabetically-first family.
        return min(families)
    patterns = (getattr(exercise, 'movement_patterns', '') or '').lower()
    fallback_tokens = ('upper pull', 'upper push', 'hip hinge', 'squat', 'lunge', 'core', 'carry')
    return next((token for token in fallback_tokens if token in patterns), None)
|
||||
|
||||
|
||||
def has_duplicate_focus(exercises: list) -> bool:
    """True when two exercises in one superset map to the same focus key.

    Exercises without a focus key are ignored.
    """
    keys = [focus_key_for_exercise(ex) for ex in (exercises or [])]
    keyed = [key for key in keys if key]
    return len(keyed) != len(set(keyed))
|
||||
|
||||
|
||||
def focus_keys_for_exercises(exercises: list) -> set:
    """Return the set of non-empty focus keys for a list of exercises."""
    return {
        key
        for key in (focus_key_for_exercise(ex) for ex in (exercises or []))
        if key
    }
|
||||
|
||||
53
generator/services/workout_generation/modality.py
Normal file
53
generator/services/workout_generation/modality.py
Normal file
@@ -0,0 +1,53 @@
|
||||
import math
|
||||
import random
|
||||
from typing import Optional
|
||||
|
||||
|
||||
def clamp_duration_bias(duration_bias: float, duration_bias_range: Optional[tuple]) -> float:
    """Clamp the duration bias into its allowed interval.

    Uses the workout-type specific (low, high) range when provided,
    otherwise the default [0.0, 1.0].
    """
    if duration_bias_range:
        lower = float(duration_bias_range[0])
        upper = float(duration_bias_range[1])
    else:
        lower, upper = 0.0, 1.0
    return max(lower, min(upper, duration_bias))
|
||||
|
||||
|
||||
def plan_superset_modalities(
    *,
    num_supersets: int,
    duration_bias: float,
    duration_bias_range: Optional[tuple],
    is_strength_workout: bool,
    rng=random,
) -> list[bool]:
    """Decide per working superset whether it is duration-based (True) or
    rep-based (False).

    Strength workouts are always rep-based. Otherwise the count of
    duration supersets tracks the target bias, bounded either by the
    explicit bias range or by +/-0.15 around the clamped bias, and the
    chosen slots are placed at random positions.
    """
    if num_supersets <= 0:
        return []
    if is_strength_workout:
        # Strength work is always prescribed in reps.
        return [False] * num_supersets

    if duration_bias_range:
        lo = float(duration_bias_range[0])
        hi = float(duration_bias_range[1])
        target = (lo + hi) / 2.0
        floor_count = max(0, math.ceil(num_supersets * lo))
        ceil_count = min(num_supersets, math.floor(num_supersets * hi))
    else:
        target = max(0.0, min(1.0, duration_bias))
        floor_count = max(0, math.floor(num_supersets * max(0.0, target - 0.15)))
        ceil_count = min(num_supersets, math.ceil(num_supersets * min(1.0, target + 0.15)))

    chosen = int(round(num_supersets * target))
    chosen = max(floor_count, min(ceil_count, chosen))

    # Avoid degenerate all-duration / all-reps plans when the bounds permit a mix.
    if num_supersets > 1 and chosen == num_supersets and ceil_count < num_supersets:
        chosen = ceil_count
    if num_supersets > 1 and chosen == 0 and floor_count > 0:
        chosen = floor_count

    flags = [False] * num_supersets
    if chosen > 0:
        slots = list(range(num_supersets))
        rng.shuffle(slots)
        for slot in slots[:chosen]:
            flags[slot] = True
    return flags
|
||||
|
||||
26
generator/services/workout_generation/pattern_planning.py
Normal file
26
generator/services/workout_generation/pattern_planning.py
Normal file
@@ -0,0 +1,26 @@
|
||||
def working_position_label(ss_idx: int, num_supersets: int) -> str:
    """Label a working superset's position within the workout.

    Index 0 (or a single-superset workout) is 'early'; the final index is
    'late'; everything in between is 'middle'.
    """
    is_first = num_supersets <= 1 or ss_idx == 0
    is_last = ss_idx >= num_supersets - 1
    return 'early' if is_first else ('late' if is_last else 'middle')
|
||||
|
||||
|
||||
def merge_pattern_preferences(position_patterns, rule_patterns):
    """Combine positional pattern preferences with structure-rule ones.

    With no rule patterns, positional preferences pass through unchanged.
    When both exist, positional patterns also present in the rules win;
    otherwise (or with rules alone) the first three rule patterns are used.
    """
    if not rule_patterns:
        return position_patterns
    if position_patterns:
        overlap = [pattern for pattern in position_patterns if pattern in rule_patterns]
        if overlap:
            return overlap
    return rule_patterns[:3]
|
||||
|
||||
|
||||
def rotated_muscle_subset(target_muscles: list[str], ss_idx: int) -> list[str]:
    """Rotate the target-muscle list so each superset leads with a
    different muscle; lists with at most one entry are returned as-is."""
    count = len(target_muscles)
    if count <= 1:
        return target_muscles
    offset = ss_idx % count
    return [target_muscles[(offset + i) % count] for i in range(count)]
|
||||
|
||||
14
generator/services/workout_generation/recovery.py
Normal file
14
generator/services/workout_generation/recovery.py
Normal file
@@ -0,0 +1,14 @@
|
||||
def is_recovery_exercise(ex) -> bool:
    """Return True for warmup/cooldown-style recovery or stretch work.

    Matches 'stretch' anywhere in the exercise name, or any recovery-style
    token in its movement patterns. None is never a recovery exercise.
    """
    if ex is None:
        return False
    lowered_name = (getattr(ex, 'name', '') or '').lower()
    lowered_patterns = (getattr(ex, 'movement_patterns', '') or '').lower()
    if 'stretch' in lowered_name:
        return True
    recovery_tokens = (
        'mobility - static', 'static stretch', 'yoga',
        'cool down', 'cooldown', 'breathing', 'massage',
    )
    for token in recovery_tokens:
        if token in lowered_patterns:
            return True
    return False
|
||||
|
||||
31
generator/services/workout_generation/scaling.py
Normal file
31
generator/services/workout_generation/scaling.py
Normal file
@@ -0,0 +1,31 @@
|
||||
def apply_fitness_scaling(
    params: dict,
    *,
    fitness_level: int,
    scaling_config: dict,
    min_reps: int,
    min_reps_strength: int,
    is_strength: bool = False,
) -> dict:
    """Return a copy of ``params`` with reps/rounds/rest scaled for the
    user's fitness level.

    Falls back to level 2 when the level is falsy or not in the config.
    Strength workouts use the ``min_reps_strength`` floor, and beginners
    (level <= 1) doing strength work get an additional rep floor of 5.
    """
    scaled = dict(params)
    effective_level = fitness_level or 2
    factors = scaling_config.get(effective_level, scaling_config[2])
    rep_floor = min_reps_strength if is_strength else min_reps

    scaled['rep_min'] = max(rep_floor, int(scaled['rep_min'] * factors['rep_min_mult']))
    scaled['rep_max'] = max(scaled['rep_min'], int(scaled['rep_max'] * factors['rep_max_mult']))

    lo_rounds, hi_rounds = scaled['rounds']
    lo_rounds = max(1, lo_rounds + factors['rounds_adj'])
    hi_rounds = max(lo_rounds, hi_rounds + factors['rounds_adj'])
    scaled['rounds'] = (lo_rounds, hi_rounds)

    base_rest = scaled.get('rest_between_rounds', 45)
    scaled['rest_between_rounds'] = max(15, base_rest + factors['rest_adj'])

    if effective_level <= 1 and is_strength:
        scaled['rep_min'] = max(5, scaled['rep_min'])
        scaled['rep_max'] = max(scaled['rep_min'], scaled['rep_max'])
    return scaled
|
||||
|
||||
68
generator/services/workout_generation/section_builders.py
Normal file
68
generator/services/workout_generation/section_builders.py
Normal file
@@ -0,0 +1,68 @@
|
||||
import random
|
||||
from typing import Iterable, Optional
|
||||
|
||||
|
||||
def section_exercise_count(section: str, fitness_level: int, rng=random) -> int:
    """Return how many exercises a warmup/cooldown section gets for the
    given fitness level (beginners get more, advanced get fewer).

    Raises ValueError for unknown sections; a falsy level defaults to 2.
    """
    section_ranges = {
        'warmup': {'beginner': (5, 7), 'advanced': (3, 5), 'default': (4, 6)},
        'cooldown': {'beginner': (4, 5), 'advanced': (2, 3), 'default': (3, 4)},
    }
    if section not in section_ranges:
        raise ValueError(f'Unknown section: {section}')
    level = fitness_level or 2
    if level <= 1:
        band = 'beginner'
    elif level >= 3:
        band = 'advanced'
    else:
        band = 'default'
    low, high = section_ranges[section][band]
    return rng.randint(low, high)
|
||||
|
||||
|
||||
def rounded_duration(
    raw_duration: int,
    *,
    min_duration: int,
    duration_multiple: int,
) -> int:
    """Snap a duration to the nearest configured multiple, never dropping
    below the minimum."""
    snapped = round(raw_duration / duration_multiple) * duration_multiple
    if snapped < min_duration:
        return min_duration
    return snapped
|
||||
|
||||
|
||||
def build_duration_entries(
    exercises: Iterable,
    *,
    duration_min: int,
    duration_max: int,
    min_duration: int,
    duration_multiple: int,
    rng=random,
) -> list[dict]:
    """Create one ordered {'exercise', 'duration', 'order'} entry per
    exercise, with durations drawn from [duration_min, duration_max] and
    snapped via ``rounded_duration``."""
    entries: list[dict] = []
    position = 0
    for ex in exercises:
        position += 1
        picked = rng.randint(duration_min, duration_max)
        entries.append({
            'exercise': ex,
            'duration': rounded_duration(
                picked,
                min_duration=min_duration,
                duration_multiple=duration_multiple,
            ),
            'order': position,
        })
    return entries
|
||||
|
||||
|
||||
def build_section_superset(name: str, entries: list[dict]) -> Optional[dict]:
    """Wrap warmup/cooldown entries into a single-round superset payload.

    Returns None when there are no entries, so empty sections are skipped.
    """
    if not entries:
        return None
    payload = {'name': name, 'rounds': 1, 'rest_between_rounds': 0}
    payload['exercises'] = entries
    return payload
|
||||
|
||||
3091
generator/services/workout_generator.py
Normal file
3091
generator/services/workout_generator.py
Normal file
File diff suppressed because it is too large
Load Diff
0
generator/tests/__init__.py
Normal file
0
generator/tests/__init__.py
Normal file
56
generator/tests/test_check_rules_drift.py
Normal file
56
generator/tests/test_check_rules_drift.py
Normal file
@@ -0,0 +1,56 @@
|
||||
from django.core.management import call_command
|
||||
from django.test import TestCase
|
||||
|
||||
from generator.models import WorkoutType
|
||||
from generator.rules_engine import DB_CALIBRATION
|
||||
|
||||
|
||||
class TestCheckRulesDriftCommand(TestCase):
    """Tests for the strict drift-check command behavior."""

    @staticmethod
    def _sync_workout_type(name, values):
        # Create-or-update a WorkoutType row so it exactly matches the
        # calibration values; only changed fields are persisted.
        wt, _ = WorkoutType.objects.get_or_create(
            name=name,
            defaults={
                'display_name': name.replace('_', ' ').title(),
                'description': f'Calibrated {name}',
                **values,
            },
        )
        update_fields = []
        for field_name, field_value in values.items():
            if getattr(wt, field_name) != field_value:
                setattr(wt, field_name, field_value)
                update_fields.append(field_name)
        if update_fields:
            wt.save(update_fields=update_fields)
        return wt

    def test_passes_when_all_types_match(self):
        # Seed the DB to exactly mirror DB_CALIBRATION.
        for type_name, values in DB_CALIBRATION.items():
            self._sync_workout_type(type_name, values)

        # Should not raise SystemExit when everything matches.
        call_command('check_rules_drift', verbosity=0)

    def test_fails_when_type_missing(self):
        for type_name, values in DB_CALIBRATION.items():
            self._sync_workout_type(type_name, values)
        # Removing one calibrated type should trip the strict check.
        WorkoutType.objects.filter(name='cardio').delete()

        with self.assertRaises(SystemExit) as ctx:
            call_command('check_rules_drift', verbosity=0)
        self.assertEqual(ctx.exception.code, 1)

    def test_fails_when_value_mismatch(self):
        for type_name, values in DB_CALIBRATION.items():
            self._sync_workout_type(type_name, values)

        # Drift a single field away from its calibrated value.
        target = WorkoutType.objects.get(name='hypertrophy')
        target.typical_rest_between_sets = 999
        target.save(update_fields=['typical_rest_between_sets'])

        with self.assertRaises(SystemExit) as ctx:
            call_command('check_rules_drift', verbosity=0)
        self.assertEqual(ctx.exception.code, 1)
|
||||
63
generator/tests/test_exercise_family_dedup.py
Normal file
63
generator/tests/test_exercise_family_dedup.py
Normal file
@@ -0,0 +1,63 @@
|
||||
from django.contrib.auth.models import User
|
||||
from django.test import TestCase
|
||||
|
||||
from exercise.models import Exercise
|
||||
from generator.models import UserPreference
|
||||
from generator.services.exercise_selector import (
|
||||
ExerciseSelector,
|
||||
extract_movement_families,
|
||||
)
|
||||
from registered_user.models import RegisteredUser
|
||||
|
||||
|
||||
class TestExerciseFamilyDedup(TestCase):
    """Movement-family de-duplication: 'high pull' variants share the
    'clean' family and must be blocked once that family is used."""

    def setUp(self):
        django_user = User.objects.create_user(
            username='family_dedup_user',
            password='testpass123',
        )
        registered_user = RegisteredUser.objects.create(
            user=django_user,
            first_name='Family',
            last_name='Dedup',
        )
        self.preference = UserPreference.objects.create(
            registered_user=registered_user,
            days_per_week=4,
            fitness_level=2,
        )

    def test_high_pull_maps_to_clean_family(self):
        # Both name variants should resolve to the 'clean' movement family.
        clean_pull_families = extract_movement_families('Barbell Clean Pull')
        high_pull_families = extract_movement_families('Barbell High Pull')

        self.assertIn('clean', clean_pull_families)
        self.assertIn('clean', high_pull_families)

    def test_high_pull_blocked_when_clean_family_already_used(self):
        high_pull = Exercise.objects.create(
            name='Barbell High Pull',
            movement_patterns='lower pull,lower pull - hip hinge',
            muscle_groups='glutes,hamstrings,traps',
            is_reps=True,
            is_duration=False,
            is_weight=True,
            is_compound=True,
            exercise_tier='secondary',
            complexity_rating=3,
            difficulty_level='intermediate',
        )
        selector = ExerciseSelector(self.preference)
        # Mark the 'clean' family as already used in this workout.
        selector.used_movement_families['clean'] = 1

        selected = selector._weighted_pick(
            Exercise.objects.filter(pk=high_pull.pk),
            Exercise.objects.none(),
            count=1,
        )

        self.assertEqual(
            selected,
            [],
            'High-pull variant should be blocked when clean family is already used.',
        )
|
||||
430
generator/tests/test_exercise_metadata.py
Normal file
430
generator/tests/test_exercise_metadata.py
Normal file
@@ -0,0 +1,430 @@
|
||||
"""
|
||||
Tests for exercise metadata cleanup management commands.
|
||||
|
||||
Tests:
|
||||
- fix_rep_durations: fills null estimated_rep_duration using pattern/category lookup
|
||||
- fix_exercise_flags: fixes is_weight false positives and assigns missing muscles
|
||||
- fix_movement_pattern_typo: corrects "horizonal" -> "horizontal"
|
||||
- audit_exercise_data: reports data quality issues, exits 1 on critical
|
||||
"""
|
||||
|
||||
from django.test import TestCase
|
||||
from django.core.management import call_command
|
||||
from io import StringIO
|
||||
|
||||
from exercise.models import Exercise
|
||||
from muscle.models import Muscle, ExerciseMuscle
|
||||
|
||||
|
||||
class TestFixRepDurations(TestCase):
    """Tests for the fix_rep_durations management command."""

    @classmethod
    def setUpTestData(cls):
        # Exercise with null duration and a known movement pattern
        cls.ex_compound_push = Exercise.objects.create(
            name='Test Bench Press',
            estimated_rep_duration=None,
            is_reps=True,
            is_duration=False,
            is_weight=True,
            movement_patterns='compound_push',
        )
        # Exercise with null duration and a category default pattern
        cls.ex_upper_pull = Exercise.objects.create(
            name='Test Barbell Row',
            estimated_rep_duration=None,
            is_reps=True,
            is_duration=False,
            is_weight=True,
            movement_patterns='upper pull - horizontal',
        )
        # Duration-only exercise (should be skipped)
        cls.ex_duration_only = Exercise.objects.create(
            name='Test Plank Hold',
            estimated_rep_duration=None,
            is_reps=False,
            is_duration=True,
            is_weight=False,
            movement_patterns='core - anti-extension',
        )
        # Exercise with no movement patterns (should get DEFAULT_DURATION)
        cls.ex_no_patterns = Exercise.objects.create(
            name='Test Mystery Exercise',
            estimated_rep_duration=None,
            is_reps=True,
            is_duration=False,
            is_weight=False,
            movement_patterns='',
        )
        # Exercise that already has a duration.
        # NOTE(review): original comment said "(should be updated)" but no
        # test below pins that -- confirm whether existing values are
        # preserved or overwritten by the command.
        cls.ex_has_duration = Exercise.objects.create(
            name='Test Curl',
            estimated_rep_duration=2.5,
            is_reps=True,
            is_duration=False,
            is_weight=True,
            movement_patterns='isolation',
        )

    def test_no_null_rep_durations_after_fix(self):
        """After running fix_rep_durations, no rep-based exercises should have null duration."""
        call_command('fix_rep_durations')
        count = Exercise.objects.filter(
            estimated_rep_duration__isnull=True,
            is_reps=True,
        ).exclude(
            is_duration=True, is_reps=False
        ).count()
        self.assertEqual(count, 0)

    def test_duration_only_skipped(self):
        """Duration-only exercises should remain null."""
        call_command('fix_rep_durations')
        self.ex_duration_only.refresh_from_db()
        self.assertIsNone(self.ex_duration_only.estimated_rep_duration)

    def test_compound_push_gets_pattern_duration(self):
        """Exercise with compound_push pattern should get 3.0s."""
        call_command('fix_rep_durations')
        self.ex_compound_push.refresh_from_db()
        self.assertIsNotNone(self.ex_compound_push.estimated_rep_duration)
        # Could be from pattern (3.0) or category default -- either is acceptable
        self.assertGreater(self.ex_compound_push.estimated_rep_duration, 0)

    def test_no_patterns_gets_default(self):
        """Exercise with empty movement_patterns should get DEFAULT_DURATION (3.0)."""
        call_command('fix_rep_durations')
        self.ex_no_patterns.refresh_from_db()
        self.assertEqual(self.ex_no_patterns.estimated_rep_duration, 3.0)

    def test_fixes_idempotent(self):
        """Running fix_rep_durations twice should produce the same result."""
        call_command('fix_rep_durations')
        # Capture state after first run
        first_run_vals = {
            ex.pk: ex.estimated_rep_duration
            for ex in Exercise.objects.all()
        }
        call_command('fix_rep_durations')
        # Compare every exercise against the first-run snapshot.
        for ex in Exercise.objects.all():
            self.assertEqual(
                ex.estimated_rep_duration,
                first_run_vals[ex.pk],
                f'Value changed for {ex.name} on second run'
            )

    def test_dry_run_does_not_modify(self):
        """Dry run should not change any values."""
        out = StringIO()
        call_command('fix_rep_durations', '--dry-run', stdout=out)
        self.ex_compound_push.refresh_from_db()
        self.assertIsNone(self.ex_compound_push.estimated_rep_duration)
|
||||
|
||||
|
||||
class TestFixExerciseFlags(TestCase):
|
||||
"""Tests for the fix_exercise_flags management command."""
|
||||
|
||||
@classmethod
|
||||
def setUpTestData(cls):
|
||||
# Bodyweight exercise incorrectly marked as weighted
|
||||
cls.ex_wall_sit = Exercise.objects.create(
|
||||
name='Wall Sit Hold',
|
||||
estimated_rep_duration=3.0,
|
||||
is_reps=False,
|
||||
is_duration=True,
|
||||
is_weight=True, # false positive
|
||||
movement_patterns='isometric',
|
||||
)
|
||||
cls.ex_plank = Exercise.objects.create(
|
||||
name='High Plank',
|
||||
estimated_rep_duration=None,
|
||||
is_reps=False,
|
||||
is_duration=True,
|
||||
is_weight=True, # false positive
|
||||
movement_patterns='core',
|
||||
)
|
||||
cls.ex_burpee = Exercise.objects.create(
|
||||
name='Burpee',
|
||||
estimated_rep_duration=2.0,
|
||||
is_reps=True,
|
||||
is_duration=False,
|
||||
is_weight=True, # false positive
|
||||
movement_patterns='plyometric',
|
||||
)
|
||||
# Legitimately weighted exercise -- should NOT be changed
|
||||
cls.ex_barbell = Exercise.objects.create(
|
||||
name='Barbell Bench Press',
|
||||
estimated_rep_duration=3.0,
|
||||
is_reps=True,
|
||||
is_duration=False,
|
||||
is_weight=True,
|
||||
movement_patterns='upper push - horizontal',
|
||||
)
|
||||
# Exercise with no muscles (for muscle assignment test)
|
||||
cls.ex_no_muscle = Exercise.objects.create(
|
||||
name='Chest Press Machine',
|
||||
estimated_rep_duration=2.5,
|
||||
is_reps=True,
|
||||
is_duration=False,
|
||||
is_weight=True,
|
||||
movement_patterns='compound_push',
|
||||
)
|
||||
# Exercise that already has muscles (should not be affected)
|
||||
cls.ex_with_muscle = Exercise.objects.create(
|
||||
name='Bicep Curl',
|
||||
estimated_rep_duration=2.5,
|
||||
is_reps=True,
|
||||
is_duration=False,
|
||||
is_weight=True,
|
||||
movement_patterns='arms',
|
||||
)
|
||||
# Create test muscles
|
||||
cls.chest = Muscle.objects.create(name='chest')
|
||||
cls.biceps = Muscle.objects.create(name='biceps')
|
||||
cls.core = Muscle.objects.create(name='core')
|
||||
|
||||
# Assign muscle to ex_with_muscle
|
||||
ExerciseMuscle.objects.create(
|
||||
exercise=cls.ex_with_muscle,
|
||||
muscle=cls.biceps,
|
||||
)
|
||||
|
||||
def test_bodyweight_not_marked_weighted(self):
|
||||
"""Bodyweight exercises should have is_weight=False after fix."""
|
||||
call_command('fix_exercise_flags')
|
||||
self.ex_wall_sit.refresh_from_db()
|
||||
self.assertFalse(self.ex_wall_sit.is_weight)
|
||||
|
||||
def test_plank_not_marked_weighted(self):
|
||||
"""Plank should have is_weight=False after fix."""
|
||||
call_command('fix_exercise_flags')
|
||||
self.ex_plank.refresh_from_db()
|
||||
self.assertFalse(self.ex_plank.is_weight)
|
||||
|
||||
def test_burpee_not_marked_weighted(self):
|
||||
"""Burpee should have is_weight=False after fix."""
|
||||
call_command('fix_exercise_flags')
|
||||
self.ex_burpee.refresh_from_db()
|
||||
self.assertFalse(self.ex_burpee.is_weight)
|
||||
|
||||
def test_weighted_exercise_stays_weighted(self):
|
||||
"""Barbell Bench Press should stay is_weight=True."""
|
||||
call_command('fix_exercise_flags')
|
||||
self.ex_barbell.refresh_from_db()
|
||||
self.assertTrue(self.ex_barbell.is_weight)
|
||||
|
||||
def test_all_exercises_have_muscles(self):
|
||||
"""After fix, exercises that matched keywords should have muscles assigned."""
|
||||
call_command('fix_exercise_flags')
|
||||
# 'Chest Press Machine' should now have chest muscle
|
||||
orphans = Exercise.objects.exclude(
|
||||
pk__in=ExerciseMuscle.objects.values_list('exercise_id', flat=True)
|
||||
)
|
||||
self.assertNotIn(
|
||||
self.ex_no_muscle.pk,
|
||||
list(orphans.values_list('pk', flat=True))
|
||||
)
|
||||
|
||||
def test_chest_press_gets_chest_muscle(self):
|
||||
"""Chest Press Machine should get the 'chest' muscle assigned."""
|
||||
call_command('fix_exercise_flags')
|
||||
has_chest = ExerciseMuscle.objects.filter(
|
||||
exercise=self.ex_no_muscle,
|
||||
muscle=self.chest,
|
||||
).exists()
|
||||
self.assertTrue(has_chest)
|
||||
|
||||
def test_existing_muscle_assignments_preserved(self):
|
||||
"""Exercises that already have muscles should not be affected."""
|
||||
call_command('fix_exercise_flags')
|
||||
muscle_count = ExerciseMuscle.objects.filter(
|
||||
exercise=self.ex_with_muscle,
|
||||
).count()
|
||||
self.assertEqual(muscle_count, 1)
|
||||
|
||||
def test_word_boundary_no_false_match(self):
|
||||
"""'l sit' pattern should not match 'wall sit' (word boundary test)."""
|
||||
# Create an exercise named "L Sit" to test word boundary matching
|
||||
l_sit = Exercise.objects.create(
|
||||
name='L Sit Hold',
|
||||
is_reps=False,
|
||||
is_duration=True,
|
||||
is_weight=True,
|
||||
movement_patterns='isometric',
|
||||
)
|
||||
call_command('fix_exercise_flags')
|
||||
l_sit.refresh_from_db()
|
||||
# L sit is in our bodyweight patterns and has no equipment, so should be fixed
|
||||
self.assertFalse(l_sit.is_weight)
|
||||
|
||||
def test_fix_idempotent(self):
|
||||
"""Running fix_exercise_flags twice should produce the same result."""
|
||||
call_command('fix_exercise_flags')
|
||||
call_command('fix_exercise_flags')
|
||||
self.ex_wall_sit.refresh_from_db()
|
||||
self.assertFalse(self.ex_wall_sit.is_weight)
|
||||
# Muscle assignments should not duplicate
|
||||
chest_count = ExerciseMuscle.objects.filter(
|
||||
exercise=self.ex_no_muscle,
|
||||
muscle=self.chest,
|
||||
).count()
|
||||
self.assertEqual(chest_count, 1)
|
||||
|
||||
def test_dry_run_does_not_modify(self):
|
||||
"""Dry run should not change any values."""
|
||||
out = StringIO()
|
||||
call_command('fix_exercise_flags', '--dry-run', stdout=out)
|
||||
self.ex_wall_sit.refresh_from_db()
|
||||
self.assertTrue(self.ex_wall_sit.is_weight) # should still be True
|
||||
|
||||
|
||||
class TestFixMovementPatternTypo(TestCase):
|
||||
"""Tests for the fix_movement_pattern_typo management command."""
|
||||
|
||||
@classmethod
|
||||
def setUpTestData(cls):
|
||||
cls.ex_typo = Exercise.objects.create(
|
||||
name='Horizontal Row',
|
||||
estimated_rep_duration=3.0,
|
||||
is_reps=True,
|
||||
is_duration=False,
|
||||
movement_patterns='upper pull - horizonal',
|
||||
)
|
||||
cls.ex_no_typo = Exercise.objects.create(
|
||||
name='Barbell Squat',
|
||||
estimated_rep_duration=4.0,
|
||||
is_reps=True,
|
||||
is_duration=False,
|
||||
movement_patterns='lower push - squat',
|
||||
)
|
||||
|
||||
def test_no_horizonal_typo(self):
|
||||
"""After fix, no exercises should have 'horizonal' in movement_patterns."""
|
||||
call_command('fix_movement_pattern_typo')
|
||||
count = Exercise.objects.filter(
|
||||
movement_patterns__icontains='horizonal'
|
||||
).count()
|
||||
self.assertEqual(count, 0)
|
||||
|
||||
def test_typo_replaced_with_correct(self):
|
||||
"""The typo should be replaced with 'horizontal'."""
|
||||
call_command('fix_movement_pattern_typo')
|
||||
self.ex_typo.refresh_from_db()
|
||||
self.assertIn('horizontal', self.ex_typo.movement_patterns)
|
||||
self.assertNotIn('horizonal', self.ex_typo.movement_patterns)
|
||||
|
||||
def test_non_typo_unchanged(self):
|
||||
"""Exercises without the typo should not be modified."""
|
||||
call_command('fix_movement_pattern_typo')
|
||||
self.ex_no_typo.refresh_from_db()
|
||||
self.assertEqual(self.ex_no_typo.movement_patterns, 'lower push - squat')
|
||||
|
||||
def test_idempotent(self):
|
||||
"""Running the fix twice should be safe and produce same result."""
|
||||
call_command('fix_movement_pattern_typo')
|
||||
call_command('fix_movement_pattern_typo')
|
||||
self.ex_typo.refresh_from_db()
|
||||
self.assertIn('horizontal', self.ex_typo.movement_patterns)
|
||||
self.assertNotIn('horizonal', self.ex_typo.movement_patterns)
|
||||
|
||||
def test_already_fixed_message(self):
|
||||
"""When no typos exist, it should print a 'already fixed' message."""
|
||||
call_command('fix_movement_pattern_typo') # fix first
|
||||
out = StringIO()
|
||||
call_command('fix_movement_pattern_typo', stdout=out) # run again
|
||||
self.assertIn('already fixed', out.getvalue())
|
||||
|
||||
|
||||
class TestAuditExerciseData(TestCase):
|
||||
"""Tests for the audit_exercise_data management command."""
|
||||
|
||||
def test_audit_reports_critical_null_duration(self):
|
||||
"""Audit should exit 1 when rep-based exercises have null duration."""
|
||||
Exercise.objects.create(
|
||||
name='Test Bench Press',
|
||||
estimated_rep_duration=None,
|
||||
is_reps=True,
|
||||
is_duration=False,
|
||||
movement_patterns='compound_push',
|
||||
)
|
||||
out = StringIO()
|
||||
with self.assertRaises(SystemExit) as cm:
|
||||
call_command('audit_exercise_data', stdout=out)
|
||||
self.assertEqual(cm.exception.code, 1)
|
||||
|
||||
def test_audit_reports_critical_no_muscles(self):
|
||||
"""Audit should exit 1 when exercises have no muscle assignments."""
|
||||
Exercise.objects.create(
|
||||
name='Test Orphan Exercise',
|
||||
estimated_rep_duration=3.0,
|
||||
is_reps=True,
|
||||
is_duration=False,
|
||||
movement_patterns='compound_push',
|
||||
)
|
||||
out = StringIO()
|
||||
with self.assertRaises(SystemExit) as cm:
|
||||
call_command('audit_exercise_data', stdout=out)
|
||||
self.assertEqual(cm.exception.code, 1)
|
||||
|
||||
def test_audit_passes_when_clean(self):
|
||||
"""Audit should pass (no SystemExit) when no critical issues exist."""
|
||||
# Create a clean exercise with muscle assignment
|
||||
muscle = Muscle.objects.create(name='chest')
|
||||
ex = Exercise.objects.create(
|
||||
name='Clean Bench Press',
|
||||
estimated_rep_duration=3.0,
|
||||
is_reps=True,
|
||||
is_duration=False,
|
||||
is_weight=True,
|
||||
movement_patterns='upper push - horizontal',
|
||||
)
|
||||
ExerciseMuscle.objects.create(exercise=ex, muscle=muscle)
|
||||
|
||||
out = StringIO()
|
||||
# Should not raise SystemExit (no critical issues)
|
||||
call_command('audit_exercise_data', stdout=out)
|
||||
output = out.getvalue()
|
||||
self.assertNotIn('CRITICAL', output)
|
||||
|
||||
def test_audit_warns_on_typo(self):
|
||||
"""Audit should warn (not critical) about horizonal typo."""
|
||||
muscle = Muscle.objects.create(name='back')
|
||||
ex = Exercise.objects.create(
|
||||
name='Test Row',
|
||||
estimated_rep_duration=3.0,
|
||||
is_reps=True,
|
||||
is_duration=False,
|
||||
movement_patterns='upper pull - horizonal',
|
||||
)
|
||||
ExerciseMuscle.objects.create(exercise=ex, muscle=muscle)
|
||||
|
||||
out = StringIO()
|
||||
# Typo is only a WARNING, not CRITICAL -- should not exit 1
|
||||
call_command('audit_exercise_data', stdout=out)
|
||||
self.assertIn('horizonal', out.getvalue())
|
||||
|
||||
def test_audit_after_all_fixes(self):
|
||||
"""Audit should have no critical issues after running all fix commands."""
|
||||
# Create exercises with all known issues
|
||||
muscle = Muscle.objects.create(name='chest')
|
||||
ex1 = Exercise.objects.create(
|
||||
name='Bench Press',
|
||||
estimated_rep_duration=None,
|
||||
is_reps=True,
|
||||
is_duration=False,
|
||||
movement_patterns='upper push - horizonal',
|
||||
)
|
||||
# This exercise has a muscle, so no orphan issue after we assign to ex1
|
||||
ExerciseMuscle.objects.create(exercise=ex1, muscle=muscle)
|
||||
|
||||
# Run all fix commands
|
||||
call_command('fix_rep_durations')
|
||||
call_command('fix_exercise_flags')
|
||||
call_command('fix_movement_pattern_typo')
|
||||
|
||||
out = StringIO()
|
||||
call_command('audit_exercise_data', stdout=out)
|
||||
output = out.getvalue()
|
||||
self.assertNotIn('CRITICAL', output)
|
||||
142
generator/tests/test_exercise_similarity_dedup.py
Normal file
142
generator/tests/test_exercise_similarity_dedup.py
Normal file
@@ -0,0 +1,142 @@
|
||||
from django.contrib.auth.models import User
|
||||
from django.test import TestCase
|
||||
|
||||
from exercise.models import Exercise
|
||||
from generator.models import UserPreference
|
||||
from generator.services.exercise_selector import ExerciseSelector
|
||||
from registered_user.models import RegisteredUser
|
||||
|
||||
|
||||
class TestExerciseSimilarityDedup(TestCase):
|
||||
def setUp(self):
|
||||
django_user = User.objects.create_user(
|
||||
username='similarity_dedup_user',
|
||||
password='testpass123',
|
||||
)
|
||||
registered_user = RegisteredUser.objects.create(
|
||||
user=django_user,
|
||||
first_name='Similarity',
|
||||
last_name='Dedup',
|
||||
)
|
||||
self.preference = UserPreference.objects.create(
|
||||
registered_user=registered_user,
|
||||
days_per_week=4,
|
||||
fitness_level=2,
|
||||
)
|
||||
|
||||
def test_hard_similarity_blocks_near_identical_working_exercise(self):
|
||||
selector = ExerciseSelector(self.preference)
|
||||
prior = Exercise.objects.create(
|
||||
name='Posterior Chain Pull Alpha',
|
||||
movement_patterns='lower pull, lower pull - hip hinge',
|
||||
muscle_groups='glutes,hamstrings,traps',
|
||||
equipment_required='barbell',
|
||||
is_reps=True,
|
||||
is_duration=False,
|
||||
is_weight=True,
|
||||
is_compound=True,
|
||||
difficulty_level='intermediate',
|
||||
)
|
||||
candidate = Exercise.objects.create(
|
||||
name='Posterior Chain Pull Beta',
|
||||
movement_patterns='lower pull, lower pull - hip hinge',
|
||||
muscle_groups='glutes,hamstrings,traps',
|
||||
equipment_required='barbell',
|
||||
is_reps=True,
|
||||
is_duration=False,
|
||||
is_weight=True,
|
||||
is_compound=True,
|
||||
difficulty_level='intermediate',
|
||||
)
|
||||
|
||||
selector.used_working_similarity_profiles.append(
|
||||
selector._build_similarity_profile(prior)
|
||||
)
|
||||
selected = selector._weighted_pick(
|
||||
Exercise.objects.filter(pk=candidate.pk),
|
||||
Exercise.objects.none(),
|
||||
count=1,
|
||||
similarity_scope='working',
|
||||
)
|
||||
self.assertEqual(
|
||||
selected,
|
||||
[],
|
||||
'Near-identical exercise should be hard-blocked in same workout.',
|
||||
)
|
||||
|
||||
def test_soft_similarity_blocks_adjacent_superset_repetition(self):
|
||||
selector = ExerciseSelector(self.preference)
|
||||
previous_set_ex = Exercise.objects.create(
|
||||
name='Hip Hinge Pattern Alpha',
|
||||
movement_patterns='lower pull, lower pull - hip hinge, core',
|
||||
muscle_groups='glutes,hamstrings,core',
|
||||
equipment_required='barbell',
|
||||
is_reps=True,
|
||||
is_duration=False,
|
||||
is_weight=True,
|
||||
is_compound=True,
|
||||
difficulty_level='intermediate',
|
||||
)
|
||||
adjacent_candidate = Exercise.objects.create(
|
||||
name='Hip Hinge Pattern Beta',
|
||||
movement_patterns='lower pull - hip hinge, core',
|
||||
muscle_groups='glutes,hamstrings,core',
|
||||
equipment_required='barbell',
|
||||
is_reps=True,
|
||||
is_duration=False,
|
||||
is_weight=True,
|
||||
is_compound=True,
|
||||
difficulty_level='intermediate',
|
||||
)
|
||||
|
||||
selector.last_working_similarity_profiles = [
|
||||
selector._build_similarity_profile(previous_set_ex)
|
||||
]
|
||||
selected = selector._weighted_pick(
|
||||
Exercise.objects.filter(pk=adjacent_candidate.pk),
|
||||
Exercise.objects.none(),
|
||||
count=1,
|
||||
similarity_scope='working',
|
||||
)
|
||||
self.assertEqual(
|
||||
selected,
|
||||
[],
|
||||
'Very similar adjacent-set exercise should be soft-blocked.',
|
||||
)
|
||||
|
||||
def test_dissimilar_exercise_is_allowed(self):
|
||||
selector = ExerciseSelector(self.preference)
|
||||
previous_set_ex = Exercise.objects.create(
|
||||
name='Posterior Chain Pull Alpha',
|
||||
movement_patterns='lower pull, lower pull - hip hinge, core',
|
||||
muscle_groups='glutes,hamstrings,core',
|
||||
equipment_required='barbell',
|
||||
is_reps=True,
|
||||
is_duration=False,
|
||||
is_weight=True,
|
||||
is_compound=True,
|
||||
difficulty_level='intermediate',
|
||||
)
|
||||
different_candidate = Exercise.objects.create(
|
||||
name='Horizontal Push Builder',
|
||||
movement_patterns='upper push - horizontal, upper push',
|
||||
muscle_groups='chest,triceps,deltoids',
|
||||
equipment_required='dumbbell',
|
||||
is_reps=True,
|
||||
is_duration=False,
|
||||
is_weight=True,
|
||||
is_compound=True,
|
||||
difficulty_level='intermediate',
|
||||
)
|
||||
|
||||
selector.last_working_similarity_profiles = [
|
||||
selector._build_similarity_profile(previous_set_ex)
|
||||
]
|
||||
selected = selector._weighted_pick(
|
||||
Exercise.objects.filter(pk=different_candidate.pk),
|
||||
Exercise.objects.none(),
|
||||
count=1,
|
||||
similarity_scope='working',
|
||||
)
|
||||
self.assertEqual(len(selected), 1)
|
||||
self.assertEqual(selected[0].pk, different_candidate.pk)
|
||||
164
generator/tests/test_injury_safety.py
Normal file
164
generator/tests/test_injury_safety.py
Normal file
@@ -0,0 +1,164 @@
|
||||
from datetime import date
|
||||
|
||||
from django.test import TestCase
|
||||
from django.contrib.auth.models import User
|
||||
from rest_framework.test import APIClient
|
||||
from rest_framework.authtoken.models import Token
|
||||
|
||||
from registered_user.models import RegisteredUser
|
||||
from generator.models import UserPreference, WorkoutType
|
||||
|
||||
|
||||
class TestInjurySafety(TestCase):
|
||||
"""Tests for injury-related preference round-trip and warning generation."""
|
||||
|
||||
def setUp(self):
|
||||
self.django_user = User.objects.create_user(
|
||||
username='testuser',
|
||||
password='testpass123',
|
||||
email='test@example.com',
|
||||
)
|
||||
self.registered_user = RegisteredUser.objects.create(
|
||||
user=self.django_user,
|
||||
first_name='Test',
|
||||
last_name='User',
|
||||
)
|
||||
self.token = Token.objects.create(user=self.django_user)
|
||||
self.client = APIClient()
|
||||
self.client.credentials(HTTP_AUTHORIZATION=f'Token {self.token.key}')
|
||||
self.preference = UserPreference.objects.create(
|
||||
registered_user=self.registered_user,
|
||||
days_per_week=3,
|
||||
)
|
||||
# Create a basic workout type for generation
|
||||
self.workout_type = WorkoutType.objects.create(
|
||||
name='functional_strength_training',
|
||||
display_name='Functional Strength',
|
||||
typical_rest_between_sets=60,
|
||||
typical_intensity='medium',
|
||||
rep_range_min=8,
|
||||
rep_range_max=12,
|
||||
round_range_min=3,
|
||||
round_range_max=4,
|
||||
duration_bias=0.3,
|
||||
superset_size_min=2,
|
||||
superset_size_max=4,
|
||||
)
|
||||
|
||||
def test_injury_types_roundtrip(self):
|
||||
"""PUT injury_types, GET back, verify data persists."""
|
||||
injuries = [
|
||||
{'type': 'knee', 'severity': 'moderate'},
|
||||
{'type': 'shoulder', 'severity': 'mild'},
|
||||
]
|
||||
response = self.client.put(
|
||||
'/generator/preferences/update/',
|
||||
{'injury_types': injuries},
|
||||
format='json',
|
||||
)
|
||||
self.assertEqual(response.status_code, 200)
|
||||
|
||||
# GET back
|
||||
response = self.client.get('/generator/preferences/')
|
||||
self.assertEqual(response.status_code, 200)
|
||||
data = response.json()
|
||||
self.assertEqual(len(data['injury_types']), 2)
|
||||
types_set = {i['type'] for i in data['injury_types']}
|
||||
self.assertIn('knee', types_set)
|
||||
self.assertIn('shoulder', types_set)
|
||||
|
||||
def test_injury_types_validation_rejects_invalid_type(self):
|
||||
"""Invalid injury type should be rejected."""
|
||||
response = self.client.put(
|
||||
'/generator/preferences/update/',
|
||||
{'injury_types': [{'type': 'elbow', 'severity': 'mild'}]},
|
||||
format='json',
|
||||
)
|
||||
self.assertEqual(response.status_code, 400)
|
||||
|
||||
def test_injury_types_validation_rejects_invalid_severity(self):
|
||||
"""Invalid severity should be rejected."""
|
||||
response = self.client.put(
|
||||
'/generator/preferences/update/',
|
||||
{'injury_types': [{'type': 'knee', 'severity': 'extreme'}]},
|
||||
format='json',
|
||||
)
|
||||
self.assertEqual(response.status_code, 400)
|
||||
|
||||
def test_severe_knee_excludes_high_impact(self):
|
||||
"""Set knee:severe, verify the exercise selector filters correctly."""
|
||||
from generator.services.exercise_selector import ExerciseSelector
|
||||
|
||||
self.preference.injury_types = [
|
||||
{'type': 'knee', 'severity': 'severe'},
|
||||
]
|
||||
self.preference.save()
|
||||
|
||||
selector = ExerciseSelector(self.preference)
|
||||
qs = selector._get_filtered_queryset()
|
||||
|
||||
# No high-impact exercises should remain
|
||||
high_impact = qs.filter(impact_level='high')
|
||||
self.assertEqual(high_impact.count(), 0)
|
||||
|
||||
# No medium-impact exercises either (severe lower body)
|
||||
medium_impact = qs.filter(impact_level='medium')
|
||||
self.assertEqual(medium_impact.count(), 0)
|
||||
|
||||
# Warnings should mention the injury
|
||||
self.assertTrue(
|
||||
any('knee' in w.lower() for w in selector.warnings),
|
||||
f'Expected knee-related warning, got: {selector.warnings}'
|
||||
)
|
||||
|
||||
def test_no_injuries_full_pool(self):
|
||||
"""Empty injury_types should not exclude any exercises."""
|
||||
from generator.services.exercise_selector import ExerciseSelector
|
||||
|
||||
self.preference.injury_types = []
|
||||
self.preference.save()
|
||||
|
||||
selector = ExerciseSelector(self.preference)
|
||||
qs = selector._get_filtered_queryset()
|
||||
|
||||
# With no injuries, there should be no injury-based warnings
|
||||
injury_warnings = [w for w in selector.warnings if 'injury' in w.lower()]
|
||||
self.assertEqual(len(injury_warnings), 0)
|
||||
|
||||
def test_warnings_in_preview_response(self):
|
||||
"""With injuries set, verify warnings key appears in preview response."""
|
||||
self.preference.injury_types = [
|
||||
{'type': 'knee', 'severity': 'moderate'},
|
||||
]
|
||||
self.preference.save()
|
||||
self.preference.preferred_workout_types.add(self.workout_type)
|
||||
|
||||
response = self.client.post(
|
||||
'/generator/preview/',
|
||||
{'week_start_date': '2026-03-02'},
|
||||
format='json',
|
||||
)
|
||||
# Should succeed (200) even if exercise pool is limited
|
||||
self.assertIn(response.status_code, [200, 500])
|
||||
if response.status_code == 200:
|
||||
data = response.json()
|
||||
# The warnings key should exist if injuries triggered any warnings
|
||||
if 'warnings' in data:
|
||||
self.assertIsInstance(data['warnings'], list)
|
||||
|
||||
def test_backward_compat_string_injuries(self):
|
||||
"""Legacy string format should be accepted and normalized."""
|
||||
response = self.client.put(
|
||||
'/generator/preferences/update/',
|
||||
{'injury_types': ['knee', 'shoulder']},
|
||||
format='json',
|
||||
)
|
||||
self.assertEqual(response.status_code, 200)
|
||||
|
||||
# Verify normalized to dict format
|
||||
response = self.client.get('/generator/preferences/')
|
||||
data = response.json()
|
||||
for injury in data['injury_types']:
|
||||
self.assertIn('type', injury)
|
||||
self.assertIn('severity', injury)
|
||||
self.assertEqual(injury['severity'], 'moderate')
|
||||
103
generator/tests/test_modality_guardrails.py
Normal file
103
generator/tests/test_modality_guardrails.py
Normal file
@@ -0,0 +1,103 @@
|
||||
from django.contrib.auth.models import User
|
||||
from django.test import TestCase
|
||||
|
||||
from exercise.models import Exercise
|
||||
from generator.models import UserPreference
|
||||
from generator.services.exercise_selector import ExerciseSelector
|
||||
from registered_user.models import RegisteredUser
|
||||
|
||||
|
||||
class TestModalityGuardrails(TestCase):
|
||||
def setUp(self):
|
||||
django_user = User.objects.create_user(
|
||||
username='modality_guardrails_user',
|
||||
password='testpass123',
|
||||
)
|
||||
registered_user = RegisteredUser.objects.create(
|
||||
user=django_user,
|
||||
first_name='Modality',
|
||||
last_name='Guardrails',
|
||||
)
|
||||
self.preference = UserPreference.objects.create(
|
||||
registered_user=registered_user,
|
||||
days_per_week=4,
|
||||
fitness_level=2,
|
||||
)
|
||||
|
||||
def test_rep_mode_excludes_duration_only_exercises(self):
|
||||
duration_only = Exercise.objects.create(
|
||||
name="Dumbbell Waiter's Carry",
|
||||
movement_patterns='core,core - carry',
|
||||
muscle_groups='core,deltoids,upper back',
|
||||
equipment_required='Dumbbell',
|
||||
is_weight=True,
|
||||
is_duration=True,
|
||||
is_reps=False,
|
||||
is_compound=True,
|
||||
exercise_tier='secondary',
|
||||
difficulty_level='intermediate',
|
||||
complexity_rating=3,
|
||||
)
|
||||
reps_ex = Exercise.objects.create(
|
||||
name='2 Kettlebell Clean and Press',
|
||||
movement_patterns='upper push - vertical, upper push, lower pull',
|
||||
muscle_groups='deltoids,triceps,glutes',
|
||||
equipment_required='Kettlebell',
|
||||
is_weight=True,
|
||||
is_duration=False,
|
||||
is_reps=True,
|
||||
is_compound=True,
|
||||
exercise_tier='secondary',
|
||||
difficulty_level='intermediate',
|
||||
complexity_rating=3,
|
||||
)
|
||||
|
||||
selector = ExerciseSelector(self.preference)
|
||||
picked = selector.select_exercises(
|
||||
muscle_groups=[],
|
||||
count=2,
|
||||
is_duration_based=False,
|
||||
)
|
||||
picked_ids = {e.pk for e in picked}
|
||||
|
||||
self.assertIn(reps_ex.pk, picked_ids)
|
||||
self.assertNotIn(duration_only.pk, picked_ids)
|
||||
|
||||
def test_working_selection_excludes_static_stretch_patterns(self):
|
||||
static_stretch = Exercise.objects.create(
|
||||
name='Supine Pec Stretch - T',
|
||||
movement_patterns='mobility - static, static stretch, cool down',
|
||||
muscle_groups='chest,shoulders',
|
||||
equipment_required='None',
|
||||
is_weight=False,
|
||||
is_duration=True,
|
||||
is_reps=False,
|
||||
is_compound=False,
|
||||
exercise_tier='accessory',
|
||||
difficulty_level='beginner',
|
||||
complexity_rating=1,
|
||||
)
|
||||
valid_reps = Exercise.objects.create(
|
||||
name='Barbell Clean Pull',
|
||||
movement_patterns='upper pull,hip hinge',
|
||||
muscle_groups='upper back,hamstrings,glutes',
|
||||
equipment_required='Barbell',
|
||||
is_weight=True,
|
||||
is_duration=False,
|
||||
is_reps=True,
|
||||
is_compound=True,
|
||||
exercise_tier='primary',
|
||||
difficulty_level='intermediate',
|
||||
complexity_rating=3,
|
||||
)
|
||||
|
||||
selector = ExerciseSelector(self.preference)
|
||||
picked = selector.select_exercises(
|
||||
muscle_groups=[],
|
||||
count=2,
|
||||
is_duration_based=False,
|
||||
)
|
||||
picked_ids = {e.pk for e in picked}
|
||||
|
||||
self.assertIn(valid_reps.pk, picked_ids)
|
||||
self.assertNotIn(static_stretch.pk, picked_ids)
|
||||
1100
generator/tests/test_movement_enforcement.py
Normal file
1100
generator/tests/test_movement_enforcement.py
Normal file
File diff suppressed because it is too large
Load Diff
60
generator/tests/test_rebalance_replacement_guard.py
Normal file
60
generator/tests/test_rebalance_replacement_guard.py
Normal file
@@ -0,0 +1,60 @@
|
||||
from django.contrib.auth.models import User
|
||||
from django.test import TestCase
|
||||
|
||||
from exercise.models import Exercise
|
||||
from generator.models import UserPreference
|
||||
from generator.services.workout_generator import WorkoutGenerator
|
||||
from registered_user.models import RegisteredUser
|
||||
|
||||
|
||||
class TestRebalanceReplacementGuard(TestCase):
|
||||
def setUp(self):
|
||||
django_user = User.objects.create_user(
|
||||
username='rebalance_guard_user',
|
||||
password='testpass123',
|
||||
)
|
||||
registered_user = RegisteredUser.objects.create(
|
||||
user=django_user,
|
||||
first_name='Rebalance',
|
||||
last_name='Guard',
|
||||
)
|
||||
self.preference = UserPreference.objects.create(
|
||||
registered_user=registered_user,
|
||||
days_per_week=4,
|
||||
fitness_level=2,
|
||||
)
|
||||
|
||||
def test_pull_replacement_prefers_non_sided_candidates(self):
|
||||
sided_pull = Exercise.objects.create(
|
||||
name='Single Arm Cable Row',
|
||||
side='left_arm',
|
||||
movement_patterns='upper pull - horizontal, upper pull',
|
||||
muscle_groups='lats,upper back,biceps',
|
||||
is_reps=True,
|
||||
is_duration=False,
|
||||
is_weight=True,
|
||||
is_compound=False,
|
||||
difficulty_level='intermediate',
|
||||
)
|
||||
unsided_pull = Exercise.objects.create(
|
||||
name='Chest Supported Row',
|
||||
side='',
|
||||
movement_patterns='upper pull - horizontal, upper pull',
|
||||
muscle_groups='lats,upper back,biceps',
|
||||
is_reps=True,
|
||||
is_duration=False,
|
||||
is_weight=True,
|
||||
is_compound=False,
|
||||
difficulty_level='intermediate',
|
||||
)
|
||||
|
||||
generator = WorkoutGenerator(self.preference)
|
||||
replacement = generator._select_pull_replacement(
|
||||
target_muscles=[],
|
||||
is_duration_based=False,
|
||||
prefer_weighted=False,
|
||||
)
|
||||
|
||||
self.assertIsNotNone(replacement)
|
||||
self.assertEqual(replacement.pk, unsided_pull.pk)
|
||||
self.assertNotEqual(replacement.pk, sided_pull.pk)
|
||||
232
generator/tests/test_regeneration_context.py
Normal file
232
generator/tests/test_regeneration_context.py
Normal file
@@ -0,0 +1,232 @@
|
||||
from datetime import date, timedelta
|
||||
|
||||
from django.test import TestCase
|
||||
from django.contrib.auth.models import User
|
||||
from rest_framework.test import APIClient
|
||||
from rest_framework.authtoken.models import Token
|
||||
|
||||
from registered_user.models import RegisteredUser
|
||||
from generator.models import (
|
||||
UserPreference,
|
||||
WorkoutType,
|
||||
GeneratedWeeklyPlan,
|
||||
GeneratedWorkout,
|
||||
)
|
||||
from workout.models import Workout
|
||||
from superset.models import Superset, SupersetExercise
|
||||
from exercise.models import Exercise
|
||||
|
||||
|
||||
class TestRegenerationContext(TestCase):
|
||||
"""Tests for regeneration context (sibling exercise exclusion)."""
|
||||
|
||||
def setUp(self):
|
||||
self.django_user = User.objects.create_user(
|
||||
username='regenuser',
|
||||
password='testpass123',
|
||||
email='regen@example.com',
|
||||
)
|
||||
self.registered_user = RegisteredUser.objects.create(
|
||||
user=self.django_user,
|
||||
first_name='Regen',
|
||||
last_name='User',
|
||||
)
|
||||
self.token = Token.objects.create(user=self.django_user)
|
||||
self.client = APIClient()
|
||||
self.client.credentials(HTTP_AUTHORIZATION=f'Token {self.token.key}')
|
||||
|
||||
self.workout_type = WorkoutType.objects.create(
|
||||
name='functional_strength_training',
|
||||
display_name='Functional Strength',
|
||||
typical_rest_between_sets=60,
|
||||
typical_intensity='medium',
|
||||
rep_range_min=8,
|
||||
rep_range_max=12,
|
||||
round_range_min=3,
|
||||
round_range_max=4,
|
||||
duration_bias=0.3,
|
||||
superset_size_min=2,
|
||||
superset_size_max=4,
|
||||
)
|
||||
|
||||
self.preference = UserPreference.objects.create(
|
||||
registered_user=self.registered_user,
|
||||
days_per_week=3,
|
||||
)
|
||||
self.preference.preferred_workout_types.add(self.workout_type)
|
||||
|
||||
# Create the "First Up" exercise required by superset serializer helper
|
||||
Exercise.objects.get_or_create(
|
||||
name='First Up',
|
||||
defaults={
|
||||
'is_reps': False,
|
||||
'is_duration': True,
|
||||
},
|
||||
)
|
||||
|
||||
# Create enough exercises for testing (needs large pool so hard exclusion isn't relaxed)
|
||||
self.exercises = []
|
||||
for i in range(60):
|
||||
ex = Exercise.objects.create(
|
||||
name=f'Test Exercise {i}',
|
||||
is_reps=True,
|
||||
is_weight=(i % 2 == 0),
|
||||
)
|
||||
self.exercises.append(ex)
|
||||
|
||||
# Create a plan with 2 workouts
|
||||
week_start = date(2026, 3, 2)
|
||||
self.plan = GeneratedWeeklyPlan.objects.create(
|
||||
registered_user=self.registered_user,
|
||||
week_start_date=week_start,
|
||||
week_end_date=week_start + timedelta(days=6),
|
||||
status='completed',
|
||||
)
|
||||
|
||||
# Workout 1 (Monday): uses exercises 0-4
|
||||
self.workout1 = Workout.objects.create(
|
||||
name='Monday Workout',
|
||||
registered_user=self.registered_user,
|
||||
)
|
||||
ss1 = Superset.objects.create(
|
||||
workout=self.workout1,
|
||||
name='Set 1',
|
||||
rounds=3,
|
||||
order=1,
|
||||
)
|
||||
for i in range(5):
|
||||
SupersetExercise.objects.create(
|
||||
superset=ss1,
|
||||
exercise=self.exercises[i],
|
||||
reps=10,
|
||||
order=i + 1,
|
||||
)
|
||||
self.gen_workout1 = GeneratedWorkout.objects.create(
|
||||
plan=self.plan,
|
||||
workout=self.workout1,
|
||||
workout_type=self.workout_type,
|
||||
scheduled_date=week_start,
|
||||
day_of_week=0,
|
||||
is_rest_day=False,
|
||||
status='accepted',
|
||||
focus_area='Full Body',
|
||||
target_muscles=['chest', 'back'],
|
||||
)
|
||||
|
||||
# Workout 2 (Wednesday): uses exercises 5-9
|
||||
self.workout2 = Workout.objects.create(
|
||||
name='Wednesday Workout',
|
||||
registered_user=self.registered_user,
|
||||
)
|
||||
ss2 = Superset.objects.create(
|
||||
workout=self.workout2,
|
||||
name='Set 1',
|
||||
rounds=3,
|
||||
order=1,
|
||||
)
|
||||
for i in range(5, 10):
|
||||
SupersetExercise.objects.create(
|
||||
superset=ss2,
|
||||
exercise=self.exercises[i],
|
||||
reps=10,
|
||||
order=i - 4,
|
||||
)
|
||||
self.gen_workout2 = GeneratedWorkout.objects.create(
|
||||
plan=self.plan,
|
||||
workout=self.workout2,
|
||||
workout_type=self.workout_type,
|
||||
scheduled_date=week_start + timedelta(days=2),
|
||||
day_of_week=2,
|
||||
is_rest_day=False,
|
||||
status='pending',
|
||||
focus_area='Full Body',
|
||||
target_muscles=['legs', 'shoulders'],
|
||||
)
|
||||
|
||||
def test_regenerate_excludes_sibling_exercises(self):
    """
    Regenerating workout 2 should exclude exercises 0-4 (used by workout 1).
    """
    # Collect every exercise id already consumed by the sibling workout.
    used_ids = set(
        SupersetExercise.objects
        .filter(superset__workout=self.workout1)
        .values_list('exercise_id', flat=True)
    )
    self.assertEqual(len(used_ids), 5)

    response = self.client.post(
        f'/generator/workout/{self.gen_workout2.pk}/regenerate/',
    )
    # The generator may legitimately fail when the exercise pool is too
    # small; the exclusion is only verified when regeneration succeeded.
    if response.status_code != 200:
        return
    self.gen_workout2.refresh_from_db()
    if not self.gen_workout2.workout:
        return
    regenerated_ids = set(
        SupersetExercise.objects
        .filter(superset__workout=self.gen_workout2.workout)
        .values_list('exercise_id', flat=True)
    )
    overlap = regenerated_ids & used_ids
    self.assertEqual(
        len(overlap), 0,
        f'Regenerated workout should not share exercises with siblings. '
        f'Overlap: {overlap}'
    )
|
||||
|
||||
def test_preview_day_with_plan_context(self):
    """Pass plan_id to preview_day, verify it is accepted."""
    payload = {
        'target_muscles': ['chest', 'back'],
        'focus_area': 'Upper Body',
        'workout_type_id': self.workout_type.pk,
        'date': '2026-03-04',
        'plan_id': self.plan.pk,
    }
    response = self.client.post('/generator/preview-day/', payload, format='json')
    # Must not crash outright; a graceful 500 is tolerated by this test.
    self.assertIn(response.status_code, [200, 500])
    if response.status_code == 200:
        self.assertFalse(response.json().get('is_rest_day', True))
|
||||
|
||||
def test_preview_day_without_plan_id(self):
    """No plan_id, backward compat - should work as before."""
    payload = {
        'target_muscles': ['chest'],
        'focus_area': 'Chest',
        'date': '2026-03-04',
    }
    response = self.client.post('/generator/preview-day/', payload, format='json')
    # Should succeed or fail gracefully (no crash from missing plan_id)
    self.assertIn(response.status_code, [200, 500])
    if response.status_code == 200:
        self.assertIn('focus_area', response.json())
|
||||
|
||||
def test_regenerate_rest_day_fails(self):
    """Regenerating a rest day should return 400."""
    # A rest day carries neither a workout nor a workout type.
    rest_entry = GeneratedWorkout.objects.create(
        plan=self.plan,
        workout=None,
        workout_type=None,
        scheduled_date=date(2026, 3, 7),
        day_of_week=5,
        is_rest_day=True,
        status='accepted',
        focus_area='Rest Day',
        target_muscles=[],
    )
    response = self.client.post(f'/generator/workout/{rest_entry.pk}/regenerate/')
    self.assertEqual(response.status_code, 400)
|
||||
783
generator/tests/test_rules_engine.py
Normal file
783
generator/tests/test_rules_engine.py
Normal file
@@ -0,0 +1,783 @@
|
||||
"""
|
||||
Tests for the rules engine: WORKOUT_TYPE_RULES coverage,
|
||||
validate_workout() error/warning detection, and quality gate retry logic.
|
||||
"""
|
||||
|
||||
from unittest.mock import MagicMock, patch, PropertyMock
|
||||
|
||||
from django.test import TestCase
|
||||
|
||||
from generator.rules_engine import (
|
||||
validate_workout,
|
||||
RuleViolation,
|
||||
WORKOUT_TYPE_RULES,
|
||||
UNIVERSAL_RULES,
|
||||
DB_CALIBRATION,
|
||||
_normalize_type_key,
|
||||
_classify_rep_weight,
|
||||
_has_warmup,
|
||||
_has_cooldown,
|
||||
_get_working_supersets,
|
||||
_count_push_pull,
|
||||
_check_compound_before_isolation,
|
||||
)
|
||||
|
||||
|
||||
def _make_exercise(**kwargs):
|
||||
"""Create a mock exercise object with the given attributes."""
|
||||
defaults = {
|
||||
'exercise_tier': 'accessory',
|
||||
'is_reps': True,
|
||||
'is_compound': False,
|
||||
'is_weight': False,
|
||||
'is_duration': False,
|
||||
'movement_patterns': '',
|
||||
'name': 'Test Exercise',
|
||||
'stretch_position': None,
|
||||
'difficulty_level': 'intermediate',
|
||||
'complexity_rating': 3,
|
||||
'hr_elevation_rating': 5,
|
||||
'estimated_rep_duration': 3.0,
|
||||
}
|
||||
defaults.update(kwargs)
|
||||
ex = MagicMock()
|
||||
for k, v in defaults.items():
|
||||
setattr(ex, k, v)
|
||||
return ex
|
||||
|
||||
|
||||
def _make_entry(exercise=None, reps=None, duration=None, order=1):
|
||||
"""Create an exercise entry dict for a superset."""
|
||||
entry = {'order': order}
|
||||
entry['exercise'] = exercise or _make_exercise()
|
||||
if reps is not None:
|
||||
entry['reps'] = reps
|
||||
if duration is not None:
|
||||
entry['duration'] = duration
|
||||
return entry
|
||||
|
||||
|
||||
def _make_superset(name='Working Set 1', exercises=None, rounds=3):
|
||||
"""Create a superset dict."""
|
||||
return {
|
||||
'name': name,
|
||||
'exercises': exercises or [],
|
||||
'rounds': rounds,
|
||||
}
|
||||
|
||||
|
||||
class TestWorkoutTypeRulesCoverage(TestCase):
    """Verify that WORKOUT_TYPE_RULES covers all 8 workout types."""

    # Canonical keys every supported workout type must map to.
    EXPECTED_TYPES = (
        'traditional_strength_training',
        'hypertrophy',
        'high_intensity_interval_training',
        'functional_strength_training',
        'cross_training',
        'core_training',
        'flexibility',
        'cardio',
    )

    def test_all_8_workout_types_have_rules(self):
        for type_key in self.EXPECTED_TYPES:
            self.assertIn(type_key, WORKOUT_TYPE_RULES, f"Missing rules for {type_key}")

    def test_each_type_has_required_keys(self):
        required = (
            'rep_ranges', 'rest_periods', 'duration_bias_range',
            'superset_size_range', 'round_range', 'typical_rest',
            'typical_intensity',
        )
        for type_key, rules in WORKOUT_TYPE_RULES.items():
            for required_key in required:
                self.assertIn(
                    required_key, rules,
                    f"Missing key '{required_key}' in rules for {type_key}",
                )

    def test_rep_ranges_have_all_tiers(self):
        for type_key, rules in WORKOUT_TYPE_RULES.items():
            tiers = rules['rep_ranges']
            for tier in ('primary', 'secondary', 'accessory'):
                self.assertIn(
                    tier, tiers,
                    f"Missing rep range tier '{tier}' in {type_key}",
                )
                # Each tier maps to an inclusive (low, high) rep window.
                low, high = tiers[tier]
                self.assertLessEqual(
                    low, high,
                    f"Invalid rep range ({low}, {high}) for {tier} in {type_key}",
                )
|
||||
|
||||
|
||||
class TestDBCalibrationCoverage(TestCase):
    """Verify DB_CALIBRATION has entries for all 8 types."""

    def test_all_8_types_in_calibration(self):
        expected = (
            'functional_strength_training',
            'traditional_strength_training',
            'high_intensity_interval_training',
            'cross_training',
            'core_training',
            'flexibility',
            'cardio',
            'hypertrophy',
        )
        for type_name in expected:
            self.assertIn(type_name, DB_CALIBRATION, f"Missing {type_name} in DB_CALIBRATION")
|
||||
|
||||
|
||||
class TestHelperFunctions(TestCase):
    """Test utility functions used by validate_workout."""

    def test_normalize_type_key(self):
        # Display names, the HIIT alias, and already-normalized keys all
        # collapse to the same snake_case canonical form.
        cases = {
            'Traditional Strength Training': 'traditional_strength_training',
            'HIIT': 'high_intensity_interval_training',
            'high intensity interval training': 'high_intensity_interval_training',
            'cardio': 'cardio',
        }
        for raw, expected in cases.items():
            self.assertEqual(_normalize_type_key(raw), expected)

    def test_classify_rep_weight(self):
        for reps, expected in ((3, 'heavy'), (5, 'heavy'), (8, 'moderate'), (12, 'light')):
            self.assertEqual(_classify_rep_weight(reps), expected)

    def test_has_warmup(self):
        with_warmup = [
            _make_superset(name='Warm Up'),
            _make_superset(name='Working Set 1'),
        ]
        self.assertTrue(_has_warmup(with_warmup))
        self.assertFalse(_has_warmup([_make_superset(name='Working Set 1')]))

    def test_has_cooldown(self):
        with_cooldown = [
            _make_superset(name='Working Set 1'),
            _make_superset(name='Cool Down'),
        ]
        self.assertTrue(_has_cooldown(with_cooldown))
        self.assertFalse(_has_cooldown([_make_superset(name='Working Set 1')]))

    def test_get_working_supersets(self):
        blocks = [
            _make_superset(name='Warm Up'),
            _make_superset(name='Working Set 1'),
            _make_superset(name='Working Set 2'),
            _make_superset(name='Cool Down'),
        ]
        # Warm-up and cool-down blocks are filtered out, order preserved.
        working = _get_working_supersets(blocks)
        self.assertEqual(len(working), 2)
        self.assertEqual(working[0]['name'], 'Working Set 1')

    def test_count_push_pull(self):
        blocks = [
            _make_superset(
                name='Working Set 1',
                exercises=[
                    _make_entry(
                        exercise=_make_exercise(movement_patterns='upper push'), reps=8,
                    ),
                    _make_entry(
                        exercise=_make_exercise(movement_patterns='upper pull'), reps=8,
                    ),
                ],
            ),
        ]
        pushes, pulls = _count_push_pull(blocks)
        self.assertEqual(pushes, 1)
        self.assertEqual(pulls, 1)

    def test_compound_before_isolation_correct(self):
        # Compound ordered ahead of isolation within a single superset.
        blocks = [
            _make_superset(
                name='Working Set 1',
                exercises=[
                    _make_entry(
                        exercise=_make_exercise(is_compound=True, exercise_tier='primary'),
                        reps=5,
                        order=1,
                    ),
                    _make_entry(
                        exercise=_make_exercise(is_compound=False, exercise_tier='accessory'),
                        reps=12,
                        order=2,
                    ),
                ],
            ),
        ]
        self.assertTrue(_check_compound_before_isolation(blocks))

    def test_compound_before_isolation_violated(self):
        # Isolation work in an earlier superset than the compound lift.
        blocks = [
            _make_superset(
                name='Working Set 1',
                exercises=[
                    _make_entry(
                        exercise=_make_exercise(is_compound=False, exercise_tier='accessory'),
                        reps=12,
                        order=1,
                    ),
                ],
            ),
            _make_superset(
                name='Working Set 2',
                exercises=[
                    _make_entry(
                        exercise=_make_exercise(is_compound=True, exercise_tier='primary'),
                        reps=5,
                        order=1,
                    ),
                ],
            ),
        ]
        self.assertFalse(_check_compound_before_isolation(blocks))
|
||||
|
||||
|
||||
class TestValidateWorkout(TestCase):
    """Test the main validate_workout function."""

    @staticmethod
    def _recovery_block(name):
        """Build a one-round warm-up/cool-down superset with a single 30s move."""
        return _make_superset(
            name=name,
            exercises=[_make_entry(exercise=_make_exercise(is_reps=False), duration=30)],
            rounds=1,
        )

    @classmethod
    def _bracketed_spec(cls, *working_supersets):
        """Wrap the given working supersets between a warm-up and a cool-down."""
        supersets = [cls._recovery_block('Warm Up')]
        supersets.extend(working_supersets)
        supersets.append(cls._recovery_block('Cool Down'))
        return {'supersets': supersets}

    @staticmethod
    def _by_rule(violations, rule_id, severity=None):
        """Return violations matching a rule id (and optionally a severity)."""
        return [
            v for v in violations
            if v.rule_id == rule_id and (severity is None or v.severity == severity)
        ]

    def test_empty_workout_produces_error(self):
        violations = validate_workout({'supersets': []}, 'hiit', 'general_fitness')
        errors = [v for v in violations if v.severity == 'error']
        self.assertTrue(len(errors) > 0)
        self.assertEqual(errors[0].rule_id, 'empty_workout')

    def test_validate_catches_rep_range_violation(self):
        """Strength workout with reps=20 on primary should produce error."""
        spec = {
            'supersets': [
                _make_superset(
                    name='Working Set 1',
                    exercises=[
                        _make_entry(
                            exercise=_make_exercise(exercise_tier='primary', is_reps=True),
                            reps=20,
                        ),
                    ],
                    rounds=3,
                ),
            ],
        }
        violations = validate_workout(spec, 'traditional_strength_training', 'strength')
        # The rep-range rule id is tier-qualified, so match by substring.
        rep_errors = [
            v for v in violations
            if v.severity == 'error' and 'rep_range' in v.rule_id
        ]
        self.assertTrue(
            len(rep_errors) > 0,
            f"Expected rep range error, got: {[v.rule_id for v in violations]}",
        )

    def test_validate_passes_valid_strength_workout(self):
        """A well-formed strength workout with warmup + working + cooldown."""
        spec = self._bracketed_spec(
            _make_superset(
                name='Working Set 1',
                exercises=[
                    _make_entry(
                        exercise=_make_exercise(
                            exercise_tier='primary',
                            is_reps=True,
                            is_compound=True,
                            is_weight=True,
                            movement_patterns='upper push',
                        ),
                        reps=5,
                    ),
                ],
                rounds=4,
            ),
        )
        violations = validate_workout(spec, 'traditional_strength_training', 'strength')
        errors = [v for v in violations if v.severity == 'error']
        self.assertEqual(
            len(errors), 0,
            f"Unexpected errors: {[v.message for v in errors]}",
        )

    def test_warmup_missing_produces_error(self):
        """Workout without warmup should produce an error."""
        spec = {
            'supersets': [
                _make_superset(
                    name='Working Set 1',
                    exercises=[
                        _make_entry(
                            exercise=_make_exercise(
                                exercise_tier='primary',
                                is_reps=True,
                                is_compound=True,
                                is_weight=True,
                            ),
                            reps=5,
                        ),
                    ],
                    rounds=4,
                ),
            ],
        }
        violations = validate_workout(spec, 'traditional_strength_training', 'strength')
        self.assertEqual(len(self._by_rule(violations, 'warmup_missing')), 1)

    def test_cooldown_missing_produces_warning(self):
        """Workout without cooldown should produce a warning."""
        spec = {
            'supersets': [
                self._recovery_block('Warm Up'),
                _make_superset(
                    name='Working Set 1',
                    exercises=[
                        _make_entry(
                            exercise=_make_exercise(
                                exercise_tier='primary',
                                is_reps=True,
                                is_compound=True,
                                is_weight=True,
                            ),
                            reps=5,
                        ),
                    ],
                    rounds=4,
                ),
            ],
        }
        violations = validate_workout(spec, 'traditional_strength_training', 'strength')
        cooldown_warnings = self._by_rule(violations, 'cooldown_missing')
        self.assertEqual(len(cooldown_warnings), 1)
        self.assertEqual(cooldown_warnings[0].severity, 'warning')

    def test_push_pull_ratio_enforcement(self):
        """All push, no pull -> warning."""
        push_entries = [
            _make_entry(
                exercise=_make_exercise(
                    movement_patterns='upper push',
                    is_compound=True,
                    is_weight=True,
                    exercise_tier='primary',
                ),
                reps=8,
                order=position + 1,
            )
            for position in range(4)
        ]
        spec = self._bracketed_spec(
            _make_superset(name='Working Set 1', exercises=push_entries, rounds=3),
        )
        violations = validate_workout(spec, 'hypertrophy', 'hypertrophy')
        self.assertTrue(
            len(self._by_rule(violations, 'push_pull_ratio')) > 0,
            "Expected push:pull ratio warning for all-push workout",
        )

    def test_workout_type_match_violation(self):
        """Non-strength exercises in a strength workout should trigger match violation."""
        # All rep-based, non-compound, non-weighted exercises for strength.
        accessory_entries = [
            _make_entry(
                exercise=_make_exercise(
                    exercise_tier='accessory',
                    is_reps=True,
                    is_compound=False,
                    is_weight=False,
                ),
                reps=15,
            )
            for _ in range(5)
        ]
        spec = self._bracketed_spec(
            _make_superset(name='Working Set 1', exercises=accessory_entries, rounds=3),
        )
        violations = validate_workout(spec, 'traditional_strength_training', 'strength')
        self.assertTrue(
            len(self._by_rule(violations, 'workout_type_match')) > 0,
            "Expected workout type match violation for non-strength exercises",
        )

    def test_superset_size_warning(self):
        """Traditional strength with >5 exercises per superset should warn."""
        oversized = [
            _make_entry(
                exercise=_make_exercise(
                    exercise_tier='accessory',
                    is_reps=True,
                    is_weight=True,
                    is_compound=True,
                ),
                reps=5,
                order=position + 1,
            )
            for position in range(8)
        ]
        spec = self._bracketed_spec(
            _make_superset(name='Working Set 1', exercises=oversized, rounds=3),
        )
        violations = validate_workout(spec, 'traditional_strength_training', 'strength')
        self.assertTrue(
            len(self._by_rule(violations, 'superset_size')) > 0,
            "Expected superset size warning for 8-exercise superset in strength",
        )

    def test_superset_focus_repetition_error(self):
        """Two curl-family exercises in one superset should produce an error."""
        curl_a = _make_exercise(
            name='Alternating Bicep Curls',
            movement_patterns='upper pull',
            is_compound=False,
            exercise_tier='accessory',
        )
        curl_b = _make_exercise(
            name='Bicep Curls',
            movement_patterns='upper pull',
            is_compound=False,
            exercise_tier='accessory',
        )
        spec = self._bracketed_spec(
            _make_superset(
                name='Working Set 1',
                exercises=[
                    _make_entry(exercise=curl_a, reps=10, order=1),
                    _make_entry(exercise=curl_b, reps=10, order=2),
                ],
                rounds=3,
            ),
        )
        violations = validate_workout(
            spec, 'functional_strength_training', 'general_fitness',
        )
        repetition_errors = self._by_rule(
            violations, 'superset_focus_repetition', severity='error',
        )
        self.assertTrue(
            repetition_errors,
            f"Expected superset focus repetition error, got {[v.rule_id for v in violations]}",
        )

    def test_working_set_rejects_recovery_stretch_movements(self):
        stretch = _make_exercise(
            name='Supine Pec Stretch - T',
            movement_patterns='mobility - static, mobility, cool down',
            is_reps=False,
            is_duration=True,
        )
        press = _make_exercise(
            name='Single-Arm Dumbbell Push Press',
            movement_patterns='upper push - vertical, upper push',
            is_reps=True,
            is_duration=False,
            is_compound=True,
            is_weight=True,
            exercise_tier='secondary',
        )
        spec = self._bracketed_spec(
            _make_superset(
                name='Working Set 1',
                exercises=[
                    _make_entry(exercise=press, reps=8, order=1),
                    _make_entry(exercise=stretch, duration=30, order=2),
                ],
                rounds=4,
            ),
        )
        violations = validate_workout(
            spec, 'functional_strength_training', 'general_fitness',
        )
        self.assertTrue(
            self._by_rule(violations, 'working_contains_recovery', severity='error'),
            'Expected recovery/stretch error in working set.',
        )

    def test_working_set_requires_positive_rest_between_rounds(self):
        # Built as a raw dict so rest_between_rounds can be pinned to zero.
        working = {
            'name': 'Working Set 1',
            'rounds': 4,
            'rest_between_rounds': 0,
            'exercises': [
                _make_entry(
                    exercise=_make_exercise(
                        name='Barbell Push Press',
                        movement_patterns='upper push',
                        is_compound=True,
                        is_weight=True,
                        exercise_tier='primary',
                    ),
                    reps=5,
                    order=1,
                ),
            ],
        }
        spec = self._bracketed_spec(working)
        violations = validate_workout(
            spec, 'functional_strength_training', 'general_fitness',
        )
        self.assertTrue(
            self._by_rule(violations, 'working_rest_missing', severity='warning'),
            'Expected warning for missing/zero working rest.',
        )

    def test_adjacent_focus_repetition_info(self):
        """Adjacent working supersets with same focus profile should be advisory."""
        first_pull = _make_exercise(name='Bicep Curl', movement_patterns='upper pull')
        second_pull = _make_exercise(name='Hammer Curl', movement_patterns='upper pull')
        spec = self._bracketed_spec(
            _make_superset(
                name='Working Set 1',
                exercises=[_make_entry(exercise=first_pull, reps=10, order=1)],
                rounds=3,
            ),
            _make_superset(
                name='Working Set 2',
                exercises=[_make_entry(exercise=second_pull, reps=10, order=1)],
                rounds=3,
            ),
        )
        violations = validate_workout(
            spec, 'functional_strength_training', 'general_fitness',
        )
        self.assertTrue(
            self._by_rule(
                violations, 'adjacent_superset_focus_repetition', severity='info',
            ),
            "Expected adjacent superset focus repetition advisory info.",
        )

    def test_compound_before_isolation_info(self):
        """Isolation before compound should produce info violation."""
        isolation = _make_exercise(
            is_compound=False, exercise_tier='accessory',
            is_weight=True, is_reps=True,
        )
        compound = _make_exercise(
            is_compound=True, exercise_tier='primary',
            is_weight=True, is_reps=True,
        )
        spec = self._bracketed_spec(
            _make_superset(
                name='Working Set 1',
                exercises=[_make_entry(exercise=isolation, reps=12, order=1)],
                rounds=3,
            ),
            _make_superset(
                name='Working Set 2',
                exercises=[_make_entry(exercise=compound, reps=5, order=1)],
                rounds=4,
            ),
        )
        violations = validate_workout(spec, 'hypertrophy', 'hypertrophy')
        self.assertTrue(
            len(self._by_rule(violations, 'compound_before_isolation')) > 0,
            "Expected compound_before_isolation info for isolation-first order",
        )

    def test_unknown_workout_type_does_not_crash(self):
        """An unknown workout type should not crash validation."""
        spec = self._bracketed_spec(
            _make_superset(
                name='Working Set 1',
                exercises=[_make_entry(reps=10)],
                rounds=3,
            ),
        )
        # Should not raise; may produce some violations but no crash
        violations = validate_workout(spec, 'unknown_type', 'general_fitness')
        self.assertIsInstance(violations, list)
|
||||
|
||||
|
||||
class TestRuleViolationDataclass(TestCase):
    """Test the RuleViolation dataclass."""

    def test_basic_creation(self):
        violation = RuleViolation(
            rule_id='test_rule',
            severity='error',
            message='Test message',
        )
        self.assertEqual(violation.rule_id, 'test_rule')
        self.assertEqual(violation.severity, 'error')
        self.assertEqual(violation.message, 'Test message')
        # Optional context fields default to None when not supplied.
        self.assertIsNone(violation.actual_value)
        self.assertIsNone(violation.expected_range)

    def test_with_values(self):
        violation = RuleViolation(
            rule_id='rep_range_primary',
            severity='error',
            message='Reps out of range',
            actual_value=20,
            expected_range=(3, 6),
        )
        self.assertEqual(violation.actual_value, 20)
        self.assertEqual(violation.expected_range, (3, 6))
|
||||
|
||||
|
||||
class TestUniversalRules(TestCase):
    """Verify universal rules have expected values."""

    def test_push_pull_ratio_min(self):
        # Pull volume must at least equal push volume (ratio floor of 1.0).
        self.assertEqual(UNIVERSAL_RULES['push_pull_ratio_min'], 1.0)

    def test_compound_before_isolation(self):
        # Compound lifts are required to precede isolation work.
        self.assertTrue(UNIVERSAL_RULES['compound_before_isolation'])

    def test_warmup_mandatory(self):
        # Every generated workout must open with a warm-up block.
        self.assertTrue(UNIVERSAL_RULES['warmup_mandatory'])

    def test_max_hiit_duration(self):
        # HIIT sessions are capped at 30 minutes.
        self.assertEqual(UNIVERSAL_RULES['max_hiit_duration_min'], 30)

    def test_cooldown_stretch_only(self):
        # Cool-down blocks may contain stretch movements only.
        self.assertTrue(UNIVERSAL_RULES['cooldown_stretch_only'])
|
||||
203
generator/tests/test_side_pair_integrity.py
Normal file
203
generator/tests/test_side_pair_integrity.py
Normal file
@@ -0,0 +1,203 @@
|
||||
from django.contrib.auth.models import User
|
||||
from django.test import TestCase
|
||||
|
||||
from exercise.models import Exercise
|
||||
from generator.models import UserPreference
|
||||
from generator.services.exercise_selector import ExerciseSelector
|
||||
from registered_user.models import RegisteredUser
|
||||
|
||||
|
||||
class TestSidePairIntegrity(TestCase):
|
||||
def setUp(self):
    # Minimal user -> profile -> preference chain backing the selector.
    account = User.objects.create_user(
        username='side_pair_user',
        password='testpass123',
    )
    profile = RegisteredUser.objects.create(
        user=account,
        first_name='Side',
        last_name='Pair',
    )
    self.preference = UserPreference.objects.create(
        registered_user=profile,
        days_per_week=4,
        fitness_level=2,
    )
    # Selector under test, bound to the preference fixture above.
    self.selector = ExerciseSelector(self.preference)
|
||||
|
||||
def test_orphan_left_is_removed_and_replaced(self):
    def pull_exercise(name, side, movement_patterns, muscle_groups):
        # All fixtures here are rep-based weighted pulls at intermediate level.
        return Exercise.objects.create(
            name=name,
            side=side,
            is_reps=True,
            is_duration=False,
            is_weight=True,
            movement_patterns=movement_patterns,
            muscle_groups=muscle_groups,
            difficulty_level='intermediate',
        )

    left_only = pull_exercise(
        'Single Arm Row Left', 'Left',
        'upper pull - horizontal, upper pull', 'lats,upper back,biceps',
    )
    filler_a = pull_exercise(
        'Chest Supported Row', '',
        'upper pull - horizontal, upper pull', 'lats,upper back,biceps',
    )
    filler_b = pull_exercise(
        'Face Pull', '',
        'upper pull, rear delt', 'upper back,deltoids',
    )

    pool = Exercise.objects.filter(pk__in=[left_only.pk, filler_a.pk, filler_b.pk])
    enforced = self.selector._ensure_side_pair_integrity([left_only], pool, count=1)

    self.assertEqual(len(enforced), 1)
    self.assertNotEqual(enforced[0].pk, left_only.pk)
    self.assertIn(
        enforced[0].pk,
        {filler_a.pk, filler_b.pk},
        'Orphan left-side movement should be replaced by a non-sided filler.',
    )
|
||||
|
||||
def test_left_right_pair_is_preserved(self):
    # Identical presses differing only in the side marker.
    common = dict(
        is_reps=True,
        is_duration=False,
        is_weight=True,
        movement_patterns='upper push - vertical, upper push',
        muscle_groups='deltoids,triceps',
        difficulty_level='intermediate',
    )
    left_ex = Exercise.objects.create(
        name='Single Arm Press Left', side='Left', **common,
    )
    right_ex = Exercise.objects.create(
        name='Single Arm Press Right', side='Right', **common,
    )

    enforced = self.selector._ensure_side_pair_integrity(
        [left_ex, right_ex],
        Exercise.objects.filter(pk__in=[left_ex.pk, right_ex.pk]),
        count=2,
    )
    # Both halves of the left/right pair must survive enforcement.
    self.assertEqual({ex.pk for ex in enforced}, {left_ex.pk, right_ex.pk})
|
||||
|
||||
def test_left_arm_right_arm_pair_is_preserved(self):
    # Same movement recorded twice with left_arm / right_arm side markers.
    common = dict(
        name='Single Arm Row',
        is_reps=True,
        is_duration=False,
        is_weight=True,
        movement_patterns='upper pull - horizontal, upper pull',
        muscle_groups='lats,upper back,biceps',
        difficulty_level='intermediate',
    )
    left_ex = Exercise.objects.create(side='left_arm', **common)
    right_ex = Exercise.objects.create(side='right_arm', **common)

    paired = self.selector._pair_sided_exercises(
        [left_ex],
        Exercise.objects.filter(pk__in=[left_ex.pk, right_ex.pk]),
    )
    # Pairing a lone left_arm movement should pull in its right_arm partner.
    self.assertEqual({ex.pk for ex in paired}, {left_ex.pk, right_ex.pk})
|
||||
|
||||
def test_orphan_left_arm_is_removed(self):
|
||||
left_ex = Exercise.objects.create(
|
||||
name='Single Arm Row',
|
||||
side='left_arm',
|
||||
is_reps=True,
|
||||
is_duration=False,
|
||||
is_weight=True,
|
||||
movement_patterns='upper pull - horizontal, upper pull',
|
||||
muscle_groups='lats,upper back,biceps',
|
||||
difficulty_level='intermediate',
|
||||
)
|
||||
filler = Exercise.objects.create(
|
||||
name='Inverted Row',
|
||||
side='',
|
||||
is_reps=True,
|
||||
is_duration=False,
|
||||
is_weight=False,
|
||||
movement_patterns='upper pull - horizontal, upper pull',
|
||||
muscle_groups='lats,upper back,biceps',
|
||||
difficulty_level='intermediate',
|
||||
)
|
||||
|
||||
enforced = self.selector._ensure_side_pair_integrity(
|
||||
[left_ex],
|
||||
Exercise.objects.filter(pk__in=[left_ex.pk, filler.pk]),
|
||||
count=1,
|
||||
)
|
||||
self.assertEqual(len(enforced), 1)
|
||||
self.assertEqual(enforced[0].pk, filler.pk)
|
||||
|
||||
def test_try_hard_fetch_adds_opposite_side_partner_from_global_db(self):
|
||||
left_ex = Exercise.objects.create(
|
||||
name='Single Arm Lateral Raise Left',
|
||||
side='Left',
|
||||
is_reps=True,
|
||||
is_duration=False,
|
||||
is_weight=True,
|
||||
movement_patterns='upper push',
|
||||
muscle_groups='deltoids',
|
||||
difficulty_level='intermediate',
|
||||
)
|
||||
right_ex = Exercise.objects.create(
|
||||
name='Single Arm Lateral Raise Right',
|
||||
side='Right',
|
||||
is_reps=True,
|
||||
is_duration=False,
|
||||
is_weight=True,
|
||||
movement_patterns='upper push',
|
||||
muscle_groups='deltoids',
|
||||
difficulty_level='intermediate',
|
||||
)
|
||||
filler = Exercise.objects.create(
|
||||
name='Shoulder Tap',
|
||||
side='',
|
||||
is_reps=True,
|
||||
is_duration=False,
|
||||
is_weight=False,
|
||||
movement_patterns='upper push',
|
||||
muscle_groups='deltoids,core',
|
||||
difficulty_level='intermediate',
|
||||
)
|
||||
|
||||
# base_qs intentionally does not include right_ex to validate global fallback.
|
||||
base_qs = Exercise.objects.filter(pk__in=[left_ex.pk, filler.pk])
|
||||
enforced = self.selector._ensure_side_pair_integrity(
|
||||
[left_ex, filler],
|
||||
base_qs,
|
||||
count=2,
|
||||
)
|
||||
enforced_ids = {ex.pk for ex in enforced}
|
||||
self.assertIn(left_ex.pk, enforced_ids)
|
||||
self.assertIn(right_ex.pk, enforced_ids)
|
||||
self.assertNotIn(filler.pk, enforced_ids)
|
||||
250
generator/tests/test_structure_rules.py
Normal file
250
generator/tests/test_structure_rules.py
Normal file
@@ -0,0 +1,250 @@
|
||||
"""
|
||||
Tests for the calibrate_structure_rules management command.
|
||||
|
||||
Verifies the full 120-rule matrix (8 types x 5 goals x 3 sections)
|
||||
is correctly populated, all values are sane, and the command is
|
||||
idempotent (running it twice doesn't create duplicates).
|
||||
"""
|
||||
from django.test import TestCase
|
||||
from django.core.management import call_command
|
||||
|
||||
from generator.models import WorkoutStructureRule, WorkoutType
|
||||
|
||||
|
||||
WORKOUT_TYPE_NAMES = [
|
||||
'traditional_strength_training',
|
||||
'hypertrophy',
|
||||
'high_intensity_interval_training',
|
||||
'functional_strength_training',
|
||||
'cross_training',
|
||||
'core_training',
|
||||
'flexibility',
|
||||
'cardio',
|
||||
]
|
||||
|
||||
GOAL_TYPES = [
|
||||
'strength', 'hypertrophy', 'endurance', 'weight_loss', 'general_fitness',
|
||||
]
|
||||
|
||||
SECTION_TYPES = ['warm_up', 'working', 'cool_down']
|
||||
|
||||
|
||||
class TestStructureRules(TestCase):
|
||||
"""Verify calibrate_structure_rules produces the correct 120-rule matrix."""
|
||||
|
||||
@classmethod
|
||||
def setUpTestData(cls):
|
||||
# Create all 8 workout types so the command can find them.
|
||||
cls.workout_types = []
|
||||
for name in WORKOUT_TYPE_NAMES:
|
||||
wt, _ = WorkoutType.objects.get_or_create(name=name)
|
||||
cls.workout_types.append(wt)
|
||||
|
||||
# Run the calibration command.
|
||||
call_command('calibrate_structure_rules')
|
||||
|
||||
# ------------------------------------------------------------------
|
||||
# Coverage tests
|
||||
# ------------------------------------------------------------------
|
||||
|
||||
def test_all_120_combinations_exist(self):
|
||||
"""8 types x 5 goals x 3 sections = 120 rules."""
|
||||
count = WorkoutStructureRule.objects.count()
|
||||
self.assertEqual(count, 120, f'Expected 120 rules, got {count}')
|
||||
|
||||
def test_each_type_has_15_rules(self):
|
||||
"""Each workout type should have 5 goals x 3 sections = 15 rules."""
|
||||
for wt in self.workout_types:
|
||||
count = WorkoutStructureRule.objects.filter(
|
||||
workout_type=wt,
|
||||
).count()
|
||||
self.assertEqual(
|
||||
count, 15,
|
||||
f'{wt.name} has {count} rules, expected 15',
|
||||
)
|
||||
|
||||
def test_each_type_has_all_sections(self):
|
||||
"""Every type must cover warm_up, working, and cool_down."""
|
||||
for wt in self.workout_types:
|
||||
sections = set(
|
||||
WorkoutStructureRule.objects.filter(
|
||||
workout_type=wt,
|
||||
).values_list('section_type', flat=True)
|
||||
)
|
||||
self.assertEqual(
|
||||
sections,
|
||||
{'warm_up', 'working', 'cool_down'},
|
||||
f'{wt.name} missing sections: '
|
||||
f'{{"warm_up", "working", "cool_down"}} - {sections}',
|
||||
)
|
||||
|
||||
def test_each_type_has_all_goals(self):
|
||||
"""Every type must have all 5 goal types."""
|
||||
for wt in self.workout_types:
|
||||
goals = set(
|
||||
WorkoutStructureRule.objects.filter(
|
||||
workout_type=wt,
|
||||
).values_list('goal_type', flat=True)
|
||||
)
|
||||
expected = set(GOAL_TYPES)
|
||||
self.assertEqual(
|
||||
goals, expected,
|
||||
f'{wt.name} goals mismatch: expected {expected}, got {goals}',
|
||||
)
|
||||
|
||||
# ------------------------------------------------------------------
|
||||
# Value sanity tests
|
||||
# ------------------------------------------------------------------
|
||||
|
||||
def test_working_rules_have_movement_patterns(self):
|
||||
"""All working-section rules must have at least one pattern."""
|
||||
working_rules = WorkoutStructureRule.objects.filter(
|
||||
section_type='working',
|
||||
)
|
||||
for rule in working_rules:
|
||||
self.assertTrue(
|
||||
len(rule.movement_patterns) > 0,
|
||||
f'Working rule {rule} has empty movement_patterns',
|
||||
)
|
||||
|
||||
def test_warmup_and_cooldown_have_patterns(self):
|
||||
"""Warm-up and cool-down rules should also have patterns."""
|
||||
for section in ('warm_up', 'cool_down'):
|
||||
rules = WorkoutStructureRule.objects.filter(section_type=section)
|
||||
for rule in rules:
|
||||
self.assertTrue(
|
||||
len(rule.movement_patterns) > 0,
|
||||
f'{section} rule {rule} has empty movement_patterns',
|
||||
)
|
||||
|
||||
def test_rep_ranges_valid(self):
|
||||
"""rep_min <= rep_max, and working rep_min >= 1."""
|
||||
for rule in WorkoutStructureRule.objects.all():
|
||||
self.assertLessEqual(
|
||||
rule.typical_rep_range_min,
|
||||
rule.typical_rep_range_max,
|
||||
f'Rule {rule}: rep_min ({rule.typical_rep_range_min}) '
|
||||
f'> rep_max ({rule.typical_rep_range_max})',
|
||||
)
|
||||
if rule.section_type == 'working':
|
||||
self.assertGreaterEqual(
|
||||
rule.typical_rep_range_min, 1,
|
||||
f'Rule {rule}: working rep_min below floor',
|
||||
)
|
||||
|
||||
def test_duration_ranges_valid(self):
|
||||
"""dur_min <= dur_max for every rule."""
|
||||
for rule in WorkoutStructureRule.objects.all():
|
||||
self.assertLessEqual(
|
||||
rule.typical_duration_range_min,
|
||||
rule.typical_duration_range_max,
|
||||
f'Rule {rule}: dur_min ({rule.typical_duration_range_min}) '
|
||||
f'> dur_max ({rule.typical_duration_range_max})',
|
||||
)
|
||||
|
||||
def test_warm_up_rounds_are_one(self):
|
||||
"""All warm_up sections must have exactly 1 round."""
|
||||
warmup_rules = WorkoutStructureRule.objects.filter(
|
||||
section_type='warm_up',
|
||||
)
|
||||
for rule in warmup_rules:
|
||||
self.assertEqual(
|
||||
rule.typical_rounds, 1,
|
||||
f'Warm-up rule {rule} has rounds={rule.typical_rounds}, '
|
||||
f'expected 1',
|
||||
)
|
||||
|
||||
def test_cool_down_rounds_are_one(self):
|
||||
"""All cool_down sections must have exactly 1 round."""
|
||||
cooldown_rules = WorkoutStructureRule.objects.filter(
|
||||
section_type='cool_down',
|
||||
)
|
||||
for rule in cooldown_rules:
|
||||
self.assertEqual(
|
||||
rule.typical_rounds, 1,
|
||||
f'Cool-down rule {rule} has rounds={rule.typical_rounds}, '
|
||||
f'expected 1',
|
||||
)
|
||||
|
||||
def test_cardio_rounds_not_absurd(self):
|
||||
"""Cardio working rounds should be 2-3, not 23-25 (ML artifact)."""
|
||||
cardio_wt = WorkoutType.objects.get(name='cardio')
|
||||
cardio_working = WorkoutStructureRule.objects.filter(
|
||||
workout_type=cardio_wt,
|
||||
section_type='working',
|
||||
)
|
||||
for rule in cardio_working:
|
||||
self.assertLessEqual(
|
||||
rule.typical_rounds, 5,
|
||||
f'Cardio working {rule.goal_type} has '
|
||||
f'rounds={rule.typical_rounds}, expected <= 5',
|
||||
)
|
||||
self.assertGreaterEqual(
|
||||
rule.typical_rounds, 2,
|
||||
f'Cardio working {rule.goal_type} has '
|
||||
f'rounds={rule.typical_rounds}, expected >= 2',
|
||||
)
|
||||
|
||||
def test_cool_down_has_stretch_or_mobility(self):
|
||||
"""Cool-down patterns should focus on stretch/mobility."""
|
||||
cooldown_rules = WorkoutStructureRule.objects.filter(
|
||||
section_type='cool_down',
|
||||
)
|
||||
stretch_mobility_patterns = {
|
||||
'mobility', 'mobility - static', 'yoga',
|
||||
'lower pull - hip hinge', 'cardio/locomotion',
|
||||
}
|
||||
for rule in cooldown_rules:
|
||||
patterns = set(rule.movement_patterns)
|
||||
overlap = patterns & stretch_mobility_patterns
|
||||
self.assertTrue(
|
||||
len(overlap) > 0,
|
||||
f'Cool-down rule {rule} has no stretch/mobility patterns: '
|
||||
f'{rule.movement_patterns}',
|
||||
)
|
||||
|
||||
def test_no_rep_min_below_global_floor(self):
|
||||
"""After calibration, no rule should have rep_min < 6 (the floor)."""
|
||||
below_floor = WorkoutStructureRule.objects.filter(
|
||||
typical_rep_range_min__lt=6,
|
||||
typical_rep_range_min__gt=0,
|
||||
)
|
||||
self.assertEqual(
|
||||
below_floor.count(), 0,
|
||||
f'{below_floor.count()} rules have rep_min below 6',
|
||||
)
|
||||
|
||||
# ------------------------------------------------------------------
|
||||
# Idempotency test
|
||||
# ------------------------------------------------------------------
|
||||
|
||||
def test_calibrate_is_idempotent(self):
|
||||
"""Running the command again must not create duplicates."""
|
||||
# Run calibration a second time.
|
||||
call_command('calibrate_structure_rules')
|
||||
count = WorkoutStructureRule.objects.count()
|
||||
self.assertEqual(
|
||||
count, 120,
|
||||
f'After re-run, expected 120 rules, got {count}',
|
||||
)
|
||||
|
||||
def test_calibrate_updates_existing_values(self):
|
||||
"""If a rule value is changed in DB, re-running restores it."""
|
||||
# Pick a rule and mutate it.
|
||||
rule = WorkoutStructureRule.objects.filter(
|
||||
section_type='working',
|
||||
goal_type='strength',
|
||||
).first()
|
||||
original_rounds = rule.typical_rounds
|
||||
rule.typical_rounds = 99
|
||||
rule.save()
|
||||
|
||||
# Re-run calibration.
|
||||
call_command('calibrate_structure_rules')
|
||||
|
||||
rule.refresh_from_db()
|
||||
self.assertEqual(
|
||||
rule.typical_rounds, original_rounds,
|
||||
f'Expected rounds to be restored to {original_rounds}, '
|
||||
f'got {rule.typical_rounds}',
|
||||
)
|
||||
244
generator/tests/test_warmup_selector.py
Normal file
244
generator/tests/test_warmup_selector.py
Normal file
@@ -0,0 +1,244 @@
|
||||
from django.contrib.auth.models import User
|
||||
from django.test import TestCase
|
||||
|
||||
from exercise.models import Exercise
|
||||
from generator.models import UserPreference
|
||||
from generator.services.exercise_selector import ExerciseSelector
|
||||
from registered_user.models import RegisteredUser
|
||||
|
||||
|
||||
class TestWarmupSelector(TestCase):
|
||||
def setUp(self):
|
||||
django_user = User.objects.create_user(
|
||||
username='warmup_selector_user',
|
||||
password='testpass123',
|
||||
)
|
||||
registered_user = RegisteredUser.objects.create(
|
||||
user=django_user,
|
||||
first_name='Warmup',
|
||||
last_name='Tester',
|
||||
)
|
||||
self.preference = UserPreference.objects.create(
|
||||
registered_user=registered_user,
|
||||
days_per_week=4,
|
||||
fitness_level=2,
|
||||
)
|
||||
|
||||
def test_warmup_excludes_working_set_movements(self):
|
||||
dynamic_1 = Exercise.objects.create(
|
||||
name='Dynamic Warmup A',
|
||||
movement_patterns='dynamic stretch, mobility - dynamic, activation, warm up',
|
||||
is_duration=True,
|
||||
is_reps=False,
|
||||
is_weight=False,
|
||||
is_compound=False,
|
||||
exercise_tier='accessory',
|
||||
hr_elevation_rating=2,
|
||||
complexity_rating=2,
|
||||
difficulty_level='beginner',
|
||||
)
|
||||
dynamic_2 = Exercise.objects.create(
|
||||
name='Dynamic Warmup B',
|
||||
movement_patterns='mobility - dynamic, cardio/locomotion, balance',
|
||||
is_duration=True,
|
||||
is_reps=False,
|
||||
is_weight=False,
|
||||
is_compound=False,
|
||||
exercise_tier='accessory',
|
||||
hr_elevation_rating=3,
|
||||
complexity_rating=2,
|
||||
difficulty_level='beginner',
|
||||
)
|
||||
|
||||
weighted_press = Exercise.objects.create(
|
||||
name='Lying Dumbbell Tricep Extension',
|
||||
movement_patterns='upper push - horizontal, upper push, arms',
|
||||
is_duration=True,
|
||||
is_reps=False,
|
||||
is_weight=True,
|
||||
is_compound=False,
|
||||
exercise_tier='secondary',
|
||||
hr_elevation_rating=2,
|
||||
complexity_rating=2,
|
||||
difficulty_level='intermediate',
|
||||
)
|
||||
duration_push = Exercise.objects.create(
|
||||
name='Floor Press Hold',
|
||||
movement_patterns='upper push - horizontal, upper push',
|
||||
is_duration=True,
|
||||
is_reps=False,
|
||||
is_weight=False,
|
||||
is_compound=False,
|
||||
exercise_tier='secondary',
|
||||
hr_elevation_rating=2,
|
||||
complexity_rating=2,
|
||||
difficulty_level='intermediate',
|
||||
)
|
||||
|
||||
selector = ExerciseSelector(self.preference)
|
||||
selected = selector.select_warmup_exercises(target_muscles=[], count=4)
|
||||
|
||||
selected_ids = {ex.pk for ex in selected}
|
||||
|
||||
self.assertIn(dynamic_1.pk, selected_ids)
|
||||
self.assertIn(dynamic_2.pk, selected_ids)
|
||||
self.assertNotIn(weighted_press.pk, selected_ids)
|
||||
self.assertNotIn(duration_push.pk, selected_ids)
|
||||
|
||||
def test_warmup_keeps_side_specific_variants_adjacent(self):
|
||||
left_variant_a = Exercise.objects.create(
|
||||
name='Side Lying T Stretch',
|
||||
side='left_arm',
|
||||
movement_patterns='dynamic stretch, mobility - dynamic, warm up',
|
||||
is_duration=True,
|
||||
is_reps=False,
|
||||
is_weight=False,
|
||||
is_compound=False,
|
||||
exercise_tier='accessory',
|
||||
hr_elevation_rating=2,
|
||||
complexity_rating=1,
|
||||
difficulty_level='beginner',
|
||||
)
|
||||
right_variant_a = Exercise.objects.create(
|
||||
name='Side Lying T Stretch',
|
||||
side='right_arm',
|
||||
movement_patterns='dynamic stretch, mobility - dynamic, warm up',
|
||||
is_duration=True,
|
||||
is_reps=False,
|
||||
is_weight=False,
|
||||
is_compound=False,
|
||||
exercise_tier='accessory',
|
||||
hr_elevation_rating=2,
|
||||
complexity_rating=1,
|
||||
difficulty_level='beginner',
|
||||
)
|
||||
left_variant_b = Exercise.objects.create(
|
||||
name='Quadruped Adductor Stretch with Thoracic Rotation',
|
||||
side='left_side',
|
||||
movement_patterns='dynamic stretch, mobility - dynamic, warm up',
|
||||
is_duration=True,
|
||||
is_reps=False,
|
||||
is_weight=False,
|
||||
is_compound=False,
|
||||
exercise_tier='accessory',
|
||||
hr_elevation_rating=2,
|
||||
complexity_rating=1,
|
||||
difficulty_level='beginner',
|
||||
)
|
||||
right_variant_b = Exercise.objects.create(
|
||||
name='Quadruped Adductor Stretch with Thoracic Rotation',
|
||||
side='right_side',
|
||||
movement_patterns='dynamic stretch, mobility - dynamic, warm up',
|
||||
is_duration=True,
|
||||
is_reps=False,
|
||||
is_weight=False,
|
||||
is_compound=False,
|
||||
exercise_tier='accessory',
|
||||
hr_elevation_rating=2,
|
||||
complexity_rating=1,
|
||||
difficulty_level='beginner',
|
||||
)
|
||||
|
||||
selector = ExerciseSelector(self.preference)
|
||||
selected = selector.select_warmup_exercises(target_muscles=[], count=4)
|
||||
|
||||
selected_ids = [ex.pk for ex in selected]
|
||||
self.assertEqual(
|
||||
set(selected_ids),
|
||||
{left_variant_a.pk, right_variant_a.pk, left_variant_b.pk, right_variant_b.pk},
|
||||
)
|
||||
|
||||
side_pairs = {}
|
||||
for idx, ex in enumerate(selected):
|
||||
key = selector._strip_side_tokens(ex.name)
|
||||
side_pairs.setdefault(key, []).append(idx)
|
||||
|
||||
self.assertEqual(len(side_pairs['side lying t stretch']), 2)
|
||||
self.assertEqual(len(side_pairs['quadruped adductor stretch with thoracic rotation']), 2)
|
||||
self.assertEqual(
|
||||
side_pairs['side lying t stretch'][1],
|
||||
side_pairs['side lying t stretch'][0] + 1,
|
||||
)
|
||||
self.assertEqual(
|
||||
side_pairs['quadruped adductor stretch with thoracic rotation'][1],
|
||||
side_pairs['quadruped adductor stretch with thoracic rotation'][0] + 1,
|
||||
)
|
||||
|
||||
def test_cooldown_keeps_side_specific_variants_adjacent(self):
|
||||
left_variant_a = Exercise.objects.create(
|
||||
name="Matsyendra's Pose",
|
||||
side='left_side',
|
||||
movement_patterns='static stretch, cool down',
|
||||
is_duration=True,
|
||||
is_reps=False,
|
||||
is_weight=False,
|
||||
is_compound=False,
|
||||
exercise_tier='accessory',
|
||||
hr_elevation_rating=1,
|
||||
complexity_rating=1,
|
||||
difficulty_level='beginner',
|
||||
)
|
||||
right_variant_a = Exercise.objects.create(
|
||||
name="Matsyendra's Pose",
|
||||
side='right_side',
|
||||
movement_patterns='static stretch, cool down',
|
||||
is_duration=True,
|
||||
is_reps=False,
|
||||
is_weight=False,
|
||||
is_compound=False,
|
||||
exercise_tier='accessory',
|
||||
hr_elevation_rating=1,
|
||||
complexity_rating=1,
|
||||
difficulty_level='beginner',
|
||||
)
|
||||
left_variant_b = Exercise.objects.create(
|
||||
name='Miniband Reverse Clamshell',
|
||||
side='left_leg',
|
||||
movement_patterns='mobility - static, cooldown',
|
||||
is_duration=True,
|
||||
is_reps=False,
|
||||
is_weight=False,
|
||||
is_compound=False,
|
||||
exercise_tier='accessory',
|
||||
hr_elevation_rating=1,
|
||||
complexity_rating=1,
|
||||
difficulty_level='beginner',
|
||||
)
|
||||
right_variant_b = Exercise.objects.create(
|
||||
name='Miniband Reverse Clamshell',
|
||||
side='right_leg',
|
||||
movement_patterns='mobility - static, cooldown',
|
||||
is_duration=True,
|
||||
is_reps=False,
|
||||
is_weight=False,
|
||||
is_compound=False,
|
||||
exercise_tier='accessory',
|
||||
hr_elevation_rating=1,
|
||||
complexity_rating=1,
|
||||
difficulty_level='beginner',
|
||||
)
|
||||
|
||||
selector = ExerciseSelector(self.preference)
|
||||
selected = selector.select_cooldown_exercises(target_muscles=[], count=4)
|
||||
|
||||
selected_ids = [ex.pk for ex in selected]
|
||||
self.assertEqual(
|
||||
set(selected_ids),
|
||||
{left_variant_a.pk, right_variant_a.pk, left_variant_b.pk, right_variant_b.pk},
|
||||
)
|
||||
|
||||
side_pairs = {}
|
||||
for idx, ex in enumerate(selected):
|
||||
key = selector._strip_side_tokens(ex.name)
|
||||
side_pairs.setdefault(key, []).append(idx)
|
||||
|
||||
self.assertEqual(len(side_pairs["matsyendra's pose"]), 2)
|
||||
self.assertEqual(len(side_pairs['miniband reverse clamshell']), 2)
|
||||
self.assertEqual(
|
||||
side_pairs["matsyendra's pose"][1],
|
||||
side_pairs["matsyendra's pose"][0] + 1,
|
||||
)
|
||||
self.assertEqual(
|
||||
side_pairs['miniband reverse clamshell'][1],
|
||||
side_pairs['miniband reverse clamshell'][0] + 1,
|
||||
)
|
||||
251
generator/tests/test_weekly_split.py
Normal file
251
generator/tests/test_weekly_split.py
Normal file
@@ -0,0 +1,251 @@
|
||||
"""
|
||||
Tests for _pick_weekly_split() — Item #3: DB-backed WeeklySplitPattern selection.
|
||||
"""
|
||||
from collections import Counter
|
||||
|
||||
from django.contrib.auth import get_user_model
|
||||
from django.test import TestCase
|
||||
from unittest.mock import patch, MagicMock, PropertyMock
|
||||
|
||||
from generator.models import (
|
||||
MuscleGroupSplit,
|
||||
UserPreference,
|
||||
WeeklySplitPattern,
|
||||
WorkoutType,
|
||||
)
|
||||
from generator.services.workout_generator import WorkoutGenerator, DEFAULT_SPLITS
|
||||
from registered_user.models import RegisteredUser
|
||||
|
||||
User = get_user_model()
|
||||
|
||||
|
||||
class TestWeeklySplit(TestCase):
|
||||
"""Tests for _pick_weekly_split() using DB-backed WeeklySplitPattern records."""
|
||||
|
||||
@classmethod
|
||||
def setUpTestData(cls):
|
||||
# Create Django auth user
|
||||
cls.auth_user = User.objects.create_user(
|
||||
username='testsplit', password='testpass123',
|
||||
)
|
||||
cls.registered_user = RegisteredUser.objects.create(
|
||||
first_name='Test', last_name='Split', user=cls.auth_user,
|
||||
)
|
||||
|
||||
# Create MuscleGroupSplits
|
||||
cls.full_body = MuscleGroupSplit.objects.create(
|
||||
muscle_names=['chest', 'back', 'shoulders', 'quads', 'hamstrings'],
|
||||
label='Full Body',
|
||||
split_type='full_body',
|
||||
frequency=10,
|
||||
)
|
||||
cls.upper = MuscleGroupSplit.objects.create(
|
||||
muscle_names=['chest', 'back', 'shoulders', 'biceps', 'triceps'],
|
||||
label='Upper',
|
||||
split_type='upper',
|
||||
frequency=8,
|
||||
)
|
||||
cls.lower = MuscleGroupSplit.objects.create(
|
||||
muscle_names=['quads', 'hamstrings', 'glutes', 'calves'],
|
||||
label='Lower',
|
||||
split_type='lower',
|
||||
frequency=8,
|
||||
)
|
||||
|
||||
# Create patterns for 3 days/week
|
||||
cls.pattern_3day = WeeklySplitPattern.objects.create(
|
||||
days_per_week=3,
|
||||
pattern=[cls.full_body.pk, cls.upper.pk, cls.lower.pk],
|
||||
pattern_labels=['Full Body', 'Upper', 'Lower'],
|
||||
frequency=15,
|
||||
rest_day_positions=[3, 5, 6],
|
||||
)
|
||||
cls.pattern_3day_low = WeeklySplitPattern.objects.create(
|
||||
days_per_week=3,
|
||||
pattern=[cls.upper.pk, cls.lower.pk, cls.full_body.pk],
|
||||
pattern_labels=['Upper', 'Lower', 'Full Body'],
|
||||
frequency=2,
|
||||
)
|
||||
|
||||
def _make_preference(self, days_per_week=3):
|
||||
"""Create a UserPreference for testing."""
|
||||
pref = UserPreference.objects.create(
|
||||
registered_user=self.registered_user,
|
||||
days_per_week=days_per_week,
|
||||
fitness_level=2,
|
||||
primary_goal='general_fitness',
|
||||
)
|
||||
return pref
|
||||
|
||||
def _make_generator(self, pref):
|
||||
"""Create a WorkoutGenerator with mocked ExerciseSelector and PlanBuilder."""
|
||||
with patch('generator.services.workout_generator.ExerciseSelector'), \
|
||||
patch('generator.services.workout_generator.PlanBuilder'):
|
||||
gen = WorkoutGenerator(pref)
|
||||
return gen
|
||||
|
||||
def test_uses_db_patterns_when_available(self):
|
||||
"""When WeeklySplitPattern records exist for the days_per_week,
|
||||
_pick_weekly_split should return splits derived from them."""
|
||||
pref = self._make_preference(days_per_week=3)
|
||||
gen = self._make_generator(pref)
|
||||
|
||||
splits, rest_days = gen._pick_weekly_split()
|
||||
|
||||
# Should have 3 splits (from the 3-day patterns)
|
||||
self.assertEqual(len(splits), 3)
|
||||
|
||||
# Each split should have label, muscles, split_type
|
||||
for s in splits:
|
||||
self.assertIn('label', s)
|
||||
self.assertIn('muscles', s)
|
||||
self.assertIn('split_type', s)
|
||||
|
||||
# Split types should come from our MuscleGroupSplit records
|
||||
split_types = {s['split_type'] for s in splits}
|
||||
self.assertTrue(
|
||||
split_types.issubset({'full_body', 'upper', 'lower'}),
|
||||
f"Unexpected split types: {split_types}",
|
||||
)
|
||||
|
||||
# Clean up
|
||||
pref.delete()
|
||||
|
||||
def test_falls_back_to_defaults(self):
|
||||
"""When no WeeklySplitPattern exists for the requested days_per_week,
|
||||
DEFAULT_SPLITS should be used."""
|
||||
pref = self._make_preference(days_per_week=5)
|
||||
gen = self._make_generator(pref)
|
||||
|
||||
splits, rest_days = gen._pick_weekly_split()
|
||||
|
||||
# Should have 5 splits from DEFAULT_SPLITS[5]
|
||||
self.assertEqual(len(splits), len(DEFAULT_SPLITS[5]))
|
||||
|
||||
# rest_days should be empty for default fallback
|
||||
self.assertEqual(rest_days, [])
|
||||
|
||||
pref.delete()
|
||||
|
||||
def test_frequency_weighting(self):
|
||||
"""Higher-frequency patterns should be chosen more often."""
|
||||
pref = self._make_preference(days_per_week=3)
|
||||
gen = self._make_generator(pref)
|
||||
|
||||
first_pattern_count = 0
|
||||
runs = 200
|
||||
|
||||
for _ in range(runs):
|
||||
splits, _ = gen._pick_weekly_split()
|
||||
# The high-frequency pattern starts with Full Body
|
||||
if splits[0]['label'] == 'Full Body':
|
||||
first_pattern_count += 1
|
||||
|
||||
# pattern_3day has frequency=15, pattern_3day_low has frequency=2
|
||||
# Expected ratio: ~15/17 = ~88%
|
||||
# With 200 runs, high-freq pattern should be chosen at least 60% of the time
|
||||
ratio = first_pattern_count / runs
|
||||
self.assertGreater(
|
||||
ratio, 0.6,
|
||||
f"High-frequency pattern chosen only {ratio:.0%} of the time "
|
||||
f"(expected > 60%)",
|
||||
)
|
||||
|
||||
pref.delete()
|
||||
|
||||
def test_rest_day_positions_propagated(self):
|
||||
"""rest_day_positions from the chosen pattern should be returned."""
|
||||
pref = self._make_preference(days_per_week=3)
|
||||
gen = self._make_generator(pref)
|
||||
|
||||
# Run multiple times to ensure we eventually get the high-freq pattern
|
||||
found_rest_days = False
|
||||
for _ in range(50):
|
||||
splits, rest_days = gen._pick_weekly_split()
|
||||
if rest_days:
|
||||
found_rest_days = True
|
||||
# The high-freq pattern has rest_day_positions=[3, 5, 6]
|
||||
self.assertEqual(rest_days, [3, 5, 6])
|
||||
break
|
||||
|
||||
self.assertTrue(
|
||||
found_rest_days,
|
||||
"Expected rest_day_positions to be propagated from at least one run",
|
||||
)
|
||||
|
||||
pref.delete()
|
||||
|
||||
def test_clamps_days_per_week(self):
|
||||
"""days_per_week should be clamped to 1-7."""
|
||||
pref = self._make_preference(days_per_week=10)
|
||||
gen = self._make_generator(pref)
|
||||
|
||||
splits, _ = gen._pick_weekly_split()
|
||||
|
||||
# clamped to 7, which uses DEFAULT_SPLITS[7] (no DB patterns for 7)
|
||||
self.assertEqual(len(splits), len(DEFAULT_SPLITS[7]))
|
||||
|
||||
pref.delete()
|
||||
|
||||
def test_handles_missing_muscle_group_split(self):
|
||||
"""If a split_id in the pattern references a deleted MuscleGroupSplit,
|
||||
it should be gracefully skipped."""
|
||||
# Create a pattern with one bogus ID
|
||||
bad_pattern = WeeklySplitPattern.objects.create(
|
||||
days_per_week=2,
|
||||
pattern=[self.full_body.pk, 99999], # 99999 doesn't exist
|
||||
pattern_labels=['Full Body', 'Missing'],
|
||||
frequency=10,
|
||||
)
|
||||
|
||||
pref = self._make_preference(days_per_week=2)
|
||||
gen = self._make_generator(pref)
|
||||
|
||||
splits, _ = gen._pick_weekly_split()
|
||||
|
||||
# Should get 1 split (the valid one) since the bad ID is skipped
|
||||
# But since we have 1 valid split, splits should be non-empty
|
||||
self.assertGreaterEqual(len(splits), 1)
|
||||
self.assertEqual(splits[0]['label'], 'Full Body')
|
||||
|
||||
bad_pattern.delete()
|
||||
pref.delete()
|
||||
|
||||
@patch('generator.services.workout_generator.random.random', return_value=0.0)
|
||||
def test_diversifies_repetitive_four_day_pattern(self, _mock_random):
|
||||
"""
|
||||
A 4-day DB pattern with 3 lower-body days should be diversified so
|
||||
split_type repetition does not dominate the week.
|
||||
"""
|
||||
lower_a = MuscleGroupSplit.objects.create(
|
||||
muscle_names=['glutes', 'hamstrings', 'core'],
|
||||
label='Lower A',
|
||||
split_type='lower',
|
||||
frequency=9,
|
||||
)
|
||||
lower_b = MuscleGroupSplit.objects.create(
|
||||
muscle_names=['quads', 'glutes', 'calves'],
|
||||
label='Lower B',
|
||||
split_type='lower',
|
||||
frequency=9,
|
||||
)
|
||||
WeeklySplitPattern.objects.create(
|
||||
days_per_week=4,
|
||||
pattern=[self.lower.pk, lower_a.pk, lower_b.pk, self.full_body.pk],
|
||||
pattern_labels=['Lower', 'Lower A', 'Lower B', 'Full Body'],
|
||||
frequency=50,
|
||||
)
|
||||
|
||||
pref = self._make_preference(days_per_week=4)
|
||||
gen = self._make_generator(pref)
|
||||
|
||||
splits, _ = gen._pick_weekly_split()
|
||||
self.assertEqual(len(splits), 4)
|
||||
|
||||
split_type_counts = Counter(s['split_type'] for s in splits)
|
||||
self.assertLessEqual(
|
||||
split_type_counts.get('lower', 0), 2,
|
||||
f"Expected diversification to avoid 3+ lower days, got: {split_type_counts}",
|
||||
)
|
||||
|
||||
pref.delete()
|
||||
136
generator/tests/test_workout_generation_modules.py
Normal file
136
generator/tests/test_workout_generation_modules.py
Normal file
@@ -0,0 +1,136 @@
|
||||
from django.test import SimpleTestCase
|
||||
|
||||
from generator.services.workout_generation.entry_rules import (
|
||||
apply_rep_volume_floor,
|
||||
pick_reps_for_exercise,
|
||||
working_rest_seconds,
|
||||
)
|
||||
from generator.services.workout_generation.focus import (
|
||||
focus_key_for_exercise,
|
||||
has_duplicate_focus,
|
||||
)
|
||||
from generator.services.workout_generation.modality import (
|
||||
clamp_duration_bias,
|
||||
plan_superset_modalities,
|
||||
)
|
||||
from generator.services.workout_generation.pattern_planning import (
|
||||
merge_pattern_preferences,
|
||||
rotated_muscle_subset,
|
||||
working_position_label,
|
||||
)
|
||||
from generator.services.workout_generation.recovery import is_recovery_exercise
|
||||
from generator.services.workout_generation.scaling import apply_fitness_scaling
|
||||
from generator.services.workout_generation.section_builders import (
|
||||
build_duration_entries,
|
||||
build_section_superset,
|
||||
section_exercise_count,
|
||||
)
|
||||
|
||||
|
||||
class _Rng:
|
||||
def __init__(self, randint_values=None):
|
||||
self._randint_values = list(randint_values or [])
|
||||
|
||||
def randint(self, low, high):
|
||||
if self._randint_values:
|
||||
return self._randint_values.pop(0)
|
||||
return low
|
||||
|
||||
def shuffle(self, arr):
|
||||
# Deterministic for tests.
|
||||
return None
|
||||
|
||||
|
||||
class _Ex:
|
||||
def __init__(self, **kwargs):
|
||||
self.__dict__.update(kwargs)
|
||||
|
||||
|
||||
class TestWorkoutGenerationModules(SimpleTestCase):
|
||||
def test_section_count_and_duration_entries(self):
|
||||
rng = _Rng([6, 27, 31])
|
||||
self.assertEqual(section_exercise_count('warmup', 1, rng=rng), 6)
|
||||
|
||||
exercises = [_Ex(name='A'), _Ex(name='B')]
|
||||
entries = build_duration_entries(
|
||||
exercises,
|
||||
duration_min=20,
|
||||
duration_max=40,
|
||||
min_duration=20,
|
||||
duration_multiple=5,
|
||||
rng=rng,
|
||||
)
|
||||
self.assertEqual(entries[0]['duration'], 25)
|
||||
self.assertEqual(entries[1]['duration'], 30)
|
||||
section = build_section_superset('Warm Up', entries)
|
||||
self.assertEqual(section['name'], 'Warm Up')
|
||||
self.assertEqual(section['rounds'], 1)
|
||||
|
||||
def test_scaling_and_rest_floor(self):
|
||||
params = {
|
||||
'rep_min': 4,
|
||||
'rep_max': 10,
|
||||
'rounds': (3, 4),
|
||||
'rest_between_rounds': 60,
|
||||
}
|
||||
scaling = {
|
||||
1: {'rep_min_mult': 1.1, 'rep_max_mult': 1.2, 'rounds_adj': -1, 'rest_adj': 15},
|
||||
2: {'rep_min_mult': 1.0, 'rep_max_mult': 1.0, 'rounds_adj': 0, 'rest_adj': 0},
|
||||
}
|
||||
out = apply_fitness_scaling(
|
||||
params,
|
||||
fitness_level=1,
|
||||
scaling_config=scaling,
|
||||
min_reps=6,
|
||||
min_reps_strength=1,
|
||||
is_strength=True,
|
||||
)
|
||||
self.assertGreaterEqual(out['rep_min'], 5)
|
||||
self.assertEqual(working_rest_seconds(-5, 0), 15)
|
||||
|
||||
def test_modality_helpers(self):
|
||||
self.assertEqual(clamp_duration_bias(0.9, (0.2, 0.6)), 0.6)
|
||||
modalities = plan_superset_modalities(
|
||||
num_supersets=4,
|
||||
duration_bias=0.5,
|
||||
duration_bias_range=(0.25, 0.5),
|
||||
is_strength_workout=False,
|
||||
rng=_Rng(),
|
||||
)
|
||||
self.assertEqual(len(modalities), 4)
|
||||
self.assertTrue(any(modalities))
|
||||
|
||||
def test_pattern_and_focus_helpers(self):
|
||||
self.assertEqual(working_position_label(0, 3), 'early')
|
||||
self.assertEqual(working_position_label(1, 3), 'middle')
|
||||
self.assertEqual(working_position_label(2, 3), 'late')
|
||||
self.assertEqual(
|
||||
merge_pattern_preferences(['upper pull', 'core'], ['core', 'lunge']),
|
||||
['core'],
|
||||
)
|
||||
self.assertEqual(
|
||||
rotated_muscle_subset(['a', 'b', 'c'], 1),
|
||||
['b', 'c', 'a'],
|
||||
)
|
||||
|
||||
curl_a = _Ex(name='Alternating Bicep Curls', movement_patterns='upper pull')
|
||||
curl_b = _Ex(name='Bicep Curls', movement_patterns='upper pull')
|
||||
self.assertEqual(focus_key_for_exercise(curl_a), 'bicep_curl')
|
||||
self.assertTrue(has_duplicate_focus([curl_a, curl_b]))
|
||||
|
||||
def test_recovery_and_rep_selection(self):
|
||||
stretch = _Ex(name='Supine Pec Stretch - T', movement_patterns='mobility - static')
|
||||
self.assertTrue(is_recovery_exercise(stretch))
|
||||
|
||||
ex = _Ex(exercise_tier='primary')
|
||||
reps = pick_reps_for_exercise(
|
||||
ex,
|
||||
{'rep_min': 8, 'rep_max': 12},
|
||||
{'primary': (3, 6)},
|
||||
rng=_Rng([5]),
|
||||
)
|
||||
self.assertEqual(reps, 5)
|
||||
|
||||
entries = [{'reps': 3}, {'duration': 30}]
|
||||
apply_rep_volume_floor(entries, rounds=3, min_volume=12)
|
||||
self.assertEqual(entries[0]['reps'], 4)
|
||||
430
generator/tests/test_workout_research_generation.py
Normal file
430
generator/tests/test_workout_research_generation.py
Normal file
@@ -0,0 +1,430 @@
|
||||
"""
|
||||
Integration tests for research-backed workout generation.
|
||||
|
||||
These tests validate generated workouts against the expectations encoded from
|
||||
workout_research.md in generator.rules_engine.
|
||||
"""
|
||||
|
||||
import random
|
||||
from contextlib import contextmanager
|
||||
from datetime import date, timedelta
|
||||
from itertools import combinations
|
||||
|
||||
from django.contrib.auth import get_user_model
|
||||
from django.core.management import call_command
|
||||
from django.test import TestCase
|
||||
|
||||
from equipment.models import Equipment
|
||||
from equipment.models import WorkoutEquipment
|
||||
from exercise.models import Exercise
|
||||
from generator.models import UserPreference, WorkoutType
|
||||
from generator.rules_engine import DB_CALIBRATION, validate_workout
|
||||
from generator.services.workout_generator import WorkoutGenerator
|
||||
from muscle.models import ExerciseMuscle, Muscle
|
||||
from registered_user.models import RegisteredUser
|
||||
|
||||
|
||||
@contextmanager
|
||||
def seeded_random(seed):
|
||||
"""Use a deterministic random seed without leaking global random state."""
|
||||
state = random.getstate()
|
||||
random.seed(seed)
|
||||
try:
|
||||
yield
|
||||
finally:
|
||||
random.setstate(state)
|
||||
|
||||
|
||||
class TestWorkoutResearchGeneration(TestCase):
|
||||
"""
|
||||
TDD coverage for end-to-end generated workout quality:
|
||||
1) One workout per workout type
|
||||
2) Workouts for deterministic random workout-type pairs
|
||||
"""
|
||||
|
||||
MUSCLE_NAMES = [
|
||||
'chest',
|
||||
'upper back',
|
||||
'lats',
|
||||
'deltoids',
|
||||
'quads',
|
||||
'hamstrings',
|
||||
'glutes',
|
||||
'core',
|
||||
'biceps',
|
||||
'triceps',
|
||||
'calves',
|
||||
'forearms',
|
||||
'abs',
|
||||
'obliques',
|
||||
]
|
||||
|
||||
SPLITS_BY_TYPE = {
|
||||
'traditional_strength_training': {
|
||||
'label': 'Strength Day',
|
||||
'muscles': ['quads', 'hamstrings', 'glutes', 'core'],
|
||||
'split_type': 'lower',
|
||||
},
|
||||
'hypertrophy': {
|
||||
'label': 'Hypertrophy Day',
|
||||
'muscles': ['chest', 'upper back', 'deltoids', 'biceps', 'triceps'],
|
||||
'split_type': 'upper',
|
||||
},
|
||||
'high_intensity_interval_training': {
|
||||
'label': 'HIIT Day',
|
||||
'muscles': ['chest', 'upper back', 'quads', 'core'],
|
||||
'split_type': 'full_body',
|
||||
},
|
||||
'functional_strength_training': {
|
||||
'label': 'Functional Day',
|
||||
'muscles': ['chest', 'upper back', 'quads', 'hamstrings', 'core'],
|
||||
'split_type': 'full_body',
|
||||
},
|
||||
'cross_training': {
|
||||
'label': 'Cross Day',
|
||||
'muscles': ['chest', 'upper back', 'quads', 'core'],
|
||||
'split_type': 'full_body',
|
||||
},
|
||||
'core_training': {
|
||||
'label': 'Core Day',
|
||||
'muscles': ['abs', 'obliques', 'core'],
|
||||
'split_type': 'core',
|
||||
},
|
||||
'flexibility': {
|
||||
'label': 'Mobility Day',
|
||||
'muscles': ['hamstrings', 'glutes', 'core'],
|
||||
'split_type': 'full_body',
|
||||
},
|
||||
'cardio': {
|
||||
'label': 'Cardio Day',
|
||||
'muscles': ['quads', 'calves', 'core'],
|
||||
'split_type': 'cardio',
|
||||
},
|
||||
}
|
||||
|
||||
@classmethod
|
||||
def setUpTestData(cls):
|
||||
User = get_user_model()
|
||||
auth_user = User.objects.create_user(
|
||||
username='research_gen',
|
||||
password='testpass123',
|
||||
)
|
||||
cls.registered_user = RegisteredUser.objects.create(
|
||||
first_name='Research',
|
||||
last_name='Generator',
|
||||
user=auth_user,
|
||||
)
|
||||
|
||||
# Keep equipment filtering permissive without triggering "no equipment" fallback warnings.
|
||||
cls.bodyweight = Equipment.objects.create(
|
||||
name='Bodyweight',
|
||||
category='none',
|
||||
is_weight=False,
|
||||
)
|
||||
|
||||
cls.preference = UserPreference.objects.create(
|
||||
registered_user=cls.registered_user,
|
||||
days_per_week=5,
|
||||
fitness_level=2,
|
||||
primary_goal='general_fitness',
|
||||
secondary_goal='',
|
||||
preferred_workout_duration=90,
|
||||
)
|
||||
cls.preference.available_equipment.add(cls.bodyweight)
|
||||
|
||||
cls.muscles = {}
|
||||
for name in cls.MUSCLE_NAMES:
|
||||
cls.muscles[name] = Muscle.objects.create(name=name)
|
||||
|
||||
cls.workout_types = {}
|
||||
for wt_name, fields in DB_CALIBRATION.items():
|
||||
wt, _ = WorkoutType.objects.get_or_create(
|
||||
name=wt_name,
|
||||
defaults={
|
||||
'display_name': wt_name.replace('_', ' ').title(),
|
||||
'description': f'Calibrated {wt_name}',
|
||||
**fields,
|
||||
},
|
||||
)
|
||||
# Keep DB values aligned with calibration regardless of fixtures/migrations.
|
||||
update_fields = []
|
||||
for field_name, field_value in fields.items():
|
||||
if getattr(wt, field_name) != field_value:
|
||||
setattr(wt, field_name, field_value)
|
||||
update_fields.append(field_name)
|
||||
if update_fields:
|
||||
wt.save(update_fields=update_fields)
|
||||
cls.workout_types[wt_name] = wt
|
||||
cls.preference.preferred_workout_types.add(wt)
|
||||
|
||||
# Populate all workout-structure expectations for all goals/sections.
|
||||
call_command('calibrate_structure_rules')
|
||||
|
||||
cls._seed_exercise_pool()
|
||||
|
||||
@classmethod
|
||||
def _create_exercise(
|
||||
cls,
|
||||
name,
|
||||
movement_patterns,
|
||||
*,
|
||||
is_weight,
|
||||
is_duration,
|
||||
is_reps,
|
||||
is_compound,
|
||||
exercise_tier='secondary',
|
||||
hr_elevation_rating=6,
|
||||
complexity_rating=3,
|
||||
difficulty_level='intermediate',
|
||||
stretch_position='mid',
|
||||
):
|
||||
ex = Exercise.objects.create(
|
||||
name=name,
|
||||
movement_patterns=movement_patterns,
|
||||
muscle_groups=', '.join(cls.MUSCLE_NAMES),
|
||||
is_weight=is_weight,
|
||||
is_duration=is_duration,
|
||||
is_reps=is_reps,
|
||||
is_compound=is_compound,
|
||||
exercise_tier=exercise_tier,
|
||||
hr_elevation_rating=hr_elevation_rating,
|
||||
complexity_rating=complexity_rating,
|
||||
difficulty_level=difficulty_level,
|
||||
stretch_position=stretch_position,
|
||||
estimated_rep_duration=3.0,
|
||||
)
|
||||
# Attach broad muscle mappings so split filtering has high coverage.
|
||||
for muscle in cls.muscles.values():
|
||||
ExerciseMuscle.objects.create(exercise=ex, muscle=muscle)
|
||||
return ex
|
||||
|
||||
@classmethod
|
||||
def _seed_exercise_pool(cls):
|
||||
working_patterns = [
|
||||
'lower push - squat, lower push, upper push, upper pull, core',
|
||||
'lower pull - hip hinge, lower pull, upper push, upper pull, core',
|
||||
'upper push - horizontal, upper push, upper pull, core',
|
||||
'upper pull - horizontal, upper pull, upper push, core',
|
||||
'upper push - vertical, upper push, upper pull, core',
|
||||
'upper pull - vertical, upper pull, upper push, core',
|
||||
'carry, core, lower push, upper pull',
|
||||
'cardio/locomotion, upper push, upper pull, core',
|
||||
'plyometric, lower push, upper pull, upper push, core',
|
||||
'arms, upper push, upper pull, core',
|
||||
]
|
||||
|
||||
duration_patterns = [
|
||||
'cardio/locomotion, upper push, upper pull, core',
|
||||
'plyometric, upper push, upper pull, lower push, core',
|
||||
'core - anti-extension, cardio/locomotion, upper push, upper pull',
|
||||
'core - anti-rotation, cardio/locomotion, upper push, upper pull',
|
||||
'core - anti-lateral flexion, cardio/locomotion, upper push, upper pull',
|
||||
]
|
||||
|
||||
for idx in range(60):
|
||||
cls._create_exercise(
|
||||
name=f'Engine Move {idx + 1:02d}',
|
||||
movement_patterns=working_patterns[idx % len(working_patterns)],
|
||||
is_weight=True,
|
||||
is_duration=False,
|
||||
is_reps=True,
|
||||
is_compound=True,
|
||||
exercise_tier='secondary',
|
||||
hr_elevation_rating=6,
|
||||
)
|
||||
|
||||
for idx in range(40):
|
||||
cls._create_exercise(
|
||||
name=f'Interval Move {idx + 1:02d}',
|
||||
movement_patterns=duration_patterns[idx % len(duration_patterns)],
|
||||
is_weight=False,
|
||||
is_duration=True,
|
||||
is_reps=True,
|
||||
is_compound=True,
|
||||
exercise_tier='secondary',
|
||||
hr_elevation_rating=8,
|
||||
)
|
||||
|
||||
for idx in range(14):
|
||||
cls._create_exercise(
|
||||
name=f'Warmup Flow {idx + 1:02d}',
|
||||
movement_patterns='dynamic stretch, activation, mobility, warm up',
|
||||
is_weight=False,
|
||||
is_duration=True,
|
||||
is_reps=False,
|
||||
is_compound=False,
|
||||
exercise_tier='accessory',
|
||||
hr_elevation_rating=3,
|
||||
complexity_rating=2,
|
||||
stretch_position='lengthened',
|
||||
)
|
||||
|
||||
for idx in range(14):
|
||||
cls._create_exercise(
|
||||
name=f'Cooldown Stretch {idx + 1:02d}',
|
||||
movement_patterns='static stretch, mobility, yoga, cool down',
|
||||
is_weight=False,
|
||||
is_duration=True,
|
||||
is_reps=False,
|
||||
is_compound=False,
|
||||
exercise_tier='accessory',
|
||||
hr_elevation_rating=2,
|
||||
complexity_rating=2,
|
||||
stretch_position='lengthened',
|
||||
)
|
||||
|
||||
def _set_goal(self, goal):
|
||||
self.preference.primary_goal = goal
|
||||
self.preference.secondary_goal = ''
|
||||
self.preference.save(update_fields=['primary_goal', 'secondary_goal'])
|
||||
|
||||
def _generate_workout_for_type(self, wt_name, *, seed, goal='general_fitness', day_offset=0):
|
||||
self._set_goal(goal)
|
||||
generator = WorkoutGenerator(self.preference, duration_override=90)
|
||||
split = dict(self.SPLITS_BY_TYPE[wt_name])
|
||||
with seeded_random(seed):
|
||||
workout = generator.generate_single_workout(
|
||||
muscle_split=split,
|
||||
workout_type=self.workout_types[wt_name],
|
||||
scheduled_date=date(2026, 3, 2) + timedelta(days=day_offset),
|
||||
)
|
||||
return workout, list(generator.warnings)
|
||||
|
||||
def _assert_research_alignment(self, workout_spec, wt_name, goal, context, generation_warnings=None):
|
||||
violations = validate_workout(workout_spec, wt_name, goal)
|
||||
blocking = [v for v in violations if v.severity in {'error', 'warning'}]
|
||||
|
||||
messages = [f'[{v.severity}] {v.rule_id}: {v.message}' for v in violations]
|
||||
self.assertEqual(
|
||||
len(blocking),
|
||||
0,
|
||||
(
|
||||
f'{context} failed strict research validation for {wt_name}/{goal}. '
|
||||
f'Violations: {messages}'
|
||||
),
|
||||
)
|
||||
|
||||
working = [
|
||||
ss for ss in workout_spec.get('supersets', [])
|
||||
if ss.get('name', '').startswith('Working')
|
||||
]
|
||||
self.assertGreaterEqual(
|
||||
len(working), 1,
|
||||
f'{context} should have at least one working superset.',
|
||||
)
|
||||
|
||||
if generation_warnings is not None:
|
||||
self.assertEqual(
|
||||
generation_warnings,
|
||||
[],
|
||||
f'{context} emitted generation warnings: {generation_warnings}',
|
||||
)
|
||||
|
||||
def test_generate_one_workout_for_each_type_matches_research(self):
|
||||
"""
|
||||
Generate one workout per workout type and ensure each passes
|
||||
research-backed rules validation.
|
||||
"""
|
||||
for idx, wt_name in enumerate(DB_CALIBRATION.keys(), start=1):
|
||||
workout, generation_warnings = self._generate_workout_for_type(
|
||||
wt_name,
|
||||
seed=7000 + idx,
|
||||
goal='general_fitness',
|
||||
day_offset=idx,
|
||||
)
|
||||
self._assert_research_alignment(
|
||||
workout,
|
||||
wt_name,
|
||||
'general_fitness',
|
||||
context='single-type generation',
|
||||
generation_warnings=generation_warnings,
|
||||
)
|
||||
|
||||
def test_generate_deterministic_random_workout_type_pairs(self):
|
||||
"""
|
||||
Generate workouts for deterministic random pairs of workout types.
|
||||
Each workout in every pair must satisfy research-backed rules.
|
||||
"""
|
||||
all_pairs = list(combinations(DB_CALIBRATION.keys(), 2))
|
||||
rng = random.Random(20260223)
|
||||
sampled_pairs = rng.sample(all_pairs, 8)
|
||||
|
||||
for pair_idx, (wt_a, wt_b) in enumerate(sampled_pairs):
|
||||
workout_a, warnings_a = self._generate_workout_for_type(
|
||||
wt_a,
|
||||
seed=8100 + pair_idx * 10,
|
||||
goal='general_fitness',
|
||||
day_offset=pair_idx * 2,
|
||||
)
|
||||
self._assert_research_alignment(
|
||||
workout_a,
|
||||
wt_a,
|
||||
'general_fitness',
|
||||
context=f'random-pair[{pair_idx}] first',
|
||||
generation_warnings=warnings_a,
|
||||
)
|
||||
|
||||
workout_b, warnings_b = self._generate_workout_for_type(
|
||||
wt_b,
|
||||
seed=8100 + pair_idx * 10 + 1,
|
||||
goal='general_fitness',
|
||||
day_offset=pair_idx * 2 + 1,
|
||||
)
|
||||
self._assert_research_alignment(
|
||||
workout_b,
|
||||
wt_b,
|
||||
'general_fitness',
|
||||
context=f'random-pair[{pair_idx}] second',
|
||||
generation_warnings=warnings_b,
|
||||
)
|
||||
|
||||
def test_generation_honors_exclusions_and_equipment_preferences(self):
|
||||
"""Generated workouts should not include excluded exercises or unavailable equipment."""
|
||||
wt_name = 'functional_strength_training'
|
||||
wt = self.workout_types[wt_name]
|
||||
|
||||
# Restrict user to only Bodyweight equipment and exclude one candidate exercise.
|
||||
self.preference.available_equipment.clear()
|
||||
self.preference.available_equipment.add(self.bodyweight)
|
||||
excluded = Exercise.objects.filter(name='Engine Move 01').first()
|
||||
self.assertIsNotNone(excluded)
|
||||
self.preference.excluded_exercises.add(excluded)
|
||||
|
||||
workout, generation_warnings = self._generate_workout_for_type(
|
||||
wt_name,
|
||||
seed=9401,
|
||||
goal='general_fitness',
|
||||
day_offset=10,
|
||||
)
|
||||
|
||||
all_exercises = []
|
||||
for ss in workout.get('supersets', []):
|
||||
for entry in ss.get('exercises', []):
|
||||
ex = entry.get('exercise')
|
||||
if ex is not None:
|
||||
all_exercises.append(ex)
|
||||
|
||||
self.assertTrue(all_exercises, 'Expected at least one exercise in generated workout.')
|
||||
self.assertNotIn(
|
||||
excluded.pk,
|
||||
{ex.pk for ex in all_exercises},
|
||||
'Excluded exercise was found in generated workout.',
|
||||
)
|
||||
|
||||
ex_ids = [ex.pk for ex in all_exercises]
|
||||
available_equipment_ids = {self.bodyweight.pk}
|
||||
requirements = {}
|
||||
for ex_id, eq_id in WorkoutEquipment.objects.filter(
|
||||
exercise_id__in=ex_ids,
|
||||
).values_list('exercise_id', 'equipment_id'):
|
||||
requirements.setdefault(ex_id, set()).add(eq_id)
|
||||
bad_equipment = [
|
||||
ex_id for ex_id, required_ids in requirements.items()
|
||||
if required_ids and not required_ids.issubset(available_equipment_ids)
|
||||
]
|
||||
self.assertEqual(
|
||||
bad_equipment,
|
||||
[],
|
||||
f'Found exercises requiring unavailable equipment: {bad_equipment}',
|
||||
)
|
||||
self.assertEqual(generation_warnings, [])
|
||||
45
generator/urls.py
Normal file
45
generator/urls.py
Normal file
@@ -0,0 +1,45 @@
|
||||
from django.urls import path
|
||||
from . import views
|
||||
|
||||
urlpatterns = [
|
||||
# Preferences
|
||||
path('preferences/', views.get_preferences, name='get_preferences'),
|
||||
path('preferences/update/', views.update_preferences, name='update_preferences'),
|
||||
|
||||
# Plan generation & listing
|
||||
path('generate/', views.generate_plan, name='generate_plan'),
|
||||
path('plans/', views.list_plans, name='list_plans'),
|
||||
path('plans/<int:plan_id>/', views.plan_detail, name='plan_detail'),
|
||||
|
||||
# Workout actions
|
||||
path('workout/<int:workout_id>/accept/', views.accept_workout, name='accept_workout'),
|
||||
path('workout/<int:workout_id>/reject/', views.reject_workout, name='reject_workout'),
|
||||
path('workout/<int:workout_id>/rate/', views.rate_workout, name='rate_workout'),
|
||||
path('workout/<int:workout_id>/regenerate/', views.regenerate_workout, name='regenerate_workout'),
|
||||
|
||||
# Edit actions (delete day / superset / exercise, swap exercise)
|
||||
path('workout/<int:workout_id>/delete/', views.delete_workout_day, name='delete_workout_day'),
|
||||
path('superset/<int:superset_id>/delete/', views.delete_superset, name='delete_superset'),
|
||||
path('superset-exercise/<int:exercise_id>/delete/', views.delete_superset_exercise, name='delete_superset_exercise'),
|
||||
path('superset-exercise/<int:exercise_id>/swap/', views.swap_exercise, name='swap_exercise'),
|
||||
path('exercise/<int:exercise_id>/similar/', views.similar_exercises, name='similar_exercises'),
|
||||
|
||||
# Reference data (for preference UI)
|
||||
path('muscles/', views.list_muscles, name='list_muscles'),
|
||||
path('equipment/', views.list_equipment, name='list_equipment'),
|
||||
path('workout-types/', views.list_workout_types, name='list_workout_types'),
|
||||
|
||||
# Confirm (batch-accept) a plan
|
||||
path('plans/<int:plan_id>/confirm/', views.confirm_plan, name='confirm_plan'),
|
||||
|
||||
# Preview-based generation
|
||||
path('preview/', views.preview_plan, name='preview_plan'),
|
||||
path('preview-day/', views.preview_day, name='preview_day'),
|
||||
path('save-plan/', views.save_plan, name='save_plan'),
|
||||
|
||||
# Analysis
|
||||
path('analysis/stats/', views.analysis_stats, name='analysis_stats'),
|
||||
|
||||
# Generation rules
|
||||
path('rules/', views.generation_rules, name='generation_rules'),
|
||||
]
|
||||
1213
generator/views.py
Normal file
1213
generator/views.py
Normal file
File diff suppressed because it is too large
Load Diff
482
hardening-report.md
Normal file
482
hardening-report.md
Normal file
@@ -0,0 +1,482 @@
|
||||
# Hardening Audit Report — Werkout API (Django/Python)
|
||||
|
||||
## Audit Sources
|
||||
- 5 mapper agents (100% file coverage)
|
||||
- 8 specialized domain auditors (parallel)
|
||||
- 1 cross-cutting deep audit (parallel)
|
||||
- Total source files: 75
|
||||
|
||||
---
|
||||
|
||||
## CRITICAL — Will crash or lose data (18 findings)
|
||||
|
||||
**1. werkout_api/settings.py:16** | DEBUG=True hardcoded, never disabled in production
|
||||
- What: `DEBUG = True` set at module level. Production branch (when `DATABASE_URL` set) never overrides to `False` — the code is commented out (lines 142-157). `CORS_ALLOW_ALL_ORIGINS` on line 226 depends on DEBUG, so it's always `True`.
|
||||
- Impact: Full stack traces, SQL queries, internal paths exposed to end users. CORS allows any origin with credentials.
|
||||
- Source: Security, Config, Cross-cutting
|
||||
|
||||
**2. werkout_api/settings.py:160** | SECRET_KEY falls back to 'secret'
|
||||
- What: `SECRET_KEY = os.environ.get("SECRET_KEY", 'secret')`. Neither `docker-compose.yml` nor any env file sets SECRET_KEY.
|
||||
- Impact: Session cookies, CSRF tokens, password hashes use a publicly known key. Complete auth bypass.
|
||||
- Source: Security, Config
|
||||
|
||||
**3. werkout_api/settings.py:226** | CORS allows all origins with credentials in production
|
||||
- What: `CORS_ALLOW_ALL_ORIGINS = True if DEBUG else False` is always `True` (DEBUG never False). Combined with `CORS_ALLOW_CREDENTIALS = True` (line 231).
|
||||
- Impact: Any website can make authenticated cross-origin requests and steal data.
|
||||
- Source: Security, Config
|
||||
|
||||
**4. registered_user/serializers.py:31** | Password hash exposed in API responses
|
||||
- What: `write_only_fields = ('password',)` is NOT a valid DRF Meta option. Correct: `extra_kwargs = {'password': {'write_only': True}}`. Password field is readable.
|
||||
- Impact: Hashed password returned in registration responses. Enables offline brute-force.
|
||||
- Source: Security, API, Cross-cutting
|
||||
|
||||
**5. registered_user/views.py:83-90** | update_registered_user uses request.POST — JSON requests wipe user data
|
||||
- What: `request.POST.get(...)` only works for form-encoded data. JSON requests return `None` for all fields. Lines 88-90 set `first_name=None`, `email=None`, etc. and save.
|
||||
- Impact: Any JSON profile update silently corrupts user data. Email set to None breaks login.
|
||||
- Source: Security, Logic, Cross-cutting
|
||||
|
||||
**6. registered_user/views.py:108-114** | Password update broken for JSON clients, can lock out user
|
||||
- What: `request.POST.get("new_password")` returns `None` for JSON. `set_password(None)` makes password uncheckable, permanently locking user out.
|
||||
- Impact: Password endpoint non-functional for JSON clients. Potential permanent account lockout.
|
||||
- Source: Security, Cross-cutting
|
||||
|
||||
**7. registered_user/serializers.py:46** | Registration creates RegisteredUser with non-existent phone_number field
|
||||
- What: `RegisteredUser.objects.create(phone_number=self.context.get("phone_number"), ...)` — model has no `phone_number` field (removed in migration 0002).
|
||||
- Impact: User registration crashes with TypeError if phone_number is passed in context.
|
||||
- Source: Cross-cutting, Logic
|
||||
|
||||
**8. scripts/views.py:43-45** | Anonymous cache wipe endpoint — no authentication
|
||||
- What: `clear_redis` view has no auth decorators. Active in `scripts/urls.py`. Any anonymous request wipes entire Redis cache.
|
||||
- Impact: Denial of service — any internet user can flush all cached data at will.
|
||||
- Source: Security
|
||||
|
||||
**9. video/views.py:50-59** | Path traversal vulnerability in hls_videos
|
||||
- What: `video_name` and `video_type` from GET params concatenated directly into file paths without sanitization. `../../etc/passwd` sequences can access arbitrary files.
|
||||
- Impact: Arbitrary file read on the server. Route commented out in urls.py but view exists.
|
||||
- Source: Security
|
||||
|
||||
**10. video/views.py:74** | Celery task called with zero arguments but requires filename
|
||||
- What: `create_hls_tasks.delay()` called with no args. Task signature `create_hls_tasks(filename)` requires one.
|
||||
- Impact: Every call to `/videos/create_hls/` crashes the Celery worker with TypeError.
|
||||
- Source: Celery, Cross-cutting
|
||||
|
||||
**11. supervisord.conf:13** | Production runs Django dev server (runserver) instead of WSGI
|
||||
- What: `python manage.py runserver 0.0.0.0:8000` in production. `uwsgi.ini` exists but is unused.
|
||||
- Impact: Single-threaded, no request timeouts, not designed for production. Memory leaks.
|
||||
- Source: Config
|
||||
|
||||
**12. supervisord.conf** | No Celery worker process configured
|
||||
- What: Only `django` and `nextjs` programs defined. No `[program:celery]` entry.
|
||||
- Impact: All `.delay()` calls queue tasks in Redis that are never consumed. Entire async task system non-functional.
|
||||
- Source: Celery, Config
|
||||
|
||||
**13. supervisord.conf:13** | Auto-migrate on every container start
|
||||
- What: `python manage.py migrate` in startup command runs migrations automatically without review.
|
||||
- Impact: Destructive migrations run silently. Race conditions if multiple containers start simultaneously.
|
||||
- Source: Config
|
||||
|
||||
**14. docker-compose.yml:8-10,26** | Database credentials hardcoded as postgres/postgres
|
||||
- What: `POSTGRES_USER=postgres`, `POSTGRES_PASSWORD=postgres` in compose file and DATABASE_URL. No `.env` override.
|
||||
- Impact: Trivial unauthorized access if database port exposed. Credentials in git history permanently.
|
||||
- Source: Security, Config
|
||||
|
||||
**15. AI/workouts.py + AI/cho/workouts.py** | 86K lines of PII data committed to git
|
||||
- What: Two files totaling 86,000+ lines of user workout data from Future Fitness API with user IDs, S3 URLs, timestamps.
|
||||
- Impact: PII permanently in git history. Potential GDPR/privacy liability.
|
||||
- Source: Security, Config
|
||||
|
||||
**16. generator/views.py:1032-1160** | save_plan has no transaction wrapping
|
||||
- What: Creates GeneratedWeeklyPlan, then loops creating Workout, Superset, SupersetExercise, GeneratedWorkout, PlannedWorkout objects. No `transaction.atomic()`.
|
||||
- Impact: Mid-loop failure (e.g., date parsing) leaves orphaned plan records. Partially saved plans with missing days.
|
||||
- Source: Data Integrity, Cross-cutting
|
||||
|
||||
**17. generator/views.py:789-803** | confirm_plan has no transaction wrapping
|
||||
- What: Loops through generated workouts, saves status, deletes/creates PlannedWorkouts individually.
|
||||
- Impact: Partial plan confirmation — some days accepted, others not, on any mid-loop error.
|
||||
- Source: Data Integrity
|
||||
|
||||
**18. registered_user/serializers.py:34-51** | User + RegisteredUser creation has no transaction
|
||||
- What: `User.objects.create()`, `set_password()`, `RegisteredUser.objects.create()`, `Token.objects.create()` — four DB ops with no `transaction.atomic()`.
|
||||
- Impact: Orphaned User records if RegisteredUser creation fails. Ghost users block re-registration.
|
||||
- Source: Data Integrity, Cross-cutting
|
||||
|
||||
---
|
||||
|
||||
## BUG — Incorrect behavior (28 findings)
|
||||
|
||||
**1. registered_user/views.py:30,47** | Validation errors return HTTP 500 instead of 400
|
||||
- Impact: Clients can't distinguish server errors from bad input.
|
||||
|
||||
**2. registered_user/views.py:74** | Failed login returns 404 instead of 401
|
||||
- Impact: Wrong HTTP semantics for auth failures.
|
||||
|
||||
**3. registered_user/models.py:20** | `__str__` concatenates nullable last_name — TypeError
|
||||
- Impact: Admin, logging crash for users with null last_name.
|
||||
|
||||
**4. registered_user/admin.py:11** | Token.objects.get crashes if no token exists
|
||||
- Impact: Admin list page crashes if any user lacks a Token.
|
||||
|
||||
**5. equipment/models.py:13** | `__str__` concatenates nullable category/name — TypeError
|
||||
- Impact: Admin crashes for Equipment with null fields.
|
||||
|
||||
**6. muscle/models.py:11** | `__str__` returns None when name is null
|
||||
- Impact: Violates `__str__` contract. Admin/template crashes.
|
||||
|
||||
**7. workout/views.py:45** | Workout.objects.get with no DoesNotExist handling
|
||||
- Impact: Missing workouts return 500 instead of 404.
|
||||
|
||||
**8. workout/views.py:60,143,165** | Validation errors return HTTP 500 instead of 400
|
||||
- Impact: Three views misreport client errors as server errors.
|
||||
|
||||
**9. workout/views.py:69** | GET endpoint returns 201 Created instead of 200
|
||||
- Impact: Incorrect HTTP semantics for read operation.
|
||||
|
||||
**10. workout/views.py:76** | Unreachable None check — .get() raises exception, never returns None
|
||||
- Impact: Dead code; actual DoesNotExist is unhandled (500 error).
|
||||
|
||||
**11. workout/views.py:124** | estimated_rep_duration None multiplication crashes
|
||||
- What: `exercise["reps"] * exercise_obj.estimated_rep_duration` where field can be null. `int * None` = TypeError.
|
||||
- Impact: Workout creation crashes mid-loop, orphaning partial records (no transaction).
|
||||
|
||||
**12. workout/serializers.py:37** | KeyError if 'notes' not in validated_data
|
||||
- Impact: Completing a workout without notes crashes with 500.
|
||||
|
||||
**13. workout/serializers.py:40** | Wrong attribute name — health_kit UUID never persisted
|
||||
- What: Sets `completed_workout.workout_uuid` but model field is `health_kit_workout_uuid`.
|
||||
- Impact: HealthKit UUIDs silently discarded forever.
|
||||
|
||||
**14. workout/tasks.py:85** | estimated_rep_duration None multiplication in Celery task
|
||||
- Impact: Bulk import crashes mid-way, leaving partial data.
|
||||
|
||||
**15. workout/tasks.py:73** | Exercise.objects.get with no DoesNotExist handling
|
||||
- Impact: One missing exercise aborts entire import.
|
||||
|
||||
**16. workout/urls.py:14** | Duplicate URL name 'plan workout' on two paths
|
||||
- Impact: `reverse('plan workout')` resolves to wrong URL.
|
||||
|
||||
**17. scripts/views.py:37** | NameError: MuscleGroup is not defined
|
||||
- What: Catches `MuscleGroup.DoesNotExist` but only `Muscle` is imported.
|
||||
- Impact: NameError crashes endpoint instead of catching intended exception.
|
||||
|
||||
**18. scripts/views.py:15** | equipment_required.split() crashes on None
|
||||
- Impact: sync_equipment crashes for any exercise with null equipment_required.
|
||||
|
||||
**19. video/models.py:24** | save() missing *args in signature
|
||||
- Impact: Callers passing positional args (force_insert) get TypeError.
|
||||
|
||||
**20. video/models.py:24-27** | HLS transcoding triggered on EVERY save, not just file changes
|
||||
- Impact: Redundant expensive ffmpeg jobs on metadata-only edits.
|
||||
|
||||
**21. video/serializers.py:13** | video_file can be None — AttributeError
|
||||
- Impact: Video listing crashes if any Video has no file.
|
||||
|
||||
**22. video/tasks.py:10** | Existence check uses wrong filename pattern — never matches
|
||||
- Impact: Guard clause never short-circuits; re-encodes every time.
|
||||
|
||||
**23. generator/views.py:70** | RegisteredUser.objects.get repeated ~17 times with no DoesNotExist handling
|
||||
- Impact: Any user without RegisteredUser gets unhandled 500 on every generator endpoint.
|
||||
|
||||
**24. superset/helpers.py:16** | Exercise.objects.get("First Up") with no error handling
|
||||
- Impact: Workout detail crashes if "First Up" exercise is missing.
|
||||
|
||||
**25. superset/serializers.py:20** | get_unique_id returns random UUID per serialization
|
||||
- Impact: Frontend can't use unique_id as stable key. Breaks diffing/caching.
|
||||
|
||||
**26. workout/models.py:51** | settings not imported — NameError on duration_audio()/weight_audio()
|
||||
- What: Relies on `from exercise.models import *` transitive import of settings.
|
||||
- Impact: NameError if transitive chain breaks.
|
||||
|
||||
**27. workout_generator.py:909** | None multiplication when duration is None
|
||||
- Impact: Plan generation crashes if preferences have no duration set.
|
||||
|
||||
**28. workout_generator.py:802** | sum(c.difficulty) crashes if any difficulty is None
|
||||
- Impact: Plan generation crashes for users with incomplete completion records.
|
||||
|
||||
---
|
||||
|
||||
## SILENT FAILURE — Error swallowed or ignored (5 findings)
|
||||
|
||||
**1. generator/views.py:193,491,874,989,1156** | Broad except Exception catches all errors, leaks str(e)
|
||||
- Impact: Bugs masked. Internal details leaked to clients.
|
||||
|
||||
**2. superset/helpers.py:19-23** | In-memory mutations on Exercise ORM object never saved
|
||||
- Impact: Changes silently lost. Risk of corrupting shared Exercise if accidentally saved.
|
||||
|
||||
**3. workout/helpers.py:41** | ser_data.mutable = True is a no-op
|
||||
- Impact: No effect — `.mutable` is only meaningful on `QueryDict`; setting it on serializer output suggests the author confused serializer data with `request.POST`.
|
||||
|
||||
**4. audit_exercise_data.py:168-170** | except Exception: pass silently swallows all errors
|
||||
- Impact: Database errors during field checks silently ignored.
|
||||
|
||||
**5. workout/views.py:32** | Infinite cache with incomplete invalidation
|
||||
- Impact: Generated workouts never appear in all_workouts until manual cache clear.
|
||||
|
||||
---
|
||||
|
||||
## RACE CONDITION — Concurrency issue (1 finding)
|
||||
|
||||
**1. registered_user/views.py:34** | Email uniqueness check is a race condition
|
||||
- What: `User.objects.filter(email=email)` check followed by `serializer.save()`. No DB unique constraint visible.
|
||||
- Impact: Concurrent registrations can create duplicate email accounts.
|
||||
|
||||
---
|
||||
|
||||
## LOGIC ERROR — Code doesn't match intent (12 findings)
|
||||
|
||||
**1. rules_engine.py:650** | Push/pull ratio check skipped when either count is zero
|
||||
- What: Condition requires both counts > 0. A workout with 2 push, 0 pull passes silently.
|
||||
- Impact: Unbalanced push-heavy workouts pass validation.
|
||||
|
||||
**2. rules_engine.py:858-860** | Workout type match is a no-op for non-strength types
|
||||
- What: Non-strength branch unconditionally counts every exercise as matching (100% always).
|
||||
- Impact: HIIT/cardio/core workouts can contain arbitrary exercises without violations.
|
||||
|
||||
**3. workout_generator.py:1459** | Workout type affinity matching NEVER works
|
||||
- What: `SPLIT_TYPE_WORKOUT_AFFINITY` uses underscore names like `'traditional_strength_training'` but comparison uses `wt.name.strip().lower()` which yields space-separated names.
|
||||
- Impact: All workout type assignments fall through to round-robin fallback. Push splits get assigned random types.
|
||||
|
||||
**4. workout_generator.py:2070** | Modality check counts exercise capability, not actual assignment
|
||||
- What: Checks `ex.is_duration` (capability flag) not whether the entry was actually given duration.
|
||||
- Impact: False modality calculations for dual-modality exercises.
|
||||
|
||||
**5. workout_generator.py:1404** | Diversify type count wrong on replacement
|
||||
- What: Doesn't subtract from the removed type count when replacing, only adds to candidate count.
|
||||
- Impact: Valid replacements rejected. Invalid ones accepted in edge cases.
|
||||
|
||||
**6. workout_generator.py:2898** | Final conformance treats all warnings as blocking
|
||||
- What: `_is_blocking_final_violation` returns True for `severity in {'error', 'warning'}`.
|
||||
- Impact: Workouts crash with ValueError for minor advisory issues (cooldown missing, duration bias slightly off).
|
||||
|
||||
**7. workout_generator.py:1209** | Recursive retry destroys cross-day dedup state
|
||||
- What: Failed attempt's exercises already recorded in week state via `accumulate_week_state`. Retry with different exercises creates ghost entries.
|
||||
- Impact: Later days in the week have artificially smaller exercise pools.
|
||||
|
||||
**8. entry_rules.py:19** | Volume floor can violate workout type rep ranges
|
||||
- What: With `min_volume=12` and `rounds=1`, forces 12 reps. Strength (3-6 rep range) gets 12 reps.
|
||||
- Impact: Strength workouts get inflated reps contradicting their character.
|
||||
|
||||
**9. rules_engine.py:441** | Push/pull counting double-counts dual-pattern exercises
|
||||
- What: Exercise with `'upper push, upper pull'` counted in BOTH push AND pull totals.
|
||||
- Impact: Inaccurate push:pull ratio calculations.
|
||||
|
||||
**10. exercise_selector.py:631** | No-equipment path restricts to bodyweight only (contradicts docs)
|
||||
- What: MEMORY.md says "no equipment set = all exercises available." Code excludes all exercises with equipment entries.
|
||||
- Impact: Users without equipment config get dramatically reduced pool.
|
||||
|
||||
**11. muscle_normalizer.py:163** | Glutes in both lower_push and lower_pull categories
|
||||
- Impact: Glute-dominant workouts get incorrect split classification, cascading into wrong type assignments.
|
||||
|
||||
**12. exercise_selector.py:1274** | Substring partner matching causes false positives
|
||||
- What: `if base_name.lower() in partner.name.lower()` — "Curl" matches "Barbell Curl Right", "Hammer Curl Right", etc.
|
||||
- Impact: Wrong exercises paired as L/R counterparts.
|
||||
|
||||
---
|
||||
|
||||
## PERFORMANCE — Unnecessary cost (18 findings)
|
||||
|
||||
**1. exercise/serializers.py:30,35** | N+1 per exercise for muscles + equipment (~3400+ queries on cache miss)
|
||||
- Impact: `/exercise/all/` cold cache: 1133 exercises × 3 queries each.
|
||||
|
||||
**2. workout/serializers.py:56-77** | Triple N+1 on WorkoutSerializer (~5000+ queries)
|
||||
- Impact: `all_workouts` cache miss: 633 workouts × (muscles + equipment + exercise_count).
|
||||
|
||||
**3. superset/serializers.py:32** | N+1 per superset for exercises, cascading through ExerciseSerializer
|
||||
- Impact: Each workout detail triggers O(supersets × exercises × 3) queries.
|
||||
|
||||
**4. workout/helpers.py:14-71** | Cascade of N+1 queries in exercise list builder
|
||||
- Impact: ~80+ queries per workout detail (supersets + exercises + serializer chain).
|
||||
|
||||
**5. generator/serializers.py:338** | N+1 for supersets in GeneratedWorkoutDetailSerializer
|
||||
- Impact: Plan detail views trigger dozens of cascading queries per day.
|
||||
|
||||
**6. generator/views.py:1106** | Exercise.objects.get in triple-nested loop in save_plan
|
||||
- Impact: 5-day plan with 5 supersets × 3 exercises = 75 individual SELECT queries.
|
||||
|
||||
**7. muscle_normalizer.py:218** | ExerciseMuscle query per exercise in analyzer (~19,000 queries)
|
||||
- Impact: `analyze_workouts` command fires ~19,000 queries for 633 workouts.
|
||||
|
||||
**8. workout_analyzer.py:1332-1337** | 120 exists() checks in _step7
|
||||
- Impact: 8 types × 3 sections × 5 goals = 120 individual queries.
|
||||
|
||||
**9. recalculate_workout_times.py:53-58** | Triple-nested N+1 with no prefetch (~18,000 queries)
|
||||
- Impact: Command takes orders of magnitude longer than necessary.
|
||||
|
||||
**10. exercise_selector.py:593,629** | M2M querysets not cached (excluded_exercises + available_equipment)
|
||||
- Impact: 15-30 redundant identical queries per workout generation.
|
||||
|
||||
**11. populate_exercise_fields.py:1006** | Individual save() per exercise (1133 UPDATE queries)
|
||||
- Impact: Command takes minutes instead of seconds. No bulk_update.
|
||||
|
||||
**12. plan_builder.py:64,82** | Redundant save() after create() on Workout and Superset
|
||||
- Impact: 2 unnecessary DB writes per superset creation.
|
||||
|
||||
**13. Various views** | Infinite cache with no invalidation strategy
|
||||
- What: equipment, exercise, muscle, workout, video views all use `cache.set(key, data, timeout=None)` with no invalidation.
|
||||
- Impact: New/edited data never appears until manual cache clear or restart.
|
||||
|
||||
**14. workout/serializers.py:109** | Redundant re-fetch of registered_user
|
||||
- Impact: Extra query per workout detail for no reason.
|
||||
|
||||
**15. generator/views.py:570-572,604-607** | N+1 save pattern for re-ordering after delete
|
||||
- Impact: Up to N individual UPDATEs instead of 1 bulk_update.
|
||||
|
||||
**16. generator/views.py:423-429,964-976** | N+1 for sibling exercise exclusion
|
||||
- Impact: N queries instead of 1 IN query for sibling workout exercises.
|
||||
|
||||
**17. generator/views.py:70** | RegisteredUser.objects.get repeated 17x with no caching
|
||||
- Impact: 1 unnecessary query per API request across all generator endpoints.
|
||||
|
||||
**18. exercise_selector.py:1063** | Potentially large retry loop in _weighted_pick
|
||||
- What: `max_attempts = len(pool) * 3` with weighted pools of 500+ entries = 1500+ iterations.
|
||||
- Impact: CPU-bound stall risk in constrained pools.
|
||||
|
||||
---
|
||||
|
||||
## SECURITY — Vulnerability or exposure (6 additional findings)
|
||||
|
||||
**1. werkout_api/settings.py:140** | ALLOWED_HOSTS=['*'] in production
|
||||
- Impact: HTTP Host header injection, cache poisoning, password reset URL manipulation.
|
||||
|
||||
**2. werkout_api/settings.py:1-231** | Missing all HTTPS/security hardening settings
|
||||
- What: No SECURE_SSL_REDIRECT, SECURE_HSTS_SECONDS, SESSION_COOKIE_SECURE, CSRF_COOKIE_SECURE, etc.
|
||||
- Impact: Cookies sent over plaintext HTTP. No HSTS protection.
|
||||
|
||||
**3. werkout_api/settings.py:31** | Django Debug Toolbar enabled unconditionally
|
||||
- Impact: Exposes SQL queries, settings, request data at `/__debug__/` in production.
|
||||
|
||||
**4. workout/views.py:24-33** | all_workouts returns ALL users' workouts (IDOR)
|
||||
- What: `Workout.objects.all()` with no ownership filter.
|
||||
- Impact: Any authenticated user sees every user's workout data.
|
||||
|
||||
**5. workout/views.py:39-49** | workout_details has no ownership check (IDOR)
|
||||
- What: Any authenticated user can view any workout by guessing IDs.
|
||||
- Impact: Insecure Direct Object Reference.
|
||||
|
||||
**6. workout/views.py:170-172** | GET endpoint triggers data mutation — bulk import
|
||||
- What: GET triggers Celery task importing workouts for hardcoded user IDs. Any authenticated user can trigger.
|
||||
- Impact: A safe-method (GET) request performs a data mutation, violating HTTP semantics — link prefetchers, crawlers, or browser preloading can silently trigger the bulk import and corrupt data.
|
||||
|
||||
---
|
||||
|
||||
## DATA INTEGRITY — Database/model consistency issues (5 findings)
|
||||
|
||||
**1. workout/views.py:94-138** | add_workout has no transaction wrapping
|
||||
- Impact: Partial Workout/Superset records on mid-loop failure.
|
||||
|
||||
**2. plan_builder.py:59-149** | create_workout_from_spec has no transaction wrapping
|
||||
- Impact: Core builder used by all generation paths creates orphaned records on error.
|
||||
|
||||
**3. workout_analyzer.py:249-252** | _clear_existing_patterns deletes without transaction
|
||||
- Impact: If analysis crashes mid-way, ML pattern tables are empty with no recovery.
|
||||
|
||||
**4. workout/tasks.py:11-101** | Bulk import has no transaction or idempotency
|
||||
- Impact: Partial imports, duplicate records on re-run.
|
||||
|
||||
**5. workout/views.py:150** | datetime.now() without timezone in USE_TZ=True project
|
||||
- Impact: Incorrect PlannedWorkout filtering near midnight due to timezone mismatch.
|
||||
|
||||
---
|
||||
|
||||
## MODERNIZATION — Legacy pattern to update (4 findings)
|
||||
|
||||
**1. Dockerfile:13** | Python 3.9.13 base image (EOL October 2025)
|
||||
- Impact: No further security patches.
|
||||
|
||||
**2. requirements.txt** | All dependencies pinned to mid-2023 versions
|
||||
- Impact: Django 4.2.2 has had multiple security releases since.
|
||||
|
||||
**3. supervisord.conf:24** | Next.js runs `next dev` in production
|
||||
- Impact: No production optimizations, source maps exposed.
|
||||
|
||||
**4. Various models** | max_length on IntegerField/FloatField (no-op parameters)
|
||||
- What: 10+ fields across superset, workout, exercise models use meaningless `max_length` on numeric fields.
|
||||
- Impact: Misleading — suggests validation that doesn't exist.
|
||||
|
||||
---
|
||||
|
||||
## DEAD CODE / UNREACHABLE (4 findings)
|
||||
|
||||
**1. exercise/serializers.py:5** | Import shadowed by local class definition
|
||||
- What: Imports `ExerciseMuscleSerializer` then redefines it locally.
|
||||
|
||||
**2. exercise/models.py:4** | `from random import randrange` — imported but never used
|
||||
|
||||
**3. audit_exercise_data.py:88-89** | Dead `.exclude()` clause — logically impossible condition
|
||||
|
||||
**4. workout/views.py:76** | Unreachable None check after `.get()`
|
||||
|
||||
---
|
||||
|
||||
## FRAGILE — Works now but will break easily (5 findings)
|
||||
|
||||
**1. exercise_selector.py:613** | Hard exclude to soft penalty conversion mutates instance state permanently
|
||||
- What: `_warned_small_pool` guard uses `hasattr` which survives `reset()`.
|
||||
- Impact: Once triggered, ALL subsequent selections treat hard-excluded exercises with soft penalty only.
|
||||
|
||||
**2. exercise_selector.py:645** | Equipment map cache survives reset() — stale data possible
|
||||
- Impact: Low risk per-request but dangerous in long-running processes.
|
||||
|
||||
**3. workout_generator.py:1046** | Working superset detection relies on name prefix 'Working'
|
||||
- Impact: Any naming inconsistency silently breaks trimming, padding, modality validation, compound ordering, rebalancing.
|
||||
|
||||
**4. workout/models.py:51** | settings import via wildcard chain from exercise.models
|
||||
- Impact: Transitive dependency breaks if `*` re-export chain changes.
|
||||
|
||||
**5. exercise_selector.py:260** | Working set exclusion icontains 'stretch' catches valid exercises
|
||||
- Impact: Exercises like "Stiff Leg Deadlift Stretch Position" incorrectly excluded from working sets.
|
||||
|
||||
---
|
||||
|
||||
## Summary
|
||||
|
||||
### Summary by Category
|
||||
| Category | Count |
|
||||
|----------|-------|
|
||||
| Critical | 18 |
|
||||
| Bug | 28 |
|
||||
| Silent Failure | 5 |
|
||||
| Race Condition | 1 |
|
||||
| Logic Error | 12 |
|
||||
| Performance | 18 |
|
||||
| Security | 6 |
|
||||
| Data Integrity | 5 |
|
||||
| Modernization | 4 |
|
||||
| Dead Code | 4 |
|
||||
| Fragile | 5 |
|
||||
| **Total** | **106** |
|
||||
|
||||
### Summary by Source
|
||||
| Source | Findings |
|
||||
|--------|----------|
|
||||
| Security Auditor | 34 |
|
||||
| Data Integrity/ORM Auditor | 64 |
|
||||
| Logic Errors Auditor | 42 |
|
||||
| Performance Auditor | 41 |
|
||||
| Generator Logic Auditor | 22 |
|
||||
| API Correctness Auditor | 43 |
|
||||
| Celery/Async Auditor | 24 |
|
||||
| Config/Deployment Auditor | 30 |
|
||||
| Cross-cutting Deep Audit | 35 |
|
||||
| *(after dedup)* | **106 unique** |
|
||||
|
||||
### Top 10 Priorities
|
||||
|
||||
1. **[CRITICAL] settings.py — DEBUG=True + SECRET_KEY='secret' + CORS wide open in production** — Three compounding security misconfigurations that enable session forgery, CSRF bypass, and full API data theft from any website.
|
||||
|
||||
2. **[CRITICAL] registered_user/views.py:83-90 — request.POST wipes user data on JSON update** — Any JSON profile update sets email, name, image all to None. Active, reachable endpoint.
|
||||
|
||||
3. **[CRITICAL] registered_user/serializers.py:31 — Password hash exposed in API** — Invalid DRF Meta option means hashed password is readable in registration responses.
|
||||
|
||||
4. **[CRITICAL] scripts/views.py:43 — Anonymous cache wipe** — Unauthenticated endpoint wipes entire Redis cache. Active route, no auth required.
|
||||
|
||||
5. **[CRITICAL] supervisord.conf — No Celery worker + dev server in production** — All async tasks (HLS transcoding, imports) silently queue and never execute. Django dev server handles all production traffic.
|
||||
|
||||
6. **[CRITICAL] generator/views.py — No transaction.atomic() on save_plan/confirm_plan** — Multi-object creation loops with no transaction wrapping leave orphaned records on any failure.
|
||||
|
||||
7. **[BUG] workout/serializers.py:40 — HealthKit UUID silently discarded** — Sets wrong attribute name (`workout_uuid` vs `health_kit_workout_uuid`). Data permanently lost.
|
||||
|
||||
8. **[BUG] workout/views.py:124 + tasks.py:85 — None multiplication on estimated_rep_duration** — Nullable field multiplied without null check. Crashes workout creation and bulk import.
|
||||
|
||||
9. **[LOGIC] workout_generator.py:1459 — Workout type affinity matching NEVER works** — Space vs underscore comparison means all type assignments fall through to random round-robin.
|
||||
|
||||
10. **[PERFORMANCE] Serializer N+1 queries — 5000+ queries on cache miss** — WorkoutSerializer, ExerciseSerializer, and SupersetSerializer each trigger per-object queries with no prefetching. Mitigated by infinite caching but devastating on any cache clear.
|
||||
@@ -0,0 +1,34 @@
|
||||
# Generated by Django 5.1.4 on 2026-02-21 05:06

from django.db import migrations

# Batch size for the cleanup delete. Keeps each IN (...) clause well under
# SQLite's default 999-parameter limit and bounds per-query work on any backend.
_DELETE_BATCH_SIZE = 500


def deduplicate_exercise_muscles(apps, schema_editor):
    """Remove duplicate (exercise, muscle) ExerciseMuscle rows.

    Keeps the lowest-id row of each duplicate pair so the subsequent
    AlterUniqueTogether operation can add the unique constraint without
    violating it. The reverse migration is a no-op (deleted duplicates
    are not restored).
    """
    ExerciseMuscle = apps.get_model('muscle', 'ExerciseMuscle')
    seen = set()
    to_delete = []
    # values_list + iterator streams plain tuples instead of hydrating a
    # full model instance per row — this table can be large.
    rows = (
        ExerciseMuscle.objects
        .order_by('id')
        .values_list('id', 'exercise_id', 'muscle_id')
        .iterator()
    )
    for pk, exercise_id, muscle_id in rows:
        key = (exercise_id, muscle_id)
        if key in seen:
            to_delete.append(pk)
        else:
            seen.add(key)
    # Delete in fixed-size batches so the IN clause never exceeds the
    # database's bound-parameter limit, however many duplicates exist.
    for start in range(0, len(to_delete), _DELETE_BATCH_SIZE):
        batch = to_delete[start:start + _DELETE_BATCH_SIZE]
        ExerciseMuscle.objects.filter(id__in=batch).delete()


class Migration(migrations.Migration):

    dependencies = [
        ('exercise', '0010_alter_exercise_complexity_rating_and_more'),
        ('muscle', '0002_exercisemuscle'),
    ]

    operations = [
        # Data cleanup must run before the constraint is added; reverse is a no-op.
        migrations.RunPython(deduplicate_exercise_muscles, migrations.RunPython.noop),
        migrations.AlterUniqueTogether(
            name='exercisemuscle',
            unique_together={('exercise', 'muscle')},
        ),
    ]
|
||||
@@ -8,7 +8,7 @@ class Muscle(models.Model):
|
||||
name = models.CharField(null=True, blank=True, max_length=64)
|
||||
|
||||
def __str__(self):
|
||||
return self.name
|
||||
return self.name or "Unnamed"
|
||||
|
||||
class ExerciseMuscle(models.Model):
|
||||
created_at = models.DateTimeField(auto_now_add=True)
|
||||
@@ -24,5 +24,8 @@ class ExerciseMuscle(models.Model):
|
||||
related_name='exercise_muscle_muscle'
|
||||
)
|
||||
|
||||
class Meta:
|
||||
unique_together = ('exercise', 'muscle')
|
||||
|
||||
def __str__(self):
|
||||
return self.exercise.name + " : " + self.muscle.name
|
||||
@@ -2,7 +2,6 @@ from django.shortcuts import render
|
||||
from .models import *
|
||||
from .serializers import *
|
||||
|
||||
from django.shortcuts import render
|
||||
from rest_framework.decorators import api_view
|
||||
from rest_framework.response import Response
|
||||
from rest_framework import status
|
||||
@@ -21,8 +20,8 @@ def all_muscles(request):
|
||||
data = cache.get('all_muscles')
|
||||
return Response(data=data, status=status.HTTP_200_OK)
|
||||
|
||||
users = Muscle.objects.all()
|
||||
serializer = MuscleSerializer(users, many=True)
|
||||
muscles = Muscle.objects.all()
|
||||
serializer = MuscleSerializer(muscles, many=True)
|
||||
data = serializer.data
|
||||
cache.set('all_muscles', data, timeout=None)
|
||||
return Response(data=data, status=status.HTTP_200_OK)
|
||||
@@ -8,4 +8,8 @@ class RegisteredUserAdmin(admin.ModelAdmin):
|
||||
list_display = ("first_name", "last_name", "nick_name", "has_nsfw_toggle", "jwt_token")
|
||||
|
||||
def jwt_token(self, obj):
|
||||
return Token.objects.get(user=obj.user).key
|
||||
try:
|
||||
token = Token.objects.get(user=obj.user)
|
||||
return token.key
|
||||
except Token.DoesNotExist:
|
||||
return "No token"
|
||||
@@ -17,4 +17,4 @@ class RegisteredUser(models.Model):
|
||||
)
|
||||
|
||||
def __str__(self):
|
||||
return self.first_name + " " + self.last_name + " : " + self.user.email
|
||||
return f"{self.first_name or ''} {self.last_name or ''} : {self.user.email}"
|
||||
@@ -2,6 +2,7 @@ from rest_framework import serializers
|
||||
from .models import RegisteredUser
|
||||
from django.contrib.auth.models import User
|
||||
from rest_framework.authtoken.models import Token
|
||||
from django.db import transaction
|
||||
|
||||
|
||||
class RegisteredUserSerializer(serializers.ModelSerializer):
|
||||
@@ -28,10 +29,11 @@ class CreateRegisteredUserThroughUserSerializer(serializers.ModelSerializer):
|
||||
class Meta:
|
||||
model = User
|
||||
fields = ('id', 'password', 'email', 'first_name', 'last_name')
|
||||
write_only_fields = ('password',)
|
||||
extra_kwargs = {'password': {'write_only': True}}
|
||||
read_only_fields = ('id',)
|
||||
|
||||
def create(self, validated_data):
|
||||
with transaction.atomic():
|
||||
user = User.objects.create(
|
||||
username=validated_data['email'],
|
||||
email=validated_data['email'],
|
||||
@@ -43,7 +45,6 @@ class CreateRegisteredUserThroughUserSerializer(serializers.ModelSerializer):
|
||||
user.save()
|
||||
|
||||
reg_user = RegisteredUser.objects.create(
|
||||
phone_number=self.context.get("phone_number"),
|
||||
user=user,
|
||||
first_name=validated_data['first_name'],
|
||||
last_name=validated_data['last_name']
|
||||
|
||||
@@ -11,6 +11,7 @@ from rest_framework.permissions import IsAuthenticated
|
||||
from rest_framework.decorators import authentication_classes
|
||||
from rest_framework.decorators import permission_classes
|
||||
from django.shortcuts import get_object_or_404
|
||||
from django.db import transaction
|
||||
import json
|
||||
|
||||
|
||||
@@ -22,36 +23,48 @@ def all_registered_users(request):
|
||||
|
||||
|
||||
@api_view(['POST'])
|
||||
@authentication_classes([])
|
||||
def create_registered_user(request):
|
||||
_serializer = CreateRegisteredUserSerializer(data=request.data)
|
||||
|
||||
if not _serializer.is_valid():
|
||||
return Response(_serializer.errors, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
|
||||
return Response(_serializer.errors, status=status.HTTP_400_BAD_REQUEST)
|
||||
|
||||
email = request.data["email"]
|
||||
|
||||
if User.objects.filter(email=email):
|
||||
# Note: DB unique constraint on email is the real guard against race conditions
|
||||
if User.objects.filter(email=email).exists():
|
||||
return Response({"email": [ "Email in use" ] }, status=status.HTTP_409_CONFLICT)
|
||||
|
||||
serializer = CreateRegisteredUserThroughUserSerializer(data=request.data)
|
||||
if serializer.is_valid():
|
||||
with transaction.atomic():
|
||||
new_registered_user = serializer.save()
|
||||
serializer = RegisteredUserSerializer(new_registered_user, many=False)
|
||||
|
||||
token = Token.objects.get(user=new_registered_user.user).key
|
||||
token = get_object_or_404(Token, user=new_registered_user.user).key
|
||||
data = serializer.data
|
||||
data["token"] = token
|
||||
return Response(data,status=status.HTTP_201_CREATED)
|
||||
|
||||
return Response(serializer.errors, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
|
||||
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
|
||||
|
||||
|
||||
@api_view(['POST'])
|
||||
@authentication_classes([])
|
||||
def login_registered_user(request):
|
||||
email = request.data["email"]
|
||||
password = request.data["password"]
|
||||
email = request.data.get("email", "").strip()
|
||||
password = request.data.get("password", "")
|
||||
|
||||
# Try authenticating with the input as username first, then by email lookup
|
||||
user = authenticate(username=email, password=password)
|
||||
if user is None:
|
||||
from django.contrib.auth.models import User
|
||||
try:
|
||||
user_obj = User.objects.get(email=email)
|
||||
user = authenticate(username=user_obj.username, password=password)
|
||||
except User.DoesNotExist:
|
||||
pass
|
||||
|
||||
if user is not None:
|
||||
registered_user = get_object_or_404(RegisteredUser, user=user)
|
||||
@@ -61,19 +74,19 @@ def login_registered_user(request):
|
||||
data["token"] = token
|
||||
return Response(data,status=status.HTTP_200_OK)
|
||||
else:
|
||||
return Response({}, status=status.HTTP_404_NOT_FOUND)
|
||||
return Response({"detail": "Invalid email or password"}, status=status.HTTP_401_UNAUTHORIZED)
|
||||
|
||||
|
||||
@api_view(['POST'])
|
||||
@authentication_classes([TokenAuthentication])
|
||||
@permission_classes([IsAuthenticated])
|
||||
def update_registered_user(request):
|
||||
registered_user = RegisteredUser.objects.get(user=request.user)
|
||||
registered_user = get_object_or_404(RegisteredUser, user=request.user)
|
||||
|
||||
email = request.POST.get("email")
|
||||
first_name = request.POST.get("first_name")
|
||||
last_name = request.POST.get("last_name")
|
||||
image = request.POST.get("image")
|
||||
email = request.data.get("email")
|
||||
first_name = request.data.get("first_name")
|
||||
last_name = request.data.get("last_name")
|
||||
image = request.data.get("image")
|
||||
|
||||
registered_user.first_name = first_name
|
||||
registered_user.last_name = last_name
|
||||
@@ -83,9 +96,9 @@ def update_registered_user(request):
|
||||
registered_user.save()
|
||||
registered_user.user.save()
|
||||
|
||||
registered_user = RegisteredUser.objects.get(user=request.user)
|
||||
registered_user = get_object_or_404(RegisteredUser, user=request.user)
|
||||
serializer = RegisteredUserSerializer(registered_user, many=False)
|
||||
token = Token.objects.get(user=registered_user.user).key
|
||||
token = get_object_or_404(Token, user=registered_user.user).key
|
||||
data = serializer.data
|
||||
data["token"] = token
|
||||
return Response(data,status=status.HTTP_200_OK)
|
||||
@@ -95,17 +108,17 @@ def update_registered_user(request):
|
||||
@authentication_classes([TokenAuthentication])
|
||||
@permission_classes([IsAuthenticated])
|
||||
def update_password(request):
|
||||
current_password = request.POST.get("current_password")
|
||||
new_password = request.POST.get("new_password")
|
||||
current_password = request.data.get("current_password")
|
||||
new_password = request.data.get("new_password")
|
||||
user = request.user
|
||||
|
||||
success = user.check_password(current_password)
|
||||
if success:
|
||||
user.set_password(new_password)
|
||||
user.save()
|
||||
registered_user = RegisteredUser.objects.get(user=request.user)
|
||||
registered_user = get_object_or_404(RegisteredUser, user=request.user)
|
||||
serializer = RegisteredUserSerializer(registered_user, many=False)
|
||||
token = Token.objects.get(user=registered_user.user).key
|
||||
token = get_object_or_404(Token, user=registered_user.user).key
|
||||
data = serializer.data
|
||||
data["token"] = token
|
||||
return Response(data,status=status.HTTP_200_OK)
|
||||
@@ -116,7 +129,7 @@ def update_password(request):
|
||||
@authentication_classes([TokenAuthentication])
|
||||
@permission_classes([IsAuthenticated])
|
||||
def refresh(request):
|
||||
registered_user = RegisteredUser.objects.get(user=request.user)
|
||||
registered_user = get_object_or_404(RegisteredUser, user=request.user)
|
||||
serializer = RegisteredUserSerializer(registered_user, many=False)
|
||||
token = get_object_or_404(Token, user=registered_user.user).key
|
||||
data = serializer.data
|
||||
|
||||
@@ -1,44 +1,46 @@
|
||||
amqp==5.1.1
|
||||
asgiref==3.7.2
|
||||
async-timeout==4.0.2
|
||||
billiard==4.1.0
|
||||
celery==5.3.1
|
||||
click==8.1.3
|
||||
click-didyoumean==0.3.0
|
||||
click-plugins==1.1.1
|
||||
click-repl==0.3.0
|
||||
defusedxml==0.7.1
|
||||
diff-match-patch==20230430
|
||||
Django==4.2.2
|
||||
django-debug-toolbar==4.1.0
|
||||
django-import-export==3.2.0
|
||||
django-push-notifications==3.0.0
|
||||
django-redis==5.3.0
|
||||
djangorestframework==3.14.0
|
||||
et-xmlfile==1.1.0
|
||||
gevent==22.10.1
|
||||
greenlet==1.1.3.post0
|
||||
gunicorn==20.1.0
|
||||
kombu==5.3.1
|
||||
MarkupPy==1.14
|
||||
odfpy==1.4.1
|
||||
openpyxl==3.1.2
|
||||
prompt-toolkit==3.0.38
|
||||
psycopg2==2.9.6
|
||||
python-dateutil==2.8.2
|
||||
pytz==2023.3
|
||||
PyYAML==6.0
|
||||
redis==4.6.0
|
||||
six==1.16.0
|
||||
sqlparse==0.4.4
|
||||
tablib==3.5.0
|
||||
typing_extensions==4.6.3
|
||||
tzdata==2023.3
|
||||
vine==5.0.0
|
||||
wcwidth==0.2.6
|
||||
whitenoise==6.4.0
|
||||
xlrd==2.0.1
|
||||
xlwt==1.3.0
|
||||
zope.event==5.0
|
||||
zope.interface==6.0
|
||||
amqp>=5.2.0
|
||||
asgiref>=3.8.0
|
||||
billiard>=4.2.0
|
||||
celery>=5.4.0,<6.0
|
||||
click>=8.1.7
|
||||
click-didyoumean>=0.3.1
|
||||
click-plugins>=1.1.1
|
||||
click-repl>=0.3.0
|
||||
defusedxml>=0.7.1
|
||||
diff-match-patch>=20230430
|
||||
Django>=5.2,<6.0
|
||||
django-debug-toolbar>=4.4.0
|
||||
django-import-export>=4.0
|
||||
django-push-notifications>=3.1.0
|
||||
django-redis>=5.4.0
|
||||
djangorestframework>=3.15.0
|
||||
et-xmlfile>=2.0.0
|
||||
gevent>=24.2.1
|
||||
greenlet>=3.0.0
|
||||
gunicorn>=23.0.0
|
||||
kombu>=5.4.0,<6.0
|
||||
MarkupPy>=1.14
|
||||
odfpy>=1.4.1
|
||||
openpyxl>=3.1.5
|
||||
prompt-toolkit>=3.0.43
|
||||
psycopg2>=2.9.9
|
||||
python-dateutil>=2.9.0
|
||||
pytz>=2024.1
|
||||
PyYAML>=6.0.1
|
||||
redis>=5.0.0,<8.0
|
||||
six>=1.16.0
|
||||
sqlparse>=0.5.0
|
||||
tablib>=3.6.0
|
||||
typing_extensions>=4.9.0
|
||||
tzdata>=2024.1
|
||||
vine>=5.1.0
|
||||
wcwidth>=0.2.13
|
||||
whitenoise>=6.7.0
|
||||
xlrd>=2.0.1
|
||||
xlwt>=1.3.0
|
||||
zope.event>=5.0
|
||||
zope.interface>=6.4
|
||||
python-ffmpeg-video-streaming>=0.1
|
||||
numpy>=1.26.0
|
||||
scikit-learn>=1.4.0
|
||||
django-cors-headers>=4.4.0
|
||||
|
||||
@@ -2,22 +2,26 @@ from django.shortcuts import render
|
||||
from exercise.models import Exercise
|
||||
from muscle.models import Muscle, ExerciseMuscle
|
||||
from equipment.models import Equipment, WorkoutEquipment
|
||||
from rest_framework.decorators import api_view
|
||||
from rest_framework.decorators import api_view, authentication_classes, permission_classes
|
||||
from rest_framework.authentication import TokenAuthentication
|
||||
from rest_framework.permissions import IsAuthenticated, IsAdminUser
|
||||
from rest_framework.response import Response
|
||||
from rest_framework import status
|
||||
from django.core.cache import cache
|
||||
|
||||
# Create your views here.
|
||||
@api_view(['GET'])
|
||||
@authentication_classes([TokenAuthentication])
|
||||
@permission_classes([IsAuthenticated])
|
||||
def sync_equipment(request):
|
||||
all_exercise = Exercise.objects.all()
|
||||
for exercise in all_exercise:
|
||||
all_equipment = exercise.equipment_required.split(',')
|
||||
all_equipment = (exercise.equipment_required or '').split(',')
|
||||
for equipment in all_equipment:
|
||||
if len(equipment) > 0:
|
||||
try:
|
||||
equipment_obj = Equipment.objects.get(name=equipment.lower())
|
||||
WorkoutEquipment.objects.create(exercise=exercise, equipment=equipment_obj).save()
|
||||
WorkoutEquipment.objects.create(exercise=exercise, equipment=equipment_obj)
|
||||
except Equipment.DoesNotExist:
|
||||
pass
|
||||
|
||||
@@ -25,22 +29,26 @@ def sync_equipment(request):
|
||||
return Response(status=status.HTTP_200_OK)
|
||||
|
||||
@api_view(['GET'])
|
||||
@authentication_classes([TokenAuthentication])
|
||||
@permission_classes([IsAuthenticated])
|
||||
def sync_muscle_groups(request):
|
||||
all_exercise = Exercise.objects.all()
|
||||
for exercise in all_exercise:
|
||||
all_muscle_groups = exercise.muscle_groups.split(',')
|
||||
all_muscle_groups = (exercise.muscle_groups or '').split(',')
|
||||
for muscle_group in all_muscle_groups:
|
||||
if len(muscle_group) > 0:
|
||||
try:
|
||||
muscle_obj = Muscle.objects.get(name=muscle_group.lower())
|
||||
ExerciseMuscle.objects.create(exercise=exercise, muscle=muscle_obj).save()
|
||||
except MuscleGroup.DoesNotExist:
|
||||
ExerciseMuscle.objects.create(exercise=exercise, muscle=muscle_obj)
|
||||
except Muscle.DoesNotExist:
|
||||
pass
|
||||
|
||||
|
||||
return Response(status=status.HTTP_200_OK)
|
||||
|
||||
@api_view(['GET'])
|
||||
@api_view(['POST'])
|
||||
@authentication_classes([TokenAuthentication])
|
||||
@permission_classes([IsAuthenticated, IsAdminUser])
|
||||
def clear_redis(request):
|
||||
cache.clear()
|
||||
return Response(status=status.HTTP_200_OK)
|
||||
@@ -15,7 +15,8 @@ class SupersetExerciseInline(admin.StackedInline):
|
||||
|
||||
@admin.register(Superset)
|
||||
class SupersetAdmin(ImportExportModelAdmin):
|
||||
list_display = ("name", "workout", "order", "rounds", "get_workout_id", "estimated_time",)
|
||||
search_fields = ['workout__id', 'name']
|
||||
list_display = ("name", "get_workout_id", "workout", "order", "rounds", "estimated_time",)
|
||||
ordering = ("order",)
|
||||
inlines = [
|
||||
SupersetExerciseInline,
|
||||
|
||||
@@ -12,15 +12,26 @@ def get_first_up_superset(workout):
|
||||
return first_up_superset
|
||||
|
||||
def get_first_up_superset_exercise(superset):
|
||||
try:
|
||||
exercise = Exercise.objects.get(name="First Up")
|
||||
except Exercise.DoesNotExist:
|
||||
exercise = None
|
||||
|
||||
if exercise is None:
|
||||
return None
|
||||
|
||||
first_up_superset_exercise = SupersetExercise()
|
||||
first_up_superset_exercise.exercise = Exercise.objects.get(name="First Up")
|
||||
first_up_superset_exercise.exercise = exercise
|
||||
first_up_superset_exercise.duration = 15
|
||||
first_up_superset_exercise.order = 1
|
||||
first_up_superset_exercise.exercise.side = "Chill"
|
||||
first_up_superset_exercise.exercise.joints_used = ""
|
||||
first_up_superset_exercise.exercise.movement_patterns = ""
|
||||
first_up_superset_exercise.exercise.equipment_required = ""
|
||||
first_up_superset_exercise.exercise.muscle_groups = ""
|
||||
# Build overrides as a dict instead of mutating the ORM object in memory
|
||||
first_up_superset_exercise._display_overrides = {
|
||||
'side': 'Chill',
|
||||
'joints_used': '',
|
||||
'movement_patterns': '',
|
||||
'equipment_required': '',
|
||||
'muscle_groups': '',
|
||||
}
|
||||
if superset is not None:
|
||||
first_up_superset_exercise.superset = superset
|
||||
return first_up_superset_exercise
|
||||
18
superset/migrations/0008_superset_rest_between_rounds.py
Normal file
18
superset/migrations/0008_superset_rest_between_rounds.py
Normal file
@@ -0,0 +1,18 @@
|
||||
# Generated by Django 5.1.4 on 2026-02-20 22:55
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('superset', '0007_superset_estimated_time'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='superset',
|
||||
name='rest_between_rounds',
|
||||
field=models.IntegerField(default=45, help_text='Rest between rounds in seconds'),
|
||||
),
|
||||
]
|
||||
25
superset/migrations/0009_fix_related_names_and_nullable.py
Normal file
25
superset/migrations/0009_fix_related_names_and_nullable.py
Normal file
@@ -0,0 +1,25 @@
|
||||
# Generated by Django 5.1.4 on 2026-02-21 05:32
|
||||
|
||||
import django.db.models.deletion
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('exercise', '0011_fix_related_names_and_nullable'),
|
||||
('superset', '0008_superset_rest_between_rounds'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name='supersetexercise',
|
||||
name='exercise',
|
||||
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='exercise_supersets', to='exercise.exercise'),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='supersetexercise',
|
||||
name='superset',
|
||||
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='superset_exercises', to='superset.superset'),
|
||||
),
|
||||
]
|
||||
@@ -14,12 +14,14 @@ class Superset(models.Model):
|
||||
related_name='superset_workout'
|
||||
)
|
||||
|
||||
rounds = models.IntegerField(max_length=3, blank=False, null=False)
|
||||
order = models.IntegerField(max_length=3, blank=False, null=False)
|
||||
estimated_time = models.FloatField(max_length=255, blank=True, null=True)
|
||||
rounds = models.IntegerField(blank=False, null=False)
|
||||
order = models.IntegerField(blank=False, null=False)
|
||||
estimated_time = models.FloatField(blank=True, null=True)
|
||||
rest_between_rounds = models.IntegerField(default=45, help_text='Rest between rounds in seconds')
|
||||
|
||||
def __str__(self):
|
||||
return self.name #+ " : " + self.description + " | by: " + self.registered_user.nick_name
|
||||
name = " -- " if self.name is None else self.name
|
||||
return name #+ " : " + self.description + " | by: " + self.registered_user.nick_name
|
||||
|
||||
class SupersetExercise(models.Model):
|
||||
created_at = models.DateTimeField(auto_now_add=True)
|
||||
@@ -28,19 +30,19 @@ class SupersetExercise(models.Model):
|
||||
exercise = models.ForeignKey(
|
||||
Exercise,
|
||||
on_delete=models.CASCADE,
|
||||
related_name='superset_exercise_exercise'
|
||||
related_name='exercise_supersets'
|
||||
)
|
||||
|
||||
superset = models.ForeignKey(
|
||||
Superset,
|
||||
on_delete=models.CASCADE,
|
||||
related_name='superset_exercise_exercise'
|
||||
related_name='superset_exercises'
|
||||
)
|
||||
|
||||
weight = models.IntegerField(null=True, blank=True, max_length=4)
|
||||
reps = models.IntegerField(null=True, blank=True, max_length=4)
|
||||
duration = models.IntegerField(null=True, blank=True, max_length=4)
|
||||
order = models.IntegerField(max_length=3, blank=False, null=False)
|
||||
weight = models.IntegerField(null=True, blank=True)
|
||||
reps = models.IntegerField(null=True, blank=True)
|
||||
duration = models.IntegerField(null=True, blank=True)
|
||||
order = models.IntegerField(blank=False, null=False)
|
||||
|
||||
def __str__(self):
|
||||
return self.superset.workout.name + " -- " + self.exercise.name
|
||||
|
||||
@@ -2,7 +2,6 @@ from rest_framework import serializers
|
||||
from .models import *
|
||||
from exercise.models import Exercise
|
||||
from exercise.serializers import ExerciseSerializer
|
||||
import uuid
|
||||
|
||||
class SupersetExerciseSerializer(serializers.ModelSerializer):
|
||||
exercise = serializers.SerializerMethodField()
|
||||
@@ -13,13 +12,26 @@ class SupersetExerciseSerializer(serializers.ModelSerializer):
|
||||
fields = '__all__'
|
||||
|
||||
def get_exercise(self, obj):
|
||||
data = ExerciseSerializer(obj.exercise, many=False).data
|
||||
return data
|
||||
try:
|
||||
exercise = obj.exercise
|
||||
except (Exercise.DoesNotExist, AttributeError):
|
||||
return None
|
||||
if exercise is None:
|
||||
return None
|
||||
return ExerciseSerializer(exercise, many=False).data
|
||||
|
||||
def get_unique_id(self, obj):
|
||||
return str(uuid.uuid4())
|
||||
return f"{obj.pk}-{obj.superset_id}" if hasattr(obj, 'superset_id') else str(obj.pk)
|
||||
|
||||
class SupersetSerializer(serializers.ModelSerializer):
|
||||
"""Serializer for Superset with nested exercises.
|
||||
|
||||
For optimal performance, callers should prefetch related data:
|
||||
Superset.objects.prefetch_related(
|
||||
'superset_exercises__exercise__exercise_muscle_exercise__muscle',
|
||||
'superset_exercises__exercise__workout_exercise_workout__equipment',
|
||||
)
|
||||
"""
|
||||
exercises = serializers.SerializerMethodField()
|
||||
|
||||
class Meta:
|
||||
@@ -27,6 +39,8 @@ class SupersetSerializer(serializers.ModelSerializer):
|
||||
fields = '__all__'
|
||||
|
||||
def get_exercises(self, obj):
|
||||
objs = SupersetExercise.objects.filter(superset=obj).order_by('order')
|
||||
data = SupersetExerciseSerializer(objs, many=True).data
|
||||
return data
|
||||
if obj.pk is None:
|
||||
return []
|
||||
# Use prefetched data if available via superset_exercises related manager
|
||||
objs = obj.superset_exercises.all().order_by('order')
|
||||
return SupersetExerciseSerializer(objs, many=True).data
|
||||
|
||||
43
supervisord.conf
Normal file
43
supervisord.conf
Normal file
@@ -0,0 +1,43 @@
|
||||
[supervisord]
|
||||
nodaemon=true
|
||||
logfile=/dev/stdout
|
||||
logfile_maxbytes=0
|
||||
|
||||
[supervisorctl]
|
||||
serverurl=unix:///tmp/supervisor.sock
|
||||
|
||||
[unix_http_server]
|
||||
file=/tmp/supervisor.sock
|
||||
|
||||
[program:django]
|
||||
# To run migrations manually: docker compose exec web python manage.py migrate
|
||||
command=gunicorn werkout_api.wsgi:application --bind 0.0.0.0:8000 --workers 3
|
||||
directory=/code
|
||||
autostart=true
|
||||
autorestart=true
|
||||
startsecs=10
|
||||
stdout_logfile=/dev/stdout
|
||||
stdout_logfile_maxbytes=0
|
||||
stderr_logfile=/dev/stderr
|
||||
stderr_logfile_maxbytes=0
|
||||
|
||||
[program:nextjs]
|
||||
command=npx next start -p 3000 -H 0.0.0.0
|
||||
directory=/code/werkout-frontend
|
||||
autostart=true
|
||||
autorestart=true
|
||||
stdout_logfile=/dev/stdout
|
||||
stdout_logfile_maxbytes=0
|
||||
stderr_logfile=/dev/stderr
|
||||
stderr_logfile_maxbytes=0
|
||||
|
||||
[program:celery]
|
||||
command=celery -A werkout_api worker -l info
|
||||
directory=/code
|
||||
autostart=true
|
||||
autorestart=true
|
||||
startsecs=10
|
||||
stdout_logfile=/dev/stdout
|
||||
stdout_logfile_maxbytes=0
|
||||
stderr_logfile=/dev/stderr
|
||||
stderr_logfile_maxbytes=0
|
||||
@@ -21,8 +21,20 @@ class Video(models.Model):
|
||||
def __str__(self):
|
||||
return str(self.video_file)
|
||||
|
||||
def save(self, **kwargs):
|
||||
super(Video, self).save(**kwargs)
|
||||
def save(self, *args, **kwargs):
|
||||
is_new = self.pk is None
|
||||
if self.pk:
|
||||
try:
|
||||
old = type(self).objects.get(pk=self.pk)
|
||||
video_changed = old.video_file != self.video_file
|
||||
except type(self).DoesNotExist:
|
||||
video_changed = True
|
||||
else:
|
||||
video_changed = bool(self.video_file)
|
||||
|
||||
super(Video, self).save(*args, **kwargs)
|
||||
|
||||
if self.video_file and (is_new or video_changed):
|
||||
filename = self.video_file.name
|
||||
create_hls_tasks.delay(filename)
|
||||
|
||||
@@ -31,10 +43,22 @@ class Video(models.Model):
|
||||
class ExerciseVideo(models.Model):
|
||||
created_at = models.DateTimeField(auto_now_add=True)
|
||||
updated_at = models.DateTimeField(auto_now=True)
|
||||
video_file = models.FileField(upload_to='exercise_videos/', null=True, verbose_name="")
|
||||
video_file = models.FileField(upload_to='videos/', null=True, verbose_name="")
|
||||
|
||||
def save(self, **kwargs):
|
||||
super(ExerciseVideo, self).save(**kwargs)
|
||||
def save(self, *args, **kwargs):
|
||||
is_new = self.pk is None
|
||||
if self.pk:
|
||||
try:
|
||||
old = type(self).objects.get(pk=self.pk)
|
||||
video_changed = old.video_file != self.video_file
|
||||
except type(self).DoesNotExist:
|
||||
video_changed = True
|
||||
else:
|
||||
video_changed = bool(self.video_file)
|
||||
|
||||
super(ExerciseVideo, self).save(*args, **kwargs)
|
||||
|
||||
if self.video_file and (is_new or video_changed):
|
||||
filename = self.video_file.name
|
||||
create_hls_tasks.delay(filename)
|
||||
|
||||
|
||||
@@ -10,4 +10,6 @@ class VideoSerializer(serializers.ModelSerializer):
|
||||
fields = ('video_file', 'gender_value',)
|
||||
|
||||
def get_video_file(self, obj):
|
||||
if not obj.video_file:
|
||||
return None
|
||||
return '/media/' + obj.video_file.name + '_720p.m3u8'
|
||||
@@ -7,7 +7,8 @@ from django.core.files.storage import default_storage
|
||||
|
||||
@shared_task()
|
||||
def create_hls_tasks(filename):
|
||||
end_location = str(settings.MEDIA_ROOT) + "/" + str(filename) +'.m3u8'
|
||||
base_name = os.path.splitext(str(filename))[0]
|
||||
end_location = str(settings.MEDIA_ROOT) + "/" + base_name + '.m3u8'
|
||||
if not default_storage.exists(end_location):
|
||||
media_location = str(settings.MEDIA_ROOT) + "/" + str(filename)
|
||||
video = ffmpeg_streaming.input(media_location)
|
||||
|
||||
@@ -3,7 +3,6 @@ from .serializers import *
|
||||
|
||||
from django.shortcuts import render
|
||||
from rest_framework.decorators import api_view
|
||||
from rest_framework.decorators import api_view
|
||||
from rest_framework.response import Response
|
||||
from rest_framework import status
|
||||
from django.contrib.auth.models import User
|
||||
@@ -48,15 +47,31 @@ def nsfw_videos(request):
|
||||
@permission_classes([IsAuthenticated])
|
||||
def hls_videos(request):
|
||||
video_url = request.GET.get('video_name', '')
|
||||
type = request.GET.get('video_type', '')
|
||||
video_type = request.GET.get('video_type', '')
|
||||
|
||||
end_location = str(settings.MEDIA_ROOT) + '/hls/'+ video_url +'.m3u8'
|
||||
# Sanitize inputs to prevent path traversal
|
||||
video_url = os.path.basename(video_url)
|
||||
video_type = os.path.basename(video_type)
|
||||
|
||||
if not video_url or not video_type:
|
||||
return Response({"error": "video_name and video_type are required"}, status=status.HTTP_400_BAD_REQUEST)
|
||||
|
||||
end_location = os.path.join(str(settings.MEDIA_ROOT), 'hls', video_url + '.m3u8')
|
||||
end_file_name = '/media/hls/' + video_url + '_720p.m3u8'
|
||||
|
||||
# Verify the resolved path is within MEDIA_ROOT
|
||||
if not os.path.realpath(end_location).startswith(os.path.realpath(str(settings.MEDIA_ROOT))):
|
||||
return Response({"error": "Invalid path"}, status=status.HTTP_400_BAD_REQUEST)
|
||||
|
||||
if default_storage.exists(end_location):
|
||||
return JsonResponse({'file_location': end_file_name})
|
||||
|
||||
media_location = os.path.join(settings.MEDIA_ROOT) + "/" + type + "/" + video_url
|
||||
media_location = os.path.join(str(settings.MEDIA_ROOT), video_type, video_url)
|
||||
|
||||
# Verify media_location is within MEDIA_ROOT
|
||||
if not os.path.realpath(media_location).startswith(os.path.realpath(str(settings.MEDIA_ROOT))):
|
||||
return Response({"error": "Invalid path"}, status=status.HTTP_400_BAD_REQUEST)
|
||||
|
||||
video = ffmpeg_streaming.input(media_location)
|
||||
|
||||
hls = video.hls(Formats.h264())
|
||||
@@ -67,9 +82,17 @@ def hls_videos(request):
|
||||
# {{url}}/videos/hls_video?video_name=Recover_24.mp4&video_type=videos
|
||||
return JsonResponse({'file_location': end_file_name})
|
||||
|
||||
@api_view(['GET'])
|
||||
@api_view(['POST'])
|
||||
@authentication_classes([TokenAuthentication])
|
||||
@permission_classes([IsAuthenticated])
|
||||
def create_hls(request):
|
||||
create_hls_tasks.delay()
|
||||
filename = request.data.get('filename', '')
|
||||
if not filename:
|
||||
return Response({"error": "filename is required"}, status=status.HTTP_400_BAD_REQUEST)
|
||||
# Sanitize to prevent path traversal
|
||||
filename = os.path.basename(filename)
|
||||
full_path = os.path.join(str(settings.MEDIA_ROOT), 'videos', filename)
|
||||
if not os.path.realpath(full_path).startswith(os.path.realpath(str(settings.MEDIA_ROOT))):
|
||||
return Response({"error": "Invalid path"}, status=status.HTTP_400_BAD_REQUEST)
|
||||
create_hls_tasks.delay(os.path.join('videos', filename))
|
||||
return JsonResponse({'running': "running"})
|
||||
246
werkout-frontend/app/dashboard/page.tsx
Normal file
246
werkout-frontend/app/dashboard/page.tsx
Normal file
@@ -0,0 +1,246 @@
|
||||
"use client";
|
||||
|
||||
import { useEffect, useState, useCallback } from "react";
|
||||
import { useRouter } from "next/navigation";
|
||||
import { AuthGuard } from "@/components/auth/AuthGuard";
|
||||
import { Navbar } from "@/components/layout/Navbar";
|
||||
import { BottomNav } from "@/components/layout/BottomNav";
|
||||
import { WeeklyPlanGrid } from "@/components/plans/WeeklyPlanGrid";
|
||||
import { WeekPicker } from "@/components/plans/WeekPicker";
|
||||
import { Button } from "@/components/ui/Button";
|
||||
import { Spinner } from "@/components/ui/Spinner";
|
||||
import { api } from "@/lib/api";
|
||||
import type { GeneratedWeeklyPlan, WeeklyPreview } from "@/lib/types";
|
||||
|
||||
function getCurrentMonday(): string {
|
||||
const now = new Date();
|
||||
const day = now.getDay();
|
||||
const diff = day === 0 ? -6 : 1 - day;
|
||||
const monday = new Date(now);
|
||||
monday.setDate(now.getDate() + diff);
|
||||
const yyyy = monday.getFullYear();
|
||||
const mm = String(monday.getMonth() + 1).padStart(2, "0");
|
||||
const dd = String(monday.getDate()).padStart(2, "0");
|
||||
return `${yyyy}-${mm}-${dd}`;
|
||||
}
|
||||
|
||||
export default function DashboardPage() {
|
||||
const router = useRouter();
|
||||
const [selectedMonday, setSelectedMonday] = useState(getCurrentMonday);
|
||||
const [plans, setPlans] = useState<GeneratedWeeklyPlan[]>([]);
|
||||
const [preview, setPreview] = useState<WeeklyPreview | null>(null);
|
||||
const [loading, setLoading] = useState(true);
|
||||
const [generating, setGenerating] = useState(false);
|
||||
const [saving, setSaving] = useState(false);
|
||||
const [confirming, setConfirming] = useState(false);
|
||||
const [error, setError] = useState("");
|
||||
|
||||
const fetchPlans = useCallback(async () => {
|
||||
try {
|
||||
try {
|
||||
const prefs = await api.getPreferences();
|
||||
const hasPrefs =
|
||||
prefs.available_equipment.length > 0 ||
|
||||
prefs.preferred_workout_types.length > 0 ||
|
||||
prefs.target_muscle_groups.length > 0;
|
||||
if (!hasPrefs) {
|
||||
router.replace("/onboarding");
|
||||
return;
|
||||
}
|
||||
} catch {
|
||||
router.replace("/onboarding");
|
||||
return;
|
||||
}
|
||||
|
||||
const data = await api.getPlans();
|
||||
setPlans(data);
|
||||
} catch (err) {
|
||||
console.error("Failed to fetch plans:", err);
|
||||
} finally {
|
||||
setLoading(false);
|
||||
}
|
||||
}, [router]);
|
||||
|
||||
useEffect(() => {
|
||||
fetchPlans();
|
||||
}, [fetchPlans]);
|
||||
|
||||
// Clear preview when week changes
|
||||
useEffect(() => {
|
||||
setPreview(null);
|
||||
}, [selectedMonday]);
|
||||
|
||||
const savedPlan = plans.find((p) => p.week_start_date === selectedMonday);
|
||||
|
||||
const handleGenerate = async () => {
|
||||
setGenerating(true);
|
||||
setError("");
|
||||
try {
|
||||
const data = await api.previewPlan(selectedMonday);
|
||||
setPreview(data);
|
||||
} catch (err) {
|
||||
const msg =
|
||||
err instanceof Error ? err.message : "Failed to generate preview";
|
||||
setError(msg);
|
||||
console.error("Failed to generate preview:", err);
|
||||
} finally {
|
||||
setGenerating(false);
|
||||
}
|
||||
};
|
||||
|
||||
const handleConfirm = async () => {
|
||||
if (!savedPlan) return;
|
||||
setConfirming(true);
|
||||
setError("");
|
||||
try {
|
||||
await api.confirmPlan(savedPlan.id);
|
||||
await fetchPlans();
|
||||
} catch (err) {
|
||||
const msg = err instanceof Error ? err.message : "Failed to confirm plan";
|
||||
setError(msg);
|
||||
} finally {
|
||||
setConfirming(false);
|
||||
}
|
||||
};
|
||||
|
||||
const handleSave = async () => {
|
||||
if (!preview) return;
|
||||
setSaving(true);
|
||||
setError("");
|
||||
try {
|
||||
await api.savePlan(preview);
|
||||
setPreview(null);
|
||||
await fetchPlans();
|
||||
} catch (err) {
|
||||
const msg = err instanceof Error ? err.message : "Failed to save plan";
|
||||
setError(msg);
|
||||
console.error("Failed to save plan:", err);
|
||||
} finally {
|
||||
setSaving(false);
|
||||
}
|
||||
};
|
||||
|
||||
return (
|
||||
<AuthGuard>
|
||||
<Navbar />
|
||||
<BottomNav />
|
||||
<main className="pt-20 pb-20 px-4 max-w-5xl mx-auto">
|
||||
<div className="flex items-center justify-between mb-6">
|
||||
<h1 className="text-2xl font-bold text-zinc-100">Dashboard</h1>
|
||||
<WeekPicker
|
||||
selectedMonday={selectedMonday}
|
||||
onChange={setSelectedMonday}
|
||||
/>
|
||||
</div>
|
||||
|
||||
{error && (
|
||||
<div className="mb-4 p-3 rounded-lg bg-red-500/10 border border-red-500/20 text-red-400 text-sm">
|
||||
{error}
|
||||
</div>
|
||||
)}
|
||||
|
||||
{preview?.warnings && preview.warnings.length > 0 && (
|
||||
<div className="mb-4 p-3 rounded-lg bg-yellow-500/10 border border-yellow-500/20 text-yellow-300 text-sm">
|
||||
<div className="font-medium mb-1">Heads up</div>
|
||||
<ul className="list-disc list-inside space-y-0.5">
|
||||
{preview.warnings.map((w, i) => (
|
||||
<li key={i}>{w}</li>
|
||||
))}
|
||||
</ul>
|
||||
</div>
|
||||
)}
|
||||
|
||||
{loading ? (
|
||||
<div className="flex items-center justify-center py-20">
|
||||
<Spinner size="lg" />
|
||||
</div>
|
||||
) : preview ? (
|
||||
/* ===== Preview mode ===== */
|
||||
<div>
|
||||
<div className="flex items-center justify-between mb-6">
|
||||
<h2 className="text-lg font-semibold text-zinc-200">
|
||||
Preview
|
||||
</h2>
|
||||
<div className="flex gap-2">
|
||||
<Button
|
||||
variant="ghost"
|
||||
size="sm"
|
||||
onClick={() => setPreview(null)}
|
||||
>
|
||||
Discard
|
||||
</Button>
|
||||
<Button
|
||||
variant="secondary"
|
||||
size="sm"
|
||||
loading={generating}
|
||||
onClick={handleGenerate}
|
||||
>
|
||||
Regenerate
|
||||
</Button>
|
||||
<Button
|
||||
variant="primary"
|
||||
size="sm"
|
||||
loading={saving}
|
||||
onClick={handleSave}
|
||||
>
|
||||
Save Plan
|
||||
</Button>
|
||||
</div>
|
||||
</div>
|
||||
<WeeklyPlanGrid
|
||||
preview={preview}
|
||||
onPreviewChange={setPreview}
|
||||
/>
|
||||
</div>
|
||||
) : savedPlan ? (
|
||||
/* ===== Saved plan mode ===== */
|
||||
<div>
|
||||
<div className="flex items-center justify-between mb-6">
|
||||
<h2 className="text-lg font-semibold text-zinc-200">
|
||||
This Week's Plan
|
||||
</h2>
|
||||
<div className="flex gap-2">
|
||||
{savedPlan.generated_workouts.some(
|
||||
(w) => !w.is_rest_day && w.status === "pending"
|
||||
) && (
|
||||
<Button
|
||||
variant="primary"
|
||||
size="sm"
|
||||
loading={confirming}
|
||||
onClick={handleConfirm}
|
||||
>
|
||||
Save to Calendar
|
||||
</Button>
|
||||
)}
|
||||
<Button
|
||||
variant="secondary"
|
||||
size="sm"
|
||||
loading={generating}
|
||||
onClick={handleGenerate}
|
||||
>
|
||||
Regenerate
|
||||
</Button>
|
||||
</div>
|
||||
</div>
|
||||
<WeeklyPlanGrid plan={savedPlan} onUpdate={fetchPlans} />
|
||||
</div>
|
||||
) : (
|
||||
/* ===== No plan ===== */
|
||||
<div className="flex flex-col items-center justify-center py-20 gap-6">
|
||||
<p className="text-zinc-400 text-lg text-center">
|
||||
No plan for this week yet. Let's get started!
|
||||
</p>
|
||||
<Button
|
||||
variant="primary"
|
||||
size="lg"
|
||||
loading={generating}
|
||||
onClick={handleGenerate}
|
||||
>
|
||||
Generate Plan
|
||||
</Button>
|
||||
</div>
|
||||
)}
|
||||
</main>
|
||||
</AuthGuard>
|
||||
);
|
||||
}
|
||||
44
werkout-frontend/app/globals.css
Normal file
44
werkout-frontend/app/globals.css
Normal file
@@ -0,0 +1,44 @@
|
||||
@tailwind base;
|
||||
@tailwind components;
|
||||
@tailwind utilities;
|
||||
|
||||
/* Range slider custom styling */
|
||||
input[type="range"].range-slider {
|
||||
-webkit-appearance: none;
|
||||
appearance: none;
|
||||
accent-color: #39ff14;
|
||||
}
|
||||
|
||||
input[type="range"].range-slider::-webkit-slider-thumb {
|
||||
-webkit-appearance: none;
|
||||
appearance: none;
|
||||
width: 18px;
|
||||
height: 18px;
|
||||
border-radius: 50%;
|
||||
background: #39ff14;
|
||||
cursor: pointer;
|
||||
border: 2px solid #09090b;
|
||||
box-shadow: 0 0 6px rgba(57, 255, 20, 0.4);
|
||||
}
|
||||
|
||||
input[type="range"].range-slider::-moz-range-thumb {
|
||||
width: 18px;
|
||||
height: 18px;
|
||||
border-radius: 50%;
|
||||
background: #39ff14;
|
||||
cursor: pointer;
|
||||
border: 2px solid #09090b;
|
||||
box-shadow: 0 0 6px rgba(57, 255, 20, 0.4);
|
||||
}
|
||||
|
||||
input[type="range"].range-slider::-webkit-slider-runnable-track {
|
||||
height: 8px;
|
||||
border-radius: 9999px;
|
||||
background: #3f3f46;
|
||||
}
|
||||
|
||||
input[type="range"].range-slider::-moz-range-track {
|
||||
height: 8px;
|
||||
border-radius: 9999px;
|
||||
background: #3f3f46;
|
||||
}
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user