feat: add Django web app, CloudKit sync, dashboard, and game_datetime_utc export
Adds the full Django application layer on top of sportstime_parser: - core: Sport, Team, Stadium, Game models with aliases and league structure - scraper: orchestration engine, adapter, job management, Celery tasks - cloudkit: CloudKit sync client, sync state tracking, sync jobs - dashboard: staff dashboard for monitoring scrapers, sync, review queue - notifications: email reports for scrape/sync results - Docker setup for deployment (Dockerfile, docker-compose, entrypoint) Game exports now use game_datetime_utc (ISO 8601 UTC) instead of venue-local date+time strings, matching the canonical format used by the iOS app. Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
644
dashboard/views.py
Normal file
644
dashboard/views.py
Normal file
@@ -0,0 +1,644 @@
|
||||
import io
|
||||
import json
|
||||
import zipfile
|
||||
from datetime import timedelta, timezone as dt_timezone
|
||||
from urllib.parse import urlparse
|
||||
|
||||
from django.shortcuts import render, redirect, get_object_or_404
|
||||
from django.contrib.admin.views.decorators import staff_member_required
|
||||
from django.contrib import messages
|
||||
from django.db.models import Count, Q
|
||||
from django.http import JsonResponse, HttpResponse
|
||||
from django.utils import timezone
|
||||
|
||||
from core.models import Sport, Team, Stadium, Game, Conference, Division, TeamAlias, StadiumAlias
|
||||
from scraper.models import ScraperConfig, ScrapeJob, ManualReviewItem
|
||||
from cloudkit.models import CloudKitConfiguration, CloudKitSyncState, CloudKitSyncJob
|
||||
|
||||
|
||||
@staff_member_required
def index(request):
    """Render the dashboard landing page with headline counts and recent activity."""
    context = {'title': 'Dashboard'}

    # Headline record counts.
    context['sports_count'] = Sport.objects.filter(is_active=True).count()
    context['teams_count'] = Team.objects.count()
    context['stadiums_count'] = Stadium.objects.count()
    context['games_count'] = Game.objects.count()

    # Latest scrape / sync activity widgets.
    context['recent_jobs'] = ScrapeJob.objects.select_related('config__sport')[:5]
    context['recent_syncs'] = CloudKitSyncJob.objects.select_related('configuration')[:5]
    context['pending_reviews'] = ManualReviewItem.objects.filter(status='pending').count()

    # Per-sport summary rows.
    context['sport_stats'] = get_sport_stats()

    return render(request, 'dashboard/index.html', context)
|
||||
|
||||
|
||||
@staff_member_required
def stats(request):
    """Render the detailed statistics page (sport, game, and sync metrics)."""
    context = dict(
        title='Statistics',
        sport_stats=get_sport_stats(),
        game_stats=get_game_stats(),
        sync_stats=get_sync_stats(),
    )
    return render(request, 'dashboard/stats.html', context)
|
||||
|
||||
|
||||
@staff_member_required
def scraper_status(request):
    """Show scraper configurations, recent jobs, and queue counters."""
    context = {
        'title': 'Scraper Status',
        # Newest seasons first, then by sport within a season.
        'configs': ScraperConfig.objects.select_related('sport').order_by('-season', 'sport'),
        'recent_jobs': ScrapeJob.objects.select_related('config__sport').order_by('-created_at')[:20],
        'running_jobs': ScrapeJob.objects.filter(status='running').count(),
        'pending_jobs': ScrapeJob.objects.filter(status='pending').count(),
    }
    return render(request, 'dashboard/scraper_status.html', context)
|
||||
|
||||
|
||||
@staff_member_required
def sync_status(request):
    """Show CloudKit sync configuration, recent sync jobs, and record totals."""
    from core.models import Game, Team, Stadium  # also available at module level

    all_configs = CloudKitConfiguration.objects.all()

    # A ?config=<id> query parameter selects the configuration to display;
    # otherwise fall back to the active one (or None when none exists).
    requested_id = request.GET.get('config')
    if requested_id:
        config = CloudKitConfiguration.objects.filter(id=requested_id).first()
    else:
        config = CloudKitConfiguration.objects.filter(is_active=True).first()

    # Sync-job history, narrowed to the selected configuration when one is set.
    sync_qs = CloudKitSyncJob.objects.select_related('configuration').order_by('-created_at')
    if config:
        sync_qs = sync_qs.filter(configuration=config)
    running_count = sync_qs.filter(status='running').count()
    latest_syncs = sync_qs[:10]

    # Total syncable records across the three record types.
    record_total = Team.objects.count() + Stadium.objects.count() + Game.objects.count()

    context = {
        'title': 'Sync Status',
        'config': config,
        'all_configs': all_configs,
        'recent_syncs': latest_syncs,
        'running_syncs': running_count,
        'total_records': record_total,
    }
    return render(request, 'dashboard/sync_status.html', context)
|
||||
|
||||
|
||||
@staff_member_required
def review_queue(request):
    """Show pending manual-review items plus a per-sport/type summary."""
    pending_qs = (
        ManualReviewItem.objects
        .filter(status='pending')
        .select_related('sport', 'job')
        .order_by('-confidence', '-created_at')
    )

    # Counts grouped by (sport, item type) for the summary table.
    summary = (
        ManualReviewItem.objects
        .filter(status='pending')
        .values('sport__short_name', 'item_type')
        .annotate(count=Count('id'))
    )

    context = {
        'title': 'Review Queue',
        'pending_items': pending_qs[:50],
        'review_summary': summary,
        'total_pending': pending_qs.count(),
    }
    return render(request, 'dashboard/review_queue.html', context)
|
||||
|
||||
|
||||
@staff_member_required
def run_scraper(request, sport_code, season):
    """Queue a Celery scrape job for a single sport/season pair.

    Only acts on POST; always redirects back to the scraper status page.
    """
    if request.method != 'POST':
        return redirect('dashboard:scraper_status')

    from scraper.tasks import run_scraper_task

    config = get_object_or_404(ScraperConfig, sport__code=sport_code, season=season)
    run_scraper_task.delay(config.id)
    messages.success(request, f'Started scraper for {config}')
    return redirect('dashboard:scraper_status')
|
||||
|
||||
|
||||
@staff_member_required
def run_all_scrapers(request):
    """Queue scrape jobs for every enabled configuration (POST only)."""
    if request.method == 'POST':
        from scraper.tasks import run_scraper_task

        started = 0
        for cfg in ScraperConfig.objects.filter(is_enabled=True):
            run_scraper_task.delay(cfg.id)
            started += 1

        # Report how many jobs were queued, or warn when nothing is enabled.
        if started:
            messages.success(request, f'Started {started} scraper jobs')
        else:
            messages.warning(request, 'No enabled scraper configurations')

    return redirect('dashboard:scraper_status')
|
||||
|
||||
|
||||
@staff_member_required
def run_sync(request):
    """Trigger a CloudKit sync.

    POST only.  Reads an optional ``config_id`` and a multi-select
    ``record_types`` list from the form, queues one Celery sync job per
    selected record type (or a single full sync), then redirects to the
    sync-status page for the chosen configuration.  Non-POST requests and
    the no-configuration case fall through to the plain sync-status redirect.
    """
    if request.method == 'POST':
        from cloudkit.tasks import run_cloudkit_sync

        # Get config from form or fall back to active config
        config_id = request.POST.get('config_id')
        if config_id:
            config = CloudKitConfiguration.objects.filter(id=config_id).first()
        else:
            config = CloudKitConfiguration.objects.filter(is_active=True).first()

        if config:
            # Get selected record types
            record_types = request.POST.getlist('record_types')

            # Empty selection and an explicit 'all' both mean a full sync.
            if not record_types or 'all' in record_types:
                # Sync all — no record_type filter
                run_cloudkit_sync.delay(config.id)
                messages.success(request, f'Started full CloudKit sync to {config.name} ({config.environment})')
            else:
                # Queue a sync job per selected record type
                for rt in record_types:
                    run_cloudkit_sync.delay(config.id, record_type=rt)
                type_list = ', '.join(record_types)
                messages.success(request, f'Started CloudKit sync for {type_list} to {config.name} ({config.environment})')

            # NOTE(review): builds the sync-status URL by string-replacing this
            # view's own path; this assumes run-sync and sync-status are mounted
            # as sibling URLs — reverse('dashboard:sync_status') would be
            # sturdier.  TODO confirm the URLconf layout before changing.
            return redirect(f"{request.path.replace('/run-sync/', '/sync-status/')}?config={config.id}")
        else:
            messages.error(request, 'No CloudKit configuration found')

    return redirect('dashboard:sync_status')
|
||||
|
||||
|
||||
@staff_member_required
def sync_progress_api(request, job_id):
    """JSON endpoint: progress payload for one CloudKit sync job (404 if unknown)."""
    try:
        progress = CloudKitSyncJob.objects.get(id=job_id).get_progress()
    except CloudKitSyncJob.DoesNotExist:
        return JsonResponse({'error': 'Job not found'}, status=404)
    return JsonResponse(progress)
|
||||
|
||||
|
||||
@staff_member_required
def running_syncs_api(request):
    """JSON endpoint: ids and configuration ids of currently running sync jobs."""
    running = (
        CloudKitSyncJob.objects
        .filter(status='running')
        .values('id', 'configuration_id')
    )
    return JsonResponse({'running': list(running)})
|
||||
|
||||
|
||||
def get_sport_stats():
    """Build one summary dict per active sport (teams/stadiums/games/reviews)."""
    return [
        {
            'sport': sport,
            'teams': sport.teams.count(),
            'stadiums': sport.stadiums.count(),
            'games': sport.games.count(),
            'pending_reviews': sport.review_items.filter(status='pending').count(),
        }
        for sport in Sport.objects.filter(is_active=True)
    ]
|
||||
|
||||
|
||||
def get_game_stats():
    """Return aggregate game counts: total, by status, today, and next 7 days."""
    now = timezone.now()
    week_ahead = now + timedelta(days=7)

    counts = {'total': Game.objects.count()}
    counts['scheduled'] = Game.objects.filter(status='scheduled').count()
    counts['final'] = Game.objects.filter(status='final').count()
    # Games whose start time falls on the current calendar date.
    counts['today'] = Game.objects.filter(game_date__date=now.date()).count()
    # Upcoming games in the rolling seven-day window starting now.
    counts['this_week'] = Game.objects.filter(
        game_date__gte=now, game_date__lt=week_ahead
    ).count()
    return counts
|
||||
|
||||
|
||||
def get_sync_stats():
    """Return CloudKit sync-state counts: overall total plus one per status."""
    summary = {'total': CloudKitSyncState.objects.count()}
    for state in ('synced', 'pending', 'failed'):
        summary[state] = CloudKitSyncState.objects.filter(sync_status=state).count()
    return summary
|
||||
|
||||
|
||||
# =============================================================================
|
||||
# Export Views
|
||||
# =============================================================================
|
||||
|
||||
@staff_member_required
def export_data(request):
    """Render the export page: sport/year filter options plus record counts."""
    from django.db.models.functions import ExtractYear

    active_sports = Sport.objects.filter(is_active=True).order_by('code')

    # Distinct calendar years appearing in game dates, newest first.
    year_values = (
        Game.objects
        .annotate(game_year=ExtractYear('game_date'))
        .values_list('game_year', flat=True)
        .distinct()
        .order_by('-game_year')
    )

    record_counts = {
        'sports': Sport.objects.filter(is_active=True).count(),
        'teams': Team.objects.count(),
        'stadiums': Stadium.objects.count(),
        'games': Game.objects.count(),
        'team_aliases': TeamAlias.objects.count(),
        'stadium_aliases': StadiumAlias.objects.count(),
        'conferences': Conference.objects.count(),
        'divisions': Division.objects.count(),
    }

    context = {
        'title': 'Export Data',
        'sports': active_sports,
        'years': list(year_values),
        'counts': record_counts,
    }
    return render(request, 'dashboard/export.html', context)
|
||||
|
||||
|
||||
@staff_member_required
def export_download(request):
    """Generate and download the selected export files.

    Query parameters:
        type:  repeated; which datasets to export (defaults to all of them).
        sport: optional sport code filter passed to every exporter.
        year:  optional game-year filter (games export only).

    Returns a single JSON attachment when exactly one dataset is selected,
    otherwise a ZIP archive containing one JSON file per dataset.
    """
    export_types = request.GET.getlist('type')
    sport_filter = request.GET.get('sport', '')
    year_filter = request.GET.get('year', '')

    if not export_types:
        export_types = ['sports', 'league_structure', 'teams', 'stadiums',
                        'games', 'team_aliases', 'stadium_aliases']

    # The year arrives as a string; the games exporter wants an int (or None).
    year_int = int(year_filter) if year_filter else None

    # dataset key -> (output filename, lazily-invoked exporter).
    exporters = {
        'sports': ('sports_canonical.json', lambda: export_sports(sport_filter)),
        'league_structure': ('league_structure.json', lambda: export_league_structure(sport_filter)),
        'teams': ('teams_canonical.json', lambda: export_teams(sport_filter)),
        'stadiums': ('stadiums_canonical.json', lambda: export_stadiums(sport_filter)),
        'games': ('games_canonical.json', lambda: export_games(sport_filter, year_int)),
        'team_aliases': ('team_aliases.json', lambda: export_team_aliases(sport_filter)),
        'stadium_aliases': ('stadium_aliases.json', lambda: export_stadium_aliases(sport_filter)),
    }

    # Map of output filename -> JSON-serializable payload, in canonical order.
    files = {}
    for key, (fname, build) in exporters.items():
        if key in export_types:
            files[fname] = build()

    # Single dataset: return the JSON directly.
    if len(files) == 1:
        filename, payload = next(iter(files.items()))
        response = HttpResponse(
            json.dumps(payload, indent=2),
            content_type='application/json'
        )
        # BUG FIX: the header previously hard-coded a placeholder string
        # instead of interpolating the actual filename, so every single-file
        # download was served under a bogus name.
        response['Content-Disposition'] = f'attachment; filename="{filename}"'
        return response

    # Multiple datasets: bundle everything into one ZIP archive.
    zip_buffer = io.BytesIO()
    with zipfile.ZipFile(zip_buffer, 'w', zipfile.ZIP_DEFLATED) as zf:
        for fname, payload in files.items():
            zf.writestr(fname, json.dumps(payload, indent=2))
    zip_buffer.seek(0)

    # Descriptive archive name, e.g. sportstime_export_nba_2024.zip.
    parts = ['sportstime_export']
    if sport_filter:
        parts.append(sport_filter)
    if year_filter:
        parts.append(str(year_filter))
    zip_filename = '_'.join(parts) + '.zip'

    response = HttpResponse(zip_buffer.read(), content_type='application/zip')
    response['Content-Disposition'] = f'attachment; filename="{zip_filename}"'
    return response
|
||||
|
||||
|
||||
# =============================================================================
|
||||
# Export Helper Functions
|
||||
# =============================================================================
|
||||
|
||||
def _get_conference_id(conference):
|
||||
"""Get conference canonical ID from DB field."""
|
||||
return conference.canonical_id
|
||||
|
||||
|
||||
def _get_division_id(division):
|
||||
"""Get division canonical ID from DB field."""
|
||||
return division.canonical_id
|
||||
|
||||
|
||||
def _extract_domain(url):
|
||||
"""Extract domain from URL."""
|
||||
try:
|
||||
parsed = urlparse(url)
|
||||
domain = parsed.netloc
|
||||
if domain.startswith('www.'):
|
||||
domain = domain[4:]
|
||||
return domain
|
||||
except Exception:
|
||||
return None
|
||||
|
||||
|
||||
def export_sports(sport_filter=None):
    """Export active sports as canonical-format dicts, ordered by code."""
    qs = Sport.objects.filter(is_active=True)
    if sport_filter:
        qs = qs.filter(code=sport_filter.lower())

    records = []
    for sport in qs.order_by('code'):
        # sport_id and abbreviation share the upper-cased short name.
        abbrev = sport.short_name.upper()
        records.append({
            'sport_id': abbrev,
            'abbreviation': abbrev,
            'display_name': sport.name,
            'icon_name': sport.icon_name or '',
            'color_hex': sport.color_hex or '',
            'season_start_month': sport.season_start_month,
            'season_end_month': sport.season_end_month,
            'is_active': sport.is_active,
        })
    return records
|
||||
|
||||
|
||||
def export_league_structure(sport_filter=None):
    """Export the league/conference/division hierarchy as a flat node list.

    Each entry is a dict with an ``id``, a ``type`` of ``league``,
    ``conference`` or ``division``, and a ``parent_id`` linking it to the
    node above it (``None`` for leagues).  Optionally filtered to one sport
    via *sport_filter* (matched case-insensitively against ``Sport.code``).
    """
    data = []
    seen_ids = set()  # Track IDs to prevent duplicates
    display_order = 0  # running order assigned to league nodes only

    sports = Sport.objects.all()
    if sport_filter:
        sports = sports.filter(code=sport_filter.lower())

    for sport in sports.order_by('code'):
        league_id = f"{sport.code}_league"

        # Skip if we've already seen this ID
        # NOTE: skipping a duplicate league also skips its conferences and
        # divisions, since the whole subtree is emitted inside this iteration.
        if league_id in seen_ids:
            continue
        seen_ids.add(league_id)

        data.append({
            'id': league_id,
            'sport': sport.short_name,
            'type': 'league',
            'name': sport.name,
            'abbreviation': sport.short_name,
            'parent_id': None,  # leagues are the roots of the hierarchy
            'display_order': display_order,
        })
        display_order += 1

        # Conferences/divisions use their stored 'order' field for display
        # ordering rather than the running counter above.
        conferences = Conference.objects.filter(sport=sport).order_by('order', 'name')
        for conf in conferences:
            conf_id = _get_conference_id(conf)

            # Skip duplicate conference IDs
            if conf_id in seen_ids:
                continue
            seen_ids.add(conf_id)

            data.append({
                'id': conf_id,
                'sport': sport.short_name,
                'type': 'conference',
                'name': conf.name,
                'abbreviation': conf.short_name or None,
                'parent_id': league_id,
                'display_order': conf.order,
            })

            divisions = Division.objects.filter(conference=conf).order_by('order', 'name')
            for div in divisions:
                div_id = _get_division_id(div)

                # Skip duplicate division IDs
                if div_id in seen_ids:
                    continue
                seen_ids.add(div_id)

                data.append({
                    'id': div_id,
                    'sport': sport.short_name,
                    'type': 'division',
                    'name': div.name,
                    'abbreviation': div.short_name or None,
                    'parent_id': conf_id,
                    'display_order': div.order,
                })

    return data
|
||||
|
||||
|
||||
def export_teams(sport_filter=None):
    """Export teams as canonical-format dicts with conference/division ids."""
    qs = Team.objects.select_related(
        'sport', 'division', 'division__conference', 'home_stadium'
    ).all()
    if sport_filter:
        qs = qs.filter(sport__code=sport_filter.lower())

    records = []
    for team in qs.order_by('sport__code', 'city', 'name'):
        # Conference/division ids only exist for teams placed in a division.
        if team.division:
            division_id = _get_division_id(team.division)
            conference_id = _get_conference_id(team.division.conference)
        else:
            division_id = None
            conference_id = None

        records.append({
            'canonical_id': team.id,
            'name': team.name,
            'abbreviation': team.abbreviation,
            'sport': team.sport.short_name,
            'city': team.city,
            'stadium_canonical_id': team.home_stadium_id,
            'conference_id': conference_id,
            'division_id': division_id,
            'primary_color': team.primary_color or None,
            'secondary_color': team.secondary_color or None,
        })
    return records
|
||||
|
||||
|
||||
def export_stadiums(sport_filter=None):
    """Export stadiums, including the abbreviations of teams based at each."""
    qs = Stadium.objects.select_related('sport').all()
    if sport_filter:
        qs = qs.filter(sport__code=sport_filter.lower())

    # Build stadium id -> list of home-team abbreviations.
    team_qs = Team.objects.filter(home_stadium__isnull=False).select_related('home_stadium')
    if sport_filter:
        team_qs = team_qs.filter(sport__code=sport_filter.lower())

    home_teams = {}
    for team in team_qs:
        home_teams.setdefault(team.home_stadium_id, []).append(team.abbreviation)

    records = []
    for stadium in qs.order_by('sport__code', 'city', 'name'):
        records.append({
            'canonical_id': stadium.id,
            'name': stadium.name,
            'city': stadium.city,
            'state': stadium.state or None,
            'latitude': float(stadium.latitude) if stadium.latitude else None,
            'longitude': float(stadium.longitude) if stadium.longitude else None,
            'capacity': stadium.capacity or 0,
            'sport': stadium.sport.short_name,
            'primary_team_abbrevs': home_teams.get(stadium.id, []),
            'year_opened': stadium.opened_year,
            'timezone_identifier': stadium.timezone or None,
            'image_url': stadium.image_url or None,
        })
    return records
|
||||
|
||||
|
||||
def export_games(sport_filter=None, year_filter=None):
    """Export games with an ISO-8601 UTC timestamp in ``game_datetime_utc``."""
    qs = Game.objects.select_related(
        'sport', 'home_team', 'away_team', 'stadium'
    ).all()
    if sport_filter:
        qs = qs.filter(sport__code=sport_filter.lower())
    if year_filter:
        qs = qs.filter(game_date__year=year_filter)

    records = []
    for game in qs.order_by('game_date', 'sport__code'):
        # Normalize to an aware UTC datetime; naive values are treated as
        # already being in UTC.
        when = game.game_date
        if when.tzinfo is None:
            when = when.replace(tzinfo=dt_timezone.utc)
        when_utc = when.astimezone(dt_timezone.utc)

        # Only attempt domain extraction when a source URL is present.
        domain = _extract_domain(game.source_url) if game.source_url else None

        records.append({
            'canonical_id': game.id,
            'sport': game.sport.short_name,
            # Season keys off the stored game_date's year, not the UTC value.
            'season': str(game.game_date.year),
            'game_datetime_utc': when_utc.strftime('%Y-%m-%dT%H:%M:%SZ'),
            'home_team': game.home_team.full_name,
            'away_team': game.away_team.full_name,
            'home_team_abbrev': game.home_team.abbreviation,
            'away_team_abbrev': game.away_team.abbreviation,
            'home_team_canonical_id': game.home_team_id,
            'away_team_canonical_id': game.away_team_id,
            'venue': game.stadium.name if game.stadium else None,
            'stadium_canonical_id': game.stadium_id,
            'source': domain,
            'is_playoff': game.is_playoff,
            'broadcast_info': None,
        })
    return records
|
||||
|
||||
|
||||
def export_team_aliases(sport_filter=None):
    """Export team aliases, mapping internal alias types onto export types."""
    qs = TeamAlias.objects.select_related('team', 'team__sport').all()
    if sport_filter:
        qs = qs.filter(team__sport__code=sport_filter.lower())

    # Internal alias_type -> export alias_type; unknown types fall back to 'name'.
    type_map = {
        'full_name': 'name',
        'city_name': 'city',
        'abbreviation': 'abbreviation',
        'nickname': 'name',
        'historical': 'name',
    }

    records = []
    for alias in qs.order_by('team__sport__code', 'team__id', 'id'):
        records.append({
            'id': f"alias_{alias.team.sport.code}_{alias.pk}",
            'team_canonical_id': alias.team_id,
            'alias_type': type_map.get(alias.alias_type, 'name'),
            'alias_value': alias.alias,
            'valid_from': alias.valid_from.strftime('%Y-%m-%d') if alias.valid_from else None,
            'valid_until': alias.valid_until.strftime('%Y-%m-%d') if alias.valid_until else None,
        })
    return records
|
||||
|
||||
|
||||
def export_stadium_aliases(sport_filter=None):
    """Export stadium aliases with their validity date ranges."""
    qs = StadiumAlias.objects.select_related('stadium', 'stadium__sport').all()
    if sport_filter:
        qs = qs.filter(stadium__sport__code=sport_filter.lower())

    records = []
    for alias in qs.order_by('stadium__sport__code', 'stadium__id', 'id'):
        records.append({
            'alias_name': alias.alias,
            'stadium_canonical_id': alias.stadium_id,
            'valid_from': alias.valid_from.strftime('%Y-%m-%d') if alias.valid_from else None,
            'valid_until': alias.valid_until.strftime('%Y-%m-%d') if alias.valid_until else None,
        })
    return records
|
||||
Reference in New Issue
Block a user