feat(scripts): add sportstime-parser data pipeline
Complete Python package for scraping, normalizing, and uploading sports schedule data to CloudKit. Includes: - Multi-source scrapers for NBA, MLB, NFL, NHL, MLS, WNBA, NWSL - Canonical ID system for teams, stadiums, and games - Fuzzy matching with manual alias support - CloudKit uploader with batch operations and deduplication - Comprehensive test suite with fixtures - WNBA abbreviation aliases for improved team resolution - Alias validation script to detect orphan references All 5 phases of data remediation plan completed: - Phase 1: Alias fixes (team/stadium alias additions) - Phase 2: NHL stadium coordinate fixes - Phase 3: Re-scrape validation - Phase 4: iOS bundle update - Phase 5: Code quality improvements (WNBA aliases) Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
This commit is contained in:
257
sportstime_parser/tests/test_scrapers/test_mlb.py
Normal file
257
sportstime_parser/tests/test_scrapers/test_mlb.py
Normal file
@@ -0,0 +1,257 @@
|
||||
"""Tests for MLB scraper."""
|
||||
|
||||
from datetime import datetime
|
||||
from unittest.mock import patch
|
||||
|
||||
import pytest
|
||||
|
||||
from sportstime_parser.scrapers.mlb import MLBScraper, create_mlb_scraper
|
||||
from sportstime_parser.scrapers.base import RawGameData
|
||||
from sportstime_parser.tests.fixtures import (
|
||||
load_json_fixture,
|
||||
MLB_ESPN_SCOREBOARD_JSON,
|
||||
)
|
||||
|
||||
|
||||
class TestMLBScraperInit:
    """Construction and basic configuration of MLBScraper."""

    def test_creates_scraper_with_season(self):
        """A directly-constructed scraper carries the sport key and season."""
        instance = MLBScraper(season=2026)
        assert instance.sport == "mlb"
        assert instance.season == 2026

    def test_factory_function_creates_scraper(self):
        """The factory helper returns an MLBScraper for the given season."""
        instance = create_mlb_scraper(season=2026)
        assert isinstance(instance, MLBScraper)
        assert instance.season == 2026

    def test_expected_game_count(self):
        """A full MLB regular season is 2430 games (30 teams * 162 / 2)."""
        assert MLBScraper(season=2026).expected_game_count == 2430

    def test_sources_in_priority_order(self):
        """Sources are tried in order: Baseball-Reference, MLB API, ESPN."""
        ordering = MLBScraper(season=2026)._get_sources()
        assert ordering == ["baseball_reference", "mlb_api", "espn"]
||||
|
||||
class TestESPNParsing:
    """Parsing of ESPN scoreboard API payloads into RawGameData."""

    def test_parses_completed_games(self):
        """Final games come through with teams, scores, and venue intact."""
        parser = MLBScraper(season=2026)
        payload = load_json_fixture(MLB_ESPN_SCOREBOARD_JSON)
        parsed = parser._parse_espn_response(payload, "http://espn.com/api")

        finals = [game for game in parsed if game.status == "final"]
        assert len(finals) == 2

        # Yankees at Red Sox: spot-check both teams, the score, and the venue.
        matchup = next(
            game for game in finals if game.away_team_raw == "New York Yankees"
        )
        assert matchup.home_team_raw == "Boston Red Sox"
        assert matchup.away_score == 3
        assert matchup.home_score == 5
        assert matchup.stadium_raw == "Fenway Park"

    def test_parses_scheduled_games(self):
        """Not-yet-played games are surfaced with status 'scheduled'."""
        parser = MLBScraper(season=2026)
        payload = load_json_fixture(MLB_ESPN_SCOREBOARD_JSON)
        parsed = parser._parse_espn_response(payload, "http://espn.com/api")

        upcoming = [game for game in parsed if game.status == "scheduled"]
        assert len(upcoming) == 1

        dodgers_giants = upcoming[0]
        assert dodgers_giants.away_team_raw == "Los Angeles Dodgers"
        assert dodgers_giants.home_team_raw == "San Francisco Giants"
        assert dodgers_giants.stadium_raw == "Oracle Park"

    def test_parses_venue_info(self):
        """Every parsed game carries a non-empty raw stadium name."""
        parser = MLBScraper(season=2026)
        payload = load_json_fixture(MLB_ESPN_SCOREBOARD_JSON)
        parsed = parser._parse_espn_response(payload, "http://espn.com/api")

        assert all(game.stadium_raw is not None for game in parsed)
||||
class TestGameNormalization:
    """Normalization of raw games into canonical game records."""

    def test_normalizes_games_with_canonical_ids(self):
        """A resolvable raw game yields one normalized game with canonical IDs."""
        normalizer = MLBScraper(season=2026)

        raw = RawGameData(
            game_date=datetime(2026, 4, 15),
            home_team_raw="Boston Red Sox",
            away_team_raw="New York Yankees",
            stadium_raw="Fenway Park",
            home_score=5,
            away_score=3,
            status="final",
            source_url="http://example.com",
        )

        normalized, review_items = normalizer._normalize_games([raw])

        assert len(normalized) == 1
        game = normalized[0]

        # Canonical game ID encodes sport, season, away/home abbrevs, and date.
        assert game.id == "mlb_2026_nyy_bos_0415"
        assert game.sport == "mlb"
        assert game.season == 2026

        # Both teams resolve to canonical team IDs.
        assert game.home_team_id == "team_mlb_bos"
        assert game.away_team_id == "team_mlb_nyy"

        # Original scores survive normalization untouched.
        assert game.home_score == 5
        assert game.away_score == 3

    def test_creates_review_items_for_unresolved_teams(self):
        """An unknown team name drops the game but records a review item."""
        normalizer = MLBScraper(season=2026)

        raw = RawGameData(
            game_date=datetime(2026, 4, 15),
            home_team_raw="Unknown Team XYZ",
            away_team_raw="Boston Red Sox",
            stadium_raw="Fenway Park",
            status="scheduled",
        )

        normalized, review_items = normalizer._normalize_games([raw])

        # The unresolved home team prevents game creation...
        assert len(normalized) == 0
        # ...but the failure is captured for manual review.
        assert len(review_items) >= 1
||||
class TestTeamAndStadiumScraping:
    """Static team and stadium reference data exposed by the scraper."""

    def test_scrapes_all_mlb_teams(self):
        """All 30 franchises are returned, each fully populated and unique."""
        roster = MLBScraper(season=2026).scrape_teams()

        # MLB has exactly 30 teams, each with a distinct canonical ID.
        assert len(roster) == 30
        assert len({team.id for team in roster}) == 30

        for team in roster:
            assert team.id.startswith("team_mlb_")
            assert team.sport == "mlb"
            assert team.city
            assert team.name
            assert team.full_name
            assert team.abbreviation

    def test_teams_have_leagues_and_divisions(self):
        """League membership (stored as `conference`) splits 15 AL / 15 NL."""
        roster = MLBScraper(season=2026).scrape_teams()

        american = [team for team in roster if team.conference == "American"]
        national = [team for team in roster if team.conference == "National"]

        assert len(american) == 15
        assert len(national) == 15

    def test_scrapes_all_mlb_stadiums(self):
        """One stadium per team, each with location fields and coordinates."""
        parks = MLBScraper(season=2026).scrape_stadiums()

        # One venue per franchise, all with distinct canonical IDs.
        assert len(parks) == 30
        assert len({park.id for park in parks}) == 30

        for park in parks:
            assert park.id.startswith("stadium_mlb_")
            assert park.sport == "mlb"
            assert park.name
            assert park.city
            assert park.state
            assert park.country in ["USA", "Canada"]
            # Coordinates must be real values, not the 0,0 placeholder.
            assert park.latitude != 0
            assert park.longitude != 0
|
||||
class TestScrapeFallback:
    """Cascade through alternate sources when earlier ones fail."""

    def test_falls_back_to_next_source_on_failure(self):
        """ESPN data is used after Baseball-Reference and the MLB API error out."""
        mlb_scraper = MLBScraper(season=2026)

        with patch.object(mlb_scraper, '_scrape_baseball_reference') as br_mock:
            with patch.object(mlb_scraper, '_scrape_mlb_api') as api_mock:
                with patch.object(mlb_scraper, '_scrape_espn') as espn_mock:
                    # The first two sources raise, forcing the fallback chain.
                    br_mock.side_effect = Exception("Connection failed")
                    api_mock.side_effect = Exception("API error")

                    # The final source succeeds with one scheduled game.
                    espn_mock.return_value = [
                        RawGameData(
                            game_date=datetime(2026, 4, 15),
                            home_team_raw="Boston Red Sox",
                            away_team_raw="New York Yankees",
                            stadium_raw="Fenway Park",
                            status="scheduled",
                        )
                    ]

                    outcome = mlb_scraper.scrape_games()

                    assert outcome.success
                    assert outcome.source == "espn"
                    # Every source in the chain must have been attempted.
                    assert br_mock.called
                    assert api_mock.called
                    assert espn_mock.called
||||
class TestSeasonMonths:
    """Month window computed for an MLB season."""

    def test_gets_correct_season_months(self):
        """The 2026 window spans March through November of 2026."""
        window = MLBScraper(season=2026)._get_season_months()

        # Nine months: March..November inclusive.
        assert len(window) == 9
        assert window[0] == (2026, 3)
        assert window[-1] == (2026, 11)
Reference in New Issue
Block a user