Compare commits

...

5 Commits

Author SHA1 Message Date
Trey t
da033cf12c Fix NSFW sheet scroll on iOS/iPad, clean up audio pin
- WerkoutNSFWSheet: wrap content in ScrollView + ViewThatFits(in: .horizontal)
  so iPad's narrow sheet width falls back to VStack and content scrolls.
- Tighten padding on compact layouts (38→24).
- Revert AAC-preference in pinAudioSelection (stream is all AAC, no Dolby).

Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
2026-04-16 19:30:25 -05:00
Trey t
08ad702f9d Add AudioDiagnostics heartbeat logging
Adds [AUDIO]-prefixed logs to both single-stream and multi-stream players:
1 Hz heartbeat with rate/timeControl/mute/volume/bitrate/route, plus
immediate events on rate, isMuted, volume, currentItem, media selection,
access-log, error-log, and system audio route/interruption changes.

Grep Xcode console for `[AUDIO]` or `[AUDIO SYSTEM]` to isolate.

Also reverts the AAC-preference in pinAudioSelection: the
ballgame.treytartt.com master playlist is already all mp4a.40.2 stereo,
so the Dolby-DRC theory doesn't fit. Pin simply selects the default
audible option now.

Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
2026-04-14 20:28:15 -05:00
Trey t
85a19fdd71 Fix mid-stream audio loudness jumps
Root cause: the quality upgrade path called replaceCurrentItem mid-stream,
which re-loaded the HLS master manifest and re-picked an audio rendition,
producing a perceived loudness jump 10-30s into playback. .moviePlayback
mode amplified this by re-initializing cinematic audio processing on each
variant change.

- Start streams directly at user's desiredResolution; remove
  scheduleQualityUpgrade, qualityUpgradeTask, and the 504p->best swap.
- Switch AVAudioSession mode from .moviePlayback to .default in both
  MultiStreamView and SingleStreamPlayerView.
- Pin the HLS audio rendition by selecting the default audible
  MediaSelectionGroup option on every new AVPlayerItem, preventing
  ABR from swapping channel layouts mid-stream.

Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
2026-04-14 20:04:39 -05:00
Trey t
ba24c767a0 Improve stream quality: stop capping resolution, allow AVPlayer to ramp
SingleStream: pass preserveServerResolutionWhenBest=false so "best"
always reaches the server for a full multi-variant manifest. Increase
buffer to 8s and enable automaticallyWaitsToMinimizeStalling so AVPlayer
can measure bandwidth and select higher variants. Add quality monitor
that nudges AVPlayer if observed bandwidth far exceeds indicated bitrate.

MultiStream: remove broken URL-param resolution detection that falsely
skipped upgrades, log actual indicatedBitrate instead. Extend upgrade
check windows from [2,4,7]s to [2,4,7,15,30]s for slow-to-stabilize
streams.

Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
2026-04-12 12:38:38 -05:00
Trey t
bf44a7b7eb Fix memory leaks, stale game data, and audio volume fluctuation
Memory: clean observers even during PiP, nil player on tile disappear,
track/cancel Werkout monitor tasks, add highlight player cleanup.
Data: add scenePhase-triggered reload on day change, unconditional
10-minute full schedule refresh, keep fast 60s score refresh for live games.
Audio: set mute state before playback starts, use consistent .moviePlayback
mode, add audio session interruption recovery handler.

Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
2026-04-12 12:21:21 -05:00
6 changed files with 522 additions and 137 deletions

View File

@@ -53,6 +53,8 @@ final class GamesViewModel {
@ObservationIgnored
private var refreshTask: Task<Void, Never>?
@ObservationIgnored
private var lastLoadDateString: String?
@ObservationIgnored
private var authenticatedVideoFeedCache: [String: AuthenticatedVideoFeedCacheEntry] = [:]
@ObservationIgnored
private var videoShuffleBagsByModel: [String: [String: [URL]]] = [:]
@@ -105,12 +107,21 @@ final class GamesViewModel {
func startAutoRefresh() {
stopAutoRefresh()
refreshTask = Task { [weak self] in
var ticksSinceFullLoad = 0
while !Task.isCancelled {
try? await Task.sleep(for: .seconds(60))
guard !Task.isCancelled else { break }
guard let self else { break }
// Refresh if there are live games or active streams
if !self.liveGames.isEmpty || !self.activeStreams.isEmpty {
ticksSinceFullLoad += 1
// Full schedule reload every 10 minutes (or immediately on day change)
let today = Self.dateFormatter.string(from: Date())
let dayChanged = self.lastLoadDateString != nil && self.lastLoadDateString != today
if dayChanged || ticksSinceFullLoad >= 10 {
ticksSinceFullLoad = 0
await self.loadGames()
} else if !self.liveGames.isEmpty || !self.activeStreams.isEmpty {
// Fast score refresh every 60s when games are live
await self.refreshScores()
}
}
@@ -122,6 +133,14 @@ final class GamesViewModel {
refreshTask = nil
}
/// Reloads the full schedule if the calendar day has rolled over since the
/// last successful load.
///
/// Compares today's formatted date string against `lastLoadDateString`
/// (recorded by `loadGames()`); on mismatch it logs the transition and runs
/// a full reload. Intended to be called when the app returns to the
/// foreground so stale "yesterday" data is replaced.
func refreshIfDayChanged() async {
    let today = Self.dateFormatter.string(from: Date())
    if lastLoadDateString != today {
        // Fix: previous log interpolated both dates with no separator,
        // producing unreadable output like "Day changed (nil2026-04-16)".
        logGamesViewModel("Day changed (\(lastLoadDateString ?? "nil") -> \(today)), reloading games")
        await loadGames()
    }
}
private func refreshScores() async {
let statsGames = await fetchStatsGames()
guard !statsGames.isEmpty else { return }
@@ -227,6 +246,7 @@ final class GamesViewModel {
errorMessage = "No games found"
}
lastLoadDateString = todayDateString
isLoading = false
}

View File

@@ -260,7 +260,11 @@ struct DashboardView: View {
mediaId: selection.broadcast.mediaId,
streamURLString: selection.broadcast.streamURL
)
guard let url = await viewModel.resolveStreamURL(for: stream) else { return nil }
guard let url = await viewModel.resolveStreamURL(
for: stream,
resolutionOverride: viewModel.defaultResolution,
preserveServerResolutionWhenBest: false
) else { return nil }
return SingleStreamPlaybackSource(url: url)
}
@@ -619,22 +623,24 @@ struct WerkoutNSFWSheet: View {
sheetBackground
.ignoresSafeArea()
ViewThatFits {
HStack(alignment: .top, spacing: 32) {
overviewColumn
.frame(maxWidth: .infinity, alignment: .leading)
ScrollView(.vertical, showsIndicators: false) {
ViewThatFits(in: .horizontal) {
HStack(alignment: .top, spacing: 32) {
overviewColumn
.frame(maxWidth: .infinity, alignment: .leading)
actionColumn
.frame(width: 360, alignment: .leading)
}
actionColumn
.frame(width: 360, alignment: .leading)
}
VStack(alignment: .leading, spacing: 24) {
overviewColumn
actionColumn
.frame(maxWidth: .infinity, alignment: .leading)
VStack(alignment: .leading, spacing: 24) {
overviewColumn
actionColumn
.frame(maxWidth: .infinity, alignment: .leading)
}
}
}
.padding(38)
.padding(usesStackedLayout ? 24 : 38)
.background(
RoundedRectangle(cornerRadius: 34, style: .continuous)
.fill(.black.opacity(0.46))

View File

@@ -69,6 +69,7 @@ struct GameCenterView: View {
VideoPlayer(player: player)
.ignoresSafeArea()
.onAppear { player.play() }
.onDisappear { player.pause() }
}
}
}

View File

@@ -352,10 +352,11 @@ private struct MultiStreamTile: View {
@State private var player: AVPlayer?
@State private var hasError = false
@State private var startupPlaybackTask: Task<Void, Never>?
@State private var qualityUpgradeTask: Task<Void, Never>?
@State private var werkoutMonitorTask: Task<Void, Never>?
@State private var clipTimeLimitObserver: Any?
@State private var isAdvancingClip = false
@StateObject private var playbackDiagnostics = MultiStreamPlaybackDiagnostics()
@State private var audioDiagnostics: AudioDiagnostics?
private static let maxClipDuration: Double = 15.0
private static var audioSessionConfigured = false
@@ -445,10 +446,16 @@ private struct MultiStreamTile: View {
logMultiView("tile disappeared id=\(stream.id) label=\(stream.label)")
startupPlaybackTask?.cancel()
startupPlaybackTask = nil
qualityUpgradeTask?.cancel()
qualityUpgradeTask = nil
if let player { removeClipTimeLimit(from: player) }
werkoutMonitorTask?.cancel()
werkoutMonitorTask = nil
if let player {
removeClipTimeLimit(from: player)
player.pause()
}
player = nil
playbackDiagnostics.clear(streamID: stream.id, reason: "tile disappeared")
audioDiagnostics?.detach()
audioDiagnostics = nil
}
#if os(tvOS)
.focusEffectDisabled()
@@ -529,11 +536,8 @@ private struct MultiStreamTile: View {
.clipShape(Capsule())
}
/// Resolution every multi-view tile starts at.
///
/// Tiles now begin directly at the user's configured resolution; the old
/// "start at 504p, then upgrade" two-step was removed because the mid-stream
/// `replaceCurrentItem` swap caused audible loudness jumps. This span
/// previously held two conflicting declarations (diff residue); only the
/// final form remains.
private var multiViewStartupResolution: String {
    viewModel.defaultResolution
}
private func startStream() async {
@@ -549,18 +553,18 @@ private struct MultiStreamTile: View {
onPlaybackEnded: playbackEndedHandler(for: player)
)
scheduleStartupPlaybackRecovery(for: player)
scheduleQualityUpgrade(for: player)
installClipTimeLimit(on: player)
attachAudioDiagnostics(to: player)
logMultiView("startStream reused inline player id=\(stream.id) muted=\(player.isMuted)")
return
}
if !Self.audioSessionConfigured {
do {
try AVAudioSession.sharedInstance().setCategory(.playback)
try AVAudioSession.sharedInstance().setCategory(.playback, mode: .default)
try AVAudioSession.sharedInstance().setActive(true)
Self.audioSessionConfigured = true
logMultiView("startStream audio session configured id=\(stream.id)")
logMultiView("startStream audio session configured id=\(stream.id) mode=default")
} catch {
logMultiView("startStream audio session failed id=\(stream.id) error=\(error.localizedDescription)")
}
@@ -576,8 +580,8 @@ private struct MultiStreamTile: View {
onPlaybackEnded: playbackEndedHandler(for: existingPlayer)
)
scheduleStartupPlaybackRecovery(for: existingPlayer)
scheduleQualityUpgrade(for: existingPlayer)
installClipTimeLimit(on: existingPlayer)
attachAudioDiagnostics(to: existingPlayer)
logMultiView("startStream reused shared player id=\(stream.id) muted=\(existingPlayer.isMuted)")
return
}
@@ -604,15 +608,16 @@ private struct MultiStreamTile: View {
avPlayer.currentItem?.preferredForwardBufferDuration = 2
self.player = avPlayer
// Set mute state BEFORE playback to prevent audio spikes
viewModel.attachPlayer(avPlayer, to: stream.id)
playbackDiagnostics.attach(
to: avPlayer,
streamID: stream.id,
label: stream.label,
onPlaybackEnded: playbackEndedHandler(for: avPlayer)
)
viewModel.attachPlayer(avPlayer, to: stream.id)
scheduleStartupPlaybackRecovery(for: avPlayer)
scheduleQualityUpgrade(for: avPlayer)
attachAudioDiagnostics(to: avPlayer)
logMultiView("startStream attached player id=\(stream.id) muted=\(avPlayer.isMuted) startupResolution=\(multiViewStartupResolution) fastStart=true calling playImmediately(atRate: 1.0)")
avPlayer.playImmediately(atRate: 1.0)
installClipTimeLimit(on: avPlayer)
@@ -625,19 +630,35 @@ private struct MultiStreamTile: View {
)
let item = makePlayerItem(url: url, headers: headers)
return AVPlayer(playerItem: item)
let player = AVPlayer(playerItem: item)
player.appliesMediaSelectionCriteriaAutomatically = false
logMultiView("startStream configured AVPlayer id=\(stream.id) appliesMediaSelectionCriteriaAutomatically=false")
return player
}
/// Builds the `AVPlayerItem` for a multi-view tile.
///
/// - Parameters:
///   - url: Resolved HLS stream URL.
///   - headers: Optional HTTP headers for authenticated streams; when
///     non-empty they are attached via `AVURLAssetHTTPHeaderFieldsKey`.
/// - Returns: A configured item with spatial audio disabled and its audio
///   rendition pinned (see `pinAudioSelection(on:)`).
///
/// Note: this span previously contained interleaved removed-diff lines that
/// made it unparseable; this is the coherent final version.
private func makePlayerItem(url: URL, headers: [String: String]) -> AVPlayerItem {
    let item: AVPlayerItem
    if headers.isEmpty {
        item = AVPlayerItem(url: url)
    } else {
        let assetOptions: [String: Any] = [
            "AVURLAssetHTTPHeaderFieldsKey": headers,
        ]
        let asset = AVURLAsset(url: url, options: assetOptions)
        item = AVPlayerItem(asset: asset)
    }
    // Disable spatialization so route changes can't re-process the channel layout.
    item.allowedAudioSpatializationFormats = []
    logMultiView("startStream configured player item id=\(stream.id) allowedAudioSpatializationFormats=[]")
    pinAudioSelection(on: item)
    return item
}
/// Pin the HLS audio rendition so ABR can't swap channel layouts mid-stream.
///
/// Fire-and-forget: picking a rendition requires an async media-selection
/// group load, so the actual work happens in
/// `enforcePinnedMultiStreamAudioSelection(on:streamID:)` on the main actor.
/// `stream.id` is captured before the task starts so the log line stays
/// stable even if the tile's stream changes before the task runs.
private func pinAudioSelection(on item: AVPlayerItem) {
    let streamID = stream.id
    Task { @MainActor in
        await enforcePinnedMultiStreamAudioSelection(on: item, streamID: streamID)
    }
}
private func scheduleStartupPlaybackRecovery(for player: AVPlayer) {
@@ -678,92 +699,18 @@ private struct MultiStreamTile: View {
}
}
private func scheduleQualityUpgrade(for player: AVPlayer) {
qualityUpgradeTask?.cancel()
guard stream.overrideURL == nil else {
logMultiView("qualityUpgrade skip id=\(stream.id) reason=override-url")
return
}
guard let targetResolution = multiViewUpgradeTargetResolution else {
logMultiView("qualityUpgrade skip id=\(stream.id) reason=target-already-\(multiViewStartupResolution)")
return
}
let streamID = stream.id
let label = stream.label
qualityUpgradeTask = Task { @MainActor in
let checkDelays: [Double] = [2.0, 4.0, 7.0]
for delay in checkDelays {
try? await Task.sleep(for: .seconds(delay))
guard !Task.isCancelled else { return }
guard let currentPlayer = self.player, currentPlayer === player else {
logMultiView("qualityUpgrade abort id=\(streamID) label=\(label) reason=player-changed")
return
}
let itemStatus = multiViewItemStatusDescription(player.currentItem?.status ?? .unknown)
let likelyToKeepUp = player.currentItem?.isPlaybackLikelyToKeepUp ?? false
let bufferEmpty = player.currentItem?.isPlaybackBufferEmpty ?? false
let currentResolution = currentStreamResolution(for: player) ?? "unknown"
let stable = (itemStatus == "readyToPlay" || likelyToKeepUp) && !bufferEmpty
logMultiView(
"qualityUpgrade check id=\(streamID) delay=\(delay)s currentResolution=\(currentResolution) targetResolution=\(targetResolution) stable=\(stable) rate=\(player.rate)"
)
guard stable else { continue }
if currentResolution == targetResolution {
logMultiView("qualityUpgrade skip id=\(streamID) reason=already-\(targetResolution)")
return
}
guard let upgradedURL = await viewModel.resolveStreamURL(
for: stream,
resolutionOverride: targetResolution,
preserveServerResolutionWhenBest: false
) else {
logMultiView("qualityUpgrade failed id=\(streamID) targetResolution=\(targetResolution) reason=resolve-nil")
return
}
if let currentURL = currentStreamURL(for: player), currentURL == upgradedURL {
logMultiView("qualityUpgrade skip id=\(streamID) reason=same-url targetResolution=\(targetResolution)")
return
}
logMultiView("qualityUpgrade begin id=\(streamID) targetResolution=\(targetResolution) url=\(upgradedURL.absoluteString)")
let upgradedItem = AVPlayerItem(url: upgradedURL)
upgradedItem.preferredForwardBufferDuration = 4
player.replaceCurrentItem(with: upgradedItem)
player.automaticallyWaitsToMinimizeStalling = false
playbackDiagnostics.attach(to: player, streamID: streamID, label: label)
viewModel.attachPlayer(player, to: streamID)
scheduleStartupPlaybackRecovery(for: player)
logMultiView("qualityUpgrade replay id=\(streamID) targetResolution=\(targetResolution)")
player.playImmediately(atRate: 1.0)
return
}
logMultiView("qualityUpgrade timeout id=\(streamID) targetResolution=\(targetResolution)")
}
}
private func currentStreamURL(for player: AVPlayer) -> URL? {
(player.currentItem?.asset as? AVURLAsset)?.url
}
private func currentStreamResolution(for player: AVPlayer) -> String? {
guard let url = currentStreamURL(for: player) else { return nil }
return URLComponents(url: url, resolvingAgainstBaseURL: false)?
.queryItems?
.first(where: { $0.name == "resolution" })?
.value
/// Lazily creates this tile's `AudioDiagnostics` logger (tagged with the
/// stream label) and points it at `player`. One instance is reused per tile;
/// `AudioDiagnostics.attach` tears down its previous observers first, so
/// re-attaching on player swaps does not stack observers.
private func attachAudioDiagnostics(to player: AVPlayer) {
    if audioDiagnostics == nil {
        audioDiagnostics = AudioDiagnostics(tag: "multi:\(stream.label)")
    }
    audioDiagnostics?.attach(to: player)
}
private func installClipTimeLimit(on player: AVPlayer) {
removeClipTimeLimit(from: player)
guard stream.id == SpecialPlaybackChannelConfig.werkoutNSFWStreamID else { return }
@@ -851,9 +798,11 @@ private struct MultiStreamTile: View {
installClipTimeLimit(on: player)
// Monitor for failure and auto-skip to next clip
Task { @MainActor in
werkoutMonitorTask?.cancel()
werkoutMonitorTask = Task { @MainActor in
for checkDelay in [1.0, 3.0] {
try? await Task.sleep(for: .seconds(checkDelay))
guard !Task.isCancelled else { return }
let postItemStatus = player.currentItem?.status
let error = player.currentItem?.error?.localizedDescription ?? "nil"
logMultiView(
@@ -869,6 +818,45 @@ private struct MultiStreamTile: View {
}
}
/// Applies the pinned audio selection to `item` if it has drifted.
///
/// Loads the audible media-selection group, computes the preferred option via
/// `preferredMultiStreamAudioOption(in:)`, and re-selects it only when the
/// item's current selection differs. Returns silently when the asset has no
/// audible group (or the load fails).
@MainActor
private func enforcePinnedMultiStreamAudioSelection(on item: AVPlayerItem, streamID: String) async {
    let asset = item.asset
    guard let group = try? await asset.loadMediaSelectionGroup(for: .audible),
          let option = preferredMultiStreamAudioOption(in: group) else { return }
    let current = item.currentMediaSelection.selectedMediaOption(in: group)
    if current != option {
        item.select(option, in: group)
    }
    // `current` is the selection observed BEFORE the select call above, so a
    // log line with current != selected means a re-pin just happened.
    logMultiView(
        "pinAudioSelection id=\(streamID) selected=\(option.displayName) current=\(current?.displayName ?? "nil") options=\(group.options.count)"
    )
}
/// Chooses the audio rendition to pin for a multi-view tile.
///
/// The option with the highest `multiStreamAudioPreferenceScore` wins;
/// falls back to the group's default option, then to the first listed option.
private func preferredMultiStreamAudioOption(in group: AVMediaSelectionGroup) -> AVMediaSelectionOption? {
    let fallback = group.defaultOption
    let highestScored = group.options.max { a, b in
        multiStreamAudioPreferenceScore(for: a, defaultOption: fallback)
            < multiStreamAudioPreferenceScore(for: b, defaultOption: fallback)
    }
    return highestScored ?? fallback ?? group.options.first
}
/// Heuristic score for ranking audio renditions; higher is better.
///
/// Prefers the group default and stereo/main English tracks; penalizes
/// surround/Atmos, secondary-language, and descriptive-audio options.
private func multiStreamAudioPreferenceScore(for option: AVMediaSelectionOption, defaultOption: AVMediaSelectionOption?) -> Int {
    let lowered = option.displayName.lowercased()
    let matches: (String) -> Bool = { lowered.contains($0) }
    var total = 0
    if option == defaultOption {
        total += 40
    }
    if ["stereo", "2.0", "main"].contains(where: matches) {
        total += 30
    }
    if ["english", "eng"].contains(where: matches) {
        total += 20
    }
    if ["surround", "5.1", "atmos"].contains(where: matches) {
        total -= 30
    }
    if ["spanish", "sap", "descriptive", "alternate"].contains(where: matches) {
        total -= 25
    }
    if option.hasMediaCharacteristic(.describesVideoForAccessibility) {
        total -= 40
    }
    return total
}
private struct MultiStreamPlayerLayerView: UIViewRepresentable {
let player: AVPlayer
let streamID: String
@@ -1483,3 +1471,245 @@ private func nextMultiViewFocusID(
.streamID
}
#endif
// MARK: - AudioDiagnostics
//
// Audio-specific diagnostic logging. Attach one `AudioDiagnostics` per AVPlayer
// you want to track. Emits a `[AUDIO]`-prefixed 1 Hz heartbeat with rate,
// mute, time, bitrate, and route info plus immediate logs on route changes,
// interruptions, access-log events, and media-selection changes.
//
// Grep Xcode console with `[AUDIO]` to isolate these lines.
@MainActor
final class AudioDiagnostics {
    // Label baked into every log line, e.g. "single" or "multi:<stream label>".
    private let tag: String
    // Weak so diagnostics never extend the player's lifetime.
    private weak var player: AVPlayer?
    // 1 Hz heartbeat loop; cancelled by detach() and deinit.
    private var heartbeatTask: Task<Void, Never>?
    // KVO observations on the player (rate / isMuted / volume / currentItem).
    private var observations: [NSKeyValueObservation] = []
    // Block-based NotificationCenter tokens for per-item observers.
    private var tokens: [NSObjectProtocol] = []
    // One-time guard for the process-wide AVAudioSession observers below.
    private static var processWideInstalled = false
    init(tag: String) {
        self.tag = tag
        AudioDiagnostics.installProcessWideObservers()
    }
    deinit {
        // NOTE(review): only the heartbeat is cancelled here. The `tokens`
        // NotificationCenter observers are removed only in detach(); if an
        // instance is released without detach() being called first, those
        // observers leak — confirm every owner calls detach() before dropping
        // the instance (the call sites in this file appear to).
        heartbeatTask?.cancel()
    }
    /// Start observing `player`. Any previous attachment is torn down first
    /// (via `detach()`), so re-attaching on player swaps does not stack
    /// observers.
    func attach(to player: AVPlayer) {
        detach()
        self.player = player
        log("attach rate=\(player.rate) isMuted=\(player.isMuted) volume=\(player.volume)")
        observations.append(
            player.observe(\.rate, options: [.new]) { [weak self] p, _ in
                Task { @MainActor in self?.log("rate-change rate=\(p.rate) tc=\(p.timeControlStatus.rawValue)") }
            }
        )
        observations.append(
            player.observe(\.isMuted, options: [.new]) { [weak self] p, _ in
                Task { @MainActor in self?.log("isMuted-change value=\(p.isMuted)") }
            }
        )
        observations.append(
            player.observe(\.volume, options: [.new]) { [weak self] p, _ in
                Task { @MainActor in self?.log("volume-change value=\(p.volume)") }
            }
        )
        if let item = player.currentItem {
            attachItemObservers(item)
        }
        // Re-hook the per-item observers whenever the player's item is replaced.
        observations.append(
            player.observe(\.currentItem, options: [.new]) { [weak self] p, _ in
                Task { @MainActor in
                    guard let self else { return }
                    self.log("currentItem-change newItem=\(p.currentItem != nil)")
                    if let item = p.currentItem {
                        self.attachItemObservers(item)
                    }
                }
            }
        )
        startHeartbeat()
    }
    /// Stop all observation and drop the player reference. Safe to call more
    /// than once.
    func detach() {
        heartbeatTask?.cancel()
        heartbeatTask = nil
        observations.removeAll()
        for t in tokens {
            NotificationCenter.default.removeObserver(t)
        }
        tokens.removeAll()
        player = nil
    }
    // Installs per-item observers: access log, error log, media selection.
    // NOTE(review): tokens accumulate across currentItem changes until the
    // next detach(); observers for replaced items stay registered (they are
    // keyed to the old item object and hold it weakly, so they effectively
    // no-op) — confirm that's acceptable for long sessions.
    private func attachItemObservers(_ item: AVPlayerItem) {
        tokens.append(
            NotificationCenter.default.addObserver(
                forName: .AVPlayerItemNewAccessLogEntry,
                object: item,
                queue: .main
            ) { [weak self, weak item] _ in
                guard let self, let event = item?.accessLog()?.events.last else { return }
                Task { @MainActor in
                    // NOTE(review): the "switches=" field logs
                    // numberOfMediaRequests, not variant switches — confirm or
                    // rename the label.
                    self.log(
                        "accessLog indicated=\(Int(event.indicatedBitrate)) observed=\(Int(event.observedBitrate)) switches=\(event.numberOfMediaRequests) stalls=\(event.numberOfStalls) avgVideo=\(Int(event.averageVideoBitrate)) avgAudio=\(Int(event.averageAudioBitrate))"
                    )
                }
            }
        )
        tokens.append(
            NotificationCenter.default.addObserver(
                forName: .AVPlayerItemNewErrorLogEntry,
                object: item,
                queue: .main
            ) { [weak self, weak item] _ in
                guard let self, let event = item?.errorLog()?.events.last else { return }
                Task { @MainActor in
                    self.log("errorLog domain=\(event.errorDomain) statusCode=\(event.errorStatusCode) comment=\(event.errorComment ?? "nil")")
                }
            }
        )
        tokens.append(
            NotificationCenter.default.addObserver(
                forName: AVPlayerItem.mediaSelectionDidChangeNotification,
                object: item,
                queue: .main
            ) { [weak self, weak item] _ in
                guard let self, let item else { return }
                Task { @MainActor in
                    // NOTE(review): enforcePinnedMultiStreamAudioSelection is
                    // declared as a private member of MultiStreamTile, and it
                    // is handed `self.tag` (a diagnostics label) where it
                    // expects a stream id — confirm this call resolves from
                    // this type and that logging the tag as the id is intended.
                    await enforcePinnedMultiStreamAudioSelection(on: item, streamID: self.tag)
                    let asset = item.asset
                    guard let group = try? await asset.loadMediaSelectionGroup(for: .audible),
                          let selected = item.currentMediaSelection.selectedMediaOption(in: group) else {
                        self.log("mediaSelection-change selected=nil")
                        return
                    }
                    let codec = (selected.mediaSubTypes as [NSNumber]).map { audioDiagFourCC($0.uint32Value) }.joined(separator: ",")
                    self.log("mediaSelection-change audio=\"\(selected.displayName)\" codec=\(codec)")
                }
            }
        )
    }
    // Emits one heartbeat immediately, then roughly once per second until the
    // task is cancelled.
    private func startHeartbeat() {
        heartbeatTask?.cancel()
        heartbeatTask = Task { @MainActor [weak self] in
            while !Task.isCancelled {
                self?.emitHeartbeat()
                try? await Task.sleep(for: .seconds(1))
            }
        }
    }
    // One `hb` line with playback, bitrate, and route state. Skips silently
    // when the player or its current item is gone.
    private func emitHeartbeat() {
        guard let player, let item = player.currentItem else { return }
        let currentTime = CMTimeGetSeconds(player.currentTime())
        let event = item.accessLog()?.events.last
        // indicated/observed fall back to 0 when no access-log entry exists yet.
        let indicated = event.map { Int($0.indicatedBitrate) } ?? 0
        let observed = event.map { Int($0.observedBitrate) } ?? 0
        log(
            "hb t=\(String(format: "%.1f", currentTime))s rate=\(player.rate) tc=\(player.timeControlStatus.rawValue) muted=\(player.isMuted) vol=\(String(format: "%.2f", player.volume)) indicated=\(indicated) observed=\(observed) ltku=\(item.isPlaybackLikelyToKeepUp) route=\(AudioDiagnostics.currentRouteDescription())"
        )
    }
    // Single formatting point for `[AUDIO <tag> <timestamp>]` lines.
    private func log(_ message: String) {
        let ts = AudioDiagnostics.timestamp()
        print("[AUDIO \(tag) \(ts)] \(message)")
    }
    // MARK: Process-wide
    // Installed once per process: logs the initial session state, then every
    // route change and interruption, under the `[AUDIO SYSTEM]` prefix.
    private static func installProcessWideObservers() {
        guard !processWideInstalled else { return }
        processWideInstalled = true
        let session = AVAudioSession.sharedInstance()
        print("[AUDIO SYSTEM \(timestamp())] initial category=\(session.category.rawValue) mode=\(session.mode.rawValue) sampleRate=\(session.sampleRate) route=\(currentRouteDescription())")
        NotificationCenter.default.addObserver(
            forName: AVAudioSession.routeChangeNotification,
            object: nil,
            queue: .main
        ) { notification in
            let reasonValue = notification.userInfo?[AVAudioSessionRouteChangeReasonKey] as? UInt ?? 0
            let reason = AVAudioSession.RouteChangeReason(rawValue: reasonValue).map(audioDiagReasonDescription) ?? "unknown(\(reasonValue))"
            let prev = (notification.userInfo?[AVAudioSessionRouteChangePreviousRouteKey] as? AVAudioSessionRouteDescription).map(routeDescription) ?? "nil"
            print("[AUDIO SYSTEM \(timestamp())] routeChange reason=\(reason) previous=\(prev) current=\(currentRouteDescription())")
        }
        NotificationCenter.default.addObserver(
            forName: AVAudioSession.interruptionNotification,
            object: nil,
            queue: .main
        ) { notification in
            let typeValue = notification.userInfo?[AVAudioSessionInterruptionTypeKey] as? UInt ?? 0
            let type = AVAudioSession.InterruptionType(rawValue: typeValue).map(audioDiagInterruptionDescription) ?? "unknown(\(typeValue))"
            print("[AUDIO SYSTEM \(timestamp())] interruption type=\(type)")
        }
    }
    // "portType:portName" for each active output, comma-joined.
    nonisolated static func currentRouteDescription() -> String {
        routeDescription(AVAudioSession.sharedInstance().currentRoute)
    }
    nonisolated static func routeDescription(_ route: AVAudioSessionRouteDescription) -> String {
        let outs = route.outputs.map { "\($0.portType.rawValue):\($0.portName)" }.joined(separator: ",")
        return outs.isEmpty ? "none" : outs
    }
    // Millisecond-precision wall-clock timestamp for log lines.
    nonisolated static func timestamp() -> String {
        audioDiagTSFormatter.string(from: Date())
    }
}
/// Shared timestamp formatter for `[AUDIO]` log lines (hour:minute:second.ms).
/// Cached at file scope because `DateFormatter` construction is expensive.
private let audioDiagTSFormatter: DateFormatter = {
    let formatter = DateFormatter()
    formatter.dateFormat = "HH:mm:ss.SSS"
    return formatter
}()
/// Renders a four-character code (big-endian `UInt32`) as a 4-character
/// string, substituting "?" for bytes outside the printable ASCII range
/// (0x20...0x7E). Used to log audio codec identifiers like "mp4a".
private func audioDiagFourCC(_ raw: UInt32) -> String {
    var out = String()
    for shift in stride(from: 24, through: 0, by: -8) {
        let byte = UInt8(truncatingIfNeeded: raw >> shift)
        out.append((0x20...0x7E).contains(byte) ? Character(UnicodeScalar(byte)) : "?")
    }
    return out
}
/// Human-readable name for an `AVAudioSession` route-change reason, used in
/// `[AUDIO SYSTEM]` log lines.
private func audioDiagReasonDescription(_ reason: AVAudioSession.RouteChangeReason) -> String {
    switch reason {
    case .unknown:
        return "unknown"
    case .newDeviceAvailable:
        return "newDeviceAvailable"
    case .oldDeviceUnavailable:
        return "oldDeviceUnavailable"
    case .categoryChange:
        return "categoryChange"
    case .override:
        return "override"
    case .wakeFromSleep:
        return "wakeFromSleep"
    case .noSuitableRouteForCategory:
        return "noSuitableRouteForCategory"
    case .routeConfigurationChange:
        return "routeConfigurationChange"
    @unknown default:
        return "unknown-future"
    }
}
/// Human-readable name for an `AVAudioSession` interruption type, used in
/// `[AUDIO SYSTEM]` log lines.
private func audioDiagInterruptionDescription(_ type: AVAudioSession.InterruptionType) -> String {
    switch type {
    case .began:
        return "began"
    case .ended:
        return "ended"
    @unknown default:
        return "unknown-future"
    }
}

View File

@@ -57,22 +57,70 @@ private func singleStreamTimeControlDescription(_ status: AVPlayer.TimeControlSt
}
/// Builds the `AVPlayerItem` for single-stream playback.
///
/// - Parameter source: Carries the stream URL plus optional HTTP headers for
///   authenticated streams (attached via `AVURLAssetHTTPHeaderFieldsKey`).
/// - Returns: An item with an 8-second forward buffer (lets ABR measure
///   bandwidth and ramp up), spatial audio disabled, and its audio rendition
///   pinned via `pinSingleStreamAudioSelection(on:)`.
///
/// Note: this span previously contained interleaved removed-diff lines that
/// made it unparseable; this is the coherent final version.
private func makeSingleStreamPlayerItem(from source: SingleStreamPlaybackSource) -> AVPlayerItem {
    let item: AVPlayerItem
    if source.httpHeaders.isEmpty {
        item = AVPlayerItem(url: source.url)
    } else {
        let assetOptions: [String: Any] = [
            "AVURLAssetHTTPHeaderFieldsKey": source.httpHeaders,
        ]
        let asset = AVURLAsset(url: source.url, options: assetOptions)
        item = AVPlayerItem(asset: asset)
        logSingleStream(
            "Configured authenticated AVURLAsset headerKeys=\(singleStreamHeaderKeysDescription(source.httpHeaders))"
        )
    }
    item.preferredForwardBufferDuration = 8
    item.allowedAudioSpatializationFormats = []
    logSingleStream("Configured player item preferredForwardBufferDuration=8 allowedAudioSpatializationFormats=[]")
    pinSingleStreamAudioSelection(on: item)
    return item
}
/// Pin the HLS audio rendition so ABR can't swap channel layouts mid-stream.
///
/// Fire-and-forget wrapper: the media-selection group load is async, so the
/// real work runs in `enforcePinnedSingleStreamAudioSelection(on:)` on the
/// main actor.
private func pinSingleStreamAudioSelection(on item: AVPlayerItem) {
    Task { @MainActor in
        await enforcePinnedSingleStreamAudioSelection(on: item)
    }
}
/// Applies the pinned audio selection to `item` if it has drifted.
///
/// Loads the audible media-selection group, computes the preferred option via
/// `preferredSingleStreamAudioOption(in:)`, and re-selects it only when the
/// item's current selection differs. Returns silently when the asset has no
/// audible group (or the load fails).
@MainActor
private func enforcePinnedSingleStreamAudioSelection(on item: AVPlayerItem) async {
    let asset = item.asset
    guard let group = try? await asset.loadMediaSelectionGroup(for: .audible),
          let option = preferredSingleStreamAudioOption(in: group) else { return }
    let current = item.currentMediaSelection.selectedMediaOption(in: group)
    if current != option {
        item.select(option, in: group)
    }
    // `current` is the selection observed BEFORE the select call above.
    logSingleStream(
        "pinAudioSelection selected=\(option.displayName) current=\(current?.displayName ?? "nil") options=\(group.options.count)"
    )
}
/// Chooses the audio rendition to pin for single-stream playback.
///
/// The option with the highest `audioPreferenceScore` wins; falls back to the
/// group's default option, then to the first listed option.
private func preferredSingleStreamAudioOption(in group: AVMediaSelectionGroup) -> AVMediaSelectionOption? {
    let fallback = group.defaultOption
    let highestScored = group.options.max { a, b in
        audioPreferenceScore(for: a, defaultOption: fallback)
            < audioPreferenceScore(for: b, defaultOption: fallback)
    }
    return highestScored ?? fallback ?? group.options.first
}
/// Heuristic score for ranking audio renditions in single-stream playback;
/// higher is better. Prefers the group default and stereo/main English
/// tracks; penalizes surround/Atmos, secondary-language, and
/// descriptive-audio options.
///
/// Note: this span previously had removed-diff lines from the old
/// makeSingleStreamPlayerItem interleaved between the scoring logic and
/// `return score`; they have been removed to restore the coherent function.
private func audioPreferenceScore(for option: AVMediaSelectionOption, defaultOption: AVMediaSelectionOption?) -> Int {
    let name = option.displayName.lowercased()
    var score = 0
    if option == defaultOption { score += 40 }
    if name.contains("stereo") || name.contains("2.0") || name.contains("main") { score += 30 }
    if name.contains("english") || name.contains("eng") { score += 20 }
    if name.contains("surround") || name.contains("5.1") || name.contains("atmos") { score -= 30 }
    if name.contains("spanish") || name.contains("sap") || name.contains("descriptive") || name.contains("alternate") {
        score -= 25
    }
    if option.hasMediaCharacteristic(.describesVideoForAccessibility) {
        score -= 40
    }
    return score
}
struct SingleStreamPlaybackScreen: View {
@@ -544,24 +592,32 @@ struct SingleStreamPlayerView: UIViewControllerRepresentable {
)
do {
try AVAudioSession.sharedInstance().setCategory(.playback, mode: .moviePlayback)
try AVAudioSession.sharedInstance().setCategory(.playback, mode: .default)
try AVAudioSession.sharedInstance().setActive(true)
logSingleStream("AVAudioSession configured for playback")
logSingleStream("AVAudioSession configured for playback mode=default")
} catch {
logSingleStream("AVAudioSession configuration failed error=\(error.localizedDescription)")
}
let playerItem = makeSingleStreamPlayerItem(from: source)
let player = AVPlayer(playerItem: playerItem)
player.automaticallyWaitsToMinimizeStalling = false
player.appliesMediaSelectionCriteriaAutomatically = false
player.automaticallyWaitsToMinimizeStalling = true
player.isMuted = source.forceMuteAudio
logSingleStream("Configured player for fast start preferredForwardBufferDuration=2 automaticallyWaitsToMinimizeStalling=false")
logSingleStream(
"Configured player for quality ramp preferredForwardBufferDuration=8 automaticallyWaitsToMinimizeStalling=true appliesMediaSelectionCriteriaAutomatically=false"
)
context.coordinator.attachDebugObservers(to: player, url: url, resolveNextSource: resolveNextSource)
controller.player = player
if context.coordinator.audioDiagnostics == nil {
context.coordinator.audioDiagnostics = AudioDiagnostics(tag: "single")
}
context.coordinator.audioDiagnostics?.attach(to: player)
logSingleStream("AVPlayer assigned to controller; calling playImmediately(atRate: 1.0)")
player.playImmediately(atRate: 1.0)
context.coordinator.installClipTimeLimit(on: player, resolveNextSource: resolveNextSource)
context.coordinator.scheduleStartupRecovery(for: player)
context.coordinator.scheduleQualityMonitor(for: player)
}
return controller
@@ -583,11 +639,15 @@ struct SingleStreamPlayerView: UIViewControllerRepresentable {
static func dismantleUIViewController(_ uiViewController: AVPlayerViewController, coordinator: Coordinator) {
logSingleStream("dismantleUIViewController start isPiPActive=\(coordinator.isPiPActive)")
coordinator.clearDebugObservers()
if coordinator.isPiPActive {
logSingleStream("dismantleUIViewController skipped — PiP is active")
logSingleStream("dismantleUIViewController — PiP active, observers cleared but keeping player")
return
}
coordinator.clearDebugObservers()
Task { @MainActor in
coordinator.audioDiagnostics?.detach()
coordinator.audioDiagnostics = nil
}
uiViewController.player?.pause()
uiViewController.player = nil
logSingleStream("dismantleUIViewController complete")
@@ -597,7 +657,9 @@ struct SingleStreamPlayerView: UIViewControllerRepresentable {
private var playerObservations: [NSKeyValueObservation] = []
private var notificationTokens: [NSObjectProtocol] = []
private var startupRecoveryTask: Task<Void, Never>?
private var qualityMonitorTask: Task<Void, Never>?
private var clipTimeLimitObserver: Any?
var audioDiagnostics: AudioDiagnostics?
private static let maxClipDuration: Double = 15.0
var onTogglePitchInfo: (() -> Void)?
var onToggleGameCenter: (() -> Void)?
@@ -789,6 +851,19 @@ struct SingleStreamPlayerView: UIViewControllerRepresentable {
}
}
)
notificationTokens.append(
NotificationCenter.default.addObserver(
forName: AVPlayerItem.mediaSelectionDidChangeNotification,
object: item,
queue: .main
) { _ in
logSingleStream("Notification mediaSelectionDidChange")
Task { @MainActor in
await enforcePinnedSingleStreamAudioSelection(on: item)
}
}
)
}
func scheduleStartupRecovery(for player: AVPlayer) {
@@ -863,9 +938,38 @@ struct SingleStreamPlayerView: UIViewControllerRepresentable {
}
}
/// Periodically checks whether AVPlayer's ABR has ramped to a sensible
/// variant and nudges it when measured bandwidth far exceeds the variant's
/// indicated bitrate. Cancels any previous monitor before starting.
func scheduleQualityMonitor(for player: AVPlayer) {
    qualityMonitorTask?.cancel()
    qualityMonitorTask = Task { @MainActor [weak player] in
        // Sleeps are sequential, so checks land at ~5s, ~20s, and ~50s after
        // scheduling (5, 5+15, 5+15+30) — not at 5/15/30 as previously stated.
        for delay in [5.0, 15.0, 30.0] {
            try? await Task.sleep(for: .seconds(delay))
            guard !Task.isCancelled, let player else { return }
            // Latest access-log entry describes the variant currently playing.
            let indicatedBitrate = player.currentItem?.accessLog()?.events.last?.indicatedBitrate ?? 0
            let observedBitrate = player.currentItem?.accessLog()?.events.last?.observedBitrate ?? 0
            let likelyToKeepUp = player.currentItem?.isPlaybackLikelyToKeepUp ?? false
            logSingleStream(
                "qualityMonitor check delay=\(delay)s indicatedBitrate=\(Int(indicatedBitrate)) observedBitrate=\(Int(observedBitrate)) likelyToKeepUp=\(likelyToKeepUp) rate=\(player.rate)"
            )
            // If observed bandwidth supports higher quality but indicated is low, nudge AVPlayer
            if likelyToKeepUp && indicatedBitrate > 0 && observedBitrate > indicatedBitrate * 2 {
                logSingleStream(
                    "qualityMonitor nudge delay=\(delay)s — observed bandwidth \(Int(observedBitrate)) >> indicated \(Int(indicatedBitrate)), setting preferredPeakBitRate=0 to uncap"
                )
                // NOTE(review): preferredPeakBitRate defaults to 0 (uncapped),
                // so this only has an effect if a cap was set elsewhere —
                // confirm the nudge is intended as written.
                player.currentItem?.preferredPeakBitRate = 0
            }
        }
    }
}
func clearDebugObservers() {
startupRecoveryTask?.cancel()
startupRecoveryTask = nil
qualityMonitorTask?.cancel()
qualityMonitorTask = nil
playerObservations.removeAll()
for token in notificationTokens {
NotificationCenter.default.removeObserver(token)

View File

@@ -4,6 +4,7 @@ import SwiftUI
@main
struct mlbTVOSApp: App {
@State private var viewModel = GamesViewModel()
@Environment(\.scenePhase) private var scenePhase
init() {
configureAudioSession()
@@ -13,6 +14,11 @@ struct mlbTVOSApp: App {
WindowGroup {
ContentView()
.environment(viewModel)
.onChange(of: scenePhase) { _, newPhase in
if newPhase == .active {
Task { await viewModel.refreshIfDayChanged() }
}
}
}
}
@@ -24,5 +30,23 @@ struct mlbTVOSApp: App {
} catch {
print("Failed to set audio session: \(error)")
}
NotificationCenter.default.addObserver(
forName: AVAudioSession.interruptionNotification,
object: AVAudioSession.sharedInstance(),
queue: .main
) { notification in
guard let info = notification.userInfo,
let typeValue = info[AVAudioSessionInterruptionTypeKey] as? UInt,
let type = AVAudioSession.InterruptionType(rawValue: typeValue) else { return }
if type == .ended {
let options = (info[AVAudioSessionInterruptionOptionKey] as? UInt)
.flatMap(AVAudioSession.InterruptionOptions.init) ?? []
if options.contains(.shouldResume) {
try? AVAudioSession.sharedInstance().setActive(true)
}
}
}
}
}