Add AudioDiagnostics heartbeat logging

Adds [AUDIO]-prefixed logs to both single-stream and multi-stream players:
1 Hz heartbeat with rate/timeControl/mute/volume/bitrate/route, plus
immediate events on rate, isMuted, volume, currentItem, media selection,
access-log, error-log, and system audio route/interruption changes.

Grep the Xcode console for `[AUDIO]` or `[AUDIO SYSTEM]` to isolate these lines.
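
For reference, one heartbeat line and one route-change line as they would
appear in the console (the tag and all values here are made up for
illustration; `tc` is AVPlayer.TimeControlStatus.rawValue: 0 = paused,
1 = waitingToPlayAtSpecifiedRate, 2 = playing):

    [AUDIO multi:Cubs 20:31:07.412] hb t=83.5s rate=1.0 tc=2 muted=true vol=1.00 indicated=6400000 observed=7250000 ltku=true route=HDMI:Living Room
    [AUDIO SYSTEM 20:31:09.001] routeChange reason=newDeviceAvailable previous=Speaker:Apple TV Speakers current=HDMI:Living Room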

Also reverts the AAC preference in pinAudioSelection: the
ballgame.treytartt.com master playlist already serves only mp4a.40.2
stereo renditions, so the Dolby-DRC theory doesn't fit. The pin now
simply selects the default audible option.
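
For context, a minimal sketch of what "select the default audible option"
amounts to, using only standard AVFoundation media-selection APIs (an
illustration, not the exact project code):

    import AVFoundation

    /// Sketch: pin the item's audio selection to the group's default (or first)
    /// audible option so later ABR variant switches can't change it.
    @MainActor
    func pinDefaultAudibleOption(on item: AVPlayerItem) async {
        guard let group = try? await item.asset.loadMediaSelectionGroup(for: .audible),
              let option = group.defaultOption ?? group.options.first else { return }
        item.select(option, in: group)
    }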

Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
Trey t committed 2026-04-14 20:28:15 -05:00
parent 85a19fdd71, commit 08ad702f9d
2 changed files with 265 additions and 4 deletions


@@ -356,6 +356,7 @@ private struct MultiStreamTile: View {
     @State private var clipTimeLimitObserver: Any?
     @State private var isAdvancingClip = false
     @StateObject private var playbackDiagnostics = MultiStreamPlaybackDiagnostics()
+    @State private var audioDiagnostics: AudioDiagnostics?
     private static let maxClipDuration: Double = 15.0
     private static var audioSessionConfigured = false
@@ -453,6 +454,8 @@ private struct MultiStreamTile: View {
             }
             player = nil
             playbackDiagnostics.clear(streamID: stream.id, reason: "tile disappeared")
+            audioDiagnostics?.detach()
+            audioDiagnostics = nil
         }
         #if os(tvOS)
         .focusEffectDisabled()
@@ -551,6 +554,7 @@ private struct MultiStreamTile: View {
             )
             scheduleStartupPlaybackRecovery(for: player)
             installClipTimeLimit(on: player)
+            attachAudioDiagnostics(to: player)
             logMultiView("startStream reused inline player id=\(stream.id) muted=\(player.isMuted)")
             return
         }
@@ -577,6 +581,7 @@ private struct MultiStreamTile: View {
             )
             scheduleStartupPlaybackRecovery(for: existingPlayer)
             installClipTimeLimit(on: existingPlayer)
+            attachAudioDiagnostics(to: existingPlayer)
             logMultiView("startStream reused shared player id=\(stream.id) muted=\(existingPlayer.isMuted)")
             return
         }
@@ -612,6 +617,7 @@ private struct MultiStreamTile: View {
             onPlaybackEnded: playbackEndedHandler(for: avPlayer)
         )
         scheduleStartupPlaybackRecovery(for: avPlayer)
+        attachAudioDiagnostics(to: avPlayer)
         logMultiView("startStream attached player id=\(stream.id) muted=\(avPlayer.isMuted) startupResolution=\(multiViewStartupResolution) fastStart=true calling playImmediately(atRate: 1.0)")
         avPlayer.playImmediately(atRate: 1.0)
         installClipTimeLimit(on: avPlayer)
@@ -642,8 +648,7 @@ private struct MultiStreamTile: View {
         return item
     }
 
-    /// Lock the HLS audio rendition to the default option so ABR can't swap
-    /// to a different channel layout / loudness mid-stream.
+    /// Pin the HLS audio rendition so ABR can't swap channel layouts mid-stream.
     private func pinAudioSelection(on item: AVPlayerItem) {
         let streamID = stream.id
         let asset = item.asset
@@ -698,6 +703,13 @@ private struct MultiStreamTile: View {
         (player.currentItem?.asset as? AVURLAsset)?.url
     }
 
+    private func attachAudioDiagnostics(to player: AVPlayer) {
+        if audioDiagnostics == nil {
+            audioDiagnostics = AudioDiagnostics(tag: "multi:\(stream.label)")
+        }
+        audioDiagnostics?.attach(to: player)
+    }
+
     private func installClipTimeLimit(on player: AVPlayer) {
         removeClipTimeLimit(from: player)
@@ -1420,3 +1432,244 @@ private func nextMultiViewFocusID(
         .streamID
 }
 #endif
+
+// MARK: - AudioDiagnostics
+//
+// Audio-specific diagnostic logging. Attach one `AudioDiagnostics` per AVPlayer
+// you want to track. Emits a `[AUDIO]`-prefixed 1 Hz heartbeat with rate,
+// mute, time, bitrate, and route info plus immediate logs on route changes,
+// interruptions, access-log events, and media-selection changes.
+//
+// Grep Xcode console with `[AUDIO]` to isolate these lines.
+@MainActor
+final class AudioDiagnostics {
+    private let tag: String
+    private weak var player: AVPlayer?
+    private var heartbeatTask: Task<Void, Never>?
+    private var observations: [NSKeyValueObservation] = []
+    private var tokens: [NSObjectProtocol] = []
+
+    private static var processWideInstalled = false
+
+    init(tag: String) {
+        self.tag = tag
+        AudioDiagnostics.installProcessWideObservers()
+    }
+
+    deinit {
+        heartbeatTask?.cancel()
+    }
+
+    func attach(to player: AVPlayer) {
+        detach()
+        self.player = player
+        log("attach rate=\(player.rate) isMuted=\(player.isMuted) volume=\(player.volume)")
+
+        observations.append(
+            player.observe(\.rate, options: [.new]) { [weak self] p, _ in
+                Task { @MainActor in self?.log("rate-change rate=\(p.rate) tc=\(p.timeControlStatus.rawValue)") }
+            }
+        )
+        observations.append(
+            player.observe(\.isMuted, options: [.new]) { [weak self] p, _ in
+                Task { @MainActor in self?.log("isMuted-change value=\(p.isMuted)") }
+            }
+        )
+        observations.append(
+            player.observe(\.volume, options: [.new]) { [weak self] p, _ in
+                Task { @MainActor in self?.log("volume-change value=\(p.volume)") }
+            }
+        )
+
+        if let item = player.currentItem {
+            attachItemObservers(item)
+        }
+        observations.append(
+            player.observe(\.currentItem, options: [.new]) { [weak self] p, _ in
+                Task { @MainActor in
+                    guard let self else { return }
+                    self.log("currentItem-change newItem=\(p.currentItem != nil)")
+                    if let item = p.currentItem {
+                        self.attachItemObservers(item)
+                    }
+                }
+            }
+        )
+
+        startHeartbeat()
+    }
+
+    func detach() {
+        heartbeatTask?.cancel()
+        heartbeatTask = nil
+        observations.removeAll()
+        for t in tokens {
+            NotificationCenter.default.removeObserver(t)
+        }
+        tokens.removeAll()
+        player = nil
+    }
+
+    private func attachItemObservers(_ item: AVPlayerItem) {
+        tokens.append(
+            NotificationCenter.default.addObserver(
+                forName: .AVPlayerItemNewAccessLogEntry,
+                object: item,
+                queue: .main
+            ) { [weak self, weak item] _ in
+                guard let self, let event = item?.accessLog()?.events.last else { return }
+                Task { @MainActor in
+                    self.log(
+                        "accessLog indicated=\(Int(event.indicatedBitrate)) observed=\(Int(event.observedBitrate)) switches=\(event.numberOfMediaRequests) stalls=\(event.numberOfStalls) avgVideo=\(Int(event.averageVideoBitrate)) avgAudio=\(Int(event.averageAudioBitrate))"
+                    )
+                }
+            }
+        )
+        tokens.append(
+            NotificationCenter.default.addObserver(
+                forName: .AVPlayerItemNewErrorLogEntry,
+                object: item,
+                queue: .main
+            ) { [weak self, weak item] _ in
+                guard let self, let event = item?.errorLog()?.events.last else { return }
+                Task { @MainActor in
+                    self.log("errorLog domain=\(event.errorDomain) statusCode=\(event.errorStatusCode) comment=\(event.errorComment ?? "nil")")
+                }
+            }
+        )
+        tokens.append(
+            NotificationCenter.default.addObserver(
+                forName: AVPlayerItem.mediaSelectionDidChangeNotification,
+                object: item,
+                queue: .main
+            ) { [weak self, weak item] _ in
+                guard let self, let item else { return }
+                Task { @MainActor in
+                    let asset = item.asset
+                    guard let group = try? await asset.loadMediaSelectionGroup(for: .audible),
+                          let selected = item.currentMediaSelection.selectedMediaOption(in: group) else {
+                        self.log("mediaSelection-change selected=nil")
+                        return
+                    }
+                    let codec = (selected.mediaSubTypes as [NSNumber]).map { audioDiagFourCC($0.uint32Value) }.joined(separator: ",")
+                    self.log("mediaSelection-change audio=\"\(selected.displayName)\" codec=\(codec)")
+                }
+            }
+        )
+    }
+
+    private func startHeartbeat() {
+        heartbeatTask?.cancel()
+        heartbeatTask = Task { @MainActor [weak self] in
+            while !Task.isCancelled {
+                self?.emitHeartbeat()
+                try? await Task.sleep(for: .seconds(1))
+            }
+        }
+    }
+
+    private func emitHeartbeat() {
+        guard let player, let item = player.currentItem else { return }
+        let currentTime = CMTimeGetSeconds(player.currentTime())
+        let event = item.accessLog()?.events.last
+        let indicated = event.map { Int($0.indicatedBitrate) } ?? 0
+        let observed = event.map { Int($0.observedBitrate) } ?? 0
+        log(
+            "hb t=\(String(format: "%.1f", currentTime))s rate=\(player.rate) tc=\(player.timeControlStatus.rawValue) muted=\(player.isMuted) vol=\(String(format: "%.2f", player.volume)) indicated=\(indicated) observed=\(observed) ltku=\(item.isPlaybackLikelyToKeepUp) route=\(AudioDiagnostics.currentRouteDescription())"
+        )
+    }
+
+    private func log(_ message: String) {
+        let ts = AudioDiagnostics.timestamp()
+        print("[AUDIO \(tag) \(ts)] \(message)")
+    }
+
+    // MARK: Process-wide
+
+    private static func installProcessWideObservers() {
+        guard !processWideInstalled else { return }
+        processWideInstalled = true
+
+        let session = AVAudioSession.sharedInstance()
+        print("[AUDIO SYSTEM \(timestamp())] initial category=\(session.category.rawValue) mode=\(session.mode.rawValue) sampleRate=\(session.sampleRate) route=\(currentRouteDescription())")
+
+        NotificationCenter.default.addObserver(
+            forName: AVAudioSession.routeChangeNotification,
+            object: nil,
+            queue: .main
+        ) { notification in
+            let reasonValue = notification.userInfo?[AVAudioSessionRouteChangeReasonKey] as? UInt ?? 0
+            let reason = AVAudioSession.RouteChangeReason(rawValue: reasonValue).map(audioDiagReasonDescription) ?? "unknown(\(reasonValue))"
+            let prev = (notification.userInfo?[AVAudioSessionRouteChangePreviousRouteKey] as? AVAudioSessionRouteDescription).map(routeDescription) ?? "nil"
+            print("[AUDIO SYSTEM \(timestamp())] routeChange reason=\(reason) previous=\(prev) current=\(currentRouteDescription())")
+        }
+        NotificationCenter.default.addObserver(
+            forName: AVAudioSession.interruptionNotification,
+            object: nil,
+            queue: .main
+        ) { notification in
+            let typeValue = notification.userInfo?[AVAudioSessionInterruptionTypeKey] as? UInt ?? 0
+            let type = AVAudioSession.InterruptionType(rawValue: typeValue).map(audioDiagInterruptionDescription) ?? "unknown(\(typeValue))"
+            print("[AUDIO SYSTEM \(timestamp())] interruption type=\(type)")
+        }
+    }
+
+    nonisolated static func currentRouteDescription() -> String {
+        routeDescription(AVAudioSession.sharedInstance().currentRoute)
+    }
+
+    nonisolated static func routeDescription(_ route: AVAudioSessionRouteDescription) -> String {
+        let outs = route.outputs.map { "\($0.portType.rawValue):\($0.portName)" }.joined(separator: ",")
+        return outs.isEmpty ? "none" : outs
+    }
+
+    nonisolated static func timestamp() -> String {
+        audioDiagTSFormatter.string(from: Date())
+    }
+}
+
+private let audioDiagTSFormatter: DateFormatter = {
+    let f = DateFormatter()
+    f.dateFormat = "HH:mm:ss.SSS"
+    return f
+}()
+
+private func audioDiagFourCC(_ raw: UInt32) -> String {
+    let bytes: [UInt8] = [
+        UInt8((raw >> 24) & 0xFF),
+        UInt8((raw >> 16) & 0xFF),
+        UInt8((raw >> 8) & 0xFF),
+        UInt8(raw & 0xFF),
+    ]
+    let chars = bytes.map { b -> Character in
+        let scalar = UnicodeScalar(b)
+        return (0x20...0x7E).contains(b) ? Character(scalar) : "?"
+    }
+    return String(chars)
+}
+
+private func audioDiagReasonDescription(_ reason: AVAudioSession.RouteChangeReason) -> String {
+    switch reason {
+    case .unknown: "unknown"
+    case .newDeviceAvailable: "newDeviceAvailable"
+    case .oldDeviceUnavailable: "oldDeviceUnavailable"
+    case .categoryChange: "categoryChange"
+    case .override: "override"
+    case .wakeFromSleep: "wakeFromSleep"
+    case .noSuitableRouteForCategory: "noSuitableRouteForCategory"
+    case .routeConfigurationChange: "routeConfigurationChange"
+    @unknown default: "unknown-future"
+    }
+}
+
+private func audioDiagInterruptionDescription(_ type: AVAudioSession.InterruptionType) -> String {
+    switch type {
+    case .began: "began"
+    case .ended: "ended"
+    @unknown default: "unknown-future"
+    }
+}


@@ -75,8 +75,7 @@ private func makeSingleStreamPlayerItem(from source: SingleStreamPlaybackSource)
     return item
 }
 
-/// Lock the HLS audio rendition to the default option so ABR can't swap
-/// to a different channel layout / loudness mid-stream.
+/// Pin the HLS audio rendition so ABR can't swap channel layouts mid-stream.
 private func pinSingleStreamAudioSelection(on item: AVPlayerItem) {
     let asset = item.asset
     Task { @MainActor [weak item] in
@@ -571,6 +570,10 @@ struct SingleStreamPlayerView: UIViewControllerRepresentable {
         logSingleStream("Configured player for quality ramp preferredForwardBufferDuration=8 automaticallyWaitsToMinimizeStalling=true")
         context.coordinator.attachDebugObservers(to: player, url: url, resolveNextSource: resolveNextSource)
         controller.player = player
+        if context.coordinator.audioDiagnostics == nil {
+            context.coordinator.audioDiagnostics = AudioDiagnostics(tag: "single")
+        }
+        context.coordinator.audioDiagnostics?.attach(to: player)
         logSingleStream("AVPlayer assigned to controller; calling playImmediately(atRate: 1.0)")
         player.playImmediately(atRate: 1.0)
         context.coordinator.installClipTimeLimit(on: player, resolveNextSource: resolveNextSource)
@@ -602,6 +605,10 @@ struct SingleStreamPlayerView: UIViewControllerRepresentable {
             logSingleStream("dismantleUIViewController — PiP active, observers cleared but keeping player")
             return
         }
+        Task { @MainActor in
+            coordinator.audioDiagnostics?.detach()
+            coordinator.audioDiagnostics = nil
+        }
         uiViewController.player?.pause()
         uiViewController.player = nil
         logSingleStream("dismantleUIViewController complete")
@@ -613,6 +620,7 @@ struct SingleStreamPlayerView: UIViewControllerRepresentable {
         private var startupRecoveryTask: Task<Void, Never>?
         private var qualityMonitorTask: Task<Void, Never>?
         private var clipTimeLimitObserver: Any?
+        var audioDiagnostics: AudioDiagnostics?
         private static let maxClipDuration: Double = 15.0
         var onTogglePitchInfo: (() -> Void)?
         var onToggleGameCenter: (() -> Void)?