import AVFoundation
import AVKit
import OSLog
import SwiftUI

/// Identifiable wrapper around a stream ID so it can drive `.sheet(item:)` presentation.
private struct StreamSelection: Identifiable { let id: String }

/// Shared logger for all multi-view diagnostics in this file.
private let multiViewLogger = Logger(subsystem: "com.treyt.mlbTVOS", category: "MultiView")

/// Writes `message` to OSLog (marked `.public` so it is visible in Console) and mirrors it to stdout.
private func logMultiView(_ message: String) {
    multiViewLogger.debug("\(message, privacy: .public)")
    print("[MultiView] \(message)")
}

/// Log-friendly name for an `AVPlayer.Status` value.
private func multiViewStatusDescription(_ status: AVPlayer.Status) -> String {
    switch status {
    case .unknown: "unknown"
    case .readyToPlay: "readyToPlay"
    case .failed: "failed"
    @unknown default: "unknown-future"
    }
}

/// Log-friendly name for an `AVPlayerItem.Status` value.
private func multiViewItemStatusDescription(_ status: AVPlayerItem.Status) -> String {
    switch status {
    case .unknown: "unknown"
    case .readyToPlay: "readyToPlay"
    case .failed: "failed"
    @unknown default: "unknown-future"
    }
}

/// Log-friendly name for an `AVPlayer.TimeControlStatus` value.
private func multiViewTimeControlDescription(_ status: AVPlayer.TimeControlStatus) -> String {
    switch status {
    case .paused: "paused"
    case .waitingToPlayAtSpecifiedRate: "waitingToPlayAtSpecifiedRate"
    case .playing: "playing"
    @unknown default: "unknown-future"
    }
}

/// Sorted, comma-joined header names ("none" when empty); values are deliberately not logged.
private func multiViewHeaderKeysDescription(_ headers: [String: String]) -> String {
    guard !headers.isEmpty else { return "none" }
    return headers.keys.sorted().joined(separator: ",")
}

/// Root multi-view screen: gradient backdrop, toolbar, tile canvas, and scores ticker.
struct MultiStreamView: View {
    @Environment(GamesViewModel.self) private var viewModel
    // Non-nil while the per-stream control sheet is presented.
    @State private var selectedStream: StreamSelection?
// MARK: - MultiStreamView (continued)
    // Toggles the full-screen multi-view cover.
    @State private var showFullScreen = false

    var body: some View {
        ZStack(alignment: .bottom) {
            LinearGradient(
                colors: [
                    Color(red: 0.03, green: 0.04, blue: 0.08),
                    Color(red: 0.02, green: 0.05, blue: 0.1),
                    Color(red: 0.01, green: 0.02, blue: 0.05),
                ],
                startPoint: .topLeading,
                endPoint: .bottomTrailing
            )
            .ignoresSafeArea()
            if viewModel.activeStreams.isEmpty {
                emptyState
            } else {
                VStack(spacing: 0) {
                    toolbar
                        .padding(.horizontal, 40)
                        .padding(.top, 26)
                        .padding(.bottom, 18)
                    MultiViewCanvas(
                        contentInsets: 18,
                        gap: 14,
                        videoGravity: .resizeAspectFill,
                        cornerRadius: 22,
                        onSelect: { stream in selectedStream = StreamSelection(id: stream.id) }
                    )
                    .padding(.horizontal, 18)
                    .padding(.bottom, 82)
                }
                // Overlaid at the bottom; non-interactive so it never steals focus/taps.
                ScoresTickerView()
                    .allowsHitTesting(false)
                    .padding(.horizontal, 18)
                    .padding(.bottom, 14)
            }
        }
        .fullScreenCover(isPresented: $showFullScreen) { MultiStreamFullScreenView() }
        .sheet(item: $selectedStream) { selection in
            StreamControlSheet(
                streamID: selection.id,
                onRemove: {
                    viewModel.removeStream(id: selection.id)
                    selectedStream = nil
                }
            )
        }
        .onAppear { viewModel.startAutoRefresh() }
        .onDisappear { viewModel.stopAutoRefresh() }
    }

    /// Placeholder shown when no streams are active.
    private var emptyState: some View {
        VStack(spacing: 24) {
            Image(systemName: "rectangle.split.2x2")
                .font(.system(size: 82, weight: .light))
                .foregroundStyle(.white.opacity(0.14))
            Text("No Active Streams")
                .font(.system(size: 30, weight: .bold, design: .rounded))
                .foregroundStyle(.white.opacity(0.9))
            Text("Add broadcasts from Games to build a quadbox, then pick the main tile and live audio here.")
                .font(.system(size: 18, weight: .medium))
                .foregroundStyle(.white.opacity(0.38))
                .multilineTextAlignment(.center)
                .frame(maxWidth: 560)
        }
        .padding(40)
    }

    /// Header area: title/subtitle, status chips, layout picker, and the global action buttons.
    private var toolbar: some View {
        VStack(alignment: .leading, spacing: 18) {
            HStack(alignment: .top, spacing: 24) {
                VStack(alignment: .leading, spacing: 10) {
                    Text("Multi-View 2.0")
                        .font(.system(size: 32, weight: .bold, design: .rounded))
                        .foregroundStyle(.white)
                    Text("Choose the main tile, route audio to the game you want, and reorder feeds without leaving the grid.")
                        .font(.system(size: 15, weight: .medium))
                        .foregroundStyle(.white.opacity(0.58))
                }
                Spacer()
                HStack(spacing: 10) {
                    // Slot usage (hard cap of 4 tiles), current layout, and audio routing.
                    infoChip(
                        title: "\(viewModel.activeStreams.count)/4",
                        icon: "rectangle.split.2x2",
                        tint: .white
                    )
                    infoChip(
                        title: viewModel.multiViewLayoutMode.title,
                        icon: viewModel.multiViewLayoutMode.systemImage,
                        tint: .blue
                    )
                    infoChip(
                        title: viewModel.activeAudioStream?.label ?? "All Muted",
                        icon: viewModel.activeAudioStream == nil ? "speaker.slash.fill" : "speaker.wave.2.fill",
                        tint: viewModel.activeAudioStream == nil ? .white : .green
                    )
                }
            }
            HStack(spacing: 14) {
                MultiViewLayoutPicker(compact: false)
                Spacer()
                Button {
                    showFullScreen = true
                } label: {
                    toolbarButtonLabel(
                        title: "Full Screen",
                        icon: "arrow.up.left.and.arrow.down.right",
                        tint: .white,
                        destructive: false
                    )
                }
                .platformCardStyle()
                Button {
                    viewModel.clearAllStreams()
                } label: {
                    toolbarButtonLabel(
                        title: "Clear All",
                        icon: "xmark",
                        tint: .red,
                        destructive: true
                    )
                }
                .platformCardStyle()
            }
        }
    }

    /// Small pill showing one status value in the toolbar.
    private func infoChip(title: String, icon: String, tint: Color) -> some View {
        HStack(spacing: 8) {
            Image(systemName: icon)
                .font(.system(size: 12, weight: .bold))
            Text(title)
                .font(.system(size: 14, weight: .semibold))
                .lineLimit(1)
        }
        .foregroundStyle(tint.opacity(0.95))
        .padding(.horizontal, 12)
        .padding(.vertical, 9)
        .background(
            RoundedRectangle(cornerRadius: 999)
                .fill(.white.opacity(0.08))
        )
    }

    /// Label for the toolbar action buttons; red-tinted fill when destructive.
    private func toolbarButtonLabel(title: String, icon: String, tint: Color, destructive: Bool) -> some View {
        HStack(spacing: 8) {
            Image(systemName: icon)
                .font(.system(size: 13, weight: .bold))
            Text(title)
                .font(.system(size: 15, weight: .semibold))
        }
        .foregroundStyle(tint.opacity(0.9))
        .padding(.horizontal, 18)
        .padding(.vertical, 12)
        .background(
            RoundedRectangle(cornerRadius: 999)
                .fill(destructive ? .red.opacity(0.12) : .white.opacity(0.08))
        )
    }
}

/// Absolutely-positioned tile grid; frames come from `multiViewFrames`. On tvOS it also
/// drives directional focus between tiles and keeps audio focus following tile focus.
private struct MultiViewCanvas: View {
    @Environment(GamesViewModel.self) private var viewModel
    #if os(tvOS)
    @FocusState private var focusedStreamID: String?
    #endif
    let contentInsets: CGFloat
    let gap: CGFloat
    let videoGravity: AVLayerVideoGravity
    let cornerRadius: CGFloat
    let onSelect: (ActiveStream) -> Void

    var body: some View {
        GeometryReader { geo in
            let frames = multiViewFrames(
                count: viewModel.activeStreams.count,
                mode: viewModel.multiViewLayoutMode,
                size: geo.size,
                inset: contentInsets,
                gap: gap
            )
            #if os(tvOS)
            // Pair each stream with its frame for directional focus resolution.
            let focusEntries = Array(viewModel.activeStreams.enumerated()).compactMap { index, stream -> MultiViewFocusEntry? in
                guard index < frames.count else { return nil }
                return MultiViewFocusEntry(streamID: stream.id, frame: frames[index])
            }
            #endif
            ZStack(alignment: .topLeading) {
                ForEach(Array(viewModel.activeStreams.enumerated()), id: \.element.id) { index, stream in
                    if index < frames.count {
                        let frame = frames[index]
                        tileView(for: stream, frame: frame, position: index + 1)
                    }
                }
            }
            .platformFocusSection()
            #if os(tvOS)
            .onMoveCommand { direction in
                // Start from the focused tile, else the audio-focused one, else the first stream.
                let currentID = focusedStreamID ?? viewModel.audioFocusStreamID ?? viewModel.activeStreams.first?.id
                if let nextID = nextMultiViewFocusID(
                    from: currentID,
                    direction: direction,
                    entries: focusEntries
                ) {
                    focusedStreamID = nextID
                }
            }
            #endif
        }
        #if os(tvOS)
        .onAppear {
            if focusedStreamID == nil {
                focusedStreamID = viewModel.audioFocusStreamID ?? viewModel.activeStreams.first?.id
            }
        }
        .onChange(of: viewModel.activeStreams.map(\.id)) { _, streamIDs in
            // Keep focus on a still-present stream when the list changes.
            if let focusedStreamID, streamIDs.contains(focusedStreamID) { return }
            self.focusedStreamID = viewModel.audioFocusStreamID ??
streamIDs.first } .onChange(of: focusedStreamID) { _, streamID in guard let streamID else { return } if viewModel.audioFocusStreamID != streamID { viewModel.setAudioFocus(streamID: streamID) } } #endif } @ViewBuilder private func tileView(for stream: ActiveStream, frame: CGRect, position: Int) -> some View { let tile = MultiStreamTile( stream: stream, position: position, isPrimary: viewModel.isPrimaryStream(stream.id), isAudioFocused: viewModel.audioFocusStreamID == stream.id, isFocused: { #if os(tvOS) focusedStreamID == stream.id #else false #endif }(), showsPrimaryBadge: viewModel.multiViewLayoutMode == .spotlight && viewModel.isPrimaryStream(stream.id) && viewModel.activeStreams.count > 1, videoGravity: videoGravity, cornerRadius: cornerRadius, onSelect: { onSelect(stream) } ) #if os(tvOS) tile .frame(width: frame.width, height: frame.height) .position(x: frame.midX, y: frame.midY) .focused($focusedStreamID, equals: stream.id) #else tile .frame(width: frame.width, height: frame.height) .position(x: frame.midX, y: frame.midY) #endif } } private struct MultiStreamTile: View { let stream: ActiveStream let position: Int let isPrimary: Bool let isAudioFocused: Bool let isFocused: Bool let showsPrimaryBadge: Bool let videoGravity: AVLayerVideoGravity let cornerRadius: CGFloat let onSelect: () -> Void @Environment(GamesViewModel.self) private var viewModel @State private var player: AVPlayer? @State private var hasError = false @State private var startupPlaybackTask: Task? @State private var werkoutMonitorTask: Task? @State private var clipTimeLimitObserver: Any? @State private var isAdvancingClip = false @StateObject private var playbackDiagnostics = MultiStreamPlaybackDiagnostics() @State private var audioDiagnostics: AudioDiagnostics? 
// MARK: - MultiStreamTile (continued)
    // Werkout-channel clips are force-advanced after this many seconds.
    private static let maxClipDuration: Double = 15.0
    // Process-wide guard so the AVAudioSession is configured only once.
    private static var audioSessionConfigured = false

    var body: some View {
        ZStack {
            videoLayer
            LinearGradient(
                colors: [
                    .black.opacity(0.68),
                    .black.opacity(0.08),
                    .black.opacity(0.52),
                ],
                startPoint: .top,
                endPoint: .bottom
            )
            VStack(spacing: 0) {
                HStack(alignment: .top, spacing: 10) {
                    // Position number + feed label.
                    HStack(spacing: 10) {
                        Text("\(position)")
                            .font(.system(size: 12, weight: .bold, design: .rounded))
                            .foregroundStyle(.white.opacity(0.95))
                            .frame(width: 24, height: 24)
                            .background(.black.opacity(0.58))
                            .clipShape(Circle())
                        Text(stream.label)
                            .font(.system(size: 16, weight: .bold, design: .rounded))
                            .foregroundStyle(.white)
                            .lineLimit(1)
                    }
                    .padding(.horizontal, 12)
                    .padding(.vertical, 9)
                    .background(.black.opacity(0.52))
                    .clipShape(RoundedRectangle(cornerRadius: 12))
                    Spacer()
                    HStack(spacing: 8) {
                        if showsPrimaryBadge {
                            tileBadge(title: "MAIN", color: .blue)
                        }
                        if isAudioFocused {
                            tileBadge(title: "AUDIO", color: .green)
                        } else {
                            tileBadge(title: "MUTED", color: .white.opacity(0.7))
                        }
                    }
                }
                Spacer()
                HStack {
                    Spacer()
                    if let statusText = tileStatusText, !statusText.isEmpty {
                        Text(statusText)
                            .font(.system(size: 13, weight: .semibold, design: .rounded))
                            .foregroundStyle(.white.opacity(0.82))
                            .padding(.horizontal, 10)
                            .padding(.vertical, 6)
                            .background(.black.opacity(0.48))
                            .clipShape(Capsule())
                    }
                }
            }
            .padding(14)
        }
        .background(.black)
        .overlay(tileBorder)
        .clipShape(RoundedRectangle(cornerRadius: cornerRadius))
        .contentShape(RoundedRectangle(cornerRadius: cornerRadius))
        .scaleEffect(isFocused ? 1.025 : 1.0)
        .shadow(
            color: isFocused ? .blue.opacity(0.22) : isAudioFocused ? .green.opacity(0.22) : .black.opacity(0.28),
            radius: isFocused ? 26 : 20,
            y: 10
        )
        .animation(.easeOut(duration: 0.18), value: isFocused)
        .platformFocusable()
        .onAppear { logMultiView("tile appeared id=\(stream.id) label=\(stream.label)") }
        .onDisappear {
            // Tear down everything this tile owns: tasks, observers, player, diagnostics.
            logMultiView("tile disappeared id=\(stream.id) label=\(stream.label)")
            startupPlaybackTask?.cancel()
            startupPlaybackTask = nil
            werkoutMonitorTask?.cancel()
            werkoutMonitorTask = nil
            if let player {
                removeClipTimeLimit(from: player)
                player.pause()
            }
            player = nil
            playbackDiagnostics.clear(streamID: stream.id, reason: "tile disappeared")
            audioDiagnostics?.detach()
            audioDiagnostics = nil
        }
        #if os(tvOS)
        .focusEffectDisabled()
        .onPlayPauseCommand {
            if player?.rate == 0 { player?.play() } else { player?.pause() }
        }
        #endif
        .onTapGesture { onSelect() }
        // Re-runs whenever the tile is reused for a different stream ID.
        .task(id: stream.id) { await startStream() }
    }

    /// Player layer when available; otherwise an error or loading placeholder.
    @ViewBuilder private var videoLayer: some View {
        if let player {
            MultiStreamPlayerLayerView(player: player, streamID: stream.id, videoGravity: videoGravity)
                .onAppear { logMultiView("videoLayer showing player id=\(stream.id) rate=\(player.rate) muted=\(player.isMuted) gravity=\(videoGravity.rawValue)") }
        } else if hasError {
            Color.black.overlay {
                VStack(spacing: 10) {
                    Image(systemName: "exclamationmark.triangle.fill")
                        .font(.system(size: 28))
                        .foregroundStyle(.red.opacity(0.9))
                    Text("Stream unavailable")
                        .font(.system(size: 15, weight: .semibold))
                        .foregroundStyle(.white.opacity(0.82))
                }
            }
        } else {
            Color.black.overlay {
                VStack(spacing: 10) {
                    ProgressView()
                    Text("Loading feed")
                        .font(.system(size: 15, weight: .semibold))
                        .foregroundStyle(.white.opacity(0.72))
                }
            }
        }
    }

    /// Border color/width encodes state priority: audio focus > focus > primary > default.
    private var tileBorder: some View {
        RoundedRectangle(cornerRadius: cornerRadius)
            .stroke(
                isAudioFocused ? .green.opacity(0.95) : isFocused ? .blue.opacity(0.9) : isPrimary ? .white.opacity(0.28) : .white.opacity(0.12),
                lineWidth: isAudioFocused || isFocused ? 3 : isPrimary ? 2 : 1
            )
    }

    /// Text for the lower-right status pill: inning display, "Final", start time, or status label.
    private var tileStatusText: String? {
        if stream.game.isLive { return stream.game.currentInningDisplay ?? stream.game.status.label }
        if stream.game.isFinal { return "Final" }
        if stream.game.status.isScheduled { return stream.game.startTime ?? stream.game.status.label }
        return stream.game.status.label
    }

    /// Small capsule badge (MAIN / AUDIO / MUTED).
    private func tileBadge(title: String, color: Color) -> some View {
        Text(title)
            .font(.system(size: 11, weight: .bold, design: .rounded))
            .foregroundStyle(color.opacity(0.98))
            .padding(.horizontal, 10)
            .padding(.vertical, 6)
            .background(.black.opacity(0.5))
            .clipShape(Capsule())
    }

    // Resolution requested when this tile first resolves its stream URL.
    private var multiViewStartupResolution: String { viewModel.defaultResolution }

    /// Attaches a player to this tile. Order of preference: the tile's existing inline
    /// player, a player already carried by the stream model, then a freshly resolved URL.
    private func startStream() async {
        logMultiView(
            "startStream begin id=\(stream.id) label=\(stream.label) hasInlinePlayer=\(player != nil) hasSharedPlayer=\(stream.player != nil) hasOverrideURL=\(stream.overrideURL != nil) hasOverrideHeaders=\(stream.overrideHeaders != nil)"
        )
        // Case 1: tile already has a player — just re-attach diagnostics and watchdogs.
        if let player {
            playbackDiagnostics.attach(
                to: player,
                streamID: stream.id,
                label: stream.label,
                onPlaybackEnded: playbackEndedHandler(for: player)
            )
            scheduleStartupPlaybackRecovery(for: player)
            installClipTimeLimit(on: player)
            attachAudioDiagnostics(to: player)
            logMultiView("startStream reused inline player id=\(stream.id) muted=\(player.isMuted)")
            return
        }
        // One-time audio session setup; failure is logged but non-fatal.
        if !Self.audioSessionConfigured {
            do {
                try AVAudioSession.sharedInstance().setCategory(.playback, mode: .default)
                try AVAudioSession.sharedInstance().setActive(true)
                Self.audioSessionConfigured = true
                logMultiView("startStream audio session configured id=\(stream.id) mode=default")
            } catch {
                logMultiView("startStream audio session failed id=\(stream.id) error=\(error.localizedDescription)")
            }
        }
        // Case 2: the stream model already supplies a player.
        if let existingPlayer = stream.player {
            self.player = existingPlayer
            hasError = false
            playbackDiagnostics.attach(
                to: existingPlayer,
                streamID: stream.id,
                label: stream.label,
                onPlaybackEnded: playbackEndedHandler(for: existingPlayer)
            )
            scheduleStartupPlaybackRecovery(for: existingPlayer)
            installClipTimeLimit(on: existingPlayer)
            attachAudioDiagnostics(to: existingPlayer)
            logMultiView("startStream reused shared player id=\(stream.id) muted=\(existingPlayer.isMuted)")
            return
        }
        // Case 3: resolve a URL (explicit override wins) and build a new player.
        let url: URL?
        if let overrideURL = stream.overrideURL {
            url = overrideURL
            logMultiView("startStream using override URL id=\(stream.id) url=\(overrideURL.absoluteString)")
        } else {
            url = await viewModel.resolveStreamURL(
                for: stream,
                resolutionOverride: multiViewStartupResolution,
                preserveServerResolutionWhenBest: false
            )
        }
        guard let url else {
            hasError = true
            logMultiView("startStream failed id=\(stream.id) resolveStreamURL returned nil")
            return
        }
        let avPlayer = makePlayer(url: url, headers: stream.overrideHeaders)
        // Fast start: don't let AVFoundation hold playback for deeper buffering.
        avPlayer.automaticallyWaitsToMinimizeStalling = false
        avPlayer.currentItem?.preferredForwardBufferDuration = 2
        self.player = avPlayer
        // Set mute state BEFORE playback to prevent audio spikes
        viewModel.attachPlayer(avPlayer, to: stream.id)
        playbackDiagnostics.attach(
            to: avPlayer,
            streamID: stream.id,
            label: stream.label,
            onPlaybackEnded: playbackEndedHandler(for: avPlayer)
        )
        scheduleStartupPlaybackRecovery(for: avPlayer)
        attachAudioDiagnostics(to: avPlayer)
        logMultiView("startStream attached player id=\(stream.id) muted=\(avPlayer.isMuted) startupResolution=\(multiViewStartupResolution) fastStart=true calling playImmediately(atRate: 1.0)")
        avPlayer.playImmediately(atRate: 1.0)
        installClipTimeLimit(on: avPlayer)
    }

    /// Builds an AVPlayer for `url`; optional HTTP `headers` are applied to the asset.
    private func makePlayer(url: URL, headers: [String: String]?) -> AVPlayer {
        let headers = headers ?? [:]
        logMultiView(
            "startStream creating AVPlayer id=\(stream.id) url=\(url.absoluteString) headerKeys=\(multiViewHeaderKeysDescription(headers))"
        )
        let item = makePlayerItem(url: url, headers: headers)
        let player = AVPlayer(playerItem: item)
        // Manual audio selection only — see pinAudioSelection(on:).
        player.appliesMediaSelectionCriteriaAutomatically = false
        logMultiView("startStream configured AVPlayer id=\(stream.id) appliesMediaSelectionCriteriaAutomatically=false")
        return player
    }

    /// Builds the player item, attaching HTTP headers via AVURLAsset options when present.
    private func makePlayerItem(url: URL, headers: [String: String]) -> AVPlayerItem {
        let item: AVPlayerItem
        if headers.isEmpty {
            item = AVPlayerItem(url: url)
        } else {
            // NOTE(review): string key for an AVURLAsset option that is not a public
            // constant — widely used in practice, but confirm it is intentional here.
            let assetOptions: [String: Any] = [
                "AVURLAssetHTTPHeaderFieldsKey": headers,
            ]
            let asset = AVURLAsset(url: url, options: assetOptions)
            item = AVPlayerItem(asset: asset)
        }
        // Disable spatialization on this item (empty allowed-formats set).
        item.allowedAudioSpatializationFormats = []
        logMultiView("startStream configured player item id=\(stream.id) allowedAudioSpatializationFormats=[]")
        pinAudioSelection(on: item)
        return item
    }

    /// Pin the HLS audio rendition so ABR can't swap channel layouts mid-stream.
    private func pinAudioSelection(on item: AVPlayerItem) {
        let streamID = stream.id
        Task { @MainActor in
            await enforcePinnedMultiStreamAudioSelection(on: item, streamID: streamID)
        }
    }

    /// Watchdog: rechecks playback at growing delays and re-issues play while rate is 0.
    private func scheduleStartupPlaybackRecovery(for player: AVPlayer) {
        startupPlaybackTask?.cancel()
        let streamID = stream.id
        let label = stream.label
        startupPlaybackTask = Task { @MainActor in
            let retryDelays: [Double] = [0.35, 1.0, 2.0, 4.0]
            for delay in retryDelays {
                try? await Task.sleep(for: .seconds(delay))
                guard !Task.isCancelled else { return }
                // Abort if the tile swapped players since this watchdog was scheduled.
                guard let currentPlayer = self.player, currentPlayer === player else {
                    logMultiView("startupRecovery abort id=\(streamID) label=\(label) reason=player-changed")
                    return
                }
                let itemStatus = multiViewItemStatusDescription(player.currentItem?.status ?? .unknown)
                let likelyToKeepUp = player.currentItem?.isPlaybackLikelyToKeepUp ?? false
                let bufferEmpty = player.currentItem?.isPlaybackBufferEmpty ?? false
                let timeControl = multiViewTimeControlDescription(player.timeControlStatus)
                let startupSatisfied = player.rate > 0 && (itemStatus == "readyToPlay" || likelyToKeepUp)
                logMultiView(
                    "startupRecovery check id=\(streamID) delay=\(delay)s rate=\(player.rate) timeControl=\(timeControl) itemStatus=\(itemStatus) likelyToKeepUp=\(likelyToKeepUp) bufferEmpty=\(bufferEmpty)"
                )
                if startupSatisfied {
                    logMultiView("startupRecovery satisfied id=\(streamID) delay=\(delay)s")
                    return
                }
                if player.rate == 0 {
                    logMultiView("startupRecovery replay id=\(streamID) delay=\(delay)s")
                    player.playImmediately(atRate: 1.0)
                }
            }
        }
    }

    /// URL of the player's current asset when it is an AVURLAsset; nil otherwise.
    private func currentStreamURL(for player: AVPlayer) -> URL? {
        (player.currentItem?.asset as? AVURLAsset)?.url
    }

    /// Lazily creates and attaches the per-tile audio diagnostics helper.
    private func attachAudioDiagnostics(to player: AVPlayer) {
        if audioDiagnostics == nil {
            audioDiagnostics = AudioDiagnostics(tag: "multi:\(stream.label)")
        }
        audioDiagnostics?.attach(to: player)
    }

    /// Werkout channel only: installs a boundary observer that force-advances the clip
    /// after `maxClipDuration` seconds. No-op for every other stream.
    private func installClipTimeLimit(on player: AVPlayer) {
        removeClipTimeLimit(from: player)
        guard stream.id == SpecialPlaybackChannelConfig.werkoutNSFWStreamID else { return }
        let limit = CMTime(seconds: Self.maxClipDuration, preferredTimescale: 600)
        logMultiView("installClipTimeLimit id=\(stream.id) limit=\(Self.maxClipDuration)s")
        clipTimeLimitObserver = player.addBoundaryTimeObserver(
            forTimes: [NSValue(time: limit)],
            queue: .main
        ) { [weak player] in
            guard let player else {
                logMultiView("clipTimeLimit STOPPED id=\(stream.id) reason=player-deallocated")
                return
            }
            let currentTime = CMTimeGetSeconds(player.currentTime())
            logMultiView("clipTimeLimit fired id=\(stream.id) currentTime=\(String(format: "%.1f", currentTime))s rate=\(player.rate) — advancing")
            Task { @MainActor in
                await playNextWerkoutClip(on: player)
            }
        }
    }

    /// Removes the boundary observer installed by installClipTimeLimit(on:), if any.
    private func removeClipTimeLimit(from player: AVPlayer) {
        if let observer = clipTimeLimitObserver {
            player.removeTimeObserver(observer)
            clipTimeLimitObserver = nil
        }
    }

    /// didPlayToEnd hook — only the werkout channel auto-advances; other streams get nil.
    private func playbackEndedHandler(for player: AVPlayer) -> (@MainActor @Sendable () async -> Void)? {
        guard stream.id == SpecialPlaybackChannelConfig.werkoutNSFWStreamID else { return nil }
        return {
            let currentTime = CMTimeGetSeconds(player.currentTime())
            logMultiView("playbackEnded (didPlayToEnd) id=\(stream.id) currentTime=\(String(format: "%.1f", currentTime))s rate=\(player.rate)")
            await playNextWerkoutClip(on: player)
        }
    }

    /// Resolves the next werkout clip URL and swaps it into `player`. Guarded against
    /// reentry by `isAdvancingClip`; a post-swap monitor auto-skips items that fail.
    private func playNextWerkoutClip(on player: AVPlayer) async {
        guard !isAdvancingClip else {
            logMultiView("playNextWerkoutClip SKIPPED id=\(stream.id) reason=already-advancing")
            return
        }
        isAdvancingClip = true
        defer { isAdvancingClip = false }
        let currentURL = currentStreamURL(for: player)
        let playerRate = player.rate
        let playerStatus = player.status.rawValue
        let itemStatus = player.currentItem?.status.rawValue ?? -1
        let timeControl = player.timeControlStatus.rawValue
        logMultiView(
            "playNextWerkoutClip begin id=\(stream.id) currentURL=\(currentURL?.absoluteString ?? "nil") playerRate=\(playerRate) playerStatus=\(playerStatus) itemStatus=\(itemStatus) timeControl=\(timeControl)"
        )
        let resolveStart = Date()
        guard let nextURL = await viewModel.resolveNextAuthenticatedFeedURLForActiveStream(
            id: stream.id,
            feedURL: SpecialPlaybackChannelConfig.werkoutNSFWFeedURL,
            headers: SpecialPlaybackChannelConfig.werkoutNSFWHeaders,
            maxRetries: 3
        ) else {
            let elapsedMs = Int(Date().timeIntervalSince(resolveStart) * 1000)
            logMultiView("playNextWerkoutClip STOPPED id=\(stream.id) reason=resolve-nil-after-retries elapsedMs=\(elapsedMs)")
            return
        }
        let resolveMs = Int(Date().timeIntervalSince(resolveStart) * 1000)
        logMultiView("playNextWerkoutClip resolved id=\(stream.id) resolveMs=\(resolveMs) nextURL=\(nextURL.lastPathComponent)")
        let nextItem = makePlayerItem(
            url: nextURL,
            headers: stream.overrideHeaders ?? SpecialPlaybackChannelConfig.werkoutNSFWHeaders
        )
        nextItem.preferredForwardBufferDuration = 2
        player.replaceCurrentItem(with: nextItem)
        player.automaticallyWaitsToMinimizeStalling = false
        // Re-attach diagnostics/watchdogs for the new item.
        playbackDiagnostics.attach(
            to: player,
            streamID: stream.id,
            label: stream.label,
            onPlaybackEnded: playbackEndedHandler(for: player)
        )
        scheduleStartupPlaybackRecovery(for: player)
        logMultiView("playNextWerkoutClip replay id=\(stream.id) url=\(nextURL.lastPathComponent)")
        player.playImmediately(atRate: 1.0)
        installClipTimeLimit(on: player)
        // Monitor for failure and auto-skip to next clip
        werkoutMonitorTask?.cancel()
        werkoutMonitorTask = Task { @MainActor in
            for checkDelay in [1.0, 3.0] {
                try? await Task.sleep(for: .seconds(checkDelay))
                guard !Task.isCancelled else { return }
                let postItemStatus = player.currentItem?.status
                let error = player.currentItem?.error?.localizedDescription ?? "nil"
                logMultiView(
                    "playNextWerkoutClip postCheck id=\(stream.id) delay=\(checkDelay)s rate=\(player.rate) itemStatus=\(postItemStatus?.rawValue ?? -1) error=\(error)"
                )
                if postItemStatus == .failed {
                    logMultiView("playNextWerkoutClip AUTO-SKIP id=\(stream.id) reason=item-failed error=\(error)")
                    await playNextWerkoutClip(on: player)
                    return
                }
            }
        }
    }
}

/// Selects and pins the preferred audible rendition on `item` once the group loads;
/// silently does nothing when the asset has no audible selection group.
@MainActor
private func enforcePinnedMultiStreamAudioSelection(on item: AVPlayerItem, streamID: String) async {
    let asset = item.asset
    guard let group = try? await asset.loadMediaSelectionGroup(for: .audible), let option = preferredMultiStreamAudioOption(in: group) else { return }
    let current = item.currentMediaSelection.selectedMediaOption(in: group)
    if current != option {
        item.select(option, in: group)
    }
    logMultiView(
        "pinAudioSelection id=\(streamID) selected=\(option.displayName) current=\(current?.displayName ?? "nil") options=\(group.options.count)"
    )
}

/// Picks the highest-scoring audio option (see multiStreamAudioPreferenceScore).
private func preferredMultiStreamAudioOption(in group: AVMediaSelectionGroup) -> AVMediaSelectionOption?
{
    let defaultOption = group.defaultOption
    // Highest preference score wins; falls back to the default option, then the first.
    return group.options.max { lhs, rhs in
        multiStreamAudioPreferenceScore(for: lhs, defaultOption: defaultOption) < multiStreamAudioPreferenceScore(for: rhs, defaultOption: defaultOption)
    } ?? defaultOption ?? group.options.first
}

/// Heuristic ranking of an audio rendition by its display name: prefers the default,
/// stereo/main, and English tracks; penalizes surround, alternate-language, and
/// video-description tracks.
private func multiStreamAudioPreferenceScore(for option: AVMediaSelectionOption, defaultOption: AVMediaSelectionOption?) -> Int {
    let name = option.displayName.lowercased()
    var score = 0
    if option == defaultOption { score += 40 }
    if name.contains("stereo") || name.contains("2.0") || name.contains("main") { score += 30 }
    if name.contains("english") || name.contains("eng") { score += 20 }
    if name.contains("surround") || name.contains("5.1") || name.contains("atmos") { score -= 30 }
    if name.contains("spanish") || name.contains("sap") || name.contains("descriptive") || name.contains("alternate") { score -= 25 }
    if option.hasMediaCharacteristic(.describesVideoForAccessibility) { score -= 40 }
    return score
}

/// Hosts an AVPlayerLayer-backed UIView for one tile.
private struct MultiStreamPlayerLayerView: UIViewRepresentable {
    let player: AVPlayer
    let streamID: String
    let videoGravity: AVLayerVideoGravity

    func makeUIView(context: Context) -> MultiStreamPlayerLayerContainerView {
        let view = MultiStreamPlayerLayerContainerView(videoGravity: videoGravity)
        view.playerLayer.player = player
        logMultiView("playerLayer makeUIView id=\(streamID) rate=\(player.rate) gravity=\(videoGravity.rawValue)")
        return view
    }

    func updateUIView(_ uiView: MultiStreamPlayerLayerContainerView, context: Context) {
        // Only reassign on actual change to avoid layer churn.
        if uiView.playerLayer.player !== player {
            uiView.playerLayer.player = player
            logMultiView("playerLayer updateUIView reassigned player id=\(streamID) rate=\(player.rate)")
        }
        if uiView.playerLayer.videoGravity != videoGravity {
            uiView.playerLayer.videoGravity = videoGravity
            logMultiView("playerLayer updateUIView changed gravity id=\(streamID) gravity=\(videoGravity.rawValue)")
        }
    }

    static func dismantleUIView(_ uiView: MultiStreamPlayerLayerContainerView, coordinator: ()) {
        // Detach the player so the layer releases its reference.
        uiView.playerLayer.player = nil
    }
}

/// UIView whose backing layer IS an AVPlayerLayer, so video tracks the view's bounds.
private final class MultiStreamPlayerLayerContainerView: UIView {
    override class var layerClass: AnyClass { AVPlayerLayer.self }
    var playerLayer: AVPlayerLayer { layer as! AVPlayerLayer }

    init(videoGravity: AVLayerVideoGravity) {
        super.init(frame: .zero)
        backgroundColor = .black
        playerLayer.videoGravity = videoGravity
    }

    override init(frame: CGRect) {
        super.init(frame: frame)
        backgroundColor = .black
        playerLayer.videoGravity = .resizeAspectFill
    }

    @available(*, unavailable)
    required init?(coder: NSCoder) {
        fatalError("init(coder:) has not been implemented")
    }
}

/// KVO + NotificationCenter instrumentation for one player/item pair; logs every
/// status/buffer transition and forwards didPlayToEnd to an optional callback.
private final class MultiStreamPlaybackDiagnostics: ObservableObject {
    private var playerObservations: [NSKeyValueObservation] = []
    private var notificationTokens: [NSObjectProtocol] = []
    // Identity of the currently-observed player/item, making attach idempotent.
    private var attachedPlayerIdentifier: ObjectIdentifier?
    private var attachedItemIdentifier: ObjectIdentifier?

    /// Observes `player` (and its current item, when present). Re-attaching the same
    /// player+item pair is a no-op; anything else clears prior observers first.
    func attach(
        to player: AVPlayer,
        streamID: String,
        label: String,
        onPlaybackEnded: (@MainActor @Sendable () async -> Void)? = nil
    ) {
        let playerIdentifier = ObjectIdentifier(player)
        let itemIdentifier = player.currentItem.map { ObjectIdentifier($0) }
        if attachedPlayerIdentifier == playerIdentifier, attachedItemIdentifier == itemIdentifier {
            return
        }
        clear(streamID: streamID, reason: "reattach")
        attachedPlayerIdentifier = playerIdentifier
        attachedItemIdentifier = itemIdentifier
        logMultiView("diagnostics attach id=\(streamID) label=\(label) playerRate=\(player.rate)")
        playerObservations.append(
            player.observe(\.status, options: [.initial, .new]) { player, _ in
                logMultiView("player status id=\(streamID) status=\(multiViewStatusDescription(player.status)) error=\(player.error?.localizedDescription ?? "nil")")
            }
        )
        playerObservations.append(
            player.observe(\.timeControlStatus, options: [.initial, .new]) { player, _ in
                let reason = player.reasonForWaitingToPlay?.rawValue ?? "nil"
                logMultiView("player timeControl id=\(streamID) status=\(multiViewTimeControlDescription(player.timeControlStatus)) reason=\(reason) rate=\(player.rate)")
            }
        )
        playerObservations.append(
            player.observe(\.reasonForWaitingToPlay, options: [.initial, .new]) { player, _ in
                logMultiView("player waitingReason id=\(streamID) value=\(player.reasonForWaitingToPlay?.rawValue ?? "nil")")
            }
        )
        guard let item = player.currentItem else {
            logMultiView("diagnostics attach id=\(streamID) missing currentItem")
            return
        }
        playerObservations.append(
            item.observe(\.status, options: [.initial, .new]) { item, _ in
                logMultiView("playerItem status id=\(streamID) status=\(multiViewItemStatusDescription(item.status)) error=\(item.error?.localizedDescription ?? "nil")")
            }
        )
        playerObservations.append(
            item.observe(\.isPlaybackBufferEmpty, options: [.initial, .new]) { item, _ in
                logMultiView("playerItem bufferEmpty id=\(streamID) value=\(item.isPlaybackBufferEmpty)")
            }
        )
        playerObservations.append(
            item.observe(\.isPlaybackLikelyToKeepUp, options: [.initial, .new]) { item, _ in
                logMultiView("playerItem likelyToKeepUp id=\(streamID) value=\(item.isPlaybackLikelyToKeepUp)")
            }
        )
        playerObservations.append(
            item.observe(\.isPlaybackBufferFull, options: [.initial, .new]) { item, _ in
                logMultiView("playerItem bufferFull id=\(streamID) value=\(item.isPlaybackBufferFull)")
            }
        )
        notificationTokens.append(
            NotificationCenter.default.addObserver(
                forName: .AVPlayerItemPlaybackStalled,
                object: item,
                queue: .main
            ) { _ in
                logMultiView("playerItem stalled id=\(streamID)")
            }
        )
        notificationTokens.append(
            NotificationCenter.default.addObserver(
                forName: .AVPlayerItemFailedToPlayToEndTime,
                object: item,
                queue: .main
            ) { notification in
                let error = notification.userInfo?[AVPlayerItemFailedToPlayToEndTimeErrorKey] as? NSError
                logMultiView("playerItem failedToEnd id=\(streamID) error=\(error?.localizedDescription ?? "nil")")
            }
        )
        notificationTokens.append(
            NotificationCenter.default.addObserver(
                forName: .AVPlayerItemDidPlayToEndTime,
                object: item,
                queue: .main
            ) { _ in
                logMultiView("playerItem didPlayToEnd id=\(streamID)")
                guard let onPlaybackEnded else { return }
                Task { @MainActor in
                    await onPlaybackEnded()
                }
            }
        )
        notificationTokens.append(
            NotificationCenter.default.addObserver(
                forName: .AVPlayerItemNewErrorLogEntry,
                object: item,
                queue: .main
            ) { _ in
                let event = item.errorLog()?.events.last
                logMultiView("playerItem errorLog id=\(streamID) domain=\(event?.errorDomain ?? "nil") statusCode=\(event?.errorStatusCode ?? 0) comment=\(event?.errorComment ?? "nil")")
            }
        )
        notificationTokens.append(
            NotificationCenter.default.addObserver(
                forName: .AVPlayerItemNewAccessLogEntry,
                object: item,
                queue: .main
            ) { _ in
                if let event = item.accessLog()?.events.last {
                    logMultiView("playerItem accessLog id=\(streamID) indicatedBitrate=\(Int(event.indicatedBitrate)) observedBitrate=\(Int(event.observedBitrate)) transferDuration=\(event.transferDuration)")
                } else {
                    logMultiView("playerItem accessLog id=\(streamID) missing event")
                }
            }
        )
    }

    /// Drops every KVO observation and notification token; safe to call repeatedly.
    func clear(streamID: String, reason: String) {
        if !playerObservations.isEmpty || !notificationTokens.isEmpty {
            logMultiView("diagnostics clear id=\(streamID) reason=\(reason)")
        }
        playerObservations.removeAll()
        for token in notificationTokens {
            NotificationCenter.default.removeObserver(token)
        }
        notificationTokens.removeAll()
        attachedPlayerIdentifier = nil
        attachedItemIdentifier = nil
    }

    deinit {
        // Mirror of clear(streamID:reason:) without logging, for final teardown.
        playerObservations.removeAll()
        for token in notificationTokens {
            NotificationCenter.default.removeObserver(token)
        }
        notificationTokens.removeAll()
    }
}

/// Segmented pill control for choosing the multi-view layout mode.
private struct MultiViewLayoutPicker: View {
    @Environment(GamesViewModel.self) private var viewModel
    // Compact mode shrinks fonts and paddings for tighter placements.
    let compact: Bool

    var body: some View {
        HStack(spacing: compact ? 8 : 10) {
            ForEach(MultiViewLayoutMode.allCases) { mode in
                Button {
                    viewModel.multiViewLayoutMode = mode
                } label: {
                    HStack(spacing: 8) {
                        Image(systemName: mode.systemImage)
                            .font(.system(size: compact ? 11 : 12, weight: .bold))
                        Text(mode.title)
                            .font(.system(size: compact ? 13 : 14, weight: .semibold))
                    }
                    .foregroundStyle(viewModel.multiViewLayoutMode == mode ? .white : .white.opacity(0.62))
                    .padding(.horizontal, compact ? 12 : 14)
                    .padding(.vertical, compact ? 9 : 10)
                    .background(
                        RoundedRectangle(cornerRadius: 999)
                            .fill(viewModel.multiViewLayoutMode == mode ? .blue.opacity(0.28) : .white.opacity(0.06))
                    )
                    .overlay(
                        RoundedRectangle(cornerRadius: 999)
                            .stroke(viewModel.multiViewLayoutMode == mode ? .blue.opacity(0.9) : .white.opacity(0.12), lineWidth: 1)
                    )
                }
                .platformCardStyle()
            }
        }
    }
}

/// Per-stream control sheet: audio routing, promotion to main, reordering, and removal.
struct StreamControlSheet: View {
    let streamID: String
    let onRemove: () -> Void
    @Environment(GamesViewModel.self) private var viewModel
    @Environment(\.dismiss) private var dismiss

    // The stream can be removed while the sheet is open; all derived state is optional-safe.
    private var stream: ActiveStream? { viewModel.activeStreams.first { $0.id == streamID } }
    private var isPrimary: Bool { viewModel.isPrimaryStream(streamID) }
    private var isAudioFocused: Bool { viewModel.audioFocusStreamID == streamID }
    private var forceMuteAudio: Bool { stream?.forceMuteAudio == true }

    var body: some View {
        ZStack {
            LinearGradient(
                colors: [
                    Color(red: 0.05, green: 0.06, blue: 0.1),
                    Color(red: 0.03, green: 0.04, blue: 0.08),
                ],
                startPoint: .topLeading,
                endPoint: .bottomTrailing
            )
            .ignoresSafeArea()
            if let stream {
                VStack(alignment: .leading, spacing: 26) {
                    VStack(alignment: .leading, spacing: 10) {
                        Text(stream.label)
                            .font(.system(size: 30, weight: .bold, design: .rounded))
                            .foregroundStyle(.white)
                        Text(stream.game.displayTitle)
                            .font(.system(size: 17, weight: .medium))
                            .foregroundStyle(.white.opacity(0.6))
                        HStack(spacing: 10) {
                            controlBadge(title: isPrimary ? "Primary Tile" : "Secondary Tile", tint: .blue)
                            controlBadge(
                                title: forceMuteAudio ? "Video Only" : (isAudioFocused ? "Live Audio" : "Muted"),
                                tint: forceMuteAudio ? .orange : (isAudioFocused ? .green : .white)
                            )
                        }
                    }
                    VStack(spacing: 14) {
                        HStack(spacing: 14) {
                            // Audio routing card — disabled for force-muted channels.
                            actionCard(
                                title: forceMuteAudio ? "Audio Disabled" : (isAudioFocused ? "Mute All" : "Listen Here"),
                                subtitle: forceMuteAudio ? "This channel is always muted." : (isAudioFocused ? "Silence the multiview mix." : "Route game audio to this tile."),
                                icon: forceMuteAudio ? "speaker.slash.circle.fill" : (isAudioFocused ? "speaker.slash.fill" : "speaker.wave.2.fill"),
                                tint: forceMuteAudio ? .orange : (isAudioFocused ? .white : .green),
                                disabled: forceMuteAudio
                            ) {
                                viewModel.toggleAudioFocus(streamID: streamID)
                            }
                            actionCard(
                                title: isPrimary ? "Already Main" : "Make Main Tile",
                                subtitle: "Spotlight layout always favors the first stream.",
                                icon: "arrow.up.left.and.arrow.down.right",
                                tint: .blue,
                                disabled: isPrimary
                            ) {
                                viewModel.promoteStream(id: streamID)
                            }
                        }
                        HStack(spacing: 14) {
                            actionCard(
                                title: "Move Earlier",
                                subtitle: "Shift this feed toward the front of the order.",
                                icon: "arrow.left",
                                tint: .white,
                                disabled: !viewModel.canMoveStream(id: streamID, direction: -1)
                            ) {
                                viewModel.moveStream(id: streamID, direction: -1)
                            }
                            actionCard(
                                title: "Move Later",
                                subtitle: "Shift this feed deeper into the stack.",
                                icon: "arrow.right",
                                tint: .white,
                                disabled: !viewModel.canMoveStream(id: streamID, direction: 1)
                            ) {
                                viewModel.moveStream(id: streamID, direction: 1)
                            }
                        }
                        actionCard(
                            title: "Remove from Multi-View",
                            subtitle: "Close this feed and free one slot immediately.",
                            icon: "trash.fill",
                            tint: .red,
                            disabled: false,
                            destructive: true,
                            wide: true
                        ) {
                            onRemove()
                        }
                    }
                    Spacer()
                }
                .padding(44)
            } else {
                // Fallback when the stream vanished while the sheet was presented.
                VStack(spacing: 18) {
                    Text("Stream Removed")
                        .font(.system(size: 28, weight: .bold, design: .rounded))
                        .foregroundStyle(.white)
                    Text("This feed is no longer active.")
                        .font(.system(size: 17, weight: .medium))
                        .foregroundStyle(.white.opacity(0.6))
                }
                .padding(40)
            }
        }
    }

    /// Small capsule badge used in the sheet header.
    private func controlBadge(title: String, tint: Color) -> some View {
        Text(title)
            .font(.system(size: 13, weight: .semibold))
            .foregroundStyle(tint.opacity(0.95))
            .padding(.horizontal, 12)
            .padding(.vertical, 8)
            .background(.white.opacity(0.08))
            .clipShape(Capsule())
    }

    /// Large tappable card; performs `action`, then dismisses the sheet.
    private func actionCard(
        title: String,
        subtitle: String,
        icon: String,
        tint: Color,
        disabled: Bool,
        destructive: Bool = false,
        wide: Bool = false,
        action: @escaping () -> Void
    ) -> some View {
        Button {
            action()
            dismiss()
        } label: {
            VStack(alignment: .leading, spacing: 14) {
                Image(systemName: icon)
                    .font(.system(size: 22, weight: .bold))
                    .foregroundStyle(disabled ? .white.opacity(0.28) : tint.opacity(0.95))
                VStack(alignment: .leading, spacing: 6) {
                    Text(title)
                        .font(.system(size: 20, weight: .bold, design: .rounded))
                        .foregroundStyle(disabled ? .white.opacity(0.32) : .white)
                    Text(subtitle)
                        .font(.system(size: 14, weight: .medium))
                        .foregroundStyle(.white.opacity(disabled ? 0.2 : 0.5))
                }
            }
            .frame(maxWidth: .infinity, minHeight: wide ? 108 : 168, alignment: .leading)
            .padding(22)
            .background(
                RoundedRectangle(cornerRadius: 22)
                    .fill(destructive ? .red.opacity(0.12) : .white.opacity(0.06))
            )
            .overlay(
                RoundedRectangle(cornerRadius: 22)
                    .stroke(disabled ? .white.opacity(0.08) : tint.opacity(0.28), lineWidth: 1)
            )
        }
        .disabled(disabled)
        .platformCardStyle()
    }
}

// MultiStreamFullScreenView — the declaration continues past this chunk of the file.
struct MultiStreamFullScreenView: View {
    @Environment(GamesViewModel.self) private var viewModel
    @Environment(\.dismiss) private var dismiss
    // Non-nil while the per-stream control sheet is presented.
    @State private var selectedStream: StreamSelection?
  // Full-screen canvas: black backdrop, edge-to-edge tiles, ticker pinned to
  // the bottom. Member of MultiStreamFullScreenView (header above this chunk).
  var body: some View {
    ZStack(alignment: .bottom) {
      Color.black.ignoresSafeArea()
      MultiViewCanvas(
        contentInsets: 10, gap: 10, videoGravity: .resizeAspect, cornerRadius: 12,
        onSelect: { stream in selectedStream = StreamSelection(id: stream.id) }
      )
      .ignoresSafeArea()
      ScoresTickerView()
        .allowsHitTesting(false)
        .padding(.horizontal, 18)
        .padding(.bottom, 12)
    }
    .overlay(alignment: .topTrailing) {
      // tvOS has no on-screen close button; it relies on the exit command below.
      #if os(iOS)
        Button {
          dismiss()
        } label: {
          Image(systemName: "xmark.circle.fill")
            .font(.system(size: 28, weight: .bold))
            .foregroundStyle(.white.opacity(0.9))
            .padding(20)
        }
      #endif
    }
    #if os(tvOS)
      .onExitCommand { dismiss() }
    #endif
    .onChange(of: viewModel.activeStreams.count) { _, count in
      // Auto-dismiss when the last stream goes away.
      if count == 0 { dismiss() }
    }
    .sheet(item: $selectedStream) { selection in
      StreamControlSheet(
        streamID: selection.id,
        onRemove: {
          viewModel.removeStream(id: selection.id)
          selectedStream = nil
          if viewModel.activeStreams.isEmpty { dismiss() }
        }
      )
    }
    .onAppear { viewModel.startAutoRefresh() }
  }
}

/// Computes tile rects for `count` streams laid out in `mode` within `size`.
/// `inset` is the outer margin on every edge, `gap` the spacing between tiles.
/// Index 0 is the primary tile; spotlight modes give it roughly 2/3 of the
/// width. Branches return at most four rects — streams beyond the fourth get
/// no frame from this function.
private func multiViewFrames(
  count: Int, mode: MultiViewLayoutMode, size: CGSize, inset: CGFloat, gap: CGFloat
) -> [CGRect] {
  // Usable content area after removing the outer inset on both edges.
  let width = max(size.width - (inset * 2), 0)
  let height = max(size.height - (inset * 2), 0)
  switch (mode, count) {
  case (_, 0): return []
  case (_, 1): return [CGRect(x: inset, y: inset, width: width, height: height)]
  case (.spotlight, 2):
    // Side-by-side; primary gets 66% of the width.
    let primaryWidth = width * 0.66
    let secondaryWidth = width - primaryWidth - gap
    return [
      CGRect(x: inset, y: inset, width: primaryWidth, height: height),
      CGRect(x: inset + primaryWidth + gap, y: inset, width: secondaryWidth, height: height),
    ]
  case (.spotlight, 3):
    // Primary column plus a two-tile rail on the right.
    let primaryWidth = width * 0.66
    let railWidth = width - primaryWidth - gap
    let railHeight = (height - gap) / 2
    return [
      CGRect(x: inset, y: inset, width: primaryWidth, height: height),
      CGRect(x: inset + primaryWidth + gap, y: inset, width: railWidth, height: railHeight),
      CGRect(x: inset + primaryWidth + gap, y: inset + railHeight + gap, width: railWidth, height: railHeight),
    ]
  case (.spotlight, _):
    // 4+ streams: primary column (68%) plus a three-tile rail.
    let primaryWidth = width * 0.68
    let railWidth = width - primaryWidth - gap
    let railHeight = (height - (gap * 2)) / 3
    return [
      CGRect(x: inset, y: inset, width: primaryWidth, height: height),
      CGRect(x: inset + primaryWidth + gap, y: inset, width: railWidth, height: railHeight),
      CGRect(x: inset + primaryWidth + gap, y: inset + railHeight + gap, width: railWidth, height: railHeight),
      CGRect(x: inset + primaryWidth + gap, y: inset + (railHeight * 2) + (gap * 2), width: railWidth, height: railHeight),
    ]
  case (.balanced, 2):
    // Even vertical split.
    let cellWidth = (width - gap) / 2
    return [
      CGRect(x: inset, y: inset, width: cellWidth, height: height),
      CGRect(x: inset + cellWidth + gap, y: inset, width: cellWidth, height: height),
    ]
  case (.balanced, 3):
    // Two tiles on top, one full-width tile below.
    let cellWidth = (width - gap) / 2
    let cellHeight = (height - gap) / 2
    return [
      CGRect(x: inset, y: inset, width: cellWidth, height: cellHeight),
      CGRect(x: inset + cellWidth + gap, y: inset, width: cellWidth, height: cellHeight),
      CGRect(x: inset, y: inset + cellHeight + gap, width: width, height: cellHeight),
    ]
  default:
    // Balanced 4+: 2×2 grid.
    let cellWidth = (width - gap) / 2
    let cellHeight = (height - gap) / 2
    return [
      CGRect(x: inset, y: inset, width: cellWidth, height: cellHeight),
      CGRect(x: inset + cellWidth + gap, y: inset, width: cellWidth, height: cellHeight),
      CGRect(x: inset, y: inset + cellHeight + gap, width: cellWidth, height: cellHeight),
      CGRect(x: inset + cellWidth + gap, y: inset + cellHeight + gap, width: cellWidth, height: cellHeight),
    ]
  }
}

// Focus-engine bookkeeping: a stream's tile id and its on-screen rect.
private struct MultiViewFocusEntry {
  let streamID: String
  let frame: CGRect
}

#if os(tvOS)
  /// Picks the stream whose tile focus should move to for a directional
  /// remote press. Body continues in the next chunk.
  private func nextMultiViewFocusID(
    from currentID: String?, direction: MoveCommandDirection, entries: [MultiViewFocusEntry]
  ) -> String?
{
  guard !entries.isEmpty else { return nil }
  // No current focus (or it vanished): fall back to the first tile.
  guard let currentID, let currentEntry = entries.first(where: { $0.streamID == currentID }) else {
    return entries.first?.streamID
  }
  // Keep only tiles lying in the pressed direction. primaryDistance is the
  // center-to-center distance along the movement axis; secondaryDistance is
  // the cross-axis offset used as a tie-breaker.
  let candidates: [(entry: MultiViewFocusEntry, primaryDistance: CGFloat, secondaryDistance: CGFloat)] = entries.compactMap { candidate in
    guard candidate.streamID != currentEntry.streamID else { return nil }
    let xDelta = candidate.frame.midX - currentEntry.frame.midX
    let yDelta = candidate.frame.midY - currentEntry.frame.midY
    switch direction {
    case .left:
      guard xDelta < 0 else { return nil }
      return (candidate, abs(xDelta), abs(yDelta))
    case .right:
      guard xDelta > 0 else { return nil }
      return (candidate, abs(xDelta), abs(yDelta))
    case .up:
      guard yDelta < 0 else { return nil }
      return (candidate, abs(yDelta), abs(xDelta))
    case .down:
      guard yDelta > 0 else { return nil }
      return (candidate, abs(yDelta), abs(xDelta))
    default:
      return nil
    }
  }
  // Nearest along the movement axis wins; near-ties (within 1 pt) break on
  // the cross-axis distance.
  return candidates
    .sorted { lhs, rhs in
      if abs(lhs.primaryDistance - rhs.primaryDistance) > 1 { return lhs.primaryDistance < rhs.primaryDistance }
      return lhs.secondaryDistance < rhs.secondaryDistance
    }
    .first?
    .entry
    .streamID
}
#endif

// MARK: - AudioDiagnostics
//
// Audio-specific diagnostic logging. Attach one `AudioDiagnostics` per AVPlayer
// you want to track. Emits a `[AUDIO]`-prefixed 1 Hz heartbeat with rate,
// mute, time, bitrate, and route info — plus immediate logs on route changes,
// interruptions, access-log events, and media-selection changes.
//
// Grep Xcode console with `[AUDIO]` to isolate these lines.
@MainActor
final class AudioDiagnostics {
  // Identifies this instance (stream id) in every log line.
  private let tag: String
  // Weak: diagnostics must never extend the player's lifetime.
  private weak var player: AVPlayer?
  // TODO(review): generic arguments appear to be missing here (expected
  // something like `Task<Void, Never>?`) — confirm against the original source.
  private var heartbeatTask: Task?
private var observations: [NSKeyValueObservation] = []  // KVO tokens for player properties
private var tokens: [NSObjectProtocol] = []  // NotificationCenter observer tokens for the current item
// Ensures the session/route observers below are registered only once per process.
private static var processWideInstalled = false

init(tag: String) {
  self.tag = tag
  AudioDiagnostics.installProcessWideObservers()
}

deinit {
  heartbeatTask?.cancel()
  // FIX: previously only the heartbeat was cancelled here, which leaked the
  // block-based NotificationCenter observers in `tokens` whenever an instance
  // was deallocated without a prior detach(). Mirror the playback-diagnostics
  // class above and unregister them explicitly.
  for t in tokens { NotificationCenter.default.removeObserver(t) }
  tokens.removeAll()
}

/// Starts observing `player`: KVO on rate/mute/volume/currentItem, item-level
/// notifications, and the 1 Hz heartbeat. Any previous attachment is torn
/// down first.
func attach(to player: AVPlayer) {
  detach()
  self.player = player
  log("attach rate=\(player.rate) isMuted=\(player.isMuted) volume=\(player.volume)")
  observations.append(
    player.observe(\.rate, options: [.new]) { [weak self] p, _ in
      // KVO fires off-main; hop to the main actor before logging.
      Task { @MainActor in self?.log("rate-change rate=\(p.rate) tc=\(p.timeControlStatus.rawValue)") }
    }
  )
  observations.append(
    player.observe(\.isMuted, options: [.new]) { [weak self] p, _ in
      Task { @MainActor in self?.log("isMuted-change value=\(p.isMuted)") }
    }
  )
  observations.append(
    player.observe(\.volume, options: [.new]) { [weak self] p, _ in
      Task { @MainActor in self?.log("volume-change value=\(p.volume)") }
    }
  )
  if let item = player.currentItem { attachItemObservers(item) }
  observations.append(
    player.observe(\.currentItem, options: [.new]) { [weak self] p, _ in
      Task { @MainActor in
        guard let self else { return }
        self.log("currentItem-change newItem=\(p.currentItem != nil)")
        // Re-hook item-level observers whenever the player swaps items.
        if let item = p.currentItem { self.attachItemObservers(item) }
      }
    }
  )
  startHeartbeat()
}

/// Stops the heartbeat and removes every observation/token; safe to call
/// repeatedly.
func detach() {
  heartbeatTask?.cancel()
  heartbeatTask = nil
  observations.removeAll()
  for t in tokens { NotificationCenter.default.removeObserver(t) }
  tokens.removeAll()
  player = nil
}

/// Registers per-item notifications: access log, error log, and audible
/// media-selection changes.
private func attachItemObservers(_ item: AVPlayerItem) {
  tokens.append(
    NotificationCenter.default.addObserver(
      forName: .AVPlayerItemNewAccessLogEntry, object: item, queue: .main
    ) { [weak self, weak item] _ in
      guard let self, let event = item?.accessLog()?.events.last else { return }
      Task { @MainActor in
        self.log(
          "accessLog indicated=\(Int(event.indicatedBitrate)) observed=\(Int(event.observedBitrate)) switches=\(event.numberOfMediaRequests) stalls=\(event.numberOfStalls) avgVideo=\(Int(event.averageVideoBitrate)) avgAudio=\(Int(event.averageAudioBitrate))"
        )
      }
    }
  )
  tokens.append(
    NotificationCenter.default.addObserver(
      forName: .AVPlayerItemNewErrorLogEntry, object: item, queue: .main
    ) { [weak self, weak item] _ in
      guard let self, let event = item?.errorLog()?.events.last else { return }
      Task { @MainActor in
        self.log("errorLog domain=\(event.errorDomain) statusCode=\(event.errorStatusCode) comment=\(event.errorComment ?? "nil")")
      }
    }
  )
  tokens.append(
    NotificationCenter.default.addObserver(
      forName: AVPlayerItem.mediaSelectionDidChangeNotification, object: item, queue: .main
    ) { [weak self, weak item] _ in
      guard let self, let item else { return }
      Task { @MainActor in
        // Re-assert the pinned audio track before reporting what is selected.
        await enforcePinnedMultiStreamAudioSelection(on: item, streamID: self.tag)
        let asset = item.asset
        guard let group = try? await asset.loadMediaSelectionGroup(for: .audible),
          let selected = item.currentMediaSelection.selectedMediaOption(in: group)
        else {
          self.log("mediaSelection-change selected=nil")
          return
        }
        let codec = (selected.mediaSubTypes as [NSNumber]).map { audioDiagFourCC($0.uint32Value) }.joined(separator: ",")
        self.log("mediaSelection-change audio=\"\(selected.displayName)\" codec=\(codec)")
      }
    }
  )
}

/// (Re)starts the 1 Hz heartbeat task; cancelling any previous one first.
private func startHeartbeat() {
  heartbeatTask?.cancel()
  heartbeatTask = Task { @MainActor [weak self] in
    while !Task.isCancelled {
      self?.emitHeartbeat()
      try? await Task.sleep(for: .seconds(1))
    }
  }
}

/// One heartbeat line: time, rate, mute/volume, last-known bitrates, buffer
/// health, and the current audio route. Silently skips when no player/item.
private func emitHeartbeat() {
  guard let player, let item = player.currentItem else { return }
  let currentTime = CMTimeGetSeconds(player.currentTime())
  let event = item.accessLog()?.events.last
  let indicated = event.map { Int($0.indicatedBitrate) } ?? 0
  let observed = event.map { Int($0.observedBitrate) } ?? 0
  log(
    "hb t=\(String(format: "%.1f", currentTime))s rate=\(player.rate) tc=\(player.timeControlStatus.rawValue) muted=\(player.isMuted) vol=\(String(format: "%.2f", player.volume)) indicated=\(indicated) observed=\(observed) ltku=\(item.isPlaybackLikelyToKeepUp) route=\(AudioDiagnostics.currentRouteDescription())"
  )
}

/// All instance logging funnels through here so every line carries the tag
/// and a millisecond timestamp.
private func log(_ message: String) {
  let ts = AudioDiagnostics.timestamp()
  print("[AUDIO \(tag) \(ts)] \(message)")
}

// MARK: Process-wide

/// Installs process-wide AVAudioSession observers (route change and
/// interruption) exactly once; these are intentionally never removed.
private static func installProcessWideObservers() {
  guard !processWideInstalled else { return }
  processWideInstalled = true
  let session = AVAudioSession.sharedInstance()
  print("[AUDIO SYSTEM \(timestamp())] initial category=\(session.category.rawValue) mode=\(session.mode.rawValue) sampleRate=\(session.sampleRate) route=\(currentRouteDescription())")
  NotificationCenter.default.addObserver(
    forName: AVAudioSession.routeChangeNotification, object: nil, queue: .main
  ) { notification in
    let reasonValue = notification.userInfo?[AVAudioSessionRouteChangeReasonKey] as? UInt ?? 0
    let reason = AVAudioSession.RouteChangeReason(rawValue: reasonValue).map(audioDiagReasonDescription) ?? "unknown(\(reasonValue))"
    let prev = (notification.userInfo?[AVAudioSessionRouteChangePreviousRouteKey] as? AVAudioSessionRouteDescription).map(routeDescription) ?? "nil"
    print("[AUDIO SYSTEM \(timestamp())] routeChange reason=\(reason) previous=\(prev) current=\(currentRouteDescription())")
  }
  NotificationCenter.default.addObserver(
    forName: AVAudioSession.interruptionNotification, object: nil, queue: .main
  ) { notification in
    let typeValue = notification.userInfo?[AVAudioSessionInterruptionTypeKey] as? UInt ?? 0
    let type = AVAudioSession.InterruptionType(rawValue: typeValue).map(audioDiagInterruptionDescription) ??
"unknown(\(typeValue))"
    print("[AUDIO SYSTEM \(timestamp())] interruption type=\(type)")
  }
}

/// Describes the session's current output route.
nonisolated static func currentRouteDescription() -> String {
  routeDescription(AVAudioSession.sharedInstance().currentRoute)
}

/// Renders a route as "portType:portName" pairs joined by commas,
/// or "none" when the route has no outputs.
nonisolated static func routeDescription(_ route: AVAudioSessionRouteDescription) -> String {
  let joined = route.outputs
    .map { "\($0.portType.rawValue):\($0.portName)" }
    .joined(separator: ",")
  return joined.isEmpty ? "none" : joined
}

/// Millisecond-precision wall-clock timestamp for log lines.
nonisolated static func timestamp() -> String {
  audioDiagTSFormatter.string(from: Date())
}
}

// Shared formatter — DateFormatter construction is expensive, so build once.
private let audioDiagTSFormatter: DateFormatter = {
  let formatter = DateFormatter()
  formatter.dateFormat = "HH:mm:ss.SSS"
  return formatter
}()

/// Decodes a big-endian FourCC code into four characters, substituting "?"
/// for any byte outside printable ASCII (0x20–0x7E).
private func audioDiagFourCC(_ raw: UInt32) -> String {
  var decoded = ""
  for shift in stride(from: 24, through: 0, by: -8) {
    let byte = UInt8((raw >> UInt32(shift)) & 0xFF)
    if (0x20...0x7E).contains(byte) {
      decoded.append(Character(UnicodeScalar(byte)))
    } else {
      decoded.append("?")
    }
  }
  return decoded
}

/// Human-readable name for an audio-session route-change reason.
private func audioDiagReasonDescription(_ reason: AVAudioSession.RouteChangeReason) -> String {
  switch reason {
  case .unknown: return "unknown"
  case .newDeviceAvailable: return "newDeviceAvailable"
  case .oldDeviceUnavailable: return "oldDeviceUnavailable"
  case .categoryChange: return "categoryChange"
  case .override: return "override"
  case .wakeFromSleep: return "wakeFromSleep"
  case .noSuitableRouteForCategory: return "noSuitableRouteForCategory"
  case .routeConfigurationChange: return "routeConfigurationChange"
  @unknown default: return "unknown-future"
  }
}

/// Human-readable name for an audio-session interruption type.
private func audioDiagInterruptionDescription(_ type: AVAudioSession.InterruptionType) -> String {
  switch type {
  case .began: return "began"
  case .ended: return "ended"
  @unknown default: return "unknown-future"
  }
}