Compare commits

3 Commits

Author SHA1 Message Date
Trey t
da033cf12c Fix NSFW sheet scroll on iOS/iPad, clean up audio pin
- WerkoutNSFWSheet: wrap content in ScrollView + ViewThatFits(in: .horizontal)
  so iPad's narrow sheet width falls back to VStack and content scrolls.
- Tighten padding on compact layouts (38→24).
- Revert AAC-preference in pinAudioSelection (stream is all AAC, no Dolby).

Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
2026-04-16 19:30:25 -05:00
Trey t
08ad702f9d Add AudioDiagnostics heartbeat logging
Adds [AUDIO]-prefixed logs to both single-stream and multi-stream players:
1 Hz heartbeat with rate/timeControl/mute/volume/bitrate/route, plus
immediate events on rate, isMuted, volume, currentItem, media selection,
access-log, error-log, and system audio route/interruption changes.

Grep Xcode console for `[AUDIO]` or `[AUDIO SYSTEM]` to isolate.

Also reverts the AAC-preference in pinAudioSelection: the
ballgame.treytartt.com master playlist is already all mp4a.40.2 stereo,
so the Dolby-DRC theory doesn't fit. Pin simply selects the default
audible option now.

Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
2026-04-14 20:28:15 -05:00
Trey t
85a19fdd71 Fix mid-stream audio loudness jumps
Root cause: the quality upgrade path called replaceCurrentItem mid-stream,
which re-loaded the HLS master manifest and re-picked an audio rendition,
producing a perceived loudness jump 10-30s into playback. .moviePlayback
mode amplified this by re-initializing cinematic audio processing on each
variant change.

- Start streams directly at user's desiredResolution; remove
  scheduleQualityUpgrade, qualityUpgradeTask, and the 504p->best swap.
- Switch AVAudioSession mode from .moviePlayback to .default in both
  MultiStreamView and SingleStreamPlayerView.
- Pin the HLS audio rendition by selecting the default audible
  MediaSelectionGroup option on every new AVPlayerItem, preventing
  ABR from swapping channel layouts mid-stream.

Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
2026-04-14 20:04:39 -05:00
3 changed files with 424 additions and 117 deletions

View File

@@ -623,7 +623,8 @@ struct WerkoutNSFWSheet: View {
sheetBackground
.ignoresSafeArea()
ViewThatFits {
ScrollView(.vertical, showsIndicators: false) {
ViewThatFits(in: .horizontal) {
HStack(alignment: .top, spacing: 32) {
overviewColumn
.frame(maxWidth: .infinity, alignment: .leading)
@@ -638,7 +639,8 @@ struct WerkoutNSFWSheet: View {
.frame(maxWidth: .infinity, alignment: .leading)
}
}
.padding(38)
}
.padding(usesStackedLayout ? 24 : 38)
.background(
RoundedRectangle(cornerRadius: 34, style: .continuous)
.fill(.black.opacity(0.46))

View File

@@ -352,11 +352,11 @@ private struct MultiStreamTile: View {
@State private var player: AVPlayer?
@State private var hasError = false
@State private var startupPlaybackTask: Task<Void, Never>?
@State private var qualityUpgradeTask: Task<Void, Never>?
@State private var werkoutMonitorTask: Task<Void, Never>?
@State private var clipTimeLimitObserver: Any?
@State private var isAdvancingClip = false
@StateObject private var playbackDiagnostics = MultiStreamPlaybackDiagnostics()
@State private var audioDiagnostics: AudioDiagnostics?
private static let maxClipDuration: Double = 15.0
private static var audioSessionConfigured = false
@@ -446,8 +446,6 @@ private struct MultiStreamTile: View {
logMultiView("tile disappeared id=\(stream.id) label=\(stream.label)")
startupPlaybackTask?.cancel()
startupPlaybackTask = nil
qualityUpgradeTask?.cancel()
qualityUpgradeTask = nil
werkoutMonitorTask?.cancel()
werkoutMonitorTask = nil
if let player {
@@ -456,6 +454,8 @@ private struct MultiStreamTile: View {
}
player = nil
playbackDiagnostics.clear(streamID: stream.id, reason: "tile disappeared")
audioDiagnostics?.detach()
audioDiagnostics = nil
}
#if os(tvOS)
.focusEffectDisabled()
@@ -536,11 +536,8 @@ private struct MultiStreamTile: View {
.clipShape(Capsule())
}
private var multiViewStartupResolution: String { "504p" }
private var multiViewUpgradeTargetResolution: String? {
let desiredResolution = viewModel.defaultResolution
return desiredResolution == multiViewStartupResolution ? nil : desiredResolution
private var multiViewStartupResolution: String {
viewModel.defaultResolution
}
private func startStream() async {
@@ -556,18 +553,18 @@ private struct MultiStreamTile: View {
onPlaybackEnded: playbackEndedHandler(for: player)
)
scheduleStartupPlaybackRecovery(for: player)
scheduleQualityUpgrade(for: player)
installClipTimeLimit(on: player)
attachAudioDiagnostics(to: player)
logMultiView("startStream reused inline player id=\(stream.id) muted=\(player.isMuted)")
return
}
if !Self.audioSessionConfigured {
do {
try AVAudioSession.sharedInstance().setCategory(.playback, mode: .moviePlayback)
try AVAudioSession.sharedInstance().setCategory(.playback, mode: .default)
try AVAudioSession.sharedInstance().setActive(true)
Self.audioSessionConfigured = true
logMultiView("startStream audio session configured id=\(stream.id)")
logMultiView("startStream audio session configured id=\(stream.id) mode=default")
} catch {
logMultiView("startStream audio session failed id=\(stream.id) error=\(error.localizedDescription)")
}
@@ -583,8 +580,8 @@ private struct MultiStreamTile: View {
onPlaybackEnded: playbackEndedHandler(for: existingPlayer)
)
scheduleStartupPlaybackRecovery(for: existingPlayer)
scheduleQualityUpgrade(for: existingPlayer)
installClipTimeLimit(on: existingPlayer)
attachAudioDiagnostics(to: existingPlayer)
logMultiView("startStream reused shared player id=\(stream.id) muted=\(existingPlayer.isMuted)")
return
}
@@ -620,7 +617,7 @@ private struct MultiStreamTile: View {
onPlaybackEnded: playbackEndedHandler(for: avPlayer)
)
scheduleStartupPlaybackRecovery(for: avPlayer)
scheduleQualityUpgrade(for: avPlayer)
attachAudioDiagnostics(to: avPlayer)
logMultiView("startStream attached player id=\(stream.id) muted=\(avPlayer.isMuted) startupResolution=\(multiViewStartupResolution) fastStart=true calling playImmediately(atRate: 1.0)")
avPlayer.playImmediately(atRate: 1.0)
installClipTimeLimit(on: avPlayer)
@@ -633,19 +630,35 @@ private struct MultiStreamTile: View {
)
let item = makePlayerItem(url: url, headers: headers)
return AVPlayer(playerItem: item)
let player = AVPlayer(playerItem: item)
player.appliesMediaSelectionCriteriaAutomatically = false
logMultiView("startStream configured AVPlayer id=\(stream.id) appliesMediaSelectionCriteriaAutomatically=false")
return player
}
private func makePlayerItem(url: URL, headers: [String: String]) -> AVPlayerItem {
let item: AVPlayerItem
if headers.isEmpty {
return AVPlayerItem(url: url)
}
item = AVPlayerItem(url: url)
} else {
let assetOptions: [String: Any] = [
"AVURLAssetHTTPHeaderFieldsKey": headers,
]
let asset = AVURLAsset(url: url, options: assetOptions)
return AVPlayerItem(asset: asset)
item = AVPlayerItem(asset: asset)
}
item.allowedAudioSpatializationFormats = []
logMultiView("startStream configured player item id=\(stream.id) allowedAudioSpatializationFormats=[]")
pinAudioSelection(on: item)
return item
}
/// Pin the HLS audio rendition so ABR can't swap channel layouts mid-stream.
///
/// Fire-and-forget: kicks the async selection work onto the main actor and
/// returns immediately, so item creation is not blocked on asset loading.
private func pinAudioSelection(on item: AVPlayerItem) {
// Capture the id before hopping actors so the log line reflects the stream
// this call was made for, even if `stream` changes later.
let streamID = stream.id
Task { @MainActor in
await enforcePinnedMultiStreamAudioSelection(on: item, streamID: streamID)
}
}
private func scheduleStartupPlaybackRecovery(for player: AVPlayer) {
@@ -686,79 +699,17 @@ private struct MultiStreamTile: View {
}
}
private func scheduleQualityUpgrade(for player: AVPlayer) {
qualityUpgradeTask?.cancel()
guard stream.overrideURL == nil else {
logMultiView("qualityUpgrade skip id=\(stream.id) reason=override-url")
return
}
guard let targetResolution = multiViewUpgradeTargetResolution else {
logMultiView("qualityUpgrade skip id=\(stream.id) reason=target-already-\(multiViewStartupResolution)")
return
}
let streamID = stream.id
let label = stream.label
qualityUpgradeTask = Task { @MainActor in
let checkDelays: [Double] = [2.0, 4.0, 7.0, 15.0, 30.0]
for delay in checkDelays {
try? await Task.sleep(for: .seconds(delay))
guard !Task.isCancelled else { return }
guard let currentPlayer = self.player, currentPlayer === player else {
logMultiView("qualityUpgrade abort id=\(streamID) label=\(label) reason=player-changed")
return
}
let itemStatus = multiViewItemStatusDescription(player.currentItem?.status ?? .unknown)
let likelyToKeepUp = player.currentItem?.isPlaybackLikelyToKeepUp ?? false
let bufferEmpty = player.currentItem?.isPlaybackBufferEmpty ?? false
let indicatedBitrate = player.currentItem?.accessLog()?.events.last?.indicatedBitrate ?? 0
let stable = (itemStatus == "readyToPlay" || likelyToKeepUp) && !bufferEmpty
logMultiView(
"qualityUpgrade check id=\(streamID) delay=\(delay)s targetResolution=\(targetResolution) stable=\(stable) rate=\(player.rate) indicatedBitrate=\(Int(indicatedBitrate))"
)
guard stable else { continue }
guard let upgradedURL = await viewModel.resolveStreamURL(
for: stream,
resolutionOverride: targetResolution,
preserveServerResolutionWhenBest: false
) else {
logMultiView("qualityUpgrade failed id=\(streamID) targetResolution=\(targetResolution) reason=resolve-nil")
return
}
if let currentURL = currentStreamURL(for: player), currentURL == upgradedURL {
logMultiView("qualityUpgrade skip id=\(streamID) reason=same-url targetResolution=\(targetResolution)")
return
}
logMultiView("qualityUpgrade begin id=\(streamID) targetResolution=\(targetResolution) url=\(upgradedURL.absoluteString)")
let upgradedItem = AVPlayerItem(url: upgradedURL)
upgradedItem.preferredForwardBufferDuration = 4
player.replaceCurrentItem(with: upgradedItem)
player.automaticallyWaitsToMinimizeStalling = false
playbackDiagnostics.attach(to: player, streamID: streamID, label: label)
viewModel.attachPlayer(player, to: streamID)
scheduleStartupPlaybackRecovery(for: player)
logMultiView("qualityUpgrade replay id=\(streamID) targetResolution=\(targetResolution)")
player.playImmediately(atRate: 1.0)
return
}
logMultiView("qualityUpgrade timeout id=\(streamID) targetResolution=\(targetResolution)")
}
}
private func currentStreamURL(for player: AVPlayer) -> URL? {
(player.currentItem?.asset as? AVURLAsset)?.url
}
/// Attach `[AUDIO]` diagnostic logging to `player`, creating the tile's
/// `AudioDiagnostics` instance lazily on first use.
///
/// The instance is reused across players; `AudioDiagnostics.attach` detaches
/// any previous player before observing the new one.
private func attachAudioDiagnostics(to player: AVPlayer) {
if audioDiagnostics == nil {
// Tag with the stream label so multi-view log lines are distinguishable.
audioDiagnostics = AudioDiagnostics(tag: "multi:\(stream.label)")
}
audioDiagnostics?.attach(to: player)
}
private func installClipTimeLimit(on player: AVPlayer) {
removeClipTimeLimit(from: player)
@@ -867,6 +818,45 @@ private struct MultiStreamTile: View {
}
}
@MainActor
/// Select the preferred audible media option on `item` so ABR cannot swap
/// audio renditions (and channel layouts) mid-stream.
///
/// - Parameters:
///   - item: The player item whose audible selection group is pinned.
///   - streamID: Identifier used only for log attribution.
///
/// Silently returns when the asset has no audible selection group or no
/// options (e.g. muxed-audio streams). Re-selecting is skipped when the
/// current selection already matches, which also avoids re-triggering the
/// media-selection-change notification this function is called from.
private func enforcePinnedMultiStreamAudioSelection(on item: AVPlayerItem, streamID: String) async {
let asset = item.asset
guard let group = try? await asset.loadMediaSelectionGroup(for: .audible),
let option = preferredMultiStreamAudioOption(in: group) else { return }
let current = item.currentMediaSelection.selectedMediaOption(in: group)
if current != option {
item.select(option, in: group)
}
// Logged unconditionally so "already pinned" passes are visible too.
logMultiView(
"pinAudioSelection id=\(streamID) selected=\(option.displayName) current=\(current?.displayName ?? "nil") options=\(group.options.count)"
)
}
/// Choose the audio rendition to pin for a multi-view tile: the option with
/// the highest `multiStreamAudioPreferenceScore`, breaking ties in favor of
/// the earliest option. Falls back to the group's default option (then any
/// option) only when the group has no options at all.
private func preferredMultiStreamAudioOption(in group: AVMediaSelectionGroup) -> AVMediaSelectionOption? {
    let fallback = group.defaultOption
    var bestOption: AVMediaSelectionOption?
    var bestScore = Int.min
    for candidate in group.options {
        let candidateScore = multiStreamAudioPreferenceScore(for: candidate, defaultOption: fallback)
        // Strict > keeps the first maximal option, matching max(by:) tie-breaking.
        if bestOption == nil || candidateScore > bestScore {
            bestOption = candidate
            bestScore = candidateScore
        }
    }
    return bestOption ?? fallback ?? group.options.first
}
/// Heuristic score for an audio option; higher is more preferable to pin.
///
/// Rewards the group's default option, stereo/main renditions, and English
/// names; penalizes surround/Atmos, secondary-language/SAP/descriptive
/// tracks, and accessibility (video-description) renditions. Each keyword
/// group contributes its delta at most once, regardless of how many of its
/// keywords match.
private func multiStreamAudioPreferenceScore(for option: AVMediaSelectionOption, defaultOption: AVMediaSelectionOption?) -> Int {
    let name = option.displayName.lowercased()
    var score = option == defaultOption ? 40 : 0
    let weighted: [(keywords: [String], delta: Int)] = [
        (["stereo", "2.0", "main"], 30),
        (["english", "eng"], 20),
        (["surround", "5.1", "atmos"], -30),
        (["spanish", "sap", "descriptive", "alternate"], -25),
    ]
    for (keywords, delta) in weighted where keywords.contains(where: name.contains) {
        score += delta
    }
    if option.hasMediaCharacteristic(.describesVideoForAccessibility) {
        score -= 40
    }
    return score
}
private struct MultiStreamPlayerLayerView: UIViewRepresentable {
let player: AVPlayer
let streamID: String
@@ -1481,3 +1471,245 @@ private func nextMultiViewFocusID(
.streamID
}
#endif
// MARK: - AudioDiagnostics
//
// Audio-specific diagnostic logging. Attach one `AudioDiagnostics` per AVPlayer
// you want to track. Emits a `[AUDIO]`-prefixed 1 Hz heartbeat with rate,
// mute, time, bitrate, and route info plus immediate logs on route changes,
// interruptions, access-log events, and media-selection changes.
//
// Grep Xcode console with `[AUDIO]` to isolate these lines.
@MainActor
/// Audio-specific diagnostic logger for one `AVPlayer`.
///
/// Emits `[AUDIO <tag>]`-prefixed lines: a 1 Hz heartbeat (time, rate,
/// time-control status, mute, volume, bitrates, likely-to-keep-up, route)
/// plus immediate logs on rate / mute / volume / currentItem changes,
/// access-log and error-log entries, and media-selection changes. The first
/// instance also installs process-wide `[AUDIO SYSTEM]` observers for audio
/// route changes and interruptions.
///
/// Lifecycle: call `attach(to:)` to start observing and `detach()` when the
/// player goes away. NOTE(review): `deinit` only cancels the heartbeat;
/// notification tokens are removed in `detach()`, so owners should call
/// `detach()` before releasing the instance (the call sites in this file do).
final class AudioDiagnostics {
// Label included in every log line (e.g. "single", "multi:<label>").
private let tag: String
// Weak: diagnostics must never keep a player alive.
private weak var player: AVPlayer?
private var heartbeatTask: Task<Void, Never>?
// KVO observations on the player; invalidated when the array is cleared.
private var observations: [NSKeyValueObservation] = []
// Block-based NotificationCenter tokens for the current item(s).
private var tokens: [NSObjectProtocol] = []
// Guards one-time installation of the process-wide session observers.
private static var processWideInstalled = false
/// Creates a logger with the given tag and ensures the process-wide
/// route/interruption observers are installed once.
init(tag: String) {
self.tag = tag
AudioDiagnostics.installProcessWideObservers()
}
deinit {
// Only the heartbeat can be cancelled here; token removal happens in
// detach() because deinit cannot touch main-actor state.
heartbeatTask?.cancel()
}
/// Begin observing `player`. Detaches any previously attached player first,
/// logs the initial state, installs KVO + item observers, and starts the
/// 1 Hz heartbeat.
func attach(to player: AVPlayer) {
detach()
self.player = player
log("attach rate=\(player.rate) isMuted=\(player.isMuted) volume=\(player.volume)")
observations.append(
player.observe(\.rate, options: [.new]) { [weak self] p, _ in
// KVO may fire off-main; hop to the main actor before logging.
Task { @MainActor in self?.log("rate-change rate=\(p.rate) tc=\(p.timeControlStatus.rawValue)") }
}
)
observations.append(
player.observe(\.isMuted, options: [.new]) { [weak self] p, _ in
Task { @MainActor in self?.log("isMuted-change value=\(p.isMuted)") }
}
)
observations.append(
player.observe(\.volume, options: [.new]) { [weak self] p, _ in
Task { @MainActor in self?.log("volume-change value=\(p.volume)") }
}
)
// Observe the current item now, and re-attach whenever it is replaced.
if let item = player.currentItem {
attachItemObservers(item)
}
observations.append(
player.observe(\.currentItem, options: [.new]) { [weak self] p, _ in
Task { @MainActor in
guard let self else { return }
self.log("currentItem-change newItem=\(p.currentItem != nil)")
if let item = p.currentItem {
self.attachItemObservers(item)
}
}
}
)
startHeartbeat()
}
/// Stop all observation: cancels the heartbeat, drops KVO observations,
/// removes notification tokens, and releases the player reference.
func detach() {
heartbeatTask?.cancel()
heartbeatTask = nil
observations.removeAll()
for t in tokens {
NotificationCenter.default.removeObserver(t)
}
tokens.removeAll()
player = nil
}
/// Install access-log, error-log, and media-selection observers on `item`.
/// NOTE(review): tokens from a previous currentItem are not removed here —
/// they accumulate until detach(). Each observer is scoped to its own item
/// (object: item), so stale tokens should stay silent; confirm intended.
private func attachItemObservers(_ item: AVPlayerItem) {
tokens.append(
NotificationCenter.default.addObserver(
forName: .AVPlayerItemNewAccessLogEntry,
object: item,
queue: .main
) { [weak self, weak item] _ in
guard let self, let event = item?.accessLog()?.events.last else { return }
Task { @MainActor in
self.log(
"accessLog indicated=\(Int(event.indicatedBitrate)) observed=\(Int(event.observedBitrate)) switches=\(event.numberOfMediaRequests) stalls=\(event.numberOfStalls) avgVideo=\(Int(event.averageVideoBitrate)) avgAudio=\(Int(event.averageAudioBitrate))"
)
}
}
)
tokens.append(
NotificationCenter.default.addObserver(
forName: .AVPlayerItemNewErrorLogEntry,
object: item,
queue: .main
) { [weak self, weak item] _ in
guard let self, let event = item?.errorLog()?.events.last else { return }
Task { @MainActor in
self.log("errorLog domain=\(event.errorDomain) statusCode=\(event.errorStatusCode) comment=\(event.errorComment ?? "nil")")
}
}
)
tokens.append(
NotificationCenter.default.addObserver(
forName: AVPlayerItem.mediaSelectionDidChangeNotification,
object: item,
queue: .main
) { [weak self, weak item] _ in
guard let self, let item else { return }
Task { @MainActor in
// Re-assert the pinned selection, then log what is now selected.
// The tag is passed as the streamID for log attribution only.
await enforcePinnedMultiStreamAudioSelection(on: item, streamID: self.tag)
let asset = item.asset
guard let group = try? await asset.loadMediaSelectionGroup(for: .audible),
let selected = item.currentMediaSelection.selectedMediaOption(in: group) else {
self.log("mediaSelection-change selected=nil")
return
}
let codec = (selected.mediaSubTypes as [NSNumber]).map { audioDiagFourCC($0.uint32Value) }.joined(separator: ",")
self.log("mediaSelection-change audio=\"\(selected.displayName)\" codec=\(codec)")
}
}
)
}
/// (Re)start the 1 Hz heartbeat loop; any previous loop is cancelled.
private func startHeartbeat() {
heartbeatTask?.cancel()
heartbeatTask = Task { @MainActor [weak self] in
while !Task.isCancelled {
self?.emitHeartbeat()
try? await Task.sleep(for: .seconds(1))
}
}
}
/// Emit one heartbeat line. No-ops when there is no player or current item.
private func emitHeartbeat() {
guard let player, let item = player.currentItem else { return }
let currentTime = CMTimeGetSeconds(player.currentTime())
// Bitrates come from the most recent access-log event, 0 when none yet.
let event = item.accessLog()?.events.last
let indicated = event.map { Int($0.indicatedBitrate) } ?? 0
let observed = event.map { Int($0.observedBitrate) } ?? 0
log(
"hb t=\(String(format: "%.1f", currentTime))s rate=\(player.rate) tc=\(player.timeControlStatus.rawValue) muted=\(player.isMuted) vol=\(String(format: "%.2f", player.volume)) indicated=\(indicated) observed=\(observed) ltku=\(item.isPlaybackLikelyToKeepUp) route=\(AudioDiagnostics.currentRouteDescription())"
)
}
/// Print a `[AUDIO <tag> <timestamp>]`-prefixed line for console grepping.
private func log(_ message: String) {
let ts = AudioDiagnostics.timestamp()
print("[AUDIO \(tag) \(ts)] \(message)")
}
// MARK: Process-wide
/// One-time installation of `[AUDIO SYSTEM]` observers: logs the initial
/// session state, then every route change and interruption for the process.
private static func installProcessWideObservers() {
guard !processWideInstalled else { return }
processWideInstalled = true
let session = AVAudioSession.sharedInstance()
print("[AUDIO SYSTEM \(timestamp())] initial category=\(session.category.rawValue) mode=\(session.mode.rawValue) sampleRate=\(session.sampleRate) route=\(currentRouteDescription())")
// NOTE(review): these tokens are intentionally never removed — observers
// live for the whole process.
NotificationCenter.default.addObserver(
forName: AVAudioSession.routeChangeNotification,
object: nil,
queue: .main
) { notification in
let reasonValue = notification.userInfo?[AVAudioSessionRouteChangeReasonKey] as? UInt ?? 0
let reason = AVAudioSession.RouteChangeReason(rawValue: reasonValue).map(audioDiagReasonDescription) ?? "unknown(\(reasonValue))"
let prev = (notification.userInfo?[AVAudioSessionRouteChangePreviousRouteKey] as? AVAudioSessionRouteDescription).map(routeDescription) ?? "nil"
print("[AUDIO SYSTEM \(timestamp())] routeChange reason=\(reason) previous=\(prev) current=\(currentRouteDescription())")
}
NotificationCenter.default.addObserver(
forName: AVAudioSession.interruptionNotification,
object: nil,
queue: .main
) { notification in
let typeValue = notification.userInfo?[AVAudioSessionInterruptionTypeKey] as? UInt ?? 0
let type = AVAudioSession.InterruptionType(rawValue: typeValue).map(audioDiagInterruptionDescription) ?? "unknown(\(typeValue))"
print("[AUDIO SYSTEM \(timestamp())] interruption type=\(type)")
}
}
/// Current audio route's outputs, e.g. "Speaker:iPhone Speaker".
nonisolated static func currentRouteDescription() -> String {
routeDescription(AVAudioSession.sharedInstance().currentRoute)
}
/// Comma-joined "portType:portName" list for a route; "none" when empty.
nonisolated static func routeDescription(_ route: AVAudioSessionRouteDescription) -> String {
let outs = route.outputs.map { "\($0.portType.rawValue):\($0.portName)" }.joined(separator: ",")
return outs.isEmpty ? "none" : outs
}
/// Wall-clock timestamp string for log lines (HH:mm:ss.SSS).
nonisolated static func timestamp() -> String {
audioDiagTSFormatter.string(from: Date())
}
}
/// Shared timestamp formatter for `[AUDIO]` log lines (HH:mm:ss.SSS).
/// Created once at file scope so log calls never pay formatter-setup cost.
private let audioDiagTSFormatter: DateFormatter = {
    let formatter = DateFormatter()
    formatter.dateFormat = "HH:mm:ss.SSS"
    return formatter
}()
/// Render a FourCC code (as reported in `mediaSubTypes`) as a 4-character
/// ASCII string, big-endian byte order, substituting "?" for any byte
/// outside the printable ASCII range 0x20...0x7E.
private func audioDiagFourCC(_ raw: UInt32) -> String {
    var rendered = ""
    for shift in stride(from: 24, through: 0, by: -8) {
        let byte = UInt8((raw >> shift) & 0xFF)
        if (0x20...0x7E).contains(byte) {
            rendered.append(Character(UnicodeScalar(byte)))
        } else {
            rendered.append("?")
        }
    }
    return rendered
}
/// Human-readable name for an audio-session route-change reason, used in
/// `[AUDIO SYSTEM]` routeChange log lines.
private func audioDiagReasonDescription(_ reason: AVAudioSession.RouteChangeReason) -> String {
    switch reason {
    case .unknown:
        return "unknown"
    case .newDeviceAvailable:
        return "newDeviceAvailable"
    case .oldDeviceUnavailable:
        return "oldDeviceUnavailable"
    case .categoryChange:
        return "categoryChange"
    case .override:
        return "override"
    case .wakeFromSleep:
        return "wakeFromSleep"
    case .noSuitableRouteForCategory:
        return "noSuitableRouteForCategory"
    case .routeConfigurationChange:
        return "routeConfigurationChange"
    @unknown default:
        // Future AVFoundation cases degrade to a generic label.
        return "unknown-future"
    }
}
/// Human-readable name for an audio-session interruption type, used in
/// `[AUDIO SYSTEM]` interruption log lines.
private func audioDiagInterruptionDescription(_ type: AVAudioSession.InterruptionType) -> String {
    switch type {
    case .began:
        return "began"
    case .ended:
        return "ended"
    @unknown default:
        // Future AVFoundation cases degrade to a generic label.
        return "unknown-future"
    }
}

View File

@@ -57,24 +57,72 @@ private func singleStreamTimeControlDescription(_ status: AVPlayer.TimeControlSt
}
private func makeSingleStreamPlayerItem(from source: SingleStreamPlaybackSource) -> AVPlayerItem {
let item: AVPlayerItem
if source.httpHeaders.isEmpty {
let item = AVPlayerItem(url: source.url)
item.preferredForwardBufferDuration = 8
return item
}
item = AVPlayerItem(url: source.url)
} else {
let assetOptions: [String: Any] = [
"AVURLAssetHTTPHeaderFieldsKey": source.httpHeaders,
]
let asset = AVURLAsset(url: source.url, options: assetOptions)
let item = AVPlayerItem(asset: asset)
item.preferredForwardBufferDuration = 8
item = AVPlayerItem(asset: asset)
logSingleStream(
"Configured authenticated AVURLAsset headerKeys=\(singleStreamHeaderKeysDescription(source.httpHeaders))"
)
}
item.preferredForwardBufferDuration = 8
item.allowedAudioSpatializationFormats = []
logSingleStream("Configured player item preferredForwardBufferDuration=8 allowedAudioSpatializationFormats=[]")
pinSingleStreamAudioSelection(on: item)
return item
}
/// Pin the HLS audio rendition so ABR can't swap channel layouts mid-stream.
///
/// Fire-and-forget: hops to the main actor for the async selection work and
/// returns immediately, so item construction is not blocked on asset loading.
private func pinSingleStreamAudioSelection(on item: AVPlayerItem) {
Task { @MainActor in
await enforcePinnedSingleStreamAudioSelection(on: item)
}
}
@MainActor
/// Select the preferred audible media option on `item` so ABR cannot swap
/// audio renditions (and channel layouts) mid-stream.
///
/// Silently returns when the asset has no audible selection group or no
/// options (e.g. muxed-audio streams). Re-selecting is skipped when the
/// current selection already matches, which also avoids re-triggering the
/// media-selection-change notification this function is invoked from.
private func enforcePinnedSingleStreamAudioSelection(on item: AVPlayerItem) async {
let asset = item.asset
guard let group = try? await asset.loadMediaSelectionGroup(for: .audible),
let option = preferredSingleStreamAudioOption(in: group) else { return }
let current = item.currentMediaSelection.selectedMediaOption(in: group)
if current != option {
item.select(option, in: group)
}
// Logged unconditionally so "already pinned" passes are visible too.
logSingleStream(
"pinAudioSelection selected=\(option.displayName) current=\(current?.displayName ?? "nil") options=\(group.options.count)"
)
}
/// Choose the audio rendition to pin for single-stream playback: the option
/// with the highest `audioPreferenceScore`, breaking ties in favor of the
/// earliest option. Falls back to the group's default option (then any
/// option) only when the group has no options at all.
private func preferredSingleStreamAudioOption(in group: AVMediaSelectionGroup) -> AVMediaSelectionOption? {
    let fallback = group.defaultOption
    var bestOption: AVMediaSelectionOption?
    var bestScore = Int.min
    for candidate in group.options {
        let candidateScore = audioPreferenceScore(for: candidate, defaultOption: fallback)
        // Strict > keeps the first maximal option, matching max(by:) tie-breaking.
        if bestOption == nil || candidateScore > bestScore {
            bestOption = candidate
            bestScore = candidateScore
        }
    }
    return bestOption ?? fallback ?? group.options.first
}
/// Heuristic score for an audio option; higher is more preferable to pin.
///
/// Rewards the group's default option, stereo/main renditions, and English
/// names; penalizes surround/Atmos, secondary-language/SAP/descriptive
/// tracks, and accessibility (video-description) renditions. Each keyword
/// group contributes its delta at most once, regardless of how many of its
/// keywords match.
private func audioPreferenceScore(for option: AVMediaSelectionOption, defaultOption: AVMediaSelectionOption?) -> Int {
    let name = option.displayName.lowercased()
    var score = option == defaultOption ? 40 : 0
    let weighted: [(keywords: [String], delta: Int)] = [
        (["stereo", "2.0", "main"], 30),
        (["english", "eng"], 20),
        (["surround", "5.1", "atmos"], -30),
        (["spanish", "sap", "descriptive", "alternate"], -25),
    ]
    for (keywords, delta) in weighted where keywords.contains(where: name.contains) {
        score += delta
    }
    if option.hasMediaCharacteristic(.describesVideoForAccessibility) {
        score -= 40
    }
    return score
}
struct SingleStreamPlaybackScreen: View {
@Environment(\.dismiss) private var dismiss
let resolveSource: @Sendable () async -> SingleStreamPlaybackSource?
@@ -544,20 +592,27 @@ struct SingleStreamPlayerView: UIViewControllerRepresentable {
)
do {
try AVAudioSession.sharedInstance().setCategory(.playback, mode: .moviePlayback)
try AVAudioSession.sharedInstance().setCategory(.playback, mode: .default)
try AVAudioSession.sharedInstance().setActive(true)
logSingleStream("AVAudioSession configured for playback")
logSingleStream("AVAudioSession configured for playback mode=default")
} catch {
logSingleStream("AVAudioSession configuration failed error=\(error.localizedDescription)")
}
let playerItem = makeSingleStreamPlayerItem(from: source)
let player = AVPlayer(playerItem: playerItem)
player.appliesMediaSelectionCriteriaAutomatically = false
player.automaticallyWaitsToMinimizeStalling = true
player.isMuted = source.forceMuteAudio
logSingleStream("Configured player for quality ramp preferredForwardBufferDuration=8 automaticallyWaitsToMinimizeStalling=true")
logSingleStream(
"Configured player for quality ramp preferredForwardBufferDuration=8 automaticallyWaitsToMinimizeStalling=true appliesMediaSelectionCriteriaAutomatically=false"
)
context.coordinator.attachDebugObservers(to: player, url: url, resolveNextSource: resolveNextSource)
controller.player = player
if context.coordinator.audioDiagnostics == nil {
context.coordinator.audioDiagnostics = AudioDiagnostics(tag: "single")
}
context.coordinator.audioDiagnostics?.attach(to: player)
logSingleStream("AVPlayer assigned to controller; calling playImmediately(atRate: 1.0)")
player.playImmediately(atRate: 1.0)
context.coordinator.installClipTimeLimit(on: player, resolveNextSource: resolveNextSource)
@@ -589,6 +644,10 @@ struct SingleStreamPlayerView: UIViewControllerRepresentable {
logSingleStream("dismantleUIViewController — PiP active, observers cleared but keeping player")
return
}
Task { @MainActor in
coordinator.audioDiagnostics?.detach()
coordinator.audioDiagnostics = nil
}
uiViewController.player?.pause()
uiViewController.player = nil
logSingleStream("dismantleUIViewController complete")
@@ -600,6 +659,7 @@ struct SingleStreamPlayerView: UIViewControllerRepresentable {
private var startupRecoveryTask: Task<Void, Never>?
private var qualityMonitorTask: Task<Void, Never>?
private var clipTimeLimitObserver: Any?
var audioDiagnostics: AudioDiagnostics?
private static let maxClipDuration: Double = 15.0
var onTogglePitchInfo: (() -> Void)?
var onToggleGameCenter: (() -> Void)?
@@ -791,6 +851,19 @@ struct SingleStreamPlayerView: UIViewControllerRepresentable {
}
}
)
notificationTokens.append(
NotificationCenter.default.addObserver(
forName: AVPlayerItem.mediaSelectionDidChangeNotification,
object: item,
queue: .main
) { _ in
logSingleStream("Notification mediaSelectionDidChange")
Task { @MainActor in
await enforcePinnedSingleStreamAudioSelection(on: item)
}
}
)
}
func scheduleStartupRecovery(for player: AVPlayer) {