Fix listening practice crash on Start Speaking #16
@@ -50,45 +50,64 @@ final class PronunciationService {
|
||||
recognizer = SFSpeechRecognizer(locale: Locale(identifier: "es-ES"))
|
||||
}
|
||||
|
||||
/// Begins live speech capture and recognition for pronunciation practice.
///
/// Non-throwing by design: every failure path (no authorization, recognizer
/// unavailable, invalid input format, audio-session/engine errors) logs a
/// diagnostic and leaves the service in a clean, stopped state instead of
/// surfacing an error to the UI.
///
/// Crash fix (#16): `AVAudioNodeImpl` traps when `installTap` is given a
/// format with 0 channels — which `inputNode.outputFormat(forBus: 0)` can
/// return when the mic input is unavailable (e.g. during a route change).
/// We validate `channelCount > 0` before installing the tap.
///
/// NOTE(review): the recognition callback publishes `transcript` /
/// `isRecording` via `DispatchQueue.main.async`; this assumes those
/// properties are main-thread-observed (SwiftUI) — confirm the class's
/// isolation if it is later annotated `@MainActor`.
func startRecording() {
    // Precondition: speech-recognition permission must already be granted.
    guard isAuthorized else {
        print("[PronunciationService] not authorized")
        return
    }

    // Lazily (re)create the recognizer; it may be nil or offline.
    resolveRecognizerIfNeeded()
    guard let recognizer, recognizer.isAvailable else {
        print("[PronunciationService] recognizer unavailable")
        return
    }

    // Tear down any in-flight session so engine/tap/task state is fresh.
    stopRecording()

    do {
        // Configure the shared audio session for simultaneous playback + capture.
        let audioSession = AVAudioSession.sharedInstance()
        try audioSession.setCategory(.playAndRecord, mode: .measurement, options: [.duckOthers, .defaultToSpeaker])
        try audioSession.setActive(true, options: .notifyOthersOnDeactivation)

        audioEngine = AVAudioEngine()
        request = SFSpeechAudioBufferRecognitionRequest()

        guard let audioEngine, let request else { return }
        request.shouldReportPartialResults = true

        let inputNode = audioEngine.inputNode
        let recordingFormat = inputNode.outputFormat(forBus: 0)

        // Validate format — 0 channels crashes installTap (the #16 crash).
        guard recordingFormat.channelCount > 0 else {
            print("[PronunciationService] invalid recording format (0 channels)")
            self.audioEngine = nil
            self.request = nil
            return
        }

        // Stream mic buffers into the recognition request.
        inputNode.installTap(onBus: 0, bufferSize: 1024, format: recordingFormat) { buffer, _ in
            request.append(buffer)
        }

        audioEngine.prepare()
        try audioEngine.start()

        // Only flip published state once the engine actually started.
        transcript = ""
        isRecording = true

        task = recognizer.recognitionTask(with: request) { [weak self] result, error in
            // Recognition callbacks arrive on an arbitrary queue; hop to main
            // before touching UI-observed state.
            DispatchQueue.main.async {
                if let result {
                    self?.transcript = result.bestTranscription.formattedString
                }
                if error != nil || (result?.isFinal == true) {
                    self?.stopRecording()
                }
            }
        }
    } catch {
        // Any session/engine failure: log and return to a clean stopped state.
        print("[PronunciationService] startRecording failed: \(error)")
        stopRecording()
    }
}
|
||||
|
||||
|
||||
@@ -166,7 +166,7 @@ struct ListeningView: View {
|
||||
if result.score >= 0.7 { correctCount += 1 }
|
||||
withAnimation { isRevealed = true }
|
||||
} else {
|
||||
try? pronunciation.startRecording()
|
||||
pronunciation.startRecording()
|
||||
}
|
||||
} label: {
|
||||
Label(pronunciation.isRecording ? "Stop" : "Start Speaking", systemImage: pronunciation.isRecording ? "stop.circle.fill" : "mic.circle.fill")
|
||||
|
||||
Reference in New Issue
Block a user