fix speech authorization crash: decouple from SwiftData context
- Make TranscriptionService a plain Sendable class (not @Observable/@MainActor)
- Request speech authorization in ContentView.onAppear via callback (no async)
- Use @State pendingMemo + Task in View for transcription (Swift 6 safe)
- Separate saveRecording() and startTranscription() to avoid data races

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
@@ -20,6 +20,7 @@
|
||||
BE246B75520F6505EFE3015E /* VoiceDiaryTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 2CAFA3F05939B94F1CFD372C /* VoiceDiaryTests.swift */; };
|
||||
D5E3C103027212BE7607EF81 /* ContentView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 989E1C6DD70521684A5CB77F /* ContentView.swift */; };
|
||||
D737FC41C749185F28943A87 /* DiaryEntryView.swift in Sources */ = {isa = PBXBuildFile; fileRef = D4CCBA3B214071BEDD37CB6E /* DiaryEntryView.swift */; };
|
||||
DB046641C4508472DAC45A51 /* SpeechAuthorization.swift in Sources */ = {isa = PBXBuildFile; fileRef = 0292E37072DDFA0AD07EC9A3 /* SpeechAuthorization.swift */; };
|
||||
EAE61B25765D355723F9EC66 /* TranscriptionService.swift in Sources */ = {isa = PBXBuildFile; fileRef = 9CD664C8607AD9DC37A6C8D1 /* TranscriptionService.swift */; };
|
||||
/* End PBXBuildFile section */
|
||||
|
||||
@@ -34,6 +35,7 @@
|
||||
/* End PBXContainerItemProxy section */
|
||||
|
||||
/* Begin PBXFileReference section */
|
||||
0292E37072DDFA0AD07EC9A3 /* SpeechAuthorization.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = SpeechAuthorization.swift; sourceTree = "<group>"; };
|
||||
0A2739861476DDD2140B3BA6 /* Localizable.xcstrings */ = {isa = PBXFileReference; lastKnownFileType = text.json.xcstrings; path = Localizable.xcstrings; sourceTree = "<group>"; };
|
||||
19E99B8436E44AEC9EC5DB77 /* Assets.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = Assets.xcassets; sourceTree = "<group>"; };
|
||||
1CA6B754AFDFDAF2C8AF9C01 /* VoiceDiaryTests.xctest */ = {isa = PBXFileReference; includeInIndex = 0; lastKnownFileType = wrapper.cfbundle; path = VoiceDiaryTests.xctest; sourceTree = BUILT_PRODUCTS_DIR; };
|
||||
@@ -65,6 +67,7 @@
|
||||
isa = PBXGroup;
|
||||
children = (
|
||||
F30F8103F5AEAC26F1412FEC /* AudioRecorderService.swift */,
|
||||
0292E37072DDFA0AD07EC9A3 /* SpeechAuthorization.swift */,
|
||||
BE004D539240AD09CEDA1897 /* SummarizationService.swift */,
|
||||
9CD664C8607AD9DC37A6C8D1 /* TranscriptionService.swift */,
|
||||
);
|
||||
@@ -261,6 +264,7 @@
|
||||
AFE8973BC11AC7137979D5C7 /* DiaryViewModel.swift in Sources */,
|
||||
A791D92368773881E7ECE4F6 /* RecordingView.swift in Sources */,
|
||||
978F1E53817BC842580C9C67 /* RecordingViewModel.swift in Sources */,
|
||||
DB046641C4508472DAC45A51 /* SpeechAuthorization.swift in Sources */,
|
||||
247387A3DB126648ABFA452E /* SummarizationService.swift in Sources */,
|
||||
EAE61B25765D355723F9EC66 /* TranscriptionService.swift in Sources */,
|
||||
5BC279CA41E44E646F2DB639 /* VoiceDiaryApp.swift in Sources */,
|
||||
|
||||
18
VoiceDiary/Services/SpeechAuthorization.swift
Normal file
18
VoiceDiary/Services/SpeechAuthorization.swift
Normal file
@@ -0,0 +1,18 @@
|
||||
import Speech
|
||||
|
||||
/// Namespace for obtaining speech-recognition permission without touching
/// any view model or SwiftData model context (avoids the crash this commit fixes).
enum SpeechAuthorization {
    /// Checks the current speech-recognition permission and, if the user has
    /// not been asked yet, presents the system prompt.
    ///
    /// The result is always delivered asynchronously on the main actor — in
    /// both the "already decided" and the "just prompted" paths — so callers
    /// may mutate `@State`/UI directly inside `completion`.
    ///
    /// - Parameter completion: Receives `true` when the status is `.authorized`.
    static func requestIfNeeded(completion: @MainActor @escaping @Sendable (Bool) -> Void) {
        // Single delivery path: hop to the main actor, then report whether
        // the final status is `.authorized`. Using one closure for both
        // branches keeps the callback ordering identical either way.
        let deliver: @Sendable (SFSpeechRecognizerAuthorizationStatus) -> Void = { status in
            Task { @MainActor in
                completion(status == .authorized)
            }
        }

        switch SFSpeechRecognizer.authorizationStatus() {
        case .notDetermined:
            // First run: show the system permission prompt; Apple calls the
            // handler with the user's decision.
            SFSpeechRecognizer.requestAuthorization(deliver)
        case let decided:
            // Already authorized / denied / restricted: report it as-is.
            deliver(decided)
        }
    }
}
|
||||
@@ -1,15 +1,9 @@
|
||||
import Foundation
|
||||
import Speech
|
||||
|
||||
@Observable
|
||||
@MainActor
|
||||
final class TranscriptionService {
|
||||
var authorizationStatus: SFSpeechRecognizerAuthorizationStatus = .notDetermined
|
||||
|
||||
private let speechRecognizer = SFSpeechRecognizer(locale: Locale(identifier: "de-DE"))
|
||||
|
||||
func requestAuthorization() async {
|
||||
authorizationStatus = await withCheckedContinuation { continuation in
|
||||
final class TranscriptionService: Sendable {
|
||||
func requestAuthorization() async -> SFSpeechRecognizerAuthorizationStatus {
|
||||
await withCheckedContinuation { continuation in
|
||||
SFSpeechRecognizer.requestAuthorization { status in
|
||||
continuation.resume(returning: status)
|
||||
}
|
||||
@@ -17,7 +11,9 @@ final class TranscriptionService {
|
||||
}
|
||||
|
||||
func transcribe(audioURL: URL) async throws -> String {
|
||||
guard let speechRecognizer, speechRecognizer.isAvailable else {
|
||||
let recognizer = SFSpeechRecognizer(locale: Locale(identifier: "de-DE"))
|
||||
|
||||
guard let recognizer, recognizer.isAvailable else {
|
||||
throw TranscriptionError.recognizerUnavailable
|
||||
}
|
||||
|
||||
@@ -26,8 +22,8 @@ final class TranscriptionService {
|
||||
request.shouldReportPartialResults = false
|
||||
request.addsPunctuation = true
|
||||
|
||||
let transcription: String = try await withCheckedThrowingContinuation { continuation in
|
||||
speechRecognizer.recognitionTask(with: request) { result, error in
|
||||
return try await withCheckedThrowingContinuation { continuation in
|
||||
recognizer.recognitionTask(with: request) { result, error in
|
||||
if let error {
|
||||
continuation.resume(throwing: error)
|
||||
} else if let result, result.isFinal {
|
||||
@@ -35,7 +31,6 @@ final class TranscriptionService {
|
||||
}
|
||||
}
|
||||
}
|
||||
return transcription
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -7,6 +7,7 @@ struct ContentView: View {
|
||||
@State private var recordingViewModel = RecordingViewModel()
|
||||
@State private var diaryViewModel = DiaryViewModel()
|
||||
@State private var showingRecording = false
|
||||
@State private var speechAuthorized = false
|
||||
|
||||
var body: some View {
|
||||
NavigationStack {
|
||||
@@ -34,6 +35,11 @@ struct ContentView: View {
|
||||
}
|
||||
}
|
||||
.environment(diaryViewModel)
|
||||
.onAppear {
|
||||
SpeechAuthorization.requestIfNeeded { authorized in
|
||||
speechAuthorized = authorized
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private var emptyState: some View {
|
||||
|
||||
@@ -5,8 +5,11 @@ struct RecordingView: View {
|
||||
@Environment(\.modelContext) private var modelContext
|
||||
@Environment(\.dismiss) private var dismiss
|
||||
var viewModel: RecordingViewModel
|
||||
@State private var transcriptionService = TranscriptionService()
|
||||
@State private var isTranscribing = false
|
||||
@State private var transcriptionError: Error?
|
||||
@State private var pendingMemo: VoiceMemo?
|
||||
|
||||
private let transcriptionService = TranscriptionService()
|
||||
|
||||
var body: some View {
|
||||
NavigationStack {
|
||||
@@ -29,7 +32,7 @@ struct RecordingView: View {
|
||||
|
||||
Spacer()
|
||||
|
||||
if let error = viewModel.error {
|
||||
if let error = viewModel.error ?? transcriptionError {
|
||||
Text(error.localizedDescription)
|
||||
.font(.caption)
|
||||
.foregroundStyle(.red)
|
||||
@@ -45,18 +48,13 @@ struct RecordingView: View {
|
||||
Button(String(localized: "general.done")) {
|
||||
if viewModel.isRecording {
|
||||
viewModel.stopRecording()
|
||||
saveAndTranscribe()
|
||||
saveRecording()
|
||||
}
|
||||
dismiss()
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
.task {
|
||||
if transcriptionService.authorizationStatus == .notDetermined {
|
||||
await transcriptionService.requestAuthorization()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private var timerDisplay: some View {
|
||||
@@ -72,7 +70,8 @@ struct RecordingView: View {
|
||||
Button {
|
||||
if viewModel.isRecording {
|
||||
viewModel.stopRecording()
|
||||
saveAndTranscribe()
|
||||
saveRecording()
|
||||
startTranscription()
|
||||
} else {
|
||||
viewModel.startRecording()
|
||||
}
|
||||
@@ -102,7 +101,7 @@ struct RecordingView: View {
|
||||
.sensoryFeedback(.impact, trigger: viewModel.isRecording)
|
||||
}
|
||||
|
||||
private func saveAndTranscribe() {
|
||||
private func saveRecording() {
|
||||
guard let recorded = viewModel.lastRecordedFile else { return }
|
||||
|
||||
let today = Calendar.current.startOfDay(for: .now)
|
||||
@@ -112,24 +111,25 @@ struct RecordingView: View {
|
||||
memo.entry = entry
|
||||
modelContext.insert(memo)
|
||||
viewModel.lastRecordedFile = nil
|
||||
|
||||
let audioURL = recorded.url
|
||||
Task {
|
||||
await transcribe(memo: memo, audioURL: audioURL)
|
||||
}
|
||||
pendingMemo = memo
|
||||
}
|
||||
|
||||
private func transcribe(memo: VoiceMemo, audioURL: URL) async {
|
||||
guard transcriptionService.authorizationStatus == .authorized else { return }
|
||||
private func startTranscription() {
|
||||
guard let memo = pendingMemo else { return }
|
||||
let audioURL = memo.audioURL
|
||||
|
||||
isTranscribing = true
|
||||
defer { isTranscribing = false }
|
||||
transcriptionError = nil
|
||||
|
||||
do {
|
||||
let transcript = try await transcriptionService.transcribe(audioURL: audioURL)
|
||||
memo.transcript = transcript
|
||||
} catch {
|
||||
viewModel.error = error
|
||||
Task {
|
||||
do {
|
||||
let transcript = try await transcriptionService.transcribe(audioURL: audioURL)
|
||||
memo.transcript = transcript
|
||||
} catch {
|
||||
transcriptionError = error
|
||||
}
|
||||
isTranscribing = false
|
||||
pendingMemo = nil
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
Reference in New Issue
Block a user