Files
voicediary/VoiceDiary/Views/RecordingView.swift
Felix Förtsch 3d42c83f75 fix speech authorization crash: decouple from SwiftData context
- Make TranscriptionService a plain Sendable class (not @Observable/@MainActor)
- Request speech authorization in ContentView.onAppear via callback (no async)
- Use @State pendingMemo + Task in View for transcription (Swift 6 safe)
- Separate saveRecording() and startTranscription() to avoid data races

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-02-15 23:31:59 +01:00

154 lines
3.7 KiB
Swift

import SwiftData
import SwiftUI
/// Full-screen recording sheet: shows a live duration timer and a record/stop
/// button, persists finished recordings as `VoiceMemo`s attached to today's
/// `DiaryEntry`, and kicks off transcription once a recording is saved.
struct RecordingView: View {
    @Environment(\.modelContext) private var modelContext
    @Environment(\.dismiss) private var dismiss

    var viewModel: RecordingViewModel

    // Transcription runs in an unstructured Task owned by this view; these
    // @State flags drive the progress indicator and the error display.
    @State private var isTranscribing = false
    @State private var transcriptionError: Error?
    // Memo saved by saveRecording() and awaiting transcription.
    @State private var pendingMemo: VoiceMemo?

    private let transcriptionService = TranscriptionService()

    var body: some View {
        NavigationStack {
            VStack(spacing: 40) {
                Spacer()
                timerDisplay
                recordButton
                if isTranscribing {
                    HStack(spacing: 8) {
                        ProgressView()
                            .controlSize(.small)
                        Text(String(localized: "memo.transcribing"))
                            .font(.subheadline)
                            .foregroundStyle(.secondary)
                    }
                }
                Spacer()
                // Recording errors take precedence over transcription errors.
                if let error = viewModel.error ?? transcriptionError {
                    Text(error.localizedDescription)
                        .font(.caption)
                        .foregroundStyle(.red)
                        .multilineTextAlignment(.center)
                        .padding(.horizontal)
                }
            }
            .padding()
            .navigationTitle(String(localized: "recording.title"))
            .navigationBarTitleDisplayMode(.inline)
            .toolbar {
                ToolbarItem(placement: .cancellationAction) {
                    Button(String(localized: "general.done")) {
                        // Stop and persist an in-progress recording before
                        // dismissing. Transcription is intentionally not
                        // started here: the view (and its Task) is about to
                        // disappear.
                        if viewModel.isRecording {
                            viewModel.stopRecording()
                            saveRecording()
                        }
                        dismiss()
                    }
                }
            }
        }
    }

    /// Large monospaced elapsed-time readout; dimmed while idle.
    private var timerDisplay: some View {
        Text(viewModel.formattedDuration)
            .font(.system(size: 64, weight: .light, design: .monospaced))
            .foregroundStyle(viewModel.isRecording ? .primary : .secondary)
            .contentTransition(.numericText())
            .animation(.default, value: viewModel.formattedDuration)
            .accessibilityLabel(String(localized: "recording.duration"))
    }

    /// Round record/stop toggle. Disabled while a transcription is running
    /// so a new recording cannot race the pending memo.
    private var recordButton: some View {
        Button {
            if viewModel.isRecording {
                viewModel.stopRecording()
                saveRecording()
                startTranscription()
            } else {
                viewModel.startRecording()
            }
        } label: {
            ZStack {
                Circle()
                    .fill(viewModel.isRecording ? .red : .red.opacity(0.15))
                    .frame(width: 88, height: 88)
                if viewModel.isRecording {
                    // Square "stop" glyph while recording…
                    RoundedRectangle(cornerRadius: 6)
                        .fill(.white)
                        .frame(width: 28, height: 28)
                } else {
                    // …solid red disc while idle.
                    Circle()
                        .fill(.red)
                        .frame(width: 72, height: 72)
                }
            }
        }
        .disabled(isTranscribing)
        .accessibilityLabel(
            viewModel.isRecording
                ? String(localized: "recording.stop")
                : String(localized: "recording.start")
        )
        .sensoryFeedback(.impact, trigger: viewModel.isRecording)
    }

    /// Persists the view model's last recorded file as a `VoiceMemo` attached
    /// to today's diary entry, and stashes it in `pendingMemo` so
    /// `startTranscription()` can pick it up. No-op if nothing was recorded.
    private func saveRecording() {
        guard let recorded = viewModel.lastRecordedFile else { return }
        let today = Calendar.current.startOfDay(for: .now)
        let entry = fetchOrCreateEntry(for: today)
        let memo = VoiceMemo(audioFileName: recorded.fileName, duration: recorded.duration)
        memo.entry = entry
        modelContext.insert(memo)
        viewModel.lastRecordedFile = nil
        pendingMemo = memo
    }

    /// Transcribes the pending memo in a view-owned Task, writing the result
    /// back to `memo.transcript` and surfacing any failure via
    /// `transcriptionError`. No-op if there is no pending memo.
    private func startTranscription() {
        guard let memo = pendingMemo else { return }
        let audioURL = memo.audioURL
        isTranscribing = true
        transcriptionError = nil
        Task {
            do {
                let transcript = try await transcriptionService.transcribe(audioURL: audioURL)
                memo.transcript = transcript
            } catch {
                transcriptionError = error
            }
            isTranscribing = false
            pendingMemo = nil
        }
    }

    /// Returns the diary entry whose date falls on the same day as `date`
    /// (expected to be a start-of-day), inserting a new one if none exists.
    private func fetchOrCreateEntry(for date: Date) -> DiaryEntry {
        // Bound the fetch to the day's [start, nextDayStart) range instead of
        // loading every entry and scanning in memory; one match is enough.
        let dayStart = Calendar.current.startOfDay(for: date)
        let dayEnd = Calendar.current.date(byAdding: .day, value: 1, to: dayStart) ?? dayStart
        let predicate = #Predicate<DiaryEntry> { $0.date >= dayStart && $0.date < dayEnd }
        var descriptor = FetchDescriptor(predicate: predicate)
        descriptor.fetchLimit = 1
        if let match = (try? modelContext.fetch(descriptor))?.first {
            return match
        }
        let entry = DiaryEntry(date: date)
        modelContext.insert(entry)
        return entry
    }
}
#Preview {
    // In-memory container so previews never touch the on-disk store.
    let previewViewModel = RecordingViewModel()
    return RecordingView(viewModel: previewViewModel)
        .modelContainer(for: [DiaryEntry.self, VoiceMemo.self], inMemory: true)
}