mirror of https://gitlab.opencode.de/bwi/bundesmessenger/clients/bundesmessenger-ios.git
#4090 - Various tweaks and fixes following code review. Switched back to DateFormatters for formatting durations, sanitising audio player durations and current times.
@@ -40,7 +40,7 @@ class VoiceMessageAudioPlayer: NSObject {
     private var statusObserver: NSKeyValueObservation?
     private var playbackBufferEmptyObserver: NSKeyValueObservation?
     private var rateObserver: NSKeyValueObservation?
-    private var playToEndObsever: NSObjectProtocol?
+    private var playToEndObserver: NSObjectProtocol?
 
     private let delegateContainer = DelegateContainer()
 
@@ -55,23 +55,11 @@ class VoiceMessageAudioPlayer: NSObject {
     }
 
     var duration: TimeInterval {
-        guard let item = self.audioPlayer?.currentItem else {
-            return 0
-        }
-
-        let duration = CMTimeGetSeconds(item.duration)
-
-        return duration.isNaN ? 0.0 : duration
+        return abs(CMTimeGetSeconds(self.audioPlayer?.currentItem?.duration ?? .zero))
     }
 
     var currentTime: TimeInterval {
-        guard let audioPlayer = self.audioPlayer else {
-            return 0.0
-        }
-
-        let currentTime = CMTimeGetSeconds(audioPlayer.currentTime())
-
-        return currentTime.isNaN ? 0.0 : currentTime
+        return abs(CMTimeGetSeconds(audioPlayer?.currentTime() ?? .zero))
     }
 
     private(set) var isStopped = true
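The rewritten accessors fold the old guard statements into a single expression. A minimal standalone sketch (not from the commit) of the nil-coalescing fallback they rely on:

import AVFoundation

// Sketch: with no player or no current item, the optional chain yields nil,
// `?? .zero` substitutes CMTime.zero, and CMTimeGetSeconds(.zero) is 0.0 —
// the same fallback the removed guard statements returned. abs() additionally
// clamps any negative reading.
let player: AVPlayer? = nil
let duration = abs(CMTimeGetSeconds(player?.currentItem?.duration ?? .zero))
print(duration) // 0.0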
@@ -200,7 +188,7 @@ class VoiceMessageAudioPlayer: NSObject {
             }
         }
 
-        playToEndObsever = NotificationCenter.default.addObserver(forName: Notification.Name.AVPlayerItemDidPlayToEndTime, object: playerItem, queue: nil) { [weak self] notification in
+        playToEndObserver = NotificationCenter.default.addObserver(forName: Notification.Name.AVPlayerItemDidPlayToEndTime, object: playerItem, queue: nil) { [weak self] notification in
             guard let self = self else { return }
 
             self.delegateContainer.notifyDelegatesWithBlock { delegate in
@@ -213,7 +201,7 @@ class VoiceMessageAudioPlayer: NSObject {
         statusObserver?.invalidate()
         playbackBufferEmptyObserver?.invalidate()
         rateObserver?.invalidate()
-        NotificationCenter.default.removeObserver(playToEndObsever as Any)
+        NotificationCenter.default.removeObserver(playToEndObserver as Any)
     }
 }
 
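Both hunks above touch the same lifecycle: the token returned by the block-based NotificationCenter API must be stored and later handed back to removeObserver(_:), which is what the renamed playToEndObserver property is for. A hedged sketch of that pattern (hypothetical PlaybackWatcher type, not from the commit):

import AVFoundation

class PlaybackWatcher {
    // Token from the block-based observer API; kept alive so it can be
    // passed back to removeObserver(_:) on tear-down.
    private var playToEndObserver: NSObjectProtocol?

    func watch(_ item: AVPlayerItem) {
        playToEndObserver = NotificationCenter.default.addObserver(forName: .AVPlayerItemDidPlayToEndTime,
                                                                   object: item,
                                                                   queue: nil) { _ in
            print("Playback finished")
        }
    }

    deinit {
        // Block-based observers are not removed automatically on deallocation.
        if let observer = playToEndObserver {
            NotificationCenter.default.removeObserver(observer)
        }
    }
}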
@@ -48,7 +48,7 @@ class VoiceMessageAudioRecorder: NSObject, AVAudioRecorderDelegate {
         return audioRecorder?.isRecording ?? false
     }
 
-    func recordWithOuputURL(_ url: URL) {
+    func recordWithOutputURL(_ url: URL) {
 
         let settings = [AVFormatIDKey: Int(kAudioFormatMPEG4AAC),
                         AVSampleRateKey: 12000,
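For context, a runnable sketch of how such a settings dictionary drives AVAudioRecorder. Only AVFormatIDKey and AVSampleRateKey come from the hunk; the channel-count and quality keys, the function name, and the try? handling are illustrative assumptions:

import AVFoundation

// Sketch: start an AAC recording at the given URL.
func startRecording(to url: URL) -> AVAudioRecorder? {
    let settings: [String: Any] = [AVFormatIDKey: Int(kAudioFormatMPEG4AAC),
                                   AVSampleRateKey: 12000,
                                   AVNumberOfChannelsKey: 1, // assumption
                                   AVEncoderAudioQualityKey: AVAudioQuality.high.rawValue] // assumption
    guard let recorder = try? AVAudioRecorder(url: url, settings: settings) else {
        return nil
    }
    recorder.record()
    return recorder
}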
@@ -28,6 +28,7 @@ public class VoiceMessageController: NSObject, VoiceMessageToolbarViewDelegate,
     private enum Constants {
         static let maximumAudioRecordingDuration: TimeInterval = 120.0
         static let maximumAudioRecordingLengthReachedThreshold: TimeInterval = 10.0
+        static let elapsedTimeFormat = "m:ss"
         static let minimumRecordingDuration = 1.0
     }
 
@@ -47,6 +48,12 @@ public class VoiceMessageController: NSObject, VoiceMessageToolbarViewDelegate,
     private var isInLockedMode: Bool = false
     private var notifiedRemainingTime = false
 
+    private static let timeFormatter: DateFormatter = {
+        let dateFormatter = DateFormatter()
+        dateFormatter.dateFormat = Constants.elapsedTimeFormat
+        return dateFormatter
+    }()
+
     @objc public weak var delegate: VoiceMessageControllerDelegate?
 
     @objc public var isRecordingAudio: Bool {
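The new static formatter renders elapsed seconds by anchoring them at the reference date, so the "m:ss" pattern reads minutes and seconds straight off the resulting Date. A standalone sketch; the UTC pin is an addition for determinism here, while the production code uses the device's time zone:

import Foundation

let formatter = DateFormatter()
formatter.dateFormat = "m:ss"
// Pin the zone so the minute field maps 1:1 onto elapsed minutes; zones
// offset from UTC by fractional hours would otherwise skew the result.
formatter.timeZone = TimeZone(identifier: "UTC")

let elapsed: TimeInterval = 83.0
// 83 s after the reference date is 00:01:23, formatted as "1:23".
print(formatter.string(from: Date(timeIntervalSinceReferenceDate: elapsed)))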
@@ -90,7 +97,7 @@ public class VoiceMessageController: NSObject, VoiceMessageToolbarViewDelegate,
 
         // Haptic are not played during record on iOS by default. This fix works
         // only since iOS 13. A workaround for iOS 12 and earlier would be to
-        // dispatch after at least 100ms recordWithOuputURL call
+        // dispatch after at least 100ms recordWithOutputURL call
         if #available(iOS 13.0, *) {
             try? AVAudioSession.sharedInstance().setCategory(.playAndRecord)
             try? AVAudioSession.sharedInstance().setAllowHapticsAndSystemSoundsDuringRecording(true)
@@ -100,7 +107,7 @@ public class VoiceMessageController: NSObject, VoiceMessageToolbarViewDelegate,
 
         audioRecorder = mediaServiceProvider.audioRecorder()
         audioRecorder?.registerDelegate(self)
-        audioRecorder?.recordWithOuputURL(temporaryFileURL)
+        audioRecorder?.recordWithOutputURL(temporaryFileURL)
     }
 
     func voiceMessageToolbarViewDidRequestRecordingFinish(_ toolbarView: VoiceMessageToolbarView) {
@@ -335,7 +342,7 @@ public class VoiceMessageController: NSObject, VoiceMessageToolbarViewDelegate,
 
         var details = VoiceMessageToolbarViewDetails()
         details.state = (isRecording ? (isInLockedMode ? .lockedModeRecord : .record) : (isInLockedMode ? .lockedModePlayback : .idle))
-        details.elapsedTime = durationStringFromTimeInterval(currentTime)
+        details.elapsedTime = VoiceMessageController.timeFormatter.string(from: Date(timeIntervalSinceReferenceDate: currentTime))
         details.audioSamples = audioSamples
 
         if isRecording {
@@ -384,7 +391,7 @@ public class VoiceMessageController: NSObject, VoiceMessageToolbarViewDelegate,
 
         var details = VoiceMessageToolbarViewDetails()
         details.state = (audioRecorder?.isRecording ?? false ? (isInLockedMode ? .lockedModeRecord : .record) : (isInLockedMode ? .lockedModePlayback : .idle))
-        details.elapsedTime = durationStringFromTimeInterval(audioPlayer.isPlaying ? audioPlayer.currentTime : audioPlayer.duration)
+        details.elapsedTime = VoiceMessageController.timeFormatter.string(from: Date(timeIntervalSinceReferenceDate: (audioPlayer.isPlaying ? audioPlayer.currentTime : audioPlayer.duration)))
         details.audioSamples = audioSamples
         details.isPlaying = audioPlayer.isPlaying
         details.progress = (audioPlayer.isPlaying ? (audioPlayer.duration > 0.0 ? audioPlayer.currentTime / audioPlayer.duration : 0.0) : 0.0)
@@ -399,18 +406,4 @@ public class VoiceMessageController: NSObject, VoiceMessageToolbarViewDelegate,
 
         audioSamples = audioSamples + [Float](repeating: 0.0, count: delta)
     }
-
-    private func durationStringFromTimeInterval(_ interval: TimeInterval) -> String {
-        guard interval.isFinite else {
-            return ""
-        }
-
-        var timeInterval = abs(interval)
-        let hours = trunc(timeInterval / 3600.0)
-        timeInterval -= hours * 3600.0
-        let minutes = trunc(timeInterval / 60.0)
-        timeInterval -= minutes * 60.0
-
-        return String(format: "%01.0f:%02.0f", minutes, timeInterval)
-    }
 }
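Worth noting when comparing the helper removed here with the new formatter: the two paths disagree on fractional seconds, since printf-style %.0f rounds while DateFormatter truncates. A standalone sketch of the difference (value illustrative):

import Foundation

let elapsed: TimeInterval = 83.6

// Removed-helper style (minutes/seconds split by hand, as above):
let minutes = trunc(elapsed / 60.0)
let seconds = elapsed - minutes * 60.0
print(String(format: "%01.0f:%02.0f", minutes, seconds)) // "1:24" — rounds

// New DateFormatter style:
let formatter = DateFormatter()
formatter.dateFormat = "m:ss"
formatter.timeZone = TimeZone(identifier: "UTC") // pinned for determinism
print(formatter.string(from: Date(timeIntervalSinceReferenceDate: elapsed))) // "1:23" — truncates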
@@ -75,12 +75,12 @@ import Foundation
     // MARK: - Private
 
     private func stopAllServicesExcept(_ service: AnyObject?) {
-        for audioRecoder in audioRecorders.allObjects {
-            if audioRecoder === service {
+        for audioRecorder in audioRecorders.allObjects {
+            if audioRecorder === service {
                 continue
             }
 
-            audioRecoder.stopRecording()
+            audioRecorder.stopRecording()
         }
 
         guard let audioPlayersEnumerator = audioPlayers.objectEnumerator() else {
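Beyond the spelling fix, the loop's shape is worth a note: the recorders live in a weak collection and the one passed in is skipped by identity, not equality. A sketch under assumed types (hypothetical Recorder protocol; the real classes differ):

import Foundation

protocol Recorder: AnyObject {
    func stopRecording()
}

// Stop every recorder held in the weak hash table except `keep`,
// compared by identity (===).
func stopAll(in recorders: NSHashTable<AnyObject>, except keep: AnyObject?) {
    for case let recorder as Recorder in recorders.allObjects where recorder !== keep {
        recorder.stopRecording()
    }
}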
@@ -26,6 +26,10 @@ enum VoiceMessagePlaybackControllerState {
 
 class VoiceMessagePlaybackController: VoiceMessageAudioPlayerDelegate, VoiceMessagePlaybackViewDelegate {
+
+    private enum Constants {
+        static let elapsedTimeFormat = "m:ss"
+    }
 
     private let mediaServiceProvider: VoiceMessageMediaServiceProvider
     private let cacheManager: VoiceMessageAttachmentCacheManager
@@ -43,6 +47,13 @@ class VoiceMessagePlaybackController: VoiceMessageAudioPlayerDelegate, VoiceMess
         }
     }
 
+    private static let timeFormatter: DateFormatter = {
+        let dateFormatter = DateFormatter()
+        dateFormatter.dateFormat = Constants.elapsedTimeFormat
+        return dateFormatter
+    }()
+
+
     let playbackView: VoiceMessagePlaybackView
 
     init(mediaServiceProvider: VoiceMessageMediaServiceProvider,
@@ -134,11 +145,11 @@ class VoiceMessagePlaybackController: VoiceMessageAudioPlayerDelegate, VoiceMess
 
         switch state {
         case .stopped:
-            details.currentTime = durationStringFromTimeInterval(self.duration)
+            details.currentTime = VoiceMessagePlaybackController.timeFormatter.string(from: Date(timeIntervalSinceReferenceDate: self.duration))
             details.progress = 0.0
         default:
             if let audioPlayer = audioPlayer {
-                details.currentTime = durationStringFromTimeInterval(audioPlayer.currentTime)
+                details.currentTime = VoiceMessagePlaybackController.timeFormatter.string(from: Date(timeIntervalSinceReferenceDate: audioPlayer.currentTime))
                 details.progress = (audioPlayer.duration > 0.0 ? audioPlayer.currentTime / audioPlayer.duration : 0.0)
             }
         }
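The progress line keeps its divide-by-zero guard through the rewrite; as a pure function it amounts to the following standalone sketch:

import Foundation

// Progress as a 0...1 ratio; a zero duration would otherwise produce
// NaN or infinity, so it falls back to 0.0.
func playbackProgress(currentTime: TimeInterval, duration: TimeInterval) -> Double {
    return duration > 0.0 ? currentTime / duration : 0.0
}

print(playbackProgress(currentTime: 30, duration: 120)) // 0.25
print(playbackProgress(currentTime: 30, duration: 0))   // 0.0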
@@ -199,18 +210,4 @@ class VoiceMessagePlaybackController: VoiceMessageAudioPlayerDelegate, VoiceMess
     @objc private func updateTheme() {
         playbackView.update(theme: ThemeService.shared().theme)
     }
-
-    private func durationStringFromTimeInterval(_ interval: TimeInterval) -> String {
-        guard interval.isFinite else {
-            return ""
-        }
-
-        var timeInterval = abs(interval)
-        let hours = trunc(timeInterval / 3600.0)
-        timeInterval -= hours * 3600.0
-        let minutes = trunc(timeInterval / 60.0)
-        timeInterval -= minutes * 60.0
-
-        return String(format: "%01.0f:%02.0f", minutes, timeInterval)
-    }
 }
@@ -115,7 +115,7 @@ class VoiceMessagePlaybackView: UIView, NibLoadable, Themable {
         playButton.backgroundColor = theme.colors.background
         playButton.tintColor = theme.colors.secondaryContent
         backgroundView.backgroundColor = theme.colors.quinaryContent
-        _waveformView.primarylineColor = theme.colors.quarterlyContent
+        _waveformView.primaryLineColor = theme.colors.quarterlyContent
         _waveformView.secondaryLineColor = theme.colors.secondaryContent
         elapsedTimeLabel.textColor = theme.colors.tertiaryContent
     }
@@ -200,7 +200,7 @@ class VoiceMessageToolbarView: PassthroughView, NibLoadable, Themable, UIGesture
         case UIGestureRecognizer.State.began:
             delegate?.voiceMessageToolbarViewDidRequestRecordingStart(self)
         case UIGestureRecognizer.State.ended:
-            delegate?.voiceMessageToolbarViewDidRequestRecordingFinish(self)
+            delegate?.voiceMessageToolbarViewDidRequestRecordingFinish(self)
         default:
             break
         }
@@ -28,10 +28,10 @@ class VoiceMessageWaveformView: UIView {
         }
     }
 
-    var primarylineColor = UIColor.lightGray {
+    var primaryLineColor = UIColor.lightGray {
         didSet {
-            backgroundLayer.strokeColor = primarylineColor.cgColor
-            backgroundLayer.fillColor = primarylineColor.cgColor
+            backgroundLayer.strokeColor = primaryLineColor.cgColor
+            backgroundLayer.fillColor = primaryLineColor.cgColor
         }
     }
     var secondaryLineColor = UIColor.darkGray {
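The renamed property also shows why the didSet matters: a CALayer stores CGColor values copied at assignment time, so a new UIColor only takes effect if the observer re-applies it. Minimal sketch (hypothetical WaveView, not the real class):

import UIKit

class WaveView: UIView {
    private let backgroundLayer = CAShapeLayer()

    var primaryLineColor = UIColor.lightGray {
        didSet {
            // CALayer holds CGColor, not UIColor — re-assign on every change.
            backgroundLayer.strokeColor = primaryLineColor.cgColor
            backgroundLayer.fillColor = primaryLineColor.cgColor
        }
    }
}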
@@ -60,7 +60,7 @@ class VoiceMessageWaveformView: UIView {
     override init(frame: CGRect) {
         super.init(frame: frame)
 
-        setupAndAdd(backgroundLayer, with: primarylineColor)
+        setupAndAdd(backgroundLayer, with: primaryLineColor)
         setupAndAdd(progressLayer, with: secondaryLineColor)
         progressLayer.masksToBounds = true
 