#4094 - Switched back to multiple audio player instances (1 per event identifier), strongly retaining the currently playing one and unloading all stopped ones. Various other improvements and bug fixes.

This commit is contained in:
Stefan Ceriu
2021-07-14 10:26:21 +03:00
parent 513c950e0d
commit bcba0d2338
5 changed files with 111 additions and 48 deletions
@@ -31,13 +31,20 @@ enum VoiceMessageAttachmentCacheManagerError: Error {
Swift optimizes the callbacks to be the same instance. Wrap them so we can store them in an array.
*/
private class CompletionWrapper {
let completion: (Result<(String, URL, TimeInterval, [Float]), Error>) -> Void
let completion: (Result<VoiceMessageAttachmentCacheManagerLoadResult, Error>) -> Void
init(_ completion: @escaping (Result<(String, URL, TimeInterval, [Float]), Error>) -> Void) {
init(_ completion: @escaping (Result<VoiceMessageAttachmentCacheManagerLoadResult, Error>) -> Void) {
self.completion = completion
}
}
/// Strongly typed result of a voice message attachment load, replacing the
/// previous `(String, URL, TimeInterval, [Float])` tuple passed to completions.
struct VoiceMessageAttachmentCacheManagerLoadResult {
// Matrix event identifier of the attachment; callers compare this against
// `attachment.eventId` to ignore stale results after cell reuse.
let eventIdentifier: String
// Local file URL of the cached audio content.
let url: URL
// Audio duration in seconds.
let duration: TimeInterval
// Downsampled waveform amplitudes (count requested via `numberOfSamples`).
let samples: [Float]
}
class VoiceMessageAttachmentCacheManager {
static let sharedManager = VoiceMessageAttachmentCacheManager()
@@ -48,9 +55,10 @@ class VoiceMessageAttachmentCacheManager {
private var finalURLs = [String: URL]()
private init() {
}
func loadAttachment(_ attachment: MXKAttachment, numberOfSamples: Int, completion: @escaping (Result<(String, URL, TimeInterval, [Float]), Error>) -> Void) {
func loadAttachment(_ attachment: MXKAttachment, numberOfSamples: Int, completion: @escaping (Result<VoiceMessageAttachmentCacheManagerLoadResult, Error>) -> Void) {
guard attachment.type == MXKAttachmentTypeVoiceMessage else {
completion(Result.failure(VoiceMessageAttachmentCacheManagerError.invalidAttachmentType))
return
@@ -67,14 +75,15 @@ class VoiceMessageAttachmentCacheManager {
}
if let finalURL = finalURLs[identifier], let duration = durations[identifier], let samples = samples[identifier]?[numberOfSamples] {
completion(Result.success((identifier, finalURL, duration, samples)))
let result = VoiceMessageAttachmentCacheManagerLoadResult(eventIdentifier: identifier, url: finalURL, duration: duration, samples: samples)
completion(Result.success(result))
return
}
self.enqueueLoadAttachment(attachment, identifier: identifier, numberOfSamples: numberOfSamples, completion: completion)
}
private func enqueueLoadAttachment(_ attachment: MXKAttachment, identifier: String, numberOfSamples: Int, completion: @escaping (Result<(String, URL, Double, [Float]), Error>) -> Void) {
private func enqueueLoadAttachment(_ attachment: MXKAttachment, identifier: String, numberOfSamples: Int, completion: @escaping (Result<VoiceMessageAttachmentCacheManagerLoadResult, Error>) -> Void) {
if var callbacks = completionCallbacks[identifier] {
callbacks.append(CompletionWrapper(completion))
@@ -170,10 +179,12 @@ class VoiceMessageAttachmentCacheManager {
return
}
let result = VoiceMessageAttachmentCacheManagerLoadResult(eventIdentifier: identifier, url: url, duration: duration, samples: samples)
let copy = callbacks.map { $0 }
DispatchQueue.main.async {
for wrapper in copy {
wrapper.completion(Result.success((identifier, url, duration, samples)))
wrapper.completion(Result.success(result))
}
}
@@ -99,6 +99,11 @@ class VoiceMessageAudioPlayer: NSObject {
addObservers()
}
/// Releases the currently loaded audio item so a stopped player holds no media.
/// Clearing `url` lets callers detect the unloaded state (they check
/// `audioPlayer?.url != nil` before deciding to reload content).
func unloadContent() {
url = nil
// Detach the item from the underlying AVPlayer without discarding the player itself.
audioPlayer?.replaceCurrentItem(with: nil)
}
func play() {
isStopped = false
@@ -40,6 +40,7 @@ public class VoiceMessageController: NSObject, VoiceMessageToolbarViewDelegate,
private let themeService: ThemeService
private let mediaServiceProvider: VoiceMessageMediaServiceProvider
private let temporaryFileURL: URL
private let _voiceMessageToolbarView: VoiceMessageToolbarView
private var displayLink: CADisplayLink!
@@ -67,6 +68,9 @@ public class VoiceMessageController: NSObject, VoiceMessageToolbarViewDelegate,
self.themeService = themeService
self.mediaServiceProvider = mediaServiceProvider
let temporaryDirectoryURL = URL(fileURLWithPath: NSTemporaryDirectory(), isDirectory: true)
temporaryFileURL = temporaryDirectoryURL.appendingPathComponent(ProcessInfo().globallyUniqueString).appendingPathExtension("m4a")
_voiceMessageToolbarView = VoiceMessageToolbarView.loadFromNib()
super.init()
@@ -100,9 +104,6 @@ public class VoiceMessageController: NSObject, VoiceMessageToolbarViewDelegate,
}
UIImpactFeedbackGenerator(style: .medium).impactOccurred()
let temporaryDirectoryURL = URL(fileURLWithPath: NSTemporaryDirectory(), isDirectory: true)
let temporaryFileURL = temporaryDirectoryURL.appendingPathComponent(ProcessInfo().globallyUniqueString).appendingPathExtension("m4a")
audioRecorder = mediaServiceProvider.audioRecorder()
audioRecorder?.registerDelegate(self)
@@ -127,9 +128,14 @@ public class VoiceMessageController: NSObject, VoiceMessageToolbarViewDelegate,
}
func voiceMessageToolbarViewDidRequestPlaybackToggle(_ toolbarView: VoiceMessageToolbarView) {
if audioPlayer?.isPlaying ?? false {
audioPlayer?.pause()
if audioPlayer?.url != nil {
if audioPlayer?.isPlaying ?? false {
audioPlayer?.pause()
} else {
audioPlayer?.play()
}
} else {
audioPlayer?.loadContentFromURL(temporaryFileURL)
audioPlayer?.play()
}
}
@@ -210,9 +216,8 @@ public class VoiceMessageController: NSObject, VoiceMessageToolbarViewDelegate,
return
}
audioPlayer = mediaServiceProvider.audioPlayer()
audioPlayer = mediaServiceProvider.audioPlayerForIdentifier(UUID().uuidString)
audioPlayer?.registerDelegate(self)
audioPlayer?.loadContentFromURL(url)
audioSamples = []
@@ -368,18 +373,13 @@ public class VoiceMessageController: NSObject, VoiceMessageToolbarViewDelegate,
return
}
guard let url = audioPlayer.url else {
MXLog.error("Invalid audio player url.")
return
}
displayLink.isPaused = !audioPlayer.isPlaying
let requiredNumberOfSamples = _voiceMessageToolbarView.getRequiredNumberOfSamples()
if audioSamples.count != requiredNumberOfSamples && requiredNumberOfSamples > 0 {
padSamplesArrayToSize(requiredNumberOfSamples)
waveformAnalyser = WaveformAnalyzer(audioAssetURL: url)
waveformAnalyser = WaveformAnalyzer(audioAssetURL: temporaryFileURL)
waveformAnalyser?.samples(count: requiredNumberOfSamples, completionHandler: { [weak self] samples in
guard let samples = samples else {
MXLog.error("Could not sample audio recording.")
@@ -398,7 +398,7 @@ public class VoiceMessageController: NSObject, VoiceMessageToolbarViewDelegate,
details.elapsedTime = VoiceMessageController.timeFormatter.string(from: Date(timeIntervalSinceReferenceDate: (audioPlayer.isPlaying ? audioPlayer.currentTime : audioPlayer.duration)))
details.audioSamples = audioSamples
details.isPlaying = audioPlayer.isPlaying
details.progress = (audioPlayer.duration > 0.0 ? audioPlayer.currentTime / audioPlayer.duration : 0.0)
details.progress = (audioPlayer.isPlaying ? (audioPlayer.duration > 0.0 ? audioPlayer.currentTime / audioPlayer.duration : 0.0) : 0.0)
_voiceMessageToolbarView.configureWithDetails(details)
}
@@ -18,20 +18,27 @@ import Foundation
@objc public class VoiceMessageMediaServiceProvider: NSObject, VoiceMessageAudioPlayerDelegate, VoiceMessageAudioRecorderDelegate {
private let audioPlayers: NSHashTable<VoiceMessageAudioPlayer>
private let audioPlayers: NSMapTable<NSString, VoiceMessageAudioPlayer>
private let audioRecorders: NSHashTable<VoiceMessageAudioRecorder>
// Retain the currently playing audio player so it doesn't stop playing on timeline cell reuse
private var currentlyPlayingAudioPlayer: VoiceMessageAudioPlayer?
@objc public static let sharedProvider = VoiceMessageMediaServiceProvider()
private override init() {
audioPlayers = NSHashTable<VoiceMessageAudioPlayer>(options: .weakMemory)
audioPlayers = NSMapTable<NSString, VoiceMessageAudioPlayer>(valueOptions: .weakMemory)
audioRecorders = NSHashTable<VoiceMessageAudioRecorder>(options: .weakMemory)
}
@objc func audioPlayer() -> VoiceMessageAudioPlayer {
@objc func audioPlayerForIdentifier(_ identifier: String) -> VoiceMessageAudioPlayer {
if let audioPlayer = audioPlayers.object(forKey: identifier as NSString) {
return audioPlayer
}
let audioPlayer = VoiceMessageAudioPlayer()
audioPlayer.registerDelegate(self)
audioPlayers.add(audioPlayer)
audioPlayers.setObject(audioPlayer, forKey: identifier as NSString)
return audioPlayer
}
@@ -49,9 +56,16 @@ import Foundation
// MARK: - VoiceMessageAudioPlayerDelegate
/// Strongly retains the player that just started so it survives timeline cell
/// reuse (the `audioPlayers` table only holds players weakly), then stops all
/// other playback/recording services.
func audioPlayerDidStartPlaying(_ audioPlayer: VoiceMessageAudioPlayer) {
currentlyPlayingAudioPlayer = audioPlayer
stopAllServicesExcept(audioPlayer)
}
/// Drops the strong reference once playback stops, allowing the weakly-held
/// player to be deallocated and its resources reclaimed.
func audioPlayerDidStopPlaying(_ audioPlayer: VoiceMessageAudioPlayer) {
// Only clear the reference if the stopped player is the one we retained;
// another player may have started (and been retained) in the meantime.
if currentlyPlayingAudioPlayer == audioPlayer {
currentlyPlayingAudioPlayer = nil
}
}
// MARK: - VoiceMessageAudioRecorderDelegate
func audioRecorderDidStartRecording(_ audioRecorder: VoiceMessageAudioRecorder) {
@@ -61,14 +75,6 @@ import Foundation
// MARK: - Private
private func stopAllServicesExcept(_ service: AnyObject?) {
for audioPlayer in audioPlayers.allObjects {
if audioPlayer === service {
continue
}
audioPlayer.pause()
}
for audioRecoder in audioRecorders.allObjects {
if audioRecoder === service {
continue
@@ -76,5 +82,18 @@ import Foundation
audioRecoder.stopRecording()
}
guard let audioPlayersEnumerator = audioPlayers.objectEnumerator() else {
return
}
for case let audioPlayer as VoiceMessageAudioPlayer in audioPlayersEnumerator {
if audioPlayer === service {
continue
}
audioPlayer.stop()
audioPlayer.unloadContent()
}
}
}
@@ -36,9 +36,10 @@ class VoiceMessagePlaybackController: VoiceMessageAudioPlayerDelegate, VoiceMess
return dateFormatter
}()
private let mediaServiceProvider: VoiceMessageMediaServiceProvider
private let cacheManager: VoiceMessageAttachmentCacheManager
private let audioPlayer: VoiceMessageAudioPlayer
private var audioPlayer: VoiceMessageAudioPlayer?
private var displayLink: CADisplayLink!
private var samples: [Float] = []
private var duration: TimeInterval = 0
@@ -56,12 +57,10 @@ class VoiceMessagePlaybackController: VoiceMessageAudioPlayerDelegate, VoiceMess
init(mediaServiceProvider: VoiceMessageMediaServiceProvider,
cacheManager: VoiceMessageAttachmentCacheManager) {
self.mediaServiceProvider = mediaServiceProvider
self.cacheManager = cacheManager
playbackView = VoiceMessagePlaybackView.loadFromNib()
audioPlayer = mediaServiceProvider.audioPlayer()
audioPlayer.registerDelegate(self)
playbackView.delegate = self
displayLink = CADisplayLink(target: WeakTarget(self, selector: #selector(handleDisplayLinkTick)), selector: WeakTarget.triggerSelector)
@@ -82,9 +81,18 @@ class VoiceMessagePlaybackController: VoiceMessageAudioPlayerDelegate, VoiceMess
// MARK: - VoiceMessagePlaybackViewDelegate
func voiceMessagePlaybackViewDidRequestPlaybackToggle() {
if audioPlayer.isPlaying {
audioPlayer.pause()
} else {
guard let audioPlayer = audioPlayer else {
return
}
if audioPlayer.url != nil {
if audioPlayer.isPlaying {
audioPlayer.pause()
} else {
audioPlayer.play()
}
} else if let url = urlToLoad {
audioPlayer.loadContentFromURL(url)
audioPlayer.play()
}
}
@@ -135,8 +143,10 @@ class VoiceMessagePlaybackController: VoiceMessageAudioPlayerDelegate, VoiceMess
details.currentTime = VoiceMessagePlaybackController.timeFormatter.string(from: Date(timeIntervalSinceReferenceDate: self.duration))
details.progress = 0.0
default:
details.currentTime = VoiceMessagePlaybackController.timeFormatter.string(from: Date(timeIntervalSinceReferenceDate: audioPlayer.currentTime))
details.progress = (audioPlayer.duration > 0.0 ? audioPlayer.currentTime / audioPlayer.duration : 0.0)
if let audioPlayer = audioPlayer {
details.currentTime = VoiceMessagePlaybackController.timeFormatter.string(from: Date(timeIntervalSinceReferenceDate: audioPlayer.currentTime))
details.progress = (audioPlayer.duration > 0.0 ? audioPlayer.currentTime / audioPlayer.duration : 0.0)
}
}
details.loading = self.loading
@@ -155,19 +165,37 @@ class VoiceMessagePlaybackController: VoiceMessageAudioPlayerDelegate, VoiceMess
let requiredNumberOfSamples = playbackView.getRequiredNumberOfSamples()
cacheManager.loadAttachment(attachment, numberOfSamples: requiredNumberOfSamples) { result in
cacheManager.loadAttachment(attachment, numberOfSamples: requiredNumberOfSamples) { [weak self] result in
guard let self = self else {
return
}
switch result {
case .success(let result):
guard result.0 == attachment.eventId else {
guard result.eventIdentifier == attachment.eventId else {
return
}
self.loading = false
self.audioPlayer.loadContentFromURL(result.1)
self.duration = result.2
self.samples = result.3
// Avoid listening to old audio player delegates if the attachment for this playbackController/cell changes
self.audioPlayer?.deregisterDelegate(self)
self.updateUI()
self.audioPlayer = self.mediaServiceProvider.audioPlayerForIdentifier(result.eventIdentifier)
self.audioPlayer?.registerDelegate(self)
self.loading = false
self.urlToLoad = result.url
self.duration = result.duration
self.samples = result.samples
if let audioPlayer = self.audioPlayer {
if audioPlayer.isPlaying {
self.state = .playing
} else if audioPlayer.currentTime > 0 {
self.state = .paused
} else {
self.state = .stopped
}
}
case .failure:
self.state = .error
}