#4090 - Improved performance

Gil Eluard
2021-07-02 07:38:56 +02:00
parent 12dc7d0b66
commit e0aafe4402
6 changed files with 146 additions and 81 deletions
@@ -31,9 +31,9 @@ enum VoiceMessageAttachmentCacheManagerError: Error {
Swift can optimize identical callbacks into a single closure instance. Wrap them in a class so each one can be stored separately in an array.
*/
private class CompletionWrapper {
let completion: (Result<(URL, [Float]), Error>) -> Void
let completion: (Result<(URL, TimeInterval, [Float]), Error>) -> Void
init(_ completion: @escaping (Result<(URL, [Float]), Error>) -> Void) {
init(_ completion: @escaping (Result<(URL, TimeInterval, [Float]), Error>) -> Void) {
self.completion = completion
}
}
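A minimal sketch of why the wrapper matters (the `Data` payload and `pending` dictionary here are illustrative, not from the commit): closures have no identity of their own, but class instances do, so wrapping makes each queued callback a distinct, storable object.

```swift
import Foundation

// Illustrative wrapper: a class gives each stored callback a stable identity,
// even if the compiler deduplicates structurally identical closures.
private class CompletionWrapper {
    let completion: (Result<Data, Error>) -> Void
    init(_ completion: @escaping (Result<Data, Error>) -> Void) {
        self.completion = completion
    }
}

var pending = [String: [CompletionWrapper]]()
pending["attachment-id", default: []].append(CompletionWrapper { result in
    print("subscriber received: \(result)")
})
```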
@@ -46,13 +46,14 @@ class VoiceMessageAttachmentCacheManager {
private var completionCallbacks = [String: [CompletionWrapper]]()
private var samples = [String: [Int: [Float]]]()
private var durations = [String: TimeInterval]()
private var finalURLs = [String: URL]()
private init() {
workQueue = DispatchQueue(label: "io.element.VoiceMessageAttachmentCacheManager.queue", qos: .userInitiated)
}
func loadAttachment(_ attachment: MXKAttachment, numberOfSamples: Int, completion: @escaping (Result<(URL, [Float]), Error>) -> Void) {
func loadAttachment(_ attachment: MXKAttachment, numberOfSamples: Int, completion: @escaping (Result<(URL, TimeInterval, [Float]), Error>) -> Void) {
guard attachment.type == MXKAttachmentTypeVoiceMessage else {
completion(Result.failure(VoiceMessageAttachmentCacheManagerError.invalidAttachmentType))
return
@@ -68,8 +69,8 @@ class VoiceMessageAttachmentCacheManager {
return
}
if let finalURL = finalURLs[identifier], let samples = samples[identifier]?[numberOfSamples] {
completion(Result.success((finalURL, samples)))
if let finalURL = finalURLs[identifier], let duration = durations[identifier], let samples = samples[identifier]?[numberOfSamples] {
completion(Result.success((finalURL, duration, samples)))
return
}
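The caches above form a two-level lookup: first by attachment identifier, then by requested sample count, so one decoded file can serve waveforms rendered at different widths. A small sketch of that pattern (the helper names are illustrative):

```swift
var samples = [String: [Int: [Float]]]()

func cachedSamples(for identifier: String, count: Int) -> [Float]? {
    samples[identifier]?[count]
}

func storeSamples(_ values: [Float], for identifier: String, count: Int) {
    // Create the inner dictionary on first use, then key by sample count.
    samples[identifier, default: [:]][count] = values
}
```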
@@ -78,7 +79,8 @@ class VoiceMessageAttachmentCacheManager {
// }
}
private func enqueueLoadAttachment(_ attachment: MXKAttachment, identifier: String, numberOfSamples: Int, completion: @escaping (Result<(URL, [Float]), Error>) -> Void) {
private func enqueueLoadAttachment(_ attachment: MXKAttachment, identifier: String, numberOfSamples: Int, completion: @escaping (Result<(URL, TimeInterval, [Float]), Error>) -> Void) {
if var callbacks = completionCallbacks[identifier] {
callbacks.append(CompletionWrapper(completion))
completionCallbacks[identifier] = callbacks
@@ -87,7 +89,7 @@ class VoiceMessageAttachmentCacheManager {
completionCallbacks[identifier] = [CompletionWrapper(completion)]
}
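This is the request-coalescing half of the manager: if a load for the same identifier is already in flight, the new completion is appended rather than starting duplicate work. A self-contained sketch of the technique, with illustrative names throughout:

```swift
import Foundation

final class CoalescingLoader<Value> {
    private var pending = [String: [(Result<Value, Error>) -> Void]]()
    private let queue = DispatchQueue(label: "example.coalescing-loader")

    func load(_ key: String,
              work: @escaping (@escaping (Result<Value, Error>) -> Void) -> Void,
              completion: @escaping (Result<Value, Error>) -> Void) {
        queue.async {
            // Already loading this key: just queue the extra callback.
            if self.pending[key] != nil {
                self.pending[key]?.append(completion)
                return
            }
            self.pending[key] = [completion]
            work { result in
                self.queue.async {
                    let callbacks = self.pending.removeValue(forKey: key) ?? []
                    DispatchQueue.main.async {
                        callbacks.forEach { $0(result) }
                    }
                }
            }
        }
    }
}
```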
func sampleFileAtURL(_ url: URL) {
func sampleFileAtURL(_ url: URL, duration: TimeInterval) {
let analyser = WaveformAnalyzer(audioAssetURL: url)
analyser?.samples(count: numberOfSamples, completionHandler: { samples in
// Dispatch back from the WaveformAnalyzer's internal queue
@@ -103,13 +105,13 @@ class VoiceMessageAttachmentCacheManager {
self.samples[identifier] = [numberOfSamples: samples]
}
self.invokeSuccessCallbacksForIdentifier(identifier, url: url, samples: samples)
self.invokeSuccessCallbacksForIdentifier(identifier, url: url, duration: duration, samples: samples)
}
})
}
if let finalURL = finalURLs[identifier] {
sampleFileAtURL(finalURL)
if let finalURL = finalURLs[identifier], let duration = durations[identifier] {
sampleFileAtURL(finalURL, duration: duration)
return
}
@@ -125,10 +127,21 @@ class VoiceMessageAttachmentCacheManager {
switch result {
case .success:
self.finalURLs[identifier] = newURL
sampleFileAtURL(newURL)
VoiceMessageAudioConverter.mediaDurationAt(newURL) { result in
switch result {
case .success(let duration):
sampleFileAtURL(newURL, duration: duration)
case .failure(let error):
MXLog.error("[VoiceMessageAttachmentCacheManager] enqueueLoadAttachment: failed getting audio duration with: \(error)")
}
}
case .failure(let error):
self.invokeFailureCallbacksForIdentifier(identifier, error: VoiceMessageAttachmentCacheManagerError.conversionError(error))
MXLog.error("Failed failed decoding audio message with: \(error)")
MXLog.error("[VoiceMessageAttachmentCacheManager] enqueueLoadAttachment: failed decoding audio message with: \(error)")
}
}
}
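The new flow chains three async steps: convert the file, probe its duration, then sample the waveform, failing fast at each stage. A reduced sketch with stubbed stand-ins for the converter calls (none of these stub names are from the commit):

```swift
import Foundation

// Stubs standing in for the real converter; they succeed immediately.
func convert(_ url: URL, completion: @escaping (Result<URL, Error>) -> Void) {
    completion(.success(url))
}

func probeDuration(of url: URL, completion: @escaping (Result<TimeInterval, Error>) -> Void) {
    completion(.success(12.5))
}

func prepareVoiceMessage(at sourceURL: URL,
                         completion: @escaping (Result<(URL, TimeInterval), Error>) -> Void) {
    convert(sourceURL) { conversionResult in
        switch conversionResult {
        case .success(let convertedURL):
            probeDuration(of: convertedURL) { durationResult in
                // Result.map forwards a probe failure unchanged.
                completion(durationResult.map { (convertedURL, $0) })
            }
        case .failure(let error):
            completion(.failure(error))
        }
    }
}
```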
@@ -156,7 +169,7 @@ class VoiceMessageAttachmentCacheManager {
}
}
private func invokeSuccessCallbacksForIdentifier(_ identifier: String, url: URL, samples: [Float]) {
private func invokeSuccessCallbacksForIdentifier(_ identifier: String, url: URL, duration: TimeInterval, samples: [Float]) {
guard let callbacks = completionCallbacks[identifier] else {
return
}
@@ -164,7 +177,7 @@ class VoiceMessageAttachmentCacheManager {
let copy = callbacks.map { $0 }
DispatchQueue.main.async {
for wrapper in copy {
wrapper.completion(Result.success((url, samples)))
wrapper.completion(Result.success((url, duration, samples)))
}
}
@@ -29,10 +29,22 @@ struct VoiceMessageAudioConverter {
}
static func convertToMPEG4AAC(sourceURL: URL, destinationURL: URL, completion: @escaping (Result<Void, VoiceMessageAudioConverterError>) -> Void) {
let command = "-hide_banner -y -i \"\(sourceURL.path)\" -c:a aac_at \"\(destinationURL.path)\""
let command = "-hide_banner -y -i \"\(sourceURL.path)\" -c:a aac_at -b:a 192k \"\(destinationURL.path)\""
executeCommand(command, completion: completion)
}
static func mediaDurationAt(_ sourceURL: URL, completion: @escaping (Result<TimeInterval, VoiceMessageAudioConverterError>) -> Void) {
DispatchQueue.global(qos: .userInteractive).async {
let mediaInfoSession = FFprobeKit.getMediaInformation(sourceURL.path)
let mediaInfo = mediaInfoSession?.getMediaInformation()
if let duration = TimeInterval(mediaInfo?.getDuration() ?? "0") {
completion(.success(duration))
} else {
completion(.failure(.generic("Failed to get media duration")))
}
}
}
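A hedged usage sketch of the new helper; the file path is made up, and the call shape simply mirrors the signature introduced above. Note the completion runs on a global queue, not the main thread.

```swift
let fileURL = URL(fileURLWithPath: "/tmp/voice-message.m4a") // illustrative path

VoiceMessageAudioConverter.mediaDurationAt(fileURL) { result in
    switch result {
    case .success(let duration):
        print("duration: \(duration)s")
    case .failure(let error):
        print("probe failed: \(error)")
    }
}
```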
static private func executeCommand(_ command: String, completion: @escaping (Result<Void, VoiceMessageAudioConverterError>) -> Void) {
FFmpegKitConfig.setLogLevel(0)
@@ -41,6 +41,9 @@ class VoiceMessagePlaybackController: VoiceMessageAudioPlayerDelegate, VoiceMess
private let audioPlayer: VoiceMessageAudioPlayer
private var displayLink: CADisplayLink!
private var samples: [Float] = []
private var duration: TimeInterval = 0
private var urlToLoad: URL?
private var loading: Bool = false
private var state: VoiceMessagePlaybackControllerState = .stopped {
didSet {
@@ -82,6 +85,10 @@ class VoiceMessagePlaybackController: VoiceMessageAudioPlayerDelegate, VoiceMess
if audioPlayer.isPlaying {
audioPlayer.pause()
} else {
if let urlToLoad = urlToLoad {
audioPlayer.loadContentFromURL(urlToLoad)
}
urlToLoad = nil
audioPlayer.play()
}
}
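Deferring `loadContentFromURL` until the first tap on play keeps the cache load cheap: the player only opens the file when the user actually asks for playback. A condensed sketch of the toggle, against a hypothetical `Player` protocol:

```swift
import Foundation

protocol Player {
    var isPlaying: Bool { get }
    func load(_ url: URL)
    func play()
    func pause()
}

final class PlaybackToggle {
    private let player: Player
    // Set when caching finishes; consumed lazily on first play.
    var urlToLoad: URL?

    init(player: Player) { self.player = player }

    func toggle() {
        if player.isPlaying {
            player.pause()
        } else {
            if let url = urlToLoad {
                player.load(url) // only now does the player touch the file
                urlToLoad = nil
            }
            player.play()
        }
    }
}
```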
@@ -129,12 +136,13 @@ class VoiceMessagePlaybackController: VoiceMessageAudioPlayerDelegate, VoiceMess
switch state {
case .stopped:
details.currentTime = VoiceMessagePlaybackController.timeFormatter.string(from: Date(timeIntervalSinceReferenceDate: audioPlayer.duration))
details.currentTime = VoiceMessagePlaybackController.timeFormatter.string(from: Date(timeIntervalSinceReferenceDate: self.duration))
details.progress = 0.0
default:
details.currentTime = VoiceMessagePlaybackController.timeFormatter.string(from: Date(timeIntervalSinceReferenceDate: audioPlayer.currentTime))
details.progress = (audioPlayer.duration > 0.0 ? audioPlayer.currentTime / audioPlayer.duration : 0.0)
}
details.loading = self.loading
playbackView.configureWithDetails(details)
}
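The `timeFormatter` trick renders a `TimeInterval` by treating it as an offset from the reference date; the commit does not show the formatter's setup, but a configuration along these lines reproduces the behavior:

```swift
import Foundation

let timeFormatter: DateFormatter = {
    let formatter = DateFormatter()
    formatter.dateFormat = "m:ss"
    // Use GMT so no time-zone offset leaks into the rendered time.
    formatter.timeZone = TimeZone(identifier: "GMT")
    return formatter
}()

let duration: TimeInterval = 83
// 83 s after the reference date is 00:01:23 GMT, so this prints "1:23".
print(timeFormatter.string(from: Date(timeIntervalSinceReferenceDate: duration)))
```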
@@ -144,13 +152,23 @@ class VoiceMessagePlaybackController: VoiceMessageAudioPlayerDelegate, VoiceMess
return
}
self.loading = true
updateUI()
// TODO: Manage a single, shared audio player instance.
if audioPlayer.isPlaying || audioPlayer.currentTime > 0 {
audioPlayer.stop()
}
let requiredNumberOfSamples = playbackView.getRequiredNumberOfSamples()
cacheManager.loadAttachment(attachment, numberOfSamples: requiredNumberOfSamples) { result in
switch result {
case .success(let result):
self.audioPlayer.loadContentFromURL(result.0)
self.samples = result.1
self.loading = false
self.urlToLoad = result.0
self.duration = result.1
self.samples = result.2
self.updateUI()
case .failure:
self.state = .error
@@ -28,6 +28,7 @@ struct VoiceMessagePlaybackViewDetails {
var playing: Bool = false
var playbackEnabled = false
var recording: Bool = false
var loading: Bool = false
}
class VoiceMessagePlaybackView: UIView, NibLoadable, Themable {
@@ -83,10 +84,17 @@ class VoiceMessagePlaybackView: UIView, NibLoadable, Themable {
}
}
elapsedTimeLabel.text = details.currentTime
_waveformView.progress = details.progress
_waveformView.setSamples(details.samples)
if details.loading {
elapsedTimeLabel.text = "--:--"
_waveformView.progress = 0
_waveformView.samples = []
_waveformView.alpha = 0.3
} else {
elapsedTimeLabel.text = details.currentTime
_waveformView.progress = details.progress
_waveformView.samples = details.samples
_waveformView.alpha = 1
}
self.details = details
@@ -20,26 +20,48 @@ class VoiceMessageWaveformView: UIView {
private let lineWidth: CGFloat = 2.0
private let linePadding: CGFloat = 2.0
private let renderingQueue: DispatchQueue = DispatchQueue(label: "io.element.VoiceMessageWaveformView.queue", qos: .userInitiated)
private var samples: [Float] = []
private var barViews: [CALayer] = []
var primarylineColor = UIColor.lightGray
var secondaryLineColor = UIColor.darkGray
var samples: [Float] = [] {
didSet {
computeWaveForm()
}
}
var primarylineColor = UIColor.lightGray {
didSet {
backgroundLayer.strokeColor = primarylineColor.cgColor
backgroundLayer.fillColor = primarylineColor.cgColor
}
}
var secondaryLineColor = UIColor.darkGray {
didSet {
progressLayer.strokeColor = secondaryLineColor.cgColor
progressLayer.fillColor = secondaryLineColor.cgColor
}
}
private let backgroundLayer = CAShapeLayer()
private let progressLayer = CAShapeLayer()
var progress = 0.0 {
didSet {
updateBarViews()
progressLayer.frame = CGRect(origin: self.bounds.origin, size: CGSize(width: self.bounds.width * CGFloat(self.progress), height: self.bounds.height))
}
}
var requiredNumberOfSamples: Int {
return barViews.count
return Int(self.bounds.size.width / (lineWidth + linePadding))
}
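The required sample count is now derived from geometry instead of counting pre-built bar layers. For example, a 240 pt wide view with 2 pt lines and 2 pt padding asks the analyzer for 60 samples:

```swift
import CoreGraphics

let width: CGFloat = 240
let lineWidth: CGFloat = 2, linePadding: CGFloat = 2
let requiredNumberOfSamples = Int(width / (lineWidth + linePadding)) // 60
```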
override init(frame: CGRect) {
super.init(frame: frame)
setupBarViews()
setupAndAdd(backgroundLayer, with: primarylineColor)
setupAndAdd(progressLayer, with: secondaryLineColor)
progressLayer.masksToBounds = true
computeWaveForm()
}
required init?(coder: NSCoder) {
@@ -48,61 +70,52 @@ class VoiceMessageWaveformView: UIView {
override func layoutSubviews() {
super.layoutSubviews()
setupBarViews()
backgroundLayer.frame = self.bounds
progressLayer.frame = CGRect(origin: self.bounds.origin, size: CGSize(width: self.bounds.width * CGFloat(self.progress), height: self.bounds.height))
computeWaveForm()
}
func setSamples(_ samples: [Float]) {
self.samples = samples
updateBarViews()
}
// MARK: - Private
private func setupBarViews() {
for layer in barViews {
layer.removeFromSuperlayer()
private func computeWaveForm() {
renderingQueue.async {
let path = UIBezierPath()
let drawMappingFactor = self.bounds.size.height
let minimumGraphAmplitude: CGFloat = 1
var xOffset: CGFloat = self.lineWidth / 2
var index = 0
while xOffset < self.bounds.width - self.lineWidth {
let sample = CGFloat(index >= self.samples.count ? 1 : self.samples[index])
let invertedDbSample = 1 - sample // sample is in dB, linearly normalized to [0, 1] (1 -> -50 dB)
let drawingAmplitude = max(minimumGraphAmplitude, invertedDbSample * drawMappingFactor)
path.move(to: CGPoint(x: xOffset, y: self.bounds.midY - drawingAmplitude / 2))
path.addLine(to: CGPoint(x: xOffset, y: self.bounds.midY + drawingAmplitude / 2))
xOffset += self.lineWidth + self.linePadding
index += 1
}
DispatchQueue.main.async {
self.backgroundLayer.path = path.cgPath
self.progressLayer.path = path.cgPath
}
}
var barViews: [CALayer] = []
var xOffset: CGFloat = lineWidth / 2
while xOffset < bounds.width - lineWidth {
let layer = CALayer()
layer.backgroundColor = primarylineColor.cgColor
layer.cornerRadius = lineWidth / 2
layer.masksToBounds = true
layer.anchorPoint = CGPoint(x: 0, y: 0.5)
layer.frame = CGRect(x: xOffset, y: bounds.midY - lineWidth / 2, width: lineWidth, height: lineWidth)
self.layer.addSublayer(layer)
barViews.append(layer)
xOffset += lineWidth + linePadding
}
self.barViews = barViews
updateBarViews()
}
private func updateBarViews() {
let drawMappingFactor = bounds.size.height
let minimumGraphAmplitude: CGFloat = lineWidth
let progressPosition = Int(floor(progress * Double(barViews.count)))
for (index, layer) in barViews.enumerated() {
let sample = CGFloat(index >= samples.count ? 1 : samples[index])
let invertedDbSample = 1 - sample // sample is in dB, linearly normalized to [0, 1] (1 -> -50 dB)
let drawingAmplitude = max(minimumGraphAmplitude, invertedDbSample * drawMappingFactor)
layer.frame.origin.y = bounds.midY - drawingAmplitude / 2
layer.frame.size.height = drawingAmplitude
layer.backgroundColor = (index < progressPosition ? secondaryLineColor.cgColor : primarylineColor.cgColor)
}
private func setupAndAdd(_ shapeLayer: CAShapeLayer, with color: UIColor) {
// shapeLayer.shouldRasterize = true
shapeLayer.drawsAsynchronously = true
shapeLayer.frame = self.bounds
shapeLayer.strokeColor = color.cgColor
shapeLayer.fillColor = color.cgColor
shapeLayer.lineCap = .round
shapeLayer.lineWidth = lineWidth
self.layer.addSublayer(shapeLayer)
}
}
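Taken together, the waveform changes are the heart of this performance commit: the old code kept one `CALayer` per bar and mutated every layer on each progress tick, while the new code draws all bars into a single `UIBezierPath` shared by two `CAShapeLayer`s and animates progress by resizing the clipped top layer. A condensed, main-thread-only sketch of that technique (the commit additionally builds the path on a background queue):

```swift
import UIKit

final class MiniWaveformView: UIView {
    private let backgroundLayer = CAShapeLayer()
    private let progressLayer = CAShapeLayer()
    private let lineWidth: CGFloat = 2, linePadding: CGFloat = 2

    var samples: [Float] = [] { didSet { rebuildPath() } }
    var progress: CGFloat = 0 {
        didSet {
            // Progress is just a frame change on the clipped top layer;
            // no per-bar layer mutation happens anymore.
            progressLayer.frame = CGRect(x: 0, y: 0,
                                         width: bounds.width * progress,
                                         height: bounds.height)
        }
    }

    override init(frame: CGRect) {
        super.init(frame: frame)
        for (layer, color) in [(backgroundLayer, UIColor.lightGray),
                               (progressLayer, UIColor.darkGray)] {
            layer.strokeColor = color.cgColor
            layer.fillColor = color.cgColor
            layer.lineWidth = lineWidth
            layer.lineCap = .round
            layer.frame = bounds
            self.layer.addSublayer(layer)
        }
        progressLayer.masksToBounds = true // clips the shared path to the progress frame
    }

    required init?(coder: NSCoder) { fatalError("init(coder:) has not been implemented") }

    private func rebuildPath() {
        // One vertical segment per bar, all in a single path.
        let path = UIBezierPath()
        var x = lineWidth / 2
        var index = 0
        while x < bounds.width - lineWidth {
            let sample = CGFloat(index < samples.count ? samples[index] : 1)
            // Samples are dB values normalized to [0, 1], where 1 means silence.
            let amplitude = max(1, (1 - sample) * bounds.height)
            path.move(to: CGPoint(x: x, y: bounds.midY - amplitude / 2))
            path.addLine(to: CGPoint(x: x, y: bounds.midY + amplitude / 2))
            x += lineWidth + linePadding
            index += 1
        }
        backgroundLayer.path = path.cgPath
        progressLayer.path = path.cgPath
    }
}
```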