Merge branch 'spaces' into gil/4498_Handle_space_link

This commit is contained in:
Gil Eluard
2021-09-23 15:26:29 +02:00
46 changed files with 1850 additions and 1780 deletions
+3 -2
View File
@@ -1015,6 +1015,7 @@ const NSTimeInterval kResizeComposerAnimationDuration = .05;
[self refreshRoomInputToolbar];
[VoiceMessageMediaServiceProvider.sharedProvider setCurrentRoomSummary:dataSource.room.summary];
_voiceMessageController.roomId = dataSource.roomId;
}
- (void)onRoomDataSourceReady
@@ -5657,8 +5658,8 @@ const NSTimeInterval kResizeComposerAnimationDuration = .05;
}];
// Show the explanation dialog
alert = [UIAlertController alertControllerWithTitle:NSLocalizedStringFromTable(@"rerequest_keys_alert_title", @"Vector", nil)
message:NSLocalizedStringFromTable(@"rerequest_keys_alert_message", @"Vector", nil)
alert = [UIAlertController alertControllerWithTitle:VectorL10n.rerequestKeysAlertTitle
message:[VectorL10n e2eRoomKeyRequestMessage:AppInfo.current.displayName]
preferredStyle:UIAlertControllerStyleAlert];
currentAlert = alert;
@@ -121,8 +121,8 @@ class VoiceMessageAudioPlayer: NSObject {
audioPlayer?.seek(to: .zero)
}
func seekToTime(_ time: TimeInterval) {
audioPlayer?.seek(to: CMTime(seconds: time, preferredTimescale: 60000))
func seekToTime(_ time: TimeInterval, completionHandler:@escaping (Bool) -> Void = { _ in }) {
audioPlayer?.seek(to: CMTime(seconds: time, preferredTimescale: 60000), completionHandler: completionHandler)
}
func registerDelegate(_ delegate: VoiceMessageAudioPlayerDelegate) {
@@ -1,4 +1,4 @@
//
//
// Copyright 2021 New Vector Ltd
//
// Licensed under the Apache License, Version 2.0 (the "License");
@@ -29,14 +29,12 @@ public class VoiceMessageController: NSObject, VoiceMessageToolbarViewDelegate,
static let maximumAudioRecordingDuration: TimeInterval = 120.0
static let maximumAudioRecordingLengthReachedThreshold: TimeInterval = 10.0
static let elapsedTimeFormat = "m:ss"
static let fileNameFormat = "'Voice message - 'MM.dd.yyyy HH.mm.ss"
static let fileNameDateFormat = "MM.dd.yyyy HH.mm.ss"
static let minimumRecordingDuration = 1.0
}
private let themeService: ThemeService
private let mediaServiceProvider: VoiceMessageMediaServiceProvider
private var temporaryFileURL: URL!
private let _voiceMessageToolbarView: VoiceMessageToolbarView
private var displayLink: CADisplayLink!
@@ -48,6 +46,7 @@ public class VoiceMessageController: NSObject, VoiceMessageToolbarViewDelegate,
private var audioSamples: [Float] = []
private var isInLockedMode: Bool = false
private var notifiedRemainingTime = false
private var recordDuration: TimeInterval?
private static let elapsedTimeFormatter: DateFormatter = {
let dateFormatter = DateFormatter()
@@ -55,11 +54,14 @@ public class VoiceMessageController: NSObject, VoiceMessageToolbarViewDelegate,
return dateFormatter
}()
private static let fileNameDateFormatter: DateFormatter = {
let dateFormatter = DateFormatter()
dateFormatter.dateFormat = Constants.fileNameFormat
return dateFormatter
}()
// Location of the per-room draft voice-message file in the temporary
// directory, or nil when no room is currently associated.
private var temporaryFileURL: URL? {
    // Derive the URL from the current roomId so each room keeps its own draft.
    return roomId.map { currentRoomId in
        URL(fileURLWithPath: NSTemporaryDirectory(), isDirectory: true)
            .appendingPathComponent("Voice message-\(currentRoomId)")
            .appendingPathExtension("m4a")
    }
}
@objc public weak var delegate: VoiceMessageControllerDelegate?
@@ -71,10 +73,15 @@ public class VoiceMessageController: NSObject, VoiceMessageToolbarViewDelegate,
return _voiceMessageToolbarView
}
// Identifier of the room this controller operates on. Exposed to
// Objective-C callers (set from the room view controller's data source).
@objc public var roomId: String? {
didSet {
// Changing rooms may reveal a leftover draft recording for the new
// room, so re-check for one every time the id is updated.
checkForRecording()
}
}
@objc public init(themeService: ThemeService, mediaServiceProvider: VoiceMessageMediaServiceProvider) {
self.themeService = themeService
self.mediaServiceProvider = mediaServiceProvider
_voiceMessageToolbarView = VoiceMessageToolbarView.loadFromNib()
super.init()
@@ -96,11 +103,13 @@ public class VoiceMessageController: NSObject, VoiceMessageToolbarViewDelegate,
// MARK: - VoiceMessageToolbarViewDelegate
func voiceMessageToolbarViewDidRequestRecordingStart(_ toolbarView: VoiceMessageToolbarView) {
guard let temporaryFileURL = temporaryFileURL else {
return
}
guard AVAudioSession.sharedInstance().recordPermission == .granted else {
delegate?.voiceMessageControllerDidRequestMicrophonePermission(self)
return
}
// Haptic are not played during record on iOS by default. This fix works
// only since iOS 13. A workaround for iOS 12 and earlier would be to
// dispatch after at least 100ms recordWithOutputURL call
@@ -113,11 +122,6 @@ public class VoiceMessageController: NSObject, VoiceMessageToolbarViewDelegate,
audioRecorder = mediaServiceProvider.audioRecorder()
audioRecorder?.registerDelegate(self)
let temporaryDirectoryURL = URL(fileURLWithPath: NSTemporaryDirectory(), isDirectory: true)
let fileName = VoiceMessageController.fileNameDateFormatter.string(from: Date())
temporaryFileURL = temporaryDirectoryURL.appendingPathComponent(fileName).appendingPathExtension("m4a")
audioRecorder?.recordWithOutputURL(temporaryFileURL)
}
@@ -140,7 +144,8 @@ public class VoiceMessageController: NSObject, VoiceMessageToolbarViewDelegate,
}
func voiceMessageToolbarViewDidRequestPlaybackToggle(_ toolbarView: VoiceMessageToolbarView) {
guard let audioPlayer = audioPlayer else {
guard let audioPlayer = audioPlayer,
let temporaryFileURL = temporaryFileURL else {
return
}
@@ -156,7 +161,26 @@ public class VoiceMessageController: NSObject, VoiceMessageToolbarViewDelegate,
}
}
// Seeks the draft-recording player to the given fractional progress (0...1)
// of the recorded duration, refreshing the UI once the seek completes.
func voiceMessageToolbarViewDidRequestSeek(to progress: CGFloat) {
    // Seeking only makes sense with a player, a draft file and a known duration.
    guard let audioPlayer = audioPlayer else {
        return
    }
    guard let draftFileURL = temporaryFileURL, let totalDuration = recordDuration else {
        return
    }

    // Lazily load the draft content on first seek.
    if audioPlayer.url == nil {
        audioPlayer.loadContentFromURL(draftFileURL)
    }

    let targetTime = totalDuration * Double(progress)
    audioPlayer.seekToTime(targetTime) { [weak self] _ in
        self?.updateUI()
    }
}
func voiceMessageToolbarViewDidRequestSend(_ toolbarView: VoiceMessageToolbarView) {
guard let temporaryFileURL = temporaryFileURL else {
return
}
audioPlayer?.stop()
audioRecorder?.stopRecording()
@@ -199,8 +223,9 @@ public class VoiceMessageController: NSObject, VoiceMessageToolbarViewDelegate,
}
func audioPlayerDidFinishPlaying(_ audioPlayer: VoiceMessageAudioPlayer) {
audioPlayer.seekToTime(0.0)
updateUI()
audioPlayer.seekToTime(0.0) { [weak self] _ in
self?.updateUI()
}
}
func audioPlayer(_ audioPlayer: VoiceMessageAudioPlayer, didFailWithError: Error) {
@@ -211,8 +236,24 @@ public class VoiceMessageController: NSObject, VoiceMessageToolbarViewDelegate,
// MARK: - Private
// Detects an unsent draft recording for the current room and, if one
// exists, switches the toolbar into locked playback mode.
private func checkForRecording() {
    guard let draftFileURL = temporaryFileURL else {
        return
    }

    // A leftover file at the draft path means a recording was never sent.
    let draftExists = FileManager.default.fileExists(atPath: draftFileURL.path)
    if draftExists {
        isInLockedMode = true
        loadDraftRecording()
    }

    updateUI()
}
private func finishRecording() {
guard let temporaryFileURL = temporaryFileURL else {
return
}
let recordDuration = audioRecorder?.currentTime
self.recordDuration = recordDuration
audioRecorder?.stopRecording()
guard isInLockedMode else {
@@ -222,13 +263,21 @@ public class VoiceMessageController: NSObject, VoiceMessageToolbarViewDelegate,
return
}
audioPlayer = mediaServiceProvider.audioPlayerForIdentifier(UUID().uuidString)
loadDraftRecording()
updateUI()
}
// Prepares an audio player for the current room's draft recording and
// loads its content, resetting any previously captured waveform samples.
private func loadDraftRecording() {
    // Both a room id (player identifier) and a draft file are required.
    guard let currentRoomId = roomId, let draftFileURL = temporaryFileURL else {
        return
    }

    audioPlayer = mediaServiceProvider.audioPlayerForIdentifier(currentRoomId)
    audioPlayer?.registerDelegate(self)
    audioPlayer?.loadContentFromURL(draftFileURL)

    // Samples belong to the live recording session; drop them for playback.
    audioSamples.removeAll()

    updateUI()
}
private func sendRecordingAtURL(_ sourceURL: URL) {
@@ -380,7 +429,8 @@ public class VoiceMessageController: NSObject, VoiceMessageToolbarViewDelegate,
}
private func updateUIFromAudioPlayer() {
guard let audioPlayer = audioPlayer else {
guard let audioPlayer = audioPlayer,
let temporaryFileURL = temporaryFileURL else {
return
}
@@ -404,12 +454,25 @@ public class VoiceMessageController: NSObject, VoiceMessageToolbarViewDelegate,
})
}
let duration: TimeInterval
if let recordDuration = recordDuration {
duration = recordDuration
} else {
let asset = AVURLAsset(url: temporaryFileURL)
duration = asset.duration.seconds
recordDuration = duration
}
var details = VoiceMessageToolbarViewDetails()
details.state = (audioRecorder?.isRecording ?? false ? (isInLockedMode ? .lockedModeRecord : .record) : (isInLockedMode ? .lockedModePlayback : .idle))
details.elapsedTime = VoiceMessageController.elapsedTimeFormatter.string(from: Date(timeIntervalSinceReferenceDate: (audioPlayer.isPlaying ? audioPlayer.currentTime : audioPlayer.duration)))
// Show the current time if the player is paused, show duration when at 0.
let currentTime = audioPlayer.currentTime
let displayTime = currentTime > 0 ? currentTime : duration
details.elapsedTime = VoiceMessageController.elapsedTimeFormatter.string(from: Date(timeIntervalSinceReferenceDate: displayTime))
details.progress = duration > 0 ? currentTime / duration : 0
details.audioSamples = audioSamples
details.isPlaying = audioPlayer.isPlaying
details.progress = (audioPlayer.isPlaying ? (audioPlayer.duration > 0.0 ? audioPlayer.currentTime / audioPlayer.duration : 0.0) : 0.0)
_voiceMessageToolbarView.configureWithDetails(details)
}
@@ -97,6 +97,22 @@ class VoiceMessagePlaybackController: VoiceMessageAudioPlayerDelegate, VoiceMess
}
}
// Seeks playback to the given fractional progress (0...1) of the message
// duration, loading the attachment content into the player if needed.
func voiceMessagePlaybackViewDidRequestSeek(to progress: CGFloat) {
    guard let player = audioPlayer else {
        return
    }

    // First seek may arrive before any playback; load content on demand.
    if player.url == nil, let contentURL = urlToLoad {
        player.loadContentFromURL(contentURL, displayName: attachment?.originalFileName)
    }

    let targetTime = self.duration * Double(progress)
    player.seekToTime(targetTime) { [weak self] _ in
        self?.updateUI()
    }
}
// Delegate callback for playback-view width changes; triggers a reload of
// the attachment data — presumably so the waveform is re-sampled for the
// new width (TODO confirm against loadAttachmentData).
func voiceMessagePlaybackViewDidChangeWidth() {
loadAttachmentData()
}
@@ -125,8 +141,10 @@ class VoiceMessagePlaybackController: VoiceMessageAudioPlayerDelegate, VoiceMess
}
func audioPlayerDidFinishPlaying(_ audioPlayer: VoiceMessageAudioPlayer) {
audioPlayer.seekToTime(0.0)
state = .stopped
audioPlayer.seekToTime(0.0) { [weak self] _ in
guard let self = self else { return }
self.state = .stopped
}
}
// MARK: - Private
@@ -141,20 +159,13 @@ class VoiceMessagePlaybackController: VoiceMessageAudioPlayerDelegate, VoiceMess
details.playbackEnabled = (state != .error)
details.playing = (state == .playing)
details.samples = samples
switch state {
case .stopped:
details.currentTime = VoiceMessagePlaybackController.timeFormatter.string(from: Date(timeIntervalSinceReferenceDate: self.duration))
details.progress = 0.0
default:
if let audioPlayer = audioPlayer {
details.currentTime = VoiceMessagePlaybackController.timeFormatter.string(from: Date(timeIntervalSinceReferenceDate: audioPlayer.currentTime))
details.progress = (audioPlayer.duration > 0.0 ? audioPlayer.currentTime / audioPlayer.duration : 0.0)
}
}
// Show the current time if the player is paused, show duration when at 0.
let duration = self.duration
let currentTime = audioPlayer?.currentTime ?? 0
let displayTime = currentTime > 0 ? currentTime : duration
details.currentTime = VoiceMessagePlaybackController.timeFormatter.string(from: Date(timeIntervalSinceReferenceDate: displayTime))
details.progress = duration > 0 ? currentTime / duration : 0
details.loading = self.loading
playbackView.configureWithDetails(details)
}
@@ -16,9 +16,12 @@
import Foundation
import Reusable
import UIKit
import MatrixSDK
protocol VoiceMessagePlaybackViewDelegate: AnyObject {
func voiceMessagePlaybackViewDidRequestPlaybackToggle()
func voiceMessagePlaybackViewDidRequestSeek(to progress: CGFloat)
func voiceMessagePlaybackViewDidChangeWidth()
}
@@ -40,6 +43,7 @@ class VoiceMessagePlaybackView: UIView, NibLoadable, Themable {
private var _waveformView: VoiceMessageWaveformView!
private var currentTheme: Theme?
private var scrubProgress: CGFloat?
@IBOutlet private var backgroundView: UIView!
@IBOutlet private var recordingIcon: UIView!
@@ -138,4 +142,22 @@ class VoiceMessagePlaybackView: UIView, NibLoadable, Themable {
@IBAction private func onPlayButtonTap() {
delegate?.voiceMessagePlaybackViewDidRequestPlaybackToggle()
}
// Converts a tap on the waveform into a seek request at the
// corresponding fractional progress (0...1).
@IBAction private func tap(gestureRecognizer: UITapGestureRecognizer) {
    let waveformWidth = waveformContainerView.bounds.width
    // Clamp so touches slightly outside the waveform still map into range.
    let tapX = gestureRecognizer.location(in: waveformContainerView).x.clamped(to: 0...waveformWidth)
    delegate?.voiceMessagePlaybackViewDidRequestSeek(to: tapX / waveformWidth)
}
// Tracks a horizontal pan over the waveform as a continuous scrub,
// emitting seek requests while the gesture is active and clearing the
// scrub state when it ends or is cancelled.
@IBAction private func pan(gestureRecognizer: UIPanGestureRecognizer) {
    let state = gestureRecognizer.state
    if state == .began || state == .changed {
        let waveformWidth = waveformContainerView.bounds.width
        let panX = gestureRecognizer.location(in: waveformContainerView).x.clamped(to: 0...waveformWidth)
        let currentProgress = panX / waveformWidth
        scrubProgress = currentProgress
        delegate?.voiceMessagePlaybackViewDidRequestSeek(to: currentProgress)
    } else {
        // Gesture finished/cancelled/failed: no longer scrubbing.
        scrubProgress = nil
    }
}
}
@@ -50,6 +50,11 @@
<view contentMode="scaleToFill" translatesAutoresizingMaskIntoConstraints="NO" id="7Fl-yZ-dZB">
<rect key="frame" x="94" y="7" width="317" height="30"/>
<color key="backgroundColor" white="0.0" alpha="0.0" colorSpace="custom" customColorSpace="genericGamma22GrayColorSpace"/>
<gestureRecognizers/>
<connections>
<outletCollection property="gestureRecognizers" destination="TvB-7x-51j" appends="YES" id="2ac-M3-OKn"/>
<outletCollection property="gestureRecognizers" destination="Fwa-Eg-VFh" appends="YES" id="meS-g1-yIc"/>
</connections>
</view>
</subviews>
<constraints>
@@ -81,9 +86,19 @@
</connections>
<point key="canvasLocation" x="-1742.753623188406" y="-299.33035714285711"/>
</view>
<panGestureRecognizer minimumNumberOfTouches="1" id="TvB-7x-51j">
<connections>
<action selector="panWithGestureRecognizer:" destination="cGR-49-HWB" id="mcT-Wy-ePL"/>
</connections>
</panGestureRecognizer>
<tapGestureRecognizer id="Fwa-Eg-VFh">
<connections>
<action selector="tapWithGestureRecognizer:" destination="cGR-49-HWB" id="VMd-gQ-DPy"/>
</connections>
</tapGestureRecognizer>
</objects>
<resources>
<image name="voice_message_play_button" width="12.5" height="15"/>
<image name="voice_message_play_button" width="15.5" height="15"/>
<image name="voice_message_record_icon" width="10" height="10"/>
</resources>
</document>
@@ -23,6 +23,7 @@ protocol VoiceMessageToolbarViewDelegate: AnyObject {
func voiceMessageToolbarViewDidRequestRecordingFinish(_ toolbarView: VoiceMessageToolbarView)
func voiceMessageToolbarViewDidRequestLockedModeRecording(_ toolbarView: VoiceMessageToolbarView)
func voiceMessageToolbarViewDidRequestPlaybackToggle(_ toolbarView: VoiceMessageToolbarView)
func voiceMessageToolbarViewDidRequestSeek(to progress: CGFloat)
func voiceMessageToolbarViewDidRequestSend(_ toolbarView: VoiceMessageToolbarView)
}
@@ -93,6 +94,7 @@ class VoiceMessageToolbarView: PassthroughView, NibLoadable, Themable, UIGesture
private var lockChevronToRecordButtonDistance: CGFloat = 0.0
private var lockChevronToLockButtonDistance: CGFloat = 0.0
private var panDirection: UISwipeGestureRecognizer.Direction?
private var tapGesture: UITapGestureRecognizer!
private var details: VoiceMessageToolbarViewDetails?
@@ -126,6 +128,8 @@ class VoiceMessageToolbarView: PassthroughView, NibLoadable, Themable, UIGesture
let tapGesture = UITapGestureRecognizer(target: self, action: #selector(handleWaveformTap))
playbackView.waveformView.addGestureRecognizer(tapGesture)
tapGesture.delegate = self
self.tapGesture = tapGesture
updateUIWithDetails(VoiceMessageToolbarViewDetails(), animated: false)
}
@@ -183,11 +187,22 @@ class VoiceMessageToolbarView: PassthroughView, NibLoadable, Themable, UIGesture
return true
}
// Blocks the waveform tap gesture outside locked-mode recording; every
// other gesture is always allowed to begin.
override func gestureRecognizerShouldBegin(_ gestureRecognizer: UIGestureRecognizer) -> Bool {
    let isWaveformTap = (gestureRecognizer == self.tapGesture)
    return !(isWaveformTap && self.lastUIState != .lockedModeRecord)
}
// MARK: - VoiceMessagePlaybackViewDelegate
// VoiceMessagePlaybackViewDelegate: forwards the play/pause toggle from
// the embedded playback view to this toolbar's own delegate.
func voiceMessagePlaybackViewDidRequestPlaybackToggle() {
delegate?.voiceMessageToolbarViewDidRequestPlaybackToggle(self)
}
// VoiceMessagePlaybackViewDelegate: forwards a seek request (fractional
// progress, 0...1) from the embedded playback view to the toolbar delegate.
func voiceMessagePlaybackViewDidRequestSeek(to progress: CGFloat) {
delegate?.voiceMessageToolbarViewDidRequestSeek(to: progress)
}
func voiceMessagePlaybackViewDidChangeWidth() {