Merge pull request #7146 from vector-im/phlpro/voice_broadcast_backward_forward

VoiceBroadcast: Add backward and forward buttons for playback
This commit is contained in:
Phl-Pro
2022-12-14 17:34:49 +01:00
committed by GitHub
11 changed files with 124 additions and 43 deletions
@@ -56,6 +56,8 @@ class VoiceBroadcastPlaybackViewModel: VoiceBroadcastPlaybackViewModelType, Voic
return (!isPlaybackInitialized || isPlayingLastChunk) && (state.broadcastState == .started || state.broadcastState == .resumed)
}
private static let defaultBackwardForwardValue: Float = 30000.0 // 30sec in ms
// MARK: Public
// MARK: - Setup
@@ -71,7 +73,7 @@ class VoiceBroadcastPlaybackViewModel: VoiceBroadcastPlaybackViewModelType, Voic
let viewState = VoiceBroadcastPlaybackViewState(details: details,
broadcastState: voiceBroadcastAggregator.voiceBroadcastState,
playbackState: .stopped,
playingState: VoiceBroadcastPlayingState(duration: Float(voiceBroadcastAggregator.voiceBroadcast.duration), isLive: false),
playingState: VoiceBroadcastPlayingState(duration: Float(voiceBroadcastAggregator.voiceBroadcast.duration), isLive: false, canMoveForward: false, canMoveBackward: false),
bindings: VoiceBroadcastPlaybackViewStateBindings(progress: 0))
super.init(initialViewState: viewState)
@@ -101,6 +103,10 @@ class VoiceBroadcastPlaybackViewModel: VoiceBroadcastPlaybackViewModelType, Voic
pause()
case .sliderChange(let didChange):
didSliderChanged(didChange)
case .backward:
backward()
case .forward:
forward()
}
}
@@ -164,6 +170,49 @@ class VoiceBroadcastPlaybackViewModel: VoiceBroadcastPlaybackViewModelType, Voic
audioPlayer?.stop()
}
/// Backward (30sec) a voice broadcast
/// Skip the playback position backward by the default skip amount
/// (`defaultBackwardForwardValue`, 30 s in ms), clamped at the start
/// of the broadcast.
private func backward() {
    let target = context.progress - Self.defaultBackwardForwardValue
    seek(to: max(target, 0.0))
}
/// Forward (30sec) a voice broadcast
/// Skip the playback position forward by the default skip amount
/// (`defaultBackwardForwardValue`, 30 s in ms), clamped at the end
/// of the broadcast.
private func forward() {
    let target = context.progress + Self.defaultBackwardForwardValue
    seek(to: min(target, state.playingState.duration))
}
/// Restart playback from an arbitrary position inside the broadcast.
/// - Parameter seekTime: target position in milliseconds from the start of
///   the broadcast (callers clamp it to `[0, duration]`).
private func seek(to seekTime: Float) {
// Flush the chunks queue and the current audio player playlist
voiceBroadcastChunkQueue = []
// Remember whether a chunk was mid-processing so the queue can be rebuilt
// once that processing finishes — NOTE(review): exact reload semantics
// depend on the chunk-processing code not visible here; confirm.
reloadVoiceBroadcastChunkQueue = isProcessingVoiceBroadcastChunk
audioPlayer?.removeAllPlayerItems()
let chunks = reorderVoiceBroadcastChunks(chunks: Array(voiceBroadcastAggregator.voiceBroadcast.chunks))
// Reinject the chunks we need and play them: walk the chunk list from the
// end, accumulating durations, until the accumulated tail covers the time
// remaining after the seek point. Chunks are appended newest-first here;
// presumably reordered downstream before playback — confirm.
let remainingTime = state.playingState.duration - seekTime
var chunksDuration: UInt = 0
for chunk in chunks.reversed() {
chunksDuration += chunk.duration
voiceBroadcastChunkQueue.append(chunk)
if Float(chunksDuration) >= remainingTime {
break
}
}
MXLog.debug("[VoiceBroadcastPlaybackViewModel] seekTo: restart to time: \(seekTime) milliseconds")
// Offset of the seek point inside the first re-queued chunk, converted
// from milliseconds to seconds for the player.
let time = seekTime - state.playingState.duration + Float(chunksDuration)
seekToChunkTime = TimeInterval(time / 1000)
// Check the condition to resume the playback when data will be ready (after the chunk process).
if state.playbackState != .stopped, isActuallyPaused == false {
state.playbackState = .buffering
}
processPendingVoiceBroadcastChunks()
// Reflect the new position in the bound slider and refresh button states.
state.bindings.progress = seekTime
updateUI()
}
// MARK: - Voice broadcast chunks playback
@@ -295,40 +344,11 @@ class VoiceBroadcastPlaybackViewModel: VoiceBroadcastPlaybackViewModelType, Voic
audioPlayer?.pause()
displayLink.isPaused = true
} else {
// Flush the chunks queue and the current audio player playlist
voiceBroadcastChunkQueue = []
reloadVoiceBroadcastChunkQueue = isProcessingVoiceBroadcastChunk
audioPlayer?.removeAllPlayerItems()
let chunks = reorderVoiceBroadcastChunks(chunks: Array(voiceBroadcastAggregator.voiceBroadcast.chunks))
// Reinject the chunks we need and play them
let remainingTime = state.playingState.duration - state.bindings.progress
var chunksDuration: UInt = 0
for chunk in chunks.reversed() {
chunksDuration += chunk.duration
voiceBroadcastChunkQueue.append(chunk)
if Float(chunksDuration) >= remainingTime {
break
}
}
MXLog.debug("[VoiceBroadcastPlaybackViewModel] didSliderChanged: restart to time: \(state.bindings.progress) milliseconds")
let time = state.bindings.progress - state.playingState.duration + Float(chunksDuration)
seekToChunkTime = TimeInterval(time / 1000)
// Check the condition to resume the playback when data will be ready (after the chunk process).
if state.playbackState != .stopped, isActuallyPaused == false {
state.playbackState = .buffering
}
processPendingVoiceBroadcastChunks()
seek(to: state.bindings.progress)
}
}
/// Display-link callback: refreshes the derived UI state on every tick.
@objc private func handleDisplayLinkTick() {
updateUI()
}
private func updateUI() {
guard let playingEventId = voiceBroadcastAttachmentCacheManagerLoadResults.first(where: { result in
result.url == audioPlayer?.currentUrl
})?.eventIdentifier,
@@ -343,6 +363,13 @@ class VoiceBroadcastPlaybackViewModel: VoiceBroadcastPlaybackViewModelType, Voic
}.reduce(0) { $0 + $1.duration}) + (audioPlayer?.currentTime.rounded() ?? 0) * 1000
state.bindings.progress = Float(progress)
updateUI()
}
/// Recompute whether the backward/forward controls are usable from the
/// current progress: backward requires progress past the start, forward
/// requires room left before the end of the broadcast.
private func updateUI() {
    let progress = state.bindings.progress
    state.playingState.canMoveForward = progress < state.playingState.duration
    state.playingState.canMoveBackward = progress > 0
}
private func handleVoiceBroadcastChunksProcessing() {
@@ -110,7 +110,19 @@ struct VoiceBroadcastPlaybackView: View {
if viewModel.viewState.playbackState == .error {
VoiceBroadcastPlaybackErrorView()
} else {
ZStack {
HStack (spacing: 17.0) {
if viewModel.viewState.playingState.canMoveBackward {
Button {
viewModel.send(viewAction: .backward)
} label: {
Image(uiImage: Asset.Images.voiceBroadcastBackward30s.image)
.renderingMode(.original)
}
.accessibilityIdentifier("backwardButton")
} else {
Spacer().frame(width: 25.0)
}
if viewModel.viewState.playbackState == .playing || viewModel.viewState.playbackState == .buffering {
Button { viewModel.send(viewAction: .pause) } label: {
Image(uiImage: Asset.Images.voiceBroadcastPause.image)
@@ -125,6 +137,18 @@ struct VoiceBroadcastPlaybackView: View {
.disabled(viewModel.viewState.playbackState == .buffering)
.accessibilityIdentifier("playButton")
}
if viewModel.viewState.playingState.canMoveForward {
Button {
viewModel.send(viewAction: .forward)
} label: {
Image(uiImage: Asset.Images.voiceBroadcastForward30s.image)
.renderingMode(.original)
}
.accessibilityIdentifier("forwardButton")
} else {
Spacer().frame(width: 25.0)
}
}
}
@@ -21,6 +21,8 @@ enum VoiceBroadcastPlaybackViewAction {
case play
case pause
case sliderChange(didChange: Bool)
case backward
case forward
}
enum VoiceBroadcastPlaybackState {
@@ -40,6 +42,8 @@ struct VoiceBroadcastPlayingState {
var duration: Float
var durationLabel: String?
var isLive: Bool
var canMoveForward: Bool
var canMoveBackward: Bool
}
struct VoiceBroadcastPlaybackViewState: BindableState {
@@ -43,7 +43,7 @@ enum MockVoiceBroadcastPlaybackScreenState: MockScreenState, CaseIterable {
var screenView: ([Any], AnyView) {
let details = VoiceBroadcastPlaybackDetails(senderDisplayName: "Alice", avatarData: AvatarInput(mxContentUri: "", matrixItemId: "!fakeroomid:matrix.org", displayName: "The name of the room"))
let viewModel = MockVoiceBroadcastPlaybackViewModel(initialViewState: VoiceBroadcastPlaybackViewState(details: details, broadcastState: .started, playbackState: .stopped, playingState: VoiceBroadcastPlayingState(duration: 10.0, isLive: true), bindings: VoiceBroadcastPlaybackViewStateBindings(progress: 0)))
let viewModel = MockVoiceBroadcastPlaybackViewModel(initialViewState: VoiceBroadcastPlaybackViewState(details: details, broadcastState: .started, playbackState: .stopped, playingState: VoiceBroadcastPlayingState(duration: 10.0, isLive: true, canMoveForward: false, canMoveBackward: false), bindings: VoiceBroadcastPlaybackViewStateBindings(progress: 0)))
return (
[false, viewModel],