Merge branch 'develop' into mauroromito/inline_code

This commit is contained in:
Mauro Romito
2022-12-16 10:09:31 +01:00
31 changed files with 427 additions and 68 deletions
@@ -56,6 +56,23 @@ class VoiceBroadcastPlaybackViewModel: VoiceBroadcastPlaybackViewModelType, Voic
return (!isPlaybackInitialized || isPlayingLastChunk) && (state.broadcastState == .started || state.broadcastState == .resumed)
}
private static let defaultBackwardForwardValue: Float = 30000.0 // 30sec in ms
/// Formatter for time labels of one hour or more ("h:mm:ss" style).
/// Stored rather than computed: `DateComponentsFormatter` creation is
/// expensive and this configuration never changes, so build it once.
/// NOTE(review): unlike `shortDateFormatter` this one does not set
/// `zeroFormattingBehavior = .pad` — presumably intentional so hours have
/// no leading zero; confirm before unifying.
private let fullDateFormatter: DateComponentsFormatter = {
    let formatter = DateComponentsFormatter()
    formatter.unitsStyle = .positional
    formatter.allowedUnits = [.hour, .minute, .second]
    return formatter
}()
/// Formatter for time labels under one hour ("mm:ss", zero-padded).
/// Stored rather than computed: `DateComponentsFormatter` creation is
/// expensive and this configuration never changes, so build it once.
private let shortDateFormatter: DateComponentsFormatter = {
    let formatter = DateComponentsFormatter()
    formatter.unitsStyle = .positional
    formatter.zeroFormattingBehavior = .pad
    formatter.allowedUnits = [.minute, .second]
    return formatter
}()
// MARK: Public
// MARK: - Setup
@@ -71,7 +88,7 @@ class VoiceBroadcastPlaybackViewModel: VoiceBroadcastPlaybackViewModelType, Voic
let viewState = VoiceBroadcastPlaybackViewState(details: details,
broadcastState: voiceBroadcastAggregator.voiceBroadcastState,
playbackState: .stopped,
playingState: VoiceBroadcastPlayingState(duration: Float(voiceBroadcastAggregator.voiceBroadcast.duration), isLive: false),
playingState: VoiceBroadcastPlayingState(duration: Float(voiceBroadcastAggregator.voiceBroadcast.duration), isLive: false, canMoveForward: false, canMoveBackward: false),
bindings: VoiceBroadcastPlaybackViewStateBindings(progress: 0))
super.init(initialViewState: viewState)
@@ -101,6 +118,10 @@ class VoiceBroadcastPlaybackViewModel: VoiceBroadcastPlaybackViewModelType, Voic
pause()
case .sliderChange(let didChange):
didSliderChanged(didChange)
case .backward:
backward()
case .forward:
forward()
}
}
@@ -164,6 +185,49 @@ class VoiceBroadcastPlaybackViewModel: VoiceBroadcastPlaybackViewModelType, Voic
audioPlayer?.stop()
}
/// Backward (30sec) a voice broadcast
/// Jumps the playback position back by the default step (30 s in ms),
/// clamping at the start of the broadcast.
private func backward() {
    let target = context.progress - Self.defaultBackwardForwardValue
    seek(to: max(target, 0.0))
}
/// Forward (30sec) a voice broadcast
/// Jumps the playback position forward by the default step (30 s in ms),
/// clamping at the end of the broadcast.
private func forward() {
    let target = context.progress + Self.defaultBackwardForwardValue
    seek(to: min(target, state.playingState.duration))
}
/// Moves playback to an arbitrary position within the broadcast.
/// - Parameter seekTime: target position, in milliseconds from the start
///   (same unit as `state.bindings.progress`).
private func seek(to seekTime: Float) {
// Flush the chunks queue and the current audio player playlist
voiceBroadcastChunkQueue = []
reloadVoiceBroadcastChunkQueue = isProcessingVoiceBroadcastChunk
audioPlayer?.removeAllPlayerItems()
let chunks = reorderVoiceBroadcastChunks(chunks: Array(voiceBroadcastAggregator.voiceBroadcast.chunks))
// Reinject the chunks we need and play them.
// Walk the ordered chunks from the end, accumulating duration until the
// accumulated tail covers the time remaining after seekTime — those are
// the only chunks that still need to be played.
let remainingTime = state.playingState.duration - seekTime
var chunksDuration: UInt = 0
for chunk in chunks.reversed() {
chunksDuration += chunk.duration
voiceBroadcastChunkQueue.append(chunk)
if Float(chunksDuration) >= remainingTime {
break
}
}
MXLog.debug("[VoiceBroadcastPlaybackViewModel] seekTo: restart to time: \(seekTime) milliseconds")
// Offset of seekTime inside the first re-queued chunk (ms), converted to
// seconds for the audio player.
let time = seekTime - state.playingState.duration + Float(chunksDuration)
seekToChunkTime = TimeInterval(time / 1000)
// Check the condition to resume the playback when data will be ready (after the chunk process).
if state.playbackState != .stopped, isActuallyPaused == false {
state.playbackState = .buffering
}
processPendingVoiceBroadcastChunks()
state.bindings.progress = seekTime
updateUI()
}
// MARK: - Voice broadcast chunks playback
@@ -281,12 +345,16 @@ class VoiceBroadcastPlaybackViewModel: VoiceBroadcastPlaybackViewModelType, Voic
/// Refreshes the total duration (raw value in ms + abbreviated label)
/// from the aggregator and pushes the change to the UI.
private func updateDuration() {
    let durationInMs = voiceBroadcastAggregator.voiceBroadcast.duration
    let durationFormatter = DateComponentsFormatter()
    durationFormatter.unitsStyle = .abbreviated
    state.playingState.duration = Float(durationInMs)
    state.playingState.durationLabel = durationFormatter.string(from: TimeInterval(durationInMs / 1000))
    updateUI()
}
/// Picks the formatter matching the magnitude of `time`: the hour-aware
/// one for durations of an hour or more, otherwise minutes/seconds only.
private func dateFormatter(for time: TimeInterval) -> DateComponentsFormatter {
    time >= 3600 ? fullDateFormatter : shortDateFormatter
}
private func didSliderChanged(_ didChange: Bool) {
@@ -295,40 +363,11 @@ class VoiceBroadcastPlaybackViewModel: VoiceBroadcastPlaybackViewModelType, Voic
audioPlayer?.pause()
displayLink.isPaused = true
} else {
// Flush the chunks queue and the current audio player playlist
voiceBroadcastChunkQueue = []
reloadVoiceBroadcastChunkQueue = isProcessingVoiceBroadcastChunk
audioPlayer?.removeAllPlayerItems()
let chunks = reorderVoiceBroadcastChunks(chunks: Array(voiceBroadcastAggregator.voiceBroadcast.chunks))
// Reinject the chunks we need and play them
let remainingTime = state.playingState.duration - state.bindings.progress
var chunksDuration: UInt = 0
for chunk in chunks.reversed() {
chunksDuration += chunk.duration
voiceBroadcastChunkQueue.append(chunk)
if Float(chunksDuration) >= remainingTime {
break
}
}
MXLog.debug("[VoiceBroadcastPlaybackViewModel] didSliderChanged: restart to time: \(state.bindings.progress) milliseconds")
let time = state.bindings.progress - state.playingState.duration + Float(chunksDuration)
seekToChunkTime = TimeInterval(time / 1000)
// Check the condition to resume the playback when data will be ready (after the chunk process).
if state.playbackState != .stopped, isActuallyPaused == false {
state.playbackState = .buffering
}
processPendingVoiceBroadcastChunks()
seek(to: state.bindings.progress)
}
}
/// Display-link callback: refreshes the derived playing state once per tick.
@objc private func handleDisplayLinkTick() {
updateUI()
}
private func updateUI() {
guard let playingEventId = voiceBroadcastAttachmentCacheManagerLoadResults.first(where: { result in
result.url == audioPlayer?.currentUrl
})?.eventIdentifier,
@@ -343,6 +382,25 @@ class VoiceBroadcastPlaybackViewModel: VoiceBroadcastPlaybackViewModelType, Voic
}.reduce(0) { $0 + $1.duration}) + (audioPlayer?.currentTime.rounded() ?? 0) * 1000
state.bindings.progress = Float(progress)
updateUI()
}
/// Recomputes the derived playing state shown by the view: elapsed and
/// remaining time labels plus backward/forward button availability.
private func updateUI() {
    let totalSeconds = TimeInterval(state.playingState.duration / 1000)
    let elapsedSeconds = TimeInterval(state.bindings.progress / 1000)
    let remainingSeconds = totalSeconds - elapsedSeconds
    let formatter = dateFormatter(for: totalSeconds)

    var remainingLabel = ""
    if let formatted = formatter.string(from: remainingSeconds) {
        // No leading "-" once the broadcast has been fully played back.
        remainingLabel = Int(remainingSeconds) == 0 ? formatted : "-" + formatted
    }

    state.playingState.elapsedTimeLabel = formatter.string(from: elapsedSeconds)
    state.playingState.remainingTimeLabel = remainingLabel
    state.playingState.canMoveBackward = state.bindings.progress > 0
    state.playingState.canMoveForward = state.bindings.progress < state.playingState.duration
}
private func handleVoiceBroadcastChunksProcessing() {
@@ -49,9 +49,9 @@ struct VoiceBroadcastPlaybackView: View {
VStack(alignment: .center) {
HStack (alignment: .top) {
AvatarImage(avatarData: viewModel.viewState.details.avatarData, size: .xSmall)
AvatarImage(avatarData: viewModel.viewState.details.avatarData, size: .small)
VStack(alignment: .leading, spacing: 0) {
VStack(alignment: .leading, spacing: 3) {
Text(details.avatarData.displayName ?? details.avatarData.matrixItemId)
.font(theme.fonts.bodySB)
.foregroundColor(theme.colors.primaryContent)
@@ -106,11 +106,24 @@ struct VoiceBroadcastPlaybackView: View {
}
}
.frame(maxWidth: .infinity, alignment: .leading)
.padding(EdgeInsets(top: 0.0, leading: 0.0, bottom: 4.0, trailing: 0.0))
if viewModel.viewState.playbackState == .error {
VoiceBroadcastPlaybackErrorView()
} else {
ZStack {
HStack (spacing: 34.0) {
if viewModel.viewState.playingState.canMoveBackward {
Button {
viewModel.send(viewAction: .backward)
} label: {
Image(uiImage: Asset.Images.voiceBroadcastBackward30s.image)
.renderingMode(.original)
}
.accessibilityIdentifier("backwardButton")
} else {
Spacer().frame(width: 25.0)
}
if viewModel.viewState.playbackState == .playing || viewModel.viewState.playbackState == .buffering {
Button { viewModel.send(viewAction: .pause) } label: {
Image(uiImage: Asset.Images.voiceBroadcastPause.image)
@@ -125,21 +138,41 @@ struct VoiceBroadcastPlaybackView: View {
.disabled(viewModel.viewState.playbackState == .buffering)
.accessibilityIdentifier("playButton")
}
if viewModel.viewState.playingState.canMoveForward {
Button {
viewModel.send(viewAction: .forward)
} label: {
Image(uiImage: Asset.Images.voiceBroadcastForward30s.image)
.renderingMode(.original)
}
.accessibilityIdentifier("forwardButton")
} else {
Spacer().frame(width: 25.0)
}
}
.padding(EdgeInsets(top: 10.0, leading: 0.0, bottom: 10.0, trailing: 0.0))
}
Slider(value: $viewModel.progress, in: 0...viewModel.viewState.playingState.duration) {
Text("Slider")
} minimumValueLabel: {
Text("")
} maximumValueLabel: {
Text(viewModel.viewState.playingState.durationLabel ?? "").font(.body)
} onEditingChanged: { didChange in
VoiceBroadcastSlider(value: $viewModel.progress,
minValue: 0.0,
maxValue: viewModel.viewState.playingState.duration) { didChange in
viewModel.send(viewAction: .sliderChange(didChange: didChange))
}
HStack {
Text(viewModel.viewState.playingState.elapsedTimeLabel ?? "")
.foregroundColor(theme.colors.secondaryContent)
.font(theme.fonts.caption1)
.padding(EdgeInsets(top: -8.0, leading: 4.0, bottom: 0.0, trailing: 0.0))
Spacer()
Text(viewModel.viewState.playingState.remainingTimeLabel ?? "")
.foregroundColor(theme.colors.secondaryContent)
.font(theme.fonts.caption1)
.padding(EdgeInsets(top: -8.0, leading: 0.0, bottom: 0.0, trailing: 4.0))
}
}
.padding([.horizontal, .top], 2.0)
.padding([.bottom])
.padding(EdgeInsets(top: 12.0, leading: 4.0, bottom: 12.0, trailing: 4.0))
}
}
@@ -0,0 +1,69 @@
//
// Copyright 2022 New Vector Ltd
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
import SwiftUI
/// Customized UISlider for SwiftUI.
///
/// Wraps a `UISlider` with custom thumb/track assets, reports the current
/// value through the `value` binding, and reports the editing state through
/// `onEditingChanged` (`true` while the user is tracking the thumb).
struct VoiceBroadcastSlider: UIViewRepresentable {
    @Binding var value: Float
    var minValue: Float = 0.0
    var maxValue: Float = 1.0
    var onEditingChanged: ((Bool) -> Void)?

    func makeUIView(context: Context) -> UISlider {
        let slider = UISlider(frame: .zero)
        slider.setThumbImage(Asset.Images.voiceBroadcastSliderThumb.image, for: .normal)
        slider.setMinimumTrackImage(Asset.Images.voiceBroadcastSliderMinTrack.image, for: .normal)
        slider.setMaximumTrackImage(Asset.Images.voiceBroadcastSliderMaxTrack.image, for: .normal)
        slider.minimumValue = minValue
        slider.maximumValue = maxValue
        slider.value = value
        slider.addTarget(context.coordinator, action: #selector(Coordinator.valueChanged(_:)), for: .valueChanged)
        // touchDown opens an editing session (isTracking == true); the two
        // touchUp events close it (isTracking == false).
        slider.addTarget(context.coordinator, action: #selector(Coordinator.sliderEditingChanged(_:)), for: .touchUpInside)
        slider.addTarget(context.coordinator, action: #selector(Coordinator.sliderEditingChanged(_:)), for: .touchUpOutside)
        slider.addTarget(context.coordinator, action: #selector(Coordinator.sliderEditingChanged(_:)), for: .touchDown)
        return slider
    }

    func updateUIView(_ uiView: UISlider, context: Context) {
        // Fix: also keep the range in sync. For a live broadcast the caller's
        // maxValue (the broadcast duration) keeps growing after makeUIView;
        // updating only `value` against a stale range misplaces the thumb.
        uiView.minimumValue = minValue
        uiView.maximumValue = maxValue
        uiView.value = value
    }

    func makeCoordinator() -> VoiceBroadcastSlider.Coordinator {
        Coordinator(parent: self, value: $value)
    }

    /// Bridges UIKit target/action callbacks back into the SwiftUI binding.
    class Coordinator: NSObject {
        var parent: VoiceBroadcastSlider
        var value: Binding<Float>

        init(parent: VoiceBroadcastSlider, value: Binding<Float>) {
            self.value = value
            self.parent = parent
        }

        @objc func valueChanged(_ sender: UISlider) {
            self.value.wrappedValue = sender.value
        }

        @objc func sliderEditingChanged(_ sender: UISlider) {
            parent.onEditingChanged?(sender.isTracking)
        }
    }
}
@@ -21,6 +21,8 @@ enum VoiceBroadcastPlaybackViewAction {
case play
case pause
case sliderChange(didChange: Bool)
case backward
case forward
}
enum VoiceBroadcastPlaybackState {
@@ -38,8 +40,11 @@ struct VoiceBroadcastPlaybackDetails {
/// Derived playback-progress state rendered by the voice broadcast player UI.
struct VoiceBroadcastPlayingState {
// Total duration of the broadcast, in milliseconds.
var duration: Float
// Human-readable total duration (abbreviated units); nil until computed.
var durationLabel: String?
// Formatted time already played (positional, e.g. "01:23").
var elapsedTimeLabel: String?
// Formatted time left, prefixed with "-" while playback is unfinished.
var remainingTimeLabel: String?
// NOTE(review): presumably true while the broadcast is still being
// recorded — confirm against the aggregator's state.
var isLive: Bool
// Forward (+30 s) button availability: progress < duration.
var canMoveForward: Bool
// Backward (-30 s) button availability: progress > 0.
var canMoveBackward: Bool
}
struct VoiceBroadcastPlaybackViewState: BindableState {
@@ -43,7 +43,7 @@ enum MockVoiceBroadcastPlaybackScreenState: MockScreenState, CaseIterable {
var screenView: ([Any], AnyView) {
let details = VoiceBroadcastPlaybackDetails(senderDisplayName: "Alice", avatarData: AvatarInput(mxContentUri: "", matrixItemId: "!fakeroomid:matrix.org", displayName: "The name of the room"))
let viewModel = MockVoiceBroadcastPlaybackViewModel(initialViewState: VoiceBroadcastPlaybackViewState(details: details, broadcastState: .started, playbackState: .stopped, playingState: VoiceBroadcastPlayingState(duration: 10.0, isLive: true), bindings: VoiceBroadcastPlaybackViewStateBindings(progress: 0)))
let viewModel = MockVoiceBroadcastPlaybackViewModel(initialViewState: VoiceBroadcastPlaybackViewState(details: details, broadcastState: .started, playbackState: .stopped, playingState: VoiceBroadcastPlayingState(duration: 10.0, isLive: true, canMoveForward: false, canMoveBackward: false), bindings: VoiceBroadcastPlaybackViewStateBindings(progress: 0)))
return (
[false, viewModel],
@@ -57,6 +57,7 @@ final class VoiceBroadcastRecorderCoordinator: Coordinator, Presentable {
/// Builds the hosting controller presenting the recorder screen,
/// injecting the avatar service dependency into the SwiftUI view.
func toPresentable() -> UIViewController {
let avatarService = AvatarService.instantiate(mediaManager: parameters.session.mediaManager)
let recorderView = VoiceBroadcastRecorderView(viewModel: voiceBroadcastRecorderViewModel.context)
.addDependency(avatarService)
return VectorHostingController(rootView: recorderView)
}