Display live voice broadcast

This commit is contained in:
yostyle
2022-10-20 16:47:56 +02:00
parent 88cc160cb4
commit 412e760a21
9 changed files with 156 additions and 32 deletions
@@ -22,6 +22,7 @@ struct VoiceBroadcastPlaybackCoordinatorParameters {
let session: MXSession
let room: MXRoom
let voiceBroadcastStartEvent: MXEvent
let voiceBroadcastState: VoiceBroadcastInfo.State
let senderDisplayName: String?
}
@@ -45,7 +46,7 @@ final class VoiceBroadcastPlaybackCoordinator: Coordinator, Presentable {
init(parameters: VoiceBroadcastPlaybackCoordinatorParameters) throws {
self.parameters = parameters
let voiceBroadcastAggregator = try VoiceBroadcastAggregator(session: parameters.session, room: parameters.room, voiceBroadcastStartEventId: parameters.voiceBroadcastStartEvent.eventId)
let voiceBroadcastAggregator = try VoiceBroadcastAggregator(session: parameters.session, room: parameters.room, voiceBroadcastStartEventId: parameters.voiceBroadcastStartEvent.eventId, voiceBroadcastState: parameters.voiceBroadcastState)
let details = VoiceBroadcastPlaybackDetails(senderDisplayName: parameters.senderDisplayName)
viewModel = VoiceBroadcastPlaybackViewModel(details: details,
@@ -35,9 +35,26 @@ class VoiceBroadcastPlaybackProvider {
return coordinator.toPresentable().view
}
let dispatchGroup = DispatchGroup()
dispatchGroup.enter()
var voiceBroadcastState = VoiceBroadcastInfo.State.stopped
room.state { roomState in
if let stateEvent = roomState?.stateEvents(with: .custom(VoiceBroadcastSettings.voiceBroadcastInfoContentKeyType))?.last,
stateEvent.stateKey == event.stateKey,
let voiceBroadcastInfo = VoiceBroadcastInfo(fromJSON: stateEvent.content),
(stateEvent.eventId == event.eventId || voiceBroadcastInfo.eventId == event.eventId),
let state = VoiceBroadcastInfo.State(rawValue: voiceBroadcastInfo.state) {
voiceBroadcastState = state
}
dispatchGroup.leave()
}
let parameters = VoiceBroadcastPlaybackCoordinatorParameters(session: session,
room: room,
voiceBroadcastStartEvent: event,
voiceBroadcastState: voiceBroadcastState,
senderDisplayName: senderDisplayName)
guard let coordinator = try? VoiceBroadcastPlaybackCoordinator(parameters: parameters) else {
return nil
@@ -46,6 +63,7 @@ class VoiceBroadcastPlaybackProvider {
coordinatorsForEventIdentifiers[event.eventId] = coordinator
return coordinator.toPresentable().view
}
/// Retrieve the voiceBroadcast timeline coordinator for the given event or nil if it hasn't been created yet
@@ -23,6 +23,13 @@ struct VoiceBroadcastPlaybackView: View {
@Environment(\.theme) private var theme: ThemeSwiftUI
// Background tint for the live indicator: alert colour while the live
// stream is actually playing, otherwise the muted quarterly content colour.
private var backgroundColor: Color {
viewModel.viewState.playbackState == .playingLive ? theme.colors.alert : theme.colors.quarterlyContent
}
// MARK: Public
@ObservedObject var viewModel: VoiceBroadcastPlaybackViewModel.Context
@@ -30,25 +37,52 @@ struct VoiceBroadcastPlaybackView: View {
var body: some View {
let details = viewModel.viewState.details
VStack(alignment: .leading, spacing: 16.0) {
Text(details.senderDisplayName ?? "")
//Text(VectorL10n.voiceBroadcastInTimelineTitle)
.font(theme.fonts.bodySB)
.foregroundColor(theme.colors.primaryContent)
VStack(alignment: .center, spacing: 16.0) {
HStack {
Text(details.senderDisplayName ?? "")
//Text(VectorL10n.voiceBroadcastInTimelineTitle)
.font(theme.fonts.bodySB)
.foregroundColor(theme.colors.primaryContent)
if viewModel.viewState.broadcastState == .live {
Button { viewModel.send(viewAction: .playLive) } label:
{
HStack {
Image(uiImage: Asset.Images.voiceBroadcastLive.image)
.renderingMode(.original)
Text("Live")
.font(theme.fonts.bodySB)
.foregroundColor(Color.white)
}
}
.accessibilityIdentifier("liveButton")
.background(backgroundColor)
}
}
if viewModel.viewState.playbackState == .error {
VoiceBroadcastPlaybackErrorView()
} else {
HStack(alignment: .top, spacing: 16.0) {
if viewModel.viewState.playbackState == .playing {
ZStack {
if viewModel.viewState.playbackState == .playing ||
viewModel.viewState.playbackState == .playingLive {
Button { viewModel.send(viewAction: .pause) } label: {
Image("voice_broadcast_pause")
Image(uiImage: Asset.Images.voiceBroadcastPause.image)
.renderingMode(.original)
}
.accessibilityIdentifier("pauseButton")
} else {
Button { viewModel.send(viewAction: .play) } label: {
Image("voice_broadcast_play")
} else {
Button {
if viewModel.viewState.broadcastState == .live &&
viewModel.viewState.playbackState == .stopped {
viewModel.send(viewAction: .playLive)
} else {
viewModel.send(viewAction: .play)
}
} label: {
Image(uiImage: Asset.Images.voiceBroadcastPlay.image)
.renderingMode(.original)
}
.disabled(viewModel.viewState.playbackState == .buffering)
@@ -47,7 +47,7 @@ class VoiceBroadcastPlaybackViewModel: VoiceBroadcastPlaybackViewModelType, Voic
self.voiceBroadcastAggregator = voiceBroadcastAggregator
let viewState = VoiceBroadcastPlaybackViewState(details: details,
broadcastState: .unknown,
broadcastState: VoiceBroadcastPlaybackViewModel.getBroadcastState(from: voiceBroadcastAggregator.voiceBroadcastState),
playbackState: .stopped,
bindings: VoiceBroadcastPlaybackViewStateBindings())
super.init(initialViewState: viewState)
@@ -239,6 +239,22 @@ class VoiceBroadcastPlaybackViewModel: VoiceBroadcastPlaybackViewModelType, Voic
self.processNextVoiceBroadcastChunk()
}
}
/// Maps a raw `VoiceBroadcastInfo.State` (from the room state event) to the
/// playback view model's `VoiceBroadcastState`.
/// Both `.started` and `.resumed` mean the broadcast is currently live.
private static func getBroadcastState(from state: VoiceBroadcastInfo.State) -> VoiceBroadcastState {
switch state {
case .started, .resumed:
return .live
case .paused:
return .paused
case .stopped:
return .stopped
}
}
}
// MARK: VoiceBroadcastAggregatorDelegate
@@ -257,6 +273,10 @@ extension VoiceBroadcastPlaybackViewModel: VoiceBroadcastAggregatorDelegate {
voiceBroadcastChunkQueue.append(didReceiveChunk)
}
/// Aggregator delegate callback: a new broadcast state was received from the
/// room state; mirror it into the view state so the UI (live badge, play
/// button behaviour) updates accordingly.
func voiceBroadcastAggregator(_ aggregator: VoiceBroadcastAggregator, didReceiveState: VoiceBroadcastInfo.State) {
state.broadcastState = VoiceBroadcastPlaybackViewModel.getBroadcastState(from: didReceiveState)
}
func voiceBroadcastAggregatorDidUpdateData(_ aggregator: VoiceBroadcastAggregator) {
if isLivePlayback && state.playbackState == .buffering {
// We started directly with a live playback but there was no known chuncks at that time