Merge commit '35b2fdc538dfc1f8fac2585704526ab26148a9ab' into feature/3977_merge_element_1_9_13

# Conflicts:
#	Config/AppVersion.xcconfig
#	Podfile.lock
#	Riot/Modules/Application/LegacyAppDelegate.m
#	Riot/Modules/Authentication/AuthenticationCoordinator.swift
#	Riot/Modules/Authentication/Legacy/LegacyAuthenticationCoordinator.swift
#	Riot/Modules/LaunchLoading/LaunchLoadingView.swift
#	Riot/Modules/LaunchLoading/LaunchLoadingView.xib
#	Riot/Modules/MatrixKit/Models/Account/MXKAccount.m
#	Riot/Modules/MatrixKit/Models/Room/MXKRoomBubbleCellData.m
#	Riot/Modules/Room/TimelineCells/Styles/Bubble/BubbleRoomTimelineCellProvider.m
#	Riot/Modules/Room/TimelineCells/Styles/Plain/PlainRoomTimelineCellProvider.m
#	Riot/Modules/TabBar/MasterTabBarController.m
#	fastlane/Fastfile
Frank Rotermund
2022-12-19 14:36:30 +01:00
245 changed files with 5346 additions and 1952 deletions
@@ -18,6 +18,7 @@ import Foundation
@objc protocol ComposerCreateActionListBridgePresenterDelegate {
func composerCreateActionListBridgePresenterDelegateDidComplete(_ coordinatorBridgePresenter: ComposerCreateActionListBridgePresenter, action: ComposerCreateAction)
func composerCreateActionListBridgePresenterDelegateDidToggleTextFormatting(_ coordinatorBridgePresenter: ComposerCreateActionListBridgePresenter, enabled: Bool)
func composerCreateActionListBridgePresenterDidDismissInteractively(_ coordinatorBridgePresenter: ComposerCreateActionListBridgePresenter)
}
@@ -34,6 +35,8 @@ final class ComposerCreateActionListBridgePresenter: NSObject {
// MARK: Private
private let actions: [ComposerCreateAction]
private let wysiwygEnabled: Bool
private let textFormattingEnabled: Bool
private var coordinator: ComposerCreateActionListCoordinator?
// MARK: Public
@@ -42,10 +45,12 @@ final class ComposerCreateActionListBridgePresenter: NSObject {
// MARK: - Setup
init(actions: [Int]) {
init(actions: [Int], wysiwygEnabled: Bool, textFormattingEnabled: Bool) {
self.actions = actions.compactMap {
ComposerCreateAction(rawValue: $0)
}
self.wysiwygEnabled = wysiwygEnabled
self.textFormattingEnabled = textFormattingEnabled
super.init()
}
@@ -57,12 +62,16 @@ final class ComposerCreateActionListBridgePresenter: NSObject {
// }
func present(from viewController: UIViewController, animated: Bool) {
let composerCreateActionListCoordinator = ComposerCreateActionListCoordinator(actions: actions)
let composerCreateActionListCoordinator = ComposerCreateActionListCoordinator(actions: actions,
wysiwygEnabled: wysiwygEnabled,
textFormattingEnabled: textFormattingEnabled)
composerCreateActionListCoordinator.callback = { [weak self] action in
guard let self = self else { return }
switch action {
case .done(let composeAction):
self.delegate?.composerCreateActionListBridgePresenterDelegateDidComplete(self, action: composeAction)
case .toggleTextFormatting(let enabled):
self.delegate?.composerCreateActionListBridgePresenterDelegateDidToggleTextFormatting(self, enabled: enabled)
case .cancel:
self.delegate?.composerCreateActionListBridgePresenterDidDismissInteractively(self)
}
@@ -19,6 +19,7 @@ import SwiftUI
/// Actions returned by the coordinator callback
enum ComposerCreateActionListCoordinatorAction {
case done(ComposerCreateAction)
case toggleTextFormatting(Bool)
case cancel
}
@@ -39,18 +40,32 @@ final class ComposerCreateActionListCoordinator: NSObject, Coordinator, Presenta
// MARK: - Setup
init(actions: [ComposerCreateAction]) {
viewModel = ComposerCreateActionListViewModel(initialViewState: ComposerCreateActionListViewState(actions: actions))
init(actions: [ComposerCreateAction], wysiwygEnabled: Bool, textFormattingEnabled: Bool) {
let isScrollingEnabled: Bool
if #available(iOS 16, *) {
isScrollingEnabled = false
} else {
isScrollingEnabled = true
}
viewModel = ComposerCreateActionListViewModel(initialViewState: ComposerCreateActionListViewState(
actions: actions,
wysiwygEnabled: wysiwygEnabled,
isScrollingEnabled: isScrollingEnabled,
bindings: ComposerCreateActionListBindings(textFormattingEnabled: textFormattingEnabled)))
view = ComposerCreateActionList(viewModel: viewModel.context)
let hostingVC = VectorHostingController(rootView: view)
let height = hostingVC.sizeThatFits(in: CGSize(width: hostingVC.view.frame.width, height: UIView.layoutFittingCompressedSize.height)).height
hostingVC.bottomSheetPreferences = VectorHostingBottomSheetPreferences(
detents: [.medium],
// on iOS 15 custom will be replaced by medium which may require some scrolling
detents: [.custom(height: height)],
prefersGrabberVisible: true,
cornerRadius: 20
cornerRadius: 20,
prefersScrollingExpandsWhenScrolledToEdge: false
)
hostingController = hostingVC
super.init()
hostingVC.presentationController?.delegate = self
hostingVC.bottomSheetPreferences?.setup(viewController: hostingVC)
}
// MARK: - Public
@@ -61,6 +76,8 @@ final class ComposerCreateActionListCoordinator: NSObject, Coordinator, Presenta
switch result {
case .done(let action):
self.callback?(.done(action))
case .toggleTextFormatting(let enabled):
self.callback?(.toggleTextFormatting(enabled))
}
}
}
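Editor's note on the detent change in this hunk: the sheet is sized to the compressed content height and pinned there on iOS 16, while iOS 15 (which has no custom detents) falls back to `.medium` with scrolling enabled. A minimal standalone sketch of the same pattern, written against plain UIKit sheet APIs rather than the project's VectorHostingController / VectorHostingBottomSheetPreferences wrappers (the function name and tuple return are illustrative only):

```swift
import SwiftUI
import UIKit

// Sketch only: the commit routes this through VectorHostingBottomSheetPreferences.
@available(iOS 15, *)
func sheetConfiguration<Content: View>(for hostingVC: UIHostingController<Content>)
    -> (isScrollingEnabled: Bool, detents: [UISheetPresentationController.Detent]) {
    if #available(iOS 16, *) {
        // Size the sheet to the compressed content height and pin it there.
        let height = hostingVC.sizeThatFits(
            in: CGSize(width: hostingVC.view.frame.width,
                       height: UIView.layoutFittingCompressedSize.height)
        ).height
        return (false, [.custom { _ in height }])
    } else {
        // iOS 15 has no custom detents: fall back to .medium and let the
        // action list scroll if it does not fit.
        return (true, [.medium()])
    }
}
```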
@@ -33,7 +33,11 @@ enum MockComposerCreateActionListScreenState: MockScreenState, CaseIterable {
case .fullList:
actions = ComposerCreateAction.allCases
}
let viewModel = ComposerCreateActionListViewModel(initialViewState: ComposerCreateActionListViewState(actions: actions))
let viewModel = ComposerCreateActionListViewModel(initialViewState: ComposerCreateActionListViewState(
actions: actions,
wysiwygEnabled: true,
isScrollingEnabled: false,
bindings: ComposerCreateActionListBindings(textFormattingEnabled: true)))
return (
[viewModel],
@@ -21,11 +21,15 @@ import Foundation
enum ComposerCreateActionListViewAction {
// The user selected an action
case selectAction(ComposerCreateAction)
// The user toggled the text formatting action
case toggleTextFormatting(Bool)
}
enum ComposerCreateActionListViewModelResult: Equatable {
// The user selected an action and is done with the screen
case done(ComposerCreateAction)
// The user toggled the text formatting setting but might not be done with the screen
case toggleTextFormatting(Bool)
}
// MARK: View
@@ -33,6 +37,14 @@ enum ComposerCreateActionListViewModelResult: Equatable {
struct ComposerCreateActionListViewState: BindableState {
/// The list of composer create actions to display to the user
let actions: [ComposerCreateAction]
let wysiwygEnabled: Bool
let isScrollingEnabled: Bool
var bindings: ComposerCreateActionListBindings
}
struct ComposerCreateActionListBindings {
var textFormattingEnabled: Bool
}
@objc enum ComposerCreateAction: Int {
@@ -23,7 +23,14 @@ class ComposerCreateActionListTests: XCTestCase {
var context: ComposerCreateActionListViewModel.Context!
override func setUpWithError() throws {
viewModel = ComposerCreateActionListViewModel(initialViewState: ComposerCreateActionListViewState(actions: ComposerCreateAction.allCases))
viewModel = ComposerCreateActionListViewModel(
initialViewState: ComposerCreateActionListViewState(
actions: ComposerCreateAction.allCases,
wysiwygEnabled: true,
isScrollingEnabled: false,
bindings: ComposerCreateActionListBindings(textFormattingEnabled: true)
)
)
context = viewModel.context
}
@@ -22,36 +22,81 @@ struct ComposerCreateActionList: View {
// MARK: Private
@Environment(\.theme) private var theme: ThemeSwiftUI
private var textFormattingIcon: String {
viewModel.textFormattingEnabled
? Asset.Images.actionFormattingEnabled.name
: Asset.Images.actionFormattingDisabled.name
}
// MARK: Public
@ObservedObject var viewModel: ComposerCreateActionListViewModel.Context
var body: some View {
VStack {
VStack(alignment: .leading) {
ForEach(viewModel.viewState.actions) { action in
HStack(spacing: 16) {
Image(action.icon)
.renderingMode(.template)
.foregroundColor(theme.colors.accent)
Text(action.title)
.foregroundColor(theme.colors.primaryContent)
.font(theme.fonts.body)
.accessibilityIdentifier(action.accessibilityIdentifier)
Spacer()
}
.contentShape(Rectangle())
.onTapGesture {
viewModel.send(viewAction: .selectAction(action))
}
.padding(.horizontal, 16)
.padding(.vertical, 12)
private var internalView: some View {
VStack(alignment: .leading) {
ForEach(viewModel.viewState.actions) { action in
HStack(spacing: 16) {
Image(action.icon)
.renderingMode(.template)
.foregroundColor(theme.colors.accent)
Text(action.title)
.foregroundColor(theme.colors.primaryContent)
.font(theme.fonts.body)
.accessibilityIdentifier(action.accessibilityIdentifier)
Spacer()
}
.contentShape(Rectangle())
.onTapGesture {
viewModel.send(viewAction: .selectAction(action))
}
.padding(.horizontal, 16)
.padding(.vertical, 12)
}
.padding(.top, 8)
Spacer()
}.background(theme.colors.background.ignoresSafeArea())
if viewModel.viewState.wysiwygEnabled {
SeparatorLine()
HStack(spacing: 16) {
Image(textFormattingIcon)
.renderingMode(.template)
.foregroundColor(theme.colors.accent)
Text(VectorL10n.wysiwygComposerStartActionTextFormatting)
.foregroundColor(theme.colors.primaryContent)
.font(theme.fonts.body)
.accessibilityIdentifier("textFormatting")
Spacer()
Toggle("", isOn: $viewModel.textFormattingEnabled)
.labelsHidden()
.toggleStyle(SwitchToggleStyle(tint: theme.colors.accent))
.onChange(of: viewModel.textFormattingEnabled) { isOn in
viewModel.send(viewAction: .toggleTextFormatting(isOn))
}
}
.contentShape(Rectangle())
.onTapGesture {
viewModel.textFormattingEnabled.toggle()
}
.padding(.horizontal, 16)
.padding(.vertical, 12)
}
}
}
var body: some View {
if viewModel.viewState.isScrollingEnabled {
ScrollView {
internalView
}
.padding(.top, 23)
.background(theme.colors.background.ignoresSafeArea())
} else {
VStack {
internalView
Spacer()
}
.padding(.top, 23)
.background(theme.colors.background.ignoresSafeArea())
}
}
}
@@ -35,6 +35,8 @@ class ComposerCreateActionListViewModel: ComposerCreateActionListViewModelType,
switch viewAction {
case .selectAction(let action):
callback?(.done(action))
case .toggleTextFormatting(let enabled):
callback?(.toggleTextFormatting(enabled))
}
}
}
@@ -29,14 +29,15 @@ enum MockComposerScreenState: MockScreenState, CaseIterable {
var screenView: ([Any], AnyView) {
let viewModel: ComposerViewModel
let bindings = ComposerBindings(focused: false)
switch self {
case .send: viewModel = ComposerViewModel(initialViewState: ComposerViewState())
case .edit: viewModel = ComposerViewModel(initialViewState: ComposerViewState(sendMode: .edit))
case .reply: viewModel = ComposerViewModel(initialViewState: ComposerViewState(eventSenderDisplayName: "TestUser", sendMode: .reply))
case .send: viewModel = ComposerViewModel(initialViewState: ComposerViewState(textFormattingEnabled: true, isLandscapePhone: false, bindings: bindings))
case .edit: viewModel = ComposerViewModel(initialViewState: ComposerViewState(sendMode: .edit, textFormattingEnabled: true, isLandscapePhone: false, bindings: bindings))
case .reply: viewModel = ComposerViewModel(initialViewState: ComposerViewState(eventSenderDisplayName: "TestUser", sendMode: .reply, textFormattingEnabled: true, isLandscapePhone: false, bindings: bindings))
}
let wysiwygviewModel = WysiwygComposerViewModel(minHeight: 20, maxHeight: 360)
let wysiwygviewModel = WysiwygComposerViewModel(minHeight: 20, maxCompressedHeight: 360)
viewModel.callback = { [weak viewModel, weak wysiwygviewModel] result in
guard let viewModel = viewModel else { return }
@@ -54,7 +55,11 @@ enum MockComposerScreenState: MockScreenState, CaseIterable {
[viewModel, wysiwygviewModel],
AnyView(VStack {
Spacer()
Composer(viewModel: viewModel.context, wysiwygViewModel: wysiwygviewModel, sendMessageAction: { _ in }, showSendMediaActions: { })
Composer(viewModel: viewModel.context,
wysiwygViewModel: wysiwygviewModel,
resizeAnimationDuration: 0.1,
sendMessageAction: { _ in },
showSendMediaActions: { })
}.frame(
minWidth: 0,
maxWidth: .infinity,
@@ -34,8 +34,8 @@ struct FormatItem {
enum FormatType {
case bold
case italic
case strikethrough
case underline
case strikethrough
}
extension FormatType: CaseIterable, Identifiable {
@@ -19,12 +19,16 @@ import Foundation
struct ComposerViewState: BindableState {
var eventSenderDisplayName: String?
var sendMode: ComposerSendMode = .send
var textFormattingEnabled: Bool
var isLandscapePhone: Bool
var placeholder: String?
var bindings: ComposerBindings
}
extension ComposerViewState {
var shouldDisplayContext: Bool {
return sendMode == .edit || sendMode == .reply
sendMode == .edit || sendMode == .reply
}
var contextDescription: String? {
@@ -44,4 +48,12 @@ extension ComposerViewState {
default: return nil
}
}
var isMinimiseForced: Bool {
isLandscapePhone || !textFormattingEnabled
}
}
struct ComposerBindings {
var focused: Bool
}
@@ -45,6 +45,42 @@ final class ComposerUITests: MockScreenTestCase {
XCTAssertTrue(maximiseButton.exists)
}
// This test requires "connect hardware keyboard" to be off on the simulator
// And may not work on the CI
func testFastTyping() throws {
app.goToScreenWithIdentifier(MockComposerScreenState.send.title)
let text = "fast typing test"
let wysiwygTextView = app.textViews.allElementsBoundByIndex[0]
XCTAssertTrue(wysiwygTextView.exists)
wysiwygTextView.tap()
sleep(1)
wysiwygTextView.typeText(text)
let options = XCTExpectedFailure.Options()
options.isStrict = false
XCTExpectFailure("Test may fail on CI", options: options)
let value = wysiwygTextView.value as? String
XCTAssert(value == text, "Text view value is: \(value ?? "nil")")
}
// This test requires "connect hardware keyboard" to be off on the simulator
// And may not work on the CI
func testLongPressDelete() throws {
app.goToScreenWithIdentifier(MockComposerScreenState.send.title)
let text = "test1 test2 test3 test4 test5 test6 test7"
let wysiwygTextView = app.textViews.allElementsBoundByIndex[0]
XCTAssertTrue(wysiwygTextView.exists)
wysiwygTextView.tap()
sleep(1)
wysiwygTextView.typeText(text)
sleep(1)
app.keys["delete"].press(forDuration: 10.0)
let options = XCTExpectedFailure.Options()
options.isStrict = false
XCTExpectFailure("Test may fail on CI", options: options)
let value = wysiwygTextView.value as? String
XCTAssert(value == "", "Text view value is: \(value ?? "nil")")
}
func testReplyMode() throws {
app.goToScreenWithIdentifier(MockComposerScreenState.reply.title)
@@ -1,4 +1,4 @@
//
//
// Copyright 2022 New Vector Ltd
//
// Licensed under the Apache License, Version 2.0 (the "License");
@@ -23,7 +23,13 @@ final class ComposerViewModelTests: XCTestCase {
var context: ComposerViewModel.Context!
override func setUpWithError() throws {
viewModel = ComposerViewModel(initialViewState: ComposerViewState())
viewModel = ComposerViewModel(
initialViewState: ComposerViewState(
textFormattingEnabled: true,
isLandscapePhone: false,
bindings: ComposerBindings(focused: false)
)
)
context = viewModel.context
}
@@ -69,4 +75,10 @@ final class ComposerViewModelTests: XCTestCase {
viewModel.placeholder = "Placeholder Test"
XCTAssert(context.viewState.placeholder == "Placeholder Test")
}
func testDimissKeyboard() {
viewModel.state.bindings.focused = true
viewModel.dismissKeyboard()
XCTAssert(context.viewState.bindings.focused == false)
}
}
@@ -14,7 +14,6 @@
// limitations under the License.
//
import DSBottomSheet
import SwiftUI
import WysiwygComposer
@@ -22,17 +21,21 @@ struct Composer: View {
// MARK: - Properties
// MARK: Private
@ObservedObject private var viewModel: ComposerViewModelType.Context
@ObservedObject private var wysiwygViewModel: WysiwygComposerViewModel
private let resizeAnimationDuration: Double
private let sendMessageAction: (WysiwygComposerContent) -> Void
private let showSendMediaActions: () -> Void
@Environment(\.theme) private var theme: ThemeSwiftUI
@State private var focused = false
@State private var isActionButtonShowing = false
private let horizontalPadding: CGFloat = 12
private let borderHeight: CGFloat = 40
private let minTextViewHeight: CGFloat = 20
private var verticalPadding: CGFloat {
(borderHeight - minTextViewHeight) / 2
(borderHeight - wysiwygViewModel.minHeight) / 2
}
private var topPadding: CGFloat {
@@ -40,13 +43,17 @@ struct Composer: View {
}
private var cornerRadius: CGFloat {
if viewModel.viewState.shouldDisplayContext || wysiwygViewModel.idealHeight > minTextViewHeight {
if shouldFixRoundCorner {
return 14
} else {
return borderHeight / 2
}
}
private var shouldFixRoundCorner: Bool {
viewModel.viewState.shouldDisplayContext || wysiwygViewModel.idealHeight > wysiwygViewModel.minHeight
}
private var actionButtonAccessibilityIdentifier: String {
viewModel.viewState.sendMode == .edit ? "editButton" : "sendButton"
}
@@ -60,69 +67,60 @@ struct Composer: View {
}
private var borderColor: Color {
focused ? theme.colors.quarterlyContent : theme.colors.quinaryContent
viewModel.focused ? theme.colors.quarterlyContent : theme.colors.quinaryContent
}
private var formatItems: [FormatItem] {
FormatType.allCases.map { type in
FormatItem(
type: type,
active: wysiwygViewModel.reversedActions.contains(type.composerAction),
disabled: wysiwygViewModel.disabledActions.contains(type.composerAction)
active: wysiwygViewModel.actionStates[type.composerAction] == .reversed,
disabled: wysiwygViewModel.actionStates[type.composerAction] == .disabled
)
}
}
// MARK: Public
@ObservedObject var viewModel: ComposerViewModelType.Context
@ObservedObject var wysiwygViewModel: WysiwygComposerViewModel
let sendMessageAction: (WysiwygComposerContent) -> Void
let showSendMediaActions: () -> Void
var body: some View {
VStack(spacing: 8) {
let rect = RoundedRectangle(cornerRadius: cornerRadius)
VStack(spacing: 12) {
if viewModel.viewState.shouldDisplayContext {
HStack {
if let imageName = viewModel.viewState.contextImageName {
Image(imageName)
.foregroundColor(theme.colors.tertiaryContent)
}
if let contextDescription = viewModel.viewState.contextDescription {
Text(contextDescription)
.accessibilityIdentifier("contextDescription")
.font(.system(size: 12, weight: .medium))
.foregroundColor(theme.colors.secondaryContent)
}
Spacer()
Button {
viewModel.send(viewAction: .cancel)
} label: {
Image(Asset.Images.inputCloseIcon.name)
.foregroundColor(theme.colors.tertiaryContent)
}
.accessibilityIdentifier("cancelButton")
private var composerContainer: some View {
let rect = RoundedRectangle(cornerRadius: cornerRadius)
return VStack(spacing: 12) {
if viewModel.viewState.shouldDisplayContext {
HStack {
if let imageName = viewModel.viewState.contextImageName {
Image(imageName)
.foregroundColor(theme.colors.tertiaryContent)
}
.padding(.top, 8)
.padding(.horizontal, horizontalPadding)
if let contextDescription = viewModel.viewState.contextDescription {
Text(contextDescription)
.accessibilityIdentifier("contextDescription")
.font(.system(size: 12, weight: .medium))
.foregroundColor(theme.colors.secondaryContent)
}
Spacer()
Button {
viewModel.send(viewAction: .cancel)
} label: {
Image(Asset.Images.inputCloseIcon.name)
.foregroundColor(theme.colors.tertiaryContent)
}
.accessibilityIdentifier("cancelButton")
}
HStack(alignment: .top, spacing: 0) {
WysiwygComposerView(
focused: $focused,
content: wysiwygViewModel.content,
replaceText: wysiwygViewModel.replaceText,
select: wysiwygViewModel.select,
didUpdateText: wysiwygViewModel.didUpdateText
)
.tintColor(theme.colors.accent)
.placeholder(viewModel.viewState.placeholder, color: theme.colors.tertiaryContent)
.frame(height: wysiwygViewModel.idealHeight)
.onAppear {
.padding(.top, 8)
.padding(.horizontal, horizontalPadding)
}
HStack(alignment: shouldFixRoundCorner ? .top : .center, spacing: 0) {
WysiwygComposerView(
focused: $viewModel.focused,
viewModel: wysiwygViewModel
)
.tintColor(theme.colors.accent)
.placeholder(viewModel.viewState.placeholder, color: theme.colors.tertiaryContent)
.frame(height: wysiwygViewModel.idealHeight)
.onAppear {
if wysiwygViewModel.isContentEmpty {
wysiwygViewModel.setup()
}
}
if !viewModel.viewState.isMinimiseForced {
Button {
wysiwygViewModel.maximised.toggle()
} label: {
@@ -135,62 +133,113 @@ struct Composer: View {
.padding(.leading, 12)
.padding(.trailing, 4)
}
.padding(.horizontal, horizontalPadding)
.padding(.top, topPadding)
.padding(.bottom, verticalPadding)
}
.clipShape(rect)
.overlay(rect.stroke(borderColor, lineWidth: 1))
.animation(.easeInOut(duration: 0.1), value: wysiwygViewModel.idealHeight)
.padding(.horizontal, horizontalPadding)
.padding(.top, 8)
.onTapGesture {
if !focused {
focused = true
.padding(.top, topPadding)
.padding(.bottom, verticalPadding)
}
.clipShape(rect)
.overlay(rect.stroke(borderColor, lineWidth: 1))
.animation(.easeInOut(duration: resizeAnimationDuration), value: wysiwygViewModel.idealHeight)
.padding(.top, 8)
.onTapGesture {
if !viewModel.focused {
viewModel.focused = true
}
}
}
private var sendMediaButton: some View {
return Button {
showSendMediaActions()
} label: {
Image(Asset.Images.startComposeModule.name)
.resizable()
.foregroundColor(theme.colors.tertiaryContent)
.frame(width: 14, height: 14)
}
.frame(width: 36, height: 36)
.background(Circle().fill(theme.colors.system))
.padding(.trailing, 8)
.accessibilityLabel(VectorL10n.create)
}
private var sendButton: some View {
return Button {
sendMessageAction(wysiwygViewModel.content)
wysiwygViewModel.clearContent()
} label: {
if viewModel.viewState.sendMode == .edit {
Image(Asset.Images.saveIcon.name)
} else {
Image(Asset.Images.sendIcon.name)
}
}
.frame(width: 36, height: 36)
.padding(.leading, 8)
.isHidden(!isActionButtonShowing)
.accessibilityIdentifier(actionButtonAccessibilityIdentifier)
.accessibilityLabel(VectorL10n.send)
.onChange(of: wysiwygViewModel.isContentEmpty) { isEmpty in
viewModel.send(viewAction: .contentDidChange(isEmpty: isEmpty))
withAnimation(.easeInOut(duration: 0.15)) {
isActionButtonShowing = !isEmpty
}
}
}
// MARK: Public
init(
viewModel: ComposerViewModelType.Context,
wysiwygViewModel: WysiwygComposerViewModel,
resizeAnimationDuration: Double,
sendMessageAction: @escaping (WysiwygComposerContent) -> Void,
showSendMediaActions: @escaping () -> Void) {
self.viewModel = viewModel
self.wysiwygViewModel = wysiwygViewModel
self.resizeAnimationDuration = resizeAnimationDuration
self.sendMessageAction = sendMessageAction
self.showSendMediaActions = showSendMediaActions
}
var body: some View {
VStack(spacing: 8) {
if wysiwygViewModel.maximised {
RoundedRectangle(cornerRadius: 4)
.fill(theme.colors.quinaryContent)
.frame(width: 36, height: 5)
.padding(.top, 10)
}
HStack(alignment: .bottom, spacing: 0) {
if !viewModel.viewState.textFormattingEnabled {
sendMediaButton
.padding(.bottom, 1)
}
composerContainer
if !viewModel.viewState.textFormattingEnabled {
sendButton
.padding(.bottom, 1)
}
}
HStack(spacing: 0) {
Button {
showSendMediaActions()
} label: {
Image(Asset.Images.startComposeModule.name)
.resizable()
.foregroundColor(theme.colors.tertiaryContent)
.frame(width: 14, height: 14)
}
.frame(width: 36, height: 36)
.background(Circle().fill(theme.colors.system))
.padding(.trailing, 8)
.accessibilityLabel(VectorL10n.create)
FormattingToolbar(formatItems: formatItems) { type in
wysiwygViewModel.apply(type.action)
}
.frame(height: 44)
Spacer()
Button {
sendMessageAction(wysiwygViewModel.content)
wysiwygViewModel.clearContent()
} label: {
if viewModel.viewState.sendMode == .edit {
Image(Asset.Images.saveIcon.name)
} else {
Image(Asset.Images.sendIcon.name)
}
}
.frame(width: 36, height: 36)
.padding(.leading, 8)
.isHidden(!isActionButtonShowing)
.accessibilityIdentifier(actionButtonAccessibilityIdentifier)
.accessibilityLabel(VectorL10n.send)
.onChange(of: wysiwygViewModel.isContentEmpty) { isEmpty in
viewModel.send(viewAction: .contentDidChange(isEmpty: isEmpty))
withAnimation(.easeInOut(duration: 0.15)) {
isActionButtonShowing = !isEmpty
if viewModel.viewState.textFormattingEnabled {
HStack(alignment: .center, spacing: 0) {
sendMediaButton
FormattingToolbar(formatItems: formatItems) { type in
wysiwygViewModel.apply(type.action)
}
.frame(height: 44)
Spacer()
sendButton
}
}
.padding(.horizontal, 12)
.padding(.bottom, 4)
}
.padding(.horizontal, horizontalPadding)
.padding(.bottom, 4)
.onChange(of: viewModel.viewState.isMinimiseForced) { newValue in
if wysiwygViewModel.maximised && newValue {
wysiwygViewModel.maximised = false
}
}
}
}
@@ -35,6 +35,15 @@ final class ComposerViewModel: ComposerViewModelType, ComposerViewModelProtocol
state.sendMode = newValue
}
}
var textFormattingEnabled: Bool {
get {
state.textFormattingEnabled
}
set {
state.textFormattingEnabled = newValue
}
}
var eventSenderDisplayName: String? {
get {
@@ -54,6 +63,19 @@ final class ComposerViewModel: ComposerViewModelType, ComposerViewModelProtocol
}
}
var isLandscapePhone: Bool {
get {
state.isLandscapePhone
}
set {
state.isLandscapePhone = newValue
}
}
var isFocused: Bool {
state.bindings.focused
}
// MARK: - Public
override func process(viewAction: ComposerViewAction) {
@@ -64,4 +86,12 @@ final class ComposerViewModel: ComposerViewModelType, ComposerViewModelProtocol
callback?(.contentDidChange(isEmpty: isEmpty))
}
}
func dismissKeyboard() {
state.bindings.focused = false
}
func showKeyboard() {
state.bindings.focused = true
}
}
@@ -20,6 +20,12 @@ protocol ComposerViewModelProtocol {
var context: ComposerViewModelType.Context { get }
var callback: ((ComposerViewModelResult) -> Void)? { get set }
var sendMode: ComposerSendMode { get set }
var textFormattingEnabled: Bool { get set }
var eventSenderDisplayName: String? { get set }
var placeholder: String? { get set }
var isFocused: Bool { get }
var isLandscapePhone: Bool { get set }
func dismissKeyboard()
func showKeyboard()
}
@@ -60,7 +60,7 @@ final class TimelinePollCoordinator: Coordinator, Presentable, PollAggregatorDel
}
selectedAnswerIdentifiersSubject
.debounce(for: 1.0, scheduler: RunLoop.main)
.debounce(for: 2.0, scheduler: RunLoop.main)
.removeDuplicates()
.sink { [weak self] identifiers in
guard let self = self else { return }
@@ -16,14 +16,22 @@
import Foundation
class TimelinePollProvider {
@objcMembers
class TimelinePollProvider: NSObject {
static let shared = TimelinePollProvider()
var session: MXSession?
var session: MXSession? {
willSet {
guard let currentSession = self.session else { return }
if currentSession != newValue {
// Clear all stored coordinators on new session
coordinatorsForEventIdentifiers.removeAll()
}
}
}
var coordinatorsForEventIdentifiers = [String: TimelinePollCoordinator]()
private init() { }
/// Create or retrieve the poll timeline coordinator for this event and return
/// a view to be displayed in the timeline
func buildTimelinePollVCForEvent(_ event: MXEvent) -> UIViewController? {
@@ -49,4 +57,8 @@ class TimelinePollProvider {
func timelinePollCoordinatorForEventIdentifier(_ eventIdentifier: String) -> TimelinePollCoordinator? {
coordinatorsForEventIdentifiers[eventIdentifier]
}
func reset() {
coordinatorsForEventIdentifiers.removeAll()
}
}
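Editor's note: the `session` observer added here reappears in VoiceBroadcastPlaybackProvider and VoiceBroadcastRecorderProvider below — a singleton provider caches per-event coordinators and drops the cache whenever a different session is assigned. A minimal generic sketch of that shared pattern (the `EventCoordinatorProvider` type and its API are hypothetical, not part of the codebase):

```swift
// Sketch of the shared provider pattern: cached per-event coordinators are
// invalidated as soon as a different session object is assigned.
final class EventCoordinatorProvider<Coordinator> {
    var session: AnyObject? {
        willSet {
            guard let currentSession = session else { return }
            if currentSession !== newValue {
                // A new session invalidates every cached coordinator.
                coordinatorsForEventIdentifiers.removeAll()
            }
        }
    }

    private(set) var coordinatorsForEventIdentifiers = [String: Coordinator]()

    func coordinator(for eventIdentifier: String,
                     make: () -> Coordinator) -> Coordinator {
        if let existing = coordinatorsForEventIdentifiers[eventIdentifier] {
            return existing
        }
        let coordinator = make()
        coordinatorsForEventIdentifiers[eventIdentifier] = coordinator
        return coordinator
    }

    func reset() {
        coordinatorsForEventIdentifiers.removeAll()
    }
}
```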
@@ -22,7 +22,7 @@ struct VoiceBroadcastPlaybackCoordinatorParameters {
let session: MXSession
let room: MXRoom
let voiceBroadcastStartEvent: MXEvent
let voiceBroadcastState: VoiceBroadcastInfo.State
let voiceBroadcastState: VoiceBroadcastInfoState
let senderDisplayName: String?
}
@@ -48,7 +48,7 @@ final class VoiceBroadcastPlaybackCoordinator: Coordinator, Presentable {
let voiceBroadcastAggregator = try VoiceBroadcastAggregator(session: parameters.session, room: parameters.room, voiceBroadcastStartEventId: parameters.voiceBroadcastStartEvent.eventId, voiceBroadcastState: parameters.voiceBroadcastState)
let details = VoiceBroadcastPlaybackDetails(senderDisplayName: parameters.senderDisplayName)
let details = VoiceBroadcastPlaybackDetails(senderDisplayName: parameters.senderDisplayName, avatarData: parameters.room.avatarData)
viewModel = VoiceBroadcastPlaybackViewModel(details: details,
mediaServiceProvider: VoiceMessageMediaServiceProvider.sharedProvider,
cacheManager: VoiceMessageAttachmentCacheManager.sharedManager,
@@ -61,7 +61,9 @@ final class VoiceBroadcastPlaybackCoordinator: Coordinator, Presentable {
func start() { }
func toPresentable() -> UIViewController {
VectorHostingController(rootView: VoiceBroadcastPlaybackView(viewModel: viewModel.context))
let view = VoiceBroadcastPlaybackView(viewModel: viewModel.context)
.addDependency(AvatarService.instantiate(mediaManager: parameters.session.mediaManager))
return VectorHostingController(rootView: view)
}
func canEndVoiceBroadcast() -> Bool {
@@ -74,4 +76,8 @@ final class VoiceBroadcastPlaybackCoordinator: Coordinator, Presentable {
}
func endVoiceBroadcast() {}
func pausePlaying() {
viewModel.context.send(viewAction: .pause)
}
}
@@ -16,17 +16,26 @@
import Foundation
class VoiceBroadcastPlaybackProvider {
static let shared = VoiceBroadcastPlaybackProvider()
@objc class VoiceBroadcastPlaybackProvider: NSObject {
@objc static let shared = VoiceBroadcastPlaybackProvider()
var session: MXSession?
var session: MXSession? {
willSet {
guard let currentSession = self.session else { return }
if currentSession != newValue {
// Clear all stored coordinators on new session
coordinatorsForEventIdentifiers.removeAll()
}
}
}
var coordinatorsForEventIdentifiers = [String: VoiceBroadcastPlaybackCoordinator]()
private init() { }
private override init() { }
/// Create or retrieve the voiceBroadcast timeline coordinator for this event and return
/// a view to be displayed in the timeline
func buildVoiceBroadcastPlaybackVCForEvent(_ event: MXEvent, senderDisplayName: String?) -> UIViewController? {
func buildVoiceBroadcastPlaybackVCForEvent(_ event: MXEvent, senderDisplayName: String?, voiceBroadcastState: String) -> UIViewController? {
guard let session = session, let room = session.room(withRoomId: event.roomId) else {
return nil
}
@@ -35,26 +44,10 @@ class VoiceBroadcastPlaybackProvider {
return coordinator.toPresentable()
}
let dispatchGroup = DispatchGroup()
dispatchGroup.enter()
var voiceBroadcastState = VoiceBroadcastInfo.State.stopped
room.state { roomState in
if let stateEvent = roomState?.stateEvents(with: .custom(VoiceBroadcastSettings.voiceBroadcastInfoContentKeyType))?.last,
stateEvent.stateKey == event.stateKey,
let voiceBroadcastInfo = VoiceBroadcastInfo(fromJSON: stateEvent.content),
(stateEvent.eventId == event.eventId || voiceBroadcastInfo.eventId == event.eventId),
let state = VoiceBroadcastInfo.State(rawValue: voiceBroadcastInfo.state) {
voiceBroadcastState = state
}
dispatchGroup.leave()
}
let parameters = VoiceBroadcastPlaybackCoordinatorParameters(session: session,
room: room,
voiceBroadcastStartEvent: event,
voiceBroadcastState: voiceBroadcastState,
voiceBroadcastState: VoiceBroadcastInfoState(rawValue: voiceBroadcastState) ?? VoiceBroadcastInfoState.stopped,
senderDisplayName: senderDisplayName)
guard let coordinator = try? VoiceBroadcastPlaybackCoordinator(parameters: parameters) else {
return nil
@@ -70,4 +63,11 @@ class VoiceBroadcastPlaybackProvider {
func voiceBroadcastPlaybackCoordinatorForEventIdentifier(_ eventIdentifier: String) -> VoiceBroadcastPlaybackCoordinator? {
coordinatorsForEventIdentifiers[eventIdentifier]
}
/// Pause current voice broadcast playback.
@objc public func pausePlaying() {
coordinatorsForEventIdentifiers.forEach { _, coordinator in
coordinator.pausePlaying()
}
}
}
@@ -26,14 +26,35 @@ class VoiceBroadcastPlaybackViewModel: VoiceBroadcastPlaybackViewModelType, Voic
// MARK: - Properties
// MARK: Private
private var voiceBroadcastAggregator: VoiceBroadcastAggregator
private let mediaServiceProvider: VoiceMessageMediaServiceProvider
private let cacheManager: VoiceMessageAttachmentCacheManager
private var audioPlayer: VoiceMessageAudioPlayer?
private var voiceBroadcastAggregator: VoiceBroadcastAggregator
private var voiceBroadcastChunkQueue: [VoiceBroadcastChunk] = []
private var voiceBroadcastAttachmentCacheManagerLoadResults: [VoiceMessageAttachmentCacheManagerLoadResult] = []
private var isLivePlayback = false
private var audioPlayer: VoiceMessageAudioPlayer?
private var displayLink: CADisplayLink!
private var isPlaybackInitialized: Bool = false
private var acceptProgressUpdates: Bool = true
private var isActuallyPaused: Bool = false
private var isProcessingVoiceBroadcastChunk: Bool = false
private var reloadVoiceBroadcastChunkQueue: Bool = false
private var seekToChunkTime: TimeInterval?
private var isPlayingLastChunk: Bool {
let chunks = reorderVoiceBroadcastChunks(chunks: Array(voiceBroadcastAggregator.voiceBroadcast.chunks))
guard let chunkDuration = chunks.last?.duration else {
return false
}
return state.bindings.progress + 1000 >= state.playingState.duration - Float(chunkDuration)
}
private var isLivePlayback: Bool {
return (!isPlaybackInitialized || isPlayingLastChunk) && (state.broadcastState == .started || state.broadcastState == .resumed)
}
// MARK: Public
@@ -48,12 +69,18 @@ class VoiceBroadcastPlaybackViewModel: VoiceBroadcastPlaybackViewModelType, Voic
self.voiceBroadcastAggregator = voiceBroadcastAggregator
let viewState = VoiceBroadcastPlaybackViewState(details: details,
broadcastState: VoiceBroadcastPlaybackViewModel.getBroadcastState(from: voiceBroadcastAggregator.voiceBroadcastState),
broadcastState: voiceBroadcastAggregator.voiceBroadcastState,
playbackState: .stopped,
bindings: VoiceBroadcastPlaybackViewStateBindings())
playingState: VoiceBroadcastPlayingState(duration: Float(voiceBroadcastAggregator.voiceBroadcast.duration), isLive: false),
bindings: VoiceBroadcastPlaybackViewStateBindings(progress: 0))
super.init(initialViewState: viewState)
displayLink = CADisplayLink(target: WeakTarget(self, selector: #selector(handleDisplayLinkTick)), selector: WeakTarget.triggerSelector)
displayLink.isPaused = true
displayLink.add(to: .current, forMode: .common)
self.voiceBroadcastAggregator.delegate = self
self.voiceBroadcastAggregator.start()
}
private func release() {
@@ -70,10 +97,10 @@ class VoiceBroadcastPlaybackViewModel: VoiceBroadcastPlaybackViewModelType, Voic
switch viewAction {
case .play:
play()
case .playLive:
playLive()
case .pause:
pause()
case .sliderChange(let didChange):
didSliderChanged(didChange)
}
}
@@ -82,80 +109,56 @@ class VoiceBroadcastPlaybackViewModel: VoiceBroadcastPlaybackViewModelType, Voic
/// Listen voice broadcast
private func play() {
isLivePlayback = false
displayLink.isPaused = false
isActuallyPaused = false
if voiceBroadcastAggregator.isStarted == false {
// Start the streaming by fetching broadcast chunks
// The audio player will automatically start the playback on incoming chunks
MXLog.debug("[VoiceBroadcastPlaybackViewModel] play: Start streaming")
state.playbackState = .buffering
voiceBroadcastAggregator.start()
}
else if let audioPlayer = audioPlayer {
if let audioPlayer = audioPlayer {
MXLog.debug("[VoiceBroadcastPlaybackViewModel] play: resume")
audioPlayer.play()
}
else {
let chunks = voiceBroadcastAggregator.voiceBroadcast.chunks
MXLog.debug("[VoiceBroadcastPlaybackViewModel] play: restart from the beginning: \(chunks.count) chunks")
// Reinject all the chuncks we already have and play them
voiceBroadcastChunkQueue.append(contentsOf: chunks)
processPendingVoiceBroadcastChunks()
}
}
private func playLive() {
guard isLivePlayback == false else {
MXLog.debug("[VoiceBroadcastPlaybackViewModel] playLive: Already playing live")
return
}
isLivePlayback = true
// Flush the current audio player playlist
audioPlayer?.removeAllPlayerItems()
if voiceBroadcastAggregator.isStarted == false {
// Start the streaming by fetching broadcast chunks
// The audio player will automatically start the playback on incoming chunks
MXLog.debug("[VoiceBroadcastPlaybackViewModel] playLive: Start streaming")
} else {
state.playbackState = .buffering
voiceBroadcastAggregator.start()
}
else {
let chunks = voiceBroadcastAggregator.voiceBroadcast.chunks
MXLog.debug("[VoiceBroadcastPlaybackViewModel] playLive: restart from the last chunk: \(chunks.count) chunks")
// Reinject all the chuncks we already have and play the last one
voiceBroadcastChunkQueue.append(contentsOf: chunks)
processPendingVoiceBroadcastChunksForLivePlayback()
if voiceBroadcastAggregator.launchState == .loaded {
let chunks = voiceBroadcastAggregator.voiceBroadcast.chunks
MXLog.debug("[VoiceBroadcastPlaybackViewModel] play: restart from the beginning: \(chunks.count) chunks")
// Reinject all the chunks we already have and play them
voiceBroadcastChunkQueue = Array(chunks)
handleVoiceBroadcastChunksProcessing()
}
}
}
/// Stop voice broadcast
/// Pause voice broadcast
private func pause() {
MXLog.debug("[VoiceBroadcastPlaybackViewModel] pause")
isLivePlayback = false
displayLink.isPaused = true
isActuallyPaused = true
if let audioPlayer = audioPlayer, audioPlayer.isPlaying {
audioPlayer.pause()
} else {
state.playbackState = .paused
state.playingState.isLive = false
}
}
private func stopIfVoiceBroadcastOver() {
MXLog.debug("[VoiceBroadcastPlaybackViewModel] stopIfVoiceBroadcastOver")
// TODO: Check if the broadcast is over before stopping everything
// Check if the broadcast is over before stopping everything
// If not, the player should not be stopped. The view state must move to buffering
stop()
if state.broadcastState == .stopped, isPlayingLastChunk {
stop()
} else {
state.playbackState = .buffering
}
}
private func stop() {
MXLog.debug("[VoiceBroadcastPlaybackViewModel] stop")
isLivePlayback = false
displayLink.isPaused = true
// Objects will be released on audioPlayerDidStopPlaying
audioPlayer?.stop()
@@ -196,6 +199,13 @@ class VoiceBroadcastPlaybackViewModel: VoiceBroadcastPlaybackViewModelType, Voic
return
}
guard !isProcessingVoiceBroadcastChunk else {
// Chunks caching is already in progress
return
}
isProcessingVoiceBroadcastChunk = true
// TODO: Control the download rate to avoid downloading all the chunks at once
// We could synchronise it with the number of chunks in the player playlist (audioPlayer.playerItems)
@@ -207,62 +217,142 @@ class VoiceBroadcastPlaybackViewModel: VoiceBroadcastPlaybackViewModelType, Voic
return
}
// TODO: Make sure there is no new incoming chunk that should come before this attachment.
// Be careful that this new chunk is not older than the chunk currently being played by the audio player,
// otherwise we will get an unexpected rewind.
self.isProcessingVoiceBroadcastChunk = false
if self.reloadVoiceBroadcastChunkQueue {
self.reloadVoiceBroadcastChunkQueue = false
self.processNextVoiceBroadcastChunk()
return
}
switch result {
case .success(let result):
guard result.eventIdentifier == chunk.attachment.eventId else {
return
}
case .success(let result):
guard result.eventIdentifier == chunk.attachment.eventId else {
return
}
self.voiceBroadcastAttachmentCacheManagerLoadResults.append(result)
// Instantiate audioPlayer if needed.
if self.audioPlayer == nil {
// Init and start the player on the first chunk
let audioPlayer = self.mediaServiceProvider.audioPlayerForIdentifier(result.eventIdentifier)
audioPlayer.registerDelegate(self)
if let audioPlayer = self.audioPlayer {
// Append the chunk to the current playlist
audioPlayer.addContentFromURL(result.url)
// Resume the player. Needed after a pause
if audioPlayer.isPlaying == false {
MXLog.debug("[VoiceBroadcastPlaybackViewModel] processNextVoiceBroadcastChunk: Resume the player")
audioPlayer.play()
}
}
else {
// Init and start the player on the first chunk
let audioPlayer = self.mediaServiceProvider.audioPlayerForIdentifier(result.eventIdentifier)
audioPlayer.registerDelegate(self)
audioPlayer.loadContentFromURL(result.url, displayName: chunk.attachment.originalFileName)
audioPlayer.loadContentFromURL(result.url, displayName: chunk.attachment.originalFileName)
self.audioPlayer = audioPlayer
} else {
// Append the chunk to the current playlist
self.audioPlayer?.addContentFromURL(result.url)
}
guard let audioPlayer = self.audioPlayer else {
MXLog.error("[VoiceBroadcastPlaybackViewModel] processVoiceBroadcastChunkQueue: audioPlayer is nil !")
return
}
// Start or Resume the player. Needed after a buffering
if self.state.playbackState == .buffering {
if audioPlayer.isPlaying == false {
MXLog.debug("[VoiceBroadcastPlaybackViewModel] processNextVoiceBroadcastChunk: Start or Resume the player")
self.displayLink.isPaused = false
audioPlayer.play()
self.audioPlayer = audioPlayer
}
case .failure (let error):
MXLog.error("[VoiceBroadcastPlaybackViewModel] processVoiceBroadcastChunkQueue: loadAttachment error", context: error)
if self.voiceBroadcastChunkQueue.count == 0 {
// No more chunk to try. Go to error
self.state.playbackState = .error
} else {
self.state.playbackState = .playing
self.state.playingState.isLive = self.isLivePlayback
}
}
if let time = self.seekToChunkTime {
audioPlayer.seekToTime(time)
self.seekToChunkTime = nil
}
case .failure (let error):
MXLog.error("[VoiceBroadcastPlaybackViewModel] processVoiceBroadcastChunkQueue: loadAttachment error", context: error)
if self.voiceBroadcastChunkQueue.count == 0 {
// No more chunk to try. Go to error
self.state.playbackState = .error
}
}
self.processNextVoiceBroadcastChunk()
}
}
private static func getBroadcastState(from state: VoiceBroadcastInfo.State) -> VoiceBroadcastState {
var broadcastState: VoiceBroadcastState
switch state {
case .started:
broadcastState = VoiceBroadcastState.live
case .paused:
broadcastState = VoiceBroadcastState.paused
case .resumed:
broadcastState = VoiceBroadcastState.live
case .stopped:
broadcastState = VoiceBroadcastState.stopped
private func updateDuration() {
let duration = voiceBroadcastAggregator.voiceBroadcast.duration
let time = TimeInterval(duration / 1000)
let formatter = DateComponentsFormatter()
formatter.unitsStyle = .abbreviated
state.playingState.duration = Float(duration)
state.playingState.durationLabel = formatter.string(from: time)
}
private func didSliderChanged(_ didChange: Bool) {
acceptProgressUpdates = !didChange
if didChange {
audioPlayer?.pause()
displayLink.isPaused = true
} else {
// Flush the chunks queue and the current audio player playlist
voiceBroadcastChunkQueue = []
reloadVoiceBroadcastChunkQueue = isProcessingVoiceBroadcastChunk
audioPlayer?.removeAllPlayerItems()
let chunks = reorderVoiceBroadcastChunks(chunks: Array(voiceBroadcastAggregator.voiceBroadcast.chunks))
// Reinject the chunks we need and play them
let remainingTime = state.playingState.duration - state.bindings.progress
var chunksDuration: UInt = 0
for chunk in chunks.reversed() {
chunksDuration += chunk.duration
voiceBroadcastChunkQueue.append(chunk)
if Float(chunksDuration) >= remainingTime {
break
}
}
MXLog.debug("[VoiceBroadcastPlaybackViewModel] didSliderChanged: restart to time: \(state.bindings.progress) milliseconds")
let time = state.bindings.progress - state.playingState.duration + Float(chunksDuration)
seekToChunkTime = TimeInterval(time / 1000)
// Check the condition to resume the playback when data will be ready (after the chunk process).
if state.playbackState != .stopped, isActuallyPaused == false {
state.playbackState = .buffering
}
processPendingVoiceBroadcastChunks()
}
}
@objc private func handleDisplayLinkTick() {
updateUI()
}
private func updateUI() {
guard let playingEventId = voiceBroadcastAttachmentCacheManagerLoadResults.first(where: { result in
result.url == audioPlayer?.currentUrl
})?.eventIdentifier,
let playingSequence = voiceBroadcastAggregator.voiceBroadcast.chunks.first(where: { chunk in
chunk.attachment.eventId == playingEventId
})?.sequence else {
return
}
return broadcastState
let progress = Double(voiceBroadcastAggregator.voiceBroadcast.chunks.filter { chunk in
chunk.sequence < playingSequence
}.reduce(0) { $0 + $1.duration}) + (audioPlayer?.currentTime.rounded() ?? 0) * 1000
state.bindings.progress = Float(progress)
}
private func handleVoiceBroadcastChunksProcessing() {
// Handle specifically the case where we were waiting data to start playing a live playback
if isLivePlayback, state.playbackState == .buffering {
// Start the playback on the latest one
processPendingVoiceBroadcastChunksForLivePlayback()
} else {
processPendingVoiceBroadcastChunks()
}
}
}
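Editor's note on `didSliderChanged` in this hunk: the slider value is an absolute position in milliseconds, so the model re-enqueues just enough trailing chunks to cover the remaining time and then seeks inside the earliest of them. A simplified sketch of that arithmetic (the `Chunk` struct and the free function are illustrative stand-ins for VoiceBroadcastChunk and the in-place logic):

```swift
import Foundation

// Simplified sketch of the slider-seek arithmetic (milliseconds throughout).
struct Chunk { let sequence: UInt; let duration: UInt }

/// Returns the chunks to re-enqueue to cover `progress`..end of the broadcast,
/// plus the offset (in seconds) to seek to inside the earliest of them.
func chunksToReplay(chunks: [Chunk], totalDuration: Float, progress: Float)
    -> (queue: [Chunk], seekTime: TimeInterval) {
    let remainingTime = totalDuration - progress
    var queue: [Chunk] = []
    var chunksDuration: UInt = 0
    // Walk backwards from the newest chunk until the accumulated duration
    // covers everything between `progress` and the end of the broadcast.
    for chunk in chunks.sorted(by: { $0.sequence < $1.sequence }).reversed() {
        chunksDuration += chunk.duration
        queue.append(chunk)
        if Float(chunksDuration) >= remainingTime { break }
    }
    // Position inside the earliest re-enqueued chunk, converted ms -> s.
    let seekTime = TimeInterval((progress - totalDuration + Float(chunksDuration)) / 1000)
    return (queue, seekTime)
}
```

For example, with three 30 000 ms chunks and progress = 50 000 ms, the last two chunks are re-enqueued (60 000 ms ≥ 40 000 ms remaining) and playback seeks 20 s into the middle chunk.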
@@ -282,18 +372,19 @@ extension VoiceBroadcastPlaybackViewModel: VoiceBroadcastAggregatorDelegate {
voiceBroadcastChunkQueue.append(didReceiveChunk)
}
func voiceBroadcastAggregator(_ aggregator: VoiceBroadcastAggregator, didReceiveState: VoiceBroadcastInfo.State) {
state.broadcastState = VoiceBroadcastPlaybackViewModel.getBroadcastState(from: didReceiveState)
func voiceBroadcastAggregator(_ aggregator: VoiceBroadcastAggregator, didReceiveState: VoiceBroadcastInfoState) {
state.broadcastState = didReceiveState
// Handle the live icon appearance
state.playingState.isLive = isLivePlayback
}
func voiceBroadcastAggregatorDidUpdateData(_ aggregator: VoiceBroadcastAggregator) {
if isLivePlayback && state.playbackState == .buffering {
// We started directly with a live playback but there was no known chuncks at that time
// These are the first chunks we get. Start the playback on the latest one
processPendingVoiceBroadcastChunksForLivePlayback()
}
else {
processPendingVoiceBroadcastChunks()
updateDuration()
if state.playbackState != .stopped, !isActuallyPaused {
handleVoiceBroadcastChunksProcessing()
}
}
}
@@ -305,21 +396,20 @@ extension VoiceBroadcastPlaybackViewModel: VoiceMessageAudioPlayerDelegate {
}
func audioPlayerDidStartPlaying(_ audioPlayer: VoiceMessageAudioPlayer) {
if isLivePlayback {
state.playbackState = .playingLive
}
else {
state.playbackState = .playing
}
state.playbackState = .playing
state.playingState.isLive = isLivePlayback
isPlaybackInitialized = true
}
func audioPlayerDidPausePlaying(_ audioPlayer: VoiceMessageAudioPlayer) {
state.playbackState = .paused
state.playingState.isLive = false
}
func audioPlayerDidStopPlaying(_ audioPlayer: VoiceMessageAudioPlayer) {
MXLog.debug("[VoiceBroadcastPlaybackViewModel] audioPlayerDidStopPlaying")
state.playbackState = .stopped
state.playingState.isLive = false
release()
}
@@ -30,9 +30,10 @@ struct VoiceBroadcastPlaybackView: View {
// MARK: Private
@Environment(\.theme) private var theme: ThemeSwiftUI
@State private var bufferingSpinnerRotationValue = 0.0
private var backgroundColor: Color {
if viewModel.viewState.playbackState == .playingLive {
if viewModel.viewState.playingState.isLive {
return theme.colors.alert
}
return theme.colors.quarterlyContent
@@ -45,53 +46,79 @@ struct VoiceBroadcastPlaybackView: View {
var body: some View {
let details = viewModel.viewState.details
VStack(alignment: .center, spacing: 16.0) {
VStack(alignment: .center) {
HStack {
Text(details.senderDisplayName ?? "")
//Text(VectorL10n.voiceBroadcastInTimelineTitle)
.font(theme.fonts.bodySB)
.foregroundColor(theme.colors.primaryContent)
HStack (alignment: .top) {
AvatarImage(avatarData: viewModel.viewState.details.avatarData, size: .xSmall)
if viewModel.viewState.broadcastState == .live {
Button { viewModel.send(viewAction: .playLive) } label:
{
HStack {
Image(uiImage: Asset.Images.voiceBroadcastLive.image)
.renderingMode(.original)
Text("Live")
.font(theme.fonts.bodySB)
.foregroundColor(Color.white)
}
VStack(alignment: .leading, spacing: 0) {
Text(details.avatarData.displayName ?? details.avatarData.matrixItemId)
.font(theme.fonts.bodySB)
.foregroundColor(theme.colors.primaryContent)
Label {
Text(details.senderDisplayName ?? details.avatarData.matrixItemId)
.foregroundColor(theme.colors.secondaryContent)
.font(theme.fonts.caption1)
} icon: {
Image(uiImage: Asset.Images.voiceBroadcastTileMic.image)
}
.padding(5.0)
.background(RoundedRectangle(cornerRadius: 4, style: .continuous)
.fill(backgroundColor))
.accessibilityIdentifier("liveButton")
if viewModel.viewState.playbackState != .buffering {
Label {
Text(VectorL10n.voiceBroadcastTile)
.foregroundColor(theme.colors.secondaryContent)
.font(theme.fonts.caption1)
} icon: {
Image(uiImage: Asset.Images.voiceBroadcastTileLive.image)
}
} else {
Label {
Text(VectorL10n.voiceBroadcastBuffering)
.foregroundColor(theme.colors.secondaryContent)
.font(theme.fonts.caption1)
} icon: {
Image(uiImage: Asset.Images.voiceBroadcastSpinner.image)
.frame(width: 16.0, height: 16.0)
.rotationEffect(Angle.degrees(bufferingSpinnerRotationValue))
.onAppear {
let baseAnimation = Animation.linear(duration: 1.0).repeatForever(autoreverses: false)
withAnimation(baseAnimation) {
bufferingSpinnerRotationValue = 360.0
}
}
.onDisappear {
bufferingSpinnerRotationValue = 0.0
}
}
}
}.frame(maxWidth: .infinity, alignment: .leading)
if viewModel.viewState.broadcastState != .stopped {
Label {
Text(VectorL10n.voiceBroadcastLive)
.font(theme.fonts.caption1SB)
.foregroundColor(Color.white)
} icon: {
Image(uiImage: Asset.Images.voiceBroadcastLive.image)
}
.padding(.horizontal, 5)
.background(RoundedRectangle(cornerRadius: 4, style: .continuous).fill(backgroundColor))
.accessibilityIdentifier("liveLabel")
}
}
.frame(maxWidth: .infinity, alignment: .leading)
if viewModel.viewState.playbackState == .error {
VoiceBroadcastPlaybackErrorView()
} else {
ZStack {
if viewModel.viewState.playbackState == .playing ||
viewModel.viewState.playbackState == .playingLive {
if viewModel.viewState.playbackState == .playing || viewModel.viewState.playbackState == .buffering {
Button { viewModel.send(viewAction: .pause) } label: {
Image(uiImage: Asset.Images.voiceBroadcastPause.image)
.renderingMode(.original)
}
.accessibilityIdentifier("pauseButton")
} else {
Button {
if viewModel.viewState.broadcastState == .live &&
viewModel.viewState.playbackState == .stopped {
viewModel.send(viewAction: .playLive)
} else {
viewModel.send(viewAction: .play)
}
} label: {
} else {
Button { viewModel.send(viewAction: .play) } label: {
Image(uiImage: Asset.Images.voiceBroadcastPlay.image)
.renderingMode(.original)
}
@@ -99,15 +126,20 @@ struct VoiceBroadcastPlaybackView: View {
.accessibilityIdentifier("playButton")
}
}
.activityIndicator(show: viewModel.viewState.playbackState == .buffering)
}
Slider(value: $viewModel.progress, in: 0...viewModel.viewState.playingState.duration) {
Text("Slider")
} minimumValueLabel: {
Text("")
} maximumValueLabel: {
Text(viewModel.viewState.playingState.durationLabel ?? "").font(.body)
} onEditingChanged: { didChange in
viewModel.send(viewAction: .sliderChange(didChange: didChange))
}
}
.padding([.horizontal, .top], 2.0)
.padding([.bottom])
.alert(item: $viewModel.alertInfo) { info in
info.alert
}
}
}
@@ -19,44 +19,38 @@ import SwiftUI
enum VoiceBroadcastPlaybackViewAction {
case play
case playLive
case pause
case sliderChange(didChange: Bool)
}
enum VoiceBroadcastPlaybackState {
case stopped
case buffering
case playing
case playingLive
case paused
case error
}
struct VoiceBroadcastPlaybackDetails {
let senderDisplayName: String?
let avatarData: AvatarInputProtocol
}
enum VoiceBroadcastState {
case unknown
case stopped
case live
case paused
struct VoiceBroadcastPlayingState {
var duration: Float
var durationLabel: String?
var isLive: Bool
}
struct VoiceBroadcastPlaybackViewState: BindableState {
var details: VoiceBroadcastPlaybackDetails
var broadcastState: VoiceBroadcastState
var broadcastState: VoiceBroadcastInfoState
var playbackState: VoiceBroadcastPlaybackState
var playingState: VoiceBroadcastPlayingState
var bindings: VoiceBroadcastPlaybackViewStateBindings
}
struct VoiceBroadcastPlaybackViewStateBindings {
// TODO: Neeeded?
var alertInfo: AlertInfo<VoiceBroadcastPlaybackAlertType>?
}
enum VoiceBroadcastPlaybackAlertType {
// TODO: What is it?
case failedClosingVoiceBroadcast
var progress: Float
}
@@ -42,8 +42,8 @@ enum MockVoiceBroadcastPlaybackScreenState: MockScreenState, CaseIterable {
/// Generate the view struct for the screen state.
var screenView: ([Any], AnyView) {
let details = VoiceBroadcastPlaybackDetails(senderDisplayName: "Alice")
let viewModel = MockVoiceBroadcastPlaybackViewModel(initialViewState: VoiceBroadcastPlaybackViewState(details: details, broadcastState: .live, playbackState: .stopped, bindings: VoiceBroadcastPlaybackViewStateBindings()))
let details = VoiceBroadcastPlaybackDetails(senderDisplayName: "Alice", avatarData: AvatarInput(mxContentUri: "", matrixItemId: "!fakeroomid:matrix.org", displayName: "The name of the room"))
let viewModel = MockVoiceBroadcastPlaybackViewModel(initialViewState: VoiceBroadcastPlaybackViewState(details: details, broadcastState: .started, playbackState: .stopped, playingState: VoiceBroadcastPlayingState(duration: 10.0, isLive: true), bindings: VoiceBroadcastPlaybackViewStateBindings(progress: 0)))
return (
[false, viewModel],
@@ -45,7 +45,7 @@ final class VoiceBroadcastRecorderCoordinator: Coordinator, Presentable {
voiceBroadcastRecorderService = VoiceBroadcastRecorderService(session: parameters.session, roomId: parameters.room.matrixItemId)
let details = VoiceBroadcastRecorderDetails(senderDisplayName: parameters.senderDisplayName)
let details = VoiceBroadcastRecorderDetails(senderDisplayName: parameters.senderDisplayName, avatarData: parameters.room.avatarData)
let viewModel = VoiceBroadcastRecorderViewModel(details: details,
recorderService: voiceBroadcastRecorderService)
voiceBroadcastRecorderViewModel = viewModel
@@ -56,7 +56,8 @@ final class VoiceBroadcastRecorderCoordinator: Coordinator, Presentable {
func start() { }
func toPresentable() -> UIViewController {
VectorHostingController(rootView: VoiceBroadcastRecorderView(viewModel: voiceBroadcastRecorderViewModel.context))
let view = VoiceBroadcastRecorderView(viewModel: voiceBroadcastRecorderViewModel.context)
return VectorHostingController(rootView: view)
}
func pauseRecording() {
@@ -23,7 +23,16 @@ import Foundation
// MARK: - Properties
// MARK: Public
var session: MXSession?
var session: MXSession? {
willSet {
guard let currentSession = self.session else { return }
if currentSession != newValue {
// Clear all stored coordinators on new session
coordinatorsForEventIdentifiers.removeAll()
}
}
}
var coordinatorsForEventIdentifiers = [String: VoiceBroadcastRecorderCoordinator]()
// MARK: Private
@@ -33,8 +33,14 @@ class VoiceBroadcastRecorderService: VoiceBroadcastRecorderServiceProtocol {
private var chunkFile: AVAudioFile! = nil
private var chunkFrames: AVAudioFrameCount = 0
private var chunkFileNumber: Int = 1
private var chunkFileNumber: Int = 0
private var currentElapsedTime: UInt = 0 // Time in seconds.
private var currentRemainingTime: UInt { // Time in seconds.
BuildSettings.voiceBroadcastMaxLength - currentElapsedTime
}
private var elapsedTimeTimer: Timer?
// MARK: Public
weak var serviceDelegate: VoiceBroadcastRecorderServiceDelegate?
@@ -49,30 +55,44 @@ class VoiceBroadcastRecorderService: VoiceBroadcastRecorderServiceProtocol {
// MARK: - VoiceBroadcastRecorderServiceProtocol
func startRecordingVoiceBroadcast() {
let inputNode = audioEngine.inputNode
do {
try AVAudioSession.sharedInstance().setCategory(.playAndRecord, mode: .default)
try AVAudioSession.sharedInstance().setActive(true)
let inputFormat = inputNode.inputFormat(forBus: audioNodeBus)
MXLog.debug("[VoiceBroadcastRecorderService] Start recording voice broadcast for bus name : \(String(describing: inputNode.name(forInputBus: audioNodeBus)))")
let inputNode = audioEngine.inputNode
inputNode.installTap(onBus: audioNodeBus,
bufferSize: 512,
format: inputFormat) { (buffer, time) -> Void in
DispatchQueue.main.async {
self.writeBuffer(buffer)
let inputFormat = inputNode.inputFormat(forBus: audioNodeBus)
MXLog.debug("[VoiceBroadcastRecorderService] Start recording voice broadcast for bus name : \(String(describing: inputNode.name(forInputBus: audioNodeBus)))")
inputNode.installTap(onBus: audioNodeBus,
bufferSize: 512,
format: inputFormat) { (buffer, time) -> Void in
DispatchQueue.main.async {
self.writeBuffer(buffer)
}
}
}
try? audioEngine.start()
try audioEngine.start()
startTimer()
// Disable the sleep mode during the recording until we are able to handle it
UIApplication.shared.isIdleTimerDisabled = true
} catch {
MXLog.debug("[VoiceBroadcastRecorderService] startRecordingVoiceBroadcast error", context: error)
stopRecordingVoiceBroadcast()
invalidateTimer()
}
}
func stopRecordingVoiceBroadcast() {
MXLog.debug("[VoiceBroadcastRecorderService] Stop recording voice broadcast")
audioEngine.stop()
audioEngine.inputNode.removeTap(onBus: audioNodeBus)
UIApplication.shared.isIdleTimerDisabled = false
invalidateTimer()
resetValues()
voiceBroadcastService?.stopVoiceBroadcast(success: { [weak self] _ in
voiceBroadcastService?.stopVoiceBroadcast(lastChunkSequence: chunkFileNumber,
success: { [weak self] _ in
MXLog.debug("[VoiceBroadcastRecorderService] Stopped")
guard let self = self else { return }
@@ -82,25 +102,35 @@ class VoiceBroadcastRecorderService: VoiceBroadcastRecorderServiceProtocol {
// Send current chunk
if self.chunkFile != nil {
self.sendChunkFile(at: self.chunkFile.url, sequence: self.chunkFileNumber)
self.sendChunkFile(at: self.chunkFile.url, sequence: self.chunkFileNumber) {
self.tearDownVoiceBroadcastService()
}
} else {
self.tearDownVoiceBroadcastService()
}
self.session.tearDownVoiceBroadcastService()
}, failure: { error in
MXLog.error("[VoiceBroadcastRecorderService] Failed to stop voice broadcast", context: error)
// Discard the service on VoiceBroadcastService error. We keep the service in case of other error type
if error as? VoiceBroadcastServiceError != nil {
self.tearDownVoiceBroadcastService()
}
})
}
func pauseRecordingVoiceBroadcast() {
audioEngine.pause()
UIApplication.shared.isIdleTimerDisabled = false
invalidateTimer()
voiceBroadcastService?.pauseVoiceBroadcast(success: { [weak self] _ in
voiceBroadcastService?.pauseVoiceBroadcast(lastChunkSequence: chunkFileNumber,
success: { [weak self] _ in
guard let self = self else { return }
// Send current chunk
self.sendChunkFile(at: self.chunkFile.url, sequence: self.chunkFileNumber)
self.chunkFile = nil
if self.chunkFile != nil {
self.sendChunkFile(at: self.chunkFile.url, sequence: self.chunkFileNumber)
self.chunkFile = nil
}
}, failure: { error in
MXLog.error("[VoiceBroadcastRecorderService] Failed to pause voice broadcast", context: error)
})
@@ -108,12 +138,14 @@ class VoiceBroadcastRecorderService: VoiceBroadcastRecorderServiceProtocol {
func resumeRecordingVoiceBroadcast() {
try? audioEngine.start()
startTimer()
voiceBroadcastService?.resumeVoiceBroadcast(success: { [weak self] _ in
guard let self = self else { return }
// Update state
self.serviceDelegate?.voiceBroadcastRecorderService(self, didUpdateState: .started)
self.serviceDelegate?.voiceBroadcastRecorderService(self, didUpdateState: .resumed)
UIApplication.shared.isIdleTimerDisabled = true
}, failure: { error in
MXLog.error("[VoiceBroadcastRecorderService] Failed to resume voice broadcast", context: error)
})
@@ -123,7 +155,46 @@ class VoiceBroadcastRecorderService: VoiceBroadcastRecorderServiceProtocol {
/// Reset chunk values.
private func resetValues() {
chunkFrames = 0
chunkFileNumber = 1
chunkFileNumber = 0
currentElapsedTime = 0
}
/// Release the service
private func tearDownVoiceBroadcastService() {
resetValues()
session.tearDownVoiceBroadcastService()
invalidateTimer()
do {
try AVAudioSession.sharedInstance().setActive(false)
} catch {
MXLog.error("[VoiceBroadcastRecorderService] tearDownVoiceBroadcastService error", context: error)
}
}
/// Start the elapsed time timer.
private func startTimer() {
elapsedTimeTimer = Timer.scheduledTimer(timeInterval: 1.0,
target: self,
selector: #selector(updateCurrentElapsedTimeValue),
userInfo: nil,
repeats: true)
}
/// Invalidate the elapsed time timer.
private func invalidateTimer() {
elapsedTimeTimer?.invalidate()
elapsedTimeTimer = nil
}
/// Update the currentElapsedTime value.
@objc private func updateCurrentElapsedTimeValue() {
guard currentRemainingTime > 0 else {
stopRecordingVoiceBroadcast()
return
}
currentElapsedTime += 1
serviceDelegate?.voiceBroadcastRecorderService(self, didUpdateRemainingTime: self.currentRemainingTime)
}
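For reference, a compact sketch of the countdown mechanism introduced here: a one-second repeating Timer accumulates elapsed seconds and reports the remaining time until it runs out. It uses the closure-based Timer API instead of the selector-based one in the diff; maxLength, onRemainingTime and onExpired are placeholder names.

import Foundation

final class CountdownSketch {
    private var timer: Timer?
    private var elapsed: UInt = 0
    private let maxLength: UInt                 // seconds, like BuildSettings.voiceBroadcastMaxLength
    private let onRemainingTime: (UInt) -> Void // stands in for the delegate callback
    private let onExpired: () -> Void           // stands in for stopRecordingVoiceBroadcast()

    init(maxLength: UInt, onRemainingTime: @escaping (UInt) -> Void, onExpired: @escaping () -> Void) {
        self.maxLength = maxLength
        self.onRemainingTime = onRemainingTime
        self.onExpired = onExpired
    }

    func start() {
        timer = Timer.scheduledTimer(withTimeInterval: 1.0, repeats: true) { [weak self] _ in
            guard let self = self else { return }
            guard self.elapsed < self.maxLength else {
                // Out of time: stop counting and let the caller stop the recording.
                self.invalidate()
                self.onExpired()
                return
            }
            self.elapsed += 1
            self.onRemainingTime(self.maxLength - self.elapsed)
        }
    }

    func invalidate() {
        timer?.invalidate()
        timer = nil
    }
}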
/// Write audio buffer to chunk file.
@@ -150,6 +221,7 @@ class VoiceBroadcastRecorderService: VoiceBroadcastRecorderServiceProtocol {
// FIXME: Manage error
return
}
chunkFileNumber += 1
let temporaryFileName = "VoiceBroadcastChunk-\(roomId)-\(chunkFileNumber)"
let fileUrl = directory
.appendingPathComponent(temporaryFileName)
@@ -165,18 +237,20 @@ class VoiceBroadcastRecorderService: VoiceBroadcastRecorderServiceProtocol {
chunkFile = try? AVAudioFile(forWriting: fileUrl, settings: settings)
if chunkFile != nil {
chunkFileNumber += 1
chunkFrames = 0
} else {
chunkFileNumber -= 1
stopRecordingVoiceBroadcast()
// FIXME: Manage error?
}
}
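Note on the sequence change: the counter now starts at 0 and is incremented before the chunk file name is built, so the first file on disk (and the sequence sent to the server) is 1, and the value passed as lastChunkSequence matches the index of the latest chunk. A tiny sketch of that numbering, with roomId as a placeholder value:

// Sketch of the chunk numbering; roomId is a placeholder.
var chunkFileNumber = 0
let roomId = "!fakeroomid:matrix.org"

func nextChunkFileName() -> String {
    chunkFileNumber += 1 // first chunk is 1
    return "VoiceBroadcastChunk-\(roomId)-\(chunkFileNumber)"
}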
/// Send chunk file to the server.
private func sendChunkFile(at url: URL, sequence: Int) {
guard let voiceBroadcastService = voiceBroadcastService else {
private func sendChunkFile(at url: URL, sequence: Int, completion: (() -> Void)? = nil) {
guard voiceBroadcastService != nil else {
// FIXME: Manage error
MXLog.debug("[VoiceBroadcastRecorderService] sendChunkFile: service is not available")
completion?()
return
}
@@ -200,21 +274,29 @@ class VoiceBroadcastRecorderService: VoiceBroadcastRecorderServiceProtocol {
}
convertAACToM4A(at: url) { [weak self] convertedUrl in
guard let self = self else { return }
guard let self = self else {
completion?()
return
}
// Delete the source file.
self.deleteRecording(at: url)
if let convertedUrl = convertedUrl {
dispatchGroup.notify(queue: .main) {
self.voiceBroadcastService?.sendChunkOfVoiceBroadcast(audioFileLocalURL: convertedUrl,
mimeType: "audio/mp4",
duration: UInt(duration * 1000),
samples: nil,
sequence: UInt(sequence)) { eventId in
MXLog.debug("[VoiceBroadcastRecorderService] Send voice broadcast chunk with success.")
if eventId != nil {
self.deleteRecording(at: url)
}
self.deleteRecording(at: convertedUrl)
completion?()
} failure: { error in
MXLog.error("[VoiceBroadcastRecorderService] Failed to send voice broadcast chunk.", context: error)
// Do not delete the file to be sent if the request failed; the retry flow will need it
// There is no manual mechanism to clean it up afterwards, but the tmp folder
// it lives in will eventually be deleted by the system
completion?()
}
}
}
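The reworked sendChunkFile(at:sequence:completion:) threads a completion handler through every exit path (missing service, deallocated self, failed conversion, successful or failed upload), so the stop path can tear the service down only after the last chunk has been handled. A minimal sketch of that shape; convert and upload are placeholder async steps, not names from the codebase.

import Foundation

// Placeholder async steps: the real code converts AAC to M4A and uploads via the voice broadcast service.
func convert(_ url: URL, completion: @escaping (URL?) -> Void) { completion(url) }
func upload(_ url: URL, sequence: UInt, completion: @escaping (Bool) -> Void) { completion(true) }

func sendChunk(at url: URL, sequence: UInt, completion: (() -> Void)? = nil) {
    convert(url) { convertedUrl in
        guard let convertedUrl = convertedUrl else {
            // Conversion failed: still report completion so the caller can tear down.
            completion?()
            return
        }
        upload(convertedUrl, sequence: sequence) { _ in
            // Completion fires exactly once whether the upload succeeded or will be retried.
            completion?()
        }
    }
}

// Usage: tear down only once the final chunk has been processed.
// sendChunk(at: lastChunkUrl, sequence: lastSequence) { tearDownVoiceBroadcastService() }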
@@ -18,6 +18,7 @@ import Foundation
protocol VoiceBroadcastRecorderServiceDelegate: AnyObject {
func voiceBroadcastRecorderService(_ service: VoiceBroadcastRecorderServiceProtocol, didUpdateState state: VoiceBroadcastRecorderState)
func voiceBroadcastRecorderService(_ service: VoiceBroadcastRecorderServiceProtocol, didUpdateRemainingTime remainingTime: UInt)
}
protocol VoiceBroadcastRecorderServiceProtocol {
@@ -23,6 +23,15 @@ struct VoiceBroadcastRecorderView: View {
@Environment(\.theme) private var theme: ThemeSwiftUI
@State private var showingStopAlert = false
private var backgroundColor: Color {
if viewModel.viewState.recordingState != .paused {
return theme.colors.alert
}
return theme.colors.quarterlyContent
}
// MARK: Public
@ObservedObject var viewModel: VoiceBroadcastRecorderViewModel.Context
@@ -30,10 +39,43 @@ struct VoiceBroadcastRecorderView: View {
var body: some View {
let details = viewModel.viewState.details
VStack(alignment: .leading, spacing: 16.0) {
Text(details.senderDisplayName ?? "")
.font(theme.fonts.bodySB)
.foregroundColor(theme.colors.primaryContent)
VStack(alignment: .center) {
HStack(alignment: .top) {
AvatarImage(avatarData: viewModel.viewState.details.avatarData, size: .xSmall)
VStack(alignment: .leading, spacing: 0) {
Text(details.avatarData.displayName ?? details.avatarData.matrixItemId)
.font(theme.fonts.bodySB)
.foregroundColor(theme.colors.primaryContent)
Label {
Text(VectorL10n.voiceBroadcastTile)
.foregroundColor(theme.colors.secondaryContent)
.font(theme.fonts.caption1)
} icon: {
Image(uiImage: Asset.Images.voiceBroadcastTileLive.image)
}
Label {
Text(viewModel.viewState.currentRecordingState.remainingTimeLabel)
.foregroundColor(theme.colors.secondaryContent)
.font(theme.fonts.caption1)
} icon: {
Image(uiImage: Asset.Images.voiceBroadcastTimeLeft.image)
}
}.frame(maxWidth: .infinity, alignment: .leading)
Label {
Text(VectorL10n.voiceBroadcastLive)
.font(theme.fonts.caption1SB)
.foregroundColor(Color.white)
} icon: {
Image(uiImage: Asset.Images.voiceBroadcastLive.image)
}
.padding(.horizontal, 5)
.background(RoundedRectangle(cornerRadius: 4, style: .continuous).fill(backgroundColor))
.accessibilityIdentifier("liveButton")
}
HStack(alignment: .top, spacing: 16.0) {
Button {
@@ -57,11 +99,20 @@ struct VoiceBroadcastRecorderView: View {
.accessibilityIdentifier("recordButton")
Button {
viewModel.send(viewAction: .stop)
showingStopAlert = true
} label: {
Image("voice_broadcast_stop")
.renderingMode(.original)
}
.alert(isPresented:$showingStopAlert) {
Alert(title: Text(VectorL10n.voiceBroadcastStopAlertTitle),
message: Text(VectorL10n.voiceBroadcastStopAlertDescription),
primaryButton: .cancel(),
secondaryButton: .default(Text(VectorL10n.voiceBroadcastStopAlertAgreeButton),
action: {
viewModel.send(viewAction: .stop)
}))
}
.accessibilityIdentifier("stopButton")
.disabled(viewModel.viewState.recordingState == .stopped)
.mask(Color.black.opacity(viewModel.viewState.recordingState == .stopped ? 0.3 : 1.0))
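The stop button no longer stops immediately: tapping it only sets showingStopAlert, and the .stop action is sent from the alert's confirm button. A self-contained sketch of that confirmation pattern; the strings and the onConfirm closure are placeholders for the localized VectorL10n texts and viewModel.send(viewAction: .stop).

import SwiftUI

struct StopConfirmationButton: View {
    @State private var showingStopAlert = false
    let onConfirm: () -> Void // placeholder for viewModel.send(viewAction: .stop)

    var body: some View {
        Button {
            // Ask for confirmation instead of stopping right away.
            showingStopAlert = true
        } label: {
            Image(systemName: "stop.fill")
        }
        .alert(isPresented: $showingStopAlert) {
            Alert(title: Text("Stop your live broadcast?"),
                  message: Text("Are you sure you want to stop your live broadcast?"),
                  primaryButton: .cancel(),
                  secondaryButton: .default(Text("Stop"), action: onConfirm))
        }
    }
}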
@@ -32,11 +32,18 @@ enum VoiceBroadcastRecorderState {
struct VoiceBroadcastRecorderDetails {
let senderDisplayName: String?
let avatarData: AvatarInputProtocol
}
struct VoiceBroadcastRecordingState {
var remainingTime: UInt
var remainingTimeLabel: String
}
struct VoiceBroadcastRecorderViewState: BindableState {
var details: VoiceBroadcastRecorderDetails
var recordingState: VoiceBroadcastRecorderState
var currentRecordingState: VoiceBroadcastRecordingState
var bindings: VoiceBroadcastRecorderViewStateBindings
}
@@ -31,8 +31,9 @@ enum MockVoiceBroadcastRecorderScreenState: MockScreenState, CaseIterable {
}
var screenView: ([Any], AnyView) {
let details = VoiceBroadcastRecorderDetails(senderDisplayName: "")
let viewModel = MockVoiceBroadcastRecorderViewModel(initialViewState: VoiceBroadcastRecorderViewState(details: details, recordingState: .started, bindings: VoiceBroadcastRecorderViewStateBindings()))
let details = VoiceBroadcastRecorderDetails(senderDisplayName: "", avatarData: AvatarInput(mxContentUri: "", matrixItemId: "!fakeroomid:matrix.org", displayName: "The name of the room"))
let recordingState = VoiceBroadcastRecordingState(remainingTime: BuildSettings.voiceBroadcastMaxLength, remainingTimeLabel: "1h 20m 47s left")
let viewModel = MockVoiceBroadcastRecorderViewModel(initialViewState: VoiceBroadcastRecorderViewState(details: details, recordingState: .started, currentRecordingState: recordingState, bindings: VoiceBroadcastRecorderViewStateBindings()))
return (
[false, viewModel],
@@ -34,8 +34,10 @@ class VoiceBroadcastRecorderViewModel: VoiceBroadcastRecorderViewModelType, Voic
init(details: VoiceBroadcastRecorderDetails,
recorderService: VoiceBroadcastRecorderServiceProtocol) {
self.voiceBroadcastRecorderService = recorderService
let currentRecordingState = VoiceBroadcastRecorderViewModel.currentRecordingState(from: BuildSettings.voiceBroadcastMaxLength)
super.init(initialViewState: VoiceBroadcastRecorderViewState(details: details,
recordingState: .stopped,
currentRecordingState: currentRecordingState,
bindings: VoiceBroadcastRecorderViewStateBindings()))
self.voiceBroadcastRecorderService.serviceDelegate = self
@@ -77,10 +79,27 @@ class VoiceBroadcastRecorderViewModel: VoiceBroadcastRecorderViewModelType, Voic
self.state.recordingState = .resumed
voiceBroadcastRecorderService.resumeRecordingVoiceBroadcast()
}
private func updateRemainingTime(_ remainingTime: UInt) {
state.currentRecordingState = VoiceBroadcastRecorderViewModel.currentRecordingState(from: remainingTime)
}
private static func currentRecordingState(from remainingTime: UInt) -> VoiceBroadcastRecordingState {
let time = TimeInterval(Double(remainingTime))
let formatter = DateComponentsFormatter()
formatter.unitsStyle = .abbreviated
return VoiceBroadcastRecordingState(remainingTime: remainingTime,
remainingTimeLabel: VectorL10n.voiceBroadcastTimeLeft(formatter.string(from: time) ?? "0s"))
}
}
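The remaining-time label comes from DateComponentsFormatter with the .abbreviated units style, which renders a number of seconds as strings like "1h 20m 47s" (the mock state above shows the same shape). A minimal sketch of that formatting step:

import Foundation

// Sketch: format the remaining seconds the same way the view model does.
func remainingTimeLabel(from remainingSeconds: UInt) -> String {
    let formatter = DateComponentsFormatter()
    formatter.unitsStyle = .abbreviated // e.g. "1h 20m 47s"
    return formatter.string(from: TimeInterval(remainingSeconds)) ?? "0s"
}

// remainingTimeLabel(from: 4847) -> "1h 20m 47s" in an en-US locale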
extension VoiceBroadcastRecorderViewModel: VoiceBroadcastRecorderServiceDelegate {
func voiceBroadcastRecorderService(_ service: VoiceBroadcastRecorderServiceProtocol, didUpdateState state: VoiceBroadcastRecorderState) {
self.state.recordingState = state
}
func voiceBroadcastRecorderService(_ service: VoiceBroadcastRecorderServiceProtocol, didUpdateRemainingTime remainingTime: UInt) {
self.updateRemainingTime(remainingTime)
}
}