import Foundation
import AVFoundation
import Darwin
/// Plays local audio files through an `AVAudioEngine`, scheduling segments
/// against a shared `SyncClock` uptime so multiple peers can start in sync.
final class AudioPlayerController: NSObject, ObservableObject {

    // MARK: - Published state

    /// Whether playback is (believed to be) active; refreshed by the progress timer.
    @Published private(set) var isPlaying: Bool = false

    /// Current playback position in seconds, updated ~4x/sec by the progress timer.
    @Published private(set) var playbackPosition: TimeInterval = 0

    // MARK: - Private state

    private let engine = AVAudioEngine()
    private let playerNode = AVAudioPlayerNode()
    private var audioFile: AVAudioFile?
    private var progressTimer: Timer?
    private var didConfigureSession = false

    private var currentTrack: LocalTrack?

    /// Position (seconds) at which the most recent segment was scheduled.
    /// Added to the node's elapsed time to recover the absolute position,
    /// because the node's timeline restarts at 0 on every stop()/schedule.
    private var scheduleStartPosition: TimeInterval = 0

    /// Monotonically increasing token identifying the most recent schedule.
    /// Calling `playerNode.stop()` fires the *previous* segment's completion
    /// handler asynchronously; without this token that stale handler would
    /// clobber `isPlaying` and kill the timer right after a seek or drift
    /// correction restarted playback.
    private var scheduleGeneration: UInt64 = 0

    override init() {
        super.init()
        engine.attach(playerNode)
        // format: nil lets the engine use the mixer's output format.
        engine.connect(playerNode, to: engine.mainMixerNode, format: nil)
    }

    /// Schedules `track` to start at the shared-clock uptime `startUptime`,
    /// beginning `startPosition` seconds into the file. On failure, playback
    /// state is cleared rather than thrown to the caller.
    func play(track: LocalTrack, atUptime startUptime: TimeInterval, startPosition: TimeInterval) {
        configureAudioSessionIfNeeded()

        do {
            let file = try AVAudioFile(forReading: track.url)
            audioFile = file
            currentTrack = track
            scheduleStartPosition = startPosition

            // Clamp BEFORE converting to AVAudioFrameCount: a start position
            // past the end of the file (or a negative one) made the original
            // `AVAudioFrameCount(totalFrames - startFrame)` convert a negative
            // Int64 to UInt32, which traps at runtime.
            let startFrame = max(AVAudioFramePosition(startPosition * file.fileFormat.sampleRate), 0)
            let framesLeft = AVAudioFrameCount(max(file.length - startFrame, 0))

            // Nothing left to play — treat as an immediate end-of-track
            // instead of handing the node an empty segment.
            guard framesLeft > 0 else {
                scheduleGeneration &+= 1
                playerNode.stop()
                isPlaying = false
                stopProgressTimer()
                return
            }

            if !engine.isRunning {
                try engine.start()
            }

            // Bump the generation before stop() so the superseded segment's
            // completion handler (fired by stop()) is recognized as stale.
            scheduleGeneration &+= 1
            let generation = scheduleGeneration
            playerNode.stop()

            playerNode.scheduleSegment(
                file,
                startingFrame: startFrame,
                frameCount: framesLeft,
                at: AVAudioTime(hostTime: hostTime(forUptime: startUptime))
            ) { [weak self] in
                DispatchQueue.main.async {
                    guard let self, self.scheduleGeneration == generation else { return }
                    self.isPlaying = false
                    self.stopProgressTimer()
                }
            }

            if !playerNode.isPlaying {
                playerNode.play()
            }

            isPlaying = true
            startProgressTimer()
        } catch {
            // Opening the file or starting the engine failed; make sure a
            // timer from a previous play doesn't keep publishing positions.
            isPlaying = false
            stopProgressTimer()
        }
    }

    /// Pauses playback; the scheduled segment is kept so `play()` can resume it.
    func pause() {
        playerNode.pause()
        isPlaying = false
        stopProgressTimer()
    }

    /// Stops playback and resets the published position to zero.
    func stop() {
        // Invalidate the completion handler that stop() is about to fire.
        scheduleGeneration &+= 1
        playerNode.stop()
        isPlaying = false
        playbackPosition = 0
        stopProgressTimer()
    }

    /// Seeks by rescheduling the current track from `position`, anchored a
    /// short fixed delay from now so the schedule time is still in the future.
    func seek(to position: TimeInterval) {
        guard let track = currentTrack else { return }
        play(track: track, atUptime: SyncClock.uptime() + 0.1, startPosition: position)
    }

    /// Re-anchors playback when the locally observed position has drifted more
    /// than 150 ms from where the peer (reporting at `hostUptime`) says it was.
    func correctDrift(targetPosition: TimeInterval, hostUptime: TimeInterval) {
        guard let track = currentTrack else { return }
        let targetNow = SyncClock.convert(hostUptime: hostUptime)
        // Project the peer's reported position forward to "now" on our clock.
        let expectedPosition = targetPosition + (SyncClock.uptime() - targetNow)
        let drift = expectedPosition - playbackPosition
        if abs(drift) > 0.15 {
            play(track: track, atUptime: SyncClock.uptime() + 0.1, startPosition: expectedPosition)
        }
    }

    // MARK: - Progress timer

    private func startProgressTimer() {
        stopProgressTimer()
        progressTimer = Timer.scheduledTimer(withTimeInterval: 0.25, repeats: true) { [weak self] _ in
            guard let self else { return }
            if let nodeTime = self.playerNode.lastRenderTime,
               let playerTime = self.playerNode.playerTime(forNodeTime: nodeTime) {
                // The node's timeline restarts at 0 on each stop()/schedule,
                // hence the scheduleStartPosition offset.
                let seconds = Double(playerTime.sampleTime) / playerTime.sampleRate
                self.playbackPosition = self.scheduleStartPosition + seconds
                self.isPlaying = self.playerNode.isPlaying
            }
        }
    }

    private func stopProgressTimer() {
        progressTimer?.invalidate()
        progressTimer = nil
    }

    // MARK: - Session / clock helpers

    /// Activates the shared audio session once; on failure the flag stays
    /// false so the next play() retries activation.
    private func configureAudioSessionIfNeeded() {
        guard !didConfigureSession else { return }
        do {
            let session = AVAudioSession.sharedInstance()
            try session.setCategory(.playback, mode: .default)
            try session.setActive(true)
            didConfigureSession = true
        } catch {
            didConfigureSession = false
        }
    }

    /// Converts a SyncClock uptime deadline into a mach host time suitable for
    /// `AVAudioTime(hostTime:)`. Deadlines already in the past collapse to "now".
    private func hostTime(forUptime startUptime: TimeInterval) -> UInt64 {
        let nowUptime = SyncClock.uptime()
        let delay = max(0, startUptime - nowUptime)
        let hostDelay = AVAudioTime.hostTime(forSeconds: delay)
        return mach_absolute_time() + hostDelay
    }
}