diff --git a/Source/Engine/AudioStreamEngine.swift b/Source/Engine/AudioStreamEngine.swift
index b3fb310..101c7a2 100644
--- a/Source/Engine/AudioStreamEngine.swift
+++ b/Source/Engine/AudioStreamEngine.swift
@@ -63,7 +63,7 @@ class AudioStreamEngine: AudioEngine {
 
     private let queue = DispatchQueue(label: "SwiftAudioPlayer.StreamEngine", qos: .userInitiated)
 
-    //From init
+    // From init
     private var converter: AudioConvertable!
 
     //Fields
@@ -121,7 +121,11 @@ class AudioStreamEngine: AudioEngine {
             let s = predictedStreamDurationDebounceHelper
             if d/DEBOUNCING_BUFFER_TIME != s/DEBOUNCING_BUFFER_TIME {
                 predictedStreamDurationDebounceHelper = predictedStreamDuration
-                duration = predictedStreamDuration
+                if AudioDataManager.shared.currentStreamFinished {
+                    duration = AudioDataManager.shared.currentStreamFinishedWithDuration
+                } else {
+                    duration = predictedStreamDuration
+                }
             }
         }
     }
@@ -162,7 +166,7 @@ class AudioStreamEngine: AudioEngine {
 
         streamChangeListenerId = StreamingDownloadDirector.shared.attach { [weak self] (progress) in
             guard let self = self else { return }
-            
+            // polling for buffers when we receive data. This won't be throttled on fresh new audio or seeked audio but in all other cases it most likely will be throttled
             self.pollForNextBuffer() // no buffer updates because thread issues if I try to update buffer status in streaming listener
         }
@@ -203,6 +207,9 @@
     private func pollForNextBufferRecursive() {
+        if !converter.initialized {
+            return
+        }
         do {
             var nextScheduledBuffer: AVAudioPCMBuffer! = try converter.pullBuffer()
             numberOfBuffersScheduledFromPoll += 1
@@ -242,15 +249,20 @@
         let range = converter.pollNetworkAudioAvailabilityRange()
         isPlayable = (numberOfBuffersScheduledInTotal >= MIN_BUFFERS_TO_BE_PLAYABLE && range.1 > 0) && predictedStreamDuration > 0
         Log.debug("loaded \(range), numberOfBuffersScheduledInTotal: \(numberOfBuffersScheduledInTotal), isPlayable: \(isPlayable)")
-        bufferedSeconds = SAAudioAvailabilityRange(startingNeedle: range.0, durationLoadedByNetwork: range.1, predictedDurationToLoad: predictedStreamDuration, isPlayable: isPlayable)
+        if AudioDataManager.shared.currentStreamFinished {
+            AudioDataManager.shared.updateDuration(d: range.1)
+            bufferedSeconds = SAAudioAvailabilityRange(startingNeedle: range.0, durationLoadedByNetwork: range.1, predictedDurationToLoad: range.1, isPlayable: isPlayable)
+        } else {
+            bufferedSeconds = SAAudioAvailabilityRange(startingNeedle: range.0, durationLoadedByNetwork: range.1, predictedDurationToLoad: predictedStreamDuration, isPlayable: isPlayable)
+        }
     }
 
     private func updateNeedle() {
         guard engine.isRunning else { return }
 
         guard let nodeTime = playerNode.lastRenderTime,
-            let playerTime = playerNode.playerTime(forNodeTime: nodeTime) else {
-                return
+              let playerTime = playerNode.playerTime(forNodeTime: nodeTime) else {
+            return
         }
 
         //NOTE: playerTime can sometimes be < 0 when seeking. Reason pasted below
@@ -265,15 +277,20 @@ class AudioStreamEngine: AudioEngine {
 
     private func updateDuration() {
         if let d = converter.pollPredictedDuration() {
-            self.predictedStreamDuration = d
-        }
+            self.predictedStreamDuration = d
+            if AudioDataManager.shared.currentStreamFinished {
+                self.predictedStreamDuration = AudioDataManager.shared.currentStreamFinishedWithDuration
+            } else {
+                self.predictedStreamDuration = d
+            }
+        }
     }
 
     //MARK:- Overriden From Parent
     override func seek(toNeedle needle: Needle) {
         Log.info("didSeek to needle: \(needle)")
-        
+        // if not playable (data not loaded etc), duration could be zero.
         guard isPlayable else {
             if predictedStreamDuration == 0 {
@@ -281,7 +298,7 @@ class AudioStreamEngine: AudioEngine {
             }
             return
         }
-        
+
         guard needle < (ceil(predictedStreamDuration)) else {
             if !isPlayable {
                 seekNeedleCommandBeforeEngineWasReady = needle
@@ -349,7 +366,7 @@ class AudioStreamEngine: AudioEngine {
             self?.converter.invalidate()
         }
     }
-    
+
     private func invalidateHelperDispatchQueue() {
        super.invalidate()
    }
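
Note on the AudioStreamEngine changes above: once AudioDataManager reports that the current stream has finished downloading, the engine stops trusting the byte-rate prediction and reports the duration that was actually loaded (range.1), both for the published duration and for the buffered-seconds range. Below is a minimal, self-contained sketch of that decision only; DurationReporter and its property names are illustrative and not part of the library.

    import Foundation

    // Hypothetical sketch: prefer the measured duration over the prediction
    // once the stream has fully downloaded, mirroring the intent of the
    // currentStreamFinished branches above.
    struct DurationReporter {
        var predicted: TimeInterval   // estimated from file size and bitrate
        var finished: TimeInterval?   // known only after the download completes

        // Value the UI should display as the total duration.
        var reported: TimeInterval { finished ?? predicted }
    }

    let whileStreaming = DurationReporter(predicted: 312.4, finished: nil)
    let afterFinish = DurationReporter(predicted: 312.4, finished: 305.0)
    print(whileStreaming.reported) // 312.4 (prediction)
    print(afterFinish.reported)    // 305.0 (exact)

The same preference shows up in updateDuration(), where the finished duration overrides the value returned by pollPredictedDuration() once the flag is set.
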
diff --git a/Source/Engine/Converter/AudioConverter.swift b/Source/Engine/Converter/AudioConverter.swift
index 5670c7f..2c6ab6e 100644
--- a/Source/Engine/Converter/AudioConverter.swift
+++ b/Source/Engine/Converter/AudioConverter.swift
@@ -35,6 +35,7 @@ import AudioToolbox
 
 protocol AudioConvertable {
     var engineAudioFormat: AVAudioFormat {get}
+    var initialized: Bool {get}
 
     init(withRemoteUrl url: AudioURL, toEngineAudioFormat: AVAudioFormat, withPCMBufferSize size: AVAudioFrameCount) throws
     func pullBuffer() throws -> AVAudioPCMBuffer
@@ -74,6 +75,11 @@ class AudioConverter: AudioConvertable {
 
     //Field
     var converter: AudioConverterRef? //set by AudioConverterNew
+
+    public var initialized: Bool {
+        converter != nil
+    }
+
     var currentAudioPacketIndex: AVAudioPacketCount = 0
 
     // use to store reference to the allocated buffers from the converter to properly deallocate them before the next packet is being converted
@@ -132,7 +138,9 @@ class AudioConverter: AudioConvertable {
         needs to eventually increment the audioPatcketIndex. We don't want threads to mess this up
         */
-        return try queue.sync { () -> AVAudioPCMBuffer in
+        return try queue.sync {() -> AVAudioPCMBuffer in
+
+
             let framesPerPacket = engineAudioFormat.streamDescription.pointee.mFramesPerPacket
             var numberOfPacketsWeWantTheBufferToFill = pcmBuffer.frameLength / framesPerPacket
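
The new initialized requirement simply reports whether AudioConverterNew has produced a converter yet (converter != nil), and pollForNextBufferRecursive() uses it to return early instead of pulling from a converter that does not exist. A rough sketch of the same readiness-guard pattern, using made-up types (BufferProducer, ProducerError) rather than the library's classes:

    import Foundation

    // Hypothetical sketch of the readiness guard: expose "initialized" as a
    // computed property over an optional underlying handle and return early
    // from polling code until that handle exists.
    final class BufferProducer {
        private var handle: OpaquePointer?        // stands in for AudioConverterRef

        var initialized: Bool { handle != nil }

        enum ProducerError: Error { case notReady }

        func pullBuffer() throws -> [Float] {
            guard initialized else { throw ProducerError.notReady }
            return []                             // conversion would happen here
        }
    }

    // Caller side, shaped like the early return added to pollForNextBufferRecursive():
    func poll(_ producer: BufferProducer) {
        guard producer.initialized else { return } // nothing to schedule yet
        _ = try? producer.pullBuffer()             // safe: the handle exists
    }

Exposing readiness as a computed property keeps the check cheap and avoids storing a second flag that could drift out of sync with the optional handle.
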
diff --git a/Source/Engine/Converter/AudioConverterListener.swift b/Source/Engine/Converter/AudioConverterListener.swift
index 656c16f..21c1e45 100644
--- a/Source/Engine/Converter/AudioConverterListener.swift
+++ b/Source/Engine/Converter/AudioConverterListener.swift
@@ -80,15 +80,16 @@ func ConverterListener(_ converter: AudioConverterRef, _ packetCount: UnsafeMuta
     ioData.pointee.mBuffers.mDataByteSize = UInt32(packetByteCount)
     selfAudioConverter.converterBuffer = ioData.pointee.mBuffers.mData
-    
-    if let lastDescription = selfAudioConverter.converterDescriptions {
-        lastDescription.deallocate()
-    }
-    
+    // Handle packet descriptions for compressed formats (MP3, AAC, etc)
     let fileFormatDescription = fileAudioFormat.streamDescription.pointee
     if fileFormatDescription.mFormatID != kAudioFormatLinearPCM {
         if outPacketDescriptions?.pointee == nil {
+
+            if let lastDescription = selfAudioConverter.converterDescriptions {
+                lastDescription.deallocate()
+            }
+
             outPacketDescriptions?.pointee = UnsafeMutablePointer.allocate(capacity: 1)
         }
         outPacketDescriptions?.pointee?.pointee.mDataByteSize = UInt32(packetByteCount)
diff --git a/Source/Engine/Parser/AudioParser.swift b/Source/Engine/Parser/AudioParser.swift
index c486722..aacfc76 100644
--- a/Source/Engine/Parser/AudioParser.swift
+++ b/Source/Engine/Parser/AudioParser.swift
@@ -84,7 +84,7 @@ class AudioParser: AudioParsable {
 
     public var totalPredictedPacketCount: AVAudioPacketCount {
         if parsedAudioHeaderPacketCount != 0 {
-            //TODO: we should log the duration to the server for better user experience
+            // TODO: we should log the duration to the server for better user experience
             return max(AVAudioPacketCount(parsedAudioHeaderPacketCount), AVAudioPacketCount(audioPackets.count))
         }
@@ -97,7 +97,7 @@ class AudioParser: AudioParsable {
         let predictedCount = AVAudioPacketCount(Double(sizeOfFileInBytes) / bytesPerPacket)
 
         guard networkProgress != 1.0 else {
-            return max(AVAudioPacketCount(audioPackets.count), predictedCount)
+            return min(AVAudioPacketCount(audioPackets.count), predictedCount)
         }
 
         return predictedCount
@@ -118,6 +118,7 @@ class AudioParser: AudioParsable {
             //TODO: duration will not be accurate with WAV or AIFF
         }
     }
+
     private let lockQueue = DispatchQueue(label: "SwiftAudioPlayer.Parser.packets.lock")
     var lastSentAudioPacketIndex = -1
@@ -188,6 +189,7 @@ class AudioParser: AudioParsable {
         // Check if we've reached the end of the packets. We have two scenarios:
         // 1. We've reached the end of the packet data and the file has been completely parsed
         // 2. We've reached the end of the data we currently have downloaded, but not the file
+
         let packetIndex = index - indexSeekOffset
 
         var exception: ParserError? = nil
@@ -223,10 +225,10 @@
     }
 
     func tellSeek(toIndex index: AVAudioPacketCount) {
-        //Already within the processed audio packets. Ignore
-        var isIndexValid: Bool = true
+        // Already within the processed audio packets. Ignore
+        var isIndexValid = true
         lockQueue.sync {
-            if self.indexSeekOffset <= index && index < self.audioPackets.count + Int(self.indexSeekOffset) {
+            if self.indexSeekOffset <= index, index < self.audioPackets.count + Int(self.indexSeekOffset) {
                 isIndexValid = false
             }
         }
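
The AudioConverterListener change narrows when the previously allocated packet descriptions are freed: instead of deallocating on every callback, the old allocation is released only for compressed formats and only at the moment a replacement is about to be allocated. Below is a standalone sketch of that allocate-before-release pairing; PacketDescriptionStore is a hypothetical wrapper, not the listener's real structure.

    import AudioToolbox

    // Hypothetical sketch: keep exactly one AudioStreamPacketDescription allocation
    // alive and release the previous one only when a replacement is about to be
    // allocated, which is the ordering the listener change above enforces.
    final class PacketDescriptionStore {
        private(set) var current: UnsafeMutablePointer<AudioStreamPacketDescription>?

        // Returns a description pointer for the next packet of a compressed stream.
        func refresh(byteSize: UInt32) -> UnsafeMutablePointer<AudioStreamPacketDescription> {
            // Free the previous allocation only now, right before replacing it,
            // so the converter never reads memory that has already been returned.
            current?.deallocate()

            let fresh = UnsafeMutablePointer<AudioStreamPacketDescription>.allocate(capacity: 1)
            fresh.pointee = AudioStreamPacketDescription(mStartOffset: 0,
                                                         mVariableFramesInPacket: 0,
                                                         mDataByteSize: byteSize)
            current = fresh
            return fresh
        }

        deinit { current?.deallocate() }
    }

    // Usage: each refresh hands out a valid description for one packet.
    let store = PacketDescriptionStore()
    let description = store.refresh(byteSize: 417)   // e.g. one MP3 packet
    print(description.pointee.mDataByteSize)          // 417
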
diff --git a/Source/Model/AudioDataManager.swift b/Source/Model/AudioDataManager.swift
index d2f2714..e8ad3e4 100644
--- a/Source/Model/AudioDataManager.swift
+++ b/Source/Model/AudioDataManager.swift
@@ -26,6 +26,8 @@ import Foundation
 
 protocol AudioDataManagable {
+    var currentStreamFinished: Bool { get }
+    var currentStreamFinishedWithDuration: Duration { get }
     var numberOfQueued: Int { get }
     var numberOfActive: Int { get }
@@ -38,6 +40,7 @@ protocol AudioDataManagable {
     func setDownloadDirectory(_ dir: FileManager.SearchPathDirectory)
     func clear()
+    func updateDuration(d: Duration)
 
     //Director pattern
     func attach(callback: @escaping (_ id: ID, _ progress: Double)->())
@@ -55,8 +58,13 @@ protocol AudioDataManagable {
 }
 
 class AudioDataManager: AudioDataManagable {
+    var currentStreamFinishedWithDuration: Duration = 0
+
     var allowCellular: Bool = true
     var downloadDirectory: FileManager.SearchPathDirectory = .documentDirectory
+
+    public var currentStreamFinished = false
+    public var totalStreamedDuration = 0
 
     static let shared: AudioDataManagable = AudioDataManager()
@@ -95,6 +103,10 @@ class AudioDataManager: AudioDataManagable {
                                               progressCallback: streamProgressListener,
                                               doneCallback: streamDoneListener)
     }
+
+    func updateDuration(d: Duration) {
+        currentStreamFinishedWithDuration = d
+    }
 
     func clear() {
         streamingCallbacks = []
@@ -125,6 +137,7 @@ class AudioDataManager: AudioDataManagable {
 // MARK:- Streaming
 extension AudioDataManager {
     func startStream(withRemoteURL url: AudioURL, callback: @escaping (StreamProgressPTO) -> ()) {
+        currentStreamFinished = false
         if let data = FileStorage.Audio.read(url.key) {
             let dto = StreamProgressDTO.init(progress: 1.0, data: data, totalBytesExpected: Int64(data.count))
             callback(StreamProgressPTO(dto: dto))
@@ -154,10 +167,12 @@ extension AudioDataManager {
         streamWorker.resume(withId: url.key)
     }
 
     func seekStream(withRemoteURL url: AudioURL, toByteOffset offset: UInt64) {
+        currentStreamFinished = false
         streamWorker.seek(withId: url.key, withByteOffset: offset)
     }
     func deleteStream(withRemoteURL url: AudioURL) {
+        currentStreamFinished = false
         streamWorker.stop(withId: url.key)
         streamingCallbacks.removeAll { (cb: (ID, (StreamProgressPTO) -> ())) -> Bool in
             return cb.0 == url.key
@@ -230,7 +245,7 @@ extension AudioDataManager {
         if error != nil {
             return false
         }
-        
+        currentStreamFinished = true
         downloadWorker.resumeAllActive()
         return false
     }
diff --git a/Source/SAPlayerPresenter.swift b/Source/SAPlayerPresenter.swift
index fc28957..19885db 100644
--- a/Source/SAPlayerPresenter.swift
+++ b/Source/SAPlayerPresenter.swift
@@ -192,7 +192,7 @@ extension SAPlayerPresenter : LockScreenViewPresenter {
     }
 
     func handleSkipBackward() {
-        guard let backward = delegate?.skipForwardSeconds else { return }
+        guard let backward = delegate?.skipBackwardSeconds else { return }
         handleSeek(toNeedle: (needle ?? 0) - backward)
     }
 
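
Taken together, the AudioDataManager changes give the new flag a simple lifecycle: cleared whenever a stream starts, seeks, or is deleted, set once the network layer reports completion, and paired through updateDuration(d:) with the exact duration that the engine then prefers. A compact sketch of that state machine with illustrative names (StreamState), not the manager's real API:

    import Foundation

    // Hypothetical sketch of the currentStreamFinished lifecycle wired above:
    // cleared on start/seek/delete, set on completion together with the exact
    // duration, and consulted when deciding what duration to report.
    final class StreamState {
        private(set) var finished = false
        private(set) var finishedDuration: TimeInterval = 0

        func streamStarted() { finished = false }   // startStream(withRemoteURL:)
        func streamSeeked()  { finished = false }   // seekStream(withRemoteURL:toByteOffset:)
        func streamDeleted() { finished = false }   // deleteStream(withRemoteURL:)

        // Done callback: every byte has arrived, so the loaded duration is exact.
        func streamCompleted(loadedDuration: TimeInterval) {
            finished = true
            finishedDuration = loadedDuration
        }

        // What the engine should surface to listeners.
        func reportedDuration(predicted: TimeInterval) -> TimeInterval {
            finished ? finishedDuration : predicted
        }
    }

Separately, handleSkipBackward() now reads skipBackwardSeconds instead of skipForwardSeconds, so configuring different forward and backward skip intervals no longer makes the backward control jump by the forward amount.
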