fix (**) deallocate only when we're sure we've allocated something #175

Open · wants to merge 1 commit into base: master
17 changes: 10 additions & 7 deletions Source/Engine/AudioStreamEngine.swift
@@ -63,7 +63,7 @@ class AudioStreamEngine: AudioEngine {

private let queue = DispatchQueue(label: "SwiftAudioPlayer.StreamEngine", qos: .userInitiated)

-//From init
+// From init
private var converter: AudioConvertable!

//Fields
@@ -162,7 +162,7 @@ class AudioStreamEngine: AudioEngine {

streamChangeListenerId = StreamingDownloadDirector.shared.attach { [weak self] (progress) in
guard let self = self else { return }

// polling for buffers when we receive data. This won't be throttled on fresh new audio or seeked audio but in all other cases it most likely will be throttled
self.pollForNextBuffer() // no buffer updates because thread issues if I try to update buffer status in streaming listener
}
@@ -203,6 +203,9 @@ class AudioStreamEngine: AudioEngine {
}

private func pollForNextBufferRecursive() {
+if(!converter.initialized) {
+return
+}
do {
var nextScheduledBuffer: AVAudioPCMBuffer! = try converter.pullBuffer()
numberOfBuffersScheduledFromPoll += 1
@@ -249,8 +252,8 @@ class AudioStreamEngine: AudioEngine {
guard engine.isRunning else { return }

guard let nodeTime = playerNode.lastRenderTime,
-let playerTime = playerNode.playerTime(forNodeTime: nodeTime) else {
-return
+let playerTime = playerNode.playerTime(forNodeTime: nodeTime) else {
+return
}

//NOTE: playerTime can sometimes be < 0 when seeking. Reason pasted below
@@ -273,15 +276,15 @@
//MARK:- Overriden From Parent
override func seek(toNeedle needle: Needle) {
Log.info("didSeek to needle: \(needle)")

// if not playable (data not loaded etc), duration could be zero.
guard isPlayable else {
if predictedStreamDuration == 0 {
seekNeedleCommandBeforeEngineWasReady = needle
}
return
}

guard needle < (ceil(predictedStreamDuration)) else {
if !isPlayable {
seekNeedleCommandBeforeEngineWasReady = needle
@@ -349,7 +352,7 @@ class AudioStreamEngine: AudioEngine {
self?.converter.invalidate()
}
}

private func invalidateHelperDispatchQueue() {
super.invalidate()
}
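The substantive change in this file is the early return added to pollForNextBufferRecursive(), which keeps the polling loop from calling pullBuffer() before the converter exists. A minimal sketch of that guard pattern, using stand-in types (StubConverter, PollingEngine) rather than the library's real classes:

```swift
import Foundation

// Stand-in for the real AudioConvertable; names here are illustrative only.
final class StubConverter {
    private(set) var initialized = false
    func setUp() { initialized = true }
    func pullBuffer() throws -> [Float] { [0, 0, 0, 0] }
}

final class PollingEngine {
    let converter = StubConverter()

    func pollForNextBuffer() {
        // Same idea as the PR: bail out before touching the converter
        // if it has not been initialized yet.
        guard converter.initialized else { return }
        if let buffer = try? converter.pullBuffer() {
            print("scheduled a buffer with \(buffer.count) frames")
        }
    }
}

let engine = PollingEngine()
engine.pollForNextBuffer()      // no-op: converter not set up yet
engine.converter.setUp()
engine.pollForNextBuffer()      // now pulls and "schedules" a buffer
```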
10 changes: 9 additions & 1 deletion Source/Engine/Converter/AudioConverter.swift
@@ -35,6 +35,7 @@ import AudioToolbox

protocol AudioConvertable {
var engineAudioFormat: AVAudioFormat {get}
+var initialized:Bool {get}

init(withRemoteUrl url: AudioURL, toEngineAudioFormat: AVAudioFormat, withPCMBufferSize size: AVAudioFrameCount) throws
func pullBuffer() throws -> AVAudioPCMBuffer
@@ -74,6 +75,11 @@ class AudioConverter: AudioConvertable {

//Field
var converter: AudioConverterRef? //set by AudioConverterNew

+public var initialized:Bool {
+converter != nil
+}

var currentAudioPacketIndex: AVAudioPacketCount = 0

// use to store reference to the allocated buffers from the converter to properly deallocate them before the next packet is being converted
@@ -132,7 +138,9 @@ class AudioConverter: AudioConvertable {
needs to eventually increment the audioPatcketIndex. We don't want threads
to mess this up
*/
-return try queue.sync { () -> AVAudioPCMBuffer in
+return try queue.sync {() -> AVAudioPCMBuffer in


let framesPerPacket = engineAudioFormat.streamDescription.pointee.mFramesPerPacket
var numberOfPacketsWeWantTheBufferToFill = pcmBuffer.frameLength / framesPerPacket

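The AudioConvertable protocol gains an initialized requirement, implemented in AudioConverter as a nil check on the AudioConverterRef that AudioConverterNew sets. A small self-contained sketch of the same pattern, with a placeholder handle standing in for the real Core Audio reference:

```swift
import Foundation

// Sketch of the new protocol requirement: expose "has the native converter
// been created?" as a read-only flag backed by an optional.
protocol Convertable {
    var initialized: Bool { get }
}

final class Converter: Convertable {
    // Stands in for `converter: AudioConverterRef?`, which is only non-nil
    // after the native converter has been created.
    private var handle: OpaquePointer?

    var initialized: Bool { handle != nil }

    func open() {
        // Pretend the native converter was created here.
        handle = OpaquePointer(bitPattern: 0x1)
    }
}

let converter = Converter()
print(converter.initialized)   // false
converter.open()
print(converter.initialized)   // true
```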
11 changes: 6 additions & 5 deletions Source/Engine/Converter/AudioConverterListener.swift
@@ -80,15 +80,16 @@ func ConverterListener(_ converter: AudioConverterRef, _ packetCount: UnsafeMuta
ioData.pointee.mBuffers.mDataByteSize = UInt32(packetByteCount)

selfAudioConverter.converterBuffer = ioData.pointee.mBuffers.mData

-if let lastDescription = selfAudioConverter.converterDescriptions {
-lastDescription.deallocate()
-}


// Handle packet descriptions for compressed formats (MP3, AAC, etc)
let fileFormatDescription = fileAudioFormat.streamDescription.pointee
if fileFormatDescription.mFormatID != kAudioFormatLinearPCM {
if outPacketDescriptions?.pointee == nil {

+if let lastDescription = selfAudioConverter.converterDescriptions {
+lastDescription.deallocate()
+}

outPacketDescriptions?.pointee = UnsafeMutablePointer<AudioStreamPacketDescription>.allocate(capacity: 1)
}
outPacketDescriptions?.pointee?.pointee.mDataByteSize = UInt32(packetByteCount)
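This is the change the PR title describes: the previously allocated packet-description buffer is now deallocated only inside the branch that is about to allocate its replacement, so nothing is freed unless something was actually allocated. A standalone sketch of that allocate-then-deallocate discipline; the holder type and method names are illustrative, not the library's converter internals:

```swift
import AudioToolbox

// Sketch of "deallocate only when we're sure we've allocated something":
// keep a reference to the previous allocation and free it only right before
// allocating its replacement.
final class PacketDescriptionHolder {
    private var lastDescription: UnsafeMutablePointer<AudioStreamPacketDescription>?

    func refreshedDescription(byteSize: UInt32) -> UnsafeMutablePointer<AudioStreamPacketDescription> {
        // Free the previous buffer only if one was actually allocated earlier.
        if let previous = lastDescription {
            previous.deallocate()
        }
        let fresh = UnsafeMutablePointer<AudioStreamPacketDescription>.allocate(capacity: 1)
        fresh.pointee = AudioStreamPacketDescription(mStartOffset: 0,
                                                     mVariableFramesInPacket: 0,
                                                     mDataByteSize: byteSize)
        lastDescription = fresh
        return fresh
    }

    deinit {
        // Final cleanup for whatever is still held.
        lastDescription?.deallocate()
    }
}

let holder = PacketDescriptionHolder()
_ = holder.refreshedDescription(byteSize: 417)   // first call: nothing to free yet
_ = holder.refreshedDescription(byteSize: 523)   // later calls free the previous one
```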
12 changes: 7 additions & 5 deletions Source/Engine/Parser/AudioParser.swift
@@ -84,7 +84,7 @@ class AudioParser: AudioParsable {

public var totalPredictedPacketCount: AVAudioPacketCount {
if parsedAudioHeaderPacketCount != 0 {
-//TODO: we should log the duration to the server for better user experience
+// TODO: we should log the duration to the server for better user experience
return max(AVAudioPacketCount(parsedAudioHeaderPacketCount), AVAudioPacketCount(audioPackets.count))
}

@@ -97,7 +97,7 @@
let predictedCount = AVAudioPacketCount(Double(sizeOfFileInBytes) / bytesPerPacket)

guard networkProgress != 1.0 else {
-return max(AVAudioPacketCount(audioPackets.count), predictedCount)
+return min(AVAudioPacketCount(audioPackets.count), predictedCount)
}

return predictedCount
@@ -118,6 +118,7 @@
//TODO: duration will not be accurate with WAV or AIFF
}
}

private let lockQueue = DispatchQueue(label: "SwiftAudioPlayer.Parser.packets.lock")
var lastSentAudioPacketIndex = -1

@@ -188,6 +189,7 @@ class AudioParser: AudioParsable {
// Check if we've reached the end of the packets. We have two scenarios:
// 1. We've reached the end of the packet data and the file has been completely parsed
// 2. We've reached the end of the data we currently have downloaded, but not the file

let packetIndex = index - indexSeekOffset

var exception: ParserError? = nil
@@ -223,10 +225,10 @@ class AudioParser: AudioParsable {
}

func tellSeek(toIndex index: AVAudioPacketCount) {
-//Already within the processed audio packets. Ignore
-var isIndexValid: Bool = true
+// Already within the processed audio packets. Ignore
+var isIndexValid = true
lockQueue.sync {
-if self.indexSeekOffset <= index && index < self.audioPackets.count + Int(self.indexSeekOffset) {
+if self.indexSeekOffset <= index, index < self.audioPackets.count + Int(self.indexSeekOffset) {
isIndexValid = false
}
}
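The parser change swaps max for min once the download has finished, so totalPredictedPacketCount returns the smaller of the parsed packet count and the byte-size estimate. A simplified sketch of that calculation with plain Int values (the real property works with AVAudioPacketCount and live parser state):

```swift
import Foundation

// Hypothetical free function mirroring the estimate in totalPredictedPacketCount.
func predictedPacketCount(parsedPackets: Int,
                          fileSizeInBytes: Int,
                          bytesPerPacket: Double,
                          networkProgress: Double) -> Int {
    let sizeBasedEstimate = Int(Double(fileSizeInBytes) / bytesPerPacket)

    // While the file is still downloading, trust the size-based estimate.
    guard networkProgress == 1.0 else { return sizeBasedEstimate }

    // Fully downloaded: the PR returns the smaller of the parsed count and the
    // estimate (min) where the previous code returned the larger (max).
    return min(parsedPackets, sizeBasedEstimate)
}

print(predictedPacketCount(parsedPackets: 4_231,
                           fileSizeInBytes: 3_500_000,
                           bytesPerPacket: 835.0,
                           networkProgress: 1.0))
```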