Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Add device and output selection support #85

Open
wants to merge 20 commits into
base: main
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
9 changes: 7 additions & 2 deletions Sources/SwiftAudioEx/AVPlayerWrapper/AVPlayerWrapper.swift
Original file line number Diff line number Diff line change
Expand Up @@ -23,7 +23,8 @@ public enum PlaybackEndedReason: String {
class AVPlayerWrapper: AVPlayerWrapperProtocol {
// MARK: - Properties

fileprivate var avPlayer = AVPlayer()
internal var avPlayer = AVPlayer()
internal var audioTap: AudioTap? = nil
private let playerObserver = AVPlayerObserver()
internal let playerTimeObserver: AVPlayerTimeObserver
private let playerItemNotificationObserver = AVPlayerItemNotificationObserver()
Expand Down Expand Up @@ -69,7 +70,10 @@ class AVPlayerWrapper: AVPlayerWrapperProtocol {
let currentState = self._state
if (currentState != newValue) {
self._state = newValue
self.delegate?.AVWrapper(didChangeState: newValue)
// The delegate can initiate a state change, resulting in a deadlock in the getter.
DispatchQueue.main.async {
self.delegate?.AVWrapper(didChangeState: newValue)
}
}
}
}
Expand Down Expand Up @@ -385,6 +389,7 @@ class AVPlayerWrapper: AVPlayerWrapperProtocol {
// Begins observing the given player item and attaches the configured
// audio tap (if any) so tap callbacks fire while this item plays.
private func startObservingAVPlayer(item: AVPlayerItem) {
    playerItemObserver.startObserving(item: item)
    playerItemNotificationObserver.startObserving(item: item)
    // Tap attachment happens per-item; a tap set after this point is not
    // picked up until the next item is observed.
    attachTap(audioTap, to: item)
}

private func stopObservingAVPlayerItem() {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,8 @@ protocol AVPlayerWrapperProtocol: AnyObject {

var state: AVPlayerWrapperState { get set }

var audioTap: AudioTap? { get set }

var playWhenReady: Bool { get set }

var currentItem: AVPlayerItem? { get }
Expand Down
5 changes: 4 additions & 1 deletion Sources/SwiftAudioEx/AudioItem.swift
Original file line number Diff line number Diff line change
Expand Up @@ -29,7 +29,7 @@ public protocol AudioItem {
func getAlbumTitle() -> String?
func getSourceType() -> SourceType
func getArtwork(_ handler: @escaping (AudioItemImage?) -> Void)

func getArtworkURL() -> URL?
}

/// Make your `AudioItem`-subclass conform to this protocol to control which AVAudioTimePitchAlgorithm is used for each item.
Expand Down Expand Up @@ -96,6 +96,9 @@ public class DefaultAudioItem: AudioItem {
handler(artwork)
}

/// Default implementation supplies no artwork URL; subclasses may override
/// to return a remote or local artwork location.
public func getArtworkURL() -> URL? {
    return nil
}
}

/// An AudioItem that also conforms to the `TimePitching`-protocol
Expand Down
8 changes: 8 additions & 0 deletions Sources/SwiftAudioEx/AudioPlayer.swift
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,14 @@ public typealias AudioPlayerState = AVPlayerWrapperState
public class AudioPlayer: AVPlayerWrapperDelegate {
/// The wrapper around the underlying AVPlayer
let wrapper: AVPlayerWrapperProtocol = AVPlayerWrapper()

/**
Set an instance of AudioTap, to receive frame information and audio buffer access during playback.
*/
public var audioTap: AudioTap? {
get { return wrapper.audioTap }
set(value) { wrapper.audioTap = value }
}

public let nowPlayingInfoController: NowPlayingInfoControllerProtocol
public let remoteCommandController: RemoteCommandController
Expand Down
98 changes: 98 additions & 0 deletions Sources/SwiftAudioEx/AudioTap.swift
Original file line number Diff line number Diff line change
@@ -0,0 +1,98 @@
//
// AudioTap.swift
//
//
// Created by Brandon Sneed on 3/31/24.
//

import Foundation
import AVFoundation

/**
Subclass this and set the AudioPlayer's `audioTap` property to start receiving the
audio stream.
*/
/// Subclass this and set the AudioPlayer's `audioTap` property to start
/// receiving the audio stream.
///
/// Each hook has a default implementation that simply logs, so subclasses
/// only need to override the callbacks they care about.
open class AudioTap {
    /// Called at tap initialization for a given player item.
    /// Use this to set up anything you might need.
    open func initialize() {
        print("audioTap: initialize")
    }

    /// Called at teardown of the internal tap.
    /// Use this to reset any memory buffers you have created, etc.
    open func finalize() {
        print("audioTap: finalize")
    }

    /// Called just before playback so you can perform setup based on the
    /// stream description.
    open func prepare(description: AudioStreamBasicDescription) {
        print("audioTap: prepare")
    }

    /// Called just before `finalize()`.
    open func unprepare() {
        print("audioTap: unprepare")
    }

    /// Called periodically during audio stream playback.
    ///
    /// Example:
    ///
    /// ```
    /// func process(numberOfFrames: Int, buffer: UnsafeMutableAudioBufferListPointer) {
    ///     for channel in buffer {
    ///         // process audio samples here
    ///         //memset(channel.mData, 0, Int(channel.mDataByteSize))
    ///     }
    /// }
    /// ```
    open func process(numberOfFrames: Int, buffer: UnsafeMutableAudioBufferListPointer) {
        print("audioTap: process")
    }
}

extension AVPlayerWrapper {
    /// Wires `tap` into `item` via an `MTAudioProcessingTap` attached through
    /// an `AVMutableAudioMix`, so the tap's callbacks receive the audio stream.
    ///
    /// The `AudioTap` is manually retained so it outlives this call; the
    /// retain is balanced in the tap's finalize callback, or immediately below
    /// if tap creation fails. Does nothing when `tap` is nil or the item has
    /// no audio track.
    internal func attachTap(_ tap: AudioTap?, to item: AVPlayerItem) {
        guard let tap else { return }
        // NOTE(review): synchronous `tracks` access can block for remote
        // assets — confirm this is acceptable for the item types used here.
        guard let track = item.asset.tracks(withMediaType: .audio).first else {
            return
        }

        let audioMix = AVMutableAudioMix()
        let params = AVMutableAudioMixInputParameters(track: track)

        // we need to retain this pointer so it doesn't disappear out from under us.
        // we'll then let it go after we finalize. If the tap changed upstream, we
        // aren't going to pick up the new one until after this player item goes away.
        let client = UnsafeMutableRawPointer(Unmanaged.passRetained(tap).toOpaque())

        var callbacks = MTAudioProcessingTapCallbacks(version: kMTAudioProcessingTapCallbacksVersion_0, clientInfo: client)
        { tapRef, clientInfo, tapStorageOut in
            // initial tap setup: stash the client pointer in tap storage so the
            // other callbacks can recover the AudioTap instance.
            guard let clientInfo else { return }
            tapStorageOut.pointee = clientInfo
            let audioTap = Unmanaged<AudioTap>.fromOpaque(clientInfo).takeUnretainedValue()
            audioTap.initialize()
        } finalize: { tapRef in
            // clean up
            let audioTap = Unmanaged<AudioTap>.fromOpaque(MTAudioProcessingTapGetStorage(tapRef)).takeUnretainedValue()
            audioTap.finalize()
            // we're done, we can let go of the pointer we retained.
            Unmanaged.passUnretained(audioTap).release()
        } prepare: { tapRef, maxFrames, processingFormat in
            // allocate memory for sound processing
            let audioTap = Unmanaged<AudioTap>.fromOpaque(MTAudioProcessingTapGetStorage(tapRef)).takeUnretainedValue()
            audioTap.prepare(description: processingFormat.pointee)
        } unprepare: { tapRef in
            // deallocate memory for sound processing
            let audioTap = Unmanaged<AudioTap>.fromOpaque(MTAudioProcessingTapGetStorage(tapRef)).takeUnretainedValue()
            audioTap.unprepare()
        } process: { tapRef, numberFrames, flags, bufferListInOut, numberFramesOut, flagsOut in
            guard noErr == MTAudioProcessingTapGetSourceAudio(tapRef, numberFrames, bufferListInOut, flagsOut, nil, numberFramesOut) else {
                return
            }

            // process sound data
            let audioTap = Unmanaged<AudioTap>.fromOpaque(MTAudioProcessingTapGetStorage(tapRef)).takeUnretainedValue()
            audioTap.process(numberOfFrames: numberFrames, buffer: UnsafeMutableAudioBufferListPointer(bufferListInOut))
        }

        var tapRef: Unmanaged<MTAudioProcessingTap>?
        let error = MTAudioProcessingTapCreate(kCFAllocatorDefault, &callbacks, kMTAudioProcessingTapCreationFlag_PreEffects, &tapRef)
        assert(error == noErr)

        // If creation failed, the finalize callback will never run, so balance
        // the passRetained above here or the AudioTap (and everything it
        // captures) leaks. `assert` is compiled out of release builds, so we
        // must not fall through with a nil tap.
        guard error == noErr, let createdTap = tapRef else {
            Unmanaged<AudioTap>.fromOpaque(client).release()
            return
        }

        params.audioTapProcessor = createdTap.takeUnretainedValue()
        // The mix parameters now hold their own reference; release ours.
        createdTap.release()

        audioMix.inputParameters = [params]
        item.audioMix = audioMix
    }
}

4 changes: 4 additions & 0 deletions Sources/SwiftAudioEx/Observer/AVPlayerItemObserver.swift
Original file line number Diff line number Diff line change
Expand Up @@ -63,6 +63,7 @@ class AVPlayerItemObserver: NSObject {

self.isObserving = true
self.observingItem = item

item.addObserver(self, forKeyPath: AVPlayerItemKeyPath.duration, options: [.new], context: &AVPlayerItemObserver.context)
item.addObserver(self, forKeyPath: AVPlayerItemKeyPath.loadedTimeRanges, options: [.new], context: &AVPlayerItemObserver.context)
item.addObserver(self, forKeyPath: AVPlayerItemKeyPath.playbackLikelyToKeepUp, options: [.new], context: &AVPlayerItemObserver.context)
Expand All @@ -79,6 +80,9 @@ class AVPlayerItemObserver: NSObject {
return
}

// BKS: remove a tap if we had one.
observingItem.audioMix = nil

observingItem.removeObserver(self, forKeyPath: AVPlayerItemKeyPath.duration, context: &AVPlayerItemObserver.context)
observingItem.removeObserver(self, forKeyPath: AVPlayerItemKeyPath.loadedTimeRanges, context: &AVPlayerItemObserver.context)
observingItem.removeObserver(self, forKeyPath: AVPlayerItemKeyPath.playbackLikelyToKeepUp, context: &AVPlayerItemObserver.context)
Expand Down
204 changes: 204 additions & 0 deletions Sources/SwiftAudioEx/Utils/Devices.swift
Original file line number Diff line number Diff line change
@@ -0,0 +1,204 @@
//
// Devices.swift
//
//
// Created by Brandon Sneed on 4/1/24.
//

import Foundation
import AVFoundation
import CoreAudio

/// A local audio output device, identified by its CoreAudio device ID,
/// unique ID string, and human-readable name.
public class AudioDevice: CustomStringConvertible, CustomDebugStringConvertible {
    public let deviceID: AudioDeviceID?
    public let uniqueID: String?
    public let name: String?

    /// Placeholder device representing the system default output.
    static var system: AudioDevice = AudioDevice()

    /// The device name, or "Unknown" when none could be read.
    public var description: String { name ?? "Unknown" }

    /// Mirrors `description` for debug output.
    public var debugDescription: String { name ?? "Unknown" }

    /// Builds a device record by querying CoreAudio for the UID and name
    /// of the given device.
    internal init(deviceID: AudioDeviceID) {
        self.deviceID = deviceID
        self.uniqueID = Self.propertyValue(deviceID: deviceID, selector: AudioObjectPropertySelector(kAudioDevicePropertyDeviceUID))
        self.name = Self.propertyValue(deviceID: deviceID, selector: AudioObjectPropertySelector(kAudioDevicePropertyDeviceNameCFString))
    }

    /// Builds the sentinel "System" device (no unique ID).
    internal init() {
        self.deviceID = 0
        self.uniqueID = nil
        self.name = "System"
    }
}

extension AudioDevice {
    /// Returns true when the device exposes at least one output channel.
    ///
    /// Queries the device's output-scope stream configuration and inspects
    /// each buffer's channel count.
    static func hasOutput(deviceID: AudioDeviceID) -> Bool {
        var address = AudioObjectPropertyAddress(
            mSelector: AudioObjectPropertySelector(kAudioDevicePropertyStreamConfiguration),
            mScope: AudioObjectPropertyScope(kAudioObjectPropertyScopeOutput),
            mElement: 0)

        var size: UInt32 = 0
        var status = AudioObjectGetPropertyDataSize(deviceID, &address, 0, nil, &size)
        if status != 0 {
            // we weren't able to get the size
            return false
        }

        // `size` is a byte count, so allocate raw bytes (the previous
        // `allocate(capacity: Int(size))` over-allocated by sizeof(AudioBufferList))
        // and make sure the buffer is freed on every exit path.
        let rawList = UnsafeMutableRawPointer.allocate(
            byteCount: Int(size),
            alignment: MemoryLayout<AudioBufferList>.alignment)
        defer { rawList.deallocate() }
        let bufferList = rawList.bindMemory(to: AudioBufferList.self, capacity: 1)

        status = AudioObjectGetPropertyData(deviceID, &address, 0, nil, &size, bufferList)
        if status != 0 {
            // we couldn't get the buffer list
            return false
        }

        let buffers = UnsafeMutableAudioBufferListPointer(bufferList)
        return buffers.contains { $0.mNumberChannels > 0 }
    }

    /// Reads a CFString-valued device property (e.g. UID or name) and returns
    /// it as a Swift String, or nil on failure.
    static internal func propertyValue(deviceID: AudioDeviceID, selector: AudioObjectPropertySelector) -> String? {
        var address = AudioObjectPropertyAddress(
            mSelector: selector,
            mScope: AudioObjectPropertyScope(kAudioObjectPropertyScopeGlobal),
            mElement: AudioObjectPropertyElement(kAudioObjectPropertyElementMain))

        var name: Unmanaged<CFString>?
        var size = UInt32(MemoryLayout<CFString?>.size)
        let status = AudioObjectGetPropertyData(deviceID, &address, 0, nil, &size, &name)
        if status != 0 {
            return nil
        }
        // CFString properties returned through AudioObjectGetPropertyData are
        // handed to us retained (Copy rule); take ownership so the string is
        // released when the Swift String bridge is done with it. The previous
        // takeUnretainedValue() leaked one CFString per query.
        return name?.takeRetainedValue() as String?
    }
}

extension AudioPlayer {
    /**
     Set the output device for the Player. Default is system.
     */
    public func setOutputDevice(_ device: AudioDevice) {
        // Delegate to the property setter so the two code paths can't drift.
        outputDevice = device
    }

    /**
     Get or set the current output device.

     Getting returns the matching local device, or `AudioDevice.system`
     when no explicit device is set (or the set device is no longer present).
     */
    public var outputDevice: AudioDevice {
        get {
            guard let wrapper = wrapper as? AVPlayerWrapper,
                  let uniqueID = wrapper.avPlayer.audioOutputDeviceUniqueID,
                  let match = localDevices.first(where: { $0.uniqueID == uniqueID })
            else {
                return AudioDevice.system
            }
            return match
        }
        set(value) {
            guard let wrapper = wrapper as? AVPlayerWrapper else { return }
            wrapper.avPlayer.audioOutputDeviceUniqueID = value.uniqueID
        }
    }

    /**
     Get a list of local audio devices capable of output.

     This list will *NOT* include AirPlay devices. For Airplay and other streaming
     audio devices, see AVRoutePickerView.
     */
    public var localDevices: [AudioDevice] {
        get {
            // kAudioObjectPropertyElementMain matches the element used by
            // AudioDevice.propertyValue (Master is the deprecated spelling).
            var address = AudioObjectPropertyAddress(
                mSelector: AudioObjectPropertySelector(kAudioHardwarePropertyDevices),
                mScope: AudioObjectPropertyScope(kAudioObjectPropertyScopeGlobal),
                mElement: AudioObjectPropertyElement(kAudioObjectPropertyElementMain))

            var size: UInt32 = 0
            // No qualifier data is passed, so the qualifier size must be 0
            // (the original passed sizeof(AudioObjectPropertyAddress) here,
            // inconsistent with the GetPropertyData call below).
            var status = AudioObjectGetPropertyDataSize(
                AudioObjectID(kAudioObjectSystemObject),
                &address,
                0,
                nil,
                &size)
            if status != 0 {
                // we couldn't get a data size
                return []
            }

            let deviceCount = Int(size) / MemoryLayout<AudioDeviceID>.size
            var deviceIDs = [AudioDeviceID](repeating: 0, count: deviceCount)

            status = AudioObjectGetPropertyData(
                AudioObjectID(kAudioObjectSystemObject),
                &address,
                0,
                nil,
                &size,
                &deviceIDs)
            if status != 0 {
                // we couldn't get anything from property data
                return []
            }

            // Keep only devices that actually have output channels.
            return deviceIDs
                .filter { AudioDevice.hasOutput(deviceID: $0) }
                .map { AudioDevice(deviceID: $0) }
        }
    }
}
Loading
Loading