Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Fix crash on attaching metadata output #74

Merged
merged 3 commits into from
Mar 8, 2024
Merged
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
42 changes: 26 additions & 16 deletions Sources/SwiftAudioEx/Observer/AVPlayerItemObserver.swift
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,7 @@ protocol AVPlayerItemObserverDelegate: AnyObject {
Called when the duration of the observed item is updated.
*/
func item(didUpdateDuration duration: Double)

/**
Called when the playback of the observed item is or is no longer likely to keep up.
*/
Expand All @@ -32,7 +32,7 @@ protocol AVPlayerItemObserverDelegate: AnyObject {
class AVPlayerItemObserver: NSObject {

private static var context = 0
private let metadataOutput = AVPlayerItemMetadataOutput()
private var currentMetadataOutput: AVPlayerItemMetadataOutput?

private struct AVPlayerItemKeyPath {
static let duration = #keyPath(AVPlayerItem.duration)
Expand All @@ -47,7 +47,6 @@ class AVPlayerItemObserver: NSObject {

override init() {
super.init()
metadataOutput.setDelegate(self, queue: .main)
Copy link
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

We can also remove the private let metadataOutput = AVPlayerItemMetadataOutput() above right?

}

deinit {
Expand All @@ -68,15 +67,13 @@ class AVPlayerItemObserver: NSObject {
item.addObserver(self, forKeyPath: AVPlayerItemKeyPath.loadedTimeRanges, options: [.new], context: &AVPlayerItemObserver.context)
item.addObserver(self, forKeyPath: AVPlayerItemKeyPath.playbackLikelyToKeepUp, options: [.new], context: &AVPlayerItemObserver.context)

// We must slightly delay adding the metadata output due to the fact that
// stop observation is not a synchronous action and metadataOutput may not
// be removed from last item before we try to attach it to a new one.
DispatchQueue.main.asyncAfter(deadline: .now() + 0.001) { [weak self] in
guard let `self` = self else { return }
item.add(self.metadataOutput)
}
// Create and add a new metadata output to the item.
let metadataOutput = AVPlayerItemMetadataOutput()
metadataOutput.setDelegate(self, queue: .main)
item.add(metadataOutput)
Copy link
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I wonder whether we need to add a check in the delegate method func metadataOutput() to make sure we're receiving metadata from the "currently focused' item and output before we broadcast it. We could still do it by keeping a reference to the latest created output currentMetadataOutput and comparing the output given in the delegate to it.

I think once we reassign currentMetadataOutput any old metadata outputs will keep hanging in memory until we call remove(metadataOutput) in the stopObservingCurrentItem. We should probably do a small check on that.

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Updated, thanks for your advice. Kindly have a look at the new commit.

self.currentMetadataOutput = metadataOutput
}

func stopObservingCurrentItem() {
guard let observingItem = observingItem, isObserving else {
return
Expand All @@ -85,10 +82,13 @@ class AVPlayerItemObserver: NSObject {
observingItem.removeObserver(self, forKeyPath: AVPlayerItemKeyPath.duration, context: &AVPlayerItemObserver.context)
observingItem.removeObserver(self, forKeyPath: AVPlayerItemKeyPath.loadedTimeRanges, context: &AVPlayerItemObserver.context)
observingItem.removeObserver(self, forKeyPath: AVPlayerItemKeyPath.playbackLikelyToKeepUp, context: &AVPlayerItemObserver.context)
observingItem.remove(metadataOutput)

// Remove all metadata outputs from the item.
observingItem.removeAllMetadataOutputs()

isObserving = false
self.observingItem = nil
self.currentMetadataOutput = nil
}

override func observeValue(forKeyPath keyPath: String?, of object: Any?, change: [NSKeyValueChangeKey : Any]?, context: UnsafeMutableRawPointer?) {
Expand All @@ -102,17 +102,17 @@ class AVPlayerItemObserver: NSObject {
if let duration = change?[.newKey] as? CMTime {
delegate?.item(didUpdateDuration: duration.seconds)
}

case AVPlayerItemKeyPath.loadedTimeRanges:
if let ranges = change?[.newKey] as? [NSValue], let duration = ranges.first?.timeRangeValue.duration {
delegate?.item(didUpdateDuration: duration.seconds)
}

case AVPlayerItemKeyPath.playbackLikelyToKeepUp:
if let playbackLikelyToKeepUp = change?[.newKey] as? Bool {
delegate?.item(didUpdatePlaybackLikelyToKeepUp: playbackLikelyToKeepUp)
}

default: break

}
Expand All @@ -121,6 +121,16 @@ class AVPlayerItemObserver: NSObject {

// MARK: - AVPlayerItemMetadataOutputPushDelegate
extension AVPlayerItemObserver: AVPlayerItemMetadataOutputPushDelegate {
    /// Forwards timed metadata to the delegate, but only when it originates from the
    /// metadata output attached to the currently observed item.
    ///
    /// - Parameters:
    ///   - output: The metadata output that produced the groups.
    ///   - groups: The timed metadata groups that were emitted.
    ///   - track: The track the metadata belongs to, if any.
    ///
    /// Outputs attached to previously observed items may still deliver callbacks until
    /// they are removed; comparing against `currentMetadataOutput` ensures stale
    /// outputs do not broadcast metadata for an item that is no longer in focus.
    func metadataOutput(_ output: AVPlayerItemMetadataOutput, didOutputTimedMetadataGroups groups: [AVTimedMetadataGroup], from track: AVPlayerItemTrack?) {
        if output == currentMetadataOutput {
            delegate?.item(didReceiveTimedMetadata: groups)
        }
    }
}

// MARK: - Metadata output cleanup
extension AVPlayerItem {
    /// Detaches every `AVPlayerItemMetadataOutput` currently attached to this item.
    ///
    /// Used when observation of an item stops, so that outputs created for it do not
    /// linger in memory or keep delivering callbacks after the item is abandoned.
    func removeAllMetadataOutputs() {
        for output in outputs where output is AVPlayerItemMetadataOutput {
            remove(output)
        }
    }
}
Loading