Skip to content

Commit

Permalink
Merge branch 'audio'
Browse files Browse the repository at this point in the history
  • Loading branch information
MattiasBuelens committed Oct 5, 2023
2 parents e94f099 + 9c8f6e9 commit a75fe90
Show file tree
Hide file tree
Showing 10 changed files with 900 additions and 132 deletions.
117 changes: 91 additions & 26 deletions demo/app.ts
Original file line number Diff line number Diff line change
@@ -1,5 +1,9 @@
import "media-chrome";
import { BabyMediaSource, BabyVideoElement } from "../src/index";
import {
BabyMediaSource,
BabySourceBuffer,
BabyVideoElement
} from "../src/index";
import { TimeRanges } from "../src/time-ranges";
import { waitForEvent } from "../src/util";

Expand All @@ -14,6 +18,7 @@ video.addEventListener("pause", logEvent);
video.addEventListener("playing", logEvent);
// video.addEventListener("timeupdate", logEvent);
video.addEventListener("durationchange", logEvent);
video.addEventListener("ratechange", logEvent);
video.addEventListener("seeking", logEvent);
video.addEventListener("seeked", logEvent);
video.addEventListener("progress", logEvent);
Expand All @@ -28,24 +33,51 @@ if (mediaSource.readyState !== "open") {
await waitForEvent(mediaSource, "sourceopen");
}
mediaSource.duration = streamDuration;
const sourceBuffer = mediaSource.addSourceBuffer(
const videoSourceBuffer = mediaSource.addSourceBuffer(
'video/mp4; codecs="avc1.640028"'
);
const segmentURLs = [
const audioSourceBuffer = mediaSource.addSourceBuffer(
'audio/mp4; codecs="mp4a.40.5"'
);
const videoSegmentURLs = [
"https://dash.akamaized.net/akamai/bbb_30fps/bbb_30fps_640x360_1000k/bbb_30fps_640x360_1000k_0.m4v",
"https://dash.akamaized.net/akamai/bbb_30fps/bbb_30fps_640x360_1000k/bbb_30fps_640x360_1000k_1.m4v",
"https://dash.akamaized.net/akamai/bbb_30fps/bbb_30fps_640x360_1000k/bbb_30fps_640x360_1000k_2.m4v",
"https://dash.akamaized.net/akamai/bbb_30fps/bbb_30fps_1920x1080_8000k/bbb_30fps_1920x1080_8000k_0.m4v",
"https://dash.akamaized.net/akamai/bbb_30fps/bbb_30fps_1920x1080_8000k/bbb_30fps_1920x1080_8000k_2.m4v",
"https://dash.akamaized.net/akamai/bbb_30fps/bbb_30fps_1920x1080_8000k/bbb_30fps_1920x1080_8000k_3.m4v",
"https://dash.akamaized.net/akamai/bbb_30fps/bbb_30fps_1920x1080_8000k/bbb_30fps_1920x1080_8000k_4.m4v",
"https://dash.akamaized.net/akamai/bbb_30fps/bbb_30fps_1920x1080_8000k/bbb_30fps_1920x1080_8000k_4.m4v"
];
const audioSegmentURLs = [
"https://dash.akamaized.net/akamai/bbb_30fps/bbb_a64k/bbb_a64k_0.m4a",
"https://dash.akamaized.net/akamai/bbb_30fps/bbb_a64k/bbb_a64k_1.m4a",
"https://dash.akamaized.net/akamai/bbb_30fps/bbb_a64k/bbb_a64k_2.m4a",
"https://dash.akamaized.net/akamai/bbb_30fps/bbb_a64k/bbb_a64k_3.m4a",
"https://dash.akamaized.net/akamai/bbb_30fps/bbb_a64k/bbb_a64k_4.m4a"
];
for (const segmentURL of segmentURLs) {
const segmentData = await (await fetch(segmentURL)).arrayBuffer();
sourceBuffer.appendBuffer(segmentData);
await waitForEvent(sourceBuffer, "updateend");

// Fallback for runtimes that predate AbortSignal#throwIfAborted:
// only install our implementation when the platform lacks one.
if (AbortSignal.prototype.throwIfAborted == null) {
  AbortSignal.prototype.throwIfAborted = function throwIfAborted(
    this: AbortSignal
  ): void {
    // Per spec: rethrow the abort reason once the signal is aborted.
    if (this.aborted) {
      throw this.reason;
    }
  };
}

/**
 * Downloads each segment in order and appends it to the source buffer,
 * waiting for the buffer's "updateend" event after every append before
 * moving on to the next segment.
 */
async function appendSegments(
  sourceBuffer: BabySourceBuffer,
  segmentURLs: string[]
) {
  for (const url of segmentURLs) {
    const response = await fetch(url);
    const data = await response.arrayBuffer();
    sourceBuffer.appendBuffer(data);
    await waitForEvent(sourceBuffer, "updateend");
  }
}

await Promise.all([
appendSegments(videoSourceBuffer, videoSegmentURLs),
appendSegments(audioSourceBuffer, audioSegmentURLs)
]);

interface Segment {
url: string;
startTime: number;
Expand All @@ -54,41 +86,60 @@ interface Segment {
isLast: boolean;
}

const segmentDuration = 4;
const lastSegmentIndex = Math.ceil(streamDuration / segmentDuration) - 1;

function getSegmentForTime(time: number): Segment | undefined {
function getSegmentForTime(
templateUrl: string,
segmentDuration: number,
time: number
): Segment | undefined {
const lastSegmentIndex = Math.ceil(streamDuration / segmentDuration) - 1;
const segmentIndex = Math.max(
0,
Math.min(lastSegmentIndex, Math.floor(time / segmentDuration))
);
if (segmentIndex < 0) {
return undefined;
}
const url = `https://dash.akamaized.net/akamai/bbb_30fps/bbb_30fps_1920x1080_8000k/bbb_30fps_1920x1080_8000k_${
segmentIndex + 1
}.m4v`;
const url = templateUrl.replace(/%INDEX%/, `${segmentIndex + 1}`);
return {
url,
startTime: segmentIndex * segmentDuration,
endTime: (segmentIndex + 1) * segmentDuration,
isFirst: segmentIndex === 0,
isLast: segmentIndex === lastSegmentIndex,
isLast: segmentIndex === lastSegmentIndex
};
}

/** Looks up the 1080p video segment covering `time` (each segment is 120/30 s long). */
function getVideoSegmentForTime(time: number): Segment | undefined {
  const templateUrl =
    "https://dash.akamaized.net/akamai/bbb_30fps/bbb_30fps_1920x1080_8000k/bbb_30fps_1920x1080_8000k_%INDEX%.m4v";
  return getSegmentForTime(templateUrl, 120 / 30, time);
}

/** Looks up the 64k audio segment covering `time` (each segment is 192512/48000 s long). */
function getAudioSegmentForTime(time: number): Segment | undefined {
  const templateUrl =
    "https://dash.akamaized.net/akamai/bbb_30fps/bbb_a64k/bbb_a64k_%INDEX%.m4a";
  return getSegmentForTime(templateUrl, 192512 / 48000, time);
}

const forwardBufferSize = 30;
const backwardBufferSize = 10;

let pendingBufferLoop: Promise<void> = Promise.resolve();

async function bufferLoop(signal: AbortSignal) {
await pendingBufferLoop;
async function trackBufferLoop(
sourceBuffer: BabySourceBuffer,
segmentForTime: (time: number) => Segment | undefined,
signal: AbortSignal
) {
while (true) {
if (signal.aborted) throw signal.reason;
signal.throwIfAborted();
// Check buffer health
while (true) {
const currentRange = video.buffered.find(video.currentTime);
const currentRange = sourceBuffer.buffered.find(video.currentTime);
const forward = video.playbackRate >= 0;
if (!currentRange) {
// No buffer, need new segment immediately
Expand All @@ -109,20 +160,20 @@ async function bufferLoop(signal: AbortSignal) {
await waitForEvent(video, ["timeupdate", "ratechange"], signal);
}
// Find next segment
const currentRange = video.buffered.find(video.currentTime);
const currentRange = sourceBuffer.buffered.find(video.currentTime);
const forward = video.playbackRate >= 0;
const nextTime = currentRange
? forward
? currentRange[1]
: currentRange[0] - 0.001
: video.currentTime;
const nextSegment = getSegmentForTime(nextTime)!;
const nextSegment = segmentForTime(nextTime)!;
// Remove old buffer before/after current time
const retainStart =
video.currentTime - (forward ? backwardBufferSize : forwardBufferSize);
const retainEnd =
video.currentTime + (forward ? forwardBufferSize : backwardBufferSize);
const oldBuffered = video.buffered.subtract(
const oldBuffered = sourceBuffer.buffered.subtract(
new TimeRanges([[retainStart, retainEnd]])
);
for (let i = 0; i < oldBuffered.length; i++) {
Expand All @@ -135,19 +186,33 @@ async function bufferLoop(signal: AbortSignal) {
).arrayBuffer();
sourceBuffer.appendBuffer(segmentData);
await waitForEvent(sourceBuffer, "updateend");
// Check if we're done buffering
if (forward) {
if (nextSegment.isLast) {
mediaSource.endOfStream();
break; // Stop buffering until next seek
return; // Stop buffering until next seek
}
} else {
if (nextSegment.isFirst) {
break; // Stop buffering until next seek
return; // Stop buffering until next seek
}
}
}
}

/**
 * Runs the buffering loops for both tracks until each has buffered through
 * its last segment (or the signal aborts), then ends the stream when
 * playing forward.
 */
async function bufferLoop(signal: AbortSignal) {
  // Let any previous buffer loop wind down before starting a new one.
  await pendingBufferLoop;
  // allSettled: wait for BOTH loops to finish, even if one rejects (e.g. on abort).
  const trackLoops = [
    trackBufferLoop(videoSourceBuffer, getVideoSegmentForTime, signal),
    trackBufferLoop(audioSourceBuffer, getAudioSegmentForTime, signal)
  ];
  await Promise.allSettled(trackLoops);
  signal.throwIfAborted();
  // All tracks are done buffering until the last segment.
  if (video.playbackRate >= 0) {
    mediaSource.endOfStream();
  }
}

let bufferAbortController: AbortController = new AbortController();

function restartBuffering() {
Expand Down
1 change: 0 additions & 1 deletion demo/vendor.d.ts

This file was deleted.

36 changes: 31 additions & 5 deletions index.html
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
<!DOCTYPE html>
<!doctype html>
<html lang="en">
<head>
<meta charset="UTF-8" />
Expand All @@ -14,16 +14,42 @@
<baby-video slot="media"></baby-video>
<media-loading-indicator
slot="centered-chrome"
no-auto-hide
noautohide
></media-loading-indicator>
<media-control-bar>
<media-play-button></media-play-button>
<media-time-display show-duration></media-time-display>
<media-mute-button></media-mute-button>
<media-volume-range></media-volume-range>
<media-time-display showduration></media-time-display>
<media-time-range></media-time-range>
<media-playback-rate-button rates="-1 1"></media-playback-rate-button>
<media-fullscreen-button></media-fullscreen-button>
</media-control-bar>
</media-controller>

<p><a href="https://github.com/MattiasBuelens/baby-video">Source code on GitHub</a></p>

<p>
<a href="https://github.com/MattiasBuelens/baby-video"
>Source code on GitHub</a
>
</p>

<style>
media-mute-button + media-volume-range {
width: 0;
overflow: hidden;
padding-left: 0;
padding-right: 0;

/* Set the internal width so it reveals, not grows */
--media-range-track-width: 70px;
transition: width 0.2s ease-in;
}

/* Expand volume control when hovered or focused */
media-mute-button:is(:hover, :focus, :focus-within) + media-volume-range,
media-mute-button + media-volume-range:is(:hover, :focus, :focus-within) {
width: var(--media-range-track-width);
}
</style>
</body>
</html>
43 changes: 40 additions & 3 deletions src/media-source.ts
Original file line number Diff line number Diff line change
@@ -1,4 +1,8 @@
import { BabySourceBuffer, getVideoTrackBuffer } from "./source-buffer";
import {
BabySourceBuffer,
getAudioTrackBuffer,
getVideoTrackBuffer
} from "./source-buffer";
import {
BabyVideoElement,
MediaReadyState,
Expand All @@ -7,7 +11,7 @@ import {
updateReadyState
} from "./video-element";
import { queueTask } from "./util";
import { VideoTrackBuffer } from "./track-buffer";
import { AudioTrackBuffer, VideoTrackBuffer } from "./track-buffer";
import { setEndTimeOnLastRange, TimeRanges } from "./time-ranges";

export type MediaSourceReadyState = "closed" | "ended" | "open";
Expand All @@ -32,6 +36,9 @@ export let getBuffered: (mediaSource: BabyMediaSource) => TimeRanges;
export let getActiveVideoTrackBuffer: (
mediaSource: BabyMediaSource
) => VideoTrackBuffer | undefined;
export let getActiveAudioTrackBuffer: (
mediaSource: BabyMediaSource
) => AudioTrackBuffer | undefined;
export let openIfEnded: (mediaSource: BabyMediaSource) => void;
export let checkBuffer: (mediaSource: BabyMediaSource) => void;

Expand Down Expand Up @@ -222,6 +229,16 @@ export class BabyMediaSource extends EventTarget {
return undefined;
}

// Scans the attached source buffers and returns the first audio
// track buffer found, or undefined when no buffer holds audio.
#getActiveAudioTrackBuffer(): AudioTrackBuffer | undefined {
  for (const buffer of this.#sourceBuffers) {
    const trackBuffer = getAudioTrackBuffer(buffer);
    if (trackBuffer) {
      return trackBuffer;
    }
  }
  return undefined;
}

#getBuffered(): TimeRanges {
// https://w3c.github.io/media-source/#htmlmediaelement-extensions-buffered
// 2.1. Let recent intersection ranges equal an empty TimeRanges object.
Expand Down Expand Up @@ -262,6 +279,8 @@ export class BabyMediaSource extends EventTarget {
}
const buffered = this.#getBuffered();
const currentTime = mediaElement.currentTime;
const duration = this.#duration;
const playbackRate = mediaElement.playbackRate;
const currentRange = buffered.find(currentTime);
// If HTMLMediaElement.buffered does not contain a TimeRanges for the current playback position:
if (currentRange === undefined) {
Expand All @@ -281,7 +300,7 @@ export class BabyMediaSource extends EventTarget {
}
// If HTMLMediaElement.buffered contains a TimeRanges that includes the current playback position
// and some time beyond the current playback position, then run the following steps:
if (buffered.containsRange(currentTime, currentTime + 0.1)) {
if (hasSomeBuffer(buffered, currentTime, duration, playbackRate)) {
// Set the HTMLMediaElement.readyState attribute to HAVE_FUTURE_DATA.
// Playback may resume at this point if it was previously suspended by a transition to HAVE_CURRENT_DATA.
updateReadyState(mediaElement, MediaReadyState.HAVE_FUTURE_DATA);
Expand Down Expand Up @@ -312,10 +331,28 @@ export class BabyMediaSource extends EventTarget {
openIfEnded = (mediaSource) => mediaSource.#openIfEnded();
getActiveVideoTrackBuffer = (mediaSource) =>
mediaSource.#getActiveVideoTrackBuffer();
getActiveAudioTrackBuffer = (mediaSource) =>
mediaSource.#getActiveAudioTrackBuffer();
checkBuffer = (mediaSource) => mediaSource.#checkBuffer();
}
}

/** Returns the end time of the last buffered range, or 0 when nothing is buffered. */
function getHighestEndTime(buffered: TimeRanges): number {
  const rangeCount = buffered.length;
  if (rangeCount === 0) {
    return 0;
  }
  return buffered.end(rangeCount - 1);
}

/**
 * Checks whether `buffered` covers a small window (0.1 s) around the
 * current playback position, in the direction of playback.
 *
 * @param buffered - Buffered ranges of the media element.
 * @param currentTime - Current playback position, in seconds.
 * @param duration - Total stream duration; the forward window is clamped to it.
 * @param playbackRate - Playback direction: >= 0 looks ahead, < 0 looks behind.
 */
export function hasSomeBuffer(
  buffered: TimeRanges,
  currentTime: number,
  duration: number,
  playbackRate: number
): boolean {
  const playingForward = playbackRate >= 0;
  // Forward playback probes [t, t + 0.1] clamped to the stream's end;
  // reverse playback probes [t - 0.1, t] clamped to 0.
  const start = playingForward ? currentTime : Math.max(0, currentTime - 0.1);
  const end = playingForward ? Math.min(currentTime + 0.1, duration) : currentTime;
  return buffered.containsRange(start, end);
}
Loading

0 comments on commit a75fe90

Please sign in to comment.