diff --git a/src/video-element.ts b/src/video-element.ts
index 445f038..c02dcd2 100644
--- a/src/video-element.ts
+++ b/src/video-element.ts
@@ -1082,9 +1082,6 @@ export class BabyVideoElement extends HTMLElement {
         break;
       }
     }
-    if (direction === Direction.BACKWARD) {
-      frames.reverse();
-    }
     this.#audioContext ??= this.#initializeAudio(firstFrame.sampleRate);
     this.#renderAudioFrame(frames, currentTimeInMicros, direction);
     // Decode more frames (if we now have more space in the queue)
@@ -1098,11 +1095,8 @@
   ) {
     const firstFrame = frames[0];
     const lastFrame = frames[frames.length - 1];
-    let firstTimestamp = firstFrame.timestamp;
-    let lastTimestamp = lastFrame.timestamp + lastFrame.duration;
-    if (direction === Direction.BACKWARD) {
-      [firstTimestamp, lastTimestamp] = [lastTimestamp, firstTimestamp];
-    }
+    const firstTimestamp = firstFrame.timestamp;
+    const lastTimestamp = lastFrame.timestamp + lastFrame.duration;
     if (DEBUG) {
       console.log(
         `render audio frames start=${firstTimestamp} end=${lastTimestamp} duration=${
@@ -1110,6 +1104,11 @@
         } count=${frames.length}`
       );
     }
+    // For reverse playback, first put the frames back in their original order,
+    // so we can reverse the individual samples later.
+    if (direction === Direction.BACKWARD) {
+      frames.reverse();
+    }
     // Create an AudioBuffer containing all frame data
     const { numberOfChannels, sampleRate } = frames[0];
     const audioBuffer = new AudioBuffer({