diff --git a/lib/media/media_source_engine.js b/lib/media/media_source_engine.js
index 3af29e849e..22edd671a0 100644
--- a/lib/media/media_source_engine.js
+++ b/lib/media/media_source_engine.js
@@ -551,7 +551,8 @@ shaka.media.MediaSourceEngine = class {
    * @return {!Promise}
    */
   async appendBuffer(
-      contentType, data, reference, hasClosedCaptions, seeked, adaptation) {
+      contentType, data, reference, hasClosedCaptions, seeked = false,
+      adaptation = false) {
     const ContentType = shaka.util.ManifestParserUtils.ContentType;
 
     if (contentType == ContentType.TEXT) {
@@ -652,6 +653,14 @@
       await this.enqueueOperation_(
           contentType,
           () => this.append_(contentType, data));
+      // If the input buffer passed to SourceBuffer#appendBuffer() does not
+      // contain a complete media segment, the call will exit while the
+      // SourceBuffer's append state is
+      // still PARSING_MEDIA_SEGMENT. Reset the parser state by calling
+      // abort() to safely reset timestampOffset to 'originalOffset'.
+      // https://www.w3.org/TR/media-source-2/#sourcebuffer-segment-parser-loop
+      await this.enqueueOperation_(
+          contentType, () => this.abort_(contentType));
 
       // Reset the offset and append window.
       sourceBuffer.timestampOffset = originalOffset;
@@ -681,6 +690,11 @@
     // adaptation, we need to set a new timestampOffset on the sourceBuffer.
     if (seeked || adaptation) {
       const timestampOffset = reference.startTime;
+      // The logic to call abort() before setting the timestampOffset is
+      // extended during unbuffered seeks or automatic adaptations; it is
+      // possible for the append state to be PARSING_MEDIA_SEGMENT from the
+      // previous SourceBuffer#appendBuffer() call.
+      this.enqueueOperation_(contentType, () => this.abort_(contentType));
       this.enqueueOperation_(
           contentType,
           () => this.setTimestampOffset_(contentType, timestampOffset));
diff --git a/test/media/media_source_engine_integration.js b/test/media/media_source_engine_integration.js
index b950129967..b5c78b4020 100644
--- a/test/media/media_source_engine_integration.js
+++ b/test/media/media_source_engine_integration.js
@@ -382,6 +382,42 @@ describe('MediaSourceEngine', () => {
     expect(textDisplayer.appendSpy).toHaveBeenCalledTimes(3);
   });
 
+  it('buffers partial TS video segments in sequence mode', async () => {
+    metadata = shaka.test.TestScheme.DATA['cea-708_ts'];
+    generators = shaka.test.TestScheme.GENERATORS['cea-708_ts'];
+
+    const videoType = ContentType.VIDEO;
+    const initObject = new Map();
+    initObject.set(videoType, getFakeStream(metadata.video));
+
+    await mediaSourceEngine.init(
+        initObject, /* forceTransmuxTS= */ false, /* sequenceMode= */ true);
+    await mediaSourceEngine.setDuration(presentationDuration);
+    await mediaSourceEngine.setStreamProperties(
+        videoType,
+        /* timestampOffset= */ 0,
+        /* appendWindowStart= */ 0,
+        /* appendWindowEnd= */ Infinity,
+        /* sequenceMode= */ true);
+
+    const segment = generators[videoType].getSegment(0, Date.now() / 1000);
+    const partialSegmentLength = Math.floor(segment.byteLength / 3);
+
+    let partialSegment = shaka.util.BufferUtils.toUint8(
+        segment, /* offset= */ 0, /* length= */ partialSegmentLength);
+    let reference = dummyReference(videoType, 0);
+    await mediaSourceEngine.appendBuffer(
+        videoType, partialSegment, reference, /* hasClosedCaptions= */ false);
+
+    partialSegment = shaka.util.BufferUtils.toUint8(
+        segment,
+        /* offset= */ partialSegmentLength);
+    reference = dummyReference(videoType, 1);
+    await mediaSourceEngine.appendBuffer(
+        videoType, partialSegment, reference, /* hasClosedCaptions= */ false,
+        /* seeked= */ true);
+  });
+
   it('extracts CEA-708 captions from dash', async () => {
     // Load MP4 file with CEA-708 closed captions.
     metadata = shaka.test.TestScheme.DATA['cea-708_mp4'];
diff --git a/test/media/media_source_engine_unit.js b/test/media/media_source_engine_unit.js
index 21ace526a4..485016fda6 100644
--- a/test/media/media_source_engine_unit.js
+++ b/test/media/media_source_engine_unit.js
@@ -649,6 +649,60 @@ describe('MediaSourceEngine', () => {
       expect(videoSourceBuffer.timestampOffset).toBe(0.50);
     });
+
+    it('calls abort before setting timestampOffset', async () => {
+      const simulateUpdate = async () => {
+        await Util.shortDelay();
+        videoSourceBuffer.updateend();
+      };
+      const initObject = new Map();
+      initObject.set(ContentType.VIDEO, fakeVideoStream);
+
+      await mediaSourceEngine.init(
+          initObject, /* forceTransmuxTS= */ false, /* sequenceMode= */ true);
+
+      // First, mock the scenario where timestampOffset is set to help align
+      // text segments. In this case, SourceBuffer mode is still 'segments'.
+      let reference = dummyReference(0, 1000);
+      let appendVideo = mediaSourceEngine.appendBuffer(
+          ContentType.VIDEO, buffer, reference, /* hasClosedCaptions= */ false);
+      // Wait for the first appendBuffer(), in segments mode.
+      await simulateUpdate();
+      // Next, wait for abort(), used to reset the parser state for a safe
+      // setting of timestampOffset. Shaka fakes an updateend event on abort(),
+      // so simulateUpdate() isn't needed.
+      await Util.shortDelay();
+      // Next, wait for remove(), used to clear the SourceBuffer from the
+      // initial append.
+      await simulateUpdate();
+      // Next, wait for the second appendBuffer(), falling through to normal
+      // operations.
+      await simulateUpdate();
+      // Lastly, wait for the function-scoped MediaSourceEngine#appendBuffer()
+      // promise to resolve.
+      await appendVideo;
+      expect(videoSourceBuffer.abort).toHaveBeenCalledTimes(1);
+
+      // Second, mock the scenario where timestampOffset is set during an
+      // unbuffered seek or adaptation. SourceBuffer mode is 'sequence' now.
+      reference = dummyReference(0, 1000);
+      appendVideo = mediaSourceEngine.appendBuffer(
+          ContentType.VIDEO, buffer, reference, /* hasClosedCaptions= */ false,
+          /* seeked= */ true);
+      // First, wait for abort(), used to reset the parser state for a safe
+      // setting of timestampOffset.
+      await Util.shortDelay();
+      // The subsequent setTimestampOffset() fakes an updateend event for us, so
+      // simulateUpdate() isn't needed.
+      await Util.shortDelay();
+      // Next, wait for the second appendBuffer(), falling through to normal
+      // operations.
+      await simulateUpdate();
+      // Lastly, wait for the function-scoped MediaSourceEngine#appendBuffer()
+      // promise to resolve.
+      await appendVideo;
+      expect(videoSourceBuffer.abort).toHaveBeenCalledTimes(2);
+    });
   });
 
   describe('remove', () => {
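
Note (not part of the patch): the added comments rely on the MSE rule that a SourceBuffer's timestampOffset cannot be assigned while its append state is still PARSING_MEDIA_SEGMENT, and that SourceBuffer#abort() runs the spec's "reset parser state" algorithm. A minimal standalone sketch of that behavior, outside of Shaka, follows; the helper name setOffsetSafely and the variable sb are illustrative only, and a browser context with an already-open MediaSource is assumed.

    // Illustrative only -- not Shaka code.  'sb' is a SourceBuffer attached to
    // an open MediaSource.
    async function setOffsetSafely(sb, newOffset) {
      // Wait out any in-flight append first; calling abort() while updating is
      // true would cancel that append.
      if (sb.updating) {
        await new Promise((resolve) => {
          sb.addEventListener('updateend', resolve, {once: true});
        });
      }
      // If the previous appendBuffer() ended mid-segment, the append state can
      // still be PARSING_MEDIA_SEGMENT, and assigning timestampOffset would
      // throw an InvalidStateError.  abort() resets the segment parser state,
      // after which the assignment is safe.
      sb.abort();
      sb.timestampOffset = newOffset;
    }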