diff --git a/src/media-segment-request.js b/src/media-segment-request.js index 749643ccc..53f629c6b 100644 --- a/src/media-segment-request.js +++ b/src/media-segment-request.js @@ -266,6 +266,8 @@ const transmuxAndNotify = ({ audioSegmentTimingInfoFn, id3Fn, captionsFn, + isEndOfTimeline, + endedTimelineFn, dataFn, doneFn }) => { @@ -360,6 +362,13 @@ const transmuxAndNotify = ({ onCaptions: (captions) => { captionsFn(segment, [captions]); }, + // if this is a partial transmux, the end of the timeline has not yet been reached + // until the last part of the segment is processed (at which point isPartial will + // be false) + isEndOfTimeline: isEndOfTimeline && !isPartial, + onEndedTimeline: () => { + endedTimelineFn(); + }, onDone: (result) => { // To handle partial appends, there won't be a done function passed in (since // there's still, potentially, more segment to process), so there's nothing to do. @@ -382,6 +391,8 @@ const handleSegmentBytes = ({ audioSegmentTimingInfoFn, id3Fn, captionsFn, + isEndOfTimeline, + endedTimelineFn, dataFn, doneFn }) => { @@ -513,6 +524,8 @@ const handleSegmentBytes = ({ audioSegmentTimingInfoFn, id3Fn, captionsFn, + isEndOfTimeline, + endedTimelineFn, dataFn, doneFn }); @@ -533,6 +546,11 @@ const handleSegmentBytes = ({ * @param {Function} audioSegmentTimingInfoFn * a callback that receives audio timing info based on media times and * any adjustments made by the transmuxer + * @param {boolean} isEndOfTimeline + * true if this segment represents the last segment in a timeline + * @param {Function} endedTimelineFn + * a callback made when a timeline is ended, will only be called if + * isEndOfTimeline is true * @param {Function} dataFn - a callback that is executed when segment bytes are available * and ready to use * @param {Function} doneFn - a callback that is executed after decryption has completed @@ -546,6 +564,8 @@ const decryptSegment = ({ audioSegmentTimingInfoFn, id3Fn, captionsFn, + isEndOfTimeline, + endedTimelineFn, 
dataFn, doneFn }) => { @@ -570,6 +590,8 @@ const decryptSegment = ({ audioSegmentTimingInfoFn, id3Fn, captionsFn, + isEndOfTimeline, + endedTimelineFn, dataFn, doneFn }); @@ -618,6 +640,11 @@ const decryptSegment = ({ * any adjustments made by the transmuxer * @param {Function} id3Fn - a callback that receives ID3 metadata * @param {Function} captionsFn - a callback that receives captions + * @param {boolean} isEndOfTimeline + * true if this segment represents the last segment in a timeline + * @param {Function} endedTimelineFn + * a callback made when a timeline is ended, will only be called if + * isEndOfTimeline is true * @param {Function} dataFn - a callback that is executed when segment bytes are available * and ready to use * @param {Function} doneFn - a callback that is executed after all resources have been @@ -632,6 +659,8 @@ const waitForCompletion = ({ audioSegmentTimingInfoFn, id3Fn, captionsFn, + isEndOfTimeline, + endedTimelineFn, dataFn, doneFn }) => { @@ -678,6 +707,8 @@ const waitForCompletion = ({ audioSegmentTimingInfoFn, id3Fn, captionsFn, + isEndOfTimeline, + endedTimelineFn, dataFn, doneFn }); @@ -693,6 +724,8 @@ const waitForCompletion = ({ audioSegmentTimingInfoFn, id3Fn, captionsFn, + isEndOfTimeline, + endedTimelineFn, dataFn, doneFn }); @@ -732,6 +765,11 @@ const handleLoadEnd = ({ loadendState, abortFn }) => (event) => { * @param {Function} audioSegmentTimingInfoFn * a callback that receives audio timing info based on media times and * any adjustments made by the transmuxer + * @param {boolean} isEndOfTimeline + * true if this segment represents the last segment in a timeline + * @param {Function} endedTimelineFn + * a callback made when a timeline is ended, will only be called if + * isEndOfTimeline is true * @param {Function} dataFn - a callback that is executed when segment bytes are available * and ready to use * @param {Event} event - the progress event object from XMLHttpRequest @@ -745,6 +783,8 @@ const handleProgress = ({ 
audioSegmentTimingInfoFn, id3Fn, captionsFn, + isEndOfTimeline, + endedTimelineFn, dataFn, handlePartialData }) => (event) => { @@ -782,6 +822,8 @@ const handleProgress = ({ audioSegmentTimingInfoFn, id3Fn, captionsFn, + isEndOfTimeline, + endedTimelineFn, dataFn }); } @@ -853,6 +895,11 @@ const handleProgress = ({ * any adjustments made by the transmuxer * @param {Function} id3Fn - a callback that receives ID3 metadata * @param {Function} captionsFn - a callback that receives captions + * @param {boolean} isEndOfTimeline + * true if this segment represents the last segment in a timeline + * @param {Function} endedTimelineFn + * a callback made when a timeline is ended, will only be called if + * isEndOfTimeline is true * @param {Function} dataFn - a callback that receives data from the main segment's xhr * request, transmuxed if needed * @param {Function} doneFn - a callback that is executed only once all requests have @@ -873,6 +920,8 @@ export const mediaSegmentRequest = ({ audioSegmentTimingInfoFn, id3Fn, captionsFn, + isEndOfTimeline, + endedTimelineFn, dataFn, doneFn, handlePartialData @@ -887,6 +936,8 @@ export const mediaSegmentRequest = ({ audioSegmentTimingInfoFn, id3Fn, captionsFn, + isEndOfTimeline, + endedTimelineFn, dataFn, doneFn }); @@ -954,6 +1005,8 @@ export const mediaSegmentRequest = ({ audioSegmentTimingInfoFn, id3Fn, captionsFn, + isEndOfTimeline, + endedTimelineFn, dataFn, handlePartialData }) diff --git a/src/segment-loader.js b/src/segment-loader.js index 238c69650..b6aa3f54d 100644 --- a/src/segment-loader.js +++ b/src/segment-loader.js @@ -2193,6 +2193,13 @@ export default class SegmentLoader extends videojs.EventTarget { } const simpleSegment = this.createSimplifiedSegmentObj_(segmentInfo); + const isEndOfStream = this.isEndOfStream_(segmentInfo.mediaIndex, segmentInfo.playlist); + const isWalkingForward = this.mediaIndex !== null; + const isDiscontinuity = segmentInfo.timeline !== this.currentTimeline_ && + // currentTimeline starts at 
-1, so we shouldn't end the timeline switching to 0, + // the first timeline + segmentInfo.timeline > 0; + const isEndOfTimeline = isEndOfStream || (isWalkingForward && isDiscontinuity); segmentInfo.abortRequests = mediaSegmentRequest({ xhr: this.vhs_.xhr, @@ -2207,6 +2214,10 @@ export default class SegmentLoader extends videojs.EventTarget { videoSegmentTimingInfoFn: this.handleSegmentTimingInfo_.bind(this, 'video', segmentInfo.requestId), audioSegmentTimingInfoFn: this.handleSegmentTimingInfo_.bind(this, 'audio', segmentInfo.requestId), captionsFn: this.handleCaptions_.bind(this), + isEndOfTimeline, + endedTimelineFn: () => { + this.logger_('received endedtimeline callback'); + }, id3Fn: this.handleId3_.bind(this), dataFn: this.handleData_.bind(this), @@ -2410,19 +2421,6 @@ export default class SegmentLoader extends videojs.EventTarget { // state away from loading until we are officially done loading the segment data. this.state = 'APPENDING'; - const isEndOfStream = this.isEndOfStream_(segmentInfo.mediaIndex, segmentInfo.playlist); - const isWalkingForward = this.mediaIndex !== null; - const isDiscontinuity = segmentInfo.timeline !== this.currentTimeline_ && - // TODO verify this behavior - // currentTimeline starts at -1, but we shouldn't end the timeline switching to 0, - // the first timeline - segmentInfo.timeline > 0; - - if (!segmentInfo.isFmp4 && - (isEndOfStream || (isWalkingForward && isDiscontinuity))) { - segmentTransmuxer.endTimeline(this.transmuxer_); - } - // used for testing this.trigger('appending'); diff --git a/src/segment-transmuxer.js b/src/segment-transmuxer.js index 93c6fae13..242b8285c 100644 --- a/src/segment-transmuxer.js +++ b/src/segment-transmuxer.js @@ -81,12 +81,15 @@ export const processTransmux = (options) => { onAudioSegmentTimingInfo, onId3, onCaptions, - onDone + onDone, + onEndedTimeline, + isEndOfTimeline } = options; const transmuxedData = { isPartial, buffer: [] }; + let waitForEndedTimelineEvent = isEndOfTimeline; const 
handleMessage = (event) => { if (transmuxer.currentTransmux !== options) { @@ -121,12 +124,24 @@ if (event.data.action === 'caption') { onCaptions(event.data.caption); } + if (event.data.action === 'endedtimeline') { + waitForEndedTimelineEvent = false; + onEndedTimeline(); + } // wait for the transmuxed event since we may have audio and video if (event.data.type !== 'transmuxed') { return; } + // If the "endedtimeline" event has not yet fired, and this segment represents the end + // of a timeline, that means there may still be data events before the segment + // processing can be considered complete. In that case, the final event should be + // an "endedtimeline" event with the type "transmuxed." + if (waitForEndedTimelineEvent) { + return; + } + transmuxer.onmessage = null; handleDone_({ transmuxedData, @@ -185,6 +200,10 @@ const processTransmux = (options) => { // even if we didn't push any bytes, we have to make sure we flush in case we reached // the end of the segment transmuxer.postMessage({ action: isPartial ? 
'partialFlush' : 'flush' }); + + if (isEndOfTimeline) { + transmuxer.postMessage({ action: 'endTimeline' }); + } }; export const dequeue = (transmuxer) => { diff --git a/test/segment-transmuxer.test.js b/test/segment-transmuxer.test.js index c17e77dee..666ed20d4 100644 --- a/test/segment-transmuxer.test.js +++ b/test/segment-transmuxer.test.js @@ -420,3 +420,170 @@ QUnit.test( ); } ); + +QUnit.test('transmux waits for endTimeline if isEndOfTimeline', function(assert) { + const done = assert.async(); + const dataFn = sinon.spy(); + const trackInfoFn = sinon.spy(); + const audioTimingFn = sinon.spy(); + const videoTimingFn = sinon.spy(); + const videoSegmentTimingInfoFn = sinon.spy(); + const audioSegmentTimingInfoFn = sinon.spy(); + const onEndedTimelineFn = sinon.spy(); + + this.transmuxer = createTransmuxer(false); + + transmux({ + transmuxer: this.transmuxer, + bytes: muxedSegment(), + audioAppendStart: null, + gopsToAlignWith: null, + isPartial: false, + onData: dataFn, + onTrackInfo: trackInfoFn, + onAudioTimingInfo: audioTimingFn, + onVideoTimingInfo: videoTimingFn, + onVideoSegmentTimingInfo: videoSegmentTimingInfoFn, + onAudioSegmentTimingInfo: audioSegmentTimingInfoFn, + onId3: noop, + onCaptions: noop, + isEndOfTimeline: true, + onEndedTimeline: onEndedTimelineFn, + onDone: () => { + assert.ok(dataFn.callCount, 'got data events'); + assert.ok(trackInfoFn.callCount, 'got trackInfo events'); + assert.ok(audioTimingFn.callCount, 'got audioTimingInfo events'); + assert.ok(videoTimingFn.callCount, 'got videoTimingInfo events'); + assert.ok(videoSegmentTimingInfoFn.callCount, 'got videoSegmentTimingInfo events'); + assert.ok(audioSegmentTimingInfoFn.callCount, 'got audioSegmentTimingInfo events'); + assert.ok(onEndedTimelineFn.callCount, 'got onEndedTimeline event'); + done(); + } + }); +}); + +QUnit.test('transmux does not wait for endTimeline if not isEndOfTimeline', function(assert) { + const done = assert.async(); + const dataFn = sinon.spy(); + const 
trackInfoFn = sinon.spy(); + const audioTimingFn = sinon.spy(); + const videoTimingFn = sinon.spy(); + const videoSegmentTimingInfoFn = sinon.spy(); + const audioSegmentTimingInfoFn = sinon.spy(); + const onEndedTimelineFn = sinon.spy(); + + this.transmuxer = createTransmuxer(false); + + transmux({ + transmuxer: this.transmuxer, + bytes: muxedSegment(), + audioAppendStart: null, + gopsToAlignWith: null, + isPartial: false, + onData: dataFn, + onTrackInfo: trackInfoFn, + onAudioTimingInfo: audioTimingFn, + onVideoTimingInfo: videoTimingFn, + onVideoSegmentTimingInfo: videoSegmentTimingInfoFn, + onAudioSegmentTimingInfo: audioSegmentTimingInfoFn, + onId3: noop, + onCaptions: noop, + isEndOfTimeline: false, + onEndedTimeline: onEndedTimelineFn, + onDone: () => { + assert.ok(dataFn.callCount, 'got data events'); + assert.ok(trackInfoFn.callCount, 'got trackInfo events'); + assert.ok(audioTimingFn.callCount, 'got audioTimingInfo events'); + assert.ok(videoTimingFn.callCount, 'got videoTimingInfo events'); + assert.ok(videoSegmentTimingInfoFn.callCount, 'got videoSegmentTimingInfo events'); + assert.ok(audioSegmentTimingInfoFn.callCount, 'got audioSegmentTimingInfo events'); + assert.notOk(onEndedTimelineFn.callCount, 'did not get onEndedTimeline event'); + done(); + } + }); +}); + +QUnit.test('partial transmuxer transmux waits for endTimeline if isEndOfTimeline', function(assert) { + const done = assert.async(); + const dataFn = sinon.spy(); + const trackInfoFn = sinon.spy(); + const audioTimingFn = sinon.spy(); + const videoTimingFn = sinon.spy(); + const videoSegmentTimingInfoFn = sinon.spy(); + const audioSegmentTimingInfoFn = sinon.spy(); + const onEndedTimelineFn = sinon.spy(); + + this.transmuxer = createTransmuxer(true); + + transmux({ + transmuxer: this.transmuxer, + bytes: muxedSegment(), + audioAppendStart: null, + gopsToAlignWith: null, + // isEndOfTimeline should never be true if isPartial is true + isPartial: false, + onData: dataFn, + onTrackInfo: 
trackInfoFn, + onAudioTimingInfo: audioTimingFn, + onVideoTimingInfo: videoTimingFn, + onVideoSegmentTimingInfo: videoSegmentTimingInfoFn, + onAudioSegmentTimingInfo: audioSegmentTimingInfoFn, + onId3: noop, + onCaptions: noop, + isEndOfTimeline: true, + onEndedTimeline: onEndedTimelineFn, + onDone: () => { + assert.ok(dataFn.callCount, 'got data events'); + assert.ok(trackInfoFn.callCount, 'got trackInfo events'); + assert.ok(audioTimingFn.callCount, 'got audioTimingInfo events'); + assert.ok(videoTimingFn.callCount, 'got videoTimingInfo events'); + // TODO: partial appends don't currently fire this + // assert.ok(videoSegmentTimingInfoFn.callCount, 'got videoSegmentTimingInfo events'); + // assert.ok(audioSegmentTimingInfoFn.callCount, 'got audioSegmentTimingInfo events'); + assert.ok(onEndedTimelineFn.callCount, 'got onEndedTimeline event'); + done(); + } + }); +}); + +QUnit.test('partial transmuxer transmux does not wait for endTimeline if not isEndOfTimeline', function(assert) { + const done = assert.async(); + const dataFn = sinon.spy(); + const trackInfoFn = sinon.spy(); + const audioTimingFn = sinon.spy(); + const videoTimingFn = sinon.spy(); + const videoSegmentTimingInfoFn = sinon.spy(); + const audioSegmentTimingInfoFn = sinon.spy(); + const onEndedTimelineFn = sinon.spy(); + + this.transmuxer = createTransmuxer(true); + + transmux({ + transmuxer: this.transmuxer, + bytes: muxedSegment(), + audioAppendStart: null, + gopsToAlignWith: null, + isPartial: false, + onData: dataFn, + onTrackInfo: trackInfoFn, + onAudioTimingInfo: audioTimingFn, + onVideoTimingInfo: videoTimingFn, + onVideoSegmentTimingInfo: videoSegmentTimingInfoFn, + onAudioSegmentTimingInfo: audioSegmentTimingInfoFn, + onId3: noop, + onCaptions: noop, + isEndOfTimeline: false, + onEndedTimeline: onEndedTimelineFn, + onDone: () => { + assert.ok(dataFn.callCount, 'got data events'); + assert.ok(trackInfoFn.callCount, 'got trackInfo events'); + assert.ok(audioTimingFn.callCount, 'got 
audioTimingInfo events'); + assert.ok(videoTimingFn.callCount, 'got videoTimingInfo events'); + // TODO: partial appends don't currently fire this + // assert.ok(videoSegmentTimingInfoFn.callCount, 'got videoSegmentTimingInfo events'); + // assert.ok(audioSegmentTimingInfoFn.callCount, 'got audioSegmentTimingInfo events'); + assert.notOk(onEndedTimelineFn.callCount, 'did not get onEndedTimeline event'); + done(); + } + }); +});