From d1287862dd457a00946dc5940e5eeafaaf404120 Mon Sep 17 00:00:00 2001
From: Paul Berberian
Date: Mon, 29 Jul 2024 11:18:46 +0200
Subject: [PATCH] [POC] Update `Manifest` structure to make HLS implementation
 possible

NOTE: This is just a background, low-priority Proof-Of-Concept. It is not
currently functional and is in the middle of its implementation; there is
also no HLS code for now, only HLS-related ideas.

Overview
========

This PR is a Proof-Of-Concept where I explore whether HLS playback can be
natively implemented in the RxPlayer without sacrificing too much of the
RxPlayer code's readability.

This is not the first attempt, but the approach here is different from
previous ones.

One of the core ideas is to keep the RxPlayer API as is, and thus to hide
all HLS differences inside the RxPlayer code. We already thought about
potential HLS compatibility when designing our v4, so there seems to be
nothing in our API incompatible with HLS concepts. Also, our core code is
for the most part protocol-agnostic, so we could presumably keep, and
profit from, most of our logic while playing HLS streams.

However, this attempt completely changes our internal `Manifest` concept,
especially its `Adaptation` subpart (which was equivalent to the concept
of a "track").

Manifest hierarchy change
=========================

The issue
---------

One of the main differences between HLS and the other protocols handled by
the RxPlayer (such as DASH) is the concept of "variant streams", which only
HLS has.

Basically, in DASH you first select your wanted audio + video + text tracks
and then select a Representation (i.e. quality) according to your current
playback conditions, such as your network bandwidth.

In HLS however, this is reversed. You first have to select your "variant
stream" based on your playback conditions (mainly: network bandwidth) and
then see which "media" - including our notion of a track - is available for
that variant stream.

Because of edge cases, we cannot translate the HLS model into our DASH-like
model without losing some HLS features. Doing the reverse (adapting the
DASH model into the HLS model, which seems to be more or less what e.g. the
shaka-player did) could work, yet it doesn't seem like an optimal solution
(e.g. we would either lose some bandwidth information or create a huge
number of variant streams for all potential combinations).

How I tried to fix it
---------------------

So what I did here was to change the Manifest object's hierarchy so it can
adapt both to how HLS treats things and to how DASH treats things.

The Manifest looked like this before this commit:

```ts
type Manifest = {
  periods: Array<{
    // Define a particular track
    adaptations: Record<
      "audio" | "video" | "text",
      Array<{
        // qualities
        representations: Array<{
          // ... That Representation's metadata
        }>;
        // ... The rest of that track's metadata
      }>
    >;
    // ... The rest of that Period's metadata
  }>;
  // ... The rest of the Manifest's metadata
};
```

Now it looks like:

```ts
type Manifest = {
  periods: Array<{
    tracksMetadata: Record<
      "audio" | "video" | "text",
      Record<
        string, // The track's id
        {
          // All qualities linked to that track
          representations: Record<
            string, // That Representation's id
            {
              // ... That Representation's metadata
            }
          >;
          // ... The rest of that track's metadata
        }
      >
    >;
    // Groups of available tracks and qualities combinations
    variantStreams: Array<{
      id: string;
      // bandwidth for that variant; only defined for HLS, other protocols
      // have only a single variant
      bandwidth: number | undefined;
      // Authorized tracks + qualities combinations in that variantStream
      media: Record<
        "audio" | "video" | "text",
        Array<{
          // `id` of the corresponding track in `tracksMetadata`
          linkedTrackId: string;
          // `id`s of the corresponding Representations in `tracksMetadata`
          representations: string[];
        }>
      >;
    }>;
    // ... The rest of that Period's metadata
  }>;
  // ... The rest of the Manifest's metadata
};
```

So basically, in a Period, we now separate a track's metadata from the
conditions in which we may play it: the former relies on `tracksMetadata`,
the latter on `variantStreams`.

Note that `variantStreams` only uses `id`s to refer to tracks and
Representations: it does not contain the metadata directly. This is to
avoid repeating a track's and a Representation's metadata already present
in `tracksMetadata`. We cannot just rely on a same-object-reference trick
because the Manifest object has to be transmittable between the worker and
the main thread. Worker compatibility is also the main reason why we're not
relying on `Map` objects to link ids and metadata, though that may seem
more logical.

This new structure allows us to enforce complex HLS features, like
restrictions on which tracks and qualities can be played when another,
unrelated track is selected: here, incompatible tracks would simply never
be present in the same variantStream.

It is very clear, though, that this adds net complexity on top of our
Manifest structure, as the "variant stream" concept does not exist in
Smooth or DASH. For Smooth and DASH, only a single `variantStream` will be
present, with its `bandwidth` set to `undefined` (the Representation-level
`bandwidth` is still exploited by our ABR logic) and containing all tracks
declared in `tracksMetadata`.
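To illustrate how this id-based indirection could be consumed, here is a
minimal, hypothetical sketch of resolving the track and Representation
metadata referenced by a variant stream. The simplified types and the
`resolveVariantMedia` helper below are made up for the example and are not
part of this commit:

```ts
// Simplified stand-ins for the structures described above (example only).
interface IRepresentationMetadataLike {
  bitrate?: number;
}
interface ITrackMetadataLike {
  id: string;
  representations: Record<string, IRepresentationMetadataLike>;
}
interface IVariantMediaLike {
  linkedTrackId: string;
  representations: string[];
}
interface IPeriodLike {
  tracksMetadata: Record<
    "audio" | "video" | "text",
    Record<string, ITrackMetadataLike>
  >;
  variantStreams: Array<{
    id: string;
    bandwidth: number | undefined;
    media: Record<"audio" | "video" | "text", IVariantMediaLike[]>;
  }>;
}

/**
 * Resolve, through ids only, the track and Representation metadata that a
 * given variant stream references for a given type.
 */
function resolveVariantMedia(
  period: IPeriodLike,
  variantStreamId: string,
  type: "audio" | "video" | "text",
): Array<{
  track: ITrackMetadataLike;
  representations: IRepresentationMetadataLike[];
}> {
  const variant = period.variantStreams.find((v) => v.id === variantStreamId);
  if (variant === undefined) {
    return [];
  }
  return variant.media[type].map((media) => {
    // The variant only stores ids: look the metadata up in `tracksMetadata`
    const track = period.tracksMetadata[type][media.linkedTrackId];
    if (track === undefined) {
      throw new Error("Variant stream references an unknown track id");
    }
    const representations = media.representations
      .map((repId) => track.representations[repId])
      .filter((rep): rep is IRepresentationMetadataLike => rep !== undefined);
    return { track, representations };
  });
}
```

For Smooth and DASH contents, such a lookup would always go through the
single `variantStream` (with an `undefined` `bandwidth`) described above.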
--- .../adaptive_representation_selector.ts | 56 +-- src/core/adaptive/guess_based_chooser.ts | 44 ++- src/core/adaptive/index.ts | 2 + src/core/adaptive/utils/filter_by_bitrate.ts | 12 +- .../adaptive/utils/filter_by_resolution.ts | 41 ++- .../adaptive/utils/pending_requests_store.ts | 4 +- .../utils/select_optimal_representation.ts | 11 +- src/core/cmcd/cmcd_data_builder.ts | 22 +- src/core/fetchers/segment/segment_fetcher.ts | 10 +- .../content_time_boundaries_observer.ts | 168 +++++---- src/core/main/worker/track_choice_setter.ts | 14 +- src/core/main/worker/worker_main.ts | 22 +- .../segment_sinks/implementations/types.ts | 11 +- .../inventory/segment_inventory.ts | 21 +- src/core/segment_sinks/inventory/types.ts | 7 +- .../segment_sinks/segment_buffers_store.ts | 2 +- .../stream/adaptation/adaptation_stream.ts | 84 +++-- .../get_representations_switch_strategy.ts | 14 +- src/core/stream/adaptation/types.ts | 20 +- src/core/stream/index.ts | 2 +- .../get_time_ranges_for_content.ts | 6 +- .../orchestrator/stream_orchestrator.ts | 2 +- src/core/stream/period/period_stream.ts | 103 +++--- src/core/stream/period/types.ts | 35 +- .../utils/get_adaptation_switch_strategy.ts | 35 +- .../representation/representation_stream.ts | 4 +- src/core/stream/representation/types.ts | 6 +- .../utils/append_segment_to_buffer.ts | 4 +- .../utils/check_for_discontinuity.ts | 12 +- .../representation/utils/downloading_queue.ts | 16 +- .../representation/utils/get_buffer_status.ts | 8 +- .../utils/get_needed_segments.ts | 20 +- .../representation/utils/push_init_segment.ts | 4 +- .../utils/push_media_segment.ts | 4 +- src/core/types.ts | 4 +- src/errors/media_error.ts | 6 +- .../tools/VideoThumbnailLoader/types.ts | 4 +- .../video_thumbnail_loader.ts | 23 +- .../api/debug/modules/general_info.ts | 7 +- .../debug/modules/segment_buffer_content.ts | 42 +-- src/main_thread/api/public_api.ts | 137 ++++--- src/main_thread/decrypt/content_decryptor.ts | 15 +- src/main_thread/decrypt/types.ts | 6 +- src/main_thread/init/index.ts | 2 +- .../init/media_source_content_initializer.ts | 10 +- .../init/multi_thread_content_initializer.ts | 88 +++-- src/main_thread/init/types.ts | 35 +- .../utils/update_manifest_codec_support.ts | 42 +-- .../tracks_store/track_dispatcher.ts | 55 +-- src/main_thread/tracks_store/tracks_store.ts | 279 +++++++-------- .../classes/__tests__/manifest.test.ts | 4 +- src/manifest/classes/adaptation.ts | 150 ++++---- src/manifest/classes/index.ts | 4 +- src/manifest/classes/manifest.ts | 139 ++------ src/manifest/classes/period.ts | 261 +++++++------- src/manifest/classes/representation.ts | 5 +- .../classes/update_period_in_place.ts | 90 +++-- src/manifest/classes/utils.ts | 8 +- src/manifest/index.ts | 9 +- src/manifest/types.ts | 82 ++++- src/manifest/utils.ts | 336 +++++++++--------- src/multithread_types.ts | 22 +- .../__tests__/attach_trickmode_track.test.ts | 64 ++-- .../dash/common/attach_trickmode_track.ts | 38 +- .../dash/common/infer_adaptation_type.ts | 21 +- .../dash/common/parse_adaptation_sets.ts | 63 ++-- .../manifest/dash/common/parse_periods.ts | 50 ++- .../dash/common/parse_representations.ts | 7 +- .../manifest/local/parse_local_manifest.ts | 62 ++-- .../metaplaylist/metaplaylist_parser.ts | 98 +++-- src/parsers/manifest/smooth/create_parser.ts | 139 ++++---- src/parsers/manifest/types.ts | 156 ++++---- ...t_first_time_from_representations.test.ts} | 76 ++-- ...et_last_time_from_representations.test.ts} | 76 ++-- .../manifest/utils/check_manifest_ids.ts | 33 +- ...=> 
get_first_time_from_representations.ts} | 16 +- ... => get_last_time_from_representations.ts} | 20 +- .../manifest/utils/get_maximum_positions.ts | 30 +- .../manifest/utils/get_minimum_position.ts | 30 +- .../utils/infer_segment_container.ts | 8 +- 80 files changed, 1886 insertions(+), 1792 deletions(-) rename src/parsers/manifest/utils/__tests__/{get_first_time_from_adaptations.test.ts => get_first_time_from_representations.test.ts} (72%) rename src/parsers/manifest/utils/__tests__/{get_last_time_from_adaptation.test.ts => get_last_time_from_representations.test.ts} (72%) rename src/parsers/manifest/utils/{get_first_time_from_adaptation.ts => get_first_time_from_representations.ts} (75%) rename src/parsers/manifest/utils/{get_last_time_from_adaptation.ts => get_last_time_from_representations.ts} (73%) diff --git a/src/core/adaptive/adaptive_representation_selector.ts b/src/core/adaptive/adaptive_representation_selector.ts index 93493248c9..9d8e3d9c38 100644 --- a/src/core/adaptive/adaptive_representation_selector.ts +++ b/src/core/adaptive/adaptive_representation_selector.ts @@ -17,11 +17,11 @@ import config from "../../config"; import log from "../../log"; import type { - IAdaptation, IManifest, IPeriod, IRepresentation, ISegment, + ITrack, } from "../../manifest"; import type { ObservationPosition, @@ -87,24 +87,24 @@ export default function createAdaptiveRepresentationSelector( * @see IRepresentationEstimator * @param {Object} context * @param {Object} currentRepresentation - * @param {Object} representations + * @param {Object} representationList * @param {Object} playbackObserver * @param {Object} stopAllEstimates * @returns {Array.} */ return function getEstimates( - context: { manifest: IManifest; period: IPeriod; adaptation: IAdaptation }, + context: { manifest: IManifest; period: IPeriod; track: ITrack }, currentRepresentation: IReadOnlySharedReference, - representations: IReadOnlySharedReference, + representationList: IReadOnlySharedReference, playbackObserver: IReadOnlyPlaybackObserver, stopAllEstimates: CancellationSignal, ): IRepresentationEstimatorResponse { - const { type } = context.adaptation; - const bandwidthEstimator = _getBandwidthEstimator(type); - const initialBitrate = initialBitrates[type] ?? 0; + const { trackType } = context.track; + const bandwidthEstimator = _getBandwidthEstimator(trackType); + const initialBitrate = initialBitrates[trackType] ?? 0; const filters = { - limitResolution: throttlers.limitResolution[type] ?? limitResolutionDefaultRef, - throttleBitrate: throttlers.throttleBitrate[type] ?? throttleBitrateDefaultRef, + limitResolution: throttlers.limitResolution[trackType] ?? limitResolutionDefaultRef, + throttleBitrate: throttlers.throttleBitrate[trackType] ?? throttleBitrateDefaultRef, }; return getEstimateReference( { @@ -114,7 +114,7 @@ export default function createAdaptiveRepresentationSelector( filters, initialBitrate, playbackObserver, - representations, + representationList, lowLatencyMode, }, stopAllEstimates, @@ -166,7 +166,7 @@ function getEstimateReference( initialBitrate, lowLatencyMode, playbackObserver, - representations: representationsRef, + representationList: representationsRef, }: IRepresentationEstimatorArguments, stopAllEstimates: CancellationSignal, ): IRepresentationEstimatorResponse { @@ -222,14 +222,14 @@ function getEstimateReference( * produced. 
*/ function createEstimateReference( - unsortedRepresentations: IRepresentation[], + unsortedRepresentations: IRepresentationListItem[], innerCancellationSignal: CancellationSignal, ): SharedReference { if (unsortedRepresentations.length <= 1) { // There's only a single Representation. Just choose it. return new SharedReference({ bitrate: undefined, - representation: unsortedRepresentations[0], + representation: unsortedRepresentations[0]?.representation, urgent: true, knownStableBitrate: undefined, }); @@ -240,7 +240,7 @@ function getEstimateReference( /** Ensure `Representation` objects are sorted by bitrates and only rely on this. */ const sortedRepresentations = unsortedRepresentations.sort( - (ra, rb) => ra.bitrate - rb.bitrate, + (ra, rb) => ra.bandwidth - rb.bandwidth, ); /** @@ -249,7 +249,7 @@ function getEstimateReference( * buffer size etc.). */ const bufferBasedChooser = new BufferBasedChooser( - sortedRepresentations.map((r) => r.bitrate), + sortedRepresentations.map((r) => r.bandwidth), ); /** Store the previous estimate made here. */ @@ -560,10 +560,10 @@ function getEstimateReference( * @returns {Array.} */ function getFilteredRepresentations( - representations: IRepresentation[], + representations: IRepresentationListItem[], resolutionLimit: IResolutionInfo | undefined, bitrateThrottle: number | undefined, -): IRepresentation[] { +): IRepresentationListItem[] { let filteredReps = representations; if (bitrateThrottle !== undefined && bitrateThrottle < Infinity) { @@ -670,7 +670,7 @@ export interface IMetricsCallbackPayload { /** Context about the segment downloaded. */ content: { representation: IRepresentation; - adaptation: IAdaptation; + track: ITrack; segment: ISegment; }; } @@ -773,15 +773,15 @@ export interface IRepresentationEstimatorArguments { */ lowLatencyMode: boolean; /** The list of Representations `getEstimateReference` can choose from. */ - representations: IReadOnlySharedReference; + representationList: IReadOnlySharedReference; /** Context for the list of Representations to choose. */ context: { /** In which Manifest the Representations are. */ manifest: IManifest; /** In which Period the Representations are. */ period: IPeriod; - /** In which Adaptation the Representations are. */ - adaptation: IAdaptation; + /** In which track the Representations are. */ + track: ITrack; }; } @@ -791,11 +791,11 @@ export interface IRepresentationEstimatorArguments { */ export type IRepresentationEstimator = ( /** Information on the content for which a Representation will be chosen */ - context: { manifest: IManifest; period: IPeriod; adaptation: IAdaptation }, + context: { manifest: IManifest; period: IPeriod; track: ITrack }, /** Reference emitting the Representation currently loaded. */ currentRepresentation: IReadOnlySharedReference, /** Reference emitting the list of available Representations to choose from. */ - representations: IReadOnlySharedReference, + representationList: IReadOnlySharedReference, /** Regularly emits playback conditions */ playbackObserver: IReadOnlyPlaybackObserver, /** @@ -847,4 +847,14 @@ export interface IRepresentationEstimatorThrottlers { throttleBitrate: Partial>>; } +export interface IRepresentationListItem { + /** + * The advised minimum bandwidth estimate at which the Representation + * should be selected. + */ + bandwidth: number; + /** The Representation itself. 
*/ + representation: IRepresentation; +} + export type { IResolutionInfo }; diff --git a/src/core/adaptive/guess_based_chooser.ts b/src/core/adaptive/guess_based_chooser.ts index 01635e5c66..d26ef4af66 100644 --- a/src/core/adaptive/guess_based_chooser.ts +++ b/src/core/adaptive/guess_based_chooser.ts @@ -18,6 +18,7 @@ import log from "../../log"; import type { IRepresentation } from "../../manifest"; import arrayFindIndex from "../../utils/array_find_index"; import getMonotonicTimeStamp from "../../utils/monotonic_timestamp"; +import type { IRepresentationListItem } from "./adaptive_representation_selector"; import { estimateRequestBandwidth } from "./network_analyzer"; import type LastEstimateStorage from "./utils/last_estimate_storage"; import { ABRAlgorithmType } from "./utils/last_estimate_storage"; @@ -66,7 +67,7 @@ export default class GuessBasedChooser { * Perform a "guess", which basically indicates which Representation should be * chosen according to the `GuessBasedChooser`. * - * @param {Array.} representations - Array of all Representation the + * @param {Array.} representationList - Array of all Representation the * GuessBasedChooser can choose from, sorted by bitrate ascending. * /!\ It is very important that Representation in that Array are sorted by * bitrate ascending for this method to work as intented. @@ -81,7 +82,7 @@ export default class GuessBasedChooser { * algorithm). */ public getGuess( - representations: IRepresentation[], + representationList: IRepresentationListItem[], observation: { /** * For the concerned media buffer, difference in seconds between the next @@ -123,11 +124,11 @@ export default class GuessBasedChooser { } if (this._canGuessHigher(bufferGap, speed, scoreData)) { const nextRepresentation = getNextRepresentation( - representations, + representationList, currentRepresentation, ); if (nextRepresentation !== null) { - return nextRepresentation; + return nextRepresentation.representation; } } return null; @@ -156,18 +157,21 @@ export default class GuessBasedChooser { this._consecutiveWrongGuesses++; this._blockGuessesUntil = getMonotonicTimeStamp() + Math.min(this._consecutiveWrongGuesses * 15000, 120000); - return getPreviousRepresentation(representations, currentRepresentation); + return ( + getPreviousRepresentation(representationList, currentRepresentation) + ?.representation ?? null + ); } else if (scoreData === undefined) { return currentRepresentation; } if (this._canGuessHigher(bufferGap, speed, scoreData)) { const nextRepresentation = getNextRepresentation( - representations, + representationList, currentRepresentation, ); if (nextRepresentation !== null) { - return nextRepresentation; + return nextRepresentation.representation; } } return currentRepresentation; @@ -266,20 +270,20 @@ export default class GuessBasedChooser { * * /!\ The representations have to be already sorted by bitrate, in ascending * order. - * @param {Array.} representations - Available representations to choose + * @param {Array.} representationList - Available representations to choose * from, sorted by bitrate in ascending order. * @param {Object} currentRepresentation - The Representation currently * considered. 
* @returns {Object|null} */ function getNextRepresentation( - representations: IRepresentation[], + representationList: IRepresentationListItem[], currentRepresentation: IRepresentation, -): IRepresentation | null { - const len = representations.length; +): IRepresentationListItem | null { + const len = representationList.length; let index = arrayFindIndex( - representations, - ({ id }) => id === currentRepresentation.id, + representationList, + ({ representation }) => representation.id === currentRepresentation.id, ); if (index < 0) { log.error("ABR: Current Representation not found."); @@ -287,8 +291,9 @@ function getNextRepresentation( } while (++index < len) { - if (representations[index].bitrate > currentRepresentation.bitrate) { - return representations[index]; + // XXX TODO bitrate + if (representationList[index].bandwidth > currentRepresentation.bitrate) { + return representationList[index]; } } return null; @@ -303,12 +308,12 @@ function getNextRepresentation( * @returns {Object|null} */ function getPreviousRepresentation( - representations: IRepresentation[], + representations: IRepresentationListItem[], currentRepresentation: IRepresentation, -): IRepresentation | null { +): IRepresentationListItem | null { let index = arrayFindIndex( representations, - ({ id }) => id === currentRepresentation.id, + ({ representation }) => representation.id === currentRepresentation.id, ); if (index < 0) { log.error("ABR: Current Representation not found."); @@ -316,7 +321,8 @@ function getPreviousRepresentation( } while (--index >= 0) { - if (representations[index].bitrate < currentRepresentation.bitrate) { + // XXX TODO bitrate + if (representations[index].bandwidth < currentRepresentation.bitrate) { return representations[index]; } } diff --git a/src/core/adaptive/index.ts b/src/core/adaptive/index.ts index 7fe3e0bdfb..0531f301c7 100644 --- a/src/core/adaptive/index.ts +++ b/src/core/adaptive/index.ts @@ -23,6 +23,7 @@ import type { IRepresentationEstimatorCallbacks, IRepresentationEstimatorPlaybackObservation, IRepresentationEstimatorThrottlers as IABRThrottlers, + IRepresentationListItem, IRequestBeginCallbackPayload, IRequestEndCallbackPayload, IRequestProgressCallbackPayload, @@ -39,6 +40,7 @@ export type { IABREstimate, IMetricsCallbackPayload, IRepresentationEstimatorCallbacks, + IRepresentationListItem, IRepresentationEstimatorPlaybackObservation, IRequestBeginCallbackPayload, IRequestProgressCallbackPayload, diff --git a/src/core/adaptive/utils/filter_by_bitrate.ts b/src/core/adaptive/utils/filter_by_bitrate.ts index 4bfb66b7cb..26db0dc42d 100644 --- a/src/core/adaptive/utils/filter_by_bitrate.ts +++ b/src/core/adaptive/utils/filter_by_bitrate.ts @@ -14,8 +14,8 @@ * limitations under the License. */ -import type { IRepresentation } from "../../../manifest"; import arrayFindIndex from "../../../utils/array_find_index"; +import type { IRepresentationListItem } from "../adaptive_representation_selector"; /** * Get only representations lower or equal to a given bitrate. 
@@ -26,18 +26,18 @@ import arrayFindIndex from "../../../utils/array_find_index"; * @returns {Array.} */ export default function filterByBitrate( - representations: IRepresentation[], + representations: IRepresentationListItem[], bitrate: number, -): IRepresentation[] { +): IRepresentationListItem[] { if (representations.length === 0) { return []; } - representations.sort((ra, rb) => ra.bitrate - rb.bitrate); - const minimumBitrate = representations[0].bitrate; + representations.sort((ra, rb) => ra.bandwidth - rb.bandwidth); + const minimumBitrate = representations[0].bandwidth; const bitrateCeil = Math.max(bitrate, minimumBitrate); const firstSuperiorBitrateIndex = arrayFindIndex( representations, - (representation) => representation.bitrate > bitrateCeil, + (representation) => representation.bandwidth > bitrateCeil, ); if (firstSuperiorBitrateIndex === -1) { return representations; // All representations have lower bitrates. diff --git a/src/core/adaptive/utils/filter_by_resolution.ts b/src/core/adaptive/utils/filter_by_resolution.ts index d544d5b1e2..ca929c8ccc 100644 --- a/src/core/adaptive/utils/filter_by_resolution.ts +++ b/src/core/adaptive/utils/filter_by_resolution.ts @@ -14,47 +14,52 @@ * limitations under the License. */ -import type { IRepresentation } from "../../../manifest"; import arrayFind from "../../../utils/array_find"; +import type { IRepresentationListItem } from "../adaptive_representation_selector"; /** * Filter representations based on their resolution. * - the highest resolution considered will be the one linked to the first * representation which has a superior resolution or equal to the one * given. - * @param {Array.} representations - The representations array + * @param {Array.} representationList - The representations array * @param {Object} resolution * @returns {Array.} */ export default function filterByResolution( - representations: IRepresentation[], + representationList: IRepresentationListItem[], resolution: IResolutionInfo, -): IRepresentation[] { +): IRepresentationListItem[] { if (resolution.width === undefined || resolution.height === undefined) { - return representations; + return representationList; } const width = resolution.width * resolution.pixelRatio; const height = resolution.height * resolution.pixelRatio; - const sortedRepsByWidth = representations + const sortedRepsByWidth = representationList .slice() // clone - .sort((a, b) => (a.width ?? 0) - (b.width ?? 0)); + .sort((a, b) => (a.representation.width ?? 0) - (b.representation.width ?? 0)); - const repWithMaxWidth = arrayFind( + const itemWithMaxWidth = arrayFind( sortedRepsByWidth, - (representation) => - typeof representation.width === "number" && - representation.width >= width && - typeof representation.height === "number" && - representation.height >= height, + (item) => + typeof item.representation.width === "number" && + item.representation.width >= width && + typeof item.representation.height === "number" && + item.representation.height >= height, ); - if (repWithMaxWidth === undefined) { - return representations; + if (itemWithMaxWidth === undefined) { + return representationList; } - const maxWidth = typeof repWithMaxWidth.width === "number" ? repWithMaxWidth.width : 0; - return representations.filter((representation) => - typeof representation.width === "number" ? representation.width <= maxWidth : true, + const maxWidth = + typeof itemWithMaxWidth.representation.width === "number" + ? 
itemWithMaxWidth.representation.width + : 0; + return representationList.filter((item) => + typeof item.representation.width === "number" + ? item.representation.width <= maxWidth + : true, ); } diff --git a/src/core/adaptive/utils/pending_requests_store.ts b/src/core/adaptive/utils/pending_requests_store.ts index e6208bfb9c..ebdc1386c1 100644 --- a/src/core/adaptive/utils/pending_requests_store.ts +++ b/src/core/adaptive/utils/pending_requests_store.ts @@ -17,7 +17,7 @@ import log from "../../../log"; import type { IManifest, - IAdaptation, + ITrackMetadata, ISegment, IPeriod, IRepresentation, @@ -151,7 +151,7 @@ export interface IRequestInfo { export interface IRequestInfoContent { manifest: IManifest; period: IPeriod; - adaptation: IAdaptation; + track: ITrackMetadata; representation: IRepresentation; segment: ISegment; } diff --git a/src/core/adaptive/utils/select_optimal_representation.ts b/src/core/adaptive/utils/select_optimal_representation.ts index ba0f019fd3..63af39a11a 100644 --- a/src/core/adaptive/utils/select_optimal_representation.ts +++ b/src/core/adaptive/utils/select_optimal_representation.ts @@ -16,6 +16,7 @@ import type { IRepresentation } from "../../../manifest"; import arrayFindIndex from "../../../utils/array_find_index"; +import type { IRepresentationListItem } from "../adaptive_representation_selector"; /** * From the given array of Representations (sorted by bitrate order ascending), @@ -28,17 +29,17 @@ import arrayFindIndex from "../../../utils/array_find_index"; * @returns {Object|undefined} */ export default function selectOptimalRepresentation( - representations: IRepresentation[], + representations: IRepresentationListItem[], wantedBitrate: number, ): IRepresentation { const firstIndexTooHigh = arrayFindIndex( representations, - (representation) => representation.bitrate > wantedBitrate, + (representation) => representation.bandwidth > wantedBitrate, ); if (firstIndexTooHigh === -1) { - return representations[representations.length - 1]; + return representations[representations.length - 1].representation; } else if (firstIndexTooHigh === 0) { - return representations[0]; + return representations[0].representation; } - return representations[firstIndexTooHigh - 1]; + return representations[firstIndexTooHigh - 1].representation; } diff --git a/src/core/cmcd/cmcd_data_builder.ts b/src/core/cmcd/cmcd_data_builder.ts index 70401786e0..4b4f0572f6 100644 --- a/src/core/cmcd/cmcd_data_builder.ts +++ b/src/core/cmcd/cmcd_data_builder.ts @@ -1,11 +1,6 @@ import log from "../../log"; -import type { - IAdaptation, - IManifest, - IPeriod, - IRepresentation, - ISegment, -} from "../../manifest"; +import type { IManifest, IPeriod, IRepresentation, ISegment } from "../../manifest"; +import type { IRepresentationMetadata, ITrackMetadata } from "../../manifest/types"; import type { IReadOnlyPlaybackObserver, IRebufferingStatus, @@ -14,6 +9,7 @@ import type { import type { ICmcdOptions, ICmcdPayload, ITrackType } from "../../public_types"; import createUuid from "../../utils/create_uuid"; import isNullOrUndefined from "../../utils/is_null_or_undefined"; +import { objectValues } from "../../utils/object_values"; import type { IRange } from "../../utils/ranges"; import TaskCanceller from "../../utils/task_canceller"; @@ -39,7 +35,7 @@ export interface ICmcdSegmentInfo { /** Period metadata linked to the wanted segment. */ period: IPeriod; /** Adaptation metadata linked to the wanted segment. 
*/ - adaptation: IAdaptation; + track: ITrackMetadata; /** Representation metadata linked to the wanted segment. */ representation: IRepresentation; /** Segment metadata linked to the wanted segment. */ @@ -218,12 +214,12 @@ export default class CmcdDataBuilder { public getCmcdDataForSegmentRequest(content: ICmcdSegmentInfo): ICmcdPayload { const lastObservation = this._playbackObserver?.getReference().getValue(); - const props = this._getCommonCmcdData(this._lastThroughput[content.adaptation.type]); + const props = this._getCommonCmcdData(this._lastThroughput[content.track.trackType]); props.br = Math.round(content.representation.bitrate / 1000); props.d = Math.round(content.segment.duration * 1000); // TODO nor (next object request) and nrr (next range request) - switch (content.adaptation.type) { + switch (content.track.trackType) { case "video": props.ot = "v"; break; @@ -243,7 +239,7 @@ export default class CmcdDataBuilder { lastObservation !== undefined && (props.ot === "v" || props.ot === "a" || props.ot === "av") ) { - const bufferedForType = lastObservation.buffered[content.adaptation.type]; + const bufferedForType = lastObservation.buffered[content.track.trackType]; if (!isNullOrUndefined(bufferedForType)) { // TODO more precize position estimate? const position = @@ -292,8 +288,8 @@ export default class CmcdDataBuilder { break; } props.st = content.manifest.isDynamic ? "l" : "v"; - props.tb = content.adaptation.representations.reduce( - (acc: number | undefined, representation: IRepresentation) => { + props.tb = objectValues(content.track.representations).reduce( + (acc: number | undefined, representation: IRepresentationMetadata) => { if ( representation.isSupported !== true || representation.decipherable === false diff --git a/src/core/fetchers/segment/segment_fetcher.ts b/src/core/fetchers/segment/segment_fetcher.ts index 7ea6118cb5..b8b0177327 100644 --- a/src/core/fetchers/segment/segment_fetcher.ts +++ b/src/core/fetchers/segment/segment_fetcher.ts @@ -19,10 +19,10 @@ import { formatError } from "../../../errors"; import log from "../../../log"; import type { IManifest, - IAdaptation, ISegment, IPeriod, IRepresentation, + ITrack, } from "../../../manifest"; import { getLoggableSegmentId } from "../../../manifest"; import type { ICdnMetadata } from "../../../parsers/manifest"; @@ -119,7 +119,7 @@ export default function createSegmentFetcher({ fetcherCallbacks: ISegmentFetcherCallbacks, cancellationSignal: CancellationSignal, ): Promise { - const { segment, adaptation, representation, manifest, period } = content; + const { segment, representation, track, manifest, period } = content; // used by logs const segmentIdString = getLoggableSegmentId(content); @@ -159,8 +159,8 @@ export default function createSegmentFetcher({ /** Segment context given to the transport pipelines. 
*/ const context: ISegmentContext = { segment, - type: adaptation.type, - language: adaptation.language, + type: track.trackType, + language: track.language, isLive: manifest.isLive, periodStart: period.start, periodEnd: period.end, @@ -429,7 +429,7 @@ export interface ISegmentFetcherCallbacks { export interface ISegmentLoaderContent { manifest: IManifest; period: IPeriod; - adaptation: IAdaptation; + track: ITrack; representation: IRepresentation; segment: ISegment; } diff --git a/src/core/main/common/content_time_boundaries_observer.ts b/src/core/main/common/content_time_boundaries_observer.ts index 83a4703f1b..e55bbffef6 100644 --- a/src/core/main/common/content_time_boundaries_observer.ts +++ b/src/core/main/common/content_time_boundaries_observer.ts @@ -21,14 +21,16 @@ import type { import { MediaError } from "../../../errors"; import type { IManifest, - IAdaptation, + IRepresentation, IRepresentationIndex, IPeriod, + ITrack, } from "../../../manifest"; import type { IReadOnlyPlaybackObserver } from "../../../playback_observer"; import type { IPlayerError } from "../../../public_types"; import EventEmitter from "../../../utils/event_emitter"; import isNullOrUndefined from "../../../utils/is_null_or_undefined"; +import { objectValues } from "../../../utils/object_values"; import SortedList from "../../../utils/sorted_list"; import TaskCanceller from "../../../utils/task_canceller"; @@ -141,31 +143,36 @@ export default class ContentTimeBoundariesObserver extends EventEmitter; + trackReference: SharedReference; /** * Object through which Representation choices will be emitted. * @@ -60,7 +60,7 @@ export default class TrackChoiceSetter { public addTrackSetter( periodId: string, bufferType: ITrackType, - ref: SharedReference, + ref: SharedReference, ) { let obj = this._refs.get(periodId); if (obj === undefined) { @@ -119,7 +119,7 @@ export default class TrackChoiceSetter { } else { ref.representations = new SharedReference(choice.initialRepresentations); ref.trackReference.setValue({ - adaptationId: choice.adaptationId, + trackId: choice.trackId, switchingMode: choice.switchingMode, representations: ref.representations, relativeResumingPosition: choice.relativeResumingPosition, @@ -130,7 +130,7 @@ export default class TrackChoiceSetter { public updateRepresentations( periodId: string, - adaptationId: string, + trackId: string, bufferType: ITrackType, choice: IRepresentationsChoice, ): boolean { @@ -140,8 +140,8 @@ export default class TrackChoiceSetter { return false; } const val = ref.trackReference.getValue(); - if (isNullOrUndefined(val) || val.adaptationId !== adaptationId) { - log.debug("WP: Desynchronized Adaptation id", val?.adaptationId, adaptationId); + if (isNullOrUndefined(val) || val.trackId !== trackId) { + log.debug("WP: Desynchronized track id", val?.trackId, trackId); return false; } ref.representations.setValue(choice); diff --git a/src/core/main/worker/worker_main.ts b/src/core/main/worker/worker_main.ts index c3532e789b..19f61a6b8b 100644 --- a/src/core/main/worker/worker_main.ts +++ b/src/core/main/worker/worker_main.ts @@ -2,7 +2,7 @@ import config from "../../../config"; import { MediaError, OtherError } from "../../../errors"; import features from "../../../features"; import log from "../../../log"; -import Manifest, { Adaptation, Period, Representation } from "../../../manifest/classes"; +import Manifest, { Period, Representation, Track } from "../../../manifest/classes"; import type { IContentInitializationData, IDiscontinuityUpdateWorkerMessagePayload, @@ -337,7 
+337,7 @@ export default function initializeWorkerMain() { } preparedContent.trackChoiceSetter.updateRepresentations( msg.value.periodId, - msg.value.adaptationId, + msg.value.trackId, msg.value.bufferType, msg.value.choice, ); @@ -662,20 +662,20 @@ function loadOrReloadPreparedContent( }); }, - adaptationChange(value) { - contentTimeBoundariesObserver.onAdaptationChange( + trackChange(value) { + contentTimeBoundariesObserver.onTrackChange( value.type, value.period, - value.adaptation, + value.track, ); if (currentLoadCanceller.signal.isCancelled()) { return; } sendMessage({ - type: WorkerMessageType.AdaptationChanged, + type: WorkerMessageType.TrackChanged, contentId, value: { - adaptationId: value.adaptation?.id ?? null, + trackId: value.track?.id ?? null, periodId: value.period.id, type: value.type, }, @@ -691,7 +691,7 @@ function loadOrReloadPreparedContent( type: WorkerMessageType.RepresentationChanged, contentId, value: { - adaptationId: value.adaptation.id, + trackId: value.track.id, representationId: value.representation?.id ?? null, periodId: value.period.id, type: value.type, @@ -722,7 +722,7 @@ function loadOrReloadPreparedContent( preparedContent.trackChoiceSetter.addTrackSetter( value.period.id, value.type, - value.adaptationRef, + value.trackRef, ); sendMessage({ type: WorkerMessageType.PeriodStreamReady, @@ -795,8 +795,8 @@ function loadOrReloadPreparedContent( if (content.period instanceof Period) { content.period = content.period.getMetadataSnapshot(); } - if (content.adaptation instanceof Adaptation) { - content.adaptation = content.adaptation.getMetadataSnapshot(); + if (content.track instanceof Track) { + content.track = content.track.getMetadataSnapshot(); } if (content.representation instanceof Representation) { content.representation = content.representation.getMetadataSnapshot(); diff --git a/src/core/segment_sinks/implementations/types.ts b/src/core/segment_sinks/implementations/types.ts index 155f66e29b..f9a2b7a98c 100644 --- a/src/core/segment_sinks/implementations/types.ts +++ b/src/core/segment_sinks/implementations/types.ts @@ -14,7 +14,12 @@ * limitations under the License. */ -import type { IAdaptation, ISegment, IPeriod, IRepresentation } from "../../../manifest"; +import type { + ISegment, + IPeriod, + IRepresentation, + ITrackMetadata, +} from "../../../manifest"; import type { IRange } from "../../../utils/ranges"; import type { IBufferedChunk, @@ -265,8 +270,8 @@ export interface IPushedChunkData { * `signalSegmentComplete` method. */ export interface ICompleteSegmentInfo { - /** Adaptation object linked to the chunk. */ - adaptation: IAdaptation; + /** track object linked to the chunk. */ + track: ITrackMetadata; /** Period object linked to the chunk. */ period: IPeriod; /** Representation object linked to the chunk. 
*/ diff --git a/src/core/segment_sinks/inventory/segment_inventory.ts b/src/core/segment_sinks/inventory/segment_inventory.ts index fca85ab84e..91591f909b 100644 --- a/src/core/segment_sinks/inventory/segment_inventory.ts +++ b/src/core/segment_sinks/inventory/segment_inventory.ts @@ -16,7 +16,12 @@ import config from "../../../config"; import log from "../../../log"; -import type { IAdaptation, ISegment, IPeriod, IRepresentation } from "../../../manifest"; +import type { + ITrackMetadata, + ISegment, + IPeriod, + IRepresentation, +} from "../../../manifest"; import { areSameContent } from "../../../manifest"; import isNullOrUndefined from "../../../utils/is_null_or_undefined"; import getMonotonicTimeStamp from "../../../utils/monotonic_timestamp"; @@ -137,8 +142,8 @@ export interface IBufferedChunk { /** information to provide when "inserting" a new chunk into the SegmentInventory. */ export interface IInsertedChunkInfos { - /** The Adaptation that chunk is linked to */ - adaptation: IAdaptation; + /** The track that chunk is linked to */ + track: ITrackMetadata; /** The Period that chunk is linked to */ period: IPeriod; /** The Representation that chunk is linked to. */ @@ -219,7 +224,7 @@ export default class SegmentInventory { let thisSegment = inventory[0]; // Current segmentInfos considered const { MINIMUM_SEGMENT_SIZE } = config.getCurrent(); /** Type of buffer considered, used for logs */ - const bufferType: string | undefined = thisSegment?.infos.adaptation.type; + const bufferType: string | undefined = thisSegment?.infos.track.trackType; if (log.hasLevel("DEBUG")) { const prettyPrintedRanges = ranges.map((r) => `${r.start}-${r.end}`).join(","); @@ -429,7 +434,7 @@ export default class SegmentInventory { public insertChunk( { period, - adaptation, + track, representation, segment, chunkSize, @@ -443,7 +448,7 @@ export default class SegmentInventory { return; } - const bufferType = adaptation.type; + const bufferType = track.trackType; if (start >= end) { log.warn( "SI: Invalid chunked inserted: starts before it ends", @@ -466,7 +471,7 @@ export default class SegmentInventory { precizeEnd: false, bufferedStart: undefined, bufferedEnd: undefined, - infos: { segment, period, adaptation, representation }, + infos: { segment, period, track, representation }, }; // begin by the end as in most use cases this will be faster @@ -881,7 +886,7 @@ export default class SegmentInventory { */ public completeSegment(content: { period: IPeriod; - adaptation: IAdaptation; + track: ITrackMetadata; representation: IRepresentation; segment: ISegment; }): void { diff --git a/src/core/segment_sinks/inventory/types.ts b/src/core/segment_sinks/inventory/types.ts index ae1625c3d3..0cbcc016c6 100644 --- a/src/core/segment_sinks/inventory/types.ts +++ b/src/core/segment_sinks/inventory/types.ts @@ -15,19 +15,18 @@ */ import type { - IAdaptation, ISegment, IPeriod, IRepresentation, - IAdaptationMetadata, IPeriodMetadata, IRepresentationMetadata, + ITrackMetadata, } from "../../../manifest"; /** Content information for a single buffered chunk */ export interface IChunkContext { /** Adaptation this chunk is related to. */ - adaptation: IAdaptation; + track: ITrackMetadata; /** Period this chunk is related to. */ period: IPeriod; /** Representation this chunk is related to. 
*/ @@ -37,7 +36,7 @@ export interface IChunkContext { } export interface IChunkContextSnapshot { - adaptation: IAdaptationMetadata; + track: ITrackMetadata; period: IPeriodMetadata; representation: IRepresentationMetadata; } diff --git a/src/core/segment_sinks/segment_buffers_store.ts b/src/core/segment_sinks/segment_buffers_store.ts index 9602ec5bc0..f6a211ac57 100644 --- a/src/core/segment_sinks/segment_buffers_store.ts +++ b/src/core/segment_sinks/segment_buffers_store.ts @@ -415,7 +415,7 @@ function shouldHaveNativeBuffer( function getChunkContextSnapshot(context: IChunkContext): IChunkContextSnapshot { return { - adaptation: context.adaptation.getMetadataSnapshot(), + track: context.track, period: context.period.getMetadataSnapshot(), representation: context.representation.getMetadataSnapshot(), }; diff --git a/src/core/stream/adaptation/adaptation_stream.ts b/src/core/stream/adaptation/adaptation_stream.ts index 6ba8b16daf..053882fd02 100644 --- a/src/core/stream/adaptation/adaptation_stream.ts +++ b/src/core/stream/adaptation/adaptation_stream.ts @@ -3,8 +3,9 @@ import { formatError } from "../../../errors"; import log from "../../../log"; import type { IRepresentation } from "../../../manifest"; import arrayIncludes from "../../../utils/array_includes"; -import { assertUnreachable } from "../../../utils/assert"; +import assert, { assertUnreachable } from "../../../utils/assert"; import cancellableSleep from "../../../utils/cancellable_sleep"; +import isNullOrUndefined from "../../../utils/is_null_or_undefined"; import noop from "../../../utils/noop"; import objectAssign from "../../../utils/object_assign"; import queueMicrotask from "../../../utils/queue_microtask"; @@ -12,6 +13,7 @@ import type { IReadOnlySharedReference } from "../../../utils/reference"; import SharedReference, { createMappedReference } from "../../../utils/reference"; import type { CancellationSignal } from "../../../utils/task_canceller"; import TaskCanceller from "../../../utils/task_canceller"; +import type { IRepresentationListItem } from "../../adaptive"; import type { IRepresentationsChoice, IRepresentationStreamCallbacks, @@ -65,7 +67,31 @@ export default function AdaptationStream( callbacks: IAdaptationStreamCallbacks, parentCancelSignal: CancellationSignal, ): void { - const { manifest, period, adaptation } = content; + const { manifest, period, track, representationsChoice } = content; + + /** All Representations linked to the given track id in all variant streams. */ + const allRepresentations = period.variantStreams.flatMap((variantStream) => { + return variantStream.media[track.trackType].flatMap((media) => { + return media.linkedTrack === track.id + ? media.representations.map((representationId) => { + const representation = track.representations[representationId]; + assert( + !isNullOrUndefined(representation), + "Representation from variant not found.", + ); + return { + // Take the highest ceil between variant bandwidth and + // Representation bitrate + bandwidth: Math.max( + variantStream.bandwidth ?? 0, + representation.bitrate ?? 0, + ), + representation, + }; + }) + : []; + }); + }); /** Allows to cancel everything the `AdaptationStream` is doing. */ const adapStreamCanceller = new TaskCanceller(); @@ -93,23 +119,23 @@ export default function AdaptationStream( /** Stores the last emitted bitrate. 
*/ let previouslyEmittedBitrate: number | undefined; - const initialRepIds = content.representations.getValue().representationIds; - const initialRepresentations = content.adaptation.representations.filter( - (r) => - arrayIncludes(initialRepIds, r.id) && - r.decipherable !== false && - r.isSupported !== false, + const initialRepIds = representationsChoice.getValue().representationIds; + const initialRepresentations = allRepresentations.filter( + ({ representation }) => + arrayIncludes(initialRepIds, representation.id) && + representation.decipherable !== false && + representation.isSupported !== false, ); /** Emit the list of Representation for the adaptive logic. */ - const representationsList = new SharedReference( + const representationsList = new SharedReference( initialRepresentations, adapStreamCanceller.signal, ); // Start-up Adaptive logic const { estimates: estimateRef, callbacks: abrCallbacks } = representationEstimator( - { manifest, period, adaptation }, + { manifest, period, track }, currentRepresentation, representationsList, playbackObserver, @@ -118,7 +144,7 @@ export default function AdaptationStream( /** Allows a `RepresentationStream` to easily fetch media segments. */ const segmentFetcher = segmentFetcherCreator.createSegmentFetcher( - adaptation.type, + track.trackType, /* eslint-disable @typescript-eslint/unbound-method */ { onRequestBegin: abrCallbacks.requestBegin, @@ -141,8 +167,8 @@ export default function AdaptationStream( return; } previouslyEmittedBitrate = bitrate; - log.debug(`Stream: new ${adaptation.type} bitrate estimate`, bitrate); - callbacks.bitrateEstimateChange({ type: adaptation.type, bitrate }); + log.debug(`Stream: new ${track.trackType} bitrate estimate`, bitrate); + callbacks.bitrateEstimateChange({ type: track.trackType, bitrate }); }, { emitCurrentValue: true, clearSignal: adapStreamCanceller.signal }, ); @@ -154,14 +180,14 @@ export default function AdaptationStream( let cancelCurrentStreams: TaskCanceller | undefined; // Each time the list of wanted Representations changes, we restart the logic - content.representations.onUpdate( + representationsChoice.onUpdate( (val) => { if (cancelCurrentStreams !== undefined) { cancelCurrentStreams.cancel(); } - const newRepIds = content.representations.getValue().representationIds; - const newRepresentations = content.adaptation.representations.filter((r) => - arrayIncludes(newRepIds, r.id), + const newRepIds = representationsChoice.getValue().representationIds; + const newRepresentations = allRepresentations.filter(({ representation }) => + arrayIncludes(newRepIds, representation.id), ); representationsList.setValueIfChanged(newRepresentations); cancelCurrentStreams = new TaskCanceller(); @@ -201,7 +227,7 @@ export default function AdaptationStream( // in the buffer const switchStrat = getRepresentationsSwitchingStrategy( period, - adaptation, + track, choice, segmentSink, playbackObserver, @@ -224,7 +250,7 @@ export default function AdaptationStream( const { DELTA_POSITION_AFTER_RELOAD } = config.getCurrent(); const timeOffset = DELTA_POSITION_AFTER_RELOAD.bitrateSwitch; return callbacks.waitingMediaSourceReload({ - bufferType: adaptation.type, + bufferType: track.trackType, period, timeOffset, stayInPeriod: true, @@ -299,10 +325,10 @@ export default function AdaptationStream( return; } if (estimate.urgent) { - log.info("Stream: urgent Representation switch", adaptation.type); + log.info("Stream: urgent Representation switch", track.trackType); return terminateCurrentStream.setValue({ urgent: true }); 
} else { - log.info("Stream: slow Representation switch", adaptation.type); + log.info("Stream: slow Representation switch", track.trackType); return terminateCurrentStream.setValue({ urgent: false }); } }, @@ -313,8 +339,8 @@ export default function AdaptationStream( ); const repInfo = { - type: adaptation.type, - adaptation, + type: track.trackType, + track, period, representation, }; @@ -386,10 +412,10 @@ export default function AdaptationStream( bufferGoalCanceller.signal, ); const maxBufferSize = - adaptation.type === "video" ? maxVideoBufferSize : new SharedReference(Infinity); + track.trackType === "video" ? maxVideoBufferSize : new SharedReference(Infinity); log.info( "Stream: changing representation", - adaptation.type, + track.trackType, representation.id, representation.bitrate, ); @@ -433,7 +459,7 @@ export default function AdaptationStream( RepresentationStream( { playbackObserver, - content: { representation, adaptation, period, manifest }, + content: { representation, track, period, manifest }, segmentSink, segmentFetcher, terminate: terminateCurrentStream, @@ -454,15 +480,15 @@ export default function AdaptationStream( (updates) => { for (const element of updates.updatedPeriods) { if (element.period.id === period.id) { - for (const updated of element.result.updatedAdaptations) { - if (updated.adaptation === adaptation.id) { + for (const updated of element.result.updatedTracks) { + if (updated.track === track.id) { for (const rep of updated.removedRepresentations) { if (rep === representation.id) { if (fnCancelSignal.isCancelled()) { return; } return callbacks.waitingMediaSourceReload({ - bufferType: adaptation.type, + bufferType: track.trackType, period, timeOffset: 0, stayInPeriod: true, diff --git a/src/core/stream/adaptation/get_representations_switch_strategy.ts b/src/core/stream/adaptation/get_representations_switch_strategy.ts index f57fdf9069..251ebb441f 100644 --- a/src/core/stream/adaptation/get_representations_switch_strategy.ts +++ b/src/core/stream/adaptation/get_representations_switch_strategy.ts @@ -15,7 +15,7 @@ */ import config from "../../../config"; -import type { IAdaptation, IPeriod } from "../../../manifest"; +import type { IPeriod, ITrackMetadata } from "../../../manifest"; import type { IReadOnlyPlaybackObserver } from "../../../playback_observer"; import arrayIncludes from "../../../utils/array_includes"; import type { IRange } from "../../../utils/ranges"; @@ -33,7 +33,7 @@ import type { export default function getRepresentationsSwitchingStrategy( period: IPeriod, - adaptation: IAdaptation, + track: ITrackMetadata, settings: IRepresentationsChoice, segmentSink: SegmentSink, playbackObserver: IReadOnlyPlaybackObserver, @@ -47,7 +47,7 @@ export default function getRepresentationsSwitchingStrategy( for (const elt of inventory) { if ( elt.infos.period.id === period.id && - (elt.infos.adaptation.id !== adaptation.id || + (elt.infos.track.id !== track.id || !arrayIncludes(settings.representationIds, elt.infos.representation.id)) ) { insertInto(unwantedRange, { @@ -63,7 +63,7 @@ export default function getRepresentationsSwitchingStrategy( const info = operation.value.inventoryInfos; if ( info.period.id === period.id && - (info.adaptation.id !== adaptation.id || + (info.track.id !== track.id || !arrayIncludes(settings.representationIds, info.representation.id)) ) { const start = info.segment.time; @@ -73,7 +73,7 @@ export default function getRepresentationsSwitchingStrategy( } } - // Continue if we have no other Adaptation buffered in the current Period + 
// Continue if we have no other track buffered in the current Period if (unwantedRange.length === 0) { return { type: "continue", value: undefined }; } @@ -85,7 +85,7 @@ export default function getRepresentationsSwitchingStrategy( } } - // From here, clean-up data from the previous Adaptation, if one + // From here, clean-up data from the previous track, if one const shouldFlush = settings.switchingMode === "direct"; const rangesToExclude = []; @@ -109,7 +109,7 @@ export default function getRepresentationsSwitchingStrategy( if (!shouldFlush) { // exclude data around current position to avoid decoding issues const { ADAP_REP_SWITCH_BUFFER_PADDINGS } = config.getCurrent(); - const bufferType = adaptation.type; + const bufferType = track.trackType; /** Ranges that won't be cleaned from the current buffer. */ const paddingBefore = ADAP_REP_SWITCH_BUFFER_PADDINGS[bufferType].before ?? 0; diff --git a/src/core/stream/adaptation/types.ts b/src/core/stream/adaptation/types.ts index f1708127d7..0a3a632e28 100644 --- a/src/core/stream/adaptation/types.ts +++ b/src/core/stream/adaptation/types.ts @@ -1,4 +1,4 @@ -import type { IManifest, IAdaptation, IPeriod, IRepresentation } from "../../../manifest"; +import type { IManifest, IPeriod, ITrack, IRepresentation } from "../../../manifest"; import type { IReadOnlyPlaybackObserver } from "../../../playback_observer"; import type { IAudioTrackSwitchingMode, @@ -59,8 +59,8 @@ export interface IRepresentationChangePayload { type: IBufferType; /** The `Period` linked to the `RepresentationStream` we're creating. */ period: IPeriod; - /** The `Adaptation` linked to the `RepresentationStream` we're creating. */ - adaptation: IAdaptation; + /** The track linked to the `RepresentationStream` we're creating. */ + track: ITrack; /** * The `Representation` linked to the `RepresentationStream` we're creating. * `null` when we're choosing no Representation at all. @@ -132,8 +132,8 @@ export interface IAdaptationStreamArguments { content: { manifest: IManifest; period: IPeriod; - adaptation: IAdaptation; - representations: IReadOnlySharedReference; + track: ITrack; + representationsChoice: IReadOnlySharedReference; }; options: IAdaptationStreamOptions; /** Estimate the right Representation to play. */ @@ -190,10 +190,10 @@ export interface IAdaptationStreamOptions { enableFastSwitching: boolean; } -/** Object indicating a choice of Adaptation made by the user. */ -export interface IAdaptationChoice { - /** The Adaptation choosen. */ - adaptationId: string; +/** Object indicating a choice of track made by the user. */ +export interface ITrackChoice { + /** The track choosen. */ + trackId: string; /** "Switching mode" in which the track switch should happen. */ switchingMode: ITrackSwitchingMode; @@ -202,7 +202,7 @@ export interface IAdaptationChoice { relativeResumingPosition: number | undefined; /** * Shared reference allowing to indicate which Representations from - * that Adaptation are allowed. + * that track are allowed. 
*/ representations: IReadOnlySharedReference; } diff --git a/src/core/stream/index.ts b/src/core/stream/index.ts index e684fddb03..a0235b34b1 100644 --- a/src/core/stream/index.ts +++ b/src/core/stream/index.ts @@ -28,7 +28,7 @@ export type { IStreamStatusPayload, } from "./representation"; export type { - IAdaptationChoice, + ITrackChoice, INeedsBufferFlushPayload, ITrackSwitchingMode, IWaitingMediaSourceReloadPayload, diff --git a/src/core/stream/orchestrator/get_time_ranges_for_content.ts b/src/core/stream/orchestrator/get_time_ranges_for_content.ts index 5187c0dced..a8cf10862f 100644 --- a/src/core/stream/orchestrator/get_time_ranges_for_content.ts +++ b/src/core/stream/orchestrator/get_time_ranges_for_content.ts @@ -16,9 +16,9 @@ import log from "../../../log"; import type { - IAdaptationMetadata, IPeriodMetadata, IRepresentationMetadata, + ITrackMetadata, } from "../../../manifest"; import type { IRange } from "../../../utils/ranges"; import type { SegmentSink } from "../../segment_sinks"; @@ -33,8 +33,8 @@ import type { SegmentSink } from "../../segment_sinks"; export default function getTimeRangesForContent( segmentSink: SegmentSink, contents: Array<{ - adaptation: IAdaptationMetadata; period: IPeriodMetadata; + track: ITrackMetadata; representation: IRepresentationMetadata; }>, ): IRange[] { @@ -49,7 +49,7 @@ export default function getTimeRangesForContent( const hasContent = contents.some((content) => { return ( chunk.infos.period.id === content.period.id && - chunk.infos.adaptation.id === content.adaptation.id && + chunk.infos.track.id === content.track.id && chunk.infos.representation.id === content.representation.id ); }); diff --git a/src/core/stream/orchestrator/stream_orchestrator.ts b/src/core/stream/orchestrator/stream_orchestrator.ts index c3c30cfcb8..3c677087dc 100644 --- a/src/core/stream/orchestrator/stream_orchestrator.ts +++ b/src/core/stream/orchestrator/stream_orchestrator.ts @@ -299,7 +299,7 @@ export default function StreamOrchestrator( ): Promise { const segmentSinkStatus = segmentSinksStore.getStatus(bufferType); const ofCurrentType = updates.filter( - (update) => update.adaptation.type === bufferType, + (update) => update.track.trackType === bufferType, ); if ( // No update concerns the current type of data diff --git a/src/core/stream/period/period_stream.ts b/src/core/stream/period/period_stream.ts index 9667758f74..982ec700cc 100644 --- a/src/core/stream/period/period_stream.ts +++ b/src/core/stream/period/period_stream.ts @@ -17,12 +17,12 @@ import config from "../../../config"; import { formatError, MediaError } from "../../../errors"; import log from "../../../log"; -import type { IAdaptation, IPeriod } from "../../../manifest"; -import { toTaggedTrack } from "../../../manifest"; +import type { IPeriod, ITrack } from "../../../manifest"; +import { getMimeTypeString, toTaggedTrack } from "../../../manifest"; import type { IReadOnlyPlaybackObserver } from "../../../playback_observer"; import type { ITrackType } from "../../../public_types"; -import arrayFind from "../../../utils/array_find"; import objectAssign from "../../../utils/object_assign"; +import { objectValues } from "../../../utils/object_values"; import queueMicrotask from "../../../utils/queue_microtask"; import { getLeftSizeOfRange } from "../../../utils/ranges"; import type { IReadOnlySharedReference } from "../../../utils/reference"; @@ -32,7 +32,7 @@ import TaskCanceller, { CancellationError } from "../../../utils/task_canceller" import type { IBufferType, SegmentSink } from 
"../../segment_sinks"; import SegmentSinksStore from "../../segment_sinks"; import type { - IAdaptationChoice, + ITrackChoice, IAdaptationStreamCallbacks, IAdaptationStreamPlaybackObservation, } from "../adaptation"; @@ -48,13 +48,13 @@ import getAdaptationSwitchStrategy from "./utils/get_adaptation_switch_strategy" /** * Create a single PeriodStream: * - Lazily create (or reuse) a SegmentSink for the given type. - * - Create a Stream linked to an Adaptation each time it changes, to + * - Create a Stream linked to a track each time it changes, to * download and append the corresponding segments to the SegmentSink. * - Announce when the Stream is full or is awaiting new Segments through * events * * @param {Object} args - Various arguments allowing the `PeriodStream` to - * determine which Adaptation and which Representation to choose, as well as + * determine which track and which Representation to choose, as well as * which segments to load from it. * You can check the corresponding type for more information. * @param {Object} callbacks - The `PeriodStream` relies on a system of @@ -95,11 +95,11 @@ export default function PeriodStream( const { manifest, period } = content; /** - * Emits the chosen Adaptation and Representations for the current type. - * `null` when no Adaptation is chosen (e.g. no subtitles) + * Emits the chosen track and Representations for the current type. + * `null` when no track is chosen (e.g. no subtitles) * `undefined` at the beginning (it can be ignored.). */ - const adaptationRef = new SharedReference( + const trackRef = new SharedReference( undefined, parentCancelSignal, ); @@ -108,17 +108,17 @@ export default function PeriodStream( type: bufferType, manifest, period, - adaptationRef, + trackRef, }); if (parentCancelSignal.isCancelled()) { return; } let currentStreamCanceller: TaskCanceller | undefined; - let isFirstAdaptationSwitch = true; + let isFirstTrackSwitch = true; - adaptationRef.onUpdate( - (choice: IAdaptationChoice | null | undefined) => { + trackRef.onUpdate( + (choice: ITrackChoice | null | undefined) => { // As an IIFE to profit from async/await while respecting onUpdate's signature (async (): Promise => { if (choice === undefined) { @@ -132,7 +132,7 @@ export default function PeriodStream( if (choice === null) { // Current type is disabled for that Period - log.info(`Stream: Set no ${bufferType} Adaptation. P:`, period.start); + log.info(`Stream: Set no ${bufferType} track. P:`, period.start); const segmentSinkStatus = segmentSinksStore.getStatus(bufferType); if (segmentSinkStatus.type === "initialized") { @@ -157,9 +157,9 @@ export default function PeriodStream( } } - callbacks.adaptationChange({ + callbacks.trackChange({ type: bufferType, - adaptation: null, + track: null, period, }); if (streamCanceller.isUsed()) { @@ -176,28 +176,24 @@ export default function PeriodStream( ); } - const adaptations = period.adaptations[bufferType]; - const adaptation = arrayFind( - adaptations ?? 
[], - (a) => a.id === choice.adaptationId, - ); - if (adaptation === undefined) { + const track = period.tracksMetadata[bufferType][choice.trackId]; + if (track === undefined) { currentStreamCanceller.cancel(); - log.warn("Stream: Unfound chosen Adaptation choice", choice.adaptationId); + log.warn("Stream: Unfound chosen track choice", choice.trackId); return; } /** - * If this is not the first Adaptation choice, we might want to apply a + * If this is not the first track choice, we might want to apply a * delta to the current position so we can re-play back some media in the - * new Adaptation to give some context back. + * new track to give some context back. * This value contains this relative position, in seconds. * @see createMediaSourceReloadRequester */ const { DELTA_POSITION_AFTER_RELOAD } = config.getCurrent(); let relativePosHasBeenDefaulted: boolean = false; let relativePosAfterSwitch: number; - if (isFirstAdaptationSwitch) { + if (isFirstTrackSwitch) { relativePosAfterSwitch = 0; } else if (choice.relativeResumingPosition !== undefined) { relativePosAfterSwitch = choice.relativeResumingPosition; @@ -215,7 +211,7 @@ export default function PeriodStream( break; } } - isFirstAdaptationSwitch = false; + isFirstTrackSwitch = false; if ( SegmentSinksStore.isNative(bufferType) && @@ -228,15 +224,15 @@ export default function PeriodStream( ); } - // Reload if the Adaptation disappears from the manifest + // Reload if the track disappears from the manifest manifest.addEventListener( "manifestUpdate", (updates) => { // If current period has been unexpectedly removed, ask to reload for (const element of updates.updatedPeriods) { if (element.period.id === period.id) { - for (const adap of element.result.removedAdaptations) { - if (adap.id === adaptation.id) { + for (const trak of element.result.removedTracks) { + if (trak.id === track.id) { return askForMediaSourceReload( relativePosAfterSwitch, true, @@ -254,12 +250,13 @@ export default function PeriodStream( const { representations } = choice; log.info( - `Stream: Updating ${bufferType} adaptation`, - `A: ${adaptation.id}`, + `Stream: Updating ${bufferType} track`, + `A: ${track.id}`, `P: ${period.start}`, ); - callbacks.adaptationChange({ type: bufferType, adaptation, period }); + callbacks.trackChange({ type: track.trackType, track, period }); + if (streamCanceller.isUsed()) { return; // Previous call has provoken cancellation by side-effect } @@ -267,12 +264,12 @@ export default function PeriodStream( const segmentSink = createOrReuseSegmentSink( segmentSinksStore, bufferType, - adaptation, + track, ); const strategy = getAdaptationSwitchStrategy( segmentSink, period, - adaptation, + track, choice.switchingMode, playbackObserver, options, @@ -311,7 +308,7 @@ export default function PeriodStream( garbageCollectors.get(segmentSink)(streamCanceller.signal); createAdaptationStream( - adaptation, + track, representations, segmentSink, streamCanceller.signal, @@ -328,25 +325,25 @@ export default function PeriodStream( ); /** - * @param {Object} adaptation - * @param {Object} representations + * @param {Object} track + * @param {Object} representationsChoice * @param {Object} segmentSink * @param {Object} cancelSignal */ function createAdaptationStream( - adaptation: IAdaptation, - representations: IReadOnlySharedReference, + track: ITrack, + representationsChoice: IReadOnlySharedReference, segmentSink: SegmentSink, cancelSignal: CancellationSignal, ): void { const adaptationPlaybackObserver = createAdaptationStreamPlaybackObserver( 
playbackObserver, - adaptation.type, + track.trackType, ); AdaptationStream( { - content: { manifest, period, adaptation, representations }, + content: { manifest, period, track, representationsChoice }, options, playbackObserver: adaptationPlaybackObserver, representationEstimator, @@ -446,13 +443,13 @@ export default function PeriodStream( /** * @param {string} bufferType - * @param {Object} adaptation + * @param {Object} track * @returns {Object} */ function createOrReuseSegmentSink( segmentSinksStore: SegmentSinksStore, bufferType: IBufferType, - adaptation: IAdaptation, + track: ITrack, ): SegmentSink { const segmentSinkStatus = segmentSinksStore.getStatus(bufferType); if (segmentSinkStatus.type === "initialized") { @@ -460,30 +457,28 @@ function createOrReuseSegmentSink( // eslint-disable-next-line @typescript-eslint/no-unsafe-return return segmentSinkStatus.value; } - const codec = getFirstDeclaredMimeType(adaptation); + const codec = getFirstDeclaredMimeType(track); // eslint-disable-next-line @typescript-eslint/no-unsafe-return return segmentSinksStore.createSegmentSink(bufferType, codec); } /** * Get mime-type string of the first representation declared in the given - * adaptation. - * @param {Adaptation} adaptation + * track. + * @param {Object} track * @returns {string} */ -function getFirstDeclaredMimeType(adaptation: IAdaptation): string { - const representations = adaptation.representations.filter((r) => { - return r.isSupported === true && r.decipherable !== false; - }); +function getFirstDeclaredMimeType(track: ITrack): string { + const representations = objectValues(track.representations); if (representations.length === 0) { const noRepErr = new MediaError( "NO_PLAYABLE_REPRESENTATION", - "No Representation in the chosen " + adaptation.type + " Adaptation can be played", - { tracks: [toTaggedTrack(adaptation)] }, + "No Representation in the chosen " + track.trackType + " track can be played", + { tracks: [toTaggedTrack(track)] }, ); throw noRepErr; } - return representations[0].getMimeTypeString(); + return getMimeTypeString(representations[0]); } /** diff --git a/src/core/stream/period/types.ts b/src/core/stream/period/types.ts index 3d6951e2fd..b380e5bc32 100644 --- a/src/core/stream/period/types.ts +++ b/src/core/stream/period/types.ts @@ -1,4 +1,4 @@ -import type { IManifest, IAdaptation, IPeriod } from "../../../manifest"; +import type { IManifest, IPeriod, ITrack } from "../../../manifest"; import type { ObservationPosition, IReadOnlyPlaybackObserver, @@ -14,7 +14,7 @@ import type { SegmentFetcherCreator } from "../../fetchers"; import type { IBufferType, SegmentSink } from "../../segment_sinks"; import type SegmentSinksStore from "../../segment_sinks"; import type { - IAdaptationChoice, + ITrackChoice, IAdaptationStreamCallbacks, IAdaptationStreamOptions, } from "../adaptation"; @@ -25,28 +25,21 @@ export type { IPausedPlaybackObservation }; /** Callbacks called by the `AdaptationStream` on various events. */ export interface IPeriodStreamCallbacks extends IAdaptationStreamCallbacks { /** - * Called when a new `PeriodStream` is ready to start but needs an Adaptation - * (i.e. track) to be chosen first. + * Called when a new `PeriodStream` is ready to start but needs a track + * to be chosen first. */ periodStreamReady(payload: IPeriodStreamReadyPayload): void; /** - * Called when a new `AdaptationStream` is created to load segments from an - * `Adaptation`. + * Called when a new `AdaptationStream` is created to load segments from a + * track.
*/ - adaptationChange(payload: IAdaptationChangePayload): void; + trackChange(payload: ITrackChangeEventPayload): void; } -/** Payload for the `adaptationChange` callback. */ -export interface IAdaptationChangePayload { - /** The type of buffer for which the Representation is changing. */ - type: IBufferType; - /** The `Period` linked to the `RepresentationStream` we're creating. */ +export interface ITrackChangeEventPayload { + type: ITrackType; + track: ITrack | null; period: IPeriod; - /** - * The `Adaptation` linked to the `AdaptationStream` we're creating. - * `null` when we're choosing no Adaptation at all. - */ - adaptation: IAdaptation | null; } /** Payload for the `periodStreamReady` callback. */ @@ -58,17 +51,17 @@ export interface IPeriodStreamReadyPayload { /** The `Period` linked to the `PeriodStream` we have created. */ period: IPeriod; /** - * The reference through which any Adaptation (i.e. track) choice should be - * emitted for that `PeriodStream`. + * The reference through which any track choice should be emitted for that + * `PeriodStream`. * * The `PeriodStream` will not do anything until this Reference has emitted * at least one to give its initial choice. * You can send `null` through it to tell this `PeriodStream` that you don't - * want any `Adaptation` for now. + * want any track for now. * It is set to `undefined` by default, you SHOULD NOT set it to `undefined` * yourself. */ - adaptationRef: SharedReference; + trackRef: SharedReference; } /** Playback observation required by the `PeriodStream`. */ diff --git a/src/core/stream/period/utils/get_adaptation_switch_strategy.ts b/src/core/stream/period/utils/get_adaptation_switch_strategy.ts index 83b34df950..a3bc85fcf5 100644 --- a/src/core/stream/period/utils/get_adaptation_switch_strategy.ts +++ b/src/core/stream/period/utils/get_adaptation_switch_strategy.ts @@ -15,9 +15,11 @@ */ import config from "../../../../config"; -import type { IAdaptation, IPeriod } from "../../../../manifest"; +import { getMimeTypeString } from "../../../../manifest"; +import type { IPeriod, ITrackMetadata } from "../../../../manifest"; import type { IReadOnlyPlaybackObserver } from "../../../../playback_observer"; import areCodecsCompatible from "../../../../utils/are_codecs_compatible"; +import { objectValues } from "../../../../utils/object_values"; import type { IRange } from "../../../../utils/ranges"; import { excludeFromRanges, insertInto } from "../../../../utils/ranges"; import type { SegmentSink } from "../../../segment_sinks"; @@ -55,14 +57,14 @@ export interface IAdaptationSwitchOptions { * situation. 
* @param {Object} segmentSink * @param {Object} period - * @param {Object} adaptation + * @param {Object} track * @param {Object} playbackObserver * @returns {Object} */ export default function getAdaptationSwitchStrategy( segmentSink: SegmentSink, period: IPeriod, - adaptation: IAdaptation, + track: ITrackMetadata, switchingMode: ITrackSwitchingMode, playbackObserver: IReadOnlyPlaybackObserver, options: IAdaptationSwitchOptions, @@ -70,7 +72,7 @@ export default function getAdaptationSwitchStrategy( if ( segmentSink.codec !== undefined && options.onCodecSwitch === "reload" && - !hasCompatibleCodec(adaptation, segmentSink.codec) + !hasCompatibleCodec(track, segmentSink.codec) ) { return { type: "needs-reload", value: undefined }; } @@ -79,7 +81,7 @@ export default function getAdaptationSwitchStrategy( const unwantedRange: IRange[] = []; for (const elt of inventory) { - if (elt.infos.period.id === period.id && elt.infos.adaptation.id !== adaptation.id) { + if (elt.infos.period.id === period.id && elt.infos.track.id !== track.id) { insertInto(unwantedRange, { start: elt.bufferedStart ?? elt.start, end: elt.bufferedEnd ?? elt.end, @@ -91,7 +93,7 @@ export default function getAdaptationSwitchStrategy( for (const operation of pendingOperations) { if (operation.type === SegmentSinkOperation.Push) { const info = operation.value.inventoryInfos; - if (info.period.id === period.id && info.adaptation.id !== adaptation.id) { + if (info.period.id === period.id && info.track.id !== track.id) { const start = info.segment.time; const end = start + info.segment.duration; insertInto(unwantedRange, { start, end }); @@ -99,7 +101,7 @@ export default function getAdaptationSwitchStrategy( } } - // Continue if we have no other Adaptation buffered in the current Period + // Continue if we have no other track buffered in the current Period if (unwantedRange.length === 0) { return { type: "continue", value: undefined }; } @@ -111,7 +113,7 @@ export default function getAdaptationSwitchStrategy( } } - // From here, clean-up data from the previous Adaptation, if one + // From here, clean-up data from the previous track, if one const shouldCleanAll = switchingMode === "direct"; @@ -135,7 +137,7 @@ export default function getAdaptationSwitchStrategy( if (!shouldCleanAll) { // Exclude data around current position to avoid decoding issues - const bufferType = adaptation.type; + const bufferType = track.trackType; const { ADAP_REP_SWITCH_BUFFER_PADDINGS } = config.getCurrent(); /** Ranges that won't be cleaned from the current buffer. */ const paddingBefore = ADAP_REP_SWITCH_BUFFER_PADDINGS[bufferType].before ?? 0; @@ -174,23 +176,20 @@ export default function getAdaptationSwitchStrategy( return { type: "continue", value: undefined }; } - return shouldCleanAll && adaptation.type !== "text" + return shouldCleanAll && track.trackType !== "text" ? { type: "flush-buffer", value: toRemove } : { type: "clean-buffer", value: toRemove }; } /** * Returns `true` if at least one codec of the Representations in the given - * Adaptation has a codec compatible with the given SegmentSink's codec. - * @param {Object} adaptation + * track has a codec compatible with the given SegmentSink's codec. 
+ * @param {Object} track * @param {string} segmentSinkCodec * @returns {boolean} */ -function hasCompatibleCodec(adaptation: IAdaptation, segmentSinkCodec: string): boolean { - return adaptation.representations.some( - (rep) => - rep.isSupported === true && - rep.decipherable !== false && - areCodecsCompatible(rep.getMimeTypeString(), segmentSinkCodec), +function hasCompatibleCodec(track: ITrackMetadata, segmentSinkCodec: string): boolean { + return objectValues(track.representations).some((rep) => + areCodecsCompatible(getMimeTypeString(rep), segmentSinkCodec), ); } diff --git a/src/core/stream/representation/representation_stream.ts b/src/core/stream/representation/representation_stream.ts index 6bf6b57d90..00cb91bcaa 100644 --- a/src/core/stream/representation/representation_stream.ts +++ b/src/core/stream/representation/representation_stream.ts @@ -93,9 +93,9 @@ export default function RepresentationStream( callbacks: IRepresentationStreamCallbacks, parentCancelSignal: CancellationSignal, ): void { - const { period, adaptation, representation } = content; + const { period, track, representation } = content; const { bufferGoal, maxBufferSize, drmSystemId, fastSwitchThreshold } = options; - const bufferType = adaptation.type; + const bufferType = track.trackType; /** `TaskCanceller` stopping ALL operations performed by the `RepresentationStream` */ const globalCanceller = new TaskCanceller(); diff --git a/src/core/stream/representation/types.ts b/src/core/stream/representation/types.ts index da5d4389b8..8928ddd128 100644 --- a/src/core/stream/representation/types.ts +++ b/src/core/stream/representation/types.ts @@ -1,10 +1,10 @@ import type { IContentProtection } from "../../../main_thread/types"; import type { IManifest, - IAdaptation, ISegment, IPeriod, IRepresentation, + ITrack, } from "../../../manifest"; import type { IEMSG } from "../../../parsers/containers/isobmff"; import type { @@ -133,7 +133,7 @@ export interface IStreamEventAddedSegmentPayload { /** Context about the content that has been added. */ content: { period: IPeriod; - adaptation: IAdaptation; + track: ITrack; representation: IRepresentation; }; /** The concerned Segment. */ @@ -249,7 +249,7 @@ export interface ITerminationOrder { export interface IRepresentationStreamArguments { /** The context of the Representation you want to load. */ content: { - adaptation: IAdaptation; + track: ITrack; manifest: IManifest; period: IPeriod; representation: IRepresentation; diff --git a/src/core/stream/representation/utils/append_segment_to_buffer.ts b/src/core/stream/representation/utils/append_segment_to_buffer.ts index 6410cedbb5..d4073f3413 100644 --- a/src/core/stream/representation/utils/append_segment_to_buffer.ts +++ b/src/core/stream/representation/utils/append_segment_to_buffer.ts @@ -63,7 +63,7 @@ export default async function appendSegmentToBuffer( ? appendError.toString() : "An unknown error happened when pushing content"; throw new MediaError("BUFFER_APPEND_ERROR", reason, { - tracks: [toTaggedTrack(dataInfos.inventoryInfos.adaptation)], + tracks: [toTaggedTrack(dataInfos.inventoryInfos.track)], }); } const { position } = playbackObserver.getReference().getValue(); @@ -87,7 +87,7 @@ export default async function appendSegmentToBuffer( err2 instanceof Error ? 
err2.toString() : "Could not clean the buffer"; throw new MediaError("BUFFER_FULL_ERROR", reason, { - tracks: [toTaggedTrack(dataInfos.inventoryInfos.adaptation)], + tracks: [toTaggedTrack(dataInfos.inventoryInfos.track)], }); } } diff --git a/src/core/stream/representation/utils/check_for_discontinuity.ts b/src/core/stream/representation/utils/check_for_discontinuity.ts index 04e48355c7..a09bc7100a 100644 --- a/src/core/stream/representation/utils/check_for_discontinuity.ts +++ b/src/core/stream/representation/utils/check_for_discontinuity.ts @@ -17,9 +17,9 @@ import log from "../../../../log"; import type { IManifest, - IAdaptation, IPeriod, IRepresentation, + ITrackMetadata, } from "../../../../manifest"; import type { IBufferedChunk } from "../../../segment_sinks"; import type { IBufferDiscontinuity } from "../types"; @@ -52,7 +52,7 @@ import type { IBufferDiscontinuity } from "../types"; */ export default function checkForDiscontinuity( content: { - adaptation: IAdaptation; + track: ITrackMetadata; manifest: IManifest; period: IPeriod; representation: IRepresentation; @@ -62,7 +62,7 @@ export default function checkForDiscontinuity( hasFinishedLoading: boolean, bufferedSegments: IBufferedChunk[], ): IBufferDiscontinuity | null { - const { period, adaptation, representation } = content; + const { period, track, representation } = content; // `bufferedSegments` might also contains segments which are before // `checkedRange`. @@ -118,7 +118,7 @@ export default function checkForDiscontinuity( } log.debug( "RS: current discontinuity encountered", - adaptation.type, + track.trackType, nextBufferedSegment.bufferedStart, ); return { start: undefined, end: discontinuityEnd }; @@ -152,7 +152,7 @@ export default function checkForDiscontinuity( } const start = segmentInfoBeforeHole.bufferedEnd as number; const end = segmentInfoAfterHole.bufferedStart as number; - log.debug("RS: future discontinuity encountered", adaptation.type, start, end); + log.debug("RS: future discontinuity encountered", track.trackType, start, end); return { start, end }; } } @@ -182,7 +182,7 @@ export default function checkForDiscontinuity( ) { log.debug( "RS: discontinuity encountered at the end of the current period", - adaptation.type, + track.trackType, lastSegment.bufferedEnd, period.end, ); diff --git a/src/core/stream/representation/utils/downloading_queue.ts b/src/core/stream/representation/utils/downloading_queue.ts index 94b9c04870..a6e255d79c 100644 --- a/src/core/stream/representation/utils/downloading_queue.ts +++ b/src/core/stream/representation/utils/downloading_queue.ts @@ -17,10 +17,10 @@ import log from "../../../../log"; import type { IManifest, - IAdaptation, ISegment, IPeriod, IRepresentation, + ITrack, } from "../../../../manifest"; import type { IPlayerError } from "../../../../public_types"; import type { @@ -177,7 +177,7 @@ export default class DownloadingQueue extends EventEmitter extends EventEmitter extends EventEmitter extends EventEmitter extends EventEmitter= lastIndexPosition && representationIndex.isInitialized() && !representationIndex.isStillAwaitingFutureSegments() && diff --git a/src/core/stream/representation/utils/get_needed_segments.ts b/src/core/stream/representation/utils/get_needed_segments.ts index bbebfc737c..94aa9532c2 100644 --- a/src/core/stream/representation/utils/get_needed_segments.ts +++ b/src/core/stream/representation/utils/get_needed_segments.ts @@ -18,10 +18,10 @@ import config from "../../../../config"; import log from "../../../../log"; import type { IManifest, - 
IAdaptation, ISegment, IPeriod, IRepresentation, + ITrackMetadata, } from "../../../../manifest"; import { areSameContent } from "../../../../manifest"; import objectAssign from "../../../../utils/object_assign"; @@ -34,7 +34,7 @@ import type { import { ChunkStatus } from "../../../segment_sinks/inventory/segment_inventory"; interface IContentContext { - adaptation: IAdaptation; + track: ITrackMetadata; manifest: IManifest; period: IPeriod; representation: IRepresentation; @@ -116,7 +116,7 @@ export default function getNeededSegments({ segmentsBeingPushed, maxBufferSize, }: IGetNeededSegmentsArguments): INeededSegments { - const { adaptation, representation } = content; + const { track, representation } = content; let availableBufferSize = getAvailableBufferSize( bufferedSegments, segmentsBeingPushed, @@ -187,7 +187,7 @@ export default function getNeededSegments({ const waitForPushedSegment = segmentsBeingPushed.some((pendingSegment) => { if ( pendingSegment.period.id !== content.period.id || - pendingSegment.adaptation.id !== content.adaptation.id + pendingSegment.track.id !== content.track.id ) { return false; } @@ -220,7 +220,7 @@ export default function getNeededSegments({ const areFromSamePeriod = completeSeg.infos.period.id === content.period.id; // Check if content are from same period, as there can't be overlapping // periods, we should consider a segment as already downloaded if - // it is from same period (but can be from different adaptation or + // it is from same period (but can be from different track or // representation) if (completeSeg.status === ChunkStatus.FullyLoaded && areFromSamePeriod) { const completeSegInfos = completeSeg.infos.segment; @@ -263,7 +263,7 @@ export default function getNeededSegments({ "is emitted, you might want to update the RxPlayer's settings (" + "`maxBufferAhead`, `maxVideoBufferSize` etc.) so less memory is used " + "by regular media data buffering." + - adaptation.type, + track.trackType, representation.id, segment.time, ); @@ -362,7 +362,7 @@ function getLastContiguousSegment( function shouldContentBeReplaced( oldContent: ICompleteSegmentInfo, currentContent: { - adaptation: IAdaptation; + track: ITrackMetadata; period: IPeriod; representation: IRepresentation; }, @@ -379,8 +379,8 @@ function shouldContentBeReplaced( return false; } - if (oldContent.adaptation.id !== currentContent.adaptation.id) { - return true; // replace segments from another Adaptation + if (oldContent.track.id !== currentContent.track.id) { + return true; // replace segments from another track } return canFastSwitch( @@ -408,7 +408,7 @@ function canFastSwitch( const oldContentBitrate = oldSegmentRepresentation.bitrate; const { BITRATE_REBUFFERING_RATIO } = config.getCurrent(); if (fastSwitchThreshold === undefined) { - // only re-load comparatively-poor bitrates for the same Adaptation. + // only re-load comparatively-poor bitrates for the same track. 
const bitrateCeil = oldContentBitrate * BITRATE_REBUFFERING_RATIO; return newSegmentRepresentation.bitrate > bitrateCeil; } diff --git a/src/core/stream/representation/utils/push_init_segment.ts b/src/core/stream/representation/utils/push_init_segment.ts index e051310e56..1928b4b971 100644 --- a/src/core/stream/representation/utils/push_init_segment.ts +++ b/src/core/stream/representation/utils/push_init_segment.ts @@ -16,10 +16,10 @@ import type { IManifest, - IAdaptation, ISegment, IPeriod, IRepresentation, + ITrack, } from "../../../../manifest"; import type { IReadOnlyPlaybackObserver } from "../../../../playback_observer"; import objectAssign from "../../../../utils/object_assign"; @@ -49,7 +49,7 @@ export default async function pushInitSegment( }: { playbackObserver: IReadOnlyPlaybackObserver; content: { - adaptation: IAdaptation; + track: ITrack; manifest: IManifest; period: IPeriod; representation: IRepresentation; diff --git a/src/core/stream/representation/utils/push_media_segment.ts b/src/core/stream/representation/utils/push_media_segment.ts index 65eff2a237..052dd3bed3 100644 --- a/src/core/stream/representation/utils/push_media_segment.ts +++ b/src/core/stream/representation/utils/push_media_segment.ts @@ -17,10 +17,10 @@ import config from "../../../../config"; import type { IManifest, - IAdaptation, ISegment, IPeriod, IRepresentation, + ITrack, } from "../../../../manifest"; import type { IReadOnlyPlaybackObserver } from "../../../../playback_observer"; import type { ISegmentParserParsedMediaChunk } from "../../../../transports"; @@ -52,7 +52,7 @@ export default async function pushMediaSegment( }: { playbackObserver: IReadOnlyPlaybackObserver; content: { - adaptation: IAdaptation; + track: ITrack; manifest: IManifest; period: IPeriod; representation: IRepresentation; diff --git a/src/core/types.ts b/src/core/types.ts index 7f9e1ba9e1..3db271eb73 100644 --- a/src/core/types.ts +++ b/src/core/types.ts @@ -14,7 +14,7 @@ import type { } from "./segment_sinks"; import type SegmentSinksStore from "./segment_sinks"; import type { - IAdaptationChoice, + ITrackChoice, IPausedPlaybackObservation, IInbandEvent, IRepresentationsChoice, @@ -44,7 +44,7 @@ export type { ITextDisplayerInterface, // Stream Metadata - IAdaptationChoice, + ITrackChoice, IInbandEvent, IPausedPlaybackObservation, IStreamOrchestratorPlaybackObservation, diff --git a/src/errors/media_error.ts b/src/errors/media_error.ts index 6e71aacc64..e893c66dd4 100644 --- a/src/errors/media_error.ts +++ b/src/errors/media_error.ts @@ -35,7 +35,7 @@ export default class MediaError extends Error { public readonly name: "MediaError"; public readonly type: "MEDIA_ERROR"; public readonly code: IMediaErrorCode; - public readonly tracksInfo: ITaggedTrack[] | undefined; + public readonly tracksMetadata: ITaggedTrack[] | undefined; public fatal: boolean; private _originalMessage: string; @@ -80,7 +80,7 @@ export default class MediaError extends Error { this.code = code; this.fatal = false; if (context?.tracks !== undefined && context?.tracks.length > 0) { - this.tracksInfo = context.tracks; + this.tracksMetadata = context.tracks; } } @@ -95,7 +95,7 @@ export default class MediaError extends Error { name: this.name, code: this.code, reason: this._originalMessage, - tracks: this.tracksInfo, + tracks: this.tracksMetadata, }; } } diff --git a/src/experimental/tools/VideoThumbnailLoader/types.ts b/src/experimental/tools/VideoThumbnailLoader/types.ts index e95c3a95c0..42467a4e21 100644 --- 
a/src/experimental/tools/VideoThumbnailLoader/types.ts +++ b/src/experimental/tools/VideoThumbnailLoader/types.ts @@ -14,13 +14,13 @@ * limitations under the License. */ -import type { IManifest, IAdaptation, IPeriod, IRepresentation } from "../../../manifest"; +import type { IManifest, IPeriod, IRepresentation, ITrack } from "../../../manifest"; import type { ISegmentParser, ITransportPipelines } from "../../../transports"; export interface IContentInfo { manifest: IManifest; period: IPeriod; - adaptation: IAdaptation; + track: ITrack; representation: IRepresentation; } diff --git a/src/experimental/tools/VideoThumbnailLoader/video_thumbnail_loader.ts b/src/experimental/tools/VideoThumbnailLoader/video_thumbnail_loader.ts index e683dac72e..1e9028c270 100644 --- a/src/experimental/tools/VideoThumbnailLoader/video_thumbnail_loader.ts +++ b/src/experimental/tools/VideoThumbnailLoader/video_thumbnail_loader.ts @@ -19,12 +19,13 @@ import type { ISegmentFetcher } from "../../../core/fetchers/segment/segment_fet import createSegmentFetcher from "../../../core/fetchers/segment/segment_fetcher"; import log from "../../../log"; import type { IRxPlayer } from "../../../main_thread/types"; -import type { ISegment } from "../../../manifest"; +import { getTrackListForType, type ISegment } from "../../../manifest"; import Manifest from "../../../manifest/classes"; import type { MainSourceBufferInterface } from "../../../mse/main_media_source_interface"; import arrayFind from "../../../utils/array_find"; import isNullOrUndefined from "../../../utils/is_null_or_undefined"; import objectAssign from "../../../utils/object_assign"; +import { objectValues } from "../../../utils/object_values"; import TaskCanceller, { CancellationError } from "../../../utils/task_canceller"; import loadAndPushSegment from "./load_and_push_segment"; import prepareSourceBuffer from "./prepare_source_buffer"; @@ -337,7 +338,7 @@ function areSameRepresentation( ): boolean { return ( contentInfo1.representation.id === contentInfo2.representation.id && - contentInfo1.adaptation.id === contentInfo2.adaptation.id && + contentInfo1.track.id === contentInfo2.track.id && contentInfo1.period.id === contentInfo2.period.id && contentInfo1.manifest.id === contentInfo2.manifest.id ); @@ -352,17 +353,19 @@ function areSameRepresentation( */ function getTrickModeInfo(time: number, manifest: Manifest): IContentInfo | null { const period = manifest.getPeriodForTime(time); - if ( - period === undefined || - period.adaptations.video === undefined || - period.adaptations.video.length === 0 - ) { + if (period === undefined) { return null; } - for (const videoAdaptation of period.adaptations.video) { - const representation = videoAdaptation.trickModeTracks?.[0].representations?.[0]; + const tracks = getTrackListForType(period, "video"); + if (tracks.length === 0) { + return null; + } + for (const track of tracks) { + const representation = objectValues( + track.trickModeTracks?.[0].representations ?? 
{}, + )[0]; if (!isNullOrUndefined(representation)) { - return { manifest, period, adaptation: videoAdaptation, representation }; + return { manifest, period, track, representation }; } } return null; diff --git a/src/main_thread/api/debug/modules/general_info.ts b/src/main_thread/api/debug/modules/general_info.ts index dcb6a498e6..79f45e1fff 100644 --- a/src/main_thread/api/debug/modules/general_info.ts +++ b/src/main_thread/api/debug/modules/general_info.ts @@ -1,4 +1,5 @@ import hasMseInWorker from "../../../../compat/has_mse_in_worker"; +import { objectValues } from "../../../../utils/object_values"; import type { CancellationSignal } from "../../../../utils/task_canceller"; import type RxPlayer from "../../public_api"; import { DEFAULT_REFRESH_INTERVAL } from "../constants"; @@ -172,9 +173,9 @@ export default function constructDebugGeneralInfo( ]); adaptationsElt.appendChild(textAdaps); } - const adaptations = instance.__priv_getCurrentAdaptation(); + const tracks = instance.__priv_getCurrentTracks(); const videoBitratesStr = - adaptations?.video?.representations.map((r) => { + objectValues(tracks?.video?.representations ?? {}).map((r) => { return ( String(r.bitrate ?? "N/A") + (r.isSupported !== false ? "" : " U!") + @@ -182,7 +183,7 @@ export default function constructDebugGeneralInfo( ); }) ?? []; const audioBitratesStr = - adaptations?.audio?.representations.map((r) => { + objectValues(tracks?.audio?.representations ?? {}).map((r) => { return ( String(r.bitrate ?? "N/A") + (r.isSupported !== false ? "" : " U!") + diff --git a/src/main_thread/api/debug/modules/segment_buffer_content.ts b/src/main_thread/api/debug/modules/segment_buffer_content.ts index b9984f0442..c191104aa4 100644 --- a/src/main_thread/api/debug/modules/segment_buffer_content.ts +++ b/src/main_thread/api/debug/modules/segment_buffer_content.ts @@ -1,7 +1,7 @@ import type { ISegmentSinkMetrics } from "../../../../core/segment_sinks/segment_buffers_store"; import type { IBufferType } from "../../../../core/types"; import type { - IAdaptationMetadata, + ITrackMetadata, IPeriodMetadata, IRepresentationMetadata, } from "../../../../manifest"; @@ -120,9 +120,9 @@ export default function createSegmentSinkGraph( loadingRangeRepInfoElt.innerHTML = ""; const rep = instance.__priv_getCurrentRepresentations()?.[bufferType]; - const adap = instance.__priv_getCurrentAdaptation()?.[bufferType]; + const track = instance.__priv_getCurrentTracks()?.[bufferType]; const manifest = instance.__priv_getManifest(); - if (manifest !== null && !isNullOrUndefined(rep) && !isNullOrUndefined(adap)) { + if (manifest !== null && !isNullOrUndefined(rep) && !isNullOrUndefined(track)) { const period = getPeriodForTime(manifest, currentTime); if (period !== undefined) { loadingRangeRepInfoElt.appendChild(createMetricTitle("load")); @@ -130,7 +130,7 @@ export default function createSegmentSinkGraph( createElement("span", { textContent: constructRepresentationInfo({ period, - adaptation: adap, + track, representation: rep, }), }), @@ -143,19 +143,11 @@ export default function createSegmentSinkGraph( function constructRepresentationInfo(content: { period: IPeriodMetadata; - adaptation: IAdaptationMetadata; + track: ITrackMetadata; representation: IRepresentationMetadata; }): string { - const period = content.period; - const { - language, - isAudioDescription, - isClosedCaption, - isTrickModeTrack, - isSignInterpreted, - type: bufferType, - } = content.adaptation; - const { id, height, width, bitrate, codecs } = content.representation; + const { 
period, track, representation } = content; + const { id, height, width, bitrate, codecs } = representation; let representationInfo = `"${id}" `; if (height !== undefined && width !== undefined) { representationInfo += `${width}x${height} `; @@ -166,20 +158,20 @@ function constructRepresentationInfo(content: { if (codecs !== undefined && codecs.length > 0) { representationInfo += `c:"${codecs.join(" / ")}" `; } - if (language !== undefined) { - representationInfo += `l:"${language}" `; + if (track.language !== undefined) { + representationInfo += `l:"${track.language}" `; } - if (bufferType === "video" && typeof isSignInterpreted === "boolean") { - representationInfo += `si:${isSignInterpreted ? 1 : 0} `; + if (track.trackType === "video" && typeof track.isSignInterpreted === "boolean") { + representationInfo += `si:${track.isSignInterpreted ? 1 : 0} `; } - if (bufferType === "video" && typeof isTrickModeTrack === "boolean") { - representationInfo += `tm:${isTrickModeTrack ? 1 : 0} `; + if (track.trackType === "video" && typeof track.isTrickModeTrack === "boolean") { + representationInfo += `tm:${track.isTrickModeTrack ? 1 : 0} `; } - if (bufferType === "audio" && typeof isAudioDescription === "boolean") { - representationInfo += `ad:${isAudioDescription ? 1 : 0} `; + if (track.trackType === "audio" && typeof track.isAudioDescription === "boolean") { + representationInfo += `ad:${track.isAudioDescription ? 1 : 0} `; } - if (bufferType === "text" && typeof isClosedCaption === "boolean") { - representationInfo += `cc:${isClosedCaption ? 1 : 0} `; + if (track.trackType === "text" && typeof track.isClosedCaption === "boolean") { + representationInfo += `cc:${track.isClosedCaption ? 1 : 0} `; } representationInfo += `p:${period.start}-${period.end ?? "?"}`; return representationInfo; diff --git a/src/main_thread/api/public_api.ts b/src/main_thread/api/public_api.ts index 77635def10..7245aea9b2 100644 --- a/src/main_thread/api/public_api.ts +++ b/src/main_thread/api/public_api.ts @@ -34,7 +34,7 @@ import hasWorkerApi from "../../compat/has_worker_api"; import isDebugModeEnabled from "../../compat/is_debug_mode_enabled"; import type { ISegmentSinkMetrics } from "../../core/segment_sinks/segment_buffers_store"; import type { - IAdaptationChoice, + ITrackChoice, IInbandEvent, IABRThrottlers, IBufferType, @@ -47,12 +47,12 @@ import features, { addFeatures } from "../../features"; import log from "../../log"; import type { IDecipherabilityStatusChangedElement, - IAdaptationMetadata, IManifestMetadata, IPeriodMetadata, IRepresentationMetadata, IPeriodsUpdateResult, IManifest, + ITrackMetadata, } from "../../manifest"; import { getLivePosition, @@ -1007,7 +1007,7 @@ class Player extends EventEmitter { isDirectFile, manifest: null, currentPeriod: null, - activeAdaptations: null, + activeTracks: null, activeRepresentations: null, tracksStore: null, mediaElementTracksStore, @@ -1054,8 +1054,8 @@ class Player extends EventEmitter { initializer.addEventListener("representationChange", (representationInfo) => this._priv_onRepresentationChange(contentInfos, representationInfo), ); - initializer.addEventListener("adaptationChange", (adaptationInfo) => - this._priv_onAdaptationChange(contentInfos, adaptationInfo), + initializer.addEventListener("trackChange", (trackInfo) => + this._priv_onTrackChange(contentInfos, trackInfo), ); initializer.addEventListener("bitrateEstimateChange", (bitrateEstimateInfo) => this._priv_onBitrateEstimateChange(bitrateEstimateInfo), @@ -2387,21 +2387,22 @@ class Player extends 
EventEmitter { } // TODO remove the need for that public method - __priv_getCurrentAdaptation(): Partial< - Record + __priv_getCurrentTracks(): Record< + ITrackType, + ITrackMetadata | null | undefined > | null { if (this._priv_contentInfos === null) { return null; } - const { currentPeriod, activeAdaptations } = this._priv_contentInfos; + const { currentPeriod, activeTracks } = this._priv_contentInfos; if ( currentPeriod === null || - activeAdaptations === null || - isNullOrUndefined(activeAdaptations[currentPeriod.id]) + activeTracks === null || + isNullOrUndefined(activeTracks[currentPeriod.id]) ) { return null; } - return activeAdaptations[currentPeriod.id]; + return activeTracks[currentPeriod.id]; } // TODO remove the need for that public method @@ -2545,8 +2546,8 @@ class Player extends EventEmitter { for (const update of updates.updatedPeriods) { if (update.period.id === currentPeriod.id) { if ( - update.result.addedAdaptations.length > 0 || - update.result.removedAdaptations.length > 0 + update.result.addedTracks.length > 0 || + update.result.removedTracks.length > 0 ) { // We might have new (or less) tracks, send events just to be sure const periodRef = currTracksStore.getPeriodObjectFromPeriod(currentPeriod); @@ -2595,7 +2596,7 @@ class Player extends EventEmitter { const isFound = arrayFind( acc, - (x) => x[0].id === elt.period.id && x[1] === elt.adaptation.type, + (x) => x[0].id === elt.period.id && x[1] === elt.track.trackType, ) !== undefined; if (!isFound) { @@ -2610,19 +2611,19 @@ class Player extends EventEmitter { if (periodRef === undefined) { return acc; } - switch (elt.adaptation.type) { + switch (elt.track.trackType) { case "audio": - isCurrent = tStore.getChosenAudioTrack(periodRef)?.id === elt.adaptation.id; + isCurrent = tStore.getChosenAudioTrack(periodRef)?.id === elt.track.id; break; case "video": - isCurrent = tStore.getChosenVideoTrack(periodRef)?.id === elt.adaptation.id; + isCurrent = tStore.getChosenVideoTrack(periodRef)?.id === elt.track.id; break; case "text": - isCurrent = tStore.getChosenTextTrack(periodRef)?.id === elt.adaptation.id; + isCurrent = tStore.getChosenTextTrack(periodRef)?.id === elt.track.id; break; } if (isCurrent) { - acc.push([elt.period, elt.adaptation.type]); + acc.push([elt.period, elt.track.trackType]); } } return acc; @@ -2719,7 +2720,7 @@ class Player extends EventEmitter { /** * Triggered each times a new "PeriodStream" is ready. - * Choose the right Adaptation for the Period and emit it. + * Choose the right track for the Period and emit it. 
* @param {Object} contentInfos * @param {Object} value */ @@ -2728,13 +2729,13 @@ class Player extends EventEmitter { value: { type: IBufferType; period: IPeriodMetadata; - adaptationRef: SharedReference; + trackRef: SharedReference; }, ): void { if (contentInfos.contentId !== this._priv_contentInfos?.contentId) { return; // Event for another content } - const { type, period, adaptationRef } = value; + const { type, period, trackRef } = value; const tracksStore = contentInfos.tracksStore; switch (type) { @@ -2743,9 +2744,9 @@ class Player extends EventEmitter { case "text": if (isNullOrUndefined(tracksStore)) { log.error(`API: TracksStore not instanciated for a new ${type} period`); - adaptationRef.setValue(null); + trackRef.setValue(null); } else { - tracksStore.addTrackReference(type, period, adaptationRef); + tracksStore.addTrackReference(type, period, trackRef); } break; default: @@ -2779,16 +2780,15 @@ class Player extends EventEmitter { break; } - // Clean-up stored Representation and Adaptation information - const { activeAdaptations, activeRepresentations } = contentInfos; - if ( - !isNullOrUndefined(activeAdaptations) && - !isNullOrUndefined(activeAdaptations[period.id]) - ) { - const activePeriodAdaptations = activeAdaptations[period.id]; - delete activePeriodAdaptations[type]; - if (Object.keys(activePeriodAdaptations).length === 0) { - delete activeAdaptations[period.id]; + // Clean-up stored Representation and track information + const { activeTracks, activeRepresentations } = contentInfos; + if (!isNullOrUndefined(activeTracks) && !isNullOrUndefined(activeTracks[period.id])) { + const activePeriodTracks = activeTracks[period.id]; + if (activePeriodTracks !== null) { + delete activePeriodTracks[type]; + if (Object.keys(activePeriodTracks).length === 0) { + delete activeTracks[period.id]; + } } } @@ -2805,21 +2805,17 @@ class Player extends EventEmitter { } /** - * Triggered each times a new Adaptation is considered for the current + * Triggered each time a new track is considered for the current * content. - * Store given Adaptation and emit it if from the current Period. + * Store given track and emit it if from the current Period.
* @param {Object} contentInfos - * @param {Object} value + * @param {Object} trackChangeObj */ - private _priv_onAdaptationChange( + private _priv_onTrackChange( contentInfos: IPublicApiContentInfos, - { - type, - adaptation, - period, - }: { - type: IBufferType; - adaptation: IAdaptationMetadata | null; + trackChangeObj: { + type: ITrackType; + track: ITrackMetadata | null; period: IPeriodMetadata; }, ): void { @@ -2827,19 +2823,44 @@ class Player extends EventEmitter { return; // Event for another content } - // lazily create contentInfos.activeAdaptations - if (contentInfos.activeAdaptations === null) { - contentInfos.activeAdaptations = {}; + // lazily create contentInfos.activeTracks + if (contentInfos.activeTracks === null) { + contentInfos.activeTracks = {}; } - const { activeAdaptations, currentPeriod } = contentInfos; - const activePeriodAdaptations = activeAdaptations[period.id]; - if (isNullOrUndefined(activePeriodAdaptations)) { - activeAdaptations[period.id] = { [type]: adaptation }; + const { activeTracks, currentPeriod } = contentInfos; + const activePeriodTracks = activeTracks[trackChangeObj.period.id]; + if (isNullOrUndefined(activePeriodTracks)) { + if (trackChangeObj.type === "video") { + activeTracks[trackChangeObj.period.id] = { + video: trackChangeObj.track, + audio: undefined, + text: undefined, + }; + } else if (trackChangeObj.type === "audio") { + activeTracks[trackChangeObj.period.id] = { + audio: trackChangeObj.track, + video: undefined, + text: undefined, + }; + } else if (trackChangeObj.type === "text") { + activeTracks[trackChangeObj.period.id] = { + text: trackChangeObj.track, + audio: undefined, + video: undefined, + }; + } } else { - activePeriodAdaptations[type] = adaptation; + if (trackChangeObj.type === "video") { + activePeriodTracks[trackChangeObj.type] = trackChangeObj.track; + } else if (trackChangeObj.type === "audio") { + activePeriodTracks[trackChangeObj.type] = trackChangeObj.track; + } else if (trackChangeObj.type === "text") { + activePeriodTracks[trackChangeObj.type] = trackChangeObj.track; + } } + const { period } = trackChangeObj; const { tracksStore } = contentInfos; const cancelSignal = contentInfos.currentContentCanceller.signal; if ( @@ -2852,7 +2873,7 @@ class Player extends EventEmitter { if (periodRef === undefined) { return; } - switch (type) { + switch (trackChangeObj.type) { case "audio": const audioTrack = tracksStore.getChosenAudioTrack(periodRef); this._priv_triggerEventIfNotStopped( @@ -3302,11 +3323,11 @@ interface IPublicApiContentInfos { */ currentPeriod: IPeriodMetadata | null; /** - * Store currently considered adaptations, per active period. - * `null` if no Adaptation is active + * Store currently considered tracks, per active period. + * `null` if no track is active */ - activeAdaptations: { - [periodId: string]: Partial>; + activeTracks: { + [periodId: string]: Record | null; } | null; /** * Store currently considered representations, per active period. 
diff --git a/src/main_thread/decrypt/content_decryptor.ts b/src/main_thread/decrypt/content_decryptor.ts index 8da27ae06d..d8c7c0073c 100644 --- a/src/main_thread/decrypt/content_decryptor.ts +++ b/src/main_thread/decrypt/content_decryptor.ts @@ -20,7 +20,8 @@ import eme, { getInitData } from "../../compat/eme"; import config from "../../config"; import { EncryptedMediaError, OtherError } from "../../errors"; import log from "../../log"; -import type { IAdaptationMetadata, IPeriodMetadata } from "../../manifest"; +import { getTrackList } from "../../manifest"; +import type { IPeriodMetadata } from "../../manifest"; import type { IKeySystemOption, IPlayerError } from "../../public_types"; import areArraysOfNumbersEqual from "../../utils/are_arrays_of_numbers_equal"; import arrayFind from "../../utils/array_find"; @@ -412,7 +413,7 @@ export default class ContentDecryptor extends EventEmitter bytesToHex(k)).join(", "), ); @@ -1133,14 +1134,8 @@ function mergeKeyIdSetIntoArray(set: Set, arr: Uint8Array[]) { * @param {Object} period */ function addKeyIdsFromPeriod(set: Set, period: IPeriodMetadata) { - const adaptationsByType = period.adaptations; - const adaptations = objectValues(adaptationsByType).reduce( - // Note: the second case cannot happen. TS is just being dumb here - (acc, adaps) => (!isNullOrUndefined(adaps) ? acc.concat(adaps) : acc), - [], - ); - for (const adaptation of adaptations) { - for (const representation of adaptation.representations) { + for (const track of getTrackList(period)) { + for (const representation of objectValues(track.representations)) { if ( representation.contentProtections !== undefined && representation.contentProtections.keyIds !== undefined diff --git a/src/main_thread/decrypt/types.ts b/src/main_thread/decrypt/types.ts index 04e42868a4..cf46d65771 100644 --- a/src/main_thread/decrypt/types.ts +++ b/src/main_thread/decrypt/types.ts @@ -15,10 +15,10 @@ */ import type { - IAdaptationMetadata, IManifestMetadata, IPeriodMetadata, IRepresentationMetadata, + ITrackMetadata, } from "../../manifest"; import type { IPlayerError } from "../../public_types"; import type InitDataValuesContainer from "./utils/init_data_values_container"; @@ -195,8 +195,8 @@ export interface IContent { manifest: IManifestMetadata; /** Period object associated to the protection data. */ period: IPeriodMetadata; - /** Adaptation object associated to the protection data. */ - adaptation: IAdaptationMetadata; + /** Track associated to the protection data. */ + track: ITrackMetadata; /** Representation object associated to the protection data. 
*/ representation: IRepresentationMetadata; } diff --git a/src/main_thread/init/index.ts b/src/main_thread/init/index.ts index 80194cc51f..c7ae9cf693 100644 --- a/src/main_thread/init/index.ts +++ b/src/main_thread/init/index.ts @@ -16,7 +16,7 @@ export type { IContentInitializerEvents, - IAdaptationChangeEventPayload, + ITrackChangeEventPayload, IStallingSituation, } from "./types"; export { ContentInitializer } from "./types"; diff --git a/src/main_thread/init/media_source_content_initializer.ts b/src/main_thread/init/media_source_content_initializer.ts index 2b9df958dc..6fbd904e11 100644 --- a/src/main_thread/init/media_source_content_initializer.ts +++ b/src/main_thread/init/media_source_content_initializer.ts @@ -422,7 +422,7 @@ export default class MediaSourceContentInitializer extends ContentInitializer { const initialTime = getInitialTime(manifest, lowLatencyMode, startAt); log.debug("Init: Initial time calculated:", initialTime); - /** Choose the right "Representation" for a given "Adaptation". */ + /** Choose the right "Representation" for a given "track". */ const representationEstimator = AdaptiveRepresentationSelector(adaptiveOptions); const subBufferOptions = objectAssign( { textTrackOptions, drmSystemId }, @@ -876,15 +876,15 @@ export default class MediaSourceContentInitializer extends ContentInitializer { lockedStream: (value) => rebufferingController.onLockedStream(value.bufferType, value.period), - adaptationChange: (value) => { - self.trigger("adaptationChange", value); + trackChange: (value) => { + self.trigger("trackChange", value); if (cancelSignal.isCancelled()) { return; // Previous call has stopped streams due to a side-effect } - contentTimeBoundariesObserver.onAdaptationChange( + contentTimeBoundariesObserver.onTrackChange( value.type, value.period, - value.adaptation, + value.track, ); }, diff --git a/src/main_thread/init/multi_thread_content_initializer.ts b/src/main_thread/init/multi_thread_content_initializer.ts index b45fa44faa..21691476cb 100644 --- a/src/main_thread/init/multi_thread_content_initializer.ts +++ b/src/main_thread/init/multi_thread_content_initializer.ts @@ -4,7 +4,7 @@ import shouldReloadMediaSourceOnDecipherabilityUpdate from "../../compat/should_ import type { ISegmentSinkMetrics } from "../../core/segment_sinks/segment_buffers_store"; import type { IAdaptiveRepresentationSelectorArguments, - IAdaptationChoice, + ITrackChoice, IResolutionInfo, } from "../../core/types"; import { @@ -18,6 +18,7 @@ import features from "../../features"; import log from "../../log"; import type { IManifestMetadata } from "../../manifest"; import { + getTrackListForType, replicateUpdatesOnManifestMetadata, updateDecipherabilityFromKeyIds, updateDecipherabilityFromProtectionData, @@ -46,6 +47,7 @@ import assert, { assertUnreachable } from "../../utils/assert"; import idGenerator from "../../utils/id_generator"; import isNullOrUndefined from "../../utils/is_null_or_undefined"; import objectAssign from "../../utils/object_assign"; +import { objectValues } from "../../utils/object_values"; import type { IReadOnlySharedReference } from "../../utils/reference"; import SharedReference from "../../utils/reference"; import { RequestError } from "../../utils/request"; @@ -642,7 +644,7 @@ export default class MultiThreadContentInitializer extends ContentInitializer { break; } - case WorkerMessageType.AdaptationChanged: { + case WorkerMessageType.TrackChanged: { if ( this._currentContentInfo?.contentId !== msgData.contentId || this._currentContentInfo.manifest === null @@ 
-656,25 +658,58 @@ export default class MultiThreadContentInitializer extends ContentInitializer { if (period === undefined) { return; } - if (msgData.value.adaptationId === null) { - this.trigger("adaptationChange", { + if (msgData.value.trackId === null) { + this.trigger("trackChange", { period, - adaptation: null, + track: null, type: msgData.value.type, }); return; } - const adaptations = period.adaptations[msgData.value.type] ?? []; - const adaptation = arrayFind( - adaptations, - (a) => a.id === msgData.value.adaptationId, - ); - if (adaptation !== undefined) { - this.trigger("adaptationChange", { - period, - adaptation, - type: msgData.value.type, - }); + + // TODO TypeScript is too dumb here, see if that cannot be simplified + switch (msgData.value.type) { + case "audio": + { + const tracks = getTrackListForType(period, msgData.value.type); + const track = arrayFind(tracks, (a) => a.id === msgData.value.trackId); + if (track !== undefined) { + this.trigger("trackChange", { + period, + track, + type: msgData.value.type, + }); + } + } + break; + case "video": + { + const tracks = getTrackListForType(period, msgData.value.type); + const track = arrayFind(tracks, (a) => a.id === msgData.value.trackId); + if (track !== undefined) { + this.trigger("trackChange", { + period, + track, + type: msgData.value.type, + }); + } + } + break; + case "text": + { + const tracks = getTrackListForType(period, msgData.value.type); + const track = arrayFind(tracks, (a) => a.id === msgData.value.trackId); + if (track !== undefined) { + this.trigger("trackChange", { + period, + track, + type: msgData.value.type, + }); + } + } + break; + default: + assertUnreachable(msgData.value.type); } break; } @@ -701,16 +736,13 @@ export default class MultiThreadContentInitializer extends ContentInitializer { }); return; } - const adaptations = period.adaptations[msgData.value.type] ?? []; - const adaptation = arrayFind( - adaptations, - (a) => a.id === msgData.value.adaptationId, - ); - if (adaptation === undefined) { + const tracks = getTrackListForType(period, msgData.value.type); + const track = arrayFind(tracks, (a) => a.id === msgData.value.trackId); + if (track === undefined) { return; } const representation = arrayFind( - adaptation.representations, + objectValues(track.representations), (r) => r.id === msgData.value.representationId, ); if (representation !== undefined) { @@ -821,9 +853,7 @@ export default class MultiThreadContentInitializer extends ContentInitializer { if (period === undefined) { return; } - const ref = new SharedReference( - undefined, - ); + const ref = new SharedReference(undefined); ref.onUpdate((adapChoice) => { if (this._currentContentInfo === null) { ref.finish(); @@ -840,7 +870,7 @@ export default class MultiThreadContentInitializer extends ContentInitializer { contentId: this._currentContentInfo.contentId, value: { periodId: msgData.value.periodId, - adaptationId: adapChoice.adaptationId, + trackId: adapChoice.trackId, bufferType: msgData.value.bufferType, choice: repChoice, }, @@ -856,7 +886,7 @@ export default class MultiThreadContentInitializer extends ContentInitializer { choice: isNullOrUndefined(adapChoice) ? 
adapChoice : { - adaptationId: adapChoice.adaptationId, + trackId: adapChoice.trackId, switchingMode: adapChoice.switchingMode, initialRepresentations: adapChoice.representations.getValue(), relativeResumingPosition: adapChoice.relativeResumingPosition, @@ -867,7 +897,7 @@ export default class MultiThreadContentInitializer extends ContentInitializer { this.trigger("periodStreamReady", { period, type: msgData.value.bufferType, - adaptationRef: ref, + trackRef: ref, }); break; } diff --git a/src/main_thread/init/types.ts b/src/main_thread/init/types.ts index c0911b5b84..49e0ca5c9f 100644 --- a/src/main_thread/init/types.ts +++ b/src/main_thread/init/types.ts @@ -16,17 +16,17 @@ import type { IMediaElement } from "../../compat/browser_compatibility_types"; import type { ISegmentSinkMetrics } from "../../core/segment_sinks/segment_buffers_store"; -import type { IBufferType, IAdaptationChoice, IInbandEvent } from "../../core/types"; +import type { IBufferType, ITrackChoice, IInbandEvent } from "../../core/types"; import type { IPeriodsUpdateResult, - IAdaptationMetadata, IManifestMetadata, IPeriodMetadata, IRepresentationMetadata, IDecipherabilityStatusChangedElement, } from "../../manifest"; +import type { ITrackMetadata } from "../../manifest/types"; import type { IMediaElementPlaybackObserver } from "../../playback_observer"; -import type { IPlayerError } from "../../public_types"; +import type { IPlayerError, ITrackType } from "../../public_types"; import EventEmitter from "../../utils/event_emitter"; import type SharedReference from "../../utils/reference"; import type { @@ -155,27 +155,24 @@ export interface IContentInitializerEvents { /** The Period we're now playing. */ period: IPeriodMetadata; }; - /** - * A new `PeriodStream` is ready to start but needs an Adaptation (i.e. track) - * to be chosen first. - */ + /** A new `PeriodStream` is ready to start but needs a track to be chosen first. */ periodStreamReady: { /** The type of buffer linked to the `PeriodStream` we want to create. */ type: IBufferType; /** The `Period` linked to the `PeriodStream` we have created. */ period: IPeriodMetadata; /** - * The Reference through which any Adaptation (i.e. track) choice should be - * emitted for that `PeriodStream`. + * The Reference through which any track choice should be emitted for that + * `PeriodStream`. * * The `PeriodStream` will not do anything until this Reference has emitted * at least one to give its initial choice. * You can send `null` through it to tell this `PeriodStream` that you don't - * want any `Adaptation`. + * want any `track`. * It is set to `undefined` by default, you SHOULD NOT set it to `undefined` * yourself. */ - adaptationRef: SharedReference; + trackRef: SharedReference; }; /** * A `PeriodStream` has been removed. @@ -199,8 +196,8 @@ export interface IContentInitializerEvents { */ period: IPeriodMetadata; }; - /** Emitted when a new `Adaptation` is being considered. */ - adaptationChange: IAdaptationChangeEventPayload; + /** Emitted when a new `track` is being considered. */ + trackChange: ITrackChangeEventPayload; /** Emitted as new bitrate estimates are done. */ bitrateEstimateChange: { /** The type of buffer for which the estimation is done. */ @@ -230,16 +227,10 @@ export interface IContentInitializerEvents { inbandEvents: IInbandEvent[]; } -export interface IAdaptationChangeEventPayload { - /** The type of buffer for which the Representation is changing. */ - type: IBufferType; - /** The `Period` linked to the `RepresentationStream` we're creating. 
*/ +export interface ITrackChangeEventPayload { + type: ITrackType; + track: ITrackMetadata | null; period: IPeriodMetadata; - /** - * The `Adaptation` linked to the `AdaptationStream` we're creating. - * `null` when we're choosing no Adaptation at all. - */ - adaptation: IAdaptationMetadata | null; } export type IStallingSituation = diff --git a/src/main_thread/init/utils/update_manifest_codec_support.ts b/src/main_thread/init/utils/update_manifest_codec_support.ts index 61049207f1..e9bbd2c599 100644 --- a/src/main_thread/init/utils/update_manifest_codec_support.ts +++ b/src/main_thread/init/utils/update_manifest_codec_support.ts @@ -1,10 +1,12 @@ import isCodecSupported from "../../../compat/is_codec_supported"; import { MediaError } from "../../../errors"; +import { getTrackList, getTrackListForType } from "../../../manifest"; import type { IManifestMetadata } from "../../../manifest"; import type Manifest from "../../../manifest/classes"; import type { ICodecSupportInfo } from "../../../multithread_types"; import type { ITrackType } from "../../../public_types"; import isNullOrUndefined from "../../../utils/is_null_or_undefined"; +import { objectValues } from "../../../utils/object_values"; import type ContentDecryptor from "../../decrypt"; import { ContentDecryptorState } from "../../decrypt"; @@ -14,7 +16,7 @@ import { ContentDecryptorState } from "../../decrypt"; * If a representation with (`isSupported`) is undefined, we consider the * codec support as unknown. * - * This function iterates through all periods, adaptations, and representations, + * This function iterates through all periods, tracks, and representations, * and collects unknown codecs. * * @returns {Array} The list of codecs with unknown support status. @@ -27,15 +29,15 @@ export function getCodecsWithUnknownSupport( ): Array<{ mimeType: string; codec: string }> { const codecsWithUnknownSupport: Array<{ mimeType: string; codec: string }> = []; for (const period of manifest.periods) { - const checkedAdaptations = [ - ...(period.adaptations.video ?? []), - ...(period.adaptations.audio ?? []), + const checkedtracks = [ + ...getTrackListForType(period, "video"), + ...getTrackListForType(period, "audio"), ]; - for (const adaptation of checkedAdaptations) { - if (!adaptation.supportStatus.hasCodecWithUndefinedSupport) { + for (const track of checkedtracks) { + if (!track.supportStatus.hasCodecWithUndefinedSupport) { continue; } - for (const representation of adaptation.representations) { + for (const representation of objectValues(track.representations)) { if (representation.isSupported === undefined) { codecsWithUnknownSupport.push({ mimeType: representation.mimeType ?? "", @@ -49,7 +51,7 @@ export function getCodecsWithUnknownSupport( } /** - * Ensure that all `Representation` and `Adaptation` have a known status + * Ensure that all `Representation` and `track` have a known status * for their codec support and probe it for cases where that's not the * case. * @@ -123,14 +125,10 @@ export function updateManifestCodecSupport( }; manifest.periods.forEach((p) => { - [ - ...(p.adaptations.audio ?? []), - ...(p.adaptations.video ?? []), - ...(p.adaptations.text ?? 
[]), - ].forEach((adaptation) => { + getTrackList(p).forEach((track) => { let hasSupportedCodec: boolean = false; let hasCodecWithUndefinedSupport: boolean = false; - adaptation.representations.forEach((representation) => { + objectValues(track.representations).forEach((representation) => { if (representation.isSupported !== undefined) { if (representation.isSupported) { hasSupportedCodec = true; @@ -166,24 +164,20 @@ export function updateManifestCodecSupport( } } }); - adaptation.supportStatus.hasCodecWithUndefinedSupport = - hasCodecWithUndefinedSupport; + track.supportStatus.hasCodecWithUndefinedSupport = hasCodecWithUndefinedSupport; if (hasCodecWithUndefinedSupport && !hasSupportedCodec) { - adaptation.supportStatus.hasSupportedCodec = undefined; + track.supportStatus.hasSupportedCodec = undefined; } else { - adaptation.supportStatus.hasSupportedCodec = hasSupportedCodec; + track.supportStatus.hasSupportedCodec = hasSupportedCodec; } }); ["audio" as const, "video" as const].forEach((ttype: ITrackType) => { - const forType = p.adaptations[ttype]; - if ( - forType !== undefined && - forType.every((a) => a.supportStatus.hasSupportedCodec === false) - ) { + const forType = getTrackListForType(p, ttype); + if (forType.every((a) => a.supportStatus.hasSupportedCodec === false)) { throw new MediaError( "MANIFEST_INCOMPATIBLE_CODECS_ERROR", - "No supported " + ttype + " adaptations", + "No supported " + ttype + " tracks", { tracks: undefined }, ); } diff --git a/src/main_thread/tracks_store/track_dispatcher.ts b/src/main_thread/tracks_store/track_dispatcher.ts index f8a34aef74..e74ebaa076 100644 --- a/src/main_thread/tracks_store/track_dispatcher.ts +++ b/src/main_thread/tracks_store/track_dispatcher.ts @@ -1,9 +1,9 @@ import type { - IAdaptationChoice, + ITrackChoice, IRepresentationsChoice, ITrackSwitchingMode, } from "../../core/types"; -import type { IAdaptationMetadata, IRepresentationMetadata } from "../../manifest"; +import type { ITrackMetadata, IRepresentationMetadata } from "../../manifest"; import type { IAudioRepresentationsSwitchingMode, IVideoRepresentationsSwitchingMode, @@ -11,13 +11,14 @@ import type { import arrayIncludes from "../../utils/array_includes"; import EventEmitter from "../../utils/event_emitter"; import noop from "../../utils/noop"; +import { objectValues } from "../../utils/object_values"; import type { IReadOnlySharedReference } from "../../utils/reference"; import SharedReference from "../../utils/reference"; import TaskCanceller from "../../utils/task_canceller"; /** * Class handling track changes and quality locking for a single Period and - * Adaptation type. + * track type. * @class TrackDispatcher */ export default class TrackDispatcher extends EventEmitter { @@ -35,13 +36,13 @@ export default class TrackDispatcher extends EventEmitter * Reference through which the wanted track will be emitted. * `null` is emitted if no track for that type is wanted. */ - private readonly _adaptationRef: SharedReference< + private readonly _trackRef: SharedReference< | { /** Wanted track chosen by the user. */ - adaptationId: string; + trackId: string; /** "Switching mode" in which the track switch should happen. */ switchingMode: ITrackSwitchingMode; - /** Representations "locked" for this `Adaptation`. */ + /** Representations "locked" for this `track`. 
*/ representations: IReadOnlySharedReference; /** Relative resuming position after a track change */ relativeResumingPosition: number | undefined; @@ -51,16 +52,16 @@ export default class TrackDispatcher extends EventEmitter >; /** - * Last values emitted through `adaptationRef`. - * This value is mutated just before `adaptationRef` is "nexted". + * Last values emitted through `trackRef`. + * This value is mutated just before `trackRef` is "nexted". * * Storing this information allows to detect if some potential - * side-effects already led to the "nexting" of `adaptationRef` with the wanted + * side-effects already led to the "nexting" of `trackRef` with the wanted * settings, preventing the the `TrackDispatcher` from doing it again. */ private _lastEmitted: | { - adaptation: IAdaptationMetadata; + track: ITrackMetadata; switchingMode: ITrackSwitchingMode; lockedRepresentations: IRepresentationMetadata[] | null; } @@ -84,12 +85,12 @@ export default class TrackDispatcher extends EventEmitter * setting. * This constructor will update the Reference with the right preferences * synchronously. - * @param {Object} adaptationRef + * @param {Object} trackRef */ - constructor(adaptationRef: SharedReference) { + constructor(trackRef: SharedReference) { super(); this._canceller = new TaskCanceller(); - this._adaptationRef = adaptationRef; + this._trackRef = trackRef; this._updateToken = false; this._lastEmitted = undefined; this.refresh = noop; @@ -102,7 +103,7 @@ export default class TrackDispatcher extends EventEmitter * @returns {boolean} */ public hasSetTrack(): boolean { - return this._adaptationRef.getValue() !== undefined; + return this._trackRef.getValue() !== undefined; } /** @@ -121,10 +122,10 @@ export default class TrackDispatcher extends EventEmitter // has no point but let's still create one for simplicity sake this._canceller = new TaskCanceller(); this._lastEmitted = null; - this._adaptationRef.setValue(null); + this._trackRef.setValue(null); return; } - const { adaptation, switchingMode, relativeResumingPosition } = newTrackInfo; + const { track, switchingMode, relativeResumingPosition } = newTrackInfo; this._canceller.cancel(); this._canceller = new TaskCanceller(); const reference = this._constructLockedRepresentationsReference(newTrackInfo); @@ -132,13 +133,13 @@ export default class TrackDispatcher extends EventEmitter return; } this._lastEmitted = { - adaptation, + track, switchingMode, lockedRepresentations: null, }; this._updateToken = false; - this._adaptationRef.setValue({ - adaptationId: adaptation.id, + this._trackRef.setValue({ + trackId: track.id, switchingMode, representations: reference, relativeResumingPosition, @@ -185,8 +186,8 @@ export default class TrackDispatcher extends EventEmitter let playableRepresentations; if (repSettings === null) { // unlocking - playableRepresentations = trackInfo.adaptation.representations.filter( - (representation) => { + playableRepresentations = objectValues(trackInfo.track.representations).filter( + (representation: IRepresentationMetadata) => { return ( representation.isSupported === true && representation.decipherable !== false ); @@ -198,8 +199,8 @@ export default class TrackDispatcher extends EventEmitter } else { const { representationIds } = repSettings; switchingMode = repSettings.switchingMode; - const representations = trackInfo.adaptation.representations.filter((r) => - arrayIncludes(representationIds, r.id), + const representations = objectValues(trackInfo.track.representations).filter( + (r) => 
arrayIncludes(representationIds, r.id), ); playableRepresentations = representations.filter( (r) => r.isSupported === true && r.decipherable !== false, @@ -244,7 +245,7 @@ export default class TrackDispatcher extends EventEmitter public dispose(): void { this.removeEventListener(); this._canceller.cancel(); - this._adaptationRef.finish(); + this._trackRef.finish(); } } @@ -259,14 +260,14 @@ export interface ITrackDispatcherEvent { /** Define a new Track preference given to the `TrackDispatcher`. */ export interface ITrackSetting { - /** Contains the `Adaptation` wanted by the user. */ - adaptation: IAdaptationMetadata; + /** Contains the `track` wanted by the user. */ + track: ITrackMetadata; /** "Switching mode" in which the track switch should happen. */ switchingMode: ITrackSwitchingMode; /** Relative resuming position after a track change */ relativeResumingPosition?: number | undefined; /** - * Contains the last locked `Representation`s for this `Adaptation` wanted + * Contains the last locked `Representation`s for this `track` wanted * by the user. * `null` if no Representation is locked. * diff --git a/src/main_thread/tracks_store/tracks_store.ts b/src/main_thread/tracks_store/tracks_store.ts index c8c03f91d7..b5cf0d9292 100644 --- a/src/main_thread/tracks_store/tracks_store.ts +++ b/src/main_thread/tracks_store/tracks_store.ts @@ -20,16 +20,12 @@ */ import config from "../../config"; -import type { IAdaptationChoice, IRepresentationsChoice } from "../../core/types"; +import type { ITrackChoice, IRepresentationsChoice } from "../../core/types"; import { MediaError } from "../../errors"; import log from "../../log"; -import type { - IAdaptationMetadata, - IManifestMetadata, - IPeriodMetadata, -} from "../../manifest"; +import type { ITrackMetadata, IManifestMetadata, IPeriodMetadata } from "../../manifest"; import { - getSupportedAdaptations, + getTrackListForType, toAudioTrack, toTextTrack, toVideoTrack, @@ -56,6 +52,7 @@ import assert from "../../utils/assert"; import EventEmitter from "../../utils/event_emitter"; import isNullOrUndefined from "../../utils/is_null_or_undefined"; import objectAssign from "../../utils/object_assign"; +import { objectValues } from "../../utils/object_values"; import SharedReference from "../../utils/reference"; import TrackDispatcher from "./track_dispatcher"; @@ -174,12 +171,10 @@ export default class TracksStore extends EventEmitter { const curWantedTextTrack = this._storedPeriodInfo[i].text.storedSettings; if (!isNullOrUndefined(curWantedTextTrack)) { - const textAdaptations = getSupportedAdaptations(newPeriod, "text"); - const stillHere = textAdaptations.some( - (a) => a.id === curWantedTextTrack.adaptation.id, - ); + const textTracks = getSupportedTracks(newPeriod, "text"); + const stillHere = textTracks.some((a) => a.id === curWantedTextTrack.track.id); if (!stillHere) { - log.warn("TS: Chosen text Adaptation not available anymore"); + log.warn("TS: Chosen text track not available anymore"); const periodInfo = this._storedPeriodInfo[i]; periodInfo.text.storedSettings = null; this.trigger("trackUpdate", { @@ -209,27 +204,24 @@ export default class TracksStore extends EventEmitter { const curWantedVideoTrack = this._storedPeriodInfo[i].video.storedSettings; if (!isNullOrUndefined(curWantedVideoTrack)) { - const videoAdaptations = getSupportedAdaptations(newPeriod, "video"); - const stillHere = videoAdaptations.some( - (a) => a.id === curWantedVideoTrack.adaptation.id, + const videoTracks = getSupportedTracks(newPeriod, "video"); + const stillHere 
= videoTracks.some( + (a) => a.id === curWantedVideoTrack.track.id, ); if (!stillHere) { - log.warn("TS: Chosen video Adaptation not available anymore"); + log.warn("TS: Chosen video track not available anymore"); const periodItem = this._storedPeriodInfo[i]; let storedSettings: IVideoStoredSettings; - if (videoAdaptations.length === 0) { + if (videoTracks.length === 0) { storedSettings = null; } else { - const adaptationBase = videoAdaptations[0]; - const adaptation = getRightVideoTrack( - adaptationBase, - this._isTrickModeTrackEnabled, - ); + const trackBase = videoTracks[0]; + const track = getRightVideoTrack(trackBase, this._isTrickModeTrackEnabled); const lockedRepresentations = new SharedReference(null); storedSettings = { - adaptationBase, - adaptation, + trackBase, + track, switchingMode: DEFAULT_VIDEO_TRACK_SWITCHING_MODE, lockedRepresentations, }; @@ -262,18 +254,18 @@ export default class TracksStore extends EventEmitter { const curWantedAudioTrack = this._storedPeriodInfo[i].audio.storedSettings; if (!isNullOrUndefined(curWantedAudioTrack)) { - const audioAdaptations = getSupportedAdaptations(newPeriod, "audio"); - const stillHere = audioAdaptations.some( - (a) => a.id === curWantedAudioTrack.adaptation.id, + const audioTracks = getSupportedTracks(newPeriod, "audio"); + const stillHere = audioTracks.some( + (a) => a.id === curWantedAudioTrack.track.id, ); if (!stillHere) { - log.warn("TS: Chosen audio Adaptation not available anymore"); + log.warn("TS: Chosen audio track not available anymore"); const periodItem = this._storedPeriodInfo[i]; const storedSettings = - audioAdaptations.length === 0 + audioTracks.length === 0 ? null : { - adaptation: audioAdaptations[0], + track: audioTracks[0], switchingMode: this._defaultAudioTrackSwitchingMode, lockedRepresentations: new SharedReference(null), @@ -347,20 +339,20 @@ export default class TracksStore extends EventEmitter { } /** - * Add shared reference to choose Adaptation for new "audio", "video" or + * Add shared reference to choose track for new "audio", "video" or * "text" Period. * * Note that such reference has to be removed through `removeTrackReference` * so ressources can be freed. * @param {string} bufferType - The concerned buffer type * @param {Period} period - The concerned Period. - * @param {Object} adaptationRef - A reference through which + * @param {Object} trackRef - A reference through which * the choice will be given. */ public addTrackReference( bufferType: "audio" | "text" | "video", period: IPeriodMetadata, - adaptationRef: SharedReference, + trackRef: SharedReference, ): void { let periodObj = getPeriodItem(this._storedPeriodInfo, period.id); if (periodObj === undefined) { @@ -385,26 +377,26 @@ export default class TracksStore extends EventEmitter { return; } - const dispatcher = new TrackDispatcher(adaptationRef); + const dispatcher = new TrackDispatcher(trackRef); periodObj[bufferType].dispatcher = dispatcher; dispatcher.addEventListener("noPlayableRepresentation", () => { - const nextAdaptation = arrayFind( - period.adaptations[bufferType] ?? 
[], - (adaptation) => { + const nextTrack = arrayFind( + objectValues(period.tracksMetadata[bufferType]), + (track) => { if ( - adaptation.supportStatus.hasSupportedCodec === false || - adaptation.supportStatus.isDecipherable === false + track.supportStatus.hasSupportedCodec === false || + track.supportStatus.isDecipherable === false ) { return false; } - const playableRepresentations = adaptation.representations.filter( + const playableRepresentations = objectValues(track.representations).filter( (r) => r.isSupported === true && r.decipherable !== false, ); return playableRepresentations.length > 0; }, ); - if (nextAdaptation === undefined) { + if (nextTrack === undefined) { const noRepErr = new MediaError( "NO_PLAYABLE_REPRESENTATION", `No ${bufferType} Representation can be played`, @@ -421,7 +413,7 @@ export default class TracksStore extends EventEmitter { const switchingMode = bufferType === "audio" ? this._defaultAudioTrackSwitchingMode : "reload"; const storedSettings = { - adaptation: nextAdaptation, + track: nextTrack, switchingMode, lockedRepresentations: new SharedReference(null), }; @@ -492,7 +484,7 @@ export default class TracksStore extends EventEmitter { } /** - * Remove shared reference to choose an "audio", "video" or "text" Adaptation + * Remove shared reference to choose an "audio", "video" or "text" track * for a Period. * @param {string} bufferType - The concerned buffer type * @param {Period} period - The concerned Period. @@ -614,10 +606,10 @@ export default class TracksStore extends EventEmitter { } /** - * Set audio track based on the ID of its Adaptation for a given added Period. + * Set audio track based on the ID of its track for a given added Period. * @param {Object} params * @param {Object} params.periodRef - The concerned Period's object. - * @param {string} params.trackId - adaptation id of the wanted track. + * @param {string} params.trackId - track id of the wanted track. * @param {string} params.switchingMode - Behavior when replacing the track by * another. * @param {Object|null} params.lockedRepresentations - Audio Representations @@ -650,9 +642,9 @@ export default class TracksStore extends EventEmitter { } /** - * Set text track based on the ID of its Adaptation for a given added Period. + * Set text track based on the ID of its track for a given added Period. * @param {Object} periodObj - The concerned Period's object. - * @param {string} wantedId - adaptation id of the wanted track. + * @param {string} wantedId - track id of the wanted track. */ public setTextTrack(periodObj: ITSPeriodObject, wantedId: string): void { return this._setAudioOrTextTrack({ @@ -666,11 +658,11 @@ export default class TracksStore extends EventEmitter { } /** - * Set audio track based on the ID of its Adaptation for a given added Period. + * Set audio track based on the ID of its track for a given added Period. * @param {Object} params * @param {string} params.bufferType * @param {Object} params.periodRef - The concerned Period's object. - * @param {string} params.trackId - adaptation id of the wanted track. + * @param {string} params.trackId - track id of the wanted track. * @param {string} params.switchingMode - Behavior when replacing the track by * another. * @param {Array.|null} params.lockedRepresentations - Audio @@ -697,15 +689,15 @@ export default class TracksStore extends EventEmitter { throw new Error("Wanted Period not yet advertised."); } const period = periodRef.period; - const wantedAdaptation = arrayFind( - period.adaptations[bufferType] ?? 
[], + const wantedTrack = arrayFind( + objectValues(period.tracksMetadata[bufferType]), ({ id, supportStatus }) => supportStatus.hasSupportedCodec !== false && supportStatus.isDecipherable !== false && id === trackId, ); - if (wantedAdaptation === undefined) { + if (wantedTrack === undefined) { throw new Error(`Wanted ${bufferType} track not found.`); } @@ -715,7 +707,7 @@ export default class TracksStore extends EventEmitter { lockedRepresentationsRef = new SharedReference(null); } else { const representationsToLock = this._getRepresentationsToLock( - wantedAdaptation, + wantedTrack, lockedRepresentations, ); const repSwitchingMode = @@ -729,7 +721,7 @@ export default class TracksStore extends EventEmitter { } const storedSettings = { - adaptation: wantedAdaptation, + track: wantedTrack, switchingMode, lockedRepresentations: lockedRepresentationsRef, relativeResumingPosition, @@ -756,10 +748,10 @@ export default class TracksStore extends EventEmitter { } /** - * Set video track based on the ID of its Adaptation for a given added Period. + * Set video track based on the ID of its track for a given added Period. * @param {Object} params * @param {Object} params.periodRef - The concerned Period's object. - * @param {string} params.trackId - adaptation id of the wanted track. + * @param {string} params.trackId - track id of the wanted track. * @param {string} params.switchingMode - Behavior when replacing the track by * another. * @param {Array.|null} params.lockedRepresentations - Video @@ -785,31 +777,28 @@ export default class TracksStore extends EventEmitter { throw new Error("Wanted Period not yet advertised."); } const period = periodRef.period; - const wantedAdaptation = arrayFind( - period.adaptations.video ?? [], + const wantedTrack = arrayFind( + objectValues(period.tracksMetadata.video), ({ id, supportStatus }) => supportStatus.isDecipherable !== false && supportStatus.hasSupportedCodec !== false && id === trackId, ); - if (wantedAdaptation === undefined) { + if (wantedTrack === undefined) { throw new Error("Wanted video track not found."); } const { DEFAULT_VIDEO_TRACK_SWITCHING_MODE } = config.getCurrent(); const typeInfo = periodRef.video; - const newAdaptation = getRightVideoTrack( - wantedAdaptation, - this._isTrickModeTrackEnabled, - ); + const newTrack = getRightVideoTrack(wantedTrack, this._isTrickModeTrackEnabled); let lockedRepresentationsRef; if (lockedRepresentations === null) { lockedRepresentationsRef = new SharedReference(null); } else { const representationsToLock = this._getRepresentationsToLock( - wantedAdaptation, + wantedTrack, lockedRepresentations, ); const repSwitchingMode = DEFAULT_VIDEO_TRACK_SWITCHING_MODE; @@ -820,9 +809,9 @@ export default class TracksStore extends EventEmitter { } const storedSettings = { - adaptationBase: wantedAdaptation, + trackBase: wantedTrack, switchingMode: switchingMode ?? DEFAULT_VIDEO_TRACK_SWITCHING_MODE, - adaptation: newAdaptation, + track: newTrack, relativeResumingPosition, lockedRepresentations: lockedRepresentationsRef, }; @@ -913,7 +902,7 @@ export default class TracksStore extends EventEmitter { public getChosenAudioTrack(periodObj: ITSPeriodObject): IAudioTrack | null { return isNullOrUndefined(periodObj.audio.storedSettings) ? 
null - : toAudioTrack(periodObj.audio.storedSettings.adaptation, true); + : toAudioTrack(periodObj.audio.storedSettings.track, true); } /** @@ -929,7 +918,7 @@ export default class TracksStore extends EventEmitter { public getChosenTextTrack(periodObj: ITSPeriodObject): ITextTrack | null { return isNullOrUndefined(periodObj.text.storedSettings) ? null - : toTextTrack(periodObj.text.storedSettings.adaptation); + : toTextTrack(periodObj.text.storedSettings.track); } /** @@ -947,7 +936,7 @@ export default class TracksStore extends EventEmitter { return null; } - return toVideoTrack(periodObj.video.storedSettings.adaptation, true); + return toVideoTrack(periodObj.video.storedSettings.track, true); } /** @@ -963,13 +952,11 @@ export default class TracksStore extends EventEmitter { periodObj: ITSPeriodObject, ): IAvailableAudioTrack[] | undefined { const storedSettings = periodObj.audio.storedSettings; - const currentId = !isNullOrUndefined(storedSettings) - ? storedSettings.adaptation.id - : null; - const adaptations = getSupportedAdaptations(periodObj.period, "audio"); - return adaptations.map((adaptation: IAdaptationMetadata) => { - const active = currentId === null ? false : currentId === adaptation.id; - return objectAssign(toAudioTrack(adaptation, true), { active }); + const currentId = !isNullOrUndefined(storedSettings) ? storedSettings.track.id : null; + const tracks = getSupportedTracks(periodObj.period, "audio"); + return tracks.map((track: ITrackMetadata) => { + const active = currentId === null ? false : currentId === track.id; + return objectAssign(toAudioTrack(track, true), { active }); }); } @@ -986,14 +973,12 @@ export default class TracksStore extends EventEmitter { periodObj: ITSPeriodObject, ): IAvailableTextTrack[] | undefined { const storedSettings = periodObj.text.storedSettings; - const currentId = !isNullOrUndefined(storedSettings) - ? storedSettings.adaptation.id - : null; - - const adaptations = getSupportedAdaptations(periodObj.period, "text"); - return adaptations.map((adaptation) => { - const active = currentId === null ? false : currentId === adaptation.id; - return objectAssign(toTextTrack(adaptation), { active }); + const currentId = !isNullOrUndefined(storedSettings) ? storedSettings.track.id : null; + + const tracks = getSupportedTracks(periodObj.period, "text"); + return tracks.map((track) => { + const active = currentId === null ? false : currentId === track.id; + return objectAssign(toTextTrack(track), { active }); }); } @@ -1012,18 +997,18 @@ export default class TracksStore extends EventEmitter { const storedSettings = periodObj.video.storedSettings; const currentId = isNullOrUndefined(storedSettings) ? undefined - : storedSettings.adaptation.id; + : storedSettings.track.id; - const adaptations = getSupportedAdaptations(periodObj.period, "video"); - return adaptations.map((adaptation) => { - const active = currentId === null ? false : currentId === adaptation.id; - const track = toVideoTrack(adaptation, true); + const tracks = getSupportedTracks(periodObj.period, "video"); + return tracks.map((trk: ITrackMetadata) => { + const active = currentId === null ? false : currentId === trk.id; + const track = toVideoTrack(trk, true); const trickModeTracks = track.trickModeTracks !== undefined - ? track.trickModeTracks.map((trickModeAdaptation) => { + ? track.trickModeTracks.map((trickModeTrack) => { const isActive = - currentId === null ? 
false : currentId === trickModeAdaptation.id; - return objectAssign(trickModeAdaptation, { active: isActive }); + currentId === null ? false : currentId === trickModeTrack.id; + return objectAssign(trickModeTrack, { active: isActive }); }) : []; const availableTrack = objectAssign(track, { active }); @@ -1062,7 +1047,7 @@ export default class TracksStore extends EventEmitter { } const { DEFAULT_AUDIO_REPRESENTATIONS_SWITCHING_MODE } = config.getCurrent(); const filtered = this._getRepresentationsToLock( - storedSettings.adaptation, + storedSettings.track, lockSettings.representations, ); @@ -1085,7 +1070,7 @@ export default class TracksStore extends EventEmitter { const { DEFAULT_VIDEO_REPRESENTATIONS_SWITCHING_MODE } = config.getCurrent(); const filtered = this._getRepresentationsToLock( - storedSettings.adaptation, + storedSettings.track, lockSettings.representations, ); const switchingMode = @@ -1133,14 +1118,14 @@ export default class TracksStore extends EventEmitter { for (let i = 0; i < this._storedPeriodInfo.length; i++) { const periodObj = this._storedPeriodInfo[i]; if (!isNullOrUndefined(periodObj.video.storedSettings)) { - const chosenBaseTrack = periodObj.video.storedSettings.adaptationBase; + const chosenBaseTrack = periodObj.video.storedSettings.trackBase; if (chosenBaseTrack !== null) { const chosenTrack = getRightVideoTrack( chosenBaseTrack, this._isTrickModeTrackEnabled, ); - periodObj.video.storedSettings.adaptationBase = chosenBaseTrack; - periodObj.video.storedSettings.adaptation = chosenTrack; + periodObj.video.storedSettings.trackBase = chosenBaseTrack; + periodObj.video.storedSettings.track = chosenTrack; } } } @@ -1187,11 +1172,11 @@ export default class TracksStore extends EventEmitter { } private _getRepresentationsToLock( - adaptation: IAdaptationMetadata, + track: ITrackMetadata, representationIds: string[], ): string[] { const filtered = representationIds.reduce((acc: string[], repId) => { - const foundRep = arrayFind(adaptation.representations, (r) => { + const foundRep = arrayFind(objectValues(track.representations), (r) => { return r.id === repId; }); if (foundRep === undefined) { @@ -1230,72 +1215,72 @@ export default class TracksStore extends EventEmitter { // already processed, continue continue; } - const adaptations: IAdaptationMetadata[] = [ - ...(period.adaptations.audio ?? []), - ...(period.adaptations.video ?? []), + const tracks: ITrackMetadata[] = [ + ...getTrackListForType(period, "audio"), + ...getTrackListForType(period, "video"), ]; - const hasCodecWithUndefinedSupport = adaptations.every( + const hasCodecWithUndefinedSupport = tracks.every( (a) => a.supportStatus.hasCodecWithUndefinedSupport, ); - if (adaptations.length > 0 && hasCodecWithUndefinedSupport) { + if (tracks.length > 0 && hasCodecWithUndefinedSupport) { // Not all codecs for that Period are known yet. // Await until this is the case. continue; } - const audioAdaptation = getSupportedAdaptations(period, "audio")[0]; + const audioTrack = getSupportedTracks(period, "audio")[0]; trackStorePeriod.audio.storedSettings = - audioAdaptation === undefined + audioTrack === undefined ? 
null : { - adaptation: audioAdaptation, + track: audioTrack, switchingMode: this._defaultAudioTrackSwitchingMode, lockedRepresentations: new SharedReference( null, ), }; - const baseVideoAdaptation = getSupportedAdaptations(period, "video")[0]; - const videoAdaptation = getRightVideoTrack( - baseVideoAdaptation, + const baseVideoTrack = getSupportedTracks(period, "video")[0]; + const videoTrack = getRightVideoTrack( + baseVideoTrack, this._isTrickModeTrackEnabled, ); trackStorePeriod.video.storedSettings = - videoAdaptation === undefined + videoTrack === undefined ? null : { - adaptation: videoAdaptation, - adaptationBase: baseVideoAdaptation, + track: videoTrack, + trackBase: baseVideoTrack, switchingMode: DEFAULT_VIDEO_TRACK_SWITCHING_MODE, lockedRepresentations: new SharedReference( null, ), }; - let textAdaptation: IAdaptationMetadata | null = null; - const forcedSubtitles = (period.adaptations.text ?? []).filter( + let textTrack: ITrackMetadata | null = null; + const forcedSubtitles = objectValues(period.tracksMetadata.text).filter( (ad) => ad.isForcedSubtitles === true, ); if (forcedSubtitles.length > 0) { - if (audioAdaptation !== null && audioAdaptation !== undefined) { + if (audioTrack !== null && audioTrack !== undefined) { const sameLanguage = arrayFind( forcedSubtitles, - (f) => f.normalizedLanguage === audioAdaptation.normalizedLanguage, + (f) => f.normalizedLanguage === audioTrack.normalizedLanguage, ); if (sameLanguage !== undefined) { - textAdaptation = sameLanguage; + textTrack = sameLanguage; } } - if (textAdaptation === null) { - textAdaptation = + if (textTrack === null) { + textTrack = arrayFind(forcedSubtitles, (f) => f.normalizedLanguage === undefined) ?? null; } } trackStorePeriod.text.storedSettings = - textAdaptation === null + textTrack === null ? null : { - adaptation: textAdaptation, + track: textTrack, switchingMode: "direct" as const, lockedRepresentations: new SharedReference( null, @@ -1409,13 +1394,13 @@ function isPeriodItemRemovable(periodObj: ITSPeriodObject): boolean { } function getRightVideoTrack( - adaptation: IAdaptationMetadata, + track: ITrackMetadata, isTrickModeEnabled: boolean, -): IAdaptationMetadata { - if (isTrickModeEnabled && adaptation.trickModeTracks?.[0] !== undefined) { - return adaptation.trickModeTracks[0]; +): ITrackMetadata { + if (isTrickModeEnabled && track.trickModeTracks?.[0] !== undefined) { + return track.trickModeTracks[0]; } - return adaptation; + return track; } /** @@ -1494,12 +1479,12 @@ interface IAudioPeriodInfo { */ storedSettings: | { - /** Contains the last `Adaptation` wanted by the user. */ - adaptation: IAdaptationMetadata; + /** Contains the last `Track` wanted by the user. */ + track: ITrackMetadata; /** "Switching mode" in which the track switch should happen. */ switchingMode: IAudioTrackSwitchingMode; /** - * Contains the last locked `Representation`s for this `Adaptation` wanted + * Contains the last locked `Representation`s for this `Track` wanted * by the user. * `null` if no Representation is locked. */ @@ -1531,12 +1516,12 @@ export interface ITextPeriodInfo { */ storedSettings: | { - /** Contains the last `Adaptation` wanted by the user. */ - adaptation: IAdaptationMetadata; + /** Contains the last `Track` wanted by the user. */ + track: ITrackMetadata; /** "Switching mode" in which the track switch should happen. 
*/ switchingMode: "direct"; /** - * Contains the last locked `Representation`s for this `Adaptation` wanted + * Contains the last locked `Representation`s for this `Track` wanted * by the user. * `null` if no Representation is locked. */ @@ -1581,22 +1566,22 @@ export interface IVideoPeriodInfo { type IVideoStoredSettings = { /** - * The wanted Adaptation itself (may be different from `adaptationBase` when - * a trickmode track is chosen, in which case `adaptationBase` is the - * Adaptation the trickmode track is linked to and `adaptation` is the + * The wanted track itself (may be different from `trackBase` when + * a trickmode track is chosen, in which case `trackBase` is the + * track the trickmode track is linked to and `track` is the * trickmode track). */ - adaptation: IAdaptationMetadata; + track: ITrackMetadata; /** "Switching mode" in which the track switch should happen. */ switchingMode: IVideoTrackSwitchingMode; /** - * The "base" Adaptation for `storedSettings` (if a trickmode track was - * chosen, this is the Adaptation the trickmode track is linked to, and not + * The "base" track for `storedSettings` (if a trickmode track was + * chosen, this is the track the trickmode track is linked to, and not * the trickmode track itself). */ - adaptationBase: IAdaptationMetadata; + trackBase: ITrackMetadata; /** - * Contains the last locked `Representation`s for this `Adaptation` wanted + * Contains the last locked `Representation`s for this `track` wanted * by the user. * `null` if no Representation is locked. */ @@ -1621,3 +1606,15 @@ export interface IVideoRepresentationsLockSettings { representations: string[]; switchingMode?: IVideoRepresentationsSwitchingMode | undefined; } + +function getSupportedTracks( + period: IPeriodMetadata, + trackType: ITrackType, +): ITrackMetadata[] { + return objectValues(period.tracksMetadata[trackType]).filter((t) => { + return ( + t.supportStatus.hasSupportedCodec === true && + t.supportStatus.isDecipherable !== false + ); + }); +} diff --git a/src/manifest/classes/__tests__/manifest.test.ts b/src/manifest/classes/__tests__/manifest.test.ts index 587f8356f0..6cc889ca72 100644 --- a/src/manifest/classes/__tests__/manifest.test.ts +++ b/src/manifest/classes/__tests__/manifest.test.ts @@ -297,7 +297,9 @@ describe("Manifest - Manifest", () => { expect(warnings).toHaveLength(1); expect(warnings[0].type).toEqual("MEDIA_ERROR"); expect(warnings[0].code).toEqual("MANIFEST_INCOMPATIBLE_CODECS_ERROR"); - expect((warnings[0] as unknown as { tracksInfo: unknown }).tracksInfo).toEqual([ + expect( + (warnings[0] as unknown as { tracksMetadata: unknown }).tracksMetadata, + ).toEqual([ { track: { language: "", diff --git a/src/manifest/classes/adaptation.ts b/src/manifest/classes/adaptation.ts index 5594c4518c..0ccb820ca2 100644 --- a/src/manifest/classes/adaptation.ts +++ b/src/manifest/classes/adaptation.ts @@ -15,7 +15,7 @@ */ import log from "../../log"; -import type { IParsedAdaptation } from "../../parsers/manifest"; +import type { IParsedTrack } from "../../parsers/manifest"; import type { ITrackType, IRepresentationFilter, @@ -24,137 +24,146 @@ import type { import arrayFind from "../../utils/array_find"; import isNullOrUndefined from "../../utils/is_null_or_undefined"; import normalizeLanguage from "../../utils/languages"; +import { objectValues } from "../../utils/object_values"; import type { - IAdaptationMetadata, + ITrackMetadata, IRepresentationMetadata, - IAdaptationSupportStatus, + ITrackSupportStatus, } from "../types"; import type 
CodecSupportCache from "./codec_support_cache"; import Representation from "./representation"; /** - * Normalized Adaptation structure. - * An `Adaptation` describes a single `Track`. For example a specific audio - * track (in a given language) or a specific video track. + * Normalized track structure. + * A `Track` describes an available media of a particular type in a content. + * For example a specific audio track (in a given language) or a specific + * video track. * It istelf can be represented in different qualities, which we call here * `Representation`. - * @class Adaptation + * @class Track */ -export default class Adaptation implements IAdaptationMetadata { - /** ID uniquely identifying the Adaptation in the Period. */ +export default class Track implements ITrackMetadata { + /** ID uniquely identifying the track in the Period. */ public readonly id: string; /** - * `true` if this Adaptation was not present in the original Manifest, but was + * `true` if this track was not present in the original Manifest, but was * manually added after through the corresponding APIs. */ public manuallyAdded?: boolean; /** - * @see IRepresentationMetadata + * @see ITrackMetadata */ - public readonly representations: Representation[]; + public readonly representations: Record; /** - * @see IRepresentationMetadata + * @see ITrackMetadata */ - public readonly type: ITrackType; + public readonly trackType: ITrackType; /** - * @see IRepresentationMetadata + * @see ITrackMetadata + */ + public inVariantStreams: string[]; + /** + * @see ITrackMetadata */ public isAudioDescription?: boolean; /** - * @see IRepresentationMetadata + * @see ITrackMetadata */ public isClosedCaption?: boolean; /** - * @see IRepresentationMetadata + * @see ITrackMetadata */ public isForcedSubtitles?: boolean; /** - * @see IRepresentationMetadata + * @see ITrackMetadata */ public isSignInterpreted?: boolean; /** - * @see IRepresentationMetadata + * @see ITrackMetadata */ public isDub?: boolean; /** - * @see IRepresentationMetadata + * @see ITrackMetadata */ public language?: string; /** - * @see IRepresentationMetadata + * @see ITrackMetadata */ public normalizedLanguage?: string; /** - * @see IRepresentationMetadata + * @see ITrackMetadata */ - public supportStatus: IAdaptationSupportStatus; + public supportStatus: ITrackSupportStatus; /** - * @see IRepresentationMetadata + * @see ITrackMetadata */ public isTrickModeTrack?: boolean; /** - * @see IRepresentationMetadata + * @see ITrackMetadata */ public label?: string; /** - * @see IRepresentationMetadata + * @see ITrackMetadata */ - public readonly trickModeTracks?: Adaptation[]; + public readonly trickModeTracks?: Track[]; /** * @constructor - * @param {Object} parsedAdaptation + * @param {Object} parsedTrack + * @param {Object} cachedCodecSupport * @param {Object|undefined} [options] */ constructor( - parsedAdaptation: IParsedAdaptation, + parsedTrack: IParsedTrack, cachedCodecSupport: CodecSupportCache, options: { representationFilter?: IRepresentationFilter | undefined; isManuallyAdded?: boolean | undefined; } = {}, ) { - const { trickModeTracks } = parsedAdaptation; + const { trickModeTracks } = parsedTrack; const { representationFilter, isManuallyAdded } = options; - this.id = parsedAdaptation.id; - this.type = parsedAdaptation.type; + this.id = parsedTrack.id; + this.trackType = parsedTrack.trackType; - if (parsedAdaptation.isTrickModeTrack !== undefined) { - this.isTrickModeTrack = parsedAdaptation.isTrickModeTrack; + // XXX TODO + this.inVariantStreams = []; + if 
(parsedTrack.isTrickModeTrack !== undefined) { + this.isTrickModeTrack = parsedTrack.isTrickModeTrack; } - if (parsedAdaptation.language !== undefined) { - this.language = parsedAdaptation.language; - this.normalizedLanguage = normalizeLanguage(parsedAdaptation.language); + if (parsedTrack.language !== undefined) { + this.language = parsedTrack.language; + this.normalizedLanguage = normalizeLanguage(parsedTrack.language); } - if (parsedAdaptation.closedCaption !== undefined) { - this.isClosedCaption = parsedAdaptation.closedCaption; + if (parsedTrack.isClosedCaption !== undefined) { + this.isClosedCaption = parsedTrack.isClosedCaption; } - if (parsedAdaptation.audioDescription !== undefined) { - this.isAudioDescription = parsedAdaptation.audioDescription; + if (parsedTrack.isAudioDescription !== undefined) { + this.isAudioDescription = parsedTrack.isAudioDescription; } - if (parsedAdaptation.isDub !== undefined) { - this.isDub = parsedAdaptation.isDub; + if (parsedTrack.isDub !== undefined) { + this.isDub = parsedTrack.isDub; } - if (parsedAdaptation.forcedSubtitles !== undefined) { - this.isForcedSubtitles = parsedAdaptation.forcedSubtitles; + if (parsedTrack.isForcedSubtitles !== undefined) { + this.isForcedSubtitles = parsedTrack.isForcedSubtitles; } - if (parsedAdaptation.isSignInterpreted !== undefined) { - this.isSignInterpreted = parsedAdaptation.isSignInterpreted; + if (parsedTrack.isSignInterpreted !== undefined) { + this.isSignInterpreted = parsedTrack.isSignInterpreted; } - if (parsedAdaptation.label !== undefined) { - this.label = parsedAdaptation.label; + if (parsedTrack.label !== undefined) { + this.label = parsedTrack.label; } if (trickModeTracks !== undefined && trickModeTracks.length > 0) { this.trickModeTracks = trickModeTracks.map( - (track) => new Adaptation(track, cachedCodecSupport), + (track) => new Track(track, cachedCodecSupport), ); } - const argsRepresentations = parsedAdaptation.representations; - const representations: Representation[] = []; + const argsRepresentations = parsedTrack.representations; + const representations: Record = {}; this.supportStatus = { hasSupportedCodec: false, hasCodecWithUndefinedSupport: false, @@ -163,7 +172,7 @@ export default class Adaptation implements IAdaptationMetadata { for (let i = 0; i < argsRepresentations.length; i++) { const representation = new Representation( argsRepresentations[i], - this.type, + this.trackType, cachedCodecSupport, ); let shouldAdd = true; @@ -187,7 +196,7 @@ export default class Adaptation implements IAdaptationMetadata { } } shouldAdd = representationFilter(reprObject, { - trackType: this.type, + trackType: this.trackType, language: this.language, normalizedLanguage: this.normalizedLanguage, isClosedCaption: this.isClosedCaption, @@ -197,7 +206,7 @@ export default class Adaptation implements IAdaptationMetadata { }); } if (shouldAdd) { - representations.push(representation); + representations[representation.id] = representation; if (representation.isSupported === undefined) { this.supportStatus.hasCodecWithUndefinedSupport = true; if (this.supportStatus.hasSupportedCodec === false) { @@ -216,14 +225,13 @@ export default class Adaptation implements IAdaptationMetadata { } else { log.debug( "Filtering Representation due to representationFilter", - this.type, - `Adaptation: ${this.id}`, + this.trackType, + `track: ${this.id}`, `Representation: ${representation.id}`, `(${representation.bitrate})`, ); } } - representations.sort((a, b) => a.bitrate - b.bitrate); this.representations = representations; // 
for manuallyAdded adaptations (not in the manifest) @@ -239,7 +247,7 @@ export default class Adaptation implements IAdaptationMetadata { * * * If the right mimetype+codec combination is found in the provided object, - * this `Adaptation`'s `isSupported` property will be updated accordingly as + * this `Track`'s `isSupported` property will be updated accordingly as * well as all of its inner `Representation`'s `isSupported` attributes. * * @param {Array.} cachedCodecSupport @@ -247,7 +255,7 @@ export default class Adaptation implements IAdaptationMetadata { refreshCodecSupport(cachedCodecSupport: CodecSupportCache): void { let hasCodecWithUndefinedSupport = false; let hasSupportedRepresentation = false; - for (const representation of this.representations) { + for (const representation of objectValues(this.representations)) { representation.refreshCodecSupport(cachedCodecSupport); if (representation.isSupported === undefined) { hasCodecWithUndefinedSupport = true; @@ -278,33 +286,33 @@ export default class Adaptation implements IAdaptationMetadata { * @returns {Object|undefined} */ getRepresentation(wantedId: number | string): Representation | undefined { - return arrayFind(this.representations, ({ id }) => wantedId === id); + return arrayFind(objectValues(this.representations), ({ id }) => wantedId === id); } /** - * Format the current `Adaptation`'s properties into a - * `IAdaptationMetadata` format which can better be communicated through + * Format the current `Track`'s properties into a + * `ITrackMetadata` format which can better be communicated through * another thread. * * Please bear in mind however that the returned object will not be updated - * when the current `Adaptation` instance is updated, it is only a + * when the current `Track` instance is updated, it is only a * snapshot at the current time. * - * If you want to keep that data up-to-date with the current `Adaptation` + * If you want to keep that data up-to-date with the current `Track` * instance, you will have to do it yourself. * * @returns {Object} */ - getMetadataSnapshot(): IAdaptationMetadata { - const representations: IRepresentationMetadata[] = []; - const baseRepresentations = this.representations; - for (const representation of baseRepresentations) { - representations.push(representation.getMetadataSnapshot()); + getMetadataSnapshot(): ITrackMetadata { + const representations: Record = {}; + for (const representation of objectValues(this.representations)) { + representations[representation.id] = representation.getMetadataSnapshot(); } return { id: this.id, - type: this.type, + trackType: this.trackType, supportStatus: this.supportStatus, + inVariantStreams: this.inVariantStreams, language: this.language, isForcedSubtitles: this.isForcedSubtitles, isClosedCaption: this.isClosedCaption, diff --git a/src/manifest/classes/index.ts b/src/manifest/classes/index.ts index 4db4d9de1a..abde8aac68 100644 --- a/src/manifest/classes/index.ts +++ b/src/manifest/classes/index.ts @@ -14,7 +14,7 @@ * limitations under the License. 
*/ -import Adaptation from "./adaptation"; +import Track from "./adaptation"; import type { ICodecSupportInfo } from "./codec_support_cache"; import type { IDecipherabilityUpdateElement, IManifestParsingOptions } from "./manifest"; import Manifest from "./manifest"; @@ -47,7 +47,7 @@ export { areSameContent, getLoggableSegmentId, Period, - Adaptation, + Track, Representation, StaticRepresentationIndex, }; diff --git a/src/manifest/classes/manifest.ts b/src/manifest/classes/manifest.ts index 046cc04bcb..44b3cebff0 100644 --- a/src/manifest/classes/manifest.ts +++ b/src/manifest/classes/manifest.ts @@ -18,16 +18,16 @@ import { MediaError } from "../../errors"; import log from "../../log"; import { getCodecsWithUnknownSupport } from "../../main_thread/init/utils/update_manifest_codec_support"; import type { IParsedManifest } from "../../parsers/manifest"; -import type { ITrackType, IRepresentationFilter, IPlayerError } from "../../public_types"; +import type { IRepresentationFilter, IPlayerError } from "../../public_types"; import arrayFind from "../../utils/array_find"; import EventEmitter from "../../utils/event_emitter"; import idGenerator from "../../utils/id_generator"; -import warnOnce from "../../utils/warn_once"; +import { objectValues } from "../../utils/object_values"; import type { - IAdaptationMetadata, IManifestMetadata, IPeriodMetadata, IRepresentationMetadata, + ITrackMetadata, } from "../types"; import { ManifestMetadataFormat } from "../types"; import { @@ -38,10 +38,9 @@ import { getPeriodAfter, toTaggedTrack, } from "../utils"; -import type Adaptation from "./adaptation"; +import type Track from "./adaptation"; import CodecSupportCache from "./codec_support_cache"; import type { ICodecSupportInfo } from "./codec_support_cache"; -import type { IManifestAdaptations } from "./period"; import Period from "./period"; import type Representation from "./representation"; import { MANIFEST_UPDATE_TYPE } from "./types"; @@ -66,7 +65,7 @@ interface IManifestParsingOptions { export interface IDecipherabilityUpdateElement { manifest: IManifestMetadata; period: IPeriodMetadata; - adaptation: IAdaptationMetadata; + track: ITrackMetadata; representation: IRepresentationMetadata; } @@ -134,12 +133,6 @@ export default class Manifest */ public expired: Promise | null; - /** - * Deprecated. Equivalent to `manifest.periods[0].adaptations`. - * @deprecated - */ - public adaptations: IManifestAdaptations; - /** * If true, the Manifest can evolve over time: * New segments can become available in the future, properties of the manifest @@ -219,12 +212,12 @@ export default class Manifest public timeBounds: { /** * This is the theoretical minimum playable position on the content - * regardless of the current Adaptation chosen, as estimated at parsing + * regardless of the current tracks chosen, as estimated at parsing * time. * `undefined` if unknown. * * More technically, the `minimumSafePosition` is the maximum between all - * the minimum positions reachable in any of the audio and video Adaptation. + * the minimum positions reachable in any of the audio and video tracks. * * Together with `timeshiftDepth` and the `maximumTimeData` object, this * value allows to compute at any time the minimum seekable time: @@ -278,7 +271,7 @@ export default class Manifest isLinear: boolean; /** * This is the theoretical maximum playable position on the content, - * regardless of the current Adaptation chosen, as estimated at parsing + * regardless of the current tracks chosen, as estimated at parsing * time. 
* * More technically, the `maximumSafePosition` is the minimum between all @@ -334,12 +327,12 @@ export default class Manifest this.clockOffset = parsedManifest.clockOffset; this._cachedCodecSupport = new CodecSupportCache([]); - const unsupportedAdaptations: Adaptation[] = []; + const unsupportedTracks: Track[] = []; this.periods = parsedManifest.periods .map((parsedPeriod) => { const period = new Period( parsedPeriod, - unsupportedAdaptations, + unsupportedTracks, this._cachedCodecSupport, representationFilter, ); @@ -347,23 +340,15 @@ export default class Manifest }) .sort((a, b) => a.start - b.start); - if (unsupportedAdaptations.length > 0) { + if (unsupportedTracks.length > 0) { const error = new MediaError( "MANIFEST_INCOMPATIBLE_CODECS_ERROR", - "An Adaptation contains only incompatible codecs.", - { tracks: unsupportedAdaptations.map(toTaggedTrack) }, + "A track contains only incompatible codecs.", + { tracks: unsupportedTracks.map(toTaggedTrack) }, ); warnings.push(error); } - /** - * @deprecated It is here to ensure compatibility with the way the - * v3.x.x manages adaptations at the Manifest level - */ - /* eslint-disable import/no-deprecated */ - this.adaptations = this.periods[0] === undefined ? {} : this.periods[0].adaptations; - /* eslint-enable import/no-deprecated */ - this.timeBounds = parsedManifest.timeBounds; this.isDynamic = parsedManifest.isDynamic; this.isLive = parsedManifest.isLive; @@ -387,7 +372,7 @@ export default class Manifest * * @param {Array} [updatedCodecSupportInfo] * @returns {Error|null} - Refreshing codec support might reveal that some - * `Adaptation` don't have any of their `Representation`s supported. + * `tracks` don't have any of their `Representation`s supported. * In that case, an error object will be created and returned, so you can * e.g. later emit it as a warning through the RxPlayer API. */ @@ -399,16 +384,16 @@ export default class Manifest } this._cachedCodecSupport.addCodecs(updatedCodecSupportInfo); - const unsupportedAdaptations: Adaptation[] = []; + const unsupportedTracks: Track[] = []; for (const period of this.periods) { - period.refreshCodecSupport(unsupportedAdaptations, this._cachedCodecSupport); + period.refreshCodecSupport(unsupportedTracks, this._cachedCodecSupport); } this.trigger("supportUpdate", null); - if (unsupportedAdaptations.length > 0) { + if (unsupportedTracks.length > 0) { return new MediaError( "MANIFEST_INCOMPATIBLE_CODECS_ERROR", - "An Adaptation contains only incompatible codecs.", - { tracks: unsupportedAdaptations.map(toTaggedTrack) }, + "A track contains only incompatible codecs.", + { tracks: unsupportedTracks.map(toTaggedTrack) }, ); } return null; @@ -492,7 +477,7 @@ export default class Manifest /** * Returns the theoretical minimum playable position on the content - * regardless of the current Adaptation chosen, as estimated at parsing + * regardless of the current tracks chosen, as estimated at parsing * time. * @returns {number} */ @@ -511,7 +496,7 @@ export default class Manifest /** * Returns the theoretical maximum playable position on the content - * regardless of the current Adaptation chosen, as estimated at parsing + * regardless of the current tracks chosen, as estimated at parsing * time. 
*/ public getMaximumSafePosition(): number { @@ -533,7 +518,7 @@ export default class Manifest isDecipherableCb: (content: { manifest: Manifest; period: Period; - adaptation: Adaptation; + track: ITrackMetadata; representation: Representation; }) => boolean | undefined, ): void { @@ -543,63 +528,6 @@ export default class Manifest } } - /** - * @deprecated only returns adaptations for the first period - * @returns {Array.} - */ - public getAdaptations(): Adaptation[] { - warnOnce( - "manifest.getAdaptations() is deprecated." + - " Please use manifest.period[].getAdaptations() instead", - ); - const firstPeriod = this.periods[0]; - if (firstPeriod === undefined) { - return []; - } - const adaptationsByType = firstPeriod.adaptations; - const adaptationsList: Adaptation[] = []; - for (const adaptationType in adaptationsByType) { - if (adaptationsByType.hasOwnProperty(adaptationType)) { - const adaptations = adaptationsByType[ - adaptationType as ITrackType - ] as Adaptation[]; - adaptationsList.push(...adaptations); - } - } - return adaptationsList; - } - - /** - * @deprecated only returns adaptations for the first period - * @returns {Array.} - */ - public getAdaptationsForType(adaptationType: ITrackType): Adaptation[] { - warnOnce( - "manifest.getAdaptationsForType(type) is deprecated." + - " Please use manifest.period[].getAdaptationsForType(type) instead", - ); - const firstPeriod = this.periods[0]; - if (firstPeriod === undefined) { - return []; - } - const adaptationsForType = firstPeriod.adaptations[adaptationType]; - return adaptationsForType === undefined ? [] : adaptationsForType; - } - - /** - * @deprecated only returns adaptations for the first period - * @returns {Array.} - */ - public getAdaptation(wantedId: number | string): Adaptation | undefined { - warnOnce( - "manifest.getAdaptation(id) is deprecated." + - " Please use manifest.period[].getAdaptation(id) instead", - ); - /* eslint-disable import/no-deprecated */ - return arrayFind(this.getAdaptations(), ({ id }) => wantedId === id); - /* eslint-enable import/no-deprecated */ - } - /** * Format the current `Manifest`'s properties into a * `IManifestMetadata` format which can better be communicated through @@ -640,7 +568,7 @@ export default class Manifest * If a representation with (`isSupported`) is undefined, we consider the * codec support as unknown. * - * This function iterates through all periods, adaptations, and representations, + * This function iterates through all periods, tracks, and representations, * and collects unknown codecs. * * @returns {Array} The list of codecs with unknown support status. @@ -690,11 +618,6 @@ export default class Manifest this.updateCodecSupport(); - // Re-set this.adaptations for retro-compatibility in v3.x.x - /* eslint-disable import/no-deprecated */ - this.adaptations = this.periods[0] === undefined ? {} : this.periods[0].adaptations; - /* eslint-enable import/no-deprecated */ - // Let's trigger events at the end, as those can trigger side-effects. // We do not want the current Manifest object to be incomplete when those // happen. 
@@ -718,16 +641,16 @@ function updateDeciperability( isDecipherable: (content: { manifest: Manifest; period: Period; - adaptation: Adaptation; + track: ITrackMetadata; representation: Representation; }) => boolean | undefined, ): IDecipherabilityUpdateElement[] { const updates: IDecipherabilityUpdateElement[] = []; for (const period of manifest.periods) { - for (const adaptation of period.getAdaptations()) { + for (const track of period.getTrackList()) { let hasOnlyUndecipherableRepresentations = true; - for (const representation of adaptation.representations) { - const content = { manifest, period, adaptation, representation }; + for (const representation of objectValues(track.representations)) { + const content = { manifest, period, track, representation }; const result = isDecipherable(content); if (result !== false) { hasOnlyUndecipherableRepresentations = false; @@ -736,12 +659,12 @@ function updateDeciperability( updates.push(content); representation.decipherable = result; if (result === true) { - adaptation.supportStatus.isDecipherable = true; + track.supportStatus.isDecipherable = true; } else if ( result === undefined && - adaptation.supportStatus.isDecipherable === false + track.supportStatus.isDecipherable === false ) { - adaptation.supportStatus.isDecipherable = undefined; + track.supportStatus.isDecipherable = undefined; } log.debug( `Decipherability changed for "${representation.id}"`, @@ -751,7 +674,7 @@ function updateDeciperability( } } if (hasOnlyUndecipherableRepresentations) { - adaptation.supportStatus.isDecipherable = false; + track.supportStatus.isDecipherable = false; } } } diff --git a/src/manifest/classes/period.ts b/src/manifest/classes/period.ts index e31996c27c..c2bae7a789 100644 --- a/src/manifest/classes/period.ts +++ b/src/manifest/classes/period.ts @@ -16,16 +16,13 @@ import { MediaError } from "../../errors"; import type { IManifestStreamEvent, IParsedPeriod } from "../../parsers/manifest"; import type { ITrackType, IRepresentationFilter } from "../../public_types"; -import arrayFind from "../../utils/array_find"; import isNullOrUndefined from "../../utils/is_null_or_undefined"; -import type { IAdaptationMetadata, IPeriodMetadata } from "../types"; -import { getAdaptations, getSupportedAdaptations, periodContainsTime } from "../utils"; -import Adaptation from "./adaptation"; +import { objectValues } from "../../utils/object_values"; +import type { IPeriodMetadata, ITrackMetadata } from "../types"; +import { getTrackList, getTrackListForType, periodContainsTime } from "../utils"; +import Track from "./adaptation"; import type CodecSupportCache from "./codec_support_cache"; -/** Structure listing every `Adaptation` in a Period. */ -export type IManifestAdaptations = Partial>; - /** * Class representing the tracks and qualities available from a given time * period in the the Manifest. @@ -35,9 +32,6 @@ export default class Period implements IPeriodMetadata { /** ID uniquely identifying the Period in the Manifest. */ public readonly id: string; - /** Every 'Adaptation' in that Period, per type of Adaptation. */ - public adaptations: IManifestAdaptations; - /** Absolute start time of the Period, in seconds. 
*/ public start: number; @@ -56,83 +50,112 @@ export default class Period implements IPeriodMetadata { /** Array containing every stream event happening on the period */ public streamEvents: IManifestStreamEvent[]; + public variantStreams: Array<{ + id: string; + bandwidth: number | undefined; + media: Record< + ITrackType, + Array<{ + id: string; + /** + * Id of the "track" to which that audio media is a part of. + * + * A given audio "track" might for example provide an audio media to various + * variant streams. + */ + linkedTrack: string; + /** + * Different `Representations` (e.g. qualities) this media is available + * in. + */ + representations: string[]; + }> + >; + }>; + + public tracksMetadata: Record>; + /** * @constructor * @param {Object} args - * @param {Array.} unsupportedAdaptations - Array on which - * `Adaptation`s objects which have no supported `Representation` will be + * @param {Array.} unsupportedTracks - Array on which + * `track`s objects which have no supported `Representation` will be * pushed. * This array might be useful for minor error reporting. * @param {function|undefined} [representationFilter] */ constructor( args: IParsedPeriod, - unsupportedAdaptations: Adaptation[], + unsupportedTracks: ITrackMetadata[], cachedCodecSupport: CodecSupportCache, - representationFilter?: IRepresentationFilter | undefined, ) { this.id = args.id; - this.adaptations = ( - Object.keys(args.adaptations) as ITrackType[] - ).reduce((acc, type) => { - const adaptationsForType = args.adaptations[type]; - if (isNullOrUndefined(adaptationsForType)) { - return acc; - } - const filteredAdaptations = adaptationsForType - .map((adaptation): Adaptation => { - const newAdaptation = new Adaptation(adaptation, cachedCodecSupport, { - representationFilter, - }); - if ( - newAdaptation.representations.length > 0 && - newAdaptation.supportStatus.hasSupportedCodec === false - ) { - unsupportedAdaptations.push(newAdaptation); + + this.variantStreams = []; + this.tracksMetadata = { + audio: {}, + video: {}, + text: {}, + }; + + this.duration = args.duration; + this.start = args.start; + + if (!isNullOrUndefined(this.duration) && !isNullOrUndefined(this.start)) { + this.end = this.start + this.duration; + } + this.streamEvents = args.streamEvents === undefined ? 
[] : args.streamEvents; + + this.variantStreams = args.variantStreams; + this.tracksMetadata = { + audio: {}, + video: {}, + text: {}, + }; + for (const tType of ["audio", "video", "text"] as const) { + const tracks: Record = {}; + let hasSupportedTrack = false; + for (const trackMetadata of args.tracksMetadata[tType]) { + const newTrack = new Track(trackMetadata, cachedCodecSupport, { + representationFilter, + }); + const representationsNb = objectValues(newTrack.representations).length; + if (representationsNb > 0 && newTrack.supportStatus.hasSupportedCodec === false) { + unsupportedTracks.push(newTrack); + } + + if (representationsNb > 0) { + tracks[newTrack.id] = newTrack; + if (newTrack.supportStatus.hasSupportedCodec !== false) { + hasSupportedTrack = true; } - return newAdaptation; - }) - .filter( - (adaptation): adaptation is Adaptation => adaptation.representations.length > 0, - ); + } + } + if ( - filteredAdaptations.every( - (adaptation) => adaptation.supportStatus.hasSupportedCodec === false, - ) && - adaptationsForType.length > 0 && - (type === "video" || type === "audio") + !hasSupportedTrack && + args.tracksMetadata[tType].length > 0 && + (tType === "video" || tType === "audio") ) { throw new MediaError( "MANIFEST_INCOMPATIBLE_CODECS_ERROR", - "No supported " + type + " adaptations", + "No supported " + tType + " tracks", { tracks: undefined }, ); } - if (filteredAdaptations.length > 0) { - acc[type] = filteredAdaptations; - } - return acc; - }, {}); - + this.tracksMetadata[tType] = tracks; + } if ( - !Array.isArray(this.adaptations.video) && - !Array.isArray(this.adaptations.audio) + objectValues(this.tracksMetadata.video).length === 0 && + objectValues(this.tracksMetadata.audio).length === 0 ) { throw new MediaError( "MANIFEST_PARSE_ERROR", "No supported audio and video tracks.", ); } - - this.duration = args.duration; - this.start = args.start; - - if (!isNullOrUndefined(this.duration) && !isNullOrUndefined(this.start)) { - this.end = this.start + this.duration; - } - this.streamEvents = args.streamEvents === undefined ? [] : args.streamEvents; } /** @@ -142,55 +165,47 @@ export default class Period implements IPeriodMetadata { * Calling `refreshCodecSupport` manually once the codecs supported are known * by the current environnement allows to work-around this issue. * - * @param {Array.} unsupportedAdaptations - Array on which - * `Adaptation`s objects which are now known to have no supported + * @param {Array.} unsupportedTracks - Array on which track objects which + * are now known to have no supported + * @param {Object} cachedCodecSupport * `Representation` will be pushed. * This array might be useful for minor error reporting. * @param {Array.} cachedCodecSupport */ - refreshCodecSupport( - unsupportedAdaptations: Adaptation[], - cachedCodecSupport: CodecSupportCache, - ) { - (Object.keys(this.adaptations) as ITrackType[]).forEach((ttype) => { - const adaptationsForType = this.adaptations[ttype]; - if (adaptationsForType === undefined) { - return; - } - let hasSupportedAdaptations: boolean | undefined = false; - for (const adaptation of adaptationsForType) { - if (!adaptation.supportStatus.hasCodecWithUndefinedSupport) { - // Go to next adaptation as an optimisation measure. 
+ refreshCodecSupport(unsupportedTracks: Track[], cachedCodecSupport: CodecSupportCache) { + (Object.keys(this.tracksMetadata) as ITrackType[]).forEach((ttype) => { + const tracksForType = getTrackListForType(this, ttype); + let hasSupportedTracks: boolean | undefined = false; + for (const track of tracksForType) { + if (!track.supportStatus.hasCodecWithUndefinedSupport) { + // Go to next track as an optimisation measure. // NOTE this only is true if we never change a codec from supported // to unsuported and its opposite. - if (adaptation.supportStatus.hasSupportedCodec === true) { - hasSupportedAdaptations = true; + if (track.supportStatus.hasSupportedCodec === true) { + hasSupportedTracks = true; } continue; } - const wasSupported = adaptation.supportStatus.hasSupportedCodec; - adaptation.refreshCodecSupport(cachedCodecSupport); - if ( - wasSupported !== false && - adaptation.supportStatus.hasSupportedCodec === false - ) { - unsupportedAdaptations.push(adaptation); + const wasSupported = track.supportStatus.hasSupportedCodec; + track.refreshCodecSupport(cachedCodecSupport); + if (wasSupported !== false && track.supportStatus.hasSupportedCodec === false) { + unsupportedTracks.push(track); } - if (hasSupportedAdaptations === false) { - hasSupportedAdaptations = adaptation.supportStatus.hasSupportedCodec; + if (hasSupportedTracks === false) { + hasSupportedTracks = track.supportStatus.hasSupportedCodec; } else if ( - hasSupportedAdaptations === undefined && - adaptation.supportStatus.hasSupportedCodec === true + hasSupportedTracks === undefined && + track.supportStatus.hasSupportedCodec === true ) { - hasSupportedAdaptations = true; + hasSupportedTracks = true; } } - if ((ttype === "video" || ttype === "audio") && hasSupportedAdaptations === false) { + if ((ttype === "video" || ttype === "audio") && hasSupportedTracks === false) { throw new MediaError( "MANIFEST_INCOMPATIBLE_CODECS_ERROR", - "No supported " + ttype + " adaptations", + "No supported " + ttype + " tracks", { tracks: undefined }, ); } @@ -198,43 +213,47 @@ export default class Period implements IPeriodMetadata { } /** - * Returns every `Adaptations` (or `tracks`) linked to that Period, in an - * Array. - * @returns {Array.} + * Returns a track associated with this Period by giving its id. + * Returns `null` if the track is not found. + * @param {string} id + * @returns {Object|null} */ - getAdaptations(): Adaptation[] { - return getAdaptations(this); + getTrack(id: string): Track | null { + return ( + this.tracksMetadata.audio[id] ?? + this.tracksMetadata.video[id] ?? + this.tracksMetadata.text[id] ?? + null + ); } /** - * Returns every `Adaptations` (or `tracks`) linked to that Period for a - * given type. - * @param {string} adaptationType + * Returns every tracks linked to that Period, in an + * Array. * @returns {Array.} */ - getAdaptationsForType(adaptationType: ITrackType): Adaptation[] { - const adaptationsForType = this.adaptations[adaptationType]; - return adaptationsForType ?? []; + getTrackList(): Track[] { + return getTrackList(this); } /** - * Returns the Adaptation linked to the given ID. - * @param {number|string} wantedId - * @returns {Object|undefined} + * Returns every `Tracks` linked to that Period for a given type. 
+ * @param {string} trackType + * @returns {Array.} */ - getAdaptation(wantedId: string): Adaptation | undefined { - return arrayFind(this.getAdaptations(), ({ id }) => wantedId === id); + getTrackListForType(trackType: ITrackType): Track[] { + return getTrackListForType(this, trackType); } - /** - * Returns Adaptations that contain Representations in supported codecs. - * @param {string|undefined} type - If set filter on a specific Adaptation's - * type. Will return for all types if `undefined`. - * @returns {Array.} - */ - getSupportedAdaptations(type?: ITrackType | undefined): Adaptation[] { - return getSupportedAdaptations(this, type); - } + // /** + // * Returns Tracks that contain Representations in supported codecs. + // * @param {string|undefined} type - If set filter on a specific Track's + // * type. Will return for all types if `undefined`. + // * @returns {Array.} + // */ + // getSupportedTracks(type?: ITrackType | undefined): Track[] { + // return getSupportedTracks(this, type); + // } /** * Returns true if the give time is in the time boundaries of this `Period`. @@ -262,22 +281,14 @@ export default class Period implements IPeriodMetadata { * @returns {Object} */ public getMetadataSnapshot(): IPeriodMetadata { - const adaptations: Partial> = {}; - const baseAdaptations = this.getAdaptations(); - for (const adaptation of baseAdaptations) { - let currentAdaps: IAdaptationMetadata[] | undefined = adaptations[adaptation.type]; - if (currentAdaps === undefined) { - currentAdaps = []; - adaptations[adaptation.type] = currentAdaps; - } - currentAdaps.push(adaptation.getMetadataSnapshot()); - } return { start: this.start, end: this.end, id: this.id, + duration: this.duration, streamEvents: this.streamEvents, - adaptations, + variantStreams: this.variantStreams, + tracksMetadata: this.tracksMetadata, }; } } diff --git a/src/manifest/classes/representation.ts b/src/manifest/classes/representation.ts index 5f760a44fc..16ad556980 100644 --- a/src/manifest/classes/representation.ts +++ b/src/manifest/classes/representation.ts @@ -15,7 +15,6 @@ */ import log from "../../log"; -import type { IRepresentationMetadata } from "../../manifest"; import type { ICdnMetadata, IContentProtections, @@ -24,6 +23,8 @@ import type { import type { ITrackType, IHDRInformation } from "../../public_types"; import areArraysOfNumbersEqual from "../../utils/are_arrays_of_numbers_equal"; import idGenerator from "../../utils/id_generator"; +import type { IRepresentationMetadata } from "../types"; +import { getMimeTypeString } from "../utils"; import type codecSupportCache from "./codec_support_cache"; import type { IRepresentationIndex } from "./representation_index"; @@ -249,7 +250,7 @@ class Representation implements IRepresentationMetadata { * @returns {string} */ public getMimeTypeString(): string { - return `${this.mimeType ?? ""};codecs="${this.codecs?.[0] ?? 
""}"`; + return getMimeTypeString(this); } /** diff --git a/src/manifest/classes/update_period_in_place.ts b/src/manifest/classes/update_period_in_place.ts index 918ee888f6..8ef9156bb1 100644 --- a/src/manifest/classes/update_period_in_place.ts +++ b/src/manifest/classes/update_period_in_place.ts @@ -15,9 +15,10 @@ */ import log from "../../log"; -import type { IAdaptationMetadata, IRepresentationMetadata } from "../../manifest"; +import type { IRepresentationMetadata, ITrackMetadata } from "../../manifest"; import type { ITrackType } from "../../public_types"; import arrayFindIndex from "../../utils/array_find_index"; +import { objectValues } from "../../utils/object_values"; import type Period from "./period"; import { MANIFEST_UPDATE_TYPE } from "./types"; @@ -35,50 +36,46 @@ export default function updatePeriodInPlace( updateType: MANIFEST_UPDATE_TYPE, ): IUpdatedPeriodResult { const res: IUpdatedPeriodResult = { - updatedAdaptations: [], - removedAdaptations: [], - addedAdaptations: [], + updatedTracks: [], + removedTracks: [], + addedTracks: [], }; oldPeriod.start = newPeriod.start; oldPeriod.end = newPeriod.end; oldPeriod.duration = newPeriod.duration; oldPeriod.streamEvents = newPeriod.streamEvents; - const oldAdaptations = oldPeriod.getAdaptations(); - const newAdaptations = newPeriod.getAdaptations(); + const oldTracks = oldPeriod.getTrackList(); + const newTracks = newPeriod.getTrackList(); - for (let j = 0; j < oldAdaptations.length; j++) { - const oldAdaptation = oldAdaptations[j]; - const newAdaptationIdx = arrayFindIndex( - newAdaptations, - (a) => a.id === oldAdaptation.id, - ); + for (let j = 0; j < oldTracks.length; j++) { + const oldTrack = oldTracks[j]; + const newTrackIdx = arrayFindIndex(newTracks, (a) => a.id === oldTrack.id); - if (newAdaptationIdx === -1) { - log.warn( - 'Manifest: Adaptation "' + oldAdaptations[j].id + '" not found when merging.', - ); - const [removed] = oldAdaptations.splice(j, 1); + if (newTrackIdx === -1) { + log.warn('Manifest: Track "' + oldTracks[j].id + '" not found when merging.'); + const [removed] = oldTracks.splice(j, 1); + delete oldPeriod.tracksMetadata[removed.trackType][removed.id]; j--; - res.removedAdaptations.push({ + res.removedTracks.push({ id: removed.id, - trackType: removed.type, + trackType: removed.trackType, }); } else { - const [newAdaptation] = newAdaptations.splice(newAdaptationIdx, 1); + const [newTrack] = newTracks.splice(newTrackIdx, 1); const updatedRepresentations: IRepresentationMetadata[] = []; const addedRepresentations: IRepresentationMetadata[] = []; const removedRepresentations: string[] = []; - res.updatedAdaptations.push({ - adaptation: oldAdaptation.id, - trackType: oldAdaptation.type, + res.updatedTracks.push({ + track: oldTrack.id, + trackType: oldTrack.trackType, updatedRepresentations, addedRepresentations, removedRepresentations, }); - const oldRepresentations = oldAdaptation.representations; - const newRepresentations = newAdaptation.representations.slice(); + const oldRepresentations = objectValues(oldTrack.representations); + const newRepresentations = objectValues(newTrack.representations); for (let k = 0; k < oldRepresentations.length; k++) { const oldRepresentation = oldRepresentations[k]; @@ -93,6 +90,7 @@ export default function updatePeriodInPlace( "not found when merging.", ); const [removed] = oldRepresentations.splice(k, 1); + delete oldTrack.representations[removed.id]; k--; removedRepresentations.push(removed.id); } else { @@ -112,25 +110,21 @@ export default function 
updatePeriodInPlace( `Manifest: ${newRepresentations.length} new Representations ` + "found when merging.", ); - oldAdaptation.representations.push(...newRepresentations); + for (const newRep of newRepresentations) { + oldTrack.representations[newRep.id] = newRep; + } addedRepresentations.push( ...newRepresentations.map((r) => r.getMetadataSnapshot()), ); } } } - if (newAdaptations.length > 0) { - log.warn( - `Manifest: ${newAdaptations.length} new Adaptations ` + "found when merging.", - ); - for (const adap of newAdaptations) { - const prevAdaps = oldPeriod.adaptations[adap.type]; - if (prevAdaps === undefined) { - oldPeriod.adaptations[adap.type] = [adap]; - } else { - prevAdaps.push(adap); - } - res.addedAdaptations.push(adap.getMetadataSnapshot()); + if (newTracks.length > 0) { + log.warn(`Manifest: ${newTracks.length} new Tracks ` + "found when merging."); + for (const trak of newTracks) { + const prevTracks = oldPeriod.tracksMetadata[trak.trackType]; + prevTracks[trak.id] = trak; + res.addedTracks.push(trak.getMetadataSnapshot()); } } return res; @@ -141,23 +135,23 @@ export default function updatePeriodInPlace( * Period. */ export interface IUpdatedPeriodResult { - /** Information on Adaptations that have been updated. */ - updatedAdaptations: Array<{ + /** Information on tracks that have been updated. */ + updatedTracks: Array<{ trackType: ITrackType; - /** The concerned Adaptation. */ - adaptation: string; + /** The concerned tracks. */ + track: string; /** Representations that have been updated. */ updatedRepresentations: IRepresentationMetadata[]; - /** Representations that have been removed from the Adaptation. */ + /** Representations that have been removed from the track. */ removedRepresentations: string[]; - /** Representations that have been added to the Adaptation. */ + /** Representations that have been added to the track. */ addedRepresentations: IRepresentationMetadata[]; }>; - /** Adaptation that have been removed from the Period. */ - removedAdaptations: Array<{ + /** Tracks that have been removed from the Period. */ + removedTracks: Array<{ id: string; trackType: ITrackType; }>; - /** Adaptation that have been added to the Period. */ - addedAdaptations: IAdaptationMetadata[]; + /** Tracks that have been added to the Period. */ + addedTracks: ITrackMetadata[]; } diff --git a/src/manifest/classes/utils.ts b/src/manifest/classes/utils.ts index 1fc3f36085..7d0bd8af7e 100644 --- a/src/manifest/classes/utils.ts +++ b/src/manifest/classes/utils.ts @@ -15,14 +15,14 @@ */ import isNullOrUndefined from "../../utils/is_null_or_undefined"; -import type Adaptation from "./adaptation"; +import type { ITrackMetadata } from "../types"; import type Period from "./period"; import type Representation from "./representation"; import type { ISegment } from "./representation_index"; /** All information needed to identify a given segment. 
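As a hedged illustration, here is how the renamed per-Period update summary could be consumed, relying only on the `IUpdatedPeriodResult` shape declared above; the helper and its import path are hypothetical:

```ts
import type { IUpdatedPeriodResult } from "./update_period_in_place";

// Hypothetical consumer: summarizes what changed in a Period after a refresh.
function describePeriodUpdate(res: IUpdatedPeriodResult): string {
  const updated = res.updatedTracks
    .map(
      (t) =>
        `${t.track} (+${t.addedRepresentations.length}` +
        `/-${t.removedRepresentations.length} Representations)`,
    )
    .join(", ");
  const removed = res.removedTracks.map((t) => t.id).join(", ");
  const added = res.addedTracks.map((t) => t.id).join(", ");
  return `updated: [${updated}] removed: [${removed}] added: [${added}]`;
}
```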
*/ export interface IBufferedChunkInfos { - adaptation: Adaptation; + track: ITrackMetadata; period: Period; representation: Representation; segment: ISegment; @@ -55,7 +55,7 @@ export function getLoggableSegmentId( if (isNullOrUndefined(content)) { return ""; } - const { period, adaptation, representation, segment } = content; + const { period, track, representation, segment } = content; let segmentString; if (segment.isInit) { segmentString = "init"; @@ -65,7 +65,7 @@ export function getLoggableSegmentId( segmentString = `${segment.time}`; } return ( - `${adaptation.type} P: ${period.id} A: ${adaptation.id} ` + + `${track.trackType} P: ${period.id} A: ${track.id} ` + `R: ${representation.id} S: ${segmentString}` ); } diff --git a/src/manifest/index.ts b/src/manifest/index.ts index b42643cefd..15011756ac 100644 --- a/src/manifest/index.ts +++ b/src/manifest/index.ts @@ -2,7 +2,7 @@ import type { IDecipherabilityUpdateElement, ICodecSupportInfo, Period, - Adaptation, + Track, Representation, ISegment, IPeriodsUpdateResult, @@ -19,8 +19,8 @@ export type IManifest = Manifest; /** Type of a `Period` class. */ export type IPeriod = Period; -/** Type of an `Adaptation` class. */ -export type IAdaptation = Adaptation; +/** Type of an `Track` class. */ +export type ITrack = Track; /** Type of a `Representation` class. */ export type IRepresentation = Representation; @@ -38,7 +38,8 @@ export { areSameContent, getLoggableSegmentId }; export type { IManifestMetadata, IPeriodMetadata, - IAdaptationMetadata, + IVariantStreamMetadata, + ITrackMetadata, IRepresentationMetadata, } from "./types"; export { ManifestMetadataFormat } from "./types"; diff --git a/src/manifest/types.ts b/src/manifest/types.ts index 0edc8e4140..0ec9f9c514 100644 --- a/src/manifest/types.ts +++ b/src/manifest/types.ts @@ -250,8 +250,27 @@ export interface IPeriodMetadata { * `undefined` for still-running Periods. */ duration?: number | undefined; - /** Every 'Adaptation' in that Period, per type of Adaptation. */ - adaptations: Partial>; + /** + * Complete information about all tracks combinations available for that + * Period. + * + * Each element in that array is an object which describes a particular + * track combination that should be considered under given conditions (the + * main one being the user's bandwidth). + * + * If what you want is a description of the available tracks for that content, + * look at `tracksMetadata` instead. + * + * Note that many transport protocols do not have that variant stream concept + * at this level and as such have only a single object in that array. This is + * for example the case with DASH and Smooth (which both rely on the user's + * bandwidth **AFTER** a track combination has already been selected), in + * contrast with HLS under which you have a high chance of having several + * objects in that array. + */ + variantStreams: IVariantStreamMetadata[]; + /** Description of all "tracks" available in this Period. */ + tracksMetadata: Record>; /** Array containing every stream event happening on the period */ streamEvents: IManifestStreamEvent[]; } @@ -260,7 +279,7 @@ export interface IPeriodMetadata { * Object describing the global support state for an Adaptation's * Representations. */ -export interface IAdaptationSupportStatus { +export interface ITrackSupportStatus { /** * `true` if at least one of its Representation has a codecs currently * supported. 
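Since `variantStreams` only references tracks and Representations by `id`, consumers have to resolve those ids through `tracksMetadata`. A minimal sketch of that resolution using the metadata types exported above; the helper name and import path are illustrative:

```ts
import type {
  IPeriodMetadata,
  ITrackMetadata,
  IVariantStreamMetadata,
} from "../manifest";

// Hypothetical helper: returns the metadata of every video track that a given
// variant stream allows, by resolving its id-based `media` references.
function getVideoTracksOfVariant(
  period: IPeriodMetadata,
  variant: IVariantStreamMetadata,
): ITrackMetadata[] {
  const resolved: ITrackMetadata[] = [];
  for (const media of variant.media.video) {
    const track = period.tracksMetadata.video[media.linkedTrack];
    if (track !== undefined) {
      resolved.push(track);
    }
  }
  return resolved;
}
```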
@@ -286,11 +305,18 @@
   isDecipherable: boolean | undefined;
 }
 
-export interface IAdaptationMetadata {
-  /** ID uniquely identifying the Adaptation in the Period. */
+export interface ITrackMetadata {
+  /** ID uniquely identifying this track. */
   id: string;
-  /** Type of this Adaptation. */
-  type: ITrackType;
+  /** The type of this track. */
+  trackType: ITrackType;
+  /** The variant streams this track is part of. */
+  inVariantStreams: string[];
+  /**
+   * Object describing the global support state for that track's
+   * Representations.
+   */
+  supportStatus: ITrackSupportStatus;
   /** Language this Adaptation is in, as announced in the original Manifest. */
   language?: string | undefined;
   /** Whether this Adaptation contains closed captions for the hard-of-hearing. */
@@ -306,18 +332,10 @@ export interface IAdaptationMetadata {
    * covered in the dubbed/localized audio Adaptation.
    */
   isForcedSubtitles?: boolean | undefined;
-  /**
-   * Object describing the global support state for that Adaptation's
-   * Representations.
-   */
-  supportStatus: IAdaptationSupportStatus;
   /** Language this Adaptation is in, when translated into an ISO639-3 code. */
   normalizedLanguage?: string | undefined;
-  /**
-   * Different `Representations` (e.g. qualities) this Adaptation is available
-   * in.
-   */
-  representations: IRepresentationMetadata[];
+  /** Qualities available for this track, identified by their `id` property. */
+  representations: Record<string, IRepresentationMetadata>;
   /** Label of the adaptionSet */
   label?: string | undefined;
   /**
@@ -326,7 +344,7 @@
    */
   isDub?: boolean | undefined;
   /** Tells if the track is a trick mode track. */
-  trickModeTracks?: IAdaptationMetadata[] | undefined;
+  trickModeTracks?: ITrackMetadata[] | undefined;
   /** Tells if the track is a trick mode track. */
   isTrickModeTrack?: boolean | undefined;
 }
@@ -433,3 +451,31 @@ export interface IRepresentationMetadata {
   /** Encryption information for this Representation. */
   contentProtections?: IContentProtections | undefined;
 }
+
+/**
+ * A "variant stream" is a grouping of tracks that may be selected together
+ * in specific conditions.
+ */
+export interface IVariantStreamMetadata {
+  /** Identifier for that variant stream. */
+  id: string;
+  /**
+   * Identifies a bandwidth floor from which that variant stream should be selected.
+   * `undefined` if no such consideration needs to be done for that variant stream.
+   *
+   * Note: bandwidth considerations may also exist at the Representation level.
+   */
+  bandwidth: number | undefined;
+  /** Audio, video and text media existing for that variant stream. */
+  media: Record<ITrackType, IMediaMetadata[]>;
+}
+
+export interface IMediaMetadata {
+  /** Id of the "track" this media is a part of. */
+  linkedTrack: string;
+  /**
+   * The `Representations` (e.g. qualities) this media is available in,
+   * defined by their id.
+ */ + representations: string[]; +} diff --git a/src/manifest/utils.ts b/src/manifest/utils.ts index 053ad2e077..4ce109d49f 100644 --- a/src/manifest/utils.ts +++ b/src/manifest/utils.ts @@ -1,32 +1,37 @@ import type { IProcessedProtectionData } from "../main_thread/types"; -import type { IManifest, IPeriod, IAdaptation, IPeriodsUpdateResult } from "../manifest"; +import type { + IManifest, + IPeriod, + IPeriodsUpdateResult, + IRepresentation, + ITrack, +} from "../manifest"; import type { IAudioRepresentation, + IVideoRepresentation, IAudioTrack, IRepresentationFilter, ITextTrack, ITrackType, - IVideoRepresentation, IVideoTrack, } from "../public_types"; import areArraysOfNumbersEqual from "../utils/are_arrays_of_numbers_equal"; import arrayFind from "../utils/array_find"; -import isNullOrUndefined from "../utils/is_null_or_undefined"; import getMonotonicTimeStamp from "../utils/monotonic_timestamp"; import { objectValues } from "../utils/object_values"; import type { - IAdaptationMetadata, IManifestMetadata, IPeriodMetadata, IRepresentationMetadata, + ITrackMetadata, } from "./types"; -/** List in an array every possible value for the Adaptation's `type` property. */ -export const SUPPORTED_ADAPTATIONS_TYPE: ITrackType[] = ["audio", "video", "text"]; +/** List in an array every possible value for the track's `trackType` property. */ +export const SUPPORTED_TRACK_TYPE: ITrackType[] = ["audio", "video", "text"]; /** * Returns the theoretical minimum playable position on the content - * regardless of the current Adaptation chosen, as estimated at parsing + * regardless of the current track chosen, as estimated at parsing * time. * @param {Object} manifest * @returns {number} @@ -69,7 +74,7 @@ export function getLivePosition(manifest: IManifestMetadata): number | undefined /** * Returns the theoretical maximum playable position on the content - * regardless of the current Adaptation chosen, as estimated at parsing + * regardless of the current track chosen, as estimated at parsing * time. * @param {Object} manifest * @returns {number} @@ -84,39 +89,35 @@ export function getMaximumSafePosition(manifest: IManifestMetadata): number { } /** - * Returns Adaptations that contain supported Representation(s). - * @param {string|undefined} type - If set filter on a specific Adaptation's + * Returns Tracks that contain supported Representation(s). + * @param {string|undefined} type - If set filter on a specific Track's * type. Will return for all types if `undefined`. 
- * @returns {Array.} + * @returns {Array.} */ -export function getSupportedAdaptations( +export function getSupportedTracks( period: IPeriod, type?: ITrackType | undefined, -): IAdaptation[]; -export function getSupportedAdaptations( +): ITrack[]; +export function getSupportedTracks( period: IPeriodMetadata, type?: ITrackType | undefined, -): IAdaptationMetadata[]; -export function getSupportedAdaptations( +): ITrackMetadata[]; +export function getSupportedTracks( period: IPeriod | IPeriodMetadata, type?: ITrackType | undefined, -): IAdaptationMetadata[] | IAdaptation[] { +): ITrackMetadata[] | ITrack[] { if (type === undefined) { - return getAdaptations(period).filter((ada) => { + return getTrackList(period).filter((trk) => { return ( - ada.supportStatus.hasSupportedCodec !== false && - ada.supportStatus.isDecipherable !== false + trk.supportStatus.hasSupportedCodec !== false && + trk.supportStatus.isDecipherable !== false ); }); } - const adaptationsForType = period.adaptations[type]; - if (adaptationsForType === undefined) { - return []; - } - return adaptationsForType.filter((ada) => { + return getTrackListForType(period, type).filter((trk) => { return ( - ada.supportStatus.hasSupportedCodec !== false && - ada.supportStatus.isDecipherable !== false + trk.supportStatus.hasSupportedCodec !== false && + trk.supportStatus.isDecipherable !== false ); }); } @@ -167,8 +168,8 @@ export function getPeriodAfter( if (endOfPeriod === undefined) { return null; } - const nextPeriod = arrayFind(manifest.periods, (_period) => { - return _period.end === undefined || endOfPeriod < _period.end; + const nextPeriod = arrayFind(manifest.periods, (p) => { + return p.end === undefined || endOfPeriod < p.end; }); return nextPeriod === undefined ? null : nextPeriod; } @@ -202,118 +203,120 @@ export function periodContainsTime( return false; } -/** - * Returns every `Adaptations` (or `tracks`) linked to that Period, in an - * Array. - * @returns {Array.} - */ -export function getAdaptations(period: IPeriod): IAdaptation[]; -export function getAdaptations(period: IPeriodMetadata): IAdaptationMetadata[]; -export function getAdaptations( - period: IPeriodMetadata | IPeriod, -): IAdaptationMetadata[] | IAdaptation[] { - const adaptationsByType = period.adaptations; - return objectValues(adaptationsByType).reduce( - // Note: the second case cannot happen. TS is just being dumb here - (acc, adaptations) => - !isNullOrUndefined(adaptations) ? acc.concat(adaptations) : acc, - [], - ); -} +// /** +// * Returns every `Tracks` (or `tracks`) linked to that Period, in an +// * Array. +// * @returns {Array.} +// */ +// export function getTracks(period: IPeriod): ITrack[]; +// export function getTracks(period: IPeriodMetadata): ITrackMetadata[]; +// export function getTracks( +// period: IPeriodMetadata | IPeriod, +// ): ITrackMetadata[] | ITrack[] { +// const tracksByType = period.tracks; +// return objectValues(tracksByType).reduce( +// // Note: the second case cannot happen. TS is just being dumb here +// (acc, tracks) => +// !isNullOrUndefined(tracks) ? acc.concat(tracks) : acc, +// [], +// ); +// } /** - * Format an `Adaptation`, generally of type `"audio"`, as an `IAudioTrack`. - * @param {Object} adaptation + * Format an audio track as an `IAudioTrack`. + * @param {Object} track * @param {boolean} filterPlayable - If `true` only "playable" Representation * will be returned. 
* @returns {Object} */ export function toAudioTrack( - adaptation: IAdaptationMetadata, + track: ITrackMetadata, filterPlayable: boolean, ): IAudioTrack { + const representations: IRepresentationMetadata[] = objectValues(track.representations); const formatted: IAudioTrack = { - language: adaptation.language ?? "", - normalized: adaptation.normalizedLanguage ?? "", - audioDescription: adaptation.isAudioDescription === true, - id: adaptation.id, + language: track.language ?? "", + normalized: track.normalizedLanguage ?? "", + audioDescription: track.isAudioDescription === true, + id: track.id, representations: (filterPlayable - ? adaptation.representations.filter( - (r) => r.isSupported === true && r.decipherable !== false, - ) - : adaptation.representations + ? representations.filter((r) => r.isSupported === true && r.decipherable !== false) + : representations ).map(toAudioRepresentation), - label: adaptation.label, + label: track.label, }; - if (adaptation.isDub === true) { + if (track.isDub === true) { formatted.dub = true; } return formatted; } /** - * Format an `Adaptation`, generally of type `"audio"`, as an `IAudioTrack`. - * @param {Object} adaptation + * Format a text track as an `ITextTrack`. + * @param {Object} track * @returns {Object} */ -export function toTextTrack(adaptation: IAdaptationMetadata): ITextTrack { +export function toTextTrack(track: ITrackMetadata): ITextTrack { return { - language: adaptation.language ?? "", - normalized: adaptation.normalizedLanguage ?? "", - closedCaption: adaptation.isClosedCaption === true, - id: adaptation.id, - label: adaptation.label, - forced: adaptation.isForcedSubtitles, + language: track.language ?? "", + normalized: track.normalizedLanguage ?? "", + closedCaption: track.isClosedCaption === true, + id: track.id, + label: track.label, + forced: track.isForcedSubtitles, }; } /** - * Format an `Adaptation`, generally of type `"video"`, as an `IAudioTrack`. - * @param {Object} adaptation + * Format a video track as an `IVideoTrack`. + * @param {Object} track * @param {boolean} filterPlayable - If `true` only "playable" Representation * will be returned. * @returns {Object} */ export function toVideoTrack( - adaptation: IAdaptationMetadata, + track: ITrackMetadata, filterPlayable: boolean, ): IVideoTrack { const trickModeTracks = - adaptation.trickModeTracks !== undefined - ? adaptation.trickModeTracks.map((trickModeAdaptation) => { + track.trickModeTracks !== undefined + ? track.trickModeTracks.map((trickModeTrack) => { + const tmRepresentations: IRepresentationMetadata[] = objectValues( + trickModeTrack.representations, + ); const representations = ( filterPlayable - ? trickModeAdaptation.representations.filter( - (r) => r.isSupported === true && r.decipherable !== false, + ? tmRepresentations.filter( + (r: IRepresentationMetadata) => + r.isSupported === true && r.decipherable !== false, ) - : trickModeAdaptation.representations + : tmRepresentations ).map(toVideoRepresentation); const trickMode: IVideoTrack = { - id: trickModeAdaptation.id, + id: trickModeTrack.id, representations, isTrickModeTrack: true, }; - if (trickModeAdaptation.isSignInterpreted === true) { + if (trickModeTrack.isSignInterpreted === true) { trickMode.signInterpreted = true; } return trickMode; }) : undefined; + const representations = objectValues(track.representations); const videoTrack: IVideoTrack = { - id: adaptation.id, + id: track.id, representations: (filterPlayable - ? 
adaptation.representations.filter( - (r) => r.isSupported === true && r.decipherable !== false, - ) - : adaptation.representations + ? representations.filter((r) => r.isSupported === true && r.decipherable !== false) + : representations ).map(toVideoRepresentation), - label: adaptation.label, + label: track.label, }; - if (adaptation.isSignInterpreted === true) { + if (track.isSignInterpreted === true) { videoTrack.signInterpreted = true; } - if (adaptation.isTrickModeTrack === true) { + if (track.isTrickModeTrack === true) { videoTrack.isTrickModeTrack = true; } if (trickModeTracks !== undefined) { @@ -372,14 +375,14 @@ function toVideoRepresentation( }; } -export function toTaggedTrack(adaptation: IAdaptation): ITaggedTrack { - switch (adaptation.type) { +export function toTaggedTrack(track: ITrackMetadata): ITaggedTrack { + switch (track.trackType) { case "audio": - return { type: "audio", track: toAudioTrack(adaptation, false) }; + return { type: "audio", track: toAudioTrack(track, false) }; case "video": - return { type: "video", track: toVideoTrack(adaptation, false) }; + return { type: "video", track: toVideoTrack(track, false) }; case "text": - return { type: "text", track: toTextTrack(adaptation) }; + return { type: "text", track: toTextTrack(track) }; } } @@ -389,7 +392,7 @@ export function toTaggedTrack(adaptation: IAdaptation): ITaggedTrack { export interface IDecipherabilityStatusChangedElement { manifest: IManifestMetadata; period: IPeriodMetadata; - adaptation: IAdaptationMetadata; + track: ITrackMetadata; representation: IRepresentationMetadata; } @@ -502,34 +505,28 @@ function updateRepresentationsDeciperability( ): IDecipherabilityStatusChangedElement[] { const updates: IDecipherabilityStatusChangedElement[] = []; for (const period of manifest.periods) { - const adaptationsByType = period.adaptations; - const adaptations = objectValues(adaptationsByType).reduce( - // Note: the second case cannot happen. TS is just being dumb here - (acc, adaps) => (!isNullOrUndefined(adaps) ? 
acc.concat(adaps) : acc), - [], - ); - for (const adaptation of adaptations) { + for (const track of getTrackList(period)) { let hasOnlyUndecipherableRepresentations = true; - for (const representation of adaptation.representations) { + for (const representation of objectValues(track.representations)) { const result = isDecipherable(representation); if (result !== false) { hasOnlyUndecipherableRepresentations = false; } if (result !== representation.decipherable) { if (result === true) { - adaptation.supportStatus.isDecipherable = true; + track.supportStatus.isDecipherable = true; } else if ( result === undefined && - adaptation.supportStatus.isDecipherable === false + track.supportStatus.isDecipherable === false ) { - adaptation.supportStatus.isDecipherable = undefined; + track.supportStatus.isDecipherable = undefined; } - updates.push({ manifest, period, adaptation, representation }); + updates.push({ manifest, period, track, representation }); representation.decipherable = result; } } if (hasOnlyUndecipherableRepresentations) { - adaptation.supportStatus.isDecipherable = false; + track.supportStatus.isDecipherable = false; } } } @@ -555,7 +552,6 @@ export function replicateUpdatesOnManifestMetadata( (baseManifest as any)[prop] = (newManifest as any)[prop]; } } - for (const removedPeriod of updates.removedPeriods) { for (let periodIdx = 0; periodIdx < baseManifest.periods.length; periodIdx++) { if (baseManifest.periods[periodIdx].id === removedPeriod.id) { @@ -564,97 +560,66 @@ export function replicateUpdatesOnManifestMetadata( } } } - for (const updatedPeriod of updates.updatedPeriods) { for (let periodIdx = 0; periodIdx < baseManifest.periods.length; periodIdx++) { const newPeriod = updatedPeriod.period; if (baseManifest.periods[periodIdx].id === updatedPeriod.period.id) { const basePeriod = baseManifest.periods[periodIdx]; for (const prop of Object.keys(newPeriod)) { - if (prop !== "adaptations") { + if (prop !== "tracks") { // eslint-disable-next-line (basePeriod as any)[prop] = (newPeriod as any)[prop]; } } - - for (const removedAdaptation of updatedPeriod.result.removedAdaptations) { - const ttype = removedAdaptation.trackType; - const adaptationsForType = basePeriod.adaptations[ttype] ?? []; - for (let adapIdx = 0; adapIdx < adaptationsForType.length; adapIdx++) { - if (adaptationsForType[adapIdx].id === removedAdaptation.id) { - adaptationsForType.splice(adapIdx, 1); - break; - } + for (const removedTrack of updatedPeriod.result.removedTracks) { + const ttype = removedTrack.trackType; + if (basePeriod.tracksMetadata[ttype][removedTrack.id] !== undefined) { + delete basePeriod.tracksMetadata[ttype][removedTrack.id]; } } - - for (const updatedAdaptation of updatedPeriod.result.updatedAdaptations) { - const newAdaptation = updatedAdaptation.adaptation; - const ttype = updatedAdaptation.trackType; - const adaptationsForType = basePeriod.adaptations[ttype] ?? 
[]; - for (let adapIdx = 0; adapIdx < adaptationsForType.length; adapIdx++) { - if (adaptationsForType[adapIdx].id === newAdaptation) { - const baseAdaptation = adaptationsForType[adapIdx]; - for (const removedRepresentation of updatedAdaptation.removedRepresentations) { - for ( - let repIdx = 0; - repIdx < baseAdaptation.representations.length; - repIdx++ - ) { - if ( - baseAdaptation.representations[repIdx].id === removedRepresentation - ) { - baseAdaptation.representations.splice(repIdx, 1); - break; - } + for (const updatedTrack of updatedPeriod.result.updatedTracks) { + const newTrack = updatedTrack.track; + const ttype = updatedTrack.trackType; + const tracksForType = objectValues(basePeriod.tracksMetadata[ttype] ?? {}); + for (let trkIdx = 0; trkIdx < tracksForType.length; trkIdx++) { + if (tracksForType[trkIdx].id === newTrack) { + const baseTrack = tracksForType[trkIdx]; + for (const removedRepresentation of updatedTrack.removedRepresentations) { + if (baseTrack.representations[removedRepresentation] !== undefined) { + delete baseTrack.representations[removedRepresentation]; } } - for (const newRepresentation of updatedAdaptation.updatedRepresentations) { - for ( - let repIdx = 0; - repIdx < baseAdaptation.representations.length; - repIdx++ - ) { - if ( - baseAdaptation.representations[repIdx].id === newRepresentation.id - ) { - const baseRepresentation = baseAdaptation.representations[repIdx]; - for (const prop of Object.keys(newRepresentation) as Array< - keyof IRepresentationMetadata - >) { - if (prop !== "decipherable") { - // eslint-disable-next-line - (baseRepresentation as any)[prop] = newRepresentation[prop]; - } + for (const newRepresentation of updatedTrack.updatedRepresentations) { + const baseRepresentation = + baseTrack.representations[newRepresentation.id]; + if (baseRepresentation !== undefined) { + for (const prop of Object.keys(newRepresentation) as Array< + keyof IRepresentationMetadata + >) { + if (prop !== "decipherable") { + // eslint-disable-next-line + (baseRepresentation as any)[prop] = newRepresentation[prop]; } - break; } } } - for (const addedRepresentation of updatedAdaptation.addedRepresentations) { - baseAdaptation.representations.push(addedRepresentation); + for (const addedRepresentation of updatedTrack.addedRepresentations) { + baseTrack.representations[addedRepresentation.id] = addedRepresentation; } break; } } } - - for (const addedAdaptation of updatedPeriod.result.addedAdaptations) { - const ttype = addedAdaptation.type; - const adaptationsForType = basePeriod.adaptations[ttype]; - if (adaptationsForType === undefined) { - basePeriod.adaptations[ttype] = [addedAdaptation]; - } else { - adaptationsForType.push(addedAdaptation); - } + for (const addedTrack of updatedPeriod.result.addedTracks) { + const ttype = addedTrack.trackType; + basePeriod.tracksMetadata[ttype][addedTrack.id] = addedTrack; } break; } } } - for (const addedPeriod of updates.addedPeriods) { for (let periodIdx = 0; periodIdx < baseManifest.periods.length; periodIdx++) { if (baseManifest.periods[periodIdx].start > addedPeriod.start) { @@ -666,6 +631,31 @@ export function replicateUpdatesOnManifestMetadata( } } +export function getTrackListForType(period: IPeriod, trackType: ITrackType): ITrack[]; +export function getTrackListForType( + period: IPeriodMetadata, + trackType: ITrackType, +): ITrackMetadata[]; +export function getTrackListForType( + period: IPeriodMetadata | IPeriod, + trackType: ITrackType, +): ITrackMetadata[] { + const trackRecord: Record = 
period.tracksMetadata[trackType]; + return objectValues(trackRecord); +} + +export function getTrackList(period: IPeriod): ITrack[]; +export function getTrackList(period: IPeriodMetadata): ITrackMetadata[]; +export function getTrackList( + period: IPeriodMetadata | IPeriod, +): ITrackMetadata[] | ITrack[] { + return [ + ...objectValues(period.tracksMetadata.audio), + ...objectValues(period.tracksMetadata.video), + ...objectValues(period.tracksMetadata.text), + ]; +} + export function createRepresentationFilterFromFnString( fnString: string, ): IRepresentationFilter { @@ -675,6 +665,16 @@ export function createRepresentationFilterFromFnString( ) as IRepresentationFilter; } +/** + * @param {Object} representation + * @returns {string} + */ +export function getMimeTypeString( + representation: IRepresentation | IRepresentationMetadata, +): string { + return `${representation.mimeType ?? ""};codecs="${representation.codecs?.[0] ?? ""}"`; +} + interface ITaggedAudioTrack { type: "audio"; track: IAudioTrack; diff --git a/src/multithread_types.ts b/src/multithread_types.ts index a9460942cd..a3d7368b53 100644 --- a/src/multithread_types.ts +++ b/src/multithread_types.ts @@ -226,7 +226,7 @@ export interface IStartPreparedContentMessageValue { /** Behavior when a new video and/or audio codec is encountered. */ onCodecSwitch: "continue" | "reload"; - // TODO prepare chosen Adaptations here? + // TODO prepare chosen tracks here? // In which case the Period's `id` should probably be given instead of the // `initialTime` } @@ -339,15 +339,15 @@ export interface ITrackUpdateMessage { } export interface ITrackUpdateChoiceObject { - /** The Adaptation choosen. */ - adaptationId: string; + /** The track choosen. */ + trackId: string; /** "Switching mode" in which the track switch should happen. */ switchingMode: ITrackSwitchingMode; /** * Shared reference allowing to indicate which Representations from - * that Adaptation are allowed. + * that track are allowed. 
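As a hedged usage sketch, the small helpers now exported from `src/manifest/utils.ts` can be combined to dump codec strings such as `video/mp4;codecs="avc1.640028"` for a Period; only signatures visible in this diff are relied upon, and the wrapper itself is hypothetical:

```ts
import { getMimeTypeString, getTrackListForType } from "./utils";
import type { IPeriodMetadata } from "./types";
import { objectValues } from "../utils/object_values";

// Hypothetical debug helper: lists the full mime-type string of every video
// Representation available in a Period.
function listVideoCodecStrings(period: IPeriodMetadata): string[] {
  return getTrackListForType(period, "video").flatMap((track) =>
    objectValues(track.representations).map(getMimeTypeString),
  );
}
```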
*/ initialRepresentations: IRepresentationsChoice; @@ -361,7 +361,7 @@ export interface IRepresentationUpdateMessage { value: { periodId: string; bufferType: ITrackType; - adaptationId: string; + trackId: string; choice: IRepresentationsChoice; }; } @@ -741,11 +741,11 @@ export interface IDisposeMediaSourceWorkerMessage { value: null; } -export interface IAdaptationChangeWorkerMessage { - type: WorkerMessageType.AdaptationChanged; +export interface ITrackChangeWorkerMessage { + type: WorkerMessageType.TrackChanged; contentId: string; value: { - adaptationId: string | null; + trackId: string | null; periodId: string; type: ITrackType; }; @@ -755,7 +755,7 @@ export interface IRepresentationChangeWorkerMessage { type: WorkerMessageType.RepresentationChanged; contentId: string; value: { - adaptationId: string; + trackId: string; representationId: string | null; periodId: string; type: ITrackType; @@ -935,7 +935,7 @@ export interface ISegmentSinkStoreUpdateMessage { export const enum WorkerMessageType { AbortSourceBuffer = "abort-source-buffer", ActivePeriodChanged = "active-period-changed", - AdaptationChanged = "adaptation-changed", + TrackChanged = "track-changed", AddSourceBuffer = "add-source-buffer", AttachMediaSource = "attach-media-source", BitrateEstimateChange = "bitrate-estimate-change", @@ -975,7 +975,7 @@ export const enum WorkerMessageType { export type IWorkerMessage = | IAbortBufferWorkerMessage | IActivePeriodChangedWorkerMessage - | IAdaptationChangeWorkerMessage + | ITrackChangeWorkerMessage | IAddSourceBufferWorkerMessage | IPushTextDataWorkerMessage | IAppendBufferWorkerMessage diff --git a/src/parsers/manifest/dash/common/__tests__/attach_trickmode_track.test.ts b/src/parsers/manifest/dash/common/__tests__/attach_trickmode_track.test.ts index 980c58cafe..397cc4d1d8 100644 --- a/src/parsers/manifest/dash/common/__tests__/attach_trickmode_track.test.ts +++ b/src/parsers/manifest/dash/common/__tests__/attach_trickmode_track.test.ts @@ -1,48 +1,48 @@ import { describe, it, expect } from "vitest"; -import type { IParsedAdaptations, IParsedAdaptation } from "../../../types"; +import type { IParsedTrack } from "../../../types"; import attachTrickModeTrack from "../attach_trickmode_track"; describe("attachTrickModeTrack", () => { it("should correclty attach trickmode tracks", () => { const trickModeTracks = [ { - adaptation: { type: "video" }, - trickModeAttachedAdaptationIds: ["1", "3"], + track: { type: "video" }, + trickModeAttachedTrackIds: ["1", "3"], }, - { adaptation: { type: "audio" }, trickModeAttachedAdaptationIds: ["1"] }, + { track: { type: "audio" }, trickModeAttachedTrackIds: ["1"] }, ] as Array<{ - adaptation: IParsedAdaptation; - trickModeAttachedAdaptationIds: string[]; + track: IParsedTrack; + trickModeAttachedTrackIds: string[]; }>; - const adaptations = { - video: [ - { id: "1", trickModeTracks: undefined }, - { id: "2", trickModeTracks: undefined }, - { id: "3", trickModeTracks: undefined }, - { id: "4", trickModeTracks: undefined }, - ], - audio: [ - { id: "1", trickModeTracks: undefined }, - { id: "2", trickModeTracks: undefined }, - { id: "3", trickModeTracks: undefined }, - ], - } as unknown as IParsedAdaptations; + const tracks = { + video: { + ["1"]: { id: "1", trickModeTracks: undefined }, + ["2"]: { id: "2", trickModeTracks: undefined }, + ["3"]: { id: "3", trickModeTracks: undefined }, + ["4"]: { id: "4", trickModeTracks: undefined }, + }, + audio: { + ["1"]: { id: "1", trickModeTracks: undefined }, + ["2"]: { id: "2", trickModeTracks: undefined }, + 
["3"]: { id: "3", trickModeTracks: undefined }, + }, + } as unknown as Record<"audio" | "video" | "text", Record>; - attachTrickModeTrack(adaptations, trickModeTracks); + attachTrickModeTrack(tracks, trickModeTracks); - expect(adaptations).toEqual({ - video: [ - { id: "1", trickModeTracks: [{ type: "video" }, { type: "audio" }] }, - { id: "2", trickModeTracks: undefined }, - { id: "3", trickModeTracks: [{ type: "video" }] }, - { id: "4", trickModeTracks: undefined }, - ], - audio: [ - { id: "1", trickModeTracks: [{ type: "video" }, { type: "audio" }] }, - { id: "2", trickModeTracks: undefined }, - { id: "3", trickModeTracks: [{ type: "video" }] }, - ], + expect(tracks).toEqual({ + video: { + ["1"]: { id: "1", trickModeTracks: [{ type: "video" }, { type: "audio" }] }, + ["2"]: { id: "2", trickModeTracks: undefined }, + ["3"]: { id: "3", trickModeTracks: [{ type: "video" }] }, + ["4"]: { id: "4", trickModeTracks: undefined }, + }, + audio: { + ["1"]: { id: "1", trickModeTracks: [{ type: "video" }, { type: "audio" }] }, + ["2"]: { id: "2", trickModeTracks: undefined }, + ["3"]: { id: "3", trickModeTracks: [{ type: "video" }] }, + }, }); }); }); diff --git a/src/parsers/manifest/dash/common/attach_trickmode_track.ts b/src/parsers/manifest/dash/common/attach_trickmode_track.ts index 1680ada5e2..cdaf2c6178 100644 --- a/src/parsers/manifest/dash/common/attach_trickmode_track.ts +++ b/src/parsers/manifest/dash/common/attach_trickmode_track.ts @@ -14,35 +14,35 @@ * limitations under the License. */ -import { SUPPORTED_ADAPTATIONS_TYPE } from "../../../../manifest"; -import type { IParsedAdaptation, IParsedAdaptations } from "../../types"; +import { SUPPORTED_TRACK_TYPE } from "../../../../manifest"; +import type { IParsedTrack } from "../../types"; /** - * Attach trick mode tracks to adaptations by assigning to the trickModeTracks - * property an array of trick mode track adaptations. - * @param {Object} adaptations + * Attach trick mode tracks to regular tracks by assigning to the trickModeTracks + * property an array of trick mode tracks. 
+ * @param {Object} allTracks * @param {Array.} trickModeTracks * @returns {void} */ function attachTrickModeTrack( - adaptations: IParsedAdaptations, + allTracks: Record<"audio" | "video" | "text", IParsedTrack[]>, trickModeTracks: Array<{ - adaptation: IParsedAdaptation; - trickModeAttachedAdaptationIds: string[]; + track: IParsedTrack; + trickModeAttachedTrackIds: string[]; }>, ): void { - for (const track of trickModeTracks) { - const { adaptation, trickModeAttachedAdaptationIds } = track; - for (const trickModeAttachedAdaptationId of trickModeAttachedAdaptationIds) { - for (const adaptationType of SUPPORTED_ADAPTATIONS_TYPE) { - const adaptationsByType = adaptations[adaptationType]; - if (adaptationsByType !== undefined) { - for (const adaptationByType of adaptationsByType) { - if (adaptationByType.id === trickModeAttachedAdaptationId) { - if (adaptationByType.trickModeTracks === undefined) { - adaptationByType.trickModeTracks = []; + for (const tmTrack of trickModeTracks) { + const { track, trickModeAttachedTrackIds } = tmTrack; + for (const trickModeAttachedTrackId of trickModeAttachedTrackIds) { + for (const trackType of SUPPORTED_TRACK_TYPE) { + const tracksByType = allTracks[trackType]; + if (tracksByType !== undefined) { + for (const trackByType of tracksByType) { + if (trackByType.id === trickModeAttachedTrackId) { + if (trackByType.trickModeTracks === undefined) { + trackByType.trickModeTracks = []; } - adaptationByType.trickModeTracks.push(adaptation); + trackByType.trickModeTracks.push(track); } } } diff --git a/src/parsers/manifest/dash/common/infer_adaptation_type.ts b/src/parsers/manifest/dash/common/infer_adaptation_type.ts index ea725105f2..a27719f071 100644 --- a/src/parsers/manifest/dash/common/infer_adaptation_type.ts +++ b/src/parsers/manifest/dash/common/infer_adaptation_type.ts @@ -14,14 +14,12 @@ * limitations under the License. */ -import { SUPPORTED_ADAPTATIONS_TYPE } from "../../../../manifest"; +import { SUPPORTED_TRACK_TYPE } from "../../../../manifest"; +import type { ITrackType } from "../../../../public_types"; import arrayFind from "../../../../utils/array_find"; import arrayIncludes from "../../../../utils/array_includes"; import type { IRepresentationIntermediateRepresentation } from "../node_parser_types"; -/** Different "type" a parsed Adaptation can be. */ -type IAdaptationType = "audio" | "video" | "text"; - /** Different `role`s a text Adaptation can be. 
*/ const SUPPORTED_TEXT_TYPES = ["subtitle", "caption"]; @@ -53,19 +51,14 @@ export default function inferAdaptationType( adaptationMimeType: string | null, adaptationCodecs: string | null, adaptationRoles: IScheme[] | null, -): IAdaptationType | undefined { +): ITrackType | undefined { function fromMimeType( mimeType: string, roles: IScheme[] | null, - ): IAdaptationType | undefined { + ): ITrackType | undefined { const topLevel = mimeType.split("/")[0]; - if ( - arrayIncludes( - SUPPORTED_ADAPTATIONS_TYPE, - topLevel as IAdaptationType, - ) - ) { - return topLevel as IAdaptationType; + if (arrayIncludes(SUPPORTED_TRACK_TYPE, topLevel as ITrackType)) { + return topLevel as ITrackType; } if (mimeType === "application/ttml+xml") { return "text"; @@ -87,7 +80,7 @@ export default function inferAdaptationType( return undefined; } } - function fromCodecs(codecs: string): IAdaptationType | undefined { + function fromCodecs(codecs: string): ITrackType | undefined { switch (codecs.substring(0, 3)) { case "avc": case "hev": diff --git a/src/parsers/manifest/dash/common/parse_adaptation_sets.ts b/src/parsers/manifest/dash/common/parse_adaptation_sets.ts index f7ffeee88d..e6bdc8b929 100644 --- a/src/parsers/manifest/dash/common/parse_adaptation_sets.ts +++ b/src/parsers/manifest/dash/common/parse_adaptation_sets.ts @@ -16,14 +16,14 @@ import log from "../../../../log"; import type { IPeriod } from "../../../../manifest"; -import { SUPPORTED_ADAPTATIONS_TYPE } from "../../../../manifest"; +import { SUPPORTED_TRACK_TYPE } from "../../../../manifest"; import type { ITrackType } from "../../../../public_types"; import arrayFind from "../../../../utils/array_find"; import arrayFindIndex from "../../../../utils/array_find_index"; import arrayIncludes from "../../../../utils/array_includes"; import isNonEmptyString from "../../../../utils/is_non_empty_string"; import isNullOrUndefined from "../../../../utils/is_null_or_undefined"; -import type { IParsedAdaptation, IParsedAdaptations } from "../../types"; +import type { IParsedTrack } from "../../types"; import type { IAdaptationSetIntermediateRepresentation, ISegmentTemplateIntermediateRepresentation, @@ -259,14 +259,14 @@ function getAdaptationSetSwitchingIDs( export default function parseAdaptationSets( adaptationsIR: IAdaptationSetIntermediateRepresentation[], context: IAdaptationSetContext, -): IParsedAdaptations { +): Record<"audio" | "video" | "text", IParsedTrack[]> { const parsedAdaptations: Record< ITrackType, - Array<[IParsedAdaptation, IAdaptationSetOrderingData]> + Array<[IParsedTrack, IAdaptationSetOrderingData]> > = { video: [], audio: [], text: [] }; - const trickModeAdaptations: Array<{ - adaptation: IParsedAdaptation; - trickModeAttachedAdaptationIds: string[]; + const trickModeTracks: Array<{ + track: IParsedTrack; + trickModeAttachedTrackIds: string[]; }> = []; const adaptationSwitchingInfos: IAdaptationSwitchingInfos = {}; @@ -342,10 +342,10 @@ export default function parseAdaptationSets( }) : undefined; - const trickModeAttachedAdaptationIds: string[] | undefined = + const trickModeAttachedTrackIds: string[] | undefined = trickModeProperty?.value?.split(" "); - const isTrickModeTrack = trickModeAttachedAdaptationIds !== undefined; + const isTrickModeTrack = trickModeAttachedTrackIds !== undefined; const { accessibilities } = adaptationChildren; @@ -404,29 +404,29 @@ export default function parseAdaptationSets( parsedAdaptationsIDs.push(adaptationID); reprCtxt.unsafelyBaseOnPreviousAdaptation = - 
context.unsafelyBaseOnPreviousPeriod?.getAdaptation(adaptationID) ?? null; + context.unsafelyBaseOnPreviousPeriod?.getTrack(adaptationID) ?? null; const representations = parseRepresentations(representationsIR, adaptation, reprCtxt); - const parsedAdaptationSet: IParsedAdaptation = { + const parsedAdaptationSet: IParsedTrack = { id: adaptationID, representations, - type, + trackType: type, isTrickModeTrack, }; if (!isNullOrUndefined(adaptation.attributes.language)) { parsedAdaptationSet.language = adaptation.attributes.language; } if (!isNullOrUndefined(isClosedCaption)) { - parsedAdaptationSet.closedCaption = isClosedCaption; + parsedAdaptationSet.isClosedCaption = isClosedCaption; } if (!isNullOrUndefined(isAudioDescription)) { - parsedAdaptationSet.audioDescription = isAudioDescription; + parsedAdaptationSet.isAudioDescription = isAudioDescription; } if (isDub === true) { parsedAdaptationSet.isDub = true; } if (isForcedSubtitle !== undefined) { - parsedAdaptationSet.forcedSubtitles = isForcedSubtitle; + parsedAdaptationSet.isForcedSubtitles = isForcedSubtitle; } if (isSignInterpreted === true) { parsedAdaptationSet.isSignInterpreted = true; @@ -436,10 +436,10 @@ export default function parseAdaptationSets( parsedAdaptationSet.label = label; } - if (trickModeAttachedAdaptationIds !== undefined) { - trickModeAdaptations.push({ - adaptation: parsedAdaptationSet, - trickModeAttachedAdaptationIds, + if (trickModeAttachedTrackIds !== undefined) { + trickModeTracks.push({ + track: parsedAdaptationSet, + trickModeAttachedTrackIds, }); } else { // look if we have to merge this into another Adaptation @@ -455,8 +455,8 @@ export default function parseAdaptationSets( const mergedInto = parsedAdaptations[type][mergedIntoIdx]; if ( mergedInto !== undefined && - mergedInto[0].audioDescription === parsedAdaptationSet.audioDescription && - mergedInto[0].closedCaption === parsedAdaptationSet.closedCaption && + mergedInto[0].isAudioDescription === parsedAdaptationSet.isAudioDescription && + mergedInto[0].isClosedCaption === parsedAdaptationSet.isClosedCaption && mergedInto[0].language === parsedAdaptationSet.language ) { log.info('DASH Parser: merging "switchable" AdaptationSets', originalID, id); @@ -490,21 +490,24 @@ export default function parseAdaptationSets( } } - const adaptationsPerType = SUPPORTED_ADAPTATIONS_TYPE.reduce( - (acc: IParsedAdaptations, adaptationType: ITrackType) => { + const adaptationsPerType = SUPPORTED_TRACK_TYPE.reduce( + ( + acc: Record<"audio" | "video" | "text", IParsedTrack[]>, + adaptationType: ITrackType, + ) => { const adaptationsParsedForType = parsedAdaptations[adaptationType]; if (adaptationsParsedForType.length > 0) { adaptationsParsedForType.sort(compareAdaptations); - acc[adaptationType] = adaptationsParsedForType.map( - ([parsedAdaptation]) => parsedAdaptation, - ); + for (const [adap] of adaptationsParsedForType) { + acc[adaptationType].push(adap); + } } return acc; }, - {}, + { audio: [], video: [], text: [] }, ); parsedAdaptations.video.sort(compareAdaptations); - attachTrickModeTrack(adaptationsPerType, trickModeAdaptations); + attachTrickModeTrack(adaptationsPerType, trickModeTracks); return adaptationsPerType; } @@ -533,8 +536,8 @@ interface IAdaptationSetOrderingData { * @returns {number} */ function compareAdaptations( - a: [IParsedAdaptation, IAdaptationSetOrderingData], - b: [IParsedAdaptation, IAdaptationSetOrderingData], + a: [IParsedTrack, IAdaptationSetOrderingData], + b: [IParsedTrack, IAdaptationSetOrderingData], ): number { const priorityDiff = 
b[1].priority - a[1].priority; if (priorityDiff !== 0) { diff --git a/src/parsers/manifest/dash/common/parse_periods.ts b/src/parsers/manifest/dash/common/parse_periods.ts index ffe26f737c..284d676fb6 100644 --- a/src/parsers/manifest/dash/common/parse_periods.ts +++ b/src/parsers/manifest/dash/common/parse_periods.ts @@ -16,18 +16,17 @@ import log from "../../../../log"; import type { IManifest } from "../../../../manifest"; -import flatMap from "../../../../utils/flat_map"; +import type { ITrackType } from "../../../../public_types"; import idGenerator from "../../../../utils/id_generator"; import isNullOrUndefined from "../../../../utils/is_null_or_undefined"; import isWorker from "../../../../utils/is_worker"; import getMonotonicTimeStamp from "../../../../utils/monotonic_timestamp"; -import objectValues from "../../../../utils/object_values"; import { utf8ToStr } from "../../../../utils/string_parsing"; import type { IManifestStreamEvent, - IParsedAdaptation, - IParsedAdaptations, + IParsedTrack, IParsedPeriod, + IParsedVariantStreamMetadata, } from "../../types"; import type { IEventStreamIntermediateRepresentation, @@ -126,7 +125,7 @@ export default function parsePeriods( start: periodStart, unsafelyBaseOnPreviousPeriod, }; - const adaptations = parseAdaptationSets(periodIR.children.adaptations, adapCtxt); + const tracksMetadata = parseAdaptationSets(periodIR.children.adaptations, adapCtxt); const namespaces = (context.xmlNamespaces ?? []).concat( periodIR.attributes.namespaces ?? [], @@ -136,18 +135,37 @@ export default function parsePeriods( periodStart, namespaces, ); + const getMediaForType = (type: ITrackType) => { + return tracksMetadata[type].map((t) => { + return { + id: t.id, + linkedTrack: t.id, + representations: t.representations.map((r) => r.id), + }; + }); + }; + const variantStream: IParsedVariantStreamMetadata = { + id: "0", + bandwidth: undefined, + media: { + audio: getMediaForType("audio"), + video: getMediaForType("video"), + text: getMediaForType("text"), + }, + }; const parsedPeriod: IParsedPeriod = { id: periodID, start: periodStart, end: periodEnd, duration: periodDuration, - adaptations, + variantStreams: [variantStream], + tracksMetadata, streamEvents, }; parsedPeriods.unshift(parsedPeriod); if (!manifestBoundsCalculator.lastPositionIsKnown()) { - const lastPosition = getMaximumLastPosition(adaptations); + const lastPosition = getMaximumLastPosition(tracksMetadata); if (!isDynamic) { if (typeof lastPosition === "number") { manifestBoundsCalculator.setLastPosition(lastPosition); @@ -246,22 +264,20 @@ function guessLastPositionFromClock( * - If segments are available but we cannot define the last position * return undefined. 
* - If no segment are available in that period, return null - * @param {Object} adaptationsPerType + * @param {Object} tracksMetadata * @returns {number|null|undefined} */ function getMaximumLastPosition( - adaptationsPerType: IParsedAdaptations, + tracksMetadata: Record, ): number | null | undefined { let maxEncounteredPosition: number | null = null; let allIndexAreEmpty = true; - const adaptationsVal = objectValues(adaptationsPerType).filter( - (ada): ada is IParsedAdaptation[] => !isNullOrUndefined(ada), - ); - const allAdaptations = flatMap( - adaptationsVal, - (adaptationsForType) => adaptationsForType, - ); - for (const adaptation of allAdaptations) { + const allTracks = [ + ...tracksMetadata.audio, + ...tracksMetadata.video, + ...tracksMetadata.text, + ]; + for (const adaptation of allTracks) { const representations = adaptation.representations; for (const representation of representations) { const position = representation.index.getLastAvailablePosition(); diff --git a/src/parsers/manifest/dash/common/parse_representations.ts b/src/parsers/manifest/dash/common/parse_representations.ts index ce8faaceeb..dbaa8145c4 100644 --- a/src/parsers/manifest/dash/common/parse_representations.ts +++ b/src/parsers/manifest/dash/common/parse_representations.ts @@ -15,7 +15,7 @@ */ import log from "../../../../log"; -import type { IAdaptation } from "../../../../manifest"; +import type { ITrack } from "../../../../manifest"; import type { IHDRInformation } from "../../../../public_types"; import arrayFind from "../../../../utils/array_find"; import objectAssign from "../../../../utils/object_assign"; @@ -137,8 +137,7 @@ export default function parseRepresentations( // Retrieve previous version of the Representation, if one. const unsafelyBaseOnPreviousRepresentation = - context.unsafelyBaseOnPreviousAdaptation?.getRepresentation(representationID) ?? - null; + context.unsafelyBaseOnPreviousAdaptation?.representations[representationID] ?? null; const inbandEventStreams = combineInbandEventStreams(representation, adaptation); @@ -290,7 +289,7 @@ export interface IRepresentationContext extends IInheritedRepresentationIndexCon * de-synchronization with what is actually on the server, * Use with moderation. */ - unsafelyBaseOnPreviousAdaptation: IAdaptation | null; + unsafelyBaseOnPreviousAdaptation: ITrack | null; /** Parses contentProtection elements. */ contentProtectionParser: ContentProtectionParser; } diff --git a/src/parsers/manifest/local/parse_local_manifest.ts b/src/parsers/manifest/local/parse_local_manifest.ts index 203b6fba81..c0ea3e0321 100644 --- a/src/parsers/manifest/local/parse_local_manifest.ts +++ b/src/parsers/manifest/local/parse_local_manifest.ts @@ -14,15 +14,17 @@ * limitations under the License. 
*/ +import type { ITrackType } from "../../../public_types"; import idGenerator from "../../../utils/id_generator"; import getMonotonicTimeStamp from "../../../utils/monotonic_timestamp"; import type { IContentProtections, IContentProtectionInitData, - IParsedAdaptation, + IParsedTrack, IParsedManifest, IParsedPeriod, IParsedRepresentation, + IParsedVariantStreamMetadata, } from "../types"; import LocalRepresentationIndex from "./representation_index"; import type { @@ -86,26 +88,44 @@ function parsePeriod( period: ILocalPeriod, ctxt: { periodIdGenerator: () => string /* Generate next Period's id */ }, ): IParsedPeriod { - const adaptationIdGenerator = idGenerator(); + const trackIdGenerator = idGenerator(); + + const tracksMetadata = period.adaptations.reduce< + Record<"audio" | "video" | "text", IParsedTrack[]> + >( + (acc, ada) => { + const parsed = parseAdaptation(ada, { trackIdGenerator }); + acc[ada.type].push(parsed); + return acc; + }, + { audio: [], video: [], text: [] }, + ); + const getMediaForType = (type: ITrackType) => { + return tracksMetadata[type].map((t) => { + return { + id: t.id, + linkedTrack: t.id, + representations: t.representations.map((r) => r.id), + }; + }); + }; + const variantStream: IParsedVariantStreamMetadata = { + id: "0", + bandwidth: undefined, + media: { + audio: getMediaForType("audio"), + video: getMediaForType("video"), + text: getMediaForType("text"), + }, + }; return { id: "period-" + ctxt.periodIdGenerator(), start: period.start, end: period.end, duration: period.end - period.start, - adaptations: period.adaptations.reduce>>( - (acc, ada) => { - const type = ada.type; - let adaps = acc[type]; - if (adaps === undefined) { - adaps = []; - acc[type] = adaps; - } - adaps.push(parseAdaptation(ada, { adaptationIdGenerator })); - return acc; - }, - {}, - ), + variantStreams: [variantStream], + tracksMetadata, }; } @@ -117,15 +137,15 @@ function parsePeriod( function parseAdaptation( adaptation: ILocalAdaptation, ctxt: { - adaptationIdGenerator: () => string /* Generate next Adaptation's id */; + trackIdGenerator: () => string /* Generate next track's id */; }, -): IParsedAdaptation { +): IParsedTrack { const representationIdGenerator = idGenerator(); return { - id: "adaptation-" + ctxt.adaptationIdGenerator(), - type: adaptation.type, - audioDescription: adaptation.audioDescription, - closedCaption: adaptation.closedCaption, + id: "track-" + ctxt.trackIdGenerator(), + trackType: adaptation.type, + isAudioDescription: adaptation.audioDescription, + isClosedCaption: adaptation.closedCaption, language: adaptation.language, representations: adaptation.representations.map((representation) => parseRepresentation(representation, { representationIdGenerator }), diff --git a/src/parsers/manifest/metaplaylist/metaplaylist_parser.ts b/src/parsers/manifest/metaplaylist/metaplaylist_parser.ts index ee761bc90b..9beb22e8e4 100644 --- a/src/parsers/manifest/metaplaylist/metaplaylist_parser.ts +++ b/src/parsers/manifest/metaplaylist/metaplaylist_parser.ts @@ -15,20 +15,21 @@ */ import log from "../../../log"; -import type { IManifest } from "../../../manifest"; -import { SUPPORTED_ADAPTATIONS_TYPE } from "../../../manifest"; +import type { IManifest, ITrack } from "../../../manifest"; +import { SUPPORTED_TRACK_TYPE } from "../../../manifest"; import { StaticRepresentationIndex } from "../../../manifest/classes"; import type { ITrackType } from "../../../public_types"; import idGenerator from "../../../utils/id_generator"; import isNullOrUndefined from 
"../../../utils/is_null_or_undefined"; import getMonotonicTimeStamp from "../../../utils/monotonic_timestamp"; +import { objectValues } from "../../../utils/object_values"; import { getFilenameIndexInUrl } from "../../../utils/resolve_url"; import type { - IParsedAdaptation, - IParsedAdaptations, + IParsedTrack, IParsedManifest, IParsedPeriod, IParsedRepresentation, + IParsedVariantStreamMetadata, } from "../types"; import MetaRepresentationIndex from "./representation_index"; @@ -170,7 +171,7 @@ function createManifest( serverSyncInfos !== undefined ? serverSyncInfos.serverTimestamp - serverSyncInfos.clientTime : undefined; - const generateAdaptationID = idGenerator(); + const generateTrackID = idGenerator(); const generateRepresentationID = idGenerator(); const { contents } = mplData; const minimumTime = contents.length > 0 ? contents[0].startTime : 0; @@ -196,20 +197,24 @@ function createManifest( const manifestPeriods = []; for (let iPer = 0; iPer < currentManifest.periods.length; iPer++) { const currentPeriod = currentManifest.periods[iPer]; - const adaptations = SUPPORTED_ADAPTATIONS_TYPE.reduce( - (acc, type: ITrackType) => { - const currentAdaptations = currentPeriod.adaptations[type]; - if (isNullOrUndefined(currentAdaptations)) { - return acc; - } - - const adaptationsForCurrentType: IParsedAdaptation[] = []; - for (let iAda = 0; iAda < currentAdaptations.length; iAda++) { - const currentAdaptation = currentAdaptations[iAda]; + const tracks = SUPPORTED_TRACK_TYPE.reduce( + ( + acc: { + audio: IParsedTrack[]; + video: IParsedTrack[]; + text: IParsedTrack[]; + }, + type: ITrackType, + ) => { + const trackRecord: Record = currentPeriod.tracksMetadata[type]; + const currentTracks: ITrack[] = objectValues(trackRecord); + for (let iTrk = 0; iTrk < currentTracks.length; iTrk++) { + const currentTrack = currentTracks[iTrk]; + const trackReps = objectValues(currentTrack.representations); const representations: IParsedRepresentation[] = []; - for (let iRep = 0; iRep < currentAdaptation.representations.length; iRep++) { - const currentRepresentation = currentAdaptation.representations[iRep]; + for (let iRep = 0; iRep < trackReps.length; iRep++) { + const currentRepresentation = trackReps[iRep]; const baseContentMetadata = { isLive: currentManifest.isLive, @@ -251,35 +256,35 @@ function createManifest( contentProtections: currentRepresentation.contentProtections, }); } - adaptationsForCurrentType.push({ - id: currentAdaptation.id, + const track: IParsedTrack = { + id: currentTrack.id, representations, - type: currentAdaptation.type, - audioDescription: currentAdaptation.isAudioDescription, - closedCaption: currentAdaptation.isClosedCaption, - isDub: currentAdaptation.isDub, - language: currentAdaptation.language, - isSignInterpreted: currentAdaptation.isSignInterpreted, - }); - acc[type] = adaptationsForCurrentType; + trackType: currentTrack.trackType, + isAudioDescription: currentTrack.isAudioDescription, + isClosedCaption: currentTrack.isClosedCaption, + isDub: currentTrack.isDub, + language: currentTrack.language, + isSignInterpreted: currentTrack.isSignInterpreted, + }; + acc[type].push(track); } return acc; }, - {}, + { audio: [], video: [], text: [] }, ); // TODO only first period? const textTracks: IMetaPlaylistTextTrack[] = content.textTracks === undefined ? 
[] : content.textTracks; - const newTextAdaptations: IParsedAdaptation[] = textTracks.map((track) => { - const adaptationID = "gen-text-ada-" + generateAdaptationID(); + const newTextTracks: IParsedTrack[] = textTracks.map((track) => { + const trackID = "gen-text-ada-" + generateTrackID(); const representationID = "gen-text-rep-" + generateRepresentationID(); const indexOfFilename = getFilenameIndexInUrl(track.url); const cdnUrl = track.url.substring(0, indexOfFilename); const filename = track.url.substring(indexOfFilename); return { - id: adaptationID, - type: "text", + id: trackID, + trackType: "text", language: track.language, closedCaption: track.closedCaption, manuallyAdded: true, @@ -296,17 +301,32 @@ function createManifest( }; }, []); - if (newTextAdaptations.length > 0) { - if (isNullOrUndefined(adaptations.text)) { - adaptations.text = newTextAdaptations; - } else { - adaptations.text.push(...newTextAdaptations); - } + for (const newTextTrack of newTextTracks) { + tracks.text.push(newTextTrack); } + const getMediaForType = (type: ITrackType) => { + return tracks[type].map((a) => { + return { + id: a.id, + linkedTrack: a.id, + representations: a.representations.map((r) => r.id), + }; + }); + }; + const variantStream: IParsedVariantStreamMetadata = { + id: "0", + bandwidth: undefined, + media: { + audio: getMediaForType("audio"), + video: getMediaForType("video"), + text: getMediaForType("text"), + }, + }; const newPeriod: IParsedPeriod = { id: formatId(currentManifest.id) + "_" + formatId(currentPeriod.id), - adaptations, + tracksMetadata: tracks, + variantStreams: [variantStream], duration: currentPeriod.duration, start: contentOffset + currentPeriod.start, }; diff --git a/src/parsers/manifest/smooth/create_parser.ts b/src/parsers/manifest/smooth/create_parser.ts index 542c606fe7..483bf3a554 100644 --- a/src/parsers/manifest/smooth/create_parser.ts +++ b/src/parsers/manifest/smooth/create_parser.ts @@ -15,7 +15,8 @@ */ import log from "../../../log"; -import { SUPPORTED_ADAPTATIONS_TYPE } from "../../../manifest"; +import { SUPPORTED_TRACK_TYPE } from "../../../manifest"; +import type { ITrackType } from "../../../public_types"; import arrayIncludes from "../../../utils/array_includes"; import assert from "../../../utils/assert"; import { concat, itobe4 } from "../../../utils/byte_parsing"; @@ -28,10 +29,10 @@ import { hexToBytes } from "../../../utils/string_parsing"; import { createBox } from "../../containers/isobmff"; import type { IContentProtectionKID, - IParsedAdaptation, - IParsedAdaptations, + IParsedTrack, IParsedManifest, IParsedRepresentation, + IParsedVariantStreamMetadata, } from "../types"; import checkManifestIDs from "../utils/check_manifest_ids"; import { getAudioCodecs, getVideoCodecs } from "./get_codecs"; @@ -44,7 +45,7 @@ import parseBoolean from "./utils/parseBoolean"; import reduceChildren from "./utils/reduceChildren"; import { replaceRepresentationSmoothTokens } from "./utils/tokens"; -interface IAdaptationParserArguments { +interface ITrackParserArguments { root: Element; baseUrl: string; timescale: number; @@ -54,8 +55,6 @@ interface IAdaptationParserArguments { manifestReceivedTime?: number | undefined; } -type IAdaptationType = "audio" | "video" | "text"; - const DEFAULT_MIME_TYPES: Partial> = { audio: "audio/mp4", video: "video/mp4", @@ -266,14 +265,14 @@ function createSmoothStreamingParser( } /** - * Parse the adaptations () tree containing + * Parse the tracks () tree containing * representations () and timestamp indexes (). 
* Indexes can be quite huge, and this function needs to * to be optimized. * @param {Object} args * @returns {Object} */ - function parseAdaptation(args: IAdaptationParserArguments): IParsedAdaptation | null { + function parseTrack(args: ITrackParserArguments): IParsedTrack | null { const { root, timescale, @@ -293,10 +292,10 @@ function createSmoothStreamingParser( if (typeAttribute === null) { throw new Error("StreamIndex without type."); } - if (!arrayIncludes(SUPPORTED_ADAPTATIONS_TYPE, typeAttribute)) { - log.warn("Smooth Parser: Unrecognized adaptation type:", typeAttribute); + if (!arrayIncludes(SUPPORTED_TRACK_TYPE, typeAttribute)) { + log.warn("Smooth Parser: Unrecognized track type:", typeAttribute); } - const adaptationType = typeAttribute as IAdaptationType; + const trackType = typeAttribute as ITrackType; const subType = root.getAttribute("Subtype"); const language = root.getAttribute("Language"); @@ -314,14 +313,14 @@ function createSmoothStreamingParser( (res, _name, node) => { switch (_name) { case "QualityLevel": - const qualityLevel = parseQualityLevel(node, adaptationType); + const qualityLevel = parseQualityLevel(node, trackType); if (qualityLevel === null) { return res; } // filter out video qualityLevels with small bitrates if ( - adaptationType !== "video" || + trackType !== "video" || qualityLevel.bitrate > minRepresentationBitrate ) { res.qualityLevels.push(qualityLevel); @@ -347,11 +346,10 @@ function createSmoothStreamingParser( // codec and mimeType assert( qualityLevels.length !== 0, - "Adaptation should have at least one playable representation.", + "Track should have at least one playable representation.", ); - const adaptationID = - adaptationType + (isNonEmptyString(language) ? "_" + language : ""); + const trackID = trackType + (isNonEmptyString(language) ? "_" + language : ""); const representations = qualityLevels.map((qualityLevel) => { const media = replaceRepresentationSmoothTokens( @@ -361,12 +359,12 @@ function createSmoothStreamingParser( ); const mimeType = isNonEmptyString(qualityLevel.mimeType) ? qualityLevel.mimeType - : DEFAULT_MIME_TYPES[adaptationType]; + : DEFAULT_MIME_TYPES[trackType]; const codecs = qualityLevel.codecs; const id = - adaptationID + + trackID + "_" + - (!isNullOrUndefined(adaptationType) ? adaptationType + "-" : "") + + (!isNullOrUndefined(trackType) ? trackType + "-" : "") + (!isNullOrUndefined(mimeType) ? mimeType + "-" : "") + (!isNullOrUndefined(codecs) ? codecs + "-" : "") + String(qualityLevel.bitrate); @@ -439,18 +437,18 @@ function createSmoothStreamingParser( return null; } - const parsedAdaptation: IParsedAdaptation = { - id: adaptationID, - type: adaptationType, + const parsedTrack: IParsedTrack = { + id: trackID, + trackType, representations, language: language === null ? 
undefined : language, }; - if (adaptationType === "text" && subType === "DESC") { - parsedAdaptation.closedCaption = true; + if (trackType === "text" && subType === "DESC") { + parsedTrack.isClosedCaption = true; } - return parsedAdaptation; + return parsedTrack; } function parseFromDocument( @@ -483,9 +481,9 @@ function createSmoothStreamingParser( timescale = 10000000; } - const { protections, adaptationNodes } = reduceChildren<{ + const { protections, trackNodes } = reduceChildren<{ protections: IContentProtectionSmooth[]; - adaptationNodes: Element[]; + trackNodes: Element[]; }>( root, (res, name, node) => { @@ -495,18 +493,22 @@ function createSmoothStreamingParser( break; } case "StreamIndex": - res.adaptationNodes.push(node); + res.trackNodes.push(node); break; } return res; }, { - adaptationNodes: [], + trackNodes: [], protections: [], }, ); - const initialAdaptations: IParsedAdaptations = {}; + const initialTracks: Record = { + audio: [], + video: [], + text: [], + }; const isLive = parseBoolean(root.getAttribute("IsLive")); @@ -522,31 +524,22 @@ function createSmoothStreamingParser( } } - const adaptations: IParsedAdaptations = adaptationNodes.reduce( - (acc: IParsedAdaptations, node: Element) => { - const adaptation = parseAdaptation({ - root: node, - baseUrl, - timescale, - protections, - isLive, - timeShiftBufferDepth, - manifestReceivedTime, - }); - if (adaptation === null) { - return acc; - } - const type = adaptation.type; - const adaps = acc[type]; - if (adaps === undefined) { - acc[type] = [adaptation]; - } else { - adaps.push(adaptation); - } + const tracks = trackNodes.reduce((acc, node: Element) => { + const track = parseTrack({ + root: node, + baseUrl, + timescale, + protections, + isLive, + timeShiftBufferDepth, + manifestReceivedTime, + }); + if (track === null) { return acc; - }, - initialAdaptations, - ); + } + acc[track.trackType].push(track); + return acc; + }, initialTracks); let suggestedPresentationDelay: number | undefined; let availabilityStartTime: number | undefined; @@ -559,10 +552,8 @@ function createSmoothStreamingParser( time: number; }; - const firstVideoAdaptation = - adaptations.video !== undefined ? adaptations.video[0] : undefined; - const firstAudioAdaptation = - adaptations.audio !== undefined ? adaptations.audio[0] : undefined; + const firstVideoTrack = tracks.video !== undefined ? tracks.video[0] : undefined; + const firstAudioTrack = tracks.audio !== undefined ? tracks.audio[0] : undefined; /** Minimum time that can be reached regardless of the StreamIndex chosen. */ let safeMinimumTime: number | undefined; @@ -573,12 +564,12 @@ function createSmoothStreamingParser( /** Maximum time that can be reached in absolute on the content. 
*/ let unsafeMaximumTime: number | undefined; - if (firstVideoAdaptation !== undefined || firstAudioAdaptation !== undefined) { + if (firstVideoTrack !== undefined || firstAudioTrack !== undefined) { const firstTimeReferences: number[] = []; const lastTimeReferences: number[] = []; - if (firstVideoAdaptation !== undefined) { - const firstVideoRepresentation = firstVideoAdaptation.representations[0]; + if (firstVideoTrack !== undefined) { + const firstVideoRepresentation = firstVideoTrack.representations[0]; if (firstVideoRepresentation !== undefined) { const firstVideoTimeReference = firstVideoRepresentation.index.getFirstAvailablePosition(); @@ -595,8 +586,8 @@ function createSmoothStreamingParser( } } - if (firstAudioAdaptation !== undefined) { - const firstAudioRepresentation = firstAudioAdaptation.representations[0]; + if (firstAudioTrack !== undefined) { + const firstAudioRepresentation = firstAudioTrack.representations[0]; if (firstAudioRepresentation !== undefined) { const firstAudioTimeReference = firstAudioRepresentation.index.getFirstAvailablePosition(); @@ -665,6 +656,25 @@ function createSmoothStreamingParser( const periodStart = isLive ? 0 : minimumTime; const periodEnd = isLive ? undefined : maximumTimeData.maximumSafePosition; + + const getMediaForType = (type: ITrackType) => { + return tracks[type].map((t) => { + return { + id: t.id, + linkedTrack: t.id, + representations: t.representations.map((r) => r.id), + }; + }); + }; + const variantStream: IParsedVariantStreamMetadata = { + id: "0", + bandwidth: undefined, + media: { + audio: getMediaForType("audio"), + video: getMediaForType("video"), + text: getMediaForType("text"), + }, + }; const manifest = { availabilityStartTime: availabilityStartTime === undefined ? 0 : availabilityStartTime, @@ -679,7 +689,8 @@ function createSmoothStreamingParser( }, periods: [ { - adaptations, + tracksMetadata: tracks, + variantStreams: [variantStream], duration: periodEnd !== undefined ? periodEnd - periodStart : duration, end: periodEnd, id: "gen-smooth-period-0", diff --git a/src/parsers/manifest/types.ts b/src/parsers/manifest/types.ts index 7f1969ac81..e7d47c5387 100644 --- a/src/parsers/manifest/types.ts +++ b/src/parsers/manifest/types.ts @@ -15,7 +15,7 @@ */ import type { IRepresentationIndex } from "../../manifest"; -import type { IHDRInformation } from "../../public_types"; +import type { IHDRInformation, ITrackType } from "../../public_types"; export interface IManifestStreamEvent { start: number; @@ -174,73 +174,44 @@ export interface IParsedRepresentation { supplementalCodecs?: string | undefined; } -/** Every possible types an Adaptation can have. */ -export type IParsedAdaptationType = "audio" | "video" | "text"; - -/** - * Collection of multiple `Adaptation`, regrouped by type, as used by a - * `Period`. - */ -export type IParsedAdaptations = Partial< - Record ->; - -/** Representation of a "track" available in any Period. */ -export interface IParsedAdaptation { - /** - * Unique ID that should not change between Manifest updates for this - * Adaptation but which should be different than any other Adaptation - * in the same Period. - */ +export interface IParsedVariantStreamMetadata { + /** Identifier which identify that track group. */ id: string; - /** Describes every qualities this Adaptation is in. */ - representations: IParsedRepresentation[]; - /** The type of track (e.g. "video", "audio" or "text"). */ - type: IParsedAdaptationType; - /** - * Whether this Adaptation is an audio-track for the visually impaired. 
- * Not set if unknown or if it makes no sense for the current track (e.g. for - * a video track). - */ - audioDescription?: boolean | undefined; - /** - * Whether this Adaptation are closed captions for the hard of hearing. - * Not set if unknown or if it makes no sense for the current track (e.g. for - * a video track). - */ - closedCaption?: boolean | undefined; - /** - * If `true` this Adaptation are subtitles Meant for display when no other text - * Adaptation is selected. It is used to clarify dialogue, alternate - * languages, texted graphics or location/person IDs that are not otherwise - * covered in the dubbed/localized audio Adaptation. - */ - forcedSubtitles?: boolean; - /** - * If true this Adaptation is in a dub: it was recorded in another language - * than the original(s) one(s). - */ - isDub?: boolean | undefined; - /** - * If true this Adaptation is in a sign interpreted: which is a variant of the - * video with sign language. - */ - isSignInterpreted?: boolean | undefined; - /** Tells if the track is a trick mode track. */ - isTrickModeTrack?: boolean | undefined; - /** - * Language the `Adaptation` is in. - * Not set if unknown or if it makes no sense for the current track. - */ - language?: string | undefined; /** - * Label of the `Adaptation` if it exists. - */ - label?: string; - /** - * TrickMode tracks attached to the adaptation. + * Identify a bandwidth floor from which that track group should be selected. + * `undefined` if no such consideration needs to be done for that track group. + * + * Note: bandwidth considerations may also exist at the Representation-level */ - trickModeTracks?: IParsedAdaptation[] | undefined; + bandwidth: number | undefined; + + /** List of track and quality combinations in that variant stream. */ + media: { + /** Audio media existing for that track group. */ + audio: Array<{ + /** Identify that media. */ + id: string; + /** `id` of the track this media is linked to */ + linkedTrack: string; + /** `id`s of the different `Representations` (e.g. qualities) available */ + representations: string[]; + }>; + video: Array<{ + id: string; + /** `id` of the track this media is linked to */ + linkedTrack: string; + /** `id`s of the different `Representations` (e.g. qualities) available */ + representations: string[]; + }>; + text: Array<{ + /** Identify that media. */ + id: string; + /** `id` of the track this media is linked to */ + linkedTrack: string; + /** `id`s of the different `Representations` (e.g. qualities) available */ + representations: string[]; + }>; + }; } /** Information on a given period of time in the Manifest */ @@ -257,8 +228,15 @@ export interface IParsedPeriod { * corresponds to the time of the first available segment */ start: number; - /** Available tracks for this Period. */ - adaptations: IParsedAdaptations; + /** Available variant streams for this Period. */ + variantStreams: IParsedVariantStreamMetadata[]; + /** + * Description of all "tracks" available in this Period. + * + * To actually exploit those tracks for playback, you probably want to rely on + * `variantStreams` instead. + */ + tracksMetadata: Record<ITrackType, IParsedTrack[]>; /** * Duration of the Period (from the start to the end), in seconds. * `undefined` if the Period is the last one and is still being updated. @@ -401,3 +379,47 @@ export interface IParsedManifest { /** URIs where the manifest can be refreshed by order of importance. */ uris?: string[] | undefined; } + +export interface IParsedTrack { + /** ID uniquely identifying this track. */ + id: string; + /** The "type" for that track. */ + trackType: ITrackType; + /** Language this track is in, as announced in the original Manifest. */ + language?: string | undefined; + /** Whether this track contains closed captions for the hard-of-hearing. */ + isClosedCaption?: boolean | undefined; + /** Whether this track contains an audio description for the visually impaired. */ + isAudioDescription?: boolean | undefined; + /** If `true`, this track contains sign interpretation. */ + isSignInterpreted?: boolean | undefined; + /** + * If `true`, this track contains "forced subtitles", meant for display when no + * other text track is selected. They are used to clarify dialogue, alternate + * languages, texted graphics or location/person IDs that are not otherwise + * covered in the dubbed/localized audio track. + */ + isForcedSubtitles?: boolean | undefined; + /** + * `true` if at least one Representation is in a supported codec. `false` otherwise. + * + * `undefined` for when this is not yet known (we're still in the process of + * probing for support). + */ + isSupported?: boolean | undefined; + /** Language this track is in, when translated into an ISO639-3 code. */ + normalizedLanguage?: string | undefined; + /** Label of the track, if one was announced in the Manifest. */ + label?: string | undefined; + /** + * If `true`, this track is a "dub", meaning it was recorded in another + * language than the original one. + */ + isDub?: boolean | undefined; + /** Trick mode tracks attached to this track, if any. */ + trickModeTracks?: IParsedTrack[] | undefined; + /** Tells if the track is a trick mode track. */ + isTrickModeTrack?: boolean | undefined; + /** Qualities this track is available in. */ + representations: IParsedRepresentation[]; +} diff --git a/src/parsers/manifest/utils/__tests__/get_first_time_from_adaptations.test.ts b/src/parsers/manifest/utils/__tests__/get_first_time_from_representations.test.ts similarity index 72% rename from src/parsers/manifest/utils/__tests__/get_first_time_from_adaptations.test.ts rename to src/parsers/manifest/utils/__tests__/get_first_time_from_representations.test.ts index d4ee312d83..b84c8a7a28 100644 --- a/src/parsers/manifest/utils/__tests__/get_first_time_from_adaptations.test.ts +++ b/src/parsers/manifest/utils/__tests__/get_first_time_from_representations.test.ts @@ -1,6 +1,6 @@ import { describe, it, expect } from "vitest"; import type { IRepresentationIndex } from "../../../../manifest"; -import getFirstPositionFromAdaptation from "../get_first_time_from_adaptation"; +import getFirstPositionFromRepresentations from "../get_first_time_from_representations"; function generateRepresentationIndex( firstPosition: number | undefined | null, @@ -57,15 +57,9 @@ function generateRepresentationIndex( }; } -describe("parsers utils - getFirstPositionFromAdaptation", function () { +describe("parsers utils - getFirstPositionFromRepresentations", function () { it("should return null if no representation", () => { - expect( - getFirstPositionFromAdaptation({ - id: "0", - type: "audio", - representations: [], - }), - ).toEqual(null); + expect(getFirstPositionFromRepresentations([])).toEqual(null); }); it("should return the first position if a single representation is present", () => { @@ -87,27 +81,9 @@ describe("parsers utils - getFirstPositionFromAdaptation", function () { cdnMetadata: [], index: generateRepresentationIndex(null), }; - expect( - getFirstPositionFromAdaptation({ - id: "0", - type: "audio", - representations: [representation1], - }), - ).toEqual(37); -
expect( - getFirstPositionFromAdaptation({ - id: "0", - type: "audio", - representations: [representation2], - }), - ).toEqual(undefined); - expect( - getFirstPositionFromAdaptation({ - id: "0", - type: "audio", - representations: [representation3], - }), - ).toEqual(null); + expect(getFirstPositionFromRepresentations([representation1])).toEqual(37); + expect(getFirstPositionFromRepresentations([representation2])).toEqual(undefined); + expect(getFirstPositionFromRepresentations([representation3])).toEqual(null); }); it("should return the maximum first position if many representations is present", () => { @@ -130,11 +106,11 @@ describe("parsers utils - getFirstPositionFromAdaptation", function () { index: generateRepresentationIndex(57), }; expect( - getFirstPositionFromAdaptation({ - id: "0", - type: "audio", - representations: [representation1, representation2, representation3], - }), + getFirstPositionFromRepresentations([ + representation1, + representation2, + representation3, + ]), ).toEqual(137); }); @@ -158,11 +134,11 @@ describe("parsers utils - getFirstPositionFromAdaptation", function () { index: generateRepresentationIndex(undefined), }; expect( - getFirstPositionFromAdaptation({ - id: "0", - type: "audio", - representations: [representation1, representation2, representation3], - }), + getFirstPositionFromRepresentations([ + representation1, + representation2, + representation3, + ]), ).toEqual(undefined); }); @@ -186,11 +162,11 @@ describe("parsers utils - getFirstPositionFromAdaptation", function () { index: generateRepresentationIndex(null), }; expect( - getFirstPositionFromAdaptation({ - id: "0", - type: "audio", - representations: [representation1, representation2, representation3], - }), + getFirstPositionFromRepresentations([ + representation1, + representation2, + representation3, + ]), ).toEqual(137); }); @@ -214,11 +190,11 @@ describe("parsers utils - getFirstPositionFromAdaptation", function () { index: generateRepresentationIndex(null), }; expect( - getFirstPositionFromAdaptation({ - id: "0", - type: "audio", - representations: [representation1, representation2, representation3], - }), + getFirstPositionFromRepresentations([ + representation1, + representation2, + representation3, + ]), ).toEqual(null); }); }); diff --git a/src/parsers/manifest/utils/__tests__/get_last_time_from_adaptation.test.ts b/src/parsers/manifest/utils/__tests__/get_last_time_from_representations.test.ts similarity index 72% rename from src/parsers/manifest/utils/__tests__/get_last_time_from_adaptation.test.ts rename to src/parsers/manifest/utils/__tests__/get_last_time_from_representations.test.ts index 2eada3f1c3..0c0c84bad0 100644 --- a/src/parsers/manifest/utils/__tests__/get_last_time_from_adaptation.test.ts +++ b/src/parsers/manifest/utils/__tests__/get_last_time_from_representations.test.ts @@ -1,6 +1,6 @@ import { describe, it, expect } from "vitest"; import type { IRepresentationIndex } from "../../../../manifest"; -import getLastPositionFromAdaptation from "../get_last_time_from_adaptation"; +import getLastPositionFromRepresentations from "../get_last_time_from_representations"; function generateRepresentationIndex( lastPosition: number | undefined | null, @@ -57,15 +57,9 @@ function generateRepresentationIndex( }; } -describe("parsers utils - getLastPositionFromAdaptation", function () { +describe("parsers utils - getLastPositionFromRepresentations", function () { it("should return null if no representation", () => { - expect( - getLastPositionFromAdaptation({ - id: "0", - type: 
"audio", - representations: [], - }), - ).toEqual(null); + expect(getLastPositionFromRepresentations([])).toEqual(null); }); it("should return the last position if a single representation is present", () => { @@ -87,27 +81,9 @@ describe("parsers utils - getLastPositionFromAdaptation", function () { cdnMetadata: [], index: generateRepresentationIndex(null), }; - expect( - getLastPositionFromAdaptation({ - id: "0", - type: "audio", - representations: [representation1], - }), - ).toEqual(37); - expect( - getLastPositionFromAdaptation({ - id: "0", - type: "audio", - representations: [representation2], - }), - ).toEqual(undefined); - expect( - getLastPositionFromAdaptation({ - id: "0", - type: "audio", - representations: [representation3], - }), - ).toEqual(null); + expect(getLastPositionFromRepresentations([representation1])).toEqual(37); + expect(getLastPositionFromRepresentations([representation2])).toEqual(undefined); + expect(getLastPositionFromRepresentations([representation3])).toEqual(null); }); it("should return the minimum first position if many representations is present", () => { @@ -130,11 +106,11 @@ describe("parsers utils - getLastPositionFromAdaptation", function () { index: generateRepresentationIndex(57), }; expect( - getLastPositionFromAdaptation({ - id: "0", - type: "audio", - representations: [representation1, representation2, representation3], - }), + getLastPositionFromRepresentations([ + representation1, + representation2, + representation3, + ]), ).toEqual(37); }); @@ -158,11 +134,11 @@ describe("parsers utils - getLastPositionFromAdaptation", function () { index: generateRepresentationIndex(undefined), }; expect( - getLastPositionFromAdaptation({ - id: "0", - type: "audio", - representations: [representation1, representation2, representation3], - }), + getLastPositionFromRepresentations([ + representation1, + representation2, + representation3, + ]), ).toEqual(undefined); }); @@ -186,11 +162,11 @@ describe("parsers utils - getLastPositionFromAdaptation", function () { index: generateRepresentationIndex(null), }; expect( - getLastPositionFromAdaptation({ - id: "0", - type: "audio", - representations: [representation1, representation2, representation3], - }), + getLastPositionFromRepresentations([ + representation1, + representation2, + representation3, + ]), ).toEqual(37); }); @@ -214,11 +190,11 @@ describe("parsers utils - getLastPositionFromAdaptation", function () { index: generateRepresentationIndex(null), }; expect( - getLastPositionFromAdaptation({ - id: "0", - type: "audio", - representations: [representation1, representation2, representation3], - }), + getLastPositionFromRepresentations([ + representation1, + representation2, + representation3, + ]), ).toEqual(null); }); }); diff --git a/src/parsers/manifest/utils/check_manifest_ids.ts b/src/parsers/manifest/utils/check_manifest_ids.ts index 29a591694b..18eef9f27e 100644 --- a/src/parsers/manifest/utils/check_manifest_ids.ts +++ b/src/parsers/manifest/utils/check_manifest_ids.ts @@ -15,11 +15,12 @@ */ import log from "../../../log"; +import type { ITrackType } from "../../../public_types"; import arrayIncludes from "../../../utils/array_includes"; -import type { IParsedAdaptationType, IParsedManifest } from "../types"; +import type { IParsedManifest } from "../types"; /** - * Ensure that no two periods, adaptations from the same period and + * Ensure that no two periods, variants and tracks from the same period and * representations from the same adaptation, have the same ID. 
* * Log and mutate their ID if not until this is verified. @@ -39,26 +40,26 @@ export default function checkManifestIDs(manifest: IParsedManifest): void { } else { periodIDS.push(periodID); } - const { adaptations } = period; - const adaptationIDs: string[] = []; - (Object.keys(adaptations) as IParsedAdaptationType[]).forEach((type) => { - const adaptationsForType = adaptations[type]; - if (adaptationsForType === undefined) { + const { tracksMetadata } = period; + const trackIDs: string[] = []; + (Object.keys(tracksMetadata) as ITrackType[]).forEach((type) => { + const tracksForType = tracksMetadata[type]; + if (tracksForType === undefined) { return; } - adaptationsForType.forEach((adaptation) => { - const adaptationID = adaptation.id; - if (arrayIncludes(adaptationIDs, adaptationID)) { - log.warn("Two adaptations with the same ID found. Updating.", adaptationID); - const newID = adaptationID + "-dup"; - adaptation.id = newID; + tracksForType.forEach((track) => { + const trackID = track.id; + if (arrayIncludes(trackIDs, trackID)) { + log.warn("Two tracks with the same ID found. Updating.", trackID); + const newID = trackID + "-dup"; + track.id = newID; checkManifestIDs(manifest); - adaptationIDs.push(newID); + trackIDs.push(newID); } else { - adaptationIDs.push(adaptationID); + trackIDs.push(trackID); } const representationIDs: Array = []; - adaptation.representations.forEach((representation) => { + track.representations.forEach((representation) => { const representationID = representation.id; if (arrayIncludes(representationIDs, representationID)) { log.warn( diff --git a/src/parsers/manifest/utils/get_first_time_from_adaptation.ts b/src/parsers/manifest/utils/get_first_time_from_representations.ts similarity index 75% rename from src/parsers/manifest/utils/get_first_time_from_adaptation.ts rename to src/parsers/manifest/utils/get_first_time_from_representations.ts index ee0128a233..6e859f4290 100644 --- a/src/parsers/manifest/utils/get_first_time_from_adaptation.ts +++ b/src/parsers/manifest/utils/get_first_time_from_representations.ts @@ -14,22 +14,20 @@ * limitations under the License. */ -import type { IParsedAdaptation } from "../types"; +import type { IParsedRepresentation } from "../types"; /** - * Returns "first time of reference" from the adaptation given, considering a - * dynamic content. + * Returns "first time of reference" from the Representations given, considering + * a dynamic content. * Undefined if a time could not be found. * - * We consider the latest first time from every representations in the given - * adaptation. - * @param {Object} adaptation + * We consider the latest first time from every representations. 
+ * @param {Object} representations * @returns {Number|undefined} */ -export default function getFirstPositionFromAdaptation( - adaptation: IParsedAdaptation, +export default function getFirstPositionFromRepresentations( + representations: IParsedRepresentation[], ): number | undefined | null { - const { representations } = adaptation; let max: null | number = null; for (let i = 0; i < representations.length; i++) { const firstPosition = representations[i].index.getFirstAvailablePosition(); diff --git a/src/parsers/manifest/utils/get_last_time_from_adaptation.ts b/src/parsers/manifest/utils/get_last_time_from_representations.ts similarity index 73% rename from src/parsers/manifest/utils/get_last_time_from_adaptation.ts rename to src/parsers/manifest/utils/get_last_time_from_representations.ts index 7432afb494..824c91a519 100644 --- a/src/parsers/manifest/utils/get_last_time_from_adaptation.ts +++ b/src/parsers/manifest/utils/get_last_time_from_representations.ts @@ -14,24 +14,22 @@ * limitations under the License. */ -import type { IParsedAdaptation } from "../types"; +import type { IParsedRepresentation } from "../types"; /** - * Returns "last time of reference" from the adaptation given, considering a - * dynamic content. + * Returns "last time of reference" from the Representations given, considering + * a dynamic content. * Undefined if a time could not be found. - * Null if the Adaptation has no segments (it could be that it didn't started or - * that it already finished for example). + * Null if the Representations have no segments (it could be that they + * didn't start yet or that they already finished for example). * - * We consider the earliest last time from every representations in the given - * adaptation. - * @param {Object} adaptation + * We consider the earliest last time from every Representation. + * @param {Array.<Object>} representations * @returns {Number|undefined|null} */ -export default function getLastPositionFromAdaptation( - adaptation: IParsedAdaptation, +export default function getLastPositionFromRepresentations( + representations: IParsedRepresentation[], ): number | undefined | null { - const { representations } = adaptation; let min: null | number = null; for (let i = 0; i < representations.length; i++) { const lastPosition = representations[i].index.getLastAvailablePosition(); diff --git a/src/parsers/manifest/utils/get_maximum_positions.ts b/src/parsers/manifest/utils/get_maximum_positions.ts index 274bb4dc81..6108419123 100644 --- a/src/parsers/manifest/utils/get_maximum_positions.ts +++ b/src/parsers/manifest/utils/get_maximum_positions.ts @@ -16,7 +16,7 @@ import log from "../../../log"; import type { IParsedPeriod } from "../types"; -import getLastPositionFromAdaptation from "./get_last_time_from_adaptation"; +import getLastPositionFromRepresentations from "./get_last_time_from_representations"; /** * @param {Array.} periods * @returns {Object} */ @@ -27,31 +27,29 @@ export default function getMaximumPosition(periods: IParsedPeriod[]): { unsafe: number | undefined; } { for (let i = periods.length - 1; i >= 0; i--) { - const periodAdaptations = periods[i].adaptations; - const firstAudioAdaptationFromPeriod = - periodAdaptations.audio === undefined ? undefined : periodAdaptations.audio[0]; - const firstVideoAdaptationFromPeriod = - periodAdaptations.video === undefined ?
undefined : periodAdaptations.video[0]; + const periodTracks = periods[i].tracksMetadata; + const firstAudioTrackFromPeriod = periodTracks.audio[0]; + const firstVideoTrackFromPeriod = periodTracks.video[0]; if ( - firstAudioAdaptationFromPeriod !== undefined || - firstVideoAdaptationFromPeriod !== undefined + firstAudioTrackFromPeriod !== undefined || + firstVideoTrackFromPeriod !== undefined ) { // null == no segment let maximumAudioPosition: number | null = null; let maximumVideoPosition: number | null = null; - if (firstAudioAdaptationFromPeriod !== undefined) { - const lastPosition = getLastPositionFromAdaptation( - firstAudioAdaptationFromPeriod, + if (firstAudioTrackFromPeriod !== undefined) { + const lastPosition = getLastPositionFromRepresentations( + firstAudioTrackFromPeriod.representations, ); if (lastPosition === undefined) { return { safe: undefined, unsafe: undefined }; } maximumAudioPosition = lastPosition; } - if (firstVideoAdaptationFromPeriod !== undefined) { - const lastPosition = getLastPositionFromAdaptation( - firstVideoAdaptationFromPeriod, + if (firstVideoTrackFromPeriod !== undefined) { + const lastPosition = getLastPositionFromRepresentations( + firstVideoTrackFromPeriod.representations, ); if (lastPosition === undefined) { return { safe: undefined, unsafe: undefined }; @@ -60,8 +58,8 @@ export default function getMaximumPosition(periods: IParsedPeriod[]): { } if ( - (firstAudioAdaptationFromPeriod !== undefined && maximumAudioPosition === null) || - (firstVideoAdaptationFromPeriod !== undefined && maximumVideoPosition === null) + (firstAudioTrackFromPeriod !== undefined && maximumAudioPosition === null) || + (firstVideoTrackFromPeriod !== undefined && maximumVideoPosition === null) ) { log.info( "Parser utils: found Period with no segment. ", diff --git a/src/parsers/manifest/utils/get_minimum_position.ts b/src/parsers/manifest/utils/get_minimum_position.ts index a892fc6bf5..be4609298b 100644 --- a/src/parsers/manifest/utils/get_minimum_position.ts +++ b/src/parsers/manifest/utils/get_minimum_position.ts @@ -16,7 +16,7 @@ import log from "../../../log"; import type { IParsedPeriod } from "../types"; -import getFirstPositionFromAdaptation from "./get_first_time_from_adaptation"; +import getFirstPositionFromRepresentations from "./get_first_time_from_representations"; /** * @param {Array.} periods @@ -24,31 +24,29 @@ import getFirstPositionFromAdaptation from "./get_first_time_from_adaptation"; */ export default function getMinimumPosition(periods: IParsedPeriod[]): number | undefined { for (let i = 0; i <= periods.length - 1; i++) { - const periodAdaptations = periods[i].adaptations; - const firstAudioAdaptationFromPeriod = - periodAdaptations.audio === undefined ? undefined : periodAdaptations.audio[0]; - const firstVideoAdaptationFromPeriod = - periodAdaptations.video === undefined ? 
undefined : periodAdaptations.video[0]; + const periodTracks = periods[i].tracksMetadata; + const firstAudioTrackFromPeriod = periodTracks.audio[0]; + const firstVideoTrackFromPeriod = periodTracks.video[0]; if ( - firstAudioAdaptationFromPeriod !== undefined || - firstVideoAdaptationFromPeriod !== undefined + firstAudioTrackFromPeriod !== undefined || + firstVideoTrackFromPeriod !== undefined ) { // null == no segment let minimumAudioPosition: number | null = null; let minimumVideoPosition: number | null = null; - if (firstAudioAdaptationFromPeriod !== undefined) { - const firstPosition = getFirstPositionFromAdaptation( - firstAudioAdaptationFromPeriod, + if (firstAudioTrackFromPeriod !== undefined) { + const firstPosition = getFirstPositionFromRepresentations( + firstAudioTrackFromPeriod.representations, ); if (firstPosition === undefined) { return undefined; } minimumAudioPosition = firstPosition; } - if (firstVideoAdaptationFromPeriod !== undefined) { - const firstPosition = getFirstPositionFromAdaptation( - firstVideoAdaptationFromPeriod, + if (firstVideoTrackFromPeriod !== undefined) { + const firstPosition = getFirstPositionFromRepresentations( + firstVideoTrackFromPeriod.representations, ); if (firstPosition === undefined) { return undefined; @@ -57,8 +55,8 @@ export default function getMinimumPosition(periods: IParsedPeriod[]): number | u } if ( - (firstAudioAdaptationFromPeriod !== undefined && minimumAudioPosition === null) || - (firstVideoAdaptationFromPeriod !== undefined && minimumVideoPosition === null) + (firstAudioTrackFromPeriod !== undefined && minimumAudioPosition === null) || + (firstVideoTrackFromPeriod !== undefined && minimumVideoPosition === null) ) { log.info( "Parser utils: found Period with no segment. ", diff --git a/src/transports/utils/infer_segment_container.ts b/src/transports/utils/infer_segment_container.ts index cb3ab550f5..c16efef8fa 100644 --- a/src/transports/utils/infer_segment_container.ts +++ b/src/transports/utils/infer_segment_container.ts @@ -24,15 +24,15 @@ import type { ITrackType } from "../../public_types"; * - "webm" if we can say with confidence the segment will be in a webm format * - `undefined` if we cannot say with confidence in which container the * segment will be in. - * @param {string} adaptationType + * @param {string} trackType * @param {string} mimeType * @returns {string | undefined} */ export default function inferSegmentContainer( - adaptationType: ITrackType, + trackType: ITrackType, mimeType: string | undefined, ): "webm" | "mp4" | undefined { - if (adaptationType === "audio" || adaptationType === "video") { + if (trackType === "audio" || trackType === "video") { if (mimeType === "video/mp4" || mimeType === "audio/mp4") { return "mp4"; } @@ -40,7 +40,7 @@ export default function inferSegmentContainer( return "webm"; } return undefined; - } else if (adaptationType === "text") { + } else if (trackType === "text") { return mimeType === "application/mp4" ? "mp4" : undefined; } return undefined;
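Side note on the diff above (a reviewer's sketch, not part of the patch): the `getMediaForType` helper and the single-`variantStream` construction it feeds are currently copy-pasted in the DASH (`parse_periods.ts`), Smooth (`create_parser.ts`), local-manifest (`parse_local_manifest.ts`) and MetaPlaylist (`metaplaylist_parser.ts`) parsers. If that pattern stays, it could be factored into a shared utility. Below is a minimal sketch, assuming it would live next to the other helpers in `src/parsers/manifest/utils/`; the file name, function name and exact typing are hypothetical and not introduced by this diff:

```ts
import type { ITrackType } from "../../../public_types";
import type { IParsedTrack, IParsedVariantStreamMetadata } from "../types";

/**
 * Hypothetical shared helper: build the single "variant stream" declared by
 * non-HLS parsers, authorizing every parsed track and every one of its
 * Representations, with `bandwidth` left `undefined`.
 * @param {Object} tracksMetadata - tracks already parsed for the Period.
 * @returns {Object}
 */
export default function createSingleVariantStream(
  tracksMetadata: Record<ITrackType, IParsedTrack[]>,
): IParsedVariantStreamMetadata {
  const getMediaForType = (type: ITrackType) =>
    tracksMetadata[type].map((t) => ({
      id: t.id,
      // Only reference the track and its Representations by `id`:
      // the metadata itself stays in `tracksMetadata`.
      linkedTrack: t.id,
      representations: t.representations.map((r) => r.id),
    }));
  return {
    id: "0",
    bandwidth: undefined,
    media: {
      audio: getMediaForType("audio"),
      video: getMediaForType("video"),
      text: getMediaForType("text"),
    },
  };
}
```

Each parser would then keep building its `tracksMetadata` record exactly as in the diff and call this helper once per Period, while an HLS parser could instead produce one such object per variant playlist, with its own `bandwidth` and restricted `media` lists.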