DASH: Prioritize selectionPriority attribute over a "main" Role when ordering AdaptationSets #1082

Merged: 1 commit, Mar 16, 2022
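
In short, this change makes the parser order AdaptationSets by descending `selectionPriority` (defaulting to `1` when the attribute is absent), only then by whether a Role is set to "main", and finally by their original position in the MPD. Below is a minimal TypeScript sketch of that rule, mirroring the `compareAdaptations` comparator added in this diff; the `OrderingData` and `byPriorityThenMainThenMpdOrder` names are illustrative and not from the codebase.

// Illustrative sketch of the ordering rule introduced by this PR.
// `OrderingData` mirrors the `IAdaptationSetOrderingData` interface from the diff.
interface OrderingData {
  priority : number;          // `selectionPriority` value, `1` when the attribute is absent
  isMainAdaptation : boolean; // `true` if a Role is set to "main"
  indexInMpd : number;        // position of the AdaptationSet in the original MPD
}

function byPriorityThenMainThenMpdOrder(a : OrderingData, b : OrderingData) : number {
  if (b.priority !== a.priority) {
    return b.priority - a.priority;       // higher `selectionPriority` first
  }
  if (a.isMainAdaptation !== b.isMainAdaptation) {
    return a.isMainAdaptation ? -1 : 1;   // then "main" Role first
  }
  return a.indexInMpd - b.indexInMpd;     // then keep the MPD order
}

const sets : OrderingData[] = [
  { priority: 1,   isMainAdaptation: true,  indexInMpd: 0 },
  { priority: 100, isMainAdaptation: false, indexInMpd: 1 },
];
sets.sort(byPriorityThenMainThenMpdOrder);
// -> the `selectionPriority="100"` set now comes first, before the
//    default-priority "main" one

With this rule, a non-main AdaptationSet declaring `selectionPriority="100"` is listed before a "main" one left at the default priority, which appears to be what the updated test MPDs below exercise.
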
173 changes: 109 additions & 64 deletions src/parsers/manifest/dash/common/parse_adaptation_sets.ts
@@ -15,14 +15,18 @@
*/

import log from "../../../../log";
import { Period } from "../../../../manifest";
import {
IAdaptationType,
Period,
SUPPORTED_ADAPTATIONS_TYPE,
} from "../../../../manifest";
import arrayFind from "../../../../utils/array_find";
import arrayFindIndex from "../../../../utils/array_find_index";
import arrayIncludes from "../../../../utils/array_includes";
import isNonEmptyString from "../../../../utils/is_non_empty_string";
import {
IParsedAdaptation,
IParsedAdaptations,
IParsedAdaptationType,
} from "../../types";
import {
IAdaptationSetIntermediateRepresentation,
@@ -235,30 +239,31 @@ export default function parseAdaptationSets(
adaptationsIR : IAdaptationSetIntermediateRepresentation[],
context : IAdaptationSetContext
): IParsedAdaptations {
const parsedAdaptations : IParsedAdaptations = {};
const parsedAdaptations : Record<
IAdaptationType,
Array<[ IParsedAdaptation,
IAdaptationSetOrderingData ]>
> = { video: [],
audio: [],
text: [],
image: [] };
const trickModeAdaptations: Array<{ adaptation: IParsedAdaptation;
trickModeAttachedAdaptationIds: string[]; }> = [];
const adaptationSwitchingInfos : IAdaptationSwitchingInfos = {};

const parsedAdaptationsIDs : string[] = [];

/**
* Index of the last parsed AdaptationSet with a Role set as "main" in
* `parsedAdaptations` for a given type.
* Not defined for a type with no main Adaptation inside.
* This is used to put main AdaptationSet first in the resulting array of
* Adaptation while still preserving the MPD order among them.
* Index of the last parsed Video AdaptationSet with a Role set as "main" in
* `parsedAdaptations.video`.
* `-1` if not yet encountered.
 * Used as we merge all main video AdaptationSets, following our understanding
 * of the DASH-IF IOP.
*/
const lastMainAdaptationIndex : Partial<Record<IParsedAdaptationType, number>> = {};

// first sort AdaptationSets by absolute priority.
adaptationsIR.sort((a, b) => {
/* As of DASH-IF 4.3, `1` is the default value. */
const priority1 = a.attributes.selectionPriority ?? 1;
const priority2 = b.attributes.selectionPriority ?? 1;
return priority2 - priority1;
});
let lastMainVideoAdapIdx = -1;

for (const adaptation of adaptationsIR) {
for (let adaptationIdx = 0; adaptationIdx < adaptationsIR.length; adaptationIdx++) {
const adaptation = adaptationsIR[adaptationIdx];
const adaptationChildren = adaptation.children;
const { essentialProperties,
roles } = adaptationChildren;
@@ -291,6 +296,7 @@
continue;
}

const priority = adaptation.attributes.selectionPriority ?? 1;
const originalID = adaptation.attributes.id;
let newID : string;
const adaptationSetSwitchingIDs = getAdaptationSetSwitchingIDs(adaptation);
@@ -334,14 +340,11 @@

if (type === "video" &&
isMainAdaptation &&
parsedAdaptations.video !== undefined &&
parsedAdaptations.video.length > 0 &&
lastMainAdaptationIndex.video !== undefined &&
lastMainVideoAdapIdx >= 0 &&
parsedAdaptations.video.length > lastMainVideoAdapIdx &&
!isTrickModeTrack)
{
// Add to the already existing main video adaptation
// TODO remove that ugly custom logic?
const videoMainAdaptation = parsedAdaptations.video[lastMainAdaptationIndex.video];
const videoMainAdaptation = parsedAdaptations.video[lastMainVideoAdapIdx][0];
reprCtxt.unsafelyBaseOnPreviousAdaptation = context
.unsafelyBaseOnPreviousPeriod?.getAdaptation(videoMainAdaptation.id) ?? null;
const representations = parseRepresentations(representationsIR,
@@ -422,65 +425,56 @@
parsedAdaptationSet.isSignInterpreted = true;
}

const adaptationsOfTheSameType = parsedAdaptations[type];
if (trickModeAttachedAdaptationIds !== undefined) {
trickModeAdaptations.push({ adaptation: parsedAdaptationSet,
trickModeAttachedAdaptationIds });
} else if (adaptationsOfTheSameType === undefined) {
parsedAdaptations[type] = [parsedAdaptationSet];
if (isMainAdaptation) {
lastMainAdaptationIndex[type] = 0;
}
} else {
let mergedInto : IParsedAdaptation|null = null;

// look if we have to merge this into another Adaptation
let mergedIntoIdx = -1;
for (const id of adaptationSetSwitchingIDs) {
const switchingInfos = adaptationSwitchingInfos[id];
if (switchingInfos != null &&
if (switchingInfos !== undefined &&
switchingInfos.newID !== newID &&
arrayIncludes(switchingInfos.adaptationSetSwitchingIDs, originalID))
{
const adaptationToMergeInto = arrayFind(adaptationsOfTheSameType,
(a) => a.id === id);
if (adaptationToMergeInto != null &&
adaptationToMergeInto.audioDescription ===
mergedIntoIdx = arrayFindIndex(parsedAdaptations[type],
(a) => a[0].id === id);
const mergedInto = parsedAdaptations[type][mergedIntoIdx];
if (mergedInto !== undefined &&
mergedInto[0].audioDescription ===
parsedAdaptationSet.audioDescription &&
adaptationToMergeInto.closedCaption ===
mergedInto[0].closedCaption ===
parsedAdaptationSet.closedCaption &&
adaptationToMergeInto.language === parsedAdaptationSet.language)
mergedInto[0].language === parsedAdaptationSet.language)
{
log.info("DASH Parser: merging \"switchable\" AdaptationSets",
originalID, id);
adaptationToMergeInto.representations
.push(...parsedAdaptationSet.representations);
mergedInto = adaptationToMergeInto;
mergedInto[0].representations.push(...parsedAdaptationSet.representations);
if (type === "video" &&
isMainAdaptation &&
!mergedInto[1].isMainAdaptation)
{
lastMainVideoAdapIdx = Math.max(lastMainVideoAdapIdx, mergedIntoIdx);
}
mergedInto[1] = {
priority: Math.max(priority, mergedInto[1].priority),
isMainAdaptation: isMainAdaptation ||
mergedInto[1].isMainAdaptation,
indexInMpd: Math.min(adaptationIdx, mergedInto[1].indexInMpd),
};
}
}
}

if (isMainAdaptation) {
const oldLastMainIdx = lastMainAdaptationIndex[type];
const newLastMainIdx = oldLastMainIdx === undefined ? 0 :
oldLastMainIdx + 1;
if (mergedInto === null) {
// put "main" Adaptation after all other Main Adaptations
adaptationsOfTheSameType.splice(newLastMainIdx, 0, parsedAdaptationSet);
lastMainAdaptationIndex[type] = newLastMainIdx;
} else {
const indexOf = adaptationsOfTheSameType.indexOf(mergedInto);
if (indexOf < 0) { // Weird, not found
adaptationsOfTheSameType.splice(newLastMainIdx, 0, parsedAdaptationSet);
lastMainAdaptationIndex[type] = newLastMainIdx;
} else if (oldLastMainIdx === undefined || indexOf > oldLastMainIdx) {
// Found but was not main
adaptationsOfTheSameType.splice(indexOf, 1);
adaptationsOfTheSameType.splice(newLastMainIdx, 0, mergedInto);
lastMainAdaptationIndex[type] = newLastMainIdx;
}
if (mergedIntoIdx < 0) {
parsedAdaptations[type].push([ parsedAdaptationSet,
{ priority,
isMainAdaptation,
indexInMpd: adaptationIdx }]);
if (type === "video" && isMainAdaptation) {
lastMainVideoAdapIdx = parsedAdaptations.video.length - 1;
}
} else if (mergedInto === null) {
adaptationsOfTheSameType.push(parsedAdaptationSet);
}
}
}
@@ -490,8 +484,59 @@
adaptationSetSwitchingIDs };
}
}
attachTrickModeTrack(parsedAdaptations, trickModeAdaptations);
return parsedAdaptations;

const adaptationsPerType = SUPPORTED_ADAPTATIONS_TYPE
.reduce((acc : IParsedAdaptations, adaptationType : IAdaptationType) => {
const adaptationsParsedForType = parsedAdaptations[adaptationType];
if (adaptationsParsedForType.length > 0) {
adaptationsParsedForType.sort(compareAdaptations);
acc[adaptationType] = adaptationsParsedForType
.map(([parsedAdaptation]) => parsedAdaptation);
}
return acc;
}, {});
parsedAdaptations.video.sort(compareAdaptations);
attachTrickModeTrack(adaptationsPerType, trickModeAdaptations);
return adaptationsPerType;
}

/** Metadata allowing to order AdaptationSets between one another. */
interface IAdaptationSetOrderingData {
/**
* If `true`, this AdaptationSet is considered as a "main" one (e.g. it had a
* Role set to "main").
*/
isMainAdaptation : boolean;
/**
* Set to the `selectionPriority` attribute of the corresponding AdaptationSet
* or to `1` by default.
*/
priority : number;
/** Index of this AdaptationSet in the original MPD, starting from `0`. */
indexInMpd : number;
}

/**
 * Compare two parsed AdaptationSets, alongside their ordering metadata, so
 * that they can easily be sorted through JavaScript's `Array.prototype.sort`
 * method.
* @param {Array.<Object>} a
* @param {Array.<Object>} b
* @returns {number}
*/
function compareAdaptations(
a : [IParsedAdaptation, IAdaptationSetOrderingData],
b : [IParsedAdaptation, IAdaptationSetOrderingData]
) : number {
const priorityDiff = b[1].priority - a[1].priority;
if (priorityDiff !== 0) {
return priorityDiff;
}
if (a[1].isMainAdaptation !== b[1].isMainAdaptation) {
return a[1].isMainAdaptation ? -1 :
1;
}
return a[1].indexInMpd - b[1].indexInMpd;
}

/** Context needed when calling `parseAdaptationSets`. */
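
The sorting itself only happens once every AdaptationSet of the Period has been parsed: the new code accumulates `[parsedAdaptation, orderingData]` pairs per type, sorts each array with `compareAdaptations`, and strips the ordering metadata before returning. Here is a condensed sketch of that final step, using simplified placeholder names (`Ordering`, `orderPerType`) rather than the actual rx-player types.

// Condensed sketch of the per-type sort-then-strip step from the diff.
interface Ordering { priority : number;
                     isMainAdaptation : boolean;
                     indexInMpd : number; }

function orderPerType<T>(
  perType : Record<string, Array<[T, Ordering]>>,
  compare : (a : [T, Ordering], b : [T, Ordering]) => number
) : Record<string, T[]> {
  const result : Record<string, T[]> = {};
  for (const type of Object.keys(perType)) {
    const tuples = perType[type];
    if (tuples.length > 0) {
      // Order by priority, then "main" Role, then MPD index...
      tuples.sort(compare);
      // ...then keep only the parsed Adaptations, dropping the metadata.
      result[type] = tuples.map(([adaptation]) => adaptation);
    }
  }
  return result;
}

Called as `orderPerType(parsedAdaptations, compareAdaptations)`, this mirrors the shape of the `adaptationsPerType` object built by the `reduce` call in the diff.
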
@@ -380,17 +380,18 @@
</AdaptationSet>


<!-- audio de mp4a.40.2 priority 100 -->
<!-- audio fr mp4a.40.5 audioDescription priority 100 -->
<AdaptationSet
group="1"
contentType="audio"
lang="de"
lang="fr"
segmentAlignment="true"
audioSamplingRate="44100"
mimeType="audio/mp4"
codecs="mp4a.40.2"
selectionPriority="100"
codecs="mp4a.40.5"
startWithSAP="1">
<Accessibility schemeIdUri="urn:tva:metadata:cs:AudioPurposeCS:2007" value= "1" />
<AudioChannelConfiguration schemeIdUri="urn:mpeg:dash:23003:3:audio_channel_configuration:2011" value="2"></AudioChannelConfiguration>
<SegmentTemplate timescale="44100" initialization="ateam-$RepresentationID$.dash" media="ateam-$RepresentationID$-$Time$.dash">
<SegmentTimeline>
@@ -401,17 +402,17 @@
</AdaptationSet>


<!-- audio fr mp4a.40.5 audioDescription -->
<!-- audio de mp4a.40.2 priority 100 -->
<AdaptationSet
group="1"
contentType="audio"
lang="fr"
lang="de"
segmentAlignment="true"
audioSamplingRate="44100"
mimeType="audio/mp4"
codecs="mp4a.40.5"
codecs="mp4a.40.2"
selectionPriority="100"
startWithSAP="1">
<Accessibility schemeIdUri="urn:tva:metadata:cs:AudioPurposeCS:2007" value= "1" />
<AudioChannelConfiguration schemeIdUri="urn:mpeg:dash:23003:3:audio_channel_configuration:2011" value="2"></AudioChannelConfiguration>
<SegmentTemplate timescale="44100" initialization="ateam-$RepresentationID$.dash" media="ateam-$RepresentationID$-$Time$.dash">
<SegmentTimeline>
@@ -485,6 +486,27 @@
</AdaptationSet>


<!-- audio be mp4a.40.2 audioDescription -->
<AdaptationSet
group="1"
contentType="audio"
lang="be"
segmentAlignment="true"
audioSamplingRate="44100"
mimeType="audio/mp4"
codecs="mp4a.40.2"
startWithSAP="1">
<Accessibility schemeIdUri="urn:tva:metadata:cs:AudioPurposeCS:2007" value= "1" />
<Role schemeIdUri="urn:mpeg:dash:role:2011" value="main" />
<AudioChannelConfiguration schemeIdUri="urn:mpeg:dash:23003:3:audio_channel_configuration:2011" value="2"></AudioChannelConfiguration>
<SegmentTemplate timescale="44100" initialization="ateam-$RepresentationID$.dash" media="ateam-$RepresentationID$-$Time$.dash">
<SegmentTimeline>
<S t="0" d="177341" /><S d="176128" /><S d="177152" /><S d="176128" /><S d="177152" /><S d="176128" r="1" /><S d="177152" /><S d="176128" /><S d="177152" /><S d="176128" /><S d="177152" /><S d="176128" r="1" /><S d="177152" /><S d="176128" /><S d="177152" /><S d="176128" /><S d="177152" /><S d="176128" /><S d="177152" /><S d="176128" r="1" /><S d="177152" /><S d="176128" /><S d="64512" />
</SegmentTimeline>
</SegmentTemplate>
<Representation id="audio=128000" bandwidth="128000"></Representation>
</AdaptationSet>

<!-- audio be mp4a.40.2 main -->
<AdaptationSet
group="1"
@@ -639,50 +661,49 @@
</AdaptationSet>



<!-- video main avc1.640028 -->
<!-- video avc1 + priority 50 + sign interpreted -->
<AdaptationSet
group="2"
contentType="video"
par="40:17"
minBandwidth="400000"
maxBandwidth="1996000"
maxBandwidth="795000"
maxWidth="2221"
maxHeight="944"
segmentAlignment="true"
mimeType="video/mp4"
selectionPriority="50"
startWithSAP="1">
<Role schemeIdUri="urn:mpeg:dash:role:2011" value="main" />
<Accessibility schemeIdUri="urn:mpeg:dash:role:2011" value= "sign" />
<SegmentTemplate timescale="1000" initialization="ateam-$RepresentationID$.dash" media="ateam-$RepresentationID$-$Time$.dash">
<SegmentTimeline><S t="0" d="4004" r="24" /><S d="1376" /></SegmentTimeline>
</SegmentTemplate>
<Representation id="video=1996000" bandwidth="1996000" width="1680" height="944" sar="472:357" codecs="avc1.640028" scanType="progressive"></Representation>
<Representation id="video=400000" bandwidth="400000" width="220" height="124" sar="248:187" codecs="avc1.42C014" scanType="progressive"></Representation>
<Representation id="video=795000" bandwidth="795000" width="368" height="208" sar="520:391" codecs="avc1.42C014" scanType="progressive"></Representation>
</AdaptationSet>


<!-- video avc1 + priority 50 + sign interpreted -->
<!-- video main + priority 50 + avc1.640028 -->
<AdaptationSet
group="2"
contentType="video"
par="40:17"
minBandwidth="400000"
maxBandwidth="795000"
maxBandwidth="1996000"
maxWidth="2221"
maxHeight="944"
segmentAlignment="true"
mimeType="video/mp4"
selectionPriority="50"
startWithSAP="1">
<Accessibility schemeIdUri="urn:mpeg:dash:role:2011" value= "sign" />
<Role schemeIdUri="urn:mpeg:dash:role:2011" value="main" />
<SegmentTemplate timescale="1000" initialization="ateam-$RepresentationID$.dash" media="ateam-$RepresentationID$-$Time$.dash">
<SegmentTimeline><S t="0" d="4004" r="24" /><S d="1376" /></SegmentTimeline>
</SegmentTemplate>
<Representation id="video=1996000" bandwidth="1996000" width="1680" height="944" sar="472:357" codecs="avc1.640028" scanType="progressive"></Representation>
<Representation id="video=400000" bandwidth="400000" width="220" height="124" sar="248:187" codecs="avc1.42C014" scanType="progressive"></Representation>
<Representation id="video=795000" bandwidth="795000" width="368" height="208" sar="520:391" codecs="avc1.42C014" scanType="progressive"></Representation>
</AdaptationSet>


<!-- video avc1.640028 + priority 2 + sign interpreted -->
<AdaptationSet
group="2"
@@ -107,6 +107,7 @@
maxBandwidth="1996000"
maxWidth="2221"
maxHeight="944"
selectionPriority="2"
segmentAlignment="true"
mimeType="video/mp4"
startWithSAP="1">