/*** @license* Video.js 8.10.0 <http://videojs.com/>* Copyright Brightcove, Inc. <https://www.brightcove.com/>* Available under Apache License Version 2.0* <https://github.com/videojs/video.js/blob/main/LICENSE>** Includes vtt.js <https://github.com/mozilla/vtt.js>* Available under Apache License Version 2.0* <https://github.com/mozilla/vtt.js/blob/main/LICENSE>*/(function (global, factory) {typeof exports === 'object' && typeof module !== 'undefined' ? module.exports = factory() :typeof define === 'function' && define.amd ? define(factory) :(global = typeof globalThis !== 'undefined' ? globalThis : global || self, global.videojs = factory());})(this, (function () { 'use strict';var version$5 = "8.10.0";/*** An Object that contains lifecycle hooks as keys which point to an array* of functions that are run when a lifecycle is triggered** @private*/const hooks_ = {};/*** Get a list of hooks for a specific lifecycle** @param {string} type* the lifecycle to get hooks from** @param {Function|Function[]} [fn]* Optionally add a hook (or hooks) to the lifecycle that your are getting.** @return {Array}* an array of hooks, or an empty array if there are none.*/const hooks = function (type, fn) {hooks_[type] = hooks_[type] || [];if (fn) {hooks_[type] = hooks_[type].concat(fn);}return hooks_[type];};/*** Add a function hook to a specific videojs lifecycle.** @param {string} type* the lifecycle to hook the function to.** @param {Function|Function[]}* The function or array of functions to attach.*/const hook = function (type, fn) {hooks(type, fn);};/*** Remove a hook from a specific videojs lifecycle.** @param {string} type* the lifecycle that the function hooked to** @param {Function} fn* The hooked function to remove** @return {boolean}* The function that was removed or undef*/const removeHook = function (type, fn) {const index = hooks(type).indexOf(fn);if (index <= -1) {return false;}hooks_[type] = hooks_[type].slice();hooks_[type].splice(index, 1);return true;};/*** Add a function hook that will only run once to a specific videojs lifecycle.** @param {string} type* the lifecycle to hook the function to.** @param {Function|Function[]}* The function or array of functions to attach.*/const hookOnce = function (type, fn) {hooks(type, [].concat(fn).map(original => {const wrapper = (...args) => {removeHook(type, wrapper);return original(...args);};return wrapper;}));};/*** @file fullscreen-api.js* @module fullscreen-api*//*** Store the browser-specific methods for the fullscreen API.** @type {Object}* @see [Specification]{@link https://fullscreen.spec.whatwg.org}* @see [Map Approach From Screenfull.js]{@link https://github.com/sindresorhus/screenfull.js}*/const FullscreenApi = {prefixed: true};// browser API methodsconst apiMap = [['requestFullscreen', 'exitFullscreen', 'fullscreenElement', 'fullscreenEnabled', 'fullscreenchange', 'fullscreenerror', 'fullscreen'],// WebKit['webkitRequestFullscreen', 'webkitExitFullscreen', 'webkitFullscreenElement', 'webkitFullscreenEnabled', 'webkitfullscreenchange', 'webkitfullscreenerror', '-webkit-full-screen']];const specApi = apiMap[0];let browserApi;// determine the supported set of functionsfor (let i = 0; i < apiMap.length; i++) {// check for exitFullscreen functionif (apiMap[i][1] in document) {browserApi = apiMap[i];break;}}// map the browser API names to the spec API namesif (browserApi) {for (let i = 0; i < browserApi.length; i++) {FullscreenApi[specApi[i]] = browserApi[i];}FullscreenApi.prefixed = browserApi[0] !== specApi[0];}/*** @file create-logger.js* 
@module create-logger*/// This is the private tracking variable for the logging history.let history = [];/*** Log messages to the console and history based on the type of message** @private* @param {string} name* The name of the console method to use.** @param {Object} log* The arguments to be passed to the matching console method.** @param {string} [styles]* styles for name*/const LogByTypeFactory = (name, log, styles) => (type, level, args) => {const lvl = log.levels[level];const lvlRegExp = new RegExp(`^(${lvl})$`);let resultName = name;if (type !== 'log') {// Add the type to the front of the message when it's not "log".args.unshift(type.toUpperCase() + ':');}if (styles) {resultName = `%c${name}`;args.unshift(styles);}// Add console prefix after adding to history.args.unshift(resultName + ':');// Add a clone of the args at this point to history.if (history) {history.push([].concat(args));// only store 1000 history entriesconst splice = history.length - 1000;history.splice(0, splice > 0 ? splice : 0);}// If there's no console then don't try to output messages, but they will// still be stored in history.if (!window.console) {return;}// Was setting these once outside of this function, but containing them// in the function makes it easier to test cases where console doesn't exist// when the module is executed.let fn = window.console[type];if (!fn && type === 'debug') {// Certain browsers don't have support for console.debug. For those, we// should default to the closest comparable log.fn = window.console.info || window.console.log;}// Bail out if there's no console or if this type is not allowed by the// current logging level.if (!fn || !lvl || !lvlRegExp.test(type)) {return;}fn[Array.isArray(args) ? 'apply' : 'call'](window.console, args);};function createLogger$1(name, delimiter = ':', styles = '') {// This is the private tracking variable for logging level.let level = 'info';// the curried logByType bound to the specific log and historylet logByType;/*** Logs plain debug messages. Similar to `console.log`.** Due to [limitations](https://github.com/jsdoc3/jsdoc/issues/955#issuecomment-313829149)* of our JSDoc template, we cannot properly document this as both a function* and a namespace, so its function signature is documented here.** #### Arguments* ##### *args* *[]** Any combination of values that could be passed to `console.log()`.** #### Return Value** `undefined`** @namespace* @param {...*} args* One or more messages or objects that should be logged.*/const log = function (...args) {logByType('log', level, args);};// This is the logByType helper that the logging methods below uselogByType = LogByTypeFactory(name, log, styles);/*** Create a new subLogger which chains the old name to the new name.** For example, doing `videojs.log.createLogger('player')` and then using that logger will log the following:* ```js* mylogger('foo');* // > VIDEOJS: player: foo* ```** @param {string} subName* The name to add call the new logger* @param {string} [subDelimiter]* Optional delimiter* @param {string} [subStyles]* Optional styles* @return {Object}*/log.createLogger = (subName, subDelimiter, subStyles) => {const resultDelimiter = subDelimiter !== undefined ? subDelimiter : delimiter;const resultStyles = subStyles !== undefined ? 
subStyles : styles;const resultName = `${name} ${resultDelimiter} ${subName}`;return createLogger$1(resultName, resultDelimiter, resultStyles);};/*** Create a new logger.** @param {string} newName* The name for the new logger* @param {string} [newDelimiter]* Optional delimiter* @param {string} [newStyles]* Optional styles* @return {Object}*/log.createNewLogger = (newName, newDelimiter, newStyles) => {return createLogger$1(newName, newDelimiter, newStyles);};/*** Enumeration of available logging levels, where the keys are the level names* and the values are `|`-separated strings containing logging methods allowed* in that logging level. These strings are used to create a regular expression* matching the function name being called.** Levels provided by Video.js are:** - `off`: Matches no calls. Any value that can be cast to `false` will have* this effect. The most restrictive.* - `all`: Matches only Video.js-provided functions (`debug`, `log`,* `log.warn`, and `log.error`).* - `debug`: Matches `log.debug`, `log`, `log.warn`, and `log.error` calls.* - `info` (default): Matches `log`, `log.warn`, and `log.error` calls.* - `warn`: Matches `log.warn` and `log.error` calls.* - `error`: Matches only `log.error` calls.** @type {Object}*/log.levels = {all: 'debug|log|warn|error',off: '',debug: 'debug|log|warn|error',info: 'log|warn|error',warn: 'warn|error',error: 'error',DEFAULT: level};/*** Get or set the current logging level.** If a string matching a key from {@link module:log.levels} is provided, acts* as a setter.** @param {'all'|'debug'|'info'|'warn'|'error'|'off'} [lvl]* Pass a valid level to set a new logging level.** @return {string}* The current logging level.*/log.level = lvl => {if (typeof lvl === 'string') {if (!log.levels.hasOwnProperty(lvl)) {throw new Error(`"${lvl}" in not a valid log level`);}level = lvl;}return level;};/*** Returns an array containing everything that has been logged to the history.** This array is a shallow clone of the internal history record. However, its* contents are _not_ cloned; so, mutating objects inside this array will* mutate them in history.** @return {Array}*/log.history = () => history ? [].concat(history) : [];/*** Allows you to filter the history by the given logger name** @param {string} fname* The name to filter by** @return {Array}* The filtered list to return*/log.history.filter = fname => {return (history || []).filter(historyItem => {// if the first item in each historyItem includes `fname`, then it's a matchreturn new RegExp(`.*${fname}.*`).test(historyItem[0]);});};/*** Clears the internal history tracking, but does not prevent further history* tracking.*/log.history.clear = () => {if (history) {history.length = 0;}};/*** Disable history tracking if it is currently enabled.*/log.history.disable = () => {if (history !== null) {history.length = 0;history = null;}};/*** Enable history tracking if it is currently disabled.*/log.history.enable = () => {if (history === null) {history = [];}};/*** Logs error messages. Similar to `console.error`.** @param {...*} args* One or more messages or objects that should be logged as an error*/log.error = (...args) => logByType('error', level, args);/*** Logs warning messages. Similar to `console.warn`.** @param {...*} args* One or more messages or objects that should be logged as a warning.*/log.warn = (...args) => logByType('warn', level, args);/*** Logs debug messages. 
Similar to `console.debug`, but may also act as a comparable* log if `console.debug` is not available** @param {...*} args* One or more messages or objects that should be logged as debug.*/log.debug = (...args) => logByType('debug', level, args);return log;}/*** @file log.js* @module log*/const log$1 = createLogger$1('VIDEOJS');const createLogger = log$1.createLogger;/*** @file obj.js* @module obj*//*** @callback obj:EachCallback** @param {*} value* The current key for the object that is being iterated over.** @param {string} key* The current key-value for object that is being iterated over*//*** @callback obj:ReduceCallback** @param {*} accum* The value that is accumulating over the reduce loop.** @param {*} value* The current key for the object that is being iterated over.** @param {string} key* The current key-value for object that is being iterated over** @return {*}* The new accumulated value.*/const toString$1 = Object.prototype.toString;/*** Get the keys of an Object** @param {Object}* The Object to get the keys from** @return {string[]}* An array of the keys from the object. Returns an empty array if the* object passed in was invalid or had no keys.** @private*/const keys = function (object) {return isObject$1(object) ? Object.keys(object) : [];};/*** Array-like iteration for objects.** @param {Object} object* The object to iterate over** @param {obj:EachCallback} fn* The callback function which is called for each key in the object.*/function each(object, fn) {keys(object).forEach(key => fn(object[key], key));}/*** Array-like reduce for objects.** @param {Object} object* The Object that you want to reduce.** @param {Function} fn* A callback function which is called for each key in the object. It* receives the accumulated value and the per-iteration value and key* as arguments.** @param {*} [initial = 0]* Starting value** @return {*}* The final accumulated value.*/function reduce(object, fn, initial = 0) {return keys(object).reduce((accum, key) => fn(accum, object[key], key), initial);}/*** Returns whether a value is an object of any kind - including DOM nodes,* arrays, regular expressions, etc. 
Not functions, though.** This avoids the gotcha where using `typeof` on a `null` value* results in `'object'`.** @param {Object} value* @return {boolean}*/function isObject$1(value) {return !!value && typeof value === 'object';}/*** Returns whether an object appears to be a "plain" object - that is, a* direct instance of `Object`.** @param {Object} value* @return {boolean}*/function isPlain(value) {return isObject$1(value) && toString$1.call(value) === '[object Object]' && value.constructor === Object;}/*** Merge two objects recursively.** Performs a deep merge like* {@link https://lodash.com/docs/4.17.10#merge|lodash.merge}, but only merges* plain objects (not arrays, elements, or anything else).** Non-plain object values will be copied directly from the right-most* argument.** @param {Object[]} sources* One or more objects to merge into a new object.** @return {Object}* A new object that is the merged result of all sources.*/function merge$2(...sources) {const result = {};sources.forEach(source => {if (!source) {return;}each(source, (value, key) => {if (!isPlain(value)) {result[key] = value;return;}if (!isPlain(result[key])) {result[key] = {};}result[key] = merge$2(result[key], value);});});return result;}/*** Returns an array of values for a given object** @param {Object} source - target object* @return {Array<unknown>} - object values*/function values$1(source = {}) {const result = [];for (const key in source) {if (source.hasOwnProperty(key)) {const value = source[key];result.push(value);}}return result;}/*** Object.defineProperty but "lazy", which means that the value is only set after* it is retrieved the first time, rather than being set right away.** @param {Object} obj the object to set the property on* @param {string} key the key for the property to set* @param {Function} getValue the function used to get the value when it is needed.* @param {boolean} setter whether a setter should be allowed or not*/function defineLazyProperty(obj, key, getValue, setter = true) {const set = value => Object.defineProperty(obj, key, {value,enumerable: true,writable: true});const options = {configurable: true,enumerable: true,get() {const value = getValue();set(value);return value;}};if (setter) {options.set = set;}return Object.defineProperty(obj, key, options);}var Obj = /*#__PURE__*/Object.freeze({__proto__: null,each: each,reduce: reduce,isObject: isObject$1,isPlain: isPlain,merge: merge$2,values: values$1,defineLazyProperty: defineLazyProperty});/*** @file browser.js* @module browser*//*** Whether or not this device is an iPod.** @static* @type {Boolean}*/let IS_IPOD = false;/*** The detected iOS version - or `null`.** @static* @type {string|null}*/let IOS_VERSION = null;/*** Whether or not this is an Android device.** @static* @type {Boolean}*/let IS_ANDROID = false;/*** The detected Android version - or `null` if not Android or indeterminable.** @static* @type {number|string|null}*/let ANDROID_VERSION;/*** Whether or not this is Mozilla Firefox.** @static* @type {Boolean}*/let IS_FIREFOX = false;/*** Whether or not this is Microsoft Edge.** @static* @type {Boolean}*/let IS_EDGE = false;/*** Whether or not this is any Chromium Browser** @static* @type {Boolean}*/let IS_CHROMIUM = false;/*** Whether or not this is any Chromium browser that is not Edge.** This will also be `true` for Chrome on iOS, which will have different support* as it is actually Safari under the hood.** Deprecated, as the behaviour to not match Edge was to prevent Legacy Edge's UA matching.* IS_CHROMIUM should be used 
instead.* "Chromium but not Edge" could be explicitly tested with IS_CHROMIUM && !IS_EDGE** @static* @deprecated* @type {Boolean}*/let IS_CHROME = false;/*** The detected Chromium version - or `null`.** @static* @type {number|null}*/let CHROMIUM_VERSION = null;/*** The detected Google Chrome version - or `null`.* This has always been the _Chromium_ version, i.e. would return on Chromium Edge.* Deprecated, use CHROMIUM_VERSION instead.** @static* @deprecated* @type {number|null}*/let CHROME_VERSION = null;/*** The detected Internet Explorer version - or `null`.** @static* @deprecated* @type {number|null}*/let IE_VERSION = null;/*** Whether or not this is desktop Safari.** @static* @type {Boolean}*/let IS_SAFARI = false;/*** Whether or not this is a Windows machine.** @static* @type {Boolean}*/let IS_WINDOWS = false;/*** Whether or not this device is an iPad.** @static* @type {Boolean}*/let IS_IPAD = false;/*** Whether or not this device is an iPhone.** @static* @type {Boolean}*/// The Facebook app's UIWebView identifies as both an iPhone and iPad, so// to identify iPhones, we need to exclude iPads.// http://artsy.github.io/blog/2012/10/18/the-perils-of-ios-user-agent-sniffing/let IS_IPHONE = false;/*** Whether or not this device is touch-enabled.** @static* @const* @type {Boolean}*/const TOUCH_ENABLED = Boolean(isReal() && ('ontouchstart' in window || window.navigator.maxTouchPoints || window.DocumentTouch && window.document instanceof window.DocumentTouch));const UAD = window.navigator && window.navigator.userAgentData;if (UAD && UAD.platform && UAD.brands) {// If userAgentData is present, use it instead of userAgent to avoid warnings// Currently only implemented on Chromium// userAgentData does not expose Android version, so ANDROID_VERSION remains `null`IS_ANDROID = UAD.platform === 'Android';IS_EDGE = Boolean(UAD.brands.find(b => b.brand === 'Microsoft Edge'));IS_CHROMIUM = Boolean(UAD.brands.find(b => b.brand === 'Chromium'));IS_CHROME = !IS_EDGE && IS_CHROMIUM;CHROMIUM_VERSION = CHROME_VERSION = (UAD.brands.find(b => b.brand === 'Chromium') || {}).version || null;IS_WINDOWS = UAD.platform === 'Windows';}// If the browser is not Chromium, either userAgentData is not present which could be an old Chromium browser,// or it's a browser that has added userAgentData since that we don't have tests for yet. In either case,// the checks need to be made agiainst the regular userAgent string.if (!IS_CHROMIUM) {const USER_AGENT = window.navigator && window.navigator.userAgent || '';IS_IPOD = /iPod/i.test(USER_AGENT);IOS_VERSION = function () {const match = USER_AGENT.match(/OS (\d+)_/i);if (match && match[1]) {return match[1];}return null;}();IS_ANDROID = /Android/i.test(USER_AGENT);ANDROID_VERSION = function () {// This matches Android Major.Minor.Patch versions// ANDROID_VERSION is Major.Minor as a Number, if Minor isn't available, then only Major is returnedconst match = USER_AGENT.match(/Android (\d+)(?:\.(\d+))?(?:\.(\d+))*/i);if (!match) {return null;}const major = match[1] && parseFloat(match[1]);const minor = match[2] && parseFloat(match[2]);if (major && minor) {return parseFloat(match[1] + '.' 
+ match[2]);} else if (major) {return major;}return null;}();IS_FIREFOX = /Firefox/i.test(USER_AGENT);IS_EDGE = /Edg/i.test(USER_AGENT);IS_CHROMIUM = /Chrome/i.test(USER_AGENT) || /CriOS/i.test(USER_AGENT);IS_CHROME = !IS_EDGE && IS_CHROMIUM;CHROMIUM_VERSION = CHROME_VERSION = function () {const match = USER_AGENT.match(/(Chrome|CriOS)\/(\d+)/);if (match && match[2]) {return parseFloat(match[2]);}return null;}();IE_VERSION = function () {const result = /MSIE\s(\d+)\.\d/.exec(USER_AGENT);let version = result && parseFloat(result[1]);if (!version && /Trident\/7.0/i.test(USER_AGENT) && /rv:11.0/.test(USER_AGENT)) {// IE 11 has a different user agent string than other IE versionsversion = 11.0;}return version;}();IS_SAFARI = /Safari/i.test(USER_AGENT) && !IS_CHROME && !IS_ANDROID && !IS_EDGE;IS_WINDOWS = /Windows/i.test(USER_AGENT);IS_IPAD = /iPad/i.test(USER_AGENT) || IS_SAFARI && TOUCH_ENABLED && !/iPhone/i.test(USER_AGENT);IS_IPHONE = /iPhone/i.test(USER_AGENT) && !IS_IPAD;}/*** Whether or not this is an iOS device.** @static* @const* @type {Boolean}*/const IS_IOS = IS_IPHONE || IS_IPAD || IS_IPOD;/*** Whether or not this is any flavor of Safari - including iOS.** @static* @const* @type {Boolean}*/const IS_ANY_SAFARI = (IS_SAFARI || IS_IOS) && !IS_CHROME;var browser = /*#__PURE__*/Object.freeze({__proto__: null,get IS_IPOD () { return IS_IPOD; },get IOS_VERSION () { return IOS_VERSION; },get IS_ANDROID () { return IS_ANDROID; },get ANDROID_VERSION () { return ANDROID_VERSION; },get IS_FIREFOX () { return IS_FIREFOX; },get IS_EDGE () { return IS_EDGE; },get IS_CHROMIUM () { return IS_CHROMIUM; },get IS_CHROME () { return IS_CHROME; },get CHROMIUM_VERSION () { return CHROMIUM_VERSION; },get CHROME_VERSION () { return CHROME_VERSION; },get IE_VERSION () { return IE_VERSION; },get IS_SAFARI () { return IS_SAFARI; },get IS_WINDOWS () { return IS_WINDOWS; },get IS_IPAD () { return IS_IPAD; },get IS_IPHONE () { return IS_IPHONE; },TOUCH_ENABLED: TOUCH_ENABLED,IS_IOS: IS_IOS,IS_ANY_SAFARI: IS_ANY_SAFARI});/*** @file dom.js* @module dom*//*** Detect if a value is a string with any non-whitespace characters.** @private* @param {string} str* The string to check** @return {boolean}* Will be `true` if the string is non-blank, `false` otherwise.**/function isNonBlankString(str) {// we use str.trim as it will trim any whitespace characters// from the front or back of non-whitespace characters. aka// Any string that contains non-whitespace characters will// still contain them after `trim` but whitespace only strings// will have a length of 0, failing this check.return typeof str === 'string' && Boolean(str.trim());}/*** Throws an error if the passed string has whitespace. This is used by* class methods to be relatively consistent with the classList API.** @private* @param {string} str* The string to check for whitespace.** @throws {Error}* Throws an error if there is whitespace in the string.*/function throwIfWhitespace(str) {// str.indexOf instead of regex because str.indexOf is faster performance wise.if (str.indexOf(' ') >= 0) {throw new Error('class has illegal whitespace characters');}}/*** Whether the current DOM interface appears to be real (i.e. 
not simulated).** @return {boolean}* Will be `true` if the DOM appears to be real, `false` otherwise.*/function isReal() {// Both document and window will never be undefined thanks to `global`.return document === window.document;}/*** Determines, via duck typing, whether or not a value is a DOM element.** @param {*} value* The value to check.** @return {boolean}* Will be `true` if the value is a DOM element, `false` otherwise.*/function isEl(value) {return isObject$1(value) && value.nodeType === 1;}/*** Determines if the current DOM is embedded in an iframe.** @return {boolean}* Will be `true` if the DOM is embedded in an iframe, `false`* otherwise.*/function isInFrame() {// We need a try/catch here because Safari will throw errors when attempting// to get either `parent` or `self`try {return window.parent !== window.self;} catch (x) {return true;}}/*** Creates functions to query the DOM using a given method.** @private* @param {string} method* The method to create the query with.** @return {Function}* The query method*/function createQuerier(method) {return function (selector, context) {if (!isNonBlankString(selector)) {return document[method](null);}if (isNonBlankString(context)) {context = document.querySelector(context);}const ctx = isEl(context) ? context : document;return ctx[method] && ctx[method](selector);};}/*** Creates an element and applies properties, attributes, and inserts content.** @param {string} [tagName='div']* Name of tag to be created.** @param {Object} [properties={}]* Element properties to be applied.** @param {Object} [attributes={}]* Element attributes to be applied.** @param {ContentDescriptor} [content]* A content descriptor object.** @return {Element}* The element that was created.*/function createEl(tagName = 'div', properties = {}, attributes = {}, content) {const el = document.createElement(tagName);Object.getOwnPropertyNames(properties).forEach(function (propName) {const val = properties[propName];// Handle textContent since it's not supported everywhere and we have a// method for it.if (propName === 'textContent') {textContent(el, val);} else if (el[propName] !== val || propName === 'tabIndex') {el[propName] = val;}});Object.getOwnPropertyNames(attributes).forEach(function (attrName) {el.setAttribute(attrName, attributes[attrName]);});if (content) {appendContent(el, content);}return el;}/*** Injects text into an element, replacing any existing contents entirely.** @param {HTMLElement} el* The element to add text content into** @param {string} text* The text content to add.** @return {Element}* The element with added text content.*/function textContent(el, text) {if (typeof el.textContent === 'undefined') {el.innerText = text;} else {el.textContent = text;}return el;}/*** Insert an element as the first child node of another** @param {Element} child* Element to insert** @param {Element} parent* Element to insert child into*/function prependTo(child, parent) {if (parent.firstChild) {parent.insertBefore(child, parent.firstChild);} else {parent.appendChild(child);}}/*** Check if an element has a class name.** @param {Element} element* Element to check** @param {string} classToCheck* Class name to check for** @return {boolean}* Will be `true` if the element has a class, `false` otherwise.** @throws {Error}* Throws an error if `classToCheck` has white space.*/function hasClass(element, classToCheck) {throwIfWhitespace(classToCheck);return element.classList.contains(classToCheck);}/*** Add a class name to an element.** @param {Element} element* Element to add 
class name to.** @param {...string} classesToAdd* One or more class name to add.** @return {Element}* The DOM element with the added class name.*/function addClass(element, ...classesToAdd) {element.classList.add(...classesToAdd.reduce((prev, current) => prev.concat(current.split(/\s+/)), []));return element;}/*** Remove a class name from an element.** @param {Element} element* Element to remove a class name from.** @param {...string} classesToRemove* One or more class name to remove.** @return {Element}* The DOM element with class name removed.*/function removeClass(element, ...classesToRemove) {// Protect in case the player gets disposedif (!element) {log$1.warn("removeClass was called with an element that doesn't exist");return null;}element.classList.remove(...classesToRemove.reduce((prev, current) => prev.concat(current.split(/\s+/)), []));return element;}/*** The callback definition for toggleClass.** @callback module:dom~PredicateCallback* @param {Element} element* The DOM element of the Component.** @param {string} classToToggle* The `className` that wants to be toggled** @return {boolean|undefined}* If `true` is returned, the `classToToggle` will be added to the* `element`. If `false`, the `classToToggle` will be removed from* the `element`. If `undefined`, the callback will be ignored.*//*** Adds or removes a class name to/from an element depending on an optional* condition or the presence/absence of the class name.** @param {Element} element* The element to toggle a class name on.** @param {string} classToToggle* The class that should be toggled.** @param {boolean|module:dom~PredicateCallback} [predicate]* See the return value for {@link module:dom~PredicateCallback}** @return {Element}* The element with a class that has been toggled.*/function toggleClass(element, classToToggle, predicate) {if (typeof predicate === 'function') {predicate = predicate(element, classToToggle);}if (typeof predicate !== 'boolean') {predicate = undefined;}classToToggle.split(/\s+/).forEach(className => element.classList.toggle(className, predicate));return element;}/*** Apply attributes to an HTML element.** @param {Element} el* Element to add attributes to.** @param {Object} [attributes]* Attributes to be applied.*/function setAttributes(el, attributes) {Object.getOwnPropertyNames(attributes).forEach(function (attrName) {const attrValue = attributes[attrName];if (attrValue === null || typeof attrValue === 'undefined' || attrValue === false) {el.removeAttribute(attrName);} else {el.setAttribute(attrName, attrValue === true ? '' : attrValue);}});}/*** Get an element's attribute values, as defined on the HTML tag.** Attributes are not the same as properties. They're defined on the tag* or with setAttribute.** @param {Element} tag* Element from which to get tag attributes.** @return {Object}* All attributes of the element. 
Boolean attributes will be `true` or* `false`, others will be strings.*/function getAttributes(tag) {const obj = {};// known boolean attributes// we can check for matching boolean properties, but not all browsers// and not all tags know about these attributes, so, we still want to check them manuallyconst knownBooleans = ['autoplay', 'controls', 'playsinline', 'loop', 'muted', 'default', 'defaultMuted'];if (tag && tag.attributes && tag.attributes.length > 0) {const attrs = tag.attributes;for (let i = attrs.length - 1; i >= 0; i--) {const attrName = attrs[i].name;/** @type {boolean|string} */let attrVal = attrs[i].value;// check for known booleans// the matching element property will return a value for typeofif (knownBooleans.includes(attrName)) {// the value of an included boolean attribute is typically an empty// string ('') which would equal false if we just check for a false value.// we also don't want support bad code like autoplay='false'attrVal = attrVal !== null ? true : false;}obj[attrName] = attrVal;}}return obj;}/*** Get the value of an element's attribute.** @param {Element} el* A DOM element.** @param {string} attribute* Attribute to get the value of.** @return {string}* The value of the attribute.*/function getAttribute(el, attribute) {return el.getAttribute(attribute);}/*** Set the value of an element's attribute.** @param {Element} el* A DOM element.** @param {string} attribute* Attribute to set.** @param {string} value* Value to set the attribute to.*/function setAttribute(el, attribute, value) {el.setAttribute(attribute, value);}/*** Remove an element's attribute.** @param {Element} el* A DOM element.** @param {string} attribute* Attribute to remove.*/function removeAttribute(el, attribute) {el.removeAttribute(attribute);}/*** Attempt to block the ability to select text.*/function blockTextSelection() {document.body.focus();document.onselectstart = function () {return false;};}/*** Turn off text selection blocking.*/function unblockTextSelection() {document.onselectstart = function () {return true;};}/*** Identical to the native `getBoundingClientRect` function, but ensures that* the method is supported at all (it is in all browsers we claim to support)* and that the element is in the DOM before continuing.** This wrapper function also shims properties which are not provided by some* older browsers (namely, IE8).** Additionally, some browsers do not support adding properties to a* `ClientRect`/`DOMRect` object; so, we shallow-copy it with the standard* properties (except `x` and `y` which are not widely supported). 
This helps* avoid implementations where keys are non-enumerable.** @param {Element} el* Element whose `ClientRect` we want to calculate.** @return {Object|undefined}* Always returns a plain object - or `undefined` if it cannot.*/function getBoundingClientRect(el) {if (el && el.getBoundingClientRect && el.parentNode) {const rect = el.getBoundingClientRect();const result = {};['bottom', 'height', 'left', 'right', 'top', 'width'].forEach(k => {if (rect[k] !== undefined) {result[k] = rect[k];}});if (!result.height) {result.height = parseFloat(computedStyle(el, 'height'));}if (!result.width) {result.width = parseFloat(computedStyle(el, 'width'));}return result;}}/*** Represents the position of a DOM element on the page.** @typedef {Object} module:dom~Position** @property {number} left* Pixels to the left.** @property {number} top* Pixels from the top.*//*** Get the position of an element in the DOM.** Uses `getBoundingClientRect` technique from John Resig.** @see http://ejohn.org/blog/getboundingclientrect-is-awesome/** @param {Element} el* Element from which to get offset.** @return {module:dom~Position}* The position of the element that was passed in.*/function findPosition(el) {if (!el || el && !el.offsetParent) {return {left: 0,top: 0,width: 0,height: 0};}const width = el.offsetWidth;const height = el.offsetHeight;let left = 0;let top = 0;while (el.offsetParent && el !== document[FullscreenApi.fullscreenElement]) {left += el.offsetLeft;top += el.offsetTop;el = el.offsetParent;}return {left,top,width,height};}/*** Represents x and y coordinates for a DOM element or mouse pointer.** @typedef {Object} module:dom~Coordinates** @property {number} x* x coordinate in pixels** @property {number} y* y coordinate in pixels*//*** Get the pointer position within an element.** The base on the coordinates are the bottom left of the element.** @param {Element} el* Element on which to get the pointer position on.** @param {Event} event* Event object.** @return {module:dom~Coordinates}* A coordinates object corresponding to the mouse position.**/function getPointerPosition(el, event) {const translated = {x: 0,y: 0};if (IS_IOS) {let item = el;while (item && item.nodeName.toLowerCase() !== 'html') {const transform = computedStyle(item, 'transform');if (/^matrix/.test(transform)) {const values = transform.slice(7, -1).split(/,\s/).map(Number);translated.x += values[4];translated.y += values[5];} else if (/^matrix3d/.test(transform)) {const values = transform.slice(9, -1).split(/,\s/).map(Number);translated.x += values[12];translated.y += values[13];}item = item.parentNode;}}const position = {};const boxTarget = findPosition(event.target);const box = findPosition(el);const boxW = box.width;const boxH = box.height;let offsetY = event.offsetY - (box.top - boxTarget.top);let offsetX = event.offsetX - (box.left - boxTarget.left);if (event.changedTouches) {offsetX = event.changedTouches[0].pageX - box.left;offsetY = event.changedTouches[0].pageY + box.top;if (IS_IOS) {offsetX -= translated.x;offsetY -= translated.y;}}position.y = 1 - Math.max(0, Math.min(1, offsetY / boxH));position.x = Math.max(0, Math.min(1, offsetX / boxW));return position;}/*** Determines, via duck typing, whether or not a value is a text node.** @param {*} value* Check if this value is a text node.** @return {boolean}* Will be `true` if the value is a text node, `false` otherwise.*/function isTextNode$1(value) {return isObject$1(value) && value.nodeType === 3;}/*** Empties the contents of an element.** @param {Element} el* The element to empty 
children from** @return {Element}* The element with no children*/function emptyEl(el) {while (el.firstChild) {el.removeChild(el.firstChild);}return el;}/*** This is a mixed value that describes content to be injected into the DOM* via some method. It can be of the following types:** Type | Description* -----------|-------------* `string` | The value will be normalized into a text node.* `Element` | The value will be accepted as-is.* `Text` | A TextNode. The value will be accepted as-is.* `Array` | A one-dimensional array of strings, elements, text nodes, or functions. These functions should return a string, element, or text node (any other return value, like an array, will be ignored).* `Function` | A function, which is expected to return a string, element, text node, or array - any of the other possible values described above. This means that a content descriptor could be a function that returns an array of functions, but those second-level functions must return strings, elements, or text nodes.** @typedef {string|Element|Text|Array|Function} ContentDescriptor*//*** Normalizes content for eventual insertion into the DOM.** This allows a wide range of content definition methods, but helps protect* from falling into the trap of simply writing to `innerHTML`, which could* be an XSS concern.** The content for an element can be passed in multiple types and* combinations, whose behavior is as follows:** @param {ContentDescriptor} content* A content descriptor value.** @return {Array}* All of the content that was passed in, normalized to an array of* elements or text nodes.*/function normalizeContent(content) {// First, invoke content if it is a function. If it produces an array,// that needs to happen before normalization.if (typeof content === 'function') {content = content();}// Next up, normalize to an array, so one or many items can be normalized,// filtered, and returned.return (Array.isArray(content) ? 
content : [content]).map(value => {// First, invoke value if it is a function to produce a new value,// which will be subsequently normalized to a Node of some kind.if (typeof value === 'function') {value = value();}if (isEl(value) || isTextNode$1(value)) {return value;}if (typeof value === 'string' && /\S/.test(value)) {return document.createTextNode(value);}}).filter(value => value);}/*** Normalizes and appends content to an element.** @param {Element} el* Element to append normalized content to.** @param {ContentDescriptor} content* A content descriptor value.** @return {Element}* The element with appended normalized content.*/function appendContent(el, content) {normalizeContent(content).forEach(node => el.appendChild(node));return el;}/*** Normalizes and inserts content into an element; this is identical to* `appendContent()`, except it empties the element first.** @param {Element} el* Element to insert normalized content into.** @param {ContentDescriptor} content* A content descriptor value.** @return {Element}* The element with inserted normalized content.*/function insertContent(el, content) {return appendContent(emptyEl(el), content);}/*** Check if an event was a single left click.** @param {MouseEvent} event* Event object.** @return {boolean}* Will be `true` if a single left click, `false` otherwise.*/function isSingleLeftClick(event) {// Note: if you create something draggable, be sure to// call it on both `mousedown` and `mousemove` event,// otherwise `mousedown` should be enough for a buttonif (event.button === undefined && event.buttons === undefined) {// Why do we need `buttons` ?// Because, middle mouse sometimes have this:// e.button === 0 and e.buttons === 4// Furthermore, we want to prevent combination click, something like// HOLD middlemouse then left click, that would be// e.button === 0, e.buttons === 5// just `button` is not gonna work// Alright, then what this block does ?// this is for chrome `simulate mobile devices`// I want to support this as wellreturn true;}if (event.button === 0 && event.buttons === undefined) {// Touch screen, sometimes on some specific device, `buttons`// doesn't have anything (safari on ios, blackberry...)return true;}// `mouseup` event on a single left click has// `button` and `buttons` equal to 0if (event.type === 'mouseup' && event.button === 0 && event.buttons === 0) {return true;}if (event.button !== 0 || event.buttons !== 1) {// This is the reason we have those if else block above// if any special case we can catch and let it slide// we do it above, when get to here, this definitely// is-not-left-clickreturn false;}return true;}/*** Finds a single DOM element matching `selector` within the optional* `context` of another DOM element (defaulting to `document`).** @param {string} selector* A valid CSS selector, which will be passed to `querySelector`.** @param {Element|String} [context=document]* A DOM element within which to query. Can also be a selector* string in which case the first matching element will be used* as context. If missing (or no element matches selector), falls* back to `document`.** @return {Element|null}* The element that was found or null.*/const $ = createQuerier('querySelector');/*** Finds a all DOM elements matching `selector` within the optional* `context` of another DOM element (defaulting to `document`).** @param {string} selector* A valid CSS selector, which will be passed to `querySelectorAll`.** @param {Element|String} [context=document]* A DOM element within which to query. 
Can also be a selector* string in which case the first matching element will be used* as context. If missing (or no element matches selector), falls* back to `document`.** @return {NodeList}* A element list of elements that were found. Will be empty if none* were found.**/const $$ = createQuerier('querySelectorAll');/*** A safe getComputedStyle.** This is needed because in Firefox, if the player is loaded in an iframe with* `display:none`, then `getComputedStyle` returns `null`, so, we do a* null-check to make sure that the player doesn't break in these cases.** @param {Element} el* The element you want the computed style of** @param {string} prop* The property name you want** @see https://bugzilla.mozilla.org/show_bug.cgi?id=548397*/function computedStyle(el, prop) {if (!el || !prop) {return '';}if (typeof window.getComputedStyle === 'function') {let computedStyleValue;try {computedStyleValue = window.getComputedStyle(el);} catch (e) {return '';}return computedStyleValue ? computedStyleValue.getPropertyValue(prop) || computedStyleValue[prop] : '';}return '';}/*** Copy document style sheets to another window.** @param {Window} win* The window element you want to copy the document style sheets to.**/function copyStyleSheetsToWindow(win) {[...document.styleSheets].forEach(styleSheet => {try {const cssRules = [...styleSheet.cssRules].map(rule => rule.cssText).join('');const style = document.createElement('style');style.textContent = cssRules;win.document.head.appendChild(style);} catch (e) {const link = document.createElement('link');link.rel = 'stylesheet';link.type = styleSheet.type;// For older Safari this has to be the string; on other browsers setting the MediaList workslink.media = styleSheet.media.mediaText;link.href = styleSheet.href;win.document.head.appendChild(link);}});}var Dom = /*#__PURE__*/Object.freeze({__proto__: null,isReal: isReal,isEl: isEl,isInFrame: isInFrame,createEl: createEl,textContent: textContent,prependTo: prependTo,hasClass: hasClass,addClass: addClass,removeClass: removeClass,toggleClass: toggleClass,setAttributes: setAttributes,getAttributes: getAttributes,getAttribute: getAttribute,setAttribute: setAttribute,removeAttribute: removeAttribute,blockTextSelection: blockTextSelection,unblockTextSelection: unblockTextSelection,getBoundingClientRect: getBoundingClientRect,findPosition: findPosition,getPointerPosition: getPointerPosition,isTextNode: isTextNode$1,emptyEl: emptyEl,normalizeContent: normalizeContent,appendContent: appendContent,insertContent: insertContent,isSingleLeftClick: isSingleLeftClick,$: $,$$: $$,computedStyle: computedStyle,copyStyleSheetsToWindow: copyStyleSheetsToWindow});/*** @file setup.js - Functions for setting up a player without* user interaction based on the data-setup `attribute` of the video tag.** @module setup*/let _windowLoaded = false;let videojs$1;/*** Set up any tags that have a data-setup `attribute` when the player is started.*/const autoSetup = function () {if (videojs$1.options.autoSetup === false) {return;}const vids = Array.prototype.slice.call(document.getElementsByTagName('video'));const audios = Array.prototype.slice.call(document.getElementsByTagName('audio'));const divs = Array.prototype.slice.call(document.getElementsByTagName('video-js'));const mediaEls = vids.concat(audios, divs);// Check if any media elements existif (mediaEls && mediaEls.length > 0) {for (let i = 0, e = mediaEls.length; i < e; i++) {const mediaEl = mediaEls[i];// Check if element exists, has getAttribute func.if (mediaEl && mediaEl.getAttribute) 
{// Make sure this player hasn't already been set up.if (mediaEl.player === undefined) {const options = mediaEl.getAttribute('data-setup');// Check if data-setup attr exists.// We only auto-setup if they've added the data-setup attr.if (options !== null) {// Create new video.js instance.videojs$1(mediaEl);}}// If getAttribute isn't defined, we need to wait for the DOM.} else {autoSetupTimeout(1);break;}}// No videos were found, so keep looping unless page is finished loading.} else if (!_windowLoaded) {autoSetupTimeout(1);}};/*** Wait until the page is loaded before running autoSetup. This will be called in* autoSetup if `hasLoaded` returns false.** @param {number} wait* How long to wait in ms** @param {module:videojs} [vjs]* The videojs library function*/function autoSetupTimeout(wait, vjs) {// Protect against breakage in non-browser environmentsif (!isReal()) {return;}if (vjs) {videojs$1 = vjs;}window.setTimeout(autoSetup, wait);}/*** Used to set the internal tracking of window loaded state to true.** @private*/function setWindowLoaded() {_windowLoaded = true;window.removeEventListener('load', setWindowLoaded);}if (isReal()) {if (document.readyState === 'complete') {setWindowLoaded();} else {/*** Listen for the load event on window, and set _windowLoaded to true.** We use a standard event listener here to avoid incrementing the GUID* before any players are created.** @listens load*/window.addEventListener('load', setWindowLoaded);}}/*** @file stylesheet.js* @module stylesheet*//*** Create a DOM style element given a className for it.** @param {string} className* The className to add to the created style element.** @return {Element}* The element that was created.*/const createStyleElement = function (className) {const style = document.createElement('style');style.className = className;return style;};/*** Add text to a DOM element.** @param {Element} el* The Element to add text content to.** @param {string} content* The text to add to the element.*/const setTextContent = function (el, content) {if (el.styleSheet) {el.styleSheet.cssText = content;} else {el.textContent = content;}};/*** @file dom-data.js* @module dom-data*//*** Element Data Store.** Allows for binding data to an element without putting it directly on the* element. Ex. Event listeners are stored here.* (also from jsninja.com, slightly modified and updated for closure compiler)** @type {Object}* @private*/var DomData = new WeakMap();/*** @file guid.js* @module guid*/// Default value for GUIDs. This allows us to reset the GUID counter in tests.//// The initial GUID is 3 because some users have come to rely on the first// default player ID ending up as `vjs_video_3`.//// See: https://github.com/videojs/video.js/pull/6216const _initialGuid = 3;/*** Unique ID for an element or function** @type {Number}*/let _guid = _initialGuid;/*** Get a unique auto-incrementing ID by number that has not been returned before.** @return {number}* A new unique ID.*/function newGUID() {return _guid++;}/*** @file events.js. 
An Event System (John Resig - Secrets of a JS Ninja http://jsninja.com/)* (Original book version wasn't completely usable, so fixed some things and made Closure Compiler compatible)* This should work very similarly to jQuery's events, however it's based off the book version which isn't as* robust as jquery's, so there's probably some differences.** @file events.js* @module events*//*** Clean up the listener cache and dispatchers** @param {Element|Object} elem* Element to clean up** @param {string} type* Type of event to clean up*/function _cleanUpEvents(elem, type) {if (!DomData.has(elem)) {return;}const data = DomData.get(elem);// Remove the events of a particular type if there are none leftif (data.handlers[type].length === 0) {delete data.handlers[type];// data.handlers[type] = null;// Setting to null was causing an error with data.handlers// Remove the meta-handler from the elementif (elem.removeEventListener) {elem.removeEventListener(type, data.dispatcher, false);} else if (elem.detachEvent) {elem.detachEvent('on' + type, data.dispatcher);}}// Remove the events object if there are no types leftif (Object.getOwnPropertyNames(data.handlers).length <= 0) {delete data.handlers;delete data.dispatcher;delete data.disabled;}// Finally remove the element data if there is no data leftif (Object.getOwnPropertyNames(data).length === 0) {DomData.delete(elem);}}/*** Loops through an array of event types and calls the requested method for each type.** @param {Function} fn* The event method we want to use.** @param {Element|Object} elem* Element or object to bind listeners to** @param {string[]} types* Type of event to bind to.** @param {Function} callback* Event listener.*/function _handleMultipleEvents(fn, elem, types, callback) {types.forEach(function (type) {// Call the event method for each one of the typesfn(elem, type, callback);});}/*** Fix a native event to have standard property values** @param {Object} event* Event object to fix.** @return {Object}* Fixed event object.*/function fixEvent(event) {if (event.fixed_) {return event;}function returnTrue() {return true;}function returnFalse() {return false;}// Test if fixing up is needed// Used to check if !event.stopPropagation instead of isPropagationStopped// But native events return true for stopPropagation, but don't have// other expected methods like isPropagationStopped. Seems to be a problem// with the Javascript Ninja code. 
So we're just overriding all events now.if (!event || !event.isPropagationStopped || !event.isImmediatePropagationStopped) {const old = event || window.event;event = {};// Clone the old object so that we can modify the values event = {};// IE8 Doesn't like when you mess with native event properties// Firefox returns false for event.hasOwnProperty('type') and other props// which makes copying more difficult.// TODO: Probably best to create a whitelist of event propsfor (const key in old) {// Safari 6.0.3 warns you if you try to copy deprecated layerX/Y// Chrome warns you if you try to copy deprecated keyboardEvent.keyLocation// and webkitMovementX/Y// Lighthouse complains if Event.path is copiedif (key !== 'layerX' && key !== 'layerY' && key !== 'keyLocation' && key !== 'webkitMovementX' && key !== 'webkitMovementY' && key !== 'path') {// Chrome 32+ warns if you try to copy deprecated returnValue, but// we still want to if preventDefault isn't supported (IE8).if (!(key === 'returnValue' && old.preventDefault)) {event[key] = old[key];}}}// The event occurred on this elementif (!event.target) {event.target = event.srcElement || document;}// Handle which other element the event is related toif (!event.relatedTarget) {event.relatedTarget = event.fromElement === event.target ? event.toElement : event.fromElement;}// Stop the default browser actionevent.preventDefault = function () {if (old.preventDefault) {old.preventDefault();}event.returnValue = false;old.returnValue = false;event.defaultPrevented = true;};event.defaultPrevented = false;// Stop the event from bubblingevent.stopPropagation = function () {if (old.stopPropagation) {old.stopPropagation();}event.cancelBubble = true;old.cancelBubble = true;event.isPropagationStopped = returnTrue;};event.isPropagationStopped = returnFalse;// Stop the event from bubbling and executing other handlersevent.stopImmediatePropagation = function () {if (old.stopImmediatePropagation) {old.stopImmediatePropagation();}event.isImmediatePropagationStopped = returnTrue;event.stopPropagation();};event.isImmediatePropagationStopped = returnFalse;// Handle mouse positionif (event.clientX !== null && event.clientX !== undefined) {const doc = document.documentElement;const body = document.body;event.pageX = event.clientX + (doc && doc.scrollLeft || body && body.scrollLeft || 0) - (doc && doc.clientLeft || body && body.clientLeft || 0);event.pageY = event.clientY + (doc && doc.scrollTop || body && body.scrollTop || 0) - (doc && doc.clientTop || body && body.clientTop || 0);}// Handle key pressesevent.which = event.charCode || event.keyCode;// Fix button for mouse clicks:// 0 == left; 1 == middle; 2 == rightif (event.button !== null && event.button !== undefined) {// The following is disabled because it does not pass videojs-standard// and... yikes./* eslint-disable */event.button = event.button & 1 ? 0 : event.button & 4 ? 1 : event.button & 2 ? 
2 : 0;/* eslint-enable */}}event.fixed_ = true;// Returns fixed-up instancereturn event;}/*** Whether passive event listeners are supported*/let _supportsPassive;const supportsPassive = function () {if (typeof _supportsPassive !== 'boolean') {_supportsPassive = false;try {const opts = Object.defineProperty({}, 'passive', {get() {_supportsPassive = true;}});window.addEventListener('test', null, opts);window.removeEventListener('test', null, opts);} catch (e) {// disregard}}return _supportsPassive;};/*** Touch events Chrome expects to be passive*/const passiveEvents = ['touchstart', 'touchmove'];/*** Add an event listener to element* It stores the handler function in a separate cache object* and adds a generic handler to the element's event,* along with a unique id (guid) to the element.** @param {Element|Object} elem* Element or object to bind listeners to** @param {string|string[]} type* Type of event to bind to.** @param {Function} fn* Event listener.*/function on(elem, type, fn) {if (Array.isArray(type)) {return _handleMultipleEvents(on, elem, type, fn);}if (!DomData.has(elem)) {DomData.set(elem, {});}const data = DomData.get(elem);// We need a place to store all our handler dataif (!data.handlers) {data.handlers = {};}if (!data.handlers[type]) {data.handlers[type] = [];}if (!fn.guid) {fn.guid = newGUID();}data.handlers[type].push(fn);if (!data.dispatcher) {data.disabled = false;data.dispatcher = function (event, hash) {if (data.disabled) {return;}event = fixEvent(event);const handlers = data.handlers[event.type];if (handlers) {// Copy handlers so if handlers are added/removed during the process it doesn't throw everything off.const handlersCopy = handlers.slice(0);for (let m = 0, n = handlersCopy.length; m < n; m++) {if (event.isImmediatePropagationStopped()) {break;} else {try {handlersCopy[m].call(elem, event, hash);} catch (e) {log$1.error(e);}}}}};}if (data.handlers[type].length === 1) {if (elem.addEventListener) {let options = false;if (supportsPassive() && passiveEvents.indexOf(type) > -1) {options = {passive: true};}elem.addEventListener(type, data.dispatcher, options);} else if (elem.attachEvent) {elem.attachEvent('on' + type, data.dispatcher);}}}/*** Removes event listeners from an element** @param {Element|Object} elem* Object to remove listeners from.** @param {string|string[]} [type]* Type of listener to remove. Don't include to remove all events from element.** @param {Function} [fn]* Specific listener to remove. 
Don't include to remove listeners for an event* type.*/function off(elem, type, fn) {// Don't want to add a cache object through getElData if not neededif (!DomData.has(elem)) {return;}const data = DomData.get(elem);// If no events exist, nothing to unbindif (!data.handlers) {return;}if (Array.isArray(type)) {return _handleMultipleEvents(off, elem, type, fn);}// Utility functionconst removeType = function (el, t) {data.handlers[t] = [];_cleanUpEvents(el, t);};// Are we removing all bound events?if (type === undefined) {for (const t in data.handlers) {if (Object.prototype.hasOwnProperty.call(data.handlers || {}, t)) {removeType(elem, t);}}return;}const handlers = data.handlers[type];// If no handlers exist, nothing to unbindif (!handlers) {return;}// If no listener was provided, remove all listeners for typeif (!fn) {removeType(elem, type);return;}// We're only removing a single handlerif (fn.guid) {for (let n = 0; n < handlers.length; n++) {if (handlers[n].guid === fn.guid) {handlers.splice(n--, 1);}}}_cleanUpEvents(elem, type);}/*** Trigger an event for an element** @param {Element|Object} elem* Element to trigger an event on** @param {EventTarget~Event|string} event* A string (the type) or an event object with a type attribute** @param {Object} [hash]* data hash to pass along with the event** @return {boolean|undefined}* Returns the opposite of `defaultPrevented` if default was* prevented. Otherwise, returns `undefined`*/function trigger(elem, event, hash) {// Fetches element data and a reference to the parent (for bubbling).// Don't want to add a data object to cache for every parent,// so checking hasElData first.const elemData = DomData.has(elem) ? DomData.get(elem) : {};const parent = elem.parentNode || elem.ownerDocument;// type = event.type || event,// handler;// If an event name was passed as a string, creates an event out of itif (typeof event === 'string') {event = {type: event,target: elem};} else if (!event.target) {event.target = elem;}// Normalizes the event properties.event = fixEvent(event);// If the passed element has a dispatcher, executes the established handlers.if (elemData.dispatcher) {elemData.dispatcher.call(elem, event, hash);}// Unless explicitly stopped or the event does not bubble (e.g. 
media events)// recursively calls this function to bubble the event up the DOM.if (parent && !event.isPropagationStopped() && event.bubbles === true) {trigger.call(null, parent, event, hash);// If at the top of the DOM, triggers the default action unless disabled.} else if (!parent && !event.defaultPrevented && event.target && event.target[event.type]) {if (!DomData.has(event.target)) {DomData.set(event.target, {});}const targetData = DomData.get(event.target);// Checks if the target has a default action for this event.if (event.target[event.type]) {// Temporarily disables event dispatching on the target as we have already executed the handler.targetData.disabled = true;// Executes the default action.if (typeof event.target[event.type] === 'function') {event.target[event.type]();}// Re-enables event dispatching.targetData.disabled = false;}}// Inform the triggerer if the default was prevented by returning falsereturn !event.defaultPrevented;}/*** Trigger a listener only once for an event.** @param {Element|Object} elem* Element or object to bind to.** @param {string|string[]} type* Name/type of event** @param {Event~EventListener} fn* Event listener function*/function one(elem, type, fn) {if (Array.isArray(type)) {return _handleMultipleEvents(one, elem, type, fn);}const func = function () {off(elem, type, func);fn.apply(this, arguments);};// copy the guid to the new function so it can removed using the original function's IDfunc.guid = fn.guid = fn.guid || newGUID();on(elem, type, func);}/*** Trigger a listener only once and then turn if off for all* configured events** @param {Element|Object} elem* Element or object to bind to.** @param {string|string[]} type* Name/type of event** @param {Event~EventListener} fn* Event listener function*/function any(elem, type, fn) {const func = function () {off(elem, type, func);fn.apply(this, arguments);};// copy the guid to the new function so it can removed using the original function's IDfunc.guid = fn.guid = fn.guid || newGUID();// multiple ons, but one off for everythingon(elem, type, func);}var Events = /*#__PURE__*/Object.freeze({__proto__: null,fixEvent: fixEvent,on: on,off: off,trigger: trigger,one: one,any: any});/*** @file fn.js* @module fn*/const UPDATE_REFRESH_INTERVAL = 30;/*** A private, internal-only function for changing the context of a function.** It also stores a unique id on the function so it can be easily removed from* events.** @private* @function* @param {*} context* The object to bind as scope.** @param {Function} fn* The function to be bound to a scope.** @param {number} [uid]* An optional unique ID for the function to be set** @return {Function}* The new function that will be bound into the context given*/const bind_ = function (context, fn, uid) {// Make sure the function has a unique IDif (!fn.guid) {fn.guid = newGUID();}// Create the new function that changes the contextconst bound = fn.bind(context);// Allow for the ability to individualize this function// Needed in the case where multiple objects might share the same prototype// IF both items add an event listener with the same function, then you try to remove just one// it will remove both because they both have the same guid.// when using this, you need to use the bind method when you remove the listener as well.// currently used in text tracksbound.guid = uid ? 
uid + '_' + fn.guid : fn.guid;return bound;};/*** Wraps the given function, `fn`, with a new function that only invokes `fn`* at most once per every `wait` milliseconds.** @function* @param {Function} fn* The function to be throttled.** @param {number} wait* The number of milliseconds by which to throttle.** @return {Function}*/const throttle = function (fn, wait) {let last = window.performance.now();const throttled = function (...args) {const now = window.performance.now();if (now - last >= wait) {fn(...args);last = now;}};return throttled;};/*** Creates a debounced function that delays invoking `func` until after `wait`* milliseconds have elapsed since the last time the debounced function was* invoked.** Inspired by lodash and underscore implementations.** @function* @param {Function} func* The function to wrap with debounce behavior.** @param {number} wait* The number of milliseconds to wait after the last invocation.** @param {boolean} [immediate]* Whether or not to invoke the function immediately upon creation.** @param {Object} [context=window]* The "context" in which the debounced function should debounce. For* example, if this function should be tied to a Video.js player,* the player can be passed here. Alternatively, defaults to the* global `window` object.** @return {Function}* A debounced function.*/const debounce = function (func, wait, immediate, context = window) {let timeout;const cancel = () => {context.clearTimeout(timeout);timeout = null;};/* eslint-disable consistent-this */const debounced = function () {const self = this;const args = arguments;let later = function () {timeout = null;later = null;if (!immediate) {func.apply(self, args);}};if (!timeout && immediate) {func.apply(self, args);}context.clearTimeout(timeout);timeout = context.setTimeout(later, wait);};/* eslint-enable consistent-this */debounced.cancel = cancel;return debounced;};var Fn = /*#__PURE__*/Object.freeze({__proto__: null,UPDATE_REFRESH_INTERVAL: UPDATE_REFRESH_INTERVAL,bind_: bind_,throttle: throttle,debounce: debounce});/*** @file src/js/event-target.js*/let EVENT_MAP;/*** `EventTarget` is a class that can have the same API as the DOM `EventTarget`. It* adds shorthand functions that wrap around lengthy functions. For example:* the `on` function is a wrapper around `addEventListener`.** @see [EventTarget Spec]{@link https://www.w3.org/TR/DOM-Level-2-Events/events.html#Events-EventTarget}* @class EventTarget*/class EventTarget$2 {/*** Adds an `event listener` to an instance of an `EventTarget`. An `event listener` is a* function that will get called when an event with a certain name gets triggered.** @param {string|string[]} type* An event name or an array of event names.** @param {Function} fn* The function to call with `EventTarget`s*/on(type, fn) {// Remove the addEventListener alias before calling Events.on// so we don't get into an infinite type loopconst ael = this.addEventListener;this.addEventListener = () => {};on(this, type, fn);this.addEventListener = ael;}/*** Removes an `event listener` for a specific event from an instance of `EventTarget`.* This makes it so that the `event listener` will no longer get called when the* named event happens.** @param {string|string[]} type* An event name or an array of event names.** @param {Function} fn* The function to remove.*/off(type, fn) {off(this, type, fn);}/*** This function will add an `event listener` that gets triggered only once. After the* first trigger it will get removed. 
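*
* A small sketch, where `EventTarget` refers to this class (bundled here
* under the name `EventTarget$2`):
*
* @example
* class Beeper extends EventTarget {}
* const beeper = new Beeper();
* beeper.one('beep', () => console.log('heard exactly one beep'));
* beeper.trigger('beep'); // listener runs, then is removed
* beeper.trigger('beep'); // no listener left, nothing happens
*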
This is like adding an `event listener`* with {@link EventTarget#on} that calls {@link EventTarget#off} on itself.** @param {string|string[]} type* An event name or an array of event names.** @param {Function} fn* The function to be called once for each event name.*/one(type, fn) {// Remove the addEventListener aliasing Events.on// so we don't get into an infinite type loopconst ael = this.addEventListener;this.addEventListener = () => {};one(this, type, fn);this.addEventListener = ael;}/*** This function will add an `event listener` that gets triggered only once and is* removed from all events. This is like adding an array of `event listener`s* with {@link EventTarget#on} that calls {@link EventTarget#off} on all events the* first time it is triggered.** @param {string|string[]} type* An event name or an array of event names.** @param {Function} fn* The function to be called once for each event name.*/any(type, fn) {// Remove the addEventListener aliasing Events.on// so we don't get into an infinite type loopconst ael = this.addEventListener;this.addEventListener = () => {};any(this, type, fn);this.addEventListener = ael;}/*** This function causes an event to happen. This will then cause any `event listeners`* that are waiting for that event, to get called. If there are no `event listeners`* for an event then nothing will happen.** If the name of the `Event` that is being triggered is in `EventTarget.allowedEvents_`.* Trigger will also call the `on` + `uppercaseEventName` function.** Example:* 'click' is in `EventTarget.allowedEvents_`, so, trigger will attempt to call* `onClick` if it exists.** @param {string|EventTarget~Event|Object} event* The name of the event, an `Event`, or an object with a key of type set to* an event name.*/trigger(event) {const type = event.type || event;// deprecation// In a future version we should default target to `this`// similar to how we default the target to `elem` in// `Events.trigger`. Right now the default `target` will be// `document` due to the `Event.fixEvent` call.if (typeof event === 'string') {event = {type};}event = fixEvent(event);if (this.allowedEvents_[type] && this['on' + type]) {this['on' + type](event);}trigger(this, event);}queueTrigger(event) {// only set up EVENT_MAP if it'll be usedif (!EVENT_MAP) {EVENT_MAP = new Map();}const type = event.type || event;let map = EVENT_MAP.get(this);if (!map) {map = new Map();EVENT_MAP.set(this, map);}const oldTimeout = map.get(type);map.delete(type);window.clearTimeout(oldTimeout);const timeout = window.setTimeout(() => {map.delete(type);// if we cleared out all timeouts for the current target, delete its mapif (map.size === 0) {map = null;EVENT_MAP.delete(this);}this.trigger(event);}, 0);map.set(type, timeout);}}/*** A Custom DOM event.** @typedef {CustomEvent} Event* @see [Properties]{@link https://developer.mozilla.org/en-US/docs/Web/API/CustomEvent}*//*** All event listeners should follow the following format.** @callback EventListener* @this {EventTarget}** @param {Event} event* the event that triggered this function** @param {Object} [hash]* hash of data sent during the event*//*** An object containing event names as keys and booleans as values.** > NOTE: If an event name is set to a true value here {@link EventTarget#trigger}* will have extra functionality. See that function for more information.** @property EventTarget.prototype.allowedEvents_* @protected*/EventTarget$2.prototype.allowedEvents_ = {};/*** An alias of {@link EventTarget#on}. 
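*
* A brief sketch, assuming `target` is an instance of this class and
* `onReady` is some listener function:
*
* @example
* target.addEventListener('ready', onReady);    // same as target.on('ready', onReady)
* target.removeEventListener('ready', onReady); // same as target.off('ready', onReady)
* target.dispatchEvent({ type: 'ready' });      // same as target.trigger('ready')
*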
Allows `EventTarget` to mimic* the standard DOM API.** @function* @see {@link EventTarget#on}*/EventTarget$2.prototype.addEventListener = EventTarget$2.prototype.on;/*** An alias of {@link EventTarget#off}. Allows `EventTarget` to mimic* the standard DOM API.** @function* @see {@link EventTarget#off}*/EventTarget$2.prototype.removeEventListener = EventTarget$2.prototype.off;/*** An alias of {@link EventTarget#trigger}. Allows `EventTarget` to mimic* the standard DOM API.** @function* @see {@link EventTarget#trigger}*/EventTarget$2.prototype.dispatchEvent = EventTarget$2.prototype.trigger;/*** @file mixins/evented.js* @module evented*/const objName = obj => {if (typeof obj.name === 'function') {return obj.name();}if (typeof obj.name === 'string') {return obj.name;}if (obj.name_) {return obj.name_;}if (obj.constructor && obj.constructor.name) {return obj.constructor.name;}return typeof obj;};/*** Returns whether or not an object has had the evented mixin applied.** @param {Object} object* An object to test.** @return {boolean}* Whether or not the object appears to be evented.*/const isEvented = object => object instanceof EventTarget$2 || !!object.eventBusEl_ && ['on', 'one', 'off', 'trigger'].every(k => typeof object[k] === 'function');/*** Adds a callback to run after the evented mixin applied.** @param {Object} target* An object to Add* @param {Function} callback* The callback to run.*/const addEventedCallback = (target, callback) => {if (isEvented(target)) {callback();} else {if (!target.eventedCallbacks) {target.eventedCallbacks = [];}target.eventedCallbacks.push(callback);}};/*** Whether a value is a valid event type - non-empty string or array.** @private* @param {string|Array} type* The type value to test.** @return {boolean}* Whether or not the type is a valid event type.*/const isValidEventType = type =>// The regex here verifies that the `type` contains at least one non-// whitespace character.typeof type === 'string' && /\S/.test(type) || Array.isArray(type) && !!type.length;/*** Validates a value to determine if it is a valid event target. Throws if not.** @private* @throws {Error}* If the target does not appear to be a valid event target.** @param {Object} target* The object to test.** @param {Object} obj* The evented object we are validating for** @param {string} fnName* The name of the evented mixin function that called this.*/const validateTarget = (target, obj, fnName) => {if (!target || !target.nodeName && !isEvented(target)) {throw new Error(`Invalid target for ${objName(obj)}#${fnName}; must be a DOM node or evented object.`);}};/*** Validates a value to determine if it is a valid event target. Throws if not.** @private* @throws {Error}* If the type does not appear to be a valid event type.** @param {string|Array} type* The type to test.** @param {Object} obj* The evented object we are validating for** @param {string} fnName* The name of the evented mixin function that called this.*/const validateEventType = (type, obj, fnName) => {if (!isValidEventType(type)) {throw new Error(`Invalid event type for ${objName(obj)}#${fnName}; must be a non-empty string or array.`);}};/*** Validates a value to determine if it is a valid listener. 
Throws if not.** @private* @throws {Error}* If the listener is not a function.** @param {Function} listener* The listener to test.** @param {Object} obj* The evented object we are validating for** @param {string} fnName* The name of the evented mixin function that called this.*/const validateListener = (listener, obj, fnName) => {if (typeof listener !== 'function') {throw new Error(`Invalid listener for ${objName(obj)}#${fnName}; must be a function.`);}};/*** Takes an array of arguments given to `on()` or `one()`, validates them, and* normalizes them into an object.** @private* @param {Object} self* The evented object on which `on()` or `one()` was called. This* object will be bound as the `this` value for the listener.** @param {Array} args* An array of arguments passed to `on()` or `one()`.** @param {string} fnName* The name of the evented mixin function that called this.** @return {Object}* An object containing useful values for `on()` or `one()` calls.*/const normalizeListenArgs = (self, args, fnName) => {// If the number of arguments is less than 3, the target is always the// evented object itself.const isTargetingSelf = args.length < 3 || args[0] === self || args[0] === self.eventBusEl_;let target;let type;let listener;if (isTargetingSelf) {target = self.eventBusEl_;// Deal with cases where we got 3 arguments, but we are still listening to// the evented object itself.if (args.length >= 3) {args.shift();}[type, listener] = args;} else {[target, type, listener] = args;}validateTarget(target, self, fnName);validateEventType(type, self, fnName);validateListener(listener, self, fnName);listener = bind_(self, listener);return {isTargetingSelf,target,type,listener};};/*** Adds the listener to the event type(s) on the target, normalizing for* the type of target.** @private* @param {Element|Object} target* A DOM node or evented object.** @param {string} method* The event binding method to use ("on" or "one").** @param {string|Array} type* One or more event type(s).** @param {Function} listener* A listener function.*/const listen = (target, method, type, listener) => {validateTarget(target, target, method);if (target.nodeName) {Events[method](target, type, listener);} else {target[method](type, listener);}};/*** Contains methods that provide event capabilities to an object which is passed* to {@link module:evented|evented}.** @mixin EventedMixin*/const EventedMixin = {/*** Add a listener to an event (or events) on this object or another evented* object.** @param {string|Array|Element|Object} targetOrType* If this is a string or array, it represents the event type(s)* that will trigger the listener.** Another evented object can be passed here instead, which will* cause the listener to listen for events on _that_ object.** In either case, the listener's `this` value will be bound to* this object.** @param {string|Array|Function} typeOrListener* If the first argument was a string or array, this should be the* listener function. 
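*
* A short sketch, assuming `comp` and `other` are both evented objects
* (components, for example) and the event names are only illustrative:
*
* @example
* // Listen on this object itself:
* comp.on('resize', function () { this.trigger('redraw'); });
* // Listen on another evented object; `this` is still bound to `comp`,
* // and the listener is cleaned up when either object is disposed:
* comp.on(other, 'statechanged', function () { this.trigger('redraw'); });
*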
Otherwise, this is a string or array of event* type(s).** @param {Function} [listener]* If the first argument was another evented object, this will be* the listener function.*/on(...args) {const {isTargetingSelf,target,type,listener} = normalizeListenArgs(this, args, 'on');listen(target, 'on', type, listener);// If this object is listening to another evented object.if (!isTargetingSelf) {// If this object is disposed, remove the listener.const removeListenerOnDispose = () => this.off(target, type, listener);// Use the same function ID as the listener so we can remove it later it// using the ID of the original listener.removeListenerOnDispose.guid = listener.guid;// Add a listener to the target's dispose event as well. This ensures// that if the target is disposed BEFORE this object, we remove the// removal listener that was just added. Otherwise, we create a memory leak.const removeRemoverOnTargetDispose = () => this.off('dispose', removeListenerOnDispose);// Use the same function ID as the listener so we can remove it later// it using the ID of the original listener.removeRemoverOnTargetDispose.guid = listener.guid;listen(this, 'on', 'dispose', removeListenerOnDispose);listen(target, 'on', 'dispose', removeRemoverOnTargetDispose);}},/*** Add a listener to an event (or events) on this object or another evented* object. The listener will be called once per event and then removed.** @param {string|Array|Element|Object} targetOrType* If this is a string or array, it represents the event type(s)* that will trigger the listener.** Another evented object can be passed here instead, which will* cause the listener to listen for events on _that_ object.** In either case, the listener's `this` value will be bound to* this object.** @param {string|Array|Function} typeOrListener* If the first argument was a string or array, this should be the* listener function. Otherwise, this is a string or array of event* type(s).** @param {Function} [listener]* If the first argument was another evented object, this will be* the listener function.*/one(...args) {const {isTargetingSelf,target,type,listener} = normalizeListenArgs(this, args, 'one');// Targeting this evented object.if (isTargetingSelf) {listen(target, 'one', type, listener);// Targeting another evented object.} else {// TODO: This wrapper is incorrect! It should only// remove the wrapper for the event type that called it.// Instead all listeners are removed on the first trigger!// see https://github.com/videojs/video.js/issues/5962const wrapper = (...largs) => {this.off(target, type, wrapper);listener.apply(null, largs);};// Use the same function ID as the listener so we can remove it later// it using the ID of the original listener.wrapper.guid = listener.guid;listen(target, 'one', type, wrapper);}},/*** Add a listener to an event (or events) on this object or another evented* object. The listener will only be called once for the first event that is triggered* then removed.** @param {string|Array|Element|Object} targetOrType* If this is a string or array, it represents the event type(s)* that will trigger the listener.** Another evented object can be passed here instead, which will* cause the listener to listen for events on _that_ object.** In either case, the listener's `this` value will be bound to* this object.** @param {string|Array|Function} typeOrListener* If the first argument was a string or array, this should be the* listener function. 
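*
* A sketch, assuming `comp` is an evented object and the event names are
* only illustrative:
*
* @example
* comp.any(['playing', 'error'], (e) => {
*   // runs a single time, for whichever of the two events fires first
*   console.log('first event was', e.type);
* });
*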
Otherwise, this is a string or array of event* type(s).** @param {Function} [listener]* If the first argument was another evented object, this will be* the listener function.*/any(...args) {const {isTargetingSelf,target,type,listener} = normalizeListenArgs(this, args, 'any');// Targeting this evented object.if (isTargetingSelf) {listen(target, 'any', type, listener);// Targeting another evented object.} else {const wrapper = (...largs) => {this.off(target, type, wrapper);listener.apply(null, largs);};// Use the same function ID as the listener so we can remove it later// it using the ID of the original listener.wrapper.guid = listener.guid;listen(target, 'any', type, wrapper);}},/*** Removes listener(s) from event(s) on an evented object.** @param {string|Array|Element|Object} [targetOrType]* If this is a string or array, it represents the event type(s).** Another evented object can be passed here instead, in which case* ALL 3 arguments are _required_.** @param {string|Array|Function} [typeOrListener]* If the first argument was a string or array, this may be the* listener function. Otherwise, this is a string or array of event* type(s).** @param {Function} [listener]* If the first argument was another evented object, this will be* the listener function; otherwise, _all_ listeners bound to the* event type(s) will be removed.*/off(targetOrType, typeOrListener, listener) {// Targeting this evented object.if (!targetOrType || isValidEventType(targetOrType)) {off(this.eventBusEl_, targetOrType, typeOrListener);// Targeting another evented object.} else {const target = targetOrType;const type = typeOrListener;// Fail fast and in a meaningful way!validateTarget(target, this, 'off');validateEventType(type, this, 'off');validateListener(listener, this, 'off');// Ensure there's at least a guid, even if the function hasn't been usedlistener = bind_(this, listener);// Remove the dispose listener on this evented object, which was given// the same guid as the event listener in on().this.off('dispose', listener);if (target.nodeName) {off(target, type, listener);off(target, 'dispose', listener);} else if (isEvented(target)) {target.off(type, listener);target.off('dispose', listener);}}},/*** Fire an event on this evented object, causing its listeners to be called.** @param {string|Object} event* An event type or an object with a type property.** @param {Object} [hash]* An additional object to pass along to listeners.** @return {boolean}* Whether or not the default behavior was prevented.*/trigger(event, hash) {validateTarget(this.eventBusEl_, this, 'trigger');const type = event && typeof event !== 'string' ? event.type : event;if (!isValidEventType(type)) {throw new Error(`Invalid event type for ${objName(this)}#trigger; ` + 'must be a non-empty string or object with a type key that has a non-empty value.');}return trigger(this.eventBusEl_, event, hash);}};/*** Applies {@link module:evented~EventedMixin|EventedMixin} to a target object.** @param {Object} target* The object to which to add event methods.** @param {Object} [options={}]* Options for customizing the mixin behavior.** @param {string} [options.eventBusKey]* By default, adds a `eventBusEl_` DOM element to the target object,* which is used as an event bus. 
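*
* A minimal sketch, assuming a plain object that already owns a DOM element:
*
* @example
* const overlay = { el_: document.createElement('div') };
* evented(overlay, { eventBusKey: 'el_' }); // reuse overlay.el_ as the bus
* overlay.on('show', () => console.log('shown'));
* overlay.trigger('show');
*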
If the target object already has a* DOM element that should be used, pass its key here.** @return {Object}* The target object.*/function evented(target, options = {}) {const {eventBusKey} = options;// Set or create the eventBusEl_.if (eventBusKey) {if (!target[eventBusKey].nodeName) {throw new Error(`The eventBusKey "${eventBusKey}" does not refer to an element.`);}target.eventBusEl_ = target[eventBusKey];} else {target.eventBusEl_ = createEl('span', {className: 'vjs-event-bus'});}Object.assign(target, EventedMixin);if (target.eventedCallbacks) {target.eventedCallbacks.forEach(callback => {callback();});}// When any evented object is disposed, it removes all its listeners.target.on('dispose', () => {target.off();[target, target.el_, target.eventBusEl_].forEach(function (val) {if (val && DomData.has(val)) {DomData.delete(val);}});window.setTimeout(() => {target.eventBusEl_ = null;}, 0);});return target;}/*** @file mixins/stateful.js* @module stateful*//*** Contains methods that provide statefulness to an object which is passed* to {@link module:stateful}.** @mixin StatefulMixin*/const StatefulMixin = {/*** A hash containing arbitrary keys and values representing the state of* the object.** @type {Object}*/state: {},/*** Set the state of an object by mutating its* {@link module:stateful~StatefulMixin.state|state} object in place.** @fires module:stateful~StatefulMixin#statechanged* @param {Object|Function} stateUpdates* A new set of properties to shallow-merge into the plugin state.* Can be a plain object or a function returning a plain object.** @return {Object|undefined}* An object containing changes that occurred. If no changes* occurred, returns `undefined`.*/setState(stateUpdates) {// Support providing the `stateUpdates` state as a function.if (typeof stateUpdates === 'function') {stateUpdates = stateUpdates();}let changes;each(stateUpdates, (value, key) => {// Record the change if the value is different from what's in the// current state.if (this.state[key] !== value) {changes = changes || {};changes[key] = {from: this.state[key],to: value};}this.state[key] = value;});// Only trigger "statechange" if there were changes AND we have a trigger// function. 
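// As an illustrative aside, assuming `obj` has had stateful() and evented()
// applied and its state previously held { muted: false }:
//
//   obj.setState({ muted: true });
//   // => returns { muted: { from: false, to: true } } and fires a
//   //    'statechanged' event carrying that same `changes` object
//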
This allows us to not require that the target object be an// evented object.if (changes && isEvented(this)) {/*** An event triggered on an object that is both* {@link module:stateful|stateful} and {@link module:evented|evented}* indicating that its state has changed.** @event module:stateful~StatefulMixin#statechanged* @type {Object}* @property {Object} changes* A hash containing the properties that were changed and* the values they were changed `from` and `to`.*/this.trigger({changes,type: 'statechanged'});}return changes;}};/*** Applies {@link module:stateful~StatefulMixin|StatefulMixin} to a target* object.** If the target object is {@link module:evented|evented} and has a* `handleStateChanged` method, that method will be automatically bound to the* `statechanged` event on itself.** @param {Object} target* The object to be made stateful.** @param {Object} [defaultState]* A default set of properties to populate the newly-stateful object's* `state` property.** @return {Object}* Returns the `target`.*/function stateful(target, defaultState) {Object.assign(target, StatefulMixin);// This happens after the mixing-in because we need to replace the `state`// added in that step.target.state = Object.assign({}, target.state, defaultState);// Auto-bind the `handleStateChanged` method of the target object if it exists.if (typeof target.handleStateChanged === 'function' && isEvented(target)) {target.on('statechanged', target.handleStateChanged);}return target;}/*** @file str.js* @module to-lower-case*//*** Lowercase the first letter of a string.** @param {string} string* String to be lowercased** @return {string}* The string with a lowercased first letter*/const toLowerCase = function (string) {if (typeof string !== 'string') {return string;}return string.replace(/./, w => w.toLowerCase());};/*** Uppercase the first letter of a string.** @param {string} string* String to be uppercased** @return {string}* The string with an uppercased first letter*/const toTitleCase$1 = function (string) {if (typeof string !== 'string') {return string;}return string.replace(/./, w => w.toUpperCase());};/*** Compares the TitleCase versions of the two strings for equality.** @param {string} str1* The first string to compare** @param {string} str2* The second string to compare** @return {boolean}* Whether the TitleCase versions of the strings are equal*/const titleCaseEquals = function (str1, str2) {return toTitleCase$1(str1) === toTitleCase$1(str2);};var Str = /*#__PURE__*/Object.freeze({__proto__: null,toLowerCase: toLowerCase,toTitleCase: toTitleCase$1,titleCaseEquals: titleCaseEquals});var commonjsGlobal = typeof globalThis !== 'undefined' ? globalThis : typeof window !== 'undefined' ? window : typeof global !== 'undefined' ? global : typeof self !== 'undefined' ? self : {};function unwrapExports (x) {return x && x.__esModule && Object.prototype.hasOwnProperty.call(x, 'default') ? 
x['default'] : x;}function createCommonjsModule(fn, module) {return module = { exports: {} }, fn(module, module.exports), module.exports;}var keycode = createCommonjsModule(function (module, exports) {// Source: http://jsfiddle.net/vWx8V/// http://stackoverflow.com/questions/5603195/full-list-of-javascript-keycodes/*** Conenience method returns corresponding value for given keyName or keyCode.** @param {Mixed} keyCode {Number} or keyName {String}* @return {Mixed}* @api public*/function keyCode(searchInput) {// Keyboard Eventsif (searchInput && 'object' === typeof searchInput) {var hasKeyCode = searchInput.which || searchInput.keyCode || searchInput.charCode;if (hasKeyCode) searchInput = hasKeyCode;}// Numbersif ('number' === typeof searchInput) return names[searchInput];// Everything else (cast to string)var search = String(searchInput);// check codesvar foundNamedKey = codes[search.toLowerCase()];if (foundNamedKey) return foundNamedKey;// check aliasesvar foundNamedKey = aliases[search.toLowerCase()];if (foundNamedKey) return foundNamedKey;// weird character?if (search.length === 1) return search.charCodeAt(0);return undefined;}/*** Compares a keyboard event with a given keyCode or keyName.** @param {Event} event Keyboard event that should be tested* @param {Mixed} keyCode {Number} or keyName {String}* @return {Boolean}* @api public*/keyCode.isEventKey = function isEventKey(event, nameOrCode) {if (event && 'object' === typeof event) {var keyCode = event.which || event.keyCode || event.charCode;if (keyCode === null || keyCode === undefined) {return false;}if (typeof nameOrCode === 'string') {// check codesvar foundNamedKey = codes[nameOrCode.toLowerCase()];if (foundNamedKey) {return foundNamedKey === keyCode;}// check aliasesvar foundNamedKey = aliases[nameOrCode.toLowerCase()];if (foundNamedKey) {return foundNamedKey === keyCode;}} else if (typeof nameOrCode === 'number') {return nameOrCode === keyCode;}return false;}};exports = module.exports = keyCode;/*** Get by name** exports.code['enter'] // => 13*/var codes = exports.code = exports.codes = {'backspace': 8,'tab': 9,'enter': 13,'shift': 16,'ctrl': 17,'alt': 18,'pause/break': 19,'caps lock': 20,'esc': 27,'space': 32,'page up': 33,'page down': 34,'end': 35,'home': 36,'left': 37,'up': 38,'right': 39,'down': 40,'insert': 45,'delete': 46,'command': 91,'left command': 91,'right command': 93,'numpad *': 106,'numpad +': 107,'numpad -': 109,'numpad .': 110,'numpad /': 111,'num lock': 144,'scroll lock': 145,'my computer': 182,'my calculator': 183,';': 186,'=': 187,',': 188,'-': 189,'.': 190,'/': 191,'`': 192,'[': 219,'\\': 220,']': 221,"'": 222};// Helper aliasesvar aliases = exports.aliases = {'windows': 91,'⇧': 16,'⌥': 18,'⌃': 17,'⌘': 91,'ctl': 17,'control': 17,'option': 18,'pause': 19,'break': 19,'caps': 20,'return': 13,'escape': 27,'spc': 32,'spacebar': 32,'pgup': 33,'pgdn': 34,'ins': 45,'del': 46,'cmd': 91};/*!* Programatically add the following*/// lower case charsfor (i = 97; i < 123; i++) codes[String.fromCharCode(i)] = i - 32;// numbersfor (var i = 48; i < 58; i++) codes[i - 48] = i;// function keysfor (i = 1; i < 13; i++) codes['f' + i] = i + 111;// numpad keysfor (i = 0; i < 10; i++) codes['numpad ' + i] = i + 96;/*** Get by code** exports.name[13] // => 'Enter'*/var names = exports.names = exports.title = {}; // title for backward compat// Create reverse mappingfor (i in codes) names[codes[i]] = i;// Add aliasesfor (var alias in aliases) {codes[alias] = 
aliases[alias];}});keycode.code;keycode.codes;keycode.aliases;keycode.names;keycode.title;/*** Player Component - Base class for all UI objects** @file component.js*//*** Base class for all UI Components.* Components are UI objects which represent both a javascript object and an element* in the DOM. They can be children of other components, and can have* children themselves.** Components can also use methods from {@link EventTarget}*/class Component$1 {/*** A callback that is called when a component is ready. Does not have any* parameters and any callback value will be ignored.** @callback ReadyCallback* @this Component*//*** Creates an instance of this class.** @param { import('./player').default } player* The `Player` that this class should be attached to.** @param {Object} [options]* The key/value store of component options.** @param {Object[]} [options.children]* An array of children objects to initialize this component with. Children objects have* a name property that will be used if more than one component of the same type needs to be* added.** @param {string} [options.className]* A class or space separated list of classes to add the component** @param {ReadyCallback} [ready]* Function that gets called when the `Component` is ready.*/constructor(player, options, ready) {// The component might be the player itself and we can't pass `this` to superif (!player && this.play) {this.player_ = player = this; // eslint-disable-line} else {this.player_ = player;}this.isDisposed_ = false;// Hold the reference to the parent component via `addChild` methodthis.parentComponent_ = null;// Make a copy of prototype.options_ to protect against overriding defaultsthis.options_ = merge$2({}, this.options_);// Updated options with supplied optionsoptions = this.options_ = merge$2(this.options_, options);// Get ID from options or options element if one is suppliedthis.id_ = options.id || options.el && options.el.id;// If there was no ID from the options, generate oneif (!this.id_) {// Don't require the player ID function in the case of mock playersconst id = player && player.id && player.id() || 'no_player';this.id_ = `${id}_component_${newGUID()}`;}this.name_ = options.name || null;// Create element if one wasn't provided in optionsif (options.el) {this.el_ = options.el;} else if (options.createEl !== false) {this.el_ = this.createEl();}if (options.className && this.el_) {options.className.split(' ').forEach(c => this.addClass(c));}// Remove the placeholder event methods. If the component is evented, the// real methods are added next['on', 'off', 'one', 'any', 'trigger'].forEach(fn => {this[fn] = undefined;});// if evented is anything except false, we want to mixin in eventedif (options.evented !== false) {// Make this an evented object and use `el_`, if available, as its event busevented(this, {eventBusKey: this.el_ ? 
'el_' : null});this.handleLanguagechange = this.handleLanguagechange.bind(this);this.on(this.player_, 'languagechange', this.handleLanguagechange);}stateful(this, this.constructor.defaultState);this.children_ = [];this.childIndex_ = {};this.childNameIndex_ = {};this.setTimeoutIds_ = new Set();this.setIntervalIds_ = new Set();this.rafIds_ = new Set();this.namedRafs_ = new Map();this.clearingTimersOnDispose_ = false;// Add any child components in optionsif (options.initChildren !== false) {this.initChildren();}// Don't want to trigger ready here or it will go before init is actually// finished for all children that run this constructorthis.ready(ready);if (options.reportTouchActivity !== false) {this.enableTouchActivity();}}// `on`, `off`, `one`, `any` and `trigger` are here so tsc includes them in definitions.// They are replaced or removed in the constructor/*** Adds an `event listener` to an instance of an `EventTarget`. An `event listener` is a* function that will get called when an event with a certain name gets triggered.** @param {string|string[]} type* An event name or an array of event names.** @param {Function} fn* The function to call with `EventTarget`s*/on(type, fn) {}/*** Removes an `event listener` for a specific event from an instance of `EventTarget`.* This makes it so that the `event listener` will no longer get called when the* named event happens.** @param {string|string[]} type* An event name or an array of event names.** @param {Function} [fn]* The function to remove. If not specified, all listeners managed by Video.js will be removed.*/off(type, fn) {}/*** This function will add an `event listener` that gets triggered only once. After the* first trigger it will get removed. This is like adding an `event listener`* with {@link EventTarget#on} that calls {@link EventTarget#off} on itself.** @param {string|string[]} type* An event name or an array of event names.** @param {Function} fn* The function to be called once for each event name.*/one(type, fn) {}/*** This function will add an `event listener` that gets triggered only once and is* removed from all events. This is like adding an array of `event listener`s* with {@link EventTarget#on} that calls {@link EventTarget#off} on all events the* first time it is triggered.** @param {string|string[]} type* An event name or an array of event names.** @param {Function} fn* The function to be called once for each event name.*/any(type, fn) {}/*** This function causes an event to happen. This will then cause any `event listeners`* that are waiting for that event, to get called. 
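*
* A short sketch, assuming `comp` is a constructed (and therefore evented)
* Component, with a purely illustrative event name:
*
* @example
* comp.on('refresh', (event, hash) => console.log(hash.reason));
* comp.trigger('refresh', { reason: 'manual' }); // hash reaches the listener
*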
If there are no `event listeners`* for an event then nothing will happen.** If the name of the `Event` that is being triggered is in `EventTarget.allowedEvents_`.* Trigger will also call the `on` + `uppercaseEventName` function.** Example:* 'click' is in `EventTarget.allowedEvents_`, so, trigger will attempt to call* `onClick` if it exists.** @param {string|Event|Object} event* The name of the event, an `Event`, or an object with a key of type set to* an event name.** @param {Object} [hash]* Optionally extra argument to pass through to an event listener*/trigger(event, hash) {}/*** Dispose of the `Component` and all child components.** @fires Component#dispose** @param {Object} options* @param {Element} options.originalEl element with which to replace player element*/dispose(options = {}) {// Bail out if the component has already been disposed.if (this.isDisposed_) {return;}if (this.readyQueue_) {this.readyQueue_.length = 0;}/*** Triggered when a `Component` is disposed.** @event Component#dispose* @type {Event}** @property {boolean} [bubbles=false]* set to false so that the dispose event does not* bubble up*/this.trigger({type: 'dispose',bubbles: false});this.isDisposed_ = true;// Dispose all children.if (this.children_) {for (let i = this.children_.length - 1; i >= 0; i--) {if (this.children_[i].dispose) {this.children_[i].dispose();}}}// Delete child referencesthis.children_ = null;this.childIndex_ = null;this.childNameIndex_ = null;this.parentComponent_ = null;if (this.el_) {// Remove element from DOMif (this.el_.parentNode) {if (options.restoreEl) {this.el_.parentNode.replaceChild(options.restoreEl, this.el_);} else {this.el_.parentNode.removeChild(this.el_);}}this.el_ = null;}// remove reference to the player after disposing of the elementthis.player_ = null;}/*** Determine whether or not this component has been disposed.** @return {boolean}* If the component has been disposed, will be `true`. Otherwise, `false`.*/isDisposed() {return Boolean(this.isDisposed_);}/*** Return the {@link Player} that the `Component` has attached to.** @return { import('./player').default }* The player that this `Component` has attached to.*/player() {return this.player_;}/*** Deep merge of options objects with new options.* > Note: When both `obj` and `options` contain properties whose values are objects.* The two properties get merged using {@link module:obj.merge}** @param {Object} obj* The object that contains new options.** @return {Object}* A new object of `this.options_` and `obj` merged together.*/options(obj) {if (!obj) {return this.options_;}this.options_ = merge$2(this.options_, obj);return this.options_;}/*** Get the `Component`s DOM element** @return {Element}* The DOM element for this `Component`.*/el() {return this.el_;}/*** Create the `Component`s DOM element.** @param {string} [tagName]* Element's DOM node type. e.g. 
'div'** @param {Object} [properties]* An object of properties that should be set.** @param {Object} [attributes]* An object of attributes that should be set.** @return {Element}* The element that gets created.*/createEl(tagName, properties, attributes) {return createEl(tagName, properties, attributes);}/*** Localize a string given the string in english.** If tokens are provided, it'll try and run a simple token replacement on the provided string.* The tokens it looks for look like `{1}` with the index being 1-indexed into the tokens array.** If a `defaultValue` is provided, it'll use that over `string`,* if a value isn't found in provided language files.* This is useful if you want to have a descriptive key for token replacement* but have a succinct localized string and not require `en.json` to be included.** Currently, it is used for the progress bar timing.* ```js* {* "progress bar timing: currentTime={1} duration={2}": "{1} of {2}"* }* ```* It is then used like so:* ```js* this.localize('progress bar timing: currentTime={1} duration{2}',* [this.player_.currentTime(), this.player_.duration()],* '{1} of {2}');* ```** Which outputs something like: `01:23 of 24:56`.*** @param {string} string* The string to localize and the key to lookup in the language files.* @param {string[]} [tokens]* If the current item has token replacements, provide the tokens here.* @param {string} [defaultValue]* Defaults to `string`. Can be a default value to use for token replacement* if the lookup key is needed to be separate.** @return {string}* The localized string or if no localization exists the english string.*/localize(string, tokens, defaultValue = string) {const code = this.player_.language && this.player_.language();const languages = this.player_.languages && this.player_.languages();const language = languages && languages[code];const primaryCode = code && code.split('-')[0];const primaryLang = languages && languages[primaryCode];let localizedString = defaultValue;if (language && language[string]) {localizedString = language[string];} else if (primaryLang && primaryLang[string]) {localizedString = primaryLang[string];}if (tokens) {localizedString = localizedString.replace(/\{(\d+)\}/g, function (match, index) {const value = tokens[index - 1];let ret = value;if (typeof value === 'undefined') {ret = match;}return ret;});}return localizedString;}/*** Handles language change for the player in components. Should be overridden by sub-components.** @abstract*/handleLanguagechange() {}/*** Return the `Component`s DOM element. This is where children get inserted.* This will usually be the the same as the element returned in {@link Component#el}.** @return {Element}* The content element for this `Component`.*/contentEl() {return this.contentEl_ || this.el_;}/*** Get this `Component`s ID** @return {string}* The id of this `Component`*/id() {return this.id_;}/*** Get the `Component`s name. 
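*
* A sketch, assuming a default player UI in which the registered 'ControlBar'
* component (with its 'PlayToggle' child) is present:
*
* @example
* const bar = player.getChild('controlBar');
* bar.name();                                       // => 'ControlBar'
* player.getDescendant('controlBar', 'playToggle'); // nested child lookup
*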
The name gets used to reference the `Component`* and is set during registration.** @return {string}* The name of this `Component`.*/name() {return this.name_;}/*** Get an array of all child components** @return {Array}* The children*/children() {return this.children_;}/*** Returns the child `Component` with the given `id`.** @param {string} id* The id of the child `Component` to get.** @return {Component|undefined}* The child `Component` with the given `id` or undefined.*/getChildById(id) {return this.childIndex_[id];}/*** Returns the child `Component` with the given `name`.** @param {string} name* The name of the child `Component` to get.** @return {Component|undefined}* The child `Component` with the given `name` or undefined.*/getChild(name) {if (!name) {return;}return this.childNameIndex_[name];}/*** Returns the descendant `Component` following the givent* descendant `names`. For instance ['foo', 'bar', 'baz'] would* try to get 'foo' on the current component, 'bar' on the 'foo'* component and 'baz' on the 'bar' component and return undefined* if any of those don't exist.** @param {...string[]|...string} names* The name of the child `Component` to get.** @return {Component|undefined}* The descendant `Component` following the given descendant* `names` or undefined.*/getDescendant(...names) {// flatten array argument into the main arraynames = names.reduce((acc, n) => acc.concat(n), []);let currentChild = this;for (let i = 0; i < names.length; i++) {currentChild = currentChild.getChild(names[i]);if (!currentChild || !currentChild.getChild) {return;}}return currentChild;}/*** Adds an SVG icon element to another element or component.** @param {string} iconName* The name of icon. A list of all the icon names can be found at 'sandbox/svg-icons.html'** @param {Element} [el=this.el()]* Element to set the title on. Defaults to the current Component's element.** @return {Element}* The newly created icon element.*/setIcon(iconName, el = this.el()) {// TODO: In v9 of video.js, we will want to remove font icons entirely.// This means this check, as well as the others throughout the code, and// the unecessary CSS for font icons, will need to be removed.// See https://github.com/videojs/video.js/pull/8260 as to which components// need updating.if (!this.player_.options_.experimentalSvgIcons) {return;}const xmlnsURL = 'http://www.w3.org/2000/svg';// The below creates an element in the format of:// <span><svg><use>....</use></svg></span>const iconContainer = createEl('span', {className: 'vjs-icon-placeholder vjs-svg-icon'}, {'aria-hidden': 'true'});const svgEl = document.createElementNS(xmlnsURL, 'svg');svgEl.setAttributeNS(null, 'viewBox', '0 0 512 512');const useEl = document.createElementNS(xmlnsURL, 'use');svgEl.appendChild(useEl);useEl.setAttributeNS(null, 'href', `#vjs-icon-${iconName}`);iconContainer.appendChild(svgEl);// Replace a pre-existing icon if one exists.if (this.iconIsSet_) {el.replaceChild(iconContainer, el.querySelector('.vjs-icon-placeholder'));} else {el.appendChild(iconContainer);}this.iconIsSet_ = true;return iconContainer;}/*** Add a child `Component` inside the current `Component`.** @param {string|Component} child* The name or instance of a child to add.** @param {Object} [options={}]* The key/value store of options that will get passed to children of* the child.** @param {number} [index=this.children_.length]* The index to attempt to add a child into.*** @return {Component}* The `Component` that gets added as a child. 
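*
* A sketch using a hypothetical 'MyWidget' component, assuming it was
* registered beforehand via Component.registerComponent('MyWidget', MyWidget):
*
* @example
* const widget = parent.addChild('myWidget', { reportTouchActivity: false });
* parent.getChild('MyWidget') === widget; // => true
* parent.removeChild(widget);             // detaches its element and indexes
*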
When using a string the* `Component` will get created by this process.*/addChild(child, options = {}, index = this.children_.length) {let component;let componentName;// If child is a string, create component with optionsif (typeof child === 'string') {componentName = toTitleCase$1(child);const componentClassName = options.componentClass || componentName;// Set name through optionsoptions.name = componentName;// Create a new object & element for this controls set// If there's no .player_, this is a playerconst ComponentClass = Component$1.getComponent(componentClassName);if (!ComponentClass) {throw new Error(`Component ${componentClassName} does not exist`);}// data stored directly on the videojs object may be// misidentified as a component to retain// backwards-compatibility with 4.x. check to make sure the// component class can be instantiated.if (typeof ComponentClass !== 'function') {return null;}component = new ComponentClass(this.player_ || this, options);// child is a component instance} else {component = child;}if (component.parentComponent_) {component.parentComponent_.removeChild(component);}this.children_.splice(index, 0, component);component.parentComponent_ = this;if (typeof component.id === 'function') {this.childIndex_[component.id()] = component;}// If a name wasn't used to create the component, check if we can use the// name function of the componentcomponentName = componentName || component.name && toTitleCase$1(component.name());if (componentName) {this.childNameIndex_[componentName] = component;this.childNameIndex_[toLowerCase(componentName)] = component;}// Add the UI object's element to the container div (box)// Having an element is not requiredif (typeof component.el === 'function' && component.el()) {// If inserting before a component, insert before that component's elementlet refNode = null;if (this.children_[index + 1]) {// Most children are components, but the video tech is an HTML elementif (this.children_[index + 1].el_) {refNode = this.children_[index + 1].el_;} else if (isEl(this.children_[index + 1])) {refNode = this.children_[index + 1];}}this.contentEl().insertBefore(component.el(), refNode);}// Return so it can stored on parent object if desired.return component;}/*** Remove a child `Component` from this `Component`s list of children. Also removes* the child `Component`s element from this `Component`s element.** @param {Component} component* The child `Component` to remove.*/removeChild(component) {if (typeof component === 'string') {component = this.getChild(component);}if (!component || !this.children_) {return;}let childFound = false;for (let i = this.children_.length - 1; i >= 0; i--) {if (this.children_[i] === component) {childFound = true;this.children_.splice(i, 1);break;}}if (!childFound) {return;}component.parentComponent_ = null;this.childIndex_[component.id()] = null;this.childNameIndex_[toTitleCase$1(component.name())] = null;this.childNameIndex_[toLowerCase(component.name())] = null;const compEl = component.el();if (compEl && compEl.parentNode === this.contentEl()) {this.contentEl().removeChild(component.el());}}/*** Add and initialize default child `Component`s based upon options.*/initChildren() {const children = this.options_.children;if (children) {// `this` is `parent`const parentOptions = this.options_;const handleAdd = child => {const name = child.name;let opts = child.opts;// Allow options for children to be set at the parent options// e.g. 
videojs(id, { controlBar: false });// instead of videojs(id, { children: { controlBar: false });if (parentOptions[name] !== undefined) {opts = parentOptions[name];}// Allow for disabling default components// e.g. options['children']['posterImage'] = falseif (opts === false) {return;}// Allow options to be passed as a simple boolean if no configuration// is necessary.if (opts === true) {opts = {};}// We also want to pass the original player options// to each component as well so they don't need to// reach back into the player for options later.opts.playerOptions = this.options_.playerOptions;// Create and add the child component.// Add a direct reference to the child by name on the parent instance.// If two of the same component are used, different names should be supplied// for eachconst newChild = this.addChild(name, opts);if (newChild) {this[name] = newChild;}};// Allow for an array of children details to passed in the optionslet workingChildren;const Tech = Component$1.getComponent('Tech');if (Array.isArray(children)) {workingChildren = children;} else {workingChildren = Object.keys(children);}workingChildren// children that are in this.options_ but also in workingChildren would// give us extra children we do not want. So, we want to filter them out..concat(Object.keys(this.options_).filter(function (child) {return !workingChildren.some(function (wchild) {if (typeof wchild === 'string') {return child === wchild;}return child === wchild.name;});})).map(child => {let name;let opts;if (typeof child === 'string') {name = child;opts = children[name] || this.options_[name] || {};} else {name = child.name;opts = child;}return {name,opts};}).filter(child => {// we have to make sure that child.name isn't in the techOrder since// techs are registered as Components but can't aren't compatible// See https://github.com/videojs/video.js/issues/2772const c = Component$1.getComponent(child.opts.componentClass || toTitleCase$1(child.name));return c && !Tech.isTech(c);}).forEach(handleAdd);}}/*** Builds the default DOM class name. Should be overridden by sub-components.** @return {string}* The DOM class name for this object.** @abstract*/buildCSSClass() {// Child classes can include a function that does:// return 'CLASS NAME' + this._super();return '';}/*** Bind a listener to the component's ready state.* Different from event listeners in that if the ready event has already happened* it will trigger the function immediately.** @param {ReadyCallback} fn* Function that gets called when the `Component` is ready.** @return {Component}* Returns itself; method can be chained.*/ready(fn, sync = false) {if (!fn) {return;}if (!this.isReady_) {this.readyQueue_ = this.readyQueue_ || [];this.readyQueue_.push(fn);return;}if (sync) {fn.call(this);} else {// Call the function asynchronously by default for consistencythis.setTimeout(fn, 1);}}/*** Trigger all the ready listeners for this `Component`.** @fires Component#ready*/triggerReady() {this.isReady_ = true;// Ensure ready is triggered asynchronouslythis.setTimeout(function () {const readyQueue = this.readyQueue_;// Reset Ready Queuethis.readyQueue_ = [];if (readyQueue && readyQueue.length > 0) {readyQueue.forEach(function (fn) {fn.call(this);}, this);}// Allow for using event listeners also/*** Triggered when a `Component` is ready.** @event Component#ready* @type {Event}*/this.trigger('ready');}, 1);}/*** Find a single DOM element matching a `selector`. 
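*
* A brief sketch, assuming `comp` is a rendered Component:
*
* @example
* comp.$('.vjs-control-text');      // first match inside comp.contentEl()
* comp.$$('button', document.body); // all buttons under an explicit context
*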
This can be within the `Component`s* `contentEl()` or another custom context.** @param {string} selector* A valid CSS selector, which will be passed to `querySelector`.** @param {Element|string} [context=this.contentEl()]* A DOM element within which to query. Can also be a selector string in* which case the first matching element will get used as context. If* missing `this.contentEl()` gets used. If `this.contentEl()` returns* nothing it falls back to `document`.** @return {Element|null}* the dom element that was found, or null** @see [Information on CSS Selectors](https://developer.mozilla.org/en-US/docs/Web/Guide/CSS/Getting_Started/Selectors)*/$(selector, context) {return $(selector, context || this.contentEl());}/*** Finds all DOM element matching a `selector`. This can be within the `Component`s* `contentEl()` or another custom context.** @param {string} selector* A valid CSS selector, which will be passed to `querySelectorAll`.** @param {Element|string} [context=this.contentEl()]* A DOM element within which to query. Can also be a selector string in* which case the first matching element will get used as context. If* missing `this.contentEl()` gets used. If `this.contentEl()` returns* nothing it falls back to `document`.** @return {NodeList}* a list of dom elements that were found** @see [Information on CSS Selectors](https://developer.mozilla.org/en-US/docs/Web/Guide/CSS/Getting_Started/Selectors)*/$$(selector, context) {return $$(selector, context || this.contentEl());}/*** Check if a component's element has a CSS class name.** @param {string} classToCheck* CSS class name to check.** @return {boolean}* - True if the `Component` has the class.* - False if the `Component` does not have the class`*/hasClass(classToCheck) {return hasClass(this.el_, classToCheck);}/*** Add a CSS class name to the `Component`s element.** @param {...string} classesToAdd* One or more CSS class name to add.*/addClass(...classesToAdd) {addClass(this.el_, ...classesToAdd);}/*** Remove a CSS class name from the `Component`s element.** @param {...string} classesToRemove* One or more CSS class name to remove.*/removeClass(...classesToRemove) {removeClass(this.el_, ...classesToRemove);}/*** Add or remove a CSS class name from the component's element.* - `classToToggle` gets added when {@link Component#hasClass} would return false.* - `classToToggle` gets removed when {@link Component#hasClass} would return true.** @param {string} classToToggle* The class to add or remove based on (@link Component#hasClass}** @param {boolean|Dom~predicate} [predicate]* An {@link Dom~predicate} function or a boolean*/toggleClass(classToToggle, predicate) {toggleClass(this.el_, classToToggle, predicate);}/*** Show the `Component`s element if it is hidden by removing the* 'vjs-hidden' class name from it.*/show() {this.removeClass('vjs-hidden');}/*** Hide the `Component`s element if it is currently showing by adding the* 'vjs-hidden` class name to it.*/hide() {this.addClass('vjs-hidden');}/*** Lock a `Component`s element in its visible state by adding the 'vjs-lock-showing'* class name to it. Used during fadeIn/fadeOut.** @private*/lockShowing() {this.addClass('vjs-lock-showing');}/*** Unlock a `Component`s element from its visible state by removing the 'vjs-lock-showing'* class name from it. 
Used during fadeIn/fadeOut.** @private*/unlockShowing() {this.removeClass('vjs-lock-showing');}/*** Get the value of an attribute on the `Component`s element.** @param {string} attribute* Name of the attribute to get the value from.** @return {string|null}* - The value of the attribute that was asked for.* - Can be an empty string on some browsers if the attribute does not exist* or has no value* - Most browsers will return null if the attribute does not exist or has* no value.** @see [DOM API]{@link https://developer.mozilla.org/en-US/docs/Web/API/Element/getAttribute}*/getAttribute(attribute) {return getAttribute(this.el_, attribute);}/*** Set the value of an attribute on the `Component`'s element** @param {string} attribute* Name of the attribute to set.** @param {string} value* Value to set the attribute to.** @see [DOM API]{@link https://developer.mozilla.org/en-US/docs/Web/API/Element/setAttribute}*/setAttribute(attribute, value) {setAttribute(this.el_, attribute, value);}/*** Remove an attribute from the `Component`s element.** @param {string} attribute* Name of the attribute to remove.** @see [DOM API]{@link https://developer.mozilla.org/en-US/docs/Web/API/Element/removeAttribute}*/removeAttribute(attribute) {removeAttribute(this.el_, attribute);}/*** Get or set the width of the component based upon the CSS styles.* See {@link Component#dimension} for more detailed information.** @param {number|string} [num]* The width that you want to set postfixed with '%', 'px' or nothing.** @param {boolean} [skipListeners]* Skip the componentresize event trigger** @return {number|undefined}* The width when getting, zero if there is no width*/width(num, skipListeners) {return this.dimension('width', num, skipListeners);}/*** Get or set the height of the component based upon the CSS styles.* See {@link Component#dimension} for more detailed information.** @param {number|string} [num]* The height that you want to set postfixed with '%', 'px' or nothing.** @param {boolean} [skipListeners]* Skip the componentresize event trigger** @return {number|undefined}* The height when getting, zero if there is no height*/height(num, skipListeners) {return this.dimension('height', num, skipListeners);}/*** Set both the width and height of the `Component` element at the same time.** @param {number|string} width* Width to set the `Component`s element to.** @param {number|string} height* Height to set the `Component`s element to.*/dimensions(width, height) {// Skip componentresize listeners on width for optimizationthis.width(width, true);this.height(height);}/*** Get or set width or height of the `Component` element. This is the shared code* for the {@link Component#width} and {@link Component#height}.** Things to know:* - If the width or height in an number this will return the number postfixed with 'px'.* - If the width/height is a percent this will return the percent postfixed with '%'* - Hidden elements have a width of 0 with `window.getComputedStyle`. 
This function* defaults to the `Component`s `style.width` and falls back to `window.getComputedStyle`.* See [this]{@link http://www.foliotek.com/devblog/getting-the-width-of-a-hidden-element-with-jquery-using-width/}* for more information* - If you want the computed style of the component, use {@link Component#currentWidth}* and {@link Component#currentHeight}** @fires Component#componentresize** @param {string} widthOrHeight* 'width' or 'height'** @param {number|string} [num]* New dimension** @param {boolean} [skipListeners]* Skip componentresize event trigger** @return {number|undefined}* The dimension when getting or 0 if unset*/dimension(widthOrHeight, num, skipListeners) {if (num !== undefined) {// Set to zero if null or literally NaN (NaN !== NaN)if (num === null || num !== num) {num = 0;}// Check if using css width/height (% or px) and adjustif (('' + num).indexOf('%') !== -1 || ('' + num).indexOf('px') !== -1) {this.el_.style[widthOrHeight] = num;} else if (num === 'auto') {this.el_.style[widthOrHeight] = '';} else {this.el_.style[widthOrHeight] = num + 'px';}// skipListeners allows us to avoid triggering the resize event when setting both width and heightif (!skipListeners) {/*** Triggered when a component is resized.** @event Component#componentresize* @type {Event}*/this.trigger('componentresize');}return;}// Not setting a value, so getting it// Make sure element existsif (!this.el_) {return 0;}// Get dimension value from styleconst val = this.el_.style[widthOrHeight];const pxIndex = val.indexOf('px');if (pxIndex !== -1) {// Return the pixel value with no 'px'return parseInt(val.slice(0, pxIndex), 10);}// No px so using % or no style was set, so falling back to offsetWidth/height// If component has display:none, offset will return 0// TODO: handle display:none and no dimension style using pxreturn parseInt(this.el_['offset' + toTitleCase$1(widthOrHeight)], 10);}/*** Get the computed width or the height of the component's element.** Uses `window.getComputedStyle`.** @param {string} widthOrHeight* A string containing 'width' or 'height'. Whichever one you want to get.** @return {number}* The dimension that gets asked for or 0 if nothing was set* for that dimension.*/currentDimension(widthOrHeight) {let computedWidthOrHeight = 0;if (widthOrHeight !== 'width' && widthOrHeight !== 'height') {throw new Error('currentDimension only accepts width or height value');}computedWidthOrHeight = computedStyle(this.el_, widthOrHeight);// remove 'px' from variable and parse as integercomputedWidthOrHeight = parseFloat(computedWidthOrHeight);// if the computed value is still 0, it's possible that the browser is lying// and we want to check the offset values.// This code also runs wherever getComputedStyle doesn't exist.if (computedWidthOrHeight === 0 || isNaN(computedWidthOrHeight)) {const rule = `offset${toTitleCase$1(widthOrHeight)}`;computedWidthOrHeight = this.el_[rule];}return computedWidthOrHeight;}/*** An object that contains width and height values of the `Component`s* computed style. 
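*
* A hedged sketch of the width/height API documented above (assumes `myComponent` is an existing Component; the values are illustrative):
*
* @example
*    myComponent.width('50%');         // percent and 'px' strings are applied as-is
*    myComponent.height(150);          // bare numbers are treated as pixels
*    myComponent.dimensions(300, 150); // set both, firing a single componentresize
*    myComponent.currentDimensions();  // e.g. { width: 300, height: 150 }
*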
Uses `window.getComputedStyle`.** @typedef {Object} Component~DimensionObject** @property {number} width* The width of the `Component`s computed style.** @property {number} height* The height of the `Component`s computed style.*//*** Get an object that contains computed width and height values of the* component's element.** Uses `window.getComputedStyle`.** @return {Component~DimensionObject}* The computed dimensions of the component's element.*/currentDimensions() {return {width: this.currentDimension('width'),height: this.currentDimension('height')};}/*** Get the computed width of the component's element.** Uses `window.getComputedStyle`.** @return {number}* The computed width of the component's element.*/currentWidth() {return this.currentDimension('width');}/*** Get the computed height of the component's element.** Uses `window.getComputedStyle`.** @return {number}* The computed height of the component's element.*/currentHeight() {return this.currentDimension('height');}/*** Set the focus to this component*/focus() {this.el_.focus();}/*** Remove the focus from this component*/blur() {this.el_.blur();}/*** When this Component receives a `keydown` event which it does not process,* it passes the event to the Player for handling.** @param {KeyboardEvent} event* The `keydown` event that caused this function to be called.*/handleKeyDown(event) {if (this.player_) {// We only stop propagation here because we want unhandled events to fall// back to the browser. Exclude Tab for focus trapping.if (!keycode.isEventKey(event, 'Tab')) {event.stopPropagation();}this.player_.handleKeyDown(event);}}/*** Many components used to have a `handleKeyPress` method, which was poorly* named because it listened to a `keydown` event. This method name now* delegates to `handleKeyDown`. This means anyone calling `handleKeyPress`* will not see their method calls stop working.** @param {KeyboardEvent} event* The event that caused this function to be called.*/handleKeyPress(event) {this.handleKeyDown(event);}/*** Emit a 'tap' events when touch event support gets detected. This gets used to* support toggling the controls through a tap on the video. They get enabled* because every sub-component would have extra overhead otherwise.** @protected* @fires Component#tap* @listens Component#touchstart* @listens Component#touchmove* @listens Component#touchleave* @listens Component#touchcancel* @listens Component#touchend*/emitTapEvents() {// Track the start time so we can determine how long the touch lastedlet touchStart = 0;let firstTouch = null;// Maximum movement allowed during a touch event to still be considered a tap// Other popular libs use anywhere from 2 (hammer.js) to 15,// so 10 seems like a nice, round number.const tapMovementThreshold = 10;// The maximum length a touch can be while still being considered a tapconst touchTimeThreshold = 200;let couldBeTap;this.on('touchstart', function (event) {// If more than one finger, don't consider treating this as a clickif (event.touches.length === 1) {// Copy pageX/pageY from the objectfirstTouch = {pageX: event.touches[0].pageX,pageY: event.touches[0].pageY};// Record start time so we can detect a tap vs. 
"touch and hold"touchStart = window.performance.now();// Reset couldBeTap trackingcouldBeTap = true;}});this.on('touchmove', function (event) {// If more than one finger, don't consider treating this as a clickif (event.touches.length > 1) {couldBeTap = false;} else if (firstTouch) {// Some devices will throw touchmoves for all but the slightest of taps.// So, if we moved only a small distance, this could still be a tapconst xdiff = event.touches[0].pageX - firstTouch.pageX;const ydiff = event.touches[0].pageY - firstTouch.pageY;const touchDistance = Math.sqrt(xdiff * xdiff + ydiff * ydiff);if (touchDistance > tapMovementThreshold) {couldBeTap = false;}}});const noTap = function () {couldBeTap = false;};// TODO: Listen to the original target. http://youtu.be/DujfpXOKUp8?t=13m8sthis.on('touchleave', noTap);this.on('touchcancel', noTap);// When the touch ends, measure how long it took and trigger the appropriate// eventthis.on('touchend', function (event) {firstTouch = null;// Proceed only if the touchmove/leave/cancel event didn't happenif (couldBeTap === true) {// Measure how long the touch lastedconst touchTime = window.performance.now() - touchStart;// Make sure the touch was less than the threshold to be considered a tapif (touchTime < touchTimeThreshold) {// Don't let browser turn this into a clickevent.preventDefault();/*** Triggered when a `Component` is tapped.** @event Component#tap* @type {MouseEvent}*/this.trigger('tap');// It may be good to copy the touchend event object and change the// type to tap, if the other event properties aren't exact after// Events.fixEvent runs (e.g. event.target)}}});}/*** This function reports user activity whenever touch events happen. This can get* turned off by any sub-components that wants touch events to act another way.** Report user touch activity when touch events occur. User activity gets used to* determine when controls should show/hide. It is simple when it comes to mouse* events, because any mouse event should show the controls. So we capture mouse* events that bubble up to the player and report activity when that happens.* With touch events it isn't as easy as `touchstart` and `touchend` toggle player* controls. So touch events can't help us at the player level either.** User activity gets checked asynchronously. So what could happen is a tap event* on the video turns the controls off. Then the `touchend` event bubbles up to* the player. Which, if it reported user activity, would turn the controls right* back on. 
We also don't want to completely block touch events from bubbling up.* Furthermore a `touchmove` event and anything other than a tap, should not turn* controls back on.** @listens Component#touchstart* @listens Component#touchmove* @listens Component#touchend* @listens Component#touchcancel*/enableTouchActivity() {// Don't continue if the root player doesn't support reporting user activityif (!this.player() || !this.player().reportUserActivity) {return;}// listener for reporting that the user is activeconst report = bind_(this.player(), this.player().reportUserActivity);let touchHolding;this.on('touchstart', function () {report();// For as long as the they are touching the device or have their mouse down,// we consider them active even if they're not moving their finger or mouse.// So we want to continue to update that they are activethis.clearInterval(touchHolding);// report at the same interval as activityChecktouchHolding = this.setInterval(report, 250);});const touchEnd = function (event) {report();// stop the interval that maintains activity if the touch is holdingthis.clearInterval(touchHolding);};this.on('touchmove', report);this.on('touchend', touchEnd);this.on('touchcancel', touchEnd);}/*** A callback that has no parameters and is bound into `Component`s context.** @callback Component~GenericCallback* @this Component*//*** Creates a function that runs after an `x` millisecond timeout. This function is a* wrapper around `window.setTimeout`. There are a few reasons to use this one* instead though:* 1. It gets cleared via {@link Component#clearTimeout} when* {@link Component#dispose} gets called.* 2. The function callback will gets turned into a {@link Component~GenericCallback}** > Note: You can't use `window.clearTimeout` on the id returned by this function. This* will cause its dispose listener not to get cleaned up! Please use* {@link Component#clearTimeout} or {@link Component#dispose} instead.** @param {Component~GenericCallback} fn* The function that will be run after `timeout`.** @param {number} timeout* Timeout in milliseconds to delay before executing the specified function.** @return {number}* Returns a timeout ID that gets used to identify the timeout. It can also* get used in {@link Component#clearTimeout} to clear the timeout that* was set.** @listens Component#dispose* @see [Similar to]{@link https://developer.mozilla.org/en-US/docs/Web/API/WindowTimers/setTimeout}*/setTimeout(fn, timeout) {// declare as variables so they are properly available in timeout function// eslint-disable-next-linevar timeoutId;fn = bind_(this, fn);this.clearTimersOnDispose_();timeoutId = window.setTimeout(() => {if (this.setTimeoutIds_.has(timeoutId)) {this.setTimeoutIds_.delete(timeoutId);}fn();}, timeout);this.setTimeoutIds_.add(timeoutId);return timeoutId;}/*** Clears a timeout that gets created via `window.setTimeout` or* {@link Component#setTimeout}. If you set a timeout via {@link Component#setTimeout}* use this function instead of `window.clearTimout`. If you don't your dispose* listener will not get cleaned up until {@link Component#dispose}!** @param {number} timeoutId* The id of the timeout to clear. 
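*
* A hedged sketch (assumes `this` is a Component; the delay and class name are illustrative):
*
* @example
*    const timeoutId = this.setTimeout(() => this.addClass('vjs-delayed'), 500);
*    this.clearTimeout(timeoutId); // also removes the internal dispose bookkeeping
*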
The return value of* {@link Component#setTimeout} or `window.setTimeout`.** @return {number}* Returns the timeout id that was cleared.** @see [Similar to]{@link https://developer.mozilla.org/en-US/docs/Web/API/WindowTimers/clearTimeout}*/clearTimeout(timeoutId) {if (this.setTimeoutIds_.has(timeoutId)) {this.setTimeoutIds_.delete(timeoutId);window.clearTimeout(timeoutId);}return timeoutId;}/*** Creates a function that gets run every `x` milliseconds. This function is a wrapper* around `window.setInterval`. There are a few reasons to use this one instead though.* 1. It gets cleared via {@link Component#clearInterval} when* {@link Component#dispose} gets called.* 2. The function callback will be a {@link Component~GenericCallback}** @param {Component~GenericCallback} fn* The function to run every `x` seconds.** @param {number} interval* Execute the specified function every `x` milliseconds.** @return {number}* Returns an id that can be used to identify the interval. It can also be be used in* {@link Component#clearInterval} to clear the interval.** @listens Component#dispose* @see [Similar to]{@link https://developer.mozilla.org/en-US/docs/Web/API/WindowTimers/setInterval}*/setInterval(fn, interval) {fn = bind_(this, fn);this.clearTimersOnDispose_();const intervalId = window.setInterval(fn, interval);this.setIntervalIds_.add(intervalId);return intervalId;}/*** Clears an interval that gets created via `window.setInterval` or* {@link Component#setInterval}. If you set an interval via {@link Component#setInterval}* use this function instead of `window.clearInterval`. If you don't your dispose* listener will not get cleaned up until {@link Component#dispose}!** @param {number} intervalId* The id of the interval to clear. The return value of* {@link Component#setInterval} or `window.setInterval`.** @return {number}* Returns the interval id that was cleared.** @see [Similar to]{@link https://developer.mozilla.org/en-US/docs/Web/API/WindowTimers/clearInterval}*/clearInterval(intervalId) {if (this.setIntervalIds_.has(intervalId)) {this.setIntervalIds_.delete(intervalId);window.clearInterval(intervalId);}return intervalId;}/*** Queues up a callback to be passed to requestAnimationFrame (rAF), but* with a few extra bonuses:** - Supports browsers that do not support rAF by falling back to* {@link Component#setTimeout}.** - The callback is turned into a {@link Component~GenericCallback} (i.e.* bound to the component).** - Automatic cancellation of the rAF callback is handled if the component* is disposed before it is called.** @param {Component~GenericCallback} fn* A function that will be bound to this component and executed just* before the browser's next repaint.** @return {number}* Returns an rAF ID that gets used to identify the timeout. It can* also be used in {@link Component#cancelAnimationFrame} to cancel* the animation frame callback.** @listens Component#dispose* @see [Similar to]{@link https://developer.mozilla.org/en-US/docs/Web/API/window/requestAnimationFrame}*/requestAnimationFrame(fn) {this.clearTimersOnDispose_();// declare as variables so they are properly available in rAF function// eslint-disable-next-linevar id;fn = bind_(this, fn);id = window.requestAnimationFrame(() => {if (this.rafIds_.has(id)) {this.rafIds_.delete(id);}fn();});this.rafIds_.add(id);return id;}/*** Request an animation frame, but only one named animation* frame will be queued. 
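*
* A hedged sketch (assumes a Component subclass with an illustrative `update` method):
*
* @example
*    // while a frame named 'MyComponent#update' is pending, repeat calls are ignored
*    this.requestNamedAnimationFrame('MyComponent#update', () => this.update());
*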
Another will never be added until* the previous one finishes.** @param {string} name* The name to give this requestAnimationFrame** @param {Component~GenericCallback} fn* A function that will be bound to this component and executed just* before the browser's next repaint.*/requestNamedAnimationFrame(name, fn) {if (this.namedRafs_.has(name)) {return;}this.clearTimersOnDispose_();fn = bind_(this, fn);const id = this.requestAnimationFrame(() => {fn();if (this.namedRafs_.has(name)) {this.namedRafs_.delete(name);}});this.namedRafs_.set(name, id);return name;}/*** Cancels a current named animation frame if it exists.** @param {string} name* The name of the requestAnimationFrame to cancel.*/cancelNamedAnimationFrame(name) {if (!this.namedRafs_.has(name)) {return;}this.cancelAnimationFrame(this.namedRafs_.get(name));this.namedRafs_.delete(name);}/*** Cancels a queued callback passed to {@link Component#requestAnimationFrame}* (rAF).** If you queue an rAF callback via {@link Component#requestAnimationFrame},* use this function instead of `window.cancelAnimationFrame`. If you don't,* your dispose listener will not get cleaned up until {@link Component#dispose}!** @param {number} id* The rAF ID to clear. The return value of {@link Component#requestAnimationFrame}.** @return {number}* Returns the rAF ID that was cleared.** @see [Similar to]{@link https://developer.mozilla.org/en-US/docs/Web/API/window/cancelAnimationFrame}*/cancelAnimationFrame(id) {if (this.rafIds_.has(id)) {this.rafIds_.delete(id);window.cancelAnimationFrame(id);}return id;}/*** A function to setup `requestAnimationFrame`, `setTimeout`,* and `setInterval`, clearing on dispose.** > Previously each timer added and removed dispose listeners on it's own.* For better performance it was decided to batch them all, and use `Set`s* to track outstanding timer ids.** @private*/clearTimersOnDispose_() {if (this.clearingTimersOnDispose_) {return;}this.clearingTimersOnDispose_ = true;this.one('dispose', () => {[['namedRafs_', 'cancelNamedAnimationFrame'], ['rafIds_', 'cancelAnimationFrame'], ['setTimeoutIds_', 'clearTimeout'], ['setIntervalIds_', 'clearInterval']].forEach(([idName, cancelName]) => {// for a `Set` key will actually be the value again// so forEach((val, val) =>` but for maps we want to use// the key.this[idName].forEach((val, key) => this[cancelName](key));});this.clearingTimersOnDispose_ = false;});}/*** Register a `Component` with `videojs` given the name and the component.** > NOTE: {@link Tech}s should not be registered as a `Component`. 
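*
* A hedged sketch of registering a plain component (the `TitleOverlay` class is illustrative, not part of video.js):
*
* @example
*    const Component = videojs.getComponent('Component');
*    class TitleOverlay extends Component {
*      createEl() {
*        return super.createEl('div', { className: 'vjs-title-overlay' });
*      }
*    }
*    videojs.registerComponent('TitleOverlay', TitleOverlay);
*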
{@link Tech}s* should be registered using {@link Tech.registerTech} or* {@link videojs:videojs.registerTech}.** > NOTE: This function can also be seen on videojs as* {@link videojs:videojs.registerComponent}.** @param {string} name* The name of the `Component` to register.** @param {Component} ComponentToRegister* The `Component` class to register.** @return {Component}* The `Component` that was registered.*/static registerComponent(name, ComponentToRegister) {if (typeof name !== 'string' || !name) {throw new Error(`Illegal component name, "${name}"; must be a non-empty string.`);}const Tech = Component$1.getComponent('Tech');// We need to make sure this check is only done if Tech has been registered.const isTech = Tech && Tech.isTech(ComponentToRegister);const isComp = Component$1 === ComponentToRegister || Component$1.prototype.isPrototypeOf(ComponentToRegister.prototype);if (isTech || !isComp) {let reason;if (isTech) {reason = 'techs must be registered using Tech.registerTech()';} else {reason = 'must be a Component subclass';}throw new Error(`Illegal component, "${name}"; ${reason}.`);}name = toTitleCase$1(name);if (!Component$1.components_) {Component$1.components_ = {};}const Player = Component$1.getComponent('Player');if (name === 'Player' && Player && Player.players) {const players = Player.players;const playerNames = Object.keys(players);// If we have players that were disposed, then their name will still be// in Players.players. So, we must loop through and verify that the value// for each item is not null. This allows registration of the Player component// after all players have been disposed or before any were created.if (players && playerNames.length > 0 && playerNames.map(pname => players[pname]).every(Boolean)) {throw new Error('Can not register Player component after player has been created.');}}Component$1.components_[name] = ComponentToRegister;Component$1.components_[toLowerCase(name)] = ComponentToRegister;return ComponentToRegister;}/*** Get a `Component` based on the name it was registered with.** @param {string} name* The Name of the component to get.** @return {typeof Component}* The `Component` that got registered under the given name.*/static getComponent(name) {if (!name || !Component$1.components_) {return;}return Component$1.components_[name];}}Component$1.registerComponent('Component', Component$1);/*** @file time.js* @module time*//*** Returns the time for the specified index at the start or end* of a TimeRange object.** @typedef {Function} TimeRangeIndex** @param {number} [index=0]* The range number to return the time for.** @return {number}* The time offset at the specified index.** @deprecated The index argument must be provided.* In the future, leaving it out will throw an error.*//*** An object that contains ranges of time, which mimics {@link TimeRanges}.** @typedef {Object} TimeRange** @property {number} length* The number of time ranges represented by this object.** @property {module:time~TimeRangeIndex} start* Returns the time offset at which a specified time range begins.** @property {module:time~TimeRangeIndex} end* Returns the time offset at which a specified time range ends.** @see https://developer.mozilla.org/en-US/docs/Web/API/TimeRanges*//*** Check if any of the time ranges are over the maximum index.** @private* @param {string} fnName* The function name to use for logging** @param {number} index* The index to check** @param {number} maxIndex* The maximum possible index** @throws {Error} if the timeRanges provided are over the 
maxIndex*/function rangeCheck(fnName, index, maxIndex) {if (typeof index !== 'number' || index < 0 || index > maxIndex) {throw new Error(`Failed to execute '${fnName}' on 'TimeRanges': The index provided (${index}) is non-numeric or out of bounds (0-${maxIndex}).`);}}/*** Get the time for the specified index at the start or end* of a TimeRange object.** @private* @param {string} fnName* The function name to use for logging** @param {string} valueIndex* The property that should be used to get the time. should be* 'start' or 'end'** @param {Array} ranges* An array of time ranges** @param {Array} [rangeIndex=0]* The index to start the search at** @return {number}* The time that offset at the specified index.** @deprecated rangeIndex must be set to a value, in the future this will throw an error.* @throws {Error} if rangeIndex is more than the length of ranges*/function getRange(fnName, valueIndex, ranges, rangeIndex) {rangeCheck(fnName, rangeIndex, ranges.length - 1);return ranges[rangeIndex][valueIndex];}/*** Create a time range object given ranges of time.** @private* @param {Array} [ranges]* An array of time ranges.** @return {TimeRange}*/function createTimeRangesObj(ranges) {let timeRangesObj;if (ranges === undefined || ranges.length === 0) {timeRangesObj = {length: 0,start() {throw new Error('This TimeRanges object is empty');},end() {throw new Error('This TimeRanges object is empty');}};} else {timeRangesObj = {length: ranges.length,start: getRange.bind(null, 'start', 0, ranges),end: getRange.bind(null, 'end', 1, ranges)};}if (window.Symbol && window.Symbol.iterator) {timeRangesObj[window.Symbol.iterator] = () => (ranges || []).values();}return timeRangesObj;}/*** Create a `TimeRange` object which mimics an* {@link https://developer.mozilla.org/en-US/docs/Web/API/TimeRanges|HTML5 TimeRanges instance}.** @param {number|Array[]} start* The start of a single range (a number) or an array of ranges (an* array of arrays of two numbers each).** @param {number} end* The end of a single range. Cannot be used with the array form of* the `start` argument.** @return {TimeRange}*/function createTimeRanges$1(start, end) {if (Array.isArray(start)) {return createTimeRangesObj(start);} else if (start === undefined || end === undefined) {return createTimeRangesObj();}return createTimeRangesObj([[start, end]]);}/*** Format seconds as a time string, H:MM:SS or M:SS. Supplying a guide (in* seconds) will force a number of leading zeros to cover the length of the* guide.** @private* @param {number} seconds* Number of seconds to be turned into a string** @param {number} guide* Number (in seconds) to model the string after** @return {string}* Time formatted as H:MM:SS or M:SS*/const defaultImplementation = function (seconds, guide) {seconds = seconds < 0 ? 0 : seconds;let s = Math.floor(seconds % 60);let m = Math.floor(seconds / 60 % 60);let h = Math.floor(seconds / 3600);const gm = Math.floor(guide / 60 % 60);const gh = Math.floor(guide / 3600);// handle invalid timesif (isNaN(seconds) || seconds === Infinity) {// '-' is false for all relational operators (e.g. <, >=) so this setting// will add the minimum number of fields specified by the guideh = m = s = '-';}// Check if we need to show hoursh = h > 0 || gh > 0 ? h + ':' : '';// If hours are showing, we may need to add a leading zero.// Always show at least one digit of minutes.m = ((h || gm >= 10) && m < 10 ? '0' + m : m) + ':';// Check if leading zero is need for secondss = s < 10 ? 
'0' + s : s;return h + m + s;};// Internal pointer to the current implementation.let implementation = defaultImplementation;/*** Replaces the default formatTime implementation with a custom implementation.** @param {Function} customImplementation* A function which will be used in place of the default formatTime* implementation. Will receive the current time in seconds and the* guide (in seconds) as arguments.*/function setFormatTime(customImplementation) {implementation = customImplementation;}/*** Resets formatTime to the default implementation.*/function resetFormatTime() {implementation = defaultImplementation;}/*** Delegates to either the default time formatting function or a custom* function supplied via `setFormatTime`.** Formats seconds as a time string (H:MM:SS or M:SS). Supplying a* guide (in seconds) will force a number of leading zeros to cover the* length of the guide.** @example formatTime(125, 600) === "02:05"* @param {number} seconds* Number of seconds to be turned into a string** @param {number} guide* Number (in seconds) to model the string after** @return {string}* Time formatted as H:MM:SS or M:SS*/function formatTime(seconds, guide = seconds) {return implementation(seconds, guide);}var Time = /*#__PURE__*/Object.freeze({__proto__: null,createTimeRanges: createTimeRanges$1,createTimeRange: createTimeRanges$1,setFormatTime: setFormatTime,resetFormatTime: resetFormatTime,formatTime: formatTime});/*** @file buffer.js* @module buffer*//*** Compute the percentage of the media that has been buffered.** @param { import('./time').TimeRange } buffered* The current `TimeRanges` object representing buffered time ranges** @param {number} duration* Total duration of the media** @return {number}* Percent buffered of the total duration in decimal form.*/function bufferedPercent(buffered, duration) {let bufferedDuration = 0;let start;let end;if (!duration) {return 0;}if (!buffered || !buffered.length) {buffered = createTimeRanges$1(0, 0);}for (let i = 0; i < buffered.length; i++) {start = buffered.start(i);end = buffered.end(i);// buffered end can be bigger than duration by a very small fractionif (end > duration) {end = duration;}bufferedDuration += end - start;}return bufferedDuration / duration;}/*** @file media-error.js*//*** A Custom `MediaError` class which mimics the standard HTML5 `MediaError` class.** @param {number|string|Object|MediaError} value* This can be of multiple types:* - number: should be a standard error code* - string: an error message (the code will be 0)* - Object: arbitrary properties* - `MediaError` (native): used to populate a video.js `MediaError` object* - `MediaError` (video.js): will return itself if it's already a* video.js `MediaError` object.** @see [MediaError Spec]{@link https://dev.w3.org/html5/spec-author-view/video.html#mediaerror}* @see [Encrypted MediaError Spec]{@link https://www.w3.org/TR/2013/WD-encrypted-media-20130510/#error-codes}** @class MediaError*/function MediaError(value) {// Allow redundant calls to this constructor to avoid having `instanceof`// checks peppered around the code.if (value instanceof MediaError) {return value;}if (typeof value === 'number') {this.code = value;} else if (typeof value === 'string') {// default code is zero, so this is a custom errorthis.message = value;} else if (isObject$1(value)) {// We assign the `code` property manually because native `MediaError` objects// do not expose it as an own/enumerable property of the object.if (typeof value.code === 'number') {this.code = value.code;}Object.assign(this, 
value);}if (!this.message) {this.message = MediaError.defaultMessages[this.code] || '';}}/*** The error code that refers to one of the defined `MediaError` types** @type {Number}*/MediaError.prototype.code = 0;/*** An optional message to show with the error. Message is not part of the HTML5* video spec but allows for more informative custom errors.** @type {String}*/MediaError.prototype.message = '';/*** An optional status code that can be set by plugins to allow even more detail about* the error. For example a plugin might provide a specific HTTP status code and an* error message for that code. Then when the plugin gets that error this class will* know how to display an error message for it. This allows a custom message to show* up on the `Player` error overlay.** @type {Array}*/MediaError.prototype.status = null;/*** Errors indexed by the W3C standard. The order **CANNOT CHANGE**! See the* specification listed under {@link MediaError} for more information.** @enum {array}* @readonly* @property {string} 0 - MEDIA_ERR_CUSTOM* @property {string} 1 - MEDIA_ERR_ABORTED* @property {string} 2 - MEDIA_ERR_NETWORK* @property {string} 3 - MEDIA_ERR_DECODE* @property {string} 4 - MEDIA_ERR_SRC_NOT_SUPPORTED* @property {string} 5 - MEDIA_ERR_ENCRYPTED*/MediaError.errorTypes = ['MEDIA_ERR_CUSTOM', 'MEDIA_ERR_ABORTED', 'MEDIA_ERR_NETWORK', 'MEDIA_ERR_DECODE', 'MEDIA_ERR_SRC_NOT_SUPPORTED', 'MEDIA_ERR_ENCRYPTED'];/*** The default `MediaError` messages based on the {@link MediaError.errorTypes}.** @type {Array}* @constant*/MediaError.defaultMessages = {1: 'You aborted the media playback',2: 'A network error caused the media download to fail part-way.',3: 'The media playback was aborted due to a corruption problem or because the media used features your browser did not support.',4: 'The media could not be loaded, either because the server or network failed or because the format is not supported.',5: 'The media is encrypted and we do not have the keys to decrypt it.'};// Add types as properties on MediaError// e.g. MediaError.MEDIA_ERR_SRC_NOT_SUPPORTED = 4;for (let errNum = 0; errNum < MediaError.errorTypes.length; errNum++) {MediaError[MediaError.errorTypes[errNum]] = errNum;// values should be accessible on both the class and instanceMediaError.prototype[MediaError.errorTypes[errNum]] = errNum;}var tuple = SafeParseTuple;function SafeParseTuple(obj, reviver) {var json;var error = null;try {json = JSON.parse(obj, reviver);} catch (err) {error = err;}return [error, json];}/*** Returns whether an object is `Promise`-like (i.e. 
has a `then` method).** @param {Object} value* An object that may or may not be `Promise`-like.** @return {boolean}* Whether or not the object is `Promise`-like.*/function isPromise(value) {return value !== undefined && value !== null && typeof value.then === 'function';}/*** Silence a Promise-like object.** This is useful for avoiding non-harmful, but potentially confusing "uncaught* play promise" rejection error messages.** @param {Object} value* An object that may or may not be `Promise`-like.*/function silencePromise(value) {if (isPromise(value)) {value.then(null, e => {});}}/*** @file text-track-list-converter.js Utilities for capturing text track state and* re-creating tracks based on a capture.** @module text-track-list-converter*//*** Examine a single {@link TextTrack} and return a JSON-compatible javascript object that* represents the {@link TextTrack}'s state.** @param {TextTrack} track* The text track to query.** @return {Object}* A serializable javascript representation of the TextTrack.* @private*/const trackToJson_ = function (track) {const ret = ['kind', 'label', 'language', 'id', 'inBandMetadataTrackDispatchType', 'mode', 'src'].reduce((acc, prop, i) => {if (track[prop]) {acc[prop] = track[prop];}return acc;}, {cues: track.cues && Array.prototype.map.call(track.cues, function (cue) {return {startTime: cue.startTime,endTime: cue.endTime,text: cue.text,id: cue.id};})});return ret;};/*** Examine a {@link Tech} and return a JSON-compatible javascript array that represents the* state of all {@link TextTrack}s currently configured. The return array is compatible with* {@link text-track-list-converter:jsonToTextTracks}.** @param { import('../tech/tech').default } tech* The tech object to query** @return {Array}* A serializable javascript representation of the {@link Tech}s* {@link TextTrackList}.*/const textTracksToJson = function (tech) {const trackEls = tech.$$('track');const trackObjs = Array.prototype.map.call(trackEls, t => t.track);const tracks = Array.prototype.map.call(trackEls, function (trackEl) {const json = trackToJson_(trackEl.track);if (trackEl.src) {json.src = trackEl.src;}return json;});return tracks.concat(Array.prototype.filter.call(tech.textTracks(), function (track) {return trackObjs.indexOf(track) === -1;}).map(trackToJson_));};/*** Create a set of remote {@link TextTrack}s on a {@link Tech} based on an array of javascript* object {@link TextTrack} representations.** @param {Array} json* An array of `TextTrack` representation objects, like those that would be* produced by `textTracksToJson`.** @param {Tech} tech* The `Tech` to create the `TextTrack`s on.*/const jsonToTextTracks = function (json, tech) {json.forEach(function (track) {const addedTrack = tech.addRemoteTextTrack(track).track;if (!track.src && track.cues) {track.cues.forEach(cue => addedTrack.addCue(cue));}});return tech.textTracks();};var textTrackConverter = {textTracksToJson,jsonToTextTracks,trackToJson_};/*** @file modal-dialog.js*/const MODAL_CLASS_NAME = 'vjs-modal-dialog';/*** The `ModalDialog` displays over the video and its controls, which blocks* interaction with the player until it is closed.** Modal dialogs include a "Close" button and will close when that button* is activated - or when ESC is pressed anywhere.** @extends Component*/class ModalDialog extends Component$1 {/*** Create an instance of this class.** @param { import('./player').default } player* The `Player` that this class should be attached to.** @param {Object} [options]* The key/value store of player options.** @param { 
import('./utils/dom').ContentDescriptor} [options.content=undefined]* Provide customized content for this modal.** @param {string} [options.description]* A text description for the modal, primarily for accessibility.** @param {boolean} [options.fillAlways=false]* Normally, modals are automatically filled only the first time* they open. This tells the modal to refresh its content* every time it opens.** @param {string} [options.label]* A text label for the modal, primarily for accessibility.** @param {boolean} [options.pauseOnOpen=true]* If `true`, playback will will be paused if playing when* the modal opens, and resumed when it closes.** @param {boolean} [options.temporary=true]* If `true`, the modal can only be opened once; it will be* disposed as soon as it's closed.** @param {boolean} [options.uncloseable=false]* If `true`, the user will not be able to close the modal* through the UI in the normal ways. Programmatic closing is* still possible.*/constructor(player, options) {super(player, options);this.handleKeyDown_ = e => this.handleKeyDown(e);this.close_ = e => this.close(e);this.opened_ = this.hasBeenOpened_ = this.hasBeenFilled_ = false;this.closeable(!this.options_.uncloseable);this.content(this.options_.content);// Make sure the contentEl is defined AFTER any children are initialized// because we only want the contents of the modal in the contentEl// (not the UI elements like the close button).this.contentEl_ = createEl('div', {className: `${MODAL_CLASS_NAME}-content`}, {role: 'document'});this.descEl_ = createEl('p', {className: `${MODAL_CLASS_NAME}-description vjs-control-text`,id: this.el().getAttribute('aria-describedby')});textContent(this.descEl_, this.description());this.el_.appendChild(this.descEl_);this.el_.appendChild(this.contentEl_);}/*** Create the `ModalDialog`'s DOM element** @return {Element}* The DOM element that gets created.*/createEl() {return super.createEl('div', {className: this.buildCSSClass(),tabIndex: -1}, {'aria-describedby': `${this.id()}_description`,'aria-hidden': 'true','aria-label': this.label(),'role': 'dialog'});}dispose() {this.contentEl_ = null;this.descEl_ = null;this.previouslyActiveEl_ = null;super.dispose();}/*** Builds the default DOM `className`.** @return {string}* The DOM `className` for this object.*/buildCSSClass() {return `${MODAL_CLASS_NAME} vjs-hidden ${super.buildCSSClass()}`;}/*** Returns the label string for this modal. Primarily used for accessibility.** @return {string}* the localized or raw label of this modal.*/label() {return this.localize(this.options_.label || 'Modal Window');}/*** Returns the description string for this modal. 
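*
* A hedged sketch of creating and opening a modal with the options documented above (assumes `player` is an existing Player instance; the content string is illustrative):
*
* @example
*    const ModalDialog = videojs.getComponent('ModalDialog');
*    const modal = new ModalDialog(player, { content: 'Playback paused.', temporary: false, pauseOnOpen: true });
*    player.addChild(modal);
*    modal.open();
*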
Primarily used for* accessibility.** @return {string}* The localized or raw description of this modal.*/description() {let desc = this.options_.description || this.localize('This is a modal window.');// Append a universal closeability message if the modal is closeable.if (this.closeable()) {desc += ' ' + this.localize('This modal can be closed by pressing the Escape key or activating the close button.');}return desc;}/*** Opens the modal.** @fires ModalDialog#beforemodalopen* @fires ModalDialog#modalopen*/open() {if (!this.opened_) {const player = this.player();/*** Fired just before a `ModalDialog` is opened.** @event ModalDialog#beforemodalopen* @type {Event}*/this.trigger('beforemodalopen');this.opened_ = true;// Fill content if the modal has never opened before and// never been filled.if (this.options_.fillAlways || !this.hasBeenOpened_ && !this.hasBeenFilled_) {this.fill();}// If the player was playing, pause it and take note of its previously// playing state.this.wasPlaying_ = !player.paused();if (this.options_.pauseOnOpen && this.wasPlaying_) {player.pause();}this.on('keydown', this.handleKeyDown_);// Hide controls and note if they were enabled.this.hadControls_ = player.controls();player.controls(false);this.show();this.conditionalFocus_();this.el().setAttribute('aria-hidden', 'false');/*** Fired just after a `ModalDialog` is opened.** @event ModalDialog#modalopen* @type {Event}*/this.trigger('modalopen');this.hasBeenOpened_ = true;}}/*** If the `ModalDialog` is currently open or closed.** @param {boolean} [value]* If given, it will open (`true`) or close (`false`) the modal.** @return {boolean}* the current open state of the modaldialog*/opened(value) {if (typeof value === 'boolean') {this[value ? 'open' : 'close']();}return this.opened_;}/*** Closes the modal, does nothing if the `ModalDialog` is* not open.** @fires ModalDialog#beforemodalclose* @fires ModalDialog#modalclose*/close() {if (!this.opened_) {return;}const player = this.player();/*** Fired just before a `ModalDialog` is closed.** @event ModalDialog#beforemodalclose* @type {Event}*/this.trigger('beforemodalclose');this.opened_ = false;if (this.wasPlaying_ && this.options_.pauseOnOpen) {player.play();}this.off('keydown', this.handleKeyDown_);if (this.hadControls_) {player.controls(true);}this.hide();this.el().setAttribute('aria-hidden', 'true');/*** Fired just after a `ModalDialog` is closed.** @event ModalDialog#modalclose* @type {Event}*/this.trigger('modalclose');this.conditionalBlur_();if (this.options_.temporary) {this.dispose();}}/*** Check to see if the `ModalDialog` is closeable via the UI.** @param {boolean} [value]* If given as a boolean, it will set the `closeable` option.** @return {boolean}* Returns the final value of the closable option.*/closeable(value) {if (typeof value === 'boolean') {const closeable = this.closeable_ = !!value;let close = this.getChild('closeButton');// If this is being made closeable and has no close button, add one.if (closeable && !close) {// The close button should be a child of the modal - not its// content element, so temporarily change the content element.const temp = this.contentEl_;this.contentEl_ = this.el_;close = this.addChild('closeButton', {controlText: 'Close Modal Dialog'});this.contentEl_ = temp;this.on(close, 'close', this.close_);}// If this is being made uncloseable and has a close button, remove it.if (!closeable && close) {this.off(close, 'close', this.close_);this.removeChild(close);close.dispose();}}return this.closeable_;}/*** Fill the modal's content element 
with the modal's "content" option.* The content element will be emptied before this change takes place.*/fill() {this.fillWith(this.content());}/*** Fill the modal's content element with arbitrary content.* The content element will be emptied before this change takes place.** @fires ModalDialog#beforemodalfill* @fires ModalDialog#modalfill** @param { import('./utils/dom').ContentDescriptor} [content]* The same rules apply to this as apply to the `content` option.*/fillWith(content) {const contentEl = this.contentEl();const parentEl = contentEl.parentNode;const nextSiblingEl = contentEl.nextSibling;/*** Fired just before a `ModalDialog` is filled with content.** @event ModalDialog#beforemodalfill* @type {Event}*/this.trigger('beforemodalfill');this.hasBeenFilled_ = true;// Detach the content element from the DOM before performing// manipulation to avoid modifying the live DOM multiple times.parentEl.removeChild(contentEl);this.empty();insertContent(contentEl, content);/*** Fired just after a `ModalDialog` is filled with content.** @event ModalDialog#modalfill* @type {Event}*/this.trigger('modalfill');// Re-inject the re-filled content element.if (nextSiblingEl) {parentEl.insertBefore(contentEl, nextSiblingEl);} else {parentEl.appendChild(contentEl);}// make sure that the close button is last in the dialog DOMconst closeButton = this.getChild('closeButton');if (closeButton) {parentEl.appendChild(closeButton.el_);}}/*** Empties the content element. This happens anytime the modal is filled.** @fires ModalDialog#beforemodalempty* @fires ModalDialog#modalempty*/empty() {/*** Fired just before a `ModalDialog` is emptied.** @event ModalDialog#beforemodalempty* @type {Event}*/this.trigger('beforemodalempty');emptyEl(this.contentEl());/*** Fired just after a `ModalDialog` is emptied.** @event ModalDialog#modalempty* @type {Event}*/this.trigger('modalempty');}/*** Gets or sets the modal content, which gets normalized before being* rendered into the DOM.** This does not update the DOM or fill the modal, but it is called during* that process.** @param { import('./utils/dom').ContentDescriptor} [value]* If defined, sets the internal content value to be used on the* next call(s) to `fill`. This value is normalized before being* inserted. To "clear" the internal content value, pass `null`.** @return { import('./utils/dom').ContentDescriptor}* The current content of the modal dialog*/content(value) {if (typeof value !== 'undefined') {this.content_ = value;}return this.content_;}/*** conditionally focus the modal dialog if focus was previously on the player.** @private*/conditionalFocus_() {const activeEl = document.activeElement;const playerEl = this.player_.el_;this.previouslyActiveEl_ = null;if (playerEl.contains(activeEl) || playerEl === activeEl) {this.previouslyActiveEl_ = activeEl;this.focus();}}/*** conditionally blur the element and refocus the last focused element** @private*/conditionalBlur_() {if (this.previouslyActiveEl_) {this.previouslyActiveEl_.focus();this.previouslyActiveEl_ = null;}}/*** Keydown handler. 
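*
* A hedged sketch of the content/fill API documented above (assumes `modal` is an open ModalDialog instance; the message text is illustrative):
*
* @example
*    modal.content(videojs.dom.createEl('p', {}, {}, 'Updated message'));
*    modal.fill(); // empties the content element and re-renders the new content
*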
Attached when modal is focused.** @listens keydown*/handleKeyDown(event) {// Do not allow keydowns to reach out of the modal dialog.event.stopPropagation();if (keycode.isEventKey(event, 'Escape') && this.closeable()) {event.preventDefault();this.close();return;}// exit early if it isn't a tab keyif (!keycode.isEventKey(event, 'Tab')) {return;}const focusableEls = this.focusableEls_();const activeEl = this.el_.querySelector(':focus');let focusIndex;for (let i = 0; i < focusableEls.length; i++) {if (activeEl === focusableEls[i]) {focusIndex = i;break;}}if (document.activeElement === this.el_) {focusIndex = 0;}if (event.shiftKey && focusIndex === 0) {focusableEls[focusableEls.length - 1].focus();event.preventDefault();} else if (!event.shiftKey && focusIndex === focusableEls.length - 1) {focusableEls[0].focus();event.preventDefault();}}/*** get all focusable elements** @private*/focusableEls_() {const allChildren = this.el_.querySelectorAll('*');return Array.prototype.filter.call(allChildren, child => {return (child instanceof window.HTMLAnchorElement || child instanceof window.HTMLAreaElement) && child.hasAttribute('href') || (child instanceof window.HTMLInputElement || child instanceof window.HTMLSelectElement || child instanceof window.HTMLTextAreaElement || child instanceof window.HTMLButtonElement) && !child.hasAttribute('disabled') || child instanceof window.HTMLIFrameElement || child instanceof window.HTMLObjectElement || child instanceof window.HTMLEmbedElement || child.hasAttribute('tabindex') && child.getAttribute('tabindex') !== -1 || child.hasAttribute('contenteditable');});}}/*** Default options for `ModalDialog` default options.** @type {Object}* @private*/ModalDialog.prototype.options_ = {pauseOnOpen: true,temporary: true};Component$1.registerComponent('ModalDialog', ModalDialog);/*** @file track-list.js*//*** Common functionaliy between {@link TextTrackList}, {@link AudioTrackList}, and* {@link VideoTrackList}** @extends EventTarget*/class TrackList extends EventTarget$2 {/*** Create an instance of this class** @param { import('./track').default[] } tracks* A list of tracks to initialize the list with.** @abstract*/constructor(tracks = []) {super();this.tracks_ = [];/*** @memberof TrackList* @member {number} length* The current number of `Track`s in the this Trackist.* @instance*/Object.defineProperty(this, 'length', {get() {return this.tracks_.length;}});for (let i = 0; i < tracks.length; i++) {this.addTrack(tracks[i]);}}/*** Add a {@link Track} to the `TrackList`** @param { import('./track').default } track* The audio, video, or text track to add to the list.** @fires TrackList#addtrack*/addTrack(track) {const index = this.tracks_.length;if (!('' + index in this)) {Object.defineProperty(this, index, {get() {return this.tracks_[index];}});}// Do not add duplicate tracksif (this.tracks_.indexOf(track) === -1) {this.tracks_.push(track);/*** Triggered when a track is added to a track list.** @event TrackList#addtrack* @type {Event}* @property {Track} track* A reference to track that was added.*/this.trigger({track,type: 'addtrack',target: this});}/*** Triggered when a track label is changed.** @event TrackList#addtrack* @type {Event}* @property {Track} track* A reference to track that was added.*/track.labelchange_ = () => {this.trigger({track,type: 'labelchange',target: this});};if (isEvented(track)) {track.addEventListener('labelchange', track.labelchange_);}}/*** Remove a {@link Track} from the `TrackList`** @param { import('./track').default } rtrack* The audio, video, or 
text track to remove from the list.** @fires TrackList#removetrack*/removeTrack(rtrack) {let track;for (let i = 0, l = this.length; i < l; i++) {if (this[i] === rtrack) {track = this[i];if (track.off) {track.off();}this.tracks_.splice(i, 1);break;}}if (!track) {return;}/*** Triggered when a track is removed from track list.** @event TrackList#removetrack* @type {Event}* @property {Track} track* A reference to track that was removed.*/this.trigger({track,type: 'removetrack',target: this});}/*** Get a Track from the TrackList by a tracks id** @param {string} id - the id of the track to get* @method getTrackById* @return { import('./track').default }* @private*/getTrackById(id) {let result = null;for (let i = 0, l = this.length; i < l; i++) {const track = this[i];if (track.id === id) {result = track;break;}}return result;}}/*** Triggered when a different track is selected/enabled.** @event TrackList#change* @type {Event}*//*** Events that can be called with on + eventName. See {@link EventHandler}.** @property {Object} TrackList#allowedEvents_* @protected*/TrackList.prototype.allowedEvents_ = {change: 'change',addtrack: 'addtrack',removetrack: 'removetrack',labelchange: 'labelchange'};// emulate attribute EventHandler support to allow for feature detectionfor (const event in TrackList.prototype.allowedEvents_) {TrackList.prototype['on' + event] = null;}/*** @file audio-track-list.js*//*** Anywhere we call this function we diverge from the spec* as we only support one enabled audiotrack at a time** @param {AudioTrackList} list* list to work on** @param { import('./audio-track').default } track* The track to skip** @private*/const disableOthers$1 = function (list, track) {for (let i = 0; i < list.length; i++) {if (!Object.keys(list[i]).length || track.id === list[i].id) {continue;}// another audio track is enabled, disable itlist[i].enabled = false;}};/*** The current list of {@link AudioTrack} for a media file.** @see [Spec]{@link https://html.spec.whatwg.org/multipage/embedded-content.html#audiotracklist}* @extends TrackList*/class AudioTrackList extends TrackList {/*** Create an instance of this class.** @param { import('./audio-track').default[] } [tracks=[]]* A list of `AudioTrack` to instantiate the list with.*/constructor(tracks = []) {// make sure only 1 track is enabled// sorted from last index to first indexfor (let i = tracks.length - 1; i >= 0; i--) {if (tracks[i].enabled) {disableOthers$1(tracks, tracks[i]);break;}}super(tracks);this.changing_ = false;}/*** Add an {@link AudioTrack} to the `AudioTrackList`.** @param { import('./audio-track').default } track* The AudioTrack to add to the list** @fires TrackList#addtrack*/addTrack(track) {if (track.enabled) {disableOthers$1(this, track);}super.addTrack(track);// native tracks don't have thisif (!track.addEventListener) {return;}track.enabledChange_ = () => {// when we are disabling other tracks (since we don't support// more than one track at a time) we will set changing_// to true so that we don't trigger additional change eventsif (this.changing_) {return;}this.changing_ = true;disableOthers$1(this, track);this.changing_ = false;this.trigger('change');};/*** @listens AudioTrack#enabledchange* @fires TrackList#change*/track.addEventListener('enabledchange', track.enabledChange_);}removeTrack(rtrack) {super.removeTrack(rtrack);if (rtrack.removeEventListener && rtrack.enabledChange_) {rtrack.removeEventListener('enabledchange', rtrack.enabledChange_);rtrack.enabledChange_ = null;}}}/*** @file video-track-list.js*//*** Un-select all 
other {@link VideoTrack}s that are selected.** @param {VideoTrackList} list* list to work on** @param { import('./video-track').default } track* The track to skip** @private*/const disableOthers = function (list, track) {for (let i = 0; i < list.length; i++) {if (!Object.keys(list[i]).length || track.id === list[i].id) {continue;}// another video track is enabled, disable itlist[i].selected = false;}};/*** The current list of {@link VideoTrack} for a video.** @see [Spec]{@link https://html.spec.whatwg.org/multipage/embedded-content.html#videotracklist}* @extends TrackList*/class VideoTrackList extends TrackList {/*** Create an instance of this class.** @param {VideoTrack[]} [tracks=[]]* A list of `VideoTrack` to instantiate the list with.*/constructor(tracks = []) {// make sure only 1 track is enabled// sorted from last index to first indexfor (let i = tracks.length - 1; i >= 0; i--) {if (tracks[i].selected) {disableOthers(tracks, tracks[i]);break;}}super(tracks);this.changing_ = false;/*** @member {number} VideoTrackList#selectedIndex* The current index of the selected {@link VideoTrack`}.*/Object.defineProperty(this, 'selectedIndex', {get() {for (let i = 0; i < this.length; i++) {if (this[i].selected) {return i;}}return -1;},set() {}});}/*** Add a {@link VideoTrack} to the `VideoTrackList`.** @param { import('./video-track').default } track* The VideoTrack to add to the list** @fires TrackList#addtrack*/addTrack(track) {if (track.selected) {disableOthers(this, track);}super.addTrack(track);// native tracks don't have thisif (!track.addEventListener) {return;}track.selectedChange_ = () => {if (this.changing_) {return;}this.changing_ = true;disableOthers(this, track);this.changing_ = false;this.trigger('change');};/*** @listens VideoTrack#selectedchange* @fires TrackList#change*/track.addEventListener('selectedchange', track.selectedChange_);}removeTrack(rtrack) {super.removeTrack(rtrack);if (rtrack.removeEventListener && rtrack.selectedChange_) {rtrack.removeEventListener('selectedchange', rtrack.selectedChange_);rtrack.selectedChange_ = null;}}}/*** @file text-track-list.js*//*** The current list of {@link TextTrack} for a media file.** @see [Spec]{@link https://html.spec.whatwg.org/multipage/embedded-content.html#texttracklist}* @extends TrackList*/class TextTrackList extends TrackList {/*** Add a {@link TextTrack} to the `TextTrackList`** @param { import('./text-track').default } track* The text track to add to the list.** @fires TrackList#addtrack*/addTrack(track) {super.addTrack(track);if (!this.queueChange_) {this.queueChange_ = () => this.queueTrigger('change');}if (!this.triggerSelectedlanguagechange) {this.triggerSelectedlanguagechange_ = () => this.trigger('selectedlanguagechange');}/*** @listens TextTrack#modechange* @fires TrackList#change*/track.addEventListener('modechange', this.queueChange_);const nonLanguageTextTrackKind = ['metadata', 'chapters'];if (nonLanguageTextTrackKind.indexOf(track.kind) === -1) {track.addEventListener('modechange', this.triggerSelectedlanguagechange_);}}removeTrack(rtrack) {super.removeTrack(rtrack);// manually remove the event handlers we addedif (rtrack.removeEventListener) {if (this.queueChange_) {rtrack.removeEventListener('modechange', this.queueChange_);}if (this.selectedlanguagechange_) {rtrack.removeEventListener('modechange', this.triggerSelectedlanguagechange_);}}}}/*** @file html-track-element-list.js*//*** The current list of {@link HtmlTrackElement}s.*/class HtmlTrackElementList {/*** Create an instance of this class.** @param 
{HtmlTrackElement[]} [tracks=[]]* A list of `HtmlTrackElement` to instantiate the list with.*/constructor(trackElements = []) {this.trackElements_ = [];/*** @memberof HtmlTrackElementList* @member {number} length* The current number of `Track`s in the this Trackist.* @instance*/Object.defineProperty(this, 'length', {get() {return this.trackElements_.length;}});for (let i = 0, length = trackElements.length; i < length; i++) {this.addTrackElement_(trackElements[i]);}}/*** Add an {@link HtmlTrackElement} to the `HtmlTrackElementList`** @param {HtmlTrackElement} trackElement* The track element to add to the list.** @private*/addTrackElement_(trackElement) {const index = this.trackElements_.length;if (!('' + index in this)) {Object.defineProperty(this, index, {get() {return this.trackElements_[index];}});}// Do not add duplicate elementsif (this.trackElements_.indexOf(trackElement) === -1) {this.trackElements_.push(trackElement);}}/*** Get an {@link HtmlTrackElement} from the `HtmlTrackElementList` given an* {@link TextTrack}.** @param {TextTrack} track* The track associated with a track element.** @return {HtmlTrackElement|undefined}* The track element that was found or undefined.** @private*/getTrackElementByTrack_(track) {let trackElement_;for (let i = 0, length = this.trackElements_.length; i < length; i++) {if (track === this.trackElements_[i].track) {trackElement_ = this.trackElements_[i];break;}}return trackElement_;}/*** Remove a {@link HtmlTrackElement} from the `HtmlTrackElementList`** @param {HtmlTrackElement} trackElement* The track element to remove from the list.** @private*/removeTrackElement_(trackElement) {for (let i = 0, length = this.trackElements_.length; i < length; i++) {if (trackElement === this.trackElements_[i]) {if (this.trackElements_[i].track && typeof this.trackElements_[i].track.off === 'function') {this.trackElements_[i].track.off();}if (typeof this.trackElements_[i].off === 'function') {this.trackElements_[i].off();}this.trackElements_.splice(i, 1);break;}}}}/*** @file text-track-cue-list.js*//*** @typedef {Object} TextTrackCueList~TextTrackCue** @property {string} id* The unique id for this text track cue** @property {number} startTime* The start time for this text track cue** @property {number} endTime* The end time for this text track cue** @property {boolean} pauseOnExit* Pause when the end time is reached if true.** @see [Spec]{@link https://html.spec.whatwg.org/multipage/embedded-content.html#texttrackcue}*//*** A List of TextTrackCues.** @see [Spec]{@link https://html.spec.whatwg.org/multipage/embedded-content.html#texttrackcuelist}*/class TextTrackCueList {/*** Create an instance of this class..** @param {Array} cues* A list of cues to be initialized with*/constructor(cues) {TextTrackCueList.prototype.setCues_.call(this, cues);/*** @memberof TextTrackCueList* @member {number} length* The current number of `TextTrackCue`s in the TextTrackCueList.* @instance*/Object.defineProperty(this, 'length', {get() {return this.length_;}});}/*** A setter for cues in this list. 
Creates getters* an an index for the cues.** @param {Array} cues* An array of cues to set** @private*/setCues_(cues) {const oldLength = this.length || 0;let i = 0;const l = cues.length;this.cues_ = cues;this.length_ = cues.length;const defineProp = function (index) {if (!('' + index in this)) {Object.defineProperty(this, '' + index, {get() {return this.cues_[index];}});}};if (oldLength < l) {i = oldLength;for (; i < l; i++) {defineProp.call(this, i);}}}/*** Get a `TextTrackCue` that is currently in the `TextTrackCueList` by id.** @param {string} id* The id of the cue that should be searched for.** @return {TextTrackCueList~TextTrackCue|null}* A single cue or null if none was found.*/getCueById(id) {let result = null;for (let i = 0, l = this.length; i < l; i++) {const cue = this[i];if (cue.id === id) {result = cue;break;}}return result;}}/*** @file track-kinds.js*//*** All possible `VideoTrackKind`s** @see https://html.spec.whatwg.org/multipage/embedded-content.html#dom-videotrack-kind* @typedef VideoTrack~Kind* @enum*/const VideoTrackKind = {alternative: 'alternative',captions: 'captions',main: 'main',sign: 'sign',subtitles: 'subtitles',commentary: 'commentary'};/*** All possible `AudioTrackKind`s** @see https://html.spec.whatwg.org/multipage/embedded-content.html#dom-audiotrack-kind* @typedef AudioTrack~Kind* @enum*/const AudioTrackKind = {'alternative': 'alternative','descriptions': 'descriptions','main': 'main','main-desc': 'main-desc','translation': 'translation','commentary': 'commentary'};/*** All possible `TextTrackKind`s** @see https://html.spec.whatwg.org/multipage/embedded-content.html#dom-texttrack-kind* @typedef TextTrack~Kind* @enum*/const TextTrackKind = {subtitles: 'subtitles',captions: 'captions',descriptions: 'descriptions',chapters: 'chapters',metadata: 'metadata'};/*** All possible `TextTrackMode`s** @see https://html.spec.whatwg.org/multipage/embedded-content.html#texttrackmode* @typedef TextTrack~Mode* @enum*/const TextTrackMode = {disabled: 'disabled',hidden: 'hidden',showing: 'showing'};/*** @file track.js*//*** A Track class that contains all of the common functionality for {@link AudioTrack},* {@link VideoTrack}, and {@link TextTrack}.** > Note: This class should not be used directly** @see {@link https://html.spec.whatwg.org/multipage/embedded-content.html}* @extends EventTarget* @abstract*/class Track extends EventTarget$2 {/*** Create an instance of this class.** @param {Object} [options={}]* Object of option names and values** @param {string} [options.kind='']* A valid kind for the track type you are creating.** @param {string} [options.id='vjs_track_' + Guid.newGUID()]* A unique id for this AudioTrack.** @param {string} [options.label='']* The menu label for this track.** @param {string} [options.language='']* A valid two character language code.** @abstract*/constructor(options = {}) {super();const trackProps = {id: options.id || 'vjs_track_' + newGUID(),kind: options.kind || '',language: options.language || ''};let label = options.label || '';/*** @memberof Track* @member {string} id* The id of this track. Cannot be changed after creation.* @instance** @readonly*//*** @memberof Track* @member {string} kind* The kind of track that this is. Cannot be changed after creation.* @instance** @readonly*//*** @memberof Track* @member {string} language* The two letter language code for this track. 
Cannot be changed after* creation.* @instance** @readonly*/for (const key in trackProps) {Object.defineProperty(this, key, {get() {return trackProps[key];},set() {}});}/*** @memberof Track* @member {string} label* The label of this track. Cannot be changed after creation.* @instance** @fires Track#labelchange*/Object.defineProperty(this, 'label', {get() {return label;},set(newLabel) {if (newLabel !== label) {label = newLabel;/*** An event that fires when label changes on this track.** > Note: This is not part of the spec!** @event Track#labelchange* @type {Event}*/this.trigger('labelchange');}}});}}/*** @file url.js* @module url*//*** @typedef {Object} url:URLObject** @property {string} protocol* The protocol of the url that was parsed.** @property {string} hostname* The hostname of the url that was parsed.** @property {string} port* The port of the url that was parsed.** @property {string} pathname* The pathname of the url that was parsed.** @property {string} search* The search query of the url that was parsed.** @property {string} hash* The hash of the url that was parsed.** @property {string} host* The host of the url that was parsed.*//*** Resolve and parse the elements of a URL.** @function* @param {String} url* The url to parse** @return {url:URLObject}* An object of url details*/const parseUrl = function (url) {// This entire method can be replace with URL once we are able to drop IE11const props = ['protocol', 'hostname', 'port', 'pathname', 'search', 'hash', 'host'];// add the url to an anchor and let the browser parse the URLconst a = document.createElement('a');a.href = url;// Copy the specific URL properties to a new object// This is also needed for IE because the anchor loses its// properties when it's removed from the domconst details = {};for (let i = 0; i < props.length; i++) {details[props[i]] = a[props[i]];}// IE adds the port to the host property unlike everyone else. If// a port identifier is added for standard ports, strip it.if (details.protocol === 'http:') {details.host = details.host.replace(/:80$/, '');}if (details.protocol === 'https:') {details.host = details.host.replace(/:443$/, '');}if (!details.protocol) {details.protocol = window.location.protocol;}/* istanbul ignore if */if (!details.host) {details.host = window.location.host;}return details;};/*** Get absolute version of relative URL.** @function* @param {string} url* URL to make absolute** @return {string}* Absolute URL** @see http://stackoverflow.com/questions/470832/getting-an-absolute-url-from-a-relative-one-ie6-issue*/const getAbsoluteURL = function (url) {// Check if absolute URLif (!url.match(/^https?:\/\//)) {// Add the url to an anchor and let the browser parse it to convert to an absolute urlconst a = document.createElement('a');a.href = url;url = a.href;}return url;};/*** Returns the extension of the passed file name. 
It will return an empty string* if passed an invalid path.** @function* @param {string} path* The fileName path like '/path/to/file.mp4'** @return {string}* The extension in lower case or an empty string if no* extension could be found.*/const getFileExtension = function (path) {if (typeof path === 'string') {const splitPathRe = /^(\/?)([\s\S]*?)((?:\.{1,2}|[^\/]+?)(\.([^\.\/\?]+)))(?:[\/]*|[\?].*)$/;const pathParts = splitPathRe.exec(path);if (pathParts) {return pathParts.pop().toLowerCase();}}return '';};/*** Returns whether the url passed is a cross domain request or not.** @function* @param {string} url* The url to check.** @param {Object} [winLoc]* the domain to check the url against, defaults to window.location** @param {string} [winLoc.protocol]* The window location protocol defaults to window.location.protocol** @param {string} [winLoc.host]* The window location host defaults to window.location.host** @return {boolean}* Whether it is a cross domain request or not.*/const isCrossOrigin = function (url, winLoc = window.location) {const urlInfo = parseUrl(url);// IE8 protocol relative urls will return ':' for protocolconst srcProtocol = urlInfo.protocol === ':' ? winLoc.protocol : urlInfo.protocol;// Check if url is for another domain/origin// IE8 doesn't know location.origin, so we won't rely on it hereconst crossOrigin = srcProtocol + urlInfo.host !== winLoc.protocol + winLoc.host;return crossOrigin;};var Url = /*#__PURE__*/Object.freeze({__proto__: null,parseUrl: parseUrl,getAbsoluteURL: getAbsoluteURL,getFileExtension: getFileExtension,isCrossOrigin: isCrossOrigin});var win;if (typeof window !== "undefined") {win = window;} else if (typeof commonjsGlobal !== "undefined") {win = commonjsGlobal;} else if (typeof self !== "undefined") {win = self;} else {win = {};}var window_1 = win;var _extends_1 = createCommonjsModule(function (module) {function _extends() {module.exports = _extends = Object.assign ? 
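// Prefer the native Object.assign (bound just below) when it exists; otherwise fall back to
// the manual own-property copy loop so _extends behaves the same on older engines.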
Object.assign.bind() : function (target) {for (var i = 1; i < arguments.length; i++) {var source = arguments[i];for (var key in source) {if (Object.prototype.hasOwnProperty.call(source, key)) {target[key] = source[key];}}}return target;}, module.exports.__esModule = true, module.exports["default"] = module.exports;return _extends.apply(this, arguments);}module.exports = _extends, module.exports.__esModule = true, module.exports["default"] = module.exports;});var _extends$1 = unwrapExports(_extends_1);var isFunction_1 = isFunction;var toString = Object.prototype.toString;function isFunction(fn) {if (!fn) {return false;}var string = toString.call(fn);return string === '[object Function]' || typeof fn === 'function' && string !== '[object RegExp]' || typeof window !== 'undefined' && (// IE8 and belowfn === window.setTimeout || fn === window.alert || fn === window.confirm || fn === window.prompt);}var httpResponseHandler = function httpResponseHandler(callback, decodeResponseBody) {if (decodeResponseBody === void 0) {decodeResponseBody = false;}return function (err, response, responseBody) {// if the XHR failed, return that errorif (err) {callback(err);return;} // if the HTTP status code is 4xx or 5xx, the request also failedif (response.statusCode >= 400 && response.statusCode <= 599) {var cause = responseBody;if (decodeResponseBody) {if (window_1.TextDecoder) {var charset = getCharset(response.headers && response.headers['content-type']);try {cause = new TextDecoder(charset).decode(responseBody);} catch (e) {}} else {cause = String.fromCharCode.apply(null, new Uint8Array(responseBody));}}callback({cause: cause});return;} // otherwise, request succeededcallback(null, responseBody);};};function getCharset(contentTypeHeader) {if (contentTypeHeader === void 0) {contentTypeHeader = '';}return contentTypeHeader.toLowerCase().split(';').reduce(function (charset, contentType) {var _contentType$split = contentType.split('='),type = _contentType$split[0],value = _contentType$split[1];if (type.trim() === 'charset') {return value.trim();}return charset;}, 'utf-8');}var httpHandler = httpResponseHandler;createXHR.httpHandler = httpHandler;/*** @license* slighly modified parse-headers 2.0.2 <https://github.com/kesla/parse-headers/>* Copyright (c) 2014 David Björklund* Available under the MIT license* <https://github.com/kesla/parse-headers/blob/master/LICENCE>*/var parseHeaders = function parseHeaders(headers) {var result = {};if (!headers) {return result;}headers.trim().split('\n').forEach(function (row) {var index = row.indexOf(':');var key = row.slice(0, index).trim().toLowerCase();var value = row.slice(index + 1).trim();if (typeof result[key] === 'undefined') {result[key] = value;} else if (Array.isArray(result[key])) {result[key].push(value);} else {result[key] = [result[key], value];}});return result;};var lib = createXHR; // Allow use of default import syntax in TypeScriptvar default_1 = createXHR;createXHR.XMLHttpRequest = window_1.XMLHttpRequest || noop$1;createXHR.XDomainRequest = "withCredentials" in new createXHR.XMLHttpRequest() ? createXHR.XMLHttpRequest : window_1.XDomainRequest;forEachArray(["get", "put", "post", "patch", "head", "delete"], function (method) {createXHR[method === "delete" ? 
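// Each HTTP verb gets a convenience helper on createXHR (createXHR.get, .put, .post, .patch,
// .head); "delete" is exposed as createXHR.del rather than createXHR.delete. Each helper just
// uppercases the verb and forwards to _createXHR, so a call looks roughly like
//   createXHR.del('/captions/42.vtt', { withCredentials: true }, callback);
// (the path and options in that line are illustrative only).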
"del" : method] = function (uri, options, callback) {options = initParams(uri, options, callback);options.method = method.toUpperCase();return _createXHR(options);};});function forEachArray(array, iterator) {for (var i = 0; i < array.length; i++) {iterator(array[i]);}}function isEmpty(obj) {for (var i in obj) {if (obj.hasOwnProperty(i)) return false;}return true;}function initParams(uri, options, callback) {var params = uri;if (isFunction_1(options)) {callback = options;if (typeof uri === "string") {params = {uri: uri};}} else {params = _extends_1({}, options, {uri: uri});}params.callback = callback;return params;}function createXHR(uri, options, callback) {options = initParams(uri, options, callback);return _createXHR(options);}function _createXHR(options) {if (typeof options.callback === "undefined") {throw new Error("callback argument missing");}var called = false;var callback = function cbOnce(err, response, body) {if (!called) {called = true;options.callback(err, response, body);}};function readystatechange() {if (xhr.readyState === 4) {setTimeout(loadFunc, 0);}}function getBody() {// Chrome with requestType=blob throws errors arround when even testing access to responseTextvar body = undefined;if (xhr.response) {body = xhr.response;} else {body = xhr.responseText || getXml(xhr);}if (isJson) {try {body = JSON.parse(body);} catch (e) {}}return body;}function errorFunc(evt) {clearTimeout(timeoutTimer);if (!(evt instanceof Error)) {evt = new Error("" + (evt || "Unknown XMLHttpRequest Error"));}evt.statusCode = 0;return callback(evt, failureResponse);} // will load the data & process the response in a special response objectfunction loadFunc() {if (aborted) return;var status;clearTimeout(timeoutTimer);if (options.useXDR && xhr.status === undefined) {//IE8 CORS GET successful response doesn't have a status field, but body is finestatus = 200;} else {status = xhr.status === 1223 ? 204 : xhr.status;}var response = failureResponse;var err = null;if (status !== 0) {response = {body: getBody(),statusCode: status,method: method,headers: {},url: uri,rawRequest: xhr};if (xhr.getAllResponseHeaders) {//remember xhr can in fact be XDR for CORS in IEresponse.headers = parseHeaders(xhr.getAllResponseHeaders());}} else {err = new Error("Internal XMLHttpRequest Error");}return callback(err, response, response.body);}var xhr = options.xhr || null;if (!xhr) {if (options.cors || options.useXDR) {xhr = new createXHR.XDomainRequest();} else {xhr = new createXHR.XMLHttpRequest();}}var key;var aborted;var uri = xhr.url = options.uri || options.url;var method = xhr.method = options.method || "GET";var body = options.body || options.data;var headers = xhr.headers = options.headers || {};var sync = !!options.sync;var isJson = false;var timeoutTimer;var failureResponse = {body: undefined,headers: {},statusCode: 0,method: method,url: uri,rawRequest: xhr};if ("json" in options && options.json !== false) {isJson = true;headers["accept"] || headers["Accept"] || (headers["Accept"] = "application/json"); //Don't override existing accept header declared by userif (method !== "GET" && method !== "HEAD") {headers["content-type"] || headers["Content-Type"] || (headers["Content-Type"] = "application/json"); //Don't override existing accept header declared by userbody = JSON.stringify(options.json === true ? 
body : options.json);}}xhr.onreadystatechange = readystatechange;xhr.onload = loadFunc;xhr.onerror = errorFunc; // IE9 must have onprogress be set to a unique function.xhr.onprogress = function () {// IE must die};xhr.onabort = function () {aborted = true;};xhr.ontimeout = errorFunc;xhr.open(method, uri, !sync, options.username, options.password); //has to be after openif (!sync) {xhr.withCredentials = !!options.withCredentials;} // Cannot set timeout with sync request// not setting timeout on the xhr object, because of old webkits etc. not handling that correctly// both npm's request and jquery 1.x use this kind of timeout, so this is being consistentif (!sync && options.timeout > 0) {timeoutTimer = setTimeout(function () {if (aborted) return;aborted = true; //IE9 may still call readystatechangexhr.abort("timeout");var e = new Error("XMLHttpRequest timeout");e.code = "ETIMEDOUT";errorFunc(e);}, options.timeout);}if (xhr.setRequestHeader) {for (key in headers) {if (headers.hasOwnProperty(key)) {xhr.setRequestHeader(key, headers[key]);}}} else if (options.headers && !isEmpty(options.headers)) {throw new Error("Headers cannot be set on an XDomainRequest object");}if ("responseType" in options) {xhr.responseType = options.responseType;}if ("beforeSend" in options && typeof options.beforeSend === "function") {options.beforeSend(xhr);} // Microsoft Edge browser sends "undefined" when send is called with undefined value.// XMLHttpRequest spec says to pass null as body to indicate no body// See https://github.com/naugtur/xhr/issues/100.xhr.send(body || null);return xhr;}function getXml(xhr) {// xhr.responseXML will throw Exception "InvalidStateError" or "DOMException"// See https://developer.mozilla.org/en-US/docs/Web/API/XMLHttpRequest/responseXML.try {if (xhr.responseType === "document") {return xhr.responseXML;}var firefoxBugTakenEffect = xhr.responseXML && xhr.responseXML.documentElement.nodeName === "parsererror";if (xhr.responseType === "" && !firefoxBugTakenEffect) {return xhr.responseXML;}} catch (e) {}return null;}function noop$1() {}lib.default = default_1;/*** @file text-track.js*//*** Takes a webvtt file contents and parses it into cues** @param {string} srcContent* webVTT file contents** @param {TextTrack} track* TextTrack to add cues to. Cues come from the srcContent.** @private*/const parseCues = function (srcContent, track) {const parser = new window.WebVTT.Parser(window, window.vttjs, window.WebVTT.StringDecoder());const errors = [];parser.oncue = function (cue) {track.addCue(cue);};parser.onparsingerror = function (error) {errors.push(error);};parser.onflush = function () {track.trigger({type: 'loadeddata',target: track});};parser.parse(srcContent);if (errors.length > 0) {if (window.console && window.console.groupCollapsed) {window.console.groupCollapsed(`Text Track parsing errors for ${track.src}`);}errors.forEach(error => log$1.error(error));if (window.console && window.console.groupEnd) {window.console.groupEnd();}}parser.flush();};/*** Load a `TextTrack` from a specified url.** @param {string} src* Url to load track from.** @param {TextTrack} track* Track to add cues to. 
Comes from the content at the end of `url`.** @private*/const loadTrack = function (src, track) {const opts = {uri: src};const crossOrigin = isCrossOrigin(src);if (crossOrigin) {opts.cors = crossOrigin;}const withCredentials = track.tech_.crossOrigin() === 'use-credentials';if (withCredentials) {opts.withCredentials = withCredentials;}lib(opts, bind_(this, function (err, response, responseBody) {if (err) {return log$1.error(err, response);}track.loaded_ = true;// Make sure that vttjs has loaded, otherwise, wait till it finished loading// NOTE: this is only used for the alt/video.novtt.js buildif (typeof window.WebVTT !== 'function') {if (track.tech_) {// to prevent use before define eslint error, we define loadHandler// as a let heretrack.tech_.any(['vttjsloaded', 'vttjserror'], event => {if (event.type === 'vttjserror') {log$1.error(`vttjs failed to load, stopping trying to process ${track.src}`);return;}return parseCues(responseBody, track);});}} else {parseCues(responseBody, track);}}));};/*** A representation of a single `TextTrack`.** @see [Spec]{@link https://html.spec.whatwg.org/multipage/embedded-content.html#texttrack}* @extends Track*/class TextTrack extends Track {/*** Create an instance of this class.** @param {Object} options={}* Object of option names and values** @param { import('../tech/tech').default } options.tech* A reference to the tech that owns this TextTrack.** @param {TextTrack~Kind} [options.kind='subtitles']* A valid text track kind.** @param {TextTrack~Mode} [options.mode='disabled']* A valid text track mode.** @param {string} [options.id='vjs_track_' + Guid.newGUID()]* A unique id for this TextTrack.** @param {string} [options.label='']* The menu label for this track.** @param {string} [options.language='']* A valid two character language code.** @param {string} [options.srclang='']* A valid two character language code. An alternative, but deprioritized* version of `options.language`** @param {string} [options.src]* A url to TextTrack cues.** @param {boolean} [options.default]* If this track should default to on or off.*/constructor(options = {}) {if (!options.tech) {throw new Error('A tech was not provided.');}const settings = merge$2(options, {kind: TextTrackKind[options.kind] || 'subtitles',language: options.language || options.srclang || ''});let mode = TextTrackMode[settings.mode] || 'disabled';const default_ = settings.default;if (settings.kind === 'metadata' || settings.kind === 'chapters') {mode = 'hidden';}super(settings);this.tech_ = settings.tech;this.cues_ = [];this.activeCues_ = [];this.preload_ = this.tech_.preloadTextTracks !== false;const cues = new TextTrackCueList(this.cues_);const activeCues = new TextTrackCueList(this.activeCues_);let changed = false;this.timeupdateHandler = bind_(this, function (event = {}) {if (this.tech_.isDisposed()) {return;}if (!this.tech_.isReady_) {if (event.type !== 'timeupdate') {this.rvf_ = this.tech_.requestVideoFrameCallback(this.timeupdateHandler);}return;}// Accessing this.activeCues for the side-effects of updating itself// due to its nature as a getter function. 
Do not remove or cues will// stop updating!// Use the setter to prevent deletion from uglify (pure_getters rule)this.activeCues = this.activeCues;if (changed) {this.trigger('cuechange');changed = false;}if (event.type !== 'timeupdate') {this.rvf_ = this.tech_.requestVideoFrameCallback(this.timeupdateHandler);}});const disposeHandler = () => {this.stopTracking();};this.tech_.one('dispose', disposeHandler);if (mode !== 'disabled') {this.startTracking();}Object.defineProperties(this, {/*** @memberof TextTrack* @member {boolean} default* If this track was set to be on or off by default. Cannot be changed after* creation.* @instance** @readonly*/default: {get() {return default_;},set() {}},/*** @memberof TextTrack* @member {string} mode* Set the mode of this TextTrack to a valid {@link TextTrack~Mode}. Will* not be set if setting to an invalid mode.* @instance** @fires TextTrack#modechange*/mode: {get() {return mode;},set(newMode) {if (!TextTrackMode[newMode]) {return;}if (mode === newMode) {return;}mode = newMode;if (!this.preload_ && mode !== 'disabled' && this.cues.length === 0) {// On-demand load.loadTrack(this.src, this);}this.stopTracking();if (mode !== 'disabled') {this.startTracking();}/*** An event that fires when mode changes on this track. This allows* the TextTrackList that holds this track to act accordingly.** > Note: This is not part of the spec!** @event TextTrack#modechange* @type {Event}*/this.trigger('modechange');}},/*** @memberof TextTrack* @member {TextTrackCueList} cues* The text track cue list for this TextTrack.* @instance*/cues: {get() {if (!this.loaded_) {return null;}return cues;},set() {}},/*** @memberof TextTrack* @member {TextTrackCueList} activeCues* The list text track cues that are currently active for this TextTrack.* @instance*/activeCues: {get() {if (!this.loaded_) {return null;}// nothing to doif (this.cues.length === 0) {return activeCues;}const ct = this.tech_.currentTime();const active = [];for (let i = 0, l = this.cues.length; i < l; i++) {const cue = this.cues[i];if (cue.startTime <= ct && cue.endTime >= ct) {active.push(cue);}}changed = false;if (active.length !== this.activeCues_.length) {changed = true;} else {for (let i = 0; i < active.length; i++) {if (this.activeCues_.indexOf(active[i]) === -1) {changed = true;}}}this.activeCues_ = active;activeCues.setCues_(this.activeCues_);return activeCues;},// /!\ Keep this setter empty (see the timeupdate handler above)set() {}}});if (settings.src) {this.src = settings.src;if (!this.preload_) {// Tracks will load on-demand.// Act like we're loaded for other purposes.this.loaded_ = true;}if (this.preload_ || settings.kind !== 'subtitles' && settings.kind !== 'captions') {loadTrack(this.src, this);}} else {this.loaded_ = true;}}startTracking() {// More precise cues based on requestVideoFrameCallback with a requestAnimationFram fallbackthis.rvf_ = this.tech_.requestVideoFrameCallback(this.timeupdateHandler);// Also listen to timeupdate in case rVFC/rAF stops (window in background, audio in video el)this.tech_.on('timeupdate', this.timeupdateHandler);}stopTracking() {if (this.rvf_) {this.tech_.cancelVideoFrameCallback(this.rvf_);this.rvf_ = undefined;}this.tech_.off('timeupdate', this.timeupdateHandler);}/*** Add a cue to the internal list of cues.** @param {TextTrack~Cue} cue* The cue to add to our internal list*/addCue(originalCue) {let cue = originalCue;// Testing if the cue is a VTTCue in a way that survives minificationif (!('getCueAsHTML' in cue)) {cue = new window.vttjs.VTTCue(originalCue.startTime, 
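// A cue without getCueAsHTML is not a vttjs VTTCue (e.g. a plain cue-like object), so it is
// rebuilt as one here; the loop below copies any remaining own properties and the id, and
// originalCue_ keeps a reference to the original so removeCue() can match either object.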
originalCue.endTime, originalCue.text);for (const prop in originalCue) {if (!(prop in cue)) {cue[prop] = originalCue[prop];}}// make sure that `id` is copied overcue.id = originalCue.id;cue.originalCue_ = originalCue;}const tracks = this.tech_.textTracks();for (let i = 0; i < tracks.length; i++) {if (tracks[i] !== this) {tracks[i].removeCue(cue);}}this.cues_.push(cue);this.cues.setCues_(this.cues_);}/*** Remove a cue from our internal list** @param {TextTrack~Cue} removeCue* The cue to remove from our internal list*/removeCue(removeCue) {let i = this.cues_.length;while (i--) {const cue = this.cues_[i];if (cue === removeCue || cue.originalCue_ && cue.originalCue_ === removeCue) {this.cues_.splice(i, 1);this.cues.setCues_(this.cues_);break;}}}}/*** cuechange - One or more cues in the track have become active or stopped being active.* @protected*/TextTrack.prototype.allowedEvents_ = {cuechange: 'cuechange'};/*** A representation of a single `AudioTrack`. If it is part of an {@link AudioTrackList}* only one `AudioTrack` in the list will be enabled at a time.** @see [Spec]{@link https://html.spec.whatwg.org/multipage/embedded-content.html#audiotrack}* @extends Track*/class AudioTrack extends Track {/*** Create an instance of this class.** @param {Object} [options={}]* Object of option names and values** @param {AudioTrack~Kind} [options.kind='']* A valid audio track kind** @param {string} [options.id='vjs_track_' + Guid.newGUID()]* A unique id for this AudioTrack.** @param {string} [options.label='']* The menu label for this track.** @param {string} [options.language='']* A valid two character language code.** @param {boolean} [options.enabled]* If this track is the one that is currently playing. If this track is part of* an {@link AudioTrackList}, only one {@link AudioTrack} will be enabled.*/constructor(options = {}) {const settings = merge$2(options, {kind: AudioTrackKind[options.kind] || ''});super(settings);let enabled = false;/*** @memberof AudioTrack* @member {boolean} enabled* If this `AudioTrack` is enabled or not. When setting this will* fire {@link AudioTrack#enabledchange} if the state of enabled is changed.* @instance** @fires VideoTrack#selectedchange*/Object.defineProperty(this, 'enabled', {get() {return enabled;},set(newEnabled) {// an invalid or unchanged valueif (typeof newEnabled !== 'boolean' || newEnabled === enabled) {return;}enabled = newEnabled;/*** An event that fires when enabled changes on this track. This allows* the AudioTrackList that holds this track to act accordingly.** > Note: This is not part of the spec! 
Native tracks will do* this internally without an event.** @event AudioTrack#enabledchange* @type {Event}*/this.trigger('enabledchange');}});// if the user sets this track to selected then// set selected to that true value otherwise// we keep it falseif (settings.enabled) {this.enabled = settings.enabled;}this.loaded_ = true;}}/*** A representation of a single `VideoTrack`.** @see [Spec]{@link https://html.spec.whatwg.org/multipage/embedded-content.html#videotrack}* @extends Track*/class VideoTrack extends Track {/*** Create an instance of this class.** @param {Object} [options={}]* Object of option names and values** @param {string} [options.kind='']* A valid {@link VideoTrack~Kind}** @param {string} [options.id='vjs_track_' + Guid.newGUID()]* A unique id for this AudioTrack.** @param {string} [options.label='']* The menu label for this track.** @param {string} [options.language='']* A valid two character language code.** @param {boolean} [options.selected]* If this track is the one that is currently playing.*/constructor(options = {}) {const settings = merge$2(options, {kind: VideoTrackKind[options.kind] || ''});super(settings);let selected = false;/*** @memberof VideoTrack* @member {boolean} selected* If this `VideoTrack` is selected or not. When setting this will* fire {@link VideoTrack#selectedchange} if the state of selected changed.* @instance** @fires VideoTrack#selectedchange*/Object.defineProperty(this, 'selected', {get() {return selected;},set(newSelected) {// an invalid or unchanged valueif (typeof newSelected !== 'boolean' || newSelected === selected) {return;}selected = newSelected;/*** An event that fires when selected changes on this track. This allows* the VideoTrackList that holds this track to act accordingly.** > Note: This is not part of the spec! Native tracks will do* this internally without an event.** @event VideoTrack#selectedchange* @type {Event}*/this.trigger('selectedchange');}});// if the user sets this track to selected then// set selected to that true value otherwise// we keep it falseif (settings.selected) {this.selected = settings.selected;}}}/*** @file html-track-element.js*//*** A single track represented in the DOM.** @see [Spec]{@link https://html.spec.whatwg.org/multipage/embedded-content.html#htmltrackelement}* @extends EventTarget*/class HTMLTrackElement extends EventTarget$2 {/*** Create an instance of this class.** @param {Object} options={}* Object of option names and values** @param { import('../tech/tech').default } options.tech* A reference to the tech that owns this HTMLTrackElement.** @param {TextTrack~Kind} [options.kind='subtitles']* A valid text track kind.** @param {TextTrack~Mode} [options.mode='disabled']* A valid text track mode.** @param {string} [options.id='vjs_track_' + Guid.newGUID()]* A unique id for this TextTrack.** @param {string} [options.label='']* The menu label for this track.** @param {string} [options.language='']* A valid two character language code.** @param {string} [options.srclang='']* A valid two character language code. 
An alternative, but deprioritized* version of `options.language`** @param {string} [options.src]* A url to TextTrack cues.** @param {boolean} [options.default]* If this track should default to on or off.*/constructor(options = {}) {super();let readyState;const track = new TextTrack(options);this.kind = track.kind;this.src = track.src;this.srclang = track.language;this.label = track.label;this.default = track.default;Object.defineProperties(this, {/*** @memberof HTMLTrackElement* @member {HTMLTrackElement~ReadyState} readyState* The current ready state of the track element.* @instance*/readyState: {get() {return readyState;}},/*** @memberof HTMLTrackElement* @member {TextTrack} track* The underlying TextTrack object.* @instance**/track: {get() {return track;}}});readyState = HTMLTrackElement.NONE;/*** @listens TextTrack#loadeddata* @fires HTMLTrackElement#load*/track.addEventListener('loadeddata', () => {readyState = HTMLTrackElement.LOADED;this.trigger({type: 'load',target: this});});}}/*** @protected*/HTMLTrackElement.prototype.allowedEvents_ = {load: 'load'};/*** The text track not loaded state.** @type {number}* @static*/HTMLTrackElement.NONE = 0;/*** The text track loading state.** @type {number}* @static*/HTMLTrackElement.LOADING = 1;/*** The text track loaded state.** @type {number}* @static*/HTMLTrackElement.LOADED = 2;/*** The text track failed to load state.** @type {number}* @static*/HTMLTrackElement.ERROR = 3;/** This file contains all track properties that are used in* player.js, tech.js, html5.js and possibly other techs in the future.*/const NORMAL = {audio: {ListClass: AudioTrackList,TrackClass: AudioTrack,capitalName: 'Audio'},video: {ListClass: VideoTrackList,TrackClass: VideoTrack,capitalName: 'Video'},text: {ListClass: TextTrackList,TrackClass: TextTrack,capitalName: 'Text'}};Object.keys(NORMAL).forEach(function (type) {NORMAL[type].getterName = `${type}Tracks`;NORMAL[type].privateName = `${type}Tracks_`;});const REMOTE = {remoteText: {ListClass: TextTrackList,TrackClass: TextTrack,capitalName: 'RemoteText',getterName: 'remoteTextTracks',privateName: 'remoteTextTracks_'},remoteTextEl: {ListClass: HtmlTrackElementList,TrackClass: HTMLTrackElement,capitalName: 'RemoteTextTrackEls',getterName: 'remoteTextTrackEls',privateName: 'remoteTextTrackEls_'}};const ALL = Object.assign({}, NORMAL, REMOTE);REMOTE.names = Object.keys(REMOTE);NORMAL.names = Object.keys(NORMAL);ALL.names = [].concat(REMOTE.names).concat(NORMAL.names);var minDoc = {};var topLevel = typeof commonjsGlobal !== 'undefined' ? commonjsGlobal : typeof window !== 'undefined' ? 
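// Pick a top-level global for non-browser environments (CommonJS / server rendering); the
// document shim below is cached on it under the '__GLOBAL_DOCUMENT_CACHE@4' key when no real
// document exists. (For reference, the track tables above derive getterName/privateName from
// each type key, e.g. NORMAL.audio.getterName === 'audioTracks', privateName === 'audioTracks_'.)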
window : {};var doccy;if (typeof document !== 'undefined') {doccy = document;} else {doccy = topLevel['__GLOBAL_DOCUMENT_CACHE@4'];if (!doccy) {doccy = topLevel['__GLOBAL_DOCUMENT_CACHE@4'] = minDoc;}}var document_1 = doccy;/*** Copyright 2013 vtt.js Contributors** Licensed under the Apache License, Version 2.0 (the "License");* you may not use this file except in compliance with the License.* You may obtain a copy of the License at** http://www.apache.org/licenses/LICENSE-2.0** Unless required by applicable law or agreed to in writing, software* distributed under the License is distributed on an "AS IS" BASIS,* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.* See the License for the specific language governing permissions and* limitations under the License.*//* -*- Mode: Java; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- *//* vim: set shiftwidth=2 tabstop=2 autoindent cindent expandtab: */var _objCreate = Object.create || function () {function F() {}return function (o) {if (arguments.length !== 1) {throw new Error('Object.create shim only accepts one parameter.');}F.prototype = o;return new F();};}();// Creates a new ParserError object from an errorData object. The errorData// object should have default code and message properties. The default message// property can be overriden by passing in a message parameter.// See ParsingError.Errors below for acceptable errors.function ParsingError(errorData, message) {this.name = "ParsingError";this.code = errorData.code;this.message = message || errorData.message;}ParsingError.prototype = _objCreate(Error.prototype);ParsingError.prototype.constructor = ParsingError;// ParsingError metadata for acceptable ParsingErrors.ParsingError.Errors = {BadSignature: {code: 0,message: "Malformed WebVTT signature."},BadTimeStamp: {code: 1,message: "Malformed time stamp."}};// Try to parse input as a time stamp.function parseTimeStamp(input) {function computeSeconds(h, m, s, f) {return (h | 0) * 3600 + (m | 0) * 60 + (s | 0) + (f | 0) / 1000;}var m = input.match(/^(\d+):(\d{1,2})(:\d{1,2})?\.(\d{3})/);if (!m) {return null;}if (m[3]) {// Timestamp takes the form of [hours]:[minutes]:[seconds].[milliseconds]return computeSeconds(m[1], m[2], m[3].replace(":", ""), m[4]);} else if (m[1] > 59) {// Timestamp takes the form of [hours]:[minutes].[milliseconds]// First position is hours as it's over 59.return computeSeconds(m[1], m[2], 0, m[4]);} else {// Timestamp takes the form of [minutes]:[seconds].[milliseconds]return computeSeconds(0, m[1], m[2], m[4]);}}// A settings object holds key/value pairs and will ignore anything but the first// assignment to a specific key.function Settings() {this.values = _objCreate(null);}Settings.prototype = {// Only accept the first assignment to any key.set: function (k, v) {if (!this.get(k) && v !== "") {this.values[k] = v;}},// Return the value for a key, or a default value.// If 'defaultKey' is passed then 'dflt' is assumed to be an object with// a number of possible default values as properties where 'defaultKey' is// the key of the property that will be chosen; otherwise it's assumed to be// a single value.get: function (k, dflt, defaultKey) {if (defaultKey) {return this.has(k) ? this.values[k] : dflt[defaultKey];}return this.has(k) ? 
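// Worked examples for parseTimeStamp above (values follow directly from computeSeconds):
//   '01:02:03.250' -> 1*3600 + 2*60 + 3 + 250/1000 = 3723.25   (hours:minutes:seconds form)
//   '00:30.500'    -> 30.5                                      (minutes:seconds form)
//   '90:00.000'    -> 90*3600 = 324000                          (first field > 59, read as hours:minutes)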
this.values[k] : dflt;},// Check whether we have a value for a key.has: function (k) {return k in this.values;},// Accept a setting if its one of the given alternatives.alt: function (k, v, a) {for (var n = 0; n < a.length; ++n) {if (v === a[n]) {this.set(k, v);break;}}},// Accept a setting if its a valid (signed) integer.integer: function (k, v) {if (/^-?\d+$/.test(v)) {// integerthis.set(k, parseInt(v, 10));}},// Accept a setting if its a valid percentage.percent: function (k, v) {if (v.match(/^([\d]{1,3})(\.[\d]*)?%$/)) {v = parseFloat(v);if (v >= 0 && v <= 100) {this.set(k, v);return true;}}return false;}};// Helper function to parse input into groups separated by 'groupDelim', and// interprete each group as a key/value pair separated by 'keyValueDelim'.function parseOptions(input, callback, keyValueDelim, groupDelim) {var groups = groupDelim ? input.split(groupDelim) : [input];for (var i in groups) {if (typeof groups[i] !== "string") {continue;}var kv = groups[i].split(keyValueDelim);if (kv.length !== 2) {continue;}var k = kv[0].trim();var v = kv[1].trim();callback(k, v);}}function parseCue(input, cue, regionList) {// Remember the original input if we need to throw an error.var oInput = input;// 4.1 WebVTT timestampfunction consumeTimeStamp() {var ts = parseTimeStamp(input);if (ts === null) {throw new ParsingError(ParsingError.Errors.BadTimeStamp, "Malformed timestamp: " + oInput);}// Remove time stamp from input.input = input.replace(/^[^\sa-zA-Z-]+/, "");return ts;}// 4.4.2 WebVTT cue settingsfunction consumeCueSettings(input, cue) {var settings = new Settings();parseOptions(input, function (k, v) {switch (k) {case "region":// Find the last region we parsed with the same region id.for (var i = regionList.length - 1; i >= 0; i--) {if (regionList[i].id === v) {settings.set(k, regionList[i].region);break;}}break;case "vertical":settings.alt(k, v, ["rl", "lr"]);break;case "line":var vals = v.split(","),vals0 = vals[0];settings.integer(k, vals0);settings.percent(k, vals0) ? 
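// If the 'line' value parses as a percentage, the cue is positioned as a percentage of the
// video area and snapToLines is switched off; an integer value is kept as a line number and
// snapToLines later defaults to true.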
settings.set("snapToLines", false) : null;settings.alt(k, vals0, ["auto"]);if (vals.length === 2) {settings.alt("lineAlign", vals[1], ["start", "center", "end"]);}break;case "position":vals = v.split(",");settings.percent(k, vals[0]);if (vals.length === 2) {settings.alt("positionAlign", vals[1], ["start", "center", "end"]);}break;case "size":settings.percent(k, v);break;case "align":settings.alt(k, v, ["start", "center", "end", "left", "right"]);break;}}, /:/, /\s/);// Apply default values for any missing fields.cue.region = settings.get("region", null);cue.vertical = settings.get("vertical", "");try {cue.line = settings.get("line", "auto");} catch (e) {}cue.lineAlign = settings.get("lineAlign", "start");cue.snapToLines = settings.get("snapToLines", true);cue.size = settings.get("size", 100);// Safari still uses the old middle value and won't accept centertry {cue.align = settings.get("align", "center");} catch (e) {cue.align = settings.get("align", "middle");}try {cue.position = settings.get("position", "auto");} catch (e) {cue.position = settings.get("position", {start: 0,left: 0,center: 50,middle: 50,end: 100,right: 100}, cue.align);}cue.positionAlign = settings.get("positionAlign", {start: "start",left: "start",center: "center",middle: "center",end: "end",right: "end"}, cue.align);}function skipWhitespace() {input = input.replace(/^\s+/, "");}// 4.1 WebVTT cue timings.skipWhitespace();cue.startTime = consumeTimeStamp(); // (1) collect cue start timeskipWhitespace();if (input.substr(0, 3) !== "-->") {// (3) next characters must match "-->"throw new ParsingError(ParsingError.Errors.BadTimeStamp, "Malformed time stamp (time stamps must be separated by '-->'): " + oInput);}input = input.substr(3);skipWhitespace();cue.endTime = consumeTimeStamp(); // (5) collect cue end time// 4.1 WebVTT cue settings list.skipWhitespace();consumeCueSettings(input, cue);}// When evaluating this file as part of a Webpack bundle for server// side rendering, `document` is an empty object.var TEXTAREA_ELEMENT = document_1.createElement && document_1.createElement("textarea");var TAG_NAME = {c: "span",i: "i",b: "b",u: "u",ruby: "ruby",rt: "rt",v: "span",lang: "span"};// 5.1 default text color// 5.2 default text background color is equivalent to text color with bg_ prefixvar DEFAULT_COLOR_CLASS = {white: 'rgba(255,255,255,1)',lime: 'rgba(0,255,0,1)',cyan: 'rgba(0,255,255,1)',red: 'rgba(255,0,0,1)',yellow: 'rgba(255,255,0,1)',magenta: 'rgba(255,0,255,1)',blue: 'rgba(0,0,255,1)',black: 'rgba(0,0,0,1)'};var TAG_ANNOTATION = {v: "title",lang: "lang"};var NEEDS_PARENT = {rt: "ruby"};// Parse content into a document fragment.function parseContent(window, input) {function nextToken() {// Check for end-of-string.if (!input) {return null;}// Consume 'n' characters from the input.function consume(result) {input = input.substr(result.length);return result;}var m = input.match(/^([^<]*)(<[^>]*>?)?/);// If there is some text before the next tag, return it, otherwise return// the tag.return consume(m[1] ? 
m[1] : m[2]);}function unescape(s) {TEXTAREA_ELEMENT.innerHTML = s;s = TEXTAREA_ELEMENT.textContent;TEXTAREA_ELEMENT.textContent = "";return s;}function shouldAdd(current, element) {return !NEEDS_PARENT[element.localName] || NEEDS_PARENT[element.localName] === current.localName;}// Create an element for this tag.function createElement(type, annotation) {var tagName = TAG_NAME[type];if (!tagName) {return null;}var element = window.document.createElement(tagName);var name = TAG_ANNOTATION[type];if (name && annotation) {element[name] = annotation.trim();}return element;}var rootDiv = window.document.createElement("div"),current = rootDiv,t,tagStack = [];while ((t = nextToken()) !== null) {if (t[0] === '<') {if (t[1] === "/") {// If the closing tag matches, move back up to the parent node.if (tagStack.length && tagStack[tagStack.length - 1] === t.substr(2).replace(">", "")) {tagStack.pop();current = current.parentNode;}// Otherwise just ignore the end tag.continue;}var ts = parseTimeStamp(t.substr(1, t.length - 2));var node;if (ts) {// Timestamps are lead nodes as well.node = window.document.createProcessingInstruction("timestamp", ts);current.appendChild(node);continue;}var m = t.match(/^<([^.\s/0-9>]+)(\.[^\s\\>]+)?([^>\\]+)?(\\?)>?$/);// If we can't parse the tag, skip to the next tag.if (!m) {continue;}// Try to construct an element, and ignore the tag if we couldn't.node = createElement(m[1], m[3]);if (!node) {continue;}// Determine if the tag should be added based on the context of where it// is placed in the cuetext.if (!shouldAdd(current, node)) {continue;}// Set the class list (as a list of classes, separated by space).if (m[2]) {var classes = m[2].split('.');classes.forEach(function (cl) {var bgColor = /^bg_/.test(cl);// slice out `bg_` if it's a background colorvar colorName = bgColor ? cl.slice(3) : cl;if (DEFAULT_COLOR_CLASS.hasOwnProperty(colorName)) {var propName = bgColor ? 'background-color' : 'color';var propValue = DEFAULT_COLOR_CLASS[colorName];node.style[propName] = propValue;}});node.className = classes.join(' ');}// Append the node to the current node, and enter the scope of the new// node.tagStack.push(m[1]);current.appendChild(node);current = node;continue;}// Text nodes are leaf nodes.current.appendChild(window.document.createTextNode(unescape(t)));}return rootDiv;}// This is a list of all the Unicode characters that have a strong// right-to-left category. What this means is that these characters are// written right-to-left for sure. It was generated by pulling all the strong// right-to-left characters out of the Unicode data table. 
That table can// found at: http://www.unicode.org/Public/UNIDATA/UnicodeData.txtvar strongRTLRanges = [[0x5be, 0x5be], [0x5c0, 0x5c0], [0x5c3, 0x5c3], [0x5c6, 0x5c6], [0x5d0, 0x5ea], [0x5f0, 0x5f4], [0x608, 0x608], [0x60b, 0x60b], [0x60d, 0x60d], [0x61b, 0x61b], [0x61e, 0x64a], [0x66d, 0x66f], [0x671, 0x6d5], [0x6e5, 0x6e6], [0x6ee, 0x6ef], [0x6fa, 0x70d], [0x70f, 0x710], [0x712, 0x72f], [0x74d, 0x7a5], [0x7b1, 0x7b1], [0x7c0, 0x7ea], [0x7f4, 0x7f5], [0x7fa, 0x7fa], [0x800, 0x815], [0x81a, 0x81a], [0x824, 0x824], [0x828, 0x828], [0x830, 0x83e], [0x840, 0x858], [0x85e, 0x85e], [0x8a0, 0x8a0], [0x8a2, 0x8ac], [0x200f, 0x200f], [0xfb1d, 0xfb1d], [0xfb1f, 0xfb28], [0xfb2a, 0xfb36], [0xfb38, 0xfb3c], [0xfb3e, 0xfb3e], [0xfb40, 0xfb41], [0xfb43, 0xfb44], [0xfb46, 0xfbc1], [0xfbd3, 0xfd3d], [0xfd50, 0xfd8f], [0xfd92, 0xfdc7], [0xfdf0, 0xfdfc], [0xfe70, 0xfe74], [0xfe76, 0xfefc], [0x10800, 0x10805], [0x10808, 0x10808], [0x1080a, 0x10835], [0x10837, 0x10838], [0x1083c, 0x1083c], [0x1083f, 0x10855], [0x10857, 0x1085f], [0x10900, 0x1091b], [0x10920, 0x10939], [0x1093f, 0x1093f], [0x10980, 0x109b7], [0x109be, 0x109bf], [0x10a00, 0x10a00], [0x10a10, 0x10a13], [0x10a15, 0x10a17], [0x10a19, 0x10a33], [0x10a40, 0x10a47], [0x10a50, 0x10a58], [0x10a60, 0x10a7f], [0x10b00, 0x10b35], [0x10b40, 0x10b55], [0x10b58, 0x10b72], [0x10b78, 0x10b7f], [0x10c00, 0x10c48], [0x1ee00, 0x1ee03], [0x1ee05, 0x1ee1f], [0x1ee21, 0x1ee22], [0x1ee24, 0x1ee24], [0x1ee27, 0x1ee27], [0x1ee29, 0x1ee32], [0x1ee34, 0x1ee37], [0x1ee39, 0x1ee39], [0x1ee3b, 0x1ee3b], [0x1ee42, 0x1ee42], [0x1ee47, 0x1ee47], [0x1ee49, 0x1ee49], [0x1ee4b, 0x1ee4b], [0x1ee4d, 0x1ee4f], [0x1ee51, 0x1ee52], [0x1ee54, 0x1ee54], [0x1ee57, 0x1ee57], [0x1ee59, 0x1ee59], [0x1ee5b, 0x1ee5b], [0x1ee5d, 0x1ee5d], [0x1ee5f, 0x1ee5f], [0x1ee61, 0x1ee62], [0x1ee64, 0x1ee64], [0x1ee67, 0x1ee6a], [0x1ee6c, 0x1ee72], [0x1ee74, 0x1ee77], [0x1ee79, 0x1ee7c], [0x1ee7e, 0x1ee7e], [0x1ee80, 0x1ee89], [0x1ee8b, 0x1ee9b], [0x1eea1, 0x1eea3], [0x1eea5, 0x1eea9], [0x1eeab, 0x1eebb], [0x10fffd, 0x10fffd]];function isStrongRTLChar(charCode) {for (var i = 0; i < strongRTLRanges.length; i++) {var currentRange = strongRTLRanges[i];if (charCode >= currentRange[0] && charCode <= currentRange[1]) {return true;}}return false;}function determineBidi(cueDiv) {var nodeStack = [],text = "",charCode;if (!cueDiv || !cueDiv.childNodes) {return "ltr";}function pushNodes(nodeStack, node) {for (var i = node.childNodes.length - 1; i >= 0; i--) {nodeStack.push(node.childNodes[i]);}}function nextTextNode(nodeStack) {if (!nodeStack || !nodeStack.length) {return null;}var node = nodeStack.pop(),text = node.textContent || node.innerText;if (text) {// TODO: This should match all unicode type B characters (paragraph// separator characters). 
See issue #115.var m = text.match(/^.*(\n|\r)/);if (m) {nodeStack.length = 0;return m[0];}return text;}if (node.tagName === "ruby") {return nextTextNode(nodeStack);}if (node.childNodes) {pushNodes(nodeStack, node);return nextTextNode(nodeStack);}}pushNodes(nodeStack, cueDiv);while (text = nextTextNode(nodeStack)) {for (var i = 0; i < text.length; i++) {charCode = text.charCodeAt(i);if (isStrongRTLChar(charCode)) {return "rtl";}}}return "ltr";}function computeLinePos(cue) {if (typeof cue.line === "number" && (cue.snapToLines || cue.line >= 0 && cue.line <= 100)) {return cue.line;}if (!cue.track || !cue.track.textTrackList || !cue.track.textTrackList.mediaElement) {return -1;}var track = cue.track,trackList = track.textTrackList,count = 0;for (var i = 0; i < trackList.length && trackList[i] !== track; i++) {if (trackList[i].mode === "showing") {count++;}}return ++count * -1;}function StyleBox() {}// Apply styles to a div. If there is no div passed then it defaults to the// div on 'this'.StyleBox.prototype.applyStyles = function (styles, div) {div = div || this.div;for (var prop in styles) {if (styles.hasOwnProperty(prop)) {div.style[prop] = styles[prop];}}};StyleBox.prototype.formatStyle = function (val, unit) {return val === 0 ? 0 : val + unit;};// Constructs the computed display state of the cue (a div). Places the div// into the overlay which should be a block level element (usually a div).function CueStyleBox(window, cue, styleOptions) {StyleBox.call(this);this.cue = cue;// Parse our cue's text into a DOM tree rooted at 'cueDiv'. This div will// have inline positioning and will function as the cue background box.this.cueDiv = parseContent(window, cue.text);var styles = {color: "rgba(255, 255, 255, 1)",backgroundColor: "rgba(0, 0, 0, 0.8)",position: "relative",left: 0,right: 0,top: 0,bottom: 0,display: "inline",writingMode: cue.vertical === "" ? "horizontal-tb" : cue.vertical === "lr" ? "vertical-lr" : "vertical-rl",unicodeBidi: "plaintext"};this.applyStyles(styles, this.cueDiv);// Create an absolutely positioned div that will be used to position the cue// div. Note, all WebVTT cue-setting alignments are equivalent to the CSS// mirrors of them except middle instead of center on Safari.this.div = window.document.createElement("div");styles = {direction: determineBidi(this.cueDiv),writingMode: cue.vertical === "" ? "horizontal-tb" : cue.vertical === "lr" ? "vertical-lr" : "vertical-rl",unicodeBidi: "plaintext",textAlign: cue.align === "middle" ? "center" : cue.align,font: styleOptions.font,whiteSpace: "pre-line",position: "absolute"};this.applyStyles(styles);this.div.appendChild(this.cueDiv);// Calculate the distance from the reference edge of the viewport to the text// position of the cue box. 
The reference edge will be resolved later when// the box orientation styles are applied.var textPos = 0;switch (cue.positionAlign) {case "start":case "line-left":textPos = cue.position;break;case "center":textPos = cue.position - cue.size / 2;break;case "end":case "line-right":textPos = cue.position - cue.size;break;}// Horizontal box orientation; textPos is the distance from the left edge of the// area to the left edge of the box and cue.size is the distance extending to// the right from there.if (cue.vertical === "") {this.applyStyles({left: this.formatStyle(textPos, "%"),width: this.formatStyle(cue.size, "%")});// Vertical box orientation; textPos is the distance from the top edge of the// area to the top edge of the box and cue.size is the height extending// downwards from there.} else {this.applyStyles({top: this.formatStyle(textPos, "%"),height: this.formatStyle(cue.size, "%")});}this.move = function (box) {this.applyStyles({top: this.formatStyle(box.top, "px"),bottom: this.formatStyle(box.bottom, "px"),left: this.formatStyle(box.left, "px"),right: this.formatStyle(box.right, "px"),height: this.formatStyle(box.height, "px"),width: this.formatStyle(box.width, "px")});};}CueStyleBox.prototype = _objCreate(StyleBox.prototype);CueStyleBox.prototype.constructor = CueStyleBox;// Represents the co-ordinates of an Element in a way that we can easily// compute things with such as if it overlaps or intersects with another Element.// Can initialize it with either a StyleBox or another BoxPosition.function BoxPosition(obj) {// Either a BoxPosition was passed in and we need to copy it, or a StyleBox// was passed in and we need to copy the results of 'getBoundingClientRect'// as the object returned is readonly. All co-ordinate values are in reference// to the viewport origin (top left).var lh, height, width, top;if (obj.div) {height = obj.div.offsetHeight;width = obj.div.offsetWidth;top = obj.div.offsetTop;var rects = (rects = obj.div.childNodes) && (rects = rects[0]) && rects.getClientRects && rects.getClientRects();obj = obj.div.getBoundingClientRect();// In certain cases the outter div will be slightly larger then the sum of// the inner div's lines. This could be due to bold text, etc, on some platforms.// In this case we should get the average line height and use that. This will// result in the desired behaviour.lh = rects ? Math.max(rects[0] && rects[0].height || 0, obj.height / rects.length) : 0;}this.left = obj.left;this.right = obj.right;this.top = obj.top || top;this.height = obj.height || height;this.bottom = obj.bottom || top + (obj.height || height);this.width = obj.width || width;this.lineHeight = lh !== undefined ? lh : obj.lineHeight;}// Move the box along a particular axis. Optionally pass in an amount to move// the box. If no amount is passed then the default is the line height of the// box.BoxPosition.prototype.move = function (axis, toMove) {toMove = toMove !== undefined ? 
toMove : this.lineHeight;switch (axis) {case "+x":this.left += toMove;this.right += toMove;break;case "-x":this.left -= toMove;this.right -= toMove;break;case "+y":this.top += toMove;this.bottom += toMove;break;case "-y":this.top -= toMove;this.bottom -= toMove;break;}};// Check if this box overlaps another box, b2.BoxPosition.prototype.overlaps = function (b2) {return this.left < b2.right && this.right > b2.left && this.top < b2.bottom && this.bottom > b2.top;};// Check if this box overlaps any other boxes in boxes.BoxPosition.prototype.overlapsAny = function (boxes) {for (var i = 0; i < boxes.length; i++) {if (this.overlaps(boxes[i])) {return true;}}return false;};// Check if this box is within another box.BoxPosition.prototype.within = function (container) {return this.top >= container.top && this.bottom <= container.bottom && this.left >= container.left && this.right <= container.right;};// Check if this box is entirely within the container or it is overlapping// on the edge opposite of the axis direction passed. For example, if "+x" is// passed and the box is overlapping on the left edge of the container, then// return true.BoxPosition.prototype.overlapsOppositeAxis = function (container, axis) {switch (axis) {case "+x":return this.left < container.left;case "-x":return this.right > container.right;case "+y":return this.top < container.top;case "-y":return this.bottom > container.bottom;}};// Find the percentage of the area that this box is overlapping with another// box.BoxPosition.prototype.intersectPercentage = function (b2) {var x = Math.max(0, Math.min(this.right, b2.right) - Math.max(this.left, b2.left)),y = Math.max(0, Math.min(this.bottom, b2.bottom) - Math.max(this.top, b2.top)),intersectArea = x * y;return intersectArea / (this.height * this.width);};// Convert the positions from this box to CSS compatible positions using// the reference container's positions. This has to be done because this// box's positions are in reference to the viewport origin, whereas, CSS// values are in referecne to their respective edges.BoxPosition.prototype.toCSSCompatValues = function (reference) {return {top: this.top - reference.top,bottom: reference.bottom - this.bottom,left: this.left - reference.left,right: reference.right - this.right,height: this.height,width: this.width};};// Get an object that represents the box's position without anything extra.// Can pass a StyleBox, HTMLElement, or another BoxPositon.BoxPosition.getSimpleBoxPosition = function (obj) {var height = obj.div ? obj.div.offsetHeight : obj.tagName ? obj.offsetHeight : 0;var width = obj.div ? obj.div.offsetWidth : obj.tagName ? obj.offsetWidth : 0;var top = obj.div ? obj.div.offsetTop : obj.tagName ? obj.offsetTop : 0;obj = obj.div ? obj.div.getBoundingClientRect() : obj.tagName ? obj.getBoundingClientRect() : obj;var ret = {left: obj.left,right: obj.right,top: obj.top || top,height: obj.height || height,bottom: obj.bottom || top + (obj.height || height),width: obj.width || width};return ret;};// Move a StyleBox to its specified, or next best, position. The containerBox// is the box that contains the StyleBox, such as a div. boxPositions are// a list of other boxes that the styleBox can't overlap with.function moveBoxToLinePosition(window, styleBox, containerBox, boxPositions) {// Find the best position for a cue box, b, on the video. The axis parameter// is a list of axis, the order of which, it will move the box along. For example:// Passing ["+x", "-x"] will move the box first along the x axis in the positive// direction. 
If it doesn't find a good position for it there it will then move// it along the x axis in the negative direction.function findBestPosition(b, axis) {var bestPosition,specifiedPosition = new BoxPosition(b),percentage = 1; // Highest possible so the first thing we get is better.for (var i = 0; i < axis.length; i++) {while (b.overlapsOppositeAxis(containerBox, axis[i]) || b.within(containerBox) && b.overlapsAny(boxPositions)) {b.move(axis[i]);}// We found a spot where we aren't overlapping anything. This is our// best position.if (b.within(containerBox)) {return b;}var p = b.intersectPercentage(containerBox);// If we're outside the container box less then we were on our last try// then remember this position as the best position.if (percentage > p) {bestPosition = new BoxPosition(b);percentage = p;}// Reset the box position to the specified position.b = new BoxPosition(specifiedPosition);}return bestPosition || specifiedPosition;}var boxPosition = new BoxPosition(styleBox),cue = styleBox.cue,linePos = computeLinePos(cue),axis = [];// If we have a line number to align the cue to.if (cue.snapToLines) {var size;switch (cue.vertical) {case "":axis = ["+y", "-y"];size = "height";break;case "rl":axis = ["+x", "-x"];size = "width";break;case "lr":axis = ["-x", "+x"];size = "width";break;}var step = boxPosition.lineHeight,position = step * Math.round(linePos),maxPosition = containerBox[size] + step,initialAxis = axis[0];// If the specified intial position is greater then the max position then// clamp the box to the amount of steps it would take for the box to// reach the max position.if (Math.abs(position) > maxPosition) {position = position < 0 ? -1 : 1;position *= Math.ceil(maxPosition / step) * step;}// If computed line position returns negative then line numbers are// relative to the bottom of the video instead of the top. Therefore, we// need to increase our initial position by the length or width of the// video, depending on the writing direction, and reverse our axis directions.if (linePos < 0) {position += cue.vertical === "" ? containerBox.height : containerBox.width;axis = axis.reverse();}// Move the box to the specified position. 
This may not be its best// position.boxPosition.move(initialAxis, position);} else {// If we have a percentage line value for the cue.var calculatedPercentage = boxPosition.lineHeight / containerBox.height * 100;switch (cue.lineAlign) {case "center":linePos -= calculatedPercentage / 2;break;case "end":linePos -= calculatedPercentage;break;}// Apply initial line position to the cue box.switch (cue.vertical) {case "":styleBox.applyStyles({top: styleBox.formatStyle(linePos, "%")});break;case "rl":styleBox.applyStyles({left: styleBox.formatStyle(linePos, "%")});break;case "lr":styleBox.applyStyles({right: styleBox.formatStyle(linePos, "%")});break;}axis = ["+y", "-x", "+x", "-y"];// Get the box position again after we've applied the specified positioning// to it.boxPosition = new BoxPosition(styleBox);}var bestPosition = findBestPosition(boxPosition, axis);styleBox.move(bestPosition.toCSSCompatValues(containerBox));}function WebVTT$1() {// Nothing}// Helper to allow strings to be decoded instead of the default binary utf8 data.WebVTT$1.StringDecoder = function () {return {decode: function (data) {if (!data) {return "";}if (typeof data !== "string") {throw new Error("Error - expected string data.");}return decodeURIComponent(encodeURIComponent(data));}};};WebVTT$1.convertCueToDOMTree = function (window, cuetext) {if (!window || !cuetext) {return null;}return parseContent(window, cuetext);};var FONT_SIZE_PERCENT = 0.05;var FONT_STYLE = "sans-serif";var CUE_BACKGROUND_PADDING = "1.5%";// Runs the processing model over the cues and regions passed to it.// @param overlay A block level element (usually a div) that the computed cues// and regions will be placed into.WebVTT$1.processCues = function (window, cues, overlay) {if (!window || !cues || !overlay) {return null;}// Remove all previous children.while (overlay.firstChild) {overlay.removeChild(overlay.firstChild);}var paddedOverlay = window.document.createElement("div");paddedOverlay.style.position = "absolute";paddedOverlay.style.left = "0";paddedOverlay.style.right = "0";paddedOverlay.style.top = "0";paddedOverlay.style.bottom = "0";paddedOverlay.style.margin = CUE_BACKGROUND_PADDING;overlay.appendChild(paddedOverlay);// Determine if we need to compute the display states of the cues. This could// be the case if a cue's state has been changed since the last computation or// if it has not been computed yet.function shouldCompute(cues) {for (var i = 0; i < cues.length; i++) {if (cues[i].hasBeenReset || !cues[i].displayState) {return true;}}return false;}// We don't need to recompute the cues' display states. 
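// Illustrative sketch (not part of the original bundle): driving the processCues
// routine being defined here from a text track's cuechange event. The text track
// and overlay element are hypothetical. Defined only for illustration; never invoked.
function processCuesSketch_(win, textTrack, overlayDiv) {
  textTrack.addEventListener('cuechange', function () {
    var activeCues = Array.prototype.slice.call(textTrack.activeCues || []);
    WebVTT$1.processCues(win, activeCues, overlayDiv);
  });
}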
Just reuse them.if (!shouldCompute(cues)) {for (var i = 0; i < cues.length; i++) {paddedOverlay.appendChild(cues[i].displayState);}return;}var boxPositions = [],containerBox = BoxPosition.getSimpleBoxPosition(paddedOverlay),fontSize = Math.round(containerBox.height * FONT_SIZE_PERCENT * 100) / 100;var styleOptions = {font: fontSize + "px " + FONT_STYLE};(function () {var styleBox, cue;for (var i = 0; i < cues.length; i++) {cue = cues[i];// Compute the intial position and styles of the cue div.styleBox = new CueStyleBox(window, cue, styleOptions);paddedOverlay.appendChild(styleBox.div);// Move the cue div to it's correct line position.moveBoxToLinePosition(window, styleBox, containerBox, boxPositions);// Remember the computed div so that we don't have to recompute it later// if we don't have too.cue.displayState = styleBox.div;boxPositions.push(BoxPosition.getSimpleBoxPosition(styleBox));}})();};WebVTT$1.Parser = function (window, vttjs, decoder) {if (!decoder) {decoder = vttjs;vttjs = {};}if (!vttjs) {vttjs = {};}this.window = window;this.vttjs = vttjs;this.state = "INITIAL";this.buffer = "";this.decoder = decoder || new TextDecoder("utf8");this.regionList = [];};WebVTT$1.Parser.prototype = {// If the error is a ParsingError then report it to the consumer if// possible. If it's not a ParsingError then throw it like normal.reportOrThrowError: function (e) {if (e instanceof ParsingError) {this.onparsingerror && this.onparsingerror(e);} else {throw e;}},parse: function (data) {var self = this;// If there is no data then we won't decode it, but will just try to parse// whatever is in buffer already. This may occur in circumstances, for// example when flush() is called.if (data) {// Try to decode the data that we received.self.buffer += self.decoder.decode(data, {stream: true});}function collectNextLine() {var buffer = self.buffer;var pos = 0;while (pos < buffer.length && buffer[pos] !== '\r' && buffer[pos] !== '\n') {++pos;}var line = buffer.substr(0, pos);// Advance the buffer early in case we fail below.if (buffer[pos] === '\r') {++pos;}if (buffer[pos] === '\n') {++pos;}self.buffer = buffer.substr(pos);return line;}// 3.4 WebVTT region and WebVTT region settings syntaxfunction parseRegion(input) {var settings = new Settings();parseOptions(input, function (k, v) {switch (k) {case "id":settings.set(k, v);break;case "width":settings.percent(k, v);break;case "lines":settings.integer(k, v);break;case "regionanchor":case "viewportanchor":var xy = v.split(',');if (xy.length !== 2) {break;}// We have to make sure both x and y parse, so use a temporary// settings object here.var anchor = new Settings();anchor.percent("x", xy[0]);anchor.percent("y", xy[1]);if (!anchor.has("x") || !anchor.has("y")) {break;}settings.set(k + "X", anchor.get("x"));settings.set(k + "Y", anchor.get("y"));break;case "scroll":settings.alt(k, v, ["up"]);break;}}, /=/, /\s/);// Create the region, using default values for any values that were not// specified.if (settings.has("id")) {var region = new (self.vttjs.VTTRegion || self.window.VTTRegion)();region.width = settings.get("width", 100);region.lines = settings.get("lines", 3);region.regionAnchorX = settings.get("regionanchorX", 0);region.regionAnchorY = settings.get("regionanchorY", 100);region.viewportAnchorX = settings.get("viewportanchorX", 0);region.viewportAnchorY = settings.get("viewportanchorY", 100);region.scroll = settings.get("scroll", "");// Register the region.self.onregion && self.onregion(region);// Remember the VTTRegion for later in case we parse any VTTCues 
that// reference it.self.regionList.push({id: settings.get("id"),region: region});}}// draft-pantos-http-live-streaming-20// https://tools.ietf.org/html/draft-pantos-http-live-streaming-20#section-3.5// 3.5 WebVTTfunction parseTimestampMap(input) {var settings = new Settings();parseOptions(input, function (k, v) {switch (k) {case "MPEGT":settings.integer(k + 'S', v);break;case "LOCA":settings.set(k + 'L', parseTimeStamp(v));break;}}, /[^\d]:/, /,/);self.ontimestampmap && self.ontimestampmap({"MPEGTS": settings.get("MPEGTS"),"LOCAL": settings.get("LOCAL")});}// 3.2 WebVTT metadata header syntaxfunction parseHeader(input) {if (input.match(/X-TIMESTAMP-MAP/)) {// This line contains HLS X-TIMESTAMP-MAP metadataparseOptions(input, function (k, v) {switch (k) {case "X-TIMESTAMP-MAP":parseTimestampMap(v);break;}}, /=/);} else {parseOptions(input, function (k, v) {switch (k) {case "Region":// 3.3 WebVTT region metadata header syntaxparseRegion(v);break;}}, /:/);}}// 5.1 WebVTT file parsing.try {var line;if (self.state === "INITIAL") {// We can't start parsing until we have the first line.if (!/\r\n|\n/.test(self.buffer)) {return this;}line = collectNextLine();var m = line.match(/^WEBVTT([ \t].*)?$/);if (!m || !m[0]) {throw new ParsingError(ParsingError.Errors.BadSignature);}self.state = "HEADER";}var alreadyCollectedLine = false;while (self.buffer) {// We can't parse a line until we have the full line.if (!/\r\n|\n/.test(self.buffer)) {return this;}if (!alreadyCollectedLine) {line = collectNextLine();} else {alreadyCollectedLine = false;}switch (self.state) {case "HEADER":// 13-18 - Allow a header (metadata) under the WEBVTT line.if (/:/.test(line)) {parseHeader(line);} else if (!line) {// An empty line terminates the header and starts the body (cues).self.state = "ID";}continue;case "NOTE":// Ignore NOTE blocks.if (!line) {self.state = "ID";}continue;case "ID":// Check for the start of NOTE blocks.if (/^NOTE($|[ \t])/.test(line)) {self.state = "NOTE";break;}// 19-29 - Allow any number of line terminators, then initialize new cue values.if (!line) {continue;}self.cue = new (self.vttjs.VTTCue || self.window.VTTCue)(0, 0, "");// Safari still uses the old middle value and won't accept centertry {self.cue.align = "center";} catch (e) {self.cue.align = "middle";}self.state = "CUE";// 30-39 - Check if self line contains an optional identifier or timing data.if (line.indexOf("-->") === -1) {self.cue.id = line;continue;}// Process line as start of a cue./*falls through*/case "CUE":// 40 - Collect cue timings and settings.try {parseCue(line, self.cue, self.regionList);} catch (e) {self.reportOrThrowError(e);// In case of an error ignore rest of the cue.self.cue = null;self.state = "BADCUE";continue;}self.state = "CUETEXT";continue;case "CUETEXT":var hasSubstring = line.indexOf("-->") !== -1;// 34 - If we have an empty line then report the cue.// 35 - If we have the special substring '-->' then report the cue,// but do not collect the line as we need to process the current// one as a new cue.if (!line || hasSubstring && (alreadyCollectedLine = true)) {// We are done parsing self cue.self.oncue && self.oncue(self.cue);self.cue = null;self.state = "ID";continue;}if (self.cue.text) {self.cue.text += "\n";}self.cue.text += line.replace(/\u2028/g, '\n').replace(/u2029/g, '\n');continue;case "BADCUE":// BADCUE// 54-62 - Collect and discard the remaining cue.if (!line) {self.state = "ID";}continue;}}} catch (e) {self.reportOrThrowError(e);// If we are currently parsing a cue, report what we have.if (self.state === 
"CUETEXT" && self.cue && self.oncue) {self.oncue(self.cue);}self.cue = null;// Enter BADWEBVTT state if header was not parsed correctly otherwise// another exception occurred so enter BADCUE state.self.state = self.state === "INITIAL" ? "BADWEBVTT" : "BADCUE";}return this;},flush: function () {var self = this;try {// Finish decoding the stream.self.buffer += self.decoder.decode();// Synthesize the end of the current cue or region.if (self.cue || self.state === "HEADER") {self.buffer += "\n\n";self.parse();}// If we've flushed, parsed, and we're still on the INITIAL state then// that means we don't have enough of the stream to parse the first// line.if (self.state === "INITIAL") {throw new ParsingError(ParsingError.Errors.BadSignature);}} catch (e) {self.reportOrThrowError(e);}self.onflush && self.onflush();return this;}};var vtt = WebVTT$1;/*** Copyright 2013 vtt.js Contributors** Licensed under the Apache License, Version 2.0 (the "License");* you may not use this file except in compliance with the License.* You may obtain a copy of the License at** http://www.apache.org/licenses/LICENSE-2.0** Unless required by applicable law or agreed to in writing, software* distributed under the License is distributed on an "AS IS" BASIS,* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.* See the License for the specific language governing permissions and* limitations under the License.*/var autoKeyword = "auto";var directionSetting = {"": 1,"lr": 1,"rl": 1};var alignSetting = {"start": 1,"center": 1,"end": 1,"left": 1,"right": 1,"auto": 1,"line-left": 1,"line-right": 1};function findDirectionSetting(value) {if (typeof value !== "string") {return false;}var dir = directionSetting[value.toLowerCase()];return dir ? value.toLowerCase() : false;}function findAlignSetting(value) {if (typeof value !== "string") {return false;}var align = alignSetting[value.toLowerCase()];return align ? value.toLowerCase() : false;}function VTTCue(startTime, endTime, text) {/*** Shim implementation specific properties. These properties are not in* the spec.*/// Lets us know when the VTTCue's data has changed in such a way that we need// to recompute its display state. 
This lets us compute its display state// lazily.this.hasBeenReset = false;/*** VTTCue and TextTrackCue properties* http://dev.w3.org/html5/webvtt/#vttcue-interface*/var _id = "";var _pauseOnExit = false;var _startTime = startTime;var _endTime = endTime;var _text = text;var _region = null;var _vertical = "";var _snapToLines = true;var _line = "auto";var _lineAlign = "start";var _position = "auto";var _positionAlign = "auto";var _size = 100;var _align = "center";Object.defineProperties(this, {"id": {enumerable: true,get: function () {return _id;},set: function (value) {_id = "" + value;}},"pauseOnExit": {enumerable: true,get: function () {return _pauseOnExit;},set: function (value) {_pauseOnExit = !!value;}},"startTime": {enumerable: true,get: function () {return _startTime;},set: function (value) {if (typeof value !== "number") {throw new TypeError("Start time must be set to a number.");}_startTime = value;this.hasBeenReset = true;}},"endTime": {enumerable: true,get: function () {return _endTime;},set: function (value) {if (typeof value !== "number") {throw new TypeError("End time must be set to a number.");}_endTime = value;this.hasBeenReset = true;}},"text": {enumerable: true,get: function () {return _text;},set: function (value) {_text = "" + value;this.hasBeenReset = true;}},"region": {enumerable: true,get: function () {return _region;},set: function (value) {_region = value;this.hasBeenReset = true;}},"vertical": {enumerable: true,get: function () {return _vertical;},set: function (value) {var setting = findDirectionSetting(value);// Have to check for false because the setting an be an empty string.if (setting === false) {throw new SyntaxError("Vertical: an invalid or illegal direction string was specified.");}_vertical = setting;this.hasBeenReset = true;}},"snapToLines": {enumerable: true,get: function () {return _snapToLines;},set: function (value) {_snapToLines = !!value;this.hasBeenReset = true;}},"line": {enumerable: true,get: function () {return _line;},set: function (value) {if (typeof value !== "number" && value !== autoKeyword) {throw new SyntaxError("Line: an invalid number or illegal string was specified.");}_line = value;this.hasBeenReset = true;}},"lineAlign": {enumerable: true,get: function () {return _lineAlign;},set: function (value) {var setting = findAlignSetting(value);if (!setting) {console.warn("lineAlign: an invalid or illegal string was specified.");} else {_lineAlign = setting;this.hasBeenReset = true;}}},"position": {enumerable: true,get: function () {return _position;},set: function (value) {if (value < 0 || value > 100) {throw new Error("Position must be between 0 and 100.");}_position = value;this.hasBeenReset = true;}},"positionAlign": {enumerable: true,get: function () {return _positionAlign;},set: function (value) {var setting = findAlignSetting(value);if (!setting) {console.warn("positionAlign: an invalid or illegal string was specified.");} else {_positionAlign = setting;this.hasBeenReset = true;}}},"size": {enumerable: true,get: function () {return _size;},set: function (value) {if (value < 0 || value > 100) {throw new Error("Size must be between 0 and 100.");}_size = value;this.hasBeenReset = true;}},"align": {enumerable: true,get: function () {return _align;},set: function (value) {var setting = findAlignSetting(value);if (!setting) {throw new SyntaxError("align: an invalid or illegal alignment string was specified.");}_align = setting;this.hasBeenReset = true;}}});/*** Other <track> spec defined properties*/// 
http://www.whatwg.org/specs/web-apps/current-work/multipage/the-video-element.html#text-track-cue-display-statethis.displayState = undefined;}/*** VTTCue methods*/VTTCue.prototype.getCueAsHTML = function () {// Assume WebVTT.convertCueToDOMTree is on the global.return WebVTT.convertCueToDOMTree(window, this.text);};var vttcue = VTTCue;/*** Copyright 2013 vtt.js Contributors** Licensed under the Apache License, Version 2.0 (the "License");* you may not use this file except in compliance with the License.* You may obtain a copy of the License at** http://www.apache.org/licenses/LICENSE-2.0** Unless required by applicable law or agreed to in writing, software* distributed under the License is distributed on an "AS IS" BASIS,* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.* See the License for the specific language governing permissions and* limitations under the License.*/var scrollSetting = {"": true,"up": true};function findScrollSetting(value) {if (typeof value !== "string") {return false;}var scroll = scrollSetting[value.toLowerCase()];return scroll ? value.toLowerCase() : false;}function isValidPercentValue(value) {return typeof value === "number" && value >= 0 && value <= 100;}// VTTRegion shim http://dev.w3.org/html5/webvtt/#vttregion-interfacefunction VTTRegion() {var _width = 100;var _lines = 3;var _regionAnchorX = 0;var _regionAnchorY = 100;var _viewportAnchorX = 0;var _viewportAnchorY = 100;var _scroll = "";Object.defineProperties(this, {"width": {enumerable: true,get: function () {return _width;},set: function (value) {if (!isValidPercentValue(value)) {throw new Error("Width must be between 0 and 100.");}_width = value;}},"lines": {enumerable: true,get: function () {return _lines;},set: function (value) {if (typeof value !== "number") {throw new TypeError("Lines must be set to a number.");}_lines = value;}},"regionAnchorY": {enumerable: true,get: function () {return _regionAnchorY;},set: function (value) {if (!isValidPercentValue(value)) {throw new Error("RegionAnchorX must be between 0 and 100.");}_regionAnchorY = value;}},"regionAnchorX": {enumerable: true,get: function () {return _regionAnchorX;},set: function (value) {if (!isValidPercentValue(value)) {throw new Error("RegionAnchorY must be between 0 and 100.");}_regionAnchorX = value;}},"viewportAnchorY": {enumerable: true,get: function () {return _viewportAnchorY;},set: function (value) {if (!isValidPercentValue(value)) {throw new Error("ViewportAnchorY must be between 0 and 100.");}_viewportAnchorY = value;}},"viewportAnchorX": {enumerable: true,get: function () {return _viewportAnchorX;},set: function (value) {if (!isValidPercentValue(value)) {throw new Error("ViewportAnchorX must be between 0 and 100.");}_viewportAnchorX = value;}},"scroll": {enumerable: true,get: function () {return _scroll;},set: function (value) {var setting = findScrollSetting(value);// Have to check for false as an empty string is a legal value.if (setting === false) {console.warn("Scroll: an invalid or illegal string was specified.");} else {_scroll = setting;}}}});}var vttregion = VTTRegion;var browserIndex = createCommonjsModule(function (module) {/*** Copyright 2013 vtt.js Contributors** Licensed under the Apache License, Version 2.0 (the "License");* you may not use this file except in compliance with the License.* You may obtain a copy of the License at** http://www.apache.org/licenses/LICENSE-2.0** Unless required by applicable law or agreed to in writing, software* distributed under the License is distributed on an 
"AS IS" BASIS,* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.* See the License for the specific language governing permissions and* limitations under the License.*/// Default exports for Node. Export the extended versions of VTTCue and// VTTRegion in Node since we likely want the capability to convert back and// forth between JSON. If we don't then it's not that big of a deal since we're// off browser.var vttjs = module.exports = {WebVTT: vtt,VTTCue: vttcue,VTTRegion: vttregion};window_1.vttjs = vttjs;window_1.WebVTT = vttjs.WebVTT;var cueShim = vttjs.VTTCue;var regionShim = vttjs.VTTRegion;var nativeVTTCue = window_1.VTTCue;var nativeVTTRegion = window_1.VTTRegion;vttjs.shim = function () {window_1.VTTCue = cueShim;window_1.VTTRegion = regionShim;};vttjs.restore = function () {window_1.VTTCue = nativeVTTCue;window_1.VTTRegion = nativeVTTRegion;};if (!window_1.VTTCue) {vttjs.shim();}});browserIndex.WebVTT;browserIndex.VTTCue;browserIndex.VTTRegion;/*** @file tech.js*//*** An Object containing a structure like: `{src: 'url', type: 'mimetype'}` or string* that just contains the src url alone.* * `var SourceObject = {src: 'http://ex.com/video.mp4', type: 'video/mp4'};`* `var SourceString = 'http://example.com/some-video.mp4';`** @typedef {Object|string} SourceObject** @property {string} src* The url to the source** @property {string} type* The mime type of the source*//*** A function used by {@link Tech} to create a new {@link TextTrack}.** @private** @param {Tech} self* An instance of the Tech class.** @param {string} kind* `TextTrack` kind (subtitles, captions, descriptions, chapters, or metadata)** @param {string} [label]* Label to identify the text track** @param {string} [language]* Two letter language abbreviation** @param {Object} [options={}]* An object with additional text track options** @return {TextTrack}* The text track that was created.*/function createTrackHelper(self, kind, label, language, options = {}) {const tracks = self.textTracks();options.kind = kind;if (label) {options.label = label;}if (language) {options.language = language;}options.tech = self;const track = new ALL.text.TrackClass(options);tracks.addTrack(track);return track;}/*** This is the base class for media playback technology controllers, such as* {@link HTML5}** @extends Component*/class Tech extends Component$1 {/*** Create an instance of this Tech.** @param {Object} [options]* The key/value store of player options.** @param {Function} [ready]* Callback function to call when the `HTML5` Tech is ready.*/constructor(options = {}, ready = function () {}) {// we don't want the tech to report user activity automatically.// This is done manually in addControlsListenersoptions.reportTouchActivity = false;super(null, options, ready);this.onDurationChange_ = e => this.onDurationChange(e);this.trackProgress_ = e => this.trackProgress(e);this.trackCurrentTime_ = e => this.trackCurrentTime(e);this.stopTrackingCurrentTime_ = e => this.stopTrackingCurrentTime(e);this.disposeSourceHandler_ = e => this.disposeSourceHandler(e);this.queuedHanders_ = new Set();// keep track of whether the current source has played at all to// implement a very limited played()this.hasStarted_ = false;this.on('playing', function () {this.hasStarted_ = true;});this.on('loadstart', function () {this.hasStarted_ = false;});ALL.names.forEach(name => {const props = ALL[name];if (options && options[props.getterName]) {this[props.privateName] = options[props.getterName];}});// Manually track progress in cases where the 
browser/tech doesn't report it.if (!this.featuresProgressEvents) {this.manualProgressOn();}// Manually track timeupdates in cases where the browser/tech doesn't report it.if (!this.featuresTimeupdateEvents) {this.manualTimeUpdatesOn();}['Text', 'Audio', 'Video'].forEach(track => {if (options[`native${track}Tracks`] === false) {this[`featuresNative${track}Tracks`] = false;}});if (options.nativeCaptions === false || options.nativeTextTracks === false) {this.featuresNativeTextTracks = false;} else if (options.nativeCaptions === true || options.nativeTextTracks === true) {this.featuresNativeTextTracks = true;}if (!this.featuresNativeTextTracks) {this.emulateTextTracks();}this.preloadTextTracks = options.preloadTextTracks !== false;this.autoRemoteTextTracks_ = new ALL.text.ListClass();this.initTrackListeners();// Turn on component tap events only if not using native controlsif (!options.nativeControlsForTouch) {this.emitTapEvents();}if (this.constructor) {this.name_ = this.constructor.name || 'Unknown Tech';}}/*** A special function to trigger source set in a way that will allow player* to re-trigger if the player or tech are not ready yet.** @fires Tech#sourceset* @param {string} src The source string at the time of the source changing.*/triggerSourceset(src) {if (!this.isReady_) {// on initial ready we have to trigger source set// 1ms after ready so that player can watch for it.this.one('ready', () => this.setTimeout(() => this.triggerSourceset(src), 1));}/*** Fired when the source is set on the tech causing the media element* to reload.** @see {@link Player#event:sourceset}* @event Tech#sourceset* @type {Event}*/this.trigger({src,type: 'sourceset'});}/* Fallbacks for unsupported event types================================================================================ *//*** Polyfill the `progress` event for browsers that don't support it natively.** @see {@link Tech#trackProgress}*/manualProgressOn() {this.on('durationchange', this.onDurationChange_);this.manualProgress = true;// Trigger progress watching when a source begins loadingthis.one('ready', this.trackProgress_);}/*** Turn off the polyfill for `progress` events that was created in* {@link Tech#manualProgressOn}*/manualProgressOff() {this.manualProgress = false;this.stopTrackingProgress();this.off('durationchange', this.onDurationChange_);}/*** This is used to trigger a `progress` event when the buffered percent changes. 
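// Illustrative sketch (not part of the original bundle): the polling pattern that
// trackProgress below implements, shown against a plain HTMLMediaElement. The
// element and callback are hypothetical. Defined only for illustration; never invoked.
function progressPollSketch_(mediaEl, onProgress) {
  var lastPercent = -1;
  return setInterval(function () {
    var buffered = mediaEl.buffered;
    var bufferedEnd = buffered.length ? buffered.end(buffered.length - 1) : 0;
    var percent = mediaEl.duration ? bufferedEnd / mediaEl.duration : 0;
    if (percent !== lastPercent) {
      onProgress(percent); // analogous to the 'progress' trigger in trackProgress below
    }
    lastPercent = percent;
  }, 500);
}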
It* sets an interval function that will be called every 500 milliseconds to check if the* buffer end percent has changed.** > This function is called by {@link Tech#manualProgressOn}** @param {Event} event* The `ready` event that caused this to run.** @listens Tech#ready* @fires Tech#progress*/trackProgress(event) {this.stopTrackingProgress();this.progressInterval = this.setInterval(bind_(this, function () {// Don't trigger unless buffered amount is greater than last timeconst numBufferedPercent = this.bufferedPercent();if (this.bufferedPercent_ !== numBufferedPercent) {/*** See {@link Player#progress}** @event Tech#progress* @type {Event}*/this.trigger('progress');}this.bufferedPercent_ = numBufferedPercent;if (numBufferedPercent === 1) {this.stopTrackingProgress();}}), 500);}/*** Update our internal duration on a `durationchange` event by calling* {@link Tech#duration}.** @param {Event} event* The `durationchange` event that caused this to run.** @listens Tech#durationchange*/onDurationChange(event) {this.duration_ = this.duration();}/*** Get and create a `TimeRange` object for buffering.** @return { import('../utils/time').TimeRange }* The time range object that was created.*/buffered() {return createTimeRanges$1(0, 0);}/*** Get the percentage of the current video that is currently buffered.** @return {number}* A number from 0 to 1 that represents the decimal percentage of the* video that is buffered.**/bufferedPercent() {return bufferedPercent(this.buffered(), this.duration_);}/*** Turn off the polyfill for `progress` events that was created in* {@link Tech#manualProgressOn}* Stop manually tracking progress events by clearing the interval that was set in* {@link Tech#trackProgress}.*/stopTrackingProgress() {this.clearInterval(this.progressInterval);}/*** Polyfill the `timeupdate` event for browsers that don't support it.** @see {@link Tech#trackCurrentTime}*/manualTimeUpdatesOn() {this.manualTimeUpdates = true;this.on('play', this.trackCurrentTime_);this.on('pause', this.stopTrackingCurrentTime_);}/*** Turn off the polyfill for `timeupdate` events that was created in* {@link Tech#manualTimeUpdatesOn}*/manualTimeUpdatesOff() {this.manualTimeUpdates = false;this.stopTrackingCurrentTime();this.off('play', this.trackCurrentTime_);this.off('pause', this.stopTrackingCurrentTime_);}/*** Sets up an interval function to track current time and trigger `timeupdate` every* 250 milliseconds.** @listens Tech#play* @triggers Tech#timeupdate*/trackCurrentTime() {if (this.currentTimeInterval) {this.stopTrackingCurrentTime();}this.currentTimeInterval = this.setInterval(function () {/*** Triggered at an interval of 250ms to indicated that time is passing in the video.** @event Tech#timeupdate* @type {Event}*/this.trigger({type: 'timeupdate',target: this,manuallyTriggered: true});// 42 = 24 fps // 250 is what Webkit uses // FF uses 15}, 250);}/*** Stop the interval function created in {@link Tech#trackCurrentTime} so that the* `timeupdate` event is no longer triggered.** @listens {Tech#pause}*/stopTrackingCurrentTime() {this.clearInterval(this.currentTimeInterval);// #1002 - if the video ends right before the next timeupdate would happen,// the progress bar won't make it all the way to the endthis.trigger({type: 'timeupdate',target: this,manuallyTriggered: true});}/*** Turn off all event polyfills, clear the `Tech`s {@link AudioTrackList},* {@link VideoTrackList}, and {@link TextTrackList}, and dispose of this Tech.** @fires Component#dispose*/dispose() {// clear out all tracks because we can't reuse them 
between techsthis.clearTracks(NORMAL.names);// Turn off any manual progress or timeupdate trackingif (this.manualProgress) {this.manualProgressOff();}if (this.manualTimeUpdates) {this.manualTimeUpdatesOff();}super.dispose();}/*** Clear out a single `TrackList` or an array of `TrackLists` given their names.** > Note: Techs without source handlers should call this between sources for `video`* & `audio` tracks. You don't want to use them between tracks!** @param {string[]|string} types* TrackList names to clear, valid names are `video`, `audio`, and* `text`.*/clearTracks(types) {types = [].concat(types);// clear out all tracks because we can't reuse them between techstypes.forEach(type => {const list = this[`${type}Tracks`]() || [];let i = list.length;while (i--) {const track = list[i];if (type === 'text') {this.removeRemoteTextTrack(track);}list.removeTrack(track);}});}/*** Remove any TextTracks added via addRemoteTextTrack that are* flagged for automatic garbage collection*/cleanupAutoTextTracks() {const list = this.autoRemoteTextTracks_ || [];let i = list.length;while (i--) {const track = list[i];this.removeRemoteTextTrack(track);}}/*** Reset the tech, which will removes all sources and reset the internal readyState.** @abstract*/reset() {}/*** Get the value of `crossOrigin` from the tech.** @abstract** @see {Html5#crossOrigin}*/crossOrigin() {}/*** Set the value of `crossOrigin` on the tech.** @abstract** @param {string} crossOrigin the crossOrigin value* @see {Html5#setCrossOrigin}*/setCrossOrigin() {}/*** Get or set an error on the Tech.** @param {MediaError} [err]* Error to set on the Tech** @return {MediaError|null}* The current error object on the tech, or null if there isn't one.*/error(err) {if (err !== undefined) {this.error_ = new MediaError(err);this.trigger('error');}return this.error_;}/*** Returns the `TimeRange`s that have been played through for the current source.** > NOTE: This implementation is incomplete. 
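// Illustrative sketch (not part of the original bundle): given the limited played()
// implementation below, the most a caller can learn is whether playback has ever
// started for the current source. 'tech' is hypothetical. Defined only for illustration.
function hasEverPlayedSketch_(tech) {
  // One (0, 0) range once 'playing' has fired for the current source, else an empty set.
  return tech.played().length > 0;
}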
It does not track the played `TimeRange`.* It only checks whether the source has played at all or not.** @return { import('../utils/time').TimeRange }* - A single time range if this video has played* - An empty set of ranges if not.*/played() {if (this.hasStarted_) {return createTimeRanges$1(0, 0);}return createTimeRanges$1();}/*** Start playback** @abstract** @see {Html5#play}*/play() {}/*** Set whether we are scrubbing or not** @abstract* @param {boolean} _isScrubbing* - true for we are currently scrubbing* - false for we are no longer scrubbing** @see {Html5#setScrubbing}*/setScrubbing(_isScrubbing) {}/*** Get whether we are scrubbing or not** @abstract** @see {Html5#scrubbing}*/scrubbing() {}/*** Causes a manual time update to occur if {@link Tech#manualTimeUpdatesOn} was* previously called.** @param {number} _seconds* Set the current time of the media to this.* @fires Tech#timeupdate*/setCurrentTime(_seconds) {// improve the accuracy of manual timeupdatesif (this.manualTimeUpdates) {/*** A manual `timeupdate` event.** @event Tech#timeupdate* @type {Event}*/this.trigger({type: 'timeupdate',target: this,manuallyTriggered: true});}}/*** Turn on listeners for {@link VideoTrackList}, {@link {AudioTrackList}, and* {@link TextTrackList} events.** This adds {@link EventTarget~EventListeners} for `addtrack`, and `removetrack`.** @fires Tech#audiotrackchange* @fires Tech#videotrackchange* @fires Tech#texttrackchange*/initTrackListeners() {/*** Triggered when tracks are added or removed on the Tech {@link AudioTrackList}** @event Tech#audiotrackchange* @type {Event}*//*** Triggered when tracks are added or removed on the Tech {@link VideoTrackList}** @event Tech#videotrackchange* @type {Event}*//*** Triggered when tracks are added or removed on the Tech {@link TextTrackList}** @event Tech#texttrackchange* @type {Event}*/NORMAL.names.forEach(name => {const props = NORMAL[name];const trackListChanges = () => {this.trigger(`${name}trackchange`);};const tracks = this[props.getterName]();tracks.addEventListener('removetrack', trackListChanges);tracks.addEventListener('addtrack', trackListChanges);this.on('dispose', () => {tracks.removeEventListener('removetrack', trackListChanges);tracks.removeEventListener('addtrack', trackListChanges);});});}/*** Emulate TextTracks using vtt.js if necessary** @fires Tech#vttjsloaded* @fires Tech#vttjserror*/addWebVttScript_() {if (window.WebVTT) {return;}// Initially, Tech.el_ is a child of a dummy-div wait until the Component system// signals that the Tech is ready at which point Tech.el_ is part of the DOM// before inserting the WebVTT scriptif (document.body.contains(this.el())) {// load via require if available and vtt.js script location was not passed in// as an option. 
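// Illustrative sketch (not part of the original bundle): the shape of the tech options
// consulted below when injecting vtt.js - a 'vtt.js' key pointing at a self-hosted copy
// instead of the default CDN URL. How these options reach the tech from the player
// configuration is assumed here, not shown in this file.
var vttScriptOptionSketch_ = {
  // read as this.options_['vtt.js'] in addWebVttScript_ below
  'vtt.js': '/static/js/vtt.min.js'
};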
novtt builds will turn the above require call into an empty object// which will cause this if check to always fail.if (!this.options_['vtt.js'] && isPlain(browserIndex) && Object.keys(browserIndex).length > 0) {this.trigger('vttjsloaded');return;}// load vtt.js via the script location option or the cdn of no location was// passed inconst script = document.createElement('script');script.src = this.options_['vtt.js'] || 'https://vjs.zencdn.net/vttjs/0.14.1/vtt.min.js';script.onload = () => {/*** Fired when vtt.js is loaded.** @event Tech#vttjsloaded* @type {Event}*/this.trigger('vttjsloaded');};script.onerror = () => {/*** Fired when vtt.js was not loaded due to an error** @event Tech#vttjsloaded* @type {Event}*/this.trigger('vttjserror');};this.on('dispose', () => {script.onload = null;script.onerror = null;});// but have not loaded yet and we set it to true before the inject so that// we don't overwrite the injected window.WebVTT if it loads right awaywindow.WebVTT = true;this.el().parentNode.appendChild(script);} else {this.ready(this.addWebVttScript_);}}/*** Emulate texttracks**/emulateTextTracks() {const tracks = this.textTracks();const remoteTracks = this.remoteTextTracks();const handleAddTrack = e => tracks.addTrack(e.track);const handleRemoveTrack = e => tracks.removeTrack(e.track);remoteTracks.on('addtrack', handleAddTrack);remoteTracks.on('removetrack', handleRemoveTrack);this.addWebVttScript_();const updateDisplay = () => this.trigger('texttrackchange');const textTracksChanges = () => {updateDisplay();for (let i = 0; i < tracks.length; i++) {const track = tracks[i];track.removeEventListener('cuechange', updateDisplay);if (track.mode === 'showing') {track.addEventListener('cuechange', updateDisplay);}}};textTracksChanges();tracks.addEventListener('change', textTracksChanges);tracks.addEventListener('addtrack', textTracksChanges);tracks.addEventListener('removetrack', textTracksChanges);this.on('dispose', function () {remoteTracks.off('addtrack', handleAddTrack);remoteTracks.off('removetrack', handleRemoveTrack);tracks.removeEventListener('change', textTracksChanges);tracks.removeEventListener('addtrack', textTracksChanges);tracks.removeEventListener('removetrack', textTracksChanges);for (let i = 0; i < tracks.length; i++) {const track = tracks[i];track.removeEventListener('cuechange', updateDisplay);}});}/*** Create and returns a remote {@link TextTrack} object.** @param {string} kind* `TextTrack` kind (subtitles, captions, descriptions, chapters, or metadata)** @param {string} [label]* Label to identify the text track** @param {string} [language]* Two letter language abbreviation** @return {TextTrack}* The TextTrack that gets created.*/addTextTrack(kind, label, language) {if (!kind) {throw new Error('TextTrack kind is required but was not provided');}return createTrackHelper(this, kind, label, language);}/*** Create an emulated TextTrack for use by addRemoteTextTrack** This is intended to be overridden by classes that inherit from* Tech in order to create native or custom TextTracks.** @param {Object} options* The object should contain the options to initialize the TextTrack with.** @param {string} [options.kind]* `TextTrack` kind (subtitles, captions, descriptions, chapters, or metadata).** @param {string} [options.label].* Label to identify the text track** @param {string} [options.language]* Two letter language abbreviation.** @return {HTMLTrackElement}* The track element that gets created.*/createRemoteTextTrack(options) {const track = merge$2(options, {tech: this});return new 
REMOTE.remoteTextEl.TrackClass(track);}/*** Creates a remote text track object and returns an html track element.** > Note: This can be an emulated {@link HTMLTrackElement} or a native one.** @param {Object} options* See {@link Tech#createRemoteTextTrack} for more detailed properties.** @param {boolean} [manualCleanup=false]* - When false: the TextTrack will be automatically removed from the video* element whenever the source changes* - When True: The TextTrack will have to be cleaned up manually** @return {HTMLTrackElement}* An Html Track Element.**/addRemoteTextTrack(options = {}, manualCleanup) {const htmlTrackElement = this.createRemoteTextTrack(options);if (typeof manualCleanup !== 'boolean') {manualCleanup = false;}// store HTMLTrackElement and TextTrack to remote listthis.remoteTextTrackEls().addTrackElement_(htmlTrackElement);this.remoteTextTracks().addTrack(htmlTrackElement.track);if (manualCleanup === false) {// create the TextTrackList if it doesn't existthis.ready(() => this.autoRemoteTextTracks_.addTrack(htmlTrackElement.track));}return htmlTrackElement;}/*** Remove a remote text track from the remote `TextTrackList`.** @param {TextTrack} track* `TextTrack` to remove from the `TextTrackList`*/removeRemoteTextTrack(track) {const trackElement = this.remoteTextTrackEls().getTrackElementByTrack_(track);// remove HTMLTrackElement and TextTrack from remote listthis.remoteTextTrackEls().removeTrackElement_(trackElement);this.remoteTextTracks().removeTrack(track);this.autoRemoteTextTracks_.removeTrack(track);}/*** Gets available media playback quality metrics as specified by the W3C's Media* Playback Quality API.** @see [Spec]{@link https://wicg.github.io/media-playback-quality}** @return {Object}* An object with supported media playback quality metrics** @abstract*/getVideoPlaybackQuality() {return {};}/*** Attempt to create a floating video window always on top of other windows* so that users may continue consuming media while they interact with other* content sites, or applications on their device.** @see [Spec]{@link https://wicg.github.io/picture-in-picture}** @return {Promise|undefined}* A promise with a Picture-in-Picture window if the browser supports* Promises (or one was passed in as an option). 
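// Illustrative sketch (not part of the original bundle): because the base Tech below
// simply rejects, callers should attach a catch handler when asking for
// Picture-in-Picture. 'player' is hypothetical. Defined only for illustration; never invoked.
function requestPipSketch_(player) {
  return player.requestPictureInPicture().catch(function (err) {
    console.warn('Picture-in-Picture is not available:', err);
  });
}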
It returns undefined* otherwise.** @abstract*/requestPictureInPicture() {return Promise.reject();}/*** A method to check for the value of the 'disablePictureInPicture' <video> property.* Defaults to true, as it should be considered disabled if the tech does not support pip** @abstract*/disablePictureInPicture() {return true;}/*** A method to set or unset the 'disablePictureInPicture' <video> property.** @abstract*/setDisablePictureInPicture() {}/*** A fallback implementation of requestVideoFrameCallback using requestAnimationFrame** @param {function} cb* @return {number} request id*/requestVideoFrameCallback(cb) {const id = newGUID();if (!this.isReady_ || this.paused()) {this.queuedHanders_.add(id);this.one('playing', () => {if (this.queuedHanders_.has(id)) {this.queuedHanders_.delete(id);cb();}});} else {this.requestNamedAnimationFrame(id, cb);}return id;}/*** A fallback implementation of cancelVideoFrameCallback** @param {number} id id of callback to be cancelled*/cancelVideoFrameCallback(id) {if (this.queuedHanders_.has(id)) {this.queuedHanders_.delete(id);} else {this.cancelNamedAnimationFrame(id);}}/*** A method to set a poster from a `Tech`.** @abstract*/setPoster() {}/*** A method to check for the presence of the 'playsinline' <video> attribute.** @abstract*/playsinline() {}/*** A method to set or unset the 'playsinline' <video> attribute.** @abstract*/setPlaysinline() {}/*** Attempt to force override of native audio tracks.** @param {boolean} override - If set to true native audio will be overridden,* otherwise native audio will potentially be used.** @abstract*/overrideNativeAudioTracks(override) {}/*** Attempt to force override of native video tracks.** @param {boolean} override - If set to true native video will be overridden,* otherwise native video will potentially be used.** @abstract*/overrideNativeVideoTracks(override) {}/*** Check if the tech can support the given mime-type.** The base tech does not support any type, but source handlers might* overwrite this.** @param {string} _type* The mimetype to check for support** @return {string}* 'probably', 'maybe', or empty string** @see [Spec]{@link https://developer.mozilla.org/en-US/docs/Web/API/HTMLMediaElement/canPlayType}** @abstract*/canPlayType(_type) {return '';}/*** Check if the type is supported by this tech.** The base tech does not support any type, but source handlers might* overwrite this.** @param {string} _type* The media type to check* @return {string} Returns the native video element's response*/static canPlayType(_type) {return '';}/*** Check if the tech can support the given source** @param {Object} srcObj* The source object* @param {Object} options* The options passed to the tech* @return {string} 'probably', 'maybe', or '' (empty string)*/static canPlaySource(srcObj, options) {return Tech.canPlayType(srcObj.type);}/** Return whether the argument is a Tech or not.* Can be passed either a Class like `Html5` or a instance like `player.tech_`** @param {Object} component* The item to check** @return {boolean}* Whether it is a tech or not* - True if it is a tech* - False if it is not*/static isTech(component) {return component.prototype instanceof Tech || component instanceof Tech || component === Tech;}/*** Registers a `Tech` into a shared list for videojs.** @param {string} name* Name of the `Tech` to register.** @param {Object} tech* The `Tech` class to register.*/static registerTech(name, tech) {if (!Tech.techs_) {Tech.techs_ = {};}if (!Tech.isTech(tech)) {throw new Error(`Tech ${name} must be a Tech`);}if 
(!Tech.canPlayType) {throw new Error('Techs must have a static canPlayType method on them');}if (!Tech.canPlaySource) {throw new Error('Techs must have a static canPlaySource method on them');}name = toTitleCase$1(name);Tech.techs_[name] = tech;Tech.techs_[toLowerCase(name)] = tech;if (name !== 'Tech') {// camel case the techName for use in techOrderTech.defaultTechOrder_.push(name);}return tech;}/*** Get a `Tech` from the shared list by name.** @param {string} name* `camelCase` or `TitleCase` name of the Tech to get** @return {Tech|undefined}* The `Tech` or undefined if there was no tech with the name requested.*/static getTech(name) {if (!name) {return;}if (Tech.techs_ && Tech.techs_[name]) {return Tech.techs_[name];}name = toTitleCase$1(name);if (window && window.videojs && window.videojs[name]) {log$1.warn(`The ${name} tech was added to the videojs object when it should be registered using videojs.registerTech(name, tech)`);return window.videojs[name];}}}/*** Get the {@link VideoTrackList}** @returns {VideoTrackList}* @method Tech.prototype.videoTracks*//*** Get the {@link AudioTrackList}** @returns {AudioTrackList}* @method Tech.prototype.audioTracks*//*** Get the {@link TextTrackList}** @returns {TextTrackList}* @method Tech.prototype.textTracks*//*** Get the remote element {@link TextTrackList}** @returns {TextTrackList}* @method Tech.prototype.remoteTextTracks*//*** Get the remote element {@link HtmlTrackElementList}** @returns {HtmlTrackElementList}* @method Tech.prototype.remoteTextTrackEls*/ALL.names.forEach(function (name) {const props = ALL[name];Tech.prototype[props.getterName] = function () {this[props.privateName] = this[props.privateName] || new props.ListClass();return this[props.privateName];};});/*** List of associated text tracks** @type {TextTrackList}* @private* @property Tech#textTracks_*//*** List of associated audio tracks.** @type {AudioTrackList}* @private* @property Tech#audioTracks_*//*** List of associated video tracks.** @type {VideoTrackList}* @private* @property Tech#videoTracks_*//*** Boolean indicating whether the `Tech` supports volume control.** @type {boolean}* @default*/Tech.prototype.featuresVolumeControl = true;/*** Boolean indicating whether the `Tech` supports muting volume.** @type {boolean}* @default*/Tech.prototype.featuresMuteControl = true;/*** Boolean indicating whether the `Tech` supports fullscreen resize control.* Resizing plugins using request fullscreen reloads the plugin** @type {boolean}* @default*/Tech.prototype.featuresFullscreenResize = false;/*** Boolean indicating whether the `Tech` supports changing the speed at which the video* plays. 
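// Illustrative sketch (not part of the original bundle): checking the playback rate
// feature flag described below before changing speed. Reaching the tech directly via
// player.tech_ is a shortcut for illustration only. Never invoked here.
function doubleSpeedSketch_(player) {
  if (player.tech_ && player.tech_.featuresPlaybackRate) {
    player.playbackRate(2); // play twice as fast
  }
}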
Examples:* - Set player to play 2x (twice) as fast* - Set player to play 0.5x (half) as fast** @type {boolean}* @default*/Tech.prototype.featuresPlaybackRate = false;/*** Boolean indicating whether the `Tech` supports the `progress` event.* This will be used to determine if {@link Tech#manualProgressOn} should be called.** @type {boolean}* @default*/Tech.prototype.featuresProgressEvents = false;/*** Boolean indicating whether the `Tech` supports the `sourceset` event.** A tech should set this to `true` and then use {@link Tech#triggerSourceset}* to trigger a {@link Tech#event:sourceset} at the earliest time after getting* a new source.** @type {boolean}* @default*/Tech.prototype.featuresSourceset = false;/*** Boolean indicating whether the `Tech` supports the `timeupdate` event.* This will be used to determine if {@link Tech#manualTimeUpdates} should be called.** @type {boolean}* @default*/Tech.prototype.featuresTimeupdateEvents = false;/*** Boolean indicating whether the `Tech` supports the native `TextTrack`s.* This will help us integrate with native `TextTrack`s if the browser supports them.** @type {boolean}* @default*/Tech.prototype.featuresNativeTextTracks = false;/*** Boolean indicating whether the `Tech` supports `requestVideoFrameCallback`.** @type {boolean}* @default*/Tech.prototype.featuresVideoFrameCallback = false;/*** A functional mixin for techs that want to use the Source Handler pattern.* Source handlers are scripts for handling specific formats.* The source handler pattern is used for adaptive formats (HLS, DASH) that* manually load video data and feed it into a Source Buffer (Media Source Extensions)* Example: `Tech.withSourceHandlers.call(MyTech);`** @param {Tech} _Tech* The tech to add source handler functions to.** @mixes Tech~SourceHandlerAdditions*/Tech.withSourceHandlers = function (_Tech) {/*** Register a source handler** @param {Function} handler* The source handler class** @param {number} [index]* Register it at the following index*/_Tech.registerSourceHandler = function (handler, index) {let handlers = _Tech.sourceHandlers;if (!handlers) {handlers = _Tech.sourceHandlers = [];}if (index === undefined) {// add to the end of the listindex = handlers.length;}handlers.splice(index, 0, handler);};/*** Check if the tech can support the given type. 
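// Illustrative sketch (not part of the original bundle): the shape of a source handler
// consulted by the canPlayType/canPlaySource/selectSourceHandler wrappers defined below.
// The MIME type and tech name are hypothetical. Defined only for illustration.
var exampleSourceHandlerSketch_ = {
  canPlayType: function (type) {
    return type === 'application/x-example' ? 'maybe' : '';
  },
  canHandleSource: function (source, options) {
    return exampleSourceHandlerSketch_.canPlayType(source.type);
  },
  handleSource: function (source, tech, options) {
    // Start feeding media data into the tech here; the returned object is kept as
    // tech.sourceHandler_ and its dispose() is called during cleanup.
    return { dispose: function () {} };
  }
};
// A concrete tech would register it via SomeTech.registerSourceHandler(exampleSourceHandlerSketch_).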
Also checks the* Techs sourceHandlers.** @param {string} type* The mimetype to check.** @return {string}* 'probably', 'maybe', or '' (empty string)*/_Tech.canPlayType = function (type) {const handlers = _Tech.sourceHandlers || [];let can;for (let i = 0; i < handlers.length; i++) {can = handlers[i].canPlayType(type);if (can) {return can;}}return '';};/*** Returns the first source handler that supports the source.** TODO: Answer question: should 'probably' be prioritized over 'maybe'** @param {SourceObject} source* The source object** @param {Object} options* The options passed to the tech** @return {SourceHandler|null}* The first source handler that supports the source or null if* no SourceHandler supports the source*/_Tech.selectSourceHandler = function (source, options) {const handlers = _Tech.sourceHandlers || [];let can;for (let i = 0; i < handlers.length; i++) {can = handlers[i].canHandleSource(source, options);if (can) {return handlers[i];}}return null;};/*** Check if the tech can support the given source.** @param {SourceObject} srcObj* The source object** @param {Object} options* The options passed to the tech** @return {string}* 'probably', 'maybe', or '' (empty string)*/_Tech.canPlaySource = function (srcObj, options) {const sh = _Tech.selectSourceHandler(srcObj, options);if (sh) {return sh.canHandleSource(srcObj, options);}return '';};/*** When using a source handler, prefer its implementation of* any function normally provided by the tech.*/const deferrable = ['seekable', 'seeking', 'duration'];/*** A wrapper around {@link Tech#seekable} that will call a `SourceHandler`s seekable* function if it exists, with a fallback to the Techs seekable function.** @method _Tech.seekable*//*** A wrapper around {@link Tech#duration} that will call a `SourceHandler`s duration* function if it exists, otherwise it will fallback to the techs duration function.** @method _Tech.duration*/deferrable.forEach(function (fnName) {const originalFn = this[fnName];if (typeof originalFn !== 'function') {return;}this[fnName] = function () {if (this.sourceHandler_ && this.sourceHandler_[fnName]) {return this.sourceHandler_[fnName].apply(this.sourceHandler_, arguments);}return originalFn.apply(this, arguments);};}, _Tech.prototype);/*** Create a function for setting the source using a source object* and source handlers.* Should never be called unless a source handler was found.** @param {SourceObject} source* A source object with src and type keys*/_Tech.prototype.setSource = function (source) {let sh = _Tech.selectSourceHandler(source, this.options_);if (!sh) {// Fall back to a native source handler when unsupported sources are// deliberately setif (_Tech.nativeSourceHandler) {sh = _Tech.nativeSourceHandler;} else {log$1.error('No source handler found for the current source.');}}// Dispose any existing source handlerthis.disposeSourceHandler();this.off('dispose', this.disposeSourceHandler_);if (sh !== _Tech.nativeSourceHandler) {this.currentSource_ = source;}this.sourceHandler_ = sh.handleSource(source, this, this.options_);this.one('dispose', this.disposeSourceHandler_);};/*** Clean up any existing SourceHandlers and listeners when the Tech is disposed.** @listens Tech#dispose*/_Tech.prototype.disposeSourceHandler = function () {// if we have a source and get another one// then we are loading something new// than clear all of our current tracksif (this.currentSource_) {this.clearTracks(['audio', 'video']);this.currentSource_ = null;}// always clean up auto-text tracksthis.cleanupAutoTextTracks();if 
(this.sourceHandler_) {if (this.sourceHandler_.dispose) {this.sourceHandler_.dispose();}this.sourceHandler_ = null;}};};// The base Tech class needs to be registered as a Component. It is the only// Tech that can be registered as a Component.Component$1.registerComponent('Tech', Tech);Tech.registerTech('Tech', Tech);/*** A list of techs that should be added to techOrder on Players** @private*/Tech.defaultTechOrder_ = [];/*** @file middleware.js* @module middleware*/const middlewares = {};const middlewareInstances = {};const TERMINATOR = {};/*** A middleware object is a plain JavaScript object that has methods that* match the {@link Tech} methods found in the lists of allowed* {@link module:middleware.allowedGetters|getters},* {@link module:middleware.allowedSetters|setters}, and* {@link module:middleware.allowedMediators|mediators}.** @typedef {Object} MiddlewareObject*//*** A middleware factory function that should return a* {@link module:middleware~MiddlewareObject|MiddlewareObject}.** This factory will be called for each player when needed, with the player* passed in as an argument.** @callback MiddlewareFactory* @param { import('../player').default } player* A Video.js player.*//*** Define a middleware that the player should use by way of a factory function* that returns a middleware object.** @param {string} type* The MIME type to match or `"*"` for all MIME types.** @param {MiddlewareFactory} middleware* A middleware factory function that will be executed for* matching types.*/function use(type, middleware) {middlewares[type] = middlewares[type] || [];middlewares[type].push(middleware);}/*** Asynchronously sets a source using middleware by recursing through any* matching middlewares and calling `setSource` on each, passing along the* previous returned value each time.** @param { import('../player').default } player* A {@link Player} instance.** @param {Tech~SourceObject} src* A source object.** @param {Function}* The next middleware to run.*/function setSource(player, src, next) {player.setTimeout(() => setSourceHelper(src, middlewares[src.type], next, player), 1);}/*** When the tech is set, passes the tech to each middleware's `setTech` method.** @param {Object[]} middleware* An array of middleware instances.** @param { import('../tech/tech').default } tech* A Video.js tech.*/function setTech(middleware, tech) {middleware.forEach(mw => mw.setTech && mw.setTech(tech));}/*** Calls a getter on the tech first, through each middleware* from right to left to the player.** @param {Object[]} middleware* An array of middleware instances.** @param { import('../tech/tech').default } tech* The current tech.** @param {string} method* A method name.** @return {*}* The final value from the tech after middleware has intercepted it.*/function get(middleware, tech, method) {return middleware.reduceRight(middlewareIterator(method), tech[method]());}/*** Takes the argument given to the player and calls the setter method on each* middleware from left to right to the tech.** @param {Object[]} middleware* An array of middleware instances.** @param { import('../tech/tech').default } tech* The current tech.** @param {string} method* A method name.** @param {*} arg* The value to set on the tech.** @return {*}* The return value of the `method` of the `tech`.*/function set(middleware, tech, method, arg) {return tech[method](middleware.reduce(middlewareIterator(method), arg));}/*** Takes the argument given to the player and calls the `call` version of the* method on each middleware from left to right.** Then, 
call the passed in method on the tech and return the result unchanged* back to the player, through middleware, this time from right to left.** @param {Object[]} middleware* An array of middleware instances.** @param { import('../tech/tech').default } tech* The current tech.** @param {string} method* A method name.** @param {*} arg* The value to set on the tech.** @return {*}* The return value of the `method` of the `tech`, regardless of the* return values of middlewares.*/function mediate(middleware, tech, method, arg = null) {const callMethod = 'call' + toTitleCase$1(method);const middlewareValue = middleware.reduce(middlewareIterator(callMethod), arg);const terminated = middlewareValue === TERMINATOR;// deprecated. The `null` return value should instead return TERMINATOR to// prevent confusion if a techs method actually returns null.const returnValue = terminated ? null : tech[method](middlewareValue);executeRight(middleware, method, returnValue, terminated);return returnValue;}/*** Enumeration of allowed getters where the keys are method names.** @type {Object}*/const allowedGetters = {buffered: 1,currentTime: 1,duration: 1,muted: 1,played: 1,paused: 1,seekable: 1,volume: 1,ended: 1};/*** Enumeration of allowed setters where the keys are method names.** @type {Object}*/const allowedSetters = {setCurrentTime: 1,setMuted: 1,setVolume: 1};/*** Enumeration of allowed mediators where the keys are method names.** @type {Object}*/const allowedMediators = {play: 1,pause: 1};function middlewareIterator(method) {return (value, mw) => {// if the previous middleware terminated, pass along the terminationif (value === TERMINATOR) {return TERMINATOR;}if (mw[method]) {return mw[method](value);}return value;};}function executeRight(mws, method, value, terminated) {for (let i = mws.length - 1; i >= 0; i--) {const mw = mws[i];if (mw[method]) {mw[method](terminated, value);}}}/*** Clear the middleware cache for a player.** @param { import('../player').default } player* A {@link Player} instance.*/function clearCacheForPlayer(player) {middlewareInstances[player.id()] = null;}/*** {* [playerId]: [[mwFactory, mwInstance], ...]* }** @private*/function getOrCreateFactory(player, mwFactory) {const mws = middlewareInstances[player.id()];let mw = null;if (mws === undefined || mws === null) {mw = mwFactory(player);middlewareInstances[player.id()] = [[mwFactory, mw]];return mw;}for (let i = 0; i < mws.length; i++) {const [mwf, mwi] = mws[i];if (mwf !== mwFactory) {continue;}mw = mwi;}if (mw === null) {mw = mwFactory(player);mws.push([mwFactory, mw]);}return mw;}function setSourceHelper(src = {}, middleware = [], next, player, acc = [], lastRun = false) {const [mwFactory, ...mwrest] = middleware;// if mwFactory is a string, then we're at a fork in the roadif (typeof mwFactory === 'string') {setSourceHelper(src, middlewares[mwFactory], next, player, acc, lastRun);// if we have an mwFactory, call it with the player to get the mw,// then call the mw's setSource method} else if (mwFactory) {const mw = getOrCreateFactory(player, mwFactory);// if setSource isn't present, implicitly select this middlewareif (!mw.setSource) {acc.push(mw);return setSourceHelper(src, mwrest, next, player, acc, lastRun);}mw.setSource(Object.assign({}, src), function (err, _src) {// something happened, try the next middleware on the current level// make sure to use the old srcif (err) {return setSourceHelper(src, mwrest, next, player, acc, lastRun);}// we've succeeded, now we need to go deeperacc.push(mw);// if it's the same type, continue down 
the current chain// otherwise, we want to go down the new chainsetSourceHelper(_src, src.type === _src.type ? mwrest : middlewares[_src.type], next, player, acc, lastRun);});} else if (mwrest.length) {setSourceHelper(src, mwrest, next, player, acc, lastRun);} else if (lastRun) {next(src, acc);} else {setSourceHelper(src, middlewares['*'], next, player, acc, true);}}/*** Mimetypes** @see https://www.iana.org/assignments/media-types/media-types.xhtml* @typedef Mimetypes~Kind* @enum*/const MimetypesKind = {opus: 'video/ogg',ogv: 'video/ogg',mp4: 'video/mp4',mov: 'video/mp4',m4v: 'video/mp4',mkv: 'video/x-matroska',m4a: 'audio/mp4',mp3: 'audio/mpeg',aac: 'audio/aac',caf: 'audio/x-caf',flac: 'audio/flac',oga: 'audio/ogg',wav: 'audio/wav',m3u8: 'application/x-mpegURL',mpd: 'application/dash+xml',jpg: 'image/jpeg',jpeg: 'image/jpeg',gif: 'image/gif',png: 'image/png',svg: 'image/svg+xml',webp: 'image/webp'};/*** Get the mimetype of a given src url if possible** @param {string} src* The url to the src** @return {string}* return the mimetype if it was known or empty string otherwise*/const getMimetype = function (src = '') {const ext = getFileExtension(src);const mimetype = MimetypesKind[ext.toLowerCase()];return mimetype || '';};/*** Find the mime type of a given source string if possible. Uses the player* source cache.** @param { import('../player').default } player* The player object** @param {string} src* The source string** @return {string}* The type that was found*/const findMimetype = (player, src) => {if (!src) {return '';}// 1. check for the type in the `source` cacheif (player.cache_.source.src === src && player.cache_.source.type) {return player.cache_.source.type;}// 2. see if we have this source in our `currentSources` cacheconst matchingSources = player.cache_.sources.filter(s => s.src === src);if (matchingSources.length) {return matchingSources[0].type;}// 3. look for the src url in source elements and use the type thereconst sources = player.$$('source');for (let i = 0; i < sources.length; i++) {const s = sources[i];if (s.type && s.src && s.src === src) {return s.type;}}// 4. finally fallback to our list of mime types based on src url extensionreturn getMimetype(src);};/*** @module filter-source*//*** Filter out single bad source objects or multiple source objects in an* array. 
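* As a rough, illustrative sketch only (filterSource is a private helper in this module, not a
* documented videojs API; the URLs below are placeholders), the normalization behaves like:
*
*   filterSource('//example.com/oceans.mp4')
*   // => [{ src: '//example.com/oceans.mp4', type: 'video/mp4' }]
*
*   filterSource([{ src: 'a.m3u8' }, [{ src: 'b.mp4' }], 42])
*   // => [{ src: 'a.m3u8', type: 'application/x-mpegURL' },
*   //     { src: 'b.mp4', type: 'video/mp4' }]   (the invalid entry is dropped)
*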
Also flattens nested source object arrays into a 1 dimensional* array of source objects.** @param {Tech~SourceObject|Tech~SourceObject[]} src* The src object to filter** @return {Tech~SourceObject[]}* An array of sourceobjects containing only valid sources** @private*/const filterSource = function (src) {// traverse arrayif (Array.isArray(src)) {let newsrc = [];src.forEach(function (srcobj) {srcobj = filterSource(srcobj);if (Array.isArray(srcobj)) {newsrc = newsrc.concat(srcobj);} else if (isObject$1(srcobj)) {newsrc.push(srcobj);}});src = newsrc;} else if (typeof src === 'string' && src.trim()) {// convert string into objectsrc = [fixSource({src})];} else if (isObject$1(src) && typeof src.src === 'string' && src.src && src.src.trim()) {// src is already validsrc = [fixSource(src)];} else {// invalid source, turn it into an empty arraysrc = [];}return src;};/*** Checks src mimetype, adding it when possible** @param {Tech~SourceObject} src* The src object to check* @return {Tech~SourceObject}* src Object with known type*/function fixSource(src) {if (!src.type) {const mimetype = getMimetype(src.src);if (mimetype) {src.type = mimetype;}}return src;}var icons = "<svg xmlns=\"http://www.w3.org/2000/svg\">\n <defs>\n <symbol viewBox=\"0 0 48 48\" id=\"vjs-icon-play\">\n <path d=\"M16 10v28l22-14z\"></path>\n </symbol>\n <symbol viewBox=\"0 0 48 48\" id=\"vjs-icon-pause\">\n <path d=\"M12 38h8V10h-8v28zm16-28v28h8V10h-8z\"></path>\n </symbol>\n <symbol viewBox=\"0 0 48 48\" id=\"vjs-icon-audio\">\n <path d=\"M24 2C14.06 2 6 10.06 6 20v14c0 3.31 2.69 6 6 6h6V24h-8v-4c0-7.73 6.27-14 14-14s14 6.27 14 14v4h-8v16h6c3.31 0 6-2.69 6-6V20c0-9.94-8.06-18-18-18z\"></path>\n </symbol>\n <symbol viewBox=\"0 0 48 48\" id=\"vjs-icon-captions\">\n <path d=\"M38 8H10c-2.21 0-4 1.79-4 4v24c0 2.21 1.79 4 4 4h28c2.21 0 4-1.79 4-4V12c0-2.21-1.79-4-4-4zM22 22h-3v-1h-4v6h4v-1h3v2a2 2 0 0 1-2 2h-6a2 2 0 0 1-2-2v-8a2 2 0 0 1 2-2h6a2 2 0 0 1 2 2v2zm14 0h-3v-1h-4v6h4v-1h3v2a2 2 0 0 1-2 2h-6a2 2 0 0 1-2-2v-8a2 2 0 0 1 2-2h6a2 2 0 0 1 2 2v2z\"></path>\n </symbol>\n <symbol viewBox=\"0 0 48 48\" id=\"vjs-icon-subtitles\">\n <path d=\"M40 8H8c-2.21 0-4 1.79-4 4v24c0 2.21 1.79 4 4 4h32c2.21 0 4-1.79 4-4V12c0-2.21-1.79-4-4-4zM8 24h8v4H8v-4zm20 12H8v-4h20v4zm12 0h-8v-4h8v4zm0-8H20v-4h20v4z\"></path>\n </symbol>\n <symbol viewBox=\"0 0 48 48\" id=\"vjs-icon-fullscreen-enter\">\n <path d=\"M14 28h-4v10h10v-4h-6v-6zm-4-8h4v-6h6v-4H10v10zm24 14h-6v4h10V28h-4v6zm-6-24v4h6v6h4V10H28z\"></path>\n </symbol>\n <symbol viewBox=\"0 0 48 48\" id=\"vjs-icon-fullscreen-exit\">\n <path d=\"M10 32h6v6h4V28H10v4zm6-16h-6v4h10V10h-4v6zm12 22h4v-6h6v-4H28v10zm4-22v-6h-4v10h10v-4h-6z\"></path>\n </symbol>\n <symbol viewBox=\"0 0 48 48\" id=\"vjs-icon-play-circle\">\n <path d=\"M20 33l12-9-12-9v18zm4-29C12.95 4 4 12.95 4 24s8.95 20 20 20 20-8.95 20-20S35.05 4 24 4zm0 36c-8.82 0-16-7.18-16-16S15.18 8 24 8s16 7.18 16 16-7.18 16-16 16z\"></path>\n </symbol>\n <symbol viewBox=\"0 0 48 48\" id=\"vjs-icon-volume-mute\">\n <path d=\"M33 24c0-3.53-2.04-6.58-5-8.05v4.42l4.91 4.91c.06-.42.09-.85.09-1.28zm5 0c0 1.88-.41 3.65-1.08 5.28l3.03 3.03C41.25 29.82 42 27 42 24c0-8.56-5.99-15.72-14-17.54v4.13c5.78 1.72 10 7.07 10 13.41zM8.55 6L6 8.55 15.45 18H6v12h8l10 10V26.55l8.51 8.51c-1.34 1.03-2.85 1.86-4.51 2.36v4.13a17.94 17.94 0 0 0 7.37-3.62L39.45 42 42 39.45l-18-18L8.55 6zM24 8l-4.18 4.18L24 16.36V8z\"></path>\n </symbol>\n <symbol viewBox=\"0 0 48 48\" id=\"vjs-icon-volume-low\">\n <path d=\"M14 18v12h8l10 10V8L22 18h-8z\"></path>\n </symbol>\n <symbol 
viewBox=\"0 0 48 48\" id=\"vjs-icon-volume-medium\">\n <path d=\"M37 24c0-3.53-2.04-6.58-5-8.05v16.11c2.96-1.48 5-4.53 5-8.06zm-27-6v12h8l10 10V8L18 18h-8z\"></path>\n </symbol>\n <symbol viewBox=\"0 0 48 48\" id=\"vjs-icon-volume-high\">\n <path d=\"M6 18v12h8l10 10V8L14 18H6zm27 6c0-3.53-2.04-6.58-5-8.05v16.11c2.96-1.48 5-4.53 5-8.06zM28 6.46v4.13c5.78 1.72 10 7.07 10 13.41s-4.22 11.69-10 13.41v4.13c8.01-1.82 14-8.97 14-17.54S36.01 8.28 28 6.46z\"></path>\n </symbol>\n <symbol viewBox=\"0 0 48 48\" id=\"vjs-icon-spinner\">\n <path d=\"M18.8 21l9.53-16.51C26.94 4.18 25.49 4 24 4c-4.8 0-9.19 1.69-12.64 4.51l7.33 12.69.11-.2zm24.28-3c-1.84-5.85-6.3-10.52-11.99-12.68L23.77 18h19.31zm.52 2H28.62l.58 1 9.53 16.5C41.99 33.94 44 29.21 44 24c0-1.37-.14-2.71-.4-4zm-26.53 4l-7.8-13.5C6.01 14.06 4 18.79 4 24c0 1.37.14 2.71.4 4h14.98l-2.31-4zM4.92 30c1.84 5.85 6.3 10.52 11.99 12.68L24.23 30H4.92zm22.54 0l-7.8 13.51c1.4.31 2.85.49 4.34.49 4.8 0 9.19-1.69 12.64-4.51L29.31 26.8 27.46 30z\"></path>\n </symbol>\n <symbol viewBox=\"0 0 24 24\" id=\"vjs-icon-hd\">\n <path d=\"M19 3H5a2 2 0 0 0-2 2v14a2 2 0 0 0 2 2h14c1.1 0 2-.9 2-2V5c0-1.1-.9-2-2-2zm-8 12H9.5v-2h-2v2H6V9h1.5v2.5h2V9H11v6zm2-6h4c.55 0 1 .45 1 1v4c0 .55-.45 1-1 1h-4V9zm1.5 4.5h2v-3h-2v3z\"></path>\n </symbol>\n <symbol viewBox=\"0 0 48 48\" id=\"vjs-icon-chapters\">\n <path d=\"M6 26h4v-4H6v4zm0 8h4v-4H6v4zm0-16h4v-4H6v4zm8 8h28v-4H14v4zm0 8h28v-4H14v4zm0-20v4h28v-4H14z\"></path>\n </symbol>\n <symbol viewBox=\"0 0 40 40\" id=\"vjs-icon-downloading\">\n <path d=\"M18.208 36.875q-3.208-.292-5.979-1.729-2.771-1.438-4.812-3.729-2.042-2.292-3.188-5.229-1.146-2.938-1.146-6.23 0-6.583 4.334-11.416 4.333-4.834 10.833-5.5v3.166q-5.167.75-8.583 4.646Q6.25 14.75 6.25 19.958q0 5.209 3.396 9.104 3.396 3.896 8.562 4.646zM20 28.417L11.542 20l2.083-2.083 4.917 4.916v-11.25h2.916v11.25l4.875-4.916L28.417 20zm1.792 8.458v-3.167q1.833-.25 3.541-.958 1.709-.708 3.167-1.875l2.333 2.292q-1.958 1.583-4.25 2.541-2.291.959-4.791 1.167zm6.791-27.792q-1.541-1.125-3.25-1.854-1.708-.729-3.541-1.021V3.042q2.5.25 4.77 1.208 2.271.958 4.271 2.5zm4.584 21.584l-2.25-2.25q1.166-1.5 1.854-3.209.687-1.708.937-3.541h3.209q-.292 2.5-1.229 4.791-.938 2.292-2.521 4.209zm.541-12.417q-.291-1.833-.958-3.562-.667-1.73-1.833-3.188l2.375-2.208q1.541 1.916 2.458 4.208.917 2.292 1.167 4.75z\"></path>\n </symbol>\n <symbol viewBox=\"0 0 48 48\" id=\"vjs-icon-file-download\">\n <path d=\"M10.8 40.55q-1.35 0-2.375-1T7.4 37.15v-7.7h3.4v7.7h26.35v-7.7h3.4v7.7q0 1.4-1 2.4t-2.4 1zM24 32.1L13.9 22.05l2.45-2.45 5.95 5.95V7.15h3.4v18.4l5.95-5.95 2.45 2.45z\"></path>\n </symbol>\n <symbol viewBox=\"0 0 48 48\" id=\"vjs-icon-file-download-done\">\n <path d=\"M9.8 40.5v-3.45h28.4v3.45zm9.2-9.05L7.4 19.85l2.45-2.35L19 26.65l19.2-19.2 2.4 2.4z\"></path>\n </symbol>\n <symbol viewBox=\"0 0 48 48\" id=\"vjs-icon-file-download-off\">\n <path d=\"M4.9 4.75L43.25 43.1 41 45.3l-4.75-4.75q-.05.05-.075.025-.025-.025-.075-.025H10.8q-1.35 0-2.375-1T7.4 37.15v-7.7h3.4v7.7h22.05l-7-7-1.85 1.8L13.9 21.9l1.85-1.85L2.7 7zm26.75 14.7l2.45 2.45-3.75 3.8-2.45-2.5zM25.7 7.15V21.1l-3.4-3.45V7.15z\"></path>\n </symbol>\n <symbol viewBox=\"0 0 48 48\" id=\"vjs-icon-share\">\n <path d=\"M36 32.17c-1.52 0-2.89.59-3.93 1.54L17.82 25.4c.11-.45.18-.92.18-1.4s-.07-.95-.18-1.4l14.1-8.23c1.07 1 2.5 1.62 4.08 1.62 3.31 0 6-2.69 6-6s-2.69-6-6-6-6 2.69-6 6c0 .48.07.95.18 1.4l-14.1 8.23c-1.07-1-2.5-1.62-4.08-1.62-3.31 0-6 2.69-6 6s2.69 6 6 6c1.58 0 3.01-.62 4.08-1.62l14.25 8.31c-.1.42-.16.86-.16 1.31A5.83 5.83 0 1 036 32.17z\"></path>\n 
</symbol>\n <symbol viewBox=\"0 0 48 48\" id=\"vjs-icon-cog\">\n <path d=\"M38.86 25.95c.08-.64.14-1.29.14-1.95s-.06-1.31-.14-1.95l4.23-3.31c.38-.3.49-.84.24-1.28l-4-6.93c-.25-.43-.77-.61-1.22-.43l-4.98 2.01c-1.03-.79-2.16-1.46-3.38-1.97L29 4.84c-.09-.47-.5-.84-1-.84h-8c-.5 0-.91.37-.99.84l-.75 5.3a14.8 14.8 0 0 0-3.38 1.97L9.9 10.1a1 1 0 0 0-1.22.43l-4 6.93c-.25.43-.14.97.24 1.28l4.22 3.31C9.06 22.69 9 23.34 9 24s.06 1.31.14 1.95l-4.22 3.31c-.38.3-.49.84-.24 1.28l4 6.93c.25.43.77.61 1.22.43l4.98-2.01c1.03.79 2.16 1.46 3.38 1.97l.75 5.3c.08.47.49.84.99.84h8c.5 0 .91-.37.99-.84l.75-5.3a14.8 14.8 0 0 0 3.38-1.97l4.98 2.01a1 1 0 0 0 1.22-.43l4-6.93c.25-.43.14-.97-.24-1.28l-4.22-3.31zM24 31c-3.87 0-7-3.13-7-7s3.13-7 7-7 7 3.13 7 7-3.13 7-7 7z\"></path>\n </symbol>\n <symbol viewBox=\"0 0 48 48\" id=\"vjs-icon-square\">\n <path d=\"M36 8H12c-2.21 0-4 1.79-4 4v24c0 2.21 1.79 4 4 4h24c2.21 0 4-1.79 4-4V12c0-2.21-1.79-4-4-4zm0 28H12V12h24v24z\"></path>\n </symbol>\n<symbol viewBox=\"0 0 48 48\" id=\"vjs-icon-circle\">\n <circle cx=\"24\" cy=\"24\" r=\"20\"></circle>\n </symbol>\n <symbol viewBox=\"0 0 48 48\" id=\"vjs-icon-circle-outline\">\n <path d=\"M24 4C12.95 4 4 12.95 4 24s8.95 20 20 20 20-8.95 20-20S35.05 4 24 4zm0 36c-8.82 0-16-7.18-16-16S15.18 8 24 8s16 7.18 16 16-7.18 16-16 16z\"></path>\n </symbol>\n <symbol viewBox=\"0 0 48 48\" id=\"vjs-icon-circle-inner-circle\">\n <path d=\"M24 4C12.97 4 4 12.97 4 24s8.97 20 20 20 20-8.97 20-20S35.03 4 24 4zm0 36c-8.82 0-16-7.18-16-16S15.18 8 24 8s16 7.18 16 16-7.18 16-16 16zm6-16c0 3.31-2.69 6-6 6s-6-2.69-6-6 2.69-6 6-6 6 2.69 6 6z\"></path>\n </symbol>\n <symbol viewBox=\"0 0 48 48\" id=\"vjs-icon-cancel\">\n <path d=\"M24 4C12.95 4 4 12.95 4 24s8.95 20 20 20 20-8.95 20-20S35.05 4 24 4zm10 27.17L31.17 34 24 26.83 16.83 34 14 31.17 21.17 24 14 16.83 16.83 14 24 21.17 31.17 14 34 16.83 26.83 24 34 31.17z\"></path>\n </symbol>\n <symbol viewBox=\"0 0 48 48\" id=\"vjs-icon-replay\">\n <path d=\"M24 10V2L14 12l10 10v-8c6.63 0 12 5.37 12 12s-5.37 12-12 12-12-5.37-12-12H8c0 8.84 7.16 16 16 16s16-7.16 16-16-7.16-16-16-16z\"></path>\n </symbol>\n <symbol viewBox=\"0 0 48 48\" id=\"vjs-icon-repeat\">\n <path d=\"M14 14h20v6l8-8-8-8v6H10v12h4v-8zm20 20H14v-6l-8 8 8 8v-6h24V26h-4v8z\"></path>\n </symbol>\n <symbol viewBox=\"0 96 48 48\" id=\"vjs-icon-replay-5\">\n <path d=\"M17.689 98l-8.697 8.696 8.697 8.697 2.486-2.485-4.32-4.319h1.302c4.93 0 9.071 1.722 12.424 5.165 3.352 3.443 5.029 7.638 5.029 12.584h3.55c0-2.958-.553-5.73-1.658-8.313-1.104-2.583-2.622-4.841-4.555-6.774-1.932-1.932-4.19-3.45-6.773-4.555-2.584-1.104-5.355-1.657-8.313-1.657H15.5l4.615-4.615zm-8.08 21.659v13.861h11.357v5.008H9.609V143h12.7c.834 0 1.55-.298 2.146-.894.596-.597.895-1.31.895-2.145v-7.781c0-.835-.299-1.55-.895-2.147a2.929 2.929 0 0 0-2.147-.894h-8.227v-5.096H25.35v-4.384z\"></path>\n </symbol>\n <symbol viewBox=\"0 96 48 48\" id=\"vjs-icon-replay-10\">\n <path d=\"M42.315 125.63c0-4.997-1.694-9.235-5.08-12.713-3.388-3.479-7.571-5.218-12.552-5.218h-1.315l4.363 4.363-2.51 2.51-8.787-8.786L25.221 97l2.45 2.45-4.662 4.663h1.375c2.988 0 5.788.557 8.397 1.673 2.61 1.116 4.892 2.65 6.844 4.602 1.953 1.953 3.487 4.234 4.602 6.844 1.116 2.61 1.674 5.41 1.674 8.398zM8.183 142v-19.657H3.176V117.8h9.643V142zm13.63 0c-1.156 0-2.127-.393-2.912-1.178-.778-.778-1.168-1.746-1.168-2.902v-16.04c0-1.156.393-2.127 1.178-2.912.779-.779 1.746-1.168 2.902-1.168h7.696c1.156 0 2.126.392 2.911 1.177.779.78 1.168 1.747 1.168 2.903v16.04c0 1.156-.392 2.127-1.177 2.912-.779.779-1.746 1.168-2.902 
1.168zm.556-4.636h6.583v-15.02H22.37z\"></path>\n </symbol>\n <symbol viewBox=\"0 96 48 48\" id=\"vjs-icon-replay-30\">\n <path d=\"M26.047 97l-8.733 8.732 8.733 8.733 2.496-2.494-4.336-4.338h1.307c4.95 0 9.108 1.73 12.474 5.187 3.367 3.458 5.051 7.668 5.051 12.635h3.565c0-2.97-.556-5.751-1.665-8.346-1.109-2.594-2.633-4.862-4.574-6.802-1.94-1.941-4.208-3.466-6.803-4.575-2.594-1.109-5.375-1.664-8.345-1.664H23.85l4.634-4.634zM2.555 117.531v4.688h10.297v5.25H5.873v4.687h6.979v5.156H2.555V142H13.36c1.061 0 1.95-.395 2.668-1.186.718-.79 1.076-1.772 1.076-2.94v-16.218c0-1.168-.358-2.149-1.076-2.94-.717-.79-1.607-1.185-2.668-1.185zm22.482.14c-1.149 0-2.11.39-2.885 1.165-.78.78-1.172 1.744-1.172 2.893v15.943c0 1.149.388 2.11 1.163 2.885.78.78 1.745 1.172 2.894 1.172h7.649c1.148 0 2.11-.388 2.884-1.163.78-.78 1.17-1.745 1.17-2.894v-15.943c0-1.15-.386-2.111-1.16-2.885-.78-.78-1.746-1.172-2.894-1.172zm.553 4.518h6.545v14.93H25.59z\"></path>\n </symbol>\n <symbol viewBox=\"0 96 48 48\" id=\"vjs-icon-forward-5\">\n <path d=\"M29.508 97l-2.431 2.43 4.625 4.625h-1.364c-2.965 0-5.742.554-8.332 1.66-2.589 1.107-4.851 2.629-6.788 4.566-1.937 1.937-3.458 4.2-4.565 6.788-1.107 2.59-1.66 5.367-1.66 8.331h3.557c0-4.957 1.68-9.16 5.04-12.611 3.36-3.45 7.51-5.177 12.451-5.177h1.304l-4.326 4.33 2.49 2.49 8.715-8.716zm-9.783 21.61v13.89h11.382v5.018H19.725V142h12.727a2.93 2.93 0 0 0 2.15-.896 2.93 2.93 0 0 0 .896-2.15v-7.798c0-.837-.299-1.554-.896-2.152a2.93 2.93 0 0 0-2.15-.896h-8.245V123h11.29v-4.392z\"></path>\n </symbol>\n <symbol viewBox=\"0 96 48 48\" id=\"vjs-icon-forward-10\">\n <path d=\"M23.119 97l-2.386 2.383 4.538 4.538h-1.339c-2.908 0-5.633.543-8.173 1.63-2.54 1.085-4.76 2.577-6.66 4.478-1.9 1.9-3.392 4.12-4.478 6.66-1.085 2.54-1.629 5.264-1.629 8.172h3.49c0-4.863 1.648-8.986 4.944-12.372 3.297-3.385 7.368-5.078 12.216-5.078h1.279l-4.245 4.247 2.443 2.442 8.55-8.55zm-9.52 21.45v4.42h4.871V142h4.513v-23.55zm18.136 0c-1.125 0-2.066.377-2.824 1.135-.764.764-1.148 1.709-1.148 2.834v15.612c0 1.124.38 2.066 1.139 2.824.764.764 1.708 1.145 2.833 1.145h7.489c1.125 0 2.066-.378 2.824-1.136.764-.764 1.145-1.709 1.145-2.833v-15.612c0-1.125-.378-2.067-1.136-2.825-.764-.764-1.708-1.145-2.833-1.145zm.54 4.42h6.408v14.617h-6.407z\"></path>\n </symbol>\n <symbol viewBox=\"0 96 48 48\" id=\"vjs-icon-forward-30\">\n <path d=\"M25.549 97l-2.437 2.434 4.634 4.635H26.38c-2.97 0-5.753.555-8.347 1.664-2.594 1.109-4.861 2.633-6.802 4.574-1.94 1.94-3.465 4.207-4.574 6.802-1.109 2.594-1.664 5.377-1.664 8.347h3.565c0-4.967 1.683-9.178 5.05-12.636 3.366-3.458 7.525-5.187 12.475-5.187h1.307l-4.335 4.338 2.495 2.494 8.732-8.732zm-11.553 20.53v4.689h10.297v5.249h-6.978v4.688h6.978v5.156H13.996V142h10.808c1.06 0 1.948-.395 2.666-1.186.718-.79 1.077-1.771 1.077-2.94v-16.217c0-1.169-.36-2.15-1.077-2.94-.718-.79-1.605-1.186-2.666-1.186zm21.174.168c-1.149 0-2.11.389-2.884 1.163-.78.78-1.172 1.745-1.172 2.894v15.942c0 1.15.388 2.11 1.162 2.885.78.78 1.745 1.17 2.894 1.17h7.649c1.149 0 2.11-.386 2.885-1.16.78-.78 1.17-1.746 1.17-2.895v-15.942c0-1.15-.387-2.11-1.161-2.885-.78-.78-1.745-1.172-2.894-1.172zm.552 4.516h6.542v14.931h-6.542z\"></path>\n </symbol>\n <symbol viewBox=\"0 0 512 512\" id=\"vjs-icon-audio-description\">\n <g fill-rule=\"evenodd\"><path d=\"M227.29 381.351V162.993c50.38-1.017 89.108-3.028 117.631 17.126 27.374 19.342 48.734 56.965 44.89 105.325-4.067 51.155-41.335 94.139-89.776 98.475-24.085 2.155-71.972 0-71.972 0s-.84-1.352-.773-2.568m48.755-54.804c31.43 1.26 53.208-16.633 56.495-45.386 
4.403-38.51-21.188-63.552-58.041-60.796v103.612c-.036 1.466.575 2.22 1.546 2.57\"></path><path d=\"M383.78 381.328c13.336 3.71 17.387-11.06 23.215-21.408 12.722-22.571 22.294-51.594 22.445-84.774.221-47.594-18.343-82.517-35.6-106.182h-8.51c-.587 3.874 2.226 7.315 3.865 10.276 13.166 23.762 25.367 56.553 25.54 94.194.2 43.176-14.162 79.278-30.955 107.894\"></path><path d=\"M425.154 381.328c13.336 3.71 17.384-11.061 23.215-21.408 12.721-22.571 22.291-51.594 22.445-84.774.221-47.594-18.343-82.517-35.6-106.182h-8.511c-.586 3.874 2.226 7.315 3.866 10.276 13.166 23.762 25.367 56.553 25.54 94.194.2 43.176-14.162 79.278-30.955 107.894\"></path><path d=\"M466.26 381.328c13.337 3.71 17.385-11.061 23.216-21.408 12.722-22.571 22.292-51.594 22.445-84.774.221-47.594-18.343-82.517-35.6-106.182h-8.51c-.587 3.874 2.225 7.315 3.865 10.276 13.166 23.762 25.367 56.553 25.54 94.194.2 43.176-14.162 79.278-30.955 107.894M4.477 383.005H72.58l18.573-28.484 64.169-.135s.065 19.413.065 28.62h48.756V160.307h-58.816c-5.653 9.537-140.85 222.697-140.85 222.697zm152.667-145.282v71.158l-40.453-.27 40.453-70.888z\"></path></g>\n </symbol>\n <symbol viewBox=\"0 0 48 48\" id=\"vjs-icon-next-item\">\n <path d=\"M12 36l17-12-17-12v24zm20-24v24h4V12h-4z\"></path>\n </symbol>\n <symbol viewBox=\"0 0 48 48\" id=\"vjs-icon-previous-item\">\n <path d=\"M12 12h4v24h-4zm7 12l17 12V12z\"></path>\n </symbol>\n <symbol viewBox=\"0 0 48 48\" id=\"vjs-icon-shuffle\">\n <path d=\"M21.17 18.34L10.83 8 8 10.83l10.34 10.34 2.83-2.83zM29 8l4.09 4.09L8 37.17 10.83 40l25.09-25.09L40 19V8H29zm.66 18.83l-2.83 2.83 6.26 6.26L29 40h11V29l-4.09 4.09-6.25-6.26z\"></path>\n </symbol>\n <symbol viewBox=\"0 0 48 48\" id=\"vjs-icon-cast\">\n <path d=\"M42 6H6c-2.21 0-4 1.79-4 4v6h4v-6h36v28H28v4h14c2.21 0 4-1.79 4-4V10c0-2.21-1.79-4-4-4zM2 36v6h6c0-3.31-2.69-6-6-6zm0-8v4c5.52 0 10 4.48 10 10h4c0-7.73-6.27-14-14-14zm0-8v4c9.94 0 18 8.06 18 18h4c0-12.15-9.85-22-22-22z\"></path>\n </symbol>\n <symbol viewBox=\"0 0 48 48\" id=\"vjs-icon-picture-in-picture-enter\">\n <path d=\"M38 22H22v11.99h16V22zm8 16V9.96C46 7.76 44.2 6 42 6H6C3.8 6 2 7.76 2 9.96V38c0 2.2 1.8 4 4 4h36c2.2 0 4-1.8 4-4zm-4 .04H6V9.94h36v28.1z\"></path>\n </symbol>\n <symbol viewBox=\"0 0 22 18\" id=\"vjs-icon-picture-in-picture-exit\">\n <path d=\"M18 4H4v10h14V4zm4 12V1.98C22 .88 21.1 0 20 0H2C.9 0 0 .88 0 1.98V16c0 1.1.9 2 2 2h18c1.1 0 2-.9 2-2zm-2 .02H2V1.97h18v14.05z\"></path>\n <path fill=\"none\" d=\"M-1-3h24v24H-1z\"></path>\n </symbol>\n <symbol viewBox=\"0 0 1792 1792\" id=\"vjs-icon-facebook\">\n <path d=\"M1343 12v264h-157q-86 0-116 36t-30 108v189h293l-39 296h-254v759H734V905H479V609h255V391q0-186 104-288.5T1115 0q147 0 228 12z\"></path>\n </symbol>\n <symbol viewBox=\"0 0 1792 1792\" id=\"vjs-icon-linkedin\">\n <path d=\"M477 625v991H147V625h330zm21-306q1 73-50.5 122T312 490h-2q-82 0-132-49t-50-122q0-74 51.5-122.5T314 148t133 48.5T498 319zm1166 729v568h-329v-530q0-105-40.5-164.5T1168 862q-63 0-105.5 34.5T999 982q-11 30-11 81v553H659q2-399 2-647t-1-296l-1-48h329v144h-2q20-32 41-56t56.5-52 87-43.5T1285 602q171 0 275 113.5t104 332.5z\"></path>\n </symbol>\n <symbol viewBox=\"0 0 1792 1792\" id=\"vjs-icon-twitter\">\n <path d=\"M1684 408q-67 98-162 167 1 14 1 42 0 130-38 259.5T1369.5 1125 1185 1335.5t-258 146-323 54.5q-271 0-496-145 35 4 78 4 225 0 401-138-105-2-188-64.5T285 1033q33 5 61 5 43 0 85-11-112-23-185.5-111.5T172 710v-4q68 38 146 41-66-44-105-115t-39-154q0-88 44-163 121 149 294.5 238.5T884 653q-8-38-8-74 0-134 94.5-228.5T1199 256q140 0 236 102 109-21 205-78-37 115-142 178 
93-10 186-50z\"></path>\n </symbol>\n <symbol viewBox=\"0 0 1792 1792\" id=\"vjs-icon-tumblr\">\n <path d=\"M1328 1329l80 237q-23 35-111 66t-177 32q-104 2-190.5-26T787 1564t-95-106-55.5-120-16.5-118V676H452V461q72-26 129-69.5t91-90 58-102 34-99T779 12q1-5 4.5-8.5T791 0h244v424h333v252h-334v518q0 30 6.5 56t22.5 52.5 49.5 41.5 81.5 14q78-2 134-29z\"></path>\n </symbol>\n <symbol viewBox=\"0 0 1792 1792\" id=\"vjs-icon-pinterest\">\n <path d=\"M1664 896q0 209-103 385.5T1281.5 1561 896 1664q-111 0-218-32 59-93 78-164 9-34 54-211 20 39 73 67.5t114 28.5q121 0 216-68.5t147-188.5 52-270q0-114-59.5-214T1180 449t-255-63q-105 0-196 29t-154.5 77-109 110.5-67 129.5T377 866q0 104 40 183t117 111q30 12 38-20 2-7 8-31t8-30q6-23-11-43-51-61-51-151 0-151 104.5-259.5T904 517q151 0 235.5 82t84.5 213q0 170-68.5 289T980 1220q-61 0-98-43.5T859 1072q8-35 26.5-93.5t30-103T927 800q0-50-27-83t-77-33q-62 0-105 57t-43 142q0 73 25 122l-99 418q-17 70-13 177-206-91-333-281T128 896q0-209 103-385.5T510.5 231 896 128t385.5 103T1561 510.5 1664 896z\"></path>\n </symbol>\n </defs>\n</svg>";/*** @file loader.js*//*** The `MediaLoader` is the `Component` that decides which playback technology to load* when a player is initialized.** @extends Component*/class MediaLoader extends Component$1 {/*** Create an instance of this class.** @param { import('../player').default } player* The `Player` that this class should attach to.** @param {Object} [options]* The key/value store of player options.** @param {Function} [ready]* The function that is run when this component is ready.*/constructor(player, options, ready) {// MediaLoader has no elementconst options_ = merge$2({createEl: false}, options);super(player, options_, ready);// If there are no sources when the player is initialized,// load the first supported playback technology.if (!options.playerOptions.sources || options.playerOptions.sources.length === 0) {for (let i = 0, j = options.playerOptions.techOrder; i < j.length; i++) {const techName = toTitleCase$1(j[i]);let tech = Tech.getTech(techName);// Support old behavior of techs being registered as components.// Remove once that deprecated behavior is removed.if (!techName) {tech = Component$1.getComponent(techName);}// Check if the browser supports this technologyif (tech && tech.isSupported()) {player.loadTech_(techName);break;}}} else {// Loop through playback technologies (e.g. 
HTML5) and check for support.// Then load the best source.// A few assumptions here:// All playback technologies respect preload false.player.src(options.playerOptions.sources);}}}Component$1.registerComponent('MediaLoader', MediaLoader);/*** @file clickable-component.js*//*** Component which is clickable or keyboard actionable, but is not a* native HTML button.** @extends Component*/class ClickableComponent extends Component$1 {/*** Creates an instance of this class.** @param { import('./player').default } player* The `Player` that this class should be attached to.** @param {Object} [options]* The key/value store of component options.** @param {function} [options.clickHandler]* The function to call when the button is clicked / activated** @param {string} [options.controlText]* The text to set on the button** @param {string} [options.className]* A class or space separated list of classes to add the component**/constructor(player, options) {super(player, options);if (this.options_.controlText) {this.controlText(this.options_.controlText);}this.handleMouseOver_ = e => this.handleMouseOver(e);this.handleMouseOut_ = e => this.handleMouseOut(e);this.handleClick_ = e => this.handleClick(e);this.handleKeyDown_ = e => this.handleKeyDown(e);this.emitTapEvents();this.enable();}/*** Create the `ClickableComponent`s DOM element.** @param {string} [tag=div]* The element's node type.** @param {Object} [props={}]* An object of properties that should be set on the element.** @param {Object} [attributes={}]* An object of attributes that should be set on the element.** @return {Element}* The element that gets created.*/createEl(tag = 'div', props = {}, attributes = {}) {props = Object.assign({className: this.buildCSSClass(),tabIndex: 0}, props);if (tag === 'button') {log$1.error(`Creating a ClickableComponent with an HTML element of ${tag} is not supported; use a Button instead.`);}// Add ARIA attributes for clickable element which is not a native HTML buttonattributes = Object.assign({role: 'button'}, attributes);this.tabIndex_ = props.tabIndex;const el = createEl(tag, props, attributes);if (!this.player_.options_.experimentalSvgIcons) {el.appendChild(createEl('span', {className: 'vjs-icon-placeholder'}, {'aria-hidden': true}));}this.createControlTextEl(el);return el;}dispose() {// remove controlTextEl_ on disposethis.controlTextEl_ = null;super.dispose();}/*** Create a control text element on this `ClickableComponent`** @param {Element} [el]* Parent element for the control text.** @return {Element}* The control text element that gets created.*/createControlTextEl(el) {this.controlTextEl_ = createEl('span', {className: 'vjs-control-text'}, {// let the screen reader user know that the text of the element may change'aria-live': 'polite'});if (el) {el.appendChild(this.controlTextEl_);}this.controlText(this.controlText_, el);return this.controlTextEl_;}/*** Get or set the localize text to use for the controls on the `ClickableComponent`.** @param {string} [text]* Control text for element.** @param {Element} [el=this.el()]* Element to set the title on.** @return {string}* - The control text when getting*/controlText(text, el = this.el()) {if (text === undefined) {return this.controlText_ || 'Need Text';}const localizedText = this.localize(text);/** @protected */this.controlText_ = text;textContent(this.controlTextEl_, localizedText);if (!this.nonIconControl && !this.player_.options_.noUITitleAttributes) {// Set title attribute if only an icon is shownel.setAttribute('title', localizedText);}}/*** Builds the 
default DOM `className`.** @return {string}* The DOM `className` for this object.*/buildCSSClass() {return `vjs-control vjs-button ${super.buildCSSClass()}`;}/*** Enable this `ClickableComponent`*/enable() {if (!this.enabled_) {this.enabled_ = true;this.removeClass('vjs-disabled');this.el_.setAttribute('aria-disabled', 'false');if (typeof this.tabIndex_ !== 'undefined') {this.el_.setAttribute('tabIndex', this.tabIndex_);}this.on(['tap', 'click'], this.handleClick_);this.on('keydown', this.handleKeyDown_);}}/*** Disable this `ClickableComponent`*/disable() {this.enabled_ = false;this.addClass('vjs-disabled');this.el_.setAttribute('aria-disabled', 'true');if (typeof this.tabIndex_ !== 'undefined') {this.el_.removeAttribute('tabIndex');}this.off('mouseover', this.handleMouseOver_);this.off('mouseout', this.handleMouseOut_);this.off(['tap', 'click'], this.handleClick_);this.off('keydown', this.handleKeyDown_);}/*** Handles language change in ClickableComponent for the player in components***/handleLanguagechange() {this.controlText(this.controlText_);}/*** Event handler that is called when a `ClickableComponent` receives a* `click` or `tap` event.** @param {Event} event* The `tap` or `click` event that caused this function to be called.** @listens tap* @listens click* @abstract*/handleClick(event) {if (this.options_.clickHandler) {this.options_.clickHandler.call(this, arguments);}}/*** Event handler that is called when a `ClickableComponent` receives a* `keydown` event.** By default, if the key is Space or Enter, it will trigger a `click` event.** @param {KeyboardEvent} event* The `keydown` event that caused this function to be called.** @listens keydown*/handleKeyDown(event) {// Support Space or Enter key operation to fire a click event. Also,// prevent the event from propagating through the DOM and triggering// Player hotkeys.if (keycode.isEventKey(event, 'Space') || keycode.isEventKey(event, 'Enter')) {event.preventDefault();event.stopPropagation();this.trigger('click');} else {// Pass keypress handling up for unsupported keyssuper.handleKeyDown(event);}}}Component$1.registerComponent('ClickableComponent', ClickableComponent);/*** @file poster-image.js*//*** A `ClickableComponent` that handles showing the poster image for the player.** @extends ClickableComponent*/class PosterImage extends ClickableComponent {/*** Create an instance of this class.** @param { import('./player').default } player* The `Player` that this class should attach to.** @param {Object} [options]* The key/value store of player options.*/constructor(player, options) {super(player, options);this.update();this.update_ = e => this.update(e);player.on('posterchange', this.update_);}/*** Clean up and dispose of the `PosterImage`.*/dispose() {this.player().off('posterchange', this.update_);super.dispose();}/*** Create the `PosterImage`s DOM element.** @return {Element}* The element that gets created.*/createEl() {// The el is an empty div to keep position in the DOM// A picture and img el will be inserted when a source is setreturn createEl('div', {className: 'vjs-poster'});}/*** Get or set the `PosterImage`'s crossOrigin option.** @param {string|null} [value]* The value to set the crossOrigin to. 
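* For orientation, a minimal sketch (not part of the original comment; `poster` stands for any
* PosterImage instance, however obtained):
*
*   poster.crossOrigin();                 // read the current/effective value
*   poster.crossOrigin('anonymous');      // request the poster image without credentials
*   poster.crossOrigin('no-cors');        // rejected: logs a warning, value left unchanged
*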
If an argument is* given, must be one of `'anonymous'` or `'use-credentials'`, or 'null'.** @return {string|null}* - The current crossOrigin value of the `Player` when getting.* - undefined when setting*/crossOrigin(value) {// `null` can be set to unset a valueif (typeof value === 'undefined') {if (this.$('img')) {// If the poster's element exists, give its valuereturn this.$('img').crossOrigin;} else if (this.player_.tech_ && this.player_.tech_.isReady_) {// If not but the tech is ready, query the techreturn this.player_.crossOrigin();}// Otherwise check options as the poster is usually set before the state of crossorigin// can be retrieved by the getterreturn this.player_.options_.crossOrigin || this.player_.options_.crossorigin || null;}if (value !== null && value !== 'anonymous' && value !== 'use-credentials') {this.player_.log.warn(`crossOrigin must be null, "anonymous" or "use-credentials", given "${value}"`);return;}if (this.$('img')) {this.$('img').crossOrigin = value;}return;}/*** An {@link EventTarget~EventListener} for {@link Player#posterchange} events.** @listens Player#posterchange** @param {Event} [event]* The `Player#posterchange` event that triggered this function.*/update(event) {const url = this.player().poster();this.setSrc(url);// If there's no poster source we should display:none on this component// so it's not still clickable or right-clickableif (url) {this.show();} else {this.hide();}}/*** Set the source of the `PosterImage` depending on the display method. (Re)creates* the inner picture and img elementss when needed.** @param {string} [url]* The URL to the source for the `PosterImage`. If not specified or falsy,* any source and ant inner picture/img are removed.*/setSrc(url) {if (!url) {this.el_.textContent = '';return;}if (!this.$('img')) {this.el_.appendChild(createEl('picture', {className: 'vjs-poster',// Don't want poster to be tabbable.tabIndex: -1}, {}, createEl('img', {loading: 'lazy',crossOrigin: this.crossOrigin()}, {alt: ''})));}this.$('img').src = url;}/*** An {@link EventTarget~EventListener} for clicks on the `PosterImage`. See* {@link ClickableComponent#handleClick} for instances where this will be triggered.** @listens tap* @listens click* @listens keydown** @param {Event} event+ The `click`, `tap` or `keydown` event that caused this function to be called.*/handleClick(event) {// We don't want a click to trigger playback when controls are disabledif (!this.player_.controls()) {return;}if (this.player_.tech(true)) {this.player_.tech(true).focus();}if (this.player_.paused()) {silencePromise(this.player_.play());} else {this.player_.pause();}}}/*** Get or set the `PosterImage`'s crossorigin option. For the HTML5 player, this* sets the `crossOrigin` property on the `<img>` tag to control the CORS* behavior.** @param {string|null} [value]* The value to set the `PosterImages`'s crossorigin to. 
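* A one-line sketch of the lower-case alias defined below (assumes `poster` is any PosterImage
* instance):
*
*   poster.crossorigin('use-credentials'); // identical to poster.crossOrigin('use-credentials')
*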
If an argument is* given, must be one of `anonymous` or `use-credentials`.** @return {string|null|undefined}* - The current crossorigin value of the `Player` when getting.* - undefined when setting*/PosterImage.prototype.crossorigin = PosterImage.prototype.crossOrigin;Component$1.registerComponent('PosterImage', PosterImage);/*** @file text-track-display.js*/const darkGray = '#222';const lightGray = '#ccc';const fontMap = {monospace: 'monospace',sansSerif: 'sans-serif',serif: 'serif',monospaceSansSerif: '"Andale Mono", "Lucida Console", monospace',monospaceSerif: '"Courier New", monospace',proportionalSansSerif: 'sans-serif',proportionalSerif: 'serif',casual: '"Comic Sans MS", Impact, fantasy',script: '"Monotype Corsiva", cursive',smallcaps: '"Andale Mono", "Lucida Console", monospace, sans-serif'};/*** Construct an rgba color from a given hex color code.** @param {number} color* Hex number for color, like #f0e or #f604e2.** @param {number} opacity* Value for opacity, 0.0 - 1.0.** @return {string}* The rgba color that was created, like 'rgba(255, 0, 0, 0.3)'.*/function constructColor(color, opacity) {let hex;if (color.length === 4) {// color looks like "#f0e"hex = color[1] + color[1] + color[2] + color[2] + color[3] + color[3];} else if (color.length === 7) {// color looks like "#f604e2"hex = color.slice(1);} else {throw new Error('Invalid color code provided, ' + color + '; must be formatted as e.g. #f0e or #f604e2.');}return 'rgba(' + parseInt(hex.slice(0, 2), 16) + ',' + parseInt(hex.slice(2, 4), 16) + ',' + parseInt(hex.slice(4, 6), 16) + ',' + opacity + ')';}/*** Try to update the style of a DOM element. Some style changes will throw an error,* particularly in IE8. Those should be noops.** @param {Element} el* The DOM element to be styled.** @param {string} style* The CSS property on the element that should be styled.** @param {string} rule* The style rule that should be applied to the property.** @private*/function tryUpdateStyle(el, style, rule) {try {el.style[style] = rule;} catch (e) {// Satisfies linter.return;}}/*** Converts the CSS top/right/bottom/left property numeric value to string in pixels.** @param {number} position* The CSS top/right/bottom/left property value.** @return {string}* The CSS property value that was created, like '10px'.** @private*/function getCSSPositionValue(position) {return position ? 
`${position}px` : '';}/*** The component for displaying text track cues.** @extends Component*/class TextTrackDisplay extends Component$1 {/*** Creates an instance of this class.** @param { import('../player').default } player* The `Player` that this class should be attached to.** @param {Object} [options]* The key/value store of player options.** @param {Function} [ready]* The function to call when `TextTrackDisplay` is ready.*/constructor(player, options, ready) {super(player, options, ready);const updateDisplayTextHandler = e => this.updateDisplay(e);const updateDisplayHandler = e => {this.updateDisplayOverlay();this.updateDisplay(e);};player.on('loadstart', e => this.toggleDisplay(e));player.on('texttrackchange', updateDisplayTextHandler);player.on('loadedmetadata', e => {this.updateDisplayOverlay();this.preselectTrack(e);});// This used to be called during player init, but was causing an error// if a track should show by default and the display hadn't loaded yet.// Should probably be moved to an external track loader when we support// tracks that don't need a display.player.ready(bind_(this, function () {if (player.tech_ && player.tech_.featuresNativeTextTracks) {this.hide();return;}player.on('fullscreenchange', updateDisplayHandler);player.on('playerresize', updateDisplayHandler);const screenOrientation = window.screen.orientation || window;const changeOrientationEvent = window.screen.orientation ? 'change' : 'orientationchange';screenOrientation.addEventListener(changeOrientationEvent, updateDisplayHandler);player.on('dispose', () => screenOrientation.removeEventListener(changeOrientationEvent, updateDisplayHandler));const tracks = this.options_.playerOptions.tracks || [];for (let i = 0; i < tracks.length; i++) {this.player_.addRemoteTextTrack(tracks[i], true);}this.preselectTrack();}));}/*** Preselect a track following this precedence:* - matches the previously selected {@link TextTrack}'s language and kind* - matches the previously selected {@link TextTrack}'s language only* - is the first default captions track* - is the first default descriptions track** @listens Player#loadstart*/preselectTrack() {const modes = {captions: 1,subtitles: 1};const trackList = this.player_.textTracks();const userPref = this.player_.cache_.selectedLanguage;let firstDesc;let firstCaptions;let preferredTrack;for (let i = 0; i < trackList.length; i++) {const track = trackList[i];if (userPref && userPref.enabled && userPref.language && userPref.language === track.language && track.kind in modes) {// Always choose the track that matches both language and kindif (track.kind === userPref.kind) {preferredTrack = track;// or choose the first track that matches language} else if (!preferredTrack) {preferredTrack = track;}// clear everything if offTextTrackMenuItem was clicked} else if (userPref && !userPref.enabled) {preferredTrack = null;firstDesc = null;firstCaptions = null;} else if (track.default) {if (track.kind === 'descriptions' && !firstDesc) {firstDesc = track;} else if (track.kind in modes && !firstCaptions) {firstCaptions = track;}}}// The preferredTrack matches the user preference and takes// precedence over all the other tracks.// So, display the preferredTrack before the first default track// and the subtitles/captions track before the descriptions trackif (preferredTrack) {preferredTrack.mode = 'showing';} else if (firstCaptions) {firstCaptions.mode = 'showing';} else if (firstDesc) {firstDesc.mode = 'showing';}}/*** Turn display of {@link TextTrack}'s from the current state into the other 
state.* There are only two states:* - 'shown'* - 'hidden'** @listens Player#loadstart*/toggleDisplay() {if (this.player_.tech_ && this.player_.tech_.featuresNativeTextTracks) {this.hide();} else {this.show();}}/*** Create the {@link Component}'s DOM element.** @return {Element}* The element that was created.*/createEl() {return super.createEl('div', {className: 'vjs-text-track-display'}, {'translate': 'yes','aria-live': 'off','aria-atomic': 'true'});}/*** Clear all displayed {@link TextTrack}s.*/clearDisplay() {if (typeof window.WebVTT === 'function') {window.WebVTT.processCues(window, [], this.el_);}}/*** Update the displayed TextTrack when a either a {@link Player#texttrackchange} or* a {@link Player#fullscreenchange} is fired.** @listens Player#texttrackchange* @listens Player#fullscreenchange*/updateDisplay() {const tracks = this.player_.textTracks();const allowMultipleShowingTracks = this.options_.allowMultipleShowingTracks;this.clearDisplay();if (allowMultipleShowingTracks) {const showingTracks = [];for (let i = 0; i < tracks.length; ++i) {const track = tracks[i];if (track.mode !== 'showing') {continue;}showingTracks.push(track);}this.updateForTrack(showingTracks);return;}// Track display prioritization model: if multiple tracks are 'showing',// display the first 'subtitles' or 'captions' track which is 'showing',// otherwise display the first 'descriptions' track which is 'showing'let descriptionsTrack = null;let captionsSubtitlesTrack = null;let i = tracks.length;while (i--) {const track = tracks[i];if (track.mode === 'showing') {if (track.kind === 'descriptions') {descriptionsTrack = track;} else {captionsSubtitlesTrack = track;}}}if (captionsSubtitlesTrack) {if (this.getAttribute('aria-live') !== 'off') {this.setAttribute('aria-live', 'off');}this.updateForTrack(captionsSubtitlesTrack);} else if (descriptionsTrack) {if (this.getAttribute('aria-live') !== 'assertive') {this.setAttribute('aria-live', 'assertive');}this.updateForTrack(descriptionsTrack);}}/*** Updates the displayed TextTrack to be sure it overlays the video when a either* a {@link Player#texttrackchange} or a {@link Player#fullscreenchange} is fired.*/updateDisplayOverlay() {// inset-inline and inset-block are not supprted on old chrome, but these are// only likely to be used on TV devicesif (!this.player_.videoHeight() || !window.CSS.supports('inset-inline: 10px')) {return;}const playerWidth = this.player_.currentWidth();const playerHeight = this.player_.currentHeight();const playerAspectRatio = playerWidth / playerHeight;const videoAspectRatio = this.player_.videoWidth() / this.player_.videoHeight();let insetInlineMatch = 0;let insetBlockMatch = 0;if (Math.abs(playerAspectRatio - videoAspectRatio) > 0.1) {if (playerAspectRatio > videoAspectRatio) {insetInlineMatch = Math.round((playerWidth - playerHeight * videoAspectRatio) / 2);} else {insetBlockMatch = Math.round((playerHeight - playerWidth / videoAspectRatio) / 2);}}tryUpdateStyle(this.el_, 'insetInline', getCSSPositionValue(insetInlineMatch));tryUpdateStyle(this.el_, 'insetBlock', getCSSPositionValue(insetBlockMatch));}/*** Style {@Link TextTrack} activeCues according to {@Link TextTrackSettings}.** @param {TextTrack} track* Text track object containing active cues to style.*/updateDisplayState(track) {const overrides = this.player_.textTrackSettings.getValues();const cues = track.activeCues;let i = cues.length;while (i--) {const cue = cues[i];if (!cue) {continue;}const cueDiv = cue.displayState;if (overrides.color) {cueDiv.firstChild.style.color = 
overrides.color;}if (overrides.textOpacity) {tryUpdateStyle(cueDiv.firstChild, 'color', constructColor(overrides.color || '#fff', overrides.textOpacity));}if (overrides.backgroundColor) {cueDiv.firstChild.style.backgroundColor = overrides.backgroundColor;}if (overrides.backgroundOpacity) {tryUpdateStyle(cueDiv.firstChild, 'backgroundColor', constructColor(overrides.backgroundColor || '#000', overrides.backgroundOpacity));}if (overrides.windowColor) {if (overrides.windowOpacity) {tryUpdateStyle(cueDiv, 'backgroundColor', constructColor(overrides.windowColor, overrides.windowOpacity));} else {cueDiv.style.backgroundColor = overrides.windowColor;}}if (overrides.edgeStyle) {if (overrides.edgeStyle === 'dropshadow') {cueDiv.firstChild.style.textShadow = `2px 2px 3px ${darkGray}, 2px 2px 4px ${darkGray}, 2px 2px 5px ${darkGray}`;} else if (overrides.edgeStyle === 'raised') {cueDiv.firstChild.style.textShadow = `1px 1px ${darkGray}, 2px 2px ${darkGray}, 3px 3px ${darkGray}`;} else if (overrides.edgeStyle === 'depressed') {cueDiv.firstChild.style.textShadow = `1px 1px ${lightGray}, 0 1px ${lightGray}, -1px -1px ${darkGray}, 0 -1px ${darkGray}`;} else if (overrides.edgeStyle === 'uniform') {cueDiv.firstChild.style.textShadow = `0 0 4px ${darkGray}, 0 0 4px ${darkGray}, 0 0 4px ${darkGray}, 0 0 4px ${darkGray}`;}}if (overrides.fontPercent && overrides.fontPercent !== 1) {const fontSize = window.parseFloat(cueDiv.style.fontSize);cueDiv.style.fontSize = fontSize * overrides.fontPercent + 'px';cueDiv.style.height = 'auto';cueDiv.style.top = 'auto';}if (overrides.fontFamily && overrides.fontFamily !== 'default') {if (overrides.fontFamily === 'small-caps') {cueDiv.firstChild.style.fontVariant = 'small-caps';} else {cueDiv.firstChild.style.fontFamily = fontMap[overrides.fontFamily];}}}}/*** Add an {@link TextTrack} to to the {@link Tech}s {@link TextTrackList}.** @param {TextTrack|TextTrack[]} tracks* Text track object or text track array to be added to the list.*/updateForTrack(tracks) {if (!Array.isArray(tracks)) {tracks = [tracks];}if (typeof window.WebVTT !== 'function' || tracks.every(track => {return !track.activeCues;})) {return;}const cues = [];// push all active track cuesfor (let i = 0; i < tracks.length; ++i) {const track = tracks[i];for (let j = 0; j < track.activeCues.length; ++j) {cues.push(track.activeCues[j]);}}// removes all cues before it processes new oneswindow.WebVTT.processCues(window, cues, this.el_);// add unique class to each language text track & add settings styling if necessaryfor (let i = 0; i < tracks.length; ++i) {const track = tracks[i];for (let j = 0; j < track.activeCues.length; ++j) {const cueEl = track.activeCues[j].displayState;addClass(cueEl, 'vjs-text-track-cue', 'vjs-text-track-cue-' + (track.language ? track.language : i));if (track.language) {setAttribute(cueEl, 'lang', track.language);}}if (this.player_.textTrackSettings) {this.updateDisplayState(track);}}}}Component$1.registerComponent('TextTrackDisplay', TextTrackDisplay);/*** @file loading-spinner.js*//*** A loading spinner for use during waiting/loading events.** @extends Component*/class LoadingSpinner extends Component$1 {/*** Create the `LoadingSpinner`s DOM element.** @return {Element}* The dom element that gets created.*/createEl() {const isAudio = this.player_.isAudio();const playerType = this.localize(isAudio ? 
'Audio Player' : 'Video Player');const controlText = createEl('span', {className: 'vjs-control-text',textContent: this.localize('{1} is loading.', [playerType])});const el = super.createEl('div', {className: 'vjs-loading-spinner',dir: 'ltr'});el.appendChild(controlText);return el;}/*** Update control text on languagechange*/handleLanguagechange() {this.$('.vjs-control-text').textContent = this.localize('{1} is loading.', [this.player_.isAudio() ? 'Audio Player' : 'Video Player']);}}Component$1.registerComponent('LoadingSpinner', LoadingSpinner);/*** @file button.js*//*** Base class for all buttons.** @extends ClickableComponent*/class Button extends ClickableComponent {/*** Create the `Button`s DOM element.** @param {string} [tag="button"]* The element's node type. This argument is IGNORED: no matter what* is passed, it will always create a `button` element.** @param {Object} [props={}]* An object of properties that should be set on the element.** @param {Object} [attributes={}]* An object of attributes that should be set on the element.** @return {Element}* The element that gets created.*/createEl(tag, props = {}, attributes = {}) {tag = 'button';props = Object.assign({className: this.buildCSSClass()}, props);// Add attributes for button elementattributes = Object.assign({// Necessary since the default button type is "submit"type: 'button'}, attributes);const el = createEl(tag, props, attributes);if (!this.player_.options_.experimentalSvgIcons) {el.appendChild(createEl('span', {className: 'vjs-icon-placeholder'}, {'aria-hidden': true}));}this.createControlTextEl(el);return el;}/*** Add a child `Component` inside of this `Button`.** @param {string|Component} child* The name or instance of a child to add.** @param {Object} [options={}]* The key/value store of options that will get passed to children of* the child.** @return {Component}* The `Component` that gets added as a child. When using a string the* `Component` will get created by this process.** @deprecated since version 5*/addChild(child, options = {}) {const className = this.constructor.name;log$1.warn(`Adding an actionable (user controllable) child to a Button (${className}) is not supported; use a ClickableComponent instead.`);// Avoid the error message generated by ClickableComponent's addChild methodreturn Component$1.prototype.addChild.call(this, child, options);}/*** Enable the `Button` element so that it can be activated or clicked. Use this with* {@link Button#disable}.*/enable() {super.enable();this.el_.removeAttribute('disabled');}/*** Disable the `Button` element so that it cannot be activated or clicked. Use this with* {@link Button#enable}.*/disable() {super.disable();this.el_.setAttribute('disabled', 'disabled');}/*** This gets called when a `Button` has focus and `keydown` is triggered via a key* press.** @param {KeyboardEvent} event* The event that caused this function to get called.** @listens keydown*/handleKeyDown(event) {// Ignore Space or Enter key operation, which is handled by the browser for// a button - though not for its super class, ClickableComponent. Also,// prevent the event from propagating through the DOM and triggering Player// hotkeys. 
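// A hedged subclass sketch, for illustration only (MyButton is a hypothetical name; from
// outside this bundle, videojs.getComponent('Button') resolves the same base class):
//
//   class MyButton extends Button {
//     handleKeyDown(event) {
//       if (keycode.isEventKey(event, 'm')) {
//         this.player_.muted(!this.player_.muted());
//         event.stopPropagation(); // keep the shortcut from also reaching Player hotkeys
//         return;
//       }
//       super.handleKeyDown(event); // Space/Enter stay with the browser; other keys bubble up
//     }
//   }
//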
We do not preventDefault here because we _want_ the browser to// handle it.if (keycode.isEventKey(event, 'Space') || keycode.isEventKey(event, 'Enter')) {event.stopPropagation();return;}// Pass keypress handling up for unsupported keyssuper.handleKeyDown(event);}}Component$1.registerComponent('Button', Button);/*** @file big-play-button.js*//*** The initial play button that shows before the video has played. The hiding of the* `BigPlayButton` get done via CSS and `Player` states.** @extends Button*/class BigPlayButton extends Button {constructor(player, options) {super(player, options);this.mouseused_ = false;this.setIcon('play');this.on('mousedown', e => this.handleMouseDown(e));}/*** Builds the default DOM `className`.** @return {string}* The DOM `className` for this object. Always returns 'vjs-big-play-button'.*/buildCSSClass() {return 'vjs-big-play-button';}/*** This gets called when a `BigPlayButton` "clicked". See {@link ClickableComponent}* for more detailed information on what a click can be.** @param {KeyboardEvent|MouseEvent|TouchEvent} event* The `keydown`, `tap`, or `click` event that caused this function to be* called.** @listens tap* @listens click*/handleClick(event) {const playPromise = this.player_.play();// exit early if clicked via the mouseif (this.mouseused_ && 'clientX' in event && 'clientY' in event) {silencePromise(playPromise);if (this.player_.tech(true)) {this.player_.tech(true).focus();}return;}const cb = this.player_.getChild('controlBar');const playToggle = cb && cb.getChild('playToggle');if (!playToggle) {this.player_.tech(true).focus();return;}const playFocus = () => playToggle.focus();if (isPromise(playPromise)) {playPromise.then(playFocus, () => {});} else {this.setTimeout(playFocus, 1);}}/*** Event handler that is called when a `BigPlayButton` receives a* `keydown` event.** @param {KeyboardEvent} event* The `keydown` event that caused this function to be called.** @listens keydown*/handleKeyDown(event) {this.mouseused_ = false;super.handleKeyDown(event);}/*** Handle `mousedown` events on the `BigPlayButton`.** @param {MouseEvent} event* `mousedown` or `touchstart` event that triggered this function** @listens mousedown*/handleMouseDown(event) {this.mouseused_ = true;}}/*** The text that should display over the `BigPlayButton`s controls. Added to for localization.** @type {string}* @protected*/BigPlayButton.prototype.controlText_ = 'Play Video';Component$1.registerComponent('BigPlayButton', BigPlayButton);/*** @file close-button.js*//*** The `CloseButton` is a `{@link Button}` that fires a `close` event when* it gets clicked.** @extends Button*/class CloseButton extends Button {/*** Creates an instance of the this class.** @param { import('./player').default } player* The `Player` that this class should be attached to.** @param {Object} [options]* The key/value store of player options.*/constructor(player, options) {super(player, options);this.setIcon('cancel');this.controlText(options && options.controlText || this.localize('Close'));}/*** Builds the default DOM `className`.** @return {string}* The DOM `className` for this object.*/buildCSSClass() {return `vjs-close-button ${super.buildCSSClass()}`;}/*** This gets called when a `CloseButton` gets clicked. 
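* A minimal usage sketch (for illustration; `dialog` is a hypothetical ModalDialog-like parent,
* and a CloseButton is usually created for you by such components):
*
*   const closeBtn = dialog.addChild('CloseButton');
*   closeBtn.on('close', () => dialog.close());
*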
See* {@link ClickableComponent#handleClick} for more information on when* this will be triggered** @param {Event} event* The `keydown`, `tap`, or `click` event that caused this function to be* called.** @listens tap* @listens click* @fires CloseButton#close*/handleClick(event) {/*** Triggered when the a `CloseButton` is clicked.** @event CloseButton#close* @type {Event}** @property {boolean} [bubbles=false]* set to false so that the close event does not* bubble up to parents if there is no listener*/this.trigger({type: 'close',bubbles: false});}/*** Event handler that is called when a `CloseButton` receives a* `keydown` event.** By default, if the key is Esc, it will trigger a `click` event.** @param {KeyboardEvent} event* The `keydown` event that caused this function to be called.** @listens keydown*/handleKeyDown(event) {// Esc button will trigger `click` eventif (keycode.isEventKey(event, 'Esc')) {event.preventDefault();event.stopPropagation();this.trigger('click');} else {// Pass keypress handling up for unsupported keyssuper.handleKeyDown(event);}}}Component$1.registerComponent('CloseButton', CloseButton);/*** @file play-toggle.js*//*** Button to toggle between play and pause.** @extends Button*/class PlayToggle extends Button {/*** Creates an instance of this class.** @param { import('./player').default } player* The `Player` that this class should be attached to.** @param {Object} [options={}]* The key/value store of player options.*/constructor(player, options = {}) {super(player, options);// show or hide replay iconoptions.replay = options.replay === undefined || options.replay;this.setIcon('play');this.on(player, 'play', e => this.handlePlay(e));this.on(player, 'pause', e => this.handlePause(e));if (options.replay) {this.on(player, 'ended', e => this.handleEnded(e));}}/*** Builds the default DOM `className`.** @return {string}* The DOM `className` for this object.*/buildCSSClass() {return `vjs-play-control ${super.buildCSSClass()}`;}/*** This gets called when an `PlayToggle` is "clicked". 
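* A configuration sketch for the `replay` option read in the constructor above (illustrative
* only; 'my-player' is a placeholder id and the usual per-component options path is assumed):
*
*   videojs('my-player', { controlBar: { playToggle: { replay: false } } });
*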
See* {@link ClickableComponent} for more detailed information on what a click can be.** @param {Event} [event]* The `keydown`, `tap`, or `click` event that caused this function to be* called.** @listens tap* @listens click*/handleClick(event) {if (this.player_.paused()) {silencePromise(this.player_.play());} else {this.player_.pause();}}/*** This gets called once after the video has ended and the user seeks so that* we can change the replay button back to a play button.** @param {Event} [event]* The event that caused this function to run.** @listens Player#seeked*/handleSeeked(event) {this.removeClass('vjs-ended');if (this.player_.paused()) {this.handlePause(event);} else {this.handlePlay(event);}}/*** Add the vjs-playing class to the element so it can change appearance.** @param {Event} [event]* The event that caused this function to run.** @listens Player#play*/handlePlay(event) {this.removeClass('vjs-ended', 'vjs-paused');this.addClass('vjs-playing');// change the button text to "Pause"this.setIcon('pause');this.controlText('Pause');}/*** Add the vjs-paused class to the element so it can change appearance.** @param {Event} [event]* The event that caused this function to run.** @listens Player#pause*/handlePause(event) {this.removeClass('vjs-playing');this.addClass('vjs-paused');// change the button text to "Play"this.setIcon('play');this.controlText('Play');}/*** Add the vjs-ended class to the element so it can change appearance** @param {Event} [event]* The event that caused this function to run.** @listens Player#ended*/handleEnded(event) {this.removeClass('vjs-playing');this.addClass('vjs-ended');// change the button text to "Replay"this.setIcon('replay');this.controlText('Replay');// on the next seek remove the replay buttonthis.one(this.player_, 'seeked', e => this.handleSeeked(e));}}/*** The text that should display over the `PlayToggle`s controls. Added for localization.** @type {string}* @protected*/PlayToggle.prototype.controlText_ = 'Play';Component$1.registerComponent('PlayToggle', PlayToggle);/*** @file time-display.js*//*** Displays time information about the video** @extends Component*/class TimeDisplay extends Component$1 {/*** Creates an instance of this class.** @param { import('../../player').default } player* The `Player` that this class should be attached to.** @param {Object} [options]* The key/value store of player options.*/constructor(player, options) {super(player, options);this.on(player, ['timeupdate', 'ended', 'seeking'], e => this.update(e));this.updateTextNode_();}/*** Create the `Component`'s DOM element** @return {Element}* The element that was created.*/createEl() {const className = this.buildCSSClass();const el = super.createEl('div', {className: `${className} vjs-time-control vjs-control`});const span = createEl('span', {className: 'vjs-control-text',textContent: `${this.localize(this.labelText_)}\u00a0`}, {role: 'presentation'});el.appendChild(span);this.contentEl_ = createEl('span', {className: `${className}-display`}, {// span elements have no implicit role, but some screen readers (notably VoiceOver)// treat them as a break between items in the DOM when using arrow keys// (or left-to-right swipes on iOS) to read contents of a page. 
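// Roughly, for the CurrentTimeDisplay subclass this yields markup along these lines
// (illustrative only, whitespace added for readability):
//   <div class="vjs-current-time vjs-time-control vjs-control">
//     <span class="vjs-control-text" role="presentation">Current Time&nbsp;</span>
//     <span class="vjs-current-time-display" role="presentation"></span>  (time text node appended later by updateTextNode_)
//   </div>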
Using// role='presentation' causes VoiceOver to NOT treat this span as a break.role: 'presentation'});el.appendChild(this.contentEl_);return el;}dispose() {this.contentEl_ = null;this.textNode_ = null;super.dispose();}/*** Updates the displayed time according to the `updateContent` function which is defined in the child class.** @param {Event} [event]* The `timeupdate`, `ended` or `seeking` (if enableSmoothSeeking is true) event that caused this function to be called.*/update(event) {if (!this.player_.options_.enableSmoothSeeking && event.type === 'seeking') {return;}this.updateContent(event);}/*** Updates the time display text node with a new time** @param {number} [time=0] the time to update to** @private*/updateTextNode_(time = 0) {time = formatTime(time);if (this.formattedTime_ === time) {return;}this.formattedTime_ = time;this.requestNamedAnimationFrame('TimeDisplay#updateTextNode_', () => {if (!this.contentEl_) {return;}let oldNode = this.textNode_;if (oldNode && this.contentEl_.firstChild !== oldNode) {oldNode = null;log$1.warn('TimeDisplay#updateTextnode_: Prevented replacement of text node element since it was no longer a child of this node. Appending a new node instead.');}this.textNode_ = document.createTextNode(this.formattedTime_);if (!this.textNode_) {return;}if (oldNode) {this.contentEl_.replaceChild(this.textNode_, oldNode);} else {this.contentEl_.appendChild(this.textNode_);}});}/*** To be filled out in the child class, should update the displayed time* in accordance with the fact that the current time has changed.** @param {Event} [event]* The `timeupdate` event that caused this to run.** @listens Player#timeupdate*/updateContent(event) {}}/*** The text that is added to the `TimeDisplay` for screen reader users.** @type {string}* @private*/TimeDisplay.prototype.labelText_ = 'Time';/*** The text that should display over the `TimeDisplay`s controls. Added to for localization.** @type {string}* @protected** @deprecated in v7; controlText_ is not used in non-active display Components*/TimeDisplay.prototype.controlText_ = 'Time';Component$1.registerComponent('TimeDisplay', TimeDisplay);/*** @file current-time-display.js*//*** Displays the current time** @extends Component*/class CurrentTimeDisplay extends TimeDisplay {/*** Builds the default DOM `className`.** @return {string}* The DOM `className` for this object.*/buildCSSClass() {return 'vjs-current-time';}/*** Update current time display** @param {Event} [event]* The `timeupdate` event that caused this function to run.** @listens Player#timeupdate*/updateContent(event) {// Allows for smooth scrubbing, when player can't keep up.let time;if (this.player_.ended()) {time = this.player_.duration();} else {time = this.player_.scrubbing() ? this.player_.getCache().currentTime : this.player_.currentTime();}this.updateTextNode_(time);}}/*** The text that is added to the `CurrentTimeDisplay` for screen reader users.** @type {string}* @private*/CurrentTimeDisplay.prototype.labelText_ = 'Current Time';/*** The text that should display over the `CurrentTimeDisplay`s controls. 
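* For orientation, a hedged sketch of where the component sits in the default UI (assumes the
* stock control bar layout):
*
*   const display = player.getChild('controlBar').getChild('currentTimeDisplay');
*   display.updateContent(); // re-reads player.currentTime(), or the cached time while scrubbing
*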
Added to for localization.** @type {string}* @protected** @deprecated in v7; controlText_ is not used in non-active display Components*/CurrentTimeDisplay.prototype.controlText_ = 'Current Time';Component$1.registerComponent('CurrentTimeDisplay', CurrentTimeDisplay);/*** @file duration-display.js*//*** Displays the duration** @extends Component*/class DurationDisplay extends TimeDisplay {/*** Creates an instance of this class.** @param { import('../../player').default } player* The `Player` that this class should be attached to.** @param {Object} [options]* The key/value store of player options.*/constructor(player, options) {super(player, options);const updateContent = e => this.updateContent(e);// we do not want to/need to throttle duration changes,// as they should always display the changed duration as// it has changedthis.on(player, 'durationchange', updateContent);// Listen to loadstart because the player duration is reset when a new media element is loaded,// but the durationchange on the user agent will not fire.// @see [Spec]{@link https://www.w3.org/TR/2011/WD-html5-20110113/video.html#media-element-load-algorithm}this.on(player, 'loadstart', updateContent);// Also listen for timeupdate (in the parent) and loadedmetadata because removing those// listeners could have broken dependent applications/libraries. These// can likely be removed for 7.0.this.on(player, 'loadedmetadata', updateContent);}/*** Builds the default DOM `className`.** @return {string}* The DOM `className` for this object.*/buildCSSClass() {return 'vjs-duration';}/*** Update duration time display.** @param {Event} [event]* The `durationchange`, `timeupdate`, or `loadedmetadata` event that caused* this function to be called.** @listens Player#durationchange* @listens Player#timeupdate* @listens Player#loadedmetadata*/updateContent(event) {const duration = this.player_.duration();this.updateTextNode_(duration);}}/*** The text that is added to the `DurationDisplay` for screen reader users.** @type {string}* @private*/DurationDisplay.prototype.labelText_ = 'Duration';/*** The text that should display over the `DurationDisplay`s controls. 
Added to for localization.** @type {string}* @protected** @deprecated in v7; controlText_ is not used in non-active display Components*/DurationDisplay.prototype.controlText_ = 'Duration';Component$1.registerComponent('DurationDisplay', DurationDisplay);/*** @file time-divider.js*//*** The separator between the current time and duration.* Can be hidden if it's not needed in the design.** @extends Component*/class TimeDivider extends Component$1 {/*** Create the component's DOM element** @return {Element}* The element that was created.*/createEl() {const el = super.createEl('div', {className: 'vjs-time-control vjs-time-divider'}, {// this element and its contents can be hidden from assistive techs since// it is made extraneous by the announcement of the control text// for the current time and duration displays'aria-hidden': true});const div = super.createEl('div');const span = super.createEl('span', {textContent: '/'});div.appendChild(span);el.appendChild(div);return el;}}Component$1.registerComponent('TimeDivider', TimeDivider);/*** @file remaining-time-display.js*//*** Displays the time left in the video** @extends Component*/class RemainingTimeDisplay extends TimeDisplay {/*** Creates an instance of this class.** @param { import('../../player').default } player* The `Player` that this class should be attached to.** @param {Object} [options]* The key/value store of player options.*/constructor(player, options) {super(player, options);this.on(player, 'durationchange', e => this.updateContent(e));}/*** Builds the default DOM `className`.** @return {string}* The DOM `className` for this object.*/buildCSSClass() {return 'vjs-remaining-time';}/*** Create the `Component`'s DOM element with the "minus" character prepend to the time** @return {Element}* The element that was created.*/createEl() {const el = super.createEl();if (this.options_.displayNegative !== false) {el.insertBefore(createEl('span', {}, {'aria-hidden': true}, '-'), this.contentEl_);}return el;}/*** Update remaining time display.** @param {Event} [event]* The `timeupdate` or `durationchange` event that caused this to run.** @listens Player#timeupdate* @listens Player#durationchange*/updateContent(event) {if (typeof this.player_.duration() !== 'number') {return;}let time;// @deprecated We should only use remainingTimeDisplay// as of video.js 7if (this.player_.ended()) {time = 0;} else if (this.player_.remainingTimeDisplay) {time = this.player_.remainingTimeDisplay();} else {time = this.player_.remainingTime();}this.updateTextNode_(time);}}/*** The text that is added to the `RemainingTimeDisplay` for screen reader users.** @type {string}* @private*/RemainingTimeDisplay.prototype.labelText_ = 'Remaining Time';/*** The text that should display over the `RemainingTimeDisplay`s controls. 
Added to for localization.** @type {string}* @protected** @deprecated in v7; controlText_ is not used in non-active display Components*/RemainingTimeDisplay.prototype.controlText_ = 'Remaining Time';Component$1.registerComponent('RemainingTimeDisplay', RemainingTimeDisplay);/*** @file live-display.js*/// TODO - Future make it click to snap to live/*** Displays the live indicator when duration is Infinity.** @extends Component*/class LiveDisplay extends Component$1 {/*** Creates an instance of this class.** @param { import('./player').default } player* The `Player` that this class should be attached to.** @param {Object} [options]* The key/value store of player options.*/constructor(player, options) {super(player, options);this.updateShowing();this.on(this.player(), 'durationchange', e => this.updateShowing(e));}/*** Create the `Component`'s DOM element** @return {Element}* The element that was created.*/createEl() {const el = super.createEl('div', {className: 'vjs-live-control vjs-control'});this.contentEl_ = createEl('div', {className: 'vjs-live-display'}, {'aria-live': 'off'});this.contentEl_.appendChild(createEl('span', {className: 'vjs-control-text',textContent: `${this.localize('Stream Type')}\u00a0`}));this.contentEl_.appendChild(document.createTextNode(this.localize('LIVE')));el.appendChild(this.contentEl_);return el;}dispose() {this.contentEl_ = null;super.dispose();}/*** Check the duration to see if the LiveDisplay should be showing or not. Then show/hide* it accordingly** @param {Event} [event]* The {@link Player#durationchange} event that caused this function to run.** @listens Player#durationchange*/updateShowing(event) {if (this.player().duration() === Infinity) {this.show();} else {this.hide();}}}Component$1.registerComponent('LiveDisplay', LiveDisplay);/*** @file seek-to-live.js*//*** Displays the live indicator when duration is Infinity.** @extends Component*/class SeekToLive extends Button {/*** Creates an instance of this class.** @param { import('./player').default } player* The `Player` that this class should be attached to.** @param {Object} [options]* The key/value store of player options.*/constructor(player, options) {super(player, options);this.updateLiveEdgeStatus();if (this.player_.liveTracker) {this.updateLiveEdgeStatusHandler_ = e => this.updateLiveEdgeStatus(e);this.on(this.player_.liveTracker, 'liveedgechange', this.updateLiveEdgeStatusHandler_);}}/*** Create the `Component`'s DOM element** @return {Element}* The element that was created.*/createEl() {const el = super.createEl('button', {className: 'vjs-seek-to-live-control vjs-control'});this.setIcon('circle', el);this.textEl_ = createEl('span', {className: 'vjs-seek-to-live-text',textContent: this.localize('LIVE')}, {'aria-hidden': 'true'});el.appendChild(this.textEl_);return el;}/*** Update the state of this button if we are at the live edge* or not*/updateLiveEdgeStatus() {// default to live edgeif (!this.player_.liveTracker || this.player_.liveTracker.atLiveEdge()) {this.setAttribute('aria-disabled', true);this.addClass('vjs-at-live-edge');this.controlText('Seek to live, currently playing live');} else {this.setAttribute('aria-disabled', false);this.removeClass('vjs-at-live-edge');this.controlText('Seek to live, currently behind live');}}/*** On click bring us as near to the live point as possible.* This requires that we wait for the next `live-seekable-change`* event which will happen every segment length seconds.*/handleClick() {this.player_.liveTracker.seekToLiveEdge();}/*** Dispose of the element and 
stop tracking*/dispose() {if (this.player_.liveTracker) {this.off(this.player_.liveTracker, 'liveedgechange', this.updateLiveEdgeStatusHandler_);}this.textEl_ = null;super.dispose();}}/*** The text that should display over the `SeekToLive`s control. Added for localization.** @type {string}* @protected*/SeekToLive.prototype.controlText_ = 'Seek to live, currently playing live';Component$1.registerComponent('SeekToLive', SeekToLive);/*** @file num.js* @module num*//*** Keep a number between a min and a max value** @param {number} number* The number to clamp** @param {number} min* The minimum value* @param {number} max* The maximum value** @return {number}* the clamped number*/function clamp(number, min, max) {number = Number(number);return Math.min(max, Math.max(min, isNaN(number) ? min : number));}var Num = /*#__PURE__*/Object.freeze({__proto__: null,clamp: clamp});/*** @file slider.js*//*** The base functionality for a slider. Can be vertical or horizontal.* For instance the volume bar or the seek bar on a video is a slider.** @extends Component*/class Slider extends Component$1 {/*** Create an instance of this class** @param { import('../player').default } player* The `Player` that this class should be attached to.** @param {Object} [options]* The key/value store of player options.*/constructor(player, options) {super(player, options);this.handleMouseDown_ = e => this.handleMouseDown(e);this.handleMouseUp_ = e => this.handleMouseUp(e);this.handleKeyDown_ = e => this.handleKeyDown(e);this.handleClick_ = e => this.handleClick(e);this.handleMouseMove_ = e => this.handleMouseMove(e);this.update_ = e => this.update(e);// Set property names to bar to match with the child Slider class is looking forthis.bar = this.getChild(this.options_.barName);// Set a horizontal or vertical class on the slider depending on the slider typethis.vertical(!!this.options_.vertical);this.enable();}/*** Are controls are currently enabled for this slider or not.** @return {boolean}* true if controls are enabled, false otherwise*/enabled() {return this.enabled_;}/*** Enable controls for this slider if they are disabled*/enable() {if (this.enabled()) {return;}this.on('mousedown', this.handleMouseDown_);this.on('touchstart', this.handleMouseDown_);this.on('keydown', this.handleKeyDown_);this.on('click', this.handleClick_);// TODO: deprecated, controlsvisible does not seem to be firedthis.on(this.player_, 'controlsvisible', this.update);if (this.playerEvent) {this.on(this.player_, this.playerEvent, this.update);}this.removeClass('disabled');this.setAttribute('tabindex', 0);this.enabled_ = true;}/*** Disable controls for this slider if they are enabled*/disable() {if (!this.enabled()) {return;}const doc = this.bar.el_.ownerDocument;this.off('mousedown', this.handleMouseDown_);this.off('touchstart', this.handleMouseDown_);this.off('keydown', this.handleKeyDown_);this.off('click', this.handleClick_);this.off(this.player_, 'controlsvisible', this.update_);this.off(doc, 'mousemove', this.handleMouseMove_);this.off(doc, 'mouseup', this.handleMouseUp_);this.off(doc, 'touchmove', this.handleMouseMove_);this.off(doc, 'touchend', this.handleMouseUp_);this.removeAttribute('tabindex');this.addClass('disabled');if (this.playerEvent) {this.off(this.player_, this.playerEvent, this.update);}this.enabled_ = false;}/*** Create the `Slider`s DOM element.** @param {string} type* Type of element to create.** @param {Object} [props={}]* List of properties in Object form.** @param {Object} [attributes={}]* list of attributes in Object form.** 
@return {Element}* The element that gets created.*/createEl(type, props = {}, attributes = {}) {// Add the slider element class to all sub classesprops.className = props.className + ' vjs-slider';props = Object.assign({tabIndex: 0}, props);attributes = Object.assign({'role': 'slider','aria-valuenow': 0,'aria-valuemin': 0,'aria-valuemax': 100}, attributes);return super.createEl(type, props, attributes);}/*** Handle `mousedown` or `touchstart` events on the `Slider`.** @param {MouseEvent} event* `mousedown` or `touchstart` event that triggered this function** @listens mousedown* @listens touchstart* @fires Slider#slideractive*/handleMouseDown(event) {const doc = this.bar.el_.ownerDocument;if (event.type === 'mousedown') {event.preventDefault();}// Do not call preventDefault() on touchstart in Chrome// to avoid console warnings. Use a 'touch-action: none' style// instead to prevent unintended scrolling.// https://developers.google.com/web/updates/2017/01/scrolling-interventionif (event.type === 'touchstart' && !IS_CHROME) {event.preventDefault();}blockTextSelection();this.addClass('vjs-sliding');/*** Triggered when the slider is in an active state** @event Slider#slideractive* @type {MouseEvent}*/this.trigger('slideractive');this.on(doc, 'mousemove', this.handleMouseMove_);this.on(doc, 'mouseup', this.handleMouseUp_);this.on(doc, 'touchmove', this.handleMouseMove_);this.on(doc, 'touchend', this.handleMouseUp_);this.handleMouseMove(event, true);}/*** Handle the `mousemove`, `touchmove`, and `mousedown` events on this `Slider`.* The `mousemove` and `touchmove` events will only only trigger this function during* `mousedown` and `touchstart`. This is due to {@link Slider#handleMouseDown} and* {@link Slider#handleMouseUp}.** @param {MouseEvent} event* `mousedown`, `mousemove`, `touchstart`, or `touchmove` event that triggered* this function* @param {boolean} mouseDown this is a flag that should be set to true if `handleMouseMove` is called directly. It allows us to skip things that should not happen if coming from mouse down but should happen on regular mouse move handler. Defaults to false.** @listens mousemove* @listens touchmove*/handleMouseMove(event) {}/*** Handle `mouseup` or `touchend` events on the `Slider`.** @param {MouseEvent} event* `mouseup` or `touchend` event that triggered this function.** @listens touchend* @listens mouseup* @fires Slider#sliderinactive*/handleMouseUp(event) {const doc = this.bar.el_.ownerDocument;unblockTextSelection();this.removeClass('vjs-sliding');/*** Triggered when the slider is no longer in an active state.** @event Slider#sliderinactive* @type {Event}*/this.trigger('sliderinactive');this.off(doc, 'mousemove', this.handleMouseMove_);this.off(doc, 'mouseup', this.handleMouseUp_);this.off(doc, 'touchmove', this.handleMouseMove_);this.off(doc, 'touchend', this.handleMouseUp_);this.update();}/*** Update the progress bar of the `Slider`.** @return {number}* The percentage of progress the progress bar represents as a* number from 0 to 1.*/update() {// In VolumeBar init we have a setTimeout for update that pops and update// to the end of the execution stack. 
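// --- Editor's sketch (not part of the Video.js source): Slider is the abstract base that
// SeekBar and VolumeBar (further below) build on by supplying getPercent(),
// handleMouseMove() and the step functions. A minimal, hypothetical subclass registered
// through the public component registry; a complete bar would also declare
// options_.children and options_.barName (as SeekBar does with 'playProgressBar') so the
// drag handlers have a bar element to work with.
const SliderBase = videojs.getComponent('Slider');
class PlaybackRateBar extends SliderBase {
  // Fill level of the bar: map a 0..2 playback-rate range onto 0..1.
  getPercent() {
    return this.player_.playbackRate() / 2;
  }
  // Dragging sets the rate from the pointer position (calculateDistance returns 0..1).
  handleMouseMove(event) {
    this.player_.playbackRate(this.calculateDistance(event) * 2);
  }
  // Arrow keys are routed here by Slider#handleKeyDown.
  stepForward() {
    this.player_.playbackRate(this.player_.playbackRate() + 0.1);
  }
  stepBack() {
    this.player_.playbackRate(this.player_.playbackRate() - 0.1);
  }
}
videojs.registerComponent('PlaybackRateBar', PlaybackRateBar);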
The player is destroyed before then// update will cause an error// If there's no bar...if (!this.el_ || !this.bar) {return;}// clamp progress between 0 and 1// and only round to four decimal places, as we round to two belowconst progress = this.getProgress();if (progress === this.progress_) {return progress;}this.progress_ = progress;this.requestNamedAnimationFrame('Slider#update', () => {// Set the new bar width or heightconst sizeKey = this.vertical() ? 'height' : 'width';// Convert to a percentage for css valuethis.bar.el().style[sizeKey] = (progress * 100).toFixed(2) + '%';});return progress;}/*** Get the percentage of the bar that should be filled* but clamped and rounded.** @return {number}* percentage filled that the slider is*/getProgress() {return Number(clamp(this.getPercent(), 0, 1).toFixed(4));}/*** Calculate distance for slider** @param {Event} event* The event that caused this function to run.** @return {number}* The current position of the Slider.* - position.x for vertical `Slider`s* - position.y for horizontal `Slider`s*/calculateDistance(event) {const position = getPointerPosition(this.el_, event);if (this.vertical()) {return position.y;}return position.x;}/*** Handle a `keydown` event on the `Slider`. Watches for left, right, up, and down* arrow keys. This function will only be called when the slider has focus. See* {@link Slider#handleFocus} and {@link Slider#handleBlur}.** @param {KeyboardEvent} event* the `keydown` event that caused this function to run.** @listens keydown*/handleKeyDown(event) {// Left and Down Arrowsif (keycode.isEventKey(event, 'Left') || keycode.isEventKey(event, 'Down')) {event.preventDefault();event.stopPropagation();this.stepBack();// Up and Right Arrows} else if (keycode.isEventKey(event, 'Right') || keycode.isEventKey(event, 'Up')) {event.preventDefault();event.stopPropagation();this.stepForward();} else {// Pass keydown handling up for unsupported keyssuper.handleKeyDown(event);}}/*** Listener for click events on slider, used to prevent clicks* from bubbling up to parent elements like button menus.** @param {Object} event* Event that caused this object to run*/handleClick(event) {event.stopPropagation();event.preventDefault();}/*** Get/set if slider is horizontal for vertical** @param {boolean} [bool]* - true if slider is vertical,* - false is horizontal** @return {boolean}* - true if slider is vertical, and getting* - false if the slider is horizontal, and getting*/vertical(bool) {if (bool === undefined) {return this.vertical_ || false;}this.vertical_ = !!bool;if (this.vertical_) {this.addClass('vjs-slider-vertical');} else {this.addClass('vjs-slider-horizontal');}}}Component$1.registerComponent('Slider', Slider);/*** @file load-progress-bar.js*/// get the percent width of a time compared to the total endconst percentify = (time, end) => clamp(time / end * 100, 0, 100).toFixed(2) + '%';/*** Shows loading progress** @extends Component*/class LoadProgressBar extends Component$1 {/*** Creates an instance of this class.** @param { import('../../player').default } player* The `Player` that this class should be attached to.** @param {Object} [options]* The key/value store of player options.*/constructor(player, options) {super(player, options);this.partEls_ = [];this.on(player, 'progress', e => this.update(e));}/*** Create the `Component`'s DOM element** @return {Element}* The element that was created.*/createEl() {const el = super.createEl('div', {className: 'vjs-load-progress'});const wrapper = createEl('span', {className: 
'vjs-control-text'});const loadedText = createEl('span', {textContent: this.localize('Loaded')});const separator = document.createTextNode(': ');this.percentageEl_ = createEl('span', {className: 'vjs-control-text-loaded-percentage',textContent: '0%'});el.appendChild(wrapper);wrapper.appendChild(loadedText);wrapper.appendChild(separator);wrapper.appendChild(this.percentageEl_);return el;}dispose() {this.partEls_ = null;this.percentageEl_ = null;super.dispose();}/*** Update progress bar** @param {Event} [event]* The `progress` event that caused this function to run.** @listens Player#progress*/update(event) {this.requestNamedAnimationFrame('LoadProgressBar#update', () => {const liveTracker = this.player_.liveTracker;const buffered = this.player_.buffered();const duration = liveTracker && liveTracker.isLive() ? liveTracker.seekableEnd() : this.player_.duration();const bufferedEnd = this.player_.bufferedEnd();const children = this.partEls_;const percent = percentify(bufferedEnd, duration);if (this.percent_ !== percent) {// update the width of the progress barthis.el_.style.width = percent;// update the control-texttextContent(this.percentageEl_, percent);this.percent_ = percent;}// add child elements to represent the individual buffered time rangesfor (let i = 0; i < buffered.length; i++) {const start = buffered.start(i);const end = buffered.end(i);let part = children[i];if (!part) {part = this.el_.appendChild(createEl());children[i] = part;}// only update if changedif (part.dataset.start === start && part.dataset.end === end) {continue;}part.dataset.start = start;part.dataset.end = end;// set the percent based on the width of the progress bar (bufferedEnd)part.style.left = percentify(start, bufferedEnd);part.style.width = percentify(end - start, bufferedEnd);}// remove unused buffered range elementsfor (let i = children.length; i > buffered.length; i--) {this.el_.removeChild(children[i - 1]);}children.length = buffered.length;});}}Component$1.registerComponent('LoadProgressBar', LoadProgressBar);/*** @file time-tooltip.js*//*** Time tooltips display a time above the progress bar.** @extends Component*/class TimeTooltip extends Component$1 {/*** Creates an instance of this class.** @param { import('../../player').default } player* The {@link Player} that this class should be attached to.** @param {Object} [options]* The key/value store of player options.*/constructor(player, options) {super(player, options);this.update = throttle(bind_(this, this.update), UPDATE_REFRESH_INTERVAL);}/*** Create the time tooltip DOM element** @return {Element}* The element that was created.*/createEl() {return super.createEl('div', {className: 'vjs-time-tooltip'}, {'aria-hidden': 'true'});}/*** Updates the position of the time tooltip relative to the `SeekBar`.** @param {Object} seekBarRect* The `ClientRect` for the {@link SeekBar} element.** @param {number} seekBarPoint* A number from 0 to 1, representing a horizontal reference point* from the left edge of the {@link SeekBar}*/update(seekBarRect, seekBarPoint, content) {const tooltipRect = findPosition(this.el_);const playerRect = getBoundingClientRect(this.player_.el());const seekBarPointPx = seekBarRect.width * seekBarPoint;// do nothing if either rect isn't available// for example, if the player isn't in the DOM for testingif (!playerRect || !tooltipRect) {return;}// This is the space left of the `seekBarPoint` available within the bounds// of the player. 
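// --- Editor's sketch (not part of the Video.js source): LoadProgressBar above sets its
// own width from bufferedEnd()/duration and lays out one child per buffered range,
// positioned relative to bufferedEnd. The same geometry as a plain function; it takes
// ranges as [start, end] arrays rather than a TimeRanges object, and the numbers in the
// usage line are invented for the example.
function loadProgressGeometry(ranges, bufferedEnd, duration) {
  const percentify = (time, end) =>
    Math.min(Math.max(time / end * 100, 0), 100).toFixed(2) + '%';
  return {
    // width of the whole vjs-load-progress element
    width: percentify(bufferedEnd, duration),
    // left offset and width of each buffered-range child, relative to bufferedEnd
    parts: ranges.map(([start, end]) => ({
      left: percentify(start, bufferedEnd),
      width: percentify(end - start, bufferedEnd)
    }))
  };
}
// loadProgressGeometry([[0, 30], [45, 60]], 60, 120)
// -> { width: '50.00%', parts: [{ left: '0.00%', width: '50.00%' },
//                               { left: '75.00%', width: '25.00%' }] }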
We calculate any gap between the left edge of the player// and the left edge of the `SeekBar` and add the number of pixels in the// `SeekBar` before hitting the `seekBarPoint`const spaceLeftOfPoint = seekBarRect.left - playerRect.left + seekBarPointPx;// This is the space right of the `seekBarPoint` available within the bounds// of the player. We calculate the number of pixels from the `seekBarPoint`// to the right edge of the `SeekBar` and add to that any gap between the// right edge of the `SeekBar` and the player.const spaceRightOfPoint = seekBarRect.width - seekBarPointPx + (playerRect.right - seekBarRect.right);// This is the number of pixels by which the tooltip will need to be pulled// further to the right to center it over the `seekBarPoint`.let pullTooltipBy = tooltipRect.width / 2;// Adjust the `pullTooltipBy` distance to the left or right depending on// the results of the space calculations above.if (spaceLeftOfPoint < pullTooltipBy) {pullTooltipBy += pullTooltipBy - spaceLeftOfPoint;} else if (spaceRightOfPoint < pullTooltipBy) {pullTooltipBy = spaceRightOfPoint;}// Due to the imprecision of decimal/ratio based calculations and varying// rounding behaviors, there are cases where the spacing adjustment is off// by a pixel or two. This adds insurance to these calculations.if (pullTooltipBy < 0) {pullTooltipBy = 0;} else if (pullTooltipBy > tooltipRect.width) {pullTooltipBy = tooltipRect.width;}// prevent small width fluctuations within 0.4px from// changing the value below.// This really helps for live to prevent the play// progress time tooltip from jitteringpullTooltipBy = Math.round(pullTooltipBy);this.el_.style.right = `-${pullTooltipBy}px`;this.write(content);}/*** Write the time to the tooltip DOM element.** @param {string} content* The formatted time for the tooltip.*/write(content) {textContent(this.el_, content);}/*** Updates the position of the time tooltip relative to the `SeekBar`.** @param {Object} seekBarRect* The `ClientRect` for the {@link SeekBar} element.** @param {number} seekBarPoint* A number from 0 to 1, representing a horizontal reference point* from the left edge of the {@link SeekBar}** @param {number} time* The time to update the tooltip to, not used during live playback** @param {Function} cb* A function that will be called during the request animation frame* for tooltips that need to do additional animations from the default*/updateTime(seekBarRect, seekBarPoint, time, cb) {this.requestNamedAnimationFrame('TimeTooltip#updateTime', () => {let content;const duration = this.player_.duration();if (this.player_.liveTracker && this.player_.liveTracker.isLive()) {const liveWindow = this.player_.liveTracker.liveWindow();const secondsBehind = liveWindow - seekBarPoint * liveWindow;content = (secondsBehind < 1 ? 
'' : '-') + formatTime(secondsBehind, liveWindow);} else {content = formatTime(time, duration);}this.update(seekBarRect, seekBarPoint, content);if (cb) {cb();}});}}Component$1.registerComponent('TimeTooltip', TimeTooltip);/*** @file play-progress-bar.js*//*** Used by {@link SeekBar} to display media playback progress as part of the* {@link ProgressControl}.** @extends Component*/class PlayProgressBar extends Component$1 {/*** Creates an instance of this class.** @param { import('../../player').default } player* The {@link Player} that this class should be attached to.** @param {Object} [options]* The key/value store of player options.*/constructor(player, options) {super(player, options);this.setIcon('circle');this.update = throttle(bind_(this, this.update), UPDATE_REFRESH_INTERVAL);}/*** Create the the DOM element for this class.** @return {Element}* The element that was created.*/createEl() {return super.createEl('div', {className: 'vjs-play-progress vjs-slider-bar'}, {'aria-hidden': 'true'});}/*** Enqueues updates to its own DOM as well as the DOM of its* {@link TimeTooltip} child.** @param {Object} seekBarRect* The `ClientRect` for the {@link SeekBar} element.** @param {number} seekBarPoint* A number from 0 to 1, representing a horizontal reference point* from the left edge of the {@link SeekBar}*/update(seekBarRect, seekBarPoint) {const timeTooltip = this.getChild('timeTooltip');if (!timeTooltip) {return;}const time = this.player_.scrubbing() ? this.player_.getCache().currentTime : this.player_.currentTime();timeTooltip.updateTime(seekBarRect, seekBarPoint, time);}}/*** Default options for {@link PlayProgressBar}.** @type {Object}* @private*/PlayProgressBar.prototype.options_ = {children: []};// Time tooltips should not be added to a player on mobile devicesif (!IS_IOS && !IS_ANDROID) {PlayProgressBar.prototype.options_.children.push('timeTooltip');}Component$1.registerComponent('PlayProgressBar', PlayProgressBar);/*** @file mouse-time-display.js*//*** The {@link MouseTimeDisplay} component tracks mouse movement over the* {@link ProgressControl}. 
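// --- Editor's sketch (not part of the Video.js source): TimeTooltip.update() above first
// centers the tooltip over seekBarPoint and then clamps the offset so it never leaves the
// player bounds. The core of that calculation reduced to plain numbers (all pixel values
// here are invented for the example).
function tooltipRightOffset(spaceLeftOfPoint, spaceRightOfPoint, tooltipWidth) {
  // Start by centering the tooltip on the pointer position.
  let pullTooltipBy = tooltipWidth / 2;
  if (spaceLeftOfPoint < pullTooltipBy) {
    // Not enough room on the left: push the tooltip further right.
    pullTooltipBy += pullTooltipBy - spaceLeftOfPoint;
  } else if (spaceRightOfPoint < pullTooltipBy) {
    // Not enough room on the right: pin it to the space that is left.
    pullTooltipBy = spaceRightOfPoint;
  }
  // Guard against rounding drift, then round to whole pixels to avoid jitter.
  pullTooltipBy = Math.min(Math.max(pullTooltipBy, 0), tooltipWidth);
  return `-${Math.round(pullTooltipBy)}px`;
}
// Near the left edge: tooltipRightOffset(10, 500, 60) -> '-50px'
// (centering alone would give '-30px'; the extra 20px keeps the tooltip inside the player)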
It displays an indicator and a {@link TimeTooltip}* indicating the time which is represented by a given point in the* {@link ProgressControl}.** @extends Component*/class MouseTimeDisplay extends Component$1 {/*** Creates an instance of this class.** @param { import('../../player').default } player* The {@link Player} that this class should be attached to.** @param {Object} [options]* The key/value store of player options.*/constructor(player, options) {super(player, options);this.update = throttle(bind_(this, this.update), UPDATE_REFRESH_INTERVAL);}/*** Create the DOM element for this class.** @return {Element}* The element that was created.*/createEl() {return super.createEl('div', {className: 'vjs-mouse-display'});}/*** Enqueues updates to its own DOM as well as the DOM of its* {@link TimeTooltip} child.** @param {Object} seekBarRect* The `ClientRect` for the {@link SeekBar} element.** @param {number} seekBarPoint* A number from 0 to 1, representing a horizontal reference point* from the left edge of the {@link SeekBar}*/update(seekBarRect, seekBarPoint) {const time = seekBarPoint * this.player_.duration();this.getChild('timeTooltip').updateTime(seekBarRect, seekBarPoint, time, () => {this.el_.style.left = `${seekBarRect.width * seekBarPoint}px`;});}}/*** Default options for `MouseTimeDisplay`** @type {Object}* @private*/MouseTimeDisplay.prototype.options_ = {children: ['timeTooltip']};Component$1.registerComponent('MouseTimeDisplay', MouseTimeDisplay);/*** @file seek-bar.js*/// The number of seconds the `step*` functions move the timeline.const STEP_SECONDS = 5;// The multiplier of STEP_SECONDS that PgUp/PgDown move the timeline.const PAGE_KEY_MULTIPLIER = 12;/*** Seek bar and container for the progress bars. Uses {@link PlayProgressBar}* as its `bar`.** @extends Slider*/class SeekBar extends Slider {/*** Creates an instance of this class.** @param { import('../../player').default } player* The `Player` that this class should be attached to.** @param {Object} [options]* The key/value store of player options.*/constructor(player, options) {super(player, options);this.setEventHandlers_();}/*** Sets the event handlers** @private*/setEventHandlers_() {this.update_ = bind_(this, this.update);this.update = throttle(this.update_, UPDATE_REFRESH_INTERVAL);this.on(this.player_, ['ended', 'durationchange', 'timeupdate'], this.update);if (this.player_.liveTracker) {this.on(this.player_.liveTracker, 'liveedgechange', this.update);}// when playing, let's ensure we smoothly update the play progress bar// via an intervalthis.updateInterval = null;this.enableIntervalHandler_ = e => this.enableInterval_(e);this.disableIntervalHandler_ = e => this.disableInterval_(e);this.on(this.player_, ['playing'], this.enableIntervalHandler_);this.on(this.player_, ['ended', 'pause', 'waiting'], this.disableIntervalHandler_);// we don't need to update the play progress if the document is hidden,// also, this causes the CPU to spike and eventually crash the page on IE11.if ('hidden' in document && 'visibilityState' in document) {this.on(document, 'visibilitychange', this.toggleVisibility_);}}toggleVisibility_(e) {if (document.visibilityState === 'hidden') {this.cancelNamedAnimationFrame('SeekBar#update');this.cancelNamedAnimationFrame('Slider#update');this.disableInterval_(e);} else {if (!this.player_.ended() && !this.player_.paused()) {this.enableInterval_();}// we just switched back to the page and someone may be looking, so, update ASAPthis.update();}}enableInterval_() {if (this.updateInterval) 
{return;}this.updateInterval = this.setInterval(this.update, UPDATE_REFRESH_INTERVAL);}disableInterval_(e) {if (this.player_.liveTracker && this.player_.liveTracker.isLive() && e && e.type !== 'ended') {return;}if (!this.updateInterval) {return;}this.clearInterval(this.updateInterval);this.updateInterval = null;}/*** Create the `Component`'s DOM element** @return {Element}* The element that was created.*/createEl() {return super.createEl('div', {className: 'vjs-progress-holder'}, {'aria-label': this.localize('Progress Bar')});}/*** This function updates the play progress bar and accessibility* attributes to whatever is passed in.** @param {Event} [event]* The `timeupdate` or `ended` event that caused this to run.** @listens Player#timeupdate** @return {number}* The current percent at a number from 0-1*/update(event) {// ignore updates while the tab is hiddenif (document.visibilityState === 'hidden') {return;}const percent = super.update();this.requestNamedAnimationFrame('SeekBar#update', () => {const currentTime = this.player_.ended() ? this.player_.duration() : this.getCurrentTime_();const liveTracker = this.player_.liveTracker;let duration = this.player_.duration();if (liveTracker && liveTracker.isLive()) {duration = this.player_.liveTracker.liveCurrentTime();}if (this.percent_ !== percent) {// machine readable value of progress bar (percentage complete)this.el_.setAttribute('aria-valuenow', (percent * 100).toFixed(2));this.percent_ = percent;}if (this.currentTime_ !== currentTime || this.duration_ !== duration) {// human readable value of progress bar (time complete)this.el_.setAttribute('aria-valuetext', this.localize('progress bar timing: currentTime={1} duration={2}', [formatTime(currentTime, duration), formatTime(duration, duration)], '{1} of {2}'));this.currentTime_ = currentTime;this.duration_ = duration;}// update the progress bar time tooltip with the current timeif (this.bar) {this.bar.update(getBoundingClientRect(this.el()), this.getProgress());}});return percent;}/*** Prevent liveThreshold from causing seeks to seem like they* are not happening from a user perspective.** @param {number} ct* current time to seek to*/userSeek_(ct) {if (this.player_.liveTracker && this.player_.liveTracker.isLive()) {this.player_.liveTracker.nextSeekedFromUser();}this.player_.currentTime(ct);}/*** Get the value of current time but allows for smooth scrubbing,* when player can't keep up.** @return {number}* The current time value to display** @private*/getCurrentTime_() {return this.player_.scrubbing() ? 
this.player_.getCache().currentTime : this.player_.currentTime();}/*** Get the percentage of media played so far.** @return {number}* The percentage of media played so far (0 to 1).*/getPercent() {const currentTime = this.getCurrentTime_();let percent;const liveTracker = this.player_.liveTracker;if (liveTracker && liveTracker.isLive()) {percent = (currentTime - liveTracker.seekableStart()) / liveTracker.liveWindow();// prevent the percent from changing at the live edgeif (liveTracker.atLiveEdge()) {percent = 1;}} else {percent = currentTime / this.player_.duration();}return percent;}/*** Handle mouse down on seek bar** @param {MouseEvent} event* The `mousedown` event that caused this to run.** @listens mousedown*/handleMouseDown(event) {if (!isSingleLeftClick(event)) {return;}// Stop event propagation to prevent double fire in progress-control.jsevent.stopPropagation();this.videoWasPlaying = !this.player_.paused();this.player_.pause();super.handleMouseDown(event);}/*** Handle mouse move on seek bar** @param {MouseEvent} event* The `mousemove` event that caused this to run.* @param {boolean} mouseDown this is a flag that should be set to true if `handleMouseMove` is called directly. It allows us to skip things that should not happen if coming from mouse down but should happen on regular mouse move handler. Defaults to false** @listens mousemove*/handleMouseMove(event, mouseDown = false) {if (!isSingleLeftClick(event) || isNaN(this.player_.duration())) {return;}if (!mouseDown && !this.player_.scrubbing()) {this.player_.scrubbing(true);}let newTime;const distance = this.calculateDistance(event);const liveTracker = this.player_.liveTracker;if (!liveTracker || !liveTracker.isLive()) {newTime = distance * this.player_.duration();// Don't let video end while scrubbing.if (newTime === this.player_.duration()) {newTime = newTime - 0.1;}} else {if (distance >= 0.99) {liveTracker.seekToLiveEdge();return;}const seekableStart = liveTracker.seekableStart();const seekableEnd = liveTracker.liveCurrentTime();newTime = seekableStart + distance * liveTracker.liveWindow();// Don't let video end while scrubbing.if (newTime >= seekableEnd) {newTime = seekableEnd;}// Compensate for precision differences so that currentTime is not less// than seekable startif (newTime <= seekableStart) {newTime = seekableStart + 0.1;}// On android seekableEnd can be Infinity sometimes,// this will cause newTime to be Infinity, which is// not a valid currentTime.if (newTime === Infinity) {return;}}// Set new time (tell player to seek to new time)this.userSeek_(newTime);if (this.player_.options_.enableSmoothSeeking) {this.update();}}enable() {super.enable();const mouseTimeDisplay = this.getChild('mouseTimeDisplay');if (!mouseTimeDisplay) {return;}mouseTimeDisplay.show();}disable() {super.disable();const mouseTimeDisplay = this.getChild('mouseTimeDisplay');if (!mouseTimeDisplay) {return;}mouseTimeDisplay.hide();}/*** Handle mouse up on seek bar** @param {MouseEvent} event* The `mouseup` event that caused this to run.** @listens mouseup*/handleMouseUp(event) {super.handleMouseUp(event);// Stop event propagation to prevent double fire in progress-control.jsif (event) {event.stopPropagation();}this.player_.scrubbing(false);/*** Trigger timeupdate because we're done seeking and the time has changed.* This is particularly useful for if the player is paused to time the time displays.** @event Tech#timeupdate* @type {Event}*/this.player_.trigger({type: 'timeupdate',target: this,manuallyTriggered: true});if (this.videoWasPlaying) 
{silencePromise(this.player_.play());} else {// We're done seeking and the time has changed.// If the player is paused, make sure we display the correct time on the seek bar.this.update_();}}/*** Move more quickly fast forward for keyboard-only users*/stepForward() {this.userSeek_(this.player_.currentTime() + STEP_SECONDS);}/*** Move more quickly rewind for keyboard-only users*/stepBack() {this.userSeek_(this.player_.currentTime() - STEP_SECONDS);}/*** Toggles the playback state of the player* This gets called when enter or space is used on the seekbar** @param {KeyboardEvent} event* The `keydown` event that caused this function to be called**/handleAction(event) {if (this.player_.paused()) {this.player_.play();} else {this.player_.pause();}}/*** Called when this SeekBar has focus and a key gets pressed down.* Supports the following keys:** Space or Enter key fire a click event* Home key moves to start of the timeline* End key moves to end of the timeline* Digit "0" through "9" keys move to 0%, 10% ... 80%, 90% of the timeline* PageDown key moves back a larger step than ArrowDown* PageUp key moves forward a large step** @param {KeyboardEvent} event* The `keydown` event that caused this function to be called.** @listens keydown*/handleKeyDown(event) {const liveTracker = this.player_.liveTracker;if (keycode.isEventKey(event, 'Space') || keycode.isEventKey(event, 'Enter')) {event.preventDefault();event.stopPropagation();this.handleAction(event);} else if (keycode.isEventKey(event, 'Home')) {event.preventDefault();event.stopPropagation();this.userSeek_(0);} else if (keycode.isEventKey(event, 'End')) {event.preventDefault();event.stopPropagation();if (liveTracker && liveTracker.isLive()) {this.userSeek_(liveTracker.liveCurrentTime());} else {this.userSeek_(this.player_.duration());}} else if (/^[0-9]$/.test(keycode(event))) {event.preventDefault();event.stopPropagation();const gotoFraction = (keycode.codes[keycode(event)] - keycode.codes['0']) * 10.0 / 100.0;if (liveTracker && liveTracker.isLive()) {this.userSeek_(liveTracker.seekableStart() + liveTracker.liveWindow() * gotoFraction);} else {this.userSeek_(this.player_.duration() * gotoFraction);}} else if (keycode.isEventKey(event, 'PgDn')) {event.preventDefault();event.stopPropagation();this.userSeek_(this.player_.currentTime() - STEP_SECONDS * PAGE_KEY_MULTIPLIER);} else if (keycode.isEventKey(event, 'PgUp')) {event.preventDefault();event.stopPropagation();this.userSeek_(this.player_.currentTime() + STEP_SECONDS * PAGE_KEY_MULTIPLIER);} else {// Pass keydown handling up for unsupported keyssuper.handleKeyDown(event);}}dispose() {this.disableInterval_();this.off(this.player_, ['ended', 'durationchange', 'timeupdate'], this.update);if (this.player_.liveTracker) {this.off(this.player_.liveTracker, 'liveedgechange', this.update);}this.off(this.player_, ['playing'], this.enableIntervalHandler_);this.off(this.player_, ['ended', 'pause', 'waiting'], this.disableIntervalHandler_);// we don't need to update the play progress if the document is hidden,// also, this causes the CPU to spike and eventually crash the page on IE11.if ('hidden' in document && 'visibilityState' in document) {this.off(document, 'visibilitychange', this.toggleVisibility_);}super.dispose();}}/*** Default options for the `SeekBar`** @type {Object}* @private*/SeekBar.prototype.options_ = {children: ['loadProgressBar', 'playProgressBar'],barName: 'playProgressBar'};// MouseTimeDisplay tooltips should not be added to a player on mobile devicesif (!IS_IOS && !IS_ANDROID) 
{SeekBar.prototype.options_.children.splice(1, 0, 'mouseTimeDisplay');}Component$1.registerComponent('SeekBar', SeekBar);/*** @file progress-control.js*//*** The Progress Control component contains the seek bar, load progress,* and play progress.** @extends Component*/class ProgressControl extends Component$1 {/*** Creates an instance of this class.** @param { import('../../player').default } player* The `Player` that this class should be attached to.** @param {Object} [options]* The key/value store of player options.*/constructor(player, options) {super(player, options);this.handleMouseMove = throttle(bind_(this, this.handleMouseMove), UPDATE_REFRESH_INTERVAL);this.throttledHandleMouseSeek = throttle(bind_(this, this.handleMouseSeek), UPDATE_REFRESH_INTERVAL);this.handleMouseUpHandler_ = e => this.handleMouseUp(e);this.handleMouseDownHandler_ = e => this.handleMouseDown(e);this.enable();}/*** Create the `Component`'s DOM element** @return {Element}* The element that was created.*/createEl() {return super.createEl('div', {className: 'vjs-progress-control vjs-control'});}/*** When the mouse moves over the `ProgressControl`, the pointer position* gets passed down to the `MouseTimeDisplay` component.** @param {Event} event* The `mousemove` event that caused this function to run.** @listen mousemove*/handleMouseMove(event) {const seekBar = this.getChild('seekBar');if (!seekBar) {return;}const playProgressBar = seekBar.getChild('playProgressBar');const mouseTimeDisplay = seekBar.getChild('mouseTimeDisplay');if (!playProgressBar && !mouseTimeDisplay) {return;}const seekBarEl = seekBar.el();const seekBarRect = findPosition(seekBarEl);let seekBarPoint = getPointerPosition(seekBarEl, event).x;// The default skin has a gap on either side of the `SeekBar`. This means// that it's possible to trigger this behavior outside the boundaries of// the `SeekBar`. 
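// --- Editor's sketch (not part of the Video.js source): SeekBar#handleKeyDown above maps
// keys onto seek targets: arrow keys move by STEP_SECONDS (5s), PgUp/PgDn by
// STEP_SECONDS * PAGE_KEY_MULTIPLIER (60s), digit keys jump to a fraction of the
// duration, and Home/End jump to the start/end. The same mapping as a plain function,
// covering only the non-live case; times are in seconds.
function seekTargetForKey(key, currentTime, duration) {
  const STEP = 5;          // STEP_SECONDS in the source above
  const PAGE = STEP * 12;  // STEP_SECONDS * PAGE_KEY_MULTIPLIER
  if (key === 'Home') return 0;
  if (key === 'End') return duration;
  if (key === 'PgUp') return currentTime + PAGE;
  if (key === 'PgDn') return currentTime - PAGE;
  if (/^[0-9]$/.test(key)) return duration * (Number(key) / 10);
  return currentTime;      // unsupported keys are passed up to the parent handler
}
// seekTargetForKey('7', 12, 600) -> 420 (70% of a 10-minute video)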
This ensures we stay within it at all times.seekBarPoint = clamp(seekBarPoint, 0, 1);if (mouseTimeDisplay) {mouseTimeDisplay.update(seekBarRect, seekBarPoint);}if (playProgressBar) {playProgressBar.update(seekBarRect, seekBar.getProgress());}}/*** A throttled version of the {@link ProgressControl#handleMouseSeek} listener.** @method ProgressControl#throttledHandleMouseSeek* @param {Event} event* The `mousemove` event that caused this function to run.** @listen mousemove* @listen touchmove*//*** Handle `mousemove` or `touchmove` events on the `ProgressControl`.** @param {Event} event* `mousedown` or `touchstart` event that triggered this function** @listens mousemove* @listens touchmove*/handleMouseSeek(event) {const seekBar = this.getChild('seekBar');if (seekBar) {seekBar.handleMouseMove(event);}}/*** Are controls are currently enabled for this progress control.** @return {boolean}* true if controls are enabled, false otherwise*/enabled() {return this.enabled_;}/*** Disable all controls on the progress control and its children*/disable() {this.children().forEach(child => child.disable && child.disable());if (!this.enabled()) {return;}this.off(['mousedown', 'touchstart'], this.handleMouseDownHandler_);this.off(this.el_, 'mousemove', this.handleMouseMove);this.removeListenersAddedOnMousedownAndTouchstart();this.addClass('disabled');this.enabled_ = false;// Restore normal playback state if controls are disabled while scrubbingif (this.player_.scrubbing()) {const seekBar = this.getChild('seekBar');this.player_.scrubbing(false);if (seekBar.videoWasPlaying) {silencePromise(this.player_.play());}}}/*** Enable all controls on the progress control and its children*/enable() {this.children().forEach(child => child.enable && child.enable());if (this.enabled()) {return;}this.on(['mousedown', 'touchstart'], this.handleMouseDownHandler_);this.on(this.el_, 'mousemove', this.handleMouseMove);this.removeClass('disabled');this.enabled_ = true;}/*** Cleanup listeners after the user finishes interacting with the progress controls*/removeListenersAddedOnMousedownAndTouchstart() {const doc = this.el_.ownerDocument;this.off(doc, 'mousemove', this.throttledHandleMouseSeek);this.off(doc, 'touchmove', this.throttledHandleMouseSeek);this.off(doc, 'mouseup', this.handleMouseUpHandler_);this.off(doc, 'touchend', this.handleMouseUpHandler_);}/*** Handle `mousedown` or `touchstart` events on the `ProgressControl`.** @param {Event} event* `mousedown` or `touchstart` event that triggered this function** @listens mousedown* @listens touchstart*/handleMouseDown(event) {const doc = this.el_.ownerDocument;const seekBar = this.getChild('seekBar');if (seekBar) {seekBar.handleMouseDown(event);}this.on(doc, 'mousemove', this.throttledHandleMouseSeek);this.on(doc, 'touchmove', this.throttledHandleMouseSeek);this.on(doc, 'mouseup', this.handleMouseUpHandler_);this.on(doc, 'touchend', this.handleMouseUpHandler_);}/*** Handle `mouseup` or `touchend` events on the `ProgressControl`.** @param {Event} event* `mouseup` or `touchend` event that triggered this function.** @listens touchend* @listens mouseup*/handleMouseUp(event) {const seekBar = this.getChild('seekBar');if (seekBar) {seekBar.handleMouseUp(event);}this.removeListenersAddedOnMousedownAndTouchstart();}}/*** Default options for `ProgressControl`** @type {Object}* @private*/ProgressControl.prototype.options_ = {children: ['seekBar']};Component$1.registerComponent('ProgressControl', ProgressControl);/*** @file picture-in-picture-toggle.js*//*** Toggle Picture-in-Picture mode** 
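// --- Editor's sketch (not part of the Video.js source): ProgressControl above exposes
// enable()/disable(), which also enable or disable its SeekBar child and restore normal
// playback state if controls are disabled mid-scrub. Toggling it from application code,
// for example while an ad is playing ('my-video' is an assumed element id):
function setSeekingEnabled(player, enabled) {
  const progressControl = player.getChild('ControlBar').getChild('ProgressControl');
  if (enabled) {
    progressControl.enable();
  } else {
    progressControl.disable();
  }
}
// setSeekingEnabled(videojs('my-video'), false); // clicks and drags on the seek bar are ignored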
@extends Button*/class PictureInPictureToggle extends Button {/*** Creates an instance of this class.** @param { import('./player').default } player* The `Player` that this class should be attached to.** @param {Object} [options]* The key/value store of player options.** @listens Player#enterpictureinpicture* @listens Player#leavepictureinpicture*/constructor(player, options) {super(player, options);this.setIcon('picture-in-picture-enter');this.on(player, ['enterpictureinpicture', 'leavepictureinpicture'], e => this.handlePictureInPictureChange(e));this.on(player, ['disablepictureinpicturechanged', 'loadedmetadata'], e => this.handlePictureInPictureEnabledChange(e));this.on(player, ['loadedmetadata', 'audioonlymodechange', 'audiopostermodechange'], () => this.handlePictureInPictureAudioModeChange());// TODO: Deactivate button on player emptied event.this.disable();}/*** Builds the default DOM `className`.** @return {string}* The DOM `className` for this object.*/buildCSSClass() {return `vjs-picture-in-picture-control vjs-hidden ${super.buildCSSClass()}`;}/*** Displays or hides the button depending on the audio mode detection.* Exits picture-in-picture if it is enabled when switching to audio mode.*/handlePictureInPictureAudioModeChange() {// This audio detection will not detect HLS or DASH audio-only streams because there was no reliable way to detect them at the timeconst isSourceAudio = this.player_.currentType().substring(0, 5) === 'audio';const isAudioMode = isSourceAudio || this.player_.audioPosterMode() || this.player_.audioOnlyMode();if (!isAudioMode) {this.show();return;}if (this.player_.isInPictureInPicture()) {this.player_.exitPictureInPicture();}this.hide();}/*** Enables or disables button based on availability of a Picture-In-Picture mode.** Enabled if* - `player.options().enableDocumentPictureInPicture` is true and* window.documentPictureInPicture is available; or* - `player.disablePictureInPicture()` is false and* element.requestPictureInPicture is available*/handlePictureInPictureEnabledChange() {if (document.pictureInPictureEnabled && this.player_.disablePictureInPicture() === false || this.player_.options_.enableDocumentPictureInPicture && 'documentPictureInPicture' in window) {this.enable();} else {this.disable();}}/*** Handles enterpictureinpicture and leavepictureinpicture on the player and change control text accordingly.** @param {Event} [event]* The {@link Player#enterpictureinpicture} or {@link Player#leavepictureinpicture} event that caused this function to be* called.** @listens Player#enterpictureinpicture* @listens Player#leavepictureinpicture*/handlePictureInPictureChange(event) {if (this.player_.isInPictureInPicture()) {this.setIcon('picture-in-picture-exit');this.controlText('Exit Picture-in-Picture');} else {this.setIcon('picture-in-picture-enter');this.controlText('Picture-in-Picture');}this.handlePictureInPictureEnabledChange();}/*** This gets called when an `PictureInPictureToggle` is "clicked". 
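// --- Editor's sketch (not part of the Video.js source): the toggle above enables itself
// when either element Picture-in-Picture is usable (document.pictureInPictureEnabled and
// disablePictureInPicture() not set) or the enableDocumentPictureInPicture player option
// is set and window.documentPictureInPicture exists. Wiring the same behaviour up from
// application code; 'my-video' and '#my-pip-button' are assumed ids.
function setUpPictureInPicture() {
  const player = videojs('my-video', {
    // opt in to the Document Picture-in-Picture API where the browser supports it
    enableDocumentPictureInPicture: true
  });
  document.querySelector('#my-pip-button').addEventListener('click', () => {
    // mirror handleClick() above: toggle based on the current PiP state
    if (player.isInPictureInPicture()) {
      player.exitPictureInPicture();
    } else {
      player.requestPictureInPicture();
    }
  });
  return player;
}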
See* {@link ClickableComponent} for more detailed information on what a click can be.** @param {Event} [event]* The `keydown`, `tap`, or `click` event that caused this function to be* called.** @listens tap* @listens click*/handleClick(event) {if (!this.player_.isInPictureInPicture()) {this.player_.requestPictureInPicture();} else {this.player_.exitPictureInPicture();}}/*** Show the `Component`s element if it is hidden by removing the* 'vjs-hidden' class name from it only in browsers that support the Picture-in-Picture API.*/show() {// Does not allow to display the pictureInPictureToggle in browsers that do not support the Picture-in-Picture API, e.g. Firefox.if (typeof document.exitPictureInPicture !== 'function') {return;}super.show();}}/*** The text that should display over the `PictureInPictureToggle`s controls. Added for localization.** @type {string}* @protected*/PictureInPictureToggle.prototype.controlText_ = 'Picture-in-Picture';Component$1.registerComponent('PictureInPictureToggle', PictureInPictureToggle);/*** @file fullscreen-toggle.js*//*** Toggle fullscreen video** @extends Button*/class FullscreenToggle extends Button {/*** Creates an instance of this class.** @param { import('./player').default } player* The `Player` that this class should be attached to.** @param {Object} [options]* The key/value store of player options.*/constructor(player, options) {super(player, options);this.setIcon('fullscreen-enter');this.on(player, 'fullscreenchange', e => this.handleFullscreenChange(e));if (document[player.fsApi_.fullscreenEnabled] === false) {this.disable();}}/*** Builds the default DOM `className`.** @return {string}* The DOM `className` for this object.*/buildCSSClass() {return `vjs-fullscreen-control ${super.buildCSSClass()}`;}/*** Handles fullscreenchange on the player and change control text accordingly.** @param {Event} [event]* The {@link Player#fullscreenchange} event that caused this function to be* called.** @listens Player#fullscreenchange*/handleFullscreenChange(event) {if (this.player_.isFullscreen()) {this.controlText('Exit Fullscreen');this.setIcon('fullscreen-exit');} else {this.controlText('Fullscreen');this.setIcon('fullscreen-enter');}}/*** This gets called when an `FullscreenToggle` is "clicked". See* {@link ClickableComponent} for more detailed information on what a click can be.** @param {Event} [event]* The `keydown`, `tap`, or `click` event that caused this function to be* called.** @listens tap* @listens click*/handleClick(event) {if (!this.player_.isFullscreen()) {this.player_.requestFullscreen();} else {this.player_.exitFullscreen();}}}/*** The text that should display over the `FullscreenToggle`s controls. 
Added for localization.** @type {string}* @protected*/FullscreenToggle.prototype.controlText_ = 'Fullscreen';Component$1.registerComponent('FullscreenToggle', FullscreenToggle);/*** Check if volume control is supported and if it isn't hide the* `Component` that was passed using the `vjs-hidden` class.** @param { import('../../component').default } self* The component that should be hidden if volume is unsupported** @param { import('../../player').default } player* A reference to the player** @private*/const checkVolumeSupport = function (self, player) {// hide volume controls when they're not supported by the current techif (player.tech_ && !player.tech_.featuresVolumeControl) {self.addClass('vjs-hidden');}self.on(player, 'loadstart', function () {if (!player.tech_.featuresVolumeControl) {self.addClass('vjs-hidden');} else {self.removeClass('vjs-hidden');}});};/*** @file volume-level.js*//*** Shows volume level** @extends Component*/class VolumeLevel extends Component$1 {/*** Create the `Component`'s DOM element** @return {Element}* The element that was created.*/createEl() {const el = super.createEl('div', {className: 'vjs-volume-level'});this.setIcon('circle', el);el.appendChild(super.createEl('span', {className: 'vjs-control-text'}));return el;}}Component$1.registerComponent('VolumeLevel', VolumeLevel);/*** @file volume-level-tooltip.js*//*** Volume level tooltips display a volume above or side by side the volume bar.** @extends Component*/class VolumeLevelTooltip extends Component$1 {/*** Creates an instance of this class.** @param { import('../../player').default } player* The {@link Player} that this class should be attached to.** @param {Object} [options]* The key/value store of player options.*/constructor(player, options) {super(player, options);this.update = throttle(bind_(this, this.update), UPDATE_REFRESH_INTERVAL);}/*** Create the volume tooltip DOM element** @return {Element}* The element that was created.*/createEl() {return super.createEl('div', {className: 'vjs-volume-tooltip'}, {'aria-hidden': 'true'});}/*** Updates the position of the tooltip relative to the `VolumeBar` and* its content text.** @param {Object} rangeBarRect* The `ClientRect` for the {@link VolumeBar} element.** @param {number} rangeBarPoint* A number from 0 to 1, representing a horizontal/vertical reference point* from the left edge of the {@link VolumeBar}** @param {boolean} vertical* Referees to the Volume control position* in the control bar{@link VolumeControl}**/update(rangeBarRect, rangeBarPoint, vertical, content) {if (!vertical) {const tooltipRect = getBoundingClientRect(this.el_);const playerRect = getBoundingClientRect(this.player_.el());const volumeBarPointPx = rangeBarRect.width * rangeBarPoint;if (!playerRect || !tooltipRect) {return;}const spaceLeftOfPoint = rangeBarRect.left - playerRect.left + volumeBarPointPx;const spaceRightOfPoint = rangeBarRect.width - volumeBarPointPx + (playerRect.right - rangeBarRect.right);let pullTooltipBy = tooltipRect.width / 2;if (spaceLeftOfPoint < pullTooltipBy) {pullTooltipBy += pullTooltipBy - spaceLeftOfPoint;} else if (spaceRightOfPoint < pullTooltipBy) {pullTooltipBy = spaceRightOfPoint;}if (pullTooltipBy < 0) {pullTooltipBy = 0;} else if (pullTooltipBy > tooltipRect.width) {pullTooltipBy = tooltipRect.width;}this.el_.style.right = `-${pullTooltipBy}px`;}this.write(`${content}%`);}/*** Write the volume to the tooltip DOM element.** @param {string} content* The formatted volume for the tooltip.*/write(content) {textContent(this.el_, content);}/*** Updates 
the position of the volume tooltip relative to the `VolumeBar`.** @param {Object} rangeBarRect* The `ClientRect` for the {@link VolumeBar} element.** @param {number} rangeBarPoint* A number from 0 to 1, representing a horizontal/vertical reference point* from the left edge of the {@link VolumeBar}** @param {boolean} vertical* Referees to the Volume control position* in the control bar{@link VolumeControl}** @param {number} volume* The volume level to update the tooltip to** @param {Function} cb* A function that will be called during the request animation frame* for tooltips that need to do additional animations from the default*/updateVolume(rangeBarRect, rangeBarPoint, vertical, volume, cb) {this.requestNamedAnimationFrame('VolumeLevelTooltip#updateVolume', () => {this.update(rangeBarRect, rangeBarPoint, vertical, volume.toFixed(0));if (cb) {cb();}});}}Component$1.registerComponent('VolumeLevelTooltip', VolumeLevelTooltip);/*** @file mouse-volume-level-display.js*//*** The {@link MouseVolumeLevelDisplay} component tracks mouse movement over the* {@link VolumeControl}. It displays an indicator and a {@link VolumeLevelTooltip}* indicating the volume level which is represented by a given point in the* {@link VolumeBar}.** @extends Component*/class MouseVolumeLevelDisplay extends Component$1 {/*** Creates an instance of this class.** @param { import('../../player').default } player* The {@link Player} that this class should be attached to.** @param {Object} [options]* The key/value store of player options.*/constructor(player, options) {super(player, options);this.update = throttle(bind_(this, this.update), UPDATE_REFRESH_INTERVAL);}/*** Create the DOM element for this class.** @return {Element}* The element that was created.*/createEl() {return super.createEl('div', {className: 'vjs-mouse-display'});}/*** Enquires updates to its own DOM as well as the DOM of its* {@link VolumeLevelTooltip} child.** @param {Object} rangeBarRect* The `ClientRect` for the {@link VolumeBar} element.** @param {number} rangeBarPoint* A number from 0 to 1, representing a horizontal/vertical reference point* from the left edge of the {@link VolumeBar}** @param {boolean} vertical* Referees to the Volume control position* in the control bar{@link VolumeControl}**/update(rangeBarRect, rangeBarPoint, vertical) {const volume = 100 * rangeBarPoint;this.getChild('volumeLevelTooltip').updateVolume(rangeBarRect, rangeBarPoint, vertical, volume, () => {if (vertical) {this.el_.style.bottom = `${rangeBarRect.height * rangeBarPoint}px`;} else {this.el_.style.left = `${rangeBarRect.width * rangeBarPoint}px`;}});}}/*** Default options for `MouseVolumeLevelDisplay`** @type {Object}* @private*/MouseVolumeLevelDisplay.prototype.options_ = {children: ['volumeLevelTooltip']};Component$1.registerComponent('MouseVolumeLevelDisplay', MouseVolumeLevelDisplay);/*** @file volume-bar.js*//*** The bar that contains the volume level and can be clicked on to adjust the level** @extends Slider*/class VolumeBar extends Slider {/*** Creates an instance of this class.** @param { import('../../player').default } player* The `Player` that this class should be attached to.** @param {Object} [options]* The key/value store of player options.*/constructor(player, options) {super(player, options);this.on('slideractive', e => this.updateLastVolume_(e));this.on(player, 'volumechange', e => this.updateARIAAttributes(e));player.ready(() => this.updateARIAAttributes());}/*** Create the `Component`'s DOM element** @return {Element}* The element that was 
created.*/createEl() {return super.createEl('div', {className: 'vjs-volume-bar vjs-slider-bar'}, {'aria-label': this.localize('Volume Level'),'aria-live': 'polite'});}/*** Handle mouse down on volume bar** @param {Event} event* The `mousedown` event that caused this to run.** @listens mousedown*/handleMouseDown(event) {if (!isSingleLeftClick(event)) {return;}super.handleMouseDown(event);}/*** Handle movement events on the {@link VolumeMenuButton}.** @param {Event} event* The event that caused this function to run.** @listens mousemove*/handleMouseMove(event) {const mouseVolumeLevelDisplay = this.getChild('mouseVolumeLevelDisplay');if (mouseVolumeLevelDisplay) {const volumeBarEl = this.el();const volumeBarRect = getBoundingClientRect(volumeBarEl);const vertical = this.vertical();let volumeBarPoint = getPointerPosition(volumeBarEl, event);volumeBarPoint = vertical ? volumeBarPoint.y : volumeBarPoint.x;// The default skin has a gap on either side of the `VolumeBar`. This means// that it's possible to trigger this behavior outside the boundaries of// the `VolumeBar`. This ensures we stay within it at all times.volumeBarPoint = clamp(volumeBarPoint, 0, 1);mouseVolumeLevelDisplay.update(volumeBarRect, volumeBarPoint, vertical);}if (!isSingleLeftClick(event)) {return;}this.checkMuted();this.player_.volume(this.calculateDistance(event));}/*** If the player is muted unmute it.*/checkMuted() {if (this.player_.muted()) {this.player_.muted(false);}}/*** Get percent of volume level** @return {number}* Volume level percent as a decimal number.*/getPercent() {if (this.player_.muted()) {return 0;}return this.player_.volume();}/*** Increase volume level for keyboard users*/stepForward() {this.checkMuted();this.player_.volume(this.player_.volume() + 0.1);}/*** Decrease volume level for keyboard users*/stepBack() {this.checkMuted();this.player_.volume(this.player_.volume() - 0.1);}/*** Update ARIA accessibility attributes** @param {Event} [event]* The `volumechange` event that caused this function to run.** @listens Player#volumechange*/updateARIAAttributes(event) {const ariaValue = this.player_.muted() ? 0 : this.volumeAsPercentage_();this.el_.setAttribute('aria-valuenow', ariaValue);this.el_.setAttribute('aria-valuetext', ariaValue + '%');}/*** Returns the current value of the player volume as a percentage** @private*/volumeAsPercentage_() {return Math.round(this.player_.volume() * 100);}/*** When user starts dragging the VolumeBar, store the volume and listen for* the end of the drag. 
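*
* This slider ultimately drives the player-level volume API; a minimal sketch of
* that API (the 'my-player' element id is illustrative, not part of this file):
* @example
* const player = videojs('my-player');
* player.volume(0.5);   // set volume to 50%
* player.muted(true);   // mute without changing the stored volume
* player.on('volumechange', () => {
*   console.log(player.muted() ? 'muted' : `volume: ${player.volume()}`);
* });
*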
When the drag ends, if the volume was set to zero,* set lastVolume to the stored volume.** @listens slideractive* @private*/updateLastVolume_() {const volumeBeforeDrag = this.player_.volume();this.one('sliderinactive', () => {if (this.player_.volume() === 0) {this.player_.lastVolume_(volumeBeforeDrag);}});}}/*** Default options for the `VolumeBar`** @type {Object}* @private*/VolumeBar.prototype.options_ = {children: ['volumeLevel'],barName: 'volumeLevel'};// MouseVolumeLevelDisplay tooltip should not be added to a player on mobile devicesif (!IS_IOS && !IS_ANDROID) {VolumeBar.prototype.options_.children.splice(0, 0, 'mouseVolumeLevelDisplay');}/*** Call the update event for this Slider when this event happens on the player.** @type {string}*/VolumeBar.prototype.playerEvent = 'volumechange';Component$1.registerComponent('VolumeBar', VolumeBar);/*** @file volume-control.js*//*** The component for controlling the volume level** @extends Component*/class VolumeControl extends Component$1 {/*** Creates an instance of this class.** @param { import('../../player').default } player* The `Player` that this class should be attached to.** @param {Object} [options={}]* The key/value store of player options.*/constructor(player, options = {}) {options.vertical = options.vertical || false;// Pass the vertical option down to the VolumeBar if// the VolumeBar is turned on.if (typeof options.volumeBar === 'undefined' || isPlain(options.volumeBar)) {options.volumeBar = options.volumeBar || {};options.volumeBar.vertical = options.vertical;}super(player, options);// hide this control if volume support is missingcheckVolumeSupport(this, player);this.throttledHandleMouseMove = throttle(bind_(this, this.handleMouseMove), UPDATE_REFRESH_INTERVAL);this.handleMouseUpHandler_ = e => this.handleMouseUp(e);this.on('mousedown', e => this.handleMouseDown(e));this.on('touchstart', e => this.handleMouseDown(e));this.on('mousemove', e => this.handleMouseMove(e));// while the slider is active (the mouse has been pressed down and// is dragging) or in focus we do not want to hide the VolumeBarthis.on(this.volumeBar, ['focus', 'slideractive'], () => {this.volumeBar.addClass('vjs-slider-active');this.addClass('vjs-slider-active');this.trigger('slideractive');});this.on(this.volumeBar, ['blur', 'sliderinactive'], () => {this.volumeBar.removeClass('vjs-slider-active');this.removeClass('vjs-slider-active');this.trigger('sliderinactive');});}/*** Create the `Component`'s DOM element** @return {Element}* The element that was created.*/createEl() {let orientationClass = 'vjs-volume-horizontal';if (this.options_.vertical) {orientationClass = 'vjs-volume-vertical';}return super.createEl('div', {className: `vjs-volume-control vjs-control ${orientationClass}`});}/*** Handle `mousedown` or `touchstart` events on the `VolumeControl`.** @param {Event} event* `mousedown` or `touchstart` event that triggered this function** @listens mousedown* @listens touchstart*/handleMouseDown(event) {const doc = this.el_.ownerDocument;this.on(doc, 'mousemove', this.throttledHandleMouseMove);this.on(doc, 'touchmove', this.throttledHandleMouseMove);this.on(doc, 'mouseup', this.handleMouseUpHandler_);this.on(doc, 'touchend', this.handleMouseUpHandler_);}/*** Handle `mouseup` or `touchend` events on the `VolumeControl`.** @param {Event} event* `mouseup` or `touchend` event that triggered this function.** @listens touchend* @listens mouseup*/handleMouseUp(event) {const doc = this.el_.ownerDocument;this.off(doc, 'mousemove', 
this.throttledHandleMouseMove);this.off(doc, 'touchmove', this.throttledHandleMouseMove);this.off(doc, 'mouseup', this.handleMouseUpHandler_);this.off(doc, 'touchend', this.handleMouseUpHandler_);}/*** Handle `mousedown` or `touchstart` events on the `VolumeControl`.** @param {Event} event* `mousedown` or `touchstart` event that triggered this function** @listens mousedown* @listens touchstart*/handleMouseMove(event) {this.volumeBar.handleMouseMove(event);}}/*** Default options for the `VolumeControl`** @type {Object}* @private*/VolumeControl.prototype.options_ = {children: ['volumeBar']};Component$1.registerComponent('VolumeControl', VolumeControl);/*** Check if muting volume is supported and if it isn't hide the mute toggle* button.** @param { import('../../component').default } self* A reference to the mute toggle button** @param { import('../../player').default } player* A reference to the player** @private*/const checkMuteSupport = function (self, player) {// hide mute toggle button if it's not supported by the current techif (player.tech_ && !player.tech_.featuresMuteControl) {self.addClass('vjs-hidden');}self.on(player, 'loadstart', function () {if (!player.tech_.featuresMuteControl) {self.addClass('vjs-hidden');} else {self.removeClass('vjs-hidden');}});};/*** @file mute-toggle.js*//*** A button component for muting the audio.** @extends Button*/class MuteToggle extends Button {/*** Creates an instance of this class.** @param { import('./player').default } player* The `Player` that this class should be attached to.** @param {Object} [options]* The key/value store of player options.*/constructor(player, options) {super(player, options);// hide this control if volume support is missingcheckMuteSupport(this, player);this.on(player, ['loadstart', 'volumechange'], e => this.update(e));}/*** Builds the default DOM `className`.** @return {string}* The DOM `className` for this object.*/buildCSSClass() {return `vjs-mute-control ${super.buildCSSClass()}`;}/*** This gets called when an `MuteToggle` is "clicked". See* {@link ClickableComponent} for more detailed information on what a click can be.** @param {Event} [event]* The `keydown`, `tap`, or `click` event that caused this function to be* called.** @listens tap* @listens click*/handleClick(event) {const vol = this.player_.volume();const lastVolume = this.player_.lastVolume_();if (vol === 0) {const volumeToSet = lastVolume < 0.1 ? 0.1 : lastVolume;this.player_.volume(volumeToSet);this.player_.muted(false);} else {this.player_.muted(this.player_.muted() ? 
false : true);}}/*** Update the `MuteToggle` button based on the state of `volume` and `muted`* on the player.** @param {Event} [event]* The {@link Player#loadstart} event if this function was called* through an event.** @listens Player#loadstart* @listens Player#volumechange*/update(event) {this.updateIcon_();this.updateControlText_();}/*** Update the appearance of the `MuteToggle` icon.** Possible states (given `level` variable below):* - 0: crossed out* - 1: zero bars of volume* - 2: one bar of volume* - 3: two bars of volume** @private*/updateIcon_() {const vol = this.player_.volume();let level = 3;this.setIcon('volume-high');// in iOS when a player is loaded with muted attribute// and volume is changed with a native mute button// we want to make sure muted state is updatedif (IS_IOS && this.player_.tech_ && this.player_.tech_.el_) {this.player_.muted(this.player_.tech_.el_.muted);}if (vol === 0 || this.player_.muted()) {this.setIcon('volume-mute');level = 0;} else if (vol < 0.33) {this.setIcon('volume-low');level = 1;} else if (vol < 0.67) {this.setIcon('volume-medium');level = 2;}removeClass(this.el_, [0, 1, 2, 3].reduce((str, i) => str + `${i ? ' ' : ''}vjs-vol-${i}`, ''));addClass(this.el_, `vjs-vol-${level}`);}/*** If `muted` has changed on the player, update the control text* (`title` attribute on `vjs-mute-control` element and content of* `vjs-control-text` element).** @private*/updateControlText_() {const soundOff = this.player_.muted() || this.player_.volume() === 0;const text = soundOff ? 'Unmute' : 'Mute';if (this.controlText() !== text) {this.controlText(text);}}}/*** The text that should display over the `MuteToggle`s controls. Added for localization.** @type {string}* @protected*/MuteToggle.prototype.controlText_ = 'Mute';Component$1.registerComponent('MuteToggle', MuteToggle);/*** @file volume-control.js*//*** A Component to contain the MuteToggle and VolumeControl so that* they can work together.** @extends Component*/class VolumePanel extends Component$1 {/*** Creates an instance of this class.** @param { import('./player').default } player* The `Player` that this class should be attached to.** @param {Object} [options={}]* The key/value store of player options.*/constructor(player, options = {}) {if (typeof options.inline !== 'undefined') {options.inline = options.inline;} else {options.inline = true;}// pass the inline option down to the VolumeControl as vertical if// the VolumeControl is on.if (typeof options.volumeControl === 'undefined' || isPlain(options.volumeControl)) {options.volumeControl = options.volumeControl || {};options.volumeControl.vertical = !options.inline;}super(player, options);// this handler is used by mouse handler methods belowthis.handleKeyPressHandler_ = e => this.handleKeyPress(e);this.on(player, ['loadstart'], e => this.volumePanelState_(e));this.on(this.muteToggle, 'keyup', e => this.handleKeyPress(e));this.on(this.volumeControl, 'keyup', e => this.handleVolumeControlKeyUp(e));this.on('keydown', e => this.handleKeyPress(e));this.on('mouseover', e => this.handleMouseOver(e));this.on('mouseout', e => this.handleMouseOut(e));// while the slider is active (the mouse has been pressed down and// is dragging) we do not want to hide the VolumeBarthis.on(this.volumeControl, ['slideractive'], this.sliderActive_);this.on(this.volumeControl, ['sliderinactive'], this.sliderInactive_);}/*** Add vjs-slider-active class to the VolumePanel** @listens VolumeControl#slideractive* @private*/sliderActive_() {this.addClass('vjs-slider-active');}/*** Removes 
vjs-slider-active class to the VolumePanel** @listens VolumeControl#sliderinactive* @private*/sliderInactive_() {this.removeClass('vjs-slider-active');}/*** Adds vjs-hidden or vjs-mute-toggle-only to the VolumePanel* depending on MuteToggle and VolumeControl state** @listens Player#loadstart* @private*/volumePanelState_() {// hide volume panel if neither volume control or mute toggle// are displayedif (this.volumeControl.hasClass('vjs-hidden') && this.muteToggle.hasClass('vjs-hidden')) {this.addClass('vjs-hidden');}// if only mute toggle is visible we don't want// volume panel expanding when hovered or activeif (this.volumeControl.hasClass('vjs-hidden') && !this.muteToggle.hasClass('vjs-hidden')) {this.addClass('vjs-mute-toggle-only');}}/*** Create the `Component`'s DOM element** @return {Element}* The element that was created.*/createEl() {let orientationClass = 'vjs-volume-panel-horizontal';if (!this.options_.inline) {orientationClass = 'vjs-volume-panel-vertical';}return super.createEl('div', {className: `vjs-volume-panel vjs-control ${orientationClass}`});}/*** Dispose of the `volume-panel` and all child components.*/dispose() {this.handleMouseOut();super.dispose();}/*** Handles `keyup` events on the `VolumeControl`, looking for ESC, which closes* the volume panel and sets focus on `MuteToggle`.** @param {Event} event* The `keyup` event that caused this function to be called.** @listens keyup*/handleVolumeControlKeyUp(event) {if (keycode.isEventKey(event, 'Esc')) {this.muteToggle.focus();}}/*** This gets called when a `VolumePanel` gains hover via a `mouseover` event.* Turns on listening for `mouseover` event. When they happen it* calls `this.handleMouseOver`.** @param {Event} event* The `mouseover` event that caused this function to be called.** @listens mouseover*/handleMouseOver(event) {this.addClass('vjs-hover');on(document, 'keyup', this.handleKeyPressHandler_);}/*** This gets called when a `VolumePanel` gains hover via a `mouseout` event.* Turns on listening for `mouseout` event. When they happen it* calls `this.handleMouseOut`.** @param {Event} event* The `mouseout` event that caused this function to be called.** @listens mouseout*/handleMouseOut(event) {this.removeClass('vjs-hover');off(document, 'keyup', this.handleKeyPressHandler_);}/*** Handles `keyup` event on the document or `keydown` event on the `VolumePanel`,* looking for ESC, which hides the `VolumeControl`.** @param {Event} event* The keypress that triggered this event.** @listens keydown | keyup*/handleKeyPress(event) {if (keycode.isEventKey(event, 'Esc')) {this.handleMouseOut();}}}/*** Default options for the `VolumeControl`** @type {Object}* @private*/VolumePanel.prototype.options_ = {children: ['muteToggle', 'volumeControl']};Component$1.registerComponent('VolumePanel', VolumePanel);/*** Button to skip forward a configurable amount of time* through a video. Renders in the control bar.** e.g. 
options: {controlBar: {skipButtons: forward: 5}}** @extends Button*/class SkipForward extends Button {constructor(player, options) {super(player, options);this.validOptions = [5, 10, 30];this.skipTime = this.getSkipForwardTime();if (this.skipTime && this.validOptions.includes(this.skipTime)) {this.setIcon(`forward-${this.skipTime}`);this.controlText(this.localize('Skip forward {1} seconds', [this.skipTime]));this.show();} else {this.hide();}}getSkipForwardTime() {const playerOptions = this.options_.playerOptions;return playerOptions.controlBar && playerOptions.controlBar.skipButtons && playerOptions.controlBar.skipButtons.forward;}buildCSSClass() {return `vjs-skip-forward-${this.getSkipForwardTime()} ${super.buildCSSClass()}`;}/*** On click, skips forward in the duration/seekable range by a configurable amount of seconds.* If the time left in the duration/seekable range is less than the configured 'skip forward' time,* skips to end of duration/seekable range.** Handle a click on a `SkipForward` button** @param {EventTarget~Event} event* The `click` event that caused this function* to be called*/handleClick(event) {if (isNaN(this.player_.duration())) {return;}const currentVideoTime = this.player_.currentTime();const liveTracker = this.player_.liveTracker;const duration = liveTracker && liveTracker.isLive() ? liveTracker.seekableEnd() : this.player_.duration();let newTime;if (currentVideoTime + this.skipTime <= duration) {newTime = currentVideoTime + this.skipTime;} else {newTime = duration;}this.player_.currentTime(newTime);}/*** Update control text on languagechange*/handleLanguagechange() {this.controlText(this.localize('Skip forward {1} seconds', [this.skipTime]));}}SkipForward.prototype.controlText_ = 'Skip Forward';Component$1.registerComponent('SkipForward', SkipForward);/*** Button to skip backward a configurable amount of time* through a video. Renders in the control bar.** * e.g. 
options: {controlBar: {skipButtons: backward: 5}}** @extends Button*/class SkipBackward extends Button {constructor(player, options) {super(player, options);this.validOptions = [5, 10, 30];this.skipTime = this.getSkipBackwardTime();if (this.skipTime && this.validOptions.includes(this.skipTime)) {this.setIcon(`replay-${this.skipTime}`);this.controlText(this.localize('Skip backward {1} seconds', [this.skipTime]));this.show();} else {this.hide();}}getSkipBackwardTime() {const playerOptions = this.options_.playerOptions;return playerOptions.controlBar && playerOptions.controlBar.skipButtons && playerOptions.controlBar.skipButtons.backward;}buildCSSClass() {return `vjs-skip-backward-${this.getSkipBackwardTime()} ${super.buildCSSClass()}`;}/*** On click, skips backward in the video by a configurable amount of seconds.* If the current time in the video is less than the configured 'skip backward' time,* skips to beginning of video or seekable range.** Handle a click on a `SkipBackward` button** @param {EventTarget~Event} event* The `click` event that caused this function* to be called*/handleClick(event) {const currentVideoTime = this.player_.currentTime();const liveTracker = this.player_.liveTracker;const seekableStart = liveTracker && liveTracker.isLive() && liveTracker.seekableStart();let newTime;if (seekableStart && currentVideoTime - this.skipTime <= seekableStart) {newTime = seekableStart;} else if (currentVideoTime >= this.skipTime) {newTime = currentVideoTime - this.skipTime;} else {newTime = 0;}this.player_.currentTime(newTime);}/*** Update control text on languagechange*/handleLanguagechange() {this.controlText(this.localize('Skip backward {1} seconds', [this.skipTime]));}}SkipBackward.prototype.controlText_ = 'Skip Backward';Component$1.registerComponent('SkipBackward', SkipBackward);/*** @file menu.js*//*** The Menu component is used to build popup menus, including subtitle and* captions selection menus.** @extends Component*/class Menu extends Component$1 {/*** Create an instance of this class.** @param { import('../player').default } player* the player that this component should attach to** @param {Object} [options]* Object of option names and values**/constructor(player, options) {super(player, options);if (options) {this.menuButton_ = options.menuButton;}this.focusedChild_ = -1;this.on('keydown', e => this.handleKeyDown(e));// All the menu item instances share the same blur handler provided by the menu container.this.boundHandleBlur_ = e => this.handleBlur(e);this.boundHandleTapClick_ = e => this.handleTapClick(e);}/*** Add event listeners to the {@link MenuItem}.** @param {Object} component* The instance of the `MenuItem` to add listeners to.**/addEventListenerForItem(component) {if (!(component instanceof Component$1)) {return;}this.on(component, 'blur', this.boundHandleBlur_);this.on(component, ['tap', 'click'], this.boundHandleTapClick_);}/*** Remove event listeners from the {@link MenuItem}.** @param {Object} component* The instance of the `MenuItem` to remove listeners.**/removeEventListenerForItem(component) {if (!(component instanceof Component$1)) {return;}this.off(component, 'blur', this.boundHandleBlur_);this.off(component, ['tap', 'click'], this.boundHandleTapClick_);}/*** This method will be called indirectly when the component has been added* before the component adds to the new menu instance by `addItem`.* In this case, the original menu instance will remove the component* by calling `removeChild`.** @param {Object} component* The instance of the 
`MenuItem`*/removeChild(component) {if (typeof component === 'string') {component = this.getChild(component);}this.removeEventListenerForItem(component);super.removeChild(component);}/*** Add a {@link MenuItem} to the menu.** @param {Object|string} component* The name or instance of the `MenuItem` to add.**/addItem(component) {const childComponent = this.addChild(component);if (childComponent) {this.addEventListenerForItem(childComponent);}}/*** Create the `Menu`s DOM element.** @return {Element}* the element that was created*/createEl() {const contentElType = this.options_.contentElType || 'ul';this.contentEl_ = createEl(contentElType, {className: 'vjs-menu-content'});this.contentEl_.setAttribute('role', 'menu');const el = super.createEl('div', {append: this.contentEl_,className: 'vjs-menu'});el.appendChild(this.contentEl_);// Prevent clicks from bubbling up. Needed for Menu Buttons,// where a click on the parent is significanton(el, 'click', function (event) {event.preventDefault();event.stopImmediatePropagation();});return el;}dispose() {this.contentEl_ = null;this.boundHandleBlur_ = null;this.boundHandleTapClick_ = null;super.dispose();}/*** Called when a `MenuItem` loses focus.** @param {Event} event* The `blur` event that caused this function to be called.** @listens blur*/handleBlur(event) {const relatedTarget = event.relatedTarget || document.activeElement;// Close menu popup when a user clicks outside the menuif (!this.children().some(element => {return element.el() === relatedTarget;})) {const btn = this.menuButton_;if (btn && btn.buttonPressed_ && relatedTarget !== btn.el().firstChild) {btn.unpressButton();}}}/*** Called when a `MenuItem` gets clicked or tapped.** @param {Event} event* The `click` or `tap` event that caused this function to be called.** @listens click,tap*/handleTapClick(event) {// Unpress the associated MenuButton, and move focus back to itif (this.menuButton_) {this.menuButton_.unpressButton();const childComponents = this.children();if (!Array.isArray(childComponents)) {return;}const foundComponent = childComponents.filter(component => component.el() === event.target)[0];if (!foundComponent) {return;}// don't focus menu button if item is a caption settings item// because focus will move elsewhereif (foundComponent.name() !== 'CaptionSettingsMenuItem') {this.menuButton_.focus();}}}/*** Handle a `keydown` event on this menu. 
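*
* For context, menus like this one are built internally by `MenuButton#createMenu`,
* but the same pieces are available from the component registry; a rough sketch,
* with illustrative labels and an assumed 'my-player' element id:
* @example
* const player = videojs('my-player');
* const Menu = videojs.getComponent('Menu');
* const MenuItem = videojs.getComponent('MenuItem');
* const menu = new Menu(player);
* menu.addItem(new MenuItem(player, { label: 'First item', selectable: true }));
* menu.addItem(new MenuItem(player, { label: 'Second item', selectable: true }));
*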
This listener is added in the constructor.** @param {KeyboardEvent} event* A `keydown` event that happened on the menu.** @listens keydown*/handleKeyDown(event) {// Left and Down Arrowsif (keycode.isEventKey(event, 'Left') || keycode.isEventKey(event, 'Down')) {event.preventDefault();event.stopPropagation();this.stepForward();// Up and Right Arrows} else if (keycode.isEventKey(event, 'Right') || keycode.isEventKey(event, 'Up')) {event.preventDefault();event.stopPropagation();this.stepBack();}}/*** Move to next (lower) menu item for keyboard users.*/stepForward() {let stepChild = 0;if (this.focusedChild_ !== undefined) {stepChild = this.focusedChild_ + 1;}this.focus(stepChild);}/*** Move to previous (higher) menu item for keyboard users.*/stepBack() {let stepChild = 0;if (this.focusedChild_ !== undefined) {stepChild = this.focusedChild_ - 1;}this.focus(stepChild);}/*** Set focus on a {@link MenuItem} in the `Menu`.** @param {Object|string} [item=0]* Index of child item set focus on.*/focus(item = 0) {const children = this.children().slice();const haveTitle = children.length && children[0].hasClass('vjs-menu-title');if (haveTitle) {children.shift();}if (children.length > 0) {if (item < 0) {item = 0;} else if (item >= children.length) {item = children.length - 1;}this.focusedChild_ = item;children[item].el_.focus();}}}Component$1.registerComponent('Menu', Menu);/*** @file menu-button.js*//*** A `MenuButton` class for any popup {@link Menu}.** @extends Component*/class MenuButton extends Component$1 {/*** Creates an instance of this class.** @param { import('../player').default } player* The `Player` that this class should be attached to.** @param {Object} [options={}]* The key/value store of player options.*/constructor(player, options = {}) {super(player, options);this.menuButton_ = new Button(player, options);this.menuButton_.controlText(this.controlText_);this.menuButton_.el_.setAttribute('aria-haspopup', 'true');// Add buildCSSClass values to the button, not the wrapperconst buttonClass = Button.prototype.buildCSSClass();this.menuButton_.el_.className = this.buildCSSClass() + ' ' + buttonClass;this.menuButton_.removeClass('vjs-control');this.addChild(this.menuButton_);this.update();this.enabled_ = true;const handleClick = e => this.handleClick(e);this.handleMenuKeyUp_ = e => this.handleMenuKeyUp(e);this.on(this.menuButton_, 'tap', handleClick);this.on(this.menuButton_, 'click', handleClick);this.on(this.menuButton_, 'keydown', e => this.handleKeyDown(e));this.on(this.menuButton_, 'mouseenter', () => {this.addClass('vjs-hover');this.menu.show();on(document, 'keyup', this.handleMenuKeyUp_);});this.on('mouseleave', e => this.handleMouseLeave(e));this.on('keydown', e => this.handleSubmenuKeyDown(e));}/*** Update the menu based on the current state of its items.*/update() {const menu = this.createMenu();if (this.menu) {this.menu.dispose();this.removeChild(this.menu);}this.menu = menu;this.addChild(menu);/*** Track the state of the menu button** @type {Boolean}* @private*/this.buttonPressed_ = false;this.menuButton_.el_.setAttribute('aria-expanded', 'false');if (this.items && this.items.length <= this.hideThreshold_) {this.hide();this.menu.contentEl_.removeAttribute('role');} else {this.show();this.menu.contentEl_.setAttribute('role', 'menu');}}/*** Create the menu and add all items to it.** @return {Menu}* The constructed menu*/createMenu() {const menu = new Menu(this.player_, {menuButton: this});/*** Hide the menu if the number of items is less than or equal to this threshold. 
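*
* Subclasses normally decide what ends up in this menu by overriding `createItems()`;
* a sketch of that extension pattern (the class name, labels and 'my-player' id are
* illustrative):
* @example
* const MenuButton = videojs.getComponent('MenuButton');
* const MenuItem = videojs.getComponent('MenuItem');
* class QualityMenuButton extends MenuButton {
*   createItems() {
*     return ['Auto', '720p', '1080p'].map(label =>
*       new MenuItem(this.player_, { label, selectable: true }));
*   }
* }
* videojs.registerComponent('QualityMenuButton', QualityMenuButton);
* const player = videojs('my-player');
* player.getChild('controlBar').addChild('QualityMenuButton');
*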
This defaults* to 0 and whenever we add items which can be hidden to the menu we'll increment it. We list* it here because every time we run `createMenu` we need to reset the value.** @protected* @type {Number}*/this.hideThreshold_ = 0;// Add a title list item to the topif (this.options_.title) {const titleEl = createEl('li', {className: 'vjs-menu-title',textContent: toTitleCase$1(this.options_.title),tabIndex: -1});const titleComponent = new Component$1(this.player_, {el: titleEl});menu.addItem(titleComponent);}this.items = this.createItems();if (this.items) {// Add menu items to the menufor (let i = 0; i < this.items.length; i++) {menu.addItem(this.items[i]);}}return menu;}/*** Create the list of menu items. Specific to each subclass.** @abstract*/createItems() {}/*** Create the `MenuButtons`s DOM element.** @return {Element}* The element that gets created.*/createEl() {return super.createEl('div', {className: this.buildWrapperCSSClass()}, {});}/*** Overwrites the `setIcon` method from `Component`.* In this case, we want the icon to be appended to the menuButton.** @param {string} name* The icon name to be added.*/setIcon(name) {super.setIcon(name, this.menuButton_.el_);}/*** Allow sub components to stack CSS class names for the wrapper element** @return {string}* The constructed wrapper DOM `className`*/buildWrapperCSSClass() {let menuButtonClass = 'vjs-menu-button';// If the inline option is passed, we want to use different styles altogether.if (this.options_.inline === true) {menuButtonClass += '-inline';} else {menuButtonClass += '-popup';}// TODO: Fix the CSS so that this isn't necessaryconst buttonClass = Button.prototype.buildCSSClass();return `vjs-menu-button ${menuButtonClass} ${buttonClass} ${super.buildCSSClass()}`;}/*** Builds the default DOM `className`.** @return {string}* The DOM `className` for this object.*/buildCSSClass() {let menuButtonClass = 'vjs-menu-button';// If the inline option is passed, we want to use different styles altogether.if (this.options_.inline === true) {menuButtonClass += '-inline';} else {menuButtonClass += '-popup';}return `vjs-menu-button ${menuButtonClass} ${super.buildCSSClass()}`;}/*** Get or set the localized control text that will be used for accessibility.** > NOTE: This will come from the internal `menuButton_` element.** @param {string} [text]* Control text for element.** @param {Element} [el=this.menuButton_.el()]* Element to set the title on.** @return {string}* - The control text when getting*/controlText(text, el = this.menuButton_.el()) {return this.menuButton_.controlText(text, el);}/*** Dispose of the `menu-button` and all child components.*/dispose() {this.handleMouseLeave();super.dispose();}/*** Handle a click on a `MenuButton`.* See {@link ClickableComponent#handleClick} for instances where this is called.** @param {Event} event* The `keydown`, `tap`, or `click` event that caused this function to be* called.** @listens tap* @listens click*/handleClick(event) {if (this.buttonPressed_) {this.unpressButton();} else {this.pressButton();}}/*** Handle `mouseleave` for `MenuButton`.** @param {Event} event* The `mouseleave` event that caused this function to be called.** @listens mouseleave*/handleMouseLeave(event) {this.removeClass('vjs-hover');off(document, 'keyup', this.handleMenuKeyUp_);}/*** Set the focus to the actual button, not to this element*/focus() {this.menuButton_.focus();}/*** Remove the focus from the actual button, not this element*/blur() {this.menuButton_.blur();}/*** Handle tab, escape, down arrow, and up arrow keys 
for `MenuButton`. See* {@link ClickableComponent#handleKeyDown} for instances where this is called.** @param {Event} event* The `keydown` event that caused this function to be called.** @listens keydown*/handleKeyDown(event) {// Escape or Tab unpress the 'button'if (keycode.isEventKey(event, 'Esc') || keycode.isEventKey(event, 'Tab')) {if (this.buttonPressed_) {this.unpressButton();}// Don't preventDefault for Tab key - we still want to lose focusif (!keycode.isEventKey(event, 'Tab')) {event.preventDefault();// Set focus back to the menu button's buttonthis.menuButton_.focus();}// Up Arrow or Down Arrow also 'press' the button to open the menu} else if (keycode.isEventKey(event, 'Up') || keycode.isEventKey(event, 'Down')) {if (!this.buttonPressed_) {event.preventDefault();this.pressButton();}}}/*** Handle a `keyup` event on a `MenuButton`. The listener for this is added in* the constructor.** @param {Event} event* Key press event** @listens keyup*/handleMenuKeyUp(event) {// Escape hides popup menuif (keycode.isEventKey(event, 'Esc') || keycode.isEventKey(event, 'Tab')) {this.removeClass('vjs-hover');}}/*** This method name now delegates to `handleSubmenuKeyDown`. This means* anyone calling `handleSubmenuKeyPress` will not see their method calls* stop working.** @param {Event} event* The event that caused this function to be called.*/handleSubmenuKeyPress(event) {this.handleSubmenuKeyDown(event);}/*** Handle a `keydown` event on a sub-menu. The listener for this is added in* the constructor.** @param {Event} event* Key press event** @listens keydown*/handleSubmenuKeyDown(event) {// Escape or Tab unpress the 'button'if (keycode.isEventKey(event, 'Esc') || keycode.isEventKey(event, 'Tab')) {if (this.buttonPressed_) {this.unpressButton();}// Don't preventDefault for Tab key - we still want to lose focusif (!keycode.isEventKey(event, 'Tab')) {event.preventDefault();// Set focus back to the menu button's buttonthis.menuButton_.focus();}}}/*** Put the current `MenuButton` into a pressed state.*/pressButton() {if (this.enabled_) {this.buttonPressed_ = true;this.menu.show();this.menu.lockShowing();this.menuButton_.el_.setAttribute('aria-expanded', 'true');// set the focus into the submenu, except on iOS where it is resulting in// undesired scrolling behavior when the player is in an iframeif (IS_IOS && isInFrame()) {// Return early so that the menu isn't focusedreturn;}this.menu.focus();}}/*** Take the current `MenuButton` out of a pressed state.*/unpressButton() {if (this.enabled_) {this.buttonPressed_ = false;this.menu.unlockShowing();this.menu.hide();this.menuButton_.el_.setAttribute('aria-expanded', 'false');}}/*** Disable the `MenuButton`. Don't allow it to be clicked.*/disable() {this.unpressButton();this.enabled_ = false;this.addClass('vjs-disabled');this.menuButton_.disable();}/*** Enable the `MenuButton`. Allow it to be clicked.*/enable() {this.enabled_ = true;this.removeClass('vjs-disabled');this.menuButton_.enable();}}Component$1.registerComponent('MenuButton', MenuButton);/*** @file track-button.js*//*** The base class for buttons that toggle specific track types (e.g. 
subtitles).** @extends MenuButton*/class TrackButton extends MenuButton {/*** Creates an instance of this class.** @param { import('./player').default } player* The `Player` that this class should be attached to.** @param {Object} [options]* The key/value store of player options.*/constructor(player, options) {const tracks = options.tracks;super(player, options);if (this.items.length <= 1) {this.hide();}if (!tracks) {return;}const updateHandler = bind_(this, this.update);tracks.addEventListener('removetrack', updateHandler);tracks.addEventListener('addtrack', updateHandler);tracks.addEventListener('labelchange', updateHandler);this.player_.on('ready', updateHandler);this.player_.on('dispose', function () {tracks.removeEventListener('removetrack', updateHandler);tracks.removeEventListener('addtrack', updateHandler);tracks.removeEventListener('labelchange', updateHandler);});}}Component$1.registerComponent('TrackButton', TrackButton);/*** @file menu-keys.js*//*** All keys used for operation of a menu (`MenuButton`, `Menu`, and `MenuItem`)* Note that 'Enter' and 'Space' are not included here (otherwise they would* prevent the `MenuButton` and `MenuItem` from being keyboard-clickable)** @typedef MenuKeys* @array*/const MenuKeys = ['Tab', 'Esc', 'Up', 'Down', 'Right', 'Left'];/*** @file menu-item.js*//*** The component for a menu item. `<li>`** @extends ClickableComponent*/class MenuItem extends ClickableComponent {/*** Creates an instance of the this class.** @param { import('../player').default } player* The `Player` that this class should be attached to.** @param {Object} [options={}]* The key/value store of player options.**/constructor(player, options) {super(player, options);this.selectable = options.selectable;this.isSelected_ = options.selected || false;this.multiSelectable = options.multiSelectable;this.selected(this.isSelected_);if (this.selectable) {if (this.multiSelectable) {this.el_.setAttribute('role', 'menuitemcheckbox');} else {this.el_.setAttribute('role', 'menuitemradio');}} else {this.el_.setAttribute('role', 'menuitem');}}/*** Create the `MenuItem's DOM element** @param {string} [type=li]* Element's node type, not actually used, always set to `li`.** @param {Object} [props={}]* An object of properties that should be set on the element** @param {Object} [attrs={}]* An object of attributes that should be set on the element** @return {Element}* The element that gets created.*/createEl(type, props, attrs) {// The control is textual, not just an iconthis.nonIconControl = true;const el = super.createEl('li', Object.assign({className: 'vjs-menu-item',tabIndex: -1}, props), attrs);// swap icon with menu item text.const menuItemEl = createEl('span', {className: 'vjs-menu-item-text',textContent: this.localize(this.options_.label)});// If using SVG icons, the element with vjs-icon-placeholder will be added separately.if (this.player_.options_.experimentalSvgIcons) {el.appendChild(menuItemEl);} else {el.replaceChild(menuItemEl, el.querySelector('.vjs-icon-placeholder'));}return el;}/*** Ignore keys which are used by the menu, but pass any other ones up. 
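*
* Apart from keyboard handling, the main public surface of a menu item is its
* selected state; a small sketch (the label is illustrative and `player` is an
* existing player instance):
* @example
* const MenuItem = videojs.getComponent('MenuItem');
* const item = new MenuItem(player, { label: 'English', selectable: true, multiSelectable: false });
* item.selected(true);  // adds 'vjs-selected' and sets aria-checked="true"
* console.log(item.hasClass('vjs-selected')); // true
*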
See* {@link ClickableComponent#handleKeyDown} for instances where this is called.** @param {KeyboardEvent} event* The `keydown` event that caused this function to be called.** @listens keydown*/handleKeyDown(event) {if (!MenuKeys.some(key => keycode.isEventKey(event, key))) {// Pass keydown handling up for unused keyssuper.handleKeyDown(event);}}/*** Any click on a `MenuItem` puts it into the selected state.* See {@link ClickableComponent#handleClick} for instances where this is called.** @param {Event} event* The `keydown`, `tap`, or `click` event that caused this function to be* called.** @listens tap* @listens click*/handleClick(event) {this.selected(true);}/*** Set the state for this menu item as selected or not.** @param {boolean} selected* if the menu item is selected or not*/selected(selected) {if (this.selectable) {if (selected) {this.addClass('vjs-selected');this.el_.setAttribute('aria-checked', 'true');// aria-checked isn't fully supported by browsers/screen readers,// so indicate selected state to screen reader in the control text.this.controlText(', selected');this.isSelected_ = true;} else {this.removeClass('vjs-selected');this.el_.setAttribute('aria-checked', 'false');// Indicate un-selected state to screen readerthis.controlText('');this.isSelected_ = false;}}}}Component$1.registerComponent('MenuItem', MenuItem);/*** @file text-track-menu-item.js*//*** The specific menu item type for selecting a language within a text track kind** @extends MenuItem*/class TextTrackMenuItem extends MenuItem {/*** Creates an instance of this class.** @param { import('../../player').default } player* The `Player` that this class should be attached to.** @param {Object} [options]* The key/value store of player options.*/constructor(player, options) {const track = options.track;const tracks = player.textTracks();// Modify options for parent MenuItem class's init.options.label = track.label || track.language || 'Unknown';options.selected = track.mode === 'showing';super(player, options);this.track = track;// Determine the relevant kind(s) of tracks for this component and filter// out empty kinds.this.kinds = (options.kinds || [options.kind || this.track.kind]).filter(Boolean);const changeHandler = (...args) => {this.handleTracksChange.apply(this, args);};const selectedLanguageChangeHandler = (...args) => {this.handleSelectedLanguageChange.apply(this, args);};player.on(['loadstart', 'texttrackchange'], changeHandler);tracks.addEventListener('change', changeHandler);tracks.addEventListener('selectedlanguagechange', selectedLanguageChangeHandler);this.on('dispose', function () {player.off(['loadstart', 'texttrackchange'], changeHandler);tracks.removeEventListener('change', changeHandler);tracks.removeEventListener('selectedlanguagechange', selectedLanguageChangeHandler);});// iOS7 doesn't dispatch change events to TextTrackLists when an// associated track's mode changes. Without something like// Object.observe() (also not present on iOS7), it's not// possible to detect changes to the mode attribute and polyfill// the change event. 
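*
* The `mode` attribute in question is part of the standard text-track API that this
* menu item drives; a sketch of switching it directly (the 'en' language filter is
* illustrative and `player` is an existing player instance):
* @example
* const tracks = player.textTracks();
* for (let i = 0; i < tracks.length; i++) {
*   const track = tracks[i];
*   if (track.kind !== 'subtitles') { continue; } // leave other kinds alone
*   track.mode = track.language === 'en' ? 'showing' : 'disabled';
* }
*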
As a poor substitute, we manually dispatch// change events whenever the controls modify the mode.if (tracks.onchange === undefined) {let event;this.on(['tap', 'click'], function () {if (typeof window.Event !== 'object') {// Android 2.3 throws an Illegal Constructor error for window.Eventtry {event = new window.Event('change');} catch (err) {// continue regardless of error}}if (!event) {event = document.createEvent('Event');event.initEvent('change', true, true);}tracks.dispatchEvent(event);});}// set the default state based on current tracksthis.handleTracksChange();}/*** This gets called when an `TextTrackMenuItem` is "clicked". See* {@link ClickableComponent} for more detailed information on what a click can be.** @param {Event} event* The `keydown`, `tap`, or `click` event that caused this function to be* called.** @listens tap* @listens click*/handleClick(event) {const referenceTrack = this.track;const tracks = this.player_.textTracks();super.handleClick(event);if (!tracks) {return;}for (let i = 0; i < tracks.length; i++) {const track = tracks[i];// If the track from the text tracks list is not of the right kind,// skip it. We do not want to affect tracks of incompatible kind(s).if (this.kinds.indexOf(track.kind) === -1) {continue;}// If this text track is the component's track and it is not showing,// set it to showing.if (track === referenceTrack) {if (track.mode !== 'showing') {track.mode = 'showing';}// If this text track is not the component's track and it is not// disabled, set it to disabled.} else if (track.mode !== 'disabled') {track.mode = 'disabled';}}}/*** Handle text track list change** @param {Event} event* The `change` event that caused this function to be called.** @listens TextTrackList#change*/handleTracksChange(event) {const shouldBeSelected = this.track.mode === 'showing';// Prevent redundant selected() calls because they may cause// screen readers to read the appended control text unnecessarilyif (shouldBeSelected !== this.isSelected_) {this.selected(shouldBeSelected);}}handleSelectedLanguageChange(event) {if (this.track.mode === 'showing') {const selectedLanguage = this.player_.cache_.selectedLanguage;// Don't replace the kind of track across the same languageif (selectedLanguage && selectedLanguage.enabled && selectedLanguage.language === this.track.language && selectedLanguage.kind !== this.track.kind) {return;}this.player_.cache_.selectedLanguage = {enabled: true,language: this.track.language,kind: this.track.kind};}}dispose() {// remove reference to track object on disposethis.track = null;super.dispose();}}Component$1.registerComponent('TextTrackMenuItem', TextTrackMenuItem);/*** @file off-text-track-menu-item.js*//*** A special menu item for turning off a specific type of text track** @extends TextTrackMenuItem*/class OffTextTrackMenuItem extends TextTrackMenuItem {/*** Creates an instance of this class.** @param { import('../../player').default } player* The `Player` that this class should be attached to.** @param {Object} [options]* The key/value store of player options.*/constructor(player, options) {// Create pseudo track info// Requires options['kind']options.track = {player,// it is no longer necessary to store `kind` or `kinds` on the track itself// since they are now stored in the `kinds` property of all instances of// TextTrackMenuItem, but this will remain for backwards compatibilitykind: options.kind,kinds: options.kinds,default: false,mode: 'disabled'};if (!options.kinds) {options.kinds = [options.kind];}if (options.label) {options.track.label = 
options.label;} else {options.track.label = options.kinds.join(' and ') + ' off';}// MenuItem is selectableoptions.selectable = true;// MenuItem is NOT multiSelectable (i.e. only one can be marked "selected" at a time)options.multiSelectable = false;super(player, options);}/*** Handle text track change** @param {Event} event* The event that caused this function to run*/handleTracksChange(event) {const tracks = this.player().textTracks();let shouldBeSelected = true;for (let i = 0, l = tracks.length; i < l; i++) {const track = tracks[i];if (this.options_.kinds.indexOf(track.kind) > -1 && track.mode === 'showing') {shouldBeSelected = false;break;}}// Prevent redundant selected() calls because they may cause// screen readers to read the appended control text unnecessarilyif (shouldBeSelected !== this.isSelected_) {this.selected(shouldBeSelected);}}handleSelectedLanguageChange(event) {const tracks = this.player().textTracks();let allHidden = true;for (let i = 0, l = tracks.length; i < l; i++) {const track = tracks[i];if (['captions', 'descriptions', 'subtitles'].indexOf(track.kind) > -1 && track.mode === 'showing') {allHidden = false;break;}}if (allHidden) {this.player_.cache_.selectedLanguage = {enabled: false};}}/*** Update control text and label on languagechange*/handleLanguagechange() {this.$('.vjs-menu-item-text').textContent = this.player_.localize(this.options_.label);super.handleLanguagechange();}}Component$1.registerComponent('OffTextTrackMenuItem', OffTextTrackMenuItem);/*** @file text-track-button.js*//*** The base class for buttons that toggle specific text track types (e.g. subtitles)** @extends MenuButton*/class TextTrackButton extends TrackButton {/*** Creates an instance of this class.** @param { import('../../player').default } player* The `Player` that this class should be attached to.** @param {Object} [options={}]* The key/value store of player options.*/constructor(player, options = {}) {options.tracks = player.textTracks();super(player, options);}/*** Create a menu item for each text track** @param {TextTrackMenuItem[]} [items=[]]* Existing array of items to use during creation** @return {TextTrackMenuItem[]}* Array of menu items that were created*/createItems(items = [], TrackMenuItem = TextTrackMenuItem) {// Label is an override for the [track] off label// USed to localise captions/subtitleslet label;if (this.label_) {label = `${this.label_} off`;}// Add an OFF menu item to turn all tracks offitems.push(new OffTextTrackMenuItem(this.player_, {kinds: this.kinds_,kind: this.kind_,label}));this.hideThreshold_ += 1;const tracks = this.player_.textTracks();if (!Array.isArray(this.kinds_)) {this.kinds_ = [this.kind_];}for (let i = 0; i < tracks.length; i++) {const track = tracks[i];// only add tracks that are of an appropriate kind and have a labelif (this.kinds_.indexOf(track.kind) > -1) {const item = new TrackMenuItem(this.player_, {track,kinds: this.kinds_,kind: this.kind_,// MenuItem is selectableselectable: true,// MenuItem is NOT multiSelectable (i.e. 
only one can be marked "selected" at a time)multiSelectable: false});item.addClass(`vjs-${track.kind}-menu-item`);items.push(item);}}return items;}}Component$1.registerComponent('TextTrackButton', TextTrackButton);/*** @file chapters-track-menu-item.js*//*** The chapter track menu item** @extends MenuItem*/class ChaptersTrackMenuItem extends MenuItem {/*** Creates an instance of this class.** @param { import('../../player').default } player* The `Player` that this class should be attached to.** @param {Object} [options]* The key/value store of player options.*/constructor(player, options) {const track = options.track;const cue = options.cue;const currentTime = player.currentTime();// Modify options for parent MenuItem class's init.options.selectable = true;options.multiSelectable = false;options.label = cue.text;options.selected = cue.startTime <= currentTime && currentTime < cue.endTime;super(player, options);this.track = track;this.cue = cue;}/*** This gets called when an `ChaptersTrackMenuItem` is "clicked". See* {@link ClickableComponent} for more detailed information on what a click can be.** @param {Event} [event]* The `keydown`, `tap`, or `click` event that caused this function to be* called.** @listens tap* @listens click*/handleClick(event) {super.handleClick();this.player_.currentTime(this.cue.startTime);}}Component$1.registerComponent('ChaptersTrackMenuItem', ChaptersTrackMenuItem);/*** @file chapters-button.js*//*** The button component for toggling and selecting chapters* Chapters act much differently than other text tracks* Cues are navigation vs. other tracks of alternative languages** @extends TextTrackButton*/class ChaptersButton extends TextTrackButton {/*** Creates an instance of this class.** @param { import('../../player').default } player* The `Player` that this class should be attached to.** @param {Object} [options]* The key/value store of player options.** @param {Function} [ready]* The function to call when this function is ready.*/constructor(player, options, ready) {super(player, options, ready);this.setIcon('chapters');this.selectCurrentItem_ = () => {this.items.forEach(item => {item.selected(this.track_.activeCues[0] === item.cue);});};}/*** Builds the default DOM `className`.** @return {string}* The DOM `className` for this object.*/buildCSSClass() {return `vjs-chapters-button ${super.buildCSSClass()}`;}buildWrapperCSSClass() {return `vjs-chapters-button ${super.buildWrapperCSSClass()}`;}/*** Update the menu based on the current state of its items.** @param {Event} [event]* An event that triggered this function to run.** @listens TextTrackList#addtrack* @listens TextTrackList#removetrack* @listens TextTrackList#change*/update(event) {if (event && event.track && event.track.kind !== 'chapters') {return;}const track = this.findChaptersTrack();if (track !== this.track_) {this.setTrack(track);super.update();} else if (!this.items || track && track.cues && track.cues.length !== this.items.length) {// Update the menu initially or if the number of cues has changed since setsuper.update();}}/*** Set the currently selected track for the chapters button.** @param {TextTrack} track* The new track to select. 
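*
* Chapters tracks are usually supplied declaratively in the markup or through
* `addRemoteTextTrack`; a sketch of the latter (URL and label illustrative,
* `player` an existing player instance):
* @example
* player.addRemoteTextTrack({
*   kind: 'chapters',
*   src: 'chapters.vtt',
*   srclang: 'en',
*   label: 'Chapters'
* }, false);
*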
Nothing will change if this is the currently selected* track.*/setTrack(track) {if (this.track_ === track) {return;}if (!this.updateHandler_) {this.updateHandler_ = this.update.bind(this);}// here this.track_ refers to the old track instanceif (this.track_) {const remoteTextTrackEl = this.player_.remoteTextTrackEls().getTrackElementByTrack_(this.track_);if (remoteTextTrackEl) {remoteTextTrackEl.removeEventListener('load', this.updateHandler_);}this.track_.removeEventListener('cuechange', this.selectCurrentItem_);this.track_ = null;}this.track_ = track;// here this.track_ refers to the new track instanceif (this.track_) {this.track_.mode = 'hidden';const remoteTextTrackEl = this.player_.remoteTextTrackEls().getTrackElementByTrack_(this.track_);if (remoteTextTrackEl) {remoteTextTrackEl.addEventListener('load', this.updateHandler_);}this.track_.addEventListener('cuechange', this.selectCurrentItem_);}}/*** Find the track object that is currently in use by this ChaptersButton** @return {TextTrack|undefined}* The current track or undefined if none was found.*/findChaptersTrack() {const tracks = this.player_.textTracks() || [];for (let i = tracks.length - 1; i >= 0; i--) {// We will always choose the last track as our chaptersTrackconst track = tracks[i];if (track.kind === this.kind_) {return track;}}}/*** Get the caption for the ChaptersButton based on the track label. This will also* use the current tracks localized kind as a fallback if a label does not exist.** @return {string}* The tracks current label or the localized track kind.*/getMenuCaption() {if (this.track_ && this.track_.label) {return this.track_.label;}return this.localize(toTitleCase$1(this.kind_));}/*** Create menu from chapter track** @return { import('../../menu/menu').default }* New menu for the chapter buttons*/createMenu() {this.options_.title = this.getMenuCaption();return super.createMenu();}/*** Create a menu item for each text track** @return { import('./text-track-menu-item').default[] }* Array of menu items*/createItems() {const items = [];if (!this.track_) {return items;}const cues = this.track_.cues;if (!cues) {return items;}for (let i = 0, l = cues.length; i < l; i++) {const cue = cues[i];const mi = new ChaptersTrackMenuItem(this.player_, {track: this.track_,cue});items.push(mi);}return items;}}/*** `kind` of TextTrack to look for to associate it with this menu.** @type {string}* @private*/ChaptersButton.prototype.kind_ = 'chapters';/*** The text that should display over the `ChaptersButton`s controls. 
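*
* Because this control text is passed through the player's `localize()`, it can be
* translated with `videojs.addLanguage`; a sketch (the Spanish strings are
* illustrative):
* @example
* videojs.addLanguage('es', {
*   'Chapters': 'Capítulos',
*   'Subtitles': 'Subtítulos'
* });
* const player = videojs('my-player', { language: 'es' });
*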
Added for localization.** @type {string}* @protected*/ChaptersButton.prototype.controlText_ = 'Chapters';Component$1.registerComponent('ChaptersButton', ChaptersButton);/*** @file descriptions-button.js*//*** The button component for toggling and selecting descriptions** @extends TextTrackButton*/class DescriptionsButton extends TextTrackButton {/*** Creates an instance of this class.** @param { import('../../player').default } player* The `Player` that this class should be attached to.** @param {Object} [options]* The key/value store of player options.** @param {Function} [ready]* The function to call when this component is ready.*/constructor(player, options, ready) {super(player, options, ready);this.setIcon('audio-description');const tracks = player.textTracks();const changeHandler = bind_(this, this.handleTracksChange);tracks.addEventListener('change', changeHandler);this.on('dispose', function () {tracks.removeEventListener('change', changeHandler);});}/*** Handle text track change** @param {Event} event* The event that caused this function to run** @listens TextTrackList#change*/handleTracksChange(event) {const tracks = this.player().textTracks();let disabled = false;// Check whether a track of a different kind is showingfor (let i = 0, l = tracks.length; i < l; i++) {const track = tracks[i];if (track.kind !== this.kind_ && track.mode === 'showing') {disabled = true;break;}}// If another track is showing, disable this menu buttonif (disabled) {this.disable();} else {this.enable();}}/*** Builds the default DOM `className`.** @return {string}* The DOM `className` for this object.*/buildCSSClass() {return `vjs-descriptions-button ${super.buildCSSClass()}`;}buildWrapperCSSClass() {return `vjs-descriptions-button ${super.buildWrapperCSSClass()}`;}}/*** `kind` of TextTrack to look for to associate it with this menu.** @type {string}* @private*/DescriptionsButton.prototype.kind_ = 'descriptions';/*** The text that should display over the `DescriptionsButton`s controls. Added for localization.** @type {string}* @protected*/DescriptionsButton.prototype.controlText_ = 'Descriptions';Component$1.registerComponent('DescriptionsButton', DescriptionsButton);/*** @file subtitles-button.js*//*** The button component for toggling and selecting subtitles** @extends TextTrackButton*/class SubtitlesButton extends TextTrackButton {/*** Creates an instance of this class.** @param { import('../../player').default } player* The `Player` that this class should be attached to.** @param {Object} [options]* The key/value store of player options.** @param {Function} [ready]* The function to call when this component is ready.*/constructor(player, options, ready) {super(player, options, ready);this.setIcon('subtitles');}/*** Builds the default DOM `className`.** @return {string}* The DOM `className` for this object.*/buildCSSClass() {return `vjs-subtitles-button ${super.buildCSSClass()}`;}buildWrapperCSSClass() {return `vjs-subtitles-button ${super.buildWrapperCSSClass()}`;}}/*** `kind` of TextTrack to look for to associate it with this menu.** @type {string}* @private*/SubtitlesButton.prototype.kind_ = 'subtitles';/*** The text that should display over the `SubtitlesButton`s controls. 
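*
* Like the other buttons in this section, these are control bar children and are
* configured (or removed) through the `controlBar` player option; a sketch with
* illustrative values:
* @example
* const player = videojs('my-player', {
*   controlBar: {
*     skipButtons: { forward: 10, backward: 10 }, // SkipForward / SkipBackward
*     volumePanel: { inline: false },             // vertical VolumeControl
*     subsCapsButton: false                       // omit the subtitles/captions button
*   }
* });
*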
Added for localization.** @type {string}* @protected*/SubtitlesButton.prototype.controlText_ = 'Subtitles';Component$1.registerComponent('SubtitlesButton', SubtitlesButton);/*** @file caption-settings-menu-item.js*//*** The menu item for caption track settings menu** @extends TextTrackMenuItem*/class CaptionSettingsMenuItem extends TextTrackMenuItem {/*** Creates an instance of this class.** @param { import('../../player').default } player* The `Player` that this class should be attached to.** @param {Object} [options]* The key/value store of player options.*/constructor(player, options) {options.track = {player,kind: options.kind,label: options.kind + ' settings',selectable: false,default: false,mode: 'disabled'};// CaptionSettingsMenuItem has no concept of 'selected'options.selectable = false;options.name = 'CaptionSettingsMenuItem';super(player, options);this.addClass('vjs-texttrack-settings');this.controlText(', opens ' + options.kind + ' settings dialog');}/*** This gets called when an `CaptionSettingsMenuItem` is "clicked". See* {@link ClickableComponent} for more detailed information on what a click can be.** @param {Event} [event]* The `keydown`, `tap`, or `click` event that caused this function to be* called.** @listens tap* @listens click*/handleClick(event) {this.player().getChild('textTrackSettings').open();}/*** Update control text and label on languagechange*/handleLanguagechange() {this.$('.vjs-menu-item-text').textContent = this.player_.localize(this.options_.kind + ' settings');super.handleLanguagechange();}}Component$1.registerComponent('CaptionSettingsMenuItem', CaptionSettingsMenuItem);/*** @file captions-button.js*//*** The button component for toggling and selecting captions** @extends TextTrackButton*/class CaptionsButton extends TextTrackButton {/*** Creates an instance of this class.** @param { import('../../player').default } player* The `Player` that this class should be attached to.** @param {Object} [options]* The key/value store of player options.** @param {Function} [ready]* The function to call when this component is ready.*/constructor(player, options, ready) {super(player, options, ready);this.setIcon('captions');}/*** Builds the default DOM `className`.** @return {string}* The DOM `className` for this object.*/buildCSSClass() {return `vjs-captions-button ${super.buildCSSClass()}`;}buildWrapperCSSClass() {return `vjs-captions-button ${super.buildWrapperCSSClass()}`;}/*** Create caption menu items** @return {CaptionSettingsMenuItem[]}* The array of current menu items.*/createItems() {const items = [];if (!(this.player().tech_ && this.player().tech_.featuresNativeTextTracks) && this.player().getChild('textTrackSettings')) {items.push(new CaptionSettingsMenuItem(this.player_, {kind: this.kind_}));this.hideThreshold_ += 1;}return super.createItems(items);}}/*** `kind` of TextTrack to look for to associate it with this menu.** @type {string}* @private*/CaptionsButton.prototype.kind_ = 'captions';/*** The text that should display over the `CaptionsButton`s controls. 
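*
* The caption settings item rendered by this button simply opens the player's text
* track settings dialog, which can also be opened directly (a sketch, `player` an
* existing player instance):
* @example
* const settings = player.getChild('textTrackSettings');
* if (settings) {
*   settings.open();
* }
*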
Added for localization.** @type {string}* @protected*/CaptionsButton.prototype.controlText_ = 'Captions';Component$1.registerComponent('CaptionsButton', CaptionsButton);/*** @file subs-caps-menu-item.js*//*** SubsCapsMenuItem has an [cc] icon to distinguish captions from subtitles* in the SubsCapsMenu.** @extends TextTrackMenuItem*/class SubsCapsMenuItem extends TextTrackMenuItem {createEl(type, props, attrs) {const el = super.createEl(type, props, attrs);const parentSpan = el.querySelector('.vjs-menu-item-text');if (this.options_.track.kind === 'captions') {if (this.player_.options_.experimentalSvgIcons) {this.setIcon('captions', el);} else {parentSpan.appendChild(createEl('span', {className: 'vjs-icon-placeholder'}, {'aria-hidden': true}));}parentSpan.appendChild(createEl('span', {className: 'vjs-control-text',// space added as the text will visually flow with the// labeltextContent: ` ${this.localize('Captions')}`}));}return el;}}Component$1.registerComponent('SubsCapsMenuItem', SubsCapsMenuItem);/*** @file sub-caps-button.js*//*** The button component for toggling and selecting captions and/or subtitles** @extends TextTrackButton*/class SubsCapsButton extends TextTrackButton {/*** Creates an instance of this class.** @param { import('../../player').default } player* The `Player` that this class should be attached to.** @param {Object} [options]* The key/value store of player options.** @param {Function} [ready]* The function to call when this component is ready.*/constructor(player, options = {}) {super(player, options);// Although North America uses "captions" in most cases for// "captions and subtitles" other locales use "subtitles"this.label_ = 'subtitles';this.setIcon('subtitles');if (['en', 'en-us', 'en-ca', 'fr-ca'].indexOf(this.player_.language_) > -1) {this.label_ = 'captions';this.setIcon('captions');}this.menuButton_.controlText(toTitleCase$1(this.label_));}/*** Builds the default DOM `className`.** @return {string}* The DOM `className` for this object.*/buildCSSClass() {return `vjs-subs-caps-button ${super.buildCSSClass()}`;}buildWrapperCSSClass() {return `vjs-subs-caps-button ${super.buildWrapperCSSClass()}`;}/*** Create caption/subtitles menu items** @return {CaptionSettingsMenuItem[]}* The array of current menu items.*/createItems() {let items = [];if (!(this.player().tech_ && this.player().tech_.featuresNativeTextTracks) && this.player().getChild('textTrackSettings')) {items.push(new CaptionSettingsMenuItem(this.player_, {kind: this.label_}));this.hideThreshold_ += 1;}items = super.createItems(items, SubsCapsMenuItem);return items;}}/*** `kind`s of TextTrack to look for to associate it with this menu.** @type {array}* @private*/SubsCapsButton.prototype.kinds_ = ['captions', 'subtitles'];/*** The text that should display over the `SubsCapsButton`s controls.*** @type {string}* @protected*/SubsCapsButton.prototype.controlText_ = 'Subtitles';Component$1.registerComponent('SubsCapsButton', SubsCapsButton);/*** @file audio-track-menu-item.js*//*** An {@link AudioTrack} {@link MenuItem}** @extends MenuItem*/class AudioTrackMenuItem extends MenuItem {/*** Creates an instance of this class.** @param { import('../../player').default } player* The `Player` that this class should be attached to.** @param {Object} [options]* The key/value store of player options.*/constructor(player, options) {const track = options.track;const tracks = player.audioTracks();// Modify options for parent MenuItem class's init.options.label = track.label || track.language || 'Unknown';options.selected = 
track.enabled;super(player, options);this.track = track;this.addClass(`vjs-${track.kind}-menu-item`);const changeHandler = (...args) => {this.handleTracksChange.apply(this, args);};tracks.addEventListener('change', changeHandler);this.on('dispose', () => {tracks.removeEventListener('change', changeHandler);});}createEl(type, props, attrs) {const el = super.createEl(type, props, attrs);const parentSpan = el.querySelector('.vjs-menu-item-text');if (['main-desc', 'description'].indexOf(this.options_.track.kind) >= 0) {parentSpan.appendChild(createEl('span', {className: 'vjs-icon-placeholder'}, {'aria-hidden': true}));parentSpan.appendChild(createEl('span', {className: 'vjs-control-text',textContent: ' ' + this.localize('Descriptions')}));}return el;}/*** This gets called when an `AudioTrackMenuItem is "clicked". See {@link ClickableComponent}* for more detailed information on what a click can be.** @param {Event} [event]* The `keydown`, `tap`, or `click` event that caused this function to be* called.** @listens tap* @listens click*/handleClick(event) {super.handleClick(event);// the audio track list will automatically toggle other tracks// off for us.this.track.enabled = true;// when native audio tracks are used, we want to make sure that other tracks are turned offif (this.player_.tech_.featuresNativeAudioTracks) {const tracks = this.player_.audioTracks();for (let i = 0; i < tracks.length; i++) {const track = tracks[i];// skip the current track since we enabled it aboveif (track === this.track) {continue;}track.enabled = track === this.track;}}}/*** Handle any {@link AudioTrack} change.** @param {Event} [event]* The {@link AudioTrackList#change} event that caused this to run.** @listens AudioTrackList#change*/handleTracksChange(event) {this.selected(this.track.enabled);}}Component$1.registerComponent('AudioTrackMenuItem', AudioTrackMenuItem);/*** @file audio-track-button.js*//*** The base class for buttons that toggle specific {@link AudioTrack} types.** @extends TrackButton*/class AudioTrackButton extends TrackButton {/*** Creates an instance of this class.** @param {Player} player* The `Player` that this class should be attached to.** @param {Object} [options={}]* The key/value store of player options.*/constructor(player, options = {}) {options.tracks = player.audioTracks();super(player, options);this.setIcon('audio');}/*** Builds the default DOM `className`.** @return {string}* The DOM `className` for this object.*/buildCSSClass() {return `vjs-audio-button ${super.buildCSSClass()}`;}buildWrapperCSSClass() {return `vjs-audio-button ${super.buildWrapperCSSClass()}`;}/*** Create a menu item for each audio track** @param {AudioTrackMenuItem[]} [items=[]]* An array of existing menu items to use.** @return {AudioTrackMenuItem[]}* An array of menu items*/createItems(items = []) {// if there's only one audio track, there no point in showing itthis.hideThreshold_ = 1;const tracks = this.player_.audioTracks();for (let i = 0; i < tracks.length; i++) {const track = tracks[i];items.push(new AudioTrackMenuItem(this.player_, {track,// MenuItem is selectableselectable: true,// MenuItem is NOT multiSelectable (i.e. only one can be marked "selected" at a time)multiSelectable: false}));}return items;}}/*** The text that should display over the `AudioTrackButton`s controls. 
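*
* Usage sketch (illustrative only; assumes a `player` with more than one audio track and
* a hypothetical Spanish track): enabling a track on the player's AudioTrackList is what
* a click on one of these menu items does under the hood:
*
*   const audioTracks = player.audioTracks();
*   for (let i = 0; i < audioTracks.length; i++) {
*     if (audioTracks[i].language === 'es') {
*       // the track list disables the other tracks itself
*       audioTracks[i].enabled = true;
*       break;
*     }
*   }
*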
Added for localization.** @type {string}* @protected*/AudioTrackButton.prototype.controlText_ = 'Audio Track';Component$1.registerComponent('AudioTrackButton', AudioTrackButton);/*** @file playback-rate-menu-item.js*//*** The specific menu item type for selecting a playback rate.** @extends MenuItem*/class PlaybackRateMenuItem extends MenuItem {/*** Creates an instance of this class.** @param { import('../../player').default } player* The `Player` that this class should be attached to.** @param {Object} [options]* The key/value store of player options.*/constructor(player, options) {const label = options.rate;const rate = parseFloat(label, 10);// Modify options for parent MenuItem class's init.options.label = label;options.selected = rate === player.playbackRate();options.selectable = true;options.multiSelectable = false;super(player, options);this.label = label;this.rate = rate;this.on(player, 'ratechange', e => this.update(e));}/*** This gets called when an `PlaybackRateMenuItem` is "clicked". See* {@link ClickableComponent} for more detailed information on what a click can be.** @param {Event} [event]* The `keydown`, `tap`, or `click` event that caused this function to be* called.** @listens tap* @listens click*/handleClick(event) {super.handleClick();this.player().playbackRate(this.rate);}/*** Update the PlaybackRateMenuItem when the playbackrate changes.** @param {Event} [event]* The `ratechange` event that caused this function to run.** @listens Player#ratechange*/update(event) {this.selected(this.player().playbackRate() === this.rate);}}/*** The text that should display over the `PlaybackRateMenuItem`s controls. Added for localization.** @type {string}* @private*/PlaybackRateMenuItem.prototype.contentElType = 'button';Component$1.registerComponent('PlaybackRateMenuItem', PlaybackRateMenuItem);/*** @file playback-rate-menu-button.js*//*** The component for controlling the playback rate.** @extends MenuButton*/class PlaybackRateMenuButton extends MenuButton {/*** Creates an instance of this class.** @param { import('../../player').default } player* The `Player` that this class should be attached to.** @param {Object} [options]* The key/value store of player options.*/constructor(player, options) {super(player, options);this.menuButton_.el_.setAttribute('aria-describedby', this.labelElId_);this.updateVisibility();this.updateLabel();this.on(player, 'loadstart', e => this.updateVisibility(e));this.on(player, 'ratechange', e => this.updateLabel(e));this.on(player, 'playbackrateschange', e => this.handlePlaybackRateschange(e));}/*** Create the `Component`'s DOM element** @return {Element}* The element that was created.*/createEl() {const el = super.createEl();this.labelElId_ = 'vjs-playback-rate-value-label-' + this.id_;this.labelEl_ = createEl('div', {className: 'vjs-playback-rate-value',id: this.labelElId_,textContent: '1x'});el.appendChild(this.labelEl_);return el;}dispose() {this.labelEl_ = null;super.dispose();}/*** Builds the default DOM `className`.** @return {string}* The DOM `className` for this object.*/buildCSSClass() {return `vjs-playback-rate ${super.buildCSSClass()}`;}buildWrapperCSSClass() {return `vjs-playback-rate ${super.buildWrapperCSSClass()}`;}/*** Create the list of menu items. 
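*
* Usage sketch (illustrative only; 'my-video' is a placeholder element id): the items
* created here mirror the player's configured rates, e.g.
*
*   const player = videojs('my-video', {
*     playbackRates: [0.5, 1, 1.5, 2]
*   });
*
*   // or change the rate at runtime:
*   player.playbackRate(1.5);
*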
Specific to each subclass.**/createItems() {const rates = this.playbackRates();const items = [];for (let i = rates.length - 1; i >= 0; i--) {items.push(new PlaybackRateMenuItem(this.player(), {rate: rates[i] + 'x'}));}return items;}/*** On playbackrateschange, update the menu to account for the new items.** @listens Player#playbackrateschange*/handlePlaybackRateschange(event) {this.update();}/*** Get possible playback rates** @return {Array}* All possible playback rates*/playbackRates() {const player = this.player();return player.playbackRates && player.playbackRates() || [];}/*** Get whether playback rates is supported by the tech* and an array of playback rates exists** @return {boolean}* Whether changing playback rate is supported*/playbackRateSupported() {return this.player().tech_ && this.player().tech_.featuresPlaybackRate && this.playbackRates() && this.playbackRates().length > 0;}/*** Hide playback rate controls when they're no playback rate options to select** @param {Event} [event]* The event that caused this function to run.** @listens Player#loadstart*/updateVisibility(event) {if (this.playbackRateSupported()) {this.removeClass('vjs-hidden');} else {this.addClass('vjs-hidden');}}/*** Update button label when rate changed** @param {Event} [event]* The event that caused this function to run.** @listens Player#ratechange*/updateLabel(event) {if (this.playbackRateSupported()) {this.labelEl_.textContent = this.player().playbackRate() + 'x';}}}/*** The text that should display over the `PlaybackRateMenuButton`s controls.** Added for localization.** @type {string}* @protected*/PlaybackRateMenuButton.prototype.controlText_ = 'Playback Rate';Component$1.registerComponent('PlaybackRateMenuButton', PlaybackRateMenuButton);/*** @file spacer.js*//*** Just an empty spacer element that can be used as an append point for plugins, etc.* Also can be used to create space between elements when necessary.** @extends Component*/class Spacer extends Component$1 {/*** Builds the default DOM `className`.** @return {string}* The DOM `className` for this object.*/buildCSSClass() {return `vjs-spacer ${super.buildCSSClass()}`;}/*** Create the `Component`'s DOM element** @return {Element}* The element that was created.*/createEl(tag = 'div', props = {}, attributes = {}) {if (!props.className) {props.className = this.buildCSSClass();}return super.createEl(tag, props, attributes);}}Component$1.registerComponent('Spacer', Spacer);/*** @file custom-control-spacer.js*//*** Spacer specifically meant to be used as an insertion point for new plugins, etc.** @extends Spacer*/class CustomControlSpacer extends Spacer {/*** Builds the default DOM `className`.** @return {string}* The DOM `className` for this object.*/buildCSSClass() {return `vjs-custom-control-spacer ${super.buildCSSClass()}`;}/*** Create the `Component`'s DOM element** @return {Element}* The element that was created.*/createEl() {return super.createEl('div', {className: this.buildCSSClass(),// No-flex/table-cell mode requires there be some content// in the cell to fill the remaining space of the table.textContent: '\u00a0'});}}Component$1.registerComponent('CustomControlSpacer', CustomControlSpacer);/*** @file control-bar.js*//*** Container of main controls.** @extends Component*/class ControlBar extends Component$1 {/*** Create the `Component`'s DOM element** @return {Element}* The element that was created.*/createEl() {return super.createEl('div', {className: 'vjs-control-bar',dir: 'ltr'});}}/*** Default options for `ControlBar`** @type {Object}* 
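*
* Usage sketch (illustrative only; 'my-video' is a placeholder element id): the child
* list below can be overridden per player, for example to strip the control bar down to
* a few controls:
*
*   videojs('my-video', {
*     controlBar: {
*       children: ['playToggle', 'progressControl', 'fullscreenToggle']
*     }
*   });
*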
@private*/ControlBar.prototype.options_ = {children: ['playToggle', 'skipBackward', 'skipForward', 'volumePanel', 'currentTimeDisplay', 'timeDivider', 'durationDisplay', 'progressControl', 'liveDisplay', 'seekToLive', 'remainingTimeDisplay', 'customControlSpacer', 'playbackRateMenuButton', 'chaptersButton', 'descriptionsButton', 'subsCapsButton', 'audioTrackButton', 'pictureInPictureToggle', 'fullscreenToggle']};Component$1.registerComponent('ControlBar', ControlBar);/*** @file error-display.js*//*** A display that indicates an error has occurred. This means that the video* is unplayable.** @extends ModalDialog*/class ErrorDisplay extends ModalDialog {/*** Creates an instance of this class.** @param { import('./player').default } player* The `Player` that this class should be attached to.** @param {Object} [options]* The key/value store of player options.*/constructor(player, options) {super(player, options);this.on(player, 'error', e => {this.close();this.open(e);});}/*** Builds the default DOM `className`.** @return {string}* The DOM `className` for this object.** @deprecated Since version 5.*/buildCSSClass() {return `vjs-error-display ${super.buildCSSClass()}`;}/*** Gets the localized error message based on the `Player`s error.** @return {string}* The `Player`s error message localized or an empty string.*/content() {const error = this.player().error();return error ? this.localize(error.message) : '';}}/*** The default options for an `ErrorDisplay`.** @private*/ErrorDisplay.prototype.options_ = Object.assign({}, ModalDialog.prototype.options_, {pauseOnOpen: false,fillAlways: true,temporary: false,uncloseable: true});Component$1.registerComponent('ErrorDisplay', ErrorDisplay);/*** @file text-track-settings.js*/const LOCAL_STORAGE_KEY$1 = 'vjs-text-track-settings';const COLOR_BLACK = ['#000', 'Black'];const COLOR_BLUE = ['#00F', 'Blue'];const COLOR_CYAN = ['#0FF', 'Cyan'];const COLOR_GREEN = ['#0F0', 'Green'];const COLOR_MAGENTA = ['#F0F', 'Magenta'];const COLOR_RED = ['#F00', 'Red'];const COLOR_WHITE = ['#FFF', 'White'];const COLOR_YELLOW = ['#FF0', 'Yellow'];const OPACITY_OPAQUE = ['1', 'Opaque'];const OPACITY_SEMI = ['0.5', 'Semi-Transparent'];const OPACITY_TRANS = ['0', 'Transparent'];// Configuration for the various <select> elements in the DOM of this component.//// Possible keys include://// `default`:// The default option index. 
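//
// For example (illustrative note, not part of the library source), the `fontPercent`
// entry below uses:
//
//   parser: v => v === '1.00' ? null : Number(v)
//
// so the default '1.00' option is treated as unset while every other size is reported
// as a number.
//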
Only needs to be provided if not zero.// `parser`:// A function which is used to parse the value from the selected option in// a customized way.// `selector`:// The selector used to find the associated <select> element.const selectConfigs = {backgroundColor: {selector: '.vjs-bg-color > select',id: 'captions-background-color-%s',label: 'Color',options: [COLOR_BLACK, COLOR_WHITE, COLOR_RED, COLOR_GREEN, COLOR_BLUE, COLOR_YELLOW, COLOR_MAGENTA, COLOR_CYAN]},backgroundOpacity: {selector: '.vjs-bg-opacity > select',id: 'captions-background-opacity-%s',label: 'Opacity',options: [OPACITY_OPAQUE, OPACITY_SEMI, OPACITY_TRANS]},color: {selector: '.vjs-text-color > select',id: 'captions-foreground-color-%s',label: 'Color',options: [COLOR_WHITE, COLOR_BLACK, COLOR_RED, COLOR_GREEN, COLOR_BLUE, COLOR_YELLOW, COLOR_MAGENTA, COLOR_CYAN]},edgeStyle: {selector: '.vjs-edge-style > select',id: '%s',label: 'Text Edge Style',options: [['none', 'None'], ['raised', 'Raised'], ['depressed', 'Depressed'], ['uniform', 'Uniform'], ['dropshadow', 'Drop shadow']]},fontFamily: {selector: '.vjs-font-family > select',id: 'captions-font-family-%s',label: 'Font Family',options: [['proportionalSansSerif', 'Proportional Sans-Serif'], ['monospaceSansSerif', 'Monospace Sans-Serif'], ['proportionalSerif', 'Proportional Serif'], ['monospaceSerif', 'Monospace Serif'], ['casual', 'Casual'], ['script', 'Script'], ['small-caps', 'Small Caps']]},fontPercent: {selector: '.vjs-font-percent > select',id: 'captions-font-size-%s',label: 'Font Size',options: [['0.50', '50%'], ['0.75', '75%'], ['1.00', '100%'], ['1.25', '125%'], ['1.50', '150%'], ['1.75', '175%'], ['2.00', '200%'], ['3.00', '300%'], ['4.00', '400%']],default: 2,parser: v => v === '1.00' ? null : Number(v)},textOpacity: {selector: '.vjs-text-opacity > select',id: 'captions-foreground-opacity-%s',label: 'Opacity',options: [OPACITY_OPAQUE, OPACITY_SEMI]},// Options for this object are defined below.windowColor: {selector: '.vjs-window-color > select',id: 'captions-window-color-%s',label: 'Color'},// Options for this object are defined below.windowOpacity: {selector: '.vjs-window-opacity > select',id: 'captions-window-opacity-%s',label: 'Opacity',options: [OPACITY_TRANS, OPACITY_SEMI, OPACITY_OPAQUE]}};selectConfigs.windowColor.options = selectConfigs.backgroundColor.options;/*** Get the actual value of an option.** @param {string} value* The value to get** @param {Function} [parser]* Optional function to adjust the value.** @return {*}* - Will be `undefined` if no value exists* - Will be `undefined` if the given value is "none".* - Will be the actual value otherwise.** @private*/function parseOptionValue(value, parser) {if (parser) {value = parser(value);}if (value && value !== 'none') {return value;}}/*** Gets the value of the selected <option> element within a <select> element.** @param {Element} el* the element to look in** @param {Function} [parser]* Optional function to adjust the value.** @return {*}* - Will be `undefined` if no value exists* - Will be `undefined` if the given value is "none".* - Will be the actual value otherwise.** @private*/function getSelectedOptionValue(el, parser) {const value = el.options[el.options.selectedIndex].value;return parseOptionValue(value, parser);}/*** Sets the selected <option> element within a <select> element based on a* given value.** @param {Element} el* The element to look in.** @param {string} value* the property to look on.** @param {Function} [parser]* Optional function to adjust the value before comparing.** 
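*
* Usage sketch (illustrative only; 'my-video' is a placeholder element id): the values
* restored through helpers like this one can be persisted to localStorage across page
* loads by enabling the corresponding player option:
*
*   videojs('my-video', {
*     persistTextTrackSettings: true
*   });
*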
@private*/function setSelectedOption(el, value, parser) {if (!value) {return;}for (let i = 0; i < el.options.length; i++) {if (parseOptionValue(el.options[i].value, parser) === value) {el.selectedIndex = i;break;}}}/*** Manipulate Text Tracks settings.** @extends ModalDialog*/class TextTrackSettings extends ModalDialog {/*** Creates an instance of this class.** @param { import('../player').default } player* The `Player` that this class should be attached to.** @param {Object} [options]* The key/value store of player options.*/constructor(player, options) {options.temporary = false;super(player, options);this.updateDisplay = this.updateDisplay.bind(this);// fill the modal and pretend we have opened itthis.fill();this.hasBeenOpened_ = this.hasBeenFilled_ = true;this.endDialog = createEl('p', {className: 'vjs-control-text',textContent: this.localize('End of dialog window.')});this.el().appendChild(this.endDialog);this.setDefaults();// Grab `persistTextTrackSettings` from the player options if not passed in child optionsif (options.persistTextTrackSettings === undefined) {this.options_.persistTextTrackSettings = this.options_.playerOptions.persistTextTrackSettings;}this.on(this.$('.vjs-done-button'), 'click', () => {this.saveSettings();this.close();});this.on(this.$('.vjs-default-button'), 'click', () => {this.setDefaults();this.updateDisplay();});each(selectConfigs, config => {this.on(this.$(config.selector), 'change', this.updateDisplay);});if (this.options_.persistTextTrackSettings) {this.restoreSettings();}}dispose() {this.endDialog = null;super.dispose();}/*** Create a <select> element with configured options.** @param {string} key* Configuration key to use during creation.** @param {string} [legendId]* Id of associated <legend>.** @param {string} [type=label]* Type of labelling element, `label` or `legend`** @return {string}* An HTML string.** @private*/createElSelect_(key, legendId = '', type = 'label') {const config = selectConfigs[key];const id = config.id.replace('%s', this.id_);const selectLabelledbyIds = [legendId, id].join(' ').trim();const guid = `vjs_select_${newGUID()}`;return [`<${type} id="${id}"${type === 'label' ? 
` for="${guid}" class="vjs-label"` : ''}>`, this.localize(config.label), `</${type}>`, `<select aria-labelledby="${selectLabelledbyIds}" id="${guid}">`].concat(config.options.map(o => {const optionId = id + '-' + o[1].replace(/\W+/g, '');return [`<option id="${optionId}" value="${o[0]}" `, `aria-labelledby="${selectLabelledbyIds} ${optionId}">`, this.localize(o[1]), '</option>'].join('');})).concat('</select>').join('');}/*** Create foreground color element for the component** @return {string}* An HTML string.** @private*/createElFgColor_() {const legendId = `captions-text-legend-${this.id_}`;return ['<fieldset class="vjs-fg vjs-track-setting">', `<legend id="${legendId}">`, this.localize('Text'), '</legend>', '<span class="vjs-text-color">', this.createElSelect_('color', legendId), '</span>', '<span class="vjs-text-opacity vjs-opacity">', this.createElSelect_('textOpacity', legendId), '</span>', '</fieldset>'].join('');}/*** Create background color element for the component** @return {string}* An HTML string.** @private*/createElBgColor_() {const legendId = `captions-background-${this.id_}`;return ['<fieldset class="vjs-bg vjs-track-setting">', `<legend id="${legendId}">`, this.localize('Text Background'), '</legend>', '<span class="vjs-bg-color">', this.createElSelect_('backgroundColor', legendId), '</span>', '<span class="vjs-bg-opacity vjs-opacity">', this.createElSelect_('backgroundOpacity', legendId), '</span>', '</fieldset>'].join('');}/*** Create window color element for the component** @return {string}* An HTML string.** @private*/createElWinColor_() {const legendId = `captions-window-${this.id_}`;return ['<fieldset class="vjs-window vjs-track-setting">', `<legend id="${legendId}">`, this.localize('Caption Area Background'), '</legend>', '<span class="vjs-window-color">', this.createElSelect_('windowColor', legendId), '</span>', '<span class="vjs-window-opacity vjs-opacity">', this.createElSelect_('windowOpacity', legendId), '</span>', '</fieldset>'].join('');}/*** Create color elements for the component** @return {Element}* The element that was created** @private*/createElColors_() {return createEl('div', {className: 'vjs-track-settings-colors',innerHTML: [this.createElFgColor_(), this.createElBgColor_(), this.createElWinColor_()].join('')});}/*** Create font elements for the component** @return {Element}* The element that was created.** @private*/createElFont_() {return createEl('div', {className: 'vjs-track-settings-font',innerHTML: ['<fieldset class="vjs-font-percent vjs-track-setting">', this.createElSelect_('fontPercent', '', 'legend'), '</fieldset>', '<fieldset class="vjs-edge-style vjs-track-setting">', this.createElSelect_('edgeStyle', '', 'legend'), '</fieldset>', '<fieldset class="vjs-font-family vjs-track-setting">', this.createElSelect_('fontFamily', '', 'legend'), '</fieldset>'].join('')});}/*** Create controls for the component** @return {Element}* The element that was created.** @private*/createElControls_() {const defaultsDescription = this.localize('restore all settings to the default values');return createEl('div', {className: 'vjs-track-settings-controls',innerHTML: [`<button type="button" class="vjs-default-button" title="${defaultsDescription}">`, this.localize('Reset'), `<span class="vjs-control-text"> ${defaultsDescription}</span>`, '</button>', `<button type="button" class="vjs-done-button">${this.localize('Done')}</button>`].join('')});}content() {return [this.createElColors_(), this.createElFont_(), this.createElControls_()];}label() {return 
this.localize('Caption Settings Dialog');}description() {return this.localize('Beginning of dialog window. Escape will cancel and close the window.');}buildCSSClass() {return super.buildCSSClass() + ' vjs-text-track-settings';}/*** Gets an object of text track settings (or null).** @return {Object}* An object with config values parsed from the DOM or localStorage.*/getValues() {return reduce(selectConfigs, (accum, config, key) => {const value = getSelectedOptionValue(this.$(config.selector), config.parser);if (value !== undefined) {accum[key] = value;}return accum;}, {});}/*** Sets text track settings from an object of values.** @param {Object} values* An object with config values parsed from the DOM or localStorage.*/setValues(values) {each(selectConfigs, (config, key) => {setSelectedOption(this.$(config.selector), values[key], config.parser);});}/*** Sets all `<select>` elements to their default values.*/setDefaults() {each(selectConfigs, config => {const index = config.hasOwnProperty('default') ? config.default : 0;this.$(config.selector).selectedIndex = index;});}/*** Restore texttrack settings from localStorage*/restoreSettings() {let values;try {values = JSON.parse(window.localStorage.getItem(LOCAL_STORAGE_KEY$1));} catch (err) {log$1.warn(err);}if (values) {this.setValues(values);}}/*** Save text track settings to localStorage*/saveSettings() {if (!this.options_.persistTextTrackSettings) {return;}const values = this.getValues();try {if (Object.keys(values).length) {window.localStorage.setItem(LOCAL_STORAGE_KEY$1, JSON.stringify(values));} else {window.localStorage.removeItem(LOCAL_STORAGE_KEY$1);}} catch (err) {log$1.warn(err);}}/*** Update display of text track settings*/updateDisplay() {const ttDisplay = this.player_.getChild('textTrackDisplay');if (ttDisplay) {ttDisplay.updateDisplay();}}/*** conditionally blur the element and refocus the captions button** @private*/conditionalBlur_() {this.previouslyActiveEl_ = null;const cb = this.player_.controlBar;const subsCapsBtn = cb && cb.subsCapsButton;const ccBtn = cb && cb.captionsButton;if (subsCapsBtn) {subsCapsBtn.focus();} else if (ccBtn) {ccBtn.focus();}}/*** Repopulate dialog with new localizations on languagechange*/handleLanguagechange() {this.fill();}}Component$1.registerComponent('TextTrackSettings', TextTrackSettings);/*** @file resize-manager.js*//*** A Resize Manager. It is in charge of triggering `playerresize` on the player in the right conditions.** It'll either create an iframe and use a debounced resize handler on it or use the new {@link https://wicg.github.io/ResizeObserver/|ResizeObserver}.** If the ResizeObserver is available natively, it will be used. 
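*
* Usage sketch (illustrative only; assumes a `player` instance): the most visible result
* of this component is the `playerresize` event:
*
*   player.on('playerresize', () => {
*     videojs.log('player is now', player.currentWidth(), 'x', player.currentHeight());
*   });
*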
A polyfill can be passed in as an option.* If a `playerresize` event is not needed, the ResizeManager component can be removed from the player, see the example below.** @example <caption>How to disable the resize manager</caption>* const player = videojs('#vid', {* resizeManager: false* });** @see {@link https://wicg.github.io/ResizeObserver/|ResizeObserver specification}** @extends Component*/class ResizeManager extends Component$1 {/*** Create the ResizeManager.** @param {Object} player* The `Player` that this class should be attached to.** @param {Object} [options]* The key/value store of ResizeManager options.** @param {Object} [options.ResizeObserver]* A polyfill for ResizeObserver can be passed in here.* If this is set to null it will ignore the native ResizeObserver and fall back to the iframe fallback.*/constructor(player, options) {let RESIZE_OBSERVER_AVAILABLE = options.ResizeObserver || window.ResizeObserver;// if `null` was passed, we want to disable the ResizeObserverif (options.ResizeObserver === null) {RESIZE_OBSERVER_AVAILABLE = false;}// Only create an element when ResizeObserver isn't availableconst options_ = merge$2({createEl: !RESIZE_OBSERVER_AVAILABLE,reportTouchActivity: false}, options);super(player, options_);this.ResizeObserver = options.ResizeObserver || window.ResizeObserver;this.loadListener_ = null;this.resizeObserver_ = null;this.debouncedHandler_ = debounce(() => {this.resizeHandler();}, 100, false, this);if (RESIZE_OBSERVER_AVAILABLE) {this.resizeObserver_ = new this.ResizeObserver(this.debouncedHandler_);this.resizeObserver_.observe(player.el());} else {this.loadListener_ = () => {if (!this.el_ || !this.el_.contentWindow) {return;}const debouncedHandler_ = this.debouncedHandler_;let unloadListener_ = this.unloadListener_ = function () {off(this, 'resize', debouncedHandler_);off(this, 'unload', unloadListener_);unloadListener_ = null;};// safari and edge can unload the iframe before resizemanager dispose// we have to dispose of event handlers correctly before that happenson(this.el_.contentWindow, 'unload', unloadListener_);on(this.el_.contentWindow, 'resize', debouncedHandler_);};this.one('load', this.loadListener_);}}createEl() {return super.createEl('iframe', {className: 'vjs-resize-manager',tabIndex: -1,title: this.localize('No content')}, {'aria-hidden': 'true'});}/*** Called when a resize is triggered on the iframe or a resize is observed via the ResizeObserver** @fires Player#playerresize*/resizeHandler() {/*** Called when the player size has changed** @event Player#playerresize* @type {Event}*/// make sure player is still around to trigger// prevents this from causing an error after disposeif (!this.player_ || !this.player_.trigger) {return;}this.player_.trigger('playerresize');}dispose() {if (this.debouncedHandler_) {this.debouncedHandler_.cancel();}if (this.resizeObserver_) {if (this.player_.el()) {this.resizeObserver_.unobserve(this.player_.el());}this.resizeObserver_.disconnect();}if (this.loadListener_) {this.off('load', this.loadListener_);}if (this.el_ && this.el_.contentWindow && this.unloadListener_) {this.unloadListener_.call(this.el_.contentWindow);}this.ResizeObserver = null;this.resizeObserver = null;this.debouncedHandler_ = null;this.loadListener_ = null;super.dispose();}}Component$1.registerComponent('ResizeManager', ResizeManager);const defaults = {trackingThreshold: 20,liveTolerance: 15};/*track when we are at the live edge, and other helpers for live playback *//*** A class for checking live current time and determining when the 
player* is at or behind the live edge.*/class LiveTracker extends Component$1 {/*** Creates an instance of this class.** @param { import('./player').default } player* The `Player` that this class should be attached to.** @param {Object} [options]* The key/value store of player options.** @param {number} [options.trackingThreshold=20]* Number of seconds of live window (seekableEnd - seekableStart) that* media needs to have before the liveui will be shown.** @param {number} [options.liveTolerance=15]* Number of seconds behind live that we have to be* before we will be considered non-live. Note that this will only* be used when playing at the live edge. This allows large seekable end* changes to not affect whether we are live or not.*/constructor(player, options) {// LiveTracker does not need an elementconst options_ = merge$2(defaults, options, {createEl: false});super(player, options_);this.trackLiveHandler_ = () => this.trackLive_();this.handlePlay_ = e => this.handlePlay(e);this.handleFirstTimeupdate_ = e => this.handleFirstTimeupdate(e);this.handleSeeked_ = e => this.handleSeeked(e);this.seekToLiveEdge_ = e => this.seekToLiveEdge(e);this.reset_();this.on(this.player_, 'durationchange', e => this.handleDurationchange(e));// we should try to toggle tracking on canplay as native playback engines, like Safari// may not have the proper values for things like seekableEnd until thenthis.on(this.player_, 'canplay', () => this.toggleTracking());}/*** all the functionality for tracking when seek end changes* and for tracking how far past seek end we should be*/trackLive_() {const seekable = this.player_.seekable();// skip undefined seekableif (!seekable || !seekable.length) {return;}const newTime = Number(window.performance.now().toFixed(4));const deltaTime = this.lastTime_ === -1 ? 0 : (newTime - this.lastTime_) / 1000;this.lastTime_ = newTime;this.pastSeekEnd_ = this.pastSeekEnd() + deltaTime;const liveCurrentTime = this.liveCurrentTime();const currentTime = this.player_.currentTime();// we are behind live if any of the following are true// 1. the player is paused// 2. the user seeked to a location 2 seconds away from live// 3. the difference between live and current time is greater than// liveTolerance which defaults to 15slet isBehind = this.player_.paused() || this.seekedBehindLive_ || Math.abs(liveCurrentTime - currentTime) > this.options_.liveTolerance;// we cannot be behind if// 1. we have not seen a timeupdate yet// 2. liveCurrentTime is Infinity, which happens on Android and Native Safariif (!this.timeupdateSeen_ || liveCurrentTime === Infinity) {isBehind = false;}if (isBehind !== this.behindLiveEdge_) {this.behindLiveEdge_ = isBehind;this.trigger('liveedgechange');}}/*** handle a durationchange event on the player* and start/stop tracking accordingly.*/handleDurationchange() {this.toggleTracking();}/*** start/stop tracking*/toggleTracking() {if (this.player_.duration() === Infinity && this.liveWindow() >= this.options_.trackingThreshold) {if (this.player_.options_.liveui) {this.player_.addClass('vjs-liveui');}this.startTracking();} else {this.player_.removeClass('vjs-liveui');this.stopTracking();}}/*** start tracking live playback*/startTracking() {if (this.isTracking()) {return;}// If we haven't seen a timeupdate, we need to check whether playback// began before this component started tracking. 
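//
// Usage sketch (illustrative only, not part of the library source): the tracker is
// typically reached through the player's `liveTracker` child, e.g.
//
//   player.liveTracker.on('liveedgechange', () => {
//     if (player.liveTracker.behindLiveEdge()) {
//       player.liveTracker.seekToLiveEdge();
//     }
//   });
//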
This can happen commonly// when using autoplay.if (!this.timeupdateSeen_) {this.timeupdateSeen_ = this.player_.hasStarted();}this.trackingInterval_ = this.setInterval(this.trackLiveHandler_, UPDATE_REFRESH_INTERVAL);this.trackLive_();this.on(this.player_, ['play', 'pause'], this.trackLiveHandler_);if (!this.timeupdateSeen_) {this.one(this.player_, 'play', this.handlePlay_);this.one(this.player_, 'timeupdate', this.handleFirstTimeupdate_);} else {this.on(this.player_, 'seeked', this.handleSeeked_);}}/*** handle the first timeupdate on the player if it wasn't already playing* when live tracker started tracking.*/handleFirstTimeupdate() {this.timeupdateSeen_ = true;this.on(this.player_, 'seeked', this.handleSeeked_);}/*** Keep track of what time a seek starts, and listen for seeked* to find where a seek ends.*/handleSeeked() {const timeDiff = Math.abs(this.liveCurrentTime() - this.player_.currentTime());this.seekedBehindLive_ = this.nextSeekedFromUser_ && timeDiff > 2;this.nextSeekedFromUser_ = false;this.trackLive_();}/*** handle the first play on the player, and make sure that we seek* right to the live edge.*/handlePlay() {this.one(this.player_, 'timeupdate', this.seekToLiveEdge_);}/*** Stop tracking, and set all internal variables to* their initial value.*/reset_() {this.lastTime_ = -1;this.pastSeekEnd_ = 0;this.lastSeekEnd_ = -1;this.behindLiveEdge_ = true;this.timeupdateSeen_ = false;this.seekedBehindLive_ = false;this.nextSeekedFromUser_ = false;this.clearInterval(this.trackingInterval_);this.trackingInterval_ = null;this.off(this.player_, ['play', 'pause'], this.trackLiveHandler_);this.off(this.player_, 'seeked', this.handleSeeked_);this.off(this.player_, 'play', this.handlePlay_);this.off(this.player_, 'timeupdate', this.handleFirstTimeupdate_);this.off(this.player_, 'timeupdate', this.seekToLiveEdge_);}/*** The next seeked event is from the user. Meaning that any seek* > 2s behind live will be considered behind live for real and* liveTolerance will be ignored.*/nextSeekedFromUser() {this.nextSeekedFromUser_ = true;}/*** stop tracking live playback*/stopTracking() {if (!this.isTracking()) {return;}this.reset_();this.trigger('liveedgechange');}/*** A helper to get the player seekable end* so that we don't have to null check everywhere** @return {number}* The furthest seekable end or Infinity.*/seekableEnd() {const seekable = this.player_.seekable();const seekableEnds = [];let i = seekable ? seekable.length : 0;while (i--) {seekableEnds.push(seekable.end(i));}// grab the furthest seekable end after sorting, or if there are none// default to Infinityreturn seekableEnds.length ? seekableEnds.sort()[seekableEnds.length - 1] : Infinity;}/*** A helper to get the player seekable start* so that we don't have to null check everywhere** @return {number}* The earliest seekable start or 0.*/seekableStart() {const seekable = this.player_.seekable();const seekableStarts = [];let i = seekable ? seekable.length : 0;while (i--) {seekableStarts.push(seekable.start(i));}// grab the first seekable start after sorting, or if there are none// default to 0return seekableStarts.length ? 
seekableStarts.sort()[0] : 0;}/*** Get the live time window aka* the amount of time between seekable start and* live current time.** @return {number}* The amount of seconds that are seekable in* the live video.*/liveWindow() {const liveCurrentTime = this.liveCurrentTime();// if liveCurrenTime is Infinity then we don't have a liveWindow at allif (liveCurrentTime === Infinity) {return 0;}return liveCurrentTime - this.seekableStart();}/*** Determines if the player is live, only checks if this component* is tracking live playback or not** @return {boolean}* Whether liveTracker is tracking*/isLive() {return this.isTracking();}/*** Determines if currentTime is at the live edge and won't fall behind* on each seekableendchange** @return {boolean}* Whether playback is at the live edge*/atLiveEdge() {return !this.behindLiveEdge();}/*** get what we expect the live current time to be** @return {number}* The expected live current time*/liveCurrentTime() {return this.pastSeekEnd() + this.seekableEnd();}/*** The number of seconds that have occurred after seekable end* changed. This will be reset to 0 once seekable end changes.** @return {number}* Seconds past the current seekable end*/pastSeekEnd() {const seekableEnd = this.seekableEnd();if (this.lastSeekEnd_ !== -1 && seekableEnd !== this.lastSeekEnd_) {this.pastSeekEnd_ = 0;}this.lastSeekEnd_ = seekableEnd;return this.pastSeekEnd_;}/*** If we are currently behind the live edge, aka currentTime will be* behind on a seekableendchange** @return {boolean}* If we are behind the live edge*/behindLiveEdge() {return this.behindLiveEdge_;}/*** Whether live tracker is currently tracking or not.*/isTracking() {return typeof this.trackingInterval_ === 'number';}/*** Seek to the live edge if we are behind the live edge*/seekToLiveEdge() {this.seekedBehindLive_ = false;if (this.atLiveEdge()) {return;}this.nextSeekedFromUser_ = false;this.player_.currentTime(this.liveCurrentTime());}/*** Dispose of liveTracker*/dispose() {this.stopTracking();super.dispose();}}Component$1.registerComponent('LiveTracker', LiveTracker);/*** Displays an element over the player which contains an optional title and* description for the current content.** Much of the code for this component originated in the now obsolete* videojs-dock plugin: https://github.com/brightcove/videojs-dock/** @extends Component*/class TitleBar extends Component$1 {constructor(player, options) {super(player, options);this.on('statechanged', e => this.updateDom_());this.updateDom_();}/*** Create the `TitleBar`'s DOM element** @return {Element}* The element that was created.*/createEl() {this.els = {title: createEl('div', {className: 'vjs-title-bar-title',id: `vjs-title-bar-title-${newGUID()}`}),description: createEl('div', {className: 'vjs-title-bar-description',id: `vjs-title-bar-description-${newGUID()}`})};return createEl('div', {className: 'vjs-title-bar'}, {}, values$1(this.els));}/*** Updates the DOM based on the component's state object.*/updateDom_() {const tech = this.player_.tech_;const techEl = tech && tech.el_;const techAriaAttrs = {title: 'aria-labelledby',description: 'aria-describedby'};['title', 'description'].forEach(k => {const value = this.state[k];const el = this.els[k];const techAriaAttr = techAriaAttrs[k];emptyEl(el);if (value) {textContent(el, value);}// If there is a tech element available, update its ARIA attributes// according to whether a title and/or description have been provided.if (techEl) {techEl.removeAttribute(techAriaAttr);if (value) {techEl.setAttribute(techAriaAttr, 
el.id);}}});if (this.state.title || this.state.description) {this.show();} else {this.hide();}}/*** Update the contents of the title bar component with new title and* description text.** If both title and description are missing, the title bar will be hidden.** If either title or description are present, the title bar will be visible.** NOTE: Any previously set value will be preserved. To unset a previously* set value, you must pass an empty string or null.** For example:** ```* update({title: 'foo', description: 'bar'}) // title: 'foo', description: 'bar'* update({description: 'bar2'}) // title: 'foo', description: 'bar2'* update({title: ''}) // title: '', description: 'bar2'* update({title: 'foo', description: null}) // title: 'foo', description: null* ```** @param {Object} [options={}]* An options object. When empty, the title bar will be hidden.** @param {string} [options.title]* A title to display in the title bar.** @param {string} [options.description]* A description to display in the title bar.*/update(options) {this.setState(options);}/*** Dispose the component.*/dispose() {const tech = this.player_.tech_;const techEl = tech && tech.el_;if (techEl) {techEl.removeAttribute('aria-labelledby');techEl.removeAttribute('aria-describedby');}super.dispose();this.els = null;}}Component$1.registerComponent('TitleBar', TitleBar);/*** This function is used to fire a sourceset when there is something* similar to `mediaEl.load()` being called. It will try to find the source via* the `src` attribute and then the `<source>` elements. It will then fire `sourceset`* with the source that was found or empty string if we cannot know. If it cannot* find a source then `sourceset` will not be fired.** @param { import('./html5').default } tech* The tech object that sourceset was setup on** @return {boolean}* returns false if the sourceset was not fired and true otherwise.*/const sourcesetLoad = tech => {const el = tech.el();// if `el.src` is set, that source will be loaded.if (el.hasAttribute('src')) {tech.triggerSourceset(el.src);return true;}/*** Since there isn't a src property on the media element, source elements will be used for* implementing the source selection algorithm. This happens asynchronously and* for most cases were there is more than one source we cannot tell what source will* be loaded, without re-implementing the source selection algorithm. At this time we are not* going to do that. There are three special cases that we do handle here though:** 1. If there are no sources, do not fire `sourceset`.* 2. If there is only one `<source>` with a `src` property/attribute that is our `src`* 3. 
If there is more than one `<source>` but all of them have the same `src` url.* That will be our src.*/const sources = tech.$$('source');const srcUrls = [];let src = '';// if there are no sources, do not fire sourcesetif (!sources.length) {return false;}// only count valid/non-duplicate source elementsfor (let i = 0; i < sources.length; i++) {const url = sources[i].src;if (url && srcUrls.indexOf(url) === -1) {srcUrls.push(url);}}// there were no valid sourcesif (!srcUrls.length) {return false;}// there is only one valid source element url// use thatif (srcUrls.length === 1) {src = srcUrls[0];}tech.triggerSourceset(src);return true;};/*** our implementation of an `innerHTML` descriptor for browsers* that do not have one.*/const innerHTMLDescriptorPolyfill = Object.defineProperty({}, 'innerHTML', {get() {return this.cloneNode(true).innerHTML;},set(v) {// make a dummy node to use innerHTML onconst dummy = document.createElement(this.nodeName.toLowerCase());// set innerHTML to the value provideddummy.innerHTML = v;// make a document fragment to hold the nodes from dummyconst docFrag = document.createDocumentFragment();// copy all of the nodes created by the innerHTML on dummy// to the document fragmentwhile (dummy.childNodes.length) {docFrag.appendChild(dummy.childNodes[0]);}// remove contentthis.innerText = '';// now we add all of that html in one by appending the// document fragment. This is how innerHTML does it.window.Element.prototype.appendChild.call(this, docFrag);// then return the result that innerHTML's setter wouldreturn this.innerHTML;}});/*** Get a property descriptor given a list of priorities and the* property to get.*/const getDescriptor = (priority, prop) => {let descriptor = {};for (let i = 0; i < priority.length; i++) {descriptor = Object.getOwnPropertyDescriptor(priority[i], prop);if (descriptor && descriptor.set && descriptor.get) {break;}}descriptor.enumerable = true;descriptor.configurable = true;return descriptor;};const getInnerHTMLDescriptor = tech => getDescriptor([tech.el(), window.HTMLMediaElement.prototype, window.Element.prototype, innerHTMLDescriptorPolyfill], 'innerHTML');/*** Patches browser internal functions so that we can tell synchronously* if a `<source>` was appended to the media element. 
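*
* Usage sketch (illustrative only; assumes a `player` backed by the Html5 tech with
* `sourceset` support enabled): from the outside, all of this patching surfaces as a
* single event:
*
*   player.on('sourceset', (e) => {
*     videojs.log('source set to', e.src);
*   });
*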
For some reason this* causes a `sourceset` if the the media element is ready and has no source.* This happens when:* - The page has just loaded and the media element does not have a source.* - The media element was emptied of all sources, then `load()` was called.** It does this by patching the following functions/properties when they are supported:** - `append()` - can be used to add a `<source>` element to the media element* - `appendChild()` - can be used to add a `<source>` element to the media element* - `insertAdjacentHTML()` - can be used to add a `<source>` element to the media element* - `innerHTML` - can be used to add a `<source>` element to the media element** @param {Html5} tech* The tech object that sourceset is being setup on.*/const firstSourceWatch = function (tech) {const el = tech.el();// make sure firstSourceWatch isn't setup twice.if (el.resetSourceWatch_) {return;}const old = {};const innerDescriptor = getInnerHTMLDescriptor(tech);const appendWrapper = appendFn => (...args) => {const retval = appendFn.apply(el, args);sourcesetLoad(tech);return retval;};['append', 'appendChild', 'insertAdjacentHTML'].forEach(k => {if (!el[k]) {return;}// store the old functionold[k] = el[k];// call the old function with a sourceset if a source// was loadedel[k] = appendWrapper(old[k]);});Object.defineProperty(el, 'innerHTML', merge$2(innerDescriptor, {set: appendWrapper(innerDescriptor.set)}));el.resetSourceWatch_ = () => {el.resetSourceWatch_ = null;Object.keys(old).forEach(k => {el[k] = old[k];});Object.defineProperty(el, 'innerHTML', innerDescriptor);};// on the first sourceset, we need to revert our changestech.one('sourceset', el.resetSourceWatch_);};/*** our implementation of a `src` descriptor for browsers* that do not have one*/const srcDescriptorPolyfill = Object.defineProperty({}, 'src', {get() {if (this.hasAttribute('src')) {return getAbsoluteURL(window.Element.prototype.getAttribute.call(this, 'src'));}return '';},set(v) {window.Element.prototype.setAttribute.call(this, 'src', v);return v;}});const getSrcDescriptor = tech => getDescriptor([tech.el(), window.HTMLMediaElement.prototype, srcDescriptorPolyfill], 'src');/*** setup `sourceset` handling on the `Html5` tech. This function* patches the following element properties/functions:** - `src` - to determine when `src` is set* - `setAttribute()` - to determine when `src` is set* - `load()` - this re-triggers the source selection algorithm, and can* cause a sourceset.** If there is no source when we are adding `sourceset` support or during a `load()`* we also patch the functions listed in `firstSourceWatch`.** @param {Html5} tech* The tech to patch*/const setupSourceset = function (tech) {if (!tech.featuresSourceset) {return;}const el = tech.el();// make sure sourceset isn't setup twice.if (el.resetSourceset_) {return;}const srcDescriptor = getSrcDescriptor(tech);const oldSetAttribute = el.setAttribute;const oldLoad = el.load;Object.defineProperty(el, 'src', merge$2(srcDescriptor, {set: v => {const retval = srcDescriptor.set.call(el, v);// we use the getter here to get the actual value set on srctech.triggerSourceset(el.src);return retval;}}));el.setAttribute = (n, v) => {const retval = oldSetAttribute.call(el, n, v);if (/src/i.test(n)) {tech.triggerSourceset(el.src);}return retval;};el.load = () => {const retval = oldLoad.call(el);// if load was called, but there was no source to fire// sourceset on. 
We have to watch for a source append// as that can trigger a `sourceset` when the media element// has no sourceif (!sourcesetLoad(tech)) {tech.triggerSourceset('');firstSourceWatch(tech);}return retval;};if (el.currentSrc) {tech.triggerSourceset(el.currentSrc);} else if (!sourcesetLoad(tech)) {firstSourceWatch(tech);}el.resetSourceset_ = () => {el.resetSourceset_ = null;el.load = oldLoad;el.setAttribute = oldSetAttribute;Object.defineProperty(el, 'src', srcDescriptor);if (el.resetSourceWatch_) {el.resetSourceWatch_();}};};/*** @file html5.js*//*** HTML5 Media Controller - Wrapper for HTML5 Media API** @mixes Tech~SourceHandlerAdditions* @extends Tech*/class Html5 extends Tech {/*** Create an instance of this Tech.** @param {Object} [options]* The key/value store of player options.** @param {Function} [ready]* Callback function to call when the `HTML5` Tech is ready.*/constructor(options, ready) {super(options, ready);const source = options.source;let crossoriginTracks = false;this.featuresVideoFrameCallback = this.featuresVideoFrameCallback && this.el_.tagName === 'VIDEO';// Set the source if one is provided// 1) Check if the source is new (if not, we want to keep the original so playback isn't interrupted)// 2) Check to see if the network state of the tag was failed at init, and if so, reset the source// anyway so the error gets fired.if (source && (this.el_.currentSrc !== source.src || options.tag && options.tag.initNetworkState_ === 3)) {this.setSource(source);} else {this.handleLateInit_(this.el_);}// setup sourceset after late sourceset/initif (options.enableSourceset) {this.setupSourcesetHandling_();}this.isScrubbing_ = false;if (this.el_.hasChildNodes()) {const nodes = this.el_.childNodes;let nodesLength = nodes.length;const removeNodes = [];while (nodesLength--) {const node = nodes[nodesLength];const nodeName = node.nodeName.toLowerCase();if (nodeName === 'track') {if (!this.featuresNativeTextTracks) {// Empty video tag tracks so the built-in player doesn't use them also.// This may not be fast enough to stop HTML5 browsers from reading the tags// so we'll need to turn off any default tracks if we're manually doing// captions and subtitles. videoElement.textTracksremoveNodes.push(node);} else {// store HTMLTrackElement and TextTrack to remote listthis.remoteTextTrackEls().addTrackElement_(node);this.remoteTextTracks().addTrack(node.track);this.textTracks().addTrack(node.track);if (!crossoriginTracks && !this.el_.hasAttribute('crossorigin') && isCrossOrigin(node.src)) {crossoriginTracks = true;}}}}for (let i = 0; i < removeNodes.length; i++) {this.el_.removeChild(removeNodes[i]);}}this.proxyNativeTracks_();if (this.featuresNativeTextTracks && crossoriginTracks) {log$1.warn('Text Tracks are being loaded from another origin but the crossorigin attribute isn\'t used.\n' + 'This may prevent text tracks from loading.');}// prevent iOS Safari from disabling metadata text tracks during native playbackthis.restoreMetadataTracksInIOSNativePlayer_();// Determine if native controls should be used// Our goal should be to get the custom controls on mobile solid everywhere// so we can remove this all together. 
Right now this will block custom// controls on touch enabled laptops like the Chrome Pixelif ((TOUCH_ENABLED || IS_IPHONE) && options.nativeControlsForTouch === true) {this.setControls(true);}// on iOS, we want to proxy `webkitbeginfullscreen` and `webkitendfullscreen`// into a `fullscreenchange` eventthis.proxyWebkitFullscreen_();this.triggerReady();}/*** Dispose of `HTML5` media element and remove all tracks.*/dispose() {if (this.el_ && this.el_.resetSourceset_) {this.el_.resetSourceset_();}Html5.disposeMediaElement(this.el_);this.options_ = null;// tech will handle clearing of the emulated track listsuper.dispose();}/*** Modify the media element so that we can detect when* the source is changed. Fires `sourceset` just after the source has changed*/setupSourcesetHandling_() {setupSourceset(this);}/*** When a captions track is enabled in the iOS Safari native player, all other* tracks are disabled (including metadata tracks), which nulls all of their* associated cue points. This will restore metadata tracks to their pre-fullscreen* state in those cases so that cue points are not needlessly lost.** @private*/restoreMetadataTracksInIOSNativePlayer_() {const textTracks = this.textTracks();let metadataTracksPreFullscreenState;// captures a snapshot of every metadata track's current stateconst takeMetadataTrackSnapshot = () => {metadataTracksPreFullscreenState = [];for (let i = 0; i < textTracks.length; i++) {const track = textTracks[i];if (track.kind === 'metadata') {metadataTracksPreFullscreenState.push({track,storedMode: track.mode});}}};// snapshot each metadata track's initial state, and update the snapshot// each time there is a track 'change' eventtakeMetadataTrackSnapshot();textTracks.addEventListener('change', takeMetadataTrackSnapshot);this.on('dispose', () => textTracks.removeEventListener('change', takeMetadataTrackSnapshot));const restoreTrackMode = () => {for (let i = 0; i < metadataTracksPreFullscreenState.length; i++) {const storedTrack = metadataTracksPreFullscreenState[i];if (storedTrack.track.mode === 'disabled' && storedTrack.track.mode !== storedTrack.storedMode) {storedTrack.track.mode = storedTrack.storedMode;}}// we only want this handler to be executed on the first 'change' eventtextTracks.removeEventListener('change', restoreTrackMode);};// when we enter fullscreen playback, stop updating the snapshot and// restore all track modes to their pre-fullscreen statethis.on('webkitbeginfullscreen', () => {textTracks.removeEventListener('change', takeMetadataTrackSnapshot);// remove the listener before adding it just in case it wasn't previously removedtextTracks.removeEventListener('change', restoreTrackMode);textTracks.addEventListener('change', restoreTrackMode);});// start updating the snapshot again after leaving fullscreenthis.on('webkitendfullscreen', () => {// remove the listener before adding it just in case it wasn't previously removedtextTracks.removeEventListener('change', takeMetadataTrackSnapshot);textTracks.addEventListener('change', takeMetadataTrackSnapshot);// remove the restoreTrackMode handler in case it wasn't triggered during fullscreen playbacktextTracks.removeEventListener('change', restoreTrackMode);});}/*** Attempt to force override of tracks for the given type** @param {string} type - Track type to override, possible values include 'Audio',* 'Video', and 'Text'.* @param {boolean} override - If set to true native audio/video will be overridden,* otherwise native audio/video will potentially be used.* @private*/overrideNative_(type, override) {// If 
// there is no behavioral change, don't add/remove listeners
if (override !== this[`featuresNative${type}Tracks`]) {return;}const lowerCaseType = type.toLowerCase();if (this[`${lowerCaseType}TracksListeners_`]) {Object.keys(this[`${lowerCaseType}TracksListeners_`]).forEach(eventName => {const elTracks = this.el()[`${lowerCaseType}Tracks`];elTracks.removeEventListener(eventName, this[`${lowerCaseType}TracksListeners_`][eventName]);});}this[`featuresNative${type}Tracks`] = !override;this[`${lowerCaseType}TracksListeners_`] = null;this.proxyNativeTracksForType_(lowerCaseType);}/*** Attempt to force override of native audio tracks.** @param {boolean} override - If set to true native audio will be overridden,* otherwise native audio will potentially be used.*/overrideNativeAudioTracks(override) {this.overrideNative_('Audio', override);}/*** Attempt to force override of native video tracks.** @param {boolean} override - If set to true native video will be overridden,* otherwise native video will potentially be used.*/overrideNativeVideoTracks(override) {this.overrideNative_('Video', override);}/*** Proxy native track list events for the given type to our track* lists if the browser we are playing in supports that type of track list.** @param {string} name - Track type; values include 'audio', 'video', and 'text'* @private*/proxyNativeTracksForType_(name) {const props = NORMAL[name];const elTracks = this.el()[props.getterName];const techTracks = this[props.getterName]();if (!this[`featuresNative${props.capitalName}Tracks`] || !elTracks || !elTracks.addEventListener) {return;}const listeners = {change: e => {const event = {type: 'change',target: techTracks,currentTarget: techTracks,srcElement: techTracks};techTracks.trigger(event);// if this is a text track change event, we should also notify the// remote text track list. This can potentially cause a false positive// if we were to get a change event on a non-remote track and// we triggered the event on the remote text track list which doesn't// contain that track. 
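//
// Usage sketch (illustrative only; 'my-video' is a placeholder element id): native track
// support can also be configured up front through the `html5` tech options when the
// player is created, rather than calling the override methods above:
//
//   videojs('my-video', {
//     html5: {
//       nativeAudioTracks: false,
//       nativeVideoTracks: false,
//       nativeTextTracks: false
//     }
//   });
//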
However, best practices mean looping through the// list of tracks and searching for the appropriate mode value, so,// this shouldn't pose an issueif (name === 'text') {this[REMOTE.remoteText.getterName]().trigger(event);}},addtrack(e) {techTracks.addTrack(e.track);},removetrack(e) {techTracks.removeTrack(e.track);}};const removeOldTracks = function () {const removeTracks = [];for (let i = 0; i < techTracks.length; i++) {let found = false;for (let j = 0; j < elTracks.length; j++) {if (elTracks[j] === techTracks[i]) {found = true;break;}}if (!found) {removeTracks.push(techTracks[i]);}}while (removeTracks.length) {techTracks.removeTrack(removeTracks.shift());}};this[props.getterName + 'Listeners_'] = listeners;Object.keys(listeners).forEach(eventName => {const listener = listeners[eventName];elTracks.addEventListener(eventName, listener);this.on('dispose', e => elTracks.removeEventListener(eventName, listener));});// Remove (native) tracks that are not used anymorethis.on('loadstart', removeOldTracks);this.on('dispose', e => this.off('loadstart', removeOldTracks));}/*** Proxy all native track list events to our track lists if the browser we are playing* in supports that type of track list.** @private*/proxyNativeTracks_() {NORMAL.names.forEach(name => {this.proxyNativeTracksForType_(name);});}/*** Create the `Html5` Tech's DOM element.** @return {Element}* The element that gets created.*/createEl() {let el = this.options_.tag;// Check if this browser supports moving the element into the box.// On the iPhone video will break if you move the element,// So we have to create a brand new element.// If we ingested the player div, we do not need to move the media element.if (!el || !(this.options_.playerElIngest || this.movingMediaElementInDOM)) {// If the original tag is still there, clone and remove it.if (el) {const clone = el.cloneNode(true);if (el.parentNode) {el.parentNode.insertBefore(clone, el);}Html5.disposeMediaElement(el);el = clone;} else {el = document.createElement('video');// determine if native controls should be usedconst tagAttributes = this.options_.tag && getAttributes(this.options_.tag);const attributes = merge$2({}, tagAttributes);if (!TOUCH_ENABLED || this.options_.nativeControlsForTouch !== true) {delete attributes.controls;}setAttributes(el, Object.assign(attributes, {id: this.options_.techId,class: 'vjs-tech'}));}el.playerId = this.options_.playerId;}if (typeof this.options_.preload !== 'undefined') {setAttribute(el, 'preload', this.options_.preload);}if (this.options_.disablePictureInPicture !== undefined) {el.disablePictureInPicture = this.options_.disablePictureInPicture;}// Update specific tag settings, in case they were overridden// `autoplay` has to be *last* so that `muted` and `playsinline` are present// when iOS/Safari or other browsers attempt to autoplay.const settingsAttrs = ['loop', 'muted', 'playsinline', 'autoplay'];for (let i = 0; i < settingsAttrs.length; i++) {const attr = settingsAttrs[i];const value = this.options_[attr];if (typeof value !== 'undefined') {if (value) {setAttribute(el, attr, attr);} else {removeAttribute(el, attr);}el[attr] = value;}}return el;}/*** This will be triggered if the loadstart event has already fired, before videojs was* ready. Two known examples of when this can happen are:* 1. If we're loading the playback object after it has started loading* 2. 
The media is already playing the (often with autoplay on) then** This function will fire another loadstart so that videojs can catchup.** @fires Tech#loadstart** @return {undefined}* returns nothing.*/handleLateInit_(el) {if (el.networkState === 0 || el.networkState === 3) {// The video element hasn't started loading the source yet// or didn't find a sourcereturn;}if (el.readyState === 0) {// NetworkState is set synchronously BUT loadstart is fired at the// end of the current stack, usually before setInterval(fn, 0).// So at this point we know loadstart may have already fired or is// about to fire, and either way the player hasn't seen it yet.// We don't want to fire loadstart prematurely here and cause a// double loadstart so we'll wait and see if it happens between now// and the next loop, and fire it if not.// HOWEVER, we also want to make sure it fires before loadedmetadata// which could also happen between now and the next loop, so we'll// watch for that also.let loadstartFired = false;const setLoadstartFired = function () {loadstartFired = true;};this.on('loadstart', setLoadstartFired);const triggerLoadstart = function () {// We did miss the original loadstart. Make sure the player// sees loadstart before loadedmetadataif (!loadstartFired) {this.trigger('loadstart');}};this.on('loadedmetadata', triggerLoadstart);this.ready(function () {this.off('loadstart', setLoadstartFired);this.off('loadedmetadata', triggerLoadstart);if (!loadstartFired) {// We did miss the original native loadstart. Fire it now.this.trigger('loadstart');}});return;}// From here on we know that loadstart already fired and we missed it.// The other readyState events aren't as much of a problem if we double// them, so not going to go to as much trouble as loadstart to prevent// that unless we find reason to.const eventsToTrigger = ['loadstart'];// loadedmetadata: newly equal to HAVE_METADATA (1) or greatereventsToTrigger.push('loadedmetadata');// loadeddata: newly increased to HAVE_CURRENT_DATA (2) or greaterif (el.readyState >= 2) {eventsToTrigger.push('loadeddata');}// canplay: newly increased to HAVE_FUTURE_DATA (3) or greaterif (el.readyState >= 3) {eventsToTrigger.push('canplay');}// canplaythrough: newly equal to HAVE_ENOUGH_DATA (4)if (el.readyState >= 4) {eventsToTrigger.push('canplaythrough');}// We still need to give the player time to add event listenersthis.ready(function () {eventsToTrigger.forEach(function (type) {this.trigger(type);}, this);});}/*** Set whether we are scrubbing or not.* This is used to decide whether we should use `fastSeek` or not.* `fastSeek` is used to provide trick play on Safari browsers.** @param {boolean} isScrubbing* - true for we are currently scrubbing* - false for we are no longer scrubbing*/setScrubbing(isScrubbing) {this.isScrubbing_ = isScrubbing;}/*** Get whether we are scrubbing or not.** @return {boolean} isScrubbing* - true for we are currently scrubbing* - false for we are no longer scrubbing*/scrubbing() {return this.isScrubbing_;}/*** Set current time for the `HTML5` tech.** @param {number} seconds* Set the current time of the media to this.*/setCurrentTime(seconds) {try {if (this.isScrubbing_ && this.el_.fastSeek && IS_ANY_SAFARI) {this.el_.fastSeek(seconds);} else {this.el_.currentTime = seconds;}} catch (e) {log$1(e, 'Video is not ready. 
(Video.js)');// this.warning(VideoJS.warnings.videoNotReady);}}/*** Get the current duration of the HTML5 media element.** @return {number}* The duration of the media or NaN if there is no duration.*/duration() {// Android Chrome will report duration as Infinity for VOD HLS until after// playback has started, which triggers the live display erroneously.// Return NaN if playback has not started and trigger a durationchange once// the duration can be reliably known.if (this.el_.duration === Infinity && IS_ANDROID && IS_CHROME && this.el_.currentTime === 0) {// Wait for the first `timeupdate` with currentTime > 0 - there may be// several with 0const checkProgress = () => {if (this.el_.currentTime > 0) {// Trigger durationchange for genuinely live videoif (this.el_.duration === Infinity) {this.trigger('durationchange');}this.off('timeupdate', checkProgress);}};this.on('timeupdate', checkProgress);return NaN;}return this.el_.duration || NaN;}/*** Get the current width of the HTML5 media element.** @return {number}* The width of the HTML5 media element.*/width() {return this.el_.offsetWidth;}/*** Get the current height of the HTML5 media element.** @return {number}* The height of the HTML5 media element.*/height() {return this.el_.offsetHeight;}/*** Proxy iOS `webkitbeginfullscreen` and `webkitendfullscreen` into* `fullscreenchange` event.** @private* @fires fullscreenchange* @listens webkitendfullscreen* @listens webkitbeginfullscreen*/proxyWebkitFullscreen_() {if (!('webkitDisplayingFullscreen' in this.el_)) {return;}const endFn = function () {this.trigger('fullscreenchange', {isFullscreen: false});// Safari will sometimes set controls on the video element when exiting fullscreen.if (this.el_.controls && !this.options_.nativeControlsForTouch && this.controls()) {this.el_.controls = false;}};const beginFn = function () {if ('webkitPresentationMode' in this.el_ && this.el_.webkitPresentationMode !== 'picture-in-picture') {this.one('webkitendfullscreen', endFn);this.trigger('fullscreenchange', {isFullscreen: true,// set a flag in case another tech triggers fullscreenchangenativeIOSFullscreen: true});}};this.on('webkitbeginfullscreen', beginFn);this.on('dispose', () => {this.off('webkitbeginfullscreen', beginFn);this.off('webkitendfullscreen', endFn);});}/*** Check if fullscreen is supported on the video el.** @return {boolean}* - True if fullscreen is supported.* - False if fullscreen is not supported.*/supportsFullScreen() {return typeof this.el_.webkitEnterFullScreen === 'function';}/*** Request that the `HTML5` Tech enter fullscreen.*/enterFullScreen() {const video = this.el_;if (video.paused && video.networkState <= video.HAVE_METADATA) {// attempt to prime the video element for programmatic access// this isn't necessary on the desktop but shouldn't hurtsilencePromise(this.el_.play());// playing and pausing synchronously during the transition to fullscreen// can get iOS ~6.1 devices into a play/pause loopthis.setTimeout(function () {video.pause();try {video.webkitEnterFullScreen();} catch (e) {this.trigger('fullscreenerror', e);}}, 0);} else {try {video.webkitEnterFullScreen();} catch (e) {this.trigger('fullscreenerror', e);}}}/*** Request that the `HTML5` Tech exit fullscreen.*/exitFullScreen() {if (!this.el_.webkitDisplayingFullscreen) {this.trigger('fullscreenerror', new Error('The video is not fullscreen'));return;}this.el_.webkitExitFullScreen();}/*** Create a floating video window always on top of other windows so that users may* continue consuming media
while they interact with other content sites, or* applications on their device.** @see [Spec]{@link https://wicg.github.io/picture-in-picture}** @return {Promise}* A promise with a Picture-in-Picture window.*/requestPictureInPicture() {return this.el_.requestPictureInPicture();}/*** Native requestVideoFrameCallback if supported by browser/tech, or fallback* Don't use rVCF on Safari when DRM is playing, as it doesn't fire* Needs to be checked later than the constructor* This will be a false positive for clear sources loaded after a Fairplay source** @param {function} cb function to call* @return {number} id of request*/requestVideoFrameCallback(cb) {if (this.featuresVideoFrameCallback && !this.el_.webkitKeys) {return this.el_.requestVideoFrameCallback(cb);}return super.requestVideoFrameCallback(cb);}/*** Native or fallback requestVideoFrameCallback** @param {number} id request id to cancel*/cancelVideoFrameCallback(id) {if (this.featuresVideoFrameCallback && !this.el_.webkitKeys) {this.el_.cancelVideoFrameCallback(id);} else {super.cancelVideoFrameCallback(id);}}/*** A getter/setter for the `Html5` Tech's source object.* > Note: Please use {@link Html5#setSource}** @param {Tech~SourceObject} [src]* The source object you want to set on the `HTML5` techs element.** @return {Tech~SourceObject|undefined}* - The current source object when a source is not passed in.* - undefined when setting** @deprecated Since version 5.*/src(src) {if (src === undefined) {return this.el_.src;}// Setting src through `src` instead of `setSrc` will be deprecatedthis.setSrc(src);}/*** Reset the tech by removing all sources and then calling* {@link Html5.resetMediaElement}.*/reset() {Html5.resetMediaElement(this.el_);}/*** Get the current source on the `HTML5` Tech. Falls back to returning the source from* the HTML5 media element.** @return {Tech~SourceObject}* The current source object from the HTML5 tech. 
With a fallback to the* elements source.*/currentSrc() {if (this.currentSource_) {return this.currentSource_.src;}return this.el_.currentSrc;}/*** Set controls attribute for the HTML5 media Element.** @param {string} val* Value to set the controls attribute to*/setControls(val) {this.el_.controls = !!val;}/*** Create and returns a remote {@link TextTrack} object.** @param {string} kind* `TextTrack` kind (subtitles, captions, descriptions, chapters, or metadata)** @param {string} [label]* Label to identify the text track** @param {string} [language]* Two letter language abbreviation** @return {TextTrack}* The TextTrack that gets created.*/addTextTrack(kind, label, language) {if (!this.featuresNativeTextTracks) {return super.addTextTrack(kind, label, language);}return this.el_.addTextTrack(kind, label, language);}/*** Creates either native TextTrack or an emulated TextTrack depending* on the value of `featuresNativeTextTracks`** @param {Object} options* The object should contain the options to initialize the TextTrack with.** @param {string} [options.kind]* `TextTrack` kind (subtitles, captions, descriptions, chapters, or metadata).** @param {string} [options.label]* Label to identify the text track** @param {string} [options.language]* Two letter language abbreviation.** @param {boolean} [options.default]* Default this track to on.** @param {string} [options.id]* The internal id to assign this track.** @param {string} [options.src]* A source url for the track.** @return {HTMLTrackElement}* The track element that gets created.*/createRemoteTextTrack(options) {if (!this.featuresNativeTextTracks) {return super.createRemoteTextTrack(options);}const htmlTrackElement = document.createElement('track');if (options.kind) {htmlTrackElement.kind = options.kind;}if (options.label) {htmlTrackElement.label = options.label;}if (options.language || options.srclang) {htmlTrackElement.srclang = options.language || options.srclang;}if (options.default) {htmlTrackElement.default = options.default;}if (options.id) {htmlTrackElement.id = options.id;}if (options.src) {htmlTrackElement.src = options.src;}return htmlTrackElement;}/*** Creates a remote text track object and returns an html track element.** @param {Object} options The object should contain values for* kind, language, label, and src (location of the WebVTT file)* @param {boolean} [manualCleanup=false] if set to true, the TextTrack* will not be removed from the TextTrackList and HtmlTrackElementList* after a source change* @return {HTMLTrackElement} An Html Track Element.* This can be an emulated {@link HTMLTrackElement} or a native one.**/addRemoteTextTrack(options, manualCleanup) {const htmlTrackElement = super.addRemoteTextTrack(options, manualCleanup);if (this.featuresNativeTextTracks) {this.el().appendChild(htmlTrackElement);}return htmlTrackElement;}/*** Remove remote `TextTrack` from `TextTrackList` object** @param {TextTrack} track* `TextTrack` object to remove*/removeRemoteTextTrack(track) {super.removeRemoteTextTrack(track);if (this.featuresNativeTextTracks) {const tracks = this.$$('track');let i = tracks.length;while (i--) {if (track === tracks[i] || track === tracks[i].track) {this.el().removeChild(tracks[i]);}}}}/*** Gets available media playback quality metrics as specified by the W3C's Media* Playback Quality API.** @see [Spec]{@link https://wicg.github.io/media-playback-quality}** @return {Object}* An object with supported media playback quality metrics*/getVideoPlaybackQuality() {if (typeof this.el().getVideoPlaybackQuality === 
'function') {return this.el().getVideoPlaybackQuality();}const videoPlaybackQuality = {};if (typeof this.el().webkitDroppedFrameCount !== 'undefined' && typeof this.el().webkitDecodedFrameCount !== 'undefined') {videoPlaybackQuality.droppedVideoFrames = this.el().webkitDroppedFrameCount;videoPlaybackQuality.totalVideoFrames = this.el().webkitDecodedFrameCount;}if (window.performance) {videoPlaybackQuality.creationTime = window.performance.now();}return videoPlaybackQuality;}}/* HTML5 Support Testing ---------------------------------------------------- *//*** Element for testing browser HTML5 media capabilities** @type {Element}* @constant* @private*/defineLazyProperty(Html5, 'TEST_VID', function () {if (!isReal()) {return;}const video = document.createElement('video');const track = document.createElement('track');track.kind = 'captions';track.srclang = 'en';track.label = 'English';video.appendChild(track);return video;});/*** Check if HTML5 media is supported by this browser/device.** @return {boolean}* - True if HTML5 media is supported.* - False if HTML5 media is not supported.*/Html5.isSupported = function () {// IE with no Media Player is a LIAR! (#984)try {Html5.TEST_VID.volume = 0.5;} catch (e) {return false;}return !!(Html5.TEST_VID && Html5.TEST_VID.canPlayType);};/*** Check if the tech can support the given type** @param {string} type* The mimetype to check* @return {string} 'probably', 'maybe', or '' (empty string)*/Html5.canPlayType = function (type) {return Html5.TEST_VID.canPlayType(type);};/*** Check if the tech can support the given source** @param {Object} srcObj* The source object* @param {Object} options* The options passed to the tech* @return {string} 'probably', 'maybe', or '' (empty string)*/Html5.canPlaySource = function (srcObj, options) {return Html5.canPlayType(srcObj.type);};/*** Check if the volume can be changed in this browser/device.* Volume cannot be changed in a lot of mobile devices.* Specifically, it can't be changed from 1 on iOS.** @return {boolean}* - True if volume can be controlled* - False otherwise*/Html5.canControlVolume = function () {// IE will error if Windows Media Player not installed #3315try {const volume = Html5.TEST_VID.volume;Html5.TEST_VID.volume = volume / 2 + 0.1;const canControl = volume !== Html5.TEST_VID.volume;// With the introduction of iOS 15, there are cases where the volume is read as// changed but reverts back to its original state at the start of the next tick.// To determine whether volume can be controlled on iOS,// a timeout is set and the volume is checked asynchronously.// Since `features` doesn't currently work asynchronously, the value is manually set.if (canControl && IS_IOS) {window.setTimeout(() => {if (Html5 && Html5.prototype) {Html5.prototype.featuresVolumeControl = volume !== Html5.TEST_VID.volume;}});// default iOS to false, which will be updated in the timeout above.return false;}return canControl;} catch (e) {return false;}};/*** Check if the volume can be muted in this browser/device.* Some devices, e.g. 
iOS, don't allow changing volume* but permits muting/unmuting.** @return {boolean}* - True if volume can be muted* - False otherwise*/Html5.canMuteVolume = function () {try {const muted = Html5.TEST_VID.muted;// in some versions of iOS muted property doesn't always// work, so we want to set both property and attributeHtml5.TEST_VID.muted = !muted;if (Html5.TEST_VID.muted) {setAttribute(Html5.TEST_VID, 'muted', 'muted');} else {removeAttribute(Html5.TEST_VID, 'muted', 'muted');}return muted !== Html5.TEST_VID.muted;} catch (e) {return false;}};/*** Check if the playback rate can be changed in this browser/device.** @return {boolean}* - True if playback rate can be controlled* - False otherwise*/Html5.canControlPlaybackRate = function () {// Playback rate API is implemented in Android Chrome, but doesn't do anything// https://github.com/videojs/video.js/issues/3180if (IS_ANDROID && IS_CHROME && CHROME_VERSION < 58) {return false;}// IE will error if Windows Media Player not installed #3315try {const playbackRate = Html5.TEST_VID.playbackRate;Html5.TEST_VID.playbackRate = playbackRate / 2 + 0.1;return playbackRate !== Html5.TEST_VID.playbackRate;} catch (e) {return false;}};/*** Check if we can override a video/audio elements attributes, with* Object.defineProperty.** @return {boolean}* - True if builtin attributes can be overridden* - False otherwise*/Html5.canOverrideAttributes = function () {// if we cannot overwrite the src/innerHTML property, there is no support// iOS 7 safari for instance cannot do this.try {const noop = () => {};Object.defineProperty(document.createElement('video'), 'src', {get: noop,set: noop});Object.defineProperty(document.createElement('audio'), 'src', {get: noop,set: noop});Object.defineProperty(document.createElement('video'), 'innerHTML', {get: noop,set: noop});Object.defineProperty(document.createElement('audio'), 'innerHTML', {get: noop,set: noop});} catch (e) {return false;}return true;};/*** Check to see if native `TextTrack`s are supported by this browser/device.** @return {boolean}* - True if native `TextTrack`s are supported.* - False otherwise*/Html5.supportsNativeTextTracks = function () {return IS_ANY_SAFARI || IS_IOS && IS_CHROME;};/*** Check to see if native `VideoTrack`s are supported by this browser/device** @return {boolean}* - True if native `VideoTrack`s are supported.* - False otherwise*/Html5.supportsNativeVideoTracks = function () {return !!(Html5.TEST_VID && Html5.TEST_VID.videoTracks);};/*** Check to see if native `AudioTrack`s are supported by this browser/device** @return {boolean}* - True if native `AudioTrack`s are supported.* - False otherwise*/Html5.supportsNativeAudioTracks = function () {return !!(Html5.TEST_VID && Html5.TEST_VID.audioTracks);};/*** An array of events available on the Html5 tech.** @private* @type {Array}*/Html5.Events = ['loadstart', 'suspend', 'abort', 'error', 'emptied', 'stalled', 'loadedmetadata', 'loadeddata', 'canplay', 'canplaythrough', 'playing', 'waiting', 'seeking', 'seeked', 'ended', 'durationchange', 'timeupdate', 'progress', 'play', 'pause', 'ratechange', 'resize', 'volumechange'];/*** Boolean indicating whether the `Tech` supports volume control.** @type {boolean}* @default {@link Html5.canControlVolume}*//*** Boolean indicating whether the `Tech` supports muting volume.** @type {boolean}* @default {@link Html5.canMuteVolume}*//*** Boolean indicating whether the `Tech` supports changing the speed at which the media* plays. 
Examples:* - Set player to play 2x (twice) as fast* - Set player to play 0.5x (half) as fast** @type {boolean}* @default {@link Html5.canControlPlaybackRate}*//*** Boolean indicating whether the `Tech` supports the `sourceset` event.** @type {boolean}* @default*//*** Boolean indicating whether the `HTML5` tech currently supports native `TextTrack`s.** @type {boolean}* @default {@link Html5.supportsNativeTextTracks}*//*** Boolean indicating whether the `HTML5` tech currently supports native `VideoTrack`s.** @type {boolean}* @default {@link Html5.supportsNativeVideoTracks}*//*** Boolean indicating whether the `HTML5` tech currently supports native `AudioTrack`s.** @type {boolean}* @default {@link Html5.supportsNativeAudioTracks}*/[['featuresMuteControl', 'canMuteVolume'], ['featuresPlaybackRate', 'canControlPlaybackRate'], ['featuresSourceset', 'canOverrideAttributes'], ['featuresNativeTextTracks', 'supportsNativeTextTracks'], ['featuresNativeVideoTracks', 'supportsNativeVideoTracks'], ['featuresNativeAudioTracks', 'supportsNativeAudioTracks']].forEach(function ([key, fn]) {defineLazyProperty(Html5.prototype, key, () => Html5[fn](), true);});Html5.prototype.featuresVolumeControl = Html5.canControlVolume();/*** Boolean indicating whether the `HTML5` tech currently supports the media element* moving in the DOM. iOS breaks if you move the media element, so this is set this to* false there. Everywhere else this should be true.** @type {boolean}* @default*/Html5.prototype.movingMediaElementInDOM = !IS_IOS;// TODO: Previous comment: No longer appears to be used. Can probably be removed.// Is this true?/*** Boolean indicating whether the `HTML5` tech currently supports automatic media resize* when going into fullscreen.** @type {boolean}* @default*/Html5.prototype.featuresFullscreenResize = true;/*** Boolean indicating whether the `HTML5` tech currently supports the progress event.* If this is false, manual `progress` events will be triggered instead.** @type {boolean}* @default*/Html5.prototype.featuresProgressEvents = true;/*** Boolean indicating whether the `HTML5` tech currently supports the timeupdate event.* If this is false, manual `timeupdate` events will be triggered instead.** @default*/Html5.prototype.featuresTimeupdateEvents = true;/*** Whether the HTML5 el supports `requestVideoFrameCallback`** @type {boolean}*/Html5.prototype.featuresVideoFrameCallback = !!(Html5.TEST_VID && Html5.TEST_VID.requestVideoFrameCallback);Html5.disposeMediaElement = function (el) {if (!el) {return;}if (el.parentNode) {el.parentNode.removeChild(el);}// remove any child track or source nodes to prevent their loadingwhile (el.hasChildNodes()) {el.removeChild(el.firstChild);}// remove any src reference. 
not setting `src=''` because that causes a warning// in firefoxel.removeAttribute('src');// force the media element to update its loading state by calling load()// however IE on Windows 7N has a bug that throws an error so need a try/catch (#793)if (typeof el.load === 'function') {// wrapping in an iife so it's not deoptimized (#1060#discussion_r10324473)(function () {try {el.load();} catch (e) {// not supported}})();}};Html5.resetMediaElement = function (el) {if (!el) {return;}const sources = el.querySelectorAll('source');let i = sources.length;while (i--) {el.removeChild(sources[i]);}// remove any src reference.// not setting `src=''` because that throws an errorel.removeAttribute('src');if (typeof el.load === 'function') {// wrapping in an iife so it's not deoptimized (#1060#discussion_r10324473)(function () {try {el.load();} catch (e) {// satisfy linter}})();}};/* Native HTML5 element property wrapping ----------------------------------- */// Wrap native boolean attributes with getters that check both property and attribute// The list is as followed:// muted, defaultMuted, autoplay, controls, loop, playsinline[/*** Get the value of `muted` from the media element. `muted` indicates* that the volume for the media should be set to silent. This does not actually change* the `volume` attribute.** @method Html5#muted* @return {boolean}* - True if the value of `volume` should be ignored and the audio set to silent.* - False if the value of `volume` should be used.** @see [Spec]{@link https://www.w3.org/TR/html5/embedded-content-0.html#dom-media-muted}*/'muted',/*** Get the value of `defaultMuted` from the media element. `defaultMuted` indicates* whether the media should start muted or not. Only changes the default state of the* media. `muted` and `defaultMuted` can have different values. {@link Html5#muted} indicates the* current state.** @method Html5#defaultMuted* @return {boolean}* - The value of `defaultMuted` from the media element.* - True indicates that the media should start muted.* - False indicates that the media should not start muted** @see [Spec]{@link https://www.w3.org/TR/html5/embedded-content-0.html#dom-media-defaultmuted}*/'defaultMuted',/*** Get the value of `autoplay` from the media element. `autoplay` indicates* that the media should start to play as soon as the page is ready.** @method Html5#autoplay* @return {boolean}* - The value of `autoplay` from the media element.* - True indicates that the media should start as soon as the page loads.* - False indicates that the media should not start as soon as the page loads.** @see [Spec]{@link https://www.w3.org/TR/html5/embedded-content-0.html#attr-media-autoplay}*/'autoplay',/*** Get the value of `controls` from the media element. `controls` indicates* whether the native media controls should be shown or hidden.** @method Html5#controls* @return {boolean}* - The value of `controls` from the media element.* - True indicates that native controls should be showing.* - False indicates that native controls should be hidden.** @see [Spec]{@link https://www.w3.org/TR/html5/embedded-content-0.html#attr-media-controls}*/'controls',/*** Get the value of `loop` from the media element. 
`loop` indicates* that the media should return to the start of the media and continue playing once* it reaches the end.** @method Html5#loop* @return {boolean}* - The value of `loop` from the media element.* - True indicates that playback should seek back to start once* the end of a media is reached.* - False indicates that playback should not loop back to the start when the* end of the media is reached.** @see [Spec]{@link https://www.w3.org/TR/html5/embedded-content-0.html#attr-media-loop}*/'loop',/*** Get the value of `playsinline` from the media element. `playsinline` indicates* to the browser that non-fullscreen playback is preferred when fullscreen* playback is the native default, such as in iOS Safari.** @method Html5#playsinline* @return {boolean}* - The value of `playsinline` from the media element.* - True indicates that the media should play inline.* - False indicates that the media should not play inline.** @see [Spec]{@link https://html.spec.whatwg.org/#attr-video-playsinline}*/'playsinline'].forEach(function (prop) {Html5.prototype[prop] = function () {return this.el_[prop] || this.el_.hasAttribute(prop);};});// Wrap native boolean attributes with setters that set both property and attribute// The list is as follows:// setMuted, setDefaultMuted, setAutoplay, setLoop, setPlaysinline// setControls is special-cased above[/*** Set the value of `muted` on the media element. `muted` indicates that the current* audio level should be silent.** @method Html5#setMuted* @param {boolean} muted* - True if the audio should be set to silent* - False otherwise** @see [Spec]{@link https://www.w3.org/TR/html5/embedded-content-0.html#dom-media-muted}*/'muted',/*** Set the value of `defaultMuted` on the media element. `defaultMuted` indicates that the current* audio level should be silent, but will only affect the muted state on initial playback.** @method Html5.prototype.setDefaultMuted* @param {boolean} defaultMuted* - True if the audio should be set to silent* - False otherwise** @see [Spec]{@link https://www.w3.org/TR/html5/embedded-content-0.html#dom-media-defaultmuted}*/'defaultMuted',/*** Set the value of `autoplay` on the media element. `autoplay` indicates* that the media should start to play as soon as the page is ready.** @method Html5#setAutoplay* @param {boolean} autoplay* - True indicates that the media should start as soon as the page loads.* - False indicates that the media should not start as soon as the page loads.** @see [Spec]{@link https://www.w3.org/TR/html5/embedded-content-0.html#attr-media-autoplay}*/'autoplay',/*** Set the value of `loop` on the media element. `loop` indicates* that the media should return to the start of the media and continue playing once* it reaches the end.** @method Html5#setLoop* @param {boolean} loop* - True indicates that playback should seek back to start once* the end of a media is reached.* - False indicates that playback should not loop back to the start when the* end of the media is reached.** @see [Spec]{@link https://www.w3.org/TR/html5/embedded-content-0.html#attr-media-loop}*/'loop',/*** Set the value of `playsinline` on the media element.
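* @example
* // A usage sketch added for clarity; `tech` is assumed to be an existing Html5 tech
* // instance (it is not defined in this comment). Prefer inline playback where
* // fullscreen playback would otherwise be the native default (e.g. iOS Safari):
* tech.setPlaysinline(true);
*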
`playsinline` indicates* to the browser that non-fullscreen playback is preferred when fullscreen* playback is the native default, such as in iOS Safari.** @method Html5#setPlaysinline* @param {boolean} playsinline* - True indicates that the media should play inline.* - False indicates that the media should not play inline.** @see [Spec]{@link https://html.spec.whatwg.org/#attr-video-playsinline}*/'playsinline'].forEach(function (prop) {Html5.prototype['set' + toTitleCase$1(prop)] = function (v) {this.el_[prop] = v;if (v) {this.el_.setAttribute(prop, prop);} else {this.el_.removeAttribute(prop);}};});// Wrap native properties with a getter// The list is as followed// paused, currentTime, buffered, volume, poster, preload, error, seeking// seekable, ended, playbackRate, defaultPlaybackRate, disablePictureInPicture// played, networkState, readyState, videoWidth, videoHeight, crossOrigin[/*** Get the value of `paused` from the media element. `paused` indicates whether the media element* is currently paused or not.** @method Html5#paused* @return {boolean}* The value of `paused` from the media element.** @see [Spec]{@link https://www.w3.org/TR/html5/embedded-content-0.html#dom-media-paused}*/'paused',/*** Get the value of `currentTime` from the media element. `currentTime` indicates* the current second that the media is at in playback.** @method Html5#currentTime* @return {number}* The value of `currentTime` from the media element.** @see [Spec]{@link https://www.w3.org/TR/html5/embedded-content-0.html#dom-media-currenttime}*/'currentTime',/*** Get the value of `buffered` from the media element. `buffered` is a `TimeRange`* object that represents the parts of the media that are already downloaded and* available for playback.** @method Html5#buffered* @return {TimeRange}* The value of `buffered` from the media element.** @see [Spec]{@link https://www.w3.org/TR/html5/embedded-content-0.html#dom-media-buffered}*/'buffered',/*** Get the value of `volume` from the media element. `volume` indicates* the current playback volume of audio for a media. `volume` will be a value from 0* (silent) to 1 (loudest and default).** @method Html5#volume* @return {number}* The value of `volume` from the media element. Value will be between 0-1.** @see [Spec]{@link https://www.w3.org/TR/html5/embedded-content-0.html#dom-a-volume}*/'volume',/*** Get the value of `poster` from the media element. `poster` indicates* that the url of an image file that can/will be shown when no media data is available.** @method Html5#poster* @return {string}* The value of `poster` from the media element. Value will be a url to an* image.** @see [Spec]{@link https://www.w3.org/TR/html5/embedded-content-0.html#attr-video-poster}*/'poster',/*** Get the value of `preload` from the media element. `preload` indicates* what should download before the media is interacted with. It can have the following* values:* - none: nothing should be downloaded* - metadata: poster and the first few frames of the media may be downloaded to get* media dimensions and other metadata* - auto: allow the media and metadata for the media to be downloaded before* interaction** @method Html5#preload* @return {string}* The value of `preload` from the media element. Will be 'none', 'metadata',* or 'auto'.** @see [Spec]{@link https://www.w3.org/TR/html5/embedded-content-0.html#attr-media-preload}*/'preload',/*** Get the value of the `error` from the media element. `error` indicates any* MediaError that may have occurred during playback. 
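* @example
* // A usage sketch added for clarity; `tech` is assumed to be an existing Html5 tech instance:
* const mediaError = tech.error();
* if (mediaError) {
*   console.log(mediaError.code, mediaError.message);
* }
*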
If error returns null there is no* current error.** @method Html5#error* @return {MediaError|null}* The value of `error` from the media element. Will be `MediaError` if there* is a current error and null otherwise.** @see [Spec]{@link https://www.w3.org/TR/html5/embedded-content-0.html#dom-media-error}*/'error',/*** Get the value of `seeking` from the media element. `seeking` indicates whether the* media is currently seeking to a new position or not.** @method Html5#seeking* @return {boolean}* - The value of `seeking` from the media element.* - True indicates that the media is currently seeking to a new position.* - False indicates that the media is not seeking to a new position at this time.** @see [Spec]{@link https://www.w3.org/TR/html5/embedded-content-0.html#dom-media-seeking}*/'seeking',/*** Get the value of `seekable` from the media element. `seekable` returns a* `TimeRange` object indicating ranges of time that can currently be `seeked` to.** @method Html5#seekable* @return {TimeRange}* The value of `seekable` from the media element. A `TimeRange` object* indicating the current ranges of time that can be seeked to.** @see [Spec]{@link https://www.w3.org/TR/html5/embedded-content-0.html#dom-media-seekable}*/'seekable',/*** Get the value of `ended` from the media element. `ended` indicates whether* the media has reached the end or not.** @method Html5#ended* @return {boolean}* - The value of `ended` from the media element.* - True indicates that the media has ended.* - False indicates that the media has not ended.** @see [Spec]{@link https://www.w3.org/TR/html5/embedded-content-0.html#dom-media-ended}*/'ended',/*** Get the value of `playbackRate` from the media element. `playbackRate` indicates* the rate at which the media is currently playing back. Examples:* - if playbackRate is set to 2, media will play twice as fast.* - if playbackRate is set to 0.5, media will play half as fast.** @method Html5#playbackRate* @return {number}* The value of `playbackRate` from the media element. A number indicating* the current playback speed of the media, where 1 is normal speed.** @see [Spec]{@link https://www.w3.org/TR/html5/embedded-content-0.html#dom-media-playbackrate}*/'playbackRate',/*** Get the value of `defaultPlaybackRate` from the media element. `defaultPlaybackRate` indicates* the rate at which the media is currently playing back. This value will not indicate the current* `playbackRate` after playback has started, use {@link Html5#playbackRate} for that.** Examples:* - if defaultPlaybackRate is set to 2, media will play twice as fast.* - if defaultPlaybackRate is set to 0.5, media will play half as fast.** @method Html5.prototype.defaultPlaybackRate* @return {number}* The value of `defaultPlaybackRate` from the media element. A number indicating* the current playback speed of the media, where 1 is normal speed.** @see [Spec]{@link https://www.w3.org/TR/html5/embedded-content-0.html#dom-media-playbackrate}*/'defaultPlaybackRate',/*** Get the value of 'disablePictureInPicture' from the video element.** @method Html5#disablePictureInPicture* @return {boolean} value* - The value of `disablePictureInPicture` from the video element.* - True indicates that the video can't be played in Picture-In-Picture mode* - False indicates that the video can be played in Picture-In-Picture mode** @see [Spec]{@link https://w3c.github.io/picture-in-picture/#disable-pip}*/'disablePictureInPicture',/*** Get the value of `played` from the media element. 
`played` returns a `TimeRange`* object representing points in the media timeline that have been played.** @method Html5#played* @return {TimeRange}* The value of `played` from the media element. A `TimeRange` object indicating* the ranges of time that have been played.** @see [Spec]{@link https://www.w3.org/TR/html5/embedded-content-0.html#dom-media-played}*/'played',/*** Get the value of `networkState` from the media element. `networkState` indicates* the current network state. It returns an enumeration from the following list:* - 0: NETWORK_EMPTY* - 1: NETWORK_IDLE* - 2: NETWORK_LOADING* - 3: NETWORK_NO_SOURCE** @method Html5#networkState* @return {number}* The value of `networkState` from the media element. This will be a number* from the list in the description.** @see [Spec] {@link https://www.w3.org/TR/html5/embedded-content-0.html#dom-media-networkstate}*/'networkState',/*** Get the value of `readyState` from the media element. `readyState` indicates* the current state of the media element. It returns an enumeration from the* following list:* - 0: HAVE_NOTHING* - 1: HAVE_METADATA* - 2: HAVE_CURRENT_DATA* - 3: HAVE_FUTURE_DATA* - 4: HAVE_ENOUGH_DATA** @method Html5#readyState* @return {number}* The value of `readyState` from the media element. This will be a number* from the list in the description.** @see [Spec] {@link https://www.w3.org/TR/html5/embedded-content-0.html#ready-states}*/'readyState',/*** Get the value of `videoWidth` from the video element. `videoWidth` indicates* the current width of the video in css pixels.** @method Html5#videoWidth* @return {number}* The value of `videoWidth` from the video element. This will be a number* in css pixels.** @see [Spec] {@link https://www.w3.org/TR/html5/embedded-content-0.html#dom-video-videowidth}*/'videoWidth',/*** Get the value of `videoHeight` from the video element. `videoHeight` indicates* the current height of the video in css pixels.** @method Html5#videoHeight* @return {number}* The value of `videoHeight` from the video element. This will be a number* in css pixels.** @see [Spec] {@link https://www.w3.org/TR/html5/embedded-content-0.html#dom-video-videowidth}*/'videoHeight',/*** Get the value of `crossOrigin` from the media element. `crossOrigin` indicates* to the browser that should sent the cookies along with the requests for the* different assets/playlists** @method Html5#crossOrigin* @return {string}* - anonymous indicates that the media should not sent cookies.* - use-credentials indicates that the media should sent cookies along the requests.** @see [Spec]{@link https://html.spec.whatwg.org/#attr-media-crossorigin}*/'crossOrigin'].forEach(function (prop) {Html5.prototype[prop] = function () {return this.el_[prop];};});// Wrap native properties with a setter in this format:// set + toTitleCase(name)// The list is as follows:// setVolume, setSrc, setPoster, setPreload, setPlaybackRate, setDefaultPlaybackRate,// setDisablePictureInPicture, setCrossOrigin[/*** Set the value of `volume` on the media element. `volume` indicates the current* audio level as a percentage in decimal form. This means that 1 is 100%, 0.5 is 50%, and* so on.** @method Html5#setVolume* @param {number} percentAsDecimal* The volume percent as a decimal. Valid range is from 0-1.** @see [Spec]{@link https://www.w3.org/TR/html5/embedded-content-0.html#dom-a-volume}*/'volume',/*** Set the value of `src` on the media element. 
`src` indicates the current* {@link Tech~SourceObject} for the media.** @method Html5#setSrc* @param {Tech~SourceObject} src* The source object to set as the current source.** @see [Spec]{@link https://www.w3.org/TR/html5/embedded-content-0.html#dom-media-src}*/'src',/*** Set the value of `poster` on the media element. `poster` is the url to* an image file that can/will be shown when no media data is available.** @method Html5#setPoster* @param {string} poster* The url to an image that should be used as the `poster` for the media* element.** @see [Spec]{@link https://www.w3.org/TR/html5/embedded-content-0.html#attr-media-poster}*/'poster',/*** Set the value of `preload` on the media element. `preload` indicates* what should download before the media is interacted with. It can have the following* values:* - none: nothing should be downloaded* - metadata: poster and the first few frames of the media may be downloaded to get* media dimensions and other metadata* - auto: allow the media and metadata for the media to be downloaded before* interaction** @method Html5#setPreload* @param {string} preload* The value of `preload` to set on the media element. Must be 'none', 'metadata',* or 'auto'.** @see [Spec]{@link https://www.w3.org/TR/html5/embedded-content-0.html#attr-media-preload}*/'preload',/*** Set the value of `playbackRate` on the media element. `playbackRate` indicates* the rate at which the media should play back. Examples:* - if playbackRate is set to 2, media will play twice as fast.* - if playbackRate is set to 0.5, media will play half as fast.** @method Html5#setPlaybackRate* @return {number}* The value of `playbackRate` from the media element. A number indicating* the current playback speed of the media, where 1 is normal speed.** @see [Spec]{@link https://www.w3.org/TR/html5/embedded-content-0.html#dom-media-playbackrate}*/'playbackRate',/*** Set the value of `defaultPlaybackRate` on the media element. `defaultPlaybackRate` indicates* the rate at which the media should play back upon initial startup. Changing this value* after a video has started will do nothing. Instead you should use {@link Html5#setPlaybackRate}.** Example Values:* - if playbackRate is set to 2, media will play twice as fast.* - if playbackRate is set to 0.5, media will play half as fast.** @method Html5.prototype.setDefaultPlaybackRate* @return {number}* The value of `defaultPlaybackRate` from the media element. A number indicating* the current playback speed of the media, where 1 is normal speed.** @see [Spec]{@link https://www.w3.org/TR/html5/embedded-content-0.html#dom-media-defaultplaybackrate}*/'defaultPlaybackRate',/*** Prevents the browser from suggesting a Picture-in-Picture context menu* or from requesting Picture-in-Picture automatically in some cases.** @method Html5#setDisablePictureInPicture* @param {boolean} value* A true value will disable Picture-in-Picture mode.** @see [Spec]{@link https://w3c.github.io/picture-in-picture/#disable-pip}*/'disablePictureInPicture',/*** Set the value of `crossOrigin` on the media element.
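* @example
* // A usage sketch added for clarity; `tech` is assumed to be an existing Html5 tech instance.
* tech.setCrossOrigin('use-credentials'); // send cookies/credentials with media requests
* tech.setCrossOrigin('anonymous'); // make CORS requests without credentials
*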
`crossOrigin` indicates* to the browser that should sent the cookies along with the requests for the* different assets/playlists** @method Html5#setCrossOrigin* @param {string} crossOrigin* - anonymous indicates that the media should not sent cookies.* - use-credentials indicates that the media should sent cookies along the requests.** @see [Spec]{@link https://html.spec.whatwg.org/#attr-media-crossorigin}*/'crossOrigin'].forEach(function (prop) {Html5.prototype['set' + toTitleCase$1(prop)] = function (v) {this.el_[prop] = v;};});// wrap native functions with a function// The list is as follows:// pause, load, play[/*** A wrapper around the media elements `pause` function. This will call the `HTML5`* media elements `pause` function.** @method Html5#pause* @see [Spec]{@link https://www.w3.org/TR/html5/embedded-content-0.html#dom-media-pause}*/'pause',/*** A wrapper around the media elements `load` function. This will call the `HTML5`s* media element `load` function.** @method Html5#load* @see [Spec]{@link https://www.w3.org/TR/html5/embedded-content-0.html#dom-media-load}*/'load',/*** A wrapper around the media elements `play` function. This will call the `HTML5`s* media element `play` function.** @method Html5#play* @see [Spec]{@link https://www.w3.org/TR/html5/embedded-content-0.html#dom-media-play}*/'play'].forEach(function (prop) {Html5.prototype[prop] = function () {return this.el_[prop]();};});Tech.withSourceHandlers(Html5);/*** Native source handler for Html5, simply passes the source to the media element.** @property {Tech~SourceObject} source* The source object** @property {Html5} tech* The instance of the HTML5 tech.*/Html5.nativeSourceHandler = {};/*** Check if the media element can play the given mime type.** @param {string} type* The mimetype to check** @return {string}* 'probably', 'maybe', or '' (empty string)*/Html5.nativeSourceHandler.canPlayType = function (type) {// IE without MediaPlayer throws an error (#519)try {return Html5.TEST_VID.canPlayType(type);} catch (e) {return '';}};/*** Check if the media element can handle a source natively.** @param {Tech~SourceObject} source* The source object** @param {Object} [options]* Options to be passed to the tech.** @return {string}* 'probably', 'maybe', or '' (empty string).*/Html5.nativeSourceHandler.canHandleSource = function (source, options) {// If a type was provided we should rely on thatif (source.type) {return Html5.nativeSourceHandler.canPlayType(source.type);// If no type, fall back to checking 'video/[EXTENSION]'} else if (source.src) {const ext = getFileExtension(source.src);return Html5.nativeSourceHandler.canPlayType(`video/${ext}`);}return '';};/*** Pass the source to the native media element.** @param {Tech~SourceObject} source* The source object** @param {Html5} tech* The instance of the Html5 tech** @param {Object} [options]* The options to pass to the source*/Html5.nativeSourceHandler.handleSource = function (source, tech, options) {tech.setSrc(source.src);};/*** A noop for the native dispose function, as cleanup is not needed.*/Html5.nativeSourceHandler.dispose = function () {};// Register the native source handlerHtml5.registerSourceHandler(Html5.nativeSourceHandler);Tech.registerTech('Html5', Html5);/*** @file player.js*/// The following tech events are simply re-triggered// on the player when they happenconst TECH_EVENTS_RETRIGGER = [/*** Fired while the user agent is downloading media data.** @event Player#progress* @type {Event}*//*** Retrigger the `progress` event that was triggered by the {@link 
Tech}.** @private* @method Player#handleTechProgress_* @fires Player#progress* @listens Tech#progress*/'progress',/*** Fires when the loading of an audio/video is aborted.** @event Player#abort* @type {Event}*//*** Retrigger the `abort` event that was triggered by the {@link Tech}.** @private* @method Player#handleTechAbort_* @fires Player#abort* @listens Tech#abort*/'abort',/*** Fires when the browser is intentionally not getting media data.** @event Player#suspend* @type {Event}*//*** Retrigger the `suspend` event that was triggered by the {@link Tech}.** @private* @method Player#handleTechSuspend_* @fires Player#suspend* @listens Tech#suspend*/'suspend',/*** Fires when the current playlist is empty.** @event Player#emptied* @type {Event}*//*** Retrigger the `emptied` event that was triggered by the {@link Tech}.** @private* @method Player#handleTechEmptied_* @fires Player#emptied* @listens Tech#emptied*/'emptied',/*** Fires when the browser is trying to get media data, but data is not available.** @event Player#stalled* @type {Event}*//*** Retrigger the `stalled` event that was triggered by the {@link Tech}.** @private* @method Player#handleTechStalled_* @fires Player#stalled* @listens Tech#stalled*/'stalled',/*** Fires when the browser has loaded meta data for the audio/video.** @event Player#loadedmetadata* @type {Event}*//*** Retrigger the `loadedmetadata` event that was triggered by the {@link Tech}.** @private* @method Player#handleTechLoadedmetadata_* @fires Player#loadedmetadata* @listens Tech#loadedmetadata*/'loadedmetadata',/*** Fires when the browser has loaded the current frame of the audio/video.** @event Player#loadeddata* @type {event}*//*** Retrigger the `loadeddata` event that was triggered by the {@link Tech}.** @private* @method Player#handleTechLoaddeddata_* @fires Player#loadeddata* @listens Tech#loadeddata*/'loadeddata',/*** Fires when the current playback position has changed.** @event Player#timeupdate* @type {event}*//*** Retrigger the `timeupdate` event that was triggered by the {@link Tech}.** @private* @method Player#handleTechTimeUpdate_* @fires Player#timeupdate* @listens Tech#timeupdate*/'timeupdate',/*** Fires when the video's intrinsic dimensions change** @event Player#resize* @type {event}*//*** Retrigger the `resize` event that was triggered by the {@link Tech}.** @private* @method Player#handleTechResize_* @fires Player#resize* @listens Tech#resize*/'resize',/*** Fires when the volume has been changed** @event Player#volumechange* @type {event}*//*** Retrigger the `volumechange` event that was triggered by the {@link Tech}.** @private* @method Player#handleTechVolumechange_* @fires Player#volumechange* @listens Tech#volumechange*/'volumechange',/*** Fires when the text track has been changed** @event Player#texttrackchange* @type {event}*//*** Retrigger the `texttrackchange` event that was triggered by the {@link Tech}.** @private* @method Player#handleTechTexttrackchange_* @fires Player#texttrackchange* @listens Tech#texttrackchange*/'texttrackchange'];// events to queue when playback rate is zero// this is a hash for the sole purpose of mapping non-camel-cased event names// to camel-cased function namesconst TECH_EVENTS_QUEUE = {canplay: 'CanPlay',canplaythrough: 'CanPlayThrough',playing: 'Playing',seeked: 'Seeked'};const BREAKPOINT_ORDER = ['tiny', 'xsmall', 'small', 'medium', 'large', 'xlarge', 'huge'];const BREAKPOINT_CLASSES = {};// grep: vjs-layout-tiny// grep: vjs-layout-x-small// grep: vjs-layout-small// grep: vjs-layout-medium// grep: 
vjs-layout-large// grep: vjs-layout-x-large// grep: vjs-layout-hugeBREAKPOINT_ORDER.forEach(k => {const v = k.charAt(0) === 'x' ? `x-${k.substring(1)}` : k;BREAKPOINT_CLASSES[k] = `vjs-layout-${v}`;});const DEFAULT_BREAKPOINTS = {tiny: 210,xsmall: 320,small: 425,medium: 768,large: 1440,xlarge: 2560,huge: Infinity};/*** An instance of the `Player` class is created when any of the Video.js setup methods* are used to initialize a video.** After an instance has been created it can be accessed globally in three ways:* 1. By calling `videojs.getPlayer('example_video_1');`* 2. By calling `videojs('example_video_1');` (not recommended)* 2. By using it directly via `videojs.players.example_video_1;`** @extends Component* @global*/class Player extends Component$1 {/*** Create an instance of this class.** @param {Element} tag* The original video DOM element used for configuring options.** @param {Object} [options]* Object of option names and values.** @param {Function} [ready]* Ready callback function.*/constructor(tag, options, ready) {// Make sure tag ID exists// also here.. probably bettertag.id = tag.id || options.id || `vjs_video_${newGUID()}`;// Set Options// The options argument overrides options set in the video tag// which overrides globally set options.// This latter part coincides with the load order// (tag must exist before Player)options = Object.assign(Player.getTagSettings(tag), options);// Delay the initialization of children because we need to set up// player properties first, and can't use `this` before `super()`options.initChildren = false;// Same with creating the elementoptions.createEl = false;// don't auto mixin the evented mixinoptions.evented = false;// we don't want the player to report touch activity on itself// see enableTouchActivity in Componentoptions.reportTouchActivity = false;// If language is not set, get the closest lang attributeif (!options.language) {const closest = tag.closest('[lang]');if (closest) {options.language = closest.getAttribute('lang');}}// Run base component initializing with new optionssuper(null, options, ready);// Create bound methods for document listeners.this.boundDocumentFullscreenChange_ = e => this.documentFullscreenChange_(e);this.boundFullWindowOnEscKey_ = e => this.fullWindowOnEscKey(e);this.boundUpdateStyleEl_ = e => this.updateStyleEl_(e);this.boundApplyInitTime_ = e => this.applyInitTime_(e);this.boundUpdateCurrentBreakpoint_ = e => this.updateCurrentBreakpoint_(e);this.boundHandleTechClick_ = e => this.handleTechClick_(e);this.boundHandleTechDoubleClick_ = e => this.handleTechDoubleClick_(e);this.boundHandleTechTouchStart_ = e => this.handleTechTouchStart_(e);this.boundHandleTechTouchMove_ = e => this.handleTechTouchMove_(e);this.boundHandleTechTouchEnd_ = e => this.handleTechTouchEnd_(e);this.boundHandleTechTap_ = e => this.handleTechTap_(e);// default isFullscreen_ to falsethis.isFullscreen_ = false;// create loggerthis.log = createLogger(this.id_);// Hold our own reference to fullscreen api so it can be mocked in teststhis.fsApi_ = FullscreenApi;// Tracks when a tech changes the posterthis.isPosterFromTech_ = false;// Holds callback info that gets queued when playback rate is zero// and a seek is happeningthis.queuedCallbacks_ = [];// Turn off API access because we're loading a new tech that might load asynchronouslythis.isReady_ = false;// Init state hasStarted_this.hasStarted_ = false;// Init state userActive_this.userActive_ = false;// Init debugEnabled_this.debugEnabled_ = false;// Init state audioOnlyMode_this.audioOnlyMode_ 
= false;// Init state audioPosterMode_this.audioPosterMode_ = false;// Init state audioOnlyCache_this.audioOnlyCache_ = {playerHeight: null,hiddenChildren: []};// if the global option object was accidentally blown away by// someone, bail early with an informative errorif (!this.options_ || !this.options_.techOrder || !this.options_.techOrder.length) {throw new Error('No techOrder specified. Did you overwrite ' + 'videojs.options instead of just changing the ' + 'properties you want to override?');}// Store the original tag used to set optionsthis.tag = tag;// Store the tag attributes used to restore html5 elementthis.tagAttributes = tag && getAttributes(tag);// Update current languagethis.language(this.options_.language);// Update Supported Languagesif (options.languages) {// Normalise player option languages to lowercaseconst languagesToLower = {};Object.getOwnPropertyNames(options.languages).forEach(function (name) {languagesToLower[name.toLowerCase()] = options.languages[name];});this.languages_ = languagesToLower;} else {this.languages_ = Player.prototype.options_.languages;}this.resetCache_();// Set poster/** @type string */this.poster_ = options.poster || '';// Set controls/** @type {boolean} */this.controls_ = !!options.controls;// Original tag settings stored in options// now remove immediately so native controls don't flash.// May be turned back on by HTML5 tech if nativeControlsForTouch is truetag.controls = false;tag.removeAttribute('controls');this.changingSrc_ = false;this.playCallbacks_ = [];this.playTerminatedQueue_ = [];// the attribute overrides the optionif (tag.hasAttribute('autoplay')) {this.autoplay(true);} else {// otherwise use the setter to validate and// set the correct value.this.autoplay(this.options_.autoplay);}// check pluginsif (options.plugins) {Object.keys(options.plugins).forEach(name => {if (typeof this[name] !== 'function') {throw new Error(`plugin "${name}" does not exist`);}});}/** Store the internal state of scrubbing** @private* @return {Boolean} True if the user is scrubbing*/this.scrubbing_ = false;this.el_ = this.createEl();// Make this an evented object and use `el_` as its event bus.evented(this, {eventBusKey: 'el_'});// listen to document and player fullscreenchange handlers so we receive those events// before a user can receive them so we can update isFullscreen appropriately.// make sure that we listen to fullscreenchange events before everything else to make sure that// our isFullscreen method is updated properly for internal components as well as external.if (this.fsApi_.requestFullscreen) {on(document, this.fsApi_.fullscreenchange, this.boundDocumentFullscreenChange_);this.on(this.fsApi_.fullscreenchange, this.boundDocumentFullscreenChange_);}if (this.fluid_) {this.on(['playerreset', 'resize'], this.boundUpdateStyleEl_);}// We also want to pass the original player options to each component and plugin// as well so they don't need to reach back into the player for options later.// We also need to do another copy of this.options_ so we don't end up with// an infinite loop.const playerOptionsCopy = merge$2(this.options_);// Load pluginsif (options.plugins) {Object.keys(options.plugins).forEach(name => {this[name](options.plugins[name]);});}// Enable debug mode to fire debugon event for all plugins.if (options.debug) {this.debug(true);}this.options_.playerOptions = playerOptionsCopy;this.middleware_ = [];this.playbackRates(options.playbackRates);if (options.experimentalSvgIcons) {// Add SVG Sprite to the DOMconst parser = new 
window.DOMParser();const parsedSVG = parser.parseFromString(icons, 'image/svg+xml');const errorNode = parsedSVG.querySelector('parsererror');if (errorNode) {log$1.warn('Failed to load SVG Icons. Falling back to Font Icons.');this.options_.experimentalSvgIcons = null;} else {const sprite = parsedSVG.documentElement;sprite.style.display = 'none';this.el_.appendChild(sprite);this.addClass('vjs-svg-icons-enabled');}}this.initChildren();// Set isAudio based on whether or not an audio tag was usedthis.isAudio(tag.nodeName.toLowerCase() === 'audio');// Update controls className. Can't do this when the controls are initially// set because the element doesn't exist yet.if (this.controls()) {this.addClass('vjs-controls-enabled');} else {this.addClass('vjs-controls-disabled');}// Set ARIA label and region role depending on player typethis.el_.setAttribute('role', 'region');if (this.isAudio()) {this.el_.setAttribute('aria-label', this.localize('Audio Player'));} else {this.el_.setAttribute('aria-label', this.localize('Video Player'));}if (this.isAudio()) {this.addClass('vjs-audio');}// TODO: Make this smarter. Toggle user state between touching/mousing// using events, since devices can have both touch and mouse events.// TODO: Make this check be performed again when the window switches between monitors// (See https://github.com/videojs/video.js/issues/5683)if (TOUCH_ENABLED) {this.addClass('vjs-touch-enabled');}// iOS Safari has broken hover handlingif (!IS_IOS) {this.addClass('vjs-workinghover');}// Make player easily findable by IDPlayer.players[this.id_] = this;// Add a major version class to aid css in pluginsconst majorVersion = version$5.split('.')[0];this.addClass(`vjs-v${majorVersion}`);// When the player is first initialized, trigger activity so components// like the control bar show themselves if neededthis.userActive(true);this.reportUserActivity();this.one('play', e => this.listenForUserActivity_(e));this.on('keydown', e => this.handleKeyDown(e));this.on('languagechange', e => this.handleLanguagechange(e));this.breakpoints(this.options_.breakpoints);this.responsive(this.options_.responsive);// Calling both the audio mode methods after the player is fully// setup to be able to listen to the events triggered by themthis.on('ready', () => {// Calling the audioPosterMode method first so that// the audioOnlyMode can take precedence when both options are set to truethis.audioPosterMode(this.options_.audioPosterMode);this.audioOnlyMode(this.options_.audioOnlyMode);});}/*** Destroys the video player and does any necessary cleanup.** This is especially helpful if you are dynamically adding and removing videos* to/from the DOM.** @fires Player#dispose*/dispose() {/*** Called when the player is being disposed of.** @event Player#dispose* @type {Event}*/this.trigger('dispose');// prevent dispose from being called twicethis.off('dispose');// Make sure all player-specific document listeners are unbound. 
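/* A hedged usage sketch for dispose() as documented above; it assumes a player
   registered as 'example_video_1' exists. Disposing removes the player element,
   unbinds the document listeners handled below and clears the videojs.players entry:

   const player = videojs.getPlayer('example_video_1');
   if (player) {
     player.dispose();
   }
*/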
This isoff(document, this.fsApi_.fullscreenchange, this.boundDocumentFullscreenChange_);off(document, 'keydown', this.boundFullWindowOnEscKey_);if (this.styleEl_ && this.styleEl_.parentNode) {this.styleEl_.parentNode.removeChild(this.styleEl_);this.styleEl_ = null;}// Kill reference to this playerPlayer.players[this.id_] = null;if (this.tag && this.tag.player) {this.tag.player = null;}if (this.el_ && this.el_.player) {this.el_.player = null;}if (this.tech_) {this.tech_.dispose();this.isPosterFromTech_ = false;this.poster_ = '';}if (this.playerElIngest_) {this.playerElIngest_ = null;}if (this.tag) {this.tag = null;}clearCacheForPlayer(this);// remove all event handlers for track lists// all tracks and track listeners are removed on// tech disposeALL.names.forEach(name => {const props = ALL[name];const list = this[props.getterName]();// if it is not a native list// we have to manually remove event listenersif (list && list.off) {list.off();}});// the actual .el_ is removed here, or replaced ifsuper.dispose({restoreEl: this.options_.restoreEl});}/*** Create the `Player`'s DOM element.** @return {Element}* The DOM element that gets created.*/createEl() {let tag = this.tag;let el;let playerElIngest = this.playerElIngest_ = tag.parentNode && tag.parentNode.hasAttribute && tag.parentNode.hasAttribute('data-vjs-player');const divEmbed = this.tag.tagName.toLowerCase() === 'video-js';if (playerElIngest) {el = this.el_ = tag.parentNode;} else if (!divEmbed) {el = this.el_ = super.createEl('div');}// Copy over all the attributes from the tag, including ID and class// ID will now reference player box, not the video tagconst attrs = getAttributes(tag);if (divEmbed) {el = this.el_ = tag;tag = this.tag = document.createElement('video');while (el.children.length) {tag.appendChild(el.firstChild);}if (!hasClass(el, 'video-js')) {addClass(el, 'video-js');}el.appendChild(tag);playerElIngest = this.playerElIngest_ = el;// move properties over from our custom `video-js` element// to our new `video` element. 
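/* Illustrative sketch (assumes a browser environment with video.js loaded) of the
   `video-js` embed that the property-copy loop below supports; properties set on the
   custom element before initialization are carried over to the real <video> tag that
   createEl() swaps in:

   const el = document.createElement('video-js');
   el.setAttribute('controls', '');
   document.body.appendChild(el);
   const player = videojs(el); // el becomes the player box; a <video> tag is created inside it
*/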
This will move things like// `src` or `controls` that were set via js before the player// was initialized.Object.keys(el).forEach(k => {try {tag[k] = el[k];} catch (e) {// we got a a property like outerHTML which we can't actually copy, ignore it}});}// set tabindex to -1 to remove the video element from the focus ordertag.setAttribute('tabindex', '-1');attrs.tabindex = '-1';// Workaround for #4583 on Chrome (on Windows) with JAWS.// See https://github.com/FreedomScientific/VFO-standards-support/issues/78// Note that we can't detect if JAWS is being used, but this ARIA attribute// doesn't change behavior of Chrome if JAWS is not being usedif (IS_CHROME && IS_WINDOWS) {tag.setAttribute('role', 'application');attrs.role = 'application';}// Remove width/height attrs from tag so CSS can make it 100% width/heighttag.removeAttribute('width');tag.removeAttribute('height');if ('width' in attrs) {delete attrs.width;}if ('height' in attrs) {delete attrs.height;}Object.getOwnPropertyNames(attrs).forEach(function (attr) {// don't copy over the class attribute to the player element when we're in a div embed// the class is already set up properly in the divEmbed case// and we want to make sure that the `video-js` class doesn't get lostif (!(divEmbed && attr === 'class')) {el.setAttribute(attr, attrs[attr]);}if (divEmbed) {tag.setAttribute(attr, attrs[attr]);}});// Update tag id/class for use as HTML5 playback tech// Might think we should do this after embedding in container so .vjs-tech class// doesn't flash 100% width/height, but class only applies with .video-js parenttag.playerId = tag.id;tag.id += '_html5_api';tag.className = 'vjs-tech';// Make player findable on elementstag.player = el.player = this;// Default state of video is pausedthis.addClass('vjs-paused');// Add a style element in the player that we'll use to set the width/height// of the player in a way that's still overridable by CSS, just like the// video elementif (window.VIDEOJS_NO_DYNAMIC_STYLE !== true) {this.styleEl_ = createStyleElement('vjs-styles-dimensions');const defaultsStyleEl = $('.vjs-styles-defaults');const head = $('head');head.insertBefore(this.styleEl_, defaultsStyleEl ? 
defaultsStyleEl.nextSibling : head.firstChild);}this.fill_ = false;this.fluid_ = false;// Pass in the width/height/aspectRatio options which will update the style elthis.width(this.options_.width);this.height(this.options_.height);this.fill(this.options_.fill);this.fluid(this.options_.fluid);this.aspectRatio(this.options_.aspectRatio);// support both crossOrigin and crossorigin to reduce confusion and issues around the namethis.crossOrigin(this.options_.crossOrigin || this.options_.crossorigin);// Hide any links within the video/audio tag,// because IE doesn't hide them completely from screen readers.const links = tag.getElementsByTagName('a');for (let i = 0; i < links.length; i++) {const linkEl = links.item(i);addClass(linkEl, 'vjs-hidden');linkEl.setAttribute('hidden', 'hidden');}// insertElFirst seems to cause the networkState to flicker from 3 to 2, so// keep track of the original for later so we can know if the source originally failedtag.initNetworkState_ = tag.networkState;// Wrap video tag in div (el/box) containerif (tag.parentNode && !playerElIngest) {tag.parentNode.insertBefore(el, tag);}// insert the tag as the first child of the player element// then manually add it to the children array so that this.addChild// will work properly for other components//// Breaks iPhone, fixed in HTML5 setup.prependTo(tag, el);this.children_.unshift(tag);// Set lang attr on player to ensure CSS :lang() in consistent with player// if it's been set to something different to the docthis.el_.setAttribute('lang', this.language_);this.el_.setAttribute('translate', 'no');this.el_ = el;return el;}/*** Get or set the `Player`'s crossOrigin option. For the HTML5 player, this* sets the `crossOrigin` property on the `<video>` tag to control the CORS* behavior.** @see [Video Element Attributes]{@link https://developer.mozilla.org/en-US/docs/Web/HTML/Element/video#attr-crossorigin}** @param {string|null} [value]* The value to set the `Player`'s crossOrigin to. If an argument is* given, must be one of `'anonymous'` or `'use-credentials'`, or 'null'.** @return {string|null|undefined}* - The current crossOrigin value of the `Player` when getting.* - undefined when setting*/crossOrigin(value) {// `null` can be set to unset a valueif (typeof value === 'undefined') {return this.techGet_('crossOrigin');}if (value !== null && value !== 'anonymous' && value !== 'use-credentials') {log$1.warn(`crossOrigin must be null, "anonymous" or "use-credentials", given "${value}"`);return;}this.techCall_('setCrossOrigin', value);if (this.posterImage) {this.posterImage.crossOrigin(value);}return;}/*** A getter/setter for the `Player`'s width. Returns the player's configured value.* To get the current width use `currentWidth()`.** @param {number|string} [value]* CSS value to set the `Player`'s width to.** @return {number|undefined}* - The current width of the `Player` when getting.* - Nothing when setting*/width(value) {return this.dimension('width', value);}/*** A getter/setter for the `Player`'s height. 
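* A hedged usage sketch for the crossOrigin/width/height getter-setters documented
* above (assumes an existing player registered as 'example_video_1'):
* ```js
* const player = videojs.getPlayer('example_video_1');
* player.crossOrigin('anonymous'); // forwarded to the <video> element
* player.width(640);               // configured width in px
* player.height(360);              // configured height in px
* console.log(player.width());     // 640
* ```
*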
Returns the player's configured value.* To get the current height use `currentheight()`.** @param {number|string} [value]* CSS value to set the `Player`'s height to.** @return {number|undefined}* - The current height of the `Player` when getting.* - Nothing when setting*/height(value) {return this.dimension('height', value);}/*** A getter/setter for the `Player`'s width & height.** @param {string} dimension* This string can be:* - 'width'* - 'height'** @param {number|string} [value]* Value for dimension specified in the first argument.** @return {number}* The dimension arguments value when getting (width/height).*/dimension(dimension, value) {const privDimension = dimension + '_';if (value === undefined) {return this[privDimension] || 0;}if (value === '' || value === 'auto') {// If an empty string is given, reset the dimension to be automaticthis[privDimension] = undefined;this.updateStyleEl_();return;}const parsedVal = parseFloat(value);if (isNaN(parsedVal)) {log$1.error(`Improper value "${value}" supplied for for ${dimension}`);return;}this[privDimension] = parsedVal;this.updateStyleEl_();}/*** A getter/setter/toggler for the vjs-fluid `className` on the `Player`.** Turning this on will turn off fill mode.** @param {boolean} [bool]* - A value of true adds the class.* - A value of false removes the class.* - No value will be a getter.** @return {boolean|undefined}* - The value of fluid when getting.* - `undefined` when setting.*/fluid(bool) {if (bool === undefined) {return !!this.fluid_;}this.fluid_ = !!bool;if (isEvented(this)) {this.off(['playerreset', 'resize'], this.boundUpdateStyleEl_);}if (bool) {this.addClass('vjs-fluid');this.fill(false);addEventedCallback(this, () => {this.on(['playerreset', 'resize'], this.boundUpdateStyleEl_);});} else {this.removeClass('vjs-fluid');}this.updateStyleEl_();}/*** A getter/setter/toggler for the vjs-fill `className` on the `Player`.** Turning this on will turn off fluid mode.** @param {boolean} [bool]* - A value of true adds the class.* - A value of false removes the class.* - No value will be a getter.** @return {boolean|undefined}* - The value of fluid when getting.* - `undefined` when setting.*/fill(bool) {if (bool === undefined) {return !!this.fill_;}this.fill_ = !!bool;if (bool) {this.addClass('vjs-fill');this.fluid(false);} else {this.removeClass('vjs-fill');}}/*** Get/Set the aspect ratio** @param {string} [ratio]* Aspect ratio for player** @return {string|undefined}* returns the current aspect ratio when getting*//*** A getter/setter for the `Player`'s aspect ratio.** @param {string} [ratio]* The value to set the `Player`'s aspect ratio to.** @return {string|undefined}* - The current aspect ratio of the `Player` when getting.* - undefined when setting*/aspectRatio(ratio) {if (ratio === undefined) {return this.aspectRatio_;}// Check for width:height formatif (!/^\d+\:\d+$/.test(ratio)) {throw new Error('Improper value supplied for aspect ratio. The format should be width:height, for example 16:9.');}this.aspectRatio_ = ratio;// We're assuming if you set an aspect ratio you want fluid mode,// because in fixed mode you could calculate width and height yourself.this.fluid(true);this.updateStyleEl_();}/*** Update styles of the `Player` element (height, width and aspect ratio).** @private* @listens Tech#loadedmetadata*/updateStyleEl_() {if (window.VIDEOJS_NO_DYNAMIC_STYLE === true) {const width = typeof this.width_ === 'number' ? this.width_ : this.options_.width;const height = typeof this.height_ === 'number' ? 
this.height_ : this.options_.height;const techEl = this.tech_ && this.tech_.el();if (techEl) {if (width >= 0) {techEl.width = width;}if (height >= 0) {techEl.height = height;}}return;}let width;let height;let aspectRatio;let idClass;// The aspect ratio is either used directly or to calculate width and height.if (this.aspectRatio_ !== undefined && this.aspectRatio_ !== 'auto') {// Use any aspectRatio that's been specifically setaspectRatio = this.aspectRatio_;} else if (this.videoWidth() > 0) {// Otherwise try to get the aspect ratio from the video metadataaspectRatio = this.videoWidth() + ':' + this.videoHeight();} else {// Or use a default. The video element's is 2:1, but 16:9 is more common.aspectRatio = '16:9';}// Get the ratio as a decimal we can use to calculate dimensionsconst ratioParts = aspectRatio.split(':');const ratioMultiplier = ratioParts[1] / ratioParts[0];if (this.width_ !== undefined) {// Use any width that's been specifically setwidth = this.width_;} else if (this.height_ !== undefined) {// Or calculate the width from the aspect ratio if a height has been setwidth = this.height_ / ratioMultiplier;} else {// Or use the video's metadata, or use the video el's default of 300width = this.videoWidth() || 300;}if (this.height_ !== undefined) {// Use any height that's been specifically setheight = this.height_;} else {// Otherwise calculate the height from the ratio and the widthheight = width * ratioMultiplier;}// Ensure the CSS class is valid by starting with an alpha characterif (/^[^a-zA-Z]/.test(this.id())) {idClass = 'dimensions-' + this.id();} else {idClass = this.id() + '-dimensions';}// Ensure the right class is still on the player for the style elementthis.addClass(idClass);setTextContent(this.styleEl_, `.${idClass} {width: ${width}px;height: ${height}px;}.${idClass}.vjs-fluid:not(.vjs-audio-only-mode) {padding-top: ${ratioMultiplier * 100}%;}`);}/*** Load/Create an instance of playback {@link Tech} including element* and API methods. 
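*
* (Illustrative aside, not from the original source: the fluid sizing rule written
* by `updateStyleEl_` above boils down to the following arithmetic for a 16:9 ratio.)
* ```js
* const [w, h] = '16:9'.split(':').map(Number);
* const ratioMultiplier = h / w;             // 0.5625
* const paddingTop = ratioMultiplier * 100;  // 56.25 -> `padding-top: 56.25%` on .vjs-fluid
* ```
*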
Then append the `Tech` element in `Player` as a child.** @param {string} techName* name of the playback technology** @param {string} source* video source** @private*/loadTech_(techName, source) {// Pause and remove current playback technologyif (this.tech_) {this.unloadTech_();}const titleTechName = toTitleCase$1(techName);const camelTechName = techName.charAt(0).toLowerCase() + techName.slice(1);// get rid of the HTML5 video tag as soon as we are using another techif (titleTechName !== 'Html5' && this.tag) {Tech.getTech('Html5').disposeMediaElement(this.tag);this.tag.player = null;this.tag = null;}this.techName_ = titleTechName;// Turn off API access because we're loading a new tech that might load asynchronouslythis.isReady_ = false;let autoplay = this.autoplay();// if autoplay is a string (or `true` with normalizeAutoplay: true) we pass false to the tech// because the player is going to handle autoplay on `loadstart`if (typeof this.autoplay() === 'string' || this.autoplay() === true && this.options_.normalizeAutoplay) {autoplay = false;}// Grab tech-specific options from player options and add source and parent element to use.const techOptions = {source,autoplay,'nativeControlsForTouch': this.options_.nativeControlsForTouch,'playerId': this.id(),'techId': `${this.id()}_${camelTechName}_api`,'playsinline': this.options_.playsinline,'preload': this.options_.preload,'loop': this.options_.loop,'disablePictureInPicture': this.options_.disablePictureInPicture,'muted': this.options_.muted,'poster': this.poster(),'language': this.language(),'playerElIngest': this.playerElIngest_ || false,'vtt.js': this.options_['vtt.js'],'canOverridePoster': !!this.options_.techCanOverridePoster,'enableSourceset': this.options_.enableSourceset};ALL.names.forEach(name => {const props = ALL[name];techOptions[props.getterName] = this[props.privateName];});Object.assign(techOptions, this.options_[titleTechName]);Object.assign(techOptions, this.options_[camelTechName]);Object.assign(techOptions, this.options_[techName.toLowerCase()]);if (this.tag) {techOptions.tag = this.tag;}if (source && source.src === this.cache_.src && this.cache_.currentTime > 0) {techOptions.startTime = this.cache_.currentTime;}// Initialize tech instanceconst TechClass = Tech.getTech(techName);if (!TechClass) {throw new Error(`No Tech named '${titleTechName}' exists! 
'${titleTechName}' should be registered using videojs.registerTech()'`);}this.tech_ = new TechClass(techOptions);// player.triggerReady is always async, so don't need this to be asyncthis.tech_.ready(bind_(this, this.handleTechReady_), true);textTrackConverter.jsonToTextTracks(this.textTracksJson_ || [], this.tech_);// Listen to all HTML5-defined events and trigger them on the playerTECH_EVENTS_RETRIGGER.forEach(event => {this.on(this.tech_, event, e => this[`handleTech${toTitleCase$1(event)}_`](e));});Object.keys(TECH_EVENTS_QUEUE).forEach(event => {this.on(this.tech_, event, eventObj => {if (this.tech_.playbackRate() === 0 && this.tech_.seeking()) {this.queuedCallbacks_.push({callback: this[`handleTech${TECH_EVENTS_QUEUE[event]}_`].bind(this),event: eventObj});return;}this[`handleTech${TECH_EVENTS_QUEUE[event]}_`](eventObj);});});this.on(this.tech_, 'loadstart', e => this.handleTechLoadStart_(e));this.on(this.tech_, 'sourceset', e => this.handleTechSourceset_(e));this.on(this.tech_, 'waiting', e => this.handleTechWaiting_(e));this.on(this.tech_, 'ended', e => this.handleTechEnded_(e));this.on(this.tech_, 'seeking', e => this.handleTechSeeking_(e));this.on(this.tech_, 'play', e => this.handleTechPlay_(e));this.on(this.tech_, 'pause', e => this.handleTechPause_(e));this.on(this.tech_, 'durationchange', e => this.handleTechDurationChange_(e));this.on(this.tech_, 'fullscreenchange', (e, data) => this.handleTechFullscreenChange_(e, data));this.on(this.tech_, 'fullscreenerror', (e, err) => this.handleTechFullscreenError_(e, err));this.on(this.tech_, 'enterpictureinpicture', e => this.handleTechEnterPictureInPicture_(e));this.on(this.tech_, 'leavepictureinpicture', e => this.handleTechLeavePictureInPicture_(e));this.on(this.tech_, 'error', e => this.handleTechError_(e));this.on(this.tech_, 'posterchange', e => this.handleTechPosterChange_(e));this.on(this.tech_, 'textdata', e => this.handleTechTextData_(e));this.on(this.tech_, 'ratechange', e => this.handleTechRateChange_(e));this.on(this.tech_, 'loadedmetadata', this.boundUpdateStyleEl_);this.usingNativeControls(this.techGet_('controls'));if (this.controls() && !this.usingNativeControls()) {this.addTechControlsListeners_();}// Add the tech element in the DOM if it was not already there// Make sure to not insert the original video element if using Html5if (this.tech_.el().parentNode !== this.el() && (titleTechName !== 'Html5' || !this.tag)) {prependTo(this.tech_.el(), this.el());}// Get rid of the original video tag reference after the first tech is loadedif (this.tag) {this.tag.player = null;this.tag = null;}}/*** Unload and dispose of the current playback {@link Tech}.** @private*/unloadTech_() {// Save the current text tracks so that we can reuse the same text tracks with the next techALL.names.forEach(name => {const props = ALL[name];this[props.privateName] = this[props.getterName]();});this.textTracksJson_ = textTrackConverter.textTracksToJson(this.tech_);this.isReady_ = false;this.tech_.dispose();this.tech_ = false;if (this.isPosterFromTech_) {this.poster_ = '';this.trigger('posterchange');}this.isPosterFromTech_ = false;}/*** Return a reference to the current {@link Tech}.* It will print a warning by default about the danger of using the tech directly* but any argument that is passed in will silence the warning.** @param {*} [safety]* Anything passed in to silence the warning** @return {Tech}* The Tech*/tech(safety) {if (safety === undefined) {log$1.warn('Using the tech directly can be dangerous. 
I hope you know what you\'re doing.\n' + 'See https://github.com/videojs/video.js/issues/2617 for more info.\n');}return this.tech_;}/*** An object that contains Video.js version.** @typedef {Object} PlayerVersion** @property {string} 'video.js' - Video.js version*//*** Returns an object with Video.js version.** @return {PlayerVersion}* An object with Video.js version.*/version() {return {'video.js': version$5};}/*** Set up click and touch listeners for the playback element** - On desktops: a click on the video itself will toggle playback* - On mobile devices: a click on the video toggles controls* which is done by toggling the user state between active and* inactive* - A tap can signal that a user has become active or has become inactive* e.g. a quick tap on an iPhone movie should reveal the controls. Another* quick tap should hide them again (signaling the user is in an inactive* viewing state)* - In addition to this, we still want the user to be considered inactive after* a few seconds of inactivity.** > Note: the only part of iOS interaction we can't mimic with this setup* is a touch and hold on the video element counting as activity in order to* keep the controls showing, but that shouldn't be an issue. A touch and hold* on any controls will still keep the user active** @private*/addTechControlsListeners_() {// Make sure to remove all the previous listeners in case we are called multiple times.this.removeTechControlsListeners_();this.on(this.tech_, 'click', this.boundHandleTechClick_);this.on(this.tech_, 'dblclick', this.boundHandleTechDoubleClick_);// If the controls were hidden we don't want that to change without a tap event// so we'll check if the controls were already showing before reporting user// activitythis.on(this.tech_, 'touchstart', this.boundHandleTechTouchStart_);this.on(this.tech_, 'touchmove', this.boundHandleTechTouchMove_);this.on(this.tech_, 'touchend', this.boundHandleTechTouchEnd_);// The tap listener needs to come after the touchend listener because the tap// listener cancels out any reportedUserActivity when setting userActive(false)this.on(this.tech_, 'tap', this.boundHandleTechTap_);}/*** Remove the listeners used for click and tap controls. This is needed for* toggling to controls disabled, where a tap/touch should do nothing.** @private*/removeTechControlsListeners_() {// We don't want to just use `this.off()` because there might be other needed// listeners added by techs that extend this.this.off(this.tech_, 'tap', this.boundHandleTechTap_);this.off(this.tech_, 'touchstart', this.boundHandleTechTouchStart_);this.off(this.tech_, 'touchmove', this.boundHandleTechTouchMove_);this.off(this.tech_, 'touchend', this.boundHandleTechTouchEnd_);this.off(this.tech_, 'click', this.boundHandleTechClick_);this.off(this.tech_, 'dblclick', this.boundHandleTechDoubleClick_);}/*** Player waits for the tech to be ready** @private*/handleTechReady_() {this.triggerReady();// Keep the same volume as beforeif (this.cache_.volume) {this.techCall_('setVolume', this.cache_.volume);}// Look if the tech found a higher resolution poster while loadingthis.handleTechPosterChange_();// Update the duration if availablethis.handleTechDurationChange_();}/*** Retrigger the `loadstart` event that was triggered by the {@link Tech}.** @fires Player#loadstart* @listens Tech#loadstart* @private*/handleTechLoadStart_() {// TODO: Update to use `emptied` event instead. 
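/* A hedged usage sketch for the tech() accessor documented above; passing any
   argument (conventionally `true`) acknowledges the warning about using the tech
   directly. Assumes a player registered as 'example_video_1':

   const player = videojs.getPlayer('example_video_1');
   const tech = player.tech(true); // e.g. the Html5 tech instance, or undefined before a tech loads
*/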
See #1277.this.removeClass('vjs-ended', 'vjs-seeking');// reset the error statethis.error(null);// Update the durationthis.handleTechDurationChange_();if (!this.paused()) {/*** Fired when the user agent begins looking for media data** @event Player#loadstart* @type {Event}*/this.trigger('loadstart');} else {// reset the hasStarted statethis.hasStarted(false);this.trigger('loadstart');}// autoplay happens after loadstart for the browser,// so we mimic that behaviorthis.manualAutoplay_(this.autoplay() === true && this.options_.normalizeAutoplay ? 'play' : this.autoplay());}/*** Handle autoplay string values, rather than the typical boolean* values that should be handled by the tech. Note that this is not* part of any specification. Valid values and what they do can be* found on the autoplay getter at Player#autoplay()*/manualAutoplay_(type) {if (!this.tech_ || typeof type !== 'string') {return;}// Save original muted() value, set muted to true, and attempt to play().// On promise rejection, restore muted from saved valueconst resolveMuted = () => {const previouslyMuted = this.muted();this.muted(true);const restoreMuted = () => {this.muted(previouslyMuted);};// restore muted on play terminatationthis.playTerminatedQueue_.push(restoreMuted);const mutedPromise = this.play();if (!isPromise(mutedPromise)) {return;}return mutedPromise.catch(err => {restoreMuted();throw new Error(`Rejection at manualAutoplay. Restoring muted value. ${err ? err : ''}`);});};let promise;// if muted defaults to true// the only thing we can do is call playif (type === 'any' && !this.muted()) {promise = this.play();if (isPromise(promise)) {promise = promise.catch(resolveMuted);}} else if (type === 'muted' && !this.muted()) {promise = resolveMuted();} else {promise = this.play();}if (!isPromise(promise)) {return;}return promise.then(() => {this.trigger({type: 'autoplay-success',autoplay: type});}).catch(() => {this.trigger({type: 'autoplay-failure',autoplay: type});});}/*** Update the internal source caches so that we return the correct source from* `src()`, `currentSource()`, and `currentSources()`.** > Note: `currentSources` will not be updated if the source that is passed in exists* in the current `currentSources` cache.*** @param {Tech~SourceObject} srcObj* A string or object source to update our caches to.*/updateSourceCaches_(srcObj = '') {let src = srcObj;let type = '';if (typeof src !== 'string') {src = srcObj.src;type = srcObj.type;}// make sure all the caches are set to default values// to prevent null checkingthis.cache_.source = this.cache_.source || {};this.cache_.sources = this.cache_.sources || [];// try to get the type of the src that was passed inif (src && !type) {type = findMimetype(this, src);}// update `currentSource` cache alwaysthis.cache_.source = merge$2({}, srcObj, {src,type});const matchingSources = this.cache_.sources.filter(s => s.src && s.src === src);const sourceElSources = [];const sourceEls = this.$$('source');const matchingSourceEls = [];for (let i = 0; i < sourceEls.length; i++) {const sourceObj = getAttributes(sourceEls[i]);sourceElSources.push(sourceObj);if (sourceObj.src && sourceObj.src === src) {matchingSourceEls.push(sourceObj.src);}}// if we have matching source els but not matching sources// the current source cache is not up to dateif (matchingSourceEls.length && !matchingSources.length) {this.cache_.sources = sourceElSources;// if we don't have matching source or source els set the// sources cache to the `currentSource` cache} else if (!matchingSources.length) 
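/* A hedged configuration sketch for the string autoplay values handled by
   manualAutoplay_ above (boolean values are left to the tech/browser):

   videojs('example_video_1', {
     // 'muted' -> mute the player, then call play()
     // 'any'   -> call play(); if it rejects, retry muted
     // 'play'  -> call play() without touching muted
     autoplay: 'muted'
   });
*/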
{this.cache_.sources = [this.cache_.source];}// update the tech `src` cachethis.cache_.src = src;}/*** *EXPERIMENTAL* Fired when the source is set or changed on the {@link Tech}* causing the media element to reload.** It will fire for the initial source and each subsequent source.* This event is a custom event from Video.js and is triggered by the {@link Tech}.** The event object for this event contains a `src` property that will contain the source* that was available when the event was triggered. This is generally only necessary if Video.js* is switching techs while the source was being changed.** It is also fired when `load` is called on the player (or media element)* because the {@link https://html.spec.whatwg.org/multipage/media.html#dom-media-load|specification for `load`}* says that the resource selection algorithm needs to be aborted and restarted.* In this case, it is very likely that the `src` property will be set to the* empty string `""` to indicate we do not know what the source will be but* that it is changing.** *This event is currently still experimental and may change in minor releases.** __To use this, pass `enableSourceset` option to the player.__** @event Player#sourceset* @type {Event}* @prop {string} src* The source url available when the `sourceset` was triggered.* It will be an empty string if we cannot know what the source is* but know that the source will change.*//*** Retrigger the `sourceset` event that was triggered by the {@link Tech}.** @fires Player#sourceset* @listens Tech#sourceset* @private*/handleTechSourceset_(event) {// only update the source cache when the source// was not updated using the player apiif (!this.changingSrc_) {let updateSourceCaches = src => this.updateSourceCaches_(src);const playerSrc = this.currentSource().src;const eventSrc = event.src;// if we have a playerSrc that is not a blob, and a tech src that is a blobif (playerSrc && !/^blob:/.test(playerSrc) && /^blob:/.test(eventSrc)) {// if both the tech source and the player source were updated we assume// something like @videojs/http-streaming did the sourceset and skip updating the source cache.if (!this.lastSource_ || this.lastSource_.tech !== eventSrc && this.lastSource_.player !== playerSrc) {updateSourceCaches = () => {};}}// update the source to the initial source right away// in some cases this will be empty stringupdateSourceCaches(eventSrc);// if the `sourceset` `src` was an empty string// wait for a `loadstart` to update the cache to `currentSrc`.// If a sourceset happens before a `loadstart`, we reset the stateif (!event.src) {this.tech_.any(['sourceset', 'loadstart'], e => {// if a sourceset happens before a `loadstart` there// is nothing to do as this `handleTechSourceset_`// will be called again and this will be handled there.if (e.type === 'sourceset') {return;}const techSrc = this.techGet_('currentSrc');this.lastSource_.tech = techSrc;this.updateSourceCaches_(techSrc);});}}this.lastSource_ = {player: this.currentSource().src,tech: event.src};this.trigger({src: event.src,type: 'sourceset'});}/*** Add/remove the vjs-has-started class*** @param {boolean} request* - true: adds the class* - false: remove the class** @return {boolean}* the boolean value of hasStarted_*/hasStarted(request) {if (request === undefined) {// act as getter, if we have no request to changereturn this.hasStarted_;}if (request === this.hasStarted_) {return;}this.hasStarted_ = request;if (this.hasStarted_) {this.addClass('vjs-has-started');} else {this.removeClass('vjs-has-started');}}/*** Fired 
whenever the media begins or resumes playback** @see [Spec]{@link https://html.spec.whatwg.org/multipage/embedded-content.html#dom-media-play}* @fires Player#play* @listens Tech#play* @private*/handleTechPlay_() {this.removeClass('vjs-ended', 'vjs-paused');this.addClass('vjs-playing');// hide the poster when the user hits playthis.hasStarted(true);/*** Triggered whenever an {@link Tech#play} event happens. Indicates that* playback has started or resumed.** @event Player#play* @type {Event}*/this.trigger('play');}/*** Retrigger the `ratechange` event that was triggered by the {@link Tech}.** If there were any events queued while the playback rate was zero, fire* those events now.** @private* @method Player#handleTechRateChange_* @fires Player#ratechange* @listens Tech#ratechange*/handleTechRateChange_() {if (this.tech_.playbackRate() > 0 && this.cache_.lastPlaybackRate === 0) {this.queuedCallbacks_.forEach(queued => queued.callback(queued.event));this.queuedCallbacks_ = [];}this.cache_.lastPlaybackRate = this.tech_.playbackRate();/*** Fires when the playing speed of the audio/video is changed** @event Player#ratechange* @type {event}*/this.trigger('ratechange');}/*** Retrigger the `waiting` event that was triggered by the {@link Tech}.** @fires Player#waiting* @listens Tech#waiting* @private*/handleTechWaiting_() {this.addClass('vjs-waiting');/*** A readyState change on the DOM element has caused playback to stop.** @event Player#waiting* @type {Event}*/this.trigger('waiting');// Browsers may emit a timeupdate event after a waiting event. In order to prevent// premature removal of the waiting class, wait for the time to change.const timeWhenWaiting = this.currentTime();const timeUpdateListener = () => {if (timeWhenWaiting !== this.currentTime()) {this.removeClass('vjs-waiting');this.off('timeupdate', timeUpdateListener);}};this.on('timeupdate', timeUpdateListener);}/*** Retrigger the `canplay` event that was triggered by the {@link Tech}.* > Note: This is not consistent between browsers. See #1351** @fires Player#canplay* @listens Tech#canplay* @private*/handleTechCanPlay_() {this.removeClass('vjs-waiting');/*** The media has a readyState of HAVE_FUTURE_DATA or greater.** @event Player#canplay* @type {Event}*/this.trigger('canplay');}/*** Retrigger the `canplaythrough` event that was triggered by the {@link Tech}.** @fires Player#canplaythrough* @listens Tech#canplaythrough* @private*/handleTechCanPlayThrough_() {this.removeClass('vjs-waiting');/*** The media has a readyState of HAVE_ENOUGH_DATA or greater. 
This means that the* entire media file can be played without buffering.** @event Player#canplaythrough* @type {Event}*/this.trigger('canplaythrough');}/*** Retrigger the `playing` event that was triggered by the {@link Tech}.** @fires Player#playing* @listens Tech#playing* @private*/handleTechPlaying_() {this.removeClass('vjs-waiting');/*** The media is no longer blocked from playback, and has started playing.** @event Player#playing* @type {Event}*/this.trigger('playing');}/*** Retrigger the `seeking` event that was triggered by the {@link Tech}.** @fires Player#seeking* @listens Tech#seeking* @private*/handleTechSeeking_() {this.addClass('vjs-seeking');/*** Fired whenever the player is jumping to a new time** @event Player#seeking* @type {Event}*/this.trigger('seeking');}/*** Retrigger the `seeked` event that was triggered by the {@link Tech}.** @fires Player#seeked* @listens Tech#seeked* @private*/handleTechSeeked_() {this.removeClass('vjs-seeking', 'vjs-ended');/*** Fired when the player has finished jumping to a new time** @event Player#seeked* @type {Event}*/this.trigger('seeked');}/*** Retrigger the `pause` event that was triggered by the {@link Tech}.** @fires Player#pause* @listens Tech#pause* @private*/handleTechPause_() {this.removeClass('vjs-playing');this.addClass('vjs-paused');/*** Fired whenever the media has been paused** @event Player#pause* @type {Event}*/this.trigger('pause');}/*** Retrigger the `ended` event that was triggered by the {@link Tech}.** @fires Player#ended* @listens Tech#ended* @private*/handleTechEnded_() {this.addClass('vjs-ended');this.removeClass('vjs-waiting');if (this.options_.loop) {this.currentTime(0);this.play();} else if (!this.paused()) {this.pause();}/*** Fired when the end of the media resource is reached (currentTime == duration)** @event Player#ended* @type {Event}*/this.trigger('ended');}/*** Fired when the duration of the media resource is first known or changed** @listens Tech#durationchange* @private*/handleTechDurationChange_() {this.duration(this.techGet_('duration'));}/*** Handle a click on the media element to play/pause** @param {Event} event* the event that caused this function to trigger** @listens Tech#click* @private*/handleTechClick_(event) {// When controls are disabled a click should not toggle playback because// the click is considered a controlif (!this.controls_) {return;}if (this.options_ === undefined || this.options_.userActions === undefined || this.options_.userActions.click === undefined || this.options_.userActions.click !== false) {if (this.options_ !== undefined && this.options_.userActions !== undefined && typeof this.options_.userActions.click === 'function') {this.options_.userActions.click.call(this, event);} else if (this.paused()) {silencePromise(this.play());} else {this.pause();}}}/*** Handle a double-click on the media element to enter/exit fullscreen** @param {Event} event* the event that caused this function to trigger** @listens Tech#dblclick* @private*/handleTechDoubleClick_(event) {if (!this.controls_) {return;}// we do not want to toggle fullscreen state// when double-clicking inside a control bar or a modalconst inAllowedEls = Array.prototype.some.call(this.$$('.vjs-control-bar, .vjs-modal-dialog'), el => el.contains(event.target));if (!inAllowedEls) {/** options.userActions.doubleClick** If `undefined` or `true`, double-click toggles fullscreen if controls are present* Set to `false` to disable double-click handling* Set to a function to substitute an external double-click handler*/if 
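/* A hedged configuration sketch for the userActions handling described in the
   comment above (assumes a player created with these options):

   videojs('example_video_1', {
     userActions: {
       click: false,          // clicks on the tech no longer toggle play/pause
       doubleClick(event) {   // replaces the default fullscreen toggle
         this.log('double-click intercepted:', event.type);
       }
     }
   });
*/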
(this.options_ === undefined || this.options_.userActions === undefined || this.options_.userActions.doubleClick === undefined || this.options_.userActions.doubleClick !== false) {if (this.options_ !== undefined && this.options_.userActions !== undefined && typeof this.options_.userActions.doubleClick === 'function') {this.options_.userActions.doubleClick.call(this, event);} else if (this.isFullscreen()) {this.exitFullscreen();} else {this.requestFullscreen();}}}}/*** Handle a tap on the media element. It will toggle the user* activity state, which hides and shows the controls.** @listens Tech#tap* @private*/handleTechTap_() {this.userActive(!this.userActive());}/*** Handle touch to start** @listens Tech#touchstart* @private*/handleTechTouchStart_() {this.userWasActive = this.userActive();}/*** Handle touch to move** @listens Tech#touchmove* @private*/handleTechTouchMove_() {if (this.userWasActive) {this.reportUserActivity();}}/*** Handle touch to end** @param {Event} event* the touchend event that triggered* this function** @listens Tech#touchend* @private*/handleTechTouchEnd_(event) {// Stop the mouse events from also happeningif (event.cancelable) {event.preventDefault();}}/*** @private*/toggleFullscreenClass_() {if (this.isFullscreen()) {this.addClass('vjs-fullscreen');} else {this.removeClass('vjs-fullscreen');}}/*** when the document fschange event triggers it calls this*/documentFullscreenChange_(e) {const targetPlayer = e.target.player;// if another player was fullscreen// do a null check for targetPlayer because older firefox's would put document as e.targetif (targetPlayer && targetPlayer !== this) {return;}const el = this.el();let isFs = document[this.fsApi_.fullscreenElement] === el;if (!isFs && el.matches) {isFs = el.matches(':' + this.fsApi_.fullscreen);}this.isFullscreen(isFs);}/*** Handle Tech Fullscreen Change** @param {Event} event* the fullscreenchange event that triggered this function** @param {Object} data* the data that was sent with the event** @private* @listens Tech#fullscreenchange* @fires Player#fullscreenchange*/handleTechFullscreenChange_(event, data) {if (data) {if (data.nativeIOSFullscreen) {this.addClass('vjs-ios-native-fs');this.tech_.one('webkitendfullscreen', () => {this.removeClass('vjs-ios-native-fs');});}this.isFullscreen(data.isFullscreen);}}handleTechFullscreenError_(event, err) {this.trigger('fullscreenerror', err);}/*** @private*/togglePictureInPictureClass_() {if (this.isInPictureInPicture()) {this.addClass('vjs-picture-in-picture');} else {this.removeClass('vjs-picture-in-picture');}}/*** Handle Tech Enter Picture-in-Picture.** @param {Event} event* the enterpictureinpicture event that triggered this function** @private* @listens Tech#enterpictureinpicture*/handleTechEnterPictureInPicture_(event) {this.isInPictureInPicture(true);}/*** Handle Tech Leave Picture-in-Picture.** @param {Event} event* the leavepictureinpicture event that triggered this function** @private* @listens Tech#leavepictureinpicture*/handleTechLeavePictureInPicture_(event) {this.isInPictureInPicture(false);}/*** Fires when an error occurred during the loading of an audio/video.** @private* @listens Tech#error*/handleTechError_() {const error = this.tech_.error();if (error) {this.error(error);}}/*** Retrigger the `textdata` event that was triggered by the {@link Tech}.** @fires Player#textdata* @listens Tech#textdata* @private*/handleTechTextData_() {let data = null;if (arguments.length > 1) {data = arguments[1];}/*** Fires when we get a textdata event from tech** @event 
Player#textdata* @type {Event}*/this.trigger('textdata', data);}/*** Get object for cached values.** @return {Object}* get the current object cache*/getCache() {return this.cache_;}/*** Resets the internal cache object.** Using this function outside the player constructor or reset method may* have unintended side-effects.** @private*/resetCache_() {this.cache_ = {// Right now, the currentTime is not _really_ cached because it is always// retrieved from the tech (see: currentTime). However, for completeness,// we set it to zero here to ensure that if we do start actually caching// it, we reset it along with everything else.currentTime: 0,initTime: 0,inactivityTimeout: this.options_.inactivityTimeout,duration: NaN,lastVolume: 1,lastPlaybackRate: this.defaultPlaybackRate(),media: null,src: '',source: {},sources: [],playbackRates: [],volume: 1};}/*** Pass values to the playback tech** @param {string} [method]* the method to call** @param {Object} [arg]* the argument to pass** @private*/techCall_(method, arg) {// If it's not ready yet, call method when it isthis.ready(function () {if (method in allowedSetters) {return set(this.middleware_, this.tech_, method, arg);} else if (method in allowedMediators) {return mediate(this.middleware_, this.tech_, method, arg);}try {if (this.tech_) {this.tech_[method](arg);}} catch (e) {log$1(e);throw e;}}, true);}/*** Mediate attempt to call playback tech method* and return the value of the method called.** @param {string} method* Tech method** @return {*}* Value returned by the tech method called, undefined if tech* is not ready or tech method is not present** @private*/techGet_(method) {if (!this.tech_ || !this.tech_.isReady_) {return;}if (method in allowedGetters) {return get(this.middleware_, this.tech_, method);} else if (method in allowedMediators) {return mediate(this.middleware_, this.tech_, method);}// Log error when playback tech object is present but method// is undefined or unavailabletry {return this.tech_[method]();} catch (e) {// When building additional tech libs, an expected method may not be defined yetif (this.tech_[method] === undefined) {log$1(`Video.js: ${method} method not defined for ${this.techName_} playback technology.`, e);throw e;}// When a method isn't available on the object it throws a TypeErrorif (e.name === 'TypeError') {log$1(`Video.js: ${method} unavailable on ${this.techName_} playback technology element.`, e);this.tech_.isReady_ = false;throw e;}// If error unknown, just log and throwlog$1(e);throw e;}}/*** Attempt to begin playback at the first opportunity.** @return {Promise|undefined}* Returns a promise if the browser supports Promises (or one* was passed in as an option). This promise will be resolved on* the return value of play. If this is undefined it will fulfill the* promise chain otherwise the promise chain will be fulfilled when* the promise from play is fulfilled.*/play() {return new Promise(resolve => {this.play_(resolve);});}/*** The actual logic for play, takes a callback that will be resolved on the* return value of play. 
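*
* A hedged usage sketch for the public play() documented above (assumes a player
* registered as 'example_video_1'):
* ```js
* const player = videojs.getPlayer('example_video_1');
* const promise = player.play();
* if (promise !== undefined) {
*   promise.catch(() => {
*     // playback was prevented (e.g. autoplay policy); show a "tap to play" UI instead
*   });
* }
* ```
*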
This allows us to resolve to the play promise if there* is one on modern browsers.** @private* @param {Function} [callback]* The callback that should be called when the techs play is actually called*/play_(callback = silencePromise) {this.playCallbacks_.push(callback);const isSrcReady = Boolean(!this.changingSrc_ && (this.src() || this.currentSrc()));const isSafariOrIOS = Boolean(IS_ANY_SAFARI || IS_IOS);// treat calls to play_ somewhat like the `one` event functionif (this.waitToPlay_) {this.off(['ready', 'loadstart'], this.waitToPlay_);this.waitToPlay_ = null;}// if the player/tech is not ready or the src itself is not ready// queue up a call to play on `ready` or `loadstart`if (!this.isReady_ || !isSrcReady) {this.waitToPlay_ = e => {this.play_();};this.one(['ready', 'loadstart'], this.waitToPlay_);// if we are in Safari, there is a high chance that loadstart will trigger after the gesture timeperiod// in that case, we need to prime the video element by calling load so it'll be ready in timeif (!isSrcReady && isSafariOrIOS) {this.load();}return;}// If the player/tech is ready and we have a source, we can attempt playback.const val = this.techGet_('play');// For native playback, reset the progress bar if we get a play call from a replay.const isNativeReplay = isSafariOrIOS && this.hasClass('vjs-ended');if (isNativeReplay) {this.resetProgressBar_();}// play was terminated if the returned value is nullif (val === null) {this.runPlayTerminatedQueue_();} else {this.runPlayCallbacks_(val);}}/*** These functions will be run when if play is terminated. If play* runPlayCallbacks_ is run these function will not be run. This allows us* to differentiate between a terminated play and an actual call to play.*/runPlayTerminatedQueue_() {const queue = this.playTerminatedQueue_.slice(0);this.playTerminatedQueue_ = [];queue.forEach(function (q) {q();});}/*** When a callback to play is delayed we have to run these* callbacks when play is actually called on the tech. This function* runs the callbacks that were delayed and accepts the return value* from the tech.** @param {undefined|Promise} val* The return value from the tech.*/runPlayCallbacks_(val) {const callbacks = this.playCallbacks_.slice(0);this.playCallbacks_ = [];// clear play terminatedQueue since we finished a real playthis.playTerminatedQueue_ = [];callbacks.forEach(function (cb) {cb(val);});}/*** Pause the video playback*/pause() {this.techCall_('pause');}/*** Check if the player is paused or has yet to play** @return {boolean}* - false: if the media is currently playing* - true: if media is not currently playing*/paused() {// The initial state of paused should be true (in Safari it's actually false)return this.techGet_('paused') === false ? false : true;}/*** Get a TimeRange object representing the current ranges of time that the user* has played.** @return { import('./utils/time').TimeRange }* A time range object that represents all the increments of time that have* been played.*/played() {return this.techGet_('played') || createTimeRanges$1(0, 0);}/*** Sets or returns whether or not the user is "scrubbing". 
Scrubbing is* when the user has clicked the progress bar handle and is* dragging it along the progress bar.** @param {boolean} [isScrubbing]* whether the user is or is not scrubbing** @return {boolean|undefined}* - The value of scrubbing when getting* - Nothing when setting*/scrubbing(isScrubbing) {if (typeof isScrubbing === 'undefined') {return this.scrubbing_;}this.scrubbing_ = !!isScrubbing;this.techCall_('setScrubbing', this.scrubbing_);if (isScrubbing) {this.addClass('vjs-scrubbing');} else {this.removeClass('vjs-scrubbing');}}/*** Get or set the current time (in seconds)** @param {number|string} [seconds]* The time to seek to in seconds** @return {number|undefined}* - the current time in seconds when getting* - Nothing when setting*/currentTime(seconds) {if (seconds === undefined) {// cache last currentTime and return. default to 0 seconds//// Caching the currentTime is meant to prevent a massive amount of reads on the tech's// currentTime when scrubbing, but may not provide much performance benefit after all.// Should be tested. Also something has to read the actual current time or the cache will// never get updated.this.cache_.currentTime = this.techGet_('currentTime') || 0;return this.cache_.currentTime;}if (seconds < 0) {seconds = 0;}if (!this.isReady_ || this.changingSrc_ || !this.tech_ || !this.tech_.isReady_) {this.cache_.initTime = seconds;this.off('canplay', this.boundApplyInitTime_);this.one('canplay', this.boundApplyInitTime_);return;}this.techCall_('setCurrentTime', seconds);this.cache_.initTime = 0;if (isFinite(seconds)) {this.cache_.currentTime = Number(seconds);}}/*** Apply the value of initTime stored in cache as currentTime.** @private*/applyInitTime_() {this.currentTime(this.cache_.initTime);}/*** Normally gets the length in time of the video in seconds;* in all but the rarest use cases an argument will NOT be passed to the method** > **NOTE**: The video must have started loading before the duration can be* known, and depending on preload behaviour may not be known until the video starts* playing.** @fires Player#durationchange** @param {number} [seconds]* The duration of the video to set in seconds** @return {number|undefined}* - The duration of the video in seconds when getting* - Nothing when setting*/duration(seconds) {if (seconds === undefined) {// return NaN if the duration is not knownreturn this.cache_.duration !== undefined ? this.cache_.duration : NaN;}seconds = parseFloat(seconds);// Standardize on Infinity for signaling video is liveif (seconds < 0) {seconds = Infinity;}if (seconds !== this.cache_.duration) {// Cache the last set value for optimized scrubbingthis.cache_.duration = seconds;if (seconds === Infinity) {this.addClass('vjs-live');} else {this.removeClass('vjs-live');}if (!isNaN(seconds)) {// Do not fire durationchange unless the duration value is known.// @see [Spec]{@link https://www.w3.org/TR/2011/WD-html5-20110113/video.html#media-element-load-algorithm}/*** @event Player#durationchange* @type {Event}*/this.trigger('durationchange');}}}/*** Calculates how much time is left in the video. 
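*
* A hedged sketch of the related time accessors documented above (numbers are
* illustrative and depend on the loaded source):
* ```js
* const player = videojs.getPlayer('example_video_1');
* player.currentTime(10);              // seek (queued until the tech is ready)
* console.log(player.duration());      // e.g. 120; NaN until the duration is known
* console.log(player.remainingTime()); // duration() - currentTime()
* ```
*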
Not part* of the native video API.** @return {number}* The time remaining in seconds*/remainingTime() {return this.duration() - this.currentTime();}/*** A remaining time function that is intended to be used when* the time is to be displayed directly to the user.** @return {number}* The rounded time remaining in seconds*/remainingTimeDisplay() {return Math.floor(this.duration()) - Math.floor(this.currentTime());}//// Kind of like an array of portions of the video that have been downloaded./*** Get a TimeRange object with an array of the times of the video* that have been downloaded. If you just want the percent of the* video that's been downloaded, use bufferedPercent.** @see [Buffered Spec]{@link http://dev.w3.org/html5/spec/video.html#dom-media-buffered}** @return { import('./utils/time').TimeRange }* A mock {@link TimeRanges} object (following HTML spec)*/buffered() {let buffered = this.techGet_('buffered');if (!buffered || !buffered.length) {buffered = createTimeRanges$1(0, 0);}return buffered;}/*** Get the TimeRanges of the media that are currently available* for seeking to.** @see [Seekable Spec]{@link https://html.spec.whatwg.org/multipage/media.html#dom-media-seekable}** @return { import('./utils/time').TimeRange }* A mock {@link TimeRanges} object (following HTML spec)*/seekable() {let seekable = this.techGet_('seekable');if (!seekable || !seekable.length) {seekable = createTimeRanges$1(0, 0);}return seekable;}/*** Returns whether the player is in the "seeking" state.** @return {boolean} True if the player is in the seeking state, false if not.*/seeking() {return this.techGet_('seeking');}/*** Returns whether the player is in the "ended" state.** @return {boolean} True if the player is in the ended state, false if not.*/ended() {return this.techGet_('ended');}/*** Returns the current state of network activity for the element, from* the codes in the list below.* - NETWORK_EMPTY (numeric value 0)* The element has not yet been initialised. 
All attributes are in* their initial states.* - NETWORK_IDLE (numeric value 1)* The element's resource selection algorithm is active and has* selected a resource, but it is not actually using the network at* this time.* - NETWORK_LOADING (numeric value 2)* The user agent is actively trying to download data.* - NETWORK_NO_SOURCE (numeric value 3)* The element's resource selection algorithm is active, but it has* not yet found a resource to use.** @see https://html.spec.whatwg.org/multipage/embedded-content.html#network-states* @return {number} the current network activity state*/networkState() {return this.techGet_('networkState');}/*** Returns a value that expresses the current state of the element* with respect to rendering the current playback position, from the* codes in the list below.* - HAVE_NOTHING (numeric value 0)* No information regarding the media resource is available.* - HAVE_METADATA (numeric value 1)* Enough of the resource has been obtained that the duration of the* resource is available.* - HAVE_CURRENT_DATA (numeric value 2)* Data for the immediate current playback position is available.* - HAVE_FUTURE_DATA (numeric value 3)* Data for the immediate current playback position is available, as* well as enough data for the user agent to advance the current* playback position in the direction of playback.* - HAVE_ENOUGH_DATA (numeric value 4)* The user agent estimates that enough data is available for* playback to proceed uninterrupted.** @see https://html.spec.whatwg.org/multipage/embedded-content.html#dom-media-readystate* @return {number} the current playback rendering state*/readyState() {return this.techGet_('readyState');}/*** Get the percent (as a decimal) of the video that's been downloaded.* This method is not a part of the native HTML video API.** @return {number}* A decimal between 0 and 1 representing the percent* that is buffered 0 being 0% and 1 being 100%*/bufferedPercent() {return bufferedPercent(this.buffered(), this.duration());}/*** Get the ending time of the last buffered time range* This is used in the progress bar to encapsulate all time ranges.** @return {number}* The end of the last buffered time range*/bufferedEnd() {const buffered = this.buffered();const duration = this.duration();let end = buffered.end(buffered.length - 1);if (end > duration) {end = duration;}return end;}/*** Get or set the current volume of the media** @param {number} [percentAsDecimal]* The new volume as a decimal percent:* - 0 is muted/0%/off* - 1.0 is 100%/full* - 0.5 is half volume or 50%** @return {number|undefined}* The current volume as a percent when getting*/volume(percentAsDecimal) {let vol;if (percentAsDecimal !== undefined) {// Force value to between 0 and 1vol = Math.max(0, Math.min(1, percentAsDecimal));this.cache_.volume = vol;this.techCall_('setVolume', vol);if (vol > 0) {this.lastVolume_(vol);}return;}// Default to 1 when returning current volume.vol = parseFloat(this.techGet_('volume'));return isNaN(vol) ? 1 : vol;}/*** Get the current muted state, or turn mute on or off** @param {boolean} [muted]* - true to mute* - false to unmute** @return {boolean|undefined}* - true if mute is on and getting* - false if mute is off and getting* - nothing if setting*/muted(muted) {if (muted !== undefined) {this.techCall_('setMuted', muted);return;}return this.techGet_('muted') || false;}/*** Get the current defaultMuted state, or turn defaultMuted on or off. 
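*
* A hedged sketch for the volume()/muted() getter-setters documented above:
* ```js
* const player = videojs.getPlayer('example_video_1');
* player.volume(0.5);            // clamped to [0, 1]
* player.muted(true);            // mute without changing volume
* console.log(player.volume());  // 0.5
* console.log(player.muted());   // true
* ```
*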
defaultMuted* indicates the state of muted on initial playback.** ```js* var myPlayer = videojs('some-player-id');** myPlayer.src("http://www.example.com/path/to/video.mp4");** // get, should be false* console.log(myPlayer.defaultMuted());* // set to true* myPlayer.defaultMuted(true);* // get should be true* console.log(myPlayer.defaultMuted());* ```** @param {boolean} [defaultMuted]* - true to mute* - false to unmute** @return {boolean|undefined}* - true if defaultMuted is on and getting* - false if defaultMuted is off and getting* - Nothing when setting*/defaultMuted(defaultMuted) {if (defaultMuted !== undefined) {this.techCall_('setDefaultMuted', defaultMuted);}return this.techGet_('defaultMuted') || false;}/*** Get the last volume, or set it** @param {number} [percentAsDecimal]* The new last volume as a decimal percent:* - 0 is muted/0%/off* - 1.0 is 100%/full* - 0.5 is half volume or 50%** @return {number|undefined}* - The current value of lastVolume as a percent when getting* - Nothing when setting** @private*/lastVolume_(percentAsDecimal) {if (percentAsDecimal !== undefined && percentAsDecimal !== 0) {this.cache_.lastVolume = percentAsDecimal;return;}return this.cache_.lastVolume;}/*** Check if current tech can support native fullscreen* (e.g. with built in controls like iOS)** @return {boolean}* if native fullscreen is supported*/supportsFullScreen() {return this.techGet_('supportsFullScreen') || false;}/*** Check if the player is in fullscreen mode or tell the player that it* is or is not in fullscreen mode.** > NOTE: As of the latest HTML5 spec, isFullscreen is no longer an official* property and instead document.fullscreenElement is used. But isFullscreen is* still a valuable property for internal player workings.** @param {boolean} [isFS]* Set the players current fullscreen state** @return {boolean|undefined}* - true if fullscreen is on and getting* - false if fullscreen is off and getting* - Nothing when setting*/isFullscreen(isFS) {if (isFS !== undefined) {const oldValue = this.isFullscreen_;this.isFullscreen_ = Boolean(isFS);// if we changed fullscreen state and we're in prefixed mode, trigger fullscreenchange// this is the only place where we trigger fullscreenchange events for older browsers// fullWindow mode is treated as a prefixed event and will get a fullscreenchange event as wellif (this.isFullscreen_ !== oldValue && this.fsApi_.prefixed) {/*** @event Player#fullscreenchange* @type {Event}*/this.trigger('fullscreenchange');}this.toggleFullscreenClass_();return;}return this.isFullscreen_;}/*** Increase the size of the video to full screen* In some browsers, full screen is not supported natively, so it enters* "full window mode", where the video fills the browser window.* In browsers and devices that support native full screen, sometimes the* browser's default controls will be shown, and not the Video.js custom skin.* This includes most mobile devices (iOS, Android) and older versions of* Safari.** @param {Object} [fullscreenOptions]* Override the player fullscreen options** @fires Player#fullscreenchange*/requestFullscreen(fullscreenOptions) {if (this.isInPictureInPicture()) {this.exitPictureInPicture();}const self = this;return new Promise((resolve, reject) => {function offHandler() {self.off('fullscreenerror', errorHandler);self.off('fullscreenchange', changeHandler);}function changeHandler() {offHandler();resolve();}function errorHandler(e, err) {offHandler();reject(err);}self.one('fullscreenchange', changeHandler);self.one('fullscreenerror', errorHandler);const 
promise = self.requestFullscreenHelper_(fullscreenOptions);if (promise) {promise.then(offHandler, offHandler);promise.then(resolve, reject);}});}requestFullscreenHelper_(fullscreenOptions) {let fsOptions;// Only pass fullscreen options to requestFullscreen in spec-compliant browsers.// Use defaults or player configured option unless passed directly to this method.if (!this.fsApi_.prefixed) {fsOptions = this.options_.fullscreen && this.options_.fullscreen.options || {};if (fullscreenOptions !== undefined) {fsOptions = fullscreenOptions;}}// This method works as follows:// 1. if a fullscreen api is available, use it// 1. call requestFullscreen with potential options// 2. if we got a promise from above, use it to update isFullscreen()// 2. otherwise, if the tech supports fullscreen, call `enterFullScreen` on it.// This is particularly used for iPhone, older iPads, and non-safari browser on iOS.// 3. otherwise, use "fullWindow" modeif (this.fsApi_.requestFullscreen) {const promise = this.el_[this.fsApi_.requestFullscreen](fsOptions);// Even on browsers with promise support this may not return a promiseif (promise) {promise.then(() => this.isFullscreen(true), () => this.isFullscreen(false));}return promise;} else if (this.tech_.supportsFullScreen() && !this.options_.preferFullWindow === true) {// we can't take the video.js controls fullscreen but we can go fullscreen// with native controlsthis.techCall_('enterFullScreen');} else {// fullscreen isn't supported so we'll just stretch the video element to// fill the viewportthis.enterFullWindow();}}/*** Return the video to its normal size after having been in full screen mode** @fires Player#fullscreenchange*/exitFullscreen() {const self = this;return new Promise((resolve, reject) => {function offHandler() {self.off('fullscreenerror', errorHandler);self.off('fullscreenchange', changeHandler);}function changeHandler() {offHandler();resolve();}function errorHandler(e, err) {offHandler();reject(err);}self.one('fullscreenchange', changeHandler);self.one('fullscreenerror', errorHandler);const promise = self.exitFullscreenHelper_();if (promise) {promise.then(offHandler, offHandler);// map the promise to our resolve/reject methodspromise.then(resolve, reject);}});}exitFullscreenHelper_() {if (this.fsApi_.requestFullscreen) {const promise = document[this.fsApi_.exitFullscreen]();// Even on browsers with promise support this may not return a promiseif (promise) {// we're splitting the promise here, so, we want to catch the// potential error so that this chain doesn't have unhandled errorssilencePromise(promise.then(() => this.isFullscreen(false)));}return promise;} else if (this.tech_.supportsFullScreen() && !this.options_.preferFullWindow === true) {this.techCall_('exitFullScreen');} else {this.exitFullWindow();}}/*** When fullscreen isn't supported we can stretch the* video container to as wide as the browser will let us.** @fires Player#enterFullWindow*/enterFullWindow() {this.isFullscreen(true);this.isFullWindow = true;// Storing original doc overflow value to return to when fullscreen is offthis.docOrigOverflow = document.documentElement.style.overflow;// Add listener for esc key to exit fullscreenon(document, 'keydown', this.boundFullWindowOnEscKey_);// Hide any scroll barsdocument.documentElement.style.overflow = 'hidden';// Apply fullscreen stylesaddClass(document.body, 'vjs-full-window');/*** @event Player#enterFullWindow* @type {Event}*/this.trigger('enterFullWindow');}/*** Check for call to either exit full window or* full screen on ESC key** 
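* When not in full-window mode, the Esc key exits through the same promise-returning
* {@link Player#exitFullscreen} that applications call directly. A usage sketch of the
* request/exit pair documented above (the player id is a placeholder):
*
* ```js
* var myPlayer = videojs('some-player-id');
*
* myPlayer.requestFullscreen().then(function() {
*   // fullscreen (or the full-window fallback) is now active
*   return myPlayer.exitFullscreen();
* }).catch(function(err) {
*   // the request was rejected or fullscreen is unsupported
*   videojs.log(err);
* });
* ```
*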
@param {string} event* Event to check for key press*/fullWindowOnEscKey(event) {if (keycode.isEventKey(event, 'Esc')) {if (this.isFullscreen() === true) {if (!this.isFullWindow) {this.exitFullscreen();} else {this.exitFullWindow();}}}}/*** Exit full window** @fires Player#exitFullWindow*/exitFullWindow() {this.isFullscreen(false);this.isFullWindow = false;off(document, 'keydown', this.boundFullWindowOnEscKey_);// Unhide scroll bars.document.documentElement.style.overflow = this.docOrigOverflow;// Remove fullscreen stylesremoveClass(document.body, 'vjs-full-window');// Resize the box, controller, and poster to original sizes// this.positionAll();/*** @event Player#exitFullWindow* @type {Event}*/this.trigger('exitFullWindow');}/*** Get or set disable Picture-in-Picture mode.** @param {boolean} [value]* - true will disable Picture-in-Picture mode* - false will enable Picture-in-Picture mode*/disablePictureInPicture(value) {if (value === undefined) {return this.techGet_('disablePictureInPicture');}this.techCall_('setDisablePictureInPicture', value);this.options_.disablePictureInPicture = value;this.trigger('disablepictureinpicturechanged');}/*** Check if the player is in Picture-in-Picture mode or tell the player that it* is or is not in Picture-in-Picture mode.** @param {boolean} [isPiP]* Set the players current Picture-in-Picture state** @return {boolean|undefined}* - true if Picture-in-Picture is on and getting* - false if Picture-in-Picture is off and getting* - nothing if setting*/isInPictureInPicture(isPiP) {if (isPiP !== undefined) {this.isInPictureInPicture_ = !!isPiP;this.togglePictureInPictureClass_();return;}return !!this.isInPictureInPicture_;}/*** Create a floating video window always on top of other windows so that users may* continue consuming media while they interact with other content sites, or* applications on their device.** This can use document picture-in-picture or element picture in picture** Set `enableDocumentPictureInPicture` to `true` to use docPiP on a supported browser* Else set `disablePictureInPicture` to `false` to disable elPiP on a supported browser*** @see [Spec]{@link https://w3c.github.io/picture-in-picture/}* @see [Spec]{@link https://wicg.github.io/document-picture-in-picture/}** @fires Player#enterpictureinpicture** @return {Promise}* A promise with a Picture-in-Picture window.*/requestPictureInPicture() {if (this.options_.enableDocumentPictureInPicture && window.documentPictureInPicture) {const pipContainer = document.createElement(this.el().tagName);pipContainer.classList = this.el().classList;pipContainer.classList.add('vjs-pip-container');if (this.posterImage) {pipContainer.appendChild(this.posterImage.el().cloneNode(true));}if (this.titleBar) {pipContainer.appendChild(this.titleBar.el().cloneNode(true));}pipContainer.appendChild(createEl('p', {className: 'vjs-pip-text'}, {}, this.localize('Playing in picture-in-picture')));return window.documentPictureInPicture.requestWindow({// The aspect ratio won't be correct, Chrome bug https://crbug.com/1407629width: this.videoWidth(),height: this.videoHeight()}).then(pipWindow => {copyStyleSheetsToWindow(pipWindow);this.el_.parentNode.insertBefore(pipContainer, this.el_);pipWindow.document.body.appendChild(this.el_);pipWindow.document.body.classList.add('vjs-pip-window');this.player_.isInPictureInPicture(true);this.player_.trigger('enterpictureinpicture');// Listen for the PiP closing event to move the video back.pipWindow.addEventListener('pagehide', event => {const pipVideo = 
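// When the PiP window unloads, locate the player element that was moved into it and
// swap it back into the original document in place of the placeholder container.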
event.target.querySelector('.video-js');pipContainer.parentNode.replaceChild(pipVideo, pipContainer);this.player_.isInPictureInPicture(false);this.player_.trigger('leavepictureinpicture');});return pipWindow;});}if ('pictureInPictureEnabled' in document && this.disablePictureInPicture() === false) {/*** This event fires when the player enters picture in picture mode** @event Player#enterpictureinpicture* @type {Event}*/return this.techGet_('requestPictureInPicture');}return Promise.reject('No PiP mode is available');}/*** Exit Picture-in-Picture mode.** @see [Spec]{@link https://wicg.github.io/picture-in-picture}** @fires Player#leavepictureinpicture** @return {Promise}* A promise.*/exitPictureInPicture() {if (window.documentPictureInPicture && window.documentPictureInPicture.window) {// With documentPictureInPicture, Player#leavepictureinpicture is fired in the pagehide handlerwindow.documentPictureInPicture.window.close();return Promise.resolve();}if ('pictureInPictureEnabled' in document) {/*** This event fires when the player leaves picture in picture mode** @event Player#leavepictureinpicture* @type {Event}*/return document.exitPictureInPicture();}}/*** Called when this Player has focus and a key gets pressed down, or when* any Component of this player receives a key press that it doesn't handle.* This allows player-wide hotkeys (either as defined below, or optionally* by an external function).** @param {KeyboardEvent} event* The `keydown` event that caused this function to be called.** @listens keydown*/handleKeyDown(event) {const {userActions} = this.options_;// Bail out if hotkeys are not configured.if (!userActions || !userActions.hotkeys) {return;}// Function that determines whether or not to exclude an element from// hotkeys handling.const excludeElement = el => {const tagName = el.tagName.toLowerCase();// The first and easiest test is for `contenteditable` elements.if (el.isContentEditable) {return true;}// Inputs matching these types will still trigger hotkey handling as// they are not text inputs.const allowedInputTypes = ['button', 'checkbox', 'hidden', 'radio', 'reset', 'submit'];if (tagName === 'input') {return allowedInputTypes.indexOf(el.type) === -1;}// The final test is by tag name. These tags will be excluded entirely.const excludedTags = ['textarea'];return excludedTags.indexOf(tagName) !== -1;};// Bail out if the user is focused on an interactive form element.if (excludeElement(this.el_.ownerDocument.activeElement)) {return;}if (typeof userActions.hotkeys === 'function') {userActions.hotkeys.call(this, event);} else {this.handleHotkeys(event);}}/*** Called when this Player receives a hotkey keydown event.* Supported player-wide hotkeys are:** f - toggle fullscreen* m - toggle mute* k or Space - toggle play/pause** @param {Event} event* The `keydown` event that caused this function to be called.*/handleHotkeys(event) {const hotkeys = this.options_.userActions ? 
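// The hotkey testers destructured below may be supplied through player options; a
// configuration sketch (the key checks are illustrative only):
//
//   videojs('some-player-id', {
//     userActions: {
//       hotkeys: {
//         fullscreenKey: (event) => event.key === 'f',
//         muteKey: (event) => event.key === 'm',
//         playPauseKey: (event) => event.key === 'k' || event.key === ' '
//       }
//     }
//   });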
this.options_.userActions.hotkeys : {};// set fullscreenKey, muteKey, playPauseKey from `hotkeys`, use defaults if not setconst {fullscreenKey = keydownEvent => keycode.isEventKey(keydownEvent, 'f'),muteKey = keydownEvent => keycode.isEventKey(keydownEvent, 'm'),playPauseKey = keydownEvent => keycode.isEventKey(keydownEvent, 'k') || keycode.isEventKey(keydownEvent, 'Space')} = hotkeys;if (fullscreenKey.call(this, event)) {event.preventDefault();event.stopPropagation();const FSToggle = Component$1.getComponent('FullscreenToggle');if (document[this.fsApi_.fullscreenEnabled] !== false) {FSToggle.prototype.handleClick.call(this, event);}} else if (muteKey.call(this, event)) {event.preventDefault();event.stopPropagation();const MuteToggle = Component$1.getComponent('MuteToggle');MuteToggle.prototype.handleClick.call(this, event);} else if (playPauseKey.call(this, event)) {event.preventDefault();event.stopPropagation();const PlayToggle = Component$1.getComponent('PlayToggle');PlayToggle.prototype.handleClick.call(this, event);}}/*** Check whether the player can play a given mimetype** @see https://www.w3.org/TR/2011/WD-html5-20110113/video.html#dom-navigator-canplaytype** @param {string} type* The mimetype to check** @return {string}* 'probably', 'maybe', or '' (empty string)*/canPlayType(type) {let can;// Loop through each playback technology in the options orderfor (let i = 0, j = this.options_.techOrder; i < j.length; i++) {const techName = j[i];let tech = Tech.getTech(techName);// Support old behavior of techs being registered as components.// Remove once that deprecated behavior is removed.if (!tech) {tech = Component$1.getComponent(techName);}// Check if the current tech is defined before continuingif (!tech) {log$1.error(`The "${techName}" tech is undefined. Skipped browser support check for that tech.`);continue;}// Check if the browser supports this technologyif (tech.isSupported()) {can = tech.canPlayType(type);if (can) {return can;}}}return '';}/*** Select source based on tech-order or source-order* Uses source-order selection if `options.sourceOrder` is truthy. Otherwise,* defaults to tech-order selection** @param {Array} sources* The sources for a media asset** @return {Object|boolean}* Object of source and tech order or false*/selectSource(sources) {// Get only the techs specified in `techOrder` that exist and are supported by the// current platformconst techs = this.options_.techOrder.map(techName => {return [techName, Tech.getTech(techName)];}).filter(([techName, tech]) => {// Check if the current tech is defined before continuingif (tech) {// Check if the browser supports this technologyreturn tech.isSupported();}log$1.error(`The "${techName}" tech is undefined. Skipped browser support check for that tech.`);return false;});// Iterate over each `innerArray` element once per `outerArray` element and execute// `tester` with both. 
If `tester` returns a non-falsy value, exit early and return// that value.const findFirstPassingTechSourcePair = function (outerArray, innerArray, tester) {let found;outerArray.some(outerChoice => {return innerArray.some(innerChoice => {found = tester(outerChoice, innerChoice);if (found) {return true;}});});return found;};let foundSourceAndTech;const flip = fn => (a, b) => fn(b, a);const finder = ([techName, tech], source) => {if (tech.canPlaySource(source, this.options_[techName.toLowerCase()])) {return {source,tech: techName};}};// Depending on the truthiness of `options.sourceOrder`, we swap the order of techs and sources// to select from them based on their priority.if (this.options_.sourceOrder) {// Source-first orderingfoundSourceAndTech = findFirstPassingTechSourcePair(sources, techs, flip(finder));} else {// Tech-first orderingfoundSourceAndTech = findFirstPassingTechSourcePair(techs, sources, finder);}return foundSourceAndTech || false;}/*** Executes source setting and getting logic** @param {Tech~SourceObject|Tech~SourceObject[]|string} [source]* A SourceObject, an array of SourceObjects, or a string referencing* a URL to a media source. It is _highly recommended_ that an object* or array of objects is used here, so that source selection* algorithms can take the `type` into account.** If not provided, this method acts as a getter.* @param {boolean} [isRetry]* Indicates whether this is being called internally as a result of a retry** @return {string|undefined}* If the `source` argument is missing, returns the current source* URL. Otherwise, returns nothing/undefined.*/handleSrc_(source, isRetry) {// getter usageif (typeof source === 'undefined') {return this.cache_.src || '';}// Reset retry behavior for new sourceif (this.resetRetryOnError_) {this.resetRetryOnError_();}// filter out invalid sources and turn our source into// an array of source objectsconst sources = filterSource(source);// if a source was passed in then it is invalid because// it was filtered to a zero length Array. 
So we have to// show an errorif (!sources.length) {this.setTimeout(function () {this.error({code: 4,message: this.options_.notSupportedMessage});}, 0);return;}// initial sourcesthis.changingSrc_ = true;// Only update the cached source list if we are not retrying a new source after error,// since in that case we want to include the failed source(s) in the cacheif (!isRetry) {this.cache_.sources = sources;}this.updateSourceCaches_(sources[0]);// middlewareSource is the source after it has been changed by middlewaresetSource(this, sources[0], (middlewareSource, mws) => {this.middleware_ = mws;// since sourceSet is async we have to update the cache again after we select a source since// the source that is selected could be out of order from the cache update above this callback.if (!isRetry) {this.cache_.sources = sources;}this.updateSourceCaches_(middlewareSource);const err = this.src_(middlewareSource);if (err) {if (sources.length > 1) {return this.handleSrc_(sources.slice(1));}this.changingSrc_ = false;// We need to wrap this in a timeout to give folks a chance to add error event handlersthis.setTimeout(function () {this.error({code: 4,message: this.options_.notSupportedMessage});}, 0);// we could not find an appropriate tech, but let's still notify the delegate that this is it// this needs a better comment about why this is neededthis.triggerReady();return;}setTech(mws, this.tech_);});// Try another available source if this one fails before playback.if (sources.length > 1) {const retry = () => {// Remove the error modalthis.error(null);this.handleSrc_(sources.slice(1), true);};const stopListeningForErrors = () => {this.off('error', retry);};this.one('error', retry);this.one('playing', stopListeningForErrors);this.resetRetryOnError_ = () => {this.off('error', retry);this.off('playing', stopListeningForErrors);};}}/*** Get or set the video source.** @param {Tech~SourceObject|Tech~SourceObject[]|string} [source]* A SourceObject, an array of SourceObjects, or a string referencing* a URL to a media source. It is _highly recommended_ that an object* or array of objects is used here, so that source selection* algorithms can take the `type` into account.** If not provided, this method acts as a getter.** @return {string|undefined}* If the `source` argument is missing, returns the current source* URL. 
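* A usage sketch (the player id, URL and type are placeholders):
*
* ```js
* var myPlayer = videojs('some-player-id');
*
* // set a single source object so selection can take the type into account
* myPlayer.src({src: 'http://www.example.com/path/to/video.mp4', type: 'video/mp4'});
*
* // read back the currently selected source URL
* console.log(myPlayer.src());
* ```
*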
Otherwise, returns nothing/undefined.*/src(source) {return this.handleSrc_(source, false);}/*** Set the source object on the tech, returns a boolean that indicates whether* there is a tech that can play the source or not** @param {Tech~SourceObject} source* The source object to set on the Tech** @return {boolean}* - True if there is no Tech to playback this source* - False otherwise** @private*/src_(source) {const sourceTech = this.selectSource([source]);if (!sourceTech) {return true;}if (!titleCaseEquals(sourceTech.tech, this.techName_)) {this.changingSrc_ = true;// load this technology with the chosen sourcethis.loadTech_(sourceTech.tech, sourceTech.source);this.tech_.ready(() => {this.changingSrc_ = false;});return false;}// wait until the tech is ready to set the source// and set it synchronously if possible (#2326)this.ready(function () {// The setSource tech method was added with source handlers// so older techs won't support it// We need to check the direct prototype for the case where subclasses// of the tech do not support source handlersif (this.tech_.constructor.prototype.hasOwnProperty('setSource')) {this.techCall_('setSource', source);} else {this.techCall_('src', source.src);}this.changingSrc_ = false;}, true);return false;}/*** Begin loading the src data.*/load() {// Workaround to use the load method with the VHS.// Does not cover the case when the load method is called directly from the mediaElement.if (this.tech_ && this.tech_.vhs) {this.src(this.currentSource());return;}this.techCall_('load');}/*** Reset the player. Loads the first tech in the techOrder,* removes all the text tracks in the existing `tech`,* and calls `reset` on the `tech`.*/reset() {if (this.paused()) {this.doReset_();} else {const playPromise = this.play();silencePromise(playPromise.then(() => this.doReset_()));}}doReset_() {if (this.tech_) {this.tech_.clearTracks('text');}this.removeClass('vjs-playing');this.addClass('vjs-paused');this.resetCache_();this.poster('');this.loadTech_(this.options_.techOrder[0], null);this.techCall_('reset');this.resetControlBarUI_();this.error(null);if (this.titleBar) {this.titleBar.update({title: undefined,description: undefined});}if (isEvented(this)) {this.trigger('playerreset');}}/*** Reset Control Bar's UI by calling sub-methods that reset* all of Control Bar's components*/resetControlBarUI_() {this.resetProgressBar_();this.resetPlaybackRate_();this.resetVolumeBar_();}/*** Reset tech's progress so progress bar is reset in the UI*/resetProgressBar_() {this.currentTime(0);const {currentTimeDisplay,durationDisplay,progressControl,remainingTimeDisplay} = this.controlBar || {};const {seekBar} = progressControl || {};if (currentTimeDisplay) {currentTimeDisplay.updateContent();}if (durationDisplay) {durationDisplay.updateContent();}if (remainingTimeDisplay) {remainingTimeDisplay.updateContent();}if (seekBar) {seekBar.update();if (seekBar.loadProgressBar) {seekBar.loadProgressBar.update();}}}/*** Reset Playback ratio*/resetPlaybackRate_() {this.playbackRate(this.defaultPlaybackRate());this.handleTechRateChange_();}/*** Reset Volume bar*/resetVolumeBar_() {this.volume(1.0);this.trigger('volumechange');}/*** Returns all of the current source objects.** @return {Tech~SourceObject[]}* The current source objects*/currentSources() {const source = this.currentSource();const sources = [];// assume `{}` or `{ src }`if (Object.keys(source).length !== 0) {sources.push(source);}return this.cache_.sources || sources;}/*** Returns the current source object.** @return {Tech~SourceObject}* 
The current source object*/currentSource() {return this.cache_.source || {};}/*** Returns the fully qualified URL of the current source value e.g. http://mysite.com/video.mp4* Can be used in conjunction with `currentType` to assist in rebuilding the current source object.** @return {string}* The current source*/currentSrc() {return this.currentSource() && this.currentSource().src || '';}/*** Get the current source type e.g. video/mp4* This can allow you rebuild the current source object so that you could load the same* source and tech later** @return {string}* The source MIME type*/currentType() {return this.currentSource() && this.currentSource().type || '';}/*** Get or set the preload attribute** @param {'none'|'auto'|'metadata'} [value]* Preload mode to pass to tech** @return {string|undefined}* - The preload attribute value when getting* - Nothing when setting*/preload(value) {if (value !== undefined) {this.techCall_('setPreload', value);this.options_.preload = value;return;}return this.techGet_('preload');}/*** Get or set the autoplay option. When this is a boolean it will* modify the attribute on the tech. When this is a string the attribute on* the tech will be removed and `Player` will handle autoplay on loadstarts.** @param {boolean|'play'|'muted'|'any'} [value]* - true: autoplay using the browser behavior* - false: do not autoplay* - 'play': call play() on every loadstart* - 'muted': call muted() then play() on every loadstart* - 'any': call play() on every loadstart. if that fails call muted() then play().* - *: values other than those listed here will be set `autoplay` to true** @return {boolean|string|undefined}* - The current value of autoplay when getting* - Nothing when setting*/autoplay(value) {// getter usageif (value === undefined) {return this.options_.autoplay || false;}let techAutoplay;// if the value is a valid string set it to that, or normalize `true` to 'play', if need beif (typeof value === 'string' && /(any|play|muted)/.test(value) || value === true && this.options_.normalizeAutoplay) {this.options_.autoplay = value;this.manualAutoplay_(typeof value === 'string' ? value : 'play');techAutoplay = false;// any falsy value sets autoplay to false in the browser,// lets do the same} else if (!value) {this.options_.autoplay = false;// any other value (ie truthy) sets autoplay to true} else {this.options_.autoplay = true;}techAutoplay = typeof techAutoplay === 'undefined' ? this.options_.autoplay : techAutoplay;// if we don't have a tech then we do not queue up// a setAutoplay call on tech ready. We do this because the// autoplay option will be passed in the constructor and we// do not need to set it twiceif (this.tech_) {this.techCall_('setAutoplay', techAutoplay);}}/*** Set or unset the playsinline attribute.* Playsinline tells the browser that non-fullscreen playback is preferred.** @param {boolean} [value]* - true means that we should try to play inline by default* - false means that we should use the browser's default playback mode,* which in most cases is inline. 
iOS Safari is a notable exception* and plays fullscreen by default.** @return {string|undefined}* - the current value of playsinline* - Nothing when setting** @see [Spec]{@link https://html.spec.whatwg.org/#attr-video-playsinline}*/playsinline(value) {if (value !== undefined) {this.techCall_('setPlaysinline', value);this.options_.playsinline = value;}return this.techGet_('playsinline');}/*** Get or set the loop attribute on the video element.** @param {boolean} [value]* - true means that we should loop the video* - false means that we should not loop the video** @return {boolean|undefined}* - The current value of loop when getting* - Nothing when setting*/loop(value) {if (value !== undefined) {this.techCall_('setLoop', value);this.options_.loop = value;return;}return this.techGet_('loop');}/*** Get or set the poster image source url** @fires Player#posterchange** @param {string} [src]* Poster image source URL** @return {string|undefined}* - The current value of poster when getting* - Nothing when setting*/poster(src) {if (src === undefined) {return this.poster_;}// The correct way to remove a poster is to set as an empty string// other falsey values will throw errorsif (!src) {src = '';}if (src === this.poster_) {return;}// update the internal poster variablethis.poster_ = src;// update the tech's posterthis.techCall_('setPoster', src);this.isPosterFromTech_ = false;// alert components that the poster has been set/*** This event fires when the poster image is changed on the player.** @event Player#posterchange* @type {Event}*/this.trigger('posterchange');}/*** Some techs (e.g. YouTube) can provide a poster source in an* asynchronous way. We want the poster component to use this* poster source so that it covers up the tech's controls.* (YouTube's play button). However we only want to use this* source if the player user hasn't set a poster through* the normal APIs.** @fires Player#posterchange* @listens Tech#posterchange* @private*/handleTechPosterChange_() {if ((!this.poster_ || this.options_.techCanOverridePoster) && this.tech_ && this.tech_.poster) {const newPoster = this.tech_.poster() || '';if (newPoster !== this.poster_) {this.poster_ = newPoster;this.isPosterFromTech_ = true;// Let components know the poster has changedthis.trigger('posterchange');}}}/*** Get or set whether or not the controls are showing.** @fires Player#controlsenabled** @param {boolean} [bool]* - true to turn controls on* - false to turn controls off** @return {boolean|undefined}* - The current value of controls when getting* - Nothing when setting*/controls(bool) {if (bool === undefined) {return !!this.controls_;}bool = !!bool;// Don't trigger a change event unless it actually changedif (this.controls_ === bool) {return;}this.controls_ = bool;if (this.usingNativeControls()) {this.techCall_('setControls', bool);}if (this.controls_) {this.removeClass('vjs-controls-disabled');this.addClass('vjs-controls-enabled');/*** @event Player#controlsenabled* @type {Event}*/this.trigger('controlsenabled');if (!this.usingNativeControls()) {this.addTechControlsListeners_();}} else {this.removeClass('vjs-controls-enabled');this.addClass('vjs-controls-disabled');/*** @event Player#controlsdisabled* @type {Event}*/this.trigger('controlsdisabled');if (!this.usingNativeControls()) {this.removeTechControlsListeners_();}}}/*** Toggle native controls on/off. Native controls are the controls built into* devices (e.g. default iPhone controls) or other techs* (e.g. 
Vimeo Controls)* **This should only be set by the current tech, because only the tech knows* if it can support native controls**** @fires Player#usingnativecontrols* @fires Player#usingcustomcontrols** @param {boolean} [bool]* - true to turn native controls on* - false to turn native controls off** @return {boolean|undefined}* - The current value of native controls when getting* - Nothing when setting*/usingNativeControls(bool) {if (bool === undefined) {return !!this.usingNativeControls_;}bool = !!bool;// Don't trigger a change event unless it actually changedif (this.usingNativeControls_ === bool) {return;}this.usingNativeControls_ = bool;if (this.usingNativeControls_) {this.addClass('vjs-using-native-controls');/*** player is using the native device controls** @event Player#usingnativecontrols* @type {Event}*/this.trigger('usingnativecontrols');} else {this.removeClass('vjs-using-native-controls');/*** player is using the custom HTML controls** @event Player#usingcustomcontrols* @type {Event}*/this.trigger('usingcustomcontrols');}}/*** Set or get the current MediaError** @fires Player#error** @param {MediaError|string|number} [err]* A MediaError or a string/number to be turned* into a MediaError** @return {MediaError|null|undefined}* - The current MediaError when getting (or null)* - Nothing when setting*/error(err) {if (err === undefined) {return this.error_ || null;}// allow hooks to modify error objecthooks('beforeerror').forEach(hookFunction => {const newErr = hookFunction(this, err);if (!(isObject$1(newErr) && !Array.isArray(newErr) || typeof newErr === 'string' || typeof newErr === 'number' || newErr === null)) {this.log.error('please return a value that MediaError expects in beforeerror hooks');return;}err = newErr;});// Suppress the first error message for no compatible source until// user interactionif (this.options_.suppressNotSupportedError && err && err.code === 4) {const triggerSuppressedError = function () {this.error(err);};this.options_.suppressNotSupportedError = false;this.any(['click', 'touchstart'], triggerSuppressedError);this.one('loadstart', function () {this.off(['click', 'touchstart'], triggerSuppressedError);});return;}// restoring to defaultif (err === null) {this.error_ = null;this.removeClass('vjs-error');if (this.errorDisplay) {this.errorDisplay.close();}return;}this.error_ = new MediaError(err);// add the vjs-error classname to the playerthis.addClass('vjs-error');// log the name of the error type and any message// IE11 logs "[object object]" and required you to expand message to see error objectlog$1.error(`(CODE:${this.error_.code} ${MediaError.errorTypes[this.error_.code]})`, this.error_.message, this.error_);/*** @event Player#error* @type {Event}*/this.trigger('error');// notify hooks of the per player errorhooks('error').forEach(hookFunction => hookFunction(this, this.error_));return;}/*** Report user activity** @param {Object} event* Event object*/reportUserActivity(event) {this.userActivity_ = true;}/*** Get/set if user is active** @fires Player#useractive* @fires Player#userinactive** @param {boolean} [bool]* - true if the user is active* - false if the user is inactive** @return {boolean|undefined}* - The current value of userActive when getting* - Nothing when setting*/userActive(bool) {if (bool === undefined) {return this.userActive_;}bool = !!bool;if (bool === this.userActive_) {return;}this.userActive_ = bool;if (this.userActive_) {this.userActivity_ = true;this.removeClass('vjs-user-inactive');this.addClass('vjs-user-active');/*** @event 
Player#useractive* @type {Event}*/this.trigger('useractive');return;}// Chrome/Safari/IE have bugs where when you change the cursor it can// trigger a mousemove event. This causes an issue when you're hiding// the cursor when the user is inactive, and a mousemove signals user// activity. Making it impossible to go into inactive mode. Specifically// this happens in fullscreen when we really need to hide the cursor.//// When this gets resolved in ALL browsers it can be removed// https://code.google.com/p/chromium/issues/detail?id=103041if (this.tech_) {this.tech_.one('mousemove', function (e) {e.stopPropagation();e.preventDefault();});}this.userActivity_ = false;this.removeClass('vjs-user-active');this.addClass('vjs-user-inactive');/*** @event Player#userinactive* @type {Event}*/this.trigger('userinactive');}/*** Listen for user activity based on timeout value** @private*/listenForUserActivity_() {let mouseInProgress;let lastMoveX;let lastMoveY;const handleActivity = bind_(this, this.reportUserActivity);const handleMouseMove = function (e) {// #1068 - Prevent mousemove spamming// Chrome Bug: https://code.google.com/p/chromium/issues/detail?id=366970if (e.screenX !== lastMoveX || e.screenY !== lastMoveY) {lastMoveX = e.screenX;lastMoveY = e.screenY;handleActivity();}};const handleMouseDown = function () {handleActivity();// For as long as the they are touching the device or have their mouse down,// we consider them active even if they're not moving their finger or mouse.// So we want to continue to update that they are activethis.clearInterval(mouseInProgress);// Setting userActivity=true now and setting the interval to the same time// as the activityCheck interval (250) should ensure we never miss the// next activityCheckmouseInProgress = this.setInterval(handleActivity, 250);};const handleMouseUpAndMouseLeave = function (event) {handleActivity();// Stop the interval that maintains activity if the mouse/touch is downthis.clearInterval(mouseInProgress);};// Any mouse movement will be considered user activitythis.on('mousedown', handleMouseDown);this.on('mousemove', handleMouseMove);this.on('mouseup', handleMouseUpAndMouseLeave);this.on('mouseleave', handleMouseUpAndMouseLeave);const controlBar = this.getChild('controlBar');// Fixes bug on Android & iOS where when tapping progressBar (when control bar is displayed)// controlBar would no longer be hidden by default timeout.if (controlBar && !IS_IOS && !IS_ANDROID) {controlBar.on('mouseenter', function (event) {if (this.player().options_.inactivityTimeout !== 0) {this.player().cache_.inactivityTimeout = this.player().options_.inactivityTimeout;}this.player().options_.inactivityTimeout = 0;});controlBar.on('mouseleave', function (event) {this.player().options_.inactivityTimeout = this.player().cache_.inactivityTimeout;});}// Listen for keyboard navigation// Shouldn't need to use inProgress interval because of key repeatthis.on('keydown', handleActivity);this.on('keyup', handleActivity);// Run an interval every 250 milliseconds instead of stuffing everything into// the mousemove/touchmove function itself, to prevent performance degradation.// `this.reportUserActivity` simply sets this.userActivity_ to true, which// then gets picked up by this loop// http://ejohn.org/blog/learning-from-twitter/let inactivityTimeout;/** @this Player */const activityCheck = function () {// Check to see if mouse/touch activity has happenedif (!this.userActivity_) {return;}// Reset the activity trackerthis.userActivity_ = false;// If the user state was inactive, set 
the state to activethis.userActive(true);// Clear any existing inactivity timeout to start the timer overthis.clearTimeout(inactivityTimeout);const timeout = this.options_.inactivityTimeout;if (timeout <= 0) {return;}// In <timeout> milliseconds, if no more activity has occurred the// user will be considered inactiveinactivityTimeout = this.setTimeout(function () {// Protect against the case where the inactivityTimeout can trigger just// before the next user activity is picked up by the activity check loop// causing a flickerif (!this.userActivity_) {this.userActive(false);}}, timeout);};this.setInterval(activityCheck, 250);}/*** Gets or sets the current playback rate. A playback rate of* 1.0 represents normal speed and 0.5 would indicate half-speed* playback, for instance.** @see https://html.spec.whatwg.org/multipage/embedded-content.html#dom-media-playbackrate** @param {number} [rate]* New playback rate to set.** @return {number|undefined}* - The current playback rate when getting or 1.0* - Nothing when setting*/playbackRate(rate) {if (rate !== undefined) {// NOTE: this.cache_.lastPlaybackRate is set from the tech handler// that is registered abovethis.techCall_('setPlaybackRate', rate);return;}if (this.tech_ && this.tech_.featuresPlaybackRate) {return this.cache_.lastPlaybackRate || this.techGet_('playbackRate');}return 1.0;}/*** Gets or sets the current default playback rate. A default playback rate of* 1.0 represents normal speed and 0.5 would indicate half-speed playback, for instance.* defaultPlaybackRate will only represent what the initial playbackRate of a video was, not* not the current playbackRate.** @see https://html.spec.whatwg.org/multipage/embedded-content.html#dom-media-defaultplaybackrate** @param {number} [rate]* New default playback rate to set.** @return {number|undefined}* - The default playback rate when getting or 1.0* - Nothing when setting*/defaultPlaybackRate(rate) {if (rate !== undefined) {return this.techCall_('setDefaultPlaybackRate', rate);}if (this.tech_ && this.tech_.featuresPlaybackRate) {return this.techGet_('defaultPlaybackRate');}return 1.0;}/*** Gets or sets the audio flag** @param {boolean} [bool]* - true signals that this is an audio player* - false signals that this is not an audio player** @return {boolean|undefined}* - The current value of isAudio when getting* - Nothing when setting*/isAudio(bool) {if (bool !== undefined) {this.isAudio_ = !!bool;return;}return !!this.isAudio_;}enableAudioOnlyUI_() {// Update styling immediately to show the control bar so we can get its heightthis.addClass('vjs-audio-only-mode');const playerChildren = this.children();const controlBar = this.getChild('ControlBar');const controlBarHeight = controlBar && controlBar.currentHeight();// Hide all player components except the control bar. 
Control bar components// needed only for video are hidden with CSSplayerChildren.forEach(child => {if (child === controlBar) {return;}if (child.el_ && !child.hasClass('vjs-hidden')) {child.hide();this.audioOnlyCache_.hiddenChildren.push(child);}});this.audioOnlyCache_.playerHeight = this.currentHeight();// Set the player height the same as the control barthis.height(controlBarHeight);this.trigger('audioonlymodechange');}disableAudioOnlyUI_() {this.removeClass('vjs-audio-only-mode');// Show player components that were previously hiddenthis.audioOnlyCache_.hiddenChildren.forEach(child => child.show());// Reset player heightthis.height(this.audioOnlyCache_.playerHeight);this.trigger('audioonlymodechange');}/*** Get the current audioOnlyMode state or set audioOnlyMode to true or false.** Setting this to `true` will hide all player components except the control bar,* as well as control bar components needed only for video.** @param {boolean} [value]* The value to set audioOnlyMode to.** @return {Promise|boolean}* A Promise is returned when setting the state, and a boolean when getting* the present state*/audioOnlyMode(value) {if (typeof value !== 'boolean' || value === this.audioOnlyMode_) {return this.audioOnlyMode_;}this.audioOnlyMode_ = value;// Enable Audio Only Modeif (value) {const exitPromises = [];// Fullscreen and PiP are not supported in audioOnlyMode, so exit if we need to.if (this.isInPictureInPicture()) {exitPromises.push(this.exitPictureInPicture());}if (this.isFullscreen()) {exitPromises.push(this.exitFullscreen());}if (this.audioPosterMode()) {exitPromises.push(this.audioPosterMode(false));}return Promise.all(exitPromises).then(() => this.enableAudioOnlyUI_());}// Disable Audio Only Modereturn Promise.resolve().then(() => this.disableAudioOnlyUI_());}enablePosterModeUI_() {// Hide the video element and show the poster image to enable posterModeUIconst tech = this.tech_ && this.tech_;tech.hide();this.addClass('vjs-audio-poster-mode');this.trigger('audiopostermodechange');}disablePosterModeUI_() {// Show the video element and hide the poster image to disable posterModeUIconst tech = this.tech_ && this.tech_;tech.show();this.removeClass('vjs-audio-poster-mode');this.trigger('audiopostermodechange');}/*** Get the current audioPosterMode state or set audioPosterMode to true or false** @param {boolean} [value]* The value to set audioPosterMode to.** @return {Promise|boolean}* A Promise is returned when setting the state, and a boolean when getting* the present state*/audioPosterMode(value) {if (typeof value !== 'boolean' || value === this.audioPosterMode_) {return this.audioPosterMode_;}this.audioPosterMode_ = value;if (value) {if (this.audioOnlyMode()) {const audioOnlyModePromise = this.audioOnlyMode(false);return audioOnlyModePromise.then(() => {// enable audio poster mode after audio only mode is disabledthis.enablePosterModeUI_();});}return Promise.resolve().then(() => {// enable audio poster modethis.enablePosterModeUI_();});}return Promise.resolve().then(() => {// disable audio poster modethis.disablePosterModeUI_();});}/*** A helper method for adding a {@link TextTrack} to our* {@link TextTrackList}.** In addition to the W3C settings we allow adding additional info through options.** @see http://www.w3.org/html/wg/drafts/html/master/embedded-content-0.html#dom-media-addtexttrack** @param {string} [kind]* the kind of TextTrack you are adding** @param {string} [label]* the label to give the TextTrack label** @param {string} [language]* the language to set on the TextTrack** 
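* A usage sketch, assuming the player's tech is ready (labels and cue text are
* placeholders):
*
* ```js
* var myPlayer = videojs('some-player-id');
*
* var track = myPlayer.addTextTrack('captions', 'English', 'en');
*
* // cues can then be added through the standard TextTrack API, e.g. with VTTCue
* track.addCue(new VTTCue(0, 5, 'Hello world'));
* ```
*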
@return {TextTrack|undefined}* the TextTrack that was added or undefined* if there is no tech*/addTextTrack(kind, label, language) {if (this.tech_) {return this.tech_.addTextTrack(kind, label, language);}}/*** Create a remote {@link TextTrack} and an {@link HTMLTrackElement}.** @param {Object} options* Options to pass to {@link HTMLTrackElement} during creation. See* {@link HTMLTrackElement} for object properties that you should use.** @param {boolean} [manualCleanup=false] if set to true, the TextTrack will not be removed* from the TextTrackList and HtmlTrackElementList* after a source change** @return { import('./tracks/html-track-element').default }* the HTMLTrackElement that was created and added* to the HtmlTrackElementList and the remote* TextTrackList**/addRemoteTextTrack(options, manualCleanup) {if (this.tech_) {return this.tech_.addRemoteTextTrack(options, manualCleanup);}}/*** Remove a remote {@link TextTrack} from the respective* {@link TextTrackList} and {@link HtmlTrackElementList}.** @param {Object} track* Remote {@link TextTrack} to remove** @return {undefined}* does not return anything*/removeRemoteTextTrack(obj = {}) {let {track} = obj;if (!track) {track = obj;}// destructure the input into an object with a track argument, defaulting to arguments[0]// default the whole argument to an empty object if nothing was passed inif (this.tech_) {return this.tech_.removeRemoteTextTrack(track);}}/*** Gets available media playback quality metrics as specified by the W3C's Media* Playback Quality API.** @see [Spec]{@link https://wicg.github.io/media-playback-quality}** @return {Object|undefined}* An object with supported media playback quality metrics or undefined if there* is no tech or the tech does not support it.*/getVideoPlaybackQuality() {return this.techGet_('getVideoPlaybackQuality');}/*** Get video width** @return {number}* current video width*/videoWidth() {return this.tech_ && this.tech_.videoWidth && this.tech_.videoWidth() || 0;}/*** Get video height** @return {number}* current video height*/videoHeight() {return this.tech_ && this.tech_.videoHeight && this.tech_.videoHeight() || 0;}/*** Set or get the player's language code.** Changing the language will trigger* [languagechange]{@link Player#event:languagechange}* which Components can use to update control text.* ClickableComponent will update its control text by default on* [languagechange]{@link Player#event:languagechange}.** @fires Player#languagechange** @param {string} [code]* the language code to set the player to** @return {string|undefined}* - The current language code when getting* - Nothing when setting*/language(code) {if (code === undefined) {return this.language_;}if (this.language_ !== String(code).toLowerCase()) {this.language_ = String(code).toLowerCase();// during first init, it's possible some things won't be eventedif (isEvented(this)) {/*** fires when the player language change** @event Player#languagechange* @type {Event}*/this.trigger('languagechange');}}}/*** Get the player's language dictionary* Merge every time, because a newly added plugin might call videojs.addLanguage() at any time* Languages specified directly in the player options have precedence** @return {Array}* An array of of supported languages*/languages() {return merge$2(Player.prototype.options_.languages, this.languages_);}/*** returns a JavaScript object representing the current track* information. 
**DOES not return it as JSON**** @return {Object}* Object representing the current of track info*/toJSON() {const options = merge$2(this.options_);const tracks = options.tracks;options.tracks = [];for (let i = 0; i < tracks.length; i++) {let track = tracks[i];// deep merge tracks and null out player so no circular referencestrack = merge$2(track);track.player = undefined;options.tracks[i] = track;}return options;}/*** Creates a simple modal dialog (an instance of the {@link ModalDialog}* component) that immediately overlays the player with arbitrary* content and removes itself when closed.** @param {string|Function|Element|Array|null} content* Same as {@link ModalDialog#content}'s param of the same name.* The most straight-forward usage is to provide a string or DOM* element.** @param {Object} [options]* Extra options which will be passed on to the {@link ModalDialog}.** @return {ModalDialog}* the {@link ModalDialog} that was created*/createModal(content, options) {options = options || {};options.content = content || '';const modal = new ModalDialog(this, options);this.addChild(modal);modal.on('dispose', () => {this.removeChild(modal);});modal.open();return modal;}/*** Change breakpoint classes when the player resizes.** @private*/updateCurrentBreakpoint_() {if (!this.responsive()) {return;}const currentBreakpoint = this.currentBreakpoint();const currentWidth = this.currentWidth();for (let i = 0; i < BREAKPOINT_ORDER.length; i++) {const candidateBreakpoint = BREAKPOINT_ORDER[i];const maxWidth = this.breakpoints_[candidateBreakpoint];if (currentWidth <= maxWidth) {// The current breakpoint did not change, nothing to do.if (currentBreakpoint === candidateBreakpoint) {return;}// Only remove a class if there is a current breakpoint.if (currentBreakpoint) {this.removeClass(BREAKPOINT_CLASSES[currentBreakpoint]);}this.addClass(BREAKPOINT_CLASSES[candidateBreakpoint]);this.breakpoint_ = candidateBreakpoint;break;}}}/*** Removes the current breakpoint.** @private*/removeCurrentBreakpoint_() {const className = this.currentBreakpointClass();this.breakpoint_ = '';if (className) {this.removeClass(className);}}/*** Get or set breakpoints on the player.** Calling this method with an object or `true` will remove any previous* custom breakpoints and start from the defaults again.** @param {Object|boolean} [breakpoints]* If an object is given, it can be used to provide custom* breakpoints. 
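* A configuration sketch (the widths below are illustrative, not the defaults; the
* breakpoint classes are only applied while {@link Player#responsive} is enabled):
*
* ```js
* var myPlayer = videojs('some-player-id');
*
* myPlayer.breakpoints({
*   tiny: 210,
*   xsmall: 320,
*   small: 425,
*   medium: 768,
*   large: 960,
*   xlarge: 1440,
*   huge: Infinity
* });
* ```
*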
If `true` is given, will set default breakpoints.* If this argument is not given, will simply return the current* breakpoints.** @param {number} [breakpoints.tiny]* The maximum width for the "vjs-layout-tiny" class.** @param {number} [breakpoints.xsmall]* The maximum width for the "vjs-layout-x-small" class.** @param {number} [breakpoints.small]* The maximum width for the "vjs-layout-small" class.** @param {number} [breakpoints.medium]* The maximum width for the "vjs-layout-medium" class.** @param {number} [breakpoints.large]* The maximum width for the "vjs-layout-large" class.** @param {number} [breakpoints.xlarge]* The maximum width for the "vjs-layout-x-large" class.** @param {number} [breakpoints.huge]* The maximum width for the "vjs-layout-huge" class.** @return {Object}* An object mapping breakpoint names to maximum width values.*/breakpoints(breakpoints) {// Used as a getter.if (breakpoints === undefined) {return Object.assign(this.breakpoints_);}this.breakpoint_ = '';this.breakpoints_ = Object.assign({}, DEFAULT_BREAKPOINTS, breakpoints);// When breakpoint definitions change, we need to update the currently// selected breakpoint.this.updateCurrentBreakpoint_();// Clone the breakpoints before returning.return Object.assign(this.breakpoints_);}/*** Get or set a flag indicating whether or not this player should adjust* its UI based on its dimensions.** @param {boolean} [value]* Should be `true` if the player should adjust its UI based on its* dimensions; otherwise, should be `false`.** @return {boolean|undefined}* Will be `true` if this player should adjust its UI based on its* dimensions; otherwise, will be `false`.* Nothing if setting*/responsive(value) {// Used as a getter.if (value === undefined) {return this.responsive_;}value = Boolean(value);const current = this.responsive_;// Nothing changed.if (value === current) {return;}// The value actually changed, set it.this.responsive_ = value;// Start listening for breakpoints and set the initial breakpoint if the// player is now responsive.if (value) {this.on('playerresize', this.boundUpdateCurrentBreakpoint_);this.updateCurrentBreakpoint_();// Stop listening for breakpoints if the player is no longer responsive.} else {this.off('playerresize', this.boundUpdateCurrentBreakpoint_);this.removeCurrentBreakpoint_();}return value;}/*** Get current breakpoint name, if any.** @return {string}* If there is currently a breakpoint set, returns a the key from the* breakpoints object matching it. Otherwise, returns an empty string.*/currentBreakpoint() {return this.breakpoint_;}/*** Get the current breakpoint class name.** @return {string}* The matching class name (e.g. `"vjs-layout-tiny"` or* `"vjs-layout-large"`) for the current breakpoint. Empty string if* there is no current breakpoint.*/currentBreakpointClass() {return BREAKPOINT_CLASSES[this.breakpoint_] || '';}/*** An object that describes a single piece of media.** Properties that are not part of this type description will be retained; so,* this can be viewed as a generic metadata storage mechanism as well.** @see {@link https://wicg.github.io/mediasession/#the-mediametadata-interface}* @typedef {Object} Player~MediaObject** @property {string} [album]* Unused, except if this object is passed to the `MediaSession`* API.** @property {string} [artist]* Unused, except if this object is passed to the `MediaSession`* API.** @property {Object[]} [artwork]* Unused, except if this object is passed to the `MediaSession`* API. 
If not specified, will be populated via the `poster`, if* available.** @property {string} [poster]* URL to an image that will display before playback.** @property {Tech~SourceObject|Tech~SourceObject[]|string} [src]* A single source object, an array of source objects, or a string* referencing a URL to a media source. It is _highly recommended_* that an object or array of objects is used here, so that source* selection algorithms can take the `type` into account.** @property {string} [title]* Unused, except if this object is passed to the `MediaSession`* API.** @property {Object[]} [textTracks]* An array of objects to be used to create text tracks, following* the {@link https://www.w3.org/TR/html50/embedded-content-0.html#the-track-element|native track element format}.* For ease of removal, these will be created as "remote" text* tracks and set to automatically clean up on source changes.** These objects may have properties like `src`, `kind`, `label`,* and `language`, see {@link Tech#createRemoteTextTrack}.*//*** Populate the player using a {@link Player~MediaObject|MediaObject}.** @param {Player~MediaObject} media* A media object.** @param {Function} ready* A callback to be called when the player is ready.*/loadMedia(media, ready) {if (!media || typeof media !== 'object') {return;}const crossOrigin = this.crossOrigin();this.reset();// Clone the media object so it cannot be mutated from outside.this.cache_.media = merge$2(media);const {artist,artwork,description,poster,src,textTracks,title} = this.cache_.media;// If `artwork` is not given, create it using `poster`.if (!artwork && poster) {this.cache_.media.artwork = [{src: poster,type: getMimetype(poster)}];}if (crossOrigin) {this.crossOrigin(crossOrigin);}if (src) {this.src(src);}if (poster) {this.poster(poster);}if (Array.isArray(textTracks)) {textTracks.forEach(tt => this.addRemoteTextTrack(tt, false));}if (this.titleBar) {this.titleBar.update({title,description: description || artist || ''});}this.ready(ready);}/*** Get a clone of the current {@link Player~MediaObject} for this player.** If the `loadMedia` method has not been used, will attempt to return a* {@link Player~MediaObject} based on the current state of the player.** @return {Player~MediaObject}*/getMedia() {if (!this.cache_.media) {const poster = this.poster();const src = this.currentSources();const textTracks = Array.prototype.map.call(this.remoteTextTracks(), tt => ({kind: tt.kind,label: tt.label,language: tt.language,src: tt.src}));const media = {src,textTracks};if (poster) {media.poster = poster;media.artwork = [{src: media.poster,type: getMimetype(media.poster)}];}return media;}return merge$2(this.cache_.media);}/*** Gets tag settings** @param {Element} tag* The player tag** @return {Object}* An object containing all of the settings* for a player tag*/static getTagSettings(tag) {const baseOptions = {sources: [],tracks: []};const tagOptions = getAttributes(tag);const dataSetup = tagOptions['data-setup'];if (hasClass(tag, 'vjs-fill')) {tagOptions.fill = true;}if (hasClass(tag, 'vjs-fluid')) {tagOptions.fluid = true;}// Check if data-setup attr exists.if (dataSetup !== null) {// Parse options JSON// If empty string, make it a parsable json object.const [err, data] = tuple(dataSetup || '{}');if (err) {log$1.error(err);}Object.assign(tagOptions, data);}Object.assign(baseOptions, tagOptions);// Get tag children settingsif (tag.hasChildNodes()) {const children = tag.childNodes;for (let i = 0, j = children.length; i < j; i++) {const child = children[i];// Change case needed: 
http://ejohn.org/blog/nodename-case-sensitivity/const childName = child.nodeName.toLowerCase();if (childName === 'source') {baseOptions.sources.push(getAttributes(child));} else if (childName === 'track') {baseOptions.tracks.push(getAttributes(child));}}}return baseOptions;}/*** Set debug mode to enable/disable logs at info level.** @param {boolean} enabled* @fires Player#debugon* @fires Player#debugoff* @return {boolean|undefined}*/debug(enabled) {if (enabled === undefined) {return this.debugEnabled_;}if (enabled) {this.trigger('debugon');this.previousLogLevel_ = this.log.level;this.log.level('debug');this.debugEnabled_ = true;} else {this.trigger('debugoff');this.log.level(this.previousLogLevel_);this.previousLogLevel_ = undefined;this.debugEnabled_ = false;}}/*** Set or get current playback rates.* Takes an array and updates the playback rates menu with the new items.* Pass in an empty array to hide the menu.* Values other than arrays are ignored.** @fires Player#playbackrateschange* @param {number[]} newRates* The new rates that the playback rates menu should update to.* An empty array will hide the menu* @return {number[]} When used as a getter will return the current playback rates*/playbackRates(newRates) {if (newRates === undefined) {return this.cache_.playbackRates;}// ignore any value that isn't an arrayif (!Array.isArray(newRates)) {return;}// ignore any arrays that don't only contain numbersif (!newRates.every(rate => typeof rate === 'number')) {return;}this.cache_.playbackRates = newRates;/*** fires when the playback rates in a player are changed** @event Player#playbackrateschange* @type {Event}*/this.trigger('playbackrateschange');}}/*** Get the {@link VideoTrackList}** @link https://html.spec.whatwg.org/multipage/embedded-content.html#videotracklist** @return {VideoTrackList}* the current video track list** @method Player.prototype.videoTracks*//*** Get the {@link AudioTrackList}** @link https://html.spec.whatwg.org/multipage/embedded-content.html#audiotracklist** @return {AudioTrackList}* the current audio track list** @method Player.prototype.audioTracks*//*** Get the {@link TextTrackList}** @link http://www.w3.org/html/wg/drafts/html/master/embedded-content-0.html#dom-media-texttracks** @return {TextTrackList}* the current text track list** @method Player.prototype.textTracks*//*** Get the remote {@link TextTrackList}** @return {TextTrackList}* The current remote text track list** @method Player.prototype.remoteTextTracks*//*** Get the remote {@link HtmlTrackElementList} tracks.** @return {HtmlTrackElementList}* The current remote text track element list** @method Player.prototype.remoteTextTrackEls*/ALL.names.forEach(function (name) {const props = ALL[name];Player.prototype[props.getterName] = function () {if (this.tech_) {return this.tech_[props.getterName]();}// if we have not yet loadTech_, we create {video,audio,text}Tracks_// these will be passed to the tech during loadingthis[props.privateName] = this[props.privateName] || new props.ListClass();return this[props.privateName];};});/*** Get or set the `Player`'s crossorigin option. For the HTML5 player, this* sets the `crossOrigin` property on the `<video>` tag to control the CORS* behavior.** @see [Video Element Attributes]{@link https://developer.mozilla.org/en-US/docs/Web/HTML/Element/video#attr-crossorigin}** @param {string} [value]* The value to set the `Player`'s crossorigin to. 
If an argument is* given, must be one of `anonymous` or `use-credentials`.** @return {string|undefined}* - The current crossorigin value of the `Player` when getting.* - undefined when setting*/Player.prototype.crossorigin = Player.prototype.crossOrigin;/*** Global enumeration of players.** The keys are the player IDs and the values are either the {@link Player}* instance or `null` for disposed players.** @type {Object}*/Player.players = {};const navigator = window.navigator;/** Player instance options, surfaced using options* options = Player.prototype.options_* Make changes in options, not here.** @type {Object}* @private*/Player.prototype.options_ = {// Default order of fallback technologytechOrder: Tech.defaultTechOrder_,html5: {},// enable sourceset by defaultenableSourceset: true,// default inactivity timeoutinactivityTimeout: 2000,// default playback ratesplaybackRates: [],// Add playback rate selection by adding rates// 'playbackRates': [0.5, 1, 1.5, 2],liveui: false,// Included control setschildren: ['mediaLoader', 'posterImage', 'titleBar', 'textTrackDisplay', 'loadingSpinner', 'bigPlayButton', 'liveTracker', 'controlBar', 'errorDisplay', 'textTrackSettings', 'resizeManager'],language: navigator && (navigator.languages && navigator.languages[0] || navigator.userLanguage || navigator.language) || 'en',// locales and their language translationslanguages: {},// Default message to show when a video cannot be played.notSupportedMessage: 'No compatible source was found for this media.',normalizeAutoplay: false,fullscreen: {options: {navigationUI: 'hide'}},breakpoints: {},responsive: false,audioOnlyMode: false,audioPosterMode: false,// Default smooth seeking to falseenableSmoothSeeking: false};TECH_EVENTS_RETRIGGER.forEach(function (event) {Player.prototype[`handleTech${toTitleCase$1(event)}_`] = function () {return this.trigger(event);};});/*** Fired when the player has initial duration and dimension information** @event Player#loadedmetadata* @type {Event}*//*** Fired when the player has downloaded data at the current playback position** @event Player#loadeddata* @type {Event}*//*** Fired when the current playback position has changed ** During playback this is fired every 15-250 milliseconds, depending on the* playback technology in use.** @event Player#timeupdate* @type {Event}*//*** Fired when the volume changes** @event Player#volumechange* @type {Event}*//*** Reports whether or not a player has a plugin available.** This does not report whether or not the plugin has ever been initialized* on this player. 
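/*
 * Illustrative sketch (editor-added): overriding a few of the default
 * `options_` listed above at creation time and listening for the standard
 * events re-documented here. The element id and option values are assumptions.
 */
function examplePlayerOptionsUsage() { // hypothetical helper, never invoked here
  const player = videojs('demo-player', {
    playbackRates: [0.75, 1, 1.25],
    inactivityTimeout: 4000,
    notSupportedMessage: 'Sorry, this clip cannot be played.'
  });
  player.on('loadedmetadata', () => videojs.log('duration and dimensions are known'));
  player.on('timeupdate', () => videojs.log('currentTime:', player.currentTime()));
  player.on('volumechange', () => videojs.log('volume:', player.volume()));
}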
For that, [usingPlugin]{@link Player#usingPlugin}.** @method Player#hasPlugin* @param {string} name* The name of a plugin.** @return {boolean}* Whether or not this player has the requested plugin available.*//*** Reports whether or not a player is using a plugin by name.** For basic plugins, this only reports whether the plugin has _ever_ been* initialized on this player.** @method Player#usingPlugin* @param {string} name* The name of a plugin.** @return {boolean}* Whether or not this player is using the requested plugin.*/Component$1.registerComponent('Player', Player);/*** @file plugin.js*//*** The base plugin name.** @private* @constant* @type {string}*/const BASE_PLUGIN_NAME = 'plugin';/*** The key on which a player's active plugins cache is stored.** @private* @constant* @type {string}*/const PLUGIN_CACHE_KEY = 'activePlugins_';/*** Stores registered plugins in a private space.** @private* @type {Object}*/const pluginStorage = {};/*** Reports whether or not a plugin has been registered.** @private* @param {string} name* The name of a plugin.** @return {boolean}* Whether or not the plugin has been registered.*/const pluginExists = name => pluginStorage.hasOwnProperty(name);/*** Get a single registered plugin by name.** @private* @param {string} name* The name of a plugin.** @return {typeof Plugin|Function|undefined}* The plugin (or undefined).*/const getPlugin = name => pluginExists(name) ? pluginStorage[name] : undefined;/*** Marks a plugin as "active" on a player.** Also, ensures that the player has an object for tracking active plugins.** @private* @param {Player} player* A Video.js player instance.** @param {string} name* The name of a plugin.*/const markPluginAsActive = (player, name) => {player[PLUGIN_CACHE_KEY] = player[PLUGIN_CACHE_KEY] || {};player[PLUGIN_CACHE_KEY][name] = true;};/*** Triggers a pair of plugin setup events.** @private* @param {Player} player* A Video.js player instance.** @param {PluginEventHash} hash* A plugin event hash.** @param {boolean} [before]* If true, prefixes the event name with "before". In other words,* use this to trigger "beforepluginsetup" instead of "pluginsetup".*/const triggerSetupEvent = (player, hash, before) => {const eventName = (before ? 
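/*
 * Illustrative sketch (editor-added): the difference between `hasPlugin()`
 * (is the plugin registered and available?) and `usingPlugin()` (has it been
 * initialized on this particular player?). Assumes a player with the id
 * 'demo-player'; 'qualityLevels' is simply a plugin registered later in this
 * bundle.
 */
function examplePluginChecksUsage() { // hypothetical helper, never invoked here
  const player = videojs.getPlayer('demo-player');
  if (player.hasPlugin('qualityLevels') && !player.usingPlugin('qualityLevels')) {
    player.qualityLevels(); // after initialization, usingPlugin() reports true
  }
}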
'before' : '') + 'pluginsetup';player.trigger(eventName, hash);player.trigger(eventName + ':' + hash.name, hash);};/*** Takes a basic plugin function and returns a wrapper function which marks* on the player that the plugin has been activated.** @private* @param {string} name* The name of the plugin.** @param {Function} plugin* The basic plugin.** @return {Function}* A wrapper function for the given plugin.*/const createBasicPlugin = function (name, plugin) {const basicPluginWrapper = function () {// We trigger the "beforepluginsetup" and "pluginsetup" events on the player// regardless, but we want the hash to be consistent with the hash provided// for advanced plugins.//// The only potentially counter-intuitive thing here is the `instance` in// the "pluginsetup" event is the value returned by the `plugin` function.triggerSetupEvent(this, {name,plugin,instance: null}, true);const instance = plugin.apply(this, arguments);markPluginAsActive(this, name);triggerSetupEvent(this, {name,plugin,instance});return instance;};Object.keys(plugin).forEach(function (prop) {basicPluginWrapper[prop] = plugin[prop];});return basicPluginWrapper;};/*** Takes a plugin sub-class and returns a factory function for generating* instances of it.** This factory function will replace itself with an instance of the requested* sub-class of Plugin.** @private* @param {string} name* The name of the plugin.** @param {Plugin} PluginSubClass* The advanced plugin.** @return {Function}*/const createPluginFactory = (name, PluginSubClass) => {// Add a `name` property to the plugin prototype so that each plugin can// refer to itself by name.PluginSubClass.prototype.name = name;return function (...args) {triggerSetupEvent(this, {name,plugin: PluginSubClass,instance: null}, true);const instance = new PluginSubClass(...[this, ...args]);// The plugin is replaced by a function that returns the current instance.this[name] = () => instance;triggerSetupEvent(this, instance.getEventHash());return instance;};};/*** Parent class for all advanced plugins.** @mixes module:evented~EventedMixin* @mixes module:stateful~StatefulMixin* @fires Player#beforepluginsetup* @fires Player#beforepluginsetup:$name* @fires Player#pluginsetup* @fires Player#pluginsetup:$name* @listens Player#dispose* @throws {Error}* If attempting to instantiate the base {@link Plugin} class* directly instead of via a sub-class.*/class Plugin {/*** Creates an instance of this class.** Sub-classes should call `super` to ensure plugins are properly initialized.** @param {Player} player* A Video.js player instance.*/constructor(player) {if (this.constructor === Plugin) {throw new Error('Plugin must be sub-classed; not directly instantiated.');}this.player = player;if (!this.log) {this.log = this.player.log.createLogger(this.name);}// Make this object evented, but remove the added `trigger` method so we// use the prototype version instead.evented(this);delete this.trigger;stateful(this, this.constructor.defaultState);markPluginAsActive(player, this.name);// Auto-bind the dispose method so we can use it as a listener and unbind// it later easily.this.dispose = this.dispose.bind(this);// If the player is disposed, dispose the plugin.player.on('dispose', this.dispose);}/*** Get the version of the plugin that was set on <pluginName>.VERSION*/version() {return this.constructor.VERSION;}/*** Each event triggered by plugins includes a hash of additional data with* conventional properties.** This returns that object or mutates an existing hash.** @param {Object} [hash={}]* An object 
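/*
 * Illustrative sketch (editor-added): a "basic" (plain function) plugin wrapped
 * by `createBasicPlugin()` above, plus a listener for the per-plugin setup
 * event. The plugin name 'demoTagger', its option and the element id are
 * assumptions.
 */
function exampleBasicPluginUsage() { // hypothetical helper, never invoked here
  videojs.registerPlugin('demoTagger', function (options) {
    // Inside a basic plugin, `this` is the Player it was called on.
    this.addClass((options && options.className) || 'vjs-demo-tagged');
  });

  const player = videojs.getPlayer('demo-player');
  player.on('pluginsetup:demoTagger', (event, hash) => {
    // hash is { name, plugin, instance }, as built by triggerSetupEvent().
    videojs.log('plugin set up:', hash.name);
  });
  player.demoTagger({ className: 'vjs-demo-tagged' });
}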
to be used as event an event hash.** @return {PluginEventHash}* An event hash object with provided properties mixed-in.*/getEventHash(hash = {}) {hash.name = this.name;hash.plugin = this.constructor;hash.instance = this;return hash;}/*** Triggers an event on the plugin object and overrides* {@link module:evented~EventedMixin.trigger|EventedMixin.trigger}.** @param {string|Object} event* An event type or an object with a type property.** @param {Object} [hash={}]* Additional data hash to merge with a* {@link PluginEventHash|PluginEventHash}.** @return {boolean}* Whether or not default was prevented.*/trigger(event, hash = {}) {return trigger(this.eventBusEl_, event, this.getEventHash(hash));}/*** Handles "statechanged" events on the plugin. No-op by default, override by* subclassing.** @abstract* @param {Event} e* An event object provided by a "statechanged" event.** @param {Object} e.changes* An object describing changes that occurred with the "statechanged"* event.*/handleStateChanged(e) {}/*** Disposes a plugin.** Subclasses can override this if they want, but for the sake of safety,* it's probably best to subscribe the "dispose" event.** @fires Plugin#dispose*/dispose() {const {name,player} = this;/*** Signals that a advanced plugin is about to be disposed.** @event Plugin#dispose* @type {Event}*/this.trigger('dispose');this.off();player.off('dispose', this.dispose);// Eliminate any possible sources of leaking memory by clearing up// references between the player and the plugin instance and nulling out// the plugin's state and replacing methods with a function that throws.player[PLUGIN_CACHE_KEY][name] = false;this.player = this.state = null;// Finally, replace the plugin name on the player with a new factory// function, so that the plugin is ready to be set up again.player[name] = createPluginFactory(name, pluginStorage[name]);}/*** Determines if a plugin is a basic plugin (i.e. not a sub-class of `Plugin`).** @param {string|Function} plugin* If a string, matches the name of a plugin. If a function, will be* tested directly.** @return {boolean}* Whether or not a plugin is a basic plugin.*/static isBasic(plugin) {const p = typeof plugin === 'string' ? getPlugin(plugin) : plugin;return typeof p === 'function' && !Plugin.prototype.isPrototypeOf(p.prototype);}/*** Register a Video.js plugin.** @param {string} name* The name of the plugin to be registered. Must be a string and* must not match an existing plugin or a method on the `Player`* prototype.** @param {typeof Plugin|Function} plugin* A sub-class of `Plugin` or a function for basic plugins.** @return {typeof Plugin|Function}* For advanced plugins, a factory function for that plugin. For* basic plugins, a wrapper function that initializes the plugin.*/static registerPlugin(name, plugin) {if (typeof name !== 'string') {throw new Error(`Illegal plugin name, "${name}", must be a string, was ${typeof name}.`);}if (pluginExists(name)) {log$1.warn(`A plugin named "${name}" already exists. 
You may want to avoid re-registering plugins!`);} else if (Player.prototype.hasOwnProperty(name)) {throw new Error(`Illegal plugin name, "${name}", cannot share a name with an existing player method!`);}if (typeof plugin !== 'function') {throw new Error(`Illegal plugin for "${name}", must be a function, was ${typeof plugin}.`);}pluginStorage[name] = plugin;// Add a player prototype method for all sub-classed plugins (but not for// the base Plugin class).if (name !== BASE_PLUGIN_NAME) {if (Plugin.isBasic(plugin)) {Player.prototype[name] = createBasicPlugin(name, plugin);} else {Player.prototype[name] = createPluginFactory(name, plugin);}}return plugin;}/*** De-register a Video.js plugin.** @param {string} name* The name of the plugin to be de-registered. Must be a string that* matches an existing plugin.** @throws {Error}* If an attempt is made to de-register the base plugin.*/static deregisterPlugin(name) {if (name === BASE_PLUGIN_NAME) {throw new Error('Cannot de-register base plugin.');}if (pluginExists(name)) {delete pluginStorage[name];delete Player.prototype[name];}}/*** Gets an object containing multiple Video.js plugins.** @param {Array} [names]* If provided, should be an array of plugin names. Defaults to _all_* plugin names.** @return {Object|undefined}* An object containing plugin(s) associated with their name(s) or* `undefined` if no matching plugins exist).*/static getPlugins(names = Object.keys(pluginStorage)) {let result;names.forEach(name => {const plugin = getPlugin(name);if (plugin) {result = result || {};result[name] = plugin;}});return result;}/*** Gets a plugin's version, if available** @param {string} name* The name of a plugin.** @return {string}* The plugin's version or an empty string.*/static getPluginVersion(name) {const plugin = getPlugin(name);return plugin && plugin.VERSION || '';}}/*** Gets a plugin by name if it exists.** @static* @method getPlugin* @memberOf Plugin* @param {string} name* The name of a plugin.** @returns {typeof Plugin|Function|undefined}* The plugin (or `undefined`).*/Plugin.getPlugin = getPlugin;/*** The name of the base plugin class as it is registered.** @type {string}*/Plugin.BASE_PLUGIN_NAME = BASE_PLUGIN_NAME;Plugin.registerPlugin(BASE_PLUGIN_NAME, Plugin);/*** Documented in player.js** @ignore*/Player.prototype.usingPlugin = function (name) {return !!this[PLUGIN_CACHE_KEY] && this[PLUGIN_CACHE_KEY][name] === true;};/*** Documented in player.js** @ignore*/Player.prototype.hasPlugin = function (name) {return !!pluginExists(name);};/*** Signals that a plugin is about to be set up on a player.** @event Player#beforepluginsetup* @type {PluginEventHash}*//*** Signals that a plugin is about to be set up on a player - by name. The name* is the name of the plugin.** @event Player#beforepluginsetup:$name* @type {PluginEventHash}*//*** Signals that a plugin has just been set up on a player.** @event Player#pluginsetup* @type {PluginEventHash}*//*** Signals that a plugin has just been set up on a player - by name. The name* is the name of the plugin.** @event Player#pluginsetup:$name* @type {PluginEventHash}*//*** @typedef {Object} PluginEventHash** @property {string} instance* For basic plugins, the return value of the plugin function. For* advanced plugins, the plugin instance on which the event is fired.** @property {string} name* The name of the plugin.** @property {string} plugin* For basic plugins, the plugin function. 
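/*
 * Illustrative sketch (editor-added): an "advanced" plugin built by
 * sub-classing the base Plugin class, which is registered above under
 * BASE_PLUGIN_NAME and therefore reachable via `videojs.getPlugin('plugin')`.
 * The plugin name 'demoTracker', its state and version are assumptions.
 */
function exampleAdvancedPluginUsage() { // hypothetical helper, never invoked here
  const PluginBase = videojs.getPlugin('plugin');

  class DemoTracker extends PluginBase {
    constructor(player) {
      super(player);
      this.setState({ plays: 0 });
      player.on('play', () => this.setState({ plays: this.state.plays + 1 }));
    }

    handleStateChanged(e) {
      // Invoked for "statechanged" events, as documented above.
      this.log('plays so far:', this.state.plays);
    }
  }
  DemoTracker.VERSION = '0.0.1';

  videojs.registerPlugin('demoTracker', DemoTracker);
  const tracker = videojs.getPlayer('demo-player').demoTracker();
  videojs.log(tracker.version()); // '0.0.1', read from DemoTracker.VERSION
}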
For advanced plugins, the* plugin class/constructor.*//*** @file deprecate.js* @module deprecate*//*** Decorate a function with a deprecation message the first time it is called.** @param {string} message* A deprecation message to log the first time the returned function* is called.** @param {Function} fn* The function to be deprecated.** @return {Function}* A wrapper function that will log a deprecation warning the first* time it is called. The return value will be the return value of* the wrapped function.*/function deprecate(message, fn) {let warned = false;return function (...args) {if (!warned) {log$1.warn(message);}warned = true;return fn.apply(this, args);};}/*** Internal function used to mark a function as deprecated in the next major* version with consistent messaging.** @param {number} major The major version where it will be removed* @param {string} oldName The old function name* @param {string} newName The new function name* @param {Function} fn The function to deprecate* @return {Function} The decorated function*/function deprecateForMajor(major, oldName, newName, fn) {return deprecate(`${oldName} is deprecated and will be removed in ${major}.0; please use ${newName} instead.`, fn);}/*** @file video.js* @module videojs*//*** Normalize an `id` value by trimming off a leading `#`** @private* @param {string} id* A string, maybe with a leading `#`.** @return {string}* The string, without any leading `#`.*/const normalizeId = id => id.indexOf('#') === 0 ? id.slice(1) : id;/*** A callback that is called when a component is ready. Does not have any* parameters and any callback value will be ignored. See: {@link Component~ReadyCallback}** @callback ReadyCallback*//*** The `videojs()` function doubles as the main function for users to create a* {@link Player} instance as well as the main library namespace.** It can also be used as a getter for a pre-existing {@link Player} instance.* However, we _strongly_ recommend using `videojs.getPlayer()` for this* purpose because it avoids any potential for unintended initialization.** Due to [limitations](https://github.com/jsdoc3/jsdoc/issues/955#issuecomment-313829149)* of our JSDoc template, we cannot properly document this as both a function* and a namespace, so its function signature is documented here.** #### Arguments* ##### id* string|Element, **required**** Video element or video element ID.** ##### options* Object, optional** Options object for providing settings.* See: [Options Guide](https://docs.videojs.com/tutorial-options.html).** ##### ready* {@link Component~ReadyCallback}, optional** A function to be called when the {@link Player} and {@link Tech} are ready.** #### Return Value** The `videojs()` function returns a {@link Player} instance.** @namespace** @borrows AudioTrack as AudioTrack* @borrows Component.getComponent as getComponent* @borrows module:events.on as on* @borrows module:events.one as one* @borrows module:events.off as off* @borrows module:events.trigger as trigger* @borrows EventTarget as EventTarget* @borrows module:middleware.use as use* @borrows Player.players as players* @borrows Plugin.registerPlugin as registerPlugin* @borrows Plugin.deregisterPlugin as deregisterPlugin* @borrows Plugin.getPlugins as getPlugins* @borrows Plugin.getPlugin as getPlugin* @borrows Plugin.getPluginVersion as getPluginVersion* @borrows Tech.getTech as getTech* @borrows Tech.registerTech as registerTech* @borrows TextTrack as TextTrack* @borrows VideoTrack as VideoTrack** @param {string|Element} id* Video element or video element 
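/*
 * Illustrative sketch (editor-added): how the internal `deprecate()` helper
 * above behaves. The wrapped function keeps working; the message is logged
 * only on the first call. 'legacyDouble' is an assumed name.
 */
function exampleDeprecateUsage() { // hypothetical helper, never invoked here
  const legacyDouble = deprecate('legacyDouble is deprecated; use double() instead.', x => x * 2);
  legacyDouble(2); // logs the warning once, returns 4
  legacyDouble(3); // no further warning, returns 6
}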
ID.** @param {Object} [options]* Options object for providing settings.* See: [Options Guide](https://docs.videojs.com/tutorial-options.html).** @param {ReadyCallback} [ready]* A function to be called when the {@link Player} and {@link Tech} are* ready.** @return {Player}* The `videojs()` function returns a {@link Player|Player} instance.*/function videojs(id, options, ready) {let player = videojs.getPlayer(id);if (player) {if (options) {log$1.warn(`Player "${id}" is already initialised. Options will not be applied.`);}if (ready) {player.ready(ready);}return player;}const el = typeof id === 'string' ? $('#' + normalizeId(id)) : id;if (!isEl(el)) {throw new TypeError('The element or ID supplied is not valid. (videojs)');}// document.body.contains(el) will only check if el is contained within that one document.// This causes problems for elements in iframes.// Instead, use the element's ownerDocument instead of the global document.// This will make sure that the element is indeed in the dom of that document.// Additionally, check that the document in question has a default view.// If the document is no longer attached to the dom, the defaultView of the document will be null.// If element is inside Shadow DOM (e.g. is part of a Custom element), ownerDocument.body// always returns false. Instead, use the Shadow DOM root.const inShadowDom = 'getRootNode' in el ? el.getRootNode() instanceof window.ShadowRoot : false;const rootNode = inShadowDom ? el.getRootNode() : el.ownerDocument.body;if (!el.ownerDocument.defaultView || !rootNode.contains(el)) {log$1.warn('The element supplied is not included in the DOM');}options = options || {};// Store a copy of the el before modification, if it is to be restored in destroy()// If div ingest, store the parent divif (options.restoreEl === true) {options.restoreEl = (el.parentNode && el.parentNode.hasAttribute('data-vjs-player') ? el.parentNode : el).cloneNode(true);}hooks('beforesetup').forEach(hookFunction => {const opts = hookFunction(el, merge$2(options));if (!isObject$1(opts) || Array.isArray(opts)) {log$1.error('please return an object in beforesetup hooks');return;}options = merge$2(options, opts);});// We get the current "Player" component here in case an integration has// replaced it with a custom player.const PlayerComponent = Component$1.getComponent('Player');player = new PlayerComponent(el, options, ready);hooks('setup').forEach(hookFunction => hookFunction(player));return player;}videojs.hooks_ = hooks_;videojs.hooks = hooks;videojs.hook = hook;videojs.hookOnce = hookOnce;videojs.removeHook = removeHook;// Add default stylesif (window.VIDEOJS_NO_DYNAMIC_STYLE !== true && isReal()) {let style = $('.vjs-styles-defaults');if (!style) {style = createStyleElement('vjs-styles-defaults');const head = $('head');if (head) {head.insertBefore(style, head.firstChild);}setTextContent(style, `.video-js {width: 300px;height: 150px;}.vjs-fluid:not(.vjs-audio-only-mode) {padding-top: 56.25%}`);}}// Run Auto-load players// You have to wait at least once in case this script is loaded after your// video in the DOM (weird behavior only with minified version)autoSetupTimeout(1, videojs);/*** Current Video.js version. Follows [semantic versioning](https://semver.org/).** @type {string}*/videojs.VERSION = version$5;/*** The global options object. 
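/*
 * Illustrative sketch (editor-added): wiring the `beforesetup` / `setup`
 * lifecycle hooks surfaced above, then creating (or re-fetching) a player with
 * the main `videojs()` function. The element id and the injected option are
 * assumptions.
 */
function exampleVideojsFactoryUsage() { // hypothetical helper, never invoked here
  videojs.hook('beforesetup', (el, options) => {
    // beforesetup hooks must return an object; it is merged into the options.
    return { inactivityTimeout: 3000 };
  });
  videojs.hookOnce('setup', player => videojs.log('player created:', player.id()));

  const player = videojs('demo-player', { controls: true }, function ready() {
    this.play();
  });

  // Calling videojs() again with the same id returns the same instance, so
  // prefer videojs.getPlayer() when only a lookup is intended.
  videojs.log(videojs('demo-player') === videojs.getPlayer('demo-player')); // true
}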
These are the settings that take effect* if no overrides are specified when the player is created.** @type {Object}*/videojs.options = Player.prototype.options_;/*** Get an object with the currently created players, keyed by player ID** @return {Object}* The created players*/videojs.getPlayers = () => Player.players;/*** Get a single player based on an ID or DOM element.** This is useful if you want to check if an element or ID has an associated* Video.js player, but not create one if it doesn't.** @param {string|Element} id* An HTML element - `<video>`, `<audio>`, or `<video-js>` -* or a string matching the `id` of such an element.** @return {Player|undefined}* A player instance or `undefined` if there is no player instance* matching the argument.*/videojs.getPlayer = id => {const players = Player.players;let tag;if (typeof id === 'string') {const nId = normalizeId(id);const player = players[nId];if (player) {return player;}tag = $('#' + nId);} else {tag = id;}if (isEl(tag)) {const {player,playerId} = tag;// Element may have a `player` property referring to an already created// player instance. If so, return that.if (player || players[playerId]) {return player || players[playerId];}}};/*** Returns an array of all current players.** @return {Array}* An array of all players. The array will be in the order that* `Object.keys` provides, which could potentially vary between* JavaScript engines.**/videojs.getAllPlayers = () =>// Disposed players leave a key with a `null` value, so we need to make sure// we filter those out.Object.keys(Player.players).map(k => Player.players[k]).filter(Boolean);videojs.players = Player.players;videojs.getComponent = Component$1.getComponent;/*** Register a component so it can referred to by name. Used when adding to other* components, either through addChild `component.addChild('myComponent')` or through* default children options `{ children: ['myComponent'] }`.** > NOTE: You could also just initialize the component before adding.* `component.addChild(new MyComponent());`** @param {string} name* The class name of the component** @param {typeof Component} comp* The component class** @return {typeof Component}* The newly registered component*/videojs.registerComponent = (name, comp) => {if (Tech.isTech(comp)) {log$1.warn(`The ${name} tech was registered as a component. It should instead be registered using videojs.registerTech(name, tech)`);}return Component$1.registerComponent.call(Component$1, name, comp);};videojs.getTech = Tech.getTech;videojs.registerTech = Tech.registerTech;videojs.use = use;/*** An object that can be returned by a middleware to signify* that the middleware is being terminated.** @type {object}* @property {object} middleware.TERMINATOR*/Object.defineProperty(videojs, 'middleware', {value: {},writeable: false,enumerable: true});Object.defineProperty(videojs.middleware, 'TERMINATOR', {value: TERMINATOR,writeable: false,enumerable: true});/*** A reference to the {@link module:browser|browser utility module} as an object.** @type {Object}* @see {@link module:browser|browser}*/videojs.browser = browser;/*** A reference to the {@link module:obj|obj utility module} as an object.** @type {Object}* @see {@link module:obj|obj}*/videojs.obj = Obj;/*** Deprecated reference to the {@link module:obj.merge|merge function}** @type {Function}* @see {@link module:obj.merge|merge}* @deprecated Deprecated and will be removed in 9.0. 
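/*
 * Illustrative sketch (editor-added): registering a small custom component via
 * `videojs.getComponent()` / `videojs.registerComponent()` as described above,
 * then adding it to a player by name. 'DemoBadge' and its CSS class are
 * assumptions.
 */
function exampleRegisterComponentUsage() { // hypothetical helper, never invoked here
  const Component = videojs.getComponent('Component');

  class DemoBadge extends Component {
    createEl() {
      return super.createEl('div', { className: 'vjs-demo-badge' });
    }
  }

  videojs.registerComponent('DemoBadge', DemoBadge);
  videojs.getPlayer('demo-player').addChild('DemoBadge');
}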
Please use videojs.obj.merge instead.*/videojs.mergeOptions = deprecateForMajor(9, 'videojs.mergeOptions', 'videojs.obj.merge', merge$2);/*** Deprecated reference to the {@link module:obj.defineLazyProperty|defineLazyProperty function}** @type {Function}* @see {@link module:obj.defineLazyProperty|defineLazyProperty}* @deprecated Deprecated and will be removed in 9.0. Please use videojs.obj.defineLazyProperty instead.*/videojs.defineLazyProperty = deprecateForMajor(9, 'videojs.defineLazyProperty', 'videojs.obj.defineLazyProperty', defineLazyProperty);/*** Deprecated reference to the {@link module:fn.bind_|fn.bind_ function}** @type {Function}* @see {@link module:fn.bind_|fn.bind_}* @deprecated Deprecated and will be removed in 9.0. Please use native Function.prototype.bind instead.*/videojs.bind = deprecateForMajor(9, 'videojs.bind', 'native Function.prototype.bind', bind_);videojs.registerPlugin = Plugin.registerPlugin;videojs.deregisterPlugin = Plugin.deregisterPlugin;/*** Deprecated method to register a plugin with Video.js** @deprecated Deprecated and will be removed in 9.0. Use videojs.registerPlugin() instead.** @param {string} name* The plugin name** @param {typeof Plugin|Function} plugin* The plugin sub-class or function** @return {typeof Plugin|Function}*/videojs.plugin = (name, plugin) => {log$1.warn('videojs.plugin() is deprecated; use videojs.registerPlugin() instead');return Plugin.registerPlugin(name, plugin);};videojs.getPlugins = Plugin.getPlugins;videojs.getPlugin = Plugin.getPlugin;videojs.getPluginVersion = Plugin.getPluginVersion;/*** Adding languages so that they're available to all players.* Example: `videojs.addLanguage('es', { 'Hello': 'Hola' });`** @param {string} code* The language code or dictionary property** @param {Object} data* The data values to be translated** @return {Object}* The resulting language dictionary object*/videojs.addLanguage = function (code, data) {code = ('' + code).toLowerCase();videojs.options.languages = merge$2(videojs.options.languages, {[code]: data});return videojs.options.languages[code];};/*** A reference to the {@link module:log|log utility module} as an object.** @type {Function}* @see {@link module:log|log}*/videojs.log = log$1;videojs.createLogger = createLogger;/*** A reference to the {@link module:time|time utility module} as an object.** @type {Object}* @see {@link module:time|time}*/videojs.time = Time;/*** Deprecated reference to the {@link module:time.createTimeRanges|createTimeRanges function}** @type {Function}* @see {@link module:time.createTimeRanges|createTimeRanges}* @deprecated Deprecated and will be removed in 9.0. Please use videojs.time.createTimeRanges instead.*/videojs.createTimeRange = deprecateForMajor(9, 'videojs.createTimeRange', 'videojs.time.createTimeRanges', createTimeRanges$1);/*** Deprecated reference to the {@link module:time.createTimeRanges|createTimeRanges function}** @type {Function}* @see {@link module:time.createTimeRanges|createTimeRanges}* @deprecated Deprecated and will be removed in 9.0. Please use videojs.time.createTimeRanges instead.*/videojs.createTimeRanges = deprecateForMajor(9, 'videojs.createTimeRanges', 'videojs.time.createTimeRanges', createTimeRanges$1);/*** Deprecated reference to the {@link module:time.formatTime|formatTime function}** @type {Function}* @see {@link module:time.formatTime|formatTime}* @deprecated Deprecated and will be removed in 9.0. 
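/*
 * Illustrative sketch (editor-added): adding a translation with
 * `videojs.addLanguage()` and preferring `videojs.obj.merge` over the
 * deprecated `videojs.mergeOptions` alias noted above. The Spanish strings and
 * the option objects are assumptions.
 */
function exampleLanguageAndMergeUsage() { // hypothetical helper, never invoked here
  videojs.addLanguage('es', { 'Play': 'Reproducir', 'Pause': 'Pausa' });

  const defaults = { autoplay: false, playbackRates: [1, 1.5] };
  const overrides = { playbackRates: [1, 2] };
  const merged = videojs.obj.merge(defaults, overrides); // same result, no deprecation warning
  videojs.log(merged.playbackRates); // [1, 2]
}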
Please use videojs.time.format instead.*/videojs.formatTime = deprecateForMajor(9, 'videojs.formatTime', 'videojs.time.formatTime', formatTime);/*** Deprecated reference to the {@link module:time.setFormatTime|setFormatTime function}** @type {Function}* @see {@link module:time.setFormatTime|setFormatTime}* @deprecated Deprecated and will be removed in 9.0. Please use videojs.time.setFormat instead.*/videojs.setFormatTime = deprecateForMajor(9, 'videojs.setFormatTime', 'videojs.time.setFormatTime', setFormatTime);/*** Deprecated reference to the {@link module:time.resetFormatTime|resetFormatTime function}** @type {Function}* @see {@link module:time.resetFormatTime|resetFormatTime}* @deprecated Deprecated and will be removed in 9.0. Please use videojs.time.resetFormat instead.*/videojs.resetFormatTime = deprecateForMajor(9, 'videojs.resetFormatTime', 'videojs.time.resetFormatTime', resetFormatTime);/*** Deprecated reference to the {@link module:url.parseUrl|Url.parseUrl function}** @type {Function}* @see {@link module:url.parseUrl|parseUrl}* @deprecated Deprecated and will be removed in 9.0. Please use videojs.url.parseUrl instead.*/videojs.parseUrl = deprecateForMajor(9, 'videojs.parseUrl', 'videojs.url.parseUrl', parseUrl);/*** Deprecated reference to the {@link module:url.isCrossOrigin|Url.isCrossOrigin function}** @type {Function}* @see {@link module:url.isCrossOrigin|isCrossOrigin}* @deprecated Deprecated and will be removed in 9.0. Please use videojs.url.isCrossOrigin instead.*/videojs.isCrossOrigin = deprecateForMajor(9, 'videojs.isCrossOrigin', 'videojs.url.isCrossOrigin', isCrossOrigin);videojs.EventTarget = EventTarget$2;videojs.any = any;videojs.on = on;videojs.one = one;videojs.off = off;videojs.trigger = trigger;/*** A cross-browser XMLHttpRequest wrapper.** @function* @param {Object} options* Settings for the request.** @return {XMLHttpRequest|XDomainRequest}* The request object.** @see https://github.com/Raynos/xhr*/videojs.xhr = lib;videojs.TextTrack = TextTrack;videojs.AudioTrack = AudioTrack;videojs.VideoTrack = VideoTrack;['isEl', 'isTextNode', 'createEl', 'hasClass', 'addClass', 'removeClass', 'toggleClass', 'setAttributes', 'getAttributes', 'emptyEl', 'appendContent', 'insertContent'].forEach(k => {videojs[k] = function () {log$1.warn(`videojs.${k}() is deprecated; use videojs.dom.${k}() instead`);return Dom[k].apply(null, arguments);};});videojs.computedStyle = deprecateForMajor(9, 'videojs.computedStyle', 'videojs.dom.computedStyle', computedStyle);/*** A reference to the {@link module:dom|DOM utility module} as an object.** @type {Object}* @see {@link module:dom|dom}*/videojs.dom = Dom;/*** A reference to the {@link module:fn|fn utility module} as an object.** @type {Object}* @see {@link module:fn|fn}*/videojs.fn = Fn;/*** A reference to the {@link module:num|num utility module} as an object.** @type {Object}* @see {@link module:num|num}*/videojs.num = Num;/*** A reference to the {@link module:str|str utility module} as an object.** @type {Object}* @see {@link module:str|str}*/videojs.str = Str;/*** A reference to the {@link module:url|URL utility module} as an object.** @type {Object}* @see {@link module:url|url}*/videojs.url = Url;createCommonjsModule(function (module, exports) {/*! @name videojs-contrib-quality-levels @version 4.0.0 @license Apache-2.0 */(function (global, factory) {module.exports = factory(videojs) ;})(commonjsGlobal, function (videojs) {function _interopDefaultLegacy(e) {return e && typeof e === 'object' && 'default' in e ? 
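/*
 * Illustrative sketch (editor-added): the non-deprecated utility namespaces
 * referenced above (`videojs.time`, `videojs.dom`) and the `videojs.xhr`
 * wrapper. The request URL is an assumption.
 */
function exampleUtilityNamespacesUsage() { // hypothetical helper, never invoked here
  videojs.log(videojs.time.formatTime(3675)); // '1:01:15', instead of the deprecated videojs.formatTime()
  const badge = videojs.dom.createEl('div', {}, { 'data-role': 'demo' }); // instead of videojs.createEl()

  videojs.xhr({ uri: '/demo/manifest.m3u8', method: 'GET' }, (err, response, body) => {
    if (!err) {
      videojs.log('fetched', body.length, 'characters; status', response.statusCode, badge.nodeName);
    }
  });
}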
e : {'default': e};}var videojs__default = /*#__PURE__*/_interopDefaultLegacy(videojs);/*** A single QualityLevel.** interface QualityLevel {* readonly attribute DOMString id;* attribute DOMString label;* readonly attribute long width;* readonly attribute long height;* readonly attribute long bitrate;* attribute boolean enabled;* };** @class QualityLevel*/class QualityLevel {/*** Creates a QualityLevel** @param {Representation|Object} representation The representation of the quality level* @param {string} representation.id Unique id of the QualityLevel* @param {number=} representation.width Resolution width of the QualityLevel* @param {number=} representation.height Resolution height of the QualityLevel* @param {number} representation.bandwidth Bitrate of the QualityLevel* @param {number=} representation.frameRate Frame-rate of the QualityLevel* @param {Function} representation.enabled Callback to enable/disable QualityLevel*/constructor(representation) {let level = this; // eslint-disable-linelevel.id = representation.id;level.label = level.id;level.width = representation.width;level.height = representation.height;level.bitrate = representation.bandwidth;level.frameRate = representation.frameRate;level.enabled_ = representation.enabled;Object.defineProperty(level, 'enabled', {/*** Get whether the QualityLevel is enabled.** @return {boolean} True if the QualityLevel is enabled.*/get() {return level.enabled_();},/*** Enable or disable the QualityLevel.** @param {boolean} enable true to enable QualityLevel, false to disable.*/set(enable) {level.enabled_(enable);}});return level;}}/*** A list of QualityLevels.** interface QualityLevelList : EventTarget {* getter QualityLevel (unsigned long index);* readonly attribute unsigned long length;* readonly attribute long selectedIndex;** void addQualityLevel(QualityLevel qualityLevel)* void removeQualityLevel(QualityLevel remove)* QualityLevel? getQualityLevelById(DOMString id);** attribute EventHandler onchange;* attribute EventHandler onaddqualitylevel;* attribute EventHandler onremovequalitylevel;* };** @extends videojs.EventTarget* @class QualityLevelList*/class QualityLevelList extends videojs__default['default'].EventTarget {/*** Creates a QualityLevelList.*/constructor() {super();let list = this; // eslint-disable-linelist.levels_ = [];list.selectedIndex_ = -1;/*** Get the index of the currently selected QualityLevel.** @returns {number} The index of the selected QualityLevel. 
-1 if none selected.* @readonly*/Object.defineProperty(list, 'selectedIndex', {get() {return list.selectedIndex_;}});/*** Get the length of the list of QualityLevels.** @returns {number} The length of the list.* @readonly*/Object.defineProperty(list, 'length', {get() {return list.levels_.length;}});list[Symbol.iterator] = () => list.levels_.values();return list;}/*** Adds a quality level to the list.** @param {Representation|Object} representation The representation of the quality level* @param {string} representation.id Unique id of the QualityLevel* @param {number=} representation.width Resolution width of the QualityLevel* @param {number=} representation.height Resolution height of the QualityLevel* @param {number} representation.bandwidth Bitrate of the QualityLevel* @param {number=} representation.frameRate Frame-rate of the QualityLevel* @param {Function} representation.enabled Callback to enable/disable QualityLevel* @return {QualityLevel} the QualityLevel added to the list* @method addQualityLevel*/addQualityLevel(representation) {let qualityLevel = this.getQualityLevelById(representation.id); // Do not add duplicate quality levelsif (qualityLevel) {return qualityLevel;}const index = this.levels_.length;qualityLevel = new QualityLevel(representation);if (!('' + index in this)) {Object.defineProperty(this, index, {get() {return this.levels_[index];}});}this.levels_.push(qualityLevel);this.trigger({qualityLevel,type: 'addqualitylevel'});return qualityLevel;}/*** Removes a quality level from the list.** @param {QualityLevel} qualityLevel The QualityLevel to remove from the list.* @return {QualityLevel|null} the QualityLevel removed or null if nothing removed* @method removeQualityLevel*/removeQualityLevel(qualityLevel) {let removed = null;for (let i = 0, l = this.length; i < l; i++) {if (this[i] === qualityLevel) {removed = this.levels_.splice(i, 1)[0];if (this.selectedIndex_ === i) {this.selectedIndex_ = -1;} else if (this.selectedIndex_ > i) {this.selectedIndex_--;}break;}}if (removed) {this.trigger({qualityLevel,type: 'removequalitylevel'});}return removed;}/*** Searches for a QualityLevel with the given id.** @param {string} id The id of the QualityLevel to find.* @return {QualityLevel|null} The QualityLevel with id, or null if not found.* @method getQualityLevelById*/getQualityLevelById(id) {for (let i = 0, l = this.length; i < l; i++) {const level = this[i];if (level.id === id) {return level;}}return null;}/*** Resets the list of QualityLevels to empty** @method dispose*/dispose() {this.selectedIndex_ = -1;this.levels_.length = 0;}}/*** change - The selected QualityLevel has changed.* addqualitylevel - A QualityLevel has been added to the QualityLevelList.* removequalitylevel - A QualityLevel has been removed from the QualityLevelList.*/QualityLevelList.prototype.allowedEvents_ = {change: 'change',addqualitylevel: 'addqualitylevel',removequalitylevel: 'removequalitylevel'}; // emulate attribute EventHandler support to allow for feature detectionfor (const event in QualityLevelList.prototype.allowedEvents_) {QualityLevelList.prototype['on' + event] = null;}var version = "4.0.0";/*** Initialization function for the qualityLevels plugin. 
Sets up the QualityLevelList and* event handlers.** @param {Player} player Player object.* @param {Object} options Plugin options object.* @return {QualityLevelList} a list of QualityLevels*/const initPlugin = function (player, options) {const originalPluginFn = player.qualityLevels;const qualityLevelList = new QualityLevelList();const disposeHandler = function () {qualityLevelList.dispose();player.qualityLevels = originalPluginFn;player.off('dispose', disposeHandler);};player.on('dispose', disposeHandler);player.qualityLevels = () => qualityLevelList;player.qualityLevels.VERSION = version;return qualityLevelList;};/*** A video.js plugin.** In the plugin function, the value of `this` is a video.js `Player`* instance. You cannot rely on the player being in a "ready" state here,* depending on how the plugin is invoked. This may or may not be important* to you; if not, remove the wait for "ready"!** @param {Object} options Plugin options object* @return {QualityLevelList} a list of QualityLevels*/const qualityLevels = function (options) {return initPlugin(this, videojs__default['default'].obj.merge({}, options));}; // Register the plugin with video.js.videojs__default['default'].registerPlugin('qualityLevels', qualityLevels); // Include the version number.qualityLevels.VERSION = version;return qualityLevels;});});var urlToolkit = createCommonjsModule(function (module, exports) {// see https://tools.ietf.org/html/rfc1808(function (root) {var URL_REGEX = /^(?=((?:[a-zA-Z0-9+\-.]+:)?))\1(?=((?:\/\/[^\/?#]*)?))\2(?=((?:(?:[^?#\/]*\/)*[^;?#\/]*)?))\3((?:;[^?#]*)?)(\?[^#]*)?(#[^]*)?$/;var FIRST_SEGMENT_REGEX = /^(?=([^\/?#]*))\1([^]*)$/;var SLASH_DOT_REGEX = /(?:\/|^)\.(?=\/)/g;var SLASH_DOT_DOT_REGEX = /(?:\/|^)\.\.\/(?!\.\.\/)[^\/]*(?=\/)/g;var URLToolkit = {// If opts.alwaysNormalize is true then the path will always be normalized even when it starts with / or //// E.g// With opts.alwaysNormalize = false (default, spec compliant)// http://a.com/b/cd + /e/f/../g => http://a.com/e/f/../g// With opts.alwaysNormalize = true (not spec compliant)// http://a.com/b/cd + /e/f/../g => http://a.com/e/gbuildAbsoluteURL: function (baseURL, relativeURL, opts) {opts = opts || {};// remove any remaining space and CRLFbaseURL = baseURL.trim();relativeURL = relativeURL.trim();if (!relativeURL) {// 2a) If the embedded URL is entirely empty, it inherits the// entire base URL (i.e., is set equal to the base URL)// and we are done.if (!opts.alwaysNormalize) {return baseURL;}var basePartsForNormalise = URLToolkit.parseURL(baseURL);if (!basePartsForNormalise) {throw new Error('Error trying to parse base URL.');}basePartsForNormalise.path = URLToolkit.normalizePath(basePartsForNormalise.path);return URLToolkit.buildURLFromParts(basePartsForNormalise);}var relativeParts = URLToolkit.parseURL(relativeURL);if (!relativeParts) {throw new Error('Error trying to parse relative URL.');}if (relativeParts.scheme) {// 2b) If the embedded URL starts with a scheme name, it is// interpreted as an absolute URL and we are done.if (!opts.alwaysNormalize) {return relativeURL;}relativeParts.path = URLToolkit.normalizePath(relativeParts.path);return URLToolkit.buildURLFromParts(relativeParts);}var baseParts = URLToolkit.parseURL(baseURL);if (!baseParts) {throw new Error('Error trying to parse base URL.');}if (!baseParts.netLoc && baseParts.path && baseParts.path[0] !== '/') {// If netLoc missing and path doesn't start with '/', assume everthing before the first '/' is the netLoc// This causes 'example.com/a' to be handled as 
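/*
 * Illustrative sketch (editor-added): consumer-side use of the `qualityLevels`
 * plugin registered above. The returned QualityLevelList fires
 * 'addqualitylevel' / 'change' events and each level is toggled through its
 * `enabled` property. The 720p cap and the player id are assumptions.
 */
function exampleQualityLevelsUsage() { // hypothetical helper, never invoked here
  const player = videojs.getPlayer('demo-player');
  const levels = player.qualityLevels();

  levels.on('addqualitylevel', event => {
    // Only keep renditions at or below 720p enabled.
    event.qualityLevel.enabled = !event.qualityLevel.height || event.qualityLevel.height <= 720;
  });
  levels.on('change', () => {
    videojs.log('selected level', levels.selectedIndex, 'of', levels.length);
  });
}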
'//example.com/a' instead of '/example.com/a'var pathParts = FIRST_SEGMENT_REGEX.exec(baseParts.path);baseParts.netLoc = pathParts[1];baseParts.path = pathParts[2];}if (baseParts.netLoc && !baseParts.path) {baseParts.path = '/';}var builtParts = {// 2c) Otherwise, the embedded URL inherits the scheme of// the base URL.scheme: baseParts.scheme,netLoc: relativeParts.netLoc,path: null,params: relativeParts.params,query: relativeParts.query,fragment: relativeParts.fragment};if (!relativeParts.netLoc) {// 3) If the embedded URL's <net_loc> is non-empty, we skip to// Step 7. Otherwise, the embedded URL inherits the <net_loc>// (if any) of the base URL.builtParts.netLoc = baseParts.netLoc;// 4) If the embedded URL path is preceded by a slash "/", the// path is not relative and we skip to Step 7.if (relativeParts.path[0] !== '/') {if (!relativeParts.path) {// 5) If the embedded URL path is empty (and not preceded by a// slash), then the embedded URL inherits the base URL pathbuiltParts.path = baseParts.path;// 5a) if the embedded URL's <params> is non-empty, we skip to// step 7; otherwise, it inherits the <params> of the base// URL (if any) andif (!relativeParts.params) {builtParts.params = baseParts.params;// 5b) if the embedded URL's <query> is non-empty, we skip to// step 7; otherwise, it inherits the <query> of the base// URL (if any) and we skip to step 7.if (!relativeParts.query) {builtParts.query = baseParts.query;}}} else {// 6) The last segment of the base URL's path (anything// following the rightmost slash "/", or the entire path if no// slash is present) is removed and the embedded URL's path is// appended in its place.var baseURLPath = baseParts.path;var newPath = baseURLPath.substring(0, baseURLPath.lastIndexOf('/') + 1) + relativeParts.path;builtParts.path = URLToolkit.normalizePath(newPath);}}}if (builtParts.path === null) {builtParts.path = opts.alwaysNormalize ? URLToolkit.normalizePath(relativeParts.path) : relativeParts.path;}return URLToolkit.buildURLFromParts(builtParts);},parseURL: function (url) {var parts = URL_REGEX.exec(url);if (!parts) {return null;}return {scheme: parts[1] || '',netLoc: parts[2] || '',path: parts[3] || '',params: parts[4] || '',query: parts[5] || '',fragment: parts[6] || ''};},normalizePath: function (path) {// The following operations are// then applied, in order, to the new path:// 6a) All occurrences of "./", where "." is a complete path// segment, are removed.// 6b) If the path ends with "." as a complete path segment,// that "." is removed.path = path.split('').reverse().join('').replace(SLASH_DOT_REGEX, '');// 6c) All occurrences of "<segment>/../", where <segment> is a// complete path segment not equal to "..", are removed.// Removal of these path segments is performed iteratively,// removing the leftmost matching pattern on each iteration,// until no matching pattern remains.// 6d) If the path ends with "<segment>/..", where <segment> is a// complete path segment not equal to "..", that// "<segment>/.." 
is removed.while (path.length !== (path = path.replace(SLASH_DOT_DOT_REGEX, '')).length) {}return path.split('').reverse().join('');},buildURLFromParts: function (parts) {return parts.scheme + parts.netLoc + parts.path + parts.params + parts.query + parts.fragment;}};module.exports = URLToolkit;})();});var DEFAULT_LOCATION = 'http://example.com';var resolveUrl$1 = function resolveUrl(baseUrl, relativeUrl) {// return early if we don't need to resolveif (/^[a-z]+:/i.test(relativeUrl)) {return relativeUrl;} // if baseUrl is a data URI, ignore it and resolve everything relative to window.locationif (/^data:/.test(baseUrl)) {baseUrl = window.location && window.location.href || '';} // IE11 supports URL but not the URL constructor// feature detect the behavior we wantvar nativeURL = typeof window.URL === 'function';var protocolLess = /^\/\//.test(baseUrl); // remove location if window.location isn't available (i.e. we're in node)// and if baseUrl isn't an absolute urlvar removeLocation = !window.location && !/\/\//i.test(baseUrl); // if the base URL is relative then combine with the current locationif (nativeURL) {baseUrl = new window.URL(baseUrl, window.location || DEFAULT_LOCATION);} else if (!/\/\//i.test(baseUrl)) {baseUrl = urlToolkit.buildAbsoluteURL(window.location && window.location.href || '', baseUrl);}if (nativeURL) {var newUrl = new URL(relativeUrl, baseUrl); // if we're a protocol-less url, remove the protocol// and if we're location-less, remove the location// otherwise, return the url unmodifiedif (removeLocation) {return newUrl.href.slice(DEFAULT_LOCATION.length);} else if (protocolLess) {return newUrl.href.slice(newUrl.protocol.length);}return newUrl.href;}return urlToolkit.buildAbsoluteURL(baseUrl, relativeUrl);};/*** @file stream.js*//*** A lightweight readable stream implemention that handles event dispatching.** @class Stream*/var Stream = /*#__PURE__*/function () {function Stream() {this.listeners = {};}/*** Add a listener for a specified event type.** @param {string} type the event name* @param {Function} listener the callback to be invoked when an event of* the specified type occurs*/var _proto = Stream.prototype;_proto.on = function on(type, listener) {if (!this.listeners[type]) {this.listeners[type] = [];}this.listeners[type].push(listener);}/*** Remove a listener for a specified event type.** @param {string} type the event name* @param {Function} listener a function previously registered for this* type of event through `on`* @return {boolean} if we could turn it off or not*/;_proto.off = function off(type, listener) {if (!this.listeners[type]) {return false;}var index = this.listeners[type].indexOf(listener); // TODO: which is better?// In Video.js we slice listener functions// on trigger so that it does not mess up the order// while we loop through.//// Here we slice on off so that the loop in trigger// can continue using it's old reference to loop without// messing up the order.this.listeners[type] = this.listeners[type].slice(0);this.listeners[type].splice(index, 1);return index > -1;}/*** Trigger an event of the specified type on this stream. Any additional* arguments to this function are passed as parameters to event listeners.** @param {string} type the event name*/;_proto.trigger = function trigger(type) {var callbacks = this.listeners[type];if (!callbacks) {return;} // Slicing the arguments on every invocation of this method// can add a significant amount of overhead. 
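/*
 * Illustrative sketch (editor-added): the RFC 1808 style resolution implemented
 * by `urlToolkit.buildAbsoluteURL()` above, including the `alwaysNormalize`
 * behaviour its comments call out. The first two results are the ones quoted in
 * those comments; the third is an assumed relative-path case.
 */
function exampleBuildAbsoluteUrlUsage() { // hypothetical helper, never invoked here
  // Default (spec compliant): dot segments in a root-relative path are kept.
  urlToolkit.buildAbsoluteURL('http://a.com/b/cd', '/e/f/../g');
  // -> 'http://a.com/e/f/../g'

  // alwaysNormalize collapses './' and '<segment>/../' before returning.
  urlToolkit.buildAbsoluteURL('http://a.com/b/cd', '/e/f/../g', { alwaysNormalize: true });
  // -> 'http://a.com/e/g'

  // A plain relative path is resolved against the base URL's directory.
  urlToolkit.buildAbsoluteURL('http://a.com/b/cd', 'ef');
  // -> 'http://a.com/b/ef'
}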
Avoid the// intermediate object creation for the common case of a// single callback argumentif (arguments.length === 2) {var length = callbacks.length;for (var i = 0; i < length; ++i) {callbacks[i].call(this, arguments[1]);}} else {var args = Array.prototype.slice.call(arguments, 1);var _length = callbacks.length;for (var _i = 0; _i < _length; ++_i) {callbacks[_i].apply(this, args);}}}/*** Destroys the stream and cleans up.*/;_proto.dispose = function dispose() {this.listeners = {};}/*** Forwards all `data` events on this stream to the destination stream. The* destination stream should provide a method `push` to receive the data* events as they arrive.** @param {Stream} destination the stream that will receive all `data` events* @see http://nodejs.org/api/stream.html#stream_readable_pipe_destination_options*/;_proto.pipe = function pipe(destination) {this.on('data', function (data) {destination.push(data);});};return Stream;}();var atob$1 = function atob(s) {return window.atob ? window.atob(s) : Buffer.from(s, 'base64').toString('binary');};function decodeB64ToUint8Array$1(b64Text) {var decodedString = atob$1(b64Text);var array = new Uint8Array(decodedString.length);for (var i = 0; i < decodedString.length; i++) {array[i] = decodedString.charCodeAt(i);}return array;}/*! @name m3u8-parser @version 7.1.0 @license Apache-2.0 *//*** @file m3u8/line-stream.js*//*** A stream that buffers string input and generates a `data` event for each* line.** @class LineStream* @extends Stream*/class LineStream extends Stream {constructor() {super();this.buffer = '';}/*** Add new data to be parsed.** @param {string} data the text to process*/push(data) {let nextNewline;this.buffer += data;nextNewline = this.buffer.indexOf('\n');for (; nextNewline > -1; nextNewline = this.buffer.indexOf('\n')) {this.trigger('data', this.buffer.substring(0, nextNewline));this.buffer = this.buffer.substring(nextNewline + 1);}}}const TAB = String.fromCharCode(0x09);const parseByterange = function (byterangeString) {// optionally match and capture 0+ digits before `@`// optionally match and capture 0+ digits after `@`const match = /([0-9.]*)?@?([0-9.]*)?/.exec(byterangeString || '');const result = {};if (match[1]) {result.length = parseInt(match[1], 10);}if (match[2]) {result.offset = parseInt(match[2], 10);}return result;};/*** "forgiving" attribute list psuedo-grammar:* attributes -> keyvalue (',' keyvalue)** keyvalue -> key '=' value* key -> [^=]** value -> '"' [^"]* '"' | [^,]**/const attributeSeparator = function () {const key = '[^=]*';const value = '"[^"]*"|[^,]*';const keyvalue = '(?:' + key + ')=(?:' + value + ')';return new RegExp('(?:^|,)(' + keyvalue + ')');};/*** Parse attributes from a line given the separator** @param {string} attributes the attribute line to parse*/const parseAttributes$1 = function (attributes) {const result = {};if (!attributes) {return result;} // split the string using attributes as the separatorconst attrs = attributes.split(attributeSeparator());let i = attrs.length;let attr;while (i--) {// filter out unmatched portions of the stringif (attrs[i] === '') {continue;} // split the key and valueattr = /([^=]*)=(.*)/.exec(attrs[i]).slice(1); // trim whitespace and remove optional quotes around the valueattr[0] = attr[0].replace(/^\s+|\s+$/g, '');attr[1] = attr[1].replace(/^\s+|\s+$/g, '');attr[1] = attr[1].replace(/^['"](.*)['"]$/g, '$1');result[attr[0]] = attr[1];}return result;};/*** A line-level M3U8 parser event stream. 
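/*
 * Illustrative sketch (editor-added): feeding the `LineStream` above. Buffered
 * text is re-emitted one line per 'data' event, which is how the M3U8
 * ParseStream documented next consumes manifests. The manifest snippet is an
 * assumption.
 */
function exampleLineStreamUsage() { // hypothetical helper, never invoked here
  const lineStream = new LineStream();
  const lines = [];

  lineStream.on('data', line => lines.push(line));
  lineStream.push('#EXTM3U\n#EXT-X-VERSION:3\n#EXTINF:6,\n');
  lineStream.push('segment-0.ts\n');
  // lines is now ['#EXTM3U', '#EXT-X-VERSION:3', '#EXTINF:6,', 'segment-0.ts']
}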
It expects to receive input one* line at a time and performs a context-free parse of its contents. A stream* interpretation of a manifest can be useful if the manifest is expected to* be too large to fit comfortably into memory or the entirety of the input* is not immediately available. Otherwise, it's probably much easier to work* with a regular `Parser` object.** Produces `data` events with an object that captures the parser's* interpretation of the input. That object has a property `tag` that is one* of `uri`, `comment`, or `tag`. URIs only have a single additional* property, `line`, which captures the entirety of the input without* interpretation. Comments similarly have a single additional property* `text` which is the input without the leading `#`.** Tags always have a property `tagType` which is the lower-cased version of* the M3U8 directive without the `#EXT` or `#EXT-X-` prefix. For instance,* `#EXT-X-MEDIA-SEQUENCE` becomes `media-sequence` when parsed. Unrecognized* tags are given the tag type `unknown` and a single additional property* `data` with the remainder of the input.** @class ParseStream* @extends Stream*/class ParseStream extends Stream {constructor() {super();this.customParsers = [];this.tagMappers = [];}/*** Parses an additional line of input.** @param {string} line a single line of an M3U8 file to parse*/push(line) {let match;let event; // strip whitespaceline = line.trim();if (line.length === 0) {// ignore empty linesreturn;} // URIsif (line[0] !== '#') {this.trigger('data', {type: 'uri',uri: line});return;} // map tagsconst newLines = this.tagMappers.reduce((acc, mapper) => {const mappedLine = mapper(line); // skip if unchangedif (mappedLine === line) {return acc;}return acc.concat([mappedLine]);}, [line]);newLines.forEach(newLine => {for (let i = 0; i < this.customParsers.length; i++) {if (this.customParsers[i].call(this, newLine)) {return;}} // Commentsif (newLine.indexOf('#EXT') !== 0) {this.trigger('data', {type: 'comment',text: newLine.slice(1)});return;} // strip off any carriage returns here so the regex matching// doesn't have to account for them.newLine = newLine.replace('\r', ''); // Tagsmatch = /^#EXTM3U/.exec(newLine);if (match) {this.trigger('data', {type: 'tag',tagType: 'm3u'});return;}match = /^#EXTINF:([0-9\.]*)?,?(.*)?$/.exec(newLine);if (match) {event = {type: 'tag',tagType: 'inf'};if (match[1]) {event.duration = parseFloat(match[1]);}if (match[2]) {event.title = match[2];}this.trigger('data', event);return;}match = /^#EXT-X-TARGETDURATION:([0-9.]*)?/.exec(newLine);if (match) {event = {type: 'tag',tagType: 'targetduration'};if (match[1]) {event.duration = parseInt(match[1], 10);}this.trigger('data', event);return;}match = /^#EXT-X-VERSION:([0-9.]*)?/.exec(newLine);if (match) {event = {type: 'tag',tagType: 'version'};if (match[1]) {event.version = parseInt(match[1], 10);}this.trigger('data', event);return;}match = /^#EXT-X-MEDIA-SEQUENCE:(\-?[0-9.]*)?/.exec(newLine);if (match) {event = {type: 'tag',tagType: 'media-sequence'};if (match[1]) {event.number = parseInt(match[1], 10);}this.trigger('data', event);return;}match = /^#EXT-X-DISCONTINUITY-SEQUENCE:(\-?[0-9.]*)?/.exec(newLine);if (match) {event = {type: 'tag',tagType: 'discontinuity-sequence'};if (match[1]) {event.number = parseInt(match[1], 10);}this.trigger('data', event);return;}match = /^#EXT-X-PLAYLIST-TYPE:(.*)?$/.exec(newLine);if (match) {event = {type: 'tag',tagType: 'playlist-type'};if (match[1]) {event.playlistType = match[1];}this.trigger('data', event);return;}match = 
/^#EXT-X-BYTERANGE:(.*)?$/.exec(newLine);if (match) {event = _extends$1(parseByterange(match[1]), {type: 'tag',tagType: 'byterange'});this.trigger('data', event);return;}match = /^#EXT-X-ALLOW-CACHE:(YES|NO)?/.exec(newLine);if (match) {event = {type: 'tag',tagType: 'allow-cache'};if (match[1]) {event.allowed = !/NO/.test(match[1]);}this.trigger('data', event);return;}match = /^#EXT-X-MAP:(.*)$/.exec(newLine);if (match) {event = {type: 'tag',tagType: 'map'};if (match[1]) {const attributes = parseAttributes$1(match[1]);if (attributes.URI) {event.uri = attributes.URI;}if (attributes.BYTERANGE) {event.byterange = parseByterange(attributes.BYTERANGE);}}this.trigger('data', event);return;}match = /^#EXT-X-STREAM-INF:(.*)$/.exec(newLine);if (match) {event = {type: 'tag',tagType: 'stream-inf'};if (match[1]) {event.attributes = parseAttributes$1(match[1]);if (event.attributes.RESOLUTION) {const split = event.attributes.RESOLUTION.split('x');const resolution = {};if (split[0]) {resolution.width = parseInt(split[0], 10);}if (split[1]) {resolution.height = parseInt(split[1], 10);}event.attributes.RESOLUTION = resolution;}if (event.attributes.BANDWIDTH) {event.attributes.BANDWIDTH = parseInt(event.attributes.BANDWIDTH, 10);}if (event.attributes['FRAME-RATE']) {event.attributes['FRAME-RATE'] = parseFloat(event.attributes['FRAME-RATE']);}if (event.attributes['PROGRAM-ID']) {event.attributes['PROGRAM-ID'] = parseInt(event.attributes['PROGRAM-ID'], 10);}}this.trigger('data', event);return;}match = /^#EXT-X-MEDIA:(.*)$/.exec(newLine);if (match) {event = {type: 'tag',tagType: 'media'};if (match[1]) {event.attributes = parseAttributes$1(match[1]);}this.trigger('data', event);return;}match = /^#EXT-X-ENDLIST/.exec(newLine);if (match) {this.trigger('data', {type: 'tag',tagType: 'endlist'});return;}match = /^#EXT-X-DISCONTINUITY/.exec(newLine);if (match) {this.trigger('data', {type: 'tag',tagType: 'discontinuity'});return;}match = /^#EXT-X-PROGRAM-DATE-TIME:(.*)$/.exec(newLine);if (match) {event = {type: 'tag',tagType: 'program-date-time'};if (match[1]) {event.dateTimeString = match[1];event.dateTimeObject = new Date(match[1]);}this.trigger('data', event);return;}match = /^#EXT-X-KEY:(.*)$/.exec(newLine);if (match) {event = {type: 'tag',tagType: 'key'};if (match[1]) {event.attributes = parseAttributes$1(match[1]); // parse the IV string into a Uint32Arrayif (event.attributes.IV) {if (event.attributes.IV.substring(0, 2).toLowerCase() === '0x') {event.attributes.IV = event.attributes.IV.substring(2);}event.attributes.IV = event.attributes.IV.match(/.{8}/g);event.attributes.IV[0] = parseInt(event.attributes.IV[0], 16);event.attributes.IV[1] = parseInt(event.attributes.IV[1], 16);event.attributes.IV[2] = parseInt(event.attributes.IV[2], 16);event.attributes.IV[3] = parseInt(event.attributes.IV[3], 16);event.attributes.IV = new Uint32Array(event.attributes.IV);}}this.trigger('data', event);return;}match = /^#EXT-X-START:(.*)$/.exec(newLine);if (match) {event = {type: 'tag',tagType: 'start'};if (match[1]) {event.attributes = parseAttributes$1(match[1]);event.attributes['TIME-OFFSET'] = parseFloat(event.attributes['TIME-OFFSET']);event.attributes.PRECISE = /YES/.test(event.attributes.PRECISE);}this.trigger('data', event);return;}match = /^#EXT-X-CUE-OUT-CONT:(.*)?$/.exec(newLine);if (match) {event = {type: 'tag',tagType: 'cue-out-cont'};if (match[1]) {event.data = match[1];} else {event.data = '';}this.trigger('data', event);return;}match = /^#EXT-X-CUE-OUT:(.*)?$/.exec(newLine);if (match) {event = {type: 
'tag',tagType: 'cue-out'};if (match[1]) {event.data = match[1];} else {event.data = '';}this.trigger('data', event);return;}match = /^#EXT-X-CUE-IN:(.*)?$/.exec(newLine);if (match) {event = {type: 'tag',tagType: 'cue-in'};if (match[1]) {event.data = match[1];} else {event.data = '';}this.trigger('data', event);return;}match = /^#EXT-X-SKIP:(.*)$/.exec(newLine);if (match && match[1]) {event = {type: 'tag',tagType: 'skip'};event.attributes = parseAttributes$1(match[1]);if (event.attributes.hasOwnProperty('SKIPPED-SEGMENTS')) {event.attributes['SKIPPED-SEGMENTS'] = parseInt(event.attributes['SKIPPED-SEGMENTS'], 10);}if (event.attributes.hasOwnProperty('RECENTLY-REMOVED-DATERANGES')) {event.attributes['RECENTLY-REMOVED-DATERANGES'] = event.attributes['RECENTLY-REMOVED-DATERANGES'].split(TAB);}this.trigger('data', event);return;}match = /^#EXT-X-PART:(.*)$/.exec(newLine);if (match && match[1]) {event = {type: 'tag',tagType: 'part'};event.attributes = parseAttributes$1(match[1]);['DURATION'].forEach(function (key) {if (event.attributes.hasOwnProperty(key)) {event.attributes[key] = parseFloat(event.attributes[key]);}});['INDEPENDENT', 'GAP'].forEach(function (key) {if (event.attributes.hasOwnProperty(key)) {event.attributes[key] = /YES/.test(event.attributes[key]);}});if (event.attributes.hasOwnProperty('BYTERANGE')) {event.attributes.byterange = parseByterange(event.attributes.BYTERANGE);}this.trigger('data', event);return;}match = /^#EXT-X-SERVER-CONTROL:(.*)$/.exec(newLine);if (match && match[1]) {event = {type: 'tag',tagType: 'server-control'};event.attributes = parseAttributes$1(match[1]);['CAN-SKIP-UNTIL', 'PART-HOLD-BACK', 'HOLD-BACK'].forEach(function (key) {if (event.attributes.hasOwnProperty(key)) {event.attributes[key] = parseFloat(event.attributes[key]);}});['CAN-SKIP-DATERANGES', 'CAN-BLOCK-RELOAD'].forEach(function (key) {if (event.attributes.hasOwnProperty(key)) {event.attributes[key] = /YES/.test(event.attributes[key]);}});this.trigger('data', event);return;}match = /^#EXT-X-PART-INF:(.*)$/.exec(newLine);if (match && match[1]) {event = {type: 'tag',tagType: 'part-inf'};event.attributes = parseAttributes$1(match[1]);['PART-TARGET'].forEach(function (key) {if (event.attributes.hasOwnProperty(key)) {event.attributes[key] = parseFloat(event.attributes[key]);}});this.trigger('data', event);return;}match = /^#EXT-X-PRELOAD-HINT:(.*)$/.exec(newLine);if (match && match[1]) {event = {type: 'tag',tagType: 'preload-hint'};event.attributes = parseAttributes$1(match[1]);['BYTERANGE-START', 'BYTERANGE-LENGTH'].forEach(function (key) {if (event.attributes.hasOwnProperty(key)) {event.attributes[key] = parseInt(event.attributes[key], 10);const subkey = key === 'BYTERANGE-LENGTH' ? 
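/**
 * Illustrative sketch of the `data` event the EXT-X-PART branch above emits;
 * the attribute values are made up and the attribute parsing itself is done by
 * `parseAttributes$1` (defined earlier in this file), so the output is approximate:
 *
 * ```js
 * const ps = new ParseStream();
 *
 * ps.on('data', e => console.log(e.tagType, e.attributes));
 * ps.push('#EXT-X-PART:DURATION=1.004,URI="part0.mp4",INDEPENDENT=YES');
 * // -> roughly: 'part' { DURATION: 1.004, URI: 'part0.mp4', INDEPENDENT: true }
 * ```
 */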
'length' : 'offset';event.attributes.byterange = event.attributes.byterange || {};event.attributes.byterange[subkey] = event.attributes[key]; // only keep the parsed byterange object.delete event.attributes[key];}});this.trigger('data', event);return;}match = /^#EXT-X-RENDITION-REPORT:(.*)$/.exec(newLine);if (match && match[1]) {event = {type: 'tag',tagType: 'rendition-report'};event.attributes = parseAttributes$1(match[1]);['LAST-MSN', 'LAST-PART'].forEach(function (key) {if (event.attributes.hasOwnProperty(key)) {event.attributes[key] = parseInt(event.attributes[key], 10);}});this.trigger('data', event);return;}match = /^#EXT-X-DATERANGE:(.*)$/.exec(newLine);if (match && match[1]) {event = {type: 'tag',tagType: 'daterange'};event.attributes = parseAttributes$1(match[1]);['ID', 'CLASS'].forEach(function (key) {if (event.attributes.hasOwnProperty(key)) {event.attributes[key] = String(event.attributes[key]);}});['START-DATE', 'END-DATE'].forEach(function (key) {if (event.attributes.hasOwnProperty(key)) {event.attributes[key] = new Date(event.attributes[key]);}});['DURATION', 'PLANNED-DURATION'].forEach(function (key) {if (event.attributes.hasOwnProperty(key)) {event.attributes[key] = parseFloat(event.attributes[key]);}});['END-ON-NEXT'].forEach(function (key) {if (event.attributes.hasOwnProperty(key)) {event.attributes[key] = /YES/i.test(event.attributes[key]);}});['SCTE35-CMD', ' SCTE35-OUT', 'SCTE35-IN'].forEach(function (key) {if (event.attributes.hasOwnProperty(key)) {event.attributes[key] = event.attributes[key].toString(16);}});const clientAttributePattern = /^X-([A-Z]+-)+[A-Z]+$/;for (const key in event.attributes) {if (!clientAttributePattern.test(key)) {continue;}const isHexaDecimal = /[0-9A-Fa-f]{6}/g.test(event.attributes[key]);const isDecimalFloating = /^\d+(\.\d+)?$/.test(event.attributes[key]);event.attributes[key] = isHexaDecimal ? event.attributes[key].toString(16) : isDecimalFloating ? 
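/**
 * Illustrative sketch of how client-defined X- attributes on EXT-X-DATERANGE
 * are coerced by the pattern checks above (the attribute name here is made up):
 *
 * ```js
 * const clientAttributePattern = /^X-([A-Z]+-)+[A-Z]+$/;
 *
 * console.log(clientAttributePattern.test('X-COM-EXAMPLE-AD-ID')); // true
 * // a purely numeric value such as '12.5' passes the decimal test and is
 * // stored as parseFloat('12.5') === 12.5; other values stay strings
 * console.log(/^\d+(\.\d+)?$/.test('12.5')); // true
 * ```
 */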
parseFloat(event.attributes[key]) : String(event.attributes[key]);}this.trigger('data', event);return;}match = /^#EXT-X-INDEPENDENT-SEGMENTS/.exec(newLine);if (match) {this.trigger('data', {type: 'tag',tagType: 'independent-segments'});return;}match = /^#EXT-X-CONTENT-STEERING:(.*)$/.exec(newLine);if (match) {event = {type: 'tag',tagType: 'content-steering'};event.attributes = parseAttributes$1(match[1]);this.trigger('data', event);return;} // unknown tag typethis.trigger('data', {type: 'tag',data: newLine.slice(4)});});}/*** Add a parser for custom headers** @param {Object} options a map of options for the added parser* @param {RegExp} options.expression a regular expression to match the custom header* @param {string} options.customType the custom type to register to the output* @param {Function} [options.dataParser] function to parse the line into an object* @param {boolean} [options.segment] should tag data be attached to the segment object*/addParser({expression,customType,dataParser,segment}) {if (typeof dataParser !== 'function') {dataParser = line => line;}this.customParsers.push(line => {const match = expression.exec(line);if (match) {this.trigger('data', {type: 'custom',data: dataParser(line),customType,segment});return true;}});}/*** Add a custom header mapper** @param {Object} options* @param {RegExp} options.expression a regular expression to match the custom header* @param {Function} options.map function to translate tag into a different tag*/addTagMapper({expression,map}) {const mapFn = line => {if (expression.test(line)) {return map(line);}return line;};this.tagMappers.push(mapFn);}}const camelCase = str => str.toLowerCase().replace(/-(\w)/g, a => a[1].toUpperCase());const camelCaseKeys = function (attributes) {const result = {};Object.keys(attributes).forEach(function (key) {result[camelCase(key)] = attributes[key];});return result;}; // set SERVER-CONTROL hold back based upon targetDuration and partTargetDuration// we need this helper because defaults are based upon targetDuration and// partTargetDuration being set, but they may not be if SERVER-CONTROL appears before// target durations are set.const setHoldBack = function (manifest) {const {serverControl,targetDuration,partTargetDuration} = manifest;if (!serverControl) {return;}const tag = '#EXT-X-SERVER-CONTROL';const hb = 'holdBack';const phb = 'partHoldBack';const minTargetDuration = targetDuration && targetDuration * 3;const minPartDuration = partTargetDuration && partTargetDuration * 2;if (targetDuration && !serverControl.hasOwnProperty(hb)) {serverControl[hb] = minTargetDuration;this.trigger('info', {message: `${tag} defaulting HOLD-BACK to targetDuration * 3 (${minTargetDuration}).`});}if (minTargetDuration && serverControl[hb] < minTargetDuration) {this.trigger('warn', {message: `${tag} clamping HOLD-BACK (${serverControl[hb]}) to targetDuration * 3 (${minTargetDuration})`});serverControl[hb] = minTargetDuration;} // default no part hold back to part target duration * 3if (partTargetDuration && !serverControl.hasOwnProperty(phb)) {serverControl[phb] = partTargetDuration * 3;this.trigger('info', {message: `${tag} defaulting PART-HOLD-BACK to partTargetDuration * 3 (${serverControl[phb]}).`});} // if part hold back is too small default it to part target duration * 2if (partTargetDuration && serverControl[phb] < minPartDuration) {this.trigger('warn', {message: `${tag} clamping PART-HOLD-BACK (${serverControl[phb]}) to partTargetDuration * 2 (${minPartDuration}).`});serverControl[phb] = minPartDuration;}};/*** A 
parser for M3U8 files. The current interpretation of the input is* exposed as a property `manifest` on parser objects. It's just two lines to* create and parse a manifest once you have the contents available as a string:** ```js* var parser = new m3u8.Parser();* parser.push(xhr.responseText);* ```** New input can later be applied to update the manifest object by calling* `push` again.** The parser attempts to create a usable manifest object even if the* underlying input is somewhat nonsensical. It emits `info` and `warning`* events during the parse if it encounters input that seems invalid or* requires some property of the manifest object to be defaulted.** @class Parser* @extends Stream*/class Parser extends Stream {constructor() {super();this.lineStream = new LineStream();this.parseStream = new ParseStream();this.lineStream.pipe(this.parseStream);this.lastProgramDateTime = null;/* eslint-disable consistent-this */const self = this;/* eslint-enable consistent-this */const uris = [];let currentUri = {}; // if specified, the active EXT-X-MAP definitionlet currentMap; // if specified, the active decryption keylet key;let hasParts = false;const noop = function () {};const defaultMediaGroups = {'AUDIO': {},'VIDEO': {},'CLOSED-CAPTIONS': {},'SUBTITLES': {}}; // This is the Widevine UUID from DASH IF IOP. The same exact string is// used in MPDs with Widevine encrypted streams.const widevineUuid = 'urn:uuid:edef8ba9-79d6-4ace-a3c8-27dcd51d21ed'; // group segments into numbered timelines delineated by discontinuitieslet currentTimeline = 0; // the manifest is empty until the parse stream begins delivering datathis.manifest = {allowCache: true,discontinuityStarts: [],dateRanges: [],segments: []}; // keep track of the last seen segment's byte range end, as segments are not required// to provide the offset, in which case it defaults to the next byte after the// previous segmentlet lastByterangeEnd = 0; // keep track of the last seen part's byte range end.let lastPartByterangeEnd = 0;const dateRangeTags = {};this.on('end', () => {// only add preloadSegment if we don't yet have a uri for it.// and we actually have parts/preloadHintsif (currentUri.uri || !currentUri.parts && !currentUri.preloadHints) {return;}if (!currentUri.map && currentMap) {currentUri.map = currentMap;}if (!currentUri.key && key) {currentUri.key = key;}if (!currentUri.timeline && typeof currentTimeline === 'number') {currentUri.timeline = currentTimeline;}this.manifest.preloadSegment = currentUri;}); // update the manifest with the m3u8 entry from the parse streamthis.parseStream.on('data', function (entry) {let mediaGroup;let rendition;({tag() {// switch based on the tag type(({version() {if (entry.version) {this.manifest.version = entry.version;}},'allow-cache'() {this.manifest.allowCache = entry.allowed;if (!('allowed' in entry)) {this.trigger('info', {message: 'defaulting allowCache to YES'});this.manifest.allowCache = true;}},byterange() {const byterange = {};if ('length' in entry) {currentUri.byterange = byterange;byterange.length = entry.length;if (!('offset' in entry)) {/** From the latest spec (as of this writing):* https://tools.ietf.org/html/draft-pantos-http-live-streaming-23#section-4.3.2.2** Same text since EXT-X-BYTERANGE's introduction in draft 7:* https://tools.ietf.org/html/draft-pantos-http-live-streaming-07#section-3.3.1)** "If o [offset] is not present, the sub-range begins at the next byte* following the sub-range of the previous media segment."*/entry.offset = lastByterangeEnd;}}if ('offset' in entry) 
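/**
 * A slightly fuller sketch than the two-line example above, assuming the
 * `Parser` class defined here is in scope (the example above refers to it as
 * `m3u8.Parser`); the exact segment shape shown is approximate:
 *
 * ```js
 * const parser = new Parser();
 *
 * parser.push([
 *   '#EXTM3U',
 *   '#EXT-X-TARGETDURATION:10',
 *   '#EXT-X-MEDIA-SEQUENCE:0',
 *   '#EXTINF:10,',
 *   'segment-0.ts',
 *   '#EXT-X-ENDLIST'
 * ].join('\n'));
 * parser.end();
 *
 * console.log(parser.manifest.targetDuration); // 10
 * console.log(parser.manifest.endList);        // true
 * console.log(parser.manifest.segments[0]);
 * // -> roughly { uri: 'segment-0.ts', duration: 10, timeline: 0 }
 * ```
 */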
{currentUri.byterange = byterange;byterange.offset = entry.offset;}lastByterangeEnd = byterange.offset + byterange.length;},endlist() {this.manifest.endList = true;},inf() {if (!('mediaSequence' in this.manifest)) {this.manifest.mediaSequence = 0;this.trigger('info', {message: 'defaulting media sequence to zero'});}if (!('discontinuitySequence' in this.manifest)) {this.manifest.discontinuitySequence = 0;this.trigger('info', {message: 'defaulting discontinuity sequence to zero'});}if (entry.title) {currentUri.title = entry.title;}if (entry.duration > 0) {currentUri.duration = entry.duration;}if (entry.duration === 0) {currentUri.duration = 0.01;this.trigger('info', {message: 'updating zero segment duration to a small value'});}this.manifest.segments = uris;},key() {if (!entry.attributes) {this.trigger('warn', {message: 'ignoring key declaration without attribute list'});return;} // clear the active encryption keyif (entry.attributes.METHOD === 'NONE') {key = null;return;}if (!entry.attributes.URI) {this.trigger('warn', {message: 'ignoring key declaration without URI'});return;}if (entry.attributes.KEYFORMAT === 'com.apple.streamingkeydelivery') {this.manifest.contentProtection = this.manifest.contentProtection || {}; // TODO: add full support for this.this.manifest.contentProtection['com.apple.fps.1_0'] = {attributes: entry.attributes};return;}if (entry.attributes.KEYFORMAT === 'com.microsoft.playready') {this.manifest.contentProtection = this.manifest.contentProtection || {}; // TODO: add full support for this.this.manifest.contentProtection['com.microsoft.playready'] = {uri: entry.attributes.URI};return;} // check if the content is encrypted for Widevine// Widevine/HLS spec: https://storage.googleapis.com/wvdocs/Widevine_DRM_HLS.pdfif (entry.attributes.KEYFORMAT === widevineUuid) {const VALID_METHODS = ['SAMPLE-AES', 'SAMPLE-AES-CTR', 'SAMPLE-AES-CENC'];if (VALID_METHODS.indexOf(entry.attributes.METHOD) === -1) {this.trigger('warn', {message: 'invalid key method provided for Widevine'});return;}if (entry.attributes.METHOD === 'SAMPLE-AES-CENC') {this.trigger('warn', {message: 'SAMPLE-AES-CENC is deprecated, please use SAMPLE-AES-CTR instead'});}if (entry.attributes.URI.substring(0, 23) !== 'data:text/plain;base64,') {this.trigger('warn', {message: 'invalid key URI provided for Widevine'});return;}if (!(entry.attributes.KEYID && entry.attributes.KEYID.substring(0, 2) === '0x')) {this.trigger('warn', {message: 'invalid key ID provided for Widevine'});return;} // if Widevine key attributes are valid, store them as `contentProtection`// on the manifest to emulate Widevine tag structure in a DASH mpdthis.manifest.contentProtection = this.manifest.contentProtection || {};this.manifest.contentProtection['com.widevine.alpha'] = {attributes: {schemeIdUri: entry.attributes.KEYFORMAT,// remove '0x' from the key id stringkeyId: entry.attributes.KEYID.substring(2)},// decode the base64-encoded PSSH boxpssh: decodeB64ToUint8Array$1(entry.attributes.URI.split(',')[1])};return;}if (!entry.attributes.METHOD) {this.trigger('warn', {message: 'defaulting key method to AES-128'});} // setup an encryption key for upcoming segmentskey = {method: entry.attributes.METHOD || 'AES-128',uri: entry.attributes.URI};if (typeof entry.attributes.IV !== 'undefined') {key.iv = entry.attributes.IV;}},'media-sequence'() {if (!isFinite(entry.number)) {this.trigger('warn', {message: 'ignoring invalid media sequence: ' + entry.number});return;}this.manifest.mediaSequence = entry.number;},'discontinuity-sequence'() {if 
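/**
 * Illustrative shape of `manifest.contentProtection` after a well-formed
 * Widevine EXT-X-KEY is handled by the branch above (the values are made up):
 *
 * ```js
 * const contentProtection = {
 *   'com.widevine.alpha': {
 *     attributes: {
 *       schemeIdUri: 'urn:uuid:edef8ba9-79d6-4ace-a3c8-27dcd51d21ed',
 *       keyId: '1234567890abcdef1234567890abcdef' // KEYID with '0x' stripped
 *     },
 *     pssh: new Uint8Array() // decoded from the base64 payload of the data: URI
 *   }
 * };
 * ```
 */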
(!isFinite(entry.number)) {this.trigger('warn', {message: 'ignoring invalid discontinuity sequence: ' + entry.number});return;}this.manifest.discontinuitySequence = entry.number;currentTimeline = entry.number;},'playlist-type'() {if (!/VOD|EVENT/.test(entry.playlistType)) {this.trigger('warn', {message: 'ignoring unknown playlist type: ' + entry.playlist});return;}this.manifest.playlistType = entry.playlistType;},map() {currentMap = {};if (entry.uri) {currentMap.uri = entry.uri;}if (entry.byterange) {currentMap.byterange = entry.byterange;}if (key) {currentMap.key = key;}},'stream-inf'() {this.manifest.playlists = uris;this.manifest.mediaGroups = this.manifest.mediaGroups || defaultMediaGroups;if (!entry.attributes) {this.trigger('warn', {message: 'ignoring empty stream-inf attributes'});return;}if (!currentUri.attributes) {currentUri.attributes = {};}_extends$1(currentUri.attributes, entry.attributes);},media() {this.manifest.mediaGroups = this.manifest.mediaGroups || defaultMediaGroups;if (!(entry.attributes && entry.attributes.TYPE && entry.attributes['GROUP-ID'] && entry.attributes.NAME)) {this.trigger('warn', {message: 'ignoring incomplete or missing media group'});return;} // find the media group, creating defaults as necessaryconst mediaGroupType = this.manifest.mediaGroups[entry.attributes.TYPE];mediaGroupType[entry.attributes['GROUP-ID']] = mediaGroupType[entry.attributes['GROUP-ID']] || {};mediaGroup = mediaGroupType[entry.attributes['GROUP-ID']]; // collect the rendition metadatarendition = {default: /yes/i.test(entry.attributes.DEFAULT)};if (rendition.default) {rendition.autoselect = true;} else {rendition.autoselect = /yes/i.test(entry.attributes.AUTOSELECT);}if (entry.attributes.LANGUAGE) {rendition.language = entry.attributes.LANGUAGE;}if (entry.attributes.URI) {rendition.uri = entry.attributes.URI;}if (entry.attributes['INSTREAM-ID']) {rendition.instreamId = entry.attributes['INSTREAM-ID'];}if (entry.attributes.CHARACTERISTICS) {rendition.characteristics = entry.attributes.CHARACTERISTICS;}if (entry.attributes.FORCED) {rendition.forced = /yes/i.test(entry.attributes.FORCED);} // insert the new renditionmediaGroup[entry.attributes.NAME] = rendition;},discontinuity() {currentTimeline += 1;currentUri.discontinuity = true;this.manifest.discontinuityStarts.push(uris.length);},'program-date-time'() {if (typeof this.manifest.dateTimeString === 'undefined') {// PROGRAM-DATE-TIME is a media-segment tag, but for backwards// compatibility, we add the first occurence of the PROGRAM-DATE-TIME tag// to the manifest object// TODO: Consider removing this in future major versionthis.manifest.dateTimeString = entry.dateTimeString;this.manifest.dateTimeObject = entry.dateTimeObject;}currentUri.dateTimeString = entry.dateTimeString;currentUri.dateTimeObject = entry.dateTimeObject;const {lastProgramDateTime} = this;this.lastProgramDateTime = new Date(entry.dateTimeString).getTime(); // We should extrapolate Program Date Time backward only during first program date time occurrence.// Once we have at least one program date time point, we can always extrapolate it forward using lastProgramDateTime reference.if (lastProgramDateTime === null) {// Extrapolate Program Date Time backward// Since it is first program date time occurrence we're assuming that// all this.manifest.segments have no program date time infothis.manifest.segments.reduceRight((programDateTime, segment) => {segment.programDateTime = programDateTime - segment.duration * 1000;return segment.programDateTime;}, 
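/**
 * Illustrative sketch of the backward extrapolation performed above when the
 * first EXT-X-PROGRAM-DATE-TIME appears after some segments were already parsed:
 *
 * ```js
 * const firstTagTime = new Date('2024-01-01T00:00:30.000Z').getTime();
 * const segments = [{ duration: 10 }, { duration: 10 }, { duration: 10 }];
 *
 * segments.reduceRight((pdt, segment) => {
 *   segment.programDateTime = pdt - segment.duration * 1000;
 *   return segment.programDateTime;
 * }, firstTagTime);
 *
 * console.log(segments.map(s => new Date(s.programDateTime).toISOString()));
 * // -> ['2024-01-01T00:00:00.000Z', '2024-01-01T00:00:10.000Z', '2024-01-01T00:00:20.000Z']
 * ```
 */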
this.lastProgramDateTime);}},targetduration() {if (!isFinite(entry.duration) || entry.duration < 0) {this.trigger('warn', {message: 'ignoring invalid target duration: ' + entry.duration});return;}this.manifest.targetDuration = entry.duration;setHoldBack.call(this, this.manifest);},start() {if (!entry.attributes || isNaN(entry.attributes['TIME-OFFSET'])) {this.trigger('warn', {message: 'ignoring start declaration without appropriate attribute list'});return;}this.manifest.start = {timeOffset: entry.attributes['TIME-OFFSET'],precise: entry.attributes.PRECISE};},'cue-out'() {currentUri.cueOut = entry.data;},'cue-out-cont'() {currentUri.cueOutCont = entry.data;},'cue-in'() {currentUri.cueIn = entry.data;},'skip'() {this.manifest.skip = camelCaseKeys(entry.attributes);this.warnOnMissingAttributes_('#EXT-X-SKIP', entry.attributes, ['SKIPPED-SEGMENTS']);},'part'() {hasParts = true; // parts are always specifed before a segmentconst segmentIndex = this.manifest.segments.length;const part = camelCaseKeys(entry.attributes);currentUri.parts = currentUri.parts || [];currentUri.parts.push(part);if (part.byterange) {if (!part.byterange.hasOwnProperty('offset')) {part.byterange.offset = lastPartByterangeEnd;}lastPartByterangeEnd = part.byterange.offset + part.byterange.length;}const partIndex = currentUri.parts.length - 1;this.warnOnMissingAttributes_(`#EXT-X-PART #${partIndex} for segment #${segmentIndex}`, entry.attributes, ['URI', 'DURATION']);if (this.manifest.renditionReports) {this.manifest.renditionReports.forEach((r, i) => {if (!r.hasOwnProperty('lastPart')) {this.trigger('warn', {message: `#EXT-X-RENDITION-REPORT #${i} lacks required attribute(s): LAST-PART`});}});}},'server-control'() {const attrs = this.manifest.serverControl = camelCaseKeys(entry.attributes);if (!attrs.hasOwnProperty('canBlockReload')) {attrs.canBlockReload = false;this.trigger('info', {message: '#EXT-X-SERVER-CONTROL defaulting CAN-BLOCK-RELOAD to false'});}setHoldBack.call(this, this.manifest);if (attrs.canSkipDateranges && !attrs.hasOwnProperty('canSkipUntil')) {this.trigger('warn', {message: '#EXT-X-SERVER-CONTROL lacks required attribute CAN-SKIP-UNTIL which is required when CAN-SKIP-DATERANGES is set'});}},'preload-hint'() {// parts are always specifed before a segmentconst segmentIndex = this.manifest.segments.length;const hint = camelCaseKeys(entry.attributes);const isPart = hint.type && hint.type === 'PART';currentUri.preloadHints = currentUri.preloadHints || [];currentUri.preloadHints.push(hint);if (hint.byterange) {if (!hint.byterange.hasOwnProperty('offset')) {// use last part byterange end or zero if not a part.hint.byterange.offset = isPart ? 
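/**
 * Illustrative sketch of the HOLD-BACK defaulting applied by `setHoldBack`
 * (defined above) when these handlers run; assumes `Parser` is in scope:
 *
 * ```js
 * const p = new Parser();
 *
 * p.push([
 *   '#EXTM3U',
 *   '#EXT-X-TARGETDURATION:6',
 *   '#EXT-X-SERVER-CONTROL:CAN-BLOCK-RELOAD=YES'
 * ].join('\n'));
 * p.end();
 *
 * console.log(p.manifest.serverControl);
 * // -> roughly { canBlockReload: true, holdBack: 18 }
 * //    (HOLD-BACK defaults to targetDuration * 3)
 * ```
 */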
lastPartByterangeEnd : 0;if (isPart) {lastPartByterangeEnd = hint.byterange.offset + hint.byterange.length;}}}const index = currentUri.preloadHints.length - 1;this.warnOnMissingAttributes_(`#EXT-X-PRELOAD-HINT #${index} for segment #${segmentIndex}`, entry.attributes, ['TYPE', 'URI']);if (!hint.type) {return;} // search through all preload hints except for the current one for// a duplicate type.for (let i = 0; i < currentUri.preloadHints.length - 1; i++) {const otherHint = currentUri.preloadHints[i];if (!otherHint.type) {continue;}if (otherHint.type === hint.type) {this.trigger('warn', {message: `#EXT-X-PRELOAD-HINT #${index} for segment #${segmentIndex} has the same TYPE ${hint.type} as preload hint #${i}`});}}},'rendition-report'() {const report = camelCaseKeys(entry.attributes);this.manifest.renditionReports = this.manifest.renditionReports || [];this.manifest.renditionReports.push(report);const index = this.manifest.renditionReports.length - 1;const required = ['LAST-MSN', 'URI'];if (hasParts) {required.push('LAST-PART');}this.warnOnMissingAttributes_(`#EXT-X-RENDITION-REPORT #${index}`, entry.attributes, required);},'part-inf'() {this.manifest.partInf = camelCaseKeys(entry.attributes);this.warnOnMissingAttributes_('#EXT-X-PART-INF', entry.attributes, ['PART-TARGET']);if (this.manifest.partInf.partTarget) {this.manifest.partTargetDuration = this.manifest.partInf.partTarget;}setHoldBack.call(this, this.manifest);},'daterange'() {this.manifest.dateRanges.push(camelCaseKeys(entry.attributes));const index = this.manifest.dateRanges.length - 1;this.warnOnMissingAttributes_(`#EXT-X-DATERANGE #${index}`, entry.attributes, ['ID', 'START-DATE']);const dateRange = this.manifest.dateRanges[index];if (dateRange.endDate && dateRange.startDate && new Date(dateRange.endDate) < new Date(dateRange.startDate)) {this.trigger('warn', {message: 'EXT-X-DATERANGE END-DATE must be equal to or later than the value of the START-DATE'});}if (dateRange.duration && dateRange.duration < 0) {this.trigger('warn', {message: 'EXT-X-DATERANGE DURATION must not be negative'});}if (dateRange.plannedDuration && dateRange.plannedDuration < 0) {this.trigger('warn', {message: 'EXT-X-DATERANGE PLANNED-DURATION must not be negative'});}const endOnNextYes = !!dateRange.endOnNext;if (endOnNextYes && !dateRange.class) {this.trigger('warn', {message: 'EXT-X-DATERANGE with an END-ON-NEXT=YES attribute must have a CLASS attribute'});}if (endOnNextYes && (dateRange.duration || dateRange.endDate)) {this.trigger('warn', {message: 'EXT-X-DATERANGE with an END-ON-NEXT=YES attribute must not contain DURATION or END-DATE attributes'});}if (dateRange.duration && dateRange.endDate) {const startDate = dateRange.startDate;const newDateInSeconds = startDate.getTime() + dateRange.duration * 1000;this.manifest.dateRanges[index].endDate = new Date(newDateInSeconds);}if (!dateRangeTags[dateRange.id]) {dateRangeTags[dateRange.id] = dateRange;} else {for (const attribute in dateRangeTags[dateRange.id]) {if (!!dateRange[attribute] && JSON.stringify(dateRangeTags[dateRange.id][attribute]) !== JSON.stringify(dateRange[attribute])) {this.trigger('warn', {message: 'EXT-X-DATERANGE tags with the same ID in a playlist must have the same attributes values'});break;}} // if tags with the same ID do not have conflicting attributes, merge themconst dateRangeWithSameId = this.manifest.dateRanges.findIndex(dateRangeToFind => dateRangeToFind.id === dateRange.id);this.manifest.dateRanges[dateRangeWithSameId] = 
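/**
 * Illustrative sketch of the END-DATE computation above when an EXT-X-DATERANGE
 * carries DURATION but no END-DATE:
 *
 * ```js
 * const startDate = new Date('2024-01-01T00:00:00.000Z');
 * const duration = 30; // seconds
 * const endDate = new Date(startDate.getTime() + duration * 1000);
 *
 * console.log(endDate.toISOString()); // 2024-01-01T00:00:30.000Z
 * ```
 */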
_extends$1(this.manifest.dateRanges[dateRangeWithSameId], dateRange);dateRangeTags[dateRange.id] = _extends$1(dateRangeTags[dateRange.id], dateRange); // after merging, delete the duplicate dateRange that was added lastthis.manifest.dateRanges.pop();}},'independent-segments'() {this.manifest.independentSegments = true;},'content-steering'() {this.manifest.contentSteering = camelCaseKeys(entry.attributes);this.warnOnMissingAttributes_('#EXT-X-CONTENT-STEERING', entry.attributes, ['SERVER-URI']);}})[entry.tagType] || noop).call(self);},uri() {currentUri.uri = entry.uri;uris.push(currentUri); // if no explicit duration was declared, use the target durationif (this.manifest.targetDuration && !('duration' in currentUri)) {this.trigger('warn', {message: 'defaulting segment duration to the target duration'});currentUri.duration = this.manifest.targetDuration;} // annotate with encryption information, if necessaryif (key) {currentUri.key = key;}currentUri.timeline = currentTimeline; // annotate with initialization segment information, if necessaryif (currentMap) {currentUri.map = currentMap;} // reset the last byterange end as it needs to be 0 between partslastPartByterangeEnd = 0; // Once we have at least one program date time we can always extrapolate it forwardif (this.lastProgramDateTime !== null) {currentUri.programDateTime = this.lastProgramDateTime;this.lastProgramDateTime += currentUri.duration * 1000;} // prepare for the next URIcurrentUri = {};},comment() {// comments are not important for playback},custom() {// if this is segment-level data attach the output to the segmentif (entry.segment) {currentUri.custom = currentUri.custom || {};currentUri.custom[entry.customType] = entry.data; // if this is manifest-level data attach to the top level manifest object} else {this.manifest.custom = this.manifest.custom || {};this.manifest.custom[entry.customType] = entry.data;}}})[entry.type].call(self);});}warnOnMissingAttributes_(identifier, attributes, required) {const missing = [];required.forEach(function (key) {if (!attributes.hasOwnProperty(key)) {missing.push(key);}});if (missing.length) {this.trigger('warn', {message: `${identifier} lacks required attribute(s): ${missing.join(', ')}`});}}/*** Parse the input string and update the manifest object.** @param {string} chunk a potentially incomplete portion of the manifest*/push(chunk) {this.lineStream.push(chunk);}/*** Flush any remaining input. 
This can be handy if the last line of an M3U8* manifest did not contain a trailing newline but the file has been* completely received.*/end() {// flush any buffered inputthis.lineStream.push('\n');if (this.manifest.dateRanges.length && this.lastProgramDateTime === null) {this.trigger('warn', {message: 'A playlist with EXT-X-DATERANGE tag must contain atleast one EXT-X-PROGRAM-DATE-TIME tag'});}this.lastProgramDateTime = null;this.trigger('end');}/*** Add an additional parser for non-standard tags** @param {Object} options a map of options for the added parser* @param {RegExp} options.expression a regular expression to match the custom header* @param {string} options.customType the custom type to register to the output* @param {Function} [options.dataParser] function to parse the line into an object* @param {boolean} [options.segment] should tag data be attached to the segment object*/addParser(options) {this.parseStream.addParser(options);}/*** Add a custom header mapper** @param {Object} options* @param {RegExp} options.expression a regular expression to match the custom header* @param {Function} options.map function to translate tag into a different tag*/addTagMapper(options) {this.parseStream.addTagMapper(options);}}var regexs = {// to determine mime typesmp4: /^(av0?1|avc0?[1234]|vp0?9|flac|opus|mp3|mp4a|mp4v|stpp.ttml.im1t)/,webm: /^(vp0?[89]|av0?1|opus|vorbis)/,ogg: /^(vp0?[89]|theora|flac|opus|vorbis)/,// to determine if a codec is audio or videovideo: /^(av0?1|avc0?[1234]|vp0?[89]|hvc1|hev1|theora|mp4v)/,audio: /^(mp4a|flac|vorbis|opus|ac-[34]|ec-3|alac|mp3|speex|aac)/,text: /^(stpp.ttml.im1t)/,// mux.js support regexmuxerVideo: /^(avc0?1)/,muxerAudio: /^(mp4a)/,// match nothing as muxer does not support text right now.// there cannot never be a character before the start of a string// so this matches nothing.muxerText: /a^/};var mediaTypes = ['video', 'audio', 'text'];var upperMediaTypes = ['Video', 'Audio', 'Text'];/*** Replace the old apple-style `avc1.<dd>.<dd>` codec string with the standard* `avc1.<hhhhhh>`** @param {string} codec* Codec string to translate* @return {string}* The translated codec string*/var translateLegacyCodec = function translateLegacyCodec(codec) {if (!codec) {return codec;}return codec.replace(/avc1\.(\d+)\.(\d+)/i, function (orig, profile, avcLevel) {var profileHex = ('00' + Number(profile).toString(16)).slice(-2);var avcLevelHex = ('00' + Number(avcLevel).toString(16)).slice(-2);return 'avc1.' 
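/**
 * Illustrative sketch of registering a custom, non-standard tag parser via
 * `addParser` (documented above); the tag name used here is made up:
 *
 * ```js
 * const parser = new Parser();
 *
 * parser.addParser({
 *   expression: /^#VOD-FRAMERATE/,
 *   customType: 'framerate',
 *   dataParser: line => parseFloat(line.split(':')[1]),
 *   segment: false
 * });
 *
 * parser.push('#EXTM3U\n#VOD-FRAMERATE:29.97\n');
 * parser.end();
 *
 * console.log(parser.manifest.custom.framerate); // 29.97
 * ```
 */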
+ profileHex + '00' + avcLevelHex;});};/*** @typedef {Object} ParsedCodecInfo* @property {number} codecCount* Number of codecs parsed* @property {string} [videoCodec]* Parsed video codec (if found)* @property {string} [videoObjectTypeIndicator]* Video object type indicator (if found)* @property {string|null} audioProfile* Audio profile*//*** Parses a codec string to retrieve the number of codecs specified, the video codec and* object type indicator, and the audio profile.** @param {string} [codecString]* The codec string to parse* @return {ParsedCodecInfo}* Parsed codec info*/var parseCodecs = function parseCodecs(codecString) {if (codecString === void 0) {codecString = '';}var codecs = codecString.split(',');var result = [];codecs.forEach(function (codec) {codec = codec.trim();var codecType;mediaTypes.forEach(function (name) {var match = regexs[name].exec(codec.toLowerCase());if (!match || match.length <= 1) {return;}codecType = name; // maintain codec casevar type = codec.substring(0, match[1].length);var details = codec.replace(type, '');result.push({type: type,details: details,mediaType: name});});if (!codecType) {result.push({type: codec,details: '',mediaType: 'unknown'});}});return result;};/*** Returns a ParsedCodecInfo object for the default alternate audio playlist if there is* a default alternate audio playlist for the provided audio group.** @param {Object} master* The master playlist* @param {string} audioGroupId* ID of the audio group for which to find the default codec info* @return {ParsedCodecInfo}* Parsed codec info*/var codecsFromDefault = function codecsFromDefault(master, audioGroupId) {if (!master.mediaGroups.AUDIO || !audioGroupId) {return null;}var audioGroup = master.mediaGroups.AUDIO[audioGroupId];if (!audioGroup) {return null;}for (var name in audioGroup) {var audioType = audioGroup[name];if (audioType.default && audioType.playlists) {// codec should be the same for all playlists within the audio typereturn parseCodecs(audioType.playlists[0].attributes.CODECS);}}return null;};var isAudioCodec = function isAudioCodec(codec) {if (codec === void 0) {codec = '';}return regexs.audio.test(codec.trim().toLowerCase());};var isTextCodec = function isTextCodec(codec) {if (codec === void 0) {codec = '';}return regexs.text.test(codec.trim().toLowerCase());};var getMimeForCodec = function getMimeForCodec(codecString) {if (!codecString || typeof codecString !== 'string') {return;}var codecs = codecString.toLowerCase().split(',').map(function (c) {return translateLegacyCodec(c.trim());}); // default to video typevar type = 'video'; // only change to audio type if the only codec we have is// audioif (codecs.length === 1 && isAudioCodec(codecs[0])) {type = 'audio';} else if (codecs.length === 1 && isTextCodec(codecs[0])) {// text uses application/<container> for nowtype = 'application';} // default the container to mp4var container = 'mp4'; // every codec must be able to go into the container// for that container to be the correct oneif (codecs.every(function (c) {return regexs.mp4.test(c);})) {container = 'mp4';} else if (codecs.every(function (c) {return regexs.webm.test(c);})) {container = 'webm';} else if (codecs.every(function (c) {return regexs.ogg.test(c);})) {container = 'ogg';}return type + "/" + container + ";codecs=\"" + codecString + "\"";};var browserSupportsCodec = function browserSupportsCodec(codecString) {if (codecString === void 0) {codecString = '';}return window.MediaSource && window.MediaSource.isTypeSupported && 
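/**
 * Illustrative results from the codec helpers above:
 *
 * ```js
 * console.log(translateLegacyCodec('avc1.100.31'));
 * // -> 'avc1.64001f'
 *
 * console.log(getMimeForCodec('avc1.64001f,mp4a.40.2'));
 * // -> 'video/mp4;codecs="avc1.64001f,mp4a.40.2"'
 *
 * console.log(getMimeForCodec('mp4a.40.2'));
 * // -> 'audio/mp4;codecs="mp4a.40.2"'
 * ```
 */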
window.MediaSource.isTypeSupported(getMimeForCodec(codecString)) || false;};var muxerSupportsCodec = function muxerSupportsCodec(codecString) {if (codecString === void 0) {codecString = '';}return codecString.toLowerCase().split(',').every(function (codec) {codec = codec.trim(); // any match is supported.for (var i = 0; i < upperMediaTypes.length; i++) {var type = upperMediaTypes[i];if (regexs["muxer" + type].test(codec)) {return true;}}return false;});};var DEFAULT_AUDIO_CODEC = 'mp4a.40.2';var DEFAULT_VIDEO_CODEC = 'avc1.4d400d';var MPEGURL_REGEX = /^(audio|video|application)\/(x-|vnd\.apple\.)?mpegurl/i;var DASH_REGEX = /^application\/dash\+xml/i;/*** Returns a string that describes the type of source based on a video source object's* media type.** @see {@link https://dev.w3.org/html5/pf-summary/video.html#dom-source-type|Source Type}** @param {string} type* Video source object media type* @return {('hls'|'dash'|'vhs-json'|null)}* VHS source type string*/var simpleTypeFromSourceType = function simpleTypeFromSourceType(type) {if (MPEGURL_REGEX.test(type)) {return 'hls';}if (DASH_REGEX.test(type)) {return 'dash';} // Denotes the special case of a manifest object passed to http-streaming instead of a// source URL.//// See https://en.wikipedia.org/wiki/Media_type for details on specifying media types.//// In this case, vnd stands for vendor, video.js for the organization, VHS for this// project, and the +json suffix identifies the structure of the media type.if (type === 'application/vnd.videojs.vhs+json') {return 'vhs-json';}return null;};// const log2 = Math.log2 ? Math.log2 : (x) => (Math.log(x) / Math.log(2));// we used to do this with log2 but BigInt does not support builtin math// Math.ceil(log2(x));var countBits = function countBits(x) {return x.toString(2).length;}; // count the number of whole bytes it would take to represent a numbervar countBytes = function countBytes(x) {return Math.ceil(countBits(x) / 8);};var isArrayBufferView = function isArrayBufferView(obj) {if (ArrayBuffer.isView === 'function') {return ArrayBuffer.isView(obj);}return obj && obj.buffer instanceof ArrayBuffer;};var isTypedArray = function isTypedArray(obj) {return isArrayBufferView(obj);};var toUint8 = function toUint8(bytes) {if (bytes instanceof Uint8Array) {return bytes;}if (!Array.isArray(bytes) && !isTypedArray(bytes) && !(bytes instanceof ArrayBuffer)) {// any non-number or NaN leads to empty uint8array// eslint-disable-next-lineif (typeof bytes !== 'number' || typeof bytes === 'number' && bytes !== bytes) {bytes = 0;} else {bytes = [bytes];}}return new Uint8Array(bytes && bytes.buffer || bytes, bytes && bytes.byteOffset || 0, bytes && bytes.byteLength || 0);};var BigInt = window.BigInt || Number;var BYTE_TABLE = [BigInt('0x1'), BigInt('0x100'), BigInt('0x10000'), BigInt('0x1000000'), BigInt('0x100000000'), BigInt('0x10000000000'), BigInt('0x1000000000000'), BigInt('0x100000000000000'), BigInt('0x10000000000000000')];(function () {var a = new Uint16Array([0xFFCC]);var b = new Uint8Array(a.buffer, a.byteOffset, a.byteLength);if (b[0] === 0xFF) {return 'big';}if (b[0] === 0xCC) {return 'little';}return 'unknown';})();var bytesToNumber = function bytesToNumber(bytes, _temp) {var _ref = _temp === void 0 ? {} : _temp,_ref$signed = _ref.signed,signed = _ref$signed === void 0 ? false : _ref$signed,_ref$le = _ref.le,le = _ref$le === void 0 ? false : _ref$le;bytes = toUint8(bytes);var fn = le ? 'reduce' : 'reduceRight';var obj = bytes[fn] ? 
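/**
 * Illustrative results from the helpers above:
 *
 * ```js
 * console.log(simpleTypeFromSourceType('application/x-mpegURL')); // 'hls'
 * console.log(simpleTypeFromSourceType('application/dash+xml'));  // 'dash'
 *
 * console.log(countBits(255), countBytes(255)); // 8 1
 * console.log(countBits(256), countBytes(256)); // 9 2
 * ```
 */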
bytes[fn] : Array.prototype[fn];var number = obj.call(bytes, function (total, byte, i) {var exponent = le ? i : Math.abs(i + 1 - bytes.length);return total + BigInt(byte) * BYTE_TABLE[exponent];}, BigInt(0));if (signed) {var max = BYTE_TABLE[bytes.length] / BigInt(2) - BigInt(1);number = BigInt(number);if (number > max) {number -= max;number -= max;number -= BigInt(2);}}return Number(number);};var numberToBytes = function numberToBytes(number, _temp2) {var _ref2 = _temp2 === void 0 ? {} : _temp2,_ref2$le = _ref2.le,le = _ref2$le === void 0 ? false : _ref2$le;// eslint-disable-next-lineif (typeof number !== 'bigint' && typeof number !== 'number' || typeof number === 'number' && number !== number) {number = 0;}number = BigInt(number);var byteCount = countBytes(number);var bytes = new Uint8Array(new ArrayBuffer(byteCount));for (var i = 0; i < byteCount; i++) {var byteIndex = le ? i : Math.abs(i + 1 - bytes.length);bytes[byteIndex] = Number(number / BYTE_TABLE[i] & BigInt(0xFF));if (number < 0) {bytes[byteIndex] = Math.abs(~bytes[byteIndex]);bytes[byteIndex] -= i === 0 ? 1 : 2;}}return bytes;};var stringToBytes = function stringToBytes(string, stringIsBytes) {if (typeof string !== 'string' && string && typeof string.toString === 'function') {string = string.toString();}if (typeof string !== 'string') {return new Uint8Array();} // If the string already is bytes, we don't have to do this// otherwise we do this so that we split multi length characters// into individual bytesif (!stringIsBytes) {string = unescape(encodeURIComponent(string));}var view = new Uint8Array(string.length);for (var i = 0; i < string.length; i++) {view[i] = string.charCodeAt(i);}return view;};var concatTypedArrays = function concatTypedArrays() {for (var _len = arguments.length, buffers = new Array(_len), _key = 0; _key < _len; _key++) {buffers[_key] = arguments[_key];}buffers = buffers.filter(function (b) {return b && (b.byteLength || b.length) && typeof b !== 'string';});if (buffers.length <= 1) {// for 0 length we will return empty uint8// for 1 length we return the first uint8return toUint8(buffers[0]);}var totalLen = buffers.reduce(function (total, buf, i) {return total + (buf.byteLength || buf.length);}, 0);var tempBuffer = new Uint8Array(totalLen);var offset = 0;buffers.forEach(function (buf) {buf = toUint8(buf);tempBuffer.set(buf, offset);offset += buf.byteLength;});return tempBuffer;};/*** Check if the bytes "b" are contained within bytes "a".** @param {Uint8Array|Array} a* Bytes to check in** @param {Uint8Array|Array} b* Bytes to check for** @param {Object} options* options** @param {Array|Uint8Array} [offset=0]* offset to use when looking at bytes in a** @param {Array|Uint8Array} [mask=[]]* mask to use on bytes before comparison.** @return {boolean}* If all bytes in b are inside of a, taking into account* bit masks.*/var bytesMatch = function bytesMatch(a, b, _temp3) {var _ref3 = _temp3 === void 0 ? {} : _temp3,_ref3$offset = _ref3.offset,offset = _ref3$offset === void 0 ? 0 : _ref3$offset,_ref3$mask = _ref3.mask,mask = _ref3$mask === void 0 ? [] : _ref3$mask;a = toUint8(a);b = toUint8(b); // ie 11 does not support uint8 everyvar fn = b.every ? b.every : Array.prototype.every;return b.length && a.length - offset >= b.length &&// ie 11 doesn't support every on uin8fn.call(b, function (bByte, i) {var aByte = mask[i] ? 
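/**
 * Illustrative results from `bytesToNumber` and `numberToBytes` above
 * (big-endian by default, little-endian via `le: true`):
 *
 * ```js
 * console.log(bytesToNumber([0x01, 0x00]));               // 256
 * console.log(bytesToNumber([0x01, 0x00], { le: true })); // 1
 * console.log(numberToBytes(256));                        // Uint8Array [ 1, 0 ]
 * ```
 */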
mask[i] & a[offset + i] : a[offset + i];return bByte === aByte;});};/*** Loops through all supported media groups in master and calls the provided* callback for each group** @param {Object} master* The parsed master manifest object* @param {string[]} groups* The media groups to call the callback for* @param {Function} callback* Callback to call for each media group*/var forEachMediaGroup$1 = function forEachMediaGroup(master, groups, callback) {groups.forEach(function (mediaType) {for (var groupKey in master.mediaGroups[mediaType]) {for (var labelKey in master.mediaGroups[mediaType][groupKey]) {var mediaProperties = master.mediaGroups[mediaType][groupKey][labelKey];callback(mediaProperties, mediaType, groupKey, labelKey);}}});};var atob = function atob(s) {return window.atob ? window.atob(s) : Buffer.from(s, 'base64').toString('binary');};function decodeB64ToUint8Array(b64Text) {var decodedString = atob(b64Text);var array = new Uint8Array(decodedString.length);for (var i = 0; i < decodedString.length; i++) {array[i] = decodedString.charCodeAt(i);}return array;}/*** Ponyfill for `Array.prototype.find` which is only available in ES6 runtimes.** Works with anything that has a `length` property and index access properties, including NodeList.** @template {unknown} T* @param {Array<T> | ({length:number, [number]: T})} list* @param {function (item: T, index: number, list:Array<T> | ({length:number, [number]: T})):boolean} predicate* @param {Partial<Pick<ArrayConstructor['prototype'], 'find'>>?} ac `Array.prototype` by default,* allows injecting a custom implementation in tests* @returns {T | undefined}** @see https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Array/find* @see https://tc39.es/ecma262/multipage/indexed-collections.html#sec-array.prototype.find*/function find$1(list, predicate, ac) {if (ac === undefined) {ac = Array.prototype;}if (list && typeof ac.find === 'function') {return ac.find.call(list, predicate);}for (var i = 0; i < list.length; i++) {if (Object.prototype.hasOwnProperty.call(list, i)) {var item = list[i];if (predicate.call(undefined, item, i, list)) {return item;}}}}/*** "Shallow freezes" an object to render it immutable.* Uses `Object.freeze` if available,* otherwise the immutability is only in the type.** Is used to create "enum like" objects.** @template T* @param {T} object the object to freeze* @param {Pick<ObjectConstructor, 'freeze'> = Object} oc `Object` by default,* allows to inject custom object constructor for tests* @returns {Readonly<T>}** @see https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Object/freeze*/function freeze(object, oc) {if (oc === undefined) {oc = Object;}return oc && typeof oc.freeze === 'function' ? 
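/**
 * Illustrative sketch of `bytesMatch` above, e.g. checking whether a buffer
 * starts with the 'ID3' marker bytes:
 *
 * ```js
 * const id3Header = new Uint8Array([0x49, 0x44, 0x33, 0x04, 0x00]);
 *
 * console.log(bytesMatch(id3Header, [0x49, 0x44, 0x33]));                // true
 * console.log(bytesMatch(id3Header, [0x49, 0x44, 0x33], { offset: 1 })); // false
 * ```
 */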
oc.freeze(object) : object;}/*** Since we can not rely on `Object.assign` we provide a simplified version* that is sufficient for our needs.** @param {Object} target* @param {Object | null | undefined} source** @returns {Object} target* @throws TypeError if target is not an object** @see https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Object/assign* @see https://tc39.es/ecma262/multipage/fundamental-objects.html#sec-object.assign*/function assign(target, source) {if (target === null || typeof target !== 'object') {throw new TypeError('target is not an object');}for (var key in source) {if (Object.prototype.hasOwnProperty.call(source, key)) {target[key] = source[key];}}return target;}/*** All mime types that are allowed as input to `DOMParser.parseFromString`** @see https://developer.mozilla.org/en-US/docs/Web/API/DOMParser/parseFromString#Argument02 MDN* @see https://html.spec.whatwg.org/multipage/dynamic-markup-insertion.html#domparsersupportedtype WHATWG HTML Spec* @see DOMParser.prototype.parseFromString*/var MIME_TYPE = freeze({/*** `text/html`, the only mime type that triggers treating an XML document as HTML.** @see DOMParser.SupportedType.isHTML* @see https://www.iana.org/assignments/media-types/text/html IANA MimeType registration* @see https://en.wikipedia.org/wiki/HTML Wikipedia* @see https://developer.mozilla.org/en-US/docs/Web/API/DOMParser/parseFromString MDN* @see https://html.spec.whatwg.org/multipage/dynamic-markup-insertion.html#dom-domparser-parsefromstring WHATWG HTML Spec*/HTML: 'text/html',/*** Helper method to check a mime type if it indicates an HTML document** @param {string} [value]* @returns {boolean}** @see https://www.iana.org/assignments/media-types/text/html IANA MimeType registration* @see https://en.wikipedia.org/wiki/HTML Wikipedia* @see https://developer.mozilla.org/en-US/docs/Web/API/DOMParser/parseFromString MDN* @see https://html.spec.whatwg.org/multipage/dynamic-markup-insertion.html#dom-domparser-parsefromstring */isHTML: function (value) {return value === MIME_TYPE.HTML;},/*** `application/xml`, the standard mime type for XML documents.** @see https://www.iana.org/assignments/media-types/application/xml IANA MimeType registration* @see https://tools.ietf.org/html/rfc7303#section-9.1 RFC 7303* @see https://en.wikipedia.org/wiki/XML_and_MIME Wikipedia*/XML_APPLICATION: 'application/xml',/*** `text/html`, an alias for `application/xml`.** @see https://tools.ietf.org/html/rfc7303#section-9.2 RFC 7303* @see https://www.iana.org/assignments/media-types/text/xml IANA MimeType registration* @see https://en.wikipedia.org/wiki/XML_and_MIME Wikipedia*/XML_TEXT: 'text/xml',/*** `application/xhtml+xml`, indicates an XML document that has the default HTML namespace,* but is parsed as an XML document.** @see https://www.iana.org/assignments/media-types/application/xhtml+xml IANA MimeType registration* @see https://dom.spec.whatwg.org/#dom-domimplementation-createdocument WHATWG DOM Spec* @see https://en.wikipedia.org/wiki/XHTML Wikipedia*/XML_XHTML_APPLICATION: 'application/xhtml+xml',/*** `image/svg+xml`,** @see https://www.iana.org/assignments/media-types/image/svg+xml IANA MimeType registration* @see https://www.w3.org/TR/SVG11/ W3C SVG 1.1* @see https://en.wikipedia.org/wiki/Scalable_Vector_Graphics Wikipedia*/XML_SVG_IMAGE: 'image/svg+xml'});/*** Namespaces that are used in this code base.** @see http://www.w3.org/TR/REC-xml-names*/var NAMESPACE$3 = freeze({/*** The XHTML namespace.** @see http://www.w3.org/1999/xhtml*/HTML: 
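/**
 * Illustrative behaviour of the simplified `assign` and the `MIME_TYPE`
 * helpers above:
 *
 * ```js
 * console.log(assign({ a: 1 }, { b: 2 }));          // { a: 1, b: 2 }
 * console.log(MIME_TYPE.isHTML('text/html'));       // true
 * console.log(MIME_TYPE.isHTML('application/xml')); // false
 * ```
 */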
'http://www.w3.org/1999/xhtml',/*** Checks if `uri` equals `NAMESPACE.HTML`.** @param {string} [uri]** @see NAMESPACE.HTML*/isHTML: function (uri) {return uri === NAMESPACE$3.HTML;},/*** The SVG namespace.** @see http://www.w3.org/2000/svg*/SVG: 'http://www.w3.org/2000/svg',/*** The `xml:` namespace.** @see http://www.w3.org/XML/1998/namespace*/XML: 'http://www.w3.org/XML/1998/namespace',/*** The `xmlns:` namespace** @see https://www.w3.org/2000/xmlns/*/XMLNS: 'http://www.w3.org/2000/xmlns/'});var assign_1 = assign;var find_1 = find$1;var freeze_1 = freeze;var MIME_TYPE_1 = MIME_TYPE;var NAMESPACE_1 = NAMESPACE$3;var conventions = {assign: assign_1,find: find_1,freeze: freeze_1,MIME_TYPE: MIME_TYPE_1,NAMESPACE: NAMESPACE_1};var find = conventions.find;var NAMESPACE$2 = conventions.NAMESPACE;/*** A prerequisite for `[].filter`, to drop elements that are empty* @param {string} input* @returns {boolean}*/function notEmptyString(input) {return input !== '';}/*** @see https://infra.spec.whatwg.org/#split-on-ascii-whitespace* @see https://infra.spec.whatwg.org/#ascii-whitespace** @param {string} input* @returns {string[]} (can be empty)*/function splitOnASCIIWhitespace(input) {// U+0009 TAB, U+000A LF, U+000C FF, U+000D CR, U+0020 SPACEreturn input ? input.split(/[\t\n\f\r ]+/).filter(notEmptyString) : [];}/*** Adds element as a key to current if it is not already present.** @param {Record<string, boolean | undefined>} current* @param {string} element* @returns {Record<string, boolean | undefined>}*/function orderedSetReducer(current, element) {if (!current.hasOwnProperty(element)) {current[element] = true;}return current;}/*** @see https://infra.spec.whatwg.org/#ordered-set* @param {string} input* @returns {string[]}*/function toOrderedSet(input) {if (!input) return [];var list = splitOnASCIIWhitespace(input);return Object.keys(list.reduce(orderedSetReducer, {}));}/*** Uses `list.indexOf` to implement something like `Array.prototype.includes`,* which we can not rely on being available.** @param {any[]} list* @returns {function(any): boolean}*/function arrayIncludes(list) {return function (element) {return list && list.indexOf(element) !== -1;};}function copy(src, dest) {for (var p in src) {if (Object.prototype.hasOwnProperty.call(src, p)) {dest[p] = src[p];}}}/**^\w+\.prototype\.([_\w]+)\s*=\s*((?:.*\{\s*?[\r\n][\s\S]*?^})|\S.*?(?=[;\r\n]));?^\w+\.prototype\.([_\w]+)\s*=\s*(\S.*?(?=[;\r\n]));?*/function _extends(Class, Super) {var pt = Class.prototype;if (!(pt instanceof Super)) {function t() {}t.prototype = Super.prototype;t = new t();copy(pt, t);Class.prototype = pt = t;}if (pt.constructor != Class) {if (typeof Class != 'function') {console.error("unknown Class:" + Class);}pt.constructor = Class;}}// Node Typesvar NodeType = {};var ELEMENT_NODE = NodeType.ELEMENT_NODE = 1;var ATTRIBUTE_NODE = NodeType.ATTRIBUTE_NODE = 2;var TEXT_NODE = NodeType.TEXT_NODE = 3;var CDATA_SECTION_NODE = NodeType.CDATA_SECTION_NODE = 4;var ENTITY_REFERENCE_NODE = NodeType.ENTITY_REFERENCE_NODE = 5;var ENTITY_NODE = NodeType.ENTITY_NODE = 6;var PROCESSING_INSTRUCTION_NODE = NodeType.PROCESSING_INSTRUCTION_NODE = 7;var COMMENT_NODE = NodeType.COMMENT_NODE = 8;var DOCUMENT_NODE = NodeType.DOCUMENT_NODE = 9;var DOCUMENT_TYPE_NODE = NodeType.DOCUMENT_TYPE_NODE = 10;var DOCUMENT_FRAGMENT_NODE = NodeType.DOCUMENT_FRAGMENT_NODE = 11;var NOTATION_NODE = NodeType.NOTATION_NODE = 12;// ExceptionCodevar ExceptionCode = {};var ExceptionMessage = {};ExceptionCode.INDEX_SIZE_ERR = (ExceptionMessage[1] = "Index size error", 
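/**
 * Illustrative results from the whitespace/ordered-set helpers above:
 *
 * ```js
 * console.log(splitOnASCIIWhitespace(' a  b\tc ')); // [ 'a', 'b', 'c' ]
 * console.log(toOrderedSet('a b a c b'));           // [ 'a', 'b', 'c' ]
 * ```
 */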
1);ExceptionCode.DOMSTRING_SIZE_ERR = (ExceptionMessage[2] = "DOMString size error", 2);var HIERARCHY_REQUEST_ERR = ExceptionCode.HIERARCHY_REQUEST_ERR = (ExceptionMessage[3] = "Hierarchy request error", 3);ExceptionCode.WRONG_DOCUMENT_ERR = (ExceptionMessage[4] = "Wrong document", 4);ExceptionCode.INVALID_CHARACTER_ERR = (ExceptionMessage[5] = "Invalid character", 5);ExceptionCode.NO_DATA_ALLOWED_ERR = (ExceptionMessage[6] = "No data allowed", 6);ExceptionCode.NO_MODIFICATION_ALLOWED_ERR = (ExceptionMessage[7] = "No modification allowed", 7);var NOT_FOUND_ERR = ExceptionCode.NOT_FOUND_ERR = (ExceptionMessage[8] = "Not found", 8);ExceptionCode.NOT_SUPPORTED_ERR = (ExceptionMessage[9] = "Not supported", 9);var INUSE_ATTRIBUTE_ERR = ExceptionCode.INUSE_ATTRIBUTE_ERR = (ExceptionMessage[10] = "Attribute in use", 10);//level2ExceptionCode.INVALID_STATE_ERR = (ExceptionMessage[11] = "Invalid state", 11);ExceptionCode.SYNTAX_ERR = (ExceptionMessage[12] = "Syntax error", 12);ExceptionCode.INVALID_MODIFICATION_ERR = (ExceptionMessage[13] = "Invalid modification", 13);ExceptionCode.NAMESPACE_ERR = (ExceptionMessage[14] = "Invalid namespace", 14);ExceptionCode.INVALID_ACCESS_ERR = (ExceptionMessage[15] = "Invalid access", 15);/*** DOM Level 2* Object DOMException* @see http://www.w3.org/TR/2000/REC-DOM-Level-2-Core-20001113/ecma-script-binding.html* @see http://www.w3.org/TR/REC-DOM-Level-1/ecma-script-language-binding.html*/function DOMException(code, message) {if (message instanceof Error) {var error = message;} else {error = this;Error.call(this, ExceptionMessage[code]);this.message = ExceptionMessage[code];if (Error.captureStackTrace) Error.captureStackTrace(this, DOMException);}error.code = code;if (message) this.message = this.message + ": " + message;return error;}DOMException.prototype = Error.prototype;copy(ExceptionCode, DOMException);/*** @see http://www.w3.org/TR/2000/REC-DOM-Level-2-Core-20001113/core.html#ID-536297177* The NodeList interface provides the abstraction of an ordered collection of nodes, without defining or constraining how this collection is implemented. NodeList objects in the DOM are live.* The items in the NodeList are accessible via an integral index, starting from 0.*/function NodeList() {}NodeList.prototype = {/*** The number of nodes in the list. The range of valid child node indices is 0 to length-1 inclusive.* @standard level1*/length: 0,/*** Returns the indexth item in the collection. If index is greater than or equal to the number of nodes in the list, this returns null.* @standard level1* @param index unsigned long* Index into the collection.* @return Node* The node at the indexth position in the NodeList, or null if that is not a valid index.*/item: function (index) {return index >= 0 && index < this.length ? 
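/**
 * Illustrative sketch of the bundled DOMException above (the numeric code
 * constants are copied onto the constructor via `copy(ExceptionCode, DOMException)`):
 *
 * ```js
 * try {
 *   throw new DOMException(DOMException.NOT_FOUND_ERR, 'no such attribute');
 * } catch (e) {
 *   console.log(e.code, e.message); // 8 'Not found: no such attribute'
 * }
 * ```
 */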
this[index] : null;},toString: function (isHTML, nodeFilter) {for (var buf = [], i = 0; i < this.length; i++) {serializeToString(this[i], buf, isHTML, nodeFilter);}return buf.join('');},/*** @private* @param {function (Node):boolean} predicate* @returns {Node[]}*/filter: function (predicate) {return Array.prototype.filter.call(this, predicate);},/*** @private* @param {Node} item* @returns {number}*/indexOf: function (item) {return Array.prototype.indexOf.call(this, item);}};function LiveNodeList(node, refresh) {this._node = node;this._refresh = refresh;_updateLiveList(this);}function _updateLiveList(list) {var inc = list._node._inc || list._node.ownerDocument._inc;if (list._inc !== inc) {var ls = list._refresh(list._node);__set__(list, 'length', ls.length);if (!list.$$length || ls.length < list.$$length) {for (var i = ls.length; (i in list); i++) {if (Object.prototype.hasOwnProperty.call(list, i)) {delete list[i];}}}copy(ls, list);list._inc = inc;}}LiveNodeList.prototype.item = function (i) {_updateLiveList(this);return this[i] || null;};_extends(LiveNodeList, NodeList);/*** Objects implementing the NamedNodeMap interface are used* to represent collections of nodes that can be accessed by name.* Note that NamedNodeMap does not inherit from NodeList;* NamedNodeMaps are not maintained in any particular order.* Objects contained in an object implementing NamedNodeMap may also be accessed by an ordinal index,* but this is simply to allow convenient enumeration of the contents of a NamedNodeMap,* and does not imply that the DOM specifies an order to these Nodes.* NamedNodeMap objects in the DOM are live.* used for attributes or DocumentType entities*/function NamedNodeMap() {}function _findNodeIndex(list, node) {var i = list.length;while (i--) {if (list[i] === node) {return i;}}}function _addNamedNode(el, list, newAttr, oldAttr) {if (oldAttr) {list[_findNodeIndex(list, oldAttr)] = newAttr;} else {list[list.length++] = newAttr;}if (el) {newAttr.ownerElement = el;var doc = el.ownerDocument;if (doc) {oldAttr && _onRemoveAttribute(doc, el, oldAttr);_onAddAttribute(doc, el, newAttr);}}}function _removeNamedNode(el, list, attr) {//console.log('remove attr:'+attr)var i = _findNodeIndex(list, attr);if (i >= 0) {var lastIndex = list.length - 1;while (i < lastIndex) {list[i] = list[++i];}list.length = lastIndex;if (el) {var doc = el.ownerDocument;if (doc) {_onRemoveAttribute(doc, el, attr);attr.ownerElement = null;}}} else {throw new DOMException(NOT_FOUND_ERR, new Error(el.tagName + '@' + attr));}}NamedNodeMap.prototype = {length: 0,item: NodeList.prototype.item,getNamedItem: function (key) {// if(key.indexOf(':')>0 || key == 'xmlns'){// return null;// }//console.log()var i = this.length;while (i--) {var attr = this[i];//console.log(attr.nodeName,key)if (attr.nodeName == key) {return attr;}}},setNamedItem: function (attr) {var el = attr.ownerElement;if (el && el != this._ownerElement) {throw new DOMException(INUSE_ATTRIBUTE_ERR);}var oldAttr = this.getNamedItem(attr.nodeName);_addNamedNode(this._ownerElement, this, attr, oldAttr);return oldAttr;},/* returns Node */setNamedItemNS: function (attr) {// raises: WRONG_DOCUMENT_ERR,NO_MODIFICATION_ALLOWED_ERR,INUSE_ATTRIBUTE_ERRvar el = attr.ownerElement,oldAttr;if (el && el != this._ownerElement) {throw new DOMException(INUSE_ATTRIBUTE_ERR);}oldAttr = this.getNamedItemNS(attr.namespaceURI, attr.localName);_addNamedNode(this._ownerElement, this, attr, oldAttr);return oldAttr;},/* returns Node */removeNamedItem: function (key) {var attr = 
this.getNamedItem(key);_removeNamedNode(this._ownerElement, this, attr);return attr;},// raises: NOT_FOUND_ERR,NO_MODIFICATION_ALLOWED_ERR//for level2removeNamedItemNS: function (namespaceURI, localName) {var attr = this.getNamedItemNS(namespaceURI, localName);_removeNamedNode(this._ownerElement, this, attr);return attr;},getNamedItemNS: function (namespaceURI, localName) {var i = this.length;while (i--) {var node = this[i];if (node.localName == localName && node.namespaceURI == namespaceURI) {return node;}}return null;}};/*** The DOMImplementation interface represents an object providing methods* which are not dependent on any particular document.* Such an object is returned by the `Document.implementation` property.** __The individual methods describe the differences compared to the specs.__** @constructor** @see https://developer.mozilla.org/en-US/docs/Web/API/DOMImplementation MDN* @see https://www.w3.org/TR/REC-DOM-Level-1/level-one-core.html#ID-102161490 DOM Level 1 Core (Initial)* @see https://www.w3.org/TR/DOM-Level-2-Core/core.html#ID-102161490 DOM Level 2 Core* @see https://www.w3.org/TR/DOM-Level-3-Core/core.html#ID-102161490 DOM Level 3 Core* @see https://dom.spec.whatwg.org/#domimplementation DOM Living Standard*/function DOMImplementation$1() {}DOMImplementation$1.prototype = {/*** The DOMImplementation.hasFeature() method returns a Boolean flag indicating if a given feature is supported.* The different implementations fairly diverged in what kind of features were reported.* The latest version of the spec settled to force this method to always return true, where the functionality was accurate and in use.** @deprecated It is deprecated and modern browsers return true in all cases.** @param {string} feature* @param {string} [version]* @returns {boolean} always true** @see https://developer.mozilla.org/en-US/docs/Web/API/DOMImplementation/hasFeature MDN* @see https://www.w3.org/TR/REC-DOM-Level-1/level-one-core.html#ID-5CED94D7 DOM Level 1 Core* @see https://dom.spec.whatwg.org/#dom-domimplementation-hasfeature DOM Living Standard*/hasFeature: function (feature, version) {return true;},/*** Creates an XML Document object of the specified type with its document element.** __It behaves slightly different from the description in the living standard__:* - There is no interface/class `XMLDocument`, it returns a `Document` instance.* - `contentType`, `encoding`, `mode`, `origin`, `url` fields are currently not declared.* - this implementation is not validating names or qualified names* (when parsing XML strings, the SAX parser takes care of that)** @param {string|null} namespaceURI* @param {string} qualifiedName* @param {DocumentType=null} doctype* @returns {Document}** @see https://developer.mozilla.org/en-US/docs/Web/API/DOMImplementation/createDocument MDN* @see https://www.w3.org/TR/DOM-Level-2-Core/core.html#Level-2-Core-DOM-createDocument DOM Level 2 Core (initial)* @see https://dom.spec.whatwg.org/#dom-domimplementation-createdocument DOM Level 2 Core** @see https://dom.spec.whatwg.org/#validate-and-extract DOM: Validate and extract* @see https://www.w3.org/TR/xml/#NT-NameStartChar XML Spec: Names* @see https://www.w3.org/TR/xml-names/#ns-qualnames XML Namespaces: Qualified names*/createDocument: function (namespaceURI, qualifiedName, doctype) {var doc = new Document();doc.implementation = this;doc.childNodes = new NodeList();doc.doctype = doctype || null;if (doctype) {doc.appendChild(doctype);}if (qualifiedName) {var root = doc.createElementNS(namespaceURI, 
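/**
 * A minimal sketch using the DOMImplementation above (`DOMImplementation$1` is
 * the name this bundle gives the xmldom-derived implementation); only the
 * properties that `createDocument` assigns directly are shown:
 *
 * ```js
 * var impl = new DOMImplementation$1();
 * var doc = impl.createDocument('http://www.w3.org/2000/svg', 'svg', null);
 *
 * console.log(doc.implementation === impl); // true
 * console.log(doc.doctype);                 // null
 * ```
 */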
qualifiedName);doc.appendChild(root);}return doc;},/*** Returns a doctype, with the given `qualifiedName`, `publicId`, and `systemId`.** __This behavior is slightly different from the in the specs__:* - this implementation is not validating names or qualified names* (when parsing XML strings, the SAX parser takes care of that)** @param {string} qualifiedName* @param {string} [publicId]* @param {string} [systemId]* @returns {DocumentType} which can either be used with `DOMImplementation.createDocument` upon document creation* or can be put into the document via methods like `Node.insertBefore()` or `Node.replaceChild()`** @see https://developer.mozilla.org/en-US/docs/Web/API/DOMImplementation/createDocumentType MDN* @see https://www.w3.org/TR/DOM-Level-2-Core/core.html#Level-2-Core-DOM-createDocType DOM Level 2 Core* @see https://dom.spec.whatwg.org/#dom-domimplementation-createdocumenttype DOM Living Standard** @see https://dom.spec.whatwg.org/#validate-and-extract DOM: Validate and extract* @see https://www.w3.org/TR/xml/#NT-NameStartChar XML Spec: Names* @see https://www.w3.org/TR/xml-names/#ns-qualnames XML Namespaces: Qualified names*/createDocumentType: function (qualifiedName, publicId, systemId) {var node = new DocumentType();node.name = qualifiedName;node.nodeName = qualifiedName;node.publicId = publicId || '';node.systemId = systemId || '';return node;}};/*** @see http://www.w3.org/TR/2000/REC-DOM-Level-2-Core-20001113/core.html#ID-1950641247*/function Node() {}Node.prototype = {firstChild: null,lastChild: null,previousSibling: null,nextSibling: null,attributes: null,parentNode: null,childNodes: null,ownerDocument: null,nodeValue: null,namespaceURI: null,prefix: null,localName: null,// Modified in DOM Level 2:insertBefore: function (newChild, refChild) {//raisesreturn _insertBefore(this, newChild, refChild);},replaceChild: function (newChild, oldChild) {//raises_insertBefore(this, newChild, oldChild, assertPreReplacementValidityInDocument);if (oldChild) {this.removeChild(oldChild);}},removeChild: function (oldChild) {return _removeChild(this, oldChild);},appendChild: function (newChild) {return this.insertBefore(newChild, null);},hasChildNodes: function () {return this.firstChild != null;},cloneNode: function (deep) {return cloneNode(this.ownerDocument || this, this, deep);},// Modified in DOM Level 2:normalize: function () {var child = this.firstChild;while (child) {var next = child.nextSibling;if (next && next.nodeType == TEXT_NODE && child.nodeType == TEXT_NODE) {this.removeChild(next);child.appendData(next.data);} else {child.normalize();child = next;}}},// Introduced in DOM Level 2:isSupported: function (feature, version) {return this.ownerDocument.implementation.hasFeature(feature, version);},// Introduced in DOM Level 2:hasAttributes: function () {return this.attributes.length > 0;},/*** Look up the prefix associated to the given namespace URI, starting from this node.* **The default namespace declarations are ignored by this method.*** See Namespace Prefix Lookup for details on the algorithm used by this method.** _Note: The implementation seems to be incomplete when compared to the algorithm described in the specs._** @param {string | null} namespaceURI* @returns {string | null}* @see https://www.w3.org/TR/DOM-Level-3-Core/core.html#Node3-lookupNamespacePrefix* @see https://www.w3.org/TR/DOM-Level-3-Core/namespaces-algorithms.html#lookupNamespacePrefixAlgo* @see https://dom.spec.whatwg.org/#dom-node-lookupprefix* @see 
https://github.com/xmldom/xmldom/issues/322*/lookupPrefix: function (namespaceURI) {var el = this;while (el) {var map = el._nsMap;//console.dir(map)
if (map) {for (var n in map) {if (Object.prototype.hasOwnProperty.call(map, n) && map[n] === namespaceURI) {return n;}}}el = el.nodeType == ATTRIBUTE_NODE ? el.ownerDocument : el.parentNode;}return null;},// Introduced in DOM Level 3:
lookupNamespaceURI: function (prefix) {var el = this;while (el) {var map = el._nsMap;//console.dir(map)
if (map) {if (Object.prototype.hasOwnProperty.call(map, prefix)) {return map[prefix];}}el = el.nodeType == ATTRIBUTE_NODE ? el.ownerDocument : el.parentNode;}return null;},// Introduced in DOM Level 3:
isDefaultNamespace: function (namespaceURI) {var prefix = this.lookupPrefix(namespaceURI);return prefix == null;}};function _xmlEncoder(c) {return c == '<' && '&lt;' || c == '>' && '&gt;' || c == '&' && '&amp;' || c == '"' && '&quot;' || '&#' + c.charCodeAt() + ';';}copy(NodeType, Node);copy(NodeType, Node.prototype);/*** @param callback return true for continue,false for break* @return boolean true: break visit;*/function _visitNode(node, callback) {if (callback(node)) {return true;}if (node = node.firstChild) {do {if (_visitNode(node, callback)) {return true;}} while (node = node.nextSibling);}}function Document() {this.ownerDocument = this;}function _onAddAttribute(doc, el, newAttr) {doc && doc._inc++;var ns = newAttr.namespaceURI;if (ns === NAMESPACE$2.XMLNS) {//update namespace
el._nsMap[newAttr.prefix ? newAttr.localName : ''] = newAttr.value;}}function _onRemoveAttribute(doc, el, newAttr, remove) {doc && doc._inc++;var ns = newAttr.namespaceURI;if (ns === NAMESPACE$2.XMLNS) {//update namespace
delete el._nsMap[newAttr.prefix ? newAttr.localName : ''];}}/*** Updates `el.childNodes`, updating the indexed items and its `length`.* Passing `newChild` means it will be appended.* Otherwise it's assumed that an item has been removed,* and `el.firstChild` and its `.nextSibling` are used* to walk the current list of child nodes.** @param {Document} doc* @param {Node} el* @param {Node} [newChild]* @private*/function _onUpdateChild(doc, el, newChild) {if (doc && doc._inc) {doc._inc++;//update childNodes
var cs = el.childNodes;if (newChild) {cs[cs.length++] = newChild;} else {var child = el.firstChild;var i = 0;while (child) {cs[i++] = child;child = child.nextSibling;}cs.length = i;delete cs[cs.length];}}}/*** Removes the connections between `parentNode` and `child`* and any existing `child.previousSibling` or `child.nextSibling`.** @see https://github.com/xmldom/xmldom/issues/135* @see https://github.com/xmldom/xmldom/issues/145** @param {Node} parentNode* @param {Node} child* @returns {Node} the child that was removed.* @private*/function _removeChild(parentNode, child) {var previous = child.previousSibling;var next = child.nextSibling;if (previous) {previous.nextSibling = next;} else {parentNode.firstChild = next;}if (next) {next.previousSibling = previous;} else {parentNode.lastChild = previous;}child.parentNode = null;child.previousSibling = null;child.nextSibling = null;_onUpdateChild(parentNode.ownerDocument, parentNode);return child;}/*** Returns `true` if `node` can be a parent for insertion.* @param {Node} node* @returns {boolean}*/function hasValidParentNodeType(node) {return node && (node.nodeType === Node.DOCUMENT_NODE || node.nodeType === Node.DOCUMENT_FRAGMENT_NODE || node.nodeType === Node.ELEMENT_NODE);}/*** Returns `true` if `node` can be inserted according to its `nodeType`.* @param {Node} node* @returns
{boolean}*/function hasInsertableNodeType(node) {return node && (isElementNode(node) || isTextNode(node) || isDocTypeNode(node) || node.nodeType === Node.DOCUMENT_FRAGMENT_NODE || node.nodeType === Node.COMMENT_NODE || node.nodeType === Node.PROCESSING_INSTRUCTION_NODE);}/*** Returns true if `node` is a DOCTYPE node* @param {Node} node* @returns {boolean}*/function isDocTypeNode(node) {return node && node.nodeType === Node.DOCUMENT_TYPE_NODE;}/*** Returns true if the node is an element* @param {Node} node* @returns {boolean}*/function isElementNode(node) {return node && node.nodeType === Node.ELEMENT_NODE;}/*** Returns true if `node` is a text node* @param {Node} node* @returns {boolean}*/function isTextNode(node) {return node && node.nodeType === Node.TEXT_NODE;}/*** Check if en element node can be inserted before `child`, or at the end if child is falsy,* according to the presence and position of a doctype node on the same level.** @param {Document} doc The document node* @param {Node} child the node that would become the nextSibling if the element would be inserted* @returns {boolean} `true` if an element can be inserted before child* @private* https://dom.spec.whatwg.org/#concept-node-ensure-pre-insertion-validity*/function isElementInsertionPossible(doc, child) {var parentChildNodes = doc.childNodes || [];if (find(parentChildNodes, isElementNode) || isDocTypeNode(child)) {return false;}var docTypeNode = find(parentChildNodes, isDocTypeNode);return !(child && docTypeNode && parentChildNodes.indexOf(docTypeNode) > parentChildNodes.indexOf(child));}/*** Check if en element node can be inserted before `child`, or at the end if child is falsy,* according to the presence and position of a doctype node on the same level.** @param {Node} doc The document node* @param {Node} child the node that would become the nextSibling if the element would be inserted* @returns {boolean} `true` if an element can be inserted before child* @private* https://dom.spec.whatwg.org/#concept-node-ensure-pre-insertion-validity*/function isElementReplacementPossible(doc, child) {var parentChildNodes = doc.childNodes || [];function hasElementChildThatIsNotChild(node) {return isElementNode(node) && node !== child;}if (find(parentChildNodes, hasElementChildThatIsNotChild)) {return false;}var docTypeNode = find(parentChildNodes, isDocTypeNode);return !(child && docTypeNode && parentChildNodes.indexOf(docTypeNode) > parentChildNodes.indexOf(child));}/*** @private* Steps 1-5 of the checks before inserting and before replacing a child are the same.** @param {Node} parent the parent node to insert `node` into* @param {Node} node the node to insert* @param {Node=} child the node that should become the `nextSibling` of `node`* @returns {Node}* @throws DOMException for several node combinations that would create a DOM that is not well-formed.* @throws DOMException if `child` is provided but is not a child of `parent`.* @see https://dom.spec.whatwg.org/#concept-node-ensure-pre-insertion-validity* @see https://dom.spec.whatwg.org/#concept-node-replace*/function assertPreInsertionValidity1to5(parent, node, child) {// 1. If `parent` is not a Document, DocumentFragment, or Element node, then throw a "HierarchyRequestError" DOMException.if (!hasValidParentNodeType(parent)) {throw new DOMException(HIERARCHY_REQUEST_ERR, 'Unexpected parent node type ' + parent.nodeType);}// 2. If `node` is a host-including inclusive ancestor of `parent`, then throw a "HierarchyRequestError" DOMException.// not implemented!// 3. 
If `child` is non-null and its parent is not `parent`, then throw a "NotFoundError" DOMException.if (child && child.parentNode !== parent) {throw new DOMException(NOT_FOUND_ERR, 'child not in parent');}if (// 4. If `node` is not a DocumentFragment, DocumentType, Element, or CharacterData node, then throw a "HierarchyRequestError" DOMException.!hasInsertableNodeType(node) ||// 5. If either `node` is a Text node and `parent` is a document,// the sax parser currently adds top level text nodes, this will be fixed in 0.9.0// || (node.nodeType === Node.TEXT_NODE && parent.nodeType === Node.DOCUMENT_NODE)// or `node` is a doctype and `parent` is not a document, then throw a "HierarchyRequestError" DOMException.isDocTypeNode(node) && parent.nodeType !== Node.DOCUMENT_NODE) {throw new DOMException(HIERARCHY_REQUEST_ERR, 'Unexpected node type ' + node.nodeType + ' for parent node type ' + parent.nodeType);}}/*** @private* Step 6 of the checks before inserting and before replacing a child are different.** @param {Document} parent the parent node to insert `node` into* @param {Node} node the node to insert* @param {Node | undefined} child the node that should become the `nextSibling` of `node`* @returns {Node}* @throws DOMException for several node combinations that would create a DOM that is not well-formed.* @throws DOMException if `child` is provided but is not a child of `parent`.* @see https://dom.spec.whatwg.org/#concept-node-ensure-pre-insertion-validity* @see https://dom.spec.whatwg.org/#concept-node-replace*/function assertPreInsertionValidityInDocument(parent, node, child) {var parentChildNodes = parent.childNodes || [];var nodeChildNodes = node.childNodes || [];// DocumentFragmentif (node.nodeType === Node.DOCUMENT_FRAGMENT_NODE) {var nodeChildElements = nodeChildNodes.filter(isElementNode);// If node has more than one element child or has a Text node child.if (nodeChildElements.length > 1 || find(nodeChildNodes, isTextNode)) {throw new DOMException(HIERARCHY_REQUEST_ERR, 'More than one element or text in fragment');}// Otherwise, if `node` has one element child and either `parent` has an element child,// `child` is a doctype, or `child` is non-null and a doctype is following `child`.if (nodeChildElements.length === 1 && !isElementInsertionPossible(parent, child)) {throw new DOMException(HIERARCHY_REQUEST_ERR, 'Element in fragment can not be inserted before doctype');}}// Elementif (isElementNode(node)) {// `parent` has an element child, `child` is a doctype,// or `child` is non-null and a doctype is following `child`.if (!isElementInsertionPossible(parent, child)) {throw new DOMException(HIERARCHY_REQUEST_ERR, 'Only one element can be added and only after doctype');}}// DocumentTypeif (isDocTypeNode(node)) {// `parent` has a doctype child,if (find(parentChildNodes, isDocTypeNode)) {throw new DOMException(HIERARCHY_REQUEST_ERR, 'Only one doctype is allowed');}var parentElementChild = find(parentChildNodes, isElementNode);// `child` is non-null and an element is preceding `child`,if (child && parentChildNodes.indexOf(parentElementChild) < parentChildNodes.indexOf(child)) {throw new DOMException(HIERARCHY_REQUEST_ERR, 'Doctype can only be inserted before an element');}// or `child` is null and `parent` has an element child.if (!child && parentElementChild) {throw new DOMException(HIERARCHY_REQUEST_ERR, 'Doctype can not be appended since element is present');}}}/*** @private* Step 6 of the checks before inserting and before replacing a child are different.** @param {Document} parent the parent 
node to insert `node` into* @param {Node} node the node to insert* @param {Node | undefined} child the node that should become the `nextSibling` of `node`* @returns {Node}* @throws DOMException for several node combinations that would create a DOM that is not well-formed.* @throws DOMException if `child` is provided but is not a child of `parent`.* @see https://dom.spec.whatwg.org/#concept-node-ensure-pre-insertion-validity* @see https://dom.spec.whatwg.org/#concept-node-replace*/function assertPreReplacementValidityInDocument(parent, node, child) {var parentChildNodes = parent.childNodes || [];var nodeChildNodes = node.childNodes || [];// DocumentFragmentif (node.nodeType === Node.DOCUMENT_FRAGMENT_NODE) {var nodeChildElements = nodeChildNodes.filter(isElementNode);// If `node` has more than one element child or has a Text node child.if (nodeChildElements.length > 1 || find(nodeChildNodes, isTextNode)) {throw new DOMException(HIERARCHY_REQUEST_ERR, 'More than one element or text in fragment');}// Otherwise, if `node` has one element child and either `parent` has an element child that is not `child` or a doctype is following `child`.if (nodeChildElements.length === 1 && !isElementReplacementPossible(parent, child)) {throw new DOMException(HIERARCHY_REQUEST_ERR, 'Element in fragment can not be inserted before doctype');}}// Elementif (isElementNode(node)) {// `parent` has an element child that is not `child` or a doctype is following `child`.if (!isElementReplacementPossible(parent, child)) {throw new DOMException(HIERARCHY_REQUEST_ERR, 'Only one element can be added and only after doctype');}}// DocumentTypeif (isDocTypeNode(node)) {function hasDoctypeChildThatIsNotChild(node) {return isDocTypeNode(node) && node !== child;}// `parent` has a doctype child that is not `child`,if (find(parentChildNodes, hasDoctypeChildThatIsNotChild)) {throw new DOMException(HIERARCHY_REQUEST_ERR, 'Only one doctype is allowed');}var parentElementChild = find(parentChildNodes, isElementNode);// or an element is preceding `child`.if (child && parentChildNodes.indexOf(parentElementChild) < parentChildNodes.indexOf(child)) {throw new DOMException(HIERARCHY_REQUEST_ERR, 'Doctype can only be inserted before an element');}}}/*** @private* @param {Node} parent the parent node to insert `node` into* @param {Node} node the node to insert* @param {Node=} child the node that should become the `nextSibling` of `node`* @returns {Node}* @throws DOMException for several node combinations that would create a DOM that is not well-formed.* @throws DOMException if `child` is provided but is not a child of `parent`.* @see https://dom.spec.whatwg.org/#concept-node-ensure-pre-insertion-validity*/function _insertBefore(parent, node, child, _inDocumentAssertion) {// To ensure pre-insertion validity of a node into a parent before a child, run these steps:assertPreInsertionValidity1to5(parent, node, child);// If parent is a document, and any of the statements below, switched on the interface node implements,// are true, then throw a "HierarchyRequestError" DOMException.if (parent.nodeType === Node.DOCUMENT_NODE) {(_inDocumentAssertion || assertPreInsertionValidityInDocument)(parent, node, child);}var cp = node.parentNode;if (cp) {cp.removeChild(node); //remove and update}if (node.nodeType === DOCUMENT_FRAGMENT_NODE) {var newFirst = node.firstChild;if (newFirst == null) {return node;}var newLast = node.lastChild;} else {newFirst = newLast = node;}var pre = child ? 
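/*
 * The code below re-links the sibling/parent pointers: `pre` becomes the node that will
 * precede the inserted range (`child.previousSibling`, or the current `parent.lastChild`
 * when appending), while `newFirst`/`newLast` bracket what is being inserted (all children
 * of a DocumentFragment, or the single node itself). Minimal sketch (sample variables):
 *
 *   var p = doc.createElement('p');
 *   p.appendChild(doc.createTextNode('b'));
 *   p.insertBefore(doc.createTextNode('a'), p.firstChild); // 'a' now precedes 'b'
 */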
child.previousSibling : parent.lastChild;newFirst.previousSibling = pre;newLast.nextSibling = child;if (pre) {pre.nextSibling = newFirst;} else {parent.firstChild = newFirst;}if (child == null) {parent.lastChild = newLast;} else {child.previousSibling = newLast;}do {newFirst.parentNode = parent;} while (newFirst !== newLast && (newFirst = newFirst.nextSibling));_onUpdateChild(parent.ownerDocument || parent, parent);//console.log(parent.lastChild.nextSibling == null)if (node.nodeType == DOCUMENT_FRAGMENT_NODE) {node.firstChild = node.lastChild = null;}return node;}/*** Appends `newChild` to `parentNode`.* If `newChild` is already connected to a `parentNode` it is first removed from it.** @see https://github.com/xmldom/xmldom/issues/135* @see https://github.com/xmldom/xmldom/issues/145* @param {Node} parentNode* @param {Node} newChild* @returns {Node}* @private*/function _appendSingleChild(parentNode, newChild) {if (newChild.parentNode) {newChild.parentNode.removeChild(newChild);}newChild.parentNode = parentNode;newChild.previousSibling = parentNode.lastChild;newChild.nextSibling = null;if (newChild.previousSibling) {newChild.previousSibling.nextSibling = newChild;} else {parentNode.firstChild = newChild;}parentNode.lastChild = newChild;_onUpdateChild(parentNode.ownerDocument, parentNode, newChild);return newChild;}Document.prototype = {//implementation : null,nodeName: '#document',nodeType: DOCUMENT_NODE,/*** The DocumentType node of the document.** @readonly* @type DocumentType*/doctype: null,documentElement: null,_inc: 1,insertBefore: function (newChild, refChild) {//raisesif (newChild.nodeType == DOCUMENT_FRAGMENT_NODE) {var child = newChild.firstChild;while (child) {var next = child.nextSibling;this.insertBefore(child, refChild);child = next;}return newChild;}_insertBefore(this, newChild, refChild);newChild.ownerDocument = this;if (this.documentElement === null && newChild.nodeType === ELEMENT_NODE) {this.documentElement = newChild;}return newChild;},removeChild: function (oldChild) {if (this.documentElement == oldChild) {this.documentElement = null;}return _removeChild(this, oldChild);},replaceChild: function (newChild, oldChild) {//raises_insertBefore(this, newChild, oldChild, assertPreReplacementValidityInDocument);newChild.ownerDocument = this;if (oldChild) {this.removeChild(oldChild);}if (isElementNode(newChild)) {this.documentElement = newChild;}},// Introduced in DOM Level 2:importNode: function (importedNode, deep) {return importNode(this, importedNode, deep);},// Introduced in DOM Level 2:getElementById: function (id) {var rtv = null;_visitNode(this.documentElement, function (node) {if (node.nodeType == ELEMENT_NODE) {if (node.getAttribute('id') == id) {rtv = node;return true;}}});return rtv;},/*** The `getElementsByClassName` method of `Document` interface returns an array-like object* of all child elements which have **all** of the given class name(s).** Returns an empty list if `classeNames` is an empty string or only contains HTML white space characters.*** Warning: This is a live LiveNodeList.* Changes in the DOM will reflect in the array as the changes occur.* If an element selected by this array no longer qualifies for the selector,* it will automatically be removed. 
Be aware of this for iteration purposes.** @param {string} classNames is a string representing the class name(s) to match; multiple class names are separated by (ASCII-)whitespace** @see https://developer.mozilla.org/en-US/docs/Web/API/Document/getElementsByClassName* @see https://dom.spec.whatwg.org/#concept-getelementsbyclassname*/getElementsByClassName: function (classNames) {var classNamesSet = toOrderedSet(classNames);return new LiveNodeList(this, function (base) {var ls = [];if (classNamesSet.length > 0) {_visitNode(base.documentElement, function (node) {if (node !== base && node.nodeType === ELEMENT_NODE) {var nodeClassNames = node.getAttribute('class');// can be null if the attribute does not existif (nodeClassNames) {// before splitting and iterating just compare them for the most common casevar matches = classNames === nodeClassNames;if (!matches) {var nodeClassNamesSet = toOrderedSet(nodeClassNames);matches = classNamesSet.every(arrayIncludes(nodeClassNamesSet));}if (matches) {ls.push(node);}}}});}return ls;});},//document factory method:createElement: function (tagName) {var node = new Element();node.ownerDocument = this;node.nodeName = tagName;node.tagName = tagName;node.localName = tagName;node.childNodes = new NodeList();var attrs = node.attributes = new NamedNodeMap();attrs._ownerElement = node;return node;},createDocumentFragment: function () {var node = new DocumentFragment();node.ownerDocument = this;node.childNodes = new NodeList();return node;},createTextNode: function (data) {var node = new Text();node.ownerDocument = this;node.appendData(data);return node;},createComment: function (data) {var node = new Comment();node.ownerDocument = this;node.appendData(data);return node;},createCDATASection: function (data) {var node = new CDATASection();node.ownerDocument = this;node.appendData(data);return node;},createProcessingInstruction: function (target, data) {var node = new ProcessingInstruction();node.ownerDocument = this;node.tagName = node.nodeName = node.target = target;node.nodeValue = node.data = data;return node;},createAttribute: function (name) {var node = new Attr();node.ownerDocument = this;node.name = name;node.nodeName = name;node.localName = name;node.specified = true;return node;},createEntityReference: function (name) {var node = new EntityReference();node.ownerDocument = this;node.nodeName = name;return node;},// Introduced in DOM Level 2:createElementNS: function (namespaceURI, qualifiedName) {var node = new Element();var pl = qualifiedName.split(':');var attrs = node.attributes = new NamedNodeMap();node.childNodes = new NodeList();node.ownerDocument = this;node.nodeName = qualifiedName;node.tagName = qualifiedName;node.namespaceURI = namespaceURI;if (pl.length == 2) {node.prefix = pl[0];node.localName = pl[1];} else {//el.prefix = null;node.localName = qualifiedName;}attrs._ownerElement = node;return node;},// Introduced in DOM Level 2:createAttributeNS: function (namespaceURI, qualifiedName) {var node = new Attr();var pl = qualifiedName.split(':');node.ownerDocument = this;node.nodeName = qualifiedName;node.name = qualifiedName;node.namespaceURI = namespaceURI;node.specified = true;if (pl.length == 2) {node.prefix = pl[0];node.localName = pl[1];} else {//el.prefix = null;node.localName = qualifiedName;}return node;}};_extends(Document, Node);function Element() {this._nsMap = {};}Element.prototype = {nodeType: ELEMENT_NODE,hasAttribute: function (name) {return this.getAttributeNode(name) != null;},getAttribute: function (name) {var attr = 
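/*
 * Quick round trip over the Document factory methods above and the attribute helpers
 * on Element.prototype below (sample variables only):
 *
 *   var el = doc.createElement('source');
 *   el.setAttribute('src', 'movie.mp4');
 *   el.getAttribute('src');   // 'movie.mp4'
 *   el.hasAttribute('type');  // false
 *   el.getAttribute('type');  // '' - getAttribute falls back to '' when the Attr is missing
 */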
this.getAttributeNode(name);return attr && attr.value || '';},getAttributeNode: function (name) {return this.attributes.getNamedItem(name);},setAttribute: function (name, value) {var attr = this.ownerDocument.createAttribute(name);attr.value = attr.nodeValue = "" + value;this.setAttributeNode(attr);},removeAttribute: function (name) {var attr = this.getAttributeNode(name);attr && this.removeAttributeNode(attr);},//four real opeartion methodappendChild: function (newChild) {if (newChild.nodeType === DOCUMENT_FRAGMENT_NODE) {return this.insertBefore(newChild, null);} else {return _appendSingleChild(this, newChild);}},setAttributeNode: function (newAttr) {return this.attributes.setNamedItem(newAttr);},setAttributeNodeNS: function (newAttr) {return this.attributes.setNamedItemNS(newAttr);},removeAttributeNode: function (oldAttr) {//console.log(this == oldAttr.ownerElement)return this.attributes.removeNamedItem(oldAttr.nodeName);},//get real attribute name,and remove it by removeAttributeNoderemoveAttributeNS: function (namespaceURI, localName) {var old = this.getAttributeNodeNS(namespaceURI, localName);old && this.removeAttributeNode(old);},hasAttributeNS: function (namespaceURI, localName) {return this.getAttributeNodeNS(namespaceURI, localName) != null;},getAttributeNS: function (namespaceURI, localName) {var attr = this.getAttributeNodeNS(namespaceURI, localName);return attr && attr.value || '';},setAttributeNS: function (namespaceURI, qualifiedName, value) {var attr = this.ownerDocument.createAttributeNS(namespaceURI, qualifiedName);attr.value = attr.nodeValue = "" + value;this.setAttributeNode(attr);},getAttributeNodeNS: function (namespaceURI, localName) {return this.attributes.getNamedItemNS(namespaceURI, localName);},getElementsByTagName: function (tagName) {return new LiveNodeList(this, function (base) {var ls = [];_visitNode(base, function (node) {if (node !== base && node.nodeType == ELEMENT_NODE && (tagName === '*' || node.tagName == tagName)) {ls.push(node);}});return ls;});},getElementsByTagNameNS: function (namespaceURI, localName) {return new LiveNodeList(this, function (base) {var ls = [];_visitNode(base, function (node) {if (node !== base && node.nodeType === ELEMENT_NODE && (namespaceURI === '*' || node.namespaceURI === namespaceURI) && (localName === '*' || node.localName == localName)) {ls.push(node);}});return ls;});}};Document.prototype.getElementsByTagName = Element.prototype.getElementsByTagName;Document.prototype.getElementsByTagNameNS = Element.prototype.getElementsByTagNameNS;_extends(Element, Node);function Attr() {}Attr.prototype.nodeType = ATTRIBUTE_NODE;_extends(Attr, Node);function CharacterData() {}CharacterData.prototype = {data: '',substringData: function (offset, count) {return this.data.substring(offset, offset + count);},appendData: function (text) {text = this.data + text;this.nodeValue = this.data = text;this.length = text.length;},insertData: function (offset, text) {this.replaceData(offset, 0, text);},appendChild: function (newChild) {throw new Error(ExceptionMessage[HIERARCHY_REQUEST_ERR]);},deleteData: function (offset, count) {this.replaceData(offset, count, "");},replaceData: function (offset, count, text) {var start = this.data.substring(0, offset);var end = this.data.substring(offset + count);text = start + text + end;this.nodeValue = this.data = text;this.length = text.length;}};_extends(CharacterData, Node);function Text() {}Text.prototype = {nodeName: "#text",nodeType: TEXT_NODE,splitText: function (offset) {var text = this.data;var newText = 
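/*
 * splitText cuts this Text node in two at `offset`: the code below keeps the first
 * `offset` characters here and moves the remainder into a new Text node inserted right
 * after this one (when a parent exists). Sketch (`p` is any parent element, sample names):
 *
 *   var t = doc.createTextNode('hello world');
 *   p.appendChild(t);
 *   var rest = t.splitText(5); // t.data === 'hello', rest.data === ' world'
 *   // rest is now t.nextSibling inside p
 */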
text.substring(offset);text = text.substring(0, offset);this.data = this.nodeValue = text;this.length = text.length;var newNode = this.ownerDocument.createTextNode(newText);if (this.parentNode) {this.parentNode.insertBefore(newNode, this.nextSibling);}return newNode;}};_extends(Text, CharacterData);function Comment() {}Comment.prototype = {nodeName: "#comment",nodeType: COMMENT_NODE};_extends(Comment, CharacterData);function CDATASection() {}CDATASection.prototype = {nodeName: "#cdata-section",nodeType: CDATA_SECTION_NODE};_extends(CDATASection, CharacterData);function DocumentType() {}DocumentType.prototype.nodeType = DOCUMENT_TYPE_NODE;_extends(DocumentType, Node);function Notation() {}Notation.prototype.nodeType = NOTATION_NODE;_extends(Notation, Node);function Entity() {}Entity.prototype.nodeType = ENTITY_NODE;_extends(Entity, Node);function EntityReference() {}EntityReference.prototype.nodeType = ENTITY_REFERENCE_NODE;_extends(EntityReference, Node);function DocumentFragment() {}DocumentFragment.prototype.nodeName = "#document-fragment";DocumentFragment.prototype.nodeType = DOCUMENT_FRAGMENT_NODE;_extends(DocumentFragment, Node);function ProcessingInstruction() {}ProcessingInstruction.prototype.nodeType = PROCESSING_INSTRUCTION_NODE;_extends(ProcessingInstruction, Node);function XMLSerializer() {}XMLSerializer.prototype.serializeToString = function (node, isHtml, nodeFilter) {return nodeSerializeToString.call(node, isHtml, nodeFilter);};Node.prototype.toString = nodeSerializeToString;function nodeSerializeToString(isHtml, nodeFilter) {var buf = [];var refNode = this.nodeType == 9 && this.documentElement || this;var prefix = refNode.prefix;var uri = refNode.namespaceURI;if (uri && prefix == null) {//console.log(prefix)var prefix = refNode.lookupPrefix(uri);if (prefix == null) {//isHTML = true;var visibleNamespaces = [{namespace: uri,prefix: null}//{namespace:uri,prefix:''}];}}serializeToString(this, buf, isHtml, nodeFilter, visibleNamespaces);//console.log('###',this.nodeType,uri,prefix,buf.join(''))return buf.join('');}function needNamespaceDefine(node, isHTML, visibleNamespaces) {var prefix = node.prefix || '';var uri = node.namespaceURI;// According to [Namespaces in XML 1.0](https://www.w3.org/TR/REC-xml-names/#ns-using) ,// and more specifically https://www.w3.org/TR/REC-xml-names/#nsc-NoPrefixUndecl :// > In a namespace declaration for a prefix [...], the attribute value MUST NOT be empty.// in a similar manner [Namespaces in XML 1.1](https://www.w3.org/TR/xml-names11/#ns-using)// and more specifically https://www.w3.org/TR/xml-names11/#nsc-NSDeclared :// > [...] Furthermore, the attribute value [...] 
must not be an empty string.// so serializing empty namespace value like xmlns:ds="" would produce an invalid XML document.if (!uri) {return false;}if (prefix === "xml" && uri === NAMESPACE$2.XML || uri === NAMESPACE$2.XMLNS) {return false;}var i = visibleNamespaces.length;while (i--) {var ns = visibleNamespaces[i];// get namespace prefixif (ns.prefix === prefix) {return ns.namespace !== uri;}}return true;}/*** Well-formed constraint: No < in Attribute Values* > The replacement text of any entity referred to directly or indirectly* > in an attribute value must not contain a <.* @see https://www.w3.org/TR/xml11/#CleanAttrVals* @see https://www.w3.org/TR/xml11/#NT-AttValue** Literal whitespace other than space that appear in attribute values* are serialized as their entity references, so they will be preserved.* (In contrast to whitespace literals in the input which are normalized to spaces)* @see https://www.w3.org/TR/xml11/#AVNormalize* @see https://w3c.github.io/DOM-Parsing/#serializing-an-element-s-attributes*/function addSerializedAttribute(buf, qualifiedName, value) {buf.push(' ', qualifiedName, '="', value.replace(/[<>&"\t\n\r]/g, _xmlEncoder), '"');}function serializeToString(node, buf, isHTML, nodeFilter, visibleNamespaces) {if (!visibleNamespaces) {visibleNamespaces = [];}if (nodeFilter) {node = nodeFilter(node);if (node) {if (typeof node == 'string') {buf.push(node);return;}} else {return;}//buf.sort.apply(attrs, attributeSorter);}switch (node.nodeType) {case ELEMENT_NODE:var attrs = node.attributes;var len = attrs.length;var child = node.firstChild;var nodeName = node.tagName;isHTML = NAMESPACE$2.isHTML(node.namespaceURI) || isHTML;var prefixedNodeName = nodeName;if (!isHTML && !node.prefix && node.namespaceURI) {var defaultNS;// lookup current default ns from `xmlns` attributefor (var ai = 0; ai < attrs.length; ai++) {if (attrs.item(ai).name === 'xmlns') {defaultNS = attrs.item(ai).value;break;}}if (!defaultNS) {// lookup current default ns in visibleNamespacesfor (var nsi = visibleNamespaces.length - 1; nsi >= 0; nsi--) {var namespace = visibleNamespaces[nsi];if (namespace.prefix === '' && namespace.namespace === node.namespaceURI) {defaultNS = namespace.namespace;break;}}}if (defaultNS !== node.namespaceURI) {for (var nsi = visibleNamespaces.length - 1; nsi >= 0; nsi--) {var namespace = visibleNamespaces[nsi];if (namespace.namespace === node.namespaceURI) {if (namespace.prefix) {prefixedNodeName = namespace.prefix + ':' + nodeName;}break;}}}}buf.push('<', prefixedNodeName);for (var i = 0; i < len; i++) {// add namespaces for attributesvar attr = attrs.item(i);if (attr.prefix == 'xmlns') {visibleNamespaces.push({prefix: attr.localName,namespace: attr.value});} else if (attr.nodeName == 'xmlns') {visibleNamespaces.push({prefix: '',namespace: attr.value});}}for (var i = 0; i < len; i++) {var attr = attrs.item(i);if (needNamespaceDefine(attr, isHTML, visibleNamespaces)) {var prefix = attr.prefix || '';var uri = attr.namespaceURI;addSerializedAttribute(buf, prefix ? 'xmlns:' + prefix : "xmlns", uri);visibleNamespaces.push({prefix: prefix,namespace: uri});}serializeToString(attr, buf, isHTML, nodeFilter, visibleNamespaces);}// add namespace for current nodeif (nodeName === prefixedNodeName && needNamespaceDefine(node, isHTML, visibleNamespaces)) {var prefix = node.prefix || '';var uri = node.namespaceURI;addSerializedAttribute(buf, prefix ? 
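/*
 * This branch writes an xmlns / xmlns:prefix declaration for the element itself when
 * needNamespaceDefine() decided one is required, i.e. the URI is not the reserved
 * xml/xmlns namespace and no identical prefix/URI pair is already in `visibleNamespaces`.
 * Rough illustration: a root created with createElementNS('urn:example', 'x:root')
 * serializes as <x:root xmlns:x="urn:example"/>, while a descendant created with the
 * same prefix and URI is written as plain <x:child/> because the pushed declaration
 * stays visible for its subtree.
 */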
'xmlns:' + prefix : "xmlns", uri);visibleNamespaces.push({prefix: prefix,namespace: uri});}if (child || isHTML && !/^(?:meta|link|img|br|hr|input)$/i.test(nodeName)) {buf.push('>');//if is cdata child nodeif (isHTML && /^script$/i.test(nodeName)) {while (child) {if (child.data) {buf.push(child.data);} else {serializeToString(child, buf, isHTML, nodeFilter, visibleNamespaces.slice());}child = child.nextSibling;}} else {while (child) {serializeToString(child, buf, isHTML, nodeFilter, visibleNamespaces.slice());child = child.nextSibling;}}buf.push('</', prefixedNodeName, '>');} else {buf.push('/>');}// remove added visible namespaces//visibleNamespaces.length = startVisibleNamespaces;return;case DOCUMENT_NODE:case DOCUMENT_FRAGMENT_NODE:var child = node.firstChild;while (child) {serializeToString(child, buf, isHTML, nodeFilter, visibleNamespaces.slice());child = child.nextSibling;}return;case ATTRIBUTE_NODE:return addSerializedAttribute(buf, node.name, node.value);case TEXT_NODE:/*** The ampersand character (&) and the left angle bracket (<) must not appear in their literal form,* except when used as markup delimiters, or within a comment, a processing instruction, or a CDATA section.* If they are needed elsewhere, they must be escaped using either numeric character references or the strings* `&` and `<` respectively.* The right angle bracket (>) may be represented using the string " > ", and must, for compatibility,* be escaped using either `>` or a character reference when it appears in the string `]]>` in content,* when that string is not marking the end of a CDATA section.** In the content of elements, character data is any string of characters* which does not contain the start-delimiter of any markup* and does not include the CDATA-section-close delimiter, `]]>`.** @see https://www.w3.org/TR/xml/#NT-CharData* @see https://w3c.github.io/DOM-Parsing/#xml-serializing-a-text-node*/return buf.push(node.data.replace(/[<&>]/g, _xmlEncoder));case CDATA_SECTION_NODE:return buf.push('<![CDATA[', node.data, ']]>');case COMMENT_NODE:return buf.push("<!--", node.data, "-->");case DOCUMENT_TYPE_NODE:var pubid = node.publicId;var sysid = node.systemId;buf.push('<!DOCTYPE ', node.name);if (pubid) {buf.push(' PUBLIC ', pubid);if (sysid && sysid != '.') {buf.push(' ', sysid);}buf.push('>');} else if (sysid && sysid != '.') {buf.push(' SYSTEM ', sysid, '>');} else {var sub = node.internalSubset;if (sub) {buf.push(" [", sub, "]");}buf.push(">");}return;case PROCESSING_INSTRUCTION_NODE:return buf.push("<?", node.target, " ", node.data, "?>");case ENTITY_REFERENCE_NODE:return buf.push('&', node.nodeName, ';');//case ENTITY_NODE://case NOTATION_NODE:default:buf.push('??', node.nodeName);}}function importNode(doc, node, deep) {var node2;switch (node.nodeType) {case ELEMENT_NODE:node2 = node.cloneNode(false);node2.ownerDocument = doc;//var attrs = node2.attributes;//var len = attrs.length;//for(var i=0;i<len;i++){//node2.setAttributeNodeNS(importNode(doc,attrs.item(i),deep));//}case DOCUMENT_FRAGMENT_NODE:break;case ATTRIBUTE_NODE:deep = true;break;//case ENTITY_REFERENCE_NODE://case PROCESSING_INSTRUCTION_NODE:////case TEXT_NODE://case CDATA_SECTION_NODE://case COMMENT_NODE:// deep = false;// break;//case DOCUMENT_NODE://case DOCUMENT_TYPE_NODE://cannot be imported.//case ENTITY_NODE://case NOTATION_NODE://can not hit in level3//default:throw e;}if (!node2) {node2 = node.cloneNode(false); //false}node2.ownerDocument = doc;node2.parentNode = null;if (deep) {var child = node.firstChild;while (child) 
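/*
 * Serialization entry points defined above, shown on a tiny tree (sample variables):
 *
 *   var el = doc.createElement('track');
 *   el.setAttribute('kind', 'captions');
 *   new XMLSerializer().serializeToString(el); // '<track kind="captions"/>'
 *   el.toString();                             // same string - Node.prototype.toString
 *                                              // delegates to the same serializer
 */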
{node2.appendChild(importNode(doc, child, deep));child = child.nextSibling;}}return node2;}////var _relationMap = {firstChild:1,lastChild:1,previousSibling:1,nextSibling:1,// attributes:1,childNodes:1,parentNode:1,documentElement:1,doctype,};function cloneNode(doc, node, deep) {var node2 = new node.constructor();for (var n in node) {if (Object.prototype.hasOwnProperty.call(node, n)) {var v = node[n];if (typeof v != "object") {if (v != node2[n]) {node2[n] = v;}}}}if (node.childNodes) {node2.childNodes = new NodeList();}node2.ownerDocument = doc;switch (node2.nodeType) {case ELEMENT_NODE:var attrs = node.attributes;var attrs2 = node2.attributes = new NamedNodeMap();var len = attrs.length;attrs2._ownerElement = node2;for (var i = 0; i < len; i++) {node2.setAttributeNode(cloneNode(doc, attrs.item(i), true));}break;case ATTRIBUTE_NODE:deep = true;}if (deep) {var child = node.firstChild;while (child) {node2.appendChild(cloneNode(doc, child, deep));child = child.nextSibling;}}return node2;}function __set__(object, key, value) {object[key] = value;}//do dynamictry {if (Object.defineProperty) {Object.defineProperty(LiveNodeList.prototype, 'length', {get: function () {_updateLiveList(this);return this.$$length;}});Object.defineProperty(Node.prototype, 'textContent', {get: function () {return getTextContent(this);},set: function (data) {switch (this.nodeType) {case ELEMENT_NODE:case DOCUMENT_FRAGMENT_NODE:while (this.firstChild) {this.removeChild(this.firstChild);}if (data || String(data)) {this.appendChild(this.ownerDocument.createTextNode(data));}break;default:this.data = data;this.value = data;this.nodeValue = data;}}});function getTextContent(node) {switch (node.nodeType) {case ELEMENT_NODE:case DOCUMENT_FRAGMENT_NODE:var buf = [];node = node.firstChild;while (node) {if (node.nodeType !== 7 && node.nodeType !== 8) {buf.push(getTextContent(node));}node = node.nextSibling;}return buf.join('');default:return node.nodeValue;}}__set__ = function (object, key, value) {//console.log(value)object['$$' + key] = value;};}} catch (e) {//ie8}//if(typeof require == 'function'){var DocumentType_1 = DocumentType;var DOMException_1 = DOMException;var DOMImplementation_1 = DOMImplementation$1;var Element_1 = Element;var Node_1 = Node;var NodeList_1 = NodeList;var XMLSerializer_1 = XMLSerializer;//}var dom = {DocumentType: DocumentType_1,DOMException: DOMException_1,DOMImplementation: DOMImplementation_1,Element: Element_1,Node: Node_1,NodeList: NodeList_1,XMLSerializer: XMLSerializer_1};var entities = createCommonjsModule(function (module, exports) {var freeze = conventions.freeze;/*** The entities that are predefined in every XML document.** @see https://www.w3.org/TR/2006/REC-xml11-20060816/#sec-predefined-ent W3C XML 1.1* @see https://www.w3.org/TR/2008/REC-xml-20081126/#sec-predefined-ent W3C XML 1.0* @see https://en.wikipedia.org/wiki/List_of_XML_and_HTML_character_entity_references#Predefined_entities_in_XML Wikipedia*/exports.XML_ENTITIES = freeze({amp: '&',apos: "'",gt: '>',lt: '<',quot: '"'});/*** A map of all entities that are detected in an HTML document.* They contain all entries from `XML_ENTITIES`.** @see XML_ENTITIES* @see DOMParser.parseFromString* @see DOMImplementation.prototype.createHTMLDocument* @see https://html.spec.whatwg.org/#named-character-references WHATWG HTML(5) Spec* @see https://html.spec.whatwg.org/entities.json JSON* @see https://www.w3.org/TR/xml-entity-names/ W3C XML Entity Names* @see https://www.w3.org/TR/html4/sgml/entities.html W3C HTML4/SGML* @see 
https://en.wikipedia.org/wiki/List_of_XML_and_HTML_character_entity_references#Character_entity_references_in_HTML Wikipedia (HTML)* @see https://en.wikipedia.org/wiki/List_of_XML_and_HTML_character_entity_references#Entities_representing_special_characters_in_XHTML Wikpedia (XHTML)*/exports.HTML_ENTITIES = freeze({Aacute: '\u00C1',aacute: '\u00E1',Abreve: '\u0102',abreve: '\u0103',ac: '\u223E',acd: '\u223F',acE: '\u223E\u0333',Acirc: '\u00C2',acirc: '\u00E2',acute: '\u00B4',Acy: '\u0410',acy: '\u0430',AElig: '\u00C6',aelig: '\u00E6',af: '\u2061',Afr: '\uD835\uDD04',afr: '\uD835\uDD1E',Agrave: '\u00C0',agrave: '\u00E0',alefsym: '\u2135',aleph: '\u2135',Alpha: '\u0391',alpha: '\u03B1',Amacr: '\u0100',amacr: '\u0101',amalg: '\u2A3F',AMP: '\u0026',amp: '\u0026',And: '\u2A53',and: '\u2227',andand: '\u2A55',andd: '\u2A5C',andslope: '\u2A58',andv: '\u2A5A',ang: '\u2220',ange: '\u29A4',angle: '\u2220',angmsd: '\u2221',angmsdaa: '\u29A8',angmsdab: '\u29A9',angmsdac: '\u29AA',angmsdad: '\u29AB',angmsdae: '\u29AC',angmsdaf: '\u29AD',angmsdag: '\u29AE',angmsdah: '\u29AF',angrt: '\u221F',angrtvb: '\u22BE',angrtvbd: '\u299D',angsph: '\u2222',angst: '\u00C5',angzarr: '\u237C',Aogon: '\u0104',aogon: '\u0105',Aopf: '\uD835\uDD38',aopf: '\uD835\uDD52',ap: '\u2248',apacir: '\u2A6F',apE: '\u2A70',ape: '\u224A',apid: '\u224B',apos: '\u0027',ApplyFunction: '\u2061',approx: '\u2248',approxeq: '\u224A',Aring: '\u00C5',aring: '\u00E5',Ascr: '\uD835\uDC9C',ascr: '\uD835\uDCB6',Assign: '\u2254',ast: '\u002A',asymp: '\u2248',asympeq: '\u224D',Atilde: '\u00C3',atilde: '\u00E3',Auml: '\u00C4',auml: '\u00E4',awconint: '\u2233',awint: '\u2A11',backcong: '\u224C',backepsilon: '\u03F6',backprime: '\u2035',backsim: '\u223D',backsimeq: '\u22CD',Backslash: '\u2216',Barv: '\u2AE7',barvee: '\u22BD',Barwed: '\u2306',barwed: '\u2305',barwedge: '\u2305',bbrk: '\u23B5',bbrktbrk: '\u23B6',bcong: '\u224C',Bcy: '\u0411',bcy: '\u0431',bdquo: '\u201E',becaus: '\u2235',Because: '\u2235',because: '\u2235',bemptyv: '\u29B0',bepsi: '\u03F6',bernou: '\u212C',Bernoullis: '\u212C',Beta: '\u0392',beta: '\u03B2',beth: '\u2136',between: '\u226C',Bfr: '\uD835\uDD05',bfr: '\uD835\uDD1F',bigcap: '\u22C2',bigcirc: '\u25EF',bigcup: '\u22C3',bigodot: '\u2A00',bigoplus: '\u2A01',bigotimes: '\u2A02',bigsqcup: '\u2A06',bigstar: '\u2605',bigtriangledown: '\u25BD',bigtriangleup: '\u25B3',biguplus: '\u2A04',bigvee: '\u22C1',bigwedge: '\u22C0',bkarow: '\u290D',blacklozenge: '\u29EB',blacksquare: '\u25AA',blacktriangle: '\u25B4',blacktriangledown: '\u25BE',blacktriangleleft: '\u25C2',blacktriangleright: '\u25B8',blank: '\u2423',blk12: '\u2592',blk14: '\u2591',blk34: '\u2593',block: '\u2588',bne: '\u003D\u20E5',bnequiv: '\u2261\u20E5',bNot: '\u2AED',bnot: '\u2310',Bopf: '\uD835\uDD39',bopf: '\uD835\uDD53',bot: '\u22A5',bottom: '\u22A5',bowtie: '\u22C8',boxbox: '\u29C9',boxDL: '\u2557',boxDl: '\u2556',boxdL: '\u2555',boxdl: '\u2510',boxDR: '\u2554',boxDr: '\u2553',boxdR: '\u2552',boxdr: '\u250C',boxH: '\u2550',boxh: '\u2500',boxHD: '\u2566',boxHd: '\u2564',boxhD: '\u2565',boxhd: '\u252C',boxHU: '\u2569',boxHu: '\u2567',boxhU: '\u2568',boxhu: '\u2534',boxminus: '\u229F',boxplus: '\u229E',boxtimes: '\u22A0',boxUL: '\u255D',boxUl: '\u255C',boxuL: '\u255B',boxul: '\u2518',boxUR: '\u255A',boxUr: '\u2559',boxuR: '\u2558',boxur: '\u2514',boxV: '\u2551',boxv: '\u2502',boxVH: '\u256C',boxVh: '\u256B',boxvH: '\u256A',boxvh: '\u253C',boxVL: '\u2563',boxVl: '\u2562',boxvL: '\u2561',boxvl: '\u2524',boxVR: '\u2560',boxVr: '\u255F',boxvR: '\u255E',boxvr: '\u251C',bprime: 
'\u2035',Breve: '\u02D8',breve: '\u02D8',brvbar: '\u00A6',Bscr: '\u212C',bscr: '\uD835\uDCB7',bsemi: '\u204F',bsim: '\u223D',bsime: '\u22CD',bsol: '\u005C',bsolb: '\u29C5',bsolhsub: '\u27C8',bull: '\u2022',bullet: '\u2022',bump: '\u224E',bumpE: '\u2AAE',bumpe: '\u224F',Bumpeq: '\u224E',bumpeq: '\u224F',Cacute: '\u0106',cacute: '\u0107',Cap: '\u22D2',cap: '\u2229',capand: '\u2A44',capbrcup: '\u2A49',capcap: '\u2A4B',capcup: '\u2A47',capdot: '\u2A40',CapitalDifferentialD: '\u2145',caps: '\u2229\uFE00',caret: '\u2041',caron: '\u02C7',Cayleys: '\u212D',ccaps: '\u2A4D',Ccaron: '\u010C',ccaron: '\u010D',Ccedil: '\u00C7',ccedil: '\u00E7',Ccirc: '\u0108',ccirc: '\u0109',Cconint: '\u2230',ccups: '\u2A4C',ccupssm: '\u2A50',Cdot: '\u010A',cdot: '\u010B',cedil: '\u00B8',Cedilla: '\u00B8',cemptyv: '\u29B2',cent: '\u00A2',CenterDot: '\u00B7',centerdot: '\u00B7',Cfr: '\u212D',cfr: '\uD835\uDD20',CHcy: '\u0427',chcy: '\u0447',check: '\u2713',checkmark: '\u2713',Chi: '\u03A7',chi: '\u03C7',cir: '\u25CB',circ: '\u02C6',circeq: '\u2257',circlearrowleft: '\u21BA',circlearrowright: '\u21BB',circledast: '\u229B',circledcirc: '\u229A',circleddash: '\u229D',CircleDot: '\u2299',circledR: '\u00AE',circledS: '\u24C8',CircleMinus: '\u2296',CirclePlus: '\u2295',CircleTimes: '\u2297',cirE: '\u29C3',cire: '\u2257',cirfnint: '\u2A10',cirmid: '\u2AEF',cirscir: '\u29C2',ClockwiseContourIntegral: '\u2232',CloseCurlyDoubleQuote: '\u201D',CloseCurlyQuote: '\u2019',clubs: '\u2663',clubsuit: '\u2663',Colon: '\u2237',colon: '\u003A',Colone: '\u2A74',colone: '\u2254',coloneq: '\u2254',comma: '\u002C',commat: '\u0040',comp: '\u2201',compfn: '\u2218',complement: '\u2201',complexes: '\u2102',cong: '\u2245',congdot: '\u2A6D',Congruent: '\u2261',Conint: '\u222F',conint: '\u222E',ContourIntegral: '\u222E',Copf: '\u2102',copf: '\uD835\uDD54',coprod: '\u2210',Coproduct: '\u2210',COPY: '\u00A9',copy: '\u00A9',copysr: '\u2117',CounterClockwiseContourIntegral: '\u2233',crarr: '\u21B5',Cross: '\u2A2F',cross: '\u2717',Cscr: '\uD835\uDC9E',cscr: '\uD835\uDCB8',csub: '\u2ACF',csube: '\u2AD1',csup: '\u2AD0',csupe: '\u2AD2',ctdot: '\u22EF',cudarrl: '\u2938',cudarrr: '\u2935',cuepr: '\u22DE',cuesc: '\u22DF',cularr: '\u21B6',cularrp: '\u293D',Cup: '\u22D3',cup: '\u222A',cupbrcap: '\u2A48',CupCap: '\u224D',cupcap: '\u2A46',cupcup: '\u2A4A',cupdot: '\u228D',cupor: '\u2A45',cups: '\u222A\uFE00',curarr: '\u21B7',curarrm: '\u293C',curlyeqprec: '\u22DE',curlyeqsucc: '\u22DF',curlyvee: '\u22CE',curlywedge: '\u22CF',curren: '\u00A4',curvearrowleft: '\u21B6',curvearrowright: '\u21B7',cuvee: '\u22CE',cuwed: '\u22CF',cwconint: '\u2232',cwint: '\u2231',cylcty: '\u232D',Dagger: '\u2021',dagger: '\u2020',daleth: '\u2138',Darr: '\u21A1',dArr: '\u21D3',darr: '\u2193',dash: '\u2010',Dashv: '\u2AE4',dashv: '\u22A3',dbkarow: '\u290F',dblac: '\u02DD',Dcaron: '\u010E',dcaron: '\u010F',Dcy: '\u0414',dcy: '\u0434',DD: '\u2145',dd: '\u2146',ddagger: '\u2021',ddarr: '\u21CA',DDotrahd: '\u2911',ddotseq: '\u2A77',deg: '\u00B0',Del: '\u2207',Delta: '\u0394',delta: '\u03B4',demptyv: '\u29B1',dfisht: '\u297F',Dfr: '\uD835\uDD07',dfr: '\uD835\uDD21',dHar: '\u2965',dharl: '\u21C3',dharr: '\u21C2',DiacriticalAcute: '\u00B4',DiacriticalDot: '\u02D9',DiacriticalDoubleAcute: '\u02DD',DiacriticalGrave: '\u0060',DiacriticalTilde: '\u02DC',diam: '\u22C4',Diamond: '\u22C4',diamond: '\u22C4',diamondsuit: '\u2666',diams: '\u2666',die: '\u00A8',DifferentialD: '\u2146',digamma: '\u03DD',disin: '\u22F2',div: '\u00F7',divide: '\u00F7',divideontimes: '\u22C7',divonx: '\u22C7',DJcy: 
'\u0402',djcy: '\u0452',dlcorn: '\u231E',dlcrop: '\u230D',dollar: '\u0024',Dopf: '\uD835\uDD3B',dopf: '\uD835\uDD55',Dot: '\u00A8',dot: '\u02D9',DotDot: '\u20DC',doteq: '\u2250',doteqdot: '\u2251',DotEqual: '\u2250',dotminus: '\u2238',dotplus: '\u2214',dotsquare: '\u22A1',doublebarwedge: '\u2306',DoubleContourIntegral: '\u222F',DoubleDot: '\u00A8',DoubleDownArrow: '\u21D3',DoubleLeftArrow: '\u21D0',DoubleLeftRightArrow: '\u21D4',DoubleLeftTee: '\u2AE4',DoubleLongLeftArrow: '\u27F8',DoubleLongLeftRightArrow: '\u27FA',DoubleLongRightArrow: '\u27F9',DoubleRightArrow: '\u21D2',DoubleRightTee: '\u22A8',DoubleUpArrow: '\u21D1',DoubleUpDownArrow: '\u21D5',DoubleVerticalBar: '\u2225',DownArrow: '\u2193',Downarrow: '\u21D3',downarrow: '\u2193',DownArrowBar: '\u2913',DownArrowUpArrow: '\u21F5',DownBreve: '\u0311',downdownarrows: '\u21CA',downharpoonleft: '\u21C3',downharpoonright: '\u21C2',DownLeftRightVector: '\u2950',DownLeftTeeVector: '\u295E',DownLeftVector: '\u21BD',DownLeftVectorBar: '\u2956',DownRightTeeVector: '\u295F',DownRightVector: '\u21C1',DownRightVectorBar: '\u2957',DownTee: '\u22A4',DownTeeArrow: '\u21A7',drbkarow: '\u2910',drcorn: '\u231F',drcrop: '\u230C',Dscr: '\uD835\uDC9F',dscr: '\uD835\uDCB9',DScy: '\u0405',dscy: '\u0455',dsol: '\u29F6',Dstrok: '\u0110',dstrok: '\u0111',dtdot: '\u22F1',dtri: '\u25BF',dtrif: '\u25BE',duarr: '\u21F5',duhar: '\u296F',dwangle: '\u29A6',DZcy: '\u040F',dzcy: '\u045F',dzigrarr: '\u27FF',Eacute: '\u00C9',eacute: '\u00E9',easter: '\u2A6E',Ecaron: '\u011A',ecaron: '\u011B',ecir: '\u2256',Ecirc: '\u00CA',ecirc: '\u00EA',ecolon: '\u2255',Ecy: '\u042D',ecy: '\u044D',eDDot: '\u2A77',Edot: '\u0116',eDot: '\u2251',edot: '\u0117',ee: '\u2147',efDot: '\u2252',Efr: '\uD835\uDD08',efr: '\uD835\uDD22',eg: '\u2A9A',Egrave: '\u00C8',egrave: '\u00E8',egs: '\u2A96',egsdot: '\u2A98',el: '\u2A99',Element: '\u2208',elinters: '\u23E7',ell: '\u2113',els: '\u2A95',elsdot: '\u2A97',Emacr: '\u0112',emacr: '\u0113',empty: '\u2205',emptyset: '\u2205',EmptySmallSquare: '\u25FB',emptyv: '\u2205',EmptyVerySmallSquare: '\u25AB',emsp: '\u2003',emsp13: '\u2004',emsp14: '\u2005',ENG: '\u014A',eng: '\u014B',ensp: '\u2002',Eogon: '\u0118',eogon: '\u0119',Eopf: '\uD835\uDD3C',eopf: '\uD835\uDD56',epar: '\u22D5',eparsl: '\u29E3',eplus: '\u2A71',epsi: '\u03B5',Epsilon: '\u0395',epsilon: '\u03B5',epsiv: '\u03F5',eqcirc: '\u2256',eqcolon: '\u2255',eqsim: '\u2242',eqslantgtr: '\u2A96',eqslantless: '\u2A95',Equal: '\u2A75',equals: '\u003D',EqualTilde: '\u2242',equest: '\u225F',Equilibrium: '\u21CC',equiv: '\u2261',equivDD: '\u2A78',eqvparsl: '\u29E5',erarr: '\u2971',erDot: '\u2253',Escr: '\u2130',escr: '\u212F',esdot: '\u2250',Esim: '\u2A73',esim: '\u2242',Eta: '\u0397',eta: '\u03B7',ETH: '\u00D0',eth: '\u00F0',Euml: '\u00CB',euml: '\u00EB',euro: '\u20AC',excl: '\u0021',exist: '\u2203',Exists: '\u2203',expectation: '\u2130',ExponentialE: '\u2147',exponentiale: '\u2147',fallingdotseq: '\u2252',Fcy: '\u0424',fcy: '\u0444',female: '\u2640',ffilig: '\uFB03',fflig: '\uFB00',ffllig: '\uFB04',Ffr: '\uD835\uDD09',ffr: '\uD835\uDD23',filig: '\uFB01',FilledSmallSquare: '\u25FC',FilledVerySmallSquare: '\u25AA',fjlig: '\u0066\u006A',flat: '\u266D',fllig: '\uFB02',fltns: '\u25B1',fnof: '\u0192',Fopf: '\uD835\uDD3D',fopf: '\uD835\uDD57',ForAll: '\u2200',forall: '\u2200',fork: '\u22D4',forkv: '\u2AD9',Fouriertrf: '\u2131',fpartint: '\u2A0D',frac12: '\u00BD',frac13: '\u2153',frac14: '\u00BC',frac15: '\u2155',frac16: '\u2159',frac18: '\u215B',frac23: '\u2154',frac25: '\u2156',frac34: '\u00BE',frac35: 
'\u2157',frac38: '\u215C',frac45: '\u2158',frac56: '\u215A',frac58: '\u215D',frac78: '\u215E',frasl: '\u2044',frown: '\u2322',Fscr: '\u2131',fscr: '\uD835\uDCBB',gacute: '\u01F5',Gamma: '\u0393',gamma: '\u03B3',Gammad: '\u03DC',gammad: '\u03DD',gap: '\u2A86',Gbreve: '\u011E',gbreve: '\u011F',Gcedil: '\u0122',Gcirc: '\u011C',gcirc: '\u011D',Gcy: '\u0413',gcy: '\u0433',Gdot: '\u0120',gdot: '\u0121',gE: '\u2267',ge: '\u2265',gEl: '\u2A8C',gel: '\u22DB',geq: '\u2265',geqq: '\u2267',geqslant: '\u2A7E',ges: '\u2A7E',gescc: '\u2AA9',gesdot: '\u2A80',gesdoto: '\u2A82',gesdotol: '\u2A84',gesl: '\u22DB\uFE00',gesles: '\u2A94',Gfr: '\uD835\uDD0A',gfr: '\uD835\uDD24',Gg: '\u22D9',gg: '\u226B',ggg: '\u22D9',gimel: '\u2137',GJcy: '\u0403',gjcy: '\u0453',gl: '\u2277',gla: '\u2AA5',glE: '\u2A92',glj: '\u2AA4',gnap: '\u2A8A',gnapprox: '\u2A8A',gnE: '\u2269',gne: '\u2A88',gneq: '\u2A88',gneqq: '\u2269',gnsim: '\u22E7',Gopf: '\uD835\uDD3E',gopf: '\uD835\uDD58',grave: '\u0060',GreaterEqual: '\u2265',GreaterEqualLess: '\u22DB',GreaterFullEqual: '\u2267',GreaterGreater: '\u2AA2',GreaterLess: '\u2277',GreaterSlantEqual: '\u2A7E',GreaterTilde: '\u2273',Gscr: '\uD835\uDCA2',gscr: '\u210A',gsim: '\u2273',gsime: '\u2A8E',gsiml: '\u2A90',Gt: '\u226B',GT: '\u003E',gt: '\u003E',gtcc: '\u2AA7',gtcir: '\u2A7A',gtdot: '\u22D7',gtlPar: '\u2995',gtquest: '\u2A7C',gtrapprox: '\u2A86',gtrarr: '\u2978',gtrdot: '\u22D7',gtreqless: '\u22DB',gtreqqless: '\u2A8C',gtrless: '\u2277',gtrsim: '\u2273',gvertneqq: '\u2269\uFE00',gvnE: '\u2269\uFE00',Hacek: '\u02C7',hairsp: '\u200A',half: '\u00BD',hamilt: '\u210B',HARDcy: '\u042A',hardcy: '\u044A',hArr: '\u21D4',harr: '\u2194',harrcir: '\u2948',harrw: '\u21AD',Hat: '\u005E',hbar: '\u210F',Hcirc: '\u0124',hcirc: '\u0125',hearts: '\u2665',heartsuit: '\u2665',hellip: '\u2026',hercon: '\u22B9',Hfr: '\u210C',hfr: '\uD835\uDD25',HilbertSpace: '\u210B',hksearow: '\u2925',hkswarow: '\u2926',hoarr: '\u21FF',homtht: '\u223B',hookleftarrow: '\u21A9',hookrightarrow: '\u21AA',Hopf: '\u210D',hopf: '\uD835\uDD59',horbar: '\u2015',HorizontalLine: '\u2500',Hscr: '\u210B',hscr: '\uD835\uDCBD',hslash: '\u210F',Hstrok: '\u0126',hstrok: '\u0127',HumpDownHump: '\u224E',HumpEqual: '\u224F',hybull: '\u2043',hyphen: '\u2010',Iacute: '\u00CD',iacute: '\u00ED',ic: '\u2063',Icirc: '\u00CE',icirc: '\u00EE',Icy: '\u0418',icy: '\u0438',Idot: '\u0130',IEcy: '\u0415',iecy: '\u0435',iexcl: '\u00A1',iff: '\u21D4',Ifr: '\u2111',ifr: '\uD835\uDD26',Igrave: '\u00CC',igrave: '\u00EC',ii: '\u2148',iiiint: '\u2A0C',iiint: '\u222D',iinfin: '\u29DC',iiota: '\u2129',IJlig: '\u0132',ijlig: '\u0133',Im: '\u2111',Imacr: '\u012A',imacr: '\u012B',image: '\u2111',ImaginaryI: '\u2148',imagline: '\u2110',imagpart: '\u2111',imath: '\u0131',imof: '\u22B7',imped: '\u01B5',Implies: '\u21D2',in: '\u2208',incare: '\u2105',infin: '\u221E',infintie: '\u29DD',inodot: '\u0131',Int: '\u222C',int: '\u222B',intcal: '\u22BA',integers: '\u2124',Integral: '\u222B',intercal: '\u22BA',Intersection: '\u22C2',intlarhk: '\u2A17',intprod: '\u2A3C',InvisibleComma: '\u2063',InvisibleTimes: '\u2062',IOcy: '\u0401',iocy: '\u0451',Iogon: '\u012E',iogon: '\u012F',Iopf: '\uD835\uDD40',iopf: '\uD835\uDD5A',Iota: '\u0399',iota: '\u03B9',iprod: '\u2A3C',iquest: '\u00BF',Iscr: '\u2110',iscr: '\uD835\uDCBE',isin: '\u2208',isindot: '\u22F5',isinE: '\u22F9',isins: '\u22F4',isinsv: '\u22F3',isinv: '\u2208',it: '\u2062',Itilde: '\u0128',itilde: '\u0129',Iukcy: '\u0406',iukcy: '\u0456',Iuml: '\u00CF',iuml: '\u00EF',Jcirc: '\u0134',jcirc: '\u0135',Jcy: '\u0419',jcy: 
'\u0439',Jfr: '\uD835\uDD0D',jfr: '\uD835\uDD27',jmath: '\u0237',Jopf: '\uD835\uDD41',jopf: '\uD835\uDD5B',Jscr: '\uD835\uDCA5',jscr: '\uD835\uDCBF',Jsercy: '\u0408',jsercy: '\u0458',Jukcy: '\u0404',jukcy: '\u0454',Kappa: '\u039A',kappa: '\u03BA',kappav: '\u03F0',Kcedil: '\u0136',kcedil: '\u0137',Kcy: '\u041A',kcy: '\u043A',Kfr: '\uD835\uDD0E',kfr: '\uD835\uDD28',kgreen: '\u0138',KHcy: '\u0425',khcy: '\u0445',KJcy: '\u040C',kjcy: '\u045C',Kopf: '\uD835\uDD42',kopf: '\uD835\uDD5C',Kscr: '\uD835\uDCA6',kscr: '\uD835\uDCC0',lAarr: '\u21DA',Lacute: '\u0139',lacute: '\u013A',laemptyv: '\u29B4',lagran: '\u2112',Lambda: '\u039B',lambda: '\u03BB',Lang: '\u27EA',lang: '\u27E8',langd: '\u2991',langle: '\u27E8',lap: '\u2A85',Laplacetrf: '\u2112',laquo: '\u00AB',Larr: '\u219E',lArr: '\u21D0',larr: '\u2190',larrb: '\u21E4',larrbfs: '\u291F',larrfs: '\u291D',larrhk: '\u21A9',larrlp: '\u21AB',larrpl: '\u2939',larrsim: '\u2973',larrtl: '\u21A2',lat: '\u2AAB',lAtail: '\u291B',latail: '\u2919',late: '\u2AAD',lates: '\u2AAD\uFE00',lBarr: '\u290E',lbarr: '\u290C',lbbrk: '\u2772',lbrace: '\u007B',lbrack: '\u005B',lbrke: '\u298B',lbrksld: '\u298F',lbrkslu: '\u298D',Lcaron: '\u013D',lcaron: '\u013E',Lcedil: '\u013B',lcedil: '\u013C',lceil: '\u2308',lcub: '\u007B',Lcy: '\u041B',lcy: '\u043B',ldca: '\u2936',ldquo: '\u201C',ldquor: '\u201E',ldrdhar: '\u2967',ldrushar: '\u294B',ldsh: '\u21B2',lE: '\u2266',le: '\u2264',LeftAngleBracket: '\u27E8',LeftArrow: '\u2190',Leftarrow: '\u21D0',leftarrow: '\u2190',LeftArrowBar: '\u21E4',LeftArrowRightArrow: '\u21C6',leftarrowtail: '\u21A2',LeftCeiling: '\u2308',LeftDoubleBracket: '\u27E6',LeftDownTeeVector: '\u2961',LeftDownVector: '\u21C3',LeftDownVectorBar: '\u2959',LeftFloor: '\u230A',leftharpoondown: '\u21BD',leftharpoonup: '\u21BC',leftleftarrows: '\u21C7',LeftRightArrow: '\u2194',Leftrightarrow: '\u21D4',leftrightarrow: '\u2194',leftrightarrows: '\u21C6',leftrightharpoons: '\u21CB',leftrightsquigarrow: '\u21AD',LeftRightVector: '\u294E',LeftTee: '\u22A3',LeftTeeArrow: '\u21A4',LeftTeeVector: '\u295A',leftthreetimes: '\u22CB',LeftTriangle: '\u22B2',LeftTriangleBar: '\u29CF',LeftTriangleEqual: '\u22B4',LeftUpDownVector: '\u2951',LeftUpTeeVector: '\u2960',LeftUpVector: '\u21BF',LeftUpVectorBar: '\u2958',LeftVector: '\u21BC',LeftVectorBar: '\u2952',lEg: '\u2A8B',leg: '\u22DA',leq: '\u2264',leqq: '\u2266',leqslant: '\u2A7D',les: '\u2A7D',lescc: '\u2AA8',lesdot: '\u2A7F',lesdoto: '\u2A81',lesdotor: '\u2A83',lesg: '\u22DA\uFE00',lesges: '\u2A93',lessapprox: '\u2A85',lessdot: '\u22D6',lesseqgtr: '\u22DA',lesseqqgtr: '\u2A8B',LessEqualGreater: '\u22DA',LessFullEqual: '\u2266',LessGreater: '\u2276',lessgtr: '\u2276',LessLess: '\u2AA1',lesssim: '\u2272',LessSlantEqual: '\u2A7D',LessTilde: '\u2272',lfisht: '\u297C',lfloor: '\u230A',Lfr: '\uD835\uDD0F',lfr: '\uD835\uDD29',lg: '\u2276',lgE: '\u2A91',lHar: '\u2962',lhard: '\u21BD',lharu: '\u21BC',lharul: '\u296A',lhblk: '\u2584',LJcy: '\u0409',ljcy: '\u0459',Ll: '\u22D8',ll: '\u226A',llarr: '\u21C7',llcorner: '\u231E',Lleftarrow: '\u21DA',llhard: '\u296B',lltri: '\u25FA',Lmidot: '\u013F',lmidot: '\u0140',lmoust: '\u23B0',lmoustache: '\u23B0',lnap: '\u2A89',lnapprox: '\u2A89',lnE: '\u2268',lne: '\u2A87',lneq: '\u2A87',lneqq: '\u2268',lnsim: '\u22E6',loang: '\u27EC',loarr: '\u21FD',lobrk: '\u27E6',LongLeftArrow: '\u27F5',Longleftarrow: '\u27F8',longleftarrow: '\u27F5',LongLeftRightArrow: '\u27F7',Longleftrightarrow: '\u27FA',longleftrightarrow: '\u27F7',longmapsto: '\u27FC',LongRightArrow: '\u27F6',Longrightarrow: 
'\u27F9',longrightarrow: '\u27F6',looparrowleft: '\u21AB',looparrowright: '\u21AC',lopar: '\u2985',Lopf: '\uD835\uDD43',lopf: '\uD835\uDD5D',loplus: '\u2A2D',lotimes: '\u2A34',lowast: '\u2217',lowbar: '\u005F',LowerLeftArrow: '\u2199',LowerRightArrow: '\u2198',loz: '\u25CA',lozenge: '\u25CA',lozf: '\u29EB',lpar: '\u0028',lparlt: '\u2993',lrarr: '\u21C6',lrcorner: '\u231F',lrhar: '\u21CB',lrhard: '\u296D',lrm: '\u200E',lrtri: '\u22BF',lsaquo: '\u2039',Lscr: '\u2112',lscr: '\uD835\uDCC1',Lsh: '\u21B0',lsh: '\u21B0',lsim: '\u2272',lsime: '\u2A8D',lsimg: '\u2A8F',lsqb: '\u005B',lsquo: '\u2018',lsquor: '\u201A',Lstrok: '\u0141',lstrok: '\u0142',Lt: '\u226A',LT: '\u003C',lt: '\u003C',ltcc: '\u2AA6',ltcir: '\u2A79',ltdot: '\u22D6',lthree: '\u22CB',ltimes: '\u22C9',ltlarr: '\u2976',ltquest: '\u2A7B',ltri: '\u25C3',ltrie: '\u22B4',ltrif: '\u25C2',ltrPar: '\u2996',lurdshar: '\u294A',luruhar: '\u2966',lvertneqq: '\u2268\uFE00',lvnE: '\u2268\uFE00',macr: '\u00AF',male: '\u2642',malt: '\u2720',maltese: '\u2720',Map: '\u2905',map: '\u21A6',mapsto: '\u21A6',mapstodown: '\u21A7',mapstoleft: '\u21A4',mapstoup: '\u21A5',marker: '\u25AE',mcomma: '\u2A29',Mcy: '\u041C',mcy: '\u043C',mdash: '\u2014',mDDot: '\u223A',measuredangle: '\u2221',MediumSpace: '\u205F',Mellintrf: '\u2133',Mfr: '\uD835\uDD10',mfr: '\uD835\uDD2A',mho: '\u2127',micro: '\u00B5',mid: '\u2223',midast: '\u002A',midcir: '\u2AF0',middot: '\u00B7',minus: '\u2212',minusb: '\u229F',minusd: '\u2238',minusdu: '\u2A2A',MinusPlus: '\u2213',mlcp: '\u2ADB',mldr: '\u2026',mnplus: '\u2213',models: '\u22A7',Mopf: '\uD835\uDD44',mopf: '\uD835\uDD5E',mp: '\u2213',Mscr: '\u2133',mscr: '\uD835\uDCC2',mstpos: '\u223E',Mu: '\u039C',mu: '\u03BC',multimap: '\u22B8',mumap: '\u22B8',nabla: '\u2207',Nacute: '\u0143',nacute: '\u0144',nang: '\u2220\u20D2',nap: '\u2249',napE: '\u2A70\u0338',napid: '\u224B\u0338',napos: '\u0149',napprox: '\u2249',natur: '\u266E',natural: '\u266E',naturals: '\u2115',nbsp: '\u00A0',nbump: '\u224E\u0338',nbumpe: '\u224F\u0338',ncap: '\u2A43',Ncaron: '\u0147',ncaron: '\u0148',Ncedil: '\u0145',ncedil: '\u0146',ncong: '\u2247',ncongdot: '\u2A6D\u0338',ncup: '\u2A42',Ncy: '\u041D',ncy: '\u043D',ndash: '\u2013',ne: '\u2260',nearhk: '\u2924',neArr: '\u21D7',nearr: '\u2197',nearrow: '\u2197',nedot: '\u2250\u0338',NegativeMediumSpace: '\u200B',NegativeThickSpace: '\u200B',NegativeThinSpace: '\u200B',NegativeVeryThinSpace: '\u200B',nequiv: '\u2262',nesear: '\u2928',nesim: '\u2242\u0338',NestedGreaterGreater: '\u226B',NestedLessLess: '\u226A',NewLine: '\u000A',nexist: '\u2204',nexists: '\u2204',Nfr: '\uD835\uDD11',nfr: '\uD835\uDD2B',ngE: '\u2267\u0338',nge: '\u2271',ngeq: '\u2271',ngeqq: '\u2267\u0338',ngeqslant: '\u2A7E\u0338',nges: '\u2A7E\u0338',nGg: '\u22D9\u0338',ngsim: '\u2275',nGt: '\u226B\u20D2',ngt: '\u226F',ngtr: '\u226F',nGtv: '\u226B\u0338',nhArr: '\u21CE',nharr: '\u21AE',nhpar: '\u2AF2',ni: '\u220B',nis: '\u22FC',nisd: '\u22FA',niv: '\u220B',NJcy: '\u040A',njcy: '\u045A',nlArr: '\u21CD',nlarr: '\u219A',nldr: '\u2025',nlE: '\u2266\u0338',nle: '\u2270',nLeftarrow: '\u21CD',nleftarrow: '\u219A',nLeftrightarrow: '\u21CE',nleftrightarrow: '\u21AE',nleq: '\u2270',nleqq: '\u2266\u0338',nleqslant: '\u2A7D\u0338',nles: '\u2A7D\u0338',nless: '\u226E',nLl: '\u22D8\u0338',nlsim: '\u2274',nLt: '\u226A\u20D2',nlt: '\u226E',nltri: '\u22EA',nltrie: '\u22EC',nLtv: '\u226A\u0338',nmid: '\u2224',NoBreak: '\u2060',NonBreakingSpace: '\u00A0',Nopf: '\u2115',nopf: '\uD835\uDD5F',Not: '\u2AEC',not: '\u00AC',NotCongruent: '\u2262',NotCupCap: 
'\u226D',NotDoubleVerticalBar: '\u2226',NotElement: '\u2209',NotEqual: '\u2260',NotEqualTilde: '\u2242\u0338',NotExists: '\u2204',NotGreater: '\u226F',NotGreaterEqual: '\u2271',NotGreaterFullEqual: '\u2267\u0338',NotGreaterGreater: '\u226B\u0338',NotGreaterLess: '\u2279',NotGreaterSlantEqual: '\u2A7E\u0338',NotGreaterTilde: '\u2275',NotHumpDownHump: '\u224E\u0338',NotHumpEqual: '\u224F\u0338',notin: '\u2209',notindot: '\u22F5\u0338',notinE: '\u22F9\u0338',notinva: '\u2209',notinvb: '\u22F7',notinvc: '\u22F6',NotLeftTriangle: '\u22EA',NotLeftTriangleBar: '\u29CF\u0338',NotLeftTriangleEqual: '\u22EC',NotLess: '\u226E',NotLessEqual: '\u2270',NotLessGreater: '\u2278',NotLessLess: '\u226A\u0338',NotLessSlantEqual: '\u2A7D\u0338',NotLessTilde: '\u2274',NotNestedGreaterGreater: '\u2AA2\u0338',NotNestedLessLess: '\u2AA1\u0338',notni: '\u220C',notniva: '\u220C',notnivb: '\u22FE',notnivc: '\u22FD',NotPrecedes: '\u2280',NotPrecedesEqual: '\u2AAF\u0338',NotPrecedesSlantEqual: '\u22E0',NotReverseElement: '\u220C',NotRightTriangle: '\u22EB',NotRightTriangleBar: '\u29D0\u0338',NotRightTriangleEqual: '\u22ED',NotSquareSubset: '\u228F\u0338',NotSquareSubsetEqual: '\u22E2',NotSquareSuperset: '\u2290\u0338',NotSquareSupersetEqual: '\u22E3',NotSubset: '\u2282\u20D2',NotSubsetEqual: '\u2288',NotSucceeds: '\u2281',NotSucceedsEqual: '\u2AB0\u0338',NotSucceedsSlantEqual: '\u22E1',NotSucceedsTilde: '\u227F\u0338',NotSuperset: '\u2283\u20D2',NotSupersetEqual: '\u2289',NotTilde: '\u2241',NotTildeEqual: '\u2244',NotTildeFullEqual: '\u2247',NotTildeTilde: '\u2249',NotVerticalBar: '\u2224',npar: '\u2226',nparallel: '\u2226',nparsl: '\u2AFD\u20E5',npart: '\u2202\u0338',npolint: '\u2A14',npr: '\u2280',nprcue: '\u22E0',npre: '\u2AAF\u0338',nprec: '\u2280',npreceq: '\u2AAF\u0338',nrArr: '\u21CF',nrarr: '\u219B',nrarrc: '\u2933\u0338',nrarrw: '\u219D\u0338',nRightarrow: '\u21CF',nrightarrow: '\u219B',nrtri: '\u22EB',nrtrie: '\u22ED',nsc: '\u2281',nsccue: '\u22E1',nsce: '\u2AB0\u0338',Nscr: '\uD835\uDCA9',nscr: '\uD835\uDCC3',nshortmid: '\u2224',nshortparallel: '\u2226',nsim: '\u2241',nsime: '\u2244',nsimeq: '\u2244',nsmid: '\u2224',nspar: '\u2226',nsqsube: '\u22E2',nsqsupe: '\u22E3',nsub: '\u2284',nsubE: '\u2AC5\u0338',nsube: '\u2288',nsubset: '\u2282\u20D2',nsubseteq: '\u2288',nsubseteqq: '\u2AC5\u0338',nsucc: '\u2281',nsucceq: '\u2AB0\u0338',nsup: '\u2285',nsupE: '\u2AC6\u0338',nsupe: '\u2289',nsupset: '\u2283\u20D2',nsupseteq: '\u2289',nsupseteqq: '\u2AC6\u0338',ntgl: '\u2279',Ntilde: '\u00D1',ntilde: '\u00F1',ntlg: '\u2278',ntriangleleft: '\u22EA',ntrianglelefteq: '\u22EC',ntriangleright: '\u22EB',ntrianglerighteq: '\u22ED',Nu: '\u039D',nu: '\u03BD',num: '\u0023',numero: '\u2116',numsp: '\u2007',nvap: '\u224D\u20D2',nVDash: '\u22AF',nVdash: '\u22AE',nvDash: '\u22AD',nvdash: '\u22AC',nvge: '\u2265\u20D2',nvgt: '\u003E\u20D2',nvHarr: '\u2904',nvinfin: '\u29DE',nvlArr: '\u2902',nvle: '\u2264\u20D2',nvlt: '\u003C\u20D2',nvltrie: '\u22B4\u20D2',nvrArr: '\u2903',nvrtrie: '\u22B5\u20D2',nvsim: '\u223C\u20D2',nwarhk: '\u2923',nwArr: '\u21D6',nwarr: '\u2196',nwarrow: '\u2196',nwnear: '\u2927',Oacute: '\u00D3',oacute: '\u00F3',oast: '\u229B',ocir: '\u229A',Ocirc: '\u00D4',ocirc: '\u00F4',Ocy: '\u041E',ocy: '\u043E',odash: '\u229D',Odblac: '\u0150',odblac: '\u0151',odiv: '\u2A38',odot: '\u2299',odsold: '\u29BC',OElig: '\u0152',oelig: '\u0153',ofcir: '\u29BF',Ofr: '\uD835\uDD12',ofr: '\uD835\uDD2C',ogon: '\u02DB',Ograve: '\u00D2',ograve: '\u00F2',ogt: '\u29C1',ohbar: '\u29B5',ohm: '\u03A9',oint: '\u222E',olarr: '\u21BA',olcir: 
'\u29BE',olcross: '\u29BB',oline: '\u203E',olt: '\u29C0',Omacr: '\u014C',omacr: '\u014D',Omega: '\u03A9',omega: '\u03C9',Omicron: '\u039F',omicron: '\u03BF',omid: '\u29B6',ominus: '\u2296',Oopf: '\uD835\uDD46',oopf: '\uD835\uDD60',opar: '\u29B7',OpenCurlyDoubleQuote: '\u201C',OpenCurlyQuote: '\u2018',operp: '\u29B9',oplus: '\u2295',Or: '\u2A54',or: '\u2228',orarr: '\u21BB',ord: '\u2A5D',order: '\u2134',orderof: '\u2134',ordf: '\u00AA',ordm: '\u00BA',origof: '\u22B6',oror: '\u2A56',orslope: '\u2A57',orv: '\u2A5B',oS: '\u24C8',Oscr: '\uD835\uDCAA',oscr: '\u2134',Oslash: '\u00D8',oslash: '\u00F8',osol: '\u2298',Otilde: '\u00D5',otilde: '\u00F5',Otimes: '\u2A37',otimes: '\u2297',otimesas: '\u2A36',Ouml: '\u00D6',ouml: '\u00F6',ovbar: '\u233D',OverBar: '\u203E',OverBrace: '\u23DE',OverBracket: '\u23B4',OverParenthesis: '\u23DC',par: '\u2225',para: '\u00B6',parallel: '\u2225',parsim: '\u2AF3',parsl: '\u2AFD',part: '\u2202',PartialD: '\u2202',Pcy: '\u041F',pcy: '\u043F',percnt: '\u0025',period: '\u002E',permil: '\u2030',perp: '\u22A5',pertenk: '\u2031',Pfr: '\uD835\uDD13',pfr: '\uD835\uDD2D',Phi: '\u03A6',phi: '\u03C6',phiv: '\u03D5',phmmat: '\u2133',phone: '\u260E',Pi: '\u03A0',pi: '\u03C0',pitchfork: '\u22D4',piv: '\u03D6',planck: '\u210F',planckh: '\u210E',plankv: '\u210F',plus: '\u002B',plusacir: '\u2A23',plusb: '\u229E',pluscir: '\u2A22',plusdo: '\u2214',plusdu: '\u2A25',pluse: '\u2A72',PlusMinus: '\u00B1',plusmn: '\u00B1',plussim: '\u2A26',plustwo: '\u2A27',pm: '\u00B1',Poincareplane: '\u210C',pointint: '\u2A15',Popf: '\u2119',popf: '\uD835\uDD61',pound: '\u00A3',Pr: '\u2ABB',pr: '\u227A',prap: '\u2AB7',prcue: '\u227C',prE: '\u2AB3',pre: '\u2AAF',prec: '\u227A',precapprox: '\u2AB7',preccurlyeq: '\u227C',Precedes: '\u227A',PrecedesEqual: '\u2AAF',PrecedesSlantEqual: '\u227C',PrecedesTilde: '\u227E',preceq: '\u2AAF',precnapprox: '\u2AB9',precneqq: '\u2AB5',precnsim: '\u22E8',precsim: '\u227E',Prime: '\u2033',prime: '\u2032',primes: '\u2119',prnap: '\u2AB9',prnE: '\u2AB5',prnsim: '\u22E8',prod: '\u220F',Product: '\u220F',profalar: '\u232E',profline: '\u2312',profsurf: '\u2313',prop: '\u221D',Proportion: '\u2237',Proportional: '\u221D',propto: '\u221D',prsim: '\u227E',prurel: '\u22B0',Pscr: '\uD835\uDCAB',pscr: '\uD835\uDCC5',Psi: '\u03A8',psi: '\u03C8',puncsp: '\u2008',Qfr: '\uD835\uDD14',qfr: '\uD835\uDD2E',qint: '\u2A0C',Qopf: '\u211A',qopf: '\uD835\uDD62',qprime: '\u2057',Qscr: '\uD835\uDCAC',qscr: '\uD835\uDCC6',quaternions: '\u210D',quatint: '\u2A16',quest: '\u003F',questeq: '\u225F',QUOT: '\u0022',quot: '\u0022',rAarr: '\u21DB',race: '\u223D\u0331',Racute: '\u0154',racute: '\u0155',radic: '\u221A',raemptyv: '\u29B3',Rang: '\u27EB',rang: '\u27E9',rangd: '\u2992',range: '\u29A5',rangle: '\u27E9',raquo: '\u00BB',Rarr: '\u21A0',rArr: '\u21D2',rarr: '\u2192',rarrap: '\u2975',rarrb: '\u21E5',rarrbfs: '\u2920',rarrc: '\u2933',rarrfs: '\u291E',rarrhk: '\u21AA',rarrlp: '\u21AC',rarrpl: '\u2945',rarrsim: '\u2974',Rarrtl: '\u2916',rarrtl: '\u21A3',rarrw: '\u219D',rAtail: '\u291C',ratail: '\u291A',ratio: '\u2236',rationals: '\u211A',RBarr: '\u2910',rBarr: '\u290F',rbarr: '\u290D',rbbrk: '\u2773',rbrace: '\u007D',rbrack: '\u005D',rbrke: '\u298C',rbrksld: '\u298E',rbrkslu: '\u2990',Rcaron: '\u0158',rcaron: '\u0159',Rcedil: '\u0156',rcedil: '\u0157',rceil: '\u2309',rcub: '\u007D',Rcy: '\u0420',rcy: '\u0440',rdca: '\u2937',rdldhar: '\u2969',rdquo: '\u201D',rdquor: '\u201D',rdsh: '\u21B3',Re: '\u211C',real: '\u211C',realine: '\u211B',realpart: '\u211C',reals: '\u211D',rect: '\u25AD',REG: '\u00AE',reg: 
'\u00AE',ReverseElement: '\u220B',ReverseEquilibrium: '\u21CB',ReverseUpEquilibrium: '\u296F',rfisht: '\u297D',rfloor: '\u230B',Rfr: '\u211C',rfr: '\uD835\uDD2F',rHar: '\u2964',rhard: '\u21C1',rharu: '\u21C0',rharul: '\u296C',Rho: '\u03A1',rho: '\u03C1',rhov: '\u03F1',RightAngleBracket: '\u27E9',RightArrow: '\u2192',Rightarrow: '\u21D2',rightarrow: '\u2192',RightArrowBar: '\u21E5',RightArrowLeftArrow: '\u21C4',rightarrowtail: '\u21A3',RightCeiling: '\u2309',RightDoubleBracket: '\u27E7',RightDownTeeVector: '\u295D',RightDownVector: '\u21C2',RightDownVectorBar: '\u2955',RightFloor: '\u230B',rightharpoondown: '\u21C1',rightharpoonup: '\u21C0',rightleftarrows: '\u21C4',rightleftharpoons: '\u21CC',rightrightarrows: '\u21C9',rightsquigarrow: '\u219D',RightTee: '\u22A2',RightTeeArrow: '\u21A6',RightTeeVector: '\u295B',rightthreetimes: '\u22CC',RightTriangle: '\u22B3',RightTriangleBar: '\u29D0',RightTriangleEqual: '\u22B5',RightUpDownVector: '\u294F',RightUpTeeVector: '\u295C',RightUpVector: '\u21BE',RightUpVectorBar: '\u2954',RightVector: '\u21C0',RightVectorBar: '\u2953',ring: '\u02DA',risingdotseq: '\u2253',rlarr: '\u21C4',rlhar: '\u21CC',rlm: '\u200F',rmoust: '\u23B1',rmoustache: '\u23B1',rnmid: '\u2AEE',roang: '\u27ED',roarr: '\u21FE',robrk: '\u27E7',ropar: '\u2986',Ropf: '\u211D',ropf: '\uD835\uDD63',roplus: '\u2A2E',rotimes: '\u2A35',RoundImplies: '\u2970',rpar: '\u0029',rpargt: '\u2994',rppolint: '\u2A12',rrarr: '\u21C9',Rrightarrow: '\u21DB',rsaquo: '\u203A',Rscr: '\u211B',rscr: '\uD835\uDCC7',Rsh: '\u21B1',rsh: '\u21B1',rsqb: '\u005D',rsquo: '\u2019',rsquor: '\u2019',rthree: '\u22CC',rtimes: '\u22CA',rtri: '\u25B9',rtrie: '\u22B5',rtrif: '\u25B8',rtriltri: '\u29CE',RuleDelayed: '\u29F4',ruluhar: '\u2968',rx: '\u211E',Sacute: '\u015A',sacute: '\u015B',sbquo: '\u201A',Sc: '\u2ABC',sc: '\u227B',scap: '\u2AB8',Scaron: '\u0160',scaron: '\u0161',sccue: '\u227D',scE: '\u2AB4',sce: '\u2AB0',Scedil: '\u015E',scedil: '\u015F',Scirc: '\u015C',scirc: '\u015D',scnap: '\u2ABA',scnE: '\u2AB6',scnsim: '\u22E9',scpolint: '\u2A13',scsim: '\u227F',Scy: '\u0421',scy: '\u0441',sdot: '\u22C5',sdotb: '\u22A1',sdote: '\u2A66',searhk: '\u2925',seArr: '\u21D8',searr: '\u2198',searrow: '\u2198',sect: '\u00A7',semi: '\u003B',seswar: '\u2929',setminus: '\u2216',setmn: '\u2216',sext: '\u2736',Sfr: '\uD835\uDD16',sfr: '\uD835\uDD30',sfrown: '\u2322',sharp: '\u266F',SHCHcy: '\u0429',shchcy: '\u0449',SHcy: '\u0428',shcy: '\u0448',ShortDownArrow: '\u2193',ShortLeftArrow: '\u2190',shortmid: '\u2223',shortparallel: '\u2225',ShortRightArrow: '\u2192',ShortUpArrow: '\u2191',shy: '\u00AD',Sigma: '\u03A3',sigma: '\u03C3',sigmaf: '\u03C2',sigmav: '\u03C2',sim: '\u223C',simdot: '\u2A6A',sime: '\u2243',simeq: '\u2243',simg: '\u2A9E',simgE: '\u2AA0',siml: '\u2A9D',simlE: '\u2A9F',simne: '\u2246',simplus: '\u2A24',simrarr: '\u2972',slarr: '\u2190',SmallCircle: '\u2218',smallsetminus: '\u2216',smashp: '\u2A33',smeparsl: '\u29E4',smid: '\u2223',smile: '\u2323',smt: '\u2AAA',smte: '\u2AAC',smtes: '\u2AAC\uFE00',SOFTcy: '\u042C',softcy: '\u044C',sol: '\u002F',solb: '\u29C4',solbar: '\u233F',Sopf: '\uD835\uDD4A',sopf: '\uD835\uDD64',spades: '\u2660',spadesuit: '\u2660',spar: '\u2225',sqcap: '\u2293',sqcaps: '\u2293\uFE00',sqcup: '\u2294',sqcups: '\u2294\uFE00',Sqrt: '\u221A',sqsub: '\u228F',sqsube: '\u2291',sqsubset: '\u228F',sqsubseteq: '\u2291',sqsup: '\u2290',sqsupe: '\u2292',sqsupset: '\u2290',sqsupseteq: '\u2292',squ: '\u25A1',Square: '\u25A1',square: '\u25A1',SquareIntersection: '\u2293',SquareSubset: '\u228F',SquareSubsetEqual: 
'\u2291',SquareSuperset: '\u2290',SquareSupersetEqual: '\u2292',SquareUnion: '\u2294',squarf: '\u25AA',squf: '\u25AA',srarr: '\u2192',Sscr: '\uD835\uDCAE',sscr: '\uD835\uDCC8',ssetmn: '\u2216',ssmile: '\u2323',sstarf: '\u22C6',Star: '\u22C6',star: '\u2606',starf: '\u2605',straightepsilon: '\u03F5',straightphi: '\u03D5',strns: '\u00AF',Sub: '\u22D0',sub: '\u2282',subdot: '\u2ABD',subE: '\u2AC5',sube: '\u2286',subedot: '\u2AC3',submult: '\u2AC1',subnE: '\u2ACB',subne: '\u228A',subplus: '\u2ABF',subrarr: '\u2979',Subset: '\u22D0',subset: '\u2282',subseteq: '\u2286',subseteqq: '\u2AC5',SubsetEqual: '\u2286',subsetneq: '\u228A',subsetneqq: '\u2ACB',subsim: '\u2AC7',subsub: '\u2AD5',subsup: '\u2AD3',succ: '\u227B',succapprox: '\u2AB8',succcurlyeq: '\u227D',Succeeds: '\u227B',SucceedsEqual: '\u2AB0',SucceedsSlantEqual: '\u227D',SucceedsTilde: '\u227F',succeq: '\u2AB0',succnapprox: '\u2ABA',succneqq: '\u2AB6',succnsim: '\u22E9',succsim: '\u227F',SuchThat: '\u220B',Sum: '\u2211',sum: '\u2211',sung: '\u266A',Sup: '\u22D1',sup: '\u2283',sup1: '\u00B9',sup2: '\u00B2',sup3: '\u00B3',supdot: '\u2ABE',supdsub: '\u2AD8',supE: '\u2AC6',supe: '\u2287',supedot: '\u2AC4',Superset: '\u2283',SupersetEqual: '\u2287',suphsol: '\u27C9',suphsub: '\u2AD7',suplarr: '\u297B',supmult: '\u2AC2',supnE: '\u2ACC',supne: '\u228B',supplus: '\u2AC0',Supset: '\u22D1',supset: '\u2283',supseteq: '\u2287',supseteqq: '\u2AC6',supsetneq: '\u228B',supsetneqq: '\u2ACC',supsim: '\u2AC8',supsub: '\u2AD4',supsup: '\u2AD6',swarhk: '\u2926',swArr: '\u21D9',swarr: '\u2199',swarrow: '\u2199',swnwar: '\u292A',szlig: '\u00DF',Tab: '\u0009',target: '\u2316',Tau: '\u03A4',tau: '\u03C4',tbrk: '\u23B4',Tcaron: '\u0164',tcaron: '\u0165',Tcedil: '\u0162',tcedil: '\u0163',Tcy: '\u0422',tcy: '\u0442',tdot: '\u20DB',telrec: '\u2315',Tfr: '\uD835\uDD17',tfr: '\uD835\uDD31',there4: '\u2234',Therefore: '\u2234',therefore: '\u2234',Theta: '\u0398',theta: '\u03B8',thetasym: '\u03D1',thetav: '\u03D1',thickapprox: '\u2248',thicksim: '\u223C',ThickSpace: '\u205F\u200A',thinsp: '\u2009',ThinSpace: '\u2009',thkap: '\u2248',thksim: '\u223C',THORN: '\u00DE',thorn: '\u00FE',Tilde: '\u223C',tilde: '\u02DC',TildeEqual: '\u2243',TildeFullEqual: '\u2245',TildeTilde: '\u2248',times: '\u00D7',timesb: '\u22A0',timesbar: '\u2A31',timesd: '\u2A30',tint: '\u222D',toea: '\u2928',top: '\u22A4',topbot: '\u2336',topcir: '\u2AF1',Topf: '\uD835\uDD4B',topf: '\uD835\uDD65',topfork: '\u2ADA',tosa: '\u2929',tprime: '\u2034',TRADE: '\u2122',trade: '\u2122',triangle: '\u25B5',triangledown: '\u25BF',triangleleft: '\u25C3',trianglelefteq: '\u22B4',triangleq: '\u225C',triangleright: '\u25B9',trianglerighteq: '\u22B5',tridot: '\u25EC',trie: '\u225C',triminus: '\u2A3A',TripleDot: '\u20DB',triplus: '\u2A39',trisb: '\u29CD',tritime: '\u2A3B',trpezium: '\u23E2',Tscr: '\uD835\uDCAF',tscr: '\uD835\uDCC9',TScy: '\u0426',tscy: '\u0446',TSHcy: '\u040B',tshcy: '\u045B',Tstrok: '\u0166',tstrok: '\u0167',twixt: '\u226C',twoheadleftarrow: '\u219E',twoheadrightarrow: '\u21A0',Uacute: '\u00DA',uacute: '\u00FA',Uarr: '\u219F',uArr: '\u21D1',uarr: '\u2191',Uarrocir: '\u2949',Ubrcy: '\u040E',ubrcy: '\u045E',Ubreve: '\u016C',ubreve: '\u016D',Ucirc: '\u00DB',ucirc: '\u00FB',Ucy: '\u0423',ucy: '\u0443',udarr: '\u21C5',Udblac: '\u0170',udblac: '\u0171',udhar: '\u296E',ufisht: '\u297E',Ufr: '\uD835\uDD18',ufr: '\uD835\uDD32',Ugrave: '\u00D9',ugrave: '\u00F9',uHar: '\u2963',uharl: '\u21BF',uharr: '\u21BE',uhblk: '\u2580',ulcorn: '\u231C',ulcorner: '\u231C',ulcrop: '\u230F',ultri: '\u25F8',Umacr: '\u016A',umacr: 
'\u016B',uml: '\u00A8',UnderBar: '\u005F',UnderBrace: '\u23DF',UnderBracket: '\u23B5',UnderParenthesis: '\u23DD',Union: '\u22C3',UnionPlus: '\u228E',Uogon: '\u0172',uogon: '\u0173',Uopf: '\uD835\uDD4C',uopf: '\uD835\uDD66',UpArrow: '\u2191',Uparrow: '\u21D1',uparrow: '\u2191',UpArrowBar: '\u2912',UpArrowDownArrow: '\u21C5',UpDownArrow: '\u2195',Updownarrow: '\u21D5',updownarrow: '\u2195',UpEquilibrium: '\u296E',upharpoonleft: '\u21BF',upharpoonright: '\u21BE',uplus: '\u228E',UpperLeftArrow: '\u2196',UpperRightArrow: '\u2197',Upsi: '\u03D2',upsi: '\u03C5',upsih: '\u03D2',Upsilon: '\u03A5',upsilon: '\u03C5',UpTee: '\u22A5',UpTeeArrow: '\u21A5',upuparrows: '\u21C8',urcorn: '\u231D',urcorner: '\u231D',urcrop: '\u230E',Uring: '\u016E',uring: '\u016F',urtri: '\u25F9',Uscr: '\uD835\uDCB0',uscr: '\uD835\uDCCA',utdot: '\u22F0',Utilde: '\u0168',utilde: '\u0169',utri: '\u25B5',utrif: '\u25B4',uuarr: '\u21C8',Uuml: '\u00DC',uuml: '\u00FC',uwangle: '\u29A7',vangrt: '\u299C',varepsilon: '\u03F5',varkappa: '\u03F0',varnothing: '\u2205',varphi: '\u03D5',varpi: '\u03D6',varpropto: '\u221D',vArr: '\u21D5',varr: '\u2195',varrho: '\u03F1',varsigma: '\u03C2',varsubsetneq: '\u228A\uFE00',varsubsetneqq: '\u2ACB\uFE00',varsupsetneq: '\u228B\uFE00',varsupsetneqq: '\u2ACC\uFE00',vartheta: '\u03D1',vartriangleleft: '\u22B2',vartriangleright: '\u22B3',Vbar: '\u2AEB',vBar: '\u2AE8',vBarv: '\u2AE9',Vcy: '\u0412',vcy: '\u0432',VDash: '\u22AB',Vdash: '\u22A9',vDash: '\u22A8',vdash: '\u22A2',Vdashl: '\u2AE6',Vee: '\u22C1',vee: '\u2228',veebar: '\u22BB',veeeq: '\u225A',vellip: '\u22EE',Verbar: '\u2016',verbar: '\u007C',Vert: '\u2016',vert: '\u007C',VerticalBar: '\u2223',VerticalLine: '\u007C',VerticalSeparator: '\u2758',VerticalTilde: '\u2240',VeryThinSpace: '\u200A',Vfr: '\uD835\uDD19',vfr: '\uD835\uDD33',vltri: '\u22B2',vnsub: '\u2282\u20D2',vnsup: '\u2283\u20D2',Vopf: '\uD835\uDD4D',vopf: '\uD835\uDD67',vprop: '\u221D',vrtri: '\u22B3',Vscr: '\uD835\uDCB1',vscr: '\uD835\uDCCB',vsubnE: '\u2ACB\uFE00',vsubne: '\u228A\uFE00',vsupnE: '\u2ACC\uFE00',vsupne: '\u228B\uFE00',Vvdash: '\u22AA',vzigzag: '\u299A',Wcirc: '\u0174',wcirc: '\u0175',wedbar: '\u2A5F',Wedge: '\u22C0',wedge: '\u2227',wedgeq: '\u2259',weierp: '\u2118',Wfr: '\uD835\uDD1A',wfr: '\uD835\uDD34',Wopf: '\uD835\uDD4E',wopf: '\uD835\uDD68',wp: '\u2118',wr: '\u2240',wreath: '\u2240',Wscr: '\uD835\uDCB2',wscr: '\uD835\uDCCC',xcap: '\u22C2',xcirc: '\u25EF',xcup: '\u22C3',xdtri: '\u25BD',Xfr: '\uD835\uDD1B',xfr: '\uD835\uDD35',xhArr: '\u27FA',xharr: '\u27F7',Xi: '\u039E',xi: '\u03BE',xlArr: '\u27F8',xlarr: '\u27F5',xmap: '\u27FC',xnis: '\u22FB',xodot: '\u2A00',Xopf: '\uD835\uDD4F',xopf: '\uD835\uDD69',xoplus: '\u2A01',xotime: '\u2A02',xrArr: '\u27F9',xrarr: '\u27F6',Xscr: '\uD835\uDCB3',xscr: '\uD835\uDCCD',xsqcup: '\u2A06',xuplus: '\u2A04',xutri: '\u25B3',xvee: '\u22C1',xwedge: '\u22C0',Yacute: '\u00DD',yacute: '\u00FD',YAcy: '\u042F',yacy: '\u044F',Ycirc: '\u0176',ycirc: '\u0177',Ycy: '\u042B',ycy: '\u044B',yen: '\u00A5',Yfr: '\uD835\uDD1C',yfr: '\uD835\uDD36',YIcy: '\u0407',yicy: '\u0457',Yopf: '\uD835\uDD50',yopf: '\uD835\uDD6A',Yscr: '\uD835\uDCB4',yscr: '\uD835\uDCCE',YUcy: '\u042E',yucy: '\u044E',Yuml: '\u0178',yuml: '\u00FF',Zacute: '\u0179',zacute: '\u017A',Zcaron: '\u017D',zcaron: '\u017E',Zcy: '\u0417',zcy: '\u0437',Zdot: '\u017B',zdot: '\u017C',zeetrf: '\u2128',ZeroWidthSpace: '\u200B',Zeta: '\u0396',zeta: '\u03B6',Zfr: '\u2128',zfr: '\uD835\uDD37',ZHcy: '\u0416',zhcy: '\u0436',zigrarr: '\u21DD',Zopf: '\u2124',zopf: '\uD835\uDD6B',Zscr: '\uD835\uDCB5',zscr: 
'\uD835\uDCCF',zwj: '\u200D',zwnj: '\u200C'});/*** @deprecated use `HTML_ENTITIES` instead* @see HTML_ENTITIES*/exports.entityMap = exports.HTML_ENTITIES;});entities.XML_ENTITIES;entities.HTML_ENTITIES;entities.entityMap;var NAMESPACE$1 = conventions.NAMESPACE;//[4] NameStartChar ::= ":" | [A-Z] | "_" | [a-z] | [#xC0-#xD6] | [#xD8-#xF6] | [#xF8-#x2FF] | [#x370-#x37D] | [#x37F-#x1FFF] | [#x200C-#x200D] | [#x2070-#x218F] | [#x2C00-#x2FEF] | [#x3001-#xD7FF] | [#xF900-#xFDCF] | [#xFDF0-#xFFFD] | [#x10000-#xEFFFF]//[4a] NameChar ::= NameStartChar | "-" | "." | [0-9] | #xB7 | [#x0300-#x036F] | [#x203F-#x2040]//[5] Name ::= NameStartChar (NameChar)*var nameStartChar = /[A-Z_a-z\xC0-\xD6\xD8-\xF6\u00F8-\u02FF\u0370-\u037D\u037F-\u1FFF\u200C-\u200D\u2070-\u218F\u2C00-\u2FEF\u3001-\uD7FF\uF900-\uFDCF\uFDF0-\uFFFD]/; //\u10000-\uEFFFFvar nameChar = new RegExp("[\\-\\.0-9" + nameStartChar.source.slice(1, -1) + "\\u00B7\\u0300-\\u036F\\u203F-\\u2040]");var tagNamePattern = new RegExp('^' + nameStartChar.source + nameChar.source + '*(?:\:' + nameStartChar.source + nameChar.source + '*)?$');//var tagNamePattern = /^[a-zA-Z_][\w\-\.]*(?:\:[a-zA-Z_][\w\-\.]*)?$///var handlers = 'resolveEntity,getExternalSubset,characters,endDocument,endElement,endPrefixMapping,ignorableWhitespace,processingInstruction,setDocumentLocator,skippedEntity,startDocument,startElement,startPrefixMapping,notationDecl,unparsedEntityDecl,error,fatalError,warning,attributeDecl,elementDecl,externalEntityDecl,internalEntityDecl,comment,endCDATA,endDTD,endEntity,startCDATA,startDTD,startEntity'.split(',')//S_TAG, S_ATTR, S_EQ, S_ATTR_NOQUOT_VALUE//S_ATTR_SPACE, S_ATTR_END, S_TAG_SPACE, S_TAG_CLOSEvar S_TAG = 0; //tag name offerringvar S_ATTR = 1; //attr name offerringvar S_ATTR_SPACE = 2; //attr name end and space offervar S_EQ = 3; //=space?var S_ATTR_NOQUOT_VALUE = 4; //attr value(no quot value only)var S_ATTR_END = 5; //attr value end and no space(quot end)var S_TAG_SPACE = 6; //(attr value end || tag end ) && (space offer)var S_TAG_CLOSE = 7; //closed el<el />/*** Creates an error that will not be caught by XMLReader aka the SAX parser.** @param {string} message* @param {any?} locator Optional, can provide details about the location in the source* @constructor*/function ParseError$1(message, locator) {this.message = message;this.locator = locator;if (Error.captureStackTrace) Error.captureStackTrace(this, ParseError$1);}ParseError$1.prototype = new Error();ParseError$1.prototype.name = ParseError$1.name;function XMLReader$1() {}XMLReader$1.prototype = {parse: function (source, defaultNSMap, entityMap) {var domBuilder = this.domBuilder;domBuilder.startDocument();_copy(defaultNSMap, defaultNSMap = {});parse$1(source, defaultNSMap, entityMap, domBuilder, this.errorHandler);domBuilder.endDocument();}};function parse$1(source, defaultNSMapCopy, entityMap, domBuilder, errorHandler) {function fixedFromCharCode(code) {// String.prototype.fromCharCode does not supports// > 2 bytes unicode chars directlyif (code > 0xffff) {code -= 0x10000;var surrogate1 = 0xd800 + (code >> 10),surrogate2 = 0xdc00 + (code & 0x3ff);return String.fromCharCode(surrogate1, surrogate2);} else {return String.fromCharCode(code);}}function entityReplacer(a) {var k = a.slice(1, -1);if (Object.hasOwnProperty.call(entityMap, k)) {return entityMap[k];} else if (k.charAt(0) === '#') {return fixedFromCharCode(parseInt(k.substr(1).replace('x', '0x')));} else {errorHandler.error('entity not found:' + a);return a;}}function appendText(end) {//has some bugsif (end > start) {var 
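/*
 * A minimal sketch of how entityReplacer above resolves the references
 * matched by /&#?\w+;/ (resolveReference is a hypothetical name used only
 * for this note; it is not part of the library):
 *
 *   function resolveReference(ref, entityMap) {
 *     var k = ref.slice(1, -1);                 // strip '&' and ';'
 *     if (Object.hasOwnProperty.call(entityMap, k)) {
 *       return entityMap[k];                    // named: '&amp;' -> '&'
 *     }
 *     if (k.charAt(0) === '#') {
 *       // numeric: '&#60;' or hex '&#x3C;' -> '<'
 *       var code = parseInt(k.substr(1).replace('x', '0x'));
 *       // code points above 0xFFFF become a surrogate pair, matching
 *       // fixedFromCharCode above
 *       return code > 0xffff ? String.fromCodePoint(code) : String.fromCharCode(code);
 *     }
 *     return ref; // unknown references are reported and passed through
 *   }
 */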
xt = source.substring(start, end).replace(/&#?\w+;/g, entityReplacer);locator && position(start);domBuilder.characters(xt, 0, end - start);start = end;}}function position(p, m) {while (p >= lineEnd && (m = linePattern.exec(source))) {lineStart = m.index;lineEnd = lineStart + m[0].length;locator.lineNumber++;//console.log('line++:',locator,startPos,endPos)}locator.columnNumber = p - lineStart + 1;}var lineStart = 0;var lineEnd = 0;var linePattern = /.*(?:\r\n?|\n)|.*$/g;var locator = domBuilder.locator;var parseStack = [{currentNSMap: defaultNSMapCopy}];var closeMap = {};var start = 0;while (true) {try {var tagStart = source.indexOf('<', start);if (tagStart < 0) {if (!source.substr(start).match(/^\s*$/)) {var doc = domBuilder.doc;var text = doc.createTextNode(source.substr(start));doc.appendChild(text);domBuilder.currentElement = text;}return;}if (tagStart > start) {appendText(tagStart);}switch (source.charAt(tagStart + 1)) {case '/':var end = source.indexOf('>', tagStart + 3);var tagName = source.substring(tagStart + 2, end).replace(/[ \t\n\r]+$/g, '');var config = parseStack.pop();if (end < 0) {tagName = source.substring(tagStart + 2).replace(/[\s<].*/, '');errorHandler.error("end tag name: " + tagName + ' is not complete:' + config.tagName);end = tagStart + 1 + tagName.length;} else if (tagName.match(/\s</)) {tagName = tagName.replace(/[\s<].*/, '');errorHandler.error("end tag name: " + tagName + ' maybe not complete');end = tagStart + 1 + tagName.length;}var localNSMap = config.localNSMap;var endMatch = config.tagName == tagName;var endIgnoreCaseMach = endMatch || config.tagName && config.tagName.toLowerCase() == tagName.toLowerCase();if (endIgnoreCaseMach) {domBuilder.endElement(config.uri, config.localName, tagName);if (localNSMap) {for (var prefix in localNSMap) {if (Object.prototype.hasOwnProperty.call(localNSMap, prefix)) {domBuilder.endPrefixMapping(prefix);}}}if (!endMatch) {errorHandler.fatalError("end tag name: " + tagName + ' is not match the current start tagName:' + config.tagName); // No known test case}} else {parseStack.push(config);}end++;break;// end elmentcase '?':// <?...?>locator && position(tagStart);end = parseInstruction(source, tagStart, domBuilder);break;case '!':// <!doctype,<![CDATA,<!--locator && position(tagStart);end = parseDCC(source, tagStart, domBuilder, errorHandler);break;default:locator && position(tagStart);var el = new ElementAttributes();var currentNSMap = parseStack[parseStack.length - 1].currentNSMap;//elStartEndvar end = parseElementStartPart(source, tagStart, el, currentNSMap, entityReplacer, errorHandler);var len = el.length;if (!el.closed && fixSelfClosed(source, end, el.tagName, closeMap)) {el.closed = true;if (!entityMap.nbsp) {errorHandler.warning('unclosed xml attribute');}}if (locator && len) {var locator2 = copyLocator(locator, {});//try{//attribute position fixedfor (var i = 0; i < len; i++) {var a = el[i];position(a.offset);a.locator = copyLocator(locator, {});}domBuilder.locator = locator2;if (appendElement$1(el, domBuilder, currentNSMap)) {parseStack.push(el);}domBuilder.locator = locator;} else {if (appendElement$1(el, domBuilder, currentNSMap)) {parseStack.push(el);}}if (NAMESPACE$1.isHTML(el.uri) && !el.closed) {end = parseHtmlSpecialContent(source, end, el.tagName, entityReplacer, domBuilder);} else {end++;}}} catch (e) {if (e instanceof ParseError$1) {throw e;}errorHandler.error('element parse error: ' + e);end = -1;}if (end > start) {start = end;} else {//TODO: 这里有可能sax回退,有位置错误风险appendText(Math.max(tagStart, start) + 
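/*
 * A worked example of the position() helper above (hypothetical input, not
 * part of the library): linePattern advances lineStart/lineEnd one source
 * line at a time and the locator reports 1-based line/column numbers,
 * starting from lineNumber 0 as set by setDocumentLocator further below.
 *
 *   var source = '<a>\n <b/>';        // offset of '<b' is 5
 *   // 1st match: '<a>\n'  -> lineStart 0, lineEnd 4, lineNumber 1
 *   // 2nd match: ' <b/>'  -> lineStart 4, lineEnd 9, lineNumber 2
 *   // columnNumber = 5 - 4 + 1 = 2, so '<b' is reported at line 2, column 2
 *
 * The Chinese TODO just above notes that the SAX loop may backtrack here,
 * with a risk of incorrect position information.
 */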
1);}}}function copyLocator(f, t) {t.lineNumber = f.lineNumber;t.columnNumber = f.columnNumber;return t;}/*** @see #appendElement(source,elStartEnd,el,selfClosed,entityReplacer,domBuilder,parseStack);* @return end of the elementStartPart(end of elementEndPart for selfClosed el)*/function parseElementStartPart(source, start, el, currentNSMap, entityReplacer, errorHandler) {/*** @param {string} qname* @param {string} value* @param {number} startIndex*/function addAttribute(qname, value, startIndex) {if (el.attributeNames.hasOwnProperty(qname)) {errorHandler.fatalError('Attribute ' + qname + ' redefined');}el.addValue(qname,// @see https://www.w3.org/TR/xml/#AVNormalize// since the xmldom sax parser does not "interpret" DTD the following is not implemented:// - recursive replacement of (DTD) entity references// - trimming and collapsing multiple spaces into a single one for attributes that are not of type CDATAvalue.replace(/[\t\n\r]/g, ' ').replace(/&#?\w+;/g, entityReplacer), startIndex);}var attrName;var value;var p = ++start;var s = S_TAG; //statuswhile (true) {var c = source.charAt(p);switch (c) {case '=':if (s === S_ATTR) {//attrNameattrName = source.slice(start, p);s = S_EQ;} else if (s === S_ATTR_SPACE) {s = S_EQ;} else {//fatalError: equal must after attrName or space after attrNamethrow new Error('attribute equal must after attrName'); // No known test case}break;case '\'':case '"':if (s === S_EQ || s === S_ATTR //|| s == S_ATTR_SPACE) {//equalif (s === S_ATTR) {errorHandler.warning('attribute value must after "="');attrName = source.slice(start, p);}start = p + 1;p = source.indexOf(c, start);if (p > 0) {value = source.slice(start, p);addAttribute(attrName, value, start - 1);s = S_ATTR_END;} else {//fatalError: no end quot matchthrow new Error('attribute value no end \'' + c + '\' match');}} else if (s == S_ATTR_NOQUOT_VALUE) {value = source.slice(start, p);addAttribute(attrName, value, start);errorHandler.warning('attribute "' + attrName + '" missed start quot(' + c + ')!!');start = p + 1;s = S_ATTR_END;} else {//fatalError: no equal beforethrow new Error('attribute value must after "="'); // No known test case}break;case '/':switch (s) {case S_TAG:el.setTagName(source.slice(start, p));case S_ATTR_END:case S_TAG_SPACE:case S_TAG_CLOSE:s = S_TAG_CLOSE;el.closed = true;case S_ATTR_NOQUOT_VALUE:case S_ATTR:break;case S_ATTR_SPACE:el.closed = true;break;//case S_EQ:default:throw new Error("attribute invalid close char('/')");// No known test case}break;case ''://end documenterrorHandler.error('unexpected end of input');if (s == S_TAG) {el.setTagName(source.slice(start, p));}return p;case '>':switch (s) {case S_TAG:el.setTagName(source.slice(start, p));case S_ATTR_END:case S_TAG_SPACE:case S_TAG_CLOSE:break;//normalcase S_ATTR_NOQUOT_VALUE: //Compatible statecase S_ATTR:value = source.slice(start, p);if (value.slice(-1) === '/') {el.closed = true;value = value.slice(0, -1);}case S_ATTR_SPACE:if (s === S_ATTR_SPACE) {value = attrName;}if (s == S_ATTR_NOQUOT_VALUE) {errorHandler.warning('attribute "' + value + '" missed quot(")!');addAttribute(attrName, value, start);} else {if (!NAMESPACE$1.isHTML(currentNSMap['']) || !value.match(/^(?:disabled|checked|selected)$/i)) {errorHandler.warning('attribute "' + value + '" missed value!! 
"' + value + '" instead!!');}addAttribute(value, value, start);}break;case S_EQ:throw new Error('attribute value missed!!');}// console.log(tagName,tagNamePattern,tagNamePattern.test(tagName))return p;/*xml space '\x20' | #x9 | #xD | #xA; */case '\u0080':c = ' ';default:if (c <= ' ') {//spaceswitch (s) {case S_TAG:el.setTagName(source.slice(start, p)); //tagNames = S_TAG_SPACE;break;case S_ATTR:attrName = source.slice(start, p);s = S_ATTR_SPACE;break;case S_ATTR_NOQUOT_VALUE:var value = source.slice(start, p);errorHandler.warning('attribute "' + value + '" missed quot(")!!');addAttribute(attrName, value, start);case S_ATTR_END:s = S_TAG_SPACE;break;//case S_TAG_SPACE://case S_EQ://case S_ATTR_SPACE:// void();break;//case S_TAG_CLOSE://ignore warning}} else {//not space//S_TAG, S_ATTR, S_EQ, S_ATTR_NOQUOT_VALUE//S_ATTR_SPACE, S_ATTR_END, S_TAG_SPACE, S_TAG_CLOSEswitch (s) {//case S_TAG:void();break;//case S_ATTR:void();break;//case S_ATTR_NOQUOT_VALUE:void();break;case S_ATTR_SPACE:el.tagName;if (!NAMESPACE$1.isHTML(currentNSMap['']) || !attrName.match(/^(?:disabled|checked|selected)$/i)) {errorHandler.warning('attribute "' + attrName + '" missed value!! "' + attrName + '" instead2!!');}addAttribute(attrName, attrName, start);start = p;s = S_ATTR;break;case S_ATTR_END:errorHandler.warning('attribute space is required"' + attrName + '"!!');case S_TAG_SPACE:s = S_ATTR;start = p;break;case S_EQ:s = S_ATTR_NOQUOT_VALUE;start = p;break;case S_TAG_CLOSE:throw new Error("elements closed character '/' and '>' must be connected to");}}} //end outer switch//console.log('p++',p)p++;}}/*** @return true if has new namespace define*/function appendElement$1(el, domBuilder, currentNSMap) {var tagName = el.tagName;var localNSMap = null;//var currentNSMap = parseStack[parseStack.length-1].currentNSMap;var i = el.length;while (i--) {var a = el[i];var qName = a.qName;var value = a.value;var nsp = qName.indexOf(':');if (nsp > 0) {var prefix = a.prefix = qName.slice(0, nsp);var localName = qName.slice(nsp + 1);var nsPrefix = prefix === 'xmlns' && localName;} else {localName = qName;prefix = null;nsPrefix = qName === 'xmlns' && '';}//can not set prefix,because prefix !== ''a.localName = localName;//prefix == null for no ns prefix attributeif (nsPrefix !== false) {//hack!!if (localNSMap == null) {localNSMap = {};//console.log(currentNSMap,0)_copy(currentNSMap, currentNSMap = {});//console.log(currentNSMap,1)}currentNSMap[nsPrefix] = localNSMap[nsPrefix] = value;a.uri = NAMESPACE$1.XMLNS;domBuilder.startPrefixMapping(nsPrefix, value);}}var i = el.length;while (i--) {a = el[i];var prefix = a.prefix;if (prefix) {//no prefix attribute has no namespaceif (prefix === 'xml') {a.uri = NAMESPACE$1.XML;}if (prefix !== 'xmlns') {a.uri = currentNSMap[prefix || ''];//{console.log('###'+a.qName,domBuilder.locator.systemId+'',currentNSMap,a.uri)}}}}var nsp = tagName.indexOf(':');if (nsp > 0) {prefix = el.prefix = tagName.slice(0, nsp);localName = el.localName = tagName.slice(nsp + 1);} else {prefix = null; //important!!localName = el.localName = tagName;}//no prefix element has default namespacevar ns = el.uri = currentNSMap[prefix || ''];domBuilder.startElement(ns, localName, tagName, el);//endPrefixMapping and startPrefixMapping have not any help for dom builder//localNSMap = nullif (el.closed) {domBuilder.endElement(ns, localName, tagName);if (localNSMap) {for (prefix in localNSMap) {if (Object.prototype.hasOwnProperty.call(localNSMap, prefix)) {domBuilder.endPrefixMapping(prefix);}}}} else {el.currentNSMap = 
currentNSMap;el.localNSMap = localNSMap;//parseStack.push(el);return true;}}function parseHtmlSpecialContent(source, elStartEnd, tagName, entityReplacer, domBuilder) {if (/^(?:script|textarea)$/i.test(tagName)) {var elEndStart = source.indexOf('</' + tagName + '>', elStartEnd);var text = source.substring(elStartEnd + 1, elEndStart);if (/[&<]/.test(text)) {if (/^script$/i.test(tagName)) {//if(!/\]\]>/.test(text)){//lexHandler.startCDATA();domBuilder.characters(text, 0, text.length);//lexHandler.endCDATA();return elEndStart;//}} //}else{//text areatext = text.replace(/&#?\w+;/g, entityReplacer);domBuilder.characters(text, 0, text.length);return elEndStart;//}}}return elStartEnd + 1;}function fixSelfClosed(source, elStartEnd, tagName, closeMap) {//if(tagName in closeMap){var pos = closeMap[tagName];if (pos == null) {//console.log(tagName)pos = source.lastIndexOf('</' + tagName + '>');if (pos < elStartEnd) {//忘记闭合pos = source.lastIndexOf('</' + tagName);}closeMap[tagName] = pos;}return pos < elStartEnd;//}}function _copy(source, target) {for (var n in source) {if (Object.prototype.hasOwnProperty.call(source, n)) {target[n] = source[n];}}}function parseDCC(source, start, domBuilder, errorHandler) {//sure start with '<!'var next = source.charAt(start + 2);switch (next) {case '-':if (source.charAt(start + 3) === '-') {var end = source.indexOf('-->', start + 4);//append comment source.substring(4,end)//<!--if (end > start) {domBuilder.comment(source, start + 4, end - start - 4);return end + 3;} else {errorHandler.error("Unclosed comment");return -1;}} else {//errorreturn -1;}default:if (source.substr(start + 3, 6) == 'CDATA[') {var end = source.indexOf(']]>', start + 9);domBuilder.startCDATA();domBuilder.characters(source, start + 9, end - start - 9);domBuilder.endCDATA();return end + 3;}//<!DOCTYPE//startDTD(java.lang.String name, java.lang.String publicId, java.lang.String systemId)var matchs = split(source, start);var len = matchs.length;if (len > 1 && /!doctype/i.test(matchs[0][0])) {var name = matchs[1][0];var pubid = false;var sysid = false;if (len > 3) {if (/^public$/i.test(matchs[2][0])) {pubid = matchs[3][0];sysid = len > 4 && matchs[4][0];} else if (/^system$/i.test(matchs[2][0])) {sysid = matchs[3][0];}}var lastMatch = matchs[len - 1];domBuilder.startDTD(name, pubid, sysid);domBuilder.endDTD();return lastMatch.index + lastMatch[0].length;}}return -1;}function parseInstruction(source, start, domBuilder) {var end = source.indexOf('?>', start);if (end) {var match = source.substring(start, end).match(/^<\?(\S*)\s*([\s\S]*?)\s*$/);if (match) {match[0].length;domBuilder.processingInstruction(match[1], match[2]);return end + 2;} else {//errorreturn -1;}}return -1;}function ElementAttributes() {this.attributeNames = {};}ElementAttributes.prototype = {setTagName: function (tagName) {if (!tagNamePattern.test(tagName)) {throw new Error('invalid tagName:' + tagName);}this.tagName = tagName;},addValue: function (qName, value, offset) {if (!tagNamePattern.test(qName)) {throw new Error('invalid attribute:' + qName);}this.attributeNames[qName] = this.length;this[this.length++] = {qName: qName,value: value,offset: offset};},length: 0,getLocalName: function (i) {return this[i].localName;},getLocator: function (i) {return this[i].locator;},getQName: function (i) {return this[i].qName;},getURI: function (i) {return this[i].uri;},getValue: function (i) {return this[i].value;}// ,getIndex:function(uri, localName)){// if(localName){//// }else{// var qName = uri// }// },// getValue:function(){return 
this.getValue(this.getIndex.apply(this,arguments))},// getType:function(uri,localName){}// getType:function(i){},};function split(source, start) {var match;var buf = [];var reg = /'[^']+'|"[^"]+"|[^\s<>\/=]+=?|(\/?\s*>|<)/g;reg.lastIndex = start;reg.exec(source); //skip <while (match = reg.exec(source)) {buf.push(match);if (match[1]) return buf;}}var XMLReader_1 = XMLReader$1;var ParseError_1 = ParseError$1;var sax = {XMLReader: XMLReader_1,ParseError: ParseError_1};var DOMImplementation = dom.DOMImplementation;var NAMESPACE = conventions.NAMESPACE;var ParseError = sax.ParseError;var XMLReader = sax.XMLReader;/*** Normalizes line ending according to https://www.w3.org/TR/xml11/#sec-line-ends:** > XML parsed entities are often stored in computer files which,* > for editing convenience, are organized into lines.* > These lines are typically separated by some combination* > of the characters CARRIAGE RETURN (#xD) and LINE FEED (#xA).* >* > To simplify the tasks of applications, the XML processor must behave* > as if it normalized all line breaks in external parsed entities (including the document entity)* > on input, before parsing, by translating all of the following to a single #xA character:* >* > 1. the two-character sequence #xD #xA* > 2. the two-character sequence #xD #x85* > 3. the single character #x85* > 4. the single character #x2028* > 5. any #xD character that is not immediately followed by #xA or #x85.** @param {string} input* @returns {string}*/function normalizeLineEndings(input) {return input.replace(/\r[\n\u0085]/g, '\n').replace(/[\r\u0085\u2028]/g, '\n');}/*** @typedef Locator* @property {number} [columnNumber]* @property {number} [lineNumber]*//*** @typedef DOMParserOptions* @property {DOMHandler} [domBuilder]* @property {Function} [errorHandler]* @property {(string) => string} [normalizeLineEndings] used to replace line endings before parsing* defaults to `normalizeLineEndings`* @property {Locator} [locator]* @property {Record<string, string>} [xmlns]** @see normalizeLineEndings*//*** The DOMParser interface provides the ability to parse XML or HTML source code* from a string into a DOM `Document`.** _xmldom is different from the spec in that it allows an `options` parameter,* to override the default behavior._** @param {DOMParserOptions} [options]* @constructor** @see https://developer.mozilla.org/en-US/docs/Web/API/DOMParser* @see https://html.spec.whatwg.org/multipage/dynamic-markup-insertion.html#dom-parsing-and-serialization*/function DOMParser$1(options) {this.options = options || {locator: {}};}DOMParser$1.prototype.parseFromString = function (source, mimeType) {var options = this.options;var sax = new XMLReader();var domBuilder = options.domBuilder || new DOMHandler(); //contentHandler and LexicalHandlervar errorHandler = options.errorHandler;var locator = options.locator;var defaultNSMap = options.xmlns || {};var isHTML = /\/x?html?$/.test(mimeType); //mimeType.toLowerCase().indexOf('html') > -1;var entityMap = isHTML ? 
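/*
 * Usage sketch for the DOMParser assembled here (illustrative only; the
 * sample markup and variable names are not part of the library):
 *
 *   var parser = new DOMParser({ locator: {} });
 *   var doc = parser.parseFromString('<root><child a="1"/></root>', 'text/xml');
 *   // doc.documentElement.tagName -> 'root'
 *
 *   // A mimeType ending in '/html' (e.g. 'text/html') selects HTML_ENTITIES
 *   // and installs NAMESPACE.HTML as the default namespace, as the
 *   // surrounding parseFromString shows.
 */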
entities.HTML_ENTITIES : entities.XML_ENTITIES;if (locator) {domBuilder.setDocumentLocator(locator);}sax.errorHandler = buildErrorHandler(errorHandler, domBuilder, locator);sax.domBuilder = options.domBuilder || domBuilder;if (isHTML) {defaultNSMap[''] = NAMESPACE.HTML;}defaultNSMap.xml = defaultNSMap.xml || NAMESPACE.XML;var normalize = options.normalizeLineEndings || normalizeLineEndings;if (source && typeof source === 'string') {sax.parse(normalize(source), defaultNSMap, entityMap);} else {sax.errorHandler.error('invalid doc source');}return domBuilder.doc;};function buildErrorHandler(errorImpl, domBuilder, locator) {if (!errorImpl) {if (domBuilder instanceof DOMHandler) {return domBuilder;}errorImpl = domBuilder;}var errorHandler = {};var isCallback = errorImpl instanceof Function;locator = locator || {};function build(key) {var fn = errorImpl[key];if (!fn && isCallback) {fn = errorImpl.length == 2 ? function (msg) {errorImpl(key, msg);} : errorImpl;}errorHandler[key] = fn && function (msg) {fn('[xmldom ' + key + ']\t' + msg + _locator(locator));} || function () {};}build('warning');build('error');build('fatalError');return errorHandler;}//console.log('#\n\n\n\n\n\n\n####')/*** +ContentHandler+ErrorHandler* +LexicalHandler+EntityResolver2* -DeclHandler-DTDHandler** DefaultHandler:EntityResolver, DTDHandler, ContentHandler, ErrorHandler* DefaultHandler2:DefaultHandler,LexicalHandler, DeclHandler, EntityResolver2* @link http://www.saxproject.org/apidoc/org/xml/sax/helpers/DefaultHandler.html*/function DOMHandler() {this.cdata = false;}function position(locator, node) {node.lineNumber = locator.lineNumber;node.columnNumber = locator.columnNumber;}/*** @see org.xml.sax.ContentHandler#startDocument* @link http://www.saxproject.org/apidoc/org/xml/sax/ContentHandler.html*/DOMHandler.prototype = {startDocument: function () {this.doc = new DOMImplementation().createDocument(null, null, null);if (this.locator) {this.doc.documentURI = this.locator.systemId;}},startElement: function (namespaceURI, localName, qName, attrs) {var doc = this.doc;var el = doc.createElementNS(namespaceURI, qName || localName);var len = attrs.length;appendElement(this, el);this.currentElement = el;this.locator && position(this.locator, el);for (var i = 0; i < len; i++) {var namespaceURI = attrs.getURI(i);var value = attrs.getValue(i);var qName = attrs.getQName(i);var attr = doc.createAttributeNS(namespaceURI, qName);this.locator && position(attrs.getLocator(i), attr);attr.value = attr.nodeValue = value;el.setAttributeNode(attr);}},endElement: function (namespaceURI, localName, qName) {var current = this.currentElement;current.tagName;this.currentElement = current.parentNode;},startPrefixMapping: function (prefix, uri) {},endPrefixMapping: function (prefix) {},processingInstruction: function (target, data) {var ins = this.doc.createProcessingInstruction(target, data);this.locator && position(this.locator, ins);appendElement(this, ins);},ignorableWhitespace: function (ch, start, length) {},characters: function (chars, start, length) {chars = _toString.apply(this, arguments);//console.log(chars)if (chars) {if (this.cdata) {var charNode = this.doc.createCDATASection(chars);} else {var charNode = this.doc.createTextNode(chars);}if (this.currentElement) {this.currentElement.appendChild(charNode);} else if (/^\s*$/.test(chars)) {this.doc.appendChild(charNode);//process xml}this.locator && position(this.locator, charNode);}},skippedEntity: function (name) {},endDocument: function () {this.doc.normalize();},setDocumentLocator: 
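/*
 * Sketch of the errorHandler shapes accepted by buildErrorHandler above
 * (the log callbacks are hypothetical):
 *
 *   // 1) an object with any of warning / error / fatalError
 *   new DOMParser({ errorHandler: { error: function (msg) { log(msg); } } });
 *
 *   // 2) a two-argument function, called as handler(level, message)
 *   new DOMParser({ errorHandler: function (level, msg) { log(level, msg); } });
 *
 *   // 3) a one-argument function, called for every level
 *   new DOMParser({ errorHandler: function (msg) { log(msg); } });
 *
 * In each case build() wraps the callback so messages arrive prefixed with
 * '[xmldom ' + key + ']' and suffixed with the locator position.
 */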
function (locator) {if (this.locator = locator) {// && !('lineNumber' in locator)){locator.lineNumber = 0;}},//LexicalHandlercomment: function (chars, start, length) {chars = _toString.apply(this, arguments);var comm = this.doc.createComment(chars);this.locator && position(this.locator, comm);appendElement(this, comm);},startCDATA: function () {//used in characters() methodsthis.cdata = true;},endCDATA: function () {this.cdata = false;},startDTD: function (name, publicId, systemId) {var impl = this.doc.implementation;if (impl && impl.createDocumentType) {var dt = impl.createDocumentType(name, publicId, systemId);this.locator && position(this.locator, dt);appendElement(this, dt);this.doc.doctype = dt;}},/*** @see org.xml.sax.ErrorHandler* @link http://www.saxproject.org/apidoc/org/xml/sax/ErrorHandler.html*/warning: function (error) {console.warn('[xmldom warning]\t' + error, _locator(this.locator));},error: function (error) {console.error('[xmldom error]\t' + error, _locator(this.locator));},fatalError: function (error) {throw new ParseError(error, this.locator);}};function _locator(l) {if (l) {return '\n@' + (l.systemId || '') + '#[line:' + l.lineNumber + ',col:' + l.columnNumber + ']';}}function _toString(chars, start, length) {if (typeof chars == 'string') {return chars.substr(start, length);} else {//java sax connect width xmldom on rhino(what about: "? && !(chars instanceof String)")if (chars.length >= start + length || start) {return new java.lang.String(chars, start, length) + '';}return chars;}}/** @link http://www.saxproject.org/apidoc/org/xml/sax/ext/LexicalHandler.html* used method of org.xml.sax.ext.LexicalHandler:* #comment(chars, start, length)* #startCDATA()* #endCDATA()* #startDTD(name, publicId, systemId)*** IGNORED method of org.xml.sax.ext.LexicalHandler:* #endDTD()* #startEntity(name)* #endEntity(name)*** @link http://www.saxproject.org/apidoc/org/xml/sax/ext/DeclHandler.html* IGNORED method of org.xml.sax.ext.DeclHandler* #attributeDecl(eName, aName, type, mode, value)* #elementDecl(name, model)* #externalEntityDecl(name, publicId, systemId)* #internalEntityDecl(name, value)* @link http://www.saxproject.org/apidoc/org/xml/sax/ext/EntityResolver2.html* IGNORED method of org.xml.sax.EntityResolver2* #resolveEntity(String name,String publicId,String baseURI,String systemId)* #resolveEntity(publicId, systemId)* #getExternalSubset(name, baseURI)* @link http://www.saxproject.org/apidoc/org/xml/sax/DTDHandler.html* IGNORED method of org.xml.sax.DTDHandler* #notationDecl(name, publicId, systemId) {};* #unparsedEntityDecl(name, publicId, systemId, notationName) {};*/"endDTD,startEntity,endEntity,attributeDecl,elementDecl,externalEntityDecl,internalEntityDecl,resolveEntity,getExternalSubset,notationDecl,unparsedEntityDecl".replace(/\w+/g, function (key) {DOMHandler.prototype[key] = function () {return null;};});/* Private static helpers treated below as private instance methods, so don't need to add these to the public API; we might use a Relator to also get rid of non-standard public properties */function appendElement(hander, node) {if (!hander.currentElement) {hander.doc.appendChild(node);} else {hander.currentElement.appendChild(node);}} //appendChild and setAttributeNS are preformance keyvar __DOMHandler = DOMHandler;var normalizeLineEndings_1 = normalizeLineEndings;var DOMParser_1 = DOMParser$1;var domParser = {__DOMHandler: __DOMHandler,normalizeLineEndings: normalizeLineEndings_1,DOMParser: DOMParser_1};var DOMParser = domParser.DOMParser;/*! 
@name mpd-parser @version 1.3.0 @license Apache-2.0 */const isObject = obj => {return !!obj && typeof obj === 'object';};const merge$1 = (...objects) => {return objects.reduce((result, source) => {if (typeof source !== 'object') {return result;}Object.keys(source).forEach(key => {if (Array.isArray(result[key]) && Array.isArray(source[key])) {result[key] = result[key].concat(source[key]);} else if (isObject(result[key]) && isObject(source[key])) {result[key] = merge$1(result[key], source[key]);} else {result[key] = source[key];}});return result;}, {});};const values = o => Object.keys(o).map(k => o[k]);const range = (start, end) => {const result = [];for (let i = start; i < end; i++) {result.push(i);}return result;};const flatten = lists => lists.reduce((x, y) => x.concat(y), []);const from = list => {if (!list.length) {return [];}const result = [];for (let i = 0; i < list.length; i++) {result.push(list[i]);}return result;};const findIndexes = (l, key) => l.reduce((a, e, i) => {if (e[key]) {a.push(i);}return a;}, []);/*** Returns a union of the included lists provided each element can be identified by a key.** @param {Array} list - list of lists to get the union of* @param {Function} keyFunction - the function to use as a key for each element** @return {Array} the union of the arrays*/const union = (lists, keyFunction) => {return values(lists.reduce((acc, list) => {list.forEach(el => {acc[keyFunction(el)] = el;});return acc;}, {}));};var errors = {INVALID_NUMBER_OF_PERIOD: 'INVALID_NUMBER_OF_PERIOD',INVALID_NUMBER_OF_CONTENT_STEERING: 'INVALID_NUMBER_OF_CONTENT_STEERING',DASH_EMPTY_MANIFEST: 'DASH_EMPTY_MANIFEST',DASH_INVALID_XML: 'DASH_INVALID_XML',NO_BASE_URL: 'NO_BASE_URL',MISSING_SEGMENT_INFORMATION: 'MISSING_SEGMENT_INFORMATION',SEGMENT_TIME_UNSPECIFIED: 'SEGMENT_TIME_UNSPECIFIED',UNSUPPORTED_UTC_TIMING_SCHEME: 'UNSUPPORTED_UTC_TIMING_SCHEME'};/*** @typedef {Object} SingleUri* @property {string} uri - relative location of segment* @property {string} resolvedUri - resolved location of segment* @property {Object} byterange - Object containing information on how to make byte range* requests following byte-range-spec per RFC2616.* @property {String} byterange.length - length of range request* @property {String} byterange.offset - byte offset of range request** @see https://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.35.1*//*** Converts a URLType node (5.3.9.2.3 Table 13) to a segment object* that conforms to how m3u8-parser is structured** @see https://github.com/videojs/m3u8-parser** @param {string} baseUrl - baseUrl provided by <BaseUrl> nodes* @param {string} source - source url for segment* @param {string} range - optional range used for range calls,* follows RFC 2616, Clause 14.35.1* @return {SingleUri} full segment information transformed into a format similar* to m3u8-parser*/const urlTypeToSegment = ({baseUrl = '',source = '',range = '',indexRange = ''}) => {const segment = {uri: source,resolvedUri: resolveUrl$1(baseUrl || '', source)};if (range || indexRange) {const rangeStr = range ? range : indexRange;const ranges = rangeStr.split('-'); // default to parsing this as a BigInt if possiblelet startRange = window.BigInt ? window.BigInt(ranges[0]) : parseInt(ranges[0], 10);let endRange = window.BigInt ? 
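/*
 * Worked example of merge$1, defined earlier in this mpd-parser module
 * (values are hypothetical): arrays are concatenated, plain objects are
 * merged recursively, anything else is overwritten by the later source.
 *
 *   merge$1(
 *     { a: [1], b: { x: 1 }, c: 1 },
 *     { a: [2], b: { y: 2 }, c: 3 }
 *   );
 *   // -> { a: [1, 2], b: { x: 1, y: 2 }, c: 3 }
 */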
window.BigInt(ranges[1]) : parseInt(ranges[1], 10); // convert back to a number if less than MAX_SAFE_INTEGERif (startRange < Number.MAX_SAFE_INTEGER && typeof startRange === 'bigint') {startRange = Number(startRange);}if (endRange < Number.MAX_SAFE_INTEGER && typeof endRange === 'bigint') {endRange = Number(endRange);}let length;if (typeof endRange === 'bigint' || typeof startRange === 'bigint') {length = window.BigInt(endRange) - window.BigInt(startRange) + window.BigInt(1);} else {length = endRange - startRange + 1;}if (typeof length === 'bigint' && length < Number.MAX_SAFE_INTEGER) {length = Number(length);} // byterange should be inclusive according to// RFC 2616, Clause 14.35.1segment.byterange = {length,offset: startRange};}return segment;};const byteRangeToString = byterange => {// `endRange` is one less than `offset + length` because the HTTP range// header uses inclusive rangeslet endRange;if (typeof byterange.offset === 'bigint' || typeof byterange.length === 'bigint') {endRange = window.BigInt(byterange.offset) + window.BigInt(byterange.length) - window.BigInt(1);} else {endRange = byterange.offset + byterange.length - 1;}return `${byterange.offset}-${endRange}`;};/*** parse the end number attribue that can be a string* number, or undefined.** @param {string|number|undefined} endNumber* The end number attribute.** @return {number|null}* The result of parsing the end number.*/const parseEndNumber = endNumber => {if (endNumber && typeof endNumber !== 'number') {endNumber = parseInt(endNumber, 10);}if (isNaN(endNumber)) {return null;}return endNumber;};/*** Functions for calculating the range of available segments in static and dynamic* manifests.*/const segmentRange = {/*** Returns the entire range of available segments for a static MPD** @param {Object} attributes* Inheritied MPD attributes* @return {{ start: number, end: number }}* The start and end numbers for available segments*/static(attributes) {const {duration,timescale = 1,sourceDuration,periodDuration} = attributes;const endNumber = parseEndNumber(attributes.endNumber);const segmentDuration = duration / timescale;if (typeof endNumber === 'number') {return {start: 0,end: endNumber};}if (typeof periodDuration === 'number') {return {start: 0,end: periodDuration / segmentDuration};}return {start: 0,end: sourceDuration / segmentDuration};},/*** Returns the current live window range of available segments for a dynamic MPD** @param {Object} attributes* Inheritied MPD attributes* @return {{ start: number, end: number }}* The start and end numbers for available segments*/dynamic(attributes) {const {NOW,clientOffset,availabilityStartTime,timescale = 1,duration,periodStart = 0,minimumUpdatePeriod = 0,timeShiftBufferDepth = Infinity} = attributes;const endNumber = parseEndNumber(attributes.endNumber); // clientOffset is passed in at the top level of mpd-parser and is an offset calculated// after retrieving UTC server time.const now = (NOW + clientOffset) / 1000; // WC stands for Wall Clock.// Convert the period start time to EPOCH.const periodStartWC = availabilityStartTime + periodStart; // Period end in EPOCH is manifest's retrieval time + time until next update.const periodEndWC = now + minimumUpdatePeriod;const periodDuration = periodEndWC - periodStartWC;const segmentCount = Math.ceil(periodDuration * timescale / duration);const availableStart = Math.floor((now - periodStartWC - timeShiftBufferDepth) * timescale / duration);const availableEnd = Math.floor((now - periodStartWC) * timescale / duration);return {start: Math.max(0, 
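/*
 * Worked example of the inclusive byte-range math in urlTypeToSegment and
 * byteRangeToString above (sample byte offsets only):
 *
 *   // indexRange '500-999' covers 500 bytes starting at offset 500
 *   // (ranges are inclusive per RFC 2616, Clause 14.35.1)
 *   // -> segment.byterange = { offset: 500, length: 500 }
 *   byteRangeToString({ offset: 500, length: 500 }); // -> '500-999'
 */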
availableStart),end: typeof endNumber === 'number' ? endNumber : Math.min(segmentCount, availableEnd)};}};/*** Maps a range of numbers to objects with information needed to build the corresponding* segment list** @name toSegmentsCallback* @function* @param {number} number* Number of the segment* @param {number} index* Index of the number in the range list* @return {{ number: Number, duration: Number, timeline: Number, time: Number }}* Object with segment timing and duration info*//*** Returns a callback for Array.prototype.map for mapping a range of numbers to* information needed to build the segment list.** @param {Object} attributes* Inherited MPD attributes* @return {toSegmentsCallback}* Callback map function*/const toSegments = attributes => number => {const {duration,timescale = 1,periodStart,startNumber = 1} = attributes;return {number: startNumber + number,duration: duration / timescale,timeline: periodStart,time: number * duration};};/*** Returns a list of objects containing segment timing and duration info used for* building the list of segments. This uses the @duration attribute specified* in the MPD manifest to derive the range of segments.** @param {Object} attributes* Inherited MPD attributes* @return {{number: number, duration: number, time: number, timeline: number}[]}* List of Objects with segment timing and duration info*/const parseByDuration = attributes => {const {type,duration,timescale = 1,periodDuration,sourceDuration} = attributes;const {start,end} = segmentRange[type](attributes);const segments = range(start, end).map(toSegments(attributes));if (type === 'static') {const index = segments.length - 1; // section is either a period or the full sourceconst sectionDuration = typeof periodDuration === 'number' ? periodDuration : sourceDuration; // final segment may be less than full segment durationsegments[index].duration = sectionDuration - duration / timescale * index;}return segments;};/*** Translates SegmentBase into a set of segments.* (DASH SPEC Section 5.3.9.3.2) contains a set of <SegmentURL> nodes. Each* node should be translated into segment.** @param {Object} attributes* Object containing all inherited attributes from parent elements with attribute* names as keys* @return {Object.<Array>} list of segments*/const segmentsFromBase = attributes => {const {baseUrl,initialization = {},sourceDuration,indexRange = '',periodStart,presentationTime,number = 0,duration} = attributes; // base url is required for SegmentBase to work, per spec (Section 5.3.9.2.1)if (!baseUrl) {throw new Error(errors.NO_BASE_URL);}const initSegment = urlTypeToSegment({baseUrl,source: initialization.sourceURL,range: initialization.range});const segment = urlTypeToSegment({baseUrl,source: baseUrl,indexRange});segment.map = initSegment; // If there is a duration, use it, otherwise use the given duration of the source// (since SegmentBase is only for one total segment)if (duration) {const segmentTimeInfo = parseByDuration(attributes);if (segmentTimeInfo.length) {segment.duration = segmentTimeInfo[0].duration;segment.timeline = segmentTimeInfo[0].timeline;}} else if (sourceDuration) {segment.duration = sourceDuration;segment.timeline = periodStart;} // If presentation time is provided, these segments are being generated by SIDX// references, and should use the time provided. 
For the general case of SegmentBase,// there should only be one segment in the period, so its presentation time is the same// as its period start.segment.presentationTime = presentationTime || periodStart;segment.number = number;return [segment];};/*** Given a playlist, a sidx box, and a baseUrl, update the segment list of the playlist* according to the sidx information given.** playlist.sidx has metadadata about the sidx where-as the sidx param* is the parsed sidx box itself.** @param {Object} playlist the playlist to update the sidx information for* @param {Object} sidx the parsed sidx box* @return {Object} the playlist object with the updated sidx information*/const addSidxSegmentsToPlaylist$1 = (playlist, sidx, baseUrl) => {// Retain init segment informationconst initSegment = playlist.sidx.map ? playlist.sidx.map : null; // Retain source duration from initial main manifest parsingconst sourceDuration = playlist.sidx.duration; // Retain source timelineconst timeline = playlist.timeline || 0;const sidxByteRange = playlist.sidx.byterange;const sidxEnd = sidxByteRange.offset + sidxByteRange.length; // Retain timescale of the parsed sidxconst timescale = sidx.timescale; // referenceType 1 refers to other sidx boxesconst mediaReferences = sidx.references.filter(r => r.referenceType !== 1);const segments = [];const type = playlist.endList ? 'static' : 'dynamic';const periodStart = playlist.sidx.timeline;let presentationTime = periodStart;let number = playlist.mediaSequence || 0; // firstOffset is the offset from the end of the sidx boxlet startIndex; // eslint-disable-next-lineif (typeof sidx.firstOffset === 'bigint') {startIndex = window.BigInt(sidxEnd) + sidx.firstOffset;} else {startIndex = sidxEnd + sidx.firstOffset;}for (let i = 0; i < mediaReferences.length; i++) {const reference = sidx.references[i]; // size of the referenced (sub)segmentconst size = reference.referencedSize; // duration of the referenced (sub)segment, in the timescale// this will be converted to seconds when generating segmentsconst duration = reference.subsegmentDuration; // should be an inclusive rangelet endIndex; // eslint-disable-next-lineif (typeof startIndex === 'bigint') {endIndex = startIndex + window.BigInt(size) - window.BigInt(1);} else {endIndex = startIndex + size - 1;}const indexRange = `${startIndex}-${endIndex}`;const attributes = {baseUrl,timescale,timeline,periodStart,presentationTime,number,duration,sourceDuration,indexRange,type};const segment = segmentsFromBase(attributes)[0];if (initSegment) {segment.map = initSegment;}segments.push(segment);if (typeof startIndex === 'bigint') {startIndex += window.BigInt(size);} else {startIndex += size;}presentationTime += duration / timescale;number++;}playlist.segments = segments;return playlist;};const SUPPORTED_MEDIA_TYPES = ['AUDIO', 'SUBTITLES']; // allow one 60fps frame as leniency (arbitrarily chosen)const TIME_FUDGE = 1 / 60;/*** Given a list of timelineStarts, combines, dedupes, and sorts them.** @param {TimelineStart[]} timelineStarts - list of timeline starts** @return {TimelineStart[]} the combined and deduped timeline starts*/const getUniqueTimelineStarts = timelineStarts => {return union(timelineStarts, ({timeline}) => timeline).sort((a, b) => a.timeline > b.timeline ? 
1 : -1);};/*** Finds the playlist with the matching NAME attribute.** @param {Array} playlists - playlists to search through* @param {string} name - the NAME attribute to search for** @return {Object|null} the matching playlist object, or null*/const findPlaylistWithName = (playlists, name) => {for (let i = 0; i < playlists.length; i++) {if (playlists[i].attributes.NAME === name) {return playlists[i];}}return null;};/*** Gets a flattened array of media group playlists.** @param {Object} manifest - the main manifest object** @return {Array} the media group playlists*/const getMediaGroupPlaylists = manifest => {let mediaGroupPlaylists = [];forEachMediaGroup$1(manifest, SUPPORTED_MEDIA_TYPES, (properties, type, group, label) => {mediaGroupPlaylists = mediaGroupPlaylists.concat(properties.playlists || []);});return mediaGroupPlaylists;};/*** Updates the playlist's media sequence numbers.** @param {Object} config - options object* @param {Object} config.playlist - the playlist to update* @param {number} config.mediaSequence - the mediaSequence number to start with*/const updateMediaSequenceForPlaylist = ({playlist,mediaSequence}) => {playlist.mediaSequence = mediaSequence;playlist.segments.forEach((segment, index) => {segment.number = playlist.mediaSequence + index;});};/*** Updates the media and discontinuity sequence numbers of newPlaylists given oldPlaylists* and a complete list of timeline starts.** If no matching playlist is found, only the discontinuity sequence number of the playlist* will be updated.** Since early available timelines are not supported, at least one segment must be present.** @param {Object} config - options object* @param {Object[]} oldPlaylists - the old playlists to use as a reference* @param {Object[]} newPlaylists - the new playlists to update* @param {Object} timelineStarts - all timelineStarts seen in the stream to this point*/const updateSequenceNumbers = ({oldPlaylists,newPlaylists,timelineStarts}) => {newPlaylists.forEach(playlist => {playlist.discontinuitySequence = timelineStarts.findIndex(function ({timeline}) {return timeline === playlist.timeline;}); // Playlists NAMEs come from DASH Representation IDs, which are mandatory// (see ISO_23009-1-2012 5.3.5.2).//// If the same Representation existed in a prior Period, it will retain the same NAME.const oldPlaylist = findPlaylistWithName(oldPlaylists, playlist.attributes.NAME);if (!oldPlaylist) {// Since this is a new playlist, the media sequence values can start from 0 without// consequence.return;} // TODO better support for live SIDX//// As of this writing, mpd-parser does not support multiperiod SIDX (in live or VOD).// This is evident by a playlist only having a single SIDX reference. In a multiperiod// playlist there would need to be multiple SIDX references. In addition, live SIDX is// not supported when the SIDX properties change on refreshes.//// In the future, if support needs to be added, the merging logic here can be called// after SIDX references are resolved. 
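// --- Editor's note: illustrative sketch only (hypothetical playlist), showing the
// renumbering performed by updateMediaSequenceForPlaylist above. Starting from a
// media sequence of 12, each segment number becomes mediaSequence + index:
//
//   const examplePlaylist = { segments: [{}, {}, {}] };
//   examplePlaylist.mediaSequence = 12;
//   examplePlaylist.segments.forEach((segment, index) => {
//     segment.number = examplePlaylist.mediaSequence + index; // 12, 13, 14
//   });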
For now, exit early to prevent exceptions being// thrown due to undefined references.if (playlist.sidx) {return;} // Since we don't yet support early available timelines, we don't need to support// playlists with no segments.const firstNewSegment = playlist.segments[0];const oldMatchingSegmentIndex = oldPlaylist.segments.findIndex(function (oldSegment) {return Math.abs(oldSegment.presentationTime - firstNewSegment.presentationTime) < TIME_FUDGE;}); // No matching segment from the old playlist means the entire playlist was refreshed.// In this case the media sequence should account for this update, and the new segments// should be marked as discontinuous from the prior content, since the last prior// timeline was removed.if (oldMatchingSegmentIndex === -1) {updateMediaSequenceForPlaylist({playlist,mediaSequence: oldPlaylist.mediaSequence + oldPlaylist.segments.length});playlist.segments[0].discontinuity = true;playlist.discontinuityStarts.unshift(0); // No matching segment does not necessarily mean there's missing content.//// If the new playlist's timeline is the same as the last seen segment's timeline,// then a discontinuity can be added to identify that there's potentially missing// content. If there's no missing content, the discontinuity should still be rather// harmless. It's possible that if segment durations are accurate enough, that the// existence of a gap can be determined using the presentation times and durations,// but if the segment timing info is off, it may introduce more problems than simply// adding the discontinuity.//// If the new playlist's timeline is different from the last seen segment's timeline,// then a discontinuity can be added to identify that this is the first seen segment// of a new timeline. However, the logic at the start of this function that// determined the disconinuity sequence by timeline index is now off by one (the// discontinuity of the newest timeline hasn't yet fallen off the manifest...since// we added it), so the disconinuity sequence must be decremented.//// A period may also have a duration of zero, so the case of no segments is handled// here even though we don't yet support early available periods.if (!oldPlaylist.segments.length && playlist.timeline > oldPlaylist.timeline || oldPlaylist.segments.length && playlist.timeline > oldPlaylist.segments[oldPlaylist.segments.length - 1].timeline) {playlist.discontinuitySequence--;}return;} // If the first segment matched with a prior segment on a discontinuity (it's matching// on the first segment of a period), then the discontinuitySequence shouldn't be the// timeline's matching one, but instead should be the one prior, and the first segment// of the new manifest should be marked with a discontinuity.//// The reason for this special case is that discontinuity sequence shows how many// discontinuities have fallen off of the playlist, and discontinuities are marked on// the first segment of a new "timeline." 
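// --- Editor's note: illustrative sketch only (hypothetical segments), showing the
// presentation-time matching described above, which tolerates up to one 60fps
// frame (TIME_FUDGE) of drift between refreshes:
//
//   const oldSegments = [{ presentationTime: 0 }, { presentationTime: 4 }, { presentationTime: 8 }];
//   const firstNew = { presentationTime: 8.002 };
//   const matchIndex = oldSegments.findIndex(
//     oldSegment => Math.abs(oldSegment.presentationTime - firstNew.presentationTime) < 1 / 60
//   ); // 2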
Because of this, while DASH will retain that// Period while the "timeline" exists, HLS keeps track of it via the discontinuity// sequence, and that first segment is an indicator, but can be removed before that// timeline is gone.const oldMatchingSegment = oldPlaylist.segments[oldMatchingSegmentIndex];if (oldMatchingSegment.discontinuity && !firstNewSegment.discontinuity) {firstNewSegment.discontinuity = true;playlist.discontinuityStarts.unshift(0);playlist.discontinuitySequence--;}updateMediaSequenceForPlaylist({playlist,mediaSequence: oldPlaylist.segments[oldMatchingSegmentIndex].number});});};/*** Given an old parsed manifest object and a new parsed manifest object, updates the* sequence and timing values within the new manifest to ensure that it lines up with the* old.** @param {Array} oldManifest - the old main manifest object* @param {Array} newManifest - the new main manifest object** @return {Object} the updated new manifest object*/const positionManifestOnTimeline = ({oldManifest,newManifest}) => {// Starting from v4.1.2 of the IOP, section 4.4.3.3 states://// "MPD@availabilityStartTime and Period@start shall not be changed over MPD updates."//// This was added from https://github.com/Dash-Industry-Forum/DASH-IF-IOP/issues/160//// Because of this change, and the difficulty of supporting periods with changing start// times, periods with changing start times are not supported. This makes the logic much// simpler, since periods with the same start time can be considerred the same period// across refreshes.//// To give an example as to the difficulty of handling periods where the start time may// change, if a single period manifest is refreshed with another manifest with a single// period, and both the start and end times are increased, then the only way to determine// if it's a new period or an old one that has changed is to look through the segments of// each playlist and determine the presentation time bounds to find a match. In addition,// if the period start changed to exceed the old period end, then there would be no// match, and it would not be possible to determine whether the refreshed period is a new// one or the old one.const oldPlaylists = oldManifest.playlists.concat(getMediaGroupPlaylists(oldManifest));const newPlaylists = newManifest.playlists.concat(getMediaGroupPlaylists(newManifest)); // Save all seen timelineStarts to the new manifest. Although this potentially means that// there's a "memory leak" in that it will never stop growing, in reality, only a couple// of properties are saved for each seen Period. Even long running live streams won't// generate too many Periods, unless the stream is watched for decades. 
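// --- Editor's note: illustrative sketch only (standalone, hypothetical values),
// showing the combine/dedupe/sort that getUniqueTimelineStarts applies to the old
// and new manifests' timelineStarts:
//
//   const oldStarts = [{ start: 0, timeline: 0 }, { start: 10, timeline: 10 }];
//   const newStarts = [{ start: 10, timeline: 10 }, { start: 20, timeline: 20 }];
//   const byTimeline = new Map();
//   oldStarts.concat(newStarts).forEach(ts => byTimeline.set(ts.timeline, ts));
//   const merged = [...byTimeline.values()].sort((a, b) => (a.timeline > b.timeline ? 1 : -1));
//   // => timelines 0, 10 and 20, each kept once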
In the future,// this can be optimized by mapping to discontinuity sequence numbers for each timeline,// but it may not become an issue, and the additional info can be useful for debugging.newManifest.timelineStarts = getUniqueTimelineStarts([oldManifest.timelineStarts, newManifest.timelineStarts]);updateSequenceNumbers({oldPlaylists,newPlaylists,timelineStarts: newManifest.timelineStarts});return newManifest;};const generateSidxKey = sidx => sidx && sidx.uri + '-' + byteRangeToString(sidx.byterange);const mergeDiscontiguousPlaylists = playlists => {// Break out playlists into groups based on their baseUrlconst playlistsByBaseUrl = playlists.reduce(function (acc, cur) {if (!acc[cur.attributes.baseUrl]) {acc[cur.attributes.baseUrl] = [];}acc[cur.attributes.baseUrl].push(cur);return acc;}, {});let allPlaylists = [];Object.values(playlistsByBaseUrl).forEach(playlistGroup => {const mergedPlaylists = values(playlistGroup.reduce((acc, playlist) => {// assuming playlist IDs are the same across periods// TODO: handle multiperiod where representation sets are not the same// across periodsconst name = playlist.attributes.id + (playlist.attributes.lang || '');if (!acc[name]) {// First Periodacc[name] = playlist;acc[name].attributes.timelineStarts = [];} else {// Subsequent Periodsif (playlist.segments) {// first segment of subsequent periods signal a discontinuityif (playlist.segments[0]) {playlist.segments[0].discontinuity = true;}acc[name].segments.push(...playlist.segments);} // bubble up contentProtection, this assumes all DRM content// has the same contentProtectionif (playlist.attributes.contentProtection) {acc[name].attributes.contentProtection = playlist.attributes.contentProtection;}}acc[name].attributes.timelineStarts.push({// Although they represent the same number, it's important to have both to make it// compatible with HLS potentially having a similar attribute.start: playlist.attributes.periodStart,timeline: playlist.attributes.periodStart});return acc;}, {}));allPlaylists = allPlaylists.concat(mergedPlaylists);});return allPlaylists.map(playlist => {playlist.discontinuityStarts = findIndexes(playlist.segments || [], 'discontinuity');return playlist;});};const addSidxSegmentsToPlaylist = (playlist, sidxMapping) => {const sidxKey = generateSidxKey(playlist.sidx);const sidxMatch = sidxKey && sidxMapping[sidxKey] && sidxMapping[sidxKey].sidx;if (sidxMatch) {addSidxSegmentsToPlaylist$1(playlist, sidxMatch, playlist.sidx.resolvedUri);}return playlist;};const addSidxSegmentsToPlaylists = (playlists, sidxMapping = {}) => {if (!Object.keys(sidxMapping).length) {return playlists;}for (const i in playlists) {playlists[i] = addSidxSegmentsToPlaylist(playlists[i], sidxMapping);}return playlists;};const formatAudioPlaylist = ({attributes,segments,sidx,mediaSequence,discontinuitySequence,discontinuityStarts}, isAudioOnly) => {const playlist = {attributes: {NAME: attributes.id,BANDWIDTH: attributes.bandwidth,CODECS: attributes.codecs,['PROGRAM-ID']: 1},uri: '',endList: attributes.type === 'static',timeline: attributes.periodStart,resolvedUri: attributes.baseUrl || '',targetDuration: attributes.duration,discontinuitySequence,discontinuityStarts,timelineStarts: attributes.timelineStarts,mediaSequence,segments};if (attributes.contentProtection) {playlist.contentProtection = attributes.contentProtection;}if (attributes.serviceLocation) {playlist.attributes.serviceLocation = attributes.serviceLocation;}if (sidx) {playlist.sidx = sidx;}if (isAudioOnly) {playlist.attributes.AUDIO = 
'audio';playlist.attributes.SUBTITLES = 'subs';}return playlist;};const formatVttPlaylist = ({attributes,segments,mediaSequence,discontinuityStarts,discontinuitySequence}) => {if (typeof segments === 'undefined') {// vtt tracks may use single file in BaseURLsegments = [{uri: attributes.baseUrl,timeline: attributes.periodStart,resolvedUri: attributes.baseUrl || '',duration: attributes.sourceDuration,number: 0}]; // targetDuration should be the same duration as the only segmentattributes.duration = attributes.sourceDuration;}const m3u8Attributes = {NAME: attributes.id,BANDWIDTH: attributes.bandwidth,['PROGRAM-ID']: 1};if (attributes.codecs) {m3u8Attributes.CODECS = attributes.codecs;}const vttPlaylist = {attributes: m3u8Attributes,uri: '',endList: attributes.type === 'static',timeline: attributes.periodStart,resolvedUri: attributes.baseUrl || '',targetDuration: attributes.duration,timelineStarts: attributes.timelineStarts,discontinuityStarts,discontinuitySequence,mediaSequence,segments};if (attributes.serviceLocation) {vttPlaylist.attributes.serviceLocation = attributes.serviceLocation;}return vttPlaylist;};const organizeAudioPlaylists = (playlists, sidxMapping = {}, isAudioOnly = false) => {let mainPlaylist;const formattedPlaylists = playlists.reduce((a, playlist) => {const role = playlist.attributes.role && playlist.attributes.role.value || '';const language = playlist.attributes.lang || '';let label = playlist.attributes.label || 'main';if (language && !playlist.attributes.label) {const roleLabel = role ? ` (${role})` : '';label = `${playlist.attributes.lang}${roleLabel}`;}if (!a[label]) {a[label] = {language,autoselect: true,default: role === 'main',playlists: [],uri: ''};}const formatted = addSidxSegmentsToPlaylist(formatAudioPlaylist(playlist, isAudioOnly), sidxMapping);a[label].playlists.push(formatted);if (typeof mainPlaylist === 'undefined' && role === 'main') {mainPlaylist = playlist;mainPlaylist.default = true;}return a;}, {}); // if no playlists have role "main", mark the first as mainif (!mainPlaylist) {const firstLabel = Object.keys(formattedPlaylists)[0];formattedPlaylists[firstLabel].default = true;}return formattedPlaylists;};const organizeVttPlaylists = (playlists, sidxMapping = {}) => {return playlists.reduce((a, playlist) => {const label = playlist.attributes.label || playlist.attributes.lang || 'text';if (!a[label]) {a[label] = {language: label,default: false,autoselect: false,playlists: [],uri: ''};}a[label].playlists.push(addSidxSegmentsToPlaylist(formatVttPlaylist(playlist), sidxMapping));return a;}, {});};const organizeCaptionServices = captionServices => captionServices.reduce((svcObj, svc) => {if (!svc) {return svcObj;}svc.forEach(service => {const {channel,language} = service;svcObj[language] = {autoselect: false,default: false,instreamId: channel,language};if (service.hasOwnProperty('aspectRatio')) {svcObj[language].aspectRatio = service.aspectRatio;}if (service.hasOwnProperty('easyReader')) {svcObj[language].easyReader = service.easyReader;}if (service.hasOwnProperty('3D')) {svcObj[language]['3D'] = service['3D'];}});return svcObj;}, {});const formatVideoPlaylist = ({attributes,segments,sidx,discontinuityStarts}) => {const playlist = {attributes: {NAME: attributes.id,AUDIO: 'audio',SUBTITLES: 'subs',RESOLUTION: {width: attributes.width,height: attributes.height},CODECS: attributes.codecs,BANDWIDTH: attributes.bandwidth,['PROGRAM-ID']: 1},uri: '',endList: attributes.type === 'static',timeline: attributes.periodStart,resolvedUri: attributes.baseUrl || 
'',targetDuration: attributes.duration,discontinuityStarts,timelineStarts: attributes.timelineStarts,segments};if (attributes.frameRate) {playlist.attributes['FRAME-RATE'] = attributes.frameRate;}if (attributes.contentProtection) {playlist.contentProtection = attributes.contentProtection;}if (attributes.serviceLocation) {playlist.attributes.serviceLocation = attributes.serviceLocation;}if (sidx) {playlist.sidx = sidx;}return playlist;};const videoOnly = ({attributes}) => attributes.mimeType === 'video/mp4' || attributes.mimeType === 'video/webm' || attributes.contentType === 'video';const audioOnly = ({attributes}) => attributes.mimeType === 'audio/mp4' || attributes.mimeType === 'audio/webm' || attributes.contentType === 'audio';const vttOnly = ({attributes}) => attributes.mimeType === 'text/vtt' || attributes.contentType === 'text';/*** Contains start and timeline properties denoting a timeline start. For DASH, these will* be the same number.** @typedef {Object} TimelineStart* @property {number} start - the start time of the timeline* @property {number} timeline - the timeline number*//*** Adds appropriate media and discontinuity sequence values to the segments and playlists.** Throughout mpd-parser, the `number` attribute is used in relation to `startNumber`, a* DASH specific attribute used in constructing segment URI's from templates. However, from* an HLS perspective, the `number` attribute on a segment would be its `mediaSequence`* value, which should start at the original media sequence value (or 0) and increment by 1* for each segment thereafter. Since DASH's `startNumber` values are independent per* period, it doesn't make sense to use it for `number`. Instead, assume everything starts* from a 0 mediaSequence value and increment from there.** Note that VHS currently doesn't use the `number` property, but it can be helpful for* debugging and making sense of the manifest.** For live playlists, to account for values increasing in manifests when periods are* removed on refreshes, merging logic should be used to update the numbers to their* appropriate values (to ensure they're sequential and increasing).** @param {Object[]} playlists - the playlists to update* @param {TimelineStart[]} timelineStarts - the timeline starts for the manifest*/const addMediaSequenceValues = (playlists, timelineStarts) => {// increment all segments sequentiallyplaylists.forEach(playlist => {playlist.mediaSequence = 0;playlist.discontinuitySequence = timelineStarts.findIndex(function ({timeline}) {return timeline === playlist.timeline;});if (!playlist.segments) {return;}playlist.segments.forEach((segment, index) => {segment.number = index;});});};/*** Given a media group object, flattens all playlists within the media group into a single* array.** @param {Object} mediaGroupObject - the media group object** @return {Object[]}* The media group playlists*/const flattenMediaGroupPlaylists = mediaGroupObject => {if (!mediaGroupObject) {return [];}return Object.keys(mediaGroupObject).reduce((acc, label) => {const labelContents = mediaGroupObject[label];return acc.concat(labelContents.playlists);}, []);};const toM3u8 = ({dashPlaylists,locations,contentSteering,sidxMapping = {},previousManifest,eventStream}) => {if (!dashPlaylists.length) {return {};} // grab all main manifest attributesconst {sourceDuration: duration,type,suggestedPresentationDelay,minimumUpdatePeriod} = dashPlaylists[0].attributes;const videoPlaylists = mergeDiscontiguousPlaylists(dashPlaylists.filter(videoOnly)).map(formatVideoPlaylist);const 
audioPlaylists = mergeDiscontiguousPlaylists(dashPlaylists.filter(audioOnly));const vttPlaylists = mergeDiscontiguousPlaylists(dashPlaylists.filter(vttOnly));const captions = dashPlaylists.map(playlist => playlist.attributes.captionServices).filter(Boolean);const manifest = {allowCache: true,discontinuityStarts: [],segments: [],endList: true,mediaGroups: {AUDIO: {},VIDEO: {},['CLOSED-CAPTIONS']: {},SUBTITLES: {}},uri: '',duration,playlists: addSidxSegmentsToPlaylists(videoPlaylists, sidxMapping)};if (minimumUpdatePeriod >= 0) {manifest.minimumUpdatePeriod = minimumUpdatePeriod * 1000;}if (locations) {manifest.locations = locations;}if (contentSteering) {manifest.contentSteering = contentSteering;}if (type === 'dynamic') {manifest.suggestedPresentationDelay = suggestedPresentationDelay;}if (eventStream && eventStream.length > 0) {manifest.eventStream = eventStream;}const isAudioOnly = manifest.playlists.length === 0;const organizedAudioGroup = audioPlaylists.length ? organizeAudioPlaylists(audioPlaylists, sidxMapping, isAudioOnly) : null;const organizedVttGroup = vttPlaylists.length ? organizeVttPlaylists(vttPlaylists, sidxMapping) : null;const formattedPlaylists = videoPlaylists.concat(flattenMediaGroupPlaylists(organizedAudioGroup), flattenMediaGroupPlaylists(organizedVttGroup));const playlistTimelineStarts = formattedPlaylists.map(({timelineStarts}) => timelineStarts);manifest.timelineStarts = getUniqueTimelineStarts(playlistTimelineStarts);addMediaSequenceValues(formattedPlaylists, manifest.timelineStarts);if (organizedAudioGroup) {manifest.mediaGroups.AUDIO.audio = organizedAudioGroup;}if (organizedVttGroup) {manifest.mediaGroups.SUBTITLES.subs = organizedVttGroup;}if (captions.length) {manifest.mediaGroups['CLOSED-CAPTIONS'].cc = organizeCaptionServices(captions);}if (previousManifest) {return positionManifestOnTimeline({oldManifest: previousManifest,newManifest: manifest});}return manifest;};/*** Calculates the R (repetition) value for a live stream (for the final segment* in a manifest where the r value is negative 1)** @param {Object} attributes* Object containing all inherited attributes from parent elements with attribute* names as keys* @param {number} time* current time (typically the total time up until the final segment)* @param {number} duration* duration property for the given <S />** @return {number}* R value to reach the end of the given period*/const getLiveRValue = (attributes, time, duration) => {const {NOW,clientOffset,availabilityStartTime,timescale = 1,periodStart = 0,minimumUpdatePeriod = 0} = attributes;const now = (NOW + clientOffset) / 1000;const periodStartWC = availabilityStartTime + periodStart;const periodEndWC = now + minimumUpdatePeriod;const periodDuration = periodEndWC - periodStartWC;return Math.ceil((periodDuration * timescale - time) / duration);};/*** Uses information provided by SegmentTemplate.SegmentTimeline to determine segment* timing and duration** @param {Object} attributes* Object containing all inherited attributes from parent elements with attribute* names as keys* @param {Object[]} segmentTimeline* List of objects representing the attributes of each S element contained within** @return {{number: number, duration: number, time: number, timeline: number}[]}* List of Objects with segment timing and duration info*/const parseByTimeline = (attributes, segmentTimeline) => {const {type,minimumUpdatePeriod = 0,media = '',sourceDuration,timescale = 1,startNumber = 1,periodStart: timeline} = attributes;const segments = [];let time = -1;for (let 
sIndex = 0; sIndex < segmentTimeline.length; sIndex++) {const S = segmentTimeline[sIndex];const duration = S.d;const repeat = S.r || 0;const segmentTime = S.t || 0;if (time < 0) {// first segmenttime = segmentTime;}if (segmentTime && segmentTime > time) {// discontinuity// TODO: How to handle this type of discontinuity// timeline++ here would treat it like HLS discontuity and content would// get appended without gap// E.G.// <S t="0" d="1" />// <S d="1" />// <S d="1" />// <S t="5" d="1" />// would have $Time$ values of [0, 1, 2, 5]// should this be appened at time positions [0, 1, 2, 3],(#EXT-X-DISCONTINUITY)// or [0, 1, 2, gap, gap, 5]? (#EXT-X-GAP)// does the value of sourceDuration consider this when calculating arbitrary// negative @r repeat value?// E.G. Same elements as above with this added at the end// <S d="1" r="-1" />// with a sourceDuration of 10// Would the 2 gaps be included in the time duration calculations resulting in// 8 segments with $Time$ values of [0, 1, 2, 5, 6, 7, 8, 9] or 10 segments// with $Time$ values of [0, 1, 2, 5, 6, 7, 8, 9, 10, 11] ?time = segmentTime;}let count;if (repeat < 0) {const nextS = sIndex + 1;if (nextS === segmentTimeline.length) {// last segmentif (type === 'dynamic' && minimumUpdatePeriod > 0 && media.indexOf('$Number$') > 0) {count = getLiveRValue(attributes, time, duration);} else {// TODO: This may be incorrect depending on conclusion of TODO abovecount = (sourceDuration * timescale - time) / duration;}} else {count = (segmentTimeline[nextS].t - time) / duration;}} else {count = repeat + 1;}const end = startNumber + segments.length + count;let number = startNumber + segments.length;while (number < end) {segments.push({number,duration: duration / timescale,time,timeline});time += duration;number++;}}return segments;};const identifierPattern = /\$([A-z]*)(?:(%0)([0-9]+)d)?\$/g;/*** Replaces template identifiers with corresponding values. To be used as the callback* for String.prototype.replace** @name replaceCallback* @function* @param {string} match* Entire match of identifier* @param {string} identifier* Name of matched identifier* @param {string} format* Format tag string. Its presence indicates that padding is expected* @param {string} width* Desired length of the replaced value. 
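* (Editor's illustrative note, hypothetical template and values: with a format tag
* of %05d and a segment Number of 7, the replacement is left zero padded to five
* characters, so 'seg-$Number%05d$.m4s' becomes 'seg-00007.m4s'.)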
Values less than this width shall be left* zero padded* @return {string}* Replacement for the matched identifier*//*** Returns a function to be used as a callback for String.prototype.replace to replace* template identifiers** @param {Obect} values* Object containing values that shall be used to replace known identifiers* @param {number} values.RepresentationID* Value of the Representation@id attribute* @param {number} values.Number* Number of the corresponding segment* @param {number} values.Bandwidth* Value of the Representation@bandwidth attribute.* @param {number} values.Time* Timestamp value of the corresponding segment* @return {replaceCallback}* Callback to be used with String.prototype.replace to replace identifiers*/const identifierReplacement = values => (match, identifier, format, width) => {if (match === '$$') {// escape sequencereturn '$';}if (typeof values[identifier] === 'undefined') {return match;}const value = '' + values[identifier];if (identifier === 'RepresentationID') {// Format tag shall not be present with RepresentationIDreturn value;}if (!format) {width = 1;} else {width = parseInt(width, 10);}if (value.length >= width) {return value;}return `${new Array(width - value.length + 1).join('0')}${value}`;};/*** Constructs a segment url from a template string** @param {string} url* Template string to construct url from* @param {Obect} values* Object containing values that shall be used to replace known identifiers* @param {number} values.RepresentationID* Value of the Representation@id attribute* @param {number} values.Number* Number of the corresponding segment* @param {number} values.Bandwidth* Value of the Representation@bandwidth attribute.* @param {number} values.Time* Timestamp value of the corresponding segment* @return {string}* Segment url with identifiers replaced*/const constructTemplateUrl = (url, values) => url.replace(identifierPattern, identifierReplacement(values));/*** Generates a list of objects containing timing and duration information about each* segment needed to generate segment uris and the complete segment object** @param {Object} attributes* Object containing all inherited attributes from parent elements with attribute* names as keys* @param {Object[]|undefined} segmentTimeline* List of objects representing the attributes of each S element contained within* the SegmentTimeline element* @return {{number: number, duration: number, time: number, timeline: number}[]}* List of Objects with segment timing and duration info*/const parseTemplateInfo = (attributes, segmentTimeline) => {if (!attributes.duration && !segmentTimeline) {// if neither @duration or SegmentTimeline are present, then there shall be exactly// one media segmentreturn [{number: attributes.startNumber || 1,duration: attributes.sourceDuration,time: 0,timeline: attributes.periodStart}];}if (attributes.duration) {return parseByDuration(attributes);}return parseByTimeline(attributes, segmentTimeline);};/*** Generates a list of segments using information provided by the SegmentTemplate element** @param {Object} attributes* Object containing all inherited attributes from parent elements with attribute* names as keys* @param {Object[]|undefined} segmentTimeline* List of objects representing the attributes of each S element contained within* the SegmentTimeline element* @return {Object[]}* List of segment objects*/const segmentsFromTemplate = (attributes, segmentTimeline) => {const templateValues = {RepresentationID: attributes.id,Bandwidth: attributes.bandwidth || 0};const {initialization = 
{sourceURL: '',range: ''}} = attributes;const mapSegment = urlTypeToSegment({baseUrl: attributes.baseUrl,source: constructTemplateUrl(initialization.sourceURL, templateValues),range: initialization.range});const segments = parseTemplateInfo(attributes, segmentTimeline);return segments.map(segment => {templateValues.Number = segment.number;templateValues.Time = segment.time;const uri = constructTemplateUrl(attributes.media || '', templateValues); // See DASH spec section 5.3.9.2.2// - if timescale isn't present on any level, default to 1.const timescale = attributes.timescale || 1; // - if presentationTimeOffset isn't present on any level, default to 0const presentationTimeOffset = attributes.presentationTimeOffset || 0;const presentationTime =// Even if the @t attribute is not specified for the segment, segment.time is// calculated in mpd-parser prior to this, so it's assumed to be available.attributes.periodStart + (segment.time - presentationTimeOffset) / timescale;const map = {uri,timeline: segment.timeline,duration: segment.duration,resolvedUri: resolveUrl$1(attributes.baseUrl || '', uri),map: mapSegment,number: segment.number,presentationTime};return map;});};/*** Converts a <SegmentUrl> (of type URLType from the DASH spec 5.3.9.2 Table 14)* to an object that matches the output of a segment in videojs/mpd-parser** @param {Object} attributes* Object containing all inherited attributes from parent elements with attribute* names as keys* @param {Object} segmentUrl* <SegmentURL> node to translate into a segment object* @return {Object} translated segment object*/const SegmentURLToSegmentObject = (attributes, segmentUrl) => {const {baseUrl,initialization = {}} = attributes;const initSegment = urlTypeToSegment({baseUrl,source: initialization.sourceURL,range: initialization.range});const segment = urlTypeToSegment({baseUrl,source: segmentUrl.media,range: segmentUrl.mediaRange});segment.map = initSegment;return segment;};/*** Generates a list of segments using information provided by the SegmentList element* SegmentList (DASH SPEC Section 5.3.9.3.2) contains a set of <SegmentURL> nodes. 
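* (Editor's illustrative note, hypothetical values: as in segmentsFromTemplate
* above, each segment's presentationTime is computed as
* periodStart + (segment.time - presentationTimeOffset) / timescale; for example
* periodStart = 10, time = 180000, presentationTimeOffset = 0 and timescale = 90000
* give a presentationTime of 12.)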
Each* node should be translated into segment.** @param {Object} attributes* Object containing all inherited attributes from parent elements with attribute* names as keys* @param {Object[]|undefined} segmentTimeline* List of objects representing the attributes of each S element contained within* the SegmentTimeline element* @return {Object.<Array>} list of segments*/const segmentsFromList = (attributes, segmentTimeline) => {const {duration,segmentUrls = [],periodStart} = attributes; // Per spec (5.3.9.2.1) no way to determine segment duration OR// if both SegmentTimeline and @duration are defined, it is outside of spec.if (!duration && !segmentTimeline || duration && segmentTimeline) {throw new Error(errors.SEGMENT_TIME_UNSPECIFIED);}const segmentUrlMap = segmentUrls.map(segmentUrlObject => SegmentURLToSegmentObject(attributes, segmentUrlObject));let segmentTimeInfo;if (duration) {segmentTimeInfo = parseByDuration(attributes);}if (segmentTimeline) {segmentTimeInfo = parseByTimeline(attributes, segmentTimeline);}const segments = segmentTimeInfo.map((segmentTime, index) => {if (segmentUrlMap[index]) {const segment = segmentUrlMap[index]; // See DASH spec section 5.3.9.2.2// - if timescale isn't present on any level, default to 1.const timescale = attributes.timescale || 1; // - if presentationTimeOffset isn't present on any level, default to 0const presentationTimeOffset = attributes.presentationTimeOffset || 0;segment.timeline = segmentTime.timeline;segment.duration = segmentTime.duration;segment.number = segmentTime.number;segment.presentationTime = periodStart + (segmentTime.time - presentationTimeOffset) / timescale;return segment;} // Since we're mapping we should get rid of any blank segments (in case// the given SegmentTimeline is handling for more elements than we have// SegmentURLs for).}).filter(segment => segment);return segments;};const generateSegments = ({attributes,segmentInfo}) => {let segmentAttributes;let segmentsFn;if (segmentInfo.template) {segmentsFn = segmentsFromTemplate;segmentAttributes = merge$1(attributes, segmentInfo.template);} else if (segmentInfo.base) {segmentsFn = segmentsFromBase;segmentAttributes = merge$1(attributes, segmentInfo.base);} else if (segmentInfo.list) {segmentsFn = segmentsFromList;segmentAttributes = merge$1(attributes, segmentInfo.list);}const segmentsInfo = {attributes};if (!segmentsFn) {return segmentsInfo;}const segments = segmentsFn(segmentAttributes, segmentInfo.segmentTimeline); // The @duration attribute will be used to determin the playlist's targetDuration which// must be in seconds. 
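// --- Editor's note: illustrative sketch only (hypothetical values), showing the
// conversion described above from @timescale units to seconds:
//
//   const exampleDuration = 180000;  // @duration in timescale ticks
//   const exampleTimescale = 90000;  // ticks per second
//   const exampleTargetDuration = exampleDuration / exampleTimescale; // 2 seconds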
Since we've generated the segment list, we no longer need// @duration to be in @timescale units, so we can convert it here.if (segmentAttributes.duration) {const {duration,timescale = 1} = segmentAttributes;segmentAttributes.duration = duration / timescale;} else if (segments.length) {// if there is no @duration attribute, use the largest segment duration as// as target durationsegmentAttributes.duration = segments.reduce((max, segment) => {return Math.max(max, Math.ceil(segment.duration));}, 0);} else {segmentAttributes.duration = 0;}segmentsInfo.attributes = segmentAttributes;segmentsInfo.segments = segments; // This is a sidx box without actual segment informationif (segmentInfo.base && segmentAttributes.indexRange) {segmentsInfo.sidx = segments[0];segmentsInfo.segments = [];}return segmentsInfo;};const toPlaylists = representations => representations.map(generateSegments);const findChildren = (element, name) => from(element.childNodes).filter(({tagName}) => tagName === name);const getContent = element => element.textContent.trim();/*** Converts the provided string that may contain a division operation to a number.** @param {string} value - the provided string value** @return {number} the parsed string value*/const parseDivisionValue = value => {return parseFloat(value.split('/').reduce((prev, current) => prev / current));};const parseDuration = str => {const SECONDS_IN_YEAR = 365 * 24 * 60 * 60;const SECONDS_IN_MONTH = 30 * 24 * 60 * 60;const SECONDS_IN_DAY = 24 * 60 * 60;const SECONDS_IN_HOUR = 60 * 60;const SECONDS_IN_MIN = 60; // P10Y10M10DT10H10M10.1Sconst durationRegex = /P(?:(\d*)Y)?(?:(\d*)M)?(?:(\d*)D)?(?:T(?:(\d*)H)?(?:(\d*)M)?(?:([\d.]*)S)?)?/;const match = durationRegex.exec(str);if (!match) {return 0;}const [year, month, day, hour, minute, second] = match.slice(1);return parseFloat(year || 0) * SECONDS_IN_YEAR + parseFloat(month || 0) * SECONDS_IN_MONTH + parseFloat(day || 0) * SECONDS_IN_DAY + parseFloat(hour || 0) * SECONDS_IN_HOUR + parseFloat(minute || 0) * SECONDS_IN_MIN + parseFloat(second || 0);};const parseDate = str => {// Date format without timezone according to ISO 8601// YYY-MM-DDThh:mm:ss.ssssssconst dateRegex = /^\d+-\d+-\d+T\d+:\d+:\d+(\.\d+)?$/; // If the date string does not specifiy a timezone, we must specifiy UTC. This is// expressed by ending with 'Z'if (dateRegex.test(str)) {str += 'Z';}return Date.parse(str);};const parsers = {/*** Specifies the duration of the entire Media Presentation. Format is a duration string* as specified in ISO 8601** @param {string} value* value of attribute as a string* @return {number}* The duration in seconds*/mediaPresentationDuration(value) {return parseDuration(value);},/*** Specifies the Segment availability start time for all Segments referred to in this* MPD. For a dynamic manifest, it specifies the anchor for the earliest availability* time. Format is a date string as specified in ISO 8601** @param {string} value* value of attribute as a string* @return {number}* The date as seconds from unix epoch*/availabilityStartTime(value) {return parseDate(value) / 1000;},/*** Specifies the smallest period between potential changes to the MPD. Format is a* duration string as specified in ISO 8601** @param {string} value* value of attribute as a string* @return {number}* The duration in seconds*/minimumUpdatePeriod(value) {return parseDuration(value);},/*** Specifies the suggested presentation delay. 
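* (Editor's illustrative note, hypothetical value: an ISO 8601 duration string such
* as 'PT1H30M10.5S' is parsed by parseDuration above as
* 1*3600 + 30*60 + 10.5 = 5410.5 seconds.)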
Format is a* duration string as specified in ISO 8601** @param {string} value* value of attribute as a string* @return {number}* The duration in seconds*/suggestedPresentationDelay(value) {return parseDuration(value);},/*** specifices the type of mpd. Can be either "static" or "dynamic"** @param {string} value* value of attribute as a string** @return {string}* The type as a string*/type(value) {return value;},/*** Specifies the duration of the smallest time shifting buffer for any Representation* in the MPD. Format is a duration string as specified in ISO 8601** @param {string} value* value of attribute as a string* @return {number}* The duration in seconds*/timeShiftBufferDepth(value) {return parseDuration(value);},/*** Specifies the PeriodStart time of the Period relative to the availabilityStarttime.* Format is a duration string as specified in ISO 8601** @param {string} value* value of attribute as a string* @return {number}* The duration in seconds*/start(value) {return parseDuration(value);},/*** Specifies the width of the visual presentation** @param {string} value* value of attribute as a string* @return {number}* The parsed width*/width(value) {return parseInt(value, 10);},/*** Specifies the height of the visual presentation** @param {string} value* value of attribute as a string* @return {number}* The parsed height*/height(value) {return parseInt(value, 10);},/*** Specifies the bitrate of the representation** @param {string} value* value of attribute as a string* @return {number}* The parsed bandwidth*/bandwidth(value) {return parseInt(value, 10);},/*** Specifies the frame rate of the representation** @param {string} value* value of attribute as a string* @return {number}* The parsed frame rate*/frameRate(value) {return parseDivisionValue(value);},/*** Specifies the number of the first Media Segment in this Representation in the Period** @param {string} value* value of attribute as a string* @return {number}* The parsed number*/startNumber(value) {return parseInt(value, 10);},/*** Specifies the timescale in units per seconds** @param {string} value* value of attribute as a string* @return {number}* The parsed timescale*/timescale(value) {return parseInt(value, 10);},/*** Specifies the presentationTimeOffset.** @param {string} value* value of the attribute as a string** @return {number}* The parsed presentationTimeOffset*/presentationTimeOffset(value) {return parseInt(value, 10);},/*** Specifies the constant approximate Segment duration* NOTE: The <Period> element also contains an @duration attribute. This duration* specifies the duration of the Period. 
This attribute is currently not* supported by the rest of the parser, however we still check for it to prevent* errors.** @param {string} value* value of attribute as a string* @return {number}* The parsed duration*/duration(value) {const parsedValue = parseInt(value, 10);if (isNaN(parsedValue)) {return parseDuration(value);}return parsedValue;},/*** Specifies the Segment duration, in units of the value of the @timescale.** @param {string} value* value of attribute as a string* @return {number}* The parsed duration*/d(value) {return parseInt(value, 10);},/*** Specifies the MPD start time, in @timescale units, the first Segment in the series* starts relative to the beginning of the Period** @param {string} value* value of attribute as a string* @return {number}* The parsed time*/t(value) {return parseInt(value, 10);},/*** Specifies the repeat count of the number of following contiguous Segments with the* same duration expressed by the value of @d** @param {string} value* value of attribute as a string* @return {number}* The parsed number*/r(value) {return parseInt(value, 10);},/*** Specifies the presentationTime.** @param {string} value* value of the attribute as a string** @return {number}* The parsed presentationTime*/presentationTime(value) {return parseInt(value, 10);},/*** Default parser for all other attributes. Acts as a no-op and just returns the value* as a string** @param {string} value* value of attribute as a string* @return {string}* Unparsed value*/DEFAULT(value) {return value;}};/*** Gets all the attributes and values of the provided node, parses attributes with known* types, and returns an object with attribute names mapped to values.** @param {Node} el* The node to parse attributes from* @return {Object}* Object with all attributes of el parsed*/const parseAttributes = el => {if (!(el && el.attributes)) {return {};}return from(el.attributes).reduce((a, e) => {const parseFn = parsers[e.name] || parsers.DEFAULT;a[e.name] = parseFn(e.value);return a;}, {});};const keySystemsMap = {'urn:uuid:1077efec-c0b2-4d02-ace3-3c1e52e2fb4b': 'org.w3.clearkey','urn:uuid:edef8ba9-79d6-4ace-a3c8-27dcd51d21ed': 'com.widevine.alpha','urn:uuid:9a04f079-9840-4286-ab92-e65be0885f95': 'com.microsoft.playready','urn:uuid:f239e769-efa3-4850-9c16-a903c6932efb': 'com.adobe.primetime',// ISO_IEC 23009-1_2022 5.8.5.2.2 The mp4 Protection Scheme'urn:mpeg:dash:mp4protection:2011': 'mp4protection'};/*** Builds a list of urls that is the product of the reference urls and BaseURL values** @param {Object[]} references* List of objects containing the reference URL as well as its attributes* @param {Node[]} baseUrlElements* List of BaseURL nodes from the mpd* @return {Object[]}* List of objects with resolved urls and attributes*/const buildBaseUrls = (references, baseUrlElements) => {if (!baseUrlElements.length) {return references;}return flatten(references.map(function (reference) {return baseUrlElements.map(function (baseUrlElement) {const initialBaseUrl = getContent(baseUrlElement);const resolvedBaseUrl = resolveUrl$1(reference.baseUrl, initialBaseUrl);const finalBaseUrl = merge$1(parseAttributes(baseUrlElement), {baseUrl: resolvedBaseUrl}); // If the URL is resolved, we want to get the serviceLocation from the reference// assuming there is no serviceLocation on the initialBaseUrlif (resolvedBaseUrl !== initialBaseUrl && !finalBaseUrl.serviceLocation && reference.serviceLocation) {finalBaseUrl.serviceLocation = reference.serviceLocation;}return finalBaseUrl;});}));};/*** Contains all Segment information for 
its containing AdaptationSet** @typedef {Object} SegmentInformation* @property {Object|undefined} template* Contains the attributes for the SegmentTemplate node* @property {Object[]|undefined} segmentTimeline* Contains a list of atrributes for each S node within the SegmentTimeline node* @property {Object|undefined} list* Contains the attributes for the SegmentList node* @property {Object|undefined} base* Contains the attributes for the SegmentBase node*//*** Returns all available Segment information contained within the AdaptationSet node** @param {Node} adaptationSet* The AdaptationSet node to get Segment information from* @return {SegmentInformation}* The Segment information contained within the provided AdaptationSet*/const getSegmentInformation = adaptationSet => {const segmentTemplate = findChildren(adaptationSet, 'SegmentTemplate')[0];const segmentList = findChildren(adaptationSet, 'SegmentList')[0];const segmentUrls = segmentList && findChildren(segmentList, 'SegmentURL').map(s => merge$1({tag: 'SegmentURL'}, parseAttributes(s)));const segmentBase = findChildren(adaptationSet, 'SegmentBase')[0];const segmentTimelineParentNode = segmentList || segmentTemplate;const segmentTimeline = segmentTimelineParentNode && findChildren(segmentTimelineParentNode, 'SegmentTimeline')[0];const segmentInitializationParentNode = segmentList || segmentBase || segmentTemplate;const segmentInitialization = segmentInitializationParentNode && findChildren(segmentInitializationParentNode, 'Initialization')[0]; // SegmentTemplate is handled slightly differently, since it can have both// @initialization and an <Initialization> node. @initialization can be templated,// while the node can have a url and range specified. If the <SegmentTemplate> has// both @initialization and an <Initialization> subelement we opt to override with// the node, as this interaction is not defined in the spec.const template = segmentTemplate && parseAttributes(segmentTemplate);if (template && segmentInitialization) {template.initialization = segmentInitialization && parseAttributes(segmentInitialization);} else if (template && template.initialization) {// If it is @initialization we convert it to an object since this is the format that// later functions will rely on for the initialization segment. 
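// --- Editor's note: illustrative sketch only (hypothetical attribute value),
// showing the normalization performed just below: a templated @initialization
// string becomes the object form used for the init segment:
//
//   // before: template.initialization === '$RepresentationID$-init.mp4'
//   // after:  template.initialization === { sourceURL: '$RepresentationID$-init.mp4' }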
This is only valid// for <SegmentTemplate>template.initialization = {sourceURL: template.initialization};}const segmentInfo = {template,segmentTimeline: segmentTimeline && findChildren(segmentTimeline, 'S').map(s => parseAttributes(s)),list: segmentList && merge$1(parseAttributes(segmentList), {segmentUrls,initialization: parseAttributes(segmentInitialization)}),base: segmentBase && merge$1(parseAttributes(segmentBase), {initialization: parseAttributes(segmentInitialization)})};Object.keys(segmentInfo).forEach(key => {if (!segmentInfo[key]) {delete segmentInfo[key];}});return segmentInfo;};/*** Contains Segment information and attributes needed to construct a Playlist object* from a Representation** @typedef {Object} RepresentationInformation* @property {SegmentInformation} segmentInfo* Segment information for this Representation* @property {Object} attributes* Inherited attributes for this Representation*//*** Maps a Representation node to an object containing Segment information and attributes** @name inheritBaseUrlsCallback* @function* @param {Node} representation* Representation node from the mpd* @return {RepresentationInformation}* Representation information needed to construct a Playlist object*//*** Returns a callback for Array.prototype.map for mapping Representation nodes to* Segment information and attributes using inherited BaseURL nodes.** @param {Object} adaptationSetAttributes* Contains attributes inherited by the AdaptationSet* @param {Object[]} adaptationSetBaseUrls* List of objects containing resolved base URLs and attributes* inherited by the AdaptationSet* @param {SegmentInformation} adaptationSetSegmentInfo* Contains Segment information for the AdaptationSet* @return {inheritBaseUrlsCallback}* Callback map function*/const inheritBaseUrls = (adaptationSetAttributes, adaptationSetBaseUrls, adaptationSetSegmentInfo) => representation => {const repBaseUrlElements = findChildren(representation, 'BaseURL');const repBaseUrls = buildBaseUrls(adaptationSetBaseUrls, repBaseUrlElements);const attributes = merge$1(adaptationSetAttributes, parseAttributes(representation));const representationSegmentInfo = getSegmentInformation(representation);return repBaseUrls.map(baseUrl => {return {segmentInfo: merge$1(adaptationSetSegmentInfo, representationSegmentInfo),attributes: merge$1(attributes, baseUrl)};});};/*** Tranforms a series of content protection nodes to* an object containing pssh data by key system** @param {Node[]} contentProtectionNodes* Content protection nodes* @return {Object}* Object containing pssh data by key system*/const generateKeySystemInformation = contentProtectionNodes => {return contentProtectionNodes.reduce((acc, node) => {const attributes = parseAttributes(node); // Although it could be argued that according to the UUID RFC spec the UUID string (a-f chars) should be generated// as a lowercase string it also mentions it should be treated as case-insensitive on input. 
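// --- Editor's note: illustrative sketch only (hypothetical manifest value), showing
// why the lowercasing below is safe: a manifest advertising
// 'urn:uuid:EDEF8BA9-79D6-4ACE-A3C8-27DCD51D21ED' maps, once lowercased, to
//
//   // keySystemsMap['urn:uuid:edef8ba9-79d6-4ace-a3c8-27dcd51d21ed'] === 'com.widevine.alpha'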
Since the key system// UUIDs in the keySystemsMap are hardcoded as lowercase in the codebase there isn't any reason not to do// .toLowerCase() on the input UUID string from the manifest (at least I could not think of one).if (attributes.schemeIdUri) {attributes.schemeIdUri = attributes.schemeIdUri.toLowerCase();}const keySystem = keySystemsMap[attributes.schemeIdUri];if (keySystem) {acc[keySystem] = {attributes};const psshNode = findChildren(node, 'cenc:pssh')[0];if (psshNode) {const pssh = getContent(psshNode);acc[keySystem].pssh = pssh && decodeB64ToUint8Array(pssh);}}return acc;}, {});}; // defined in ANSI_SCTE 214-1 2016const parseCaptionServiceMetadata = service => {// 608 captionsif (service.schemeIdUri === 'urn:scte:dash:cc:cea-608:2015') {const values = typeof service.value !== 'string' ? [] : service.value.split(';');return values.map(value => {let channel;let language; // default language to valuelanguage = value;if (/^CC\d=/.test(value)) {[channel, language] = value.split('=');} else if (/^CC\d$/.test(value)) {channel = value;}return {channel,language};});} else if (service.schemeIdUri === 'urn:scte:dash:cc:cea-708:2015') {const values = typeof service.value !== 'string' ? [] : service.value.split(';');return values.map(value => {const flags = {// service or channel number 1-63'channel': undefined,// language is a 3ALPHA per ISO 639.2/B// field is required'language': undefined,// BIT 1/0 or ?// default value is 1, meaning 16:9 aspect ratio, 0 is 4:3, ? is unknown'aspectRatio': 1,// BIT 1/0// easy reader flag indicated the text is tailed to the needs of beginning readers// default 0, or off'easyReader': 0,// BIT 1/0// If 3d metadata is present (CEA-708.1) then 1// default 0'3D': 0};if (/=/.test(value)) {const [channel, opts = ''] = value.split('=');flags.channel = channel;flags.language = value;opts.split(',').forEach(opt => {const [name, val] = opt.split(':');if (name === 'lang') {flags.language = val; // er for easyReadery} else if (name === 'er') {flags.easyReader = Number(val); // war for wide aspect ratio} else if (name === 'war') {flags.aspectRatio = Number(val);} else if (name === '3D') {flags['3D'] = Number(val);}});} else {flags.language = value;}if (flags.channel) {flags.channel = 'SERVICE' + flags.channel;}return flags;});}};/*** A map callback that will parse all event stream data for a collection of periods* DASH ISO_IEC_23009 5.10.2.2* https://dashif-documents.azurewebsites.net/Events/master/event.html#mpd-event-timing** @param {PeriodInformation} period object containing necessary period information* @return a collection of parsed eventstream event objects*/const toEventStream = period => {// get and flatten all EventStreams tags and parse attributes and childrenreturn flatten(findChildren(period.node, 'EventStream').map(eventStream => {const eventStreamAttributes = parseAttributes(eventStream);const schemeIdUri = eventStreamAttributes.schemeIdUri; // find all Events per EventStream tag and map to return objectsreturn findChildren(eventStream, 'Event').map(event => {const eventAttributes = parseAttributes(event);const presentationTime = eventAttributes.presentationTime || 0;const timescale = eventStreamAttributes.timescale || 1;const duration = eventAttributes.duration || 0;const start = presentationTime / timescale + period.attributes.start;return {schemeIdUri,value: eventStreamAttributes.value,id: eventAttributes.id,start,end: start + duration / timescale,messageData: getContent(event) || eventAttributes.messageData,contentEncoding: 
eventStreamAttributes.contentEncoding,presentationTimeOffset: eventStreamAttributes.presentationTimeOffset || 0};});}));};/*** Maps an AdaptationSet node to a list of Representation information objects** @name toRepresentationsCallback* @function* @param {Node} adaptationSet* AdaptationSet node from the mpd* @return {RepresentationInformation[]}* List of objects containing Representaion information*//*** Returns a callback for Array.prototype.map for mapping AdaptationSet nodes to a list of* Representation information objects** @param {Object} periodAttributes* Contains attributes inherited by the Period* @param {Object[]} periodBaseUrls* Contains list of objects with resolved base urls and attributes* inherited by the Period* @param {string[]} periodSegmentInfo* Contains Segment Information at the period level* @return {toRepresentationsCallback}* Callback map function*/const toRepresentations = (periodAttributes, periodBaseUrls, periodSegmentInfo) => adaptationSet => {const adaptationSetAttributes = parseAttributes(adaptationSet);const adaptationSetBaseUrls = buildBaseUrls(periodBaseUrls, findChildren(adaptationSet, 'BaseURL'));const role = findChildren(adaptationSet, 'Role')[0];const roleAttributes = {role: parseAttributes(role)};let attrs = merge$1(periodAttributes, adaptationSetAttributes, roleAttributes);const accessibility = findChildren(adaptationSet, 'Accessibility')[0];const captionServices = parseCaptionServiceMetadata(parseAttributes(accessibility));if (captionServices) {attrs = merge$1(attrs, {captionServices});}const label = findChildren(adaptationSet, 'Label')[0];if (label && label.childNodes.length) {const labelVal = label.childNodes[0].nodeValue.trim();attrs = merge$1(attrs, {label: labelVal});}const contentProtection = generateKeySystemInformation(findChildren(adaptationSet, 'ContentProtection'));if (Object.keys(contentProtection).length) {attrs = merge$1(attrs, {contentProtection});}const segmentInfo = getSegmentInformation(adaptationSet);const representations = findChildren(adaptationSet, 'Representation');const adaptationSetSegmentInfo = merge$1(periodSegmentInfo, segmentInfo);return flatten(representations.map(inheritBaseUrls(attrs, adaptationSetBaseUrls, adaptationSetSegmentInfo)));};/*** Contains all period information for mapping nodes onto adaptation sets.** @typedef {Object} PeriodInformation* @property {Node} period.node* Period node from the mpd* @property {Object} period.attributes* Parsed period attributes from node plus any added*//*** Maps a PeriodInformation object to a list of Representation information objects for all* AdaptationSet nodes contained within the Period.** @name toAdaptationSetsCallback* @function* @param {PeriodInformation} period* Period object containing necessary period information* @param {number} periodStart* Start time of the Period within the mpd* @return {RepresentationInformation[]}* List of objects containing Representaion information*//*** Returns a callback for Array.prototype.map for mapping Period nodes to a list of* Representation information objects** @param {Object} mpdAttributes* Contains attributes inherited by the mpd* @param {Object[]} mpdBaseUrls* Contains list of objects with resolved base urls and attributes* inherited by the mpd* @return {toAdaptationSetsCallback}* Callback map function*/const toAdaptationSets = (mpdAttributes, mpdBaseUrls) => (period, index) => {const periodBaseUrls = buildBaseUrls(mpdBaseUrls, findChildren(period.node, 'BaseURL'));const periodAttributes = merge$1(mpdAttributes, {periodStart: 
period.attributes.start});if (typeof period.attributes.duration === 'number') {periodAttributes.periodDuration = period.attributes.duration;}const adaptationSets = findChildren(period.node, 'AdaptationSet');const periodSegmentInfo = getSegmentInformation(period.node);return flatten(adaptationSets.map(toRepresentations(periodAttributes, periodBaseUrls, periodSegmentInfo)));};/*** Tranforms an array of content steering nodes into an object* containing CDN content steering information from the MPD manifest.** For more information on the DASH spec for Content Steering parsing, see:* https://dashif.org/docs/DASH-IF-CTS-00XX-Content-Steering-Community-Review.pdf** @param {Node[]} contentSteeringNodes* Content steering nodes* @param {Function} eventHandler* The event handler passed into the parser options to handle warnings* @return {Object}* Object containing content steering data*/const generateContentSteeringInformation = (contentSteeringNodes, eventHandler) => {// If there are more than one ContentSteering tags, throw an errorif (contentSteeringNodes.length > 1) {eventHandler({type: 'warn',message: 'The MPD manifest should contain no more than one ContentSteering tag'});} // Return a null value if there are no ContentSteering tagsif (!contentSteeringNodes.length) {return null;}const infoFromContentSteeringTag = merge$1({serverURL: getContent(contentSteeringNodes[0])}, parseAttributes(contentSteeringNodes[0])); // Converts `queryBeforeStart` to a boolean, as well as setting the default value// to `false` if it doesn't existinfoFromContentSteeringTag.queryBeforeStart = infoFromContentSteeringTag.queryBeforeStart === 'true';return infoFromContentSteeringTag;};/*** Gets Period@start property for a given period.** @param {Object} options* Options object* @param {Object} options.attributes* Period attributes* @param {Object} [options.priorPeriodAttributes]* Prior period attributes (if prior period is available)* @param {string} options.mpdType* The MPD@type these periods came from* @return {number|null}* The period start, or null if it's an early available period or error*/const getPeriodStart = ({attributes,priorPeriodAttributes,mpdType}) => {// Summary of period start time calculation from DASH spec section 5.3.2.1//// A period's start is the first period's start + time elapsed after playing all// prior periods to this one. Periods continue one after the other in time (without// gaps) until the end of the presentation.//// The value of Period@start should be:// 1. if Period@start is present: value of Period@start// 2. if previous period exists and it has @duration: previous Period@start +// previous Period@duration// 3. if this is first period and MPD@type is 'static': 0// 4. in all other cases, consider the period an "early available period" (note: not// currently supported)// (1)if (typeof attributes.start === 'number') {return attributes.start;} // (2)if (priorPeriodAttributes && typeof priorPeriodAttributes.start === 'number' && typeof priorPeriodAttributes.duration === 'number') {return priorPeriodAttributes.start + priorPeriodAttributes.duration;} // (3)if (!priorPeriodAttributes && mpdType === 'static') {return 0;} // (4)// There is currently no logic for calculating the Period@start value if there is// no Period@start or prior Period@start and Period@duration available. This is not made// explicit by the DASH interop guidelines or the DASH spec, however, since there's// nothing about any other resolution strategies, it's implied. 
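  // --- Illustrative sketch (not part of this bundle): how rules (1)-(4) above resolve.
  // The attribute objects below are made-up inputs used only to walk through each rule.
  /*
  getPeriodStart({ attributes: { start: 15 }, priorPeriodAttributes: null, mpdType: 'dynamic' });
  // -> 15 (rule 1: an explicit Period@start wins)
  getPeriodStart({ attributes: {}, priorPeriodAttributes: { start: 0, duration: 30 }, mpdType: 'dynamic' });
  // -> 30 (rule 2: prior Period@start + prior Period@duration)
  getPeriodStart({ attributes: {}, priorPeriodAttributes: null, mpdType: 'static' });
  // -> 0 (rule 3: first period of a static MPD)
  getPeriodStart({ attributes: {}, priorPeriodAttributes: null, mpdType: 'dynamic' });
  // -> null (rule 4: treated as an early available period)
  */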
Thus, this case should// be considered an early available period, or error, and null should suffice for both// of those cases.return null;};/*** Traverses the mpd xml tree to generate a list of Representation information objects* that have inherited attributes from parent nodes** @param {Node} mpd* The root node of the mpd* @param {Object} options* Available options for inheritAttributes* @param {string} options.manifestUri* The uri source of the mpd* @param {number} options.NOW* Current time per DASH IOP. Default is current time in ms since epoch* @param {number} options.clientOffset* Client time difference from NOW (in milliseconds)* @return {RepresentationInformation[]}* List of objects containing Representation information*/const inheritAttributes = (mpd, options = {}) => {const {manifestUri = '',NOW = Date.now(),clientOffset = 0,// TODO: For now, we are expecting an eventHandler callback function// to be passed into the mpd parser as an option.// In the future, we should enable stream parsing by using the Stream class from vhs-utils.// This will support new features including a standardized event handler.// See the m3u8 parser for examples of how stream parsing is currently used for HLS parsing.// https://github.com/videojs/vhs-utils/blob/88d6e10c631e57a5af02c5a62bc7376cd456b4f5/src/stream.js#L9eventHandler = function () {}} = options;const periodNodes = findChildren(mpd, 'Period');if (!periodNodes.length) {throw new Error(errors.INVALID_NUMBER_OF_PERIOD);}const locations = findChildren(mpd, 'Location');const mpdAttributes = parseAttributes(mpd);const mpdBaseUrls = buildBaseUrls([{baseUrl: manifestUri}], findChildren(mpd, 'BaseURL'));const contentSteeringNodes = findChildren(mpd, 'ContentSteering'); // See DASH spec section 5.3.1.2, Semantics of MPD element. Default type to 'static'.mpdAttributes.type = mpdAttributes.type || 'static';mpdAttributes.sourceDuration = mpdAttributes.mediaPresentationDuration || 0;mpdAttributes.NOW = NOW;mpdAttributes.clientOffset = clientOffset;if (locations.length) {mpdAttributes.locations = locations.map(getContent);}const periods = []; // Since toAdaptationSets acts on individual periods right now, the simplest approach to// adding properties that require looking at prior periods is to parse attributes and add// missing ones before toAdaptationSets is called. If more such properties are added, it// may be better to refactor toAdaptationSets.periodNodes.forEach((node, index) => {const attributes = parseAttributes(node); // Use the last modified prior period, as it may contain added information necessary// for this period.const priorPeriod = periods[index - 1];attributes.start = getPeriodStart({attributes,priorPeriodAttributes: priorPeriod ? priorPeriod.attributes : null,mpdType: mpdAttributes.type});periods.push({node,attributes});});return {locations: mpdAttributes.locations,contentSteeringInfo: generateContentSteeringInformation(contentSteeringNodes, eventHandler),// TODO: There are occurences where this `representationInfo` array contains undesired// duplicates. This generally occurs when there are multiple BaseURL nodes that are// direct children of the MPD node. 
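  // --- Hedged sketch (not the project's chosen fix): one way the duplicates this TODO
  // describes could be filtered out. The helper `dedupeBy` is hypothetical and does not
  // exist in this bundle; it simply keeps the first entry seen for each key.
  /*
  const dedupeBy = (list, keyFn) => {
    const seen = new Set();
    return list.filter(item => {
      const key = keyFn(item);
      if (seen.has(key)) {
        return false;
      }
      seen.add(key);
      return true;
    });
  };
  // e.g. dedupeBy(representationInfo, info => JSON.stringify(info.attributes))
  */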
When we attempt to resolve URLs from a combination of the// parent BaseURL and a child BaseURL, and the value does not resolve,// we end up returning the child BaseURL multiple times.// We need to determine a way to remove these duplicates in a safe way.// See: https://github.com/videojs/mpd-parser/pull/17#discussion_r162750527representationInfo: flatten(periods.map(toAdaptationSets(mpdAttributes, mpdBaseUrls))),eventStream: flatten(periods.map(toEventStream))};};const stringToMpdXml = manifestString => {if (manifestString === '') {throw new Error(errors.DASH_EMPTY_MANIFEST);}const parser = new DOMParser();let xml;let mpd;try {xml = parser.parseFromString(manifestString, 'application/xml');mpd = xml && xml.documentElement.tagName === 'MPD' ? xml.documentElement : null;} catch (e) {// ie 11 throws on invalid xml}if (!mpd || mpd && mpd.getElementsByTagName('parsererror').length > 0) {throw new Error(errors.DASH_INVALID_XML);}return mpd;};/*** Parses the manifest for a UTCTiming node, returning the nodes attributes if found** @param {string} mpd* XML string of the MPD manifest* @return {Object|null}* Attributes of UTCTiming node specified in the manifest. Null if none found*/const parseUTCTimingScheme = mpd => {const UTCTimingNode = findChildren(mpd, 'UTCTiming')[0];if (!UTCTimingNode) {return null;}const attributes = parseAttributes(UTCTimingNode);switch (attributes.schemeIdUri) {case 'urn:mpeg:dash:utc:http-head:2014':case 'urn:mpeg:dash:utc:http-head:2012':attributes.method = 'HEAD';break;case 'urn:mpeg:dash:utc:http-xsdate:2014':case 'urn:mpeg:dash:utc:http-iso:2014':case 'urn:mpeg:dash:utc:http-xsdate:2012':case 'urn:mpeg:dash:utc:http-iso:2012':attributes.method = 'GET';break;case 'urn:mpeg:dash:utc:direct:2014':case 'urn:mpeg:dash:utc:direct:2012':attributes.method = 'DIRECT';attributes.value = Date.parse(attributes.value);break;case 'urn:mpeg:dash:utc:http-ntp:2014':case 'urn:mpeg:dash:utc:ntp:2014':case 'urn:mpeg:dash:utc:sntp:2014':default:throw new Error(errors.UNSUPPORTED_UTC_TIMING_SCHEME);}return attributes;};/** Given a DASH manifest string and options, parses the DASH manifest into an object in the* form outputed by m3u8-parser and accepted by videojs/http-streaming.** For live DASH manifests, if `previousManifest` is provided in options, then the newly* parsed DASH manifest will have its media sequence and discontinuity sequence values* updated to reflect its position relative to the prior manifest.** @param {string} manifestString - the DASH manifest as a string* @param {options} [options] - any options** @return {Object} the manifest object*/const parse = (manifestString, options = {}) => {const parsedManifestInfo = inheritAttributes(stringToMpdXml(manifestString), options);const playlists = toPlaylists(parsedManifestInfo.representationInfo);return toM3u8({dashPlaylists: playlists,locations: parsedManifestInfo.locations,contentSteering: parsedManifestInfo.contentSteeringInfo,sidxMapping: options.sidxMapping,previousManifest: options.previousManifest,eventStream: parsedManifestInfo.eventStream});};/*** Parses the manifest for a UTCTiming node, returning the nodes attributes if found** @param {string} manifestString* XML string of the MPD manifest* @return {Object|null}* Attributes of UTCTiming node specified in the manifest. 
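 *
 * A hedged usage sketch (the MPD snippet and URL below are made up, not taken from this file):
 *
 *   const timing = parseUTCTiming(
 *     '<MPD><UTCTiming schemeIdUri="urn:mpeg:dash:utc:http-xsdate:2014" value="https://time.example.com/now"/></MPD>'
 *   );
 *   // timing.method === 'GET', timing.value === 'https://time.example.com/now'
 *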
Null if none found*/const parseUTCTiming = manifestString => parseUTCTimingScheme(stringToMpdXml(manifestString));var MAX_UINT32 = Math.pow(2, 32);var getUint64$1 = function (uint8) {var dv = new DataView(uint8.buffer, uint8.byteOffset, uint8.byteLength);var value;if (dv.getBigUint64) {value = dv.getBigUint64(0);if (value < Number.MAX_SAFE_INTEGER) {return Number(value);}return value;}return dv.getUint32(0) * MAX_UINT32 + dv.getUint32(4);};var numbers = {getUint64: getUint64$1,MAX_UINT32: MAX_UINT32};var getUint64 = numbers.getUint64;var parseSidx = function (data) {var view = new DataView(data.buffer, data.byteOffset, data.byteLength),result = {version: data[0],flags: new Uint8Array(data.subarray(1, 4)),references: [],referenceId: view.getUint32(4),timescale: view.getUint32(8)},i = 12;if (result.version === 0) {result.earliestPresentationTime = view.getUint32(i);result.firstOffset = view.getUint32(i + 4);i += 8;} else {// read 64 bitsresult.earliestPresentationTime = getUint64(data.subarray(i));result.firstOffset = getUint64(data.subarray(i + 8));i += 16;}i += 2; // reservedvar referenceCount = view.getUint16(i);i += 2; // start of referencesfor (; referenceCount > 0; i += 12, referenceCount--) {result.references.push({referenceType: (data[i] & 0x80) >>> 7,referencedSize: view.getUint32(i) & 0x7FFFFFFF,subsegmentDuration: view.getUint32(i + 4),startsWithSap: !!(data[i + 8] & 0x80),sapType: (data[i + 8] & 0x70) >>> 4,sapDeltaTime: view.getUint32(i + 8) & 0x0FFFFFFF});}return result;};var parseSidx_1 = parseSidx;var ID3 = toUint8([0x49, 0x44, 0x33]);var getId3Size = function getId3Size(bytes, offset) {if (offset === void 0) {offset = 0;}bytes = toUint8(bytes);var flags = bytes[offset + 5];var returnSize = bytes[offset + 6] << 21 | bytes[offset + 7] << 14 | bytes[offset + 8] << 7 | bytes[offset + 9];var footerPresent = (flags & 16) >> 4;if (footerPresent) {return returnSize + 20;}return returnSize + 10;};var getId3Offset = function getId3Offset(bytes, offset) {if (offset === void 0) {offset = 0;}bytes = toUint8(bytes);if (bytes.length - offset < 10 || !bytesMatch(bytes, ID3, {offset: offset})) {return offset;}offset += getId3Size(bytes, offset); // recursive check for id3 tags as some files// have multiple ID3 tag sections even though// they should not.return getId3Offset(bytes, offset);};var normalizePath$1 = function normalizePath(path) {if (typeof path === 'string') {return stringToBytes(path);}if (typeof path === 'number') {return path;}return path;};var normalizePaths$1 = function normalizePaths(paths) {if (!Array.isArray(paths)) {return [normalizePath$1(paths)];}return paths.map(function (p) {return normalizePath$1(p);});};/*** find any number of boxes by name given a path to it in an iso bmff* such as mp4.** @param {TypedArray} bytes* bytes for the iso bmff to search for boxes in** @param {Uint8Array[]|string[]|string|Uint8Array} name* An array of paths or a single path representing the name* of boxes to search through in bytes. 
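 *
 * A minimal usage sketch (`segmentBytes` is an assumed Uint8Array holding an fmp4 segment):
 *
 *   // collect the payload of every tfdt box nested under moof > traf
 *   const tfdts = findBox(segmentBytes, ['moof', 'traf', 'tfdt']);
 *   // each entry is a Uint8Array of box contents, with the 8-byte size/type header stripped
 *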
Paths may be* uint8 (character codes) or strings.** @param {boolean} [complete=false]* Should we search only for complete boxes on the final path.* This is very useful when you do not want to get back partial boxes* in the case of streaming files.** @return {Uint8Array[]}* An array of the end paths that we found.*/var findBox = function findBox(bytes, paths, complete) {if (complete === void 0) {complete = false;}paths = normalizePaths$1(paths);bytes = toUint8(bytes);var results = [];if (!paths.length) {// short-circuit the search for empty pathsreturn results;}var i = 0;while (i < bytes.length) {var size = (bytes[i] << 24 | bytes[i + 1] << 16 | bytes[i + 2] << 8 | bytes[i + 3]) >>> 0;var type = bytes.subarray(i + 4, i + 8); // invalid box format.if (size === 0) {break;}var end = i + size;if (end > bytes.length) {// this box is bigger than the number of bytes we have// and complete is set, we cannot find any more boxes.if (complete) {break;}end = bytes.length;}var data = bytes.subarray(i + 8, end);if (bytesMatch(type, paths[0])) {if (paths.length === 1) {// this is the end of the path and we've found the box we were// looking forresults.push(data);} else {// recursively search for the next box along the pathresults.push.apply(results, findBox(data, paths.slice(1), complete));}}i = end;} // we've finished searching all of bytesreturn results;};// https://matroska-org.github.io/libebml/specs.html// https://www.matroska.org/technical/elements.html// https://www.webmproject.org/docs/container/var EBML_TAGS = {EBML: toUint8([0x1A, 0x45, 0xDF, 0xA3]),DocType: toUint8([0x42, 0x82]),Segment: toUint8([0x18, 0x53, 0x80, 0x67]),SegmentInfo: toUint8([0x15, 0x49, 0xA9, 0x66]),Tracks: toUint8([0x16, 0x54, 0xAE, 0x6B]),Track: toUint8([0xAE]),TrackNumber: toUint8([0xd7]),DefaultDuration: toUint8([0x23, 0xe3, 0x83]),TrackEntry: toUint8([0xAE]),TrackType: toUint8([0x83]),FlagDefault: toUint8([0x88]),CodecID: toUint8([0x86]),CodecPrivate: toUint8([0x63, 0xA2]),VideoTrack: toUint8([0xe0]),AudioTrack: toUint8([0xe1]),// Not used yet, but will be used for live webm/mkv// see https://www.matroska.org/technical/basics.html#block-structure// see https://www.matroska.org/technical/basics.html#simpleblock-structureCluster: toUint8([0x1F, 0x43, 0xB6, 0x75]),Timestamp: toUint8([0xE7]),TimestampScale: toUint8([0x2A, 0xD7, 0xB1]),BlockGroup: toUint8([0xA0]),BlockDuration: toUint8([0x9B]),Block: toUint8([0xA1]),SimpleBlock: toUint8([0xA3])};/*** This is a simple table to determine the length* of things in ebml. The length is one based (starts at 1,* rather than zero) and for every zero bit before a one bit* we add one to length. We also need this table because in some* case we have to xor all the length bits from another value.*/var LENGTH_TABLE = [128, 64, 32, 16, 8, 4, 2, 1];var getLength = function getLength(byte) {var len = 1;for (var i = 0; i < LENGTH_TABLE.length; i++) {if (byte & LENGTH_TABLE[i]) {break;}len++;}return len;}; // length in ebml is stored in the first 4 to 8 bits// of the first byte. 4 for the id length and 8 for the// data size length. 
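// --- Worked example (illustrative only): getLength() above counts the leading zero bits
// of a vint's first byte to decide how many bytes the vint occupies.
/*
getLength(0x81); // 1: 0b10000001 starts with a 1, so the vint is a single byte
getLength(0x42); // 2: 0b01000010 has one leading zero, so the vint spans two bytes
getLength(0x23); // 3: 0b00100011 has two leading zeros, so the vint spans three bytes
getLength(0x1A); // 4: 0b00011010 has three leading zeros, e.g. the EBML header id 0x1A45DFA3
*/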
Length is measured by converting the number to binary// then 1 + the number of zeros before a 1 is encountered starting// from the left.var getvint = function getvint(bytes, offset, removeLength, signed) {if (removeLength === void 0) {removeLength = true;}if (signed === void 0) {signed = false;}var length = getLength(bytes[offset]);var valueBytes = bytes.subarray(offset, offset + length); // NOTE that we do **not** subarray here because we need to copy these bytes// as they will be modified below to remove the dataSizeLen bits and we do not// want to modify the original data. normally we could just call slice on// uint8array but ie 11 does not support that...if (removeLength) {valueBytes = Array.prototype.slice.call(bytes, offset, offset + length);valueBytes[0] ^= LENGTH_TABLE[length - 1];}return {length: length,value: bytesToNumber(valueBytes, {signed: signed}),bytes: valueBytes};};var normalizePath = function normalizePath(path) {if (typeof path === 'string') {return path.match(/.{1,2}/g).map(function (p) {return normalizePath(p);});}if (typeof path === 'number') {return numberToBytes(path);}return path;};var normalizePaths = function normalizePaths(paths) {if (!Array.isArray(paths)) {return [normalizePath(paths)];}return paths.map(function (p) {return normalizePath(p);});};var getInfinityDataSize = function getInfinityDataSize(id, bytes, offset) {if (offset >= bytes.length) {return bytes.length;}var innerid = getvint(bytes, offset, false);if (bytesMatch(id.bytes, innerid.bytes)) {return offset;}var dataHeader = getvint(bytes, offset + innerid.length);return getInfinityDataSize(id, bytes, offset + dataHeader.length + dataHeader.value + innerid.length);};/*** Notes on the EBLM format.** EBLM uses "vints" tags. Every vint tag contains* two parts** 1. The length from the first byte. You get this by* converting the byte to binary and counting the zeros* before a 1. Then you add 1 to that. Examples* 00011111 = length 4 because there are 3 zeros before a 1.* 00100000 = length 3 because there are 2 zeros before a 1.* 00000011 = length 7 because there are 6 zeros before a 1.** 2. The bits used for length are removed from the first byte* Then all the bytes are merged into a value. NOTE: this* is not the case for id ebml tags as there id includes* length bits.**/var findEbml = function findEbml(bytes, paths) {paths = normalizePaths(paths);bytes = toUint8(bytes);var results = [];if (!paths.length) {return results;}var i = 0;while (i < bytes.length) {var id = getvint(bytes, i, false);var dataHeader = getvint(bytes, i + id.length);var dataStart = i + id.length + dataHeader.length; // dataSize is unknown or this is a live streamif (dataHeader.value === 0x7f) {dataHeader.value = getInfinityDataSize(id, bytes, dataStart);if (dataHeader.value !== bytes.length) {dataHeader.value -= dataStart;}}var dataEnd = dataStart + dataHeader.value > bytes.length ? 
bytes.length : dataStart + dataHeader.value;var data = bytes.subarray(dataStart, dataEnd);if (bytesMatch(paths[0], id.bytes)) {if (paths.length === 1) {// this is the end of the paths and we've found the tag we were// looking forresults.push(data);} else {// recursively search for the next tag inside of the data// of this oneresults = results.concat(findEbml(data, paths.slice(1)));}}var totalLength = id.length + dataHeader.length + data.length; // move past this tag entirely, we are not looking for iti += totalLength;}return results;}; // see https://www.matroska.org/technical/basics.html#block-structurevar NAL_TYPE_ONE = toUint8([0x00, 0x00, 0x00, 0x01]);var NAL_TYPE_TWO = toUint8([0x00, 0x00, 0x01]);var EMULATION_PREVENTION = toUint8([0x00, 0x00, 0x03]);/*** Expunge any "Emulation Prevention" bytes from a "Raw Byte* Sequence Payload"** @param data {Uint8Array} the bytes of a RBSP from a NAL* unit* @return {Uint8Array} the RBSP without any Emulation* Prevention Bytes*/var discardEmulationPreventionBytes = function discardEmulationPreventionBytes(bytes) {var positions = [];var i = 1; // Find all `Emulation Prevention Bytes`while (i < bytes.length - 2) {if (bytesMatch(bytes.subarray(i, i + 3), EMULATION_PREVENTION)) {positions.push(i + 2);i++;}i++;} // If no Emulation Prevention Bytes were found just return the original// arrayif (positions.length === 0) {return bytes;} // Create a new array to hold the NAL unit datavar newLength = bytes.length - positions.length;var newData = new Uint8Array(newLength);var sourceIndex = 0;for (i = 0; i < newLength; sourceIndex++, i++) {if (sourceIndex === positions[0]) {// Skip this bytesourceIndex++; // Remove this position indexpositions.shift();}newData[i] = bytes[sourceIndex];}return newData;};var findNal = function findNal(bytes, dataType, types, nalLimit) {if (nalLimit === void 0) {nalLimit = Infinity;}bytes = toUint8(bytes);types = [].concat(types);var i = 0;var nalStart;var nalsFound = 0; // keep searching until:// we reach the end of bytes// we reach the maximum number of nals they want to seach// NOTE: that we disregard nalLimit when we have found the start// of the nal we want so that we can find the end of the nal we want.while (i < bytes.length && (nalsFound < nalLimit || nalStart)) {var nalOffset = void 0;if (bytesMatch(bytes.subarray(i), NAL_TYPE_ONE)) {nalOffset = 4;} else if (bytesMatch(bytes.subarray(i), NAL_TYPE_TWO)) {nalOffset = 3;} // we are unsynced,// find the next nal unitif (!nalOffset) {i++;continue;}nalsFound++;if (nalStart) {return discardEmulationPreventionBytes(bytes.subarray(nalStart, i));}var nalType = void 0;if (dataType === 'h264') {nalType = bytes[i + nalOffset] & 0x1f;} else if (dataType === 'h265') {nalType = bytes[i + nalOffset] >> 1 & 0x3f;}if (types.indexOf(nalType) !== -1) {nalStart = i + nalOffset;} // nal header is 1 length for h264, and 2 for h265i += nalOffset + (dataType === 'h264' ? 
1 : 2);}return bytes.subarray(0, 0);};var findH264Nal = function findH264Nal(bytes, type, nalLimit) {return findNal(bytes, 'h264', type, nalLimit);};var findH265Nal = function findH265Nal(bytes, type, nalLimit) {return findNal(bytes, 'h265', type, nalLimit);};var CONSTANTS = {// "webm" string literal in hex'webm': toUint8([0x77, 0x65, 0x62, 0x6d]),// "matroska" string literal in hex'matroska': toUint8([0x6d, 0x61, 0x74, 0x72, 0x6f, 0x73, 0x6b, 0x61]),// "fLaC" string literal in hex'flac': toUint8([0x66, 0x4c, 0x61, 0x43]),// "OggS" string literal in hex'ogg': toUint8([0x4f, 0x67, 0x67, 0x53]),// ac-3 sync byte, also works for ec-3 as that is simply a codec// of ac-3'ac3': toUint8([0x0b, 0x77]),// "RIFF" string literal in hex used for wav and avi'riff': toUint8([0x52, 0x49, 0x46, 0x46]),// "AVI" string literal in hex'avi': toUint8([0x41, 0x56, 0x49]),// "WAVE" string literal in hex'wav': toUint8([0x57, 0x41, 0x56, 0x45]),// "ftyp3g" string literal in hex'3gp': toUint8([0x66, 0x74, 0x79, 0x70, 0x33, 0x67]),// "ftyp" string literal in hex'mp4': toUint8([0x66, 0x74, 0x79, 0x70]),// "styp" string literal in hex'fmp4': toUint8([0x73, 0x74, 0x79, 0x70]),// "ftypqt" string literal in hex'mov': toUint8([0x66, 0x74, 0x79, 0x70, 0x71, 0x74]),// moov string literal in hex'moov': toUint8([0x6D, 0x6F, 0x6F, 0x76]),// moof string literal in hex'moof': toUint8([0x6D, 0x6F, 0x6F, 0x66])};var _isLikely = {aac: function aac(bytes) {var offset = getId3Offset(bytes);return bytesMatch(bytes, [0xFF, 0x10], {offset: offset,mask: [0xFF, 0x16]});},mp3: function mp3(bytes) {var offset = getId3Offset(bytes);return bytesMatch(bytes, [0xFF, 0x02], {offset: offset,mask: [0xFF, 0x06]});},webm: function webm(bytes) {var docType = findEbml(bytes, [EBML_TAGS.EBML, EBML_TAGS.DocType])[0]; // check if DocType EBML tag is webmreturn bytesMatch(docType, CONSTANTS.webm);},mkv: function mkv(bytes) {var docType = findEbml(bytes, [EBML_TAGS.EBML, EBML_TAGS.DocType])[0]; // check if DocType EBML tag is matroskareturn bytesMatch(docType, CONSTANTS.matroska);},mp4: function mp4(bytes) {// if this file is another base media file format, it is not mp4if (_isLikely['3gp'](bytes) || _isLikely.mov(bytes)) {return false;} // if this file starts with a ftyp or styp box its mp4if (bytesMatch(bytes, CONSTANTS.mp4, {offset: 4}) || bytesMatch(bytes, CONSTANTS.fmp4, {offset: 4})) {return true;} // if this file starts with a moof/moov box its mp4if (bytesMatch(bytes, CONSTANTS.moof, {offset: 4}) || bytesMatch(bytes, CONSTANTS.moov, {offset: 4})) {return true;}},mov: function mov(bytes) {return bytesMatch(bytes, CONSTANTS.mov, {offset: 4});},'3gp': function gp(bytes) {return bytesMatch(bytes, CONSTANTS['3gp'], {offset: 4});},ac3: function ac3(bytes) {var offset = getId3Offset(bytes);return bytesMatch(bytes, CONSTANTS.ac3, {offset: offset});},ts: function ts(bytes) {if (bytes.length < 189 && bytes.length >= 1) {return bytes[0] === 0x47;}var i = 0; // check the first 376 bytes for two matching sync byteswhile (i + 188 < bytes.length && i < 188) {if (bytes[i] === 0x47 && bytes[i + 188] === 0x47) {return true;}i += 1;}return false;},flac: function flac(bytes) {var offset = getId3Offset(bytes);return bytesMatch(bytes, CONSTANTS.flac, {offset: offset});},ogg: function ogg(bytes) {return bytesMatch(bytes, CONSTANTS.ogg);},avi: function avi(bytes) {return bytesMatch(bytes, CONSTANTS.riff) && bytesMatch(bytes, CONSTANTS.avi, {offset: 8});},wav: function wav(bytes) {return bytesMatch(bytes, CONSTANTS.riff) && bytesMatch(bytes, CONSTANTS.wav, {offset: 
8});},'h264': function h264(bytes) {// find seq_parameter_set_rbspreturn findH264Nal(bytes, 7, 3).length;},'h265': function h265(bytes) {// find video_parameter_set_rbsp or seq_parameter_set_rbspreturn findH265Nal(bytes, [32, 33], 3).length;}}; // get all the isLikely functions// but make sure 'ts' is above h264 and h265// but below everything else as it is the least specificvar isLikelyTypes = Object.keys(_isLikely) // remove ts, h264, h265.filter(function (t) {return t !== 'ts' && t !== 'h264' && t !== 'h265';}) // add it back to the bottom.concat(['ts', 'h264', 'h265']); // make sure we are dealing with uint8 data.isLikelyTypes.forEach(function (type) {var isLikelyFn = _isLikely[type];_isLikely[type] = function (bytes) {return isLikelyFn(toUint8(bytes));};}); // export after wrappingvar isLikely = _isLikely; // A useful list of file signatures can be found here// https://en.wikipedia.org/wiki/List_of_file_signaturesvar detectContainerForBytes = function detectContainerForBytes(bytes) {bytes = toUint8(bytes);for (var i = 0; i < isLikelyTypes.length; i++) {var type = isLikelyTypes[i];if (isLikely[type](bytes)) {return type;}}return '';}; // fmp4 is not a containervar isLikelyFmp4MediaSegment = function isLikelyFmp4MediaSegment(bytes) {return findBox(bytes, ['moof']).length > 0;};/*** mux.js** Copyright (c) Brightcove* Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE*/var ONE_SECOND_IN_TS = 90000,// 90kHz clocksecondsToVideoTs,secondsToAudioTs,videoTsToSeconds,audioTsToSeconds,audioTsToVideoTs,videoTsToAudioTs,metadataTsToSeconds;secondsToVideoTs = function (seconds) {return seconds * ONE_SECOND_IN_TS;};secondsToAudioTs = function (seconds, sampleRate) {return seconds * sampleRate;};videoTsToSeconds = function (timestamp) {return timestamp / ONE_SECOND_IN_TS;};audioTsToSeconds = function (timestamp, sampleRate) {return timestamp / sampleRate;};audioTsToVideoTs = function (timestamp, sampleRate) {return secondsToVideoTs(audioTsToSeconds(timestamp, sampleRate));};videoTsToAudioTs = function (timestamp, sampleRate) {return secondsToAudioTs(videoTsToSeconds(timestamp), sampleRate);};/*** Adjust ID3 tag or caption timing information by the timeline pts values* (if keepOriginalTimestamps is false) and convert to seconds*/metadataTsToSeconds = function (timestamp, timelineStartPts, keepOriginalTimestamps) {return videoTsToSeconds(keepOriginalTimestamps ? timestamp : timestamp - timelineStartPts);};var clock = {ONE_SECOND_IN_TS: ONE_SECOND_IN_TS,secondsToVideoTs: secondsToVideoTs,secondsToAudioTs: secondsToAudioTs,videoTsToSeconds: videoTsToSeconds,audioTsToSeconds: audioTsToSeconds,audioTsToVideoTs: audioTsToVideoTs,videoTsToAudioTs: videoTsToAudioTs,metadataTsToSeconds: metadataTsToSeconds};var clock_1 = clock.ONE_SECOND_IN_TS;/*! 
@name @videojs/http-streaming @version 3.10.0 @license Apache-2.0 *//*** @file resolve-url.js - Handling how URLs are resolved and manipulated*/const resolveUrl = resolveUrl$1;/*** If the xhr request was redirected, return the responseURL, otherwise,* return the original url.** @api private** @param {string} url - an url being requested* @param {XMLHttpRequest} req - xhr request result** @return {string}*/const resolveManifestRedirect = (url, req) => {// To understand how the responseURL below is set and generated:// - https://fetch.spec.whatwg.org/#concept-response-url// - https://fetch.spec.whatwg.org/#atomic-http-redirect-handlingif (req && req.responseURL && url !== req.responseURL) {return req.responseURL;}return url;};const logger = source => {if (videojs.log.debug) {return videojs.log.debug.bind(videojs, 'VHS:', `${source} >`);}return function () {};};/*** Provides a compatibility layer between Video.js 7 and 8 API changes for VHS.*//*** Delegates to videojs.obj.merge (Video.js 8) or* videojs.mergeOptions (Video.js 7).*/function merge(...args) {const context = videojs.obj || videojs;const fn = context.merge || context.mergeOptions;return fn.apply(context, args);}/*** Delegates to videojs.time.createTimeRanges (Video.js 8) or* videojs.createTimeRanges (Video.js 7).*/function createTimeRanges(...args) {const context = videojs.time || videojs;const fn = context.createTimeRanges || context.createTimeRanges;return fn.apply(context, args);}/*** ranges** Utilities for working with TimeRanges.**/const TIME_FUDGE_FACTOR = 1 / 30; // Comparisons between time values such as current time and the end of the buffered range// can be misleading because of precision differences or when the current media has poorly// aligned audio and video, which can cause values to be slightly off from what you would// expect. 
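// --- Illustrative sketch (not part of the bundle): TIME_FUDGE_FACTOR is one frame at
// 30fps (~0.033s) and SAFE_TIME_DELTA, defined just below, is three frames (0.1s).
/*
findRange(createTimeRanges([[0, 10]]), 10.05);
// -> a TimeRanges containing [0, 10]: findRange() (defined further down) treats 10.05 as
//    inside the range because 10 + SAFE_TIME_DELTA (0.1) >= 10.05
*/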
This value is what we consider to be safe to use in such comparisons to account// for these scenarios.const SAFE_TIME_DELTA = TIME_FUDGE_FACTOR * 3;const filterRanges = function (timeRanges, predicate) {const results = [];let i;if (timeRanges && timeRanges.length) {// Search for ranges that match the predicatefor (i = 0; i < timeRanges.length; i++) {if (predicate(timeRanges.start(i), timeRanges.end(i))) {results.push([timeRanges.start(i), timeRanges.end(i)]);}}}return createTimeRanges(results);};/*** Attempts to find the buffered TimeRange that contains the specified* time.** @param {TimeRanges} buffered - the TimeRanges object to query* @param {number} time - the time to filter on.* @return {TimeRanges} a new TimeRanges object*/const findRange = function (buffered, time) {return filterRanges(buffered, function (start, end) {return start - SAFE_TIME_DELTA <= time && end + SAFE_TIME_DELTA >= time;});};/*** Returns the TimeRanges that begin later than the specified time.** @param {TimeRanges} timeRanges - the TimeRanges object to query* @param {number} time - the time to filter on.* @return {TimeRanges} a new TimeRanges object.*/const findNextRange = function (timeRanges, time) {return filterRanges(timeRanges, function (start) {return start - TIME_FUDGE_FACTOR >= time;});};/*** Returns gaps within a list of TimeRanges** @param {TimeRanges} buffered - the TimeRanges object* @return {TimeRanges} a TimeRanges object of gaps*/const findGaps = function (buffered) {if (buffered.length < 2) {return createTimeRanges();}const ranges = [];for (let i = 1; i < buffered.length; i++) {const start = buffered.end(i - 1);const end = buffered.start(i);ranges.push([start, end]);}return createTimeRanges(ranges);};/*** Calculate the intersection of two TimeRanges** @param {TimeRanges} bufferA* @param {TimeRanges} bufferB* @return {TimeRanges} The interesection of `bufferA` with `bufferB`*/const bufferIntersection = function (bufferA, bufferB) {let start = null;let end = null;let arity = 0;const extents = [];const ranges = [];if (!bufferA || !bufferA.length || !bufferB || !bufferB.length) {return createTimeRanges();} // Handle the case where we have both buffers and create an// intersection of the twolet count = bufferA.length; // A) Gather up all start and end timeswhile (count--) {extents.push({time: bufferA.start(count),type: 'start'});extents.push({time: bufferA.end(count),type: 'end'});}count = bufferB.length;while (count--) {extents.push({time: bufferB.start(count),type: 'start'});extents.push({time: bufferB.end(count),type: 'end'});} // B) Sort them by timeextents.sort(function (a, b) {return a.time - b.time;}); // C) Go along one by one incrementing arity for start and decrementing// arity for endsfor (count = 0; count < extents.length; count++) {if (extents[count].type === 'start') {arity++; // D) If arity is ever incremented to 2 we are entering an// overlapping rangeif (arity === 2) {start = extents[count].time;}} else if (extents[count].type === 'end') {arity--; // E) If arity is ever decremented to 1 we leaving an// overlapping rangeif (arity === 1) {end = extents[count].time;}} // F) Record overlapping rangesif (start !== null && end !== null) {ranges.push([start, end]);start = null;end = null;}}return createTimeRanges(ranges);};/*** Gets a human readable string for a TimeRange** @param {TimeRange} range* @return {string} a human readable string*/const printableRange = range => {const strArr = [];if (!range || !range.length) {return '';}for (let i = 0; i < range.length; i++) 
{strArr.push(range.start(i) + ' => ' + range.end(i));}return strArr.join(', ');};/*** Calculates the amount of time left in seconds until the player hits the end of the* buffer and causes a rebuffer** @param {TimeRange} buffered* The state of the buffer* @param {Numnber} currentTime* The current time of the player* @param {number} playbackRate* The current playback rate of the player. Defaults to 1.* @return {number}* Time until the player has to start rebuffering in seconds.* @function timeUntilRebuffer*/const timeUntilRebuffer = function (buffered, currentTime, playbackRate = 1) {const bufferedEnd = buffered.length ? buffered.end(buffered.length - 1) : 0;return (bufferedEnd - currentTime) / playbackRate;};/*** Converts a TimeRanges object into an array representation** @param {TimeRanges} timeRanges* @return {Array}*/const timeRangesToArray = timeRanges => {const timeRangesList = [];for (let i = 0; i < timeRanges.length; i++) {timeRangesList.push({start: timeRanges.start(i),end: timeRanges.end(i)});}return timeRangesList;};/*** Determines if two time range objects are different.** @param {TimeRange} a* the first time range object to check** @param {TimeRange} b* the second time range object to check** @return {Boolean}* Whether the time range objects differ*/const isRangeDifferent = function (a, b) {// same objectif (a === b) {return false;} // one or the other is undefinedif (!a && b || !b && a) {return true;} // length is differentif (a.length !== b.length) {return true;} // see if any start/end pair is differentfor (let i = 0; i < a.length; i++) {if (a.start(i) !== b.start(i) || a.end(i) !== b.end(i)) {return true;}} // if the length and every pair is the same// this is the same time rangereturn false;};const lastBufferedEnd = function (a) {if (!a || !a.length || !a.end) {return;}return a.end(a.length - 1);};/*** A utility function to add up the amount of time in a timeRange* after a specified startTime.* ie:[[0, 10], [20, 40], [50, 60]] with a startTime 0* would return 40 as there are 40s seconds after 0 in the timeRange** @param {TimeRange} range* The range to check against* @param {number} startTime* The time in the time range that you should start counting from** @return {number}* The number of seconds in the buffer passed the specified time.*/const timeAheadOf = function (range, startTime) {let time = 0;if (!range || !range.length) {return time;}for (let i = 0; i < range.length; i++) {const start = range.start(i);const end = range.end(i); // startTime is after this range entirelyif (startTime > end) {continue;} // startTime is within this rangeif (startTime > start && startTime <= end) {time += end - startTime;continue;} // startTime is before this range.time += end - start;}return time;};/*** @file playlist.js** Playlist related utilities.*//*** Get the duration of a segment, with special cases for* llhls segments that do not have a duration yet.** @param {Object} playlist* the playlist that the segment belongs to.* @param {Object} segment* the segment to get a duration for.** @return {number}* the segment duration*/const segmentDurationWithParts = (playlist, segment) => {// if this isn't a preload segment// then we will have a segment duration that is accurate.if (!segment.preload) {return segment.duration;} // otherwise we have to add up parts and preload hints// to get an up to date duration.let result = 0;(segment.parts || []).forEach(function (p) {result += p.duration;}); // for preload hints we have to use partTargetDuration// as they won't even have a duration 
yet.(segment.preloadHints || []).forEach(function (p) {if (p.type === 'PART') {result += playlist.partTargetDuration;}});return result;};/*** A function to get a combined list of parts and segments with durations* and indexes.** @param {Playlist} playlist the playlist to get the list for.** @return {Array} The part/segment list.*/const getPartsAndSegments = playlist => (playlist.segments || []).reduce((acc, segment, si) => {if (segment.parts) {segment.parts.forEach(function (part, pi) {acc.push({duration: part.duration,segmentIndex: si,partIndex: pi,part,segment});});} else {acc.push({duration: segment.duration,segmentIndex: si,partIndex: null,segment,part: null});}return acc;}, []);const getLastParts = media => {const lastSegment = media.segments && media.segments.length && media.segments[media.segments.length - 1];return lastSegment && lastSegment.parts || [];};const getKnownPartCount = ({preloadSegment}) => {if (!preloadSegment) {return;}const {parts,preloadHints} = preloadSegment;let partCount = (preloadHints || []).reduce((count, hint) => count + (hint.type === 'PART' ? 1 : 0), 0);partCount += parts && parts.length ? parts.length : 0;return partCount;};/*** Get the number of seconds to delay from the end of a* live playlist.** @param {Playlist} main the main playlist* @param {Playlist} media the media playlist* @return {number} the hold back in seconds.*/const liveEdgeDelay = (main, media) => {if (media.endList) {return 0;} // dash suggestedPresentationDelay trumps everythingif (main && main.suggestedPresentationDelay) {return main.suggestedPresentationDelay;}const hasParts = getLastParts(media).length > 0; // look for "part" delays from ll-hls firstif (hasParts && media.serverControl && media.serverControl.partHoldBack) {return media.serverControl.partHoldBack;} else if (hasParts && media.partTargetDuration) {return media.partTargetDuration * 3; // finally look for full segment delays} else if (media.serverControl && media.serverControl.holdBack) {return media.serverControl.holdBack;} else if (media.targetDuration) {return media.targetDuration * 3;}return 0;};/*** walk backward until we find a duration we can use* or return a failure** @param {Playlist} playlist the playlist to walk through* @param {Number} endSequence the mediaSequence to stop walking on*/const backwardDuration = function (playlist, endSequence) {let result = 0;let i = endSequence - playlist.mediaSequence; // if a start time is available for segment immediately following// the interval, use itlet segment = playlist.segments[i]; // Walk backward until we find the latest segment with timeline// information that is earlier than endSequenceif (segment) {if (typeof segment.start !== 'undefined') {return {result: segment.start,precise: true};}if (typeof segment.end !== 'undefined') {return {result: segment.end - segment.duration,precise: true};}}while (i--) {segment = playlist.segments[i];if (typeof segment.end !== 'undefined') {return {result: result + segment.end,precise: true};}result += segmentDurationWithParts(playlist, segment);if (typeof segment.start !== 'undefined') {return {result: result + segment.start,precise: true};}}return {result,precise: false};};/*** walk forward until we find a duration we can use* or return a failure** @param {Playlist} playlist the playlist to walk through* @param {number} endSequence the mediaSequence to stop walking on*/const forwardDuration = function (playlist, endSequence) {let result = 0;let segment;let i = endSequence - playlist.mediaSequence; // Walk forward until we find the 
  // earliest segment with timeline
  // information
  for (; i < playlist.segments.length; i++) {
    segment = playlist.segments[i];
    if (typeof segment.start !== 'undefined') {
      return {
        result: segment.start - result,
        precise: true
      };
    }
    result += segmentDurationWithParts(playlist, segment);
    if (typeof segment.end !== 'undefined') {
      return {
        result: segment.end - result,
        precise: true
      };
    }
  } // indicate we didn't find a useful duration estimate

  return {
    result: -1,
    precise: false
  };
};
/**
 * Calculate the media duration from the segments associated with a
 * playlist. The duration of a subinterval of the available segments
 * may be calculated by specifying an end index.
 *
 * @param {Object} playlist a media playlist object
 * @param {number=} endSequence an exclusive upper boundary
 * for the playlist. Defaults to playlist length.
 * @param {number} expired the amount of time that has dropped
 * off the front of the playlist in a live scenario
 * @return {number} the duration between the first available segment
 * and end index.
 */
const intervalDuration = function (playlist, endSequence, expired) {
  if (typeof endSequence === 'undefined') {
    endSequence = playlist.mediaSequence + playlist.segments.length;
  }
  if (endSequence < playlist.mediaSequence) {
    return 0;
  } // do a backward walk to estimate the duration

  const backward = backwardDuration(playlist, endSequence);
  if (backward.precise) {
    // if we were able to base our duration estimate on timing
    // information provided directly from the Media Source, return
    // it
    return backward.result;
  } // walk forward to see if a precise duration estimate can be made
  // that way

  const forward = forwardDuration(playlist, endSequence);
  if (forward.precise) {
    // we found a segment that has been buffered and so its
    // position is known precisely
    return forward.result;
  } // return the less-precise, playlist-based duration estimate

  return backward.result + expired;
};
/**
 * Calculates the duration of a playlist. If a start and end index
 * are specified, the duration will be for the subset of the media
 * timeline between those two indices. The total duration for live
 * playlists is always Infinity.
 *
 * @param {Object} playlist a media playlist object
 * @param {number=} endSequence an exclusive upper
 * boundary for the playlist.
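 *
 * A minimal usage sketch (the playlist literals below are made up for illustration):
 *
 *   duration({ endList: true, mediaSequence: 0, segments: [{ duration: 10 }, { duration: 10 }] });
 *   // -> 20: a VOD playlist with no per-segment timing info sums the segment durations
 *   duration({ endList: false, mediaSequence: 0, segments: [{ duration: 10 }] });
 *   // -> Infinity: live playlists have no fixed duration
 *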
Defaults to the playlist media* sequence number plus its length.* @param {number=} expired the amount of time that has* dropped off the front of the playlist in a live scenario* @return {number} the duration between the start index and end* index.*/const duration = function (playlist, endSequence, expired) {if (!playlist) {return 0;}if (typeof expired !== 'number') {expired = 0;} // if a slice of the total duration is not requested, use// playlist-level duration indicators when they're presentif (typeof endSequence === 'undefined') {// if present, use the duration specified in the playlistif (playlist.totalDuration) {return playlist.totalDuration;} // duration should be Infinity for live playlistsif (!playlist.endList) {return window.Infinity;}} // calculate the total duration based on the segment durationsreturn intervalDuration(playlist, endSequence, expired);};/*** Calculate the time between two indexes in the current playlist* neight the start- nor the end-index need to be within the current* playlist in which case, the targetDuration of the playlist is used* to approximate the durations of the segments** @param {Array} options.durationList list to iterate over for durations.* @param {number} options.defaultDuration duration to use for elements before or after the durationList* @param {number} options.startIndex partsAndSegments index to start* @param {number} options.endIndex partsAndSegments index to end.* @return {number} the number of seconds between startIndex and endIndex*/const sumDurations = function ({defaultDuration,durationList,startIndex,endIndex}) {let durations = 0;if (startIndex > endIndex) {[startIndex, endIndex] = [endIndex, startIndex];}if (startIndex < 0) {for (let i = startIndex; i < Math.min(0, endIndex); i++) {durations += defaultDuration;}startIndex = 0;}for (let i = startIndex; i < endIndex; i++) {durations += durationList[i].duration;}return durations;};/*** Calculates the playlist end time** @param {Object} playlist a media playlist object* @param {number=} expired the amount of time that has* dropped off the front of the playlist in a live scenario* @param {boolean|false} useSafeLiveEnd a boolean value indicating whether or not the* playlist end calculation should consider the safe live end* (truncate the playlist end by three segments). This is normally* used for calculating the end of the playlist's seekable range.* This takes into account the value of liveEdgePadding.* Setting liveEdgePadding to 0 is equivalent to setting this to false.* @param {number} liveEdgePadding a number indicating how far from the end of the playlist we should be in seconds.* If this is provided, it is used in the safe live end calculation.* Setting useSafeLiveEnd=false or liveEdgePadding=0 are equivalent.* Corresponds to suggestedPresentationDelay in DASH manifests.* @return {number} the end time of playlist* @function playlistEnd*/const playlistEnd = function (playlist, expired, useSafeLiveEnd, liveEdgePadding) {if (!playlist || !playlist.segments) {return null;}if (playlist.endList) {return duration(playlist);}if (expired === null) {return null;}expired = expired || 0;let lastSegmentEndTime = intervalDuration(playlist, playlist.mediaSequence + playlist.segments.length, expired);if (useSafeLiveEnd) {liveEdgePadding = typeof liveEdgePadding === 'number' ? 
liveEdgePadding : liveEdgeDelay(null, playlist);lastSegmentEndTime -= liveEdgePadding;} // don't return a time less than zeroreturn Math.max(0, lastSegmentEndTime);};/*** Calculates the interval of time that is currently seekable in a* playlist. The returned time ranges are relative to the earliest* moment in the specified playlist that is still available. A full* seekable implementation for live streams would need to offset* these values by the duration of content that has expired from the* stream.** @param {Object} playlist a media playlist object* dropped off the front of the playlist in a live scenario* @param {number=} expired the amount of time that has* dropped off the front of the playlist in a live scenario* @param {number} liveEdgePadding how far from the end of the playlist we should be in seconds.* Corresponds to suggestedPresentationDelay in DASH manifests.* @return {TimeRanges} the periods of time that are valid targets* for seeking*/const seekable = function (playlist, expired, liveEdgePadding) {const useSafeLiveEnd = true;const seekableStart = expired || 0;let seekableEnd = playlistEnd(playlist, expired, useSafeLiveEnd, liveEdgePadding);if (seekableEnd === null) {return createTimeRanges();} // Clamp seekable end since it can not be less than the seekable startif (seekableEnd < seekableStart) {seekableEnd = seekableStart;}return createTimeRanges(seekableStart, seekableEnd);};/*** Determine the index and estimated starting time of the segment that* contains a specified playback position in a media playlist.** @param {Object} options.playlist the media playlist to query* @param {number} options.currentTime The number of seconds since the earliest* possible position to determine the containing segment for* @param {number} options.startTime the time when the segment/part starts* @param {number} options.startingSegmentIndex the segment index to start looking at.* @param {number?} [options.startingPartIndex] the part index to look at within the segment.** @return {Object} an object with partIndex, segmentIndex, and startTime.*/const getMediaInfoForTime = function ({playlist,currentTime,startingSegmentIndex,startingPartIndex,startTime,exactManifestTimings}) {let time = currentTime - startTime;const partsAndSegments = getPartsAndSegments(playlist);let startIndex = 0;for (let i = 0; i < partsAndSegments.length; i++) {const partAndSegment = partsAndSegments[i];if (startingSegmentIndex !== partAndSegment.segmentIndex) {continue;} // skip this if part index does not match.if (typeof startingPartIndex === 'number' && typeof partAndSegment.partIndex === 'number' && startingPartIndex !== partAndSegment.partIndex) {continue;}startIndex = i;break;}if (time < 0) {// Walk backward from startIndex in the playlist, adding durations// until we find a segment that contains `time` and return itif (startIndex > 0) {for (let i = startIndex - 1; i >= 0; i--) {const partAndSegment = partsAndSegments[i];time += partAndSegment.duration;if (exactManifestTimings) {if (time < 0) {continue;}} else if (time + TIME_FUDGE_FACTOR <= 0) {continue;}return {partIndex: partAndSegment.partIndex,segmentIndex: partAndSegment.segmentIndex,startTime: startTime - sumDurations({defaultDuration: playlist.targetDuration,durationList: partsAndSegments,startIndex,endIndex: i})};}} // We were unable to find a good segment within the playlist// so select the first segmentreturn {partIndex: partsAndSegments[0] && partsAndSegments[0].partIndex || null,segmentIndex: partsAndSegments[0] && partsAndSegments[0].segmentIndex || 
0,startTime: currentTime};} // When startIndex is negative, we first walk forward to first segment// adding target durations. If we "run out of time" before getting to// the first segment, return the first segmentif (startIndex < 0) {for (let i = startIndex; i < 0; i++) {time -= playlist.targetDuration;if (time < 0) {return {partIndex: partsAndSegments[0] && partsAndSegments[0].partIndex || null,segmentIndex: partsAndSegments[0] && partsAndSegments[0].segmentIndex || 0,startTime: currentTime};}}startIndex = 0;} // Walk forward from startIndex in the playlist, subtracting durations// until we find a segment that contains `time` and return itfor (let i = startIndex; i < partsAndSegments.length; i++) {const partAndSegment = partsAndSegments[i];time -= partAndSegment.duration;const canUseFudgeFactor = partAndSegment.duration > TIME_FUDGE_FACTOR;const isExactlyAtTheEnd = time === 0;const isExtremelyCloseToTheEnd = canUseFudgeFactor && time + TIME_FUDGE_FACTOR >= 0;if (isExactlyAtTheEnd || isExtremelyCloseToTheEnd) {// 1) We are exactly at the end of the current segment.// 2) We are extremely close to the end of the current segment (The difference is less than 1 / 30).// We may encounter this situation when// we don't have exact match between segment duration info in the manifest and the actual duration of the segment// For example:// We appended 3 segments 10 seconds each, meaning we should have 30 sec buffered,// but we the actual buffered is 29.99999//// In both cases:// if we passed current time -> it means that we already played current segment// if we passed buffered.end -> it means that this segment is already loaded and buffered// we should select the next segment if we have one:if (i !== partsAndSegments.length - 1) {continue;}}if (exactManifestTimings) {if (time > 0) {continue;}} else if (time - TIME_FUDGE_FACTOR >= 0) {continue;}return {partIndex: partAndSegment.partIndex,segmentIndex: partAndSegment.segmentIndex,startTime: startTime + sumDurations({defaultDuration: playlist.targetDuration,durationList: partsAndSegments,startIndex,endIndex: i})};} // We are out of possible candidates so load the last one...return {segmentIndex: partsAndSegments[partsAndSegments.length - 1].segmentIndex,partIndex: partsAndSegments[partsAndSegments.length - 1].partIndex,startTime: currentTime};};/*** Check whether the playlist is excluded or not.** @param {Object} playlist the media playlist object* @return {boolean} whether the playlist is excluded or not* @function isExcluded*/const isExcluded = function (playlist) {return playlist.excludeUntil && playlist.excludeUntil > Date.now();};/*** Check whether the playlist is compatible with current playback configuration or has* been excluded permanently for being incompatible.** @param {Object} playlist the media playlist object* @return {boolean} whether the playlist is incompatible or not* @function isIncompatible*/const isIncompatible = function (playlist) {return playlist.excludeUntil && playlist.excludeUntil === Infinity;};/*** Check whether the playlist is enabled or not.** @param {Object} playlist the media playlist object* @return {boolean} whether the playlist is enabled or not* @function isEnabled*/const isEnabled = function (playlist) {const excluded = isExcluded(playlist);return !playlist.disabled && !excluded;};/*** Check whether the playlist has been manually disabled through the representations api.** @param {Object} playlist the media playlist object* @return {boolean} whether the playlist is disabled manually or not* @function 
isDisabled*/const isDisabled = function (playlist) {return playlist.disabled;};/*** Returns whether the current playlist is an AES encrypted HLS stream** @return {boolean} true if it's an AES encrypted HLS stream*/const isAes = function (media) {for (let i = 0; i < media.segments.length; i++) {if (media.segments[i].key) {return true;}}return false;};/*** Checks if the playlist has a value for the specified attribute** @param {string} attr* Attribute to check for* @param {Object} playlist* The media playlist object* @return {boolean}* Whether the playlist contains a value for the attribute or not* @function hasAttribute*/const hasAttribute = function (attr, playlist) {return playlist.attributes && playlist.attributes[attr];};/*** Estimates the time required to complete a segment download from the specified playlist** @param {number} segmentDuration* Duration of requested segment* @param {number} bandwidth* Current measured bandwidth of the player* @param {Object} playlist* The media playlist object* @param {number=} bytesReceived* Number of bytes already received for the request. Defaults to 0* @return {number|NaN}* The estimated time to request the segment. NaN if bandwidth information for* the given playlist is unavailable* @function estimateSegmentRequestTime*/const estimateSegmentRequestTime = function (segmentDuration, bandwidth, playlist, bytesReceived = 0) {if (!hasAttribute('BANDWIDTH', playlist)) {return NaN;}const size = segmentDuration * playlist.attributes.BANDWIDTH;return (size - bytesReceived * 8) / bandwidth;};/** Returns whether the current playlist is the lowest rendition** @return {Boolean} true if on lowest rendition*/const isLowestEnabledRendition = (main, media) => {if (main.playlists.length === 1) {return true;}const currentBandwidth = media.attributes.BANDWIDTH || Number.MAX_VALUE;return main.playlists.filter(playlist => {if (!isEnabled(playlist)) {return false;}return (playlist.attributes.BANDWIDTH || 0) < currentBandwidth;}).length === 0;};const playlistMatch = (a, b) => {// both playlits are null// or only one playlist is non-null// no matchif (!a && !b || !a && b || a && !b) {return false;} // playlist objects are the same, matchif (a === b) {return true;} // first try to use id as it should be the most// accurateif (a.id && b.id && a.id === b.id) {return true;} // next try to use reslovedUri as it should be the// second most accurate.if (a.resolvedUri && b.resolvedUri && a.resolvedUri === b.resolvedUri) {return true;} // finally try to use uri as it should be accurate// but might miss a few cases for relative urisif (a.uri && b.uri && a.uri === b.uri) {return true;}return false;};const someAudioVariant = function (main, callback) {const AUDIO = main && main.mediaGroups && main.mediaGroups.AUDIO || {};let found = false;for (const groupName in AUDIO) {for (const label in AUDIO[groupName]) {found = callback(AUDIO[groupName][label]);if (found) {break;}}if (found) {break;}}return !!found;};const isAudioOnly = main => {// we are audio only if we have no main playlists but do// have media group playlists.if (!main || !main.playlists || !main.playlists.length) {// without audio variants or playlists this// is not an audio only main.const found = someAudioVariant(main, variant => variant.playlists && variant.playlists.length || variant.uri);return found;} // if every playlist has only an audio codec it is audio onlyfor (let i = 0; i < main.playlists.length; i++) {const playlist = main.playlists[i];const CODECS = playlist.attributes && playlist.attributes.CODECS; // all 
codecs are audio, this is an audio playlist.if (CODECS && CODECS.split(',').every(c => isAudioCodec(c))) {continue;} // playlist is in an audio group it is audio onlyconst found = someAudioVariant(main, variant => playlistMatch(playlist, variant));if (found) {continue;} // if we make it here this playlist isn't audio and we// are not audio onlyreturn false;} // if we make it past every playlist without returning, then// this is an audio only playlist.return true;}; // exportsvar Playlist = {liveEdgeDelay,duration,seekable,getMediaInfoForTime,isEnabled,isDisabled,isExcluded,isIncompatible,playlistEnd,isAes,hasAttribute,estimateSegmentRequestTime,isLowestEnabledRendition,isAudioOnly,playlistMatch,segmentDurationWithParts};const {log} = videojs;const createPlaylistID = (index, uri) => {return `${index}-${uri}`;}; // default function for creating a group idconst groupID = (type, group, label) => {return `placeholder-uri-${type}-${group}-${label}`;};/*** Parses a given m3u8 playlist** @param {Function} [onwarn]* a function to call when the parser triggers a warning event.* @param {Function} [oninfo]* a function to call when the parser triggers an info event.* @param {string} manifestString* The downloaded manifest string* @param {Object[]} [customTagParsers]* An array of custom tag parsers for the m3u8-parser instance* @param {Object[]} [customTagMappers]* An array of custom tag mappers for the m3u8-parser instance* @param {boolean} [llhls]* Whether to keep ll-hls features in the manifest after parsing.* @return {Object}* The manifest object*/const parseManifest = ({onwarn,oninfo,manifestString,customTagParsers = [],customTagMappers = [],llhls}) => {const parser = new Parser();if (onwarn) {parser.on('warn', onwarn);}if (oninfo) {parser.on('info', oninfo);}customTagParsers.forEach(customParser => parser.addParser(customParser));customTagMappers.forEach(mapper => parser.addTagMapper(mapper));parser.push(manifestString);parser.end();const manifest = parser.manifest; // remove llhls features from the parsed manifest// if we don't want llhls support.if (!llhls) {['preloadSegment', 'skip', 'serverControl', 'renditionReports', 'partInf', 'partTargetDuration'].forEach(function (k) {if (manifest.hasOwnProperty(k)) {delete manifest[k];}});if (manifest.segments) {manifest.segments.forEach(function (segment) {['parts', 'preloadHints'].forEach(function (k) {if (segment.hasOwnProperty(k)) {delete segment[k];}});});}}if (!manifest.targetDuration) {let targetDuration = 10;if (manifest.segments && manifest.segments.length) {targetDuration = manifest.segments.reduce((acc, s) => Math.max(acc, s.duration), 0);}if (onwarn) {onwarn({message: `manifest has no targetDuration defaulting to ${targetDuration}`});}manifest.targetDuration = targetDuration;}const parts = getLastParts(manifest);if (parts.length && !manifest.partTargetDuration) {const partTargetDuration = parts.reduce((acc, p) => Math.max(acc, p.duration), 0);if (onwarn) {onwarn({message: `manifest has no partTargetDuration defaulting to ${partTargetDuration}`});log.error('LL-HLS manifest has parts but lacks required #EXT-X-PART-INF:PART-TARGET value. See https://datatracker.ietf.org/doc/html/draft-pantos-hls-rfc8216bis-09#section-4.4.3.7. 
Playback is not guaranteed.');}manifest.partTargetDuration = partTargetDuration;}return manifest;};/*** Loops through all supported media groups in main and calls the provided* callback for each group** @param {Object} main* The parsed main manifest object* @param {Function} callback* Callback to call for each media group*/const forEachMediaGroup = (main, callback) => {if (!main.mediaGroups) {return;}['AUDIO', 'SUBTITLES'].forEach(mediaType => {if (!main.mediaGroups[mediaType]) {return;}for (const groupKey in main.mediaGroups[mediaType]) {for (const labelKey in main.mediaGroups[mediaType][groupKey]) {const mediaProperties = main.mediaGroups[mediaType][groupKey][labelKey];callback(mediaProperties, mediaType, groupKey, labelKey);}}});};/*** Adds properties and attributes to the playlist to keep consistent functionality for* playlists throughout VHS.** @param {Object} config* Arguments object* @param {Object} config.playlist* The media playlist* @param {string} [config.uri]* The uri to the media playlist (if media playlist is not from within a main* playlist)* @param {string} id* ID to use for the playlist*/const setupMediaPlaylist = ({playlist,uri,id}) => {playlist.id = id;playlist.playlistErrors_ = 0;if (uri) {// For media playlists, m3u8-parser does not have access to a URI, as HLS media// playlists do not contain their own source URI, but one is needed for consistency in// VHS.playlist.uri = uri;} // For HLS main playlists, even though certain attributes MUST be defined, the// stream may still be played without them.// For HLS media playlists, m3u8-parser does not attach an attributes object to the// manifest.//// To avoid undefined reference errors through the project, and make the code easier// to write/read, add an empty attributes object for these cases.playlist.attributes = playlist.attributes || {};};/*** Adds ID, resolvedUri, and attributes properties to each playlist of the main, where* necessary. In addition, creates playlist IDs for each playlist and adds playlist ID to* playlist references to the playlists array.** @param {Object} main* The main playlist*/const setupMediaPlaylists = main => {let i = main.playlists.length;while (i--) {const playlist = main.playlists[i];setupMediaPlaylist({playlist,id: createPlaylistID(i, playlist.uri)});playlist.resolvedUri = resolveUrl(main.uri, playlist.uri);main.playlists[playlist.id] = playlist; // URI reference added for backwards compatibilitymain.playlists[playlist.uri] = playlist; // Although the spec states an #EXT-X-STREAM-INF tag MUST have a BANDWIDTH attribute,// the stream can be played without it. Although an attributes property may have been// added to the playlist to prevent undefined references, issue a warning to fix the// manifest.if (!playlist.attributes.BANDWIDTH) {log.warn('Invalid playlist STREAM-INF detected. 
Missing BANDWIDTH attribute.');}}};/*** Adds resolvedUri properties to each media group.** @param {Object} main* The main playlist*/const resolveMediaGroupUris = main => {forEachMediaGroup(main, properties => {if (properties.uri) {properties.resolvedUri = resolveUrl(main.uri, properties.uri);}});};/*** Creates a main playlist wrapper to insert a sole media playlist into.** @param {Object} media* Media playlist* @param {string} uri* The media URI** @return {Object}* main playlist*/const mainForMedia = (media, uri) => {const id = createPlaylistID(0, uri);const main = {mediaGroups: {'AUDIO': {},'VIDEO': {},'CLOSED-CAPTIONS': {},'SUBTITLES': {}},uri: window.location.href,resolvedUri: window.location.href,playlists: [{uri,id,resolvedUri: uri,// m3u8-parser does not attach an attributes property to media playlists so make// sure that the property is attached to avoid undefined reference errorsattributes: {}}]}; // set up ID referencemain.playlists[id] = main.playlists[0]; // URI reference added for backwards compatibilitymain.playlists[uri] = main.playlists[0];return main;};/*** Does an in-place update of the main manifest to add updated playlist URI references* as well as other properties needed by VHS that aren't included by the parser.** @param {Object} main* main manifest object* @param {string} uri* The source URI* @param {function} createGroupID* A function to determine how to create the groupID for mediaGroups*/const addPropertiesToMain = (main, uri, createGroupID = groupID) => {main.uri = uri;for (let i = 0; i < main.playlists.length; i++) {if (!main.playlists[i].uri) {// Set up phony URIs for the playlists since playlists are referenced by their URIs// throughout VHS, but some formats (e.g., DASH) don't have external URIs// TODO: consider adding dummy URIs in mpd-parserconst phonyUri = `placeholder-uri-${i}`;main.playlists[i].uri = phonyUri;}}const audioOnlyMain = isAudioOnly(main);forEachMediaGroup(main, (properties, mediaType, groupKey, labelKey) => {// add a playlist array under propertiesif (!properties.playlists || !properties.playlists.length) {// If the manifest is audio only and this media group does not have a uri, check// if the media group is located in the main list of playlists. If it is, don't add// placeholder properties as it shouldn't be considered an alternate audio track.if (audioOnlyMain && mediaType === 'AUDIO' && !properties.uri) {for (let i = 0; i < main.playlists.length; i++) {const p = main.playlists[i];if (p.attributes && p.attributes.AUDIO && p.attributes.AUDIO === groupKey) {return;}}}properties.playlists = [_extends$1({}, properties)];}properties.playlists.forEach(function (p, i) {const groupId = createGroupID(mediaType, groupKey, labelKey, p);const id = createPlaylistID(i, groupId);if (p.uri) {p.resolvedUri = p.resolvedUri || resolveUrl(main.uri, p.uri);} else {// DEPRECATED, this has been added to prevent a breaking change.// previously we only ever had a single media group playlist, so// we mark the first playlist uri without prepending the index as we used to// ideally we would do all of the playlists the same way.p.uri = i === 0 ? 
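/* Usage sketch (the uri/group/label values are illustrative, not from a real manifest):
 * with the id helpers defined above,
 *   createPlaylistID(0, 'media-0.m3u8')      // => '0-media-0.m3u8'
 *   groupID('AUDIO', 'audio-hq', 'English')  // => 'placeholder-uri-AUDIO-audio-hq-English'
 * addPropertiesToMain() uses these ids (plus the uri) as extra string keys on
 * main.playlists, so a playlist can be looked up by numeric index, by id, or by uri. */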
groupId : id; // don't resolve a placeholder uri to an absolute url, just use// the placeholder againp.resolvedUri = p.uri;}p.id = p.id || id; // add an empty attributes object, all playlists are// expected to have this.p.attributes = p.attributes || {}; // setup ID and URI references (URI for backwards compatibility)main.playlists[p.id] = p;main.playlists[p.uri] = p;});});setupMediaPlaylists(main);resolveMediaGroupUris(main);};class DateRangesStorage {constructor() {this.offset_ = null;this.pendingDateRanges_ = new Map();this.processedDateRanges_ = new Map();}setOffset(segments = []) {// already setif (this.offset_ !== null) {return;} // no segment to processif (!segments.length) {return;}const [firstSegment] = segments; // no program date timeif (firstSegment.programDateTime === undefined) {return;} // Set offset as ProgramDateTime for the very first segment of the very first playlist load:this.offset_ = firstSegment.programDateTime / 1000;}setPendingDateRanges(dateRanges = []) {if (!dateRanges.length) {return;}const [dateRange] = dateRanges;const startTime = dateRange.startDate.getTime();this.trimProcessedDateRanges_(startTime);this.pendingDateRanges_ = dateRanges.reduce((map, pendingDateRange) => {map.set(pendingDateRange.id, pendingDateRange);return map;}, new Map());}processDateRange(dateRange) {this.pendingDateRanges_.delete(dateRange.id);this.processedDateRanges_.set(dateRange.id, dateRange);}getDateRangesToProcess() {if (this.offset_ === null) {return [];}const dateRangeClasses = {};const dateRangesToProcess = [];this.pendingDateRanges_.forEach((dateRange, id) => {if (this.processedDateRanges_.has(id)) {return;}dateRange.startTime = dateRange.startDate.getTime() / 1000 - this.offset_;dateRange.processDateRange = () => this.processDateRange(dateRange);dateRangesToProcess.push(dateRange);if (!dateRange.class) {return;}if (dateRangeClasses[dateRange.class]) {const length = dateRangeClasses[dateRange.class].push(dateRange);dateRange.classListIndex = length - 1;} else {dateRangeClasses[dateRange.class] = [dateRange];dateRange.classListIndex = 0;}});for (const dateRange of dateRangesToProcess) {const classList = dateRangeClasses[dateRange.class] || [];if (dateRange.endDate) {dateRange.endTime = dateRange.endDate.getTime() / 1000 - this.offset_;} else if (dateRange.endOnNext && classList[dateRange.classListIndex + 1]) {dateRange.endTime = classList[dateRange.classListIndex + 1].startTime;} else if (dateRange.duration) {dateRange.endTime = dateRange.startTime + dateRange.duration;} else if (dateRange.plannedDuration) {dateRange.endTime = dateRange.startTime + dateRange.plannedDuration;} else {dateRange.endTime = dateRange.startTime;}}return dateRangesToProcess;}trimProcessedDateRanges_(startTime) {const copy = new Map(this.processedDateRanges_);copy.forEach((dateRange, id) => {if (dateRange.startDate.getTime() < startTime) {this.processedDateRanges_.delete(id);}});}}const {EventTarget: EventTarget$1} = videojs;const addLLHLSQueryDirectives = (uri, media) => {if (media.endList || !media.serverControl) {return uri;}const parameters = {};if (media.serverControl.canBlockReload) {const {preloadSegment} = media; // next msn is a zero based value, length is not.let nextMSN = media.mediaSequence + media.segments.length; // If preload segment has parts then it is likely// that we are going to request a part of that preload segment.// the logic below is used to determine that.if (preloadSegment) {const parts = preloadSegment.parts || []; // _HLS_part is a zero based indexconst nextPart = 
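/* Usage sketch for addLLHLSQueryDirectives (all values assumed, not taken from this file):
 * for a live playlist with mediaSequence 100, six complete segments, no preload
 * segment, and serverControl { canBlockReload: true, canSkipUntil: 36 },
 *   addLLHLSQueryDirectives('https://example.com/media.m3u8', media)
 * yields something like
 *   'https://example.com/media.m3u8?_HLS_skip=YES&_HLS_msn=106'
 * i.e. ask the server to block the reload until media sequence 106 exists and to
 * return a skipped (delta) playlist. */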
getKnownPartCount(media) - 1; // if nextPart is > -1 and not equal to just the// length of parts, then we know we had part preload hints// and we need to add the _HLS_part= queryif (nextPart > -1 && nextPart !== parts.length - 1) {// add existing parts to our preload hints// eslint-disable-next-lineparameters._HLS_part = nextPart;} // this if statement makes sure that we request the msn// of the preload segment if:// 1. the preload segment had parts (and was not yet a full segment)// but was added to our segments array// 2. the preload segment had preload hints for parts that are not in// the manifest yet.// in all other cases we want the segment after the preload segment// which will be given by using media.segments.length because it is 1 based// rather than 0 based.if (nextPart > -1 || parts.length) {nextMSN--;}} // add _HLS_msn= in front of any _HLS_part query// eslint-disable-next-lineparameters._HLS_msn = nextMSN;}if (media.serverControl && media.serverControl.canSkipUntil) {// add _HLS_skip= infront of all other queries.// eslint-disable-next-lineparameters._HLS_skip = media.serverControl.canSkipDateranges ? 'v2' : 'YES';}if (Object.keys(parameters).length) {const parsedUri = new window.URL(uri);['_HLS_skip', '_HLS_msn', '_HLS_part'].forEach(function (name) {if (!parameters.hasOwnProperty(name)) {return;}parsedUri.searchParams.set(name, parameters[name]);});uri = parsedUri.toString();}return uri;};/*** Returns a new segment object with properties and* the parts array merged.** @param {Object} a the old segment* @param {Object} b the new segment** @return {Object} the merged segment*/const updateSegment = (a, b) => {if (!a) {return b;}const result = merge(a, b); // if only the old segment has preload hints// and the new one does not, remove preload hints.if (a.preloadHints && !b.preloadHints) {delete result.preloadHints;} // if only the old segment has parts// then the parts are no longer validif (a.parts && !b.parts) {delete result.parts; // if both segments have parts// copy part propeties from the old segment// to the new one.} else if (a.parts && b.parts) {for (let i = 0; i < b.parts.length; i++) {if (a.parts && a.parts[i]) {result.parts[i] = merge(a.parts[i], b.parts[i]);}}} // set skipped to false for segments that have// have had information merged from the old segment.if (!a.skipped && b.skipped) {result.skipped = false;} // set preload to false for segments that have// had information added in the new segment.if (a.preload && !b.preload) {result.preload = false;}return result;};/*** Returns a new array of segments that is the result of merging* properties from an older list of segments onto an updated* list. No properties on the updated playlist will be ovewritten.** @param {Array} original the outdated list of segments* @param {Array} update the updated list of segments* @param {number=} offset the index of the first update* segment in the original segment list. For non-live playlists,* this should always be zero and does not need to be* specified. 
For live playlists, it should be the difference* between the media sequence numbers in the original and updated* playlists.* @return {Array} a list of merged segment objects*/const updateSegments = (original, update, offset) => {const oldSegments = original.slice();const newSegments = update.slice();offset = offset || 0;const result = [];let currentMap;for (let newIndex = 0; newIndex < newSegments.length; newIndex++) {const oldSegment = oldSegments[newIndex + offset];const newSegment = newSegments[newIndex];if (oldSegment) {currentMap = oldSegment.map || currentMap;result.push(updateSegment(oldSegment, newSegment));} else {// carry over map to new segment if it is missingif (currentMap && !newSegment.map) {newSegment.map = currentMap;}result.push(newSegment);}}return result;};const resolveSegmentUris = (segment, baseUri) => {// preloadSegment will not have a uri at all// as the segment isn't actually in the manifest yet, only partsif (!segment.resolvedUri && segment.uri) {segment.resolvedUri = resolveUrl(baseUri, segment.uri);}if (segment.key && !segment.key.resolvedUri) {segment.key.resolvedUri = resolveUrl(baseUri, segment.key.uri);}if (segment.map && !segment.map.resolvedUri) {segment.map.resolvedUri = resolveUrl(baseUri, segment.map.uri);}if (segment.map && segment.map.key && !segment.map.key.resolvedUri) {segment.map.key.resolvedUri = resolveUrl(baseUri, segment.map.key.uri);}if (segment.parts && segment.parts.length) {segment.parts.forEach(p => {if (p.resolvedUri) {return;}p.resolvedUri = resolveUrl(baseUri, p.uri);});}if (segment.preloadHints && segment.preloadHints.length) {segment.preloadHints.forEach(p => {if (p.resolvedUri) {return;}p.resolvedUri = resolveUrl(baseUri, p.uri);});}};const getAllSegments = function (media) {const segments = media.segments || [];const preloadSegment = media.preloadSegment; // a preloadSegment with only preloadHints is not currently// a usable segment, only include a preloadSegment that has// parts.if (preloadSegment && preloadSegment.parts && preloadSegment.parts.length) {// if preloadHints has a MAP that means that the// init segment is going to change. We cannot use any of the parts// from this preload segment.if (preloadSegment.preloadHints) {for (let i = 0; i < preloadSegment.preloadHints.length; i++) {if (preloadSegment.preloadHints[i].type === 'MAP') {return segments;}}} // set the duration for our preload segment to target duration.preloadSegment.duration = media.targetDuration;preloadSegment.preload = true;segments.push(preloadSegment);}return segments;}; // consider the playlist unchanged if the playlist object is the same or// the number of segments is equal, the media sequence number is unchanged,// and this playlist hasn't become the end of the playlistconst isPlaylistUnchanged = (a, b) => a === b || a.segments && b.segments && a.segments.length === b.segments.length && a.endList === b.endList && a.mediaSequence === b.mediaSequence && a.preloadSegment === b.preloadSegment;/*** Returns a new main playlist that is the result of merging an* updated media playlist into the original version. 
If the* updated media playlist does not match any of the playlist* entries in the original main playlist, null is returned.** @param {Object} main a parsed main M3U8 object* @param {Object} media a parsed media M3U8 object* @return {Object} a new object that represents the original* main playlist with the updated media playlist merged in, or* null if the merge produced no change.*/const updateMain$1 = (main, newMedia, unchangedCheck = isPlaylistUnchanged) => {const result = merge(main, {});const oldMedia = result.playlists[newMedia.id];if (!oldMedia) {return null;}if (unchangedCheck(oldMedia, newMedia)) {return null;}newMedia.segments = getAllSegments(newMedia);const mergedPlaylist = merge(oldMedia, newMedia); // always use the new media's preload segmentif (mergedPlaylist.preloadSegment && !newMedia.preloadSegment) {delete mergedPlaylist.preloadSegment;} // if the update could overlap existing segment information, merge the two segment listsif (oldMedia.segments) {if (newMedia.skip) {newMedia.segments = newMedia.segments || []; // add back in objects for skipped segments, so that we merge// old properties into the new segmentsfor (let i = 0; i < newMedia.skip.skippedSegments; i++) {newMedia.segments.unshift({skipped: true});}}mergedPlaylist.segments = updateSegments(oldMedia.segments, newMedia.segments, newMedia.mediaSequence - oldMedia.mediaSequence);} // resolve any segment URIs to prevent us from having to do it latermergedPlaylist.segments.forEach(segment => {resolveSegmentUris(segment, mergedPlaylist.resolvedUri);}); // TODO Right now in the playlists array there are two references to each playlist, one// that is referenced by index, and one by URI. The index reference may no longer be// necessary.for (let i = 0; i < result.playlists.length; i++) {if (result.playlists[i].id === newMedia.id) {result.playlists[i] = mergedPlaylist;}}result.playlists[newMedia.id] = mergedPlaylist; // URI reference added for backwards compatibilityresult.playlists[newMedia.uri] = mergedPlaylist; // update media group playlist references.forEachMediaGroup(main, (properties, mediaType, groupKey, labelKey) => {if (!properties.playlists) {return;}for (let i = 0; i < properties.playlists.length; i++) {if (newMedia.id === properties.playlists[i].id) {properties.playlists[i] = mergedPlaylist;}}});return result;};/*** Calculates the time to wait before refreshing a live playlist** @param {Object} media* The current media* @param {boolean} update* True if there were any updates from the last refresh, false otherwise* @return {number}* The time in ms to wait before refreshing the live playlist*/const refreshDelay = (media, update) => {const segments = media.segments || [];const lastSegment = segments[segments.length - 1];const lastPart = lastSegment && lastSegment.parts && lastSegment.parts[lastSegment.parts.length - 1];const lastDuration = lastPart && lastPart.duration || lastSegment && lastSegment.duration;if (update && lastDuration) {return lastDuration * 1000;} // if the playlist is unchanged since the last reload or last segment duration// cannot be determined, try again after half the target durationreturn (media.partTargetDuration || media.targetDuration || 10) * 500;};/*** Load a playlist from a remote location** @class PlaylistLoader* @extends Stream* @param {string|Object} src url or object of manifest* @param {boolean} withCredentials the withCredentials xhr option* @class*/class PlaylistLoader extends EventTarget$1 {constructor(src, vhs, options = {}) {super();if (!src) {throw new Error('A non-empty 
playlist URL or object is required');}this.logger_ = logger('PlaylistLoader');const {withCredentials = false} = options;this.src = src;this.vhs_ = vhs;this.withCredentials = withCredentials;this.addDateRangesToTextTrack_ = options.addDateRangesToTextTrack;const vhsOptions = vhs.options_;this.customTagParsers = vhsOptions && vhsOptions.customTagParsers || [];this.customTagMappers = vhsOptions && vhsOptions.customTagMappers || [];this.llhls = vhsOptions && vhsOptions.llhls;this.dateRangesStorage_ = new DateRangesStorage(); // initialize the loader statethis.state = 'HAVE_NOTHING'; // live playlist staleness timeoutthis.handleMediaupdatetimeout_ = this.handleMediaupdatetimeout_.bind(this);this.on('mediaupdatetimeout', this.handleMediaupdatetimeout_);this.on('loadedplaylist', this.handleLoadedPlaylist_.bind(this));}handleLoadedPlaylist_() {const mediaPlaylist = this.media();if (!mediaPlaylist) {return;}this.dateRangesStorage_.setOffset(mediaPlaylist.segments);this.dateRangesStorage_.setPendingDateRanges(mediaPlaylist.dateRanges);const availableDateRanges = this.dateRangesStorage_.getDateRangesToProcess();if (!availableDateRanges.length || !this.addDateRangesToTextTrack_) {return;}this.addDateRangesToTextTrack_(availableDateRanges);}handleMediaupdatetimeout_() {if (this.state !== 'HAVE_METADATA') {// only refresh the media playlist if no other activity is going onreturn;}const media = this.media();let uri = resolveUrl(this.main.uri, media.uri);if (this.llhls) {uri = addLLHLSQueryDirectives(uri, media);}this.state = 'HAVE_CURRENT_METADATA';this.request = this.vhs_.xhr({uri,withCredentials: this.withCredentials}, (error, req) => {// disposedif (!this.request) {return;}if (error) {return this.playlistRequestError(this.request, this.media(), 'HAVE_METADATA');}this.haveMetadata({playlistString: this.request.responseText,url: this.media().uri,id: this.media().id});});}playlistRequestError(xhr, playlist, startingState) {const {uri,id} = playlist; // any in-flight request is now finishedthis.request = null;if (startingState) {this.state = startingState;}this.error = {playlist: this.main.playlists[id],status: xhr.status,message: `HLS playlist request error at URL: ${uri}.`,responseText: xhr.responseText,code: xhr.status >= 500 ? 
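/* Status mapping note: 5xx playlist responses surface as error code 4
 * (MEDIA_ERR_SRC_NOT_SUPPORTED); anything else, e.g. a 404, surfaces as code 2
 * (MEDIA_ERR_NETWORK), matching the standard MediaError numbering. */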
4 : 2};this.trigger('error');}parseManifest_({url,manifestString}) {return parseManifest({onwarn: ({message}) => this.logger_(`m3u8-parser warn for ${url}: ${message}`),oninfo: ({message}) => this.logger_(`m3u8-parser info for ${url}: ${message}`),manifestString,customTagParsers: this.customTagParsers,customTagMappers: this.customTagMappers,llhls: this.llhls});}/*** Update the playlist loader's state in response to a new or updated playlist.** @param {string} [playlistString]* Playlist string (if playlistObject is not provided)* @param {Object} [playlistObject]* Playlist object (if playlistString is not provided)* @param {string} url* URL of playlist* @param {string} id* ID to use for playlist*/haveMetadata({playlistString,playlistObject,url,id}) {// any in-flight request is now finishedthis.request = null;this.state = 'HAVE_METADATA';const playlist = playlistObject || this.parseManifest_({url,manifestString: playlistString});playlist.lastRequest = Date.now();setupMediaPlaylist({playlist,uri: url,id}); // merge this playlist into the main manifestconst update = updateMain$1(this.main, playlist);this.targetDuration = playlist.partTargetDuration || playlist.targetDuration;this.pendingMedia_ = null;if (update) {this.main = update;this.media_ = this.main.playlists[id];} else {this.trigger('playlistunchanged');}this.updateMediaUpdateTimeout_(refreshDelay(this.media(), !!update));this.trigger('loadedplaylist');}/*** Abort any outstanding work and clean up.*/dispose() {this.trigger('dispose');this.stopRequest();window.clearTimeout(this.mediaUpdateTimeout);window.clearTimeout(this.finalRenditionTimeout);this.dateRangesStorage_ = new DateRangesStorage();this.off();}stopRequest() {if (this.request) {const oldRequest = this.request;this.request = null;oldRequest.onreadystatechange = null;oldRequest.abort();}}/*** When called without any arguments, returns the currently* active media playlist. When called with a single argument,* triggers the playlist loader to asynchronously switch to the* specified media playlist. 
Calling this method while the* loader is in the HAVE_NOTHING causes an error to be emitted* but otherwise has no effect.** @param {Object=} playlist the parsed media playlist* object to switch to* @param {boolean=} shouldDelay whether we should delay the request by half target duration** @return {Playlist} the current loaded media*/media(playlist, shouldDelay) {// getterif (!playlist) {return this.media_;} // setterif (this.state === 'HAVE_NOTHING') {throw new Error('Cannot switch media playlist from ' + this.state);} // find the playlist object if the target playlist has been// specified by URIif (typeof playlist === 'string') {if (!this.main.playlists[playlist]) {throw new Error('Unknown playlist URI: ' + playlist);}playlist = this.main.playlists[playlist];}window.clearTimeout(this.finalRenditionTimeout);if (shouldDelay) {const delay = (playlist.partTargetDuration || playlist.targetDuration) / 2 * 1000 || 5 * 1000;this.finalRenditionTimeout = window.setTimeout(this.media.bind(this, playlist, false), delay);return;}const startingState = this.state;const mediaChange = !this.media_ || playlist.id !== this.media_.id;const mainPlaylistRef = this.main.playlists[playlist.id]; // switch to fully loaded playlists immediatelyif (mainPlaylistRef && mainPlaylistRef.endList ||// handle the case of a playlist object (e.g., if using vhs-json with a resolved// media playlist or, for the case of demuxed audio, a resolved audio media group)playlist.endList && playlist.segments.length) {// abort outstanding playlist requestsif (this.request) {this.request.onreadystatechange = null;this.request.abort();this.request = null;}this.state = 'HAVE_METADATA';this.media_ = playlist; // trigger media change if the active media has been updatedif (mediaChange) {this.trigger('mediachanging');if (startingState === 'HAVE_MAIN_MANIFEST') {// The initial playlist was a main manifest, and the first media selected was// also provided (in the form of a resolved playlist object) as part of the// source object (rather than just a URL). Therefore, since the media playlist// doesn't need to be requested, loadedmetadata won't trigger as part of the// normal flow, and needs an explicit trigger here.this.trigger('loadedmetadata');} else {this.trigger('mediachange');}}return;} // We update/set the timeout here so that live playlists// that are not a media change will "start" the loader as expected.// We expect that this function will start the media update timeout// cycle again. 
This also prevents a playlist switch failure from// causing us to stall during live.this.updateMediaUpdateTimeout_(refreshDelay(playlist, true)); // switching to the active playlist is a no-opif (!mediaChange) {return;}this.state = 'SWITCHING_MEDIA'; // there is already an outstanding playlist requestif (this.request) {if (playlist.resolvedUri === this.request.url) {// requesting to switch to the same playlist multiple times// has no effect after the firstreturn;}this.request.onreadystatechange = null;this.request.abort();this.request = null;} // request the new playlistif (this.media_) {this.trigger('mediachanging');}this.pendingMedia_ = playlist;this.request = this.vhs_.xhr({uri: playlist.resolvedUri,withCredentials: this.withCredentials}, (error, req) => {// disposedif (!this.request) {return;}playlist.lastRequest = Date.now();playlist.resolvedUri = resolveManifestRedirect(playlist.resolvedUri, req);if (error) {return this.playlistRequestError(this.request, playlist, startingState);}this.haveMetadata({playlistString: req.responseText,url: playlist.uri,id: playlist.id}); // fire loadedmetadata the first time a media playlist is loadedif (startingState === 'HAVE_MAIN_MANIFEST') {this.trigger('loadedmetadata');} else {this.trigger('mediachange');}});}/*** pause loading of the playlist*/pause() {if (this.mediaUpdateTimeout) {window.clearTimeout(this.mediaUpdateTimeout);this.mediaUpdateTimeout = null;}this.stopRequest();if (this.state === 'HAVE_NOTHING') {// If we pause the loader before any data has been retrieved, its as if we never// started, so reset to an unstarted state.this.started = false;} // Need to restore state now that no activity is happeningif (this.state === 'SWITCHING_MEDIA') {// if the loader was in the process of switching media, it should either return to// HAVE_MAIN_MANIFEST or HAVE_METADATA depending on if the loader has loaded a media// playlist yet. This is determined by the existence of loader.media_if (this.media_) {this.state = 'HAVE_METADATA';} else {this.state = 'HAVE_MAIN_MANIFEST';}} else if (this.state === 'HAVE_CURRENT_METADATA') {this.state = 'HAVE_METADATA';}}/*** start loading of the playlist*/load(shouldDelay) {if (this.mediaUpdateTimeout) {window.clearTimeout(this.mediaUpdateTimeout);this.mediaUpdateTimeout = null;}const media = this.media();if (shouldDelay) {const delay = media ? (media.partTargetDuration || media.targetDuration) / 2 * 1000 : 5 * 1000;this.mediaUpdateTimeout = window.setTimeout(() => {this.mediaUpdateTimeout = null;this.load();}, delay);return;}if (!this.started) {this.start();return;}if (media && !media.endList) {this.trigger('mediaupdatetimeout');} else {this.trigger('loadedplaylist');}}updateMediaUpdateTimeout_(delay) {if (this.mediaUpdateTimeout) {window.clearTimeout(this.mediaUpdateTimeout);this.mediaUpdateTimeout = null;} // we only have use mediaupdatetimeout for live playlists.if (!this.media() || this.media().endList) {return;}this.mediaUpdateTimeout = window.setTimeout(() => {this.mediaUpdateTimeout = null;this.trigger('mediaupdatetimeout');this.updateMediaUpdateTimeout_(delay);}, delay);}/*** start loading of the playlist*/start() {this.started = true;if (typeof this.src === 'object') {// in the case of an entirely constructed manifest object (meaning there's no actual// manifest on a server), default the uri to the page's hrefif (!this.src.uri) {this.src.uri = window.location.href;} // resolvedUri is added on internally after the initial request. 
Since there's no// request for pre-resolved manifests, add on resolvedUri here.this.src.resolvedUri = this.src.uri; // Since a manifest object was passed in as the source (instead of a URL), the first// request can be skipped (since the top level of the manifest, at a minimum, is// already available as a parsed manifest object). However, if the manifest object// represents a main playlist, some media playlists may need to be resolved before// the starting segment list is available. Therefore, go directly to setup of the// initial playlist, and let the normal flow continue from there.//// Note that the call to setup is asynchronous, as other sections of VHS may assume// that the first request is asynchronous.setTimeout(() => {this.setupInitialPlaylist(this.src);}, 0);return;} // request the specified URLthis.request = this.vhs_.xhr({uri: this.src,withCredentials: this.withCredentials}, (error, req) => {// disposedif (!this.request) {return;} // clear the loader's request referencethis.request = null;if (error) {this.error = {status: req.status,message: `HLS playlist request error at URL: ${this.src}.`,responseText: req.responseText,// MEDIA_ERR_NETWORKcode: 2};if (this.state === 'HAVE_NOTHING') {this.started = false;}return this.trigger('error');}this.src = resolveManifestRedirect(this.src, req);const manifest = this.parseManifest_({manifestString: req.responseText,url: this.src});this.setupInitialPlaylist(manifest);});}srcUri() {return typeof this.src === 'string' ? this.src : this.src.uri;}/*** Given a manifest object that's either a main or media playlist, trigger the proper* events and set the state of the playlist loader.** If the manifest object represents a main playlist, `loadedplaylist` will be* triggered to allow listeners to select a playlist. 
If none is selected, the loader* will default to the first one in the playlists array.** If the manifest object represents a media playlist, `loadedplaylist` will be* triggered followed by `loadedmetadata`, as the only available playlist is loaded.** In the case of a media playlist, a main playlist object wrapper with one playlist* will be created so that all logic can handle playlists in the same fashion (as an* assumed manifest object schema).** @param {Object} manifest* The parsed manifest object*/setupInitialPlaylist(manifest) {this.state = 'HAVE_MAIN_MANIFEST';if (manifest.playlists) {this.main = manifest;addPropertiesToMain(this.main, this.srcUri()); // If the initial main playlist has playlists wtih segments already resolved,// then resolve URIs in advance, as they are usually done after a playlist request,// which may not happen if the playlist is resolved.manifest.playlists.forEach(playlist => {playlist.segments = getAllSegments(playlist);playlist.segments.forEach(segment => {resolveSegmentUris(segment, playlist.resolvedUri);});});this.trigger('loadedplaylist');if (!this.request) {// no media playlist was specifically selected so start// from the first listed onethis.media(this.main.playlists[0]);}return;} // In order to support media playlists passed in as vhs-json, the case where the uri// is not provided as part of the manifest should be considered, and an appropriate// default used.const uri = this.srcUri() || window.location.href;this.main = mainForMedia(manifest, uri);this.haveMetadata({playlistObject: manifest,url: uri,id: this.main.playlists[0].id});this.trigger('loadedmetadata');}/*** Updates or deletes a preexisting pathway clone.* Ensures that all playlists related to the old pathway clone are* either updated or deleted.** @param {Object} clone On update, the pathway clone object for the newly updated pathway clone.* On delete, the old pathway clone object to be deleted.* @param {boolean} isUpdate True if the pathway is to be updated,* false if it is meant to be deleted.*/updateOrDeleteClone(clone, isUpdate) {const main = this.main;const pathway = clone.ID;let i = main.playlists.length; // Iterate backwards through the playlist so we can remove playlists if necessary.while (i--) {const p = main.playlists[i];if (p.attributes['PATHWAY-ID'] === pathway) {const oldPlaylistUri = p.resolvedUri;const oldPlaylistId = p.id; // update the indexed playlist and add new playlists by ID and URIif (isUpdate) {const newPlaylistUri = this.createCloneURI_(p.resolvedUri, clone);const newPlaylistId = createPlaylistID(pathway, newPlaylistUri);const attributes = this.createCloneAttributes_(pathway, p.attributes);const updatedPlaylist = this.createClonePlaylist_(p, newPlaylistId, clone, attributes);main.playlists[i] = updatedPlaylist;main.playlists[newPlaylistId] = updatedPlaylist;main.playlists[newPlaylistUri] = updatedPlaylist;} else {// Remove the indexed playlist.main.playlists.splice(i, 1);} // Remove playlists by the old ID and URI.delete main.playlists[oldPlaylistId];delete main.playlists[oldPlaylistUri];}}this.updateOrDeleteCloneMedia(clone, isUpdate);}/*** Updates or deletes media data based on the pathway clone object.* Due to the complexity of the media groups and playlists, in all cases* we remove all of the old media groups and playlists.* On updates, we then create new media groups and playlists based on the* new pathway clone object.** @param {Object} clone The pathway clone object for the newly updated pathway clone.* @param {boolean} isUpdate True if the pathway is to be 
updated,* false if it is meant to be deleted.*/updateOrDeleteCloneMedia(clone, isUpdate) {const main = this.main;const id = clone.ID;['AUDIO', 'SUBTITLES', 'CLOSED-CAPTIONS'].forEach(mediaType => {if (!main.mediaGroups[mediaType] || !main.mediaGroups[mediaType][id]) {return;}for (const groupKey in main.mediaGroups[mediaType]) {// Remove all media playlists for the media group for this pathway clone.if (groupKey === id) {for (const labelKey in main.mediaGroups[mediaType][groupKey]) {const oldMedia = main.mediaGroups[mediaType][groupKey][labelKey];oldMedia.playlists.forEach((p, i) => {const oldMediaPlaylist = main.playlists[p.id];const oldPlaylistId = oldMediaPlaylist.id;const oldPlaylistUri = oldMediaPlaylist.resolvedUri;delete main.playlists[oldPlaylistId];delete main.playlists[oldPlaylistUri];});} // Delete the old media group.delete main.mediaGroups[mediaType][groupKey];}}}); // Create the new media groups and playlists if there is an update.if (isUpdate) {this.createClonedMediaGroups_(clone);}}/*** Given a pathway clone object, clones all necessary playlists.** @param {Object} clone The pathway clone object.* @param {Object} basePlaylist The original playlist to clone from.*/addClonePathway(clone, basePlaylist = {}) {const main = this.main;const index = main.playlists.length;const uri = this.createCloneURI_(basePlaylist.resolvedUri, clone);const playlistId = createPlaylistID(clone.ID, uri);const attributes = this.createCloneAttributes_(clone.ID, basePlaylist.attributes);const playlist = this.createClonePlaylist_(basePlaylist, playlistId, clone, attributes);main.playlists[index] = playlist; // add playlist by ID and URImain.playlists[playlistId] = playlist;main.playlists[uri] = playlist;this.createClonedMediaGroups_(clone);}/*** Given a pathway clone object we create clones of all media.* In this function, all necessary information and updated playlists* are added to the `mediaGroup` object.* Playlists are also added to the `playlists` array so the media groups* will be properly linked.** @param {Object} clone The pathway clone object.*/createClonedMediaGroups_(clone) {const id = clone.ID;const baseID = clone['BASE-ID'];const main = this.main;['AUDIO', 'SUBTITLES', 'CLOSED-CAPTIONS'].forEach(mediaType => {// If the media type doesn't exist, or there is already a clone, skip// to the next media type.if (!main.mediaGroups[mediaType] || main.mediaGroups[mediaType][id]) {return;}for (const groupKey in main.mediaGroups[mediaType]) {if (groupKey === baseID) {// Create the group.main.mediaGroups[mediaType][id] = {};} else {// There is no need to iterate over label keys in this case.continue;}for (const labelKey in main.mediaGroups[mediaType][groupKey]) {const oldMedia = main.mediaGroups[mediaType][groupKey][labelKey];main.mediaGroups[mediaType][id][labelKey] = _extends$1({}, oldMedia);const newMedia = main.mediaGroups[mediaType][id][labelKey]; // update URIs on the mediaconst newUri = this.createCloneURI_(oldMedia.resolvedUri, clone);newMedia.resolvedUri = newUri;newMedia.uri = newUri; // Reset playlists in the new media group.newMedia.playlists = []; // Create new playlists in the newly cloned media group.oldMedia.playlists.forEach((p, i) => {const oldMediaPlaylist = main.playlists[p.id];const group = groupID(mediaType, id, labelKey);const newPlaylistID = createPlaylistID(id, group); // Check to see if it already existsif (oldMediaPlaylist && !main.playlists[newPlaylistID]) {const newMediaPlaylist = this.createClonePlaylist_(oldMediaPlaylist, newPlaylistID, clone);const newPlaylistUri = 
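/* Usage sketch (hostname and param values are assumed examples): for a pathway clone
 * whose 'URI-REPLACEMENT' is { HOST: 'b.example.com', PARAMS: { token: 'abc' } },
 *   createCloneURI_('https://a.example.com/hls/index.m3u8', clone)
 *   // => 'https://b.example.com/hls/index.m3u8?token=abc'
 * so the cloned playlists built here reuse the base playlist's structure but point at
 * the replacement host described by the content steering response. */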
newMediaPlaylist.resolvedUri;main.playlists[newPlaylistID] = newMediaPlaylist;main.playlists[newPlaylistUri] = newMediaPlaylist;}newMedia.playlists[i] = this.createClonePlaylist_(p, newPlaylistID, clone);});}}});}/*** Using the original playlist to be cloned, and the pathway clone object* information, we create a new playlist.** @param {Object} basePlaylist The original playlist to be cloned from.* @param {string} id The desired id of the newly cloned playlist.* @param {Object} clone The pathway clone object.* @param {Object} attributes An optional object to populate the `attributes` property in the playlist.** @return {Object} The combined cloned playlist.*/createClonePlaylist_(basePlaylist, id, clone, attributes) {const uri = this.createCloneURI_(basePlaylist.resolvedUri, clone);const newProps = {resolvedUri: uri,uri,id}; // Remove all segments from previous playlist in the clone.if (basePlaylist.segments) {newProps.segments = [];}if (attributes) {newProps.attributes = attributes;}return merge(basePlaylist, newProps);}/*** Generates an updated URI for a cloned pathway based on the original* pathway's URI and the paramaters from the pathway clone object in the* content steering server response.** @param {string} baseUri URI to be updated in the cloned pathway.* @param {Object} clone The pathway clone object.** @return {string} The updated URI for the cloned pathway.*/createCloneURI_(baseURI, clone) {const uri = new URL(baseURI);uri.hostname = clone['URI-REPLACEMENT'].HOST;const params = clone['URI-REPLACEMENT'].PARAMS; // Add params to the cloned URL.for (const key of Object.keys(params)) {uri.searchParams.set(key, params[key]);}return uri.href;}/*** Helper function to create the attributes needed for the new clone.* This mainly adds the necessary media attributes.** @param {string} id The pathway clone object ID.* @param {Object} oldAttributes The old attributes to compare to.* @return {Object} The new attributes to add to the playlist.*/createCloneAttributes_(id, oldAttributes) {const attributes = {['PATHWAY-ID']: id};['AUDIO', 'SUBTITLES', 'CLOSED-CAPTIONS'].forEach(mediaType => {if (oldAttributes[mediaType]) {attributes[mediaType] = id;}});return attributes;}/*** Returns the key ID set from a playlist** @param {playlist} playlist to fetch the key ID set from.* @return a Set of 32 digit hex strings that represent the unique keyIds for that playlist.*/getKeyIdSet(playlist) {if (playlist.contentProtection) {const keyIds = new Set();for (const keysystem in playlist.contentProtection) {const keyId = playlist.contentProtection[keysystem].attributes.keyId;if (keyId) {keyIds.add(keyId.toLowerCase());}}return keyIds;}}}/*** @file xhr.js*/const {xhr: videojsXHR} = videojs;const callbackWrapper = function (request, error, response, callback) {const reqResponse = request.responseType === 'arraybuffer' ? 
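/* Note on the derived bandwidth below: bytesReceived / roundTripTime(ms) * 8 * 1000
 * converts to bits per second; e.g. 1,000,000 bytes over 2,000 ms gives
 * 1e6 / 2000 * 8 * 1000 = 4,000,000 bits/s. */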
request.response : request.responseText;if (!error && reqResponse) {request.responseTime = Date.now();request.roundTripTime = request.responseTime - request.requestTime;request.bytesReceived = reqResponse.byteLength || reqResponse.length;if (!request.bandwidth) {request.bandwidth = Math.floor(request.bytesReceived / request.roundTripTime * 8 * 1000);}}if (response.headers) {request.responseHeaders = response.headers;} // videojs.xhr now uses a specific code on the error// object to signal that a request has timed out instead// of setting a boolean on the request objectif (error && error.code === 'ETIMEDOUT') {request.timedout = true;} // videojs.xhr no longer considers status codes outside of 200 and 0// (for file uris) to be errors, but the old XHR did, so emulate that// behavior. Status 206 may be used in response to byterange requests.if (!error && !request.aborted && response.statusCode !== 200 && response.statusCode !== 206 && response.statusCode !== 0) {error = new Error('XHR Failed with a response of: ' + (request && (reqResponse || request.responseText)));}callback(error, request);};/*** Iterates over the request hooks Set and calls them in order** @param {Set} hooks the hook Set to iterate over* @param {Object} options the request options to pass to the xhr wrapper* @return the callback hook function return value, the modified or new options Object.*/const callAllRequestHooks = (requestSet, options) => {if (!requestSet || !requestSet.size) {return;}let newOptions = options;requestSet.forEach(requestCallback => {newOptions = requestCallback(newOptions);});return newOptions;};/*** Iterates over the response hooks Set and calls them in order.** @param {Set} hooks the hook Set to iterate over* @param {Object} request the xhr request object* @param {Object} error the xhr error object* @param {Object} response the xhr response object*/const callAllResponseHooks = (responseSet, request, error, response) => {if (!responseSet || !responseSet.size) {return;}responseSet.forEach(responseCallback => {responseCallback(request, error, response);});};const xhrFactory = function () {const xhr = function XhrFunction(options, callback) {// Add a default timeoutoptions = merge({timeout: 45e3}, options); // Allow an optional user-specified function to modify the option// object before we construct the xhr request// TODO: Remove beforeRequest in the next major release.const beforeRequest = XhrFunction.beforeRequest || videojs.Vhs.xhr.beforeRequest; // onRequest and onResponse hooks as a Set, at either the player or global level.// TODO: new Set added here for beforeRequest alias. Remove this when beforeRequest is removed.const _requestCallbackSet = XhrFunction._requestCallbackSet || videojs.Vhs.xhr._requestCallbackSet || new Set();const _responseCallbackSet = XhrFunction._responseCallbackSet || videojs.Vhs.xhr._responseCallbackSet;if (beforeRequest && typeof beforeRequest === 'function') {videojs.log.warn('beforeRequest is deprecated, use onRequest instead.');_requestCallbackSet.add(beforeRequest);} // Use the standard videojs.xhr() method unless `videojs.Vhs.xhr` has been overriden// TODO: switch back to videojs.Vhs.xhr.name === 'XhrFunction' when we drop IE11const xhrMethod = videojs.Vhs.xhr.original === true ? 
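/* Usage sketch (the header value is assumed): request hooks receive the xhr options
 * object and must return it (possibly modified); response hooks receive
 * (request, error, response). A global hook registered via the onRequest helper
 * defined elsewhere in this bundle might look like:
 *   videojs.Vhs.xhr.onRequest((options) => {
 *     options.headers = Object.assign({}, options.headers, { Authorization: 'Bearer abc' });
 *     return options;
 *   });
 * The deprecated beforeRequest property is funneled through the same callback Set. */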
videojsXHR : videojs.Vhs.xhr; // call all registered onRequest hooks, assign new options.const beforeRequestOptions = callAllRequestHooks(_requestCallbackSet, options); // Remove the beforeRequest function from the hooks set so stale beforeRequest functions are not called._requestCallbackSet.delete(beforeRequest); // xhrMethod will call XMLHttpRequest.open and XMLHttpRequest.sendconst request = xhrMethod(beforeRequestOptions || options, function (error, response) {// call all registered onResponse hookscallAllResponseHooks(_responseCallbackSet, request, error, response);return callbackWrapper(request, error, response, callback);});const originalAbort = request.abort;request.abort = function () {request.aborted = true;return originalAbort.apply(request, arguments);};request.uri = options.uri;request.requestTime = Date.now();return request;};xhr.original = true;return xhr;};/*** Turns segment byterange into a string suitable for use in* HTTP Range requests** @param {Object} byterange - an object with two values defining the start and end* of a byte-range*/const byterangeStr = function (byterange) {// `byterangeEnd` is one less than `offset + length` because the HTTP range// header uses inclusive rangeslet byterangeEnd;const byterangeStart = byterange.offset;if (typeof byterange.offset === 'bigint' || typeof byterange.length === 'bigint') {byterangeEnd = window.BigInt(byterange.offset) + window.BigInt(byterange.length) - window.BigInt(1);} else {byterangeEnd = byterange.offset + byterange.length - 1;}return 'bytes=' + byterangeStart + '-' + byterangeEnd;};/*** Defines headers for use in the xhr request for a particular segment.** @param {Object} segment - a simplified copy of the segmentInfo object* from SegmentLoader*/const segmentXhrHeaders = function (segment) {const headers = {};if (segment.byterange) {headers.Range = byterangeStr(segment.byterange);}return headers;};/*** @file bin-utils.js*//*** convert a TimeRange to text** @param {TimeRange} range the timerange to use for conversion* @param {number} i the iterator on the range to convert* @return {string} the range in string format*/const textRange = function (range, i) {return range.start(i) + '-' + range.end(i);};/*** format a number as hex string** @param {number} e The number* @param {number} i the iterator* @return {string} the hex formatted number as a string*/const formatHexString = function (e, i) {const value = e.toString(16);return '00'.substring(0, 2 - value.length) + value + (i % 2 ? 
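/* Usage sketch (offsets are illustrative): for a segment with
 * byterange { offset: 100, length: 50 },
 *   segmentXhrHeaders(segment)  // => { Range: 'bytes=100-149' }
 * since the HTTP Range end is inclusive (offset + length - 1). */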
' ' : '');};const formatAsciiString = function (e) {if (e >= 0x20 && e < 0x7e) {return String.fromCharCode(e);}return '.';};/*** Creates an object for sending to a web worker modifying properties that are TypedArrays* into a new object with seperated properties for the buffer, byteOffset, and byteLength.** @param {Object} message* Object of properties and values to send to the web worker* @return {Object}* Modified message with TypedArray values expanded* @function createTransferableMessage*/const createTransferableMessage = function (message) {const transferable = {};Object.keys(message).forEach(key => {const value = message[key];if (isArrayBufferView(value)) {transferable[key] = {bytes: value.buffer,byteOffset: value.byteOffset,byteLength: value.byteLength};} else {transferable[key] = value;}});return transferable;};/*** Returns a unique string identifier for a media initialization* segment.** @param {Object} initSegment* the init segment object.** @return {string} the generated init segment id*/const initSegmentId = function (initSegment) {const byterange = initSegment.byterange || {length: Infinity,offset: 0};return [byterange.length, byterange.offset, initSegment.resolvedUri].join(',');};/*** Returns a unique string identifier for a media segment key.** @param {Object} key the encryption key* @return {string} the unique id for the media segment key.*/const segmentKeyId = function (key) {return key.resolvedUri;};/*** utils to help dump binary data to the console** @param {Array|TypedArray} data* data to dump to a string** @return {string} the data as a hex string.*/const hexDump = data => {const bytes = Array.prototype.slice.call(data);const step = 16;let result = '';let hex;let ascii;for (let j = 0; j < bytes.length / step; j++) {hex = bytes.slice(j * step, j * step + step).map(formatHexString).join('');ascii = bytes.slice(j * step, j * step + step).map(formatAsciiString).join('');result += hex + ' ' + ascii + '\n';}return result;};const tagDump = ({bytes}) => hexDump(bytes);const textRanges = ranges => {let result = '';let i;for (i = 0; i < ranges.length; i++) {result += textRange(ranges, i) + ' ';}return result;};var utils = /*#__PURE__*/Object.freeze({__proto__: null,createTransferableMessage: createTransferableMessage,initSegmentId: initSegmentId,segmentKeyId: segmentKeyId,hexDump: hexDump,tagDump: tagDump,textRanges: textRanges});// TODO handle fmp4 case where the timing info is accurate and doesn't involve transmux// 25% was arbitrarily chosen, and may need to be refined over time.const SEGMENT_END_FUDGE_PERCENT = 0.25;/*** Converts a player time (any time that can be gotten/set from player.currentTime(),* e.g., any time within player.seekable().start(0) to player.seekable().end(0)) to a* program time (any time referencing the real world (e.g., EXT-X-PROGRAM-DATE-TIME)).** The containing segment is required as the EXT-X-PROGRAM-DATE-TIME serves as an "anchor* point" (a point where we have a mapping from program time to player time, with player* time being the post transmux start of the segment).** For more details, see [this doc](../../docs/program-time-from-player-time.md).** @param {number} playerTime the player time* @param {Object} segment the segment which contains the player time* @return {Date} program time*/const playerTimeToProgramTime = (playerTime, segment) => {if (!segment.dateTimeObject) {// Can't convert without an "anchor point" for the program time (i.e., a time that can// be used to map the start of a segment with a real world time).return null;}const 
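/* Illustrative mapping (all numbers assumed): if this segment's dateTimeObject is
 * 2024-01-01T00:00:00.000Z, videoTimingInfo.transmuxedPresentationStart is 100 and
 * transmuxerPrependedSeconds is 0.5, then playerTimeToProgramTime(103, segment)
 * returns 2024-01-01T00:00:02.500Z, because 103 - (100 + 0.5) = 2.5s past the anchor. */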
transmuxerPrependedSeconds = segment.videoTimingInfo.transmuxerPrependedSeconds;const transmuxedStart = segment.videoTimingInfo.transmuxedPresentationStart; // get the start of the content from before old content is prependedconst startOfSegment = transmuxedStart + transmuxerPrependedSeconds;const offsetFromSegmentStart = playerTime - startOfSegment;return new Date(segment.dateTimeObject.getTime() + offsetFromSegmentStart * 1000);};const originalSegmentVideoDuration = videoTimingInfo => {return videoTimingInfo.transmuxedPresentationEnd - videoTimingInfo.transmuxedPresentationStart - videoTimingInfo.transmuxerPrependedSeconds;};/*** Finds a segment that contains the time requested given as an ISO-8601 string. The* returned segment might be an estimate or an accurate match.** @param {string} programTime The ISO-8601 programTime to find a match for* @param {Object} playlist A playlist object to search within*/const findSegmentForProgramTime = (programTime, playlist) => {// Assumptions:// - verifyProgramDateTimeTags has already been run// - live streams have been startedlet dateTimeObject;try {dateTimeObject = new Date(programTime);} catch (e) {return null;}if (!playlist || !playlist.segments || playlist.segments.length === 0) {return null;}let segment = playlist.segments[0];if (dateTimeObject < new Date(segment.dateTimeObject)) {// Requested time is before stream start.return null;}for (let i = 0; i < playlist.segments.length - 1; i++) {segment = playlist.segments[i];const nextSegmentStart = new Date(playlist.segments[i + 1].dateTimeObject);if (dateTimeObject < nextSegmentStart) {break;}}const lastSegment = playlist.segments[playlist.segments.length - 1];const lastSegmentStart = lastSegment.dateTimeObject;const lastSegmentDuration = lastSegment.videoTimingInfo ? originalSegmentVideoDuration(lastSegment.videoTimingInfo) : lastSegment.duration + lastSegment.duration * SEGMENT_END_FUDGE_PERCENT;const lastSegmentEnd = new Date(lastSegmentStart.getTime() + lastSegmentDuration * 1000);if (dateTimeObject > lastSegmentEnd) {// Beyond the end of the stream, or our best guess of the end of the stream.return null;}if (dateTimeObject > new Date(lastSegmentStart)) {segment = lastSegment;}return {segment,estimatedStart: segment.videoTimingInfo ? segment.videoTimingInfo.transmuxedPresentationStart : Playlist.duration(playlist, playlist.mediaSequence + playlist.segments.indexOf(segment)),// Although, given that all segments have accurate date time objects, the segment// selected should be accurate, unless the video has been transmuxed at some point// (determined by the presence of the videoTimingInfo object), the segment's "player// time" (the start time in the player) can't be considered accurate.type: segment.videoTimingInfo ? 
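/* 'accurate' is only reported once the segment has been transmuxed (videoTimingInfo
 * present); otherwise the start is an estimate derived from manifest durations, and
 * callers such as getProgramTime/seekToProgramTime treat it as a hint to seek and retry. */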
'accurate' : 'estimate'};};/*** Finds a segment that contains the given player time(in seconds).** @param {number} time The player time to find a match for* @param {Object} playlist A playlist object to search within*/const findSegmentForPlayerTime = (time, playlist) => {// Assumptions:// - there will always be a segment.duration// - we can start from zero// - segments are in time orderif (!playlist || !playlist.segments || playlist.segments.length === 0) {return null;}let segmentEnd = 0;let segment;for (let i = 0; i < playlist.segments.length; i++) {segment = playlist.segments[i]; // videoTimingInfo is set after the segment is downloaded and transmuxed, and// should contain the most accurate values we have for the segment's player times.//// Use the accurate transmuxedPresentationEnd value if it is available, otherwise fall// back to an estimate based on the manifest derived (inaccurate) segment.duration, to// calculate an end value.segmentEnd = segment.videoTimingInfo ? segment.videoTimingInfo.transmuxedPresentationEnd : segmentEnd + segment.duration;if (time <= segmentEnd) {break;}}const lastSegment = playlist.segments[playlist.segments.length - 1];if (lastSegment.videoTimingInfo && lastSegment.videoTimingInfo.transmuxedPresentationEnd < time) {// The time requested is beyond the stream end.return null;}if (time > segmentEnd) {// The time is within or beyond the last segment.//// Check to see if the time is beyond a reasonable guess of the end of the stream.if (time > segmentEnd + lastSegment.duration * SEGMENT_END_FUDGE_PERCENT) {// Technically, because the duration value is only an estimate, the time may still// exist in the last segment, however, there isn't enough information to make even// a reasonable estimate.return null;}segment = lastSegment;}return {segment,estimatedStart: segment.videoTimingInfo ? segment.videoTimingInfo.transmuxedPresentationStart : segmentEnd - segment.duration,// Because videoTimingInfo is only set after transmux, it is the only way to get// accurate timing values.type: segment.videoTimingInfo ? 
'accurate' : 'estimate'};};/*** Gives the offset of the comparisonTimestamp from the programTime timestamp in seconds.* If the offset returned is positive, the programTime occurs after the* comparisonTimestamp.* If the offset is negative, the programTime occurs before the comparisonTimestamp.** @param {string} comparisonTimeStamp An ISO-8601 timestamp to compare against* @param {string} programTime The programTime as an ISO-8601 string* @return {number} offset*/const getOffsetFromTimestamp = (comparisonTimeStamp, programTime) => {let segmentDateTime;let programDateTime;try {segmentDateTime = new Date(comparisonTimeStamp);programDateTime = new Date(programTime);} catch (e) {// TODO handle error}const segmentTimeEpoch = segmentDateTime.getTime();const programTimeEpoch = programDateTime.getTime();return (programTimeEpoch - segmentTimeEpoch) / 1000;};/*** Checks that all segments in this playlist have programDateTime tags.** @param {Object} playlist A playlist object*/const verifyProgramDateTimeTags = playlist => {if (!playlist.segments || playlist.segments.length === 0) {return false;}for (let i = 0; i < playlist.segments.length; i++) {const segment = playlist.segments[i];if (!segment.dateTimeObject) {return false;}}return true;};/*** Returns the programTime of the media given a playlist and a playerTime.* The playlist must have programDateTime tags for a programDateTime tag to be returned.* If the segments containing the time requested have not been buffered yet, an estimate* may be returned to the callback.** @param {Object} args* @param {Object} args.playlist A playlist object to search within* @param {number} time A playerTime in seconds* @param {Function} callback(err, programTime)* @return {string} err.message A detailed error message* @return {Object} programTime* @return {number} programTime.mediaSeconds The streamTime in seconds* @return {string} programTime.programDateTime The programTime as an ISO-8601 String*/const getProgramTime = ({playlist,time = undefined,callback}) => {if (!callback) {throw new Error('getProgramTime: callback must be provided');}if (!playlist || time === undefined) {return callback({message: 'getProgramTime: playlist and time must be provided'});}const matchedSegment = findSegmentForPlayerTime(time, playlist);if (!matchedSegment) {return callback({message: 'valid programTime was not found'});}if (matchedSegment.type === 'estimate') {return callback({message: 'Accurate programTime could not be determined.' + ' Please seek to e.seekTime and try again',seekTime: matchedSegment.estimatedStart});}const programTimeObject = {mediaSeconds: time};const programTime = playerTimeToProgramTime(time, matchedSegment.segment);if (programTime) {programTimeObject.programDateTime = programTime.toISOString();}return callback(null, programTimeObject);};/*** Seeks in the player to a time that matches the given programTime ISO-8601 string.** @param {Object} args* @param {string} args.programTime A programTime to seek to as an ISO-8601 String* @param {Object} args.playlist A playlist to look within* @param {number} args.retryCount The number of times to try for an accurate seek. Default is 2.* @param {Function} args.seekTo A method to perform a seek* @param {boolean} args.pauseAfterSeek Whether to end in a paused state after seeking. 
Default is true.* @param {Object} args.tech The tech to seek on* @param {Function} args.callback(err, newTime) A callback to return the new time to* @return {string} err.message A detailed error message* @return {number} newTime The exact time that was seeked to in seconds*/const seekToProgramTime = ({programTime,playlist,retryCount = 2,seekTo,pauseAfterSeek = true,tech,callback}) => {if (!callback) {throw new Error('seekToProgramTime: callback must be provided');}if (typeof programTime === 'undefined' || !playlist || !seekTo) {return callback({message: 'seekToProgramTime: programTime, seekTo and playlist must be provided'});}if (!playlist.endList && !tech.hasStarted_) {return callback({message: 'player must be playing a live stream to start buffering'});}if (!verifyProgramDateTimeTags(playlist)) {return callback({message: 'programDateTime tags must be provided in the manifest ' + playlist.resolvedUri});}const matchedSegment = findSegmentForProgramTime(programTime, playlist); // no matchif (!matchedSegment) {return callback({message: `${programTime} was not found in the stream`});}const segment = matchedSegment.segment;const mediaOffset = getOffsetFromTimestamp(segment.dateTimeObject, programTime);if (matchedSegment.type === 'estimate') {// we've run out of retriesif (retryCount === 0) {return callback({message: `${programTime} is not buffered yet. Try again`});}seekTo(matchedSegment.estimatedStart + mediaOffset);tech.one('seeked', () => {seekToProgramTime({programTime,playlist,retryCount: retryCount - 1,seekTo,pauseAfterSeek,tech,callback});});return;} // Since the segment.start value is determined from the buffered end or ending time// of the prior segment, the seekToTime doesn't need to account for any transmuxer// modifications.const seekToTime = segment.start + mediaOffset;const seekedCallback = () => {return callback(null, tech.currentTime());}; // listen for seeked eventtech.one('seeked', seekedCallback); // pause before seeking as video.js will restore this stateif (pauseAfterSeek) {tech.pause();}seekTo(seekToTime);};// which will only happen if the request is complete.const callbackOnCompleted = (request, cb) => {if (request.readyState === 4) {return cb();}return;};const containerRequest = (uri, xhr, cb) => {let bytes = [];let id3Offset;let finished = false;const endRequestAndCallback = function (err, req, type, _bytes) {req.abort();finished = true;return cb(err, req, type, _bytes);};const progressListener = function (error, request) {if (finished) {return;}if (error) {return endRequestAndCallback(error, request, '', bytes);} // grap the new part of content that was just downloadedconst newPart = request.responseText.substring(bytes && bytes.byteLength || 0, request.responseText.length); // add that onto bytesbytes = concatTypedArrays(bytes, stringToBytes(newPart, true));id3Offset = id3Offset || getId3Offset(bytes); // we need at least 10 bytes to determine a type// or we need at least two bytes after an id3Offsetif (bytes.length < 10 || id3Offset && bytes.length < id3Offset + 2) {return callbackOnCompleted(request, () => endRequestAndCallback(error, request, '', bytes));}const type = detectContainerForBytes(bytes); // if this looks like a ts segment but we don't have enough data// to see the second sync byte, wait until we have enough data// before declaring it tsif (type === 'ts' && bytes.length < 188) {return callbackOnCompleted(request, () => endRequestAndCallback(error, request, '', bytes));} // this may be an unsynced ts segment// wait for 376 bytes before detecting no 
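/* (Editorial note, not part of the original source.) The thresholds used by this
 * progress listener come from the container formats being sniffed: at least 10 bytes
 * are needed to read an ID3 header, a TS packet is 188 bytes so confirming a second
 * sync byte needs at least 188 bytes, and an unsynced TS stream is only ruled out
 * after two packet lengths (376 bytes). A rough sketch of the same checks, with
 * `bytes` as a Uint8Array:
 *
 *   const enoughForId3 = bytes.length >= 10;
 *   const enoughForTsSync = bytes.length >= 188;     // one full TS packet
 *   const enoughForUnsyncedTs = bytes.length >= 376;  // two packet lengths
 */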
containerif (!type && bytes.length < 376) {return callbackOnCompleted(request, () => endRequestAndCallback(error, request, '', bytes));}return endRequestAndCallback(null, request, type, bytes);};const options = {uri,beforeSend(request) {// this forces the browser to pass the bytes to us unprocessedrequest.overrideMimeType('text/plain; charset=x-user-defined');request.addEventListener('progress', function ({total,loaded}) {return callbackWrapper(request, null, {statusCode: request.status}, progressListener);});}};const request = xhr(options, function (error, response) {return callbackWrapper(request, error, response, progressListener);});return request;};const {EventTarget} = videojs;const dashPlaylistUnchanged = function (a, b) {if (!isPlaylistUnchanged(a, b)) {return false;} // for dash the above check will often return true in scenarios where// the playlist actually has changed because mediaSequence isn't a// dash thing, and we often set it to 1. So if the playlists have the same amount// of segments we return true.// So for dash we need to make sure that the underlying segments are different.// if sidx changed then the playlists are different.if (a.sidx && b.sidx && (a.sidx.offset !== b.sidx.offset || a.sidx.length !== b.sidx.length)) {return false;} else if (!a.sidx && b.sidx || a.sidx && !b.sidx) {return false;} // one or the other does not have segments// there was a change.if (a.segments && !b.segments || !a.segments && b.segments) {return false;} // neither has segments nothing changedif (!a.segments && !b.segments) {return true;} // check segments themselvesfor (let i = 0; i < a.segments.length; i++) {const aSegment = a.segments[i];const bSegment = b.segments[i]; // if uris are different between segments there was a changeif (aSegment.uri !== bSegment.uri) {return false;} // neither segment has a byterange, there will be no byterange change.if (!aSegment.byterange && !bSegment.byterange) {continue;}const aByterange = aSegment.byterange;const bByterange = bSegment.byterange; // if byterange only exists on one of the segments, there was a change.if (aByterange && !bByterange || !aByterange && bByterange) {return false;} // if both segments have byterange with different offsets, there was a change.if (aByterange.offset !== bByterange.offset || aByterange.length !== bByterange.length) {return false;}} // if everything was the same with segments, this is the same playlist.return true;};/*** Use the representation IDs from the mpd object to create groupIDs, the NAME is set to mandatory representation* ID in the parser. This allows for continuous playout across periods with the same representation IDs* (continuous periods as defined in DASH-IF 3.2.12). This is assumed in the mpd-parser as well. 
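* (Editorial note, not part of the original source.) For example, with a playlist whose
* attributes.NAME is "audio_eng", dashGroupId('AUDIO', 'audio', 'en (main)', playlist)
* returns 'placeholder-uri-AUDIO-audio-audio_eng'; when NAME is missing, the label is
* used instead, giving 'placeholder-uri-AUDIO-audio-en (main)'.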
If we want to support* periods without continuous playback this function may need modification as well as the parser.*/const dashGroupId = (type, group, label, playlist) => {// If the manifest somehow does not have an ID (non-dash compliant), use the label.const playlistId = playlist.attributes.NAME || label;return `placeholder-uri-${type}-${group}-${playlistId}`;};/*** Parses the main XML string and updates playlist URI references.** @param {Object} config* Object of arguments* @param {string} config.mainXml* The mpd XML* @param {string} config.srcUrl* The mpd URL* @param {Date} config.clientOffset* A time difference between server and client* @param {Object} config.sidxMapping* SIDX mappings for moof/mdat URIs and byte ranges* @return {Object}* The parsed mpd manifest object*/const parseMainXml = ({mainXml,srcUrl,clientOffset,sidxMapping,previousManifest}) => {const manifest = parse(mainXml, {manifestUri: srcUrl,clientOffset,sidxMapping,previousManifest});addPropertiesToMain(manifest, srcUrl, dashGroupId);return manifest;};/*** Removes any mediaGroup labels that no longer exist in the newMain** @param {Object} update* The previous mpd object being updated* @param {Object} newMain* The new mpd object*/const removeOldMediaGroupLabels = (update, newMain) => {forEachMediaGroup(update, (properties, type, group, label) => {if (!(label in newMain.mediaGroups[type][group])) {delete update.mediaGroups[type][group][label];}});};/*** Returns a new main manifest that is the result of merging an updated main manifest* into the original version.** @param {Object} oldMain* The old parsed mpd object* @param {Object} newMain* The updated parsed mpd object* @return {Object}* A new object representing the original main manifest with the updated media* playlists merged in*/const updateMain = (oldMain, newMain, sidxMapping) => {let noChanges = true;let update = merge(oldMain, {// These are top level properties that can be updatedduration: newMain.duration,minimumUpdatePeriod: newMain.minimumUpdatePeriod,timelineStarts: newMain.timelineStarts}); // First update the playlists in playlist listfor (let i = 0; i < newMain.playlists.length; i++) {const playlist = newMain.playlists[i];if (playlist.sidx) {const sidxKey = generateSidxKey(playlist.sidx); // add sidx segments to the playlist if we have all the sidx info alreadyif (sidxMapping && sidxMapping[sidxKey] && sidxMapping[sidxKey].sidx) {addSidxSegmentsToPlaylist$1(playlist, sidxMapping[sidxKey].sidx, playlist.sidx.resolvedUri);}}const playlistUpdate = updateMain$1(update, playlist, dashPlaylistUnchanged);if (playlistUpdate) {update = playlistUpdate;noChanges = false;}} // Then update media group playlistsforEachMediaGroup(newMain, (properties, type, group, label) => {if (properties.playlists && properties.playlists.length) {const id = properties.playlists[0].id;const playlistUpdate = updateMain$1(update, properties.playlists[0], dashPlaylistUnchanged);if (playlistUpdate) {update = playlistUpdate; // add new mediaGroup label if it doesn't exist and assign the new mediaGroup.if (!(label in update.mediaGroups[type][group])) {update.mediaGroups[type][group][label] = properties;} // update the playlist reference within media groupsupdate.mediaGroups[type][group][label].playlists[0] = update.playlists[id];noChanges = false;}}}); // remove mediaGroup labels and references that no longer exist in the newMainremoveOldMediaGroupLabels(update, newMain);if (newMain.minimumUpdatePeriod !== oldMain.minimumUpdatePeriod) {noChanges = false;}if (noChanges) {return 
null;}return update;}; // SIDX should be equivalent if the URI and byteranges of the SIDX match.// If the SIDXs have maps, the two maps should match,// both `a` and `b` missing SIDXs is considered matching.// If `a` or `b` but not both have a map, they aren't matching.const equivalentSidx = (a, b) => {const neitherMap = Boolean(!a.map && !b.map);const equivalentMap = neitherMap || Boolean(a.map && b.map && a.map.byterange.offset === b.map.byterange.offset && a.map.byterange.length === b.map.byterange.length);return equivalentMap && a.uri === b.uri && a.byterange.offset === b.byterange.offset && a.byterange.length === b.byterange.length;}; // exported for testingconst compareSidxEntry = (playlists, oldSidxMapping) => {const newSidxMapping = {};for (const id in playlists) {const playlist = playlists[id];const currentSidxInfo = playlist.sidx;if (currentSidxInfo) {const key = generateSidxKey(currentSidxInfo);if (!oldSidxMapping[key]) {break;}const savedSidxInfo = oldSidxMapping[key].sidxInfo;if (equivalentSidx(savedSidxInfo, currentSidxInfo)) {newSidxMapping[key] = oldSidxMapping[key];}}}return newSidxMapping;};/*** A function that filters out changed items as they need to be requested separately.** The method is exported for testing** @param {Object} main the parsed mpd XML returned via mpd-parser* @param {Object} oldSidxMapping the SIDX to compare against*/const filterChangedSidxMappings = (main, oldSidxMapping) => {const videoSidx = compareSidxEntry(main.playlists, oldSidxMapping);let mediaGroupSidx = videoSidx;forEachMediaGroup(main, (properties, mediaType, groupKey, labelKey) => {if (properties.playlists && properties.playlists.length) {const playlists = properties.playlists;mediaGroupSidx = merge(mediaGroupSidx, compareSidxEntry(playlists, oldSidxMapping));}});return mediaGroupSidx;};class DashPlaylistLoader extends EventTarget {// DashPlaylistLoader must accept either a src url or a playlist because subsequent// playlist loader setups from media groups will expect to be able to pass a playlist// (since there aren't external URLs to media playlists with DASH)constructor(srcUrlOrPlaylist, vhs, options = {}, mainPlaylistLoader) {super();this.mainPlaylistLoader_ = mainPlaylistLoader || this;if (!mainPlaylistLoader) {this.isMain_ = true;}const {withCredentials = false} = options;this.vhs_ = vhs;this.withCredentials = withCredentials;this.addMetadataToTextTrack = options.addMetadataToTextTrack;if (!srcUrlOrPlaylist) {throw new Error('A non-empty playlist URL or object is required');} // event naming?this.on('minimumUpdatePeriod', () => {this.refreshXml_();}); // live playlist staleness timeoutthis.on('mediaupdatetimeout', () => {this.refreshMedia_(this.media().id);});this.state = 'HAVE_NOTHING';this.loadedPlaylists_ = {};this.logger_ = logger('DashPlaylistLoader'); // initialize the loader state// The mainPlaylistLoader will be created with a stringif (this.isMain_) {this.mainPlaylistLoader_.srcUrl = srcUrlOrPlaylist; // TODO: reset sidxMapping between period changes// once multi-period is refactoredthis.mainPlaylistLoader_.sidxMapping_ = {};} else {this.childPlaylist_ = srcUrlOrPlaylist;}}requestErrored_(err, request, startingState) {// disposedif (!this.request) {return true;} // pending request is clearedthis.request = null;if (err) {// use the provided error object or create one// based on the request/responsethis.error = typeof err === 'object' && !(err instanceof Error) ? 
err : {status: request.status,message: 'DASH request error at URL: ' + request.uri,response: request.response,// MEDIA_ERR_NETWORKcode: 2};if (startingState) {this.state = startingState;}this.trigger('error');return true;}}/*** Verify that the container of the sidx segment can be parsed* and if it can, get and parse that segment.*/addSidxSegments_(playlist, startingState, cb) {const sidxKey = playlist.sidx && generateSidxKey(playlist.sidx); // playlist lacks sidx or sidx segments were added to this playlist already.if (!playlist.sidx || !sidxKey || this.mainPlaylistLoader_.sidxMapping_[sidxKey]) {// keep this function asyncthis.mediaRequest_ = window.setTimeout(() => cb(false), 0);return;} // resolve the segment URL relative to the playlistconst uri = resolveManifestRedirect(playlist.sidx.resolvedUri);const fin = (err, request) => {if (this.requestErrored_(err, request, startingState)) {return;}const sidxMapping = this.mainPlaylistLoader_.sidxMapping_;let sidx;try {sidx = parseSidx_1(toUint8(request.response).subarray(8));} catch (e) {// sidx parsing failed.this.requestErrored_(e, request, startingState);return;}sidxMapping[sidxKey] = {sidxInfo: playlist.sidx,sidx};addSidxSegmentsToPlaylist$1(playlist, sidx, playlist.sidx.resolvedUri);return cb(true);};this.request = containerRequest(uri, this.vhs_.xhr, (err, request, container, bytes) => {if (err) {return fin(err, request);}if (!container || container !== 'mp4') {return fin({status: request.status,message: `Unsupported ${container || 'unknown'} container type for sidx segment at URL: ${uri}`,// response is just bytes in this case// but we really don't want to return that.response: '',playlist,internal: true,playlistExclusionDuration: Infinity,// MEDIA_ERR_NETWORKcode: 2}, request);} // if we already downloaded the sidx bytes in the container request, use themconst {offset,length} = playlist.sidx.byterange;if (bytes.length >= length + offset) {return fin(err, {response: bytes.subarray(offset, offset + length),status: request.status,uri: request.uri});} // otherwise request sidx bytesthis.request = this.vhs_.xhr({uri,responseType: 'arraybuffer',headers: segmentXhrHeaders({byterange: playlist.sidx.byterange})}, fin);});}dispose() {this.trigger('dispose');this.stopRequest();this.loadedPlaylists_ = {};window.clearTimeout(this.minimumUpdatePeriodTimeout_);window.clearTimeout(this.mediaRequest_);window.clearTimeout(this.mediaUpdateTimeout);this.mediaUpdateTimeout = null;this.mediaRequest_ = null;this.minimumUpdatePeriodTimeout_ = null;if (this.mainPlaylistLoader_.createMupOnMedia_) {this.off('loadedmetadata', this.mainPlaylistLoader_.createMupOnMedia_);this.mainPlaylistLoader_.createMupOnMedia_ = null;}this.off();}hasPendingRequest() {return this.request || this.mediaRequest_;}stopRequest() {if (this.request) {const oldRequest = this.request;this.request = null;oldRequest.onreadystatechange = null;oldRequest.abort();}}media(playlist) {// getterif (!playlist) {return this.media_;} // setterif (this.state === 'HAVE_NOTHING') {throw new Error('Cannot switch media playlist from ' + this.state);}const startingState = this.state; // find the playlist object if the target playlist has been specified by URIif (typeof playlist === 'string') {if (!this.mainPlaylistLoader_.main.playlists[playlist]) {throw new Error('Unknown playlist URI: ' + playlist);}playlist = this.mainPlaylistLoader_.main.playlists[playlist];}const mediaChange = !this.media_ || playlist.id !== this.media_.id; // switch to previously loaded playlists immediatelyif (mediaChange && 
this.loadedPlaylists_[playlist.id] && this.loadedPlaylists_[playlist.id].endList) {this.state = 'HAVE_METADATA';this.media_ = playlist; // trigger media change if the active media has been updatedif (mediaChange) {this.trigger('mediachanging');this.trigger('mediachange');}return;} // switching to the active playlist is a no-opif (!mediaChange) {return;} // switching from an already loaded playlistif (this.media_) {this.trigger('mediachanging');}this.addSidxSegments_(playlist, startingState, sidxChanged => {// everything is ready just continue to haveMetadatathis.haveMetadata({startingState,playlist});});}haveMetadata({startingState,playlist}) {this.state = 'HAVE_METADATA';this.loadedPlaylists_[playlist.id] = playlist;this.mediaRequest_ = null; // This will trigger loadedplaylistthis.refreshMedia_(playlist.id); // fire loadedmetadata the first time a media playlist is loaded// to resolve setup of media groupsif (startingState === 'HAVE_MAIN_MANIFEST') {this.trigger('loadedmetadata');} else {// trigger media change if the active media has been updatedthis.trigger('mediachange');}}pause() {if (this.mainPlaylistLoader_.createMupOnMedia_) {this.off('loadedmetadata', this.mainPlaylistLoader_.createMupOnMedia_);this.mainPlaylistLoader_.createMupOnMedia_ = null;}this.stopRequest();window.clearTimeout(this.mediaUpdateTimeout);this.mediaUpdateTimeout = null;if (this.isMain_) {window.clearTimeout(this.mainPlaylistLoader_.minimumUpdatePeriodTimeout_);this.mainPlaylistLoader_.minimumUpdatePeriodTimeout_ = null;}if (this.state === 'HAVE_NOTHING') {// If we pause the loader before any data has been retrieved, its as if we never// started, so reset to an unstarted state.this.started = false;}}load(isFinalRendition) {window.clearTimeout(this.mediaUpdateTimeout);this.mediaUpdateTimeout = null;const media = this.media();if (isFinalRendition) {const delay = media ? media.targetDuration / 2 * 1000 : 5 * 1000;this.mediaUpdateTimeout = window.setTimeout(() => this.load(), delay);return;} // because the playlists are internal to the manifest, load should either load the// main manifest, or do nothing but trigger an eventif (!this.started) {this.start();return;}if (media && !media.endList) {// Check to see if this is the main loader and the MUP was cleared (this happens// when the loader was paused). 
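/* (Editorial note, not part of the original source.) At this point the loader has
 * already started: for a live playlist (no endList) it falls through to trigger
 * 'mediaupdatetimeout' so the manifest keeps refreshing, and for the main loader it
 * re-arms the minimumUpdatePeriod timer if pause() had cleared it; a VOD playlist
 * simply re-triggers 'loadedplaylist'. As a rough guide to the final-rendition retry
 * path above, the delay is media.targetDuration / 2 * 1000, e.g. a 6 second target
 * duration yields a 3000 ms retry, with a 5000 ms fallback when no media is selected.
 */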
`media` should be set at this point since one is always// set during `start()`.if (this.isMain_ && !this.minimumUpdatePeriodTimeout_) {// Trigger minimumUpdatePeriod to refresh the main manifestthis.trigger('minimumUpdatePeriod'); // Since there was no prior minimumUpdatePeriodTimeout it should be recreatedthis.updateMinimumUpdatePeriodTimeout_();}this.trigger('mediaupdatetimeout');} else {this.trigger('loadedplaylist');}}start() {this.started = true; // We don't need to request the main manifest again// Call this asynchronously to match the xhr request behavior belowif (!this.isMain_) {this.mediaRequest_ = window.setTimeout(() => this.haveMain_(), 0);return;}this.requestMain_((req, mainChanged) => {this.haveMain_();if (!this.hasPendingRequest() && !this.media_) {this.media(this.mainPlaylistLoader_.main.playlists[0]);}});}requestMain_(cb) {this.request = this.vhs_.xhr({uri: this.mainPlaylistLoader_.srcUrl,withCredentials: this.withCredentials}, (error, req) => {if (this.requestErrored_(error, req)) {if (this.state === 'HAVE_NOTHING') {this.started = false;}return;}const mainChanged = req.responseText !== this.mainPlaylistLoader_.mainXml_;this.mainPlaylistLoader_.mainXml_ = req.responseText;if (req.responseHeaders && req.responseHeaders.date) {this.mainLoaded_ = Date.parse(req.responseHeaders.date);} else {this.mainLoaded_ = Date.now();}this.mainPlaylistLoader_.srcUrl = resolveManifestRedirect(this.mainPlaylistLoader_.srcUrl, req);if (mainChanged) {this.handleMain_();this.syncClientServerClock_(() => {return cb(req, mainChanged);});return;}return cb(req, mainChanged);});}/*** Parses the main xml for UTCTiming node to sync the client clock to the server* clock. If the UTCTiming node requires a HEAD or GET request, that request is made.** @param {Function} done* Function to call when clock sync has completed*/syncClientServerClock_(done) {const utcTiming = parseUTCTiming(this.mainPlaylistLoader_.mainXml_); // No UTCTiming element found in the mpd. 
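/* (Editorial note, not part of the original source.) Sketch of the three clock-sync
 * paths handled below, where clientOffset_ is always "server time minus client time"
 * in milliseconds:
 *
 *   no UTCTiming node   -> clientOffset_ = mainLoaded_ - Date.now()   (MPD Date header)
 *   method 'DIRECT'     -> clientOffset_ = utcTiming.value - Date.now()
 *   method 'HEAD' or GET -> request utcTiming.value, then
 *                           clientOffset_ = serverTime - Date.now()
 */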
Use Date header from mpd request as the// server clockif (utcTiming === null) {this.mainPlaylistLoader_.clientOffset_ = this.mainLoaded_ - Date.now();return done();}if (utcTiming.method === 'DIRECT') {this.mainPlaylistLoader_.clientOffset_ = utcTiming.value - Date.now();return done();}this.request = this.vhs_.xhr({uri: resolveUrl(this.mainPlaylistLoader_.srcUrl, utcTiming.value),method: utcTiming.method,withCredentials: this.withCredentials}, (error, req) => {// disposedif (!this.request) {return;}if (error) {// sync request failed, fall back to using date header from mpd// TODO: log warningthis.mainPlaylistLoader_.clientOffset_ = this.mainLoaded_ - Date.now();return done();}let serverTime;if (utcTiming.method === 'HEAD') {if (!req.responseHeaders || !req.responseHeaders.date) {// expected date header not preset, fall back to using date header from mpd// TODO: log warningserverTime = this.mainLoaded_;} else {serverTime = Date.parse(req.responseHeaders.date);}} else {serverTime = Date.parse(req.responseText);}this.mainPlaylistLoader_.clientOffset_ = serverTime - Date.now();done();});}haveMain_() {this.state = 'HAVE_MAIN_MANIFEST';if (this.isMain_) {// We have the main playlist at this point, so// trigger this to allow PlaylistController// to make an initial playlist selectionthis.trigger('loadedplaylist');} else if (!this.media_) {// no media playlist was specifically selected so select// the one the child playlist loader was created withthis.media(this.childPlaylist_);}}handleMain_() {// clear media requestthis.mediaRequest_ = null;const oldMain = this.mainPlaylistLoader_.main;let newMain = parseMainXml({mainXml: this.mainPlaylistLoader_.mainXml_,srcUrl: this.mainPlaylistLoader_.srcUrl,clientOffset: this.mainPlaylistLoader_.clientOffset_,sidxMapping: this.mainPlaylistLoader_.sidxMapping_,previousManifest: oldMain}); // if we have an old main to compare the new main againstif (oldMain) {newMain = updateMain(oldMain, newMain, this.mainPlaylistLoader_.sidxMapping_);} // only update main if we have a new mainthis.mainPlaylistLoader_.main = newMain ? newMain : oldMain;const location = this.mainPlaylistLoader_.main.locations && this.mainPlaylistLoader_.main.locations[0];if (location && location !== this.mainPlaylistLoader_.srcUrl) {this.mainPlaylistLoader_.srcUrl = location;}if (!oldMain || newMain && newMain.minimumUpdatePeriod !== oldMain.minimumUpdatePeriod) {this.updateMinimumUpdatePeriodTimeout_();}this.addEventStreamToMetadataTrack_(newMain);return Boolean(newMain);}updateMinimumUpdatePeriodTimeout_() {const mpl = this.mainPlaylistLoader_; // cancel any pending creation of mup on media// a new one will be added if needed.if (mpl.createMupOnMedia_) {mpl.off('loadedmetadata', mpl.createMupOnMedia_);mpl.createMupOnMedia_ = null;} // clear any pending timeoutsif (mpl.minimumUpdatePeriodTimeout_) {window.clearTimeout(mpl.minimumUpdatePeriodTimeout_);mpl.minimumUpdatePeriodTimeout_ = null;}let mup = mpl.main && mpl.main.minimumUpdatePeriod; // If the minimumUpdatePeriod has a value of 0, that indicates that the current// MPD has no future validity, so a new one will need to be acquired when new// media segments are to be made available. Thus, we use the target duration// in this caseif (mup === 0) {if (mpl.media()) {mup = mpl.media().targetDuration * 1000;} else {mpl.createMupOnMedia_ = mpl.updateMinimumUpdatePeriodTimeout_;mpl.one('loadedmetadata', mpl.createMupOnMedia_);}} // if minimumUpdatePeriod is invalid or <= zero, which// can happen when a live video becomes VOD. 
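/* (Editorial note, not part of the original source.) Summary of the guard below:
 * a minimumUpdatePeriod of 0 means the MPD has no future validity, so the refresh
 * interval falls back to the selected media's targetDuration * 1000 (or is deferred
 * until 'loadedmetadata' when no media has been selected yet); anything that is not
 * a positive number results in no timeout at all, e.g. when a live stream becomes VOD.
 */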
skip timeout// creation.if (typeof mup !== 'number' || mup <= 0) {if (mup < 0) {this.logger_(`found invalid minimumUpdatePeriod of ${mup}, not setting a timeout`);}return;}this.createMUPTimeout_(mup);}createMUPTimeout_(mup) {const mpl = this.mainPlaylistLoader_;mpl.minimumUpdatePeriodTimeout_ = window.setTimeout(() => {mpl.minimumUpdatePeriodTimeout_ = null;mpl.trigger('minimumUpdatePeriod');mpl.createMUPTimeout_(mup);}, mup);}/*** Sends request to refresh the main xml and updates the parsed main manifest*/refreshXml_() {this.requestMain_((req, mainChanged) => {if (!mainChanged) {return;}if (this.media_) {this.media_ = this.mainPlaylistLoader_.main.playlists[this.media_.id];} // This will filter out updated sidx info from the mappingthis.mainPlaylistLoader_.sidxMapping_ = filterChangedSidxMappings(this.mainPlaylistLoader_.main, this.mainPlaylistLoader_.sidxMapping_);this.addSidxSegments_(this.media(), this.state, sidxChanged => {// TODO: do we need to reload the current playlist?this.refreshMedia_(this.media().id);});});}/*** Refreshes the media playlist by re-parsing the main xml and updating playlist* references. If this is an alternate loader, the updated parsed manifest is retrieved* from the main loader.*/refreshMedia_(mediaID) {if (!mediaID) {throw new Error('refreshMedia_ must take a media id');} // for main we have to reparse the main xml// to re-create segments based on current timing values// which may change media. We only skip updating the main manifest// if this is the first time this.media_ is being set.// as main was just parsed in that case.if (this.media_ && this.isMain_) {this.handleMain_();}const playlists = this.mainPlaylistLoader_.main.playlists;const mediaChanged = !this.media_ || this.media_ !== playlists[mediaID];if (mediaChanged) {this.media_ = playlists[mediaID];} else {this.trigger('playlistunchanged');}if (!this.mediaUpdateTimeout) {const createMediaUpdateTimeout = () => {if (this.media().endList) {return;}this.mediaUpdateTimeout = window.setTimeout(() => {this.trigger('mediaupdatetimeout');createMediaUpdateTimeout();}, refreshDelay(this.media(), Boolean(mediaChanged)));};createMediaUpdateTimeout();}this.trigger('loadedplaylist');}/*** Takes eventstream data from a parsed DASH manifest and adds it to the metadata text track.** @param {manifest} newMain the newly parsed manifest*/addEventStreamToMetadataTrack_(newMain) {// Only add new event stream metadata if we have a new manifest.if (newMain && this.mainPlaylistLoader_.main.eventStream) {// convert EventStream to ID3-like data.const metadataArray = this.mainPlaylistLoader_.main.eventStream.map(eventStreamNode => {return {cueTime: eventStreamNode.start,frames: [{data: eventStreamNode.messageData}]};});this.addMetadataToTextTrack('EventStream', metadataArray, this.mainPlaylistLoader_.main.duration);}}/*** Returns the key ID set from a playlist** @param {playlist} playlist to fetch the key ID set from.* @return a Set of 32 digit hex strings that represent the unique keyIds for that playlist.*/getKeyIdSet(playlist) {if (playlist.contentProtection) {const keyIds = new Set();for (const keysystem in playlist.contentProtection) {const defaultKID = playlist.contentProtection[keysystem].attributes['cenc:default_KID'];if (defaultKID) {// DASH keyIds are separated by dashes.keyIds.add(defaultKID.replace(/-/g, '').toLowerCase());}}return keyIds;}}}var Config = {GOAL_BUFFER_LENGTH: 30,MAX_GOAL_BUFFER_LENGTH: 60,BACK_BUFFER_LENGTH: 30,GOAL_BUFFER_LENGTH_RATE: 1,// 0.5 MB/sINITIAL_BANDWIDTH: 4194304,// A fudge factor to apply 
to advertised playlist bitrates to account for// temporary flucations in client bandwidthBANDWIDTH_VARIANCE: 1.2,// How much of the buffer must be filled before we consider upswitchingBUFFER_LOW_WATER_LINE: 0,MAX_BUFFER_LOW_WATER_LINE: 30,// TODO: Remove this when experimentalBufferBasedABR is removedEXPERIMENTAL_MAX_BUFFER_LOW_WATER_LINE: 16,BUFFER_LOW_WATER_LINE_RATE: 1,// If the buffer is greater than the high water line, we won't switch downBUFFER_HIGH_WATER_LINE: 30};const stringToArrayBuffer = string => {const view = new Uint8Array(new ArrayBuffer(string.length));for (let i = 0; i < string.length; i++) {view[i] = string.charCodeAt(i);}return view.buffer;};/* global Blob, BlobBuilder, Worker */// unify worker interfaceconst browserWorkerPolyFill = function (workerObj) {// node only supports on/offworkerObj.on = workerObj.addEventListener;workerObj.off = workerObj.removeEventListener;return workerObj;};const createObjectURL = function (str) {try {return URL.createObjectURL(new Blob([str], {type: 'application/javascript'}));} catch (e) {const blob = new BlobBuilder();blob.append(str);return URL.createObjectURL(blob.getBlob());}};const factory = function (code) {return function () {const objectUrl = createObjectURL(code);const worker = browserWorkerPolyFill(new Worker(objectUrl));worker.objURL = objectUrl;const terminate = worker.terminate;worker.on = worker.addEventListener;worker.off = worker.removeEventListener;worker.terminate = function () {URL.revokeObjectURL(objectUrl);return terminate.call(this);};return worker;};};const transform = function (code) {return `var browserWorkerPolyFill = ${browserWorkerPolyFill.toString()};\n` + 'browserWorkerPolyFill(self);\n' + code;};const getWorkerString = function (fn) {return fn.toString().replace(/^function.+?{/, '').slice(0, -1);};/* rollup-plugin-worker-factory start for worker!/home/runner/work/http-streaming/http-streaming/src/transmuxer-worker.js */const workerCode$1 = transform(getWorkerString(function () {var commonjsGlobal = typeof globalThis !== 'undefined' ? globalThis : typeof window !== 'undefined' ? window : typeof global !== 'undefined' ? global : typeof self !== 'undefined' ? self : {};/*** mux.js** Copyright (c) Brightcove* Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE** A lightweight readable stream implemention that handles event dispatching.* Objects that inherit from streams should call init in their constructors.*/var Stream$8 = function () {this.init = function () {var listeners = {};/*** Add a listener for a specified event type.* @param type {string} the event name* @param listener {function} the callback to be invoked when an event of* the specified type occurs*/this.on = function (type, listener) {if (!listeners[type]) {listeners[type] = [];}listeners[type] = listeners[type].concat(listener);};/*** Remove a listener for a specified event type.* @param type {string} the event name* @param listener {function} a function previously registered for this* type of event through `on`*/this.off = function (type, listener) {var index;if (!listeners[type]) {return false;}index = listeners[type].indexOf(listener);listeners[type] = listeners[type].slice();listeners[type].splice(index, 1);return index > -1;};/*** Trigger an event of the specified type on this stream. 
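* (Editorial note, not part of the original source.) Typical use inside this worker,
* with `s` standing in for any object that has run Stream's init():
*
*   var onData = function (chunk) { console.log('got', chunk); };
*   s.on('data', onData);
*   s.trigger('data', {bytes: 3}); // a single extra argument is passed straight through
*   s.off('data', onData);         // returns true if the listener had been registered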
Any additional* arguments to this function are passed as parameters to event listeners.* @param type {string} the event name*/this.trigger = function (type) {var callbacks, i, length, args;callbacks = listeners[type];if (!callbacks) {return;} // Slicing the arguments on every invocation of this method// can add a significant amount of overhead. Avoid the// intermediate object creation for the common case of a// single callback argumentif (arguments.length === 2) {length = callbacks.length;for (i = 0; i < length; ++i) {callbacks[i].call(this, arguments[1]);}} else {args = [];i = arguments.length;for (i = 1; i < arguments.length; ++i) {args.push(arguments[i]);}length = callbacks.length;for (i = 0; i < length; ++i) {callbacks[i].apply(this, args);}}};/*** Destroys the stream and cleans up.*/this.dispose = function () {listeners = {};};};};/*** Forwards all `data` events on this stream to the destination stream. The* destination stream should provide a method `push` to receive the data* events as they arrive.* @param destination {stream} the stream that will receive all `data` events* @param autoFlush {boolean} if false, we will not call `flush` on the destination* when the current stream emits a 'done' event* @see http://nodejs.org/api/stream.html#stream_readable_pipe_destination_options*/Stream$8.prototype.pipe = function (destination) {this.on('data', function (data) {destination.push(data);});this.on('done', function (flushSource) {destination.flush(flushSource);});this.on('partialdone', function (flushSource) {destination.partialFlush(flushSource);});this.on('endedtimeline', function (flushSource) {destination.endTimeline(flushSource);});this.on('reset', function (flushSource) {destination.reset(flushSource);});return destination;}; // Default stream functions that are expected to be overridden to perform// actual work. 
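/* (Editorial note, not part of the original source.) pipe() returns its destination,
 * so transmuxer stages can be chained; `parser` and `muxer` below are placeholder
 * stream instances using the default prototype methods that follow:
 *
 *   parser.pipe(muxer);
 *   parser.push(packet);   // re-emitted as a 'data' event and pushed into muxer
 *   parser.flush();        // 'done' propagates as muxer.flush(flushSource)
 */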
These are provided by the prototype as a sort of no-op// implementation so that we don't have to check for their existence in the// `pipe` function above.Stream$8.prototype.push = function (data) {this.trigger('data', data);};Stream$8.prototype.flush = function (flushSource) {this.trigger('done', flushSource);};Stream$8.prototype.partialFlush = function (flushSource) {this.trigger('partialdone', flushSource);};Stream$8.prototype.endTimeline = function (flushSource) {this.trigger('endedtimeline', flushSource);};Stream$8.prototype.reset = function (flushSource) {this.trigger('reset', flushSource);};var stream = Stream$8;var MAX_UINT32$1 = Math.pow(2, 32);var getUint64$3 = function (uint8) {var dv = new DataView(uint8.buffer, uint8.byteOffset, uint8.byteLength);var value;if (dv.getBigUint64) {value = dv.getBigUint64(0);if (value < Number.MAX_SAFE_INTEGER) {return Number(value);}return value;}return dv.getUint32(0) * MAX_UINT32$1 + dv.getUint32(4);};var numbers = {getUint64: getUint64$3,MAX_UINT32: MAX_UINT32$1};/*** mux.js** Copyright (c) Brightcove* Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE** Functions that generate fragmented MP4s suitable for use with Media* Source Extensions.*/var MAX_UINT32 = numbers.MAX_UINT32;var box, dinf, esds, ftyp, mdat, mfhd, minf, moof, moov, mvex, mvhd, trak, tkhd, mdia, mdhd, hdlr, sdtp, stbl, stsd, traf, trex, trun$1, types, MAJOR_BRAND, MINOR_VERSION, AVC1_BRAND, VIDEO_HDLR, AUDIO_HDLR, HDLR_TYPES, VMHD, SMHD, DREF, STCO, STSC, STSZ, STTS; // pre-calculate constants(function () {var i;types = {avc1: [],// codingnameavcC: [],btrt: [],dinf: [],dref: [],esds: [],ftyp: [],hdlr: [],mdat: [],mdhd: [],mdia: [],mfhd: [],minf: [],moof: [],moov: [],mp4a: [],// codingnamemvex: [],mvhd: [],pasp: [],sdtp: [],smhd: [],stbl: [],stco: [],stsc: [],stsd: [],stsz: [],stts: [],styp: [],tfdt: [],tfhd: [],traf: [],trak: [],trun: [],trex: [],tkhd: [],vmhd: []}; // In environments where Uint8Array is undefined (e.g., IE8), skip set up so that we// don't throw an errorif (typeof Uint8Array === 'undefined') {return;}for (i in types) {if (types.hasOwnProperty(i)) {types[i] = [i.charCodeAt(0), i.charCodeAt(1), i.charCodeAt(2), i.charCodeAt(3)];}}MAJOR_BRAND = new Uint8Array(['i'.charCodeAt(0), 's'.charCodeAt(0), 'o'.charCodeAt(0), 'm'.charCodeAt(0)]);AVC1_BRAND = new Uint8Array(['a'.charCodeAt(0), 'v'.charCodeAt(0), 'c'.charCodeAt(0), '1'.charCodeAt(0)]);MINOR_VERSION = new Uint8Array([0, 0, 0, 1]);VIDEO_HDLR = new Uint8Array([0x00,// version 00x00, 0x00, 0x00,// flags0x00, 0x00, 0x00, 0x00,// pre_defined0x76, 0x69, 0x64, 0x65,// handler_type: 'vide'0x00, 0x00, 0x00, 0x00,// reserved0x00, 0x00, 0x00, 0x00,// reserved0x00, 0x00, 0x00, 0x00,// reserved0x56, 0x69, 0x64, 0x65, 0x6f, 0x48, 0x61, 0x6e, 0x64, 0x6c, 0x65, 0x72, 0x00 // name: 'VideoHandler']);AUDIO_HDLR = new Uint8Array([0x00,// version 00x00, 0x00, 0x00,// flags0x00, 0x00, 0x00, 0x00,// pre_defined0x73, 0x6f, 0x75, 0x6e,// handler_type: 'soun'0x00, 0x00, 0x00, 0x00,// reserved0x00, 0x00, 0x00, 0x00,// reserved0x00, 0x00, 0x00, 0x00,// reserved0x53, 0x6f, 0x75, 0x6e, 0x64, 0x48, 0x61, 0x6e, 0x64, 0x6c, 0x65, 0x72, 0x00 // name: 'SoundHandler']);HDLR_TYPES = {video: VIDEO_HDLR,audio: AUDIO_HDLR};DREF = new Uint8Array([0x00,// version 00x00, 0x00, 0x00,// flags0x00, 0x00, 0x00, 0x01,// entry_count0x00, 0x00, 0x00, 0x0c,// entry_size0x75, 0x72, 0x6c, 0x20,// 'url' type0x00,// version 00x00, 0x00, 0x01 // entry_flags]);SMHD = new Uint8Array([0x00,// version0x00, 0x00, 0x00,// flags0x00, 0x00,// 
balance, 0 means centered0x00, 0x00 // reserved]);STCO = new Uint8Array([0x00,// version0x00, 0x00, 0x00,// flags0x00, 0x00, 0x00, 0x00 // entry_count]);STSC = STCO;STSZ = new Uint8Array([0x00,// version0x00, 0x00, 0x00,// flags0x00, 0x00, 0x00, 0x00,// sample_size0x00, 0x00, 0x00, 0x00 // sample_count]);STTS = STCO;VMHD = new Uint8Array([0x00,// version0x00, 0x00, 0x01,// flags0x00, 0x00,// graphicsmode0x00, 0x00, 0x00, 0x00, 0x00, 0x00 // opcolor]);})();box = function (type) {var payload = [],size = 0,i,result,view;for (i = 1; i < arguments.length; i++) {payload.push(arguments[i]);}i = payload.length; // calculate the total size we need to allocatewhile (i--) {size += payload[i].byteLength;}result = new Uint8Array(size + 8);view = new DataView(result.buffer, result.byteOffset, result.byteLength);view.setUint32(0, result.byteLength);result.set(type, 4); // copy the payload into the resultfor (i = 0, size = 8; i < payload.length; i++) {result.set(payload[i], size);size += payload[i].byteLength;}return result;};dinf = function () {return box(types.dinf, box(types.dref, DREF));};esds = function (track) {return box(types.esds, new Uint8Array([0x00,// version0x00, 0x00, 0x00,// flags// ES_Descriptor0x03,// tag, ES_DescrTag0x19,// length0x00, 0x00,// ES_ID0x00,// streamDependenceFlag, URL_flag, reserved, streamPriority// DecoderConfigDescriptor0x04,// tag, DecoderConfigDescrTag0x11,// length0x40,// object type0x15,// streamType0x00, 0x06, 0x00,// bufferSizeDB0x00, 0x00, 0xda, 0xc0,// maxBitrate0x00, 0x00, 0xda, 0xc0,// avgBitrate// DecoderSpecificInfo0x05,// tag, DecoderSpecificInfoTag0x02,// length// ISO/IEC 14496-3, AudioSpecificConfig// for samplingFrequencyIndex see ISO/IEC 13818-7:2006, 8.1.3.2.2, Table 35track.audioobjecttype << 3 | track.samplingfrequencyindex >>> 1, track.samplingfrequencyindex << 7 | track.channelcount << 3, 0x06, 0x01, 0x02 // GASpecificConfig]));};ftyp = function () {return box(types.ftyp, MAJOR_BRAND, MINOR_VERSION, MAJOR_BRAND, AVC1_BRAND);};hdlr = function (type) {return box(types.hdlr, HDLR_TYPES[type]);};mdat = function (data) {return box(types.mdat, data);};mdhd = function (track) {var result = new Uint8Array([0x00,// version 00x00, 0x00, 0x00,// flags0x00, 0x00, 0x00, 0x02,// creation_time0x00, 0x00, 0x00, 0x03,// modification_time0x00, 0x01, 0x5f, 0x90,// timescale, 90,000 "ticks" per secondtrack.duration >>> 24 & 0xFF, track.duration >>> 16 & 0xFF, track.duration >>> 8 & 0xFF, track.duration & 0xFF,// duration0x55, 0xc4,// 'und' language (undetermined)0x00, 0x00]); // Use the sample rate from the track metadata, when it is// defined. The sample rate can be parsed out of an ADTS header, for// instance.if (track.samplerate) {result[12] = track.samplerate >>> 24 & 0xFF;result[13] = track.samplerate >>> 16 & 0xFF;result[14] = track.samplerate >>> 8 & 0xFF;result[15] = track.samplerate & 0xFF;}return box(types.mdhd, result);};mdia = function (track) {return box(types.mdia, mdhd(track), hdlr(track.type), minf(track));};mfhd = function (sequenceNumber) {return box(types.mfhd, new Uint8Array([0x00, 0x00, 0x00, 0x00,// flags(sequenceNumber & 0xFF000000) >> 24, (sequenceNumber & 0xFF0000) >> 16, (sequenceNumber & 0xFF00) >> 8, sequenceNumber & 0xFF // sequence_number]));};minf = function (track) {return box(types.minf, track.type === 'video' ? 
box(types.vmhd, VMHD) : box(types.smhd, SMHD), dinf(), stbl(track));};moof = function (sequenceNumber, tracks) {var trackFragments = [],i = tracks.length; // build traf boxes for each track fragmentwhile (i--) {trackFragments[i] = traf(tracks[i]);}return box.apply(null, [types.moof, mfhd(sequenceNumber)].concat(trackFragments));};/*** Returns a movie box.* @param tracks {array} the tracks associated with this movie* @see ISO/IEC 14496-12:2012(E), section 8.2.1*/moov = function (tracks) {var i = tracks.length,boxes = [];while (i--) {boxes[i] = trak(tracks[i]);}return box.apply(null, [types.moov, mvhd(0xffffffff)].concat(boxes).concat(mvex(tracks)));};mvex = function (tracks) {var i = tracks.length,boxes = [];while (i--) {boxes[i] = trex(tracks[i]);}return box.apply(null, [types.mvex].concat(boxes));};mvhd = function (duration) {var bytes = new Uint8Array([0x00,// version 00x00, 0x00, 0x00,// flags0x00, 0x00, 0x00, 0x01,// creation_time0x00, 0x00, 0x00, 0x02,// modification_time0x00, 0x01, 0x5f, 0x90,// timescale, 90,000 "ticks" per second(duration & 0xFF000000) >> 24, (duration & 0xFF0000) >> 16, (duration & 0xFF00) >> 8, duration & 0xFF,// duration0x00, 0x01, 0x00, 0x00,// 1.0 rate0x01, 0x00,// 1.0 volume0x00, 0x00,// reserved0x00, 0x00, 0x00, 0x00,// reserved0x00, 0x00, 0x00, 0x00,// reserved0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x40, 0x00, 0x00, 0x00,// transformation: unity matrix0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,// pre_defined0xff, 0xff, 0xff, 0xff // next_track_ID]);return box(types.mvhd, bytes);};sdtp = function (track) {var samples = track.samples || [],bytes = new Uint8Array(4 + samples.length),flags,i; // leave the full box header (4 bytes) all zero// write the sample tablefor (i = 0; i < samples.length; i++) {flags = samples[i].flags;bytes[i + 4] = flags.dependsOn << 4 | flags.isDependedOn << 2 | flags.hasRedundancy;}return box(types.sdtp, bytes);};stbl = function (track) {return box(types.stbl, stsd(track), box(types.stts, STTS), box(types.stsc, STSC), box(types.stsz, STSZ), box(types.stco, STCO));};(function () {var videoSample, audioSample;stsd = function (track) {return box(types.stsd, new Uint8Array([0x00,// version 00x00, 0x00, 0x00,// flags0x00, 0x00, 0x00, 0x01]), track.type === 'video' ? 
videoSample(track) : audioSample(track));};videoSample = function (track) {var sps = track.sps || [],pps = track.pps || [],sequenceParameterSets = [],pictureParameterSets = [],i,avc1Box; // assemble the SPSsfor (i = 0; i < sps.length; i++) {sequenceParameterSets.push((sps[i].byteLength & 0xFF00) >>> 8);sequenceParameterSets.push(sps[i].byteLength & 0xFF); // sequenceParameterSetLengthsequenceParameterSets = sequenceParameterSets.concat(Array.prototype.slice.call(sps[i])); // SPS} // assemble the PPSsfor (i = 0; i < pps.length; i++) {pictureParameterSets.push((pps[i].byteLength & 0xFF00) >>> 8);pictureParameterSets.push(pps[i].byteLength & 0xFF);pictureParameterSets = pictureParameterSets.concat(Array.prototype.slice.call(pps[i]));}avc1Box = [types.avc1, new Uint8Array([0x00, 0x00, 0x00, 0x00, 0x00, 0x00,// reserved0x00, 0x01,// data_reference_index0x00, 0x00,// pre_defined0x00, 0x00,// reserved0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,// pre_defined(track.width & 0xff00) >> 8, track.width & 0xff,// width(track.height & 0xff00) >> 8, track.height & 0xff,// height0x00, 0x48, 0x00, 0x00,// horizresolution0x00, 0x48, 0x00, 0x00,// vertresolution0x00, 0x00, 0x00, 0x00,// reserved0x00, 0x01,// frame_count0x13, 0x76, 0x69, 0x64, 0x65, 0x6f, 0x6a, 0x73, 0x2d, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x69, 0x62, 0x2d, 0x68, 0x6c, 0x73, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,// compressorname0x00, 0x18,// depth = 240x11, 0x11 // pre_defined = -1]), box(types.avcC, new Uint8Array([0x01,// configurationVersiontrack.profileIdc,// AVCProfileIndicationtrack.profileCompatibility,// profile_compatibilitytrack.levelIdc,// AVCLevelIndication0xff // lengthSizeMinusOne, hard-coded to 4 bytes].concat([sps.length],// numOfSequenceParameterSetssequenceParameterSets,// "SPS"[pps.length],// numOfPictureParameterSetspictureParameterSets // "PPS"))), box(types.btrt, new Uint8Array([0x00, 0x1c, 0x9c, 0x80,// bufferSizeDB0x00, 0x2d, 0xc6, 0xc0,// maxBitrate0x00, 0x2d, 0xc6, 0xc0 // avgBitrate]))];if (track.sarRatio) {var hSpacing = track.sarRatio[0],vSpacing = track.sarRatio[1];avc1Box.push(box(types.pasp, new Uint8Array([(hSpacing & 0xFF000000) >> 24, (hSpacing & 0xFF0000) >> 16, (hSpacing & 0xFF00) >> 8, hSpacing & 0xFF, (vSpacing & 0xFF000000) >> 24, (vSpacing & 0xFF0000) >> 16, (vSpacing & 0xFF00) >> 8, vSpacing & 0xFF])));}return box.apply(null, avc1Box);};audioSample = function (track) {return box(types.mp4a, new Uint8Array([// SampleEntry, ISO/IEC 14496-120x00, 0x00, 0x00, 0x00, 0x00, 0x00,// reserved0x00, 0x01,// data_reference_index// AudioSampleEntry, ISO/IEC 14496-120x00, 0x00, 0x00, 0x00,// reserved0x00, 0x00, 0x00, 0x00,// reserved(track.channelcount & 0xff00) >> 8, track.channelcount & 0xff,// channelcount(track.samplesize & 0xff00) >> 8, track.samplesize & 0xff,// samplesize0x00, 0x00,// pre_defined0x00, 0x00,// reserved(track.samplerate & 0xff00) >> 8, track.samplerate & 0xff, 0x00, 0x00 // samplerate, 16.16// MP4AudioSampleEntry, ISO/IEC 14496-14]), esds(track));};})();tkhd = function (track) {var result = new Uint8Array([0x00,// version 00x00, 0x00, 0x07,// flags0x00, 0x00, 0x00, 0x00,// creation_time0x00, 0x00, 0x00, 0x00,// modification_time(track.id & 0xFF000000) >> 24, (track.id & 0xFF0000) >> 16, (track.id & 0xFF00) >> 8, track.id & 0xFF,// track_ID0x00, 0x00, 0x00, 0x00,// reserved(track.duration & 0xFF000000) >> 24, (track.duration & 0xFF0000) >> 16, (track.duration & 0xFF00) >> 8, track.duration & 0xFF,// duration0x00, 0x00, 0x00, 0x00, 0x00, 
0x00, 0x00, 0x00,// reserved0x00, 0x00,// layer0x00, 0x00,// alternate_group0x01, 0x00,// non-audio track volume0x00, 0x00,// reserved0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x40, 0x00, 0x00, 0x00,// transformation: unity matrix(track.width & 0xFF00) >> 8, track.width & 0xFF, 0x00, 0x00,// width(track.height & 0xFF00) >> 8, track.height & 0xFF, 0x00, 0x00 // height]);return box(types.tkhd, result);};/*** Generate a track fragment (traf) box. A traf box collects metadata* about tracks in a movie fragment (moof) box.*/traf = function (track) {var trackFragmentHeader, trackFragmentDecodeTime, trackFragmentRun, sampleDependencyTable, dataOffset, upperWordBaseMediaDecodeTime, lowerWordBaseMediaDecodeTime;trackFragmentHeader = box(types.tfhd, new Uint8Array([0x00,// version 00x00, 0x00, 0x3a,// flags(track.id & 0xFF000000) >> 24, (track.id & 0xFF0000) >> 16, (track.id & 0xFF00) >> 8, track.id & 0xFF,// track_ID0x00, 0x00, 0x00, 0x01,// sample_description_index0x00, 0x00, 0x00, 0x00,// default_sample_duration0x00, 0x00, 0x00, 0x00,// default_sample_size0x00, 0x00, 0x00, 0x00 // default_sample_flags]));upperWordBaseMediaDecodeTime = Math.floor(track.baseMediaDecodeTime / MAX_UINT32);lowerWordBaseMediaDecodeTime = Math.floor(track.baseMediaDecodeTime % MAX_UINT32);trackFragmentDecodeTime = box(types.tfdt, new Uint8Array([0x01,// version 10x00, 0x00, 0x00,// flags// baseMediaDecodeTimeupperWordBaseMediaDecodeTime >>> 24 & 0xFF, upperWordBaseMediaDecodeTime >>> 16 & 0xFF, upperWordBaseMediaDecodeTime >>> 8 & 0xFF, upperWordBaseMediaDecodeTime & 0xFF, lowerWordBaseMediaDecodeTime >>> 24 & 0xFF, lowerWordBaseMediaDecodeTime >>> 16 & 0xFF, lowerWordBaseMediaDecodeTime >>> 8 & 0xFF, lowerWordBaseMediaDecodeTime & 0xFF])); // the data offset specifies the number of bytes from the start of// the containing moof to the first payload byte of the associated// mdatdataOffset = 32 +// tfhd20 +// tfdt8 +// traf header16 +// mfhd8 +// moof header8; // mdat header// audio tracks require less metadataif (track.type === 'audio') {trackFragmentRun = trun$1(track, dataOffset);return box(types.traf, trackFragmentHeader, trackFragmentDecodeTime, trackFragmentRun);} // video tracks should contain an independent and disposable samples// box (sdtp)// generate one and adjust offsets to matchsampleDependencyTable = sdtp(track);trackFragmentRun = trun$1(track, sampleDependencyTable.length + dataOffset);return box(types.traf, trackFragmentHeader, trackFragmentDecodeTime, trackFragmentRun, sampleDependencyTable);};/*** Generate a track box.* @param track {object} a track definition* @return {Uint8Array} the track box*/trak = function (track) {track.duration = track.duration || 0xffffffff;return box(types.trak, tkhd(track), mdia(track));};trex = function (track) {var result = new Uint8Array([0x00,// version 00x00, 0x00, 0x00,// flags(track.id & 0xFF000000) >> 24, (track.id & 0xFF0000) >> 16, (track.id & 0xFF00) >> 8, track.id & 0xFF,// track_ID0x00, 0x00, 0x00, 0x01,// default_sample_description_index0x00, 0x00, 0x00, 0x00,// default_sample_duration0x00, 0x00, 0x00, 0x00,// default_sample_size0x00, 0x01, 0x00, 0x01 // default_sample_flags]); // the last two bytes of default_sample_flags is the sample// degradation priority, a hint about the importance of this sample// relative to others. 
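/* (Editorial note, not part of the original source.) default_sample_flags above ends
 * in 0x00, 0x01: the final two bytes carry the sample degradation priority. For any
 * non-video track the last byte is zeroed below, so audio tracks advertise a lower
 * (zero) degradation priority than video tracks.
 */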
Lower the degradation priority for all sample// types other than video.if (track.type !== 'video') {result[result.length - 1] = 0x00;}return box(types.trex, result);};(function () {var audioTrun, videoTrun, trunHeader; // This method assumes all samples are uniform. That is, if a// duration is present for the first sample, it will be present for// all subsequent samples.// see ISO/IEC 14496-12:2012, Section 8.8.8.1trunHeader = function (samples, offset) {var durationPresent = 0,sizePresent = 0,flagsPresent = 0,compositionTimeOffset = 0; // trun flag constantsif (samples.length) {if (samples[0].duration !== undefined) {durationPresent = 0x1;}if (samples[0].size !== undefined) {sizePresent = 0x2;}if (samples[0].flags !== undefined) {flagsPresent = 0x4;}if (samples[0].compositionTimeOffset !== undefined) {compositionTimeOffset = 0x8;}}return [0x00,// version 00x00, durationPresent | sizePresent | flagsPresent | compositionTimeOffset, 0x01,// flags(samples.length & 0xFF000000) >>> 24, (samples.length & 0xFF0000) >>> 16, (samples.length & 0xFF00) >>> 8, samples.length & 0xFF,// sample_count(offset & 0xFF000000) >>> 24, (offset & 0xFF0000) >>> 16, (offset & 0xFF00) >>> 8, offset & 0xFF // data_offset];};videoTrun = function (track, offset) {var bytesOffest, bytes, header, samples, sample, i;samples = track.samples || [];offset += 8 + 12 + 16 * samples.length;header = trunHeader(samples, offset);bytes = new Uint8Array(header.length + samples.length * 16);bytes.set(header);bytesOffest = header.length;for (i = 0; i < samples.length; i++) {sample = samples[i];bytes[bytesOffest++] = (sample.duration & 0xFF000000) >>> 24;bytes[bytesOffest++] = (sample.duration & 0xFF0000) >>> 16;bytes[bytesOffest++] = (sample.duration & 0xFF00) >>> 8;bytes[bytesOffest++] = sample.duration & 0xFF; // sample_durationbytes[bytesOffest++] = (sample.size & 0xFF000000) >>> 24;bytes[bytesOffest++] = (sample.size & 0xFF0000) >>> 16;bytes[bytesOffest++] = (sample.size & 0xFF00) >>> 8;bytes[bytesOffest++] = sample.size & 0xFF; // sample_sizebytes[bytesOffest++] = sample.flags.isLeading << 2 | sample.flags.dependsOn;bytes[bytesOffest++] = sample.flags.isDependedOn << 6 | sample.flags.hasRedundancy << 4 | sample.flags.paddingValue << 1 | sample.flags.isNonSyncSample;bytes[bytesOffest++] = sample.flags.degradationPriority & 0xF0 << 8;bytes[bytesOffest++] = sample.flags.degradationPriority & 0x0F; // sample_flagsbytes[bytesOffest++] = (sample.compositionTimeOffset & 0xFF000000) >>> 24;bytes[bytesOffest++] = (sample.compositionTimeOffset & 0xFF0000) >>> 16;bytes[bytesOffest++] = (sample.compositionTimeOffset & 0xFF00) >>> 8;bytes[bytesOffest++] = sample.compositionTimeOffset & 0xFF; // sample_composition_time_offset}return box(types.trun, bytes);};audioTrun = function (track, offset) {var bytes, bytesOffest, header, samples, sample, i;samples = track.samples || [];offset += 8 + 12 + 8 * samples.length;header = trunHeader(samples, offset);bytes = new Uint8Array(header.length + samples.length * 8);bytes.set(header);bytesOffest = header.length;for (i = 0; i < samples.length; i++) {sample = samples[i];bytes[bytesOffest++] = (sample.duration & 0xFF000000) >>> 24;bytes[bytesOffest++] = (sample.duration & 0xFF0000) >>> 16;bytes[bytesOffest++] = (sample.duration & 0xFF00) >>> 8;bytes[bytesOffest++] = sample.duration & 0xFF; // sample_durationbytes[bytesOffest++] = (sample.size & 0xFF000000) >>> 24;bytes[bytesOffest++] = (sample.size & 0xFF0000) >>> 16;bytes[bytesOffest++] = (sample.size & 0xFF00) >>> 8;bytes[bytesOffest++] = sample.size & 
0xFF; // sample_size}return box(types.trun, bytes);};trun$1 = function (track, offset) {if (track.type === 'audio') {return audioTrun(track, offset);}return videoTrun(track, offset);};})();var mp4Generator = {ftyp: ftyp,mdat: mdat,moof: moof,moov: moov,initSegment: function (tracks) {var fileType = ftyp(),movie = moov(tracks),result;result = new Uint8Array(fileType.byteLength + movie.byteLength);result.set(fileType);result.set(movie, fileType.byteLength);return result;}};/*** mux.js** Copyright (c) Brightcove* Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE*/// composed of the nal units that make up that frame// Also keep track of cummulative data about the frame from the nal units such// as the frame duration, starting pts, etc.var groupNalsIntoFrames = function (nalUnits) {var i,currentNal,currentFrame = [],frames = []; // TODO added for LHLS, make sure this is OKframes.byteLength = 0;frames.nalCount = 0;frames.duration = 0;currentFrame.byteLength = 0;for (i = 0; i < nalUnits.length; i++) {currentNal = nalUnits[i]; // Split on 'aud'-type nal unitsif (currentNal.nalUnitType === 'access_unit_delimiter_rbsp') {// Since the very first nal unit is expected to be an AUD// only push to the frames array when currentFrame is not emptyif (currentFrame.length) {currentFrame.duration = currentNal.dts - currentFrame.dts; // TODO added for LHLS, make sure this is OKframes.byteLength += currentFrame.byteLength;frames.nalCount += currentFrame.length;frames.duration += currentFrame.duration;frames.push(currentFrame);}currentFrame = [currentNal];currentFrame.byteLength = currentNal.data.byteLength;currentFrame.pts = currentNal.pts;currentFrame.dts = currentNal.dts;} else {// Specifically flag key frames for ease of use laterif (currentNal.nalUnitType === 'slice_layer_without_partitioning_rbsp_idr') {currentFrame.keyFrame = true;}currentFrame.duration = currentNal.dts - currentFrame.dts;currentFrame.byteLength += currentNal.data.byteLength;currentFrame.push(currentNal);}} // For the last frame, use the duration of the previous frame if we// have nothing better to go onif (frames.length && (!currentFrame.duration || currentFrame.duration <= 0)) {currentFrame.duration = frames[frames.length - 1].duration;} // Push the final frame// TODO added for LHLS, make sure this is OKframes.byteLength += currentFrame.byteLength;frames.nalCount += currentFrame.length;frames.duration += currentFrame.duration;frames.push(currentFrame);return frames;}; // Convert an array of frames into an array of Gop with each Gop being composed// of the frames that make up that Gop// Also keep track of cummulative data about the Gop from the frames such as the// Gop duration, starting pts, etc.var groupFramesIntoGops = function (frames) {var i,currentFrame,currentGop = [],gops = []; // We must pre-set some of the values on the Gop since we// keep running totals of these valuescurrentGop.byteLength = 0;currentGop.nalCount = 0;currentGop.duration = 0;currentGop.pts = frames[0].pts;currentGop.dts = frames[0].dts; // store some metadata about all the Gopsgops.byteLength = 0;gops.nalCount = 0;gops.duration = 0;gops.pts = frames[0].pts;gops.dts = frames[0].dts;for (i = 0; i < frames.length; i++) {currentFrame = frames[i];if (currentFrame.keyFrame) {// Since the very first frame is expected to be an keyframe// only push to the gops array when currentGop is not emptyif (currentGop.length) {gops.push(currentGop);gops.byteLength += currentGop.byteLength;gops.nalCount += currentGop.nalCount;gops.duration += 
currentGop.duration;}currentGop = [currentFrame];currentGop.nalCount = currentFrame.length;currentGop.byteLength = currentFrame.byteLength;currentGop.pts = currentFrame.pts;currentGop.dts = currentFrame.dts;currentGop.duration = currentFrame.duration;} else {currentGop.duration += currentFrame.duration;currentGop.nalCount += currentFrame.length;currentGop.byteLength += currentFrame.byteLength;currentGop.push(currentFrame);}}if (gops.length && currentGop.duration <= 0) {currentGop.duration = gops[gops.length - 1].duration;}gops.byteLength += currentGop.byteLength;gops.nalCount += currentGop.nalCount;gops.duration += currentGop.duration; // push the final Gopgops.push(currentGop);return gops;};/** Search for the first keyframe in the GOPs and throw away all frames* until that keyframe. Then extend the duration of the pulled keyframe* and pull the PTS and DTS of the keyframe so that it covers the time* range of the frames that were disposed.** @param {Array} gops video GOPs* @returns {Array} modified video GOPs*/var extendFirstKeyFrame = function (gops) {var currentGop;if (!gops[0][0].keyFrame && gops.length > 1) {// Remove the first GOPcurrentGop = gops.shift();gops.byteLength -= currentGop.byteLength;gops.nalCount -= currentGop.nalCount; // Extend the first frame of what is now the// first gop to cover the time period of the// frames we just removedgops[0][0].dts = currentGop.dts;gops[0][0].pts = currentGop.pts;gops[0][0].duration += currentGop.duration;}return gops;};/*** Default sample object* see ISO/IEC 14496-12:2012, section 8.6.4.3*/var createDefaultSample = function () {return {size: 0,flags: {isLeading: 0,dependsOn: 1,isDependedOn: 0,hasRedundancy: 0,degradationPriority: 0,isNonSyncSample: 1}};};/** Collates information from a video frame into an object for eventual* entry into an MP4 sample table.** @param {Object} frame the video frame* @param {Number} dataOffset the byte offset to position the sample* @return {Object} object containing sample table info for a frame*/var sampleForFrame = function (frame, dataOffset) {var sample = createDefaultSample();sample.dataOffset = dataOffset;sample.compositionTimeOffset = frame.pts - frame.dts;sample.duration = frame.duration;sample.size = 4 * frame.length; // Space for nal unit sizesample.size += frame.byteLength;if (frame.keyFrame) {sample.flags.dependsOn = 2;sample.flags.isNonSyncSample = 0;}return sample;}; // generate the track's sample table from an array of gopsvar generateSampleTable$1 = function (gops, baseDataOffset) {var h,i,sample,currentGop,currentFrame,dataOffset = baseDataOffset || 0,samples = [];for (h = 0; h < gops.length; h++) {currentGop = gops[h];for (i = 0; i < currentGop.length; i++) {currentFrame = currentGop[i];sample = sampleForFrame(currentFrame, dataOffset);dataOffset += sample.size;samples.push(sample);}}return samples;}; // generate the track's raw mdat data from an array of gopsvar concatenateNalData = function (gops) {var h,i,j,currentGop,currentFrame,currentNal,dataOffset = 0,nalsByteLength = gops.byteLength,numberOfNals = gops.nalCount,totalByteLength = nalsByteLength + 4 * numberOfNals,data = new Uint8Array(totalByteLength),view = new DataView(data.buffer); // For each Gop..for (h = 0; h < gops.length; h++) {currentGop = gops[h]; // For each Frame..for (i = 0; i < currentGop.length; i++) {currentFrame = currentGop[i]; // For each NAL..for (j = 0; j < currentFrame.length; j++) {currentNal = currentFrame[j];view.setUint32(dataOffset, currentNal.data.byteLength);dataOffset += 4;data.set(currentNal.data, 
dataOffset);dataOffset += currentNal.data.byteLength;}}}return data;}; // generate the track's sample table from a framevar generateSampleTableForFrame = function (frame, baseDataOffset) {var sample,dataOffset = baseDataOffset || 0,samples = [];sample = sampleForFrame(frame, dataOffset);samples.push(sample);return samples;}; // generate the track's raw mdat data from a framevar concatenateNalDataForFrame = function (frame) {var i,currentNal,dataOffset = 0,nalsByteLength = frame.byteLength,numberOfNals = frame.length,totalByteLength = nalsByteLength + 4 * numberOfNals,data = new Uint8Array(totalByteLength),view = new DataView(data.buffer); // For each NAL..for (i = 0; i < frame.length; i++) {currentNal = frame[i];view.setUint32(dataOffset, currentNal.data.byteLength);dataOffset += 4;data.set(currentNal.data, dataOffset);dataOffset += currentNal.data.byteLength;}return data;};var frameUtils$1 = {groupNalsIntoFrames: groupNalsIntoFrames,groupFramesIntoGops: groupFramesIntoGops,extendFirstKeyFrame: extendFirstKeyFrame,generateSampleTable: generateSampleTable$1,concatenateNalData: concatenateNalData,generateSampleTableForFrame: generateSampleTableForFrame,concatenateNalDataForFrame: concatenateNalDataForFrame};/*** mux.js** Copyright (c) Brightcove* Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE*/var highPrefix = [33, 16, 5, 32, 164, 27];var lowPrefix = [33, 65, 108, 84, 1, 2, 4, 8, 168, 2, 4, 8, 17, 191, 252];var zeroFill = function (count) {var a = [];while (count--) {a.push(0);}return a;};var makeTable = function (metaTable) {return Object.keys(metaTable).reduce(function (obj, key) {obj[key] = new Uint8Array(metaTable[key].reduce(function (arr, part) {return arr.concat(part);}, []));return obj;}, {});};var silence;var silence_1 = function () {if (!silence) {// Frames-of-silence to use for filling in missing AAC framesvar coneOfSilence = {96000: [highPrefix, [227, 64], zeroFill(154), [56]],88200: [highPrefix, [231], zeroFill(170), [56]],64000: [highPrefix, [248, 192], zeroFill(240), [56]],48000: [highPrefix, [255, 192], zeroFill(268), [55, 148, 128], zeroFill(54), [112]],44100: [highPrefix, [255, 192], zeroFill(268), [55, 163, 128], zeroFill(84), [112]],32000: [highPrefix, [255, 192], zeroFill(268), [55, 234], zeroFill(226), [112]],24000: [highPrefix, [255, 192], zeroFill(268), [55, 255, 128], zeroFill(268), [111, 112], zeroFill(126), [224]],16000: [highPrefix, [255, 192], zeroFill(268), [55, 255, 128], zeroFill(268), [111, 255], zeroFill(269), [223, 108], zeroFill(195), [1, 192]],12000: [lowPrefix, zeroFill(268), [3, 127, 248], zeroFill(268), [6, 255, 240], zeroFill(268), [13, 255, 224], zeroFill(268), [27, 253, 128], zeroFill(259), [56]],11025: [lowPrefix, zeroFill(268), [3, 127, 248], zeroFill(268), [6, 255, 240], zeroFill(268), [13, 255, 224], zeroFill(268), [27, 255, 192], zeroFill(268), [55, 175, 128], zeroFill(108), [112]],8000: [lowPrefix, zeroFill(268), [3, 121, 16], zeroFill(47), [7]]};silence = makeTable(coneOfSilence);}return silence;};/*** mux.js** Copyright (c) Brightcove* Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE*/var ONE_SECOND_IN_TS$4 = 90000,// 90kHz clocksecondsToVideoTs,secondsToAudioTs,videoTsToSeconds,audioTsToSeconds,audioTsToVideoTs,videoTsToAudioTs,metadataTsToSeconds;secondsToVideoTs = function (seconds) {return seconds * ONE_SECOND_IN_TS$4;};secondsToAudioTs = function (seconds, sampleRate) {return seconds * sampleRate;};videoTsToSeconds = function (timestamp) {return timestamp / 
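/*
 * The clock helpers in this module convert between seconds, the MPEG-TS
 * 90 kHz clock, and the audio sample-rate clock. A few worked values,
 * assuming a hypothetical 48 kHz audio track:
 *
 *   var ONE_SECOND_IN_TS = 90000;                              // 90 kHz clock
 *   var seconds = 2.5;
 *   var videoTs = seconds * ONE_SECOND_IN_TS;                  // 225000 ticks
 *   var audioTs = seconds * 48000;                             // 120000 samples
 *   var backToSeconds = videoTs / ONE_SECOND_IN_TS;            // 2.5
 *   var audioToVideoTs = (audioTs / 48000) * ONE_SECOND_IN_TS; // 225000
 */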
ONE_SECOND_IN_TS$4;};audioTsToSeconds = function (timestamp, sampleRate) {return timestamp / sampleRate;};audioTsToVideoTs = function (timestamp, sampleRate) {return secondsToVideoTs(audioTsToSeconds(timestamp, sampleRate));};videoTsToAudioTs = function (timestamp, sampleRate) {return secondsToAudioTs(videoTsToSeconds(timestamp), sampleRate);};/*** Adjust ID3 tag or caption timing information by the timeline pts values* (if keepOriginalTimestamps is false) and convert to seconds*/metadataTsToSeconds = function (timestamp, timelineStartPts, keepOriginalTimestamps) {return videoTsToSeconds(keepOriginalTimestamps ? timestamp : timestamp - timelineStartPts);};var clock$2 = {ONE_SECOND_IN_TS: ONE_SECOND_IN_TS$4,secondsToVideoTs: secondsToVideoTs,secondsToAudioTs: secondsToAudioTs,videoTsToSeconds: videoTsToSeconds,audioTsToSeconds: audioTsToSeconds,audioTsToVideoTs: audioTsToVideoTs,videoTsToAudioTs: videoTsToAudioTs,metadataTsToSeconds: metadataTsToSeconds};/*** mux.js** Copyright (c) Brightcove* Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE*/var coneOfSilence = silence_1;var clock$1 = clock$2;/*** Sum the `byteLength` properties of the data in each AAC frame*/var sumFrameByteLengths = function (array) {var i,currentObj,sum = 0; // sum the byteLength's all each nal unit in the framefor (i = 0; i < array.length; i++) {currentObj = array[i];sum += currentObj.data.byteLength;}return sum;}; // Possibly pad (prefix) the audio track with silence if appending this track// would lead to the introduction of a gap in the audio buffervar prefixWithSilence = function (track, frames, audioAppendStartTs, videoBaseMediaDecodeTime) {var baseMediaDecodeTimeTs,frameDuration = 0,audioGapDuration = 0,audioFillFrameCount = 0,audioFillDuration = 0,silentFrame,i,firstFrame;if (!frames.length) {return;}baseMediaDecodeTimeTs = clock$1.audioTsToVideoTs(track.baseMediaDecodeTime, track.samplerate); // determine frame clock duration based on sample rate, round up to avoid overfillsframeDuration = Math.ceil(clock$1.ONE_SECOND_IN_TS / (track.samplerate / 1024));if (audioAppendStartTs && videoBaseMediaDecodeTime) {// insert the shortest possible amount (audio gap or audio to video gap)audioGapDuration = baseMediaDecodeTimeTs - Math.max(audioAppendStartTs, videoBaseMediaDecodeTime); // number of full frames in the audio gapaudioFillFrameCount = Math.floor(audioGapDuration / frameDuration);audioFillDuration = audioFillFrameCount * frameDuration;} // don't attempt to fill gaps smaller than a single frame or larger// than a half secondif (audioFillFrameCount < 1 || audioFillDuration > clock$1.ONE_SECOND_IN_TS / 2) {return;}silentFrame = coneOfSilence()[track.samplerate];if (!silentFrame) {// we don't have a silent frame pregenerated for the sample rate, so use a frame// from the content insteadsilentFrame = frames[0].data;}for (i = 0; i < audioFillFrameCount; i++) {firstFrame = frames[0];frames.splice(0, 0, {data: silentFrame,dts: firstFrame.dts - frameDuration,pts: firstFrame.pts - frameDuration});}track.baseMediaDecodeTime -= Math.floor(clock$1.videoTsToAudioTs(audioFillDuration, track.samplerate));return audioFillDuration;}; // If the audio segment extends before the earliest allowed dts// value, remove AAC frames until starts at or after the earliest// allowed DTS so that we don't end up with a negative baseMedia-// DecodeTime for the audio trackvar trimAdtsFramesByEarliestDts = function (adtsFrames, track, earliestAllowedDts) {if (track.minSegmentDts >= earliestAllowedDts) {return adtsFrames;} 
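/*
 * prefixWithSilence above only pads the audio buffer when the gap is at least
 * one AAC frame long and no more than half a second, using a per-frame
 * duration derived from the sample rate (each AAC frame holds 1024 samples).
 * The arithmetic, with a hypothetical 44.1 kHz track and a 0.1 second gap:
 *
 *   var ONE_SECOND_IN_TS = 90000;
 *   var samplerate = 44100;
 *   var frameDuration = Math.ceil(ONE_SECOND_IN_TS / (samplerate / 1024)); // 2090
 *   var audioGapDuration = 0.1 * ONE_SECOND_IN_TS;                         // 9000
 *   var audioFillFrameCount = Math.floor(audioGapDuration / frameDuration); // 4
 *   var audioFillDuration = audioFillFrameCount * frameDuration;            // 8360
 *   // four silent frames are spliced in front of the segment and
 *   // baseMediaDecodeTime is shifted back by the equivalent audio ticks
 */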
// We will need to recalculate the earliest segment Dtstrack.minSegmentDts = Infinity;return adtsFrames.filter(function (currentFrame) {// If this is an allowed frame, keep it and record it's Dtsif (currentFrame.dts >= earliestAllowedDts) {track.minSegmentDts = Math.min(track.minSegmentDts, currentFrame.dts);track.minSegmentPts = track.minSegmentDts;return true;} // Otherwise, discard itreturn false;});}; // generate the track's raw mdat data from an array of framesvar generateSampleTable = function (frames) {var i,currentFrame,samples = [];for (i = 0; i < frames.length; i++) {currentFrame = frames[i];samples.push({size: currentFrame.data.byteLength,duration: 1024 // For AAC audio, all samples contain 1024 samples});}return samples;}; // generate the track's sample table from an array of framesvar concatenateFrameData = function (frames) {var i,currentFrame,dataOffset = 0,data = new Uint8Array(sumFrameByteLengths(frames));for (i = 0; i < frames.length; i++) {currentFrame = frames[i];data.set(currentFrame.data, dataOffset);dataOffset += currentFrame.data.byteLength;}return data;};var audioFrameUtils$1 = {prefixWithSilence: prefixWithSilence,trimAdtsFramesByEarliestDts: trimAdtsFramesByEarliestDts,generateSampleTable: generateSampleTable,concatenateFrameData: concatenateFrameData};/*** mux.js** Copyright (c) Brightcove* Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE*/var ONE_SECOND_IN_TS$3 = clock$2.ONE_SECOND_IN_TS;/*** Store information about the start and end of the track and the* duration for each frame/sample we process in order to calculate* the baseMediaDecodeTime*/var collectDtsInfo = function (track, data) {if (typeof data.pts === 'number') {if (track.timelineStartInfo.pts === undefined) {track.timelineStartInfo.pts = data.pts;}if (track.minSegmentPts === undefined) {track.minSegmentPts = data.pts;} else {track.minSegmentPts = Math.min(track.minSegmentPts, data.pts);}if (track.maxSegmentPts === undefined) {track.maxSegmentPts = data.pts;} else {track.maxSegmentPts = Math.max(track.maxSegmentPts, data.pts);}}if (typeof data.dts === 'number') {if (track.timelineStartInfo.dts === undefined) {track.timelineStartInfo.dts = data.dts;}if (track.minSegmentDts === undefined) {track.minSegmentDts = data.dts;} else {track.minSegmentDts = Math.min(track.minSegmentDts, data.dts);}if (track.maxSegmentDts === undefined) {track.maxSegmentDts = data.dts;} else {track.maxSegmentDts = Math.max(track.maxSegmentDts, data.dts);}}};/*** Clear values used to calculate the baseMediaDecodeTime between* tracks*/var clearDtsInfo = function (track) {delete track.minSegmentDts;delete track.maxSegmentDts;delete track.minSegmentPts;delete track.maxSegmentPts;};/*** Calculate the track's baseMediaDecodeTime based on the earliest* DTS the transmuxer has ever seen and the minimum DTS for the* current track* @param track {object} track metadata configuration* @param keepOriginalTimestamps {boolean} If true, keep the timestamps* in the source; false to adjust the first segment to start at 0.*/var calculateTrackBaseMediaDecodeTime = function (track, keepOriginalTimestamps) {var baseMediaDecodeTime,scale,minSegmentDts = track.minSegmentDts; // Optionally adjust the time so the first segment starts at zero.if (!keepOriginalTimestamps) {minSegmentDts -= track.timelineStartInfo.dts;} // track.timelineStartInfo.baseMediaDecodeTime is the location, in time, where// we want the start of the first segment to be placedbaseMediaDecodeTime = track.timelineStartInfo.baseMediaDecodeTime; // Add to that the 
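/*
 * For audio tracks the baseMediaDecodeTime computed here is rescaled from the
 * 90 kHz clock into the track's sample-rate clock. A sketch with hypothetical
 * numbers (keepOriginalTimestamps === false, 44.1 kHz audio,
 * timelineStartInfo.baseMediaDecodeTime === 0):
 *
 *   var timelineStartDts = 900000;  // first DTS the transmuxer ever saw
 *   var minSegmentDts = 1080000;    // earliest DTS in the current segment
 *   var baseMediaDecodeTime = 0 + (minSegmentDts - timelineStartDts); // 180000
 *   baseMediaDecodeTime = Math.max(0, baseMediaDecodeTime);
 *   baseMediaDecodeTime = Math.floor(baseMediaDecodeTime * (44100 / 90000)); // 88200
 */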
distance this segment is from the very firstbaseMediaDecodeTime += minSegmentDts; // baseMediaDecodeTime must not become negativebaseMediaDecodeTime = Math.max(0, baseMediaDecodeTime);if (track.type === 'audio') {// Audio has a different clock equal to the sampling_rate so we need to// scale the PTS values into the clock rate of the trackscale = track.samplerate / ONE_SECOND_IN_TS$3;baseMediaDecodeTime *= scale;baseMediaDecodeTime = Math.floor(baseMediaDecodeTime);}return baseMediaDecodeTime;};var trackDecodeInfo$1 = {clearDtsInfo: clearDtsInfo,calculateTrackBaseMediaDecodeTime: calculateTrackBaseMediaDecodeTime,collectDtsInfo: collectDtsInfo};/*** mux.js** Copyright (c) Brightcove* Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE** Reads in-band caption information from a video elementary* stream. Captions must follow the CEA-708 standard for injection* into an MPEG-2 transport streams.* @see https://en.wikipedia.org/wiki/CEA-708* @see https://www.gpo.gov/fdsys/pkg/CFR-2007-title47-vol1/pdf/CFR-2007-title47-vol1-sec15-119.pdf*/// payload type field to indicate how they are to be// interpreted. CEAS-708 caption content is always transmitted with// payload type 0x04.var USER_DATA_REGISTERED_ITU_T_T35 = 4,RBSP_TRAILING_BITS = 128;/*** Parse a supplemental enhancement information (SEI) NAL unit.* Stops parsing once a message of type ITU T T35 has been found.** @param bytes {Uint8Array} the bytes of a SEI NAL unit* @return {object} the parsed SEI payload* @see Rec. ITU-T H.264, 7.3.2.3.1*/var parseSei = function (bytes) {var i = 0,result = {payloadType: -1,payloadSize: 0},payloadType = 0,payloadSize = 0; // go through the sei_rbsp parsing each each individual sei_messagewhile (i < bytes.byteLength) {// stop once we have hit the end of the sei_rbspif (bytes[i] === RBSP_TRAILING_BITS) {break;} // Parse payload typewhile (bytes[i] === 0xFF) {payloadType += 255;i++;}payloadType += bytes[i++]; // Parse payload sizewhile (bytes[i] === 0xFF) {payloadSize += 255;i++;}payloadSize += bytes[i++]; // this sei_message is a 608/708 caption so save it and break// there can only ever be one caption message in a frame's seiif (!result.payload && payloadType === USER_DATA_REGISTERED_ITU_T_T35) {var userIdentifier = String.fromCharCode(bytes[i + 3], bytes[i + 4], bytes[i + 5], bytes[i + 6]);if (userIdentifier === 'GA94') {result.payloadType = payloadType;result.payloadSize = payloadSize;result.payload = bytes.subarray(i, i + payloadSize);break;} else {result.payload = void 0;}} // skip the payload and parse the next messagei += payloadSize;payloadType = 0;payloadSize = 0;}return result;}; // see ANSI/SCTE 128-1 (2013), section 8.1var parseUserData = function (sei) {// itu_t_t35_contry_code must be 181 (United States) for// captionsif (sei.payload[0] !== 181) {return null;} // itu_t_t35_provider_code should be 49 (ATSC) for captionsif ((sei.payload[1] << 8 | sei.payload[2]) !== 49) {return null;} // the user_identifier should be "GA94" to indicate ATSC1 dataif (String.fromCharCode(sei.payload[3], sei.payload[4], sei.payload[5], sei.payload[6]) !== 'GA94') {return null;} // finally, user_data_type_code should be 0x03 for caption dataif (sei.payload[7] !== 0x03) {return null;} // return the user_data_type_structure and strip the trailing// marker bitsreturn sei.payload.subarray(8, sei.payload.length - 1);}; // see CEA-708-D, section 4.4var parseCaptionPackets = function (pts, userData) {var results = [],i,count,offset,data; // if this is just filler, return immediatelyif (!(userData[0] & 
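/*
 * parseSei above walks the sei_rbsp one message at a time: payloadType and
 * payloadSize are each encoded as a run of 0xFF bytes (each worth 255) plus a
 * final byte, and parsing stops at the RBSP trailing-bits marker (0x80).
 * A stand-alone reader for just those two fields (readSeiHeader is a
 * hypothetical helper name, not part of the mux.js API):
 *
 *   function readSeiHeader(bytes, i) {
 *     var payloadType = 0;
 *     var payloadSize = 0;
 *     while (bytes[i] === 0xFF) {
 *       payloadType += 255;
 *       i++;
 *     }
 *     payloadType += bytes[i++];
 *     while (bytes[i] === 0xFF) {
 *       payloadSize += 255;
 *       i++;
 *     }
 *     payloadSize += bytes[i++];
 *     return {payloadType: payloadType, payloadSize: payloadSize, payloadStart: i};
 *   }
 *   // readSeiHeader(new Uint8Array([0x04, 0xFF, 0x0A, 0x00]), 0)
 *   //   -> {payloadType: 4, payloadSize: 265, payloadStart: 3}
 */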
0x40)) {return results;} // parse out the cc_data_1 and cc_data_2 fieldscount = userData[0] & 0x1f;for (i = 0; i < count; i++) {offset = i * 3;data = {type: userData[offset + 2] & 0x03,pts: pts}; // capture cc data when cc_valid is 1if (userData[offset + 2] & 0x04) {data.ccData = userData[offset + 3] << 8 | userData[offset + 4];results.push(data);}}return results;};var discardEmulationPreventionBytes$1 = function (data) {var length = data.byteLength,emulationPreventionBytesPositions = [],i = 1,newLength,newData; // Find all `Emulation Prevention Bytes`while (i < length - 2) {if (data[i] === 0 && data[i + 1] === 0 && data[i + 2] === 0x03) {emulationPreventionBytesPositions.push(i + 2);i += 2;} else {i++;}} // If no Emulation Prevention Bytes were found just return the original// arrayif (emulationPreventionBytesPositions.length === 0) {return data;} // Create a new array to hold the NAL unit datanewLength = length - emulationPreventionBytesPositions.length;newData = new Uint8Array(newLength);var sourceIndex = 0;for (i = 0; i < newLength; sourceIndex++, i++) {if (sourceIndex === emulationPreventionBytesPositions[0]) {// Skip this bytesourceIndex++; // Remove this position indexemulationPreventionBytesPositions.shift();}newData[i] = data[sourceIndex];}return newData;}; // exportsvar captionPacketParser = {parseSei: parseSei,parseUserData: parseUserData,parseCaptionPackets: parseCaptionPackets,discardEmulationPreventionBytes: discardEmulationPreventionBytes$1,USER_DATA_REGISTERED_ITU_T_T35: USER_DATA_REGISTERED_ITU_T_T35};/*** mux.js** Copyright (c) Brightcove* Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE** Reads in-band caption information from a video elementary* stream. Captions must follow the CEA-708 standard for injection* into an MPEG-2 transport streams.* @see https://en.wikipedia.org/wiki/CEA-708* @see https://www.gpo.gov/fdsys/pkg/CFR-2007-title47-vol1/pdf/CFR-2007-title47-vol1-sec15-119.pdf*/// Link To Transport// -----------------var Stream$7 = stream;var cea708Parser = captionPacketParser;var CaptionStream$2 = function (options) {options = options || {};CaptionStream$2.prototype.init.call(this); // parse708captions flag, default to truethis.parse708captions_ = typeof options.parse708captions === 'boolean' ? 
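/*
 * discardEmulationPreventionBytes above strips the 0x03 byte that H.264
 * inserts after every 0x00 0x00 pair inside a NAL so the pair cannot be
 * mistaken for a start code. A hypothetical input and its expected output:
 *
 *   var escaped = new Uint8Array([0x65, 0x00, 0x00, 0x03, 0x01,
 *                                 0x00, 0x00, 0x03, 0x02]);
 *   // the 0x03 bytes at indices 3 and 7 are emulation prevention bytes;
 *   // stripping them yields the raw RBSP:
 *   //   [0x65, 0x00, 0x00, 0x01, 0x00, 0x00, 0x02]
 */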
options.parse708captions : true;this.captionPackets_ = [];this.ccStreams_ = [new Cea608Stream(0, 0),// eslint-disable-line no-use-before-definenew Cea608Stream(0, 1),// eslint-disable-line no-use-before-definenew Cea608Stream(1, 0),// eslint-disable-line no-use-before-definenew Cea608Stream(1, 1) // eslint-disable-line no-use-before-define];if (this.parse708captions_) {this.cc708Stream_ = new Cea708Stream({captionServices: options.captionServices}); // eslint-disable-line no-use-before-define}this.reset(); // forward data and done events from CCs to this CaptionStreamthis.ccStreams_.forEach(function (cc) {cc.on('data', this.trigger.bind(this, 'data'));cc.on('partialdone', this.trigger.bind(this, 'partialdone'));cc.on('done', this.trigger.bind(this, 'done'));}, this);if (this.parse708captions_) {this.cc708Stream_.on('data', this.trigger.bind(this, 'data'));this.cc708Stream_.on('partialdone', this.trigger.bind(this, 'partialdone'));this.cc708Stream_.on('done', this.trigger.bind(this, 'done'));}};CaptionStream$2.prototype = new Stream$7();CaptionStream$2.prototype.push = function (event) {var sei, userData, newCaptionPackets; // only examine SEI NALsif (event.nalUnitType !== 'sei_rbsp') {return;} // parse the seisei = cea708Parser.parseSei(event.escapedRBSP); // no payload data, skipif (!sei.payload) {return;} // ignore everything but user_data_registered_itu_t_t35if (sei.payloadType !== cea708Parser.USER_DATA_REGISTERED_ITU_T_T35) {return;} // parse out the user data payloaduserData = cea708Parser.parseUserData(sei); // ignore unrecognized userDataif (!userData) {return;} // Sometimes, the same segment # will be downloaded twice. To stop the// caption data from being processed twice, we track the latest dts we've// received and ignore everything with a dts before that. However, since// data for a specific dts can be split across packets on either side of// a segment boundary, we need to make sure we *don't* ignore the packets// from the *next* segment that have dts === this.latestDts_. By constantly// tracking the number of packets received with dts === this.latestDts_, we// know how many should be ignored once we start receiving duplicates.if (event.dts < this.latestDts_) {// We've started getting older data, so set the flag.this.ignoreNextEqualDts_ = true;return;} else if (event.dts === this.latestDts_ && this.ignoreNextEqualDts_) {this.numSameDts_--;if (!this.numSameDts_) {// We've received the last duplicate packet, time to start processing againthis.ignoreNextEqualDts_ = false;}return;} // parse out CC data packets and save them for laternewCaptionPackets = cea708Parser.parseCaptionPackets(event.pts, userData);this.captionPackets_ = this.captionPackets_.concat(newCaptionPackets);if (this.latestDts_ !== event.dts) {this.numSameDts_ = 0;}this.numSameDts_++;this.latestDts_ = event.dts;};CaptionStream$2.prototype.flushCCStreams = function (flushType) {this.ccStreams_.forEach(function (cc) {return flushType === 'flush' ? 
cc.flush() : cc.partialFlush();}, this);};CaptionStream$2.prototype.flushStream = function (flushType) {// make sure we actually parsed captions before proceedingif (!this.captionPackets_.length) {this.flushCCStreams(flushType);return;} // In Chrome, the Array#sort function is not stable so add a// presortIndex that we can use to ensure we get a stable-sortthis.captionPackets_.forEach(function (elem, idx) {elem.presortIndex = idx;}); // sort caption byte-pairs based on their PTS valuesthis.captionPackets_.sort(function (a, b) {if (a.pts === b.pts) {return a.presortIndex - b.presortIndex;}return a.pts - b.pts;});this.captionPackets_.forEach(function (packet) {if (packet.type < 2) {// Dispatch packet to the right Cea608Streamthis.dispatchCea608Packet(packet);} else {// Dispatch packet to the Cea708Streamthis.dispatchCea708Packet(packet);}}, this);this.captionPackets_.length = 0;this.flushCCStreams(flushType);};CaptionStream$2.prototype.flush = function () {return this.flushStream('flush');}; // Only called if handling partial dataCaptionStream$2.prototype.partialFlush = function () {return this.flushStream('partialFlush');};CaptionStream$2.prototype.reset = function () {this.latestDts_ = null;this.ignoreNextEqualDts_ = false;this.numSameDts_ = 0;this.activeCea608Channel_ = [null, null];this.ccStreams_.forEach(function (ccStream) {ccStream.reset();});}; // From the CEA-608 spec:/** When XDS sub-packets are interleaved with other services, the end of each sub-packet shall be followed* by a control pair to change to a different service. When any of the control codes from 0x10 to 0x1F is* used to begin a control code pair, it indicates the return to captioning or Text data. The control code pair* and subsequent data should then be processed according to the FCC rules. It may be necessary for the* line 21 data encoder to automatically insert a control code pair (i.e. RCL, RU2, RU3, RU4, RDC, or RTD)* to switch to captioning or Text.*/// With that in mind, we ignore any data between an XDS control code and a// subsequent closed-captioning control code.CaptionStream$2.prototype.dispatchCea608Packet = function (packet) {// NOTE: packet.type is the CEA608 fieldif (this.setsTextOrXDSActive(packet)) {this.activeCea608Channel_[packet.type] = null;} else if (this.setsChannel1Active(packet)) {this.activeCea608Channel_[packet.type] = 0;} else if (this.setsChannel2Active(packet)) {this.activeCea608Channel_[packet.type] = 1;}if (this.activeCea608Channel_[packet.type] === null) {// If we haven't received anything to set the active channel, or the// packets are Text/XDS data, discard the data; we don't want jumbled// captionsreturn;}this.ccStreams_[(packet.type << 1) + this.activeCea608Channel_[packet.type]].push(packet);};CaptionStream$2.prototype.setsChannel1Active = function (packet) {return (packet.ccData & 0x7800) === 0x1000;};CaptionStream$2.prototype.setsChannel2Active = function (packet) {return (packet.ccData & 0x7800) === 0x1800;};CaptionStream$2.prototype.setsTextOrXDSActive = function (packet) {return (packet.ccData & 0x7100) === 0x0100 || (packet.ccData & 0x78fe) === 0x102a || (packet.ccData & 0x78fe) === 0x182a;};CaptionStream$2.prototype.dispatchCea708Packet = function (packet) {if (this.parse708captions_) {this.cc708Stream_.push(packet);}}; // ----------------------// Session to Application// ----------------------// This hash maps special and extended character codes to their// proper Unicode equivalent. The first one-byte key is just a// non-standard character code. 
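/*
 * dispatchCea608Packet above picks one of the four CC services by combining
 * the 608 field (packet.type) with the data channel implied by the control
 * code's bit pattern. Worked bitmask examples (the constant names are
 * hypothetical):
 *
 *   var RCL_DATA_CHANNEL_1 = 0x1420; // Resume Caption Loading, data channel 1
 *   var RCL_DATA_CHANNEL_2 = 0x1c20; // same command, data channel 2
 *   (RCL_DATA_CHANNEL_1 & 0x7800) === 0x1000; // true -> channel index 0
 *   (RCL_DATA_CHANNEL_2 & 0x7800) === 0x1800; // true -> channel index 1
 *   // stream index = (field << 1) + channel:
 *   //   field 0, channel 0 -> ccStreams_[0] ("CC1")
 *   //   field 1, channel 1 -> ccStreams_[3] ("CC4")
 */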
The two-byte keys that follow are// the extended CEA708 character codes, along with the preceding// 0x10 extended character byte to distinguish these codes from// non-extended character codes. Every CEA708 character code that// is not in this object maps directly to a standard unicode// character code.// The transparent space and non-breaking transparent space are// technically not fully supported since there is no code to// make them transparent, so they have normal non-transparent// stand-ins.// The special closed caption (CC) character isn't a standard// unicode character, so a fairly similar unicode character was// chosen in it's place.var CHARACTER_TRANSLATION_708 = {0x7f: 0x266a,// ♪0x1020: 0x20,// Transparent Space0x1021: 0xa0,// Nob-breaking Transparent Space0x1025: 0x2026,// …0x102a: 0x0160,// Š0x102c: 0x0152,// Å’0x1030: 0x2588,// █0x1031: 0x2018,// ‘0x1032: 0x2019,// ’0x1033: 0x201c,// “0x1034: 0x201d,// ”0x1035: 0x2022,// •0x1039: 0x2122,// â„¢0x103a: 0x0161,// š0x103c: 0x0153,// Å“0x103d: 0x2120,// ℠0x103f: 0x0178,// Ÿ0x1076: 0x215b,// ⅛0x1077: 0x215c,// ⅜0x1078: 0x215d,// ⅝0x1079: 0x215e,// ⅞0x107a: 0x23d0,// ⏐0x107b: 0x23a4,// ⎤0x107c: 0x23a3,// ⎣0x107d: 0x23af,// ⎯0x107e: 0x23a6,// ⎦0x107f: 0x23a1,// ⎡0x10a0: 0x3138 // ㄸ (CC char)};var get708CharFromCode = function (code) {var newCode = CHARACTER_TRANSLATION_708[code] || code;if (code & 0x1000 && code === newCode) {// Invalid extended codereturn '';}return String.fromCharCode(newCode);};var within708TextBlock = function (b) {return 0x20 <= b && b <= 0x7f || 0xa0 <= b && b <= 0xff;};var Cea708Window = function (windowNum) {this.windowNum = windowNum;this.reset();};Cea708Window.prototype.reset = function () {this.clearText();this.pendingNewLine = false;this.winAttr = {};this.penAttr = {};this.penLoc = {};this.penColor = {}; // These default values are arbitrary,// defineWindow will usually override themthis.visible = 0;this.rowLock = 0;this.columnLock = 0;this.priority = 0;this.relativePositioning = 0;this.anchorVertical = 0;this.anchorHorizontal = 0;this.anchorPoint = 0;this.rowCount = 1;this.virtualRowCount = this.rowCount + 1;this.columnCount = 41;this.windowStyle = 0;this.penStyle = 0;};Cea708Window.prototype.getText = function () {return this.rows.join('\n');};Cea708Window.prototype.clearText = function () {this.rows = [''];this.rowIdx = 0;};Cea708Window.prototype.newLine = function (pts) {if (this.rows.length >= this.virtualRowCount && typeof this.beforeRowOverflow === 'function') {this.beforeRowOverflow(pts);}if (this.rows.length > 0) {this.rows.push('');this.rowIdx++;} // Show all virtual rows since there's no visible scrollingwhile (this.rows.length > this.virtualRowCount) {this.rows.shift();this.rowIdx--;}};Cea708Window.prototype.isEmpty = function () {if (this.rows.length === 0) {return true;} else if (this.rows.length === 1) {return this.rows[0] === '';}return false;};Cea708Window.prototype.addText = function (text) {this.rows[this.rowIdx] += text;};Cea708Window.prototype.backspace = function () {if (!this.isEmpty()) {var row = this.rows[this.rowIdx];this.rows[this.rowIdx] = row.substr(0, row.length - 1);}};var Cea708Service = function (serviceNum, encoding, stream) {this.serviceNum = serviceNum;this.text = '';this.currentWindow = new Cea708Window(-1);this.windows = [];this.stream = stream; // Try to setup a TextDecoder if an `encoding` value was providedif (typeof encoding === 'string') {this.createTextDecoder(encoding);}};/*** Initialize service windows* Must be run before service use** @param {Integer} pts PTS 
value* @param {Function} beforeRowOverflow Function to execute before row overflow of a window*/Cea708Service.prototype.init = function (pts, beforeRowOverflow) {this.startPts = pts;for (var win = 0; win < 8; win++) {this.windows[win] = new Cea708Window(win);if (typeof beforeRowOverflow === 'function') {this.windows[win].beforeRowOverflow = beforeRowOverflow;}}};/*** Set current window of service to be affected by commands** @param {Integer} windowNum Window number*/Cea708Service.prototype.setCurrentWindow = function (windowNum) {this.currentWindow = this.windows[windowNum];};/*** Try to create a TextDecoder if it is natively supported*/Cea708Service.prototype.createTextDecoder = function (encoding) {if (typeof TextDecoder === 'undefined') {this.stream.trigger('log', {level: 'warn',message: 'The `encoding` option is unsupported without TextDecoder support'});} else {try {this.textDecoder_ = new TextDecoder(encoding);} catch (error) {this.stream.trigger('log', {level: 'warn',message: 'TextDecoder could not be created with ' + encoding + ' encoding. ' + error});}}};var Cea708Stream = function (options) {options = options || {};Cea708Stream.prototype.init.call(this);var self = this;var captionServices = options.captionServices || {};var captionServiceEncodings = {};var serviceProps; // Get service encodings from captionServices option blockObject.keys(captionServices).forEach(serviceName => {serviceProps = captionServices[serviceName];if (/^SERVICE/.test(serviceName)) {captionServiceEncodings[serviceName] = serviceProps.encoding;}});this.serviceEncodings = captionServiceEncodings;this.current708Packet = null;this.services = {};this.push = function (packet) {if (packet.type === 3) {// 708 packet startself.new708Packet();self.add708Bytes(packet);} else {if (self.current708Packet === null) {// This should only happen at the start of a file if there's no packet start.self.new708Packet();}self.add708Bytes(packet);}};};Cea708Stream.prototype = new Stream$7();/*** Push current 708 packet, create new 708 packet.*/Cea708Stream.prototype.new708Packet = function () {if (this.current708Packet !== null) {this.push708Packet();}this.current708Packet = {data: [],ptsVals: []};};/*** Add pts and both bytes from packet into current 708 packet.*/Cea708Stream.prototype.add708Bytes = function (packet) {var data = packet.ccData;var byte0 = data >>> 8;var byte1 = data & 0xff; // I would just keep a list of packets instead of bytes, but it isn't clear in the spec// that service blocks will always line up with byte pairs.this.current708Packet.ptsVals.push(packet.pts);this.current708Packet.data.push(byte0);this.current708Packet.data.push(byte1);};/*** Parse completed 708 packet into service blocks and push each service block.*/Cea708Stream.prototype.push708Packet = function () {var packet708 = this.current708Packet;var packetData = packet708.data;var serviceNum = null;var blockSize = null;var i = 0;var b = packetData[i++];packet708.seq = b >> 6;packet708.sizeCode = b & 0x3f; // 0b00111111;for (; i < packetData.length; i++) {b = packetData[i++];serviceNum = b >> 5;blockSize = b & 0x1f; // 0b00011111if (serviceNum === 7 && blockSize > 0) {// Extended service numb = packetData[i++];serviceNum = b;}this.pushServiceBlock(serviceNum, i, blockSize);if (blockSize > 0) {i += blockSize - 1;}}};/*** Parse service block, execute commands, read text.** Note: While many of these commands serve important purposes,* many others just parse out the parameters or attributes, but* nothing is done with them because this is not a full and 
complete* implementation of the entire 708 spec.** @param {Integer} serviceNum Service number* @param {Integer} start Start index of the 708 packet data* @param {Integer} size Block size*/Cea708Stream.prototype.pushServiceBlock = function (serviceNum, start, size) {var b;var i = start;var packetData = this.current708Packet.data;var service = this.services[serviceNum];if (!service) {service = this.initService(serviceNum, i);}for (; i < start + size && i < packetData.length; i++) {b = packetData[i];if (within708TextBlock(b)) {i = this.handleText(i, service);} else if (b === 0x18) {i = this.multiByteCharacter(i, service);} else if (b === 0x10) {i = this.extendedCommands(i, service);} else if (0x80 <= b && b <= 0x87) {i = this.setCurrentWindow(i, service);} else if (0x98 <= b && b <= 0x9f) {i = this.defineWindow(i, service);} else if (b === 0x88) {i = this.clearWindows(i, service);} else if (b === 0x8c) {i = this.deleteWindows(i, service);} else if (b === 0x89) {i = this.displayWindows(i, service);} else if (b === 0x8a) {i = this.hideWindows(i, service);} else if (b === 0x8b) {i = this.toggleWindows(i, service);} else if (b === 0x97) {i = this.setWindowAttributes(i, service);} else if (b === 0x90) {i = this.setPenAttributes(i, service);} else if (b === 0x91) {i = this.setPenColor(i, service);} else if (b === 0x92) {i = this.setPenLocation(i, service);} else if (b === 0x8f) {service = this.reset(i, service);} else if (b === 0x08) {// BS: Backspaceservice.currentWindow.backspace();} else if (b === 0x0c) {// FF: Form feedservice.currentWindow.clearText();} else if (b === 0x0d) {// CR: Carriage returnservice.currentWindow.pendingNewLine = true;} else if (b === 0x0e) {// HCR: Horizontal carriage returnservice.currentWindow.clearText();} else if (b === 0x8d) {// DLY: Delay, nothing to doi++;} else ;}};/*** Execute an extended command** @param {Integer} i Current index in the 708 packet* @param {Service} service The service object to be affected* @return {Integer} New index after parsing*/Cea708Stream.prototype.extendedCommands = function (i, service) {var packetData = this.current708Packet.data;var b = packetData[++i];if (within708TextBlock(b)) {i = this.handleText(i, service, {isExtended: true});}return i;};/*** Get PTS value of a given byte index** @param {Integer} byteIndex Index of the byte* @return {Integer} PTS*/Cea708Stream.prototype.getPts = function (byteIndex) {// There's 1 pts value per 2 bytesreturn this.current708Packet.ptsVals[Math.floor(byteIndex / 2)];};/*** Initializes a service** @param {Integer} serviceNum Service number* @return {Service} Initialized service object*/Cea708Stream.prototype.initService = function (serviceNum, i) {var serviceName = 'SERVICE' + serviceNum;var self = this;var serviceName;var encoding;if (serviceName in this.serviceEncodings) {encoding = this.serviceEncodings[serviceName];}this.services[serviceNum] = new Cea708Service(serviceNum, encoding, self);this.services[serviceNum].init(this.getPts(i), function (pts) {self.flushDisplayed(pts, self.services[serviceNum]);});return this.services[serviceNum];};/*** Execute text writing to current window** @param {Integer} i Current index in the 708 packet* @param {Service} service The service object to be affected* @return {Integer} New index after parsing*/Cea708Stream.prototype.handleText = function (i, service, options) {var isExtended = options && options.isExtended;var isMultiByte = options && options.isMultiByte;var packetData = this.current708Packet.data;var extended = isExtended ? 
0x1000 : 0x0000;var currentByte = packetData[i];var nextByte = packetData[i + 1];var win = service.currentWindow;var char;var charCodeArray; // Converts an array of bytes to a unicode hex string.function toHexString(byteArray) {return byteArray.map(byte => {return ('0' + (byte & 0xFF).toString(16)).slice(-2);}).join('');}if (isMultiByte) {charCodeArray = [currentByte, nextByte];i++;} else {charCodeArray = [currentByte];} // Use the TextDecoder if one was created for this serviceif (service.textDecoder_ && !isExtended) {char = service.textDecoder_.decode(new Uint8Array(charCodeArray));} else {// We assume any multi-byte char without a decoder is unicode.if (isMultiByte) {const unicode = toHexString(charCodeArray); // Takes a unicode hex string and creates a single character.char = String.fromCharCode(parseInt(unicode, 16));} else {char = get708CharFromCode(extended | currentByte);}}if (win.pendingNewLine && !win.isEmpty()) {win.newLine(this.getPts(i));}win.pendingNewLine = false;win.addText(char);return i;};/*** Handle decoding of multibyte character** @param {Integer} i Current index in the 708 packet* @param {Service} service The service object to be affected* @return {Integer} New index after parsing*/Cea708Stream.prototype.multiByteCharacter = function (i, service) {var packetData = this.current708Packet.data;var firstByte = packetData[i + 1];var secondByte = packetData[i + 2];if (within708TextBlock(firstByte) && within708TextBlock(secondByte)) {i = this.handleText(++i, service, {isMultiByte: true});}return i;};/*** Parse and execute the CW# command.** Set the current window.** @param {Integer} i Current index in the 708 packet* @param {Service} service The service object to be affected* @return {Integer} New index after parsing*/Cea708Stream.prototype.setCurrentWindow = function (i, service) {var packetData = this.current708Packet.data;var b = packetData[i];var windowNum = b & 0x07;service.setCurrentWindow(windowNum);return i;};/*** Parse and execute the DF# command.** Define a window and set it as the current window.** @param {Integer} i Current index in the 708 packet* @param {Service} service The service object to be affected* @return {Integer} New index after parsing*/Cea708Stream.prototype.defineWindow = function (i, service) {var packetData = this.current708Packet.data;var b = packetData[i];var windowNum = b & 0x07;service.setCurrentWindow(windowNum);var win = service.currentWindow;b = packetData[++i];win.visible = (b & 0x20) >> 5; // vwin.rowLock = (b & 0x10) >> 4; // rlwin.columnLock = (b & 0x08) >> 3; // clwin.priority = b & 0x07; // pb = packetData[++i];win.relativePositioning = (b & 0x80) >> 7; // rpwin.anchorVertical = b & 0x7f; // avb = packetData[++i];win.anchorHorizontal = b; // ahb = packetData[++i];win.anchorPoint = (b & 0xf0) >> 4; // apwin.rowCount = b & 0x0f; // rcb = packetData[++i];win.columnCount = b & 0x3f; // ccb = packetData[++i];win.windowStyle = (b & 0x38) >> 3; // wswin.penStyle = b & 0x07; // ps// The spec says there are (rowCount+1) "virtual rows"win.virtualRowCount = win.rowCount + 1;return i;};/*** Parse and execute the SWA command.** Set attributes of the current window.** @param {Integer} i Current index in the 708 packet* @param {Service} service The service object to be affected* @return {Integer} New index after parsing*/Cea708Stream.prototype.setWindowAttributes = function (i, service) {var packetData = this.current708Packet.data;var b = packetData[i];var winAttr = service.currentWindow.winAttr;b = packetData[++i];winAttr.fillOpacity = (b & 0xc0) 
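/*
 * defineWindow above unpacks the DF# parameter bytes with masks and shifts.
 * A worked example for the first attribute byte, using the hypothetical value
 * 0x3a (0b00111010):
 *
 *   var b = 0x3a;
 *   var visible = (b & 0x20) >> 5;    // 1
 *   var rowLock = (b & 0x10) >> 4;    // 1
 *   var columnLock = (b & 0x08) >> 3; // 1
 *   var priority = b & 0x07;          // 2
 */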
>> 6; // fowinAttr.fillRed = (b & 0x30) >> 4; // frwinAttr.fillGreen = (b & 0x0c) >> 2; // fgwinAttr.fillBlue = b & 0x03; // fbb = packetData[++i];winAttr.borderType = (b & 0xc0) >> 6; // btwinAttr.borderRed = (b & 0x30) >> 4; // brwinAttr.borderGreen = (b & 0x0c) >> 2; // bgwinAttr.borderBlue = b & 0x03; // bbb = packetData[++i];winAttr.borderType += (b & 0x80) >> 5; // btwinAttr.wordWrap = (b & 0x40) >> 6; // wwwinAttr.printDirection = (b & 0x30) >> 4; // pdwinAttr.scrollDirection = (b & 0x0c) >> 2; // sdwinAttr.justify = b & 0x03; // jb = packetData[++i];winAttr.effectSpeed = (b & 0xf0) >> 4; // eswinAttr.effectDirection = (b & 0x0c) >> 2; // edwinAttr.displayEffect = b & 0x03; // dereturn i;};/*** Gather text from all displayed windows and push a caption to output.** @param {Integer} i Current index in the 708 packet* @param {Service} service The service object to be affected*/Cea708Stream.prototype.flushDisplayed = function (pts, service) {var displayedText = []; // TODO: Positioning not supported, displaying multiple windows will not necessarily// display text in the correct order, but sample files so far have not shown any issue.for (var winId = 0; winId < 8; winId++) {if (service.windows[winId].visible && !service.windows[winId].isEmpty()) {displayedText.push(service.windows[winId].getText());}}service.endPts = pts;service.text = displayedText.join('\n\n');this.pushCaption(service);service.startPts = pts;};/*** Push a caption to output if the caption contains text.** @param {Service} service The service object to be affected*/Cea708Stream.prototype.pushCaption = function (service) {if (service.text !== '') {this.trigger('data', {startPts: service.startPts,endPts: service.endPts,text: service.text,stream: 'cc708_' + service.serviceNum});service.text = '';service.startPts = service.endPts;}};/*** Parse and execute the DSW command.** Set visible property of windows based on the parsed bitmask.** @param {Integer} i Current index in the 708 packet* @param {Service} service The service object to be affected* @return {Integer} New index after parsing*/Cea708Stream.prototype.displayWindows = function (i, service) {var packetData = this.current708Packet.data;var b = packetData[++i];var pts = this.getPts(i);this.flushDisplayed(pts, service);for (var winId = 0; winId < 8; winId++) {if (b & 0x01 << winId) {service.windows[winId].visible = 1;}}return i;};/*** Parse and execute the HDW command.** Set visible property of windows based on the parsed bitmask.** @param {Integer} i Current index in the 708 packet* @param {Service} service The service object to be affected* @return {Integer} New index after parsing*/Cea708Stream.prototype.hideWindows = function (i, service) {var packetData = this.current708Packet.data;var b = packetData[++i];var pts = this.getPts(i);this.flushDisplayed(pts, service);for (var winId = 0; winId < 8; winId++) {if (b & 0x01 << winId) {service.windows[winId].visible = 0;}}return i;};/*** Parse and execute the TGW command.** Set visible property of windows based on the parsed bitmask.** @param {Integer} i Current index in the 708 packet* @param {Service} service The service object to be affected* @return {Integer} New index after parsing*/Cea708Stream.prototype.toggleWindows = function (i, service) {var packetData = this.current708Packet.data;var b = packetData[++i];var pts = this.getPts(i);this.flushDisplayed(pts, service);for (var winId = 0; winId < 8; winId++) {if (b & 0x01 << winId) {service.windows[winId].visible ^= 1;}}return i;};/*** Parse and execute the CLW 
command.** Clear text of windows based on the parsed bitmask.** @param {Integer} i Current index in the 708 packet* @param {Service} service The service object to be affected* @return {Integer} New index after parsing*/Cea708Stream.prototype.clearWindows = function (i, service) {var packetData = this.current708Packet.data;var b = packetData[++i];var pts = this.getPts(i);this.flushDisplayed(pts, service);for (var winId = 0; winId < 8; winId++) {if (b & 0x01 << winId) {service.windows[winId].clearText();}}return i;};/*** Parse and execute the DLW command.** Re-initialize windows based on the parsed bitmask.** @param {Integer} i Current index in the 708 packet* @param {Service} service The service object to be affected* @return {Integer} New index after parsing*/Cea708Stream.prototype.deleteWindows = function (i, service) {var packetData = this.current708Packet.data;var b = packetData[++i];var pts = this.getPts(i);this.flushDisplayed(pts, service);for (var winId = 0; winId < 8; winId++) {if (b & 0x01 << winId) {service.windows[winId].reset();}}return i;};/*** Parse and execute the SPA command.** Set pen attributes of the current window.** @param {Integer} i Current index in the 708 packet* @param {Service} service The service object to be affected* @return {Integer} New index after parsing*/Cea708Stream.prototype.setPenAttributes = function (i, service) {var packetData = this.current708Packet.data;var b = packetData[i];var penAttr = service.currentWindow.penAttr;b = packetData[++i];penAttr.textTag = (b & 0xf0) >> 4; // ttpenAttr.offset = (b & 0x0c) >> 2; // openAttr.penSize = b & 0x03; // sb = packetData[++i];penAttr.italics = (b & 0x80) >> 7; // ipenAttr.underline = (b & 0x40) >> 6; // upenAttr.edgeType = (b & 0x38) >> 3; // etpenAttr.fontStyle = b & 0x07; // fsreturn i;};/*** Parse and execute the SPC command.** Set pen color of the current window.** @param {Integer} i Current index in the 708 packet* @param {Service} service The service object to be affected* @return {Integer} New index after parsing*/Cea708Stream.prototype.setPenColor = function (i, service) {var packetData = this.current708Packet.data;var b = packetData[i];var penColor = service.currentWindow.penColor;b = packetData[++i];penColor.fgOpacity = (b & 0xc0) >> 6; // fopenColor.fgRed = (b & 0x30) >> 4; // frpenColor.fgGreen = (b & 0x0c) >> 2; // fgpenColor.fgBlue = b & 0x03; // fbb = packetData[++i];penColor.bgOpacity = (b & 0xc0) >> 6; // bopenColor.bgRed = (b & 0x30) >> 4; // brpenColor.bgGreen = (b & 0x0c) >> 2; // bgpenColor.bgBlue = b & 0x03; // bbb = packetData[++i];penColor.edgeRed = (b & 0x30) >> 4; // erpenColor.edgeGreen = (b & 0x0c) >> 2; // egpenColor.edgeBlue = b & 0x03; // ebreturn i;};/*** Parse and execute the SPL command.** Set pen location of the current window.** @param {Integer} i Current index in the 708 packet* @param {Service} service The service object to be affected* @return {Integer} New index after parsing*/Cea708Stream.prototype.setPenLocation = function (i, service) {var packetData = this.current708Packet.data;var b = packetData[i];var penLoc = service.currentWindow.penLoc; // Positioning isn't really supported at the moment, so this essentially just inserts a linebreakservice.currentWindow.pendingNewLine = true;b = packetData[++i];penLoc.row = b & 0x0f; // rb = packetData[++i];penLoc.column = b & 0x3f; // creturn i;};/*** Execute the RST command.** Reset service to a clean slate. 
Re-initialize.** @param {Integer} i Current index in the 708 packet* @param {Service} service The service object to be affected* @return {Service} Re-initialized service*/Cea708Stream.prototype.reset = function (i, service) {var pts = this.getPts(i);this.flushDisplayed(pts, service);return this.initService(service.serviceNum, i);}; // This hash maps non-ASCII, special, and extended character codes to their// proper Unicode equivalent. The first keys that are only a single byte// are the non-standard ASCII characters, which simply map the CEA608 byte// to the standard ASCII/Unicode. The two-byte keys that follow are the CEA608// character codes, but have their MSB bitmasked with 0x03 so that a lookup// can be performed regardless of the field and data channel on which the// character code was received.var CHARACTER_TRANSLATION = {0x2a: 0xe1,// á0x5c: 0xe9,// é0x5e: 0xed,// í0x5f: 0xf3,// ó0x60: 0xfa,// ú0x7b: 0xe7,// ç0x7c: 0xf7,// ÷0x7d: 0xd1,// Ñ0x7e: 0xf1,// ñ0x7f: 0x2588,// █0x0130: 0xae,// ®0x0131: 0xb0,// °0x0132: 0xbd,// ½0x0133: 0xbf,// ¿0x0134: 0x2122,// â„¢0x0135: 0xa2,// ¢0x0136: 0xa3,// £0x0137: 0x266a,// ♪0x0138: 0xe0,// à0x0139: 0xa0,//0x013a: 0xe8,// è0x013b: 0xe2,// â0x013c: 0xea,// ê0x013d: 0xee,// î0x013e: 0xf4,// ô0x013f: 0xfb,// û0x0220: 0xc1,// Á0x0221: 0xc9,// É0x0222: 0xd3,// Ó0x0223: 0xda,// Ú0x0224: 0xdc,// Ü0x0225: 0xfc,// ü0x0226: 0x2018,// ‘0x0227: 0xa1,// ¡0x0228: 0x2a,// *0x0229: 0x27,// '0x022a: 0x2014,// —0x022b: 0xa9,// ©0x022c: 0x2120,// ℠0x022d: 0x2022,// •0x022e: 0x201c,// “0x022f: 0x201d,// ”0x0230: 0xc0,// À0x0231: 0xc2,// Â0x0232: 0xc7,// Ç0x0233: 0xc8,// È0x0234: 0xca,// Ê0x0235: 0xcb,// Ë0x0236: 0xeb,// ë0x0237: 0xce,// Î0x0238: 0xcf,// Ï0x0239: 0xef,// ï0x023a: 0xd4,// Ô0x023b: 0xd9,// Ù0x023c: 0xf9,// ù0x023d: 0xdb,// Û0x023e: 0xab,// «0x023f: 0xbb,// »0x0320: 0xc3,// Ã0x0321: 0xe3,// ã0x0322: 0xcd,// Í0x0323: 0xcc,// Ì0x0324: 0xec,// ì0x0325: 0xd2,// Ò0x0326: 0xf2,// ò0x0327: 0xd5,// Õ0x0328: 0xf5,// õ0x0329: 0x7b,// {0x032a: 0x7d,// }0x032b: 0x5c,// \0x032c: 0x5e,// ^0x032d: 0x5f,// _0x032e: 0x7c,// |0x032f: 0x7e,// ~0x0330: 0xc4,// Ä0x0331: 0xe4,// ä0x0332: 0xd6,// Ö0x0333: 0xf6,// ö0x0334: 0xdf,// ß0x0335: 0xa5,// ¥0x0336: 0xa4,// ¤0x0337: 0x2502,// │0x0338: 0xc5,// Å0x0339: 0xe5,// å0x033a: 0xd8,// Ø0x033b: 0xf8,// ø0x033c: 0x250c,// ┌0x033d: 0x2510,// ┐0x033e: 0x2514,// â””0x033f: 0x2518 // ┘};var getCharFromCode = function (code) {if (code === null) {return '';}code = CHARACTER_TRANSLATION[code] || code;return String.fromCharCode(code);}; // the index of the last row in a CEA-608 display buffervar BOTTOM_ROW = 14; // This array is used for mapping PACs -> row #, since there's no way of// getting it through bit logic.var ROWS = [0x1100, 0x1120, 0x1200, 0x1220, 0x1500, 0x1520, 0x1600, 0x1620, 0x1700, 0x1720, 0x1000, 0x1300, 0x1320, 0x1400, 0x1420]; // CEA-608 captions are rendered onto a 34x15 matrix of character// cells. 
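/*
 * getCharFromCode above remaps non-standard CEA-608 byte values through
 * CHARACTER_TRANSLATION before converting them to a string, and ROWS maps PAC
 * bit patterns to row numbers by array index. Two worked lookups:
 *
 *   String.fromCharCode(0xe1);        // 'á', the character code 0x2a maps to
 *   // ROWS.indexOf(0x1500) === 4, so a PAC whose masked value is 0x1500
 *   // positions the cursor on row 4 of the 15-row display buffer
 */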
The "bottom" row is the last element in the outer array.// We keep track of positioning information as we go by storing the// number of indentations and the tab offset in this buffer.var createDisplayBuffer = function () {var result = [],i = BOTTOM_ROW + 1;while (i--) {result.push({text: '',indent: 0,offset: 0});}return result;};var Cea608Stream = function (field, dataChannel) {Cea608Stream.prototype.init.call(this);this.field_ = field || 0;this.dataChannel_ = dataChannel || 0;this.name_ = 'CC' + ((this.field_ << 1 | this.dataChannel_) + 1);this.setConstants();this.reset();this.push = function (packet) {var data, swap, char0, char1, text; // remove the parity bitsdata = packet.ccData & 0x7f7f; // ignore duplicate control codes; the spec demands they're sent twiceif (data === this.lastControlCode_) {this.lastControlCode_ = null;return;} // Store control codesif ((data & 0xf000) === 0x1000) {this.lastControlCode_ = data;} else if (data !== this.PADDING_) {this.lastControlCode_ = null;}char0 = data >>> 8;char1 = data & 0xff;if (data === this.PADDING_) {return;} else if (data === this.RESUME_CAPTION_LOADING_) {this.mode_ = 'popOn';} else if (data === this.END_OF_CAPTION_) {// If an EOC is received while in paint-on mode, the displayed caption// text should be swapped to non-displayed memory as if it was a pop-on// caption. Because of that, we should explicitly switch back to pop-on// modethis.mode_ = 'popOn';this.clearFormatting(packet.pts); // if a caption was being displayed, it's gone nowthis.flushDisplayed(packet.pts); // flip memoryswap = this.displayed_;this.displayed_ = this.nonDisplayed_;this.nonDisplayed_ = swap; // start measuring the time to display the captionthis.startPts_ = packet.pts;} else if (data === this.ROLL_UP_2_ROWS_) {this.rollUpRows_ = 2;this.setRollUp(packet.pts);} else if (data === this.ROLL_UP_3_ROWS_) {this.rollUpRows_ = 3;this.setRollUp(packet.pts);} else if (data === this.ROLL_UP_4_ROWS_) {this.rollUpRows_ = 4;this.setRollUp(packet.pts);} else if (data === this.CARRIAGE_RETURN_) {this.clearFormatting(packet.pts);this.flushDisplayed(packet.pts);this.shiftRowsUp_();this.startPts_ = packet.pts;} else if (data === this.BACKSPACE_) {if (this.mode_ === 'popOn') {this.nonDisplayed_[this.row_].text = this.nonDisplayed_[this.row_].text.slice(0, -1);} else {this.displayed_[this.row_].text = this.displayed_[this.row_].text.slice(0, -1);}} else if (data === this.ERASE_DISPLAYED_MEMORY_) {this.flushDisplayed(packet.pts);this.displayed_ = createDisplayBuffer();} else if (data === this.ERASE_NON_DISPLAYED_MEMORY_) {this.nonDisplayed_ = createDisplayBuffer();} else if (data === this.RESUME_DIRECT_CAPTIONING_) {if (this.mode_ !== 'paintOn') {// NOTE: This should be removed when proper caption positioning is// implementedthis.flushDisplayed(packet.pts);this.displayed_ = createDisplayBuffer();}this.mode_ = 'paintOn';this.startPts_ = packet.pts; // Append special characters to caption text} else if (this.isSpecialCharacter(char0, char1)) {// Bitmask char0 so that we can apply character transformations// regardless of field and data channel.// Then byte-shift to the left and OR with char1 so we can pass the// entire character code to `getCharFromCode`.char0 = (char0 & 0x03) << 8;text = getCharFromCode(char0 | char1);this[this.mode_](packet.pts, text);this.column_++; // Append extended characters to caption text} else if (this.isExtCharacter(char0, char1)) {// Extended characters always follow their "non-extended" equivalents.// IE if a "è" is desired, you'll always receive "eè"; 
non-compliant// decoders are supposed to drop the "è", while compliant decoders// backspace the "e" and insert "è".// Delete the previous characterif (this.mode_ === 'popOn') {this.nonDisplayed_[this.row_].text = this.nonDisplayed_[this.row_].text.slice(0, -1);} else {this.displayed_[this.row_].text = this.displayed_[this.row_].text.slice(0, -1);} // Bitmask char0 so that we can apply character transformations// regardless of field and data channel.// Then byte-shift to the left and OR with char1 so we can pass the// entire character code to `getCharFromCode`.char0 = (char0 & 0x03) << 8;text = getCharFromCode(char0 | char1);this[this.mode_](packet.pts, text);this.column_++; // Process mid-row codes} else if (this.isMidRowCode(char0, char1)) {// Attributes are not additive, so clear all formattingthis.clearFormatting(packet.pts); // According to the standard, mid-row codes// should be replaced with spaces, so add one nowthis[this.mode_](packet.pts, ' ');this.column_++;if ((char1 & 0xe) === 0xe) {this.addFormatting(packet.pts, ['i']);}if ((char1 & 0x1) === 0x1) {this.addFormatting(packet.pts, ['u']);} // Detect offset control codes and adjust cursor} else if (this.isOffsetControlCode(char0, char1)) {// Cursor position is set by indent PAC (see below) in 4-column// increments, with an additional offset code of 1-3 to reach any// of the 32 columns specified by CEA-608. So all we need to do// here is increment the column cursor by the given offset.const offset = char1 & 0x03; // For an offest value 1-3, set the offset for that caption// in the non-displayed array.this.nonDisplayed_[this.row_].offset = offset;this.column_ += offset; // Detect PACs (Preamble Address Codes)} else if (this.isPAC(char0, char1)) {// There's no logic for PAC -> row mapping, so we have to just// find the row code in an array and use its index :(var row = ROWS.indexOf(data & 0x1f20); // Configure the caption window if we're in roll-up modeif (this.mode_ === 'rollUp') {// This implies that the base row is incorrectly set.// As per the recommendation in CEA-608(Base Row Implementation), defer to the number// of roll-up rows set.if (row - this.rollUpRows_ + 1 < 0) {row = this.rollUpRows_ - 1;}this.setRollUp(packet.pts, row);}if (row !== this.row_) {// formatting is only persistent for current rowthis.clearFormatting(packet.pts);this.row_ = row;} // All PACs can apply underline, so detect and apply// (All odd-numbered second bytes set underline)if (char1 & 0x1 && this.formatting_.indexOf('u') === -1) {this.addFormatting(packet.pts, ['u']);}if ((data & 0x10) === 0x10) {// We've got an indent level code. Each successive even number// increments the column cursor by 4, so we can get the desired// column position by bit-shifting to the right (to get n/2)// and multiplying by 4.const indentations = (data & 0xe) >> 1;this.column_ = indentations * 4; // add to the number of indentations for positioningthis.nonDisplayed_[this.row_].indent += indentations;}if (this.isColorPAC(char1)) {// it's a color code, though we only support white, which// can be either normal or italicized. 
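/*
 * An indent PAC encodes the cursor column in 4-column steps, and a following
 * offset control code can add 1 to 3 more columns. A worked example with a
 * hypothetical indent PAC value:
 *
 *   var data = 0x145a;                    // indent bit (0x10) is set
 *   var indentations = (data & 0xe) >> 1; // 5
 *   var column = indentations * 4;        // 20
 *   column += 2;                          // offset control code adds 2 -> 22
 */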
white italics can be// either 0x4e or 0x6e depending on the row, so we just// bitwise-and with 0xe to see if italics should be turned onif ((char1 & 0xe) === 0xe) {this.addFormatting(packet.pts, ['i']);}} // We have a normal character in char0, and possibly one in char1} else if (this.isNormalChar(char0)) {if (char1 === 0x00) {char1 = null;}text = getCharFromCode(char0);text += getCharFromCode(char1);this[this.mode_](packet.pts, text);this.column_ += text.length;} // finish data processing};};Cea608Stream.prototype = new Stream$7(); // Trigger a cue point that captures the current state of the// display bufferCea608Stream.prototype.flushDisplayed = function (pts) {const logWarning = index => {this.trigger('log', {level: 'warn',message: 'Skipping a malformed 608 caption at index ' + index + '.'});};const content = [];this.displayed_.forEach((row, i) => {if (row && row.text && row.text.length) {try {// remove spaces from the start and end of the stringrow.text = row.text.trim();} catch (e) {// Ordinarily, this shouldn't happen. However, caption// parsing errors should not throw exceptions and// break playback.logWarning(i);} // See the below link for more details on the following fields:// https://dvcs.w3.org/hg/text-tracks/raw-file/default/608toVTT/608toVTT.html#positioning-in-cea-608if (row.text.length) {content.push({// The text to be displayed in the caption from this specific row, with whitespace removed.text: row.text,// Value between 1 and 15 representing the PAC row used to calculate line height.line: i + 1,// A number representing the indent position by percentage (CEA-608 PAC indent code).// The value will be a number between 10 and 80. Offset is used to add an aditional// value to the position if necessary.position: 10 + Math.min(70, row.indent * 10) + row.offset * 2.5});}} else if (row === undefined || row === null) {logWarning(i);}});if (content.length) {this.trigger('data', {startPts: this.startPts_,endPts: pts,content,stream: this.name_});}};/*** Zero out the data, used for startup and on seek*/Cea608Stream.prototype.reset = function () {this.mode_ = 'popOn'; // When in roll-up mode, the index of the last row that will// actually display captions. If a caption is shifted to a row// with a lower index than this, it is cleared from the display// bufferthis.topRow_ = 0;this.startPts_ = 0;this.displayed_ = createDisplayBuffer();this.nonDisplayed_ = createDisplayBuffer();this.lastControlCode_ = null; // Track row and column for proper line-breaking and spacingthis.column_ = 0;this.row_ = BOTTOM_ROW;this.rollUpRows_ = 2; // This variable holds currently-applied formattingthis.formatting_ = [];};/*** Sets up control code and related constants for this instance*/Cea608Stream.prototype.setConstants = function () {// The following attributes have these uses:// ext_ : char0 for mid-row codes, and the base for extended// chars (ext_+0, ext_+1, and ext_+2 are char0s for// extended codes)// control_: char0 for control codes, except byte-shifted to the// left so that we can do this.control_ | CONTROL_CODE// offset_: char0 for tab offset codes//// It's also worth noting that control codes, and _only_ control codes,// differ between field 1 and field2. Field 2 control codes are always// their field 1 value plus 1. 
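/*
 * setConstants derives every control code from a per-field, per-channel base:
 * the high byte is 0x14 or 0x15 (fields 1 and 2, data channel 0) or 0x1c/0x1d
 * (data channel 1), and the command is OR'd into the low byte. For example:
 *
 *   var field = 0;
 *   var CONTROL_ = (0x14 | field) << 8;            // 0x1400
 *   var RESUME_CAPTION_LOADING_ = CONTROL_ | 0x20; // 0x1420 (RCL)
 *   var END_OF_CAPTION_ = CONTROL_ | 0x2f;         // 0x142f (EOC)
 *   // with field = 1 the same codes become 0x1520 and 0x152f
 */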
That's why there's the "| field" on the// control value.if (this.dataChannel_ === 0) {this.BASE_ = 0x10;this.EXT_ = 0x11;this.CONTROL_ = (0x14 | this.field_) << 8;this.OFFSET_ = 0x17;} else if (this.dataChannel_ === 1) {this.BASE_ = 0x18;this.EXT_ = 0x19;this.CONTROL_ = (0x1c | this.field_) << 8;this.OFFSET_ = 0x1f;} // Constants for the LSByte command codes recognized by Cea608Stream. This// list is not exhaustive. For a more comprehensive listing and semantics see// http://www.gpo.gov/fdsys/pkg/CFR-2010-title47-vol1/pdf/CFR-2010-title47-vol1-sec15-119.pdf// Paddingthis.PADDING_ = 0x0000; // Pop-on Modethis.RESUME_CAPTION_LOADING_ = this.CONTROL_ | 0x20;this.END_OF_CAPTION_ = this.CONTROL_ | 0x2f; // Roll-up Modethis.ROLL_UP_2_ROWS_ = this.CONTROL_ | 0x25;this.ROLL_UP_3_ROWS_ = this.CONTROL_ | 0x26;this.ROLL_UP_4_ROWS_ = this.CONTROL_ | 0x27;this.CARRIAGE_RETURN_ = this.CONTROL_ | 0x2d; // paint-on modethis.RESUME_DIRECT_CAPTIONING_ = this.CONTROL_ | 0x29; // Erasurethis.BACKSPACE_ = this.CONTROL_ | 0x21;this.ERASE_DISPLAYED_MEMORY_ = this.CONTROL_ | 0x2c;this.ERASE_NON_DISPLAYED_MEMORY_ = this.CONTROL_ | 0x2e;};/*** Detects if the 2-byte packet data is a special character** Special characters have a second byte in the range 0x30 to 0x3f,* with the first byte being 0x11 (for data channel 1) or 0x19 (for* data channel 2).** @param {Integer} char0 The first byte* @param {Integer} char1 The second byte* @return {Boolean} Whether the 2 bytes are an special character*/Cea608Stream.prototype.isSpecialCharacter = function (char0, char1) {return char0 === this.EXT_ && char1 >= 0x30 && char1 <= 0x3f;};/*** Detects if the 2-byte packet data is an extended character** Extended characters have a second byte in the range 0x20 to 0x3f,* with the first byte being 0x12 or 0x13 (for data channel 1) or* 0x1a or 0x1b (for data channel 2).** @param {Integer} char0 The first byte* @param {Integer} char1 The second byte* @return {Boolean} Whether the 2 bytes are an extended character*/Cea608Stream.prototype.isExtCharacter = function (char0, char1) {return (char0 === this.EXT_ + 1 || char0 === this.EXT_ + 2) && char1 >= 0x20 && char1 <= 0x3f;};/*** Detects if the 2-byte packet is a mid-row code** Mid-row codes have a second byte in the range 0x20 to 0x2f, with* the first byte being 0x11 (for data channel 1) or 0x19 (for data* channel 2).** @param {Integer} char0 The first byte* @param {Integer} char1 The second byte* @return {Boolean} Whether the 2 bytes are a mid-row code*/Cea608Stream.prototype.isMidRowCode = function (char0, char1) {return char0 === this.EXT_ && char1 >= 0x20 && char1 <= 0x2f;};/*** Detects if the 2-byte packet is an offset control code** Offset control codes have a second byte in the range 0x21 to 0x23,* with the first byte being 0x17 (for data channel 1) or 0x1f (for* data channel 2).** @param {Integer} char0 The first byte* @param {Integer} char1 The second byte* @return {Boolean} Whether the 2 bytes are an offset control code*/Cea608Stream.prototype.isOffsetControlCode = function (char0, char1) {return char0 === this.OFFSET_ && char1 >= 0x21 && char1 <= 0x23;};/*** Detects if the 2-byte packet is a Preamble Address Code** PACs have a first byte in the range 0x10 to 0x17 (for data channel 1)* or 0x18 to 0x1f (for data channel 2), with the second byte in the* range 0x40 to 0x7f.** @param {Integer} char0 The first byte* @param {Integer} char1 The second byte* @return {Boolean} Whether the 2 bytes are a PAC*/Cea608Stream.prototype.isPAC = function (char0, char1) {return char0 >= this.BASE_ 
&& char0 < this.BASE_ + 8 && char1 >= 0x40 && char1 <= 0x7f;};/*** Detects if a packet's second byte is in the range of a PAC color code** PAC color codes have the second byte be in the range 0x40 to 0x4f, or* 0x60 to 0x6f.** @param {Integer} char1 The second byte* @return {Boolean} Whether the byte is a color PAC*/Cea608Stream.prototype.isColorPAC = function (char1) {return char1 >= 0x40 && char1 <= 0x4f || char1 >= 0x60 && char1 <= 0x7f;};/*** Detects if a single byte is in the range of a normal character** Normal text bytes are in the range 0x20 to 0x7f.** @param {Integer} char The byte* @return {Boolean} Whether the byte is a normal character*/Cea608Stream.prototype.isNormalChar = function (char) {return char >= 0x20 && char <= 0x7f;};/*** Configures roll-up** @param {Integer} pts Current PTS* @param {Integer} newBaseRow Used by PACs to slide the current window to* a new position*/Cea608Stream.prototype.setRollUp = function (pts, newBaseRow) {// Reset the base row to the bottom row when switching modesif (this.mode_ !== 'rollUp') {this.row_ = BOTTOM_ROW;this.mode_ = 'rollUp'; // Spec says to wipe memories when switching to roll-upthis.flushDisplayed(pts);this.nonDisplayed_ = createDisplayBuffer();this.displayed_ = createDisplayBuffer();}if (newBaseRow !== undefined && newBaseRow !== this.row_) {// move currently displayed captions (up or down) to the new base rowfor (var i = 0; i < this.rollUpRows_; i++) {this.displayed_[newBaseRow - i] = this.displayed_[this.row_ - i];this.displayed_[this.row_ - i] = {text: '',indent: 0,offset: 0};}}if (newBaseRow === undefined) {newBaseRow = this.row_;}this.topRow_ = newBaseRow - this.rollUpRows_ + 1;}; // Adds the opening HTML tag for the passed character to the caption text,// and keeps track of it for later closingCea608Stream.prototype.addFormatting = function (pts, format) {this.formatting_ = this.formatting_.concat(format);var text = format.reduce(function (text, format) {return text + '<' + format + '>';}, '');this[this.mode_](pts, text);}; // Adds HTML closing tags for current formatting to caption text and// clears remembered formattingCea608Stream.prototype.clearFormatting = function (pts) {if (!this.formatting_.length) {return;}var text = this.formatting_.reverse().reduce(function (text, format) {return text + '</' + format + '>';}, '');this.formatting_ = [];this[this.mode_](pts, text);}; // Mode ImplementationsCea608Stream.prototype.popOn = function (pts, text) {var baseRow = this.nonDisplayed_[this.row_].text; // buffer charactersbaseRow += text;this.nonDisplayed_[this.row_].text = baseRow;};Cea608Stream.prototype.rollUp = function (pts, text) {var baseRow = this.displayed_[this.row_].text;baseRow += text;this.displayed_[this.row_].text = baseRow;};Cea608Stream.prototype.shiftRowsUp_ = function () {var i; // clear out inactive rowsfor (i = 0; i < this.topRow_; i++) {this.displayed_[i] = {text: '',indent: 0,offset: 0};}for (i = this.row_ + 1; i < BOTTOM_ROW + 1; i++) {this.displayed_[i] = {text: '',indent: 0,offset: 0};} // shift displayed rows upfor (i = this.topRow_; i < this.row_; i++) {this.displayed_[i] = this.displayed_[i + 1];} // clear out the bottom rowthis.displayed_[this.row_] = {text: '',indent: 0,offset: 0};};Cea608Stream.prototype.paintOn = function (pts, text) {var baseRow = this.displayed_[this.row_].text;baseRow += text;this.displayed_[this.row_].text = baseRow;}; // exportsvar captionStream = {CaptionStream: CaptionStream$2,Cea608Stream: Cea608Stream,Cea708Stream: Cea708Stream};/*** mux.js** Copyright (c) Brightcove* 
Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE*/var streamTypes = {H264_STREAM_TYPE: 0x1B,ADTS_STREAM_TYPE: 0x0F,METADATA_STREAM_TYPE: 0x15};/*** mux.js** Copyright (c) Brightcove* Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE** Accepts program elementary stream (PES) data events and corrects* decode and presentation time stamps to account for a rollover* of the 33 bit value.*/var Stream$6 = stream;var MAX_TS = 8589934592;var RO_THRESH = 4294967296;var TYPE_SHARED = 'shared';var handleRollover$1 = function (value, reference) {var direction = 1;if (value > reference) {// If the current timestamp value is greater than our reference timestamp and we detect a// timestamp rollover, this means the roll over is happening in the opposite direction.// Example scenario: Enter a long stream/video just after a rollover occurred. The reference// point will be set to a small number, e.g. 1. The user then seeks backwards over the// rollover point. In loading this segment, the timestamp values will be very large,// e.g. 2^33 - 1. Since this comes before the data we loaded previously, we want to adjust// the time stamp to be `value - 2^33`.direction = -1;} // Note: A seek forwards or back that is greater than the RO_THRESH (2^32, ~13 hours) will// cause an incorrect adjustment.while (Math.abs(reference - value) > RO_THRESH) {value += direction * MAX_TS;}return value;};var TimestampRolloverStream$1 = function (type) {var lastDTS, referenceDTS;TimestampRolloverStream$1.prototype.init.call(this); // The "shared" type is used in cases where a stream will contain muxed// video and audio. We could use `undefined` here, but having a string// makes debugging a little clearer.this.type_ = type || TYPE_SHARED;this.push = function (data) {/*** Rollover stream expects data from elementary stream.* Elementary stream can push forward 2 types of data* - Parsed Video/Audio/Timed-metadata PES (packetized elementary stream) packets* - Tracks metadata from PMT (Program Map Table)* Rollover stream expects pts/dts info to be available, since it stores lastDTS* We should ignore non-PES packets since they may override lastDTS to undefined.* lastDTS is important to signal the next segments* about rollover from the previous segments.*/if (data.type === 'metadata') {this.trigger('data', data);return;} // Any "shared" rollover streams will accept _all_ data. 
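// Worked sketch of handleRollover$1 above: a reference DTS of 1000 with an
// incoming DTS just below the 33-bit limit is more than RO_THRESH away, so one
// subtraction of MAX_TS (2^33) maps it back next to the reference.
var rolloverSketch = 8589934000 - 8589934592; // === -592, i.e. handleRollover$1(8589934000, 1000)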
Otherwise,// streams will only accept data that matches their type.if (this.type_ !== TYPE_SHARED && data.type !== this.type_) {return;}if (referenceDTS === undefined) {referenceDTS = data.dts;}data.dts = handleRollover$1(data.dts, referenceDTS);data.pts = handleRollover$1(data.pts, referenceDTS);lastDTS = data.dts;this.trigger('data', data);};this.flush = function () {referenceDTS = lastDTS;this.trigger('done');};this.endTimeline = function () {this.flush();this.trigger('endedtimeline');};this.discontinuity = function () {referenceDTS = void 0;lastDTS = void 0;};this.reset = function () {this.discontinuity();this.trigger('reset');};};TimestampRolloverStream$1.prototype = new Stream$6();var timestampRolloverStream = {TimestampRolloverStream: TimestampRolloverStream$1,handleRollover: handleRollover$1}; // Once IE11 support is dropped, this function should be removed.var typedArrayIndexOf$1 = (typedArray, element, fromIndex) => {if (!typedArray) {return -1;}var currentIndex = fromIndex;for (; currentIndex < typedArray.length; currentIndex++) {if (typedArray[currentIndex] === element) {return currentIndex;}}return -1;};var typedArray = {typedArrayIndexOf: typedArrayIndexOf$1};/*** mux.js** Copyright (c) Brightcove* Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE** Tools for parsing ID3 frame data* @see http://id3.org/id3v2.3.0*/var typedArrayIndexOf = typedArray.typedArrayIndexOf,// Frames that allow different types of text encoding contain a text// encoding description byte [ID3v2.4.0 section 4.]textEncodingDescriptionByte = {Iso88591: 0x00,// ISO-8859-1, terminated with \0.Utf16: 0x01,// UTF-16 encoded Unicode BOM, terminated with \0\0Utf16be: 0x02,// UTF-16BE encoded Unicode, without BOM, terminated with \0\0Utf8: 0x03 // UTF-8 encoded Unicode, terminated with \0},// return a percent-encoded representation of the specified byte range// @see http://en.wikipedia.org/wiki/Percent-encodingpercentEncode$1 = function (bytes, start, end) {var i,result = '';for (i = start; i < end; i++) {result += '%' + ('00' + bytes[i].toString(16)).slice(-2);}return result;},// return the string representation of the specified byte range,// interpreted as UTf-8.parseUtf8 = function (bytes, start, end) {return decodeURIComponent(percentEncode$1(bytes, start, end));},// return the string representation of the specified byte range,// interpreted as ISO-8859-1.parseIso88591$1 = function (bytes, start, end) {return unescape(percentEncode$1(bytes, start, end)); // jshint ignore:line},parseSyncSafeInteger$1 = function (data) {return data[0] << 21 | data[1] << 14 | data[2] << 7 | data[3];},frameParsers = {'APIC': function (frame) {var i = 1,mimeTypeEndIndex,descriptionEndIndex,LINK_MIME_TYPE = '-->';if (frame.data[0] !== textEncodingDescriptionByte.Utf8) {// ignore frames with unrecognized character encodingsreturn;} // parsing fields [ID3v2.4.0 section 4.14.]mimeTypeEndIndex = typedArrayIndexOf(frame.data, 0, i);if (mimeTypeEndIndex < 0) {// malformed framereturn;} // parsing Mime type field (terminated with \0)frame.mimeType = parseIso88591$1(frame.data, i, mimeTypeEndIndex);i = mimeTypeEndIndex + 1; // parsing 1-byte Picture Type fieldframe.pictureType = frame.data[i];i++;descriptionEndIndex = typedArrayIndexOf(frame.data, 0, i);if (descriptionEndIndex < 0) {// malformed framereturn;} // parsing Description field (terminated with \0)frame.description = parseUtf8(frame.data, i, descriptionEndIndex);i = descriptionEndIndex + 1;if (frame.mimeType === LINK_MIME_TYPE) {// parsing Picture Data 
field as URL (always represented as ISO-8859-1 [ID3v2.4.0 section 4.])frame.url = parseIso88591$1(frame.data, i, frame.data.length);} else {// parsing Picture Data field as binary dataframe.pictureData = frame.data.subarray(i, frame.data.length);}},'T*': function (frame) {if (frame.data[0] !== textEncodingDescriptionByte.Utf8) {// ignore frames with unrecognized character encodingsreturn;} // parse text field, do not include null terminator in the frame value// frames that allow different types of encoding contain terminated text [ID3v2.4.0 section 4.]frame.value = parseUtf8(frame.data, 1, frame.data.length).replace(/\0*$/, ''); // text information frames supports multiple strings, stored as a terminator separated list [ID3v2.4.0 section 4.2.]frame.values = frame.value.split('\0');},'TXXX': function (frame) {var descriptionEndIndex;if (frame.data[0] !== textEncodingDescriptionByte.Utf8) {// ignore frames with unrecognized character encodingsreturn;}descriptionEndIndex = typedArrayIndexOf(frame.data, 0, 1);if (descriptionEndIndex === -1) {return;} // parse the text fieldsframe.description = parseUtf8(frame.data, 1, descriptionEndIndex); // do not include the null terminator in the tag value// frames that allow different types of encoding contain terminated text// [ID3v2.4.0 section 4.]frame.value = parseUtf8(frame.data, descriptionEndIndex + 1, frame.data.length).replace(/\0*$/, '');frame.data = frame.value;},'W*': function (frame) {// parse URL field; URL fields are always represented as ISO-8859-1 [ID3v2.4.0 section 4.]// if the value is followed by a string termination all the following information should be ignored [ID3v2.4.0 section 4.3]frame.url = parseIso88591$1(frame.data, 0, frame.data.length).replace(/\0.*$/, '');},'WXXX': function (frame) {var descriptionEndIndex;if (frame.data[0] !== textEncodingDescriptionByte.Utf8) {// ignore frames with unrecognized character encodingsreturn;}descriptionEndIndex = typedArrayIndexOf(frame.data, 0, 1);if (descriptionEndIndex === -1) {return;} // parse the description and URL fieldsframe.description = parseUtf8(frame.data, 1, descriptionEndIndex); // URL fields are always represented as ISO-8859-1 [ID3v2.4.0 section 4.]// if the value is followed by a string termination all the following information// should be ignored [ID3v2.4.0 section 4.3]frame.url = parseIso88591$1(frame.data, descriptionEndIndex + 1, frame.data.length).replace(/\0.*$/, '');},'PRIV': function (frame) {var i;for (i = 0; i < frame.data.length; i++) {if (frame.data[i] === 0) {// parse the description and URL fieldsframe.owner = parseIso88591$1(frame.data, 0, i);break;}}frame.privateData = frame.data.subarray(i + 1);frame.data = frame.privateData;}};var parseId3Frames$1 = function (data) {var frameSize,frameHeader,frameStart = 10,tagSize = 0,frames = []; // If we don't have enough data for a header, 10 bytes,// or 'ID3' in the first 3 bytes this is not a valid ID3 tag.if (data.length < 10 || data[0] !== 'I'.charCodeAt(0) || data[1] !== 'D'.charCodeAt(0) || data[2] !== '3'.charCodeAt(0)) {return;} // the frame size is transmitted as a 28-bit integer in the// last four bytes of the ID3 header.// The most significant bit of each byte is dropped and the// results concatenated to recover the actual value.tagSize = parseSyncSafeInteger$1(data.subarray(6, 10)); // ID3 reports the tag size excluding the header but it's more// convenient for our comparisons to include ittagSize += 10; // check bit 6 of byte 5 for the extended header flag.var hasExtendedHeader = data[5] & 0x40;if 
(hasExtendedHeader) {// advance the frame start past the extended headerframeStart += 4; // header size fieldframeStart += parseSyncSafeInteger$1(data.subarray(10, 14));tagSize -= parseSyncSafeInteger$1(data.subarray(16, 20)); // clip any padding off the end} // parse one or more ID3 frames// http://id3.org/id3v2.3.0#ID3v2_frame_overviewdo {// determine the number of bytes in this frameframeSize = parseSyncSafeInteger$1(data.subarray(frameStart + 4, frameStart + 8));if (frameSize < 1) {break;}frameHeader = String.fromCharCode(data[frameStart], data[frameStart + 1], data[frameStart + 2], data[frameStart + 3]);var frame = {id: frameHeader,data: data.subarray(frameStart + 10, frameStart + frameSize + 10)};frame.key = frame.id; // parse frame valuesif (frameParsers[frame.id]) {// use frame specific parserframeParsers[frame.id](frame);} else if (frame.id[0] === 'T') {// use text frame generic parserframeParsers['T*'](frame);} else if (frame.id[0] === 'W') {// use URL link frame generic parserframeParsers['W*'](frame);}frames.push(frame);frameStart += 10; // advance past the frame headerframeStart += frameSize; // advance past the frame body} while (frameStart < tagSize);return frames;};var parseId3 = {parseId3Frames: parseId3Frames$1,parseSyncSafeInteger: parseSyncSafeInteger$1,frameParsers: frameParsers};/*** mux.js** Copyright (c) Brightcove* Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE** Accepts program elementary stream (PES) data events and parses out* ID3 metadata from them, if present.* @see http://id3.org/id3v2.3.0*/var Stream$5 = stream,StreamTypes$3 = streamTypes,id3 = parseId3,MetadataStream;MetadataStream = function (options) {var settings = {// the bytes of the program-level descriptor field in MP2T// see ISO/IEC 13818-1:2013 (E), section 2.6 "Program and// program element descriptors"descriptor: options && options.descriptor},// the total size in bytes of the ID3 tag being parsedtagSize = 0,// tag data that is not complete enough to be parsedbuffer = [],// the total number of bytes currently in the bufferbufferSize = 0,i;MetadataStream.prototype.init.call(this); // calculate the text track in-band metadata track dispatch type// https://html.spec.whatwg.org/multipage/embedded-content.html#steps-to-expose-a-media-resource-specific-text-trackthis.dispatchType = StreamTypes$3.METADATA_STREAM_TYPE.toString(16);if (settings.descriptor) {for (i = 0; i < settings.descriptor.length; i++) {this.dispatchType += ('00' + settings.descriptor[i].toString(16)).slice(-2);}}this.push = function (chunk) {var tag, frameStart, frameSize, frame, i, frameHeader;if (chunk.type !== 'timed-metadata') {return;} // if data_alignment_indicator is set in the PES header,// we must have the start of a new ID3 tag. 
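// Worked sketch of the sync-safe size math used above and again below: each of
// the four size bytes contributes 7 bits, and the 10-byte ID3 header is added
// back so the total can be compared against buffered byte counts.
var syncSafeTagSizeSketch = ((0x00 << 21) | (0x00 << 14) | (0x02 << 7) | 0x01) + 10; // === 267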
Assume anything// remaining in the buffer was malformed and throw it outif (chunk.dataAlignmentIndicator) {bufferSize = 0;buffer.length = 0;} // ignore events that don't look like ID3 dataif (buffer.length === 0 && (chunk.data.length < 10 || chunk.data[0] !== 'I'.charCodeAt(0) || chunk.data[1] !== 'D'.charCodeAt(0) || chunk.data[2] !== '3'.charCodeAt(0))) {this.trigger('log', {level: 'warn',message: 'Skipping unrecognized metadata packet'});return;} // add this chunk to the data we've collected so farbuffer.push(chunk);bufferSize += chunk.data.byteLength; // grab the size of the entire frame from the ID3 headerif (buffer.length === 1) {// the frame size is transmitted as a 28-bit integer in the// last four bytes of the ID3 header.// The most significant bit of each byte is dropped and the// results concatenated to recover the actual value.tagSize = id3.parseSyncSafeInteger(chunk.data.subarray(6, 10)); // ID3 reports the tag size excluding the header but it's more// convenient for our comparisons to include ittagSize += 10;} // if the entire frame has not arrived, wait for more dataif (bufferSize < tagSize) {return;} // collect the entire frame so it can be parsedtag = {data: new Uint8Array(tagSize),frames: [],pts: buffer[0].pts,dts: buffer[0].dts};for (i = 0; i < tagSize;) {tag.data.set(buffer[0].data.subarray(0, tagSize - i), i);i += buffer[0].data.byteLength;bufferSize -= buffer[0].data.byteLength;buffer.shift();} // find the start of the first frame and the end of the tagframeStart = 10;if (tag.data[5] & 0x40) {// advance the frame start past the extended headerframeStart += 4; // header size fieldframeStart += id3.parseSyncSafeInteger(tag.data.subarray(10, 14)); // clip any padding off the endtagSize -= id3.parseSyncSafeInteger(tag.data.subarray(16, 20));} // parse one or more ID3 frames// http://id3.org/id3v2.3.0#ID3v2_frame_overviewdo {// determine the number of bytes in this frameframeSize = id3.parseSyncSafeInteger(tag.data.subarray(frameStart + 4, frameStart + 8));if (frameSize < 1) {this.trigger('log', {level: 'warn',message: 'Malformed ID3 frame encountered. 
Skipping remaining metadata parsing.'}); // If the frame is malformed, don't parse any further frames but allow previous valid parsed frames// to be sent along.break;}frameHeader = String.fromCharCode(tag.data[frameStart], tag.data[frameStart + 1], tag.data[frameStart + 2], tag.data[frameStart + 3]);frame = {id: frameHeader,data: tag.data.subarray(frameStart + 10, frameStart + frameSize + 10)};frame.key = frame.id; // parse frame valuesif (id3.frameParsers[frame.id]) {// use frame specific parserid3.frameParsers[frame.id](frame);} else if (frame.id[0] === 'T') {// use text frame generic parserid3.frameParsers['T*'](frame);} else if (frame.id[0] === 'W') {// use URL link frame generic parserid3.frameParsers['W*'](frame);} // handle the special PRIV frame used to indicate the start// time for raw AAC dataif (frame.owner === 'com.apple.streaming.transportStreamTimestamp') {var d = frame.data,size = (d[3] & 0x01) << 30 | d[4] << 22 | d[5] << 14 | d[6] << 6 | d[7] >>> 2;size *= 4;size += d[7] & 0x03;frame.timeStamp = size; // in raw AAC, all subsequent data will be timestamped based// on the value of this frame// we couldn't have known the appropriate pts and dts before// parsing this ID3 tag so set those values nowif (tag.pts === undefined && tag.dts === undefined) {tag.pts = frame.timeStamp;tag.dts = frame.timeStamp;}this.trigger('timestamp', frame);}tag.frames.push(frame);frameStart += 10; // advance past the frame headerframeStart += frameSize; // advance past the frame body} while (frameStart < tagSize);this.trigger('data', tag);};};MetadataStream.prototype = new Stream$5();var metadataStream = MetadataStream;/*** mux.js** Copyright (c) Brightcove* Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE** A stream-based mp2t to mp4 converter. 
This utility can be used to* deliver mp4s to a SourceBuffer on platforms that support native* Media Source Extensions.*/var Stream$4 = stream,CaptionStream$1 = captionStream,StreamTypes$2 = streamTypes,TimestampRolloverStream = timestampRolloverStream.TimestampRolloverStream; // object typesvar TransportPacketStream, TransportParseStream, ElementaryStream; // constantsvar MP2T_PACKET_LENGTH$1 = 188,// bytesSYNC_BYTE$1 = 0x47;/*** Splits an incoming stream of binary data into MPEG-2 Transport* Stream packets.*/TransportPacketStream = function () {var buffer = new Uint8Array(MP2T_PACKET_LENGTH$1),bytesInBuffer = 0;TransportPacketStream.prototype.init.call(this); // Deliver new bytes to the stream./*** Split a stream of data into M2TS packets**/this.push = function (bytes) {var startIndex = 0,endIndex = MP2T_PACKET_LENGTH$1,everything; // If there are bytes remaining from the last segment, prepend them to the// bytes that were pushed inif (bytesInBuffer) {everything = new Uint8Array(bytes.byteLength + bytesInBuffer);everything.set(buffer.subarray(0, bytesInBuffer));everything.set(bytes, bytesInBuffer);bytesInBuffer = 0;} else {everything = bytes;} // While we have enough data for a packetwhile (endIndex < everything.byteLength) {// Look for a pair of start and end sync bytes in the data..if (everything[startIndex] === SYNC_BYTE$1 && everything[endIndex] === SYNC_BYTE$1) {// We found a packet so emit it and jump one whole packet forward in// the streamthis.trigger('data', everything.subarray(startIndex, endIndex));startIndex += MP2T_PACKET_LENGTH$1;endIndex += MP2T_PACKET_LENGTH$1;continue;} // If we get here, we have somehow become de-synchronized and we need to step// forward one byte at a time until we find a pair of sync bytes that denote// a packetstartIndex++;endIndex++;} // If there was some data left over at the end of the segment that couldn't// possibly be a whole packet, keep it because it might be the start of a packet// that continues in the next segmentif (startIndex < everything.byteLength) {buffer.set(everything.subarray(startIndex), 0);bytesInBuffer = everything.byteLength - startIndex;}};/*** Passes identified M2TS packets to the TransportParseStream to be parsed**/this.flush = function () {// If the buffer contains a whole packet when we are being flushed, emit it// and empty the buffer. Otherwise hold onto the data because it may be// important for decoding the next segmentif (bytesInBuffer === MP2T_PACKET_LENGTH$1 && buffer[0] === SYNC_BYTE$1) {this.trigger('data', buffer);bytesInBuffer = 0;}this.trigger('done');};this.endTimeline = function () {this.flush();this.trigger('endedtimeline');};this.reset = function () {bytesInBuffer = 0;this.trigger('reset');};};TransportPacketStream.prototype = new Stream$4();/*** Accepts an MP2T TransportPacketStream and emits data events with parsed* forms of the individual transport stream packets.*/TransportParseStream = function () {var parsePsi, parsePat, parsePmt, self;TransportParseStream.prototype.init.call(this);self = this;this.packetsWaitingForPmt = [];this.programMapTable = undefined;parsePsi = function (payload, psi) {var offset = 0; // PSI packets may be split into multiple sections and those// sections may be split into multiple packets. 
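// Standalone sketch of the pointer_field handling described in this comment
// (helper name is illustrative): when payload_unit_start_indicator is set, the
// first payload byte counts how many bytes to skip before the section begins.
var psiSectionOffsetSketch = function (payload, payloadUnitStartIndicator) {
  var offset = 0;
  if (payloadUnitStartIndicator) {
    offset += payload[offset] + 1; // skip the pointer_field byte plus any stuffing
  }
  return offset; // index of the section's table_id byte
};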
If a PSI// section starts in this packet, the payload_unit_start_indicator// will be true and the first byte of the payload will indicate// the offset from the current position to the start of the// section.if (psi.payloadUnitStartIndicator) {offset += payload[offset] + 1;}if (psi.type === 'pat') {parsePat(payload.subarray(offset), psi);} else {parsePmt(payload.subarray(offset), psi);}};parsePat = function (payload, pat) {pat.section_number = payload[7]; // eslint-disable-line camelcasepat.last_section_number = payload[8]; // eslint-disable-line camelcase// skip the PSI header and parse the first PMT entryself.pmtPid = (payload[10] & 0x1F) << 8 | payload[11];pat.pmtPid = self.pmtPid;};/*** Parse out the relevant fields of a Program Map Table (PMT).* @param payload {Uint8Array} the PMT-specific portion of an MP2T* packet. The first byte in this array should be the table_id* field.* @param pmt {object} the object that should be decorated with* fields parsed from the PMT.*/parsePmt = function (payload, pmt) {var sectionLength, tableEnd, programInfoLength, offset; // PMTs can be sent ahead of the time when they should actually// take effect. We don't believe this should ever be the case// for HLS but we'll ignore "forward" PMT declarations if we see// them. Future PMT declarations have the current_next_indicator// set to zero.if (!(payload[5] & 0x01)) {return;} // overwrite any existing program map tableself.programMapTable = {video: null,audio: null,'timed-metadata': {}}; // the mapping table ends at the end of the current sectionsectionLength = (payload[1] & 0x0f) << 8 | payload[2];tableEnd = 3 + sectionLength - 4; // to determine where the table is, we have to figure out how// long the program info descriptors areprogramInfoLength = (payload[10] & 0x0f) << 8 | payload[11]; // advance the offset to the first entry in the mapping tableoffset = 12 + programInfoLength;while (offset < tableEnd) {var streamType = payload[offset];var pid = (payload[offset + 1] & 0x1F) << 8 | payload[offset + 2]; // only map a single elementary_pid for audio and video stream types// TODO: should this be done for metadata too? for now maintain behavior of// multiple metadata streamsif (streamType === StreamTypes$2.H264_STREAM_TYPE && self.programMapTable.video === null) {self.programMapTable.video = pid;} else if (streamType === StreamTypes$2.ADTS_STREAM_TYPE && self.programMapTable.audio === null) {self.programMapTable.audio = pid;} else if (streamType === StreamTypes$2.METADATA_STREAM_TYPE) {// map pid to stream type for metadata streamsself.programMapTable['timed-metadata'][pid] = streamType;} // move to the next table entry// skip past the elementary stream descriptors, if presentoffset += ((payload[offset + 3] & 0x0F) << 8 | payload[offset + 4]) + 5;} // record the map on the packet as wellpmt.programMapTable = self.programMapTable;};/*** Deliver a new MP2T packet to the next stream in the pipeline.*/this.push = function (packet) {var result = {},offset = 4;result.payloadUnitStartIndicator = !!(packet[1] & 0x40); // pid is a 13-bit field starting at the last bit of packet[1]result.pid = packet[1] & 0x1f;result.pid <<= 8;result.pid |= packet[2]; // if an adaption field is present, its length is specified by the// fifth byte of the TS packet header. 
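// Sketch of the 13-bit PID assembly just above, with example header bytes
// 0x41 and 0x00: the PID spans the low 5 bits of byte 1 and all of byte 2.
var pidSketch = (0x41 & 0x1f) << 8 | 0x00; // === 0x100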
The adaptation field is// used to add stuffing to PES packets that don't fill a complete// TS packet, and to specify some forms of timing and control data// that we do not currently use.if ((packet[3] & 0x30) >>> 4 > 0x01) {offset += packet[offset] + 1;} // parse the rest of the packet based on the typeif (result.pid === 0) {result.type = 'pat';parsePsi(packet.subarray(offset), result);this.trigger('data', result);} else if (result.pid === this.pmtPid) {result.type = 'pmt';parsePsi(packet.subarray(offset), result);this.trigger('data', result); // if there are any packets waiting for a PMT to be found, process them nowwhile (this.packetsWaitingForPmt.length) {this.processPes_.apply(this, this.packetsWaitingForPmt.shift());}} else if (this.programMapTable === undefined) {// When we have not seen a PMT yet, defer further processing of// PES packets until one has been parsedthis.packetsWaitingForPmt.push([packet, offset, result]);} else {this.processPes_(packet, offset, result);}};this.processPes_ = function (packet, offset, result) {// set the appropriate stream typeif (result.pid === this.programMapTable.video) {result.streamType = StreamTypes$2.H264_STREAM_TYPE;} else if (result.pid === this.programMapTable.audio) {result.streamType = StreamTypes$2.ADTS_STREAM_TYPE;} else {// if not video or audio, it is timed-metadata or unknown// if unknown, streamType will be undefinedresult.streamType = this.programMapTable['timed-metadata'][result.pid];}result.type = 'pes';result.data = packet.subarray(offset);this.trigger('data', result);};};TransportParseStream.prototype = new Stream$4();TransportParseStream.STREAM_TYPES = {h264: 0x1b,adts: 0x0f};/*** Reconsistutes program elementary stream (PES) packets from parsed* transport stream packets. That is, if you pipe an* mp2t.TransportParseStream into a mp2t.ElementaryStream, the output* events will be events which capture the bytes for individual PES* packets plus relevant metadata that has been extracted from the* container.*/ElementaryStream = function () {var self = this,segmentHadPmt = false,// PES packet fragmentsvideo = {data: [],size: 0},audio = {data: [],size: 0},timedMetadata = {data: [],size: 0},programMapTable,parsePes = function (payload, pes) {var ptsDtsFlags;const startPrefix = payload[0] << 16 | payload[1] << 8 | payload[2]; // default to an empty arraypes.data = new Uint8Array(); // In certain live streams, the start of a TS fragment has ts packets// that are frame data that is continuing from the previous fragment. This// is to check that the pes data is the start of a new pes payloadif (startPrefix !== 1) {return;} // get the packet length, this will be 0 for videopes.packetLength = 6 + (payload[4] << 8 | payload[5]); // find out if this packets starts a new keyframepes.dataAlignmentIndicator = (payload[6] & 0x04) !== 0; // PES packets may be annotated with a PTS value, or a PTS value// and a DTS value. Determine what combination of values is// available to work with.ptsDtsFlags = payload[7]; // PTS and DTS are normally stored as a 33-bit number. Javascript// performs all bitwise operations on 32-bit integers but javascript// supports a much greater range (52-bits) of integer using standard// mathematical operations.// We construct a 31-bit value using bitwise operators over the 31// most significant bits and then multiply by 4 (equal to a left-shift// of 2) before we add the final 2 least significant bits of the// timestamp (equal to an OR.)if (ptsDtsFlags & 0xC0) {// the PTS and DTS are not written out directly. 
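// Worked sketch of the 33-bit reconstruction below: the upper 31 bits are
// assembled with 32-bit bitwise ops, then scaled by 4 and the last two bits
// added with plain arithmetic so the full 33-bit value survives. For example,
// upper bits equal to 2^30 with low bits 01 give
var ptsSketch = 1073741824 * 4 + 1; // === 4294967297, beyond 32-bit bitwise range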
For information// on how they are encoded, see// http://dvd.sourceforge.net/dvdinfo/pes-hdr.htmlpes.pts = (payload[9] & 0x0E) << 27 | (payload[10] & 0xFF) << 20 | (payload[11] & 0xFE) << 12 | (payload[12] & 0xFF) << 5 | (payload[13] & 0xFE) >>> 3;pes.pts *= 4; // Left shift by 2pes.pts += (payload[13] & 0x06) >>> 1; // OR by the two LSBspes.dts = pes.pts;if (ptsDtsFlags & 0x40) {pes.dts = (payload[14] & 0x0E) << 27 | (payload[15] & 0xFF) << 20 | (payload[16] & 0xFE) << 12 | (payload[17] & 0xFF) << 5 | (payload[18] & 0xFE) >>> 3;pes.dts *= 4; // Left shift by 2pes.dts += (payload[18] & 0x06) >>> 1; // OR by the two LSBs}} // the data section starts immediately after the PES header.// pes_header_data_length specifies the number of header bytes// that follow the last byte of the field.pes.data = payload.subarray(9 + payload[8]);},/*** Pass completely parsed PES packets to the next stream in the pipeline**/flushStream = function (stream, type, forceFlush) {var packetData = new Uint8Array(stream.size),event = {type: type},i = 0,offset = 0,packetFlushable = false,fragment; // do nothing if there is not enough buffered data for a complete// PES headerif (!stream.data.length || stream.size < 9) {return;}event.trackId = stream.data[0].pid; // reassemble the packetfor (i = 0; i < stream.data.length; i++) {fragment = stream.data[i];packetData.set(fragment.data, offset);offset += fragment.data.byteLength;} // parse assembled packet's PES headerparsePes(packetData, event); // non-video PES packets MUST have a non-zero PES_packet_length// check that there is enough stream data to fill the packetpacketFlushable = type === 'video' || event.packetLength <= stream.size; // flush pending packets if the conditions are rightif (forceFlush || packetFlushable) {stream.size = 0;stream.data.length = 0;} // only emit packets that are complete. 
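// Sketch of the flush rule applied here: video PES packets advertise a
// PES_packet_length of 0, so they are treated as flushable once a header is
// buffered; other streams wait until the buffered bytes cover the advertised length.
var packetFlushableSketch = function (type, packetLength, bufferedSize) {
  return type === 'video' || packetLength <= bufferedSize;
};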
this is to avoid assembling// incomplete PES packets due to poor segmentationif (packetFlushable) {self.trigger('data', event);}};ElementaryStream.prototype.init.call(this);/*** Identifies M2TS packet types and parses PES packets using metadata* parsed from the PMT**/this.push = function (data) {({pat: function () {// we have to wait for the PMT to arrive as well before we// have any meaningful metadata},pes: function () {var stream, streamType;switch (data.streamType) {case StreamTypes$2.H264_STREAM_TYPE:stream = video;streamType = 'video';break;case StreamTypes$2.ADTS_STREAM_TYPE:stream = audio;streamType = 'audio';break;case StreamTypes$2.METADATA_STREAM_TYPE:stream = timedMetadata;streamType = 'timed-metadata';break;default:// ignore unknown stream typesreturn;} // if a new packet is starting, we can flush the completed// packetif (data.payloadUnitStartIndicator) {flushStream(stream, streamType, true);} // buffer this fragment until we are sure we've received the// complete payloadstream.data.push(data);stream.size += data.data.byteLength;},pmt: function () {var event = {type: 'metadata',tracks: []};programMapTable = data.programMapTable; // translate audio and video streams to tracksif (programMapTable.video !== null) {event.tracks.push({timelineStartInfo: {baseMediaDecodeTime: 0},id: +programMapTable.video,codec: 'avc',type: 'video'});}if (programMapTable.audio !== null) {event.tracks.push({timelineStartInfo: {baseMediaDecodeTime: 0},id: +programMapTable.audio,codec: 'adts',type: 'audio'});}segmentHadPmt = true;self.trigger('data', event);}})[data.type]();};this.reset = function () {video.size = 0;video.data.length = 0;audio.size = 0;audio.data.length = 0;this.trigger('reset');};/*** Flush any remaining input. Video PES packets may be of variable* length. Normally, the start of a new video packet can trigger the* finalization of the previous packet. That is not possible if no* more video is forthcoming, however. In that case, some other* mechanism (like the end of the file) has to be employed. When it is* clear that no additional data is forthcoming, calling this method* will flush the buffered packets.*/this.flushStreams_ = function () {// !!THIS ORDER IS IMPORTANT!!// video first then audioflushStream(video, 'video');flushStream(audio, 'audio');flushStream(timedMetadata, 'timed-metadata');};this.flush = function () {// if on flush we haven't had a pmt emitted// and we have a pmt to emit. 
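// Standalone sketch mirroring the PMT-to-track translation used by the pmt
// handler above and repeated in this flush (timelineStartInfo omitted for brevity):
var tracksFromPmtSketch = function (table) {
  var tracks = [];
  if (table.video !== null) {
    tracks.push({ id: +table.video, codec: 'avc', type: 'video' });
  }
  if (table.audio !== null) {
    tracks.push({ id: +table.audio, codec: 'adts', type: 'audio' });
  }
  return tracks;
}; // e.g. { video: 256, audio: 257 } -> a video track (id 256) and an audio track (id 257)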
emit the pmt// so that we trigger a trackinfo downstream.if (!segmentHadPmt && programMapTable) {var pmt = {type: 'metadata',tracks: []}; // translate audio and video streams to tracksif (programMapTable.video !== null) {pmt.tracks.push({timelineStartInfo: {baseMediaDecodeTime: 0},id: +programMapTable.video,codec: 'avc',type: 'video'});}if (programMapTable.audio !== null) {pmt.tracks.push({timelineStartInfo: {baseMediaDecodeTime: 0},id: +programMapTable.audio,codec: 'adts',type: 'audio'});}self.trigger('data', pmt);}segmentHadPmt = false;this.flushStreams_();this.trigger('done');};};ElementaryStream.prototype = new Stream$4();var m2ts$1 = {PAT_PID: 0x0000,MP2T_PACKET_LENGTH: MP2T_PACKET_LENGTH$1,TransportPacketStream: TransportPacketStream,TransportParseStream: TransportParseStream,ElementaryStream: ElementaryStream,TimestampRolloverStream: TimestampRolloverStream,CaptionStream: CaptionStream$1.CaptionStream,Cea608Stream: CaptionStream$1.Cea608Stream,Cea708Stream: CaptionStream$1.Cea708Stream,MetadataStream: metadataStream};for (var type in StreamTypes$2) {if (StreamTypes$2.hasOwnProperty(type)) {m2ts$1[type] = StreamTypes$2[type];}}var m2ts_1 = m2ts$1;/*** mux.js** Copyright (c) Brightcove* Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE*/var Stream$3 = stream;var ONE_SECOND_IN_TS$2 = clock$2.ONE_SECOND_IN_TS;var AdtsStream$1;var ADTS_SAMPLING_FREQUENCIES$1 = [96000, 88200, 64000, 48000, 44100, 32000, 24000, 22050, 16000, 12000, 11025, 8000, 7350];/** Accepts a ElementaryStream and emits data events with parsed* AAC Audio Frames of the individual packets. Input audio in ADTS* format is unpacked and re-emitted as AAC frames.** @see http://wiki.multimedia.cx/index.php?title=ADTS* @see http://wiki.multimedia.cx/?title=Understanding_AAC*/AdtsStream$1 = function (handlePartialSegments) {var buffer,frameNum = 0;AdtsStream$1.prototype.init.call(this);this.skipWarn_ = function (start, end) {this.trigger('log', {level: 'warn',message: `adts skiping bytes ${start} to ${end} in frame ${frameNum} outside syncword`});};this.push = function (packet) {var i = 0,frameLength,protectionSkipBytes,oldBuffer,sampleCount,adtsFrameDuration;if (!handlePartialSegments) {frameNum = 0;}if (packet.type !== 'audio') {// ignore non-audio datareturn;} // Prepend any data in the buffer to the input data so that we can parse// aac frames the cross a PES packet boundaryif (buffer && buffer.length) {oldBuffer = buffer;buffer = new Uint8Array(oldBuffer.byteLength + packet.data.byteLength);buffer.set(oldBuffer);buffer.set(packet.data, oldBuffer.byteLength);} else {buffer = packet.data;} // unpack any ADTS frames which have been fully received// for details on the ADTS header, see http://wiki.multimedia.cx/index.php?title=ADTSvar skip; // We use i + 7 here because we want to be able to parse the entire header.// If we don't have enough bytes to do that, then we definitely won't have a full frame.while (i + 7 < buffer.length) {// Look for the start of an ADTS header..if (buffer[i] !== 0xFF || (buffer[i + 1] & 0xF6) !== 0xF0) {if (typeof skip !== 'number') {skip = i;} // If a valid header was not found, jump one forward and attempt to// find a valid ADTS header starting at the next bytei++;continue;}if (typeof skip === 'number') {this.skipWarn_(skip, i);skip = null;} // The protection skip bit tells us if we have 2 bytes of CRC data at the// end of the ADTS headerprotectionSkipBytes = (~buffer[i + 1] & 0x01) * 2; // Frame length is a 13 bit integer starting 16 bits from the// end of the sync sequence// 
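// Worked sketch of the fields parsed below (assuming the 90 kHz MPEG-TS clock,
// i.e. ONE_SECOND_IN_TS$2 === 90000): one raw data block is 1024 samples, so a
// single-block frame at 44.1 kHz spans roughly
var adtsFrameDurationSketch = 1024 * 90000 / 44100; // ≈ 2089.8 clock ticks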
NOTE: frame length includes the size of the headerframeLength = (buffer[i + 3] & 0x03) << 11 | buffer[i + 4] << 3 | (buffer[i + 5] & 0xe0) >> 5;sampleCount = ((buffer[i + 6] & 0x03) + 1) * 1024;adtsFrameDuration = sampleCount * ONE_SECOND_IN_TS$2 / ADTS_SAMPLING_FREQUENCIES$1[(buffer[i + 2] & 0x3c) >>> 2]; // If we don't have enough data to actually finish this ADTS frame,// then we have to wait for more dataif (buffer.byteLength - i < frameLength) {break;} // Otherwise, deliver the complete AAC framethis.trigger('data', {pts: packet.pts + frameNum * adtsFrameDuration,dts: packet.dts + frameNum * adtsFrameDuration,sampleCount: sampleCount,audioobjecttype: (buffer[i + 2] >>> 6 & 0x03) + 1,channelcount: (buffer[i + 2] & 1) << 2 | (buffer[i + 3] & 0xc0) >>> 6,samplerate: ADTS_SAMPLING_FREQUENCIES$1[(buffer[i + 2] & 0x3c) >>> 2],samplingfrequencyindex: (buffer[i + 2] & 0x3c) >>> 2,// assume ISO/IEC 14496-12 AudioSampleEntry default of 16samplesize: 16,// data is the frame without it's headerdata: buffer.subarray(i + 7 + protectionSkipBytes, i + frameLength)});frameNum++;i += frameLength;}if (typeof skip === 'number') {this.skipWarn_(skip, i);skip = null;} // remove processed bytes from the buffer.buffer = buffer.subarray(i);};this.flush = function () {frameNum = 0;this.trigger('done');};this.reset = function () {buffer = void 0;this.trigger('reset');};this.endTimeline = function () {buffer = void 0;this.trigger('endedtimeline');};};AdtsStream$1.prototype = new Stream$3();var adts = AdtsStream$1;/*** mux.js** Copyright (c) Brightcove* Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE*/var ExpGolomb$1;/*** Parser for exponential Golomb codes, a variable-bitwidth number encoding* scheme used by h264.*/ExpGolomb$1 = function (workingData) {var// the number of bytes left to examine in workingDataworkingBytesAvailable = workingData.byteLength,// the current word being examinedworkingWord = 0,// :uint// the number of bits left to examine in the current wordworkingBitsAvailable = 0; // :uint;// ():uintthis.length = function () {return 8 * workingBytesAvailable;}; // ():uintthis.bitsAvailable = function () {return 8 * workingBytesAvailable + workingBitsAvailable;}; // ():voidthis.loadWord = function () {var position = workingData.byteLength - workingBytesAvailable,workingBytes = new Uint8Array(4),availableBytes = Math.min(4, workingBytesAvailable);if (availableBytes === 0) {throw new Error('no bytes available');}workingBytes.set(workingData.subarray(position, position + availableBytes));workingWord = new DataView(workingBytes.buffer).getUint32(0); // track the amount of workingData that has been processedworkingBitsAvailable = availableBytes * 8;workingBytesAvailable -= availableBytes;}; // (count:int):voidthis.skipBits = function (count) {var skipBytes; // :intif (workingBitsAvailable > count) {workingWord <<= count;workingBitsAvailable -= count;} else {count -= workingBitsAvailable;skipBytes = Math.floor(count / 8);count -= skipBytes * 8;workingBytesAvailable -= skipBytes;this.loadWord();workingWord <<= count;workingBitsAvailable -= count;}}; // (size:int):uintthis.readBits = function (size) {var bits = Math.min(workingBitsAvailable, size),// :uintvalu = workingWord >>> 32 - bits; // :uint// if size > 31, handle errorworkingBitsAvailable -= bits;if (workingBitsAvailable > 0) {workingWord <<= bits;} else if (workingBytesAvailable > 0) {this.loadWord();}bits = size - bits;if (bits > 0) {return valu << bits | this.readBits(bits);}return valu;}; // ():uintthis.skipLeadingZeros = 
function () {var leadingZeroCount; // :uintfor (leadingZeroCount = 0; leadingZeroCount < workingBitsAvailable; ++leadingZeroCount) {if ((workingWord & 0x80000000 >>> leadingZeroCount) !== 0) {// the first bit of working word is 1workingWord <<= leadingZeroCount;workingBitsAvailable -= leadingZeroCount;return leadingZeroCount;}} // we exhausted workingWord and still have not found a 1this.loadWord();return leadingZeroCount + this.skipLeadingZeros();}; // ():voidthis.skipUnsignedExpGolomb = function () {this.skipBits(1 + this.skipLeadingZeros());}; // ():voidthis.skipExpGolomb = function () {this.skipBits(1 + this.skipLeadingZeros());}; // ():uintthis.readUnsignedExpGolomb = function () {var clz = this.skipLeadingZeros(); // :uintreturn this.readBits(clz + 1) - 1;}; // ():intthis.readExpGolomb = function () {var valu = this.readUnsignedExpGolomb(); // :intif (0x01 & valu) {// the number is odd if the low order bit is setreturn 1 + valu >>> 1; // add 1 to make it even, and divide by 2}return -1 * (valu >>> 1); // divide by two then make it negative}; // Some convenience functions// :Booleanthis.readBoolean = function () {return this.readBits(1) === 1;}; // ():intthis.readUnsignedByte = function () {return this.readBits(8);};this.loadWord();};var expGolomb = ExpGolomb$1;/*** mux.js** Copyright (c) Brightcove* Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE*/var Stream$2 = stream;var ExpGolomb = expGolomb;var H264Stream$1, NalByteStream;var PROFILES_WITH_OPTIONAL_SPS_DATA;/*** Accepts a NAL unit byte stream and unpacks the embedded NAL units.*/NalByteStream = function () {var syncPoint = 0,i,buffer;NalByteStream.prototype.init.call(this);/** Scans a byte stream and triggers a data event with the NAL units found.* @param {Object} data Event received from H264Stream* @param {Uint8Array} data.data The h264 byte stream to be scanned** @see H264Stream.push*/this.push = function (data) {var swapBuffer;if (!buffer) {buffer = data.data;} else {swapBuffer = new Uint8Array(buffer.byteLength + data.data.byteLength);swapBuffer.set(buffer);swapBuffer.set(data.data, buffer.byteLength);buffer = swapBuffer;}var len = buffer.byteLength; // Rec. ITU-T H.264, Annex B// scan for NAL unit boundaries// a match looks like this:// 0 0 1 .. NAL .. 0 0 1// ^ sync point ^ i// or this:// 0 0 1 .. NAL .. 
0 0 0// ^ sync point ^ i// advance the sync point to a NAL start, if necessaryfor (; syncPoint < len - 3; syncPoint++) {if (buffer[syncPoint + 2] === 1) {// the sync point is properly alignedi = syncPoint + 5;break;}}while (i < len) {// look at the current byte to determine if we've hit the end of// a NAL unit boundaryswitch (buffer[i]) {case 0:// skip past non-sync sequencesif (buffer[i - 1] !== 0) {i += 2;break;} else if (buffer[i - 2] !== 0) {i++;break;} // deliver the NAL unit if it isn't emptyif (syncPoint + 3 !== i - 2) {this.trigger('data', buffer.subarray(syncPoint + 3, i - 2));} // drop trailing zeroesdo {i++;} while (buffer[i] !== 1 && i < len);syncPoint = i - 2;i += 3;break;case 1:// skip past non-sync sequencesif (buffer[i - 1] !== 0 || buffer[i - 2] !== 0) {i += 3;break;} // deliver the NAL unitthis.trigger('data', buffer.subarray(syncPoint + 3, i - 2));syncPoint = i - 2;i += 3;break;default:// the current byte isn't a one or zero, so it cannot be part// of a sync sequencei += 3;break;}} // filter out the NAL units that were deliveredbuffer = buffer.subarray(syncPoint);i -= syncPoint;syncPoint = 0;};this.reset = function () {buffer = null;syncPoint = 0;this.trigger('reset');};this.flush = function () {// deliver the last buffered NAL unitif (buffer && buffer.byteLength > 3) {this.trigger('data', buffer.subarray(syncPoint + 3));} // reset the stream statebuffer = null;syncPoint = 0;this.trigger('done');};this.endTimeline = function () {this.flush();this.trigger('endedtimeline');};};NalByteStream.prototype = new Stream$2(); // values of profile_idc that indicate additional fields are included in the SPS// see Recommendation ITU-T H.264 (4/2013),// 7.3.2.1.1 Sequence parameter set data syntaxPROFILES_WITH_OPTIONAL_SPS_DATA = {100: true,110: true,122: true,244: true,44: true,83: true,86: true,118: true,128: true,// TODO: the three profiles below don't// appear to have sps data in the specificiation anymore?138: true,139: true,134: true};/*** Accepts input from a ElementaryStream and produces H.264 NAL unit data* events.*/H264Stream$1 = function () {var nalByteStream = new NalByteStream(),self,trackId,currentPts,currentDts,discardEmulationPreventionBytes,readSequenceParameterSet,skipScalingList;H264Stream$1.prototype.init.call(this);self = this;/** Pushes a packet from a stream onto the NalByteStream** @param {Object} packet - A packet received from a stream* @param {Uint8Array} packet.data - The raw bytes of the packet* @param {Number} packet.dts - Decode timestamp of the packet* @param {Number} packet.pts - Presentation timestamp of the packet* @param {Number} packet.trackId - The id of the h264 track this packet came from* @param {('video'|'audio')} packet.type - The type of packet**/this.push = function (packet) {if (packet.type !== 'video') {return;}trackId = packet.trackId;currentPts = packet.pts;currentDts = packet.dts;nalByteStream.push(packet);};/** Identify NAL unit types and pass on the NALU, trackId, presentation and decode timestamps* for the NALUs to the next stream component.* Also, preprocess caption and sequence parameter NALUs.** @param {Uint8Array} data - A NAL unit identified by `NalByteStream.push`* @see NalByteStream.push*/nalByteStream.on('data', function (data) {var event = {trackId: trackId,pts: currentPts,dts: currentDts,data: data,nalUnitTypeCode: data[0] & 0x1f};switch (event.nalUnitTypeCode) {case 0x05:event.nalUnitType = 'slice_layer_without_partitioning_rbsp_idr';break;case 0x06:event.nalUnitType = 'sei_rbsp';event.escapedRBSP = 
discardEmulationPreventionBytes(data.subarray(1));break;case 0x07:event.nalUnitType = 'seq_parameter_set_rbsp';event.escapedRBSP = discardEmulationPreventionBytes(data.subarray(1));event.config = readSequenceParameterSet(event.escapedRBSP);break;case 0x08:event.nalUnitType = 'pic_parameter_set_rbsp';break;case 0x09:event.nalUnitType = 'access_unit_delimiter_rbsp';break;} // This triggers data on the H264Streamself.trigger('data', event);});nalByteStream.on('done', function () {self.trigger('done');});nalByteStream.on('partialdone', function () {self.trigger('partialdone');});nalByteStream.on('reset', function () {self.trigger('reset');});nalByteStream.on('endedtimeline', function () {self.trigger('endedtimeline');});this.flush = function () {nalByteStream.flush();};this.partialFlush = function () {nalByteStream.partialFlush();};this.reset = function () {nalByteStream.reset();};this.endTimeline = function () {nalByteStream.endTimeline();};/*** Advance the ExpGolomb decoder past a scaling list. The scaling* list is optionally transmitted as part of a sequence parameter* set and is not relevant to transmuxing.* @param count {number} the number of entries in this scaling list* @param expGolombDecoder {object} an ExpGolomb pointed to the* start of a scaling list* @see Recommendation ITU-T H.264, Section 7.3.2.1.1.1*/skipScalingList = function (count, expGolombDecoder) {var lastScale = 8,nextScale = 8,j,deltaScale;for (j = 0; j < count; j++) {if (nextScale !== 0) {deltaScale = expGolombDecoder.readExpGolomb();nextScale = (lastScale + deltaScale + 256) % 256;}lastScale = nextScale === 0 ? lastScale : nextScale;}};/*** Expunge any "Emulation Prevention" bytes from a "Raw Byte* Sequence Payload"* @param data {Uint8Array} the bytes of a RBSP from a NAL* unit* @return {Uint8Array} the RBSP without any Emulation* Prevention Bytes*/discardEmulationPreventionBytes = function (data) {var length = data.byteLength,emulationPreventionBytesPositions = [],i = 1,newLength,newData; // Find all `Emulation Prevention Bytes`while (i < length - 2) {if (data[i] === 0 && data[i + 1] === 0 && data[i + 2] === 0x03) {emulationPreventionBytesPositions.push(i + 2);i += 2;} else {i++;}} // If no Emulation Prevention Bytes were found just return the original// arrayif (emulationPreventionBytesPositions.length === 0) {return data;} // Create a new array to hold the NAL unit datanewLength = length - emulationPreventionBytesPositions.length;newData = new Uint8Array(newLength);var sourceIndex = 0;for (i = 0; i < newLength; sourceIndex++, i++) {if (sourceIndex === emulationPreventionBytesPositions[0]) {// Skip this bytesourceIndex++; // Remove this position indexemulationPreventionBytesPositions.shift();}newData[i] = data[sourceIndex];}return newData;};/*** Read a sequence parameter set and return some interesting video* properties. 
A sequence parameter set is the H264 metadata that* describes the properties of upcoming video frames.* @param data {Uint8Array} the bytes of a sequence parameter set* @return {object} an object with configuration parsed from the* sequence parameter set, including the dimensions of the* associated video frames.*/readSequenceParameterSet = function (data) {var frameCropLeftOffset = 0,frameCropRightOffset = 0,frameCropTopOffset = 0,frameCropBottomOffset = 0,expGolombDecoder,profileIdc,levelIdc,profileCompatibility,chromaFormatIdc,picOrderCntType,numRefFramesInPicOrderCntCycle,picWidthInMbsMinus1,picHeightInMapUnitsMinus1,frameMbsOnlyFlag,scalingListCount,sarRatio = [1, 1],aspectRatioIdc,i;expGolombDecoder = new ExpGolomb(data);profileIdc = expGolombDecoder.readUnsignedByte(); // profile_idcprofileCompatibility = expGolombDecoder.readUnsignedByte(); // constraint_set[0-5]_flaglevelIdc = expGolombDecoder.readUnsignedByte(); // level_idc u(8)expGolombDecoder.skipUnsignedExpGolomb(); // seq_parameter_set_id// some profiles have more optional data we don't needif (PROFILES_WITH_OPTIONAL_SPS_DATA[profileIdc]) {chromaFormatIdc = expGolombDecoder.readUnsignedExpGolomb();if (chromaFormatIdc === 3) {expGolombDecoder.skipBits(1); // separate_colour_plane_flag}expGolombDecoder.skipUnsignedExpGolomb(); // bit_depth_luma_minus8expGolombDecoder.skipUnsignedExpGolomb(); // bit_depth_chroma_minus8expGolombDecoder.skipBits(1); // qpprime_y_zero_transform_bypass_flagif (expGolombDecoder.readBoolean()) {// seq_scaling_matrix_present_flagscalingListCount = chromaFormatIdc !== 3 ? 8 : 12;for (i = 0; i < scalingListCount; i++) {if (expGolombDecoder.readBoolean()) {// seq_scaling_list_present_flag[ i ]if (i < 6) {skipScalingList(16, expGolombDecoder);} else {skipScalingList(64, expGolombDecoder);}}}}}expGolombDecoder.skipUnsignedExpGolomb(); // log2_max_frame_num_minus4picOrderCntType = expGolombDecoder.readUnsignedExpGolomb();if (picOrderCntType === 0) {expGolombDecoder.readUnsignedExpGolomb(); // log2_max_pic_order_cnt_lsb_minus4} else if (picOrderCntType === 1) {expGolombDecoder.skipBits(1); // delta_pic_order_always_zero_flagexpGolombDecoder.skipExpGolomb(); // offset_for_non_ref_picexpGolombDecoder.skipExpGolomb(); // offset_for_top_to_bottom_fieldnumRefFramesInPicOrderCntCycle = expGolombDecoder.readUnsignedExpGolomb();for (i = 0; i < numRefFramesInPicOrderCntCycle; i++) {expGolombDecoder.skipExpGolomb(); // offset_for_ref_frame[ i ]}}expGolombDecoder.skipUnsignedExpGolomb(); // max_num_ref_framesexpGolombDecoder.skipBits(1); // gaps_in_frame_num_value_allowed_flagpicWidthInMbsMinus1 = expGolombDecoder.readUnsignedExpGolomb();picHeightInMapUnitsMinus1 = expGolombDecoder.readUnsignedExpGolomb();frameMbsOnlyFlag = expGolombDecoder.readBits(1);if (frameMbsOnlyFlag === 0) {expGolombDecoder.skipBits(1); // mb_adaptive_frame_field_flag}expGolombDecoder.skipBits(1); // direct_8x8_inference_flagif (expGolombDecoder.readBoolean()) {// frame_cropping_flagframeCropLeftOffset = expGolombDecoder.readUnsignedExpGolomb();frameCropRightOffset = expGolombDecoder.readUnsignedExpGolomb();frameCropTopOffset = expGolombDecoder.readUnsignedExpGolomb();frameCropBottomOffset = expGolombDecoder.readUnsignedExpGolomb();}if (expGolombDecoder.readBoolean()) {// vui_parameters_present_flagif (expGolombDecoder.readBoolean()) {// aspect_ratio_info_present_flagaspectRatioIdc = expGolombDecoder.readUnsignedByte();switch (aspectRatioIdc) {case 1:sarRatio = [1, 1];break;case 2:sarRatio = [12, 11];break;case 3:sarRatio = [10, 11];break;case 
4:sarRatio = [16, 11];break;case 5:sarRatio = [40, 33];break;case 6:sarRatio = [24, 11];break;case 7:sarRatio = [20, 11];break;case 8:sarRatio = [32, 11];break;case 9:sarRatio = [80, 33];break;case 10:sarRatio = [18, 11];break;case 11:sarRatio = [15, 11];break;case 12:sarRatio = [64, 33];break;case 13:sarRatio = [160, 99];break;case 14:sarRatio = [4, 3];break;case 15:sarRatio = [3, 2];break;case 16:sarRatio = [2, 1];break;case 255:{sarRatio = [expGolombDecoder.readUnsignedByte() << 8 | expGolombDecoder.readUnsignedByte(), expGolombDecoder.readUnsignedByte() << 8 | expGolombDecoder.readUnsignedByte()];break;}}if (sarRatio) {sarRatio[0] / sarRatio[1];}}}return {profileIdc: profileIdc,levelIdc: levelIdc,profileCompatibility: profileCompatibility,width: (picWidthInMbsMinus1 + 1) * 16 - frameCropLeftOffset * 2 - frameCropRightOffset * 2,height: (2 - frameMbsOnlyFlag) * (picHeightInMapUnitsMinus1 + 1) * 16 - frameCropTopOffset * 2 - frameCropBottomOffset * 2,// sar is sample aspect ratiosarRatio: sarRatio};};};H264Stream$1.prototype = new Stream$2();var h264 = {H264Stream: H264Stream$1,NalByteStream: NalByteStream};/*** mux.js** Copyright (c) Brightcove* Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE** Utilities to detect basic properties and metadata about Aac data.*/var ADTS_SAMPLING_FREQUENCIES = [96000, 88200, 64000, 48000, 44100, 32000, 24000, 22050, 16000, 12000, 11025, 8000, 7350];var parseId3TagSize = function (header, byteIndex) {var returnSize = header[byteIndex + 6] << 21 | header[byteIndex + 7] << 14 | header[byteIndex + 8] << 7 | header[byteIndex + 9],flags = header[byteIndex + 5],footerPresent = (flags & 16) >> 4; // if we get a negative returnSize clamp it to 0returnSize = returnSize >= 0 ? returnSize : 0;if (footerPresent) {return returnSize + 20;}return returnSize + 10;};var getId3Offset = function (data, offset) {if (data.length - offset < 10 || data[offset] !== 'I'.charCodeAt(0) || data[offset + 1] !== 'D'.charCodeAt(0) || data[offset + 2] !== '3'.charCodeAt(0)) {return offset;}offset += parseId3TagSize(data, offset);return getId3Offset(data, offset);}; // TODO: use vhs-utilsvar isLikelyAacData$1 = function (data) {var offset = getId3Offset(data, 0);return data.length >= offset + 2 && (data[offset] & 0xFF) === 0xFF && (data[offset + 1] & 0xF0) === 0xF0 &&// verify that the 2 layer bits are 0, aka this// is not mp3 data but aac data.(data[offset + 1] & 0x16) === 0x10;};var parseSyncSafeInteger = function (data) {return data[0] << 21 | data[1] << 14 | data[2] << 7 | data[3];}; // return a percent-encoded representation of the specified byte range// @see http://en.wikipedia.org/wiki/Percent-encodingvar percentEncode = function (bytes, start, end) {var i,result = '';for (i = start; i < end; i++) {result += '%' + ('00' + bytes[i].toString(16)).slice(-2);}return result;}; // return the string representation of the specified byte range,// interpreted as ISO-8859-1.var parseIso88591 = function (bytes, start, end) {return unescape(percentEncode(bytes, start, end)); // jshint ignore:line};var parseAdtsSize = function (header, byteIndex) {var lowThree = (header[byteIndex + 5] & 0xE0) >> 5,middle = header[byteIndex + 4] << 3,highTwo = header[byteIndex + 3] & 0x3 << 11;return highTwo | middle | lowThree;};var parseType$4 = function (header, byteIndex) {if (header[byteIndex] === 'I'.charCodeAt(0) && header[byteIndex + 1] === 'D'.charCodeAt(0) && header[byteIndex + 2] === '3'.charCodeAt(0)) {return 'timed-metadata';} else if (header[byteIndex] & 0xff === 0xff && 
(header[byteIndex + 1] & 0xf0) === 0xf0) {return 'audio';}return null;};var parseSampleRate = function (packet) {var i = 0;while (i + 5 < packet.length) {if (packet[i] !== 0xFF || (packet[i + 1] & 0xF6) !== 0xF0) {// If a valid header was not found, jump one forward and attempt to// find a valid ADTS header starting at the next bytei++;continue;}return ADTS_SAMPLING_FREQUENCIES[(packet[i + 2] & 0x3c) >>> 2];}return null;};var parseAacTimestamp = function (packet) {var frameStart, frameSize, frame, frameHeader; // find the start of the first frame and the end of the tagframeStart = 10;if (packet[5] & 0x40) {// advance the frame start past the extended headerframeStart += 4; // header size fieldframeStart += parseSyncSafeInteger(packet.subarray(10, 14));} // parse one or more ID3 frames// http://id3.org/id3v2.3.0#ID3v2_frame_overviewdo {// determine the number of bytes in this frameframeSize = parseSyncSafeInteger(packet.subarray(frameStart + 4, frameStart + 8));if (frameSize < 1) {return null;}frameHeader = String.fromCharCode(packet[frameStart], packet[frameStart + 1], packet[frameStart + 2], packet[frameStart + 3]);if (frameHeader === 'PRIV') {frame = packet.subarray(frameStart + 10, frameStart + frameSize + 10);for (var i = 0; i < frame.byteLength; i++) {if (frame[i] === 0) {var owner = parseIso88591(frame, 0, i);if (owner === 'com.apple.streaming.transportStreamTimestamp') {var d = frame.subarray(i + 1);var size = (d[3] & 0x01) << 30 | d[4] << 22 | d[5] << 14 | d[6] << 6 | d[7] >>> 2;size *= 4;size += d[7] & 0x03;return size;}break;}}}frameStart += 10; // advance past the frame headerframeStart += frameSize; // advance past the frame body} while (frameStart < packet.byteLength);return null;};var utils = {isLikelyAacData: isLikelyAacData$1,parseId3TagSize: parseId3TagSize,parseAdtsSize: parseAdtsSize,parseType: parseType$4,parseSampleRate: parseSampleRate,parseAacTimestamp: parseAacTimestamp};/*** mux.js** Copyright (c) Brightcove* Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE** A stream-based aac to mp4 converter. 
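* (Hedged usage sketch of the AacStream defined below; `segmentBytes` is an
* assumed Uint8Array of raw ADTS/ID3 bytes and the handler body is illustrative.)
* ```
* var aacStream = new AacStream();
* aacStream.on('data', function (chunk) {
*   // chunk.type is 'audio' (one ADTS frame) or 'timed-metadata' (one ID3 tag)
* });
* aacStream.push(segmentBytes);
* ```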
This utility can be used to* deliver mp4s to a SourceBuffer on platforms that support native* Media Source Extensions.*/var Stream$1 = stream;var aacUtils = utils; // Constantsvar AacStream$1;/*** Splits an incoming stream of binary data into ADTS and ID3 Frames.*/AacStream$1 = function () {var everything = new Uint8Array(),timeStamp = 0;AacStream$1.prototype.init.call(this);this.setTimestamp = function (timestamp) {timeStamp = timestamp;};this.push = function (bytes) {var frameSize = 0,byteIndex = 0,bytesLeft,chunk,packet,tempLength; // If there are bytes remaining from the last segment, prepend them to the// bytes that were pushed inif (everything.length) {tempLength = everything.length;everything = new Uint8Array(bytes.byteLength + tempLength);everything.set(everything.subarray(0, tempLength));everything.set(bytes, tempLength);} else {everything = bytes;}while (everything.length - byteIndex >= 3) {if (everything[byteIndex] === 'I'.charCodeAt(0) && everything[byteIndex + 1] === 'D'.charCodeAt(0) && everything[byteIndex + 2] === '3'.charCodeAt(0)) {// Exit early because we don't have enough to parse// the ID3 tag headerif (everything.length - byteIndex < 10) {break;} // check framesizeframeSize = aacUtils.parseId3TagSize(everything, byteIndex); // Exit early if we don't have enough in the buffer// to emit a full packet// Add to byteIndex to support multiple ID3 tags in sequenceif (byteIndex + frameSize > everything.length) {break;}chunk = {type: 'timed-metadata',data: everything.subarray(byteIndex, byteIndex + frameSize)};this.trigger('data', chunk);byteIndex += frameSize;continue;} else if ((everything[byteIndex] & 0xff) === 0xff && (everything[byteIndex + 1] & 0xf0) === 0xf0) {// Exit early because we don't have enough to parse// the ADTS frame headerif (everything.length - byteIndex < 7) {break;}frameSize = aacUtils.parseAdtsSize(everything, byteIndex); // Exit early if we don't have enough in the buffer// to emit a full packetif (byteIndex + frameSize > everything.length) {break;}packet = {type: 'audio',data: everything.subarray(byteIndex, byteIndex + frameSize),pts: timeStamp,dts: timeStamp};this.trigger('data', packet);byteIndex += frameSize;continue;}byteIndex++;}bytesLeft = everything.length - byteIndex;if (bytesLeft > 0) {everything = everything.subarray(byteIndex);} else {everything = new Uint8Array();}};this.reset = function () {everything = new Uint8Array();this.trigger('reset');};this.endTimeline = function () {everything = new Uint8Array();this.trigger('endedtimeline');};};AacStream$1.prototype = new Stream$1();var aac = AacStream$1;var AUDIO_PROPERTIES$1 = ['audioobjecttype', 'channelcount', 'samplerate', 'samplingfrequencyindex', 'samplesize'];var audioProperties = AUDIO_PROPERTIES$1;var VIDEO_PROPERTIES$1 = ['width', 'height', 'profileIdc', 'levelIdc', 'profileCompatibility', 'sarRatio'];var videoProperties = VIDEO_PROPERTIES$1;/*** mux.js** Copyright (c) Brightcove* Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE** A stream-based mp2t to mp4 converter. 
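* (Hedged usage sketch of the Transmuxer defined at the end of this module;
* `tsBytes` is an assumed Uint8Array of MPEG2-TS data and the option value is
* an example only.)
* ```
* var transmuxer = new Transmuxer({ keepOriginalTimestamps: false });
* transmuxer.on('data', function (segment) {
*   // segment.initSegment and segment.data are Uint8Arrays ready to be
*   // appended to a SourceBuffer (init segment first)
* });
* transmuxer.push(tsBytes);
* transmuxer.flush();
* ```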
This utility can be used to* deliver mp4s to a SourceBuffer on platforms that support native* Media Source Extensions.*/var Stream = stream;var mp4 = mp4Generator;var frameUtils = frameUtils$1;var audioFrameUtils = audioFrameUtils$1;var trackDecodeInfo = trackDecodeInfo$1;var m2ts = m2ts_1;var clock = clock$2;var AdtsStream = adts;var H264Stream = h264.H264Stream;var AacStream = aac;var isLikelyAacData = utils.isLikelyAacData;var ONE_SECOND_IN_TS$1 = clock$2.ONE_SECOND_IN_TS;var AUDIO_PROPERTIES = audioProperties;var VIDEO_PROPERTIES = videoProperties; // object typesvar VideoSegmentStream, AudioSegmentStream, Transmuxer, CoalesceStream;var retriggerForStream = function (key, event) {event.stream = key;this.trigger('log', event);};var addPipelineLogRetriggers = function (transmuxer, pipeline) {var keys = Object.keys(pipeline);for (var i = 0; i < keys.length; i++) {var key = keys[i]; // skip non-stream keys and headOfPipeline// which is just a duplicateif (key === 'headOfPipeline' || !pipeline[key].on) {continue;}pipeline[key].on('log', retriggerForStream.bind(transmuxer, key));}};/*** Compare two arrays (even typed) for same-ness*/var arrayEquals = function (a, b) {var i;if (a.length !== b.length) {return false;} // compare the value of each element in the arrayfor (i = 0; i < a.length; i++) {if (a[i] !== b[i]) {return false;}}return true;};var generateSegmentTimingInfo = function (baseMediaDecodeTime, startDts, startPts, endDts, endPts, prependedContentDuration) {var ptsOffsetFromDts = startPts - startDts,decodeDuration = endDts - startDts,presentationDuration = endPts - startPts; // The PTS and DTS values are based on the actual stream times from the segment,// however, the player time values will reflect a start from the baseMediaDecodeTime.// In order to provide relevant values for the player times, base timing info on the// baseMediaDecodeTime and the DTS and PTS durations of the segment.return {start: {dts: baseMediaDecodeTime,pts: baseMediaDecodeTime + ptsOffsetFromDts},end: {dts: baseMediaDecodeTime + decodeDuration,pts: baseMediaDecodeTime + presentationDuration},prependedContentDuration: prependedContentDuration,baseMediaDecodeTime: baseMediaDecodeTime};};/*** Constructs a single-track, ISO BMFF media segment from AAC data* events. 
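* (Illustrative wiring only, mirroring how the transmuxer pipeline further down
* uses this stream; `track`, `adtsStream` and `coalesceStream` are assumed to
* already exist.)
* ```
* var audioSegmentStream = new AudioSegmentStream(track, { keepOriginalTimestamps: false });
* adtsStream.pipe(audioSegmentStream).pipe(coalesceStream);
* audioSegmentStream.on('data', function (event) {
*   // event.boxes is a Uint8Array holding the generated moof + mdat pair
* });
* ```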
The output of this stream can be fed to a SourceBuffer* configured with a suitable initialization segment.* @param track {object} track metadata configuration* @param options {object} transmuxer options object* @param options.keepOriginalTimestamps {boolean} If true, keep the timestamps* in the source; false to adjust the first segment to start at 0.*/AudioSegmentStream = function (track, options) {var adtsFrames = [],sequenceNumber,earliestAllowedDts = 0,audioAppendStartTs = 0,videoBaseMediaDecodeTime = Infinity;options = options || {};sequenceNumber = options.firstSequenceNumber || 0;AudioSegmentStream.prototype.init.call(this);this.push = function (data) {trackDecodeInfo.collectDtsInfo(track, data);if (track) {AUDIO_PROPERTIES.forEach(function (prop) {track[prop] = data[prop];});} // buffer audio data until end() is calledadtsFrames.push(data);};this.setEarliestDts = function (earliestDts) {earliestAllowedDts = earliestDts;};this.setVideoBaseMediaDecodeTime = function (baseMediaDecodeTime) {videoBaseMediaDecodeTime = baseMediaDecodeTime;};this.setAudioAppendStart = function (timestamp) {audioAppendStartTs = timestamp;};this.flush = function () {var frames, moof, mdat, boxes, frameDuration, segmentDuration, videoClockCyclesOfSilencePrefixed; // return early if no audio data has been observedif (adtsFrames.length === 0) {this.trigger('done', 'AudioSegmentStream');return;}frames = audioFrameUtils.trimAdtsFramesByEarliestDts(adtsFrames, track, earliestAllowedDts);track.baseMediaDecodeTime = trackDecodeInfo.calculateTrackBaseMediaDecodeTime(track, options.keepOriginalTimestamps); // amount of audio filled but the value is in video clock rather than audio clockvideoClockCyclesOfSilencePrefixed = audioFrameUtils.prefixWithSilence(track, frames, audioAppendStartTs, videoBaseMediaDecodeTime); // we have to build the index from byte locations to// samples (that is, adts frames) in the audio datatrack.samples = audioFrameUtils.generateSampleTable(frames); // concatenate the audio data to constuct the mdatmdat = mp4.mdat(audioFrameUtils.concatenateFrameData(frames));adtsFrames = [];moof = mp4.moof(sequenceNumber, [track]);boxes = new Uint8Array(moof.byteLength + mdat.byteLength); // bump the sequence number for next timesequenceNumber++;boxes.set(moof);boxes.set(mdat, moof.byteLength);trackDecodeInfo.clearDtsInfo(track);frameDuration = Math.ceil(ONE_SECOND_IN_TS$1 * 1024 / track.samplerate); // TODO this check was added to maintain backwards compatibility (particularly with// tests) on adding the timingInfo event. However, it seems unlikely that there's a// valid use-case where an init segment/data should be triggered without associated// frames. Leaving for now, but should be looked into.if (frames.length) {segmentDuration = frames.length * frameDuration;this.trigger('segmentTimingInfo', generateSegmentTimingInfo(// The audio track's baseMediaDecodeTime is in audio clock cycles, but the// frame info is in video clock cycles. 
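// (Illustrative arithmetic, assuming the 90kHz video clock used by these
// utilities: 48000 audio samples at a 48kHz samplerate is one second, so
// audioTsToVideoTs(48000, 48000) yields 90000 video clock ticks.)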
Convert to match expectation of// listeners (that all timestamps will be based on video clock cycles).clock.audioTsToVideoTs(track.baseMediaDecodeTime, track.samplerate),// frame times are already in video clock, as is segment durationframes[0].dts, frames[0].pts, frames[0].dts + segmentDuration, frames[0].pts + segmentDuration, videoClockCyclesOfSilencePrefixed || 0));this.trigger('timingInfo', {start: frames[0].pts,end: frames[0].pts + segmentDuration});}this.trigger('data', {track: track,boxes: boxes});this.trigger('done', 'AudioSegmentStream');};this.reset = function () {trackDecodeInfo.clearDtsInfo(track);adtsFrames = [];this.trigger('reset');};};AudioSegmentStream.prototype = new Stream();/*** Constructs a single-track, ISO BMFF media segment from H264 data* events. The output of this stream can be fed to a SourceBuffer* configured with a suitable initialization segment.* @param track {object} track metadata configuration* @param options {object} transmuxer options object* @param options.alignGopsAtEnd {boolean} If true, start from the end of the* gopsToAlignWith list when attempting to align gop pts* @param options.keepOriginalTimestamps {boolean} If true, keep the timestamps* in the source; false to adjust the first segment to start at 0.*/VideoSegmentStream = function (track, options) {var sequenceNumber,nalUnits = [],gopsToAlignWith = [],config,pps;options = options || {};sequenceNumber = options.firstSequenceNumber || 0;VideoSegmentStream.prototype.init.call(this);delete track.minPTS;this.gopCache_ = [];/*** Constructs a ISO BMFF segment given H264 nalUnits* @param {Object} nalUnit A data event representing a nalUnit* @param {String} nalUnit.nalUnitType* @param {Object} nalUnit.config Properties for a mp4 track* @param {Uint8Array} nalUnit.data The nalUnit bytes* @see lib/codecs/h264.js**/this.push = function (nalUnit) {trackDecodeInfo.collectDtsInfo(track, nalUnit); // record the track configif (nalUnit.nalUnitType === 'seq_parameter_set_rbsp' && !config) {config = nalUnit.config;track.sps = [nalUnit.data];VIDEO_PROPERTIES.forEach(function (prop) {track[prop] = config[prop];}, this);}if (nalUnit.nalUnitType === 'pic_parameter_set_rbsp' && !pps) {pps = nalUnit.data;track.pps = [nalUnit.data];} // buffer video until flush() is callednalUnits.push(nalUnit);};/*** Pass constructed ISO BMFF track and boxes on to the* next stream in the pipeline**/this.flush = function () {var frames,gopForFusion,gops,moof,mdat,boxes,prependedContentDuration = 0,firstGop,lastGop; // Throw away nalUnits at the start of the byte stream until// we find the first AUDwhile (nalUnits.length) {if (nalUnits[0].nalUnitType === 'access_unit_delimiter_rbsp') {break;}nalUnits.shift();} // Return early if no video data has been observedif (nalUnits.length === 0) {this.resetStream_();this.trigger('done', 'VideoSegmentStream');return;} // Organize the raw nal-units into arrays that represent// higher-level constructs such as frames and gops// (group-of-pictures)frames = frameUtils.groupNalsIntoFrames(nalUnits);gops = frameUtils.groupFramesIntoGops(frames); // If the first frame of this fragment is not a keyframe we have// a problem since MSE (on Chrome) requires a leading keyframe.//// We have two approaches to repairing this situation:// 1) GOP-FUSION:// This is where we keep track of the GOPS (group-of-pictures)// from previous fragments and attempt to find one that we can// prepend to the current fragment in order to create a valid// fragment.// 2) KEYFRAME-PULLING:// Here we search for the first keyframe in the 
fragment and// throw away all the frames between the start of the fragment// and that keyframe. We then extend the duration and pull the// PTS of the keyframe forward so that it covers the time range// of the frames that were disposed of.//// #1 is far prefereable over #2 which can cause "stuttering" but// requires more things to be just right.if (!gops[0][0].keyFrame) {// Search for a gop for fusion from our gopCachegopForFusion = this.getGopForFusion_(nalUnits[0], track);if (gopForFusion) {// in order to provide more accurate timing information about the segment, save// the number of seconds prepended to the original segment due to GOP fusionprependedContentDuration = gopForFusion.duration;gops.unshift(gopForFusion); // Adjust Gops' metadata to account for the inclusion of the// new gop at the beginninggops.byteLength += gopForFusion.byteLength;gops.nalCount += gopForFusion.nalCount;gops.pts = gopForFusion.pts;gops.dts = gopForFusion.dts;gops.duration += gopForFusion.duration;} else {// If we didn't find a candidate gop fall back to keyframe-pullinggops = frameUtils.extendFirstKeyFrame(gops);}} // Trim gops to align with gopsToAlignWithif (gopsToAlignWith.length) {var alignedGops;if (options.alignGopsAtEnd) {alignedGops = this.alignGopsAtEnd_(gops);} else {alignedGops = this.alignGopsAtStart_(gops);}if (!alignedGops) {// save all the nals in the last GOP into the gop cachethis.gopCache_.unshift({gop: gops.pop(),pps: track.pps,sps: track.sps}); // Keep a maximum of 6 GOPs in the cachethis.gopCache_.length = Math.min(6, this.gopCache_.length); // Clear nalUnitsnalUnits = []; // return early no gops can be aligned with desired gopsToAlignWiththis.resetStream_();this.trigger('done', 'VideoSegmentStream');return;} // Some gops were trimmed. clear dts info so minSegmentDts and pts are correct// when recalculated before sending off to CoalesceStreamtrackDecodeInfo.clearDtsInfo(track);gops = alignedGops;}trackDecodeInfo.collectDtsInfo(track, gops); // First, we have to build the index from byte locations to// samples (that is, frames) in the video datatrack.samples = frameUtils.generateSampleTable(gops); // Concatenate the video data and construct the mdatmdat = mp4.mdat(frameUtils.concatenateNalData(gops));track.baseMediaDecodeTime = trackDecodeInfo.calculateTrackBaseMediaDecodeTime(track, options.keepOriginalTimestamps);this.trigger('processedGopsInfo', gops.map(function (gop) {return {pts: gop.pts,dts: gop.dts,byteLength: gop.byteLength};}));firstGop = gops[0];lastGop = gops[gops.length - 1];this.trigger('segmentTimingInfo', generateSegmentTimingInfo(track.baseMediaDecodeTime, firstGop.dts, firstGop.pts, lastGop.dts + lastGop.duration, lastGop.pts + lastGop.duration, prependedContentDuration));this.trigger('timingInfo', {start: gops[0].pts,end: gops[gops.length - 1].pts + gops[gops.length - 1].duration}); // save all the nals in the last GOP into the gop cachethis.gopCache_.unshift({gop: gops.pop(),pps: track.pps,sps: track.sps}); // Keep a maximum of 6 GOPs in the cachethis.gopCache_.length = Math.min(6, this.gopCache_.length); // Clear nalUnitsnalUnits = [];this.trigger('baseMediaDecodeTime', track.baseMediaDecodeTime);this.trigger('timelineStartInfo', track.timelineStartInfo);moof = mp4.moof(sequenceNumber, [track]); // it would be great to allocate this array up front instead of// throwing away hundreds of media segment fragmentsboxes = new Uint8Array(moof.byteLength + mdat.byteLength); // Bump the sequence number for next timesequenceNumber++;boxes.set(moof);boxes.set(mdat, 
moof.byteLength);this.trigger('data', {track: track,boxes: boxes});this.resetStream_(); // Continue with the flush process nowthis.trigger('done', 'VideoSegmentStream');};this.reset = function () {this.resetStream_();nalUnits = [];this.gopCache_.length = 0;gopsToAlignWith.length = 0;this.trigger('reset');};this.resetStream_ = function () {trackDecodeInfo.clearDtsInfo(track); // reset config and pps because they may differ across segments// for instance, when we are rendition switchingconfig = undefined;pps = undefined;}; // Search for a candidate Gop for gop-fusion from the gop cache and// return it or return null if no good candidate was foundthis.getGopForFusion_ = function (nalUnit) {var halfSecond = 45000,// Half-a-second in a 90khz clockallowableOverlap = 10000,// About 3 frames @ 30fpsnearestDistance = Infinity,dtsDistance,nearestGopObj,currentGop,currentGopObj,i; // Search for the GOP nearest to the beginning of this nal unitfor (i = 0; i < this.gopCache_.length; i++) {currentGopObj = this.gopCache_[i];currentGop = currentGopObj.gop; // Reject Gops with different SPS or PPSif (!(track.pps && arrayEquals(track.pps[0], currentGopObj.pps[0])) || !(track.sps && arrayEquals(track.sps[0], currentGopObj.sps[0]))) {continue;} // Reject Gops that would require a negative baseMediaDecodeTimeif (currentGop.dts < track.timelineStartInfo.dts) {continue;} // The distance between the end of the gop and the start of the nalUnitdtsDistance = nalUnit.dts - currentGop.dts - currentGop.duration; // Only consider GOPS that start before the nal unit and end within// a half-second of the nal unitif (dtsDistance >= -allowableOverlap && dtsDistance <= halfSecond) {// Always use the closest GOP we found if there is more than// one candidateif (!nearestGopObj || nearestDistance > dtsDistance) {nearestGopObj = currentGopObj;nearestDistance = dtsDistance;}}}if (nearestGopObj) {return nearestGopObj.gop;}return null;}; // trim gop list to the first gop found that has a matching pts with a gop in the list// of gopsToAlignWith starting from the START of the listthis.alignGopsAtStart_ = function (gops) {var alignIndex, gopIndex, align, gop, byteLength, nalCount, duration, alignedGops;byteLength = gops.byteLength;nalCount = gops.nalCount;duration = gops.duration;alignIndex = gopIndex = 0;while (alignIndex < gopsToAlignWith.length && gopIndex < gops.length) {align = gopsToAlignWith[alignIndex];gop = gops[gopIndex];if (align.pts === gop.pts) {break;}if (gop.pts > align.pts) {// this current gop starts after the current gop we want to align on, so increment// align indexalignIndex++;continue;} // current gop starts before the current gop we want to align on. 
so increment gop// indexgopIndex++;byteLength -= gop.byteLength;nalCount -= gop.nalCount;duration -= gop.duration;}if (gopIndex === 0) {// no gops to trimreturn gops;}if (gopIndex === gops.length) {// all gops trimmed, skip appending all gopsreturn null;}alignedGops = gops.slice(gopIndex);alignedGops.byteLength = byteLength;alignedGops.duration = duration;alignedGops.nalCount = nalCount;alignedGops.pts = alignedGops[0].pts;alignedGops.dts = alignedGops[0].dts;return alignedGops;}; // trim gop list to the first gop found that has a matching pts with a gop in the list// of gopsToAlignWith starting from the END of the listthis.alignGopsAtEnd_ = function (gops) {var alignIndex, gopIndex, align, gop, alignEndIndex, matchFound;alignIndex = gopsToAlignWith.length - 1;gopIndex = gops.length - 1;alignEndIndex = null;matchFound = false;while (alignIndex >= 0 && gopIndex >= 0) {align = gopsToAlignWith[alignIndex];gop = gops[gopIndex];if (align.pts === gop.pts) {matchFound = true;break;}if (align.pts > gop.pts) {alignIndex--;continue;}if (alignIndex === gopsToAlignWith.length - 1) {// gop.pts is greater than the last alignment candidate. If no match is found// by the end of this loop, we still want to append gops that come after this// pointalignEndIndex = gopIndex;}gopIndex--;}if (!matchFound && alignEndIndex === null) {return null;}var trimIndex;if (matchFound) {trimIndex = gopIndex;} else {trimIndex = alignEndIndex;}if (trimIndex === 0) {return gops;}var alignedGops = gops.slice(trimIndex);var metadata = alignedGops.reduce(function (total, gop) {total.byteLength += gop.byteLength;total.duration += gop.duration;total.nalCount += gop.nalCount;return total;}, {byteLength: 0,duration: 0,nalCount: 0});alignedGops.byteLength = metadata.byteLength;alignedGops.duration = metadata.duration;alignedGops.nalCount = metadata.nalCount;alignedGops.pts = alignedGops[0].pts;alignedGops.dts = alignedGops[0].dts;return alignedGops;};this.alignGopsWith = function (newGopsToAlignWith) {gopsToAlignWith = newGopsToAlignWith;};};VideoSegmentStream.prototype = new Stream();/*** A Stream that can combine multiple streams (ie. audio & video)* into a single output segment for MSE. 
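* (Hedged sketch of how the transmuxer below wires this stream up; the segment
* stream variables are assumed to exist and the handler body is illustrative.)
* ```
* var coalesceStream = new CoalesceStream(options, metadataStream);
* coalesceStream.numberOfTracks = 2; // e.g. one audio + one video track
* videoSegmentStream.pipe(coalesceStream);
* audioSegmentStream.pipe(coalesceStream);
* coalesceStream.on('data', function (event) {
*   // event.type is 'combined', 'video' or 'audio'; event.initSegment and
*   // event.data hold the generated ISO BMFF bytes
* });
* ```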
Also supports audio-only* and video-only streams.* @param options {object} transmuxer options object* @param options.keepOriginalTimestamps {boolean} If true, keep the timestamps* in the source; false to adjust the first segment to start at media timeline start.*/CoalesceStream = function (options, metadataStream) {// Number of Tracks per output segment// If greater than 1, we combine multiple// tracks into a single segmentthis.numberOfTracks = 0;this.metadataStream = metadataStream;options = options || {};if (typeof options.remux !== 'undefined') {this.remuxTracks = !!options.remux;} else {this.remuxTracks = true;}if (typeof options.keepOriginalTimestamps === 'boolean') {this.keepOriginalTimestamps = options.keepOriginalTimestamps;} else {this.keepOriginalTimestamps = false;}this.pendingTracks = [];this.videoTrack = null;this.pendingBoxes = [];this.pendingCaptions = [];this.pendingMetadata = [];this.pendingBytes = 0;this.emittedTracks = 0;CoalesceStream.prototype.init.call(this); // Take output from multiplethis.push = function (output) {// buffer incoming captions until the associated video segment// finishesif (output.content || output.text) {return this.pendingCaptions.push(output);} // buffer incoming id3 tags until the final flushif (output.frames) {return this.pendingMetadata.push(output);} // Add this track to the list of pending tracks and store// important information required for the construction of// the final segmentthis.pendingTracks.push(output.track);this.pendingBytes += output.boxes.byteLength; // TODO: is there an issue for this against chrome?// We unshift audio and push video because// as of Chrome 75 when switching from// one init segment to another if the video// mdat does not appear after the audio mdat// only audio will play for the duration of our transmux.if (output.track.type === 'video') {this.videoTrack = output.track;this.pendingBoxes.push(output.boxes);}if (output.track.type === 'audio') {this.audioTrack = output.track;this.pendingBoxes.unshift(output.boxes);}};};CoalesceStream.prototype = new Stream();CoalesceStream.prototype.flush = function (flushSource) {var offset = 0,event = {captions: [],captionStreams: {},metadata: [],info: {}},caption,id3,initSegment,timelineStartPts = 0,i;if (this.pendingTracks.length < this.numberOfTracks) {if (flushSource !== 'VideoSegmentStream' && flushSource !== 'AudioSegmentStream') {// Return because we haven't received a flush from a data-generating// portion of the segment (meaning that we have only recieved meta-data// or captions.)return;} else if (this.remuxTracks) {// Return until we have enough tracks from the pipeline to remux (if we// are remuxing audio and video into a single MP4)return;} else if (this.pendingTracks.length === 0) {// In the case where we receive a flush without any data having been// received we consider it an emitted track for the purposes of coalescing// `done` events.// We do this for the case where there is an audio and video track in the// segment but no audio data. 
(seen in several playlists with alternate// audio tracks and no audio present in the main TS segments.)this.emittedTracks++;if (this.emittedTracks >= this.numberOfTracks) {this.trigger('done');this.emittedTracks = 0;}return;}}if (this.videoTrack) {timelineStartPts = this.videoTrack.timelineStartInfo.pts;VIDEO_PROPERTIES.forEach(function (prop) {event.info[prop] = this.videoTrack[prop];}, this);} else if (this.audioTrack) {timelineStartPts = this.audioTrack.timelineStartInfo.pts;AUDIO_PROPERTIES.forEach(function (prop) {event.info[prop] = this.audioTrack[prop];}, this);}if (this.videoTrack || this.audioTrack) {if (this.pendingTracks.length === 1) {event.type = this.pendingTracks[0].type;} else {event.type = 'combined';}this.emittedTracks += this.pendingTracks.length;initSegment = mp4.initSegment(this.pendingTracks); // Create a new typed array to hold the init segmentevent.initSegment = new Uint8Array(initSegment.byteLength); // Create an init segment containing a moov// and track definitionsevent.initSegment.set(initSegment); // Create a new typed array to hold the moof+mdatsevent.data = new Uint8Array(this.pendingBytes); // Append each moof+mdat (one per track) togetherfor (i = 0; i < this.pendingBoxes.length; i++) {event.data.set(this.pendingBoxes[i], offset);offset += this.pendingBoxes[i].byteLength;} // Translate caption PTS times into second offsets to match the// video timeline for the segment, and add track infofor (i = 0; i < this.pendingCaptions.length; i++) {caption = this.pendingCaptions[i];caption.startTime = clock.metadataTsToSeconds(caption.startPts, timelineStartPts, this.keepOriginalTimestamps);caption.endTime = clock.metadataTsToSeconds(caption.endPts, timelineStartPts, this.keepOriginalTimestamps);event.captionStreams[caption.stream] = true;event.captions.push(caption);} // Translate ID3 frame PTS times into second offsets to match the// video timeline for the segmentfor (i = 0; i < this.pendingMetadata.length; i++) {id3 = this.pendingMetadata[i];id3.cueTime = clock.metadataTsToSeconds(id3.pts, timelineStartPts, this.keepOriginalTimestamps);event.metadata.push(id3);} // We add this to every single emitted segment even though we only need// it for the firstevent.metadata.dispatchType = this.metadataStream.dispatchType; // Reset stream statethis.pendingTracks.length = 0;this.videoTrack = null;this.pendingBoxes.length = 0;this.pendingCaptions.length = 0;this.pendingBytes = 0;this.pendingMetadata.length = 0; // Emit the built segment// We include captions and ID3 tags for backwards compatibility,// ideally we should send only video and audio in the data eventthis.trigger('data', event); // Emit each caption to the outside world// Ideally, this would happen immediately on parsing captions,// but we need to ensure that video data is sent back first// so that caption timing can be adjusted to match video timingfor (i = 0; i < event.captions.length; i++) {caption = event.captions[i];this.trigger('caption', caption);} // Emit each id3 tag to the outside world// Ideally, this would happen immediately on parsing the tag,// but we need to ensure that video data is sent back first// so that ID3 frame timing can be adjusted to match video timingfor (i = 0; i < event.metadata.length; i++) {id3 = event.metadata[i];this.trigger('id3Frame', id3);}} // Only emit `done` if all tracks have been flushed and emittedif (this.emittedTracks >= this.numberOfTracks) {this.trigger('done');this.emittedTracks = 0;}};CoalesceStream.prototype.setRemux = function (val) {this.remuxTracks = val;};/*** A 
Stream that expects MP2T binary data as input and produces* corresponding media segments, suitable for use with Media Source* Extension (MSE) implementations that support the ISO BMFF byte* stream format, like Chrome.*/Transmuxer = function (options) {var self = this,hasFlushed = true,videoTrack,audioTrack;Transmuxer.prototype.init.call(this);options = options || {};this.baseMediaDecodeTime = options.baseMediaDecodeTime || 0;this.transmuxPipeline_ = {};this.setupAacPipeline = function () {var pipeline = {};this.transmuxPipeline_ = pipeline;pipeline.type = 'aac';pipeline.metadataStream = new m2ts.MetadataStream(); // set up the parsing pipelinepipeline.aacStream = new AacStream();pipeline.audioTimestampRolloverStream = new m2ts.TimestampRolloverStream('audio');pipeline.timedMetadataTimestampRolloverStream = new m2ts.TimestampRolloverStream('timed-metadata');pipeline.adtsStream = new AdtsStream();pipeline.coalesceStream = new CoalesceStream(options, pipeline.metadataStream);pipeline.headOfPipeline = pipeline.aacStream;pipeline.aacStream.pipe(pipeline.audioTimestampRolloverStream).pipe(pipeline.adtsStream);pipeline.aacStream.pipe(pipeline.timedMetadataTimestampRolloverStream).pipe(pipeline.metadataStream).pipe(pipeline.coalesceStream);pipeline.metadataStream.on('timestamp', function (frame) {pipeline.aacStream.setTimestamp(frame.timeStamp);});pipeline.aacStream.on('data', function (data) {if (data.type !== 'timed-metadata' && data.type !== 'audio' || pipeline.audioSegmentStream) {return;}audioTrack = audioTrack || {timelineStartInfo: {baseMediaDecodeTime: self.baseMediaDecodeTime},codec: 'adts',type: 'audio'}; // hook up the audio segment stream to the first track with aac datapipeline.coalesceStream.numberOfTracks++;pipeline.audioSegmentStream = new AudioSegmentStream(audioTrack, options);pipeline.audioSegmentStream.on('log', self.getLogTrigger_('audioSegmentStream'));pipeline.audioSegmentStream.on('timingInfo', self.trigger.bind(self, 'audioTimingInfo')); // Set up the final part of the audio pipelinepipeline.adtsStream.pipe(pipeline.audioSegmentStream).pipe(pipeline.coalesceStream); // emit pmt infoself.trigger('trackinfo', {hasAudio: !!audioTrack,hasVideo: !!videoTrack});}); // Re-emit any data coming from the coalesce stream to the outside worldpipeline.coalesceStream.on('data', this.trigger.bind(this, 'data')); // Let the consumer know we have finished flushing the entire pipelinepipeline.coalesceStream.on('done', this.trigger.bind(this, 'done'));addPipelineLogRetriggers(this, pipeline);};this.setupTsPipeline = function () {var pipeline = {};this.transmuxPipeline_ = pipeline;pipeline.type = 'ts';pipeline.metadataStream = new m2ts.MetadataStream(); // set up the parsing pipelinepipeline.packetStream = new m2ts.TransportPacketStream();pipeline.parseStream = new m2ts.TransportParseStream();pipeline.elementaryStream = new m2ts.ElementaryStream();pipeline.timestampRolloverStream = new m2ts.TimestampRolloverStream();pipeline.adtsStream = new AdtsStream();pipeline.h264Stream = new H264Stream();pipeline.captionStream = new m2ts.CaptionStream(options);pipeline.coalesceStream = new CoalesceStream(options, pipeline.metadataStream);pipeline.headOfPipeline = pipeline.packetStream; // disassemble MPEG2-TS packets into elementary streamspipeline.packetStream.pipe(pipeline.parseStream).pipe(pipeline.elementaryStream).pipe(pipeline.timestampRolloverStream); // !!THIS ORDER IS IMPORTANT!!// demux the 
streamspipeline.timestampRolloverStream.pipe(pipeline.h264Stream);pipeline.timestampRolloverStream.pipe(pipeline.adtsStream);pipeline.timestampRolloverStream.pipe(pipeline.metadataStream).pipe(pipeline.coalesceStream); // Hook up CEA-608/708 caption streampipeline.h264Stream.pipe(pipeline.captionStream).pipe(pipeline.coalesceStream);pipeline.elementaryStream.on('data', function (data) {var i;if (data.type === 'metadata') {i = data.tracks.length; // scan the tracks listed in the metadatawhile (i--) {if (!videoTrack && data.tracks[i].type === 'video') {videoTrack = data.tracks[i];videoTrack.timelineStartInfo.baseMediaDecodeTime = self.baseMediaDecodeTime;} else if (!audioTrack && data.tracks[i].type === 'audio') {audioTrack = data.tracks[i];audioTrack.timelineStartInfo.baseMediaDecodeTime = self.baseMediaDecodeTime;}} // hook up the video segment stream to the first track with h264 dataif (videoTrack && !pipeline.videoSegmentStream) {pipeline.coalesceStream.numberOfTracks++;pipeline.videoSegmentStream = new VideoSegmentStream(videoTrack, options);pipeline.videoSegmentStream.on('log', self.getLogTrigger_('videoSegmentStream'));pipeline.videoSegmentStream.on('timelineStartInfo', function (timelineStartInfo) {// When video emits timelineStartInfo data after a flush, we forward that// info to the AudioSegmentStream, if it exists, because video timeline// data takes precedence. Do not do this if keepOriginalTimestamps is set,// because this is a particularly subtle form of timestamp alteration.if (audioTrack && !options.keepOriginalTimestamps) {audioTrack.timelineStartInfo = timelineStartInfo; // On the first segment we trim AAC frames that exist before the// very earliest DTS we have seen in video because Chrome will// interpret any video track with a baseMediaDecodeTime that is// non-zero as a gap.pipeline.audioSegmentStream.setEarliestDts(timelineStartInfo.dts - self.baseMediaDecodeTime);}});pipeline.videoSegmentStream.on('processedGopsInfo', self.trigger.bind(self, 'gopInfo'));pipeline.videoSegmentStream.on('segmentTimingInfo', self.trigger.bind(self, 'videoSegmentTimingInfo'));pipeline.videoSegmentStream.on('baseMediaDecodeTime', function (baseMediaDecodeTime) {if (audioTrack) {pipeline.audioSegmentStream.setVideoBaseMediaDecodeTime(baseMediaDecodeTime);}});pipeline.videoSegmentStream.on('timingInfo', self.trigger.bind(self, 'videoTimingInfo')); // Set up the final part of the video pipelinepipeline.h264Stream.pipe(pipeline.videoSegmentStream).pipe(pipeline.coalesceStream);}if (audioTrack && !pipeline.audioSegmentStream) {// hook up the audio segment stream to the first track with aac datapipeline.coalesceStream.numberOfTracks++;pipeline.audioSegmentStream = new AudioSegmentStream(audioTrack, options);pipeline.audioSegmentStream.on('log', self.getLogTrigger_('audioSegmentStream'));pipeline.audioSegmentStream.on('timingInfo', self.trigger.bind(self, 'audioTimingInfo'));pipeline.audioSegmentStream.on('segmentTimingInfo', self.trigger.bind(self, 'audioSegmentTimingInfo')); // Set up the final part of the audio pipelinepipeline.adtsStream.pipe(pipeline.audioSegmentStream).pipe(pipeline.coalesceStream);} // emit pmt infoself.trigger('trackinfo', {hasAudio: !!audioTrack,hasVideo: !!videoTrack});}}); // Re-emit any data coming from the coalesce stream to the outside worldpipeline.coalesceStream.on('data', this.trigger.bind(this, 'data'));pipeline.coalesceStream.on('id3Frame', function (id3Frame) {id3Frame.dispatchType = pipeline.metadataStream.dispatchType;self.trigger('id3Frame', 
id3Frame);});pipeline.coalesceStream.on('caption', this.trigger.bind(this, 'caption')); // Let the consumer know we have finished flushing the entire pipelinepipeline.coalesceStream.on('done', this.trigger.bind(this, 'done'));addPipelineLogRetriggers(this, pipeline);}; // hook up the segment streams once track metadata is deliveredthis.setBaseMediaDecodeTime = function (baseMediaDecodeTime) {var pipeline = this.transmuxPipeline_;if (!options.keepOriginalTimestamps) {this.baseMediaDecodeTime = baseMediaDecodeTime;}if (audioTrack) {audioTrack.timelineStartInfo.dts = undefined;audioTrack.timelineStartInfo.pts = undefined;trackDecodeInfo.clearDtsInfo(audioTrack);if (pipeline.audioTimestampRolloverStream) {pipeline.audioTimestampRolloverStream.discontinuity();}}if (videoTrack) {if (pipeline.videoSegmentStream) {pipeline.videoSegmentStream.gopCache_ = [];}videoTrack.timelineStartInfo.dts = undefined;videoTrack.timelineStartInfo.pts = undefined;trackDecodeInfo.clearDtsInfo(videoTrack);pipeline.captionStream.reset();}if (pipeline.timestampRolloverStream) {pipeline.timestampRolloverStream.discontinuity();}};this.setAudioAppendStart = function (timestamp) {if (audioTrack) {this.transmuxPipeline_.audioSegmentStream.setAudioAppendStart(timestamp);}};this.setRemux = function (val) {var pipeline = this.transmuxPipeline_;options.remux = val;if (pipeline && pipeline.coalesceStream) {pipeline.coalesceStream.setRemux(val);}};this.alignGopsWith = function (gopsToAlignWith) {if (videoTrack && this.transmuxPipeline_.videoSegmentStream) {this.transmuxPipeline_.videoSegmentStream.alignGopsWith(gopsToAlignWith);}};this.getLogTrigger_ = function (key) {var self = this;return function (event) {event.stream = key;self.trigger('log', event);};}; // feed incoming data to the front of the parsing pipelinethis.push = function (data) {if (hasFlushed) {var isAac = isLikelyAacData(data);if (isAac && this.transmuxPipeline_.type !== 'aac') {this.setupAacPipeline();} else if (!isAac && this.transmuxPipeline_.type !== 'ts') {this.setupTsPipeline();}hasFlushed = false;}this.transmuxPipeline_.headOfPipeline.push(data);}; // flush any buffered datathis.flush = function () {hasFlushed = true; // Start at the top of the pipeline and flush all pending workthis.transmuxPipeline_.headOfPipeline.flush();};this.endTimeline = function () {this.transmuxPipeline_.headOfPipeline.endTimeline();};this.reset = function () {if (this.transmuxPipeline_.headOfPipeline) {this.transmuxPipeline_.headOfPipeline.reset();}}; // Caption data has to be reset when seeking outside buffered rangethis.resetCaptions = function () {if (this.transmuxPipeline_.captionStream) {this.transmuxPipeline_.captionStream.reset();}};};Transmuxer.prototype = new Stream();var transmuxer = {Transmuxer: Transmuxer,VideoSegmentStream: VideoSegmentStream,AudioSegmentStream: AudioSegmentStream,AUDIO_PROPERTIES: AUDIO_PROPERTIES,VIDEO_PROPERTIES: VIDEO_PROPERTIES,// exported for testinggenerateSegmentTimingInfo: generateSegmentTimingInfo};/*** mux.js** Copyright (c) Brightcove* Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE*/var toUnsigned$3 = function (value) {return value >>> 0;};var toHexString$1 = function (value) {return ('00' + value.toString(16)).slice(-2);};var bin = {toUnsigned: toUnsigned$3,toHexString: toHexString$1};var parseType$3 = function (buffer) {var result = '';result += String.fromCharCode(buffer[0]);result += String.fromCharCode(buffer[1]);result += String.fromCharCode(buffer[2]);result += String.fromCharCode(buffer[3]);return 
result;};var parseType_1 = parseType$3;var toUnsigned$2 = bin.toUnsigned;var parseType$2 = parseType_1;var findBox$2 = function (data, path) {var results = [],i,size,type,end,subresults;if (!path.length) {// short-circuit the search for empty pathsreturn null;}for (i = 0; i < data.byteLength;) {size = toUnsigned$2(data[i] << 24 | data[i + 1] << 16 | data[i + 2] << 8 | data[i + 3]);type = parseType$2(data.subarray(i + 4, i + 8));end = size > 1 ? i + size : data.byteLength;if (type === path[0]) {if (path.length === 1) {// this is the end of the path and we've found the box we were// looking forresults.push(data.subarray(i + 8, end));} else {// recursively search for the next box along the pathsubresults = findBox$2(data.subarray(i + 8, end), path.slice(1));if (subresults.length) {results = results.concat(subresults);}}}i = end;} // we've finished searching all of datareturn results;};var findBox_1 = findBox$2;var toUnsigned$1 = bin.toUnsigned;var getUint64$2 = numbers.getUint64;var tfdt = function (data) {var result = {version: data[0],flags: new Uint8Array(data.subarray(1, 4))};if (result.version === 1) {result.baseMediaDecodeTime = getUint64$2(data.subarray(4));} else {result.baseMediaDecodeTime = toUnsigned$1(data[4] << 24 | data[5] << 16 | data[6] << 8 | data[7]);}return result;};var parseTfdt$2 = tfdt;var parseSampleFlags$1 = function (flags) {return {isLeading: (flags[0] & 0x0c) >>> 2,dependsOn: flags[0] & 0x03,isDependedOn: (flags[1] & 0xc0) >>> 6,hasRedundancy: (flags[1] & 0x30) >>> 4,paddingValue: (flags[1] & 0x0e) >>> 1,isNonSyncSample: flags[1] & 0x01,degradationPriority: flags[2] << 8 | flags[3]};};var parseSampleFlags_1 = parseSampleFlags$1;var parseSampleFlags = parseSampleFlags_1;var trun = function (data) {var result = {version: data[0],flags: new Uint8Array(data.subarray(1, 4)),samples: []},view = new DataView(data.buffer, data.byteOffset, data.byteLength),// Flag interpretationdataOffsetPresent = result.flags[2] & 0x01,// compare with 2nd byte of 0x1firstSampleFlagsPresent = result.flags[2] & 0x04,// compare with 2nd byte of 0x4sampleDurationPresent = result.flags[1] & 0x01,// compare with 2nd byte of 0x100sampleSizePresent = result.flags[1] & 0x02,// compare with 2nd byte of 0x200sampleFlagsPresent = result.flags[1] & 0x04,// compare with 2nd byte of 0x400sampleCompositionTimeOffsetPresent = result.flags[1] & 0x08,// compare with 2nd byte of 0x800sampleCount = view.getUint32(4),offset = 8,sample;if (dataOffsetPresent) {// 32 bit signed integerresult.dataOffset = view.getInt32(offset);offset += 4;} // Overrides the flags for the first sample only. 
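// (Illustrative parsed trun, hypothetical values:
//   { version: 0, flags: <Uint8Array(3)>, dataOffset: 1024,
//     samples: [{ duration: 3000, size: 4720, compositionTimeOffset: 0 }, ...] }.)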
The order of// optional values will be: duration, size, compositionTimeOffsetif (firstSampleFlagsPresent && sampleCount) {sample = {flags: parseSampleFlags(data.subarray(offset, offset + 4))};offset += 4;if (sampleDurationPresent) {sample.duration = view.getUint32(offset);offset += 4;}if (sampleSizePresent) {sample.size = view.getUint32(offset);offset += 4;}if (sampleCompositionTimeOffsetPresent) {if (result.version === 1) {sample.compositionTimeOffset = view.getInt32(offset);} else {sample.compositionTimeOffset = view.getUint32(offset);}offset += 4;}result.samples.push(sample);sampleCount--;}while (sampleCount--) {sample = {};if (sampleDurationPresent) {sample.duration = view.getUint32(offset);offset += 4;}if (sampleSizePresent) {sample.size = view.getUint32(offset);offset += 4;}if (sampleFlagsPresent) {sample.flags = parseSampleFlags(data.subarray(offset, offset + 4));offset += 4;}if (sampleCompositionTimeOffsetPresent) {if (result.version === 1) {sample.compositionTimeOffset = view.getInt32(offset);} else {sample.compositionTimeOffset = view.getUint32(offset);}offset += 4;}result.samples.push(sample);}return result;};var parseTrun$2 = trun;var tfhd = function (data) {var view = new DataView(data.buffer, data.byteOffset, data.byteLength),result = {version: data[0],flags: new Uint8Array(data.subarray(1, 4)),trackId: view.getUint32(4)},baseDataOffsetPresent = result.flags[2] & 0x01,sampleDescriptionIndexPresent = result.flags[2] & 0x02,defaultSampleDurationPresent = result.flags[2] & 0x08,defaultSampleSizePresent = result.flags[2] & 0x10,defaultSampleFlagsPresent = result.flags[2] & 0x20,durationIsEmpty = result.flags[0] & 0x010000,defaultBaseIsMoof = result.flags[0] & 0x020000,i;i = 8;if (baseDataOffsetPresent) {i += 4; // truncate top 4 bytes// FIXME: should we read the full 64 bits?result.baseDataOffset = view.getUint32(12);i += 4;}if (sampleDescriptionIndexPresent) {result.sampleDescriptionIndex = view.getUint32(i);i += 4;}if (defaultSampleDurationPresent) {result.defaultSampleDuration = view.getUint32(i);i += 4;}if (defaultSampleSizePresent) {result.defaultSampleSize = view.getUint32(i);i += 4;}if (defaultSampleFlagsPresent) {result.defaultSampleFlags = view.getUint32(i);}if (durationIsEmpty) {result.durationIsEmpty = true;}if (!baseDataOffsetPresent && defaultBaseIsMoof) {result.baseDataOffsetIsMoof = true;}return result;};var parseTfhd$2 = tfhd;var win;if (typeof window !== "undefined") {win = window;} else if (typeof commonjsGlobal !== "undefined") {win = commonjsGlobal;} else if (typeof self !== "undefined") {win = self;} else {win = {};}var window_1 = win;/*** mux.js** Copyright (c) Brightcove* Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE** Reads in-band CEA-708 captions out of FMP4 segments.* @see https://en.wikipedia.org/wiki/CEA-708*/var discardEmulationPreventionBytes = captionPacketParser.discardEmulationPreventionBytes;var CaptionStream = captionStream.CaptionStream;var findBox$1 = findBox_1;var parseTfdt$1 = parseTfdt$2;var parseTrun$1 = parseTrun$2;var parseTfhd$1 = parseTfhd$2;var window$2 = window_1;/*** Maps an offset in the mdat to a sample based on the the size of the samples.* Assumes that `parseSamples` has been called first.** @param {Number} offset - The offset into the mdat* @param {Object[]} samples - An array of samples, parsed using `parseSamples`* @return {?Object} The matching sample, or null if no match was found.** @see ISO-BMFF-12/2015, Section 8.8.8**/var mapToSample = function (offset, samples) {var approximateOffset = 
offset;for (var i = 0; i < samples.length; i++) {var sample = samples[i];if (approximateOffset < sample.size) {return sample;}approximateOffset -= sample.size;}return null;};/*** Finds SEI nal units contained in a Media Data Box.* Assumes that `parseSamples` has been called first.** @param {Uint8Array} avcStream - The bytes of the mdat* @param {Object[]} samples - The samples parsed out by `parseSamples`* @param {Number} trackId - The trackId of this video track* @return {Object[]} seiNals - the parsed SEI NALUs found.* The contents of the seiNal should match what is expected by* CaptionStream.push (nalUnitType, size, data, escapedRBSP, pts, dts)** @see ISO-BMFF-12/2015, Section 8.1.1* @see Rec. ITU-T H.264, 7.3.2.3.1**/var findSeiNals = function (avcStream, samples, trackId) {var avcView = new DataView(avcStream.buffer, avcStream.byteOffset, avcStream.byteLength),result = {logs: [],seiNals: []},seiNal,i,length,lastMatchedSample;for (i = 0; i + 4 < avcStream.length; i += length) {length = avcView.getUint32(i);i += 4; // Bail if this doesn't appear to be an H264 streamif (length <= 0) {continue;}switch (avcStream[i] & 0x1F) {case 0x06:var data = avcStream.subarray(i + 1, i + 1 + length);var matchingSample = mapToSample(i, samples);seiNal = {nalUnitType: 'sei_rbsp',size: length,data: data,escapedRBSP: discardEmulationPreventionBytes(data),trackId: trackId};if (matchingSample) {seiNal.pts = matchingSample.pts;seiNal.dts = matchingSample.dts;lastMatchedSample = matchingSample;} else if (lastMatchedSample) {// If a matching sample cannot be found, use the last// sample's values as they should be as close as possibleseiNal.pts = lastMatchedSample.pts;seiNal.dts = lastMatchedSample.dts;} else {result.logs.push({level: 'warn',message: 'We\'ve encountered a nal unit without data at ' + i + ' for trackId ' + trackId + '. See mux.js#223.'});break;}result.seiNals.push(seiNal);break;}}return result;};/*** Parses sample information out of Track Run Boxes and calculates* the absolute presentation and decode timestamps of each sample.** @param {Array<Uint8Array>} truns - The Trun Run boxes to be parsed* @param {Number|BigInt} baseMediaDecodeTime - base media decode time from tfdt@see ISO-BMFF-12/2015, Section 8.8.12* @param {Object} tfhd - The parsed Track Fragment Header* @see inspect.parseTfhd* @return {Object[]} the parsed samples** @see ISO-BMFF-12/2015, Section 8.8.8**/var parseSamples = function (truns, baseMediaDecodeTime, tfhd) {var currentDts = baseMediaDecodeTime;var defaultSampleDuration = tfhd.defaultSampleDuration || 0;var defaultSampleSize = tfhd.defaultSampleSize || 0;var trackId = tfhd.trackId;var allSamples = [];truns.forEach(function (trun) {// Note: We currently do not parse the sample table as well// as the trun. 
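// (Illustrative walk-through with hypothetical numbers: given a
// baseMediaDecodeTime of 90000 and two trun samples of duration 3000 each,
// the samples get dts 90000 and 93000, and each pts is its dts plus that
// sample's compositionTimeOffset.)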
It's possible some sources will require this.// moov > trak > mdia > minf > stblvar trackRun = parseTrun$1(trun);var samples = trackRun.samples;samples.forEach(function (sample) {if (sample.duration === undefined) {sample.duration = defaultSampleDuration;}if (sample.size === undefined) {sample.size = defaultSampleSize;}sample.trackId = trackId;sample.dts = currentDts;if (sample.compositionTimeOffset === undefined) {sample.compositionTimeOffset = 0;}if (typeof currentDts === 'bigint') {sample.pts = currentDts + window$2.BigInt(sample.compositionTimeOffset);currentDts += window$2.BigInt(sample.duration);} else {sample.pts = currentDts + sample.compositionTimeOffset;currentDts += sample.duration;}});allSamples = allSamples.concat(samples);});return allSamples;};/*** Parses out caption nals from an FMP4 segment's video tracks.** @param {Uint8Array} segment - The bytes of a single segment* @param {Number} videoTrackId - The trackId of a video track in the segment* @return {Object.<Number, Object[]>} A mapping of video trackId to* a list of seiNals found in that track**/var parseCaptionNals = function (segment, videoTrackId) {// To get the samplesvar trafs = findBox$1(segment, ['moof', 'traf']); // To get SEI NAL unitsvar mdats = findBox$1(segment, ['mdat']);var captionNals = {};var mdatTrafPairs = []; // Pair up each traf with a mdat as moofs and mdats are in pairsmdats.forEach(function (mdat, index) {var matchingTraf = trafs[index];mdatTrafPairs.push({mdat: mdat,traf: matchingTraf});});mdatTrafPairs.forEach(function (pair) {var mdat = pair.mdat;var traf = pair.traf;var tfhd = findBox$1(traf, ['tfhd']); // Exactly 1 tfhd per trafvar headerInfo = parseTfhd$1(tfhd[0]);var trackId = headerInfo.trackId;var tfdt = findBox$1(traf, ['tfdt']); // Either 0 or 1 tfdt per trafvar baseMediaDecodeTime = tfdt.length > 0 ? 
parseTfdt$1(tfdt[0]).baseMediaDecodeTime : 0;var truns = findBox$1(traf, ['trun']);var samples;var result; // Only parse video data for the chosen video trackif (videoTrackId === trackId && truns.length > 0) {samples = parseSamples(truns, baseMediaDecodeTime, headerInfo);result = findSeiNals(mdat, samples, trackId);if (!captionNals[trackId]) {captionNals[trackId] = {seiNals: [],logs: []};}captionNals[trackId].seiNals = captionNals[trackId].seiNals.concat(result.seiNals);captionNals[trackId].logs = captionNals[trackId].logs.concat(result.logs);}});return captionNals;};/*** Parses out inband captions from an MP4 container and returns* caption objects that can be used by WebVTT and the TextTrack API.* @see https://developer.mozilla.org/en-US/docs/Web/API/VTTCue* @see https://developer.mozilla.org/en-US/docs/Web/API/TextTrack* Assumes that `probe.getVideoTrackIds` and `probe.timescale` have been called first** @param {Uint8Array} segment - The fmp4 segment containing embedded captions* @param {Number} trackId - The id of the video track to parse* @param {Number} timescale - The timescale for the video track from the init segment** @return {?Object[]} parsedCaptions - A list of captions or null if no video tracks* @return {Number} parsedCaptions[].startTime - The time to show the caption in seconds* @return {Number} parsedCaptions[].endTime - The time to stop showing the caption in seconds* @return {Object[]} parsedCaptions[].content - A list of individual caption segments* @return {String} parsedCaptions[].content.text - The visible content of the caption segment* @return {Number} parsedCaptions[].content.line - The line height from 1-15 for positioning of the caption segment* @return {Number} parsedCaptions[].content.position - The column indent percentage for cue positioning from 10-80**/var parseEmbeddedCaptions = function (segment, trackId, timescale) {var captionNals; // the ISO-BMFF spec says that trackId can't be zero, but there's some broken content out thereif (trackId === null) {return null;}captionNals = parseCaptionNals(segment, trackId);var trackNals = captionNals[trackId] || {};return {seiNals: trackNals.seiNals,logs: trackNals.logs,timescale: timescale};};/*** Converts SEI NALUs into captions that can be used by video.js**/var CaptionParser = function () {var isInitialized = false;var captionStream; // Stores segments seen before trackId and timescale are setvar segmentCache; // Stores video track ID of the track being parsedvar trackId; // Stores the timescale of the track being parsedvar timescale; // Stores captions parsed so farvar parsedCaptions; // Stores whether we are receiving partial data or notvar parsingPartial;/*** A method to indicate whether a CaptionParser has been initalized* @returns {Boolean}**/this.isInitialized = function () {return isInitialized;};/*** Initializes the underlying CaptionStream, SEI NAL parsing* and management, and caption collection**/this.init = function (options) {captionStream = new CaptionStream();isInitialized = true;parsingPartial = options ? 
options.isPartial : false; // Collect dispatched captionscaptionStream.on('data', function (event) {// Convert to seconds in the source's timescaleevent.startTime = event.startPts / timescale;event.endTime = event.endPts / timescale;parsedCaptions.captions.push(event);parsedCaptions.captionStreams[event.stream] = true;});captionStream.on('log', function (log) {parsedCaptions.logs.push(log);});};/*** Determines if a new video track will be selected* or if the timescale changed* @return {Boolean}**/this.isNewInit = function (videoTrackIds, timescales) {if (videoTrackIds && videoTrackIds.length === 0 || timescales && typeof timescales === 'object' && Object.keys(timescales).length === 0) {return false;}return trackId !== videoTrackIds[0] || timescale !== timescales[trackId];};/*** Parses out SEI captions and interacts with underlying* CaptionStream to return dispatched captions** @param {Uint8Array} segment - The fmp4 segment containing embedded captions* @param {Number[]} videoTrackIds - A list of video tracks found in the init segment* @param {Object.<Number, Number>} timescales - The timescales found in the init segment* @see parseEmbeddedCaptions* @see m2ts/caption-stream.js**/this.parse = function (segment, videoTrackIds, timescales) {var parsedData;if (!this.isInitialized()) {return null; // This is not likely to be a video segment} else if (!videoTrackIds || !timescales) {return null;} else if (this.isNewInit(videoTrackIds, timescales)) {// Use the first video track only as there is no// mechanism to switch to other video trackstrackId = videoTrackIds[0];timescale = timescales[trackId]; // If an init segment has not been seen yet, hold onto segment// data until we have one.// the ISO-BMFF spec says that trackId can't be zero, but there's some broken content out there} else if (trackId === null || !timescale) {segmentCache.push(segment);return null;} // Now that a timescale and trackId is set, parse cached segmentswhile (segmentCache.length > 0) {var cachedSegment = segmentCache.shift();this.parse(cachedSegment, videoTrackIds, timescales);}parsedData = parseEmbeddedCaptions(segment, trackId, timescale);if (parsedData && parsedData.logs) {parsedCaptions.logs = parsedCaptions.logs.concat(parsedData.logs);}if (parsedData === null || !parsedData.seiNals) {if (parsedCaptions.logs.length) {return {logs: parsedCaptions.logs,captions: [],captionStreams: []};}return null;}this.pushNals(parsedData.seiNals); // Force the parsed captions to be dispatchedthis.flushStream();return parsedCaptions;};/*** Pushes SEI NALUs onto CaptionStream* @param {Object[]} nals - A list of SEI nals parsed using `parseCaptionNals`* Assumes that `parseCaptionNals` has been called first* @see m2ts/caption-stream.js**/this.pushNals = function (nals) {if (!this.isInitialized() || !nals || nals.length === 0) {return null;}nals.forEach(function (nal) {captionStream.push(nal);});};/*** Flushes underlying CaptionStream to dispatch processed, displayable captions* @see m2ts/caption-stream.js**/this.flushStream = function () {if (!this.isInitialized()) {return null;}if (!parsingPartial) {captionStream.flush();} else {captionStream.partialFlush();}};/*** Reset caption buckets for new data**/this.clearParsedCaptions = function () {parsedCaptions.captions = [];parsedCaptions.captionStreams = {};parsedCaptions.logs = [];};/*** Resets underlying CaptionStream* @see m2ts/caption-stream.js**/this.resetCaptionStream = function () {if (!this.isInitialized()) {return null;}captionStream.reset();};/*** Convenience method to clear all 
captions flushed from the* CaptionStream and still being parsed* @see m2ts/caption-stream.js**/this.clearAllCaptions = function () {this.clearParsedCaptions();this.resetCaptionStream();};/*** Reset caption parser**/this.reset = function () {segmentCache = [];trackId = null;timescale = null;if (!parsedCaptions) {parsedCaptions = {captions: [],// CC1, CC2, CC3, CC4captionStreams: {},logs: []};} else {this.clearParsedCaptions();}this.resetCaptionStream();};this.reset();};var captionParser = CaptionParser;/*** Returns the first string in the data array ending with a null char '\0'* @param {UInt8} data* @returns the string with the null char*/var uint8ToCString$1 = function (data) {var index = 0;var curChar = String.fromCharCode(data[index]);var retString = '';while (curChar !== '\0') {retString += curChar;index++;curChar = String.fromCharCode(data[index]);} // Add nullCharretString += curChar;return retString;};var string = {uint8ToCString: uint8ToCString$1};var uint8ToCString = string.uint8ToCString;var getUint64$1 = numbers.getUint64;/*** Based on: ISO/IEC 23009 Section: 5.10.3.3* References:* https://dashif-documents.azurewebsites.net/Events/master/event.html#emsg-format* https://aomediacodec.github.io/id3-emsg/** Takes emsg box data as a uint8 array and returns a emsg box object* @param {UInt8Array} boxData data from emsg box* @returns A parsed emsg box object*/var parseEmsgBox = function (boxData) {// version + flagsvar offset = 4;var version = boxData[0];var scheme_id_uri, value, timescale, presentation_time, presentation_time_delta, event_duration, id, message_data;if (version === 0) {scheme_id_uri = uint8ToCString(boxData.subarray(offset));offset += scheme_id_uri.length;value = uint8ToCString(boxData.subarray(offset));offset += value.length;var dv = new DataView(boxData.buffer);timescale = dv.getUint32(offset);offset += 4;presentation_time_delta = dv.getUint32(offset);offset += 4;event_duration = dv.getUint32(offset);offset += 4;id = dv.getUint32(offset);offset += 4;} else if (version === 1) {var dv = new DataView(boxData.buffer);timescale = dv.getUint32(offset);offset += 4;presentation_time = getUint64$1(boxData.subarray(offset));offset += 8;event_duration = dv.getUint32(offset);offset += 4;id = dv.getUint32(offset);offset += 4;scheme_id_uri = uint8ToCString(boxData.subarray(offset));offset += scheme_id_uri.length;value = uint8ToCString(boxData.subarray(offset));offset += value.length;}message_data = new Uint8Array(boxData.subarray(offset, boxData.byteLength));var emsgBox = {scheme_id_uri,value,// if timescale is undefined or 0 set to 1timescale: timescale ? timescale : 1,presentation_time,presentation_time_delta,event_duration,id,message_data};return isValidEmsgBox(version, emsgBox) ? emsgBox : undefined;};/*** Scales a presentation time or time delta with an offset with a provided timescale* @param {number} presentationTime* @param {number} timescale* @param {number} timeDelta* @param {number} offset* @returns the scaled time as a number*/var scaleTime = function (presentationTime, timescale, timeDelta, offset) {return presentationTime || presentationTime === 0 ? 
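// Worked example of the emsg cue-time math used here (illustrative numbers
// only): a version 1 box carries an absolute presentation_time, while a
// version 0 box only carries presentation_time_delta, which scaleTime adds
// to the caller-supplied offset (typically the segment start time).
//
//   scaleTime(900000, 90000);                 // v1: 900000 / 90000 -> 10 s
//   scaleTime(undefined, 90000, 45000, 12);   // v0: 12 + 45000 / 90000 -> 12.5 s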
presentationTime / timescale : offset + timeDelta / timescale;
};
/**
 * Checks the emsg box data for validity based on the version
 * @param {number} version of the emsg box to validate
 * @param {Object} emsg the emsg data to validate
 * @returns {boolean} whether the box is valid
 */
var isValidEmsgBox = function (version, emsg) {
  var hasScheme = emsg.scheme_id_uri !== '\0';
  var isValidV0Box = version === 0 && isDefined(emsg.presentation_time_delta) && hasScheme;
  var isValidV1Box = version === 1 && isDefined(emsg.presentation_time) && hasScheme; // Only valid versions of emsg are 0 and 1

  return !(version > 1) && isValidV0Box || isValidV1Box;
}; // Utility function to check if an object is defined

var isDefined = function (data) {
  return data !== undefined && data !== null;
};
var emsg$1 = {
  parseEmsgBox: parseEmsgBox,
  scaleTime: scaleTime
};
/**
 * mux.js
 *
 * Copyright (c) Brightcove
 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
 *
 * Utilities to detect basic properties and metadata about MP4s.
 */

var toUnsigned = bin.toUnsigned;
var toHexString = bin.toHexString;
var findBox = findBox_1;
var parseType$1 = parseType_1;
var emsg = emsg$1;
var parseTfhd = parseTfhd$2;
var parseTrun = parseTrun$2;
var parseTfdt = parseTfdt$2;
var getUint64 = numbers.getUint64;
var timescale, startTime, compositionStartTime, getVideoTrackIds, getTracks, getTimescaleFromMediaHeader, getEmsgID3;
var window$1 = window_1;
var parseId3Frames = parseId3.parseId3Frames;
/**
 * Parses an MP4 initialization segment and extracts the timescale
 * values for any declared tracks. Timescale values indicate the
 * number of clock ticks per second to assume for time-based values
 * elsewhere in the MP4.
 *
 * To determine the start time of an MP4, you need two pieces of
 * information: the timescale unit and the earliest base media decode
 * time. Multiple timescales can be specified within an MP4 but the
 * base media decode time is always expressed in the timescale from
 * the media header box for the track:
 * ```
 * moov > trak > mdia > mdhd.timescale
 * ```
 * @param init {Uint8Array} the bytes of the init segment
 * @return {object} a hash of track ids to timescale values or null if
 * the init segment is malformed.
 */

timescale = function (init) {
  var result = {},
    traks = findBox(init, ['moov', 'trak']); // mdhd timescale

  return traks.reduce(function (result, trak) {
    var tkhd, version, index, id, mdhd;
    tkhd = findBox(trak, ['tkhd'])[0];
    if (!tkhd) {
      return null;
    }
    version = tkhd[0];
    index = version === 0 ? 12 : 20;
    id = toUnsigned(tkhd[index] << 24 | tkhd[index + 1] << 16 | tkhd[index + 2] << 8 | tkhd[index + 3]);
    mdhd = findBox(trak, ['mdia', 'mdhd'])[0];
    if (!mdhd) {
      return null;
    }
    version = mdhd[0];
    index = version === 0 ? 12 : 20;
    result[id] = toUnsigned(mdhd[index] << 24 | mdhd[index + 1] << 16 | mdhd[index + 2] << 8 | mdhd[index + 3]);
    return result;
  }, result);
};
/**
 * Determine the base media decode start time, in seconds, for an MP4
 * fragment. 
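// Illustrative sketch of the byte layout the reducer above depends on: tkhd
// and mdhd are "full boxes", so byte 0 of the payload is the version. For
// version 0 the 32-bit field of interest (track id, timescale) starts at
// offset 12; for version 1 it starts at offset 20 because the preceding
// timestamps grow from 32 to 64 bits. A DataView equivalent of the shift/OR
// pattern used above (payload here is assumed to be a Uint8Array over the
// box body):
//
//   function readUint32Field(payload, version) {
//     var offset = version === 0 ? 12 : 20;
//     var view = new DataView(payload.buffer, payload.byteOffset, payload.byteLength);
//     return view.getUint32(offset); // big-endian, same as the << 24 | ... form
//   }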
If multiple fragments are specified, the earliest time is* returned.** The base media decode time can be parsed from track fragment* metadata:* ```* moof > traf > tfdt.baseMediaDecodeTime* ```* It requires the timescale value from the mdhd to interpret.** @param timescale {object} a hash of track ids to timescale values.* @return {number} the earliest base media decode start time for the* fragment, in seconds*/startTime = function (timescale, fragment) {var trafs; // we need info from two childrend of each track fragment boxtrafs = findBox(fragment, ['moof', 'traf']); // determine the start times for each trackvar lowestTime = trafs.reduce(function (acc, traf) {var tfhd = findBox(traf, ['tfhd'])[0]; // get the track id from the tfhdvar id = toUnsigned(tfhd[4] << 24 | tfhd[5] << 16 | tfhd[6] << 8 | tfhd[7]); // assume a 90kHz clock if no timescale was specifiedvar scale = timescale[id] || 90e3; // get the base media decode time from the tfdtvar tfdt = findBox(traf, ['tfdt'])[0];var dv = new DataView(tfdt.buffer, tfdt.byteOffset, tfdt.byteLength);var baseTime; // version 1 is 64 bitif (tfdt[0] === 1) {baseTime = getUint64(tfdt.subarray(4, 12));} else {baseTime = dv.getUint32(4);} // convert base time to seconds if it is a valid number.let seconds;if (typeof baseTime === 'bigint') {seconds = baseTime / window$1.BigInt(scale);} else if (typeof baseTime === 'number' && !isNaN(baseTime)) {seconds = baseTime / scale;}if (seconds < Number.MAX_SAFE_INTEGER) {seconds = Number(seconds);}if (seconds < acc) {acc = seconds;}return acc;}, Infinity);return typeof lowestTime === 'bigint' || isFinite(lowestTime) ? lowestTime : 0;};/*** Determine the composition start, in seconds, for an MP4* fragment.** The composition start time of a fragment can be calculated using the base* media decode time, composition time offset, and timescale, as follows:** compositionStartTime = (baseMediaDecodeTime + compositionTimeOffset) / timescale** All of the aforementioned information is contained within a media fragment's* `traf` box, except for timescale info, which comes from the initialization* segment, so a track id (also contained within a `traf`) is also necessary to* associate it with a timescale*** @param timescales {object} - a hash of track ids to timescale values.* @param fragment {Unit8Array} - the bytes of a media segment* @return {number} the composition start time for the fragment, in seconds**/compositionStartTime = function (timescales, fragment) {var trafBoxes = findBox(fragment, ['moof', 'traf']);var baseMediaDecodeTime = 0;var compositionTimeOffset = 0;var trackId;if (trafBoxes && trafBoxes.length) {// The spec states that track run samples contained within a `traf` box are contiguous, but// it does not explicitly state whether the `traf` boxes themselves are contiguous.// We will assume that they are, so we only need the first to calculate start time.var tfhd = findBox(trafBoxes[0], ['tfhd'])[0];var trun = findBox(trafBoxes[0], ['trun'])[0];var tfdt = findBox(trafBoxes[0], ['tfdt'])[0];if (tfhd) {var parsedTfhd = parseTfhd(tfhd);trackId = parsedTfhd.trackId;}if (tfdt) {var parsedTfdt = parseTfdt(tfdt);baseMediaDecodeTime = parsedTfdt.baseMediaDecodeTime;}if (trun) {var parsedTrun = parseTrun(trun);if (parsedTrun.samples && parsedTrun.samples.length) {compositionTimeOffset = parsedTrun.samples[0].compositionTimeOffset || 0;}}} // Get timescale for this specific track. 
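// Worked example of the composition start time formula above, with
// illustrative values: baseMediaDecodeTime = 900000 ticks (from the tfdt),
// compositionTimeOffset = 3003 ticks (first trun sample), timescale = 90000.
//
//   (900000 + 3003) / 90000   // -> roughly 10.033 seconds
//
// When the init segment did not supply a timescale for the track id, the
// code falls back to the 90 kHz (90e3) MPEG-TS convention.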
Assume a 90kHz clock if no timescale was// specified.var timescale = timescales[trackId] || 90e3; // return the composition start time, in secondsif (typeof baseMediaDecodeTime === 'bigint') {compositionTimeOffset = window$1.BigInt(compositionTimeOffset);timescale = window$1.BigInt(timescale);}var result = (baseMediaDecodeTime + compositionTimeOffset) / timescale;if (typeof result === 'bigint' && result < Number.MAX_SAFE_INTEGER) {result = Number(result);}return result;};/*** Find the trackIds of the video tracks in this source.* Found by parsing the Handler Reference and Track Header Boxes:* moov > trak > mdia > hdlr* moov > trak > tkhd** @param {Uint8Array} init - The bytes of the init segment for this source* @return {Number[]} A list of trackIds** @see ISO-BMFF-12/2015, Section 8.4.3**/getVideoTrackIds = function (init) {var traks = findBox(init, ['moov', 'trak']);var videoTrackIds = [];traks.forEach(function (trak) {var hdlrs = findBox(trak, ['mdia', 'hdlr']);var tkhds = findBox(trak, ['tkhd']);hdlrs.forEach(function (hdlr, index) {var handlerType = parseType$1(hdlr.subarray(8, 12));var tkhd = tkhds[index];var view;var version;var trackId;if (handlerType === 'vide') {view = new DataView(tkhd.buffer, tkhd.byteOffset, tkhd.byteLength);version = view.getUint8(0);trackId = version === 0 ? view.getUint32(12) : view.getUint32(20);videoTrackIds.push(trackId);}});});return videoTrackIds;};getTimescaleFromMediaHeader = function (mdhd) {// mdhd is a FullBox, meaning it will have its own version as the first bytevar version = mdhd[0];var index = version === 0 ? 12 : 20;return toUnsigned(mdhd[index] << 24 | mdhd[index + 1] << 16 | mdhd[index + 2] << 8 | mdhd[index + 3]);};/*** Get all the video, audio, and hint tracks from a non fragmented* mp4 segment*/getTracks = function (init) {var traks = findBox(init, ['moov', 'trak']);var tracks = [];traks.forEach(function (trak) {var track = {};var tkhd = findBox(trak, ['tkhd'])[0];var view, tkhdVersion; // idif (tkhd) {view = new DataView(tkhd.buffer, tkhd.byteOffset, tkhd.byteLength);tkhdVersion = view.getUint8(0);track.id = tkhdVersion === 0 ? 
view.getUint32(12) : view.getUint32(20);}var hdlr = findBox(trak, ['mdia', 'hdlr'])[0]; // typeif (hdlr) {var type = parseType$1(hdlr.subarray(8, 12));if (type === 'vide') {track.type = 'video';} else if (type === 'soun') {track.type = 'audio';} else {track.type = type;}} // codecvar stsd = findBox(trak, ['mdia', 'minf', 'stbl', 'stsd'])[0];if (stsd) {var sampleDescriptions = stsd.subarray(8); // gives the codec type stringtrack.codec = parseType$1(sampleDescriptions.subarray(4, 8));var codecBox = findBox(sampleDescriptions, [track.codec])[0];var codecConfig, codecConfigType;if (codecBox) {// https://tools.ietf.org/html/rfc6381#section-3.3if (/^[asm]vc[1-9]$/i.test(track.codec)) {// we don't need anything but the "config" parameter of the// avc1 codecBoxcodecConfig = codecBox.subarray(78);codecConfigType = parseType$1(codecConfig.subarray(4, 8));if (codecConfigType === 'avcC' && codecConfig.length > 11) {track.codec += '.'; // left padded with zeroes for single digit hex// profile idctrack.codec += toHexString(codecConfig[9]); // the byte containing the constraint_set flagstrack.codec += toHexString(codecConfig[10]); // level idctrack.codec += toHexString(codecConfig[11]);} else {// TODO: show a warning that we couldn't parse the codec// and are using the defaulttrack.codec = 'avc1.4d400d';}} else if (/^mp4[a,v]$/i.test(track.codec)) {// we do not need anything but the streamDescriptor of the mp4a codecBoxcodecConfig = codecBox.subarray(28);codecConfigType = parseType$1(codecConfig.subarray(4, 8));if (codecConfigType === 'esds' && codecConfig.length > 20 && codecConfig[19] !== 0) {track.codec += '.' + toHexString(codecConfig[19]); // this value is only a single digittrack.codec += '.' + toHexString(codecConfig[20] >>> 2 & 0x3f).replace(/^0/, '');} else {// TODO: show a warning that we couldn't parse the codec// and are using the defaulttrack.codec = 'mp4a.40.2';}} else {// flac, opus, etctrack.codec = track.codec.toLowerCase();}}}var mdhd = findBox(trak, ['mdia', 'mdhd'])[0];if (mdhd) {track.timescale = getTimescaleFromMediaHeader(mdhd);}tracks.push(track);});return tracks;};/*** Returns an array of emsg ID3 data from the provided segmentData.* An offset can also be provided as the Latest Arrival Time to calculate* the Event Start Time of v0 EMSG boxes.* See: https://dashif-documents.azurewebsites.net/Events/master/event.html#Inband-event-timing** @param {Uint8Array} segmentData the segment byte array.* @param {number} offset the segment start time or Latest Arrival Time,* @return {Object[]} an array of ID3 parsed from EMSG boxes*/getEmsgID3 = function (segmentData, offset = 0) {var emsgBoxes = findBox(segmentData, ['emsg']);return emsgBoxes.map(data => {var parsedBox = emsg.parseEmsgBox(new Uint8Array(data));var parsedId3Frames = parseId3Frames(parsedBox.message_data);return {cueTime: emsg.scaleTime(parsedBox.presentation_time, parsedBox.timescale, parsedBox.presentation_time_delta, offset),duration: emsg.scaleTime(parsedBox.event_duration, parsedBox.timescale),frames: parsedId3Frames};});};var probe$2 = {// export mp4 inspector's findBox and parseType for backwards compatibilityfindBox: findBox,parseType: parseType$1,timescale: timescale,startTime: startTime,compositionStartTime: compositionStartTime,videoTrackIds: getVideoTrackIds,tracks: getTracks,getTimescaleFromMediaHeader: getTimescaleFromMediaHeader,getEmsgID3: getEmsgID3};/*** mux.js** Copyright (c) Brightcove* Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE** Utilities to detect basic properties and 
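// Illustrative example of the RFC 6381 codec strings assembled above, using
// made-up configuration bytes: an avcC whose profile_idc byte is 0x64,
// constraint-flags byte 0x00 and level_idc byte 0x1f produces
// 'avc1' + '.' + '64' + '00' + '1f' === 'avc1.64001f'. An esds whose object
// type byte is 0x40 followed by 0x08 (0x08 >>> 2 & 0x3f === 2) produces
// 'mp4a.40.2'. When the config box cannot be parsed, the code above falls
// back to 'avc1.4d400d' or 'mp4a.40.2'.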
metadata about TS Segments.*/var StreamTypes$1 = streamTypes;var parsePid = function (packet) {var pid = packet[1] & 0x1f;pid <<= 8;pid |= packet[2];return pid;};var parsePayloadUnitStartIndicator = function (packet) {return !!(packet[1] & 0x40);};var parseAdaptionField = function (packet) {var offset = 0; // if an adaption field is present, its length is specified by the// fifth byte of the TS packet header. The adaptation field is// used to add stuffing to PES packets that don't fill a complete// TS packet, and to specify some forms of timing and control data// that we do not currently use.if ((packet[3] & 0x30) >>> 4 > 0x01) {offset += packet[4] + 1;}return offset;};var parseType = function (packet, pmtPid) {var pid = parsePid(packet);if (pid === 0) {return 'pat';} else if (pid === pmtPid) {return 'pmt';} else if (pmtPid) {return 'pes';}return null;};var parsePat = function (packet) {var pusi = parsePayloadUnitStartIndicator(packet);var offset = 4 + parseAdaptionField(packet);if (pusi) {offset += packet[offset] + 1;}return (packet[offset + 10] & 0x1f) << 8 | packet[offset + 11];};var parsePmt = function (packet) {var programMapTable = {};var pusi = parsePayloadUnitStartIndicator(packet);var payloadOffset = 4 + parseAdaptionField(packet);if (pusi) {payloadOffset += packet[payloadOffset] + 1;} // PMTs can be sent ahead of the time when they should actually// take effect. We don't believe this should ever be the case// for HLS but we'll ignore "forward" PMT declarations if we see// them. Future PMT declarations have the current_next_indicator// set to zero.if (!(packet[payloadOffset + 5] & 0x01)) {return;}var sectionLength, tableEnd, programInfoLength; // the mapping table ends at the end of the current sectionsectionLength = (packet[payloadOffset + 1] & 0x0f) << 8 | packet[payloadOffset + 2];tableEnd = 3 + sectionLength - 4; // to determine where the table is, we have to figure out how// long the program info descriptors areprogramInfoLength = (packet[payloadOffset + 10] & 0x0f) << 8 | packet[payloadOffset + 11]; // advance the offset to the first entry in the mapping tablevar offset = 12 + programInfoLength;while (offset < tableEnd) {var i = payloadOffset + offset; // add an entry that maps the elementary_pid to the stream_typeprogramMapTable[(packet[i + 1] & 0x1F) << 8 | packet[i + 2]] = packet[i]; // move to the next table entry// skip past the elementary stream descriptors, if presentoffset += ((packet[i + 3] & 0x0F) << 8 | packet[i + 4]) + 5;}return programMapTable;};var parsePesType = function (packet, programMapTable) {var pid = parsePid(packet);var type = programMapTable[pid];switch (type) {case StreamTypes$1.H264_STREAM_TYPE:return 'video';case StreamTypes$1.ADTS_STREAM_TYPE:return 'audio';case StreamTypes$1.METADATA_STREAM_TYPE:return 'timed-metadata';default:return null;}};var parsePesTime = function (packet) {var pusi = parsePayloadUnitStartIndicator(packet);if (!pusi) {return null;}var offset = 4 + parseAdaptionField(packet);if (offset >= packet.byteLength) {// From the H 222.0 MPEG-TS spec// "For transport stream packets carrying PES packets, stuffing is needed when there// is insufficient PES packet data to completely fill the transport stream packet// payload bytes. 
Stuffing is accomplished by defining an adaptation field longer than// the sum of the lengths of the data elements in it, so that the payload bytes// remaining after the adaptation field exactly accommodates the available PES packet// data."//// If the offset is >= the length of the packet, then the packet contains no data// and instead is just adaption field stuffing bytesreturn null;}var pes = null;var ptsDtsFlags; // PES packets may be annotated with a PTS value, or a PTS value// and a DTS value. Determine what combination of values is// available to work with.ptsDtsFlags = packet[offset + 7]; // PTS and DTS are normally stored as a 33-bit number. Javascript// performs all bitwise operations on 32-bit integers but javascript// supports a much greater range (52-bits) of integer using standard// mathematical operations.// We construct a 31-bit value using bitwise operators over the 31// most significant bits and then multiply by 4 (equal to a left-shift// of 2) before we add the final 2 least significant bits of the// timestamp (equal to an OR.)if (ptsDtsFlags & 0xC0) {pes = {}; // the PTS and DTS are not written out directly. For information// on how they are encoded, see// http://dvd.sourceforge.net/dvdinfo/pes-hdr.htmlpes.pts = (packet[offset + 9] & 0x0E) << 27 | (packet[offset + 10] & 0xFF) << 20 | (packet[offset + 11] & 0xFE) << 12 | (packet[offset + 12] & 0xFF) << 5 | (packet[offset + 13] & 0xFE) >>> 3;pes.pts *= 4; // Left shift by 2pes.pts += (packet[offset + 13] & 0x06) >>> 1; // OR by the two LSBspes.dts = pes.pts;if (ptsDtsFlags & 0x40) {pes.dts = (packet[offset + 14] & 0x0E) << 27 | (packet[offset + 15] & 0xFF) << 20 | (packet[offset + 16] & 0xFE) << 12 | (packet[offset + 17] & 0xFF) << 5 | (packet[offset + 18] & 0xFE) >>> 3;pes.dts *= 4; // Left shift by 2pes.dts += (packet[offset + 18] & 0x06) >>> 1; // OR by the two LSBs}}return pes;};var parseNalUnitType = function (type) {switch (type) {case 0x05:return 'slice_layer_without_partitioning_rbsp_idr';case 0x06:return 'sei_rbsp';case 0x07:return 'seq_parameter_set_rbsp';case 0x08:return 'pic_parameter_set_rbsp';case 0x09:return 'access_unit_delimiter_rbsp';default:return null;}};var videoPacketContainsKeyFrame = function (packet) {var offset = 4 + parseAdaptionField(packet);var frameBuffer = packet.subarray(offset);var frameI = 0;var frameSyncPoint = 0;var foundKeyFrame = false;var nalType; // advance the sync point to a NAL start, if necessaryfor (; frameSyncPoint < frameBuffer.byteLength - 3; frameSyncPoint++) {if (frameBuffer[frameSyncPoint + 2] === 1) {// the sync point is properly alignedframeI = frameSyncPoint + 5;break;}}while (frameI < frameBuffer.byteLength) {// look at the current byte to determine if we've hit the end of// a NAL unit boundaryswitch (frameBuffer[frameI]) {case 0:// skip past non-sync sequencesif (frameBuffer[frameI - 1] !== 0) {frameI += 2;break;} else if (frameBuffer[frameI - 2] !== 0) {frameI++;break;}if (frameSyncPoint + 3 !== frameI - 2) {nalType = parseNalUnitType(frameBuffer[frameSyncPoint + 3] & 0x1f);if (nalType === 'slice_layer_without_partitioning_rbsp_idr') {foundKeyFrame = true;}} // drop trailing zeroesdo {frameI++;} while (frameBuffer[frameI] !== 1 && frameI < frameBuffer.length);frameSyncPoint = frameI - 2;frameI += 3;break;case 1:// skip past non-sync sequencesif (frameBuffer[frameI - 1] !== 0 || frameBuffer[frameI - 2] !== 0) {frameI += 3;break;}nalType = parseNalUnitType(frameBuffer[frameSyncPoint + 3] & 0x1f);if (nalType === 'slice_layer_without_partitioning_rbsp_idr') 
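// Worked example of the 33-bit timestamp reconstruction above (illustrative
// values): JavaScript bitwise operators are limited to 32 bits, so the top
// 31 bits are assembled with shifts, multiplied by 4 (an arithmetic two-bit
// left shift), and the final two bits are then added on.
//
//   var top31 = 0x40000000;       // pretend value built from the PES header
//   var low2 = 0x3;               // the two least-significant bits
//   var pts = top31 * 4 + low2;   // 4294967299, larger than 2^32 - 1
//   var seconds = pts / 90000;    // about 47721.9 s on the 90 kHz clock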
{foundKeyFrame = true;}frameSyncPoint = frameI - 2;frameI += 3;break;default:// the current byte isn't a one or zero, so it cannot be part// of a sync sequenceframeI += 3;break;}}frameBuffer = frameBuffer.subarray(frameSyncPoint);frameI -= frameSyncPoint;frameSyncPoint = 0; // parse the final nalif (frameBuffer && frameBuffer.byteLength > 3) {nalType = parseNalUnitType(frameBuffer[frameSyncPoint + 3] & 0x1f);if (nalType === 'slice_layer_without_partitioning_rbsp_idr') {foundKeyFrame = true;}}return foundKeyFrame;};var probe$1 = {parseType: parseType,parsePat: parsePat,parsePmt: parsePmt,parsePayloadUnitStartIndicator: parsePayloadUnitStartIndicator,parsePesType: parsePesType,parsePesTime: parsePesTime,videoPacketContainsKeyFrame: videoPacketContainsKeyFrame};/*** mux.js** Copyright (c) Brightcove* Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE** Parse mpeg2 transport stream packets to extract basic timing information*/var StreamTypes = streamTypes;var handleRollover = timestampRolloverStream.handleRollover;var probe = {};probe.ts = probe$1;probe.aac = utils;var ONE_SECOND_IN_TS = clock$2.ONE_SECOND_IN_TS;var MP2T_PACKET_LENGTH = 188,// bytesSYNC_BYTE = 0x47;/*** walks through segment data looking for pat and pmt packets to parse out* program map table information*/var parsePsi_ = function (bytes, pmt) {var startIndex = 0,endIndex = MP2T_PACKET_LENGTH,packet,type;while (endIndex < bytes.byteLength) {// Look for a pair of start and end sync bytes in the data..if (bytes[startIndex] === SYNC_BYTE && bytes[endIndex] === SYNC_BYTE) {// We found a packetpacket = bytes.subarray(startIndex, endIndex);type = probe.ts.parseType(packet, pmt.pid);switch (type) {case 'pat':pmt.pid = probe.ts.parsePat(packet);break;case 'pmt':var table = probe.ts.parsePmt(packet);pmt.table = pmt.table || {};Object.keys(table).forEach(function (key) {pmt.table[key] = table[key];});break;}startIndex += MP2T_PACKET_LENGTH;endIndex += MP2T_PACKET_LENGTH;continue;} // If we get here, we have somehow become de-synchronized and we need to step// forward one byte at a time until we find a pair of sync bytes that denote// a packetstartIndex++;endIndex++;}};/*** walks through the segment data from the start and end to get timing information* for the first and last audio pes packets*/var parseAudioPes_ = function (bytes, pmt, result) {var startIndex = 0,endIndex = MP2T_PACKET_LENGTH,packet,type,pesType,pusi,parsed;var endLoop = false; // Start walking from start of segment to get first audio packetwhile (endIndex <= bytes.byteLength) {// Look for a pair of start and end sync bytes in the data..if (bytes[startIndex] === SYNC_BYTE && (bytes[endIndex] === SYNC_BYTE || endIndex === bytes.byteLength)) {// We found a packetpacket = bytes.subarray(startIndex, endIndex);type = probe.ts.parseType(packet, pmt.pid);switch (type) {case 'pes':pesType = probe.ts.parsePesType(packet, pmt.table);pusi = probe.ts.parsePayloadUnitStartIndicator(packet);if (pesType === 'audio' && pusi) {parsed = probe.ts.parsePesTime(packet);if (parsed) {parsed.type = 'audio';result.audio.push(parsed);endLoop = true;}}break;}if (endLoop) {break;}startIndex += MP2T_PACKET_LENGTH;endIndex += MP2T_PACKET_LENGTH;continue;} // If we get here, we have somehow become de-synchronized and we need to step// forward one byte at a time until we find a pair of sync bytes that denote// a packetstartIndex++;endIndex++;} // Start walking from end of segment to get last audio packetendIndex = bytes.byteLength;startIndex = endIndex - 
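// Minimal sketch of the packet-alignment idea used by the walkers above:
// MPEG-TS packets are a fixed 188 bytes and begin with the sync byte 0x47,
// so an offset is only trusted when the byte 188 positions later is also
// 0x47; otherwise the scan slides forward one byte to re-synchronize.
//
//   function findFirstAlignedPacket(bytes) { // helper name used only here
//     for (var i = 0; i + 188 < bytes.byteLength; i++) {
//       if (bytes[i] === 0x47 && bytes[i + 188] === 0x47) {
//         return bytes.subarray(i, i + 188);
//       }
//     }
//     return null; // no aligned packet found
//   }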
MP2T_PACKET_LENGTH;endLoop = false;while (startIndex >= 0) {// Look for a pair of start and end sync bytes in the data..if (bytes[startIndex] === SYNC_BYTE && (bytes[endIndex] === SYNC_BYTE || endIndex === bytes.byteLength)) {// We found a packetpacket = bytes.subarray(startIndex, endIndex);type = probe.ts.parseType(packet, pmt.pid);switch (type) {case 'pes':pesType = probe.ts.parsePesType(packet, pmt.table);pusi = probe.ts.parsePayloadUnitStartIndicator(packet);if (pesType === 'audio' && pusi) {parsed = probe.ts.parsePesTime(packet);if (parsed) {parsed.type = 'audio';result.audio.push(parsed);endLoop = true;}}break;}if (endLoop) {break;}startIndex -= MP2T_PACKET_LENGTH;endIndex -= MP2T_PACKET_LENGTH;continue;} // If we get here, we have somehow become de-synchronized and we need to step// forward one byte at a time until we find a pair of sync bytes that denote// a packetstartIndex--;endIndex--;}};/*** walks through the segment data from the start and end to get timing information* for the first and last video pes packets as well as timing information for the first* key frame.*/var parseVideoPes_ = function (bytes, pmt, result) {var startIndex = 0,endIndex = MP2T_PACKET_LENGTH,packet,type,pesType,pusi,parsed,frame,i,pes;var endLoop = false;var currentFrame = {data: [],size: 0}; // Start walking from start of segment to get first video packetwhile (endIndex < bytes.byteLength) {// Look for a pair of start and end sync bytes in the data..if (bytes[startIndex] === SYNC_BYTE && bytes[endIndex] === SYNC_BYTE) {// We found a packetpacket = bytes.subarray(startIndex, endIndex);type = probe.ts.parseType(packet, pmt.pid);switch (type) {case 'pes':pesType = probe.ts.parsePesType(packet, pmt.table);pusi = probe.ts.parsePayloadUnitStartIndicator(packet);if (pesType === 'video') {if (pusi && !endLoop) {parsed = probe.ts.parsePesTime(packet);if (parsed) {parsed.type = 'video';result.video.push(parsed);endLoop = true;}}if (!result.firstKeyFrame) {if (pusi) {if (currentFrame.size !== 0) {frame = new Uint8Array(currentFrame.size);i = 0;while (currentFrame.data.length) {pes = currentFrame.data.shift();frame.set(pes, i);i += pes.byteLength;}if (probe.ts.videoPacketContainsKeyFrame(frame)) {var firstKeyFrame = probe.ts.parsePesTime(frame); // PTS/DTS may not be available. Simply *not* setting// the keyframe seems to work fine with HLS playback// and definitely preferable to a crash with TypeError...if (firstKeyFrame) {result.firstKeyFrame = firstKeyFrame;result.firstKeyFrame.type = 'video';} else {// eslint-disable-next-lineconsole.warn('Failed to extract PTS/DTS from PES at first keyframe. ' + 'This could be an unusual TS segment, or else mux.js did not ' + 'parse your TS segment correctly. If you know your TS ' + 'segments do contain PTS/DTS on keyframes please file a bug ' + 'report! 
You can try ffprobe to double check for yourself.');}}currentFrame.size = 0;}}currentFrame.data.push(packet);currentFrame.size += packet.byteLength;}}break;}if (endLoop && result.firstKeyFrame) {break;}startIndex += MP2T_PACKET_LENGTH;endIndex += MP2T_PACKET_LENGTH;continue;} // If we get here, we have somehow become de-synchronized and we need to step// forward one byte at a time until we find a pair of sync bytes that denote// a packetstartIndex++;endIndex++;} // Start walking from end of segment to get last video packetendIndex = bytes.byteLength;startIndex = endIndex - MP2T_PACKET_LENGTH;endLoop = false;while (startIndex >= 0) {// Look for a pair of start and end sync bytes in the data..if (bytes[startIndex] === SYNC_BYTE && bytes[endIndex] === SYNC_BYTE) {// We found a packetpacket = bytes.subarray(startIndex, endIndex);type = probe.ts.parseType(packet, pmt.pid);switch (type) {case 'pes':pesType = probe.ts.parsePesType(packet, pmt.table);pusi = probe.ts.parsePayloadUnitStartIndicator(packet);if (pesType === 'video' && pusi) {parsed = probe.ts.parsePesTime(packet);if (parsed) {parsed.type = 'video';result.video.push(parsed);endLoop = true;}}break;}if (endLoop) {break;}startIndex -= MP2T_PACKET_LENGTH;endIndex -= MP2T_PACKET_LENGTH;continue;} // If we get here, we have somehow become de-synchronized and we need to step// forward one byte at a time until we find a pair of sync bytes that denote// a packetstartIndex--;endIndex--;}};/*** Adjusts the timestamp information for the segment to account for* rollover and convert to seconds based on pes packet timescale (90khz clock)*/var adjustTimestamp_ = function (segmentInfo, baseTimestamp) {if (segmentInfo.audio && segmentInfo.audio.length) {var audioBaseTimestamp = baseTimestamp;if (typeof audioBaseTimestamp === 'undefined' || isNaN(audioBaseTimestamp)) {audioBaseTimestamp = segmentInfo.audio[0].dts;}segmentInfo.audio.forEach(function (info) {info.dts = handleRollover(info.dts, audioBaseTimestamp);info.pts = handleRollover(info.pts, audioBaseTimestamp); // time in secondsinfo.dtsTime = info.dts / ONE_SECOND_IN_TS;info.ptsTime = info.pts / ONE_SECOND_IN_TS;});}if (segmentInfo.video && segmentInfo.video.length) {var videoBaseTimestamp = baseTimestamp;if (typeof videoBaseTimestamp === 'undefined' || isNaN(videoBaseTimestamp)) {videoBaseTimestamp = segmentInfo.video[0].dts;}segmentInfo.video.forEach(function (info) {info.dts = handleRollover(info.dts, videoBaseTimestamp);info.pts = handleRollover(info.pts, videoBaseTimestamp); // time in secondsinfo.dtsTime = info.dts / ONE_SECOND_IN_TS;info.ptsTime = info.pts / ONE_SECOND_IN_TS;});if (segmentInfo.firstKeyFrame) {var frame = segmentInfo.firstKeyFrame;frame.dts = handleRollover(frame.dts, videoBaseTimestamp);frame.pts = handleRollover(frame.pts, videoBaseTimestamp); // time in secondsframe.dtsTime = frame.dts / ONE_SECOND_IN_TS;frame.ptsTime = frame.pts / ONE_SECOND_IN_TS;}}};/*** inspects the aac data stream for start and end time information*/var inspectAac_ = function (bytes) {var endLoop = false,audioCount = 0,sampleRate = null,timestamp = null,frameSize = 0,byteIndex = 0,packet;while (bytes.length - byteIndex >= 3) {var type = probe.aac.parseType(bytes, byteIndex);switch (type) {case 'timed-metadata':// Exit early because we don't have enough to parse// the ID3 tag headerif (bytes.length - byteIndex < 10) {endLoop = true;break;}frameSize = probe.aac.parseId3TagSize(bytes, byteIndex); // Exit early if we don't have enough in the buffer// to emit a full packetif (frameSize > bytes.length) 
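// Worked example of the timestamp adjustment above (illustrative values):
// PES timestamps use the 90 kHz clock (ONE_SECOND_IN_TS), so a dts of
// 8100000 ticks is 8100000 / 90000 = 90 seconds. handleRollover additionally
// re-bases values when the 33-bit counter wraps (2^33 ticks, roughly every
// 26.5 hours), using the supplied baseTimestamp as its reference point.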
{endLoop = true;break;}if (timestamp === null) {packet = bytes.subarray(byteIndex, byteIndex + frameSize);timestamp = probe.aac.parseAacTimestamp(packet);}byteIndex += frameSize;break;case 'audio':// Exit early because we don't have enough to parse// the ADTS frame headerif (bytes.length - byteIndex < 7) {endLoop = true;break;}frameSize = probe.aac.parseAdtsSize(bytes, byteIndex); // Exit early if we don't have enough in the buffer// to emit a full packetif (frameSize > bytes.length) {endLoop = true;break;}if (sampleRate === null) {packet = bytes.subarray(byteIndex, byteIndex + frameSize);sampleRate = probe.aac.parseSampleRate(packet);}audioCount++;byteIndex += frameSize;break;default:byteIndex++;break;}if (endLoop) {return null;}}if (sampleRate === null || timestamp === null) {return null;}var audioTimescale = ONE_SECOND_IN_TS / sampleRate;var result = {audio: [{type: 'audio',dts: timestamp,pts: timestamp}, {type: 'audio',dts: timestamp + audioCount * 1024 * audioTimescale,pts: timestamp + audioCount * 1024 * audioTimescale}]};return result;};/*** inspects the transport stream segment data for start and end time information* of the audio and video tracks (when present) as well as the first key frame's* start time.*/var inspectTs_ = function (bytes) {var pmt = {pid: null,table: null};var result = {};parsePsi_(bytes, pmt);for (var pid in pmt.table) {if (pmt.table.hasOwnProperty(pid)) {var type = pmt.table[pid];switch (type) {case StreamTypes.H264_STREAM_TYPE:result.video = [];parseVideoPes_(bytes, pmt, result);if (result.video.length === 0) {delete result.video;}break;case StreamTypes.ADTS_STREAM_TYPE:result.audio = [];parseAudioPes_(bytes, pmt, result);if (result.audio.length === 0) {delete result.audio;}break;}}}return result;};/*** Inspects segment byte data and returns an object with start and end timing information** @param {Uint8Array} bytes The segment byte data* @param {Number} baseTimestamp Relative reference timestamp used when adjusting frame* timestamps for rollover. 
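// Worked example of the AAC end-time estimate above (illustrative numbers):
// each ADTS frame holds 1024 samples, so with sampleRate = 48000 and
// audioCount = 94 frames the audio spans 94 * 1024 / 48000, about 2.005
// seconds. On the 90 kHz clock that is 94 * 1024 * (90000 / 48000) = 180480
// ticks, which is what gets added to the first frame's timestamp.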
This value must be in 90khz clock.* @return {Object} Object containing start and end frame timing info of segment.*/var inspect = function (bytes, baseTimestamp) {var isAacData = probe.aac.isLikelyAacData(bytes);var result;if (isAacData) {result = inspectAac_(bytes);} else {result = inspectTs_(bytes);}if (!result || !result.audio && !result.video) {return null;}adjustTimestamp_(result, baseTimestamp);return result;};var tsInspector = {inspect: inspect,parseAudioPes_: parseAudioPes_};/* global self *//*** Re-emits transmuxer events by converting them into messages to the* world outside the worker.** @param {Object} transmuxer the transmuxer to wire events on* @private*/const wireTransmuxerEvents = function (self, transmuxer) {transmuxer.on('data', function (segment) {// transfer ownership of the underlying ArrayBuffer// instead of doing a copy to save memory// ArrayBuffers are transferable but generic TypedArrays are not// @link https://developer.mozilla.org/en-US/docs/Web/API/Web_Workers_API/Using_web_workers#Passing_data_by_transferring_ownership_(transferable_objects)const initArray = segment.initSegment;segment.initSegment = {data: initArray.buffer,byteOffset: initArray.byteOffset,byteLength: initArray.byteLength};const typedArray = segment.data;segment.data = typedArray.buffer;self.postMessage({action: 'data',segment,byteOffset: typedArray.byteOffset,byteLength: typedArray.byteLength}, [segment.data]);});transmuxer.on('done', function (data) {self.postMessage({action: 'done'});});transmuxer.on('gopInfo', function (gopInfo) {self.postMessage({action: 'gopInfo',gopInfo});});transmuxer.on('videoSegmentTimingInfo', function (timingInfo) {const videoSegmentTimingInfo = {start: {decode: clock$2.videoTsToSeconds(timingInfo.start.dts),presentation: clock$2.videoTsToSeconds(timingInfo.start.pts)},end: {decode: clock$2.videoTsToSeconds(timingInfo.end.dts),presentation: clock$2.videoTsToSeconds(timingInfo.end.pts)},baseMediaDecodeTime: clock$2.videoTsToSeconds(timingInfo.baseMediaDecodeTime)};if (timingInfo.prependedContentDuration) {videoSegmentTimingInfo.prependedContentDuration = clock$2.videoTsToSeconds(timingInfo.prependedContentDuration);}self.postMessage({action: 'videoSegmentTimingInfo',videoSegmentTimingInfo});});transmuxer.on('audioSegmentTimingInfo', function (timingInfo) {// Note that all times for [audio/video]SegmentTimingInfo events are in video clockconst audioSegmentTimingInfo = {start: {decode: clock$2.videoTsToSeconds(timingInfo.start.dts),presentation: clock$2.videoTsToSeconds(timingInfo.start.pts)},end: {decode: clock$2.videoTsToSeconds(timingInfo.end.dts),presentation: clock$2.videoTsToSeconds(timingInfo.end.pts)},baseMediaDecodeTime: clock$2.videoTsToSeconds(timingInfo.baseMediaDecodeTime)};if (timingInfo.prependedContentDuration) {audioSegmentTimingInfo.prependedContentDuration = clock$2.videoTsToSeconds(timingInfo.prependedContentDuration);}self.postMessage({action: 'audioSegmentTimingInfo',audioSegmentTimingInfo});});transmuxer.on('id3Frame', function (id3Frame) {self.postMessage({action: 'id3Frame',id3Frame});});transmuxer.on('caption', function (caption) {self.postMessage({action: 'caption',caption});});transmuxer.on('trackinfo', function (trackInfo) {self.postMessage({action: 'trackinfo',trackInfo});});transmuxer.on('audioTimingInfo', function (audioTimingInfo) {// convert to video TS since we prioritize video time over audioself.postMessage({action: 'audioTimingInfo',audioTimingInfo: {start: clock$2.videoTsToSeconds(audioTimingInfo.start),end: 
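// Minimal sketch of the "transferable" pattern used by the 'data' handler
// above: only ArrayBuffers can be transferred, so a typed array is split
// into its buffer plus byteOffset/byteLength and the buffer is listed as a
// transferable, letting the worker hand over ownership instead of copying.
//
//   var segmentBytes = new Uint8Array(16); // placeholder payload
//   self.postMessage({
//     action: 'data',
//     data: segmentBytes.buffer,
//     byteOffset: segmentBytes.byteOffset,
//     byteLength: segmentBytes.byteLength
//   }, [segmentBytes.buffer]);
//   // receiver: new Uint8Array(msg.data, msg.byteOffset, msg.byteLength)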
clock$2.videoTsToSeconds(audioTimingInfo.end)}});});transmuxer.on('videoTimingInfo', function (videoTimingInfo) {self.postMessage({action: 'videoTimingInfo',videoTimingInfo: {start: clock$2.videoTsToSeconds(videoTimingInfo.start),end: clock$2.videoTsToSeconds(videoTimingInfo.end)}});});transmuxer.on('log', function (log) {self.postMessage({action: 'log',log});});};/*** All incoming messages route through this hash. If no function exists* to handle an incoming message, then we ignore the message.** @class MessageHandlers* @param {Object} options the options to initialize with*/class MessageHandlers {constructor(self, options) {this.options = options || {};this.self = self;this.init();}/*** initialize our web worker and wire all the events.*/init() {if (this.transmuxer) {this.transmuxer.dispose();}this.transmuxer = new transmuxer.Transmuxer(this.options);wireTransmuxerEvents(this.self, this.transmuxer);}pushMp4Captions(data) {if (!this.captionParser) {this.captionParser = new captionParser();this.captionParser.init();}const segment = new Uint8Array(data.data, data.byteOffset, data.byteLength);const parsed = this.captionParser.parse(segment, data.trackIds, data.timescales);this.self.postMessage({action: 'mp4Captions',captions: parsed && parsed.captions || [],logs: parsed && parsed.logs || [],data: segment.buffer}, [segment.buffer]);}probeMp4StartTime({timescales,data}) {const startTime = probe$2.startTime(timescales, data);this.self.postMessage({action: 'probeMp4StartTime',startTime,data}, [data.buffer]);}probeMp4Tracks({data}) {const tracks = probe$2.tracks(data);this.self.postMessage({action: 'probeMp4Tracks',tracks,data}, [data.buffer]);}/*** Probes an mp4 segment for EMSG boxes containing ID3 data.* https://aomediacodec.github.io/id3-emsg/** @param {Uint8Array} data segment data* @param {number} offset segment start time* @return {Object[]} an array of ID3 frames*/probeEmsgID3({data,offset}) {const id3Frames = probe$2.getEmsgID3(data, offset);this.self.postMessage({action: 'probeEmsgID3',id3Frames,emsgData: data}, [data.buffer]);}/*** Probe an mpeg2-ts segment to determine the start time of the segment in it's* internal "media time," as well as whether it contains video and/or audio.** @private* @param {Uint8Array} bytes - segment bytes* @param {number} baseStartTime* Relative reference timestamp used when adjusting frame timestamps for rollover.* This value should be in seconds, as it's converted to a 90khz clock within the* function body.* @return {Object} The start time of the current segment in "media time" as well as* whether it contains video and/or audio*/probeTs({data,baseStartTime}) {const tsStartTime = typeof baseStartTime === 'number' && !isNaN(baseStartTime) ? 
baseStartTime * clock$2.ONE_SECOND_IN_TS : void 0;const timeInfo = tsInspector.inspect(data, tsStartTime);let result = null;if (timeInfo) {result = {// each type's time info comes back as an array of 2 times, start and endhasVideo: timeInfo.video && timeInfo.video.length === 2 || false,hasAudio: timeInfo.audio && timeInfo.audio.length === 2 || false};if (result.hasVideo) {result.videoStart = timeInfo.video[0].ptsTime;}if (result.hasAudio) {result.audioStart = timeInfo.audio[0].ptsTime;}}this.self.postMessage({action: 'probeTs',result,data}, [data.buffer]);}clearAllMp4Captions() {if (this.captionParser) {this.captionParser.clearAllCaptions();}}clearParsedMp4Captions() {if (this.captionParser) {this.captionParser.clearParsedCaptions();}}/*** Adds data (a ts segment) to the start of the transmuxer pipeline for* processing.** @param {ArrayBuffer} data data to push into the muxer*/push(data) {// Cast array buffer to correct type for transmuxerconst segment = new Uint8Array(data.data, data.byteOffset, data.byteLength);this.transmuxer.push(segment);}/*** Recreate the transmuxer so that the next segment added via `push`* start with a fresh transmuxer.*/reset() {this.transmuxer.reset();}/*** Set the value that will be used as the `baseMediaDecodeTime` time for the* next segment pushed in. Subsequent segments will have their `baseMediaDecodeTime`* set relative to the first based on the PTS values.** @param {Object} data used to set the timestamp offset in the muxer*/setTimestampOffset(data) {const timestampOffset = data.timestampOffset || 0;this.transmuxer.setBaseMediaDecodeTime(Math.round(clock$2.secondsToVideoTs(timestampOffset)));}setAudioAppendStart(data) {this.transmuxer.setAudioAppendStart(Math.ceil(clock$2.secondsToVideoTs(data.appendStart)));}setRemux(data) {this.transmuxer.setRemux(data.remux);}/*** Forces the pipeline to finish processing the last segment and emit it's* results.** @param {Object} data event data, not really used*/flush(data) {this.transmuxer.flush(); // transmuxed done action is fired after both audio/video pipelines are flushedself.postMessage({action: 'done',type: 'transmuxed'});}endTimeline() {this.transmuxer.endTimeline(); // transmuxed endedtimeline action is fired after both audio/video pipelines end their// timelinesself.postMessage({action: 'endedtimeline',type: 'transmuxed'});}alignGopsWith(data) {this.transmuxer.alignGopsWith(data.gopsToAlignWith.slice());}}/*** Our web worker interface so that things can talk to mux.js* that will be running in a web worker. 
the scope is passed to this by* webworkify.** @param {Object} self the scope for the web worker*/self.onmessage = function (event) {if (event.data.action === 'init' && event.data.options) {this.messageHandlers = new MessageHandlers(self, event.data.options);return;}if (!this.messageHandlers) {this.messageHandlers = new MessageHandlers(self);}if (event.data && event.data.action && event.data.action !== 'init') {if (this.messageHandlers[event.data.action]) {this.messageHandlers[event.data.action](event.data);}}};}));var TransmuxWorker = factory(workerCode$1);/* rollup-plugin-worker-factory end for worker!/home/runner/work/http-streaming/http-streaming/src/transmuxer-worker.js */const handleData_ = (event, transmuxedData, callback) => {const {type,initSegment,captions,captionStreams,metadata,videoFrameDtsTime,videoFramePtsTime} = event.data.segment;transmuxedData.buffer.push({captions,captionStreams,metadata});const boxes = event.data.segment.boxes || {data: event.data.segment.data};const result = {type,// cast ArrayBuffer to TypedArraydata: new Uint8Array(boxes.data, boxes.data.byteOffset, boxes.data.byteLength),initSegment: new Uint8Array(initSegment.data, initSegment.byteOffset, initSegment.byteLength)};if (typeof videoFrameDtsTime !== 'undefined') {result.videoFrameDtsTime = videoFrameDtsTime;}if (typeof videoFramePtsTime !== 'undefined') {result.videoFramePtsTime = videoFramePtsTime;}callback(result);};const handleDone_ = ({transmuxedData,callback}) => {// Previously we only returned data on data events,// not on done events. Clear out the buffer to keep that consistent.transmuxedData.buffer = []; // all buffers should have been flushed from the muxer, so start processing anything we// have receivedcallback(transmuxedData);};const handleGopInfo_ = (event, transmuxedData) => {transmuxedData.gopInfo = event.data.gopInfo;};const processTransmux = options => {const {transmuxer,bytes,audioAppendStart,gopsToAlignWith,remux,onData,onTrackInfo,onAudioTimingInfo,onVideoTimingInfo,onVideoSegmentTimingInfo,onAudioSegmentTimingInfo,onId3,onCaptions,onDone,onEndedTimeline,onTransmuxerLog,isEndOfTimeline} = options;const transmuxedData = {buffer: []};let waitForEndedTimelineEvent = isEndOfTimeline;const handleMessage = event => {if (transmuxer.currentTransmux !== options) {// disposedreturn;}if (event.data.action === 'data') {handleData_(event, transmuxedData, onData);}if (event.data.action === 'trackinfo') {onTrackInfo(event.data.trackInfo);}if (event.data.action === 'gopInfo') {handleGopInfo_(event, transmuxedData);}if (event.data.action === 'audioTimingInfo') {onAudioTimingInfo(event.data.audioTimingInfo);}if (event.data.action === 'videoTimingInfo') {onVideoTimingInfo(event.data.videoTimingInfo);}if (event.data.action === 'videoSegmentTimingInfo') {onVideoSegmentTimingInfo(event.data.videoSegmentTimingInfo);}if (event.data.action === 'audioSegmentTimingInfo') {onAudioSegmentTimingInfo(event.data.audioSegmentTimingInfo);}if (event.data.action === 'id3Frame') {onId3([event.data.id3Frame], event.data.id3Frame.dispatchType);}if (event.data.action === 'caption') {onCaptions(event.data.caption);}if (event.data.action === 'endedtimeline') {waitForEndedTimelineEvent = false;onEndedTimeline();}if (event.data.action === 'log') {onTransmuxerLog(event.data.log);} // wait for the transmuxed event since we may have audio and videoif (event.data.type !== 'transmuxed') {return;} // If the "endedtimeline" event has not yet fired, and this segment represents the end// of a timeline, that means there may still be 
data events before the segment// processing can be considerred complete. In that case, the final event should be// an "endedtimeline" event with the type "transmuxed."if (waitForEndedTimelineEvent) {return;}transmuxer.onmessage = null;handleDone_({transmuxedData,callback: onDone});/* eslint-disable no-use-before-define */dequeue(transmuxer);/* eslint-enable */};transmuxer.onmessage = handleMessage;if (audioAppendStart) {transmuxer.postMessage({action: 'setAudioAppendStart',appendStart: audioAppendStart});} // allow empty arrays to be passed to clear out GOPsif (Array.isArray(gopsToAlignWith)) {transmuxer.postMessage({action: 'alignGopsWith',gopsToAlignWith});}if (typeof remux !== 'undefined') {transmuxer.postMessage({action: 'setRemux',remux});}if (bytes.byteLength) {const buffer = bytes instanceof ArrayBuffer ? bytes : bytes.buffer;const byteOffset = bytes instanceof ArrayBuffer ? 0 : bytes.byteOffset;transmuxer.postMessage({action: 'push',// Send the typed-array of data as an ArrayBuffer so that// it can be sent as a "Transferable" and avoid the costly// memory copydata: buffer,// To recreate the original typed-array, we need information// about what portion of the ArrayBuffer it was a view intobyteOffset,byteLength: bytes.byteLength}, [buffer]);}if (isEndOfTimeline) {transmuxer.postMessage({action: 'endTimeline'});} // even if we didn't push any bytes, we have to make sure we flush in case we reached// the end of the segmenttransmuxer.postMessage({action: 'flush'});};const dequeue = transmuxer => {transmuxer.currentTransmux = null;if (transmuxer.transmuxQueue.length) {transmuxer.currentTransmux = transmuxer.transmuxQueue.shift();if (typeof transmuxer.currentTransmux === 'function') {transmuxer.currentTransmux();} else {processTransmux(transmuxer.currentTransmux);}}};const processAction = (transmuxer, action) => {transmuxer.postMessage({action});dequeue(transmuxer);};const enqueueAction = (action, transmuxer) => {if (!transmuxer.currentTransmux) {transmuxer.currentTransmux = action;processAction(transmuxer, action);return;}transmuxer.transmuxQueue.push(processAction.bind(null, transmuxer, action));};const reset = transmuxer => {enqueueAction('reset', transmuxer);};const endTimeline = transmuxer => {enqueueAction('endTimeline', transmuxer);};const transmux = options => {if (!options.transmuxer.currentTransmux) {options.transmuxer.currentTransmux = options;processTransmux(options);return;}options.transmuxer.transmuxQueue.push(options);};const createTransmuxer = options => {const transmuxer = new TransmuxWorker();transmuxer.currentTransmux = null;transmuxer.transmuxQueue = [];const term = transmuxer.terminate;transmuxer.terminate = () => {transmuxer.currentTransmux = null;transmuxer.transmuxQueue.length = 0;return term.call(transmuxer);};transmuxer.postMessage({action: 'init',options});return transmuxer;};var segmentTransmuxer = {reset,endTimeline,transmux,createTransmuxer};const workerCallback = function (options) {const transmuxer = options.transmuxer;const endAction = options.endAction || options.action;const callback = options.callback;const message = _extends$1({}, options, {endAction: null,transmuxer: null,callback: null});const listenForEndEvent = event => {if (event.data.action !== endAction) {return;}transmuxer.removeEventListener('message', listenForEndEvent); // transfer ownership of bytes back to us.if (event.data.data) {event.data.data = new Uint8Array(event.data.data, options.byteOffset || 0, options.byteLength || event.data.data.byteLength);if (options.data) {options.data = 
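// Minimal sketch of the single-flight queue implemented by transmux() and
// dequeue() above: one job owns the worker at a time, everything else waits
// in transmuxQueue and is started when the current job finishes. The helper
// name below exists only in this sketch.
//
//   function submitJob(transmuxer, job) { // job: options object or function
//     if (!transmuxer.currentTransmux) {
//       transmuxer.currentTransmux = job;
//       // start it right away (processTransmux / processAction above)
//     } else {
//       transmuxer.transmuxQueue.push(job);
//     }
//   }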
event.data.data;}}callback(event.data);};transmuxer.addEventListener('message', listenForEndEvent);if (options.data) {const isArrayBuffer = options.data instanceof ArrayBuffer;message.byteOffset = isArrayBuffer ? 0 : options.data.byteOffset;message.byteLength = options.data.byteLength;const transfers = [isArrayBuffer ? options.data : options.data.buffer];transmuxer.postMessage(message, transfers);} else {transmuxer.postMessage(message);}};const REQUEST_ERRORS = {FAILURE: 2,TIMEOUT: -101,ABORTED: -102};/*** Abort all requests** @param {Object} activeXhrs - an object that tracks all XHR requests*/const abortAll = activeXhrs => {activeXhrs.forEach(xhr => {xhr.abort();});};/*** Gather important bandwidth stats once a request has completed** @param {Object} request - the XHR request from which to gather stats*/const getRequestStats = request => {return {bandwidth: request.bandwidth,bytesReceived: request.bytesReceived || 0,roundTripTime: request.roundTripTime || 0};};/*** If possible gather bandwidth stats as a request is in* progress** @param {Event} progressEvent - an event object from an XHR's progress event*/const getProgressStats = progressEvent => {const request = progressEvent.target;const roundTripTime = Date.now() - request.requestTime;const stats = {bandwidth: Infinity,bytesReceived: 0,roundTripTime: roundTripTime || 0};stats.bytesReceived = progressEvent.loaded; // This can result in Infinity if stats.roundTripTime is 0 but that is ok// because we should only use bandwidth stats on progress to determine when// abort a request early due to insufficient bandwidthstats.bandwidth = Math.floor(stats.bytesReceived / stats.roundTripTime * 8 * 1000);return stats;};/*** Handle all error conditions in one place and return an object* with all the information** @param {Error|null} error - if non-null signals an error occured with the XHR* @param {Object} request - the XHR request that possibly generated the error*/const handleErrors = (error, request) => {if (request.timedout) {return {status: request.status,message: 'HLS request timed-out at URL: ' + request.uri,code: REQUEST_ERRORS.TIMEOUT,xhr: request};}if (request.aborted) {return {status: request.status,message: 'HLS request aborted at URL: ' + request.uri,code: REQUEST_ERRORS.ABORTED,xhr: request};}if (error) {return {status: request.status,message: 'HLS request errored at URL: ' + request.uri,code: REQUEST_ERRORS.FAILURE,xhr: request};}if (request.responseType === 'arraybuffer' && request.response.byteLength === 0) {return {status: request.status,message: 'Empty HLS response at URL: ' + request.uri,code: REQUEST_ERRORS.FAILURE,xhr: request};}return null;};/*** Handle responses for key data and convert the key data to the correct format* for the decryption step later** @param {Object} segment - a simplified copy of the segmentInfo object* from SegmentLoader* @param {Array} objects - objects to add the key bytes to.* @param {Function} finishProcessingFn - a callback to execute to continue processing* this request*/const handleKeyResponse = (segment, objects, finishProcessingFn) => (error, request) => {const response = request.response;const errorObj = handleErrors(error, request);if (errorObj) {return finishProcessingFn(errorObj, segment);}if (response.byteLength !== 16) {return finishProcessingFn({status: request.status,message: 'Invalid HLS key at URL: ' + request.uri,code: REQUEST_ERRORS.FAILURE,xhr: request}, segment);}const view = new DataView(response);const bytes = new Uint32Array([view.getUint32(0), view.getUint32(4), 
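// Worked example of the bandwidth estimate above (illustrative numbers):
// 250000 bytes received 400 ms after the request started gives
// Math.floor(250000 / 400 * 8 * 1000) === 5000000, i.e. 5 Mbps: bytes per
// millisecond, converted to bits (* 8), scaled to a per-second rate (* 1000).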
view.getUint32(8), view.getUint32(12)]);for (let i = 0; i < objects.length; i++) {objects[i].bytes = bytes;}return finishProcessingFn(null, segment);};const parseInitSegment = (segment, callback) => {const type = detectContainerForBytes(segment.map.bytes); // TODO: We should also handle ts init segments here, but we// only know how to parse mp4 init segments at the momentif (type !== 'mp4') {const uri = segment.map.resolvedUri || segment.map.uri;return callback({internal: true,message: `Found unsupported ${type || 'unknown'} container for initialization segment at URL: ${uri}`,code: REQUEST_ERRORS.FAILURE});}workerCallback({action: 'probeMp4Tracks',data: segment.map.bytes,transmuxer: segment.transmuxer,callback: ({tracks,data}) => {// transfer bytes back to ussegment.map.bytes = data;tracks.forEach(function (track) {segment.map.tracks = segment.map.tracks || {}; // only support one track of each type for nowif (segment.map.tracks[track.type]) {return;}segment.map.tracks[track.type] = track;if (typeof track.id === 'number' && track.timescale) {segment.map.timescales = segment.map.timescales || {};segment.map.timescales[track.id] = track.timescale;}});return callback(null);}});};/*** Handle init-segment responses** @param {Object} segment - a simplified copy of the segmentInfo object* from SegmentLoader* @param {Function} finishProcessingFn - a callback to execute to continue processing* this request*/const handleInitSegmentResponse = ({segment,finishProcessingFn}) => (error, request) => {const errorObj = handleErrors(error, request);if (errorObj) {return finishProcessingFn(errorObj, segment);}const bytes = new Uint8Array(request.response); // init segment is encypted, we will have to wait// until the key request is done to decrypt.if (segment.map.key) {segment.map.encryptedBytes = bytes;return finishProcessingFn(null, segment);}segment.map.bytes = bytes;parseInitSegment(segment, function (parseError) {if (parseError) {parseError.xhr = request;parseError.status = request.status;return finishProcessingFn(parseError, segment);}finishProcessingFn(null, segment);});};/*** Response handler for segment-requests being sure to set the correct* property depending on whether the segment is encryped or not* Also records and keeps track of stats that are used for ABR purposes** @param {Object} segment - a simplified copy of the segmentInfo object* from SegmentLoader* @param {Function} finishProcessingFn - a callback to execute to continue processing* this request*/const handleSegmentResponse = ({segment,finishProcessingFn,responseType}) => (error, request) => {const errorObj = handleErrors(error, request);if (errorObj) {return finishProcessingFn(errorObj, segment);}const newBytes =// although responseText "should" exist, this guard serves to prevent an error being// thrown for two primary cases:// 1. the mime type override stops working, or is not implemented for a specific// browser// 2. when using mock XHR libraries like sinon that do not allow the override behaviorresponseType === 'arraybuffer' || !request.responseText ? 
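// Illustrative sketch of the key handling above: an HLS AES-128 key is
// exactly 16 bytes and is repacked as four big-endian 32-bit words for the
// decryption worker. The helper name is used only in this sketch.
//
//   function keyToWords(arrayBuffer) {
//     if (arrayBuffer.byteLength !== 16) {
//       throw new Error('Invalid HLS key'); // mirrors the error path above
//     }
//     var view = new DataView(arrayBuffer);
//     return new Uint32Array([
//       view.getUint32(0), view.getUint32(4),
//       view.getUint32(8), view.getUint32(12)
//     ]);
//   }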
request.response : stringToArrayBuffer(request.responseText.substring(segment.lastReachedChar || 0));segment.stats = getRequestStats(request);if (segment.key) {segment.encryptedBytes = new Uint8Array(newBytes);} else {segment.bytes = new Uint8Array(newBytes);}return finishProcessingFn(null, segment);};const transmuxAndNotify = ({segment,bytes,trackInfoFn,timingInfoFn,videoSegmentTimingInfoFn,audioSegmentTimingInfoFn,id3Fn,captionsFn,isEndOfTimeline,endedTimelineFn,dataFn,doneFn,onTransmuxerLog}) => {const fmp4Tracks = segment.map && segment.map.tracks || {};const isMuxed = Boolean(fmp4Tracks.audio && fmp4Tracks.video); // Keep references to each function so we can null them out after we're done with them.// One reason for this is that in the case of full segments, we want to trust start// times from the probe, rather than the transmuxer.let audioStartFn = timingInfoFn.bind(null, segment, 'audio', 'start');const audioEndFn = timingInfoFn.bind(null, segment, 'audio', 'end');let videoStartFn = timingInfoFn.bind(null, segment, 'video', 'start');const videoEndFn = timingInfoFn.bind(null, segment, 'video', 'end');const finish = () => transmux({bytes,transmuxer: segment.transmuxer,audioAppendStart: segment.audioAppendStart,gopsToAlignWith: segment.gopsToAlignWith,remux: isMuxed,onData: result => {result.type = result.type === 'combined' ? 'video' : result.type;dataFn(segment, result);},onTrackInfo: trackInfo => {if (trackInfoFn) {if (isMuxed) {trackInfo.isMuxed = true;}trackInfoFn(segment, trackInfo);}},onAudioTimingInfo: audioTimingInfo => {// we only want the first start value we encounterif (audioStartFn && typeof audioTimingInfo.start !== 'undefined') {audioStartFn(audioTimingInfo.start);audioStartFn = null;} // we want to continually update the end timeif (audioEndFn && typeof audioTimingInfo.end !== 'undefined') {audioEndFn(audioTimingInfo.end);}},onVideoTimingInfo: videoTimingInfo => {// we only want the first start value we encounterif (videoStartFn && typeof videoTimingInfo.start !== 'undefined') {videoStartFn(videoTimingInfo.start);videoStartFn = null;} // we want to continually update the end timeif (videoEndFn && typeof videoTimingInfo.end !== 'undefined') {videoEndFn(videoTimingInfo.end);}},onVideoSegmentTimingInfo: videoSegmentTimingInfo => {videoSegmentTimingInfoFn(videoSegmentTimingInfo);},onAudioSegmentTimingInfo: audioSegmentTimingInfo => {audioSegmentTimingInfoFn(audioSegmentTimingInfo);},onId3: (id3Frames, dispatchType) => {id3Fn(segment, id3Frames, dispatchType);},onCaptions: captions => {captionsFn(segment, [captions]);},isEndOfTimeline,onEndedTimeline: () => {endedTimelineFn();},onTransmuxerLog,onDone: result => {if (!doneFn) {return;}result.type = result.type === 'combined' ? 'video' : result.type;doneFn(null, segment, result);}}); // In the transmuxer, we don't yet have the ability to extract a "proper" start time.// Meaning cached frame data may corrupt our notion of where this segment// really starts. 
To get around this, probe for the info needed.workerCallback({action: 'probeTs',transmuxer: segment.transmuxer,data: bytes,baseStartTime: segment.baseStartTime,callback: data => {segment.bytes = bytes = data.data;const probeResult = data.result;if (probeResult) {trackInfoFn(segment, {hasAudio: probeResult.hasAudio,hasVideo: probeResult.hasVideo,isMuxed});trackInfoFn = null;}finish();}});};const handleSegmentBytes = ({segment,bytes,trackInfoFn,timingInfoFn,videoSegmentTimingInfoFn,audioSegmentTimingInfoFn,id3Fn,captionsFn,isEndOfTimeline,endedTimelineFn,dataFn,doneFn,onTransmuxerLog}) => {let bytesAsUint8Array = new Uint8Array(bytes); // TODO:// We should have a handler that fetches the number of bytes required// to check if something is fmp4. This will allow us to save bandwidth// because we can only exclude a playlist and abort requests// by codec after trackinfo triggers.if (isLikelyFmp4MediaSegment(bytesAsUint8Array)) {segment.isFmp4 = true;const {tracks} = segment.map;const trackInfo = {isFmp4: true,hasVideo: !!tracks.video,hasAudio: !!tracks.audio}; // if we have a audio track, with a codec that is not set to// encrypted audioif (tracks.audio && tracks.audio.codec && tracks.audio.codec !== 'enca') {trackInfo.audioCodec = tracks.audio.codec;} // if we have a video track, with a codec that is not set to// encrypted videoif (tracks.video && tracks.video.codec && tracks.video.codec !== 'encv') {trackInfo.videoCodec = tracks.video.codec;}if (tracks.video && tracks.audio) {trackInfo.isMuxed = true;} // since we don't support appending fmp4 data on progress, we know we have the full// segment heretrackInfoFn(segment, trackInfo); // The probe doesn't provide the segment end time, so only callback with the start// time. The end time can be roughly calculated by the receiver using the duration.//// Note that the start time returned by the probe reflects the baseMediaDecodeTime, as// that is the true start of the segment (where the playback engine should begin// decoding).const finishLoading = (captions, id3Frames) => {// if the track still has audio at this point it is only possible// for it to be audio only. See `tracks.video && tracks.audio` if statement// above.// we make sure to use segment.bytes here as thatdataFn(segment, {data: bytesAsUint8Array,type: trackInfo.hasAudio && !trackInfo.isMuxed ? 
'audio' : 'video'});if (id3Frames && id3Frames.length) {id3Fn(segment, id3Frames);}if (captions && captions.length) {captionsFn(segment, captions);}doneFn(null, segment, {});};workerCallback({action: 'probeMp4StartTime',timescales: segment.map.timescales,data: bytesAsUint8Array,transmuxer: segment.transmuxer,callback: ({data,startTime}) => {// transfer bytes back to usbytes = data.buffer;segment.bytes = bytesAsUint8Array = data;if (trackInfo.hasAudio && !trackInfo.isMuxed) {timingInfoFn(segment, 'audio', 'start', startTime);}if (trackInfo.hasVideo) {timingInfoFn(segment, 'video', 'start', startTime);}workerCallback({action: 'probeEmsgID3',data: bytesAsUint8Array,transmuxer: segment.transmuxer,offset: startTime,callback: ({emsgData,id3Frames}) => {// transfer bytes back to usbytes = emsgData.buffer;segment.bytes = bytesAsUint8Array = emsgData; // Run through the CaptionParser in case there are captions.// Initialize CaptionParser if it hasn't been yetif (!tracks.video || !emsgData.byteLength || !segment.transmuxer) {finishLoading(undefined, id3Frames);return;}workerCallback({action: 'pushMp4Captions',endAction: 'mp4Captions',transmuxer: segment.transmuxer,data: bytesAsUint8Array,timescales: segment.map.timescales,trackIds: [tracks.video.id],callback: message => {// transfer bytes back to usbytes = message.data.buffer;segment.bytes = bytesAsUint8Array = message.data;message.logs.forEach(function (log) {onTransmuxerLog(merge(log, {stream: 'mp4CaptionParser'}));});finishLoading(message.captions, id3Frames);}});}});}});return;} // VTT or other segments that don't need processingif (!segment.transmuxer) {doneFn(null, segment, {});return;}if (typeof segment.container === 'undefined') {segment.container = detectContainerForBytes(bytesAsUint8Array);}if (segment.container !== 'ts' && segment.container !== 'aac') {trackInfoFn(segment, {hasAudio: false,hasVideo: false});doneFn(null, segment, {});return;} // ts or aactransmuxAndNotify({segment,bytes,trackInfoFn,timingInfoFn,videoSegmentTimingInfoFn,audioSegmentTimingInfoFn,id3Fn,captionsFn,isEndOfTimeline,endedTimelineFn,dataFn,doneFn,onTransmuxerLog});};const decrypt = function ({id,key,encryptedBytes,decryptionWorker}, callback) {const decryptionHandler = event => {if (event.data.source === id) {decryptionWorker.removeEventListener('message', decryptionHandler);const decrypted = event.data.decrypted;callback(new Uint8Array(decrypted.bytes, decrypted.byteOffset, decrypted.byteLength));}};decryptionWorker.addEventListener('message', decryptionHandler);let keyBytes;if (key.bytes.slice) {keyBytes = key.bytes.slice();} else {keyBytes = new Uint32Array(Array.prototype.slice.call(key.bytes));} // incrementally decrypt the bytesdecryptionWorker.postMessage(createTransferableMessage({source: id,encrypted: encryptedBytes,key: keyBytes,iv: key.iv}), [encryptedBytes.buffer, keyBytes.buffer]);};/*** Decrypt the segment via the decryption web worker** @param {WebWorker} decryptionWorker - a WebWorker interface to AES-128 decryption* routines* @param {Object} segment - a simplified copy of the segmentInfo object* from SegmentLoader* @param {Function} trackInfoFn - a callback that receives track info* @param {Function} timingInfoFn - a callback that receives timing info* @param {Function} videoSegmentTimingInfoFn* a callback that receives video timing info based on media times and* any adjustments made by the transmuxer* @param {Function} audioSegmentTimingInfoFn* a callback that receives audio timing info based on media times and* any adjustments made by the 
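// --- Illustrative sketch (not part of video.js) -----------------------------
// decrypt() above pairs a postMessage to the decryption worker with a one-shot
// 'message' listener keyed by a request id. A generic version of that
// request/response helper, assuming the worker echoes back objects shaped like
// { source: id, ...payload }:
const requestFromWorker = (worker, id, message, transferables, callback) => {
  const handler = event => {
    if (event.data.source !== id) {
      // response belongs to a different outstanding request; keep listening
      return;
    }
    worker.removeEventListener('message', handler);
    callback(event.data);
  };
  worker.addEventListener('message', handler);
  worker.postMessage(Object.assign({ source: id }, message), transferables || []);
};
// Usage would mirror the decrypt() call above: post the encrypted bytes and key
// as transferables, then resolve once the worker answers with the same id.
// -----------------------------------------------------------------------------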
transmuxer* @param {boolean} isEndOfTimeline* true if this segment represents the last segment in a timeline* @param {Function} endedTimelineFn* a callback made when a timeline is ended, will only be called if* isEndOfTimeline is true* @param {Function} dataFn - a callback that is executed when segment bytes are available* and ready to use* @param {Function} doneFn - a callback that is executed after decryption has completed*/const decryptSegment = ({decryptionWorker,segment,trackInfoFn,timingInfoFn,videoSegmentTimingInfoFn,audioSegmentTimingInfoFn,id3Fn,captionsFn,isEndOfTimeline,endedTimelineFn,dataFn,doneFn,onTransmuxerLog}) => {decrypt({id: segment.requestId,key: segment.key,encryptedBytes: segment.encryptedBytes,decryptionWorker}, decryptedBytes => {segment.bytes = decryptedBytes;handleSegmentBytes({segment,bytes: segment.bytes,trackInfoFn,timingInfoFn,videoSegmentTimingInfoFn,audioSegmentTimingInfoFn,id3Fn,captionsFn,isEndOfTimeline,endedTimelineFn,dataFn,doneFn,onTransmuxerLog});});};/*** This function waits for all XHRs to finish (with either success or failure)* before continueing processing via it's callback. The function gathers errors* from each request into a single errors array so that the error status for* each request can be examined later.** @param {Object} activeXhrs - an object that tracks all XHR requests* @param {WebWorker} decryptionWorker - a WebWorker interface to AES-128 decryption* routines* @param {Function} trackInfoFn - a callback that receives track info* @param {Function} timingInfoFn - a callback that receives timing info* @param {Function} videoSegmentTimingInfoFn* a callback that receives video timing info based on media times and* any adjustments made by the transmuxer* @param {Function} audioSegmentTimingInfoFn* a callback that receives audio timing info based on media times and* any adjustments made by the transmuxer* @param {Function} id3Fn - a callback that receives ID3 metadata* @param {Function} captionsFn - a callback that receives captions* @param {boolean} isEndOfTimeline* true if this segment represents the last segment in a timeline* @param {Function} endedTimelineFn* a callback made when a timeline is ended, will only be called if* isEndOfTimeline is true* @param {Function} dataFn - a callback that is executed when segment bytes are available* and ready to use* @param {Function} doneFn - a callback that is executed after all resources have been* downloaded and any decryption completed*/const waitForCompletion = ({activeXhrs,decryptionWorker,trackInfoFn,timingInfoFn,videoSegmentTimingInfoFn,audioSegmentTimingInfoFn,id3Fn,captionsFn,isEndOfTimeline,endedTimelineFn,dataFn,doneFn,onTransmuxerLog}) => {let count = 0;let didError = false;return (error, segment) => {if (didError) {return;}if (error) {didError = true; // If there are errors, we have to abort any outstanding requestsabortAll(activeXhrs); // Even though the requests above are aborted, and in theory we could wait until we// handle the aborted events from those requests, there are some cases where we may// never get an aborted event. For instance, if the network connection is lost and// there were two requests, the first may have triggered an error immediately, while// the second request remains unsent. 
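// --- Illustrative sketch (not part of video.js) -----------------------------
// waitForCompletion() above hands every request in the batch the same
// callback: it only continues once all of them have reported back, and it
// short-circuits on the first error (at which point the caller aborts the
// remaining requests). The core of that gating logic, reduced to a standalone
// helper for a known `total` number of requests:
const makeCompletionGate = (total, onAllDone, onError) => {
  let count = 0;
  let failed = false;
  return (error, result) => {
    if (failed) {
      return; // an earlier request already errored; ignore stragglers
    }
    if (error) {
      failed = true;
      return onError(error); // caller aborts outstanding requests here
    }
    count += 1;
    if (count === total) {
      onAllDone(result);
    }
  };
};
// -----------------------------------------------------------------------------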
In that case, the aborted algorithm will not// trigger an abort: see https://xhr.spec.whatwg.org/#the-abort()-method//// We also can't rely on the ready state of the XHR, since the request that// triggered the connection error may also show as a ready state of 0 (unsent).// Therefore, we have to finish this group of requests immediately after the first// seen error.return doneFn(error, segment);}count += 1;if (count === activeXhrs.length) {const segmentFinish = function () {if (segment.encryptedBytes) {return decryptSegment({decryptionWorker,segment,trackInfoFn,timingInfoFn,videoSegmentTimingInfoFn,audioSegmentTimingInfoFn,id3Fn,captionsFn,isEndOfTimeline,endedTimelineFn,dataFn,doneFn,onTransmuxerLog});} // Otherwise, everything is ready just continuehandleSegmentBytes({segment,bytes: segment.bytes,trackInfoFn,timingInfoFn,videoSegmentTimingInfoFn,audioSegmentTimingInfoFn,id3Fn,captionsFn,isEndOfTimeline,endedTimelineFn,dataFn,doneFn,onTransmuxerLog});}; // Keep track of when *all* of the requests have completedsegment.endOfAllRequests = Date.now();if (segment.map && segment.map.encryptedBytes && !segment.map.bytes) {return decrypt({decryptionWorker,// add -init to the "id" to differentiate between segment// and init segment decryption, just in case they happen// at the same time at some point in the future.id: segment.requestId + '-init',encryptedBytes: segment.map.encryptedBytes,key: segment.map.key}, decryptedBytes => {segment.map.bytes = decryptedBytes;parseInitSegment(segment, parseError => {if (parseError) {abortAll(activeXhrs);return doneFn(parseError, segment);}segmentFinish();});});}segmentFinish();}};};/*** Calls the abort callback if any request within the batch was aborted. Will only call* the callback once per batch of requests, even if multiple were aborted.** @param {Object} loadendState - state to check to see if the abort function was called* @param {Function} abortFn - callback to call for abort*/const handleLoadEnd = ({loadendState,abortFn}) => event => {const request = event.target;if (request.aborted && abortFn && !loadendState.calledAbortFn) {abortFn();loadendState.calledAbortFn = true;}};/*** Simple progress event callback handler that gathers some stats before* executing a provided callback with the `segment` object** @param {Object} segment - a simplified copy of the segmentInfo object* from SegmentLoader* @param {Function} progressFn - a callback that is executed each time a progress event* is received* @param {Function} trackInfoFn - a callback that receives track info* @param {Function} timingInfoFn - a callback that receives timing info* @param {Function} videoSegmentTimingInfoFn* a callback that receives video timing info based on media times and* any adjustments made by the transmuxer* @param {Function} audioSegmentTimingInfoFn* a callback that receives audio timing info based on media times and* any adjustments made by the transmuxer* @param {boolean} isEndOfTimeline* true if this segment represents the last segment in a timeline* @param {Function} endedTimelineFn* a callback made when a timeline is ended, will only be called if* isEndOfTimeline is true* @param {Function} dataFn - a callback that is executed when segment bytes are available* and ready to use* @param {Event} event - the progress event object from XMLHttpRequest*/const handleProgress = ({segment,progressFn,trackInfoFn,timingInfoFn,videoSegmentTimingInfoFn,audioSegmentTimingInfoFn,id3Fn,captionsFn,isEndOfTimeline,endedTimelineFn,dataFn}) => event => {const request = event.target;if 
(request.aborted) {return;}segment.stats = merge(segment.stats, getProgressStats(event)); // record the time that we receive the first byte of dataif (!segment.stats.firstBytesReceivedAt && segment.stats.bytesReceived) {segment.stats.firstBytesReceivedAt = Date.now();}return progressFn(event, segment);};/*** Load all resources and does any processing necessary for a media-segment** Features:* decrypts the media-segment if it has a key uri and an iv* aborts *all* requests if *any* one request fails** The segment object, at minimum, has the following format:* {* resolvedUri: String,* [transmuxer]: Object,* [byterange]: {* offset: Number,* length: Number* },* [key]: {* resolvedUri: String* [byterange]: {* offset: Number,* length: Number* },* iv: {* bytes: Uint32Array* }* },* [map]: {* resolvedUri: String,* [byterange]: {* offset: Number,* length: Number* },* [bytes]: Uint8Array* }* }* ...where [name] denotes optional properties** @param {Function} xhr - an instance of the xhr wrapper in xhr.js* @param {Object} xhrOptions - the base options to provide to all xhr requests* @param {WebWorker} decryptionWorker - a WebWorker interface to AES-128* decryption routines* @param {Object} segment - a simplified copy of the segmentInfo object* from SegmentLoader* @param {Function} abortFn - a callback called (only once) if any piece of a request was* aborted* @param {Function} progressFn - a callback that receives progress events from the main* segment's xhr request* @param {Function} trackInfoFn - a callback that receives track info* @param {Function} timingInfoFn - a callback that receives timing info* @param {Function} videoSegmentTimingInfoFn* a callback that receives video timing info based on media times and* any adjustments made by the transmuxer* @param {Function} audioSegmentTimingInfoFn* a callback that receives audio timing info based on media times and* any adjustments made by the transmuxer* @param {Function} id3Fn - a callback that receives ID3 metadata* @param {Function} captionsFn - a callback that receives captions* @param {boolean} isEndOfTimeline* true if this segment represents the last segment in a timeline* @param {Function} endedTimelineFn* a callback made when a timeline is ended, will only be called if* isEndOfTimeline is true* @param {Function} dataFn - a callback that receives data from the main segment's xhr* request, transmuxed if needed* @param {Function} doneFn - a callback that is executed only once all requests have* succeeded or failed* @return {Function} a function that, when invoked, immediately aborts all* outstanding requests*/const mediaSegmentRequest = ({xhr,xhrOptions,decryptionWorker,segment,abortFn,progressFn,trackInfoFn,timingInfoFn,videoSegmentTimingInfoFn,audioSegmentTimingInfoFn,id3Fn,captionsFn,isEndOfTimeline,endedTimelineFn,dataFn,doneFn,onTransmuxerLog}) => {const activeXhrs = [];const finishProcessingFn = waitForCompletion({activeXhrs,decryptionWorker,trackInfoFn,timingInfoFn,videoSegmentTimingInfoFn,audioSegmentTimingInfoFn,id3Fn,captionsFn,isEndOfTimeline,endedTimelineFn,dataFn,doneFn,onTransmuxerLog}); // optionally, request the decryption keyif (segment.key && !segment.key.bytes) {const objects = [segment.key];if (segment.map && !segment.map.bytes && segment.map.key && segment.map.key.resolvedUri === segment.key.resolvedUri) {objects.push(segment.map.key);}const keyRequestOptions = merge(xhrOptions, {uri: segment.key.resolvedUri,responseType: 'arraybuffer'});const keyRequestCallback = handleKeyResponse(segment, objects, finishProcessingFn);const 
keyXhr = xhr(keyRequestOptions, keyRequestCallback);activeXhrs.push(keyXhr);} // optionally, request the associated media init segmentif (segment.map && !segment.map.bytes) {const differentMapKey = segment.map.key && (!segment.key || segment.key.resolvedUri !== segment.map.key.resolvedUri);if (differentMapKey) {const mapKeyRequestOptions = merge(xhrOptions, {uri: segment.map.key.resolvedUri,responseType: 'arraybuffer'});const mapKeyRequestCallback = handleKeyResponse(segment, [segment.map.key], finishProcessingFn);const mapKeyXhr = xhr(mapKeyRequestOptions, mapKeyRequestCallback);activeXhrs.push(mapKeyXhr);}const initSegmentOptions = merge(xhrOptions, {uri: segment.map.resolvedUri,responseType: 'arraybuffer',headers: segmentXhrHeaders(segment.map)});const initSegmentRequestCallback = handleInitSegmentResponse({segment,finishProcessingFn});const initSegmentXhr = xhr(initSegmentOptions, initSegmentRequestCallback);activeXhrs.push(initSegmentXhr);}const segmentRequestOptions = merge(xhrOptions, {uri: segment.part && segment.part.resolvedUri || segment.resolvedUri,responseType: 'arraybuffer',headers: segmentXhrHeaders(segment)});const segmentRequestCallback = handleSegmentResponse({segment,finishProcessingFn,responseType: segmentRequestOptions.responseType});const segmentXhr = xhr(segmentRequestOptions, segmentRequestCallback);segmentXhr.addEventListener('progress', handleProgress({segment,progressFn,trackInfoFn,timingInfoFn,videoSegmentTimingInfoFn,audioSegmentTimingInfoFn,id3Fn,captionsFn,isEndOfTimeline,endedTimelineFn,dataFn}));activeXhrs.push(segmentXhr); // since all parts of the request must be considered, but should not make callbacks// multiple times, provide a shared state objectconst loadendState = {};activeXhrs.forEach(activeXhr => {activeXhr.addEventListener('loadend', handleLoadEnd({loadendState,abortFn}));});return () => abortAll(activeXhrs);};/*** @file - codecs.js - Handles tasks regarding codec strings such as translating them to* codec strings, or translating codec strings into objects that can be examined.*/const logFn$1 = logger('CodecUtils');/*** Returns a set of codec strings parsed from the playlist or the default* codec strings if no codecs were specified in the playlist** @param {Playlist} media the current media playlist* @return {Object} an object with the video and audio codecs*/const getCodecs = function (media) {// if the codecs were explicitly specified, use them instead of the// defaultsconst mediaAttributes = media.attributes || {};if (mediaAttributes.CODECS) {return parseCodecs(mediaAttributes.CODECS);}};const isMaat = (main, media) => {const mediaAttributes = media.attributes || {};return main && main.mediaGroups && main.mediaGroups.AUDIO && mediaAttributes.AUDIO && main.mediaGroups.AUDIO[mediaAttributes.AUDIO];};const isMuxed = (main, media) => {if (!isMaat(main, media)) {return true;}const mediaAttributes = media.attributes || {};const audioGroup = main.mediaGroups.AUDIO[mediaAttributes.AUDIO];for (const groupId in audioGroup) {// If an audio group has a URI (the case for HLS, as HLS will use external playlists),// or there are listed playlists (the case for DASH, as the manifest will have already// provided all of the details necessary to generate the audio playlist, as opposed to// HLS' externally requested playlists), then the content is demuxed.if (!audioGroup[groupId].uri && !audioGroup[groupId].playlists) {return true;}}return false;};const unwrapCodecList = function (codecList) {const codecs = {};codecList.forEach(({mediaType,type,details}) => 
{codecs[mediaType] = codecs[mediaType] || [];codecs[mediaType].push(translateLegacyCodec(`${type}${details}`));});Object.keys(codecs).forEach(function (mediaType) {if (codecs[mediaType].length > 1) {logFn$1(`multiple ${mediaType} codecs found as attributes: ${codecs[mediaType].join(', ')}. Setting playlist codecs to null so that we wait for mux.js to probe segments for real codecs.`);codecs[mediaType] = null;return;}codecs[mediaType] = codecs[mediaType][0];});return codecs;};const codecCount = function (codecObj) {let count = 0;if (codecObj.audio) {count++;}if (codecObj.video) {count++;}return count;};/*** Calculates the codec strings for a working configuration of* SourceBuffers to play variant streams in a main playlist. If* there is no possible working configuration, an empty object will be* returned.** @param main {Object} the m3u8 object for the main playlist* @param media {Object} the m3u8 object for the variant playlist* @return {Object} the codec strings.** @private*/const codecsForPlaylist = function (main, media) {const mediaAttributes = media.attributes || {};const codecInfo = unwrapCodecList(getCodecs(media) || []); // HLS with multiple-audio tracks must always get an audio codec.// Put another way, there is no way to have a video-only multiple-audio HLS!if (isMaat(main, media) && !codecInfo.audio) {if (!isMuxed(main, media)) {// It is possible for codecs to be specified on the audio media group playlist but// not on the rendition playlist. This is mostly the case for DASH, where audio and// video are always separate (and separately specified).const defaultCodecs = unwrapCodecList(codecsFromDefault(main, mediaAttributes.AUDIO) || []);if (defaultCodecs.audio) {codecInfo.audio = defaultCodecs.audio;}}}return codecInfo;};const logFn = logger('PlaylistSelector');const representationToString = function (representation) {if (!representation || !representation.playlist) {return;}const playlist = representation.playlist;return JSON.stringify({id: playlist.id,bandwidth: representation.bandwidth,width: representation.width,height: representation.height,codecs: playlist.attributes && playlist.attributes.CODECS || ''});}; // Utilities/*** Returns the CSS value for the specified property on an element* using `getComputedStyle`. Firefox has a long-standing issue where* getComputedStyle() may return null when running in an iframe with* `display: none`.** @see https://bugzilla.mozilla.org/show_bug.cgi?id=548397* @param {HTMLElement} el the htmlelement to work on* @param {string} the proprety to get the style for*/const safeGetComputedStyle = function (el, property) {if (!el) {return '';}const result = window.getComputedStyle(el);if (!result) {return '';}return result[property];};/*** Resuable stable sort function** @param {Playlists} array* @param {Function} sortFn Different comparators* @function stableSort*/const stableSort = function (array, sortFn) {const newArray = array.slice();array.sort(function (left, right) {const cmp = sortFn(left, right);if (cmp === 0) {return newArray.indexOf(left) - newArray.indexOf(right);}return cmp;});};/*** A comparator function to sort two playlist object by bandwidth.** @param {Object} left a media playlist object* @param {Object} right a media playlist object* @return {number} Greater than zero if the bandwidth attribute of* left is greater than the corresponding attribute of right. 
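// --- Illustrative sketch (not part of video.js) -----------------------------
// stableSort() above keeps the original relative order of items that compare
// equal by falling back to their index in a pre-sort copy. A small usage
// sketch with a bandwidth-style comparator (the sample playlists are made up):
const stableSortSketch = (array, sortFn) => {
  const newArray = array.slice();
  array.sort((left, right) => {
    const cmp = sortFn(left, right);
    return cmp === 0 ? newArray.indexOf(left) - newArray.indexOf(right) : cmp;
  });
};
const samplePlaylists = [
  { id: 'a', bandwidth: 500000 },
  { id: 'b', bandwidth: 250000 },
  { id: 'c', bandwidth: 500000 } // ties with 'a' and stays after it
];
stableSortSketch(samplePlaylists, (left, right) => left.bandwidth - right.bandwidth);
// samplePlaylists is now ordered b (250k), a (500k), c (500k).
// -----------------------------------------------------------------------------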
Less* than zero if the bandwidth of right is greater than left and* exactly zero if the two are equal.*/const comparePlaylistBandwidth = function (left, right) {let leftBandwidth;let rightBandwidth;if (left.attributes.BANDWIDTH) {leftBandwidth = left.attributes.BANDWIDTH;}leftBandwidth = leftBandwidth || window.Number.MAX_VALUE;if (right.attributes.BANDWIDTH) {rightBandwidth = right.attributes.BANDWIDTH;}rightBandwidth = rightBandwidth || window.Number.MAX_VALUE;return leftBandwidth - rightBandwidth;};/*** A comparator function to sort two playlist object by resolution (width).** @param {Object} left a media playlist object* @param {Object} right a media playlist object* @return {number} Greater than zero if the resolution.width attribute of* left is greater than the corresponding attribute of right. Less* than zero if the resolution.width of right is greater than left and* exactly zero if the two are equal.*/const comparePlaylistResolution = function (left, right) {let leftWidth;let rightWidth;if (left.attributes.RESOLUTION && left.attributes.RESOLUTION.width) {leftWidth = left.attributes.RESOLUTION.width;}leftWidth = leftWidth || window.Number.MAX_VALUE;if (right.attributes.RESOLUTION && right.attributes.RESOLUTION.width) {rightWidth = right.attributes.RESOLUTION.width;}rightWidth = rightWidth || window.Number.MAX_VALUE; // NOTE - Fallback to bandwidth sort as appropriate in cases where multiple renditions// have the same media dimensions/ resolutionif (leftWidth === rightWidth && left.attributes.BANDWIDTH && right.attributes.BANDWIDTH) {return left.attributes.BANDWIDTH - right.attributes.BANDWIDTH;}return leftWidth - rightWidth;};/*** Chooses the appropriate media playlist based on bandwidth and player size** @param {Object} main* Object representation of the main manifest* @param {number} playerBandwidth* Current calculated bandwidth of the player* @param {number} playerWidth* Current width of the player element (should account for the device pixel ratio)* @param {number} playerHeight* Current height of the player element (should account for the device pixel ratio)* @param {boolean} limitRenditionByPlayerDimensions* True if the player width and height should be used during the selection, false otherwise* @param {Object} playlistController* the current playlistController object* @return {Playlist} the highest bitrate playlist less than the* currently detected bandwidth, accounting for some amount of* bandwidth variance*/let simpleSelector = function (main, playerBandwidth, playerWidth, playerHeight, limitRenditionByPlayerDimensions, playlistController) {// If we end up getting called before `main` is available, exit earlyif (!main) {return;}const options = {bandwidth: playerBandwidth,width: playerWidth,height: playerHeight,limitRenditionByPlayerDimensions};let playlists = main.playlists; // if playlist is audio only, select between currently active audio group playlists.if (Playlist.isAudioOnly(main)) {playlists = playlistController.getAudioTrackPlaylists_(); // add audioOnly to options so that we log audioOnly: true// at the buttom of this function for debugging.options.audioOnly = true;} // convert the playlists to an intermediary representation to make comparisons easierlet sortedPlaylistReps = playlists.map(playlist => {let bandwidth;const width = playlist.attributes && playlist.attributes.RESOLUTION && playlist.attributes.RESOLUTION.width;const height = playlist.attributes && playlist.attributes.RESOLUTION && playlist.attributes.RESOLUTION.height;bandwidth = playlist.attributes && 
playlist.attributes.BANDWIDTH;bandwidth = bandwidth || window.Number.MAX_VALUE;return {bandwidth,width,height,playlist};});stableSort(sortedPlaylistReps, (left, right) => left.bandwidth - right.bandwidth); // filter out any playlists that have been excluded due to// incompatible configurationssortedPlaylistReps = sortedPlaylistReps.filter(rep => !Playlist.isIncompatible(rep.playlist)); // filter out any playlists that have been disabled manually through the representations// api or excluded temporarily due to playback errors.let enabledPlaylistReps = sortedPlaylistReps.filter(rep => Playlist.isEnabled(rep.playlist));if (!enabledPlaylistReps.length) {// if there are no enabled playlists, then they have all been excluded or disabled// by the user through the representations api. In this case, ignore exclusion and// fallback to what the user wants by using playlists the user has not disabled.enabledPlaylistReps = sortedPlaylistReps.filter(rep => !Playlist.isDisabled(rep.playlist));} // filter out any variant that has greater effective bitrate// than the current estimated bandwidthconst bandwidthPlaylistReps = enabledPlaylistReps.filter(rep => rep.bandwidth * Config.BANDWIDTH_VARIANCE < playerBandwidth);let highestRemainingBandwidthRep = bandwidthPlaylistReps[bandwidthPlaylistReps.length - 1]; // get all of the renditions with the same (highest) bandwidth// and then taking the very first elementconst bandwidthBestRep = bandwidthPlaylistReps.filter(rep => rep.bandwidth === highestRemainingBandwidthRep.bandwidth)[0]; // if we're not going to limit renditions by player size, make an early decision.if (limitRenditionByPlayerDimensions === false) {const chosenRep = bandwidthBestRep || enabledPlaylistReps[0] || sortedPlaylistReps[0];if (chosenRep && chosenRep.playlist) {let type = 'sortedPlaylistReps';if (bandwidthBestRep) {type = 'bandwidthBestRep';}if (enabledPlaylistReps[0]) {type = 'enabledPlaylistReps';}logFn(`choosing ${representationToString(chosenRep)} using ${type} with options`, options);return chosenRep.playlist;}logFn('could not choose a playlist with options', options);return null;} // filter out playlists without resolution informationconst haveResolution = bandwidthPlaylistReps.filter(rep => rep.width && rep.height); // sort variants by resolutionstableSort(haveResolution, (left, right) => left.width - right.width); // if we have the exact resolution as the player use itconst resolutionBestRepList = haveResolution.filter(rep => rep.width === playerWidth && rep.height === playerHeight);highestRemainingBandwidthRep = resolutionBestRepList[resolutionBestRepList.length - 1]; // ensure that we pick the highest bandwidth variant that have exact resolutionconst resolutionBestRep = resolutionBestRepList.filter(rep => rep.bandwidth === highestRemainingBandwidthRep.bandwidth)[0];let resolutionPlusOneList;let resolutionPlusOneSmallest;let resolutionPlusOneRep; // find the smallest variant that is larger than the player// if there is no match of exact resolutionif (!resolutionBestRep) {resolutionPlusOneList = haveResolution.filter(rep => rep.width > playerWidth || rep.height > playerHeight); // find all the variants have the same smallest resolutionresolutionPlusOneSmallest = resolutionPlusOneList.filter(rep => rep.width === resolutionPlusOneList[0].width && rep.height === resolutionPlusOneList[0].height); // ensure that we also pick the highest bandwidth variant that// is just-larger-than the video playerhighestRemainingBandwidthRep = resolutionPlusOneSmallest[resolutionPlusOneSmallest.length - 
1];resolutionPlusOneRep = resolutionPlusOneSmallest.filter(rep => rep.bandwidth === highestRemainingBandwidthRep.bandwidth)[0];}let leastPixelDiffRep; // If this selector proves to be better than others,// resolutionPlusOneRep and resolutionBestRep and all// the code involving them should be removed.if (playlistController.leastPixelDiffSelector) {// find the variant that is closest to the player's pixel sizeconst leastPixelDiffList = haveResolution.map(rep => {rep.pixelDiff = Math.abs(rep.width - playerWidth) + Math.abs(rep.height - playerHeight);return rep;}); // get the highest bandwidth, closest resolution playliststableSort(leastPixelDiffList, (left, right) => {// sort by highest bandwidth if pixelDiff is the sameif (left.pixelDiff === right.pixelDiff) {return right.bandwidth - left.bandwidth;}return left.pixelDiff - right.pixelDiff;});leastPixelDiffRep = leastPixelDiffList[0];} // fallback chain of variantsconst chosenRep = leastPixelDiffRep || resolutionPlusOneRep || resolutionBestRep || bandwidthBestRep || enabledPlaylistReps[0] || sortedPlaylistReps[0];if (chosenRep && chosenRep.playlist) {let type = 'sortedPlaylistReps';if (leastPixelDiffRep) {type = 'leastPixelDiffRep';} else if (resolutionPlusOneRep) {type = 'resolutionPlusOneRep';} else if (resolutionBestRep) {type = 'resolutionBestRep';} else if (bandwidthBestRep) {type = 'bandwidthBestRep';} else if (enabledPlaylistReps[0]) {type = 'enabledPlaylistReps';}logFn(`choosing ${representationToString(chosenRep)} using ${type} with options`, options);return chosenRep.playlist;}logFn('could not choose a playlist with options', options);return null;};/*** Chooses the appropriate media playlist based on the most recent* bandwidth estimate and the player size.** Expects to be called within the context of an instance of VhsHandler** @return {Playlist} the highest bitrate playlist less than the* currently detected bandwidth, accounting for some amount of* bandwidth variance*/const lastBandwidthSelector = function () {const pixelRatio = this.useDevicePixelRatio ? window.devicePixelRatio || 1 : 1;return simpleSelector(this.playlists.main, this.systemBandwidth, parseInt(safeGetComputedStyle(this.tech_.el(), 'width'), 10) * pixelRatio, parseInt(safeGetComputedStyle(this.tech_.el(), 'height'), 10) * pixelRatio, this.limitRenditionByPlayerDimensions, this.playlistController_);};/*** Chooses the appropriate media playlist based on an* exponential-weighted moving average of the bandwidth after* filtering for player size.** Expects to be called within the context of an instance of VhsHandler** @param {number} decay - a number between 0 and 1. Higher values of* this parameter will cause previous bandwidth estimates to lose* significance more quickly.* @return {Function} a function which can be invoked to create a new* playlist selector function.* @see https://en.wikipedia.org/wiki/Moving_average#Exponential_moving_average*/const movingAverageBandwidthSelector = function (decay) {let average = -1;let lastSystemBandwidth = -1;if (decay < 0 || decay > 1) {throw new Error('Moving average bandwidth decay must be between 0 and 1.');}return function () {const pixelRatio = this.useDevicePixelRatio ? 
window.devicePixelRatio || 1 : 1;if (average < 0) {average = this.systemBandwidth;lastSystemBandwidth = this.systemBandwidth;} // stop the average value from decaying for every 250ms// when the systemBandwidth is constant// and// stop average from setting to a very low value when the// systemBandwidth becomes 0 in case of chunk cancellationif (this.systemBandwidth > 0 && this.systemBandwidth !== lastSystemBandwidth) {average = decay * this.systemBandwidth + (1 - decay) * average;lastSystemBandwidth = this.systemBandwidth;}return simpleSelector(this.playlists.main, average, parseInt(safeGetComputedStyle(this.tech_.el(), 'width'), 10) * pixelRatio, parseInt(safeGetComputedStyle(this.tech_.el(), 'height'), 10) * pixelRatio, this.limitRenditionByPlayerDimensions, this.playlistController_);};};/*** Chooses the appropriate media playlist based on the potential to rebuffer** @param {Object} settings* Object of information required to use this selector* @param {Object} settings.main* Object representation of the main manifest* @param {number} settings.currentTime* The current time of the player* @param {number} settings.bandwidth* Current measured bandwidth* @param {number} settings.duration* Duration of the media* @param {number} settings.segmentDuration* Segment duration to be used in round trip time calculations* @param {number} settings.timeUntilRebuffer* Time left in seconds until the player has to rebuffer* @param {number} settings.currentTimeline* The current timeline segments are being loaded from* @param {SyncController} settings.syncController* SyncController for determining if we have a sync point for a given playlist* @return {Object|null}* {Object} return.playlist* The highest bandwidth playlist with the least amount of rebuffering* {Number} return.rebufferingImpact* The amount of time in seconds switching to this playlist will rebuffer. A* negative value means that switching will cause zero rebuffering.*/const minRebufferMaxBandwidthSelector = function (settings) {const {main,currentTime,bandwidth,duration,segmentDuration,timeUntilRebuffer,currentTimeline,syncController} = settings; // filter out any playlists that have been excluded due to// incompatible configurationsconst compatiblePlaylists = main.playlists.filter(playlist => !Playlist.isIncompatible(playlist)); // filter out any playlists that have been disabled manually through the representations// api or excluded temporarily due to playback errors.let enabledPlaylists = compatiblePlaylists.filter(Playlist.isEnabled);if (!enabledPlaylists.length) {// if there are no enabled playlists, then they have all been excluded or disabled// by the user through the representations api. In this case, ignore exclusion and// fallback to what the user wants by using playlists the user has not disabled.enabledPlaylists = compatiblePlaylists.filter(playlist => !Playlist.isDisabled(playlist));}const bandwidthPlaylists = enabledPlaylists.filter(Playlist.hasAttribute.bind(null, 'BANDWIDTH'));const rebufferingEstimates = bandwidthPlaylists.map(playlist => {const syncPoint = syncController.getSyncPoint(playlist, duration, currentTimeline, currentTime); // If there is no sync point for this playlist, switching to it will require a// sync request first. This will double the request timeconst numRequests = syncPoint ? 
1 : 2;const requestTimeEstimate = Playlist.estimateSegmentRequestTime(segmentDuration, bandwidth, playlist);const rebufferingImpact = requestTimeEstimate * numRequests - timeUntilRebuffer;return {playlist,rebufferingImpact};});const noRebufferingPlaylists = rebufferingEstimates.filter(estimate => estimate.rebufferingImpact <= 0); // Sort by bandwidth DESCstableSort(noRebufferingPlaylists, (a, b) => comparePlaylistBandwidth(b.playlist, a.playlist));if (noRebufferingPlaylists.length) {return noRebufferingPlaylists[0];}stableSort(rebufferingEstimates, (a, b) => a.rebufferingImpact - b.rebufferingImpact);return rebufferingEstimates[0] || null;};/*** Chooses the appropriate media playlist, which in this case is the lowest bitrate* one with video. If no renditions with video exist, return the lowest audio rendition.** Expects to be called within the context of an instance of VhsHandler** @return {Object|null}* {Object} return.playlist* The lowest bitrate playlist that contains a video codec. If no such rendition* exists pick the lowest audio rendition.*/const lowestBitrateCompatibleVariantSelector = function () {// filter out any playlists that have been excluded due to// incompatible configurations or playback errorsconst playlists = this.playlists.main.playlists.filter(Playlist.isEnabled); // Sort ascending by bitratestableSort(playlists, (a, b) => comparePlaylistBandwidth(a, b)); // Parse and assume that playlists with no video codec have no video// (this is not necessarily true, although it is generally true).//// If an entire manifest has no valid videos everything will get filtered// out.const playlistsWithVideo = playlists.filter(playlist => !!codecsForPlaylist(this.playlists.main, playlist).video);return playlistsWithVideo[0] || null;};/*** Combine all segments into a single Uint8Array** @param {Object} segmentObj* @return {Uint8Array} concatenated bytes* @private*/const concatSegments = segmentObj => {let offset = 0;let tempBuffer;if (segmentObj.bytes) {tempBuffer = new Uint8Array(segmentObj.bytes); // combine the individual segments into one large typed-arraysegmentObj.segments.forEach(segment => {tempBuffer.set(segment, offset);offset += segment.byteLength;});}return tempBuffer;};/*** @file text-tracks.js*//*** Create captions text tracks on video.js if they do not exist** @param {Object} inbandTextTracks a reference to current inbandTextTracks* @param {Object} tech the video.js tech* @param {Object} captionStream the caption stream to create* @private*/const createCaptionsTrackIfNotExists = function (inbandTextTracks, tech, captionStream) {if (!inbandTextTracks[captionStream]) {tech.trigger({type: 'usage',name: 'vhs-608'});let instreamId = captionStream; // we need to translate SERVICEn for 708 to how mux.js currently labels themif (/^cc708_/.test(captionStream)) {instreamId = 'SERVICE' + captionStream.split('_')[1];}const track = tech.textTracks().getTrackById(instreamId);if (track) {// Resuse an existing track with a CC# id because this was// very likely created by videojs-contrib-hls from information// in the m3u8 for us to useinbandTextTracks[captionStream] = track;} else {// This section gets called when we have caption services that aren't specified in the manifest.// Manifest level caption services are handled in media-groups.js under CLOSED-CAPTIONS.const captionServices = tech.options_.vhs && tech.options_.vhs.captionServices || {};let label = captionStream;let language = captionStream;let def = false;const captionService = captionServices[instreamId];if (captionService) 
{label = captionService.label;language = captionService.language;def = captionService.default;} // Otherwise, create a track with the default `CC#` label and// without a languageinbandTextTracks[captionStream] = tech.addRemoteTextTrack({kind: 'captions',id: instreamId,// TODO: investigate why this doesn't seem to turn the caption on by defaultdefault: def,label,language}, false).track;}}};/*** Add caption text track data to a source handler given an array of captions** @param {Object}* @param {Object} inbandTextTracks the inband text tracks* @param {number} timestampOffset the timestamp offset of the source buffer* @param {Array} captionArray an array of caption data* @private*/const addCaptionData = function ({inbandTextTracks,captionArray,timestampOffset}) {if (!captionArray) {return;}const Cue = window.WebKitDataCue || window.VTTCue;captionArray.forEach(caption => {const track = caption.stream; // in CEA 608 captions, video.js/mux.js sends a content array// with positioning dataif (caption.content) {caption.content.forEach(value => {const cue = new Cue(caption.startTime + timestampOffset, caption.endTime + timestampOffset, value.text);cue.line = value.line;cue.align = 'left';cue.position = value.position;cue.positionAlign = 'line-left';inbandTextTracks[track].addCue(cue);});} else {// otherwise, a text value with combined captions is sentinbandTextTracks[track].addCue(new Cue(caption.startTime + timestampOffset, caption.endTime + timestampOffset, caption.text));}});};/*** Define properties on a cue for backwards compatability,* but warn the user that the way that they are using it* is depricated and will be removed at a later date.** @param {Cue} cue the cue to add the properties on* @private*/const deprecateOldCue = function (cue) {Object.defineProperties(cue.frame, {id: {get() {videojs.log.warn('cue.frame.id is deprecated. Use cue.value.key instead.');return cue.value.key;}},value: {get() {videojs.log.warn('cue.frame.value is deprecated. Use cue.value.data instead.');return cue.value.data;}},privateData: {get() {videojs.log.warn('cue.frame.privateData is deprecated. 
Use cue.value.data instead.');return cue.value.data;}}});};/*** Add metadata text track data to a source handler given an array of metadata** @param {Object}* @param {Object} inbandTextTracks the inband text tracks* @param {Array} metadataArray an array of meta data* @param {number} timestampOffset the timestamp offset of the source buffer* @param {number} videoDuration the duration of the video* @private*/const addMetadata = ({inbandTextTracks,metadataArray,timestampOffset,videoDuration}) => {if (!metadataArray) {return;}const Cue = window.WebKitDataCue || window.VTTCue;const metadataTrack = inbandTextTracks.metadataTrack_;if (!metadataTrack) {return;}metadataArray.forEach(metadata => {const time = metadata.cueTime + timestampOffset; // if time isn't a finite number between 0 and Infinity, like NaN,// ignore this bit of metadata.// This likely occurs when you have an non-timed ID3 tag like TIT2,// which is the "Title/Songname/Content description" frameif (typeof time !== 'number' || window.isNaN(time) || time < 0 || !(time < Infinity)) {return;} // If we have no frames, we can't create a cue.if (!metadata.frames || !metadata.frames.length) {return;}metadata.frames.forEach(frame => {const cue = new Cue(time, time, frame.value || frame.url || frame.data || '');cue.frame = frame;cue.value = frame;deprecateOldCue(cue);metadataTrack.addCue(cue);});});if (!metadataTrack.cues || !metadataTrack.cues.length) {return;} // Updating the metadeta cues so that// the endTime of each cue is the startTime of the next cue// the endTime of last cue is the duration of the videoconst cues = metadataTrack.cues;const cuesArray = []; // Create a copy of the TextTrackCueList...// ...disregarding cues with a falsey valuefor (let i = 0; i < cues.length; i++) {if (cues[i]) {cuesArray.push(cues[i]);}} // Group cues by their startTime valueconst cuesGroupedByStartTime = cuesArray.reduce((obj, cue) => {const timeSlot = obj[cue.startTime] || [];timeSlot.push(cue);obj[cue.startTime] = timeSlot;return obj;}, {}); // Sort startTimes by ascending orderconst sortedStartTimes = Object.keys(cuesGroupedByStartTime).sort((a, b) => Number(a) - Number(b)); // Map each cue group's endTime to the next group's startTimesortedStartTimes.forEach((startTime, idx) => {const cueGroup = cuesGroupedByStartTime[startTime];const finiteDuration = isFinite(videoDuration) ? 
videoDuration : startTime;const nextTime = Number(sortedStartTimes[idx + 1]) || finiteDuration; // Map each cue's endTime the next group's startTimecueGroup.forEach(cue => {cue.endTime = nextTime;});});}; // object for mapping daterange attributesconst dateRangeAttr = {id: 'ID',class: 'CLASS',startDate: 'START-DATE',duration: 'DURATION',endDate: 'END-DATE',endOnNext: 'END-ON-NEXT',plannedDuration: 'PLANNED-DURATION',scte35Out: 'SCTE35-OUT',scte35In: 'SCTE35-IN'};const dateRangeKeysToOmit = new Set(['id', 'class', 'startDate', 'duration', 'endDate', 'endOnNext', 'startTime', 'endTime', 'processDateRange']);/*** Add DateRange metadata text track to a source handler given an array of metadata** @param {Object}* @param {Object} inbandTextTracks the inband text tracks* @param {Array} dateRanges parsed media playlist* @private*/const addDateRangeMetadata = ({inbandTextTracks,dateRanges}) => {const metadataTrack = inbandTextTracks.metadataTrack_;if (!metadataTrack) {return;}const Cue = window.WebKitDataCue || window.VTTCue;dateRanges.forEach(dateRange => {// we generate multiple cues for each date range with different attributesfor (const key of Object.keys(dateRange)) {if (dateRangeKeysToOmit.has(key)) {continue;}const cue = new Cue(dateRange.startTime, dateRange.endTime, '');cue.id = dateRange.id;cue.type = 'com.apple.quicktime.HLS';cue.value = {key: dateRangeAttr[key],data: dateRange[key]};if (key === 'scte35Out' || key === 'scte35In') {cue.value.data = new Uint8Array(cue.value.data.match(/[\da-f]{2}/gi)).buffer;}metadataTrack.addCue(cue);}dateRange.processDateRange();});};/*** Create metadata text track on video.js if it does not exist** @param {Object} inbandTextTracks a reference to current inbandTextTracks* @param {string} dispatchType the inband metadata track dispatch type* @param {Object} tech the video.js tech* @private*/const createMetadataTrackIfNotExists = (inbandTextTracks, dispatchType, tech) => {if (inbandTextTracks.metadataTrack_) {return;}inbandTextTracks.metadataTrack_ = tech.addRemoteTextTrack({kind: 'metadata',label: 'Timed Metadata'}, false).track;if (!videojs.browser.IS_ANY_SAFARI) {inbandTextTracks.metadataTrack_.inBandMetadataTrackDispatchType = dispatchType;}};/*** Remove cues from a track on video.js.** @param {Double} start start of where we should remove the cue* @param {Double} end end of where the we should remove the cue* @param {Object} track the text track to remove the cues from* @private*/const removeCuesFromTrack = function (start, end, track) {let i;let cue;if (!track) {return;}if (!track.cues) {return;}i = track.cues.length;while (i--) {cue = track.cues[i]; // Remove any cue within the provided start and end timeif (cue.startTime >= start && cue.endTime <= end) {track.removeCue(cue);}}};/*** Remove duplicate cues from a track on video.js (a cue is considered a* duplicate if it has the same time interval and text as another)** @param {Object} track the text track to remove the duplicate cues from* @private*/const removeDuplicateCuesFromTrack = function (track) {const cues = track.cues;if (!cues) {return;}const uniqueCues = {};for (let i = cues.length - 1; i >= 0; i--) {const cue = cues[i];const cueKey = `${cue.startTime}-${cue.endTime}-${cue.text}`;if (uniqueCues[cueKey]) {track.removeCue(cue);} else {uniqueCues[cueKey] = cue;}}};/*** Returns a list of gops in the buffer that have a pts value of 3 seconds or more in* front of current time.** @param {Array} buffer* The current buffer of gop information* @param {number} currentTime* The current time* @param 
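// --- Illustrative sketch (not part of video.js) -----------------------------
// addMetadata() above chains metadata cues so that each group's endTime
// becomes the next group's startTime, with the last group ending at the video
// duration. The same idea applied to plain { startTime, endTime } objects
// instead of a TextTrackCueList:
const chainCueEndTimes = (cues, videoDuration) => {
  const grouped = cues.reduce((obj, cue) => {
    (obj[cue.startTime] = obj[cue.startTime] || []).push(cue);
    return obj;
  }, {});
  const startTimes = Object.keys(grouped).sort((a, b) => Number(a) - Number(b));
  startTimes.forEach((startTime, idx) => {
    // fall back to the startTime itself when the duration is not finite (live)
    const fallback = isFinite(videoDuration) ? videoDuration : Number(startTime);
    const nextTime = Number(startTimes[idx + 1]) || fallback;
    grouped[startTime].forEach(cue => {
      cue.endTime = nextTime;
    });
  });
  return cues;
};
// -----------------------------------------------------------------------------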
{Double} mapping* Offset to map display time to stream presentation time* @return {Array}* List of gops considered safe to append over*/const gopsSafeToAlignWith = (buffer, currentTime, mapping) => {if (typeof currentTime === 'undefined' || currentTime === null || !buffer.length) {return [];} // pts value for current time + 3 seconds to give a bit more wiggle roomconst currentTimePts = Math.ceil((currentTime - mapping + 3) * clock_1);let i;for (i = 0; i < buffer.length; i++) {if (buffer[i].pts > currentTimePts) {break;}}return buffer.slice(i);};/*** Appends gop information (timing and byteLength) received by the transmuxer for the* gops appended in the last call to appendBuffer** @param {Array} buffer* The current buffer of gop information* @param {Array} gops* List of new gop information* @param {boolean} replace* If true, replace the buffer with the new gop information. If false, append the* new gop information to the buffer in the right location of time.* @return {Array}* Updated list of gop information*/const updateGopBuffer = (buffer, gops, replace) => {if (!gops.length) {return buffer;}if (replace) {// If we are in safe append mode, then completely overwrite the gop buffer// with the most recent appeneded data. This will make sure that when appending// future segments, we only try to align with gops that are both ahead of current// time and in the last segment appended.return gops.slice();}const start = gops[0].pts;let i = 0;for (i; i < buffer.length; i++) {if (buffer[i].pts >= start) {break;}}return buffer.slice(0, i).concat(gops);};/*** Removes gop information in buffer that overlaps with provided start and end** @param {Array} buffer* The current buffer of gop information* @param {Double} start* position to start the remove at* @param {Double} end* position to end the remove at* @param {Double} mapping* Offset to map display time to stream presentation time*/const removeGopBuffer = (buffer, start, end, mapping) => {const startPts = Math.ceil((start - mapping) * clock_1);const endPts = Math.ceil((end - mapping) * clock_1);const updatedBuffer = buffer.slice();let i = buffer.length;while (i--) {if (buffer[i].pts <= endPts) {break;}}if (i === -1) {// no removal because end of remove range is before start of bufferreturn updatedBuffer;}let j = i + 1;while (j--) {if (buffer[j].pts <= startPts) {break;}} // clamp remove range start to 0 indexj = Math.max(j, 0);updatedBuffer.splice(j, i - j + 1);return updatedBuffer;};const shallowEqual = function (a, b) {// if both are undefined// or one or the other is undefined// they are not equalif (!a && !b || !a && b || a && !b) {return false;} // they are the same object and thus, equalif (a === b) {return true;} // sort keys so we can make sure they have// all the same keys later.const akeys = Object.keys(a).sort();const bkeys = Object.keys(b).sort(); // different number of keys, not equalif (akeys.length !== bkeys.length) {return false;}for (let i = 0; i < akeys.length; i++) {const key = akeys[i]; // different sorted keys, not equalif (key !== bkeys[i]) {return false;} // different values, not equalif (a[key] !== b[key]) {return false;}}return true;};// https://www.w3.org/TR/WebIDL-1/#quotaexceedederrorconst QUOTA_EXCEEDED_ERR = 22;/*** The segment loader has no recourse except to fetch a segment in the* current playlist and use the internal timestamps in that segment to* generate a syncPoint. 
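// --- Illustrative sketch (not part of video.js) -----------------------------
// gopsSafeToAlignWith() and removeGopBuffer() above convert display time to a
// stream PTS with `(time - mapping) * clock_1`. A tiny standalone version of
// that conversion; the 90000 timescale is an assumption here, standing in for
// the transmuxer's one-second-in-TS clock constant:
const ONE_SECOND_IN_TS_SKETCH = 90000; // assumed 90kHz MPEG-TS PTS clock
const displayTimeToPts = (displayTime, mapping) =>
  Math.ceil((displayTime - mapping) * ONE_SECOND_IN_TS_SKETCH);
// Example: a gop is considered safe to append over when its pts is beyond
// displayTimeToPts(currentTime + 3, mapping), i.e. at least ~3s ahead of the
// playhead, matching the cutoff computed above.
// -----------------------------------------------------------------------------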
This function returns a good candidate index* for that process.** @param {Array} segments - the segments array from a playlist.* @return {number} An index of a segment from the playlist to load*/const getSyncSegmentCandidate = function (currentTimeline, segments, targetTime) {segments = segments || [];const timelineSegments = [];let time = 0;for (let i = 0; i < segments.length; i++) {const segment = segments[i];if (currentTimeline === segment.timeline) {timelineSegments.push(i);time += segment.duration;if (time > targetTime) {return i;}}}if (timelineSegments.length === 0) {return 0;} // default to the last timeline segmentreturn timelineSegments[timelineSegments.length - 1];}; // In the event of a quota exceeded error, keep at least one second of back buffer. This// number was arbitrarily chosen and may be updated in the future, but seemed reasonable// as a start to prevent any potential issues with removing content too close to the// playhead.const MIN_BACK_BUFFER = 1; // in msconst CHECK_BUFFER_DELAY = 500;const finite = num => typeof num === 'number' && isFinite(num); // With most content hovering around 30fps, if a segment has a duration less than a half// frame at 30fps or one frame at 60fps, the bandwidth and throughput calculations will// not accurately reflect the rest of the content.const MIN_SEGMENT_DURATION_TO_SAVE_STATS = 1 / 60;const illegalMediaSwitch = (loaderType, startingMedia, trackInfo) => {// Although these checks should most likely cover non 'main' types, for now it narrows// the scope of our checks.if (loaderType !== 'main' || !startingMedia || !trackInfo) {return null;}if (!trackInfo.hasAudio && !trackInfo.hasVideo) {return 'Neither audio nor video found in segment.';}if (startingMedia.hasVideo && !trackInfo.hasVideo) {return 'Only audio found in segment when we expected video.' + ' We can\'t switch to audio only from a stream that had video.' + ' To get rid of this message, please add codec information to the manifest.';}if (!startingMedia.hasVideo && trackInfo.hasVideo) {return 'Video found in segment when we expected only audio.' + ' We can\'t switch to a stream with video from an audio only stream.' + ' To get rid of this message, please add codec information to the manifest.';}return null;};/*** Calculates a time value that is safe to remove from the back buffer without interrupting* playback.** @param {TimeRange} seekable* The current seekable range* @param {number} currentTime* The current time of the player* @param {number} targetDuration* The target duration of the current playlist* @return {number}* Time that is safe to remove from the back buffer without interrupting playback*/const safeBackBufferTrimTime = (seekable, currentTime, targetDuration) => {// 30 seconds before the playhead provides a safe default for trimming.//// Choosing a reasonable default is particularly important for high bitrate content and// VOD videos/live streams with large windows, as the buffer may end up overfilled and// throw an APPEND_BUFFER_ERR.let trimTime = currentTime - Config.BACK_BUFFER_LENGTH;if (seekable.length) {// Some live playlists may have a shorter window of content than the full allowed back// buffer. 
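// --- Illustrative sketch (not part of video.js) -----------------------------
// safeBackBufferTrimTime() above clamps the trim point twice: never earlier
// than the start of the seekable window, and never closer to the playhead than
// one target duration. The same arithmetic with plain numbers; the 30-second
// back-buffer length is assumed here, per the comment above about
// Config.BACK_BUFFER_LENGTH:
const BACK_BUFFER_LENGTH_SKETCH = 30;
const safeTrimTimeSketch = (seekableStart, currentTime, targetDuration) => {
  let trimTime = currentTime - BACK_BUFFER_LENGTH_SKETCH;
  if (typeof seekableStart === 'number') {
    // live windows may be shorter than the allowed back buffer
    trimTime = Math.max(trimTime, seekableStart);
  }
  // stay at least one target duration behind the playhead
  return Math.min(currentTime - targetDuration, trimTime);
};
// safeTrimTimeSketch(100, 120, 6) === 100: the seekable start (100) overrides
// currentTime - 30 (90), and 100 is still more than 6s behind the playhead.
// -----------------------------------------------------------------------------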
For these playlists, don't save content that's no longer within the window.trimTime = Math.max(trimTime, seekable.start(0));} // Don't remove within target duration of the current time to avoid the possibility of// removing the GOP currently being played, as removing it can cause playback stalls.const maxTrimTime = currentTime - targetDuration;return Math.min(maxTrimTime, trimTime);};const segmentInfoString = segmentInfo => {const {startOfSegment,duration,segment,part,playlist: {mediaSequence: seq,id,segments = []},mediaIndex: index,partIndex,timeline} = segmentInfo;const segmentLen = segments.length - 1;let selection = 'mediaIndex/partIndex increment';if (segmentInfo.getMediaInfoForTime) {selection = `getMediaInfoForTime (${segmentInfo.getMediaInfoForTime})`;} else if (segmentInfo.isSyncRequest) {selection = 'getSyncSegmentCandidate (isSyncRequest)';}if (segmentInfo.independent) {selection += ` with independent ${segmentInfo.independent}`;}const hasPartIndex = typeof partIndex === 'number';const name = segmentInfo.segment.uri ? 'segment' : 'pre-segment';const zeroBasedPartCount = hasPartIndex ? getKnownPartCount({preloadSegment: segment}) - 1 : 0;return `${name} [${seq + index}/${seq + segmentLen}]` + (hasPartIndex ? ` part [${partIndex}/${zeroBasedPartCount}]` : '') + ` segment start/end [${segment.start} => ${segment.end}]` + (hasPartIndex ? ` part start/end [${part.start} => ${part.end}]` : '') + ` startOfSegment [${startOfSegment}]` + ` duration [${duration}]` + ` timeline [${timeline}]` + ` selected by [${selection}]` + ` playlist [${id}]`;};const timingInfoPropertyForMedia = mediaType => `${mediaType}TimingInfo`;/*** Returns the timestamp offset to use for the segment.** @param {number} segmentTimeline* The timeline of the segment* @param {number} currentTimeline* The timeline currently being followed by the loader* @param {number} startOfSegment* The estimated segment start* @param {TimeRange[]} buffered* The loader's buffer* @param {boolean} overrideCheck* If true, no checks are made to see if the timestamp offset value should be set,* but sets it directly to a value.** @return {number|null}* Either a number representing a new timestamp offset, or null if the segment is* part of the same timeline*/const timestampOffsetForSegment = ({segmentTimeline,currentTimeline,startOfSegment,buffered,overrideCheck}) => {// Check to see if we are crossing a discontinuity to see if we need to set the// timestamp offset on the transmuxer and source buffer.//// Previously, we changed the timestampOffset if the start of this segment was less than// the currently set timestampOffset, but this isn't desirable as it can produce bad// behavior, especially around long running live streams.if (!overrideCheck && segmentTimeline === currentTimeline) {return null;} // When changing renditions, it's possible to request a segment on an older timeline. For// instance, given two renditions with the following://// #EXTINF:10// segment1// #EXT-X-DISCONTINUITY// #EXTINF:10// segment2// #EXTINF:10// segment3//// And the current player state://// current time: 8// buffer: 0 => 20//// The next segment on the current rendition would be segment3, filling the buffer from// 20s onwards. However, if a rendition switch happens after segment2 was requested,// then the next segment to be requested will be segment1 from the new rendition in// order to fill time 8 and onwards. Using the buffered end would result in repeated// content (since it would position segment1 of the new rendition starting at 20s). 
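// --- Illustrative sketch (not part of video.js) -----------------------------
// The decision implemented by timestampOffsetForSegment() above, restated
// compactly. In this sketch `buffered` is a plain array of { start, end }
// objects rather than a TimeRanges object:
const timestampOffsetSketch = ({segmentTimeline, currentTimeline, startOfSegment, buffered, overrideCheck}) => {
  if (!overrideCheck && segmentTimeline === currentTimeline) {
    return null; // same timeline: leave the existing offset alone
  }
  if (segmentTimeline < currentTimeline) {
    // stepping back to an older timeline after a rendition switch
    return startOfSegment;
  }
  // crossing forward: anchor the new timeline at the buffered end when possible
  return buffered.length ? buffered[buffered.length - 1].end : startOfSegment;
};
// -----------------------------------------------------------------------------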
This// case can be identified when the new segment's timeline is a prior value. Instead of// using the buffered end, the startOfSegment can be used, which, hopefully, will be// more accurate to the actual start time of the segment.if (segmentTimeline < currentTimeline) {return startOfSegment;} // segmentInfo.startOfSegment used to be used as the timestamp offset, however, that// value uses the end of the last segment if it is available. While this value// should often be correct, it's better to rely on the buffered end, as the new// content post discontinuity should line up with the buffered end as if it were// time 0 for the new content.return buffered.length ? buffered.end(buffered.length - 1) : startOfSegment;};/*** Returns whether or not the loader should wait for a timeline change from the timeline* change controller before processing the segment.** Primary timing in VHS goes by video. This is different from most media players, as* audio is more often used as the primary timing source. For the foreseeable future, VHS* will continue to use video as the primary timing source, due to the current logic and* expectations built around it.* Since the timing follows video, in order to maintain sync, the video loader is* responsible for setting both audio and video source buffer timestamp offsets.** Setting different values for audio and video source buffers could lead to* desyncing. The following examples demonstrate some of the situations where this* distinction is important. Note that all of these cases involve demuxed content. When* content is muxed, the audio and video are packaged together, therefore syncing* separate media playlists is not an issue.** CASE 1: Audio prepares to load a new timeline before video:** Timeline: 0 1* Audio Segments: 0 1 2 3 4 5 DISCO 6 7 8 9* Audio Loader: ^* Video Segments: 0 1 2 3 4 5 DISCO 6 7 8 9* Video Loader ^** In the above example, the audio loader is preparing to load the 6th segment, the first* after a discontinuity, while the video loader is still loading the 5th segment, before* the discontinuity.** If the audio loader goes ahead and loads and appends the 6th segment before the video* loader crosses the discontinuity, then when appended, the 6th audio segment will use* the timestamp offset from timeline 0. This will likely lead to desyncing. In addition,* the audio loader must provide the audioAppendStart value to trim the content in the* transmuxer, and that value relies on the audio timestamp offset. Since the audio* timestamp offset is set by the video (main) loader, the audio loader shouldn't load the* segment until that value is provided.** CASE 2: Video prepares to load a new timeline before audio:** Timeline: 0 1* Audio Segments: 0 1 2 3 4 5 DISCO 6 7 8 9* Audio Loader: ^* Video Segments: 0 1 2 3 4 5 DISCO 6 7 8 9* Video Loader ^** In the above example, the video loader is preparing to load the 6th segment, the first* after a discontinuity, while the audio loader is still loading the 5th segment, before* the discontinuity.** If the video loader goes ahead and loads and appends the 6th segment, then once the* segment is loaded and processed, both the video and audio timestamp offsets will be* set, since video is used as the primary timing source. This is to ensure content lines* up appropriately, as any modifications to the video timing are reflected by audio when* the video loader sets the audio and video timestamp offsets to the same value. 
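// An illustrative sketch of the three outcomes of timestampOffsetForSegment
// above (hypothetical values, not part of the library): a segment on the same
// timeline yields null (no offset change), an older timeline falls back to
// the estimated segment start, and a newer timeline uses the current buffered
// end.
//
//   const exampleBuffered = createTimeRanges([[0, 20]]);
//
//   timestampOffsetForSegment({segmentTimeline: 1, currentTimeline: 1,
//     startOfSegment: 20, buffered: exampleBuffered, overrideCheck: false}); // => null
//   timestampOffsetForSegment({segmentTimeline: 0, currentTimeline: 1,
//     startOfSegment: 8, buffered: exampleBuffered, overrideCheck: false});  // => 8
//   timestampOffsetForSegment({segmentTimeline: 2, currentTimeline: 1,
//     startOfSegment: 20, buffered: exampleBuffered, overrideCheck: false}); // => 20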
However,* setting the timestamp offset for audio before audio has had a chance to change* timelines will likely lead to desyncing, as the audio loader will append segment 5 with* a timestamp intended to apply to segments from timeline 1 rather than timeline 0.** CASE 3: When seeking, audio prepares to load a new timeline before video** Timeline: 0 1* Audio Segments: 0 1 2 3 4 5 DISCO 6 7 8 9* Audio Loader: ^* Video Segments: 0 1 2 3 4 5 DISCO 6 7 8 9* Video Loader ^** In the above example, both audio and video loaders are loading segments from timeline* 0, but imagine that the seek originated from timeline 1.** When seeking to a new timeline, the timestamp offset will be set based on the expected* segment start of the loaded video segment. In order to maintain sync, the audio loader* must wait for the video loader to load its segment and update both the audio and video* timestamp offsets before it may load and append its own segment. This is the case* whether the seek results in a mismatched segment request (e.g., the audio loader* chooses to load segment 3 and the video loader chooses to load segment 4) or the* loaders choose to load the same segment index from each playlist, as the segments may* not be aligned perfectly, even for matching segment indexes.** @param {Object} timelinechangeController* @param {number} currentTimeline* The timeline currently being followed by the loader* @param {number} segmentTimeline* The timeline of the segment being loaded* @param {('main'|'audio')} loaderType* The loader type* @param {boolean} audioDisabled* Whether the audio is disabled for the loader. This should only be true when the* loader may have muxed audio in its segment, but should not append it, e.g., for* the main loader when an alternate audio playlist is active.** @return {boolean}* Whether the loader should wait for a timeline change from the timeline change* controller before processing the segment*/const shouldWaitForTimelineChange = ({timelineChangeController,currentTimeline,segmentTimeline,loaderType,audioDisabled}) => {if (currentTimeline === segmentTimeline) {return false;}if (loaderType === 'audio') {const lastMainTimelineChange = timelineChangeController.lastTimelineChange({type: 'main'}); // Audio loader should wait if://// * main hasn't had a timeline change yet (thus has not loaded its first segment)// * main hasn't yet changed to the timeline audio is looking to loadreturn !lastMainTimelineChange || lastMainTimelineChange.to !== segmentTimeline;} // The main loader only needs to wait for timeline changes if there's demuxed audio.// Otherwise, there's nothing to wait for, since audio would be muxed into the main// loader's segments (or the content is audio/video only and handled by the main// loader).if (loaderType === 'main' && audioDisabled) {const pendingAudioTimelineChange = timelineChangeController.pendingTimelineChange({type: 'audio'}); // Main loader should wait for the audio loader if audio is not pending a timeline// change to the current timeline.//// Since the main loader is responsible for setting the timestamp offset for both// audio and video, the main loader must wait for audio to be about to change to its// timeline before setting the offset, otherwise, if audio is behind in loading,// segments from the previous timeline would be adjusted by the new timestamp offset.//// This requirement means that video will not cross a timeline until the audio is// about to cross to it, so that way audio and video will always cross the timeline// together.//// In addition to 
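// An illustrative sketch of how shouldWaitForTimelineChange resolves the
// cases described above (the timelineChangeController stand-in here is
// hypothetical, not the library's implementation):
//
//   const exampleController = {
//     // main has already crossed into timeline 1...
//     lastTimelineChange: ({type}) => (type === 'main' ? {type, from: 0, to: 1} : null),
//     // ...but audio has not yet registered a pending change
//     pendingTimelineChange: ({type}) => null
//   };
//
//   // audio may proceed because main has already changed to timeline 1
//   shouldWaitForTimelineChange({timelineChangeController: exampleController,
//     currentTimeline: 0, segmentTimeline: 1, loaderType: 'audio',
//     audioDisabled: false}); // => false
//
//   // demuxed main must wait until audio is pending a change to timeline 1
//   shouldWaitForTimelineChange({timelineChangeController: exampleController,
//     currentTimeline: 0, segmentTimeline: 1, loaderType: 'main',
//     audioDisabled: true}); // => true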
normal timeline changes, these rules also apply to the start of a// stream (going from a non-existent timeline, -1, to timeline 0). It's important// that these rules apply to the first timeline change because if they did not, it's// possible that the main loader will cross two timelines before the audio loader has// crossed one. Logic may be implemented to handle the startup as a special case, but// it's easier to simply treat all timeline changes the same.if (pendingAudioTimelineChange && pendingAudioTimelineChange.to === segmentTimeline) {return false;}return true;}return false;};const mediaDuration = timingInfos => {let maxDuration = 0;['video', 'audio'].forEach(function (type) {const typeTimingInfo = timingInfos[`${type}TimingInfo`];if (!typeTimingInfo) {return;}const {start,end} = typeTimingInfo;let duration;if (typeof start === 'bigint' || typeof end === 'bigint') {duration = window.BigInt(end) - window.BigInt(start);} else if (typeof start === 'number' && typeof end === 'number') {duration = end - start;}if (typeof duration !== 'undefined' && duration > maxDuration) {maxDuration = duration;}}); // convert back to a number if it is lower than MAX_SAFE_INTEGER// as we only need BigInt when we are above that.if (typeof maxDuration === 'bigint' && maxDuration < Number.MAX_SAFE_INTEGER) {maxDuration = Number(maxDuration);}return maxDuration;};const segmentTooLong = ({segmentDuration,maxDuration}) => {// 0 duration segments are most likely due to metadata only segments or a lack of// information.if (!segmentDuration) {return false;} // For HLS://// https://tools.ietf.org/html/draft-pantos-http-live-streaming-23#section-4.3.3.1// The EXTINF duration of each Media Segment in the Playlist// file, when rounded to the nearest integer, MUST be less than or equal// to the target duration; longer segments can trigger playback stalls// or other errors.//// For DASH, the mpd-parser uses the largest reported segment duration as the target// duration. Although that reported duration is occasionally approximate (i.e., not// exact), a strict check may report that a segment is too long more often in DASH.return Math.round(segmentDuration) > maxDuration + TIME_FUDGE_FACTOR;};const getTroublesomeSegmentDurationMessage = (segmentInfo, sourceType) => {// Right now we aren't following DASH's timing model exactly, so only perform// this check for HLS content.if (sourceType !== 'hls') {return null;}const segmentDuration = mediaDuration({audioTimingInfo: segmentInfo.audioTimingInfo,videoTimingInfo: segmentInfo.videoTimingInfo}); // Don't report if we lack information.//// If the segment has a duration of 0 it is either a lack of information or a// metadata only segment and shouldn't be reported here.if (!segmentDuration) {return null;}const targetDuration = segmentInfo.playlist.targetDuration;const isSegmentWayTooLong = segmentTooLong({segmentDuration,maxDuration: targetDuration * 2});const isSegmentSlightlyTooLong = segmentTooLong({segmentDuration,maxDuration: targetDuration});const segmentTooLongMessage = `Segment with index ${segmentInfo.mediaIndex} ` + `from playlist ${segmentInfo.playlist.id} ` + `has a duration of ${segmentDuration} ` + `when the reported duration is ${segmentInfo.duration} ` + `and the target duration is ${targetDuration}. ` + 'For HLS content, a duration in excess of the target duration may result in ' + 'playback issues. 
See the HLS specification section on EXT-X-TARGETDURATION for ' + 'more details: ' + 'https://tools.ietf.org/html/draft-pantos-http-live-streaming-23#section-4.3.3.1';if (isSegmentWayTooLong || isSegmentSlightlyTooLong) {return {severity: isSegmentWayTooLong ? 'warn' : 'info',message: segmentTooLongMessage};}return null;};/*** An object that manages segment loading and appending.** @class SegmentLoader* @param {Object} options required and optional options* @extends videojs.EventTarget*/class SegmentLoader extends videojs.EventTarget {constructor(settings, options = {}) {super(); // check pre-conditionsif (!settings) {throw new TypeError('Initialization settings are required');}if (typeof settings.currentTime !== 'function') {throw new TypeError('No currentTime getter specified');}if (!settings.mediaSource) {throw new TypeError('No MediaSource specified');} // public propertiesthis.bandwidth = settings.bandwidth;this.throughput = {rate: 0,count: 0};this.roundTrip = NaN;this.resetStats_();this.mediaIndex = null;this.partIndex = null; // private settingsthis.hasPlayed_ = settings.hasPlayed;this.currentTime_ = settings.currentTime;this.seekable_ = settings.seekable;this.seeking_ = settings.seeking;this.duration_ = settings.duration;this.mediaSource_ = settings.mediaSource;this.vhs_ = settings.vhs;this.loaderType_ = settings.loaderType;this.currentMediaInfo_ = void 0;this.startingMediaInfo_ = void 0;this.segmentMetadataTrack_ = settings.segmentMetadataTrack;this.goalBufferLength_ = settings.goalBufferLength;this.sourceType_ = settings.sourceType;this.sourceUpdater_ = settings.sourceUpdater;this.inbandTextTracks_ = settings.inbandTextTracks;this.state_ = 'INIT';this.timelineChangeController_ = settings.timelineChangeController;this.shouldSaveSegmentTimingInfo_ = true;this.parse708captions_ = settings.parse708captions;this.useDtsForTimestampOffset_ = settings.useDtsForTimestampOffset;this.captionServices_ = settings.captionServices;this.exactManifestTimings = settings.exactManifestTimings;this.addMetadataToTextTrack = settings.addMetadataToTextTrack; // private instance variablesthis.checkBufferTimeout_ = null;this.error_ = void 0;this.currentTimeline_ = -1;this.shouldForceTimestampOffsetAfterResync_ = false;this.pendingSegment_ = null;this.xhrOptions_ = null;this.pendingSegments_ = [];this.audioDisabled_ = false;this.isPendingTimestampOffset_ = false; // TODO possibly move gopBuffer and timeMapping info to a separate controllerthis.gopBuffer_ = [];this.timeMapping_ = 0;this.safeAppend_ = false;this.appendInitSegment_ = {audio: true,video: true};this.playlistOfLastInitSegment_ = {audio: null,video: null};this.callQueue_ = []; // If the segment loader prepares to load a segment, but does not have enough// information yet to start the loading process (e.g., if the audio loader wants to// load a segment from the next timeline but the main loader hasn't yet crossed that// timeline), then the load call will be added to the queue until it is ready to be// processed.this.loadQueue_ = [];this.metadataQueue_ = {id3: [],caption: []};this.waitingOnRemove_ = false;this.quotaExceededErrorRetryTimeout_ = null; // Fragmented mp4 playbackthis.activeInitSegmentId_ = null;this.initSegments_ = {}; // HLSe playbackthis.cacheEncryptionKeys_ = settings.cacheEncryptionKeys;this.keyCache_ = {};this.decrypter_ = settings.decrypter; // Manages the tracking and generation of sync-points, mappings// between a time in the display time and a segment index within// a playlistthis.syncController_ = 
settings.syncController;this.syncPoint_ = {segmentIndex: 0,time: 0};this.transmuxer_ = this.createTransmuxer_();this.triggerSyncInfoUpdate_ = () => this.trigger('syncinfoupdate');this.syncController_.on('syncinfoupdate', this.triggerSyncInfoUpdate_);this.mediaSource_.addEventListener('sourceopen', () => {if (!this.isEndOfStream_()) {this.ended_ = false;}}); // ...for determining the fetch locationthis.fetchAtBuffer_ = false;this.logger_ = logger(`SegmentLoader[${this.loaderType_}]`);Object.defineProperty(this, 'state', {get() {return this.state_;},set(newState) {if (newState !== this.state_) {this.logger_(`${this.state_} -> ${newState}`);this.state_ = newState;this.trigger('statechange');}}});this.sourceUpdater_.on('ready', () => {if (this.hasEnoughInfoToAppend_()) {this.processCallQueue_();}}); // Only the main loader needs to listen for pending timeline changes, as the main// loader should wait for audio to be ready to change its timeline so that both main// and audio timelines change together. For more details, see the// shouldWaitForTimelineChange function.if (this.loaderType_ === 'main') {this.timelineChangeController_.on('pendingtimelinechange', () => {if (this.hasEnoughInfoToAppend_()) {this.processCallQueue_();}});} // The main loader only listens on pending timeline changes, but the audio loader,// since its loads follow main, needs to listen on timeline changes. For more details,// see the shouldWaitForTimelineChange function.if (this.loaderType_ === 'audio') {this.timelineChangeController_.on('timelinechange', () => {if (this.hasEnoughInfoToLoad_()) {this.processLoadQueue_();}if (this.hasEnoughInfoToAppend_()) {this.processCallQueue_();}});}}createTransmuxer_() {return segmentTransmuxer.createTransmuxer({remux: false,alignGopsAtEnd: this.safeAppend_,keepOriginalTimestamps: true,parse708captions: this.parse708captions_,captionServices: this.captionServices_});}/*** reset all of our media stats** @private*/resetStats_() {this.mediaBytesTransferred = 0;this.mediaRequests = 0;this.mediaRequestsAborted = 0;this.mediaRequestsTimedout = 0;this.mediaRequestsErrored = 0;this.mediaTransferDuration = 0;this.mediaSecondsLoaded = 0;this.mediaAppends = 0;}/*** dispose of the SegmentLoader and reset to the default state*/dispose() {this.trigger('dispose');this.state = 'DISPOSED';this.pause();this.abort_();if (this.transmuxer_) {this.transmuxer_.terminate();}this.resetStats_();if (this.checkBufferTimeout_) {window.clearTimeout(this.checkBufferTimeout_);}if (this.syncController_ && this.triggerSyncInfoUpdate_) {this.syncController_.off('syncinfoupdate', this.triggerSyncInfoUpdate_);}this.off();}setAudio(enable) {this.audioDisabled_ = !enable;if (enable) {this.appendInitSegment_.audio = true;} else {// remove current track audio if it gets disabledthis.sourceUpdater_.removeAudio(0, this.duration_());}}/*** abort anything that is currently doing on with the SegmentLoader* and reset to a default state*/abort() {if (this.state !== 'WAITING') {if (this.pendingSegment_) {this.pendingSegment_ = null;}return;}this.abort_(); // We aborted the requests we were waiting on, so reset the loader's state to READY// since we are no longer "waiting" on any requests. XHR callback is not always run// when the request is aborted. 
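// An illustrative sketch of the state machine wired up above (loader is a
// hypothetical SegmentLoader instance; the assignment is only to exercise the
// setter defined in this class): every state change logs the transition and
// fires 'statechange', and abort() only rewinds a loader that is WAITING on a
// request.
//
//   loader.on('statechange', () => console.log(loader.state));
//   loader.state = 'WAITING'; // logs 'READY -> WAITING', triggers 'statechange'
//   loader.abort();           // aborts in-flight requests, state becomes 'READY'
//   loader.abort();           // not WAITING any more: only clears pendingSegment_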
This will prevent the loader from being stuck in the// WAITING state indefinitely.this.state = 'READY'; // don't wait for buffer check timeouts to begin fetching the// next segmentif (!this.paused()) {this.monitorBuffer_();}}/*** abort all pending xhr requests and null any pending segements** @private*/abort_() {if (this.pendingSegment_ && this.pendingSegment_.abortRequests) {this.pendingSegment_.abortRequests();} // clear out the segment being processedthis.pendingSegment_ = null;this.callQueue_ = [];this.loadQueue_ = [];this.metadataQueue_.id3 = [];this.metadataQueue_.caption = [];this.timelineChangeController_.clearPendingTimelineChange(this.loaderType_);this.waitingOnRemove_ = false;window.clearTimeout(this.quotaExceededErrorRetryTimeout_);this.quotaExceededErrorRetryTimeout_ = null;}checkForAbort_(requestId) {// If the state is APPENDING, then aborts will not modify the state, meaning the first// callback that happens should reset the state to READY so that loading can continue.if (this.state === 'APPENDING' && !this.pendingSegment_) {this.state = 'READY';return true;}if (!this.pendingSegment_ || this.pendingSegment_.requestId !== requestId) {return true;}return false;}/*** set an error on the segment loader and null out any pending segements** @param {Error} error the error to set on the SegmentLoader* @return {Error} the error that was set or that is currently set*/error(error) {if (typeof error !== 'undefined') {this.logger_('error occurred:', error);this.error_ = error;}this.pendingSegment_ = null;return this.error_;}endOfStream() {this.ended_ = true;if (this.transmuxer_) {// need to clear out any cached data to prepare for the new segmentsegmentTransmuxer.reset(this.transmuxer_);}this.gopBuffer_.length = 0;this.pause();this.trigger('ended');}/*** Indicates which time ranges are buffered** @return {TimeRange}* TimeRange object representing the current buffered ranges*/buffered_() {const trackInfo = this.getMediaInfo_();if (!this.sourceUpdater_ || !trackInfo) {return createTimeRanges();}if (this.loaderType_ === 'main') {const {hasAudio,hasVideo,isMuxed} = trackInfo;if (hasVideo && hasAudio && !this.audioDisabled_ && !isMuxed) {return this.sourceUpdater_.buffered();}if (hasVideo) {return this.sourceUpdater_.videoBuffered();}} // One case that can be ignored for now is audio only with alt audio,// as we don't yet have proper support for that.return this.sourceUpdater_.audioBuffered();}/*** Gets and sets init segment for the provided map** @param {Object} map* The map object representing the init segment to get or set* @param {boolean=} set* If true, the init segment for the provided map should be saved* @return {Object}* map object for desired init segment*/initSegmentForMap(map, set = false) {if (!map) {return null;}const id = initSegmentId(map);let storedMap = this.initSegments_[id];if (set && !storedMap && map.bytes) {this.initSegments_[id] = storedMap = {resolvedUri: map.resolvedUri,byterange: map.byterange,bytes: map.bytes,tracks: map.tracks,timescales: map.timescales};}return storedMap || map;}/*** Gets and sets key for the provided key** @param {Object} key* The key object representing the key to get or set* @param {boolean=} set* If true, the key for the provided key should be saved* @return {Object}* Key object for desired key*/segmentKey(key, set = false) {if (!key) {return null;}const id = segmentKeyId(key);let storedKey = this.keyCache_[id]; // TODO: We should use the HTTP Expires header to invalidate our cache per// 
https://tools.ietf.org/html/draft-pantos-http-live-streaming-23#section-6.2.3if (this.cacheEncryptionKeys_ && set && !storedKey && key.bytes) {this.keyCache_[id] = storedKey = {resolvedUri: key.resolvedUri,bytes: key.bytes};}const result = {resolvedUri: (storedKey || key).resolvedUri};if (storedKey) {result.bytes = storedKey.bytes;}return result;}/*** Returns true if all configuration required for loading is present, otherwise false.** @return {boolean} True if the all configuration is ready for loading* @private*/couldBeginLoading_() {return this.playlist_ && !this.paused();}/*** load a playlist and start to fill the buffer*/load() {// un-pausethis.monitorBuffer_(); // if we don't have a playlist yet, keep waiting for one to be// specifiedif (!this.playlist_) {return;} // if all the configuration is ready, initialize and begin loadingif (this.state === 'INIT' && this.couldBeginLoading_()) {return this.init_();} // if we're in the middle of processing a segment already, don't// kick off an additional segment requestif (!this.couldBeginLoading_() || this.state !== 'READY' && this.state !== 'INIT') {return;}this.state = 'READY';}/*** Once all the starting parameters have been specified, begin* operation. This method should only be invoked from the INIT* state.** @private*/init_() {this.state = 'READY'; // if this is the audio segment loader, and it hasn't been inited before, then any old// audio data from the muxed content should be removedthis.resetEverything();return this.monitorBuffer_();}/*** set a playlist on the segment loader** @param {PlaylistLoader} media the playlist to set on the segment loader*/playlist(newPlaylist, options = {}) {if (!newPlaylist) {return;}const oldPlaylist = this.playlist_;const segmentInfo = this.pendingSegment_;this.playlist_ = newPlaylist;this.xhrOptions_ = options; // when we haven't started playing yet, the start of a live playlist// is always our zero-time so force a sync update each time the playlist// is refreshed from the server//// Use the INIT state to determine if playback has started, as the playlist sync info// should be fixed once requests begin (as sync points are generated based on sync// info), but not before then.if (this.state === 'INIT') {newPlaylist.syncInfo = {mediaSequence: newPlaylist.mediaSequence,time: 0}; // Setting the date time mapping means mapping the program date time (if available)// to time 0 on the player's timeline. The playlist's syncInfo serves a similar// purpose, mapping the initial mediaSequence to time zero. 
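// An illustrative sketch of the key cache above (loader is a hypothetical
// SegmentLoader instance and the key object is made up). When
// cacheEncryptionKeys_ was enabled in the loader's settings, the first call
// with `set` stores the fetched key bytes and later lookups for the same key
// get them back; otherwise only the resolvedUri is echoed:
//
//   const exampleKey = {resolvedUri: 'https://example.com/key.php',
//     bytes: new Uint8Array(16)};
//
//   loader.segmentKey(exampleKey, true);  // caches {resolvedUri, bytes}
//   loader.segmentKey(exampleKey).bytes;  // => the cached Uint8Array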
Since the syncInfo can// be updated as the playlist is refreshed before the loader starts loading, the// program date time mapping needs to be updated as well.//// This mapping is only done for the main loader because a program date time should// map equivalently between playlists.if (this.loaderType_ === 'main') {this.syncController_.setDateTimeMappingForStart(newPlaylist);}}let oldId = null;if (oldPlaylist) {if (oldPlaylist.id) {oldId = oldPlaylist.id;} else if (oldPlaylist.uri) {oldId = oldPlaylist.uri;}}this.logger_(`playlist update [${oldId} => ${newPlaylist.id || newPlaylist.uri}]`);this.syncController_.updateMediaSequenceMap(newPlaylist, this.currentTime_(), this.loaderType_); // in VOD, this is always a rendition switch (or we updated our syncInfo above)// in LIVE, we always want to update with new playlists (including refreshes)this.trigger('syncinfoupdate'); // if we were unpaused but waiting for a playlist, start// buffering nowif (this.state === 'INIT' && this.couldBeginLoading_()) {return this.init_();}if (!oldPlaylist || oldPlaylist.uri !== newPlaylist.uri) {if (this.mediaIndex !== null) {// we must reset/resync the segment loader when we switch renditions and// the segment loader is already synced to the previous rendition// We only want to reset the loader here for LLHLS playback, as resetLoader sets fetchAtBuffer_// to false, resulting in fetching segments at currentTime and causing repeated// same-segment requests on playlist change. This erroneously drives up the playback watcher// stalled segment count, as re-requesting segments at the currentTime or browser cached segments// will not change the buffer.// Reference for LLHLS fixes: https://github.com/videojs/http-streaming/pull/1201const isLLHLS = !newPlaylist.endList && typeof newPlaylist.partTargetDuration === 'number';if (isLLHLS) {this.resetLoader();} else {this.resyncLoader();}}this.currentMediaInfo_ = void 0;this.trigger('playlistupdate'); // the rest of this function depends on `oldPlaylist` being definedreturn;} // we reloaded the same playlist so we are in a live scenario// and we will likely need to adjust the mediaIndexconst mediaSequenceDiff = newPlaylist.mediaSequence - oldPlaylist.mediaSequence;this.logger_(`live window shift [${mediaSequenceDiff}]`); // update the mediaIndex on the SegmentLoader// this is important because we can abort a request and this value must be// equal to the last appended mediaIndexif (this.mediaIndex !== null) {this.mediaIndex -= mediaSequenceDiff; // this can happen if we are going to load the first segment, but get a playlist// update during that. 
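// An illustrative sketch of the rendition-switch branch above (hypothetical
// playlist objects, not part of the library): only low-latency HLS playlists,
// detected by a missing endList plus a numeric partTargetDuration, get
// resetLoader(); everything else resyncs without resetting fetchAtBuffer_.
//
//   {endList: false, partTargetDuration: 0.33} // LL-HLS live   => resetLoader()
//   {endList: false}                           // regular live  => resyncLoader()
//   {endList: true}                            // VOD switch    => resyncLoader()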
mediaIndex would go from 0 to -1 if mediaSequence in the// new playlist was incremented by 1.if (this.mediaIndex < 0) {this.mediaIndex = null;this.partIndex = null;} else {const segment = this.playlist_.segments[this.mediaIndex]; // partIndex should remain the same for the same segment// unless parts fell off of the playlist for this segment.// In that case we need to reset partIndex and resyncif (this.partIndex && (!segment.parts || !segment.parts.length || !segment.parts[this.partIndex])) {const mediaIndex = this.mediaIndex;this.logger_(`currently processing part (index ${this.partIndex}) no longer exists.`);this.resetLoader(); // We want to throw away the partIndex and the data associated with it,// as the part was dropped from our current playlists segment.// The mediaIndex will still be valid so keep that around.this.mediaIndex = mediaIndex;}}} // update the mediaIndex on the SegmentInfo object// this is important because we will update this.mediaIndex with this value// in `handleAppendsDone_` after the segment has been successfully appendedif (segmentInfo) {segmentInfo.mediaIndex -= mediaSequenceDiff;if (segmentInfo.mediaIndex < 0) {segmentInfo.mediaIndex = null;segmentInfo.partIndex = null;} else {// we need to update the referenced segment so that timing information is// saved for the new playlist's segment, however, if the segment fell off the// playlist, we can leave the old reference and just lose the timing infoif (segmentInfo.mediaIndex >= 0) {segmentInfo.segment = newPlaylist.segments[segmentInfo.mediaIndex];}if (segmentInfo.partIndex >= 0 && segmentInfo.segment.parts) {segmentInfo.part = segmentInfo.segment.parts[segmentInfo.partIndex];}}}this.syncController_.saveExpiredSegmentInfo(oldPlaylist, newPlaylist);}/*** Prevent the loader from fetching additional segments. If there* is a segment request outstanding, it will finish processing* before the loader halts. A segment loader can be unpaused by* calling load().*/pause() {if (this.checkBufferTimeout_) {window.clearTimeout(this.checkBufferTimeout_);this.checkBufferTimeout_ = null;}}/*** Returns whether the segment loader is fetching additional* segments when given the opportunity. 
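// An illustrative sketch of the live window shift above (hypothetical
// numbers): when a refreshed playlist advances its media sequence, the
// loader's indices slide back by the difference, and indices that fall off
// the front of the window are discarded.
//
//   oldPlaylist.mediaSequence = 100, newPlaylist.mediaSequence = 102
//     => mediaSequenceDiff = 2
//   this.mediaIndex = 5  => 3    (same segment, new position in the playlist)
//   this.mediaIndex = 1  => -1   => reset to null (segment left the window)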
This property can be* modified through calls to pause() and load().*/paused() {return this.checkBufferTimeout_ === null;}/*** Delete all the buffered data and reset the SegmentLoader** @param {Function} [done] an optional callback to be executed when the remove* operation is complete*/resetEverything(done) {this.ended_ = false;this.activeInitSegmentId_ = null;this.appendInitSegment_ = {audio: true,video: true};this.resetLoader(); // remove from 0, the earliest point, to Infinity, to signify removal of everything.// VTT Segment Loader doesn't need to do anything but in the regular SegmentLoader,// we then clamp the value to duration if necessary.this.remove(0, Infinity, done); // clears fmp4 captionsif (this.transmuxer_) {this.transmuxer_.postMessage({action: 'clearAllMp4Captions'}); // reset the cache in the transmuxerthis.transmuxer_.postMessage({action: 'reset'});}}/*** Force the SegmentLoader to resync and start loading around the currentTime instead* of starting at the end of the buffer** Useful for fast quality changes*/resetLoader() {this.fetchAtBuffer_ = false;this.resyncLoader();}/*** Force the SegmentLoader to restart synchronization and make a conservative guess* before returning to the simple walk-forward method*/resyncLoader() {if (this.transmuxer_) {// need to clear out any cached data to prepare for the new segmentsegmentTransmuxer.reset(this.transmuxer_);}this.mediaIndex = null;this.partIndex = null;this.syncPoint_ = null;this.isPendingTimestampOffset_ = false;this.shouldForceTimestampOffsetAfterResync_ = true;this.callQueue_ = [];this.loadQueue_ = [];this.metadataQueue_.id3 = [];this.metadataQueue_.caption = [];this.abort();if (this.transmuxer_) {this.transmuxer_.postMessage({action: 'clearParsedMp4Captions'});}}/*** Remove any data in the source buffer between start and end times** @param {number} start - the start time of the region to remove from the buffer* @param {number} end - the end time of the region to remove from the buffer* @param {Function} [done] - an optional callback to be executed when the remove* operation is complete* @param {boolean} force - force all remove operations to happen*/remove(start, end, done = () => {}, force = false) {// clamp end to duration if we need to remove everything.// This is due to a browser bug that causes issues if we remove to Infinity.// videojs/videojs-contrib-hls#1225if (end === Infinity) {end = this.duration_();} // skip removes that would throw an error// commonly happens during a rendition switch at the start of a video// from start 0 to end 0if (end <= start) {this.logger_(`skipping remove because end ${end} is <= start ${start}`);return;}if (!this.sourceUpdater_ || !this.getMediaInfo_()) {this.logger_('skipping remove because no source updater or starting media info'); // nothing to remove if we haven't processed any mediareturn;} // set it to one to complete this function's removeslet removesRemaining = 1;const removeFinished = () => {removesRemaining--;if (removesRemaining === 0) {done();}};if (force || !this.audioDisabled_) {removesRemaining++;this.sourceUpdater_.removeAudio(start, end, removeFinished);} // While it would be better to only remove video if the main loader has video, this// should be safe with audio only as removeVideo will call back even if there's no// video buffer.//// In theory we can check to see if there's video before calling the remove, but in// the event that we're switching between renditions and from video to audio only// (when we add support for that), we may need to clear the video contents 
despite// what the new media will contain.if (force || this.loaderType_ === 'main') {this.gopBuffer_ = removeGopBuffer(this.gopBuffer_, start, end, this.timeMapping_);removesRemaining++;this.sourceUpdater_.removeVideo(start, end, removeFinished);} // remove any captions and ID3 tagsfor (const track in this.inbandTextTracks_) {removeCuesFromTrack(start, end, this.inbandTextTracks_[track]);}removeCuesFromTrack(start, end, this.segmentMetadataTrack_); // finished this function's removesremoveFinished();}/*** (re-)schedule monitorBufferTick_ to run as soon as possible** @private*/monitorBuffer_() {if (this.checkBufferTimeout_) {window.clearTimeout(this.checkBufferTimeout_);}this.checkBufferTimeout_ = window.setTimeout(this.monitorBufferTick_.bind(this), 1);}/*** As long as the SegmentLoader is in the READY state, periodically* invoke fillBuffer_().** @private*/monitorBufferTick_() {if (this.state === 'READY') {this.fillBuffer_();}if (this.checkBufferTimeout_) {window.clearTimeout(this.checkBufferTimeout_);}this.checkBufferTimeout_ = window.setTimeout(this.monitorBufferTick_.bind(this), CHECK_BUFFER_DELAY);}/*** fill the buffer with segements unless the sourceBuffers are* currently updating** Note: this function should only ever be called by monitorBuffer_* and never directly** @private*/fillBuffer_() {// TODO since the source buffer maintains a queue, and we shouldn't call this function// except when we're ready for the next segment, this check can most likely be removedif (this.sourceUpdater_.updating()) {return;} // see if we need to begin loading immediatelyconst segmentInfo = this.chooseNextRequest_();if (!segmentInfo) {return;}if (typeof segmentInfo.timestampOffset === 'number') {this.isPendingTimestampOffset_ = false;this.timelineChangeController_.pendingTimelineChange({type: this.loaderType_,from: this.currentTimeline_,to: segmentInfo.timeline});}this.loadSegment_(segmentInfo);}/*** Determines if we should call endOfStream on the media source based* on the state of the buffer or if appened segment was the final* segment in the playlist.** @param {number} [mediaIndex] the media index of segment we last appended* @param {Object} [playlist] a media playlist object* @return {boolean} do we need to call endOfStream on the MediaSource*/isEndOfStream_(mediaIndex = this.mediaIndex, playlist = this.playlist_, partIndex = this.partIndex) {if (!playlist || !this.mediaSource_) {return false;}const segment = typeof mediaIndex === 'number' && playlist.segments[mediaIndex]; // mediaIndex is zero based but length is 1 basedconst appendedLastSegment = mediaIndex + 1 === playlist.segments.length; // true if there are no parts, or this is the last part.const appendedLastPart = !segment || !segment.parts || partIndex + 1 === segment.parts.length; // if we've buffered to the end of the video, we need to call endOfStream// so that MediaSources can trigger the `ended` event when it runs out of// buffered data instead of waiting for mereturn playlist.endList && this.mediaSource_.readyState === 'open' && appendedLastSegment && appendedLastPart;}/*** Determines what request should be made given current segment loader state.** @return {Object} a request object that describes the segment/part to load*/chooseNextRequest_() {const buffered = this.buffered_();const bufferedEnd = lastBufferedEnd(buffered) || 0;const bufferedTime = timeAheadOf(buffered, this.currentTime_());const preloaded = !this.hasPlayed_() && bufferedTime >= 1;const haveEnoughBuffer = bufferedTime >= this.goalBufferLength_();const segments = 
this.playlist_.segments; // return no segment if:// 1. we don't have segments// 2. The video has not yet played and we already downloaded a segment// 3. we already have enough buffered timeif (!segments.length || preloaded || haveEnoughBuffer) {return null;}this.syncPoint_ = this.syncPoint_ || this.syncController_.getSyncPoint(this.playlist_, this.duration_(), this.currentTimeline_, this.currentTime_(), this.loaderType_);const next = {partIndex: null,mediaIndex: null,startOfSegment: null,playlist: this.playlist_,isSyncRequest: Boolean(!this.syncPoint_)};if (next.isSyncRequest) {next.mediaIndex = getSyncSegmentCandidate(this.currentTimeline_, segments, bufferedEnd);this.logger_(`choose next request. Can not find sync point. Fallback to media Index: ${next.mediaIndex}`);} else if (this.mediaIndex !== null) {const segment = segments[this.mediaIndex];const partIndex = typeof this.partIndex === 'number' ? this.partIndex : -1;next.startOfSegment = segment.end ? segment.end : bufferedEnd;if (segment.parts && segment.parts[partIndex + 1]) {next.mediaIndex = this.mediaIndex;next.partIndex = partIndex + 1;} else {next.mediaIndex = this.mediaIndex + 1;}} else {// Find the segment containing the end of the buffer or current time.const {segmentIndex,startTime,partIndex} = Playlist.getMediaInfoForTime({exactManifestTimings: this.exactManifestTimings,playlist: this.playlist_,currentTime: this.fetchAtBuffer_ ? bufferedEnd : this.currentTime_(),startingPartIndex: this.syncPoint_.partIndex,startingSegmentIndex: this.syncPoint_.segmentIndex,startTime: this.syncPoint_.time});next.getMediaInfoForTime = this.fetchAtBuffer_ ? `bufferedEnd ${bufferedEnd}` : `currentTime ${this.currentTime_()}`;next.mediaIndex = segmentIndex;next.startOfSegment = startTime;next.partIndex = partIndex;this.logger_(`choose next request. Playlist switched and we have a sync point. Media Index: ${next.mediaIndex} `);}const nextSegment = segments[next.mediaIndex];let nextPart = nextSegment && typeof next.partIndex === 'number' && nextSegment.parts && nextSegment.parts[next.partIndex]; // if the next segment index is invalid or// the next partIndex is invalid do not choose a next segment.if (!nextSegment || typeof next.partIndex === 'number' && !nextPart) {return null;} // if the next segment has parts, and we don't have a partIndex.// Set partIndex to 0if (typeof next.partIndex !== 'number' && nextSegment.parts) {next.partIndex = 0;nextPart = nextSegment.parts[0];} // independentSegments applies to every segment in a playlist. 
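// An illustrative sketch of the walk-forward branch above (hypothetical
// indices, not part of the library): when the loader already has a
// mediaIndex, the next request is simply the following part of the same
// segment, or the next segment once the parts run out.
//
//   segment.parts = [p0, p1, p2], this.partIndex = 1
//     => next is {mediaIndex: this.mediaIndex, partIndex: 2}
//   segment.parts = [p0, p1, p2], this.partIndex = 2 (no parts[3])
//     => next is {mediaIndex: this.mediaIndex + 1, partIndex: null}
//        (partIndex is later set to 0 if the next segment also has parts)
//
// The other two branches are the sync request (no sync point yet, handled by
// getSyncSegmentCandidate) and Playlist.getMediaInfoForTime, which looks up
// the segment containing bufferedEnd or currentTime after a playlist switch.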
If independentSegments appears in a main playlist,// it applies to each segment in each media playlist.// https://datatracker.ietf.org/doc/html/draft-pantos-http-live-streaming-23#section-4.3.5.1const hasIndependentSegments = this.vhs_.playlists && this.vhs_.playlists.main && this.vhs_.playlists.main.independentSegments || this.playlist_.independentSegments; // if we have no buffered data then we need to make sure// that the next part we append is "independent" if possible.// So we check if the previous part is independent, and request// it if it is.if (!bufferedTime && nextPart && !hasIndependentSegments && !nextPart.independent) {if (next.partIndex === 0) {const lastSegment = segments[next.mediaIndex - 1];const lastSegmentLastPart = lastSegment.parts && lastSegment.parts.length && lastSegment.parts[lastSegment.parts.length - 1];if (lastSegmentLastPart && lastSegmentLastPart.independent) {next.mediaIndex -= 1;next.partIndex = lastSegment.parts.length - 1;next.independent = 'previous segment';}} else if (nextSegment.parts[next.partIndex - 1].independent) {next.partIndex -= 1;next.independent = 'previous part';}}const ended = this.mediaSource_ && this.mediaSource_.readyState === 'ended'; // do not choose a next segment if all of the following:// 1. this is the last segment in the playlist// 2. end of stream has been called on the media source already// 3. the player is not seekingif (next.mediaIndex >= segments.length - 1 && ended && !this.seeking_()) {return null;}if (this.shouldForceTimestampOffsetAfterResync_) {this.shouldForceTimestampOffsetAfterResync_ = false;next.forceTimestampOffset = true;this.logger_('choose next request. Force timestamp offset after loader resync');}return this.generateSegmentInfo_(next);}generateSegmentInfo_(options) {const {independent,playlist,mediaIndex,startOfSegment,isSyncRequest,partIndex,forceTimestampOffset,getMediaInfoForTime} = options;const segment = playlist.segments[mediaIndex];const part = typeof partIndex === 'number' && segment.parts[partIndex];const segmentInfo = {requestId: 'segment-loader-' + Math.random(),// resolve the segment URL relative to the playlisturi: part && part.resolvedUri || segment.resolvedUri,// the segment's mediaIndex at the time it was requestedmediaIndex,partIndex: part ? partIndex : null,// whether or not to update the SegmentLoader's state with this// segment's mediaIndexisSyncRequest,startOfSegment,// the segment's playlistplaylist,// unencrypted bytes of the segmentbytes: null,// when a key is defined for this segment, the encrypted bytesencryptedBytes: null,// The target timestampOffset for this segment when we append it// to the source buffertimestampOffset: null,// The timeline that the segment is intimeline: segment.timeline,// The expected duration of the segment in secondsduration: part && part.duration || segment.duration,// retain the segment in case the playlist updates while doing an async processsegment,part,byteLength: 0,transmuxer: this.transmuxer_,// type of getMediaInfoForTime that was used to get this segmentgetMediaInfoForTime,independent};const overrideCheck = typeof forceTimestampOffset !== 'undefined' ? 
forceTimestampOffset : this.isPendingTimestampOffset_;segmentInfo.timestampOffset = this.timestampOffsetForSegment_({segmentTimeline: segment.timeline,currentTimeline: this.currentTimeline_,startOfSegment,buffered: this.buffered_(),overrideCheck});const audioBufferedEnd = lastBufferedEnd(this.sourceUpdater_.audioBuffered());if (typeof audioBufferedEnd === 'number') {// since the transmuxer is using the actual timing values, but the buffer is// adjusted by the timestamp offset, we must adjust the value heresegmentInfo.audioAppendStart = audioBufferedEnd - this.sourceUpdater_.audioTimestampOffset();}if (this.sourceUpdater_.videoBuffered().length) {segmentInfo.gopsToAlignWith = gopsSafeToAlignWith(this.gopBuffer_,// since the transmuxer is using the actual timing values, but the time is// adjusted by the timestmap offset, we must adjust the value herethis.currentTime_() - this.sourceUpdater_.videoTimestampOffset(), this.timeMapping_);}return segmentInfo;} // get the timestampoffset for a segment,// added so that vtt segment loader can override and prevent// adding timestamp offsets.timestampOffsetForSegment_(options) {return timestampOffsetForSegment(options);}/*** Determines if the network has enough bandwidth to complete the current segment* request in a timely manner. If not, the request will be aborted early and bandwidth* updated to trigger a playlist switch.** @param {Object} stats* Object containing stats about the request timing and size* @private*/earlyAbortWhenNeeded_(stats) {if (this.vhs_.tech_.paused() ||// Don't abort if the current playlist is on the lowestEnabledRendition// TODO: Replace using timeout with a boolean indicating whether this playlist is// the lowestEnabledRendition.!this.xhrOptions_.timeout ||// Don't abort if we have no bandwidth information to estimate segment sizes!this.playlist_.attributes.BANDWIDTH) {return;} // Wait at least 1 second since the first byte of data has been received before// using the calculated bandwidth from the progress event to allow the bitrate// to stabilizeif (Date.now() - (stats.firstBytesReceivedAt || Date.now()) < 1000) {return;}const currentTime = this.currentTime_();const measuredBandwidth = stats.bandwidth;const segmentDuration = this.pendingSegment_.duration;const requestTimeRemaining = Playlist.estimateSegmentRequestTime(segmentDuration, measuredBandwidth, this.playlist_, stats.bytesReceived); // Subtract 1 from the timeUntilRebuffer so we still consider an early abort// if we are only left with less than 1 second when the request completes.// A negative timeUntilRebuffering indicates we are already rebufferingconst timeUntilRebuffer$1 = timeUntilRebuffer(this.buffered_(), currentTime, this.vhs_.tech_.playbackRate()) - 1; // Only consider aborting early if the estimated time to finish the download// is larger than the estimated time until the player runs out of forward bufferif (requestTimeRemaining <= timeUntilRebuffer$1) {return;}const switchCandidate = minRebufferMaxBandwidthSelector({main: this.vhs_.playlists.main,currentTime,bandwidth: measuredBandwidth,duration: this.duration_(),segmentDuration,timeUntilRebuffer: timeUntilRebuffer$1,currentTimeline: this.currentTimeline_,syncController: this.syncController_});if (!switchCandidate) {return;}const rebufferingImpact = requestTimeRemaining - timeUntilRebuffer$1;const timeSavedBySwitching = rebufferingImpact - switchCandidate.rebufferingImpact;let minimumTimeSaving = 0.5; // If we are already rebuffering, increase the amount of variance we add to the// potential round trip time 
of the new request so that we are not too aggressive// with switching to a playlist that might save us a fraction of a second.if (timeUntilRebuffer$1 <= TIME_FUDGE_FACTOR) {minimumTimeSaving = 1;}if (!switchCandidate.playlist || switchCandidate.playlist.uri === this.playlist_.uri || timeSavedBySwitching < minimumTimeSaving) {return;} // set the bandwidth to that of the desired playlist being sure to scale by// BANDWIDTH_VARIANCE and add one so the playlist selector does not exclude it// don't trigger a bandwidthupdate as the bandwidth is artifialthis.bandwidth = switchCandidate.playlist.attributes.BANDWIDTH * Config.BANDWIDTH_VARIANCE + 1;this.trigger('earlyabort');}handleAbort_(segmentInfo) {this.logger_(`Aborting ${segmentInfoString(segmentInfo)}`);this.mediaRequestsAborted += 1;}/*** XHR `progress` event handler** @param {Event}* The XHR `progress` event* @param {Object} simpleSegment* A simplified segment object copy* @private*/handleProgress_(event, simpleSegment) {this.earlyAbortWhenNeeded_(simpleSegment.stats);if (this.checkForAbort_(simpleSegment.requestId)) {return;}this.trigger('progress');}handleTrackInfo_(simpleSegment, trackInfo) {this.earlyAbortWhenNeeded_(simpleSegment.stats);if (this.checkForAbort_(simpleSegment.requestId)) {return;}if (this.checkForIllegalMediaSwitch(trackInfo)) {return;}trackInfo = trackInfo || {}; // When we have track info, determine what media types this loader is dealing with.// Guard against cases where we're not getting track info at all until we are// certain that all streams will provide it.if (!shallowEqual(this.currentMediaInfo_, trackInfo)) {this.appendInitSegment_ = {audio: true,video: true};this.startingMediaInfo_ = trackInfo;this.currentMediaInfo_ = trackInfo;this.logger_('trackinfo update', trackInfo);this.trigger('trackinfo');} // trackinfo may cause an abort if the trackinfo// causes a codec change to an unsupported codec.if (this.checkForAbort_(simpleSegment.requestId)) {return;} // set trackinfo on the pending segment so that// it can append.this.pendingSegment_.trackInfo = trackInfo; // check if any calls were waiting on the track infoif (this.hasEnoughInfoToAppend_()) {this.processCallQueue_();}}handleTimingInfo_(simpleSegment, mediaType, timeType, time) {this.earlyAbortWhenNeeded_(simpleSegment.stats);if (this.checkForAbort_(simpleSegment.requestId)) {return;}const segmentInfo = this.pendingSegment_;const timingInfoProperty = timingInfoPropertyForMedia(mediaType);segmentInfo[timingInfoProperty] = segmentInfo[timingInfoProperty] || {};segmentInfo[timingInfoProperty][timeType] = time;this.logger_(`timinginfo: ${mediaType} - ${timeType} - ${time}`); // check if any calls were waiting on the timing infoif (this.hasEnoughInfoToAppend_()) {this.processCallQueue_();}}handleCaptions_(simpleSegment, captionData) {this.earlyAbortWhenNeeded_(simpleSegment.stats);if (this.checkForAbort_(simpleSegment.requestId)) {return;} // This could only happen with fmp4 segments, but// should still not happen in generalif (captionData.length === 0) {this.logger_('SegmentLoader received no captions from a caption event');return;}const segmentInfo = this.pendingSegment_; // Wait until we have some video data so that caption timing// can be adjusted by the timestamp offsetif (!segmentInfo.hasAppendedData_) {this.metadataQueue_.caption.push(this.handleCaptions_.bind(this, simpleSegment, captionData));return;}const timestampOffset = this.sourceUpdater_.videoTimestampOffset() === null ? 
this.sourceUpdater_.audioTimestampOffset() : this.sourceUpdater_.videoTimestampOffset();const captionTracks = {}; // get total start/end and captions for each track/streamcaptionData.forEach(caption => {// caption.stream is actually a track name...// set to the existing values in tracks or default valuescaptionTracks[caption.stream] = captionTracks[caption.stream] || {// Infinity, as any other value will be less than thisstartTime: Infinity,captions: [],// 0 as an other value will be more than thisendTime: 0};const captionTrack = captionTracks[caption.stream];captionTrack.startTime = Math.min(captionTrack.startTime, caption.startTime + timestampOffset);captionTrack.endTime = Math.max(captionTrack.endTime, caption.endTime + timestampOffset);captionTrack.captions.push(caption);});Object.keys(captionTracks).forEach(trackName => {const {startTime,endTime,captions} = captionTracks[trackName];const inbandTextTracks = this.inbandTextTracks_;this.logger_(`adding cues from ${startTime} -> ${endTime} for ${trackName}`);createCaptionsTrackIfNotExists(inbandTextTracks, this.vhs_.tech_, trackName); // clear out any cues that start and end at the same time period for the same track.// We do this because a rendition change that also changes the timescale for captions// will result in captions being re-parsed for certain segments. If we add them again// without clearing we will have two of the same captions visible.removeCuesFromTrack(startTime, endTime, inbandTextTracks[trackName]);addCaptionData({captionArray: captions,inbandTextTracks,timestampOffset});}); // Reset stored captions since we added parsed// captions to a text track at this pointif (this.transmuxer_) {this.transmuxer_.postMessage({action: 'clearParsedMp4Captions'});}}handleId3_(simpleSegment, id3Frames, dispatchType) {this.earlyAbortWhenNeeded_(simpleSegment.stats);if (this.checkForAbort_(simpleSegment.requestId)) {return;}const segmentInfo = this.pendingSegment_; // we need to have appended data in order for the timestamp offset to be setif (!segmentInfo.hasAppendedData_) {this.metadataQueue_.id3.push(this.handleId3_.bind(this, simpleSegment, id3Frames, dispatchType));return;}this.addMetadataToTextTrack(dispatchType, id3Frames, this.duration_());}processMetadataQueue_() {this.metadataQueue_.id3.forEach(fn => fn());this.metadataQueue_.caption.forEach(fn => fn());this.metadataQueue_.id3 = [];this.metadataQueue_.caption = [];}processCallQueue_() {const callQueue = this.callQueue_; // Clear out the queue before the queued functions are run, since some of the// functions may check the length of the load queue and default to pushing themselves// back onto the queue.this.callQueue_ = [];callQueue.forEach(fun => fun());}processLoadQueue_() {const loadQueue = this.loadQueue_; // Clear out the queue before the queued functions are run, since some of the// functions may check the length of the load queue and default to pushing themselves// back onto the queue.this.loadQueue_ = [];loadQueue.forEach(fun => fun());}/*** Determines whether the loader has enough info to load the next segment.** @return {boolean}* Whether or not the loader has enough info to load the next segment*/hasEnoughInfoToLoad_() {// Since primary timing goes by video, only the audio loader potentially needs to wait// to load.if (this.loaderType_ !== 'audio') {return true;}const segmentInfo = this.pendingSegment_; // A fill buffer must have already run to establish a pending segment before there's// enough info to load.if (!segmentInfo) {return false;} // The first segment can and 
should be loaded immediately so that source buffers are// created together (before appending). Source buffer creation uses the presence of// audio and video data to determine whether to create audio/video source buffers, and// uses processed (transmuxed or parsed) media to determine the types required.if (!this.getCurrentMediaInfo_()) {return true;}if (// Technically, instead of waiting to load a segment on timeline changes, a segment// can be requested and downloaded and only wait before it is transmuxed or parsed.// But in practice, there are a few reasons why it is better to wait until a loader// is ready to append that segment before requesting and downloading://// 1. Because audio and main loaders cross discontinuities together, if this loader// is waiting for the other to catch up, then instead of requesting another// segment and using up more bandwidth, by not yet loading, more bandwidth is// allotted to the loader currently behind.// 2. media-segment-request doesn't have to have logic to consider whether a segment// is ready to be processed or not, isolating the queueing behavior to the loader.// 3. The audio loader bases some of its segment properties on timing information// provided by the main loader, meaning that, if the logic for waiting on// processing was in media-segment-request, then it would also need to know how// to re-generate the segment information after the main loader caught up.shouldWaitForTimelineChange({timelineChangeController: this.timelineChangeController_,currentTimeline: this.currentTimeline_,segmentTimeline: segmentInfo.timeline,loaderType: this.loaderType_,audioDisabled: this.audioDisabled_})) {return false;}return true;}getCurrentMediaInfo_(segmentInfo = this.pendingSegment_) {return segmentInfo && segmentInfo.trackInfo || this.currentMediaInfo_;}getMediaInfo_(segmentInfo = this.pendingSegment_) {return this.getCurrentMediaInfo_(segmentInfo) || this.startingMediaInfo_;}getPendingSegmentPlaylist() {return this.pendingSegment_ ? 
this.pendingSegment_.playlist : null;}hasEnoughInfoToAppend_() {if (!this.sourceUpdater_.ready()) {return false;} // If content needs to be removed or the loader is waiting on an append reattempt,// then no additional content should be appended until the prior append is resolved.if (this.waitingOnRemove_ || this.quotaExceededErrorRetryTimeout_) {return false;}const segmentInfo = this.pendingSegment_;const trackInfo = this.getCurrentMediaInfo_(); // no segment to append any data for or// we do not have information on this specific// segment yetif (!segmentInfo || !trackInfo) {return false;}const {hasAudio,hasVideo,isMuxed} = trackInfo;if (hasVideo && !segmentInfo.videoTimingInfo) {return false;} // muxed content only relies on video timing information for now.if (hasAudio && !this.audioDisabled_ && !isMuxed && !segmentInfo.audioTimingInfo) {return false;}if (shouldWaitForTimelineChange({timelineChangeController: this.timelineChangeController_,currentTimeline: this.currentTimeline_,segmentTimeline: segmentInfo.timeline,loaderType: this.loaderType_,audioDisabled: this.audioDisabled_})) {return false;}return true;}handleData_(simpleSegment, result) {this.earlyAbortWhenNeeded_(simpleSegment.stats);if (this.checkForAbort_(simpleSegment.requestId)) {return;} // If there's anything in the call queue, then this data came later and should be// executed after the calls currently queued.if (this.callQueue_.length || !this.hasEnoughInfoToAppend_()) {this.callQueue_.push(this.handleData_.bind(this, simpleSegment, result));return;}const segmentInfo = this.pendingSegment_; // update the time mapping so we can translate from display time to media timethis.setTimeMapping_(segmentInfo.timeline); // for tracking overall statsthis.updateMediaSecondsLoaded_(segmentInfo.part || segmentInfo.segment); // Note that the state isn't changed from loading to appending. This is because abort// logic may change behavior depending on the state, and changing state too early may// inflate our estimates of bandwidth. 
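// Condensed, the append gate above amounts to (sketch only, using the local
// names from hasEnoughInfoToAppend_):
//
//   sourceUpdater_.ready() &&
//   !waitingOnRemove_ && !quotaExceededErrorRetryTimeout_ &&
//   pendingSegment_ && trackInfo &&
//   (!hasVideo || segmentInfo.videoTimingInfo) &&
//   (!hasAudio || audioDisabled_ || isMuxed || segmentInfo.audioTimingInfo) &&
//   !shouldWaitForTimelineChange({...})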
In the future this should be re-examined to// note more granular states.// don't process and append data if the mediaSource is closedif (this.mediaSource_.readyState === 'closed') {return;} // if this request included an initialization segment, save that data// to the initSegment cacheif (simpleSegment.map) {simpleSegment.map = this.initSegmentForMap(simpleSegment.map, true); // move over init segment properties to media requestsegmentInfo.segment.map = simpleSegment.map;} // if this request included a segment key, save that data in the cacheif (simpleSegment.key) {this.segmentKey(simpleSegment.key, true);}segmentInfo.isFmp4 = simpleSegment.isFmp4;segmentInfo.timingInfo = segmentInfo.timingInfo || {};if (segmentInfo.isFmp4) {this.trigger('fmp4');segmentInfo.timingInfo.start = segmentInfo[timingInfoPropertyForMedia(result.type)].start;} else {const trackInfo = this.getCurrentMediaInfo_();const useVideoTimingInfo = this.loaderType_ === 'main' && trackInfo && trackInfo.hasVideo;let firstVideoFrameTimeForData;if (useVideoTimingInfo) {firstVideoFrameTimeForData = segmentInfo.videoTimingInfo.start;} // Segment loader knows more about segment timing than the transmuxer (in certain// aspects), so make any changes required for a more accurate start time.// Don't set the end time yet, as the segment may not be finished processing.segmentInfo.timingInfo.start = this.trueSegmentStart_({currentStart: segmentInfo.timingInfo.start,playlist: segmentInfo.playlist,mediaIndex: segmentInfo.mediaIndex,currentVideoTimestampOffset: this.sourceUpdater_.videoTimestampOffset(),useVideoTimingInfo,firstVideoFrameTimeForData,videoTimingInfo: segmentInfo.videoTimingInfo,audioTimingInfo: segmentInfo.audioTimingInfo});} // Init segments for audio and video only need to be appended in certain cases. Now// that data is about to be appended, we can check the final cases to determine// whether we should append an init segment.this.updateAppendInitSegmentStatus(segmentInfo, result.type); // Timestamp offset should be updated once we get new data and have its timing info,// as we use the start of the segment to offset the best guess (playlist provided)// timestamp offset.this.updateSourceBufferTimestampOffset_(segmentInfo); // if this is a sync request we need to determine whether it should// be appended or not.if (segmentInfo.isSyncRequest) {// first save/update our timing info for this segment.// this is what allows us to choose an accurate segment// and the main reason we make a sync request.this.updateTimingInfoEnd_(segmentInfo);this.syncController_.saveSegmentTimingInfo({segmentInfo,shouldSaveTimelineMapping: this.loaderType_ === 'main'});const next = this.chooseNextRequest_(); // If the sync request isn't the segment that would be requested next// after taking into account its timing info, do not append it.if (next.mediaIndex !== segmentInfo.mediaIndex || next.partIndex !== segmentInfo.partIndex) {this.logger_('sync segment was incorrect, not appending');return;} // otherwise append it like any other segment as our guess was correct.this.logger_('sync segment was correct, appending');} // Save some state so that in the future anything waiting on first append (and/or// timestamp offset(s)) can process immediately. 
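// An illustrative walk-through of the sync-request check above (hypothetical
// indices): a sync request fetched segment 12 without a sync point; once its
// timing info is saved, chooseNextRequest_ is re-run with that knowledge.
//
//   next = {mediaIndex: 13, partIndex: null} => the fetched bytes are not
//     appended ('sync segment was incorrect, not appending')
//   next = {mediaIndex: 12, partIndex: null} => the guess was right and the
//     bytes are appended like any other segment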
While the extra state isn't optimal,// we need some notion of whether the timestamp offset or other relevant information// has had a chance to be set.segmentInfo.hasAppendedData_ = true; // Now that the timestamp offset should be set, we can append any waiting ID3 tags.this.processMetadataQueue_();this.appendData_(segmentInfo, result);}updateAppendInitSegmentStatus(segmentInfo, type) {// alt audio doesn't manage timestamp offsetif (this.loaderType_ === 'main' && typeof segmentInfo.timestampOffset === 'number' &&// in the case that we're handling partial data, we don't want to append an init// segment for each chunk!segmentInfo.changedTimestampOffset) {// if the timestamp offset changed, the timeline may have changed, so we have to re-// append init segmentsthis.appendInitSegment_ = {audio: true,video: true};}if (this.playlistOfLastInitSegment_[type] !== segmentInfo.playlist) {// make sure we append init segment on playlist changes, in case the media config// changedthis.appendInitSegment_[type] = true;}}getInitSegmentAndUpdateState_({type,initSegment,map,playlist}) {// "The EXT-X-MAP tag specifies how to obtain the Media Initialization Section// (Section 3) required to parse the applicable Media Segments. It applies to every// Media Segment that appears after it in the Playlist until the next EXT-X-MAP tag// or until the end of the playlist."// https://tools.ietf.org/html/draft-pantos-http-live-streaming-23#section-4.3.2.5if (map) {const id = initSegmentId(map);if (this.activeInitSegmentId_ === id) {// don't need to re-append the init segment if the ID matchesreturn null;} // a map-specified init segment takes priority over any transmuxed (or otherwise// obtained) init segment//// this also caches the init segment for later useinitSegment = this.initSegmentForMap(map, true).bytes;this.activeInitSegmentId_ = id;} // We used to always prepend init segments for video, however, that shouldn't be// necessary. Instead, we should only append on changes, similar to what we've always// done for audio. This is more important (though may not be that important) for// frame-by-frame appending for LHLS, simply because of the increased quantity of// appends.if (initSegment && this.appendInitSegment_[type]) {// Make sure we track the playlist that we last used for the init segment, so that// we can re-append the init segment in the event that we get data from a new// playlist. Discontinuities and track changes are handled in other sections.this.playlistOfLastInitSegment_[type] = playlist; // Disable future init segment appends for this type. Until a change is necessary.this.appendInitSegment_[type] = false; // we need to clear out the fmp4 active init segment id, since// we are appending the muxer init segmentthis.activeInitSegmentId_ = null;return initSegment;}return null;}handleQuotaExceededError_({segmentInfo,type,bytes}, error) {const audioBuffered = this.sourceUpdater_.audioBuffered();const videoBuffered = this.sourceUpdater_.videoBuffered(); // For now we're ignoring any notion of gaps in the buffer, but they, in theory,// should be cleared out during the buffer removals. However, log in case it helps// debug.if (audioBuffered.length > 1) {this.logger_('On QUOTA_EXCEEDED_ERR, found gaps in the audio buffer: ' + timeRangesToArray(audioBuffered).join(', '));}if (videoBuffered.length > 1) {this.logger_('On QUOTA_EXCEEDED_ERR, found gaps in the video buffer: ' + timeRangesToArray(videoBuffered).join(', '));}const audioBufferStart = audioBuffered.length ? 
audioBuffered.start(0) : 0;const audioBufferEnd = audioBuffered.length ? audioBuffered.end(audioBuffered.length - 1) : 0;const videoBufferStart = videoBuffered.length ? videoBuffered.start(0) : 0;const videoBufferEnd = videoBuffered.length ? videoBuffered.end(videoBuffered.length - 1) : 0;if (audioBufferEnd - audioBufferStart <= MIN_BACK_BUFFER && videoBufferEnd - videoBufferStart <= MIN_BACK_BUFFER) {// Can't remove enough buffer to make room for new segment (or the browser doesn't// allow for appends of segments this size). In the future, it may be possible to// split up the segment and append in pieces, but for now, error out this playlist// in an attempt to switch to a more manageable rendition.this.logger_('On QUOTA_EXCEEDED_ERR, single segment too large to append to ' + 'buffer, triggering an error. ' + `Appended byte length: ${bytes.byteLength}, ` + `audio buffer: ${timeRangesToArray(audioBuffered).join(', ')}, ` + `video buffer: ${timeRangesToArray(videoBuffered).join(', ')}, `);this.error({message: 'Quota exceeded error with append of a single segment of content',excludeUntil: Infinity});this.trigger('error');return;} // To try to resolve the quota exceeded error, clear back buffer and retry. This means// that the segment-loader should block on future events until this one is handled, so// that it doesn't keep moving onto further segments. Adding the call to the call// queue will prevent further appends until waitingOnRemove_ and// quotaExceededErrorRetryTimeout_ are cleared.//// Note that this will only block the current loader. In the case of demuxed content,// the other load may keep filling as fast as possible. In practice, this should be// OK, as it is a rare case when either audio has a high enough bitrate to fill up a// source buffer, or video fills without enough room for audio to append (and without// the availability of clearing out seconds of back buffer to make room for audio).// But it might still be good to handle this case in the future as a TODO.this.waitingOnRemove_ = true;this.callQueue_.push(this.appendToSourceBuffer_.bind(this, {segmentInfo,type,bytes}));const currentTime = this.currentTime_(); // Try to remove as much audio and video as possible to make room for new content// before retrying.const timeToRemoveUntil = currentTime - MIN_BACK_BUFFER;this.logger_(`On QUOTA_EXCEEDED_ERR, removing audio/video from 0 to ${timeToRemoveUntil}`);this.remove(0, timeToRemoveUntil, () => {this.logger_(`On QUOTA_EXCEEDED_ERR, retrying append in ${MIN_BACK_BUFFER}s`);this.waitingOnRemove_ = false; // wait the length of time alotted in the back buffer to prevent wasted// attempts (since we can't clear less than the minimum)this.quotaExceededErrorRetryTimeout_ = window.setTimeout(() => {this.logger_('On QUOTA_EXCEEDED_ERR, re-processing call queue');this.quotaExceededErrorRetryTimeout_ = null;this.processCallQueue_();}, MIN_BACK_BUFFER * 1000);}, true);}handleAppendError_({segmentInfo,type,bytes}, error) {// if there's no error, nothing to doif (!error) {return;}if (error.code === QUOTA_EXCEEDED_ERR) {this.handleQuotaExceededError_({segmentInfo,type,bytes}); // A quota exceeded error should be recoverable with a future re-append, so no need// to trigger an append error.return;}this.logger_('Received non QUOTA_EXCEEDED_ERR on append', error);this.error(`${type} append of ${bytes.length}b failed for segment ` + `#${segmentInfo.mediaIndex} in playlist ${segmentInfo.playlist.id}`); // If an append errors, we often can't recover.// (see 
https://w3c.github.io/media-source/#sourcebuffer-append-error).//// Trigger a special error so that it can be handled separately from normal,// recoverable errors.this.trigger('appenderror');}appendToSourceBuffer_({segmentInfo,type,initSegment,data,bytes}) {// If this is a re-append, bytes were already created and don't need to be recreatedif (!bytes) {const segments = [data];let byteLength = data.byteLength;if (initSegment) {// if the media initialization segment is changing, append it before the content// segmentsegments.unshift(initSegment);byteLength += initSegment.byteLength;} // Technically we should be OK appending the init segment separately, however, we// haven't yet tested that, and prepending is how we have always done things.bytes = concatSegments({bytes: byteLength,segments});}this.sourceUpdater_.appendBuffer({segmentInfo,type,bytes}, this.handleAppendError_.bind(this, {segmentInfo,type,bytes}));}handleSegmentTimingInfo_(type, requestId, segmentTimingInfo) {if (!this.pendingSegment_ || requestId !== this.pendingSegment_.requestId) {return;}const segment = this.pendingSegment_.segment;const timingInfoProperty = `${type}TimingInfo`;if (!segment[timingInfoProperty]) {segment[timingInfoProperty] = {};}segment[timingInfoProperty].transmuxerPrependedSeconds = segmentTimingInfo.prependedContentDuration || 0;segment[timingInfoProperty].transmuxedPresentationStart = segmentTimingInfo.start.presentation;segment[timingInfoProperty].transmuxedDecodeStart = segmentTimingInfo.start.decode;segment[timingInfoProperty].transmuxedPresentationEnd = segmentTimingInfo.end.presentation;segment[timingInfoProperty].transmuxedDecodeEnd = segmentTimingInfo.end.decode; // mainly used as a reference for debuggingsegment[timingInfoProperty].baseMediaDecodeTime = segmentTimingInfo.baseMediaDecodeTime;}appendData_(segmentInfo, result) {const {type,data} = result;if (!data || !data.byteLength) {return;}if (type === 'audio' && this.audioDisabled_) {return;}const initSegment = this.getInitSegmentAndUpdateState_({type,initSegment: result.initSegment,playlist: segmentInfo.playlist,map: segmentInfo.isFmp4 ? 
segmentInfo.segment.map : null});this.appendToSourceBuffer_({segmentInfo,type,initSegment,data});}/*** load a specific segment from a request into the buffer** @private*/loadSegment_(segmentInfo) {this.state = 'WAITING';this.pendingSegment_ = segmentInfo;this.trimBackBuffer_(segmentInfo);if (typeof segmentInfo.timestampOffset === 'number') {if (this.transmuxer_) {this.transmuxer_.postMessage({action: 'clearAllMp4Captions'});}}if (!this.hasEnoughInfoToLoad_()) {this.loadQueue_.push(() => {// regenerate the audioAppendStart, timestampOffset, etc as they// may have changed since this function was added to the queue.const options = _extends$1({}, segmentInfo, {forceTimestampOffset: true});_extends$1(segmentInfo, this.generateSegmentInfo_(options));this.isPendingTimestampOffset_ = false;this.updateTransmuxerAndRequestSegment_(segmentInfo);});return;}this.updateTransmuxerAndRequestSegment_(segmentInfo);}updateTransmuxerAndRequestSegment_(segmentInfo) {// We'll update the source buffer's timestamp offset once we have transmuxed data, but// the transmuxer still needs to be updated before then.//// Even though keepOriginalTimestamps is set to true for the transmuxer, timestamp// offset must be passed to the transmuxer for stream correcting adjustments.if (this.shouldUpdateTransmuxerTimestampOffset_(segmentInfo.timestampOffset)) {this.gopBuffer_.length = 0; // gopsToAlignWith was set before the GOP buffer was clearedsegmentInfo.gopsToAlignWith = [];this.timeMapping_ = 0; // reset values in the transmuxer since a discontinuity should start freshthis.transmuxer_.postMessage({action: 'reset'});this.transmuxer_.postMessage({action: 'setTimestampOffset',timestampOffset: segmentInfo.timestampOffset});}const simpleSegment = this.createSimplifiedSegmentObj_(segmentInfo);const isEndOfStream = this.isEndOfStream_(segmentInfo.mediaIndex, segmentInfo.playlist, segmentInfo.partIndex);const isWalkingForward = this.mediaIndex !== null;const isDiscontinuity = segmentInfo.timeline !== this.currentTimeline_ &&// currentTimeline starts at -1, so we shouldn't end the timeline switching to 0,// the first timelinesegmentInfo.timeline > 0;const isEndOfTimeline = isEndOfStream || isWalkingForward && isDiscontinuity;this.logger_(`Requesting ${segmentInfoString(segmentInfo)}`); // If there's an init segment associated with this segment, but it is not cached (identified by a lack of bytes),// then this init segment has never been seen before and should be appended.//// At this point the content type (audio/video or both) is not yet known, but it should be safe to set// both to true and leave the decision of whether to append the init segment to append time.if (simpleSegment.map && !simpleSegment.map.bytes) {this.logger_('going to request init segment.');this.appendInitSegment_ = {video: true,audio: true};}segmentInfo.abortRequests = mediaSegmentRequest({xhr: this.vhs_.xhr,xhrOptions: this.xhrOptions_,decryptionWorker: this.decrypter_,segment: simpleSegment,abortFn: this.handleAbort_.bind(this, segmentInfo),progressFn: this.handleProgress_.bind(this),trackInfoFn: this.handleTrackInfo_.bind(this),timingInfoFn: this.handleTimingInfo_.bind(this),videoSegmentTimingInfoFn: this.handleSegmentTimingInfo_.bind(this, 'video', segmentInfo.requestId),audioSegmentTimingInfoFn: this.handleSegmentTimingInfo_.bind(this, 'audio', segmentInfo.requestId),captionsFn: this.handleCaptions_.bind(this),isEndOfTimeline,endedTimelineFn: () => {this.logger_('received endedtimeline callback');},id3Fn: this.handleId3_.bind(this),dataFn: 
this.handleData_.bind(this),
      doneFn: this.segmentRequestFinished_.bind(this),
      onTransmuxerLog: ({
        message,
        level,
        stream
      }) => {
        this.logger_(`${segmentInfoString(segmentInfo)} logged from transmuxer stream ${stream} as a ${level}: ${message}`);
      }
    });
  }
  /**
   * trim the back buffer so that we don't have too much data
   * in the source buffer
   *
   * @private
   *
   * @param {Object} segmentInfo - the current segment
   */
  trimBackBuffer_(segmentInfo) {
    const removeToTime = safeBackBufferTrimTime(this.seekable_(), this.currentTime_(), this.playlist_.targetDuration || 10);
    // Chrome has a hard limit of 150MB of
    // buffer and a very conservative "garbage collector"
    // We manually clear out the old buffer to ensure
    // we don't trigger the QuotaExceeded error
    // on the source buffer during subsequent appends
    if (removeToTime > 0) {
      this.remove(0, removeToTime);
    }
  }
  /**
   * create a simplified copy of the segment object with just the
   * information necessary to perform the XHR and decryption
   *
   * @private
   *
   * @param {Object} segmentInfo - the current segment
   * @return {Object} a simplified segment object copy
   */
  createSimplifiedSegmentObj_(segmentInfo) {
    const segment = segmentInfo.segment;
    const part = segmentInfo.part;
    const simpleSegment = {
      resolvedUri: part ? part.resolvedUri : segment.resolvedUri,
      byterange: part ? part.byterange : segment.byterange,
      requestId: segmentInfo.requestId,
      transmuxer: segmentInfo.transmuxer,
      audioAppendStart: segmentInfo.audioAppendStart,
      gopsToAlignWith: segmentInfo.gopsToAlignWith,
      part: segmentInfo.part
    };
    const previousSegment = segmentInfo.playlist.segments[segmentInfo.mediaIndex - 1];
    if (previousSegment && previousSegment.timeline === segment.timeline) {
      // The baseStartTime of a segment is used to handle rollover when probing the TS
      // segment to retrieve timing information. Since the probe only looks at the media's
      // times (e.g., PTS and DTS values of the segment), and doesn't consider the
      // player's time (e.g., player.currentTime()), baseStartTime should reflect the
      // media time as well. transmuxedDecodeEnd represents the end time of a segment, in
      // seconds of media time, so should be used here. The previous segment is used since
      // the end of the previous segment should represent the beginning of the current
      // segment, so long as they are on the same timeline.
      if (previousSegment.videoTimingInfo) {
        simpleSegment.baseStartTime = previousSegment.videoTimingInfo.transmuxedDecodeEnd;
      } else if (previousSegment.audioTimingInfo) {
        simpleSegment.baseStartTime = previousSegment.audioTimingInfo.transmuxedDecodeEnd;
      }
    }
    if (segment.key) {
      // if the media sequence is greater than 2^32, the IV will be incorrect
      // assuming 10s segments, that would be about 1300 years
      const iv = segment.key.iv || new Uint32Array([0, 0, 0, segmentInfo.mediaIndex + segmentInfo.playlist.mediaSequence]);
      simpleSegment.key = this.segmentKey(segment.key);
      simpleSegment.key.iv = iv;
    }
    if (segment.map) {
      simpleSegment.map = this.initSegmentForMap(segment.map);
    }
    return simpleSegment;
  }
  saveTransferStats_(stats) {
    // every request counts as a media request even if it has been aborted
    // or canceled due to a timeout
    this.mediaRequests += 1;
    if (stats) {
      this.mediaBytesTransferred += stats.bytesReceived;
      this.mediaTransferDuration += stats.roundTripTime;
    }
  }
  saveBandwidthRelatedStats_(duration, stats) {
    // byteLength will be used for throughput, and should be based on bytes received,
    // which we only know at the end of the request and should reflect total bytes
    // downloaded rather than just bytes processed from components of the segment
    this.pendingSegment_.byteLength = stats.bytesReceived;
    if (duration < MIN_SEGMENT_DURATION_TO_SAVE_STATS) {
      this.logger_(`Ignoring segment's bandwidth because its duration of ${duration}` + ` is less than the min to record ${MIN_SEGMENT_DURATION_TO_SAVE_STATS}`);
      return;
    }
    this.bandwidth = stats.bandwidth;
    this.roundTrip = stats.roundTripTime;
  }
  handleTimeout_() {
    // although the VTT segment loader bandwidth isn't really used, it's good to
    // maintain functionality between segment loaders
    this.mediaRequestsTimedout += 1;
    this.bandwidth = 1;
    this.roundTrip = NaN;
    this.trigger('bandwidthupdate');
    this.trigger('timeout');
  }
  /**
   * Handle the callback from the segmentRequest function and set the
   * associated SegmentLoader state and errors if necessary
   *
   * @private
   */
  segmentRequestFinished_(error, simpleSegment, result) {
    // TODO handle special cases, e.g., muxed audio/video but only audio in the segment
    // check the call queue directly since this function doesn't need to deal with any
    // data, and can continue even if the source buffers are not set up and we didn't get
    // any data from the segment
    if (this.callQueue_.length) {
      this.callQueue_.push(this.segmentRequestFinished_.bind(this, error, simpleSegment, result));
      return;
    }
    this.saveTransferStats_(simpleSegment.stats);
    // The request was aborted and the SegmentLoader has already been reset
    if (!this.pendingSegment_) {
      return;
    }
    // the request was aborted and the SegmentLoader has already started
    // another request.
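// --- [editor's sketch, not part of video.js] --------------------------------
// createSimplifiedSegmentObj_ above falls back to the HLS default-IV convention
// when an AES-128 key carries no explicit IV: the segment's media sequence
// number, carried in the last of four 32-bit words. A standalone sketch of that
// construction; defaultIvForMediaSequence is a hypothetical name.
function defaultIvForMediaSequence(mediaIndex, mediaSequence) {
  // three zero words, then the media sequence number of this segment
  return new Uint32Array([0, 0, 0, mediaIndex + mediaSequence]);
}
console.log(defaultIvForMediaSequence(3, 100)); // Uint32Array [0, 0, 0, 103]
// -----------------------------------------------------------------------------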
this can happen when the timeout for an aborted// request triggers due to a limitation in the XHR library// do not count this as any sort of request or we risk double-countingif (simpleSegment.requestId !== this.pendingSegment_.requestId) {return;} // an error occurred from the active pendingSegment_ so reset everythingif (error) {this.pendingSegment_ = null;this.state = 'READY'; // aborts are not a true error condition and nothing corrective needs to be doneif (error.code === REQUEST_ERRORS.ABORTED) {return;}this.pause(); // the error is really just that at least one of the requests timed-out// set the bandwidth to a very low value and trigger an ABR switch to// take emergency actionif (error.code === REQUEST_ERRORS.TIMEOUT) {this.handleTimeout_();return;} // if control-flow has arrived here, then the error is real// emit an error event to exclude the current playlistthis.mediaRequestsErrored += 1;this.error(error);this.trigger('error');return;}const segmentInfo = this.pendingSegment_; // the response was a success so set any bandwidth stats the request// generated for ABR purposesthis.saveBandwidthRelatedStats_(segmentInfo.duration, simpleSegment.stats);segmentInfo.endOfAllRequests = simpleSegment.endOfAllRequests;if (result.gopInfo) {this.gopBuffer_ = updateGopBuffer(this.gopBuffer_, result.gopInfo, this.safeAppend_);} // Although we may have already started appending on progress, we shouldn't switch the// state away from loading until we are officially done loading the segment data.this.state = 'APPENDING'; // used for testingthis.trigger('appending');this.waitForAppendsToComplete_(segmentInfo);}setTimeMapping_(timeline) {const timelineMapping = this.syncController_.mappingForTimeline(timeline);if (timelineMapping !== null) {this.timeMapping_ = timelineMapping;}}updateMediaSecondsLoaded_(segment) {if (typeof segment.start === 'number' && typeof segment.end === 'number') {this.mediaSecondsLoaded += segment.end - segment.start;} else {this.mediaSecondsLoaded += segment.duration;}}shouldUpdateTransmuxerTimestampOffset_(timestampOffset) {if (timestampOffset === null) {return false;} // note that we're potentially using the same timestamp offset for both video and// audioif (this.loaderType_ === 'main' && timestampOffset !== this.sourceUpdater_.videoTimestampOffset()) {return true;}if (!this.audioDisabled_ && timestampOffset !== this.sourceUpdater_.audioTimestampOffset()) {return true;}return false;}trueSegmentStart_({currentStart,playlist,mediaIndex,firstVideoFrameTimeForData,currentVideoTimestampOffset,useVideoTimingInfo,videoTimingInfo,audioTimingInfo}) {if (typeof currentStart !== 'undefined') {// if start was set once, keep using itreturn currentStart;}if (!useVideoTimingInfo) {return audioTimingInfo.start;}const previousSegment = playlist.segments[mediaIndex - 1]; // The start of a segment should be the start of the first full frame contained// within that segment. Since the transmuxer maintains a cache of incomplete data// from and/or the last frame seen, the start time may reflect a frame that starts// in the previous segment. 
Check for that case and ensure the start time is// accurate for the segment.if (mediaIndex === 0 || !previousSegment || typeof previousSegment.start === 'undefined' || previousSegment.end !== firstVideoFrameTimeForData + currentVideoTimestampOffset) {return firstVideoFrameTimeForData;}return videoTimingInfo.start;}waitForAppendsToComplete_(segmentInfo) {const trackInfo = this.getCurrentMediaInfo_(segmentInfo);if (!trackInfo) {this.error({message: 'No starting media returned, likely due to an unsupported media format.',playlistExclusionDuration: Infinity});this.trigger('error');return;} // Although transmuxing is done, appends may not yet be finished. Throw a marker// on each queue this loader is responsible for to ensure that the appends are// complete.const {hasAudio,hasVideo,isMuxed} = trackInfo;const waitForVideo = this.loaderType_ === 'main' && hasVideo;const waitForAudio = !this.audioDisabled_ && hasAudio && !isMuxed;segmentInfo.waitingOnAppends = 0; // segments with no dataif (!segmentInfo.hasAppendedData_) {if (!segmentInfo.timingInfo && typeof segmentInfo.timestampOffset === 'number') {// When there's no audio or video data in the segment, there's no audio or video// timing information.//// If there's no audio or video timing information, then the timestamp offset// can't be adjusted to the appropriate value for the transmuxer and source// buffers.//// Therefore, the next segment should be used to set the timestamp offset.this.isPendingTimestampOffset_ = true;} // override settings for metadata only segmentssegmentInfo.timingInfo = {start: 0};segmentInfo.waitingOnAppends++;if (!this.isPendingTimestampOffset_) {// update the timestampoffsetthis.updateSourceBufferTimestampOffset_(segmentInfo); // make sure the metadata queue is processed even though we have// no video/audio data.this.processMetadataQueue_();} // append is "done" instantly with no data.this.checkAppendsDone_(segmentInfo);return;} // Since source updater could call back synchronously, do the increments first.if (waitForVideo) {segmentInfo.waitingOnAppends++;}if (waitForAudio) {segmentInfo.waitingOnAppends++;}if (waitForVideo) {this.sourceUpdater_.videoQueueCallback(this.checkAppendsDone_.bind(this, segmentInfo));}if (waitForAudio) {this.sourceUpdater_.audioQueueCallback(this.checkAppendsDone_.bind(this, segmentInfo));}}checkAppendsDone_(segmentInfo) {if (this.checkForAbort_(segmentInfo.requestId)) {return;}segmentInfo.waitingOnAppends--;if (segmentInfo.waitingOnAppends === 0) {this.handleAppendsDone_();}}checkForIllegalMediaSwitch(trackInfo) {const illegalMediaSwitchError = illegalMediaSwitch(this.loaderType_, this.getCurrentMediaInfo_(), trackInfo);if (illegalMediaSwitchError) {this.error({message: illegalMediaSwitchError,playlistExclusionDuration: Infinity});this.trigger('error');return true;}return false;}updateSourceBufferTimestampOffset_(segmentInfo) {if (segmentInfo.timestampOffset === null ||// we don't yet have the start for whatever media type (video or audio) has// priority, timing-wise, so we must waittypeof segmentInfo.timingInfo.start !== 'number' ||// already updated the timestamp offset for this segmentsegmentInfo.changedTimestampOffset ||// the alt audio loader should not be responsible for setting the timestamp offsetthis.loaderType_ !== 'main') {return;}let didChange = false; // Primary timing goes by video, and audio is trimmed in the transmuxer, meaning that// the timing info here comes from video. 
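// --- [editor's sketch, not part of video.js] --------------------------------
// waitForAppendsToComplete_ / checkAppendsDone_ above form a small countdown:
// bump waitingOnAppends once per source buffer that must finish, let each
// buffer's queue callback decrement it, and run the "appends done" work when it
// reaches zero. A standalone sketch with a hypothetical name (appendCountdown);
// queueMicrotask stands in for the real video/audio queue callbacks.
function appendCountdown(waitForVideo, waitForAudio, onAllAppendsDone) {
  let waitingOnAppends = 0;
  const checkDone = () => {
    waitingOnAppends--;
    if (waitingOnAppends === 0) {
      onAllAppendsDone();
    }
  };
  // do the increments first, since callbacks may fire synchronously
  if (waitForVideo) waitingOnAppends++;
  if (waitForAudio) waitingOnAppends++;
  if (waitForVideo) queueMicrotask(checkDone);
  if (waitForAudio) queueMicrotask(checkDone);
}
appendCountdown(true, true, () => console.log('all appends done'));
// -----------------------------------------------------------------------------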
In the event that the audio is longer than// the video, this will trim the start of the audio.// This also trims any offset from 0 at the beginning of the mediasegmentInfo.timestampOffset -= this.getSegmentStartTimeForTimestampOffsetCalculation_({videoTimingInfo: segmentInfo.segment.videoTimingInfo,audioTimingInfo: segmentInfo.segment.audioTimingInfo,timingInfo: segmentInfo.timingInfo}); // In the event that there are part segment downloads, each will try to update the// timestamp offset. Retaining this bit of state prevents us from updating in the// future (within the same segment), however, there may be a better way to handle it.segmentInfo.changedTimestampOffset = true;if (segmentInfo.timestampOffset !== this.sourceUpdater_.videoTimestampOffset()) {this.sourceUpdater_.videoTimestampOffset(segmentInfo.timestampOffset);didChange = true;}if (segmentInfo.timestampOffset !== this.sourceUpdater_.audioTimestampOffset()) {this.sourceUpdater_.audioTimestampOffset(segmentInfo.timestampOffset);didChange = true;}if (didChange) {this.trigger('timestampoffset');}}getSegmentStartTimeForTimestampOffsetCalculation_({videoTimingInfo,audioTimingInfo,timingInfo}) {if (!this.useDtsForTimestampOffset_) {return timingInfo.start;}if (videoTimingInfo && typeof videoTimingInfo.transmuxedDecodeStart === 'number') {return videoTimingInfo.transmuxedDecodeStart;} // handle audio onlyif (audioTimingInfo && typeof audioTimingInfo.transmuxedDecodeStart === 'number') {return audioTimingInfo.transmuxedDecodeStart;} // handle content not transmuxed (e.g., MP4)return timingInfo.start;}updateTimingInfoEnd_(segmentInfo) {segmentInfo.timingInfo = segmentInfo.timingInfo || {};const trackInfo = this.getMediaInfo_();const useVideoTimingInfo = this.loaderType_ === 'main' && trackInfo && trackInfo.hasVideo;const prioritizedTimingInfo = useVideoTimingInfo && segmentInfo.videoTimingInfo ? segmentInfo.videoTimingInfo : segmentInfo.audioTimingInfo;if (!prioritizedTimingInfo) {return;}segmentInfo.timingInfo.end = typeof prioritizedTimingInfo.end === 'number' ?// End time may not exist in a case where we aren't parsing the full segment (one// current example is the case of fmp4), so use the rough duration to calculate an// end time.prioritizedTimingInfo.end : prioritizedTimingInfo.start + segmentInfo.duration;}/*** callback to run when appendBuffer is finished. detects if we are* in a good state to do things with the data we got, or if we need* to wait for more** @private*/handleAppendsDone_() {// appendsdone can cause an abortif (this.pendingSegment_) {this.trigger('appendsdone');}if (!this.pendingSegment_) {this.state = 'READY'; // TODO should this move into this.checkForAbort to speed up requests post abort in// all appending cases?if (!this.paused()) {this.monitorBuffer_();}return;}const segmentInfo = this.pendingSegment_; // Now that the end of the segment has been reached, we can set the end time. It's// best to wait until all appends are done so we're sure that the primary media is// finished (and we have its end time).this.updateTimingInfoEnd_(segmentInfo);if (this.shouldSaveSegmentTimingInfo_) {// Timeline mappings should only be saved for the main loader. This is for multiple// reasons://// 1) Only one mapping is saved per timeline, meaning that if both the audio loader// and the main loader try to save the timeline mapping, whichever comes later// will overwrite the first. 
In theory this is OK, as the mappings should be the// same, however, it breaks for (2)// 2) In the event of a live stream, the initial live point will make for a somewhat// arbitrary mapping. If audio and video streams are not perfectly in-sync, then// the mapping will be off for one of the streams, dependent on which one was// first saved (see (1)).// 3) Primary timing goes by video in VHS, so the mapping should be video.//// Since the audio loader will wait for the main loader to load the first segment,// the main loader will save the first timeline mapping, and ensure that there won't// be a case where audio loads two segments without saving a mapping (thus leading// to missing segment timing info).this.syncController_.saveSegmentTimingInfo({segmentInfo,shouldSaveTimelineMapping: this.loaderType_ === 'main'});}const segmentDurationMessage = getTroublesomeSegmentDurationMessage(segmentInfo, this.sourceType_);if (segmentDurationMessage) {if (segmentDurationMessage.severity === 'warn') {videojs.log.warn(segmentDurationMessage.message);} else {this.logger_(segmentDurationMessage.message);}}this.recordThroughput_(segmentInfo);this.pendingSegment_ = null;this.state = 'READY';if (segmentInfo.isSyncRequest) {this.trigger('syncinfoupdate'); // if the sync request was not appended// then it was not the correct segment.// throw it away and use the data it gave us// to get the correct one.if (!segmentInfo.hasAppendedData_) {this.logger_(`Throwing away un-appended sync request ${segmentInfoString(segmentInfo)}`);return;}}this.logger_(`Appended ${segmentInfoString(segmentInfo)}`);this.addSegmentMetadataCue_(segmentInfo);this.fetchAtBuffer_ = true;if (this.currentTimeline_ !== segmentInfo.timeline) {this.timelineChangeController_.lastTimelineChange({type: this.loaderType_,from: this.currentTimeline_,to: segmentInfo.timeline}); // If audio is not disabled, the main segment loader is responsible for updating// the audio timeline as well. If the content is video only, this won't have any// impact.if (this.loaderType_ === 'main' && !this.audioDisabled_) {this.timelineChangeController_.lastTimelineChange({type: 'audio',from: this.currentTimeline_,to: segmentInfo.timeline});}}this.currentTimeline_ = segmentInfo.timeline; // We must update the syncinfo to recalculate the seekable range before// the following conditional otherwise it may consider this a bad "guess"// and attempt to resync when the post-update seekable window and live// point would mean that this was the perfect segment to fetchthis.trigger('syncinfoupdate');const segment = segmentInfo.segment;const part = segmentInfo.part;const badSegmentGuess = segment.end && this.currentTime_() - segment.end > segmentInfo.playlist.targetDuration * 3;const badPartGuess = part && part.end && this.currentTime_() - part.end > segmentInfo.playlist.partTargetDuration * 3; // If we previously appended a segment/part that ends more than 3 part/targetDurations before// the currentTime_ that means that our conservative guess was too conservative.// In that case, reset the loader state so that we try to use any information gained// from the previous request to create a new, more accurate, sync-point.if (badSegmentGuess || badPartGuess) {this.logger_(`bad ${badSegmentGuess ? 
'segment' : 'part'} ${segmentInfoString(segmentInfo)}`);this.resetEverything();return;}const isWalkingForward = this.mediaIndex !== null; // Don't do a rendition switch unless we have enough time to get a sync segment// and conservatively guessif (isWalkingForward) {this.trigger('bandwidthupdate');}this.trigger('progress');this.mediaIndex = segmentInfo.mediaIndex;this.partIndex = segmentInfo.partIndex; // any time an update finishes and the last segment is in the// buffer, end the stream. this ensures the "ended" event will// fire if playback reaches that point.if (this.isEndOfStream_(segmentInfo.mediaIndex, segmentInfo.playlist, segmentInfo.partIndex)) {this.endOfStream();} // used for testingthis.trigger('appended');if (segmentInfo.hasAppendedData_) {this.mediaAppends++;}if (!this.paused()) {this.monitorBuffer_();}}/*** Records the current throughput of the decrypt, transmux, and append* portion of the semgment pipeline. `throughput.rate` is a the cumulative* moving average of the throughput. `throughput.count` is the number of* data points in the average.** @private* @param {Object} segmentInfo the object returned by loadSegment*/recordThroughput_(segmentInfo) {if (segmentInfo.duration < MIN_SEGMENT_DURATION_TO_SAVE_STATS) {this.logger_(`Ignoring segment's throughput because its duration of ${segmentInfo.duration}` + ` is less than the min to record ${MIN_SEGMENT_DURATION_TO_SAVE_STATS}`);return;}const rate = this.throughput.rate; // Add one to the time to ensure that we don't accidentally attempt to divide// by zero in the case where the throughput is ridiculously highconst segmentProcessingTime = Date.now() - segmentInfo.endOfAllRequests + 1; // Multiply by 8000 to convert from bytes/millisecond to bits/secondconst segmentProcessingThroughput = Math.floor(segmentInfo.byteLength / segmentProcessingTime * 8 * 1000); // This is just a cumulative moving average calculation:// newAvg = oldAvg + (sample - oldAvg) / (sampleCount + 1)this.throughput.rate += (segmentProcessingThroughput - rate) / ++this.throughput.count;}/*** Adds a cue to the segment-metadata track with some metadata information about the* segment** @private* @param {Object} segmentInfo* the object returned by loadSegment* @method addSegmentMetadataCue_*/addSegmentMetadataCue_(segmentInfo) {if (!this.segmentMetadataTrack_) {return;}const segment = segmentInfo.segment;const start = segment.start;const end = segment.end; // Do not try adding the cue if the start and end times are invalid.if (!finite(start) || !finite(end)) {return;}removeCuesFromTrack(start, end, this.segmentMetadataTrack_);const Cue = window.WebKitDataCue || window.VTTCue;const value = {custom: segment.custom,dateTimeObject: segment.dateTimeObject,dateTimeString: segment.dateTimeString,programDateTime: segment.programDateTime,bandwidth: segmentInfo.playlist.attributes.BANDWIDTH,resolution: segmentInfo.playlist.attributes.RESOLUTION,codecs: segmentInfo.playlist.attributes.CODECS,byteLength: segmentInfo.byteLength,uri: segmentInfo.uri,timeline: segmentInfo.timeline,playlist: segmentInfo.playlist.id,start,end};const data = JSON.stringify(value);const cue = new Cue(start, end, data); // Attach the metadata to the value property of the cue to keep consistency between// the differences of WebKitDataCue in safari and VTTCue in other browserscue.value = value;this.segmentMetadataTrack_.addCue(cue);}}function noop() {}const toTitleCase = function (string) {if (typeof string !== 'string') {return string;}return string.replace(/./, w => w.toUpperCase());};/*** @file 
source-updater.js*/const bufferTypes = ['video', 'audio'];const updating = (type, sourceUpdater) => {const sourceBuffer = sourceUpdater[`${type}Buffer`];return sourceBuffer && sourceBuffer.updating || sourceUpdater.queuePending[type];};const nextQueueIndexOfType = (type, queue) => {for (let i = 0; i < queue.length; i++) {const queueEntry = queue[i];if (queueEntry.type === 'mediaSource') {// If the next entry is a media source entry (uses multiple source buffers), block// processing to allow it to go through first.return null;}if (queueEntry.type === type) {return i;}}return null;};const shiftQueue = (type, sourceUpdater) => {if (sourceUpdater.queue.length === 0) {return;}let queueIndex = 0;let queueEntry = sourceUpdater.queue[queueIndex];if (queueEntry.type === 'mediaSource') {if (!sourceUpdater.updating() && sourceUpdater.mediaSource.readyState !== 'closed') {sourceUpdater.queue.shift();queueEntry.action(sourceUpdater);if (queueEntry.doneFn) {queueEntry.doneFn();} // Only specific source buffer actions must wait for async updateend events. Media// Source actions process synchronously. Therefore, both audio and video source// buffers are now clear to process the next queue entries.shiftQueue('audio', sourceUpdater);shiftQueue('video', sourceUpdater);} // Media Source actions require both source buffers, so if the media source action// couldn't process yet (because one or both source buffers are busy), block other// queue actions until both are available and the media source action can process.return;}if (type === 'mediaSource') {// If the queue was shifted by a media source action (this happens when pushing a// media source action onto the queue), then it wasn't from an updateend event from an// audio or video source buffer, so there's no change from previous state, and no// processing should be done.return;} // Media source queue entries don't need to consider whether the source updater is// started (i.e., source buffers are created) as they don't need the source buffers, but// source buffer queue entries do.if (!sourceUpdater.ready() || sourceUpdater.mediaSource.readyState === 'closed' || updating(type, sourceUpdater)) {return;}if (queueEntry.type !== type) {queueIndex = nextQueueIndexOfType(type, sourceUpdater.queue);if (queueIndex === null) {// Either there's no queue entry that uses this source buffer type in the queue, or// there's a media source queue entry before the next entry of this type, in which// case wait for that action to process first.return;}queueEntry = sourceUpdater.queue[queueIndex];}sourceUpdater.queue.splice(queueIndex, 1); // Keep a record that this source buffer type is in use.//// The queue pending operation must be set before the action is performed in the event// that the action results in a synchronous event that is acted upon. 
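// --- [editor's sketch, not part of video.js] --------------------------------
// The queue above serializes work per source buffer, and a 'mediaSource' entry
// acts as a barrier: audio/video entries behind it wait until it has run. A
// standalone restatement of that lookup; nextIndexOfType is a hypothetical name
// mirroring nextQueueIndexOfType above.
function nextIndexOfType(type, queue) {
  for (let i = 0; i < queue.length; i++) {
    if (queue[i].type === 'mediaSource') {
      // a media source action needs both buffers; never skip past it
      return null;
    }
    if (queue[i].type === type) {
      return i;
    }
  }
  return null;
}
const exampleQueue = [
  { type: 'audio', name: 'appendBuffer' },
  { type: 'mediaSource', name: 'duration' },
  { type: 'video', name: 'appendBuffer' }
];
console.log(nextIndexOfType('audio', exampleQueue)); // 0
console.log(nextIndexOfType('video', exampleQueue)); // null (blocked by 'duration')
// -----------------------------------------------------------------------------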
For instance, if// an exception is thrown that can be handled, it's possible that new actions will be// appended to an empty queue and immediately executed, but would not have the correct// pending information if this property was set after the action was performed.sourceUpdater.queuePending[type] = queueEntry;queueEntry.action(type, sourceUpdater);if (!queueEntry.doneFn) {// synchronous operation, process next entrysourceUpdater.queuePending[type] = null;shiftQueue(type, sourceUpdater);return;}};const cleanupBuffer = (type, sourceUpdater) => {const buffer = sourceUpdater[`${type}Buffer`];const titleType = toTitleCase(type);if (!buffer) {return;}buffer.removeEventListener('updateend', sourceUpdater[`on${titleType}UpdateEnd_`]);buffer.removeEventListener('error', sourceUpdater[`on${titleType}Error_`]);sourceUpdater.codecs[type] = null;sourceUpdater[`${type}Buffer`] = null;};const inSourceBuffers = (mediaSource, sourceBuffer) => mediaSource && sourceBuffer && Array.prototype.indexOf.call(mediaSource.sourceBuffers, sourceBuffer) !== -1;const actions = {appendBuffer: (bytes, segmentInfo, onError) => (type, sourceUpdater) => {const sourceBuffer = sourceUpdater[`${type}Buffer`]; // can't do anything if the media source / source buffer is null// or the media source does not contain this source buffer.if (!inSourceBuffers(sourceUpdater.mediaSource, sourceBuffer)) {return;}sourceUpdater.logger_(`Appending segment ${segmentInfo.mediaIndex}'s ${bytes.length} bytes to ${type}Buffer`);try {sourceBuffer.appendBuffer(bytes);} catch (e) {sourceUpdater.logger_(`Error with code ${e.code} ` + (e.code === QUOTA_EXCEEDED_ERR ? '(QUOTA_EXCEEDED_ERR) ' : '') + `when appending segment ${segmentInfo.mediaIndex} to ${type}Buffer`);sourceUpdater.queuePending[type] = null;onError(e);}},remove: (start, end) => (type, sourceUpdater) => {const sourceBuffer = sourceUpdater[`${type}Buffer`]; // can't do anything if the media source / source buffer is null// or the media source does not contain this source buffer.if (!inSourceBuffers(sourceUpdater.mediaSource, sourceBuffer)) {return;}sourceUpdater.logger_(`Removing ${start} to ${end} from ${type}Buffer`);try {sourceBuffer.remove(start, end);} catch (e) {sourceUpdater.logger_(`Remove ${start} to ${end} from ${type}Buffer failed`);}},timestampOffset: offset => (type, sourceUpdater) => {const sourceBuffer = sourceUpdater[`${type}Buffer`]; // can't do anything if the media source / source buffer is null// or the media source does not contain this source buffer.if (!inSourceBuffers(sourceUpdater.mediaSource, sourceBuffer)) {return;}sourceUpdater.logger_(`Setting ${type}timestampOffset to ${offset}`);sourceBuffer.timestampOffset = offset;},callback: callback => (type, sourceUpdater) => {callback();},endOfStream: error => sourceUpdater => {if (sourceUpdater.mediaSource.readyState !== 'open') {return;}sourceUpdater.logger_(`Calling mediaSource endOfStream(${error || ''})`);try {sourceUpdater.mediaSource.endOfStream(error);} catch (e) {videojs.log.warn('Failed to call media source endOfStream', e);}},duration: duration => sourceUpdater => {sourceUpdater.logger_(`Setting mediaSource duration to ${duration}`);try {sourceUpdater.mediaSource.duration = duration;} catch (e) {videojs.log.warn('Failed to set media source duration', e);}},abort: () => (type, sourceUpdater) => {if (sourceUpdater.mediaSource.readyState !== 'open') {return;}const sourceBuffer = sourceUpdater[`${type}Buffer`]; // can't do anything if the media source / source buffer is null// or the media source does not 
contain this source buffer.if (!inSourceBuffers(sourceUpdater.mediaSource, sourceBuffer)) {return;}sourceUpdater.logger_(`calling abort on ${type}Buffer`);try {sourceBuffer.abort();} catch (e) {videojs.log.warn(`Failed to abort on ${type}Buffer`, e);}},addSourceBuffer: (type, codec) => sourceUpdater => {const titleType = toTitleCase(type);const mime = getMimeForCodec(codec);sourceUpdater.logger_(`Adding ${type}Buffer with codec ${codec} to mediaSource`);const sourceBuffer = sourceUpdater.mediaSource.addSourceBuffer(mime);sourceBuffer.addEventListener('updateend', sourceUpdater[`on${titleType}UpdateEnd_`]);sourceBuffer.addEventListener('error', sourceUpdater[`on${titleType}Error_`]);sourceUpdater.codecs[type] = codec;sourceUpdater[`${type}Buffer`] = sourceBuffer;},removeSourceBuffer: type => sourceUpdater => {const sourceBuffer = sourceUpdater[`${type}Buffer`];cleanupBuffer(type, sourceUpdater); // can't do anything if the media source / source buffer is null// or the media source does not contain this source buffer.if (!inSourceBuffers(sourceUpdater.mediaSource, sourceBuffer)) {return;}sourceUpdater.logger_(`Removing ${type}Buffer with codec ${sourceUpdater.codecs[type]} from mediaSource`);try {sourceUpdater.mediaSource.removeSourceBuffer(sourceBuffer);} catch (e) {videojs.log.warn(`Failed to removeSourceBuffer ${type}Buffer`, e);}},changeType: codec => (type, sourceUpdater) => {const sourceBuffer = sourceUpdater[`${type}Buffer`];const mime = getMimeForCodec(codec); // can't do anything if the media source / source buffer is null// or the media source does not contain this source buffer.if (!inSourceBuffers(sourceUpdater.mediaSource, sourceBuffer)) {return;} // do not update codec if we don't need to.if (sourceUpdater.codecs[type] === codec) {return;}sourceUpdater.logger_(`changing ${type}Buffer codec from ${sourceUpdater.codecs[type]} to ${codec}`); // check if change to the provided type is supportedtry {sourceBuffer.changeType(mime);sourceUpdater.codecs[type] = codec;} catch (e) {videojs.log.warn(`Failed to changeType on ${type}Buffer`, e);}}};const pushQueue = ({type,sourceUpdater,action,doneFn,name}) => {sourceUpdater.queue.push({type,action,doneFn,name});shiftQueue(type, sourceUpdater);};const onUpdateend = (type, sourceUpdater) => e => {// Although there should, in theory, be a pending action for any updateend receieved,// there are some actions that may trigger updateend events without set definitions in// the w3c spec. For instance, setting the duration on the media source may trigger// updateend events on source buffers. This does not appear to be in the spec. As such,// if we encounter an updateend without a corresponding pending action from our queue// for that source buffer type, process the next action.if (sourceUpdater.queuePending[type]) {const doneFn = sourceUpdater.queuePending[type].doneFn;sourceUpdater.queuePending[type] = null;if (doneFn) {// if there's an error, report itdoneFn(sourceUpdater[`${type}Error_`]);}}shiftQueue(type, sourceUpdater);};/*** A queue of callbacks to be serialized and applied when a* MediaSource and its associated SourceBuffers are not in the* updating state. 
 * It is used by the segment loader to update the
 * underlying SourceBuffers when new data is loaded, for instance.
 *
 * @class SourceUpdater
 * @param {MediaSource} mediaSource the MediaSource to create the SourceBuffer from
 * @param {string} mimeType the desired MIME type of the underlying SourceBuffer
 */
class SourceUpdater extends videojs.EventTarget {
  constructor(mediaSource) {
    super();
    this.mediaSource = mediaSource;
    this.sourceopenListener_ = () => shiftQueue('mediaSource', this);
    this.mediaSource.addEventListener('sourceopen', this.sourceopenListener_);
    this.logger_ = logger('SourceUpdater');
    // initial timestamp offset is 0
    this.audioTimestampOffset_ = 0;
    this.videoTimestampOffset_ = 0;
    this.queue = [];
    this.queuePending = {
      audio: null,
      video: null
    };
    this.delayedAudioAppendQueue_ = [];
    this.videoAppendQueued_ = false;
    this.codecs = {};
    this.onVideoUpdateEnd_ = onUpdateend('video', this);
    this.onAudioUpdateEnd_ = onUpdateend('audio', this);
    this.onVideoError_ = e => {
      // used for debugging
      this.videoError_ = e;
    };
    this.onAudioError_ = e => {
      // used for debugging
      this.audioError_ = e;
    };
    this.createdSourceBuffers_ = false;
    this.initializedEme_ = false;
    this.triggeredReady_ = false;
  }
  initializedEme() {
    this.initializedEme_ = true;
    this.triggerReady();
  }
  hasCreatedSourceBuffers() {
    // if false, likely waiting on one of the segment loaders to get enough data to create
    // source buffers
    return this.createdSourceBuffers_;
  }
  hasInitializedAnyEme() {
    return this.initializedEme_;
  }
  ready() {
    return this.hasCreatedSourceBuffers() && this.hasInitializedAnyEme();
  }
  createSourceBuffers(codecs) {
    if (this.hasCreatedSourceBuffers()) {
      // already created them before
      return;
    }
    // the initial addOrChangeSourceBuffers will always be
    // two add buffers.
    this.addOrChangeSourceBuffers(codecs);
    this.createdSourceBuffers_ = true;
    this.trigger('createdsourcebuffers');
    this.triggerReady();
  }
  triggerReady() {
    // only allow ready to be triggered once, this prevents the case
    // where:
    // 1. we trigger createdsourcebuffers
    // 2. ie 11 synchronously initializes eme
    // 3. the synchronous initialization causes us to trigger ready
    // 4.
We go back to the ready check in createSourceBuffers and ready is triggered again.if (this.ready() && !this.triggeredReady_) {this.triggeredReady_ = true;this.trigger('ready');}}/*** Add a type of source buffer to the media source.** @param {string} type* The type of source buffer to add.** @param {string} codec* The codec to add the source buffer with.*/addSourceBuffer(type, codec) {pushQueue({type: 'mediaSource',sourceUpdater: this,action: actions.addSourceBuffer(type, codec),name: 'addSourceBuffer'});}/*** call abort on a source buffer.** @param {string} type* The type of source buffer to call abort on.*/abort(type) {pushQueue({type,sourceUpdater: this,action: actions.abort(type),name: 'abort'});}/*** Call removeSourceBuffer and remove a specific type* of source buffer on the mediaSource.** @param {string} type* The type of source buffer to remove.*/removeSourceBuffer(type) {if (!this.canRemoveSourceBuffer()) {videojs.log.error('removeSourceBuffer is not supported!');return;}pushQueue({type: 'mediaSource',sourceUpdater: this,action: actions.removeSourceBuffer(type),name: 'removeSourceBuffer'});}/*** Whether or not the removeSourceBuffer function is supported* on the mediaSource.** @return {boolean}* if removeSourceBuffer can be called.*/canRemoveSourceBuffer() {// As of Firefox 83 removeSourceBuffer// throws errors, so we report that it does not support this.return !videojs.browser.IS_FIREFOX && window.MediaSource && window.MediaSource.prototype && typeof window.MediaSource.prototype.removeSourceBuffer === 'function';}/*** Whether or not the changeType function is supported* on our SourceBuffers.** @return {boolean}* if changeType can be called.*/static canChangeType() {return window.SourceBuffer && window.SourceBuffer.prototype && typeof window.SourceBuffer.prototype.changeType === 'function';}/*** Whether or not the changeType function is supported* on our SourceBuffers.** @return {boolean}* if changeType can be called.*/canChangeType() {return this.constructor.canChangeType();}/*** Call the changeType function on a source buffer, given the code and type.** @param {string} type* The type of source buffer to call changeType on.** @param {string} codec* The codec string to change type with on the source buffer.*/changeType(type, codec) {if (!this.canChangeType()) {videojs.log.error('changeType is not supported!');return;}pushQueue({type,sourceUpdater: this,action: actions.changeType(codec),name: 'changeType'});}/*** Add source buffers with a codec or, if they are already created,* call changeType on source buffers using changeType.** @param {Object} codecs* Codecs to switch to*/addOrChangeSourceBuffers(codecs) {if (!codecs || typeof codecs !== 'object' || Object.keys(codecs).length === 0) {throw new Error('Cannot addOrChangeSourceBuffers to undefined codecs');}Object.keys(codecs).forEach(type => {const codec = codecs[type];if (!this.hasCreatedSourceBuffers()) {return this.addSourceBuffer(type, codec);}if (this.canChangeType()) {this.changeType(type, codec);}});}/*** Queue an update to append an ArrayBuffer.** @param {MediaObject} object containing audioBytes and/or videoBytes* @param {Function} done the function to call when done* @see http://www.w3.org/TR/media-source/#widl-SourceBuffer-appendBuffer-void-ArrayBuffer-data*/appendBuffer(options, doneFn) {const {segmentInfo,type,bytes} = options;this.processedAppend_ = true;if (type === 'audio' && this.videoBuffer && !this.videoAppendQueued_) {this.delayedAudioAppendQueue_.push([options, doneFn]);this.logger_(`delayed audio append of 
${bytes.length} until video append`);return;} // In the case of certain errors, for instance, QUOTA_EXCEEDED_ERR, updateend will// not be fired. This means that the queue will be blocked until the next action// taken by the segment-loader. Provide a mechanism for segment-loader to handle// these errors by calling the doneFn with the specific error.const onError = doneFn;pushQueue({type,sourceUpdater: this,action: actions.appendBuffer(bytes, segmentInfo || {mediaIndex: -1}, onError),doneFn,name: 'appendBuffer'});if (type === 'video') {this.videoAppendQueued_ = true;if (!this.delayedAudioAppendQueue_.length) {return;}const queue = this.delayedAudioAppendQueue_.slice();this.logger_(`queuing delayed audio ${queue.length} appendBuffers`);this.delayedAudioAppendQueue_.length = 0;queue.forEach(que => {this.appendBuffer.apply(this, que);});}}/*** Get the audio buffer's buffered timerange.** @return {TimeRange}* The audio buffer's buffered time range*/audioBuffered() {// no media source/source buffer or it isn't in the media sources// source buffer listif (!inSourceBuffers(this.mediaSource, this.audioBuffer)) {return createTimeRanges();}return this.audioBuffer.buffered ? this.audioBuffer.buffered : createTimeRanges();}/*** Get the video buffer's buffered timerange.** @return {TimeRange}* The video buffer's buffered time range*/videoBuffered() {// no media source/source buffer or it isn't in the media sources// source buffer listif (!inSourceBuffers(this.mediaSource, this.videoBuffer)) {return createTimeRanges();}return this.videoBuffer.buffered ? this.videoBuffer.buffered : createTimeRanges();}/*** Get a combined video/audio buffer's buffered timerange.** @return {TimeRange}* the combined time range*/buffered() {const video = inSourceBuffers(this.mediaSource, this.videoBuffer) ? this.videoBuffer : null;const audio = inSourceBuffers(this.mediaSource, this.audioBuffer) ? this.audioBuffer : null;if (audio && !video) {return this.audioBuffered();}if (video && !audio) {return this.videoBuffered();}return bufferIntersection(this.audioBuffered(), this.videoBuffered());}/*** Add a callback to the queue that will set duration on the mediaSource.** @param {number} duration* The duration to set** @param {Function} [doneFn]* function to run after duration has been set.*/setDuration(duration, doneFn = noop) {// In order to set the duration on the media source, it's necessary to wait for all// source buffers to no longer be updating. "If the updating attribute equals true on// any SourceBuffer in sourceBuffers, then throw an InvalidStateError exception and// abort these steps." (source: https://www.w3.org/TR/media-source/#attributes).pushQueue({type: 'mediaSource',sourceUpdater: this,action: actions.duration(duration),name: 'duration',doneFn});}/*** Add a mediaSource endOfStream call to the queue** @param {Error} [error]* Call endOfStream with an error** @param {Function} [doneFn]* A function that should be called when the* endOfStream call has finished.*/endOfStream(error = null, doneFn = noop) {if (typeof error !== 'string') {error = undefined;} // In order to set the duration on the media source, it's necessary to wait for all// source buffers to no longer be updating. "If the updating attribute equals true on// any SourceBuffer in sourceBuffers, then throw an InvalidStateError exception and// abort these steps." 
(source: https://www.w3.org/TR/media-source/#attributes).pushQueue({type: 'mediaSource',sourceUpdater: this,action: actions.endOfStream(error),name: 'endOfStream',doneFn});}/*** Queue an update to remove a time range from the buffer.** @param {number} start where to start the removal* @param {number} end where to end the removal* @param {Function} [done=noop] optional callback to be executed when the remove* operation is complete* @see http://www.w3.org/TR/media-source/#widl-SourceBuffer-remove-void-double-start-unrestricted-double-end*/removeAudio(start, end, done = noop) {if (!this.audioBuffered().length || this.audioBuffered().end(0) === 0) {done();return;}pushQueue({type: 'audio',sourceUpdater: this,action: actions.remove(start, end),doneFn: done,name: 'remove'});}/*** Queue an update to remove a time range from the buffer.** @param {number} start where to start the removal* @param {number} end where to end the removal* @param {Function} [done=noop] optional callback to be executed when the remove* operation is complete* @see http://www.w3.org/TR/media-source/#widl-SourceBuffer-remove-void-double-start-unrestricted-double-end*/removeVideo(start, end, done = noop) {if (!this.videoBuffered().length || this.videoBuffered().end(0) === 0) {done();return;}pushQueue({type: 'video',sourceUpdater: this,action: actions.remove(start, end),doneFn: done,name: 'remove'});}/*** Whether the underlying sourceBuffer is updating or not** @return {boolean} the updating status of the SourceBuffer*/updating() {// the audio/video source buffer is updatingif (updating('audio', this) || updating('video', this)) {return true;}return false;}/*** Set/get the timestampoffset on the audio SourceBuffer** @return {number} the timestamp offset*/audioTimestampOffset(offset) {if (typeof offset !== 'undefined' && this.audioBuffer &&// no point in updating if it's the samethis.audioTimestampOffset_ !== offset) {pushQueue({type: 'audio',sourceUpdater: this,action: actions.timestampOffset(offset),name: 'timestampOffset'});this.audioTimestampOffset_ = offset;}return this.audioTimestampOffset_;}/*** Set/get the timestampoffset on the video SourceBuffer** @return {number} the timestamp offset*/videoTimestampOffset(offset) {if (typeof offset !== 'undefined' && this.videoBuffer &&// no point in updating if it's the samethis.videoTimestampOffset !== offset) {pushQueue({type: 'video',sourceUpdater: this,action: actions.timestampOffset(offset),name: 'timestampOffset'});this.videoTimestampOffset_ = offset;}return this.videoTimestampOffset_;}/*** Add a function to the queue that will be called* when it is its turn to run in the audio queue.** @param {Function} callback* The callback to queue.*/audioQueueCallback(callback) {if (!this.audioBuffer) {return;}pushQueue({type: 'audio',sourceUpdater: this,action: actions.callback(callback),name: 'callback'});}/*** Add a function to the queue that will be called* when it is its turn to run in the video queue.** @param {Function} callback* The callback to queue.*/videoQueueCallback(callback) {if (!this.videoBuffer) {return;}pushQueue({type: 'video',sourceUpdater: this,action: actions.callback(callback),name: 'callback'});}/*** dispose of the source updater and the underlying sourceBuffer*/dispose() {this.trigger('dispose');bufferTypes.forEach(type => {this.abort(type);if (this.canRemoveSourceBuffer()) {this.removeSourceBuffer(type);} else {this[`${type}QueueCallback`](() => cleanupBuffer(type, this));}});this.videoAppendQueued_ = false;this.delayedAudioAppendQueue_.length = 0;if 
(this.sourceopenListener_) {this.mediaSource.removeEventListener('sourceopen', this.sourceopenListener_);}this.off();}}const uint8ToUtf8 = uintArray => decodeURIComponent(escape(String.fromCharCode.apply(null, uintArray)));const bufferToHexString = buffer => {const uInt8Buffer = new Uint8Array(buffer);return Array.from(uInt8Buffer).map(byte => byte.toString(16).padStart(2, '0')).join('');};/*** @file vtt-segment-loader.js*/const VTT_LINE_TERMINATORS = new Uint8Array('\n\n'.split('').map(char => char.charCodeAt(0)));class NoVttJsError extends Error {constructor() {super('Trying to parse received VTT cues, but there is no WebVTT. Make sure vtt.js is loaded.');}}/*** An object that manages segment loading and appending.** @class VTTSegmentLoader* @param {Object} options required and optional options* @extends videojs.EventTarget*/class VTTSegmentLoader extends SegmentLoader {constructor(settings, options = {}) {super(settings, options); // SegmentLoader requires a MediaSource be specified or it will throw an error;// however, VTTSegmentLoader has no need of a media source, so delete the referencethis.mediaSource_ = null;this.subtitlesTrack_ = null;this.loaderType_ = 'subtitle';this.featuresNativeTextTracks_ = settings.featuresNativeTextTracks;this.loadVttJs = settings.loadVttJs; // The VTT segment will have its own time mappings. Saving VTT segment timing info in// the sync controller leads to improper behavior.this.shouldSaveSegmentTimingInfo_ = false;}createTransmuxer_() {// don't need to transmux any subtitlesreturn null;}/*** Indicates which time ranges are buffered** @return {TimeRange}* TimeRange object representing the current buffered ranges*/buffered_() {if (!this.subtitlesTrack_ || !this.subtitlesTrack_.cues || !this.subtitlesTrack_.cues.length) {return createTimeRanges();}const cues = this.subtitlesTrack_.cues;const start = cues[0].startTime;const end = cues[cues.length - 1].startTime;return createTimeRanges([[start, end]]);}/*** Gets and sets init segment for the provided map** @param {Object} map* The map object representing the init segment to get or set* @param {boolean=} set* If true, the init segment for the provided map should be saved* @return {Object}* map object for desired init segment*/initSegmentForMap(map, set = false) {if (!map) {return null;}const id = initSegmentId(map);let storedMap = this.initSegments_[id];if (set && !storedMap && map.bytes) {// append WebVTT line terminators to the media initialization segment if it exists// to follow the WebVTT spec (https://w3c.github.io/webvtt/#file-structure) that// requires two or more WebVTT line terminators between the WebVTT header and the// rest of the fileconst combinedByteLength = VTT_LINE_TERMINATORS.byteLength + map.bytes.byteLength;const combinedSegment = new Uint8Array(combinedByteLength);combinedSegment.set(map.bytes);combinedSegment.set(VTT_LINE_TERMINATORS, map.bytes.byteLength);this.initSegments_[id] = storedMap = {resolvedUri: map.resolvedUri,byterange: map.byterange,bytes: combinedSegment};}return storedMap || map;}/*** Returns true if all configuration required for loading is present, otherwise false.** @return {boolean} True if the all configuration is ready for loading* @private*/couldBeginLoading_() {return this.playlist_ && this.subtitlesTrack_ && !this.paused();}/*** Once all the starting parameters have been specified, begin* operation. 
This method should only be invoked from the INIT* state.** @private*/init_() {this.state = 'READY';this.resetEverything();return this.monitorBuffer_();}/*** Set a subtitle track on the segment loader to add subtitles to** @param {TextTrack=} track* The text track to add loaded subtitles to* @return {TextTrack}* Returns the subtitles track*/track(track) {if (typeof track === 'undefined') {return this.subtitlesTrack_;}this.subtitlesTrack_ = track; // if we were unpaused but waiting for a sourceUpdater, start// buffering nowif (this.state === 'INIT' && this.couldBeginLoading_()) {this.init_();}return this.subtitlesTrack_;}/*** Remove any data in the source buffer between start and end times** @param {number} start - the start time of the region to remove from the buffer* @param {number} end - the end time of the region to remove from the buffer*/remove(start, end) {removeCuesFromTrack(start, end, this.subtitlesTrack_);}/*** fill the buffer with segements unless the sourceBuffers are* currently updating** Note: this function should only ever be called by monitorBuffer_* and never directly** @private*/fillBuffer_() {// see if we need to begin loading immediatelyconst segmentInfo = this.chooseNextRequest_();if (!segmentInfo) {return;}if (this.syncController_.timestampOffsetForTimeline(segmentInfo.timeline) === null) {// We don't have the timestamp offset that we need to sync subtitles.// Rerun on a timestamp offset or user interaction.const checkTimestampOffset = () => {this.state = 'READY';if (!this.paused()) {// if not paused, queue a buffer check as soon as possiblethis.monitorBuffer_();}};this.syncController_.one('timestampoffset', checkTimestampOffset);this.state = 'WAITING_ON_TIMELINE';return;}this.loadSegment_(segmentInfo);} // never set a timestamp offset for vtt segments.timestampOffsetForSegment_() {return null;}chooseNextRequest_() {return this.skipEmptySegments_(super.chooseNextRequest_());}/*** Prevents the segment loader from requesting segments we know contain no subtitles* by walking forward until we find the next segment that we don't know whether it is* empty or not.** @param {Object} segmentInfo* a segment info object that describes the current segment* @return {Object}* a segment info object that describes the current segment*/skipEmptySegments_(segmentInfo) {while (segmentInfo && segmentInfo.segment.empty) {// stop at the last possible segmentInfoif (segmentInfo.mediaIndex + 1 >= segmentInfo.playlist.segments.length) {segmentInfo = null;break;}segmentInfo = this.generateSegmentInfo_({playlist: segmentInfo.playlist,mediaIndex: segmentInfo.mediaIndex + 1,startOfSegment: segmentInfo.startOfSegment + segmentInfo.duration,isSyncRequest: segmentInfo.isSyncRequest});}return segmentInfo;}stopForError(error) {this.error(error);this.state = 'READY';this.pause();this.trigger('error');}/*** append a decrypted segement to the SourceBuffer through a SourceUpdater** @private*/segmentRequestFinished_(error, simpleSegment, result) {if (!this.subtitlesTrack_) {this.state = 'READY';return;}this.saveTransferStats_(simpleSegment.stats); // the request was abortedif (!this.pendingSegment_) {this.state = 'READY';this.mediaRequestsAborted += 1;return;}if (error) {if (error.code === REQUEST_ERRORS.TIMEOUT) {this.handleTimeout_();}if (error.code === REQUEST_ERRORS.ABORTED) {this.mediaRequestsAborted += 1;} else {this.mediaRequestsErrored += 1;}this.stopForError(error);return;}const segmentInfo = this.pendingSegment_; // although the VTT segment loader bandwidth isn't really used, it's good to// 
maintain functionality between segment loadersthis.saveBandwidthRelatedStats_(segmentInfo.duration, simpleSegment.stats); // if this request included a segment key, save that data in the cacheif (simpleSegment.key) {this.segmentKey(simpleSegment.key, true);}this.state = 'APPENDING'; // used for teststhis.trigger('appending');const segment = segmentInfo.segment;if (segment.map) {segment.map.bytes = simpleSegment.map.bytes;}segmentInfo.bytes = simpleSegment.bytes; // Make sure that vttjs has loaded, otherwise, load it and wait till it finished loadingif (typeof window.WebVTT !== 'function' && typeof this.loadVttJs === 'function') {this.state = 'WAITING_ON_VTTJS'; // should be fine to call multiple times// script will be loaded once but multiple listeners will be added to the queue, which is expected.this.loadVttJs().then(() => this.segmentRequestFinished_(error, simpleSegment, result), () => this.stopForError({message: 'Error loading vtt.js'}));return;}segment.requested = true;try {this.parseVTTCues_(segmentInfo);} catch (e) {this.stopForError({message: e.message});return;}this.updateTimeMapping_(segmentInfo, this.syncController_.timelines[segmentInfo.timeline], this.playlist_);if (segmentInfo.cues.length) {segmentInfo.timingInfo = {start: segmentInfo.cues[0].startTime,end: segmentInfo.cues[segmentInfo.cues.length - 1].endTime};} else {segmentInfo.timingInfo = {start: segmentInfo.startOfSegment,end: segmentInfo.startOfSegment + segmentInfo.duration};}if (segmentInfo.isSyncRequest) {this.trigger('syncinfoupdate');this.pendingSegment_ = null;this.state = 'READY';return;}segmentInfo.byteLength = segmentInfo.bytes.byteLength;this.mediaSecondsLoaded += segment.duration; // Create VTTCue instances for each cue in the new segment and add them to// the subtitle tracksegmentInfo.cues.forEach(cue => {this.subtitlesTrack_.addCue(this.featuresNativeTextTracks_ ? new window.VTTCue(cue.startTime, cue.endTime, cue.text) : cue);}); // Remove any duplicate cues from the subtitle track. The WebVTT spec allows// cues to have identical time-intervals, but if the text is also identical// we can safely assume it is a duplicate that can be removed (ex. 
when a cue// "overlaps" VTT segments)removeDuplicateCuesFromTrack(this.subtitlesTrack_);this.handleAppendsDone_();}handleData_() {// noop as we shouldn't be getting video/audio data captions// that we do not support here.}updateTimingInfoEnd_() {// noop}/*** Uses the WebVTT parser to parse the segment response** @throws NoVttJsError** @param {Object} segmentInfo* a segment info object that describes the current segment* @private*/parseVTTCues_(segmentInfo) {let decoder;let decodeBytesToString = false;if (typeof window.WebVTT !== 'function') {// caller is responsible for exception handling.throw new NoVttJsError();}if (typeof window.TextDecoder === 'function') {decoder = new window.TextDecoder('utf8');} else {decoder = window.WebVTT.StringDecoder();decodeBytesToString = true;}const parser = new window.WebVTT.Parser(window, window.vttjs, decoder);segmentInfo.cues = [];segmentInfo.timestampmap = {MPEGTS: 0,LOCAL: 0};parser.oncue = segmentInfo.cues.push.bind(segmentInfo.cues);parser.ontimestampmap = map => {segmentInfo.timestampmap = map;};parser.onparsingerror = error => {videojs.log.warn('Error encountered when parsing cues: ' + error.message);};if (segmentInfo.segment.map) {let mapData = segmentInfo.segment.map.bytes;if (decodeBytesToString) {mapData = uint8ToUtf8(mapData);}parser.parse(mapData);}let segmentData = segmentInfo.bytes;if (decodeBytesToString) {segmentData = uint8ToUtf8(segmentData);}parser.parse(segmentData);parser.flush();}/*** Updates the start and end times of any cues parsed by the WebVTT parser using* the information parsed from the X-TIMESTAMP-MAP header and a TS to media time mapping* from the SyncController** @param {Object} segmentInfo* a segment info object that describes the current segment* @param {Object} mappingObj* object containing a mapping from TS to media time* @param {Object} playlist* the playlist object containing the segment* @private*/updateTimeMapping_(segmentInfo, mappingObj, playlist) {const segment = segmentInfo.segment;if (!mappingObj) {// If the sync controller does not have a mapping of TS to Media Time for the// timeline, then we don't have enough information to update the cue// start/end timesreturn;}if (!segmentInfo.cues.length) {// If there are no cues, we also do not have enough information to figure out// segment timing. Mark that the segment contains no cues so we don't re-request// an empty segment.segment.empty = true;return;}const {MPEGTS,LOCAL} = segmentInfo.timestampmap;/*** From the spec:* The MPEGTS media timestamp MUST use a 90KHz timescale,* even when non-WebVTT Media Segments use a different timescale.*/const mpegTsInSeconds = MPEGTS / clock_1;const diff = mpegTsInSeconds - LOCAL + mappingObj.mapping;segmentInfo.cues.forEach(cue => {const duration = cue.endTime - cue.startTime;const startTime = MPEGTS === 0 ? 
cue.startTime + diff : this.handleRollover_(cue.startTime + diff, mappingObj.time);cue.startTime = Math.max(startTime, 0);cue.endTime = Math.max(startTime + duration, 0);});if (!playlist.syncInfo) {const firstStart = segmentInfo.cues[0].startTime;const lastStart = segmentInfo.cues[segmentInfo.cues.length - 1].startTime;playlist.syncInfo = {mediaSequence: playlist.mediaSequence + segmentInfo.mediaIndex,time: Math.min(firstStart, lastStart - segment.duration)};}}/*** MPEG-TS PES timestamps are limited to 2^33.* Once they reach 2^33, they roll over to 0.* mux.js handles PES timestamp rollover for the following scenarios:* [forward rollover(right)] ->* PES timestamps monotonically increase, and once they reach 2^33, they roll over to 0* [backward rollover(left)] -->* we seek back to position before rollover.** According to the HLS SPEC:* When synchronizing WebVTT with PES timestamps, clients SHOULD account* for cases where the 33-bit PES timestamps have wrapped and the WebVTT* cue times have not. When the PES timestamp wraps, the WebVTT Segment* SHOULD have a X-TIMESTAMP-MAP header that maps the current WebVTT* time to the new (low valued) PES timestamp.** So we want to handle rollover here and align VTT Cue start/end time to the player's time.*/handleRollover_(value, reference) {if (reference === null) {return value;}let valueIn90khz = value * clock_1;const referenceIn90khz = reference * clock_1;let offset;if (referenceIn90khz < valueIn90khz) {// - 2^33offset = -8589934592;} else {// + 2^33offset = 8589934592;} // distance(value - reference) > 2^32while (Math.abs(valueIn90khz - referenceIn90khz) > 4294967296) {valueIn90khz += offset;}return valueIn90khz / clock_1;}}/*** @file ad-cue-tags.js*//*** Searches for an ad cue that overlaps with the given mediaTime** @param {Object} track* the track to find the cue for** @param {number} mediaTime* the time to find the cue at** @return {Object|null}* the found cue or null*/const findAdCue = function (track, mediaTime) {const cues = track.cues;for (let i = 0; i < cues.length; i++) {const cue = cues[i];if (mediaTime >= cue.adStartTime && mediaTime <= cue.adEndTime) {return cue;}}return null;};const updateAdCues = function (media, track, offset = 0) {if (!media.segments) {return;}let mediaTime = offset;let cue;for (let i = 0; i < media.segments.length; i++) {const segment = media.segments[i];if (!cue) {// Since the cues will span for at least the segment duration, adding a fudge// factor of half segment duration will prevent duplicate cues from being// created when timing info is not exact (e.g. 
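// Illustrative sketch (not part of the original bundle): the 90kHz / 2^33 arithmetic that
// updateTimeMapping_ and handleRollover_ above apply to WebVTT cue times. The names
// PES_CLOCK, ROLLOVER, rollover and alignCueTime are hypothetical.
const PES_CLOCK = 90000;       // MPEG-TS timestamps use a 90kHz timescale
const ROLLOVER = 8589934592;   // 2^33 ticks, where 33-bit PES timestamps wrap

function rollover(valueSeconds, referenceSeconds) {
  if (referenceSeconds === null) {
    return valueSeconds;
  }
  let value90k = valueSeconds * PES_CLOCK;
  const reference90k = referenceSeconds * PES_CLOCK;
  // step by +/- 2^33 until the value lands within 2^32 ticks of the reference
  const offset = reference90k < value90k ? -ROLLOVER : ROLLOVER;
  while (Math.abs(value90k - reference90k) > 4294967296) {
    value90k += offset;
  }
  return value90k / PES_CLOCK;
}

function alignCueTime(cueTime, timestampmap, mapping, referenceSeconds) {
  // X-TIMESTAMP-MAP supplies the (MPEGTS, LOCAL) pair; mapping comes from the sync controller
  const diff = timestampmap.MPEGTS / PES_CLOCK - timestampmap.LOCAL + mapping;
  const shifted = cueTime + diff;
  return Math.max(timestampmap.MPEGTS === 0 ? shifted : rollover(shifted, referenceSeconds), 0);
}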
cue start time initialized// at 10.006677, but next call mediaTime is 10.003332 )cue = findAdCue(track, mediaTime + segment.duration / 2);}if (cue) {if ('cueIn' in segment) {// Found a CUE-IN so end the cuecue.endTime = mediaTime;cue.adEndTime = mediaTime;mediaTime += segment.duration;cue = null;continue;}if (mediaTime < cue.endTime) {// Already processed this mediaTime for this cuemediaTime += segment.duration;continue;} // otherwise extend cue until a CUE-IN is foundcue.endTime += segment.duration;} else {if ('cueOut' in segment) {cue = new window.VTTCue(mediaTime, mediaTime + segment.duration, segment.cueOut);cue.adStartTime = mediaTime; // Assumes tag format to be// #EXT-X-CUE-OUT:30cue.adEndTime = mediaTime + parseFloat(segment.cueOut);track.addCue(cue);}if ('cueOutCont' in segment) {// Entered into the middle of an ad cue// Assumes tag formate to be// #EXT-X-CUE-OUT-CONT:10/30const [adOffset, adTotal] = segment.cueOutCont.split('/').map(parseFloat);cue = new window.VTTCue(mediaTime, mediaTime + segment.duration, '');cue.adStartTime = mediaTime - adOffset;cue.adEndTime = cue.adStartTime + adTotal;track.addCue(cue);}}mediaTime += segment.duration;}};/*** @file sync-controller.js*/// synchronize expired playlist segments.// the max media sequence diff is 48 hours of live stream// content with two second segments. Anything larger than that// will likely be invalid.const MAX_MEDIA_SEQUENCE_DIFF_FOR_SYNC = 86400;const syncPointStrategies = [// Stategy "VOD": Handle the VOD-case where the sync-point is *always*// the equivalence display-time 0 === segment-index 0{name: 'VOD',run: (syncController, playlist, duration, currentTimeline, currentTime) => {if (duration !== Infinity) {const syncPoint = {time: 0,segmentIndex: 0,partIndex: null};return syncPoint;}return null;}}, {name: 'MediaSequence',/*** run media sequence strategy** @param {SyncController} syncController* @param {Object} playlist* @param {number} duration* @param {number} currentTimeline* @param {number} currentTime* @param {string} type*/run: (syncController, playlist, duration, currentTimeline, currentTime, type) => {if (!type) {return null;}const mediaSequenceMap = syncController.getMediaSequenceMap(type);if (!mediaSequenceMap || mediaSequenceMap.size === 0) {return null;}if (playlist.mediaSequence === undefined || !Array.isArray(playlist.segments) || !playlist.segments.length) {return null;}let currentMediaSequence = playlist.mediaSequence;let segmentIndex = 0;for (const segment of playlist.segments) {const range = mediaSequenceMap.get(currentMediaSequence);if (!range) {// unexpected case// we expect this playlist to be the same playlist in the map// just break from the loop and move forward to the next strategybreak;}if (currentTime >= range.start && currentTime < range.end) {// we found segmentif (Array.isArray(segment.parts) && segment.parts.length) {let currentPartStart = range.start;let partIndex = 0;for (const part of segment.parts) {const start = currentPartStart;const end = start + part.duration;if (currentTime >= start && currentTime < end) {return {time: range.start,segmentIndex,partIndex};}partIndex++;currentPartStart = end;}} // no parts found, return sync point for segmentreturn {time: range.start,segmentIndex,partIndex: null};}segmentIndex++;currentMediaSequence++;} // we didn't find any segments for provided current timereturn null;}},// Stategy "ProgramDateTime": We have a program-date-time tag in this playlist{name: 'ProgramDateTime',run: (syncController, playlist, duration, currentTimeline, currentTime) => 
{if (!Object.keys(syncController.timelineToDatetimeMappings).length) {return null;}let syncPoint = null;let lastDistance = null;const partsAndSegments = getPartsAndSegments(playlist);currentTime = currentTime || 0;for (let i = 0; i < partsAndSegments.length; i++) {// start from the end and loop backwards for live// or start from the front and loop forwards for non-liveconst index = playlist.endList || currentTime === 0 ? i : partsAndSegments.length - (i + 1);const partAndSegment = partsAndSegments[index];const segment = partAndSegment.segment;const datetimeMapping = syncController.timelineToDatetimeMappings[segment.timeline];if (!datetimeMapping || !segment.dateTimeObject) {continue;}const segmentTime = segment.dateTimeObject.getTime() / 1000;let start = segmentTime + datetimeMapping; // take part duration into account.if (segment.parts && typeof partAndSegment.partIndex === 'number') {for (let z = 0; z < partAndSegment.partIndex; z++) {start += segment.parts[z].duration;}}const distance = Math.abs(currentTime - start); // Once the distance begins to increase, or if distance is 0, we have passed// currentTime and can stop looking for better candidatesif (lastDistance !== null && (distance === 0 || lastDistance < distance)) {break;}lastDistance = distance;syncPoint = {time: start,segmentIndex: partAndSegment.segmentIndex,partIndex: partAndSegment.partIndex};}return syncPoint;}},// Stategy "Segment": We have a known time mapping for a timeline and a// segment in the current timeline with timing data{name: 'Segment',run: (syncController, playlist, duration, currentTimeline, currentTime) => {let syncPoint = null;let lastDistance = null;currentTime = currentTime || 0;const partsAndSegments = getPartsAndSegments(playlist);for (let i = 0; i < partsAndSegments.length; i++) {// start from the end and loop backwards for live// or start from the front and loop forwards for non-liveconst index = playlist.endList || currentTime === 0 ? 
i : partsAndSegments.length - (i + 1);const partAndSegment = partsAndSegments[index];const segment = partAndSegment.segment;const start = partAndSegment.part && partAndSegment.part.start || segment && segment.start;if (segment.timeline === currentTimeline && typeof start !== 'undefined') {const distance = Math.abs(currentTime - start); // Once the distance begins to increase, we have passed// currentTime and can stop looking for better candidatesif (lastDistance !== null && lastDistance < distance) {break;}if (!syncPoint || lastDistance === null || lastDistance >= distance) {lastDistance = distance;syncPoint = {time: start,segmentIndex: partAndSegment.segmentIndex,partIndex: partAndSegment.partIndex};}}}return syncPoint;}},// Stategy "Discontinuity": We have a discontinuity with a known// display-time{name: 'Discontinuity',run: (syncController, playlist, duration, currentTimeline, currentTime) => {let syncPoint = null;currentTime = currentTime || 0;if (playlist.discontinuityStarts && playlist.discontinuityStarts.length) {let lastDistance = null;for (let i = 0; i < playlist.discontinuityStarts.length; i++) {const segmentIndex = playlist.discontinuityStarts[i];const discontinuity = playlist.discontinuitySequence + i + 1;const discontinuitySync = syncController.discontinuities[discontinuity];if (discontinuitySync) {const distance = Math.abs(currentTime - discontinuitySync.time); // Once the distance begins to increase, we have passed// currentTime and can stop looking for better candidatesif (lastDistance !== null && lastDistance < distance) {break;}if (!syncPoint || lastDistance === null || lastDistance >= distance) {lastDistance = distance;syncPoint = {time: discontinuitySync.time,segmentIndex,partIndex: null};}}}}return syncPoint;}},// Stategy "Playlist": We have a playlist with a known mapping of// segment index to display time{name: 'Playlist',run: (syncController, playlist, duration, currentTimeline, currentTime) => {if (playlist.syncInfo) {const syncPoint = {time: playlist.syncInfo.time,segmentIndex: playlist.syncInfo.mediaSequence - playlist.mediaSequence,partIndex: null};return syncPoint;}return null;}}];class SyncController extends videojs.EventTarget {constructor(options = {}) {super(); // ...for synching across variantsthis.timelines = [];this.discontinuities = [];this.timelineToDatetimeMappings = {};/*** @type {Map<string, Map<number, { start: number, end: number }>>}* @private*/this.mediaSequenceStorage_ = new Map();this.logger_ = logger('SyncController');}/*** Get media sequence map by type** @param {string} type - segment loader type* @return {Map<number, { start: number, end: number }> | undefined}*/getMediaSequenceMap(type) {return this.mediaSequenceStorage_.get(type);}/*** Update Media Sequence Map -> <MediaSequence, Range>** @param {Object} playlist - parsed playlist* @param {number} currentTime - current player's time* @param {string} type - segment loader type* @return {void}*/updateMediaSequenceMap(playlist, currentTime, type) {// we should not process this playlist if it does not have mediaSequence or segmentsif (playlist.mediaSequence === undefined || !Array.isArray(playlist.segments) || !playlist.segments.length) {return;}const currentMap = this.getMediaSequenceMap(type);const result = new Map();let currentMediaSequence = playlist.mediaSequence;let currentBaseTime;if (!currentMap) {// first playlist setup:currentBaseTime = 0;} else if (currentMap.has(playlist.mediaSequence)) {// further playlists setup:currentBaseTime = currentMap.get(playlist.mediaSequence).start;} 
else {// it seems like we have a gap between playlists, use current time as a fallback:this.logger_(`MediaSequence sync for ${type} segment loader - received a gap between playlists.Fallback base time to: ${currentTime}.Received media sequence: ${currentMediaSequence}.Current map: `, currentMap);currentBaseTime = currentTime;}this.logger_(`MediaSequence sync for ${type} segment loader.Received media sequence: ${currentMediaSequence}.base time is ${currentBaseTime}Current map: `, currentMap);playlist.segments.forEach(segment => {const start = currentBaseTime;const end = start + segment.duration;const range = {start,end};result.set(currentMediaSequence, range);currentMediaSequence++;currentBaseTime = end;});this.mediaSequenceStorage_.set(type, result);}/*** Find a sync-point for the playlist specified** A sync-point is defined as a known mapping from display-time to* a segment-index in the current playlist.** @param {Playlist} playlist* The playlist that needs a sync-point* @param {number} duration* Duration of the MediaSource (Infinite if playing a live source)* @param {number} currentTimeline* The last timeline from which a segment was loaded* @param {number} currentTime* Current player's time* @param {string} type* Segment loader type* @return {Object}* A sync-point object*/getSyncPoint(playlist, duration, currentTimeline, currentTime, type) {// Always use VOD sync point for VODif (duration !== Infinity) {const vodSyncPointStrategy = syncPointStrategies.find(({name}) => name === 'VOD');return vodSyncPointStrategy.run(this, playlist, duration);}const syncPoints = this.runStrategies_(playlist, duration, currentTimeline, currentTime, type);if (!syncPoints.length) {// Signal that we need to attempt to get a sync-point manually// by fetching a segment in the playlist and constructing// a sync-point from that informationreturn null;} // If we have exact match just return it instead of finding the nearest distancefor (const syncPointInfo of syncPoints) {const {syncPoint,strategy} = syncPointInfo;const {segmentIndex,time} = syncPoint;if (segmentIndex < 0) {continue;}const selectedSegment = playlist.segments[segmentIndex];const start = time;const end = start + selectedSegment.duration;this.logger_(`Strategy: ${strategy}. Current time: ${currentTime}. selected segment: ${segmentIndex}. Time: [${start} -> ${end}]}`);if (currentTime >= start && currentTime < end) {this.logger_('Found sync point with exact match: ', syncPoint);return syncPoint;}} // Now find the sync-point that is closest to the currentTime because// that should result in the most accurate guess about which segment// to fetchreturn this.selectSyncPoint_(syncPoints, {key: 'time',value: currentTime});}/*** Calculate the amount of time that has expired off the playlist during playback** @param {Playlist} playlist* Playlist object to calculate expired from* @param {number} duration* Duration of the MediaSource (Infinity if playling a live source)* @return {number|null}* The amount of time that has expired off the playlist during playback. 
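// Illustrative sketch (not part of the original bundle): building the
// Map<mediaSequence, { start, end }> that updateMediaSequenceMap above maintains for the
// MediaSequence sync strategy. buildMediaSequenceMap and the sample playlist are hypothetical.
function buildMediaSequenceMap(playlist, baseTime) {
  const result = new Map();
  let mediaSequence = playlist.mediaSequence;
  let start = baseTime;
  playlist.segments.forEach((segment) => {
    const end = start + segment.duration;
    result.set(mediaSequence, { start, end });
    mediaSequence++;
    start = end;
  });
  return result;
}

// A live playlist whose first remaining segment is media sequence 120:
const sequenceMap = buildMediaSequenceMap({
  mediaSequence: 120,
  segments: [{ duration: 6 }, { duration: 6 }, { duration: 4 }]
}, 0);
// sequenceMap.get(120) -> { start: 0, end: 6 }, sequenceMap.get(122) -> { start: 12, end: 16 }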
Null* if no sync-points for the playlist can be found.*/getExpiredTime(playlist, duration) {if (!playlist || !playlist.segments) {return null;}const syncPoints = this.runStrategies_(playlist, duration, playlist.discontinuitySequence, 0, 'main'); // Without sync-points, there is not enough information to determine the expired timeif (!syncPoints.length) {return null;}const syncPoint = this.selectSyncPoint_(syncPoints, {key: 'segmentIndex',value: 0}); // If the sync-point is beyond the start of the playlist, we want to subtract the// duration from index 0 to syncPoint.segmentIndex instead of adding.if (syncPoint.segmentIndex > 0) {syncPoint.time *= -1;}return Math.abs(syncPoint.time + sumDurations({defaultDuration: playlist.targetDuration,durationList: playlist.segments,startIndex: syncPoint.segmentIndex,endIndex: 0}));}/*** Runs each sync-point strategy and returns a list of sync-points returned by the* strategies** @private* @param {Playlist} playlist* The playlist that needs a sync-point* @param {number} duration* Duration of the MediaSource (Infinity if playing a live source)* @param {number} currentTimeline* The last timeline from which a segment was loaded* @param {number} currentTime* Current player's time* @param {string} type* Segment loader type* @return {Array}* A list of sync-point objects*/runStrategies_(playlist, duration, currentTimeline, currentTime, type) {const syncPoints = []; // Try to find a sync-point in by utilizing various strategies...for (let i = 0; i < syncPointStrategies.length; i++) {const strategy = syncPointStrategies[i];const syncPoint = strategy.run(this, playlist, duration, currentTimeline, currentTime, type);if (syncPoint) {syncPoint.strategy = strategy.name;syncPoints.push({strategy: strategy.name,syncPoint});}}return syncPoints;}/*** Selects the sync-point nearest the specified target** @private* @param {Array} syncPoints* List of sync-points to select from* @param {Object} target* Object specifying the property and value we are targeting* @param {string} target.key* Specifies the property to target. Must be either 'time' or 'segmentIndex'* @param {number} target.value* The value to target for the specified key.* @return {Object}* The sync-point nearest the target*/selectSyncPoint_(syncPoints, target) {let bestSyncPoint = syncPoints[0].syncPoint;let bestDistance = Math.abs(syncPoints[0].syncPoint[target.key] - target.value);let bestStrategy = syncPoints[0].strategy;for (let i = 1; i < syncPoints.length; i++) {const newDistance = Math.abs(syncPoints[i].syncPoint[target.key] - target.value);if (newDistance < bestDistance) {bestDistance = newDistance;bestSyncPoint = syncPoints[i].syncPoint;bestStrategy = syncPoints[i].strategy;}}this.logger_(`syncPoint for [${target.key}: ${target.value}] chosen with strategy` + ` [${bestStrategy}]: [time:${bestSyncPoint.time},` + ` segmentIndex:${bestSyncPoint.segmentIndex}` + (typeof bestSyncPoint.partIndex === 'number' ? 
`,partIndex:${bestSyncPoint.partIndex}` : '') + ']');return bestSyncPoint;}/*** Save any meta-data present on the segments when segments leave* the live window to the playlist to allow for synchronization at the* playlist level later.** @param {Playlist} oldPlaylist - The previous active playlist* @param {Playlist} newPlaylist - The updated and most current playlist*/saveExpiredSegmentInfo(oldPlaylist, newPlaylist) {const mediaSequenceDiff = newPlaylist.mediaSequence - oldPlaylist.mediaSequence; // Ignore large media sequence gapsif (mediaSequenceDiff > MAX_MEDIA_SEQUENCE_DIFF_FOR_SYNC) {videojs.log.warn(`Not saving expired segment info. Media sequence gap ${mediaSequenceDiff} is too large.`);return;} // When a segment expires from the playlist and it has a start time// save that information as a possible sync-point reference in futurefor (let i = mediaSequenceDiff - 1; i >= 0; i--) {const lastRemovedSegment = oldPlaylist.segments[i];if (lastRemovedSegment && typeof lastRemovedSegment.start !== 'undefined') {newPlaylist.syncInfo = {mediaSequence: oldPlaylist.mediaSequence + i,time: lastRemovedSegment.start};this.logger_(`playlist refresh sync: [time:${newPlaylist.syncInfo.time},` + ` mediaSequence: ${newPlaylist.syncInfo.mediaSequence}]`);this.trigger('syncinfoupdate');break;}}}/*** Save the mapping from playlist's ProgramDateTime to display. This should only happen* before segments start to load.** @param {Playlist} playlist - The currently active playlist*/setDateTimeMappingForStart(playlist) {// It's possible for the playlist to be updated before playback starts, meaning time// zero is not yet set. If, during these playlist refreshes, a discontinuity is// crossed, then the old time zero mapping (for the prior timeline) would be retained// unless the mappings are cleared.this.timelineToDatetimeMappings = {};if (playlist.segments && playlist.segments.length && playlist.segments[0].dateTimeObject) {const firstSegment = playlist.segments[0];const playlistTimestamp = firstSegment.dateTimeObject.getTime() / 1000;this.timelineToDatetimeMappings[firstSegment.timeline] = -playlistTimestamp;}}/*** Calculates and saves timeline mappings, playlist sync info, and segment timing values* based on the latest timing information.** @param {Object} options* Options object* @param {SegmentInfo} options.segmentInfo* The current active request information* @param {boolean} options.shouldSaveTimelineMapping* If there's a timeline change, determines if the timeline mapping should be* saved for timeline mapping and program date time mappings.*/saveSegmentTimingInfo({segmentInfo,shouldSaveTimelineMapping}) {const didCalculateSegmentTimeMapping = this.calculateSegmentTimeMapping_(segmentInfo, segmentInfo.timingInfo, shouldSaveTimelineMapping);const segment = segmentInfo.segment;if (didCalculateSegmentTimeMapping) {this.saveDiscontinuitySyncInfo_(segmentInfo); // If the playlist does not have sync information yet, record that information// now with segment timing informationif (!segmentInfo.playlist.syncInfo) {segmentInfo.playlist.syncInfo = {mediaSequence: segmentInfo.playlist.mediaSequence + segmentInfo.mediaIndex,time: segment.start};}}const dateTime = segment.dateTimeObject;if (segment.discontinuity && shouldSaveTimelineMapping && dateTime) {this.timelineToDatetimeMappings[segment.timeline] = -(dateTime.getTime() / 1000);}}timestampOffsetForTimeline(timeline) {if (typeof this.timelines[timeline] === 'undefined') {return null;}return this.timelines[timeline].time;}mappingForTimeline(timeline) {if (typeof 
this.timelines[timeline] === 'undefined') {return null;}return this.timelines[timeline].mapping;}/*** Use the "media time" for a segment to generate a mapping to "display time" and* save that display time to the segment.** @private* @param {SegmentInfo} segmentInfo* The current active request information* @param {Object} timingInfo* The start and end time of the current segment in "media time"* @param {boolean} shouldSaveTimelineMapping* If there's a timeline change, determines if the timeline mapping should be* saved in timelines.* @return {boolean}* Returns false if segment time mapping could not be calculated*/calculateSegmentTimeMapping_(segmentInfo, timingInfo, shouldSaveTimelineMapping) {// TODO: remove side effectsconst segment = segmentInfo.segment;const part = segmentInfo.part;let mappingObj = this.timelines[segmentInfo.timeline];let start;let end;if (typeof segmentInfo.timestampOffset === 'number') {mappingObj = {time: segmentInfo.startOfSegment,mapping: segmentInfo.startOfSegment - timingInfo.start};if (shouldSaveTimelineMapping) {this.timelines[segmentInfo.timeline] = mappingObj;this.trigger('timestampoffset');this.logger_(`time mapping for timeline ${segmentInfo.timeline}: ` + `[time: ${mappingObj.time}] [mapping: ${mappingObj.mapping}]`);}start = segmentInfo.startOfSegment;end = timingInfo.end + mappingObj.mapping;} else if (mappingObj) {start = timingInfo.start + mappingObj.mapping;end = timingInfo.end + mappingObj.mapping;} else {return false;}if (part) {part.start = start;part.end = end;} // If we don't have a segment start yet or the start value we got// is less than our current segment.start value, save a new start value.// We have to do this because parts will have segment timing info saved// multiple times and we want segment start to be the earliest part start// value for that segment.if (!segment.start || start < segment.start) {segment.start = start;}segment.end = end;return true;}/*** Each time we have discontinuity in the playlist, attempt to calculate the location* in display of the start of the discontinuity and save that. 
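// Illustrative sketch (not part of the original bundle): the media-time -> display-time
// arithmetic used by calculateSegmentTimeMapping_ above. toDisplayTimes is a hypothetical name.
function toDisplayTimes(timingInfo, mappingObj) {
  // mappingObj.mapping was saved as (startOfSegment - timingInfo.start) when the
  // timeline's timestamp offset was established
  return {
    start: timingInfo.start + mappingObj.mapping,
    end: timingInfo.end + mappingObj.mapping
  };
}

// A segment whose transmuxed media time starts at 10.01s but which should begin at 60s of
// display time has a mapping of 60 - 10.01 = 49.99:
// toDisplayTimes({ start: 10.01, end: 16.01 }, { time: 60, mapping: 49.99 })
//   -> { start: 60, end: 66 }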
We also save an accuracy* value so that we save values with the most accuracy (closest to 0.)** @private* @param {SegmentInfo} segmentInfo - The current active request information*/saveDiscontinuitySyncInfo_(segmentInfo) {const playlist = segmentInfo.playlist;const segment = segmentInfo.segment; // If the current segment is a discontinuity then we know exactly where// the start of the range and it's accuracy is 0 (greater accuracy values// mean more approximation)if (segment.discontinuity) {this.discontinuities[segment.timeline] = {time: segment.start,accuracy: 0};} else if (playlist.discontinuityStarts && playlist.discontinuityStarts.length) {// Search for future discontinuities that we can provide better timing// information for and save that information for sync purposesfor (let i = 0; i < playlist.discontinuityStarts.length; i++) {const segmentIndex = playlist.discontinuityStarts[i];const discontinuity = playlist.discontinuitySequence + i + 1;const mediaIndexDiff = segmentIndex - segmentInfo.mediaIndex;const accuracy = Math.abs(mediaIndexDiff);if (!this.discontinuities[discontinuity] || this.discontinuities[discontinuity].accuracy > accuracy) {let time;if (mediaIndexDiff < 0) {time = segment.start - sumDurations({defaultDuration: playlist.targetDuration,durationList: playlist.segments,startIndex: segmentInfo.mediaIndex,endIndex: segmentIndex});} else {time = segment.end + sumDurations({defaultDuration: playlist.targetDuration,durationList: playlist.segments,startIndex: segmentInfo.mediaIndex + 1,endIndex: segmentIndex});}this.discontinuities[discontinuity] = {time,accuracy};}}}}dispose() {this.trigger('dispose');this.off();}}/*** The TimelineChangeController acts as a source for segment loaders to listen for and* keep track of latest and pending timeline changes. 
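// Illustrative sketch (not part of the original bundle): estimating a discontinuity's display
// time from a segment with known timing, mirroring the two sumDurations branches in
// saveDiscontinuitySyncInfo_ above for discontinuities strictly behind or ahead of that
// segment. estimateDiscontinuityTime and sumSegmentDurations are hypothetical helpers.
function sumSegmentDurations(segments, startIndex, endIndex, defaultDuration) {
  let total = 0;
  for (let i = startIndex; i < endIndex; i++) {
    total += (segments[i] && segments[i].duration) || defaultDuration;
  }
  return total;
}

function estimateDiscontinuityTime(playlist, knownIndex, knownSegment, discontinuityIndex) {
  if (discontinuityIndex < knownIndex) {
    // discontinuity is behind us: walk backwards from the known segment's start
    return knownSegment.start -
      sumSegmentDurations(playlist.segments, discontinuityIndex, knownIndex, playlist.targetDuration);
  }
  // discontinuity is ahead: walk forward from the known segment's end
  return knownSegment.end +
    sumSegmentDurations(playlist.segments, knownIndex + 1, discontinuityIndex, playlist.targetDuration);
}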
This is useful to ensure proper* sync, as each loader may need to make a consideration for what timeline the other* loader is on before making changes which could impact the other loader's media.** @class TimelineChangeController* @extends videojs.EventTarget*/class TimelineChangeController extends videojs.EventTarget {constructor() {super();this.pendingTimelineChanges_ = {};this.lastTimelineChanges_ = {};}clearPendingTimelineChange(type) {this.pendingTimelineChanges_[type] = null;this.trigger('pendingtimelinechange');}pendingTimelineChange({type,from,to}) {if (typeof from === 'number' && typeof to === 'number') {this.pendingTimelineChanges_[type] = {type,from,to};this.trigger('pendingtimelinechange');}return this.pendingTimelineChanges_[type];}lastTimelineChange({type,from,to}) {if (typeof from === 'number' && typeof to === 'number') {this.lastTimelineChanges_[type] = {type,from,to};delete this.pendingTimelineChanges_[type];this.trigger('timelinechange');}return this.lastTimelineChanges_[type];}dispose() {this.trigger('dispose');this.pendingTimelineChanges_ = {};this.lastTimelineChanges_ = {};this.off();}}/* rollup-plugin-worker-factory start for worker!/home/runner/work/http-streaming/http-streaming/src/decrypter-worker.js */const workerCode = transform(getWorkerString(function () {/*** @file stream.js*//*** A lightweight readable stream implemention that handles event dispatching.** @class Stream*/var Stream = /*#__PURE__*/function () {function Stream() {this.listeners = {};}/*** Add a listener for a specified event type.** @param {string} type the event name* @param {Function} listener the callback to be invoked when an event of* the specified type occurs*/var _proto = Stream.prototype;_proto.on = function on(type, listener) {if (!this.listeners[type]) {this.listeners[type] = [];}this.listeners[type].push(listener);}/*** Remove a listener for a specified event type.** @param {string} type the event name* @param {Function} listener a function previously registered for this* type of event through `on`* @return {boolean} if we could turn it off or not*/;_proto.off = function off(type, listener) {if (!this.listeners[type]) {return false;}var index = this.listeners[type].indexOf(listener); // TODO: which is better?// In Video.js we slice listener functions// on trigger so that it does not mess up the order// while we loop through.//// Here we slice on off so that the loop in trigger// can continue using it's old reference to loop without// messing up the order.this.listeners[type] = this.listeners[type].slice(0);this.listeners[type].splice(index, 1);return index > -1;}/*** Trigger an event of the specified type on this stream. Any additional* arguments to this function are passed as parameters to event listeners.** @param {string} type the event name*/;_proto.trigger = function trigger(type) {var callbacks = this.listeners[type];if (!callbacks) {return;} // Slicing the arguments on every invocation of this method// can add a significant amount of overhead. 
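// Illustrative sketch (not part of the original bundle): how the lightweight Stream above is
// used. The pipe() helper defined just below forwards every 'data' event to an object
// exposing push(); the variable names here are hypothetical.
const source = new Stream();
const received = [];

source.pipe({ push: (data) => received.push(data) });
source.trigger('data', 'first chunk');   // received -> ['first chunk']
source.trigger('data', 'second chunk');  // received -> ['first chunk', 'second chunk']
source.dispose();                        // removes all listeners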
Avoid the// intermediate object creation for the common case of a// single callback argumentif (arguments.length === 2) {var length = callbacks.length;for (var i = 0; i < length; ++i) {callbacks[i].call(this, arguments[1]);}} else {var args = Array.prototype.slice.call(arguments, 1);var _length = callbacks.length;for (var _i = 0; _i < _length; ++_i) {callbacks[_i].apply(this, args);}}}/*** Destroys the stream and cleans up.*/;_proto.dispose = function dispose() {this.listeners = {};}/*** Forwards all `data` events on this stream to the destination stream. The* destination stream should provide a method `push` to receive the data* events as they arrive.** @param {Stream} destination the stream that will receive all `data` events* @see http://nodejs.org/api/stream.html#stream_readable_pipe_destination_options*/;_proto.pipe = function pipe(destination) {this.on('data', function (data) {destination.push(data);});};return Stream;}();/*! @name pkcs7 @version 1.0.4 @license Apache-2.0 *//*** Returns the subarray of a Uint8Array without PKCS#7 padding.** @param padded {Uint8Array} unencrypted bytes that have been padded* @return {Uint8Array} the unpadded bytes* @see http://tools.ietf.org/html/rfc5652*/function unpad(padded) {return padded.subarray(0, padded.byteLength - padded[padded.byteLength - 1]);}/*! @name aes-decrypter @version 4.0.1 @license Apache-2.0 *//*** @file aes.js** This file contains an adaptation of the AES decryption algorithm* from the Standford Javascript Cryptography Library. That work is* covered by the following copyright and permissions notice:** Copyright 2009-2010 Emily Stark, Mike Hamburg, Dan Boneh.* All rights reserved.** Redistribution and use in source and binary forms, with or without* modification, are permitted provided that the following conditions are* met:** 1. Redistributions of source code must retain the above copyright* notice, this list of conditions and the following disclaimer.** 2. Redistributions in binary form must reproduce the above* copyright notice, this list of conditions and the following* disclaimer in the documentation and/or other materials provided* with the distribution.** THIS SOFTWARE IS PROVIDED BY THE AUTHORS ``AS IS'' AND ANY EXPRESS OR* IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE* DISCLAIMED. 
IN NO EVENT SHALL <COPYRIGHT HOLDER> OR CONTRIBUTORS BE* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.** The views and conclusions contained in the software and documentation* are those of the authors and should not be interpreted as representing* official policies, either expressed or implied, of the authors.*//*** Expand the S-box tables.** @private*/const precompute = function () {const tables = [[[], [], [], [], []], [[], [], [], [], []]];const encTable = tables[0];const decTable = tables[1];const sbox = encTable[4];const sboxInv = decTable[4];let i;let x;let xInv;const d = [];const th = [];let x2;let x4;let x8;let s;let tEnc;let tDec; // Compute double and third tablesfor (i = 0; i < 256; i++) {th[(d[i] = i << 1 ^ (i >> 7) * 283) ^ i] = i;}for (x = xInv = 0; !sbox[x]; x ^= x2 || 1, xInv = th[xInv] || 1) {// Compute sboxs = xInv ^ xInv << 1 ^ xInv << 2 ^ xInv << 3 ^ xInv << 4;s = s >> 8 ^ s & 255 ^ 99;sbox[x] = s;sboxInv[s] = x; // Compute MixColumnsx8 = d[x4 = d[x2 = d[x]]];tDec = x8 * 0x1010101 ^ x4 * 0x10001 ^ x2 * 0x101 ^ x * 0x1010100;tEnc = d[s] * 0x101 ^ s * 0x1010100;for (i = 0; i < 4; i++) {encTable[i][x] = tEnc = tEnc << 24 ^ tEnc >>> 8;decTable[i][s] = tDec = tDec << 24 ^ tDec >>> 8;}} // Compactify. Considerable speedup on Firefox.for (i = 0; i < 5; i++) {encTable[i] = encTable[i].slice(0);decTable[i] = decTable[i].slice(0);}return tables;};let aesTables = null;/*** Schedule out an AES key for both encryption and decryption. This* is a low-level class. Use a cipher mode to do bulk encryption.** @class AES* @param key {Array} The key as an array of 4, 6 or 8 words.*/class AES {constructor(key) {/*** The expanded S-box and inverse S-box tables. These will be computed* on the client so that we don't have to send them down the wire.** There are two tables, _tables[0] is for encryption and* _tables[1] is for decryption.** The first 4 sub-tables are the expanded S-box with MixColumns. 
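// Illustrative sketch (not part of the original bundle): what the unpad() helper above
// removes. AES-CBC output is a whole number of 16-byte blocks; PKCS#7 stores the pad length
// in every padding byte, so the last byte says how many bytes to drop.
const paddedBlock = new Uint8Array([
  0x68, 0x65, 0x6c, 0x6c, 0x6f,        // 'hello'
  0x0b, 0x0b, 0x0b, 0x0b, 0x0b, 0x0b,  // eleven bytes of padding, each 0x0b (11)
  0x0b, 0x0b, 0x0b, 0x0b, 0x0b
]);
const unpadded = paddedBlock.subarray(0, paddedBlock.byteLength - paddedBlock[paddedBlock.byteLength - 1]);
// unpadded -> Uint8Array [0x68, 0x65, 0x6c, 0x6c, 0x6f] ('hello')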
The* last (_tables[01][4]) is the S-box itself.** @private*/// if we have yet to precompute the S-box tables// do so nowif (!aesTables) {aesTables = precompute();} // then make a copy of that object for usethis._tables = [[aesTables[0][0].slice(), aesTables[0][1].slice(), aesTables[0][2].slice(), aesTables[0][3].slice(), aesTables[0][4].slice()], [aesTables[1][0].slice(), aesTables[1][1].slice(), aesTables[1][2].slice(), aesTables[1][3].slice(), aesTables[1][4].slice()]];let i;let j;let tmp;const sbox = this._tables[0][4];const decTable = this._tables[1];const keyLen = key.length;let rcon = 1;if (keyLen !== 4 && keyLen !== 6 && keyLen !== 8) {throw new Error('Invalid aes key size');}const encKey = key.slice(0);const decKey = [];this._key = [encKey, decKey]; // schedule encryption keysfor (i = keyLen; i < 4 * keyLen + 28; i++) {tmp = encKey[i - 1]; // apply sboxif (i % keyLen === 0 || keyLen === 8 && i % keyLen === 4) {tmp = sbox[tmp >>> 24] << 24 ^ sbox[tmp >> 16 & 255] << 16 ^ sbox[tmp >> 8 & 255] << 8 ^ sbox[tmp & 255]; // shift rows and add rconif (i % keyLen === 0) {tmp = tmp << 8 ^ tmp >>> 24 ^ rcon << 24;rcon = rcon << 1 ^ (rcon >> 7) * 283;}}encKey[i] = encKey[i - keyLen] ^ tmp;} // schedule decryption keysfor (j = 0; i; j++, i--) {tmp = encKey[j & 3 ? i : i - 4];if (i <= 4 || j < 4) {decKey[j] = tmp;} else {decKey[j] = decTable[0][sbox[tmp >>> 24]] ^ decTable[1][sbox[tmp >> 16 & 255]] ^ decTable[2][sbox[tmp >> 8 & 255]] ^ decTable[3][sbox[tmp & 255]];}}}/*** Decrypt 16 bytes, specified as four 32-bit words.** @param {number} encrypted0 the first word to decrypt* @param {number} encrypted1 the second word to decrypt* @param {number} encrypted2 the third word to decrypt* @param {number} encrypted3 the fourth word to decrypt* @param {Int32Array} out the array to write the decrypted words* into* @param {number} offset the offset into the output array to start* writing results* @return {Array} The plaintext.*/decrypt(encrypted0, encrypted1, encrypted2, encrypted3, out, offset) {const key = this._key[1]; // state variables a,b,c,d are loaded with pre-whitened datalet a = encrypted0 ^ key[0];let b = encrypted3 ^ key[1];let c = encrypted2 ^ key[2];let d = encrypted1 ^ key[3];let a2;let b2;let c2; // key.length === 2 ?const nInnerRounds = key.length / 4 - 2;let i;let kIndex = 4;const table = this._tables[1]; // load up the tablesconst table0 = table[0];const table1 = table[1];const table2 = table[2];const table3 = table[3];const sbox = table[4]; // Inner rounds. 
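// Illustrative sketch (not part of the original bundle): the AES class above expects its key
// as 4, 6 or 8 32-bit words (AES-128/192/256) and throws 'Invalid aes key size' otherwise.
// Assuming the raw HLS key arrives as a 16-byte Uint8Array, one way to produce that word
// array is to read it big-endian; keyBytesToWords is a hypothetical helper.
function keyBytesToWords(keyBytes) {
  const view = new DataView(keyBytes.buffer, keyBytes.byteOffset, keyBytes.byteLength);
  const words = [];
  for (let i = 0; i < keyBytes.byteLength; i += 4) {
    words.push(view.getUint32(i)); // getUint32 defaults to big-endian
  }
  return words; // a 16-byte key yields 4 words, a valid input for new AES(words)
}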
Cribbed from OpenSSL.for (i = 0; i < nInnerRounds; i++) {a2 = table0[a >>> 24] ^ table1[b >> 16 & 255] ^ table2[c >> 8 & 255] ^ table3[d & 255] ^ key[kIndex];b2 = table0[b >>> 24] ^ table1[c >> 16 & 255] ^ table2[d >> 8 & 255] ^ table3[a & 255] ^ key[kIndex + 1];c2 = table0[c >>> 24] ^ table1[d >> 16 & 255] ^ table2[a >> 8 & 255] ^ table3[b & 255] ^ key[kIndex + 2];d = table0[d >>> 24] ^ table1[a >> 16 & 255] ^ table2[b >> 8 & 255] ^ table3[c & 255] ^ key[kIndex + 3];kIndex += 4;a = a2;b = b2;c = c2;} // Last round.for (i = 0; i < 4; i++) {out[(3 & -i) + offset] = sbox[a >>> 24] << 24 ^ sbox[b >> 16 & 255] << 16 ^ sbox[c >> 8 & 255] << 8 ^ sbox[d & 255] ^ key[kIndex++];a2 = a;a = b;b = c;c = d;d = a2;}}}/*** @file async-stream.js*//*** A wrapper around the Stream class to use setTimeout* and run stream "jobs" Asynchronously** @class AsyncStream* @extends Stream*/class AsyncStream extends Stream {constructor() {super(Stream);this.jobs = [];this.delay = 1;this.timeout_ = null;}/*** process an async job** @private*/processJob_() {this.jobs.shift()();if (this.jobs.length) {this.timeout_ = setTimeout(this.processJob_.bind(this), this.delay);} else {this.timeout_ = null;}}/*** push a job into the stream** @param {Function} job the job to push into the stream*/push(job) {this.jobs.push(job);if (!this.timeout_) {this.timeout_ = setTimeout(this.processJob_.bind(this), this.delay);}}}/*** @file decrypter.js** An asynchronous implementation of AES-128 CBC decryption with* PKCS#7 padding.*//*** Convert network-order (big-endian) bytes into their little-endian* representation.*/const ntoh = function (word) {return word << 24 | (word & 0xff00) << 8 | (word & 0xff0000) >> 8 | word >>> 24;};/*** Decrypt bytes using AES-128 with CBC and PKCS#7 padding.** @param {Uint8Array} encrypted the encrypted bytes* @param {Uint32Array} key the bytes of the decryption key* @param {Uint32Array} initVector the initialization vector (IV) to* use for the first round of CBC.* @return {Uint8Array} the decrypted bytes** @see http://en.wikipedia.org/wiki/Advanced_Encryption_Standard* @see http://en.wikipedia.org/wiki/Block_cipher_mode_of_operation#Cipher_Block_Chaining_.28CBC.29* @see https://tools.ietf.org/html/rfc2315*/const decrypt = function (encrypted, key, initVector) {// word-level access to the encrypted bytesconst encrypted32 = new Int32Array(encrypted.buffer, encrypted.byteOffset, encrypted.byteLength >> 2);const decipher = new AES(Array.prototype.slice.call(key)); // byte and word-level access for the decrypted outputconst decrypted = new Uint8Array(encrypted.byteLength);const decrypted32 = new Int32Array(decrypted.buffer); // temporary variables for working with the IV, encrypted, and// decrypted datalet init0;let init1;let init2;let init3;let encrypted0;let encrypted1;let encrypted2;let encrypted3; // iteration variablelet wordIx; // pull out the words of the IV to ensure we don't modify the// passed-in reference and easier accessinit0 = initVector[0];init1 = initVector[1];init2 = initVector[2];init3 = initVector[3]; // decrypt four word sequences, applying cipher-block chaining (CBC)// to each decrypted blockfor (wordIx = 0; wordIx < encrypted32.length; wordIx += 4) {// convert big-endian (network order) words into little-endian// (javascript order)encrypted0 = ntoh(encrypted32[wordIx]);encrypted1 = ntoh(encrypted32[wordIx + 1]);encrypted2 = ntoh(encrypted32[wordIx + 2]);encrypted3 = ntoh(encrypted32[wordIx + 3]); // decrypt the blockdecipher.decrypt(encrypted0, encrypted1, encrypted2, encrypted3, decrypted32, 
wordIx); // XOR with the IV, and restore network byte-order to obtain the// plaintextdecrypted32[wordIx] = ntoh(decrypted32[wordIx] ^ init0);decrypted32[wordIx + 1] = ntoh(decrypted32[wordIx + 1] ^ init1);decrypted32[wordIx + 2] = ntoh(decrypted32[wordIx + 2] ^ init2);decrypted32[wordIx + 3] = ntoh(decrypted32[wordIx + 3] ^ init3); // setup the IV for the next roundinit0 = encrypted0;init1 = encrypted1;init2 = encrypted2;init3 = encrypted3;}return decrypted;};/*** The `Decrypter` class that manages decryption of AES* data through `AsyncStream` objects and the `decrypt`* function** @param {Uint8Array} encrypted the encrypted bytes* @param {Uint32Array} key the bytes of the decryption key* @param {Uint32Array} initVector the initialization vector (IV) to* @param {Function} done the function to run when done* @class Decrypter*/class Decrypter {constructor(encrypted, key, initVector, done) {const step = Decrypter.STEP;const encrypted32 = new Int32Array(encrypted.buffer);const decrypted = new Uint8Array(encrypted.byteLength);let i = 0;this.asyncStream_ = new AsyncStream(); // split up the encryption job and do the individual chunks asynchronouslythis.asyncStream_.push(this.decryptChunk_(encrypted32.subarray(i, i + step), key, initVector, decrypted));for (i = step; i < encrypted32.length; i += step) {initVector = new Uint32Array([ntoh(encrypted32[i - 4]), ntoh(encrypted32[i - 3]), ntoh(encrypted32[i - 2]), ntoh(encrypted32[i - 1])]);this.asyncStream_.push(this.decryptChunk_(encrypted32.subarray(i, i + step), key, initVector, decrypted));} // invoke the done() callback when everything is finishedthis.asyncStream_.push(function () {// remove pkcs#7 padding from the decrypted bytesdone(null, unpad(decrypted));});}/*** a getter for step the maximum number of bytes to process at one time** @return {number} the value of step 32000*/static get STEP() {// 4 * 8000;return 32000;}/*** @private*/decryptChunk_(encrypted, key, initVector, decrypted) {return function () {const bytes = decrypt(encrypted, key, initVector);decrypted.set(bytes, encrypted.byteOffset);};}}var commonjsGlobal = typeof globalThis !== 'undefined' ? globalThis : typeof window !== 'undefined' ? window : typeof global !== 'undefined' ? global : typeof self !== 'undefined' ? 
self : {};var win;if (typeof window !== "undefined") {win = window;} else if (typeof commonjsGlobal !== "undefined") {win = commonjsGlobal;} else if (typeof self !== "undefined") {win = self;} else {win = {};}var window_1 = win;var isArrayBufferView = function isArrayBufferView(obj) {if (ArrayBuffer.isView === 'function') {return ArrayBuffer.isView(obj);}return obj && obj.buffer instanceof ArrayBuffer;};var BigInt = window_1.BigInt || Number;[BigInt('0x1'), BigInt('0x100'), BigInt('0x10000'), BigInt('0x1000000'), BigInt('0x100000000'), BigInt('0x10000000000'), BigInt('0x1000000000000'), BigInt('0x100000000000000'), BigInt('0x10000000000000000')];(function () {var a = new Uint16Array([0xFFCC]);var b = new Uint8Array(a.buffer, a.byteOffset, a.byteLength);if (b[0] === 0xFF) {return 'big';}if (b[0] === 0xCC) {return 'little';}return 'unknown';})();/*** Creates an object for sending to a web worker modifying properties that are TypedArrays* into a new object with seperated properties for the buffer, byteOffset, and byteLength.** @param {Object} message* Object of properties and values to send to the web worker* @return {Object}* Modified message with TypedArray values expanded* @function createTransferableMessage*/const createTransferableMessage = function (message) {const transferable = {};Object.keys(message).forEach(key => {const value = message[key];if (isArrayBufferView(value)) {transferable[key] = {bytes: value.buffer,byteOffset: value.byteOffset,byteLength: value.byteLength};} else {transferable[key] = value;}});return transferable;};/* global self *//*** Our web worker interface so that things can talk to aes-decrypter* that will be running in a web worker. the scope is passed to this by* webworkify.*/self.onmessage = function (event) {const data = event.data;const encrypted = new Uint8Array(data.encrypted.bytes, data.encrypted.byteOffset, data.encrypted.byteLength);const key = new Uint32Array(data.key.bytes, data.key.byteOffset, data.key.byteLength / 4);const iv = new Uint32Array(data.iv.bytes, data.iv.byteOffset, data.iv.byteLength / 4);/* eslint-disable no-new, handle-callback-err */new Decrypter(encrypted, key, iv, function (err, bytes) {self.postMessage(createTransferableMessage({source: data.source,decrypted: bytes}), [bytes.buffer]);});/* eslint-enable */};}));var Decrypter = factory(workerCode);/* rollup-plugin-worker-factory end for worker!/home/runner/work/http-streaming/http-streaming/src/decrypter-worker.js *//*** Convert the properties of an HLS track into an audioTrackKind.** @private*/const audioTrackKind_ = properties => {let kind = properties.default ? 
'main' : 'alternative';if (properties.characteristics && properties.characteristics.indexOf('public.accessibility.describes-video') >= 0) {kind = 'main-desc';}return kind;};/*** Pause provided segment loader and playlist loader if active** @param {SegmentLoader} segmentLoader* SegmentLoader to pause* @param {Object} mediaType* Active media type* @function stopLoaders*/const stopLoaders = (segmentLoader, mediaType) => {segmentLoader.abort();segmentLoader.pause();if (mediaType && mediaType.activePlaylistLoader) {mediaType.activePlaylistLoader.pause();mediaType.activePlaylistLoader = null;}};/*** Start loading provided segment loader and playlist loader** @param {PlaylistLoader} playlistLoader* PlaylistLoader to start loading* @param {Object} mediaType* Active media type* @function startLoaders*/const startLoaders = (playlistLoader, mediaType) => {// Segment loader will be started after `loadedmetadata` or `loadedplaylist` from the// playlist loadermediaType.activePlaylistLoader = playlistLoader;playlistLoader.load();};/*** Returns a function to be called when the media group changes. It performs a* non-destructive (preserve the buffer) resync of the SegmentLoader. This is because a* change of group is merely a rendition switch of the same content at another encoding,* rather than a change of content, such as switching audio from English to Spanish.** @param {string} type* MediaGroup type* @param {Object} settings* Object containing required information for media groups* @return {Function}* Handler for a non-destructive resync of SegmentLoader when the active media* group changes.* @function onGroupChanged*/const onGroupChanged = (type, settings) => () => {const {segmentLoaders: {[type]: segmentLoader,main: mainSegmentLoader},mediaTypes: {[type]: mediaType}} = settings;const activeTrack = mediaType.activeTrack();const activeGroup = mediaType.getActiveGroup();const previousActiveLoader = mediaType.activePlaylistLoader;const lastGroup = mediaType.lastGroup_; // the group did not change do nothingif (activeGroup && lastGroup && activeGroup.id === lastGroup.id) {return;}mediaType.lastGroup_ = activeGroup;mediaType.lastTrack_ = activeTrack;stopLoaders(segmentLoader, mediaType);if (!activeGroup || activeGroup.isMainPlaylist) {// there is no group active or active group is a main playlist and won't changereturn;}if (!activeGroup.playlistLoader) {if (previousActiveLoader) {// The previous group had a playlist loader but the new active group does not// this means we are switching from demuxed to muxed audio. In this case we want to// do a destructive reset of the main segment loader and not restart the audio// loaders.mainSegmentLoader.resetEverything();}return;} // Non-destructive resyncsegmentLoader.resyncLoader();startLoaders(activeGroup.playlistLoader, mediaType);};const onGroupChanging = (type, settings) => () => {const {segmentLoaders: {[type]: segmentLoader},mediaTypes: {[type]: mediaType}} = settings;mediaType.lastGroup_ = null;segmentLoader.abort();segmentLoader.pause();};/*** Returns a function to be called when the media track changes. 
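// Illustrative sketch (not part of the original bundle): the HLS rendition -> AudioTrack kind
// mapping performed by audioTrackKind_ above, restated as a standalone function with
// hypothetical sample inputs.
function kindFor(properties) {
  let kind = properties.default ? 'main' : 'alternative';
  if (properties.characteristics &&
      properties.characteristics.indexOf('public.accessibility.describes-video') >= 0) {
    kind = 'main-desc';
  }
  return kind;
}
// kindFor({ default: true }) -> 'main'
// kindFor({ default: false, language: 'es' }) -> 'alternative'
// kindFor({ default: true, characteristics: 'public.accessibility.describes-video' }) -> 'main-desc'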
It performs a* destructive reset of the SegmentLoader to ensure we start loading as close to* currentTime as possible.** @param {string} type* MediaGroup type* @param {Object} settings* Object containing required information for media groups* @return {Function}* Handler for a destructive reset of SegmentLoader when the active media* track changes.* @function onTrackChanged*/const onTrackChanged = (type, settings) => () => {const {mainPlaylistLoader,segmentLoaders: {[type]: segmentLoader,main: mainSegmentLoader},mediaTypes: {[type]: mediaType}} = settings;const activeTrack = mediaType.activeTrack();const activeGroup = mediaType.getActiveGroup();const previousActiveLoader = mediaType.activePlaylistLoader;const lastTrack = mediaType.lastTrack_; // track did not change, do nothingif (lastTrack && activeTrack && lastTrack.id === activeTrack.id) {return;}mediaType.lastGroup_ = activeGroup;mediaType.lastTrack_ = activeTrack;stopLoaders(segmentLoader, mediaType);if (!activeGroup) {// there is no group active so we do not want to restart loadersreturn;}if (activeGroup.isMainPlaylist) {// track did not change, do nothingif (!activeTrack || !lastTrack || activeTrack.id === lastTrack.id) {return;}const pc = settings.vhs.playlistController_;const newPlaylist = pc.selectPlaylist(); // media will not change do nothingif (pc.media() === newPlaylist) {return;}mediaType.logger_(`track change. Switching main audio from ${lastTrack.id} to ${activeTrack.id}`);mainPlaylistLoader.pause();mainSegmentLoader.resetEverything();pc.fastQualityChange_(newPlaylist);return;}if (type === 'AUDIO') {if (!activeGroup.playlistLoader) {// when switching from demuxed audio/video to muxed audio/video (noted by no// playlist loader for the audio group), we want to do a destructive reset of the// main segment loader and not restart the audio loadersmainSegmentLoader.setAudio(true); // don't have to worry about disabling the audio of the audio segment loader since// it should be stoppedmainSegmentLoader.resetEverything();return;} // although the segment loader is an audio segment loader, call the setAudio// function to ensure it is prepared to re-append the init segment (or handle other// config changes)segmentLoader.setAudio(true);mainSegmentLoader.setAudio(false);}if (previousActiveLoader === activeGroup.playlistLoader) {// Nothing has actually changed. This can happen because track change events can fire// multiple times for a "single" change. One for enabling the new active track, and// one for disabling the track that was activestartLoaders(activeGroup.playlistLoader, mediaType);return;}if (segmentLoader.track) {// For WebVTT, set the new text track in the segmentloadersegmentLoader.track(activeTrack);} // destructive resetsegmentLoader.resetEverything();startLoaders(activeGroup.playlistLoader, mediaType);};const onError = {/*** Returns a function to be called when a SegmentLoader or PlaylistLoader encounters* an error.** @param {string} type* MediaGroup type* @param {Object} settings* Object containing required information for media groups* @return {Function}* Error handler. 
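// Illustrative sketch (not part of the original bundle): the "switch back to the default
// audio track" step that the AUDIO error handler below performs. The tracks object, groups
// array and enableOnlyDefaultTrack name are hypothetical.
function enableOnlyDefaultTrack(tracks, groups) {
  const defaultGroup = groups.filter((group) => group.default)[0] || groups[0];
  const defaultTrack = tracks[defaultGroup.id];
  for (const trackId in tracks) {
    tracks[trackId].enabled = tracks[trackId] === defaultTrack;
  }
  return defaultTrack;
}

// enableOnlyDefaultTrack(
//   { en: { id: 'en', enabled: false }, es: { id: 'es', enabled: true } },
//   [{ id: 'en', default: true }, { id: 'es' }]
// ) re-enables 'en' and disables 'es'.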
Logs warning (or error if the playlist is excluded) to* console and switches back to default audio track.* @function onError.AUDIO*/AUDIO: (type, settings) => () => {const {mediaTypes: {[type]: mediaType},excludePlaylist} = settings; // switch back to default audio trackconst activeTrack = mediaType.activeTrack();const activeGroup = mediaType.activeGroup();const id = (activeGroup.filter(group => group.default)[0] || activeGroup[0]).id;const defaultTrack = mediaType.tracks[id];if (activeTrack === defaultTrack) {// Default track encountered an error. All we can do now is exclude the current// rendition and hope another will switch audio groupsexcludePlaylist({error: {message: 'Problem encountered loading the default audio track.'}});return;}videojs.log.warn('Problem encountered loading the alternate audio track.' + 'Switching back to default.');for (const trackId in mediaType.tracks) {mediaType.tracks[trackId].enabled = mediaType.tracks[trackId] === defaultTrack;}mediaType.onTrackChanged();},/*** Returns a function to be called when a SegmentLoader or PlaylistLoader encounters* an error.** @param {string} type* MediaGroup type* @param {Object} settings* Object containing required information for media groups* @return {Function}* Error handler. Logs warning to console and disables the active subtitle track* @function onError.SUBTITLES*/SUBTITLES: (type, settings) => () => {const {mediaTypes: {[type]: mediaType}} = settings;videojs.log.warn('Problem encountered loading the subtitle track.' + 'Disabling subtitle track.');const track = mediaType.activeTrack();if (track) {track.mode = 'disabled';}mediaType.onTrackChanged();}};const setupListeners = {/*** Setup event listeners for audio playlist loader** @param {string} type* MediaGroup type* @param {PlaylistLoader|null} playlistLoader* PlaylistLoader to register listeners on* @param {Object} settings* Object containing required information for media groups* @function setupListeners.AUDIO*/AUDIO: (type, playlistLoader, settings) => {if (!playlistLoader) {// no playlist loader means audio will be muxed with the videoreturn;}const {tech,requestOptions,segmentLoaders: {[type]: segmentLoader}} = settings;playlistLoader.on('loadedmetadata', () => {const media = playlistLoader.media();segmentLoader.playlist(media, requestOptions); // if the video is already playing, or if this isn't a live video and preload// permits, start downloading segmentsif (!tech.paused() || media.endList && tech.preload() !== 'none') {segmentLoader.load();}});playlistLoader.on('loadedplaylist', () => {segmentLoader.playlist(playlistLoader.media(), requestOptions); // If the player isn't paused, ensure that the segment loader is runningif (!tech.paused()) {segmentLoader.load();}});playlistLoader.on('error', onError[type](type, settings));},/*** Setup event listeners for subtitle playlist loader** @param {string} type* MediaGroup type* @param {PlaylistLoader|null} playlistLoader* PlaylistLoader to register listeners on* @param {Object} settings* Object containing required information for media groups* @function setupListeners.SUBTITLES*/SUBTITLES: (type, playlistLoader, settings) => {const {tech,requestOptions,segmentLoaders: {[type]: segmentLoader},mediaTypes: {[type]: mediaType}} = settings;playlistLoader.on('loadedmetadata', () => {const media = playlistLoader.media();segmentLoader.playlist(media, requestOptions);segmentLoader.track(mediaType.activeTrack()); // if the video is already playing, or if this isn't a live video and preload// permits, start downloading segmentsif 
(!tech.paused() || media.endList && tech.preload() !== 'none') {segmentLoader.load();}});playlistLoader.on('loadedplaylist', () => {segmentLoader.playlist(playlistLoader.media(), requestOptions); // If the player isn't paused, ensure that the segment loader is runningif (!tech.paused()) {segmentLoader.load();}});playlistLoader.on('error', onError[type](type, settings));}};const initialize = {/*** Setup PlaylistLoaders and AudioTracks for the audio groups** @param {string} type* MediaGroup type* @param {Object} settings* Object containing required information for media groups* @function initialize.AUDIO*/'AUDIO': (type, settings) => {const {vhs,sourceType,segmentLoaders: {[type]: segmentLoader},requestOptions,main: {mediaGroups},mediaTypes: {[type]: {groups,tracks,logger_}},mainPlaylistLoader} = settings;const audioOnlyMain = isAudioOnly(mainPlaylistLoader.main); // force a default if we have noneif (!mediaGroups[type] || Object.keys(mediaGroups[type]).length === 0) {mediaGroups[type] = {main: {default: {default: true}}};if (audioOnlyMain) {mediaGroups[type].main.default.playlists = mainPlaylistLoader.main.playlists;}}for (const groupId in mediaGroups[type]) {if (!groups[groupId]) {groups[groupId] = [];}for (const variantLabel in mediaGroups[type][groupId]) {let properties = mediaGroups[type][groupId][variantLabel];let playlistLoader;if (audioOnlyMain) {logger_(`AUDIO group '${groupId}' label '${variantLabel}' is a main playlist`);properties.isMainPlaylist = true;playlistLoader = null; // if vhs-json was provided as the source, and the media playlist was resolved,// use the resolved media playlist object} else if (sourceType === 'vhs-json' && properties.playlists) {playlistLoader = new PlaylistLoader(properties.playlists[0], vhs, requestOptions);} else if (properties.resolvedUri) {playlistLoader = new PlaylistLoader(properties.resolvedUri, vhs, requestOptions); // TODO: dash isn't the only type with properties.playlists// should we even have properties.playlists in this check.} else if (properties.playlists && sourceType === 'dash') {playlistLoader = new DashPlaylistLoader(properties.playlists[0], vhs, requestOptions, mainPlaylistLoader);} else {// no resolvedUri means the audio is muxed with the video when using this// audio trackplaylistLoader = null;}properties = merge({id: variantLabel,playlistLoader}, properties);setupListeners[type](type, properties.playlistLoader, settings);groups[groupId].push(properties);if (typeof tracks[variantLabel] === 'undefined') {const track = new videojs.AudioTrack({id: variantLabel,kind: audioTrackKind_(properties),enabled: false,language: properties.language,default: properties.default,label: variantLabel});tracks[variantLabel] = track;}}} // setup single error event handler for the segment loadersegmentLoader.on('error', onError[type](type, settings));},/*** Setup PlaylistLoaders and TextTracks for the subtitle groups** @param {string} type* MediaGroup type* @param {Object} settings* Object containing required information for media groups* @function initialize.SUBTITLES*/'SUBTITLES': (type, settings) => {const {tech,vhs,sourceType,segmentLoaders: {[type]: segmentLoader},requestOptions,main: {mediaGroups},mediaTypes: {[type]: {groups,tracks}},mainPlaylistLoader} = settings;for (const groupId in mediaGroups[type]) {if (!groups[groupId]) {groups[groupId] = [];}for (const variantLabel in mediaGroups[type][groupId]) {if (!vhs.options_.useForcedSubtitles && mediaGroups[type][groupId][variantLabel].forced) {// Subtitle playlists with the forced attribute are 
not selectable in Safari.// According to Apple's HLS Authoring Specification:// If content has forced subtitles and regular subtitles in a given language,// the regular subtitles track in that language MUST contain both the forced// subtitles and the regular subtitles for that language.// Because of this requirement and that Safari does not add forced subtitles,// forced subtitles are skipped here to maintain consistent experience across// all platformscontinue;}let properties = mediaGroups[type][groupId][variantLabel];let playlistLoader;if (sourceType === 'hls') {playlistLoader = new PlaylistLoader(properties.resolvedUri, vhs, requestOptions);} else if (sourceType === 'dash') {const playlists = properties.playlists.filter(p => p.excludeUntil !== Infinity);if (!playlists.length) {return;}playlistLoader = new DashPlaylistLoader(properties.playlists[0], vhs, requestOptions, mainPlaylistLoader);} else if (sourceType === 'vhs-json') {playlistLoader = new PlaylistLoader(// if the vhs-json object included the media playlist, use the media playlist// as provided, otherwise use the resolved URI to load the playlistproperties.playlists ? properties.playlists[0] : properties.resolvedUri, vhs, requestOptions);}properties = merge({id: variantLabel,playlistLoader}, properties);setupListeners[type](type, properties.playlistLoader, settings);groups[groupId].push(properties);if (typeof tracks[variantLabel] === 'undefined') {const track = tech.addRemoteTextTrack({id: variantLabel,kind: 'subtitles',default: properties.default && properties.autoselect,language: properties.language,label: variantLabel}, false).track;tracks[variantLabel] = track;}}} // setup single error event handler for the segment loadersegmentLoader.on('error', onError[type](type, settings));},/*** Setup TextTracks for the closed-caption groups** @param {String} type* MediaGroup type* @param {Object} settings* Object containing required information for media groups* @function initialize['CLOSED-CAPTIONS']*/'CLOSED-CAPTIONS': (type, settings) => {const {tech,main: {mediaGroups},mediaTypes: {[type]: {groups,tracks}}} = settings;for (const groupId in mediaGroups[type]) {if (!groups[groupId]) {groups[groupId] = [];}for (const variantLabel in mediaGroups[type][groupId]) {const properties = mediaGroups[type][groupId][variantLabel]; // Look for either 608 (CCn) or 708 (SERVICEn) caption servicesif (!/^(?:CC|SERVICE)/.test(properties.instreamId)) {continue;}const captionServices = tech.options_.vhs && tech.options_.vhs.captionServices || {};let newProps = {label: variantLabel,language: properties.language,instreamId: properties.instreamId,default: properties.default && properties.autoselect};if (captionServices[newProps.instreamId]) {newProps = merge(newProps, captionServices[newProps.instreamId]);}if (newProps.default === undefined) {delete newProps.default;} // No PlaylistLoader is required for Closed-Captions because the captions are// embedded within the video streamgroups[groupId].push(merge({id: variantLabel}, properties));if (typeof tracks[variantLabel] === 'undefined') {const track = tech.addRemoteTextTrack({id: newProps.instreamId,kind: 'captions',default: newProps.default,language: newProps.language,label: newProps.label}, false).track;tracks[variantLabel] = track;}}}}};const groupMatch = (list, media) => {for (let i = 0; i < list.length; i++) {if (playlistMatch(media, list[i])) {return true;}if (list[i].playlists && groupMatch(list[i].playlists, media)) {return true;}}return false;};/*** Returns a function used to get the active group 
of the provided type** @param {string} type* MediaGroup type* @param {Object} settings* Object containing required information for media groups* @return {Function}* Function that returns the active media group for the provided type. Takes an* optional parameter {TextTrack} track. If no track is provided, a list of all* variants in the group, otherwise the variant corresponding to the provided* track is returned.* @function activeGroup*/const activeGroup = (type, settings) => track => {const {mainPlaylistLoader,mediaTypes: {[type]: {groups}}} = settings;const media = mainPlaylistLoader.media();if (!media) {return null;}let variants = null; // set to variants to main media active groupif (media.attributes[type]) {variants = groups[media.attributes[type]];}const groupKeys = Object.keys(groups);if (!variants) {// find the mainPlaylistLoader media// that is in a media group if we are dealing// with audio onlyif (type === 'AUDIO' && groupKeys.length > 1 && isAudioOnly(settings.main)) {for (let i = 0; i < groupKeys.length; i++) {const groupPropertyList = groups[groupKeys[i]];if (groupMatch(groupPropertyList, media)) {variants = groupPropertyList;break;}} // use the main group if it exists} else if (groups.main) {variants = groups.main; // only one group, use that one} else if (groupKeys.length === 1) {variants = groups[groupKeys[0]];}}if (typeof track === 'undefined') {return variants;}if (track === null || !variants) {// An active track was specified so a corresponding group is expected. track === null// means no track is currently active so there is no corresponding groupreturn null;}return variants.filter(props => props.id === track.id)[0] || null;};const activeTrack = {/*** Returns a function used to get the active track of type provided** @param {string} type* MediaGroup type* @param {Object} settings* Object containing required information for media groups* @return {Function}* Function that returns the active media track for the provided type. Returns* null if no track is active* @function activeTrack.AUDIO*/AUDIO: (type, settings) => () => {const {mediaTypes: {[type]: {tracks}}} = settings;for (const id in tracks) {if (tracks[id].enabled) {return tracks[id];}}return null;},/*** Returns a function used to get the active track of type provided** @param {string} type* MediaGroup type* @param {Object} settings* Object containing required information for media groups* @return {Function}* Function that returns the active media track for the provided type. 
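// A self-contained sketch of how "active" is detected for each kind of track: audio
// tracks are active when `enabled`, text tracks when their mode is 'showing' or
// 'hidden'. The track maps below are assumed sample data.
const audioTracksSample = { en: { enabled: false }, es: { enabled: true } };
const textTracksSample = { en: { mode: 'disabled' }, es: { mode: 'hidden' } };
const activeAudioId = Object.keys(audioTracksSample).find(id => audioTracksSample[id].enabled) || null;
const activeTextId = Object.keys(textTracksSample).find(id => {
  const mode = textTracksSample[id].mode;
  return mode === 'showing' || mode === 'hidden';
}) || null;
console.log(activeAudioId, activeTextId); // "es" "es"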
Returns* null if no track is active* @function activeTrack.SUBTITLES*/SUBTITLES: (type, settings) => () => {const {mediaTypes: {[type]: {tracks}}} = settings;for (const id in tracks) {if (tracks[id].mode === 'showing' || tracks[id].mode === 'hidden') {return tracks[id];}}return null;}};const getActiveGroup = (type, {mediaTypes}) => () => {const activeTrack_ = mediaTypes[type].activeTrack();if (!activeTrack_) {return null;}return mediaTypes[type].activeGroup(activeTrack_);};/*** Setup PlaylistLoaders and Tracks for media groups (Audio, Subtitles,* Closed-Captions) specified in the main manifest.** @param {Object} settings* Object containing required information for setting up the media groups* @param {Tech} settings.tech* The tech of the player* @param {Object} settings.requestOptions* XHR request options used by the segment loaders* @param {PlaylistLoader} settings.mainPlaylistLoader* PlaylistLoader for the main source* @param {VhsHandler} settings.vhs* VHS SourceHandler* @param {Object} settings.main* The parsed main manifest* @param {Object} settings.mediaTypes* Object to store the loaders, tracks, and utility methods for each media type* @param {Function} settings.excludePlaylist* Excludes the current rendition and forces a rendition switch.* @function setupMediaGroups*/const setupMediaGroups = settings => {['AUDIO', 'SUBTITLES', 'CLOSED-CAPTIONS'].forEach(type => {initialize[type](type, settings);});const {mediaTypes,mainPlaylistLoader,tech,vhs,segmentLoaders: {['AUDIO']: audioSegmentLoader,main: mainSegmentLoader}} = settings; // setup active group and track getters and change event handlers['AUDIO', 'SUBTITLES'].forEach(type => {mediaTypes[type].activeGroup = activeGroup(type, settings);mediaTypes[type].activeTrack = activeTrack[type](type, settings);mediaTypes[type].onGroupChanged = onGroupChanged(type, settings);mediaTypes[type].onGroupChanging = onGroupChanging(type, settings);mediaTypes[type].onTrackChanged = onTrackChanged(type, settings);mediaTypes[type].getActiveGroup = getActiveGroup(type, settings);}); // DO NOT enable the default subtitle or caption track.// DO enable the default audio trackconst audioGroup = mediaTypes.AUDIO.activeGroup();if (audioGroup) {const groupId = (audioGroup.filter(group => group.default)[0] || audioGroup[0]).id;mediaTypes.AUDIO.tracks[groupId].enabled = true;mediaTypes.AUDIO.onGroupChanged();mediaTypes.AUDIO.onTrackChanged();const activeAudioGroup = mediaTypes.AUDIO.getActiveGroup(); // a similar check for handling setAudio on each loader is run again each time the// track is changed, but needs to be handled here since the track may not be considered// changed on the first call to onTrackChangedif (!activeAudioGroup.playlistLoader) {// either audio is muxed with video or the stream is audio onlymainSegmentLoader.setAudio(true);} else {// audio is demuxedmainSegmentLoader.setAudio(false);audioSegmentLoader.setAudio(true);}}mainPlaylistLoader.on('mediachange', () => {['AUDIO', 'SUBTITLES'].forEach(type => mediaTypes[type].onGroupChanged());});mainPlaylistLoader.on('mediachanging', () => {['AUDIO', 'SUBTITLES'].forEach(type => mediaTypes[type].onGroupChanging());}); // custom audio track change event handler for usage eventconst onAudioTrackChanged = () => {mediaTypes.AUDIO.onTrackChanged();tech.trigger({type: 'usage',name: 'vhs-audio-change'});};tech.audioTracks().addEventListener('change', onAudioTrackChanged);tech.remoteTextTracks().addEventListener('change', mediaTypes.SUBTITLES.onTrackChanged);vhs.on('dispose', () => 
{tech.audioTracks().removeEventListener('change', onAudioTrackChanged);tech.remoteTextTracks().removeEventListener('change', mediaTypes.SUBTITLES.onTrackChanged);}); // clear existing audio tracks and add the ones we just createdtech.clearTracks('audio');for (const id in mediaTypes.AUDIO.tracks) {tech.audioTracks().addTrack(mediaTypes.AUDIO.tracks[id]);}};/*** Creates skeleton object used to store the loaders, tracks, and utility methods for each* media type** @return {Object}* Object to store the loaders, tracks, and utility methods for each media type* @function createMediaTypes*/const createMediaTypes = () => {const mediaTypes = {};['AUDIO', 'SUBTITLES', 'CLOSED-CAPTIONS'].forEach(type => {mediaTypes[type] = {groups: {},tracks: {},activePlaylistLoader: null,activeGroup: noop,activeTrack: noop,getActiveGroup: noop,onGroupChanged: noop,onTrackChanged: noop,lastTrack_: null,logger_: logger(`MediaGroups[${type}]`)};});return mediaTypes;};/*** A utility class for setting properties and maintaining the state of the content steering manifest.** Content Steering manifest format:* VERSION: number (required) currently only version 1 is supported.* TTL: number in seconds (optional) until the next content steering manifest reload.* RELOAD-URI: string (optional) uri to fetch the next content steering manifest.* SERVICE-LOCATION-PRIORITY or PATHWAY-PRIORITY a non empty array of unique string values.* PATHWAY-CLONES: array (optional) (HLS only) pathway clone objects to copy from other playlists.*/class SteeringManifest {constructor() {this.priority_ = [];this.pathwayClones_ = new Map();}set version(number) {// Only version 1 is currently supported for both DASH and HLS.if (number === 1) {this.version_ = number;}}set ttl(seconds) {// TTL = time-to-live, default = 300 seconds.this.ttl_ = seconds || 300;}set reloadUri(uri) {if (uri) {// reload URI can be relative to the previous reloadUri.this.reloadUri_ = resolveUrl(this.reloadUri_, uri);}}set priority(array) {// priority must be non-empty and unique values.if (array && array.length) {this.priority_ = array;}}set pathwayClones(array) {// pathwayClones must be non-empty.if (array && array.length) {this.pathwayClones_ = new Map(array.map(clone => [clone.ID, clone]));}}get version() {return this.version_;}get ttl() {return this.ttl_;}get reloadUri() {return this.reloadUri_;}get priority() {return this.priority_;}get pathwayClones() {return this.pathwayClones_;}}/*** This class represents a content steering manifest and associated state. 
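// A usage sketch for the SteeringManifest setters defined just above; the response
// object is invented sample data (HLS names shown; DASH uses
// SERVICE-LOCATION-PRIORITY instead of PATHWAY-PRIORITY).
const sampleSteeringResponse = {
  VERSION: 1,                              // only version 1 is accepted
  TTL: 120,                                // seconds until the next reload (default 300)
  'PATHWAY-PRIORITY': ['cdn-a', 'cdn-b']   // non-empty list of unique pathway ids
};
const sampleManifest = new SteeringManifest();
sampleManifest.version = sampleSteeringResponse.VERSION;
sampleManifest.ttl = sampleSteeringResponse.TTL;
sampleManifest.priority = sampleSteeringResponse['PATHWAY-PRIORITY'];
console.log(sampleManifest.version, sampleManifest.ttl, sampleManifest.priority);
// 1 120 ["cdn-a", "cdn-b"]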
See both HLS and DASH specifications.* HLS: https://developer.apple.com/streaming/HLSContentSteeringSpecification.pdf and* https://datatracker.ietf.org/doc/draft-pantos-hls-rfc8216bis/ section 4.4.6.6.* DASH: https://dashif.org/docs/DASH-IF-CTS-00XX-Content-Steering-Community-Review.pdf** @param {function} xhr for making a network request from the browser.* @param {function} bandwidth for fetching the current bandwidth from the main segment loader.*/class ContentSteeringController extends videojs.EventTarget {constructor(xhr, bandwidth) {super();this.currentPathway = null;this.defaultPathway = null;this.queryBeforeStart = false;this.availablePathways_ = new Set();this.steeringManifest = new SteeringManifest();this.proxyServerUrl_ = null;this.manifestType_ = null;this.ttlTimeout_ = null;this.request_ = null;this.currentPathwayClones = new Map();this.nextPathwayClones = new Map();this.excludedSteeringManifestURLs = new Set();this.logger_ = logger('Content Steering');this.xhr_ = xhr;this.getBandwidth_ = bandwidth;}/*** Assigns the content steering tag properties to the steering controller** @param {string} baseUrl the baseURL from the main manifest for resolving the steering manifest url* @param {Object} steeringTag the content steering tag from the main manifest*/assignTagProperties(baseUrl, steeringTag) {this.manifestType_ = steeringTag.serverUri ? 'HLS' : 'DASH'; // serverUri is HLS serverURL is DASHconst steeringUri = steeringTag.serverUri || steeringTag.serverURL;if (!steeringUri) {this.logger_(`steering manifest URL is ${steeringUri}, cannot request steering manifest.`);this.trigger('error');return;} // Content steering manifests can be encoded as a data URI. We can decode, parse and return early if that's the case.if (steeringUri.startsWith('data:')) {this.decodeDataUriManifest_(steeringUri.substring(steeringUri.indexOf(',') + 1));return;} // reloadUri is the resolution of the main manifest URL and steering URL.this.steeringManifest.reloadUri = resolveUrl(baseUrl, steeringUri); // pathwayId is HLS defaultServiceLocation is DASHthis.defaultPathway = steeringTag.pathwayId || steeringTag.defaultServiceLocation; // currently only DASH supports the following properties on <ContentSteering> tags.this.queryBeforeStart = steeringTag.queryBeforeStart;this.proxyServerUrl_ = steeringTag.proxyServerURL; // trigger a steering event if we have a pathway from the content steering tag.// this tells VHS which segment pathway to start with.// If queryBeforeStart is true we need to wait for the steering manifest response.if (this.defaultPathway && !this.queryBeforeStart) {this.trigger('content-steering');}}/*** Requests the content steering manifest and parse the response. This should only be called after* assignTagProperties was called with a content steering tag.** @param {string} initialUri The optional uri to make the request with.* If set, the request should be made with exactly what is passed in this variable.* This scenario should only happen once on initalization.*/requestSteeringManifest(initial) {const reloadUri = this.steeringManifest.reloadUri;if (!reloadUri) {return;} // We currently don't support passing MPD query parameters directly to the content steering URL as this requires// ExtUrlQueryInfo tag support. See the DASH content steering spec section 8.1.// This request URI accounts for manifest URIs that have been excluded.const uri = initial ? 
reloadUri : this.getRequestURI(reloadUri); // If there are no valid manifest URIs, we should stop content steering.if (!uri) {this.logger_('No valid content steering manifest URIs. Stopping content steering.');this.trigger('error');this.dispose();return;}this.request_ = this.xhr_({uri}, (error, errorInfo) => {if (error) {// If the client receives HTTP 410 Gone in response to a manifest request,// it MUST NOT issue another request for that URI for the remainder of the// playback session. It MAY continue to use the most-recently obtained set// of Pathways.if (errorInfo.status === 410) {this.logger_(`manifest request 410 ${error}.`);this.logger_(`There will be no more content steering requests to ${uri} this session.`);this.excludedSteeringManifestURLs.add(uri);return;} // If the client receives HTTP 429 Too Many Requests with a Retry-After// header in response to a manifest request, it SHOULD wait until the time// specified by the Retry-After header to reissue the request.if (errorInfo.status === 429) {const retrySeconds = errorInfo.responseHeaders['retry-after'];this.logger_(`manifest request 429 ${error}.`);this.logger_(`content steering will retry in ${retrySeconds} seconds.`);this.startTTLTimeout_(parseInt(retrySeconds, 10));return;} // If the Steering Manifest cannot be loaded and parsed correctly, the// client SHOULD continue to use the previous values and attempt to reload// it after waiting for the previously-specified TTL (or 5 minutes if// none).this.logger_(`manifest failed to load ${error}.`);this.startTTLTimeout_();return;}const steeringManifestJson = JSON.parse(this.request_.responseText);this.assignSteeringProperties_(steeringManifestJson);this.startTTLTimeout_();});}/*** Set the proxy server URL and add the steering manifest url as a URI encoded parameter.** @param {string} steeringUrl the steering manifest url* @return the steering manifest url to a proxy server with all parameters set*/setProxyServerUrl_(steeringUrl) {const steeringUrlObject = new window.URL(steeringUrl);const proxyServerUrlObject = new window.URL(this.proxyServerUrl_);proxyServerUrlObject.searchParams.set('url', encodeURI(steeringUrlObject.toString()));return this.setSteeringParams_(proxyServerUrlObject.toString());}/*** Decodes and parses the data uri encoded steering manifest** @param {string} dataUri the data uri to be decoded and parsed.*/decodeDataUriManifest_(dataUri) {const steeringManifestJson = JSON.parse(window.atob(dataUri));this.assignSteeringProperties_(steeringManifestJson);}/*** Set the HLS or DASH content steering manifest request query parameters. 
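// A standalone sketch of appending the steering query parameters with the URL API,
// the same approach setSteeringParams_ (below) takes; the host, pathway, and
// throughput values here are assumptions for illustration.
const steeringRequestUrl = new window.URL('https://steering.example.com/manifest');
steeringRequestUrl.searchParams.set('_HLS_pathway', 'cdn-a');
steeringRequestUrl.searchParams.set('_HLS_throughput', String(2500000));
console.log(steeringRequestUrl.toString());
// "https://steering.example.com/manifest?_HLS_pathway=cdn-a&_HLS_throughput=2500000"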
For example:* _HLS_pathway="<CURRENT-PATHWAY-ID>" and _HLS_throughput=<THROUGHPUT>* _DASH_pathway and _DASH_throughput** @param {string} uri to add content steering server parameters to.* @return a new uri as a string with the added steering query parameters.*/setSteeringParams_(url) {const urlObject = new window.URL(url);const path = this.getPathway();const networkThroughput = this.getBandwidth_();if (path) {const pathwayKey = `_${this.manifestType_}_pathway`;urlObject.searchParams.set(pathwayKey, path);}if (networkThroughput) {const throughputKey = `_${this.manifestType_}_throughput`;urlObject.searchParams.set(throughputKey, networkThroughput);}return urlObject.toString();}/*** Assigns the current steering manifest properties and to the SteeringManifest object** @param {Object} steeringJson the raw JSON steering manifest*/assignSteeringProperties_(steeringJson) {this.steeringManifest.version = steeringJson.VERSION;if (!this.steeringManifest.version) {this.logger_(`manifest version is ${steeringJson.VERSION}, which is not supported.`);this.trigger('error');return;}this.steeringManifest.ttl = steeringJson.TTL;this.steeringManifest.reloadUri = steeringJson['RELOAD-URI']; // HLS = PATHWAY-PRIORITY required. DASH = SERVICE-LOCATION-PRIORITY optionalthis.steeringManifest.priority = steeringJson['PATHWAY-PRIORITY'] || steeringJson['SERVICE-LOCATION-PRIORITY']; // Pathway clones to be created/updated in HLS.// See section 7.2 https://datatracker.ietf.org/doc/draft-pantos-hls-rfc8216bis/this.steeringManifest.pathwayClones = steeringJson['PATHWAY-CLONES'];this.nextPathwayClones = this.steeringManifest.pathwayClones; // 1. apply first pathway from the array.// 2. if first pathway doesn't exist in manifest, try next pathway.// a. if all pathways are exhausted, ignore the steering manifest priority.// 3. if segments fail from an established pathway, try all variants/renditions, then exclude the failed pathway.// a. exclude a pathway for a minimum of the last TTL duration. Meaning, from the next steering response,// the excluded pathway will be ignored.// See excludePathway usage in excludePlaylist().// If there are no available pathways, we need to stop content steering.if (!this.availablePathways_.size) {this.logger_('There are no available pathways for content steering. 
Ending content steering.');this.trigger('error');this.dispose();}const chooseNextPathway = pathwaysByPriority => {for (const path of pathwaysByPriority) {if (this.availablePathways_.has(path)) {return path;}} // If no pathway matches, ignore the manifest and choose the first available.return [...this.availablePathways_][0];};const nextPathway = chooseNextPathway(this.steeringManifest.priority);if (this.currentPathway !== nextPathway) {this.currentPathway = nextPathway;this.trigger('content-steering');}}/*** Returns the pathway to use for steering decisions** @return {string} returns the current pathway or the default*/getPathway() {return this.currentPathway || this.defaultPathway;}/*** Chooses the manifest request URI based on proxy URIs and server URLs.* Also accounts for exclusion on certain manifest URIs.** @param {string} reloadUri the base uri before parameters** @return {string} the final URI for the request to the manifest server.*/getRequestURI(reloadUri) {if (!reloadUri) {return null;}const isExcluded = uri => this.excludedSteeringManifestURLs.has(uri);if (this.proxyServerUrl_) {const proxyURI = this.setProxyServerUrl_(reloadUri);if (!isExcluded(proxyURI)) {return proxyURI;}}const steeringURI = this.setSteeringParams_(reloadUri);if (!isExcluded(steeringURI)) {return steeringURI;} // Return nothing if all valid manifest URIs are excluded.return null;}/*** Start the timeout for re-requesting the steering manifest at the TTL interval.** @param {number} ttl time in seconds of the timeout. Defaults to the* ttl interval in the steering manifest*/startTTLTimeout_(ttl = this.steeringManifest.ttl) {// 300 (5 minutes) is the default value.const ttlMS = ttl * 1000;this.ttlTimeout_ = window.setTimeout(() => {this.requestSteeringManifest();}, ttlMS);}/*** Clear the TTL timeout if necessary.*/clearTTLTimeout_() {window.clearTimeout(this.ttlTimeout_);this.ttlTimeout_ = null;}/*** aborts any current steering xhr and sets the current request object to null*/abort() {if (this.request_) {this.request_.abort();}this.request_ = null;}/*** aborts steering requests clears the ttl timeout and resets all properties.*/dispose() {this.off('content-steering');this.off('error');this.abort();this.clearTTLTimeout_();this.currentPathway = null;this.defaultPathway = null;this.queryBeforeStart = null;this.proxyServerUrl_ = null;this.manifestType_ = null;this.ttlTimeout_ = null;this.request_ = null;this.excludedSteeringManifestURLs = new Set();this.availablePathways_ = new Set();this.steeringManifest = new SteeringManifest();}/*** adds a pathway to the available pathways set** @param {string} pathway the pathway string to add*/addAvailablePathway(pathway) {if (pathway) {this.availablePathways_.add(pathway);}}/*** Clears all pathways from the available pathways set*/clearAvailablePathways() {this.availablePathways_.clear();}/*** Removes a pathway from the available pathways set.*/excludePathway(pathway) {return this.availablePathways_.delete(pathway);}/*** Checks the refreshed DASH manifest content steering tag for changes.** @param {string} baseURL new steering tag on DASH manifest refresh* @param {Object} newTag the new tag to check for changes* @return a true or false whether the new tag has different values*/didDASHTagChange(baseURL, newTag) {return !newTag && this.steeringManifest.reloadUri || newTag && (resolveUrl(baseURL, newTag.serverURL) !== this.steeringManifest.reloadUri || newTag.defaultServiceLocation !== this.defaultPathway || newTag.queryBeforeStart !== this.queryBeforeStart || newTag.proxyServerURL 
!== this.proxyServerUrl_);}getAvailablePathways() {return this.availablePathways_;}}/*** @file playlist-controller.js*/const ABORT_EARLY_EXCLUSION_SECONDS = 10;let Vhs$1; // SegmentLoader stats that need to have each loader's// values summed to calculate the final valueconst loaderStats = ['mediaRequests', 'mediaRequestsAborted', 'mediaRequestsTimedout', 'mediaRequestsErrored', 'mediaTransferDuration', 'mediaBytesTransferred', 'mediaAppends'];const sumLoaderStat = function (stat) {return this.audioSegmentLoader_[stat] + this.mainSegmentLoader_[stat];};const shouldSwitchToMedia = function ({currentPlaylist,buffered,currentTime,nextPlaylist,bufferLowWaterLine,bufferHighWaterLine,duration,bufferBasedABR,log}) {// we have no other playlist to switch toif (!nextPlaylist) {videojs.log.warn('We received no playlist to switch to. Please check your stream.');return false;}const sharedLogLine = `allowing switch ${currentPlaylist && currentPlaylist.id || 'null'} -> ${nextPlaylist.id}`;if (!currentPlaylist) {log(`${sharedLogLine} as current playlist is not set`);return true;} // no need to switch if playlist is the sameif (nextPlaylist.id === currentPlaylist.id) {return false;} // determine if current time is in a buffered range.const isBuffered = Boolean(findRange(buffered, currentTime).length); // If the playlist is live, then we want to not take low water line into account.// This is because in LIVE, the player plays 3 segments from the end of the// playlist, and if `BUFFER_LOW_WATER_LINE` is greater than the duration availble// in those segments, a viewer will never experience a rendition upswitch.if (!currentPlaylist.endList) {// For LLHLS live streams, don't switch renditions before playback has started, as it almost// doubles the time to first playback.if (!isBuffered && typeof currentPlaylist.partTargetDuration === 'number') {log(`not ${sharedLogLine} as current playlist is live llhls, but currentTime isn't in buffered.`);return false;}log(`${sharedLogLine} as current playlist is live`);return true;}const forwardBuffer = timeAheadOf(buffered, currentTime);const maxBufferLowWaterLine = bufferBasedABR ? 
Config.EXPERIMENTAL_MAX_BUFFER_LOW_WATER_LINE : Config.MAX_BUFFER_LOW_WATER_LINE; // For the same reason as LIVE, we ignore the low water line when the VOD// duration is below the max potential low water lineif (duration < maxBufferLowWaterLine) {log(`${sharedLogLine} as duration < max low water line (${duration} < ${maxBufferLowWaterLine})`);return true;}const nextBandwidth = nextPlaylist.attributes.BANDWIDTH;const currBandwidth = currentPlaylist.attributes.BANDWIDTH; // when switching down, if our buffer is lower than the high water line,// we can switch downif (nextBandwidth < currBandwidth && (!bufferBasedABR || forwardBuffer < bufferHighWaterLine)) {let logLine = `${sharedLogLine} as next bandwidth < current bandwidth (${nextBandwidth} < ${currBandwidth})`;if (bufferBasedABR) {logLine += ` and forwardBuffer < bufferHighWaterLine (${forwardBuffer} < ${bufferHighWaterLine})`;}log(logLine);return true;} // and if our buffer is higher than the low water line,// we can switch upif ((!bufferBasedABR || nextBandwidth > currBandwidth) && forwardBuffer >= bufferLowWaterLine) {let logLine = `${sharedLogLine} as forwardBuffer >= bufferLowWaterLine (${forwardBuffer} >= ${bufferLowWaterLine})`;if (bufferBasedABR) {logLine += ` and next bandwidth > current bandwidth (${nextBandwidth} > ${currBandwidth})`;}log(logLine);return true;}log(`not ${sharedLogLine} as no switching criteria met`);return false;};/*** the main playlist controller controller all interactons* between playlists and segmentloaders. At this time this mainly* involves a main playlist and a series of audio playlists* if they are available** @class PlaylistController* @extends videojs.EventTarget*/class PlaylistController extends videojs.EventTarget {constructor(options) {super();const {src,withCredentials,tech,bandwidth,externVhs,useCueTags,playlistExclusionDuration,enableLowInitialPlaylist,sourceType,cacheEncryptionKeys,bufferBasedABR,leastPixelDiffSelector,captionServices} = options;if (!src) {throw new Error('A non-empty playlist URL or JSON manifest string is required');}let {maxPlaylistRetries} = options;if (maxPlaylistRetries === null || typeof maxPlaylistRetries === 'undefined') {maxPlaylistRetries = Infinity;}Vhs$1 = externVhs;this.bufferBasedABR = Boolean(bufferBasedABR);this.leastPixelDiffSelector = Boolean(leastPixelDiffSelector);this.withCredentials = withCredentials;this.tech_ = tech;this.vhs_ = tech.vhs;this.sourceType_ = sourceType;this.useCueTags_ = useCueTags;this.playlistExclusionDuration = playlistExclusionDuration;this.maxPlaylistRetries = maxPlaylistRetries;this.enableLowInitialPlaylist = enableLowInitialPlaylist;if (this.useCueTags_) {this.cueTagsTrack_ = this.tech_.addTextTrack('metadata', 'ad-cues');this.cueTagsTrack_.inBandMetadataTrackDispatchType = '';}this.requestOptions_ = {withCredentials,maxPlaylistRetries,timeout: null};this.on('error', this.pauseLoading);this.mediaTypes_ = createMediaTypes();this.mediaSource = new window.MediaSource();this.handleDurationChange_ = this.handleDurationChange_.bind(this);this.handleSourceOpen_ = this.handleSourceOpen_.bind(this);this.handleSourceEnded_ = this.handleSourceEnded_.bind(this);this.mediaSource.addEventListener('durationchange', this.handleDurationChange_); // load the media source into the playerthis.mediaSource.addEventListener('sourceopen', this.handleSourceOpen_);this.mediaSource.addEventListener('sourceended', this.handleSourceEnded_); // we don't have to handle sourceclose since dispose will handle termination of// everything, and the MediaSource should 
not be detached without a proper disposalthis.seekable_ = createTimeRanges();this.hasPlayed_ = false;this.syncController_ = new SyncController(options);this.segmentMetadataTrack_ = tech.addRemoteTextTrack({kind: 'metadata',label: 'segment-metadata'}, false).track;this.decrypter_ = new Decrypter();this.sourceUpdater_ = new SourceUpdater(this.mediaSource);this.inbandTextTracks_ = {};this.timelineChangeController_ = new TimelineChangeController();this.keyStatusMap_ = new Map();const segmentLoaderSettings = {vhs: this.vhs_,parse708captions: options.parse708captions,useDtsForTimestampOffset: options.useDtsForTimestampOffset,captionServices,mediaSource: this.mediaSource,currentTime: this.tech_.currentTime.bind(this.tech_),seekable: () => this.seekable(),seeking: () => this.tech_.seeking(),duration: () => this.duration(),hasPlayed: () => this.hasPlayed_,goalBufferLength: () => this.goalBufferLength(),bandwidth,syncController: this.syncController_,decrypter: this.decrypter_,sourceType: this.sourceType_,inbandTextTracks: this.inbandTextTracks_,cacheEncryptionKeys,sourceUpdater: this.sourceUpdater_,timelineChangeController: this.timelineChangeController_,exactManifestTimings: options.exactManifestTimings,addMetadataToTextTrack: this.addMetadataToTextTrack.bind(this)}; // The source type check not only determines whether a special DASH playlist loader// should be used, but also covers the case where the provided src is a vhs-json// manifest object (instead of a URL). In the case of vhs-json, the default// PlaylistLoader should be used.this.mainPlaylistLoader_ = this.sourceType_ === 'dash' ? new DashPlaylistLoader(src, this.vhs_, merge(this.requestOptions_, {addMetadataToTextTrack: this.addMetadataToTextTrack.bind(this)})) : new PlaylistLoader(src, this.vhs_, merge(this.requestOptions_, {addDateRangesToTextTrack: this.addDateRangesToTextTrack_.bind(this)}));this.setupMainPlaylistLoaderListeners_(); // setup segment loaders// combined audio/video or just video when alternate audio track is selectedthis.mainSegmentLoader_ = new SegmentLoader(merge(segmentLoaderSettings, {segmentMetadataTrack: this.segmentMetadataTrack_,loaderType: 'main'}), options); // alternate audio trackthis.audioSegmentLoader_ = new SegmentLoader(merge(segmentLoaderSettings, {loaderType: 'audio'}), options);this.subtitleSegmentLoader_ = new VTTSegmentLoader(merge(segmentLoaderSettings, {loaderType: 'vtt',featuresNativeTextTracks: this.tech_.featuresNativeTextTracks,loadVttJs: () => new Promise((resolve, reject) => {function onLoad() {tech.off('vttjserror', onError);resolve();}function onError() {tech.off('vttjsloaded', onLoad);reject();}tech.one('vttjsloaded', onLoad);tech.one('vttjserror', onError); // safe to call multiple times, script will be loaded only once:tech.addWebVttScript_();})}), options);const getBandwidth = () => {return this.mainSegmentLoader_.bandwidth;};this.contentSteeringController_ = new ContentSteeringController(this.vhs_.xhr, getBandwidth);this.setupSegmentLoaderListeners_();if (this.bufferBasedABR) {this.mainPlaylistLoader_.one('loadedplaylist', () => this.startABRTimer_());this.tech_.on('pause', () => this.stopABRTimer_());this.tech_.on('play', () => this.startABRTimer_());} // Create SegmentLoader stat-getters// mediaRequests_// mediaRequestsAborted_// mediaRequestsTimedout_// mediaRequestsErrored_// mediaTransferDuration_// mediaBytesTransferred_// mediaAppends_loaderStats.forEach(stat => {this[stat + '_'] = sumLoaderStat.bind(this, stat);});this.logger_ = logger('pc');this.triggeredFmp4Usage = false;if 
(this.tech_.preload() === 'none') {this.loadOnPlay_ = () => {this.loadOnPlay_ = null;this.mainPlaylistLoader_.load();};this.tech_.one('play', this.loadOnPlay_);} else {this.mainPlaylistLoader_.load();}this.timeToLoadedData__ = -1;this.mainAppendsToLoadedData__ = -1;this.audioAppendsToLoadedData__ = -1;const event = this.tech_.preload() === 'none' ? 'play' : 'loadstart'; // start the first frame timer on loadstart or play (for preload none)this.tech_.one(event, () => {const timeToLoadedDataStart = Date.now();this.tech_.one('loadeddata', () => {this.timeToLoadedData__ = Date.now() - timeToLoadedDataStart;this.mainAppendsToLoadedData__ = this.mainSegmentLoader_.mediaAppends;this.audioAppendsToLoadedData__ = this.audioSegmentLoader_.mediaAppends;});});}mainAppendsToLoadedData_() {return this.mainAppendsToLoadedData__;}audioAppendsToLoadedData_() {return this.audioAppendsToLoadedData__;}appendsToLoadedData_() {const main = this.mainAppendsToLoadedData_();const audio = this.audioAppendsToLoadedData_();if (main === -1 || audio === -1) {return -1;}return main + audio;}timeToLoadedData_() {return this.timeToLoadedData__;}/*** Run selectPlaylist and switch to the new playlist if we should** @param {string} [reason=abr] a reason for why the ABR check is made* @private*/checkABR_(reason = 'abr') {const nextPlaylist = this.selectPlaylist();if (nextPlaylist && this.shouldSwitchToMedia_(nextPlaylist)) {this.switchMedia_(nextPlaylist, reason);}}switchMedia_(playlist, cause, delay) {const oldMedia = this.media();const oldId = oldMedia && (oldMedia.id || oldMedia.uri);const newId = playlist && (playlist.id || playlist.uri);if (oldId && oldId !== newId) {this.logger_(`switch media ${oldId} -> ${newId} from ${cause}`);this.tech_.trigger({type: 'usage',name: `vhs-rendition-change-${cause}`});}this.mainPlaylistLoader_.media(playlist, delay);}/*** A function that ensures we switch our playlists inside of `mediaTypes`* to match the current `serviceLocation` provided by the contentSteering controller.* We want to check media types of `AUDIO`, `SUBTITLES`, and `CLOSED-CAPTIONS`.** This should only be called on a DASH playback scenario while using content steering.* This is necessary due to differences in how media in HLS manifests are generally tied to* a video playlist, where in DASH that is not always the case.*/switchMediaForDASHContentSteering_() {['AUDIO', 'SUBTITLES', 'CLOSED-CAPTIONS'].forEach(type => {const mediaType = this.mediaTypes_[type];const activeGroup = mediaType ? mediaType.activeGroup() : null;const pathway = this.contentSteeringController_.getPathway();if (activeGroup && pathway) {// activeGroup can be an array or a single groupconst mediaPlaylists = activeGroup.length ? 
activeGroup[0].playlists : activeGroup.playlists;const dashMediaPlaylists = mediaPlaylists.filter(p => p.attributes.serviceLocation === pathway); // Switch the current active playlist to the correct CDNif (dashMediaPlaylists.length) {this.mediaTypes_[type].activePlaylistLoader.media(dashMediaPlaylists[0]);}}});}/*** Start a timer that periodically calls checkABR_** @private*/startABRTimer_() {this.stopABRTimer_();this.abrTimer_ = window.setInterval(() => this.checkABR_(), 250);}/*** Stop the timer that periodically calls checkABR_** @private*/stopABRTimer_() {// if we're scrubbing, we don't need to pause.// This getter will be added to Video.js in version 7.11.if (this.tech_.scrubbing && this.tech_.scrubbing()) {return;}window.clearInterval(this.abrTimer_);this.abrTimer_ = null;}/*** Get a list of playlists for the currently selected audio playlist** @return {Array} the array of audio playlists*/getAudioTrackPlaylists_() {const main = this.main();const defaultPlaylists = main && main.playlists || []; // if we don't have any audio groups then we can only// assume that the audio tracks are contained in main// playlist array, use that or an empty array.if (!main || !main.mediaGroups || !main.mediaGroups.AUDIO) {return defaultPlaylists;}const AUDIO = main.mediaGroups.AUDIO;const groupKeys = Object.keys(AUDIO);let track; // get the current active trackif (Object.keys(this.mediaTypes_.AUDIO.groups).length) {track = this.mediaTypes_.AUDIO.activeTrack(); // or get the default track from main if mediaTypes_ isn't setup yet} else {// default group is `main` or just the first group.const defaultGroup = AUDIO.main || groupKeys.length && AUDIO[groupKeys[0]];for (const label in defaultGroup) {if (defaultGroup[label].default) {track = {label};break;}}} // no active track no playlists.if (!track) {return defaultPlaylists;}const playlists = []; // get all of the playlists that are possible for the// active track.for (const group in AUDIO) {if (AUDIO[group][track.label]) {const properties = AUDIO[group][track.label];if (properties.playlists && properties.playlists.length) {playlists.push.apply(playlists, properties.playlists);} else if (properties.uri) {playlists.push(properties);} else if (main.playlists.length) {// if an audio group does not have a uri// see if we have main playlists that use it as a group.// if we do then add those to the playlists list.for (let i = 0; i < main.playlists.length; i++) {const playlist = main.playlists[i];if (playlist.attributes && playlist.attributes.AUDIO && playlist.attributes.AUDIO === group) {playlists.push(playlist);}}}}}if (!playlists.length) {return defaultPlaylists;}return playlists;}/*** Register event handlers on the main playlist loader. 
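// A tiny sketch of the request-timeout heuristic used by the 'loadedmetadata' and
// 'mediachange' handlers below: 1.5x the playlist target duration in milliseconds,
// or 0 (no timeout) when the current rendition is already the lowest enabled one.
// The sample values are assumptions.
const computeRequestTimeout = (targetDuration, isLowestEnabledRendition) =>
  isLowestEnabledRendition ? 0 : targetDuration * 1.5 * 1000;
console.log(computeRequestTimeout(6, false)); // 9000
console.log(computeRequestTimeout(6, true));  // 0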
A helper* function for construction time.** @private*/setupMainPlaylistLoaderListeners_() {this.mainPlaylistLoader_.on('loadedmetadata', () => {const media = this.mainPlaylistLoader_.media();const requestTimeout = media.targetDuration * 1.5 * 1000; // If we don't have any more available playlists, we don't want to// timeout the request.if (isLowestEnabledRendition(this.mainPlaylistLoader_.main, this.mainPlaylistLoader_.media())) {this.requestOptions_.timeout = 0;} else {this.requestOptions_.timeout = requestTimeout;} // if this isn't a live video and preload permits, start// downloading segmentsif (media.endList && this.tech_.preload() !== 'none') {this.mainSegmentLoader_.playlist(media, this.requestOptions_);this.mainSegmentLoader_.load();}setupMediaGroups({sourceType: this.sourceType_,segmentLoaders: {AUDIO: this.audioSegmentLoader_,SUBTITLES: this.subtitleSegmentLoader_,main: this.mainSegmentLoader_},tech: this.tech_,requestOptions: this.requestOptions_,mainPlaylistLoader: this.mainPlaylistLoader_,vhs: this.vhs_,main: this.main(),mediaTypes: this.mediaTypes_,excludePlaylist: this.excludePlaylist.bind(this)});this.triggerPresenceUsage_(this.main(), media);this.setupFirstPlay();if (!this.mediaTypes_.AUDIO.activePlaylistLoader || this.mediaTypes_.AUDIO.activePlaylistLoader.media()) {this.trigger('selectedinitialmedia');} else {// We must wait for the active audio playlist loader to// finish setting up before triggering this event so the// representations API and EME setup is correctthis.mediaTypes_.AUDIO.activePlaylistLoader.one('loadedmetadata', () => {this.trigger('selectedinitialmedia');});}});this.mainPlaylistLoader_.on('loadedplaylist', () => {if (this.loadOnPlay_) {this.tech_.off('play', this.loadOnPlay_);}let updatedPlaylist = this.mainPlaylistLoader_.media();if (!updatedPlaylist) {// Add content steering listeners on first load and init.this.attachContentSteeringListeners_();this.initContentSteeringController_(); // exclude any variants that are not supported by the browser before selecting// an initial media as the playlist selectors do not consider browser supportthis.excludeUnsupportedVariants_();let selectedMedia;if (this.enableLowInitialPlaylist) {selectedMedia = this.selectInitialPlaylist();}if (!selectedMedia) {selectedMedia = this.selectPlaylist();}if (!selectedMedia || !this.shouldSwitchToMedia_(selectedMedia)) {return;}this.initialMedia_ = selectedMedia;this.switchMedia_(this.initialMedia_, 'initial'); // Under the standard case where a source URL is provided, loadedplaylist will// fire again since the playlist will be requested. 
In the case of vhs-json// (where the manifest object is provided as the source), when the media// playlist's `segments` list is already available, a media playlist won't be// requested, and loadedplaylist won't fire again, so the playlist handler must be// called on its own here.const haveJsonSource = this.sourceType_ === 'vhs-json' && this.initialMedia_.segments;if (!haveJsonSource) {return;}updatedPlaylist = this.initialMedia_;}this.handleUpdatedMediaPlaylist(updatedPlaylist);});this.mainPlaylistLoader_.on('error', () => {const error = this.mainPlaylistLoader_.error;this.excludePlaylist({playlistToExclude: error.playlist,error});});this.mainPlaylistLoader_.on('mediachanging', () => {this.mainSegmentLoader_.abort();this.mainSegmentLoader_.pause();});this.mainPlaylistLoader_.on('mediachange', () => {const media = this.mainPlaylistLoader_.media();const requestTimeout = media.targetDuration * 1.5 * 1000; // If we don't have any more available playlists, we don't want to// timeout the request.if (isLowestEnabledRendition(this.mainPlaylistLoader_.main, this.mainPlaylistLoader_.media())) {this.requestOptions_.timeout = 0;} else {this.requestOptions_.timeout = requestTimeout;}if (this.sourceType_ === 'dash') {// we don't want to re-request the same hls playlist right after it was changedthis.mainPlaylistLoader_.load();} // TODO: Create a new event on the PlaylistLoader that signals// that the segments have changed in some way and use that to// update the SegmentLoader instead of doing it twice here and// on `loadedplaylist`this.mainSegmentLoader_.pause();this.mainSegmentLoader_.playlist(media, this.requestOptions_);if (this.waitingForFastQualityPlaylistReceived_) {this.runFastQualitySwitch_();} else {this.mainSegmentLoader_.load();}this.tech_.trigger({type: 'mediachange',bubbles: true});});this.mainPlaylistLoader_.on('playlistunchanged', () => {const updatedPlaylist = this.mainPlaylistLoader_.media(); // ignore unchanged playlists that have already been// excluded for not-changing. We likely just have a really slowly updating// playlist.if (updatedPlaylist.lastExcludeReason_ === 'playlist-unchanged') {return;}const playlistOutdated = this.stuckAtPlaylistEnd_(updatedPlaylist);if (playlistOutdated) {// Playlist has stopped updating and we're stuck at its end. 
Try to// exclude it and switch to another playlist in the hope that that// one is updating (and give the player a chance to re-adjust to the// safe live point).this.excludePlaylist({error: {message: 'Playlist no longer updating.',reason: 'playlist-unchanged'}}); // useful for monitoring QoSthis.tech_.trigger('playliststuck');}});this.mainPlaylistLoader_.on('renditiondisabled', () => {this.tech_.trigger({type: 'usage',name: 'vhs-rendition-disabled'});});this.mainPlaylistLoader_.on('renditionenabled', () => {this.tech_.trigger({type: 'usage',name: 'vhs-rendition-enabled'});});}/*** Given an updated media playlist (whether it was loaded for the first time, or* refreshed for live playlists), update any relevant properties and state to reflect* changes in the media that should be accounted for (e.g., cues and duration).** @param {Object} updatedPlaylist the updated media playlist object** @private*/handleUpdatedMediaPlaylist(updatedPlaylist) {if (this.useCueTags_) {this.updateAdCues_(updatedPlaylist);} // TODO: Create a new event on the PlaylistLoader that signals// that the segments have changed in some way and use that to// update the SegmentLoader instead of doing it twice here and// on `mediachange`this.mainSegmentLoader_.pause();this.mainSegmentLoader_.playlist(updatedPlaylist, this.requestOptions_);if (this.waitingForFastQualityPlaylistReceived_) {this.runFastQualitySwitch_();}this.updateDuration(!updatedPlaylist.endList); // If the player isn't paused, ensure that the segment loader is running,// as it is possible that it was temporarily stopped while waiting for// a playlist (e.g., in case the playlist errored and we re-requested it).if (!this.tech_.paused()) {this.mainSegmentLoader_.load();if (this.audioSegmentLoader_) {this.audioSegmentLoader_.load();}}}/*** A helper function for triggerring presence usage events once per source** @private*/triggerPresenceUsage_(main, media) {const mediaGroups = main.mediaGroups || {};let defaultDemuxed = true;const audioGroupKeys = Object.keys(mediaGroups.AUDIO);for (const mediaGroup in mediaGroups.AUDIO) {for (const label in mediaGroups.AUDIO[mediaGroup]) {const properties = mediaGroups.AUDIO[mediaGroup][label];if (!properties.uri) {defaultDemuxed = false;}}}if (defaultDemuxed) {this.tech_.trigger({type: 'usage',name: 'vhs-demuxed'});}if (Object.keys(mediaGroups.SUBTITLES).length) {this.tech_.trigger({type: 'usage',name: 'vhs-webvtt'});}if (Vhs$1.Playlist.isAes(media)) {this.tech_.trigger({type: 'usage',name: 'vhs-aes'});}if (audioGroupKeys.length && Object.keys(mediaGroups.AUDIO[audioGroupKeys[0]]).length > 1) {this.tech_.trigger({type: 'usage',name: 'vhs-alternate-audio'});}if (this.useCueTags_) {this.tech_.trigger({type: 'usage',name: 'vhs-playlist-cue-tags'});}}shouldSwitchToMedia_(nextPlaylist) {const currentPlaylist = this.mainPlaylistLoader_.media() || this.mainPlaylistLoader_.pendingMedia_;const currentTime = this.tech_.currentTime();const bufferLowWaterLine = this.bufferLowWaterLine();const bufferHighWaterLine = this.bufferHighWaterLine();const buffered = this.tech_.buffered();return shouldSwitchToMedia({buffered,currentTime,currentPlaylist,nextPlaylist,bufferLowWaterLine,bufferHighWaterLine,duration: this.duration(),bufferBasedABR: this.bufferBasedABR,log: this.logger_});}/*** Register event handlers on the segment loaders. 
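// A minimal sketch of the wiring pattern set up below: segment-loader events are
// funneled into controller-level reactions, e.g. a 'bandwidthupdate' prompting an
// ABR re-check. The emitter and the check function are assumed stand-ins.
const abrListeners = [];
const mainLoaderEmitterStub = {
  on(type, fn) { if (type === 'bandwidthupdate') { abrListeners.push(fn); } },
  trigger(type) { if (type === 'bandwidthupdate') { abrListeners.forEach(fn => fn()); } }
};
const checkABRStub = reason => console.log(`ABR check (${reason})`);
mainLoaderEmitterStub.on('bandwidthupdate', () => checkABRStub('bandwidthupdate'));
mainLoaderEmitterStub.trigger('bandwidthupdate'); // logs: ABR check (bandwidthupdate)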
A helper function* for construction time.** @private*/setupSegmentLoaderListeners_() {this.mainSegmentLoader_.on('bandwidthupdate', () => {// Whether or not buffer based ABR or another ABR is used, on a bandwidth change it's// useful to check to see if a rendition switch should be made.this.checkABR_('bandwidthupdate');this.tech_.trigger('bandwidthupdate');});this.mainSegmentLoader_.on('timeout', () => {if (this.bufferBasedABR) {// If a rendition change is needed, then it would've be done on `bandwidthupdate`.// Here the only consideration is that for buffer based ABR there's no guarantee// of an immediate switch (since the bandwidth is averaged with a timeout// bandwidth value of 1), so force a load on the segment loader to keep it going.this.mainSegmentLoader_.load();}}); // `progress` events are not reliable enough of a bandwidth measure to trigger buffer// based ABR.if (!this.bufferBasedABR) {this.mainSegmentLoader_.on('progress', () => {this.trigger('progress');});}this.mainSegmentLoader_.on('error', () => {const error = this.mainSegmentLoader_.error();this.excludePlaylist({playlistToExclude: error.playlist,error});});this.mainSegmentLoader_.on('appenderror', () => {this.error = this.mainSegmentLoader_.error_;this.trigger('error');});this.mainSegmentLoader_.on('syncinfoupdate', () => {this.onSyncInfoUpdate_();});this.mainSegmentLoader_.on('timestampoffset', () => {this.tech_.trigger({type: 'usage',name: 'vhs-timestamp-offset'});});this.audioSegmentLoader_.on('syncinfoupdate', () => {this.onSyncInfoUpdate_();});this.audioSegmentLoader_.on('appenderror', () => {this.error = this.audioSegmentLoader_.error_;this.trigger('error');});this.mainSegmentLoader_.on('ended', () => {this.logger_('main segment loader ended');this.onEndOfStream();});this.mainSegmentLoader_.on('earlyabort', event => {// never try to early abort with the new ABR algorithmif (this.bufferBasedABR) {return;}this.delegateLoaders_('all', ['abort']);this.excludePlaylist({error: {message: 'Aborted early because there isn\'t enough bandwidth to complete ' + 'the request without rebuffering.'},playlistExclusionDuration: ABORT_EARLY_EXCLUSION_SECONDS});});const updateCodecs = () => {if (!this.sourceUpdater_.hasCreatedSourceBuffers()) {return this.tryToCreateSourceBuffers_();}const codecs = this.getCodecsOrExclude_(); // no codecs means that the playlist was excludedif (!codecs) {return;}this.sourceUpdater_.addOrChangeSourceBuffers(codecs);};this.mainSegmentLoader_.on('trackinfo', updateCodecs);this.audioSegmentLoader_.on('trackinfo', updateCodecs);this.mainSegmentLoader_.on('fmp4', () => {if (!this.triggeredFmp4Usage) {this.tech_.trigger({type: 'usage',name: 'vhs-fmp4'});this.triggeredFmp4Usage = true;}});this.audioSegmentLoader_.on('fmp4', () => {if (!this.triggeredFmp4Usage) {this.tech_.trigger({type: 'usage',name: 'vhs-fmp4'});this.triggeredFmp4Usage = true;}});this.audioSegmentLoader_.on('ended', () => {this.logger_('audioSegmentLoader ended');this.onEndOfStream();});}mediaSecondsLoaded_() {return Math.max(this.audioSegmentLoader_.mediaSecondsLoaded + this.mainSegmentLoader_.mediaSecondsLoaded);}/*** Call load on our SegmentLoaders*/load() {this.mainSegmentLoader_.load();if (this.mediaTypes_.AUDIO.activePlaylistLoader) {this.audioSegmentLoader_.load();}if (this.mediaTypes_.SUBTITLES.activePlaylistLoader) {this.subtitleSegmentLoader_.load();}}/*** Re-tune playback quality level for the current player* conditions. 
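// A sketch of the seek-in-place nudge runFastQualitySwitch_ (below) applies after
// wiping the buffer, which prompts the browser to drop cached frames from the old
// rendition. `techStub` is an assumed stand-in, not the Video.js Tech API.
const techStub = {
  position: 12.3,
  currentTime() { return this.position; },
  setCurrentTime(t) { this.position = t; console.log(`seeked in place to ${t}`); }
};
techStub.setCurrentTime(techStub.currentTime()); // logs: seeked in place to 12.3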
This method will perform destructive actions like removing* already buffered content in order to readjust the currently active* playlist quickly. This is good for manual quality changes** @private*/fastQualityChange_(media = this.selectPlaylist()) {if (media && media === this.mainPlaylistLoader_.media()) {this.logger_('skipping fastQualityChange because new media is same as old');return;}this.switchMedia_(media, 'fast-quality'); // we would like to avoid race condition when we call fastQuality,// reset everything and start loading segments from prev segments instead of new because new playlist is not received yetthis.waitingForFastQualityPlaylistReceived_ = true;}runFastQualitySwitch_() {this.waitingForFastQualityPlaylistReceived_ = false; // Delete all buffered data to allow an immediate quality switch, then seek to give// the browser a kick to remove any cached frames from the previous rendtion (.04 seconds// ahead was roughly the minimum that will accomplish this across a variety of content// in IE and Edge, but seeking in place is sufficient on all other browsers)// Edge/IE bug: https://developer.microsoft.com/en-us/microsoft-edge/platform/issues/14600375/// Chrome bug: https://bugs.chromium.org/p/chromium/issues/detail?id=651904this.mainSegmentLoader_.pause();this.mainSegmentLoader_.resetEverything(() => {this.tech_.setCurrentTime(this.tech_.currentTime());}); // don't need to reset audio as it is reset when media changes}/*** Begin playback.*/play() {if (this.setupFirstPlay()) {return;}if (this.tech_.ended()) {this.tech_.setCurrentTime(0);}if (this.hasPlayed_) {this.load();}const seekable = this.tech_.seekable(); // if the viewer has paused and we fell out of the live window,// seek forward to the live pointif (this.tech_.duration() === Infinity) {if (this.tech_.currentTime() < seekable.start(0)) {return this.tech_.setCurrentTime(seekable.end(seekable.length - 1));}}}/*** Seek to the latest media position if this is a live video and the* player and video are loaded and initialized.*/setupFirstPlay() {const media = this.mainPlaylistLoader_.media(); // Check that everything is ready to begin buffering for the first call to play// If 1) there is no active media// 2) the player is paused// 3) the first play has already been setup// then exit earlyif (!media || this.tech_.paused() || this.hasPlayed_) {return false;} // when the video is a live stream and/or has a start timeif (!media.endList || media.start) {const seekable = this.seekable();if (!seekable.length) {// without a seekable range, the player cannot seek to begin buffering at the// live or start pointreturn false;}const seekableEnd = seekable.end(0);let startPoint = seekableEnd;if (media.start) {const offset = media.start.timeOffset;if (offset < 0) {startPoint = Math.max(seekableEnd + offset, seekable.start(0));} else {startPoint = Math.min(seekableEnd, offset);}} // trigger firstplay to inform the source handler to ignore the next seek eventthis.trigger('firstplay'); // seek to the live pointthis.tech_.setCurrentTime(startPoint);}this.hasPlayed_ = true; // we can begin loading now that everything is readythis.load();return true;}/*** handle the sourceopen event on the MediaSource** @private*/handleSourceOpen_() {// Only attempt to create the source buffer if none already exist.// handleSourceOpen is also called when we are "re-opening" a source buffer// after `endOfStream` has been called (in response to a seek for instance)this.tryToCreateSourceBuffers_(); // if autoplay is enabled, begin playback. 
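// runFastQualitySwitch_ above relies on a common MSE trick: clear the buffers,
// then "seek in place" so the browser drops frames it already decoded from the
// previous rendition. A generic sketch of that nudge on a bare HTMLVideoElement
// (the `video` element lookup is assumed, not taken from this file):
//
//   const video = document.querySelector('video');
//   // re-assigning the same time flushes the decode pipeline for the new rendition
//   video.currentTime = video.currentTime;
//
// In this controller the equivalent is the resetEverything() callback that calls
// tech_.setCurrentTime(tech_.currentTime()).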
This is duplicative of// code in video.js but is required because play() must be invoked// *after* the media source has opened.if (this.tech_.autoplay()) {const playPromise = this.tech_.play(); // Catch/silence error when a pause interrupts a play request// on browsers which return a promiseif (typeof playPromise !== 'undefined' && typeof playPromise.then === 'function') {playPromise.then(null, e => {});}}this.trigger('sourceopen');}/*** handle the sourceended event on the MediaSource** @private*/handleSourceEnded_() {if (!this.inbandTextTracks_.metadataTrack_) {return;}const cues = this.inbandTextTracks_.metadataTrack_.cues;if (!cues || !cues.length) {return;}const duration = this.duration();cues[cues.length - 1].endTime = isNaN(duration) || Math.abs(duration) === Infinity ? Number.MAX_VALUE : duration;}/*** handle the durationchange event on the MediaSource** @private*/handleDurationChange_() {this.tech_.trigger('durationchange');}/*** Calls endOfStream on the media source when all active stream types have called* endOfStream** @param {string} streamType* Stream type of the segment loader that called endOfStream* @private*/onEndOfStream() {let isEndOfStream = this.mainSegmentLoader_.ended_;if (this.mediaTypes_.AUDIO.activePlaylistLoader) {const mainMediaInfo = this.mainSegmentLoader_.getCurrentMediaInfo_(); // if the audio playlist loader exists, then alternate audio is activeif (!mainMediaInfo || mainMediaInfo.hasVideo) {// if we do not know if the main segment loader contains video yet or if we// definitively know the main segment loader contains video, then we need to wait// for both main and audio segment loaders to call endOfStreamisEndOfStream = isEndOfStream && this.audioSegmentLoader_.ended_;} else {// otherwise just rely on the audio loaderisEndOfStream = this.audioSegmentLoader_.ended_;}}if (!isEndOfStream) {return;}this.stopABRTimer_();this.sourceUpdater_.endOfStream();}/*** Check if a playlist has stopped being updated** @param {Object} playlist the media playlist object* @return {boolean} whether the playlist has stopped being updated or not*/stuckAtPlaylistEnd_(playlist) {const seekable = this.seekable();if (!seekable.length) {// playlist doesn't have enough information to determine whether we are stuckreturn false;}const expired = this.syncController_.getExpiredTime(playlist, this.duration());if (expired === null) {return false;} // does not use the safe live end to calculate playlist end, since we// don't want to say we are stuck while there is still contentconst absolutePlaylistEnd = Vhs$1.Playlist.playlistEnd(playlist, expired);const currentTime = this.tech_.currentTime();const buffered = this.tech_.buffered();if (!buffered.length) {// return true if the playhead reached the absolute end of the playlistreturn absolutePlaylistEnd - currentTime <= SAFE_TIME_DELTA;}const bufferedEnd = buffered.end(buffered.length - 1); // return true if there is too little buffer left and buffer has reached absolute// end of playlistreturn bufferedEnd - currentTime <= SAFE_TIME_DELTA && absolutePlaylistEnd - bufferedEnd <= SAFE_TIME_DELTA;}/*** Exclude a playlist for a set amount of time, making it unavailable for selection by* the rendition selection algorithm, then force a new playlist (rendition) selection.** @param {Object=} playlistToExclude* the playlist to exclude, defaults to the currently selected playlist* @param {Object=} error* an optional error* @param {number=} playlistExclusionDuration* an optional number of seconds to exclude the playlist*/excludePlaylist({playlistToExclude 
= this.mainPlaylistLoader_.media(),error = {},playlistExclusionDuration}) {// If the `error` was generated by the playlist loader, it will contain// the playlist we were trying to load (but failed) and that should be// excluded instead of the currently selected playlist which is likely// out-of-date in this scenarioplaylistToExclude = playlistToExclude || this.mainPlaylistLoader_.media();playlistExclusionDuration = playlistExclusionDuration || error.playlistExclusionDuration || this.playlistExclusionDuration; // If there is no current playlist, then an error occurred while we were// trying to load the main OR while we were disposing of the techif (!playlistToExclude) {this.error = error;if (this.mediaSource.readyState !== 'open') {this.trigger('error');} else {this.sourceUpdater_.endOfStream('network');}return;}playlistToExclude.playlistErrors_++;const playlists = this.mainPlaylistLoader_.main.playlists;const enabledPlaylists = playlists.filter(isEnabled);const isFinalRendition = enabledPlaylists.length === 1 && enabledPlaylists[0] === playlistToExclude; // Don't exclude the only playlist unless it was excluded// foreverif (playlists.length === 1 && playlistExclusionDuration !== Infinity) {videojs.log.warn(`Problem encountered with playlist ${playlistToExclude.id}. ` + 'Trying again since it is the only playlist.');this.tech_.trigger('retryplaylist'); // if this is a final rendition, we should delayreturn this.mainPlaylistLoader_.load(isFinalRendition);}if (isFinalRendition) {// If we're content steering, try other pathways.if (this.main().contentSteering) {const pathway = this.pathwayAttribute_(playlistToExclude); // Ignore at least 1 steering manifest refresh.const reIncludeDelay = this.contentSteeringController_.steeringManifest.ttl * 1000;this.contentSteeringController_.excludePathway(pathway);this.excludeThenChangePathway_();setTimeout(() => {this.contentSteeringController_.addAvailablePathway(pathway);}, reIncludeDelay);return;} // Since we're on the final non-excluded playlist, and we're about to exclude// it, instead of erring the player or retrying this playlist, clear out the current// exclusion list. This allows other playlists to be attempted in case any have been// fixed.let reincluded = false;playlists.forEach(playlist => {// skip current playlist which is about to be excludedif (playlist === playlistToExclude) {return;}const excludeUntil = playlist.excludeUntil; // a playlist cannot be reincluded if it wasn't excluded to begin with.if (typeof excludeUntil !== 'undefined' && excludeUntil !== Infinity) {reincluded = true;delete playlist.excludeUntil;}});if (reincluded) {videojs.log.warn('Removing other playlists from the exclusion list because the last ' + 'rendition is about to be excluded.'); // Technically we are retrying a playlist, in that we are simply retrying a previous// playlist. 
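// For reference, excludePlaylist() takes a single options object; every property
// is optional and falls back to the currently selected playlist and the configured
// exclusion duration. An illustrative call shape (`somePlaylist`, the message text
// and the 60-second duration are illustrative values, not from this file):
//
//   this.excludePlaylist({
//     playlistToExclude: somePlaylist,   // defaults to mainPlaylistLoader_.media()
//     error: { message: 'Example problem with this rendition.' },
//     playlistExclusionDuration: 60      // seconds; Infinity excludes permanently
//   });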
This is needed for users relying on the retryplaylist event to catch a// case where the player might be stuck and looping through "dead" playlists.this.tech_.trigger('retryplaylist');}} // Exclude this playlistlet excludeUntil;if (playlistToExclude.playlistErrors_ > this.maxPlaylistRetries) {excludeUntil = Infinity;} else {excludeUntil = Date.now() + playlistExclusionDuration * 1000;}playlistToExclude.excludeUntil = excludeUntil;if (error.reason) {playlistToExclude.lastExcludeReason_ = error.reason;}this.tech_.trigger('excludeplaylist');this.tech_.trigger({type: 'usage',name: 'vhs-rendition-excluded'}); // TODO: only load a new playlist if we're excluding the current playlist// If this function was called with a playlist that's not the current active playlist// (e.g., media().id !== playlistToExclude.id),// then a new playlist should not be selected and loaded, as there's nothing wrong with the current playlist.const nextPlaylist = this.selectPlaylist();if (!nextPlaylist) {this.error = 'Playback cannot continue. No available working or supported playlists.';this.trigger('error');return;}const logFn = error.internal ? this.logger_ : videojs.log.warn;const errorMessage = error.message ? ' ' + error.message : '';logFn(`${error.internal ? 'Internal problem' : 'Problem'} encountered with playlist ${playlistToExclude.id}.` + `${errorMessage} Switching to playlist ${nextPlaylist.id}.`); // if audio group changed reset audio loadersif (nextPlaylist.attributes.AUDIO !== playlistToExclude.attributes.AUDIO) {this.delegateLoaders_('audio', ['abort', 'pause']);} // if subtitle group changed reset subtitle loadersif (nextPlaylist.attributes.SUBTITLES !== playlistToExclude.attributes.SUBTITLES) {this.delegateLoaders_('subtitle', ['abort', 'pause']);}this.delegateLoaders_('main', ['abort', 'pause']);const delayDuration = nextPlaylist.targetDuration / 2 * 1000 || 5 * 1000;const shouldDelay = typeof nextPlaylist.lastRequest === 'number' && Date.now() - nextPlaylist.lastRequest <= delayDuration; // delay if it's a final rendition or if the last refresh is sooner than half targetDurationreturn this.switchMedia_(nextPlaylist, 'exclude', isFinalRendition || shouldDelay);}/*** Pause all segment/playlist loaders*/pauseLoading() {this.delegateLoaders_('all', ['abort', 'pause']);this.stopABRTimer_();}/*** Call a set of functions in order on playlist loaders, segment loaders,* or both types of loaders.** @param {string} filter* Filter loaders that should call fnNames using a string. 
Can be:* * all - run on all loaders* * audio - run on all audio loaders* * subtitle - run on all subtitle loaders* * main - run on the main loaders** @param {Array|string} fnNames* A string or array of function names to call.*/delegateLoaders_(filter, fnNames) {const loaders = [];const dontFilterPlaylist = filter === 'all';if (dontFilterPlaylist || filter === 'main') {loaders.push(this.mainPlaylistLoader_);}const mediaTypes = [];if (dontFilterPlaylist || filter === 'audio') {mediaTypes.push('AUDIO');}if (dontFilterPlaylist || filter === 'subtitle') {mediaTypes.push('CLOSED-CAPTIONS');mediaTypes.push('SUBTITLES');}mediaTypes.forEach(mediaType => {const loader = this.mediaTypes_[mediaType] && this.mediaTypes_[mediaType].activePlaylistLoader;if (loader) {loaders.push(loader);}});['main', 'audio', 'subtitle'].forEach(name => {const loader = this[`${name}SegmentLoader_`];if (loader && (filter === name || filter === 'all')) {loaders.push(loader);}});loaders.forEach(loader => fnNames.forEach(fnName => {if (typeof loader[fnName] === 'function') {loader[fnName]();}}));}/*** set the current time on all segment loaders** @param {TimeRange} currentTime the current time to set* @return {TimeRange} the current time*/setCurrentTime(currentTime) {const buffered = findRange(this.tech_.buffered(), currentTime);if (!(this.mainPlaylistLoader_ && this.mainPlaylistLoader_.media())) {// return immediately if the metadata is not ready yetreturn 0;} // it's clearly an edge-case but don't thrown an error if asked to// seek within an empty playlistif (!this.mainPlaylistLoader_.media().segments) {return 0;} // if the seek location is already buffered, continue buffering as usualif (buffered && buffered.length) {return currentTime;} // cancel outstanding requests so we begin buffering at the new// locationthis.mainSegmentLoader_.pause();this.mainSegmentLoader_.resetEverything();if (this.mediaTypes_.AUDIO.activePlaylistLoader) {this.audioSegmentLoader_.pause();this.audioSegmentLoader_.resetEverything();}if (this.mediaTypes_.SUBTITLES.activePlaylistLoader) {this.subtitleSegmentLoader_.pause();this.subtitleSegmentLoader_.resetEverything();} // start segment loader loading in case they are pausedthis.load();}/*** get the current duration** @return {TimeRange} the duration*/duration() {if (!this.mainPlaylistLoader_) {return 0;}const media = this.mainPlaylistLoader_.media();if (!media) {// no playlists loaded yet, so can't determine a durationreturn 0;} // Don't rely on the media source for duration in the case of a live playlist since// setting the native MediaSource's duration to infinity ends up with consequences to// seekable behavior. See https://github.com/w3c/media-source/issues/5 for details.//// This is resolved in the spec by https://github.com/w3c/media-source/pull/92,// however, few browsers have support for setLiveSeekableRange()// https://developer.mozilla.org/en-US/docs/Web/API/MediaSource/setLiveSeekableRange//// Until a time when the duration of the media source can be set to infinity, and a// seekable range specified across browsers, just return Infinity.if (!media.endList) {return Infinity;} // Since this is a VOD video, it is safe to rely on the media source's duration (if// available). 
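// The filter strings accepted by delegateLoaders_ above map directly onto the
// calls this controller already makes elsewhere in this file, for example:
//
//   this.delegateLoaders_('all', ['abort', 'pause']);   // every playlist + segment loader (pauseLoading)
//   this.delegateLoaders_('audio', ['abort', 'pause']); // only the active audio loaders, e.g. after an audio group change
//   this.delegateLoaders_('main', ['abort', 'pause']);  // main playlist loader + main segment loader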
If it's not available, fall back to a playlist-calculated estimate.if (this.mediaSource) {return this.mediaSource.duration;}return Vhs$1.Playlist.duration(media);}/*** check the seekable range** @return {TimeRange} the seekable range*/seekable() {return this.seekable_;}onSyncInfoUpdate_() {let audioSeekable; // TODO check for creation of both source buffers before updating seekable//// A fix was made to this function where a check for// this.sourceUpdater_.hasCreatedSourceBuffers// was added to ensure that both source buffers were created before seekable was// updated. However, it originally had a bug where it was checking for a true and// returning early instead of checking for false. Setting it to check for false to// return early though created other issues. A call to play() would check for seekable// end without verifying that a seekable range was present. In addition, even checking// for that didn't solve some issues, as handleFirstPlay is sometimes worked around// due to a media update calling load on the segment loaders, skipping a seek to live,// thereby starting live streams at the beginning of the stream rather than at the end.//// This conditional should be fixed to wait for the creation of two source buffers at// the same time as the other sections of code are fixed to properly seek to live and// not throw an error due to checking for a seekable end when no seekable range exists.//// For now, fall back to the older behavior, with the understanding that the seekable// range may not be completely correct, leading to a suboptimal initial live point.if (!this.mainPlaylistLoader_) {return;}let media = this.mainPlaylistLoader_.media();if (!media) {return;}let expired = this.syncController_.getExpiredTime(media, this.duration());if (expired === null) {// not enough information to update seekablereturn;}const main = this.mainPlaylistLoader_.main;const mainSeekable = Vhs$1.Playlist.seekable(media, expired, Vhs$1.Playlist.liveEdgeDelay(main, media));if (mainSeekable.length === 0) {return;}if (this.mediaTypes_.AUDIO.activePlaylistLoader) {media = this.mediaTypes_.AUDIO.activePlaylistLoader.media();expired = this.syncController_.getExpiredTime(media, this.duration());if (expired === null) {return;}audioSeekable = Vhs$1.Playlist.seekable(media, expired, Vhs$1.Playlist.liveEdgeDelay(main, media));if (audioSeekable.length === 0) {return;}}let oldEnd;let oldStart;if (this.seekable_ && this.seekable_.length) {oldEnd = this.seekable_.end(0);oldStart = this.seekable_.start(0);}if (!audioSeekable) {// seekable has been calculated based on buffering video data so it// can be returned directlythis.seekable_ = mainSeekable;} else if (audioSeekable.start(0) > mainSeekable.end(0) || mainSeekable.start(0) > audioSeekable.end(0)) {// seekables are pretty far off, rely on mainthis.seekable_ = mainSeekable;} else {this.seekable_ = createTimeRanges([[audioSeekable.start(0) > mainSeekable.start(0) ? audioSeekable.start(0) : mainSeekable.start(0), audioSeekable.end(0) < mainSeekable.end(0) ? 
audioSeekable.end(0) : mainSeekable.end(0)]]);} // seekable is the same as last timeif (this.seekable_ && this.seekable_.length) {if (this.seekable_.end(0) === oldEnd && this.seekable_.start(0) === oldStart) {return;}}this.logger_(`seekable updated [${printableRange(this.seekable_)}]`);this.tech_.trigger('seekablechanged');}/*** Update the player duration*/updateDuration(isLive) {if (this.updateDuration_) {this.mediaSource.removeEventListener('sourceopen', this.updateDuration_);this.updateDuration_ = null;}if (this.mediaSource.readyState !== 'open') {this.updateDuration_ = this.updateDuration.bind(this, isLive);this.mediaSource.addEventListener('sourceopen', this.updateDuration_);return;}if (isLive) {const seekable = this.seekable();if (!seekable.length) {return;} // Even in the case of a live playlist, the native MediaSource's duration should not// be set to Infinity (even though this would be expected for a live playlist), since// setting the native MediaSource's duration to infinity ends up with consequences to// seekable behavior. See https://github.com/w3c/media-source/issues/5 for details.//// This is resolved in the spec by https://github.com/w3c/media-source/pull/92,// however, few browsers have support for setLiveSeekableRange()// https://developer.mozilla.org/en-US/docs/Web/API/MediaSource/setLiveSeekableRange//// Until a time when the duration of the media source can be set to infinity, and a// seekable range specified across browsers, the duration should be greater than or// equal to the last possible seekable value.// MediaSource duration starts as NaN// It is possible (and probable) that this case will never be reached for many// sources, since the MediaSource reports duration as the highest value without// accounting for timestamp offset. For example, if the timestamp offset is -100 and// we buffered times 0 to 100 with real times of 100 to 200, even though current// time will be between 0 and 100, the native media source may report the duration// as 200. However, since we report duration separate from the media source (as// Infinity), and as long as the native media source duration value is greater than// our reported seekable range, seeks will work as expected. 
The large number as// duration for live is actually a strategy used by some players to work around the// issue of live seekable ranges cited above.if (isNaN(this.mediaSource.duration) || this.mediaSource.duration < seekable.end(seekable.length - 1)) {this.sourceUpdater_.setDuration(seekable.end(seekable.length - 1));}return;}const buffered = this.tech_.buffered();let duration = Vhs$1.Playlist.duration(this.mainPlaylistLoader_.media());if (buffered.length > 0) {duration = Math.max(duration, buffered.end(buffered.length - 1));}if (this.mediaSource.duration !== duration) {this.sourceUpdater_.setDuration(duration);}}/*** dispose of the PlaylistController and everything* that it controls*/dispose() {this.trigger('dispose');this.decrypter_.terminate();this.mainPlaylistLoader_.dispose();this.mainSegmentLoader_.dispose();this.contentSteeringController_.dispose();this.keyStatusMap_.clear();if (this.loadOnPlay_) {this.tech_.off('play', this.loadOnPlay_);}['AUDIO', 'SUBTITLES'].forEach(type => {const groups = this.mediaTypes_[type].groups;for (const id in groups) {groups[id].forEach(group => {if (group.playlistLoader) {group.playlistLoader.dispose();}});}});this.audioSegmentLoader_.dispose();this.subtitleSegmentLoader_.dispose();this.sourceUpdater_.dispose();this.timelineChangeController_.dispose();this.stopABRTimer_();if (this.updateDuration_) {this.mediaSource.removeEventListener('sourceopen', this.updateDuration_);}this.mediaSource.removeEventListener('durationchange', this.handleDurationChange_); // load the media source into the playerthis.mediaSource.removeEventListener('sourceopen', this.handleSourceOpen_);this.mediaSource.removeEventListener('sourceended', this.handleSourceEnded_);this.off();}/*** return the main playlist object if we have one** @return {Object} the main playlist object that we parsed*/main() {return this.mainPlaylistLoader_.main;}/*** return the currently selected playlist** @return {Object} the currently selected playlist object that we parsed*/media() {// playlist loader will not return media if it has not been fully loadedreturn this.mainPlaylistLoader_.media() || this.initialMedia_;}areMediaTypesKnown_() {const usingAudioLoader = !!this.mediaTypes_.AUDIO.activePlaylistLoader;const hasMainMediaInfo = !!this.mainSegmentLoader_.getCurrentMediaInfo_(); // if we are not using an audio loader, then we have audio media info// otherwise check on the segment loader.const hasAudioMediaInfo = !usingAudioLoader ? 
true : !!this.audioSegmentLoader_.getCurrentMediaInfo_(); // one or both loaders has not loaded sufficently to get codecsif (!hasMainMediaInfo || !hasAudioMediaInfo) {return false;}return true;}getCodecsOrExclude_() {const media = {main: this.mainSegmentLoader_.getCurrentMediaInfo_() || {},audio: this.audioSegmentLoader_.getCurrentMediaInfo_() || {}};const playlist = this.mainSegmentLoader_.getPendingSegmentPlaylist() || this.media(); // set "main" media equal to videomedia.video = media.main;const playlistCodecs = codecsForPlaylist(this.main(), playlist);const codecs = {};const usingAudioLoader = !!this.mediaTypes_.AUDIO.activePlaylistLoader;if (media.main.hasVideo) {codecs.video = playlistCodecs.video || media.main.videoCodec || DEFAULT_VIDEO_CODEC;}if (media.main.isMuxed) {codecs.video += `,${playlistCodecs.audio || media.main.audioCodec || DEFAULT_AUDIO_CODEC}`;}if (media.main.hasAudio && !media.main.isMuxed || media.audio.hasAudio || usingAudioLoader) {codecs.audio = playlistCodecs.audio || media.main.audioCodec || media.audio.audioCodec || DEFAULT_AUDIO_CODEC; // set audio isFmp4 so we use the correct "supports" function belowmedia.audio.isFmp4 = media.main.hasAudio && !media.main.isMuxed ? media.main.isFmp4 : media.audio.isFmp4;} // no codecs, no playback.if (!codecs.audio && !codecs.video) {this.excludePlaylist({playlistToExclude: playlist,error: {message: 'Could not determine codecs for playlist.'},playlistExclusionDuration: Infinity});return;} // fmp4 relies on browser support, while ts relies on muxer supportconst supportFunction = (isFmp4, codec) => isFmp4 ? browserSupportsCodec(codec) : muxerSupportsCodec(codec);const unsupportedCodecs = {};let unsupportedAudio;['video', 'audio'].forEach(function (type) {if (codecs.hasOwnProperty(type) && !supportFunction(media[type].isFmp4, codecs[type])) {const supporter = media[type].isFmp4 ? 
'browser' : 'muxer';unsupportedCodecs[supporter] = unsupportedCodecs[supporter] || [];unsupportedCodecs[supporter].push(codecs[type]);if (type === 'audio') {unsupportedAudio = supporter;}}});if (usingAudioLoader && unsupportedAudio && playlist.attributes.AUDIO) {const audioGroup = playlist.attributes.AUDIO;this.main().playlists.forEach(variant => {const variantAudioGroup = variant.attributes && variant.attributes.AUDIO;if (variantAudioGroup === audioGroup && variant !== playlist) {variant.excludeUntil = Infinity;}});this.logger_(`excluding audio group ${audioGroup} as ${unsupportedAudio} does not support codec(s): "${codecs.audio}"`);} // if we have any unsupported codecs exclude this playlist.if (Object.keys(unsupportedCodecs).length) {const message = Object.keys(unsupportedCodecs).reduce((acc, supporter) => {if (acc) {acc += ', ';}acc += `${supporter} does not support codec(s): "${unsupportedCodecs[supporter].join(',')}"`;return acc;}, '') + '.';this.excludePlaylist({playlistToExclude: playlist,error: {internal: true,message},playlistExclusionDuration: Infinity});return;} // check if codec switching is happeningif (this.sourceUpdater_.hasCreatedSourceBuffers() && !this.sourceUpdater_.canChangeType()) {const switchMessages = [];['video', 'audio'].forEach(type => {const newCodec = (parseCodecs(this.sourceUpdater_.codecs[type] || '')[0] || {}).type;const oldCodec = (parseCodecs(codecs[type] || '')[0] || {}).type;if (newCodec && oldCodec && newCodec.toLowerCase() !== oldCodec.toLowerCase()) {switchMessages.push(`"${this.sourceUpdater_.codecs[type]}" -> "${codecs[type]}"`);}});if (switchMessages.length) {this.excludePlaylist({playlistToExclude: playlist,error: {message: `Codec switching not supported: ${switchMessages.join(', ')}.`,internal: true},playlistExclusionDuration: Infinity});return;}} // TODO: when using the muxer shouldn't we just return// the codecs that the muxer outputs?return codecs;}/*** Create source buffers and exlude any incompatible renditions.** @private*/tryToCreateSourceBuffers_() {// media source is not ready yet or sourceBuffers are already// created.if (this.mediaSource.readyState !== 'open' || this.sourceUpdater_.hasCreatedSourceBuffers()) {return;}if (!this.areMediaTypesKnown_()) {return;}const codecs = this.getCodecsOrExclude_(); // no codecs means that the playlist was excludedif (!codecs) {return;}this.sourceUpdater_.createSourceBuffers(codecs);const codecString = [codecs.video, codecs.audio].filter(Boolean).join(',');this.excludeIncompatibleVariants_(codecString);}/*** Excludes playlists with codecs that are unsupported by the muxer and browser.*/excludeUnsupportedVariants_() {const playlists = this.main().playlists;const ids = []; // TODO: why don't we have a property to loop through all// playlist? 
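// getCodecsOrExclude_ above resolves to a plain { video, audio } map of codec
// strings (or excludes the playlist and returns nothing); fmp4 content is gated
// on browser support while TS content is gated on muxer support. The codec
// strings below are illustrative values only; the object shape is what
// createSourceBuffers / addOrChangeSourceBuffers expect:
//
//   const codecs = { video: 'avc1.64001f', audio: 'mp4a.40.2' };
//   this.sourceUpdater_.addOrChangeSourceBuffers(codecs);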
Why did we ever mix indexes and keys?Object.keys(playlists).forEach(key => {const variant = playlists[key]; // check if we already processed this playlist.if (ids.indexOf(variant.id) !== -1) {return;}ids.push(variant.id);const codecs = codecsForPlaylist(this.main, variant);const unsupported = [];if (codecs.audio && !muxerSupportsCodec(codecs.audio) && !browserSupportsCodec(codecs.audio)) {unsupported.push(`audio codec ${codecs.audio}`);}if (codecs.video && !muxerSupportsCodec(codecs.video) && !browserSupportsCodec(codecs.video)) {unsupported.push(`video codec ${codecs.video}`);}if (codecs.text && codecs.text === 'stpp.ttml.im1t') {unsupported.push(`text codec ${codecs.text}`);}if (unsupported.length) {variant.excludeUntil = Infinity;this.logger_(`excluding ${variant.id} for unsupported: ${unsupported.join(', ')}`);}});}/*** Exclude playlists that are known to be codec or* stream-incompatible with the SourceBuffer configuration. For* instance, Media Source Extensions would cause the video element to* stall waiting for video data if you switched from a variant with* video and audio to an audio-only one.** @param {Object} media a media playlist compatible with the current* set of SourceBuffers. Variants in the current main playlist that* do not appear to have compatible codec or stream configurations* will be excluded from the default playlist selection algorithm* indefinitely.* @private*/excludeIncompatibleVariants_(codecString) {const ids = [];const playlists = this.main().playlists;const codecs = unwrapCodecList(parseCodecs(codecString));const codecCount_ = codecCount(codecs);const videoDetails = codecs.video && parseCodecs(codecs.video)[0] || null;const audioDetails = codecs.audio && parseCodecs(codecs.audio)[0] || null;Object.keys(playlists).forEach(key => {const variant = playlists[key]; // check if we already processed this playlist.// or it if it is already excluded forever.if (ids.indexOf(variant.id) !== -1 || variant.excludeUntil === Infinity) {return;}ids.push(variant.id);const exclusionReasons = []; // get codecs from the playlist for this variantconst variantCodecs = codecsForPlaylist(this.mainPlaylistLoader_.main, variant);const variantCodecCount = codecCount(variantCodecs); // if no codecs are listed, we cannot determine that this// variant is incompatible. 
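// excludeUnsupportedVariants_ above permanently excludes a variant as soon as one
// of its declared codecs is playable by neither the muxer nor the browser. A
// hypothetical walk-through (the codec string is illustrative, not a claim about
// any particular browser):
//
//   // variant CODECS resolve to { audio: 'example-unsupported-audio' }
//   // muxerSupportsCodec(codecs.audio) === false
//   // browserSupportsCodec(codecs.audio) === false
//   // => variant.excludeUntil = Infinity and the exclusion is logged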
Wait for mux.js to probeif (!variantCodecs.audio && !variantCodecs.video) {return;} // TODO: we can support this by removing the// old media source and creating a new one, but it will take some work.// The number of streams cannot changeif (variantCodecCount !== codecCount_) {exclusionReasons.push(`codec count "${variantCodecCount}" !== "${codecCount_}"`);} // only exclude playlists by codec change, if codecs cannot switch// during playback.if (!this.sourceUpdater_.canChangeType()) {const variantVideoDetails = variantCodecs.video && parseCodecs(variantCodecs.video)[0] || null;const variantAudioDetails = variantCodecs.audio && parseCodecs(variantCodecs.audio)[0] || null; // the video codec cannot changeif (variantVideoDetails && videoDetails && variantVideoDetails.type.toLowerCase() !== videoDetails.type.toLowerCase()) {exclusionReasons.push(`video codec "${variantVideoDetails.type}" !== "${videoDetails.type}"`);} // the audio codec cannot changeif (variantAudioDetails && audioDetails && variantAudioDetails.type.toLowerCase() !== audioDetails.type.toLowerCase()) {exclusionReasons.push(`audio codec "${variantAudioDetails.type}" !== "${audioDetails.type}"`);}}if (exclusionReasons.length) {variant.excludeUntil = Infinity;this.logger_(`excluding ${variant.id}: ${exclusionReasons.join(' && ')}`);}});}updateAdCues_(media) {let offset = 0;const seekable = this.seekable();if (seekable.length) {offset = seekable.start(0);}updateAdCues(media, this.cueTagsTrack_, offset);}/*** Calculates the desired forward buffer length based on current time** @return {number} Desired forward buffer length in seconds*/goalBufferLength() {const currentTime = this.tech_.currentTime();const initial = Config.GOAL_BUFFER_LENGTH;const rate = Config.GOAL_BUFFER_LENGTH_RATE;const max = Math.max(initial, Config.MAX_GOAL_BUFFER_LENGTH);return Math.min(initial + currentTime * rate, max);}/*** Calculates the desired buffer low water line based on current time** @return {number} Desired buffer low water line in seconds*/bufferLowWaterLine() {const currentTime = this.tech_.currentTime();const initial = Config.BUFFER_LOW_WATER_LINE;const rate = Config.BUFFER_LOW_WATER_LINE_RATE;const max = Math.max(initial, Config.MAX_BUFFER_LOW_WATER_LINE);const newMax = Math.max(initial, Config.EXPERIMENTAL_MAX_BUFFER_LOW_WATER_LINE);return Math.min(initial + currentTime * rate, this.bufferBasedABR ? newMax : max);}bufferHighWaterLine() {return Config.BUFFER_HIGH_WATER_LINE;}addDateRangesToTextTrack_(dateRanges) {createMetadataTrackIfNotExists(this.inbandTextTracks_, 'com.apple.streaming', this.tech_);addDateRangeMetadata({inbandTextTracks: this.inbandTextTracks_,dateRanges});}addMetadataToTextTrack(dispatchType, metadataArray, videoDuration) {const timestampOffset = this.sourceUpdater_.videoBuffer ? 
this.sourceUpdater_.videoTimestampOffset() : this.sourceUpdater_.audioTimestampOffset(); // There's potentially an issue where we could double add metadata if there's a muxed// audio/video source with a metadata track, and an alt audio with a metadata track.// However, this probably won't happen, and if it does it can be handled then.createMetadataTrackIfNotExists(this.inbandTextTracks_, dispatchType, this.tech_);addMetadata({inbandTextTracks: this.inbandTextTracks_,metadataArray,timestampOffset,videoDuration});}/*** Utility for getting the pathway or service location from an HLS or DASH playlist.** @param {Object} playlist for getting pathway from.* @return the pathway attribute of a playlist*/pathwayAttribute_(playlist) {return playlist.attributes['PATHWAY-ID'] || playlist.attributes.serviceLocation;}/*** Initialize available pathways and apply the tag properties.*/initContentSteeringController_() {const main = this.main();if (!main.contentSteering) {return;}for (const playlist of main.playlists) {this.contentSteeringController_.addAvailablePathway(this.pathwayAttribute_(playlist));}this.contentSteeringController_.assignTagProperties(main.uri, main.contentSteering); // request the steering manifest immediately if queryBeforeStart is set.if (this.contentSteeringController_.queryBeforeStart) {// When queryBeforeStart is true, initial request should omit steering parameters.this.contentSteeringController_.requestSteeringManifest(true);return;} // otherwise start content steering after playback startsthis.tech_.one('canplay', () => {this.contentSteeringController_.requestSteeringManifest();});}/*** Reset the content steering controller and re-init.*/resetContentSteeringController_() {this.contentSteeringController_.clearAvailablePathways();this.contentSteeringController_.dispose();this.initContentSteeringController_();}/*** Attaches the listeners for content steering.*/attachContentSteeringListeners_() {this.contentSteeringController_.on('content-steering', this.excludeThenChangePathway_.bind(this));if (this.sourceType_ === 'dash') {this.mainPlaylistLoader_.on('loadedplaylist', () => {const main = this.main(); // check if steering tag or pathways changed.const didDashTagChange = this.contentSteeringController_.didDASHTagChange(main.uri, main.contentSteering);const didPathwaysChange = () => {const availablePathways = this.contentSteeringController_.getAvailablePathways();const newPathways = [];for (const playlist of main.playlists) {const serviceLocation = playlist.attributes.serviceLocation;if (serviceLocation) {newPathways.push(serviceLocation);if (!availablePathways.has(serviceLocation)) {return true;}}} // If we have no new serviceLocations and previously had availablePathwaysif (!newPathways.length && availablePathways.size) {return true;}return false;};if (didDashTagChange || didPathwaysChange()) {this.resetContentSteeringController_();}});}}/*** Simple exclude and change playlist logic for content steering.*/excludeThenChangePathway_() {const currentPathway = this.contentSteeringController_.getPathway();if (!currentPathway) {return;}this.handlePathwayClones_();const main = this.main();const playlists = main.playlists;const ids = new Set();let didEnablePlaylists = false;Object.keys(playlists).forEach(key => {const variant = playlists[key];const pathwayId = this.pathwayAttribute_(variant);const differentPathwayId = pathwayId && currentPathway !== pathwayId;const steeringExclusion = variant.excludeUntil === Infinity && variant.lastExcludeReason_ === 'content-steering';if 
(steeringExclusion && !differentPathwayId) {delete variant.excludeUntil;delete variant.lastExcludeReason_;didEnablePlaylists = true;}const noExcludeUntil = !variant.excludeUntil && variant.excludeUntil !== Infinity;const shouldExclude = !ids.has(variant.id) && differentPathwayId && noExcludeUntil;if (!shouldExclude) {return;}ids.add(variant.id);variant.excludeUntil = Infinity;variant.lastExcludeReason_ = 'content-steering'; // TODO: kind of spammy, maybe move this.this.logger_(`excluding ${variant.id} for ${variant.lastExcludeReason_}`);});if (this.contentSteeringController_.manifestType_ === 'DASH') {Object.keys(this.mediaTypes_).forEach(key => {const type = this.mediaTypes_[key];if (type.activePlaylistLoader) {const currentPlaylist = type.activePlaylistLoader.media_; // Check if the current media playlist matches the current CDNif (currentPlaylist && currentPlaylist.attributes.serviceLocation !== currentPathway) {didEnablePlaylists = true;}}});}if (didEnablePlaylists) {this.changeSegmentPathway_();}}/*** Add, update, or delete playlists and media groups for* the pathway clones for HLS Content Steering.** See https://datatracker.ietf.org/doc/draft-pantos-hls-rfc8216bis/** NOTE: Pathway cloning does not currently support the `PER_VARIANT_URIS` and* `PER_RENDITION_URIS` as we do not handle `STABLE-VARIANT-ID` or* `STABLE-RENDITION-ID` values.*/handlePathwayClones_() {const main = this.main();const playlists = main.playlists;const currentPathwayClones = this.contentSteeringController_.currentPathwayClones;const nextPathwayClones = this.contentSteeringController_.nextPathwayClones;const hasClones = currentPathwayClones && currentPathwayClones.size || nextPathwayClones && nextPathwayClones.size;if (!hasClones) {return;}for (const [id, clone] of currentPathwayClones.entries()) {const newClone = nextPathwayClones.get(id); // Delete the old pathway clone.if (!newClone) {this.mainPlaylistLoader_.updateOrDeleteClone(clone);this.contentSteeringController_.excludePathway(id);}}for (const [id, clone] of nextPathwayClones.entries()) {const oldClone = currentPathwayClones.get(id); // Create a new pathway if it is a new pathway clone object.if (!oldClone) {const playlistsToClone = playlists.filter(p => {return p.attributes['PATHWAY-ID'] === clone['BASE-ID'];});playlistsToClone.forEach(p => {this.mainPlaylistLoader_.addClonePathway(clone, p);});this.contentSteeringController_.addAvailablePathway(id);continue;} // There have not been changes to the pathway clone object, so skip.if (this.equalPathwayClones_(oldClone, clone)) {continue;} // Update a preexisting cloned pathway.// True is set for the update flag.this.mainPlaylistLoader_.updateOrDeleteClone(clone, true);this.contentSteeringController_.addAvailablePathway(id);} // Deep copy contents of next to current pathways.this.contentSteeringController_.currentPathwayClones = new Map(JSON.parse(JSON.stringify([...nextPathwayClones])));}/*** Determines whether two pathway clone objects are equivalent.** @param {Object} a The first pathway clone object.* @param {Object} b The second pathway clone object.* @return {boolean} True if the pathway clone objects are equal, false otherwise.*/equalPathwayClones_(a, b) {if (a['BASE-ID'] !== b['BASE-ID'] || a.ID !== b.ID || a['URI-REPLACEMENT'].HOST !== b['URI-REPLACEMENT'].HOST) {return false;}const aParams = a['URI-REPLACEMENT'].PARAMS;const bParams = b['URI-REPLACEMENT'].PARAMS; // We need to iterate through both lists of params because one could be// missing a parameter that the other has.for (const p in aParams) 
{if (aParams[p] !== bParams[p]) {return false;}}for (const p in bParams) {if (aParams[p] !== bParams[p]) {return false;}}return true;}/*** Changes the current playlists for audio, video and subtitles after a new pathway* is chosen from content steering.*/changeSegmentPathway_() {const nextPlaylist = this.selectPlaylist();this.pauseLoading(); // Switch audio and text track playlists if necessary in DASHif (this.contentSteeringController_.manifestType_ === 'DASH') {this.switchMediaForDASHContentSteering_();}this.switchMedia_(nextPlaylist, 'content-steering');}/*** Iterates through playlists and check their keyId set and compare with the* keyStatusMap, only enable playlists that have a usable key. If the playlist* has no keyId leave it enabled by default.*/excludeNonUsablePlaylistsByKeyId_() {if (!this.mainPlaylistLoader_ || !this.mainPlaylistLoader_.main) {return;}let nonUsableKeyStatusCount = 0;const NON_USABLE = 'non-usable';this.mainPlaylistLoader_.main.playlists.forEach(playlist => {const keyIdSet = this.mainPlaylistLoader_.getKeyIdSet(playlist); // If the playlist doesn't have keyIDs lets not exclude it.if (!keyIdSet || !keyIdSet.size) {return;}keyIdSet.forEach(key => {const USABLE = 'usable';const hasUsableKeyStatus = this.keyStatusMap_.has(key) && this.keyStatusMap_.get(key) === USABLE;const nonUsableExclusion = playlist.lastExcludeReason_ === NON_USABLE && playlist.excludeUntil === Infinity;if (!hasUsableKeyStatus) {// Only exclude playlists that haven't already been excluded as non-usable.if (playlist.excludeUntil !== Infinity && playlist.lastExcludeReason_ !== NON_USABLE) {playlist.excludeUntil = Infinity;playlist.lastExcludeReason_ = NON_USABLE;this.logger_(`excluding playlist ${playlist.id} because the key ID ${key} doesn't exist in the keyStatusMap or is not ${USABLE}`);} // count all nonUsableKeyStatusnonUsableKeyStatusCount++;} else if (hasUsableKeyStatus && nonUsableExclusion) {delete playlist.excludeUntil;delete playlist.lastExcludeReason_;this.logger_(`enabling playlist ${playlist.id} because key ID ${key} is ${USABLE}`);}});}); // If for whatever reason every playlist has a non usable key status. Lets try re-including the SD renditions as a failsafe.if (nonUsableKeyStatusCount >= this.mainPlaylistLoader_.main.playlists.length) {this.mainPlaylistLoader_.main.playlists.forEach(playlist => {const isNonHD = playlist && playlist.attributes && playlist.attributes.RESOLUTION && playlist.attributes.RESOLUTION.height < 720;const excludedForNonUsableKey = playlist.excludeUntil === Infinity && playlist.lastExcludeReason_ === NON_USABLE;if (isNonHD && excludedForNonUsableKey) {// Only delete the excludeUntil so we don't try and re-exclude these playlists.delete playlist.excludeUntil;videojs.log.warn(`enabling non-HD playlist ${playlist.id} because all playlists were excluded due to ${NON_USABLE} key IDs`);}});}}/*** Adds a keystatus to the keystatus map, tries to convert to string if necessary.** @param {any} keyId the keyId to add a status for* @param {string} status the status of the keyId*/addKeyStatus_(keyId, status) {const isString = typeof keyId === 'string';const keyIdHexString = isString ? 
keyId : bufferToHexString(keyId);const formattedKeyIdString = keyIdHexString.slice(0, 32).toLowerCase();this.logger_(`KeyStatus '${status}' with key ID ${formattedKeyIdString} added to the keyStatusMap`);this.keyStatusMap_.set(formattedKeyIdString, status);}/*** Utility function for adding key status to the keyStatusMap and filtering usable encrypted playlists.** @param {any} keyId the keyId from the keystatuschange event* @param {string} status the key status string*/updatePlaylistByKeyStatus(keyId, status) {this.addKeyStatus_(keyId, status);if (!this.waitingForFastQualityPlaylistReceived_) {this.excludeNonUsableThenChangePlaylist_();} // Listen to loadedplaylist with a single listener and check for new contentProtection elements when a playlist is updated.this.mainPlaylistLoader_.off('loadedplaylist', this.excludeNonUsableThenChangePlaylist_.bind(this));this.mainPlaylistLoader_.on('loadedplaylist', this.excludeNonUsableThenChangePlaylist_.bind(this));}excludeNonUsableThenChangePlaylist_() {this.excludeNonUsablePlaylistsByKeyId_();this.fastQualityChange_();}}/*** Returns a function that acts as the Enable/disable playlist function.** @param {PlaylistLoader} loader - The main playlist loader* @param {string} playlistID - id of the playlist* @param {Function} changePlaylistFn - A function to be called after a* playlist's enabled-state has been changed. Will NOT be called if a* playlist's enabled-state is unchanged* @param {boolean=} enable - Value to set the playlist enabled-state to* or if undefined returns the current enabled-state for the playlist* @return {Function} Function for setting/getting enabled*/const enableFunction = (loader, playlistID, changePlaylistFn) => enable => {const playlist = loader.main.playlists[playlistID];const incompatible = isIncompatible(playlist);const currentlyEnabled = isEnabled(playlist);if (typeof enable === 'undefined') {return currentlyEnabled;}if (enable) {delete playlist.disabled;} else {playlist.disabled = true;}if (enable !== currentlyEnabled && !incompatible) {// Ensure the outside world knows about our changeschangePlaylistFn();if (enable) {loader.trigger('renditionenabled');} else {loader.trigger('renditiondisabled');}}return enable;};/*** The representation object encapsulates the publicly visible information* in a media playlist along with a setter/getter-type function (enabled)* for changing the enabled-state of a particular playlist entry** @class Representation*/class Representation {constructor(vhsHandler, playlist, id) {const {playlistController_: pc} = vhsHandler;const qualityChangeFunction = pc.fastQualityChange_.bind(pc); // some playlist attributes are optionalif (playlist.attributes) {const resolution = playlist.attributes.RESOLUTION;this.width = resolution && resolution.width;this.height = resolution && resolution.height;this.bandwidth = playlist.attributes.BANDWIDTH;this.frameRate = playlist.attributes['FRAME-RATE'];}this.codecs = codecsForPlaylist(pc.main(), playlist);this.playlist = playlist; // The id is simply the ordinality of the media playlist// within the main playlistthis.id = id; // Partially-apply the enableFunction to create a playlist-// specific variantthis.enabled = enableFunction(vhsHandler.playlists, playlist.id, qualityChangeFunction);}}/*** A mixin function that adds the `representations` api to an instance* of the VhsHandler class** @param {VhsHandler} vhsHandler - An instance of VhsHandler to add the* representation API into*/const renditionSelectionMixin = function (vhsHandler) {// Add a single API-specific 
function to the VhsHandler instancevhsHandler.representations = () => {const main = vhsHandler.playlistController_.main();const playlists = isAudioOnly(main) ? vhsHandler.playlistController_.getAudioTrackPlaylists_() : main.playlists;if (!playlists) {return [];}return playlists.filter(media => !isIncompatible(media)).map((e, i) => new Representation(vhsHandler, e, e.id));};};/*** @file playback-watcher.js** Playback starts, and now my watch begins. It shall not end until my death. I shall* take no wait, hold no uncleared timeouts, father no bad seeks. I shall wear no crowns* and win no glory. I shall live and die at my post. I am the corrector of the underflow.* I am the watcher of gaps. I am the shield that guards the realms of seekable. I pledge* my life and honor to the Playback Watch, for this Player and all the Players to come.*/const timerCancelEvents = ['seeking', 'seeked', 'pause', 'playing', 'error'];/*** @class PlaybackWatcher*/class PlaybackWatcher {/*** Represents an PlaybackWatcher object.** @class* @param {Object} options an object that includes the tech and settings*/constructor(options) {this.playlistController_ = options.playlistController;this.tech_ = options.tech;this.seekable = options.seekable;this.allowSeeksWithinUnsafeLiveWindow = options.allowSeeksWithinUnsafeLiveWindow;this.liveRangeSafeTimeDelta = options.liveRangeSafeTimeDelta;this.media = options.media;this.consecutiveUpdates = 0;this.lastRecordedTime = null;this.checkCurrentTimeTimeout_ = null;this.logger_ = logger('PlaybackWatcher');this.logger_('initialize');const playHandler = () => this.monitorCurrentTime_();const canPlayHandler = () => this.monitorCurrentTime_();const waitingHandler = () => this.techWaiting_();const cancelTimerHandler = () => this.resetTimeUpdate_();const pc = this.playlistController_;const loaderTypes = ['main', 'subtitle', 'audio'];const loaderChecks = {};loaderTypes.forEach(type => {loaderChecks[type] = {reset: () => this.resetSegmentDownloads_(type),updateend: () => this.checkSegmentDownloads_(type)};pc[`${type}SegmentLoader_`].on('appendsdone', loaderChecks[type].updateend); // If a rendition switch happens during a playback stall where the buffer// isn't changing we want to reset. We cannot assume that the new rendition// will also be stalled, until after new appends.pc[`${type}SegmentLoader_`].on('playlistupdate', loaderChecks[type].reset); // Playback stalls should not be detected right after seeking.// This prevents one segment playlists (single vtt or single segment content)// from being detected as stalling. As the buffer will not change in those cases, since// the buffer is the entire video duration.this.tech_.on(['seeked', 'seeking'], loaderChecks[type].reset);});/*** We check if a seek was into a gap through the following steps:* 1. We get a seeking event and we do not get a seeked event. This means that* a seek was attempted but not completed.* 2. We run `fixesBadSeeks_` on segment loader appends. 
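// The renditionSelectionMixin above is what exposes the public representations()
// API on the VHS handler. A typical selection sketch, assuming a video.js
// `player` using this tech (the 720p cutoff is an arbitrary example):
//
//   const reps = player.tech().vhs.representations();
//   reps.forEach((rep) => {
//     // enabled(false)/enabled(true) feeds back into fastQualityChange_ above
//     rep.enabled(typeof rep.height === 'number' && rep.height <= 720);
//   });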
This means that we already* removed everything from our buffer and appended a segment, and should be ready* to check for gaps.*/const setSeekingHandlers = fn => {['main', 'audio'].forEach(type => {pc[`${type}SegmentLoader_`][fn]('appended', this.seekingAppendCheck_);});};this.seekingAppendCheck_ = () => {if (this.fixesBadSeeks_()) {this.consecutiveUpdates = 0;this.lastRecordedTime = this.tech_.currentTime();setSeekingHandlers('off');}};this.clearSeekingAppendCheck_ = () => setSeekingHandlers('off');this.watchForBadSeeking_ = () => {this.clearSeekingAppendCheck_();setSeekingHandlers('on');};this.tech_.on('seeked', this.clearSeekingAppendCheck_);this.tech_.on('seeking', this.watchForBadSeeking_);this.tech_.on('waiting', waitingHandler);this.tech_.on(timerCancelEvents, cancelTimerHandler);this.tech_.on('canplay', canPlayHandler);/*An edge case exists that results in gaps not being skipped when they exist at the beginning of a stream. This caseis surfaced in one of two ways:1) The `waiting` event is fired before the player has buffered content, making it impossibleto find or skip the gap. The `waiting` event is followed by a `play` event. On first playwe can check if playback is stalled due to a gap, and skip the gap if necessary.2) A source with a gap at the beginning of the stream is loaded programatically while the playeris in a playing state. To catch this case, it's important that our one-time play listener is setupeven if the player is in a playing state*/this.tech_.one('play', playHandler); // Define the dispose function to clean up our eventsthis.dispose = () => {this.clearSeekingAppendCheck_();this.logger_('dispose');this.tech_.off('waiting', waitingHandler);this.tech_.off(timerCancelEvents, cancelTimerHandler);this.tech_.off('canplay', canPlayHandler);this.tech_.off('play', playHandler);this.tech_.off('seeking', this.watchForBadSeeking_);this.tech_.off('seeked', this.clearSeekingAppendCheck_);loaderTypes.forEach(type => {pc[`${type}SegmentLoader_`].off('appendsdone', loaderChecks[type].updateend);pc[`${type}SegmentLoader_`].off('playlistupdate', loaderChecks[type].reset);this.tech_.off(['seeked', 'seeking'], loaderChecks[type].reset);});if (this.checkCurrentTimeTimeout_) {window.clearTimeout(this.checkCurrentTimeTimeout_);}this.resetTimeUpdate_();};}/*** Periodically check current time to see if playback stopped** @private*/monitorCurrentTime_() {this.checkCurrentTime_();if (this.checkCurrentTimeTimeout_) {window.clearTimeout(this.checkCurrentTimeTimeout_);} // 42 = 24 fps // 250 is what Webkit uses // FF uses 15this.checkCurrentTimeTimeout_ = window.setTimeout(this.monitorCurrentTime_.bind(this), 250);}/*** Reset stalled download stats for a specific type of loader** @param {string} type* The segment loader type to check.** @listens SegmentLoader#playlistupdate* @listens Tech#seeking* @listens Tech#seeked*/resetSegmentDownloads_(type) {const loader = this.playlistController_[`${type}SegmentLoader_`];if (this[`${type}StalledDownloads_`] > 0) {this.logger_(`resetting possible stalled download count for ${type} loader`);}this[`${type}StalledDownloads_`] = 0;this[`${type}Buffered_`] = loader.buffered_();}/*** Checks on every segment `appendsdone` to see* if segment appends are making progress. If they are not* and we are still downloading bytes. 
We exclude the playlist.** @param {string} type* The segment loader type to check.** @listens SegmentLoader#appendsdone*/checkSegmentDownloads_(type) {const pc = this.playlistController_;const loader = pc[`${type}SegmentLoader_`];const buffered = loader.buffered_();const isBufferedDifferent = isRangeDifferent(this[`${type}Buffered_`], buffered);this[`${type}Buffered_`] = buffered; // if another watcher is going to fix the issue or// the buffered value for this loader changed// appends are workingif (isBufferedDifferent) {this.resetSegmentDownloads_(type);return;}this[`${type}StalledDownloads_`]++;this.logger_(`found #${this[`${type}StalledDownloads_`]} ${type} appends that did not increase buffer (possible stalled download)`, {playlistId: loader.playlist_ && loader.playlist_.id,buffered: timeRangesToArray(buffered)}); // after 10 possibly stalled appends with no reset, excludeif (this[`${type}StalledDownloads_`] < 10) {return;}this.logger_(`${type} loader stalled download exclusion`);this.resetSegmentDownloads_(type);this.tech_.trigger({type: 'usage',name: `vhs-${type}-download-exclusion`});if (type === 'subtitle') {return;} // TODO: should we exclude audio tracks rather than main tracks// when type is audio?pc.excludePlaylist({error: {message: `Excessive ${type} segment downloading detected.`},playlistExclusionDuration: Infinity});}/*** The purpose of this function is to emulate the "waiting" event on* browsers that do not emit it when they are waiting for more* data to continue playback** @private*/checkCurrentTime_() {if (this.tech_.paused() || this.tech_.seeking()) {return;}const currentTime = this.tech_.currentTime();const buffered = this.tech_.buffered();if (this.lastRecordedTime === currentTime && (!buffered.length || currentTime + SAFE_TIME_DELTA >= buffered.end(buffered.length - 1))) {// If current time is at the end of the final buffered region, then any playback// stall is most likely caused by buffering in a low bandwidth environment. The tech// should fire a `waiting` event in this scenario, but due to browser and tech// inconsistencies. Calling `techWaiting_` here allows us to simulate// responding to a native `waiting` event when the tech fails to emit one.return this.techWaiting_();}if (this.consecutiveUpdates >= 5 && currentTime === this.lastRecordedTime) {this.consecutiveUpdates++;this.waiting_();} else if (currentTime === this.lastRecordedTime) {this.consecutiveUpdates++;} else {this.consecutiveUpdates = 0;this.lastRecordedTime = currentTime;}}/*** Resets the 'timeupdate' mechanism designed to detect that we are stalled** @private*/resetTimeUpdate_() {this.consecutiveUpdates = 0;}/*** Fixes situations where there's a bad seek** @return {boolean} whether an action was taken to fix the seek* @private*/fixesBadSeeks_() {const seeking = this.tech_.seeking();if (!seeking) {return false;} // TODO: It's possible that these seekable checks should be moved out of this function// and into a function that runs on seekablechange. 
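// checkSegmentDownloads_ above gives up after 10 appends that never grow the
// buffer and excludes the playlist, emitting a usage event named
// `vhs-${type}-download-exclusion`. A QoS listener sketch, assuming a video.js
// `player` instance:
//
//   player.tech().on('usage', (event) => {
//     if (/-download-exclusion$/.test(event.name)) {
//       console.warn('rendition excluded after stalled downloads:', event.name);
//     }
//   });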
It's also possible that we only need// afterSeekableWindow as the buffered check at the bottom is good enough to handle before// seekable range.const seekable = this.seekable();const currentTime = this.tech_.currentTime();const isAfterSeekableRange = this.afterSeekableWindow_(seekable, currentTime, this.media(), this.allowSeeksWithinUnsafeLiveWindow);let seekTo;if (isAfterSeekableRange) {const seekableEnd = seekable.end(seekable.length - 1); // sync to live point (if VOD, our seekable was updated and we're simply adjusting)seekTo = seekableEnd;}if (this.beforeSeekableWindow_(seekable, currentTime)) {const seekableStart = seekable.start(0); // sync to the beginning of the live window// provide a buffer of .1 seconds to handle rounding/imprecise numbersseekTo = seekableStart + (// if the playlist is too short and the seekable range is an exact time (can// happen in live with a 3 segment playlist), then don't use a time deltaseekableStart === seekable.end(0) ? 0 : SAFE_TIME_DELTA);}if (typeof seekTo !== 'undefined') {this.logger_(`Trying to seek outside of seekable at time ${currentTime} with ` + `seekable range ${printableRange(seekable)}. Seeking to ` + `${seekTo}.`);this.tech_.setCurrentTime(seekTo);return true;}const sourceUpdater = this.playlistController_.sourceUpdater_;const buffered = this.tech_.buffered();const audioBuffered = sourceUpdater.audioBuffer ? sourceUpdater.audioBuffered() : null;const videoBuffered = sourceUpdater.videoBuffer ? sourceUpdater.videoBuffered() : null;const media = this.media(); // verify that at least two segment durations or one part duration have been// appended before checking for a gap.const minAppendedDuration = media.partTargetDuration ? media.partTargetDuration : (media.targetDuration - TIME_FUDGE_FACTOR) * 2; // verify that at least two segment durations have been// appended before checking for a gap.const bufferedToCheck = [audioBuffered, videoBuffered];for (let i = 0; i < bufferedToCheck.length; i++) {// skip null bufferedif (!bufferedToCheck[i]) {continue;}const timeAhead = timeAheadOf(bufferedToCheck[i], currentTime); // if we are less than two video/audio segment durations or one part// duration behind we haven't appended enough to call this a bad seek.if (timeAhead < minAppendedDuration) {return false;}}const nextRange = findNextRange(buffered, currentTime); // we have appended enough content, but we don't have anything buffered// to seek over the gapif (nextRange.length === 0) {return false;}seekTo = nextRange.start(0) + SAFE_TIME_DELTA;this.logger_(`Buffered region starts (${nextRange.start(0)}) ` + ` just beyond seek point (${currentTime}). Seeking to ${seekTo}.`);this.tech_.setCurrentTime(seekTo);return true;}/*** Handler for situations when we determine the player is waiting.** @private*/waiting_() {if (this.techWaiting_()) {return;} // All tech waiting checks failed. Use last resort correctionconst currentTime = this.tech_.currentTime();const buffered = this.tech_.buffered();const currentRange = findRange(buffered, currentTime); // Sometimes the player can stall for unknown reasons within a contiguous buffered// region with no indication that anything is amiss (seen in Firefox). Seeking to// currentTime is usually enough to kickstart the player. This checks that the player// is currently within a buffered region before attempting a corrective seek.// Chrome does not appear to continue `timeupdate` events after a `waiting` event// until there is ~ 3 seconds of forward buffer available. 
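// fixesBadSeeks_ above repairs a seek that landed in a gap by jumping to the next
// buffered range. A worked example with made-up numbers (SAFE_TIME_DELTA is the
// small constant defined earlier in this bundle, on the order of a tenth of a
// second):
//
//   // currentTime = 10.0, buffered = [[11.2, 40.0]]
//   // nextRange.start(0) = 11.2
//   // seekTo = 11.2 + SAFE_TIME_DELTA  -> tech_.setCurrentTime(seekTo)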
PlaybackWatcher should also// make sure there is ~3 seconds of forward buffer before taking any corrective action// to avoid triggering an `unknownwaiting` event when the network is slow.if (currentRange.length && currentTime + 3 <= currentRange.end(0)) {this.resetTimeUpdate_();this.tech_.setCurrentTime(currentTime);this.logger_(`Stopped at ${currentTime} while inside a buffered region ` + `[${currentRange.start(0)} -> ${currentRange.end(0)}]. Attempting to resume ` + 'playback by seeking to the current time.'); // unknown waiting corrections may be useful for monitoring QoSthis.tech_.trigger({type: 'usage',name: 'vhs-unknown-waiting'});return;}}/*** Handler for situations when the tech fires a `waiting` event** @return {boolean}* True if an action (or none) was needed to correct the waiting. False if no* checks passed* @private*/techWaiting_() {const seekable = this.seekable();const currentTime = this.tech_.currentTime();if (this.tech_.seeking()) {// Tech is seeking or already waiting on another action, no action neededreturn true;}if (this.beforeSeekableWindow_(seekable, currentTime)) {const livePoint = seekable.end(seekable.length - 1);this.logger_(`Fell out of live window at time ${currentTime}. Seeking to ` + `live point (seekable end) ${livePoint}`);this.resetTimeUpdate_();this.tech_.setCurrentTime(livePoint); // live window resyncs may be useful for monitoring QoSthis.tech_.trigger({type: 'usage',name: 'vhs-live-resync'});return true;}const sourceUpdater = this.tech_.vhs.playlistController_.sourceUpdater_;const buffered = this.tech_.buffered();const videoUnderflow = this.videoUnderflow_({audioBuffered: sourceUpdater.audioBuffered(),videoBuffered: sourceUpdater.videoBuffered(),currentTime});if (videoUnderflow) {// Even though the video underflowed and was stuck in a gap, the audio overplayed// the gap, leading currentTime into a buffered range. Seeking to currentTime// allows the video to catch up to the audio position without losing any audio// (only suffering ~3 seconds of frozen video and a pause in audio playback).this.resetTimeUpdate_();this.tech_.setCurrentTime(currentTime); // video underflow may be useful for monitoring QoSthis.tech_.trigger({type: 'usage',name: 'vhs-video-underflow'});return true;}const nextRange = findNextRange(buffered, currentTime); // check for gapif (nextRange.length > 0) {this.logger_(`Stopped at ${currentTime} and seeking to ${nextRange.start(0)}`);this.resetTimeUpdate_();this.skipTheGap_(currentTime);return true;} // All checks failed. 
Returning false to indicate failure to correct waitingreturn false;}afterSeekableWindow_(seekable, currentTime, playlist, allowSeeksWithinUnsafeLiveWindow = false) {if (!seekable.length) {// we can't make a solid case if there's no seekable, default to falsereturn false;}let allowedEnd = seekable.end(seekable.length - 1) + SAFE_TIME_DELTA;const isLive = !playlist.endList;const isLLHLS = typeof playlist.partTargetDuration === 'number';if (isLive && (isLLHLS || allowSeeksWithinUnsafeLiveWindow)) {allowedEnd = seekable.end(seekable.length - 1) + playlist.targetDuration * 3;}if (currentTime > allowedEnd) {return true;}return false;}beforeSeekableWindow_(seekable, currentTime) {if (seekable.length &&// can't fall before 0 and 0 seekable start identifies VOD streamseekable.start(0) > 0 && currentTime < seekable.start(0) - this.liveRangeSafeTimeDelta) {return true;}return false;}videoUnderflow_({videoBuffered,audioBuffered,currentTime}) {// audio only content will not have video underflow :)if (!videoBuffered) {return;}let gap; // find a gap in demuxed content.if (videoBuffered.length && audioBuffered.length) {// in Chrome audio will continue to play for ~3s when we run out of video// so we have to check that the video buffer did have some buffer in the// past.const lastVideoRange = findRange(videoBuffered, currentTime - 3);const videoRange = findRange(videoBuffered, currentTime);const audioRange = findRange(audioBuffered, currentTime);if (audioRange.length && !videoRange.length && lastVideoRange.length) {gap = {start: lastVideoRange.end(0),end: audioRange.end(0)};} // find a gap in muxed content.} else {const nextRange = findNextRange(videoBuffered, currentTime); // Even if there is no available next range, there is still a possibility we are// stuck in a gap due to video underflow.if (!nextRange.length) {gap = this.gapFromVideoUnderflow_(videoBuffered, currentTime);}}if (gap) {this.logger_(`Encountered a gap in video from ${gap.start} to ${gap.end}. ` + `Seeking to current time ${currentTime}`);return true;}return false;}/*** Timer callback. If playback still has not proceeded, then we seek* to the start of the next buffered region.** @private*/skipTheGap_(scheduledCurrentTime) {const buffered = this.tech_.buffered();const currentTime = this.tech_.currentTime();const nextRange = findNextRange(buffered, currentTime);this.resetTimeUpdate_();if (nextRange.length === 0 || currentTime !== scheduledCurrentTime) {return;}this.logger_('skipTheGap_:', 'currentTime:', currentTime, 'scheduled currentTime:', scheduledCurrentTime, 'nextRange start:', nextRange.start(0)); // only seek if we still have not playedthis.tech_.setCurrentTime(nextRange.start(0) + TIME_FUDGE_FACTOR);this.tech_.trigger({type: 'usage',name: 'vhs-gap-skip'});}gapFromVideoUnderflow_(buffered, currentTime) {// At least in Chrome, if there is a gap in the video buffer, the audio will continue// playing for ~3 seconds after the video gap starts. This is done to account for// video buffer underflow/underrun (note that this is not done when there is audio// buffer underflow/underrun -- in that case the video will stop as soon as it// encounters the gap, as audio stalls are more noticeable/jarring to a user than// video stalls). 
The player's time will reflect the playthrough of audio, so the// time will appear as if we are in a buffered region, even if we are stuck in a// "gap."//// Example:// video buffer: 0 => 10.1, 10.2 => 20// audio buffer: 0 => 20// overall buffer: 0 => 10.1, 10.2 => 20// current time: 13//// Chrome's video froze at 10 seconds, where the video buffer encountered the gap,// however, the audio continued playing until it reached ~3 seconds past the gap// (13 seconds), at which point it stops as well. Since current time is past the// gap, findNextRange will return no ranges.//// To check for this issue, we see if there is a gap that starts somewhere within// a 3 second range (3 seconds +/- 1 second) back from our current time.const gaps = findGaps(buffered);for (let i = 0; i < gaps.length; i++) {const start = gaps.start(i);const end = gaps.end(i); // gap is starts no more than 4 seconds backif (currentTime - start < 4 && currentTime - start > 2) {return {start,end};}}return null;}}const defaultOptions = {errorInterval: 30,getSource(next) {const tech = this.tech({IWillNotUseThisInPlugins: true});const sourceObj = tech.currentSource_ || this.currentSource();return next(sourceObj);}};/*** Main entry point for the plugin** @param {Player} player a reference to a videojs Player instance* @param {Object} [options] an object with plugin options* @private*/const initPlugin = function (player, options) {let lastCalled = 0;let seekTo = 0;const localOptions = merge(defaultOptions, options);player.ready(() => {player.trigger({type: 'usage',name: 'vhs-error-reload-initialized'});});/*** Player modifications to perform that must wait until `loadedmetadata`* has been triggered** @private*/const loadedMetadataHandler = function () {if (seekTo) {player.currentTime(seekTo);}};/*** Set the source on the player element, play, and seek if necessary** @param {Object} sourceObj An object specifying the source url and mime-type to play* @private*/const setSource = function (sourceObj) {if (sourceObj === null || sourceObj === undefined) {return;}seekTo = player.duration() !== Infinity && player.currentTime() || 0;player.one('loadedmetadata', loadedMetadataHandler);player.src(sourceObj);player.trigger({type: 'usage',name: 'vhs-error-reload'});player.play();};/*** Attempt to get a source from either the built-in getSource function* or a custom function provided via the options** @private*/const errorHandler = function () {// Do not attempt to reload the source if a source-reload occurred before// 'errorInterval' time has elapsed since the last source-reloadif (Date.now() - lastCalled < localOptions.errorInterval * 1000) {player.trigger({type: 'usage',name: 'vhs-error-reload-canceled'});return;}if (!localOptions.getSource || typeof localOptions.getSource !== 'function') {videojs.log.error('ERROR: reloadSourceOnError - The option getSource must be a function!');return;}lastCalled = Date.now();return localOptions.getSource.call(player, setSource);};/*** Unbind any event handlers that were bound by the plugin** @private*/const cleanupEvents = function () {player.off('loadedmetadata', loadedMetadataHandler);player.off('error', errorHandler);player.off('dispose', cleanupEvents);};/*** Cleanup before re-initializing the plugin** @param {Object} [newOptions] an object with plugin options* @private*/const reinitPlugin = function (newOptions) {cleanupEvents();initPlugin(player, newOptions);};player.on('error', errorHandler);player.on('dispose', cleanupEvents); // Overwrite the plugin function so that we can correctly cleanup 
before// initializing the pluginplayer.reloadSourceOnError = reinitPlugin;};/*** Reload the source when an error is detected as long as there* wasn't an error previously within the last 30 seconds** @param {Object} [options] an object with plugin options*/const reloadSourceOnError = function (options) {initPlugin(this, options);};var version$4 = "3.10.0";var version$3 = "7.0.2";var version$2 = "1.3.0";var version$1 = "7.1.0";var version = "4.0.1";/*** @file videojs-http-streaming.js** The main file for the VHS project.* License: https://github.com/videojs/videojs-http-streaming/blob/main/LICENSE*/const Vhs = {PlaylistLoader,Playlist,utils,STANDARD_PLAYLIST_SELECTOR: lastBandwidthSelector,INITIAL_PLAYLIST_SELECTOR: lowestBitrateCompatibleVariantSelector,lastBandwidthSelector,movingAverageBandwidthSelector,comparePlaylistBandwidth,comparePlaylistResolution,xhr: xhrFactory()}; // Define getter/setters for config propertiesObject.keys(Config).forEach(prop => {Object.defineProperty(Vhs, prop, {get() {videojs.log.warn(`using Vhs.${prop} is UNSAFE be sure you know what you are doing`);return Config[prop];},set(value) {videojs.log.warn(`using Vhs.${prop} is UNSAFE be sure you know what you are doing`);if (typeof value !== 'number' || value < 0) {videojs.log.warn(`value of Vhs.${prop} must be greater than or equal to 0`);return;}Config[prop] = value;}});});const LOCAL_STORAGE_KEY = 'videojs-vhs';/*** Updates the selectedIndex of the QualityLevelList when a mediachange happens in vhs.** @param {QualityLevelList} qualityLevels The QualityLevelList to update.* @param {PlaylistLoader} playlistLoader PlaylistLoader containing the new media info.* @function handleVhsMediaChange*/const handleVhsMediaChange = function (qualityLevels, playlistLoader) {const newPlaylist = playlistLoader.media();let selectedIndex = -1;for (let i = 0; i < qualityLevels.length; i++) {if (qualityLevels[i].id === newPlaylist.id) {selectedIndex = i;break;}}qualityLevels.selectedIndex_ = selectedIndex;qualityLevels.trigger({selectedIndex,type: 'change'});};/*** Adds quality levels to list once playlist metadata is available** @param {QualityLevelList} qualityLevels The QualityLevelList to attach events to.* @param {Object} vhs Vhs object to listen to for media events.* @function handleVhsLoadedMetadata*/const handleVhsLoadedMetadata = function (qualityLevels, vhs) {vhs.representations().forEach(rep => {qualityLevels.addQualityLevel(rep);});handleVhsMediaChange(qualityLevels, vhs.playlists);}; // VHS is a source handler, not a tech. Make sure attempts to use it// as one do not cause exceptions.Vhs.canPlaySource = function () {return videojs.log.warn('VHS is no longer a tech. 
Please remove it from ' + 'your player\'s techOrder.');};const emeKeySystems = (keySystemOptions, mainPlaylist, audioPlaylist) => {if (!keySystemOptions) {return keySystemOptions;}let codecs = {};if (mainPlaylist && mainPlaylist.attributes && mainPlaylist.attributes.CODECS) {codecs = unwrapCodecList(parseCodecs(mainPlaylist.attributes.CODECS));}if (audioPlaylist && audioPlaylist.attributes && audioPlaylist.attributes.CODECS) {codecs.audio = audioPlaylist.attributes.CODECS;}const videoContentType = getMimeForCodec(codecs.video);const audioContentType = getMimeForCodec(codecs.audio); // upsert the content types based on the selected playlistconst keySystemContentTypes = {};for (const keySystem in keySystemOptions) {keySystemContentTypes[keySystem] = {};if (audioContentType) {keySystemContentTypes[keySystem].audioContentType = audioContentType;}if (videoContentType) {keySystemContentTypes[keySystem].videoContentType = videoContentType;} // Default to using the video playlist's PSSH even though they may be different, as// videojs-contrib-eme will only accept one in the options.//// This shouldn't be an issue for most cases as early intialization will handle all// unique PSSH values, and if they aren't, then encrypted events should have the// specific information needed for the unique license.if (mainPlaylist.contentProtection && mainPlaylist.contentProtection[keySystem] && mainPlaylist.contentProtection[keySystem].pssh) {keySystemContentTypes[keySystem].pssh = mainPlaylist.contentProtection[keySystem].pssh;} // videojs-contrib-eme accepts the option of specifying: 'com.some.cdm': 'url'// so we need to prevent overwriting the URL entirelyif (typeof keySystemOptions[keySystem] === 'string') {keySystemContentTypes[keySystem].url = keySystemOptions[keySystem];}}return merge(keySystemOptions, keySystemContentTypes);};/*** @typedef {Object} KeySystems** keySystems configuration for https://github.com/videojs/videojs-contrib-eme* Note: not all options are listed here.** @property {Uint8Array} [pssh]* Protection System Specific Header*//*** Goes through all the playlists and collects an array of KeySystems options objects* containing each playlist's keySystems and their pssh values, if available.** @param {Object[]} playlists* The playlists to look through* @param {string[]} keySystems* The keySystems to collect pssh values for** @return {KeySystems[]}* An array of KeySystems objects containing available key systems and their* pssh values*/const getAllPsshKeySystemsOptions = (playlists, keySystems) => {return playlists.reduce((keySystemsArr, playlist) => {if (!playlist.contentProtection) {return keySystemsArr;}const keySystemsOptions = keySystems.reduce((keySystemsObj, keySystem) => {const keySystemOptions = playlist.contentProtection[keySystem];if (keySystemOptions && keySystemOptions.pssh) {keySystemsObj[keySystem] = {pssh: keySystemOptions.pssh};}return keySystemsObj;}, {});if (Object.keys(keySystemsOptions).length) {keySystemsArr.push(keySystemsOptions);}return keySystemsArr;}, []);};/*** Returns a promise that waits for the* [eme plugin](https://github.com/videojs/videojs-contrib-eme) to create a key session.** Works around https://bugs.chromium.org/p/chromium/issues/detail?id=895449 in non-IE11* browsers.** As per the above ticket, this is particularly important for Chrome, where, if* unencrypted content is appended before encrypted content and the key session has not* been created, a MEDIA_ERR_DECODE will be thrown once the encrypted content is reached* during playback.** @param {Object} 
player* The player instance* @param {Object[]} sourceKeySystems* The key systems options from the player source* @param {Object} [audioMedia]* The active audio media playlist (optional)* @param {Object[]} mainPlaylists* The playlists found on the main playlist object** @return {Object}* Promise that resolves when the key session has been created*/const waitForKeySessionCreation = ({player,sourceKeySystems,audioMedia,mainPlaylists}) => {if (!player.eme.initializeMediaKeys) {return Promise.resolve();} // TODO should all audio PSSH values be initialized for DRM?//// All unique video rendition pssh values are initialized for DRM, but here only// the initial audio playlist license is initialized. In theory, an encrypted// event should be fired if the user switches to an alternative audio playlist// where a license is required, but this case hasn't yet been tested. In addition, there// may be many alternate audio playlists unlikely to be used (e.g., multiple different// languages).const playlists = audioMedia ? mainPlaylists.concat([audioMedia]) : mainPlaylists;const keySystemsOptionsArr = getAllPsshKeySystemsOptions(playlists, Object.keys(sourceKeySystems));const initializationFinishedPromises = [];const keySessionCreatedPromises = []; // Since PSSH values are interpreted as initData, EME will dedupe any duplicates. The// only place where it should not be deduped is for ms-prefixed APIs, but// the existence of modern EME APIs in addition to// ms-prefixed APIs on Edge should prevent this from being a concern.// initializeMediaKeys also won't use the webkit-prefixed APIs.keySystemsOptionsArr.forEach(keySystemsOptions => {keySessionCreatedPromises.push(new Promise((resolve, reject) => {player.tech_.one('keysessioncreated', resolve);}));initializationFinishedPromises.push(new Promise((resolve, reject) => {player.eme.initializeMediaKeys({keySystems: keySystemsOptions}, err => {if (err) {reject(err);return;}resolve();});}));}); // The reasons Promise.race is chosen over Promise.any://// * Promise.any is only available in Safari 14+.// * None of these promises are expected to reject. 
If they do reject, it might be// better here for the race to surface the rejection, rather than mask it by using// Promise.any.return Promise.race([// If a session was previously created, these will all finish resolving without// creating a new session, otherwise it will take until the end of all license// requests, which is why the key session check is used (to make setup much faster).Promise.all(initializationFinishedPromises),// Once a single session is created, the browser knows DRM will be used.Promise.race(keySessionCreatedPromises)]);};/*** If the [eme](https://github.com/videojs/videojs-contrib-eme) plugin is available, and* there are keySystems on the source, sets up source options to prepare the source for* eme.** @param {Object} player* The player instance* @param {Object[]} sourceKeySystems* The key systems options from the player source* @param {Object} media* The active media playlist* @param {Object} [audioMedia]* The active audio media playlist (optional)** @return {boolean}* Whether or not options were configured and EME is available*/const setupEmeOptions = ({player,sourceKeySystems,media,audioMedia}) => {const sourceOptions = emeKeySystems(sourceKeySystems, media, audioMedia);if (!sourceOptions) {return false;}player.currentSource().keySystems = sourceOptions; // eme handles the rest of the setup, so if it is missing// do nothing.if (sourceOptions && !player.eme) {videojs.log.warn('DRM encrypted source cannot be decrypted without a DRM plugin');return false;}return true;};const getVhsLocalStorage = () => {if (!window.localStorage) {return null;}const storedObject = window.localStorage.getItem(LOCAL_STORAGE_KEY);if (!storedObject) {return null;}try {return JSON.parse(storedObject);} catch (e) {// someone may have tampered with the valuereturn null;}};const updateVhsLocalStorage = options => {if (!window.localStorage) {return false;}let objectToStore = getVhsLocalStorage();objectToStore = objectToStore ? merge(objectToStore, options) : options;try {window.localStorage.setItem(LOCAL_STORAGE_KEY, JSON.stringify(objectToStore));} catch (e) {// Throws if storage is full (e.g., always on iOS 5+ Safari private mode, where// storage is set to 0).// https://developer.mozilla.org/en-US/docs/Web/API/Storage/setItem#Exceptions// No need to perform any operation.return false;}return objectToStore;};/*** Parses VHS-supported media types from data URIs. 
See* https://developer.mozilla.org/en-US/docs/Web/HTTP/Basics_of_HTTP/Data_URIs* for information on data URIs.** @param {string} dataUri* The data URI** @return {string|Object}* The parsed object/string, or the original string if no supported media type* was found*/const expandDataUri = dataUri => {if (dataUri.toLowerCase().indexOf('data:application/vnd.videojs.vhs+json,') === 0) {return JSON.parse(dataUri.substring(dataUri.indexOf(',') + 1));} // no known case for this data URI, return the string as-isreturn dataUri;};/*** Adds a request hook to an xhr object** @param {Object} xhr object to add the onRequest hook to* @param {function} callback hook function for an xhr request*/const addOnRequestHook = (xhr, callback) => {if (!xhr._requestCallbackSet) {xhr._requestCallbackSet = new Set();}xhr._requestCallbackSet.add(callback);};/*** Adds a response hook to an xhr object** @param {Object} xhr object to add the onResponse hook to* @param {function} callback hook function for an xhr response*/const addOnResponseHook = (xhr, callback) => {if (!xhr._responseCallbackSet) {xhr._responseCallbackSet = new Set();}xhr._responseCallbackSet.add(callback);};/*** Removes a request hook on an xhr object, deletes the onRequest set if empty.** @param {Object} xhr object to remove the onRequest hook from* @param {function} callback hook function to remove*/const removeOnRequestHook = (xhr, callback) => {if (!xhr._requestCallbackSet) {return;}xhr._requestCallbackSet.delete(callback);if (!xhr._requestCallbackSet.size) {delete xhr._requestCallbackSet;}};/*** Removes a response hook on an xhr object, deletes the onResponse set if empty.** @param {Object} xhr object to remove the onResponse hook from* @param {function} callback hook function to remove*/const removeOnResponseHook = (xhr, callback) => {if (!xhr._responseCallbackSet) {return;}xhr._responseCallbackSet.delete(callback);if (!xhr._responseCallbackSet.size) {delete xhr._responseCallbackSet;}};/*** Whether the browser has built-in HLS support.*/Vhs.supportsNativeHls = function () {if (!document || !document.createElement) {return false;}const video = document.createElement('video'); // native HLS is definitely not supported if HTML5 video isn'tif (!videojs.getTech('Html5').isSupported()) {return false;} // HLS manifests can go by many mime-typesconst canPlay = [// Apple santioned'application/vnd.apple.mpegurl',// Apple sanctioned for backwards compatibility'audio/mpegurl',// Very common'audio/x-mpegurl',// Very common'application/x-mpegurl',// Included for completeness'video/x-mpegurl', 'video/mpegurl', 'application/mpegurl'];return canPlay.some(function (canItPlay) {return /maybe|probably/i.test(video.canPlayType(canItPlay));});}();Vhs.supportsNativeDash = function () {if (!document || !document.createElement || !videojs.getTech('Html5').isSupported()) {return false;}return /maybe|probably/i.test(document.createElement('video').canPlayType('application/dash+xml'));}();Vhs.supportsTypeNatively = type => {if (type === 'hls') {return Vhs.supportsNativeHls;}if (type === 'dash') {return Vhs.supportsNativeDash;}return false;};/*** VHS is a source handler, not a tech. Make sure attempts to use it* as one do not cause exceptions.*/Vhs.isSupported = function () {return videojs.log.warn('VHS is no longer a tech. 
Please remove it from ' + 'your player\'s techOrder.');};/*** A global function for setting an onRequest hook** @param {function} callback for request modifiction*/Vhs.xhr.onRequest = function (callback) {addOnRequestHook(Vhs.xhr, callback);};/*** A global function for setting an onResponse hook** @param {callback} callback for response data retrieval*/Vhs.xhr.onResponse = function (callback) {addOnResponseHook(Vhs.xhr, callback);};/*** Deletes a global onRequest callback if it exists** @param {function} callback to delete from the global set*/Vhs.xhr.offRequest = function (callback) {removeOnRequestHook(Vhs.xhr, callback);};/*** Deletes a global onResponse callback if it exists** @param {function} callback to delete from the global set*/Vhs.xhr.offResponse = function (callback) {removeOnResponseHook(Vhs.xhr, callback);};const Component = videojs.getComponent('Component');/*** The Vhs Handler object, where we orchestrate all of the parts* of VHS to interact with video.js** @class VhsHandler* @extends videojs.Component* @param {Object} source the soruce object* @param {Tech} tech the parent tech object* @param {Object} options optional and required options*/class VhsHandler extends Component {constructor(source, tech, options) {super(tech, options.vhs); // if a tech level `initialBandwidth` option was passed// use that over the VHS level `bandwidth` optionif (typeof options.initialBandwidth === 'number') {this.options_.bandwidth = options.initialBandwidth;}this.logger_ = logger('VhsHandler'); // we need access to the player in some cases,// so, get it from Video.js via the `playerId`if (tech.options_ && tech.options_.playerId) {const _player = videojs.getPlayer(tech.options_.playerId);this.player_ = _player;}this.tech_ = tech;this.source_ = source;this.stats = {};this.ignoreNextSeekingEvent_ = false;this.setOptions_();if (this.options_.overrideNative && tech.overrideNativeAudioTracks && tech.overrideNativeVideoTracks) {tech.overrideNativeAudioTracks(true);tech.overrideNativeVideoTracks(true);} else if (this.options_.overrideNative && (tech.featuresNativeVideoTracks || tech.featuresNativeAudioTracks)) {// overriding native VHS only works if audio tracks have been emulated// error early if we're misconfiguredthrow new Error('Overriding native VHS requires emulated tracks. 
' + 'See https://git.io/vMpjB');} // listen for fullscreenchange events for this player so that we// can adjust our quality selection quicklythis.on(document, ['fullscreenchange', 'webkitfullscreenchange', 'mozfullscreenchange', 'MSFullscreenChange'], event => {const fullscreenElement = document.fullscreenElement || document.webkitFullscreenElement || document.mozFullScreenElement || document.msFullscreenElement;if (fullscreenElement && fullscreenElement.contains(this.tech_.el())) {this.playlistController_.fastQualityChange_();} else {// When leaving fullscreen, since the in page pixel dimensions should be smaller// than full screen, see if there should be a rendition switch down to preserve// bandwidth.this.playlistController_.checkABR_();}});this.on(this.tech_, 'seeking', function () {if (this.ignoreNextSeekingEvent_) {this.ignoreNextSeekingEvent_ = false;return;}this.setCurrentTime(this.tech_.currentTime());});this.on(this.tech_, 'error', function () {// verify that the error was real and we are loaded// enough to have pc loaded.if (this.tech_.error() && this.playlistController_) {this.playlistController_.pauseLoading();}});this.on(this.tech_, 'play', this.play);}/*** Set VHS options based on options from configuration, as well as partial* options to be passed at a later time.** @param {Object} options A partial chunk of config options*/setOptions_(options = {}) {this.options_ = merge(this.options_, options); // defaultsthis.options_.withCredentials = this.options_.withCredentials || false;this.options_.limitRenditionByPlayerDimensions = this.options_.limitRenditionByPlayerDimensions === false ? false : true;this.options_.useDevicePixelRatio = this.options_.useDevicePixelRatio || false;this.options_.useBandwidthFromLocalStorage = typeof this.source_.useBandwidthFromLocalStorage !== 'undefined' ? this.source_.useBandwidthFromLocalStorage : this.options_.useBandwidthFromLocalStorage || false;this.options_.useForcedSubtitles = this.options_.useForcedSubtitles || false;this.options_.useNetworkInformationApi = this.options_.useNetworkInformationApi || false;this.options_.useDtsForTimestampOffset = this.options_.useDtsForTimestampOffset || false;this.options_.customTagParsers = this.options_.customTagParsers || [];this.options_.customTagMappers = this.options_.customTagMappers || [];this.options_.cacheEncryptionKeys = this.options_.cacheEncryptionKeys || false;this.options_.llhls = this.options_.llhls === false ? 
false : true;this.options_.bufferBasedABR = this.options_.bufferBasedABR || false;if (typeof this.options_.playlistExclusionDuration !== 'number') {this.options_.playlistExclusionDuration = 60;}if (typeof this.options_.bandwidth !== 'number') {if (this.options_.useBandwidthFromLocalStorage) {const storedObject = getVhsLocalStorage();if (storedObject && storedObject.bandwidth) {this.options_.bandwidth = storedObject.bandwidth;this.tech_.trigger({type: 'usage',name: 'vhs-bandwidth-from-local-storage'});}if (storedObject && storedObject.throughput) {this.options_.throughput = storedObject.throughput;this.tech_.trigger({type: 'usage',name: 'vhs-throughput-from-local-storage'});}}} // if bandwidth was not set by options or pulled from local storage, start playlist// selection at a reasonable bandwidthif (typeof this.options_.bandwidth !== 'number') {this.options_.bandwidth = Config.INITIAL_BANDWIDTH;} // If the bandwidth number is unchanged from the initial setting// then this takes precedence over the enableLowInitialPlaylist optionthis.options_.enableLowInitialPlaylist = this.options_.enableLowInitialPlaylist && this.options_.bandwidth === Config.INITIAL_BANDWIDTH; // grab options passed to player.src['withCredentials', 'useDevicePixelRatio', 'limitRenditionByPlayerDimensions', 'bandwidth', 'customTagParsers', 'customTagMappers', 'cacheEncryptionKeys', 'playlistSelector', 'initialPlaylistSelector', 'bufferBasedABR', 'liveRangeSafeTimeDelta', 'llhls', 'useForcedSubtitles', 'useNetworkInformationApi', 'useDtsForTimestampOffset', 'exactManifestTimings', 'leastPixelDiffSelector'].forEach(option => {if (typeof this.source_[option] !== 'undefined') {this.options_[option] = this.source_[option];}});this.limitRenditionByPlayerDimensions = this.options_.limitRenditionByPlayerDimensions;this.useDevicePixelRatio = this.options_.useDevicePixelRatio;} // alias for public method to set optionssetOptions(options = {}) {this.setOptions_(options);}/*** called when player.src gets called, handle a new source** @param {Object} src the source object to handle*/src(src, type) {// do nothing if the src is falseyif (!src) {return;}this.setOptions_(); // add main playlist controller optionsthis.options_.src = expandDataUri(this.source_.src);this.options_.tech = this.tech_;this.options_.externVhs = Vhs;this.options_.sourceType = simpleTypeFromSourceType(type); // Whenever we seek internally, we should update the techthis.options_.seekTo = time => {this.tech_.setCurrentTime(time);};this.playlistController_ = new PlaylistController(this.options_);const playbackWatcherOptions = merge({liveRangeSafeTimeDelta: SAFE_TIME_DELTA}, this.options_, {seekable: () => this.seekable(),media: () => this.playlistController_.media(),playlistController: this.playlistController_});this.playbackWatcher_ = new PlaybackWatcher(playbackWatcherOptions);this.playlistController_.on('error', () => {const player = videojs.players[this.tech_.options_.playerId];let error = this.playlistController_.error;if (typeof error === 'object' && !error.code) {error.code = 3;} else if (typeof error === 'string') {error = {message: error,code: 3};}player.error(error);});const defaultSelector = this.options_.bufferBasedABR ? Vhs.movingAverageBandwidthSelector(0.55) : Vhs.STANDARD_PLAYLIST_SELECTOR; // `this` in selectPlaylist should be the VhsHandler for backwards// compatibility with < v2this.playlistController_.selectPlaylist = this.selectPlaylist ? 
this.selectPlaylist.bind(this) : defaultSelector.bind(this);this.playlistController_.selectInitialPlaylist = Vhs.INITIAL_PLAYLIST_SELECTOR.bind(this); // re-expose some internal objects for backwards compatibility with < v2this.playlists = this.playlistController_.mainPlaylistLoader_;this.mediaSource = this.playlistController_.mediaSource; // Proxy assignment of some properties to the main playlist// controller. Using a custom property for backwards compatibility// with < v2Object.defineProperties(this, {selectPlaylist: {get() {return this.playlistController_.selectPlaylist;},set(selectPlaylist) {this.playlistController_.selectPlaylist = selectPlaylist.bind(this);}},throughput: {get() {return this.playlistController_.mainSegmentLoader_.throughput.rate;},set(throughput) {this.playlistController_.mainSegmentLoader_.throughput.rate = throughput; // By setting `count` to 1 the throughput value becomes the starting value// for the cumulative averagethis.playlistController_.mainSegmentLoader_.throughput.count = 1;}},bandwidth: {get() {let playerBandwidthEst = this.playlistController_.mainSegmentLoader_.bandwidth;const networkInformation = window.navigator.connection || window.navigator.mozConnection || window.navigator.webkitConnection;const tenMbpsAsBitsPerSecond = 10e6;if (this.options_.useNetworkInformationApi && networkInformation) {// downlink returns Mbps// https://developer.mozilla.org/en-US/docs/Web/API/NetworkInformation/downlinkconst networkInfoBandwidthEstBitsPerSec = networkInformation.downlink * 1000 * 1000; // downlink maxes out at 10 Mbps. In the event that both networkInformationApi and the player// estimate a bandwidth greater than 10 Mbps, use the larger of the two estimates to ensure that// high quality streams are not filtered out.if (networkInfoBandwidthEstBitsPerSec >= tenMbpsAsBitsPerSecond && playerBandwidthEst >= tenMbpsAsBitsPerSecond) {playerBandwidthEst = Math.max(playerBandwidthEst, networkInfoBandwidthEstBitsPerSec);} else {playerBandwidthEst = networkInfoBandwidthEstBitsPerSec;}}return playerBandwidthEst;},set(bandwidth) {this.playlistController_.mainSegmentLoader_.bandwidth = bandwidth; // setting the bandwidth manually resets the throughput counter// `count` is set to zero that current value of `rate` isn't included// in the cumulative averagethis.playlistController_.mainSegmentLoader_.throughput = {rate: 0,count: 0};}},/*** `systemBandwidth` is a combination of two serial processes bit-rates. 
The first* is the network bitrate provided by `bandwidth` and the second is the bitrate of* the entire process after that - decryption, transmuxing, and appending - provided* by `throughput`.** Since the two process are serial, the overall system bandwidth is given by:* sysBandwidth = 1 / (1 / bandwidth + 1 / throughput)*/systemBandwidth: {get() {const invBandwidth = 1 / (this.bandwidth || 1);let invThroughput;if (this.throughput > 0) {invThroughput = 1 / this.throughput;} else {invThroughput = 0;}const systemBitrate = Math.floor(1 / (invBandwidth + invThroughput));return systemBitrate;},set() {videojs.log.error('The "systemBandwidth" property is read-only');}}});if (this.options_.bandwidth) {this.bandwidth = this.options_.bandwidth;}if (this.options_.throughput) {this.throughput = this.options_.throughput;}Object.defineProperties(this.stats, {bandwidth: {get: () => this.bandwidth || 0,enumerable: true},mediaRequests: {get: () => this.playlistController_.mediaRequests_() || 0,enumerable: true},mediaRequestsAborted: {get: () => this.playlistController_.mediaRequestsAborted_() || 0,enumerable: true},mediaRequestsTimedout: {get: () => this.playlistController_.mediaRequestsTimedout_() || 0,enumerable: true},mediaRequestsErrored: {get: () => this.playlistController_.mediaRequestsErrored_() || 0,enumerable: true},mediaTransferDuration: {get: () => this.playlistController_.mediaTransferDuration_() || 0,enumerable: true},mediaBytesTransferred: {get: () => this.playlistController_.mediaBytesTransferred_() || 0,enumerable: true},mediaSecondsLoaded: {get: () => this.playlistController_.mediaSecondsLoaded_() || 0,enumerable: true},mediaAppends: {get: () => this.playlistController_.mediaAppends_() || 0,enumerable: true},mainAppendsToLoadedData: {get: () => this.playlistController_.mainAppendsToLoadedData_() || 0,enumerable: true},audioAppendsToLoadedData: {get: () => this.playlistController_.audioAppendsToLoadedData_() || 0,enumerable: true},appendsToLoadedData: {get: () => this.playlistController_.appendsToLoadedData_() || 0,enumerable: true},timeToLoadedData: {get: () => this.playlistController_.timeToLoadedData_() || 0,enumerable: true},buffered: {get: () => timeRangesToArray(this.tech_.buffered()),enumerable: true},currentTime: {get: () => this.tech_.currentTime(),enumerable: true},currentSource: {get: () => this.tech_.currentSource_,enumerable: true},currentTech: {get: () => this.tech_.name_,enumerable: true},duration: {get: () => this.tech_.duration(),enumerable: true},main: {get: () => this.playlists.main,enumerable: true},playerDimensions: {get: () => this.tech_.currentDimensions(),enumerable: true},seekable: {get: () => timeRangesToArray(this.tech_.seekable()),enumerable: true},timestamp: {get: () => Date.now(),enumerable: true},videoPlaybackQuality: {get: () => this.tech_.getVideoPlaybackQuality(),enumerable: true}});this.tech_.one('canplay', this.playlistController_.setupFirstPlay.bind(this.playlistController_));this.tech_.on('bandwidthupdate', () => {if (this.options_.useBandwidthFromLocalStorage) {updateVhsLocalStorage({bandwidth: this.bandwidth,throughput: Math.round(this.throughput)});}});this.playlistController_.on('selectedinitialmedia', () => {// Add the manual rendition mix-in to VhsHandlerrenditionSelectionMixin(this);});this.playlistController_.sourceUpdater_.on('createdsourcebuffers', () => {this.setupEme_();}); // the bandwidth of the primary segment loader is our best// estimate of overall bandwidththis.on(this.playlistController_, 'progress', function () 
{this.tech_.trigger('progress');}); // In the live case, we need to ignore the very first `seeking` event since// that will be the result of the seek-to-live behaviorthis.on(this.playlistController_, 'firstplay', function () {this.ignoreNextSeekingEvent_ = true;});this.setupQualityLevels_(); // do nothing if the tech has been disposed already// this can occur if someone sets the src in player.ready(), for instanceif (!this.tech_.el()) {return;}this.mediaSourceUrl_ = window.URL.createObjectURL(this.playlistController_.mediaSource);this.tech_.src(this.mediaSourceUrl_);}createKeySessions_() {const audioPlaylistLoader = this.playlistController_.mediaTypes_.AUDIO.activePlaylistLoader;this.logger_('waiting for EME key session creation');waitForKeySessionCreation({player: this.player_,sourceKeySystems: this.source_.keySystems,audioMedia: audioPlaylistLoader && audioPlaylistLoader.media(),mainPlaylists: this.playlists.main.playlists}).then(() => {this.logger_('created EME key session');this.playlistController_.sourceUpdater_.initializedEme();}).catch(err => {this.logger_('error while creating EME key session', err);this.player_.error({message: 'Failed to initialize media keys for EME',code: 3});});}handleWaitingForKey_() {// If waitingforkey is fired, it's possible that the data that's necessary to retrieve// the key is in the manifest. While this should've happened on initial source load, it// may happen again in live streams where the keys change, and the manifest info// reflects the update.//// Because videojs-contrib-eme compares the PSSH data we send to that of PSSH data it's// already requested keys for, we don't have to worry about this generating extraneous// requests.this.logger_('waitingforkey fired, attempting to create any new key sessions');this.createKeySessions_();}/*** If necessary and EME is available, sets up EME options and waits for key session* creation.** This function also updates the source updater so taht it can be used, as for some* browsers, EME must be configured before content is appended (if appending unencrypted* content before encrypted content).*/setupEme_() {const audioPlaylistLoader = this.playlistController_.mediaTypes_.AUDIO.activePlaylistLoader;const didSetupEmeOptions = setupEmeOptions({player: this.player_,sourceKeySystems: this.source_.keySystems,media: this.playlists.media(),audioMedia: audioPlaylistLoader && audioPlaylistLoader.media()});this.player_.tech_.on('keystatuschange', e => {this.playlistController_.updatePlaylistByKeyStatus(e.keyId, e.status);});this.handleWaitingForKey_ = this.handleWaitingForKey_.bind(this);this.player_.tech_.on('waitingforkey', this.handleWaitingForKey_);if (!didSetupEmeOptions) {// If EME options were not set up, we've done all we could to initialize EME.this.playlistController_.sourceUpdater_.initializedEme();return;}this.createKeySessions_();}/*** Initializes the quality levels and sets listeners to update them.** @method setupQualityLevels_* @private*/setupQualityLevels_() {const player = videojs.players[this.tech_.options_.playerId]; // if there isn't a player or there isn't a qualityLevels plugin// or qualityLevels_ listeners have already been setup, do nothing.if (!player || !player.qualityLevels || this.qualityLevels_) {return;}this.qualityLevels_ = player.qualityLevels();this.playlistController_.on('selectedinitialmedia', () => {handleVhsLoadedMetadata(this.qualityLevels_, this);});this.playlists.on('mediachange', () => {handleVhsMediaChange(this.qualityLevels_, this.playlists);});}/*** return the version*/static 
version() {return {'@videojs/http-streaming': version$4,'mux.js': version$3,'mpd-parser': version$2,'m3u8-parser': version$1,'aes-decrypter': version};}/*** return the version*/version() {return this.constructor.version();}canChangeType() {return SourceUpdater.canChangeType();}/*** Begin playing the video.*/play() {this.playlistController_.play();}/*** a wrapper around the function in PlaylistController*/setCurrentTime(currentTime) {this.playlistController_.setCurrentTime(currentTime);}/*** a wrapper around the function in PlaylistController*/duration() {return this.playlistController_.duration();}/*** a wrapper around the function in PlaylistController*/seekable() {return this.playlistController_.seekable();}/*** Abort all outstanding work and cleanup.*/dispose() {if (this.playbackWatcher_) {this.playbackWatcher_.dispose();}if (this.playlistController_) {this.playlistController_.dispose();}if (this.qualityLevels_) {this.qualityLevels_.dispose();}if (this.tech_ && this.tech_.vhs) {delete this.tech_.vhs;}if (this.mediaSourceUrl_ && window.URL.revokeObjectURL) {window.URL.revokeObjectURL(this.mediaSourceUrl_);this.mediaSourceUrl_ = null;}if (this.tech_) {this.tech_.off('waitingforkey', this.handleWaitingForKey_);}super.dispose();}convertToProgramTime(time, callback) {return getProgramTime({playlist: this.playlistController_.media(),time,callback});} // the player must be playing before calling thisseekToProgramTime(programTime, callback, pauseAfterSeek = true, retryCount = 2) {return seekToProgramTime({programTime,playlist: this.playlistController_.media(),retryCount,pauseAfterSeek,seekTo: this.options_.seekTo,tech: this.options_.tech,callback});}/*** Adds the onRequest, onResponse, offRequest and offResponse functions* to the VhsHandler xhr Object.*/setupXhrHooks_() {/*** A player function for setting an onRequest hook** @param {function} callback for request modifiction*/this.xhr.onRequest = callback => {addOnRequestHook(this.xhr, callback);};/*** A player function for setting an onResponse hook** @param {callback} callback for response data retrieval*/this.xhr.onResponse = callback => {addOnResponseHook(this.xhr, callback);};/*** Deletes a player onRequest callback if it exists** @param {function} callback to delete from the player set*/this.xhr.offRequest = callback => {removeOnRequestHook(this.xhr, callback);};/*** Deletes a player onResponse callback if it exists** @param {function} callback to delete from the player set*/this.xhr.offResponse = callback => {removeOnResponseHook(this.xhr, callback);}; // Trigger an event on the player to notify the user that vhs is ready to set xhr hooks.// This allows hooks to be set before the source is set to vhs when handleSource is called.this.player_.trigger('xhr-hooks-ready');}}/*** The Source Handler object, which informs video.js what additional* MIME types are supported and sets up playback. It is registered* automatically to the appropriate tech based on the capabilities of* the browser it is running in. 
It is not necessary to use or modify* this object in normal usage.*/const VhsSourceHandler = {name: 'videojs-http-streaming',VERSION: version$4,canHandleSource(srcObj, options = {}) {const localOptions = merge(videojs.options, options);return VhsSourceHandler.canPlayType(srcObj.type, localOptions);},handleSource(source, tech, options = {}) {const localOptions = merge(videojs.options, options);tech.vhs = new VhsHandler(source, tech, localOptions);tech.vhs.xhr = xhrFactory();tech.vhs.setupXhrHooks_();tech.vhs.src(source.src, source.type);return tech.vhs;},canPlayType(type, options) {const simpleType = simpleTypeFromSourceType(type);if (!simpleType) {return '';}const overrideNative = VhsSourceHandler.getOverrideNative(options);const supportsTypeNatively = Vhs.supportsTypeNatively(simpleType);const canUseMsePlayback = !supportsTypeNatively || overrideNative;return canUseMsePlayback ? 'maybe' : '';},getOverrideNative(options = {}) {const {vhs = {}} = options;const defaultOverrideNative = !(videojs.browser.IS_ANY_SAFARI || videojs.browser.IS_IOS);const {overrideNative = defaultOverrideNative} = vhs;return overrideNative;}};/*** Check to see if the native MediaSource object exists and supports* an MP4 container with both H.264 video and AAC-LC audio.** @return {boolean} if native media sources are supported*/const supportsNativeMediaSources = () => {return browserSupportsCodec('avc1.4d400d,mp4a.40.2');}; // register source handlers with the appropriate techsif (supportsNativeMediaSources()) {videojs.getTech('Html5').registerSourceHandler(VhsSourceHandler, 0);}videojs.VhsHandler = VhsHandler;videojs.VhsSourceHandler = VhsSourceHandler;videojs.Vhs = Vhs;if (!videojs.use) {videojs.registerComponent('Vhs', Vhs);}videojs.options.vhs = videojs.options.vhs || {};if (!videojs.getPlugin || !videojs.getPlugin('reloadSourceOnError')) {videojs.registerPlugin('reloadSourceOnError', reloadSourceOnError);}return videojs;}));
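/*
 * Usage sketch (illustrative only, not part of the library): the basic plugin
 * registered above as 'reloadSourceOnError' can be enabled per player. The
 * player id 'my-player' is hypothetical; `errorInterval` and `getSource`
 * are the two options read by `initPlugin` in this bundle.
 *
 *   var player = videojs('my-player');
 *
 *   player.reloadSourceOnError({
 *     // do not attempt another reload within 30 seconds of the last one
 *     // (30 is also the default errorInterval)
 *     errorInterval: 30,
 *     // optional: supply the source to reload. `next` must be called with a
 *     // source object; replaying the player's current source mirrors what
 *     // the default getSource above does.
 *     getSource: function (next) {
 *       next(this.currentSource());
 *     }
 *   });
 */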
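/*
 * Usage sketch (illustrative only, not part of the library): the global
 * Vhs.xhr.onRequest/onResponse/offRequest/offResponse helpers above, plus the
 * per-player variants exposed once the 'xhr-hooks-ready' event fires. The
 * callback parameter names (`options`, `request`, `error`, `response`) are
 * assumptions; this file only shows that the callbacks are stored in Sets.
 *
 *   // global hook, applied to every VHS request
 *   videojs.Vhs.xhr.onRequest(function (options) {
 *     options.headers = options.headers || {};
 *     options.headers['X-Example'] = 'value'; // hypothetical header
 *     return options;
 *   });
 *
 *   // per-player hooks live on tech.vhs.xhr and are safe to register once
 *   // 'xhr-hooks-ready' has been triggered (see setupXhrHooks_ above)
 *   player.on('xhr-hooks-ready', function () {
 *     var vhs = player.tech({IWillNotUseThisInPlugins: true}).vhs;
 *     vhs.xhr.onResponse(function (request, error, response) {
 *       // inspect or log the response here
 *     });
 *   });
 */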
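/*
 * Usage sketch (illustrative only, not part of the library): options consumed
 * by VhsHandler.setOptions_ and VhsSourceHandler.getOverrideNative above are
 * passed under the tech's `vhs` key. Nesting them under `html5` follows the
 * usual VHS setup, but treat the exact shape as an assumption rather than
 * canonical documentation. Disabling native audio/video tracks relates to the
 * "Overriding native VHS requires emulated tracks" error thrown above.
 *
 *   var player = videojs('my-player', {
 *     html5: {
 *       vhs: {
 *         // the default is already !(Safari || iOS); shown here for clarity
 *         overrideNative: !videojs.browser.IS_ANY_SAFARI,
 *         useBandwidthFromLocalStorage: true,
 *         llhls: true,
 *         bandwidth: 4194304 // initial bandwidth estimate, bits per second
 *       },
 *       nativeAudioTracks: false,
 *       nativeVideoTracks: false
 *     }
 *   });
 */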
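/*
 * Worked example for the read-only `systemBandwidth` getter defined above
 * (the numbers are illustrative). With a network `bandwidth` of 10e6 bits/sec
 * and a decrypt/transmux/append `throughput` of 40e6 bits/sec:
 *
 *   systemBandwidth = 1 / (1 / 10e6 + 1 / 40e6)
 *                   = 1 / 1.25e-7
 *                   = 8e6 bits/sec
 *
 * Because the two stages run in series, the combined rate is always lower
 * than the slower of the two stages on its own.
 */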