<!DOCTYPE html>
<html lang="en">
<head>
    <meta charset="utf-8">
    <title>JSDoc: Source: html-media-source.js</title>
</head>
<body>

<div id="main">

    <h1 class="page-title">Source: html-media-source.js</h1>

    <section>
        <article>
            <pre class="prettyprint source"><code>/**
 * @file html-media-source.js
 */
import window from 'global/window';
import document from 'global/document';
import videojs from 'video.js';
import VirtualSourceBuffer from './virtual-source-buffer';
import {durationOfVideo} from './add-text-track-data';
import {
  isAudioCodec,
  isVideoCodec,
  parseContentType,
  translateLegacyCodecs
} from './codec-utils';

/**
 * Our MediaSource implementation in HTML, mimics native
 * MediaSource where/if possible.
 *
 * @link https://developer.mozilla.org/en-US/docs/Web/API/MediaSource
 * @class HtmlMediaSource
 * @extends videojs.EventTarget
 */
export default class HtmlMediaSource extends videojs.EventTarget {
  constructor() {
    super();

    let property;

    this.nativeMediaSource_ = new window.MediaSource();

    // delegate to the native MediaSource's methods by default
    for (property in this.nativeMediaSource_) {
      if (!(property in HtmlMediaSource.prototype) &&
          typeof this.nativeMediaSource_[property] === 'function') {
        this[property] = this.nativeMediaSource_[property].bind(this.nativeMediaSource_);
      }
    }

    // emulate `duration` and `seekable` until seeking can be
    // handled uniformly for live streams
    // see https://github.com/w3c/media-source/issues/5
    this.duration_ = NaN;

    Object.defineProperty(this, 'duration', {
      get() {
        if (this.duration_ === Infinity) {
          return this.duration_;
        }
        return this.nativeMediaSource_.duration;
      },
      set(duration) {
        this.duration_ = duration;
        if (duration !== Infinity) {
          this.nativeMediaSource_.duration = duration;
          return;
        }
      }
    });

    Object.defineProperty(this, 'seekable', {
      get() {
        if (this.duration_ === Infinity) {
          return videojs.createTimeRanges([[0, this.nativeMediaSource_.duration]]);
        }
        return this.nativeMediaSource_.seekable;
      }
    });
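
    // --------------------------------------------------------------------
    // Illustrative sketch (not part of the original source): with the
    // emulation above, a live stream keeps `duration` at Infinity on this
    // polyfill while the native MediaSource holds a finite, growing value
    // that backs `seekable`. Assuming `mediaSource` is an attached
    // HtmlMediaSource instance (a hypothetical name used only here):
    //
    //   mediaSource.duration = Infinity;       // stored only in duration_
    //   mediaSource.addSeekableRange_(0, 40);  // grows the native duration
    //   mediaSource.duration;                  // Infinity (from duration_)
    //   mediaSource.seekable;                  // TimeRanges ~ [[0, 40]]
    //
    // addSeekableRange_ is defined further down in this file.
    // --------------------------------------------------------------------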

    Object.defineProperty(this, 'readyState', {
      get() {
        return this.nativeMediaSource_.readyState;
      }
    });

    Object.defineProperty(this, 'activeSourceBuffers', {
      get() {
        return this.activeSourceBuffers_;
      }
    });

    // the list of virtual and native SourceBuffers created by this
    // MediaSource
    this.sourceBuffers = [];

    this.activeSourceBuffers_ = [];

    /**
     * update the list of active source buffers based upon various
     * information from HLS and video.js
     *
     * @private
     */
    this.updateActiveSourceBuffers_ = () => {
      // Retain the reference but empty the array
      this.activeSourceBuffers_.length = 0;

      // If there is only one source buffer, then it will always be active and audio will
      // be disabled based on the codec of the source buffer
      if (this.sourceBuffers.length === 1) {
        let sourceBuffer = this.sourceBuffers[0];

        sourceBuffer.appendAudioInitSegment_ = true;
        sourceBuffer.audioDisabled_ = !sourceBuffer.audioCodec_;
        this.activeSourceBuffers_.push(sourceBuffer);
        return;
      }

      // There are 2 source buffers, a combined (possibly video only) source buffer
      // and an audio only source buffer.
      // By default, the audio in the combined virtual source buffer is enabled
      // and the audio-only source buffer (if it exists) is disabled.
      let disableCombined = false;
      let disableAudioOnly = true;

      // TODO: maybe we can store the sourcebuffers on the track objects?
      // Safari may do something like this
      for (let i = 0; i < this.player_.audioTracks().length; i++) {
        let track = this.player_.audioTracks()[i];

        if (track.enabled && track.kind !== 'main') {
          // The enabled track is an alternate audio track so disable the audio in
          // the combined source buffer and enable the audio-only source buffer.
          disableCombined = true;
          disableAudioOnly = false;
          break;
        }
      }

      this.sourceBuffers.forEach((sourceBuffer) => {
        /* eslint-disable */
        // TODO once codecs are required, we can switch to using the codecs to determine
        //      what stream is the video stream, rather than relying on videoTracks
        /* eslint-enable */

        sourceBuffer.appendAudioInitSegment_ = true;

        if (sourceBuffer.videoCodec_ && sourceBuffer.audioCodec_) {
          // combined
          sourceBuffer.audioDisabled_ = disableCombined;
        } else if (sourceBuffer.videoCodec_ && !sourceBuffer.audioCodec_) {
          // If the "combined" source buffer is video only, then we do not want to
          // disable the audio-only source buffer (this is mostly for demuxed
          // audio and video hls)
          sourceBuffer.audioDisabled_ = true;
          disableAudioOnly = false;
        } else if (!sourceBuffer.videoCodec_ && sourceBuffer.audioCodec_) {
          // audio only
          sourceBuffer.audioDisabled_ = disableAudioOnly;
          if (disableAudioOnly) {
            return;
          }
        }

        this.activeSourceBuffers_.push(sourceBuffer);
      });
    };
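
    // --------------------------------------------------------------------
    // Illustrative sketch (not part of the original source): a worked
    // trace of updateActiveSourceBuffers_ with two buffers, a muxed main
    // buffer (video + audio codecs) and a demuxed audio-only buffer.
    //
    //   default 'main' audio track enabled:
    //     main buffer       -> audioDisabled_ === false, kept active
    //     audio-only buffer -> audioDisabled_ === true, skipped entirely
    //
    //   alternate (non-'main') audio track enabled:
    //     main buffer       -> audioDisabled_ === true  (disableCombined)
    //     audio-only buffer -> audioDisabled_ === false (disableAudioOnly)
    //                          and pushed onto activeSourceBuffers_
    // --------------------------------------------------------------------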

    this.onPlayerMediachange_ = () => {
      this.sourceBuffers.forEach((sourceBuffer) => {
        sourceBuffer.appendAudioInitSegment_ = true;
      });
    };

    this.onHlsReset_ = () => {
      this.sourceBuffers.forEach((sourceBuffer) => {
        if (sourceBuffer.transmuxer_) {
          sourceBuffer.transmuxer_.postMessage({action: 'resetCaptions'});
        }
      });
    };

    this.onHlsSegmentTimeMapping_ = (event) => {
      this.sourceBuffers.forEach(buffer => buffer.timeMapping_ = event.mapping);
    };

    // Re-emit MediaSource events on the polyfill
    [
      'sourceopen',
      'sourceclose',
      'sourceended'
    ].forEach(function(eventName) {
      this.nativeMediaSource_.addEventListener(eventName, this.trigger.bind(this));
    }, this);

    // capture the associated player when the MediaSource is
    // successfully attached
    this.on('sourceopen', (event) => {
      // Get the player this MediaSource is attached to
      let video = document.querySelector('[src="' + this.url_ + '"]');

      if (!video) {
        return;
      }

      this.player_ = videojs(video.parentNode);

      // hls-reset is fired by videojs.Hls onto the tech after the main SegmentLoader
      // resets its state and flushes the buffer
      this.player_.tech_.on('hls-reset', this.onHlsReset_);
      // hls-segment-time-mapping is fired by videojs.Hls onto the tech after the main
      // SegmentLoader inspects an MTS segment and has an accurate stream to display
      // time mapping
      this.player_.tech_.on('hls-segment-time-mapping', this.onHlsSegmentTimeMapping_);

      if (this.player_.audioTracks && this.player_.audioTracks()) {
        this.player_.audioTracks().on('change', this.updateActiveSourceBuffers_);
        this.player_.audioTracks().on('addtrack', this.updateActiveSourceBuffers_);
        this.player_.audioTracks().on('removetrack', this.updateActiveSourceBuffers_);
      }

      this.player_.on('mediachange', this.onPlayerMediachange_);
    });

    this.on('sourceended', (event) => {
      let duration = durationOfVideo(this.duration);

      for (let i = 0; i < this.sourceBuffers.length; i++) {
        let sourcebuffer = this.sourceBuffers[i];
        let cues = sourcebuffer.metadataTrack_ && sourcebuffer.metadataTrack_.cues;

        if (cues && cues.length) {
          cues[cues.length - 1].endTime = duration;
        }
      }
    });

    // explicitly terminate any WebWorkers that were created
    // by SourceHandlers
    this.on('sourceclose', function(event) {
      this.sourceBuffers.forEach(function(sourceBuffer) {
        if (sourceBuffer.transmuxer_) {
          sourceBuffer.transmuxer_.terminate();
        }
      });

      this.sourceBuffers.length = 0;
      if (!this.player_) {
        return;
      }

      if (this.player_.audioTracks && this.player_.audioTracks()) {
        this.player_.audioTracks().off('change', this.updateActiveSourceBuffers_);
        this.player_.audioTracks().off('addtrack', this.updateActiveSourceBuffers_);
        this.player_.audioTracks().off('removetrack', this.updateActiveSourceBuffers_);
      }

      // We can only change this if the player hasn't been disposed of yet
      // because `off` eventually tries to use the el_ property. If it has
      // been disposed of, then don't worry about it because there are no
      // event handlers left to unbind anyway
      if (this.player_.el_) {
        this.player_.off('mediachange', this.onPlayerMediachange_);
        this.player_.tech_.off('hls-reset', this.onHlsReset_);
        this.player_.tech_.off('hls-segment-time-mapping', this.onHlsSegmentTimeMapping_);
      }
    });
  }
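
  // ----------------------------------------------------------------------
  // Illustrative sketch (not part of the original source): the 'sourceopen'
  // handler above locates its player by matching a media element's src
  // attribute against this.url_. This file never assigns url_, so the
  // sketch assumes the library's object-URL shim records it when the media
  // source is attached to an element; that assignment is an assumption,
  // not shown in this file. `video` stands for an existing video element.
  //
  //   const mediaSource = new HtmlMediaSource();
  //   const url = window.URL.createObjectURL(mediaSource.nativeMediaSource_);
  //
  //   // assumed to be done by the shim: mediaSource.url_ = url;
  //   video.src = url; // native 'sourceopen' fires, is re-emitted on the
  //                    // polyfill, and the handler captures the player
  // ----------------------------------------------------------------------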

  /**
   * Add a range that can now be seeked to.
   *
   * @param {Double} start where to start the addition
   * @param {Double} end where to end the addition
   * @private
   */
  addSeekableRange_(start, end) {
    let error;

    if (this.duration !== Infinity) {
      error = new Error('MediaSource.addSeekableRange() can only be invoked ' +
                        'when the duration is Infinity');
      error.name = 'InvalidStateError';
      error.code = 11;
      throw error;
    }

    if (end > this.nativeMediaSource_.duration ||
        isNaN(this.nativeMediaSource_.duration)) {
      this.nativeMediaSource_.duration = end;
    }
  }

  /**
   * Add a source buffer to the media source.
   *
   * @link https://developer.mozilla.org/en-US/docs/Web/API/MediaSource/addSourceBuffer
   * @param {String} type the content-type of the content
   * @return {Object} the created source buffer
   */
  addSourceBuffer(type) {
    let buffer;
    let parsedType = parseContentType(type);

    // Create a VirtualSourceBuffer to transmux MPEG-2 transport
    // stream segments into fragmented MP4s
    if ((/^(video|audio)\/mp2t$/i).test(parsedType.type)) {
      let codecs = [];

      if (parsedType.parameters && parsedType.parameters.codecs) {
        codecs = parsedType.parameters.codecs.split(',');
        codecs = translateLegacyCodecs(codecs);
        codecs = codecs.filter((codec) => {
          return (isAudioCodec(codec) || isVideoCodec(codec));
        });
      }

      if (codecs.length === 0) {
        codecs = ['avc1.4d400d', 'mp4a.40.2'];
      }

      buffer = new VirtualSourceBuffer(this, codecs);

      if (this.sourceBuffers.length !== 0) {
        // If another VirtualSourceBuffer already exists, then we are creating a
        // SourceBuffer for an alternate audio track and therefore we know that
        // the source has both an audio and video track.
        // That means we should trigger the manual creation of the real
        // SourceBuffers instead of waiting for the transmuxer to return data
        this.sourceBuffers[0].createRealSourceBuffers_();
        buffer.createRealSourceBuffers_();

        // Automatically disable the audio on the first source buffer if
        // a second source buffer is ever created
        this.sourceBuffers[0].audioDisabled_ = true;
      }
    } else {
      // delegate to the native implementation
      buffer = this.nativeMediaSource_.addSourceBuffer(type);
    }

    this.sourceBuffers.push(buffer);
    return buffer;
  }
}
</code></pre>
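
            <p>A minimal usage sketch of <code>addSourceBuffer</code> from the listing above (not part of the generated source; the <code>mediaSource</code> and <code>mp4MediaSource</code> names are hypothetical). An MPEG-2 transport stream content type yields a transmuxing <code>VirtualSourceBuffer</code>, while any other type is delegated to the native <code>MediaSource</code>. In practice these calls are made after the media source has been attached and has fired <code>sourceopen</code>.</p>

            <pre class="prettyprint source"><code>import HtmlMediaSource from './html-media-source';

// MPEG-TS content types get a VirtualSourceBuffer that transmuxes
// TS segments into fragmented MP4 before appending them natively.
const mediaSource = new HtmlMediaSource();
const virtualBuffer =
  mediaSource.addSourceBuffer('video/mp2t; codecs="avc1.4d400d, mp4a.40.2"');

// Any other type, e.g. fragmented MP4, is passed straight through to the
// native addSourceBuffer (which requires an open media source) and is
// still tracked in sourceBuffers alongside any virtual buffers.
const mp4MediaSource = new HtmlMediaSource();
const nativeBuffer =
  mp4MediaSource.addSourceBuffer('video/mp4; codecs="avc1.4d400d, mp4a.40.2"');
</code></pre>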
        </article>
    </section>

</div>

</body>
</html>