/**
 * @file html-media-source.js
 */
'use strict';

Object.defineProperty(exports, '__esModule', {
  value: true
});

var _createClass = (function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ('value' in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; })();

var _get = function get(_x, _x2, _x3) { var _again = true; _function: while (_again) { var object = _x, property = _x2, receiver = _x3; _again = false; if (object === null) object = Function.prototype; var desc = Object.getOwnPropertyDescriptor(object, property); if (desc === undefined) { var parent = Object.getPrototypeOf(object); if (parent === null) { return undefined; } else { _x = parent; _x2 = property; _x3 = receiver; _again = true; desc = parent = undefined; continue _function; } } else if ('value' in desc) { return desc.value; } else { var getter = desc.get; if (getter === undefined) { return undefined; } return getter.call(receiver); } } };

function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { 'default': obj }; }

function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError('Cannot call a class as a function'); } }

function _inherits(subClass, superClass) { if (typeof superClass !== 'function' && superClass !== null) { throw new TypeError('Super expression must either be null or a function, not ' + typeof superClass); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, enumerable: false, writable: true, configurable: true } }); if (superClass) Object.setPrototypeOf ? Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass; }

var _globalWindow = require('global/window');

var _globalWindow2 = _interopRequireDefault(_globalWindow);

var _globalDocument = require('global/document');

var _globalDocument2 = _interopRequireDefault(_globalDocument);

var _videoJs = require('video.js');

var _videoJs2 = _interopRequireDefault(_videoJs);

var _virtualSourceBuffer = require('./virtual-source-buffer');

var _virtualSourceBuffer2 = _interopRequireDefault(_virtualSourceBuffer);

var _addTextTrackData = require('./add-text-track-data');

var _codecUtils = require('./codec-utils');
/**
 * Our MediaSource implementation in HTML mimics the native
 * MediaSource where possible.
 *
 * @link https://developer.mozilla.org/en-US/docs/Web/API/MediaSource
 * @class HtmlMediaSource
 * @extends videojs.EventTarget
 */
var HtmlMediaSource = (function (_videojs$EventTarget) {
  _inherits(HtmlMediaSource, _videojs$EventTarget);

  function HtmlMediaSource() {
    var _this = this;

    _classCallCheck(this, HtmlMediaSource);

    _get(Object.getPrototypeOf(HtmlMediaSource.prototype), 'constructor', this).call(this);
    var property = undefined;

    this.nativeMediaSource_ = new _globalWindow2['default'].MediaSource();
    // delegate to the native MediaSource's methods by default
    for (property in this.nativeMediaSource_) {
      if (!(property in HtmlMediaSource.prototype) && typeof this.nativeMediaSource_[property] === 'function') {
        this[property] = this.nativeMediaSource_[property].bind(this.nativeMediaSource_);
      }
    }
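    // For example, a call like `mediaSource.endOfStream()` is forwarded
    // straight to the native instance, because `endOfStream` is a native
    // method that this class does not override. (Illustrative note only;
    // the exact set of delegated methods depends on the browser.)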
    // emulate `duration` and `seekable` until seeking can be
    // handled uniformly for live streams
    // see https://github.com/w3c/media-source/issues/5
    this.duration_ = NaN;
    Object.defineProperty(this, 'duration', {
      get: function get() {
        if (this.duration_ === Infinity) {
          return this.duration_;
        }
        return this.nativeMediaSource_.duration;
      },
      set: function set(duration) {
        this.duration_ = duration;
        if (duration !== Infinity) {
          this.nativeMediaSource_.duration = duration;
          return;
        }
      }
    });
    Object.defineProperty(this, 'seekable', {
      get: function get() {
        if (this.duration_ === Infinity) {
          return _videoJs2['default'].createTimeRanges([[0, this.nativeMediaSource_.duration]]);
        }
        return this.nativeMediaSource_.seekable;
      }
    });
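    // A quick sketch of the emulation above (illustrative values only):
    //
    //   mediaSource.duration = Infinity;        // live stream
    //   mediaSource.addSeekableRange_(0, 100);  // pushes the native duration to 100
    //   mediaSource.duration;                   // => Infinity (emulated)
    //   mediaSource.seekable;                   // => TimeRanges [[0, 100]],
    //                                           //    built from the native duration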
    Object.defineProperty(this, 'readyState', {
      get: function get() {
        return this.nativeMediaSource_.readyState;
      }
    });
    Object.defineProperty(this, 'activeSourceBuffers', {
      get: function get() {
        return this.activeSourceBuffers_;
      }
    });
    // the list of virtual and native SourceBuffers created by this
    // MediaSource
    this.sourceBuffers = [];

    this.activeSourceBuffers_ = [];
    /**
     * update the list of active source buffers based upon various
     * information from HLS and video.js
     *
     * @private
     */
    this.updateActiveSourceBuffers_ = function () {
      // Retain the reference but empty the array
      _this.activeSourceBuffers_.length = 0;

      // If there is only one source buffer, then it will always be active and audio will
      // be disabled based on the codec of the source buffer
      if (_this.sourceBuffers.length === 1) {
        var sourceBuffer = _this.sourceBuffers[0];

        sourceBuffer.appendAudioInitSegment_ = true;
        sourceBuffer.audioDisabled_ = !sourceBuffer.audioCodec_;
        _this.activeSourceBuffers_.push(sourceBuffer);
        return;
      }

      // There are 2 source buffers, a combined (possibly video only) source buffer
      // and an audio-only source buffer.
      // By default, the audio in the combined virtual source buffer is enabled
      // and the audio-only source buffer (if it exists) is disabled.
      var disableCombined = false;
      var disableAudioOnly = true;

      // TODO: maybe we can store the source buffers on the track objects?
      // safari may do something like this
      for (var i = 0; i < _this.player_.audioTracks().length; i++) {
        var track = _this.player_.audioTracks()[i];

        if (track.enabled && track.kind !== 'main') {
          // The enabled track is an alternate audio track so disable the audio in
          // the combined source buffer and enable the audio-only source buffer.
          disableCombined = true;
          disableAudioOnly = false;
          break;
        }
      }

      _this.sourceBuffers.forEach(function (sourceBuffer) {
        /* eslint-disable */
        // TODO once codecs are required, we can switch to using the codecs to determine
        // what stream is the video stream, rather than relying on videoTracks
        /* eslint-enable */

        sourceBuffer.appendAudioInitSegment_ = true;

        if (sourceBuffer.videoCodec_ && sourceBuffer.audioCodec_) {
          // combined
          sourceBuffer.audioDisabled_ = disableCombined;
        } else if (sourceBuffer.videoCodec_ && !sourceBuffer.audioCodec_) {
          // If the "combined" source buffer is video only, then we do not want
          // to disable the audio-only source buffer (this is mostly for demuxed
          // audio and video hls)
          sourceBuffer.audioDisabled_ = true;
          disableAudioOnly = false;
        } else if (!sourceBuffer.videoCodec_ && sourceBuffer.audioCodec_) {
          // audio only
          sourceBuffer.audioDisabled_ = disableAudioOnly;
          if (disableAudioOnly) {
            return;
          }
        }

        _this.activeSourceBuffers_.push(sourceBuffer);
      });
    };
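    // The resulting enable/disable matrix for the two-buffer case, as a quick
    // reference (derived from the logic above):
    //
    //   enabled audio track | combined buffer audio | audio-only buffer
    //   --------------------+-----------------------+------------------
    //   main (default)      | enabled               | disabled, inactive
    //   alternate           | disabled              | enabled, active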
    this.onPlayerMediachange_ = function () {
      _this.sourceBuffers.forEach(function (sourceBuffer) {
        sourceBuffer.appendAudioInitSegment_ = true;
      });
    };

    this.onHlsReset_ = function () {
      _this.sourceBuffers.forEach(function (sourceBuffer) {
        if (sourceBuffer.transmuxer_) {
          sourceBuffer.transmuxer_.postMessage({ action: 'resetCaptions' });
        }
      });
    };

    this.onHlsSegmentTimeMapping_ = function (event) {
      _this.sourceBuffers.forEach(function (buffer) {
        return buffer.timeMapping_ = event.mapping;
      });
    };
    // Re-emit MediaSource events on the polyfill
    ['sourceopen', 'sourceclose', 'sourceended'].forEach(function (eventName) {
      this.nativeMediaSource_.addEventListener(eventName, this.trigger.bind(this));
    }, this);

    // capture the associated player when the MediaSource is
    // successfully attached
    this.on('sourceopen', function (event) {
      // Get the player this MediaSource is attached to
      var video = _globalDocument2['default'].querySelector('[src="' + _this.url_ + '"]');

      if (!video) {
        return;
      }

      _this.player_ = (0, _videoJs2['default'])(video.parentNode);

      // hls-reset is fired by videojs.Hls onto the tech after the main SegmentLoader
      // resets its state and flushes the buffer
      _this.player_.tech_.on('hls-reset', _this.onHlsReset_);
      // hls-segment-time-mapping is fired by videojs.Hls onto the tech after the main
      // SegmentLoader inspects an MTS segment and has an accurate stream-to-display
      // time mapping
      _this.player_.tech_.on('hls-segment-time-mapping', _this.onHlsSegmentTimeMapping_);

      if (_this.player_.audioTracks && _this.player_.audioTracks()) {
        _this.player_.audioTracks().on('change', _this.updateActiveSourceBuffers_);
        _this.player_.audioTracks().on('addtrack', _this.updateActiveSourceBuffers_);
        _this.player_.audioTracks().on('removetrack', _this.updateActiveSourceBuffers_);
      }

      _this.player_.on('mediachange', _this.onPlayerMediachange_);
    });
    this.on('sourceended', function (event) {
      var duration = (0, _addTextTrackData.durationOfVideo)(_this.duration);

      for (var i = 0; i < _this.sourceBuffers.length; i++) {
        var sourcebuffer = _this.sourceBuffers[i];
        var cues = sourcebuffer.metadataTrack_ && sourcebuffer.metadataTrack_.cues;

        if (cues && cues.length) {
          cues[cues.length - 1].endTime = duration;
        }
      }
    });

    // explicitly terminate any WebWorkers that were created
    // by SourceHandlers
    this.on('sourceclose', function (event) {
      this.sourceBuffers.forEach(function (sourceBuffer) {
        if (sourceBuffer.transmuxer_) {
          sourceBuffer.transmuxer_.terminate();
        }
      });

      this.sourceBuffers.length = 0;
      if (!this.player_) {
        return;
      }

      if (this.player_.audioTracks && this.player_.audioTracks()) {
        this.player_.audioTracks().off('change', this.updateActiveSourceBuffers_);
        this.player_.audioTracks().off('addtrack', this.updateActiveSourceBuffers_);
        this.player_.audioTracks().off('removetrack', this.updateActiveSourceBuffers_);
      }

      // We can only change this if the player hasn't been disposed of yet
      // because `off` eventually tries to use the el_ property. If it has
      // been disposed of, then don't worry about it because there are no
      // event handlers left to unbind anyway
      if (this.player_.el_) {
        this.player_.off('mediachange', this.onPlayerMediachange_);
        this.player_.tech_.off('hls-reset', this.onHlsReset_);
        this.player_.tech_.off('hls-segment-time-mapping', this.onHlsSegmentTimeMapping_);
      }
    });
  }
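  // A minimal usage sketch of the constructor wiring above (illustrative only;
  // in practice videojs-contrib-media-sources constructs this object for you
  // and sets `url_` when the source is attached to a video element):
  //
  //   var mediaSource = new HtmlMediaSource();
  //
  //   mediaSource.on('sourceopen', function () {
  //     // safe to create source buffers once the native source is open
  //     mediaSource.addSourceBuffer('video/mp2t; codecs="avc1.4d400d, mp4a.40.2"');
  //   });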
  /**
   * Add a range that can now be seeked to.
   *
   * @param {Double} start where to start the addition
   * @param {Double} end where to end the addition
   * @private
   */
  _createClass(HtmlMediaSource, [{
    key: 'addSeekableRange_',
    value: function addSeekableRange_(start, end) {
      var error = undefined;

      if (this.duration !== Infinity) {
        error = new Error('MediaSource.addSeekableRange() can only be invoked ' + 'when the duration is Infinity');
        error.name = 'InvalidStateError';
        error.code = 11;
        throw error;
      }

      if (end > this.nativeMediaSource_.duration || isNaN(this.nativeMediaSource_.duration)) {
        this.nativeMediaSource_.duration = end;
      }
    }
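    // For instance (illustrative values):
    //
    //   mediaSource.duration = 30;            // a fixed-length stream
    //   mediaSource.addSeekableRange_(0, 40);
    //   // => throws InvalidStateError (code 11): seekable ranges may only
    //   //    be widened while the emulated duration is Infinity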
    /**
     * Add a source buffer to the media source.
     *
     * @link https://developer.mozilla.org/en-US/docs/Web/API/MediaSource/addSourceBuffer
     * @param {String} type the content-type of the content
     * @return {Object} the created source buffer
     */
  }, {
    key: 'addSourceBuffer',
    value: function addSourceBuffer(type) {
      var buffer = undefined;
      var parsedType = (0, _codecUtils.parseContentType)(type);

      // Create a VirtualSourceBuffer to transmux MPEG-2 transport
      // stream segments into fragmented MP4s
      if (/^(video|audio)\/mp2t$/i.test(parsedType.type)) {
        var codecs = [];

        if (parsedType.parameters && parsedType.parameters.codecs) {
          codecs = parsedType.parameters.codecs.split(',');
          codecs = (0, _codecUtils.translateLegacyCodecs)(codecs);
          codecs = codecs.filter(function (codec) {
            return (0, _codecUtils.isAudioCodec)(codec) || (0, _codecUtils.isVideoCodec)(codec);
          });
        }

        if (codecs.length === 0) {
          codecs = ['avc1.4d400d', 'mp4a.40.2'];
        }
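        // The fallback above corresponds to H.264 Main Profile video
        // (avc1.4d400d) plus AAC-LC audio (mp4a.40.2), a widely supported
        // default when the content type carries no usable codecs parameter.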
        buffer = new _virtualSourceBuffer2['default'](this, codecs);

        if (this.sourceBuffers.length !== 0) {
          // If another VirtualSourceBuffer already exists, then we are creating a
          // SourceBuffer for an alternate audio track and therefore we know that
          // the source has both an audio and video track.
          // That means we should trigger the manual creation of the real
          // SourceBuffers instead of waiting for the transmuxer to return data
          this.sourceBuffers[0].createRealSourceBuffers_();
          buffer.createRealSourceBuffers_();

          // Automatically disable the audio on the first source buffer if
          // a second source buffer is ever created
          this.sourceBuffers[0].audioDisabled_ = true;
        }
      } else {
        // delegate to the native implementation
        buffer = this.nativeMediaSource_.addSourceBuffer(type);
      }

      this.sourceBuffers.push(buffer);
      return buffer;
    }
  }]);

  return HtmlMediaSource;
})(_videoJs2['default'].EventTarget);

exports['default'] = HtmlMediaSource;
module.exports = exports['default'];
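
// A rough sketch of how the two branches of addSourceBuffer behave
// (illustrative content types; real codec strings come from the playlist):
//
//   // MPEG-2 TS: returns a VirtualSourceBuffer backed by a transmuxer
//   var tsBuffer = mediaSource.addSourceBuffer('video/mp2t; codecs="avc1.4d400d, mp4a.40.2"');
//
//   // anything else, e.g. fragmented MP4: returns a native SourceBuffer
//   var mp4Buffer = mediaSource.addSourceBuffer('video/mp4; codecs="avc1.4d400d"');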