// transmuxer.js

  1. "use strict";
  2. var Stream = require('../utils/stream.js');
  3. var m2ts = require('../m2ts/m2ts.js');
  4. var codecs = require('../codecs/index.js');
  5. var AudioSegmentStream = require('./audio-segment-stream.js');
  6. var VideoSegmentStream = require('./video-segment-stream.js');
  7. var trackInfo = require('../mp4/track-decode-info.js');
  8. var isLikelyAacData = require('../aac/utils').isLikelyAacData;
  9. var AdtsStream = require('../codecs/adts');
  10. var AacStream = require('../aac/index');
  11. var clock = require('../utils/clock');
  12. var createPipeline = function createPipeline(object) {
  13. object.prototype = new Stream();
  14. object.prototype.init.call(object);
  15. return object;
  16. };
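
/**
 * Constructs the MPEG-2 transport stream pipeline: raw bytes flow through
 * packet and PES parsing, then timestamp rollover correction, and branch into
 * H264, caption, timed-metadata, and ADTS handling. The audio and video
 * segment streams are created lazily, once the first PMT ('metadata' event)
 * describes which tracks are present.
 */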
var tsPipeline = function(options) {
  var pipeline = {
    type: 'ts',
    tracks: {
      audio: null,
      video: null
    },
    packet: new m2ts.TransportPacketStream(),
    parse: new m2ts.TransportParseStream(),
    elementary: new m2ts.ElementaryStream(),
    timestampRollover: new m2ts.TimestampRolloverStream(),
    adts: new codecs.Adts(),
    h264: new codecs.h264.H264Stream(),
    captionStream: new m2ts.CaptionStream(options),
    metadataStream: new m2ts.MetadataStream()
  };

  pipeline.headOfPipeline = pipeline.packet;

  // Transport Stream
  pipeline.packet
    .pipe(pipeline.parse)
    .pipe(pipeline.elementary)
    .pipe(pipeline.timestampRollover);

  // H264
  pipeline.timestampRollover.pipe(pipeline.h264);

  // Hook up CEA-608/708 caption stream
  pipeline.h264.pipe(pipeline.captionStream);

  pipeline.timestampRollover.pipe(pipeline.metadataStream);

  // ADTS
  pipeline.timestampRollover.pipe(pipeline.adts);

  pipeline.elementary.on('data', function(data) {
    if (data.type !== 'metadata') {
      return;
    }

    for (var i = 0; i < data.tracks.length; i++) {
      if (!pipeline.tracks[data.tracks[i].type]) {
        pipeline.tracks[data.tracks[i].type] = data.tracks[i];
        pipeline.tracks[data.tracks[i].type].timelineStartInfo.baseMediaDecodeTime = options.baseMediaDecodeTime;
      }
    }

    if (pipeline.tracks.video && !pipeline.videoSegmentStream) {
      pipeline.videoSegmentStream = new VideoSegmentStream(pipeline.tracks.video, options);

      pipeline.videoSegmentStream.on('timelineStartInfo', function(timelineStartInfo) {
        if (pipeline.tracks.audio && !options.keepOriginalTimestamps) {
          pipeline.audioSegmentStream.setEarliestDts(timelineStartInfo.dts - options.baseMediaDecodeTime);
        }
      });

      pipeline.videoSegmentStream.on('timingInfo', pipeline.trigger.bind(pipeline, 'videoTimingInfo'));

      pipeline.videoSegmentStream.on('data', function(data) {
        pipeline.trigger('data', {
          type: 'video',
          data: data
        });
      });

      pipeline.videoSegmentStream.on('done', pipeline.trigger.bind(pipeline, 'done'));
      pipeline.videoSegmentStream.on('partialdone', pipeline.trigger.bind(pipeline, 'partialdone'));
      pipeline.videoSegmentStream.on('endedtimeline', pipeline.trigger.bind(pipeline, 'endedtimeline'));

      pipeline.h264.pipe(pipeline.videoSegmentStream);
    }

    if (pipeline.tracks.audio && !pipeline.audioSegmentStream) {
      pipeline.audioSegmentStream = new AudioSegmentStream(pipeline.tracks.audio, options);

      pipeline.audioSegmentStream.on('data', function(data) {
        pipeline.trigger('data', {
          type: 'audio',
          data: data
        });
      });

      pipeline.audioSegmentStream.on('done', pipeline.trigger.bind(pipeline, 'done'));
      pipeline.audioSegmentStream.on('partialdone', pipeline.trigger.bind(pipeline, 'partialdone'));
      pipeline.audioSegmentStream.on('endedtimeline', pipeline.trigger.bind(pipeline, 'endedtimeline'));
      pipeline.audioSegmentStream.on('timingInfo', pipeline.trigger.bind(pipeline, 'audioTimingInfo'));

      pipeline.adts.pipe(pipeline.audioSegmentStream);
    }

    // emit pmt info
    pipeline.trigger('trackinfo', {
      hasAudio: !!pipeline.tracks.audio,
      hasVideo: !!pipeline.tracks.video
    });
  });

  pipeline.captionStream.on('data', function(caption) {
    var timelineStartPts;

    if (pipeline.tracks.video) {
      timelineStartPts = pipeline.tracks.video.timelineStartInfo.pts || 0;
    } else {
      // This will only happen if we encounter caption packets before
      // video data in a segment. This is an unusual/unlikely scenario,
      // so we assume the timeline starts at zero for now.
      timelineStartPts = 0;
    }

    // Translate caption PTS times into second offsets into the
    // video timeline for the segment
    caption.startTime = clock.metadataTsToSeconds(caption.startPts, timelineStartPts, options.keepOriginalTimestamps);
    caption.endTime = clock.metadataTsToSeconds(caption.endPts, timelineStartPts, options.keepOriginalTimestamps);

    pipeline.trigger('caption', caption);
  });

  pipeline = createPipeline(pipeline);

  pipeline.metadataStream.on('data', pipeline.trigger.bind(pipeline, 'id3Frame'));

  return pipeline;
};
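
/**
 * Constructs the pipeline for raw AAC (ADTS) data with optional timed ID3
 * metadata. There is no container to describe the tracks, so a synthetic
 * audio track is created when the first audio or timed-metadata event
 * arrives from the AAC stream.
 */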
var aacPipeline = function(options) {
  var pipeline = {
    type: 'aac',
    tracks: {
      audio: null
    },
    metadataStream: new m2ts.MetadataStream(),
    aacStream: new AacStream(),
    audioRollover: new m2ts.TimestampRolloverStream('audio'),
    timedMetadataRollover: new m2ts.TimestampRolloverStream('timed-metadata'),
    adtsStream: new AdtsStream(true)
  };

  // set up the parsing pipeline
  pipeline.headOfPipeline = pipeline.aacStream;

  pipeline.aacStream
    .pipe(pipeline.audioRollover)
    .pipe(pipeline.adtsStream);
  pipeline.aacStream
    .pipe(pipeline.timedMetadataRollover)
    .pipe(pipeline.metadataStream);

  pipeline.metadataStream.on('timestamp', function(frame) {
    pipeline.aacStream.setTimestamp(frame.timeStamp);
  });

  pipeline.aacStream.on('data', function(data) {
    if ((data.type !== 'timed-metadata' && data.type !== 'audio') || pipeline.audioSegmentStream) {
      return;
    }

    pipeline.tracks.audio = pipeline.tracks.audio || {
      timelineStartInfo: {
        baseMediaDecodeTime: options.baseMediaDecodeTime
      },
      codec: 'adts',
      type: 'audio'
    };

    // hook up the audio segment stream to the first track with aac data
    pipeline.audioSegmentStream = new AudioSegmentStream(pipeline.tracks.audio, options);

    pipeline.audioSegmentStream.on('data', function(data) {
      pipeline.trigger('data', {
        type: 'audio',
        data: data
      });
    });

    pipeline.audioSegmentStream.on('partialdone', pipeline.trigger.bind(pipeline, 'partialdone'));
    pipeline.audioSegmentStream.on('done', pipeline.trigger.bind(pipeline, 'done'));
    pipeline.audioSegmentStream.on('endedtimeline', pipeline.trigger.bind(pipeline, 'endedtimeline'));
    pipeline.audioSegmentStream.on('timingInfo', pipeline.trigger.bind(pipeline, 'audioTimingInfo'));

    // Set up the final part of the audio pipeline
    pipeline.adtsStream.pipe(pipeline.audioSegmentStream);

    pipeline.trigger('trackinfo', {
      hasAudio: !!pipeline.tracks.audio,
      hasVideo: !!pipeline.tracks.video
    });
  });

  // set the pipeline up as a stream before binding to get access to the trigger function
  pipeline = createPipeline(pipeline);

  pipeline.metadataStream.on('data', pipeline.trigger.bind(pipeline, 'id3Frame'));

  return pipeline;
};
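
/**
 * Re-emits pipeline events on the transmuxer so callers only have to listen
 * to one object, annotating id3Frame events with their dispatch type and a
 * cue time in seconds along the way.
 */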
var setupPipelineListeners = function(pipeline, transmuxer) {
  pipeline.on('data', transmuxer.trigger.bind(transmuxer, 'data'));
  pipeline.on('done', transmuxer.trigger.bind(transmuxer, 'done'));
  pipeline.on('partialdone', transmuxer.trigger.bind(transmuxer, 'partialdone'));
  pipeline.on('endedtimeline', transmuxer.trigger.bind(transmuxer, 'endedtimeline'));
  pipeline.on('audioTimingInfo', transmuxer.trigger.bind(transmuxer, 'audioTimingInfo'));
  pipeline.on('videoTimingInfo', transmuxer.trigger.bind(transmuxer, 'videoTimingInfo'));
  pipeline.on('trackinfo', transmuxer.trigger.bind(transmuxer, 'trackinfo'));

  pipeline.on('id3Frame', function(event) {
    // add this to every single emitted segment even though it's only needed for the first
    event.dispatchType = pipeline.metadataStream.dispatchType;
    // keep original time, can be adjusted if needed at a higher level
    event.cueTime = clock.videoTsToSeconds(event.pts);

    transmuxer.trigger('id3Frame', event);
  });

  pipeline.on('caption', function(event) {
    transmuxer.trigger('caption', event);
  });
};
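
/**
 * A stream-oriented transmuxer that accepts MPEG-2 TS or raw AAC input and
 * emits fragmented-MP4 data events. The concrete pipeline is chosen on the
 * first push after a flush, based on whether the bytes look like AAC.
 */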
var Transmuxer = function(options) {
  var
    pipeline = null,
    hasFlushed = true;

  options = options || {};

  Transmuxer.prototype.init.call(this);
  options.baseMediaDecodeTime = options.baseMediaDecodeTime || 0;

  this.push = function(bytes) {
    if (hasFlushed) {
      var isAac = isLikelyAacData(bytes);

      if (isAac && (!pipeline || pipeline.type !== 'aac')) {
        pipeline = aacPipeline(options);
        setupPipelineListeners(pipeline, this);
      } else if (!isAac && (!pipeline || pipeline.type !== 'ts')) {
        pipeline = tsPipeline(options);
        setupPipelineListeners(pipeline, this);
      }
      hasFlushed = false;
    }

    pipeline.headOfPipeline.push(bytes);
  };

  this.flush = function() {
    if (!pipeline) {
      return;
    }

    hasFlushed = true;
    pipeline.headOfPipeline.flush();
  };

  this.partialFlush = function() {
    if (!pipeline) {
      return;
    }

    pipeline.headOfPipeline.partialFlush();
  };

  this.endTimeline = function() {
    if (!pipeline) {
      return;
    }

    pipeline.headOfPipeline.endTimeline();
  };

  this.reset = function() {
    if (!pipeline) {
      return;
    }

    pipeline.headOfPipeline.reset();
  };

  this.setBaseMediaDecodeTime = function(baseMediaDecodeTime) {
    if (!options.keepOriginalTimestamps) {
      options.baseMediaDecodeTime = baseMediaDecodeTime;
    }

    if (!pipeline) {
      return;
    }

    if (pipeline.tracks.audio) {
      pipeline.tracks.audio.timelineStartInfo.dts = undefined;
      pipeline.tracks.audio.timelineStartInfo.pts = undefined;
      trackInfo.clearDtsInfo(pipeline.tracks.audio);
      if (pipeline.audioRollover) {
        pipeline.audioRollover.discontinuity();
      }
    }

    if (pipeline.tracks.video) {
      if (pipeline.videoSegmentStream) {
        pipeline.videoSegmentStream.gopCache_ = [];
      }
      pipeline.tracks.video.timelineStartInfo.dts = undefined;
      pipeline.tracks.video.timelineStartInfo.pts = undefined;
      trackInfo.clearDtsInfo(pipeline.tracks.video);
      // pipeline.captionStream.reset();
    }

    if (pipeline.timestampRollover) {
      pipeline.timestampRollover.discontinuity();
    }
  };

  this.setRemux = function(val) {
    options.remux = val;

    if (pipeline && pipeline.coalesceStream) {
      pipeline.coalesceStream.setRemux(val);
    }
  };

  this.setAudioAppendStart = function(audioAppendStart) {
    if (!pipeline || !pipeline.tracks.audio || !pipeline.audioSegmentStream) {
      return;
    }

    pipeline.audioSegmentStream.setAudioAppendStart(audioAppendStart);
  };

  // TODO GOP alignment support
  // Support may be a bit trickier than with full segment appends, as GOPs may be split
  // and processed in a more granular fashion
  this.alignGopsWith = function(gopsToAlignWith) {
    return;
  };
};

Transmuxer.prototype = new Stream();

module.exports = Transmuxer;
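
// A minimal usage sketch (illustrative only, not part of the module).
// `segmentBytes` stands in for a caller-supplied Uint8Array of TS or ADTS
// data; everything else below is the API defined in this file:
//
//   var Transmuxer = require('./transmuxer.js');
//   var transmuxer = new Transmuxer({ baseMediaDecodeTime: 0 });
//
//   transmuxer.on('data', function(event) {
//     // event.type is 'audio' or 'video'; event.data holds the generated
//     // fragment output from the corresponding segment stream
//   });
//   transmuxer.on('done', function() {
//     // all queued input has been processed
//   });
//
//   transmuxer.push(segmentBytes);
//   transmuxer.flush();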