videojs-contrib-media-sources.js 460 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
71278127912801281128212831284128512861287128812891290129112921293129412951296129712981299130013011302130313041305130613071308130913101311131213131314131513161317131813191320132113221323132413251326132713281329133013311332133313341335133613371338133913401341134213431344134513461347134813491350135113521353135413551356135713581359136013611362136313641365136613671368136913701371137213731374137513761377137813791380138113821383138413851386138713881389139013911392139313941395139613971398139914001401140214031404140514061407140814091410141114121413141414151416141714181419142014211422142314241425142614271428142914301431143214331434143514361437143814391440144114421443144414451446144714481449145014511452145314541455145614571458145914601461146214631464146514661467146814691470147114721473147414751476147714781479148014811482148314841485148614871488148914901491149214931494149514961497149814991500150115021503150415051506150715081509151015111512151315141515151615171518151915201521152215231524152515261527152815291530153115321533153415351536153715381539154015411542154315441545154615471548154915501551155215531554155515561557155815591560156115621563156415651566156715681569157015711572157315741575157615771578157915801581158215831584158515861587158815891590159115921593159415951596159715981599160016011602160316041605160616071608160916101611161216131614161516161617161816191620162116221623162416251626162716281629163016311632163316341635163616371638163916401641164216431644164516461647164816491650165116521653165416551656165716581659166016611662166316641665166616671668166916701671167216731674167516761677167816791680168116821683168416851686168716881689169016911692169316941695169616971698169917001701170217031704170517061707170817091710171117121713171417151716171717181719172017211722172317241725172617271728172917301731173217331734173517361737173817391740174117421743174417451746174717481749175017511752175317541755175617571758175917601761176217631764176517661767176817691770177117721773177417751776177
71778177917801781178217831784178517861787178817891790179117921793179417951796179717981799180018011802180318041805180618071808180918101811181218131814181518161817181818191820182118221823182418251826182718281829183018311832183318341835183618371838183918401841184218431844184518461847184818491850185118521853185418551856185718581859186018611862186318641865186618671868186918701871187218731874187518761877187818791880188118821883188418851886188718881889189018911892189318941895189618971898189919001901190219031904190519061907190819091910191119121913191419151916191719181919192019211922192319241925192619271928192919301931193219331934193519361937193819391940194119421943194419451946194719481949195019511952195319541955195619571958195919601961196219631964196519661967196819691970197119721973197419751976197719781979198019811982198319841985198619871988198919901991199219931994199519961997199819992000200120022003200420052006200720082009201020112012201320142015201620172018201920202021202220232024202520262027202820292030203120322033203420352036203720382039204020412042204320442045204620472048204920502051205220532054205520562057205820592060206120622063206420652066206720682069207020712072207320742075207620772078207920802081208220832084208520862087208820892090209120922093209420952096209720982099210021012102210321042105210621072108210921102111211221132114211521162117211821192120212121222123212421252126212721282129213021312132213321342135213621372138213921402141214221432144214521462147214821492150215121522153215421552156215721582159216021612162216321642165216621672168216921702171217221732174217521762177217821792180218121822183218421852186218721882189219021912192219321942195219621972198219922002201220222032204220522062207220822092210221122122213221422152216221722182219222022212222222322242225222622272228222922302231223222332234223522362237223822392240224122422243224422452246224722482249225022512252225322542255225622572258225922602261226222632264226522662267226822692270227122722273227422752276227
72278227922802281228222832284228522862287228822892290229122922293229422952296229722982299230023012302230323042305230623072308230923102311231223132314231523162317231823192320232123222323232423252326232723282329233023312332233323342335233623372338233923402341234223432344234523462347234823492350235123522353235423552356235723582359236023612362236323642365236623672368236923702371237223732374237523762377237823792380238123822383238423852386238723882389239023912392239323942395239623972398239924002401240224032404240524062407240824092410241124122413241424152416241724182419242024212422242324242425242624272428242924302431243224332434243524362437243824392440244124422443244424452446244724482449245024512452245324542455245624572458245924602461246224632464246524662467246824692470247124722473247424752476247724782479248024812482248324842485248624872488248924902491249224932494249524962497249824992500250125022503250425052506250725082509251025112512251325142515251625172518251925202521252225232524252525262527252825292530253125322533253425352536253725382539254025412542254325442545254625472548254925502551255225532554255525562557255825592560256125622563256425652566256725682569257025712572257325742575257625772578257925802581258225832584258525862587258825892590259125922593259425952596259725982599260026012602260326042605260626072608260926102611261226132614261526162617261826192620262126222623262426252626262726282629263026312632263326342635263626372638263926402641264226432644264526462647264826492650265126522653265426552656265726582659266026612662266326642665266626672668266926702671267226732674267526762677267826792680268126822683268426852686268726882689269026912692269326942695269626972698269927002701270227032704270527062707270827092710271127122713271427152716271727182719272027212722272327242725272627272728272927302731273227332734273527362737273827392740274127422743274427452746274727482749275027512752275327542755275627572758275927602761276227632764276527662767276827692770277127722773277427752776277
72778277927802781278227832784278527862787278827892790279127922793279427952796279727982799280028012802280328042805280628072808280928102811281228132814281528162817281828192820282128222823282428252826282728282829283028312832283328342835283628372838283928402841284228432844284528462847284828492850285128522853285428552856285728582859286028612862286328642865286628672868286928702871287228732874287528762877287828792880288128822883288428852886288728882889289028912892289328942895289628972898289929002901290229032904290529062907290829092910291129122913291429152916291729182919292029212922292329242925292629272928292929302931293229332934293529362937293829392940294129422943294429452946294729482949295029512952295329542955295629572958295929602961296229632964296529662967296829692970297129722973297429752976297729782979298029812982298329842985298629872988298929902991299229932994299529962997299829993000300130023003300430053006300730083009301030113012301330143015301630173018301930203021302230233024302530263027302830293030303130323033303430353036303730383039304030413042304330443045304630473048304930503051305230533054305530563057305830593060306130623063306430653066306730683069307030713072307330743075307630773078307930803081308230833084308530863087308830893090309130923093309430953096309730983099310031013102310331043105310631073108310931103111311231133114311531163117311831193120312131223123312431253126312731283129313031313132313331343135313631373138313931403141314231433144314531463147314831493150315131523153315431553156315731583159316031613162316331643165316631673168316931703171317231733174317531763177317831793180318131823183318431853186318731883189319031913192319331943195319631973198319932003201320232033204320532063207320832093210321132123213321432153216321732183219322032213222322332243225322632273228322932303231323232333234323532363237323832393240324132423243324432453246324732483249325032513252325332543255325632573258325932603261326232633264326532663267326832693270327132723273327432753276327
73278327932803281328232833284328532863287328832893290329132923293329432953296329732983299330033013302330333043305330633073308330933103311331233133314331533163317331833193320332133223323332433253326332733283329333033313332333333343335333633373338333933403341334233433344334533463347334833493350335133523353335433553356335733583359336033613362336333643365336633673368336933703371337233733374337533763377337833793380338133823383338433853386338733883389339033913392339333943395339633973398339934003401340234033404340534063407340834093410341134123413341434153416341734183419342034213422342334243425342634273428342934303431343234333434343534363437343834393440344134423443344434453446344734483449345034513452345334543455345634573458345934603461346234633464346534663467346834693470347134723473347434753476347734783479348034813482348334843485348634873488348934903491349234933494349534963497349834993500350135023503350435053506350735083509351035113512351335143515351635173518351935203521352235233524352535263527352835293530353135323533353435353536353735383539354035413542354335443545354635473548354935503551355235533554355535563557355835593560356135623563356435653566356735683569357035713572357335743575357635773578357935803581358235833584358535863587358835893590359135923593359435953596359735983599360036013602360336043605360636073608360936103611361236133614361536163617361836193620362136223623362436253626362736283629363036313632363336343635363636373638363936403641364236433644364536463647364836493650365136523653365436553656365736583659366036613662366336643665366636673668366936703671367236733674367536763677367836793680368136823683368436853686368736883689369036913692369336943695369636973698369937003701370237033704370537063707370837093710371137123713371437153716371737183719372037213722372337243725372637273728372937303731373237333734373537363737373837393740374137423743374437453746374737483749375037513752375337543755375637573758375937603761376237633764376537663767376837693770377137723773377437753776377
73778377937803781378237833784378537863787378837893790379137923793379437953796379737983799380038013802380338043805380638073808380938103811381238133814381538163817381838193820382138223823382438253826382738283829383038313832383338343835383638373838383938403841384238433844384538463847384838493850385138523853385438553856385738583859386038613862386338643865386638673868386938703871387238733874387538763877387838793880388138823883388438853886388738883889389038913892389338943895389638973898389939003901390239033904390539063907390839093910391139123913391439153916391739183919392039213922392339243925392639273928392939303931393239333934393539363937393839393940394139423943394439453946394739483949395039513952395339543955395639573958395939603961396239633964396539663967396839693970397139723973397439753976397739783979398039813982398339843985398639873988398939903991399239933994399539963997399839994000400140024003400440054006400740084009401040114012401340144015401640174018401940204021402240234024402540264027402840294030403140324033403440354036403740384039404040414042404340444045404640474048404940504051405240534054405540564057405840594060406140624063406440654066406740684069407040714072407340744075407640774078407940804081408240834084408540864087408840894090409140924093409440954096409740984099410041014102410341044105410641074108410941104111411241134114411541164117411841194120412141224123412441254126412741284129413041314132413341344135413641374138413941404141414241434144414541464147414841494150415141524153415441554156415741584159416041614162416341644165416641674168416941704171417241734174417541764177417841794180418141824183418441854186418741884189419041914192419341944195419641974198419942004201420242034204420542064207420842094210421142124213421442154216421742184219422042214222422342244225422642274228422942304231423242334234423542364237423842394240424142424243424442454246424742484249425042514252425342544255425642574258425942604261426242634264426542664267426842694270427142724273427442754276427
74278427942804281428242834284428542864287428842894290429142924293429442954296429742984299430043014302430343044305430643074308430943104311431243134314431543164317431843194320432143224323432443254326432743284329433043314332433343344335433643374338433943404341434243434344434543464347434843494350435143524353435443554356435743584359436043614362436343644365436643674368436943704371437243734374437543764377437843794380438143824383438443854386438743884389439043914392439343944395439643974398439944004401440244034404440544064407440844094410441144124413441444154416441744184419442044214422442344244425442644274428442944304431443244334434443544364437443844394440444144424443444444454446444744484449445044514452445344544455445644574458445944604461446244634464446544664467446844694470447144724473447444754476447744784479448044814482448344844485448644874488448944904491449244934494449544964497449844994500450145024503450445054506450745084509451045114512451345144515451645174518451945204521452245234524452545264527452845294530453145324533453445354536453745384539454045414542454345444545454645474548454945504551455245534554455545564557455845594560456145624563456445654566456745684569457045714572457345744575457645774578457945804581458245834584458545864587458845894590459145924593459445954596459745984599460046014602460346044605460646074608460946104611461246134614461546164617461846194620462146224623462446254626462746284629463046314632463346344635463646374638463946404641464246434644464546464647464846494650465146524653465446554656465746584659466046614662466346644665466646674668466946704671467246734674467546764677467846794680468146824683468446854686468746884689469046914692469346944695469646974698469947004701470247034704470547064707470847094710471147124713471447154716471747184719472047214722472347244725472647274728472947304731473247334734473547364737473847394740474147424743474447454746474747484749475047514752475347544755475647574758475947604761476247634764476547664767476847694770477147724773477447754776477
74778477947804781478247834784478547864787478847894790479147924793479447954796479747984799480048014802480348044805480648074808480948104811481248134814481548164817481848194820482148224823482448254826482748284829483048314832483348344835483648374838483948404841484248434844484548464847484848494850485148524853485448554856485748584859486048614862486348644865486648674868486948704871487248734874487548764877487848794880488148824883488448854886488748884889489048914892489348944895489648974898489949004901490249034904490549064907490849094910491149124913491449154916491749184919492049214922492349244925492649274928492949304931493249334934493549364937493849394940494149424943494449454946494749484949495049514952495349544955495649574958495949604961496249634964496549664967496849694970497149724973497449754976497749784979498049814982498349844985498649874988498949904991499249934994499549964997499849995000500150025003500450055006500750085009501050115012501350145015501650175018501950205021502250235024502550265027502850295030503150325033503450355036503750385039504050415042504350445045504650475048504950505051505250535054505550565057505850595060506150625063506450655066506750685069507050715072507350745075507650775078507950805081508250835084508550865087508850895090509150925093509450955096509750985099510051015102510351045105510651075108510951105111511251135114511551165117511851195120512151225123512451255126512751285129513051315132513351345135513651375138513951405141514251435144514551465147514851495150515151525153515451555156515751585159516051615162516351645165516651675168516951705171517251735174517551765177517851795180518151825183518451855186518751885189519051915192519351945195519651975198519952005201520252035204520552065207520852095210521152125213521452155216521752185219522052215222522352245225522652275228522952305231523252335234523552365237523852395240524152425243524452455246524752485249525052515252525352545255525652575258525952605261526252635264526552665267526852695270527152725273527452755276527
75278527952805281528252835284528552865287528852895290529152925293529452955296529752985299530053015302530353045305530653075308530953105311531253135314531553165317531853195320532153225323532453255326532753285329533053315332533353345335533653375338533953405341534253435344534553465347534853495350535153525353535453555356535753585359536053615362536353645365536653675368536953705371537253735374537553765377537853795380538153825383538453855386538753885389539053915392539353945395539653975398539954005401540254035404540554065407540854095410541154125413541454155416541754185419542054215422542354245425542654275428542954305431543254335434543554365437543854395440544154425443544454455446544754485449545054515452545354545455545654575458545954605461546254635464546554665467546854695470547154725473547454755476547754785479548054815482548354845485548654875488548954905491549254935494549554965497549854995500550155025503550455055506550755085509551055115512551355145515551655175518551955205521552255235524552555265527552855295530553155325533553455355536553755385539554055415542554355445545554655475548554955505551555255535554555555565557555855595560556155625563556455655566556755685569557055715572557355745575557655775578557955805581558255835584558555865587558855895590559155925593559455955596559755985599560056015602560356045605560656075608560956105611561256135614561556165617561856195620562156225623562456255626562756285629563056315632563356345635563656375638563956405641564256435644564556465647564856495650565156525653565456555656565756585659566056615662566356645665566656675668566956705671567256735674567556765677567856795680568156825683568456855686568756885689569056915692569356945695569656975698569957005701570257035704570557065707570857095710571157125713571457155716571757185719572057215722572357245725572657275728572957305731573257335734573557365737573857395740574157425743574457455746574757485749575057515752575357545755575657575758575957605761576257635764576557665767576857695770577157725773577457755776577
75778577957805781578257835784578557865787578857895790579157925793579457955796579757985799580058015802580358045805580658075808580958105811581258135814581558165817581858195820582158225823582458255826582758285829583058315832583358345835583658375838583958405841584258435844584558465847584858495850585158525853585458555856585758585859586058615862586358645865586658675868586958705871587258735874587558765877587858795880588158825883588458855886588758885889589058915892589358945895589658975898589959005901590259035904590559065907590859095910591159125913591459155916591759185919592059215922592359245925592659275928592959305931593259335934593559365937593859395940594159425943594459455946594759485949595059515952595359545955595659575958595959605961596259635964596559665967596859695970597159725973597459755976597759785979598059815982598359845985598659875988598959905991599259935994599559965997599859996000600160026003600460056006600760086009601060116012601360146015601660176018601960206021602260236024602560266027602860296030603160326033603460356036603760386039604060416042604360446045604660476048604960506051605260536054605560566057605860596060606160626063606460656066606760686069607060716072607360746075607660776078607960806081608260836084608560866087608860896090609160926093609460956096609760986099610061016102610361046105610661076108610961106111611261136114611561166117611861196120612161226123612461256126612761286129613061316132613361346135613661376138613961406141614261436144614561466147614861496150615161526153615461556156615761586159616061616162616361646165616661676168616961706171617261736174617561766177617861796180618161826183618461856186618761886189619061916192619361946195619661976198619962006201620262036204620562066207620862096210621162126213621462156216621762186219622062216222622362246225622662276228622962306231623262336234623562366237623862396240624162426243624462456246624762486249625062516252625362546255625662576258625962606261626262636264626562666267626862696270627162726273627462756276627
76278627962806281628262836284628562866287628862896290629162926293629462956296629762986299630063016302630363046305630663076308630963106311631263136314631563166317631863196320632163226323632463256326632763286329633063316332633363346335633663376338633963406341634263436344634563466347634863496350635163526353635463556356635763586359636063616362636363646365636663676368636963706371637263736374637563766377637863796380638163826383638463856386638763886389639063916392639363946395639663976398639964006401640264036404640564066407640864096410641164126413641464156416641764186419642064216422642364246425642664276428642964306431643264336434643564366437643864396440644164426443644464456446644764486449645064516452645364546455645664576458645964606461646264636464646564666467646864696470647164726473647464756476647764786479648064816482648364846485648664876488648964906491649264936494649564966497649864996500650165026503650465056506650765086509651065116512651365146515651665176518651965206521652265236524652565266527652865296530653165326533653465356536653765386539654065416542654365446545654665476548654965506551655265536554655565566557655865596560656165626563656465656566656765686569657065716572657365746575657665776578657965806581658265836584658565866587658865896590659165926593659465956596659765986599660066016602660366046605660666076608660966106611661266136614661566166617661866196620662166226623662466256626662766286629663066316632663366346635663666376638663966406641664266436644664566466647664866496650665166526653665466556656665766586659666066616662666366646665666666676668666966706671667266736674667566766677667866796680668166826683668466856686668766886689669066916692669366946695669666976698669967006701670267036704670567066707670867096710671167126713671467156716671767186719672067216722672367246725672667276728672967306731673267336734673567366737673867396740674167426743674467456746674767486749675067516752675367546755675667576758675967606761676267636764676567666767676867696770677167726773677467756776677
76778677967806781678267836784678567866787678867896790679167926793679467956796679767986799680068016802680368046805680668076808680968106811681268136814681568166817681868196820682168226823682468256826682768286829683068316832683368346835683668376838683968406841684268436844684568466847684868496850685168526853685468556856685768586859686068616862686368646865686668676868686968706871687268736874687568766877687868796880688168826883688468856886688768886889689068916892689368946895689668976898689969006901690269036904690569066907690869096910691169126913691469156916691769186919692069216922692369246925692669276928692969306931693269336934693569366937693869396940694169426943694469456946694769486949695069516952695369546955695669576958695969606961696269636964696569666967696869696970697169726973697469756976697769786979698069816982698369846985698669876988698969906991699269936994699569966997699869997000700170027003700470057006700770087009701070117012701370147015701670177018701970207021702270237024702570267027702870297030703170327033703470357036703770387039704070417042704370447045704670477048704970507051705270537054705570567057705870597060706170627063706470657066706770687069707070717072707370747075707670777078707970807081708270837084708570867087708870897090709170927093709470957096709770987099710071017102710371047105710671077108710971107111711271137114711571167117711871197120712171227123712471257126712771287129713071317132713371347135713671377138713971407141714271437144714571467147714871497150715171527153715471557156715771587159716071617162716371647165716671677168716971707171717271737174717571767177717871797180718171827183718471857186718771887189719071917192719371947195719671977198719972007201720272037204720572067207720872097210721172127213721472157216721772187219722072217222722372247225722672277228722972307231723272337234723572367237723872397240724172427243724472457246724772487249725072517252725372547255725672577258725972607261726272637264726572667267726872697270727172727273727472757276727
77278727972807281728272837284728572867287728872897290729172927293729472957296729772987299730073017302730373047305730673077308730973107311731273137314731573167317731873197320732173227323732473257326732773287329733073317332733373347335733673377338733973407341734273437344734573467347734873497350735173527353735473557356735773587359736073617362736373647365736673677368736973707371737273737374737573767377737873797380738173827383738473857386738773887389739073917392739373947395739673977398739974007401740274037404740574067407740874097410741174127413741474157416741774187419742074217422742374247425742674277428742974307431743274337434743574367437743874397440744174427443744474457446744774487449745074517452745374547455745674577458745974607461746274637464746574667467746874697470747174727473747474757476747774787479748074817482748374847485748674877488748974907491749274937494749574967497749874997500750175027503750475057506750775087509751075117512751375147515751675177518751975207521752275237524752575267527752875297530753175327533753475357536753775387539754075417542754375447545754675477548754975507551755275537554755575567557755875597560756175627563756475657566756775687569757075717572757375747575757675777578757975807581758275837584758575867587758875897590759175927593759475957596759775987599760076017602760376047605760676077608760976107611761276137614761576167617761876197620762176227623762476257626762776287629763076317632763376347635763676377638763976407641764276437644764576467647764876497650765176527653765476557656765776587659766076617662766376647665766676677668766976707671767276737674767576767677767876797680768176827683768476857686768776887689769076917692769376947695769676977698769977007701770277037704770577067707770877097710771177127713771477157716771777187719772077217722772377247725772677277728772977307731773277337734773577367737773877397740774177427743774477457746774777487749775077517752775377547755775677577758775977607761776277637764776577667767776877697770777177727773777477757776777
77778777977807781778277837784778577867787778877897790779177927793779477957796779777987799780078017802780378047805780678077808780978107811781278137814781578167817781878197820782178227823782478257826782778287829783078317832783378347835783678377838783978407841784278437844784578467847784878497850785178527853785478557856785778587859786078617862786378647865786678677868786978707871787278737874787578767877787878797880788178827883788478857886788778887889789078917892789378947895789678977898789979007901790279037904790579067907790879097910791179127913791479157916791779187919792079217922792379247925792679277928792979307931793279337934793579367937793879397940794179427943794479457946794779487949795079517952795379547955795679577958795979607961796279637964796579667967796879697970797179727973797479757976797779787979798079817982798379847985798679877988798979907991799279937994799579967997799879998000800180028003800480058006800780088009801080118012801380148015801680178018801980208021802280238024802580268027802880298030803180328033803480358036803780388039804080418042804380448045804680478048804980508051805280538054805580568057805880598060806180628063806480658066806780688069807080718072807380748075807680778078807980808081808280838084808580868087808880898090809180928093809480958096809780988099810081018102810381048105810681078108810981108111811281138114811581168117811881198120812181228123812481258126812781288129813081318132813381348135813681378138813981408141814281438144814581468147814881498150815181528153815481558156815781588159816081618162816381648165816681678168816981708171817281738174817581768177817881798180818181828183818481858186818781888189819081918192819381948195819681978198819982008201820282038204820582068207820882098210821182128213821482158216821782188219822082218222822382248225822682278228822982308231823282338234823582368237823882398240824182428243824482458246824782488249825082518252825382548255825682578258825982608261826282638264826582668267826882698270827182728273827482758276827
78278827982808281828282838284828582868287828882898290829182928293829482958296829782988299830083018302830383048305830683078308830983108311831283138314831583168317831883198320832183228323832483258326832783288329833083318332833383348335833683378338833983408341834283438344834583468347834883498350835183528353835483558356835783588359836083618362836383648365836683678368836983708371837283738374837583768377837883798380838183828383838483858386838783888389839083918392839383948395839683978398839984008401840284038404840584068407840884098410841184128413841484158416841784188419842084218422842384248425842684278428842984308431843284338434843584368437843884398440844184428443844484458446844784488449845084518452845384548455845684578458845984608461846284638464846584668467846884698470847184728473847484758476847784788479848084818482848384848485848684878488848984908491849284938494849584968497849884998500850185028503850485058506850785088509851085118512851385148515851685178518851985208521852285238524852585268527852885298530853185328533853485358536853785388539854085418542854385448545854685478548854985508551855285538554855585568557855885598560856185628563856485658566856785688569857085718572857385748575857685778578857985808581858285838584858585868587858885898590859185928593859485958596859785988599860086018602860386048605860686078608860986108611861286138614861586168617861886198620862186228623862486258626862786288629863086318632863386348635863686378638863986408641864286438644864586468647864886498650865186528653865486558656865786588659866086618662866386648665866686678668866986708671867286738674867586768677867886798680868186828683868486858686868786888689869086918692869386948695869686978698869987008701870287038704870587068707870887098710871187128713871487158716871787188719872087218722872387248725872687278728872987308731873287338734873587368737873887398740874187428743874487458746874787488749875087518752875387548755875687578758875987608761876287638764876587668767876887698770877187728773877487758776877
78778877987808781878287838784878587868787878887898790879187928793879487958796879787988799880088018802880388048805880688078808880988108811881288138814881588168817881888198820882188228823882488258826882788288829883088318832883388348835883688378838883988408841884288438844884588468847884888498850885188528853885488558856885788588859886088618862886388648865886688678868886988708871887288738874887588768877887888798880888188828883888488858886888788888889889088918892889388948895889688978898889989008901890289038904890589068907890889098910891189128913891489158916891789188919892089218922892389248925892689278928892989308931893289338934893589368937893889398940894189428943894489458946894789488949895089518952895389548955895689578958895989608961896289638964896589668967896889698970897189728973897489758976897789788979898089818982898389848985898689878988898989908991899289938994899589968997899889999000900190029003900490059006900790089009901090119012901390149015901690179018901990209021902290239024902590269027902890299030903190329033903490359036903790389039904090419042904390449045904690479048904990509051905290539054905590569057905890599060906190629063906490659066906790689069907090719072907390749075907690779078907990809081908290839084908590869087908890899090909190929093909490959096909790989099910091019102910391049105910691079108910991109111911291139114911591169117911891199120912191229123912491259126912791289129913091319132913391349135913691379138913991409141914291439144914591469147914891499150915191529153915491559156915791589159916091619162916391649165916691679168916991709171917291739174917591769177917891799180918191829183918491859186918791889189919091919192919391949195919691979198919992009201920292039204920592069207920892099210921192129213921492159216921792189219922092219222922392249225922692279228922992309231923292339234923592369237923892399240924192429243924492459246924792489249925092519252925392549255925692579258925992609261926292639264926592669267926892699270927192729273927492759276927
79278927992809281928292839284928592869287928892899290929192929293929492959296929792989299930093019302930393049305930693079308930993109311931293139314931593169317931893199320932193229323932493259326932793289329933093319332933393349335933693379338933993409341934293439344934593469347934893499350935193529353935493559356935793589359936093619362936393649365936693679368936993709371937293739374937593769377937893799380938193829383938493859386938793889389939093919392939393949395939693979398939994009401940294039404940594069407940894099410941194129413941494159416941794189419942094219422942394249425942694279428942994309431943294339434943594369437943894399440944194429443944494459446944794489449945094519452945394549455945694579458945994609461946294639464946594669467946894699470947194729473947494759476947794789479948094819482948394849485948694879488948994909491949294939494949594969497949894999500950195029503950495059506950795089509951095119512951395149515951695179518951995209521952295239524952595269527952895299530953195329533953495359536953795389539954095419542954395449545954695479548954995509551955295539554955595569557955895599560956195629563956495659566956795689569957095719572957395749575957695779578957995809581958295839584958595869587958895899590959195929593959495959596959795989599960096019602960396049605960696079608960996109611961296139614961596169617961896199620962196229623962496259626962796289629963096319632963396349635963696379638963996409641964296439644964596469647964896499650965196529653965496559656965796589659966096619662966396649665966696679668966996709671967296739674967596769677967896799680968196829683968496859686968796889689969096919692969396949695969696979698969997009701970297039704970597069707970897099710971197129713971497159716971797189719972097219722972397249725972697279728972997309731973297339734973597369737973897399740974197429743974497459746974797489749975097519752975397549755975697579758975997609761976297639764976597669767976897699770977197729773977497759776977
79778977997809781978297839784978597869787978897899790979197929793979497959796979797989799980098019802980398049805980698079808980998109811981298139814981598169817981898199820982198229823982498259826982798289829983098319832983398349835983698379838983998409841984298439844984598469847984898499850985198529853985498559856985798589859986098619862986398649865986698679868986998709871987298739874987598769877987898799880988198829883988498859886988798889889989098919892989398949895989698979898989999009901990299039904990599069907990899099910991199129913991499159916991799189919992099219922992399249925992699279928992999309931993299339934993599369937993899399940994199429943994499459946994799489949995099519952995399549955995699579958995999609961996299639964996599669967996899699970997199729973997499759976997799789979998099819982998399849985998699879988998999909991999299939994999599969997999899991000010001100021000310004100051000610007100081000910010100111001210013100141001510016100171001810019100201002110022100231002410025100261002710028100291003010031100321003310034100351003610037100381003910040100411004210043100441004510046100471004810049100501005110052100531005410055100561005710058100591006010061100621006310064100651006610067100681006910070100711007210073100741007510076100771007810079100801008110082100831008410085100861008710088100891009010091100921009310094100951009610097100981009910100101011010210103101041010510106101071010810109101101011110112101131011410115101161011710118101191012010121101221012310124101251012610127101281012910130101311013210133101341013510136101371013810139101401014110142101431014410145101461014710148101491015010151101521015310154101551015610157101581015910160101611016210163101641016510166101671016810169101701017110172101731017410175101761017710178101791018010181101821018310184101851018610187101881018910190101911019210193101941019510196101971019810199102001020110202102031020410205102061020710208102091021010211102121021310214102151021610217102181021910220102211
02221022310224102251022610227102281022910230102311023210233102341023510236102371023810239102401024110242102431024410245102461024710248102491025010251102521025310254102551025610257102581025910260102611026210263102641026510266102671026810269102701027110272102731027410275102761027710278102791028010281102821028310284102851028610287102881028910290102911029210293102941029510296102971029810299103001030110302103031030410305103061030710308103091031010311103121031310314103151031610317103181031910320103211032210323103241032510326103271032810329103301033110332103331033410335103361033710338103391034010341103421034310344103451034610347103481034910350103511035210353103541035510356103571035810359103601036110362103631036410365103661036710368103691037010371103721037310374103751037610377103781037910380103811038210383103841038510386103871038810389103901039110392103931039410395103961039710398103991040010401104021040310404104051040610407104081040910410104111041210413104141041510416104171041810419104201042110422104231042410425104261042710428104291043010431104321043310434104351043610437104381043910440104411044210443104441044510446104471044810449104501045110452104531045410455104561045710458104591046010461104621046310464104651046610467104681046910470104711047210473104741047510476104771047810479104801048110482104831048410485104861048710488104891049010491104921049310494104951049610497104981049910500105011050210503105041050510506105071050810509105101051110512105131051410515105161051710518105191052010521105221052310524105251052610527105281052910530105311053210533105341053510536105371053810539105401054110542105431054410545105461054710548105491055010551105521055310554105551055610557105581055910560105611056210563105641056510566105671056810569105701057110572105731057410575105761057710578105791058010581105821058310584105851058610587105881058910590105911059210593105941059510596105971059810599106001060110602106031060410605106061060710608106091061010611106121061310614106151061610617106181061910620106211
06221062310624106251062610627106281062910630106311063210633106341063510636106371063810639106401064110642106431064410645106461064710648106491065010651106521065310654106551065610657106581065910660106611066210663106641066510666106671066810669106701067110672106731067410675106761067710678106791068010681106821068310684106851068610687106881068910690106911069210693106941069510696106971069810699107001070110702107031070410705107061070710708107091071010711107121071310714107151071610717107181071910720107211072210723107241072510726107271072810729107301073110732107331073410735107361073710738107391074010741107421074310744107451074610747107481074910750107511075210753107541075510756107571075810759107601076110762107631076410765107661076710768107691077010771107721077310774107751077610777107781077910780107811078210783107841078510786107871078810789107901079110792107931079410795107961079710798107991080010801108021080310804108051080610807108081080910810108111081210813108141081510816108171081810819108201082110822108231082410825108261082710828108291083010831108321083310834108351083610837108381083910840108411084210843108441084510846108471084810849108501085110852108531085410855108561085710858108591086010861108621086310864108651086610867108681086910870108711087210873108741087510876108771087810879108801088110882108831088410885108861088710888108891089010891108921089310894108951089610897108981089910900109011090210903109041090510906109071090810909109101091110912109131091410915109161091710918109191092010921109221092310924109251092610927109281092910930109311093210933109341093510936109371093810939109401094110942109431094410945109461094710948109491095010951109521095310954109551095610957109581095910960109611096210963109641096510966109671096810969109701097110972109731097410975109761097710978109791098010981109821098310984109851098610987109881098910990109911099210993109941099510996109971099810999110001100111002110031100411005110061100711008110091101011011110121101311014110151101611017110181101911020110211
10221102311024110251102611027110281102911030110311103211033110341103511036110371103811039110401104111042110431104411045110461104711048110491105011051110521105311054110551105611057110581105911060110611106211063110641106511066110671106811069110701107111072110731107411075110761107711078110791108011081110821108311084110851108611087110881108911090110911109211093110941109511096110971109811099111001110111102111031110411105111061110711108111091111011111111121111311114111151111611117111181111911120111211112211123111241112511126111271112811129111301113111132111331113411135111361113711138111391114011141111421114311144111451114611147111481114911150111511115211153111541115511156111571115811159111601116111162111631116411165111661116711168111691117011171111721117311174111751117611177111781117911180111811118211183111841118511186111871118811189111901119111192111931119411195111961119711198111991120011201112021120311204112051120611207112081120911210112111121211213112141121511216112171121811219112201122111222112231122411225112261122711228112291123011231112321123311234112351123611237112381123911240112411124211243112441124511246112471124811249112501125111252112531125411255112561125711258112591126011261112621126311264112651126611267112681126911270112711127211273112741127511276112771127811279112801128111282112831128411285112861128711288112891129011291112921129311294112951129611297112981129911300113011130211303113041130511306113071130811309113101131111312113131131411315113161131711318113191132011321113221132311324113251132611327113281132911330113311133211333113341133511336113371133811339113401134111342113431134411345113461134711348113491135011351113521135311354113551135611357113581135911360113611136211363113641136511366113671136811369113701137111372113731137411375113761137711378113791138011381113821138311384113851138611387113881138911390113911139211393113941139511396113971139811399114001140111402114031140411405114061140711408114091141011411114121141311414114151141611417114181141911420114211
14221142311424114251142611427114281142911430114311143211433114341143511436114371143811439114401144111442114431144411445114461144711448114491145011451114521145311454114551145611457114581145911460114611146211463114641146511466114671146811469114701147111472114731147411475114761147711478114791148011481114821148311484114851148611487114881148911490114911149211493114941149511496114971149811499115001150111502115031150411505115061150711508115091151011511115121151311514115151151611517115181151911520115211152211523115241152511526115271152811529115301153111532115331153411535115361153711538115391154011541115421154311544115451154611547115481154911550115511155211553115541155511556115571155811559115601156111562115631156411565115661156711568115691157011571115721157311574115751157611577115781157911580115811158211583115841158511586115871158811589115901159111592115931159411595115961159711598115991160011601116021160311604116051160611607116081160911610116111161211613116141161511616116171161811619116201162111622116231162411625116261162711628116291163011631116321163311634116351163611637116381163911640116411164211643116441164511646116471164811649116501165111652116531165411655116561165711658116591166011661116621166311664116651166611667116681166911670116711167211673116741167511676116771167811679116801168111682116831168411685116861168711688116891169011691116921169311694116951169611697116981169911700117011170211703117041170511706117071170811709117101171111712117131171411715117161171711718117191172011721117221172311724117251172611727117281172911730117311173211733117341173511736117371173811739117401174111742117431174411745117461174711748117491175011751117521175311754117551175611757117581175911760117611176211763117641176511766117671176811769117701177111772117731177411775117761177711778117791178011781117821178311784117851178611787117881178911790117911179211793117941179511796117971179811799118001180111802118031180411805118061180711808118091181011811118121181311814118151181611817118181181911820118211
18221182311824118251182611827118281182911830118311183211833118341183511836118371183811839118401184111842118431184411845118461184711848118491185011851118521185311854118551185611857118581185911860118611186211863118641186511866118671186811869118701187111872118731187411875118761187711878118791188011881118821188311884118851188611887118881188911890118911189211893118941189511896118971189811899119001190111902119031190411905119061190711908119091191011911119121191311914119151191611917119181191911920119211192211923119241192511926119271192811929119301193111932119331193411935119361193711938119391194011941119421194311944119451194611947119481194911950119511195211953119541195511956119571195811959119601196111962119631196411965119661196711968119691197011971119721197311974119751197611977119781197911980119811198211983119841198511986119871198811989119901199111992119931199411995119961199711998119991200012001120021200312004120051200612007120081200912010120111201212013120141201512016120171201812019120201202112022120231202412025120261202712028120291203012031120321203312034120351203612037120381203912040120411204212043120441204512046120471204812049120501205112052120531205412055120561205712058120591206012061120621206312064120651206612067120681206912070120711207212073120741207512076120771207812079120801208112082120831208412085120861208712088120891209012091120921209312094120951209612097120981209912100121011210212103121041210512106121071210812109121101211112112121131211412115121161211712118121191212012121121221212312124121251212612127121281212912130121311213212133121341213512136121371213812139121401214112142121431214412145121461214712148121491215012151121521215312154121551215612157121581215912160121611216212163121641216512166121671216812169121701217112172121731217412175121761217712178121791218012181121821218312184121851218612187121881218912190121911219212193121941219512196121971219812199122001220112202122031220412205122061220712208122091221012211122121221312214122151221612217122181221912220122211
22221222312224122251222612227122281222912230122311223212233122341223512236122371223812239122401224112242122431224412245122461224712248122491225012251122521225312254122551225612257122581225912260122611226212263122641226512266122671226812269122701227112272122731227412275122761227712278122791228012281122821228312284122851228612287122881228912290122911229212293122941229512296122971229812299123001230112302123031230412305123061230712308123091231012311123121231312314123151231612317123181231912320123211232212323123241232512326123271232812329123301233112332123331233412335123361233712338123391234012341123421234312344123451234612347123481234912350123511235212353123541235512356123571235812359123601236112362123631236412365123661236712368123691237012371123721237312374123751237612377123781237912380123811238212383123841238512386123871238812389123901239112392123931239412395123961239712398123991240012401124021240312404124051240612407124081240912410124111241212413124141241512416124171241812419124201242112422124231242412425124261242712428124291243012431124321243312434124351243612437124381243912440124411244212443124441244512446124471244812449124501245112452124531245412455124561245712458124591246012461124621246312464124651246612467124681246912470124711247212473124741247512476124771247812479124801248112482124831248412485124861248712488124891249012491124921249312494124951249612497124981249912500125011250212503125041250512506125071250812509125101251112512125131251412515125161251712518125191252012521125221252312524125251252612527125281252912530125311253212533125341253512536125371253812539125401254112542125431254412545125461254712548125491255012551125521255312554125551255612557125581255912560125611256212563125641256512566125671256812569125701257112572125731257412575125761257712578125791258012581125821258312584125851258612587125881258912590125911259212593125941259512596125971259812599126001260112602126031260412605126061260712608126091261012611126121261312614126151261612617126181261912620126211
26221262312624126251262612627126281262912630126311263212633126341263512636126371263812639126401264112642126431264412645126461264712648126491265012651126521265312654126551265612657126581265912660126611266212663126641266512666126671266812669126701267112672126731267412675126761267712678126791268012681126821268312684126851268612687126881268912690126911269212693126941269512696126971269812699127001270112702127031270412705127061270712708127091271012711127121271312714127151271612717127181271912720127211272212723127241272512726127271272812729127301273112732127331273412735127361273712738127391274012741127421274312744127451274612747127481274912750127511275212753127541275512756127571275812759127601276112762127631276412765127661276712768127691277012771127721277312774127751277612777127781277912780127811278212783127841278512786127871278812789127901279112792127931279412795127961279712798127991280012801128021280312804128051280612807128081280912810128111281212813128141281512816128171281812819128201282112822128231282412825128261282712828128291283012831128321283312834128351283612837128381283912840128411284212843128441284512846128471284812849128501285112852128531285412855128561285712858128591286012861128621286312864128651286612867128681286912870128711287212873128741287512876128771287812879128801288112882128831288412885128861288712888128891289012891128921289312894128951289612897128981289912900129011290212903129041290512906129071290812909129101291112912129131291412915129161291712918129191292012921129221292312924129251292612927129281292912930129311293212933129341293512936129371293812939129401294112942129431294412945129461294712948129491295012951129521295312954129551295612957129581295912960129611296212963129641296512966129671296812969129701297112972129731297412975129761297712978129791298012981129821298312984129851298612987129881298912990129911299212993129941299512996129971299812999130001300113002130031300413005130061300713008130091301013011130121301313014130151301613017130181301913020130211
30221302313024130251302613027130281302913030130311303213033130341303513036130371303813039130401304113042130431304413045130461304713048130491305013051130521305313054130551305613057130581305913060130611306213063130641306513066130671306813069130701307113072130731307413075130761307713078130791308013081130821308313084130851308613087130881308913090130911309213093130941309513096130971309813099131001310113102131031310413105131061310713108131091311013111131121311313114131151311613117131181311913120131211312213123131241312513126131271312813129131301313113132131331313413135131361313713138131391314013141131421314313144131451314613147131481314913150131511315213153131541315513156131571315813159131601316113162131631316413165131661316713168131691317013171131721317313174131751317613177131781317913180131811318213183131841318513186131871318813189131901319113192131931319413195131961319713198131991320013201132021320313204132051320613207132081320913210132111321213213132141321513216132171321813219132201322113222132231322413225132261322713228132291323013231132321323313234132351323613237132381323913240132411324213243132441324513246132471324813249132501325113252132531325413255132561325713258132591326013261132621326313264132651326613267132681326913270132711327213273132741327513276132771327813279132801328113282132831328413285132861328713288132891329013291132921329313294132951329613297132981329913300133011330213303133041330513306133071330813309133101331113312133131331413315133161331713318133191332013321133221332313324133251332613327133281332913330133311333213333133341333513336133371333813339133401334113342133431334413345133461334713348133491335013351133521335313354133551335613357133581335913360133611336213363133641336513366133671336813369133701337113372133731337413375133761337713378133791338013381133821338313384133851338613387133881338913390133911339213393133941339513396133971339813399134001340113402134031340413405134061340713408134091341013411134121341313414134151341613417134181341913420134211
34221342313424134251342613427134281342913430134311343213433134341343513436134371343813439134401344113442134431344413445134461344713448134491345013451134521345313454134551345613457134581345913460134611346213463134641346513466134671346813469134701347113472134731347413475134761347713478134791348013481134821348313484134851348613487134881348913490134911349213493134941349513496134971349813499135001350113502135031350413505135061350713508135091351013511135121351313514135151351613517135181351913520135211352213523135241352513526135271352813529135301353113532135331353413535135361353713538135391354013541135421354313544135451354613547135481354913550135511355213553135541355513556135571355813559135601356113562135631356413565135661356713568135691357013571135721357313574135751357613577135781357913580135811358213583135841358513586135871358813589135901359113592135931359413595135961359713598135991360013601136021360313604136051360613607136081360913610136111361213613136141361513616136171361813619136201362113622136231362413625136261362713628136291363013631136321363313634136351363613637136381363913640136411364213643136441364513646136471364813649136501365113652136531365413655136561365713658136591366013661136621366313664136651366613667136681366913670136711367213673136741367513676136771367813679136801368113682136831368413685136861368713688136891369013691136921369313694136951369613697136981369913700137011370213703137041370513706137071370813709137101371113712137131371413715137161371713718137191372013721137221372313724137251372613727137281372913730137311373213733137341373513736137371373813739137401374113742137431374413745137461374713748137491375013751137521375313754137551375613757137581375913760137611376213763137641376513766137671376813769137701377113772137731377413775137761377713778137791378013781137821378313784137851378613787137881378913790137911379213793137941379513796137971379813799138001380113802138031380413805138061380713808138091381013811138121381313814138151381613817138181381913820138211
38221382313824138251382613827138281382913830138311383213833138341383513836138371383813839138401384113842138431384413845138461384713848138491385013851138521385313854138551385613857138581385913860138611386213863138641386513866138671386813869138701387113872138731387413875138761387713878138791388013881138821388313884138851388613887138881388913890138911389213893138941389513896138971389813899139001390113902139031390413905139061390713908139091391013911139121391313914139151391613917139181391913920139211392213923139241392513926139271392813929139301393113932139331393413935139361393713938139391394013941139421394313944139451394613947139481394913950139511395213953139541395513956139571395813959139601396113962139631396413965139661396713968139691397013971139721397313974139751397613977139781397913980139811398213983139841398513986139871398813989139901399113992139931399413995139961399713998139991400014001140021400314004140051400614007140081400914010140111401214013140141401514016140171401814019140201402114022140231402414025140261402714028140291403014031140321403314034140351403614037140381403914040140411404214043140441404514046140471404814049140501405114052140531405414055140561405714058140591406014061140621406314064140651406614067140681406914070140711407214073140741407514076140771407814079140801408114082140831408414085140861408714088140891409014091140921409314094140951409614097140981409914100141011410214103141041410514106141071410814109141101411114112141131411414115141161411714118141191412014121141221412314124141251412614127141281412914130141311413214133141341413514136141371413814139141401414114142141431414414145141461414714148141491415014151141521415314154141551415614157141581415914160141611416214163141641416514166141671416814169141701417114172141731417414175141761417714178141791418014181141821418314184141851418614187141881418914190141911419214193141941419514196141971419814199142001420114202142031420414205142061420714208142091421014211142121421314214142151421614217142181421914220142211
42221422314224142251422614227142281422914230142311423214233142341423514236142371423814239142401424114242142431424414245142461424714248142491425014251142521425314254142551425614257142581425914260142611426214263142641426514266142671426814269142701427114272142731427414275142761427714278142791428014281
  1. (function e(t,n,r){function s(o,u){if(!n[o]){if(!t[o]){var a=typeof require=="function"&&require;if(!u&&a)return a(o,!0);if(i)return i(o,!0);var f=new Error("Cannot find module '"+o+"'");throw f.code="MODULE_NOT_FOUND",f}var l=n[o]={exports:{}};t[o][0].call(l.exports,function(e){var n=t[o][1][e];return s(n?n:e)},l,l.exports,e,t,n,r)}return n[o].exports}var i=typeof require=="function"&&require;for(var o=0;o<r.length;o++)s(r[o]);return s})({1:[function(require,module,exports){
  2. },{}],2:[function(require,module,exports){
  3. (function (global){
  4. var topLevel = typeof global !== 'undefined' ? global :
  5. typeof window !== 'undefined' ? window : {}
  6. var minDoc = require('min-document');
  7. var doccy;
  8. if (typeof document !== 'undefined') {
  9. doccy = document;
  10. } else {
  11. doccy = topLevel['__GLOBAL_DOCUMENT_CACHE@4'];
  12. if (!doccy) {
  13. doccy = topLevel['__GLOBAL_DOCUMENT_CACHE@4'] = minDoc;
  14. }
  15. }
  16. module.exports = doccy;
  17. }).call(this,typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {})
  18. },{"min-document":1}],3:[function(require,module,exports){
  19. (function (global){
  20. var win;
  21. if (typeof window !== "undefined") {
  22. win = window;
  23. } else if (typeof global !== "undefined") {
  24. win = global;
  25. } else if (typeof self !== "undefined"){
  26. win = self;
  27. } else {
  28. win = {};
  29. }
  30. module.exports = win;
  31. }).call(this,typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {})
  32. },{}],4:[function(require,module,exports){
  33. /**
  34. * mux.js
  35. *
  36. * Copyright (c) 2016 Brightcove
  37. * All rights reserved.
  38. *
  39. * A stream-based aac to mp4 converter. This utility can be used to
  40. * deliver mp4s to a SourceBuffer on platforms that support native
  41. * Media Source Extensions.
  42. */
  43. 'use strict';
  44. var Stream = require('../utils/stream.js');
  45. // Constants
  46. var AacStream;
  47. /**
  48. * Splits an incoming stream of binary data into ADTS and ID3 Frames.
  49. */
  50. AacStream = function() {
  51. var
  52. everything = new Uint8Array(),
  53. timeStamp = 0;
  54. AacStream.prototype.init.call(this);
  55. this.setTimestamp = function(timestamp) {
  56. timeStamp = timestamp;
  57. };
  58. this.parseId3TagSize = function(header, byteIndex) {
  59. var
  60. returnSize = (header[byteIndex + 6] << 21) |
  61. (header[byteIndex + 7] << 14) |
  62. (header[byteIndex + 8] << 7) |
  63. (header[byteIndex + 9]),
  64. flags = header[byteIndex + 5],
  65. footerPresent = (flags & 16) >> 4;
  66. if (footerPresent) {
  67. return returnSize + 20;
  68. }
  69. return returnSize + 10;
  70. };
  71. this.parseAdtsSize = function(header, byteIndex) {
  72. var
  73. lowThree = (header[byteIndex + 5] & 0xE0) >> 5,
  74. middle = header[byteIndex + 4] << 3,
  75. highTwo = header[byteIndex + 3] & 0x3 << 11;
  76. return (highTwo | middle) | lowThree;
  77. };
  78. this.push = function(bytes) {
  79. var
  80. frameSize = 0,
  81. byteIndex = 0,
  82. bytesLeft,
  83. chunk,
  84. packet,
  85. tempLength;
  86. // If there are bytes remaining from the last segment, prepend them to the
  87. // bytes that were pushed in
  88. if (everything.length) {
  89. tempLength = everything.length;
  90. everything = new Uint8Array(bytes.byteLength + tempLength);
  91. everything.set(everything.subarray(0, tempLength));
  92. everything.set(bytes, tempLength);
  93. } else {
  94. everything = bytes;
  95. }
  96. while (everything.length - byteIndex >= 3) {
  97. if ((everything[byteIndex] === 'I'.charCodeAt(0)) &&
  98. (everything[byteIndex + 1] === 'D'.charCodeAt(0)) &&
  99. (everything[byteIndex + 2] === '3'.charCodeAt(0))) {
  100. // Exit early because we don't have enough to parse
  101. // the ID3 tag header
  102. if (everything.length - byteIndex < 10) {
  103. break;
  104. }
  105. // check framesize
  106. frameSize = this.parseId3TagSize(everything, byteIndex);
  107. // Exit early if we don't have enough in the buffer
  108. // to emit a full packet
  109. if (frameSize > everything.length) {
  110. break;
  111. }
  112. chunk = {
  113. type: 'timed-metadata',
  114. data: everything.subarray(byteIndex, byteIndex + frameSize)
  115. };
  116. this.trigger('data', chunk);
  117. byteIndex += frameSize;
  118. continue;
  119. } else if ((everything[byteIndex] & 0xff === 0xff) &&
  120. ((everything[byteIndex + 1] & 0xf0) === 0xf0)) {
  121. // Exit early because we don't have enough to parse
  122. // the ADTS frame header
  123. if (everything.length - byteIndex < 7) {
  124. break;
  125. }
  126. frameSize = this.parseAdtsSize(everything, byteIndex);
  127. // Exit early if we don't have enough in the buffer
  128. // to emit a full packet
  129. if (frameSize > everything.length) {
  130. break;
  131. }
  132. packet = {
  133. type: 'audio',
  134. data: everything.subarray(byteIndex, byteIndex + frameSize),
  135. pts: timeStamp,
  136. dts: timeStamp
  137. };
  138. this.trigger('data', packet);
  139. byteIndex += frameSize;
  140. continue;
  141. }
  142. byteIndex++;
  143. }
  144. bytesLeft = everything.length - byteIndex;
  145. if (bytesLeft > 0) {
  146. everything = everything.subarray(byteIndex);
  147. } else {
  148. everything = new Uint8Array();
  149. }
  150. };
  151. };
// inherit the event-emitter behavior (on/off/trigger/pipe) from Stream
AacStream.prototype = new Stream();
module.exports = AacStream;
  154. },{"../utils/stream.js":33}],5:[function(require,module,exports){
  155. /**
  156. * mux.js
  157. *
  158. * Copyright (c) 2016 Brightcove
  159. * All rights reserved.
  160. *
  161. * Utilities to detect basic properties and metadata about Aac data.
  162. */
'use strict';
// sampling frequencies in Hz, indexed by the 4-bit
// sampling_frequency_index field of the ADTS header
// (reserved/escape indices 13-15 are omitted)
var ADTS_SAMPLING_FREQUENCIES = [
  96000,
  88200,
  64000,
  48000,
  44100,
  32000,
  24000,
  22050,
  16000,
  12000,
  11025,
  8000,
  7350
];
  179. var parseSyncSafeInteger = function(data) {
  180. return (data[0] << 21) |
  181. (data[1] << 14) |
  182. (data[2] << 7) |
  183. (data[3]);
  184. };
  185. // return a percent-encoded representation of the specified byte range
  186. // @see http://en.wikipedia.org/wiki/Percent-encoding
  187. var percentEncode = function(bytes, start, end) {
  188. var i, result = '';
  189. for (i = start; i < end; i++) {
  190. result += '%' + ('00' + bytes[i].toString(16)).slice(-2);
  191. }
  192. return result;
  193. };
  194. // return the string representation of the specified byte range,
  195. // interpreted as ISO-8859-1.
  196. var parseIso88591 = function(bytes, start, end) {
  197. return unescape(percentEncode(bytes, start, end)); // jshint ignore:line
  198. };
  199. var parseId3TagSize = function(header, byteIndex) {
  200. var
  201. returnSize = (header[byteIndex + 6] << 21) |
  202. (header[byteIndex + 7] << 14) |
  203. (header[byteIndex + 8] << 7) |
  204. (header[byteIndex + 9]),
  205. flags = header[byteIndex + 5],
  206. footerPresent = (flags & 16) >> 4;
  207. if (footerPresent) {
  208. return returnSize + 20;
  209. }
  210. return returnSize + 10;
  211. };
  212. var parseAdtsSize = function(header, byteIndex) {
  213. var
  214. lowThree = (header[byteIndex + 5] & 0xE0) >> 5,
  215. middle = header[byteIndex + 4] << 3,
  216. highTwo = header[byteIndex + 3] & 0x3 << 11;
  217. return (highTwo | middle) | lowThree;
  218. };
  219. var parseType = function(header, byteIndex) {
  220. if ((header[byteIndex] === 'I'.charCodeAt(0)) &&
  221. (header[byteIndex + 1] === 'D'.charCodeAt(0)) &&
  222. (header[byteIndex + 2] === '3'.charCodeAt(0))) {
  223. return 'timed-metadata';
  224. } else if ((header[byteIndex] & 0xff === 0xff) &&
  225. ((header[byteIndex + 1] & 0xf0) === 0xf0)) {
  226. return 'audio';
  227. }
  228. return null;
  229. };
  230. var parseSampleRate = function(packet) {
  231. var i = 0;
  232. while (i + 5 < packet.length) {
  233. if (packet[i] !== 0xFF || (packet[i + 1] & 0xF6) !== 0xF0) {
  234. // If a valid header was not found, jump one forward and attempt to
  235. // find a valid ADTS header starting at the next byte
  236. i++;
  237. continue;
  238. }
  239. return ADTS_SAMPLING_FREQUENCIES[(packet[i + 2] & 0x3c) >>> 2];
  240. }
  241. return null;
  242. };
// Walk the frames of the ID3 tag in `packet` looking for a PRIV frame
// owned by 'com.apple.streaming.transportStreamTimestamp' and return the
// 33-bit timestamp stored in its payload, or null when no such frame is
// found or a frame is malformed.
var parseAacTimestamp = function(packet) {
  var frameStart, frameSize, frame, frameHeader;

  // find the start of the first frame and the end of the tag
  frameStart = 10;
  if (packet[5] & 0x40) {
    // advance the frame start past the extended header
    frameStart += 4; // header size field
    frameStart += parseSyncSafeInteger(packet.subarray(10, 14));
  }

  // parse one or more ID3 frames
  // http://id3.org/id3v2.3.0#ID3v2_frame_overview
  do {
    // determine the number of bytes in this frame
    frameSize = parseSyncSafeInteger(packet.subarray(frameStart + 4, frameStart + 8));
    if (frameSize < 1) {
      // malformed frame; bail out rather than loop forever
      return null;
    }
    // the 4-character frame id precedes the size field
    frameHeader = String.fromCharCode(packet[frameStart],
      packet[frameStart + 1],
      packet[frameStart + 2],
      packet[frameStart + 3]);

    if (frameHeader === 'PRIV') {
      frame = packet.subarray(frameStart + 10, frameStart + frameSize + 10);

      // the PRIV payload is a NUL-terminated owner string followed by data
      for (var i = 0; i < frame.byteLength; i++) {
        if (frame[i] === 0) {
          var owner = parseIso88591(frame, 0, i);
          if (owner === 'com.apple.streaming.transportStreamTimestamp') {
            var d = frame.subarray(i + 1);
            // reassemble the timestamp: the upper 31 bits via bitwise
            // ops, then multiply by 4 and add the low 2 bits so the
            // 33-bit value is not truncated by 32-bit bitwise operators
            var size = ((d[3] & 0x01)  << 30) |
              (d[4] << 22) |
              (d[5] << 14) |
              (d[6] << 6) |
              (d[7] >>> 2);
            size *= 4;
            size += d[7] & 0x03;
            return size;
          }
          break;
        }
      }
    }

    frameStart += 10; // advance past the frame header
    frameStart += frameSize; // advance past the frame body
  } while (frameStart < packet.byteLength);
  return null;
};
// public helpers for probing raw AAC/ADTS byte streams
module.exports = {
  parseId3TagSize: parseId3TagSize,
  parseAdtsSize: parseAdtsSize,
  parseType: parseType,
  parseSampleRate: parseSampleRate,
  parseAacTimestamp: parseAacTimestamp
};
  296. },{}],6:[function(require,module,exports){
'use strict';

var Stream = require('../utils/stream.js');

var AdtsStream;

// sampling frequencies in Hz, indexed by the ADTS
// sampling_frequency_index field (same table as the aac utils module)
var
  ADTS_SAMPLING_FREQUENCIES = [
    96000,
    88200,
    64000,
    48000,
    44100,
    32000,
    24000,
    22050,
    16000,
    12000,
    11025,
    8000,
    7350
  ];
  316. /*
  317. * Accepts a ElementaryStream and emits data events with parsed
  318. * AAC Audio Frames of the individual packets. Input audio in ADTS
  319. * format is unpacked and re-emitted as AAC frames.
  320. *
  321. * @see http://wiki.multimedia.cx/index.php?title=ADTS
  322. * @see http://wiki.multimedia.cx/?title=Understanding_AAC
  323. */
  324. AdtsStream = function() {
  325. var buffer;
  326. AdtsStream.prototype.init.call(this);
  327. this.push = function(packet) {
  328. var
  329. i = 0,
  330. frameNum = 0,
  331. frameLength,
  332. protectionSkipBytes,
  333. frameEnd,
  334. oldBuffer,
  335. sampleCount,
  336. adtsFrameDuration;
  337. if (packet.type !== 'audio') {
  338. // ignore non-audio data
  339. return;
  340. }
  341. // Prepend any data in the buffer to the input data so that we can parse
  342. // aac frames the cross a PES packet boundary
  343. if (buffer) {
  344. oldBuffer = buffer;
  345. buffer = new Uint8Array(oldBuffer.byteLength + packet.data.byteLength);
  346. buffer.set(oldBuffer);
  347. buffer.set(packet.data, oldBuffer.byteLength);
  348. } else {
  349. buffer = packet.data;
  350. }
  351. // unpack any ADTS frames which have been fully received
  352. // for details on the ADTS header, see http://wiki.multimedia.cx/index.php?title=ADTS
  353. while (i + 5 < buffer.length) {
  354. // Loook for the start of an ADTS header..
  355. if (buffer[i] !== 0xFF || (buffer[i + 1] & 0xF6) !== 0xF0) {
  356. // If a valid header was not found, jump one forward and attempt to
  357. // find a valid ADTS header starting at the next byte
  358. i++;
  359. continue;
  360. }
  361. // The protection skip bit tells us if we have 2 bytes of CRC data at the
  362. // end of the ADTS header
  363. protectionSkipBytes = (~buffer[i + 1] & 0x01) * 2;
  364. // Frame length is a 13 bit integer starting 16 bits from the
  365. // end of the sync sequence
  366. frameLength = ((buffer[i + 3] & 0x03) << 11) |
  367. (buffer[i + 4] << 3) |
  368. ((buffer[i + 5] & 0xe0) >> 5);
  369. sampleCount = ((buffer[i + 6] & 0x03) + 1) * 1024;
  370. adtsFrameDuration = (sampleCount * 90000) /
  371. ADTS_SAMPLING_FREQUENCIES[(buffer[i + 2] & 0x3c) >>> 2];
  372. frameEnd = i + frameLength;
  373. // If we don't have enough data to actually finish this ADTS frame, return
  374. // and wait for more data
  375. if (buffer.byteLength < frameEnd) {
  376. return;
  377. }
  378. // Otherwise, deliver the complete AAC frame
  379. this.trigger('data', {
  380. pts: packet.pts + (frameNum * adtsFrameDuration),
  381. dts: packet.dts + (frameNum * adtsFrameDuration),
  382. sampleCount: sampleCount,
  383. audioobjecttype: ((buffer[i + 2] >>> 6) & 0x03) + 1,
  384. channelcount: ((buffer[i + 2] & 1) << 2) |
  385. ((buffer[i + 3] & 0xc0) >>> 6),
  386. samplerate: ADTS_SAMPLING_FREQUENCIES[(buffer[i + 2] & 0x3c) >>> 2],
  387. samplingfrequencyindex: (buffer[i + 2] & 0x3c) >>> 2,
  388. // assume ISO/IEC 14496-12 AudioSampleEntry default of 16
  389. samplesize: 16,
  390. data: buffer.subarray(i + 7 + protectionSkipBytes, frameEnd)
  391. });
  392. // If the buffer is empty, clear it and return
  393. if (buffer.byteLength === frameEnd) {
  394. buffer = undefined;
  395. return;
  396. }
  397. frameNum++;
  398. // Remove the finished frame from the buffer and start the process again
  399. buffer = buffer.subarray(frameEnd);
  400. }
  401. };
  402. this.flush = function() {
  403. this.trigger('done');
  404. };
  405. };
  406. AdtsStream.prototype = new Stream();
  407. module.exports = AdtsStream;
  408. },{"../utils/stream.js":33}],7:[function(require,module,exports){
  409. 'use strict';
  410. var Stream = require('../utils/stream.js');
  411. var ExpGolomb = require('../utils/exp-golomb.js');
  412. var H264Stream, NalByteStream;
  413. var PROFILES_WITH_OPTIONAL_SPS_DATA;
  414. /**
  415. * Accepts a NAL unit byte stream and unpacks the embedded NAL units.
  416. */
  417. NalByteStream = function() {
  418. var
  419. syncPoint = 0,
  420. i,
  421. buffer;
  422. NalByteStream.prototype.init.call(this);
  423. this.push = function(data) {
  424. var swapBuffer;
  425. if (!buffer) {
  426. buffer = data.data;
  427. } else {
  428. swapBuffer = new Uint8Array(buffer.byteLength + data.data.byteLength);
  429. swapBuffer.set(buffer);
  430. swapBuffer.set(data.data, buffer.byteLength);
  431. buffer = swapBuffer;
  432. }
  433. // Rec. ITU-T H.264, Annex B
  434. // scan for NAL unit boundaries
  435. // a match looks like this:
  436. // 0 0 1 .. NAL .. 0 0 1
  437. // ^ sync point ^ i
  438. // or this:
  439. // 0 0 1 .. NAL .. 0 0 0
  440. // ^ sync point ^ i
  441. // advance the sync point to a NAL start, if necessary
  442. for (; syncPoint < buffer.byteLength - 3; syncPoint++) {
  443. if (buffer[syncPoint + 2] === 1) {
  444. // the sync point is properly aligned
  445. i = syncPoint + 5;
  446. break;
  447. }
  448. }
  449. while (i < buffer.byteLength) {
  450. // look at the current byte to determine if we've hit the end of
  451. // a NAL unit boundary
  452. switch (buffer[i]) {
  453. case 0:
  454. // skip past non-sync sequences
  455. if (buffer[i - 1] !== 0) {
  456. i += 2;
  457. break;
  458. } else if (buffer[i - 2] !== 0) {
  459. i++;
  460. break;
  461. }
  462. // deliver the NAL unit if it isn't empty
  463. if (syncPoint + 3 !== i - 2) {
  464. this.trigger('data', buffer.subarray(syncPoint + 3, i - 2));
  465. }
  466. // drop trailing zeroes
  467. do {
  468. i++;
  469. } while (buffer[i] !== 1 && i < buffer.length);
  470. syncPoint = i - 2;
  471. i += 3;
  472. break;
  473. case 1:
  474. // skip past non-sync sequences
  475. if (buffer[i - 1] !== 0 ||
  476. buffer[i - 2] !== 0) {
  477. i += 3;
  478. break;
  479. }
  480. // deliver the NAL unit
  481. this.trigger('data', buffer.subarray(syncPoint + 3, i - 2));
  482. syncPoint = i - 2;
  483. i += 3;
  484. break;
  485. default:
  486. // the current byte isn't a one or zero, so it cannot be part
  487. // of a sync sequence
  488. i += 3;
  489. break;
  490. }
  491. }
  492. // filter out the NAL units that were delivered
  493. buffer = buffer.subarray(syncPoint);
  494. i -= syncPoint;
  495. syncPoint = 0;
  496. };
  497. this.flush = function() {
  498. // deliver the last buffered NAL unit
  499. if (buffer && buffer.byteLength > 3) {
  500. this.trigger('data', buffer.subarray(syncPoint + 3));
  501. }
  502. // reset the stream state
  503. buffer = null;
  504. syncPoint = 0;
  505. this.trigger('done');
  506. };
  507. };
  508. NalByteStream.prototype = new Stream();
// values of profile_idc that indicate additional fields are included in the SPS
// see Recommendation ITU-T H.264 (4/2013),
// 7.3.2.1.1 Sequence parameter set data syntax
// (used as a truthy lookup table by readSequenceParameterSet below)
PROFILES_WITH_OPTIONAL_SPS_DATA = {
  100: true,
  110: true,
  122: true,
  244: true,
  44: true,
  83: true,
  86: true,
  118: true,
  128: true,
  138: true,
  139: true,
  134: true
};
  526. /**
  527. * Accepts input from a ElementaryStream and produces H.264 NAL unit data
  528. * events.
  529. */
  530. H264Stream = function() {
  531. var
  532. nalByteStream = new NalByteStream(),
  533. self,
  534. trackId,
  535. currentPts,
  536. currentDts,
  537. discardEmulationPreventionBytes,
  538. readSequenceParameterSet,
  539. skipScalingList;
  540. H264Stream.prototype.init.call(this);
  541. self = this;
  542. this.push = function(packet) {
  543. if (packet.type !== 'video') {
  544. return;
  545. }
  546. trackId = packet.trackId;
  547. currentPts = packet.pts;
  548. currentDts = packet.dts;
  549. nalByteStream.push(packet);
  550. };
  551. nalByteStream.on('data', function(data) {
  552. var
  553. event = {
  554. trackId: trackId,
  555. pts: currentPts,
  556. dts: currentDts,
  557. data: data
  558. };
  559. switch (data[0] & 0x1f) {
  560. case 0x05:
  561. event.nalUnitType = 'slice_layer_without_partitioning_rbsp_idr';
  562. break;
  563. case 0x06:
  564. event.nalUnitType = 'sei_rbsp';
  565. event.escapedRBSP = discardEmulationPreventionBytes(data.subarray(1));
  566. break;
  567. case 0x07:
  568. event.nalUnitType = 'seq_parameter_set_rbsp';
  569. event.escapedRBSP = discardEmulationPreventionBytes(data.subarray(1));
  570. event.config = readSequenceParameterSet(event.escapedRBSP);
  571. break;
  572. case 0x08:
  573. event.nalUnitType = 'pic_parameter_set_rbsp';
  574. break;
  575. case 0x09:
  576. event.nalUnitType = 'access_unit_delimiter_rbsp';
  577. break;
  578. default:
  579. break;
  580. }
  581. self.trigger('data', event);
  582. });
  583. nalByteStream.on('done', function() {
  584. self.trigger('done');
  585. });
  586. this.flush = function() {
  587. nalByteStream.flush();
  588. };
  589. /**
  590. * Advance the ExpGolomb decoder past a scaling list. The scaling
  591. * list is optionally transmitted as part of a sequence parameter
  592. * set and is not relevant to transmuxing.
  593. * @param count {number} the number of entries in this scaling list
  594. * @param expGolombDecoder {object} an ExpGolomb pointed to the
  595. * start of a scaling list
  596. * @see Recommendation ITU-T H.264, Section 7.3.2.1.1.1
  597. */
  598. skipScalingList = function(count, expGolombDecoder) {
  599. var
  600. lastScale = 8,
  601. nextScale = 8,
  602. j,
  603. deltaScale;
  604. for (j = 0; j < count; j++) {
  605. if (nextScale !== 0) {
  606. deltaScale = expGolombDecoder.readExpGolomb();
  607. nextScale = (lastScale + deltaScale + 256) % 256;
  608. }
  609. lastScale = (nextScale === 0) ? lastScale : nextScale;
  610. }
  611. };
  612. /**
  613. * Expunge any "Emulation Prevention" bytes from a "Raw Byte
  614. * Sequence Payload"
  615. * @param data {Uint8Array} the bytes of a RBSP from a NAL
  616. * unit
  617. * @return {Uint8Array} the RBSP without any Emulation
  618. * Prevention Bytes
  619. */
  620. discardEmulationPreventionBytes = function(data) {
  621. var
  622. length = data.byteLength,
  623. emulationPreventionBytesPositions = [],
  624. i = 1,
  625. newLength, newData;
  626. // Find all `Emulation Prevention Bytes`
  627. while (i < length - 2) {
  628. if (data[i] === 0 && data[i + 1] === 0 && data[i + 2] === 0x03) {
  629. emulationPreventionBytesPositions.push(i + 2);
  630. i += 2;
  631. } else {
  632. i++;
  633. }
  634. }
  635. // If no Emulation Prevention Bytes were found just return the original
  636. // array
  637. if (emulationPreventionBytesPositions.length === 0) {
  638. return data;
  639. }
  640. // Create a new array to hold the NAL unit data
  641. newLength = length - emulationPreventionBytesPositions.length;
  642. newData = new Uint8Array(newLength);
  643. var sourceIndex = 0;
  644. for (i = 0; i < newLength; sourceIndex++, i++) {
  645. if (sourceIndex === emulationPreventionBytesPositions[0]) {
  646. // Skip this byte
  647. sourceIndex++;
  648. // Remove this position index
  649. emulationPreventionBytesPositions.shift();
  650. }
  651. newData[i] = data[sourceIndex];
  652. }
  653. return newData;
  654. };
  655. /**
  656. * Read a sequence parameter set and return some interesting video
  657. * properties. A sequence parameter set is the H264 metadata that
  658. * describes the properties of upcoming video frames.
  659. * @param data {Uint8Array} the bytes of a sequence parameter set
  660. * @return {object} an object with configuration parsed from the
  661. * sequence parameter set, including the dimensions of the
  662. * associated video frames.
  663. */
  664. readSequenceParameterSet = function(data) {
  665. var
  666. frameCropLeftOffset = 0,
  667. frameCropRightOffset = 0,
  668. frameCropTopOffset = 0,
  669. frameCropBottomOffset = 0,
  670. sarScale = 1,
  671. expGolombDecoder, profileIdc, levelIdc, profileCompatibility,
  672. chromaFormatIdc, picOrderCntType,
  673. numRefFramesInPicOrderCntCycle, picWidthInMbsMinus1,
  674. picHeightInMapUnitsMinus1,
  675. frameMbsOnlyFlag,
  676. scalingListCount,
  677. sarRatio,
  678. aspectRatioIdc,
  679. i;
  680. expGolombDecoder = new ExpGolomb(data);
  681. profileIdc = expGolombDecoder.readUnsignedByte(); // profile_idc
  682. profileCompatibility = expGolombDecoder.readUnsignedByte(); // constraint_set[0-5]_flag
  683. levelIdc = expGolombDecoder.readUnsignedByte(); // level_idc u(8)
  684. expGolombDecoder.skipUnsignedExpGolomb(); // seq_parameter_set_id
  685. // some profiles have more optional data we don't need
  686. if (PROFILES_WITH_OPTIONAL_SPS_DATA[profileIdc]) {
  687. chromaFormatIdc = expGolombDecoder.readUnsignedExpGolomb();
  688. if (chromaFormatIdc === 3) {
  689. expGolombDecoder.skipBits(1); // separate_colour_plane_flag
  690. }
  691. expGolombDecoder.skipUnsignedExpGolomb(); // bit_depth_luma_minus8
  692. expGolombDecoder.skipUnsignedExpGolomb(); // bit_depth_chroma_minus8
  693. expGolombDecoder.skipBits(1); // qpprime_y_zero_transform_bypass_flag
  694. if (expGolombDecoder.readBoolean()) { // seq_scaling_matrix_present_flag
  695. scalingListCount = (chromaFormatIdc !== 3) ? 8 : 12;
  696. for (i = 0; i < scalingListCount; i++) {
  697. if (expGolombDecoder.readBoolean()) { // seq_scaling_list_present_flag[ i ]
  698. if (i < 6) {
  699. skipScalingList(16, expGolombDecoder);
  700. } else {
  701. skipScalingList(64, expGolombDecoder);
  702. }
  703. }
  704. }
  705. }
  706. }
  707. expGolombDecoder.skipUnsignedExpGolomb(); // log2_max_frame_num_minus4
  708. picOrderCntType = expGolombDecoder.readUnsignedExpGolomb();
  709. if (picOrderCntType === 0) {
  710. expGolombDecoder.readUnsignedExpGolomb(); // log2_max_pic_order_cnt_lsb_minus4
  711. } else if (picOrderCntType === 1) {
  712. expGolombDecoder.skipBits(1); // delta_pic_order_always_zero_flag
  713. expGolombDecoder.skipExpGolomb(); // offset_for_non_ref_pic
  714. expGolombDecoder.skipExpGolomb(); // offset_for_top_to_bottom_field
  715. numRefFramesInPicOrderCntCycle = expGolombDecoder.readUnsignedExpGolomb();
  716. for (i = 0; i < numRefFramesInPicOrderCntCycle; i++) {
  717. expGolombDecoder.skipExpGolomb(); // offset_for_ref_frame[ i ]
  718. }
  719. }
  720. expGolombDecoder.skipUnsignedExpGolomb(); // max_num_ref_frames
  721. expGolombDecoder.skipBits(1); // gaps_in_frame_num_value_allowed_flag
  722. picWidthInMbsMinus1 = expGolombDecoder.readUnsignedExpGolomb();
  723. picHeightInMapUnitsMinus1 = expGolombDecoder.readUnsignedExpGolomb();
  724. frameMbsOnlyFlag = expGolombDecoder.readBits(1);
  725. if (frameMbsOnlyFlag === 0) {
  726. expGolombDecoder.skipBits(1); // mb_adaptive_frame_field_flag
  727. }
  728. expGolombDecoder.skipBits(1); // direct_8x8_inference_flag
  729. if (expGolombDecoder.readBoolean()) { // frame_cropping_flag
  730. frameCropLeftOffset = expGolombDecoder.readUnsignedExpGolomb();
  731. frameCropRightOffset = expGolombDecoder.readUnsignedExpGolomb();
  732. frameCropTopOffset = expGolombDecoder.readUnsignedExpGolomb();
  733. frameCropBottomOffset = expGolombDecoder.readUnsignedExpGolomb();
  734. }
  735. if (expGolombDecoder.readBoolean()) {
  736. // vui_parameters_present_flag
  737. if (expGolombDecoder.readBoolean()) {
  738. // aspect_ratio_info_present_flag
  739. aspectRatioIdc = expGolombDecoder.readUnsignedByte();
  740. switch (aspectRatioIdc) {
  741. case 1: sarRatio = [1, 1]; break;
  742. case 2: sarRatio = [12, 11]; break;
  743. case 3: sarRatio = [10, 11]; break;
  744. case 4: sarRatio = [16, 11]; break;
  745. case 5: sarRatio = [40, 33]; break;
  746. case 6: sarRatio = [24, 11]; break;
  747. case 7: sarRatio = [20, 11]; break;
  748. case 8: sarRatio = [32, 11]; break;
  749. case 9: sarRatio = [80, 33]; break;
  750. case 10: sarRatio = [18, 11]; break;
  751. case 11: sarRatio = [15, 11]; break;
  752. case 12: sarRatio = [64, 33]; break;
  753. case 13: sarRatio = [160, 99]; break;
  754. case 14: sarRatio = [4, 3]; break;
  755. case 15: sarRatio = [3, 2]; break;
  756. case 16: sarRatio = [2, 1]; break;
  757. case 255: {
  758. sarRatio = [expGolombDecoder.readUnsignedByte() << 8 |
  759. expGolombDecoder.readUnsignedByte(),
  760. expGolombDecoder.readUnsignedByte() << 8 |
  761. expGolombDecoder.readUnsignedByte() ];
  762. break;
  763. }
  764. }
  765. if (sarRatio) {
  766. sarScale = sarRatio[0] / sarRatio[1];
  767. }
  768. }
  769. }
  770. return {
  771. profileIdc: profileIdc,
  772. levelIdc: levelIdc,
  773. profileCompatibility: profileCompatibility,
  774. width: Math.ceil((((picWidthInMbsMinus1 + 1) * 16) - frameCropLeftOffset * 2 - frameCropRightOffset * 2) * sarScale),
  775. height: ((2 - frameMbsOnlyFlag) * (picHeightInMapUnitsMinus1 + 1) * 16) - (frameCropTopOffset * 2) - (frameCropBottomOffset * 2)
  776. };
  777. };
  778. };
  779. H264Stream.prototype = new Stream();
  780. module.exports = {
  781. H264Stream: H264Stream,
  782. NalByteStream: NalByteStream
  783. };
  784. },{"../utils/exp-golomb.js":32,"../utils/stream.js":33}],8:[function(require,module,exports){
// roll-up of the codec parsing streams
module.exports = {
  adts: require('./adts'),
  h264: require('./h264')
};
  789. },{"./adts":6,"./h264":7}],9:[function(require,module,exports){
// byte prefixes shared by the pre-encoded AAC silence frames below;
// `highPrefix` is used by the higher sampling-rate entries and
// `lowPrefix` by the lower ones (see coneOfSilence)
var highPrefix = [33, 16, 5, 32, 164, 27];
var lowPrefix = [33, 65, 108, 84, 1, 2, 4, 8, 168, 2, 4, 8, 17, 191, 252];
  792. var zeroFill = function(count) {
  793. var a = [];
  794. while (count--) {
  795. a.push(0);
  796. }
  797. return a;
  798. };
  799. var makeTable = function(metaTable) {
  800. return Object.keys(metaTable).reduce(function(obj, key) {
  801. obj[key] = new Uint8Array(metaTable[key].reduce(function(arr, part) {
  802. return arr.concat(part);
  803. }, []));
  804. return obj;
  805. }, {});
  806. };
// Frames-of-silence to use for filling in missing AAC frames,
// keyed by sampling frequency in Hz; each value is a list of byte
// chunks that makeTable flattens into a single Uint8Array
var coneOfSilence = {
  96000: [highPrefix, [227, 64], zeroFill(154), [56]],
  88200: [highPrefix, [231], zeroFill(170), [56]],
  64000: [highPrefix, [248, 192], zeroFill(240), [56]],
  48000: [highPrefix, [255, 192], zeroFill(268), [55, 148, 128], zeroFill(54), [112]],
  44100: [highPrefix, [255, 192], zeroFill(268), [55, 163, 128], zeroFill(84), [112]],
  32000: [highPrefix, [255, 192], zeroFill(268), [55, 234], zeroFill(226), [112]],
  24000: [highPrefix, [255, 192], zeroFill(268), [55, 255, 128], zeroFill(268), [111, 112], zeroFill(126), [224]],
  16000: [highPrefix, [255, 192], zeroFill(268), [55, 255, 128], zeroFill(268), [111, 255], zeroFill(269), [223, 108], zeroFill(195), [1, 192]],
  12000: [lowPrefix, zeroFill(268), [3, 127, 248], zeroFill(268), [6, 255, 240], zeroFill(268), [13, 255, 224], zeroFill(268), [27, 253, 128], zeroFill(259), [56]],
  11025: [lowPrefix, zeroFill(268), [3, 127, 248], zeroFill(268), [6, 255, 240], zeroFill(268), [13, 255, 224], zeroFill(268), [27, 255, 192], zeroFill(268), [55, 175, 128], zeroFill(108), [112]],
  8000: [lowPrefix, zeroFill(268), [3, 121, 16], zeroFill(47), [7]]
};
// export the flattened Uint8Array table
module.exports = makeTable(coneOfSilence);
  822. },{}],10:[function(require,module,exports){
  823. 'use strict';
  824. var Stream = require('../utils/stream.js');
  825. /**
  826. * The final stage of the transmuxer that emits the flv tags
  827. * for audio, video, and metadata. Also tranlates in time and
  828. * outputs caption data and id3 cues.
  829. */
  830. var CoalesceStream = function(options) {
  831. // Number of Tracks per output segment
  832. // If greater than 1, we combine multiple
  833. // tracks into a single segment
  834. this.numberOfTracks = 0;
  835. this.metadataStream = options.metadataStream;
  836. this.videoTags = [];
  837. this.audioTags = [];
  838. this.videoTrack = null;
  839. this.audioTrack = null;
  840. this.pendingCaptions = [];
  841. this.pendingMetadata = [];
  842. this.pendingTracks = 0;
  843. this.processedTracks = 0;
  844. CoalesceStream.prototype.init.call(this);
  845. // Take output from multiple
  846. this.push = function(output) {
  847. // buffer incoming captions until the associated video segment
  848. // finishes
  849. if (output.text) {
  850. return this.pendingCaptions.push(output);
  851. }
  852. // buffer incoming id3 tags until the final flush
  853. if (output.frames) {
  854. return this.pendingMetadata.push(output);
  855. }
  856. if (output.track.type === 'video') {
  857. this.videoTrack = output.track;
  858. this.videoTags = output.tags;
  859. this.pendingTracks++;
  860. }
  861. if (output.track.type === 'audio') {
  862. this.audioTrack = output.track;
  863. this.audioTags = output.tags;
  864. this.pendingTracks++;
  865. }
  866. };
  867. };
// inherit the event-emitter behavior (on/off/trigger/pipe) from Stream
CoalesceStream.prototype = new Stream();

/**
 * Assemble all buffered tags, captions, and ID3 metadata into one
 * 'data' event once every expected track has reported in, then reset.
 * @param flushSource {string} name of the stream that triggered this
 *   flush; used to distinguish data-generating streams from
 *   caption/metadata-only streams
 */
CoalesceStream.prototype.flush = function(flushSource) {
  var
    id3,
    caption,
    i,
    timelineStartPts,
    event = {
      tags: {},
      captions: [],
      captionStreams: {},
      metadata: []
    };

  if (this.pendingTracks < this.numberOfTracks) {
    if (flushSource !== 'VideoSegmentStream' &&
        flushSource !== 'AudioSegmentStream') {
      // Return because we haven't received a flush from a data-generating
      // portion of the segment (meaning that we have only received meta-data
      // or captions.)
      return;
    } else if (this.pendingTracks === 0) {
      // In the case where we receive a flush without any data having been
      // received we consider it an emitted track for the purposes of coalescing
      // `done` events.
      // We do this for the case where there is an audio and video track in the
      // segment but no audio data. (seen in several playlists with alternate
      // audio tracks and no audio present in the main TS segments.)
      this.processedTracks++;

      if (this.processedTracks < this.numberOfTracks) {
        return;
      }
    }
  }

  this.processedTracks += this.pendingTracks;
  this.pendingTracks = 0;

  if (this.processedTracks < this.numberOfTracks) {
    return;
  }

  // NOTE(review): if neither track is set, timelineStartPts stays
  // undefined and the caption/id3 offsets below become NaN -- confirm
  // upstream guarantees at least one track when captions are pending
  if (this.videoTrack) {
    timelineStartPts = this.videoTrack.timelineStartInfo.pts;
  } else if (this.audioTrack) {
    timelineStartPts = this.audioTrack.timelineStartInfo.pts;
  }

  event.tags.videoTags = this.videoTags;
  event.tags.audioTags = this.audioTags;

  // Translate caption PTS times into second offsets into the
  // video timeline for the segment, and add track info
  for (i = 0; i < this.pendingCaptions.length; i++) {
    caption = this.pendingCaptions[i];
    caption.startTime = caption.startPts - timelineStartPts;
    caption.startTime /= 90e3;
    caption.endTime = caption.endPts - timelineStartPts;
    caption.endTime /= 90e3;
    event.captionStreams[caption.stream] = true;
    event.captions.push(caption);
  }

  // Translate ID3 frame PTS times into second offsets into the
  // video timeline for the segment
  for (i = 0; i < this.pendingMetadata.length; i++) {
    id3 = this.pendingMetadata[i];
    id3.cueTime = id3.pts - timelineStartPts;
    id3.cueTime /= 90e3;
    event.metadata.push(id3);
  }
  // We add this to every single emitted segment even though we only need
  // it for the first
  event.metadata.dispatchType = this.metadataStream.dispatchType;

  // Reset stream state
  this.videoTrack = null;
  this.audioTrack = null;
  this.videoTags = [];
  this.audioTags = [];
  this.pendingCaptions.length = 0;
  this.pendingMetadata.length = 0;
  this.pendingTracks = 0;
  this.processedTracks = 0;

  // Emit the final segment
  this.trigger('data', event);

  this.trigger('done');
};

module.exports = CoalesceStream;
  949. },{"../utils/stream.js":33}],11:[function(require,module,exports){
  950. 'use strict';
  951. var FlvTag = require('./flv-tag.js');
  952. // For information on the FLV format, see
  953. // http://download.macromedia.com/f4v/video_file_format_spec_v10_1.pdf.
  954. // Technically, this function returns the header and a metadata FLV tag
  955. // if duration is greater than zero
  956. // duration in seconds
  957. // @return {object} the bytes of the FLV header as a Uint8Array
  958. var getFlvHeader = function(duration, audio, video) { // :ByteArray {
  959. var
  960. headBytes = new Uint8Array(3 + 1 + 1 + 4),
  961. head = new DataView(headBytes.buffer),
  962. metadata,
  963. result,
  964. metadataLength;
  965. // default arguments
  966. duration = duration || 0;
  967. audio = audio === undefined ? true : audio;
  968. video = video === undefined ? true : video;
  969. // signature
  970. head.setUint8(0, 0x46); // 'F'
  971. head.setUint8(1, 0x4c); // 'L'
  972. head.setUint8(2, 0x56); // 'V'
  973. // version
  974. head.setUint8(3, 0x01);
  975. // flags
  976. head.setUint8(4, (audio ? 0x04 : 0x00) | (video ? 0x01 : 0x00));
  977. // data offset, should be 9 for FLV v1
  978. head.setUint32(5, headBytes.byteLength);
  979. // init the first FLV tag
  980. if (duration <= 0) {
  981. // no duration available so just write the first field of the first
  982. // FLV tag
  983. result = new Uint8Array(headBytes.byteLength + 4);
  984. result.set(headBytes);
  985. result.set([0, 0, 0, 0], headBytes.byteLength);
  986. return result;
  987. }
  988. // write out the duration metadata tag
  989. metadata = new FlvTag(FlvTag.METADATA_TAG);
  990. metadata.pts = metadata.dts = 0;
  991. metadata.writeMetaDataDouble('duration', duration);
  992. metadataLength = metadata.finalize().length;
  993. result = new Uint8Array(headBytes.byteLength + metadataLength);
  994. result.set(headBytes);
  995. result.set(head.byteLength, metadataLength);
  996. return result;
  997. };
  998. module.exports = getFlvHeader;
  999. },{"./flv-tag.js":12}],12:[function(require,module,exports){
  1000. /**
  1001. * An object that stores the bytes of an FLV tag and methods for
  1002. * querying and manipulating that data.
  1003. * @see http://download.macromedia.com/f4v/video_file_format_spec_v10_1.pdf
  1004. */
  1005. 'use strict';
  1006. var FlvTag;
  1007. // (type:uint, extraData:Boolean = false) extends ByteArray
  1008. FlvTag = function(type, extraData) {
  1009. var
  1010. // Counter if this is a metadata tag, nal start marker if this is a video
  1011. // tag. unused if this is an audio tag
  1012. adHoc = 0, // :uint
  1013. // The default size is 16kb but this is not enough to hold iframe
  1014. // data and the resizing algorithm costs a bit so we create a larger
  1015. // starting buffer for video tags
  1016. bufferStartSize = 16384,
  1017. // checks whether the FLV tag has enough capacity to accept the proposed
  1018. // write and re-allocates the internal buffers if necessary
  1019. prepareWrite = function(flv, count) {
  1020. var
  1021. bytes,
  1022. minLength = flv.position + count;
  1023. if (minLength < flv.bytes.byteLength) {
  1024. // there's enough capacity so do nothing
  1025. return;
  1026. }
  1027. // allocate a new buffer and copy over the data that will not be modified
  1028. bytes = new Uint8Array(minLength * 2);
  1029. bytes.set(flv.bytes.subarray(0, flv.position), 0);
  1030. flv.bytes = bytes;
  1031. flv.view = new DataView(flv.bytes.buffer);
  1032. },
  1033. // commonly used metadata properties
  1034. widthBytes = FlvTag.widthBytes || new Uint8Array('width'.length),
  1035. heightBytes = FlvTag.heightBytes || new Uint8Array('height'.length),
  1036. videocodecidBytes = FlvTag.videocodecidBytes || new Uint8Array('videocodecid'.length),
  1037. i;
  1038. if (!FlvTag.widthBytes) {
  1039. // calculating the bytes of common metadata names ahead of time makes the
  1040. // corresponding writes faster because we don't have to loop over the
  1041. // characters
  1042. // re-test with test/perf.html if you're planning on changing this
  1043. for (i = 0; i < 'width'.length; i++) {
  1044. widthBytes[i] = 'width'.charCodeAt(i);
  1045. }
  1046. for (i = 0; i < 'height'.length; i++) {
  1047. heightBytes[i] = 'height'.charCodeAt(i);
  1048. }
  1049. for (i = 0; i < 'videocodecid'.length; i++) {
  1050. videocodecidBytes[i] = 'videocodecid'.charCodeAt(i);
  1051. }
  1052. FlvTag.widthBytes = widthBytes;
  1053. FlvTag.heightBytes = heightBytes;
  1054. FlvTag.videocodecidBytes = videocodecidBytes;
  1055. }
  1056. this.keyFrame = false; // :Boolean
  1057. switch (type) {
  1058. case FlvTag.VIDEO_TAG:
  1059. this.length = 16;
  1060. // Start the buffer at 256k
  1061. bufferStartSize *= 6;
  1062. break;
  1063. case FlvTag.AUDIO_TAG:
  1064. this.length = 13;
  1065. this.keyFrame = true;
  1066. break;
  1067. case FlvTag.METADATA_TAG:
  1068. this.length = 29;
  1069. this.keyFrame = true;
  1070. break;
  1071. default:
  1072. throw new Error('Unknown FLV tag type');
  1073. }
  1074. this.bytes = new Uint8Array(bufferStartSize);
  1075. this.view = new DataView(this.bytes.buffer);
  1076. this.bytes[0] = type;
  1077. this.position = this.length;
  1078. this.keyFrame = extraData; // Defaults to false
  1079. // presentation timestamp
  1080. this.pts = 0;
  1081. // decoder timestamp
  1082. this.dts = 0;
  1083. // ByteArray#writeBytes(bytes:ByteArray, offset:uint = 0, length:uint = 0)
  1084. this.writeBytes = function(bytes, offset, length) {
  1085. var
  1086. start = offset || 0,
  1087. end;
  1088. length = length || bytes.byteLength;
  1089. end = start + length;
  1090. prepareWrite(this, length);
  1091. this.bytes.set(bytes.subarray(start, end), this.position);
  1092. this.position += length;
  1093. this.length = Math.max(this.length, this.position);
  1094. };
  1095. // ByteArray#writeByte(value:int):void
  1096. this.writeByte = function(byte) {
  1097. prepareWrite(this, 1);
  1098. this.bytes[this.position] = byte;
  1099. this.position++;
  1100. this.length = Math.max(this.length, this.position);
  1101. };
  1102. // ByteArray#writeShort(value:int):void
  1103. this.writeShort = function(short) {
  1104. prepareWrite(this, 2);
  1105. this.view.setUint16(this.position, short);
  1106. this.position += 2;
  1107. this.length = Math.max(this.length, this.position);
  1108. };
  1109. // Negative index into array
  1110. // (pos:uint):int
  1111. this.negIndex = function(pos) {
  1112. return this.bytes[this.length - pos];
  1113. };
  1114. // The functions below ONLY work when this[0] == VIDEO_TAG.
  1115. // We are not going to check for that because we dont want the overhead
  1116. // (nal:ByteArray = null):int
  1117. this.nalUnitSize = function() {
  1118. if (adHoc === 0) {
  1119. return 0;
  1120. }
  1121. return this.length - (adHoc + 4);
  1122. };
  1123. this.startNalUnit = function() {
  1124. // remember position and add 4 bytes
  1125. if (adHoc > 0) {
  1126. throw new Error('Attempted to create new NAL wihout closing the old one');
  1127. }
  1128. // reserve 4 bytes for nal unit size
  1129. adHoc = this.length;
  1130. this.length += 4;
  1131. this.position = this.length;
  1132. };
  1133. // (nal:ByteArray = null):void
  1134. this.endNalUnit = function(nalContainer) {
  1135. var
  1136. nalStart, // :uint
  1137. nalLength; // :uint
  1138. // Rewind to the marker and write the size
  1139. if (this.length === adHoc + 4) {
  1140. // we started a nal unit, but didnt write one, so roll back the 4 byte size value
  1141. this.length -= 4;
  1142. } else if (adHoc > 0) {
  1143. nalStart = adHoc + 4;
  1144. nalLength = this.length - nalStart;
  1145. this.position = adHoc;
  1146. this.view.setUint32(this.position, nalLength);
  1147. this.position = this.length;
  1148. if (nalContainer) {
  1149. // Add the tag to the NAL unit
  1150. nalContainer.push(this.bytes.subarray(nalStart, nalStart + nalLength));
  1151. }
  1152. }
  1153. adHoc = 0;
  1154. };
  1155. /**
  1156. * Write out a 64-bit floating point valued metadata property. This method is
  1157. * called frequently during a typical parse and needs to be fast.
  1158. */
  1159. // (key:String, val:Number):void
  1160. this.writeMetaDataDouble = function(key, val) {
  1161. var i;
  1162. prepareWrite(this, 2 + key.length + 9);
  1163. // write size of property name
  1164. this.view.setUint16(this.position, key.length);
  1165. this.position += 2;
  1166. // this next part looks terrible but it improves parser throughput by
  1167. // 10kB/s in my testing
  1168. // write property name
  1169. if (key === 'width') {
  1170. this.bytes.set(widthBytes, this.position);
  1171. this.position += 5;
  1172. } else if (key === 'height') {
  1173. this.bytes.set(heightBytes, this.position);
  1174. this.position += 6;
  1175. } else if (key === 'videocodecid') {
  1176. this.bytes.set(videocodecidBytes, this.position);
  1177. this.position += 12;
  1178. } else {
  1179. for (i = 0; i < key.length; i++) {
  1180. this.bytes[this.position] = key.charCodeAt(i);
  1181. this.position++;
  1182. }
  1183. }
  1184. // skip null byte
  1185. this.position++;
  1186. // write property value
  1187. this.view.setFloat64(this.position, val);
  1188. this.position += 8;
  1189. // update flv tag length
  1190. this.length = Math.max(this.length, this.position);
  1191. ++adHoc;
  1192. };
  1193. // (key:String, val:Boolean):void
  1194. this.writeMetaDataBoolean = function(key, val) {
  1195. var i;
  1196. prepareWrite(this, 2);
  1197. this.view.setUint16(this.position, key.length);
  1198. this.position += 2;
  1199. for (i = 0; i < key.length; i++) {
  1200. // if key.charCodeAt(i) >= 255, handle error
  1201. prepareWrite(this, 1);
  1202. this.bytes[this.position] = key.charCodeAt(i);
  1203. this.position++;
  1204. }
  1205. prepareWrite(this, 2);
  1206. this.view.setUint8(this.position, 0x01);
  1207. this.position++;
  1208. this.view.setUint8(this.position, val ? 0x01 : 0x00);
  1209. this.position++;
  1210. this.length = Math.max(this.length, this.position);
  1211. ++adHoc;
  1212. };
  1213. // ():ByteArray
  1214. this.finalize = function() {
  1215. var
  1216. dtsDelta, // :int
  1217. len; // :int
  1218. switch (this.bytes[0]) {
  1219. // Video Data
  1220. case FlvTag.VIDEO_TAG:
  1221. // We only support AVC, 1 = key frame (for AVC, a seekable
  1222. // frame), 2 = inter frame (for AVC, a non-seekable frame)
  1223. this.bytes[11] = ((this.keyFrame || extraData) ? 0x10 : 0x20) | 0x07;
  1224. this.bytes[12] = extraData ? 0x00 : 0x01;
  1225. dtsDelta = this.pts - this.dts;
  1226. this.bytes[13] = (dtsDelta & 0x00FF0000) >>> 16;
  1227. this.bytes[14] = (dtsDelta & 0x0000FF00) >>> 8;
  1228. this.bytes[15] = (dtsDelta & 0x000000FF) >>> 0;
  1229. break;
  1230. case FlvTag.AUDIO_TAG:
  1231. this.bytes[11] = 0xAF; // 44 kHz, 16-bit stereo
  1232. this.bytes[12] = extraData ? 0x00 : 0x01;
  1233. break;
  1234. case FlvTag.METADATA_TAG:
  1235. this.position = 11;
  1236. this.view.setUint8(this.position, 0x02); // String type
  1237. this.position++;
  1238. this.view.setUint16(this.position, 0x0A); // 10 Bytes
  1239. this.position += 2;
  1240. // set "onMetaData"
  1241. this.bytes.set([0x6f, 0x6e, 0x4d, 0x65,
  1242. 0x74, 0x61, 0x44, 0x61,
  1243. 0x74, 0x61], this.position);
  1244. this.position += 10;
  1245. this.bytes[this.position] = 0x08; // Array type
  1246. this.position++;
  1247. this.view.setUint32(this.position, adHoc);
  1248. this.position = this.length;
  1249. this.bytes.set([0, 0, 9], this.position);
  1250. this.position += 3; // End Data Tag
  1251. this.length = this.position;
  1252. break;
  1253. }
  1254. len = this.length - 11;
  1255. // write the DataSize field
  1256. this.bytes[ 1] = (len & 0x00FF0000) >>> 16;
  1257. this.bytes[ 2] = (len & 0x0000FF00) >>> 8;
  1258. this.bytes[ 3] = (len & 0x000000FF) >>> 0;
  1259. // write the Timestamp
  1260. this.bytes[ 4] = (this.dts & 0x00FF0000) >>> 16;
  1261. this.bytes[ 5] = (this.dts & 0x0000FF00) >>> 8;
  1262. this.bytes[ 6] = (this.dts & 0x000000FF) >>> 0;
  1263. this.bytes[ 7] = (this.dts & 0xFF000000) >>> 24;
  1264. // write the StreamID
  1265. this.bytes[ 8] = 0;
  1266. this.bytes[ 9] = 0;
  1267. this.bytes[10] = 0;
  1268. // Sometimes we're at the end of the view and have one slot to write a
  1269. // uint32, so, prepareWrite of count 4, since, view is uint8
  1270. prepareWrite(this, 4);
  1271. this.view.setUint32(this.length, this.length);
  1272. this.length += 4;
  1273. this.position += 4;
  1274. // trim down the byte buffer to what is actually being used
  1275. this.bytes = this.bytes.subarray(0, this.length);
  1276. this.frameTime = FlvTag.frameTime(this.bytes);
  1277. // if bytes.bytelength isn't equal to this.length, handle error
  1278. return this;
  1279. };
  1280. };
  1281. FlvTag.AUDIO_TAG = 0x08; // == 8, :uint
  1282. FlvTag.VIDEO_TAG = 0x09; // == 9, :uint
  1283. FlvTag.METADATA_TAG = 0x12; // == 18, :uint
  1284. // (tag:ByteArray):Boolean {
  1285. FlvTag.isAudioFrame = function(tag) {
  1286. return FlvTag.AUDIO_TAG === tag[0];
  1287. };
  1288. // (tag:ByteArray):Boolean {
  1289. FlvTag.isVideoFrame = function(tag) {
  1290. return FlvTag.VIDEO_TAG === tag[0];
  1291. };
  1292. // (tag:ByteArray):Boolean {
  1293. FlvTag.isMetaData = function(tag) {
  1294. return FlvTag.METADATA_TAG === tag[0];
  1295. };
  1296. // (tag:ByteArray):Boolean {
  1297. FlvTag.isKeyFrame = function(tag) {
  1298. if (FlvTag.isVideoFrame(tag)) {
  1299. return tag[11] === 0x17;
  1300. }
  1301. if (FlvTag.isAudioFrame(tag)) {
  1302. return true;
  1303. }
  1304. if (FlvTag.isMetaData(tag)) {
  1305. return true;
  1306. }
  1307. return false;
  1308. };
  1309. // (tag:ByteArray):uint {
  1310. FlvTag.frameTime = function(tag) {
  1311. var pts = tag[ 4] << 16; // :uint
  1312. pts |= tag[ 5] << 8;
  1313. pts |= tag[ 6] << 0;
  1314. pts |= tag[ 7] << 24;
  1315. return pts;
  1316. };
  1317. module.exports = FlvTag;
  1318. },{}],13:[function(require,module,exports){
  1319. module.exports = {
  1320. tag: require('./flv-tag'),
  1321. Transmuxer: require('./transmuxer'),
  1322. getFlvHeader: require('./flv-header')
  1323. };
  1324. },{"./flv-header":11,"./flv-tag":12,"./transmuxer":15}],14:[function(require,module,exports){
  1325. 'use strict';
  1326. var TagList = function() {
  1327. var self = this;
  1328. this.list = [];
  1329. this.push = function(tag) {
  1330. this.list.push({
  1331. bytes: tag.bytes,
  1332. dts: tag.dts,
  1333. pts: tag.pts,
  1334. keyFrame: tag.keyFrame,
  1335. metaDataTag: tag.metaDataTag
  1336. });
  1337. };
  1338. Object.defineProperty(this, 'length', {
  1339. get: function() {
  1340. return self.list.length;
  1341. }
  1342. });
  1343. };
  1344. module.exports = TagList;
  1345. },{}],15:[function(require,module,exports){
  1346. 'use strict';
  1347. var Stream = require('../utils/stream.js');
  1348. var FlvTag = require('./flv-tag.js');
  1349. var m2ts = require('../m2ts/m2ts.js');
  1350. var AdtsStream = require('../codecs/adts.js');
  1351. var H264Stream = require('../codecs/h264').H264Stream;
  1352. var CoalesceStream = require('./coalesce-stream.js');
  1353. var TagList = require('./tag-list.js');
  1354. var
  1355. Transmuxer,
  1356. VideoSegmentStream,
  1357. AudioSegmentStream,
  1358. collectTimelineInfo,
  1359. metaDataTag,
  1360. extraDataTag;
  1361. /**
  1362. * Store information about the start and end of the tracka and the
  1363. * duration for each frame/sample we process in order to calculate
  1364. * the baseMediaDecodeTime
  1365. */
  1366. collectTimelineInfo = function(track, data) {
  1367. if (typeof data.pts === 'number') {
  1368. if (track.timelineStartInfo.pts === undefined) {
  1369. track.timelineStartInfo.pts = data.pts;
  1370. } else {
  1371. track.timelineStartInfo.pts =
  1372. Math.min(track.timelineStartInfo.pts, data.pts);
  1373. }
  1374. }
  1375. if (typeof data.dts === 'number') {
  1376. if (track.timelineStartInfo.dts === undefined) {
  1377. track.timelineStartInfo.dts = data.dts;
  1378. } else {
  1379. track.timelineStartInfo.dts =
  1380. Math.min(track.timelineStartInfo.dts, data.dts);
  1381. }
  1382. }
  1383. };
  1384. metaDataTag = function(track, pts) {
  1385. var
  1386. tag = new FlvTag(FlvTag.METADATA_TAG); // :FlvTag
  1387. tag.dts = pts;
  1388. tag.pts = pts;
  1389. tag.writeMetaDataDouble('videocodecid', 7);
  1390. tag.writeMetaDataDouble('width', track.width);
  1391. tag.writeMetaDataDouble('height', track.height);
  1392. return tag;
  1393. };
  1394. extraDataTag = function(track, pts) {
  1395. var
  1396. i,
  1397. tag = new FlvTag(FlvTag.VIDEO_TAG, true);
  1398. tag.dts = pts;
  1399. tag.pts = pts;
  1400. tag.writeByte(0x01);// version
  1401. tag.writeByte(track.profileIdc);// profile
  1402. tag.writeByte(track.profileCompatibility);// compatibility
  1403. tag.writeByte(track.levelIdc);// level
  1404. tag.writeByte(0xFC | 0x03); // reserved (6 bits), NULA length size - 1 (2 bits)
  1405. tag.writeByte(0xE0 | 0x01); // reserved (3 bits), num of SPS (5 bits)
  1406. tag.writeShort(track.sps[0].length); // data of SPS
  1407. tag.writeBytes(track.sps[0]); // SPS
  1408. tag.writeByte(track.pps.length); // num of PPS (will there ever be more that 1 PPS?)
  1409. for (i = 0; i < track.pps.length; ++i) {
  1410. tag.writeShort(track.pps[i].length); // 2 bytes for length of PPS
  1411. tag.writeBytes(track.pps[i]); // data of PPS
  1412. }
  1413. return tag;
  1414. };
/**
 * Constructs a single-track, media segment from AAC data
 * events. The output of this stream can be fed to flash.
 *
 * @param track {object} shared audio track metadata; its codec fields
 *        are refreshed from every pushed ADTS frame
 */
AudioSegmentStream = function(track) {
  var
    adtsFrames = [],
    videoKeyFrames = [],
    oldExtraData;
  AudioSegmentStream.prototype.init.call(this);

  // Buffer one ADTS frame. Timestamps arrive in 90kHz clock ticks and
  // are converted to milliseconds here (divide by 90).
  this.push = function(data) {
    collectTimelineInfo(track, data);

    if (track) {
      // refresh the codec configuration from the latest frame;
      // extraData packs (audioobjecttype, samplingfrequencyindex,
      // channelcount) into a 16-bit value written via setUint16 below
      track.audioobjecttype = data.audioobjecttype;
      track.channelcount = data.channelcount;
      track.samplerate = data.samplerate;
      track.samplingfrequencyindex = data.samplingfrequencyindex;
      track.samplesize = data.samplesize;
      track.extraData = (track.audioobjecttype << 11) |
                        (track.samplingfrequencyindex << 7) |
                        (track.channelcount << 3);
    }

    data.pts = Math.round(data.pts / 90);
    data.dts = Math.round(data.dts / 90);

    // buffer audio data until end() is called
    adtsFrames.push(data);
  };

  // Convert all buffered frames into FLV audio tags, interleaving
  // metadata tags at video key frames, on configuration changes and at
  // least once per second.
  this.flush = function() {
    var currentFrame, adtsFrame, lastMetaPts, tags = new TagList();
    // return early if no audio data has been observed
    if (adtsFrames.length === 0) {
      this.trigger('done', 'AudioSegmentStream');
      return;
    }

    lastMetaPts = -Infinity;

    while (adtsFrames.length) {
      currentFrame = adtsFrames.shift();

      // write out a metadata frame at every video key frame
      if (videoKeyFrames.length && currentFrame.pts >= videoKeyFrames[0]) {
        lastMetaPts = videoKeyFrames.shift();
        this.writeMetaDataTags(tags, lastMetaPts);
      }

      // also write out metadata tags every 1 second so that the decoder
      // is re-initialized quickly after seeking into a different
      // audio configuration.
      if (track.extraData !== oldExtraData || currentFrame.pts - lastMetaPts >= 1000) {
        this.writeMetaDataTags(tags, currentFrame.pts);
        oldExtraData = track.extraData;
        lastMetaPts = currentFrame.pts;
      }

      // wrap the raw AAC frame bytes in an FLV audio tag
      adtsFrame = new FlvTag(FlvTag.AUDIO_TAG);
      adtsFrame.pts = currentFrame.pts;
      adtsFrame.dts = currentFrame.dts;

      adtsFrame.writeBytes(currentFrame.data);

      tags.push(adtsFrame.finalize());
    }

    // reset per-segment state
    videoKeyFrames.length = 0;
    oldExtraData = null;

    this.trigger('data', {track: track, tags: tags.list});

    this.trigger('done', 'AudioSegmentStream');
  };

  // Emit a pair of tags describing the current audio configuration: an
  // onMetaData script tag plus an "extra data" audio tag carrying
  // track.extraData.
  this.writeMetaDataTags = function(tags, pts) {
    var adtsFrame;

    adtsFrame = new FlvTag(FlvTag.METADATA_TAG);
    // For audio, DTS is always the same as PTS. We want to set the DTS
    // however so we can compare with video DTS to determine approximate
    // packet order
    adtsFrame.pts = pts;
    adtsFrame.dts = pts;

    // AAC is always 10
    adtsFrame.writeMetaDataDouble('audiocodecid', 10);
    adtsFrame.writeMetaDataBoolean('stereo', track.channelcount === 2);
    adtsFrame.writeMetaDataDouble('audiosamplerate', track.samplerate);
    // Is AAC always 16 bit?
    adtsFrame.writeMetaDataDouble('audiosamplesize', 16);

    tags.push(adtsFrame.finalize());

    adtsFrame = new FlvTag(FlvTag.AUDIO_TAG, true);
    // For audio, DTS is always the same as PTS. We want to set the DTS
    // however so we can compare with video DTS to determine approximate
    // packet order
    adtsFrame.pts = pts;
    adtsFrame.dts = pts;

    // write the 16-bit packed codec configuration directly into the tag
    adtsFrame.view.setUint16(adtsFrame.position, track.extraData);
    adtsFrame.position += 2;
    adtsFrame.length = Math.max(adtsFrame.length, adtsFrame.position);

    tags.push(adtsFrame.finalize());
  };

  // Record the pts of a video key frame so flush() can align audio
  // metadata tags with it.
  this.onVideoKeyFrame = function(pts) {
    videoKeyFrames.push(pts);
  };
};
AudioSegmentStream.prototype = new Stream();
/**
 * Store FlvTags for the h264 stream
 * @param track {object} track metadata configuration
 */
VideoSegmentStream = function(track) {
  var
    nalUnits = [],
    config,
    h264Frame;
  VideoSegmentStream.prototype.init.call(this);

  // Close out the in-progress frame and append it to the tag list,
  // preceded by fresh metadata/extra-data tags when the track
  // configuration has changed.
  this.finishFrame = function(tags, frame) {
    if (!frame) {
      return;
    }
    // Check if keyframe and the length of tags.
    // This makes sure we write metadata on the first frame of a segment.
    if (config && track && track.newMetadata &&
        (frame.keyFrame || tags.length === 0)) {
      // Push extra data on every IDR frame in case we did a stream change + seek
      var metaTag = metaDataTag(config, frame.dts).finalize();
      var extraTag = extraDataTag(track, frame.dts).finalize();

      // mark both so downstream consumers can distinguish them from media tags
      metaTag.metaDataTag = extraTag.metaDataTag = true;

      tags.push(metaTag);
      tags.push(extraTag);
      track.newMetadata = false;

      this.trigger('keyframe', frame.dts);
    }

    frame.endNalUnit();
    tags.push(frame.finalize());
    h264Frame = null;
  };

  // Buffer one NAL unit. Timestamps arrive in 90kHz clock ticks and are
  // converted to milliseconds here (divide by 90).
  this.push = function(data) {
    collectTimelineInfo(track, data);

    data.pts = Math.round(data.pts / 90);
    data.dts = Math.round(data.dts / 90);

    // buffer video until flush() is called
    nalUnits.push(data);
  };

  // Assemble the buffered NAL units into FLV video tags, one tag per
  // access unit (delimited by AUD NAL units).
  this.flush = function() {
    var
      currentNal,
      tags = new TagList();

    // Throw away nalUnits at the start of the byte stream until we find
    // the first AUD
    while (nalUnits.length) {
      if (nalUnits[0].nalUnitType === 'access_unit_delimiter_rbsp') {
        break;
      }
      nalUnits.shift();
    }

    // return early if no video data has been observed
    if (nalUnits.length === 0) {
      this.trigger('done', 'VideoSegmentStream');
      return;
    }

    while (nalUnits.length) {
      currentNal = nalUnits.shift();

      // record the track config
      if (currentNal.nalUnitType === 'seq_parameter_set_rbsp') {
        track.newMetadata = true;
        config = currentNal.config;

        track.width = config.width;
        track.height = config.height;
        track.sps = [currentNal.data];
        track.profileIdc = config.profileIdc;
        track.levelIdc = config.levelIdc;
        track.profileCompatibility = config.profileCompatibility;

        h264Frame.endNalUnit();
      } else if (currentNal.nalUnitType === 'pic_parameter_set_rbsp') {
        track.newMetadata = true;
        track.pps = [currentNal.data];

        h264Frame.endNalUnit();
      } else if (currentNal.nalUnitType === 'access_unit_delimiter_rbsp') {
        // an AUD starts a new access unit: finish the previous frame (if
        // any) and open a fresh video tag for the next one
        if (h264Frame) {
          this.finishFrame(tags, h264Frame);
        }

        h264Frame = new FlvTag(FlvTag.VIDEO_TAG);
        h264Frame.pts = currentNal.pts;
        h264Frame.dts = currentNal.dts;
      } else {
        if (currentNal.nalUnitType === 'slice_layer_without_partitioning_rbsp_idr') {
          // the current sample is a key frame
          h264Frame.keyFrame = true;
        }

        h264Frame.endNalUnit();
      }
      // NOTE(review): after the trim above the stream always begins with
      // an AUD, so h264Frame is expected to be non-null by this point
      h264Frame.startNalUnit();
      h264Frame.writeBytes(currentNal.data);
    }

    // finish the final in-progress frame
    if (h264Frame) {
      this.finishFrame(tags, h264Frame);
    }

    this.trigger('data', {track: track, tags: tags.list});

    // Continue with the flush process now
    this.trigger('done', 'VideoSegmentStream');
  };
};
VideoSegmentStream.prototype = new Stream();
/**
 * An object that incrementally transmuxes MPEG2 Transport Stream
 * chunks into an FLV.
 *
 * @param options {object} optional configuration; a metadataStream
 *        property is attached to it and passed on to the coalesce stream
 */
Transmuxer = function(options) {
  var
    self = this,
    packetStream, parseStream, elementaryStream,
    videoTimestampRolloverStream, audioTimestampRolloverStream,
    timedMetadataTimestampRolloverStream,
    adtsStream, h264Stream,
    videoSegmentStream, audioSegmentStream, captionStream,
    coalesceStream;

  Transmuxer.prototype.init.call(this);

  options = options || {};

  // expose the metadata stream
  this.metadataStream = new m2ts.MetadataStream();

  options.metadataStream = this.metadataStream;

  // set up the parsing pipeline
  packetStream = new m2ts.TransportPacketStream();
  parseStream = new m2ts.TransportParseStream();
  elementaryStream = new m2ts.ElementaryStream();
  videoTimestampRolloverStream = new m2ts.TimestampRolloverStream('video');
  audioTimestampRolloverStream = new m2ts.TimestampRolloverStream('audio');
  timedMetadataTimestampRolloverStream = new m2ts.TimestampRolloverStream('timed-metadata');

  adtsStream = new AdtsStream();
  h264Stream = new H264Stream();
  coalesceStream = new CoalesceStream(options);

  // disassemble MPEG2-TS packets into elementary streams
  packetStream
    .pipe(parseStream)
    .pipe(elementaryStream);

  // !!THIS ORDER IS IMPORTANT!!
  // demux the streams
  elementaryStream
    .pipe(videoTimestampRolloverStream)
    .pipe(h264Stream);
  elementaryStream
    .pipe(audioTimestampRolloverStream)
    .pipe(adtsStream);

  elementaryStream
    .pipe(timedMetadataTimestampRolloverStream)
    .pipe(this.metadataStream)
    .pipe(coalesceStream);
  // if CEA-708 parsing is available, hook up a caption stream
  captionStream = new m2ts.CaptionStream();
  h264Stream.pipe(captionStream)
    .pipe(coalesceStream);

  // hook up the segment streams once track metadata is delivered
  elementaryStream.on('data', function(data) {
    var i, videoTrack, audioTrack;

    if (data.type === 'metadata') {
      i = data.tracks.length;

      // scan the tracks listed in the metadata
      while (i--) {
        if (data.tracks[i].type === 'video') {
          videoTrack = data.tracks[i];
        } else if (data.tracks[i].type === 'audio') {
          audioTrack = data.tracks[i];
        }
      }

      // hook up the video segment stream to the first track with h264 data
      if (videoTrack && !videoSegmentStream) {
        coalesceStream.numberOfTracks++;
        videoSegmentStream = new VideoSegmentStream(videoTrack);

        // Set up the final part of the video pipeline
        h264Stream
          .pipe(videoSegmentStream)
          .pipe(coalesceStream);
      }

      if (audioTrack && !audioSegmentStream) {
        // hook up the audio segment stream to the first track with aac data
        coalesceStream.numberOfTracks++;
        audioSegmentStream = new AudioSegmentStream(audioTrack);

        // Set up the final part of the audio pipeline
        adtsStream
          .pipe(audioSegmentStream)
          .pipe(coalesceStream);

        if (videoSegmentStream) {
          // let the audio stream align its metadata tags with video key frames
          videoSegmentStream.on('keyframe', audioSegmentStream.onVideoKeyFrame);
        }
      }
    }
  });

  // feed incoming data to the front of the parsing pipeline
  this.push = function(data) {
    packetStream.push(data);
  };

  // flush any buffered data
  this.flush = function() {
    // Start at the top of the pipeline and flush all pending work
    packetStream.flush();
  };

  // Caption data has to be reset when seeking outside buffered range
  this.resetCaptions = function() {
    captionStream.reset();
  };

  // Re-emit any data coming from the coalesce stream to the outside world
  coalesceStream.on('data', function(event) {
    self.trigger('data', event);
  });

  // Let the consumer know we have finished flushing the entire pipeline
  coalesceStream.on('done', function() {
    self.trigger('done');
  });
};
Transmuxer.prototype = new Stream();
  1712. // forward compatibility
  1713. module.exports = Transmuxer;
  1714. },{"../codecs/adts.js":6,"../codecs/h264":7,"../m2ts/m2ts.js":19,"../utils/stream.js":33,"./coalesce-stream.js":10,"./flv-tag.js":12,"./tag-list.js":14}],16:[function(require,module,exports){
  1715. 'use strict';
/**
 * Top-level mux.js namespace: codec helpers plus the MP4, FLV and
 * MPEG2-TS (de)muxing utilities, with inspector tools attached.
 */
var muxjs = {
  codecs: require('./codecs'),
  mp4: require('./mp4'),
  flv: require('./flv'),
  mp2t: require('./m2ts')
};

// include all the tools when the full library is required
muxjs.mp4.tools = require('./tools/mp4-inspector');
muxjs.flv.tools = require('./tools/flv-inspector');
muxjs.mp2t.tools = require('./tools/ts-inspector');

module.exports = muxjs;
  1727. },{"./codecs":8,"./flv":13,"./m2ts":18,"./mp4":24,"./tools/flv-inspector":28,"./tools/mp4-inspector":29,"./tools/ts-inspector":30}],17:[function(require,module,exports){
  1728. /**
  1729. * mux.js
  1730. *
  1731. * Copyright (c) 2015 Brightcove
  1732. * All rights reserved.
  1733. *
  1734. * Reads in-band caption information from a video elementary
  1735. * stream. Captions must follow the CEA-708 standard for injection
  1736. * into an MPEG-2 transport streams.
  1737. * @see https://en.wikipedia.org/wiki/CEA-708
  1738. * @see https://www.gpo.gov/fdsys/pkg/CFR-2007-title47-vol1/pdf/CFR-2007-title47-vol1-sec15-119.pdf
  1739. */
  1740. 'use strict';
  1741. // -----------------
  1742. // Link To Transport
  1743. // -----------------
// Supplemental enhancement information (SEI) NAL units have a
// payload type field to indicate how they are to be
// interpreted. CEA-708 caption content is always transmitted with
// payload type 0x04.
  1748. var USER_DATA_REGISTERED_ITU_T_T35 = 4,
  1749. RBSP_TRAILING_BITS = 128,
  1750. Stream = require('../utils/stream');
  1751. /**
  1752. * Parse a supplemental enhancement information (SEI) NAL unit.
  1753. * Stops parsing once a message of type ITU T T35 has been found.
  1754. *
  1755. * @param bytes {Uint8Array} the bytes of a SEI NAL unit
  1756. * @return {object} the parsed SEI payload
  1757. * @see Rec. ITU-T H.264, 7.3.2.3.1
  1758. */
  1759. var parseSei = function(bytes) {
  1760. var
  1761. i = 0,
  1762. result = {
  1763. payloadType: -1,
  1764. payloadSize: 0
  1765. },
  1766. payloadType = 0,
  1767. payloadSize = 0;
  1768. // go through the sei_rbsp parsing each each individual sei_message
  1769. while (i < bytes.byteLength) {
  1770. // stop once we have hit the end of the sei_rbsp
  1771. if (bytes[i] === RBSP_TRAILING_BITS) {
  1772. break;
  1773. }
  1774. // Parse payload type
  1775. while (bytes[i] === 0xFF) {
  1776. payloadType += 255;
  1777. i++;
  1778. }
  1779. payloadType += bytes[i++];
  1780. // Parse payload size
  1781. while (bytes[i] === 0xFF) {
  1782. payloadSize += 255;
  1783. i++;
  1784. }
  1785. payloadSize += bytes[i++];
  1786. // this sei_message is a 608/708 caption so save it and break
  1787. // there can only ever be one caption message in a frame's sei
  1788. if (!result.payload && payloadType === USER_DATA_REGISTERED_ITU_T_T35) {
  1789. result.payloadType = payloadType;
  1790. result.payloadSize = payloadSize;
  1791. result.payload = bytes.subarray(i, i + payloadSize);
  1792. break;
  1793. }
  1794. // skip the payload and parse the next message
  1795. i += payloadSize;
  1796. payloadType = 0;
  1797. payloadSize = 0;
  1798. }
  1799. return result;
  1800. };
  1801. // see ANSI/SCTE 128-1 (2013), section 8.1
  1802. var parseUserData = function(sei) {
  1803. // itu_t_t35_contry_code must be 181 (United States) for
  1804. // captions
  1805. if (sei.payload[0] !== 181) {
  1806. return null;
  1807. }
  1808. // itu_t_t35_provider_code should be 49 (ATSC) for captions
  1809. if (((sei.payload[1] << 8) | sei.payload[2]) !== 49) {
  1810. return null;
  1811. }
  1812. // the user_identifier should be "GA94" to indicate ATSC1 data
  1813. if (String.fromCharCode(sei.payload[3],
  1814. sei.payload[4],
  1815. sei.payload[5],
  1816. sei.payload[6]) !== 'GA94') {
  1817. return null;
  1818. }
  1819. // finally, user_data_type_code should be 0x03 for caption data
  1820. if (sei.payload[7] !== 0x03) {
  1821. return null;
  1822. }
  1823. // return the user_data_type_structure and strip the trailing
  1824. // marker bits
  1825. return sei.payload.subarray(8, sei.payload.length - 1);
  1826. };
  1827. // see CEA-708-D, section 4.4
  1828. var parseCaptionPackets = function(pts, userData) {
  1829. var results = [], i, count, offset, data;
  1830. // if this is just filler, return immediately
  1831. if (!(userData[0] & 0x40)) {
  1832. return results;
  1833. }
  1834. // parse out the cc_data_1 and cc_data_2 fields
  1835. count = userData[0] & 0x1f;
  1836. for (i = 0; i < count; i++) {
  1837. offset = i * 3;
  1838. data = {
  1839. type: userData[offset + 2] & 0x03,
  1840. pts: pts
  1841. };
  1842. // capture cc data when cc_valid is 1
  1843. if (userData[offset + 2] & 0x04) {
  1844. data.ccData = (userData[offset + 3] << 8) | userData[offset + 4];
  1845. results.push(data);
  1846. }
  1847. }
  1848. return results;
  1849. };
/**
 * Consumes SEI NAL units, collects any embedded CEA-608 caption byte pairs
 * and routes them to one of four Cea608Stream decoders (CC1-CC4).
 */
var CaptionStream = function() {
  CaptionStream.prototype.init.call(this);
  // byte pairs gathered from pushed NAL units, dispatched on flush()
  this.captionPackets_ = [];
  // one decoder per (field, data channel) combination
  this.ccStreams_ = [
    new Cea608Stream(0, 0), // eslint-disable-line no-use-before-define
    new Cea608Stream(0, 1), // eslint-disable-line no-use-before-define
    new Cea608Stream(1, 0), // eslint-disable-line no-use-before-define
    new Cea608Stream(1, 1) // eslint-disable-line no-use-before-define
  ];
  this.reset();
  // forward data and done events from CCs to this CaptionStream
  this.ccStreams_.forEach(function(cc) {
    cc.on('data', this.trigger.bind(this, 'data'));
    cc.on('done', this.trigger.bind(this, 'done'));
  }, this);
};
CaptionStream.prototype = new Stream();
/**
 * Examine one NAL unit and collect any CEA-608 caption packets it carries.
 * Only SEI NAL units with an ITU-T T.35 user-data payload are considered.
 *
 * @param event {object} NAL unit with nalUnitType, escapedRBSP, pts and dts
 */
CaptionStream.prototype.push = function(event) {
  var sei, userData;
  // only examine SEI NALs
  if (event.nalUnitType !== 'sei_rbsp') {
    return;
  }
  // parse the sei
  sei = parseSei(event.escapedRBSP);
  // ignore everything but user_data_registered_itu_t_t35
  if (sei.payloadType !== USER_DATA_REGISTERED_ITU_T_T35) {
    return;
  }
  // parse out the user data payload
  userData = parseUserData(sei);
  // ignore unrecognized userData
  if (!userData) {
    return;
  }
  // Sometimes, the same segment # will be downloaded twice. To stop the
  // caption data from being processed twice, we track the latest dts we've
  // received and ignore everything with a dts before that. However, since
  // data for a specific dts can be split across 2 packets on either side of
  // a segment boundary, we need to make sure we *don't* ignore the second
  // dts packet we receive that has dts === this.latestDts_. And thus, the
  // ignoreNextEqualDts_ flag was born.
  if (event.dts < this.latestDts_) {
    // We've started getting older data, so set the flag.
    this.ignoreNextEqualDts_ = true;
    return;
  } else if ((event.dts === this.latestDts_) && (this.ignoreNextEqualDts_)) {
    // We've received the last duplicate packet, time to start processing again
    this.ignoreNextEqualDts_ = false;
    return;
  }
  // parse out CC data packets and save them for later
  this.captionPackets_ = this.captionPackets_.concat(parseCaptionPackets(event.pts, userData));
  this.latestDts_ = event.dts;
};
  1905. CaptionStream.prototype.flush = function() {
  1906. // make sure we actually parsed captions before proceeding
  1907. if (!this.captionPackets_.length) {
  1908. this.ccStreams_.forEach(function(cc) {
  1909. cc.flush();
  1910. }, this);
  1911. return;
  1912. }
  1913. // In Chrome, the Array#sort function is not stable so add a
  1914. // presortIndex that we can use to ensure we get a stable-sort
  1915. this.captionPackets_.forEach(function(elem, idx) {
  1916. elem.presortIndex = idx;
  1917. });
  1918. // sort caption byte-pairs based on their PTS values
  1919. this.captionPackets_.sort(function(a, b) {
  1920. if (a.pts === b.pts) {
  1921. return a.presortIndex - b.presortIndex;
  1922. }
  1923. return a.pts - b.pts;
  1924. });
  1925. this.captionPackets_.forEach(function(packet) {
  1926. if (packet.type < 2) {
  1927. // Dispatch packet to the right Cea608Stream
  1928. this.dispatchCea608Packet(packet);
  1929. }
  1930. // this is where an 'else' would go for a dispatching packets
  1931. // to a theoretical Cea708Stream that handles SERVICEn data
  1932. }, this);
  1933. this.captionPackets_.length = 0;
  1934. this.ccStreams_.forEach(function(cc) {
  1935. cc.flush();
  1936. }, this);
  1937. return;
  1938. };
/**
 * Return the stream to its initial state: no timestamps seen, no active
 * data channel chosen for either field, and all decoders reset.
 */
CaptionStream.prototype.reset = function() {
  this.latestDts_ = null;
  this.ignoreNextEqualDts_ = false;
  // indexed by field; null until a control code selects a channel
  this.activeCea608Channel_ = [null, null];
  this.ccStreams_.forEach(function(ccStream) {
    ccStream.reset();
  });
};
  1947. CaptionStream.prototype.dispatchCea608Packet = function(packet) {
  1948. // NOTE: packet.type is the CEA608 field
  1949. if (this.setsChannel1Active(packet)) {
  1950. this.activeCea608Channel_[packet.type] = 0;
  1951. } else if (this.setsChannel2Active(packet)) {
  1952. this.activeCea608Channel_[packet.type] = 1;
  1953. }
  1954. if (this.activeCea608Channel_[packet.type] === null) {
  1955. // If we haven't received anything to set the active channel, discard the
  1956. // data; we don't want jumbled captions
  1957. return;
  1958. }
  1959. this.ccStreams_[(packet.type << 1) + this.activeCea608Channel_[packet.type]].push(packet);
  1960. };
  1961. CaptionStream.prototype.setsChannel1Active = function(packet) {
  1962. return ((packet.ccData & 0x7800) === 0x1000);
  1963. };
  1964. CaptionStream.prototype.setsChannel2Active = function(packet) {
  1965. return ((packet.ccData & 0x7800) === 0x1800);
  1966. };
// ----------------------
// Session to Application
// ----------------------
// Maps CEA-608 character codes to the unicode code points they render as.
// Plain codes (< 0x100) remap a few basic-set slots; special and extended
// characters are keyed by their full two-byte value (char0 masked with
// 0x03, shifted left 8, OR'd with char1).
var CHARACTER_TRANSLATION = {
  0x2a: 0xe1, // á
  0x5c: 0xe9, // é
  0x5e: 0xed, // í
  0x5f: 0xf3, // ó
  0x60: 0xfa, // ú
  0x7b: 0xe7, // ç
  0x7c: 0xf7, // ÷
  0x7d: 0xd1, // Ñ
  0x7e: 0xf1, // ñ
  0x7f: 0x2588, // █
  0x0130: 0xae, // ®
  0x0131: 0xb0, // °
  0x0132: 0xbd, // ½
  0x0133: 0xbf, // ¿
  0x0134: 0x2122, // ™
  0x0135: 0xa2, // ¢
  0x0136: 0xa3, // £
  0x0137: 0x266a, // ♪
  0x0138: 0xe0, // à
  0x0139: 0xa0, // non-breaking space
  0x013a: 0xe8, // è
  0x013b: 0xe2, // â
  0x013c: 0xea, // ê
  0x013d: 0xee, // î
  0x013e: 0xf4, // ô
  0x013f: 0xfb, // û
  0x0220: 0xc1, // Á
  0x0221: 0xc9, // É
  0x0222: 0xd3, // Ó
  0x0223: 0xda, // Ú
  0x0224: 0xdc, // Ü
  0x0225: 0xfc, // ü
  0x0226: 0x2018, // ‘
  0x0227: 0xa1, // ¡
  0x0228: 0x2a, // *
  0x0229: 0x27, // '
  0x022a: 0x2014, // —
  0x022b: 0xa9, // ©
  0x022c: 0x2120, // ℠
  0x022d: 0x2022, // •
  0x022e: 0x201c, // “
  0x022f: 0x201d, // ”
  0x0230: 0xc0, // À
  0x0231: 0xc2, // Â
  0x0232: 0xc7, // Ç
  0x0233: 0xc8, // È
  0x0234: 0xca, // Ê
  0x0235: 0xcb, // Ë
  0x0236: 0xeb, // ë
  0x0237: 0xce, // Î
  0x0238: 0xcf, // Ï
  0x0239: 0xef, // ï
  0x023a: 0xd4, // Ô
  0x023b: 0xd9, // Ù
  0x023c: 0xf9, // ù
  0x023d: 0xdb, // Û
  0x023e: 0xab, // «
  0x023f: 0xbb, // »
  0x0320: 0xc3, // Ã
  0x0321: 0xe3, // ã
  0x0322: 0xcd, // Í
  0x0323: 0xcc, // Ì
  0x0324: 0xec, // ì
  0x0325: 0xd2, // Ò
  0x0326: 0xf2, // ò
  0x0327: 0xd5, // Õ
  0x0328: 0xf5, // õ
  0x0329: 0x7b, // {
  0x032a: 0x7d, // }
  0x032b: 0x5c, // \
  0x032c: 0x5e, // ^
  0x032d: 0x5f, // _
  0x032e: 0x7c, // |
  0x032f: 0x7e, // ~
  0x0330: 0xc4, // Ä
  0x0331: 0xe4, // ä
  0x0332: 0xd6, // Ö
  0x0333: 0xf6, // ö
  0x0334: 0xdf, // ß
  0x0335: 0xa5, // ¥
  0x0336: 0xa4, // ¤
  0x0337: 0x2502, // │
  0x0338: 0xc5, // Å
  0x0339: 0xe5, // å
  0x033a: 0xd8, // Ø
  0x033b: 0xf8, // ø
  0x033c: 0x250c, // ┌
  0x033d: 0x2510, // ┐
  0x033e: 0x2514, // └
  0x033f: 0x2518 // ┘
};
  2062. var getCharFromCode = function(code) {
  2063. if (code === null) {
  2064. return '';
  2065. }
  2066. code = CHARACTER_TRANSLATION[code] || code;
  2067. return String.fromCharCode(code);
  2068. };
// the index of the last row in a CEA-608 display buffer
var BOTTOM_ROW = 14;
// This array is used for mapping PACs -> row #, since there's no way of
// getting it through bit logic: a PAC's (data & 0x1f20) bits are looked
// up here and the match's index is the target row.
var ROWS = [0x1100, 0x1120, 0x1200, 0x1220, 0x1500, 0x1520, 0x1600, 0x1620,
  0x1700, 0x1720, 0x1000, 0x1300, 0x1320, 0x1400, 0x1420];
  2075. // CEA-608 captions are rendered onto a 34x15 matrix of character
  2076. // cells. The "bottom" row is the last element in the outer array.
  2077. var createDisplayBuffer = function() {
  2078. var result = [], i = BOTTOM_ROW + 1;
  2079. while (i--) {
  2080. result.push('');
  2081. }
  2082. return result;
  2083. };
/**
 * Decodes a single CEA-608 caption service (one field/data-channel pair)
 * from byte-pair packets into cue 'data' events.
 *
 * @param field {number} 0 or 1, the CEA-608 field this stream decodes
 * @param dataChannel {number} 0 or 1, the data channel within that field
 */
var Cea608Stream = function(field, dataChannel) {
  Cea608Stream.prototype.init.call(this);
  this.field_ = field || 0;
  this.dataChannel_ = dataChannel || 0;
  // human-readable service name, CC1-CC4
  this.name_ = 'CC' + (((this.field_ << 1) | this.dataChannel_) + 1);
  this.setConstants();
  this.reset();
  // Decode one byte pair (`packet.ccData`, with pts) per call.
  this.push = function(packet) {
    var data, swap, char0, char1, text;
    // remove the parity bits
    data = packet.ccData & 0x7f7f;
    // ignore duplicate control codes; the spec demands they're sent twice
    if (data === this.lastControlCode_) {
      this.lastControlCode_ = null;
      return;
    }
    // Store control codes
    if ((data & 0xf000) === 0x1000) {
      this.lastControlCode_ = data;
    } else if (data !== this.PADDING_) {
      this.lastControlCode_ = null;
    }
    char0 = data >>> 8;
    char1 = data & 0xff;
    if (data === this.PADDING_) {
      return;
    } else if (data === this.RESUME_CAPTION_LOADING_) {
      this.mode_ = 'popOn';
    } else if (data === this.END_OF_CAPTION_) {
      this.clearFormatting(packet.pts);
      // if a caption was being displayed, it's gone now
      this.flushDisplayed(packet.pts);
      // flip memory
      swap = this.displayed_;
      this.displayed_ = this.nonDisplayed_;
      this.nonDisplayed_ = swap;
      // start measuring the time to display the caption
      this.startPts_ = packet.pts;
    } else if (data === this.ROLL_UP_2_ROWS_) {
      this.topRow_ = BOTTOM_ROW - 1;
      this.mode_ = 'rollUp';
    } else if (data === this.ROLL_UP_3_ROWS_) {
      this.topRow_ = BOTTOM_ROW - 2;
      this.mode_ = 'rollUp';
    } else if (data === this.ROLL_UP_4_ROWS_) {
      this.topRow_ = BOTTOM_ROW - 3;
      this.mode_ = 'rollUp';
    } else if (data === this.CARRIAGE_RETURN_) {
      this.clearFormatting(packet.pts);
      this.flushDisplayed(packet.pts);
      this.shiftRowsUp_();
      this.startPts_ = packet.pts;
    } else if (data === this.BACKSPACE_) {
      // NOTE(review): pop-on text is appended at nonDisplayed_[this.row_]
      // (see popOn below), but backspace always trims the BOTTOM_ROW
      // entry — confirm whether this should use this.row_ instead
      if (this.mode_ === 'popOn') {
        this.nonDisplayed_[BOTTOM_ROW] = this.nonDisplayed_[BOTTOM_ROW].slice(0, -1);
      } else {
        this.displayed_[BOTTOM_ROW] = this.displayed_[BOTTOM_ROW].slice(0, -1);
      }
    } else if (data === this.ERASE_DISPLAYED_MEMORY_) {
      this.flushDisplayed(packet.pts);
      this.displayed_ = createDisplayBuffer();
    } else if (data === this.ERASE_NON_DISPLAYED_MEMORY_) {
      this.nonDisplayed_ = createDisplayBuffer();
    } else if (data === this.RESUME_DIRECT_CAPTIONING_) {
      this.mode_ = 'paintOn';
    // Append special characters to caption text
    } else if (this.isSpecialCharacter(char0, char1)) {
      // Bitmask char0 so that we can apply character transformations
      // regardless of field and data channel.
      // Then byte-shift to the left and OR with char1 so we can pass the
      // entire character code to `getCharFromCode`.
      char0 = (char0 & 0x03) << 8;
      text = getCharFromCode(char0 | char1);
      this[this.mode_](packet.pts, text);
      this.column_++;
    // Append extended characters to caption text
    } else if (this.isExtCharacter(char0, char1)) {
      // Extended characters always follow their "non-extended" equivalents.
      // IE if a "è" is desired, you'll always receive "eè"; non-compliant
      // decoders are supposed to drop the "è", while compliant decoders
      // backspace the "e" and insert "è".
      // Delete the previous character
      if (this.mode_ === 'popOn') {
        this.nonDisplayed_[this.row_] = this.nonDisplayed_[this.row_].slice(0, -1);
      } else {
        this.displayed_[BOTTOM_ROW] = this.displayed_[BOTTOM_ROW].slice(0, -1);
      }
      // Bitmask char0 so that we can apply character transformations
      // regardless of field and data channel.
      // Then byte-shift to the left and OR with char1 so we can pass the
      // entire character code to `getCharFromCode`.
      char0 = (char0 & 0x03) << 8;
      text = getCharFromCode(char0 | char1);
      this[this.mode_](packet.pts, text);
      this.column_++;
    // Process mid-row codes
    } else if (this.isMidRowCode(char0, char1)) {
      // Attributes are not additive, so clear all formatting
      this.clearFormatting(packet.pts);
      // According to the standard, mid-row codes
      // should be replaced with spaces, so add one now
      this[this.mode_](packet.pts, ' ');
      this.column_++;
      if ((char1 & 0xe) === 0xe) {
        this.addFormatting(packet.pts, ['i']);
      }
      if ((char1 & 0x1) === 0x1) {
        this.addFormatting(packet.pts, ['u']);
      }
    // Detect offset control codes and adjust cursor
    } else if (this.isOffsetControlCode(char0, char1)) {
      // Cursor position is set by indent PAC (see below) in 4-column
      // increments, with an additional offset code of 1-3 to reach any
      // of the 32 columns specified by CEA-608. So all we need to do
      // here is increment the column cursor by the given offset.
      this.column_ += (char1 & 0x03);
    // Detect PACs (Preamble Address Codes)
    } else if (this.isPAC(char0, char1)) {
      // There's no logic for PAC -> row mapping, so we have to just
      // find the row code in an array and use its index :(
      var row = ROWS.indexOf(data & 0x1f20);
      if (row !== this.row_) {
        // formatting is only persistent for current row
        this.clearFormatting(packet.pts);
        this.row_ = row;
      }
      // All PACs can apply underline, so detect and apply
      // (All odd-numbered second bytes set underline)
      if ((char1 & 0x1) && (this.formatting_.indexOf('u') === -1)) {
        this.addFormatting(packet.pts, ['u']);
      }
      if ((data & 0x10) === 0x10) {
        // We've got an indent level code. Each successive even number
        // increments the column cursor by 4, so we can get the desired
        // column position by bit-shifting to the right (to get n/2)
        // and multiplying by 4.
        this.column_ = ((data & 0xe) >> 1) * 4;
      }
      if (this.isColorPAC(char1)) {
        // it's a color code, though we only support white, which
        // can be either normal or italicized. white italics can be
        // either 0x4e or 0x6e depending on the row, so we just
        // bitwise-and with 0xe to see if italics should be turned on
        if ((char1 & 0xe) === 0xe) {
          this.addFormatting(packet.pts, ['i']);
        }
      }
    // We have a normal character in char0, and possibly one in char1
    } else if (this.isNormalChar(char0)) {
      if (char1 === 0x00) {
        char1 = null;
      }
      text = getCharFromCode(char0);
      text += getCharFromCode(char1);
      this[this.mode_](packet.pts, text);
      this.column_ += text.length;
    } // finish data processing
  };
};
Cea608Stream.prototype = new Stream();
  2244. // Trigger a cue point that captures the current state of the
  2245. // display buffer
  2246. Cea608Stream.prototype.flushDisplayed = function(pts) {
  2247. var content = this.displayed_
  2248. // remove spaces from the start and end of the string
  2249. .map(function(row) {
  2250. return row.trim();
  2251. })
  2252. // combine all text rows to display in one cue
  2253. .join('\n')
  2254. // and remove blank rows from the start and end, but not the middle
  2255. .replace(/^\n+|\n+$/g, '');
  2256. if (content.length) {
  2257. this.trigger('data', {
  2258. startPts: this.startPts_,
  2259. endPts: pts,
  2260. text: content,
  2261. stream: this.name_
  2262. });
  2263. }
  2264. };
/**
 * Zero out the data, used for startup and on seek
 */
Cea608Stream.prototype.reset = function() {
  this.mode_ = 'popOn';
  // When in roll-up mode, the index of the last row that will
  // actually display captions. If a caption is shifted to a row
  // with a lower index than this, it is cleared from the display
  // buffer
  this.topRow_ = 0;
  // pts at which the caption currently being assembled began
  this.startPts_ = 0;
  // on-screen and off-screen (pop-on staging) character matrices
  this.displayed_ = createDisplayBuffer();
  this.nonDisplayed_ = createDisplayBuffer();
  // last control code seen, used to drop the spec-mandated duplicates
  this.lastControlCode_ = null;
  // Track row and column for proper line-breaking and spacing
  this.column_ = 0;
  this.row_ = BOTTOM_ROW;
  // This variable holds currently-applied formatting
  this.formatting_ = [];
};
/**
 * Sets up control code and related constants for this instance
 */
Cea608Stream.prototype.setConstants = function() {
  // The following attributes have these uses:
  // ext_ : char0 for mid-row codes, and the base for extended
  // chars (ext_+0, ext_+1, and ext_+2 are char0s for
  // extended codes)
  // control_: char0 for control codes, except byte-shifted to the
  // left so that we can do this.control_ | CONTROL_CODE
  // offset_: char0 for tab offset codes
  //
  // It's also worth noting that control codes, and _only_ control codes,
  // differ between field 1 and field 2. Field 2 control codes are always
  // their field 1 value plus 1. That's why there's the "| field" on the
  // control value.
  if (this.dataChannel_ === 0) {
    this.BASE_ = 0x10;
    this.EXT_ = 0x11;
    this.CONTROL_ = (0x14 | this.field_) << 8;
    this.OFFSET_ = 0x17;
  } else if (this.dataChannel_ === 1) {
    this.BASE_ = 0x18;
    this.EXT_ = 0x19;
    this.CONTROL_ = (0x1c | this.field_) << 8;
    this.OFFSET_ = 0x1f;
  }
  // Constants for the LSByte command codes recognized by Cea608Stream. This
  // list is not exhaustive. For a more comprehensive listing and semantics see
  // http://www.gpo.gov/fdsys/pkg/CFR-2010-title47-vol1/pdf/CFR-2010-title47-vol1-sec15-119.pdf
  // Padding
  this.PADDING_ = 0x0000;
  // Pop-on Mode
  this.RESUME_CAPTION_LOADING_ = this.CONTROL_ | 0x20;
  this.END_OF_CAPTION_ = this.CONTROL_ | 0x2f;
  // Roll-up Mode
  this.ROLL_UP_2_ROWS_ = this.CONTROL_ | 0x25;
  this.ROLL_UP_3_ROWS_ = this.CONTROL_ | 0x26;
  this.ROLL_UP_4_ROWS_ = this.CONTROL_ | 0x27;
  this.CARRIAGE_RETURN_ = this.CONTROL_ | 0x2d;
  // paint-on mode (not supported)
  this.RESUME_DIRECT_CAPTIONING_ = this.CONTROL_ | 0x29;
  // Erasure
  this.BACKSPACE_ = this.CONTROL_ | 0x21;
  this.ERASE_DISPLAYED_MEMORY_ = this.CONTROL_ | 0x2c;
  this.ERASE_NON_DISPLAYED_MEMORY_ = this.CONTROL_ | 0x2e;
};
  2332. /**
  2333. * Detects if the 2-byte packet data is a special character
  2334. *
  2335. * Special characters have a second byte in the range 0x30 to 0x3f,
  2336. * with the first byte being 0x11 (for data channel 1) or 0x19 (for
  2337. * data channel 2).
  2338. *
  2339. * @param {Integer} char0 The first byte
  2340. * @param {Integer} char1 The second byte
  2341. * @return {Boolean} Whether the 2 bytes are an special character
  2342. */
  2343. Cea608Stream.prototype.isSpecialCharacter = function(char0, char1) {
  2344. return (char0 === this.EXT_ && char1 >= 0x30 && char1 <= 0x3f);
  2345. };
  2346. /**
  2347. * Detects if the 2-byte packet data is an extended character
  2348. *
  2349. * Extended characters have a second byte in the range 0x20 to 0x3f,
  2350. * with the first byte being 0x12 or 0x13 (for data channel 1) or
  2351. * 0x1a or 0x1b (for data channel 2).
  2352. *
  2353. * @param {Integer} char0 The first byte
  2354. * @param {Integer} char1 The second byte
  2355. * @return {Boolean} Whether the 2 bytes are an extended character
  2356. */
  2357. Cea608Stream.prototype.isExtCharacter = function(char0, char1) {
  2358. return ((char0 === (this.EXT_ + 1) || char0 === (this.EXT_ + 2)) &&
  2359. (char1 >= 0x20 && char1 <= 0x3f));
  2360. };
  2361. /**
  2362. * Detects if the 2-byte packet is a mid-row code
  2363. *
  2364. * Mid-row codes have a second byte in the range 0x20 to 0x2f, with
  2365. * the first byte being 0x11 (for data channel 1) or 0x19 (for data
  2366. * channel 2).
  2367. *
  2368. * @param {Integer} char0 The first byte
  2369. * @param {Integer} char1 The second byte
  2370. * @return {Boolean} Whether the 2 bytes are a mid-row code
  2371. */
  2372. Cea608Stream.prototype.isMidRowCode = function(char0, char1) {
  2373. return (char0 === this.EXT_ && (char1 >= 0x20 && char1 <= 0x2f));
  2374. };
  2375. /**
  2376. * Detects if the 2-byte packet is an offset control code
  2377. *
  2378. * Offset control codes have a second byte in the range 0x21 to 0x23,
  2379. * with the first byte being 0x17 (for data channel 1) or 0x1f (for
  2380. * data channel 2).
  2381. *
  2382. * @param {Integer} char0 The first byte
  2383. * @param {Integer} char1 The second byte
  2384. * @return {Boolean} Whether the 2 bytes are an offset control code
  2385. */
  2386. Cea608Stream.prototype.isOffsetControlCode = function(char0, char1) {
  2387. return (char0 === this.OFFSET_ && (char1 >= 0x21 && char1 <= 0x23));
  2388. };
  2389. /**
  2390. * Detects if the 2-byte packet is a Preamble Address Code
  2391. *
  2392. * PACs have a first byte in the range 0x10 to 0x17 (for data channel 1)
  2393. * or 0x18 to 0x1f (for data channel 2), with the second byte in the
  2394. * range 0x40 to 0x7f.
  2395. *
  2396. * @param {Integer} char0 The first byte
  2397. * @param {Integer} char1 The second byte
  2398. * @return {Boolean} Whether the 2 bytes are a PAC
  2399. */
  2400. Cea608Stream.prototype.isPAC = function(char0, char1) {
  2401. return (char0 >= this.BASE_ && char0 < (this.BASE_ + 8) &&
  2402. (char1 >= 0x40 && char1 <= 0x7f));
  2403. };
  2404. /**
  2405. * Detects if a packet's second byte is in the range of a PAC color code
  2406. *
  2407. * PAC color codes have the second byte be in the range 0x40 to 0x4f, or
  2408. * 0x60 to 0x6f.
  2409. *
  2410. * @param {Integer} char1 The second byte
  2411. * @return {Boolean} Whether the byte is a color PAC
  2412. */
  2413. Cea608Stream.prototype.isColorPAC = function(char1) {
  2414. return ((char1 >= 0x40 && char1 <= 0x4f) || (char1 >= 0x60 && char1 <= 0x7f));
  2415. };
  2416. /**
  2417. * Detects if a single byte is in the range of a normal character
  2418. *
  2419. * Normal text bytes are in the range 0x20 to 0x7f.
  2420. *
  2421. * @param {Integer} char The byte
  2422. * @return {Boolean} Whether the byte is a normal character
  2423. */
  2424. Cea608Stream.prototype.isNormalChar = function(char) {
  2425. return (char >= 0x20 && char <= 0x7f);
  2426. };
  2427. // Adds the opening HTML tag for the passed character to the caption text,
  2428. // and keeps track of it for later closing
  2429. Cea608Stream.prototype.addFormatting = function(pts, format) {
  2430. this.formatting_ = this.formatting_.concat(format);
  2431. var text = format.reduce(function(text, format) {
  2432. return text + '<' + format + '>';
  2433. }, '');
  2434. this[this.mode_](pts, text);
  2435. };
  2436. // Adds HTML closing tags for current formatting to caption text and
  2437. // clears remembered formatting
  2438. Cea608Stream.prototype.clearFormatting = function(pts) {
  2439. if (!this.formatting_.length) {
  2440. return;
  2441. }
  2442. var text = this.formatting_.reverse().reduce(function(text, format) {
  2443. return text + '</' + format + '>';
  2444. }, '');
  2445. this.formatting_ = [];
  2446. this[this.mode_](pts, text);
  2447. };
  2448. // Mode Implementations
  2449. Cea608Stream.prototype.popOn = function(pts, text) {
  2450. var baseRow = this.nonDisplayed_[this.row_];
  2451. // buffer characters
  2452. baseRow += text;
  2453. this.nonDisplayed_[this.row_] = baseRow;
  2454. };
  2455. Cea608Stream.prototype.rollUp = function(pts, text) {
  2456. var baseRow = this.displayed_[BOTTOM_ROW];
  2457. baseRow += text;
  2458. this.displayed_[BOTTOM_ROW] = baseRow;
  2459. };
  2460. Cea608Stream.prototype.shiftRowsUp_ = function() {
  2461. var i;
  2462. // clear out inactive rows
  2463. for (i = 0; i < this.topRow_; i++) {
  2464. this.displayed_[i] = '';
  2465. }
  2466. // shift displayed rows up
  2467. for (i = this.topRow_; i < BOTTOM_ROW; i++) {
  2468. this.displayed_[i] = this.displayed_[i + 1];
  2469. }
  2470. // clear out the bottom row
  2471. this.displayed_[BOTTOM_ROW] = '';
  2472. };
// paintOn mode is not implemented; direct-captioning text is discarded
Cea608Stream.prototype.paintOn = function() {};
// exports: the public API of the caption-stream module
module.exports = {
  CaptionStream: CaptionStream,
  Cea608Stream: Cea608Stream
};
  2480. },{"../utils/stream":33}],18:[function(require,module,exports){
  2481. module.exports = require('./m2ts');
  2482. },{"./m2ts":19}],19:[function(require,module,exports){
  2483. /**
  2484. * mux.js
  2485. *
  2486. * Copyright (c) 2015 Brightcove
  2487. * All rights reserved.
  2488. *
  2489. * A stream-based mp2t to mp4 converter. This utility can be used to
  2490. * deliver mp4s to a SourceBuffer on platforms that support native
  2491. * Media Source Extensions.
  2492. */
'use strict';
var Stream = require('../utils/stream.js'),
  CaptionStream = require('./caption-stream'),
  StreamTypes = require('./stream-types'),
  TimestampRolloverStream = require('./timestamp-rollover-stream').TimestampRolloverStream;
var m2tsStreamTypes = require('./stream-types.js');
// object types
var TransportPacketStream, TransportParseStream, ElementaryStream;
// constants
var
  MP2T_PACKET_LENGTH = 188, // bytes per MP2T packet
  SYNC_BYTE = 0x47; // 'G': the byte that begins every transport packet
/**
 * Splits an incoming stream of binary data into MPEG-2 Transport
 * Stream packets.
 */
TransportPacketStream = function() {
  var
    // holds partial-packet bytes carried over between push() calls
    buffer = new Uint8Array(MP2T_PACKET_LENGTH),
    bytesInBuffer = 0;
  TransportPacketStream.prototype.init.call(this);

  /**
   * Deliver new bytes to the stream.
   * @param bytes {Uint8Array} raw segment bytes; may begin or end
   *   mid-packet — partial packets are buffered across calls
   */
  this.push = function(bytes) {
    var
      startIndex = 0,
      endIndex = MP2T_PACKET_LENGTH,
      everything;
    // If there are bytes remaining from the last segment, prepend them to the
    // bytes that were pushed in
    if (bytesInBuffer) {
      everything = new Uint8Array(bytes.byteLength + bytesInBuffer);
      everything.set(buffer.subarray(0, bytesInBuffer));
      everything.set(bytes, bytesInBuffer);
      bytesInBuffer = 0;
    } else {
      everything = bytes;
    }
    // While we have enough data for a packet
    while (endIndex < everything.byteLength) {
      // Look for a pair of start and end sync bytes in the data..
      if (everything[startIndex] === SYNC_BYTE && everything[endIndex] === SYNC_BYTE) {
        // We found a packet so emit it and jump one whole packet forward in
        // the stream
        this.trigger('data', everything.subarray(startIndex, endIndex));
        startIndex += MP2T_PACKET_LENGTH;
        endIndex += MP2T_PACKET_LENGTH;
        continue;
      }
      // If we get here, we have somehow become de-synchronized and we need to step
      // forward one byte at a time until we find a pair of sync bytes that denote
      // a packet
      startIndex++;
      endIndex++;
    }
    // If there was some data left over at the end of the segment that couldn't
    // possibly be a whole packet, keep it because it might be the start of a packet
    // that continues in the next segment
    if (startIndex < everything.byteLength) {
      buffer.set(everything.subarray(startIndex), 0);
      bytesInBuffer = everything.byteLength - startIndex;
    }
  };

  /**
   * Flush the stream at a segment boundary.
   */
  this.flush = function() {
    // If the buffer contains a whole packet when we are being flushed, emit it
    // and empty the buffer. Otherwise hold onto the data because it may be
    // important for decoding the next segment
    if (bytesInBuffer === MP2T_PACKET_LENGTH && buffer[0] === SYNC_BYTE) {
      this.trigger('data', buffer);
      bytesInBuffer = 0;
    }
    this.trigger('done');
  };
};
TransportPacketStream.prototype = new Stream();
/**
 * Accepts an MP2T TransportPacketStream and emits data events with parsed
 * forms of the individual transport stream packets: 'pat', 'pmt' and 'pes'
 * results. PES packets that arrive before the first PMT has been parsed are
 * queued until their stream type can be resolved.
 */
TransportParseStream = function() {
  var parsePsi, parsePat, parsePmt, self;
  TransportParseStream.prototype.init.call(this);
  self = this;

  // PES packets seen before the first PMT; each entry is the
  // [packet, offset, result] argument list for processPes_
  this.packetsWaitingForPmt = [];
  // elementary PID -> stream info; undefined until a PMT has been parsed
  this.programMapTable = undefined;

  // Route a program-specific information (PSI) payload to the PAT or PMT
  // parser based on the type recorded on `psi`.
  parsePsi = function(payload, psi) {
    var offset = 0;

    // PSI packets may be split into multiple sections and those
    // sections may be split into multiple packets. If a PSI
    // section starts in this packet, the payload_unit_start_indicator
    // will be true and the first byte of the payload will indicate
    // the offset from the current position to the start of the
    // section.
    if (psi.payloadUnitStartIndicator) {
      offset += payload[offset] + 1;
    }

    if (psi.type === 'pat') {
      parsePat(payload.subarray(offset), psi);
    } else {
      parsePmt(payload.subarray(offset), psi);
    }
  };

  // Parse the fields of a Program Association Table (PAT) that this
  // pipeline uses: section numbering and the PID of the first PMT entry.
  parsePat = function(payload, pat) {
    pat.section_number = payload[7]; // eslint-disable-line camelcase
    pat.last_section_number = payload[8]; // eslint-disable-line camelcase

    // skip the PSI header and parse the first PMT entry
    self.pmtPid = (payload[10] & 0x1F) << 8 | payload[11];
    pat.pmtPid = self.pmtPid;
  };

  /**
   * Parse out the relevant fields of a Program Map Table (PMT).
   * @param payload {Uint8Array} the PMT-specific portion of an MP2T
   * packet. The first byte in this array should be the table_id
   * field.
   * @param pmt {object} the object that should be decorated with
   * fields parsed from the PMT.
   */
  parsePmt = function(payload, pmt) {
    var sectionLength, tableEnd, programInfoLength, offset;

    // PMTs can be sent ahead of the time when they should actually
    // take effect. We don't believe this should ever be the case
    // for HLS but we'll ignore "forward" PMT declarations if we see
    // them. Future PMT declarations have the current_next_indicator
    // set to zero.
    if (!(payload[5] & 0x01)) {
      return;
    }

    // overwrite any existing program map table
    self.programMapTable = {
      video: null,
      audio: null,
      'timed-metadata': {}
    };

    // the mapping table ends at the end of the current section
    sectionLength = (payload[1] & 0x0f) << 8 | payload[2];
    tableEnd = 3 + sectionLength - 4;

    // to determine where the table is, we have to figure out how
    // long the program info descriptors are
    programInfoLength = (payload[10] & 0x0f) << 8 | payload[11];

    // advance the offset to the first entry in the mapping table
    offset = 12 + programInfoLength;
    while (offset < tableEnd) {
      var streamType = payload[offset];
      var pid = (payload[offset + 1] & 0x1F) << 8 | payload[offset + 2];

      // only map a single elementary_pid for audio and video stream types
      // TODO: should this be done for metadata too? for now maintain behavior of
      // multiple metadata streams
      if (streamType === StreamTypes.H264_STREAM_TYPE &&
          self.programMapTable.video === null) {
        self.programMapTable.video = pid;
      } else if (streamType === StreamTypes.ADTS_STREAM_TYPE &&
                 self.programMapTable.audio === null) {
        self.programMapTable.audio = pid;
      } else if (streamType === StreamTypes.METADATA_STREAM_TYPE) {
        // map pid to stream type for metadata streams
        self.programMapTable['timed-metadata'][pid] = streamType;
      }

      // move to the next table entry
      // skip past the elementary stream descriptors, if present
      offset += ((payload[offset + 3] & 0x0F) << 8 | payload[offset + 4]) + 5;
    }

    // record the map on the packet as well
    pmt.programMapTable = self.programMapTable;
  };

  /**
   * Deliver a new MP2T packet to the stream.
   */
  this.push = function(packet) {
    var
      result = {},
      offset = 4;

    result.payloadUnitStartIndicator = !!(packet[1] & 0x40);

    // pid is a 13-bit field starting at the last bit of packet[1]
    result.pid = packet[1] & 0x1f;
    result.pid <<= 8;
    result.pid |= packet[2];

    // if an adaption field is present, its length is specified by the
    // fifth byte of the TS packet header. The adaptation field is
    // used to add stuffing to PES packets that don't fill a complete
    // TS packet, and to specify some forms of timing and control data
    // that we do not currently use.
    if (((packet[3] & 0x30) >>> 4) > 0x01) {
      offset += packet[offset] + 1;
    }

    // parse the rest of the packet based on the type
    if (result.pid === 0) {
      result.type = 'pat';
      parsePsi(packet.subarray(offset), result);
      this.trigger('data', result);
    } else if (result.pid === this.pmtPid) {
      result.type = 'pmt';
      parsePsi(packet.subarray(offset), result);
      this.trigger('data', result);

      // if there are any packets waiting for a PMT to be found, process them now
      while (this.packetsWaitingForPmt.length) {
        this.processPes_.apply(this, this.packetsWaitingForPmt.shift());
      }
    } else if (this.programMapTable === undefined) {
      // When we have not seen a PMT yet, defer further processing of
      // PES packets until one has been parsed
      this.packetsWaitingForPmt.push([packet, offset, result]);
    } else {
      this.processPes_(packet, offset, result);
    }
  };

  // Resolve the stream type for a PES packet from the program map table
  // and emit it as a 'pes' data event.
  this.processPes_ = function(packet, offset, result) {
    // set the appropriate stream type
    if (result.pid === this.programMapTable.video) {
      result.streamType = StreamTypes.H264_STREAM_TYPE;
    } else if (result.pid === this.programMapTable.audio) {
      result.streamType = StreamTypes.ADTS_STREAM_TYPE;
    } else {
      // if not video or audio, it is timed-metadata or unknown
      // if unknown, streamType will be undefined
      result.streamType = this.programMapTable['timed-metadata'][result.pid];
    }

    result.type = 'pes';
    result.data = packet.subarray(offset);
    this.trigger('data', result);
  };
};
// inherit the event-emitter/pipe interface from Stream
TransportParseStream.prototype = new Stream();
// stream_type values recognized by this parser (see ./stream-types for the
// canonical constants used elsewhere in the pipeline)
TransportParseStream.STREAM_TYPES = {
  h264: 0x1b,
  adts: 0x0f
};
/**
 * Reconsistutes program elementary stream (PES) packets from parsed
 * transport stream packets. That is, if you pipe an
 * mp2t.TransportParseStream into a mp2t.ElementaryStream, the output
 * events will be events which capture the bytes for individual PES
 * packets plus relevant metadata that has been extracted from the
 * container.
 */
ElementaryStream = function() {
  var
    self = this,

    // PES packet fragments, one accumulator per elementary stream type.
    // `data` collects the parsed TS packets for the in-progress PES packet
    // and `size` tracks the total number of payload bytes buffered so far.
    video = {
      data: [],
      size: 0
    },
    audio = {
      data: [],
      size: 0
    },
    timedMetadata = {
      data: [],
      size: 0
    },

    // Decorate `pes` with the fields of the PES header at the start of
    // `payload`: packetLength, dataAlignmentIndicator, pts/dts (when
    // present) and the payload bytes following the header.
    parsePes = function(payload, pes) {
      var ptsDtsFlags;

      // get the packet length, this will be 0 for video
      pes.packetLength = 6 + ((payload[4] << 8) | payload[5]);

      // find out if this packets starts a new keyframe
      pes.dataAlignmentIndicator = (payload[6] & 0x04) !== 0;

      // PES packets may be annotated with a PTS value, or a PTS value
      // and a DTS value. Determine what combination of values is
      // available to work with.
      ptsDtsFlags = payload[7];

      // PTS and DTS are normally stored as a 33-bit number. Javascript
      // performs all bitwise operations on 32-bit integers but javascript
      // supports a much greater range (52-bits) of integer using standard
      // mathematical operations.
      // We construct a 31-bit value using bitwise operators over the 31
      // most significant bits and then multiply by 4 (equal to a left-shift
      // of 2) before we add the final 2 least significant bits of the
      // timestamp (equal to an OR.)
      if (ptsDtsFlags & 0xC0) {
        // the PTS and DTS are not written out directly. For information
        // on how they are encoded, see
        // http://dvd.sourceforge.net/dvdinfo/pes-hdr.html
        pes.pts = (payload[9] & 0x0E) << 27 |
          (payload[10] & 0xFF) << 20 |
          (payload[11] & 0xFE) << 12 |
          (payload[12] & 0xFF) << 5 |
          (payload[13] & 0xFE) >>> 3;
        pes.pts *= 4; // Left shift by 2
        pes.pts += (payload[13] & 0x06) >>> 1; // OR by the two LSBs

        // when no explicit DTS is present, it is defined to equal the PTS
        pes.dts = pes.pts;
        if (ptsDtsFlags & 0x40) {
          pes.dts = (payload[14] & 0x0E) << 27 |
            (payload[15] & 0xFF) << 20 |
            (payload[16] & 0xFE) << 12 |
            (payload[17] & 0xFF) << 5 |
            (payload[18] & 0xFE) >>> 3;
          pes.dts *= 4; // Left shift by 2
          pes.dts += (payload[18] & 0x06) >>> 1; // OR by the two LSBs
        }
      }

      // the data section starts immediately after the PES header.
      // pes_header_data_length specifies the number of header bytes
      // that follow the last byte of the field.
      pes.data = payload.subarray(9 + payload[8]);
    },

    // Reassemble the buffered fragments of `stream` into a single packet,
    // parse its PES header, and emit it as a 'data' event of `type`. When
    // `forceFlush` is true the accumulator is emptied even if the packet
    // appears incomplete.
    flushStream = function(stream, type, forceFlush) {
      var
        packetData = new Uint8Array(stream.size),
        event = {
          type: type
        },
        i = 0,
        offset = 0,
        packetFlushable = false,
        fragment;

      // do nothing if there is not enough buffered data for a complete
      // PES header
      if (!stream.data.length || stream.size < 9) {
        return;
      }
      event.trackId = stream.data[0].pid;

      // reassemble the packet
      for (i = 0; i < stream.data.length; i++) {
        fragment = stream.data[i];

        packetData.set(fragment.data, offset);
        offset += fragment.data.byteLength;
      }

      // parse assembled packet's PES header
      parsePes(packetData, event);

      // non-video PES packets MUST have a non-zero PES_packet_length
      // check that there is enough stream data to fill the packet
      packetFlushable = type === 'video' || event.packetLength <= stream.size;

      // flush pending packets if the conditions are right
      if (forceFlush || packetFlushable) {
        stream.size = 0;
        stream.data.length = 0;
      }

      // only emit packets that are complete. this is to avoid assembling
      // incomplete PES packets due to poor segmentation
      if (packetFlushable) {
        self.trigger('data', event);
      }
    };

  ElementaryStream.prototype.init.call(this);

  /**
   * Route an incoming parsed TS packet by its type: PATs are ignored
   * (the PMT carries the useful metadata), PMTs are translated into
   * 'metadata' track events, and PES fragments are buffered until a
   * complete packet can be emitted.
   */
  this.push = function(data) {
    ({
      pat: function() {
        // we have to wait for the PMT to arrive as well before we
        // have any meaningful metadata
      },
      pes: function() {
        var stream, streamType;

        // pick the accumulator that matches this packet's stream type
        switch (data.streamType) {
        case StreamTypes.H264_STREAM_TYPE:
        case m2tsStreamTypes.H264_STREAM_TYPE:
          stream = video;
          streamType = 'video';
          break;
        case StreamTypes.ADTS_STREAM_TYPE:
          stream = audio;
          streamType = 'audio';
          break;
        case StreamTypes.METADATA_STREAM_TYPE:
          stream = timedMetadata;
          streamType = 'timed-metadata';
          break;
        default:
          // ignore unknown stream types
          return;
        }

        // if a new packet is starting, we can flush the completed
        // packet
        if (data.payloadUnitStartIndicator) {
          flushStream(stream, streamType, true);
        }

        // buffer this fragment until we are sure we've received the
        // complete payload
        stream.data.push(data);
        stream.size += data.data.byteLength;
      },
      pmt: function() {
        var
          event = {
            type: 'metadata',
            tracks: []
          },
          programMapTable = data.programMapTable;

        // translate audio and video streams to tracks
        if (programMapTable.video !== null) {
          event.tracks.push({
            timelineStartInfo: {
              baseMediaDecodeTime: 0
            },
            id: +programMapTable.video,
            codec: 'avc',
            type: 'video'
          });
        }
        if (programMapTable.audio !== null) {
          event.tracks.push({
            timelineStartInfo: {
              baseMediaDecodeTime: 0
            },
            id: +programMapTable.audio,
            codec: 'adts',
            type: 'audio'
          });
        }
        self.trigger('data', event);
      }
    })[data.type]();
  };

  /**
   * Flush any remaining input. Video PES packets may be of variable
   * length. Normally, the start of a new video packet can trigger the
   * finalization of the previous packet. That is not possible if no
   * more video is forthcoming, however. In that case, some other
   * mechanism (like the end of the file) has to be employed. When it is
   * clear that no additional data is forthcoming, calling this method
   * will flush the buffered packets.
   */
  this.flush = function() {
    // !!THIS ORDER IS IMPORTANT!!
    // video first then audio
    flushStream(video, 'video');
    flushStream(audio, 'audio');
    flushStream(timedMetadata, 'timed-metadata');
    this.trigger('done');
  };
};
// inherit the event-emitter/pipe interface from Stream
ElementaryStream.prototype = new Stream();

// public API of the m2ts module
var m2ts = {
  PAT_PID: 0x0000,
  MP2T_PACKET_LENGTH: MP2T_PACKET_LENGTH,
  TransportPacketStream: TransportPacketStream,
  TransportParseStream: TransportParseStream,
  ElementaryStream: ElementaryStream,
  TimestampRolloverStream: TimestampRolloverStream,
  CaptionStream: CaptionStream.CaptionStream,
  Cea608Stream: CaptionStream.Cea608Stream,
  MetadataStream: require('./metadata-stream')
};

// re-export every stream type constant (H264_STREAM_TYPE, etc.) on m2ts
// for convenience
for (var type in StreamTypes) {
  if (StreamTypes.hasOwnProperty(type)) {
    m2ts[type] = StreamTypes[type];
  }
}

module.exports = m2ts;
  2930. },{"../utils/stream.js":33,"./caption-stream":17,"./metadata-stream":20,"./stream-types":22,"./stream-types.js":22,"./timestamp-rollover-stream":23}],20:[function(require,module,exports){
  2931. /**
  2932. * Accepts program elementary stream (PES) data events and parses out
  2933. * ID3 metadata from them, if present.
  2934. * @see http://id3.org/id3v2.3.0
  2935. */
  2936. 'use strict';
  2937. var
  2938. Stream = require('../utils/stream'),
  2939. StreamTypes = require('./stream-types'),
  2940. // return a percent-encoded representation of the specified byte range
  2941. // @see http://en.wikipedia.org/wiki/Percent-encoding
  2942. percentEncode = function(bytes, start, end) {
  2943. var i, result = '';
  2944. for (i = start; i < end; i++) {
  2945. result += '%' + ('00' + bytes[i].toString(16)).slice(-2);
  2946. }
  2947. return result;
  2948. },
  2949. // return the string representation of the specified byte range,
  2950. // interpreted as UTf-8.
  2951. parseUtf8 = function(bytes, start, end) {
  2952. return decodeURIComponent(percentEncode(bytes, start, end));
  2953. },
  2954. // return the string representation of the specified byte range,
  2955. // interpreted as ISO-8859-1.
  2956. parseIso88591 = function(bytes, start, end) {
  2957. return unescape(percentEncode(bytes, start, end)); // jshint ignore:line
  2958. },
  2959. parseSyncSafeInteger = function(data) {
  2960. return (data[0] << 21) |
  2961. (data[1] << 14) |
  2962. (data[2] << 7) |
  2963. (data[3]);
  2964. },
  2965. tagParsers = {
  2966. TXXX: function(tag) {
  2967. var i;
  2968. if (tag.data[0] !== 3) {
  2969. // ignore frames with unrecognized character encodings
  2970. return;
  2971. }
  2972. for (i = 1; i < tag.data.length; i++) {
  2973. if (tag.data[i] === 0) {
  2974. // parse the text fields
  2975. tag.description = parseUtf8(tag.data, 1, i);
  2976. // do not include the null terminator in the tag value
  2977. tag.value = parseUtf8(tag.data, i + 1, tag.data.length).replace(/\0*$/, '');
  2978. break;
  2979. }
  2980. }
  2981. tag.data = tag.value;
  2982. },
  2983. WXXX: function(tag) {
  2984. var i;
  2985. if (tag.data[0] !== 3) {
  2986. // ignore frames with unrecognized character encodings
  2987. return;
  2988. }
  2989. for (i = 1; i < tag.data.length; i++) {
  2990. if (tag.data[i] === 0) {
  2991. // parse the description and URL fields
  2992. tag.description = parseUtf8(tag.data, 1, i);
  2993. tag.url = parseUtf8(tag.data, i + 1, tag.data.length);
  2994. break;
  2995. }
  2996. }
  2997. },
  2998. PRIV: function(tag) {
  2999. var i;
  3000. for (i = 0; i < tag.data.length; i++) {
  3001. if (tag.data[i] === 0) {
  3002. // parse the description and URL fields
  3003. tag.owner = parseIso88591(tag.data, 0, i);
  3004. break;
  3005. }
  3006. }
  3007. tag.privateData = tag.data.subarray(i + 1);
  3008. tag.data = tag.privateData;
  3009. }
  3010. },
  3011. MetadataStream;
/**
 * Accepts timed-metadata PES packets, buffers them until an entire ID3 tag
 * has arrived, parses the tag's frames, and emits the parsed tag as a
 * 'data' event. PRIV frames carrying the Apple transport stream timestamp
 * additionally produce a 'timestamp' event.
 * @param options {object} [optional] `debug` enables logging of skipped
 * packets; `descriptor` supplies the program-level descriptor bytes used
 * to compute the in-band text track dispatch type.
 */
MetadataStream = function(options) {
  var
    settings = {
      debug: !!(options && options.debug),

      // the bytes of the program-level descriptor field in MP2T
      // see ISO/IEC 13818-1:2013 (E), section 2.6 "Program and
      // program element descriptors"
      descriptor: options && options.descriptor
    },

    // the total size in bytes of the ID3 tag being parsed
    tagSize = 0,

    // tag data that is not complete enough to be parsed
    buffer = [],

    // the total number of bytes currently in the buffer
    bufferSize = 0,

    i;

  MetadataStream.prototype.init.call(this);

  // calculate the text track in-band metadata track dispatch type
  // https://html.spec.whatwg.org/multipage/embedded-content.html#steps-to-expose-a-media-resource-specific-text-track
  this.dispatchType = StreamTypes.METADATA_STREAM_TYPE.toString(16);
  if (settings.descriptor) {
    for (i = 0; i < settings.descriptor.length; i++) {
      this.dispatchType += ('00' + settings.descriptor[i].toString(16)).slice(-2);
    }
  }

  /**
   * Buffer a timed-metadata chunk; once a complete ID3 tag has been
   * collected, parse its frames and emit the tag.
   */
  this.push = function(chunk) {
    var tag, frameStart, frameSize, frame, i, frameHeader;
    if (chunk.type !== 'timed-metadata') {
      return;
    }

    // if data_alignment_indicator is set in the PES header,
    // we must have the start of a new ID3 tag. Assume anything
    // remaining in the buffer was malformed and throw it out
    if (chunk.dataAlignmentIndicator) {
      bufferSize = 0;
      buffer.length = 0;
    }

    // ignore events that don't look like ID3 data
    if (buffer.length === 0 &&
        (chunk.data.length < 10 ||
          chunk.data[0] !== 'I'.charCodeAt(0) ||
          chunk.data[1] !== 'D'.charCodeAt(0) ||
          chunk.data[2] !== '3'.charCodeAt(0))) {
      if (settings.debug) {
        // eslint-disable-next-line no-console
        console.log('Skipping unrecognized metadata packet');
      }
      return;
    }

    // add this chunk to the data we've collected so far
    buffer.push(chunk);
    bufferSize += chunk.data.byteLength;

    // grab the size of the entire frame from the ID3 header
    if (buffer.length === 1) {
      // the frame size is transmitted as a 28-bit integer in the
      // last four bytes of the ID3 header.
      // The most significant bit of each byte is dropped and the
      // results concatenated to recover the actual value.
      tagSize = parseSyncSafeInteger(chunk.data.subarray(6, 10));

      // ID3 reports the tag size excluding the header but it's more
      // convenient for our comparisons to include it
      tagSize += 10;
    }

    // if the entire frame has not arrived, wait for more data
    if (bufferSize < tagSize) {
      return;
    }

    // collect the entire frame so it can be parsed
    tag = {
      data: new Uint8Array(tagSize),
      frames: [],
      pts: buffer[0].pts,
      dts: buffer[0].dts
    };
    // copy buffered chunks into the tag, consuming them as we go; any
    // bytes past tagSize in the final chunk are intentionally dropped
    for (i = 0; i < tagSize;) {
      tag.data.set(buffer[0].data.subarray(0, tagSize - i), i);
      i += buffer[0].data.byteLength;
      bufferSize -= buffer[0].data.byteLength;
      buffer.shift();
    }

    // find the start of the first frame and the end of the tag
    frameStart = 10;
    if (tag.data[5] & 0x40) {
      // advance the frame start past the extended header
      frameStart += 4; // header size field
      frameStart += parseSyncSafeInteger(tag.data.subarray(10, 14));

      // clip any padding off the end
      tagSize -= parseSyncSafeInteger(tag.data.subarray(16, 20));
    }

    // parse one or more ID3 frames
    // http://id3.org/id3v2.3.0#ID3v2_frame_overview
    do {
      // determine the number of bytes in this frame
      frameSize = parseSyncSafeInteger(tag.data.subarray(frameStart + 4, frameStart + 8));
      if (frameSize < 1) {
        // eslint-disable-next-line no-console
        return console.log('Malformed ID3 frame encountered. Skipping metadata parsing.');
      }
      frameHeader = String.fromCharCode(tag.data[frameStart],
                                        tag.data[frameStart + 1],
                                        tag.data[frameStart + 2],
                                        tag.data[frameStart + 3]);

      frame = {
        id: frameHeader,
        data: tag.data.subarray(frameStart + 10, frameStart + frameSize + 10)
      };
      frame.key = frame.id;

      // apply a frame-specific parser, when one is registered for this id
      if (tagParsers[frame.id]) {
        tagParsers[frame.id](frame);

        // handle the special PRIV frame used to indicate the start
        // time for raw AAC data
        if (frame.owner === 'com.apple.streaming.transportStreamTimestamp') {
          var
            d = frame.data,
            // the timestamp is a 33-bit value; build the top 31 bits with
            // bitwise math, then shift in the final two bits using regular
            // arithmetic (same technique as the PTS/DTS parsing)
            size = ((d[3] & 0x01) << 30) |
              (d[4] << 22) |
              (d[5] << 14) |
              (d[6] << 6) |
              (d[7] >>> 2);

          size *= 4;
          size += d[7] & 0x03;
          frame.timeStamp = size;
          // in raw AAC, all subsequent data will be timestamped based
          // on the value of this frame
          // we couldn't have known the appropriate pts and dts before
          // parsing this ID3 tag so set those values now
          if (tag.pts === undefined && tag.dts === undefined) {
            tag.pts = frame.timeStamp;
            tag.dts = frame.timeStamp;
          }
          this.trigger('timestamp', frame);
        }
      }

      tag.frames.push(frame);

      frameStart += 10; // advance past the frame header
      frameStart += frameSize; // advance past the frame body
    } while (frameStart < tagSize);

    this.trigger('data', tag);
  };
};
// inherit the event-emitter/pipe interface from Stream
MetadataStream.prototype = new Stream();
module.exports = MetadataStream;
  3154. },{"../utils/stream":33,"./stream-types":22}],21:[function(require,module,exports){
  3155. /**
  3156. * mux.js
  3157. *
  3158. * Copyright (c) 2016 Brightcove
  3159. * All rights reserved.
  3160. *
  3161. * Utilities to detect basic properties and metadata about TS Segments.
  3162. */
  3163. 'use strict';
  3164. var StreamTypes = require('./stream-types.js');
  3165. var parsePid = function(packet) {
  3166. var pid = packet[1] & 0x1f;
  3167. pid <<= 8;
  3168. pid |= packet[2];
  3169. return pid;
  3170. };
  3171. var parsePayloadUnitStartIndicator = function(packet) {
  3172. return !!(packet[1] & 0x40);
  3173. };
  3174. var parseAdaptionField = function(packet) {
  3175. var offset = 0;
  3176. // if an adaption field is present, its length is specified by the
  3177. // fifth byte of the TS packet header. The adaptation field is
  3178. // used to add stuffing to PES packets that don't fill a complete
  3179. // TS packet, and to specify some forms of timing and control data
  3180. // that we do not currently use.
  3181. if (((packet[3] & 0x30) >>> 4) > 0x01) {
  3182. offset += packet[4] + 1;
  3183. }
  3184. return offset;
  3185. };
  3186. var parseType = function(packet, pmtPid) {
  3187. var pid = parsePid(packet);
  3188. if (pid === 0) {
  3189. return 'pat';
  3190. } else if (pid === pmtPid) {
  3191. return 'pmt';
  3192. } else if (pmtPid) {
  3193. return 'pes';
  3194. }
  3195. return null;
  3196. };
  3197. var parsePat = function(packet) {
  3198. var pusi = parsePayloadUnitStartIndicator(packet);
  3199. var offset = 4 + parseAdaptionField(packet);
  3200. if (pusi) {
  3201. offset += packet[offset] + 1;
  3202. }
  3203. return (packet[offset + 10] & 0x1f) << 8 | packet[offset + 11];
  3204. };
  3205. var parsePmt = function(packet) {
  3206. var programMapTable = {};
  3207. var pusi = parsePayloadUnitStartIndicator(packet);
  3208. var payloadOffset = 4 + parseAdaptionField(packet);
  3209. if (pusi) {
  3210. payloadOffset += packet[payloadOffset] + 1;
  3211. }
  3212. // PMTs can be sent ahead of the time when they should actually
  3213. // take effect. We don't believe this should ever be the case
  3214. // for HLS but we'll ignore "forward" PMT declarations if we see
  3215. // them. Future PMT declarations have the current_next_indicator
  3216. // set to zero.
  3217. if (!(packet[payloadOffset + 5] & 0x01)) {
  3218. return;
  3219. }
  3220. var sectionLength, tableEnd, programInfoLength;
  3221. // the mapping table ends at the end of the current section
  3222. sectionLength = (packet[payloadOffset + 1] & 0x0f) << 8 | packet[payloadOffset + 2];
  3223. tableEnd = 3 + sectionLength - 4;
  3224. // to determine where the table is, we have to figure out how
  3225. // long the program info descriptors are
  3226. programInfoLength = (packet[payloadOffset + 10] & 0x0f) << 8 | packet[payloadOffset + 11];
  3227. // advance the offset to the first entry in the mapping table
  3228. var offset = 12 + programInfoLength;
  3229. while (offset < tableEnd) {
  3230. var i = payloadOffset + offset;
  3231. // add an entry that maps the elementary_pid to the stream_type
  3232. programMapTable[(packet[i + 1] & 0x1F) << 8 | packet[i + 2]] = packet[i];
  3233. // move to the next table entry
  3234. // skip past the elementary stream descriptors, if present
  3235. offset += ((packet[i + 3] & 0x0F) << 8 | packet[i + 4]) + 5;
  3236. }
  3237. return programMapTable;
  3238. };
  3239. var parsePesType = function(packet, programMapTable) {
  3240. var pid = parsePid(packet);
  3241. var type = programMapTable[pid];
  3242. switch (type) {
  3243. case StreamTypes.H264_STREAM_TYPE:
  3244. return 'video';
  3245. case StreamTypes.ADTS_STREAM_TYPE:
  3246. return 'audio';
  3247. case StreamTypes.METADATA_STREAM_TYPE:
  3248. return 'timed-metadata';
  3249. default:
  3250. return null;
  3251. }
  3252. };
/**
 * Read the PTS and DTS values out of the PES header that begins in this
 * TS packet.
 * @param packet {Uint8Array} a TS packet whose payload starts a PES packet
 * @return {object|null} an object with `pts` and `dts`, or null when the
 * packet does not start a payload unit, contains only adaptation-field
 * stuffing, or carries no PTS/DTS flags
 */
var parsePesTime = function(packet) {
  var pusi = parsePayloadUnitStartIndicator(packet);
  if (!pusi) {
    return null;
  }

  var offset = 4 + parseAdaptionField(packet);

  if (offset >= packet.byteLength) {
    // From the H 222.0 MPEG-TS spec
    // "For transport stream packets carrying PES packets, stuffing is needed when there
    // is insufficient PES packet data to completely fill the transport stream packet
    // payload bytes. Stuffing is accomplished by defining an adaptation field longer than
    // the sum of the lengths of the data elements in it, so that the payload bytes
    // remaining after the adaptation field exactly accommodates the available PES packet
    // data."
    //
    // If the offset is >= the length of the packet, then the packet contains no data
    // and instead is just adaption field stuffing bytes
    return null;
  }

  var pes = null;
  var ptsDtsFlags;

  // PES packets may be annotated with a PTS value, or a PTS value
  // and a DTS value. Determine what combination of values is
  // available to work with.
  ptsDtsFlags = packet[offset + 7];

  // PTS and DTS are normally stored as a 33-bit number. Javascript
  // performs all bitwise operations on 32-bit integers but javascript
  // supports a much greater range (52-bits) of integer using standard
  // mathematical operations.
  // We construct a 31-bit value using bitwise operators over the 31
  // most significant bits and then multiply by 4 (equal to a left-shift
  // of 2) before we add the final 2 least significant bits of the
  // timestamp (equal to an OR.)
  if (ptsDtsFlags & 0xC0) {
    pes = {};
    // the PTS and DTS are not written out directly. For information
    // on how they are encoded, see
    // http://dvd.sourceforge.net/dvdinfo/pes-hdr.html
    pes.pts = (packet[offset + 9] & 0x0E) << 27 |
      (packet[offset + 10] & 0xFF) << 20 |
      (packet[offset + 11] & 0xFE) << 12 |
      (packet[offset + 12] & 0xFF) << 5 |
      (packet[offset + 13] & 0xFE) >>> 3;
    pes.pts *= 4; // Left shift by 2
    pes.pts += (packet[offset + 13] & 0x06) >>> 1; // OR by the two LSBs
    // when no explicit DTS is present, it is defined to equal the PTS
    pes.dts = pes.pts;
    if (ptsDtsFlags & 0x40) {
      pes.dts = (packet[offset + 14] & 0x0E) << 27 |
        (packet[offset + 15] & 0xFF) << 20 |
        (packet[offset + 16] & 0xFE) << 12 |
        (packet[offset + 17] & 0xFF) << 5 |
        (packet[offset + 18] & 0xFE) >>> 3;
      pes.dts *= 4; // Left shift by 2
      pes.dts += (packet[offset + 18] & 0x06) >>> 1; // OR by the two LSBs
    }
  }
  return pes;
};
  3311. var parseNalUnitType = function(type) {
  3312. switch (type) {
  3313. case 0x05:
  3314. return 'slice_layer_without_partitioning_rbsp_idr';
  3315. case 0x06:
  3316. return 'sei_rbsp';
  3317. case 0x07:
  3318. return 'seq_parameter_set_rbsp';
  3319. case 0x08:
  3320. return 'pic_parameter_set_rbsp';
  3321. case 0x09:
  3322. return 'access_unit_delimiter_rbsp';
  3323. default:
  3324. return null;
  3325. }
  3326. };
/**
 * Scan the payload of a video TS packet for an IDR NAL unit.
 * Walks the payload looking for 0x000001 start codes and classifies the
 * NAL unit preceding each one; also classifies the final NAL unit after
 * the scan completes.
 * @param packet {Uint8Array} a TS packet from the video elementary stream
 * @return {boolean} true when a slice_layer_without_partitioning_rbsp_idr
 * (keyframe) NAL unit was found in this packet's payload
 */
var videoPacketContainsKeyFrame = function(packet) {
  var offset = 4 + parseAdaptionField(packet);
  var frameBuffer = packet.subarray(offset);
  var frameI = 0;
  var frameSyncPoint = 0;
  var foundKeyFrame = false;
  var nalType;

  // advance the sync point to a NAL start, if necessary
  for (; frameSyncPoint < frameBuffer.byteLength - 3; frameSyncPoint++) {
    if (frameBuffer[frameSyncPoint + 2] === 1) {
      // the sync point is properly aligned
      frameI = frameSyncPoint + 5;
      break;
    }
  }

  // step through the payload three bytes at a time looking for start codes
  while (frameI < frameBuffer.byteLength) {
    // look at the current byte to determine if we've hit the end of
    // a NAL unit boundary
    switch (frameBuffer[frameI]) {
    case 0:
      // skip past non-sync sequences
      if (frameBuffer[frameI - 1] !== 0) {
        frameI += 2;
        break;
      } else if (frameBuffer[frameI - 2] !== 0) {
        frameI++;
        break;
      }

      // classify the NAL unit that just ended, unless it is zero-length
      if (frameSyncPoint + 3 !== frameI - 2) {
        nalType = parseNalUnitType(frameBuffer[frameSyncPoint + 3] & 0x1f);
        if (nalType === 'slice_layer_without_partitioning_rbsp_idr') {
          foundKeyFrame = true;
        }
      }

      // drop trailing zeroes
      // NOTE(review): the index is read before the bound check here, so one
      // out-of-range (undefined) read can occur at the end of the buffer;
      // the loop still terminates correctly
      do {
        frameI++;
      } while (frameBuffer[frameI] !== 1 && frameI < frameBuffer.length);
      frameSyncPoint = frameI - 2;
      frameI += 3;
      break;
    case 1:
      // skip past non-sync sequences
      if (frameBuffer[frameI - 1] !== 0 ||
          frameBuffer[frameI - 2] !== 0) {
        frameI += 3;
        break;
      }

      // a start code ends the current NAL unit; classify it
      nalType = parseNalUnitType(frameBuffer[frameSyncPoint + 3] & 0x1f);
      if (nalType === 'slice_layer_without_partitioning_rbsp_idr') {
        foundKeyFrame = true;
      }
      frameSyncPoint = frameI - 2;
      frameI += 3;
      break;
    default:
      // the current byte isn't a one or zero, so it cannot be part
      // of a sync sequence
      frameI += 3;
      break;
    }
  }
  frameBuffer = frameBuffer.subarray(frameSyncPoint);
  frameI -= frameSyncPoint;
  frameSyncPoint = 0;
  // parse the final nal
  if (frameBuffer && frameBuffer.byteLength > 3) {
    nalType = parseNalUnitType(frameBuffer[frameSyncPoint + 3] & 0x1f);
    if (nalType === 'slice_layer_without_partitioning_rbsp_idr') {
      foundKeyFrame = true;
    }
  }
  return foundKeyFrame;
};
// lightweight probe helpers for inspecting raw TS packets without running
// a full demux pipeline
module.exports = {
  parseType: parseType,
  parsePat: parsePat,
  parsePmt: parsePmt,
  parsePayloadUnitStartIndicator: parsePayloadUnitStartIndicator,
  parsePesType: parsePesType,
  parsePesTime: parsePesTime,
  videoPacketContainsKeyFrame: videoPacketContainsKeyFrame
};
  3410. },{"./stream-types.js":22}],22:[function(require,module,exports){
'use strict';

// MPEG-2 transport stream `stream_type` values (ISO/IEC 13818-1) used to
// identify the elementary streams this library demuxes.
module.exports = {
  H264_STREAM_TYPE: 0x1B, // AVC/H.264 video
  ADTS_STREAM_TYPE: 0x0F, // AAC audio in ADTS framing
  METADATA_STREAM_TYPE: 0x15 // metadata carried in PES packets (e.g. ID3)
};
  3417. },{}],23:[function(require,module,exports){
  3418. /**
  3419. * mux.js
  3420. *
  3421. * Copyright (c) 2016 Brightcove
  3422. * All rights reserved.
  3423. *
  3424. * Accepts program elementary stream (PES) data events and corrects
  3425. * decode and presentation time stamps to account for a rollover
  3426. * of the 33 bit value.
  3427. */
  3428. 'use strict';
  3429. var Stream = require('../utils/stream');
  3430. var MAX_TS = 8589934592;
  3431. var RO_THRESH = 4294967296;
  3432. var handleRollover = function(value, reference) {
  3433. var direction = 1;
  3434. if (value > reference) {
  3435. // If the current timestamp value is greater than our reference timestamp and we detect a
  3436. // timestamp rollover, this means the roll over is happening in the opposite direction.
  3437. // Example scenario: Enter a long stream/video just after a rollover occurred. The reference
  3438. // point will be set to a small number, e.g. 1. The user then seeks backwards over the
  3439. // rollover point. In loading this segment, the timestamp values will be very large,
  3440. // e.g. 2^33 - 1. Since this comes before the data we loaded previously, we want to adjust
  3441. // the time stamp to be `value - 2^33`.
  3442. direction = -1;
  3443. }
  3444. // Note: A seek forwards or back that is greater than the RO_THRESH (2^32, ~13 hours) will
  3445. // cause an incorrect adjustment.
  3446. while (Math.abs(reference - value) > RO_THRESH) {
  3447. value += (direction * MAX_TS);
  3448. }
  3449. return value;
  3450. };
  3451. var TimestampRolloverStream = function(type) {
  3452. var lastDTS, referenceDTS;
  3453. TimestampRolloverStream.prototype.init.call(this);
  3454. this.type_ = type;
  3455. this.push = function(data) {
  3456. if (data.type !== this.type_) {
  3457. return;
  3458. }
  3459. if (referenceDTS === undefined) {
  3460. referenceDTS = data.dts;
  3461. }
  3462. data.dts = handleRollover(data.dts, referenceDTS);
  3463. data.pts = handleRollover(data.pts, referenceDTS);
  3464. lastDTS = data.dts;
  3465. this.trigger('data', data);
  3466. };
  3467. this.flush = function() {
  3468. referenceDTS = lastDTS;
  3469. this.trigger('done');
  3470. };
  3471. this.discontinuity = function() {
  3472. referenceDTS = void 0;
  3473. lastDTS = void 0;
  3474. };
  3475. };
  3476. TimestampRolloverStream.prototype = new Stream();
// Expose both the stream wrapper and the raw rollover helper so callers
// can correct timestamps without instantiating a stream.
module.exports = {
  TimestampRolloverStream: TimestampRolloverStream,
  handleRollover: handleRollover
};
  3481. },{"../utils/stream":33}],24:[function(require,module,exports){
// Top-level mp4 module: re-exports the box generator and the transmuxer
// entry points. require() caches module exports, so the repeated
// require('./transmuxer') calls evaluate the module only once.
module.exports = {
  generator: require('./mp4-generator'),
  Transmuxer: require('./transmuxer').Transmuxer,
  AudioSegmentStream: require('./transmuxer').AudioSegmentStream,
  VideoSegmentStream: require('./transmuxer').VideoSegmentStream
};
  3488. },{"./mp4-generator":25,"./transmuxer":27}],25:[function(require,module,exports){
  3489. /**
  3490. * mux.js
  3491. *
  3492. * Copyright (c) 2015 Brightcove
  3493. * All rights reserved.
  3494. *
  3495. * Functions that generate fragmented MP4s suitable for use with Media
  3496. * Source Extensions.
  3497. */
  3498. 'use strict';
var UINT32_MAX = Math.pow(2, 32) - 1;

// forward declarations for the box builders and shared byte constants
// initialized below
var box, dinf, esds, ftyp, mdat, mfhd, minf, moof, moov, mvex, mvhd,
    trak, tkhd, mdia, mdhd, hdlr, sdtp, stbl, stsd, traf, trex,
    trun, types, MAJOR_BRAND, MINOR_VERSION, AVC1_BRAND, VIDEO_HDLR,
    AUDIO_HDLR, HDLR_TYPES, VMHD, SMHD, DREF, STCO, STSC, STSZ, STTS;

// pre-calculate constants
(function() {
  var i;

  // each key is a box type name; the loop below replaces each value with
  // the four ASCII code points of that name
  types = {
    avc1: [], // codingname
    avcC: [],
    btrt: [],
    dinf: [],
    dref: [],
    esds: [],
    ftyp: [],
    hdlr: [],
    mdat: [],
    mdhd: [],
    mdia: [],
    mfhd: [],
    minf: [],
    moof: [],
    moov: [],
    mp4a: [], // codingname
    mvex: [],
    mvhd: [],
    sdtp: [],
    smhd: [],
    stbl: [],
    stco: [],
    stsc: [],
    stsd: [],
    stsz: [],
    stts: [],
    styp: [],
    tfdt: [],
    tfhd: [],
    traf: [],
    trak: [],
    trun: [],
    trex: [],
    tkhd: [],
    vmhd: []
  };

  // In environments where Uint8Array is undefined (e.g., IE8), skip set up so that we
  // don't throw an error
  if (typeof Uint8Array === 'undefined') {
    return;
  }

  for (i in types) {
    if (types.hasOwnProperty(i)) {
      types[i] = [
        i.charCodeAt(0),
        i.charCodeAt(1),
        i.charCodeAt(2),
        i.charCodeAt(3)
      ];
    }
  }

  // 'isom'
  MAJOR_BRAND = new Uint8Array([
    'i'.charCodeAt(0),
    's'.charCodeAt(0),
    'o'.charCodeAt(0),
    'm'.charCodeAt(0)
  ]);
  // 'avc1'
  AVC1_BRAND = new Uint8Array([
    'a'.charCodeAt(0),
    'v'.charCodeAt(0),
    'c'.charCodeAt(0),
    '1'.charCodeAt(0)
  ]);
  MINOR_VERSION = new Uint8Array([0, 0, 0, 1]);

  VIDEO_HDLR = new Uint8Array([
    0x00, // version 0
    0x00, 0x00, 0x00, // flags
    0x00, 0x00, 0x00, 0x00, // pre_defined
    0x76, 0x69, 0x64, 0x65, // handler_type: 'vide'
    0x00, 0x00, 0x00, 0x00, // reserved
    0x00, 0x00, 0x00, 0x00, // reserved
    0x00, 0x00, 0x00, 0x00, // reserved
    0x56, 0x69, 0x64, 0x65,
    0x6f, 0x48, 0x61, 0x6e,
    0x64, 0x6c, 0x65, 0x72, 0x00 // name: 'VideoHandler'
  ]);
  AUDIO_HDLR = new Uint8Array([
    0x00, // version 0
    0x00, 0x00, 0x00, // flags
    0x00, 0x00, 0x00, 0x00, // pre_defined
    0x73, 0x6f, 0x75, 0x6e, // handler_type: 'soun'
    0x00, 0x00, 0x00, 0x00, // reserved
    0x00, 0x00, 0x00, 0x00, // reserved
    0x00, 0x00, 0x00, 0x00, // reserved
    0x53, 0x6f, 0x75, 0x6e,
    0x64, 0x48, 0x61, 0x6e,
    0x64, 0x6c, 0x65, 0x72, 0x00 // name: 'SoundHandler'
  ]);
  HDLR_TYPES = {
    video: VIDEO_HDLR,
    audio: AUDIO_HDLR
  };

  DREF = new Uint8Array([
    0x00, // version 0
    0x00, 0x00, 0x00, // flags
    0x00, 0x00, 0x00, 0x01, // entry_count
    0x00, 0x00, 0x00, 0x0c, // entry_size
    0x75, 0x72, 0x6c, 0x20, // 'url' type
    0x00, // version 0
    0x00, 0x00, 0x01 // entry_flags
  ]);
  SMHD = new Uint8Array([
    0x00, // version
    0x00, 0x00, 0x00, // flags
    0x00, 0x00, // balance, 0 means centered
    0x00, 0x00 // reserved
  ]);
  STCO = new Uint8Array([
    0x00, // version
    0x00, 0x00, 0x00, // flags
    0x00, 0x00, 0x00, 0x00 // entry_count
  ]);
  // stsc and stts share stco's shape: version, flags, zero entry_count
  STSC = STCO;
  STSZ = new Uint8Array([
    0x00, // version
    0x00, 0x00, 0x00, // flags
    0x00, 0x00, 0x00, 0x00, // sample_size
    0x00, 0x00, 0x00, 0x00 // sample_count
  ]);
  STTS = STCO;
  VMHD = new Uint8Array([
    0x00, // version
    0x00, 0x00, 0x01, // flags
    0x00, 0x00, // graphicsmode
    0x00, 0x00,
    0x00, 0x00,
    0x00, 0x00 // opcolor
  ]);
}());
  3637. box = function(type) {
  3638. var
  3639. payload = [],
  3640. size = 0,
  3641. i,
  3642. result,
  3643. view;
  3644. for (i = 1; i < arguments.length; i++) {
  3645. payload.push(arguments[i]);
  3646. }
  3647. i = payload.length;
  3648. // calculate the total size we need to allocate
  3649. while (i--) {
  3650. size += payload[i].byteLength;
  3651. }
  3652. result = new Uint8Array(size + 8);
  3653. view = new DataView(result.buffer, result.byteOffset, result.byteLength);
  3654. view.setUint32(0, result.byteLength);
  3655. result.set(type, 4);
  3656. // copy the payload into the result
  3657. for (i = 0, size = 8; i < payload.length; i++) {
  3658. result.set(payload[i], size);
  3659. size += payload[i].byteLength;
  3660. }
  3661. return result;
  3662. };
// Data information box: a single dref entry declaring that the media data
// lives in this same file (entry_flags 0x000001 in DREF).
dinf = function() {
  return box(types.dinf, box(types.dref, DREF));
};
// Elementary stream descriptor box for an AAC track. The two
// AudioSpecificConfig bytes are rebuilt from the track's parsed
// audio object type, sampling frequency index and channel count.
esds = function(track) {
  return box(types.esds, new Uint8Array([
    0x00, // version
    0x00, 0x00, 0x00, // flags

    // ES_Descriptor
    0x03, // tag, ES_DescrTag
    0x19, // length
    0x00, 0x00, // ES_ID
    0x00, // streamDependenceFlag, URL_flag, reserved, streamPriority

    // DecoderConfigDescriptor
    0x04, // tag, DecoderConfigDescrTag
    0x11, // length
    0x40, // object type
    0x15, // streamType
    0x00, 0x06, 0x00, // bufferSizeDB
    0x00, 0x00, 0xda, 0xc0, // maxBitrate
    0x00, 0x00, 0xda, 0xc0, // avgBitrate

    // DecoderSpecificInfo
    0x05, // tag, DecoderSpecificInfoTag
    0x02, // length
    // ISO/IEC 14496-3, AudioSpecificConfig
    // for samplingFrequencyIndex see ISO/IEC 13818-7:2006, 8.1.3.2.2, Table 35
    (track.audioobjecttype << 3) | (track.samplingfrequencyindex >>> 1),
    (track.samplingfrequencyindex << 7) | (track.channelcount << 3),
    0x06, 0x01, 0x02 // GASpecificConfig
  ]));
};
// File type box: major brand 'isom', compatible with 'isom' and 'avc1'.
ftyp = function() {
  return box(types.ftyp, MAJOR_BRAND, MINOR_VERSION, MAJOR_BRAND, AVC1_BRAND);
};
// Handler reference box; `type` is 'video' or 'audio' (the keys of
// HDLR_TYPES above).
hdlr = function(type) {
  return box(types.hdlr, HDLR_TYPES[type]);
};
// Media data box wrapping the raw sample bytes.
mdat = function(data) {
  return box(types.mdat, data);
};
// Media header box. The timescale defaults to 90kHz, but bytes 12-15 are
// overwritten with the audio sample rate when the track provides one.
mdhd = function(track) {
  var result = new Uint8Array([
    0x00, // version 0
    0x00, 0x00, 0x00, // flags
    0x00, 0x00, 0x00, 0x02, // creation_time
    0x00, 0x00, 0x00, 0x03, // modification_time
    0x00, 0x01, 0x5f, 0x90, // timescale, 90,000 "ticks" per second
    (track.duration >>> 24) & 0xFF,
    (track.duration >>> 16) & 0xFF,
    (track.duration >>> 8) & 0xFF,
    track.duration & 0xFF, // duration
    0x55, 0xc4, // 'und' language (undetermined)
    0x00, 0x00
  ]);

  // Use the sample rate from the track metadata, when it is
  // defined. The sample rate can be parsed out of an ADTS header, for
  // instance.
  if (track.samplerate) {
    result[12] = (track.samplerate >>> 24) & 0xFF;
    result[13] = (track.samplerate >>> 16) & 0xFF;
    result[14] = (track.samplerate >>> 8) & 0xFF;
    result[15] = (track.samplerate) & 0xFF;
  }

  return box(types.mdhd, result);
};
// Media box: bundles the media header, handler reference and media
// information boxes for one track.
mdia = function(track) {
  return box(types.mdia, mdhd(track), hdlr(track.type), minf(track));
};
// Movie fragment header box carrying the fragment's 32-bit sequence number.
mfhd = function(sequenceNumber) {
  return box(types.mfhd, new Uint8Array([
    0x00,
    0x00, 0x00, 0x00, // flags
    (sequenceNumber & 0xFF000000) >> 24,
    (sequenceNumber & 0xFF0000) >> 16,
    (sequenceNumber & 0xFF00) >> 8,
    sequenceNumber & 0xFF // sequence_number
  ]));
};
// Media information box: a vmhd (video) or smhd (audio) header, the data
// information box, and the sample table.
minf = function(track) {
  return box(types.minf,
             track.type === 'video' ? box(types.vmhd, VMHD) : box(types.smhd, SMHD),
             dinf(),
             stbl(track));
};
  3746. moof = function(sequenceNumber, tracks) {
  3747. var
  3748. trackFragments = [],
  3749. i = tracks.length;
  3750. // build traf boxes for each track fragment
  3751. while (i--) {
  3752. trackFragments[i] = traf(tracks[i]);
  3753. }
  3754. return box.apply(null, [
  3755. types.moof,
  3756. mfhd(sequenceNumber)
  3757. ].concat(trackFragments));
  3758. };
  3759. /**
  3760. * Returns a movie box.
  3761. * @param tracks {array} the tracks associated with this movie
  3762. * @see ISO/IEC 14496-12:2012(E), section 8.2.1
  3763. */
  3764. moov = function(tracks) {
  3765. var
  3766. i = tracks.length,
  3767. boxes = [];
  3768. while (i--) {
  3769. boxes[i] = trak(tracks[i]);
  3770. }
  3771. return box.apply(null, [types.moov, mvhd(0xffffffff)].concat(boxes).concat(mvex(tracks)));
  3772. };
  3773. mvex = function(tracks) {
  3774. var
  3775. i = tracks.length,
  3776. boxes = [];
  3777. while (i--) {
  3778. boxes[i] = trex(tracks[i]);
  3779. }
  3780. return box.apply(null, [types.mvex].concat(boxes));
  3781. };
// Movie header box: overall timescale (90kHz), duration and presentation
// defaults (rate, volume, identity transform matrix).
mvhd = function(duration) {
  var
    bytes = new Uint8Array([
      0x00, // version 0
      0x00, 0x00, 0x00, // flags
      0x00, 0x00, 0x00, 0x01, // creation_time
      0x00, 0x00, 0x00, 0x02, // modification_time
      0x00, 0x01, 0x5f, 0x90, // timescale, 90,000 "ticks" per second
      (duration & 0xFF000000) >> 24,
      (duration & 0xFF0000) >> 16,
      (duration & 0xFF00) >> 8,
      duration & 0xFF, // duration
      0x00, 0x01, 0x00, 0x00, // 1.0 rate
      0x01, 0x00, // 1.0 volume
      0x00, 0x00, // reserved
      0x00, 0x00, 0x00, 0x00, // reserved
      0x00, 0x00, 0x00, 0x00, // reserved
      0x00, 0x01, 0x00, 0x00,
      0x00, 0x00, 0x00, 0x00,
      0x00, 0x00, 0x00, 0x00,
      0x00, 0x00, 0x00, 0x00,
      0x00, 0x01, 0x00, 0x00,
      0x00, 0x00, 0x00, 0x00,
      0x00, 0x00, 0x00, 0x00,
      0x00, 0x00, 0x00, 0x00,
      0x40, 0x00, 0x00, 0x00, // transformation: unity matrix
      0x00, 0x00, 0x00, 0x00,
      0x00, 0x00, 0x00, 0x00,
      0x00, 0x00, 0x00, 0x00,
      0x00, 0x00, 0x00, 0x00,
      0x00, 0x00, 0x00, 0x00,
      0x00, 0x00, 0x00, 0x00, // pre_defined
      0xff, 0xff, 0xff, 0xff // next_track_ID
    ]);
  return box(types.mvhd, bytes);
};
  3818. sdtp = function(track) {
  3819. var
  3820. samples = track.samples || [],
  3821. bytes = new Uint8Array(4 + samples.length),
  3822. flags,
  3823. i;
  3824. // leave the full box header (4 bytes) all zero
  3825. // write the sample table
  3826. for (i = 0; i < samples.length; i++) {
  3827. flags = samples[i].flags;
  3828. bytes[i + 4] = (flags.dependsOn << 4) |
  3829. (flags.isDependedOn << 2) |
  3830. (flags.hasRedundancy);
  3831. }
  3832. return box(types.sdtp,
  3833. bytes);
  3834. };
// Sample table box. In a fragmented file the per-sample metadata lives in
// each moof, so stts/stsc/stsz/stco are the zero-entry templates defined
// above; only stsd carries real data.
stbl = function(track) {
  return box(types.stbl,
             stsd(track),
             box(types.stts, STTS),
             box(types.stsc, STSC),
             box(types.stsz, STSZ),
             box(types.stco, STCO));
};
// stsd and its private sample-entry builders share this IIFE scope.
(function() {
  var videoSample, audioSample;

  // Sample description box: a single avc1 or mp4a sample entry depending
  // on the track type.
  stsd = function(track) {
    return box(types.stsd, new Uint8Array([
      0x00, // version 0
      0x00, 0x00, 0x00, // flags
      0x00, 0x00, 0x00, 0x01
    ]), track.type === 'video' ? videoSample(track) : audioSample(track));
  };

  // Build an avc1 sample entry with its avcC (decoder configuration,
  // including the track's SPS/PPS NAL units) and btrt children.
  videoSample = function(track) {
    var
      sps = track.sps || [],
      pps = track.pps || [],
      sequenceParameterSets = [],
      pictureParameterSets = [],
      i;

    // assemble the SPSs: a two-byte length prefix before each NAL
    for (i = 0; i < sps.length; i++) {
      sequenceParameterSets.push((sps[i].byteLength & 0xFF00) >>> 8);
      sequenceParameterSets.push((sps[i].byteLength & 0xFF)); // sequenceParameterSetLength
      sequenceParameterSets = sequenceParameterSets.concat(Array.prototype.slice.call(sps[i])); // SPS
    }

    // assemble the PPSs the same way
    for (i = 0; i < pps.length; i++) {
      pictureParameterSets.push((pps[i].byteLength & 0xFF00) >>> 8);
      pictureParameterSets.push((pps[i].byteLength & 0xFF));
      pictureParameterSets = pictureParameterSets.concat(Array.prototype.slice.call(pps[i]));
    }

    return box(types.avc1, new Uint8Array([
      0x00, 0x00, 0x00,
      0x00, 0x00, 0x00, // reserved
      0x00, 0x01, // data_reference_index
      0x00, 0x00, // pre_defined
      0x00, 0x00, // reserved
      0x00, 0x00, 0x00, 0x00,
      0x00, 0x00, 0x00, 0x00,
      0x00, 0x00, 0x00, 0x00, // pre_defined
      (track.width & 0xff00) >> 8,
      track.width & 0xff, // width
      (track.height & 0xff00) >> 8,
      track.height & 0xff, // height
      0x00, 0x48, 0x00, 0x00, // horizresolution
      0x00, 0x48, 0x00, 0x00, // vertresolution
      0x00, 0x00, 0x00, 0x00, // reserved
      0x00, 0x01, // frame_count
      0x13, // compressorname length: 19 bytes follow
      0x76, 0x69, 0x64, 0x65,
      0x6f, 0x6a, 0x73, 0x2d,
      0x63, 0x6f, 0x6e, 0x74,
      0x72, 0x69, 0x62, 0x2d,
      0x68, 0x6c, 0x73, 0x00,
      0x00, 0x00, 0x00, 0x00,
      0x00, 0x00, 0x00, 0x00,
      0x00, 0x00, 0x00, // compressorname: 'videojs-contrib-hls'
      0x00, 0x18, // depth = 24
      // NOTE(review): labeled pre_defined = -1, but -1 encodes as 0xff,
      // 0xff; these bytes are 0x11 0x11 — confirm whether this is
      // intentional before changing
      0x11, 0x11 // pre_defined = -1
    ]), box(types.avcC, new Uint8Array([
      0x01, // configurationVersion
      track.profileIdc, // AVCProfileIndication
      track.profileCompatibility, // profile_compatibility
      track.levelIdc, // AVCLevelIndication
      0xff // lengthSizeMinusOne, hard-coded to 4 bytes
    ].concat([
      sps.length // numOfSequenceParameterSets
    ]).concat(sequenceParameterSets).concat([
      pps.length // numOfPictureParameterSets
    ]).concat(pictureParameterSets))), // "PPS"
    box(types.btrt, new Uint8Array([
      0x00, 0x1c, 0x9c, 0x80, // bufferSizeDB
      0x00, 0x2d, 0xc6, 0xc0, // maxBitrate
      0x00, 0x2d, 0xc6, 0xc0
    ])) // avgBitrate
    );
  };

  // Build an mp4a sample entry with its esds child, using the track's
  // parsed channel count, sample size and sample rate.
  audioSample = function(track) {
    return box(types.mp4a, new Uint8Array([
      // SampleEntry, ISO/IEC 14496-12
      0x00, 0x00, 0x00,
      0x00, 0x00, 0x00, // reserved
      0x00, 0x01, // data_reference_index

      // AudioSampleEntry, ISO/IEC 14496-12
      0x00, 0x00, 0x00, 0x00, // reserved
      0x00, 0x00, 0x00, 0x00, // reserved
      (track.channelcount & 0xff00) >> 8,
      (track.channelcount & 0xff), // channelcount
      (track.samplesize & 0xff00) >> 8,
      (track.samplesize & 0xff), // samplesize
      0x00, 0x00, // pre_defined
      0x00, 0x00, // reserved
      (track.samplerate & 0xff00) >> 8,
      (track.samplerate & 0xff),
      0x00, 0x00 // samplerate, 16.16

      // MP4AudioSampleEntry, ISO/IEC 14496-14
    ]), esds(track));
  };
}());
// Track header box: track id, duration, and presentation size. Flags
// 0x000007 mark the track enabled, in-movie and in-preview.
tkhd = function(track) {
  var result = new Uint8Array([
    0x00, // version 0
    0x00, 0x00, 0x07, // flags
    0x00, 0x00, 0x00, 0x00, // creation_time
    0x00, 0x00, 0x00, 0x00, // modification_time
    (track.id & 0xFF000000) >> 24,
    (track.id & 0xFF0000) >> 16,
    (track.id & 0xFF00) >> 8,
    track.id & 0xFF, // track_ID
    0x00, 0x00, 0x00, 0x00, // reserved
    (track.duration & 0xFF000000) >> 24,
    (track.duration & 0xFF0000) >> 16,
    (track.duration & 0xFF00) >> 8,
    track.duration & 0xFF, // duration
    0x00, 0x00, 0x00, 0x00,
    0x00, 0x00, 0x00, 0x00, // reserved
    0x00, 0x00, // layer
    0x00, 0x00, // alternate_group
    0x01, 0x00, // non-audio track volume
    0x00, 0x00, // reserved
    0x00, 0x01, 0x00, 0x00,
    0x00, 0x00, 0x00, 0x00,
    0x00, 0x00, 0x00, 0x00,
    0x00, 0x00, 0x00, 0x00,
    0x00, 0x01, 0x00, 0x00,
    0x00, 0x00, 0x00, 0x00,
    0x00, 0x00, 0x00, 0x00,
    0x00, 0x00, 0x00, 0x00,
    0x40, 0x00, 0x00, 0x00, // transformation: unity matrix
    (track.width & 0xFF00) >> 8,
    track.width & 0xFF,
    0x00, 0x00, // width
    (track.height & 0xFF00) >> 8,
    track.height & 0xFF,
    0x00, 0x00 // height
  ]);
  return box(types.tkhd, result);
};
/**
 * Generate a track fragment (traf) box. A traf box collects metadata
 * about tracks in a movie fragment (moof) box.
 */
traf = function(track) {
  var trackFragmentHeader, trackFragmentDecodeTime, trackFragmentRun,
      sampleDependencyTable, dataOffset,
      upperWordBaseMediaDecodeTime, lowerWordBaseMediaDecodeTime;

  // tfhd flags 0x00003a: default sample duration/size/flags present
  trackFragmentHeader = box(types.tfhd, new Uint8Array([
    0x00, // version 0
    0x00, 0x00, 0x3a, // flags
    (track.id & 0xFF000000) >> 24,
    (track.id & 0xFF0000) >> 16,
    (track.id & 0xFF00) >> 8,
    (track.id & 0xFF), // track_ID
    0x00, 0x00, 0x00, 0x01, // sample_description_index
    0x00, 0x00, 0x00, 0x00, // default_sample_duration
    0x00, 0x00, 0x00, 0x00, // default_sample_size
    0x00, 0x00, 0x00, 0x00 // default_sample_flags
  ]));

  // baseMediaDecodeTime may exceed 32 bits, so split it into two 32-bit
  // words for the version-1 (64-bit) tfdt
  upperWordBaseMediaDecodeTime = Math.floor(track.baseMediaDecodeTime / (UINT32_MAX + 1));
  lowerWordBaseMediaDecodeTime = Math.floor(track.baseMediaDecodeTime % (UINT32_MAX + 1));

  trackFragmentDecodeTime = box(types.tfdt, new Uint8Array([
    0x01, // version 1
    0x00, 0x00, 0x00, // flags
    // baseMediaDecodeTime
    (upperWordBaseMediaDecodeTime >>> 24) & 0xFF,
    (upperWordBaseMediaDecodeTime >>> 16) & 0xFF,
    (upperWordBaseMediaDecodeTime >>> 8) & 0xFF,
    upperWordBaseMediaDecodeTime & 0xFF,
    (lowerWordBaseMediaDecodeTime >>> 24) & 0xFF,
    (lowerWordBaseMediaDecodeTime >>> 16) & 0xFF,
    (lowerWordBaseMediaDecodeTime >>> 8) & 0xFF,
    lowerWordBaseMediaDecodeTime & 0xFF
  ]));

  // the data offset specifies the number of bytes from the start of
  // the containing moof to the first payload byte of the associated
  // mdat
  dataOffset = (32 + // tfhd
                20 + // tfdt
                8 +  // traf header
                16 + // mfhd
                8 +  // moof header
                8);  // mdat header

  // audio tracks require less metadata
  if (track.type === 'audio') {
    trackFragmentRun = trun(track, dataOffset);
    return box(types.traf,
               trackFragmentHeader,
               trackFragmentDecodeTime,
               trackFragmentRun);
  }

  // video tracks should contain an independent and disposable samples
  // box (sdtp)
  // generate one and adjust offsets to match
  sampleDependencyTable = sdtp(track);
  // sdtp precedes the mdat, so its byte length shifts the data offset
  trackFragmentRun = trun(track,
                          sampleDependencyTable.length + dataOffset);
  return box(types.traf,
             trackFragmentHeader,
             trackFragmentDecodeTime,
             trackFragmentRun,
             sampleDependencyTable);
};
/**
 * Generate a track box.
 * @param track {object} a track definition
 * @return {Uint8Array} the track box
 */
trak = function(track) {
  // NOTE: mutates the caller's track object, defaulting a missing or
  // zero duration to the 32-bit maximum
  track.duration = track.duration || 0xffffffff;
  return box(types.trak,
             tkhd(track),
             mdia(track));
};
// Track extends box: per-track defaults used by movie fragments.
trex = function(track) {
  var result = new Uint8Array([
    0x00, // version 0
    0x00, 0x00, 0x00, // flags
    (track.id & 0xFF000000) >> 24,
    (track.id & 0xFF0000) >> 16,
    (track.id & 0xFF00) >> 8,
    (track.id & 0xFF), // track_ID
    0x00, 0x00, 0x00, 0x01, // default_sample_description_index
    0x00, 0x00, 0x00, 0x00, // default_sample_duration
    0x00, 0x00, 0x00, 0x00, // default_sample_size
    0x00, 0x01, 0x00, 0x01 // default_sample_flags
  ]);
  // the last two bytes of default_sample_flags is the sample
  // degradation priority, a hint about the importance of this sample
  // relative to others. Lower the degradation priority for all sample
  // types other than video.
  if (track.type !== 'video') {
    result[result.length - 1] = 0x00;
  }
  return box(types.trex, result);
};
  4075. (function() {
  4076. var audioTrun, videoTrun, trunHeader;
  4077. // This method assumes all samples are uniform. That is, if a
  4078. // duration is present for the first sample, it will be present for
  4079. // all subsequent samples.
  4080. // see ISO/IEC 14496-12:2012, Section 8.8.8.1
  4081. trunHeader = function(samples, offset) {
  4082. var durationPresent = 0, sizePresent = 0,
  4083. flagsPresent = 0, compositionTimeOffset = 0;
  4084. // trun flag constants
  4085. if (samples.length) {
  4086. if (samples[0].duration !== undefined) {
  4087. durationPresent = 0x1;
  4088. }
  4089. if (samples[0].size !== undefined) {
  4090. sizePresent = 0x2;
  4091. }
  4092. if (samples[0].flags !== undefined) {
  4093. flagsPresent = 0x4;
  4094. }
  4095. if (samples[0].compositionTimeOffset !== undefined) {
  4096. compositionTimeOffset = 0x8;
  4097. }
  4098. }
  4099. return [
  4100. 0x00, // version 0
  4101. 0x00,
  4102. durationPresent | sizePresent | flagsPresent | compositionTimeOffset,
  4103. 0x01, // flags
  4104. (samples.length & 0xFF000000) >>> 24,
  4105. (samples.length & 0xFF0000) >>> 16,
  4106. (samples.length & 0xFF00) >>> 8,
  4107. samples.length & 0xFF, // sample_count
  4108. (offset & 0xFF000000) >>> 24,
  4109. (offset & 0xFF0000) >>> 16,
  4110. (offset & 0xFF00) >>> 8,
  4111. offset & 0xFF // data_offset
  4112. ];
  4113. };
  4114. videoTrun = function(track, offset) {
  4115. var bytes, samples, sample, i;
  4116. samples = track.samples || [];
  4117. offset += 8 + 12 + (16 * samples.length);
  4118. bytes = trunHeader(samples, offset);
  4119. for (i = 0; i < samples.length; i++) {
  4120. sample = samples[i];
  4121. bytes = bytes.concat([
  4122. (sample.duration & 0xFF000000) >>> 24,
  4123. (sample.duration & 0xFF0000) >>> 16,
  4124. (sample.duration & 0xFF00) >>> 8,
  4125. sample.duration & 0xFF, // sample_duration
  4126. (sample.size & 0xFF000000) >>> 24,
  4127. (sample.size & 0xFF0000) >>> 16,
  4128. (sample.size & 0xFF00) >>> 8,
  4129. sample.size & 0xFF, // sample_size
  4130. (sample.flags.isLeading << 2) | sample.flags.dependsOn,
  4131. (sample.flags.isDependedOn << 6) |
  4132. (sample.flags.hasRedundancy << 4) |
  4133. (sample.flags.paddingValue << 1) |
  4134. sample.flags.isNonSyncSample,
  4135. sample.flags.degradationPriority & 0xF0 << 8,
  4136. sample.flags.degradationPriority & 0x0F, // sample_flags
  4137. (sample.compositionTimeOffset & 0xFF000000) >>> 24,
  4138. (sample.compositionTimeOffset & 0xFF0000) >>> 16,
  4139. (sample.compositionTimeOffset & 0xFF00) >>> 8,
  4140. sample.compositionTimeOffset & 0xFF // sample_composition_time_offset
  4141. ]);
  4142. }
  4143. return box(types.trun, new Uint8Array(bytes));
  4144. };
  4145. audioTrun = function(track, offset) {
  4146. var bytes, samples, sample, i;
  4147. samples = track.samples || [];
  4148. offset += 8 + 12 + (8 * samples.length);
  4149. bytes = trunHeader(samples, offset);
  4150. for (i = 0; i < samples.length; i++) {
  4151. sample = samples[i];
  4152. bytes = bytes.concat([
  4153. (sample.duration & 0xFF000000) >>> 24,
  4154. (sample.duration & 0xFF0000) >>> 16,
  4155. (sample.duration & 0xFF00) >>> 8,
  4156. sample.duration & 0xFF, // sample_duration
  4157. (sample.size & 0xFF000000) >>> 24,
  4158. (sample.size & 0xFF0000) >>> 16,
  4159. (sample.size & 0xFF00) >>> 8,
  4160. sample.size & 0xFF]); // sample_size
  4161. }
  4162. return box(types.trun, new Uint8Array(bytes));
  4163. };
  4164. trun = function(track, offset) {
  4165. if (track.type === 'audio') {
  4166. return audioTrun(track, offset);
  4167. }
  4168. return videoTrun(track, offset);
  4169. };
  4170. }());
// Public API of the mp4 generator.
module.exports = {
  ftyp: ftyp,
  mdat: mdat,
  moof: moof,
  moov: moov,
  // Generate a complete initialization segment: an ftyp box followed by a
  // moov box for the given tracks, concatenated into one Uint8Array.
  initSegment: function(tracks) {
    var
      fileType = ftyp(),
      movie = moov(tracks),
      result;

    result = new Uint8Array(fileType.byteLength + movie.byteLength);
    result.set(fileType);
    result.set(movie, fileType.byteLength);
    return result;
  }
};
  4187. },{}],26:[function(require,module,exports){
  4188. /**
  4189. * mux.js
  4190. *
  4191. * Copyright (c) 2015 Brightcove
  4192. * All rights reserved.
  4193. *
  4194. * Utilities to detect basic properties and metadata about MP4s.
  4195. */
  4196. 'use strict';
  4197. var findBox, parseType, timescale, startTime;
  4198. // Find the data for a box specified by its path
  4199. findBox = function(data, path) {
  4200. var results = [],
  4201. i, size, type, end, subresults;
  4202. if (!path.length) {
  4203. // short-circuit the search for empty paths
  4204. return null;
  4205. }
  4206. for (i = 0; i < data.byteLength;) {
  4207. size = data[i] << 24;
  4208. size |= data[i + 1] << 16;
  4209. size |= data[i + 2] << 8;
  4210. size |= data[i + 3];
  4211. type = parseType(data.subarray(i + 4, i + 8));
  4212. end = size > 1 ? i + size : data.byteLength;
  4213. if (type === path[0]) {
  4214. if (path.length === 1) {
  4215. // this is the end of the path and we've found the box we were
  4216. // looking for
  4217. results.push(data.subarray(i + 8, end));
  4218. } else {
  4219. // recursively search for the next box along the path
  4220. subresults = findBox(data.subarray(i + 8, end), path.slice(1));
  4221. if (subresults.length) {
  4222. results = results.concat(subresults);
  4223. }
  4224. }
  4225. }
  4226. i = end;
  4227. }
  4228. // we've finished searching all of data
  4229. return results;
  4230. };
  4231. /**
  4232. * Returns the string representation of an ASCII encoded four byte buffer.
  4233. * @param buffer {Uint8Array} a four-byte buffer to translate
  4234. * @return {string} the corresponding string
  4235. */
  4236. parseType = function(buffer) {
  4237. var result = '';
  4238. result += String.fromCharCode(buffer[0]);
  4239. result += String.fromCharCode(buffer[1]);
  4240. result += String.fromCharCode(buffer[2]);
  4241. result += String.fromCharCode(buffer[3]);
  4242. return result;
  4243. };
  4244. /**
  4245. * Parses an MP4 initialization segment and extracts the timescale
  4246. * values for any declared tracks. Timescale values indicate the
  4247. * number of clock ticks per second to assume for time-based values
  4248. * elsewhere in the MP4.
  4249. *
  4250. * To determine the start time of an MP4, you need two pieces of
  4251. * information: the timescale unit and the earliest base media decode
  4252. * time. Multiple timescales can be specified within an MP4 but the
  4253. * base media decode time is always expressed in the timescale from
  4254. * the media header box for the track:
  4255. * ```
  4256. * moov > trak > mdia > mdhd.timescale
  4257. * ```
  4258. * @param init {Uint8Array} the bytes of the init segment
  4259. * @return {object} a hash of track ids to timescale values or null if
  4260. * the init segment is malformed.
  4261. */
  4262. timescale = function(init) {
  4263. var
  4264. result = {},
  4265. traks = findBox(init, ['moov', 'trak']);
  4266. // mdhd timescale
  4267. return traks.reduce(function(result, trak) {
  4268. var tkhd, version, index, id, mdhd;
  4269. tkhd = findBox(trak, ['tkhd'])[0];
  4270. if (!tkhd) {
  4271. return null;
  4272. }
  4273. version = tkhd[0];
  4274. index = version === 0 ? 12 : 20;
  4275. id = tkhd[index] << 24 |
  4276. tkhd[index + 1] << 16 |
  4277. tkhd[index + 2] << 8 |
  4278. tkhd[index + 3];
  4279. mdhd = findBox(trak, ['mdia', 'mdhd'])[0];
  4280. if (!mdhd) {
  4281. return null;
  4282. }
  4283. version = mdhd[0];
  4284. index = version === 0 ? 12 : 20;
  4285. result[id] = mdhd[index] << 24 |
  4286. mdhd[index + 1] << 16 |
  4287. mdhd[index + 2] << 8 |
  4288. mdhd[index + 3];
  4289. return result;
  4290. }, result);
  4291. };
  4292. /**
  4293. * Determine the base media decode start time, in seconds, for an MP4
  4294. * fragment. If multiple fragments are specified, the earliest time is
  4295. * returned.
  4296. *
  4297. * The base media decode time can be parsed from track fragment
  4298. * metadata:
  4299. * ```
  4300. * moof > traf > tfdt.baseMediaDecodeTime
  4301. * ```
  4302. * It requires the timescale value from the mdhd to interpret.
  4303. *
  4304. * @param timescale {object} a hash of track ids to timescale values.
  4305. * @return {number} the earliest base media decode start time for the
  4306. * fragment, in seconds
  4307. */
  4308. startTime = function(timescale, fragment) {
  4309. var trafs, baseTimes, result;
  4310. // we need info from two childrend of each track fragment box
  4311. trafs = findBox(fragment, ['moof', 'traf']);
  4312. // determine the start times for each track
  4313. baseTimes = [].concat.apply([], trafs.map(function(traf) {
  4314. return findBox(traf, ['tfhd']).map(function(tfhd) {
  4315. var id, scale, baseTime;
  4316. // get the track id from the tfhd
  4317. id = tfhd[4] << 24 |
  4318. tfhd[5] << 16 |
  4319. tfhd[6] << 8 |
  4320. tfhd[7];
  4321. // assume a 90kHz clock if no timescale was specified
  4322. scale = timescale[id] || 90e3;
  4323. // get the base media decode time from the tfdt
  4324. baseTime = findBox(traf, ['tfdt']).map(function(tfdt) {
  4325. var version, result;
  4326. version = tfdt[0];
  4327. result = tfdt[4] << 24 |
  4328. tfdt[5] << 16 |
  4329. tfdt[6] << 8 |
  4330. tfdt[7];
  4331. if (version === 1) {
  4332. result *= Math.pow(2, 32);
  4333. result += tfdt[8] << 24 |
  4334. tfdt[9] << 16 |
  4335. tfdt[10] << 8 |
  4336. tfdt[11];
  4337. }
  4338. return result;
  4339. })[0];
  4340. baseTime = baseTime || Infinity;
  4341. // convert base time to seconds
  4342. return baseTime / scale;
  4343. });
  4344. }));
  4345. // return the minimum
  4346. result = Math.min.apply(null, baseTimes);
  4347. return isFinite(result) ? result : 0;
  4348. };
// public API of the mp4 probe: box-type parsing plus the two helpers needed
// to locate a fragment on the media timeline
module.exports = {
  parseType: parseType,
  timescale: timescale,
  startTime: startTime
};
  4354. },{}],27:[function(require,module,exports){
  4355. /**
  4356. * mux.js
  4357. *
  4358. * Copyright (c) 2015 Brightcove
  4359. * All rights reserved.
  4360. *
  4361. * A stream-based mp2t to mp4 converter. This utility can be used to
  4362. * deliver mp4s to a SourceBuffer on platforms that support native
  4363. * Media Source Extensions.
  4364. */
  4365. 'use strict';
  4366. var Stream = require('../utils/stream.js');
  4367. var mp4 = require('./mp4-generator.js');
  4368. var m2ts = require('../m2ts/m2ts.js');
  4369. var AdtsStream = require('../codecs/adts.js');
  4370. var H264Stream = require('../codecs/h264').H264Stream;
  4371. var AacStream = require('../aac');
  4372. var coneOfSilence = require('../data/silence');
  4373. var clock = require('../utils/clock');
// constants
// ADTS/AAC configuration fields that are copied from each parsed audio frame
// onto the audio track object
var AUDIO_PROPERTIES = [
  'audioobjecttype',
  'channelcount',
  'samplerate',
  'samplingfrequencyindex',
  'samplesize'
];
// H.264 configuration fields copied from the decoded SPS onto the video track
var VIDEO_PROPERTIES = [
  'width',
  'height',
  'profileIdc',
  'levelIdc',
  'profileCompatibility'
];
var ONE_SECOND_IN_TS = 90000; // 90kHz clock
// object types
var VideoSegmentStream, AudioSegmentStream, Transmuxer, CoalesceStream;
// Helper functions (assigned below; declared up front so they are in scope
// for the stream constructors above their definitions)
var
  createDefaultSample,
  isLikelyAacData,
  collectDtsInfo,
  clearDtsInfo,
  calculateTrackBaseMediaDecodeTime,
  arrayEquals,
  sumFrameByteLengths;
  4401. /**
  4402. * Default sample object
  4403. * see ISO/IEC 14496-12:2012, section 8.6.4.3
  4404. */
  4405. createDefaultSample = function() {
  4406. return {
  4407. size: 0,
  4408. flags: {
  4409. isLeading: 0,
  4410. dependsOn: 1,
  4411. isDependedOn: 0,
  4412. hasRedundancy: 0,
  4413. degradationPriority: 0
  4414. }
  4415. };
  4416. };
  4417. isLikelyAacData = function(data) {
  4418. if ((data[0] === 'I'.charCodeAt(0)) &&
  4419. (data[1] === 'D'.charCodeAt(0)) &&
  4420. (data[2] === '3'.charCodeAt(0))) {
  4421. return true;
  4422. }
  4423. return false;
  4424. };
  4425. /**
  4426. * Compare two arrays (even typed) for same-ness
  4427. */
  4428. arrayEquals = function(a, b) {
  4429. var
  4430. i;
  4431. if (a.length !== b.length) {
  4432. return false;
  4433. }
  4434. // compare the value of each element in the array
  4435. for (i = 0; i < a.length; i++) {
  4436. if (a[i] !== b[i]) {
  4437. return false;
  4438. }
  4439. }
  4440. return true;
  4441. };
  4442. /**
  4443. * Sum the `byteLength` properties of the data in each AAC frame
  4444. */
  4445. sumFrameByteLengths = function(array) {
  4446. var
  4447. i,
  4448. currentObj,
  4449. sum = 0;
  4450. // sum the byteLength's all each nal unit in the frame
  4451. for (i = 0; i < array.length; i++) {
  4452. currentObj = array[i];
  4453. sum += currentObj.data.byteLength;
  4454. }
  4455. return sum;
  4456. };
  4457. /**
  4458. * Constructs a single-track, ISO BMFF media segment from AAC data
  4459. * events. The output of this stream can be fed to a SourceBuffer
  4460. * configured with a suitable initialization segment.
  4461. */
AudioSegmentStream = function(track) {
  var
    adtsFrames = [],                     // ADTS frames buffered until flush()
    sequenceNumber = 0,                  // moof sequence number, bumped per emitted segment
    earliestAllowedDts = 0,              // frames with a DTS before this are trimmed
    audioAppendStartTs = 0,              // append position, in 90kHz ticks (see prefixWithSilence_)
    videoBaseMediaDecodeTime = Infinity; // video start, used to bound silence prefixing
  AudioSegmentStream.prototype.init.call(this);

  // Buffer an ADTS frame and mirror its audio configuration
  // (sample rate, channel count, etc.) onto the track object
  this.push = function(data) {
    collectDtsInfo(track, data);
    if (track) {
      AUDIO_PROPERTIES.forEach(function(prop) {
        track[prop] = data[prop];
      });
    }
    // buffer audio data until end() is called
    adtsFrames.push(data);
  };

  // Record the earliest DTS the output segment may contain, expressed
  // relative to the track's starting baseMediaDecodeTime
  this.setEarliestDts = function(earliestDts) {
    earliestAllowedDts = earliestDts - track.timelineStartInfo.baseMediaDecodeTime;
  };

  this.setVideoBaseMediaDecodeTime = function(baseMediaDecodeTime) {
    videoBaseMediaDecodeTime = baseMediaDecodeTime;
  };

  this.setAudioAppendStart = function(timestamp) {
    audioAppendStartTs = timestamp;
  };

  // Assemble the buffered frames into a complete moof+mdat segment and
  // emit it as a 'data' event, always followed by 'done'
  this.flush = function() {
    var
      frames,
      moof,
      mdat,
      boxes;
    // return early if no audio data has been observed
    if (adtsFrames.length === 0) {
      this.trigger('done', 'AudioSegmentStream');
      return;
    }
    frames = this.trimAdtsFramesByEarliestDts_(adtsFrames);
    track.baseMediaDecodeTime = calculateTrackBaseMediaDecodeTime(track);
    this.prefixWithSilence_(track, frames);
    // we have to build the index from byte locations to
    // samples (that is, adts frames) in the audio data
    track.samples = this.generateSampleTable_(frames);
    // concatenate the audio data to construct the mdat
    mdat = mp4.mdat(this.concatenateFrameData_(frames));
    adtsFrames = [];
    moof = mp4.moof(sequenceNumber, [track]);
    boxes = new Uint8Array(moof.byteLength + mdat.byteLength);
    // bump the sequence number for next time
    sequenceNumber++;
    boxes.set(moof);
    boxes.set(mdat, moof.byteLength);
    clearDtsInfo(track);
    this.trigger('data', {track: track, boxes: boxes});
    this.trigger('done', 'AudioSegmentStream');
  };

  // Possibly pad (prefix) the audio track with silence if appending this track
  // would lead to the introduction of a gap in the audio buffer
  this.prefixWithSilence_ = function(track, frames) {
    var
      baseMediaDecodeTimeTs,
      frameDuration = 0,
      audioGapDuration = 0,
      audioFillFrameCount = 0,
      audioFillDuration = 0,
      silentFrame,
      i;
    if (!frames.length) {
      return;
    }
    // convert the track start into the 90kHz video clock for comparison
    baseMediaDecodeTimeTs = clock.audioTsToVideoTs(track.baseMediaDecodeTime, track.samplerate);
    // determine frame clock duration based on sample rate, round up to avoid overfills
    frameDuration = Math.ceil(ONE_SECOND_IN_TS / (track.samplerate / 1024));
    if (audioAppendStartTs && videoBaseMediaDecodeTime) {
      // insert the shortest possible amount (audio gap or audio to video gap)
      audioGapDuration =
        baseMediaDecodeTimeTs - Math.max(audioAppendStartTs, videoBaseMediaDecodeTime);
      // number of full frames in the audio gap
      audioFillFrameCount = Math.floor(audioGapDuration / frameDuration);
      audioFillDuration = audioFillFrameCount * frameDuration;
    }
    // don't attempt to fill gaps smaller than a single frame or larger
    // than a half second
    if (audioFillFrameCount < 1 || audioFillDuration > ONE_SECOND_IN_TS / 2) {
      return;
    }
    silentFrame = coneOfSilence[track.samplerate];
    if (!silentFrame) {
      // we don't have a silent frame pregenerated for the sample rate, so use a frame
      // from the content instead
      silentFrame = frames[0].data;
    }
    for (i = 0; i < audioFillFrameCount; i++) {
      frames.splice(i, 0, {
        data: silentFrame
      });
    }
    // pull the track's start time back so it covers the prepended silence
    track.baseMediaDecodeTime -=
      Math.floor(clock.videoTsToAudioTs(audioFillDuration, track.samplerate));
  };

  // If the audio segment extends before the earliest allowed dts
  // value, remove AAC frames until starts at or after the earliest
  // allowed DTS so that we don't end up with a negative baseMedia-
  // DecodeTime for the audio track
  this.trimAdtsFramesByEarliestDts_ = function(adtsFrames) {
    if (track.minSegmentDts >= earliestAllowedDts) {
      return adtsFrames;
    }
    // We will need to recalculate the earliest segment Dts
    track.minSegmentDts = Infinity;
    return adtsFrames.filter(function(currentFrame) {
      // If this is an allowed frame, keep it and record its Dts
      if (currentFrame.dts >= earliestAllowedDts) {
        track.minSegmentDts = Math.min(track.minSegmentDts, currentFrame.dts);
        track.minSegmentPts = track.minSegmentDts;
        return true;
      }
      // Otherwise, discard it
      return false;
    });
  };

  // generate the track's sample table (size and duration of each sample)
  // from an array of frames
  this.generateSampleTable_ = function(frames) {
    var
      i,
      currentFrame,
      samples = [];
    for (i = 0; i < frames.length; i++) {
      currentFrame = frames[i];
      samples.push({
        size: currentFrame.data.byteLength,
        duration: 1024 // For AAC audio, all samples contain 1024 samples
      });
    }
    return samples;
  };

  // generate the track's raw mdat payload by concatenating the data of
  // every frame into a single contiguous buffer
  this.concatenateFrameData_ = function(frames) {
    var
      i,
      currentFrame,
      dataOffset = 0,
      data = new Uint8Array(sumFrameByteLengths(frames));
    for (i = 0; i < frames.length; i++) {
      currentFrame = frames[i];
      data.set(currentFrame.data, dataOffset);
      dataOffset += currentFrame.data.byteLength;
    }
    return data;
  };
};
AudioSegmentStream.prototype = new Stream();
  4615. /**
  4616. * Constructs a single-track, ISO BMFF media segment from H264 data
  4617. * events. The output of this stream can be fed to a SourceBuffer
  4618. * configured with a suitable initialization segment.
  4619. * @param track {object} track metadata configuration
  4620. * @param options {object} transmuxer options object
  4621. * @param options.alignGopsAtEnd {boolean} If true, start from the end of the
  4622. * gopsToAlignWith list when attempting to align gop pts
  4623. */
VideoSegmentStream = function(track, options) {
  var
    sequenceNumber = 0,    // moof sequence number, bumped per emitted segment
    nalUnits = [],         // NAL units buffered until flush()
    gopsToAlignWith = [],  // reference GOP list for alignment (see alignGopsWith)
    config,                // decoded SPS configuration for the current segment
    pps;                   // raw PPS bytes for the current segment
  options = options || {};
  VideoSegmentStream.prototype.init.call(this);
  delete track.minPTS;
  this.gopCache_ = [];

  // Buffer a NAL unit; when an SPS or PPS is seen for the first time in
  // this segment, record the track configuration from it
  this.push = function(nalUnit) {
    collectDtsInfo(track, nalUnit);
    // record the track config
    if (nalUnit.nalUnitType === 'seq_parameter_set_rbsp' && !config) {
      config = nalUnit.config;
      track.sps = [nalUnit.data];
      VIDEO_PROPERTIES.forEach(function(prop) {
        track[prop] = config[prop];
      }, this);
    }
    if (nalUnit.nalUnitType === 'pic_parameter_set_rbsp' &&
        !pps) {
      pps = nalUnit.data;
      track.pps = [nalUnit.data];
    }
    // buffer video until flush() is called
    nalUnits.push(nalUnit);
  };

  // Assemble the buffered NAL units into frames and GOPs, repair a missing
  // leading keyframe, optionally align against gopsToAlignWith, and emit
  // a complete moof+mdat segment as a 'data' event followed by 'done'
  this.flush = function() {
    var
      frames,
      gopForFusion,
      gops,
      moof,
      mdat,
      boxes;
    // Throw away nalUnits at the start of the byte stream until
    // we find the first AUD
    while (nalUnits.length) {
      if (nalUnits[0].nalUnitType === 'access_unit_delimiter_rbsp') {
        break;
      }
      nalUnits.shift();
    }
    // Return early if no video data has been observed
    if (nalUnits.length === 0) {
      this.resetStream_();
      this.trigger('done', 'VideoSegmentStream');
      return;
    }
    // Organize the raw nal-units into arrays that represent
    // higher-level constructs such as frames and gops
    // (group-of-pictures)
    frames = this.groupNalsIntoFrames_(nalUnits);
    gops = this.groupFramesIntoGops_(frames);
    // If the first frame of this fragment is not a keyframe we have
    // a problem since MSE (on Chrome) requires a leading keyframe.
    //
    // We have two approaches to repairing this situation:
    // 1) GOP-FUSION:
    //    This is where we keep track of the GOPS (group-of-pictures)
    //    from previous fragments and attempt to find one that we can
    //    prepend to the current fragment in order to create a valid
    //    fragment.
    // 2) KEYFRAME-PULLING:
    //    Here we search for the first keyframe in the fragment and
    //    throw away all the frames between the start of the fragment
    //    and that keyframe. We then extend the duration and pull the
    //    PTS of the keyframe forward so that it covers the time range
    //    of the frames that were disposed of.
    //
    // #1 is far preferable over #2 which can cause "stuttering" but
    // requires more things to be just right.
    if (!gops[0][0].keyFrame) {
      // Search for a gop for fusion from our gopCache
      gopForFusion = this.getGopForFusion_(nalUnits[0], track);
      if (gopForFusion) {
        gops.unshift(gopForFusion);
        // Adjust Gops' metadata to account for the inclusion of the
        // new gop at the beginning
        gops.byteLength += gopForFusion.byteLength;
        gops.nalCount += gopForFusion.nalCount;
        gops.pts = gopForFusion.pts;
        gops.dts = gopForFusion.dts;
        gops.duration += gopForFusion.duration;
      } else {
        // If we didn't find a candidate gop fall back to keyframe-pulling
        gops = this.extendFirstKeyFrame_(gops);
      }
    }
    // Trim gops to align with gopsToAlignWith
    if (gopsToAlignWith.length) {
      var alignedGops;
      if (options.alignGopsAtEnd) {
        alignedGops = this.alignGopsAtEnd_(gops);
      } else {
        alignedGops = this.alignGopsAtStart_(gops);
      }
      if (!alignedGops) {
        // save all the nals in the last GOP into the gop cache
        this.gopCache_.unshift({
          gop: gops.pop(),
          pps: track.pps,
          sps: track.sps
        });
        // Keep a maximum of 6 GOPs in the cache
        this.gopCache_.length = Math.min(6, this.gopCache_.length);
        // Clear nalUnits
        nalUnits = [];
        // return early no gops can be aligned with desired gopsToAlignWith
        this.resetStream_();
        this.trigger('done', 'VideoSegmentStream');
        return;
      }
      // Some gops were trimmed. clear dts info so minSegmentDts and pts are correct
      // when recalculated before sending off to CoalesceStream
      clearDtsInfo(track);
      gops = alignedGops;
    }
    collectDtsInfo(track, gops);
    // First, we have to build the index from byte locations to
    // samples (that is, frames) in the video data
    track.samples = this.generateSampleTable_(gops);
    // Concatenate the video data and construct the mdat
    mdat = mp4.mdat(this.concatenateNalData_(gops));
    track.baseMediaDecodeTime = calculateTrackBaseMediaDecodeTime(track);
    this.trigger('processedGopsInfo', gops.map(function(gop) {
      return {
        pts: gop.pts,
        dts: gop.dts,
        byteLength: gop.byteLength
      };
    }));
    // save all the nals in the last GOP into the gop cache
    this.gopCache_.unshift({
      gop: gops.pop(),
      pps: track.pps,
      sps: track.sps
    });
    // Keep a maximum of 6 GOPs in the cache
    this.gopCache_.length = Math.min(6, this.gopCache_.length);
    // Clear nalUnits
    nalUnits = [];
    this.trigger('baseMediaDecodeTime', track.baseMediaDecodeTime);
    this.trigger('timelineStartInfo', track.timelineStartInfo);
    moof = mp4.moof(sequenceNumber, [track]);
    // it would be great to allocate this array up front instead of
    // throwing away hundreds of media segment fragments
    boxes = new Uint8Array(moof.byteLength + mdat.byteLength);
    // Bump the sequence number for next time
    sequenceNumber++;
    boxes.set(moof);
    boxes.set(mdat, moof.byteLength);
    this.trigger('data', {track: track, boxes: boxes});
    this.resetStream_();
    // Continue with the flush process now
    this.trigger('done', 'VideoSegmentStream');
  };

  this.resetStream_ = function() {
    clearDtsInfo(track);
    // reset config and pps because they may differ across segments
    // for instance, when we are rendition switching
    config = undefined;
    pps = undefined;
  };

  // Search for a candidate Gop for gop-fusion from the gop cache and
  // return it or return null if no good candidate was found
  // (the second argument passed by flush() is unused; the closure-scoped
  // `track` is consulted instead)
  this.getGopForFusion_ = function(nalUnit) {
    var
      halfSecond = 45000, // Half-a-second in a 90khz clock
      allowableOverlap = 10000, // About 3 frames @ 30fps
      nearestDistance = Infinity,
      dtsDistance,
      nearestGopObj,
      currentGop,
      currentGopObj,
      i;
    // Search for the GOP nearest to the beginning of this nal unit
    for (i = 0; i < this.gopCache_.length; i++) {
      currentGopObj = this.gopCache_[i];
      currentGop = currentGopObj.gop;
      // Reject Gops with different SPS or PPS
      if (!(track.pps && arrayEquals(track.pps[0], currentGopObj.pps[0])) ||
          !(track.sps && arrayEquals(track.sps[0], currentGopObj.sps[0]))) {
        continue;
      }
      // Reject Gops that would require a negative baseMediaDecodeTime
      if (currentGop.dts < track.timelineStartInfo.dts) {
        continue;
      }
      // The distance between the end of the gop and the start of the nalUnit
      dtsDistance = (nalUnit.dts - currentGop.dts) - currentGop.duration;
      // Only consider GOPS that start before the nal unit and end within
      // a half-second of the nal unit
      if (dtsDistance >= -allowableOverlap &&
          dtsDistance <= halfSecond) {
        // Always use the closest GOP we found if there is more than
        // one candidate
        if (!nearestGopObj ||
            nearestDistance > dtsDistance) {
          nearestGopObj = currentGopObj;
          nearestDistance = dtsDistance;
        }
      }
    }
    if (nearestGopObj) {
      return nearestGopObj.gop;
    }
    return null;
  };

  // KEYFRAME-PULLING: drop the leading keyframe-less GOP and stretch the
  // first frame of the next GOP backwards to cover the removed time range
  this.extendFirstKeyFrame_ = function(gops) {
    var currentGop;
    if (!gops[0][0].keyFrame && gops.length > 1) {
      // Remove the first GOP
      currentGop = gops.shift();
      gops.byteLength -= currentGop.byteLength;
      gops.nalCount -= currentGop.nalCount;
      // Extend the first frame of what is now the
      // first gop to cover the time period of the
      // frames we just removed
      gops[0][0].dts = currentGop.dts;
      gops[0][0].pts = currentGop.pts;
      gops[0][0].duration += currentGop.duration;
    }
    return gops;
  };

  // Convert an array of nal units into an array of frames with each frame being
  // composed of the nal units that make up that frame
  // Also keep track of cumulative data about the frame from the nal units such
  // as the frame duration, starting pts, etc.
  this.groupNalsIntoFrames_ = function(nalUnits) {
    var
      i,
      currentNal,
      currentFrame = [],
      frames = [];
    currentFrame.byteLength = 0;
    for (i = 0; i < nalUnits.length; i++) {
      currentNal = nalUnits[i];
      // Split on 'aud'-type nal units
      if (currentNal.nalUnitType === 'access_unit_delimiter_rbsp') {
        // Since the very first nal unit is expected to be an AUD
        // only push to the frames array when currentFrame is not empty
        if (currentFrame.length) {
          currentFrame.duration = currentNal.dts - currentFrame.dts;
          frames.push(currentFrame);
        }
        currentFrame = [currentNal];
        currentFrame.byteLength = currentNal.data.byteLength;
        currentFrame.pts = currentNal.pts;
        currentFrame.dts = currentNal.dts;
      } else {
        // Specifically flag key frames for ease of use later
        if (currentNal.nalUnitType === 'slice_layer_without_partitioning_rbsp_idr') {
          currentFrame.keyFrame = true;
        }
        currentFrame.duration = currentNal.dts - currentFrame.dts;
        currentFrame.byteLength += currentNal.data.byteLength;
        currentFrame.push(currentNal);
      }
    }
    // For the last frame, use the duration of the previous frame if we
    // have nothing better to go on
    if (frames.length &&
        (!currentFrame.duration ||
         currentFrame.duration <= 0)) {
      currentFrame.duration = frames[frames.length - 1].duration;
    }
    // Push the final frame
    frames.push(currentFrame);
    return frames;
  };

  // Convert an array of frames into an array of Gop with each Gop being composed
  // of the frames that make up that Gop
  // Also keep track of cumulative data about the Gop from the frames such as the
  // Gop duration, starting pts, etc.
  this.groupFramesIntoGops_ = function(frames) {
    var
      i,
      currentFrame,
      currentGop = [],
      gops = [];
    // We must pre-set some of the values on the Gop since we
    // keep running totals of these values
    currentGop.byteLength = 0;
    currentGop.nalCount = 0;
    currentGop.duration = 0;
    currentGop.pts = frames[0].pts;
    currentGop.dts = frames[0].dts;
    // store some metadata about all the Gops
    gops.byteLength = 0;
    gops.nalCount = 0;
    gops.duration = 0;
    gops.pts = frames[0].pts;
    gops.dts = frames[0].dts;
    for (i = 0; i < frames.length; i++) {
      currentFrame = frames[i];
      if (currentFrame.keyFrame) {
        // Since the very first frame is expected to be an keyframe
        // only push to the gops array when currentGop is not empty
        if (currentGop.length) {
          gops.push(currentGop);
          gops.byteLength += currentGop.byteLength;
          gops.nalCount += currentGop.nalCount;
          gops.duration += currentGop.duration;
        }
        currentGop = [currentFrame];
        currentGop.nalCount = currentFrame.length;
        currentGop.byteLength = currentFrame.byteLength;
        currentGop.pts = currentFrame.pts;
        currentGop.dts = currentFrame.dts;
        currentGop.duration = currentFrame.duration;
      } else {
        currentGop.duration += currentFrame.duration;
        currentGop.nalCount += currentFrame.length;
        currentGop.byteLength += currentFrame.byteLength;
        currentGop.push(currentFrame);
      }
    }
    if (gops.length && currentGop.duration <= 0) {
      currentGop.duration = gops[gops.length - 1].duration;
    }
    gops.byteLength += currentGop.byteLength;
    gops.nalCount += currentGop.nalCount;
    gops.duration += currentGop.duration;
    // push the final Gop
    gops.push(currentGop);
    return gops;
  };

  // generate the track's sample table from an array of gops
  this.generateSampleTable_ = function(gops, baseDataOffset) {
    var
      h, i,
      sample,
      currentGop,
      currentFrame,
      dataOffset = baseDataOffset || 0,
      samples = [];
    for (h = 0; h < gops.length; h++) {
      currentGop = gops[h];
      for (i = 0; i < currentGop.length; i++) {
        currentFrame = currentGop[i];
        sample = createDefaultSample();
        sample.dataOffset = dataOffset;
        sample.compositionTimeOffset = currentFrame.pts - currentFrame.dts;
        sample.duration = currentFrame.duration;
        sample.size = 4 * currentFrame.length; // Space for nal unit size
        sample.size += currentFrame.byteLength;
        if (currentFrame.keyFrame) {
          sample.flags.dependsOn = 2;
        }
        dataOffset += sample.size;
        samples.push(sample);
      }
    }
    return samples;
  };

  // generate the track's raw mdat data from an array of gops:
  // each NAL is written as a 4-byte big-endian length followed by its bytes
  this.concatenateNalData_ = function(gops) {
    var
      h, i, j,
      currentGop,
      currentFrame,
      currentNal,
      dataOffset = 0,
      nalsByteLength = gops.byteLength,
      numberOfNals = gops.nalCount,
      totalByteLength = nalsByteLength + 4 * numberOfNals,
      data = new Uint8Array(totalByteLength),
      view = new DataView(data.buffer);
    // For each Gop..
    for (h = 0; h < gops.length; h++) {
      currentGop = gops[h];
      // For each Frame..
      for (i = 0; i < currentGop.length; i++) {
        currentFrame = currentGop[i];
        // For each NAL..
        for (j = 0; j < currentFrame.length; j++) {
          currentNal = currentFrame[j];
          view.setUint32(dataOffset, currentNal.data.byteLength);
          dataOffset += 4;
          data.set(currentNal.data, dataOffset);
          dataOffset += currentNal.data.byteLength;
        }
      }
    }
    return data;
  };

  // trim gop list to the first gop found that has a matching pts with a gop in the list
  // of gopsToAlignWith starting from the START of the list
  this.alignGopsAtStart_ = function(gops) {
    var alignIndex, gopIndex, align, gop, byteLength, nalCount, duration, alignedGops;
    byteLength = gops.byteLength;
    nalCount = gops.nalCount;
    duration = gops.duration;
    alignIndex = gopIndex = 0;
    while (alignIndex < gopsToAlignWith.length && gopIndex < gops.length) {
      align = gopsToAlignWith[alignIndex];
      gop = gops[gopIndex];
      if (align.pts === gop.pts) {
        break;
      }
      if (gop.pts > align.pts) {
        // this current gop starts after the current gop we want to align on, so increment
        // align index
        alignIndex++;
        continue;
      }
      // current gop starts before the current gop we want to align on. so increment gop
      // index
      gopIndex++;
      byteLength -= gop.byteLength;
      nalCount -= gop.nalCount;
      duration -= gop.duration;
    }
    if (gopIndex === 0) {
      // no gops to trim
      return gops;
    }
    if (gopIndex === gops.length) {
      // all gops trimmed, skip appending all gops
      return null;
    }
    alignedGops = gops.slice(gopIndex);
    alignedGops.byteLength = byteLength;
    alignedGops.duration = duration;
    alignedGops.nalCount = nalCount;
    alignedGops.pts = alignedGops[0].pts;
    alignedGops.dts = alignedGops[0].dts;
    return alignedGops;
  };

  // trim gop list to the first gop found that has a matching pts with a gop in the list
  // of gopsToAlignWith starting from the END of the list
  this.alignGopsAtEnd_ = function(gops) {
    var alignIndex, gopIndex, align, gop, alignEndIndex, matchFound;
    alignIndex = gopsToAlignWith.length - 1;
    gopIndex = gops.length - 1;
    alignEndIndex = null;
    matchFound = false;
    while (alignIndex >= 0 && gopIndex >= 0) {
      align = gopsToAlignWith[alignIndex];
      gop = gops[gopIndex];
      if (align.pts === gop.pts) {
        matchFound = true;
        break;
      }
      if (align.pts > gop.pts) {
        alignIndex--;
        continue;
      }
      if (alignIndex === gopsToAlignWith.length - 1) {
        // gop.pts is greater than the last alignment candidate. If no match is found
        // by the end of this loop, we still want to append gops that come after this
        // point
        alignEndIndex = gopIndex;
      }
      gopIndex--;
    }
    if (!matchFound && alignEndIndex === null) {
      return null;
    }
    var trimIndex;
    if (matchFound) {
      trimIndex = gopIndex;
    } else {
      trimIndex = alignEndIndex;
    }
    if (trimIndex === 0) {
      return gops;
    }
    var alignedGops = gops.slice(trimIndex);
    var metadata = alignedGops.reduce(function(total, gop) {
      total.byteLength += gop.byteLength;
      total.duration += gop.duration;
      total.nalCount += gop.nalCount;
      return total;
    }, { byteLength: 0, duration: 0, nalCount: 0 });
    alignedGops.byteLength = metadata.byteLength;
    alignedGops.duration = metadata.duration;
    alignedGops.nalCount = metadata.nalCount;
    alignedGops.pts = alignedGops[0].pts;
    alignedGops.dts = alignedGops[0].dts;
    return alignedGops;
  };

  // Set the reference GOP list that future flushes will align against
  this.alignGopsWith = function(newGopsToAlignWith) {
    gopsToAlignWith = newGopsToAlignWith;
  };
};
VideoSegmentStream.prototype = new Stream();
  5114. /**
  5115. * Store information about the start and end of the track and the
  5116. * duration for each frame/sample we process in order to calculate
  5117. * the baseMediaDecodeTime
  5118. */
  5119. collectDtsInfo = function(track, data) {
  5120. if (typeof data.pts === 'number') {
  5121. if (track.timelineStartInfo.pts === undefined) {
  5122. track.timelineStartInfo.pts = data.pts;
  5123. }
  5124. if (track.minSegmentPts === undefined) {
  5125. track.minSegmentPts = data.pts;
  5126. } else {
  5127. track.minSegmentPts = Math.min(track.minSegmentPts, data.pts);
  5128. }
  5129. if (track.maxSegmentPts === undefined) {
  5130. track.maxSegmentPts = data.pts;
  5131. } else {
  5132. track.maxSegmentPts = Math.max(track.maxSegmentPts, data.pts);
  5133. }
  5134. }
  5135. if (typeof data.dts === 'number') {
  5136. if (track.timelineStartInfo.dts === undefined) {
  5137. track.timelineStartInfo.dts = data.dts;
  5138. }
  5139. if (track.minSegmentDts === undefined) {
  5140. track.minSegmentDts = data.dts;
  5141. } else {
  5142. track.minSegmentDts = Math.min(track.minSegmentDts, data.dts);
  5143. }
  5144. if (track.maxSegmentDts === undefined) {
  5145. track.maxSegmentDts = data.dts;
  5146. } else {
  5147. track.maxSegmentDts = Math.max(track.maxSegmentDts, data.dts);
  5148. }
  5149. }
  5150. };
  5151. /**
  5152. * Clear values used to calculate the baseMediaDecodeTime between
  5153. * tracks
  5154. */
  5155. clearDtsInfo = function(track) {
  5156. delete track.minSegmentDts;
  5157. delete track.maxSegmentDts;
  5158. delete track.minSegmentPts;
  5159. delete track.maxSegmentPts;
  5160. };
  5161. /**
  5162. * Calculate the track's baseMediaDecodeTime based on the earliest
  5163. * DTS the transmuxer has ever seen and the minimum DTS for the
  5164. * current track
  5165. */
  5166. calculateTrackBaseMediaDecodeTime = function(track) {
  5167. var
  5168. baseMediaDecodeTime,
  5169. scale,
  5170. // Calculate the distance, in time, that this segment starts from the start
  5171. // of the timeline (earliest time seen since the transmuxer initialized)
  5172. timeSinceStartOfTimeline = track.minSegmentDts - track.timelineStartInfo.dts;
  5173. // track.timelineStartInfo.baseMediaDecodeTime is the location, in time, where
  5174. // we want the start of the first segment to be placed
  5175. baseMediaDecodeTime = track.timelineStartInfo.baseMediaDecodeTime;
  5176. // Add to that the distance this segment is from the very first
  5177. baseMediaDecodeTime += timeSinceStartOfTimeline;
  5178. // baseMediaDecodeTime must not become negative
  5179. baseMediaDecodeTime = Math.max(0, baseMediaDecodeTime);
  5180. if (track.type === 'audio') {
  5181. // Audio has a different clock equal to the sampling_rate so we need to
  5182. // scale the PTS values into the clock rate of the track
  5183. scale = track.samplerate / ONE_SECOND_IN_TS;
  5184. baseMediaDecodeTime *= scale;
  5185. baseMediaDecodeTime = Math.floor(baseMediaDecodeTime);
  5186. }
  5187. return baseMediaDecodeTime;
  5188. };
  5189. /**
  5190. * A Stream that can combine multiple streams (ie. audio & video)
  5191. * into a single output segment for MSE. Also supports audio-only
  5192. * and video-only streams.
  5193. */
  5194. CoalesceStream = function(options, metadataStream) {
  5195. // Number of Tracks per output segment
  5196. // If greater than 1, we combine multiple
  5197. // tracks into a single segment
  5198. this.numberOfTracks = 0;
  5199. this.metadataStream = metadataStream;
  5200. if (typeof options.remux !== 'undefined') {
  5201. this.remuxTracks = !!options.remux;
  5202. } else {
  5203. this.remuxTracks = true;
  5204. }
  5205. this.pendingTracks = [];
  5206. this.videoTrack = null;
  5207. this.pendingBoxes = [];
  5208. this.pendingCaptions = [];
  5209. this.pendingMetadata = [];
  5210. this.pendingBytes = 0;
  5211. this.emittedTracks = 0;
  5212. CoalesceStream.prototype.init.call(this);
  5213. // Take output from multiple
  5214. this.push = function(output) {
  5215. // buffer incoming captions until the associated video segment
  5216. // finishes
  5217. if (output.text) {
  5218. return this.pendingCaptions.push(output);
  5219. }
  5220. // buffer incoming id3 tags until the final flush
  5221. if (output.frames) {
  5222. return this.pendingMetadata.push(output);
  5223. }
  5224. // Add this track to the list of pending tracks and store
  5225. // important information required for the construction of
  5226. // the final segment
  5227. this.pendingTracks.push(output.track);
  5228. this.pendingBoxes.push(output.boxes);
  5229. this.pendingBytes += output.boxes.byteLength;
  5230. if (output.track.type === 'video') {
  5231. this.videoTrack = output.track;
  5232. }
  5233. if (output.track.type === 'audio') {
  5234. this.audioTrack = output.track;
  5235. }
  5236. };
  5237. };
  5238. CoalesceStream.prototype = new Stream();
/**
 * Assemble everything buffered since the last flush into a single
 * `data` event: an init segment (moov plus track definitions) followed
 * by one moof+mdat per track, with buffered caption and id3 times
 * rebased from 90kHz PTS onto second offsets in the segment timeline.
 *
 * Emits `done` once all expected tracks have been flushed and emitted.
 *
 * @param {string} flushSource - name of the stream that triggered this
 *   flush; used to distinguish data-producing flushes from
 *   caption/metadata-only flushes.
 */
CoalesceStream.prototype.flush = function(flushSource) {
  var
    offset = 0,
    event = {
      captions: [],
      captionStreams: {},
      metadata: [],
      info: {}
    },
    caption,
    id3,
    initSegment,
    timelineStartPts = 0,
    i;
  if (this.pendingTracks.length < this.numberOfTracks) {
    if (flushSource !== 'VideoSegmentStream' &&
        flushSource !== 'AudioSegmentStream') {
      // Return because we haven't received a flush from a data-generating
      // portion of the segment (meaning that we have only received meta-data
      // or captions.)
      return;
    } else if (this.remuxTracks) {
      // Return until we have enough tracks from the pipeline to remux (if we
      // are remuxing audio and video into a single MP4)
      return;
    } else if (this.pendingTracks.length === 0) {
      // In the case where we receive a flush without any data having been
      // received we consider it an emitted track for the purposes of coalescing
      // `done` events.
      // We do this for the case where there is an audio and video track in the
      // segment but no audio data. (seen in several playlists with alternate
      // audio tracks and no audio present in the main TS segments.)
      this.emittedTracks++;
      if (this.emittedTracks >= this.numberOfTracks) {
        this.trigger('done');
        this.emittedTracks = 0;
      }
      return;
    }
  }
  // the video timeline is the reference when both tracks are present
  if (this.videoTrack) {
    timelineStartPts = this.videoTrack.timelineStartInfo.pts;
    VIDEO_PROPERTIES.forEach(function(prop) {
      event.info[prop] = this.videoTrack[prop];
    }, this);
  } else if (this.audioTrack) {
    timelineStartPts = this.audioTrack.timelineStartInfo.pts;
    AUDIO_PROPERTIES.forEach(function(prop) {
      event.info[prop] = this.audioTrack[prop];
    }, this);
  }
  if (this.pendingTracks.length === 1) {
    event.type = this.pendingTracks[0].type;
  } else {
    event.type = 'combined';
  }
  this.emittedTracks += this.pendingTracks.length;
  initSegment = mp4.initSegment(this.pendingTracks);
  // Create a new typed array to hold the init segment
  event.initSegment = new Uint8Array(initSegment.byteLength);
  // Create an init segment containing a moov
  // and track definitions
  event.initSegment.set(initSegment);
  // Create a new typed array to hold the moof+mdats
  event.data = new Uint8Array(this.pendingBytes);
  // Append each moof+mdat (one per track) together
  for (i = 0; i < this.pendingBoxes.length; i++) {
    event.data.set(this.pendingBoxes[i], offset);
    offset += this.pendingBoxes[i].byteLength;
  }
  // Translate caption PTS times into second offsets into the
  // video timeline for the segment, and add track info
  // (90e3 is the 90kHz MPEG-2 TS clock rate)
  for (i = 0; i < this.pendingCaptions.length; i++) {
    caption = this.pendingCaptions[i];
    caption.startTime = (caption.startPts - timelineStartPts);
    caption.startTime /= 90e3;
    caption.endTime = (caption.endPts - timelineStartPts);
    caption.endTime /= 90e3;
    event.captionStreams[caption.stream] = true;
    event.captions.push(caption);
  }
  // Translate ID3 frame PTS times into second offsets into the
  // video timeline for the segment
  for (i = 0; i < this.pendingMetadata.length; i++) {
    id3 = this.pendingMetadata[i];
    id3.cueTime = (id3.pts - timelineStartPts);
    id3.cueTime /= 90e3;
    event.metadata.push(id3);
  }
  // We add this to every single emitted segment even though we only need
  // it for the first
  event.metadata.dispatchType = this.metadataStream.dispatchType;
  // Reset stream state
  // NOTE(review): only videoTrack is cleared here; audioTrack is retained
  // across flushes — confirm that is intentional
  this.pendingTracks.length = 0;
  this.videoTrack = null;
  this.pendingBoxes.length = 0;
  this.pendingCaptions.length = 0;
  this.pendingBytes = 0;
  this.pendingMetadata.length = 0;
  // Emit the built segment
  this.trigger('data', event);
  // Only emit `done` if all tracks have been flushed and emitted
  if (this.emittedTracks >= this.numberOfTracks) {
    this.trigger('done');
    this.emittedTracks = 0;
  }
};
  5346. /**
  5347. * A Stream that expects MP2T binary data as input and produces
  5348. * corresponding media segments, suitable for use with Media Source
  5349. * Extension (MSE) implementations that support the ISO BMFF byte
  5350. * stream format, like Chrome.
  5351. */
  5352. Transmuxer = function(options) {
  5353. var
  5354. self = this,
  5355. hasFlushed = true,
  5356. videoTrack,
  5357. audioTrack;
  5358. Transmuxer.prototype.init.call(this);
  5359. options = options || {};
  5360. this.baseMediaDecodeTime = options.baseMediaDecodeTime || 0;
  5361. this.transmuxPipeline_ = {};
  5362. this.setupAacPipeline = function() {
  5363. var pipeline = {};
  5364. this.transmuxPipeline_ = pipeline;
  5365. pipeline.type = 'aac';
  5366. pipeline.metadataStream = new m2ts.MetadataStream();
  5367. // set up the parsing pipeline
  5368. pipeline.aacStream = new AacStream();
  5369. pipeline.audioTimestampRolloverStream = new m2ts.TimestampRolloverStream('audio');
  5370. pipeline.timedMetadataTimestampRolloverStream = new m2ts.TimestampRolloverStream('timed-metadata');
  5371. pipeline.adtsStream = new AdtsStream();
  5372. pipeline.coalesceStream = new CoalesceStream(options, pipeline.metadataStream);
  5373. pipeline.headOfPipeline = pipeline.aacStream;
  5374. pipeline.aacStream
  5375. .pipe(pipeline.audioTimestampRolloverStream)
  5376. .pipe(pipeline.adtsStream);
  5377. pipeline.aacStream
  5378. .pipe(pipeline.timedMetadataTimestampRolloverStream)
  5379. .pipe(pipeline.metadataStream)
  5380. .pipe(pipeline.coalesceStream);
  5381. pipeline.metadataStream.on('timestamp', function(frame) {
  5382. pipeline.aacStream.setTimestamp(frame.timeStamp);
  5383. });
  5384. pipeline.aacStream.on('data', function(data) {
  5385. if (data.type === 'timed-metadata' && !pipeline.audioSegmentStream) {
  5386. audioTrack = audioTrack || {
  5387. timelineStartInfo: {
  5388. baseMediaDecodeTime: self.baseMediaDecodeTime
  5389. },
  5390. codec: 'adts',
  5391. type: 'audio'
  5392. };
  5393. // hook up the audio segment stream to the first track with aac data
  5394. pipeline.coalesceStream.numberOfTracks++;
  5395. pipeline.audioSegmentStream = new AudioSegmentStream(audioTrack);
  5396. // Set up the final part of the audio pipeline
  5397. pipeline.adtsStream
  5398. .pipe(pipeline.audioSegmentStream)
  5399. .pipe(pipeline.coalesceStream);
  5400. }
  5401. });
  5402. // Re-emit any data coming from the coalesce stream to the outside world
  5403. pipeline.coalesceStream.on('data', this.trigger.bind(this, 'data'));
  5404. // Let the consumer know we have finished flushing the entire pipeline
  5405. pipeline.coalesceStream.on('done', this.trigger.bind(this, 'done'));
  5406. };
  5407. this.setupTsPipeline = function() {
  5408. var pipeline = {};
  5409. this.transmuxPipeline_ = pipeline;
  5410. pipeline.type = 'ts';
  5411. pipeline.metadataStream = new m2ts.MetadataStream();
  5412. // set up the parsing pipeline
  5413. pipeline.packetStream = new m2ts.TransportPacketStream();
  5414. pipeline.parseStream = new m2ts.TransportParseStream();
  5415. pipeline.elementaryStream = new m2ts.ElementaryStream();
  5416. pipeline.videoTimestampRolloverStream = new m2ts.TimestampRolloverStream('video');
  5417. pipeline.audioTimestampRolloverStream = new m2ts.TimestampRolloverStream('audio');
  5418. pipeline.timedMetadataTimestampRolloverStream = new m2ts.TimestampRolloverStream('timed-metadata');
  5419. pipeline.adtsStream = new AdtsStream();
  5420. pipeline.h264Stream = new H264Stream();
  5421. pipeline.captionStream = new m2ts.CaptionStream();
  5422. pipeline.coalesceStream = new CoalesceStream(options, pipeline.metadataStream);
  5423. pipeline.headOfPipeline = pipeline.packetStream;
  5424. // disassemble MPEG2-TS packets into elementary streams
  5425. pipeline.packetStream
  5426. .pipe(pipeline.parseStream)
  5427. .pipe(pipeline.elementaryStream);
  5428. // !!THIS ORDER IS IMPORTANT!!
  5429. // demux the streams
  5430. pipeline.elementaryStream
  5431. .pipe(pipeline.videoTimestampRolloverStream)
  5432. .pipe(pipeline.h264Stream);
  5433. pipeline.elementaryStream
  5434. .pipe(pipeline.audioTimestampRolloverStream)
  5435. .pipe(pipeline.adtsStream);
  5436. pipeline.elementaryStream
  5437. .pipe(pipeline.timedMetadataTimestampRolloverStream)
  5438. .pipe(pipeline.metadataStream)
  5439. .pipe(pipeline.coalesceStream);
  5440. // Hook up CEA-608/708 caption stream
  5441. pipeline.h264Stream.pipe(pipeline.captionStream)
  5442. .pipe(pipeline.coalesceStream);
  5443. pipeline.elementaryStream.on('data', function(data) {
  5444. var i;
  5445. if (data.type === 'metadata') {
  5446. i = data.tracks.length;
  5447. // scan the tracks listed in the metadata
  5448. while (i--) {
  5449. if (!videoTrack && data.tracks[i].type === 'video') {
  5450. videoTrack = data.tracks[i];
  5451. videoTrack.timelineStartInfo.baseMediaDecodeTime = self.baseMediaDecodeTime;
  5452. } else if (!audioTrack && data.tracks[i].type === 'audio') {
  5453. audioTrack = data.tracks[i];
  5454. audioTrack.timelineStartInfo.baseMediaDecodeTime = self.baseMediaDecodeTime;
  5455. }
  5456. }
  5457. // hook up the video segment stream to the first track with h264 data
  5458. if (videoTrack && !pipeline.videoSegmentStream) {
  5459. pipeline.coalesceStream.numberOfTracks++;
  5460. pipeline.videoSegmentStream = new VideoSegmentStream(videoTrack, options);
  5461. pipeline.videoSegmentStream.on('timelineStartInfo', function(timelineStartInfo) {
  5462. // When video emits timelineStartInfo data after a flush, we forward that
  5463. // info to the AudioSegmentStream, if it exists, because video timeline
  5464. // data takes precedence.
  5465. if (audioTrack) {
  5466. audioTrack.timelineStartInfo = timelineStartInfo;
  5467. // On the first segment we trim AAC frames that exist before the
  5468. // very earliest DTS we have seen in video because Chrome will
  5469. // interpret any video track with a baseMediaDecodeTime that is
  5470. // non-zero as a gap.
  5471. pipeline.audioSegmentStream.setEarliestDts(timelineStartInfo.dts);
  5472. }
  5473. });
  5474. pipeline.videoSegmentStream.on('processedGopsInfo',
  5475. self.trigger.bind(self, 'gopInfo'));
  5476. pipeline.videoSegmentStream.on('baseMediaDecodeTime', function(baseMediaDecodeTime) {
  5477. if (audioTrack) {
  5478. pipeline.audioSegmentStream.setVideoBaseMediaDecodeTime(baseMediaDecodeTime);
  5479. }
  5480. });
  5481. // Set up the final part of the video pipeline
  5482. pipeline.h264Stream
  5483. .pipe(pipeline.videoSegmentStream)
  5484. .pipe(pipeline.coalesceStream);
  5485. }
  5486. if (audioTrack && !pipeline.audioSegmentStream) {
  5487. // hook up the audio segment stream to the first track with aac data
  5488. pipeline.coalesceStream.numberOfTracks++;
  5489. pipeline.audioSegmentStream = new AudioSegmentStream(audioTrack);
  5490. // Set up the final part of the audio pipeline
  5491. pipeline.adtsStream
  5492. .pipe(pipeline.audioSegmentStream)
  5493. .pipe(pipeline.coalesceStream);
  5494. }
  5495. }
  5496. });
  5497. // Re-emit any data coming from the coalesce stream to the outside world
  5498. pipeline.coalesceStream.on('data', this.trigger.bind(this, 'data'));
  5499. // Let the consumer know we have finished flushing the entire pipeline
  5500. pipeline.coalesceStream.on('done', this.trigger.bind(this, 'done'));
  5501. };
  5502. // hook up the segment streams once track metadata is delivered
  5503. this.setBaseMediaDecodeTime = function(baseMediaDecodeTime) {
  5504. var pipeline = this.transmuxPipeline_;
  5505. this.baseMediaDecodeTime = baseMediaDecodeTime;
  5506. if (audioTrack) {
  5507. audioTrack.timelineStartInfo.dts = undefined;
  5508. audioTrack.timelineStartInfo.pts = undefined;
  5509. clearDtsInfo(audioTrack);
  5510. audioTrack.timelineStartInfo.baseMediaDecodeTime = baseMediaDecodeTime;
  5511. if (pipeline.audioTimestampRolloverStream) {
  5512. pipeline.audioTimestampRolloverStream.discontinuity();
  5513. }
  5514. }
  5515. if (videoTrack) {
  5516. if (pipeline.videoSegmentStream) {
  5517. pipeline.videoSegmentStream.gopCache_ = [];
  5518. pipeline.videoTimestampRolloverStream.discontinuity();
  5519. }
  5520. videoTrack.timelineStartInfo.dts = undefined;
  5521. videoTrack.timelineStartInfo.pts = undefined;
  5522. clearDtsInfo(videoTrack);
  5523. pipeline.captionStream.reset();
  5524. videoTrack.timelineStartInfo.baseMediaDecodeTime = baseMediaDecodeTime;
  5525. }
  5526. if (pipeline.timedMetadataTimestampRolloverStream) {
  5527. pipeline.timedMetadataTimestampRolloverStream.discontinuity();
  5528. }
  5529. };
  5530. this.setAudioAppendStart = function(timestamp) {
  5531. if (audioTrack) {
  5532. this.transmuxPipeline_.audioSegmentStream.setAudioAppendStart(timestamp);
  5533. }
  5534. };
  5535. this.alignGopsWith = function(gopsToAlignWith) {
  5536. if (videoTrack && this.transmuxPipeline_.videoSegmentStream) {
  5537. this.transmuxPipeline_.videoSegmentStream.alignGopsWith(gopsToAlignWith);
  5538. }
  5539. };
  5540. // feed incoming data to the front of the parsing pipeline
  5541. this.push = function(data) {
  5542. if (hasFlushed) {
  5543. var isAac = isLikelyAacData(data);
  5544. if (isAac && this.transmuxPipeline_.type !== 'aac') {
  5545. this.setupAacPipeline();
  5546. } else if (!isAac && this.transmuxPipeline_.type !== 'ts') {
  5547. this.setupTsPipeline();
  5548. }
  5549. hasFlushed = false;
  5550. }
  5551. this.transmuxPipeline_.headOfPipeline.push(data);
  5552. };
  5553. // flush any buffered data
  5554. this.flush = function() {
  5555. hasFlushed = true;
  5556. // Start at the top of the pipeline and flush all pending work
  5557. this.transmuxPipeline_.headOfPipeline.flush();
  5558. };
  5559. // Caption data has to be reset when seeking outside buffered range
  5560. this.resetCaptions = function() {
  5561. if (this.transmuxPipeline_.captionStream) {
  5562. this.transmuxPipeline_.captionStream.reset();
  5563. }
  5564. };
  5565. };
  5566. Transmuxer.prototype = new Stream();
// Public exports for the mp4 transmuxer module
module.exports = {
  Transmuxer: Transmuxer,
  VideoSegmentStream: VideoSegmentStream,
  AudioSegmentStream: AudioSegmentStream,
  AUDIO_PROPERTIES: AUDIO_PROPERTIES,
  VIDEO_PROPERTIES: VIDEO_PROPERTIES
};
  5574. },{"../aac":4,"../codecs/adts.js":6,"../codecs/h264":7,"../data/silence":9,"../m2ts/m2ts.js":19,"../utils/clock":31,"../utils/stream.js":33,"./mp4-generator.js":25}],28:[function(require,module,exports){
'use strict';
var
  // FLV tag-type byte -> name used in parsed output
  tagTypes = {
    0x08: 'audio',
    0x09: 'video',
    0x12: 'metadata'
  },
  5582. hex = function(val) {
  5583. return '0x' + ('00' + val.toString(16)).slice(-2).toUpperCase();
  5584. },
  5585. hexStringList = function(data) {
  5586. var arr = [], i;
  5587. while (data.byteLength > 0) {
  5588. i = 0;
  5589. arr.push(hex(data[i++]));
  5590. data = data.subarray(i);
  5591. }
  5592. return arr.join(' ');
  5593. },
  5594. parseAVCTag = function(tag, obj) {
  5595. var
  5596. avcPacketTypes = [
  5597. 'AVC Sequence Header',
  5598. 'AVC NALU',
  5599. 'AVC End-of-Sequence'
  5600. ],
  5601. compositionTime = (tag[1] & parseInt('01111111', 2) << 16) | (tag[2] << 8) | tag[3];
  5602. obj = obj || {};
  5603. obj.avcPacketType = avcPacketTypes[tag[0]];
  5604. obj.CompositionTime = (tag[1] & parseInt('10000000', 2)) ? -compositionTime : compositionTime;
  5605. if (tag[0] === 1) {
  5606. obj.nalUnitTypeRaw = hexStringList(tag.subarray(4, 100));
  5607. } else {
  5608. obj.data = hexStringList(tag.subarray(4));
  5609. }
  5610. return obj;
  5611. },
  5612. parseVideoTag = function(tag, obj) {
  5613. var
  5614. frameTypes = [
  5615. 'Unknown',
  5616. 'Keyframe (for AVC, a seekable frame)',
  5617. 'Inter frame (for AVC, a nonseekable frame)',
  5618. 'Disposable inter frame (H.263 only)',
  5619. 'Generated keyframe (reserved for server use only)',
  5620. 'Video info/command frame'
  5621. ],
  5622. codecID = tag[0] & parseInt('00001111', 2);
  5623. obj = obj || {};
  5624. obj.frameType = frameTypes[(tag[0] & parseInt('11110000', 2)) >>> 4];
  5625. obj.codecID = codecID;
  5626. if (codecID === 7) {
  5627. return parseAVCTag(tag.subarray(1), obj);
  5628. }
  5629. return obj;
  5630. },
  5631. parseAACTag = function(tag, obj) {
  5632. var packetTypes = [
  5633. 'AAC Sequence Header',
  5634. 'AAC Raw'
  5635. ];
  5636. obj = obj || {};
  5637. obj.aacPacketType = packetTypes[tag[0]];
  5638. obj.data = hexStringList(tag.subarray(1));
  5639. return obj;
  5640. },
  5641. parseAudioTag = function(tag, obj) {
  5642. var
  5643. formatTable = [
  5644. 'Linear PCM, platform endian',
  5645. 'ADPCM',
  5646. 'MP3',
  5647. 'Linear PCM, little endian',
  5648. 'Nellymoser 16-kHz mono',
  5649. 'Nellymoser 8-kHz mono',
  5650. 'Nellymoser',
  5651. 'G.711 A-law logarithmic PCM',
  5652. 'G.711 mu-law logarithmic PCM',
  5653. 'reserved',
  5654. 'AAC',
  5655. 'Speex',
  5656. 'MP3 8-Khz',
  5657. 'Device-specific sound'
  5658. ],
  5659. samplingRateTable = [
  5660. '5.5-kHz',
  5661. '11-kHz',
  5662. '22-kHz',
  5663. '44-kHz'
  5664. ],
  5665. soundFormat = (tag[0] & parseInt('11110000', 2)) >>> 4;
  5666. obj = obj || {};
  5667. obj.soundFormat = formatTable[soundFormat];
  5668. obj.soundRate = samplingRateTable[(tag[0] & parseInt('00001100', 2)) >>> 2];
  5669. obj.soundSize = ((tag[0] & parseInt('00000010', 2)) >>> 1) ? '16-bit' : '8-bit';
  5670. obj.soundType = (tag[0] & parseInt('00000001', 2)) ? 'Stereo' : 'Mono';
  5671. if (soundFormat === 10) {
  5672. return parseAACTag(tag.subarray(1), obj);
  5673. }
  5674. return obj;
  5675. },
  5676. parseGenericTag = function(tag) {
  5677. return {
  5678. tagType: tagTypes[tag[0]],
  5679. dataSize: (tag[1] << 16) | (tag[2] << 8) | tag[3],
  5680. timestamp: (tag[7] << 24) | (tag[4] << 16) | (tag[5] << 8) | tag[6],
  5681. streamID: (tag[8] << 16) | (tag[9] << 8) | tag[10]
  5682. };
  5683. },
  5684. inspectFlvTag = function(tag) {
  5685. var header = parseGenericTag(tag);
  5686. switch (tag[0]) {
  5687. case 0x08:
  5688. parseAudioTag(tag.subarray(11), header);
  5689. break;
  5690. case 0x09:
  5691. parseVideoTag(tag.subarray(11), header);
  5692. break;
  5693. case 0x12:
  5694. }
  5695. return header;
  5696. },
  5697. inspectFlv = function(bytes) {
  5698. var i = 9, // header
  5699. dataSize,
  5700. parsedResults = [],
  5701. tag;
  5702. // traverse the tags
  5703. i += 4; // skip previous tag size
  5704. while (i < bytes.byteLength) {
  5705. dataSize = bytes[i + 1] << 16;
  5706. dataSize |= bytes[i + 2] << 8;
  5707. dataSize |= bytes[i + 3];
  5708. dataSize += 11;
  5709. tag = bytes.subarray(i, i + dataSize);
  5710. parsedResults.push(inspectFlvTag(tag));
  5711. i += dataSize + 4;
  5712. }
  5713. return parsedResults;
  5714. },
// Render an array of parsed tags as pretty-printed JSON.
textifyFlv = function(flvTagArray) {
  return JSON.stringify(flvTagArray, null, 2);
};
// Public exports for the FLV inspector module
module.exports = {
  inspectTag: inspectFlvTag,
  inspect: inspectFlv,
  textify: textifyFlv
};
  5723. },{}],29:[function(require,module,exports){
  5724. (function (global){
  5725. /**
  5726. * mux.js
  5727. *
  5728. * Copyright (c) 2015 Brightcove
  5729. * All rights reserved.
  5730. *
  5731. * Parse the internal MP4 structure into an equivalent javascript
  5732. * object.
  5733. */
  5734. 'use strict';
var
  inspectMp4,
  textifyMp4,
  parseType = require('../mp4/probe').parseType,
  // MP4 timestamps count seconds from 1904-01-01; 2082844800000 ms is
  // the offset between that epoch and the Unix epoch
  parseMp4Date = function(seconds) {
    return new Date(seconds * 1000 - 2082844800000);
  },
  5742. parseSampleFlags = function(flags) {
  5743. return {
  5744. isLeading: (flags[0] & 0x0c) >>> 2,
  5745. dependsOn: flags[0] & 0x03,
  5746. isDependedOn: (flags[1] & 0xc0) >>> 6,
  5747. hasRedundancy: (flags[1] & 0x30) >>> 4,
  5748. paddingValue: (flags[1] & 0x0e) >>> 1,
  5749. isNonSyncSample: flags[1] & 0x01,
  5750. degradationPriority: (flags[2] << 8) | flags[3]
  5751. };
  5752. },
  5753. nalParse = function(avcStream) {
  5754. var
  5755. avcView = new DataView(avcStream.buffer, avcStream.byteOffset, avcStream.byteLength),
  5756. result = [],
  5757. i,
  5758. length;
  5759. for (i = 0; i + 4 < avcStream.length; i += length) {
  5760. length = avcView.getUint32(i);
  5761. i += 4;
  5762. // bail if this doesn't appear to be an H264 stream
  5763. if (length <= 0) {
  5764. result.push('<span style=\'color:red;\'>MALFORMED DATA</span>');
  5765. continue;
  5766. }
  5767. switch (avcStream[i] & 0x1F) {
  5768. case 0x01:
  5769. result.push('slice_layer_without_partitioning_rbsp');
  5770. break;
  5771. case 0x05:
  5772. result.push('slice_layer_without_partitioning_rbsp_idr');
  5773. break;
  5774. case 0x06:
  5775. result.push('sei_rbsp');
  5776. break;
  5777. case 0x07:
  5778. result.push('seq_parameter_set_rbsp');
  5779. break;
  5780. case 0x08:
  5781. result.push('pic_parameter_set_rbsp');
  5782. break;
  5783. case 0x09:
  5784. result.push('access_unit_delimiter_rbsp');
  5785. break;
  5786. default:
  5787. result.push('UNKNOWN NAL - ' + avcStream[i] & 0x1F);
  5788. break;
  5789. }
  5790. }
  5791. return result;
  5792. },
// registry of handlers for individual mp4 box types
parse = {
  // codingname, not a first-class box type. stsd entries share the
  // same format as real boxes so the parsing infrastructure can be
  // shared
  avc1: function(data) {
    var view = new DataView(data.buffer, data.byteOffset, data.byteLength);
    return {
      dataReferenceIndex: view.getUint16(6),
      width: view.getUint16(24),
      height: view.getUint16(26),
      // NOTE(review): fractional part is divided by 16 here; a 16.16
      // fixed-point field would suggest 65536 (as mp4a uses) — confirm
      horizresolution: view.getUint16(28) + (view.getUint16(30) / 16),
      vertresolution: view.getUint16(32) + (view.getUint16(34) / 16),
      frameCount: view.getUint16(40),
      depth: view.getUint16(74),
      // trailing child boxes (avcC, btrt, ...) are parsed recursively
      config: inspectMp4(data.subarray(78, data.byteLength))
    };
  },
  // AVCDecoderConfigurationRecord: profile/level info plus the raw
  // SPS and PPS NAL units
  avcC: function(data) {
    var
      view = new DataView(data.buffer, data.byteOffset, data.byteLength),
      result = {
        configurationVersion: data[0],
        avcProfileIndication: data[1],
        profileCompatibility: data[2],
        avcLevelIndication: data[3],
        // low two bits hold (NAL length field size - 1)
        lengthSizeMinusOne: data[4] & 0x03,
        sps: [],
        pps: []
      },
      // low five bits of byte 5 give the SPS count
      numOfSequenceParameterSets = data[5] & 0x1f,
      numOfPictureParameterSets,
      nalSize,
      offset,
      i;
    // iterate past any SPSs
    offset = 6;
    for (i = 0; i < numOfSequenceParameterSets; i++) {
      nalSize = view.getUint16(offset);
      offset += 2;
      result.sps.push(new Uint8Array(data.subarray(offset, offset + nalSize)));
      offset += nalSize;
    }
    // iterate past any PPSs
    numOfPictureParameterSets = data[offset];
    offset++;
    for (i = 0; i < numOfPictureParameterSets; i++) {
      nalSize = view.getUint16(offset);
      offset += 2;
      result.pps.push(new Uint8Array(data.subarray(offset, offset + nalSize)));
      offset += nalSize;
    }
    return result;
  },
  // BitRateBox: decoding buffer size plus max/average bitrates
  btrt: function(data) {
    var view = new DataView(data.buffer, data.byteOffset, data.byteLength);
    return {
      bufferSizeDB: view.getUint32(0),
      maxBitrate: view.getUint32(4),
      avgBitrate: view.getUint32(8)
    };
  },
  // Elementary stream descriptor box.
  // NOTE(review): fields are read at hard-coded offsets, which assumes
  // single-byte descriptor length encodings throughout — confirm on
  // files that use multi-byte descriptor lengths
  esds: function(data) {
    return {
      version: data[0],
      flags: new Uint8Array(data.subarray(1, 4)),
      esId: (data[6] << 8) | data[7],
      streamPriority: data[8] & 0x1f,
      decoderConfig: {
        objectProfileIndication: data[11],
        streamType: (data[12] >>> 2) & 0x3f,
        bufferSize: (data[13] << 16) | (data[14] << 8) | data[15],
        maxBitrate: (data[16] << 24) |
          (data[17] << 16) |
          (data[18] << 8) |
          data[19],
        avgBitrate: (data[20] << 24) |
          (data[21] << 16) |
          (data[22] << 8) |
          data[23],
        decoderConfigDescriptor: {
          tag: data[24],
          length: data[25],
          audioObjectType: (data[26] >>> 3) & 0x1f,
          // sampling frequency index straddles the byte boundary
          samplingFrequencyIndex: ((data[26] & 0x07) << 1) |
            ((data[27] >>> 7) & 0x01),
          channelConfiguration: (data[27] >>> 3) & 0x0f
        }
      }
    };
  },
  // FileTypeBox: major/minor brand plus the compatible-brand list
  ftyp: function(data) {
    var
      view = new DataView(data.buffer, data.byteOffset, data.byteLength),
      result = {
        majorBrand: parseType(data.subarray(0, 4)),
        minorVersion: view.getUint32(4),
        compatibleBrands: []
      },
      i = 8;
    // remaining bytes are consecutive 4-character brand codes
    while (i < data.byteLength) {
      result.compatibleBrands.push(parseType(data.subarray(i, i + 4)));
      i += 4;
    }
    return result;
  },
  // DataInformationBox: pure container, recurse into children
  dinf: function(data) {
    return {
      boxes: inspectMp4(data)
    };
  },
  // DataReferenceBox: version/flags then child data-reference boxes
  dref: function(data) {
    return {
      version: data[0],
      flags: new Uint8Array(data.subarray(1, 4)),
      dataReferences: inspectMp4(data.subarray(8))
    };
  },
  // HandlerBox: declares the track's handler type and readable name
  hdlr: function(data) {
    var
      view = new DataView(data.buffer, data.byteOffset, data.byteLength),
      result = {
        version: view.getUint8(0),
        flags: new Uint8Array(data.subarray(1, 4)),
        handlerType: parseType(data.subarray(8, 12)),
        name: ''
      },
      i = 8; // NOTE(review): dead initializer — immediately reset to 24 below
    // parse out the name field
    for (i = 24; i < data.byteLength; i++) {
      if (data[i] === 0x00) {
        // the name field is null-terminated
        i++;
        break;
      }
      result.name += String.fromCharCode(data[i]);
    }
    // decode UTF-8 to javascript's internal representation
    // see http://ecmanaut.blogspot.com/2006/07/encoding-decoding-utf8-in-javascript.html
    result.name = decodeURIComponent(global.escape(result.name));
    return result;
  },
  // MediaDataBox: raw sample bytes; attempt to enumerate AVC NAL units
  mdat: function(data) {
    return {
      byteLength: data.byteLength,
      nals: nalParse(data)
    };
  },
  // MediaHeaderBox: per-track timescale, duration and language
  mdhd: function(data) {
    var
      view = new DataView(data.buffer, data.byteOffset, data.byteLength),
      i = 4,
      language,
      result = {
        version: view.getUint8(0),
        flags: new Uint8Array(data.subarray(1, 4)),
        language: ''
      };
    if (result.version === 1) {
      // version 1 stores 64-bit times; only the low 32 bits are kept
      i += 4;
      result.creationTime = parseMp4Date(view.getUint32(i)); // truncating top 4 bytes
      i += 8;
      result.modificationTime = parseMp4Date(view.getUint32(i)); // truncating top 4 bytes
      i += 4;
      result.timescale = view.getUint32(i);
      i += 8;
      result.duration = view.getUint32(i); // truncating top 4 bytes
    } else {
      result.creationTime = parseMp4Date(view.getUint32(i));
      i += 4;
      result.modificationTime = parseMp4Date(view.getUint32(i));
      i += 4;
      result.timescale = view.getUint32(i);
      i += 4;
      result.duration = view.getUint32(i);
    }
    i += 4;
    // language is stored as an ISO-639-2/T code in an array of three 5-bit fields
    // each field is the packed difference between its ASCII value and 0x60
    language = view.getUint16(i);
    result.language += String.fromCharCode((language >> 10) + 0x60);
    result.language += String.fromCharCode(((language & 0x03c0) >> 5) + 0x60);
    result.language += String.fromCharCode((language & 0x1f) + 0x60);
    return result;
  },
  // MediaBox: pure container, recurse into children
  mdia: function(data) {
    return {
      boxes: inspectMp4(data)
    };
  },
  // MovieFragmentHeaderBox: the sequence number of this fragment
  mfhd: function(data) {
    return {
      version: data[0],
      flags: new Uint8Array(data.subarray(1, 4)),
      sequenceNumber: (data[4] << 24) |
        (data[5] << 16) |
        (data[6] << 8) |
        (data[7])
    };
  },
  // MediaInformationBox: pure container, recurse into children
  minf: function(data) {
    return {
      boxes: inspectMp4(data)
    };
  },
  5998. // codingname, not a first-class box type. stsd entries share the
  5999. // same format as real boxes so the parsing infrastructure can be
  6000. // shared
  6001. mp4a: function(data) {
  6002. var
  6003. view = new DataView(data.buffer, data.byteOffset, data.byteLength),
  6004. result = {
  6005. // 6 bytes reserved
  6006. dataReferenceIndex: view.getUint16(6),
  6007. // 4 + 4 bytes reserved
  6008. channelcount: view.getUint16(16),
  6009. samplesize: view.getUint16(18),
  6010. // 2 bytes pre_defined
  6011. // 2 bytes reserved
  6012. samplerate: view.getUint16(24) + (view.getUint16(26) / 65536)
  6013. };
  6014. // if there are more bytes to process, assume this is an ISO/IEC
  6015. // 14496-14 MP4AudioSampleEntry and parse the ESDBox
  6016. if (data.byteLength > 28) {
  6017. result.streamDescriptor = inspectMp4(data.subarray(28))[0];
  6018. }
  6019. return result;
  6020. },
  6021. moof: function(data) {
  6022. return {
  6023. boxes: inspectMp4(data)
  6024. };
  6025. },
  6026. moov: function(data) {
  6027. return {
  6028. boxes: inspectMp4(data)
  6029. };
  6030. },
  6031. mvex: function(data) {
  6032. return {
  6033. boxes: inspectMp4(data)
  6034. };
  6035. },
  6036. mvhd: function(data) {
  6037. var
  6038. view = new DataView(data.buffer, data.byteOffset, data.byteLength),
  6039. i = 4,
  6040. result = {
  6041. version: view.getUint8(0),
  6042. flags: new Uint8Array(data.subarray(1, 4))
  6043. };
  6044. if (result.version === 1) {
  6045. i += 4;
  6046. result.creationTime = parseMp4Date(view.getUint32(i)); // truncating top 4 bytes
  6047. i += 8;
  6048. result.modificationTime = parseMp4Date(view.getUint32(i)); // truncating top 4 bytes
  6049. i += 4;
  6050. result.timescale = view.getUint32(i);
  6051. i += 8;
  6052. result.duration = view.getUint32(i); // truncating top 4 bytes
  6053. } else {
  6054. result.creationTime = parseMp4Date(view.getUint32(i));
  6055. i += 4;
  6056. result.modificationTime = parseMp4Date(view.getUint32(i));
  6057. i += 4;
  6058. result.timescale = view.getUint32(i);
  6059. i += 4;
  6060. result.duration = view.getUint32(i);
  6061. }
  6062. i += 4;
  6063. // convert fixed-point, base 16 back to a number
  6064. result.rate = view.getUint16(i) + (view.getUint16(i + 2) / 16);
  6065. i += 4;
  6066. result.volume = view.getUint8(i) + (view.getUint8(i + 1) / 8);
  6067. i += 2;
  6068. i += 2;
  6069. i += 2 * 4;
  6070. result.matrix = new Uint32Array(data.subarray(i, i + (9 * 4)));
  6071. i += 9 * 4;
  6072. i += 6 * 4;
  6073. result.nextTrackId = view.getUint32(i);
  6074. return result;
  6075. },
  6076. pdin: function(data) {
  6077. var view = new DataView(data.buffer, data.byteOffset, data.byteLength);
  6078. return {
  6079. version: view.getUint8(0),
  6080. flags: new Uint8Array(data.subarray(1, 4)),
  6081. rate: view.getUint32(4),
  6082. initialDelay: view.getUint32(8)
  6083. };
  6084. },
  6085. sdtp: function(data) {
  6086. var
  6087. result = {
  6088. version: data[0],
  6089. flags: new Uint8Array(data.subarray(1, 4)),
  6090. samples: []
  6091. }, i;
  6092. for (i = 4; i < data.byteLength; i++) {
  6093. result.samples.push({
  6094. dependsOn: (data[i] & 0x30) >> 4,
  6095. isDependedOn: (data[i] & 0x0c) >> 2,
  6096. hasRedundancy: data[i] & 0x03
  6097. });
  6098. }
  6099. return result;
  6100. },
  6101. sidx: function(data) {
  6102. var view = new DataView(data.buffer, data.byteOffset, data.byteLength),
  6103. result = {
  6104. version: data[0],
  6105. flags: new Uint8Array(data.subarray(1, 4)),
  6106. references: [],
  6107. referenceId: view.getUint32(4),
  6108. timescale: view.getUint32(8),
  6109. earliestPresentationTime: view.getUint32(12),
  6110. firstOffset: view.getUint32(16)
  6111. },
  6112. referenceCount = view.getUint16(22),
  6113. i;
  6114. for (i = 24; referenceCount; i += 12, referenceCount--) {
  6115. result.references.push({
  6116. referenceType: (data[i] & 0x80) >>> 7,
  6117. referencedSize: view.getUint32(i) & 0x7FFFFFFF,
  6118. subsegmentDuration: view.getUint32(i + 4),
  6119. startsWithSap: !!(data[i + 8] & 0x80),
  6120. sapType: (data[i + 8] & 0x70) >>> 4,
  6121. sapDeltaTime: view.getUint32(i + 8) & 0x0FFFFFFF
  6122. });
  6123. }
  6124. return result;
  6125. },
  6126. smhd: function(data) {
  6127. return {
  6128. version: data[0],
  6129. flags: new Uint8Array(data.subarray(1, 4)),
  6130. balance: data[4] + (data[5] / 256)
  6131. };
  6132. },
  6133. stbl: function(data) {
  6134. return {
  6135. boxes: inspectMp4(data)
  6136. };
  6137. },
  6138. stco: function(data) {
  6139. var
  6140. view = new DataView(data.buffer, data.byteOffset, data.byteLength),
  6141. result = {
  6142. version: data[0],
  6143. flags: new Uint8Array(data.subarray(1, 4)),
  6144. chunkOffsets: []
  6145. },
  6146. entryCount = view.getUint32(4),
  6147. i;
  6148. for (i = 8; entryCount; i += 4, entryCount--) {
  6149. result.chunkOffsets.push(view.getUint32(i));
  6150. }
  6151. return result;
  6152. },
  6153. stsc: function(data) {
  6154. var
  6155. view = new DataView(data.buffer, data.byteOffset, data.byteLength),
  6156. entryCount = view.getUint32(4),
  6157. result = {
  6158. version: data[0],
  6159. flags: new Uint8Array(data.subarray(1, 4)),
  6160. sampleToChunks: []
  6161. },
  6162. i;
  6163. for (i = 8; entryCount; i += 12, entryCount--) {
  6164. result.sampleToChunks.push({
  6165. firstChunk: view.getUint32(i),
  6166. samplesPerChunk: view.getUint32(i + 4),
  6167. sampleDescriptionIndex: view.getUint32(i + 8)
  6168. });
  6169. }
  6170. return result;
  6171. },
  6172. stsd: function(data) {
  6173. return {
  6174. version: data[0],
  6175. flags: new Uint8Array(data.subarray(1, 4)),
  6176. sampleDescriptions: inspectMp4(data.subarray(8))
  6177. };
  6178. },
  6179. stsz: function(data) {
  6180. var
  6181. view = new DataView(data.buffer, data.byteOffset, data.byteLength),
  6182. result = {
  6183. version: data[0],
  6184. flags: new Uint8Array(data.subarray(1, 4)),
  6185. sampleSize: view.getUint32(4),
  6186. entries: []
  6187. },
  6188. i;
  6189. for (i = 12; i < data.byteLength; i += 4) {
  6190. result.entries.push(view.getUint32(i));
  6191. }
  6192. return result;
  6193. },
  6194. stts: function(data) {
  6195. var
  6196. view = new DataView(data.buffer, data.byteOffset, data.byteLength),
  6197. result = {
  6198. version: data[0],
  6199. flags: new Uint8Array(data.subarray(1, 4)),
  6200. timeToSamples: []
  6201. },
  6202. entryCount = view.getUint32(4),
  6203. i;
  6204. for (i = 8; entryCount; i += 8, entryCount--) {
  6205. result.timeToSamples.push({
  6206. sampleCount: view.getUint32(i),
  6207. sampleDelta: view.getUint32(i + 4)
  6208. });
  6209. }
  6210. return result;
  6211. },
  6212. styp: function(data) {
  6213. return parse.ftyp(data);
  6214. },
  6215. tfdt: function(data) {
  6216. var result = {
  6217. version: data[0],
  6218. flags: new Uint8Array(data.subarray(1, 4)),
  6219. baseMediaDecodeTime: data[4] << 24 | data[5] << 16 | data[6] << 8 | data[7]
  6220. };
  6221. if (result.version === 1) {
  6222. result.baseMediaDecodeTime *= Math.pow(2, 32);
  6223. result.baseMediaDecodeTime += data[8] << 24 | data[9] << 16 | data[10] << 8 | data[11];
  6224. }
  6225. return result;
  6226. },
  6227. tfhd: function(data) {
  6228. var
  6229. view = new DataView(data.buffer, data.byteOffset, data.byteLength),
  6230. result = {
  6231. version: data[0],
  6232. flags: new Uint8Array(data.subarray(1, 4)),
  6233. trackId: view.getUint32(4)
  6234. },
  6235. baseDataOffsetPresent = result.flags[2] & 0x01,
  6236. sampleDescriptionIndexPresent = result.flags[2] & 0x02,
  6237. defaultSampleDurationPresent = result.flags[2] & 0x08,
  6238. defaultSampleSizePresent = result.flags[2] & 0x10,
  6239. defaultSampleFlagsPresent = result.flags[2] & 0x20,
  6240. i;
  6241. i = 8;
  6242. if (baseDataOffsetPresent) {
  6243. i += 4; // truncate top 4 bytes
  6244. result.baseDataOffset = view.getUint32(12);
  6245. i += 4;
  6246. }
  6247. if (sampleDescriptionIndexPresent) {
  6248. result.sampleDescriptionIndex = view.getUint32(i);
  6249. i += 4;
  6250. }
  6251. if (defaultSampleDurationPresent) {
  6252. result.defaultSampleDuration = view.getUint32(i);
  6253. i += 4;
  6254. }
  6255. if (defaultSampleSizePresent) {
  6256. result.defaultSampleSize = view.getUint32(i);
  6257. i += 4;
  6258. }
  6259. if (defaultSampleFlagsPresent) {
  6260. result.defaultSampleFlags = view.getUint32(i);
  6261. }
  6262. return result;
  6263. },
  6264. tkhd: function(data) {
  6265. var
  6266. view = new DataView(data.buffer, data.byteOffset, data.byteLength),
  6267. i = 4,
  6268. result = {
  6269. version: view.getUint8(0),
  6270. flags: new Uint8Array(data.subarray(1, 4))
  6271. };
  6272. if (result.version === 1) {
  6273. i += 4;
  6274. result.creationTime = parseMp4Date(view.getUint32(i)); // truncating top 4 bytes
  6275. i += 8;
  6276. result.modificationTime = parseMp4Date(view.getUint32(i)); // truncating top 4 bytes
  6277. i += 4;
  6278. result.trackId = view.getUint32(i);
  6279. i += 4;
  6280. i += 8;
  6281. result.duration = view.getUint32(i); // truncating top 4 bytes
  6282. } else {
  6283. result.creationTime = parseMp4Date(view.getUint32(i));
  6284. i += 4;
  6285. result.modificationTime = parseMp4Date(view.getUint32(i));
  6286. i += 4;
  6287. result.trackId = view.getUint32(i);
  6288. i += 4;
  6289. i += 4;
  6290. result.duration = view.getUint32(i);
  6291. }
  6292. i += 4;
  6293. i += 2 * 4;
  6294. result.layer = view.getUint16(i);
  6295. i += 2;
  6296. result.alternateGroup = view.getUint16(i);
  6297. i += 2;
  6298. // convert fixed-point, base 16 back to a number
  6299. result.volume = view.getUint8(i) + (view.getUint8(i + 1) / 8);
  6300. i += 2;
  6301. i += 2;
  6302. result.matrix = new Uint32Array(data.subarray(i, i + (9 * 4)));
  6303. i += 9 * 4;
  6304. result.width = view.getUint16(i) + (view.getUint16(i + 2) / 16);
  6305. i += 4;
  6306. result.height = view.getUint16(i) + (view.getUint16(i + 2) / 16);
  6307. return result;
  6308. },
  6309. traf: function(data) {
  6310. return {
  6311. boxes: inspectMp4(data)
  6312. };
  6313. },
  6314. trak: function(data) {
  6315. return {
  6316. boxes: inspectMp4(data)
  6317. };
  6318. },
  6319. trex: function(data) {
  6320. var view = new DataView(data.buffer, data.byteOffset, data.byteLength);
  6321. return {
  6322. version: data[0],
  6323. flags: new Uint8Array(data.subarray(1, 4)),
  6324. trackId: view.getUint32(4),
  6325. defaultSampleDescriptionIndex: view.getUint32(8),
  6326. defaultSampleDuration: view.getUint32(12),
  6327. defaultSampleSize: view.getUint32(16),
  6328. sampleDependsOn: data[20] & 0x03,
  6329. sampleIsDependedOn: (data[21] & 0xc0) >> 6,
  6330. sampleHasRedundancy: (data[21] & 0x30) >> 4,
  6331. samplePaddingValue: (data[21] & 0x0e) >> 1,
  6332. sampleIsDifferenceSample: !!(data[21] & 0x01),
  6333. sampleDegradationPriority: view.getUint16(22)
  6334. };
  6335. },
  6336. trun: function(data) {
  6337. var
  6338. result = {
  6339. version: data[0],
  6340. flags: new Uint8Array(data.subarray(1, 4)),
  6341. samples: []
  6342. },
  6343. view = new DataView(data.buffer, data.byteOffset, data.byteLength),
  6344. dataOffsetPresent = result.flags[2] & 0x01,
  6345. firstSampleFlagsPresent = result.flags[2] & 0x04,
  6346. sampleDurationPresent = result.flags[1] & 0x01,
  6347. sampleSizePresent = result.flags[1] & 0x02,
  6348. sampleFlagsPresent = result.flags[1] & 0x04,
  6349. sampleCompositionTimeOffsetPresent = result.flags[1] & 0x08,
  6350. sampleCount = view.getUint32(4),
  6351. offset = 8,
  6352. sample;
  6353. if (dataOffsetPresent) {
  6354. result.dataOffset = view.getUint32(offset);
  6355. offset += 4;
  6356. }
  6357. if (firstSampleFlagsPresent && sampleCount) {
  6358. sample = {
  6359. flags: parseSampleFlags(data.subarray(offset, offset + 4))
  6360. };
  6361. offset += 4;
  6362. if (sampleDurationPresent) {
  6363. sample.duration = view.getUint32(offset);
  6364. offset += 4;
  6365. }
  6366. if (sampleSizePresent) {
  6367. sample.size = view.getUint32(offset);
  6368. offset += 4;
  6369. }
  6370. if (sampleCompositionTimeOffsetPresent) {
  6371. sample.compositionTimeOffset = view.getUint32(offset);
  6372. offset += 4;
  6373. }
  6374. result.samples.push(sample);
  6375. sampleCount--;
  6376. }
  6377. while (sampleCount--) {
  6378. sample = {};
  6379. if (sampleDurationPresent) {
  6380. sample.duration = view.getUint32(offset);
  6381. offset += 4;
  6382. }
  6383. if (sampleSizePresent) {
  6384. sample.size = view.getUint32(offset);
  6385. offset += 4;
  6386. }
  6387. if (sampleFlagsPresent) {
  6388. sample.flags = parseSampleFlags(data.subarray(offset, offset + 4));
  6389. offset += 4;
  6390. }
  6391. if (sampleCompositionTimeOffsetPresent) {
  6392. sample.compositionTimeOffset = view.getUint32(offset);
  6393. offset += 4;
  6394. }
  6395. result.samples.push(sample);
  6396. }
  6397. return result;
  6398. },
  6399. 'url ': function(data) {
  6400. return {
  6401. version: data[0],
  6402. flags: new Uint8Array(data.subarray(1, 4))
  6403. };
  6404. },
  6405. vmhd: function(data) {
  6406. var view = new DataView(data.buffer, data.byteOffset, data.byteLength);
  6407. return {
  6408. version: data[0],
  6409. flags: new Uint8Array(data.subarray(1, 4)),
  6410. graphicsmode: view.getUint16(4),
  6411. opcolor: new Uint16Array([view.getUint16(6),
  6412. view.getUint16(8),
  6413. view.getUint16(10)])
  6414. };
  6415. }
  6416. };
  6417. /**
  6418. * Return a javascript array of box objects parsed from an ISO base
  6419. * media file.
  6420. * @param data {Uint8Array} the binary data of the media to be inspected
  6421. * @return {array} a javascript array of potentially nested box objects
  6422. */
  6423. inspectMp4 = function(data) {
  6424. var
  6425. i = 0,
  6426. result = [],
  6427. view,
  6428. size,
  6429. type,
  6430. end,
  6431. box;
  6432. // Convert data from Uint8Array to ArrayBuffer, to follow Dataview API
  6433. var ab = new ArrayBuffer(data.length);
  6434. var v = new Uint8Array(ab);
  6435. for (var z = 0; z < data.length; ++z) {
  6436. v[z] = data[z];
  6437. }
  6438. view = new DataView(ab);
  6439. while (i < data.byteLength) {
  6440. // parse box data
  6441. size = view.getUint32(i);
  6442. type = parseType(data.subarray(i + 4, i + 8));
  6443. end = size > 1 ? i + size : data.byteLength;
  6444. // parse type-specific data
  6445. box = (parse[type] || function(data) {
  6446. return {
  6447. data: data
  6448. };
  6449. })(data.subarray(i + 8, end));
  6450. box.size = size;
  6451. box.type = type;
  6452. // store this box and move to the next
  6453. result.push(box);
  6454. i = end;
  6455. }
  6456. return result;
  6457. };
/**
 * Returns a textual representation of the javascript representation
 * of an MP4 file. You can use it as an alternative to
 * JSON.stringify() to compare inspected MP4s.
 * @param inspectedMp4 {array} the parsed array of boxes in an MP4
 * file
 * @param depth {number} (optional) the number of ancestor boxes of
 * the elements of inspectedMp4. Assumed to be zero if unspecified.
 * @return {string} a text representation of the parsed MP4
 */
textifyMp4 = function(inspectedMp4, depth) {
  var indent;
  depth = depth || 0;
  // two spaces of indentation per nesting level
  indent = new Array(depth * 2 + 1).join(' ');
  // iterate over all the boxes
  return inspectedMp4.map(function(box, index) {
    // list the box type first at the current indentation level
    return indent + box.type + '\n' +
      // the type is already included and handle child boxes separately
      Object.keys(box).filter(function(key) {
        return key !== 'type' && key !== 'boxes';
      // output all the box properties
      }).map(function(key) {
        var prefix = indent + ' ' + key + ': ',
            value = box[key];
        // print out raw bytes as hexadecimal
        if (value instanceof Uint8Array || value instanceof Uint32Array) {
          // render each byte as two hex digits, then wrap into rows of
          // up to 24 characters (8 space-separated bytes)
          var bytes = Array.prototype.slice.call(new Uint8Array(value.buffer, value.byteOffset, value.byteLength))
              .map(function(byte) {
                return ' ' + ('00' + byte.toString(16)).slice(-2);
              }).join('').match(/.{1,24}/g);
          if (!bytes) {
            // empty byte array
            return prefix + '<>';
          }
          if (bytes.length === 1) {
            // single row: render inline
            return prefix + '<' + bytes.join('').slice(1) + '>';
          }
          // multiple rows: render one row per line between angle brackets
          return prefix + '<\n' + bytes.map(function(line) {
            return indent + ' ' + line;
          }).join('\n') + '\n' + indent + ' >';
        }
        // stringify generic objects, re-indenting continuation lines to
        // line up under the property name
        return prefix +
          JSON.stringify(value, null, 2)
          .split('\n').map(function(line, index) {
            if (index === 0) {
              return line;
            }
            return indent + ' ' + line;
          }).join('\n');
      }).join('\n') +
      // recursively textify the child boxes
      (box.boxes ? '\n' + textifyMp4(box.boxes, depth + 1) : '');
  }).join('\n');
};
  6513. module.exports = {
  6514. inspect: inspectMp4,
  6515. textify: textifyMp4
  6516. };
  6517. }).call(this,typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {})
  6518. },{"../mp4/probe":26}],30:[function(require,module,exports){
  6519. /**
  6520. * mux.js
  6521. *
  6522. * Copyright (c) 2016 Brightcove
  6523. * All rights reserved.
  6524. *
  6525. * Parse mpeg2 transport stream packets to extract basic timing information
  6526. */
  6527. 'use strict';
  6528. var StreamTypes = require('../m2ts/stream-types.js');
  6529. var handleRollover = require('../m2ts/timestamp-rollover-stream.js').handleRollover;
  6530. var probe = {};
  6531. probe.ts = require('../m2ts/probe.js');
  6532. probe.aac = require('../aac/probe.js');
  6533. var
  6534. PES_TIMESCALE = 90000,
  6535. MP2T_PACKET_LENGTH = 188, // bytes
  6536. SYNC_BYTE = 0x47;
  6537. var isLikelyAacData = function(data) {
  6538. if ((data[0] === 'I'.charCodeAt(0)) &&
  6539. (data[1] === 'D'.charCodeAt(0)) &&
  6540. (data[2] === '3'.charCodeAt(0))) {
  6541. return true;
  6542. }
  6543. return false;
  6544. };
  6545. /**
  6546. * walks through segment data looking for pat and pmt packets to parse out
  6547. * program map table information
  6548. */
  6549. var parsePsi_ = function(bytes, pmt) {
  6550. var
  6551. startIndex = 0,
  6552. endIndex = MP2T_PACKET_LENGTH,
  6553. packet, type;
  6554. while (endIndex < bytes.byteLength) {
  6555. // Look for a pair of start and end sync bytes in the data..
  6556. if (bytes[startIndex] === SYNC_BYTE && bytes[endIndex] === SYNC_BYTE) {
  6557. // We found a packet
  6558. packet = bytes.subarray(startIndex, endIndex);
  6559. type = probe.ts.parseType(packet, pmt.pid);
  6560. switch (type) {
  6561. case 'pat':
  6562. if (!pmt.pid) {
  6563. pmt.pid = probe.ts.parsePat(packet);
  6564. }
  6565. break;
  6566. case 'pmt':
  6567. if (!pmt.table) {
  6568. pmt.table = probe.ts.parsePmt(packet);
  6569. }
  6570. break;
  6571. default:
  6572. break;
  6573. }
  6574. // Found the pat and pmt, we can stop walking the segment
  6575. if (pmt.pid && pmt.table) {
  6576. return;
  6577. }
  6578. startIndex += MP2T_PACKET_LENGTH;
  6579. endIndex += MP2T_PACKET_LENGTH;
  6580. continue;
  6581. }
  6582. // If we get here, we have somehow become de-synchronized and we need to step
  6583. // forward one byte at a time until we find a pair of sync bytes that denote
  6584. // a packet
  6585. startIndex++;
  6586. endIndex++;
  6587. }
  6588. };
  6589. /**
  6590. * walks through the segment data from the start and end to get timing information
  6591. * for the first and last audio pes packets
  6592. */
  6593. var parseAudioPes_ = function(bytes, pmt, result) {
  6594. var
  6595. startIndex = 0,
  6596. endIndex = MP2T_PACKET_LENGTH,
  6597. packet, type, pesType, pusi, parsed;
  6598. var endLoop = false;
  6599. // Start walking from start of segment to get first audio packet
  6600. while (endIndex < bytes.byteLength) {
  6601. // Look for a pair of start and end sync bytes in the data..
  6602. if (bytes[startIndex] === SYNC_BYTE && bytes[endIndex] === SYNC_BYTE) {
  6603. // We found a packet
  6604. packet = bytes.subarray(startIndex, endIndex);
  6605. type = probe.ts.parseType(packet, pmt.pid);
  6606. switch (type) {
  6607. case 'pes':
  6608. pesType = probe.ts.parsePesType(packet, pmt.table);
  6609. pusi = probe.ts.parsePayloadUnitStartIndicator(packet);
  6610. if (pesType === 'audio' && pusi) {
  6611. parsed = probe.ts.parsePesTime(packet);
  6612. if (parsed) {
  6613. parsed.type = 'audio';
  6614. result.audio.push(parsed);
  6615. endLoop = true;
  6616. }
  6617. }
  6618. break;
  6619. default:
  6620. break;
  6621. }
  6622. if (endLoop) {
  6623. break;
  6624. }
  6625. startIndex += MP2T_PACKET_LENGTH;
  6626. endIndex += MP2T_PACKET_LENGTH;
  6627. continue;
  6628. }
  6629. // If we get here, we have somehow become de-synchronized and we need to step
  6630. // forward one byte at a time until we find a pair of sync bytes that denote
  6631. // a packet
  6632. startIndex++;
  6633. endIndex++;
  6634. }
  6635. // Start walking from end of segment to get last audio packet
  6636. endIndex = bytes.byteLength;
  6637. startIndex = endIndex - MP2T_PACKET_LENGTH;
  6638. endLoop = false;
  6639. while (startIndex >= 0) {
  6640. // Look for a pair of start and end sync bytes in the data..
  6641. if (bytes[startIndex] === SYNC_BYTE && bytes[endIndex] === SYNC_BYTE) {
  6642. // We found a packet
  6643. packet = bytes.subarray(startIndex, endIndex);
  6644. type = probe.ts.parseType(packet, pmt.pid);
  6645. switch (type) {
  6646. case 'pes':
  6647. pesType = probe.ts.parsePesType(packet, pmt.table);
  6648. pusi = probe.ts.parsePayloadUnitStartIndicator(packet);
  6649. if (pesType === 'audio' && pusi) {
  6650. parsed = probe.ts.parsePesTime(packet);
  6651. if (parsed) {
  6652. parsed.type = 'audio';
  6653. result.audio.push(parsed);
  6654. endLoop = true;
  6655. }
  6656. }
  6657. break;
  6658. default:
  6659. break;
  6660. }
  6661. if (endLoop) {
  6662. break;
  6663. }
  6664. startIndex -= MP2T_PACKET_LENGTH;
  6665. endIndex -= MP2T_PACKET_LENGTH;
  6666. continue;
  6667. }
  6668. // If we get here, we have somehow become de-synchronized and we need to step
  6669. // forward one byte at a time until we find a pair of sync bytes that denote
  6670. // a packet
  6671. startIndex--;
  6672. endIndex--;
  6673. }
  6674. };
  6675. /**
  6676. * walks through the segment data from the start and end to get timing information
  6677. * for the first and last video pes packets as well as timing information for the first
  6678. * key frame.
  6679. */
  6680. var parseVideoPes_ = function(bytes, pmt, result) {
  6681. var
  6682. startIndex = 0,
  6683. endIndex = MP2T_PACKET_LENGTH,
  6684. packet, type, pesType, pusi, parsed, frame, i, pes;
  6685. var endLoop = false;
  6686. var currentFrame = {
  6687. data: [],
  6688. size: 0
  6689. };
  6690. // Start walking from start of segment to get first video packet
  6691. while (endIndex < bytes.byteLength) {
  6692. // Look for a pair of start and end sync bytes in the data..
  6693. if (bytes[startIndex] === SYNC_BYTE && bytes[endIndex] === SYNC_BYTE) {
  6694. // We found a packet
  6695. packet = bytes.subarray(startIndex, endIndex);
  6696. type = probe.ts.parseType(packet, pmt.pid);
  6697. switch (type) {
  6698. case 'pes':
  6699. pesType = probe.ts.parsePesType(packet, pmt.table);
  6700. pusi = probe.ts.parsePayloadUnitStartIndicator(packet);
  6701. if (pesType === 'video') {
  6702. if (pusi && !endLoop) {
  6703. parsed = probe.ts.parsePesTime(packet);
  6704. if (parsed) {
  6705. parsed.type = 'video';
  6706. result.video.push(parsed);
  6707. endLoop = true;
  6708. }
  6709. }
  6710. if (!result.firstKeyFrame) {
  6711. if (pusi) {
  6712. if (currentFrame.size !== 0) {
  6713. frame = new Uint8Array(currentFrame.size);
  6714. i = 0;
  6715. while (currentFrame.data.length) {
  6716. pes = currentFrame.data.shift();
  6717. frame.set(pes, i);
  6718. i += pes.byteLength;
  6719. }
  6720. if (probe.ts.videoPacketContainsKeyFrame(frame)) {
  6721. result.firstKeyFrame = probe.ts.parsePesTime(frame);
  6722. result.firstKeyFrame.type = 'video';
  6723. }
  6724. currentFrame.size = 0;
  6725. }
  6726. }
  6727. currentFrame.data.push(packet);
  6728. currentFrame.size += packet.byteLength;
  6729. }
  6730. }
  6731. break;
  6732. default:
  6733. break;
  6734. }
  6735. if (endLoop && result.firstKeyFrame) {
  6736. break;
  6737. }
  6738. startIndex += MP2T_PACKET_LENGTH;
  6739. endIndex += MP2T_PACKET_LENGTH;
  6740. continue;
  6741. }
  6742. // If we get here, we have somehow become de-synchronized and we need to step
  6743. // forward one byte at a time until we find a pair of sync bytes that denote
  6744. // a packet
  6745. startIndex++;
  6746. endIndex++;
  6747. }
  6748. // Start walking from end of segment to get last video packet
  6749. endIndex = bytes.byteLength;
  6750. startIndex = endIndex - MP2T_PACKET_LENGTH;
  6751. endLoop = false;
  6752. while (startIndex >= 0) {
  6753. // Look for a pair of start and end sync bytes in the data..
  6754. if (bytes[startIndex] === SYNC_BYTE && bytes[endIndex] === SYNC_BYTE) {
  6755. // We found a packet
  6756. packet = bytes.subarray(startIndex, endIndex);
  6757. type = probe.ts.parseType(packet, pmt.pid);
  6758. switch (type) {
  6759. case 'pes':
  6760. pesType = probe.ts.parsePesType(packet, pmt.table);
  6761. pusi = probe.ts.parsePayloadUnitStartIndicator(packet);
  6762. if (pesType === 'video' && pusi) {
  6763. parsed = probe.ts.parsePesTime(packet);
  6764. if (parsed) {
  6765. parsed.type = 'video';
  6766. result.video.push(parsed);
  6767. endLoop = true;
  6768. }
  6769. }
  6770. break;
  6771. default:
  6772. break;
  6773. }
  6774. if (endLoop) {
  6775. break;
  6776. }
  6777. startIndex -= MP2T_PACKET_LENGTH;
  6778. endIndex -= MP2T_PACKET_LENGTH;
  6779. continue;
  6780. }
  6781. // If we get here, we have somehow become de-synchronized and we need to step
  6782. // forward one byte at a time until we find a pair of sync bytes that denote
  6783. // a packet
  6784. startIndex--;
  6785. endIndex--;
  6786. }
  6787. };
  6788. /**
  6789. * Adjusts the timestamp information for the segment to account for
  6790. * rollover and convert to seconds based on pes packet timescale (90khz clock)
  6791. */
  6792. var adjustTimestamp_ = function(segmentInfo, baseTimestamp) {
  6793. if (segmentInfo.audio && segmentInfo.audio.length) {
  6794. var audioBaseTimestamp = baseTimestamp;
  6795. if (typeof audioBaseTimestamp === 'undefined') {
  6796. audioBaseTimestamp = segmentInfo.audio[0].dts;
  6797. }
  6798. segmentInfo.audio.forEach(function(info) {
  6799. info.dts = handleRollover(info.dts, audioBaseTimestamp);
  6800. info.pts = handleRollover(info.pts, audioBaseTimestamp);
  6801. // time in seconds
  6802. info.dtsTime = info.dts / PES_TIMESCALE;
  6803. info.ptsTime = info.pts / PES_TIMESCALE;
  6804. });
  6805. }
  6806. if (segmentInfo.video && segmentInfo.video.length) {
  6807. var videoBaseTimestamp = baseTimestamp;
  6808. if (typeof videoBaseTimestamp === 'undefined') {
  6809. videoBaseTimestamp = segmentInfo.video[0].dts;
  6810. }
  6811. segmentInfo.video.forEach(function(info) {
  6812. info.dts = handleRollover(info.dts, videoBaseTimestamp);
  6813. info.pts = handleRollover(info.pts, videoBaseTimestamp);
  6814. // time in seconds
  6815. info.dtsTime = info.dts / PES_TIMESCALE;
  6816. info.ptsTime = info.pts / PES_TIMESCALE;
  6817. });
  6818. if (segmentInfo.firstKeyFrame) {
  6819. var frame = segmentInfo.firstKeyFrame;
  6820. frame.dts = handleRollover(frame.dts, videoBaseTimestamp);
  6821. frame.pts = handleRollover(frame.pts, videoBaseTimestamp);
  6822. // time in seconds
  6823. frame.dtsTime = frame.dts / PES_TIMESCALE;
  6824. frame.ptsTime = frame.dts / PES_TIMESCALE;
  6825. }
  6826. }
  6827. };
/**
 * inspects the aac data stream for start and end time information
 */
var inspectAac_ = function(bytes) {
  var
    endLoop = false,        // set when there is not enough data to continue
    audioCount = 0,         // number of complete ADTS frames seen
    sampleRate = null,      // from the first ADTS frame encountered
    timestamp = null,       // from the first ID3 timed-metadata tag
    frameSize = 0,
    byteIndex = 0,
    packet;
  // need at least 3 bytes to identify a frame type
  while (bytes.length - byteIndex >= 3) {
    var type = probe.aac.parseType(bytes, byteIndex);
    switch (type) {
    case 'timed-metadata':
      // Exit early because we don't have enough to parse
      // the ID3 tag header
      if (bytes.length - byteIndex < 10) {
        endLoop = true;
        break;
      }
      frameSize = probe.aac.parseId3TagSize(bytes, byteIndex);
      // Exit early if we don't have enough in the buffer
      // to emit a full packet
      if (frameSize > bytes.length) {
        endLoop = true;
        break;
      }
      // only the first timed-metadata tag's timestamp is needed
      if (timestamp === null) {
        packet = bytes.subarray(byteIndex, byteIndex + frameSize);
        timestamp = probe.aac.parseAacTimestamp(packet);
      }
      byteIndex += frameSize;
      break;
    case 'audio':
      // Exit early because we don't have enough to parse
      // the ADTS frame header
      if (bytes.length - byteIndex < 7) {
        endLoop = true;
        break;
      }
      frameSize = probe.aac.parseAdtsSize(bytes, byteIndex);
      // Exit early if we don't have enough in the buffer
      // to emit a full packet
      if (frameSize > bytes.length) {
        endLoop = true;
        break;
      }
      // only the first ADTS frame's sample rate is needed
      if (sampleRate === null) {
        packet = bytes.subarray(byteIndex, byteIndex + frameSize);
        sampleRate = probe.aac.parseSampleRate(packet);
      }
      audioCount++;
      byteIndex += frameSize;
      break;
    default:
      // unrecognized byte; resynchronize one byte at a time
      byteIndex++;
      break;
    }
    if (endLoop) {
      return null;
    }
  }
  // both pieces of information are required to compute timing
  if (sampleRate === null || timestamp === null) {
    return null;
  }
  // convert the frame count to 90kHz clock ticks; each AAC frame carries
  // 1024 samples
  var audioTimescale = PES_TIMESCALE / sampleRate;
  var result = {
    audio: [
      {
        type: 'audio',
        dts: timestamp,
        pts: timestamp
      },
      {
        type: 'audio',
        dts: timestamp + (audioCount * 1024 * audioTimescale),
        pts: timestamp + (audioCount * 1024 * audioTimescale)
      }
    ]
  };
  return result;
};
  6912. /**
  6913. * inspects the transport stream segment data for start and end time information
  6914. * of the audio and video tracks (when present) as well as the first key frame's
  6915. * start time.
  6916. */
  6917. var inspectTs_ = function(bytes) {
  6918. var pmt = {
  6919. pid: null,
  6920. table: null
  6921. };
  6922. var result = {};
  6923. parsePsi_(bytes, pmt);
  6924. for (var pid in pmt.table) {
  6925. if (pmt.table.hasOwnProperty(pid)) {
  6926. var type = pmt.table[pid];
  6927. switch (type) {
  6928. case StreamTypes.H264_STREAM_TYPE:
  6929. result.video = [];
  6930. parseVideoPes_(bytes, pmt, result);
  6931. if (result.video.length === 0) {
  6932. delete result.video;
  6933. }
  6934. break;
  6935. case StreamTypes.ADTS_STREAM_TYPE:
  6936. result.audio = [];
  6937. parseAudioPes_(bytes, pmt, result);
  6938. if (result.audio.length === 0) {
  6939. delete result.audio;
  6940. }
  6941. break;
  6942. default:
  6943. break;
  6944. }
  6945. }
  6946. }
  6947. return result;
  6948. };
  6949. /**
  6950. * Inspects segment byte data and returns an object with start and end timing information
  6951. *
  6952. * @param {Uint8Array} bytes The segment byte data
  6953. * @param {Number} baseTimestamp Relative reference timestamp used when adjusting frame
  6954. * timestamps for rollover. This value must be in 90khz clock.
  6955. * @return {Object} Object containing start and end frame timing info of segment.
  6956. */
  6957. var inspect = function(bytes, baseTimestamp) {
  6958. var isAacData = isLikelyAacData(bytes);
  6959. var result;
  6960. if (isAacData) {
  6961. result = inspectAac_(bytes);
  6962. } else {
  6963. result = inspectTs_(bytes);
  6964. }
  6965. if (!result || (!result.audio && !result.video)) {
  6966. return null;
  6967. }
  6968. adjustTimestamp_(result, baseTimestamp);
  6969. return result;
  6970. };
// Only the top-level `inspect` entry point is public; the per-format
// helpers above stay private to this module.
module.exports = {
  inspect: inspect
};
  6974. },{"../aac/probe.js":5,"../m2ts/probe.js":21,"../m2ts/stream-types.js":22,"../m2ts/timestamp-rollover-stream.js":23}],31:[function(require,module,exports){
  6975. var
  6976. ONE_SECOND_IN_TS = 90000, // 90kHz clock
  6977. secondsToVideoTs,
  6978. secondsToAudioTs,
  6979. videoTsToSeconds,
  6980. audioTsToSeconds,
  6981. audioTsToVideoTs,
  6982. videoTsToAudioTs;
  6983. secondsToVideoTs = function(seconds) {
  6984. return seconds * ONE_SECOND_IN_TS;
  6985. };
  6986. secondsToAudioTs = function(seconds, sampleRate) {
  6987. return seconds * sampleRate;
  6988. };
  6989. videoTsToSeconds = function(timestamp) {
  6990. return timestamp / ONE_SECOND_IN_TS;
  6991. };
  6992. audioTsToSeconds = function(timestamp, sampleRate) {
  6993. return timestamp / sampleRate;
  6994. };
  6995. audioTsToVideoTs = function(timestamp, sampleRate) {
  6996. return secondsToVideoTs(audioTsToSeconds(timestamp, sampleRate));
  6997. };
  6998. videoTsToAudioTs = function(timestamp, sampleRate) {
  6999. return secondsToAudioTs(videoTsToSeconds(timestamp), sampleRate);
  7000. };
// Conversion helpers between seconds, 90kHz video clock ticks, and
// audio sample-count timestamps.
module.exports = {
  secondsToVideoTs: secondsToVideoTs,
  secondsToAudioTs: secondsToAudioTs,
  videoTsToSeconds: videoTsToSeconds,
  audioTsToSeconds: audioTsToSeconds,
  audioTsToVideoTs: audioTsToVideoTs,
  videoTsToAudioTs: videoTsToAudioTs
};
  7009. },{}],32:[function(require,module,exports){
  7010. 'use strict';
  7011. var ExpGolomb;
  7012. /**
  7013. * Parser for exponential Golomb codes, a variable-bitwidth number encoding
  7014. * scheme used by h264.
  7015. */
/**
 * Bit reader over `workingData` for exponential Golomb codes. Reads are
 * destructive: the reader advances through the buffer as bits are consumed.
 * The implementation keeps a 32-bit window (`workingWord`) that is refilled
 * from the buffer as needed.
 */
ExpGolomb = function(workingData) {
  var
    // the number of bytes left to examine in workingData
    workingBytesAvailable = workingData.byteLength,
    // the current word being examined
    workingWord = 0, // :uint
    // the number of bits left to examine in the current word
    workingBitsAvailable = 0; // :uint;

  // ():uint
  // total bit length of the not-yet-loaded bytes (excludes bits still in
  // the current word)
  this.length = function() {
    return (8 * workingBytesAvailable);
  };

  // ():uint
  // bits still readable, including the partially consumed current word
  this.bitsAvailable = function() {
    return (8 * workingBytesAvailable) + workingBitsAvailable;
  };

  // ():void
  // refill workingWord with the next (up to) 4 bytes of workingData;
  // throws when the input is exhausted
  this.loadWord = function() {
    var
      position = workingData.byteLength - workingBytesAvailable,
      workingBytes = new Uint8Array(4),
      availableBytes = Math.min(4, workingBytesAvailable);
    if (availableBytes === 0) {
      throw new Error('no bytes available');
    }
    // zero-padded on the right when fewer than 4 bytes remain
    workingBytes.set(workingData.subarray(position,
      position + availableBytes));
    workingWord = new DataView(workingBytes.buffer).getUint32(0);
    // track the amount of workingData that has been processed
    workingBitsAvailable = availableBytes * 8;
    workingBytesAvailable -= availableBytes;
  };

  // (count:int):void
  // discard `count` bits from the stream
  this.skipBits = function(count) {
    var skipBytes; // :int
    if (workingBitsAvailable > count) {
      workingWord <<= count;
      workingBitsAvailable -= count;
    } else {
      // consume the rest of the current word, then whole bytes, then the
      // remainder out of a freshly loaded word
      count -= workingBitsAvailable;
      skipBytes = Math.floor(count / 8);
      count -= (skipBytes * 8);
      workingBytesAvailable -= skipBytes;
      this.loadWord();
      workingWord <<= count;
      workingBitsAvailable -= count;
    }
  };

  // (size:int):uint
  // read `size` bits as an unsigned integer; recurses once when the read
  // straddles a word boundary
  this.readBits = function(size) {
    var
      bits = Math.min(workingBitsAvailable, size), // :uint
      valu = workingWord >>> (32 - bits); // :uint
    // if size > 31, handle error
    workingBitsAvailable -= bits;
    if (workingBitsAvailable > 0) {
      workingWord <<= bits;
    } else if (workingBytesAvailable > 0) {
      this.loadWord();
    }
    // pick up any bits that were not available in the current word
    bits = size - bits;
    if (bits > 0) {
      return valu << bits | this.readBits(bits);
    }
    return valu;
  };

  // ():uint
  // count and consume the run of leading zero bits that prefixes an
  // Exp-Golomb code
  this.skipLeadingZeros = function() {
    var leadingZeroCount; // :uint
    for (leadingZeroCount = 0; leadingZeroCount < workingBitsAvailable; ++leadingZeroCount) {
      if ((workingWord & (0x80000000 >>> leadingZeroCount)) !== 0) {
        // the first bit of working word is 1
        workingWord <<= leadingZeroCount;
        workingBitsAvailable -= leadingZeroCount;
        return leadingZeroCount;
      }
    }
    // we exhausted workingWord and still have not found a 1
    this.loadWord();
    return leadingZeroCount + this.skipLeadingZeros();
  };

  // ():void
  this.skipUnsignedExpGolomb = function() {
    this.skipBits(1 + this.skipLeadingZeros());
  };

  // ():void
  // intentionally identical to skipUnsignedExpGolomb: signed and unsigned
  // codes occupy the same number of bits
  this.skipExpGolomb = function() {
    this.skipBits(1 + this.skipLeadingZeros());
  };

  // ():uint
  // decode ue(v): after clz leading zeros, read clz+1 bits (the stop bit
  // plus the suffix) and subtract one
  this.readUnsignedExpGolomb = function() {
    var clz = this.skipLeadingZeros(); // :uint
    return this.readBits(clz + 1) - 1;
  };

  // ():int
  // decode se(v) by mapping the unsigned code to alternating +/- values
  this.readExpGolomb = function() {
    var valu = this.readUnsignedExpGolomb(); // :int
    if (0x01 & valu) {
      // the number is odd if the low order bit is set
      return (1 + valu) >>> 1; // add 1 to make it even, and divide by 2
    }
    return -1 * (valu >>> 1); // divide by two then make it negative
  };

  // Some convenience functions
  // :Boolean
  this.readBoolean = function() {
    return this.readBits(1) === 1;
  };

  // ():int
  this.readUnsignedByte = function() {
    return this.readBits(8);
  };

  // prime the first word so reads can begin immediately
  this.loadWord();
};
  7130. module.exports = ExpGolomb;
  7131. },{}],33:[function(require,module,exports){
  7132. /**
  7133. * mux.js
  7134. *
  7135. * Copyright (c) 2014 Brightcove
  7136. * All rights reserved.
  7137. *
* A lightweight readable stream implementation that handles event dispatching.
  7139. * Objects that inherit from streams should call init in their constructors.
  7140. */
  7141. 'use strict';
  7142. var Stream = function() {
  7143. this.init = function() {
  7144. var listeners = {};
  7145. /**
  7146. * Add a listener for a specified event type.
  7147. * @param type {string} the event name
  7148. * @param listener {function} the callback to be invoked when an event of
  7149. * the specified type occurs
  7150. */
  7151. this.on = function(type, listener) {
  7152. if (!listeners[type]) {
  7153. listeners[type] = [];
  7154. }
  7155. listeners[type] = listeners[type].concat(listener);
  7156. };
  7157. /**
  7158. * Remove a listener for a specified event type.
  7159. * @param type {string} the event name
  7160. * @param listener {function} a function previously registered for this
  7161. * type of event through `on`
  7162. */
  7163. this.off = function(type, listener) {
  7164. var index;
  7165. if (!listeners[type]) {
  7166. return false;
  7167. }
  7168. index = listeners[type].indexOf(listener);
  7169. listeners[type] = listeners[type].slice();
  7170. listeners[type].splice(index, 1);
  7171. return index > -1;
  7172. };
  7173. /**
  7174. * Trigger an event of the specified type on this stream. Any additional
  7175. * arguments to this function are passed as parameters to event listeners.
  7176. * @param type {string} the event name
  7177. */
  7178. this.trigger = function(type) {
  7179. var callbacks, i, length, args;
  7180. callbacks = listeners[type];
  7181. if (!callbacks) {
  7182. return;
  7183. }
  7184. // Slicing the arguments on every invocation of this method
  7185. // can add a significant amount of overhead. Avoid the
  7186. // intermediate object creation for the common case of a
  7187. // single callback argument
  7188. if (arguments.length === 2) {
  7189. length = callbacks.length;
  7190. for (i = 0; i < length; ++i) {
  7191. callbacks[i].call(this, arguments[1]);
  7192. }
  7193. } else {
  7194. args = [];
  7195. i = arguments.length;
  7196. for (i = 1; i < arguments.length; ++i) {
  7197. args.push(arguments[i]);
  7198. }
  7199. length = callbacks.length;
  7200. for (i = 0; i < length; ++i) {
  7201. callbacks[i].apply(this, args);
  7202. }
  7203. }
  7204. };
  7205. /**
  7206. * Destroys the stream and cleans up.
  7207. */
  7208. this.dispose = function() {
  7209. listeners = {};
  7210. };
  7211. };
  7212. };
  7213. /**
  7214. * Forwards all `data` events on this stream to the destination stream. The
  7215. * destination stream should provide a method `push` to receive the data
  7216. * events as they arrive.
  7217. * @param destination {stream} the stream that will receive all `data` events
  7218. * @param autoFlush {boolean} if false, we will not call `flush` on the destination
  7219. * when the current stream emits a 'done' event
  7220. * @see http://nodejs.org/api/stream.html#stream_readable_pipe_destination_options
  7221. */
  7222. Stream.prototype.pipe = function(destination) {
  7223. this.on('data', function(data) {
  7224. destination.push(data);
  7225. });
  7226. this.on('done', function(flushSource) {
  7227. destination.flush(flushSource);
  7228. });
  7229. return destination;
  7230. };
  7231. // Default stream functions that are expected to be overridden to perform
  7232. // actual work. These are provided by the prototype as a sort of no-op
  7233. // implementation so that we don't have to check for their existence in the
  7234. // `pipe` function above.
// Re-emit incoming data downstream; subclasses override this to transform
// the data before emitting it.
Stream.prototype.push = function(data) {
  this.trigger('data', data);
};
// Signal end-of-input downstream; `flushSource` identifies which upstream
// stream initiated the flush.
Stream.prototype.flush = function(flushSource) {
  this.trigger('done', flushSource);
};
module.exports = Stream;
  7242. },{}],34:[function(require,module,exports){
// By default assume browserify was used to bundle app. These arguments are passed to
// the module by browserify.
// NOTE(review): browserify invokes each module wrapper with extra positional
// arguments; indices 3-5 appear to be the outer bundle function, the module
// source table, and the module cache — confirm against the browserify prelude.
var bundleFn = arguments[3];
var sources = arguments[4];
var cache = arguments[5];
// reused below when generating worker source text
var stringify = JSON.stringify;
// flipped to true below when the browserify arguments are missing
var webpack = false;
  7250. // webpackBootstrap
// A minimal webpack module runtime. IMPORTANT: this function is never
// executed directly in this file — bundleWithWebpack() serializes it with
// toString() and textually replaces the placeholder identifier on its final
// line with a real module id, so any edit here changes the generated worker
// source. Avoid repeating that placeholder name anywhere earlier in this
// function (the replacement only touches the first occurrence).
var webpackBootstrapFn = function(modules) {
  // The module cache
  var installedModules = {};
  // The require function
  function __webpack_require__(moduleId) {
    // Check if module is in cache
    if(installedModules[moduleId]) {
      return installedModules[moduleId].exports;
    }
    // Create a new module (and put it into the cache)
    var module = installedModules[moduleId] = {
      i: moduleId,
      l: false,
      exports: {}
    };
    // Execute the module function
    modules[moduleId].call(module.exports, module, module.exports, __webpack_require__);
    // Flag the module as loaded
    module.l = true;
    // Return the exports of the module
    return module.exports;
  }
  // expose the modules object (__webpack_modules__)
  __webpack_require__.m = modules;
  // expose the module cache
  __webpack_require__.c = installedModules;
  // define getter function for harmony exports
  __webpack_require__.d = function(exports, name, getter) {
    if(!__webpack_require__.o(exports, name)) {
      Object.defineProperty(exports, name, {
        configurable: false,
        enumerable: true,
        get: getter
      });
    }
  };
  // getDefaultExport function for compatibility with non-harmony modules
  __webpack_require__.n = function(module) {
    var getter = module && module.__esModule ?
      function getDefault() { return module['default']; } :
      function getModuleExports() { return module; };
    __webpack_require__.d(getter, 'a', getter);
    return getter;
  };
  // Object.prototype.hasOwnProperty.call
  __webpack_require__.o = function(object, property) { return Object.prototype.hasOwnProperty.call(object, property); };
  // __webpack_public_path__
  __webpack_require__.p = "";
  // Load entry module and return exports
  return __webpack_require__(__webpack_require__.s = entryModule);
}
// When the browserify wrapper arguments are absent we were bundled by
// webpack instead; switch to the webpack bootstrap and module table.
if (typeof bundleFn === 'undefined') {
  // Assume this was bundled with webpack and not browserify
  webpack = true;
  bundleFn = webpackBootstrapFn;
  // __webpack_modules__ is presumably injected by webpack's runtime; it is
  // not defined under browserify — TODO confirm against the webpack prelude
  sources = __webpack_modules__;
}
/**
 * Build the source text for a worker bundle when the app was bundled with
 * browserify: locate (or synthesize) the module that exports `fn`, add an
 * entry module that requires and invokes it with `self`, and return a
 * string that re-invokes the browserify prelude over the module table.
 * Relies on the module-scope `bundleFn`, `sources`, `cache`, `stringify`.
 */
var bundleWithBrowserify = function(fn) {
  // with browserify we must find the module key ourselves
  var cacheKeys = Object.keys(cache);
  var fnModuleKey;
  for (var i = 0; i < cacheKeys.length; i++) {
    var cacheKey = cacheKeys[i];
    var cacheExports = cache[cacheKey].exports;
    // Using babel as a transpiler to use esmodule, the export will always
    // be an object with the default export as a property of it. To ensure
    // the existing api and babel esmodule exports are both supported we
    // check for both
    if (cacheExports === fn || cacheExports && cacheExports.default === fn) {
      fnModuleKey = cacheKey;
      break;
    }
  }
  // if we couldn't find one, lets make one
  if (!fnModuleKey) {
    // random 8-hex-digit key; `var i`/`var cacheKey` re-declarations below
    // are hoisted duplicates of the loop variables above (harmless)
    fnModuleKey = Math.floor(Math.pow(16, 8) * Math.random()).toString(16);
    var fnModuleCache = {};
    for (var i = 0; i < cacheKeys.length; i++) {
      var cacheKey = cacheKeys[i];
      fnModuleCache[cacheKey] = cacheKey;
    }
    // `fn` is serialized here via implicit toString() in the concatenation
    sources[fnModuleKey] = [
      'function(require,module,exports){' + fn + '(self); }',
      fnModuleCache
    ];
  }
  // synthesize an entry module that requires fn's module and invokes it
  var entryKey = Math.floor(Math.pow(16, 8) * Math.random()).toString(16);
  var entryCache = {};
  entryCache[fnModuleKey] = fnModuleKey;
  sources[entryKey] = [
    'function(require,module,exports){' +
    // try to call default if defined to also support babel esmodule exports
    'var f = require(' + stringify(fnModuleKey) + ');' +
    '(f.default ? f.default : f)(self);' +
    '}',
    entryCache
  ];
  // emit "(prelude)({ key: [moduleFn, depMap], ... }, {}, [entryKey])"
  return '(' + bundleFn + ')({'
    + Object.keys(sources).map(function(key) {
      return stringify(key) + ':['
        + sources[key][0] + ','
        + stringify(sources[key][1]) + ']';
    }).join(',')
    + '},{},[' + stringify(entryKey) + '])';
};
/**
 * Build the source text for a worker bundle when the app was bundled with
 * webpack: stringify every module, make sure `fn` is invoked by its module,
 * and wrap everything in the serialized bootstrap from above.
 * `fnModuleId` is a string in dev mode (webpack 4 object-shaped module
 * table) and numeric otherwise (array-shaped table).
 */
var bundleWithWebpack = function(fn, fnModuleId) {
  var devMode = typeof fnModuleId === 'string';
  var sourceStrings;
  if (devMode) {
    sourceStrings = {};
  } else {
    sourceStrings = [];
  }
  // serialize each module function; skip holes in the module table
  Object.keys(sources).forEach(function(sKey) {
    if (!sources[sKey]) {
      return;
    }
    sourceStrings[sKey] = sources[sKey].toString();
  });
  // __webpack_require__ only exists when webpack produced this bundle
  var fnModuleExports = __webpack_require__(fnModuleId);
  // Using babel as a transpiler to use esmodule, the export will always
  // be an object with the default export as a property of it. To ensure
  // the existing api and babel esmodule exports are both supported we
  // check for both
  if (!(fnModuleExports && (fnModuleExports === fn || fnModuleExports.default === fn))) {
    // fn is defined inside the module but not exported: splice a call to it
    // just before the module wrapper's closing brace
    var fnSourceString = sourceStrings[fnModuleId];
    sourceStrings[fnModuleId] = fnSourceString.substring(0, fnSourceString.length - 1) +
      '\n' + fn.name + '();\n}';
  }
  var modulesString;
  if (devMode) {
    // must escape quotes to support webpack loader options
    fnModuleId = stringify(fnModuleId);
    // dev mode in webpack4, modules are passed as an object
    var mappedSourceStrings = Object.keys(sourceStrings).map(function(sKey) {
      return stringify(sKey) + ':' + sourceStrings[sKey];
    });
    modulesString = '{' + mappedSourceStrings.join(',') + '}';
  } else {
    modulesString = '[' + sourceStrings.join(',') + ']';
  }
  // substitute the real entry id into the serialized bootstrap (first
  // occurrence of its placeholder identifier) and invoke the result
  return 'var fn = (' + bundleFn.toString().replace('entryModule', fnModuleId) + ')('
    + modulesString
    + ');\n'
    // not a function when calling a function from the current scope
    + '(typeof fn === "function") && fn(self);';
};
  7398. module.exports = function webwackify(fn, fnModuleId) {
  7399. var src;
  7400. if (webpack) {
  7401. src = bundleWithWebpack(fn, fnModuleId);
  7402. } else {
  7403. src = bundleWithBrowserify(fn);
  7404. }
  7405. var blob = new Blob([src], { type: 'text/javascript' });
  7406. var URL = window.URL || window.webkitURL || window.mozURL || window.msURL;
  7407. var workerUrl = URL.createObjectURL(blob);
  7408. var worker = new Worker(workerUrl);
  7409. worker.objectURL = workerUrl;
  7410. return worker;
  7411. };
  7412. },{}],35:[function(require,module,exports){
  7413. (function (global){
  7414. /**
  7415. * @file add-text-track-data.js
  7416. */
  7417. 'use strict';
  7418. Object.defineProperty(exports, '__esModule', {
  7419. value: true
  7420. });
  7421. function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { 'default': obj }; }
  7422. var _globalWindow = require('global/window');
  7423. var _globalWindow2 = _interopRequireDefault(_globalWindow);
  7424. var _videoJs = (typeof window !== "undefined" ? window['videojs'] : typeof global !== "undefined" ? global['videojs'] : null);
  7425. var _videoJs2 = _interopRequireDefault(_videoJs);
  7426. /**
* Define properties on a cue for backwards compatibility,
* but warn the user that the way that they are using it
* is deprecated and will be removed at a later date.
  7430. *
  7431. * @param {Cue} cue the cue to add the properties on
  7432. * @private
  7433. */
  7434. var deprecateOldCue = function deprecateOldCue(cue) {
  7435. Object.defineProperties(cue.frame, {
  7436. id: {
  7437. get: function get() {
  7438. _videoJs2['default'].log.warn('cue.frame.id is deprecated. Use cue.value.key instead.');
  7439. return cue.value.key;
  7440. }
  7441. },
  7442. value: {
  7443. get: function get() {
  7444. _videoJs2['default'].log.warn('cue.frame.value is deprecated. Use cue.value.data instead.');
  7445. return cue.value.data;
  7446. }
  7447. },
  7448. privateData: {
  7449. get: function get() {
  7450. _videoJs2['default'].log.warn('cue.frame.privateData is deprecated. Use cue.value.data instead.');
  7451. return cue.value.data;
  7452. }
  7453. }
  7454. });
  7455. };
  7456. var durationOfVideo = function durationOfVideo(duration) {
  7457. var dur = undefined;
  7458. if (isNaN(duration) || Math.abs(duration) === Infinity) {
  7459. dur = Number.MAX_VALUE;
  7460. } else {
  7461. dur = duration;
  7462. }
  7463. return dur;
  7464. };
  7465. /**
  7466. * Add text track data to a source handler given the captions and
  7467. * metadata from the buffer.
  7468. *
  7469. * @param {Object} sourceHandler the flash or virtual source buffer
  7470. * @param {Array} captionArray an array of caption data
  7471. * @param {Array} metadataArray an array of meta data
  7472. * @private
  7473. */
/**
 * Add text track data to a source handler given the captions and
 * metadata from the buffer.
 *
 * @param {Object} sourceHandler the flash or virtual source buffer; must
 * expose `inbandTextTracks_`, `metadataTrack_`, `timestampOffset`, and
 * `mediaSource_`
 * @param {Array} captionArray an array of caption data
 * @param {Array} metadataArray an array of meta data
 * @private
 */
var addTextTrackData = function addTextTrackData(sourceHandler, captionArray, metadataArray) {
  var Cue = _globalWindow2['default'].WebKitDataCue || _globalWindow2['default'].VTTCue;
  if (captionArray) {
    // `sourceHandler` is passed as forEach's thisArg, so `this` below is
    // the source buffer, not the module
    captionArray.forEach(function (caption) {
      var track = caption.stream;
      this.inbandTextTracks_[track].addCue(new Cue(caption.startTime + this.timestampOffset, caption.endTime + this.timestampOffset, caption.text));
    }, sourceHandler);
  }
  if (metadataArray) {
    // IIFE is a babel block-scoping artifact; it also scopes videoDuration
    (function () {
      var videoDuration = durationOfVideo(sourceHandler.mediaSource_.duration);
      metadataArray.forEach(function (metadata) {
        var time = metadata.cueTime + this.timestampOffset;
        metadata.frames.forEach(function (frame) {
          // a zero-length cue per ID3 frame at the frame's cue time
          var cue = new Cue(time, time, frame.value || frame.url || frame.data || '');
          cue.frame = frame;
          cue.value = frame;
          deprecateOldCue(cue);
          this.metadataTrack_.addCue(cue);
        }, this);
      }, sourceHandler);
      // Updating the metadeta cues so that
      // the endTime of each cue is the startTime of the next cue
      // the endTime of last cue is the duration of the video
      if (sourceHandler.metadataTrack_ && sourceHandler.metadataTrack_.cues && sourceHandler.metadataTrack_.cues.length) {
        (function () {
          var cues = sourceHandler.metadataTrack_.cues;
          var cuesArray = [];
          // Create a copy of the TextTrackCueList...
          // ...disregarding cues with a falsey value
          for (var i = 0; i < cues.length; i++) {
            if (cues[i]) {
              cuesArray.push(cues[i]);
            }
          }
          // Group cues by their startTime value
          var cuesGroupedByStartTime = cuesArray.reduce(function (obj, cue) {
            var timeSlot = obj[cue.startTime] || [];
            timeSlot.push(cue);
            obj[cue.startTime] = timeSlot;
            return obj;
          }, {});
          // Sort startTimes by ascending order (object keys are strings,
          // so compare them numerically)
          var sortedStartTimes = Object.keys(cuesGroupedByStartTime).sort(function (a, b) {
            return Number(a) - Number(b);
          });
          // Map each cue group's endTime to the next group's startTime
          sortedStartTimes.forEach(function (startTime, idx) {
            var cueGroup = cuesGroupedByStartTime[startTime];
            // the last group has no successor and extends to the video end
            var nextTime = Number(sortedStartTimes[idx + 1]) || videoDuration;
            // Map each cue's endTime the next group's startTime
            cueGroup.forEach(function (cue) {
              cue.endTime = nextTime;
            });
          });
        })();
      }
    })();
  }
};
// Export both helpers; `durationOfVideo` is also used internally by the
// metadata cue end-time pass in addTextTrackData.
exports['default'] = {
  addTextTrackData: addTextTrackData,
  durationOfVideo: durationOfVideo
};
module.exports = exports['default'];
  7539. }).call(this,typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {})
  7540. },{"global/window":3}],36:[function(require,module,exports){
  7541. /**
  7542. * @file codec-utils.js
  7543. */
  7544. /**
  7545. * Check if a codec string refers to an audio codec.
  7546. *
  7547. * @param {String} codec codec string to check
  7548. * @return {Boolean} if this is an audio codec
  7549. * @private
  7550. */
  7551. 'use strict';
  7552. Object.defineProperty(exports, '__esModule', {
  7553. value: true
  7554. });
  7555. var isAudioCodec = function isAudioCodec(codec) {
  7556. return (/mp4a\.\d+.\d+/i.test(codec)
  7557. );
  7558. };
  7559. /**
  7560. * Check if a codec string refers to a video codec.
  7561. *
  7562. * @param {String} codec codec string to check
  7563. * @return {Boolean} if this is a video codec
  7564. * @private
  7565. */
  7566. var isVideoCodec = function isVideoCodec(codec) {
  7567. return (/avc1\.[\da-f]+/i.test(codec)
  7568. );
  7569. };
  7570. /**
  7571. * Parse a content type header into a type and parameters
  7572. * object
  7573. *
  7574. * @param {String} type the content type header
  7575. * @return {Object} the parsed content-type
  7576. * @private
  7577. */
  7578. var parseContentType = function parseContentType(type) {
  7579. var object = { type: '', parameters: {} };
  7580. var parameters = type.trim().split(';');
  7581. // first parameter should always be content-type
  7582. object.type = parameters.shift().trim();
  7583. parameters.forEach(function (parameter) {
  7584. var pair = parameter.trim().split('=');
  7585. if (pair.length > 1) {
  7586. var _name = pair[0].replace(/"/g, '').trim();
  7587. var value = pair[1].replace(/"/g, '').trim();
  7588. object.parameters[_name] = value;
  7589. }
  7590. });
  7591. return object;
  7592. };
  7593. /**
  7594. * Replace the old apple-style `avc1.<dd>.<dd>` codec string with the standard
  7595. * `avc1.<hhhhhh>`
  7596. *
  7597. * @param {Array} codecs an array of codec strings to fix
  7598. * @return {Array} the translated codec array
  7599. * @private
  7600. */
  7601. var translateLegacyCodecs = function translateLegacyCodecs(codecs) {
  7602. return codecs.map(function (codec) {
  7603. return codec.replace(/avc1\.(\d+)\.(\d+)/i, function (orig, profile, avcLevel) {
  7604. var profileHex = ('00' + Number(profile).toString(16)).slice(-2);
  7605. var avcLevelHex = ('00' + Number(avcLevel).toString(16)).slice(-2);
  7606. return 'avc1.' + profileHex + '00' + avcLevelHex;
  7607. });
  7608. });
  7609. };
// Codec-string helpers shared with the media-source implementations below.
exports['default'] = {
  isAudioCodec: isAudioCodec,
  parseContentType: parseContentType,
  isVideoCodec: isVideoCodec,
  translateLegacyCodecs: translateLegacyCodecs
};
module.exports = exports['default'];
  7617. },{}],37:[function(require,module,exports){
  7618. /**
  7619. * @file create-text-tracks-if-necessary.js
  7620. */
  7621. /**
  7622. * Create text tracks on video.js if they exist on a segment.
  7623. *
  7624. * @param {Object} sourceBuffer the VSB or FSB
  7625. * @param {Object} mediaSource the HTML or Flash media source
  7626. * @param {Object} segment the segment that may contain the text track
  7627. * @private
  7628. */
  7629. 'use strict';
  7630. Object.defineProperty(exports, '__esModule', {
  7631. value: true
  7632. });
  7633. var createTextTracksIfNecessary = function createTextTracksIfNecessary(sourceBuffer, mediaSource, segment) {
  7634. var player = mediaSource.player_;
  7635. // create an in-band caption track if one is present in the segment
  7636. if (segment.captions && segment.captions.length) {
  7637. if (!sourceBuffer.inbandTextTracks_) {
  7638. sourceBuffer.inbandTextTracks_ = {};
  7639. }
  7640. for (var trackId in segment.captionStreams) {
  7641. if (!sourceBuffer.inbandTextTracks_[trackId]) {
  7642. player.tech_.trigger({ type: 'usage', name: 'hls-608' });
  7643. var track = player.textTracks().getTrackById(trackId);
  7644. if (track) {
  7645. // Resuse an existing track with a CC# id because this was
  7646. // very likely created by videojs-contrib-hls from information
  7647. // in the m3u8 for us to use
  7648. sourceBuffer.inbandTextTracks_[trackId] = track;
  7649. } else {
  7650. // Otherwise, create a track with the default `CC#` label and
  7651. // without a language
  7652. sourceBuffer.inbandTextTracks_[trackId] = player.addRemoteTextTrack({
  7653. kind: 'captions',
  7654. id: trackId,
  7655. label: trackId
  7656. }, false).track;
  7657. }
  7658. }
  7659. }
  7660. }
  7661. if (segment.metadata && segment.metadata.length && !sourceBuffer.metadataTrack_) {
  7662. sourceBuffer.metadataTrack_ = player.addRemoteTextTrack({
  7663. kind: 'metadata',
  7664. label: 'Timed Metadata'
  7665. }, false).track;
  7666. sourceBuffer.metadataTrack_.inBandMetadataTrackDispatchType = segment.metadata.dispatchType;
  7667. }
  7668. };
// Single default export, mirrored onto module.exports for CommonJS callers.
exports['default'] = createTextTracksIfNecessary;
module.exports = exports['default'];
  7671. },{}],38:[function(require,module,exports){
  7672. /**
  7673. * @file flash-constants.js
  7674. */
  7675. /**
  7676. * The maximum size in bytes for append operations to the video.js
  7677. * SWF. Calling through to Flash blocks and can be expensive so
* we chunk data and pass through 32KB at a time, yielding to the
* browser between chunks. With 1ms between chunks this gives a theoretical
* maximum rate of roughly 32MB/s into Flash. Any higher and we begin to drop frames and UI
  7681. * responsiveness suffers.
  7682. *
  7683. * @private
  7684. */
  7685. "use strict";
  7686. Object.defineProperty(exports, "__esModule", {
  7687. value: true
  7688. });
  7689. var flashConstants = {
  7690. // times in milliseconds
  7691. TIME_BETWEEN_CHUNKS: 1,
  7692. BYTES_PER_CHUNK: 1024 * 32
  7693. };
  7694. exports["default"] = flashConstants;
  7695. module.exports = exports["default"];
  7696. },{}],39:[function(require,module,exports){
  7697. (function (global){
  7698. /**
  7699. * @file flash-media-source.js
  7700. */
  7701. 'use strict';
  7702. Object.defineProperty(exports, '__esModule', {
  7703. value: true
  7704. });
  7705. var _createClass = (function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ('value' in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; })();
  7706. var _get = function get(_x, _x2, _x3) { var _again = true; _function: while (_again) { var object = _x, property = _x2, receiver = _x3; _again = false; if (object === null) object = Function.prototype; var desc = Object.getOwnPropertyDescriptor(object, property); if (desc === undefined) { var parent = Object.getPrototypeOf(object); if (parent === null) { return undefined; } else { _x = parent; _x2 = property; _x3 = receiver; _again = true; desc = parent = undefined; continue _function; } } else if ('value' in desc) { return desc.value; } else { var getter = desc.get; if (getter === undefined) { return undefined; } return getter.call(receiver); } } };
  7707. function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { 'default': obj }; }
  7708. function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError('Cannot call a class as a function'); } }
  7709. function _inherits(subClass, superClass) { if (typeof superClass !== 'function' && superClass !== null) { throw new TypeError('Super expression must either be null or a function, not ' + typeof superClass); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, enumerable: false, writable: true, configurable: true } }); if (superClass) Object.setPrototypeOf ? Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass; }
  7710. var _globalDocument = require('global/document');
  7711. var _globalDocument2 = _interopRequireDefault(_globalDocument);
  7712. var _videoJs = (typeof window !== "undefined" ? window['videojs'] : typeof global !== "undefined" ? global['videojs'] : null);
  7713. var _videoJs2 = _interopRequireDefault(_videoJs);
  7714. var _flashSourceBuffer = require('./flash-source-buffer');
  7715. var _flashSourceBuffer2 = _interopRequireDefault(_flashSourceBuffer);
  7716. var _flashConstants = require('./flash-constants');
  7717. var _flashConstants2 = _interopRequireDefault(_flashConstants);
  7718. var _codecUtils = require('./codec-utils');
  7719. /**
* A Flash implementation of HTML MediaSources and a polyfill
* for browsers that don't support native or HTML MediaSources.
  7722. *
  7723. * @link https://developer.mozilla.org/en-US/docs/Web/API/MediaSource
  7724. * @class FlashMediaSource
  7725. * @extends videojs.EventTarget
  7726. */
var FlashMediaSource = (function (_videojs$EventTarget) {
_inherits(FlashMediaSource, _videojs$EventTarget);
function FlashMediaSource() {
var _this = this;
_classCallCheck(this, FlashMediaSource);
_get(Object.getPrototypeOf(FlashMediaSource.prototype), 'constructor', this).call(this);
// source buffers created via addSourceBuffer()
this.sourceBuffers = [];
// mirrors the native MediaSource readyState: 'closed', 'open' or 'ended'
this.readyState = 'closed';
this.on(['sourceopen', 'webkitsourceopen'], function (event) {
// find the swf where we will push media data
_this.swfObj = _globalDocument2['default'].getElementById(event.swfId);
_this.player_ = (0, _videoJs2['default'])(_this.swfObj.parentNode);
_this.tech_ = _this.swfObj.tech;
_this.readyState = 'open';
// seeking clears the flash buffer, so abort any in-flight appends
// on every source buffer when a seek begins
_this.tech_.on('seeking', function () {
var i = _this.sourceBuffers.length;
while (i--) {
_this.sourceBuffers[i].abort();
}
});
// trigger load events
if (_this.swfObj) {
_this.swfObj.vjs_load();
}
});
}
/**
 * Set or return the presentation duration.
 *
 * @param {Double} value the duration of the media in seconds
 * @return {Double} the current presentation duration
 * @link http://www.w3.org/TR/media-source/#widl-MediaSource-duration
 */
/**
 * We have this function so that the html and flash interfaces
 * are the same.
 *
 * @private
 */
_createClass(FlashMediaSource, [{
key: 'addSeekableRange_',
value: function addSeekableRange_() {}
// intentional no-op
/**
 * Create a new flash source buffer and add it to our flash media source.
 *
 * @link https://developer.mozilla.org/en-US/docs/Web/API/MediaSource/addSourceBuffer
 * @param {String} type the content-type of the source
 * @return {Object} the flash source buffer
 * @throws {Error} if the content-type is not an FLV-compatible MPEG-TS type
 */
}, {
key: 'addSourceBuffer',
value: function addSourceBuffer(type) {
var parsedType = (0, _codecUtils.parseContentType)(type);
var sourceBuffer = undefined;
// if this is an FLV type, we'll push data to flash
if (parsedType.type === 'video/mp2t' || parsedType.type === 'audio/mp2t') {
// Flash source buffers
sourceBuffer = new _flashSourceBuffer2['default'](this);
} else {
throw new Error('NotSupportedError (Video.js)');
}
this.sourceBuffers.push(sourceBuffer);
return sourceBuffer;
}
/**
 * Signals the end of the stream.
 *
 * @link https://w3c.github.io/media-source/#widl-MediaSource-endOfStream-void-EndOfStreamError-error
 * @param {String=} error Signals that a playback error
 * has occurred. If specified, it must be either "network" or
 * "decode".
 */
}, {
key: 'endOfStream',
value: function endOfStream(error) {
if (error === 'network') {
// MEDIA_ERR_NETWORK
this.tech_.error(2);
} else if (error === 'decode') {
// MEDIA_ERR_DECODE
this.tech_.error(3);
}
// transition to 'ended' exactly once and notify the SWF
if (this.readyState !== 'ended') {
this.readyState = 'ended';
this.swfObj.vjs_endOfStream();
}
}
}]);
return FlashMediaSource;
})(_videoJs2['default'].EventTarget);
exports['default'] = FlashMediaSource;
// Define `duration` as an accessor pair that proxies to the SWF.
try {
Object.defineProperty(FlashMediaSource.prototype, 'duration', {
/**
 * Return the presentation duration.
 *
 * @return {Double} the duration of the media in seconds, or NaN if
 * the SWF has not been attached yet
 * @link http://www.w3.org/TR/media-source/#widl-MediaSource-duration
 */
get: function get() {
if (!this.swfObj) {
return NaN;
}
// get the current duration from the SWF
return this.swfObj.vjs_getProperty('duration');
},
/**
 * Set the presentation duration.
 *
 * @param {Double} value the duration of the media in seconds
 * @return {Double} the duration of the media in seconds
 * @link http://www.w3.org/TR/media-source/#widl-MediaSource-duration
 */
set: function set(value) {
var i = undefined;
var oldDuration = this.swfObj.vjs_getProperty('duration');
this.swfObj.vjs_setProperty('duration', value);
if (value < oldDuration) {
// In MSE, this triggers the range removal algorithm which causes
// an update to occur
for (i = 0; i < this.sourceBuffers.length; i++) {
this.sourceBuffers[i].remove(value, oldDuration);
}
}
return value;
}
});
} catch (e) {
// IE8 throws if defineProperty is called on a non-DOM node. We
// don't support IE8 but we shouldn't throw an error if loaded
// there.
FlashMediaSource.prototype.duration = NaN;
}
// copy the flash constants (chunk sizes, tick interval, etc.) onto
// FlashMediaSource as static properties
for (var property in _flashConstants2['default']) {
FlashMediaSource[property] = _flashConstants2['default'][property];
}
module.exports = exports['default'];
  7865. }).call(this,typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {})
  7866. },{"./codec-utils":36,"./flash-constants":38,"./flash-source-buffer":40,"global/document":2}],40:[function(require,module,exports){
  7867. (function (global){
  7868. /**
  7869. * @file flash-source-buffer.js
  7870. */
  7871. 'use strict';
  7872. Object.defineProperty(exports, '__esModule', {
  7873. value: true
  7874. });
  7875. var _createClass = (function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ('value' in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; })();
// Babel helper: emulates ES2015 `super` member lookup by walking the
// prototype chain; returns the property value or invokes its getter.
var _get = function get(_x, _x2, _x3) { var _again = true; _function: while (_again) { var object = _x, property = _x2, receiver = _x3; _again = false; if (object === null) object = Function.prototype; var desc = Object.getOwnPropertyDescriptor(object, property); if (desc === undefined) { var parent = Object.getPrototypeOf(object); if (parent === null) { return undefined; } else { _x = parent; _x2 = property; _x3 = receiver; _again = true; desc = parent = undefined; continue _function; } } else if ('value' in desc) { return desc.value; } else { var getter = desc.get; if (getter === undefined) { return undefined; } return getter.call(receiver); } } };
  7877. function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { 'default': obj }; }
  7878. function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError('Cannot call a class as a function'); } }
  7879. function _inherits(subClass, superClass) { if (typeof superClass !== 'function' && superClass !== null) { throw new TypeError('Super expression must either be null or a function, not ' + typeof superClass); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, enumerable: false, writable: true, configurable: true } }); if (superClass) Object.setPrototypeOf ? Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass; }
  7880. var _globalWindow = require('global/window');
  7881. var _globalWindow2 = _interopRequireDefault(_globalWindow);
  7882. var _videoJs = (typeof window !== "undefined" ? window['videojs'] : typeof global !== "undefined" ? global['videojs'] : null);
  7883. var _videoJs2 = _interopRequireDefault(_videoJs);
  7884. var _muxJsLibFlv = require('mux.js/lib/flv');
  7885. var _muxJsLibFlv2 = _interopRequireDefault(_muxJsLibFlv);
  7886. var _removeCuesFromTrack = require('./remove-cues-from-track');
  7887. var _removeCuesFromTrack2 = _interopRequireDefault(_removeCuesFromTrack);
  7888. var _createTextTracksIfNecessary = require('./create-text-tracks-if-necessary');
  7889. var _createTextTracksIfNecessary2 = _interopRequireDefault(_createTextTracksIfNecessary);
  7890. var _addTextTrackData = require('./add-text-track-data');
  7891. var _flashTransmuxerWorker = require('./flash-transmuxer-worker');
  7892. var _flashTransmuxerWorker2 = _interopRequireDefault(_flashTransmuxerWorker);
  7893. var _webwackify = require('webwackify');
  7894. var _webwackify2 = _interopRequireDefault(_webwackify);
  7895. var _flashConstants = require('./flash-constants');
  7896. var _flashConstants2 = _interopRequireDefault(_flashConstants);
  7897. var resolveFlashTransmuxWorker = function resolveFlashTransmuxWorker() {
  7898. var result = undefined;
  7899. try {
  7900. result = require.resolve('./flash-transmuxer-worker');
  7901. } catch (e) {
  7902. // no result
  7903. }
  7904. return result;
  7905. };
  7906. /**
  7907. * A wrapper around the setTimeout function that uses
  7908. * the flash constant time between ticks value.
  7909. *
  7910. * @param {Function} func the function callback to run
  7911. * @private
  7912. */
  7913. var scheduleTick = function scheduleTick(func) {
  7914. // Chrome doesn't invoke requestAnimationFrame callbacks
  7915. // in background tabs, so use setTimeout.
  7916. _globalWindow2['default'].setTimeout(func, _flashConstants2['default'].TIME_BETWEEN_CHUNKS);
  7917. };
  7918. /**
  7919. * Generates a random string of max length 6
  7920. *
  7921. * @return {String} the randomly generated string
  7922. * @function generateRandomString
  7923. * @private
  7924. */
  7925. var generateRandomString = function generateRandomString() {
  7926. return Math.random().toString(36).slice(2, 8);
  7927. };
  7928. /**
  7929. * Round a number to a specified number of places much like
  7930. * toFixed but return a number instead of a string representation.
  7931. *
  7932. * @param {Number} num A number
  7933. * @param {Number} places The number of decimal places which to
  7934. * round
  7935. * @private
  7936. */
  7937. var toDecimalPlaces = function toDecimalPlaces(num, places) {
  7938. if (typeof places !== 'number' || places < 0) {
  7939. places = 0;
  7940. }
  7941. var scale = Math.pow(10, places);
  7942. return Math.round(num * scale) / scale;
  7943. };
  7944. /**
  7945. * A SourceBuffer implementation for Flash rather than HTML.
  7946. *
  7947. * @link https://developer.mozilla.org/en-US/docs/Web/API/MediaSource
  7948. * @param {Object} mediaSource the flash media source
  7949. * @class FlashSourceBuffer
  7950. * @extends videojs.EventTarget
  7951. */
var FlashSourceBuffer = (function (_videojs$EventTarget) {
_inherits(FlashSourceBuffer, _videojs$EventTarget);
function FlashSourceBuffer(mediaSource) {
var _this = this;
_classCallCheck(this, FlashSourceBuffer);
_get(Object.getPrototypeOf(FlashSourceBuffer.prototype), 'constructor', this).call(this);
var encodedHeader = undefined;
// Start off using the globally defined value but refine
// as we append data into flash
this.chunkSize_ = _flashConstants2['default'].BYTES_PER_CHUNK;
// byte arrays queued to be appended
this.buffer_ = [];
// the total number of queued bytes
this.bufferSize_ = 0;
// to be able to determine the correct position to seek to, we
// need to retain information about the mapping between the
// media timeline and PTS values
this.basePtsOffset_ = NaN;
this.mediaSource_ = mediaSource;
// PTS of the last audio/video tag appended; NaN until the first append
this.audioBufferEnd_ = NaN;
this.videoBufferEnd_ = NaN;
// indicates whether the asynchronous continuation of an operation
// is still being processed
// see https://w3c.github.io/media-source/#widl-SourceBuffer-updating
this.updating = false;
this.timestampOffset_ = 0;
// base64-encode the FLV file header so it can be handed to the SWF
encodedHeader = _globalWindow2['default'].btoa(String.fromCharCode.apply(null, Array.prototype.slice.call(_muxJsLibFlv2['default'].getFlvHeader())));
// create function names with added randomness for the global callbacks flash will use
// to get data from javascript into the swf. Random strings are added as a safety
// measure for pages with multiple players since these functions will be global
// instead of per instance. When making a call to the swf, the browser generates a
// try catch code snippet, but just takes the function name and writes out an unquoted
// call to that function. If the player id has any special characters, this will result
// in an error, so safePlayerId replaces all special characters to '_'
var safePlayerId = this.mediaSource_.player_.id().replace(/[^a-zA-Z0-9]/g, '_');
this.flashEncodedHeaderName_ = 'vjs_flashEncodedHeader_' + safePlayerId + generateRandomString();
this.flashEncodedDataName_ = 'vjs_flashEncodedData_' + safePlayerId + generateRandomString();
// one-shot global callback: the SWF calls it to pull the header, and the
// callback removes itself from the global scope once consumed
_globalWindow2['default'][this.flashEncodedHeaderName_] = function () {
delete _globalWindow2['default'][_this.flashEncodedHeaderName_];
return encodedHeader;
};
this.mediaSource_.swfObj.vjs_appendChunkReady(this.flashEncodedHeaderName_);
// run the FLV transmuxer in a web worker; transmuxed segments come back
// via 'data' messages
this.transmuxer_ = (0, _webwackify2['default'])(_flashTransmuxerWorker2['default'], resolveFlashTransmuxWorker());
this.transmuxer_.postMessage({ action: 'init', options: {} });
this.transmuxer_.onmessage = function (event) {
if (event.data.action === 'data') {
_this.receiveBuffer_(event.data.segment);
}
};
// fire loadedmetadata on the tech after the very first append completes
this.one('updateend', function () {
_this.mediaSource_.tech_.trigger('loadedmetadata');
});
Object.defineProperty(this, 'timestampOffset', {
get: function get() {
return this.timestampOffset_;
},
set: function set(val) {
if (typeof val === 'number' && val >= 0) {
this.timestampOffset_ = val;
// We have to tell flash to expect a discontinuity
this.mediaSource_.swfObj.vjs_discontinuity();
// the media <-> PTS mapping must be re-established after
// the discontinuity
this.basePtsOffset_ = NaN;
this.audioBufferEnd_ = NaN;
this.videoBufferEnd_ = NaN;
this.transmuxer_.postMessage({ action: 'reset' });
}
}
});
Object.defineProperty(this, 'buffered', {
get: function get() {
// before the SWF is attached (or if it lacks the getter) report an
// empty time range
if (!this.mediaSource_ || !this.mediaSource_.swfObj || !('vjs_getProperty' in this.mediaSource_.swfObj)) {
return _videoJs2['default'].createTimeRange();
}
var buffered = this.mediaSource_.swfObj.vjs_getProperty('buffered');
if (buffered && buffered.length) {
// round the single range's endpoints to millisecond precision
buffered[0][0] = toDecimalPlaces(buffered[0][0], 3);
buffered[0][1] = toDecimalPlaces(buffered[0][1], 3);
}
return _videoJs2['default'].createTimeRanges(buffered);
}
});
// On a seek we remove all text track data since flash has no concept
// of a buffered-range and everything else is reset on seek
this.mediaSource_.player_.on('seeked', function () {
(0, _removeCuesFromTrack2['default'])(0, Infinity, _this.metadataTrack_);
if (_this.inbandTextTracks_) {
for (var track in _this.inbandTextTracks_) {
(0, _removeCuesFromTrack2['default'])(0, Infinity, _this.inbandTextTracks_[track]);
}
}
});
var onHlsReset = this.onHlsReset_.bind(this);
// hls-reset is fired by videojs.Hls on to the tech after the main SegmentLoader
// resets its state and flushes the buffer
this.mediaSource_.player_.tech_.on('hls-reset', onHlsReset);
// tear down the worker and listeners when the hls tech is disposed
this.mediaSource_.player_.tech_.hls.on('dispose', function () {
_this.transmuxer_.terminate();
_this.mediaSource_.player_.tech_.off('hls-reset', onHlsReset);
});
}
/**
 * Append bytes to the sourcebuffers buffer, in this case we
 * have to append it to swf object.
 *
 * @link https://developer.mozilla.org/en-US/docs/Web/API/SourceBuffer/appendBuffer
 * @param {Array} bytes
 * @throws {InvalidStateError} if an append is already in progress
 */
_createClass(FlashSourceBuffer, [{
key: 'appendBuffer',
value: function appendBuffer(bytes) {
var error = undefined;
if (this.updating) {
error = new Error('SourceBuffer.append() cannot be called ' + 'while an update is in progress');
error.name = 'InvalidStateError';
error.code = 11;
throw error;
}
this.updating = true;
this.mediaSource_.readyState = 'open';
this.trigger({ type: 'update' });
// hand the bytes to the worker; the underlying ArrayBuffer is
// transferred rather than copied
this.transmuxer_.postMessage({
action: 'push',
data: bytes.buffer,
byteOffset: bytes.byteOffset,
byteLength: bytes.byteLength
}, [bytes.buffer]);
this.transmuxer_.postMessage({ action: 'flush' });
}
/**
 * Reset the parser and remove any data queued to be sent to the SWF.
 *
 * @link https://developer.mozilla.org/en-US/docs/Web/API/SourceBuffer/abort
 */
}, {
key: 'abort',
value: function abort() {
this.buffer_ = [];
this.bufferSize_ = 0;
this.mediaSource_.swfObj.vjs_abort();
// report any outstanding updates have ended
if (this.updating) {
this.updating = false;
this.trigger({ type: 'updateend' });
}
}
/**
 * Flash cannot remove ranges already buffered in the NetStream
 * but seeking clears the buffer entirely. For most purposes,
 * having this operation act as a no-op is acceptable.
 *
 * @link https://developer.mozilla.org/en-US/docs/Web/API/SourceBuffer/remove
 * @param {Double} start start of the section to remove
 * @param {Double} end end of the section to remove
 */
}, {
key: 'remove',
value: function remove(start, end) {
// only text track cues are actually removed; media data stays buffered
(0, _removeCuesFromTrack2['default'])(start, end, this.metadataTrack_);
if (this.inbandTextTracks_) {
for (var track in this.inbandTextTracks_) {
(0, _removeCuesFromTrack2['default'])(start, end, this.inbandTextTracks_[track]);
}
}
this.trigger({ type: 'update' });
this.trigger({ type: 'updateend' });
}
/**
 * Receive a buffer from the flv.
 *
 * @param {Object} segment
 * @private
 */
}, {
key: 'receiveBuffer_',
value: function receiveBuffer_(segment) {
var _this2 = this;
// create an in-band caption track if one is present in the segment
(0, _createTextTracksIfNecessary2['default'])(this, this.mediaSource_, segment);
(0, _addTextTrackData.addTextTrackData)(this, segment.captions, segment.metadata);
// Do this asynchronously since convertTagsToData_ can be time consuming
scheduleTick(function () {
var flvBytes = _this2.convertTagsToData_(segment);
// only kick off the drain loop when the queue was empty; otherwise a
// processBuffer_ pass is already scheduled
if (_this2.buffer_.length === 0) {
scheduleTick(_this2.processBuffer_.bind(_this2));
}
if (flvBytes) {
_this2.buffer_.push(flvBytes);
_this2.bufferSize_ += flvBytes.byteLength;
}
});
}
/**
 * Append a portion of the current buffer to the SWF.
 *
 * @private
 */
}, {
key: 'processBuffer_',
value: function processBuffer_() {
var _this3 = this;
var chunkSize = _flashConstants2['default'].BYTES_PER_CHUNK;
if (!this.buffer_.length) {
if (this.updating !== false) {
this.updating = false;
this.trigger({ type: 'updateend' });
}
// do nothing if the buffer is empty
return;
}
// take up to one chunk's worth of bytes from the head of the queue
var chunk = this.buffer_[0].subarray(0, chunkSize);
// requeue any bytes that won't make it this round
if (chunk.byteLength < chunkSize || this.buffer_[0].byteLength === chunkSize) {
this.buffer_.shift();
} else {
this.buffer_[0] = this.buffer_[0].subarray(chunkSize);
}
this.bufferSize_ -= chunk.byteLength;
// base64 encode the bytes
var binary = [];
var length = chunk.byteLength;
for (var i = 0; i < length; i++) {
binary.push(String.fromCharCode(chunk[i]));
}
var b64str = _globalWindow2['default'].btoa(binary.join(''));
// one-shot global callback the SWF uses to pull this chunk
_globalWindow2['default'][this.flashEncodedDataName_] = function () {
// schedule another processBuffer to process any left over data or to
// trigger updateend
scheduleTick(_this3.processBuffer_.bind(_this3));
delete _globalWindow2['default'][_this3.flashEncodedDataName_];
return b64str;
};
// Notify the swf that segment data is ready to be appended
this.mediaSource_.swfObj.vjs_appendChunkReady(this.flashEncodedDataName_);
}
/**
 * Turns an array of flv tags into a Uint8Array representing the
 * flv data. Also removes any tags that are before the current
 * time so that playback begins at or slightly after the right
 * place on a seek
 *
 * @private
 * @param {Object} segmentData object of segment data
 */
}, {
key: 'convertTagsToData_',
value: function convertTagsToData_(segmentData) {
var segmentByteLength = 0;
var tech = this.mediaSource_.tech_;
var videoTargetPts = 0;
var segment = undefined;
var videoTags = segmentData.tags.videoTags;
var audioTags = segmentData.tags.audioTags;
// Establish the media timeline to PTS translation if we don't
// have one already
if (isNaN(this.basePtsOffset_) && (videoTags.length || audioTags.length)) {
// We know there is at least one video or audio tag, but since we may not have both,
// we use pts: Infinity for the missing tag. This forces the following Math.min
// call to use the proper pts value since it will always be less than Infinity
var firstVideoTag = videoTags[0] || { pts: Infinity };
var firstAudioTag = audioTags[0] || { pts: Infinity };
this.basePtsOffset_ = Math.min(firstAudioTag.pts, firstVideoTag.pts);
}
if (tech.seeking()) {
// Do not use previously saved buffer end values while seeking since buffer
// is cleared on all seeks
this.videoBufferEnd_ = NaN;
this.audioBufferEnd_ = NaN;
}
if (isNaN(this.videoBufferEnd_)) {
if (tech.buffered().length) {
videoTargetPts = tech.buffered().end(0) - this.timestampOffset;
}
// Trim to currentTime if seeking
if (tech.seeking()) {
videoTargetPts = Math.max(videoTargetPts, tech.currentTime() - this.timestampOffset);
}
// PTS values are represented in milliseconds
videoTargetPts *= 1e3;
videoTargetPts += this.basePtsOffset_;
} else {
// Add a fudge factor of 0.1 to the last video pts appended since a rendition change
// could append an overlapping segment, in which case there is a high likelihood
// a tag could have a matching pts to videoBufferEnd_, which would cause
// that tag to get appended by the tag.pts >= targetPts check below even though it
// is a duplicate of what was previously appended
videoTargetPts = this.videoBufferEnd_ + 0.1;
}
// filter complete GOPs with a presentation time less than the seek target/end of buffer
var currentIndex = videoTags.length;
// if the last tag is beyond videoTargetPts, then do not search the list for a GOP
// since our videoTargetPts lies in a future segment
if (currentIndex && videoTags[currentIndex - 1].pts >= videoTargetPts) {
// Start by walking backwards from the end of the list until we reach a tag that
// is equal to or less than videoTargetPts
while (--currentIndex) {
var currentTag = videoTags[currentIndex];
if (currentTag.pts > videoTargetPts) {
continue;
}
// if we see a keyFrame or metadata tag once we've gone below videoTargetPts,
// exit the loop as this is the start of the GOP that we want to append
if (currentTag.keyFrame || currentTag.metaDataTag) {
break;
}
}
// We need to check if there are any metadata tags that come before currentIndex
// as those will be metadata tags associated with the GOP we are appending
// There could be 0 to 2 metadata tags that come before the currentIndex depending
// on what videoTargetPts is and whether the transmuxer prepended metadata tags to this
// key frame
while (currentIndex) {
var nextTag = videoTags[currentIndex - 1];
if (!nextTag.metaDataTag) {
break;
}
currentIndex--;
}
}
var filteredVideoTags = videoTags.slice(currentIndex);
var audioTargetPts = undefined;
if (isNaN(this.audioBufferEnd_)) {
audioTargetPts = videoTargetPts;
} else {
// Add a fudge factor of 0.1 to the last audio pts appended since a rendition change
// could append an overlapping segment, in which case there is a high likelihood
// a tag could have a matching pts to audioBufferEnd_, which would cause
// that tag to get appended by the tag.pts >= targetPts check below even though it
// is a duplicate of what was previously appended
audioTargetPts = this.audioBufferEnd_ + 0.1;
}
if (filteredVideoTags.length) {
// If targetPts intersects a GOP and we appended the tags for the GOP that came
// before targetPts, we want to make sure to trim audio tags at the pts
// of the first video tag to avoid brief moments of silence
audioTargetPts = Math.min(audioTargetPts, filteredVideoTags[0].pts);
}
// skip tags with a presentation time less than the seek target/end of buffer
currentIndex = 0;
while (currentIndex < audioTags.length) {
if (audioTags[currentIndex].pts >= audioTargetPts) {
break;
}
currentIndex++;
}
var filteredAudioTags = audioTags.slice(currentIndex);
// update the audio and video buffer ends
if (filteredAudioTags.length) {
this.audioBufferEnd_ = filteredAudioTags[filteredAudioTags.length - 1].pts;
}
if (filteredVideoTags.length) {
this.videoBufferEnd_ = filteredVideoTags[filteredVideoTags.length - 1].pts;
}
var tags = this.getOrderedTags_(filteredVideoTags, filteredAudioTags);
if (tags.length === 0) {
return;
}
// If we are appending data that comes before our target pts, we want to tell
// the swf to adjust its notion of current time to account for the extra tags
// we are appending to complete the GOP that intersects with targetPts
if (tags[0].pts < videoTargetPts && tech.seeking()) {
var fudgeFactor = 1 / 30;
var currentTime = tech.currentTime();
var diff = (videoTargetPts - tags[0].pts) / 1e3;
var adjustedTime = currentTime - diff;
if (adjustedTime < fudgeFactor) {
adjustedTime = 0;
}
try {
this.mediaSource_.swfObj.vjs_adjustCurrentTime(adjustedTime);
} catch (e) {
// no-op for backwards compatibility of swf. If adjustCurrentTime fails,
// the swf may incorrectly report currentTime and buffered ranges
// but should not affect playback other than that the time displayed on
// the progress bar is inaccurate
}
}
// concatenate the bytes into a single segment
for (var i = 0; i < tags.length; i++) {
segmentByteLength += tags[i].bytes.byteLength;
}
segment = new Uint8Array(segmentByteLength);
for (var i = 0, j = 0; i < tags.length; i++) {
segment.set(tags[i].bytes, j);
j += tags[i].bytes.byteLength;
}
return segment;
}
/**
 * Assemble the FLV tags in decoder order.
 *
 * @private
 * @param {Array} videoTags list of video tags
 * @param {Array} audioTags list of audio tags
 * @return {Array} the merged tags, ordered by dts (consumes both inputs)
 */
}, {
key: 'getOrderedTags_',
value: function getOrderedTags_(videoTags, audioTags) {
var tag = undefined;
var tags = [];
while (videoTags.length || audioTags.length) {
if (!videoTags.length) {
// only audio tags remain
tag = audioTags.shift();
} else if (!audioTags.length) {
// only video tags remain
tag = videoTags.shift();
} else if (audioTags[0].dts < videoTags[0].dts) {
// audio should be decoded next
tag = audioTags.shift();
} else {
// video should be decoded next
tag = videoTags.shift();
}
tags.push(tag);
}
return tags;
}
/**
 * Handler for the tech's 'hls-reset' event: ask the transmuxer worker
 * to reset its caption parsing state.
 *
 * @private
 */
}, {
key: 'onHlsReset_',
value: function onHlsReset_() {
this.transmuxer_.postMessage({ action: 'resetCaptions' });
}
}]);
return FlashSourceBuffer;
})(_videoJs2['default'].EventTarget);
// expose FlashSourceBuffer as both the ES-module default and the
// CommonJS module export
exports['default'] = FlashSourceBuffer;
module.exports = exports['default'];
  8382. }).call(this,typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {})
  8383. },{"./add-text-track-data":35,"./create-text-tracks-if-necessary":37,"./flash-constants":38,"./flash-transmuxer-worker":41,"./remove-cues-from-track":43,"global/window":3,"mux.js/lib/flv":13,"webwackify":34}],41:[function(require,module,exports){
  8384. /**
  8385. * @file flash-transmuxer-worker.js
  8386. */
  8387. 'use strict';
  8388. Object.defineProperty(exports, '__esModule', {
  8389. value: true
  8390. });
// Babel helper: install prototype/static members on a transpiled class
// constructor via property descriptors.
var _createClass = (function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ('value' in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; })();
// Babel helper: normalize CommonJS/ES-module interop by wrapping
// non-ES-module exports in a { 'default': ... } object.
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { 'default': obj }; }
// Babel helper: throw when a transpiled class constructor is invoked
// without `new`.
function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError('Cannot call a class as a function'); } }
  8394. var _globalWindow = require('global/window');
  8395. var _globalWindow2 = _interopRequireDefault(_globalWindow);
  8396. var _muxJsLibFlv = require('mux.js/lib/flv');
  8397. var _muxJsLibFlv2 = _interopRequireDefault(_muxJsLibFlv);
  8398. /**
  8399. * Re-emits transmuxer events by converting them into messages to the
  8400. * world outside the worker.
  8401. *
  8402. * @param {Object} transmuxer the transmuxer to wire events on
  8403. * @private
  8404. */
  8405. var wireTransmuxerEvents = function wireTransmuxerEvents(transmuxer) {
  8406. transmuxer.on('data', function (segment) {
  8407. _globalWindow2['default'].postMessage({
  8408. action: 'data',
  8409. segment: segment
  8410. });
  8411. });
  8412. transmuxer.on('done', function (data) {
  8413. _globalWindow2['default'].postMessage({ action: 'done' });
  8414. });
  8415. };
  8416. /**
  8417. * All incoming messages route through this hash. If no function exists
  8418. * to handle an incoming message, then we ignore the message.
  8419. *
  8420. * @class MessageHandlers
  8421. * @param {Object} options the options to initialize with
  8422. */
var MessageHandlers = (function () {
  /**
   * Stores the transmuxer options and immediately builds the first
   * FLV transmuxer via init().
   *
   * @param {Object} options the options to initialize the transmuxer with
   */
  function MessageHandlers(options) {
    _classCallCheck(this, MessageHandlers);
    this.options = options || {};
    this.init();
  }

  /**
   * Our web worker interface so that things can talk to mux.js
   * that will be running in a web worker. The scope is passed to this by
   * webworkify.
   *
   * @param {Object} self the scope for the web worker
   */

  /**
   * Initialize our web worker and wire all the events: disposes any
   * previous transmuxer, creates a fresh mux.js FLV Transmuxer, and
   * re-emits its events as worker messages.
   */
  _createClass(MessageHandlers, [{
    key: 'init',
    value: function init() {
      // Dispose the old pipeline before replacing it so its listeners
      // and buffered state do not leak into the new one.
      if (this.transmuxer) {
        this.transmuxer.dispose();
      }
      this.transmuxer = new _muxJsLibFlv2['default'].Transmuxer(this.options);
      wireTransmuxerEvents(this.transmuxer);
    }

    /**
     * Adds data (a ts segment) to the start of the transmuxer pipeline for
     * processing.
     *
     * @param {Object} data message payload; carries the segment bytes as
     *                      data.data (ArrayBuffer) plus data.byteOffset and
     *                      data.byteLength describing the view into it
     */
  }, {
    key: 'push',
    value: function push(data) {
      // Cast array buffer to correct type for transmuxer
      var segment = new Uint8Array(data.data, data.byteOffset, data.byteLength);
      this.transmuxer.push(segment);
    }

    /**
     * Recreate the transmuxer so that the next segment added via `push`
     * starts with a fresh transmuxer.
     */
  }, {
    key: 'reset',
    value: function reset() {
      this.init();
    }

    /**
     * Forces the pipeline to finish processing the last segment and emit its
     * results.
     */
  }, {
    key: 'flush',
    value: function flush() {
      this.transmuxer.flush();
    }
  }, {
    // Delegates caption-state reset to the underlying transmuxer
    // (triggered by the 'resetCaptions' worker message).
    key: 'resetCaptions',
    value: function resetCaptions() {
      this.transmuxer.resetCaptions();
    }
  }]);
  return MessageHandlers;
})();
  8487. var FlashTransmuxerWorker = function FlashTransmuxerWorker(self) {
  8488. self.onmessage = function (event) {
  8489. if (event.data.action === 'init' && event.data.options) {
  8490. this.messageHandlers = new MessageHandlers(event.data.options);
  8491. return;
  8492. }
  8493. if (!this.messageHandlers) {
  8494. this.messageHandlers = new MessageHandlers();
  8495. }
  8496. if (event.data && event.data.action && event.data.action !== 'init') {
  8497. if (this.messageHandlers[event.data.action]) {
  8498. this.messageHandlers[event.data.action](event.data);
  8499. }
  8500. }
  8501. };
  8502. };
  8503. exports['default'] = function (self) {
  8504. return new FlashTransmuxerWorker(self);
  8505. };
  8506. module.exports = exports['default'];
  8507. },{"global/window":3,"mux.js/lib/flv":13}],42:[function(require,module,exports){
  8508. (function (global){
  8509. /**
  8510. * @file html-media-source.js
  8511. */
  8512. 'use strict';
  8513. Object.defineProperty(exports, '__esModule', {
  8514. value: true
  8515. });
  8516. var _createClass = (function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ('value' in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; })();
  8517. var _get = function get(_x, _x2, _x3) { var _again = true; _function: while (_again) { var object = _x, property = _x2, receiver = _x3; _again = false; if (object === null) object = Function.prototype; var desc = Object.getOwnPropertyDescriptor(object, property); if (desc === undefined) { var parent = Object.getPrototypeOf(object); if (parent === null) { return undefined; } else { _x = parent; _x2 = property; _x3 = receiver; _again = true; desc = parent = undefined; continue _function; } } else if ('value' in desc) { return desc.value; } else { var getter = desc.get; if (getter === undefined) { return undefined; } return getter.call(receiver); } } };
  8518. function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { 'default': obj }; }
  8519. function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError('Cannot call a class as a function'); } }
  8520. function _inherits(subClass, superClass) { if (typeof superClass !== 'function' && superClass !== null) { throw new TypeError('Super expression must either be null or a function, not ' + typeof superClass); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, enumerable: false, writable: true, configurable: true } }); if (superClass) Object.setPrototypeOf ? Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass; }
  8521. var _globalWindow = require('global/window');
  8522. var _globalWindow2 = _interopRequireDefault(_globalWindow);
  8523. var _globalDocument = require('global/document');
  8524. var _globalDocument2 = _interopRequireDefault(_globalDocument);
  8525. var _videoJs = (typeof window !== "undefined" ? window['videojs'] : typeof global !== "undefined" ? global['videojs'] : null);
  8526. var _videoJs2 = _interopRequireDefault(_videoJs);
  8527. var _virtualSourceBuffer = require('./virtual-source-buffer');
  8528. var _virtualSourceBuffer2 = _interopRequireDefault(_virtualSourceBuffer);
  8529. var _addTextTrackData = require('./add-text-track-data');
  8530. var _codecUtils = require('./codec-utils');
  8531. /**
  8532. * Our MediaSource implementation in HTML, mimics native
  8533. * MediaSource where/if possible.
  8534. *
  8535. * @link https://developer.mozilla.org/en-US/docs/Web/API/MediaSource
  8536. * @class HtmlMediaSource
  8537. * @extends videojs.EventTarget
  8538. */
var HtmlMediaSource = (function (_videojs$EventTarget) {
  _inherits(HtmlMediaSource, _videojs$EventTarget);

  function HtmlMediaSource() {
    var _this = this;

    _classCallCheck(this, HtmlMediaSource);
    _get(Object.getPrototypeOf(HtmlMediaSource.prototype), 'constructor', this).call(this);
    var property = undefined;

    this.nativeMediaSource_ = new _globalWindow2['default'].MediaSource();
    // delegate to the native MediaSource's methods by default, binding
    // each native function so `this` stays the native MediaSource
    for (property in this.nativeMediaSource_) {
      if (!(property in HtmlMediaSource.prototype) && typeof this.nativeMediaSource_[property] === 'function') {
        this[property] = this.nativeMediaSource_[property].bind(this.nativeMediaSource_);
      }
    }
    // emulate `duration` and `seekable` until seeking can be
    // handled uniformly for live streams
    // see https://github.com/w3c/media-source/issues/5
    this.duration_ = NaN;
    Object.defineProperty(this, 'duration', {
      get: function get() {
        // Infinity (live) is emulated locally and never reaches the
        // native MediaSource
        if (this.duration_ === Infinity) {
          return this.duration_;
        }
        return this.nativeMediaSource_.duration;
      },
      set: function set(duration) {
        this.duration_ = duration;
        if (duration !== Infinity) {
          this.nativeMediaSource_.duration = duration;
          return;
        }
      }
    });
    Object.defineProperty(this, 'seekable', {
      get: function get() {
        // For emulated-Infinity (live) durations, expose a synthetic
        // seekable range from 0 to the native duration
        if (this.duration_ === Infinity) {
          return _videoJs2['default'].createTimeRanges([[0, this.nativeMediaSource_.duration]]);
        }
        return this.nativeMediaSource_.seekable;
      }
    });
    Object.defineProperty(this, 'readyState', {
      get: function get() {
        return this.nativeMediaSource_.readyState;
      }
    });
    Object.defineProperty(this, 'activeSourceBuffers', {
      get: function get() {
        return this.activeSourceBuffers_;
      }
    });
    // the list of virtual and native SourceBuffers created by this
    // MediaSource
    this.sourceBuffers = [];
    this.activeSourceBuffers_ = [];
    /**
     * update the list of active source buffers based upon various
     * information from HLS and video.js
     *
     * @private
     */
    this.updateActiveSourceBuffers_ = function () {
      // Retain the reference but empty the array
      _this.activeSourceBuffers_.length = 0;
      // If there is only one source buffer, then it will always be active and audio will
      // be disabled based on the codec of the source buffer
      if (_this.sourceBuffers.length === 1) {
        var sourceBuffer = _this.sourceBuffers[0];

        sourceBuffer.appendAudioInitSegment_ = true;
        sourceBuffer.audioDisabled_ = !sourceBuffer.audioCodec_;
        _this.activeSourceBuffers_.push(sourceBuffer);
        return;
      }
      // There are 2 source buffers, a combined (possibly video only) source buffer
      // and an audio only source buffer.
      // By default, the audio in the combined virtual source buffer is enabled
      // and the audio-only source buffer (if it exists) is disabled.
      var disableCombined = false;
      var disableAudioOnly = true;

      // TODO: maybe we can store the sourcebuffers on the track objects?
      // safari may do something like this
      for (var i = 0; i < _this.player_.audioTracks().length; i++) {
        var track = _this.player_.audioTracks()[i];

        if (track.enabled && track.kind !== 'main') {
          // The enabled track is an alternate audio track so disable the audio in
          // the combined source buffer and enable the audio-only source buffer.
          disableCombined = true;
          disableAudioOnly = false;
          break;
        }
      }
      _this.sourceBuffers.forEach(function (sourceBuffer) {
        /* eslinst-disable */
        // TODO once codecs are required, we can switch to using the codecs to determine
        // what stream is the video stream, rather than relying on videoTracks
        /* eslinst-enable */
        sourceBuffer.appendAudioInitSegment_ = true;
        if (sourceBuffer.videoCodec_ && sourceBuffer.audioCodec_) {
          // combined
          sourceBuffer.audioDisabled_ = disableCombined;
        } else if (sourceBuffer.videoCodec_ && !sourceBuffer.audioCodec_) {
          // If the "combined" source buffer is video only, then we do not want
          // disable the audio-only source buffer (this is mostly for demuxed
          // audio and video hls)
          sourceBuffer.audioDisabled_ = true;
          disableAudioOnly = false;
        } else if (!sourceBuffer.videoCodec_ && sourceBuffer.audioCodec_) {
          // audio only
          sourceBuffer.audioDisabled_ = disableAudioOnly;
          // a fully disabled audio-only buffer is not considered active
          if (disableAudioOnly) {
            return;
          }
        }
        _this.activeSourceBuffers_.push(sourceBuffer);
      });
    };
    // On rendition changes, force a fresh audio init segment on each buffer
    this.onPlayerMediachange_ = function () {
      _this.sourceBuffers.forEach(function (sourceBuffer) {
        sourceBuffer.appendAudioInitSegment_ = true;
      });
    };
    // When HLS resets, clear caption state inside each buffer's transmuxer worker
    this.onHlsReset_ = function () {
      _this.sourceBuffers.forEach(function (sourceBuffer) {
        if (sourceBuffer.transmuxer_) {
          sourceBuffer.transmuxer_.postMessage({ action: 'resetCaptions' });
        }
      });
    };
    // Propagate the stream-to-display time mapping to every buffer
    this.onHlsSegmentTimeMapping_ = function (event) {
      _this.sourceBuffers.forEach(function (buffer) {
        return buffer.timeMapping_ = event.mapping;
      });
    };
    // Re-emit MediaSource events on the polyfill
    ['sourceopen', 'sourceclose', 'sourceended'].forEach(function (eventName) {
      this.nativeMediaSource_.addEventListener(eventName, this.trigger.bind(this));
    }, this);
    // capture the associated player when the MediaSource is
    // successfully attached
    this.on('sourceopen', function (event) {
      // Get the player this MediaSource is attached to by locating the
      // video element whose src is this MediaSource's object URL
      var video = _globalDocument2['default'].querySelector('[src="' + _this.url_ + '"]');

      if (!video) {
        return;
      }
      _this.player_ = (0, _videoJs2['default'])(video.parentNode);
      // hls-reset is fired by videojs.Hls on to the tech after the main SegmentLoader
      // resets its state and flushes the buffer
      _this.player_.tech_.on('hls-reset', _this.onHlsReset_);
      // hls-segment-time-mapping is fired by videojs.Hls on to the tech after the main
      // SegmentLoader inspects an MTS segment and has an accurate stream to display
      // time mapping
      _this.player_.tech_.on('hls-segment-time-mapping', _this.onHlsSegmentTimeMapping_);
      if (_this.player_.audioTracks && _this.player_.audioTracks()) {
        _this.player_.audioTracks().on('change', _this.updateActiveSourceBuffers_);
        _this.player_.audioTracks().on('addtrack', _this.updateActiveSourceBuffers_);
        _this.player_.audioTracks().on('removetrack', _this.updateActiveSourceBuffers_);
      }
      _this.player_.on('mediachange', _this.onPlayerMediachange_);
    });
    // When the stream ends, extend the final metadata cue on each buffer
    // so it lasts until the end of the video
    this.on('sourceended', function (event) {
      var duration = (0, _addTextTrackData.durationOfVideo)(_this.duration);

      for (var i = 0; i < _this.sourceBuffers.length; i++) {
        var sourcebuffer = _this.sourceBuffers[i];
        var cues = sourcebuffer.metadataTrack_ && sourcebuffer.metadataTrack_.cues;

        if (cues && cues.length) {
          cues[cues.length - 1].endTime = duration;
        }
      }
    });
    // explicitly terminate any WebWorkers that were created
    // by SourceHandlers
    this.on('sourceclose', function (event) {
      this.sourceBuffers.forEach(function (sourceBuffer) {
        if (sourceBuffer.transmuxer_) {
          sourceBuffer.transmuxer_.terminate();
        }
      });
      this.sourceBuffers.length = 0;
      if (!this.player_) {
        return;
      }
      if (this.player_.audioTracks && this.player_.audioTracks()) {
        this.player_.audioTracks().off('change', this.updateActiveSourceBuffers_);
        this.player_.audioTracks().off('addtrack', this.updateActiveSourceBuffers_);
        this.player_.audioTracks().off('removetrack', this.updateActiveSourceBuffers_);
      }
      // We can only change this if the player hasn't been disposed of yet
      // because `off` eventually tries to use the el_ property. If it has
      // been disposed of, then don't worry about it because there are no
      // event handlers left to unbind anyway
      if (this.player_.el_) {
        this.player_.off('mediachange', this.onPlayerMediachange_);
        this.player_.tech_.off('hls-reset', this.onHlsReset_);
        this.player_.tech_.off('hls-segment-time-mapping', this.onHlsSegmentTimeMapping_);
      }
    });
  }

  /**
   * Add a range that can now be seeked to.
   *
   * @param {Double} start where to start the addition
   * @param {Double} end where to end the addition
   * @throws {Error} InvalidStateError (code 11) unless duration is Infinity
   * @private
   */
  _createClass(HtmlMediaSource, [{
    key: 'addSeekableRange_',
    value: function addSeekableRange_(start, end) {
      var error = undefined;

      if (this.duration !== Infinity) {
        error = new Error('MediaSource.addSeekableRange() can only be invoked ' + 'when the duration is Infinity');
        error.name = 'InvalidStateError';
        error.code = 11;
        throw error;
      }
      // only ever grow the native duration; never shrink it
      if (end > this.nativeMediaSource_.duration || isNaN(this.nativeMediaSource_.duration)) {
        this.nativeMediaSource_.duration = end;
      }
    }

    /**
     * Add a source buffer to the media source.
     *
     * @link https://developer.mozilla.org/en-US/docs/Web/API/MediaSource/addSourceBuffer
     * @param {String} type the content-type of the content
     * @return {Object} the created source buffer
     */
  }, {
    key: 'addSourceBuffer',
    value: function addSourceBuffer(type) {
      var buffer = undefined;
      var parsedType = (0, _codecUtils.parseContentType)(type);

      // Create a VirtualSourceBuffer to transmux MPEG-2 transport
      // stream segments into fragmented MP4s
      if (/^(video|audio)\/mp2t$/i.test(parsedType.type)) {
        var codecs = [];

        if (parsedType.parameters && parsedType.parameters.codecs) {
          codecs = parsedType.parameters.codecs.split(',');
          codecs = (0, _codecUtils.translateLegacyCodecs)(codecs);
          // keep only recognized audio/video codec strings
          codecs = codecs.filter(function (codec) {
            return (0, _codecUtils.isAudioCodec)(codec) || (0, _codecUtils.isVideoCodec)(codec);
          });
        }
        // fall back to baseline H.264 + AAC-LC when no usable codecs were given
        if (codecs.length === 0) {
          codecs = ['avc1.4d400d', 'mp4a.40.2'];
        }
        buffer = new _virtualSourceBuffer2['default'](this, codecs);
        if (this.sourceBuffers.length !== 0) {
          // If another VirtualSourceBuffer already exists, then we are creating a
          // SourceBuffer for an alternate audio track and therefore we know that
          // the source has both an audio and video track.
          // That means we should trigger the manual creation of the real
          // SourceBuffers instead of waiting for the transmuxer to return data
          this.sourceBuffers[0].createRealSourceBuffers_();
          buffer.createRealSourceBuffers_();
          // Automatically disable the audio on the first source buffer if
          // a second source buffer is ever created
          this.sourceBuffers[0].audioDisabled_ = true;
        }
      } else {
        // delegate to the native implementation
        buffer = this.nativeMediaSource_.addSourceBuffer(type);
      }
      this.sourceBuffers.push(buffer);
      return buffer;
    }
  }]);
  return HtmlMediaSource;
})(_videoJs2['default'].EventTarget);
exports['default'] = HtmlMediaSource;
module.exports = exports['default'];
  8809. }).call(this,typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {})
  8810. },{"./add-text-track-data":35,"./codec-utils":36,"./virtual-source-buffer":46,"global/document":2,"global/window":3}],43:[function(require,module,exports){
  8811. /**
  8812. * @file remove-cues-from-track.js
  8813. */
  8814. /**
  8815. * Remove cues from a track on video.js.
  8816. *
  8817. * @param {Double} start start of where we should remove the cue
  8818. * @param {Double} end end of where the we should remove the cue
  8819. * @param {Object} track the text track to remove the cues from
  8820. * @private
  8821. */
  8822. "use strict";
  8823. Object.defineProperty(exports, "__esModule", {
  8824. value: true
  8825. });
  8826. var removeCuesFromTrack = function removeCuesFromTrack(start, end, track) {
  8827. var i = undefined;
  8828. var cue = undefined;
  8829. if (!track) {
  8830. return;
  8831. }
  8832. if (!track.cues) {
  8833. return;
  8834. }
  8835. i = track.cues.length;
  8836. while (i--) {
  8837. cue = track.cues[i];
  8838. // Remove any overlapping cue
  8839. if (cue.startTime <= end && cue.endTime >= start) {
  8840. track.removeCue(cue);
  8841. }
  8842. }
  8843. };
  8844. exports["default"] = removeCuesFromTrack;
  8845. module.exports = exports["default"];
  8846. },{}],44:[function(require,module,exports){
  8847. /**
  8848. * @file transmuxer-worker.js
  8849. */
  8850. /**
  8851. * videojs-contrib-media-sources
  8852. *
  8853. * Copyright (c) 2015 Brightcove
  8854. * All rights reserved.
  8855. *
  8856. * Handles communication between the browser-world and the mux.js
  8857. * transmuxer running inside of a WebWorker by exposing a simple
  8858. * message-based interface to a Transmuxer object.
  8859. */
  8860. 'use strict';
  8861. Object.defineProperty(exports, '__esModule', {
  8862. value: true
  8863. });
  8864. var _createClass = (function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ('value' in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; })();
  8865. function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { 'default': obj }; }
  8866. function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError('Cannot call a class as a function'); } }
  8867. var _globalWindow = require('global/window');
  8868. var _globalWindow2 = _interopRequireDefault(_globalWindow);
  8869. var _muxJsLibMp4 = require('mux.js/lib/mp4');
  8870. var _muxJsLibMp42 = _interopRequireDefault(_muxJsLibMp4);
  8871. /**
  8872. * Re-emits transmuxer events by converting them into messages to the
  8873. * world outside the worker.
  8874. *
  8875. * @param {Object} transmuxer the transmuxer to wire events on
  8876. * @private
  8877. */
  8878. var wireTransmuxerEvents = function wireTransmuxerEvents(transmuxer) {
  8879. transmuxer.on('data', function (segment) {
  8880. // transfer ownership of the underlying ArrayBuffer
  8881. // instead of doing a copy to save memory
  8882. // ArrayBuffers are transferable but generic TypedArrays are not
  8883. // @link https://developer.mozilla.org/en-US/docs/Web/API/Web_Workers_API/Using_web_workers#Passing_data_by_transferring_ownership_(transferable_objects)
  8884. var initArray = segment.initSegment;
  8885. segment.initSegment = {
  8886. data: initArray.buffer,
  8887. byteOffset: initArray.byteOffset,
  8888. byteLength: initArray.byteLength
  8889. };
  8890. var typedArray = segment.data;
  8891. segment.data = typedArray.buffer;
  8892. _globalWindow2['default'].postMessage({
  8893. action: 'data',
  8894. segment: segment,
  8895. byteOffset: typedArray.byteOffset,
  8896. byteLength: typedArray.byteLength
  8897. }, [segment.data]);
  8898. });
  8899. if (transmuxer.captionStream) {
  8900. transmuxer.captionStream.on('data', function (caption) {
  8901. _globalWindow2['default'].postMessage({
  8902. action: 'caption',
  8903. data: caption
  8904. });
  8905. });
  8906. }
  8907. transmuxer.on('done', function (data) {
  8908. _globalWindow2['default'].postMessage({ action: 'done' });
  8909. });
  8910. transmuxer.on('gopInfo', function (gopInfo) {
  8911. _globalWindow2['default'].postMessage({
  8912. action: 'gopInfo',
  8913. gopInfo: gopInfo
  8914. });
  8915. });
  8916. };
  8917. /**
  8918. * All incoming messages route through this hash. If no function exists
  8919. * to handle an incoming message, then we ignore the message.
  8920. *
  8921. * @class MessageHandlers
  8922. * @param {Object} options the options to initialize with
  8923. */
var MessageHandlers = (function () {
  /**
   * Stores the transmuxer options and immediately builds the first
   * MP4 transmuxer via init().
   *
   * @param {Object} options the options to initialize the transmuxer with
   */
  function MessageHandlers(options) {
    _classCallCheck(this, MessageHandlers);
    this.options = options || {};
    this.init();
  }

  /**
   * Our web worker interface so that things can talk to mux.js
   * that will be running in a web worker. The scope is passed to this by
   * webworkify.
   *
   * @param {Object} self the scope for the web worker
   */

  /**
   * Initialize our web worker and wire all the events: disposes any
   * previous transmuxer, creates a fresh mux.js MP4 Transmuxer, and
   * re-emits its events as worker messages.
   */
  _createClass(MessageHandlers, [{
    key: 'init',
    value: function init() {
      // Dispose the old pipeline before replacing it so its listeners
      // and buffered state do not leak into the new one.
      if (this.transmuxer) {
        this.transmuxer.dispose();
      }
      this.transmuxer = new _muxJsLibMp42['default'].Transmuxer(this.options);
      wireTransmuxerEvents(this.transmuxer);
    }

    /**
     * Adds data (a ts segment) to the start of the transmuxer pipeline for
     * processing.
     *
     * @param {Object} data message payload; carries the segment bytes as
     *                      data.data (ArrayBuffer) plus data.byteOffset and
     *                      data.byteLength describing the view into it
     */
  }, {
    key: 'push',
    value: function push(data) {
      // Cast array buffer to correct type for transmuxer
      var segment = new Uint8Array(data.data, data.byteOffset, data.byteLength);
      this.transmuxer.push(segment);
    }

    /**
     * Recreate the transmuxer so that the next segment added via `push`
     * starts with a fresh transmuxer.
     */
  }, {
    key: 'reset',
    value: function reset() {
      this.init();
    }

    /**
     * Set the value that will be used as the `baseMediaDecodeTime` time for the
     * next segment pushed in. Subsequent segments will have their `baseMediaDecodeTime`
     * set relative to the first based on the PTS values.
     *
     * @param {Object} data used to set the timestamp offset in the muxer;
     *                      data.timestampOffset is in seconds and is scaled
     *                      by 90000 (ticks of a 90kHz clock) before use
     */
  }, {
    key: 'setTimestampOffset',
    value: function setTimestampOffset(data) {
      var timestampOffset = data.timestampOffset || 0;

      this.transmuxer.setBaseMediaDecodeTime(Math.round(timestampOffset * 90000));
    }
  }, {
    // Forwards data.appendStart (seconds, scaled by 90000 and rounded up)
    // to the transmuxer's setAudioAppendStart.
    key: 'setAudioAppendStart',
    value: function setAudioAppendStart(data) {
      this.transmuxer.setAudioAppendStart(Math.ceil(data.appendStart * 90000));
    }

    /**
     * Forces the pipeline to finish processing the last segment and emit its
     * results.
     *
     * @param {Object} data event data, not really used
     */
  }, {
    key: 'flush',
    value: function flush(data) {
      this.transmuxer.flush();
    }
  }, {
    // Delegates caption-state reset to the underlying transmuxer
    // (triggered by the 'resetCaptions' worker message).
    key: 'resetCaptions',
    value: function resetCaptions() {
      this.transmuxer.resetCaptions();
    }
  }, {
    // Passes a defensive copy of data.gopsToAlignWith to the transmuxer so
    // the caller's array cannot be mutated by the pipeline.
    key: 'alignGopsWith',
    value: function alignGopsWith(data) {
      this.transmuxer.alignGopsWith(data.gopsToAlignWith.slice());
    }
  }]);
  return MessageHandlers;
})();
  9013. var TransmuxerWorker = function TransmuxerWorker(self) {
  9014. self.onmessage = function (event) {
  9015. if (event.data.action === 'init' && event.data.options) {
  9016. this.messageHandlers = new MessageHandlers(event.data.options);
  9017. return;
  9018. }
  9019. if (!this.messageHandlers) {
  9020. this.messageHandlers = new MessageHandlers();
  9021. }
  9022. if (event.data && event.data.action && event.data.action !== 'init') {
  9023. if (this.messageHandlers[event.data.action]) {
  9024. this.messageHandlers[event.data.action](event.data);
  9025. }
  9026. }
  9027. };
  9028. };
  9029. exports['default'] = function (self) {
  9030. return new TransmuxerWorker(self);
  9031. };
  9032. module.exports = exports['default'];
  9033. },{"global/window":3,"mux.js/lib/mp4":24}],45:[function(require,module,exports){
  9034. (function (global){
  9035. /**
  9036. * @file videojs-contrib-media-sources.js
  9037. */
  9038. 'use strict';
  9039. Object.defineProperty(exports, '__esModule', {
  9040. value: true
  9041. });
  9042. function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { 'default': obj }; }
  9043. var _globalWindow = require('global/window');
  9044. var _globalWindow2 = _interopRequireDefault(_globalWindow);
  9045. var _flashMediaSource = require('./flash-media-source');
  9046. var _flashMediaSource2 = _interopRequireDefault(_flashMediaSource);
  9047. var _htmlMediaSource = require('./html-media-source');
  9048. var _htmlMediaSource2 = _interopRequireDefault(_htmlMediaSource);
  9049. var _videoJs = (typeof window !== "undefined" ? window['videojs'] : typeof global !== "undefined" ? global['videojs'] : null);
  9050. var _videoJs2 = _interopRequireDefault(_videoJs);
// monotonically increasing counter used to mint unique emulated object URLs
// (see URL.createObjectURL below)
var urlCount = 0;
// ------------
// Media Source
// ------------
var defaults = {
  // how to determine the MediaSource implementation to use. There
  // are three available modes:
  // - auto: use native MediaSources where available and Flash
  //   everywhere else
  // - html5: always use native MediaSources
  // - flash: always use the Flash MediaSource polyfill
  mode: 'auto'
};
// store references to the media sources so they can be connected
// to a video element (a swf object)
// TODO: can we store this somewhere local to this module?
_videoJs2['default'].mediaSources = {};
  9068. /**
  9069. * Provide a method for a swf object to notify JS that a
  9070. * media source is now open.
  9071. *
  9072. * @param {String} msObjectURL string referencing the MSE Object URL
  9073. * @param {String} swfId the swf id
  9074. */
  9075. var open = function open(msObjectURL, swfId) {
  9076. var mediaSource = _videoJs2['default'].mediaSources[msObjectURL];
  9077. if (mediaSource) {
  9078. mediaSource.trigger({ type: 'sourceopen', swfId: swfId });
  9079. } else {
  9080. throw new Error('Media Source not found (Video.js)');
  9081. }
  9082. };
  9083. /**
  9084. * Check to see if the native MediaSource object exists and supports
  9085. * an MP4 container with both H.264 video and AAC-LC audio.
  9086. *
  9087. * @return {Boolean} if native media sources are supported
  9088. */
  9089. var supportsNativeMediaSources = function supportsNativeMediaSources() {
  9090. return !!_globalWindow2['default'].MediaSource && !!_globalWindow2['default'].MediaSource.isTypeSupported && _globalWindow2['default'].MediaSource.isTypeSupported('video/mp4;codecs="avc1.4d400d,mp4a.40.2"');
  9091. };
  9092. /**
  9093. * An emulation of the MediaSource API so that we can support
  9094. * native and non-native functionality such as flash and
  9095. * video/mp2t videos. returns an instance of HtmlMediaSource or
  9096. * FlashMediaSource depending on what is supported and what options
  9097. * are passed in.
  9098. *
  9099. * @link https://developer.mozilla.org/en-US/docs/Web/API/MediaSource/MediaSource
  9100. * @param {Object} options options to use during setup.
  9101. */
  9102. var MediaSource = function MediaSource(options) {
  9103. var settings = _videoJs2['default'].mergeOptions(defaults, options);
  9104. this.MediaSource = {
  9105. open: open,
  9106. supportsNativeMediaSources: supportsNativeMediaSources
  9107. };
  9108. // determine whether HTML MediaSources should be used
  9109. if (settings.mode === 'html5' || settings.mode === 'auto' && supportsNativeMediaSources()) {
  9110. return new _htmlMediaSource2['default']();
  9111. } else if (_videoJs2['default'].getTech('Flash')) {
  9112. return new _flashMediaSource2['default']();
  9113. }
  9114. throw new Error('Cannot use Flash or Html5 to create a MediaSource for this video');
  9115. };
  9116. exports.MediaSource = MediaSource;
  9117. MediaSource.open = open;
  9118. MediaSource.supportsNativeMediaSources = supportsNativeMediaSources;
  9119. /**
  9120. * A wrapper around the native URL for our MSE object
  9121. * implementation, this object is exposed under videojs.URL
  9122. *
  9123. * @link https://developer.mozilla.org/en-US/docs/Web/API/URL/URL
  9124. */
  9125. var URL = {
  9126. /**
  9127. * A wrapper around the native createObjectURL for our objects.
  9128. * This function maps a native or emulated mediaSource to a blob
  9129. * url so that it can be loaded into video.js
  9130. *
  9131. * @link https://developer.mozilla.org/en-US/docs/Web/API/URL/createObjectURL
  9132. * @param {MediaSource} object the object to create a blob url to
  9133. */
  9134. createObjectURL: function createObjectURL(object) {
  9135. var objectUrlPrefix = 'blob:vjs-media-source/';
  9136. var url = undefined;
  9137. // use the native MediaSource to generate an object URL
  9138. if (object instanceof _htmlMediaSource2['default']) {
  9139. url = _globalWindow2['default'].URL.createObjectURL(object.nativeMediaSource_);
  9140. object.url_ = url;
  9141. return url;
  9142. }
  9143. // if the object isn't an emulated MediaSource, delegate to the
  9144. // native implementation
  9145. if (!(object instanceof _flashMediaSource2['default'])) {
  9146. url = _globalWindow2['default'].URL.createObjectURL(object);
  9147. object.url_ = url;
  9148. return url;
  9149. }
  9150. // build a URL that can be used to map back to the emulated
  9151. // MediaSource
  9152. url = objectUrlPrefix + urlCount;
  9153. urlCount++;
  9154. // setup the mapping back to object
  9155. _videoJs2['default'].mediaSources[url] = object;
  9156. return url;
  9157. }
  9158. };
  9159. exports.URL = URL;
  9160. _videoJs2['default'].MediaSource = MediaSource;
  9161. _videoJs2['default'].URL = URL;
  9162. }).call(this,typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {})
  9163. },{"./flash-media-source":39,"./html-media-source":42,"global/window":3}],46:[function(require,module,exports){
  9164. (function (global){
  9165. /**
  9166. * @file virtual-source-buffer.js
  9167. */
  9168. 'use strict';
  9169. Object.defineProperty(exports, '__esModule', {
  9170. value: true
  9171. });
  9172. var _createClass = (function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ('value' in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; })();
  9173. var _get = function get(_x, _x2, _x3) { var _again = true; _function: while (_again) { var object = _x, property = _x2, receiver = _x3; _again = false; if (object === null) object = Function.prototype; var desc = Object.getOwnPropertyDescriptor(object, property); if (desc === undefined) { var parent = Object.getPrototypeOf(object); if (parent === null) { return undefined; } else { _x = parent; _x2 = property; _x3 = receiver; _again = true; desc = parent = undefined; continue _function; } } else if ('value' in desc) { return desc.value; } else { var getter = desc.get; if (getter === undefined) { return undefined; } return getter.call(receiver); } } };
  9174. function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { 'default': obj }; }
  9175. function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError('Cannot call a class as a function'); } }
  9176. function _inherits(subClass, superClass) { if (typeof superClass !== 'function' && superClass !== null) { throw new TypeError('Super expression must either be null or a function, not ' + typeof superClass); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, enumerable: false, writable: true, configurable: true } }); if (superClass) Object.setPrototypeOf ? Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass; }
  9177. var _videoJs = (typeof window !== "undefined" ? window['videojs'] : typeof global !== "undefined" ? global['videojs'] : null);
  9178. var _videoJs2 = _interopRequireDefault(_videoJs);
  9179. var _createTextTracksIfNecessary = require('./create-text-tracks-if-necessary');
  9180. var _createTextTracksIfNecessary2 = _interopRequireDefault(_createTextTracksIfNecessary);
  9181. var _removeCuesFromTrack = require('./remove-cues-from-track');
  9182. var _removeCuesFromTrack2 = _interopRequireDefault(_removeCuesFromTrack);
  9183. var _addTextTrackData = require('./add-text-track-data');
  9184. var _webwackify = require('webwackify');
  9185. var _webwackify2 = _interopRequireDefault(_webwackify);
  9186. var _transmuxerWorker = require('./transmuxer-worker');
  9187. var _transmuxerWorker2 = _interopRequireDefault(_transmuxerWorker);
  9188. var _codecUtils = require('./codec-utils');
  9189. var resolveTransmuxWorker = function resolveTransmuxWorker() {
  9190. var result = undefined;
  9191. try {
  9192. result = require.resolve('./transmuxer-worker');
  9193. } catch (e) {
  9194. // no result
  9195. }
  9196. return result;
  9197. };
  9198. // We create a wrapper around the SourceBuffer so that we can manage the
  9199. // state of the `updating` property manually. We have to do this because
  9200. // Firefox changes `updating` to false long before triggering `updateend`
  9201. // events and that was causing strange problems in videojs-contrib-hls
  9202. var makeWrappedSourceBuffer = function makeWrappedSourceBuffer(mediaSource, mimeType) {
  9203. var sourceBuffer = mediaSource.addSourceBuffer(mimeType);
  9204. var wrapper = Object.create(null);
  9205. wrapper.updating = false;
  9206. wrapper.realBuffer_ = sourceBuffer;
  9207. var _loop = function (key) {
  9208. if (typeof sourceBuffer[key] === 'function') {
  9209. wrapper[key] = function () {
  9210. return sourceBuffer[key].apply(sourceBuffer, arguments);
  9211. };
  9212. } else if (typeof wrapper[key] === 'undefined') {
  9213. Object.defineProperty(wrapper, key, {
  9214. get: function get() {
  9215. return sourceBuffer[key];
  9216. },
  9217. set: function set(v) {
  9218. return sourceBuffer[key] = v;
  9219. }
  9220. });
  9221. }
  9222. };
  9223. for (var key in sourceBuffer) {
  9224. _loop(key);
  9225. }
  9226. return wrapper;
  9227. };
  9228. /**
  9229. * Returns a list of gops in the buffer that have a pts value of 3 seconds or more in
  9230. * front of current time.
  9231. *
  9232. * @param {Array} buffer
  9233. * The current buffer of gop information
  9234. * @param {Player} player
  9235. * The player instance
  9236. * @param {Double} mapping
  9237. * Offset to map display time to stream presentation time
  9238. * @return {Array}
  9239. * List of gops considered safe to append over
  9240. */
  9241. var gopsSafeToAlignWith = function gopsSafeToAlignWith(buffer, player, mapping) {
  9242. if (!player || !buffer.length) {
  9243. return [];
  9244. }
  9245. // pts value for current time + 3 seconds to give a bit more wiggle room
  9246. var currentTimePts = Math.ceil((player.currentTime() - mapping + 3) * 90000);
  9247. var i = undefined;
  9248. for (i = 0; i < buffer.length; i++) {
  9249. if (buffer[i].pts > currentTimePts) {
  9250. break;
  9251. }
  9252. }
  9253. return buffer.slice(i);
  9254. };
  9255. exports.gopsSafeToAlignWith = gopsSafeToAlignWith;
  9256. /**
  9257. * Appends gop information (timing and byteLength) received by the transmuxer for the
  9258. * gops appended in the last call to appendBuffer
  9259. *
  9260. * @param {Array} buffer
  9261. * The current buffer of gop information
  9262. * @param {Array} gops
  9263. * List of new gop information
  9264. * @param {boolean} replace
  9265. * If true, replace the buffer with the new gop information. If false, append the
  9266. * new gop information to the buffer in the right location of time.
  9267. * @return {Array}
  9268. * Updated list of gop information
  9269. */
  9270. var updateGopBuffer = function updateGopBuffer(buffer, gops, replace) {
  9271. if (!gops.length) {
  9272. return buffer;
  9273. }
  9274. if (replace) {
  9275. // If we are in safe append mode, then completely overwrite the gop buffer
  9276. // with the most recent appeneded data. This will make sure that when appending
  9277. // future segments, we only try to align with gops that are both ahead of current
  9278. // time and in the last segment appended.
  9279. return gops.slice();
  9280. }
  9281. var start = gops[0].pts;
  9282. var i = 0;
  9283. for (i; i < buffer.length; i++) {
  9284. if (buffer[i].pts >= start) {
  9285. break;
  9286. }
  9287. }
  9288. return buffer.slice(0, i).concat(gops);
  9289. };
  9290. exports.updateGopBuffer = updateGopBuffer;
  9291. /**
  9292. * Removes gop information in buffer that overlaps with provided start and end
  9293. *
  9294. * @param {Array} buffer
  9295. * The current buffer of gop information
  9296. * @param {Double} start
  9297. * position to start the remove at
  9298. * @param {Double} end
  9299. * position to end the remove at
  9300. * @param {Double} mapping
  9301. * Offset to map display time to stream presentation time
  9302. */
  9303. var removeGopBuffer = function removeGopBuffer(buffer, start, end, mapping) {
  9304. var startPts = Math.ceil((start - mapping) * 90000);
  9305. var endPts = Math.ceil((end - mapping) * 90000);
  9306. var updatedBuffer = buffer.slice();
  9307. var i = buffer.length;
  9308. while (i--) {
  9309. if (buffer[i].pts <= endPts) {
  9310. break;
  9311. }
  9312. }
  9313. if (i === -1) {
  9314. // no removal because end of remove range is before start of buffer
  9315. return updatedBuffer;
  9316. }
  9317. var j = i + 1;
  9318. while (j--) {
  9319. if (buffer[j].pts <= startPts) {
  9320. break;
  9321. }
  9322. }
  9323. // clamp remove range start to 0 index
  9324. j = Math.max(j, 0);
  9325. updatedBuffer.splice(j, i - j + 1);
  9326. return updatedBuffer;
  9327. };
  9328. exports.removeGopBuffer = removeGopBuffer;
/**
 * VirtualSourceBuffers exist so that we can transmux non-native formats
 * into a native format, but keep the same api as a native source buffer.
 * It creates a transmuxer, that works in its own thread (a web worker) and
 * that transmuxer muxes the data into a native format. VirtualSourceBuffer will
 * then send all of that data to the native sourcebuffer so that it is
 * indistinguishable from a natively supported format.
 *
 * @param {HtmlMediaSource} mediaSource the parent mediaSource
 * @param {Array} codecs array of codecs that we will be dealing with
 * @class VirtualSourceBuffer
 * @extends video.js.EventTarget
 */
var VirtualSourceBuffer = (function (_videojs$EventTarget) {
  _inherits(VirtualSourceBuffer, _videojs$EventTarget);

  function VirtualSourceBuffer(mediaSource, codecs) {
    var _this = this;

    _classCallCheck(this, VirtualSourceBuffer);

    _get(Object.getPrototypeOf(VirtualSourceBuffer.prototype), 'constructor', this).call(this, _videoJs2['default'].EventTarget);
    this.timestampOffset_ = 0;
    // transmuxed segments collected by data_() and flushed by done_()
    this.pendingBuffers_ = [];
    // our own "updating" flag, independent of the native buffers' flags
    this.bufferUpdating_ = false;

    this.mediaSource_ = mediaSource;
    this.codecs_ = codecs;
    this.audioCodec_ = null;
    this.videoCodec_ = null;
    this.audioDisabled_ = false;
    this.appendAudioInitSegment_ = true;
    // gop (group-of-pictures) timing info reported back by the transmuxer
    this.gopBuffer_ = [];
    this.timeMapping_ = 0;
    // IE11 requires appends to be aligned on gop boundaries ("safe append")
    this.safeAppend_ = _videoJs2['default'].browser.IE_VERSION >= 11;
    var options = {
      remux: false,
      alignGopsAtEnd: this.safeAppend_
    };

    // split the provided codecs into the audio and video codec we will use
    this.codecs_.forEach(function (codec) {
      if ((0, _codecUtils.isAudioCodec)(codec)) {
        _this.audioCodec_ = codec;
      } else if ((0, _codecUtils.isVideoCodec)(codec)) {
        _this.videoCodec_ = codec;
      }
    });

    // append muxed segments to their respective native buffers as
    // soon as they are available
    this.transmuxer_ = (0, _webwackify2['default'])(_transmuxerWorker2['default'], resolveTransmuxWorker());
    this.transmuxer_.postMessage({ action: 'init', options: options });

    this.transmuxer_.onmessage = function (event) {
      if (event.data.action === 'data') {
        return _this.data_(event);
      }
      if (event.data.action === 'done') {
        return _this.done_(event);
      }
      if (event.data.action === 'gopInfo') {
        return _this.appendGopInfo_(event);
      }
    };

    // this timestampOffset is a property with the side-effect of resetting
    // baseMediaDecodeTime in the transmuxer on the setter
    Object.defineProperty(this, 'timestampOffset', {
      get: function get() {
        return this.timestampOffset_;
      },
      set: function set(val) {
        // negative and non-numeric values are silently ignored
        if (typeof val === 'number' && val >= 0) {
          this.timestampOffset_ = val;
          this.appendAudioInitSegment_ = true;

          // reset gop buffer on timestampoffset as this signals a change in timeline
          this.gopBuffer_.length = 0;
          this.timeMapping_ = 0;

          // We have to tell the transmuxer to set the baseMediaDecodeTime to
          // the desired timestampOffset for the next segment
          this.transmuxer_.postMessage({
            action: 'setTimestampOffset',
            timestampOffset: val
          });
        }
      }
    });

    // setting the append window affects both source buffers
    Object.defineProperty(this, 'appendWindowStart', {
      get: function get() {
        return (this.videoBuffer_ || this.audioBuffer_).appendWindowStart;
      },
      set: function set(start) {
        if (this.videoBuffer_) {
          this.videoBuffer_.appendWindowStart = start;
        }
        if (this.audioBuffer_) {
          this.audioBuffer_.appendWindowStart = start;
        }
      }
    });

    // this buffer is "updating" if either of its native buffers are
    Object.defineProperty(this, 'updating', {
      get: function get() {
        return !!(this.bufferUpdating_ || !this.audioDisabled_ && this.audioBuffer_ && this.audioBuffer_.updating || this.videoBuffer_ && this.videoBuffer_.updating);
      }
    });

    // the buffered property is the intersection of the buffered
    // ranges of the native source buffers
    Object.defineProperty(this, 'buffered', {
      get: function get() {
        var start = null;
        var end = null;
        var arity = 0;
        var extents = [];
        var ranges = [];

        // neither buffer has been created yet
        if (!this.videoBuffer_ && !this.audioBuffer_) {
          return _videoJs2['default'].createTimeRange();
        }

        // only one buffer is configured
        if (!this.videoBuffer_) {
          return this.audioBuffer_.buffered;
        }
        if (!this.audioBuffer_) {
          return this.videoBuffer_.buffered;
        }

        // both buffers are configured
        if (this.audioDisabled_) {
          return this.videoBuffer_.buffered;
        }

        // both buffers are empty
        if (this.videoBuffer_.buffered.length === 0 && this.audioBuffer_.buffered.length === 0) {
          return _videoJs2['default'].createTimeRange();
        }

        // Handle the case where we have both buffers and create an
        // intersection of the two
        var videoBuffered = this.videoBuffer_.buffered;
        var audioBuffered = this.audioBuffer_.buffered;
        var count = videoBuffered.length;

        // A) Gather up all start and end times
        while (count--) {
          extents.push({ time: videoBuffered.start(count), type: 'start' });
          extents.push({ time: videoBuffered.end(count), type: 'end' });
        }
        count = audioBuffered.length;
        while (count--) {
          extents.push({ time: audioBuffered.start(count), type: 'start' });
          extents.push({ time: audioBuffered.end(count), type: 'end' });
        }

        // B) Sort them by time
        extents.sort(function (a, b) {
          return a.time - b.time;
        });

        // C) Go along one by one incrementing arity for start and decrementing
        // arity for ends
        for (count = 0; count < extents.length; count++) {
          if (extents[count].type === 'start') {
            arity++;

            // D) If arity is ever incremented to 2 we are entering an
            // overlapping range
            if (arity === 2) {
              start = extents[count].time;
            }
          } else if (extents[count].type === 'end') {
            arity--;

            // E) If arity is ever decremented to 1 we are leaving an
            // overlapping range
            if (arity === 1) {
              end = extents[count].time;
            }
          }

          // F) Record overlapping ranges
          if (start !== null && end !== null) {
            ranges.push([start, end]);
            start = null;
            end = null;
          }
        }
        return _videoJs2['default'].createTimeRanges(ranges);
      }
    });
  }

  /**
   * When we get a data event from the transmuxer
   * we call this function and handle the data that
   * was sent to us
   *
   * @private
   * @param {Event} event the data event from the transmuxer
   */
  _createClass(VirtualSourceBuffer, [{
    key: 'data_',
    value: function data_(event) {
      var segment = event.data.segment;

      // Cast ArrayBuffer to TypedArray
      segment.data = new Uint8Array(segment.data, event.data.byteOffset, event.data.byteLength);
      segment.initSegment = new Uint8Array(segment.initSegment.data, segment.initSegment.byteOffset, segment.initSegment.byteLength);
      (0, _createTextTracksIfNecessary2['default'])(this, this.mediaSource_, segment);

      // Add the segments to the pendingBuffers array
      this.pendingBuffers_.push(segment);
      return;
    }

    /**
     * When we get a done event from the transmuxer
     * we call this function and we process all
     * of the pending data that we have been saving in the
     * data_ function
     *
     * @private
     * @param {Event} event the done event from the transmuxer
     */
  }, {
    key: 'done_',
    value: function done_(event) {
      // Don't process and append data if the mediaSource is closed
      if (this.mediaSource_.readyState === 'closed') {
        this.pendingBuffers_.length = 0;
        return;
      }

      // All buffers should have been flushed from the muxer
      // start processing anything we have received
      this.processPendingSegments_();
      return;
    }

    /**
     * Create our internal native audio/video source buffers and add
     * event handlers to them with the following conditions:
     * 1. they do not already exist on the mediaSource
     * 2. this VSB has a codec for them
     *
     * @private
     */
  }, {
    key: 'createRealSourceBuffers_',
    value: function createRealSourceBuffers_() {
      var _this2 = this;

      var types = ['audio', 'video'];

      types.forEach(function (type) {
        // Don't create a SourceBuffer of this type if we don't have a
        // codec for it
        if (!_this2[type + 'Codec_']) {
          return;
        }

        // Do nothing if a SourceBuffer of this type already exists
        if (_this2[type + 'Buffer_']) {
          return;
        }

        var buffer = null;

        // If the mediasource already has a SourceBuffer for the codec
        // use that
        if (_this2.mediaSource_[type + 'Buffer_']) {
          buffer = _this2.mediaSource_[type + 'Buffer_'];
          // In multiple audio track cases, the audio source buffer is disabled
          // on the main VirtualSourceBuffer by the HTMLMediaSource much earlier
          // than createRealSourceBuffers_ is called to create the second
          // VirtualSourceBuffer because that happens as a side-effect of
          // videojs-contrib-hls starting the audioSegmentLoader. As a result,
          // the audioBuffer is essentially "ownerless" and no one will toggle
          // the `updating` state back to false once the `updateend` event is received
          //
          // Setting `updating` to false manually will work around this
          // situation and allow work to continue
          buffer.updating = false;
        } else {
          var codecProperty = type + 'Codec_';
          var mimeType = type + '/mp4;codecs="' + _this2[codecProperty] + '"';

          buffer = makeWrappedSourceBuffer(_this2.mediaSource_.nativeMediaSource_, mimeType);
          _this2.mediaSource_[type + 'Buffer_'] = buffer;
        }

        _this2[type + 'Buffer_'] = buffer;

        // Wire up the events to the SourceBuffer
        ['update', 'updatestart', 'updateend'].forEach(function (event) {
          buffer.addEventListener(event, function () {
            // if audio is disabled
            if (type === 'audio' && _this2.audioDisabled_) {
              return;
            }

            if (event === 'updateend') {
              _this2[type + 'Buffer_'].updating = false;
            }

            // only re-emit the event when the sibling buffer (if any) is idle
            var shouldTrigger = types.every(function (t) {
              // skip checking audio's updating status if audio
              // is not enabled
              if (t === 'audio' && _this2.audioDisabled_) {
                return true;
              }
              // if the other type is updating we don't trigger
              if (type !== t && _this2[t + 'Buffer_'] && _this2[t + 'Buffer_'].updating) {
                return false;
              }
              return true;
            });

            if (shouldTrigger) {
              return _this2.trigger(event);
            }
          });
        });
      });
    }

    /**
     * Emulate the native mediasource function, but our function will
     * send all of the proposed segments to the transmuxer so that we
     * can transmux them before we append them to our internal
     * native source buffers in the correct format.
     *
     * @link https://developer.mozilla.org/en-US/docs/Web/API/SourceBuffer/appendBuffer
     * @param {Uint8Array} segment the segment to append to the buffer
     */
  }, {
    key: 'appendBuffer',
    value: function appendBuffer(segment) {
      // Start the internal "updating" state
      this.bufferUpdating_ = true;

      if (this.audioBuffer_ && this.audioBuffer_.buffered.length) {
        var audioBuffered = this.audioBuffer_.buffered;

        // tell the transmuxer where buffered audio ends so it can trim
        // overlapping audio from the start of the next segment
        this.transmuxer_.postMessage({
          action: 'setAudioAppendStart',
          appendStart: audioBuffered.end(audioBuffered.length - 1)
        });
      }

      if (this.videoBuffer_) {
        this.transmuxer_.postMessage({
          action: 'alignGopsWith',
          gopsToAlignWith: gopsSafeToAlignWith(this.gopBuffer_, this.mediaSource_.player_, this.timeMapping_)
        });
      }

      this.transmuxer_.postMessage({
        action: 'push',
        // Send the typed-array of data as an ArrayBuffer so that
        // it can be sent as a "Transferable" and avoid the costly
        // memory copy
        data: segment.buffer,

        // To recreate the original typed-array, we need information
        // about what portion of the ArrayBuffer it was a view into
        byteOffset: segment.byteOffset,
        byteLength: segment.byteLength
      }, [segment.buffer]);
      this.transmuxer_.postMessage({ action: 'flush' });
    }

    /**
     * Appends gop information (timing and byteLength) received by the transmuxer for the
     * gops appended in the last call to appendBuffer
     *
     * @param {Event} event
     *        The gopInfo event from the transmuxer
     * @param {Array} event.data.gopInfo
     *        List of gop info to append
     */
  }, {
    key: 'appendGopInfo_',
    value: function appendGopInfo_(event) {
      this.gopBuffer_ = updateGopBuffer(this.gopBuffer_, event.data.gopInfo, this.safeAppend_);
    }

    /**
     * Emulate the native mediasource function and remove parts
     * of the buffer from any of our internal buffers that exist
     *
     * @link https://developer.mozilla.org/en-US/docs/Web/API/SourceBuffer/remove
     * @param {Double} start position to start the remove at
     * @param {Double} end position to end the remove at
     */
  }, {
    key: 'remove',
    value: function remove(start, end) {
      if (this.videoBuffer_) {
        this.videoBuffer_.updating = true;
        this.videoBuffer_.remove(start, end);
        this.gopBuffer_ = removeGopBuffer(this.gopBuffer_, start, end, this.timeMapping_);
      }
      if (!this.audioDisabled_ && this.audioBuffer_) {
        this.audioBuffer_.updating = true;
        this.audioBuffer_.remove(start, end);
      }

      // Remove Metadata Cues (id3)
      (0, _removeCuesFromTrack2['default'])(start, end, this.metadataTrack_);

      // Remove Any Captions
      if (this.inbandTextTracks_) {
        for (var track in this.inbandTextTracks_) {
          (0, _removeCuesFromTrack2['default'])(start, end, this.inbandTextTracks_[track]);
        }
      }
    }

    /**
     * Process any segments that the muxer has output
     * Concatenate segments together based on type and append them into
     * their respective sourceBuffers
     *
     * @private
     */
  }, {
    key: 'processPendingSegments_',
    value: function processPendingSegments_() {
      var sortedSegments = {
        video: {
          segments: [],
          bytes: 0
        },
        audio: {
          segments: [],
          bytes: 0
        },
        captions: [],
        metadata: []
      };

      // Sort segments into separate video/audio arrays and
      // keep track of their total byte lengths
      sortedSegments = this.pendingBuffers_.reduce(function (segmentObj, segment) {
        var type = segment.type;
        var data = segment.data;
        var initSegment = segment.initSegment;

        segmentObj[type].segments.push(data);
        segmentObj[type].bytes += data.byteLength;

        segmentObj[type].initSegment = initSegment;

        // Gather any captions into a single array
        if (segment.captions) {
          segmentObj.captions = segmentObj.captions.concat(segment.captions);
        }

        if (segment.info) {
          segmentObj[type].info = segment.info;
        }

        // Gather any metadata into a single array
        if (segment.metadata) {
          segmentObj.metadata = segmentObj.metadata.concat(segment.metadata);
        }

        return segmentObj;
      }, sortedSegments);

      // Create the real source buffers if they don't exist by now since we
      // finally are sure what tracks are contained in the source
      if (!this.videoBuffer_ && !this.audioBuffer_) {
        // Remove any codecs that may have been specified by default but
        // are no longer applicable now
        if (sortedSegments.video.bytes === 0) {
          this.videoCodec_ = null;
        }
        if (sortedSegments.audio.bytes === 0) {
          this.audioCodec_ = null;
        }

        this.createRealSourceBuffers_();
      }

      if (sortedSegments.audio.info) {
        this.mediaSource_.trigger({ type: 'audioinfo', info: sortedSegments.audio.info });
      }
      if (sortedSegments.video.info) {
        this.mediaSource_.trigger({ type: 'videoinfo', info: sortedSegments.video.info });
      }

      // the audio init segment is only prepended on the first append after
      // construction or after a timestampOffset change
      if (this.appendAudioInitSegment_) {
        if (!this.audioDisabled_ && this.audioBuffer_) {
          sortedSegments.audio.segments.unshift(sortedSegments.audio.initSegment);
          sortedSegments.audio.bytes += sortedSegments.audio.initSegment.byteLength;
        }
        this.appendAudioInitSegment_ = false;
      }

      var triggerUpdateend = false;

      // Merge multiple video and audio segments into one and append
      if (this.videoBuffer_ && sortedSegments.video.bytes) {
        sortedSegments.video.segments.unshift(sortedSegments.video.initSegment);
        sortedSegments.video.bytes += sortedSegments.video.initSegment.byteLength;
        this.concatAndAppendSegments_(sortedSegments.video, this.videoBuffer_);
        // TODO: are video tracks the only ones with text tracks?
        (0, _addTextTrackData.addTextTrackData)(this, sortedSegments.captions, sortedSegments.metadata);
      } else if (this.videoBuffer_ && (this.audioDisabled_ || !this.audioBuffer_)) {
        // The transmuxer did not return any bytes of video, meaning it was all trimmed
        // for gop alignment. Since we have a video buffer and audio is disabled, updateend
        // will never be triggered by this source buffer, which will cause contrib-hls
        // to be stuck forever waiting for updateend. If audio is not disabled, updateend
        // will be triggered by the audio buffer, which will be sent upwards since the video
        // buffer will not be in an updating state.
        triggerUpdateend = true;
      }

      if (!this.audioDisabled_ && this.audioBuffer_) {
        this.concatAndAppendSegments_(sortedSegments.audio, this.audioBuffer_);
      }

      this.pendingBuffers_.length = 0;

      if (triggerUpdateend) {
        this.trigger('updateend');
      }

      // We are no longer in the internal "updating" state
      this.bufferUpdating_ = false;
    }

    /**
     * Combine all segments into a single Uint8Array and then append them
     * to the destination buffer
     *
     * @param {Object} segmentObj
     * @param {SourceBuffer} destinationBuffer native source buffer to append data to
     * @private
     */
  }, {
    key: 'concatAndAppendSegments_',
    value: function concatAndAppendSegments_(segmentObj, destinationBuffer) {
      var offset = 0;
      var tempBuffer = undefined;

      if (segmentObj.bytes) {
        tempBuffer = new Uint8Array(segmentObj.bytes);

        // Combine the individual segments into one large typed-array
        segmentObj.segments.forEach(function (segment) {
          tempBuffer.set(segment, offset);
          offset += segment.byteLength;
        });

        try {
          destinationBuffer.updating = true;
          destinationBuffer.appendBuffer(tempBuffer);
        } catch (error) {
          // surface append failures through the player's error mechanism
          if (this.mediaSource_.player_) {
            this.mediaSource_.player_.error({
              code: -3,
              type: 'APPEND_BUFFER_ERR',
              message: error.message,
              originalError: error
            });
          }
        }
      }
    }

    /**
     * Emulate the native mediasource function. abort any sourceBuffer
     * actions and throw out any un-appended data.
     *
     * @link https://developer.mozilla.org/en-US/docs/Web/API/SourceBuffer/abort
     */
  }, {
    key: 'abort',
    value: function abort() {
      if (this.videoBuffer_) {
        this.videoBuffer_.abort();
      }
      if (!this.audioDisabled_ && this.audioBuffer_) {
        this.audioBuffer_.abort();
      }
      if (this.transmuxer_) {
        this.transmuxer_.postMessage({ action: 'reset' });
      }
      this.pendingBuffers_.length = 0;
      this.bufferUpdating_ = false;
    }
  }]);

  return VirtualSourceBuffer;
})(_videoJs2['default'].EventTarget);
  9860. exports['default'] = VirtualSourceBuffer;
  9861. }).call(this,typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {})
  9862. },{"./add-text-track-data":35,"./codec-utils":36,"./create-text-tracks-if-necessary":37,"./remove-cues-from-track":43,"./transmuxer-worker":44,"webwackify":34}],47:[function(require,module,exports){
  9863. (function (global){
  9864. 'use strict';
  9865. var _createClass = (function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ('value' in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; })();
  9866. function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { 'default': obj }; }
  9867. function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError('Cannot call a class as a function'); } }
  9868. var _qunit = (typeof window !== "undefined" ? window['QUnit'] : typeof global !== "undefined" ? global['QUnit'] : null);
  9869. var _qunit2 = _interopRequireDefault(_qunit);
  9870. var _srcAddTextTrackData = require('../src/add-text-track-data');
  9871. var equal = _qunit2['default'].equal;
  9872. var _module = _qunit2['default'].module;
  9873. var test = _qunit2['default'].test;
// Minimal TextTrack stand-in: records every cue added to it, in order, so
// tests can assert on cue counts.
var MockTextTrack = (function () {
function MockTextTrack() {
_classCallCheck(this, MockTextTrack);
this.cues = [];
}
_createClass(MockTextTrack, [{
key: 'addCue',
value: function addCue(cue) {
this.cues.push(cue);
}
}]);
return MockTextTrack;
})();
_module('Text Track Data', {
beforeEach: function beforeEach() {
// Fake source handler with one mock track per 608 caption channel plus a
// metadata track — the only fields addTextTrackData reads in these tests.
this.sourceHandler = {
inbandTextTracks_: {
CC1: new MockTextTrack(),
CC2: new MockTextTrack(),
CC3: new MockTextTrack(),
CC4: new MockTextTrack()
},
metadataTrack_: new MockTextTrack(),
mediaSource_: {
duration: NaN
},
timestampOffset: 0
};
}
});
test('does nothing if no cues are specified', function () {
(0, _srcAddTextTrackData.addTextTrackData)(this.sourceHandler, [], []);
equal(this.sourceHandler.inbandTextTracks_.CC1.cues.length, 0, 'added no 608 cues');
equal(this.sourceHandler.metadataTrack_.cues.length, 0, 'added no metadata cues');
});
test('creates cues for 608 captions with "stream" property in ccX', function () {
// One caption per 608 channel; each should land on its matching track.
(0, _srcAddTextTrackData.addTextTrackData)(this.sourceHandler, [{
startTime: 0,
endTime: 1,
text: 'CC1 text',
stream: 'CC1'
}, {
startTime: 0,
endTime: 1,
text: 'CC2 text',
stream: 'CC2'
}, {
startTime: 0,
endTime: 1,
text: 'CC3 text',
stream: 'CC3'
}, {
startTime: 0,
endTime: 1,
text: 'CC4 text',
stream: 'CC4'
}], []);
equal(this.sourceHandler.inbandTextTracks_.CC1.cues.length, 1, 'added one 608 cue to CC1');
equal(this.sourceHandler.inbandTextTracks_.CC2.cues.length, 1, 'added one 608 cue to CC2');
equal(this.sourceHandler.inbandTextTracks_.CC3.cues.length, 1, 'added one 608 cue to CC3');
equal(this.sourceHandler.inbandTextTracks_.CC4.cues.length, 1, 'added one 608 cue to CC4');
equal(this.sourceHandler.metadataTrack_.cues.length, 0, 'added no metadata cues');
});
test('creates cues for timed metadata', function () {
// Metadata cues go to the dedicated metadata track, not the caption tracks.
(0, _srcAddTextTrackData.addTextTrackData)(this.sourceHandler, [], [{
cueTime: 1,
frames: [{}]
}]);
equal(this.sourceHandler.inbandTextTracks_.CC1.cues.length, 0, 'added no 608 cues');
equal(this.sourceHandler.metadataTrack_.cues.length, 1, 'added one metadata cues');
});
  9945. }).call(this,typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {})
  9946. },{"../src/add-text-track-data":35}],48:[function(require,module,exports){
  9947. (function (global){
  9948. 'use strict';
// Standard Babel interop helper (browserify duplicates it per module).
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { 'default': obj }; }
var _qunit = (typeof window !== "undefined" ? window['QUnit'] : typeof global !== "undefined" ? global['QUnit'] : null);
var _qunit2 = _interopRequireDefault(_qunit);
var _srcCodecUtils = require('../src/codec-utils');
var deepEqual = _qunit2['default'].deepEqual;
var _module = _qunit2['default'].module;
var test = _qunit2['default'].test;
_module('Codec Utils');
test('translates legacy codecs', function () {
// Legacy avc1.<profile>.<level> strings are rewritten to the modern
// avc1.PPCCLL hex form; already-modern strings must pass through untouched.
deepEqual((0, _srcCodecUtils.translateLegacyCodecs)(['avc1.66.30', 'avc1.66.30']), ['avc1.42001e', 'avc1.42001e'], 'translates legacy avc1.66.30 codec');
deepEqual((0, _srcCodecUtils.translateLegacyCodecs)(['avc1.42C01E', 'avc1.42C01E']), ['avc1.42C01E', 'avc1.42C01E'], 'does not translate modern codecs');
deepEqual((0, _srcCodecUtils.translateLegacyCodecs)(['avc1.42C01E', 'avc1.66.30']), ['avc1.42C01E', 'avc1.42001e'], 'only translates legacy codecs when mixed');
deepEqual((0, _srcCodecUtils.translateLegacyCodecs)(['avc1.4d0020', 'avc1.100.41', 'avc1.77.41', 'avc1.77.32', 'avc1.77.31', 'avc1.77.30', 'avc1.66.30', 'avc1.66.21', 'avc1.42C01e']), ['avc1.4d0020', 'avc1.640029', 'avc1.4d0029', 'avc1.4d0020', 'avc1.4d001f', 'avc1.4d001e', 'avc1.42001e', 'avc1.420015', 'avc1.42C01e'], 'translates a whole bunch');
});
  9963. }).call(this,typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {})
  9964. },{"../src/codec-utils":36}],49:[function(require,module,exports){
  9965. (function (global){
  9966. 'use strict';
  9967. function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { 'default': obj }; }
  9968. var _globalDocument = require('global/document');
  9969. var _globalDocument2 = _interopRequireDefault(_globalDocument);
  9970. var _globalWindow = require('global/window');
  9971. var _globalWindow2 = _interopRequireDefault(_globalWindow);
  9972. var _qunit = (typeof window !== "undefined" ? window['QUnit'] : typeof global !== "undefined" ? global['QUnit'] : null);
  9973. var _qunit2 = _interopRequireDefault(_qunit);
  9974. var _sinon = (typeof window !== "undefined" ? window['sinon'] : typeof global !== "undefined" ? global['sinon'] : null);
  9975. var _sinon2 = _interopRequireDefault(_sinon);
  9976. var _videoJs = (typeof window !== "undefined" ? window['videojs'] : typeof global !== "undefined" ? global['videojs'] : null);
  9977. var _videoJs2 = _interopRequireDefault(_videoJs);
  9978. var _srcFlashMediaSource = require('../src/flash-media-source');
  9979. var _srcFlashMediaSource2 = _interopRequireDefault(_srcFlashMediaSource);
  9980. var _srcHtmlMediaSource = require('../src/html-media-source');
  9981. var _srcHtmlMediaSource2 = _interopRequireDefault(_srcHtmlMediaSource);
  9982. // we disable this because browserify needs to include these files
  9983. // but the exports are not important
  9984. /* eslint-disable no-unused-vars */
  9985. var _srcVideojsContribMediaSourcesJs = require('../src/videojs-contrib-media-sources.js');
/* eslint-enable no-unused-vars */
_qunit2['default'].module('createObjectURL', {
beforeEach: function beforeEach() {
this.fixture = _globalDocument2['default'].getElementById('qunit-fixture');
this.video = _globalDocument2['default'].createElement('video');
this.fixture.appendChild(this.video);
this.player = (0, _videoJs2['default'])(this.video);
// Mock the environment's timers because certain things - particularly
// player readiness - are asynchronous in video.js 5.
this.clock = _sinon2['default'].useFakeTimers();
this.oldMediaSource = _globalWindow2['default'].MediaSource || _globalWindow2['default'].WebKitMediaSource;
// force MediaSource support
if (!_globalWindow2['default'].MediaSource) {
// Minimal constructor: a Blob with no-op listener/buffer hooks is enough
// for createObjectURL to treat it as a native MediaSource.
_globalWindow2['default'].MediaSource = function () {
var result = new _globalWindow2['default'].Blob();
result.addEventListener = function () {};
result.addSourceBuffer = function () {};
return result;
};
}
},
afterEach: function afterEach() {
// The clock _must_ be restored before disposing the player; otherwise,
// certain timeout listeners that happen inside video.js may throw errors.
this.clock.restore();
this.player.dispose();
_globalWindow2['default'].MediaSource = _globalWindow2['default'].WebKitMediaSource = this.oldMediaSource;
}
});
// Emulated URLs carry the 'blob:vjs-media-source/' prefix; native ones do not.
_qunit2['default'].test('delegates to the native implementation', function () {
_qunit2['default'].ok(!/blob:vjs-media-source\//.test(_videoJs2['default'].URL.createObjectURL(new _globalWindow2['default'].Blob())), 'created a native blob URL');
});
_qunit2['default'].test('uses the native MediaSource when available', function () {
_qunit2['default'].ok(!/blob:vjs-media-source\//.test(_videoJs2['default'].URL.createObjectURL(new _srcHtmlMediaSource2['default']())), 'created a native blob URL');
});
_qunit2['default'].test('emulates a URL for the shim', function () {
_qunit2['default'].ok(/blob:vjs-media-source\//.test(_videoJs2['default'].URL.createObjectURL(new _srcFlashMediaSource2['default']())), 'created an emulated blob URL');
});
_qunit2['default'].test('stores the associated blob URL on the media source', function () {
var blob = new _globalWindow2['default'].Blob();
var url = _videoJs2['default'].URL.createObjectURL(blob);
_qunit2['default'].equal(blob.url_, url, 'captured the generated URL');
});
  10029. }).call(this,typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {})
  10030. },{"../src/flash-media-source":39,"../src/html-media-source":42,"../src/videojs-contrib-media-sources.js":45,"global/document":2,"global/window":3}],50:[function(require,module,exports){
  10031. (function (global){
  10032. 'use strict';
  10033. function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { 'default': obj }; }
  10034. var _globalDocument = require('global/document');
  10035. var _globalDocument2 = _interopRequireDefault(_globalDocument);
  10036. var _globalWindow = require('global/window');
  10037. var _globalWindow2 = _interopRequireDefault(_globalWindow);
  10038. var _qunit = (typeof window !== "undefined" ? window['QUnit'] : typeof global !== "undefined" ? global['QUnit'] : null);
  10039. var _qunit2 = _interopRequireDefault(_qunit);
  10040. var _sinon = (typeof window !== "undefined" ? window['sinon'] : typeof global !== "undefined" ? global['sinon'] : null);
  10041. var _sinon2 = _interopRequireDefault(_sinon);
  10042. var _videoJs = (typeof window !== "undefined" ? window['videojs'] : typeof global !== "undefined" ? global['videojs'] : null);
  10043. var _videoJs2 = _interopRequireDefault(_videoJs);
  10044. var _muxJs = require('mux.js');
  10045. var _muxJs2 = _interopRequireDefault(_muxJs);
  10046. var _srcFlashSourceBuffer = require('../src/flash-source-buffer');
  10047. var _srcFlashSourceBuffer2 = _interopRequireDefault(_srcFlashSourceBuffer);
  10048. var _srcFlashConstants = require('../src/flash-constants');
  10049. var _srcFlashConstants2 = _interopRequireDefault(_srcFlashConstants);
  10050. // we disable this because browserify needs to include these files
  10051. // but the exports are not important
  10052. /* eslint-disable no-unused-vars */
  10053. var _srcVideojsContribMediaSourcesJs = require('../src/videojs-contrib-media-sources.js');
/* eslint-enable no-unused-vars */
  10055. // return the sequence of calls to append to the SWF
  10056. var appendCalls = function appendCalls(calls) {
  10057. return calls.filter(function (call) {
  10058. return call.callee && call.callee === 'vjs_appendChunkReady';
  10059. });
  10060. };
  10061. var getFlvHeader = function getFlvHeader() {
  10062. return new Uint8Array([1, 2, 3]);
  10063. };
  10064. var makeFlvTag = function makeFlvTag(pts, data) {
  10065. return {
  10066. pts: pts,
  10067. dts: pts,
  10068. bytes: data
  10069. };
  10070. };
  10071. var timers = undefined;
  10072. var oldSTO = undefined;
  10073. var fakeSTO = function fakeSTO() {
  10074. oldSTO = _globalWindow2['default'].setTimeout;
  10075. timers = [];
  10076. timers.run = function (num) {
  10077. var timer = undefined;
  10078. while (num--) {
  10079. timer = this.pop();
  10080. if (timer) {
  10081. timer();
  10082. }
  10083. }
  10084. };
  10085. timers.runAll = function () {
  10086. while (this.length) {
  10087. this.pop()();
  10088. }
  10089. };
  10090. _globalWindow2['default'].setTimeout = function (callback) {
  10091. timers.push(callback);
  10092. };
  10093. _globalWindow2['default'].setTimeout.fake = true;
  10094. };
  10095. var unfakeSTO = function unfakeSTO() {
  10096. timers = [];
  10097. _globalWindow2['default'].setTimeout = oldSTO;
  10098. };
  10099. // Create a WebWorker-style message that signals the transmuxer is done
  10100. var createDataMessage = function createDataMessage(data, audioData, metadata, captions) {
  10101. var captionStreams = {};
  10102. if (captions) {
  10103. captions.forEach(function (caption) {
  10104. captionStreams[caption.stream] = true;
  10105. });
  10106. }
  10107. return {
  10108. data: {
  10109. action: 'data',
  10110. segment: {
  10111. tags: {
  10112. videoTags: data.map(function (tag) {
  10113. return makeFlvTag(tag.pts, tag.bytes);
  10114. }),
  10115. audioTags: audioData ? audioData.map(function (tag) {
  10116. return makeFlvTag(tag.pts, tag.bytes);
  10117. }) : []
  10118. },
  10119. metadata: metadata,
  10120. captions: captions,
  10121. captionStreams: captionStreams
  10122. }
  10123. }
  10124. };
  10125. };
  10126. var doneMessage = {
  10127. data: {
  10128. action: 'done'
  10129. }
  10130. };
  10131. var postMessage_ = function postMessage_(msg) {
  10132. var _this = this;
  10133. if (msg.action === 'push') {
  10134. _globalWindow2['default'].setTimeout(function () {
  10135. _this.onmessage(createDataMessage([{
  10136. bytes: new Uint8Array(msg.data, msg.byteOffset, msg.byteLength),
  10137. pts: 0
  10138. }]));
  10139. }, 1);
  10140. } else if (msg.action === 'flush') {
  10141. _globalWindow2['default'].setTimeout(function () {
  10142. _this.onmessage(doneMessage);
  10143. }, 1);
  10144. }
  10145. };
// Stands up a player whose tech element is replaced by a fake SWF object that
// records every vjs_* call into this.swfCalls for the assertions below.
_qunit2['default'].module('Flash MediaSource', {
beforeEach: function beforeEach(assert) {
var _this2 = this;
var swfObj = undefined;
// Mock the environment's timers because certain things - particularly
// player readiness - are asynchronous in video.js 5.
this.clock = _sinon2['default'].useFakeTimers();
this.fixture = _globalDocument2['default'].getElementById('qunit-fixture');
this.video = _globalDocument2['default'].createElement('video');
this.fixture.appendChild(this.video);
this.player = (0, _videoJs2['default'])(this.video);
// Disable native MediaSource so the Flash fallback path is exercised.
this.oldMediaSource = _globalWindow2['default'].MediaSource || _globalWindow2['default'].WebKitMediaSource;
_globalWindow2['default'].MediaSource = null;
_globalWindow2['default'].WebKitMediaSource = null;
this.Flash = _videoJs2['default'].getTech('Flash');
this.oldFlashSupport = this.Flash.isSupported;
this.oldCanPlay = this.Flash.canPlaySource;
this.Flash.canPlaySource = this.Flash.isSupported = function () {
return true;
};
this.oldFlashTransmuxerPostMessage = _muxJs2['default'].flv.Transmuxer.postMessage;
this.oldGetFlvHeader = _muxJs2['default'].flv.getFlvHeader;
_muxJs2['default'].flv.getFlvHeader = getFlvHeader;
this.swfCalls = [];
this.mediaSource = new _videoJs2['default'].MediaSource();
this.player.src({
src: _videoJs2['default'].URL.createObjectURL(this.mediaSource),
type: 'video/mp2t'
});
// vjs6 takes 1 tick to set source async
this.clock.tick(1);
swfObj = _globalDocument2['default'].createElement('fake-object');
swfObj.id = 'fake-swf-' + assert.test.testId;
this.player.el().replaceChild(swfObj, this.player.tech_.el());
this.player.tech_.hls = new _videoJs2['default'].EventTarget();
this.player.tech_.el_ = swfObj;
swfObj.tech = this.player.tech_;
/* eslint-disable camelcase */
swfObj.vjs_abort = function () {
_this2.swfCalls.push('abort');
};
swfObj.vjs_getProperty = function (attr) {
if (attr === 'buffered') {
return [];
} else if (attr === 'currentTime') {
return 0;
// ignored for vjs6
} else if (attr === 'videoWidth') {
return 0;
}
_this2.swfCalls.push({ attr: attr });
};
swfObj.vjs_load = function () {
_this2.swfCalls.push('load');
};
swfObj.vjs_setProperty = function (attr, value) {
_this2.swfCalls.push({ attr: attr, value: value });
};
swfObj.vjs_discontinuity = function (attr, value) {
_this2.swfCalls.push({ attr: attr, value: value });
};
// Decode the base64 chunk handed to the SWF back into a byte array so the
// tests can assert on the exact appended bytes.
swfObj.vjs_appendChunkReady = function (method) {
_globalWindow2['default'].setTimeout(function () {
var chunk = _globalWindow2['default'][method]();
// only care about the segment data, not the flv header
// ('vjs_flashEncodedData_'.length === 21)
if (method.substr(0, 21) === 'vjs_flashEncodedData_') {
var call = {
callee: 'vjs_appendChunkReady',
arguments: [_globalWindow2['default'].atob(chunk).split('').map(function (c) {
return c.charCodeAt(0);
})]
};
_this2.swfCalls.push(call);
}
}, 1);
};
swfObj.vjs_adjustCurrentTime = function (value) {
_this2.swfCalls.push({ call: 'adjustCurrentTime', value: value });
};
/* eslint-enable camelcase */
this.mediaSource.trigger({
type: 'sourceopen',
swfId: swfObj.id
});
fakeSTO();
},
afterEach: function afterEach() {
// Undo every global patched in beforeEach, in rough reverse order.
_globalWindow2['default'].MediaSource = this.oldMediaSource;
_globalWindow2['default'].WebKitMediaSource = _globalWindow2['default'].MediaSource;
this.Flash.isSupported = this.oldFlashSupport;
this.Flash.canPlaySource = this.oldCanPlay;
_muxJs2['default'].flv.Transmuxer.postMessage = this.oldFlashTransmuxerPostMessage;
_muxJs2['default'].flv.getFlvHeader = this.oldGetFlvHeader;
this.player.dispose();
this.clock.restore();
this.swfCalls = [];
unfakeSTO();
}
});
_qunit2['default'].test('raises an exception for unrecognized MIME types', function () {
try {
this.mediaSource.addSourceBuffer('video/garbage');
} catch (e) {
_qunit2['default'].ok(e, 'an error was thrown');
return;
}
_qunit2['default'].ok(false, 'no error was thrown');
});
_qunit2['default'].test('creates FlashSourceBuffers for video/mp2t', function () {
_qunit2['default'].ok(this.mediaSource.addSourceBuffer('video/mp2t') instanceof _srcFlashSourceBuffer2['default'], 'create source buffer');
});
_qunit2['default'].test('creates FlashSourceBuffers for audio/mp2t', function () {
_qunit2['default'].ok(this.mediaSource.addSourceBuffer('audio/mp2t') instanceof _srcFlashSourceBuffer2['default'], 'create source buffer');
});
_qunit2['default'].test('waits for the next tick to append', function () {
var sourceBuffer = this.mediaSource.addSourceBuffer('video/mp2t');
sourceBuffer.transmuxer_.postMessage = postMessage_;
_qunit2['default'].equal(this.swfCalls.length, 1, 'made one call on init');
_qunit2['default'].equal(this.swfCalls[0], 'load', 'called load');
sourceBuffer.appendBuffer(new Uint8Array([0, 1]));
// no fake timers were run, so the append must not have reached the SWF yet
this.swfCalls = appendCalls(this.swfCalls);
_qunit2['default'].strictEqual(this.swfCalls.length, 0, 'no appends were made');
});
_qunit2['default'].test('passes bytes to Flash', function () {
var sourceBuffer = this.mediaSource.addSourceBuffer('video/mp2t');
sourceBuffer.transmuxer_.postMessage = postMessage_;
this.swfCalls.length = 0;
sourceBuffer.appendBuffer(new Uint8Array([0, 1]));
// one drain for the fake transmuxer callback, one for the chunked append
timers.runAll();
timers.runAll();
_qunit2['default'].ok(this.swfCalls.length, 'the SWF was called');
this.swfCalls = appendCalls(this.swfCalls);
_qunit2['default'].strictEqual(this.swfCalls[0].callee, 'vjs_appendChunkReady', 'called vjs_appendChunkReady');
_qunit2['default'].deepEqual(this.swfCalls[0].arguments[0], [0, 1], 'passed the base64 encoded data');
});
_qunit2['default'].test('passes chunked bytes to Flash', function () {
var sourceBuffer = this.mediaSource.addSourceBuffer('video/mp2t');
var oldChunkSize = _srcFlashConstants2['default'].BYTES_PER_CHUNK;
sourceBuffer.transmuxer_.postMessage = postMessage_;
// shrink the chunk size so a 5-byte append is split into 3 chunks
_srcFlashConstants2['default'].BYTES_PER_CHUNK = 2;
this.swfCalls.length = 0;
sourceBuffer.appendBuffer(new Uint8Array([0, 1, 2, 3, 4]));
timers.runAll();
_qunit2['default'].ok(this.swfCalls.length, 'the SWF was called');
this.swfCalls = appendCalls(this.swfCalls);
_qunit2['default'].equal(this.swfCalls.length, 3, 'the SWF received 3 chunks');
_qunit2['default'].strictEqual(this.swfCalls[0].callee, 'vjs_appendChunkReady', 'called vjs_appendChunkReady');
_qunit2['default'].deepEqual(this.swfCalls[0].arguments[0], [0, 1], 'passed the base64 encoded data');
_qunit2['default'].deepEqual(this.swfCalls[1].arguments[0], [2, 3], 'passed the base64 encoded data');
_qunit2['default'].deepEqual(this.swfCalls[2].arguments[0], [4], 'passed the base64 encoded data');
_srcFlashConstants2['default'].BYTES_PER_CHUNK = oldChunkSize;
});
_qunit2['default'].test('clears the SWF on seeking', function () {
var aborts = 0;
this.mediaSource.addSourceBuffer('video/mp2t');
// track calls to abort()
/* eslint-disable camelcase */
this.mediaSource.swfObj.vjs_abort = function () {
aborts++;
};
/* eslint-enable camelcase */
this.mediaSource.tech_.trigger('seeking');
_qunit2['default'].strictEqual(1, aborts, 'aborted pending buffer');
});
_qunit2['default'].test('drops tags before currentTime when seeking', function () {
var sourceBuffer = this.mediaSource.addSourceBuffer('video/mp2t');
var i = 10;
var currentTime = undefined;
var tags_ = [];
sourceBuffer.transmuxer_.postMessage = postMessage_;
this.mediaSource.tech_.currentTime = function () {
return currentTime;
};
// push a tag into the buffer to establish the starting PTS value
currentTime = 0;
sourceBuffer.transmuxer_.onmessage(createDataMessage([{
pts: 19 * 1000,
bytes: new Uint8Array(1)
}]));
timers.runAll();
sourceBuffer.appendBuffer(new Uint8Array(10));
timers.runAll();
// mock out a new segment of FLV tags, starting 10s after the
// starting PTS value
while (i--) {
tags_.unshift({
pts: i * 1000 + 29 * 1000,
bytes: new Uint8Array([i])
});
}
var dataMessage = createDataMessage(tags_);
// mock gop start at seek point
dataMessage.data.segment.tags.videoTags[7].keyFrame = true;
sourceBuffer.transmuxer_.onmessage(dataMessage);
// seek to 7 seconds into the new segment
this.mediaSource.tech_.seeking = function () {
return true;
};
currentTime = 10 + 7;
this.mediaSource.tech_.trigger('seeking');
sourceBuffer.appendBuffer(new Uint8Array(10));
this.swfCalls.length = 0;
timers.runAll();
_qunit2['default'].deepEqual(this.swfCalls[0].arguments[0], [7, 8, 9], 'three tags are appended');
});
_qunit2['default'].test('drops audio and video (complete gops) tags before the buffered end always', function () {
var sourceBuffer = this.mediaSource.addSourceBuffer('video/mp2t');
var endTime = undefined;
var videoTags_ = [];
var audioTags_ = [];
sourceBuffer.transmuxer_.postMessage = postMessage_;
this.mediaSource.tech_.buffered = function () {
return _videoJs2['default'].createTimeRange([[0, endTime]]);
};
// push a tag into the buffer to establish the starting PTS value
endTime = 0;
// mock buffering 17 seconds of data so flash source buffer internal end of buffer
// tracking is accurate
var i = 17;
while (i--) {
videoTags_.unshift({
pts: i * 1000 + 19 * 1000,
bytes: new Uint8Array(1)
});
}
i = 17;
while (i--) {
audioTags_.unshift({
pts: i * 1000 + 19 * 1000,
bytes: new Uint8Array(1)
});
}
var dataMessage = createDataMessage(videoTags_, audioTags_);
sourceBuffer.transmuxer_.onmessage(dataMessage);
timers.runAll();
sourceBuffer.appendBuffer(new Uint8Array(10));
timers.runAll();
i = 10;
videoTags_ = [];
audioTags_ = [];
// mock out a new segment of FLV tags, starting 10s after the
// starting PTS value
while (i--) {
videoTags_.unshift({
pts: i * 1000 + 29 * 1000,
bytes: new Uint8Array([i])
});
}
i = 10;
while (i--) {
audioTags_.unshift({
pts: i * 1000 + 29 * 1000,
bytes: new Uint8Array([i + 100])
});
}
dataMessage = createDataMessage(videoTags_, audioTags_);
// mark the GOP boundaries in the new video tags
dataMessage.data.segment.tags.videoTags[0].keyFrame = true;
dataMessage.data.segment.tags.videoTags[3].keyFrame = true;
dataMessage.data.segment.tags.videoTags[6].keyFrame = true;
dataMessage.data.segment.tags.videoTags[8].keyFrame = true;
sourceBuffer.transmuxer_.onmessage(dataMessage);
endTime = 10 + 7;
sourceBuffer.appendBuffer(new Uint8Array(10));
this.swfCalls.length = 0;
timers.runAll();
// end of buffer is 17 seconds
// frames 0-6 for video have pts values less than 17 seconds
// since frame 6 is a key frame, it should still be appended to preserve the entire gop
// so we should have appended frames 6 - 9
// frames 100-106 for audio have pts values less than 17 seconds
// but since we appended an extra video frame, we should also append audio frames
// to fill in the gap in audio. This means we should be appending audio frames
// 106, 107, 108, 109
// Append order is 6, 7, 107, 8, 108, 9, 109 since we order tags based on dts value
_qunit2['default'].deepEqual(this.swfCalls[0].arguments[0], [6, 106, 7, 107, 8, 108, 9, 109], 'audio and video tags properly dropped');
});
_qunit2['default'].test('seeking into the middle of a GOP adjusts currentTime to the start of the GOP', function () {
var sourceBuffer = this.mediaSource.addSourceBuffer('video/mp2t');
var i = 10;
var currentTime = undefined;
var tags_ = [];
sourceBuffer.transmuxer_.postMessage = postMessage_;
this.mediaSource.tech_.currentTime = function () {
return currentTime;
};
// push a tag into the buffer to establish the starting PTS value
currentTime = 0;
var dataMessage = createDataMessage([{
pts: 19 * 1000,
bytes: new Uint8Array(1)
}]);
sourceBuffer.transmuxer_.onmessage(dataMessage);
timers.runAll();
sourceBuffer.appendBuffer(new Uint8Array(10));
timers.runAll();
// mock out a new segment of FLV tags, starting 10s after the
// starting PTS value
while (i--) {
tags_.unshift({
pts: i * 1000 + 29 * 1000,
bytes: new Uint8Array([i])
});
}
dataMessage = createDataMessage(tags_);
// mock the GOP structure
dataMessage.data.segment.tags.videoTags[0].keyFrame = true;
dataMessage.data.segment.tags.videoTags[3].keyFrame = true;
dataMessage.data.segment.tags.videoTags[5].keyFrame = true;
dataMessage.data.segment.tags.videoTags[8].keyFrame = true;
sourceBuffer.transmuxer_.onmessage(dataMessage);
// seek to 7 seconds into the new segment
this.mediaSource.tech_.seeking = function () {
return true;
};
currentTime = 10 + 7;
this.mediaSource.tech_.trigger('seeking');
sourceBuffer.appendBuffer(new Uint8Array(10));
this.swfCalls.length = 0;
timers.runAll();
// the seek target (17s) sits inside the GOP starting at tag 5 (15s), so
// playback is pulled back to the key frame
_qunit2['default'].deepEqual(this.swfCalls[0], { call: 'adjustCurrentTime', value: 15 });
_qunit2['default'].deepEqual(this.swfCalls[1].arguments[0], [5, 6, 7, 8, 9], '5 tags are appended');
});
_qunit2['default'].test('GOP trimming accounts for metadata tags prepended to key frames by mux.js', function () {
var sourceBuffer = this.mediaSource.addSourceBuffer('video/mp2t');
var i = 10;
var currentTime = undefined;
var tags_ = [];
sourceBuffer.transmuxer_.postMessage = postMessage_;
this.mediaSource.tech_.currentTime = function () {
return currentTime;
};
// push a tag into the buffer to establish the starting PTS value
currentTime = 0;
var dataMessage = createDataMessage([{
pts: 19 * 1000,
bytes: new Uint8Array(1)
}]);
sourceBuffer.transmuxer_.onmessage(dataMessage);
timers.runAll();
sourceBuffer.appendBuffer(new Uint8Array(10));
timers.runAll();
// mock out a new segment of FLV tags, starting 10s after the
// starting PTS value
while (i--) {
tags_.unshift({
pts: i * 1000 + 29 * 1000,
bytes: new Uint8Array([i])
});
}
// add in the metadata tags
tags_.splice(8, 0, {
pts: tags_[8].pts,
bytes: new Uint8Array([8])
}, {
pts: tags_[8].pts,
bytes: new Uint8Array([8])
});
tags_.splice(5, 0, {
pts: tags_[5].pts,
bytes: new Uint8Array([5])
}, {
pts: tags_[5].pts,
bytes: new Uint8Array([5])
});
tags_.splice(0, 0, {
pts: tags_[0].pts,
bytes: new Uint8Array([0])
}, {
pts: tags_[0].pts,
bytes: new Uint8Array([0])
});
dataMessage = createDataMessage(tags_);
// mock the GOP structure + metadata tags
// if we see a metadata tag, that means the next tag will also be a metadata tag with
// keyFrame true and the tag after that will be the keyFrame
// e.g.
// { keyFrame: false, metaDataTag: true},
// { keyFrame: true, metaDataTag: true},
// { keyFrame: true, metaDataTag: false}
dataMessage.data.segment.tags.videoTags[0].metaDataTag = true;
dataMessage.data.segment.tags.videoTags[1].metaDataTag = true;
dataMessage.data.segment.tags.videoTags[1].keyFrame = true;
dataMessage.data.segment.tags.videoTags[2].keyFrame = true;
// no metadata tags in front of this key to test the case where mux.js does not prepend
// the metadata tags
dataMessage.data.segment.tags.videoTags[5].keyFrame = true;
dataMessage.data.segment.tags.videoTags[7].metaDataTag = true;
dataMessage.data.segment.tags.videoTags[8].metaDataTag = true;
dataMessage.data.segment.tags.videoTags[8].keyFrame = true;
dataMessage.data.segment.tags.videoTags[9].keyFrame = true;
dataMessage.data.segment.tags.videoTags[12].metaDataTag = true;
dataMessage.data.segment.tags.videoTags[13].metaDataTag = true;
dataMessage.data.segment.tags.videoTags[13].keyFrame = true;
dataMessage.data.segment.tags.videoTags[14].keyFrame = true;
sourceBuffer.transmuxer_.onmessage(dataMessage);
// seek to 7 seconds into the new segment
this.mediaSource.tech_.seeking = function () {
return true;
};
currentTime = 10 + 7;
this.mediaSource.tech_.trigger('seeking');
sourceBuffer.appendBuffer(new Uint8Array(10));
this.swfCalls.length = 0;
timers.runAll();
_qunit2['default'].deepEqual(this.swfCalls[0], { call: 'adjustCurrentTime', value: 15 });
_qunit2['default'].deepEqual(this.swfCalls[1].arguments[0], [5, 5, 5, 6, 7, 8, 8, 8, 9], '10 tags are appended, 4 of which are metadata tags');
});
// If the seek target's PTS lies beyond every tag in the segment, trimming
// should drop the entire segment and make no calls into the SWF at all.
_qunit2['default'].test('drops all tags if target pts append time does not fall within segment', function () {
var sourceBuffer = this.mediaSource.addSourceBuffer('video/mp2t');
var i = 10;
var currentTime = undefined;
var tags_ = [];
this.mediaSource.tech_.currentTime = function () {
return currentTime;
};
sourceBuffer.transmuxer_.postMessage = postMessage_;
// push a tag into the buffer to establish the starting PTS value
currentTime = 0;
var dataMessage = createDataMessage([{
pts: 19 * 1000,
bytes: new Uint8Array(1)
}]);
sourceBuffer.transmuxer_.onmessage(dataMessage);
timers.runAll();
sourceBuffer.appendBuffer(new Uint8Array(10));
timers.runAll();
// mock out a new segment of FLV tags, starting 10s after the
// starting PTS value
// NOTE(review): the tags here start at the same 19s base PTS, so the 10 + 7s
// seek target lands past the final tag — that is the condition under test
while (i--) {
tags_.unshift({
pts: i * 1000 + 19 * 1000,
bytes: new Uint8Array([i])
});
}
dataMessage = createDataMessage(tags_);
// mock the GOP structure
dataMessage.data.segment.tags.videoTags[0].keyFrame = true;
dataMessage.data.segment.tags.videoTags[3].keyFrame = true;
dataMessage.data.segment.tags.videoTags[5].keyFrame = true;
dataMessage.data.segment.tags.videoTags[8].keyFrame = true;
sourceBuffer.transmuxer_.onmessage(dataMessage);
// seek to 7 seconds into the new segment
this.mediaSource.tech_.seeking = function () {
return true;
};
currentTime = 10 + 7;
this.mediaSource.tech_.trigger('seeking');
sourceBuffer.appendBuffer(new Uint8Array(10));
this.swfCalls.length = 0;
timers.runAll();
_qunit2['default'].equal(this.swfCalls.length, 0, 'dropped all tags and made no swf calls');
});
// Seeking across a discontinuity: timestampOffset establishes the media
// timeline position of the new segment, and seek trimming must compute the
// target PTS relative to that offset rather than the old base PTS.
_qunit2['default'].test('seek targeting accounts for changing timestampOffsets', function () {
var sourceBuffer = this.mediaSource.addSourceBuffer('video/mp2t');
var i = 10;
var tags_ = [];
var currentTime = undefined;
this.mediaSource.tech_.currentTime = function () {
return currentTime;
};
sourceBuffer.transmuxer_.postMessage = postMessage_;
var dataMessage = createDataMessage([{
pts: 19 * 1000,
bytes: new Uint8Array(1)
}]);
// push a tag into the buffer to establish the starting PTS value
currentTime = 0;
sourceBuffer.transmuxer_.onmessage(dataMessage);
timers.runAll();
// to seek across a discontinuity:
// 1. set the timestamp offset to the media timeline position for
// the start of the segment
// 2. set currentTime to the desired media timeline position
sourceBuffer.timestampOffset = 22;
currentTime = sourceBuffer.timestampOffset + 3.5;
this.mediaSource.tech_.seeking = function () {
return true;
};
// the new segment FLV tags are at disjoint PTS positions
while (i--) {
tags_.unshift({
// (101 * 1000) !== the old PTS offset
pts: i * 1000 + 101 * 1000,
bytes: new Uint8Array([i + sourceBuffer.timestampOffset])
});
}
dataMessage = createDataMessage(tags_);
// mock gop start at seek point
dataMessage.data.segment.tags.videoTags[3].keyFrame = true;
sourceBuffer.transmuxer_.onmessage(dataMessage);
this.mediaSource.tech_.trigger('seeking');
this.swfCalls.length = 0;
timers.runAll();
// 25 = timestampOffset (22) + 3s into the GOP that starts at the key frame
_qunit2['default'].equal(this.swfCalls[0].value, 25, 'adjusted current time');
_qunit2['default'].deepEqual(this.swfCalls[1].arguments[0], [25, 26, 27, 28, 29, 30, 31], 'filtered the appended tags');
});
// endOfStream() after the final append should flip the emulated readyState to
// 'ended' and forward exactly one endOfStream call to the SWF.
_qunit2['default'].test('calling endOfStream sets mediaSource readyState to ended', function () {
var _this3 = this;
var sourceBuffer = this.mediaSource.addSourceBuffer('video/mp2t');
sourceBuffer.transmuxer_.postMessage = postMessage_;
/* eslint-disable camelcase */
this.mediaSource.swfObj.vjs_endOfStream = function () {
_this3.swfCalls.push('endOfStream');
};
/* eslint-enable camelcase */
// ending the stream from updateend mirrors how a real player finishes playback
sourceBuffer.addEventListener('updateend', function () {
_this3.mediaSource.endOfStream();
});
this.swfCalls.length = 0;
sourceBuffer.appendBuffer(new Uint8Array([0, 1]));
timers.runAll();
_qunit2['default'].strictEqual(sourceBuffer.mediaSource_.readyState, 'ended', 'readyState is \'ended\'');
_qunit2['default'].strictEqual(this.swfCalls.length, 2, 'made two calls to swf');
_qunit2['default'].deepEqual(this.swfCalls.shift().arguments[0], [0, 1], 'contains the data');
// the remaining entry is the raw 'endOfStream' string pushed by the stub above
_qunit2['default'].ok(this.swfCalls.shift().indexOf('endOfStream') === 0, 'the second call should be for the updateend');
_qunit2['default'].strictEqual(timers.length, 0, 'no more appends are scheduled');
});
// Appending after endOfStream() should transition the media source back to
// 'open' and allow further appends to reach the SWF.
_qunit2['default'].test('opens the stream on sourceBuffer.appendBuffer after endOfStream', function () {
var _this4 = this;
var sourceBuffer = this.mediaSource.addSourceBuffer('video/mp2t');
// one-shot updateend handler: end the stream, then detach itself so the
// second append does not end the stream again
var foo = function foo() {
_this4.mediaSource.endOfStream();
sourceBuffer.removeEventListener('updateend', foo);
};
sourceBuffer.transmuxer_.postMessage = postMessage_;
/* eslint-disable camelcase */
this.mediaSource.swfObj.vjs_endOfStream = function () {
_this4.swfCalls.push('endOfStream');
};
/* eslint-enable camelcase */
sourceBuffer.addEventListener('updateend', foo);
this.swfCalls.length = 0;
sourceBuffer.appendBuffer(new Uint8Array([0, 1]));
timers.runAll();
_qunit2['default'].strictEqual(this.swfCalls.length, 2, 'made two calls to swf');
_qunit2['default'].deepEqual(this.swfCalls.shift().arguments[0], [0, 1], 'contains the data');
_qunit2['default'].equal(this.swfCalls.shift(), 'endOfStream', 'the second call should be for the updateend');
sourceBuffer.appendBuffer(new Uint8Array([2, 3]));
// remove previous video pts save because mock appends don't have actual timing data
sourceBuffer.videoBufferEnd_ = NaN;
timers.runAll();
_qunit2['default'].strictEqual(this.swfCalls.length, 1, 'made one more append');
_qunit2['default'].deepEqual(this.swfCalls.shift().arguments[0], [2, 3], 'contains the third and fourth bytes');
_qunit2['default'].strictEqual(sourceBuffer.mediaSource_.readyState, 'open', 'The streams should be open if more bytes are appended to an "ended" stream');
_qunit2['default'].strictEqual(timers.length, 0, 'no more appends are scheduled');
});
  10692. _qunit2['default'].test('abort() clears any buffered input', function () {
  10693. var sourceBuffer = this.mediaSource.addSourceBuffer('video/mp2t');
  10694. sourceBuffer.transmuxer_.postMessage = postMessage_;
  10695. this.swfCalls.length = 0;
  10696. sourceBuffer.appendBuffer(new Uint8Array([0]));
  10697. sourceBuffer.abort();
  10698. timers.pop()();
  10699. _qunit2['default'].strictEqual(this.swfCalls.length, 1, 'called the swf');
  10700. _qunit2['default'].strictEqual(this.swfCalls[0], 'abort', 'invoked abort');
  10701. });
  10702. // requestAnimationFrame is heavily throttled or unscheduled when
  10703. // the browser tab running contrib-media-sources is in a background
  10704. // tab. If that happens, video data can continuously build up in
  10705. // memory and cause the tab or browser to crash.
  10706. _qunit2['default'].test('does not use requestAnimationFrame', function () {
  10707. var oldRFA = _globalWindow2['default'].requestAnimationFrame;
  10708. var requests = 0;
  10709. var sourceBuffer = undefined;
  10710. _globalWindow2['default'].requestAnimationFrame = function () {
  10711. requests++;
  10712. };
  10713. sourceBuffer = this.mediaSource.addSourceBuffer('video/mp2t');
  10714. sourceBuffer.transmuxer_.postMessage = postMessage_;
  10715. sourceBuffer.appendBuffer(new Uint8Array([0, 1, 2, 3]));
  10716. while (timers.length) {
  10717. timers.pop()();
  10718. }
  10719. _qunit2['default'].equal(requests, 0, 'no calls to requestAnimationFrame were made');
  10720. _globalWindow2['default'].requestAnimationFrame = oldRFA;
  10721. });
  10722. _qunit2['default'].test('updating is true while an append is in progress', function () {
  10723. var sourceBuffer = this.mediaSource.addSourceBuffer('video/mp2t');
  10724. var ended = false;
  10725. sourceBuffer.transmuxer_.postMessage = postMessage_;
  10726. sourceBuffer.addEventListener('updateend', function () {
  10727. ended = true;
  10728. });
  10729. sourceBuffer.appendBuffer(new Uint8Array([0, 1]));
  10730. _qunit2['default'].equal(sourceBuffer.updating, true, 'updating is set');
  10731. while (!ended) {
  10732. timers.pop()();
  10733. }
  10734. _qunit2['default'].equal(sourceBuffer.updating, false, 'updating is unset');
  10735. });
  10736. _qunit2['default'].test('throws an error if append is called while updating', function () {
  10737. var sourceBuffer = this.mediaSource.addSourceBuffer('video/mp2t');
  10738. sourceBuffer.appendBuffer(new Uint8Array([0, 1]));
  10739. sourceBuffer.transmuxer_.postMessage = postMessage_;
  10740. _qunit2['default'].throws(function () {
  10741. sourceBuffer.appendBuffer(new Uint8Array([0, 1]));
  10742. }, function (e) {
  10743. return e.name === 'InvalidStateError' && e.code === _globalWindow2['default'].DOMException.INVALID_STATE_ERR;
  10744. }, 'threw an InvalidStateError');
  10745. });
  10746. _qunit2['default'].test('stops updating if abort is called', function () {
  10747. var sourceBuffer = this.mediaSource.addSourceBuffer('video/mp2t');
  10748. var updateEnds = 0;
  10749. sourceBuffer.transmuxer_.postMessage = postMessage_;
  10750. sourceBuffer.addEventListener('updateend', function () {
  10751. updateEnds++;
  10752. });
  10753. sourceBuffer.appendBuffer(new Uint8Array([0, 1]));
  10754. sourceBuffer.abort();
  10755. _qunit2['default'].equal(sourceBuffer.updating, false, 'no longer updating');
  10756. _qunit2['default'].equal(updateEnds, 1, 'triggered updateend');
  10757. });
// The flash media source proxies duration to the SWF: a read issues a
// {attr: 'duration'} query; a write issues a read followed by the override.
_qunit2['default'].test('forwards duration overrides to the SWF', function () {
/* eslint-disable no-unused-vars */
var ignored = this.mediaSource.duration;
/* eslint-enable no-unused-vars */
// swfCalls[0] is setup traffic; [1] is the read triggered just above
_qunit2['default'].deepEqual(this.swfCalls[1], {
attr: 'duration'
}, 'requests duration from the SWF');
this.mediaSource.duration = 101.3;
// Setting a duration results in two calls to the swf
// Ignore the first call (this.swfCalls[2]) as it was just to get the
// current duration
_qunit2['default'].deepEqual(this.swfCalls[3], {
attr: 'duration', value: 101.3
}, 'set the duration override');
});
  10773. _qunit2['default'].test('returns NaN for duration before the SWF is ready', function () {
  10774. this.mediaSource.swfObj = null;
  10775. _qunit2['default'].ok(isNaN(this.mediaSource.duration), 'duration is NaN');
  10776. });
// The first tag of a segment establishes the base PTS; tags whose PTS falls
// before the seek target (relative to timestampOffset) must be dropped.
_qunit2['default'].test('calculates the base PTS for the media', function () {
var sourceBuffer = this.mediaSource.addSourceBuffer('video/mp2t');
var tags_ = [];
sourceBuffer.transmuxer_.postMessage = postMessage_;
// seek to 15 seconds
this.player.tech_.seeking = function () {
return true;
};
this.player.tech_.currentTime = function () {
return 15;
};
// FLV tags for this segment start at 10 seconds in the media
// timeline
tags_.push(
// zero in the media timeline is PTS 3
{ pts: (10 + 3) * 1000, bytes: new Uint8Array([10]) }, { pts: (15 + 3) * 1000, bytes: new Uint8Array([15]) });
var dataMessage = createDataMessage(tags_);
// mock gop start at seek point
dataMessage.data.segment.tags.videoTags[1].keyFrame = true;
sourceBuffer.transmuxer_.onmessage(dataMessage);
// let the source buffer know the segment start time
sourceBuffer.timestampOffset = 10;
this.swfCalls.length = 0;
timers.runAll();
_qunit2['default'].equal(this.swfCalls.length, 1, 'made a SWF call');
// only the tag at the 15s seek point survives trimming
_qunit2['default'].deepEqual(this.swfCalls[0].arguments[0], [15], 'dropped the early tag');
});
  10804. _qunit2['default'].test('remove fires update events', function () {
  10805. var sourceBuffer = this.mediaSource.addSourceBuffer('video/mp2t');
  10806. var events = [];
  10807. sourceBuffer.transmuxer_.postMessage = postMessage_;
  10808. sourceBuffer.on(['update', 'updateend'], function (event) {
  10809. events.push(event.type);
  10810. });
  10811. sourceBuffer.remove(0, 1);
  10812. _qunit2['default'].deepEqual(events, ['update', 'updateend'], 'fired update events');
  10813. _qunit2['default'].equal(sourceBuffer.updating, false, 'finished updating');
  10814. });
  10815. _qunit2['default'].test('passes endOfStream network errors to the tech', function () {
  10816. this.mediaSource.readyState = 'ended';
  10817. this.mediaSource.endOfStream('network');
  10818. _qunit2['default'].equal(this.player.tech_.error().code, 2, 'set a network error');
  10819. });
  10820. _qunit2['default'].test('passes endOfStream decode errors to the tech', function () {
  10821. this.mediaSource.readyState = 'ended';
  10822. this.mediaSource.endOfStream('decode');
  10823. _qunit2['default'].equal(this.player.tech_.error().code, 3, 'set a decode error');
  10824. });
  10825. _qunit2['default'].test('has addSeekableRange()', function () {
  10826. _qunit2['default'].ok(this.mediaSource.addSeekableRange_, 'has addSeekableRange_');
  10827. });
  10828. _qunit2['default'].test('fires loadedmetadata after first segment append', function () {
  10829. var loadedmetadataCount = 0;
  10830. this.mediaSource.tech_.on('loadedmetadata', function () {
  10831. return loadedmetadataCount++;
  10832. });
  10833. var sourceBuffer = this.mediaSource.addSourceBuffer('video/mp2t');
  10834. sourceBuffer.transmuxer_.postMessage = postMessage_;
  10835. _qunit2['default'].equal(loadedmetadataCount, 0, 'loadedmetadata not called on buffer creation');
  10836. sourceBuffer.appendBuffer(new Uint8Array([0, 1]));
  10837. _qunit2['default'].equal(loadedmetadataCount, 0, 'loadedmetadata not called on segment append');
  10838. timers.runAll();
  10839. _qunit2['default'].equal(loadedmetadataCount, 1, 'loadedmetadata fires after first append');
  10840. sourceBuffer.appendBuffer(new Uint8Array([0, 1]));
  10841. timers.runAll();
  10842. _qunit2['default'].equal(loadedmetadataCount, 1, 'loadedmetadata does not fire after second append');
  10843. });
  10844. }).call(this,typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {})
  10845. },{"../src/flash-constants":38,"../src/flash-source-buffer":40,"../src/videojs-contrib-media-sources.js":45,"global/document":2,"global/window":3,"mux.js":16}],51:[function(require,module,exports){
  10846. (function (global){
  10847. 'use strict';
  10848. function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { 'default': obj }; }
  10849. var _globalDocument = require('global/document');
  10850. var _globalDocument2 = _interopRequireDefault(_globalDocument);
  10851. var _globalWindow = require('global/window');
  10852. var _globalWindow2 = _interopRequireDefault(_globalWindow);
  10853. var _qunit = (typeof window !== "undefined" ? window['QUnit'] : typeof global !== "undefined" ? global['QUnit'] : null);
  10854. var _qunit2 = _interopRequireDefault(_qunit);
  10855. var _sinon = (typeof window !== "undefined" ? window['sinon'] : typeof global !== "undefined" ? global['sinon'] : null);
  10856. var _sinon2 = _interopRequireDefault(_sinon);
  10857. var _videoJs = (typeof window !== "undefined" ? window['videojs'] : typeof global !== "undefined" ? global['videojs'] : null);
  10858. var _videoJs2 = _interopRequireDefault(_videoJs);
  10859. var _srcHtmlMediaSource = require('../src/html-media-source');
  10860. var _srcHtmlMediaSource2 = _interopRequireDefault(_srcHtmlMediaSource);
  10861. var _srcVirtualSourceBuffer = require('../src/virtual-source-buffer');
  10862. // we disable this because browserify needs to include these files
  10863. // but the exports are not important
  10864. /* eslint-disable no-unused-vars */
  10865. var _srcVideojsContribMediaSourcesJs = require('../src/videojs-contrib-media-sources.js');
  10866. /* eslint-disable no-unused-vars */
// Module setup for the HTML (MSE) flavor of videojs-contrib-media-sources.
// window.MediaSource is replaced with a videojs EventTarget mock so tests can
// observe addSourceBuffer/appendBuffer without a real browser MediaSource.
_qunit2['default'].module('videojs-contrib-media-sources - HTML', {
beforeEach: function beforeEach() {
this.fixture = _globalDocument2['default'].getElementById('qunit-fixture');
this.video = _globalDocument2['default'].createElement('video');
this.fixture.appendChild(this.video);
this.source = _globalDocument2['default'].createElement('source');
this.player = (0, _videoJs2['default'])(this.video);
// add a fake source so that we can get this.player_ on sourceopen
this.url = 'fake.ts';
this.source.src = this.url;
this.video.appendChild(this.source);
// Mock the environment's timers because certain things - particularly
// player readiness - are asynchronous in video.js 5.
this.clock = _sinon2['default'].useFakeTimers();
// remember the real implementation so afterEach can restore it
this.oldMediaSource = _globalWindow2['default'].MediaSource || _globalWindow2['default'].WebKitMediaSource;
_globalWindow2['default'].MediaSource = _videoJs2['default'].extend(_videoJs2['default'].EventTarget, {
constructor: function constructor() {
// isNative lets tests confirm the wrapper instantiated this mock
this.isNative = true;
this.sourceBuffers = [];
this.duration = NaN;
},
addSourceBuffer: function addSourceBuffer(type) {
// each "native" buffer is itself an EventTarget with a no-op append
var buffer = new (_videoJs2['default'].extend(_videoJs2['default'].EventTarget, {
type: type,
appendBuffer: function appendBuffer() {}
}))();
this.sourceBuffers.push(buffer);
return buffer;
}
});
// claim support for every MIME type so addSourceBuffer never rejects
_globalWindow2['default'].MediaSource.isTypeSupported = function (mime) {
return true;
};
_globalWindow2['default'].WebKitMediaSource = _globalWindow2['default'].MediaSource;
},
afterEach: function afterEach() {
// undo the fake timers and the MediaSource mock installed in beforeEach
this.clock.restore();
this.player.dispose();
_globalWindow2['default'].MediaSource = this.oldMediaSource;
_globalWindow2['default'].WebKitMediaSource = _globalWindow2['default'].MediaSource;
}
});
  10909. _qunit2['default'].test('constructs a native MediaSource', function () {
  10910. _qunit2['default'].ok(new _videoJs2['default'].MediaSource().nativeMediaSource_.isNative, 'constructed a MediaSource');
  10911. });
  10912. var createDataMessage = function createDataMessage(type, typedArray, extraObject) {
  10913. var message = {
  10914. data: {
  10915. action: 'data',
  10916. segment: {
  10917. type: type,
  10918. data: typedArray.buffer,
  10919. initSegment: {
  10920. data: typedArray.buffer,
  10921. byteOffset: typedArray.byteOffset,
  10922. byteLength: typedArray.byteLength
  10923. }
  10924. },
  10925. byteOffset: typedArray.byteOffset,
  10926. byteLength: typedArray.byteLength
  10927. }
  10928. };
  10929. return Object.keys(extraObject || {}).reduce(function (obj, key) {
  10930. obj.data.segment[key] = extraObject[key];
  10931. return obj;
  10932. }, message);
  10933. };
// Create a WebWorker-style message that signals the transmuxer is done
// (the 'done' action tells the virtual source buffer to flush the pending
// segment data it has accumulated)
var doneMessage = {
data: {
action: 'done'
}
};
  10940. // send fake data to the transmuxer to trigger the creation of the
  10941. // native source buffers
  10942. var initializeNativeSourceBuffers = function initializeNativeSourceBuffers(sourceBuffer) {
  10943. // initialize an audio source buffer
  10944. sourceBuffer.transmuxer_.onmessage(createDataMessage('audio', new Uint8Array(1)));
  10945. // initialize a video source buffer
  10946. sourceBuffer.transmuxer_.onmessage(createDataMessage('video', new Uint8Array(1)));
  10947. // instruct the transmuxer to flush the "data" it has buffered so
  10948. // far
  10949. sourceBuffer.transmuxer_.onmessage(doneMessage);
  10950. };
  10951. _qunit2['default'].test('creates mp4 source buffers for mp2t segments', function () {
  10952. var mediaSource = new _videoJs2['default'].MediaSource();
  10953. var sourceBuffer = mediaSource.addSourceBuffer('video/mp2t');
  10954. initializeNativeSourceBuffers(sourceBuffer);
  10955. _qunit2['default'].ok(mediaSource.videoBuffer_, 'created a video buffer');
  10956. _qunit2['default'].equal(mediaSource.videoBuffer_.type, 'video/mp4;codecs="avc1.4d400d"', 'video buffer has the default codec');
  10957. _qunit2['default'].ok(mediaSource.audioBuffer_, 'created an audio buffer');
  10958. _qunit2['default'].equal(mediaSource.audioBuffer_.type, 'audio/mp4;codecs="mp4a.40.2"', 'audio buffer has the default codec');
  10959. _qunit2['default'].equal(mediaSource.sourceBuffers.length, 1, 'created one virtual buffer');
  10960. _qunit2['default'].equal(mediaSource.sourceBuffers[0], sourceBuffer, 'returned the virtual buffer');
  10961. _qunit2['default'].ok(sourceBuffer.transmuxer_, 'created a transmuxer');
  10962. });
  10963. _qunit2['default'].test('the terminate is called on the transmuxer when the media source is killed', function () {
  10964. var mediaSource = new _videoJs2['default'].MediaSource();
  10965. var sourceBuffer = mediaSource.addSourceBuffer('video/mp2t');
  10966. var terminates = 0;
  10967. sourceBuffer.transmuxer_ = {
  10968. terminate: function terminate() {
  10969. terminates++;
  10970. }
  10971. };
  10972. mediaSource.trigger('sourceclose');
  10973. _qunit2['default'].equal(terminates, 1, 'called terminate on transmux web worker');
  10974. });
  10975. _qunit2['default'].test('duration is faked when playing a live stream', function () {
  10976. var mediaSource = new _videoJs2['default'].MediaSource();
  10977. var sourceBuffer = mediaSource.addSourceBuffer('video/mp2t');
  10978. mediaSource.duration = Infinity;
  10979. mediaSource.nativeMediaSource_.duration = 100;
  10980. _qunit2['default'].equal(mediaSource.nativeMediaSource_.duration, 100, 'native duration was not set to infinity');
  10981. _qunit2['default'].equal(mediaSource.duration, Infinity, 'the MediaSource wrapper pretends it has an infinite duration');
  10982. });
  10983. _qunit2['default'].test('duration uses the underlying MediaSource\'s duration when not live', function () {
  10984. var mediaSource = new _videoJs2['default'].MediaSource();
  10985. var sourceBuffer = mediaSource.addSourceBuffer('video/mp2t');
  10986. mediaSource.duration = 100;
  10987. mediaSource.nativeMediaSource_.duration = 120;
  10988. _qunit2['default'].equal(mediaSource.duration, 120, 'the MediaSource wrapper returns the native duration');
  10989. });
// abort() on the virtual buffer must abort both native buffers, clear the
// updating flag, and send a single 'reset' message to the transmuxer.
_qunit2['default'].test('abort on the fake source buffer calls abort on the real ones', function () {
var mediaSource = new _videoJs2['default'].MediaSource();
var sourceBuffer = mediaSource.addSourceBuffer('video/mp2t');
var messages = [];
var aborts = 0;
initializeNativeSourceBuffers(sourceBuffer);
// capture transmuxer traffic only after initialization so just the reset shows up
sourceBuffer.transmuxer_.postMessage = function (message) {
messages.push(message);
};
sourceBuffer.bufferUpdating_ = true;
sourceBuffer.videoBuffer_.abort = function () {
aborts++;
};
sourceBuffer.audioBuffer_.abort = function () {
aborts++;
};
sourceBuffer.abort();
_qunit2['default'].equal(aborts, 2, 'called abort on both');
_qunit2['default'].equal(sourceBuffer.bufferUpdating_, false, 'set updating to false');
_qunit2['default'].equal(messages.length, 1, 'has one message');
_qunit2['default'].equal(messages[0].action, 'reset', 'reset called on transmuxer');
});
// remove() must delete in-band text track cues overlapping the removed range
// and forward the remove call to both native source buffers.
_qunit2['default'].test('calling remove deletes cues and invokes remove on any extant source buffers', function () {
var mediaSource = new _videoJs2['default'].MediaSource();
var sourceBuffer = mediaSource.addSourceBuffer('video/mp2t');
var removedCue = [];
var removes = 0;
initializeNativeSourceBuffers(sourceBuffer);
// fake caption track: one cue inside the removed range, one outside it
sourceBuffer.inbandTextTracks_ = {
CC1: {
removeCue: function removeCue(cue) {
removedCue.push(cue);
this.cues.splice(this.cues.indexOf(cue), 1);
},
cues: [{ startTime: 10, endTime: 20, text: 'delete me' }, { startTime: 0, endTime: 2, text: 'save me' }]
}
};
// count only remove calls that receive the exact (3, 10) range
mediaSource.videoBuffer_.remove = function (start, end) {
if (start === 3 && end === 10) {
removes++;
}
};
mediaSource.audioBuffer_.remove = function (start, end) {
if (start === 3 && end === 10) {
removes++;
}
};
sourceBuffer.remove(3, 10);
_qunit2['default'].equal(removes, 2, 'called remove on both sourceBuffers');
_qunit2['default'].equal(sourceBuffer.inbandTextTracks_.CC1.cues.length, 1, 'one cue remains after remove');
_qunit2['default'].equal(removedCue[0].text, 'delete me', 'the cue that overlapped the remove region was removed');
});
  11042. _qunit2['default'].test('calling remove property handles absence of cues (null)', function () {
  11043. var mediaSource = new _videoJs2['default'].MediaSource();
  11044. var sourceBuffer = mediaSource.addSourceBuffer('video/mp2t');
  11045. initializeNativeSourceBuffers(sourceBuffer);
  11046. sourceBuffer.inbandTextTracks_ = {
  11047. CC1: {
  11048. cues: null
  11049. }
  11050. };
  11051. mediaSource.videoBuffer_.remove = function (start, end) {
  11052. // pass
  11053. };
  11054. mediaSource.audioBuffer_.remove = function (start, end) {
  11055. // pass
  11056. };
  11057. // this call should not raise an exception
  11058. sourceBuffer.remove(3, 10);
  11059. _qunit2['default'].equal(sourceBuffer.inbandTextTracks_.CC1.cues, null, 'cues are still null');
  11060. });
// With a separate audio buffer registered, audio is disabled on the muxed
// buffer, so remove() should reach only the video buffer (cue cleanup still runs).
_qunit2['default'].test('removing doesn\'t happen with audio disabled', function () {
var mediaSource = new _videoJs2['default'].MediaSource();
var muxedBuffer = mediaSource.addSourceBuffer('video/mp2t');
// creating this audio buffer disables audio in the muxed one
var audioBuffer = mediaSource.addSourceBuffer('audio/mp2t; codecs="mp4a.40.2"');
var removedCue = [];
var removes = 0;
initializeNativeSourceBuffers(muxedBuffer);
// fake caption track: one cue inside the removed range, one outside it
muxedBuffer.inbandTextTracks_ = {
CC1: {
removeCue: function removeCue(cue) {
removedCue.push(cue);
this.cues.splice(this.cues.indexOf(cue), 1);
},
cues: [{ startTime: 10, endTime: 20, text: 'delete me' }, { startTime: 0, endTime: 2, text: 'save me' }]
}
};
// count only remove calls that receive the exact (3, 10) range
mediaSource.videoBuffer_.remove = function (start, end) {
if (start === 3 && end === 10) {
removes++;
}
};
mediaSource.audioBuffer_.remove = function (start, end) {
if (start === 3 && end === 10) {
removes++;
}
};
muxedBuffer.remove(3, 10);
_qunit2['default'].equal(removes, 1, 'called remove on only one source buffer');
_qunit2['default'].equal(muxedBuffer.inbandTextTracks_.CC1.cues.length, 1, 'one cue remains after remove');
_qunit2['default'].equal(removedCue[0].text, 'delete me', 'the cue that overlapped the remove region was removed');
});
  11093. _qunit2['default'].test('readyState delegates to the native implementation', function () {
  11094. var mediaSource = new _srcHtmlMediaSource2['default']();
  11095. _qunit2['default'].equal(mediaSource.readyState, mediaSource.nativeMediaSource_.readyState, 'readyStates are equal');
  11096. mediaSource.nativeMediaSource_.readyState = 'nonsense stuff';
  11097. _qunit2['default'].equal(mediaSource.readyState, mediaSource.nativeMediaSource_.readyState, 'readyStates are equal');
  11098. });
  11099. _qunit2['default'].test('addSeekableRange_ throws an error for media with known duration', function () {
  11100. var mediaSource = new _videoJs2['default'].MediaSource();
  11101. mediaSource.duration = 100;
  11102. _qunit2['default'].throws(function () {
  11103. mediaSource.addSeekableRange_(0, 100);
  11104. }, 'cannot add seekable range');
  11105. });
  11106. _qunit2['default'].test('addSeekableRange_ adds to the native MediaSource duration', function () {
  11107. var mediaSource = new _videoJs2['default'].MediaSource();
  11108. mediaSource.duration = Infinity;
  11109. mediaSource.addSeekableRange_(120, 240);
  11110. _qunit2['default'].equal(mediaSource.nativeMediaSource_.duration, 240, 'set native duration');
  11111. _qunit2['default'].equal(mediaSource.duration, Infinity, 'emulated duration');
  11112. mediaSource.addSeekableRange_(120, 220);
  11113. _qunit2['default'].equal(mediaSource.nativeMediaSource_.duration, 240, 'ignored the smaller range');
  11114. _qunit2['default'].equal(mediaSource.duration, Infinity, 'emulated duration');
  11115. });
// An exception thrown by the native appendBuffer should surface as an 'error'
// event on the player rather than escaping uncaught.
_qunit2['default'].test('appendBuffer error triggers on the player', function () {
var mediaSource = new _videoJs2['default'].MediaSource();
var sourceBuffer = mediaSource.addSourceBuffer('video/mp2t');
var error = false;
mediaSource.player_ = this.player;
initializeNativeSourceBuffers(sourceBuffer);
// make every native video append blow up
sourceBuffer.videoBuffer_.appendBuffer = function () {
throw new Error();
};
this.player.on('error', function () {
return error = true;
});
// send fake data to the source buffer from the transmuxer to append to native buffer
// initializeNativeSourceBuffers does the same thing to trigger the creation of
// native source buffers.
var fakeTransmuxerMessage = initializeNativeSourceBuffers;
fakeTransmuxerMessage(sourceBuffer);
// appends happen asynchronously via the (sinon-faked) timers
this.clock.tick(1);
_qunit2['default'].ok(error, 'error triggered on player');
});
  11136. _qunit2['default'].test('transmuxes mp2t segments', function () {
  11137. var mp2tSegments = [];
  11138. var mp4Segments = [];
  11139. var data = new Uint8Array(1);
  11140. var mediaSource = undefined;
  11141. var sourceBuffer = undefined;
  11142. mediaSource = new _videoJs2['default'].MediaSource();
  11143. sourceBuffer = mediaSource.addSourceBuffer('video/mp2t');
  11144. sourceBuffer.transmuxer_.postMessage = function (segment) {
  11145. if (segment.action === 'push') {
  11146. var buffer = new Uint8Array(segment.data, segment.byteOffset, segment.byteLength);
  11147. mp2tSegments.push(buffer);
  11148. }
  11149. };
  11150. sourceBuffer.concatAndAppendSegments_ = function (segmentObj, destinationBuffer) {
  11151. mp4Segments.push(segmentObj);
  11152. };
  11153. sourceBuffer.appendBuffer(data);
  11154. _qunit2['default'].equal(mp2tSegments.length, 1, 'transmuxed one segment');
  11155. _qunit2['default'].equal(mp2tSegments[0].length, 1, 'did not alter the segment');
  11156. _qunit2['default'].equal(mp2tSegments[0][0], data[0], 'did not alter the segment');
  11157. // an init segment
  11158. sourceBuffer.transmuxer_.onmessage(createDataMessage('video', new Uint8Array(1)));
  11159. // a media segment
  11160. sourceBuffer.transmuxer_.onmessage(createDataMessage('audio', new Uint8Array(1)));
  11161. // Segments are concatenated
  11162. _qunit2['default'].equal(mp4Segments.length, 0, 'segments are not appended until after the `done` message');
  11163. // send `done` message
  11164. sourceBuffer.transmuxer_.onmessage(doneMessage);
  11165. // Segments are concatenated
  11166. _qunit2['default'].equal(mp4Segments.length, 2, 'appended the segments');
  11167. });
  11168. _qunit2['default'].test('handles typed-arrays that are subsets of their underlying buffer', function () {
  11169. var mp2tSegments = [];
  11170. var mp4Segments = [];
  11171. var dataBuffer = new Uint8Array([0, 1, 2, 3, 4, 5, 6, 7, 8, 9]);
  11172. var data = dataBuffer.subarray(5, 7);
  11173. var mediaSource = undefined;
  11174. var sourceBuffer = undefined;
  11175. mediaSource = new _videoJs2['default'].MediaSource();
  11176. sourceBuffer = mediaSource.addSourceBuffer('video/mp2t');
  11177. sourceBuffer.transmuxer_.postMessage = function (segment) {
  11178. if (segment.action === 'push') {
  11179. var buffer = new Uint8Array(segment.data, segment.byteOffset, segment.byteLength);
  11180. mp2tSegments.push(buffer);
  11181. }
  11182. };
  11183. sourceBuffer.concatAndAppendSegments_ = function (segmentObj, destinationBuffer) {
  11184. mp4Segments.push(segmentObj.segments[0]);
  11185. };
  11186. sourceBuffer.appendBuffer(data);
  11187. _qunit2['default'].equal(mp2tSegments.length, 1, 'emitted the fragment');
  11188. _qunit2['default'].equal(mp2tSegments[0].length, 2, 'correctly handled a typed-array that is a subset');
  11189. _qunit2['default'].equal(mp2tSegments[0][0], 5, 'fragment contains the correct first byte');
  11190. _qunit2['default'].equal(mp2tSegments[0][1], 6, 'fragment contains the correct second byte');
  11191. // an init segment
  11192. sourceBuffer.transmuxer_.onmessage(createDataMessage('video', data));
  11193. // Segments are concatenated
  11194. _qunit2['default'].equal(mp4Segments.length, 0, 'segments are not appended until after the `done` message');
  11195. // send `done` message
  11196. sourceBuffer.transmuxer_.onmessage(doneMessage);
  11197. // Segments are concatenated
  11198. _qunit2['default'].equal(mp4Segments.length, 1, 'emitted the fragment');
  11199. _qunit2['default'].equal(mp4Segments[0].length, 2, 'correctly handled a typed-array that is a subset');
  11200. _qunit2['default'].equal(mp4Segments[0][0], 5, 'fragment contains the correct first byte');
  11201. _qunit2['default'].equal(mp4Segments[0][1], 6, 'fragment contains the correct second byte');
  11202. });
  11203. _qunit2['default'].test('only appends audio init segment for first segment or on audio/media changes', function () {
  11204. var mp4Segments = [];
  11205. var initBuffer = new Uint8Array([0, 1]);
  11206. var dataBuffer = new Uint8Array([2, 3]);
  11207. var mediaSource = undefined;
  11208. var sourceBuffer = undefined;
  11209. mediaSource = new _videoJs2['default'].MediaSource();
  11210. sourceBuffer = mediaSource.addSourceBuffer('video/mp2t');
  11211. sourceBuffer.audioDisabled_ = false;
  11212. mediaSource.player_ = this.player;
  11213. mediaSource.url_ = this.url;
  11214. mediaSource.trigger('sourceopen');
  11215. sourceBuffer.concatAndAppendSegments_ = function (segmentObj, destinationBuffer) {
  11216. var segment = segmentObj.segments.reduce(function (seg, arr) {
  11217. return seg.concat(Array.from(arr));
  11218. }, []);
  11219. mp4Segments.push(segment);
  11220. };
  11221. _qunit2['default'].ok(sourceBuffer.appendAudioInitSegment_, 'will append init segment next');
  11222. // an init segment
  11223. sourceBuffer.transmuxer_.onmessage(createDataMessage('audio', dataBuffer, {
  11224. initSegment: {
  11225. data: initBuffer.buffer,
  11226. byteOffset: initBuffer.byteOffset,
  11227. byteLength: initBuffer.byteLength
  11228. }
  11229. }));
  11230. // Segments are concatenated
  11231. _qunit2['default'].equal(mp4Segments.length, 0, 'segments are not appended until after the `done` message');
  11232. // send `done` message
  11233. sourceBuffer.transmuxer_.onmessage(doneMessage);
  11234. // Segments are concatenated
  11235. _qunit2['default'].equal(mp4Segments.length, 1, 'emitted the fragment');
  11236. // Contains init segment on first segment
  11237. _qunit2['default'].equal(mp4Segments[0][0], 0, 'fragment contains the correct first byte');
  11238. _qunit2['default'].equal(mp4Segments[0][1], 1, 'fragment contains the correct second byte');
  11239. _qunit2['default'].equal(mp4Segments[0][2], 2, 'fragment contains the correct third byte');
  11240. _qunit2['default'].equal(mp4Segments[0][3], 3, 'fragment contains the correct fourth byte');
  11241. _qunit2['default'].ok(!sourceBuffer.appendAudioInitSegment_, 'will not append init segment next');
  11242. dataBuffer = new Uint8Array([4, 5]);
  11243. sourceBuffer.transmuxer_.onmessage(createDataMessage('audio', dataBuffer, {
  11244. initSegment: {
  11245. data: initBuffer.buffer,
  11246. byteOffset: initBuffer.byteOffset,
  11247. byteLength: initBuffer.byteLength
  11248. }
  11249. }));
  11250. sourceBuffer.transmuxer_.onmessage(doneMessage);
  11251. _qunit2['default'].equal(mp4Segments.length, 2, 'emitted the fragment');
  11252. // does not contain init segment on next segment
  11253. _qunit2['default'].equal(mp4Segments[1][0], 4, 'fragment contains the correct first byte');
  11254. _qunit2['default'].equal(mp4Segments[1][1], 5, 'fragment contains the correct second byte');
  11255. // audio track change
  11256. this.player.audioTracks().trigger('change');
  11257. sourceBuffer.audioDisabled_ = false;
  11258. _qunit2['default'].ok(sourceBuffer.appendAudioInitSegment_, 'audio change sets appendAudioInitSegment_');
  11259. dataBuffer = new Uint8Array([6, 7]);
  11260. sourceBuffer.transmuxer_.onmessage(createDataMessage('audio', dataBuffer, {
  11261. initSegment: {
  11262. data: initBuffer.buffer,
  11263. byteOffset: initBuffer.byteOffset,
  11264. byteLength: initBuffer.byteLength
  11265. }
  11266. }));
  11267. sourceBuffer.transmuxer_.onmessage(doneMessage);
  11268. _qunit2['default'].equal(mp4Segments.length, 3, 'emitted the fragment');
  11269. // contains init segment after audio track change
  11270. _qunit2['default'].equal(mp4Segments[2][0], 0, 'fragment contains the correct first byte');
  11271. _qunit2['default'].equal(mp4Segments[2][1], 1, 'fragment contains the correct second byte');
  11272. _qunit2['default'].equal(mp4Segments[2][2], 6, 'fragment contains the correct third byte');
  11273. _qunit2['default'].equal(mp4Segments[2][3], 7, 'fragment contains the correct fourth byte');
  11274. _qunit2['default'].ok(!sourceBuffer.appendAudioInitSegment_, 'will not append init segment next');
  11275. dataBuffer = new Uint8Array([8, 9]);
  11276. sourceBuffer.transmuxer_.onmessage(createDataMessage('audio', dataBuffer, {
  11277. initSegment: {
  11278. data: initBuffer.buffer,
  11279. byteOffset: initBuffer.byteOffset,
  11280. byteLength: initBuffer.byteLength
  11281. }
  11282. }));
  11283. sourceBuffer.transmuxer_.onmessage(doneMessage);
  11284. _qunit2['default'].equal(mp4Segments.length, 4, 'emitted the fragment');
  11285. // does not contain init segment in next segment
  11286. _qunit2['default'].equal(mp4Segments[3][0], 8, 'fragment contains the correct first byte');
  11287. _qunit2['default'].equal(mp4Segments[3][1], 9, 'fragment contains the correct second byte');
  11288. _qunit2['default'].ok(!sourceBuffer.appendAudioInitSegment_, 'will not append init segment next');
  11289. // rendition switch
  11290. this.player.trigger('mediachange');
  11291. _qunit2['default'].ok(sourceBuffer.appendAudioInitSegment_, 'media change sets appendAudioInitSegment_');
  11292. dataBuffer = new Uint8Array([10, 11]);
  11293. sourceBuffer.transmuxer_.onmessage(createDataMessage('audio', dataBuffer, {
  11294. initSegment: {
  11295. data: initBuffer.buffer,
  11296. byteOffset: initBuffer.byteOffset,
  11297. byteLength: initBuffer.byteLength
  11298. }
  11299. }));
  11300. sourceBuffer.transmuxer_.onmessage(doneMessage);
  11301. _qunit2['default'].equal(mp4Segments.length, 5, 'emitted the fragment');
  11302. // contains init segment after audio track change
  11303. _qunit2['default'].equal(mp4Segments[4][0], 0, 'fragment contains the correct first byte');
  11304. _qunit2['default'].equal(mp4Segments[4][1], 1, 'fragment contains the correct second byte');
  11305. _qunit2['default'].equal(mp4Segments[4][2], 10, 'fragment contains the correct third byte');
  11306. _qunit2['default'].equal(mp4Segments[4][3], 11, 'fragment contains the correct fourth byte');
  11307. _qunit2['default'].ok(!sourceBuffer.appendAudioInitSegment_, 'will not append init segment next');
  11308. });
  11309. _qunit2['default'].test('appends video init segment for every segment', function () {
  11310. var mp4Segments = [];
  11311. var initBuffer = new Uint8Array([0, 1]);
  11312. var dataBuffer = new Uint8Array([2, 3]);
  11313. var mediaSource = undefined;
  11314. var sourceBuffer = undefined;
  11315. mediaSource = new _videoJs2['default'].MediaSource();
  11316. sourceBuffer = mediaSource.addSourceBuffer('video/mp2t');
  11317. mediaSource.player_ = this.player;
  11318. mediaSource.url_ = this.url;
  11319. mediaSource.trigger('sourceopen');
  11320. sourceBuffer.concatAndAppendSegments_ = function (segmentObj, destinationBuffer) {
  11321. var segment = segmentObj.segments.reduce(function (seg, arr) {
  11322. return seg.concat(Array.from(arr));
  11323. }, []);
  11324. mp4Segments.push(segment);
  11325. };
  11326. // an init segment
  11327. sourceBuffer.transmuxer_.onmessage(createDataMessage('video', dataBuffer, {
  11328. initSegment: {
  11329. data: initBuffer.buffer,
  11330. byteOffset: initBuffer.byteOffset,
  11331. byteLength: initBuffer.byteLength
  11332. }
  11333. }));
  11334. // Segments are concatenated
  11335. _qunit2['default'].equal(mp4Segments.length, 0, 'segments are not appended until after the `done` message');
  11336. // send `done` message
  11337. sourceBuffer.transmuxer_.onmessage(doneMessage);
  11338. // Segments are concatenated
  11339. _qunit2['default'].equal(mp4Segments.length, 1, 'emitted the fragment');
  11340. // Contains init segment on first segment
  11341. _qunit2['default'].equal(mp4Segments[0][0], 0, 'fragment contains the correct first byte');
  11342. _qunit2['default'].equal(mp4Segments[0][1], 1, 'fragment contains the correct second byte');
  11343. _qunit2['default'].equal(mp4Segments[0][2], 2, 'fragment contains the correct third byte');
  11344. _qunit2['default'].equal(mp4Segments[0][3], 3, 'fragment contains the correct fourth byte');
  11345. dataBuffer = new Uint8Array([4, 5]);
  11346. sourceBuffer.transmuxer_.onmessage(createDataMessage('video', dataBuffer, {
  11347. initSegment: {
  11348. data: initBuffer.buffer,
  11349. byteOffset: initBuffer.byteOffset,
  11350. byteLength: initBuffer.byteLength
  11351. }
  11352. }));
  11353. sourceBuffer.transmuxer_.onmessage(doneMessage);
  11354. _qunit2['default'].equal(mp4Segments.length, 2, 'emitted the fragment');
  11355. _qunit2['default'].equal(mp4Segments[1][0], 0, 'fragment contains the correct first byte');
  11356. _qunit2['default'].equal(mp4Segments[1][1], 1, 'fragment contains the correct second byte');
  11357. _qunit2['default'].equal(mp4Segments[1][2], 4, 'fragment contains the correct third byte');
  11358. _qunit2['default'].equal(mp4Segments[1][3], 5, 'fragment contains the correct fourth byte');
  11359. dataBuffer = new Uint8Array([6, 7]);
  11360. sourceBuffer.transmuxer_.onmessage(createDataMessage('video', dataBuffer, {
  11361. initSegment: {
  11362. data: initBuffer.buffer,
  11363. byteOffset: initBuffer.byteOffset,
  11364. byteLength: initBuffer.byteLength
  11365. }
  11366. }));
  11367. sourceBuffer.transmuxer_.onmessage(doneMessage);
  11368. _qunit2['default'].equal(mp4Segments.length, 3, 'emitted the fragment');
  11369. // contains init segment after audio track change
  11370. _qunit2['default'].equal(mp4Segments[2][0], 0, 'fragment contains the correct first byte');
  11371. _qunit2['default'].equal(mp4Segments[2][1], 1, 'fragment contains the correct second byte');
  11372. _qunit2['default'].equal(mp4Segments[2][2], 6, 'fragment contains the correct third byte');
  11373. _qunit2['default'].equal(mp4Segments[2][3], 7, 'fragment contains the correct fourth byte');
  11374. });
  11375. _qunit2['default'].test('handles empty codec string value', function () {
  11376. var mediaSource = new _videoJs2['default'].MediaSource();
  11377. var sourceBuffer = mediaSource.addSourceBuffer('video/mp2t; codecs=""');
  11378. initializeNativeSourceBuffers(sourceBuffer);
  11379. _qunit2['default'].ok(mediaSource.videoBuffer_, 'created a video buffer');
  11380. _qunit2['default'].equal(mediaSource.videoBuffer_.type, 'video/mp4;codecs="avc1.4d400d"', 'video buffer has the default codec');
  11381. _qunit2['default'].ok(mediaSource.audioBuffer_, 'created an audio buffer');
  11382. _qunit2['default'].equal(mediaSource.audioBuffer_.type, 'audio/mp4;codecs="mp4a.40.2"', 'audio buffer has the default codec');
  11383. _qunit2['default'].equal(mediaSource.sourceBuffers.length, 1, 'created one virtual buffer');
  11384. _qunit2['default'].equal(mediaSource.sourceBuffers[0], sourceBuffer, 'returned the virtual buffer');
  11385. });
  11386. _qunit2['default'].test('can create an audio buffer by itself', function () {
  11387. var mediaSource = new _videoJs2['default'].MediaSource();
  11388. var sourceBuffer = mediaSource.addSourceBuffer('video/mp2t; codecs="mp4a.40.2"');
  11389. initializeNativeSourceBuffers(sourceBuffer);
  11390. _qunit2['default'].ok(!mediaSource.videoBuffer_, 'did not create a video buffer');
  11391. _qunit2['default'].ok(mediaSource.audioBuffer_, 'created an audio buffer');
  11392. _qunit2['default'].equal(mediaSource.audioBuffer_.type, 'audio/mp4;codecs="mp4a.40.2"', 'audio buffer has the default codec');
  11393. _qunit2['default'].equal(mediaSource.sourceBuffers.length, 1, 'created one virtual buffer');
  11394. _qunit2['default'].equal(mediaSource.sourceBuffers[0], sourceBuffer, 'returned the virtual buffer');
  11395. });
  11396. _qunit2['default'].test('can create an video buffer by itself', function () {
  11397. var mediaSource = new _videoJs2['default'].MediaSource();
  11398. var sourceBuffer = mediaSource.addSourceBuffer('video/mp2t; codecs="avc1.4d400d"');
  11399. initializeNativeSourceBuffers(sourceBuffer);
  11400. _qunit2['default'].ok(!mediaSource.audioBuffer_, 'did not create an audio buffer');
  11401. _qunit2['default'].ok(mediaSource.videoBuffer_, 'created an video buffer');
  11402. _qunit2['default'].equal(mediaSource.videoBuffer_.type, 'video/mp4;codecs="avc1.4d400d"', 'video buffer has the codec that was passed');
  11403. _qunit2['default'].equal(mediaSource.sourceBuffers.length, 1, 'created one virtual buffer');
  11404. _qunit2['default'].equal(mediaSource.sourceBuffers[0], sourceBuffer, 'returned the virtual buffer');
  11405. });
  11406. _qunit2['default'].test('handles invalid codec string', function () {
  11407. var mediaSource = new _videoJs2['default'].MediaSource();
  11408. var sourceBuffer = mediaSource.addSourceBuffer('video/mp2t; codecs="nope"');
  11409. initializeNativeSourceBuffers(sourceBuffer);
  11410. _qunit2['default'].ok(mediaSource.videoBuffer_, 'created a video buffer');
  11411. _qunit2['default'].equal(mediaSource.videoBuffer_.type, 'video/mp4;codecs="avc1.4d400d"', 'video buffer has the default codec');
  11412. _qunit2['default'].ok(mediaSource.audioBuffer_, 'created an audio buffer');
  11413. _qunit2['default'].equal(mediaSource.audioBuffer_.type, 'audio/mp4;codecs="mp4a.40.2"', 'audio buffer has the default codec');
  11414. _qunit2['default'].equal(mediaSource.sourceBuffers.length, 1, 'created one virtual buffer');
  11415. _qunit2['default'].equal(mediaSource.sourceBuffers[0], sourceBuffer, 'returned the virtual buffer');
  11416. });
  11417. _qunit2['default'].test('handles codec strings in reverse order', function () {
  11418. var mediaSource = new _videoJs2['default'].MediaSource();
  11419. var sourceBuffer = mediaSource.addSourceBuffer('video/mp2t; codecs="mp4a.40.5,avc1.64001f"');
  11420. initializeNativeSourceBuffers(sourceBuffer);
  11421. _qunit2['default'].ok(mediaSource.videoBuffer_, 'created a video buffer');
  11422. _qunit2['default'].equal(mediaSource.videoBuffer_.type, 'video/mp4;codecs="avc1.64001f"', 'video buffer has the passed codec');
  11423. _qunit2['default'].ok(mediaSource.audioBuffer_, 'created an audio buffer');
  11424. _qunit2['default'].equal(mediaSource.audioBuffer_.type, 'audio/mp4;codecs="mp4a.40.5"', 'audio buffer has the passed codec');
  11425. _qunit2['default'].equal(mediaSource.sourceBuffers.length, 1, 'created one virtual buffer');
  11426. _qunit2['default'].equal(mediaSource.sourceBuffers[0], sourceBuffer, 'returned the virtual buffer');
  11427. _qunit2['default'].ok(sourceBuffer.transmuxer_, 'created a transmuxer');
  11428. });
  11429. _qunit2['default'].test('forwards codec strings to native buffers when specified', function () {
  11430. var mediaSource = new _videoJs2['default'].MediaSource();
  11431. var sourceBuffer = mediaSource.addSourceBuffer('video/mp2t; codecs="avc1.64001f,mp4a.40.5"');
  11432. initializeNativeSourceBuffers(sourceBuffer);
  11433. _qunit2['default'].ok(mediaSource.videoBuffer_, 'created a video buffer');
  11434. _qunit2['default'].equal(mediaSource.videoBuffer_.type, 'video/mp4;codecs="avc1.64001f"', 'passed the video codec along');
  11435. _qunit2['default'].ok(mediaSource.audioBuffer_, 'created a video buffer');
  11436. _qunit2['default'].equal(mediaSource.audioBuffer_.type, 'audio/mp4;codecs="mp4a.40.5"', 'passed the audio codec along');
  11437. });
  11438. _qunit2['default'].test('parses old-school apple codec strings to the modern standard', function () {
  11439. var mediaSource = new _videoJs2['default'].MediaSource();
  11440. var sourceBuffer = mediaSource.addSourceBuffer('video/mp2t; codecs="avc1.100.31,mp4a.40.5"');
  11441. initializeNativeSourceBuffers(sourceBuffer);
  11442. _qunit2['default'].ok(mediaSource.videoBuffer_, 'created a video buffer');
  11443. _qunit2['default'].equal(mediaSource.videoBuffer_.type, 'video/mp4;codecs="avc1.64001f"', 'passed the video codec along');
  11444. _qunit2['default'].ok(mediaSource.audioBuffer_, 'created a video buffer');
  11445. _qunit2['default'].equal(mediaSource.audioBuffer_.type, 'audio/mp4;codecs="mp4a.40.5"', 'passed the audio codec along');
  11446. });
  11447. _qunit2['default'].test('specifies reasonable codecs if none are specified', function () {
  11448. var mediaSource = new _videoJs2['default'].MediaSource();
  11449. var sourceBuffer = mediaSource.addSourceBuffer('video/mp2t');
  11450. initializeNativeSourceBuffers(sourceBuffer);
  11451. _qunit2['default'].ok(mediaSource.videoBuffer_, 'created a video buffer');
  11452. _qunit2['default'].equal(mediaSource.videoBuffer_.type, 'video/mp4;codecs="avc1.4d400d"', 'passed the video codec along');
  11453. _qunit2['default'].ok(mediaSource.audioBuffer_, 'created a video buffer');
  11454. _qunit2['default'].equal(mediaSource.audioBuffer_.type, 'audio/mp4;codecs="mp4a.40.2"', 'passed the audio codec along');
  11455. });
  11456. _qunit2['default'].test('virtual buffers are updating if either native buffer is', function () {
  11457. var mediaSource = new _videoJs2['default'].MediaSource();
  11458. var sourceBuffer = mediaSource.addSourceBuffer('video/mp2t');
  11459. initializeNativeSourceBuffers(sourceBuffer);
  11460. mediaSource.videoBuffer_.updating = true;
  11461. mediaSource.audioBuffer_.updating = false;
  11462. _qunit2['default'].equal(sourceBuffer.updating, true, 'virtual buffer is updating');
  11463. mediaSource.audioBuffer_.updating = true;
  11464. _qunit2['default'].equal(sourceBuffer.updating, true, 'virtual buffer is updating');
  11465. mediaSource.videoBuffer_.updating = false;
  11466. _qunit2['default'].equal(sourceBuffer.updating, true, 'virtual buffer is updating');
  11467. mediaSource.audioBuffer_.updating = false;
  11468. _qunit2['default'].equal(sourceBuffer.updating, false, 'virtual buffer is not updating');
  11469. });
  11470. _qunit2['default'].test('virtual buffers have a position buffered if both native buffers do', function () {
  11471. var mediaSource = new _videoJs2['default'].MediaSource();
  11472. var sourceBuffer = mediaSource.addSourceBuffer('video/mp2t');
  11473. initializeNativeSourceBuffers(sourceBuffer);
  11474. mediaSource.videoBuffer_.buffered = _videoJs2['default'].createTimeRanges([[0, 10], [20, 30]]);
  11475. mediaSource.audioBuffer_.buffered = _videoJs2['default'].createTimeRanges([[0, 7], [11, 15], [16, 40]]);
  11476. _qunit2['default'].equal(sourceBuffer.buffered.length, 2, 'two buffered ranges');
  11477. _qunit2['default'].equal(sourceBuffer.buffered.start(0), 0, 'first starts at zero');
  11478. _qunit2['default'].equal(sourceBuffer.buffered.end(0), 7, 'first ends at seven');
  11479. _qunit2['default'].equal(sourceBuffer.buffered.start(1), 20, 'second starts at twenty');
  11480. _qunit2['default'].equal(sourceBuffer.buffered.end(1), 30, 'second ends at 30');
  11481. });
  11482. _qunit2['default'].test('disabled audio does not affect buffered property', function () {
  11483. var mediaSource = new _videoJs2['default'].MediaSource();
  11484. var muxedBuffer = mediaSource.addSourceBuffer('video/mp2t');
  11485. // creating a separate audio buffer disables audio on the muxed one
  11486. var audioBuffer = mediaSource.addSourceBuffer('audio/mp2t; codecs="mp4a.40.2"');
  11487. initializeNativeSourceBuffers(muxedBuffer);
  11488. mediaSource.videoBuffer_.buffered = _videoJs2['default'].createTimeRanges([[1, 10]]);
  11489. mediaSource.audioBuffer_.buffered = _videoJs2['default'].createTimeRanges([[2, 11]]);
  11490. _qunit2['default'].equal(audioBuffer.buffered.length, 1, 'one buffered range');
  11491. _qunit2['default'].equal(audioBuffer.buffered.start(0), 2, 'starts at two');
  11492. _qunit2['default'].equal(audioBuffer.buffered.end(0), 11, 'ends at eleven');
  11493. _qunit2['default'].equal(muxedBuffer.buffered.length, 1, 'one buffered range');
  11494. _qunit2['default'].equal(muxedBuffer.buffered.start(0), 1, 'starts at one');
  11495. _qunit2['default'].equal(muxedBuffer.buffered.end(0), 10, 'ends at ten');
  11496. });
  11497. _qunit2['default'].test('sets transmuxer baseMediaDecodeTime on appends', function () {
  11498. var mediaSource = new _videoJs2['default'].MediaSource();
  11499. var sourceBuffer = mediaSource.addSourceBuffer('video/mp2t');
  11500. var resets = [];
  11501. sourceBuffer.transmuxer_.postMessage = function (message) {
  11502. if (message.action === 'setTimestampOffset') {
  11503. resets.push(message.timestampOffset);
  11504. }
  11505. };
  11506. sourceBuffer.timestampOffset = 42;
  11507. _qunit2['default'].equal(resets.length, 1, 'reset called');
  11508. _qunit2['default'].equal(resets[0], 42, 'set the baseMediaDecodeTime based on timestampOffset');
  11509. });
  11510. _qunit2['default'].test('aggregates source buffer update events', function () {
  11511. var mediaSource = new _videoJs2['default'].MediaSource();
  11512. var sourceBuffer = mediaSource.addSourceBuffer('video/mp2t');
  11513. var updates = 0;
  11514. var updateends = 0;
  11515. var updatestarts = 0;
  11516. initializeNativeSourceBuffers(sourceBuffer);
  11517. mediaSource.player_ = this.player;
  11518. sourceBuffer.addEventListener('updatestart', function () {
  11519. updatestarts++;
  11520. });
  11521. sourceBuffer.addEventListener('update', function () {
  11522. updates++;
  11523. });
  11524. sourceBuffer.addEventListener('updateend', function () {
  11525. updateends++;
  11526. });
  11527. _qunit2['default'].equal(updatestarts, 0, 'no updatestarts before a `done` message is received');
  11528. _qunit2['default'].equal(updates, 0, 'no updates before a `done` message is received');
  11529. _qunit2['default'].equal(updateends, 0, 'no updateends before a `done` message is received');
  11530. // the video buffer begins updating first:
  11531. sourceBuffer.videoBuffer_.updating = true;
  11532. sourceBuffer.audioBuffer_.updating = false;
  11533. sourceBuffer.videoBuffer_.trigger('updatestart');
  11534. _qunit2['default'].equal(updatestarts, 1, 'aggregated updatestart');
  11535. sourceBuffer.audioBuffer_.updating = true;
  11536. sourceBuffer.audioBuffer_.trigger('updatestart');
  11537. _qunit2['default'].equal(updatestarts, 1, 'aggregated updatestart');
  11538. // the audio buffer finishes first:
  11539. sourceBuffer.audioBuffer_.updating = false;
  11540. sourceBuffer.videoBuffer_.updating = true;
  11541. sourceBuffer.audioBuffer_.trigger('update');
  11542. _qunit2['default'].equal(updates, 0, 'waited for the second update');
  11543. sourceBuffer.videoBuffer_.updating = false;
  11544. sourceBuffer.videoBuffer_.trigger('update');
  11545. _qunit2['default'].equal(updates, 1, 'aggregated update');
  11546. // audio finishes first:
  11547. sourceBuffer.videoBuffer_.updating = true;
  11548. sourceBuffer.audioBuffer_.updating = false;
  11549. sourceBuffer.audioBuffer_.trigger('updateend');
  11550. _qunit2['default'].equal(updateends, 0, 'waited for the second updateend');
  11551. sourceBuffer.videoBuffer_.updating = false;
  11552. sourceBuffer.videoBuffer_.trigger('updateend');
  11553. _qunit2['default'].equal(updateends, 1, 'aggregated updateend');
  11554. });
  11555. _qunit2['default'].test('translates caption events into WebVTT cues', function () {
  11556. var mediaSource = new _videoJs2['default'].MediaSource();
  11557. var sourceBuffer = mediaSource.addSourceBuffer('video/mp2t');
  11558. var types = [];
  11559. var hls608 = 0;
  11560. mediaSource.player_ = {
  11561. addRemoteTextTrack: function addRemoteTextTrack(options) {
  11562. types.push(options.kind);
  11563. return {
  11564. track: {
  11565. kind: options.kind,
  11566. label: options.label,
  11567. cues: [],
  11568. addCue: function addCue(cue) {
  11569. this.cues.push(cue);
  11570. }
  11571. }
  11572. };
  11573. },
  11574. textTracks: function textTracks() {
  11575. return {
  11576. getTrackById: function getTrackById() {}
  11577. };
  11578. },
  11579. remoteTextTracks: function remoteTextTracks() {},
  11580. tech_: new _videoJs2['default'].EventTarget()
  11581. };
  11582. mediaSource.player_.tech_.on('usage', function (event) {
  11583. if (event.name === 'hls-608') {
  11584. hls608++;
  11585. }
  11586. });
  11587. sourceBuffer.timestampOffset = 10;
  11588. sourceBuffer.transmuxer_.onmessage(createDataMessage('video', new Uint8Array(1), {
  11589. captions: [{
  11590. startTime: 1,
  11591. endTime: 3,
  11592. text: 'This is an in-band caption in CC1',
  11593. stream: 'CC1'
  11594. }],
  11595. captionStreams: { CC1: true }
  11596. }));
  11597. sourceBuffer.transmuxer_.onmessage(doneMessage);
  11598. var cues = sourceBuffer.inbandTextTracks_.CC1.cues;
  11599. _qunit2['default'].equal(hls608, 1, 'one hls-608 event was triggered');
  11600. _qunit2['default'].equal(types.length, 1, 'created one text track');
  11601. _qunit2['default'].equal(types[0], 'captions', 'the type was captions');
  11602. _qunit2['default'].equal(cues.length, 1, 'created one cue');
  11603. _qunit2['default'].equal(cues[0].text, 'This is an in-band caption in CC1', 'included the text');
  11604. _qunit2['default'].equal(cues[0].startTime, 11, 'started at eleven');
  11605. _qunit2['default'].equal(cues[0].endTime, 13, 'ended at thirteen');
  11606. });
  11607. _qunit2['default'].test('captions use existing tracks with id equal to CC#', function () {
  11608. var mediaSource = new _videoJs2['default'].MediaSource();
  11609. var sourceBuffer = mediaSource.addSourceBuffer('video/mp2t');
  11610. var addTrackCalled = 0;
  11611. var tracks = {
  11612. CC1: {
  11613. kind: 'captions',
  11614. label: 'CC1',
  11615. id: 'CC1',
  11616. cues: [],
  11617. addCue: function addCue(cue) {
  11618. this.cues.push(cue);
  11619. }
  11620. },
  11621. CC2: {
  11622. kind: 'captions',
  11623. label: 'CC2',
  11624. id: 'CC2',
  11625. cues: [],
  11626. addCue: function addCue(cue) {
  11627. this.cues.push(cue);
  11628. }
  11629. }
  11630. };
  11631. mediaSource.player_ = {
  11632. addRemoteTextTrack: function addRemoteTextTrack(options) {
  11633. addTrackCalled++;
  11634. },
  11635. textTracks: function textTracks() {
  11636. return {
  11637. getTrackById: function getTrackById(id) {
  11638. return tracks[id];
  11639. }
  11640. };
  11641. },
  11642. remoteTextTracks: function remoteTextTracks() {},
  11643. tech_: new _videoJs2['default'].EventTarget()
  11644. };
  11645. sourceBuffer.timestampOffset = 10;
  11646. sourceBuffer.transmuxer_.onmessage(createDataMessage('video', new Uint8Array(1), {
  11647. captions: [{
  11648. stream: 'CC1',
  11649. startTime: 1,
  11650. endTime: 3,
  11651. text: 'This is an in-band caption in CC1'
  11652. }, {
  11653. stream: 'CC2',
  11654. startTime: 1,
  11655. endTime: 3,
  11656. text: 'This is an in-band caption in CC2'
  11657. }],
  11658. captionStreams: { CC1: true, CC2: true }
  11659. }));
  11660. sourceBuffer.transmuxer_.onmessage(doneMessage);
  11661. var cues = sourceBuffer.inbandTextTracks_.CC1.cues;
  11662. _qunit2['default'].equal(addTrackCalled, 0, 'no tracks were created');
  11663. _qunit2['default'].equal(tracks.CC1.cues.length, 1, 'CC1 contains 1 cue');
  11664. _qunit2['default'].equal(tracks.CC2.cues.length, 1, 'CC2 contains 1 cue');
  11665. _qunit2['default'].equal(tracks.CC1.cues[0].text, 'This is an in-band caption in CC1', 'CC1 contains the right cue');
  11666. _qunit2['default'].equal(tracks.CC2.cues[0].text, 'This is an in-band caption in CC2', 'CC2 contains the right cue');
  11667. });
  11668. _qunit2['default'].test('translates metadata events into WebVTT cues', function () {
  11669. var mediaSource = new _videoJs2['default'].MediaSource();
  11670. var sourceBuffer = mediaSource.addSourceBuffer('video/mp2t');
  11671. mediaSource.duration = Infinity;
  11672. mediaSource.nativeMediaSource_.duration = 60;
  11673. var types = [];
  11674. var metadata = [{
  11675. cueTime: 2,
  11676. frames: [{
  11677. url: 'This is a url tag'
  11678. }, {
  11679. value: 'This is a text tag'
  11680. }]
  11681. }, {
  11682. cueTime: 12,
  11683. frames: [{
  11684. data: 'This is a priv tag'
  11685. }]
  11686. }];
  11687. metadata.dispatchType = 0x10;
  11688. mediaSource.player_ = {
  11689. addRemoteTextTrack: function addRemoteTextTrack(options) {
  11690. types.push(options.kind);
  11691. return {
  11692. track: {
  11693. kind: options.kind,
  11694. label: options.label,
  11695. cues: [],
  11696. addCue: function addCue(cue) {
  11697. this.cues.push(cue);
  11698. }
  11699. }
  11700. };
  11701. },
  11702. remoteTextTracks: function remoteTextTracks() {}
  11703. };
  11704. sourceBuffer.timestampOffset = 10;
  11705. sourceBuffer.transmuxer_.onmessage(createDataMessage('video', new Uint8Array(1), {
  11706. metadata: metadata
  11707. }));
  11708. sourceBuffer.transmuxer_.onmessage(doneMessage);
  11709. _qunit2['default'].equal(sourceBuffer.metadataTrack_.inBandMetadataTrackDispatchType, 16, 'in-band metadata track dispatch type correctly set');
  11710. var cues = sourceBuffer.metadataTrack_.cues;
  11711. _qunit2['default'].equal(types.length, 1, 'created one text track');
  11712. _qunit2['default'].equal(types[0], 'metadata', 'the type was metadata');
  11713. _qunit2['default'].equal(cues.length, 3, 'created three cues');
  11714. _qunit2['default'].equal(cues[0].text, 'This is a url tag', 'included the text');
  11715. _qunit2['default'].equal(cues[0].startTime, 12, 'started at twelve');
  11716. _qunit2['default'].equal(cues[0].endTime, 22, 'ended at StartTime of next cue(22)');
  11717. _qunit2['default'].equal(cues[1].text, 'This is a text tag', 'included the text');
  11718. _qunit2['default'].equal(cues[1].startTime, 12, 'started at twelve');
  11719. _qunit2['default'].equal(cues[1].endTime, 22, 'ended at the startTime of next cue(22)');
  11720. _qunit2['default'].equal(cues[2].text, 'This is a priv tag', 'included the text');
  11721. _qunit2['default'].equal(cues[2].startTime, 22, 'started at twenty two');
  11722. _qunit2['default'].equal(cues[2].endTime, Number.MAX_VALUE, 'ended at the maximum value');
  11723. mediaSource.duration = 100;
  11724. mediaSource.trigger('sourceended');
  11725. _qunit2['default'].equal(cues[2].endTime, mediaSource.duration, 'sourceended is fired');
  11726. });
  11727. _qunit2['default'].test('does not wrap mp4 source buffers', function () {
  11728. var mediaSource = new _videoJs2['default'].MediaSource();
  11729. mediaSource.addSourceBuffer('video/mp4;codecs=avc1.4d400d');
  11730. mediaSource.addSourceBuffer('audio/mp4;codecs=mp4a.40.2');
  11731. _qunit2['default'].equal(mediaSource.sourceBuffers.length, mediaSource.nativeMediaSource_.sourceBuffers.length, 'did not need virtual buffers');
  11732. _qunit2['default'].equal(mediaSource.sourceBuffers.length, 2, 'created native buffers');
  11733. });
  11734. _qunit2['default'].test('can get activeSourceBuffers', function () {
  11735. var mediaSource = new _videoJs2['default'].MediaSource();
  11736. // although activeSourceBuffers should technically be a SourceBufferList, we are
  11737. // returning it as an array, and users may expect it to behave as such
  11738. _qunit2['default'].ok(Array.isArray(mediaSource.activeSourceBuffers));
  11739. });
  11740. _qunit2['default'].test('active source buffers are updated on each buffer\'s updateend', function () {
  11741. var mediaSource = new _videoJs2['default'].MediaSource();
  11742. var updateCallCount = 0;
  11743. var sourceBuffer = undefined;
  11744. mediaSource.updateActiveSourceBuffers_ = function () {
  11745. updateCallCount++;
  11746. };
  11747. sourceBuffer = mediaSource.addSourceBuffer('video/mp2t');
  11748. mediaSource.player_ = this.player;
  11749. mediaSource.url_ = this.url;
  11750. mediaSource.trigger('sourceopen');
  11751. _qunit2['default'].equal(updateCallCount, 0, 'active source buffers not updated on adding source buffer');
  11752. mediaSource.player_.audioTracks().trigger('addtrack');
  11753. _qunit2['default'].equal(updateCallCount, 1, 'active source buffers updated after addtrack');
  11754. sourceBuffer = mediaSource.addSourceBuffer('video/mp2t');
  11755. _qunit2['default'].equal(updateCallCount, 1, 'active source buffers not updated on adding second source buffer');
  11756. mediaSource.player_.audioTracks().trigger('removetrack');
  11757. _qunit2['default'].equal(updateCallCount, 2, 'active source buffers updated after removetrack');
  11758. mediaSource.player_.audioTracks().trigger('change');
  11759. _qunit2['default'].equal(updateCallCount, 3, 'active source buffers updated after change');
  11760. });
  11761. _qunit2['default'].test('combined buffer is the only active buffer when main track enabled', function () {
  11762. var mediaSource = new _videoJs2['default'].MediaSource();
  11763. var sourceBufferAudio = undefined;
  11764. var sourceBufferCombined = undefined;
  11765. var audioTracks = [{
  11766. enabled: true,
  11767. kind: 'main',
  11768. label: 'main'
  11769. }, {
  11770. enabled: false,
  11771. kind: 'alternative',
  11772. label: 'English (UK)'
  11773. }];
  11774. this.player.audioTracks = function () {
  11775. return audioTracks;
  11776. };
  11777. mediaSource.player_ = this.player;
  11778. sourceBufferCombined = mediaSource.addSourceBuffer('video/m2pt');
  11779. sourceBufferCombined.videoCodec_ = true;
  11780. sourceBufferCombined.audioCodec_ = true;
  11781. sourceBufferAudio = mediaSource.addSourceBuffer('video/m2pt');
  11782. sourceBufferAudio.videoCodec_ = false;
  11783. sourceBufferAudio.audioCodec_ = true;
  11784. mediaSource.updateActiveSourceBuffers_();
  11785. _qunit2['default'].equal(mediaSource.activeSourceBuffers.length, 1, 'active source buffers starts with one source buffer');
  11786. _qunit2['default'].equal(mediaSource.activeSourceBuffers[0], sourceBufferCombined, 'active source buffers starts with combined source buffer');
  11787. });
  11788. _qunit2['default'].test('combined & audio buffers are active when alternative track enabled', function () {
  11789. var mediaSource = new _videoJs2['default'].MediaSource();
  11790. var sourceBufferAudio = undefined;
  11791. var sourceBufferCombined = undefined;
  11792. var audioTracks = [{
  11793. enabled: false,
  11794. kind: 'main',
  11795. label: 'main'
  11796. }, {
  11797. enabled: true,
  11798. kind: 'alternative',
  11799. label: 'English (UK)'
  11800. }];
  11801. this.player.audioTracks = function () {
  11802. return audioTracks;
  11803. };
  11804. mediaSource.player_ = this.player;
  11805. sourceBufferCombined = mediaSource.addSourceBuffer('video/m2pt');
  11806. sourceBufferCombined.videoCodec_ = true;
  11807. sourceBufferCombined.audioCodec_ = true;
  11808. sourceBufferAudio = mediaSource.addSourceBuffer('video/m2pt');
  11809. sourceBufferAudio.videoCodec_ = false;
  11810. sourceBufferAudio.audioCodec_ = true;
  11811. mediaSource.updateActiveSourceBuffers_();
  11812. _qunit2['default'].equal(mediaSource.activeSourceBuffers.length, 2, 'active source buffers includes both source buffers');
  11813. // maintains same order as source buffers were created
  11814. _qunit2['default'].equal(mediaSource.activeSourceBuffers[0], sourceBufferCombined, 'active source buffers starts with combined source buffer');
  11815. _qunit2['default'].equal(mediaSource.activeSourceBuffers[1], sourceBufferAudio, 'active source buffers ends with audio source buffer');
  11816. });
  11817. _qunit2['default'].test('video only & audio only buffers are always active', function () {
  11818. var mediaSource = new _videoJs2['default'].MediaSource();
  11819. var sourceBufferAudio = undefined;
  11820. var sourceBufferCombined = undefined;
  11821. var audioTracks = [{
  11822. enabled: false,
  11823. kind: 'main',
  11824. label: 'main'
  11825. }, {
  11826. enabled: true,
  11827. kind: 'alternative',
  11828. label: 'English (UK)'
  11829. }];
  11830. this.player.audioTracks = function () {
  11831. return audioTracks;
  11832. };
  11833. mediaSource.player_ = this.player;
  11834. sourceBufferCombined = mediaSource.addSourceBuffer('video/m2pt');
  11835. sourceBufferCombined.videoCodec_ = true;
  11836. sourceBufferCombined.audioCodec_ = false;
  11837. sourceBufferAudio = mediaSource.addSourceBuffer('video/m2pt');
  11838. sourceBufferAudio.videoCodec_ = false;
  11839. sourceBufferAudio.audioCodec_ = true;
  11840. mediaSource.updateActiveSourceBuffers_();
  11841. _qunit2['default'].equal(mediaSource.activeSourceBuffers.length, 2, 'active source buffers includes both source buffers');
  11842. // maintains same order as source buffers were created
  11843. _qunit2['default'].equal(mediaSource.activeSourceBuffers[0], sourceBufferCombined, 'active source buffers starts with combined source buffer');
  11844. _qunit2['default'].equal(mediaSource.activeSourceBuffers[1], sourceBufferAudio, 'active source buffers ends with audio source buffer');
  11845. audioTracks[0].enabled = true;
  11846. audioTracks[1].enabled = false;
  11847. mediaSource.updateActiveSourceBuffers_();
  11848. _qunit2['default'].equal(mediaSource.activeSourceBuffers.length, 2, 'active source buffers includes both source buffers');
  11849. // maintains same order as source buffers were created
  11850. _qunit2['default'].equal(mediaSource.activeSourceBuffers[0], sourceBufferCombined, 'active source buffers starts with combined source buffer');
  11851. _qunit2['default'].equal(mediaSource.activeSourceBuffers[1], sourceBufferAudio, 'active source buffers ends with audio source buffer');
  11852. });
  11853. _qunit2['default'].test('Single buffer always active. Audio disabled depends on audio codec', function () {
  11854. var mediaSource = new _videoJs2['default'].MediaSource();
  11855. var audioTracks = [{
  11856. enabled: true,
  11857. kind: 'main',
  11858. label: 'main'
  11859. }];
  11860. this.player.audioTracks = function () {
  11861. return audioTracks;
  11862. };
  11863. mediaSource.player_ = this.player;
  11864. var sourceBuffer = mediaSource.addSourceBuffer('video/m2pt');
  11865. // video only
  11866. sourceBuffer.videoCodec_ = true;
  11867. sourceBuffer.audioCodec_ = false;
  11868. mediaSource.updateActiveSourceBuffers_();
  11869. _qunit2['default'].equal(mediaSource.activeSourceBuffers.length, 1, 'sourceBuffer is active');
  11870. _qunit2['default'].ok(mediaSource.activeSourceBuffers[0].audioDisabled_, 'audio is disabled on video only active sourceBuffer');
  11871. // audio only
  11872. sourceBuffer.videoCodec_ = false;
  11873. sourceBuffer.audioCodec_ = true;
  11874. mediaSource.updateActiveSourceBuffers_();
  11875. _qunit2['default'].equal(mediaSource.activeSourceBuffers.length, 1, 'sourceBuffer is active');
  11876. _qunit2['default'].notOk(mediaSource.activeSourceBuffers[0].audioDisabled_, 'audio not disabled on audio only active sourceBuffer');
  11877. });
  11878. _qunit2['default'].test('video segments with info trigger videooinfo event', function () {
  11879. var data = new Uint8Array(1);
  11880. var infoEvents = [];
  11881. var mediaSource = new _videoJs2['default'].MediaSource();
  11882. var sourceBuffer = mediaSource.addSourceBuffer('video/mp2t');
  11883. var info = { width: 100 };
  11884. var newinfo = { width: 225 };
  11885. mediaSource.on('videoinfo', function (e) {
  11886. return infoEvents.push(e);
  11887. });
  11888. // send an audio segment with info, then send done
  11889. sourceBuffer.transmuxer_.onmessage(createDataMessage('video', data, { info: info }));
  11890. sourceBuffer.transmuxer_.onmessage(doneMessage);
  11891. _qunit2['default'].equal(infoEvents.length, 1, 'video info should trigger');
  11892. _qunit2['default'].deepEqual(infoEvents[0].info, info, 'video info = muxed info');
  11893. // send an audio segment with info, then send done
  11894. sourceBuffer.transmuxer_.onmessage(createDataMessage('video', data, { info: newinfo }));
  11895. sourceBuffer.transmuxer_.onmessage(doneMessage);
  11896. _qunit2['default'].equal(infoEvents.length, 2, 'video info should trigger');
  11897. _qunit2['default'].deepEqual(infoEvents[1].info, newinfo, 'video info = muxed info');
  11898. });
  11899. _qunit2['default'].test('audio segments with info trigger audioinfo event', function () {
  11900. var data = new Uint8Array(1);
  11901. var infoEvents = [];
  11902. var mediaSource = new _videoJs2['default'].MediaSource();
  11903. var sourceBuffer = mediaSource.addSourceBuffer('video/mp2t');
  11904. var info = { width: 100 };
  11905. var newinfo = { width: 225 };
  11906. mediaSource.on('audioinfo', function (e) {
  11907. return infoEvents.push(e);
  11908. });
  11909. // send an audio segment with info, then send done
  11910. sourceBuffer.transmuxer_.onmessage(createDataMessage('audio', data, { info: info }));
  11911. sourceBuffer.transmuxer_.onmessage(doneMessage);
  11912. _qunit2['default'].equal(infoEvents.length, 1, 'audio info should trigger');
  11913. _qunit2['default'].deepEqual(infoEvents[0].info, info, 'audio info = muxed info');
  11914. // send an audio segment with info, then send done
  11915. sourceBuffer.transmuxer_.onmessage(createDataMessage('audio', data, { info: newinfo }));
  11916. sourceBuffer.transmuxer_.onmessage(doneMessage);
  11917. _qunit2['default'].equal(infoEvents.length, 2, 'audio info should trigger');
  11918. _qunit2['default'].deepEqual(infoEvents[1].info, newinfo, 'audio info = muxed info');
  11919. });
  11920. _qunit2['default'].test('creates native SourceBuffers immediately if a second ' + 'VirtualSourceBuffer is created', function () {
  11921. var mediaSource = new _videoJs2['default'].MediaSource();
  11922. var sourceBuffer = mediaSource.addSourceBuffer('video/mp2t; codecs="avc1.64001f,mp4a.40.5"');
  11923. var sourceBuffer2 = mediaSource.addSourceBuffer('video/mp2t; codecs="mp4a.40.5"');
  11924. _qunit2['default'].ok(mediaSource.videoBuffer_, 'created a video buffer');
  11925. _qunit2['default'].equal(mediaSource.videoBuffer_.type, 'video/mp4;codecs="avc1.64001f"', 'video buffer has the specified codec');
  11926. _qunit2['default'].ok(mediaSource.audioBuffer_, 'created an audio buffer');
  11927. _qunit2['default'].equal(mediaSource.audioBuffer_.type, 'audio/mp4;codecs="mp4a.40.5"', 'audio buffer has the specified codec');
  11928. _qunit2['default'].equal(mediaSource.sourceBuffers.length, 2, 'created two virtual buffers');
  11929. _qunit2['default'].equal(mediaSource.sourceBuffers[0], sourceBuffer, 'returned the virtual buffer');
  11930. _qunit2['default'].equal(mediaSource.sourceBuffers[1], sourceBuffer2, 'returned the virtual buffer');
  11931. _qunit2['default'].equal(sourceBuffer.audioDisabled_, true, 'first source buffer\'s audio is automatically disabled');
  11932. _qunit2['default'].ok(sourceBuffer2.audioBuffer_, 'second source buffer has an audio source buffer');
  11933. });
  11934. _qunit2['default'].module('VirtualSourceBuffer - Isolated Functions');
  11935. _qunit2['default'].test('gopsSafeToAlignWith returns correct list', function () {
  11936. // gopsSafeToAlignWith uses a 3 second safetyNet so that gops very close to the playhead
  11937. // are not considered safe to append to
  11938. var safetyNet = 3;
  11939. var pts = function pts(time) {
  11940. return Math.ceil(time * 90000);
  11941. };
  11942. var mapping = 0;
  11943. var _currentTime = 0;
  11944. var buffer = [];
  11945. var player = undefined;
  11946. var actual = undefined;
  11947. var expected = undefined;
  11948. expected = [];
  11949. actual = (0, _srcVirtualSourceBuffer.gopsSafeToAlignWith)(buffer, player, mapping);
  11950. _qunit2['default'].deepEqual(actual, expected, 'empty array when player is undefined');
  11951. player = { currentTime: function currentTime() {
  11952. return _currentTime;
  11953. } };
  11954. actual = (0, _srcVirtualSourceBuffer.gopsSafeToAlignWith)(buffer, player, mapping);
  11955. _qunit2['default'].deepEqual(actual, expected, 'empty array when buffer is empty');
  11956. buffer = expected = [{ pts: pts(_currentTime + safetyNet + 1) }, { pts: pts(_currentTime + safetyNet + 2) }, { pts: pts(_currentTime + safetyNet + 3) }];
  11957. actual = (0, _srcVirtualSourceBuffer.gopsSafeToAlignWith)(buffer, player, mapping);
  11958. _qunit2['default'].deepEqual(actual, expected, 'entire buffer considered safe when all gops come after currentTime + safetyNet');
  11959. buffer = [{ pts: pts(_currentTime + safetyNet) }, { pts: pts(_currentTime + safetyNet + 1) }, { pts: pts(_currentTime + safetyNet + 2) }];
  11960. expected = [{ pts: pts(_currentTime + safetyNet + 1) }, { pts: pts(_currentTime + safetyNet + 2) }];
  11961. actual = (0, _srcVirtualSourceBuffer.gopsSafeToAlignWith)(buffer, player, mapping);
  11962. _qunit2['default'].deepEqual(actual, expected, 'safetyNet comparison is not inclusive');
  11963. _currentTime = 10;
  11964. mapping = -5;
  11965. buffer = [{ pts: pts(_currentTime - mapping + safetyNet - 2) }, { pts: pts(_currentTime - mapping + safetyNet - 1) }, { pts: pts(_currentTime - mapping + safetyNet) }, { pts: pts(_currentTime - mapping + safetyNet + 1) }, { pts: pts(_currentTime - mapping + safetyNet + 2) }];
  11966. expected = [{ pts: pts(_currentTime - mapping + safetyNet + 1) }, { pts: pts(_currentTime - mapping + safetyNet + 2) }];
  11967. actual = (0, _srcVirtualSourceBuffer.gopsSafeToAlignWith)(buffer, player, mapping);
  11968. _qunit2['default'].deepEqual(actual, expected, 'uses mapping to shift currentTime');
  11969. _currentTime = 20;
  11970. expected = [];
  11971. actual = (0, _srcVirtualSourceBuffer.gopsSafeToAlignWith)(buffer, player, mapping);
  11972. _qunit2['default'].deepEqual(actual, expected, 'empty array when no gops in buffer come after currentTime');
  11973. });
  11974. _qunit2['default'].test('updateGopBuffer correctly processes new gop information', function () {
  11975. var buffer = [];
  11976. var gops = [];
  11977. var replace = true;
  11978. var actual = undefined;
  11979. var expected = undefined;
  11980. buffer = expected = [{ pts: 100 }, { pts: 200 }];
  11981. actual = (0, _srcVirtualSourceBuffer.updateGopBuffer)(buffer, gops, replace);
  11982. _qunit2['default'].deepEqual(actual, expected, 'returns buffer when no new gops');
  11983. gops = expected = [{ pts: 300 }, { pts: 400 }];
  11984. actual = (0, _srcVirtualSourceBuffer.updateGopBuffer)(buffer, gops, replace);
  11985. _qunit2['default'].deepEqual(actual, expected, 'returns only new gops when replace is true');
  11986. replace = false;
  11987. buffer = [];
  11988. gops = [{ pts: 100 }];
  11989. expected = [{ pts: 100 }];
  11990. actual = (0, _srcVirtualSourceBuffer.updateGopBuffer)(buffer, gops, replace);
  11991. _qunit2['default'].deepEqual(actual, expected, 'appends new gops to empty buffer');
  11992. buffer = [{ pts: 100 }, { pts: 200 }];
  11993. gops = [{ pts: 300 }, { pts: 400 }];
  11994. expected = [{ pts: 100 }, { pts: 200 }, { pts: 300 }, { pts: 400 }];
  11995. actual = (0, _srcVirtualSourceBuffer.updateGopBuffer)(buffer, gops, replace);
  11996. _qunit2['default'].deepEqual(actual, expected, 'appends new gops at end of buffer when no overlap');
  11997. buffer = [{ pts: 100 }, { pts: 200 }, { pts: 300 }, { pts: 400 }];
  11998. gops = [{ pts: 250 }, { pts: 300 }, { pts: 350 }];
  11999. expected = [{ pts: 100 }, { pts: 200 }, { pts: 250 }, { pts: 300 }, { pts: 350 }];
  12000. actual = (0, _srcVirtualSourceBuffer.updateGopBuffer)(buffer, gops, replace);
  12001. _qunit2['default'].deepEqual(actual, expected, 'slices buffer at point of overlap and appends new gops');
  12002. buffer = [{ pts: 100 }, { pts: 200 }, { pts: 300 }, { pts: 400 }];
  12003. gops = [{ pts: 200 }, { pts: 300 }, { pts: 350 }];
  12004. expected = [{ pts: 100 }, { pts: 200 }, { pts: 300 }, { pts: 350 }];
  12005. actual = (0, _srcVirtualSourceBuffer.updateGopBuffer)(buffer, gops, replace);
  12006. _qunit2['default'].deepEqual(actual, expected, 'overlap slice is inclusive');
  12007. buffer = [{ pts: 300 }, { pts: 400 }, { pts: 500 }, { pts: 600 }];
  12008. gops = [{ pts: 100 }, { pts: 200 }, { pts: 250 }];
  12009. expected = [{ pts: 100 }, { pts: 200 }, { pts: 250 }];
  12010. actual = (0, _srcVirtualSourceBuffer.updateGopBuffer)(buffer, gops, replace);
  12011. _qunit2['default'].deepEqual(actual, expected, 'completely replaces buffer with new gops when all gops come before buffer');
  12012. });
  12013. _qunit2['default'].test('removeGopBuffer correctly removes range from buffer', function () {
  12014. var pts = function pts(time) {
  12015. return Math.ceil(time * 90000);
  12016. };
  12017. var buffer = [];
  12018. var start = 0;
  12019. var end = 0;
  12020. var mapping = -5;
  12021. var actual = undefined;
  12022. var expected = undefined;
  12023. expected = [];
  12024. actual = (0, _srcVirtualSourceBuffer.removeGopBuffer)(buffer, start, end, mapping);
  12025. _qunit2['default'].deepEqual(actual, expected, 'returns empty array when buffer empty');
  12026. start = 0;
  12027. end = 8;
  12028. buffer = expected = [{ pts: pts(10 - mapping) }, { pts: pts(11 - mapping) }, { pts: pts(12 - mapping) }, { pts: pts(15 - mapping) }, { pts: pts(18 - mapping) }, { pts: pts(20 - mapping) }];
  12029. actual = (0, _srcVirtualSourceBuffer.removeGopBuffer)(buffer, start, end, mapping);
  12030. _qunit2['default'].deepEqual(actual, expected, 'no removal when remove range comes before start of buffer');
  12031. start = 22;
  12032. end = 30;
  12033. buffer = [{ pts: pts(10 - mapping) }, { pts: pts(11 - mapping) }, { pts: pts(12 - mapping) }, { pts: pts(15 - mapping) }, { pts: pts(18 - mapping) }, { pts: pts(20 - mapping) }];
  12034. expected = [{ pts: pts(10 - mapping) }, { pts: pts(11 - mapping) }, { pts: pts(12 - mapping) }, { pts: pts(15 - mapping) }, { pts: pts(18 - mapping) }];
  12035. actual = (0, _srcVirtualSourceBuffer.removeGopBuffer)(buffer, start, end, mapping);
  12036. _qunit2['default'].deepEqual(actual, expected, 'removes last gop when remove range is after end of buffer');
  12037. start = 0;
  12038. end = 10;
  12039. buffer = [{ pts: pts(10 - mapping) }, { pts: pts(11 - mapping) }, { pts: pts(12 - mapping) }, { pts: pts(15 - mapping) }, { pts: pts(18 - mapping) }, { pts: pts(20 - mapping) }];
  12040. expected = [{ pts: pts(11 - mapping) }, { pts: pts(12 - mapping) }, { pts: pts(15 - mapping) }, { pts: pts(18 - mapping) }, { pts: pts(20 - mapping) }];
  12041. actual = (0, _srcVirtualSourceBuffer.removeGopBuffer)(buffer, start, end, mapping);
  12042. _qunit2['default'].deepEqual(actual, expected, 'clamps start range to begining of buffer');
  12043. start = 0;
  12044. end = 12;
  12045. buffer = [{ pts: pts(10 - mapping) }, { pts: pts(11 - mapping) }, { pts: pts(12 - mapping) }, { pts: pts(15 - mapping) }, { pts: pts(18 - mapping) }, { pts: pts(20 - mapping) }];
  12046. expected = [{ pts: pts(15 - mapping) }, { pts: pts(18 - mapping) }, { pts: pts(20 - mapping) }];
  12047. actual = (0, _srcVirtualSourceBuffer.removeGopBuffer)(buffer, start, end, mapping);
  12048. _qunit2['default'].deepEqual(actual, expected, 'clamps start range to begining of buffer');
  12049. start = 0;
  12050. end = 14;
  12051. buffer = [{ pts: pts(10 - mapping) }, { pts: pts(11 - mapping) }, { pts: pts(12 - mapping) }, { pts: pts(15 - mapping) }, { pts: pts(18 - mapping) }, { pts: pts(20 - mapping) }];
  12052. expected = [{ pts: pts(15 - mapping) }, { pts: pts(18 - mapping) }, { pts: pts(20 - mapping) }];
  12053. actual = (0, _srcVirtualSourceBuffer.removeGopBuffer)(buffer, start, end, mapping);
  12054. _qunit2['default'].deepEqual(actual, expected, 'clamps start range to begining of buffer');
  12055. start = 15;
  12056. end = 30;
  12057. buffer = [{ pts: pts(10 - mapping) }, { pts: pts(11 - mapping) }, { pts: pts(12 - mapping) }, { pts: pts(15 - mapping) }, { pts: pts(18 - mapping) }, { pts: pts(20 - mapping) }];
  12058. expected = [{ pts: pts(10 - mapping) }, { pts: pts(11 - mapping) }, { pts: pts(12 - mapping) }];
  12059. actual = (0, _srcVirtualSourceBuffer.removeGopBuffer)(buffer, start, end, mapping);
  12060. _qunit2['default'].deepEqual(actual, expected, 'clamps end range to end of buffer');
  12061. start = 17;
  12062. end = 30;
  12063. buffer = [{ pts: pts(10 - mapping) }, { pts: pts(11 - mapping) }, { pts: pts(12 - mapping) }, { pts: pts(15 - mapping) }, { pts: pts(18 - mapping) }, { pts: pts(20 - mapping) }];
  12064. expected = [{ pts: pts(10 - mapping) }, { pts: pts(11 - mapping) }, { pts: pts(12 - mapping) }];
  12065. actual = (0, _srcVirtualSourceBuffer.removeGopBuffer)(buffer, start, end, mapping);
  12066. _qunit2['default'].deepEqual(actual, expected, 'clamps end range to end of buffer');
  12067. start = 20;
  12068. end = 30;
  12069. buffer = [{ pts: pts(10 - mapping) }, { pts: pts(11 - mapping) }, { pts: pts(12 - mapping) }, { pts: pts(15 - mapping) }, { pts: pts(18 - mapping) }, { pts: pts(20 - mapping) }];
  12070. expected = [{ pts: pts(10 - mapping) }, { pts: pts(11 - mapping) }, { pts: pts(12 - mapping) }, { pts: pts(15 - mapping) }, { pts: pts(18 - mapping) }];
  12071. actual = (0, _srcVirtualSourceBuffer.removeGopBuffer)(buffer, start, end, mapping);
  12072. _qunit2['default'].deepEqual(actual, expected, 'clamps end range to end of buffer');
  12073. buffer = [{ pts: pts(10 - mapping) }, { pts: pts(11 - mapping) }, { pts: pts(12 - mapping) }, { pts: pts(15 - mapping) }, { pts: pts(18 - mapping) }, { pts: pts(20 - mapping) }];
  12074. start = 12;
  12075. end = 15;
  12076. expected = [{ pts: pts(10 - mapping) }, { pts: pts(11 - mapping) }, { pts: pts(18 - mapping) }, { pts: pts(20 - mapping) }];
  12077. actual = (0, _srcVirtualSourceBuffer.removeGopBuffer)(buffer, start, end, mapping);
  12078. _qunit2['default'].deepEqual(actual, expected, 'removes gops that remove range intersects with');
  12079. buffer = [{ pts: pts(10 - mapping) }, { pts: pts(11 - mapping) }, { pts: pts(12 - mapping) }, { pts: pts(15 - mapping) }, { pts: pts(18 - mapping) }, { pts: pts(20 - mapping) }];
  12080. start = 12;
  12081. end = 14;
  12082. expected = [{ pts: pts(10 - mapping) }, { pts: pts(11 - mapping) }, { pts: pts(15 - mapping) }, { pts: pts(18 - mapping) }, { pts: pts(20 - mapping) }];
  12083. actual = (0, _srcVirtualSourceBuffer.removeGopBuffer)(buffer, start, end, mapping);
  12084. _qunit2['default'].deepEqual(actual, expected, 'removes gops that remove range intersects with');
  12085. buffer = [{ pts: pts(10 - mapping) }, { pts: pts(11 - mapping) }, { pts: pts(12 - mapping) }, { pts: pts(15 - mapping) }, { pts: pts(18 - mapping) }, { pts: pts(20 - mapping) }];
  12086. start = 13;
  12087. end = 14;
  12088. expected = [{ pts: pts(10 - mapping) }, { pts: pts(11 - mapping) }, { pts: pts(15 - mapping) }, { pts: pts(18 - mapping) }, { pts: pts(20 - mapping) }];
  12089. actual = (0, _srcVirtualSourceBuffer.removeGopBuffer)(buffer, start, end, mapping);
  12090. _qunit2['default'].deepEqual(actual, expected, 'removes gops that remove range intersects with');
  12091. buffer = [{ pts: pts(10 - mapping) }, { pts: pts(11 - mapping) }, { pts: pts(12 - mapping) }, { pts: pts(15 - mapping) }, { pts: pts(18 - mapping) }, { pts: pts(20 - mapping) }];
  12092. start = 13;
  12093. end = 15;
  12094. expected = [{ pts: pts(10 - mapping) }, { pts: pts(11 - mapping) }, { pts: pts(18 - mapping) }, { pts: pts(20 - mapping) }];
  12095. actual = (0, _srcVirtualSourceBuffer.removeGopBuffer)(buffer, start, end, mapping);
  12096. _qunit2['default'].deepEqual(actual, expected, 'removes gops that remove range intersects with');
  12097. buffer = [{ pts: pts(10 - mapping) }, { pts: pts(11 - mapping) }, { pts: pts(12 - mapping) }, { pts: pts(15 - mapping) }, { pts: pts(18 - mapping) }, { pts: pts(20 - mapping) }];
  12098. start = 12;
  12099. end = 17;
  12100. expected = [{ pts: pts(10 - mapping) }, { pts: pts(11 - mapping) }, { pts: pts(18 - mapping) }, { pts: pts(20 - mapping) }];
  12101. actual = (0, _srcVirtualSourceBuffer.removeGopBuffer)(buffer, start, end, mapping);
  12102. _qunit2['default'].deepEqual(actual, expected, 'removes gops that remove range intersects with');
  12103. buffer = [{ pts: pts(10 - mapping) }, { pts: pts(11 - mapping) }, { pts: pts(12 - mapping) }, { pts: pts(15 - mapping) }, { pts: pts(18 - mapping) }, { pts: pts(20 - mapping) }];
  12104. start = 13;
  12105. end = 16;
  12106. expected = [{ pts: pts(10 - mapping) }, { pts: pts(11 - mapping) }, { pts: pts(18 - mapping) }, { pts: pts(20 - mapping) }];
  12107. actual = (0, _srcVirtualSourceBuffer.removeGopBuffer)(buffer, start, end, mapping);
  12108. _qunit2['default'].deepEqual(actual, expected, 'removes gops that remove range intersects with');
  12109. start = 10;
  12110. end = 20;
  12111. buffer = [{ pts: pts(10 - mapping) }, { pts: pts(11 - mapping) }, { pts: pts(12 - mapping) }, { pts: pts(15 - mapping) }, { pts: pts(18 - mapping) }, { pts: pts(20 - mapping) }];
  12112. expected = [];
  12113. actual = (0, _srcVirtualSourceBuffer.removeGopBuffer)(buffer, start, end, mapping);
  12114. _qunit2['default'].deepEqual(actual, expected, 'removes entire buffer when buffer inside remove range');
  12115. start = 0;
  12116. end = 30;
  12117. buffer = [{ pts: pts(10 - mapping) }, { pts: pts(11 - mapping) }, { pts: pts(12 - mapping) }, { pts: pts(15 - mapping) }, { pts: pts(18 - mapping) }, { pts: pts(20 - mapping) }];
  12118. expected = [];
  12119. actual = (0, _srcVirtualSourceBuffer.removeGopBuffer)(buffer, start, end, mapping);
  12120. _qunit2['default'].deepEqual(actual, expected, 'removes entire buffer when buffer inside remove range');
  12121. });
  12122. }).call(this,typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {})
  12123. },{"../src/html-media-source":42,"../src/videojs-contrib-media-sources.js":45,"../src/virtual-source-buffer":46,"global/document":2,"global/window":3}]},{},[47,48,49,50,51]);