12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270370470570670770870971071171271371471571671771871972072172272372472572672772872973073173273373473573673773873974074174274374474574674774874975075175275375475575675775875976076176276376476576676776876977077177277377477577677777877978078178278378478578678778878979079179279379479579679779879980080180280380480580680780880981081181281381481581681781881982082182282382482582682782882983083183283383483583683783883984084184284384484584684784884985085185285385485585685785885986086186286386486586686786886987087187287387487587687787887988088188288388488588688788888989089189289389489589689789889990090190290390490590690790890991091191291391491591691791891992092192292392492592692792892993093193293393493593693793893994094194294394494594694794894995095195295395495595695795895996096196296396496596696796896997097197297397497597697797897998098198298398498598698798898999099199299399499599699799899910001001100210031004100510061007100810091010101110121013101410151016101710181019102010211022102310241025102610271028102910301031103210331034103510361037103810391040104110421043104410451046104710481049105010511052105310541055105610571058105910601061106210631064106510661067106810691070107110721073107410751076107710781079108010811082108310841085108610871088108910901091109210931094109510961097109810991100110111021103110411051106110711081109111011111112111311141115111611171118111911201121112211231124112511261127112811291130113111321133113411351136113711381139114011411142114311441145114611471148114911501151115211531154115511561157115811591160116111621163116411651166116711681169117011711172117311741175117611771178117911801181118211831184118511861187118811891190119111921193119411951196119711981199120012011202120312041205120612071208120912101211121212131214121512161217121812191220122112221223122412251226122712281229123012311232123312341235123612371238123912401241124212431244124512461247124812491250125112521253125412551256125712581259126012611262126312641265126612671268126912701271127212731274127512761277127812791280128112821283128412851286128712881289129012911292129312941295129612971298129913001301130213031304130513061307130813091310131113121313131413151316131713181319132013211322132313241325132613271328132913301331133213331334133513361337133813391340134113421343134413451346134713481349135013511352135313541355135613571358135913601361136213631364136513661367136813691370137113721373137413751376137713781379138013811382138313841385138613871388138913901391139213931394139513961397139813991400140114021403140414051406140714081409141014111412141314141415141614171418141914201421142214231424142514261427142814291430143114321433143414351436143714381439144014411442144314441445144614471448144914501451145214531454145514561457145814591460146114621463146414651466146714681469147014711472147314741475147614771478147914801481148214831484148514861487148814891490149114921493149414951496149714981499150015011502150315041505150615071508150915101511151215131514151515161517151815191520152115221523152415251526152715281529153015311532153315341535153615371538153915401541154215431544154515461547154815491550155115521553155415551556155715581559156015611562156315641565156615671568156915701571157215731574157515761577157815791580158115821583158415851586158715881589159015911592159315941595159615971598159916001601160216031604160516061607160816091610161116121613161416151616161716181619162016211622162316241625162616271628162916301631163216331634163516361637163816391640164116421643164416451646164716481649165016511652165316541655165616571658165916601661166216631664166516661667166816691670167116721673167416751676167716781679168016811682168316841685168616871688168916901691169216931694169516961697169816991700170117021703170417051706170717081709171017111712171317141715171617171718171917201721172217231724172517261727172817291730173117321733173417351736173717381739174017411742174317441745174617471748174917501751175217531754175517561757175817591760176117621763176417651766176717681769177017711772177317741775177617771778177917801781178217831784178517861787178817891790179117921793179417951796179717981799180018011802180318041805180618071808180918101811181218131814181518161817181818191820182118221823182418251826182718281829183018311832183318341835183618371838183918401841184218431844184518461847184818491850185118521853185418551856185718581859186018611862186318641865186618671868186918701871187218731874187518761877187818791880188118821883188418851886188718881889189018911892189318941895189618971898189919001901190219031904190519061907190819091910191119121913191419151916191719181919192019211922192319241925192619271928192919301931193219331934193519361937193819391940194119421943194419451946194719481949195019511952195319541955195619571958195919601961196219631964196519661967196819691970197119721973197419751976197719781979198019811982198319841985198619871988198919901991199219931994199519961997199819992000200120022003200420052006200720082009201020112012201320142015201620172018201920202021202220232024202520262027202820292030203120322033203420352036203720382039204020412042204320442045204620472048204920502051205220532054205520562057205820592060206120622063206420652066206720682069207020712072207320742075207620772078207920802081208220832084208520862087208820892090209120922093209420952096209720982099210021012102210321042105210621072108210921102111211221132114211521162117211821192120212121222123212421252126212721282129213021312132213321342135213621372138213921402141214221432144214521462147214821492150215121522153215421552156215721582159216021612162216321642165216621672168216921702171217221732174217521762177217821792180218121822183218421852186218721882189219021912192219321942195219621972198219922002201220222032204220522062207220822092210221122122213221422152216221722182219222022212222222322242225222622272228222922302231223222332234223522362237223822392240224122422243224422452246224722482249225022512252225322542255225622572258225922602261226222632264226522662267226822692270227122722273227422752276227722782279228022812282228322842285228622872288228922902291229222932294229522962297229822992300230123022303230423052306230723082309231023112312231323142315231623172318231923202321232223232324232523262327232823292330233123322333233423352336233723382339234023412342234323442345234623472348234923502351235223532354235523562357235823592360236123622363236423652366236723682369237023712372237323742375237623772378237923802381238223832384238523862387238823892390239123922393239423952396239723982399240024012402240324042405240624072408240924102411241224132414241524162417241824192420242124222423242424252426242724282429243024312432243324342435243624372438243924402441244224432444244524462447244824492450245124522453245424552456245724582459246024612462246324642465246624672468246924702471247224732474247524762477247824792480248124822483248424852486248724882489249024912492249324942495249624972498249925002501250225032504250525062507250825092510251125122513251425152516251725182519252025212522252325242525252625272528252925302531253225332534253525362537253825392540254125422543254425452546254725482549255025512552255325542555255625572558255925602561256225632564256525662567256825692570257125722573257425752576257725782579258025812582258325842585258625872588258925902591259225932594259525962597259825992600260126022603260426052606260726082609261026112612261326142615261626172618261926202621262226232624262526262627262826292630263126322633263426352636263726382639264026412642264326442645264626472648264926502651265226532654265526562657265826592660266126622663266426652666266726682669267026712672267326742675267626772678267926802681268226832684268526862687268826892690269126922693269426952696269726982699270027012702270327042705270627072708270927102711271227132714271527162717271827192720272127222723272427252726272727282729273027312732273327342735273627372738273927402741274227432744274527462747274827492750275127522753275427552756275727582759276027612762276327642765276627672768276927702771277227732774277527762777277827792780278127822783278427852786278727882789279027912792279327942795279627972798279928002801280228032804280528062807280828092810281128122813281428152816281728182819282028212822282328242825282628272828282928302831283228332834283528362837283828392840284128422843284428452846284728482849285028512852285328542855285628572858285928602861286228632864286528662867286828692870287128722873287428752876287728782879288028812882288328842885288628872888288928902891289228932894289528962897289828992900290129022903290429052906290729082909291029112912291329142915291629172918291929202921292229232924292529262927292829292930293129322933293429352936293729382939294029412942294329442945294629472948294929502951295229532954295529562957295829592960296129622963296429652966296729682969297029712972297329742975297629772978297929802981298229832984298529862987298829892990299129922993299429952996299729982999300030013002300330043005300630073008300930103011301230133014301530163017301830193020302130223023302430253026302730283029303030313032303330343035303630373038303930403041304230433044304530463047304830493050305130523053305430553056305730583059306030613062306330643065306630673068306930703071307230733074307530763077307830793080308130823083308430853086308730883089309030913092309330943095309630973098309931003101310231033104310531063107310831093110311131123113311431153116311731183119312031213122312331243125312631273128312931303131313231333134313531363137313831393140314131423143314431453146314731483149315031513152315331543155315631573158315931603161316231633164316531663167316831693170317131723173317431753176317731783179318031813182318331843185318631873188318931903191319231933194319531963197319831993200320132023203320432053206320732083209321032113212321332143215321632173218321932203221322232233224322532263227322832293230323132323233323432353236323732383239324032413242324332443245324632473248324932503251325232533254325532563257325832593260326132623263326432653266326732683269327032713272327332743275327632773278327932803281328232833284328532863287328832893290329132923293329432953296329732983299330033013302330333043305330633073308330933103311331233133314331533163317331833193320332133223323332433253326332733283329333033313332333333343335333633373338333933403341334233433344334533463347334833493350335133523353335433553356335733583359336033613362336333643365336633673368336933703371337233733374337533763377337833793380338133823383338433853386338733883389339033913392339333943395339633973398339934003401340234033404340534063407340834093410341134123413341434153416341734183419342034213422342334243425342634273428342934303431343234333434343534363437343834393440344134423443344434453446344734483449345034513452345334543455345634573458345934603461346234633464346534663467346834693470347134723473347434753476347734783479348034813482348334843485348634873488348934903491349234933494349534963497349834993500350135023503350435053506350735083509351035113512351335143515351635173518351935203521352235233524352535263527352835293530353135323533353435353536353735383539354035413542354335443545354635473548354935503551355235533554355535563557355835593560356135623563356435653566356735683569357035713572357335743575357635773578357935803581358235833584358535863587358835893590359135923593359435953596359735983599360036013602360336043605360636073608360936103611361236133614361536163617361836193620362136223623362436253626362736283629363036313632363336343635363636373638363936403641364236433644364536463647364836493650365136523653365436553656365736583659366036613662366336643665366636673668366936703671367236733674367536763677367836793680368136823683368436853686368736883689369036913692369336943695369636973698369937003701370237033704370537063707370837093710371137123713371437153716371737183719372037213722372337243725372637273728372937303731373237333734373537363737373837393740374137423743374437453746374737483749375037513752375337543755375637573758375937603761376237633764376537663767376837693770377137723773377437753776377737783779378037813782378337843785378637873788378937903791379237933794379537963797379837993800380138023803380438053806380738083809381038113812381338143815381638173818381938203821382238233824382538263827382838293830383138323833383438353836383738383839384038413842384338443845384638473848384938503851385238533854385538563857385838593860386138623863386438653866386738683869387038713872387338743875387638773878387938803881388238833884388538863887388838893890389138923893389438953896389738983899390039013902390339043905390639073908390939103911391239133914391539163917391839193920392139223923392439253926392739283929393039313932393339343935393639373938393939403941394239433944394539463947394839493950395139523953395439553956395739583959396039613962396339643965396639673968396939703971397239733974397539763977397839793980398139823983398439853986398739883989399039913992399339943995399639973998399940004001400240034004400540064007400840094010401140124013401440154016401740184019402040214022402340244025402640274028402940304031403240334034403540364037403840394040404140424043404440454046404740484049405040514052405340544055405640574058405940604061406240634064406540664067406840694070407140724073407440754076407740784079408040814082408340844085408640874088408940904091409240934094409540964097409840994100410141024103410441054106410741084109411041114112411341144115411641174118411941204121412241234124412541264127412841294130413141324133413441354136413741384139414041414142414341444145414641474148414941504151415241534154415541564157415841594160416141624163416441654166416741684169417041714172417341744175417641774178417941804181418241834184418541864187418841894190419141924193419441954196419741984199420042014202420342044205420642074208420942104211421242134214421542164217421842194220422142224223422442254226422742284229423042314232423342344235423642374238423942404241424242434244424542464247424842494250425142524253425442554256425742584259426042614262426342644265426642674268426942704271427242734274427542764277427842794280428142824283428442854286428742884289429042914292429342944295429642974298429943004301430243034304430543064307430843094310431143124313431443154316431743184319432043214322432343244325432643274328432943304331433243334334433543364337433843394340434143424343434443454346434743484349435043514352435343544355435643574358435943604361436243634364436543664367436843694370437143724373437443754376437743784379438043814382438343844385438643874388438943904391439243934394439543964397439843994400440144024403440444054406440744084409441044114412441344144415441644174418441944204421442244234424442544264427442844294430443144324433443444354436443744384439444044414442444344444445444644474448444944504451445244534454445544564457445844594460446144624463446444654466446744684469447044714472447344744475447644774478447944804481448244834484448544864487448844894490449144924493449444954496449744984499450045014502450345044505450645074508450945104511451245134514451545164517451845194520452145224523452445254526452745284529453045314532453345344535453645374538453945404541454245434544454545464547454845494550455145524553455445554556455745584559456045614562456345644565456645674568456945704571457245734574457545764577457845794580458145824583458445854586458745884589459045914592459345944595459645974598459946004601460246034604460546064607460846094610461146124613461446154616461746184619462046214622462346244625462646274628462946304631463246334634463546364637463846394640464146424643464446454646464746484649465046514652465346544655465646574658465946604661466246634664466546664667466846694670467146724673467446754676467746784679468046814682468346844685468646874688468946904691469246934694469546964697469846994700470147024703470447054706470747084709471047114712471347144715471647174718471947204721472247234724472547264727472847294730473147324733473447354736473747384739474047414742474347444745474647474748474947504751475247534754475547564757475847594760476147624763476447654766476747684769477047714772477347744775477647774778477947804781478247834784478547864787478847894790479147924793479447954796479747984799480048014802480348044805480648074808480948104811481248134814481548164817481848194820482148224823482448254826482748284829483048314832483348344835483648374838483948404841484248434844484548464847484848494850485148524853485448554856485748584859486048614862486348644865486648674868486948704871487248734874487548764877487848794880488148824883488448854886488748884889489048914892489348944895489648974898489949004901490249034904490549064907490849094910491149124913491449154916491749184919492049214922492349244925492649274928492949304931493249334934493549364937493849394940494149424943494449454946494749484949495049514952495349544955495649574958495949604961496249634964496549664967496849694970497149724973497449754976497749784979498049814982498349844985498649874988498949904991499249934994499549964997499849995000500150025003500450055006500750085009501050115012501350145015501650175018501950205021502250235024502550265027502850295030503150325033503450355036503750385039504050415042504350445045504650475048504950505051505250535054505550565057505850595060506150625063506450655066506750685069507050715072507350745075507650775078507950805081508250835084508550865087508850895090509150925093509450955096509750985099510051015102510351045105510651075108510951105111511251135114511551165117511851195120512151225123512451255126512751285129513051315132513351345135513651375138513951405141514251435144514551465147514851495150515151525153515451555156515751585159516051615162516351645165516651675168516951705171517251735174517551765177517851795180518151825183518451855186518751885189519051915192519351945195519651975198519952005201520252035204520552065207520852095210521152125213521452155216521752185219522052215222522352245225522652275228522952305231523252335234523552365237523852395240524152425243524452455246524752485249525052515252525352545255525652575258525952605261526252635264526552665267526852695270527152725273527452755276527752785279528052815282528352845285528652875288528952905291529252935294529552965297529852995300530153025303530453055306530753085309531053115312531353145315531653175318531953205321532253235324532553265327532853295330533153325333533453355336533753385339534053415342534353445345534653475348534953505351535253535354535553565357535853595360536153625363536453655366536753685369537053715372537353745375537653775378537953805381538253835384538553865387538853895390539153925393539453955396539753985399540054015402540354045405540654075408540954105411541254135414541554165417541854195420542154225423542454255426542754285429543054315432543354345435543654375438543954405441544254435444544554465447544854495450545154525453545454555456545754585459546054615462546354645465546654675468546954705471547254735474547554765477547854795480548154825483548454855486548754885489549054915492549354945495549654975498549955005501550255035504550555065507550855095510551155125513551455155516551755185519552055215522552355245525552655275528552955305531553255335534553555365537553855395540554155425543554455455546554755485549555055515552555355545555555655575558555955605561556255635564556555665567556855695570557155725573557455755576557755785579558055815582558355845585558655875588558955905591559255935594559555965597559855995600560156025603560456055606560756085609561056115612561356145615561656175618561956205621562256235624562556265627562856295630563156325633563456355636563756385639564056415642564356445645564656475648564956505651565256535654565556565657565856595660566156625663566456655666566756685669567056715672567356745675567656775678567956805681568256835684568556865687568856895690569156925693569456955696569756985699570057015702570357045705570657075708570957105711571257135714571557165717571857195720572157225723572457255726572757285729573057315732573357345735573657375738573957405741574257435744574557465747574857495750575157525753575457555756575757585759576057615762576357645765576657675768576957705771577257735774577557765777577857795780578157825783578457855786578757885789579057915792579357945795579657975798579958005801580258035804580558065807580858095810581158125813581458155816581758185819582058215822582358245825582658275828582958305831583258335834583558365837583858395840584158425843584458455846584758485849585058515852585358545855585658575858585958605861586258635864586558665867586858695870587158725873587458755876587758785879588058815882588358845885588658875888588958905891589258935894589558965897589858995900590159025903590459055906590759085909591059115912591359145915591659175918591959205921592259235924592559265927592859295930593159325933593459355936593759385939594059415942594359445945594659475948594959505951595259535954595559565957595859595960596159625963596459655966596759685969597059715972597359745975597659775978597959805981598259835984598559865987598859895990599159925993599459955996599759985999600060016002600360046005600660076008600960106011601260136014601560166017601860196020602160226023602460256026602760286029603060316032603360346035603660376038603960406041604260436044604560466047604860496050605160526053605460556056605760586059606060616062606360646065606660676068606960706071607260736074607560766077607860796080608160826083608460856086608760886089609060916092609360946095609660976098609961006101610261036104610561066107610861096110611161126113611461156116611761186119612061216122612361246125612661276128612961306131613261336134613561366137613861396140614161426143614461456146614761486149615061516152615361546155615661576158615961606161616261636164616561666167616861696170617161726173617461756176617761786179618061816182618361846185618661876188618961906191619261936194619561966197619861996200620162026203620462056206620762086209621062116212621362146215621662176218621962206221622262236224622562266227622862296230623162326233623462356236623762386239624062416242624362446245624662476248624962506251625262536254625562566257625862596260626162626263626462656266626762686269627062716272627362746275627662776278627962806281628262836284628562866287628862896290629162926293629462956296629762986299630063016302630363046305630663076308630963106311631263136314631563166317631863196320632163226323632463256326632763286329633063316332633363346335633663376338633963406341634263436344634563466347634863496350635163526353635463556356635763586359636063616362636363646365636663676368636963706371637263736374637563766377637863796380638163826383638463856386638763886389639063916392639363946395639663976398639964006401640264036404640564066407640864096410641164126413641464156416641764186419642064216422642364246425642664276428642964306431643264336434643564366437643864396440644164426443644464456446644764486449645064516452645364546455645664576458645964606461646264636464646564666467646864696470647164726473647464756476647764786479648064816482648364846485648664876488648964906491649264936494649564966497649864996500650165026503650465056506650765086509651065116512651365146515651665176518651965206521652265236524652565266527652865296530653165326533653465356536653765386539654065416542654365446545654665476548654965506551655265536554655565566557655865596560656165626563656465656566656765686569657065716572657365746575657665776578657965806581658265836584658565866587658865896590659165926593659465956596659765986599660066016602660366046605660666076608660966106611661266136614661566166617661866196620662166226623662466256626662766286629663066316632663366346635663666376638663966406641664266436644664566466647664866496650665166526653665466556656665766586659666066616662666366646665666666676668666966706671667266736674667566766677667866796680668166826683668466856686668766886689669066916692669366946695669666976698669967006701670267036704670567066707670867096710671167126713671467156716671767186719672067216722672367246725672667276728672967306731673267336734673567366737673867396740674167426743674467456746674767486749675067516752675367546755675667576758675967606761676267636764676567666767676867696770677167726773677467756776677767786779678067816782678367846785678667876788678967906791679267936794679567966797679867996800680168026803680468056806680768086809681068116812681368146815681668176818681968206821682268236824682568266827682868296830683168326833683468356836683768386839684068416842684368446845684668476848684968506851685268536854685568566857685868596860686168626863686468656866686768686869687068716872687368746875687668776878687968806881688268836884688568866887688868896890689168926893689468956896689768986899690069016902690369046905690669076908690969106911691269136914691569166917691869196920692169226923692469256926692769286929693069316932693369346935693669376938693969406941694269436944694569466947694869496950695169526953695469556956695769586959696069616962696369646965696669676968696969706971697269736974697569766977697869796980698169826983698469856986698769886989699069916992699369946995699669976998699970007001700270037004700570067007700870097010701170127013701470157016701770187019702070217022702370247025702670277028702970307031703270337034703570367037703870397040704170427043704470457046704770487049705070517052705370547055705670577058705970607061706270637064706570667067706870697070707170727073707470757076707770787079708070817082708370847085708670877088708970907091709270937094709570967097709870997100710171027103710471057106710771087109711071117112711371147115711671177118711971207121712271237124712571267127712871297130713171327133713471357136713771387139714071417142714371447145714671477148714971507151715271537154715571567157715871597160716171627163716471657166716771687169717071717172717371747175717671777178717971807181718271837184718571867187718871897190719171927193719471957196719771987199720072017202720372047205720672077208720972107211721272137214721572167217721872197220722172227223722472257226722772287229723072317232723372347235723672377238723972407241724272437244724572467247724872497250725172527253725472557256725772587259726072617262726372647265726672677268726972707271727272737274727572767277727872797280728172827283728472857286728772887289729072917292729372947295729672977298729973007301730273037304730573067307730873097310731173127313731473157316731773187319732073217322732373247325732673277328732973307331733273337334733573367337733873397340734173427343734473457346734773487349735073517352735373547355735673577358735973607361736273637364736573667367736873697370737173727373737473757376737773787379738073817382738373847385738673877388738973907391739273937394739573967397739873997400740174027403740474057406740774087409741074117412741374147415741674177418741974207421742274237424742574267427742874297430743174327433743474357436743774387439744074417442744374447445744674477448744974507451745274537454745574567457745874597460746174627463746474657466746774687469747074717472747374747475747674777478747974807481748274837484748574867487748874897490749174927493749474957496749774987499750075017502750375047505750675077508750975107511751275137514751575167517751875197520752175227523752475257526752775287529753075317532753375347535753675377538753975407541754275437544754575467547754875497550755175527553755475557556755775587559756075617562756375647565756675677568756975707571757275737574757575767577757875797580758175827583758475857586758775887589759075917592759375947595759675977598759976007601760276037604760576067607760876097610761176127613761476157616761776187619762076217622762376247625762676277628762976307631763276337634763576367637763876397640764176427643764476457646764776487649765076517652765376547655765676577658765976607661766276637664766576667667766876697670767176727673767476757676767776787679768076817682768376847685768676877688768976907691769276937694769576967697769876997700770177027703770477057706770777087709771077117712771377147715771677177718771977207721772277237724772577267727772877297730773177327733773477357736773777387739774077417742774377447745774677477748774977507751775277537754775577567757775877597760776177627763776477657766776777687769777077717772777377747775777677777778777977807781778277837784778577867787778877897790779177927793779477957796779777987799780078017802780378047805780678077808780978107811781278137814781578167817781878197820782178227823782478257826782778287829783078317832783378347835783678377838783978407841784278437844784578467847784878497850785178527853785478557856785778587859786078617862786378647865786678677868786978707871787278737874787578767877787878797880788178827883788478857886788778887889789078917892789378947895789678977898789979007901790279037904790579067907790879097910791179127913791479157916791779187919792079217922792379247925792679277928792979307931793279337934793579367937793879397940794179427943794479457946794779487949795079517952795379547955795679577958795979607961796279637964796579667967796879697970797179727973797479757976797779787979798079817982798379847985798679877988798979907991799279937994799579967997799879998000800180028003800480058006800780088009801080118012801380148015801680178018801980208021802280238024802580268027802880298030803180328033803480358036803780388039804080418042804380448045804680478048804980508051805280538054805580568057805880598060806180628063806480658066806780688069807080718072807380748075807680778078807980808081808280838084808580868087808880898090809180928093809480958096809780988099810081018102810381048105810681078108810981108111811281138114811581168117811881198120812181228123812481258126812781288129813081318132813381348135813681378138813981408141814281438144814581468147814881498150815181528153815481558156815781588159816081618162816381648165816681678168816981708171817281738174817581768177817881798180818181828183818481858186818781888189819081918192819381948195819681978198819982008201820282038204820582068207820882098210821182128213821482158216821782188219822082218222822382248225822682278228822982308231823282338234823582368237823882398240824182428243824482458246824782488249825082518252825382548255825682578258825982608261826282638264826582668267826882698270827182728273827482758276827782788279828082818282828382848285828682878288828982908291829282938294829582968297829882998300830183028303830483058306830783088309831083118312831383148315831683178318831983208321832283238324832583268327832883298330833183328333833483358336833783388339834083418342834383448345834683478348834983508351835283538354835583568357835883598360836183628363836483658366836783688369837083718372837383748375837683778378837983808381838283838384838583868387838883898390839183928393839483958396839783988399840084018402840384048405840684078408840984108411841284138414841584168417841884198420842184228423842484258426842784288429843084318432843384348435843684378438843984408441844284438444844584468447844884498450845184528453845484558456845784588459846084618462846384648465846684678468846984708471847284738474847584768477847884798480848184828483848484858486848784888489849084918492849384948495849684978498849985008501850285038504850585068507850885098510851185128513851485158516851785188519852085218522852385248525852685278528852985308531853285338534853585368537853885398540854185428543854485458546854785488549855085518552855385548555855685578558855985608561856285638564856585668567856885698570857185728573857485758576857785788579858085818582858385848585858685878588858985908591859285938594859585968597859885998600860186028603860486058606860786088609861086118612861386148615861686178618861986208621862286238624862586268627862886298630863186328633863486358636863786388639864086418642864386448645864686478648864986508651865286538654865586568657865886598660866186628663866486658666866786688669867086718672867386748675867686778678867986808681868286838684868586868687868886898690869186928693869486958696869786988699870087018702870387048705870687078708870987108711871287138714871587168717871887198720872187228723872487258726872787288729873087318732873387348735873687378738873987408741874287438744874587468747874887498750875187528753875487558756875787588759876087618762876387648765876687678768876987708771877287738774877587768777877887798780878187828783878487858786878787888789879087918792879387948795879687978798879988008801880288038804880588068807880888098810881188128813881488158816881788188819882088218822882388248825882688278828882988308831883288338834883588368837883888398840884188428843884488458846884788488849885088518852885388548855885688578858885988608861886288638864886588668867886888698870887188728873887488758876887788788879888088818882888388848885888688878888888988908891889288938894889588968897889888998900890189028903890489058906890789088909891089118912891389148915891689178918891989208921892289238924892589268927892889298930893189328933893489358936893789388939894089418942894389448945894689478948894989508951895289538954895589568957895889598960896189628963896489658966896789688969897089718972897389748975897689778978897989808981898289838984898589868987898889898990899189928993899489958996899789988999900090019002900390049005900690079008900990109011901290139014901590169017901890199020902190229023902490259026902790289029903090319032903390349035903690379038903990409041904290439044904590469047904890499050905190529053905490559056905790589059906090619062906390649065906690679068906990709071907290739074907590769077907890799080908190829083908490859086908790889089909090919092909390949095909690979098909991009101910291039104910591069107910891099110911191129113911491159116911791189119912091219122912391249125912691279128912991309131913291339134913591369137913891399140914191429143914491459146914791489149915091519152915391549155915691579158915991609161916291639164916591669167916891699170917191729173917491759176917791789179918091819182918391849185918691879188918991909191919291939194919591969197919891999200920192029203920492059206920792089209921092119212921392149215921692179218921992209221922292239224922592269227922892299230923192329233923492359236923792389239924092419242924392449245924692479248924992509251925292539254925592569257925892599260926192629263926492659266926792689269927092719272927392749275927692779278927992809281928292839284928592869287928892899290929192929293929492959296929792989299930093019302930393049305930693079308930993109311931293139314931593169317931893199320932193229323932493259326932793289329933093319332933393349335933693379338933993409341934293439344934593469347934893499350935193529353935493559356935793589359936093619362936393649365936693679368936993709371937293739374937593769377937893799380938193829383938493859386938793889389939093919392939393949395939693979398939994009401940294039404940594069407940894099410941194129413941494159416941794189419942094219422942394249425942694279428942994309431943294339434943594369437943894399440944194429443944494459446944794489449945094519452945394549455945694579458945994609461946294639464946594669467946894699470947194729473947494759476947794789479948094819482948394849485948694879488948994909491949294939494949594969497949894999500950195029503950495059506950795089509951095119512951395149515951695179518951995209521952295239524952595269527952895299530953195329533953495359536953795389539954095419542954395449545954695479548954995509551955295539554955595569557955895599560956195629563956495659566956795689569957095719572957395749575957695779578957995809581958295839584958595869587958895899590959195929593959495959596959795989599960096019602960396049605960696079608960996109611961296139614961596169617961896199620962196229623962496259626962796289629963096319632963396349635963696379638963996409641964296439644964596469647964896499650965196529653965496559656965796589659966096619662966396649665966696679668966996709671967296739674967596769677967896799680968196829683968496859686968796889689969096919692969396949695969696979698969997009701970297039704970597069707970897099710971197129713971497159716971797189719972097219722972397249725972697279728972997309731973297339734973597369737973897399740974197429743974497459746974797489749975097519752975397549755975697579758975997609761976297639764976597669767976897699770977197729773977497759776977797789779978097819782978397849785978697879788978997909791979297939794979597969797979897999800980198029803980498059806980798089809981098119812981398149815981698179818981998209821982298239824982598269827982898299830983198329833983498359836983798389839984098419842984398449845984698479848984998509851985298539854985598569857985898599860986198629863986498659866986798689869987098719872987398749875987698779878987998809881988298839884988598869887988898899890989198929893989498959896989798989899990099019902990399049905990699079908990999109911991299139914991599169917991899199920992199229923992499259926992799289929993099319932993399349935993699379938993999409941994299439944994599469947994899499950995199529953995499559956995799589959996099619962996399649965996699679968996999709971997299739974997599769977997899799980998199829983998499859986998799889989999099919992999399949995999699979998999910000100011000210003100041000510006100071000810009100101001110012100131001410015100161001710018100191002010021100221002310024100251002610027100281002910030100311003210033100341003510036100371003810039100401004110042100431004410045100461004710048100491005010051100521005310054100551005610057100581005910060100611006210063100641006510066100671006810069100701007110072100731007410075100761007710078100791008010081100821008310084100851008610087100881008910090100911009210093100941009510096100971009810099101001010110102101031010410105101061010710108101091011010111101121011310114101151011610117101181011910120101211012210123101241012510126101271012810129101301013110132101331013410135101361013710138101391014010141101421014310144101451014610147101481014910150101511015210153101541015510156101571015810159101601016110162101631016410165101661016710168101691017010171101721017310174101751017610177101781017910180101811018210183101841018510186101871018810189101901019110192101931019410195101961019710198101991020010201102021020310204102051020610207102081020910210102111021210213102141021510216102171021810219102201022110222102231022410225102261022710228102291023010231102321023310234102351023610237102381023910240102411024210243102441024510246102471024810249102501025110252102531025410255102561025710258102591026010261102621026310264102651026610267102681026910270102711027210273102741027510276102771027810279102801028110282102831028410285102861028710288102891029010291102921029310294102951029610297102981029910300103011030210303103041030510306103071030810309103101031110312103131031410315103161031710318103191032010321103221032310324103251032610327103281032910330103311033210333103341033510336103371033810339103401034110342103431034410345103461034710348103491035010351103521035310354103551035610357103581035910360103611036210363103641036510366103671036810369103701037110372103731037410375103761037710378103791038010381103821038310384103851038610387103881038910390103911039210393103941039510396103971039810399104001040110402104031040410405104061040710408104091041010411104121041310414104151041610417104181041910420104211042210423104241042510426104271042810429104301043110432104331043410435104361043710438104391044010441104421044310444104451044610447104481044910450104511045210453104541045510456104571045810459104601046110462104631046410465104661046710468104691047010471104721047310474104751047610477104781047910480104811048210483104841048510486104871048810489104901049110492104931049410495104961049710498104991050010501105021050310504105051050610507105081050910510105111051210513105141051510516105171051810519105201052110522105231052410525105261052710528105291053010531105321053310534105351053610537105381053910540105411054210543105441054510546105471054810549105501055110552105531055410555105561055710558105591056010561105621056310564105651056610567105681056910570105711057210573105741057510576105771057810579105801058110582105831058410585105861058710588105891059010591105921059310594105951059610597105981059910600106011060210603106041060510606106071060810609106101061110612106131061410615106161061710618106191062010621106221062310624106251062610627106281062910630106311063210633106341063510636106371063810639106401064110642106431064410645106461064710648106491065010651106521065310654106551065610657106581065910660106611066210663106641066510666106671066810669106701067110672106731067410675106761067710678106791068010681106821068310684106851068610687106881068910690106911069210693106941069510696106971069810699107001070110702107031070410705107061070710708107091071010711107121071310714107151071610717107181071910720107211072210723107241072510726107271072810729107301073110732107331073410735107361073710738107391074010741107421074310744107451074610747107481074910750107511075210753107541075510756107571075810759107601076110762107631076410765107661076710768107691077010771107721077310774107751077610777107781077910780107811078210783107841078510786107871078810789107901079110792107931079410795107961079710798107991080010801108021080310804108051080610807108081080910810108111081210813108141081510816108171081810819108201082110822108231082410825108261082710828108291083010831108321083310834108351083610837108381083910840108411084210843108441084510846108471084810849108501085110852108531085410855108561085710858108591086010861108621086310864108651086610867108681086910870108711087210873108741087510876108771087810879108801088110882108831088410885108861088710888108891089010891108921089310894108951089610897108981089910900109011090210903109041090510906109071090810909109101091110912109131091410915109161091710918109191092010921109221092310924109251092610927109281092910930109311093210933109341093510936109371093810939109401094110942109431094410945109461094710948109491095010951109521095310954109551095610957109581095910960109611096210963109641096510966109671096810969109701097110972109731097410975109761097710978109791098010981109821098310984109851098610987109881098910990109911099210993109941099510996109971099810999110001100111002110031100411005110061100711008110091101011011110121101311014110151101611017110181101911020110211102211023110241102511026110271102811029110301103111032110331103411035110361103711038110391104011041110421104311044110451104611047110481104911050110511105211053110541105511056110571105811059110601106111062110631106411065110661106711068110691107011071110721107311074110751107611077110781107911080110811108211083110841108511086110871108811089110901109111092110931109411095110961109711098110991110011101111021110311104111051110611107111081110911110111111111211113111141111511116111171111811119111201112111122111231112411125111261112711128111291113011131111321113311134111351113611137111381113911140111411114211143111441114511146111471114811149111501115111152111531115411155111561115711158111591116011161111621116311164111651116611167111681116911170111711117211173111741117511176111771117811179111801118111182111831118411185111861118711188111891119011191111921119311194111951119611197111981119911200112011120211203112041120511206112071120811209112101121111212112131121411215112161121711218112191122011221112221122311224112251122611227112281122911230112311123211233112341123511236112371123811239112401124111242112431124411245112461124711248112491125011251112521125311254112551125611257112581125911260112611126211263112641126511266112671126811269112701127111272112731127411275112761127711278112791128011281112821128311284112851128611287112881128911290112911129211293112941129511296112971129811299113001130111302113031130411305113061130711308113091131011311113121131311314113151131611317113181131911320113211132211323113241132511326113271132811329113301133111332113331133411335113361133711338113391134011341113421134311344113451134611347113481134911350113511135211353113541135511356113571135811359113601136111362113631136411365113661136711368113691137011371113721137311374113751137611377113781137911380113811138211383113841138511386113871138811389113901139111392113931139411395113961139711398113991140011401114021140311404114051140611407114081140911410114111141211413114141141511416114171141811419114201142111422114231142411425114261142711428114291143011431114321143311434114351143611437114381143911440114411144211443114441144511446114471144811449114501145111452114531145411455114561145711458114591146011461114621146311464114651146611467114681146911470114711147211473114741147511476114771147811479114801148111482114831148411485114861148711488114891149011491114921149311494114951149611497114981149911500115011150211503115041150511506115071150811509115101151111512115131151411515115161151711518115191152011521115221152311524115251152611527115281152911530115311153211533115341153511536115371153811539115401154111542115431154411545115461154711548115491155011551115521155311554115551155611557115581155911560115611156211563115641156511566115671156811569115701157111572115731157411575115761157711578115791158011581115821158311584115851158611587115881158911590115911159211593115941159511596115971159811599116001160111602116031160411605116061160711608116091161011611116121161311614116151161611617116181161911620116211162211623116241162511626116271162811629116301163111632116331163411635116361163711638116391164011641116421164311644116451164611647116481164911650116511165211653116541165511656116571165811659116601166111662116631166411665116661166711668116691167011671116721167311674116751167611677116781167911680116811168211683116841168511686116871168811689116901169111692116931169411695116961169711698116991170011701117021170311704117051170611707117081170911710117111171211713117141171511716117171171811719117201172111722117231172411725117261172711728117291173011731117321173311734117351173611737117381173911740117411174211743117441174511746117471174811749117501175111752117531175411755117561175711758117591176011761117621176311764117651176611767117681176911770117711177211773117741177511776117771177811779117801178111782117831178411785117861178711788117891179011791117921179311794117951179611797117981179911800118011180211803118041180511806118071180811809118101181111812118131181411815118161181711818118191182011821118221182311824118251182611827118281182911830118311183211833118341183511836118371183811839118401184111842118431184411845118461184711848118491185011851118521185311854118551185611857118581185911860118611186211863118641186511866118671186811869118701187111872118731187411875118761187711878118791188011881118821188311884118851188611887118881188911890118911189211893118941189511896118971189811899119001190111902119031190411905119061190711908119091191011911119121191311914119151191611917119181191911920119211192211923119241192511926119271192811929119301193111932119331193411935119361193711938119391194011941119421194311944119451194611947119481194911950119511195211953119541195511956119571195811959119601196111962119631196411965119661196711968119691197011971119721197311974119751197611977119781197911980119811198211983119841198511986119871198811989119901199111992119931199411995119961199711998119991200012001120021200312004120051200612007120081200912010120111201212013120141201512016120171201812019120201202112022120231202412025120261202712028120291203012031120321203312034120351203612037120381203912040120411204212043120441204512046120471204812049120501205112052120531205412055120561205712058120591206012061120621206312064120651206612067120681206912070120711207212073120741207512076120771207812079120801208112082120831208412085120861208712088120891209012091120921209312094120951209612097120981209912100121011210212103121041210512106121071210812109121101211112112121131211412115121161211712118121191212012121121221212312124121251212612127121281212912130121311213212133121341213512136121371213812139121401214112142121431214412145121461214712148121491215012151121521215312154121551215612157121581215912160121611216212163121641216512166121671216812169121701217112172121731217412175121761217712178121791218012181121821218312184121851218612187121881218912190121911219212193121941219512196121971219812199122001220112202122031220412205122061220712208122091221012211122121221312214122151221612217122181221912220122211222212223122241222512226122271222812229122301223112232122331223412235122361223712238122391224012241122421224312244122451224612247122481224912250122511225212253122541225512256122571225812259122601226112262122631226412265122661226712268122691227012271122721227312274122751227612277122781227912280122811228212283122841228512286122871228812289122901229112292122931229412295122961229712298122991230012301123021230312304123051230612307123081230912310123111231212313123141231512316123171231812319123201232112322123231232412325123261232712328123291233012331123321233312334123351233612337123381233912340123411234212343123441234512346123471234812349123501235112352123531235412355123561235712358123591236012361123621236312364123651236612367123681236912370123711237212373123741237512376123771237812379123801238112382123831238412385123861238712388123891239012391123921239312394123951239612397123981239912400124011240212403124041240512406124071240812409124101241112412124131241412415124161241712418124191242012421124221242312424124251242612427124281242912430124311243212433124341243512436124371243812439124401244112442124431244412445124461244712448124491245012451124521245312454124551245612457124581245912460124611246212463124641246512466124671246812469124701247112472124731247412475124761247712478124791248012481124821248312484124851248612487124881248912490124911249212493124941249512496124971249812499125001250112502125031250412505125061250712508125091251012511125121251312514125151251612517125181251912520125211252212523125241252512526125271252812529125301253112532125331253412535125361253712538125391254012541125421254312544125451254612547125481254912550125511255212553125541255512556125571255812559125601256112562125631256412565125661256712568125691257012571125721257312574125751257612577125781257912580125811258212583125841258512586125871258812589125901259112592125931259412595125961259712598125991260012601126021260312604126051260612607126081260912610126111261212613126141261512616126171261812619126201262112622126231262412625126261262712628126291263012631126321263312634126351263612637126381263912640126411264212643126441264512646126471264812649126501265112652126531265412655126561265712658126591266012661126621266312664126651266612667126681266912670126711267212673126741267512676126771267812679126801268112682126831268412685126861268712688126891269012691126921269312694126951269612697126981269912700127011270212703127041270512706127071270812709127101271112712127131271412715127161271712718127191272012721127221272312724127251272612727127281272912730127311273212733127341273512736127371273812739127401274112742127431274412745127461274712748127491275012751127521275312754127551275612757127581275912760127611276212763127641276512766127671276812769127701277112772127731277412775127761277712778127791278012781127821278312784127851278612787127881278912790127911279212793127941279512796127971279812799128001280112802128031280412805128061280712808128091281012811128121281312814128151281612817128181281912820128211282212823128241282512826128271282812829128301283112832128331283412835128361283712838128391284012841128421284312844128451284612847128481284912850128511285212853128541285512856128571285812859128601286112862128631286412865128661286712868128691287012871128721287312874128751287612877128781287912880128811288212883128841288512886128871288812889128901289112892128931289412895128961289712898128991290012901129021290312904129051290612907129081290912910129111291212913129141291512916129171291812919129201292112922129231292412925129261292712928129291293012931129321293312934129351293612937129381293912940129411294212943129441294512946129471294812949129501295112952129531295412955129561295712958129591296012961129621296312964129651296612967129681296912970129711297212973129741297512976129771297812979129801298112982129831298412985129861298712988129891299012991129921299312994129951299612997129981299913000130011300213003130041300513006130071300813009130101301113012130131301413015130161301713018130191302013021130221302313024130251302613027130281302913030130311303213033130341303513036130371303813039130401304113042130431304413045130461304713048130491305013051130521305313054130551305613057130581305913060130611306213063130641306513066130671306813069130701307113072130731307413075130761307713078130791308013081130821308313084130851308613087130881308913090130911309213093130941309513096130971309813099131001310113102131031310413105131061310713108131091311013111131121311313114131151311613117131181311913120131211312213123131241312513126131271312813129131301313113132131331313413135131361313713138131391314013141131421314313144131451314613147131481314913150131511315213153131541315513156131571315813159131601316113162131631316413165131661316713168131691317013171131721317313174131751317613177131781317913180131811318213183131841318513186131871318813189131901319113192131931319413195131961319713198131991320013201132021320313204132051320613207132081320913210132111321213213132141321513216132171321813219132201322113222132231322413225132261322713228132291323013231132321323313234132351323613237132381323913240132411324213243132441324513246132471324813249132501325113252132531325413255132561325713258132591326013261132621326313264132651326613267132681326913270132711327213273132741327513276132771327813279132801328113282132831328413285132861328713288132891329013291132921329313294132951329613297132981329913300133011330213303133041330513306133071330813309133101331113312133131331413315133161331713318133191332013321133221332313324133251332613327133281332913330133311333213333133341333513336133371333813339133401334113342133431334413345133461334713348133491335013351133521335313354133551335613357133581335913360133611336213363133641336513366133671336813369133701337113372133731337413375133761337713378133791338013381133821338313384133851338613387133881338913390133911339213393133941339513396133971339813399134001340113402134031340413405134061340713408134091341013411134121341313414134151341613417134181341913420134211342213423134241342513426134271342813429134301343113432134331343413435134361343713438134391344013441134421344313444134451344613447134481344913450134511345213453134541345513456134571345813459134601346113462134631346413465134661346713468134691347013471134721347313474134751347613477134781347913480134811348213483134841348513486134871348813489134901349113492134931349413495134961349713498134991350013501135021350313504135051350613507135081350913510135111351213513135141351513516135171351813519135201352113522135231352413525135261352713528135291353013531135321353313534135351353613537135381353913540135411354213543135441354513546135471354813549135501355113552135531355413555135561355713558135591356013561135621356313564135651356613567135681356913570135711357213573135741357513576135771357813579135801358113582135831358413585135861358713588135891359013591135921359313594135951359613597135981359913600136011360213603136041360513606136071360813609136101361113612136131361413615136161361713618136191362013621136221362313624136251362613627136281362913630136311363213633136341363513636136371363813639136401364113642136431364413645136461364713648136491365013651136521365313654136551365613657136581365913660136611366213663136641366513666136671366813669136701367113672136731367413675136761367713678136791368013681136821368313684136851368613687136881368913690136911369213693136941369513696136971369813699137001370113702137031370413705137061370713708137091371013711137121371313714137151371613717137181371913720137211372213723137241372513726137271372813729137301373113732137331373413735137361373713738137391374013741137421374313744137451374613747137481374913750137511375213753137541375513756137571375813759137601376113762137631376413765137661376713768137691377013771137721377313774137751377613777137781377913780137811378213783137841378513786137871378813789137901379113792137931379413795137961379713798137991380013801138021380313804138051380613807138081380913810138111381213813138141381513816138171381813819138201382113822138231382413825138261382713828138291383013831138321383313834138351383613837138381383913840138411384213843138441384513846138471384813849138501385113852138531385413855138561385713858138591386013861138621386313864138651386613867138681386913870138711387213873138741387513876138771387813879138801388113882138831388413885138861388713888138891389013891138921389313894138951389613897138981389913900139011390213903139041390513906139071390813909139101391113912139131391413915139161391713918139191392013921139221392313924139251392613927139281392913930139311393213933139341393513936139371393813939139401394113942139431394413945139461394713948139491395013951139521395313954139551395613957139581395913960139611396213963139641396513966139671396813969139701397113972139731397413975139761397713978139791398013981139821398313984139851398613987139881398913990139911399213993139941399513996139971399813999140001400114002140031400414005140061400714008140091401014011140121401314014140151401614017140181401914020140211402214023140241402514026140271402814029140301403114032140331403414035140361403714038140391404014041140421404314044140451404614047140481404914050140511405214053140541405514056140571405814059140601406114062140631406414065140661406714068140691407014071140721407314074140751407614077140781407914080140811408214083140841408514086140871408814089140901409114092140931409414095140961409714098140991410014101141021410314104141051410614107141081410914110141111411214113141141411514116141171411814119141201412114122141231412414125141261412714128141291413014131141321413314134141351413614137141381413914140141411414214143141441414514146141471414814149141501415114152141531415414155141561415714158141591416014161141621416314164141651416614167141681416914170141711417214173141741417514176141771417814179141801418114182141831418414185141861418714188141891419014191141921419314194141951419614197141981419914200142011420214203142041420514206142071420814209142101421114212142131421414215142161421714218142191422014221142221422314224142251422614227142281422914230142311423214233142341423514236142371423814239142401424114242142431424414245142461424714248142491425014251142521425314254142551425614257142581425914260142611426214263142641426514266142671426814269142701427114272142731427414275142761427714278142791428014281 |
- (function e(t,n,r){function s(o,u){if(!n[o]){if(!t[o]){var a=typeof require=="function"&&require;if(!u&&a)return a(o,!0);if(i)return i(o,!0);var f=new Error("Cannot find module '"+o+"'");throw f.code="MODULE_NOT_FOUND",f}var l=n[o]={exports:{}};t[o][0].call(l.exports,function(e){var n=t[o][1][e];return s(n?n:e)},l,l.exports,e,t,n,r)}return n[o].exports}var i=typeof require=="function"&&require;for(var o=0;o<r.length;o++)s(r[o]);return s})({1:[function(require,module,exports){
- },{}],2:[function(require,module,exports){
- (function (global){
- var topLevel = typeof global !== 'undefined' ? global :
- typeof window !== 'undefined' ? window : {}
- var minDoc = require('min-document');
- var doccy;
- if (typeof document !== 'undefined') {
- doccy = document;
- } else {
- doccy = topLevel['__GLOBAL_DOCUMENT_CACHE@4'];
- if (!doccy) {
- doccy = topLevel['__GLOBAL_DOCUMENT_CACHE@4'] = minDoc;
- }
- }
- module.exports = doccy;
- }).call(this,typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {})
- },{"min-document":1}],3:[function(require,module,exports){
- (function (global){
- var win;
- if (typeof window !== "undefined") {
- win = window;
- } else if (typeof global !== "undefined") {
- win = global;
- } else if (typeof self !== "undefined"){
- win = self;
- } else {
- win = {};
- }
- module.exports = win;
- }).call(this,typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {})
- },{}],4:[function(require,module,exports){
- /**
- * mux.js
- *
- * Copyright (c) 2016 Brightcove
- * All rights reserved.
- *
- * A stream-based aac to mp4 converter. This utility can be used to
- * deliver mp4s to a SourceBuffer on platforms that support native
- * Media Source Extensions.
- */
- 'use strict';
- var Stream = require('../utils/stream.js');
- // Constants
- var AacStream;
- /**
- * Splits an incoming stream of binary data into ADTS and ID3 Frames.
- */
- AacStream = function() {
- var
- everything = new Uint8Array(),
- timeStamp = 0;
- AacStream.prototype.init.call(this);
- this.setTimestamp = function(timestamp) {
- timeStamp = timestamp;
- };
- this.parseId3TagSize = function(header, byteIndex) {
- var
- returnSize = (header[byteIndex + 6] << 21) |
- (header[byteIndex + 7] << 14) |
- (header[byteIndex + 8] << 7) |
- (header[byteIndex + 9]),
- flags = header[byteIndex + 5],
- footerPresent = (flags & 16) >> 4;
- if (footerPresent) {
- return returnSize + 20;
- }
- return returnSize + 10;
- };
- this.parseAdtsSize = function(header, byteIndex) {
- var
- lowThree = (header[byteIndex + 5] & 0xE0) >> 5,
- middle = header[byteIndex + 4] << 3,
- highTwo = header[byteIndex + 3] & 0x3 << 11;
- return (highTwo | middle) | lowThree;
- };
- this.push = function(bytes) {
- var
- frameSize = 0,
- byteIndex = 0,
- bytesLeft,
- chunk,
- packet,
- tempLength;
- // If there are bytes remaining from the last segment, prepend them to the
- // bytes that were pushed in
- if (everything.length) {
- tempLength = everything.length;
- everything = new Uint8Array(bytes.byteLength + tempLength);
- everything.set(everything.subarray(0, tempLength));
- everything.set(bytes, tempLength);
- } else {
- everything = bytes;
- }
- while (everything.length - byteIndex >= 3) {
- if ((everything[byteIndex] === 'I'.charCodeAt(0)) &&
- (everything[byteIndex + 1] === 'D'.charCodeAt(0)) &&
- (everything[byteIndex + 2] === '3'.charCodeAt(0))) {
- // Exit early because we don't have enough to parse
- // the ID3 tag header
- if (everything.length - byteIndex < 10) {
- break;
- }
- // check framesize
- frameSize = this.parseId3TagSize(everything, byteIndex);
- // Exit early if we don't have enough in the buffer
- // to emit a full packet
- if (frameSize > everything.length) {
- break;
- }
- chunk = {
- type: 'timed-metadata',
- data: everything.subarray(byteIndex, byteIndex + frameSize)
- };
- this.trigger('data', chunk);
- byteIndex += frameSize;
- continue;
- } else if ((everything[byteIndex] & 0xff === 0xff) &&
- ((everything[byteIndex + 1] & 0xf0) === 0xf0)) {
- // Exit early because we don't have enough to parse
- // the ADTS frame header
- if (everything.length - byteIndex < 7) {
- break;
- }
- frameSize = this.parseAdtsSize(everything, byteIndex);
- // Exit early if we don't have enough in the buffer
- // to emit a full packet
- if (frameSize > everything.length) {
- break;
- }
- packet = {
- type: 'audio',
- data: everything.subarray(byteIndex, byteIndex + frameSize),
- pts: timeStamp,
- dts: timeStamp
- };
- this.trigger('data', packet);
- byteIndex += frameSize;
- continue;
- }
- byteIndex++;
- }
- bytesLeft = everything.length - byteIndex;
- if (bytesLeft > 0) {
- everything = everything.subarray(byteIndex);
- } else {
- everything = new Uint8Array();
- }
- };
- };
- AacStream.prototype = new Stream();
- module.exports = AacStream;
- },{"../utils/stream.js":33}],5:[function(require,module,exports){
- /**
- * mux.js
- *
- * Copyright (c) 2016 Brightcove
- * All rights reserved.
- *
- * Utilities to detect basic properties and metadata about Aac data.
- */
- 'use strict';
- var ADTS_SAMPLING_FREQUENCIES = [
- 96000,
- 88200,
- 64000,
- 48000,
- 44100,
- 32000,
- 24000,
- 22050,
- 16000,
- 12000,
- 11025,
- 8000,
- 7350
- ];
- var parseSyncSafeInteger = function(data) {
- return (data[0] << 21) |
- (data[1] << 14) |
- (data[2] << 7) |
- (data[3]);
- };
- // return a percent-encoded representation of the specified byte range
- // @see http://en.wikipedia.org/wiki/Percent-encoding
- var percentEncode = function(bytes, start, end) {
- var i, result = '';
- for (i = start; i < end; i++) {
- result += '%' + ('00' + bytes[i].toString(16)).slice(-2);
- }
- return result;
- };
- // return the string representation of the specified byte range,
- // interpreted as ISO-8859-1.
- var parseIso88591 = function(bytes, start, end) {
- return unescape(percentEncode(bytes, start, end)); // jshint ignore:line
- };
- var parseId3TagSize = function(header, byteIndex) {
- var
- returnSize = (header[byteIndex + 6] << 21) |
- (header[byteIndex + 7] << 14) |
- (header[byteIndex + 8] << 7) |
- (header[byteIndex + 9]),
- flags = header[byteIndex + 5],
- footerPresent = (flags & 16) >> 4;
- if (footerPresent) {
- return returnSize + 20;
- }
- return returnSize + 10;
- };
- var parseAdtsSize = function(header, byteIndex) {
- var
- lowThree = (header[byteIndex + 5] & 0xE0) >> 5,
- middle = header[byteIndex + 4] << 3,
- highTwo = header[byteIndex + 3] & 0x3 << 11;
- return (highTwo | middle) | lowThree;
- };
- var parseType = function(header, byteIndex) {
- if ((header[byteIndex] === 'I'.charCodeAt(0)) &&
- (header[byteIndex + 1] === 'D'.charCodeAt(0)) &&
- (header[byteIndex + 2] === '3'.charCodeAt(0))) {
- return 'timed-metadata';
- } else if ((header[byteIndex] & 0xff === 0xff) &&
- ((header[byteIndex + 1] & 0xf0) === 0xf0)) {
- return 'audio';
- }
- return null;
- };
- var parseSampleRate = function(packet) {
- var i = 0;
- while (i + 5 < packet.length) {
- if (packet[i] !== 0xFF || (packet[i + 1] & 0xF6) !== 0xF0) {
- // If a valid header was not found, jump one forward and attempt to
- // find a valid ADTS header starting at the next byte
- i++;
- continue;
- }
- return ADTS_SAMPLING_FREQUENCIES[(packet[i + 2] & 0x3c) >>> 2];
- }
- return null;
- };
- var parseAacTimestamp = function(packet) {
- var frameStart, frameSize, frame, frameHeader;
- // find the start of the first frame and the end of the tag
- frameStart = 10;
- if (packet[5] & 0x40) {
- // advance the frame start past the extended header
- frameStart += 4; // header size field
- frameStart += parseSyncSafeInteger(packet.subarray(10, 14));
- }
- // parse one or more ID3 frames
- // http://id3.org/id3v2.3.0#ID3v2_frame_overview
- do {
- // determine the number of bytes in this frame
- frameSize = parseSyncSafeInteger(packet.subarray(frameStart + 4, frameStart + 8));
- if (frameSize < 1) {
- return null;
- }
- frameHeader = String.fromCharCode(packet[frameStart],
- packet[frameStart + 1],
- packet[frameStart + 2],
- packet[frameStart + 3]);
- if (frameHeader === 'PRIV') {
- frame = packet.subarray(frameStart + 10, frameStart + frameSize + 10);
- for (var i = 0; i < frame.byteLength; i++) {
- if (frame[i] === 0) {
- var owner = parseIso88591(frame, 0, i);
- if (owner === 'com.apple.streaming.transportStreamTimestamp') {
- var d = frame.subarray(i + 1);
- var size = ((d[3] & 0x01) << 30) |
- (d[4] << 22) |
- (d[5] << 14) |
- (d[6] << 6) |
- (d[7] >>> 2);
- size *= 4;
- size += d[7] & 0x03;
- return size;
- }
- break;
- }
- }
- }
- frameStart += 10; // advance past the frame header
- frameStart += frameSize; // advance past the frame body
- } while (frameStart < packet.byteLength);
- return null;
- };
- module.exports = {
- parseId3TagSize: parseId3TagSize,
- parseAdtsSize: parseAdtsSize,
- parseType: parseType,
- parseSampleRate: parseSampleRate,
- parseAacTimestamp: parseAacTimestamp
- };
- },{}],6:[function(require,module,exports){
- 'use strict';
- var Stream = require('../utils/stream.js');
- var AdtsStream;
- var
- ADTS_SAMPLING_FREQUENCIES = [
- 96000,
- 88200,
- 64000,
- 48000,
- 44100,
- 32000,
- 24000,
- 22050,
- 16000,
- 12000,
- 11025,
- 8000,
- 7350
- ];
- /*
- * Accepts a ElementaryStream and emits data events with parsed
- * AAC Audio Frames of the individual packets. Input audio in ADTS
- * format is unpacked and re-emitted as AAC frames.
- *
- * @see http://wiki.multimedia.cx/index.php?title=ADTS
- * @see http://wiki.multimedia.cx/?title=Understanding_AAC
- */
- AdtsStream = function() {
- var buffer;
- AdtsStream.prototype.init.call(this);
- this.push = function(packet) {
- var
- i = 0,
- frameNum = 0,
- frameLength,
- protectionSkipBytes,
- frameEnd,
- oldBuffer,
- sampleCount,
- adtsFrameDuration;
- if (packet.type !== 'audio') {
- // ignore non-audio data
- return;
- }
- // Prepend any data in the buffer to the input data so that we can parse
- // aac frames the cross a PES packet boundary
- if (buffer) {
- oldBuffer = buffer;
- buffer = new Uint8Array(oldBuffer.byteLength + packet.data.byteLength);
- buffer.set(oldBuffer);
- buffer.set(packet.data, oldBuffer.byteLength);
- } else {
- buffer = packet.data;
- }
- // unpack any ADTS frames which have been fully received
- // for details on the ADTS header, see http://wiki.multimedia.cx/index.php?title=ADTS
- while (i + 5 < buffer.length) {
- // Loook for the start of an ADTS header..
- if (buffer[i] !== 0xFF || (buffer[i + 1] & 0xF6) !== 0xF0) {
- // If a valid header was not found, jump one forward and attempt to
- // find a valid ADTS header starting at the next byte
- i++;
- continue;
- }
- // The protection skip bit tells us if we have 2 bytes of CRC data at the
- // end of the ADTS header
- protectionSkipBytes = (~buffer[i + 1] & 0x01) * 2;
- // Frame length is a 13 bit integer starting 16 bits from the
- // end of the sync sequence
- frameLength = ((buffer[i + 3] & 0x03) << 11) |
- (buffer[i + 4] << 3) |
- ((buffer[i + 5] & 0xe0) >> 5);
- sampleCount = ((buffer[i + 6] & 0x03) + 1) * 1024;
- adtsFrameDuration = (sampleCount * 90000) /
- ADTS_SAMPLING_FREQUENCIES[(buffer[i + 2] & 0x3c) >>> 2];
- frameEnd = i + frameLength;
- // If we don't have enough data to actually finish this ADTS frame, return
- // and wait for more data
- if (buffer.byteLength < frameEnd) {
- return;
- }
- // Otherwise, deliver the complete AAC frame
- this.trigger('data', {
- pts: packet.pts + (frameNum * adtsFrameDuration),
- dts: packet.dts + (frameNum * adtsFrameDuration),
- sampleCount: sampleCount,
- audioobjecttype: ((buffer[i + 2] >>> 6) & 0x03) + 1,
- channelcount: ((buffer[i + 2] & 1) << 2) |
- ((buffer[i + 3] & 0xc0) >>> 6),
- samplerate: ADTS_SAMPLING_FREQUENCIES[(buffer[i + 2] & 0x3c) >>> 2],
- samplingfrequencyindex: (buffer[i + 2] & 0x3c) >>> 2,
- // assume ISO/IEC 14496-12 AudioSampleEntry default of 16
- samplesize: 16,
- data: buffer.subarray(i + 7 + protectionSkipBytes, frameEnd)
- });
- // If the buffer is empty, clear it and return
- if (buffer.byteLength === frameEnd) {
- buffer = undefined;
- return;
- }
- frameNum++;
- // Remove the finished frame from the buffer and start the process again
- buffer = buffer.subarray(frameEnd);
- }
- };
- this.flush = function() {
- this.trigger('done');
- };
- };
- AdtsStream.prototype = new Stream();
- module.exports = AdtsStream;
- },{"../utils/stream.js":33}],7:[function(require,module,exports){
- 'use strict';
- var Stream = require('../utils/stream.js');
- var ExpGolomb = require('../utils/exp-golomb.js');
- var H264Stream, NalByteStream;
- var PROFILES_WITH_OPTIONAL_SPS_DATA;
- /**
- * Accepts a NAL unit byte stream and unpacks the embedded NAL units.
- */
- NalByteStream = function() {
- var
- syncPoint = 0,
- i,
- buffer;
- NalByteStream.prototype.init.call(this);
- this.push = function(data) {
- var swapBuffer;
- if (!buffer) {
- buffer = data.data;
- } else {
- swapBuffer = new Uint8Array(buffer.byteLength + data.data.byteLength);
- swapBuffer.set(buffer);
- swapBuffer.set(data.data, buffer.byteLength);
- buffer = swapBuffer;
- }
- // Rec. ITU-T H.264, Annex B
- // scan for NAL unit boundaries
- // a match looks like this:
- // 0 0 1 .. NAL .. 0 0 1
- // ^ sync point ^ i
- // or this:
- // 0 0 1 .. NAL .. 0 0 0
- // ^ sync point ^ i
- // advance the sync point to a NAL start, if necessary
- for (; syncPoint < buffer.byteLength - 3; syncPoint++) {
- if (buffer[syncPoint + 2] === 1) {
- // the sync point is properly aligned
- i = syncPoint + 5;
- break;
- }
- }
- while (i < buffer.byteLength) {
- // look at the current byte to determine if we've hit the end of
- // a NAL unit boundary
- switch (buffer[i]) {
- case 0:
- // skip past non-sync sequences
- if (buffer[i - 1] !== 0) {
- i += 2;
- break;
- } else if (buffer[i - 2] !== 0) {
- i++;
- break;
- }
- // deliver the NAL unit if it isn't empty
- if (syncPoint + 3 !== i - 2) {
- this.trigger('data', buffer.subarray(syncPoint + 3, i - 2));
- }
- // drop trailing zeroes
- do {
- i++;
- } while (buffer[i] !== 1 && i < buffer.length);
- syncPoint = i - 2;
- i += 3;
- break;
- case 1:
- // skip past non-sync sequences
- if (buffer[i - 1] !== 0 ||
- buffer[i - 2] !== 0) {
- i += 3;
- break;
- }
- // deliver the NAL unit
- this.trigger('data', buffer.subarray(syncPoint + 3, i - 2));
- syncPoint = i - 2;
- i += 3;
- break;
- default:
- // the current byte isn't a one or zero, so it cannot be part
- // of a sync sequence
- i += 3;
- break;
- }
- }
- // filter out the NAL units that were delivered
- buffer = buffer.subarray(syncPoint);
- i -= syncPoint;
- syncPoint = 0;
- };
- this.flush = function() {
- // deliver the last buffered NAL unit
- if (buffer && buffer.byteLength > 3) {
- this.trigger('data', buffer.subarray(syncPoint + 3));
- }
- // reset the stream state
- buffer = null;
- syncPoint = 0;
- this.trigger('done');
- };
- };
- NalByteStream.prototype = new Stream();
- // values of profile_idc that indicate additional fields are included in the SPS
- // see Recommendation ITU-T H.264 (4/2013),
- // 7.3.2.1.1 Sequence parameter set data syntax
- PROFILES_WITH_OPTIONAL_SPS_DATA = {
- 100: true,
- 110: true,
- 122: true,
- 244: true,
- 44: true,
- 83: true,
- 86: true,
- 118: true,
- 128: true,
- 138: true,
- 139: true,
- 134: true
- };
- /**
- * Accepts input from a ElementaryStream and produces H.264 NAL unit data
- * events.
- */
- H264Stream = function() {
- var
- nalByteStream = new NalByteStream(),
- self,
- trackId,
- currentPts,
- currentDts,
- discardEmulationPreventionBytes,
- readSequenceParameterSet,
- skipScalingList;
- H264Stream.prototype.init.call(this);
- self = this;
- this.push = function(packet) {
- if (packet.type !== 'video') {
- return;
- }
- trackId = packet.trackId;
- currentPts = packet.pts;
- currentDts = packet.dts;
- nalByteStream.push(packet);
- };
- nalByteStream.on('data', function(data) {
- var
- event = {
- trackId: trackId,
- pts: currentPts,
- dts: currentDts,
- data: data
- };
- switch (data[0] & 0x1f) {
- case 0x05:
- event.nalUnitType = 'slice_layer_without_partitioning_rbsp_idr';
- break;
- case 0x06:
- event.nalUnitType = 'sei_rbsp';
- event.escapedRBSP = discardEmulationPreventionBytes(data.subarray(1));
- break;
- case 0x07:
- event.nalUnitType = 'seq_parameter_set_rbsp';
- event.escapedRBSP = discardEmulationPreventionBytes(data.subarray(1));
- event.config = readSequenceParameterSet(event.escapedRBSP);
- break;
- case 0x08:
- event.nalUnitType = 'pic_parameter_set_rbsp';
- break;
- case 0x09:
- event.nalUnitType = 'access_unit_delimiter_rbsp';
- break;
- default:
- break;
- }
- self.trigger('data', event);
- });
- nalByteStream.on('done', function() {
- self.trigger('done');
- });
- this.flush = function() {
- nalByteStream.flush();
- };
- /**
- * Advance the ExpGolomb decoder past a scaling list. The scaling
- * list is optionally transmitted as part of a sequence parameter
- * set and is not relevant to transmuxing.
- * @param count {number} the number of entries in this scaling list
- * @param expGolombDecoder {object} an ExpGolomb pointed to the
- * start of a scaling list
- * @see Recommendation ITU-T H.264, Section 7.3.2.1.1.1
- */
- skipScalingList = function(count, expGolombDecoder) {
- var
- lastScale = 8,
- nextScale = 8,
- j,
- deltaScale;
- for (j = 0; j < count; j++) {
- if (nextScale !== 0) {
- deltaScale = expGolombDecoder.readExpGolomb();
- nextScale = (lastScale + deltaScale + 256) % 256;
- }
- lastScale = (nextScale === 0) ? lastScale : nextScale;
- }
- };
- /**
- * Expunge any "Emulation Prevention" bytes from a "Raw Byte
- * Sequence Payload"
- * @param data {Uint8Array} the bytes of a RBSP from a NAL
- * unit
- * @return {Uint8Array} the RBSP without any Emulation
- * Prevention Bytes
- */
- discardEmulationPreventionBytes = function(data) {
- var
- length = data.byteLength,
- emulationPreventionBytesPositions = [],
- i = 1,
- newLength, newData;
- // Find all `Emulation Prevention Bytes`
- while (i < length - 2) {
- if (data[i] === 0 && data[i + 1] === 0 && data[i + 2] === 0x03) {
- emulationPreventionBytesPositions.push(i + 2);
- i += 2;
- } else {
- i++;
- }
- }
- // If no Emulation Prevention Bytes were found just return the original
- // array
- if (emulationPreventionBytesPositions.length === 0) {
- return data;
- }
- // Create a new array to hold the NAL unit data
- newLength = length - emulationPreventionBytesPositions.length;
- newData = new Uint8Array(newLength);
- var sourceIndex = 0;
- for (i = 0; i < newLength; sourceIndex++, i++) {
- if (sourceIndex === emulationPreventionBytesPositions[0]) {
- // Skip this byte
- sourceIndex++;
- // Remove this position index
- emulationPreventionBytesPositions.shift();
- }
- newData[i] = data[sourceIndex];
- }
- return newData;
- };
- /**
- * Read a sequence parameter set and return some interesting video
- * properties. A sequence parameter set is the H264 metadata that
- * describes the properties of upcoming video frames.
- * @param data {Uint8Array} the bytes of a sequence parameter set
- * @return {object} an object with configuration parsed from the
- * sequence parameter set, including the dimensions of the
- * associated video frames.
- */
- readSequenceParameterSet = function(data) {
- var
- frameCropLeftOffset = 0,
- frameCropRightOffset = 0,
- frameCropTopOffset = 0,
- frameCropBottomOffset = 0,
- sarScale = 1,
- expGolombDecoder, profileIdc, levelIdc, profileCompatibility,
- chromaFormatIdc, picOrderCntType,
- numRefFramesInPicOrderCntCycle, picWidthInMbsMinus1,
- picHeightInMapUnitsMinus1,
- frameMbsOnlyFlag,
- scalingListCount,
- sarRatio,
- aspectRatioIdc,
- i;
- expGolombDecoder = new ExpGolomb(data);
- profileIdc = expGolombDecoder.readUnsignedByte(); // profile_idc
- profileCompatibility = expGolombDecoder.readUnsignedByte(); // constraint_set[0-5]_flag
- levelIdc = expGolombDecoder.readUnsignedByte(); // level_idc u(8)
- expGolombDecoder.skipUnsignedExpGolomb(); // seq_parameter_set_id
- // some profiles have more optional data we don't need
- if (PROFILES_WITH_OPTIONAL_SPS_DATA[profileIdc]) {
- chromaFormatIdc = expGolombDecoder.readUnsignedExpGolomb();
- if (chromaFormatIdc === 3) {
- expGolombDecoder.skipBits(1); // separate_colour_plane_flag
- }
- expGolombDecoder.skipUnsignedExpGolomb(); // bit_depth_luma_minus8
- expGolombDecoder.skipUnsignedExpGolomb(); // bit_depth_chroma_minus8
- expGolombDecoder.skipBits(1); // qpprime_y_zero_transform_bypass_flag
- if (expGolombDecoder.readBoolean()) { // seq_scaling_matrix_present_flag
- scalingListCount = (chromaFormatIdc !== 3) ? 8 : 12;
- for (i = 0; i < scalingListCount; i++) {
- if (expGolombDecoder.readBoolean()) { // seq_scaling_list_present_flag[ i ]
- if (i < 6) {
- skipScalingList(16, expGolombDecoder);
- } else {
- skipScalingList(64, expGolombDecoder);
- }
- }
- }
- }
- }
- expGolombDecoder.skipUnsignedExpGolomb(); // log2_max_frame_num_minus4
- picOrderCntType = expGolombDecoder.readUnsignedExpGolomb();
- if (picOrderCntType === 0) {
- expGolombDecoder.readUnsignedExpGolomb(); // log2_max_pic_order_cnt_lsb_minus4
- } else if (picOrderCntType === 1) {
- expGolombDecoder.skipBits(1); // delta_pic_order_always_zero_flag
- expGolombDecoder.skipExpGolomb(); // offset_for_non_ref_pic
- expGolombDecoder.skipExpGolomb(); // offset_for_top_to_bottom_field
- numRefFramesInPicOrderCntCycle = expGolombDecoder.readUnsignedExpGolomb();
- for (i = 0; i < numRefFramesInPicOrderCntCycle; i++) {
- expGolombDecoder.skipExpGolomb(); // offset_for_ref_frame[ i ]
- }
- }
- expGolombDecoder.skipUnsignedExpGolomb(); // max_num_ref_frames
- expGolombDecoder.skipBits(1); // gaps_in_frame_num_value_allowed_flag
- picWidthInMbsMinus1 = expGolombDecoder.readUnsignedExpGolomb();
- picHeightInMapUnitsMinus1 = expGolombDecoder.readUnsignedExpGolomb();
- frameMbsOnlyFlag = expGolombDecoder.readBits(1);
- if (frameMbsOnlyFlag === 0) {
- expGolombDecoder.skipBits(1); // mb_adaptive_frame_field_flag
- }
- expGolombDecoder.skipBits(1); // direct_8x8_inference_flag
- if (expGolombDecoder.readBoolean()) { // frame_cropping_flag
- frameCropLeftOffset = expGolombDecoder.readUnsignedExpGolomb();
- frameCropRightOffset = expGolombDecoder.readUnsignedExpGolomb();
- frameCropTopOffset = expGolombDecoder.readUnsignedExpGolomb();
- frameCropBottomOffset = expGolombDecoder.readUnsignedExpGolomb();
- }
- if (expGolombDecoder.readBoolean()) {
- // vui_parameters_present_flag
- if (expGolombDecoder.readBoolean()) {
- // aspect_ratio_info_present_flag
- aspectRatioIdc = expGolombDecoder.readUnsignedByte();
- switch (aspectRatioIdc) {
- case 1: sarRatio = [1, 1]; break;
- case 2: sarRatio = [12, 11]; break;
- case 3: sarRatio = [10, 11]; break;
- case 4: sarRatio = [16, 11]; break;
- case 5: sarRatio = [40, 33]; break;
- case 6: sarRatio = [24, 11]; break;
- case 7: sarRatio = [20, 11]; break;
- case 8: sarRatio = [32, 11]; break;
- case 9: sarRatio = [80, 33]; break;
- case 10: sarRatio = [18, 11]; break;
- case 11: sarRatio = [15, 11]; break;
- case 12: sarRatio = [64, 33]; break;
- case 13: sarRatio = [160, 99]; break;
- case 14: sarRatio = [4, 3]; break;
- case 15: sarRatio = [3, 2]; break;
- case 16: sarRatio = [2, 1]; break;
- case 255: {
- sarRatio = [expGolombDecoder.readUnsignedByte() << 8 |
- expGolombDecoder.readUnsignedByte(),
- expGolombDecoder.readUnsignedByte() << 8 |
- expGolombDecoder.readUnsignedByte() ];
- break;
- }
- }
- if (sarRatio) {
- sarScale = sarRatio[0] / sarRatio[1];
- }
- }
- }
- return {
- profileIdc: profileIdc,
- levelIdc: levelIdc,
- profileCompatibility: profileCompatibility,
- width: Math.ceil((((picWidthInMbsMinus1 + 1) * 16) - frameCropLeftOffset * 2 - frameCropRightOffset * 2) * sarScale),
- height: ((2 - frameMbsOnlyFlag) * (picHeightInMapUnitsMinus1 + 1) * 16) - (frameCropTopOffset * 2) - (frameCropBottomOffset * 2)
- };
- };
- };
- H264Stream.prototype = new Stream();
- module.exports = {
- H264Stream: H264Stream,
- NalByteStream: NalByteStream
- };
- },{"../utils/exp-golomb.js":32,"../utils/stream.js":33}],8:[function(require,module,exports){
- module.exports = {
- adts: require('./adts'),
- h264: require('./h264')
- };
- },{"./adts":6,"./h264":7}],9:[function(require,module,exports){
- var highPrefix = [33, 16, 5, 32, 164, 27];
- var lowPrefix = [33, 65, 108, 84, 1, 2, 4, 8, 168, 2, 4, 8, 17, 191, 252];
- var zeroFill = function(count) {
- var a = [];
- while (count--) {
- a.push(0);
- }
- return a;
- };
- var makeTable = function(metaTable) {
- return Object.keys(metaTable).reduce(function(obj, key) {
- obj[key] = new Uint8Array(metaTable[key].reduce(function(arr, part) {
- return arr.concat(part);
- }, []));
- return obj;
- }, {});
- };
- // Frames-of-silence to use for filling in missing AAC frames
- var coneOfSilence = {
- 96000: [highPrefix, [227, 64], zeroFill(154), [56]],
- 88200: [highPrefix, [231], zeroFill(170), [56]],
- 64000: [highPrefix, [248, 192], zeroFill(240), [56]],
- 48000: [highPrefix, [255, 192], zeroFill(268), [55, 148, 128], zeroFill(54), [112]],
- 44100: [highPrefix, [255, 192], zeroFill(268), [55, 163, 128], zeroFill(84), [112]],
- 32000: [highPrefix, [255, 192], zeroFill(268), [55, 234], zeroFill(226), [112]],
- 24000: [highPrefix, [255, 192], zeroFill(268), [55, 255, 128], zeroFill(268), [111, 112], zeroFill(126), [224]],
- 16000: [highPrefix, [255, 192], zeroFill(268), [55, 255, 128], zeroFill(268), [111, 255], zeroFill(269), [223, 108], zeroFill(195), [1, 192]],
- 12000: [lowPrefix, zeroFill(268), [3, 127, 248], zeroFill(268), [6, 255, 240], zeroFill(268), [13, 255, 224], zeroFill(268), [27, 253, 128], zeroFill(259), [56]],
- 11025: [lowPrefix, zeroFill(268), [3, 127, 248], zeroFill(268), [6, 255, 240], zeroFill(268), [13, 255, 224], zeroFill(268), [27, 255, 192], zeroFill(268), [55, 175, 128], zeroFill(108), [112]],
- 8000: [lowPrefix, zeroFill(268), [3, 121, 16], zeroFill(47), [7]]
- };
- module.exports = makeTable(coneOfSilence);
- },{}],10:[function(require,module,exports){
- 'use strict';
- var Stream = require('../utils/stream.js');
- /**
- * The final stage of the transmuxer that emits the flv tags
- * for audio, video, and metadata. Also tranlates in time and
- * outputs caption data and id3 cues.
- */
- var CoalesceStream = function(options) {
- // Number of Tracks per output segment
- // If greater than 1, we combine multiple
- // tracks into a single segment
- this.numberOfTracks = 0;
- this.metadataStream = options.metadataStream;
- this.videoTags = [];
- this.audioTags = [];
- this.videoTrack = null;
- this.audioTrack = null;
- this.pendingCaptions = [];
- this.pendingMetadata = [];
- this.pendingTracks = 0;
- this.processedTracks = 0;
- CoalesceStream.prototype.init.call(this);
- // Take output from multiple
- this.push = function(output) {
- // buffer incoming captions until the associated video segment
- // finishes
- if (output.text) {
- return this.pendingCaptions.push(output);
- }
- // buffer incoming id3 tags until the final flush
- if (output.frames) {
- return this.pendingMetadata.push(output);
- }
- if (output.track.type === 'video') {
- this.videoTrack = output.track;
- this.videoTags = output.tags;
- this.pendingTracks++;
- }
- if (output.track.type === 'audio') {
- this.audioTrack = output.track;
- this.audioTags = output.tags;
- this.pendingTracks++;
- }
- };
- };
- CoalesceStream.prototype = new Stream();
- CoalesceStream.prototype.flush = function(flushSource) {
- var
- id3,
- caption,
- i,
- timelineStartPts,
- event = {
- tags: {},
- captions: [],
- captionStreams: {},
- metadata: []
- };
- if (this.pendingTracks < this.numberOfTracks) {
- if (flushSource !== 'VideoSegmentStream' &&
- flushSource !== 'AudioSegmentStream') {
- // Return because we haven't received a flush from a data-generating
- // portion of the segment (meaning that we have only recieved meta-data
- // or captions.)
- return;
- } else if (this.pendingTracks === 0) {
- // In the case where we receive a flush without any data having been
- // received we consider it an emitted track for the purposes of coalescing
- // `done` events.
- // We do this for the case where there is an audio and video track in the
- // segment but no audio data. (seen in several playlists with alternate
- // audio tracks and no audio present in the main TS segments.)
- this.processedTracks++;
- if (this.processedTracks < this.numberOfTracks) {
- return;
- }
- }
- }
- this.processedTracks += this.pendingTracks;
- this.pendingTracks = 0;
- if (this.processedTracks < this.numberOfTracks) {
- return;
- }
- if (this.videoTrack) {
- timelineStartPts = this.videoTrack.timelineStartInfo.pts;
- } else if (this.audioTrack) {
- timelineStartPts = this.audioTrack.timelineStartInfo.pts;
- }
- event.tags.videoTags = this.videoTags;
- event.tags.audioTags = this.audioTags;
- // Translate caption PTS times into second offsets into the
- // video timeline for the segment, and add track info
- for (i = 0; i < this.pendingCaptions.length; i++) {
- caption = this.pendingCaptions[i];
- caption.startTime = caption.startPts - timelineStartPts;
- caption.startTime /= 90e3;
- caption.endTime = caption.endPts - timelineStartPts;
- caption.endTime /= 90e3;
- event.captionStreams[caption.stream] = true;
- event.captions.push(caption);
- }
- // Translate ID3 frame PTS times into second offsets into the
- // video timeline for the segment
- for (i = 0; i < this.pendingMetadata.length; i++) {
- id3 = this.pendingMetadata[i];
- id3.cueTime = id3.pts - timelineStartPts;
- id3.cueTime /= 90e3;
- event.metadata.push(id3);
- }
- // We add this to every single emitted segment even though we only need
- // it for the first
- event.metadata.dispatchType = this.metadataStream.dispatchType;
- // Reset stream state
- this.videoTrack = null;
- this.audioTrack = null;
- this.videoTags = [];
- this.audioTags = [];
- this.pendingCaptions.length = 0;
- this.pendingMetadata.length = 0;
- this.pendingTracks = 0;
- this.processedTracks = 0;
- // Emit the final segment
- this.trigger('data', event);
- this.trigger('done');
- };
- module.exports = CoalesceStream;
- },{"../utils/stream.js":33}],11:[function(require,module,exports){
- 'use strict';
- var FlvTag = require('./flv-tag.js');
- // For information on the FLV format, see
- // http://download.macromedia.com/f4v/video_file_format_spec_v10_1.pdf.
- // Technically, this function returns the header and a metadata FLV tag
- // if duration is greater than zero
- // duration in seconds
- // @return {object} the bytes of the FLV header as a Uint8Array
- var getFlvHeader = function(duration, audio, video) { // :ByteArray {
- var
- headBytes = new Uint8Array(3 + 1 + 1 + 4),
- head = new DataView(headBytes.buffer),
- metadata,
- result,
- metadataLength;
- // default arguments
- duration = duration || 0;
- audio = audio === undefined ? true : audio;
- video = video === undefined ? true : video;
- // signature
- head.setUint8(0, 0x46); // 'F'
- head.setUint8(1, 0x4c); // 'L'
- head.setUint8(2, 0x56); // 'V'
- // version
- head.setUint8(3, 0x01);
- // flags
- head.setUint8(4, (audio ? 0x04 : 0x00) | (video ? 0x01 : 0x00));
- // data offset, should be 9 for FLV v1
- head.setUint32(5, headBytes.byteLength);
- // init the first FLV tag
- if (duration <= 0) {
- // no duration available so just write the first field of the first
- // FLV tag
- result = new Uint8Array(headBytes.byteLength + 4);
- result.set(headBytes);
- result.set([0, 0, 0, 0], headBytes.byteLength);
- return result;
- }
- // write out the duration metadata tag
- metadata = new FlvTag(FlvTag.METADATA_TAG);
- metadata.pts = metadata.dts = 0;
- metadata.writeMetaDataDouble('duration', duration);
- metadataLength = metadata.finalize().length;
- result = new Uint8Array(headBytes.byteLength + metadataLength);
- result.set(headBytes);
- result.set(head.byteLength, metadataLength);
- return result;
- };
- module.exports = getFlvHeader;
- },{"./flv-tag.js":12}],12:[function(require,module,exports){
- /**
- * An object that stores the bytes of an FLV tag and methods for
- * querying and manipulating that data.
- * @see http://download.macromedia.com/f4v/video_file_format_spec_v10_1.pdf
- */
- 'use strict';
- var FlvTag;
- // (type:uint, extraData:Boolean = false) extends ByteArray
- FlvTag = function(type, extraData) {
- var
- // Counter if this is a metadata tag, nal start marker if this is a video
- // tag. unused if this is an audio tag
- adHoc = 0, // :uint
- // The default size is 16kb but this is not enough to hold iframe
- // data and the resizing algorithm costs a bit so we create a larger
- // starting buffer for video tags
- bufferStartSize = 16384,
- // checks whether the FLV tag has enough capacity to accept the proposed
- // write and re-allocates the internal buffers if necessary
- prepareWrite = function(flv, count) {
- var
- bytes,
- minLength = flv.position + count;
- if (minLength < flv.bytes.byteLength) {
- // there's enough capacity so do nothing
- return;
- }
- // allocate a new buffer and copy over the data that will not be modified
- bytes = new Uint8Array(minLength * 2);
- bytes.set(flv.bytes.subarray(0, flv.position), 0);
- flv.bytes = bytes;
- flv.view = new DataView(flv.bytes.buffer);
- },
- // commonly used metadata properties
- widthBytes = FlvTag.widthBytes || new Uint8Array('width'.length),
- heightBytes = FlvTag.heightBytes || new Uint8Array('height'.length),
- videocodecidBytes = FlvTag.videocodecidBytes || new Uint8Array('videocodecid'.length),
- i;
- if (!FlvTag.widthBytes) {
- // calculating the bytes of common metadata names ahead of time makes the
- // corresponding writes faster because we don't have to loop over the
- // characters
- // re-test with test/perf.html if you're planning on changing this
- for (i = 0; i < 'width'.length; i++) {
- widthBytes[i] = 'width'.charCodeAt(i);
- }
- for (i = 0; i < 'height'.length; i++) {
- heightBytes[i] = 'height'.charCodeAt(i);
- }
- for (i = 0; i < 'videocodecid'.length; i++) {
- videocodecidBytes[i] = 'videocodecid'.charCodeAt(i);
- }
- FlvTag.widthBytes = widthBytes;
- FlvTag.heightBytes = heightBytes;
- FlvTag.videocodecidBytes = videocodecidBytes;
- }
- this.keyFrame = false; // :Boolean
- switch (type) {
- case FlvTag.VIDEO_TAG:
- this.length = 16;
- // Start the buffer at 256k
- bufferStartSize *= 6;
- break;
- case FlvTag.AUDIO_TAG:
- this.length = 13;
- this.keyFrame = true;
- break;
- case FlvTag.METADATA_TAG:
- this.length = 29;
- this.keyFrame = true;
- break;
- default:
- throw new Error('Unknown FLV tag type');
- }
- this.bytes = new Uint8Array(bufferStartSize);
- this.view = new DataView(this.bytes.buffer);
- this.bytes[0] = type;
- this.position = this.length;
- this.keyFrame = extraData; // Defaults to false
- // presentation timestamp
- this.pts = 0;
- // decoder timestamp
- this.dts = 0;
- // ByteArray#writeBytes(bytes:ByteArray, offset:uint = 0, length:uint = 0)
- this.writeBytes = function(bytes, offset, length) {
- var
- start = offset || 0,
- end;
- length = length || bytes.byteLength;
- end = start + length;
- prepareWrite(this, length);
- this.bytes.set(bytes.subarray(start, end), this.position);
- this.position += length;
- this.length = Math.max(this.length, this.position);
- };
- // ByteArray#writeByte(value:int):void
- this.writeByte = function(byte) {
- prepareWrite(this, 1);
- this.bytes[this.position] = byte;
- this.position++;
- this.length = Math.max(this.length, this.position);
- };
- // ByteArray#writeShort(value:int):void
- this.writeShort = function(short) {
- prepareWrite(this, 2);
- this.view.setUint16(this.position, short);
- this.position += 2;
- this.length = Math.max(this.length, this.position);
- };
- // Negative index into array
- // (pos:uint):int
- this.negIndex = function(pos) {
- return this.bytes[this.length - pos];
- };
- // The functions below ONLY work when this[0] == VIDEO_TAG.
- // We are not going to check for that because we dont want the overhead
- // (nal:ByteArray = null):int
- this.nalUnitSize = function() {
- if (adHoc === 0) {
- return 0;
- }
- return this.length - (adHoc + 4);
- };
- this.startNalUnit = function() {
- // remember position and add 4 bytes
- if (adHoc > 0) {
- throw new Error('Attempted to create new NAL wihout closing the old one');
- }
- // reserve 4 bytes for nal unit size
- adHoc = this.length;
- this.length += 4;
- this.position = this.length;
- };
- // (nal:ByteArray = null):void
- this.endNalUnit = function(nalContainer) {
- var
- nalStart, // :uint
- nalLength; // :uint
- // Rewind to the marker and write the size
- if (this.length === adHoc + 4) {
- // we started a nal unit, but didnt write one, so roll back the 4 byte size value
- this.length -= 4;
- } else if (adHoc > 0) {
- nalStart = adHoc + 4;
- nalLength = this.length - nalStart;
- this.position = adHoc;
- this.view.setUint32(this.position, nalLength);
- this.position = this.length;
- if (nalContainer) {
- // Add the tag to the NAL unit
- nalContainer.push(this.bytes.subarray(nalStart, nalStart + nalLength));
- }
- }
- adHoc = 0;
- };
- /**
- * Write out a 64-bit floating point valued metadata property. This method is
- * called frequently during a typical parse and needs to be fast.
- */
- // (key:String, val:Number):void
- this.writeMetaDataDouble = function(key, val) {
- var i;
- prepareWrite(this, 2 + key.length + 9);
- // write size of property name
- this.view.setUint16(this.position, key.length);
- this.position += 2;
- // this next part looks terrible but it improves parser throughput by
- // 10kB/s in my testing
- // write property name
- if (key === 'width') {
- this.bytes.set(widthBytes, this.position);
- this.position += 5;
- } else if (key === 'height') {
- this.bytes.set(heightBytes, this.position);
- this.position += 6;
- } else if (key === 'videocodecid') {
- this.bytes.set(videocodecidBytes, this.position);
- this.position += 12;
- } else {
- for (i = 0; i < key.length; i++) {
- this.bytes[this.position] = key.charCodeAt(i);
- this.position++;
- }
- }
- // skip null byte
- this.position++;
- // write property value
- this.view.setFloat64(this.position, val);
- this.position += 8;
- // update flv tag length
- this.length = Math.max(this.length, this.position);
- ++adHoc;
- };
- // (key:String, val:Boolean):void
- this.writeMetaDataBoolean = function(key, val) {
- var i;
- prepareWrite(this, 2);
- this.view.setUint16(this.position, key.length);
- this.position += 2;
- for (i = 0; i < key.length; i++) {
- // if key.charCodeAt(i) >= 255, handle error
- prepareWrite(this, 1);
- this.bytes[this.position] = key.charCodeAt(i);
- this.position++;
- }
- prepareWrite(this, 2);
- this.view.setUint8(this.position, 0x01);
- this.position++;
- this.view.setUint8(this.position, val ? 0x01 : 0x00);
- this.position++;
- this.length = Math.max(this.length, this.position);
- ++adHoc;
- };
- // ():ByteArray
- this.finalize = function() {
- var
- dtsDelta, // :int
- len; // :int
- switch (this.bytes[0]) {
- // Video Data
- case FlvTag.VIDEO_TAG:
- // We only support AVC, 1 = key frame (for AVC, a seekable
- // frame), 2 = inter frame (for AVC, a non-seekable frame)
- this.bytes[11] = ((this.keyFrame || extraData) ? 0x10 : 0x20) | 0x07;
- this.bytes[12] = extraData ? 0x00 : 0x01;
- dtsDelta = this.pts - this.dts;
- this.bytes[13] = (dtsDelta & 0x00FF0000) >>> 16;
- this.bytes[14] = (dtsDelta & 0x0000FF00) >>> 8;
- this.bytes[15] = (dtsDelta & 0x000000FF) >>> 0;
- break;
- case FlvTag.AUDIO_TAG:
- this.bytes[11] = 0xAF; // 44 kHz, 16-bit stereo
- this.bytes[12] = extraData ? 0x00 : 0x01;
- break;
- case FlvTag.METADATA_TAG:
- this.position = 11;
- this.view.setUint8(this.position, 0x02); // String type
- this.position++;
- this.view.setUint16(this.position, 0x0A); // 10 Bytes
- this.position += 2;
- // set "onMetaData"
- this.bytes.set([0x6f, 0x6e, 0x4d, 0x65,
- 0x74, 0x61, 0x44, 0x61,
- 0x74, 0x61], this.position);
- this.position += 10;
- this.bytes[this.position] = 0x08; // Array type
- this.position++;
- this.view.setUint32(this.position, adHoc);
- this.position = this.length;
- this.bytes.set([0, 0, 9], this.position);
- this.position += 3; // End Data Tag
- this.length = this.position;
- break;
- }
- len = this.length - 11;
- // write the DataSize field
- this.bytes[ 1] = (len & 0x00FF0000) >>> 16;
- this.bytes[ 2] = (len & 0x0000FF00) >>> 8;
- this.bytes[ 3] = (len & 0x000000FF) >>> 0;
- // write the Timestamp
- this.bytes[ 4] = (this.dts & 0x00FF0000) >>> 16;
- this.bytes[ 5] = (this.dts & 0x0000FF00) >>> 8;
- this.bytes[ 6] = (this.dts & 0x000000FF) >>> 0;
- this.bytes[ 7] = (this.dts & 0xFF000000) >>> 24;
- // write the StreamID
- this.bytes[ 8] = 0;
- this.bytes[ 9] = 0;
- this.bytes[10] = 0;
- // Sometimes we're at the end of the view and have one slot to write a
- // uint32, so, prepareWrite of count 4, since, view is uint8
- prepareWrite(this, 4);
- this.view.setUint32(this.length, this.length);
- this.length += 4;
- this.position += 4;
- // trim down the byte buffer to what is actually being used
- this.bytes = this.bytes.subarray(0, this.length);
- this.frameTime = FlvTag.frameTime(this.bytes);
- // if bytes.bytelength isn't equal to this.length, handle error
- return this;
- };
- };
- FlvTag.AUDIO_TAG = 0x08; // == 8, :uint
- FlvTag.VIDEO_TAG = 0x09; // == 9, :uint
- FlvTag.METADATA_TAG = 0x12; // == 18, :uint
- // (tag:ByteArray):Boolean {
- FlvTag.isAudioFrame = function(tag) {
- return FlvTag.AUDIO_TAG === tag[0];
- };
- // (tag:ByteArray):Boolean {
- FlvTag.isVideoFrame = function(tag) {
- return FlvTag.VIDEO_TAG === tag[0];
- };
- // (tag:ByteArray):Boolean {
- FlvTag.isMetaData = function(tag) {
- return FlvTag.METADATA_TAG === tag[0];
- };
- // (tag:ByteArray):Boolean {
- FlvTag.isKeyFrame = function(tag) {
- if (FlvTag.isVideoFrame(tag)) {
- return tag[11] === 0x17;
- }
- if (FlvTag.isAudioFrame(tag)) {
- return true;
- }
- if (FlvTag.isMetaData(tag)) {
- return true;
- }
- return false;
- };
- // (tag:ByteArray):uint {
- FlvTag.frameTime = function(tag) {
- var pts = tag[ 4] << 16; // :uint
- pts |= tag[ 5] << 8;
- pts |= tag[ 6] << 0;
- pts |= tag[ 7] << 24;
- return pts;
- };
- module.exports = FlvTag;
- },{}],13:[function(require,module,exports){
- module.exports = {
- tag: require('./flv-tag'),
- Transmuxer: require('./transmuxer'),
- getFlvHeader: require('./flv-header')
- };
- },{"./flv-header":11,"./flv-tag":12,"./transmuxer":15}],14:[function(require,module,exports){
- 'use strict';
- var TagList = function() {
- var self = this;
- this.list = [];
- this.push = function(tag) {
- this.list.push({
- bytes: tag.bytes,
- dts: tag.dts,
- pts: tag.pts,
- keyFrame: tag.keyFrame,
- metaDataTag: tag.metaDataTag
- });
- };
- Object.defineProperty(this, 'length', {
- get: function() {
- return self.list.length;
- }
- });
- };
- module.exports = TagList;
- },{}],15:[function(require,module,exports){
- 'use strict';
- var Stream = require('../utils/stream.js');
- var FlvTag = require('./flv-tag.js');
- var m2ts = require('../m2ts/m2ts.js');
- var AdtsStream = require('../codecs/adts.js');
- var H264Stream = require('../codecs/h264').H264Stream;
- var CoalesceStream = require('./coalesce-stream.js');
- var TagList = require('./tag-list.js');
- var
- Transmuxer,
- VideoSegmentStream,
- AudioSegmentStream,
- collectTimelineInfo,
- metaDataTag,
- extraDataTag;
- /**
- * Store information about the start and end of the tracka and the
- * duration for each frame/sample we process in order to calculate
- * the baseMediaDecodeTime
- */
- collectTimelineInfo = function(track, data) {
- if (typeof data.pts === 'number') {
- if (track.timelineStartInfo.pts === undefined) {
- track.timelineStartInfo.pts = data.pts;
- } else {
- track.timelineStartInfo.pts =
- Math.min(track.timelineStartInfo.pts, data.pts);
- }
- }
- if (typeof data.dts === 'number') {
- if (track.timelineStartInfo.dts === undefined) {
- track.timelineStartInfo.dts = data.dts;
- } else {
- track.timelineStartInfo.dts =
- Math.min(track.timelineStartInfo.dts, data.dts);
- }
- }
- };
- metaDataTag = function(track, pts) {
- var
- tag = new FlvTag(FlvTag.METADATA_TAG); // :FlvTag
- tag.dts = pts;
- tag.pts = pts;
- tag.writeMetaDataDouble('videocodecid', 7);
- tag.writeMetaDataDouble('width', track.width);
- tag.writeMetaDataDouble('height', track.height);
- return tag;
- };
- extraDataTag = function(track, pts) {
- var
- i,
- tag = new FlvTag(FlvTag.VIDEO_TAG, true);
- tag.dts = pts;
- tag.pts = pts;
- tag.writeByte(0x01);// version
- tag.writeByte(track.profileIdc);// profile
- tag.writeByte(track.profileCompatibility);// compatibility
- tag.writeByte(track.levelIdc);// level
- tag.writeByte(0xFC | 0x03); // reserved (6 bits), NULA length size - 1 (2 bits)
- tag.writeByte(0xE0 | 0x01); // reserved (3 bits), num of SPS (5 bits)
- tag.writeShort(track.sps[0].length); // data of SPS
- tag.writeBytes(track.sps[0]); // SPS
- tag.writeByte(track.pps.length); // num of PPS (will there ever be more that 1 PPS?)
- for (i = 0; i < track.pps.length; ++i) {
- tag.writeShort(track.pps[i].length); // 2 bytes for length of PPS
- tag.writeBytes(track.pps[i]); // data of PPS
- }
- return tag;
- };
- /**
- * Constructs a single-track, media segment from AAC data
- * events. The output of this stream can be fed to flash.
- */
- AudioSegmentStream = function(track) {
- var
- adtsFrames = [],
- videoKeyFrames = [],
- oldExtraData;
- AudioSegmentStream.prototype.init.call(this);
- this.push = function(data) {
- collectTimelineInfo(track, data);
- if (track) {
- track.audioobjecttype = data.audioobjecttype;
- track.channelcount = data.channelcount;
- track.samplerate = data.samplerate;
- track.samplingfrequencyindex = data.samplingfrequencyindex;
- track.samplesize = data.samplesize;
- track.extraData = (track.audioobjecttype << 11) |
- (track.samplingfrequencyindex << 7) |
- (track.channelcount << 3);
- }
- data.pts = Math.round(data.pts / 90);
- data.dts = Math.round(data.dts / 90);
- // buffer audio data until end() is called
- adtsFrames.push(data);
- };
- this.flush = function() {
- var currentFrame, adtsFrame, lastMetaPts, tags = new TagList();
- // return early if no audio data has been observed
- if (adtsFrames.length === 0) {
- this.trigger('done', 'AudioSegmentStream');
- return;
- }
- lastMetaPts = -Infinity;
- while (adtsFrames.length) {
- currentFrame = adtsFrames.shift();
- // write out a metadata frame at every video key frame
- if (videoKeyFrames.length && currentFrame.pts >= videoKeyFrames[0]) {
- lastMetaPts = videoKeyFrames.shift();
- this.writeMetaDataTags(tags, lastMetaPts);
- }
- // also write out metadata tags every 1 second so that the decoder
- // is re-initialized quickly after seeking into a different
- // audio configuration.
- if (track.extraData !== oldExtraData || currentFrame.pts - lastMetaPts >= 1000) {
- this.writeMetaDataTags(tags, currentFrame.pts);
- oldExtraData = track.extraData;
- lastMetaPts = currentFrame.pts;
- }
- adtsFrame = new FlvTag(FlvTag.AUDIO_TAG);
- adtsFrame.pts = currentFrame.pts;
- adtsFrame.dts = currentFrame.dts;
- adtsFrame.writeBytes(currentFrame.data);
- tags.push(adtsFrame.finalize());
- }
- videoKeyFrames.length = 0;
- oldExtraData = null;
- this.trigger('data', {track: track, tags: tags.list});
- this.trigger('done', 'AudioSegmentStream');
- };
- this.writeMetaDataTags = function(tags, pts) {
- var adtsFrame;
- adtsFrame = new FlvTag(FlvTag.METADATA_TAG);
- // For audio, DTS is always the same as PTS. We want to set the DTS
- // however so we can compare with video DTS to determine approximate
- // packet order
- adtsFrame.pts = pts;
- adtsFrame.dts = pts;
- // AAC is always 10
- adtsFrame.writeMetaDataDouble('audiocodecid', 10);
- adtsFrame.writeMetaDataBoolean('stereo', track.channelcount === 2);
- adtsFrame.writeMetaDataDouble('audiosamplerate', track.samplerate);
- // Is AAC always 16 bit?
- adtsFrame.writeMetaDataDouble('audiosamplesize', 16);
- tags.push(adtsFrame.finalize());
- adtsFrame = new FlvTag(FlvTag.AUDIO_TAG, true);
- // For audio, DTS is always the same as PTS. We want to set the DTS
- // however so we can compare with video DTS to determine approximate
- // packet order
- adtsFrame.pts = pts;
- adtsFrame.dts = pts;
- adtsFrame.view.setUint16(adtsFrame.position, track.extraData);
- adtsFrame.position += 2;
- adtsFrame.length = Math.max(adtsFrame.length, adtsFrame.position);
- tags.push(adtsFrame.finalize());
- };
- this.onVideoKeyFrame = function(pts) {
- videoKeyFrames.push(pts);
- };
- };
- AudioSegmentStream.prototype = new Stream();
- /**
- * Store FlvTags for the h264 stream
- * @param track {object} track metadata configuration
- */
- VideoSegmentStream = function(track) {
- var
- nalUnits = [],
- config,
- h264Frame;
- VideoSegmentStream.prototype.init.call(this);
- this.finishFrame = function(tags, frame) {
- if (!frame) {
- return;
- }
- // Check if keyframe and the length of tags.
- // This makes sure we write metadata on the first frame of a segment.
- if (config && track && track.newMetadata &&
- (frame.keyFrame || tags.length === 0)) {
- // Push extra data on every IDR frame in case we did a stream change + seek
- var metaTag = metaDataTag(config, frame.dts).finalize();
- var extraTag = extraDataTag(track, frame.dts).finalize();
- metaTag.metaDataTag = extraTag.metaDataTag = true;
- tags.push(metaTag);
- tags.push(extraTag);
- track.newMetadata = false;
- this.trigger('keyframe', frame.dts);
- }
- frame.endNalUnit();
- tags.push(frame.finalize());
- h264Frame = null;
- };
- this.push = function(data) {
- collectTimelineInfo(track, data);
- data.pts = Math.round(data.pts / 90);
- data.dts = Math.round(data.dts / 90);
- // buffer video until flush() is called
- nalUnits.push(data);
- };
- this.flush = function() {
- var
- currentNal,
- tags = new TagList();
- // Throw away nalUnits at the start of the byte stream until we find
- // the first AUD
- while (nalUnits.length) {
- if (nalUnits[0].nalUnitType === 'access_unit_delimiter_rbsp') {
- break;
- }
- nalUnits.shift();
- }
- // return early if no video data has been observed
- if (nalUnits.length === 0) {
- this.trigger('done', 'VideoSegmentStream');
- return;
- }
- while (nalUnits.length) {
- currentNal = nalUnits.shift();
- // record the track config
- if (currentNal.nalUnitType === 'seq_parameter_set_rbsp') {
- track.newMetadata = true;
- config = currentNal.config;
- track.width = config.width;
- track.height = config.height;
- track.sps = [currentNal.data];
- track.profileIdc = config.profileIdc;
- track.levelIdc = config.levelIdc;
- track.profileCompatibility = config.profileCompatibility;
- h264Frame.endNalUnit();
- } else if (currentNal.nalUnitType === 'pic_parameter_set_rbsp') {
- track.newMetadata = true;
- track.pps = [currentNal.data];
- h264Frame.endNalUnit();
- } else if (currentNal.nalUnitType === 'access_unit_delimiter_rbsp') {
- if (h264Frame) {
- this.finishFrame(tags, h264Frame);
- }
- h264Frame = new FlvTag(FlvTag.VIDEO_TAG);
- h264Frame.pts = currentNal.pts;
- h264Frame.dts = currentNal.dts;
- } else {
- if (currentNal.nalUnitType === 'slice_layer_without_partitioning_rbsp_idr') {
- // the current sample is a key frame
- h264Frame.keyFrame = true;
- }
- h264Frame.endNalUnit();
- }
- h264Frame.startNalUnit();
- h264Frame.writeBytes(currentNal.data);
- }
- if (h264Frame) {
- this.finishFrame(tags, h264Frame);
- }
- this.trigger('data', {track: track, tags: tags.list});
- // Continue with the flush process now
- this.trigger('done', 'VideoSegmentStream');
- };
- };
- VideoSegmentStream.prototype = new Stream();
- /**
- * An object that incrementally transmuxes MPEG2 Trasport Stream
- * chunks into an FLV.
- */
- Transmuxer = function(options) {
- var
- self = this,
- packetStream, parseStream, elementaryStream,
- videoTimestampRolloverStream, audioTimestampRolloverStream,
- timedMetadataTimestampRolloverStream,
- adtsStream, h264Stream,
- videoSegmentStream, audioSegmentStream, captionStream,
- coalesceStream;
- Transmuxer.prototype.init.call(this);
- options = options || {};
- // expose the metadata stream
- this.metadataStream = new m2ts.MetadataStream();
- options.metadataStream = this.metadataStream;
- // set up the parsing pipeline
- packetStream = new m2ts.TransportPacketStream();
- parseStream = new m2ts.TransportParseStream();
- elementaryStream = new m2ts.ElementaryStream();
- videoTimestampRolloverStream = new m2ts.TimestampRolloverStream('video');
- audioTimestampRolloverStream = new m2ts.TimestampRolloverStream('audio');
- timedMetadataTimestampRolloverStream = new m2ts.TimestampRolloverStream('timed-metadata');
- adtsStream = new AdtsStream();
- h264Stream = new H264Stream();
- coalesceStream = new CoalesceStream(options);
- // disassemble MPEG2-TS packets into elementary streams
- packetStream
- .pipe(parseStream)
- .pipe(elementaryStream);
- // !!THIS ORDER IS IMPORTANT!!
- // demux the streams
- elementaryStream
- .pipe(videoTimestampRolloverStream)
- .pipe(h264Stream);
- elementaryStream
- .pipe(audioTimestampRolloverStream)
- .pipe(adtsStream);
- elementaryStream
- .pipe(timedMetadataTimestampRolloverStream)
- .pipe(this.metadataStream)
- .pipe(coalesceStream);
- // if CEA-708 parsing is available, hook up a caption stream
- captionStream = new m2ts.CaptionStream();
- h264Stream.pipe(captionStream)
- .pipe(coalesceStream);
- // hook up the segment streams once track metadata is delivered
- elementaryStream.on('data', function(data) {
- var i, videoTrack, audioTrack;
- if (data.type === 'metadata') {
- i = data.tracks.length;
- // scan the tracks listed in the metadata
- while (i--) {
- if (data.tracks[i].type === 'video') {
- videoTrack = data.tracks[i];
- } else if (data.tracks[i].type === 'audio') {
- audioTrack = data.tracks[i];
- }
- }
- // hook up the video segment stream to the first track with h264 data
- if (videoTrack && !videoSegmentStream) {
- coalesceStream.numberOfTracks++;
- videoSegmentStream = new VideoSegmentStream(videoTrack);
- // Set up the final part of the video pipeline
- h264Stream
- .pipe(videoSegmentStream)
- .pipe(coalesceStream);
- }
- if (audioTrack && !audioSegmentStream) {
- // hook up the audio segment stream to the first track with aac data
- coalesceStream.numberOfTracks++;
- audioSegmentStream = new AudioSegmentStream(audioTrack);
- // Set up the final part of the audio pipeline
- adtsStream
- .pipe(audioSegmentStream)
- .pipe(coalesceStream);
- if (videoSegmentStream) {
- videoSegmentStream.on('keyframe', audioSegmentStream.onVideoKeyFrame);
- }
- }
- }
- });
- // feed incoming data to the front of the parsing pipeline
- this.push = function(data) {
- packetStream.push(data);
- };
- // flush any buffered data
- this.flush = function() {
- // Start at the top of the pipeline and flush all pending work
- packetStream.flush();
- };
- // Caption data has to be reset when seeking outside buffered range
- this.resetCaptions = function() {
- captionStream.reset();
- };
- // Re-emit any data coming from the coalesce stream to the outside world
- coalesceStream.on('data', function(event) {
- self.trigger('data', event);
- });
- // Let the consumer know we have finished flushing the entire pipeline
- coalesceStream.on('done', function() {
- self.trigger('done');
- });
- };
- Transmuxer.prototype = new Stream();
- // forward compatibility
- module.exports = Transmuxer;
- },{"../codecs/adts.js":6,"../codecs/h264":7,"../m2ts/m2ts.js":19,"../utils/stream.js":33,"./coalesce-stream.js":10,"./flv-tag.js":12,"./tag-list.js":14}],16:[function(require,module,exports){
- 'use strict';
- var muxjs = {
- codecs: require('./codecs'),
- mp4: require('./mp4'),
- flv: require('./flv'),
- mp2t: require('./m2ts')
- };
- // include all the tools when the full library is required
- muxjs.mp4.tools = require('./tools/mp4-inspector');
- muxjs.flv.tools = require('./tools/flv-inspector');
- muxjs.mp2t.tools = require('./tools/ts-inspector');
- module.exports = muxjs;
- },{"./codecs":8,"./flv":13,"./m2ts":18,"./mp4":24,"./tools/flv-inspector":28,"./tools/mp4-inspector":29,"./tools/ts-inspector":30}],17:[function(require,module,exports){
- /**
- * mux.js
- *
- * Copyright (c) 2015 Brightcove
- * All rights reserved.
- *
- * Reads in-band caption information from a video elementary
- * stream. Captions must follow the CEA-708 standard for injection
- * into an MPEG-2 transport streams.
- * @see https://en.wikipedia.org/wiki/CEA-708
- * @see https://www.gpo.gov/fdsys/pkg/CFR-2007-title47-vol1/pdf/CFR-2007-title47-vol1-sec15-119.pdf
- */
- 'use strict';
- // -----------------
- // Link To Transport
- // -----------------
- // Supplemental enhancement information (SEI) NAL units have a
- // payload type field to indicate how they are to be
- // interpreted. CEAS-708 caption content is always transmitted with
- // payload type 0x04.
- var USER_DATA_REGISTERED_ITU_T_T35 = 4,
- RBSP_TRAILING_BITS = 128,
- Stream = require('../utils/stream');
- /**
- * Parse a supplemental enhancement information (SEI) NAL unit.
- * Stops parsing once a message of type ITU T T35 has been found.
- *
- * @param bytes {Uint8Array} the bytes of a SEI NAL unit
- * @return {object} the parsed SEI payload
- * @see Rec. ITU-T H.264, 7.3.2.3.1
- */
- var parseSei = function(bytes) {
- var
- i = 0,
- result = {
- payloadType: -1,
- payloadSize: 0
- },
- payloadType = 0,
- payloadSize = 0;
- // go through the sei_rbsp parsing each each individual sei_message
- while (i < bytes.byteLength) {
- // stop once we have hit the end of the sei_rbsp
- if (bytes[i] === RBSP_TRAILING_BITS) {
- break;
- }
- // Parse payload type
- while (bytes[i] === 0xFF) {
- payloadType += 255;
- i++;
- }
- payloadType += bytes[i++];
- // Parse payload size
- while (bytes[i] === 0xFF) {
- payloadSize += 255;
- i++;
- }
- payloadSize += bytes[i++];
- // this sei_message is a 608/708 caption so save it and break
- // there can only ever be one caption message in a frame's sei
- if (!result.payload && payloadType === USER_DATA_REGISTERED_ITU_T_T35) {
- result.payloadType = payloadType;
- result.payloadSize = payloadSize;
- result.payload = bytes.subarray(i, i + payloadSize);
- break;
- }
- // skip the payload and parse the next message
- i += payloadSize;
- payloadType = 0;
- payloadSize = 0;
- }
- return result;
- };
- // see ANSI/SCTE 128-1 (2013), section 8.1
- var parseUserData = function(sei) {
- // itu_t_t35_contry_code must be 181 (United States) for
- // captions
- if (sei.payload[0] !== 181) {
- return null;
- }
- // itu_t_t35_provider_code should be 49 (ATSC) for captions
- if (((sei.payload[1] << 8) | sei.payload[2]) !== 49) {
- return null;
- }
- // the user_identifier should be "GA94" to indicate ATSC1 data
- if (String.fromCharCode(sei.payload[3],
- sei.payload[4],
- sei.payload[5],
- sei.payload[6]) !== 'GA94') {
- return null;
- }
- // finally, user_data_type_code should be 0x03 for caption data
- if (sei.payload[7] !== 0x03) {
- return null;
- }
- // return the user_data_type_structure and strip the trailing
- // marker bits
- return sei.payload.subarray(8, sei.payload.length - 1);
- };
- // see CEA-708-D, section 4.4
- var parseCaptionPackets = function(pts, userData) {
- var results = [], i, count, offset, data;
- // if this is just filler, return immediately
- if (!(userData[0] & 0x40)) {
- return results;
- }
- // parse out the cc_data_1 and cc_data_2 fields
- count = userData[0] & 0x1f;
- for (i = 0; i < count; i++) {
- offset = i * 3;
- data = {
- type: userData[offset + 2] & 0x03,
- pts: pts
- };
- // capture cc data when cc_valid is 1
- if (userData[offset + 2] & 0x04) {
- data.ccData = (userData[offset + 3] << 8) | userData[offset + 4];
- results.push(data);
- }
- }
- return results;
- };
- var CaptionStream = function() {
- CaptionStream.prototype.init.call(this);
- this.captionPackets_ = [];
- this.ccStreams_ = [
- new Cea608Stream(0, 0), // eslint-disable-line no-use-before-define
- new Cea608Stream(0, 1), // eslint-disable-line no-use-before-define
- new Cea608Stream(1, 0), // eslint-disable-line no-use-before-define
- new Cea608Stream(1, 1) // eslint-disable-line no-use-before-define
- ];
- this.reset();
- // forward data and done events from CCs to this CaptionStream
- this.ccStreams_.forEach(function(cc) {
- cc.on('data', this.trigger.bind(this, 'data'));
- cc.on('done', this.trigger.bind(this, 'done'));
- }, this);
- };
- CaptionStream.prototype = new Stream();
- CaptionStream.prototype.push = function(event) {
- var sei, userData;
- // only examine SEI NALs
- if (event.nalUnitType !== 'sei_rbsp') {
- return;
- }
- // parse the sei
- sei = parseSei(event.escapedRBSP);
- // ignore everything but user_data_registered_itu_t_t35
- if (sei.payloadType !== USER_DATA_REGISTERED_ITU_T_T35) {
- return;
- }
- // parse out the user data payload
- userData = parseUserData(sei);
- // ignore unrecognized userData
- if (!userData) {
- return;
- }
- // Sometimes, the same segment # will be downloaded twice. To stop the
- // caption data from being processed twice, we track the latest dts we've
- // received and ignore everything with a dts before that. However, since
- // data for a specific dts can be split across 2 packets on either side of
- // a segment boundary, we need to make sure we *don't* ignore the second
- // dts packet we receive that has dts === this.latestDts_. And thus, the
- // ignoreNextEqualDts_ flag was born.
- if (event.dts < this.latestDts_) {
- // We've started getting older data, so set the flag.
- this.ignoreNextEqualDts_ = true;
- return;
- } else if ((event.dts === this.latestDts_) && (this.ignoreNextEqualDts_)) {
- // We've received the last duplicate packet, time to start processing again
- this.ignoreNextEqualDts_ = false;
- return;
- }
- // parse out CC data packets and save them for later
- this.captionPackets_ = this.captionPackets_.concat(parseCaptionPackets(event.pts, userData));
- this.latestDts_ = event.dts;
- };
- CaptionStream.prototype.flush = function() {
- // make sure we actually parsed captions before proceeding
- if (!this.captionPackets_.length) {
- this.ccStreams_.forEach(function(cc) {
- cc.flush();
- }, this);
- return;
- }
- // In Chrome, the Array#sort function is not stable so add a
- // presortIndex that we can use to ensure we get a stable-sort
- this.captionPackets_.forEach(function(elem, idx) {
- elem.presortIndex = idx;
- });
- // sort caption byte-pairs based on their PTS values
- this.captionPackets_.sort(function(a, b) {
- if (a.pts === b.pts) {
- return a.presortIndex - b.presortIndex;
- }
- return a.pts - b.pts;
- });
- this.captionPackets_.forEach(function(packet) {
- if (packet.type < 2) {
- // Dispatch packet to the right Cea608Stream
- this.dispatchCea608Packet(packet);
- }
- // this is where an 'else' would go for a dispatching packets
- // to a theoretical Cea708Stream that handles SERVICEn data
- }, this);
- this.captionPackets_.length = 0;
- this.ccStreams_.forEach(function(cc) {
- cc.flush();
- }, this);
- return;
- };
- CaptionStream.prototype.reset = function() {
- this.latestDts_ = null;
- this.ignoreNextEqualDts_ = false;
- this.activeCea608Channel_ = [null, null];
- this.ccStreams_.forEach(function(ccStream) {
- ccStream.reset();
- });
- };
- CaptionStream.prototype.dispatchCea608Packet = function(packet) {
- // NOTE: packet.type is the CEA608 field
- if (this.setsChannel1Active(packet)) {
- this.activeCea608Channel_[packet.type] = 0;
- } else if (this.setsChannel2Active(packet)) {
- this.activeCea608Channel_[packet.type] = 1;
- }
- if (this.activeCea608Channel_[packet.type] === null) {
- // If we haven't received anything to set the active channel, discard the
- // data; we don't want jumbled captions
- return;
- }
- this.ccStreams_[(packet.type << 1) + this.activeCea608Channel_[packet.type]].push(packet);
- };
- CaptionStream.prototype.setsChannel1Active = function(packet) {
- return ((packet.ccData & 0x7800) === 0x1000);
- };
- CaptionStream.prototype.setsChannel2Active = function(packet) {
- return ((packet.ccData & 0x7800) === 0x1800);
- };
- // ----------------------
- // Session to Application
- // ----------------------
- var CHARACTER_TRANSLATION = {
- 0x2a: 0xe1, // á
- 0x5c: 0xe9, // é
- 0x5e: 0xed, // í
- 0x5f: 0xf3, // ó
- 0x60: 0xfa, // ú
- 0x7b: 0xe7, // ç
- 0x7c: 0xf7, // ÷
- 0x7d: 0xd1, // Ñ
- 0x7e: 0xf1, // ñ
- 0x7f: 0x2588, // █
- 0x0130: 0xae, // ®
- 0x0131: 0xb0, // °
- 0x0132: 0xbd, // ½
- 0x0133: 0xbf, // ¿
- 0x0134: 0x2122, // ™
- 0x0135: 0xa2, // ¢
- 0x0136: 0xa3, // £
- 0x0137: 0x266a, // ♪
- 0x0138: 0xe0, // à
- 0x0139: 0xa0, //
- 0x013a: 0xe8, // è
- 0x013b: 0xe2, // â
- 0x013c: 0xea, // ê
- 0x013d: 0xee, // î
- 0x013e: 0xf4, // ô
- 0x013f: 0xfb, // û
- 0x0220: 0xc1, // Á
- 0x0221: 0xc9, // É
- 0x0222: 0xd3, // Ó
- 0x0223: 0xda, // Ú
- 0x0224: 0xdc, // Ü
- 0x0225: 0xfc, // ü
- 0x0226: 0x2018, // ‘
- 0x0227: 0xa1, // ¡
- 0x0228: 0x2a, // *
- 0x0229: 0x27, // '
- 0x022a: 0x2014, // —
- 0x022b: 0xa9, // ©
- 0x022c: 0x2120, // ℠
- 0x022d: 0x2022, // •
- 0x022e: 0x201c, // “
- 0x022f: 0x201d, // ”
- 0x0230: 0xc0, // À
- 0x0231: 0xc2, // Â
- 0x0232: 0xc7, // Ç
- 0x0233: 0xc8, // È
- 0x0234: 0xca, // Ê
- 0x0235: 0xcb, // Ë
- 0x0236: 0xeb, // ë
- 0x0237: 0xce, // Î
- 0x0238: 0xcf, // Ï
- 0x0239: 0xef, // ï
- 0x023a: 0xd4, // Ô
- 0x023b: 0xd9, // Ù
- 0x023c: 0xf9, // ù
- 0x023d: 0xdb, // Û
- 0x023e: 0xab, // «
- 0x023f: 0xbb, // »
- 0x0320: 0xc3, // Ã
- 0x0321: 0xe3, // ã
- 0x0322: 0xcd, // Í
- 0x0323: 0xcc, // Ì
- 0x0324: 0xec, // ì
- 0x0325: 0xd2, // Ò
- 0x0326: 0xf2, // ò
- 0x0327: 0xd5, // Õ
- 0x0328: 0xf5, // õ
- 0x0329: 0x7b, // {
- 0x032a: 0x7d, // }
- 0x032b: 0x5c, // \
- 0x032c: 0x5e, // ^
- 0x032d: 0x5f, // _
- 0x032e: 0x7c, // |
- 0x032f: 0x7e, // ~
- 0x0330: 0xc4, // Ä
- 0x0331: 0xe4, // ä
- 0x0332: 0xd6, // Ö
- 0x0333: 0xf6, // ö
- 0x0334: 0xdf, // ß
- 0x0335: 0xa5, // ¥
- 0x0336: 0xa4, // ¤
- 0x0337: 0x2502, // │
- 0x0338: 0xc5, // Å
- 0x0339: 0xe5, // å
- 0x033a: 0xd8, // Ø
- 0x033b: 0xf8, // ø
- 0x033c: 0x250c, // ┌
- 0x033d: 0x2510, // ┐
- 0x033e: 0x2514, // └
- 0x033f: 0x2518 // ┘
- };
- var getCharFromCode = function(code) {
- if (code === null) {
- return '';
- }
- code = CHARACTER_TRANSLATION[code] || code;
- return String.fromCharCode(code);
- };
- // the index of the last row in a CEA-608 display buffer
- var BOTTOM_ROW = 14;
- // This array is used for mapping PACs -> row #, since there's no way of
- // getting it through bit logic.
- var ROWS = [0x1100, 0x1120, 0x1200, 0x1220, 0x1500, 0x1520, 0x1600, 0x1620,
- 0x1700, 0x1720, 0x1000, 0x1300, 0x1320, 0x1400, 0x1420];
- // CEA-608 captions are rendered onto a 34x15 matrix of character
- // cells. The "bottom" row is the last element in the outer array.
- var createDisplayBuffer = function() {
- var result = [], i = BOTTOM_ROW + 1;
- while (i--) {
- result.push('');
- }
- return result;
- };
- var Cea608Stream = function(field, dataChannel) {
- Cea608Stream.prototype.init.call(this);
- this.field_ = field || 0;
- this.dataChannel_ = dataChannel || 0;
- this.name_ = 'CC' + (((this.field_ << 1) | this.dataChannel_) + 1);
- this.setConstants();
- this.reset();
- this.push = function(packet) {
- var data, swap, char0, char1, text;
- // remove the parity bits
- data = packet.ccData & 0x7f7f;
- // ignore duplicate control codes; the spec demands they're sent twice
- if (data === this.lastControlCode_) {
- this.lastControlCode_ = null;
- return;
- }
- // Store control codes
- if ((data & 0xf000) === 0x1000) {
- this.lastControlCode_ = data;
- } else if (data !== this.PADDING_) {
- this.lastControlCode_ = null;
- }
- char0 = data >>> 8;
- char1 = data & 0xff;
- if (data === this.PADDING_) {
- return;
- } else if (data === this.RESUME_CAPTION_LOADING_) {
- this.mode_ = 'popOn';
- } else if (data === this.END_OF_CAPTION_) {
- this.clearFormatting(packet.pts);
- // if a caption was being displayed, it's gone now
- this.flushDisplayed(packet.pts);
- // flip memory
- swap = this.displayed_;
- this.displayed_ = this.nonDisplayed_;
- this.nonDisplayed_ = swap;
- // start measuring the time to display the caption
- this.startPts_ = packet.pts;
- } else if (data === this.ROLL_UP_2_ROWS_) {
- this.topRow_ = BOTTOM_ROW - 1;
- this.mode_ = 'rollUp';
- } else if (data === this.ROLL_UP_3_ROWS_) {
- this.topRow_ = BOTTOM_ROW - 2;
- this.mode_ = 'rollUp';
- } else if (data === this.ROLL_UP_4_ROWS_) {
- this.topRow_ = BOTTOM_ROW - 3;
- this.mode_ = 'rollUp';
- } else if (data === this.CARRIAGE_RETURN_) {
- this.clearFormatting(packet.pts);
- this.flushDisplayed(packet.pts);
- this.shiftRowsUp_();
- this.startPts_ = packet.pts;
- } else if (data === this.BACKSPACE_) {
- if (this.mode_ === 'popOn') {
- this.nonDisplayed_[BOTTOM_ROW] = this.nonDisplayed_[BOTTOM_ROW].slice(0, -1);
- } else {
- this.displayed_[BOTTOM_ROW] = this.displayed_[BOTTOM_ROW].slice(0, -1);
- }
- } else if (data === this.ERASE_DISPLAYED_MEMORY_) {
- this.flushDisplayed(packet.pts);
- this.displayed_ = createDisplayBuffer();
- } else if (data === this.ERASE_NON_DISPLAYED_MEMORY_) {
- this.nonDisplayed_ = createDisplayBuffer();
- } else if (data === this.RESUME_DIRECT_CAPTIONING_) {
- this.mode_ = 'paintOn';
- // Append special characters to caption text
- } else if (this.isSpecialCharacter(char0, char1)) {
- // Bitmask char0 so that we can apply character transformations
- // regardless of field and data channel.
- // Then byte-shift to the left and OR with char1 so we can pass the
- // entire character code to `getCharFromCode`.
- char0 = (char0 & 0x03) << 8;
- text = getCharFromCode(char0 | char1);
- this[this.mode_](packet.pts, text);
- this.column_++;
- // Append extended characters to caption text
- } else if (this.isExtCharacter(char0, char1)) {
- // Extended characters always follow their "non-extended" equivalents.
- // IE if a "è" is desired, you'll always receive "eè"; non-compliant
- // decoders are supposed to drop the "è", while compliant decoders
- // backspace the "e" and insert "è".
- // Delete the previous character
- if (this.mode_ === 'popOn') {
- this.nonDisplayed_[this.row_] = this.nonDisplayed_[this.row_].slice(0, -1);
- } else {
- this.displayed_[BOTTOM_ROW] = this.displayed_[BOTTOM_ROW].slice(0, -1);
- }
- // Bitmask char0 so that we can apply character transformations
- // regardless of field and data channel.
- // Then byte-shift to the left and OR with char1 so we can pass the
- // entire character code to `getCharFromCode`.
- char0 = (char0 & 0x03) << 8;
- text = getCharFromCode(char0 | char1);
- this[this.mode_](packet.pts, text);
- this.column_++;
- // Process mid-row codes
- } else if (this.isMidRowCode(char0, char1)) {
- // Attributes are not additive, so clear all formatting
- this.clearFormatting(packet.pts);
- // According to the standard, mid-row codes
- // should be replaced with spaces, so add one now
- this[this.mode_](packet.pts, ' ');
- this.column_++;
- if ((char1 & 0xe) === 0xe) {
- this.addFormatting(packet.pts, ['i']);
- }
- if ((char1 & 0x1) === 0x1) {
- this.addFormatting(packet.pts, ['u']);
- }
- // Detect offset control codes and adjust cursor
- } else if (this.isOffsetControlCode(char0, char1)) {
- // Cursor position is set by indent PAC (see below) in 4-column
- // increments, with an additional offset code of 1-3 to reach any
- // of the 32 columns specified by CEA-608. So all we need to do
- // here is increment the column cursor by the given offset.
- this.column_ += (char1 & 0x03);
- // Detect PACs (Preamble Address Codes)
- } else if (this.isPAC(char0, char1)) {
- // There's no logic for PAC -> row mapping, so we have to just
- // find the row code in an array and use its index :(
- var row = ROWS.indexOf(data & 0x1f20);
- if (row !== this.row_) {
- // formatting is only persistent for current row
- this.clearFormatting(packet.pts);
- this.row_ = row;
- }
- // All PACs can apply underline, so detect and apply
- // (All odd-numbered second bytes set underline)
- if ((char1 & 0x1) && (this.formatting_.indexOf('u') === -1)) {
- this.addFormatting(packet.pts, ['u']);
- }
- if ((data & 0x10) === 0x10) {
- // We've got an indent level code. Each successive even number
- // increments the column cursor by 4, so we can get the desired
- // column position by bit-shifting to the right (to get n/2)
- // and multiplying by 4.
- this.column_ = ((data & 0xe) >> 1) * 4;
- }
- if (this.isColorPAC(char1)) {
- // it's a color code, though we only support white, which
- // can be either normal or italicized. white italics can be
- // either 0x4e or 0x6e depending on the row, so we just
- // bitwise-and with 0xe to see if italics should be turned on
- if ((char1 & 0xe) === 0xe) {
- this.addFormatting(packet.pts, ['i']);
- }
- }
- // We have a normal character in char0, and possibly one in char1
- } else if (this.isNormalChar(char0)) {
- if (char1 === 0x00) {
- char1 = null;
- }
- text = getCharFromCode(char0);
- text += getCharFromCode(char1);
- this[this.mode_](packet.pts, text);
- this.column_ += text.length;
- } // finish data processing
- };
- };
- Cea608Stream.prototype = new Stream();
- // Trigger a cue point that captures the current state of the
- // display buffer
- Cea608Stream.prototype.flushDisplayed = function(pts) {
- var content = this.displayed_
- // remove spaces from the start and end of the string
- .map(function(row) {
- return row.trim();
- })
- // combine all text rows to display in one cue
- .join('\n')
- // and remove blank rows from the start and end, but not the middle
- .replace(/^\n+|\n+$/g, '');
- if (content.length) {
- this.trigger('data', {
- startPts: this.startPts_,
- endPts: pts,
- text: content,
- stream: this.name_
- });
- }
- };
- /**
- * Zero out the data, used for startup and on seek
- */
- Cea608Stream.prototype.reset = function() {
- this.mode_ = 'popOn';
- // When in roll-up mode, the index of the last row that will
- // actually display captions. If a caption is shifted to a row
- // with a lower index than this, it is cleared from the display
- // buffer
- this.topRow_ = 0;
- this.startPts_ = 0;
- this.displayed_ = createDisplayBuffer();
- this.nonDisplayed_ = createDisplayBuffer();
- this.lastControlCode_ = null;
- // Track row and column for proper line-breaking and spacing
- this.column_ = 0;
- this.row_ = BOTTOM_ROW;
- // This variable holds currently-applied formatting
- this.formatting_ = [];
- };
- /**
- * Sets up control code and related constants for this instance
- */
- Cea608Stream.prototype.setConstants = function() {
- // The following attributes have these uses:
- // ext_ : char0 for mid-row codes, and the base for extended
- // chars (ext_+0, ext_+1, and ext_+2 are char0s for
- // extended codes)
- // control_: char0 for control codes, except byte-shifted to the
- // left so that we can do this.control_ | CONTROL_CODE
- // offset_: char0 for tab offset codes
- //
- // It's also worth noting that control codes, and _only_ control codes,
- // differ between field 1 and field2. Field 2 control codes are always
- // their field 1 value plus 1. That's why there's the "| field" on the
- // control value.
- if (this.dataChannel_ === 0) {
- this.BASE_ = 0x10;
- this.EXT_ = 0x11;
- this.CONTROL_ = (0x14 | this.field_) << 8;
- this.OFFSET_ = 0x17;
- } else if (this.dataChannel_ === 1) {
- this.BASE_ = 0x18;
- this.EXT_ = 0x19;
- this.CONTROL_ = (0x1c | this.field_) << 8;
- this.OFFSET_ = 0x1f;
- }
- // Constants for the LSByte command codes recognized by Cea608Stream. This
- // list is not exhaustive. For a more comprehensive listing and semantics see
- // http://www.gpo.gov/fdsys/pkg/CFR-2010-title47-vol1/pdf/CFR-2010-title47-vol1-sec15-119.pdf
- // Padding
- this.PADDING_ = 0x0000;
- // Pop-on Mode
- this.RESUME_CAPTION_LOADING_ = this.CONTROL_ | 0x20;
- this.END_OF_CAPTION_ = this.CONTROL_ | 0x2f;
- // Roll-up Mode
- this.ROLL_UP_2_ROWS_ = this.CONTROL_ | 0x25;
- this.ROLL_UP_3_ROWS_ = this.CONTROL_ | 0x26;
- this.ROLL_UP_4_ROWS_ = this.CONTROL_ | 0x27;
- this.CARRIAGE_RETURN_ = this.CONTROL_ | 0x2d;
- // paint-on mode (not supported)
- this.RESUME_DIRECT_CAPTIONING_ = this.CONTROL_ | 0x29;
- // Erasure
- this.BACKSPACE_ = this.CONTROL_ | 0x21;
- this.ERASE_DISPLAYED_MEMORY_ = this.CONTROL_ | 0x2c;
- this.ERASE_NON_DISPLAYED_MEMORY_ = this.CONTROL_ | 0x2e;
- };
- /**
- * Detects if the 2-byte packet data is a special character
- *
- * Special characters have a second byte in the range 0x30 to 0x3f,
- * with the first byte being 0x11 (for data channel 1) or 0x19 (for
- * data channel 2).
- *
- * @param {Integer} char0 The first byte
- * @param {Integer} char1 The second byte
- * @return {Boolean} Whether the 2 bytes are an special character
- */
- Cea608Stream.prototype.isSpecialCharacter = function(char0, char1) {
- return (char0 === this.EXT_ && char1 >= 0x30 && char1 <= 0x3f);
- };
- /**
- * Detects if the 2-byte packet data is an extended character
- *
- * Extended characters have a second byte in the range 0x20 to 0x3f,
- * with the first byte being 0x12 or 0x13 (for data channel 1) or
- * 0x1a or 0x1b (for data channel 2).
- *
- * @param {Integer} char0 The first byte
- * @param {Integer} char1 The second byte
- * @return {Boolean} Whether the 2 bytes are an extended character
- */
- Cea608Stream.prototype.isExtCharacter = function(char0, char1) {
- return ((char0 === (this.EXT_ + 1) || char0 === (this.EXT_ + 2)) &&
- (char1 >= 0x20 && char1 <= 0x3f));
- };
- /**
- * Detects if the 2-byte packet is a mid-row code
- *
- * Mid-row codes have a second byte in the range 0x20 to 0x2f, with
- * the first byte being 0x11 (for data channel 1) or 0x19 (for data
- * channel 2).
- *
- * @param {Integer} char0 The first byte
- * @param {Integer} char1 The second byte
- * @return {Boolean} Whether the 2 bytes are a mid-row code
- */
- Cea608Stream.prototype.isMidRowCode = function(char0, char1) {
- return (char0 === this.EXT_ && (char1 >= 0x20 && char1 <= 0x2f));
- };
- /**
- * Detects if the 2-byte packet is an offset control code
- *
- * Offset control codes have a second byte in the range 0x21 to 0x23,
- * with the first byte being 0x17 (for data channel 1) or 0x1f (for
- * data channel 2).
- *
- * @param {Integer} char0 The first byte
- * @param {Integer} char1 The second byte
- * @return {Boolean} Whether the 2 bytes are an offset control code
- */
- Cea608Stream.prototype.isOffsetControlCode = function(char0, char1) {
- return (char0 === this.OFFSET_ && (char1 >= 0x21 && char1 <= 0x23));
- };
- /**
- * Detects if the 2-byte packet is a Preamble Address Code
- *
- * PACs have a first byte in the range 0x10 to 0x17 (for data channel 1)
- * or 0x18 to 0x1f (for data channel 2), with the second byte in the
- * range 0x40 to 0x7f.
- *
- * @param {Integer} char0 The first byte
- * @param {Integer} char1 The second byte
- * @return {Boolean} Whether the 2 bytes are a PAC
- */
- Cea608Stream.prototype.isPAC = function(char0, char1) {
- return (char0 >= this.BASE_ && char0 < (this.BASE_ + 8) &&
- (char1 >= 0x40 && char1 <= 0x7f));
- };
- /**
- * Detects if a packet's second byte is in the range of a PAC color code
- *
- * PAC color codes have the second byte be in the range 0x40 to 0x4f, or
- * 0x60 to 0x6f.
- *
- * @param {Integer} char1 The second byte
- * @return {Boolean} Whether the byte is a color PAC
- */
- Cea608Stream.prototype.isColorPAC = function(char1) {
- return ((char1 >= 0x40 && char1 <= 0x4f) || (char1 >= 0x60 && char1 <= 0x7f));
- };
- /**
- * Detects if a single byte is in the range of a normal character
- *
- * Normal text bytes are in the range 0x20 to 0x7f.
- *
- * @param {Integer} char The byte
- * @return {Boolean} Whether the byte is a normal character
- */
- Cea608Stream.prototype.isNormalChar = function(char) {
- return (char >= 0x20 && char <= 0x7f);
- };
- // Adds the opening HTML tag for the passed character to the caption text,
- // and keeps track of it for later closing
- Cea608Stream.prototype.addFormatting = function(pts, format) {
- this.formatting_ = this.formatting_.concat(format);
- var text = format.reduce(function(text, format) {
- return text + '<' + format + '>';
- }, '');
- this[this.mode_](pts, text);
- };
- // Adds HTML closing tags for current formatting to caption text and
- // clears remembered formatting
- Cea608Stream.prototype.clearFormatting = function(pts) {
- if (!this.formatting_.length) {
- return;
- }
- var text = this.formatting_.reverse().reduce(function(text, format) {
- return text + '</' + format + '>';
- }, '');
- this.formatting_ = [];
- this[this.mode_](pts, text);
- };
- // Mode Implementations
- Cea608Stream.prototype.popOn = function(pts, text) {
- var baseRow = this.nonDisplayed_[this.row_];
- // buffer characters
- baseRow += text;
- this.nonDisplayed_[this.row_] = baseRow;
- };
- Cea608Stream.prototype.rollUp = function(pts, text) {
- var baseRow = this.displayed_[BOTTOM_ROW];
- baseRow += text;
- this.displayed_[BOTTOM_ROW] = baseRow;
- };
- Cea608Stream.prototype.shiftRowsUp_ = function() {
- var i;
- // clear out inactive rows
- for (i = 0; i < this.topRow_; i++) {
- this.displayed_[i] = '';
- }
- // shift displayed rows up
- for (i = this.topRow_; i < BOTTOM_ROW; i++) {
- this.displayed_[i] = this.displayed_[i + 1];
- }
- // clear out the bottom row
- this.displayed_[BOTTOM_ROW] = '';
- };
- // paintOn mode is not implemented
- Cea608Stream.prototype.paintOn = function() {};
- // exports
- module.exports = {
- CaptionStream: CaptionStream,
- Cea608Stream: Cea608Stream
- };
- },{"../utils/stream":33}],18:[function(require,module,exports){
- module.exports = require('./m2ts');
- },{"./m2ts":19}],19:[function(require,module,exports){
- /**
- * mux.js
- *
- * Copyright (c) 2015 Brightcove
- * All rights reserved.
- *
- * A stream-based mp2t to mp4 converter. This utility can be used to
- * deliver mp4s to a SourceBuffer on platforms that support native
- * Media Source Extensions.
- */
- 'use strict';
- var Stream = require('../utils/stream.js'),
- CaptionStream = require('./caption-stream'),
- StreamTypes = require('./stream-types'),
- TimestampRolloverStream = require('./timestamp-rollover-stream').TimestampRolloverStream;
- var m2tsStreamTypes = require('./stream-types.js');
- // object types
- var TransportPacketStream, TransportParseStream, ElementaryStream;
- // constants
- var
- MP2T_PACKET_LENGTH = 188, // bytes
- SYNC_BYTE = 0x47;
- /**
- * Splits an incoming stream of binary data into MPEG-2 Transport
- * Stream packets.
- */
- TransportPacketStream = function() {
- var
- buffer = new Uint8Array(MP2T_PACKET_LENGTH),
- bytesInBuffer = 0;
- TransportPacketStream.prototype.init.call(this);
- // Deliver new bytes to the stream.
- this.push = function(bytes) {
- var
- startIndex = 0,
- endIndex = MP2T_PACKET_LENGTH,
- everything;
- // If there are bytes remaining from the last segment, prepend them to the
- // bytes that were pushed in
- if (bytesInBuffer) {
- everything = new Uint8Array(bytes.byteLength + bytesInBuffer);
- everything.set(buffer.subarray(0, bytesInBuffer));
- everything.set(bytes, bytesInBuffer);
- bytesInBuffer = 0;
- } else {
- everything = bytes;
- }
- // While we have enough data for a packet
- while (endIndex < everything.byteLength) {
- // Look for a pair of start and end sync bytes in the data..
- if (everything[startIndex] === SYNC_BYTE && everything[endIndex] === SYNC_BYTE) {
- // We found a packet so emit it and jump one whole packet forward in
- // the stream
- this.trigger('data', everything.subarray(startIndex, endIndex));
- startIndex += MP2T_PACKET_LENGTH;
- endIndex += MP2T_PACKET_LENGTH;
- continue;
- }
- // If we get here, we have somehow become de-synchronized and we need to step
- // forward one byte at a time until we find a pair of sync bytes that denote
- // a packet
- startIndex++;
- endIndex++;
- }
- // If there was some data left over at the end of the segment that couldn't
- // possibly be a whole packet, keep it because it might be the start of a packet
- // that continues in the next segment
- if (startIndex < everything.byteLength) {
- buffer.set(everything.subarray(startIndex), 0);
- bytesInBuffer = everything.byteLength - startIndex;
- }
- };
- this.flush = function() {
- // If the buffer contains a whole packet when we are being flushed, emit it
- // and empty the buffer. Otherwise hold onto the data because it may be
- // important for decoding the next segment
- if (bytesInBuffer === MP2T_PACKET_LENGTH && buffer[0] === SYNC_BYTE) {
- this.trigger('data', buffer);
- bytesInBuffer = 0;
- }
- this.trigger('done');
- };
- };
- TransportPacketStream.prototype = new Stream();
- /**
- * Accepts an MP2T TransportPacketStream and emits data events with parsed
- * forms of the individual transport stream packets.
- */
- TransportParseStream = function() {
- var parsePsi, parsePat, parsePmt, self;
- TransportParseStream.prototype.init.call(this);
- self = this;
- this.packetsWaitingForPmt = [];
- this.programMapTable = undefined;
- parsePsi = function(payload, psi) {
- var offset = 0;
- // PSI packets may be split into multiple sections and those
- // sections may be split into multiple packets. If a PSI
- // section starts in this packet, the payload_unit_start_indicator
- // will be true and the first byte of the payload will indicate
- // the offset from the current position to the start of the
- // section.
- if (psi.payloadUnitStartIndicator) {
- offset += payload[offset] + 1;
- }
- if (psi.type === 'pat') {
- parsePat(payload.subarray(offset), psi);
- } else {
- parsePmt(payload.subarray(offset), psi);
- }
- };
- parsePat = function(payload, pat) {
- pat.section_number = payload[7]; // eslint-disable-line camelcase
- pat.last_section_number = payload[8]; // eslint-disable-line camelcase
- // skip the PSI header and parse the first PMT entry
- self.pmtPid = (payload[10] & 0x1F) << 8 | payload[11];
- pat.pmtPid = self.pmtPid;
- };
- /**
- * Parse out the relevant fields of a Program Map Table (PMT).
- * @param payload {Uint8Array} the PMT-specific portion of an MP2T
- * packet. The first byte in this array should be the table_id
- * field.
- * @param pmt {object} the object that should be decorated with
- * fields parsed from the PMT.
- */
- parsePmt = function(payload, pmt) {
- var sectionLength, tableEnd, programInfoLength, offset;
- // PMTs can be sent ahead of the time when they should actually
- // take effect. We don't believe this should ever be the case
- // for HLS but we'll ignore "forward" PMT declarations if we see
- // them. Future PMT declarations have the current_next_indicator
- // set to zero.
- if (!(payload[5] & 0x01)) {
- return;
- }
- // overwrite any existing program map table
- self.programMapTable = {
- video: null,
- audio: null,
- 'timed-metadata': {}
- };
- // the mapping table ends at the end of the current section
- sectionLength = (payload[1] & 0x0f) << 8 | payload[2];
- tableEnd = 3 + sectionLength - 4;
- // to determine where the table is, we have to figure out how
- // long the program info descriptors are
- programInfoLength = (payload[10] & 0x0f) << 8 | payload[11];
- // advance the offset to the first entry in the mapping table
- offset = 12 + programInfoLength;
- while (offset < tableEnd) {
- var streamType = payload[offset];
- var pid = (payload[offset + 1] & 0x1F) << 8 | payload[offset + 2];
- // only map a single elementary_pid for audio and video stream types
- // TODO: should this be done for metadata too? for now maintain behavior of
- // multiple metadata streams
- if (streamType === StreamTypes.H264_STREAM_TYPE &&
- self.programMapTable.video === null) {
- self.programMapTable.video = pid;
- } else if (streamType === StreamTypes.ADTS_STREAM_TYPE &&
- self.programMapTable.audio === null) {
- self.programMapTable.audio = pid;
- } else if (streamType === StreamTypes.METADATA_STREAM_TYPE) {
- // map pid to stream type for metadata streams
- self.programMapTable['timed-metadata'][pid] = streamType;
- }
- // move to the next table entry
- // skip past the elementary stream descriptors, if present
- offset += ((payload[offset + 3] & 0x0F) << 8 | payload[offset + 4]) + 5;
- }
- // record the map on the packet as well
- pmt.programMapTable = self.programMapTable;
- };
- /**
- * Deliver a new MP2T packet to the stream.
- */
- this.push = function(packet) {
- var
- result = {},
- offset = 4;
- result.payloadUnitStartIndicator = !!(packet[1] & 0x40);
- // pid is a 13-bit field starting at the last bit of packet[1]
- result.pid = packet[1] & 0x1f;
- result.pid <<= 8;
- result.pid |= packet[2];
- // if an adaption field is present, its length is specified by the
- // fifth byte of the TS packet header. The adaptation field is
- // used to add stuffing to PES packets that don't fill a complete
- // TS packet, and to specify some forms of timing and control data
- // that we do not currently use.
- if (((packet[3] & 0x30) >>> 4) > 0x01) {
- offset += packet[offset] + 1;
- }
- // parse the rest of the packet based on the type
- if (result.pid === 0) {
- result.type = 'pat';
- parsePsi(packet.subarray(offset), result);
- this.trigger('data', result);
- } else if (result.pid === this.pmtPid) {
- result.type = 'pmt';
- parsePsi(packet.subarray(offset), result);
- this.trigger('data', result);
- // if there are any packets waiting for a PMT to be found, process them now
- while (this.packetsWaitingForPmt.length) {
- this.processPes_.apply(this, this.packetsWaitingForPmt.shift());
- }
- } else if (this.programMapTable === undefined) {
- // When we have not seen a PMT yet, defer further processing of
- // PES packets until one has been parsed
- this.packetsWaitingForPmt.push([packet, offset, result]);
- } else {
- this.processPes_(packet, offset, result);
- }
- };
- this.processPes_ = function(packet, offset, result) {
- // set the appropriate stream type
- if (result.pid === this.programMapTable.video) {
- result.streamType = StreamTypes.H264_STREAM_TYPE;
- } else if (result.pid === this.programMapTable.audio) {
- result.streamType = StreamTypes.ADTS_STREAM_TYPE;
- } else {
- // if not video or audio, it is timed-metadata or unknown
- // if unknown, streamType will be undefined
- result.streamType = this.programMapTable['timed-metadata'][result.pid];
- }
- result.type = 'pes';
- result.data = packet.subarray(offset);
- this.trigger('data', result);
- };
- };
- TransportParseStream.prototype = new Stream();
- TransportParseStream.STREAM_TYPES = {
- h264: 0x1b,
- adts: 0x0f
- };
- /**
- * Reconsistutes program elementary stream (PES) packets from parsed
- * transport stream packets. That is, if you pipe an
- * mp2t.TransportParseStream into a mp2t.ElementaryStream, the output
- * events will be events which capture the bytes for individual PES
- * packets plus relevant metadata that has been extracted from the
- * container.
- */
- ElementaryStream = function() {
- var
- self = this,
- // PES packet fragments
- video = {
- data: [],
- size: 0
- },
- audio = {
- data: [],
- size: 0
- },
- timedMetadata = {
- data: [],
- size: 0
- },
- parsePes = function(payload, pes) {
- var ptsDtsFlags;
- // get the packet length, this will be 0 for video
- pes.packetLength = 6 + ((payload[4] << 8) | payload[5]);
- // find out if this packets starts a new keyframe
- pes.dataAlignmentIndicator = (payload[6] & 0x04) !== 0;
- // PES packets may be annotated with a PTS value, or a PTS value
- // and a DTS value. Determine what combination of values is
- // available to work with.
- ptsDtsFlags = payload[7];
- // PTS and DTS are normally stored as a 33-bit number. Javascript
- // performs all bitwise operations on 32-bit integers but javascript
- // supports a much greater range (52-bits) of integer using standard
- // mathematical operations.
- // We construct a 31-bit value using bitwise operators over the 31
- // most significant bits and then multiply by 4 (equal to a left-shift
- // of 2) before we add the final 2 least significant bits of the
- // timestamp (equal to an OR.)
- if (ptsDtsFlags & 0xC0) {
- // the PTS and DTS are not written out directly. For information
- // on how they are encoded, see
- // http://dvd.sourceforge.net/dvdinfo/pes-hdr.html
- pes.pts = (payload[9] & 0x0E) << 27 |
- (payload[10] & 0xFF) << 20 |
- (payload[11] & 0xFE) << 12 |
- (payload[12] & 0xFF) << 5 |
- (payload[13] & 0xFE) >>> 3;
- pes.pts *= 4; // Left shift by 2
- pes.pts += (payload[13] & 0x06) >>> 1; // OR by the two LSBs
- pes.dts = pes.pts;
- if (ptsDtsFlags & 0x40) {
- pes.dts = (payload[14] & 0x0E) << 27 |
- (payload[15] & 0xFF) << 20 |
- (payload[16] & 0xFE) << 12 |
- (payload[17] & 0xFF) << 5 |
- (payload[18] & 0xFE) >>> 3;
- pes.dts *= 4; // Left shift by 2
- pes.dts += (payload[18] & 0x06) >>> 1; // OR by the two LSBs
- }
- }
- // the data section starts immediately after the PES header.
- // pes_header_data_length specifies the number of header bytes
- // that follow the last byte of the field.
- pes.data = payload.subarray(9 + payload[8]);
- },
- flushStream = function(stream, type, forceFlush) {
- var
- packetData = new Uint8Array(stream.size),
- event = {
- type: type
- },
- i = 0,
- offset = 0,
- packetFlushable = false,
- fragment;
- // do nothing if there is not enough buffered data for a complete
- // PES header
- if (!stream.data.length || stream.size < 9) {
- return;
- }
- event.trackId = stream.data[0].pid;
- // reassemble the packet
- for (i = 0; i < stream.data.length; i++) {
- fragment = stream.data[i];
- packetData.set(fragment.data, offset);
- offset += fragment.data.byteLength;
- }
- // parse assembled packet's PES header
- parsePes(packetData, event);
- // non-video PES packets MUST have a non-zero PES_packet_length
- // check that there is enough stream data to fill the packet
- packetFlushable = type === 'video' || event.packetLength <= stream.size;
- // flush pending packets if the conditions are right
- if (forceFlush || packetFlushable) {
- stream.size = 0;
- stream.data.length = 0;
- }
- // only emit packets that are complete. this is to avoid assembling
- // incomplete PES packets due to poor segmentation
- if (packetFlushable) {
- self.trigger('data', event);
- }
- };
- ElementaryStream.prototype.init.call(this);
- this.push = function(data) {
- ({
- pat: function() {
- // we have to wait for the PMT to arrive as well before we
- // have any meaningful metadata
- },
- pes: function() {
- var stream, streamType;
- switch (data.streamType) {
- case StreamTypes.H264_STREAM_TYPE:
- case m2tsStreamTypes.H264_STREAM_TYPE:
- stream = video;
- streamType = 'video';
- break;
- case StreamTypes.ADTS_STREAM_TYPE:
- stream = audio;
- streamType = 'audio';
- break;
- case StreamTypes.METADATA_STREAM_TYPE:
- stream = timedMetadata;
- streamType = 'timed-metadata';
- break;
- default:
- // ignore unknown stream types
- return;
- }
- // if a new packet is starting, we can flush the completed
- // packet
- if (data.payloadUnitStartIndicator) {
- flushStream(stream, streamType, true);
- }
- // buffer this fragment until we are sure we've received the
- // complete payload
- stream.data.push(data);
- stream.size += data.data.byteLength;
- },
- pmt: function() {
- var
- event = {
- type: 'metadata',
- tracks: []
- },
- programMapTable = data.programMapTable;
- // translate audio and video streams to tracks
- if (programMapTable.video !== null) {
- event.tracks.push({
- timelineStartInfo: {
- baseMediaDecodeTime: 0
- },
- id: +programMapTable.video,
- codec: 'avc',
- type: 'video'
- });
- }
- if (programMapTable.audio !== null) {
- event.tracks.push({
- timelineStartInfo: {
- baseMediaDecodeTime: 0
- },
- id: +programMapTable.audio,
- codec: 'adts',
- type: 'audio'
- });
- }
- self.trigger('data', event);
- }
- })[data.type]();
- };
- /**
- * Flush any remaining input. Video PES packets may be of variable
- * length. Normally, the start of a new video packet can trigger the
- * finalization of the previous packet. That is not possible if no
- * more video is forthcoming, however. In that case, some other
- * mechanism (like the end of the file) has to be employed. When it is
- * clear that no additional data is forthcoming, calling this method
- * will flush the buffered packets.
- */
- this.flush = function() {
- // !!THIS ORDER IS IMPORTANT!!
- // video first then audio
- flushStream(video, 'video');
- flushStream(audio, 'audio');
- flushStream(timedMetadata, 'timed-metadata');
- this.trigger('done');
- };
- };
- ElementaryStream.prototype = new Stream();
- var m2ts = {
- PAT_PID: 0x0000,
- MP2T_PACKET_LENGTH: MP2T_PACKET_LENGTH,
- TransportPacketStream: TransportPacketStream,
- TransportParseStream: TransportParseStream,
- ElementaryStream: ElementaryStream,
- TimestampRolloverStream: TimestampRolloverStream,
- CaptionStream: CaptionStream.CaptionStream,
- Cea608Stream: CaptionStream.Cea608Stream,
- MetadataStream: require('./metadata-stream')
- };
- for (var type in StreamTypes) {
- if (StreamTypes.hasOwnProperty(type)) {
- m2ts[type] = StreamTypes[type];
- }
- }
- module.exports = m2ts;
- },{"../utils/stream.js":33,"./caption-stream":17,"./metadata-stream":20,"./stream-types":22,"./stream-types.js":22,"./timestamp-rollover-stream":23}],20:[function(require,module,exports){
- /**
- * Accepts program elementary stream (PES) data events and parses out
- * ID3 metadata from them, if present.
- * @see http://id3.org/id3v2.3.0
- */
- 'use strict';
- var
- Stream = require('../utils/stream'),
- StreamTypes = require('./stream-types'),
- // return a percent-encoded representation of the specified byte range
- // @see http://en.wikipedia.org/wiki/Percent-encoding
- percentEncode = function(bytes, start, end) {
- var i, result = '';
- for (i = start; i < end; i++) {
- result += '%' + ('00' + bytes[i].toString(16)).slice(-2);
- }
- return result;
- },
- // return the string representation of the specified byte range,
- // interpreted as UTf-8.
- parseUtf8 = function(bytes, start, end) {
- return decodeURIComponent(percentEncode(bytes, start, end));
- },
- // return the string representation of the specified byte range,
- // interpreted as ISO-8859-1.
- parseIso88591 = function(bytes, start, end) {
- return unescape(percentEncode(bytes, start, end)); // jshint ignore:line
- },
- parseSyncSafeInteger = function(data) {
- return (data[0] << 21) |
- (data[1] << 14) |
- (data[2] << 7) |
- (data[3]);
- },
- tagParsers = {
- TXXX: function(tag) {
- var i;
- if (tag.data[0] !== 3) {
- // ignore frames with unrecognized character encodings
- return;
- }
- for (i = 1; i < tag.data.length; i++) {
- if (tag.data[i] === 0) {
- // parse the text fields
- tag.description = parseUtf8(tag.data, 1, i);
- // do not include the null terminator in the tag value
- tag.value = parseUtf8(tag.data, i + 1, tag.data.length).replace(/\0*$/, '');
- break;
- }
- }
- tag.data = tag.value;
- },
- WXXX: function(tag) {
- var i;
- if (tag.data[0] !== 3) {
- // ignore frames with unrecognized character encodings
- return;
- }
- for (i = 1; i < tag.data.length; i++) {
- if (tag.data[i] === 0) {
- // parse the description and URL fields
- tag.description = parseUtf8(tag.data, 1, i);
- tag.url = parseUtf8(tag.data, i + 1, tag.data.length);
- break;
- }
- }
- },
- PRIV: function(tag) {
- var i;
- for (i = 0; i < tag.data.length; i++) {
- if (tag.data[i] === 0) {
- // parse the description and URL fields
- tag.owner = parseIso88591(tag.data, 0, i);
- break;
- }
- }
- tag.privateData = tag.data.subarray(i + 1);
- tag.data = tag.privateData;
- }
- },
- MetadataStream;
- MetadataStream = function(options) {
- var
- settings = {
- debug: !!(options && options.debug),
- // the bytes of the program-level descriptor field in MP2T
- // see ISO/IEC 13818-1:2013 (E), section 2.6 "Program and
- // program element descriptors"
- descriptor: options && options.descriptor
- },
- // the total size in bytes of the ID3 tag being parsed
- tagSize = 0,
- // tag data that is not complete enough to be parsed
- buffer = [],
- // the total number of bytes currently in the buffer
- bufferSize = 0,
- i;
- MetadataStream.prototype.init.call(this);
- // calculate the text track in-band metadata track dispatch type
- // https://html.spec.whatwg.org/multipage/embedded-content.html#steps-to-expose-a-media-resource-specific-text-track
- this.dispatchType = StreamTypes.METADATA_STREAM_TYPE.toString(16);
- if (settings.descriptor) {
- for (i = 0; i < settings.descriptor.length; i++) {
- this.dispatchType += ('00' + settings.descriptor[i].toString(16)).slice(-2);
- }
- }
- this.push = function(chunk) {
- var tag, frameStart, frameSize, frame, i, frameHeader;
- if (chunk.type !== 'timed-metadata') {
- return;
- }
- // if data_alignment_indicator is set in the PES header,
- // we must have the start of a new ID3 tag. Assume anything
- // remaining in the buffer was malformed and throw it out
- if (chunk.dataAlignmentIndicator) {
- bufferSize = 0;
- buffer.length = 0;
- }
- // ignore events that don't look like ID3 data
- if (buffer.length === 0 &&
- (chunk.data.length < 10 ||
- chunk.data[0] !== 'I'.charCodeAt(0) ||
- chunk.data[1] !== 'D'.charCodeAt(0) ||
- chunk.data[2] !== '3'.charCodeAt(0))) {
- if (settings.debug) {
- // eslint-disable-next-line no-console
- console.log('Skipping unrecognized metadata packet');
- }
- return;
- }
- // add this chunk to the data we've collected so far
- buffer.push(chunk);
- bufferSize += chunk.data.byteLength;
- // grab the size of the entire frame from the ID3 header
- if (buffer.length === 1) {
- // the frame size is transmitted as a 28-bit integer in the
- // last four bytes of the ID3 header.
- // The most significant bit of each byte is dropped and the
- // results concatenated to recover the actual value.
- tagSize = parseSyncSafeInteger(chunk.data.subarray(6, 10));
- // ID3 reports the tag size excluding the header but it's more
- // convenient for our comparisons to include it
- tagSize += 10;
- }
- // if the entire frame has not arrived, wait for more data
- if (bufferSize < tagSize) {
- return;
- }
- // collect the entire frame so it can be parsed
- tag = {
- data: new Uint8Array(tagSize),
- frames: [],
- pts: buffer[0].pts,
- dts: buffer[0].dts
- };
- for (i = 0; i < tagSize;) {
- tag.data.set(buffer[0].data.subarray(0, tagSize - i), i);
- i += buffer[0].data.byteLength;
- bufferSize -= buffer[0].data.byteLength;
- buffer.shift();
- }
- // find the start of the first frame and the end of the tag
- frameStart = 10;
- if (tag.data[5] & 0x40) {
- // advance the frame start past the extended header
- frameStart += 4; // header size field
- frameStart += parseSyncSafeInteger(tag.data.subarray(10, 14));
- // clip any padding off the end
- tagSize -= parseSyncSafeInteger(tag.data.subarray(16, 20));
- }
- // parse one or more ID3 frames
- // http://id3.org/id3v2.3.0#ID3v2_frame_overview
- do {
- // determine the number of bytes in this frame
- frameSize = parseSyncSafeInteger(tag.data.subarray(frameStart + 4, frameStart + 8));
- if (frameSize < 1) {
- // eslint-disable-next-line no-console
- return console.log('Malformed ID3 frame encountered. Skipping metadata parsing.');
- }
- frameHeader = String.fromCharCode(tag.data[frameStart],
- tag.data[frameStart + 1],
- tag.data[frameStart + 2],
- tag.data[frameStart + 3]);
- frame = {
- id: frameHeader,
- data: tag.data.subarray(frameStart + 10, frameStart + frameSize + 10)
- };
- frame.key = frame.id;
- if (tagParsers[frame.id]) {
- tagParsers[frame.id](frame);
- // handle the special PRIV frame used to indicate the start
- // time for raw AAC data
- if (frame.owner === 'com.apple.streaming.transportStreamTimestamp') {
- var
- d = frame.data,
- size = ((d[3] & 0x01) << 30) |
- (d[4] << 22) |
- (d[5] << 14) |
- (d[6] << 6) |
- (d[7] >>> 2);
- size *= 4;
- size += d[7] & 0x03;
- frame.timeStamp = size;
- // in raw AAC, all subsequent data will be timestamped based
- // on the value of this frame
- // we couldn't have known the appropriate pts and dts before
- // parsing this ID3 tag so set those values now
- if (tag.pts === undefined && tag.dts === undefined) {
- tag.pts = frame.timeStamp;
- tag.dts = frame.timeStamp;
- }
- this.trigger('timestamp', frame);
- }
- }
- tag.frames.push(frame);
- frameStart += 10; // advance past the frame header
- frameStart += frameSize; // advance past the frame body
- } while (frameStart < tagSize);
- this.trigger('data', tag);
- };
- };
- MetadataStream.prototype = new Stream();
- module.exports = MetadataStream;
- },{"../utils/stream":33,"./stream-types":22}],21:[function(require,module,exports){
- /**
- * mux.js
- *
- * Copyright (c) 2016 Brightcove
- * All rights reserved.
- *
- * Utilities to detect basic properties and metadata about TS Segments.
- */
- 'use strict';
- var StreamTypes = require('./stream-types.js');
- var parsePid = function(packet) {
- var pid = packet[1] & 0x1f;
- pid <<= 8;
- pid |= packet[2];
- return pid;
- };
- var parsePayloadUnitStartIndicator = function(packet) {
- return !!(packet[1] & 0x40);
- };
- var parseAdaptionField = function(packet) {
- var offset = 0;
- // if an adaption field is present, its length is specified by the
- // fifth byte of the TS packet header. The adaptation field is
- // used to add stuffing to PES packets that don't fill a complete
- // TS packet, and to specify some forms of timing and control data
- // that we do not currently use.
- if (((packet[3] & 0x30) >>> 4) > 0x01) {
- offset += packet[4] + 1;
- }
- return offset;
- };
- var parseType = function(packet, pmtPid) {
- var pid = parsePid(packet);
- if (pid === 0) {
- return 'pat';
- } else if (pid === pmtPid) {
- return 'pmt';
- } else if (pmtPid) {
- return 'pes';
- }
- return null;
- };
- var parsePat = function(packet) {
- var pusi = parsePayloadUnitStartIndicator(packet);
- var offset = 4 + parseAdaptionField(packet);
- if (pusi) {
- offset += packet[offset] + 1;
- }
- return (packet[offset + 10] & 0x1f) << 8 | packet[offset + 11];
- };
- var parsePmt = function(packet) {
- var programMapTable = {};
- var pusi = parsePayloadUnitStartIndicator(packet);
- var payloadOffset = 4 + parseAdaptionField(packet);
- if (pusi) {
- payloadOffset += packet[payloadOffset] + 1;
- }
- // PMTs can be sent ahead of the time when they should actually
- // take effect. We don't believe this should ever be the case
- // for HLS but we'll ignore "forward" PMT declarations if we see
- // them. Future PMT declarations have the current_next_indicator
- // set to zero.
- if (!(packet[payloadOffset + 5] & 0x01)) {
- return;
- }
- var sectionLength, tableEnd, programInfoLength;
- // the mapping table ends at the end of the current section
- sectionLength = (packet[payloadOffset + 1] & 0x0f) << 8 | packet[payloadOffset + 2];
- tableEnd = 3 + sectionLength - 4;
- // to determine where the table is, we have to figure out how
- // long the program info descriptors are
- programInfoLength = (packet[payloadOffset + 10] & 0x0f) << 8 | packet[payloadOffset + 11];
- // advance the offset to the first entry in the mapping table
- var offset = 12 + programInfoLength;
- while (offset < tableEnd) {
- var i = payloadOffset + offset;
- // add an entry that maps the elementary_pid to the stream_type
- programMapTable[(packet[i + 1] & 0x1F) << 8 | packet[i + 2]] = packet[i];
- // move to the next table entry
- // skip past the elementary stream descriptors, if present
- offset += ((packet[i + 3] & 0x0F) << 8 | packet[i + 4]) + 5;
- }
- return programMapTable;
- };
- var parsePesType = function(packet, programMapTable) {
- var pid = parsePid(packet);
- var type = programMapTable[pid];
- switch (type) {
- case StreamTypes.H264_STREAM_TYPE:
- return 'video';
- case StreamTypes.ADTS_STREAM_TYPE:
- return 'audio';
- case StreamTypes.METADATA_STREAM_TYPE:
- return 'timed-metadata';
- default:
- return null;
- }
- };
- var parsePesTime = function(packet) {
- var pusi = parsePayloadUnitStartIndicator(packet);
- if (!pusi) {
- return null;
- }
- var offset = 4 + parseAdaptionField(packet);
- if (offset >= packet.byteLength) {
- // From the H 222.0 MPEG-TS spec
- // "For transport stream packets carrying PES packets, stuffing is needed when there
- // is insufficient PES packet data to completely fill the transport stream packet
- // payload bytes. Stuffing is accomplished by defining an adaptation field longer than
- // the sum of the lengths of the data elements in it, so that the payload bytes
- // remaining after the adaptation field exactly accommodates the available PES packet
- // data."
- //
- // If the offset is >= the length of the packet, then the packet contains no data
- // and instead is just adaption field stuffing bytes
- return null;
- }
- var pes = null;
- var ptsDtsFlags;
- // PES packets may be annotated with a PTS value, or a PTS value
- // and a DTS value. Determine what combination of values is
- // available to work with.
- ptsDtsFlags = packet[offset + 7];
- // PTS and DTS are normally stored as a 33-bit number. Javascript
- // performs all bitwise operations on 32-bit integers but javascript
- // supports a much greater range (52-bits) of integer using standard
- // mathematical operations.
- // We construct a 31-bit value using bitwise operators over the 31
- // most significant bits and then multiply by 4 (equal to a left-shift
- // of 2) before we add the final 2 least significant bits of the
- // timestamp (equal to an OR.)
- if (ptsDtsFlags & 0xC0) {
- pes = {};
- // the PTS and DTS are not written out directly. For information
- // on how they are encoded, see
- // http://dvd.sourceforge.net/dvdinfo/pes-hdr.html
- pes.pts = (packet[offset + 9] & 0x0E) << 27 |
- (packet[offset + 10] & 0xFF) << 20 |
- (packet[offset + 11] & 0xFE) << 12 |
- (packet[offset + 12] & 0xFF) << 5 |
- (packet[offset + 13] & 0xFE) >>> 3;
- pes.pts *= 4; // Left shift by 2
- pes.pts += (packet[offset + 13] & 0x06) >>> 1; // OR by the two LSBs
- pes.dts = pes.pts;
- if (ptsDtsFlags & 0x40) {
- pes.dts = (packet[offset + 14] & 0x0E) << 27 |
- (packet[offset + 15] & 0xFF) << 20 |
- (packet[offset + 16] & 0xFE) << 12 |
- (packet[offset + 17] & 0xFF) << 5 |
- (packet[offset + 18] & 0xFE) >>> 3;
- pes.dts *= 4; // Left shift by 2
- pes.dts += (packet[offset + 18] & 0x06) >>> 1; // OR by the two LSBs
- }
- }
- return pes;
- };
- var parseNalUnitType = function(type) {
- switch (type) {
- case 0x05:
- return 'slice_layer_without_partitioning_rbsp_idr';
- case 0x06:
- return 'sei_rbsp';
- case 0x07:
- return 'seq_parameter_set_rbsp';
- case 0x08:
- return 'pic_parameter_set_rbsp';
- case 0x09:
- return 'access_unit_delimiter_rbsp';
- default:
- return null;
- }
- };
- var videoPacketContainsKeyFrame = function(packet) {
- var offset = 4 + parseAdaptionField(packet);
- var frameBuffer = packet.subarray(offset);
- var frameI = 0;
- var frameSyncPoint = 0;
- var foundKeyFrame = false;
- var nalType;
- // advance the sync point to a NAL start, if necessary
- for (; frameSyncPoint < frameBuffer.byteLength - 3; frameSyncPoint++) {
- if (frameBuffer[frameSyncPoint + 2] === 1) {
- // the sync point is properly aligned
- frameI = frameSyncPoint + 5;
- break;
- }
- }
- while (frameI < frameBuffer.byteLength) {
- // look at the current byte to determine if we've hit the end of
- // a NAL unit boundary
- switch (frameBuffer[frameI]) {
- case 0:
- // skip past non-sync sequences
- if (frameBuffer[frameI - 1] !== 0) {
- frameI += 2;
- break;
- } else if (frameBuffer[frameI - 2] !== 0) {
- frameI++;
- break;
- }
- if (frameSyncPoint + 3 !== frameI - 2) {
- nalType = parseNalUnitType(frameBuffer[frameSyncPoint + 3] & 0x1f);
- if (nalType === 'slice_layer_without_partitioning_rbsp_idr') {
- foundKeyFrame = true;
- }
- }
- // drop trailing zeroes
- do {
- frameI++;
- } while (frameBuffer[frameI] !== 1 && frameI < frameBuffer.length);
- frameSyncPoint = frameI - 2;
- frameI += 3;
- break;
- case 1:
- // skip past non-sync sequences
- if (frameBuffer[frameI - 1] !== 0 ||
- frameBuffer[frameI - 2] !== 0) {
- frameI += 3;
- break;
- }
- nalType = parseNalUnitType(frameBuffer[frameSyncPoint + 3] & 0x1f);
- if (nalType === 'slice_layer_without_partitioning_rbsp_idr') {
- foundKeyFrame = true;
- }
- frameSyncPoint = frameI - 2;
- frameI += 3;
- break;
- default:
- // the current byte isn't a one or zero, so it cannot be part
- // of a sync sequence
- frameI += 3;
- break;
- }
- }
- frameBuffer = frameBuffer.subarray(frameSyncPoint);
- frameI -= frameSyncPoint;
- frameSyncPoint = 0;
- // parse the final nal
- if (frameBuffer && frameBuffer.byteLength > 3) {
- nalType = parseNalUnitType(frameBuffer[frameSyncPoint + 3] & 0x1f);
- if (nalType === 'slice_layer_without_partitioning_rbsp_idr') {
- foundKeyFrame = true;
- }
- }
- return foundKeyFrame;
- };
- module.exports = {
- parseType: parseType,
- parsePat: parsePat,
- parsePmt: parsePmt,
- parsePayloadUnitStartIndicator: parsePayloadUnitStartIndicator,
- parsePesType: parsePesType,
- parsePesTime: parsePesTime,
- videoPacketContainsKeyFrame: videoPacketContainsKeyFrame
- };
- },{"./stream-types.js":22}],22:[function(require,module,exports){
- 'use strict';
- module.exports = {
- H264_STREAM_TYPE: 0x1B,
- ADTS_STREAM_TYPE: 0x0F,
- METADATA_STREAM_TYPE: 0x15
- };
- },{}],23:[function(require,module,exports){
- /**
- * mux.js
- *
- * Copyright (c) 2016 Brightcove
- * All rights reserved.
- *
- * Accepts program elementary stream (PES) data events and corrects
- * decode and presentation time stamps to account for a rollover
- * of the 33 bit value.
- */
- 'use strict';
- var Stream = require('../utils/stream');
- var MAX_TS = 8589934592;
- var RO_THRESH = 4294967296;
- var handleRollover = function(value, reference) {
- var direction = 1;
- if (value > reference) {
- // If the current timestamp value is greater than our reference timestamp and we detect a
- // timestamp rollover, this means the roll over is happening in the opposite direction.
- // Example scenario: Enter a long stream/video just after a rollover occurred. The reference
- // point will be set to a small number, e.g. 1. The user then seeks backwards over the
- // rollover point. In loading this segment, the timestamp values will be very large,
- // e.g. 2^33 - 1. Since this comes before the data we loaded previously, we want to adjust
- // the time stamp to be `value - 2^33`.
- direction = -1;
- }
- // Note: A seek forwards or back that is greater than the RO_THRESH (2^32, ~13 hours) will
- // cause an incorrect adjustment.
- while (Math.abs(reference - value) > RO_THRESH) {
- value += (direction * MAX_TS);
- }
- return value;
- };
- var TimestampRolloverStream = function(type) {
- var lastDTS, referenceDTS;
- TimestampRolloverStream.prototype.init.call(this);
- this.type_ = type;
- this.push = function(data) {
- if (data.type !== this.type_) {
- return;
- }
- if (referenceDTS === undefined) {
- referenceDTS = data.dts;
- }
- data.dts = handleRollover(data.dts, referenceDTS);
- data.pts = handleRollover(data.pts, referenceDTS);
- lastDTS = data.dts;
- this.trigger('data', data);
- };
- this.flush = function() {
- referenceDTS = lastDTS;
- this.trigger('done');
- };
- this.discontinuity = function() {
- referenceDTS = void 0;
- lastDTS = void 0;
- };
- };
- TimestampRolloverStream.prototype = new Stream();
- module.exports = {
- TimestampRolloverStream: TimestampRolloverStream,
- handleRollover: handleRollover
- };
- },{"../utils/stream":33}],24:[function(require,module,exports){
- module.exports = {
- generator: require('./mp4-generator'),
- Transmuxer: require('./transmuxer').Transmuxer,
- AudioSegmentStream: require('./transmuxer').AudioSegmentStream,
- VideoSegmentStream: require('./transmuxer').VideoSegmentStream
- };
- },{"./mp4-generator":25,"./transmuxer":27}],25:[function(require,module,exports){
- /**
- * mux.js
- *
- * Copyright (c) 2015 Brightcove
- * All rights reserved.
- *
- * Functions that generate fragmented MP4s suitable for use with Media
- * Source Extensions.
- */
- 'use strict';
- var UINT32_MAX = Math.pow(2, 32) - 1;
- var box, dinf, esds, ftyp, mdat, mfhd, minf, moof, moov, mvex, mvhd,
- trak, tkhd, mdia, mdhd, hdlr, sdtp, stbl, stsd, traf, trex,
- trun, types, MAJOR_BRAND, MINOR_VERSION, AVC1_BRAND, VIDEO_HDLR,
- AUDIO_HDLR, HDLR_TYPES, VMHD, SMHD, DREF, STCO, STSC, STSZ, STTS;
- // pre-calculate constants
- (function() {
- var i;
- types = {
- avc1: [], // codingname
- avcC: [],
- btrt: [],
- dinf: [],
- dref: [],
- esds: [],
- ftyp: [],
- hdlr: [],
- mdat: [],
- mdhd: [],
- mdia: [],
- mfhd: [],
- minf: [],
- moof: [],
- moov: [],
- mp4a: [], // codingname
- mvex: [],
- mvhd: [],
- sdtp: [],
- smhd: [],
- stbl: [],
- stco: [],
- stsc: [],
- stsd: [],
- stsz: [],
- stts: [],
- styp: [],
- tfdt: [],
- tfhd: [],
- traf: [],
- trak: [],
- trun: [],
- trex: [],
- tkhd: [],
- vmhd: []
- };
- // In environments where Uint8Array is undefined (e.g., IE8), skip set up so that we
- // don't throw an error
- if (typeof Uint8Array === 'undefined') {
- return;
- }
- for (i in types) {
- if (types.hasOwnProperty(i)) {
- types[i] = [
- i.charCodeAt(0),
- i.charCodeAt(1),
- i.charCodeAt(2),
- i.charCodeAt(3)
- ];
- }
- }
- MAJOR_BRAND = new Uint8Array([
- 'i'.charCodeAt(0),
- 's'.charCodeAt(0),
- 'o'.charCodeAt(0),
- 'm'.charCodeAt(0)
- ]);
- AVC1_BRAND = new Uint8Array([
- 'a'.charCodeAt(0),
- 'v'.charCodeAt(0),
- 'c'.charCodeAt(0),
- '1'.charCodeAt(0)
- ]);
- MINOR_VERSION = new Uint8Array([0, 0, 0, 1]);
- VIDEO_HDLR = new Uint8Array([
- 0x00, // version 0
- 0x00, 0x00, 0x00, // flags
- 0x00, 0x00, 0x00, 0x00, // pre_defined
- 0x76, 0x69, 0x64, 0x65, // handler_type: 'vide'
- 0x00, 0x00, 0x00, 0x00, // reserved
- 0x00, 0x00, 0x00, 0x00, // reserved
- 0x00, 0x00, 0x00, 0x00, // reserved
- 0x56, 0x69, 0x64, 0x65,
- 0x6f, 0x48, 0x61, 0x6e,
- 0x64, 0x6c, 0x65, 0x72, 0x00 // name: 'VideoHandler'
- ]);
- AUDIO_HDLR = new Uint8Array([
- 0x00, // version 0
- 0x00, 0x00, 0x00, // flags
- 0x00, 0x00, 0x00, 0x00, // pre_defined
- 0x73, 0x6f, 0x75, 0x6e, // handler_type: 'soun'
- 0x00, 0x00, 0x00, 0x00, // reserved
- 0x00, 0x00, 0x00, 0x00, // reserved
- 0x00, 0x00, 0x00, 0x00, // reserved
- 0x53, 0x6f, 0x75, 0x6e,
- 0x64, 0x48, 0x61, 0x6e,
- 0x64, 0x6c, 0x65, 0x72, 0x00 // name: 'SoundHandler'
- ]);
- HDLR_TYPES = {
- video: VIDEO_HDLR,
- audio: AUDIO_HDLR
- };
- DREF = new Uint8Array([
- 0x00, // version 0
- 0x00, 0x00, 0x00, // flags
- 0x00, 0x00, 0x00, 0x01, // entry_count
- 0x00, 0x00, 0x00, 0x0c, // entry_size
- 0x75, 0x72, 0x6c, 0x20, // 'url' type
- 0x00, // version 0
- 0x00, 0x00, 0x01 // entry_flags
- ]);
- SMHD = new Uint8Array([
- 0x00, // version
- 0x00, 0x00, 0x00, // flags
- 0x00, 0x00, // balance, 0 means centered
- 0x00, 0x00 // reserved
- ]);
- STCO = new Uint8Array([
- 0x00, // version
- 0x00, 0x00, 0x00, // flags
- 0x00, 0x00, 0x00, 0x00 // entry_count
- ]);
- STSC = STCO;
- STSZ = new Uint8Array([
- 0x00, // version
- 0x00, 0x00, 0x00, // flags
- 0x00, 0x00, 0x00, 0x00, // sample_size
- 0x00, 0x00, 0x00, 0x00 // sample_count
- ]);
- STTS = STCO;
- VMHD = new Uint8Array([
- 0x00, // version
- 0x00, 0x00, 0x01, // flags
- 0x00, 0x00, // graphicsmode
- 0x00, 0x00,
- 0x00, 0x00,
- 0x00, 0x00 // opcolor
- ]);
- }());
- box = function(type) {
- var
- payload = [],
- size = 0,
- i,
- result,
- view;
- for (i = 1; i < arguments.length; i++) {
- payload.push(arguments[i]);
- }
- i = payload.length;
- // calculate the total size we need to allocate
- while (i--) {
- size += payload[i].byteLength;
- }
- result = new Uint8Array(size + 8);
- view = new DataView(result.buffer, result.byteOffset, result.byteLength);
- view.setUint32(0, result.byteLength);
- result.set(type, 4);
- // copy the payload into the result
- for (i = 0, size = 8; i < payload.length; i++) {
- result.set(payload[i], size);
- size += payload[i].byteLength;
- }
- return result;
- };
- dinf = function() {
- return box(types.dinf, box(types.dref, DREF));
- };
- esds = function(track) {
- return box(types.esds, new Uint8Array([
- 0x00, // version
- 0x00, 0x00, 0x00, // flags
- // ES_Descriptor
- 0x03, // tag, ES_DescrTag
- 0x19, // length
- 0x00, 0x00, // ES_ID
- 0x00, // streamDependenceFlag, URL_flag, reserved, streamPriority
- // DecoderConfigDescriptor
- 0x04, // tag, DecoderConfigDescrTag
- 0x11, // length
- 0x40, // object type
- 0x15, // streamType
- 0x00, 0x06, 0x00, // bufferSizeDB
- 0x00, 0x00, 0xda, 0xc0, // maxBitrate
- 0x00, 0x00, 0xda, 0xc0, // avgBitrate
- // DecoderSpecificInfo
- 0x05, // tag, DecoderSpecificInfoTag
- 0x02, // length
- // ISO/IEC 14496-3, AudioSpecificConfig
- // for samplingFrequencyIndex see ISO/IEC 13818-7:2006, 8.1.3.2.2, Table 35
- (track.audioobjecttype << 3) | (track.samplingfrequencyindex >>> 1),
- (track.samplingfrequencyindex << 7) | (track.channelcount << 3),
- 0x06, 0x01, 0x02 // GASpecificConfig
- ]));
- };
- ftyp = function() {
- return box(types.ftyp, MAJOR_BRAND, MINOR_VERSION, MAJOR_BRAND, AVC1_BRAND);
- };
- hdlr = function(type) {
- return box(types.hdlr, HDLR_TYPES[type]);
- };
- mdat = function(data) {
- return box(types.mdat, data);
- };
- mdhd = function(track) {
- var result = new Uint8Array([
- 0x00, // version 0
- 0x00, 0x00, 0x00, // flags
- 0x00, 0x00, 0x00, 0x02, // creation_time
- 0x00, 0x00, 0x00, 0x03, // modification_time
- 0x00, 0x01, 0x5f, 0x90, // timescale, 90,000 "ticks" per second
- (track.duration >>> 24) & 0xFF,
- (track.duration >>> 16) & 0xFF,
- (track.duration >>> 8) & 0xFF,
- track.duration & 0xFF, // duration
- 0x55, 0xc4, // 'und' language (undetermined)
- 0x00, 0x00
- ]);
- // Use the sample rate from the track metadata, when it is
- // defined. The sample rate can be parsed out of an ADTS header, for
- // instance.
- if (track.samplerate) {
- result[12] = (track.samplerate >>> 24) & 0xFF;
- result[13] = (track.samplerate >>> 16) & 0xFF;
- result[14] = (track.samplerate >>> 8) & 0xFF;
- result[15] = (track.samplerate) & 0xFF;
- }
- return box(types.mdhd, result);
- };
- mdia = function(track) {
- return box(types.mdia, mdhd(track), hdlr(track.type), minf(track));
- };
- mfhd = function(sequenceNumber) {
- return box(types.mfhd, new Uint8Array([
- 0x00,
- 0x00, 0x00, 0x00, // flags
- (sequenceNumber & 0xFF000000) >> 24,
- (sequenceNumber & 0xFF0000) >> 16,
- (sequenceNumber & 0xFF00) >> 8,
- sequenceNumber & 0xFF // sequence_number
- ]));
- };
- minf = function(track) {
- return box(types.minf,
- track.type === 'video' ? box(types.vmhd, VMHD) : box(types.smhd, SMHD),
- dinf(),
- stbl(track));
- };
- moof = function(sequenceNumber, tracks) {
- var
- trackFragments = [],
- i = tracks.length;
- // build traf boxes for each track fragment
- while (i--) {
- trackFragments[i] = traf(tracks[i]);
- }
- return box.apply(null, [
- types.moof,
- mfhd(sequenceNumber)
- ].concat(trackFragments));
- };
- /**
- * Returns a movie box.
- * @param tracks {array} the tracks associated with this movie
- * @see ISO/IEC 14496-12:2012(E), section 8.2.1
- */
- moov = function(tracks) {
- var
- i = tracks.length,
- boxes = [];
- while (i--) {
- boxes[i] = trak(tracks[i]);
- }
- return box.apply(null, [types.moov, mvhd(0xffffffff)].concat(boxes).concat(mvex(tracks)));
- };
- mvex = function(tracks) {
- var
- i = tracks.length,
- boxes = [];
- while (i--) {
- boxes[i] = trex(tracks[i]);
- }
- return box.apply(null, [types.mvex].concat(boxes));
- };
- mvhd = function(duration) {
- var
- bytes = new Uint8Array([
- 0x00, // version 0
- 0x00, 0x00, 0x00, // flags
- 0x00, 0x00, 0x00, 0x01, // creation_time
- 0x00, 0x00, 0x00, 0x02, // modification_time
- 0x00, 0x01, 0x5f, 0x90, // timescale, 90,000 "ticks" per second
- (duration & 0xFF000000) >> 24,
- (duration & 0xFF0000) >> 16,
- (duration & 0xFF00) >> 8,
- duration & 0xFF, // duration
- 0x00, 0x01, 0x00, 0x00, // 1.0 rate
- 0x01, 0x00, // 1.0 volume
- 0x00, 0x00, // reserved
- 0x00, 0x00, 0x00, 0x00, // reserved
- 0x00, 0x00, 0x00, 0x00, // reserved
- 0x00, 0x01, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x01, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00,
- 0x40, 0x00, 0x00, 0x00, // transformation: unity matrix
- 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, // pre_defined
- 0xff, 0xff, 0xff, 0xff // next_track_ID
- ]);
- return box(types.mvhd, bytes);
- };
- sdtp = function(track) {
- var
- samples = track.samples || [],
- bytes = new Uint8Array(4 + samples.length),
- flags,
- i;
- // leave the full box header (4 bytes) all zero
- // write the sample table
- for (i = 0; i < samples.length; i++) {
- flags = samples[i].flags;
- bytes[i + 4] = (flags.dependsOn << 4) |
- (flags.isDependedOn << 2) |
- (flags.hasRedundancy);
- }
- return box(types.sdtp,
- bytes);
- };
- stbl = function(track) {
- return box(types.stbl,
- stsd(track),
- box(types.stts, STTS),
- box(types.stsc, STSC),
- box(types.stsz, STSZ),
- box(types.stco, STCO));
- };
- (function() {
- var videoSample, audioSample;
- stsd = function(track) {
- return box(types.stsd, new Uint8Array([
- 0x00, // version 0
- 0x00, 0x00, 0x00, // flags
- 0x00, 0x00, 0x00, 0x01
- ]), track.type === 'video' ? videoSample(track) : audioSample(track));
- };
- videoSample = function(track) {
- var
- sps = track.sps || [],
- pps = track.pps || [],
- sequenceParameterSets = [],
- pictureParameterSets = [],
- i;
- // assemble the SPSs
- for (i = 0; i < sps.length; i++) {
- sequenceParameterSets.push((sps[i].byteLength & 0xFF00) >>> 8);
- sequenceParameterSets.push((sps[i].byteLength & 0xFF)); // sequenceParameterSetLength
- sequenceParameterSets = sequenceParameterSets.concat(Array.prototype.slice.call(sps[i])); // SPS
- }
- // assemble the PPSs
- for (i = 0; i < pps.length; i++) {
- pictureParameterSets.push((pps[i].byteLength & 0xFF00) >>> 8);
- pictureParameterSets.push((pps[i].byteLength & 0xFF));
- pictureParameterSets = pictureParameterSets.concat(Array.prototype.slice.call(pps[i]));
- }
- return box(types.avc1, new Uint8Array([
- 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, // reserved
- 0x00, 0x01, // data_reference_index
- 0x00, 0x00, // pre_defined
- 0x00, 0x00, // reserved
- 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, // pre_defined
- (track.width & 0xff00) >> 8,
- track.width & 0xff, // width
- (track.height & 0xff00) >> 8,
- track.height & 0xff, // height
- 0x00, 0x48, 0x00, 0x00, // horizresolution
- 0x00, 0x48, 0x00, 0x00, // vertresolution
- 0x00, 0x00, 0x00, 0x00, // reserved
- 0x00, 0x01, // frame_count
- 0x13,
- 0x76, 0x69, 0x64, 0x65,
- 0x6f, 0x6a, 0x73, 0x2d,
- 0x63, 0x6f, 0x6e, 0x74,
- 0x72, 0x69, 0x62, 0x2d,
- 0x68, 0x6c, 0x73, 0x00,
- 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, // compressorname
- 0x00, 0x18, // depth = 24
- 0x11, 0x11 // pre_defined = -1
- ]), box(types.avcC, new Uint8Array([
- 0x01, // configurationVersion
- track.profileIdc, // AVCProfileIndication
- track.profileCompatibility, // profile_compatibility
- track.levelIdc, // AVCLevelIndication
- 0xff // lengthSizeMinusOne, hard-coded to 4 bytes
- ].concat([
- sps.length // numOfSequenceParameterSets
- ]).concat(sequenceParameterSets).concat([
- pps.length // numOfPictureParameterSets
- ]).concat(pictureParameterSets))), // "PPS"
- box(types.btrt, new Uint8Array([
- 0x00, 0x1c, 0x9c, 0x80, // bufferSizeDB
- 0x00, 0x2d, 0xc6, 0xc0, // maxBitrate
- 0x00, 0x2d, 0xc6, 0xc0
- ])) // avgBitrate
- );
- };
- audioSample = function(track) {
- return box(types.mp4a, new Uint8Array([
- // SampleEntry, ISO/IEC 14496-12
- 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, // reserved
- 0x00, 0x01, // data_reference_index
- // AudioSampleEntry, ISO/IEC 14496-12
- 0x00, 0x00, 0x00, 0x00, // reserved
- 0x00, 0x00, 0x00, 0x00, // reserved
- (track.channelcount & 0xff00) >> 8,
- (track.channelcount & 0xff), // channelcount
- (track.samplesize & 0xff00) >> 8,
- (track.samplesize & 0xff), // samplesize
- 0x00, 0x00, // pre_defined
- 0x00, 0x00, // reserved
- (track.samplerate & 0xff00) >> 8,
- (track.samplerate & 0xff),
- 0x00, 0x00 // samplerate, 16.16
- // MP4AudioSampleEntry, ISO/IEC 14496-14
- ]), esds(track));
- };
- }());
- tkhd = function(track) {
- var result = new Uint8Array([
- 0x00, // version 0
- 0x00, 0x00, 0x07, // flags
- 0x00, 0x00, 0x00, 0x00, // creation_time
- 0x00, 0x00, 0x00, 0x00, // modification_time
- (track.id & 0xFF000000) >> 24,
- (track.id & 0xFF0000) >> 16,
- (track.id & 0xFF00) >> 8,
- track.id & 0xFF, // track_ID
- 0x00, 0x00, 0x00, 0x00, // reserved
- (track.duration & 0xFF000000) >> 24,
- (track.duration & 0xFF0000) >> 16,
- (track.duration & 0xFF00) >> 8,
- track.duration & 0xFF, // duration
- 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00, // reserved
- 0x00, 0x00, // layer
- 0x00, 0x00, // alternate_group
- 0x01, 0x00, // non-audio track volume
- 0x00, 0x00, // reserved
- 0x00, 0x01, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x01, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00,
- 0x40, 0x00, 0x00, 0x00, // transformation: unity matrix
- (track.width & 0xFF00) >> 8,
- track.width & 0xFF,
- 0x00, 0x00, // width
- (track.height & 0xFF00) >> 8,
- track.height & 0xFF,
- 0x00, 0x00 // height
- ]);
- return box(types.tkhd, result);
- };
- /**
- * Generate a track fragment (traf) box. A traf box collects metadata
- * about tracks in a movie fragment (moof) box.
- */
- traf = function(track) {
- var trackFragmentHeader, trackFragmentDecodeTime, trackFragmentRun,
- sampleDependencyTable, dataOffset,
- upperWordBaseMediaDecodeTime, lowerWordBaseMediaDecodeTime;
- trackFragmentHeader = box(types.tfhd, new Uint8Array([
- 0x00, // version 0
- 0x00, 0x00, 0x3a, // flags
- (track.id & 0xFF000000) >> 24,
- (track.id & 0xFF0000) >> 16,
- (track.id & 0xFF00) >> 8,
- (track.id & 0xFF), // track_ID
- 0x00, 0x00, 0x00, 0x01, // sample_description_index
- 0x00, 0x00, 0x00, 0x00, // default_sample_duration
- 0x00, 0x00, 0x00, 0x00, // default_sample_size
- 0x00, 0x00, 0x00, 0x00 // default_sample_flags
- ]));
- upperWordBaseMediaDecodeTime = Math.floor(track.baseMediaDecodeTime / (UINT32_MAX + 1));
- lowerWordBaseMediaDecodeTime = Math.floor(track.baseMediaDecodeTime % (UINT32_MAX + 1));
- trackFragmentDecodeTime = box(types.tfdt, new Uint8Array([
- 0x01, // version 1
- 0x00, 0x00, 0x00, // flags
- // baseMediaDecodeTime
- (upperWordBaseMediaDecodeTime >>> 24) & 0xFF,
- (upperWordBaseMediaDecodeTime >>> 16) & 0xFF,
- (upperWordBaseMediaDecodeTime >>> 8) & 0xFF,
- upperWordBaseMediaDecodeTime & 0xFF,
- (lowerWordBaseMediaDecodeTime >>> 24) & 0xFF,
- (lowerWordBaseMediaDecodeTime >>> 16) & 0xFF,
- (lowerWordBaseMediaDecodeTime >>> 8) & 0xFF,
- lowerWordBaseMediaDecodeTime & 0xFF
- ]));
- // the data offset specifies the number of bytes from the start of
- // the containing moof to the first payload byte of the associated
- // mdat
- dataOffset = (32 + // tfhd
- 20 + // tfdt
- 8 + // traf header
- 16 + // mfhd
- 8 + // moof header
- 8); // mdat header
- // audio tracks require less metadata
- if (track.type === 'audio') {
- trackFragmentRun = trun(track, dataOffset);
- return box(types.traf,
- trackFragmentHeader,
- trackFragmentDecodeTime,
- trackFragmentRun);
- }
- // video tracks should contain an independent and disposable samples
- // box (sdtp)
- // generate one and adjust offsets to match
- sampleDependencyTable = sdtp(track);
- trackFragmentRun = trun(track,
- sampleDependencyTable.length + dataOffset);
- return box(types.traf,
- trackFragmentHeader,
- trackFragmentDecodeTime,
- trackFragmentRun,
- sampleDependencyTable);
- };
- /**
- * Generate a track box.
- * @param track {object} a track definition
- * @return {Uint8Array} the track box
- */
- trak = function(track) {
- track.duration = track.duration || 0xffffffff;
- return box(types.trak,
- tkhd(track),
- mdia(track));
- };
- trex = function(track) {
- var result = new Uint8Array([
- 0x00, // version 0
- 0x00, 0x00, 0x00, // flags
- (track.id & 0xFF000000) >> 24,
- (track.id & 0xFF0000) >> 16,
- (track.id & 0xFF00) >> 8,
- (track.id & 0xFF), // track_ID
- 0x00, 0x00, 0x00, 0x01, // default_sample_description_index
- 0x00, 0x00, 0x00, 0x00, // default_sample_duration
- 0x00, 0x00, 0x00, 0x00, // default_sample_size
- 0x00, 0x01, 0x00, 0x01 // default_sample_flags
- ]);
- // the last two bytes of default_sample_flags is the sample
- // degradation priority, a hint about the importance of this sample
- // relative to others. Lower the degradation priority for all sample
- // types other than video.
- if (track.type !== 'video') {
- result[result.length - 1] = 0x00;
- }
- return box(types.trex, result);
- };
- (function() {
- var audioTrun, videoTrun, trunHeader;
- // This method assumes all samples are uniform. That is, if a
- // duration is present for the first sample, it will be present for
- // all subsequent samples.
- // see ISO/IEC 14496-12:2012, Section 8.8.8.1
- trunHeader = function(samples, offset) {
- var durationPresent = 0, sizePresent = 0,
- flagsPresent = 0, compositionTimeOffset = 0;
- // trun flag constants
- if (samples.length) {
- if (samples[0].duration !== undefined) {
- durationPresent = 0x1;
- }
- if (samples[0].size !== undefined) {
- sizePresent = 0x2;
- }
- if (samples[0].flags !== undefined) {
- flagsPresent = 0x4;
- }
- if (samples[0].compositionTimeOffset !== undefined) {
- compositionTimeOffset = 0x8;
- }
- }
- return [
- 0x00, // version 0
- 0x00,
- durationPresent | sizePresent | flagsPresent | compositionTimeOffset,
- 0x01, // flags
- (samples.length & 0xFF000000) >>> 24,
- (samples.length & 0xFF0000) >>> 16,
- (samples.length & 0xFF00) >>> 8,
- samples.length & 0xFF, // sample_count
- (offset & 0xFF000000) >>> 24,
- (offset & 0xFF0000) >>> 16,
- (offset & 0xFF00) >>> 8,
- offset & 0xFF // data_offset
- ];
- };
- videoTrun = function(track, offset) {
- var bytes, samples, sample, i;
- samples = track.samples || [];
- offset += 8 + 12 + (16 * samples.length);
- bytes = trunHeader(samples, offset);
- for (i = 0; i < samples.length; i++) {
- sample = samples[i];
- bytes = bytes.concat([
- (sample.duration & 0xFF000000) >>> 24,
- (sample.duration & 0xFF0000) >>> 16,
- (sample.duration & 0xFF00) >>> 8,
- sample.duration & 0xFF, // sample_duration
- (sample.size & 0xFF000000) >>> 24,
- (sample.size & 0xFF0000) >>> 16,
- (sample.size & 0xFF00) >>> 8,
- sample.size & 0xFF, // sample_size
- (sample.flags.isLeading << 2) | sample.flags.dependsOn,
- (sample.flags.isDependedOn << 6) |
- (sample.flags.hasRedundancy << 4) |
- (sample.flags.paddingValue << 1) |
- sample.flags.isNonSyncSample,
- sample.flags.degradationPriority & 0xF0 << 8,
- sample.flags.degradationPriority & 0x0F, // sample_flags
- (sample.compositionTimeOffset & 0xFF000000) >>> 24,
- (sample.compositionTimeOffset & 0xFF0000) >>> 16,
- (sample.compositionTimeOffset & 0xFF00) >>> 8,
- sample.compositionTimeOffset & 0xFF // sample_composition_time_offset
- ]);
- }
- return box(types.trun, new Uint8Array(bytes));
- };
- audioTrun = function(track, offset) {
- var bytes, samples, sample, i;
- samples = track.samples || [];
- offset += 8 + 12 + (8 * samples.length);
- bytes = trunHeader(samples, offset);
- for (i = 0; i < samples.length; i++) {
- sample = samples[i];
- bytes = bytes.concat([
- (sample.duration & 0xFF000000) >>> 24,
- (sample.duration & 0xFF0000) >>> 16,
- (sample.duration & 0xFF00) >>> 8,
- sample.duration & 0xFF, // sample_duration
- (sample.size & 0xFF000000) >>> 24,
- (sample.size & 0xFF0000) >>> 16,
- (sample.size & 0xFF00) >>> 8,
- sample.size & 0xFF]); // sample_size
- }
- return box(types.trun, new Uint8Array(bytes));
- };
- trun = function(track, offset) {
- if (track.type === 'audio') {
- return audioTrun(track, offset);
- }
- return videoTrun(track, offset);
- };
- }());
- module.exports = {
- ftyp: ftyp,
- mdat: mdat,
- moof: moof,
- moov: moov,
- initSegment: function(tracks) {
- var
- fileType = ftyp(),
- movie = moov(tracks),
- result;
- result = new Uint8Array(fileType.byteLength + movie.byteLength);
- result.set(fileType);
- result.set(movie, fileType.byteLength);
- return result;
- }
- };
- },{}],26:[function(require,module,exports){
- /**
- * mux.js
- *
- * Copyright (c) 2015 Brightcove
- * All rights reserved.
- *
- * Utilities to detect basic properties and metadata about MP4s.
- */
- 'use strict';
- var findBox, parseType, timescale, startTime;
- // Find the data for a box specified by its path
- findBox = function(data, path) {
- var results = [],
- i, size, type, end, subresults;
- if (!path.length) {
- // short-circuit the search for empty paths
- return null;
- }
- for (i = 0; i < data.byteLength;) {
- size = data[i] << 24;
- size |= data[i + 1] << 16;
- size |= data[i + 2] << 8;
- size |= data[i + 3];
- type = parseType(data.subarray(i + 4, i + 8));
- end = size > 1 ? i + size : data.byteLength;
- if (type === path[0]) {
- if (path.length === 1) {
- // this is the end of the path and we've found the box we were
- // looking for
- results.push(data.subarray(i + 8, end));
- } else {
- // recursively search for the next box along the path
- subresults = findBox(data.subarray(i + 8, end), path.slice(1));
- if (subresults.length) {
- results = results.concat(subresults);
- }
- }
- }
- i = end;
- }
- // we've finished searching all of data
- return results;
- };
- /**
- * Returns the string representation of an ASCII encoded four byte buffer.
- * @param buffer {Uint8Array} a four-byte buffer to translate
- * @return {string} the corresponding string
- */
- parseType = function(buffer) {
- var result = '';
- result += String.fromCharCode(buffer[0]);
- result += String.fromCharCode(buffer[1]);
- result += String.fromCharCode(buffer[2]);
- result += String.fromCharCode(buffer[3]);
- return result;
- };
- /**
- * Parses an MP4 initialization segment and extracts the timescale
- * values for any declared tracks. Timescale values indicate the
- * number of clock ticks per second to assume for time-based values
- * elsewhere in the MP4.
- *
- * To determine the start time of an MP4, you need two pieces of
- * information: the timescale unit and the earliest base media decode
- * time. Multiple timescales can be specified within an MP4 but the
- * base media decode time is always expressed in the timescale from
- * the media header box for the track:
- * ```
- * moov > trak > mdia > mdhd.timescale
- * ```
- * @param init {Uint8Array} the bytes of the init segment
- * @return {object} a hash of track ids to timescale values or null if
- * the init segment is malformed.
- */
- timescale = function(init) {
- var
- result = {},
- traks = findBox(init, ['moov', 'trak']);
- // mdhd timescale
- return traks.reduce(function(result, trak) {
- var tkhd, version, index, id, mdhd;
- tkhd = findBox(trak, ['tkhd'])[0];
- if (!tkhd) {
- return null;
- }
- version = tkhd[0];
- index = version === 0 ? 12 : 20;
- id = tkhd[index] << 24 |
- tkhd[index + 1] << 16 |
- tkhd[index + 2] << 8 |
- tkhd[index + 3];
- mdhd = findBox(trak, ['mdia', 'mdhd'])[0];
- if (!mdhd) {
- return null;
- }
- version = mdhd[0];
- index = version === 0 ? 12 : 20;
- result[id] = mdhd[index] << 24 |
- mdhd[index + 1] << 16 |
- mdhd[index + 2] << 8 |
- mdhd[index + 3];
- return result;
- }, result);
- };
- /**
- * Determine the base media decode start time, in seconds, for an MP4
- * fragment. If multiple fragments are specified, the earliest time is
- * returned.
- *
- * The base media decode time can be parsed from track fragment
- * metadata:
- * ```
- * moof > traf > tfdt.baseMediaDecodeTime
- * ```
- * It requires the timescale value from the mdhd to interpret.
- *
- * @param timescale {object} a hash of track ids to timescale values.
- * @return {number} the earliest base media decode start time for the
- * fragment, in seconds
- */
- startTime = function(timescale, fragment) {
- var trafs, baseTimes, result;
- // we need info from two childrend of each track fragment box
- trafs = findBox(fragment, ['moof', 'traf']);
- // determine the start times for each track
- baseTimes = [].concat.apply([], trafs.map(function(traf) {
- return findBox(traf, ['tfhd']).map(function(tfhd) {
- var id, scale, baseTime;
- // get the track id from the tfhd
- id = tfhd[4] << 24 |
- tfhd[5] << 16 |
- tfhd[6] << 8 |
- tfhd[7];
- // assume a 90kHz clock if no timescale was specified
- scale = timescale[id] || 90e3;
- // get the base media decode time from the tfdt
- baseTime = findBox(traf, ['tfdt']).map(function(tfdt) {
- var version, result;
- version = tfdt[0];
- result = tfdt[4] << 24 |
- tfdt[5] << 16 |
- tfdt[6] << 8 |
- tfdt[7];
- if (version === 1) {
- result *= Math.pow(2, 32);
- result += tfdt[8] << 24 |
- tfdt[9] << 16 |
- tfdt[10] << 8 |
- tfdt[11];
- }
- return result;
- })[0];
- baseTime = baseTime || Infinity;
- // convert base time to seconds
- return baseTime / scale;
- });
- }));
- // return the minimum
- result = Math.min.apply(null, baseTimes);
- return isFinite(result) ? result : 0;
- };
- module.exports = {
- parseType: parseType,
- timescale: timescale,
- startTime: startTime
- };
- },{}],27:[function(require,module,exports){
- /**
- * mux.js
- *
- * Copyright (c) 2015 Brightcove
- * All rights reserved.
- *
- * A stream-based mp2t to mp4 converter. This utility can be used to
- * deliver mp4s to a SourceBuffer on platforms that support native
- * Media Source Extensions.
- */
- 'use strict';
- var Stream = require('../utils/stream.js');
- var mp4 = require('./mp4-generator.js');
- var m2ts = require('../m2ts/m2ts.js');
- var AdtsStream = require('../codecs/adts.js');
- var H264Stream = require('../codecs/h264').H264Stream;
- var AacStream = require('../aac');
- var coneOfSilence = require('../data/silence');
- var clock = require('../utils/clock');
- // constants
- var AUDIO_PROPERTIES = [
- 'audioobjecttype',
- 'channelcount',
- 'samplerate',
- 'samplingfrequencyindex',
- 'samplesize'
- ];
- var VIDEO_PROPERTIES = [
- 'width',
- 'height',
- 'profileIdc',
- 'levelIdc',
- 'profileCompatibility'
- ];
- var ONE_SECOND_IN_TS = 90000; // 90kHz clock
- // object types
- var VideoSegmentStream, AudioSegmentStream, Transmuxer, CoalesceStream;
- // Helper functions
- var
- createDefaultSample,
- isLikelyAacData,
- collectDtsInfo,
- clearDtsInfo,
- calculateTrackBaseMediaDecodeTime,
- arrayEquals,
- sumFrameByteLengths;
- /**
- * Default sample object
- * see ISO/IEC 14496-12:2012, section 8.6.4.3
- */
- createDefaultSample = function() {
- return {
- size: 0,
- flags: {
- isLeading: 0,
- dependsOn: 1,
- isDependedOn: 0,
- hasRedundancy: 0,
- degradationPriority: 0
- }
- };
- };
- isLikelyAacData = function(data) {
- if ((data[0] === 'I'.charCodeAt(0)) &&
- (data[1] === 'D'.charCodeAt(0)) &&
- (data[2] === '3'.charCodeAt(0))) {
- return true;
- }
- return false;
- };
- /**
- * Compare two arrays (even typed) for same-ness
- */
- arrayEquals = function(a, b) {
- var
- i;
- if (a.length !== b.length) {
- return false;
- }
- // compare the value of each element in the array
- for (i = 0; i < a.length; i++) {
- if (a[i] !== b[i]) {
- return false;
- }
- }
- return true;
- };
- /**
- * Sum the `byteLength` properties of the data in each AAC frame
- */
- sumFrameByteLengths = function(array) {
- var
- i,
- currentObj,
- sum = 0;
- // sum the byteLength's all each nal unit in the frame
- for (i = 0; i < array.length; i++) {
- currentObj = array[i];
- sum += currentObj.data.byteLength;
- }
- return sum;
- };
- /**
- * Constructs a single-track, ISO BMFF media segment from AAC data
- * events. The output of this stream can be fed to a SourceBuffer
- * configured with a suitable initialization segment.
- */
- AudioSegmentStream = function(track) {
- var
- adtsFrames = [],
- sequenceNumber = 0,
- earliestAllowedDts = 0,
- audioAppendStartTs = 0,
- videoBaseMediaDecodeTime = Infinity;
- AudioSegmentStream.prototype.init.call(this);
- this.push = function(data) {
- collectDtsInfo(track, data);
- if (track) {
- AUDIO_PROPERTIES.forEach(function(prop) {
- track[prop] = data[prop];
- });
- }
- // buffer audio data until end() is called
- adtsFrames.push(data);
- };
- this.setEarliestDts = function(earliestDts) {
- earliestAllowedDts = earliestDts - track.timelineStartInfo.baseMediaDecodeTime;
- };
- this.setVideoBaseMediaDecodeTime = function(baseMediaDecodeTime) {
- videoBaseMediaDecodeTime = baseMediaDecodeTime;
- };
- this.setAudioAppendStart = function(timestamp) {
- audioAppendStartTs = timestamp;
- };
- this.flush = function() {
- var
- frames,
- moof,
- mdat,
- boxes;
- // return early if no audio data has been observed
- if (adtsFrames.length === 0) {
- this.trigger('done', 'AudioSegmentStream');
- return;
- }
- frames = this.trimAdtsFramesByEarliestDts_(adtsFrames);
- track.baseMediaDecodeTime = calculateTrackBaseMediaDecodeTime(track);
- this.prefixWithSilence_(track, frames);
- // we have to build the index from byte locations to
- // samples (that is, adts frames) in the audio data
- track.samples = this.generateSampleTable_(frames);
- // concatenate the audio data to constuct the mdat
- mdat = mp4.mdat(this.concatenateFrameData_(frames));
- adtsFrames = [];
- moof = mp4.moof(sequenceNumber, [track]);
- boxes = new Uint8Array(moof.byteLength + mdat.byteLength);
- // bump the sequence number for next time
- sequenceNumber++;
- boxes.set(moof);
- boxes.set(mdat, moof.byteLength);
- clearDtsInfo(track);
- this.trigger('data', {track: track, boxes: boxes});
- this.trigger('done', 'AudioSegmentStream');
- };
- // Possibly pad (prefix) the audio track with silence if appending this track
- // would lead to the introduction of a gap in the audio buffer
- this.prefixWithSilence_ = function(track, frames) {
- var
- baseMediaDecodeTimeTs,
- frameDuration = 0,
- audioGapDuration = 0,
- audioFillFrameCount = 0,
- audioFillDuration = 0,
- silentFrame,
- i;
- if (!frames.length) {
- return;
- }
- baseMediaDecodeTimeTs = clock.audioTsToVideoTs(track.baseMediaDecodeTime, track.samplerate);
- // determine frame clock duration based on sample rate, round up to avoid overfills
- frameDuration = Math.ceil(ONE_SECOND_IN_TS / (track.samplerate / 1024));
- if (audioAppendStartTs && videoBaseMediaDecodeTime) {
- // insert the shortest possible amount (audio gap or audio to video gap)
- audioGapDuration =
- baseMediaDecodeTimeTs - Math.max(audioAppendStartTs, videoBaseMediaDecodeTime);
- // number of full frames in the audio gap
- audioFillFrameCount = Math.floor(audioGapDuration / frameDuration);
- audioFillDuration = audioFillFrameCount * frameDuration;
- }
- // don't attempt to fill gaps smaller than a single frame or larger
- // than a half second
- if (audioFillFrameCount < 1 || audioFillDuration > ONE_SECOND_IN_TS / 2) {
- return;
- }
- silentFrame = coneOfSilence[track.samplerate];
- if (!silentFrame) {
- // we don't have a silent frame pregenerated for the sample rate, so use a frame
- // from the content instead
- silentFrame = frames[0].data;
- }
- for (i = 0; i < audioFillFrameCount; i++) {
- frames.splice(i, 0, {
- data: silentFrame
- });
- }
- track.baseMediaDecodeTime -=
- Math.floor(clock.videoTsToAudioTs(audioFillDuration, track.samplerate));
- };
- // If the audio segment extends before the earliest allowed dts
- // value, remove AAC frames until starts at or after the earliest
- // allowed DTS so that we don't end up with a negative baseMedia-
- // DecodeTime for the audio track
- this.trimAdtsFramesByEarliestDts_ = function(adtsFrames) {
- if (track.minSegmentDts >= earliestAllowedDts) {
- return adtsFrames;
- }
- // We will need to recalculate the earliest segment Dts
- track.minSegmentDts = Infinity;
- return adtsFrames.filter(function(currentFrame) {
- // If this is an allowed frame, keep it and record it's Dts
- if (currentFrame.dts >= earliestAllowedDts) {
- track.minSegmentDts = Math.min(track.minSegmentDts, currentFrame.dts);
- track.minSegmentPts = track.minSegmentDts;
- return true;
- }
- // Otherwise, discard it
- return false;
- });
- };
- // generate the track's raw mdat data from an array of frames
- this.generateSampleTable_ = function(frames) {
- var
- i,
- currentFrame,
- samples = [];
- for (i = 0; i < frames.length; i++) {
- currentFrame = frames[i];
- samples.push({
- size: currentFrame.data.byteLength,
- duration: 1024 // For AAC audio, all samples contain 1024 samples
- });
- }
- return samples;
- };
- // generate the track's sample table from an array of frames
- this.concatenateFrameData_ = function(frames) {
- var
- i,
- currentFrame,
- dataOffset = 0,
- data = new Uint8Array(sumFrameByteLengths(frames));
- for (i = 0; i < frames.length; i++) {
- currentFrame = frames[i];
- data.set(currentFrame.data, dataOffset);
- dataOffset += currentFrame.data.byteLength;
- }
- return data;
- };
- };
- AudioSegmentStream.prototype = new Stream();
- /**
- * Constructs a single-track, ISO BMFF media segment from H264 data
- * events. The output of this stream can be fed to a SourceBuffer
- * configured with a suitable initialization segment.
- * @param track {object} track metadata configuration
- * @param options {object} transmuxer options object
- * @param options.alignGopsAtEnd {boolean} If true, start from the end of the
- * gopsToAlignWith list when attempting to align gop pts
- */
- VideoSegmentStream = function(track, options) {
- var
- sequenceNumber = 0,
- nalUnits = [],
- gopsToAlignWith = [],
- config,
- pps;
- options = options || {};
- VideoSegmentStream.prototype.init.call(this);
- delete track.minPTS;
- this.gopCache_ = [];
- this.push = function(nalUnit) {
- collectDtsInfo(track, nalUnit);
- // record the track config
- if (nalUnit.nalUnitType === 'seq_parameter_set_rbsp' && !config) {
- config = nalUnit.config;
- track.sps = [nalUnit.data];
- VIDEO_PROPERTIES.forEach(function(prop) {
- track[prop] = config[prop];
- }, this);
- }
- if (nalUnit.nalUnitType === 'pic_parameter_set_rbsp' &&
- !pps) {
- pps = nalUnit.data;
- track.pps = [nalUnit.data];
- }
- // buffer video until flush() is called
- nalUnits.push(nalUnit);
- };
- this.flush = function() {
- var
- frames,
- gopForFusion,
- gops,
- moof,
- mdat,
- boxes;
- // Throw away nalUnits at the start of the byte stream until
- // we find the first AUD
- while (nalUnits.length) {
- if (nalUnits[0].nalUnitType === 'access_unit_delimiter_rbsp') {
- break;
- }
- nalUnits.shift();
- }
- // Return early if no video data has been observed
- if (nalUnits.length === 0) {
- this.resetStream_();
- this.trigger('done', 'VideoSegmentStream');
- return;
- }
- // Organize the raw nal-units into arrays that represent
- // higher-level constructs such as frames and gops
- // (group-of-pictures)
- frames = this.groupNalsIntoFrames_(nalUnits);
- gops = this.groupFramesIntoGops_(frames);
- // If the first frame of this fragment is not a keyframe we have
- // a problem since MSE (on Chrome) requires a leading keyframe.
- //
- // We have two approaches to repairing this situation:
- // 1) GOP-FUSION:
- // This is where we keep track of the GOPS (group-of-pictures)
- // from previous fragments and attempt to find one that we can
- // prepend to the current fragment in order to create a valid
- // fragment.
- // 2) KEYFRAME-PULLING:
- // Here we search for the first keyframe in the fragment and
- // throw away all the frames between the start of the fragment
- // and that keyframe. We then extend the duration and pull the
- // PTS of the keyframe forward so that it covers the time range
- // of the frames that were disposed of.
- //
- // #1 is far prefereable over #2 which can cause "stuttering" but
- // requires more things to be just right.
- if (!gops[0][0].keyFrame) {
- // Search for a gop for fusion from our gopCache
- gopForFusion = this.getGopForFusion_(nalUnits[0], track);
- if (gopForFusion) {
- gops.unshift(gopForFusion);
- // Adjust Gops' metadata to account for the inclusion of the
- // new gop at the beginning
- gops.byteLength += gopForFusion.byteLength;
- gops.nalCount += gopForFusion.nalCount;
- gops.pts = gopForFusion.pts;
- gops.dts = gopForFusion.dts;
- gops.duration += gopForFusion.duration;
- } else {
- // If we didn't find a candidate gop fall back to keyrame-pulling
- gops = this.extendFirstKeyFrame_(gops);
- }
- }
- // Trim gops to align with gopsToAlignWith
- if (gopsToAlignWith.length) {
- var alignedGops;
- if (options.alignGopsAtEnd) {
- alignedGops = this.alignGopsAtEnd_(gops);
- } else {
- alignedGops = this.alignGopsAtStart_(gops);
- }
- if (!alignedGops) {
- // save all the nals in the last GOP into the gop cache
- this.gopCache_.unshift({
- gop: gops.pop(),
- pps: track.pps,
- sps: track.sps
- });
- // Keep a maximum of 6 GOPs in the cache
- this.gopCache_.length = Math.min(6, this.gopCache_.length);
- // Clear nalUnits
- nalUnits = [];
- // return early no gops can be aligned with desired gopsToAlignWith
- this.resetStream_();
- this.trigger('done', 'VideoSegmentStream');
- return;
- }
- // Some gops were trimmed. clear dts info so minSegmentDts and pts are correct
- // when recalculated before sending off to CoalesceStream
- clearDtsInfo(track);
- gops = alignedGops;
- }
- collectDtsInfo(track, gops);
- // First, we have to build the index from byte locations to
- // samples (that is, frames) in the video data
- track.samples = this.generateSampleTable_(gops);
- // Concatenate the video data and construct the mdat
- mdat = mp4.mdat(this.concatenateNalData_(gops));
- track.baseMediaDecodeTime = calculateTrackBaseMediaDecodeTime(track);
- this.trigger('processedGopsInfo', gops.map(function(gop) {
- return {
- pts: gop.pts,
- dts: gop.dts,
- byteLength: gop.byteLength
- };
- }));
- // save all the nals in the last GOP into the gop cache
- this.gopCache_.unshift({
- gop: gops.pop(),
- pps: track.pps,
- sps: track.sps
- });
- // Keep a maximum of 6 GOPs in the cache
- this.gopCache_.length = Math.min(6, this.gopCache_.length);
- // Clear nalUnits
- nalUnits = [];
- this.trigger('baseMediaDecodeTime', track.baseMediaDecodeTime);
- this.trigger('timelineStartInfo', track.timelineStartInfo);
- moof = mp4.moof(sequenceNumber, [track]);
- // it would be great to allocate this array up front instead of
- // throwing away hundreds of media segment fragments
- boxes = new Uint8Array(moof.byteLength + mdat.byteLength);
- // Bump the sequence number for next time
- sequenceNumber++;
- boxes.set(moof);
- boxes.set(mdat, moof.byteLength);
- this.trigger('data', {track: track, boxes: boxes});
- this.resetStream_();
- // Continue with the flush process now
- this.trigger('done', 'VideoSegmentStream');
- };
- this.resetStream_ = function() {
- clearDtsInfo(track);
- // reset config and pps because they may differ across segments
- // for instance, when we are rendition switching
- config = undefined;
- pps = undefined;
- };
- // Search for a candidate Gop for gop-fusion from the gop cache and
- // return it or return null if no good candidate was found
- this.getGopForFusion_ = function(nalUnit) {
- var
- halfSecond = 45000, // Half-a-second in a 90khz clock
- allowableOverlap = 10000, // About 3 frames @ 30fps
- nearestDistance = Infinity,
- dtsDistance,
- nearestGopObj,
- currentGop,
- currentGopObj,
- i;
- // Search for the GOP nearest to the beginning of this nal unit
- for (i = 0; i < this.gopCache_.length; i++) {
- currentGopObj = this.gopCache_[i];
- currentGop = currentGopObj.gop;
- // Reject Gops with different SPS or PPS
- if (!(track.pps && arrayEquals(track.pps[0], currentGopObj.pps[0])) ||
- !(track.sps && arrayEquals(track.sps[0], currentGopObj.sps[0]))) {
- continue;
- }
- // Reject Gops that would require a negative baseMediaDecodeTime
- if (currentGop.dts < track.timelineStartInfo.dts) {
- continue;
- }
- // The distance between the end of the gop and the start of the nalUnit
- dtsDistance = (nalUnit.dts - currentGop.dts) - currentGop.duration;
- // Only consider GOPS that start before the nal unit and end within
- // a half-second of the nal unit
- if (dtsDistance >= -allowableOverlap &&
- dtsDistance <= halfSecond) {
- // Always use the closest GOP we found if there is more than
- // one candidate
- if (!nearestGopObj ||
- nearestDistance > dtsDistance) {
- nearestGopObj = currentGopObj;
- nearestDistance = dtsDistance;
- }
- }
- }
- if (nearestGopObj) {
- return nearestGopObj.gop;
- }
- return null;
- };
- this.extendFirstKeyFrame_ = function(gops) {
- var currentGop;
- if (!gops[0][0].keyFrame && gops.length > 1) {
- // Remove the first GOP
- currentGop = gops.shift();
- gops.byteLength -= currentGop.byteLength;
- gops.nalCount -= currentGop.nalCount;
- // Extend the first frame of what is now the
- // first gop to cover the time period of the
- // frames we just removed
- gops[0][0].dts = currentGop.dts;
- gops[0][0].pts = currentGop.pts;
- gops[0][0].duration += currentGop.duration;
- }
- return gops;
- };
- // Convert an array of nal units into an array of frames with each frame being
- // composed of the nal units that make up that frame
- // Also keep track of cummulative data about the frame from the nal units such
- // as the frame duration, starting pts, etc.
- this.groupNalsIntoFrames_ = function(nalUnits) {
- var
- i,
- currentNal,
- currentFrame = [],
- frames = [];
- currentFrame.byteLength = 0;
- for (i = 0; i < nalUnits.length; i++) {
- currentNal = nalUnits[i];
- // Split on 'aud'-type nal units
- if (currentNal.nalUnitType === 'access_unit_delimiter_rbsp') {
- // Since the very first nal unit is expected to be an AUD
- // only push to the frames array when currentFrame is not empty
- if (currentFrame.length) {
- currentFrame.duration = currentNal.dts - currentFrame.dts;
- frames.push(currentFrame);
- }
- currentFrame = [currentNal];
- currentFrame.byteLength = currentNal.data.byteLength;
- currentFrame.pts = currentNal.pts;
- currentFrame.dts = currentNal.dts;
- } else {
- // Specifically flag key frames for ease of use later
- if (currentNal.nalUnitType === 'slice_layer_without_partitioning_rbsp_idr') {
- currentFrame.keyFrame = true;
- }
- currentFrame.duration = currentNal.dts - currentFrame.dts;
- currentFrame.byteLength += currentNal.data.byteLength;
- currentFrame.push(currentNal);
- }
- }
- // For the last frame, use the duration of the previous frame if we
- // have nothing better to go on
- if (frames.length &&
- (!currentFrame.duration ||
- currentFrame.duration <= 0)) {
- currentFrame.duration = frames[frames.length - 1].duration;
- }
- // Push the final frame
- frames.push(currentFrame);
- return frames;
- };
- // Convert an array of frames into an array of Gop with each Gop being composed
- // of the frames that make up that Gop
- // Also keep track of cummulative data about the Gop from the frames such as the
- // Gop duration, starting pts, etc.
- this.groupFramesIntoGops_ = function(frames) {
- var
- i,
- currentFrame,
- currentGop = [],
- gops = [];
- // We must pre-set some of the values on the Gop since we
- // keep running totals of these values
- currentGop.byteLength = 0;
- currentGop.nalCount = 0;
- currentGop.duration = 0;
- currentGop.pts = frames[0].pts;
- currentGop.dts = frames[0].dts;
- // store some metadata about all the Gops
- gops.byteLength = 0;
- gops.nalCount = 0;
- gops.duration = 0;
- gops.pts = frames[0].pts;
- gops.dts = frames[0].dts;
- for (i = 0; i < frames.length; i++) {
- currentFrame = frames[i];
- if (currentFrame.keyFrame) {
- // Since the very first frame is expected to be an keyframe
- // only push to the gops array when currentGop is not empty
- if (currentGop.length) {
- gops.push(currentGop);
- gops.byteLength += currentGop.byteLength;
- gops.nalCount += currentGop.nalCount;
- gops.duration += currentGop.duration;
- }
- currentGop = [currentFrame];
- currentGop.nalCount = currentFrame.length;
- currentGop.byteLength = currentFrame.byteLength;
- currentGop.pts = currentFrame.pts;
- currentGop.dts = currentFrame.dts;
- currentGop.duration = currentFrame.duration;
- } else {
- currentGop.duration += currentFrame.duration;
- currentGop.nalCount += currentFrame.length;
- currentGop.byteLength += currentFrame.byteLength;
- currentGop.push(currentFrame);
- }
- }
- if (gops.length && currentGop.duration <= 0) {
- currentGop.duration = gops[gops.length - 1].duration;
- }
- gops.byteLength += currentGop.byteLength;
- gops.nalCount += currentGop.nalCount;
- gops.duration += currentGop.duration;
- // push the final Gop
- gops.push(currentGop);
- return gops;
- };
- // generate the track's sample table from an array of gops
- this.generateSampleTable_ = function(gops, baseDataOffset) {
- var
- h, i,
- sample,
- currentGop,
- currentFrame,
- dataOffset = baseDataOffset || 0,
- samples = [];
- for (h = 0; h < gops.length; h++) {
- currentGop = gops[h];
- for (i = 0; i < currentGop.length; i++) {
- currentFrame = currentGop[i];
- sample = createDefaultSample();
- sample.dataOffset = dataOffset;
- sample.compositionTimeOffset = currentFrame.pts - currentFrame.dts;
- sample.duration = currentFrame.duration;
- sample.size = 4 * currentFrame.length; // Space for nal unit size
- sample.size += currentFrame.byteLength;
- if (currentFrame.keyFrame) {
- sample.flags.dependsOn = 2;
- }
- dataOffset += sample.size;
- samples.push(sample);
- }
- }
- return samples;
- };
- // generate the track's raw mdat data from an array of gops
- this.concatenateNalData_ = function(gops) {
- var
- h, i, j,
- currentGop,
- currentFrame,
- currentNal,
- dataOffset = 0,
- nalsByteLength = gops.byteLength,
- numberOfNals = gops.nalCount,
- totalByteLength = nalsByteLength + 4 * numberOfNals,
- data = new Uint8Array(totalByteLength),
- view = new DataView(data.buffer);
- // For each Gop..
- for (h = 0; h < gops.length; h++) {
- currentGop = gops[h];
- // For each Frame..
- for (i = 0; i < currentGop.length; i++) {
- currentFrame = currentGop[i];
- // For each NAL..
- for (j = 0; j < currentFrame.length; j++) {
- currentNal = currentFrame[j];
- view.setUint32(dataOffset, currentNal.data.byteLength);
- dataOffset += 4;
- data.set(currentNal.data, dataOffset);
- dataOffset += currentNal.data.byteLength;
- }
- }
- }
- return data;
- };
- // trim gop list to the first gop found that has a matching pts with a gop in the list
- // of gopsToAlignWith starting from the START of the list
- this.alignGopsAtStart_ = function(gops) {
- var alignIndex, gopIndex, align, gop, byteLength, nalCount, duration, alignedGops;
- byteLength = gops.byteLength;
- nalCount = gops.nalCount;
- duration = gops.duration;
- alignIndex = gopIndex = 0;
- while (alignIndex < gopsToAlignWith.length && gopIndex < gops.length) {
- align = gopsToAlignWith[alignIndex];
- gop = gops[gopIndex];
- if (align.pts === gop.pts) {
- break;
- }
- if (gop.pts > align.pts) {
- // this current gop starts after the current gop we want to align on, so increment
- // align index
- alignIndex++;
- continue;
- }
- // current gop starts before the current gop we want to align on. so increment gop
- // index
- gopIndex++;
- byteLength -= gop.byteLength;
- nalCount -= gop.nalCount;
- duration -= gop.duration;
- }
- if (gopIndex === 0) {
- // no gops to trim
- return gops;
- }
- if (gopIndex === gops.length) {
- // all gops trimmed, skip appending all gops
- return null;
- }
- alignedGops = gops.slice(gopIndex);
- alignedGops.byteLength = byteLength;
- alignedGops.duration = duration;
- alignedGops.nalCount = nalCount;
- alignedGops.pts = alignedGops[0].pts;
- alignedGops.dts = alignedGops[0].dts;
- return alignedGops;
- };
- // trim gop list to the first gop found that has a matching pts with a gop in the list
- // of gopsToAlignWith starting from the END of the list
- this.alignGopsAtEnd_ = function(gops) {
- var alignIndex, gopIndex, align, gop, alignEndIndex, matchFound;
- alignIndex = gopsToAlignWith.length - 1;
- gopIndex = gops.length - 1;
- alignEndIndex = null;
- matchFound = false;
- while (alignIndex >= 0 && gopIndex >= 0) {
- align = gopsToAlignWith[alignIndex];
- gop = gops[gopIndex];
- if (align.pts === gop.pts) {
- matchFound = true;
- break;
- }
- if (align.pts > gop.pts) {
- alignIndex--;
- continue;
- }
- if (alignIndex === gopsToAlignWith.length - 1) {
- // gop.pts is greater than the last alignment candidate. If no match is found
- // by the end of this loop, we still want to append gops that come after this
- // point
- alignEndIndex = gopIndex;
- }
- gopIndex--;
- }
- if (!matchFound && alignEndIndex === null) {
- return null;
- }
- var trimIndex;
- if (matchFound) {
- trimIndex = gopIndex;
- } else {
- trimIndex = alignEndIndex;
- }
- if (trimIndex === 0) {
- return gops;
- }
- var alignedGops = gops.slice(trimIndex);
- var metadata = alignedGops.reduce(function(total, gop) {
- total.byteLength += gop.byteLength;
- total.duration += gop.duration;
- total.nalCount += gop.nalCount;
- return total;
- }, { byteLength: 0, duration: 0, nalCount: 0 });
- alignedGops.byteLength = metadata.byteLength;
- alignedGops.duration = metadata.duration;
- alignedGops.nalCount = metadata.nalCount;
- alignedGops.pts = alignedGops[0].pts;
- alignedGops.dts = alignedGops[0].dts;
- return alignedGops;
- };
- this.alignGopsWith = function(newGopsToAlignWith) {
- gopsToAlignWith = newGopsToAlignWith;
- };
- };
- VideoSegmentStream.prototype = new Stream();
- /**
- * Store information about the start and end of the track and the
- * duration for each frame/sample we process in order to calculate
- * the baseMediaDecodeTime
- */
- collectDtsInfo = function(track, data) {
- if (typeof data.pts === 'number') {
- if (track.timelineStartInfo.pts === undefined) {
- track.timelineStartInfo.pts = data.pts;
- }
- if (track.minSegmentPts === undefined) {
- track.minSegmentPts = data.pts;
- } else {
- track.minSegmentPts = Math.min(track.minSegmentPts, data.pts);
- }
- if (track.maxSegmentPts === undefined) {
- track.maxSegmentPts = data.pts;
- } else {
- track.maxSegmentPts = Math.max(track.maxSegmentPts, data.pts);
- }
- }
- if (typeof data.dts === 'number') {
- if (track.timelineStartInfo.dts === undefined) {
- track.timelineStartInfo.dts = data.dts;
- }
- if (track.minSegmentDts === undefined) {
- track.minSegmentDts = data.dts;
- } else {
- track.minSegmentDts = Math.min(track.minSegmentDts, data.dts);
- }
- if (track.maxSegmentDts === undefined) {
- track.maxSegmentDts = data.dts;
- } else {
- track.maxSegmentDts = Math.max(track.maxSegmentDts, data.dts);
- }
- }
- };
- /**
- * Clear values used to calculate the baseMediaDecodeTime between
- * tracks
- */
- clearDtsInfo = function(track) {
- delete track.minSegmentDts;
- delete track.maxSegmentDts;
- delete track.minSegmentPts;
- delete track.maxSegmentPts;
- };
- /**
- * Calculate the track's baseMediaDecodeTime based on the earliest
- * DTS the transmuxer has ever seen and the minimum DTS for the
- * current track
- */
- calculateTrackBaseMediaDecodeTime = function(track) {
- var
- baseMediaDecodeTime,
- scale,
- // Calculate the distance, in time, that this segment starts from the start
- // of the timeline (earliest time seen since the transmuxer initialized)
- timeSinceStartOfTimeline = track.minSegmentDts - track.timelineStartInfo.dts;
- // track.timelineStartInfo.baseMediaDecodeTime is the location, in time, where
- // we want the start of the first segment to be placed
- baseMediaDecodeTime = track.timelineStartInfo.baseMediaDecodeTime;
- // Add to that the distance this segment is from the very first
- baseMediaDecodeTime += timeSinceStartOfTimeline;
- // baseMediaDecodeTime must not become negative
- baseMediaDecodeTime = Math.max(0, baseMediaDecodeTime);
- if (track.type === 'audio') {
- // Audio has a different clock equal to the sampling_rate so we need to
- // scale the PTS values into the clock rate of the track
- scale = track.samplerate / ONE_SECOND_IN_TS;
- baseMediaDecodeTime *= scale;
- baseMediaDecodeTime = Math.floor(baseMediaDecodeTime);
- }
- return baseMediaDecodeTime;
- };
- /**
- * A Stream that can combine multiple streams (ie. audio & video)
- * into a single output segment for MSE. Also supports audio-only
- * and video-only streams.
- */
- CoalesceStream = function(options, metadataStream) {
- // Number of Tracks per output segment
- // If greater than 1, we combine multiple
- // tracks into a single segment
- this.numberOfTracks = 0;
- this.metadataStream = metadataStream;
- if (typeof options.remux !== 'undefined') {
- this.remuxTracks = !!options.remux;
- } else {
- this.remuxTracks = true;
- }
- this.pendingTracks = [];
- this.videoTrack = null;
- this.pendingBoxes = [];
- this.pendingCaptions = [];
- this.pendingMetadata = [];
- this.pendingBytes = 0;
- this.emittedTracks = 0;
- CoalesceStream.prototype.init.call(this);
- // Take output from multiple
- this.push = function(output) {
- // buffer incoming captions until the associated video segment
- // finishes
- if (output.text) {
- return this.pendingCaptions.push(output);
- }
- // buffer incoming id3 tags until the final flush
- if (output.frames) {
- return this.pendingMetadata.push(output);
- }
- // Add this track to the list of pending tracks and store
- // important information required for the construction of
- // the final segment
- this.pendingTracks.push(output.track);
- this.pendingBoxes.push(output.boxes);
- this.pendingBytes += output.boxes.byteLength;
- if (output.track.type === 'video') {
- this.videoTrack = output.track;
- }
- if (output.track.type === 'audio') {
- this.audioTrack = output.track;
- }
- };
- };
- CoalesceStream.prototype = new Stream();
- CoalesceStream.prototype.flush = function(flushSource) {
- var
- offset = 0,
- event = {
- captions: [],
- captionStreams: {},
- metadata: [],
- info: {}
- },
- caption,
- id3,
- initSegment,
- timelineStartPts = 0,
- i;
- if (this.pendingTracks.length < this.numberOfTracks) {
- if (flushSource !== 'VideoSegmentStream' &&
- flushSource !== 'AudioSegmentStream') {
- // Return because we haven't received a flush from a data-generating
- // portion of the segment (meaning that we have only recieved meta-data
- // or captions.)
- return;
- } else if (this.remuxTracks) {
- // Return until we have enough tracks from the pipeline to remux (if we
- // are remuxing audio and video into a single MP4)
- return;
- } else if (this.pendingTracks.length === 0) {
- // In the case where we receive a flush without any data having been
- // received we consider it an emitted track for the purposes of coalescing
- // `done` events.
- // We do this for the case where there is an audio and video track in the
- // segment but no audio data. (seen in several playlists with alternate
- // audio tracks and no audio present in the main TS segments.)
- this.emittedTracks++;
- if (this.emittedTracks >= this.numberOfTracks) {
- this.trigger('done');
- this.emittedTracks = 0;
- }
- return;
- }
- }
- if (this.videoTrack) {
- timelineStartPts = this.videoTrack.timelineStartInfo.pts;
- VIDEO_PROPERTIES.forEach(function(prop) {
- event.info[prop] = this.videoTrack[prop];
- }, this);
- } else if (this.audioTrack) {
- timelineStartPts = this.audioTrack.timelineStartInfo.pts;
- AUDIO_PROPERTIES.forEach(function(prop) {
- event.info[prop] = this.audioTrack[prop];
- }, this);
- }
- if (this.pendingTracks.length === 1) {
- event.type = this.pendingTracks[0].type;
- } else {
- event.type = 'combined';
- }
- this.emittedTracks += this.pendingTracks.length;
- initSegment = mp4.initSegment(this.pendingTracks);
- // Create a new typed array to hold the init segment
- event.initSegment = new Uint8Array(initSegment.byteLength);
- // Create an init segment containing a moov
- // and track definitions
- event.initSegment.set(initSegment);
- // Create a new typed array to hold the moof+mdats
- event.data = new Uint8Array(this.pendingBytes);
- // Append each moof+mdat (one per track) together
- for (i = 0; i < this.pendingBoxes.length; i++) {
- event.data.set(this.pendingBoxes[i], offset);
- offset += this.pendingBoxes[i].byteLength;
- }
- // Translate caption PTS times into second offsets into the
- // video timeline for the segment, and add track info
- for (i = 0; i < this.pendingCaptions.length; i++) {
- caption = this.pendingCaptions[i];
- caption.startTime = (caption.startPts - timelineStartPts);
- caption.startTime /= 90e3;
- caption.endTime = (caption.endPts - timelineStartPts);
- caption.endTime /= 90e3;
- event.captionStreams[caption.stream] = true;
- event.captions.push(caption);
- }
- // Translate ID3 frame PTS times into second offsets into the
- // video timeline for the segment
- for (i = 0; i < this.pendingMetadata.length; i++) {
- id3 = this.pendingMetadata[i];
- id3.cueTime = (id3.pts - timelineStartPts);
- id3.cueTime /= 90e3;
- event.metadata.push(id3);
- }
- // We add this to every single emitted segment even though we only need
- // it for the first
- event.metadata.dispatchType = this.metadataStream.dispatchType;
- // Reset stream state
- this.pendingTracks.length = 0;
- this.videoTrack = null;
- this.pendingBoxes.length = 0;
- this.pendingCaptions.length = 0;
- this.pendingBytes = 0;
- this.pendingMetadata.length = 0;
- // Emit the built segment
- this.trigger('data', event);
- // Only emit `done` if all tracks have been flushed and emitted
- if (this.emittedTracks >= this.numberOfTracks) {
- this.trigger('done');
- this.emittedTracks = 0;
- }
- };
- /**
- * A Stream that expects MP2T binary data as input and produces
- * corresponding media segments, suitable for use with Media Source
- * Extension (MSE) implementations that support the ISO BMFF byte
- * stream format, like Chrome.
- */
- Transmuxer = function(options) {
- var
- self = this,
- hasFlushed = true,
- videoTrack,
- audioTrack;
- Transmuxer.prototype.init.call(this);
- options = options || {};
- this.baseMediaDecodeTime = options.baseMediaDecodeTime || 0;
- this.transmuxPipeline_ = {};
- this.setupAacPipeline = function() {
- var pipeline = {};
- this.transmuxPipeline_ = pipeline;
- pipeline.type = 'aac';
- pipeline.metadataStream = new m2ts.MetadataStream();
- // set up the parsing pipeline
- pipeline.aacStream = new AacStream();
- pipeline.audioTimestampRolloverStream = new m2ts.TimestampRolloverStream('audio');
- pipeline.timedMetadataTimestampRolloverStream = new m2ts.TimestampRolloverStream('timed-metadata');
- pipeline.adtsStream = new AdtsStream();
- pipeline.coalesceStream = new CoalesceStream(options, pipeline.metadataStream);
- pipeline.headOfPipeline = pipeline.aacStream;
- pipeline.aacStream
- .pipe(pipeline.audioTimestampRolloverStream)
- .pipe(pipeline.adtsStream);
- pipeline.aacStream
- .pipe(pipeline.timedMetadataTimestampRolloverStream)
- .pipe(pipeline.metadataStream)
- .pipe(pipeline.coalesceStream);
- pipeline.metadataStream.on('timestamp', function(frame) {
- pipeline.aacStream.setTimestamp(frame.timeStamp);
- });
- pipeline.aacStream.on('data', function(data) {
- if (data.type === 'timed-metadata' && !pipeline.audioSegmentStream) {
- audioTrack = audioTrack || {
- timelineStartInfo: {
- baseMediaDecodeTime: self.baseMediaDecodeTime
- },
- codec: 'adts',
- type: 'audio'
- };
- // hook up the audio segment stream to the first track with aac data
- pipeline.coalesceStream.numberOfTracks++;
- pipeline.audioSegmentStream = new AudioSegmentStream(audioTrack);
- // Set up the final part of the audio pipeline
- pipeline.adtsStream
- .pipe(pipeline.audioSegmentStream)
- .pipe(pipeline.coalesceStream);
- }
- });
- // Re-emit any data coming from the coalesce stream to the outside world
- pipeline.coalesceStream.on('data', this.trigger.bind(this, 'data'));
- // Let the consumer know we have finished flushing the entire pipeline
- pipeline.coalesceStream.on('done', this.trigger.bind(this, 'done'));
- };
- this.setupTsPipeline = function() {
- var pipeline = {};
- this.transmuxPipeline_ = pipeline;
- pipeline.type = 'ts';
- pipeline.metadataStream = new m2ts.MetadataStream();
- // set up the parsing pipeline
- pipeline.packetStream = new m2ts.TransportPacketStream();
- pipeline.parseStream = new m2ts.TransportParseStream();
- pipeline.elementaryStream = new m2ts.ElementaryStream();
- pipeline.videoTimestampRolloverStream = new m2ts.TimestampRolloverStream('video');
- pipeline.audioTimestampRolloverStream = new m2ts.TimestampRolloverStream('audio');
- pipeline.timedMetadataTimestampRolloverStream = new m2ts.TimestampRolloverStream('timed-metadata');
- pipeline.adtsStream = new AdtsStream();
- pipeline.h264Stream = new H264Stream();
- pipeline.captionStream = new m2ts.CaptionStream();
- pipeline.coalesceStream = new CoalesceStream(options, pipeline.metadataStream);
- pipeline.headOfPipeline = pipeline.packetStream;
- // disassemble MPEG2-TS packets into elementary streams
- pipeline.packetStream
- .pipe(pipeline.parseStream)
- .pipe(pipeline.elementaryStream);
- // !!THIS ORDER IS IMPORTANT!!
- // demux the streams
- pipeline.elementaryStream
- .pipe(pipeline.videoTimestampRolloverStream)
- .pipe(pipeline.h264Stream);
- pipeline.elementaryStream
- .pipe(pipeline.audioTimestampRolloverStream)
- .pipe(pipeline.adtsStream);
- pipeline.elementaryStream
- .pipe(pipeline.timedMetadataTimestampRolloverStream)
- .pipe(pipeline.metadataStream)
- .pipe(pipeline.coalesceStream);
- // Hook up CEA-608/708 caption stream
- pipeline.h264Stream.pipe(pipeline.captionStream)
- .pipe(pipeline.coalesceStream);
- pipeline.elementaryStream.on('data', function(data) {
- var i;
- if (data.type === 'metadata') {
- i = data.tracks.length;
- // scan the tracks listed in the metadata
- while (i--) {
- if (!videoTrack && data.tracks[i].type === 'video') {
- videoTrack = data.tracks[i];
- videoTrack.timelineStartInfo.baseMediaDecodeTime = self.baseMediaDecodeTime;
- } else if (!audioTrack && data.tracks[i].type === 'audio') {
- audioTrack = data.tracks[i];
- audioTrack.timelineStartInfo.baseMediaDecodeTime = self.baseMediaDecodeTime;
- }
- }
- // hook up the video segment stream to the first track with h264 data
- if (videoTrack && !pipeline.videoSegmentStream) {
- pipeline.coalesceStream.numberOfTracks++;
- pipeline.videoSegmentStream = new VideoSegmentStream(videoTrack, options);
- pipeline.videoSegmentStream.on('timelineStartInfo', function(timelineStartInfo) {
- // When video emits timelineStartInfo data after a flush, we forward that
- // info to the AudioSegmentStream, if it exists, because video timeline
- // data takes precedence.
- if (audioTrack) {
- audioTrack.timelineStartInfo = timelineStartInfo;
- // On the first segment we trim AAC frames that exist before the
- // very earliest DTS we have seen in video because Chrome will
- // interpret any video track with a baseMediaDecodeTime that is
- // non-zero as a gap.
- pipeline.audioSegmentStream.setEarliestDts(timelineStartInfo.dts);
- }
- });
- pipeline.videoSegmentStream.on('processedGopsInfo',
- self.trigger.bind(self, 'gopInfo'));
- pipeline.videoSegmentStream.on('baseMediaDecodeTime', function(baseMediaDecodeTime) {
- if (audioTrack) {
- pipeline.audioSegmentStream.setVideoBaseMediaDecodeTime(baseMediaDecodeTime);
- }
- });
- // Set up the final part of the video pipeline
- pipeline.h264Stream
- .pipe(pipeline.videoSegmentStream)
- .pipe(pipeline.coalesceStream);
- }
- if (audioTrack && !pipeline.audioSegmentStream) {
- // hook up the audio segment stream to the first track with aac data
- pipeline.coalesceStream.numberOfTracks++;
- pipeline.audioSegmentStream = new AudioSegmentStream(audioTrack);
- // Set up the final part of the audio pipeline
- pipeline.adtsStream
- .pipe(pipeline.audioSegmentStream)
- .pipe(pipeline.coalesceStream);
- }
- }
- });
- // Re-emit any data coming from the coalesce stream to the outside world
- pipeline.coalesceStream.on('data', this.trigger.bind(this, 'data'));
- // Let the consumer know we have finished flushing the entire pipeline
- pipeline.coalesceStream.on('done', this.trigger.bind(this, 'done'));
- };
- // hook up the segment streams once track metadata is delivered
- this.setBaseMediaDecodeTime = function(baseMediaDecodeTime) {
- var pipeline = this.transmuxPipeline_;
- this.baseMediaDecodeTime = baseMediaDecodeTime;
- if (audioTrack) {
- audioTrack.timelineStartInfo.dts = undefined;
- audioTrack.timelineStartInfo.pts = undefined;
- clearDtsInfo(audioTrack);
- audioTrack.timelineStartInfo.baseMediaDecodeTime = baseMediaDecodeTime;
- if (pipeline.audioTimestampRolloverStream) {
- pipeline.audioTimestampRolloverStream.discontinuity();
- }
- }
- if (videoTrack) {
- if (pipeline.videoSegmentStream) {
- pipeline.videoSegmentStream.gopCache_ = [];
- pipeline.videoTimestampRolloverStream.discontinuity();
- }
- videoTrack.timelineStartInfo.dts = undefined;
- videoTrack.timelineStartInfo.pts = undefined;
- clearDtsInfo(videoTrack);
- pipeline.captionStream.reset();
- videoTrack.timelineStartInfo.baseMediaDecodeTime = baseMediaDecodeTime;
- }
- if (pipeline.timedMetadataTimestampRolloverStream) {
- pipeline.timedMetadataTimestampRolloverStream.discontinuity();
- }
- };
- this.setAudioAppendStart = function(timestamp) {
- if (audioTrack) {
- this.transmuxPipeline_.audioSegmentStream.setAudioAppendStart(timestamp);
- }
- };
- this.alignGopsWith = function(gopsToAlignWith) {
- if (videoTrack && this.transmuxPipeline_.videoSegmentStream) {
- this.transmuxPipeline_.videoSegmentStream.alignGopsWith(gopsToAlignWith);
- }
- };
- // feed incoming data to the front of the parsing pipeline
- this.push = function(data) {
- if (hasFlushed) {
- var isAac = isLikelyAacData(data);
- if (isAac && this.transmuxPipeline_.type !== 'aac') {
- this.setupAacPipeline();
- } else if (!isAac && this.transmuxPipeline_.type !== 'ts') {
- this.setupTsPipeline();
- }
- hasFlushed = false;
- }
- this.transmuxPipeline_.headOfPipeline.push(data);
- };
- // flush any buffered data
- this.flush = function() {
- hasFlushed = true;
- // Start at the top of the pipeline and flush all pending work
- this.transmuxPipeline_.headOfPipeline.flush();
- };
- // Caption data has to be reset when seeking outside buffered range
- this.resetCaptions = function() {
- if (this.transmuxPipeline_.captionStream) {
- this.transmuxPipeline_.captionStream.reset();
- }
- };
- };
- Transmuxer.prototype = new Stream();
- module.exports = {
- Transmuxer: Transmuxer,
- VideoSegmentStream: VideoSegmentStream,
- AudioSegmentStream: AudioSegmentStream,
- AUDIO_PROPERTIES: AUDIO_PROPERTIES,
- VIDEO_PROPERTIES: VIDEO_PROPERTIES
- };
- },{"../aac":4,"../codecs/adts.js":6,"../codecs/h264":7,"../data/silence":9,"../m2ts/m2ts.js":19,"../utils/clock":31,"../utils/stream.js":33,"./mp4-generator.js":25}],28:[function(require,module,exports){
- 'use strict';
- var
- tagTypes = {
- 0x08: 'audio',
- 0x09: 'video',
- 0x12: 'metadata'
- },
- hex = function(val) {
- return '0x' + ('00' + val.toString(16)).slice(-2).toUpperCase();
- },
- hexStringList = function(data) {
- var arr = [], i;
- while (data.byteLength > 0) {
- i = 0;
- arr.push(hex(data[i++]));
- data = data.subarray(i);
- }
- return arr.join(' ');
- },
- parseAVCTag = function(tag, obj) {
- var
- avcPacketTypes = [
- 'AVC Sequence Header',
- 'AVC NALU',
- 'AVC End-of-Sequence'
- ],
- compositionTime = (tag[1] & parseInt('01111111', 2) << 16) | (tag[2] << 8) | tag[3];
- obj = obj || {};
- obj.avcPacketType = avcPacketTypes[tag[0]];
- obj.CompositionTime = (tag[1] & parseInt('10000000', 2)) ? -compositionTime : compositionTime;
- if (tag[0] === 1) {
- obj.nalUnitTypeRaw = hexStringList(tag.subarray(4, 100));
- } else {
- obj.data = hexStringList(tag.subarray(4));
- }
- return obj;
- },
- parseVideoTag = function(tag, obj) {
- var
- frameTypes = [
- 'Unknown',
- 'Keyframe (for AVC, a seekable frame)',
- 'Inter frame (for AVC, a nonseekable frame)',
- 'Disposable inter frame (H.263 only)',
- 'Generated keyframe (reserved for server use only)',
- 'Video info/command frame'
- ],
- codecID = tag[0] & parseInt('00001111', 2);
- obj = obj || {};
- obj.frameType = frameTypes[(tag[0] & parseInt('11110000', 2)) >>> 4];
- obj.codecID = codecID;
- if (codecID === 7) {
- return parseAVCTag(tag.subarray(1), obj);
- }
- return obj;
- },
- parseAACTag = function(tag, obj) {
- var packetTypes = [
- 'AAC Sequence Header',
- 'AAC Raw'
- ];
- obj = obj || {};
- obj.aacPacketType = packetTypes[tag[0]];
- obj.data = hexStringList(tag.subarray(1));
- return obj;
- },
- parseAudioTag = function(tag, obj) {
- var
- formatTable = [
- 'Linear PCM, platform endian',
- 'ADPCM',
- 'MP3',
- 'Linear PCM, little endian',
- 'Nellymoser 16-kHz mono',
- 'Nellymoser 8-kHz mono',
- 'Nellymoser',
- 'G.711 A-law logarithmic PCM',
- 'G.711 mu-law logarithmic PCM',
- 'reserved',
- 'AAC',
- 'Speex',
- 'MP3 8-Khz',
- 'Device-specific sound'
- ],
- samplingRateTable = [
- '5.5-kHz',
- '11-kHz',
- '22-kHz',
- '44-kHz'
- ],
- soundFormat = (tag[0] & parseInt('11110000', 2)) >>> 4;
- obj = obj || {};
- obj.soundFormat = formatTable[soundFormat];
- obj.soundRate = samplingRateTable[(tag[0] & parseInt('00001100', 2)) >>> 2];
- obj.soundSize = ((tag[0] & parseInt('00000010', 2)) >>> 1) ? '16-bit' : '8-bit';
- obj.soundType = (tag[0] & parseInt('00000001', 2)) ? 'Stereo' : 'Mono';
- if (soundFormat === 10) {
- return parseAACTag(tag.subarray(1), obj);
- }
- return obj;
- },
- parseGenericTag = function(tag) {
- return {
- tagType: tagTypes[tag[0]],
- dataSize: (tag[1] << 16) | (tag[2] << 8) | tag[3],
- timestamp: (tag[7] << 24) | (tag[4] << 16) | (tag[5] << 8) | tag[6],
- streamID: (tag[8] << 16) | (tag[9] << 8) | tag[10]
- };
- },
- inspectFlvTag = function(tag) {
- var header = parseGenericTag(tag);
- switch (tag[0]) {
- case 0x08:
- parseAudioTag(tag.subarray(11), header);
- break;
- case 0x09:
- parseVideoTag(tag.subarray(11), header);
- break;
- case 0x12:
- }
- return header;
- },
- inspectFlv = function(bytes) {
- var i = 9, // header
- dataSize,
- parsedResults = [],
- tag;
- // traverse the tags
- i += 4; // skip previous tag size
- while (i < bytes.byteLength) {
- dataSize = bytes[i + 1] << 16;
- dataSize |= bytes[i + 2] << 8;
- dataSize |= bytes[i + 3];
- dataSize += 11;
- tag = bytes.subarray(i, i + dataSize);
- parsedResults.push(inspectFlvTag(tag));
- i += dataSize + 4;
- }
- return parsedResults;
- },
- textifyFlv = function(flvTagArray) {
- return JSON.stringify(flvTagArray, null, 2);
- };
- module.exports = {
- inspectTag: inspectFlvTag,
- inspect: inspectFlv,
- textify: textifyFlv
- };
- },{}],29:[function(require,module,exports){
- (function (global){
- /**
- * mux.js
- *
- * Copyright (c) 2015 Brightcove
- * All rights reserved.
- *
- * Parse the internal MP4 structure into an equivalent javascript
- * object.
- */
- 'use strict';
- var
- inspectMp4,
- textifyMp4,
- parseType = require('../mp4/probe').parseType,
- parseMp4Date = function(seconds) {
- return new Date(seconds * 1000 - 2082844800000);
- },
- parseSampleFlags = function(flags) {
- return {
- isLeading: (flags[0] & 0x0c) >>> 2,
- dependsOn: flags[0] & 0x03,
- isDependedOn: (flags[1] & 0xc0) >>> 6,
- hasRedundancy: (flags[1] & 0x30) >>> 4,
- paddingValue: (flags[1] & 0x0e) >>> 1,
- isNonSyncSample: flags[1] & 0x01,
- degradationPriority: (flags[2] << 8) | flags[3]
- };
- },
- nalParse = function(avcStream) {
- var
- avcView = new DataView(avcStream.buffer, avcStream.byteOffset, avcStream.byteLength),
- result = [],
- i,
- length;
- for (i = 0; i + 4 < avcStream.length; i += length) {
- length = avcView.getUint32(i);
- i += 4;
- // bail if this doesn't appear to be an H264 stream
- if (length <= 0) {
- result.push('<span style=\'color:red;\'>MALFORMED DATA</span>');
- continue;
- }
- switch (avcStream[i] & 0x1F) {
- case 0x01:
- result.push('slice_layer_without_partitioning_rbsp');
- break;
- case 0x05:
- result.push('slice_layer_without_partitioning_rbsp_idr');
- break;
- case 0x06:
- result.push('sei_rbsp');
- break;
- case 0x07:
- result.push('seq_parameter_set_rbsp');
- break;
- case 0x08:
- result.push('pic_parameter_set_rbsp');
- break;
- case 0x09:
- result.push('access_unit_delimiter_rbsp');
- break;
- default:
- result.push('UNKNOWN NAL - ' + avcStream[i] & 0x1F);
- break;
- }
- }
- return result;
- },
- // registry of handlers for individual mp4 box types
- parse = {
- // codingname, not a first-class box type. stsd entries share the
- // same format as real boxes so the parsing infrastructure can be
- // shared
- avc1: function(data) {
- var view = new DataView(data.buffer, data.byteOffset, data.byteLength);
- return {
- dataReferenceIndex: view.getUint16(6),
- width: view.getUint16(24),
- height: view.getUint16(26),
- horizresolution: view.getUint16(28) + (view.getUint16(30) / 16),
- vertresolution: view.getUint16(32) + (view.getUint16(34) / 16),
- frameCount: view.getUint16(40),
- depth: view.getUint16(74),
- config: inspectMp4(data.subarray(78, data.byteLength))
- };
- },
- avcC: function(data) {
- var
- view = new DataView(data.buffer, data.byteOffset, data.byteLength),
- result = {
- configurationVersion: data[0],
- avcProfileIndication: data[1],
- profileCompatibility: data[2],
- avcLevelIndication: data[3],
- lengthSizeMinusOne: data[4] & 0x03,
- sps: [],
- pps: []
- },
- numOfSequenceParameterSets = data[5] & 0x1f,
- numOfPictureParameterSets,
- nalSize,
- offset,
- i;
- // iterate past any SPSs
- offset = 6;
- for (i = 0; i < numOfSequenceParameterSets; i++) {
- nalSize = view.getUint16(offset);
- offset += 2;
- result.sps.push(new Uint8Array(data.subarray(offset, offset + nalSize)));
- offset += nalSize;
- }
- // iterate past any PPSs
- numOfPictureParameterSets = data[offset];
- offset++;
- for (i = 0; i < numOfPictureParameterSets; i++) {
- nalSize = view.getUint16(offset);
- offset += 2;
- result.pps.push(new Uint8Array(data.subarray(offset, offset + nalSize)));
- offset += nalSize;
- }
- return result;
- },
- btrt: function(data) {
- var view = new DataView(data.buffer, data.byteOffset, data.byteLength);
- return {
- bufferSizeDB: view.getUint32(0),
- maxBitrate: view.getUint32(4),
- avgBitrate: view.getUint32(8)
- };
- },
- esds: function(data) {
- return {
- version: data[0],
- flags: new Uint8Array(data.subarray(1, 4)),
- esId: (data[6] << 8) | data[7],
- streamPriority: data[8] & 0x1f,
- decoderConfig: {
- objectProfileIndication: data[11],
- streamType: (data[12] >>> 2) & 0x3f,
- bufferSize: (data[13] << 16) | (data[14] << 8) | data[15],
- maxBitrate: (data[16] << 24) |
- (data[17] << 16) |
- (data[18] << 8) |
- data[19],
- avgBitrate: (data[20] << 24) |
- (data[21] << 16) |
- (data[22] << 8) |
- data[23],
- decoderConfigDescriptor: {
- tag: data[24],
- length: data[25],
- audioObjectType: (data[26] >>> 3) & 0x1f,
- samplingFrequencyIndex: ((data[26] & 0x07) << 1) |
- ((data[27] >>> 7) & 0x01),
- channelConfiguration: (data[27] >>> 3) & 0x0f
- }
- }
- };
- },
- ftyp: function(data) {
- var
- view = new DataView(data.buffer, data.byteOffset, data.byteLength),
- result = {
- majorBrand: parseType(data.subarray(0, 4)),
- minorVersion: view.getUint32(4),
- compatibleBrands: []
- },
- i = 8;
- while (i < data.byteLength) {
- result.compatibleBrands.push(parseType(data.subarray(i, i + 4)));
- i += 4;
- }
- return result;
- },
- dinf: function(data) {
- return {
- boxes: inspectMp4(data)
- };
- },
- dref: function(data) {
- return {
- version: data[0],
- flags: new Uint8Array(data.subarray(1, 4)),
- dataReferences: inspectMp4(data.subarray(8))
- };
- },
- hdlr: function(data) {
- var
- view = new DataView(data.buffer, data.byteOffset, data.byteLength),
- result = {
- version: view.getUint8(0),
- flags: new Uint8Array(data.subarray(1, 4)),
- handlerType: parseType(data.subarray(8, 12)),
- name: ''
- },
- i = 8;
- // parse out the name field
- for (i = 24; i < data.byteLength; i++) {
- if (data[i] === 0x00) {
- // the name field is null-terminated
- i++;
- break;
- }
- result.name += String.fromCharCode(data[i]);
- }
- // decode UTF-8 to javascript's internal representation
- // see http://ecmanaut.blogspot.com/2006/07/encoding-decoding-utf8-in-javascript.html
- result.name = decodeURIComponent(global.escape(result.name));
- return result;
- },
- mdat: function(data) {
- return {
- byteLength: data.byteLength,
- nals: nalParse(data)
- };
- },
- mdhd: function(data) {
- var
- view = new DataView(data.buffer, data.byteOffset, data.byteLength),
- i = 4,
- language,
- result = {
- version: view.getUint8(0),
- flags: new Uint8Array(data.subarray(1, 4)),
- language: ''
- };
- if (result.version === 1) {
- i += 4;
- result.creationTime = parseMp4Date(view.getUint32(i)); // truncating top 4 bytes
- i += 8;
- result.modificationTime = parseMp4Date(view.getUint32(i)); // truncating top 4 bytes
- i += 4;
- result.timescale = view.getUint32(i);
- i += 8;
- result.duration = view.getUint32(i); // truncating top 4 bytes
- } else {
- result.creationTime = parseMp4Date(view.getUint32(i));
- i += 4;
- result.modificationTime = parseMp4Date(view.getUint32(i));
- i += 4;
- result.timescale = view.getUint32(i);
- i += 4;
- result.duration = view.getUint32(i);
- }
- i += 4;
- // language is stored as an ISO-639-2/T code in an array of three 5-bit fields
- // each field is the packed difference between its ASCII value and 0x60
- language = view.getUint16(i);
- result.language += String.fromCharCode((language >> 10) + 0x60);
- result.language += String.fromCharCode(((language & 0x03c0) >> 5) + 0x60);
- result.language += String.fromCharCode((language & 0x1f) + 0x60);
- return result;
- },
- mdia: function(data) {
- return {
- boxes: inspectMp4(data)
- };
- },
- mfhd: function(data) {
- return {
- version: data[0],
- flags: new Uint8Array(data.subarray(1, 4)),
- sequenceNumber: (data[4] << 24) |
- (data[5] << 16) |
- (data[6] << 8) |
- (data[7])
- };
- },
- minf: function(data) {
- return {
- boxes: inspectMp4(data)
- };
- },
- // codingname, not a first-class box type. stsd entries share the
- // same format as real boxes so the parsing infrastructure can be
- // shared
- mp4a: function(data) {
- var
- view = new DataView(data.buffer, data.byteOffset, data.byteLength),
- result = {
- // 6 bytes reserved
- dataReferenceIndex: view.getUint16(6),
- // 4 + 4 bytes reserved
- channelcount: view.getUint16(16),
- samplesize: view.getUint16(18),
- // 2 bytes pre_defined
- // 2 bytes reserved
- samplerate: view.getUint16(24) + (view.getUint16(26) / 65536)
- };
- // if there are more bytes to process, assume this is an ISO/IEC
- // 14496-14 MP4AudioSampleEntry and parse the ESDBox
- if (data.byteLength > 28) {
- result.streamDescriptor = inspectMp4(data.subarray(28))[0];
- }
- return result;
- },
- moof: function(data) {
- return {
- boxes: inspectMp4(data)
- };
- },
- moov: function(data) {
- return {
- boxes: inspectMp4(data)
- };
- },
- mvex: function(data) {
- return {
- boxes: inspectMp4(data)
- };
- },
- mvhd: function(data) {
- var
- view = new DataView(data.buffer, data.byteOffset, data.byteLength),
- i = 4,
- result = {
- version: view.getUint8(0),
- flags: new Uint8Array(data.subarray(1, 4))
- };
- if (result.version === 1) {
- i += 4;
- result.creationTime = parseMp4Date(view.getUint32(i)); // truncating top 4 bytes
- i += 8;
- result.modificationTime = parseMp4Date(view.getUint32(i)); // truncating top 4 bytes
- i += 4;
- result.timescale = view.getUint32(i);
- i += 8;
- result.duration = view.getUint32(i); // truncating top 4 bytes
- } else {
- result.creationTime = parseMp4Date(view.getUint32(i));
- i += 4;
- result.modificationTime = parseMp4Date(view.getUint32(i));
- i += 4;
- result.timescale = view.getUint32(i);
- i += 4;
- result.duration = view.getUint32(i);
- }
- i += 4;
- // convert fixed-point, base 16 back to a number
- result.rate = view.getUint16(i) + (view.getUint16(i + 2) / 16);
- i += 4;
- result.volume = view.getUint8(i) + (view.getUint8(i + 1) / 8);
- i += 2;
- i += 2;
- i += 2 * 4;
- result.matrix = new Uint32Array(data.subarray(i, i + (9 * 4)));
- i += 9 * 4;
- i += 6 * 4;
- result.nextTrackId = view.getUint32(i);
- return result;
- },
- pdin: function(data) {
- var view = new DataView(data.buffer, data.byteOffset, data.byteLength);
- return {
- version: view.getUint8(0),
- flags: new Uint8Array(data.subarray(1, 4)),
- rate: view.getUint32(4),
- initialDelay: view.getUint32(8)
- };
- },
- sdtp: function(data) {
- var
- result = {
- version: data[0],
- flags: new Uint8Array(data.subarray(1, 4)),
- samples: []
- }, i;
- for (i = 4; i < data.byteLength; i++) {
- result.samples.push({
- dependsOn: (data[i] & 0x30) >> 4,
- isDependedOn: (data[i] & 0x0c) >> 2,
- hasRedundancy: data[i] & 0x03
- });
- }
- return result;
- },
- sidx: function(data) {
- var view = new DataView(data.buffer, data.byteOffset, data.byteLength),
- result = {
- version: data[0],
- flags: new Uint8Array(data.subarray(1, 4)),
- references: [],
- referenceId: view.getUint32(4),
- timescale: view.getUint32(8),
- earliestPresentationTime: view.getUint32(12),
- firstOffset: view.getUint32(16)
- },
- referenceCount = view.getUint16(22),
- i;
- for (i = 24; referenceCount; i += 12, referenceCount--) {
- result.references.push({
- referenceType: (data[i] & 0x80) >>> 7,
- referencedSize: view.getUint32(i) & 0x7FFFFFFF,
- subsegmentDuration: view.getUint32(i + 4),
- startsWithSap: !!(data[i + 8] & 0x80),
- sapType: (data[i + 8] & 0x70) >>> 4,
- sapDeltaTime: view.getUint32(i + 8) & 0x0FFFFFFF
- });
- }
- return result;
- },
- smhd: function(data) {
- return {
- version: data[0],
- flags: new Uint8Array(data.subarray(1, 4)),
- balance: data[4] + (data[5] / 256)
- };
- },
- stbl: function(data) {
- return {
- boxes: inspectMp4(data)
- };
- },
- stco: function(data) {
- var
- view = new DataView(data.buffer, data.byteOffset, data.byteLength),
- result = {
- version: data[0],
- flags: new Uint8Array(data.subarray(1, 4)),
- chunkOffsets: []
- },
- entryCount = view.getUint32(4),
- i;
- for (i = 8; entryCount; i += 4, entryCount--) {
- result.chunkOffsets.push(view.getUint32(i));
- }
- return result;
- },
- stsc: function(data) {
- var
- view = new DataView(data.buffer, data.byteOffset, data.byteLength),
- entryCount = view.getUint32(4),
- result = {
- version: data[0],
- flags: new Uint8Array(data.subarray(1, 4)),
- sampleToChunks: []
- },
- i;
- for (i = 8; entryCount; i += 12, entryCount--) {
- result.sampleToChunks.push({
- firstChunk: view.getUint32(i),
- samplesPerChunk: view.getUint32(i + 4),
- sampleDescriptionIndex: view.getUint32(i + 8)
- });
- }
- return result;
- },
- stsd: function(data) {
- return {
- version: data[0],
- flags: new Uint8Array(data.subarray(1, 4)),
- sampleDescriptions: inspectMp4(data.subarray(8))
- };
- },
- stsz: function(data) {
- var
- view = new DataView(data.buffer, data.byteOffset, data.byteLength),
- result = {
- version: data[0],
- flags: new Uint8Array(data.subarray(1, 4)),
- sampleSize: view.getUint32(4),
- entries: []
- },
- i;
- for (i = 12; i < data.byteLength; i += 4) {
- result.entries.push(view.getUint32(i));
- }
- return result;
- },
- stts: function(data) {
- var
- view = new DataView(data.buffer, data.byteOffset, data.byteLength),
- result = {
- version: data[0],
- flags: new Uint8Array(data.subarray(1, 4)),
- timeToSamples: []
- },
- entryCount = view.getUint32(4),
- i;
- for (i = 8; entryCount; i += 8, entryCount--) {
- result.timeToSamples.push({
- sampleCount: view.getUint32(i),
- sampleDelta: view.getUint32(i + 4)
- });
- }
- return result;
- },
- styp: function(data) {
- return parse.ftyp(data);
- },
- tfdt: function(data) {
- var result = {
- version: data[0],
- flags: new Uint8Array(data.subarray(1, 4)),
- baseMediaDecodeTime: data[4] << 24 | data[5] << 16 | data[6] << 8 | data[7]
- };
- if (result.version === 1) {
- result.baseMediaDecodeTime *= Math.pow(2, 32);
- result.baseMediaDecodeTime += data[8] << 24 | data[9] << 16 | data[10] << 8 | data[11];
- }
- return result;
- },
- tfhd: function(data) {
- var
- view = new DataView(data.buffer, data.byteOffset, data.byteLength),
- result = {
- version: data[0],
- flags: new Uint8Array(data.subarray(1, 4)),
- trackId: view.getUint32(4)
- },
- baseDataOffsetPresent = result.flags[2] & 0x01,
- sampleDescriptionIndexPresent = result.flags[2] & 0x02,
- defaultSampleDurationPresent = result.flags[2] & 0x08,
- defaultSampleSizePresent = result.flags[2] & 0x10,
- defaultSampleFlagsPresent = result.flags[2] & 0x20,
- i;
- i = 8;
- if (baseDataOffsetPresent) {
- i += 4; // truncate top 4 bytes
- result.baseDataOffset = view.getUint32(12);
- i += 4;
- }
- if (sampleDescriptionIndexPresent) {
- result.sampleDescriptionIndex = view.getUint32(i);
- i += 4;
- }
- if (defaultSampleDurationPresent) {
- result.defaultSampleDuration = view.getUint32(i);
- i += 4;
- }
- if (defaultSampleSizePresent) {
- result.defaultSampleSize = view.getUint32(i);
- i += 4;
- }
- if (defaultSampleFlagsPresent) {
- result.defaultSampleFlags = view.getUint32(i);
- }
- return result;
- },
- tkhd: function(data) {
- var
- view = new DataView(data.buffer, data.byteOffset, data.byteLength),
- i = 4,
- result = {
- version: view.getUint8(0),
- flags: new Uint8Array(data.subarray(1, 4))
- };
- if (result.version === 1) {
- i += 4;
- result.creationTime = parseMp4Date(view.getUint32(i)); // truncating top 4 bytes
- i += 8;
- result.modificationTime = parseMp4Date(view.getUint32(i)); // truncating top 4 bytes
- i += 4;
- result.trackId = view.getUint32(i);
- i += 4;
- i += 8;
- result.duration = view.getUint32(i); // truncating top 4 bytes
- } else {
- result.creationTime = parseMp4Date(view.getUint32(i));
- i += 4;
- result.modificationTime = parseMp4Date(view.getUint32(i));
- i += 4;
- result.trackId = view.getUint32(i);
- i += 4;
- i += 4;
- result.duration = view.getUint32(i);
- }
- i += 4;
- i += 2 * 4;
- result.layer = view.getUint16(i);
- i += 2;
- result.alternateGroup = view.getUint16(i);
- i += 2;
- // convert fixed-point, base 16 back to a number
- result.volume = view.getUint8(i) + (view.getUint8(i + 1) / 8);
- i += 2;
- i += 2;
- result.matrix = new Uint32Array(data.subarray(i, i + (9 * 4)));
- i += 9 * 4;
- result.width = view.getUint16(i) + (view.getUint16(i + 2) / 16);
- i += 4;
- result.height = view.getUint16(i) + (view.getUint16(i + 2) / 16);
- return result;
- },
- traf: function(data) {
- return {
- boxes: inspectMp4(data)
- };
- },
- trak: function(data) {
- return {
- boxes: inspectMp4(data)
- };
- },
- trex: function(data) {
- var view = new DataView(data.buffer, data.byteOffset, data.byteLength);
- return {
- version: data[0],
- flags: new Uint8Array(data.subarray(1, 4)),
- trackId: view.getUint32(4),
- defaultSampleDescriptionIndex: view.getUint32(8),
- defaultSampleDuration: view.getUint32(12),
- defaultSampleSize: view.getUint32(16),
- sampleDependsOn: data[20] & 0x03,
- sampleIsDependedOn: (data[21] & 0xc0) >> 6,
- sampleHasRedundancy: (data[21] & 0x30) >> 4,
- samplePaddingValue: (data[21] & 0x0e) >> 1,
- sampleIsDifferenceSample: !!(data[21] & 0x01),
- sampleDegradationPriority: view.getUint16(22)
- };
- },
- trun: function(data) {
- var
- result = {
- version: data[0],
- flags: new Uint8Array(data.subarray(1, 4)),
- samples: []
- },
- view = new DataView(data.buffer, data.byteOffset, data.byteLength),
- dataOffsetPresent = result.flags[2] & 0x01,
- firstSampleFlagsPresent = result.flags[2] & 0x04,
- sampleDurationPresent = result.flags[1] & 0x01,
- sampleSizePresent = result.flags[1] & 0x02,
- sampleFlagsPresent = result.flags[1] & 0x04,
- sampleCompositionTimeOffsetPresent = result.flags[1] & 0x08,
- sampleCount = view.getUint32(4),
- offset = 8,
- sample;
- if (dataOffsetPresent) {
- result.dataOffset = view.getUint32(offset);
- offset += 4;
- }
- if (firstSampleFlagsPresent && sampleCount) {
- sample = {
- flags: parseSampleFlags(data.subarray(offset, offset + 4))
- };
- offset += 4;
- if (sampleDurationPresent) {
- sample.duration = view.getUint32(offset);
- offset += 4;
- }
- if (sampleSizePresent) {
- sample.size = view.getUint32(offset);
- offset += 4;
- }
- if (sampleCompositionTimeOffsetPresent) {
- sample.compositionTimeOffset = view.getUint32(offset);
- offset += 4;
- }
- result.samples.push(sample);
- sampleCount--;
- }
- while (sampleCount--) {
- sample = {};
- if (sampleDurationPresent) {
- sample.duration = view.getUint32(offset);
- offset += 4;
- }
- if (sampleSizePresent) {
- sample.size = view.getUint32(offset);
- offset += 4;
- }
- if (sampleFlagsPresent) {
- sample.flags = parseSampleFlags(data.subarray(offset, offset + 4));
- offset += 4;
- }
- if (sampleCompositionTimeOffsetPresent) {
- sample.compositionTimeOffset = view.getUint32(offset);
- offset += 4;
- }
- result.samples.push(sample);
- }
- return result;
- },
- 'url ': function(data) {
- return {
- version: data[0],
- flags: new Uint8Array(data.subarray(1, 4))
- };
- },
- vmhd: function(data) {
- var view = new DataView(data.buffer, data.byteOffset, data.byteLength);
- return {
- version: data[0],
- flags: new Uint8Array(data.subarray(1, 4)),
- graphicsmode: view.getUint16(4),
- opcolor: new Uint16Array([view.getUint16(6),
- view.getUint16(8),
- view.getUint16(10)])
- };
- }
- };
- /**
- * Return a javascript array of box objects parsed from an ISO base
- * media file.
- * @param data {Uint8Array} the binary data of the media to be inspected
- * @return {array} a javascript array of potentially nested box objects
- */
- inspectMp4 = function(data) {
- var
- i = 0,
- result = [],
- view,
- size,
- type,
- end,
- box;
- // Convert data from Uint8Array to ArrayBuffer, to follow Dataview API
- var ab = new ArrayBuffer(data.length);
- var v = new Uint8Array(ab);
- for (var z = 0; z < data.length; ++z) {
- v[z] = data[z];
- }
- view = new DataView(ab);
- while (i < data.byteLength) {
- // parse box data
- size = view.getUint32(i);
- type = parseType(data.subarray(i + 4, i + 8));
- end = size > 1 ? i + size : data.byteLength;
- // parse type-specific data
- box = (parse[type] || function(data) {
- return {
- data: data
- };
- })(data.subarray(i + 8, end));
- box.size = size;
- box.type = type;
- // store this box and move to the next
- result.push(box);
- i = end;
- }
- return result;
- };
- /**
- * Returns a textual representation of the javascript represtentation
- * of an MP4 file. You can use it as an alternative to
- * JSON.stringify() to compare inspected MP4s.
- * @param inspectedMp4 {array} the parsed array of boxes in an MP4
- * file
- * @param depth {number} (optional) the number of ancestor boxes of
- * the elements of inspectedMp4. Assumed to be zero if unspecified.
- * @return {string} a text representation of the parsed MP4
- */
- textifyMp4 = function(inspectedMp4, depth) {
- var indent;
- depth = depth || 0;
- indent = new Array(depth * 2 + 1).join(' ');
- // iterate over all the boxes
- return inspectedMp4.map(function(box, index) {
- // list the box type first at the current indentation level
- return indent + box.type + '\n' +
- // the type is already included and handle child boxes separately
- Object.keys(box).filter(function(key) {
- return key !== 'type' && key !== 'boxes';
- // output all the box properties
- }).map(function(key) {
- var prefix = indent + ' ' + key + ': ',
- value = box[key];
- // print out raw bytes as hexademical
- if (value instanceof Uint8Array || value instanceof Uint32Array) {
- var bytes = Array.prototype.slice.call(new Uint8Array(value.buffer, value.byteOffset, value.byteLength))
- .map(function(byte) {
- return ' ' + ('00' + byte.toString(16)).slice(-2);
- }).join('').match(/.{1,24}/g);
- if (!bytes) {
- return prefix + '<>';
- }
- if (bytes.length === 1) {
- return prefix + '<' + bytes.join('').slice(1) + '>';
- }
- return prefix + '<\n' + bytes.map(function(line) {
- return indent + ' ' + line;
- }).join('\n') + '\n' + indent + ' >';
- }
- // stringify generic objects
- return prefix +
- JSON.stringify(value, null, 2)
- .split('\n').map(function(line, index) {
- if (index === 0) {
- return line;
- }
- return indent + ' ' + line;
- }).join('\n');
- }).join('\n') +
- // recursively textify the child boxes
- (box.boxes ? '\n' + textifyMp4(box.boxes, depth + 1) : '');
- }).join('\n');
- };
- module.exports = {
- inspect: inspectMp4,
- textify: textifyMp4
- };
- }).call(this,typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {})
- },{"../mp4/probe":26}],30:[function(require,module,exports){
- /**
- * mux.js
- *
- * Copyright (c) 2016 Brightcove
- * All rights reserved.
- *
- * Parse mpeg2 transport stream packets to extract basic timing information
- */
- 'use strict';
- var StreamTypes = require('../m2ts/stream-types.js');
- var handleRollover = require('../m2ts/timestamp-rollover-stream.js').handleRollover;
- var probe = {};
- probe.ts = require('../m2ts/probe.js');
- probe.aac = require('../aac/probe.js');
- var
- PES_TIMESCALE = 90000,
- MP2T_PACKET_LENGTH = 188, // bytes
- SYNC_BYTE = 0x47;
- var isLikelyAacData = function(data) {
- if ((data[0] === 'I'.charCodeAt(0)) &&
- (data[1] === 'D'.charCodeAt(0)) &&
- (data[2] === '3'.charCodeAt(0))) {
- return true;
- }
- return false;
- };
- /**
- * walks through segment data looking for pat and pmt packets to parse out
- * program map table information
- */
- var parsePsi_ = function(bytes, pmt) {
- var
- startIndex = 0,
- endIndex = MP2T_PACKET_LENGTH,
- packet, type;
- while (endIndex < bytes.byteLength) {
- // Look for a pair of start and end sync bytes in the data..
- if (bytes[startIndex] === SYNC_BYTE && bytes[endIndex] === SYNC_BYTE) {
- // We found a packet
- packet = bytes.subarray(startIndex, endIndex);
- type = probe.ts.parseType(packet, pmt.pid);
- switch (type) {
- case 'pat':
- if (!pmt.pid) {
- pmt.pid = probe.ts.parsePat(packet);
- }
- break;
- case 'pmt':
- if (!pmt.table) {
- pmt.table = probe.ts.parsePmt(packet);
- }
- break;
- default:
- break;
- }
- // Found the pat and pmt, we can stop walking the segment
- if (pmt.pid && pmt.table) {
- return;
- }
- startIndex += MP2T_PACKET_LENGTH;
- endIndex += MP2T_PACKET_LENGTH;
- continue;
- }
- // If we get here, we have somehow become de-synchronized and we need to step
- // forward one byte at a time until we find a pair of sync bytes that denote
- // a packet
- startIndex++;
- endIndex++;
- }
- };
- /**
- * walks through the segment data from the start and end to get timing information
- * for the first and last audio pes packets
- */
- var parseAudioPes_ = function(bytes, pmt, result) {
- var
- startIndex = 0,
- endIndex = MP2T_PACKET_LENGTH,
- packet, type, pesType, pusi, parsed;
- var endLoop = false;
- // Start walking from start of segment to get first audio packet
- while (endIndex < bytes.byteLength) {
- // Look for a pair of start and end sync bytes in the data..
- if (bytes[startIndex] === SYNC_BYTE && bytes[endIndex] === SYNC_BYTE) {
- // We found a packet
- packet = bytes.subarray(startIndex, endIndex);
- type = probe.ts.parseType(packet, pmt.pid);
- switch (type) {
- case 'pes':
- pesType = probe.ts.parsePesType(packet, pmt.table);
- pusi = probe.ts.parsePayloadUnitStartIndicator(packet);
- if (pesType === 'audio' && pusi) {
- parsed = probe.ts.parsePesTime(packet);
- if (parsed) {
- parsed.type = 'audio';
- result.audio.push(parsed);
- endLoop = true;
- }
- }
- break;
- default:
- break;
- }
- if (endLoop) {
- break;
- }
- startIndex += MP2T_PACKET_LENGTH;
- endIndex += MP2T_PACKET_LENGTH;
- continue;
- }
- // If we get here, we have somehow become de-synchronized and we need to step
- // forward one byte at a time until we find a pair of sync bytes that denote
- // a packet
- startIndex++;
- endIndex++;
- }
- // Start walking from end of segment to get last audio packet
- endIndex = bytes.byteLength;
- startIndex = endIndex - MP2T_PACKET_LENGTH;
- endLoop = false;
- while (startIndex >= 0) {
- // Look for a pair of start and end sync bytes in the data..
- if (bytes[startIndex] === SYNC_BYTE && bytes[endIndex] === SYNC_BYTE) {
- // We found a packet
- packet = bytes.subarray(startIndex, endIndex);
- type = probe.ts.parseType(packet, pmt.pid);
- switch (type) {
- case 'pes':
- pesType = probe.ts.parsePesType(packet, pmt.table);
- pusi = probe.ts.parsePayloadUnitStartIndicator(packet);
- if (pesType === 'audio' && pusi) {
- parsed = probe.ts.parsePesTime(packet);
- if (parsed) {
- parsed.type = 'audio';
- result.audio.push(parsed);
- endLoop = true;
- }
- }
- break;
- default:
- break;
- }
- if (endLoop) {
- break;
- }
- startIndex -= MP2T_PACKET_LENGTH;
- endIndex -= MP2T_PACKET_LENGTH;
- continue;
- }
- // If we get here, we have somehow become de-synchronized and we need to step
- // forward one byte at a time until we find a pair of sync bytes that denote
- // a packet
- startIndex--;
- endIndex--;
- }
- };
- /**
- * walks through the segment data from the start and end to get timing information
- * for the first and last video pes packets as well as timing information for the first
- * key frame.
- */
- var parseVideoPes_ = function(bytes, pmt, result) {
- var
- startIndex = 0,
- endIndex = MP2T_PACKET_LENGTH,
- packet, type, pesType, pusi, parsed, frame, i, pes;
- var endLoop = false;
- var currentFrame = {
- data: [],
- size: 0
- };
- // Start walking from start of segment to get first video packet
- while (endIndex < bytes.byteLength) {
- // Look for a pair of start and end sync bytes in the data..
- if (bytes[startIndex] === SYNC_BYTE && bytes[endIndex] === SYNC_BYTE) {
- // We found a packet
- packet = bytes.subarray(startIndex, endIndex);
- type = probe.ts.parseType(packet, pmt.pid);
- switch (type) {
- case 'pes':
- pesType = probe.ts.parsePesType(packet, pmt.table);
- pusi = probe.ts.parsePayloadUnitStartIndicator(packet);
- if (pesType === 'video') {
- if (pusi && !endLoop) {
- parsed = probe.ts.parsePesTime(packet);
- if (parsed) {
- parsed.type = 'video';
- result.video.push(parsed);
- endLoop = true;
- }
- }
- if (!result.firstKeyFrame) {
- if (pusi) {
- if (currentFrame.size !== 0) {
- frame = new Uint8Array(currentFrame.size);
- i = 0;
- while (currentFrame.data.length) {
- pes = currentFrame.data.shift();
- frame.set(pes, i);
- i += pes.byteLength;
- }
- if (probe.ts.videoPacketContainsKeyFrame(frame)) {
- result.firstKeyFrame = probe.ts.parsePesTime(frame);
- result.firstKeyFrame.type = 'video';
- }
- currentFrame.size = 0;
- }
- }
- currentFrame.data.push(packet);
- currentFrame.size += packet.byteLength;
- }
- }
- break;
- default:
- break;
- }
- if (endLoop && result.firstKeyFrame) {
- break;
- }
- startIndex += MP2T_PACKET_LENGTH;
- endIndex += MP2T_PACKET_LENGTH;
- continue;
- }
- // If we get here, we have somehow become de-synchronized and we need to step
- // forward one byte at a time until we find a pair of sync bytes that denote
- // a packet
- startIndex++;
- endIndex++;
- }
- // Start walking from end of segment to get last video packet
- endIndex = bytes.byteLength;
- startIndex = endIndex - MP2T_PACKET_LENGTH;
- endLoop = false;
- while (startIndex >= 0) {
- // Look for a pair of start and end sync bytes in the data..
- if (bytes[startIndex] === SYNC_BYTE && bytes[endIndex] === SYNC_BYTE) {
- // We found a packet
- packet = bytes.subarray(startIndex, endIndex);
- type = probe.ts.parseType(packet, pmt.pid);
- switch (type) {
- case 'pes':
- pesType = probe.ts.parsePesType(packet, pmt.table);
- pusi = probe.ts.parsePayloadUnitStartIndicator(packet);
- if (pesType === 'video' && pusi) {
- parsed = probe.ts.parsePesTime(packet);
- if (parsed) {
- parsed.type = 'video';
- result.video.push(parsed);
- endLoop = true;
- }
- }
- break;
- default:
- break;
- }
- if (endLoop) {
- break;
- }
- startIndex -= MP2T_PACKET_LENGTH;
- endIndex -= MP2T_PACKET_LENGTH;
- continue;
- }
- // If we get here, we have somehow become de-synchronized and we need to step
- // forward one byte at a time until we find a pair of sync bytes that denote
- // a packet
- startIndex--;
- endIndex--;
- }
- };
- /**
- * Adjusts the timestamp information for the segment to account for
- * rollover and convert to seconds based on pes packet timescale (90khz clock)
- */
- var adjustTimestamp_ = function(segmentInfo, baseTimestamp) {
- if (segmentInfo.audio && segmentInfo.audio.length) {
- var audioBaseTimestamp = baseTimestamp;
- if (typeof audioBaseTimestamp === 'undefined') {
- audioBaseTimestamp = segmentInfo.audio[0].dts;
- }
- segmentInfo.audio.forEach(function(info) {
- info.dts = handleRollover(info.dts, audioBaseTimestamp);
- info.pts = handleRollover(info.pts, audioBaseTimestamp);
- // time in seconds
- info.dtsTime = info.dts / PES_TIMESCALE;
- info.ptsTime = info.pts / PES_TIMESCALE;
- });
- }
- if (segmentInfo.video && segmentInfo.video.length) {
- var videoBaseTimestamp = baseTimestamp;
- if (typeof videoBaseTimestamp === 'undefined') {
- videoBaseTimestamp = segmentInfo.video[0].dts;
- }
- segmentInfo.video.forEach(function(info) {
- info.dts = handleRollover(info.dts, videoBaseTimestamp);
- info.pts = handleRollover(info.pts, videoBaseTimestamp);
- // time in seconds
- info.dtsTime = info.dts / PES_TIMESCALE;
- info.ptsTime = info.pts / PES_TIMESCALE;
- });
- if (segmentInfo.firstKeyFrame) {
- var frame = segmentInfo.firstKeyFrame;
- frame.dts = handleRollover(frame.dts, videoBaseTimestamp);
- frame.pts = handleRollover(frame.pts, videoBaseTimestamp);
- // time in seconds
- frame.dtsTime = frame.dts / PES_TIMESCALE;
- frame.ptsTime = frame.dts / PES_TIMESCALE;
- }
- }
- };
- /**
- * inspects the aac data stream for start and end time information
- */
- var inspectAac_ = function(bytes) {
- var
- endLoop = false,
- audioCount = 0,
- sampleRate = null,
- timestamp = null,
- frameSize = 0,
- byteIndex = 0,
- packet;
- while (bytes.length - byteIndex >= 3) {
- var type = probe.aac.parseType(bytes, byteIndex);
- switch (type) {
- case 'timed-metadata':
- // Exit early because we don't have enough to parse
- // the ID3 tag header
- if (bytes.length - byteIndex < 10) {
- endLoop = true;
- break;
- }
- frameSize = probe.aac.parseId3TagSize(bytes, byteIndex);
- // Exit early if we don't have enough in the buffer
- // to emit a full packet
- if (frameSize > bytes.length) {
- endLoop = true;
- break;
- }
- if (timestamp === null) {
- packet = bytes.subarray(byteIndex, byteIndex + frameSize);
- timestamp = probe.aac.parseAacTimestamp(packet);
- }
- byteIndex += frameSize;
- break;
- case 'audio':
- // Exit early because we don't have enough to parse
- // the ADTS frame header
- if (bytes.length - byteIndex < 7) {
- endLoop = true;
- break;
- }
- frameSize = probe.aac.parseAdtsSize(bytes, byteIndex);
- // Exit early if we don't have enough in the buffer
- // to emit a full packet
- if (frameSize > bytes.length) {
- endLoop = true;
- break;
- }
- if (sampleRate === null) {
- packet = bytes.subarray(byteIndex, byteIndex + frameSize);
- sampleRate = probe.aac.parseSampleRate(packet);
- }
- audioCount++;
- byteIndex += frameSize;
- break;
- default:
- byteIndex++;
- break;
- }
- if (endLoop) {
- return null;
- }
- }
- if (sampleRate === null || timestamp === null) {
- return null;
- }
- var audioTimescale = PES_TIMESCALE / sampleRate;
- var result = {
- audio: [
- {
- type: 'audio',
- dts: timestamp,
- pts: timestamp
- },
- {
- type: 'audio',
- dts: timestamp + (audioCount * 1024 * audioTimescale),
- pts: timestamp + (audioCount * 1024 * audioTimescale)
- }
- ]
- };
- return result;
- };
- /**
- * inspects the transport stream segment data for start and end time information
- * of the audio and video tracks (when present) as well as the first key frame's
- * start time.
- */
- var inspectTs_ = function(bytes) {
- var pmt = {
- pid: null,
- table: null
- };
- var result = {};
- parsePsi_(bytes, pmt);
- for (var pid in pmt.table) {
- if (pmt.table.hasOwnProperty(pid)) {
- var type = pmt.table[pid];
- switch (type) {
- case StreamTypes.H264_STREAM_TYPE:
- result.video = [];
- parseVideoPes_(bytes, pmt, result);
- if (result.video.length === 0) {
- delete result.video;
- }
- break;
- case StreamTypes.ADTS_STREAM_TYPE:
- result.audio = [];
- parseAudioPes_(bytes, pmt, result);
- if (result.audio.length === 0) {
- delete result.audio;
- }
- break;
- default:
- break;
- }
- }
- }
- return result;
- };
- /**
- * Inspects segment byte data and returns an object with start and end timing information
- *
- * @param {Uint8Array} bytes The segment byte data
- * @param {Number} baseTimestamp Relative reference timestamp used when adjusting frame
- * timestamps for rollover. This value must be in 90khz clock.
- * @return {Object} Object containing start and end frame timing info of segment.
- */
- var inspect = function(bytes, baseTimestamp) {
- var isAacData = isLikelyAacData(bytes);
- var result;
- if (isAacData) {
- result = inspectAac_(bytes);
- } else {
- result = inspectTs_(bytes);
- }
- if (!result || (!result.audio && !result.video)) {
- return null;
- }
- adjustTimestamp_(result, baseTimestamp);
- return result;
- };
- module.exports = {
- inspect: inspect
- };
- },{"../aac/probe.js":5,"../m2ts/probe.js":21,"../m2ts/stream-types.js":22,"../m2ts/timestamp-rollover-stream.js":23}],31:[function(require,module,exports){
- var
- ONE_SECOND_IN_TS = 90000, // 90kHz clock
- secondsToVideoTs,
- secondsToAudioTs,
- videoTsToSeconds,
- audioTsToSeconds,
- audioTsToVideoTs,
- videoTsToAudioTs;
- secondsToVideoTs = function(seconds) {
- return seconds * ONE_SECOND_IN_TS;
- };
- secondsToAudioTs = function(seconds, sampleRate) {
- return seconds * sampleRate;
- };
- videoTsToSeconds = function(timestamp) {
- return timestamp / ONE_SECOND_IN_TS;
- };
- audioTsToSeconds = function(timestamp, sampleRate) {
- return timestamp / sampleRate;
- };
- audioTsToVideoTs = function(timestamp, sampleRate) {
- return secondsToVideoTs(audioTsToSeconds(timestamp, sampleRate));
- };
- videoTsToAudioTs = function(timestamp, sampleRate) {
- return secondsToAudioTs(videoTsToSeconds(timestamp), sampleRate);
- };
- module.exports = {
- secondsToVideoTs: secondsToVideoTs,
- secondsToAudioTs: secondsToAudioTs,
- videoTsToSeconds: videoTsToSeconds,
- audioTsToSeconds: audioTsToSeconds,
- audioTsToVideoTs: audioTsToVideoTs,
- videoTsToAudioTs: videoTsToAudioTs
- };
- },{}],32:[function(require,module,exports){
- 'use strict';
- var ExpGolomb;
- /**
- * Parser for exponential Golomb codes, a variable-bitwidth number encoding
- * scheme used by h264.
- */
- ExpGolomb = function(workingData) {
- var
- // the number of bytes left to examine in workingData
- workingBytesAvailable = workingData.byteLength,
- // the current word being examined
- workingWord = 0, // :uint
- // the number of bits left to examine in the current word
- workingBitsAvailable = 0; // :uint;
- // ():uint
- this.length = function() {
- return (8 * workingBytesAvailable);
- };
- // ():uint
- this.bitsAvailable = function() {
- return (8 * workingBytesAvailable) + workingBitsAvailable;
- };
- // ():void
- this.loadWord = function() {
- var
- position = workingData.byteLength - workingBytesAvailable,
- workingBytes = new Uint8Array(4),
- availableBytes = Math.min(4, workingBytesAvailable);
- if (availableBytes === 0) {
- throw new Error('no bytes available');
- }
- workingBytes.set(workingData.subarray(position,
- position + availableBytes));
- workingWord = new DataView(workingBytes.buffer).getUint32(0);
- // track the amount of workingData that has been processed
- workingBitsAvailable = availableBytes * 8;
- workingBytesAvailable -= availableBytes;
- };
- // (count:int):void
- this.skipBits = function(count) {
- var skipBytes; // :int
- if (workingBitsAvailable > count) {
- workingWord <<= count;
- workingBitsAvailable -= count;
- } else {
- count -= workingBitsAvailable;
- skipBytes = Math.floor(count / 8);
- count -= (skipBytes * 8);
- workingBytesAvailable -= skipBytes;
- this.loadWord();
- workingWord <<= count;
- workingBitsAvailable -= count;
- }
- };
- // (size:int):uint
- this.readBits = function(size) {
- var
- bits = Math.min(workingBitsAvailable, size), // :uint
- valu = workingWord >>> (32 - bits); // :uint
- // if size > 31, handle error
- workingBitsAvailable -= bits;
- if (workingBitsAvailable > 0) {
- workingWord <<= bits;
- } else if (workingBytesAvailable > 0) {
- this.loadWord();
- }
- bits = size - bits;
- if (bits > 0) {
- return valu << bits | this.readBits(bits);
- }
- return valu;
- };
- // ():uint
- this.skipLeadingZeros = function() {
- var leadingZeroCount; // :uint
- for (leadingZeroCount = 0; leadingZeroCount < workingBitsAvailable; ++leadingZeroCount) {
- if ((workingWord & (0x80000000 >>> leadingZeroCount)) !== 0) {
- // the first bit of working word is 1
- workingWord <<= leadingZeroCount;
- workingBitsAvailable -= leadingZeroCount;
- return leadingZeroCount;
- }
- }
- // we exhausted workingWord and still have not found a 1
- this.loadWord();
- return leadingZeroCount + this.skipLeadingZeros();
- };
- // ():void
- this.skipUnsignedExpGolomb = function() {
- this.skipBits(1 + this.skipLeadingZeros());
- };
- // ():void
- this.skipExpGolomb = function() {
- this.skipBits(1 + this.skipLeadingZeros());
- };
- // ():uint
- this.readUnsignedExpGolomb = function() {
- var clz = this.skipLeadingZeros(); // :uint
- return this.readBits(clz + 1) - 1;
- };
- // ():int
- this.readExpGolomb = function() {
- var valu = this.readUnsignedExpGolomb(); // :int
- if (0x01 & valu) {
- // the number is odd if the low order bit is set
- return (1 + valu) >>> 1; // add 1 to make it even, and divide by 2
- }
- return -1 * (valu >>> 1); // divide by two then make it negative
- };
- // Some convenience functions
- // :Boolean
- this.readBoolean = function() {
- return this.readBits(1) === 1;
- };
- // ():int
- this.readUnsignedByte = function() {
- return this.readBits(8);
- };
- this.loadWord();
- };
- module.exports = ExpGolomb;
- },{}],33:[function(require,module,exports){
- /**
- * mux.js
- *
- * Copyright (c) 2014 Brightcove
- * All rights reserved.
- *
- * A lightweight readable stream implemention that handles event dispatching.
- * Objects that inherit from streams should call init in their constructors.
- */
- 'use strict';
- var Stream = function() {
- this.init = function() {
- var listeners = {};
- /**
- * Add a listener for a specified event type.
- * @param type {string} the event name
- * @param listener {function} the callback to be invoked when an event of
- * the specified type occurs
- */
- this.on = function(type, listener) {
- if (!listeners[type]) {
- listeners[type] = [];
- }
- listeners[type] = listeners[type].concat(listener);
- };
- /**
- * Remove a listener for a specified event type.
- * @param type {string} the event name
- * @param listener {function} a function previously registered for this
- * type of event through `on`
- */
- this.off = function(type, listener) {
- var index;
- if (!listeners[type]) {
- return false;
- }
- index = listeners[type].indexOf(listener);
- listeners[type] = listeners[type].slice();
- listeners[type].splice(index, 1);
- return index > -1;
- };
- /**
- * Trigger an event of the specified type on this stream. Any additional
- * arguments to this function are passed as parameters to event listeners.
- * @param type {string} the event name
- */
- this.trigger = function(type) {
- var callbacks, i, length, args;
- callbacks = listeners[type];
- if (!callbacks) {
- return;
- }
- // Slicing the arguments on every invocation of this method
- // can add a significant amount of overhead. Avoid the
- // intermediate object creation for the common case of a
- // single callback argument
- if (arguments.length === 2) {
- length = callbacks.length;
- for (i = 0; i < length; ++i) {
- callbacks[i].call(this, arguments[1]);
- }
- } else {
- args = [];
- i = arguments.length;
- for (i = 1; i < arguments.length; ++i) {
- args.push(arguments[i]);
- }
- length = callbacks.length;
- for (i = 0; i < length; ++i) {
- callbacks[i].apply(this, args);
- }
- }
- };
- /**
- * Destroys the stream and cleans up.
- */
- this.dispose = function() {
- listeners = {};
- };
- };
- };
- /**
- * Forwards all `data` events on this stream to the destination stream. The
- * destination stream should provide a method `push` to receive the data
- * events as they arrive.
- * @param destination {stream} the stream that will receive all `data` events
- * @param autoFlush {boolean} if false, we will not call `flush` on the destination
- * when the current stream emits a 'done' event
- * @see http://nodejs.org/api/stream.html#stream_readable_pipe_destination_options
- */
- Stream.prototype.pipe = function(destination) {
- this.on('data', function(data) {
- destination.push(data);
- });
- this.on('done', function(flushSource) {
- destination.flush(flushSource);
- });
- return destination;
- };
- // Default stream functions that are expected to be overridden to perform
- // actual work. These are provided by the prototype as a sort of no-op
- // implementation so that we don't have to check for their existence in the
- // `pipe` function above.
- Stream.prototype.push = function(data) {
- this.trigger('data', data);
- };
- Stream.prototype.flush = function(flushSource) {
- this.trigger('done', flushSource);
- };
- module.exports = Stream;
- },{}],34:[function(require,module,exports){
- // By default assume browserify was used to bundle app. These arguments are passed to
- // the module by browserify.
- var bundleFn = arguments[3];
- var sources = arguments[4];
- var cache = arguments[5];
- var stringify = JSON.stringify;
- var webpack = false;
- // webpackBootstrap
- var webpackBootstrapFn = function(modules) {
- // The module cache
- var installedModules = {};
- // The require function
- function __webpack_require__(moduleId) {
- // Check if module is in cache
- if(installedModules[moduleId]) {
- return installedModules[moduleId].exports;
- }
- // Create a new module (and put it into the cache)
- var module = installedModules[moduleId] = {
- i: moduleId,
- l: false,
- exports: {}
- };
- // Execute the module function
- modules[moduleId].call(module.exports, module, module.exports, __webpack_require__);
- // Flag the module as loaded
- module.l = true;
- // Return the exports of the module
- return module.exports;
- }
- // expose the modules object (__webpack_modules__)
- __webpack_require__.m = modules;
- // expose the module cache
- __webpack_require__.c = installedModules;
- // define getter function for harmony exports
- __webpack_require__.d = function(exports, name, getter) {
- if(!__webpack_require__.o(exports, name)) {
- Object.defineProperty(exports, name, {
- configurable: false,
- enumerable: true,
- get: getter
- });
- }
- };
- // getDefaultExport function for compatibility with non-harmony modules
- __webpack_require__.n = function(module) {
- var getter = module && module.__esModule ?
- function getDefault() { return module['default']; } :
- function getModuleExports() { return module; };
- __webpack_require__.d(getter, 'a', getter);
- return getter;
- };
- // Object.prototype.hasOwnProperty.call
- __webpack_require__.o = function(object, property) { return Object.prototype.hasOwnProperty.call(object, property); };
- // __webpack_public_path__
- __webpack_require__.p = "";
- // Load entry module and return exports
- return __webpack_require__(__webpack_require__.s = entryModule);
- }
- if (typeof bundleFn === 'undefined') {
- // Assume this was bundled with webpack and not browserify
- webpack = true;
- bundleFn = webpackBootstrapFn;
- sources = __webpack_modules__;
- }
- var bundleWithBrowserify = function(fn) {
- // with browserify we must find the module key ourselves
- var cacheKeys = Object.keys(cache);
- var fnModuleKey;
- for (var i = 0; i < cacheKeys.length; i++) {
- var cacheKey = cacheKeys[i];
- var cacheExports = cache[cacheKey].exports;
- // Using babel as a transpiler to use esmodule, the export will always
- // be an object with the default export as a property of it. To ensure
- // the existing api and babel esmodule exports are both supported we
- // check for both
- if (cacheExports === fn || cacheExports && cacheExports.default === fn) {
- fnModuleKey = cacheKey;
- break;
- }
- }
- // if we couldn't find one, lets make one
- if (!fnModuleKey) {
- fnModuleKey = Math.floor(Math.pow(16, 8) * Math.random()).toString(16);
- var fnModuleCache = {};
- for (var i = 0; i < cacheKeys.length; i++) {
- var cacheKey = cacheKeys[i];
- fnModuleCache[cacheKey] = cacheKey;
- }
- sources[fnModuleKey] = [
- 'function(require,module,exports){' + fn + '(self); }',
- fnModuleCache
- ];
- }
- var entryKey = Math.floor(Math.pow(16, 8) * Math.random()).toString(16);
- var entryCache = {};
- entryCache[fnModuleKey] = fnModuleKey;
- sources[entryKey] = [
- 'function(require,module,exports){' +
- // try to call default if defined to also support babel esmodule exports
- 'var f = require(' + stringify(fnModuleKey) + ');' +
- '(f.default ? f.default : f)(self);' +
- '}',
- entryCache
- ];
- return '(' + bundleFn + ')({'
- + Object.keys(sources).map(function(key) {
- return stringify(key) + ':['
- + sources[key][0] + ','
- + stringify(sources[key][1]) + ']';
- }).join(',')
- + '},{},[' + stringify(entryKey) + '])';
- };
- var bundleWithWebpack = function(fn, fnModuleId) {
- var devMode = typeof fnModuleId === 'string';
- var sourceStrings;
- if (devMode) {
- sourceStrings = {};
- } else {
- sourceStrings = [];
- }
- Object.keys(sources).forEach(function(sKey) {
- if (!sources[sKey]) {
- return;
- }
- sourceStrings[sKey] = sources[sKey].toString();
- });
- var fnModuleExports = __webpack_require__(fnModuleId);
- // Using babel as a transpiler to use esmodule, the export will always
- // be an object with the default export as a property of it. To ensure
- // the existing api and babel esmodule exports are both supported we
- // check for both
- if (!(fnModuleExports && (fnModuleExports === fn || fnModuleExports.default === fn))) {
- var fnSourceString = sourceStrings[fnModuleId];
- sourceStrings[fnModuleId] = fnSourceString.substring(0, fnSourceString.length - 1) +
- '\n' + fn.name + '();\n}';
- }
- var modulesString;
- if (devMode) {
- // must escape quotes to support webpack loader options
- fnModuleId = stringify(fnModuleId);
- // dev mode in webpack4, modules are passed as an object
- var mappedSourceStrings = Object.keys(sourceStrings).map(function(sKey) {
- return stringify(sKey) + ':' + sourceStrings[sKey];
- });
- modulesString = '{' + mappedSourceStrings.join(',') + '}';
- } else {
- modulesString = '[' + sourceStrings.join(',') + ']';
- }
- return 'var fn = (' + bundleFn.toString().replace('entryModule', fnModuleId) + ')('
- + modulesString
- + ');\n'
- // not a function when calling a function from the current scope
- + '(typeof fn === "function") && fn(self);';
- };
- module.exports = function webwackify(fn, fnModuleId) {
- var src;
- if (webpack) {
- src = bundleWithWebpack(fn, fnModuleId);
- } else {
- src = bundleWithBrowserify(fn);
- }
- var blob = new Blob([src], { type: 'text/javascript' });
- var URL = window.URL || window.webkitURL || window.mozURL || window.msURL;
- var workerUrl = URL.createObjectURL(blob);
- var worker = new Worker(workerUrl);
- worker.objectURL = workerUrl;
- return worker;
- };
- },{}],35:[function(require,module,exports){
- (function (global){
- /**
- * @file add-text-track-data.js
- */
- 'use strict';
- Object.defineProperty(exports, '__esModule', {
- value: true
- });
- function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { 'default': obj }; }
- var _globalWindow = require('global/window');
- var _globalWindow2 = _interopRequireDefault(_globalWindow);
- var _videoJs = (typeof window !== "undefined" ? window['videojs'] : typeof global !== "undefined" ? global['videojs'] : null);
- var _videoJs2 = _interopRequireDefault(_videoJs);
- /**
- * Define properties on a cue for backwards compatability,
- * but warn the user that the way that they are using it
- * is depricated and will be removed at a later date.
- *
- * @param {Cue} cue the cue to add the properties on
- * @private
- */
- var deprecateOldCue = function deprecateOldCue(cue) {
- Object.defineProperties(cue.frame, {
- id: {
- get: function get() {
- _videoJs2['default'].log.warn('cue.frame.id is deprecated. Use cue.value.key instead.');
- return cue.value.key;
- }
- },
- value: {
- get: function get() {
- _videoJs2['default'].log.warn('cue.frame.value is deprecated. Use cue.value.data instead.');
- return cue.value.data;
- }
- },
- privateData: {
- get: function get() {
- _videoJs2['default'].log.warn('cue.frame.privateData is deprecated. Use cue.value.data instead.');
- return cue.value.data;
- }
- }
- });
- };
- var durationOfVideo = function durationOfVideo(duration) {
- var dur = undefined;
- if (isNaN(duration) || Math.abs(duration) === Infinity) {
- dur = Number.MAX_VALUE;
- } else {
- dur = duration;
- }
- return dur;
- };
- /**
- * Add text track data to a source handler given the captions and
- * metadata from the buffer.
- *
- * @param {Object} sourceHandler the flash or virtual source buffer
- * @param {Array} captionArray an array of caption data
- * @param {Array} metadataArray an array of meta data
- * @private
- */
- var addTextTrackData = function addTextTrackData(sourceHandler, captionArray, metadataArray) {
- var Cue = _globalWindow2['default'].WebKitDataCue || _globalWindow2['default'].VTTCue;
- if (captionArray) {
- captionArray.forEach(function (caption) {
- var track = caption.stream;
- this.inbandTextTracks_[track].addCue(new Cue(caption.startTime + this.timestampOffset, caption.endTime + this.timestampOffset, caption.text));
- }, sourceHandler);
- }
- if (metadataArray) {
- (function () {
- var videoDuration = durationOfVideo(sourceHandler.mediaSource_.duration);
- metadataArray.forEach(function (metadata) {
- var time = metadata.cueTime + this.timestampOffset;
- metadata.frames.forEach(function (frame) {
- var cue = new Cue(time, time, frame.value || frame.url || frame.data || '');
- cue.frame = frame;
- cue.value = frame;
- deprecateOldCue(cue);
- this.metadataTrack_.addCue(cue);
- }, this);
- }, sourceHandler);
- // Updating the metadeta cues so that
- // the endTime of each cue is the startTime of the next cue
- // the endTime of last cue is the duration of the video
- if (sourceHandler.metadataTrack_ && sourceHandler.metadataTrack_.cues && sourceHandler.metadataTrack_.cues.length) {
- (function () {
- var cues = sourceHandler.metadataTrack_.cues;
- var cuesArray = [];
- // Create a copy of the TextTrackCueList...
- // ...disregarding cues with a falsey value
- for (var i = 0; i < cues.length; i++) {
- if (cues[i]) {
- cuesArray.push(cues[i]);
- }
- }
- // Group cues by their startTime value
- var cuesGroupedByStartTime = cuesArray.reduce(function (obj, cue) {
- var timeSlot = obj[cue.startTime] || [];
- timeSlot.push(cue);
- obj[cue.startTime] = timeSlot;
- return obj;
- }, {});
- // Sort startTimes by ascending order
- var sortedStartTimes = Object.keys(cuesGroupedByStartTime).sort(function (a, b) {
- return Number(a) - Number(b);
- });
- // Map each cue group's endTime to the next group's startTime
- sortedStartTimes.forEach(function (startTime, idx) {
- var cueGroup = cuesGroupedByStartTime[startTime];
- var nextTime = Number(sortedStartTimes[idx + 1]) || videoDuration;
- // Map each cue's endTime the next group's startTime
- cueGroup.forEach(function (cue) {
- cue.endTime = nextTime;
- });
- });
- })();
- }
- })();
- }
- };
- exports['default'] = {
- addTextTrackData: addTextTrackData,
- durationOfVideo: durationOfVideo
- };
- module.exports = exports['default'];
- }).call(this,typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {})
- },{"global/window":3}],36:[function(require,module,exports){
- /**
- * @file codec-utils.js
- */
- /**
- * Check if a codec string refers to an audio codec.
- *
- * @param {String} codec codec string to check
- * @return {Boolean} if this is an audio codec
- * @private
- */
- 'use strict';
- Object.defineProperty(exports, '__esModule', {
- value: true
- });
- var isAudioCodec = function isAudioCodec(codec) {
- return (/mp4a\.\d+.\d+/i.test(codec)
- );
- };
- /**
- * Check if a codec string refers to a video codec.
- *
- * @param {String} codec codec string to check
- * @return {Boolean} if this is a video codec
- * @private
- */
- var isVideoCodec = function isVideoCodec(codec) {
- return (/avc1\.[\da-f]+/i.test(codec)
- );
- };
- /**
- * Parse a content type header into a type and parameters
- * object
- *
- * @param {String} type the content type header
- * @return {Object} the parsed content-type
- * @private
- */
- var parseContentType = function parseContentType(type) {
- var object = { type: '', parameters: {} };
- var parameters = type.trim().split(';');
- // first parameter should always be content-type
- object.type = parameters.shift().trim();
- parameters.forEach(function (parameter) {
- var pair = parameter.trim().split('=');
- if (pair.length > 1) {
- var _name = pair[0].replace(/"/g, '').trim();
- var value = pair[1].replace(/"/g, '').trim();
- object.parameters[_name] = value;
- }
- });
- return object;
- };
- /**
- * Replace the old apple-style `avc1.<dd>.<dd>` codec string with the standard
- * `avc1.<hhhhhh>`
- *
- * @param {Array} codecs an array of codec strings to fix
- * @return {Array} the translated codec array
- * @private
- */
- var translateLegacyCodecs = function translateLegacyCodecs(codecs) {
- return codecs.map(function (codec) {
- return codec.replace(/avc1\.(\d+)\.(\d+)/i, function (orig, profile, avcLevel) {
- var profileHex = ('00' + Number(profile).toString(16)).slice(-2);
- var avcLevelHex = ('00' + Number(avcLevel).toString(16)).slice(-2);
- return 'avc1.' + profileHex + '00' + avcLevelHex;
- });
- });
- };
- exports['default'] = {
- isAudioCodec: isAudioCodec,
- parseContentType: parseContentType,
- isVideoCodec: isVideoCodec,
- translateLegacyCodecs: translateLegacyCodecs
- };
- module.exports = exports['default'];
- },{}],37:[function(require,module,exports){
- /**
- * @file create-text-tracks-if-necessary.js
- */
- /**
- * Create text tracks on video.js if they exist on a segment.
- *
- * @param {Object} sourceBuffer the VSB or FSB
- * @param {Object} mediaSource the HTML or Flash media source
- * @param {Object} segment the segment that may contain the text track
- * @private
- */
- 'use strict';
- Object.defineProperty(exports, '__esModule', {
- value: true
- });
- var createTextTracksIfNecessary = function createTextTracksIfNecessary(sourceBuffer, mediaSource, segment) {
- var player = mediaSource.player_;
- // create an in-band caption track if one is present in the segment
- if (segment.captions && segment.captions.length) {
- if (!sourceBuffer.inbandTextTracks_) {
- sourceBuffer.inbandTextTracks_ = {};
- }
- for (var trackId in segment.captionStreams) {
- if (!sourceBuffer.inbandTextTracks_[trackId]) {
- player.tech_.trigger({ type: 'usage', name: 'hls-608' });
- var track = player.textTracks().getTrackById(trackId);
- if (track) {
- // Resuse an existing track with a CC# id because this was
- // very likely created by videojs-contrib-hls from information
- // in the m3u8 for us to use
- sourceBuffer.inbandTextTracks_[trackId] = track;
- } else {
- // Otherwise, create a track with the default `CC#` label and
- // without a language
- sourceBuffer.inbandTextTracks_[trackId] = player.addRemoteTextTrack({
- kind: 'captions',
- id: trackId,
- label: trackId
- }, false).track;
- }
- }
- }
- }
- if (segment.metadata && segment.metadata.length && !sourceBuffer.metadataTrack_) {
- sourceBuffer.metadataTrack_ = player.addRemoteTextTrack({
- kind: 'metadata',
- label: 'Timed Metadata'
- }, false).track;
- sourceBuffer.metadataTrack_.inBandMetadataTrackDispatchType = segment.metadata.dispatchType;
- }
- };
- exports['default'] = createTextTracksIfNecessary;
- module.exports = exports['default'];
- },{}],38:[function(require,module,exports){
- /**
- * @file flash-constants.js
- */
- /**
- * The maximum size in bytes for append operations to the video.js
- * SWF. Calling through to Flash blocks and can be expensive so
- * we chunk data and pass through 4KB at a time, yielding to the
- * browser between chunks. This gives a theoretical maximum rate of
- * 1MB/s into Flash. Any higher and we begin to drop frames and UI
- * responsiveness suffers.
- *
- * @private
- */
- "use strict";
- Object.defineProperty(exports, "__esModule", {
- value: true
- });
- var flashConstants = {
- // times in milliseconds
- TIME_BETWEEN_CHUNKS: 1,
- BYTES_PER_CHUNK: 1024 * 32
- };
- exports["default"] = flashConstants;
- module.exports = exports["default"];
- },{}],39:[function(require,module,exports){
- (function (global){
- /**
- * @file flash-media-source.js
- */
- 'use strict';
- Object.defineProperty(exports, '__esModule', {
- value: true
- });
- var _createClass = (function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ('value' in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; })();
- var _get = function get(_x, _x2, _x3) { var _again = true; _function: while (_again) { var object = _x, property = _x2, receiver = _x3; _again = false; if (object === null) object = Function.prototype; var desc = Object.getOwnPropertyDescriptor(object, property); if (desc === undefined) { var parent = Object.getPrototypeOf(object); if (parent === null) { return undefined; } else { _x = parent; _x2 = property; _x3 = receiver; _again = true; desc = parent = undefined; continue _function; } } else if ('value' in desc) { return desc.value; } else { var getter = desc.get; if (getter === undefined) { return undefined; } return getter.call(receiver); } } };
- function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { 'default': obj }; }
- function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError('Cannot call a class as a function'); } }
- function _inherits(subClass, superClass) { if (typeof superClass !== 'function' && superClass !== null) { throw new TypeError('Super expression must either be null or a function, not ' + typeof superClass); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, enumerable: false, writable: true, configurable: true } }); if (superClass) Object.setPrototypeOf ? Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass; }
- var _globalDocument = require('global/document');
- var _globalDocument2 = _interopRequireDefault(_globalDocument);
- var _videoJs = (typeof window !== "undefined" ? window['videojs'] : typeof global !== "undefined" ? global['videojs'] : null);
- var _videoJs2 = _interopRequireDefault(_videoJs);
- var _flashSourceBuffer = require('./flash-source-buffer');
- var _flashSourceBuffer2 = _interopRequireDefault(_flashSourceBuffer);
- var _flashConstants = require('./flash-constants');
- var _flashConstants2 = _interopRequireDefault(_flashConstants);
- var _codecUtils = require('./codec-utils');
- /**
- * A flash implmentation of HTML MediaSources and a polyfill
- * for browsers that don't support native or HTML MediaSources..
- *
- * @link https://developer.mozilla.org/en-US/docs/Web/API/MediaSource
- * @class FlashMediaSource
- * @extends videojs.EventTarget
- */
- var FlashMediaSource = (function (_videojs$EventTarget) {
- _inherits(FlashMediaSource, _videojs$EventTarget);
- function FlashMediaSource() {
- var _this = this;
- _classCallCheck(this, FlashMediaSource);
- _get(Object.getPrototypeOf(FlashMediaSource.prototype), 'constructor', this).call(this);
- this.sourceBuffers = [];
- this.readyState = 'closed';
- this.on(['sourceopen', 'webkitsourceopen'], function (event) {
- // find the swf where we will push media data
- _this.swfObj = _globalDocument2['default'].getElementById(event.swfId);
- _this.player_ = (0, _videoJs2['default'])(_this.swfObj.parentNode);
- _this.tech_ = _this.swfObj.tech;
- _this.readyState = 'open';
- _this.tech_.on('seeking', function () {
- var i = _this.sourceBuffers.length;
- while (i--) {
- _this.sourceBuffers[i].abort();
- }
- });
- // trigger load events
- if (_this.swfObj) {
- _this.swfObj.vjs_load();
- }
- });
- }
- /**
- * Set or return the presentation duration.
- *
- * @param {Double} value the duration of the media in seconds
- * @param {Double} the current presentation duration
- * @link http://www.w3.org/TR/media-source/#widl-MediaSource-duration
- */
- /**
- * We have this function so that the html and flash interfaces
- * are the same.
- *
- * @private
- */
- _createClass(FlashMediaSource, [{
- key: 'addSeekableRange_',
- value: function addSeekableRange_() {}
- // intentional no-op
- /**
- * Create a new flash source buffer and add it to our flash media source.
- *
- * @link https://developer.mozilla.org/en-US/docs/Web/API/MediaSource/addSourceBuffer
- * @param {String} type the content-type of the source
- * @return {Object} the flash source buffer
- */
- }, {
- key: 'addSourceBuffer',
- value: function addSourceBuffer(type) {
- var parsedType = (0, _codecUtils.parseContentType)(type);
- var sourceBuffer = undefined;
- // if this is an FLV type, we'll push data to flash
- if (parsedType.type === 'video/mp2t' || parsedType.type === 'audio/mp2t') {
- // Flash source buffers
- sourceBuffer = new _flashSourceBuffer2['default'](this);
- } else {
- throw new Error('NotSupportedError (Video.js)');
- }
- this.sourceBuffers.push(sourceBuffer);
- return sourceBuffer;
- }
- /**
- * Signals the end of the stream.
- *
- * @link https://w3c.github.io/media-source/#widl-MediaSource-endOfStream-void-EndOfStreamError-error
- * @param {String=} error Signals that a playback error
- * has occurred. If specified, it must be either "network" or
- * "decode".
- */
- }, {
- key: 'endOfStream',
- value: function endOfStream(error) {
- if (error === 'network') {
- // MEDIA_ERR_NETWORK
- this.tech_.error(2);
- } else if (error === 'decode') {
- // MEDIA_ERR_DECODE
- this.tech_.error(3);
- }
- if (this.readyState !== 'ended') {
- this.readyState = 'ended';
- this.swfObj.vjs_endOfStream();
- }
- }
- }]);
- return FlashMediaSource;
- })(_videoJs2['default'].EventTarget);
- exports['default'] = FlashMediaSource;
- try {
- Object.defineProperty(FlashMediaSource.prototype, 'duration', {
- /**
- * Return the presentation duration.
- *
- * @return {Double} the duration of the media in seconds
- * @link http://www.w3.org/TR/media-source/#widl-MediaSource-duration
- */
- get: function get() {
- if (!this.swfObj) {
- return NaN;
- }
- // get the current duration from the SWF
- return this.swfObj.vjs_getProperty('duration');
- },
- /**
- * Set the presentation duration.
- *
- * @param {Double} value the duration of the media in seconds
- * @return {Double} the duration of the media in seconds
- * @link http://www.w3.org/TR/media-source/#widl-MediaSource-duration
- */
- set: function set(value) {
- var i = undefined;
- var oldDuration = this.swfObj.vjs_getProperty('duration');
- this.swfObj.vjs_setProperty('duration', value);
- if (value < oldDuration) {
- // In MSE, this triggers the range removal algorithm which causes
- // an update to occur
- for (i = 0; i < this.sourceBuffers.length; i++) {
- this.sourceBuffers[i].remove(value, oldDuration);
- }
- }
- return value;
- }
- });
- } catch (e) {
- // IE8 throws if defineProperty is called on a non-DOM node. We
- // don't support IE8 but we shouldn't throw an error if loaded
- // there.
- FlashMediaSource.prototype.duration = NaN;
- }
- for (var property in _flashConstants2['default']) {
- FlashMediaSource[property] = _flashConstants2['default'][property];
- }
- module.exports = exports['default'];
- }).call(this,typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {})
- },{"./codec-utils":36,"./flash-constants":38,"./flash-source-buffer":40,"global/document":2}],40:[function(require,module,exports){
- (function (global){
- /**
- * @file flash-source-buffer.js
- */
- 'use strict';
- Object.defineProperty(exports, '__esModule', {
- value: true
- });
- var _createClass = (function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ('value' in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; })();
- var _get = function get(_x, _x2, _x3) { var _again = true; _function: while (_again) { var object = _x, property = _x2, receiver = _x3; _again = false; if (object === null) object = Function.prototype; var desc = Object.getOwnPropertyDescriptor(object, property); if (desc === undefined) { var parent = Object.getPrototypeOf(object); if (parent === null) { return undefined; } else { _x = parent; _x2 = property; _x3 = receiver; _again = true; desc = parent = undefined; continue _function; } } else if ('value' in desc) { return desc.value; } else { var getter = desc.get; if (getter === undefined) { return undefined; } return getter.call(receiver); } } };
- function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { 'default': obj }; }
- function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError('Cannot call a class as a function'); } }
- function _inherits(subClass, superClass) { if (typeof superClass !== 'function' && superClass !== null) { throw new TypeError('Super expression must either be null or a function, not ' + typeof superClass); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, enumerable: false, writable: true, configurable: true } }); if (superClass) Object.setPrototypeOf ? Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass; }
- var _globalWindow = require('global/window');
- var _globalWindow2 = _interopRequireDefault(_globalWindow);
- var _videoJs = (typeof window !== "undefined" ? window['videojs'] : typeof global !== "undefined" ? global['videojs'] : null);
- var _videoJs2 = _interopRequireDefault(_videoJs);
- var _muxJsLibFlv = require('mux.js/lib/flv');
- var _muxJsLibFlv2 = _interopRequireDefault(_muxJsLibFlv);
- var _removeCuesFromTrack = require('./remove-cues-from-track');
- var _removeCuesFromTrack2 = _interopRequireDefault(_removeCuesFromTrack);
- var _createTextTracksIfNecessary = require('./create-text-tracks-if-necessary');
- var _createTextTracksIfNecessary2 = _interopRequireDefault(_createTextTracksIfNecessary);
- var _addTextTrackData = require('./add-text-track-data');
- var _flashTransmuxerWorker = require('./flash-transmuxer-worker');
- var _flashTransmuxerWorker2 = _interopRequireDefault(_flashTransmuxerWorker);
- var _webwackify = require('webwackify');
- var _webwackify2 = _interopRequireDefault(_webwackify);
- var _flashConstants = require('./flash-constants');
- var _flashConstants2 = _interopRequireDefault(_flashConstants);
- var resolveFlashTransmuxWorker = function resolveFlashTransmuxWorker() {
- var result = undefined;
- try {
- result = require.resolve('./flash-transmuxer-worker');
- } catch (e) {
- // no result
- }
- return result;
- };
- /**
- * A wrapper around the setTimeout function that uses
- * the flash constant time between ticks value.
- *
- * @param {Function} func the function callback to run
- * @private
- */
- var scheduleTick = function scheduleTick(func) {
- // Chrome doesn't invoke requestAnimationFrame callbacks
- // in background tabs, so use setTimeout.
- _globalWindow2['default'].setTimeout(func, _flashConstants2['default'].TIME_BETWEEN_CHUNKS);
- };
- /**
- * Generates a random string of max length 6
- *
- * @return {String} the randomly generated string
- * @function generateRandomString
- * @private
- */
- var generateRandomString = function generateRandomString() {
- return Math.random().toString(36).slice(2, 8);
- };
- /**
- * Round a number to a specified number of places much like
- * toFixed but return a number instead of a string representation.
- *
- * @param {Number} num A number
- * @param {Number} places The number of decimal places which to
- * round
- * @private
- */
- var toDecimalPlaces = function toDecimalPlaces(num, places) {
- if (typeof places !== 'number' || places < 0) {
- places = 0;
- }
- var scale = Math.pow(10, places);
- return Math.round(num * scale) / scale;
- };
- /**
- * A SourceBuffer implementation for Flash rather than HTML.
- *
- * @link https://developer.mozilla.org/en-US/docs/Web/API/MediaSource
- * @param {Object} mediaSource the flash media source
- * @class FlashSourceBuffer
- * @extends videojs.EventTarget
- */
- var FlashSourceBuffer = (function (_videojs$EventTarget) {
- _inherits(FlashSourceBuffer, _videojs$EventTarget);
- function FlashSourceBuffer(mediaSource) {
- var _this = this;
- _classCallCheck(this, FlashSourceBuffer);
- _get(Object.getPrototypeOf(FlashSourceBuffer.prototype), 'constructor', this).call(this);
- var encodedHeader = undefined;
- // Start off using the globally defined value but refine
- // as we append data into flash
- this.chunkSize_ = _flashConstants2['default'].BYTES_PER_CHUNK;
- // byte arrays queued to be appended
- this.buffer_ = [];
- // the total number of queued bytes
- this.bufferSize_ = 0;
- // to be able to determine the correct position to seek to, we
- // need to retain information about the mapping between the
- // media timeline and PTS values
- this.basePtsOffset_ = NaN;
- this.mediaSource_ = mediaSource;
- this.audioBufferEnd_ = NaN;
- this.videoBufferEnd_ = NaN;
- // indicates whether the asynchronous continuation of an operation
- // is still being processed
- // see https://w3c.github.io/media-source/#widl-SourceBuffer-updating
- this.updating = false;
- this.timestampOffset_ = 0;
- encodedHeader = _globalWindow2['default'].btoa(String.fromCharCode.apply(null, Array.prototype.slice.call(_muxJsLibFlv2['default'].getFlvHeader())));
- // create function names with added randomness for the global callbacks flash will use
- // to get data from javascript into the swf. Random strings are added as a safety
- // measure for pages with multiple players since these functions will be global
- // instead of per instance. When making a call to the swf, the browser generates a
- // try catch code snippet, but just takes the function name and writes out an unquoted
- // call to that function. If the player id has any special characters, this will result
- // in an error, so safePlayerId replaces all special characters to '_'
- var safePlayerId = this.mediaSource_.player_.id().replace(/[^a-zA-Z0-9]/g, '_');
- this.flashEncodedHeaderName_ = 'vjs_flashEncodedHeader_' + safePlayerId + generateRandomString();
- this.flashEncodedDataName_ = 'vjs_flashEncodedData_' + safePlayerId + generateRandomString();
- _globalWindow2['default'][this.flashEncodedHeaderName_] = function () {
- delete _globalWindow2['default'][_this.flashEncodedHeaderName_];
- return encodedHeader;
- };
- this.mediaSource_.swfObj.vjs_appendChunkReady(this.flashEncodedHeaderName_);
- this.transmuxer_ = (0, _webwackify2['default'])(_flashTransmuxerWorker2['default'], resolveFlashTransmuxWorker());
- this.transmuxer_.postMessage({ action: 'init', options: {} });
- this.transmuxer_.onmessage = function (event) {
- if (event.data.action === 'data') {
- _this.receiveBuffer_(event.data.segment);
- }
- };
- this.one('updateend', function () {
- _this.mediaSource_.tech_.trigger('loadedmetadata');
- });
- Object.defineProperty(this, 'timestampOffset', {
- get: function get() {
- return this.timestampOffset_;
- },
- set: function set(val) {
- if (typeof val === 'number' && val >= 0) {
- this.timestampOffset_ = val;
- // We have to tell flash to expect a discontinuity
- this.mediaSource_.swfObj.vjs_discontinuity();
- // the media <-> PTS mapping must be re-established after
- // the discontinuity
- this.basePtsOffset_ = NaN;
- this.audioBufferEnd_ = NaN;
- this.videoBufferEnd_ = NaN;
- this.transmuxer_.postMessage({ action: 'reset' });
- }
- }
- });
- Object.defineProperty(this, 'buffered', {
- get: function get() {
- if (!this.mediaSource_ || !this.mediaSource_.swfObj || !('vjs_getProperty' in this.mediaSource_.swfObj)) {
- return _videoJs2['default'].createTimeRange();
- }
- var buffered = this.mediaSource_.swfObj.vjs_getProperty('buffered');
- if (buffered && buffered.length) {
- buffered[0][0] = toDecimalPlaces(buffered[0][0], 3);
- buffered[0][1] = toDecimalPlaces(buffered[0][1], 3);
- }
- return _videoJs2['default'].createTimeRanges(buffered);
- }
- });
- // On a seek we remove all text track data since flash has no concept
- // of a buffered-range and everything else is reset on seek
- this.mediaSource_.player_.on('seeked', function () {
- (0, _removeCuesFromTrack2['default'])(0, Infinity, _this.metadataTrack_);
- if (_this.inbandTextTracks_) {
- for (var track in _this.inbandTextTracks_) {
- (0, _removeCuesFromTrack2['default'])(0, Infinity, _this.inbandTextTracks_[track]);
- }
- }
- });
- var onHlsReset = this.onHlsReset_.bind(this);
- // hls-reset is fired by videojs.Hls on to the tech after the main SegmentLoader
- // resets its state and flushes the buffer
- this.mediaSource_.player_.tech_.on('hls-reset', onHlsReset);
- this.mediaSource_.player_.tech_.hls.on('dispose', function () {
- _this.transmuxer_.terminate();
- _this.mediaSource_.player_.tech_.off('hls-reset', onHlsReset);
- });
- }
- /**
- * Append bytes to the sourcebuffers buffer, in this case we
- * have to append it to swf object.
- *
- * @link https://developer.mozilla.org/en-US/docs/Web/API/SourceBuffer/appendBuffer
- * @param {Array} bytes
- */
- _createClass(FlashSourceBuffer, [{
- key: 'appendBuffer',
- value: function appendBuffer(bytes) {
- var error = undefined;
- if (this.updating) {
- error = new Error('SourceBuffer.append() cannot be called ' + 'while an update is in progress');
- error.name = 'InvalidStateError';
- error.code = 11;
- throw error;
- }
- this.updating = true;
- this.mediaSource_.readyState = 'open';
- this.trigger({ type: 'update' });
- this.transmuxer_.postMessage({
- action: 'push',
- data: bytes.buffer,
- byteOffset: bytes.byteOffset,
- byteLength: bytes.byteLength
- }, [bytes.buffer]);
- this.transmuxer_.postMessage({ action: 'flush' });
- }
- /**
- * Reset the parser and remove any data queued to be sent to the SWF.
- *
- * @link https://developer.mozilla.org/en-US/docs/Web/API/SourceBuffer/abort
- */
- }, {
- key: 'abort',
- value: function abort() {
- this.buffer_ = [];
- this.bufferSize_ = 0;
- this.mediaSource_.swfObj.vjs_abort();
- // report any outstanding updates have ended
- if (this.updating) {
- this.updating = false;
- this.trigger({ type: 'updateend' });
- }
- }
- /**
- * Flash cannot remove ranges already buffered in the NetStream
- * but seeking clears the buffer entirely. For most purposes,
- * having this operation act as a no-op is acceptable.
- *
- * @link https://developer.mozilla.org/en-US/docs/Web/API/SourceBuffer/remove
- * @param {Double} start start of the section to remove
- * @param {Double} end end of the section to remove
- */
- }, {
- key: 'remove',
- value: function remove(start, end) {
- (0, _removeCuesFromTrack2['default'])(start, end, this.metadataTrack_);
- if (this.inbandTextTracks_) {
- for (var track in this.inbandTextTracks_) {
- (0, _removeCuesFromTrack2['default'])(start, end, this.inbandTextTracks_[track]);
- }
- }
- this.trigger({ type: 'update' });
- this.trigger({ type: 'updateend' });
- }
- /**
- * Receive a buffer from the flv.
- *
- * @param {Object} segment
- * @private
- */
- }, {
- key: 'receiveBuffer_',
- value: function receiveBuffer_(segment) {
- var _this2 = this;
- // create an in-band caption track if one is present in the segment
- (0, _createTextTracksIfNecessary2['default'])(this, this.mediaSource_, segment);
- (0, _addTextTrackData.addTextTrackData)(this, segment.captions, segment.metadata);
- // Do this asynchronously since convertTagsToData_ can be time consuming
- scheduleTick(function () {
- var flvBytes = _this2.convertTagsToData_(segment);
- if (_this2.buffer_.length === 0) {
- scheduleTick(_this2.processBuffer_.bind(_this2));
- }
- if (flvBytes) {
- _this2.buffer_.push(flvBytes);
- _this2.bufferSize_ += flvBytes.byteLength;
- }
- });
- }
- /**
- * Append a portion of the current buffer to the SWF.
- *
- * @private
- */
- }, {
- key: 'processBuffer_',
- value: function processBuffer_() {
- var _this3 = this;
- var chunkSize = _flashConstants2['default'].BYTES_PER_CHUNK;
- if (!this.buffer_.length) {
- if (this.updating !== false) {
- this.updating = false;
- this.trigger({ type: 'updateend' });
- }
- // do nothing if the buffer is empty
- return;
- }
- // concatenate appends up to the max append size
- var chunk = this.buffer_[0].subarray(0, chunkSize);
- // requeue any bytes that won't make it this round
- if (chunk.byteLength < chunkSize || this.buffer_[0].byteLength === chunkSize) {
- this.buffer_.shift();
- } else {
- this.buffer_[0] = this.buffer_[0].subarray(chunkSize);
- }
- this.bufferSize_ -= chunk.byteLength;
- // base64 encode the bytes
- var binary = [];
- var length = chunk.byteLength;
- for (var i = 0; i < length; i++) {
- binary.push(String.fromCharCode(chunk[i]));
- }
- var b64str = _globalWindow2['default'].btoa(binary.join(''));
- _globalWindow2['default'][this.flashEncodedDataName_] = function () {
- // schedule another processBuffer to process any left over data or to
- // trigger updateend
- scheduleTick(_this3.processBuffer_.bind(_this3));
- delete _globalWindow2['default'][_this3.flashEncodedDataName_];
- return b64str;
- };
- // Notify the swf that segment data is ready to be appended
- this.mediaSource_.swfObj.vjs_appendChunkReady(this.flashEncodedDataName_);
- }
- /**
- * Turns an array of flv tags into a Uint8Array representing the
- * flv data. Also removes any tags that are before the current
- * time so that playback begins at or slightly after the right
- * place on a seek
- *
- * @private
- * @param {Object} segmentData object of segment data
- */
- }, {
- key: 'convertTagsToData_',
- value: function convertTagsToData_(segmentData) {
- var segmentByteLength = 0;
- var tech = this.mediaSource_.tech_;
- var videoTargetPts = 0;
- var segment = undefined;
- var videoTags = segmentData.tags.videoTags;
- var audioTags = segmentData.tags.audioTags;
- // Establish the media timeline to PTS translation if we don't
- // have one already
- if (isNaN(this.basePtsOffset_) && (videoTags.length || audioTags.length)) {
- // We know there is at least one video or audio tag, but since we may not have both,
- // we use pts: Infinity for the missing tag. The will force the following Math.min
- // call will to use the proper pts value since it will always be less than Infinity
- var firstVideoTag = videoTags[0] || { pts: Infinity };
- var firstAudioTag = audioTags[0] || { pts: Infinity };
- this.basePtsOffset_ = Math.min(firstAudioTag.pts, firstVideoTag.pts);
- }
- if (tech.seeking()) {
- // Do not use previously saved buffer end values while seeking since buffer
- // is cleared on all seeks
- this.videoBufferEnd_ = NaN;
- this.audioBufferEnd_ = NaN;
- }
- if (isNaN(this.videoBufferEnd_)) {
- if (tech.buffered().length) {
- videoTargetPts = tech.buffered().end(0) - this.timestampOffset;
- }
- // Trim to currentTime if seeking
- if (tech.seeking()) {
- videoTargetPts = Math.max(videoTargetPts, tech.currentTime() - this.timestampOffset);
- }
- // PTS values are represented in milliseconds
- videoTargetPts *= 1e3;
- videoTargetPts += this.basePtsOffset_;
- } else {
- // Add a fudge factor of 0.1 to the last video pts appended since a rendition change
- // could append an overlapping segment, in which case there is a high likelyhood
- // a tag could have a matching pts to videoBufferEnd_, which would cause
- // that tag to get appended by the tag.pts >= targetPts check below even though it
- // is a duplicate of what was previously appended
- videoTargetPts = this.videoBufferEnd_ + 0.1;
- }
- // filter complete GOPs with a presentation time less than the seek target/end of buffer
- var currentIndex = videoTags.length;
- // if the last tag is beyond videoTargetPts, then do not search the list for a GOP
- // since our videoTargetPts lies in a future segment
- if (currentIndex && videoTags[currentIndex - 1].pts >= videoTargetPts) {
- // Start by walking backwards from the end of the list until we reach a tag that
- // is equal to or less than videoTargetPts
- while (--currentIndex) {
- var currentTag = videoTags[currentIndex];
- if (currentTag.pts > videoTargetPts) {
- continue;
- }
- // if we see a keyFrame or metadata tag once we've gone below videoTargetPts,
- // exit the loop as this is the start of the GOP that we want to append
- if (currentTag.keyFrame || currentTag.metaDataTag) {
- break;
- }
- }
- // We need to check if there are any metadata tags that come before currentIndex
- // as those will be metadata tags associated with the GOP we are appending
- // There could be 0 to 2 metadata tags that come before the currentIndex depending
- // on what videoTargetPts is and whether the transmuxer prepended metadata tags to this
- // key frame
- while (currentIndex) {
- var nextTag = videoTags[currentIndex - 1];
- if (!nextTag.metaDataTag) {
- break;
- }
- currentIndex--;
- }
- }
- var filteredVideoTags = videoTags.slice(currentIndex);
- var audioTargetPts = undefined;
- if (isNaN(this.audioBufferEnd_)) {
- audioTargetPts = videoTargetPts;
- } else {
- // Add a fudge factor of 0.1 to the last video pts appended since a rendition change
- // could append an overlapping segment, in which case there is a high likelyhood
- // a tag could have a matching pts to videoBufferEnd_, which would cause
- // that tag to get appended by the tag.pts >= targetPts check below even though it
- // is a duplicate of what was previously appended
- audioTargetPts = this.audioBufferEnd_ + 0.1;
- }
- if (filteredVideoTags.length) {
- // If targetPts intersects a GOP and we appended the tags for the GOP that came
- // before targetPts, we want to make sure to trim audio tags at the pts
- // of the first video tag to avoid brief moments of silence
- audioTargetPts = Math.min(audioTargetPts, filteredVideoTags[0].pts);
- }
- // skip tags with a presentation time less than the seek target/end of buffer
- currentIndex = 0;
- while (currentIndex < audioTags.length) {
- if (audioTags[currentIndex].pts >= audioTargetPts) {
- break;
- }
- currentIndex++;
- }
- var filteredAudioTags = audioTags.slice(currentIndex);
- // update the audio and video buffer ends
- if (filteredAudioTags.length) {
- this.audioBufferEnd_ = filteredAudioTags[filteredAudioTags.length - 1].pts;
- }
- if (filteredVideoTags.length) {
- this.videoBufferEnd_ = filteredVideoTags[filteredVideoTags.length - 1].pts;
- }
- var tags = this.getOrderedTags_(filteredVideoTags, filteredAudioTags);
- if (tags.length === 0) {
- return;
- }
- // If we are appending data that comes before our target pts, we want to tell
- // the swf to adjust its notion of current time to account for the extra tags
- // we are appending to complete the GOP that intersects with targetPts
- if (tags[0].pts < videoTargetPts && tech.seeking()) {
- var fudgeFactor = 1 / 30;
- var currentTime = tech.currentTime();
- var diff = (videoTargetPts - tags[0].pts) / 1e3;
- var adjustedTime = currentTime - diff;
- if (adjustedTime < fudgeFactor) {
- adjustedTime = 0;
- }
- try {
- this.mediaSource_.swfObj.vjs_adjustCurrentTime(adjustedTime);
- } catch (e) {
- // no-op for backwards compatability of swf. If adjustCurrentTime fails,
- // the swf may incorrectly report currentTime and buffered ranges
- // but should not affect playback over than the time displayed on the
- // progress bar is inaccurate
- }
- }
- // concatenate the bytes into a single segment
- for (var i = 0; i < tags.length; i++) {
- segmentByteLength += tags[i].bytes.byteLength;
- }
- segment = new Uint8Array(segmentByteLength);
- for (var i = 0, j = 0; i < tags.length; i++) {
- segment.set(tags[i].bytes, j);
- j += tags[i].bytes.byteLength;
- }
- return segment;
- }
- /**
- * Assemble the FLV tags in decoder order.
- *
- * @private
- * @param {Array} videoTags list of video tags
- * @param {Array} audioTags list of audio tags
- */
- }, {
- key: 'getOrderedTags_',
- value: function getOrderedTags_(videoTags, audioTags) {
- var tag = undefined;
- var tags = [];
- while (videoTags.length || audioTags.length) {
- if (!videoTags.length) {
- // only audio tags remain
- tag = audioTags.shift();
- } else if (!audioTags.length) {
- // only video tags remain
- tag = videoTags.shift();
- } else if (audioTags[0].dts < videoTags[0].dts) {
- // audio should be decoded next
- tag = audioTags.shift();
- } else {
- // video should be decoded next
- tag = videoTags.shift();
- }
- tags.push(tag);
- }
- return tags;
- }
- }, {
- key: 'onHlsReset_',
- value: function onHlsReset_() {
- this.transmuxer_.postMessage({ action: 'resetCaptions' });
- }
- }]);
- return FlashSourceBuffer;
- })(_videoJs2['default'].EventTarget);
- exports['default'] = FlashSourceBuffer;
- module.exports = exports['default'];
- }).call(this,typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {})
- },{"./add-text-track-data":35,"./create-text-tracks-if-necessary":37,"./flash-constants":38,"./flash-transmuxer-worker":41,"./remove-cues-from-track":43,"global/window":3,"mux.js/lib/flv":13,"webwackify":34}],41:[function(require,module,exports){
- /**
- * @file flash-transmuxer-worker.js
- */
- 'use strict';
- Object.defineProperty(exports, '__esModule', {
- value: true
- });
- var _createClass = (function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ('value' in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; })();
- function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { 'default': obj }; }
- function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError('Cannot call a class as a function'); } }
- var _globalWindow = require('global/window');
- var _globalWindow2 = _interopRequireDefault(_globalWindow);
- var _muxJsLibFlv = require('mux.js/lib/flv');
- var _muxJsLibFlv2 = _interopRequireDefault(_muxJsLibFlv);
- /**
- * Re-emits transmuxer events by converting them into messages to the
- * world outside the worker.
- *
- * @param {Object} transmuxer the transmuxer to wire events on
- * @private
- */
- var wireTransmuxerEvents = function wireTransmuxerEvents(transmuxer) {
- transmuxer.on('data', function (segment) {
- _globalWindow2['default'].postMessage({
- action: 'data',
- segment: segment
- });
- });
- transmuxer.on('done', function (data) {
- _globalWindow2['default'].postMessage({ action: 'done' });
- });
- };
- /**
- * All incoming messages route through this hash. If no function exists
- * to handle an incoming message, then we ignore the message.
- *
- * @class MessageHandlers
- * @param {Object} options the options to initialize with
- */
- var MessageHandlers = (function () {
- function MessageHandlers(options) {
- _classCallCheck(this, MessageHandlers);
- this.options = options || {};
- this.init();
- }
- /**
- * Our web wroker interface so that things can talk to mux.js
- * that will be running in a web worker. The scope is passed to this by
- * webworkify.
- *
- * @param {Object} self the scope for the web worker
- */
- /**
- * initialize our web worker and wire all the events.
- */
- _createClass(MessageHandlers, [{
- key: 'init',
- value: function init() {
- if (this.transmuxer) {
- this.transmuxer.dispose();
- }
- this.transmuxer = new _muxJsLibFlv2['default'].Transmuxer(this.options);
- wireTransmuxerEvents(this.transmuxer);
- }
- /**
- * Adds data (a ts segment) to the start of the transmuxer pipeline for
- * processing.
- *
- * @param {ArrayBuffer} data data to push into the muxer
- */
- }, {
- key: 'push',
- value: function push(data) {
- // Cast array buffer to correct type for transmuxer
- var segment = new Uint8Array(data.data, data.byteOffset, data.byteLength);
- this.transmuxer.push(segment);
- }
- /**
- * Recreate the transmuxer so that the next segment added via `push`
- * start with a fresh transmuxer.
- */
- }, {
- key: 'reset',
- value: function reset() {
- this.init();
- }
- /**
- * Forces the pipeline to finish processing the last segment and emit its
- * results.
- */
- }, {
- key: 'flush',
- value: function flush() {
- this.transmuxer.flush();
- }
- }, {
- key: 'resetCaptions',
- value: function resetCaptions() {
- this.transmuxer.resetCaptions();
- }
- }]);
- return MessageHandlers;
- })();
- var FlashTransmuxerWorker = function FlashTransmuxerWorker(self) {
- self.onmessage = function (event) {
- if (event.data.action === 'init' && event.data.options) {
- this.messageHandlers = new MessageHandlers(event.data.options);
- return;
- }
- if (!this.messageHandlers) {
- this.messageHandlers = new MessageHandlers();
- }
- if (event.data && event.data.action && event.data.action !== 'init') {
- if (this.messageHandlers[event.data.action]) {
- this.messageHandlers[event.data.action](event.data);
- }
- }
- };
- };
- exports['default'] = function (self) {
- return new FlashTransmuxerWorker(self);
- };
- module.exports = exports['default'];
- },{"global/window":3,"mux.js/lib/flv":13}],42:[function(require,module,exports){
- (function (global){
- /**
- * @file html-media-source.js
- */
- 'use strict';
- Object.defineProperty(exports, '__esModule', {
- value: true
- });
- var _createClass = (function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ('value' in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; })();
- var _get = function get(_x, _x2, _x3) { var _again = true; _function: while (_again) { var object = _x, property = _x2, receiver = _x3; _again = false; if (object === null) object = Function.prototype; var desc = Object.getOwnPropertyDescriptor(object, property); if (desc === undefined) { var parent = Object.getPrototypeOf(object); if (parent === null) { return undefined; } else { _x = parent; _x2 = property; _x3 = receiver; _again = true; desc = parent = undefined; continue _function; } } else if ('value' in desc) { return desc.value; } else { var getter = desc.get; if (getter === undefined) { return undefined; } return getter.call(receiver); } } };
- function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { 'default': obj }; }
- function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError('Cannot call a class as a function'); } }
- function _inherits(subClass, superClass) { if (typeof superClass !== 'function' && superClass !== null) { throw new TypeError('Super expression must either be null or a function, not ' + typeof superClass); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, enumerable: false, writable: true, configurable: true } }); if (superClass) Object.setPrototypeOf ? Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass; }
- var _globalWindow = require('global/window');
- var _globalWindow2 = _interopRequireDefault(_globalWindow);
- var _globalDocument = require('global/document');
- var _globalDocument2 = _interopRequireDefault(_globalDocument);
- var _videoJs = (typeof window !== "undefined" ? window['videojs'] : typeof global !== "undefined" ? global['videojs'] : null);
- var _videoJs2 = _interopRequireDefault(_videoJs);
- var _virtualSourceBuffer = require('./virtual-source-buffer');
- var _virtualSourceBuffer2 = _interopRequireDefault(_virtualSourceBuffer);
- var _addTextTrackData = require('./add-text-track-data');
- var _codecUtils = require('./codec-utils');
- /**
- * Our MediaSource implementation in HTML, mimics native
- * MediaSource where/if possible.
- *
- * @link https://developer.mozilla.org/en-US/docs/Web/API/MediaSource
- * @class HtmlMediaSource
- * @extends videojs.EventTarget
- */
- var HtmlMediaSource = (function (_videojs$EventTarget) {
- _inherits(HtmlMediaSource, _videojs$EventTarget);
- function HtmlMediaSource() {
- var _this = this;
- _classCallCheck(this, HtmlMediaSource);
- _get(Object.getPrototypeOf(HtmlMediaSource.prototype), 'constructor', this).call(this);
- var property = undefined;
- this.nativeMediaSource_ = new _globalWindow2['default'].MediaSource();
- // delegate to the native MediaSource's methods by default
- for (property in this.nativeMediaSource_) {
- if (!(property in HtmlMediaSource.prototype) && typeof this.nativeMediaSource_[property] === 'function') {
- this[property] = this.nativeMediaSource_[property].bind(this.nativeMediaSource_);
- }
- }
- // emulate `duration` and `seekable` until seeking can be
- // handled uniformly for live streams
- // see https://github.com/w3c/media-source/issues/5
- this.duration_ = NaN;
- Object.defineProperty(this, 'duration', {
- get: function get() {
- if (this.duration_ === Infinity) {
- return this.duration_;
- }
- return this.nativeMediaSource_.duration;
- },
- set: function set(duration) {
- this.duration_ = duration;
- if (duration !== Infinity) {
- this.nativeMediaSource_.duration = duration;
- return;
- }
- }
- });
- Object.defineProperty(this, 'seekable', {
- get: function get() {
- if (this.duration_ === Infinity) {
- return _videoJs2['default'].createTimeRanges([[0, this.nativeMediaSource_.duration]]);
- }
- return this.nativeMediaSource_.seekable;
- }
- });
- Object.defineProperty(this, 'readyState', {
- get: function get() {
- return this.nativeMediaSource_.readyState;
- }
- });
- Object.defineProperty(this, 'activeSourceBuffers', {
- get: function get() {
- return this.activeSourceBuffers_;
- }
- });
- // the list of virtual and native SourceBuffers created by this
- // MediaSource
- this.sourceBuffers = [];
- this.activeSourceBuffers_ = [];
- /**
- * update the list of active source buffers based upon various
- * imformation from HLS and video.js
- *
- * @private
- */
- this.updateActiveSourceBuffers_ = function () {
- // Retain the reference but empty the array
- _this.activeSourceBuffers_.length = 0;
- // If there is only one source buffer, then it will always be active and audio will
- // be disabled based on the codec of the source buffer
- if (_this.sourceBuffers.length === 1) {
- var sourceBuffer = _this.sourceBuffers[0];
- sourceBuffer.appendAudioInitSegment_ = true;
- sourceBuffer.audioDisabled_ = !sourceBuffer.audioCodec_;
- _this.activeSourceBuffers_.push(sourceBuffer);
- return;
- }
- // There are 2 source buffers, a combined (possibly video only) source buffer and
- // and an audio only source buffer.
- // By default, the audio in the combined virtual source buffer is enabled
- // and the audio-only source buffer (if it exists) is disabled.
- var disableCombined = false;
- var disableAudioOnly = true;
- // TODO: maybe we can store the sourcebuffers on the track objects?
- // safari may do something like this
- for (var i = 0; i < _this.player_.audioTracks().length; i++) {
- var track = _this.player_.audioTracks()[i];
- if (track.enabled && track.kind !== 'main') {
- // The enabled track is an alternate audio track so disable the audio in
- // the combined source buffer and enable the audio-only source buffer.
- disableCombined = true;
- disableAudioOnly = false;
- break;
- }
- }
- _this.sourceBuffers.forEach(function (sourceBuffer) {
- /* eslinst-disable */
- // TODO once codecs are required, we can switch to using the codecs to determine
- // what stream is the video stream, rather than relying on videoTracks
- /* eslinst-enable */
- sourceBuffer.appendAudioInitSegment_ = true;
- if (sourceBuffer.videoCodec_ && sourceBuffer.audioCodec_) {
- // combined
- sourceBuffer.audioDisabled_ = disableCombined;
- } else if (sourceBuffer.videoCodec_ && !sourceBuffer.audioCodec_) {
- // If the "combined" source buffer is video only, then we do not want
- // disable the audio-only source buffer (this is mostly for demuxed
- // audio and video hls)
- sourceBuffer.audioDisabled_ = true;
- disableAudioOnly = false;
- } else if (!sourceBuffer.videoCodec_ && sourceBuffer.audioCodec_) {
- // audio only
- sourceBuffer.audioDisabled_ = disableAudioOnly;
- if (disableAudioOnly) {
- return;
- }
- }
- _this.activeSourceBuffers_.push(sourceBuffer);
- });
- };
- this.onPlayerMediachange_ = function () {
- _this.sourceBuffers.forEach(function (sourceBuffer) {
- sourceBuffer.appendAudioInitSegment_ = true;
- });
- };
- this.onHlsReset_ = function () {
- _this.sourceBuffers.forEach(function (sourceBuffer) {
- if (sourceBuffer.transmuxer_) {
- sourceBuffer.transmuxer_.postMessage({ action: 'resetCaptions' });
- }
- });
- };
- this.onHlsSegmentTimeMapping_ = function (event) {
- _this.sourceBuffers.forEach(function (buffer) {
- return buffer.timeMapping_ = event.mapping;
- });
- };
- // Re-emit MediaSource events on the polyfill
- ['sourceopen', 'sourceclose', 'sourceended'].forEach(function (eventName) {
- this.nativeMediaSource_.addEventListener(eventName, this.trigger.bind(this));
- }, this);
- // capture the associated player when the MediaSource is
- // successfully attached
- this.on('sourceopen', function (event) {
- // Get the player this MediaSource is attached to
- var video = _globalDocument2['default'].querySelector('[src="' + _this.url_ + '"]');
- if (!video) {
- return;
- }
- _this.player_ = (0, _videoJs2['default'])(video.parentNode);
- // hls-reset is fired by videojs.Hls on to the tech after the main SegmentLoader
- // resets its state and flushes the buffer
- _this.player_.tech_.on('hls-reset', _this.onHlsReset_);
- // hls-segment-time-mapping is fired by videojs.Hls on to the tech after the main
- // SegmentLoader inspects an MTS segment and has an accurate stream to display
- // time mapping
- _this.player_.tech_.on('hls-segment-time-mapping', _this.onHlsSegmentTimeMapping_);
- if (_this.player_.audioTracks && _this.player_.audioTracks()) {
- _this.player_.audioTracks().on('change', _this.updateActiveSourceBuffers_);
- _this.player_.audioTracks().on('addtrack', _this.updateActiveSourceBuffers_);
- _this.player_.audioTracks().on('removetrack', _this.updateActiveSourceBuffers_);
- }
- _this.player_.on('mediachange', _this.onPlayerMediachange_);
- });
- this.on('sourceended', function (event) {
- var duration = (0, _addTextTrackData.durationOfVideo)(_this.duration);
- for (var i = 0; i < _this.sourceBuffers.length; i++) {
- var sourcebuffer = _this.sourceBuffers[i];
- var cues = sourcebuffer.metadataTrack_ && sourcebuffer.metadataTrack_.cues;
- if (cues && cues.length) {
- cues[cues.length - 1].endTime = duration;
- }
- }
- });
- // explicitly terminate any WebWorkers that were created
- // by SourceHandlers
- this.on('sourceclose', function (event) {
- this.sourceBuffers.forEach(function (sourceBuffer) {
- if (sourceBuffer.transmuxer_) {
- sourceBuffer.transmuxer_.terminate();
- }
- });
- this.sourceBuffers.length = 0;
- if (!this.player_) {
- return;
- }
- if (this.player_.audioTracks && this.player_.audioTracks()) {
- this.player_.audioTracks().off('change', this.updateActiveSourceBuffers_);
- this.player_.audioTracks().off('addtrack', this.updateActiveSourceBuffers_);
- this.player_.audioTracks().off('removetrack', this.updateActiveSourceBuffers_);
- }
- // We can only change this if the player hasn't been disposed of yet
- // because `off` eventually tries to use the el_ property. If it has
- // been disposed of, then don't worry about it because there are no
- // event handlers left to unbind anyway
- if (this.player_.el_) {
- this.player_.off('mediachange', this.onPlayerMediachange_);
- this.player_.tech_.off('hls-reset', this.onHlsReset_);
- this.player_.tech_.off('hls-segment-time-mapping', this.onHlsSegmentTimeMapping_);
- }
- });
- }
- /**
- * Add a range that that can now be seeked to.
- *
- * @param {Double} start where to start the addition
- * @param {Double} end where to end the addition
- * @private
- */
- _createClass(HtmlMediaSource, [{
- key: 'addSeekableRange_',
- value: function addSeekableRange_(start, end) {
- var error = undefined;
- if (this.duration !== Infinity) {
- error = new Error('MediaSource.addSeekableRange() can only be invoked ' + 'when the duration is Infinity');
- error.name = 'InvalidStateError';
- error.code = 11;
- throw error;
- }
- if (end > this.nativeMediaSource_.duration || isNaN(this.nativeMediaSource_.duration)) {
- this.nativeMediaSource_.duration = end;
- }
- }
- /**
- * Add a source buffer to the media source.
- *
- * @link https://developer.mozilla.org/en-US/docs/Web/API/MediaSource/addSourceBuffer
- * @param {String} type the content-type of the content
- * @return {Object} the created source buffer
- */
- }, {
- key: 'addSourceBuffer',
- value: function addSourceBuffer(type) {
- var buffer = undefined;
- var parsedType = (0, _codecUtils.parseContentType)(type);
- // Create a VirtualSourceBuffer to transmux MPEG-2 transport
- // stream segments into fragmented MP4s
- if (/^(video|audio)\/mp2t$/i.test(parsedType.type)) {
- var codecs = [];
- if (parsedType.parameters && parsedType.parameters.codecs) {
- codecs = parsedType.parameters.codecs.split(',');
- codecs = (0, _codecUtils.translateLegacyCodecs)(codecs);
- codecs = codecs.filter(function (codec) {
- return (0, _codecUtils.isAudioCodec)(codec) || (0, _codecUtils.isVideoCodec)(codec);
- });
- }
- if (codecs.length === 0) {
- codecs = ['avc1.4d400d', 'mp4a.40.2'];
- }
- buffer = new _virtualSourceBuffer2['default'](this, codecs);
- if (this.sourceBuffers.length !== 0) {
- // If another VirtualSourceBuffer already exists, then we are creating a
- // SourceBuffer for an alternate audio track and therefore we know that
- // the source has both an audio and video track.
- // That means we should trigger the manual creation of the real
- // SourceBuffers instead of waiting for the transmuxer to return data
- this.sourceBuffers[0].createRealSourceBuffers_();
- buffer.createRealSourceBuffers_();
- // Automatically disable the audio on the first source buffer if
- // a second source buffer is ever created
- this.sourceBuffers[0].audioDisabled_ = true;
- }
- } else {
- // delegate to the native implementation
- buffer = this.nativeMediaSource_.addSourceBuffer(type);
- }
- this.sourceBuffers.push(buffer);
- return buffer;
- }
- }]);
- return HtmlMediaSource;
- })(_videoJs2['default'].EventTarget);
- exports['default'] = HtmlMediaSource;
- module.exports = exports['default'];
- }).call(this,typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {})
- },{"./add-text-track-data":35,"./codec-utils":36,"./virtual-source-buffer":46,"global/document":2,"global/window":3}],43:[function(require,module,exports){
- /**
- * @file remove-cues-from-track.js
- */
- /**
- * Remove cues from a track on video.js.
- *
- * @param {Double} start start of where we should remove the cue
- * @param {Double} end end of where the we should remove the cue
- * @param {Object} track the text track to remove the cues from
- * @private
- */
- "use strict";
- Object.defineProperty(exports, "__esModule", {
- value: true
- });
- var removeCuesFromTrack = function removeCuesFromTrack(start, end, track) {
- var i = undefined;
- var cue = undefined;
- if (!track) {
- return;
- }
- if (!track.cues) {
- return;
- }
- i = track.cues.length;
- while (i--) {
- cue = track.cues[i];
- // Remove any overlapping cue
- if (cue.startTime <= end && cue.endTime >= start) {
- track.removeCue(cue);
- }
- }
- };
- exports["default"] = removeCuesFromTrack;
- module.exports = exports["default"];
- },{}],44:[function(require,module,exports){
- /**
- * @file transmuxer-worker.js
- */
- /**
- * videojs-contrib-media-sources
- *
- * Copyright (c) 2015 Brightcove
- * All rights reserved.
- *
- * Handles communication between the browser-world and the mux.js
- * transmuxer running inside of a WebWorker by exposing a simple
- * message-based interface to a Transmuxer object.
- */
- 'use strict';
- Object.defineProperty(exports, '__esModule', {
- value: true
- });
- var _createClass = (function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ('value' in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; })();
- function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { 'default': obj }; }
- function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError('Cannot call a class as a function'); } }
- var _globalWindow = require('global/window');
- var _globalWindow2 = _interopRequireDefault(_globalWindow);
- var _muxJsLibMp4 = require('mux.js/lib/mp4');
- var _muxJsLibMp42 = _interopRequireDefault(_muxJsLibMp4);
- /**
- * Re-emits transmuxer events by converting them into messages to the
- * world outside the worker.
- *
- * @param {Object} transmuxer the transmuxer to wire events on
- * @private
- */
- var wireTransmuxerEvents = function wireTransmuxerEvents(transmuxer) {
- transmuxer.on('data', function (segment) {
- // transfer ownership of the underlying ArrayBuffer
- // instead of doing a copy to save memory
- // ArrayBuffers are transferable but generic TypedArrays are not
- // @link https://developer.mozilla.org/en-US/docs/Web/API/Web_Workers_API/Using_web_workers#Passing_data_by_transferring_ownership_(transferable_objects)
- var initArray = segment.initSegment;
- segment.initSegment = {
- data: initArray.buffer,
- byteOffset: initArray.byteOffset,
- byteLength: initArray.byteLength
- };
- var typedArray = segment.data;
- segment.data = typedArray.buffer;
- _globalWindow2['default'].postMessage({
- action: 'data',
- segment: segment,
- byteOffset: typedArray.byteOffset,
- byteLength: typedArray.byteLength
- }, [segment.data]);
- });
- if (transmuxer.captionStream) {
- transmuxer.captionStream.on('data', function (caption) {
- _globalWindow2['default'].postMessage({
- action: 'caption',
- data: caption
- });
- });
- }
- transmuxer.on('done', function (data) {
- _globalWindow2['default'].postMessage({ action: 'done' });
- });
- transmuxer.on('gopInfo', function (gopInfo) {
- _globalWindow2['default'].postMessage({
- action: 'gopInfo',
- gopInfo: gopInfo
- });
- });
- };
- /**
- * All incoming messages route through this hash. If no function exists
- * to handle an incoming message, then we ignore the message.
- *
- * @class MessageHandlers
- * @param {Object} options the options to initialize with
- */
- var MessageHandlers = (function () {
- function MessageHandlers(options) {
- _classCallCheck(this, MessageHandlers);
- this.options = options || {};
- this.init();
- }
- /**
- * Our web wroker interface so that things can talk to mux.js
- * that will be running in a web worker. the scope is passed to this by
- * webworkify.
- *
- * @param {Object} self the scope for the web worker
- */
- /**
- * initialize our web worker and wire all the events.
- */
- _createClass(MessageHandlers, [{
- key: 'init',
- value: function init() {
- if (this.transmuxer) {
- this.transmuxer.dispose();
- }
- this.transmuxer = new _muxJsLibMp42['default'].Transmuxer(this.options);
- wireTransmuxerEvents(this.transmuxer);
- }
- /**
- * Adds data (a ts segment) to the start of the transmuxer pipeline for
- * processing.
- *
- * @param {ArrayBuffer} data data to push into the muxer
- */
- }, {
- key: 'push',
- value: function push(data) {
- // Cast array buffer to correct type for transmuxer
- var segment = new Uint8Array(data.data, data.byteOffset, data.byteLength);
- this.transmuxer.push(segment);
- }
- /**
- * Recreate the transmuxer so that the next segment added via `push`
- * start with a fresh transmuxer.
- */
- }, {
- key: 'reset',
- value: function reset() {
- this.init();
- }
- /**
- * Set the value that will be used as the `baseMediaDecodeTime` time for the
- * next segment pushed in. Subsequent segments will have their `baseMediaDecodeTime`
- * set relative to the first based on the PTS values.
- *
- * @param {Object} data used to set the timestamp offset in the muxer
- */
- }, {
- key: 'setTimestampOffset',
- value: function setTimestampOffset(data) {
- var timestampOffset = data.timestampOffset || 0;
- this.transmuxer.setBaseMediaDecodeTime(Math.round(timestampOffset * 90000));
- }
- }, {
- key: 'setAudioAppendStart',
- value: function setAudioAppendStart(data) {
- this.transmuxer.setAudioAppendStart(Math.ceil(data.appendStart * 90000));
- }
- /**
- * Forces the pipeline to finish processing the last segment and emit it's
- * results.
- *
- * @param {Object} data event data, not really used
- */
- }, {
- key: 'flush',
- value: function flush(data) {
- this.transmuxer.flush();
- }
- }, {
- key: 'resetCaptions',
- value: function resetCaptions() {
- this.transmuxer.resetCaptions();
- }
- }, {
- key: 'alignGopsWith',
- value: function alignGopsWith(data) {
- this.transmuxer.alignGopsWith(data.gopsToAlignWith.slice());
- }
- }]);
- return MessageHandlers;
- })();
- var TransmuxerWorker = function TransmuxerWorker(self) {
- self.onmessage = function (event) {
- if (event.data.action === 'init' && event.data.options) {
- this.messageHandlers = new MessageHandlers(event.data.options);
- return;
- }
- if (!this.messageHandlers) {
- this.messageHandlers = new MessageHandlers();
- }
- if (event.data && event.data.action && event.data.action !== 'init') {
- if (this.messageHandlers[event.data.action]) {
- this.messageHandlers[event.data.action](event.data);
- }
- }
- };
- };
- exports['default'] = function (self) {
- return new TransmuxerWorker(self);
- };
- module.exports = exports['default'];
- },{"global/window":3,"mux.js/lib/mp4":24}],45:[function(require,module,exports){
- (function (global){
- /**
- * @file videojs-contrib-media-sources.js
- */
- 'use strict';
- Object.defineProperty(exports, '__esModule', {
- value: true
- });
- function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { 'default': obj }; }
- var _globalWindow = require('global/window');
- var _globalWindow2 = _interopRequireDefault(_globalWindow);
- var _flashMediaSource = require('./flash-media-source');
- var _flashMediaSource2 = _interopRequireDefault(_flashMediaSource);
- var _htmlMediaSource = require('./html-media-source');
- var _htmlMediaSource2 = _interopRequireDefault(_htmlMediaSource);
- var _videoJs = (typeof window !== "undefined" ? window['videojs'] : typeof global !== "undefined" ? global['videojs'] : null);
- var _videoJs2 = _interopRequireDefault(_videoJs);
- var urlCount = 0;
- // ------------
- // Media Source
- // ------------
- var defaults = {
- // how to determine the MediaSource implementation to use. There
- // are three available modes:
- // - auto: use native MediaSources where available and Flash
- // everywhere else
- // - html5: always use native MediaSources
- // - flash: always use the Flash MediaSource polyfill
- mode: 'auto'
- };
- // store references to the media sources so they can be connected
- // to a video element (a swf object)
- // TODO: can we store this somewhere local to this module?
- _videoJs2['default'].mediaSources = {};
- /**
- * Provide a method for a swf object to notify JS that a
- * media source is now open.
- *
- * @param {String} msObjectURL string referencing the MSE Object URL
- * @param {String} swfId the swf id
- */
- var open = function open(msObjectURL, swfId) {
- var mediaSource = _videoJs2['default'].mediaSources[msObjectURL];
- if (mediaSource) {
- mediaSource.trigger({ type: 'sourceopen', swfId: swfId });
- } else {
- throw new Error('Media Source not found (Video.js)');
- }
- };
- /**
- * Check to see if the native MediaSource object exists and supports
- * an MP4 container with both H.264 video and AAC-LC audio.
- *
- * @return {Boolean} if native media sources are supported
- */
- var supportsNativeMediaSources = function supportsNativeMediaSources() {
- return !!_globalWindow2['default'].MediaSource && !!_globalWindow2['default'].MediaSource.isTypeSupported && _globalWindow2['default'].MediaSource.isTypeSupported('video/mp4;codecs="avc1.4d400d,mp4a.40.2"');
- };
- /**
- * An emulation of the MediaSource API so that we can support
- * native and non-native functionality such as flash and
- * video/mp2t videos. returns an instance of HtmlMediaSource or
- * FlashMediaSource depending on what is supported and what options
- * are passed in.
- *
- * @link https://developer.mozilla.org/en-US/docs/Web/API/MediaSource/MediaSource
- * @param {Object} options options to use during setup.
- */
- var MediaSource = function MediaSource(options) {
- var settings = _videoJs2['default'].mergeOptions(defaults, options);
- this.MediaSource = {
- open: open,
- supportsNativeMediaSources: supportsNativeMediaSources
- };
- // determine whether HTML MediaSources should be used
- if (settings.mode === 'html5' || settings.mode === 'auto' && supportsNativeMediaSources()) {
- return new _htmlMediaSource2['default']();
- } else if (_videoJs2['default'].getTech('Flash')) {
- return new _flashMediaSource2['default']();
- }
- throw new Error('Cannot use Flash or Html5 to create a MediaSource for this video');
- };
- exports.MediaSource = MediaSource;
- MediaSource.open = open;
- MediaSource.supportsNativeMediaSources = supportsNativeMediaSources;
- /**
- * A wrapper around the native URL for our MSE object
- * implementation, this object is exposed under videojs.URL
- *
- * @link https://developer.mozilla.org/en-US/docs/Web/API/URL/URL
- */
- var URL = {
- /**
- * A wrapper around the native createObjectURL for our objects.
- * This function maps a native or emulated mediaSource to a blob
- * url so that it can be loaded into video.js
- *
- * @link https://developer.mozilla.org/en-US/docs/Web/API/URL/createObjectURL
- * @param {MediaSource} object the object to create a blob url to
- */
- createObjectURL: function createObjectURL(object) {
- var objectUrlPrefix = 'blob:vjs-media-source/';
- var url = undefined;
- // use the native MediaSource to generate an object URL
- if (object instanceof _htmlMediaSource2['default']) {
- url = _globalWindow2['default'].URL.createObjectURL(object.nativeMediaSource_);
- object.url_ = url;
- return url;
- }
- // if the object isn't an emulated MediaSource, delegate to the
- // native implementation
- if (!(object instanceof _flashMediaSource2['default'])) {
- url = _globalWindow2['default'].URL.createObjectURL(object);
- object.url_ = url;
- return url;
- }
- // build a URL that can be used to map back to the emulated
- // MediaSource
- url = objectUrlPrefix + urlCount;
- urlCount++;
- // setup the mapping back to object
- _videoJs2['default'].mediaSources[url] = object;
- return url;
- }
- };
- exports.URL = URL;
- _videoJs2['default'].MediaSource = MediaSource;
- _videoJs2['default'].URL = URL;
- }).call(this,typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {})
- },{"./flash-media-source":39,"./html-media-source":42,"global/window":3}],46:[function(require,module,exports){
- (function (global){
- /**
- * @file virtual-source-buffer.js
- */
- 'use strict';
- Object.defineProperty(exports, '__esModule', {
- value: true
- });
- var _createClass = (function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ('value' in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; })();
- var _get = function get(_x, _x2, _x3) { var _again = true; _function: while (_again) { var object = _x, property = _x2, receiver = _x3; _again = false; if (object === null) object = Function.prototype; var desc = Object.getOwnPropertyDescriptor(object, property); if (desc === undefined) { var parent = Object.getPrototypeOf(object); if (parent === null) { return undefined; } else { _x = parent; _x2 = property; _x3 = receiver; _again = true; desc = parent = undefined; continue _function; } } else if ('value' in desc) { return desc.value; } else { var getter = desc.get; if (getter === undefined) { return undefined; } return getter.call(receiver); } } };
- function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { 'default': obj }; }
- function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError('Cannot call a class as a function'); } }
- function _inherits(subClass, superClass) { if (typeof superClass !== 'function' && superClass !== null) { throw new TypeError('Super expression must either be null or a function, not ' + typeof superClass); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, enumerable: false, writable: true, configurable: true } }); if (superClass) Object.setPrototypeOf ? Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass; }
- var _videoJs = (typeof window !== "undefined" ? window['videojs'] : typeof global !== "undefined" ? global['videojs'] : null);
- var _videoJs2 = _interopRequireDefault(_videoJs);
- var _createTextTracksIfNecessary = require('./create-text-tracks-if-necessary');
- var _createTextTracksIfNecessary2 = _interopRequireDefault(_createTextTracksIfNecessary);
- var _removeCuesFromTrack = require('./remove-cues-from-track');
- var _removeCuesFromTrack2 = _interopRequireDefault(_removeCuesFromTrack);
- var _addTextTrackData = require('./add-text-track-data');
- var _webwackify = require('webwackify');
- var _webwackify2 = _interopRequireDefault(_webwackify);
- var _transmuxerWorker = require('./transmuxer-worker');
- var _transmuxerWorker2 = _interopRequireDefault(_transmuxerWorker);
- var _codecUtils = require('./codec-utils');
- var resolveTransmuxWorker = function resolveTransmuxWorker() {
- var result = undefined;
- try {
- result = require.resolve('./transmuxer-worker');
- } catch (e) {
- // no result
- }
- return result;
- };
- // We create a wrapper around the SourceBuffer so that we can manage the
- // state of the `updating` property manually. We have to do this because
- // Firefox changes `updating` to false long before triggering `updateend`
- // events and that was causing strange problems in videojs-contrib-hls
- var makeWrappedSourceBuffer = function makeWrappedSourceBuffer(mediaSource, mimeType) {
- var sourceBuffer = mediaSource.addSourceBuffer(mimeType);
- var wrapper = Object.create(null);
- wrapper.updating = false;
- wrapper.realBuffer_ = sourceBuffer;
- var _loop = function (key) {
- if (typeof sourceBuffer[key] === 'function') {
- wrapper[key] = function () {
- return sourceBuffer[key].apply(sourceBuffer, arguments);
- };
- } else if (typeof wrapper[key] === 'undefined') {
- Object.defineProperty(wrapper, key, {
- get: function get() {
- return sourceBuffer[key];
- },
- set: function set(v) {
- return sourceBuffer[key] = v;
- }
- });
- }
- };
- for (var key in sourceBuffer) {
- _loop(key);
- }
- return wrapper;
- };
- /**
- * Returns a list of gops in the buffer that have a pts value of 3 seconds or more in
- * front of current time.
- *
- * @param {Array} buffer
- * The current buffer of gop information
- * @param {Player} player
- * The player instance
- * @param {Double} mapping
- * Offset to map display time to stream presentation time
- * @return {Array}
- * List of gops considered safe to append over
- */
- var gopsSafeToAlignWith = function gopsSafeToAlignWith(buffer, player, mapping) {
- if (!player || !buffer.length) {
- return [];
- }
- // pts value for current time + 3 seconds to give a bit more wiggle room
- var currentTimePts = Math.ceil((player.currentTime() - mapping + 3) * 90000);
- var i = undefined;
- for (i = 0; i < buffer.length; i++) {
- if (buffer[i].pts > currentTimePts) {
- break;
- }
- }
- return buffer.slice(i);
- };
- exports.gopsSafeToAlignWith = gopsSafeToAlignWith;
- /**
- * Appends gop information (timing and byteLength) received by the transmuxer for the
- * gops appended in the last call to appendBuffer
- *
- * @param {Array} buffer
- * The current buffer of gop information
- * @param {Array} gops
- * List of new gop information
- * @param {boolean} replace
- * If true, replace the buffer with the new gop information. If false, append the
- * new gop information to the buffer in the right location of time.
- * @return {Array}
- * Updated list of gop information
- */
- var updateGopBuffer = function updateGopBuffer(buffer, gops, replace) {
- if (!gops.length) {
- return buffer;
- }
- if (replace) {
- // If we are in safe append mode, then completely overwrite the gop buffer
- // with the most recent appeneded data. This will make sure that when appending
- // future segments, we only try to align with gops that are both ahead of current
- // time and in the last segment appended.
- return gops.slice();
- }
- var start = gops[0].pts;
- var i = 0;
- for (i; i < buffer.length; i++) {
- if (buffer[i].pts >= start) {
- break;
- }
- }
- return buffer.slice(0, i).concat(gops);
- };
- exports.updateGopBuffer = updateGopBuffer;
- /**
- * Removes gop information in buffer that overlaps with provided start and end
- *
- * @param {Array} buffer
- * The current buffer of gop information
- * @param {Double} start
- * position to start the remove at
- * @param {Double} end
- * position to end the remove at
- * @param {Double} mapping
- * Offset to map display time to stream presentation time
- */
- var removeGopBuffer = function removeGopBuffer(buffer, start, end, mapping) {
- var startPts = Math.ceil((start - mapping) * 90000);
- var endPts = Math.ceil((end - mapping) * 90000);
- var updatedBuffer = buffer.slice();
- var i = buffer.length;
- while (i--) {
- if (buffer[i].pts <= endPts) {
- break;
- }
- }
- if (i === -1) {
- // no removal because end of remove range is before start of buffer
- return updatedBuffer;
- }
- var j = i + 1;
- while (j--) {
- if (buffer[j].pts <= startPts) {
- break;
- }
- }
- // clamp remove range start to 0 index
- j = Math.max(j, 0);
- updatedBuffer.splice(j, i - j + 1);
- return updatedBuffer;
- };
- exports.removeGopBuffer = removeGopBuffer;
- /**
- * VirtualSourceBuffers exist so that we can transmux non native formats
- * into a native format, but keep the same api as a native source buffer.
- * It creates a transmuxer, that works in its own thread (a web worker) and
- * that transmuxer muxes the data into a native format. VirtualSourceBuffer will
- * then send all of that data to the naive sourcebuffer so that it is
- * indestinguishable from a natively supported format.
- *
- * @param {HtmlMediaSource} mediaSource the parent mediaSource
- * @param {Array} codecs array of codecs that we will be dealing with
- * @class VirtualSourceBuffer
- * @extends video.js.EventTarget
- */
- var VirtualSourceBuffer = (function (_videojs$EventTarget) {
- _inherits(VirtualSourceBuffer, _videojs$EventTarget);
- function VirtualSourceBuffer(mediaSource, codecs) {
- var _this = this;
- _classCallCheck(this, VirtualSourceBuffer);
- _get(Object.getPrototypeOf(VirtualSourceBuffer.prototype), 'constructor', this).call(this, _videoJs2['default'].EventTarget);
- this.timestampOffset_ = 0;
- this.pendingBuffers_ = [];
- this.bufferUpdating_ = false;
- this.mediaSource_ = mediaSource;
- this.codecs_ = codecs;
- this.audioCodec_ = null;
- this.videoCodec_ = null;
- this.audioDisabled_ = false;
- this.appendAudioInitSegment_ = true;
- this.gopBuffer_ = [];
- this.timeMapping_ = 0;
- this.safeAppend_ = _videoJs2['default'].browser.IE_VERSION >= 11;
- var options = {
- remux: false,
- alignGopsAtEnd: this.safeAppend_
- };
- this.codecs_.forEach(function (codec) {
- if ((0, _codecUtils.isAudioCodec)(codec)) {
- _this.audioCodec_ = codec;
- } else if ((0, _codecUtils.isVideoCodec)(codec)) {
- _this.videoCodec_ = codec;
- }
- });
- // append muxed segments to their respective native buffers as
- // soon as they are available
- this.transmuxer_ = (0, _webwackify2['default'])(_transmuxerWorker2['default'], resolveTransmuxWorker());
- this.transmuxer_.postMessage({ action: 'init', options: options });
- this.transmuxer_.onmessage = function (event) {
- if (event.data.action === 'data') {
- return _this.data_(event);
- }
- if (event.data.action === 'done') {
- return _this.done_(event);
- }
- if (event.data.action === 'gopInfo') {
- return _this.appendGopInfo_(event);
- }
- };
- // this timestampOffset is a property with the side-effect of resetting
- // baseMediaDecodeTime in the transmuxer on the setter
- Object.defineProperty(this, 'timestampOffset', {
- get: function get() {
- return this.timestampOffset_;
- },
- set: function set(val) {
- if (typeof val === 'number' && val >= 0) {
- this.timestampOffset_ = val;
- this.appendAudioInitSegment_ = true;
- // reset gop buffer on timestampoffset as this signals a change in timeline
- this.gopBuffer_.length = 0;
- this.timeMapping_ = 0;
- // We have to tell the transmuxer to set the baseMediaDecodeTime to
- // the desired timestampOffset for the next segment
- this.transmuxer_.postMessage({
- action: 'setTimestampOffset',
- timestampOffset: val
- });
- }
- }
- });
- // setting the append window affects both source buffers
- Object.defineProperty(this, 'appendWindowStart', {
- get: function get() {
- return (this.videoBuffer_ || this.audioBuffer_).appendWindowStart;
- },
- set: function set(start) {
- if (this.videoBuffer_) {
- this.videoBuffer_.appendWindowStart = start;
- }
- if (this.audioBuffer_) {
- this.audioBuffer_.appendWindowStart = start;
- }
- }
- });
- // this buffer is "updating" if either of its native buffers are
- Object.defineProperty(this, 'updating', {
- get: function get() {
- return !!(this.bufferUpdating_ || !this.audioDisabled_ && this.audioBuffer_ && this.audioBuffer_.updating || this.videoBuffer_ && this.videoBuffer_.updating);
- }
- });
- // the buffered property is the intersection of the buffered
- // ranges of the native source buffers
- Object.defineProperty(this, 'buffered', {
- get: function get() {
- var start = null;
- var end = null;
- var arity = 0;
- var extents = [];
- var ranges = [];
- // neither buffer has been created yet
- if (!this.videoBuffer_ && !this.audioBuffer_) {
- return _videoJs2['default'].createTimeRange();
- }
- // only one buffer is configured
- if (!this.videoBuffer_) {
- return this.audioBuffer_.buffered;
- }
- if (!this.audioBuffer_) {
- return this.videoBuffer_.buffered;
- }
- // both buffers are configured
- if (this.audioDisabled_) {
- return this.videoBuffer_.buffered;
- }
- // both buffers are empty
- if (this.videoBuffer_.buffered.length === 0 && this.audioBuffer_.buffered.length === 0) {
- return _videoJs2['default'].createTimeRange();
- }
- // Handle the case where we have both buffers and create an
- // intersection of the two
- var videoBuffered = this.videoBuffer_.buffered;
- var audioBuffered = this.audioBuffer_.buffered;
- var count = videoBuffered.length;
- // A) Gather up all start and end times
- while (count--) {
- extents.push({ time: videoBuffered.start(count), type: 'start' });
- extents.push({ time: videoBuffered.end(count), type: 'end' });
- }
- count = audioBuffered.length;
- while (count--) {
- extents.push({ time: audioBuffered.start(count), type: 'start' });
- extents.push({ time: audioBuffered.end(count), type: 'end' });
- }
- // B) Sort them by time
- extents.sort(function (a, b) {
- return a.time - b.time;
- });
- // C) Go along one by one incrementing arity for start and decrementing
- // arity for ends
- for (count = 0; count < extents.length; count++) {
- if (extents[count].type === 'start') {
- arity++;
- // D) If arity is ever incremented to 2 we are entering an
- // overlapping range
- if (arity === 2) {
- start = extents[count].time;
- }
- } else if (extents[count].type === 'end') {
- arity--;
- // E) If arity is ever decremented to 1 we leaving an
- // overlapping range
- if (arity === 1) {
- end = extents[count].time;
- }
- }
- // F) Record overlapping ranges
- if (start !== null && end !== null) {
- ranges.push([start, end]);
- start = null;
- end = null;
- }
- }
- return _videoJs2['default'].createTimeRanges(ranges);
- }
- });
- }
- /**
- * When we get a data event from the transmuxer
- * we call this function and handle the data that
- * was sent to us
- *
- * @private
- * @param {Event} event the data event from the transmuxer
- */
- _createClass(VirtualSourceBuffer, [{
- key: 'data_',
- value: function data_(event) {
- var segment = event.data.segment;
- // Cast ArrayBuffer to TypedArray
- segment.data = new Uint8Array(segment.data, event.data.byteOffset, event.data.byteLength);
- segment.initSegment = new Uint8Array(segment.initSegment.data, segment.initSegment.byteOffset, segment.initSegment.byteLength);
- (0, _createTextTracksIfNecessary2['default'])(this, this.mediaSource_, segment);
- // Add the segments to the pendingBuffers array
- this.pendingBuffers_.push(segment);
- return;
- }
- /**
- * When we get a done event from the transmuxer
- * we call this function and we process all
- * of the pending data that we have been saving in the
- * data_ function
- *
- * @private
- * @param {Event} event the done event from the transmuxer
- */
- }, {
- key: 'done_',
- value: function done_(event) {
- // Don't process and append data if the mediaSource is closed
- if (this.mediaSource_.readyState === 'closed') {
- this.pendingBuffers_.length = 0;
- return;
- }
- // All buffers should have been flushed from the muxer
- // start processing anything we have received
- this.processPendingSegments_();
- return;
- }
- /**
- * Create our internal native audio/video source buffers and add
- * event handlers to them with the following conditions:
- * 1. they do not already exist on the mediaSource
- * 2. this VSB has a codec for them
- *
- * @private
- */
- }, {
- key: 'createRealSourceBuffers_',
- value: function createRealSourceBuffers_() {
- var _this2 = this;
- var types = ['audio', 'video'];
- types.forEach(function (type) {
- // Don't create a SourceBuffer of this type if we don't have a
- // codec for it
- if (!_this2[type + 'Codec_']) {
- return;
- }
- // Do nothing if a SourceBuffer of this type already exists
- if (_this2[type + 'Buffer_']) {
- return;
- }
- var buffer = null;
- // If the mediasource already has a SourceBuffer for the codec
- // use that
- if (_this2.mediaSource_[type + 'Buffer_']) {
- buffer = _this2.mediaSource_[type + 'Buffer_'];
- // In multiple audio track cases, the audio source buffer is disabled
- // on the main VirtualSourceBuffer by the HTMLMediaSource much earlier
- // than createRealSourceBuffers_ is called to create the second
- // VirtualSourceBuffer because that happens as a side-effect of
- // videojs-contrib-hls starting the audioSegmentLoader. As a result,
- // the audioBuffer is essentially "ownerless" and no one will toggle
- // the `updating` state back to false once the `updateend` event is received
- //
- // Setting `updating` to false manually will work around this
- // situation and allow work to continue
- buffer.updating = false;
- } else {
- var codecProperty = type + 'Codec_';
- var mimeType = type + '/mp4;codecs="' + _this2[codecProperty] + '"';
- buffer = makeWrappedSourceBuffer(_this2.mediaSource_.nativeMediaSource_, mimeType);
- _this2.mediaSource_[type + 'Buffer_'] = buffer;
- }
- _this2[type + 'Buffer_'] = buffer;
- // Wire up the events to the SourceBuffer
- ['update', 'updatestart', 'updateend'].forEach(function (event) {
- buffer.addEventListener(event, function () {
- // if audio is disabled
- if (type === 'audio' && _this2.audioDisabled_) {
- return;
- }
- if (event === 'updateend') {
- _this2[type + 'Buffer_'].updating = false;
- }
- var shouldTrigger = types.every(function (t) {
- // skip checking audio's updating status if audio
- // is not enabled
- if (t === 'audio' && _this2.audioDisabled_) {
- return true;
- }
- // if the other type if updating we don't trigger
- if (type !== t && _this2[t + 'Buffer_'] && _this2[t + 'Buffer_'].updating) {
- return false;
- }
- return true;
- });
- if (shouldTrigger) {
- return _this2.trigger(event);
- }
- });
- });
- });
- }
- /**
- * Emulate the native mediasource function, but our function will
- * send all of the proposed segments to the transmuxer so that we
- * can transmux them before we append them to our internal
- * native source buffers in the correct format.
- *
- * @link https://developer.mozilla.org/en-US/docs/Web/API/SourceBuffer/appendBuffer
- * @param {Uint8Array} segment the segment to append to the buffer
- */
- }, {
- key: 'appendBuffer',
- value: function appendBuffer(segment) {
- // Start the internal "updating" state
- this.bufferUpdating_ = true;
- if (this.audioBuffer_ && this.audioBuffer_.buffered.length) {
- var audioBuffered = this.audioBuffer_.buffered;
- this.transmuxer_.postMessage({
- action: 'setAudioAppendStart',
- appendStart: audioBuffered.end(audioBuffered.length - 1)
- });
- }
- if (this.videoBuffer_) {
- this.transmuxer_.postMessage({
- action: 'alignGopsWith',
- gopsToAlignWith: gopsSafeToAlignWith(this.gopBuffer_, this.mediaSource_.player_, this.timeMapping_)
- });
- }
- this.transmuxer_.postMessage({
- action: 'push',
- // Send the typed-array of data as an ArrayBuffer so that
- // it can be sent as a "Transferable" and avoid the costly
- // memory copy
- data: segment.buffer,
- // To recreate the original typed-array, we need information
- // about what portion of the ArrayBuffer it was a view into
- byteOffset: segment.byteOffset,
- byteLength: segment.byteLength
- }, [segment.buffer]);
- this.transmuxer_.postMessage({ action: 'flush' });
- }
- /**
- * Appends gop information (timing and byteLength) received by the transmuxer for the
- * gops appended in the last call to appendBuffer
- *
- * @param {Event} event
- * The gopInfo event from the transmuxer
- * @param {Array} event.data.gopInfo
- * List of gop info to append
- */
- }, {
- key: 'appendGopInfo_',
- value: function appendGopInfo_(event) {
- this.gopBuffer_ = updateGopBuffer(this.gopBuffer_, event.data.gopInfo, this.safeAppend_);
- }
- /**
- * Emulate the native mediasource function and remove parts
- * of the buffer from any of our internal buffers that exist
- *
- * @link https://developer.mozilla.org/en-US/docs/Web/API/SourceBuffer/remove
- * @param {Double} start position to start the remove at
- * @param {Double} end position to end the remove at
- */
- }, {
- key: 'remove',
- value: function remove(start, end) {
- if (this.videoBuffer_) {
- this.videoBuffer_.updating = true;
- this.videoBuffer_.remove(start, end);
- this.gopBuffer_ = removeGopBuffer(this.gopBuffer_, start, end, this.timeMapping_);
- }
- if (!this.audioDisabled_ && this.audioBuffer_) {
- this.audioBuffer_.updating = true;
- this.audioBuffer_.remove(start, end);
- }
- // Remove Metadata Cues (id3)
- (0, _removeCuesFromTrack2['default'])(start, end, this.metadataTrack_);
- // Remove Any Captions
- if (this.inbandTextTracks_) {
- for (var track in this.inbandTextTracks_) {
- (0, _removeCuesFromTrack2['default'])(start, end, this.inbandTextTracks_[track]);
- }
- }
- }
- /**
- * Process any segments that the muxer has output
- * Concatenate segments together based on type and append them into
- * their respective sourceBuffers
- *
- * @private
- */
- }, {
- key: 'processPendingSegments_',
- value: function processPendingSegments_() {
- var sortedSegments = {
- video: {
- segments: [],
- bytes: 0
- },
- audio: {
- segments: [],
- bytes: 0
- },
- captions: [],
- metadata: []
- };
- // Sort segments into separate video/audio arrays and
- // keep track of their total byte lengths
- sortedSegments = this.pendingBuffers_.reduce(function (segmentObj, segment) {
- var type = segment.type;
- var data = segment.data;
- var initSegment = segment.initSegment;
- segmentObj[type].segments.push(data);
- segmentObj[type].bytes += data.byteLength;
- segmentObj[type].initSegment = initSegment;
- // Gather any captions into a single array
- if (segment.captions) {
- segmentObj.captions = segmentObj.captions.concat(segment.captions);
- }
- if (segment.info) {
- segmentObj[type].info = segment.info;
- }
- // Gather any metadata into a single array
- if (segment.metadata) {
- segmentObj.metadata = segmentObj.metadata.concat(segment.metadata);
- }
- return segmentObj;
- }, sortedSegments);
- // Create the real source buffers if they don't exist by now since we
- // finally are sure what tracks are contained in the source
- if (!this.videoBuffer_ && !this.audioBuffer_) {
- // Remove any codecs that may have been specified by default but
- // are no longer applicable now
- if (sortedSegments.video.bytes === 0) {
- this.videoCodec_ = null;
- }
- if (sortedSegments.audio.bytes === 0) {
- this.audioCodec_ = null;
- }
- this.createRealSourceBuffers_();
- }
- if (sortedSegments.audio.info) {
- this.mediaSource_.trigger({ type: 'audioinfo', info: sortedSegments.audio.info });
- }
- if (sortedSegments.video.info) {
- this.mediaSource_.trigger({ type: 'videoinfo', info: sortedSegments.video.info });
- }
- if (this.appendAudioInitSegment_) {
- if (!this.audioDisabled_ && this.audioBuffer_) {
- sortedSegments.audio.segments.unshift(sortedSegments.audio.initSegment);
- sortedSegments.audio.bytes += sortedSegments.audio.initSegment.byteLength;
- }
- this.appendAudioInitSegment_ = false;
- }
- var triggerUpdateend = false;
- // Merge multiple video and audio segments into one and append
- if (this.videoBuffer_ && sortedSegments.video.bytes) {
- sortedSegments.video.segments.unshift(sortedSegments.video.initSegment);
- sortedSegments.video.bytes += sortedSegments.video.initSegment.byteLength;
- this.concatAndAppendSegments_(sortedSegments.video, this.videoBuffer_);
- // TODO: are video tracks the only ones with text tracks?
- (0, _addTextTrackData.addTextTrackData)(this, sortedSegments.captions, sortedSegments.metadata);
- } else if (this.videoBuffer_ && (this.audioDisabled_ || !this.audioBuffer_)) {
- // The transmuxer did not return any bytes of video, meaning it was all trimmed
- // for gop alignment. Since we have a video buffer and audio is disabled, updateend
- // will never be triggered by this source buffer, which will cause contrib-hls
- // to be stuck forever waiting for updateend. If audio is not disabled, updateend
- // will be triggered by the audio buffer, which will be sent upwards since the video
- // buffer will not be in an updating state.
- triggerUpdateend = true;
- }
- if (!this.audioDisabled_ && this.audioBuffer_) {
- this.concatAndAppendSegments_(sortedSegments.audio, this.audioBuffer_);
- }
- this.pendingBuffers_.length = 0;
- if (triggerUpdateend) {
- this.trigger('updateend');
- }
- // We are no longer in the internal "updating" state
- this.bufferUpdating_ = false;
- }
- /**
- * Combine all segments into a single Uint8Array and then append them
- * to the destination buffer
- *
- * @param {Object} segmentObj
- * @param {SourceBuffer} destinationBuffer native source buffer to append data to
- * @private
- */
- }, {
- key: 'concatAndAppendSegments_',
- value: function concatAndAppendSegments_(segmentObj, destinationBuffer) {
- var offset = 0;
- var tempBuffer = undefined;
- if (segmentObj.bytes) {
- tempBuffer = new Uint8Array(segmentObj.bytes);
- // Combine the individual segments into one large typed-array
- segmentObj.segments.forEach(function (segment) {
- tempBuffer.set(segment, offset);
- offset += segment.byteLength;
- });
- try {
- destinationBuffer.updating = true;
- destinationBuffer.appendBuffer(tempBuffer);
- } catch (error) {
- if (this.mediaSource_.player_) {
- this.mediaSource_.player_.error({
- code: -3,
- type: 'APPEND_BUFFER_ERR',
- message: error.message,
- originalError: error
- });
- }
- }
- }
- }
- /**
- * Emulate the native mediasource function. abort any soureBuffer
- * actions and throw out any un-appended data.
- *
- * @link https://developer.mozilla.org/en-US/docs/Web/API/SourceBuffer/abort
- */
- }, {
- key: 'abort',
- value: function abort() {
- if (this.videoBuffer_) {
- this.videoBuffer_.abort();
- }
- if (!this.audioDisabled_ && this.audioBuffer_) {
- this.audioBuffer_.abort();
- }
- if (this.transmuxer_) {
- this.transmuxer_.postMessage({ action: 'reset' });
- }
- this.pendingBuffers_.length = 0;
- this.bufferUpdating_ = false;
- }
- }]);
- return VirtualSourceBuffer;
- })(_videoJs2['default'].EventTarget);
- exports['default'] = VirtualSourceBuffer;
- }).call(this,typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {})
- },{"./add-text-track-data":35,"./codec-utils":36,"./create-text-tracks-if-necessary":37,"./remove-cues-from-track":43,"./transmuxer-worker":44,"webwackify":34}],47:[function(require,module,exports){
- (function (global){
- 'use strict';
- var _createClass = (function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ('value' in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; })();
- function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { 'default': obj }; }
- function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError('Cannot call a class as a function'); } }
- var _qunit = (typeof window !== "undefined" ? window['QUnit'] : typeof global !== "undefined" ? global['QUnit'] : null);
- var _qunit2 = _interopRequireDefault(_qunit);
- var _srcAddTextTrackData = require('../src/add-text-track-data');
- var equal = _qunit2['default'].equal;
- var _module = _qunit2['default'].module;
- var test = _qunit2['default'].test;
- var MockTextTrack = (function () {
- function MockTextTrack() {
- _classCallCheck(this, MockTextTrack);
- this.cues = [];
- }
- _createClass(MockTextTrack, [{
- key: 'addCue',
- value: function addCue(cue) {
- this.cues.push(cue);
- }
- }]);
- return MockTextTrack;
- })();
- _module('Text Track Data', {
- beforeEach: function beforeEach() {
- this.sourceHandler = {
- inbandTextTracks_: {
- CC1: new MockTextTrack(),
- CC2: new MockTextTrack(),
- CC3: new MockTextTrack(),
- CC4: new MockTextTrack()
- },
- metadataTrack_: new MockTextTrack(),
- mediaSource_: {
- duration: NaN
- },
- timestampOffset: 0
- };
- }
- });
- test('does nothing if no cues are specified', function () {
- (0, _srcAddTextTrackData.addTextTrackData)(this.sourceHandler, [], []);
- equal(this.sourceHandler.inbandTextTracks_.CC1.cues.length, 0, 'added no 608 cues');
- equal(this.sourceHandler.metadataTrack_.cues.length, 0, 'added no metadata cues');
- });
- test('creates cues for 608 captions with "stream" property in ccX', function () {
- (0, _srcAddTextTrackData.addTextTrackData)(this.sourceHandler, [{
- startTime: 0,
- endTime: 1,
- text: 'CC1 text',
- stream: 'CC1'
- }, {
- startTime: 0,
- endTime: 1,
- text: 'CC2 text',
- stream: 'CC2'
- }, {
- startTime: 0,
- endTime: 1,
- text: 'CC3 text',
- stream: 'CC3'
- }, {
- startTime: 0,
- endTime: 1,
- text: 'CC4 text',
- stream: 'CC4'
- }], []);
- equal(this.sourceHandler.inbandTextTracks_.CC1.cues.length, 1, 'added one 608 cue to CC1');
- equal(this.sourceHandler.inbandTextTracks_.CC2.cues.length, 1, 'added one 608 cue to CC2');
- equal(this.sourceHandler.inbandTextTracks_.CC3.cues.length, 1, 'added one 608 cue to CC3');
- equal(this.sourceHandler.inbandTextTracks_.CC4.cues.length, 1, 'added one 608 cue to CC4');
- equal(this.sourceHandler.metadataTrack_.cues.length, 0, 'added no metadata cues');
- });
- test('creates cues for timed metadata', function () {
- (0, _srcAddTextTrackData.addTextTrackData)(this.sourceHandler, [], [{
- cueTime: 1,
- frames: [{}]
- }]);
- equal(this.sourceHandler.inbandTextTracks_.CC1.cues.length, 0, 'added no 608 cues');
- equal(this.sourceHandler.metadataTrack_.cues.length, 1, 'added one metadata cues');
- });
- }).call(this,typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {})
- },{"../src/add-text-track-data":35}],48:[function(require,module,exports){
- (function (global){
- 'use strict';
- function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { 'default': obj }; }
- var _qunit = (typeof window !== "undefined" ? window['QUnit'] : typeof global !== "undefined" ? global['QUnit'] : null);
- var _qunit2 = _interopRequireDefault(_qunit);
- var _srcCodecUtils = require('../src/codec-utils');
- var deepEqual = _qunit2['default'].deepEqual;
- var _module = _qunit2['default'].module;
- var test = _qunit2['default'].test;
- _module('Codec Utils');
- test('translates legacy codecs', function () {
- deepEqual((0, _srcCodecUtils.translateLegacyCodecs)(['avc1.66.30', 'avc1.66.30']), ['avc1.42001e', 'avc1.42001e'], 'translates legacy avc1.66.30 codec');
- deepEqual((0, _srcCodecUtils.translateLegacyCodecs)(['avc1.42C01E', 'avc1.42C01E']), ['avc1.42C01E', 'avc1.42C01E'], 'does not translate modern codecs');
- deepEqual((0, _srcCodecUtils.translateLegacyCodecs)(['avc1.42C01E', 'avc1.66.30']), ['avc1.42C01E', 'avc1.42001e'], 'only translates legacy codecs when mixed');
- deepEqual((0, _srcCodecUtils.translateLegacyCodecs)(['avc1.4d0020', 'avc1.100.41', 'avc1.77.41', 'avc1.77.32', 'avc1.77.31', 'avc1.77.30', 'avc1.66.30', 'avc1.66.21', 'avc1.42C01e']), ['avc1.4d0020', 'avc1.640029', 'avc1.4d0029', 'avc1.4d0020', 'avc1.4d001f', 'avc1.4d001e', 'avc1.42001e', 'avc1.420015', 'avc1.42C01e'], 'translates a whole bunch');
- });
- }).call(this,typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {})
- },{"../src/codec-utils":36}],49:[function(require,module,exports){
- (function (global){
- 'use strict';
- function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { 'default': obj }; }
- var _globalDocument = require('global/document');
- var _globalDocument2 = _interopRequireDefault(_globalDocument);
- var _globalWindow = require('global/window');
- var _globalWindow2 = _interopRequireDefault(_globalWindow);
- var _qunit = (typeof window !== "undefined" ? window['QUnit'] : typeof global !== "undefined" ? global['QUnit'] : null);
- var _qunit2 = _interopRequireDefault(_qunit);
- var _sinon = (typeof window !== "undefined" ? window['sinon'] : typeof global !== "undefined" ? global['sinon'] : null);
- var _sinon2 = _interopRequireDefault(_sinon);
- var _videoJs = (typeof window !== "undefined" ? window['videojs'] : typeof global !== "undefined" ? global['videojs'] : null);
- var _videoJs2 = _interopRequireDefault(_videoJs);
- var _srcFlashMediaSource = require('../src/flash-media-source');
- var _srcFlashMediaSource2 = _interopRequireDefault(_srcFlashMediaSource);
- var _srcHtmlMediaSource = require('../src/html-media-source');
- var _srcHtmlMediaSource2 = _interopRequireDefault(_srcHtmlMediaSource);
- // we disable this because browserify needs to include these files
- // but the exports are not important
- /* eslint-disable no-unused-vars */
- var _srcVideojsContribMediaSourcesJs = require('../src/videojs-contrib-media-sources.js');
- /* eslint-disable no-unused-vars */
- _qunit2['default'].module('createObjectURL', {
- beforeEach: function beforeEach() {
- this.fixture = _globalDocument2['default'].getElementById('qunit-fixture');
- this.video = _globalDocument2['default'].createElement('video');
- this.fixture.appendChild(this.video);
- this.player = (0, _videoJs2['default'])(this.video);
- // Mock the environment's timers because certain things - particularly
- // player readiness - are asynchronous in video.js 5.
- this.clock = _sinon2['default'].useFakeTimers();
- this.oldMediaSource = _globalWindow2['default'].MediaSource || _globalWindow2['default'].WebKitMediaSource;
- // force MediaSource support
- if (!_globalWindow2['default'].MediaSource) {
- _globalWindow2['default'].MediaSource = function () {
- var result = new _globalWindow2['default'].Blob();
- result.addEventListener = function () {};
- result.addSourceBuffer = function () {};
- return result;
- };
- }
- },
- afterEach: function afterEach() {
- // The clock _must_ be restored before disposing the player; otherwise,
- // certain timeout listeners that happen inside video.js may throw errors.
- this.clock.restore();
- this.player.dispose();
- _globalWindow2['default'].MediaSource = _globalWindow2['default'].WebKitMediaSource = this.oldMediaSource;
- }
- });
- _qunit2['default'].test('delegates to the native implementation', function () {
- _qunit2['default'].ok(!/blob:vjs-media-source\//.test(_videoJs2['default'].URL.createObjectURL(new _globalWindow2['default'].Blob())), 'created a native blob URL');
- });
- _qunit2['default'].test('uses the native MediaSource when available', function () {
- _qunit2['default'].ok(!/blob:vjs-media-source\//.test(_videoJs2['default'].URL.createObjectURL(new _srcHtmlMediaSource2['default']())), 'created a native blob URL');
- });
- _qunit2['default'].test('emulates a URL for the shim', function () {
- _qunit2['default'].ok(/blob:vjs-media-source\//.test(_videoJs2['default'].URL.createObjectURL(new _srcFlashMediaSource2['default']())), 'created an emulated blob URL');
- });
- _qunit2['default'].test('stores the associated blob URL on the media source', function () {
- var blob = new _globalWindow2['default'].Blob();
- var url = _videoJs2['default'].URL.createObjectURL(blob);
- _qunit2['default'].equal(blob.url_, url, 'captured the generated URL');
- });
- }).call(this,typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {})
- },{"../src/flash-media-source":39,"../src/html-media-source":42,"../src/videojs-contrib-media-sources.js":45,"global/document":2,"global/window":3}],50:[function(require,module,exports){
- (function (global){
- 'use strict';
- function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { 'default': obj }; }
- var _globalDocument = require('global/document');
- var _globalDocument2 = _interopRequireDefault(_globalDocument);
- var _globalWindow = require('global/window');
- var _globalWindow2 = _interopRequireDefault(_globalWindow);
- var _qunit = (typeof window !== "undefined" ? window['QUnit'] : typeof global !== "undefined" ? global['QUnit'] : null);
- var _qunit2 = _interopRequireDefault(_qunit);
- var _sinon = (typeof window !== "undefined" ? window['sinon'] : typeof global !== "undefined" ? global['sinon'] : null);
- var _sinon2 = _interopRequireDefault(_sinon);
- var _videoJs = (typeof window !== "undefined" ? window['videojs'] : typeof global !== "undefined" ? global['videojs'] : null);
- var _videoJs2 = _interopRequireDefault(_videoJs);
- var _muxJs = require('mux.js');
- var _muxJs2 = _interopRequireDefault(_muxJs);
- var _srcFlashSourceBuffer = require('../src/flash-source-buffer');
- var _srcFlashSourceBuffer2 = _interopRequireDefault(_srcFlashSourceBuffer);
- var _srcFlashConstants = require('../src/flash-constants');
- var _srcFlashConstants2 = _interopRequireDefault(_srcFlashConstants);
- // we disable this because browserify needs to include these files
- // but the exports are not important
- /* eslint-disable no-unused-vars */
- var _srcVideojsContribMediaSourcesJs = require('../src/videojs-contrib-media-sources.js');
- /* eslint-disable no-unused-vars */
- // return the sequence of calls to append to the SWF
- var appendCalls = function appendCalls(calls) {
- return calls.filter(function (call) {
- return call.callee && call.callee === 'vjs_appendChunkReady';
- });
- };
- var getFlvHeader = function getFlvHeader() {
- return new Uint8Array([1, 2, 3]);
- };
- var makeFlvTag = function makeFlvTag(pts, data) {
- return {
- pts: pts,
- dts: pts,
- bytes: data
- };
- };
- var timers = undefined;
- var oldSTO = undefined;
- var fakeSTO = function fakeSTO() {
- oldSTO = _globalWindow2['default'].setTimeout;
- timers = [];
- timers.run = function (num) {
- var timer = undefined;
- while (num--) {
- timer = this.pop();
- if (timer) {
- timer();
- }
- }
- };
- timers.runAll = function () {
- while (this.length) {
- this.pop()();
- }
- };
- _globalWindow2['default'].setTimeout = function (callback) {
- timers.push(callback);
- };
- _globalWindow2['default'].setTimeout.fake = true;
- };
- var unfakeSTO = function unfakeSTO() {
- timers = [];
- _globalWindow2['default'].setTimeout = oldSTO;
- };
- // Create a WebWorker-style message that signals the transmuxer is done
- var createDataMessage = function createDataMessage(data, audioData, metadata, captions) {
- var captionStreams = {};
- if (captions) {
- captions.forEach(function (caption) {
- captionStreams[caption.stream] = true;
- });
- }
- return {
- data: {
- action: 'data',
- segment: {
- tags: {
- videoTags: data.map(function (tag) {
- return makeFlvTag(tag.pts, tag.bytes);
- }),
- audioTags: audioData ? audioData.map(function (tag) {
- return makeFlvTag(tag.pts, tag.bytes);
- }) : []
- },
- metadata: metadata,
- captions: captions,
- captionStreams: captionStreams
- }
- }
- };
- };
- var doneMessage = {
- data: {
- action: 'done'
- }
- };
- var postMessage_ = function postMessage_(msg) {
- var _this = this;
- if (msg.action === 'push') {
- _globalWindow2['default'].setTimeout(function () {
- _this.onmessage(createDataMessage([{
- bytes: new Uint8Array(msg.data, msg.byteOffset, msg.byteLength),
- pts: 0
- }]));
- }, 1);
- } else if (msg.action === 'flush') {
- _globalWindow2['default'].setTimeout(function () {
- _this.onmessage(doneMessage);
- }, 1);
- }
- };
- _qunit2['default'].module('Flash MediaSource', {
- beforeEach: function beforeEach(assert) {
- var _this2 = this;
- var swfObj = undefined;
- // Mock the environment's timers because certain things - particularly
- // player readiness - are asynchronous in video.js 5.
- this.clock = _sinon2['default'].useFakeTimers();
- this.fixture = _globalDocument2['default'].getElementById('qunit-fixture');
- this.video = _globalDocument2['default'].createElement('video');
- this.fixture.appendChild(this.video);
- this.player = (0, _videoJs2['default'])(this.video);
- this.oldMediaSource = _globalWindow2['default'].MediaSource || _globalWindow2['default'].WebKitMediaSource;
- _globalWindow2['default'].MediaSource = null;
- _globalWindow2['default'].WebKitMediaSource = null;
- this.Flash = _videoJs2['default'].getTech('Flash');
- this.oldFlashSupport = this.Flash.isSupported;
- this.oldCanPlay = this.Flash.canPlaySource;
- this.Flash.canPlaySource = this.Flash.isSupported = function () {
- return true;
- };
- this.oldFlashTransmuxerPostMessage = _muxJs2['default'].flv.Transmuxer.postMessage;
- this.oldGetFlvHeader = _muxJs2['default'].flv.getFlvHeader;
- _muxJs2['default'].flv.getFlvHeader = getFlvHeader;
- this.swfCalls = [];
- this.mediaSource = new _videoJs2['default'].MediaSource();
- this.player.src({
- src: _videoJs2['default'].URL.createObjectURL(this.mediaSource),
- type: 'video/mp2t'
- });
- // vjs6 takes 1 tick to set source async
- this.clock.tick(1);
- swfObj = _globalDocument2['default'].createElement('fake-object');
- swfObj.id = 'fake-swf-' + assert.test.testId;
- this.player.el().replaceChild(swfObj, this.player.tech_.el());
- this.player.tech_.hls = new _videoJs2['default'].EventTarget();
- this.player.tech_.el_ = swfObj;
- swfObj.tech = this.player.tech_;
- /* eslint-disable camelcase */
- swfObj.vjs_abort = function () {
- _this2.swfCalls.push('abort');
- };
- swfObj.vjs_getProperty = function (attr) {
- if (attr === 'buffered') {
- return [];
- } else if (attr === 'currentTime') {
- return 0;
- // ignored for vjs6
- } else if (attr === 'videoWidth') {
- return 0;
- }
- _this2.swfCalls.push({ attr: attr });
- };
- swfObj.vjs_load = function () {
- _this2.swfCalls.push('load');
- };
- swfObj.vjs_setProperty = function (attr, value) {
- _this2.swfCalls.push({ attr: attr, value: value });
- };
- swfObj.vjs_discontinuity = function (attr, value) {
- _this2.swfCalls.push({ attr: attr, value: value });
- };
- swfObj.vjs_appendChunkReady = function (method) {
- _globalWindow2['default'].setTimeout(function () {
- var chunk = _globalWindow2['default'][method]();
- // only care about the segment data, not the flv header
- if (method.substr(0, 21) === 'vjs_flashEncodedData_') {
- var call = {
- callee: 'vjs_appendChunkReady',
- arguments: [_globalWindow2['default'].atob(chunk).split('').map(function (c) {
- return c.charCodeAt(0);
- })]
- };
- _this2.swfCalls.push(call);
- }
- }, 1);
- };
- swfObj.vjs_adjustCurrentTime = function (value) {
- _this2.swfCalls.push({ call: 'adjustCurrentTime', value: value });
- };
- /* eslint-enable camelcase */
- this.mediaSource.trigger({
- type: 'sourceopen',
- swfId: swfObj.id
- });
- fakeSTO();
- },
- afterEach: function afterEach() {
- _globalWindow2['default'].MediaSource = this.oldMediaSource;
- _globalWindow2['default'].WebKitMediaSource = _globalWindow2['default'].MediaSource;
- this.Flash.isSupported = this.oldFlashSupport;
- this.Flash.canPlaySource = this.oldCanPlay;
- _muxJs2['default'].flv.Transmuxer.postMessage = this.oldFlashTransmuxerPostMessage;
- _muxJs2['default'].flv.getFlvHeader = this.oldGetFlvHeader;
- this.player.dispose();
- this.clock.restore();
- this.swfCalls = [];
- unfakeSTO();
- }
- });
- _qunit2['default'].test('raises an exception for unrecognized MIME types', function () {
- try {
- this.mediaSource.addSourceBuffer('video/garbage');
- } catch (e) {
- _qunit2['default'].ok(e, 'an error was thrown');
- return;
- }
- _qunit2['default'].ok(false, 'no error was thrown');
- });
- _qunit2['default'].test('creates FlashSourceBuffers for video/mp2t', function () {
- _qunit2['default'].ok(this.mediaSource.addSourceBuffer('video/mp2t') instanceof _srcFlashSourceBuffer2['default'], 'create source buffer');
- });
- _qunit2['default'].test('creates FlashSourceBuffers for audio/mp2t', function () {
- _qunit2['default'].ok(this.mediaSource.addSourceBuffer('audio/mp2t') instanceof _srcFlashSourceBuffer2['default'], 'create source buffer');
- });
- _qunit2['default'].test('waits for the next tick to append', function () {
- var sourceBuffer = this.mediaSource.addSourceBuffer('video/mp2t');
- sourceBuffer.transmuxer_.postMessage = postMessage_;
- _qunit2['default'].equal(this.swfCalls.length, 1, 'made one call on init');
- _qunit2['default'].equal(this.swfCalls[0], 'load', 'called load');
- sourceBuffer.appendBuffer(new Uint8Array([0, 1]));
- this.swfCalls = appendCalls(this.swfCalls);
- _qunit2['default'].strictEqual(this.swfCalls.length, 0, 'no appends were made');
- });
- _qunit2['default'].test('passes bytes to Flash', function () {
- var sourceBuffer = this.mediaSource.addSourceBuffer('video/mp2t');
- sourceBuffer.transmuxer_.postMessage = postMessage_;
- this.swfCalls.length = 0;
- sourceBuffer.appendBuffer(new Uint8Array([0, 1]));
- timers.runAll();
- timers.runAll();
- _qunit2['default'].ok(this.swfCalls.length, 'the SWF was called');
- this.swfCalls = appendCalls(this.swfCalls);
- _qunit2['default'].strictEqual(this.swfCalls[0].callee, 'vjs_appendChunkReady', 'called vjs_appendChunkReady');
- _qunit2['default'].deepEqual(this.swfCalls[0].arguments[0], [0, 1], 'passed the base64 encoded data');
- });
- _qunit2['default'].test('passes chunked bytes to Flash', function () {
- var sourceBuffer = this.mediaSource.addSourceBuffer('video/mp2t');
- var oldChunkSize = _srcFlashConstants2['default'].BYTES_PER_CHUNK;
- sourceBuffer.transmuxer_.postMessage = postMessage_;
- _srcFlashConstants2['default'].BYTES_PER_CHUNK = 2;
- this.swfCalls.length = 0;
- sourceBuffer.appendBuffer(new Uint8Array([0, 1, 2, 3, 4]));
- timers.runAll();
- _qunit2['default'].ok(this.swfCalls.length, 'the SWF was called');
- this.swfCalls = appendCalls(this.swfCalls);
- _qunit2['default'].equal(this.swfCalls.length, 3, 'the SWF received 3 chunks');
- _qunit2['default'].strictEqual(this.swfCalls[0].callee, 'vjs_appendChunkReady', 'called vjs_appendChunkReady');
- _qunit2['default'].deepEqual(this.swfCalls[0].arguments[0], [0, 1], 'passed the base64 encoded data');
- _qunit2['default'].deepEqual(this.swfCalls[1].arguments[0], [2, 3], 'passed the base64 encoded data');
- _qunit2['default'].deepEqual(this.swfCalls[2].arguments[0], [4], 'passed the base64 encoded data');
- _srcFlashConstants2['default'].BYTES_PER_CHUNK = oldChunkSize;
- });
- _qunit2['default'].test('clears the SWF on seeking', function () {
- var aborts = 0;
- this.mediaSource.addSourceBuffer('video/mp2t');
- // track calls to abort()
- /* eslint-disable camelcase */
- this.mediaSource.swfObj.vjs_abort = function () {
- aborts++;
- };
- /* eslint-enable camelcase */
- this.mediaSource.tech_.trigger('seeking');
- _qunit2['default'].strictEqual(1, aborts, 'aborted pending buffer');
- });
- _qunit2['default'].test('drops tags before currentTime when seeking', function () {
- var sourceBuffer = this.mediaSource.addSourceBuffer('video/mp2t');
- var i = 10;
- var currentTime = undefined;
- var tags_ = [];
- sourceBuffer.transmuxer_.postMessage = postMessage_;
- this.mediaSource.tech_.currentTime = function () {
- return currentTime;
- };
- // push a tag into the buffer to establish the starting PTS value
- currentTime = 0;
- sourceBuffer.transmuxer_.onmessage(createDataMessage([{
- pts: 19 * 1000,
- bytes: new Uint8Array(1)
- }]));
- timers.runAll();
- sourceBuffer.appendBuffer(new Uint8Array(10));
- timers.runAll();
- // mock out a new segment of FLV tags, starting 10s after the
- // starting PTS value
- while (i--) {
- tags_.unshift({
- pts: i * 1000 + 29 * 1000,
- bytes: new Uint8Array([i])
- });
- }
- var dataMessage = createDataMessage(tags_);
- // mock gop start at seek point
- dataMessage.data.segment.tags.videoTags[7].keyFrame = true;
- sourceBuffer.transmuxer_.onmessage(dataMessage);
- // seek to 7 seconds into the new swegment
- this.mediaSource.tech_.seeking = function () {
- return true;
- };
- currentTime = 10 + 7;
- this.mediaSource.tech_.trigger('seeking');
- sourceBuffer.appendBuffer(new Uint8Array(10));
- this.swfCalls.length = 0;
- timers.runAll();
- _qunit2['default'].deepEqual(this.swfCalls[0].arguments[0], [7, 8, 9], 'three tags are appended');
- });
- _qunit2['default'].test('drops audio and video (complete gops) tags before the buffered end always', function () {
- var sourceBuffer = this.mediaSource.addSourceBuffer('video/mp2t');
- var endTime = undefined;
- var videoTags_ = [];
- var audioTags_ = [];
- sourceBuffer.transmuxer_.postMessage = postMessage_;
- this.mediaSource.tech_.buffered = function () {
- return _videoJs2['default'].createTimeRange([[0, endTime]]);
- };
- // push a tag into the buffer to establish the starting PTS value
- endTime = 0;
- // mock buffering 17 seconds of data so flash source buffer internal end of buffer
- // tracking is accurate
- var i = 17;
- while (i--) {
- videoTags_.unshift({
- pts: i * 1000 + 19 * 1000,
- bytes: new Uint8Array(1)
- });
- }
- i = 17;
- while (i--) {
- audioTags_.unshift({
- pts: i * 1000 + 19 * 1000,
- bytes: new Uint8Array(1)
- });
- }
- var dataMessage = createDataMessage(videoTags_, audioTags_);
- sourceBuffer.transmuxer_.onmessage(dataMessage);
- timers.runAll();
- sourceBuffer.appendBuffer(new Uint8Array(10));
- timers.runAll();
- i = 10;
- videoTags_ = [];
- audioTags_ = [];
- // mock out a new segment of FLV tags, starting 10s after the
- // starting PTS value
- while (i--) {
- videoTags_.unshift({
- pts: i * 1000 + 29 * 1000,
- bytes: new Uint8Array([i])
- });
- }
- i = 10;
- while (i--) {
- audioTags_.unshift({
- pts: i * 1000 + 29 * 1000,
- bytes: new Uint8Array([i + 100])
- });
- }
- dataMessage = createDataMessage(videoTags_, audioTags_);
- dataMessage.data.segment.tags.videoTags[0].keyFrame = true;
- dataMessage.data.segment.tags.videoTags[3].keyFrame = true;
- dataMessage.data.segment.tags.videoTags[6].keyFrame = true;
- dataMessage.data.segment.tags.videoTags[8].keyFrame = true;
- sourceBuffer.transmuxer_.onmessage(dataMessage);
- endTime = 10 + 7;
- sourceBuffer.appendBuffer(new Uint8Array(10));
- this.swfCalls.length = 0;
- timers.runAll();
- // end of buffer is 17 seconds
- // frames 0-6 for video have pts values less than 17 seconds
- // since frame 6 is a key frame, it should still be appended to preserve the entire gop
- // so we should have appeneded frames 6 - 9
- // frames 100-106 for audio have pts values less than 17 seconds
- // but since we appended an extra video frame, we should also append audio frames
- // to fill in the gap in audio. This means we should be appending audio frames
- // 106, 107, 108, 109
- // Append order is 6, 7, 107, 8, 108, 9, 109 since we order tags based on dts value
- _qunit2['default'].deepEqual(this.swfCalls[0].arguments[0], [6, 106, 7, 107, 8, 108, 9, 109], 'audio and video tags properly dropped');
- });
- _qunit2['default'].test('seeking into the middle of a GOP adjusts currentTime to the start of the GOP', function () {
- var sourceBuffer = this.mediaSource.addSourceBuffer('video/mp2t');
- var i = 10;
- var currentTime = undefined;
- var tags_ = [];
- sourceBuffer.transmuxer_.postMessage = postMessage_;
- this.mediaSource.tech_.currentTime = function () {
- return currentTime;
- };
- // push a tag into the buffer to establish the starting PTS value
- currentTime = 0;
- var dataMessage = createDataMessage([{
- pts: 19 * 1000,
- bytes: new Uint8Array(1)
- }]);
- sourceBuffer.transmuxer_.onmessage(dataMessage);
- timers.runAll();
- sourceBuffer.appendBuffer(new Uint8Array(10));
- timers.runAll();
- // mock out a new segment of FLV tags, starting 10s after the
- // starting PTS value
- while (i--) {
- tags_.unshift({
- pts: i * 1000 + 29 * 1000,
- bytes: new Uint8Array([i])
- });
- }
- dataMessage = createDataMessage(tags_);
- // mock the GOP structure
- dataMessage.data.segment.tags.videoTags[0].keyFrame = true;
- dataMessage.data.segment.tags.videoTags[3].keyFrame = true;
- dataMessage.data.segment.tags.videoTags[5].keyFrame = true;
- dataMessage.data.segment.tags.videoTags[8].keyFrame = true;
- sourceBuffer.transmuxer_.onmessage(dataMessage);
- // seek to 7 seconds into the new swegment
- this.mediaSource.tech_.seeking = function () {
- return true;
- };
- currentTime = 10 + 7;
- this.mediaSource.tech_.trigger('seeking');
- sourceBuffer.appendBuffer(new Uint8Array(10));
- this.swfCalls.length = 0;
- timers.runAll();
- _qunit2['default'].deepEqual(this.swfCalls[0], { call: 'adjustCurrentTime', value: 15 });
- _qunit2['default'].deepEqual(this.swfCalls[1].arguments[0], [5, 6, 7, 8, 9], '5 tags are appended');
- });
- _qunit2['default'].test('GOP trimming accounts for metadata tags prepended to key frames by mux.js', function () {
- var sourceBuffer = this.mediaSource.addSourceBuffer('video/mp2t');
- var i = 10;
- var currentTime = undefined;
- var tags_ = [];
- sourceBuffer.transmuxer_.postMessage = postMessage_;
- this.mediaSource.tech_.currentTime = function () {
- return currentTime;
- };
- // push a tag into the buffer to establish the starting PTS value
- currentTime = 0;
- var dataMessage = createDataMessage([{
- pts: 19 * 1000,
- bytes: new Uint8Array(1)
- }]);
- sourceBuffer.transmuxer_.onmessage(dataMessage);
- timers.runAll();
- sourceBuffer.appendBuffer(new Uint8Array(10));
- timers.runAll();
- // mock out a new segment of FLV tags, starting 10s after the
- // starting PTS value
- while (i--) {
- tags_.unshift({
- pts: i * 1000 + 29 * 1000,
- bytes: new Uint8Array([i])
- });
- }
- // add in the metadata tags
- tags_.splice(8, 0, {
- pts: tags_[8].pts,
- bytes: new Uint8Array([8])
- }, {
- pts: tags_[8].pts,
- bytes: new Uint8Array([8])
- });
- tags_.splice(5, 0, {
- pts: tags_[5].pts,
- bytes: new Uint8Array([5])
- }, {
- pts: tags_[5].pts,
- bytes: new Uint8Array([5])
- });
- tags_.splice(0, 0, {
- pts: tags_[0].pts,
- bytes: new Uint8Array([0])
- }, {
- pts: tags_[0].pts,
- bytes: new Uint8Array([0])
- });
- dataMessage = createDataMessage(tags_);
- // mock the GOP structure + metadata tags
- // if we see a metadata tag, that means the next tag will also be a metadata tag with
- // keyFrame true and the tag after that will be the keyFrame
- // e.g.
- // { keyFrame: false, metaDataTag: true},
- // { keyFrame: true, metaDataTag: true},
- // { keyFrame: true, metaDataTag: false}
- dataMessage.data.segment.tags.videoTags[0].metaDataTag = true;
- dataMessage.data.segment.tags.videoTags[1].metaDataTag = true;
- dataMessage.data.segment.tags.videoTags[1].keyFrame = true;
- dataMessage.data.segment.tags.videoTags[2].keyFrame = true;
- // no metadata tags in front of this key to test the case where mux.js does not prepend
- // the metadata tags
- dataMessage.data.segment.tags.videoTags[5].keyFrame = true;
- dataMessage.data.segment.tags.videoTags[7].metaDataTag = true;
- dataMessage.data.segment.tags.videoTags[8].metaDataTag = true;
- dataMessage.data.segment.tags.videoTags[8].keyFrame = true;
- dataMessage.data.segment.tags.videoTags[9].keyFrame = true;
- dataMessage.data.segment.tags.videoTags[12].metaDataTag = true;
- dataMessage.data.segment.tags.videoTags[13].metaDataTag = true;
- dataMessage.data.segment.tags.videoTags[13].keyFrame = true;
- dataMessage.data.segment.tags.videoTags[14].keyFrame = true;
- sourceBuffer.transmuxer_.onmessage(dataMessage);
- // seek to 7 seconds into the new swegment
- this.mediaSource.tech_.seeking = function () {
- return true;
- };
- currentTime = 10 + 7;
- this.mediaSource.tech_.trigger('seeking');
- sourceBuffer.appendBuffer(new Uint8Array(10));
- this.swfCalls.length = 0;
- timers.runAll();
- _qunit2['default'].deepEqual(this.swfCalls[0], { call: 'adjustCurrentTime', value: 15 });
- _qunit2['default'].deepEqual(this.swfCalls[1].arguments[0], [5, 5, 5, 6, 7, 8, 8, 8, 9], '10 tags are appended, 4 of which are metadata tags');
- });
- _qunit2['default'].test('drops all tags if target pts append time does not fall within segment', function () {
- var sourceBuffer = this.mediaSource.addSourceBuffer('video/mp2t');
- var i = 10;
- var currentTime = undefined;
- var tags_ = [];
- this.mediaSource.tech_.currentTime = function () {
- return currentTime;
- };
- sourceBuffer.transmuxer_.postMessage = postMessage_;
- // push a tag into the buffer to establish the starting PTS value
- currentTime = 0;
- var dataMessage = createDataMessage([{
- pts: 19 * 1000,
- bytes: new Uint8Array(1)
- }]);
- sourceBuffer.transmuxer_.onmessage(dataMessage);
- timers.runAll();
- sourceBuffer.appendBuffer(new Uint8Array(10));
- timers.runAll();
- // mock out a new segment of FLV tags, starting 10s after the
- // starting PTS value
- while (i--) {
- tags_.unshift({
- pts: i * 1000 + 19 * 1000,
- bytes: new Uint8Array([i])
- });
- }
- dataMessage = createDataMessage(tags_);
- // mock the GOP structure
- dataMessage.data.segment.tags.videoTags[0].keyFrame = true;
- dataMessage.data.segment.tags.videoTags[3].keyFrame = true;
- dataMessage.data.segment.tags.videoTags[5].keyFrame = true;
- dataMessage.data.segment.tags.videoTags[8].keyFrame = true;
- sourceBuffer.transmuxer_.onmessage(dataMessage);
- // seek to 7 seconds into the new swegment
- this.mediaSource.tech_.seeking = function () {
- return true;
- };
- currentTime = 10 + 7;
- this.mediaSource.tech_.trigger('seeking');
- sourceBuffer.appendBuffer(new Uint8Array(10));
- this.swfCalls.length = 0;
- timers.runAll();
- _qunit2['default'].equal(this.swfCalls.length, 0, 'dropped all tags and made no swf calls');
- });
- _qunit2['default'].test('seek targeting accounts for changing timestampOffsets', function () {
- var sourceBuffer = this.mediaSource.addSourceBuffer('video/mp2t');
- var i = 10;
- var tags_ = [];
- var currentTime = undefined;
- this.mediaSource.tech_.currentTime = function () {
- return currentTime;
- };
- sourceBuffer.transmuxer_.postMessage = postMessage_;
- var dataMessage = createDataMessage([{
- pts: 19 * 1000,
- bytes: new Uint8Array(1)
- }]);
- // push a tag into the buffer to establish the starting PTS value
- currentTime = 0;
- sourceBuffer.transmuxer_.onmessage(dataMessage);
- timers.runAll();
- // to seek across a discontinuity:
- // 1. set the timestamp offset to the media timeline position for
- // the start of the segment
- // 2. set currentTime to the desired media timeline position
- sourceBuffer.timestampOffset = 22;
- currentTime = sourceBuffer.timestampOffset + 3.5;
- this.mediaSource.tech_.seeking = function () {
- return true;
- };
- // the new segment FLV tags are at disjoint PTS positions
- while (i--) {
- tags_.unshift({
- // (101 * 1000) !== the old PTS offset
- pts: i * 1000 + 101 * 1000,
- bytes: new Uint8Array([i + sourceBuffer.timestampOffset])
- });
- }
- dataMessage = createDataMessage(tags_);
- // mock gop start at seek point
- dataMessage.data.segment.tags.videoTags[3].keyFrame = true;
- sourceBuffer.transmuxer_.onmessage(dataMessage);
- this.mediaSource.tech_.trigger('seeking');
- this.swfCalls.length = 0;
- timers.runAll();
- _qunit2['default'].equal(this.swfCalls[0].value, 25, 'adjusted current time');
- _qunit2['default'].deepEqual(this.swfCalls[1].arguments[0], [25, 26, 27, 28, 29, 30, 31], 'filtered the appended tags');
- });
- _qunit2['default'].test('calling endOfStream sets mediaSource readyState to ended', function () {
- var _this3 = this;
- var sourceBuffer = this.mediaSource.addSourceBuffer('video/mp2t');
- sourceBuffer.transmuxer_.postMessage = postMessage_;
- /* eslint-disable camelcase */
- this.mediaSource.swfObj.vjs_endOfStream = function () {
- _this3.swfCalls.push('endOfStream');
- };
- /* eslint-enable camelcase */
- sourceBuffer.addEventListener('updateend', function () {
- _this3.mediaSource.endOfStream();
- });
- this.swfCalls.length = 0;
- sourceBuffer.appendBuffer(new Uint8Array([0, 1]));
- timers.runAll();
- _qunit2['default'].strictEqual(sourceBuffer.mediaSource_.readyState, 'ended', 'readyState is \'ended\'');
- _qunit2['default'].strictEqual(this.swfCalls.length, 2, 'made two calls to swf');
- _qunit2['default'].deepEqual(this.swfCalls.shift().arguments[0], [0, 1], 'contains the data');
- _qunit2['default'].ok(this.swfCalls.shift().indexOf('endOfStream') === 0, 'the second call should be for the updateend');
- _qunit2['default'].strictEqual(timers.length, 0, 'no more appends are scheduled');
- });
- _qunit2['default'].test('opens the stream on sourceBuffer.appendBuffer after endOfStream', function () {
- var _this4 = this;
- var sourceBuffer = this.mediaSource.addSourceBuffer('video/mp2t');
- var foo = function foo() {
- _this4.mediaSource.endOfStream();
- sourceBuffer.removeEventListener('updateend', foo);
- };
- sourceBuffer.transmuxer_.postMessage = postMessage_;
- /* eslint-disable camelcase */
- this.mediaSource.swfObj.vjs_endOfStream = function () {
- _this4.swfCalls.push('endOfStream');
- };
- /* eslint-enable camelcase */
- sourceBuffer.addEventListener('updateend', foo);
- this.swfCalls.length = 0;
- sourceBuffer.appendBuffer(new Uint8Array([0, 1]));
- timers.runAll();
- _qunit2['default'].strictEqual(this.swfCalls.length, 2, 'made two calls to swf');
- _qunit2['default'].deepEqual(this.swfCalls.shift().arguments[0], [0, 1], 'contains the data');
- _qunit2['default'].equal(this.swfCalls.shift(), 'endOfStream', 'the second call should be for the updateend');
- sourceBuffer.appendBuffer(new Uint8Array([2, 3]));
- // remove previous video pts save because mock appends don't have actual timing data
- sourceBuffer.videoBufferEnd_ = NaN;
- timers.runAll();
- _qunit2['default'].strictEqual(this.swfCalls.length, 1, 'made one more append');
- _qunit2['default'].deepEqual(this.swfCalls.shift().arguments[0], [2, 3], 'contains the third and fourth bytes');
- _qunit2['default'].strictEqual(sourceBuffer.mediaSource_.readyState, 'open', 'The streams should be open if more bytes are appended to an "ended" stream');
- _qunit2['default'].strictEqual(timers.length, 0, 'no more appends are scheduled');
- });
- _qunit2['default'].test('abort() clears any buffered input', function () {
- var sourceBuffer = this.mediaSource.addSourceBuffer('video/mp2t');
- sourceBuffer.transmuxer_.postMessage = postMessage_;
- this.swfCalls.length = 0;
- sourceBuffer.appendBuffer(new Uint8Array([0]));
- sourceBuffer.abort();
- timers.pop()();
- _qunit2['default'].strictEqual(this.swfCalls.length, 1, 'called the swf');
- _qunit2['default'].strictEqual(this.swfCalls[0], 'abort', 'invoked abort');
- });
- // requestAnimationFrame is heavily throttled or unscheduled when
- // the browser tab running contrib-media-sources is in a background
- // tab. If that happens, video data can continuously build up in
- // memory and cause the tab or browser to crash.
- _qunit2['default'].test('does not use requestAnimationFrame', function () {
- var oldRFA = _globalWindow2['default'].requestAnimationFrame;
- var requests = 0;
- var sourceBuffer = undefined;
- _globalWindow2['default'].requestAnimationFrame = function () {
- requests++;
- };
- sourceBuffer = this.mediaSource.addSourceBuffer('video/mp2t');
- sourceBuffer.transmuxer_.postMessage = postMessage_;
- sourceBuffer.appendBuffer(new Uint8Array([0, 1, 2, 3]));
- while (timers.length) {
- timers.pop()();
- }
- _qunit2['default'].equal(requests, 0, 'no calls to requestAnimationFrame were made');
- _globalWindow2['default'].requestAnimationFrame = oldRFA;
- });
- _qunit2['default'].test('updating is true while an append is in progress', function () {
- var sourceBuffer = this.mediaSource.addSourceBuffer('video/mp2t');
- var ended = false;
- sourceBuffer.transmuxer_.postMessage = postMessage_;
- sourceBuffer.addEventListener('updateend', function () {
- ended = true;
- });
- sourceBuffer.appendBuffer(new Uint8Array([0, 1]));
- _qunit2['default'].equal(sourceBuffer.updating, true, 'updating is set');
- while (!ended) {
- timers.pop()();
- }
- _qunit2['default'].equal(sourceBuffer.updating, false, 'updating is unset');
- });
- _qunit2['default'].test('throws an error if append is called while updating', function () {
- var sourceBuffer = this.mediaSource.addSourceBuffer('video/mp2t');
- sourceBuffer.appendBuffer(new Uint8Array([0, 1]));
- sourceBuffer.transmuxer_.postMessage = postMessage_;
- _qunit2['default'].throws(function () {
- sourceBuffer.appendBuffer(new Uint8Array([0, 1]));
- }, function (e) {
- return e.name === 'InvalidStateError' && e.code === _globalWindow2['default'].DOMException.INVALID_STATE_ERR;
- }, 'threw an InvalidStateError');
- });
- _qunit2['default'].test('stops updating if abort is called', function () {
- var sourceBuffer = this.mediaSource.addSourceBuffer('video/mp2t');
- var updateEnds = 0;
- sourceBuffer.transmuxer_.postMessage = postMessage_;
- sourceBuffer.addEventListener('updateend', function () {
- updateEnds++;
- });
- sourceBuffer.appendBuffer(new Uint8Array([0, 1]));
- sourceBuffer.abort();
- _qunit2['default'].equal(sourceBuffer.updating, false, 'no longer updating');
- _qunit2['default'].equal(updateEnds, 1, 'triggered updateend');
- });
- _qunit2['default'].test('forwards duration overrides to the SWF', function () {
- /* eslint-disable no-unused-vars */
- var ignored = this.mediaSource.duration;
- /* eslint-enable no-unused-vars */
- _qunit2['default'].deepEqual(this.swfCalls[1], {
- attr: 'duration'
- }, 'requests duration from the SWF');
- this.mediaSource.duration = 101.3;
- // Setting a duration results in two calls to the swf
- // Ignore the first call (this.swfCalls[2]) as it was just to get the
- // current duration
- _qunit2['default'].deepEqual(this.swfCalls[3], {
- attr: 'duration', value: 101.3
- }, 'set the duration override');
- });
- _qunit2['default'].test('returns NaN for duration before the SWF is ready', function () {
- this.mediaSource.swfObj = null;
- _qunit2['default'].ok(isNaN(this.mediaSource.duration), 'duration is NaN');
- });
- _qunit2['default'].test('calculates the base PTS for the media', function () {
- var sourceBuffer = this.mediaSource.addSourceBuffer('video/mp2t');
- var tags_ = [];
- sourceBuffer.transmuxer_.postMessage = postMessage_;
- // seek to 15 seconds
- this.player.tech_.seeking = function () {
- return true;
- };
- this.player.tech_.currentTime = function () {
- return 15;
- };
- // FLV tags for this segment start at 10 seconds in the media
- // timeline
- tags_.push(
- // zero in the media timeline is PTS 3
- { pts: (10 + 3) * 1000, bytes: new Uint8Array([10]) }, { pts: (15 + 3) * 1000, bytes: new Uint8Array([15]) });
- var dataMessage = createDataMessage(tags_);
- // mock gop start at seek point
- dataMessage.data.segment.tags.videoTags[1].keyFrame = true;
- sourceBuffer.transmuxer_.onmessage(dataMessage);
- // let the source buffer know the segment start time
- sourceBuffer.timestampOffset = 10;
- this.swfCalls.length = 0;
- timers.runAll();
- _qunit2['default'].equal(this.swfCalls.length, 1, 'made a SWF call');
- _qunit2['default'].deepEqual(this.swfCalls[0].arguments[0], [15], 'dropped the early tag');
- });
- _qunit2['default'].test('remove fires update events', function () {
- var sourceBuffer = this.mediaSource.addSourceBuffer('video/mp2t');
- var events = [];
- sourceBuffer.transmuxer_.postMessage = postMessage_;
- sourceBuffer.on(['update', 'updateend'], function (event) {
- events.push(event.type);
- });
- sourceBuffer.remove(0, 1);
- _qunit2['default'].deepEqual(events, ['update', 'updateend'], 'fired update events');
- _qunit2['default'].equal(sourceBuffer.updating, false, 'finished updating');
- });
- _qunit2['default'].test('passes endOfStream network errors to the tech', function () {
- this.mediaSource.readyState = 'ended';
- this.mediaSource.endOfStream('network');
- _qunit2['default'].equal(this.player.tech_.error().code, 2, 'set a network error');
- });
- _qunit2['default'].test('passes endOfStream decode errors to the tech', function () {
- this.mediaSource.readyState = 'ended';
- this.mediaSource.endOfStream('decode');
- _qunit2['default'].equal(this.player.tech_.error().code, 3, 'set a decode error');
- });
- _qunit2['default'].test('has addSeekableRange()', function () {
- _qunit2['default'].ok(this.mediaSource.addSeekableRange_, 'has addSeekableRange_');
- });
- _qunit2['default'].test('fires loadedmetadata after first segment append', function () {
- var loadedmetadataCount = 0;
- this.mediaSource.tech_.on('loadedmetadata', function () {
- return loadedmetadataCount++;
- });
- var sourceBuffer = this.mediaSource.addSourceBuffer('video/mp2t');
- sourceBuffer.transmuxer_.postMessage = postMessage_;
- _qunit2['default'].equal(loadedmetadataCount, 0, 'loadedmetadata not called on buffer creation');
- sourceBuffer.appendBuffer(new Uint8Array([0, 1]));
- _qunit2['default'].equal(loadedmetadataCount, 0, 'loadedmetadata not called on segment append');
- timers.runAll();
- _qunit2['default'].equal(loadedmetadataCount, 1, 'loadedmetadata fires after first append');
- sourceBuffer.appendBuffer(new Uint8Array([0, 1]));
- timers.runAll();
- _qunit2['default'].equal(loadedmetadataCount, 1, 'loadedmetadata does not fire after second append');
- });
- }).call(this,typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {})
- },{"../src/flash-constants":38,"../src/flash-source-buffer":40,"../src/videojs-contrib-media-sources.js":45,"global/document":2,"global/window":3,"mux.js":16}],51:[function(require,module,exports){
- (function (global){
- 'use strict';
- function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { 'default': obj }; }
- var _globalDocument = require('global/document');
- var _globalDocument2 = _interopRequireDefault(_globalDocument);
- var _globalWindow = require('global/window');
- var _globalWindow2 = _interopRequireDefault(_globalWindow);
- var _qunit = (typeof window !== "undefined" ? window['QUnit'] : typeof global !== "undefined" ? global['QUnit'] : null);
- var _qunit2 = _interopRequireDefault(_qunit);
- var _sinon = (typeof window !== "undefined" ? window['sinon'] : typeof global !== "undefined" ? global['sinon'] : null);
- var _sinon2 = _interopRequireDefault(_sinon);
- var _videoJs = (typeof window !== "undefined" ? window['videojs'] : typeof global !== "undefined" ? global['videojs'] : null);
- var _videoJs2 = _interopRequireDefault(_videoJs);
- var _srcHtmlMediaSource = require('../src/html-media-source');
- var _srcHtmlMediaSource2 = _interopRequireDefault(_srcHtmlMediaSource);
- var _srcVirtualSourceBuffer = require('../src/virtual-source-buffer');
- // we disable this because browserify needs to include these files
- // but the exports are not important
- /* eslint-disable no-unused-vars */
- var _srcVideojsContribMediaSourcesJs = require('../src/videojs-contrib-media-sources.js');
- /* eslint-disable no-unused-vars */
- _qunit2['default'].module('videojs-contrib-media-sources - HTML', {
- beforeEach: function beforeEach() {
- this.fixture = _globalDocument2['default'].getElementById('qunit-fixture');
- this.video = _globalDocument2['default'].createElement('video');
- this.fixture.appendChild(this.video);
- this.source = _globalDocument2['default'].createElement('source');
- this.player = (0, _videoJs2['default'])(this.video);
- // add a fake source so that we can get this.player_ on sourceopen
- this.url = 'fake.ts';
- this.source.src = this.url;
- this.video.appendChild(this.source);
- // Mock the environment's timers because certain things - particularly
- // player readiness - are asynchronous in video.js 5.
- this.clock = _sinon2['default'].useFakeTimers();
- this.oldMediaSource = _globalWindow2['default'].MediaSource || _globalWindow2['default'].WebKitMediaSource;
- _globalWindow2['default'].MediaSource = _videoJs2['default'].extend(_videoJs2['default'].EventTarget, {
- constructor: function constructor() {
- this.isNative = true;
- this.sourceBuffers = [];
- this.duration = NaN;
- },
- addSourceBuffer: function addSourceBuffer(type) {
- var buffer = new (_videoJs2['default'].extend(_videoJs2['default'].EventTarget, {
- type: type,
- appendBuffer: function appendBuffer() {}
- }))();
- this.sourceBuffers.push(buffer);
- return buffer;
- }
- });
- _globalWindow2['default'].MediaSource.isTypeSupported = function (mime) {
- return true;
- };
- _globalWindow2['default'].WebKitMediaSource = _globalWindow2['default'].MediaSource;
- },
- afterEach: function afterEach() {
- this.clock.restore();
- this.player.dispose();
- _globalWindow2['default'].MediaSource = this.oldMediaSource;
- _globalWindow2['default'].WebKitMediaSource = _globalWindow2['default'].MediaSource;
- }
- });
- _qunit2['default'].test('constructs a native MediaSource', function () {
- _qunit2['default'].ok(new _videoJs2['default'].MediaSource().nativeMediaSource_.isNative, 'constructed a MediaSource');
- });
- var createDataMessage = function createDataMessage(type, typedArray, extraObject) {
- var message = {
- data: {
- action: 'data',
- segment: {
- type: type,
- data: typedArray.buffer,
- initSegment: {
- data: typedArray.buffer,
- byteOffset: typedArray.byteOffset,
- byteLength: typedArray.byteLength
- }
- },
- byteOffset: typedArray.byteOffset,
- byteLength: typedArray.byteLength
- }
- };
- return Object.keys(extraObject || {}).reduce(function (obj, key) {
- obj.data.segment[key] = extraObject[key];
- return obj;
- }, message);
- };
- // Create a WebWorker-style message that signals the transmuxer is done
- var doneMessage = {
- data: {
- action: 'done'
- }
- };
- // send fake data to the transmuxer to trigger the creation of the
- // native source buffers
- var initializeNativeSourceBuffers = function initializeNativeSourceBuffers(sourceBuffer) {
- // initialize an audio source buffer
- sourceBuffer.transmuxer_.onmessage(createDataMessage('audio', new Uint8Array(1)));
- // initialize a video source buffer
- sourceBuffer.transmuxer_.onmessage(createDataMessage('video', new Uint8Array(1)));
- // instruct the transmuxer to flush the "data" it has buffered so
- // far
- sourceBuffer.transmuxer_.onmessage(doneMessage);
- };
- _qunit2['default'].test('creates mp4 source buffers for mp2t segments', function () {
- var mediaSource = new _videoJs2['default'].MediaSource();
- var sourceBuffer = mediaSource.addSourceBuffer('video/mp2t');
- initializeNativeSourceBuffers(sourceBuffer);
- _qunit2['default'].ok(mediaSource.videoBuffer_, 'created a video buffer');
- _qunit2['default'].equal(mediaSource.videoBuffer_.type, 'video/mp4;codecs="avc1.4d400d"', 'video buffer has the default codec');
- _qunit2['default'].ok(mediaSource.audioBuffer_, 'created an audio buffer');
- _qunit2['default'].equal(mediaSource.audioBuffer_.type, 'audio/mp4;codecs="mp4a.40.2"', 'audio buffer has the default codec');
- _qunit2['default'].equal(mediaSource.sourceBuffers.length, 1, 'created one virtual buffer');
- _qunit2['default'].equal(mediaSource.sourceBuffers[0], sourceBuffer, 'returned the virtual buffer');
- _qunit2['default'].ok(sourceBuffer.transmuxer_, 'created a transmuxer');
- });
- _qunit2['default'].test('the terminate is called on the transmuxer when the media source is killed', function () {
- var mediaSource = new _videoJs2['default'].MediaSource();
- var sourceBuffer = mediaSource.addSourceBuffer('video/mp2t');
- var terminates = 0;
- sourceBuffer.transmuxer_ = {
- terminate: function terminate() {
- terminates++;
- }
- };
- mediaSource.trigger('sourceclose');
- _qunit2['default'].equal(terminates, 1, 'called terminate on transmux web worker');
- });
- _qunit2['default'].test('duration is faked when playing a live stream', function () {
- var mediaSource = new _videoJs2['default'].MediaSource();
- var sourceBuffer = mediaSource.addSourceBuffer('video/mp2t');
- mediaSource.duration = Infinity;
- mediaSource.nativeMediaSource_.duration = 100;
- _qunit2['default'].equal(mediaSource.nativeMediaSource_.duration, 100, 'native duration was not set to infinity');
- _qunit2['default'].equal(mediaSource.duration, Infinity, 'the MediaSource wrapper pretends it has an infinite duration');
- });
- _qunit2['default'].test('duration uses the underlying MediaSource\'s duration when not live', function () {
- var mediaSource = new _videoJs2['default'].MediaSource();
- var sourceBuffer = mediaSource.addSourceBuffer('video/mp2t');
- mediaSource.duration = 100;
- mediaSource.nativeMediaSource_.duration = 120;
- _qunit2['default'].equal(mediaSource.duration, 120, 'the MediaSource wrapper returns the native duration');
- });
- _qunit2['default'].test('abort on the fake source buffer calls abort on the real ones', function () {
- var mediaSource = new _videoJs2['default'].MediaSource();
- var sourceBuffer = mediaSource.addSourceBuffer('video/mp2t');
- var messages = [];
- var aborts = 0;
- initializeNativeSourceBuffers(sourceBuffer);
- sourceBuffer.transmuxer_.postMessage = function (message) {
- messages.push(message);
- };
- sourceBuffer.bufferUpdating_ = true;
- sourceBuffer.videoBuffer_.abort = function () {
- aborts++;
- };
- sourceBuffer.audioBuffer_.abort = function () {
- aborts++;
- };
- sourceBuffer.abort();
- _qunit2['default'].equal(aborts, 2, 'called abort on both');
- _qunit2['default'].equal(sourceBuffer.bufferUpdating_, false, 'set updating to false');
- _qunit2['default'].equal(messages.length, 1, 'has one message');
- _qunit2['default'].equal(messages[0].action, 'reset', 'reset called on transmuxer');
- });
- _qunit2['default'].test('calling remove deletes cues and invokes remove on any extant source buffers', function () {
- var mediaSource = new _videoJs2['default'].MediaSource();
- var sourceBuffer = mediaSource.addSourceBuffer('video/mp2t');
- var removedCue = [];
- var removes = 0;
- initializeNativeSourceBuffers(sourceBuffer);
- sourceBuffer.inbandTextTracks_ = {
- CC1: {
- removeCue: function removeCue(cue) {
- removedCue.push(cue);
- this.cues.splice(this.cues.indexOf(cue), 1);
- },
- cues: [{ startTime: 10, endTime: 20, text: 'delete me' }, { startTime: 0, endTime: 2, text: 'save me' }]
- }
- };
- mediaSource.videoBuffer_.remove = function (start, end) {
- if (start === 3 && end === 10) {
- removes++;
- }
- };
- mediaSource.audioBuffer_.remove = function (start, end) {
- if (start === 3 && end === 10) {
- removes++;
- }
- };
- sourceBuffer.remove(3, 10);
- _qunit2['default'].equal(removes, 2, 'called remove on both sourceBuffers');
- _qunit2['default'].equal(sourceBuffer.inbandTextTracks_.CC1.cues.length, 1, 'one cue remains after remove');
- _qunit2['default'].equal(removedCue[0].text, 'delete me', 'the cue that overlapped the remove region was removed');
- });
- _qunit2['default'].test('calling remove property handles absence of cues (null)', function () {
- var mediaSource = new _videoJs2['default'].MediaSource();
- var sourceBuffer = mediaSource.addSourceBuffer('video/mp2t');
- initializeNativeSourceBuffers(sourceBuffer);
- sourceBuffer.inbandTextTracks_ = {
- CC1: {
- cues: null
- }
- };
- mediaSource.videoBuffer_.remove = function (start, end) {
- // pass
- };
- mediaSource.audioBuffer_.remove = function (start, end) {
- // pass
- };
- // this call should not raise an exception
- sourceBuffer.remove(3, 10);
- _qunit2['default'].equal(sourceBuffer.inbandTextTracks_.CC1.cues, null, 'cues are still null');
- });
- _qunit2['default'].test('removing doesn\'t happen with audio disabled', function () {
- var mediaSource = new _videoJs2['default'].MediaSource();
- var muxedBuffer = mediaSource.addSourceBuffer('video/mp2t');
- // creating this audio buffer disables audio in the muxed one
- var audioBuffer = mediaSource.addSourceBuffer('audio/mp2t; codecs="mp4a.40.2"');
- var removedCue = [];
- var removes = 0;
- initializeNativeSourceBuffers(muxedBuffer);
- muxedBuffer.inbandTextTracks_ = {
- CC1: {
- removeCue: function removeCue(cue) {
- removedCue.push(cue);
- this.cues.splice(this.cues.indexOf(cue), 1);
- },
- cues: [{ startTime: 10, endTime: 20, text: 'delete me' }, { startTime: 0, endTime: 2, text: 'save me' }]
- }
- };
- mediaSource.videoBuffer_.remove = function (start, end) {
- if (start === 3 && end === 10) {
- removes++;
- }
- };
- mediaSource.audioBuffer_.remove = function (start, end) {
- if (start === 3 && end === 10) {
- removes++;
- }
- };
- muxedBuffer.remove(3, 10);
- _qunit2['default'].equal(removes, 1, 'called remove on only one source buffer');
- _qunit2['default'].equal(muxedBuffer.inbandTextTracks_.CC1.cues.length, 1, 'one cue remains after remove');
- _qunit2['default'].equal(removedCue[0].text, 'delete me', 'the cue that overlapped the remove region was removed');
- });
- _qunit2['default'].test('readyState delegates to the native implementation', function () {
- var mediaSource = new _srcHtmlMediaSource2['default']();
- _qunit2['default'].equal(mediaSource.readyState, mediaSource.nativeMediaSource_.readyState, 'readyStates are equal');
- mediaSource.nativeMediaSource_.readyState = 'nonsense stuff';
- _qunit2['default'].equal(mediaSource.readyState, mediaSource.nativeMediaSource_.readyState, 'readyStates are equal');
- });
- _qunit2['default'].test('addSeekableRange_ throws an error for media with known duration', function () {
- var mediaSource = new _videoJs2['default'].MediaSource();
- mediaSource.duration = 100;
- _qunit2['default'].throws(function () {
- mediaSource.addSeekableRange_(0, 100);
- }, 'cannot add seekable range');
- });
- _qunit2['default'].test('addSeekableRange_ adds to the native MediaSource duration', function () {
- var mediaSource = new _videoJs2['default'].MediaSource();
- mediaSource.duration = Infinity;
- mediaSource.addSeekableRange_(120, 240);
- _qunit2['default'].equal(mediaSource.nativeMediaSource_.duration, 240, 'set native duration');
- _qunit2['default'].equal(mediaSource.duration, Infinity, 'emulated duration');
- mediaSource.addSeekableRange_(120, 220);
- _qunit2['default'].equal(mediaSource.nativeMediaSource_.duration, 240, 'ignored the smaller range');
- _qunit2['default'].equal(mediaSource.duration, Infinity, 'emulated duration');
- });
- _qunit2['default'].test('appendBuffer error triggers on the player', function () {
- var mediaSource = new _videoJs2['default'].MediaSource();
- var sourceBuffer = mediaSource.addSourceBuffer('video/mp2t');
- var error = false;
- mediaSource.player_ = this.player;
- initializeNativeSourceBuffers(sourceBuffer);
- sourceBuffer.videoBuffer_.appendBuffer = function () {
- throw new Error();
- };
- this.player.on('error', function () {
- return error = true;
- });
- // send fake data to the source buffer from the transmuxer to append to native buffer
- // initializeNativeSourceBuffers does the same thing to trigger the creation of
- // native source buffers.
- var fakeTransmuxerMessage = initializeNativeSourceBuffers;
- fakeTransmuxerMessage(sourceBuffer);
- this.clock.tick(1);
- _qunit2['default'].ok(error, 'error triggered on player');
- });
- _qunit2['default'].test('transmuxes mp2t segments', function () {
- var mp2tSegments = [];
- var mp4Segments = [];
- var data = new Uint8Array(1);
- var mediaSource = undefined;
- var sourceBuffer = undefined;
- mediaSource = new _videoJs2['default'].MediaSource();
- sourceBuffer = mediaSource.addSourceBuffer('video/mp2t');
- sourceBuffer.transmuxer_.postMessage = function (segment) {
- if (segment.action === 'push') {
- var buffer = new Uint8Array(segment.data, segment.byteOffset, segment.byteLength);
- mp2tSegments.push(buffer);
- }
- };
- sourceBuffer.concatAndAppendSegments_ = function (segmentObj, destinationBuffer) {
- mp4Segments.push(segmentObj);
- };
- sourceBuffer.appendBuffer(data);
- _qunit2['default'].equal(mp2tSegments.length, 1, 'transmuxed one segment');
- _qunit2['default'].equal(mp2tSegments[0].length, 1, 'did not alter the segment');
- _qunit2['default'].equal(mp2tSegments[0][0], data[0], 'did not alter the segment');
- // an init segment
- sourceBuffer.transmuxer_.onmessage(createDataMessage('video', new Uint8Array(1)));
- // a media segment
- sourceBuffer.transmuxer_.onmessage(createDataMessage('audio', new Uint8Array(1)));
- // Segments are concatenated
- _qunit2['default'].equal(mp4Segments.length, 0, 'segments are not appended until after the `done` message');
- // send `done` message
- sourceBuffer.transmuxer_.onmessage(doneMessage);
- // Segments are concatenated
- _qunit2['default'].equal(mp4Segments.length, 2, 'appended the segments');
- });
- _qunit2['default'].test('handles typed-arrays that are subsets of their underlying buffer', function () {
- var mp2tSegments = [];
- var mp4Segments = [];
- var dataBuffer = new Uint8Array([0, 1, 2, 3, 4, 5, 6, 7, 8, 9]);
- var data = dataBuffer.subarray(5, 7);
- var mediaSource = undefined;
- var sourceBuffer = undefined;
- mediaSource = new _videoJs2['default'].MediaSource();
- sourceBuffer = mediaSource.addSourceBuffer('video/mp2t');
- sourceBuffer.transmuxer_.postMessage = function (segment) {
- if (segment.action === 'push') {
- var buffer = new Uint8Array(segment.data, segment.byteOffset, segment.byteLength);
- mp2tSegments.push(buffer);
- }
- };
- sourceBuffer.concatAndAppendSegments_ = function (segmentObj, destinationBuffer) {
- mp4Segments.push(segmentObj.segments[0]);
- };
- sourceBuffer.appendBuffer(data);
- _qunit2['default'].equal(mp2tSegments.length, 1, 'emitted the fragment');
- _qunit2['default'].equal(mp2tSegments[0].length, 2, 'correctly handled a typed-array that is a subset');
- _qunit2['default'].equal(mp2tSegments[0][0], 5, 'fragment contains the correct first byte');
- _qunit2['default'].equal(mp2tSegments[0][1], 6, 'fragment contains the correct second byte');
- // an init segment
- sourceBuffer.transmuxer_.onmessage(createDataMessage('video', data));
- // Segments are concatenated
- _qunit2['default'].equal(mp4Segments.length, 0, 'segments are not appended until after the `done` message');
- // send `done` message
- sourceBuffer.transmuxer_.onmessage(doneMessage);
- // Segments are concatenated
- _qunit2['default'].equal(mp4Segments.length, 1, 'emitted the fragment');
- _qunit2['default'].equal(mp4Segments[0].length, 2, 'correctly handled a typed-array that is a subset');
- _qunit2['default'].equal(mp4Segments[0][0], 5, 'fragment contains the correct first byte');
- _qunit2['default'].equal(mp4Segments[0][1], 6, 'fragment contains the correct second byte');
- });
- _qunit2['default'].test('only appends audio init segment for first segment or on audio/media changes', function () {
- var mp4Segments = [];
- var initBuffer = new Uint8Array([0, 1]);
- var dataBuffer = new Uint8Array([2, 3]);
- var mediaSource = undefined;
- var sourceBuffer = undefined;
- mediaSource = new _videoJs2['default'].MediaSource();
- sourceBuffer = mediaSource.addSourceBuffer('video/mp2t');
- sourceBuffer.audioDisabled_ = false;
- mediaSource.player_ = this.player;
- mediaSource.url_ = this.url;
- mediaSource.trigger('sourceopen');
- sourceBuffer.concatAndAppendSegments_ = function (segmentObj, destinationBuffer) {
- var segment = segmentObj.segments.reduce(function (seg, arr) {
- return seg.concat(Array.from(arr));
- }, []);
- mp4Segments.push(segment);
- };
- _qunit2['default'].ok(sourceBuffer.appendAudioInitSegment_, 'will append init segment next');
- // an init segment
- sourceBuffer.transmuxer_.onmessage(createDataMessage('audio', dataBuffer, {
- initSegment: {
- data: initBuffer.buffer,
- byteOffset: initBuffer.byteOffset,
- byteLength: initBuffer.byteLength
- }
- }));
- // Segments are concatenated
- _qunit2['default'].equal(mp4Segments.length, 0, 'segments are not appended until after the `done` message');
- // send `done` message
- sourceBuffer.transmuxer_.onmessage(doneMessage);
- // Segments are concatenated
- _qunit2['default'].equal(mp4Segments.length, 1, 'emitted the fragment');
- // Contains init segment on first segment
- _qunit2['default'].equal(mp4Segments[0][0], 0, 'fragment contains the correct first byte');
- _qunit2['default'].equal(mp4Segments[0][1], 1, 'fragment contains the correct second byte');
- _qunit2['default'].equal(mp4Segments[0][2], 2, 'fragment contains the correct third byte');
- _qunit2['default'].equal(mp4Segments[0][3], 3, 'fragment contains the correct fourth byte');
- _qunit2['default'].ok(!sourceBuffer.appendAudioInitSegment_, 'will not append init segment next');
- dataBuffer = new Uint8Array([4, 5]);
- sourceBuffer.transmuxer_.onmessage(createDataMessage('audio', dataBuffer, {
- initSegment: {
- data: initBuffer.buffer,
- byteOffset: initBuffer.byteOffset,
- byteLength: initBuffer.byteLength
- }
- }));
- sourceBuffer.transmuxer_.onmessage(doneMessage);
- _qunit2['default'].equal(mp4Segments.length, 2, 'emitted the fragment');
- // does not contain init segment on next segment
- _qunit2['default'].equal(mp4Segments[1][0], 4, 'fragment contains the correct first byte');
- _qunit2['default'].equal(mp4Segments[1][1], 5, 'fragment contains the correct second byte');
- // audio track change
- this.player.audioTracks().trigger('change');
- sourceBuffer.audioDisabled_ = false;
- _qunit2['default'].ok(sourceBuffer.appendAudioInitSegment_, 'audio change sets appendAudioInitSegment_');
- dataBuffer = new Uint8Array([6, 7]);
- sourceBuffer.transmuxer_.onmessage(createDataMessage('audio', dataBuffer, {
- initSegment: {
- data: initBuffer.buffer,
- byteOffset: initBuffer.byteOffset,
- byteLength: initBuffer.byteLength
- }
- }));
- sourceBuffer.transmuxer_.onmessage(doneMessage);
- _qunit2['default'].equal(mp4Segments.length, 3, 'emitted the fragment');
- // contains init segment after audio track change
- _qunit2['default'].equal(mp4Segments[2][0], 0, 'fragment contains the correct first byte');
- _qunit2['default'].equal(mp4Segments[2][1], 1, 'fragment contains the correct second byte');
- _qunit2['default'].equal(mp4Segments[2][2], 6, 'fragment contains the correct third byte');
- _qunit2['default'].equal(mp4Segments[2][3], 7, 'fragment contains the correct fourth byte');
- _qunit2['default'].ok(!sourceBuffer.appendAudioInitSegment_, 'will not append init segment next');
- dataBuffer = new Uint8Array([8, 9]);
- sourceBuffer.transmuxer_.onmessage(createDataMessage('audio', dataBuffer, {
- initSegment: {
- data: initBuffer.buffer,
- byteOffset: initBuffer.byteOffset,
- byteLength: initBuffer.byteLength
- }
- }));
- sourceBuffer.transmuxer_.onmessage(doneMessage);
- _qunit2['default'].equal(mp4Segments.length, 4, 'emitted the fragment');
- // does not contain init segment in next segment
- _qunit2['default'].equal(mp4Segments[3][0], 8, 'fragment contains the correct first byte');
- _qunit2['default'].equal(mp4Segments[3][1], 9, 'fragment contains the correct second byte');
- _qunit2['default'].ok(!sourceBuffer.appendAudioInitSegment_, 'will not append init segment next');
- // rendition switch
- this.player.trigger('mediachange');
- _qunit2['default'].ok(sourceBuffer.appendAudioInitSegment_, 'media change sets appendAudioInitSegment_');
- dataBuffer = new Uint8Array([10, 11]);
- sourceBuffer.transmuxer_.onmessage(createDataMessage('audio', dataBuffer, {
- initSegment: {
- data: initBuffer.buffer,
- byteOffset: initBuffer.byteOffset,
- byteLength: initBuffer.byteLength
- }
- }));
- sourceBuffer.transmuxer_.onmessage(doneMessage);
- _qunit2['default'].equal(mp4Segments.length, 5, 'emitted the fragment');
- // contains init segment after audio track change
- _qunit2['default'].equal(mp4Segments[4][0], 0, 'fragment contains the correct first byte');
- _qunit2['default'].equal(mp4Segments[4][1], 1, 'fragment contains the correct second byte');
- _qunit2['default'].equal(mp4Segments[4][2], 10, 'fragment contains the correct third byte');
- _qunit2['default'].equal(mp4Segments[4][3], 11, 'fragment contains the correct fourth byte');
- _qunit2['default'].ok(!sourceBuffer.appendAudioInitSegment_, 'will not append init segment next');
- });
- _qunit2['default'].test('appends video init segment for every segment', function () {
- var mp4Segments = [];
- var initBuffer = new Uint8Array([0, 1]);
- var dataBuffer = new Uint8Array([2, 3]);
- var mediaSource = undefined;
- var sourceBuffer = undefined;
- mediaSource = new _videoJs2['default'].MediaSource();
- sourceBuffer = mediaSource.addSourceBuffer('video/mp2t');
- mediaSource.player_ = this.player;
- mediaSource.url_ = this.url;
- mediaSource.trigger('sourceopen');
- sourceBuffer.concatAndAppendSegments_ = function (segmentObj, destinationBuffer) {
- var segment = segmentObj.segments.reduce(function (seg, arr) {
- return seg.concat(Array.from(arr));
- }, []);
- mp4Segments.push(segment);
- };
- // an init segment
- sourceBuffer.transmuxer_.onmessage(createDataMessage('video', dataBuffer, {
- initSegment: {
- data: initBuffer.buffer,
- byteOffset: initBuffer.byteOffset,
- byteLength: initBuffer.byteLength
- }
- }));
- // Segments are concatenated
- _qunit2['default'].equal(mp4Segments.length, 0, 'segments are not appended until after the `done` message');
- // send `done` message
- sourceBuffer.transmuxer_.onmessage(doneMessage);
- // Segments are concatenated
- _qunit2['default'].equal(mp4Segments.length, 1, 'emitted the fragment');
- // Contains init segment on first segment
- _qunit2['default'].equal(mp4Segments[0][0], 0, 'fragment contains the correct first byte');
- _qunit2['default'].equal(mp4Segments[0][1], 1, 'fragment contains the correct second byte');
- _qunit2['default'].equal(mp4Segments[0][2], 2, 'fragment contains the correct third byte');
- _qunit2['default'].equal(mp4Segments[0][3], 3, 'fragment contains the correct fourth byte');
- dataBuffer = new Uint8Array([4, 5]);
- sourceBuffer.transmuxer_.onmessage(createDataMessage('video', dataBuffer, {
- initSegment: {
- data: initBuffer.buffer,
- byteOffset: initBuffer.byteOffset,
- byteLength: initBuffer.byteLength
- }
- }));
- sourceBuffer.transmuxer_.onmessage(doneMessage);
- _qunit2['default'].equal(mp4Segments.length, 2, 'emitted the fragment');
- _qunit2['default'].equal(mp4Segments[1][0], 0, 'fragment contains the correct first byte');
- _qunit2['default'].equal(mp4Segments[1][1], 1, 'fragment contains the correct second byte');
- _qunit2['default'].equal(mp4Segments[1][2], 4, 'fragment contains the correct third byte');
- _qunit2['default'].equal(mp4Segments[1][3], 5, 'fragment contains the correct fourth byte');
- dataBuffer = new Uint8Array([6, 7]);
- sourceBuffer.transmuxer_.onmessage(createDataMessage('video', dataBuffer, {
- initSegment: {
- data: initBuffer.buffer,
- byteOffset: initBuffer.byteOffset,
- byteLength: initBuffer.byteLength
- }
- }));
- sourceBuffer.transmuxer_.onmessage(doneMessage);
- _qunit2['default'].equal(mp4Segments.length, 3, 'emitted the fragment');
- // contains init segment after audio track change
- _qunit2['default'].equal(mp4Segments[2][0], 0, 'fragment contains the correct first byte');
- _qunit2['default'].equal(mp4Segments[2][1], 1, 'fragment contains the correct second byte');
- _qunit2['default'].equal(mp4Segments[2][2], 6, 'fragment contains the correct third byte');
- _qunit2['default'].equal(mp4Segments[2][3], 7, 'fragment contains the correct fourth byte');
- });
- _qunit2['default'].test('handles empty codec string value', function () {
- var mediaSource = new _videoJs2['default'].MediaSource();
- var sourceBuffer = mediaSource.addSourceBuffer('video/mp2t; codecs=""');
- initializeNativeSourceBuffers(sourceBuffer);
- _qunit2['default'].ok(mediaSource.videoBuffer_, 'created a video buffer');
- _qunit2['default'].equal(mediaSource.videoBuffer_.type, 'video/mp4;codecs="avc1.4d400d"', 'video buffer has the default codec');
- _qunit2['default'].ok(mediaSource.audioBuffer_, 'created an audio buffer');
- _qunit2['default'].equal(mediaSource.audioBuffer_.type, 'audio/mp4;codecs="mp4a.40.2"', 'audio buffer has the default codec');
- _qunit2['default'].equal(mediaSource.sourceBuffers.length, 1, 'created one virtual buffer');
- _qunit2['default'].equal(mediaSource.sourceBuffers[0], sourceBuffer, 'returned the virtual buffer');
- });
- _qunit2['default'].test('can create an audio buffer by itself', function () {
- var mediaSource = new _videoJs2['default'].MediaSource();
- var sourceBuffer = mediaSource.addSourceBuffer('video/mp2t; codecs="mp4a.40.2"');
- initializeNativeSourceBuffers(sourceBuffer);
- _qunit2['default'].ok(!mediaSource.videoBuffer_, 'did not create a video buffer');
- _qunit2['default'].ok(mediaSource.audioBuffer_, 'created an audio buffer');
- _qunit2['default'].equal(mediaSource.audioBuffer_.type, 'audio/mp4;codecs="mp4a.40.2"', 'audio buffer has the default codec');
- _qunit2['default'].equal(mediaSource.sourceBuffers.length, 1, 'created one virtual buffer');
- _qunit2['default'].equal(mediaSource.sourceBuffers[0], sourceBuffer, 'returned the virtual buffer');
- });
- _qunit2['default'].test('can create an video buffer by itself', function () {
- var mediaSource = new _videoJs2['default'].MediaSource();
- var sourceBuffer = mediaSource.addSourceBuffer('video/mp2t; codecs="avc1.4d400d"');
- initializeNativeSourceBuffers(sourceBuffer);
- _qunit2['default'].ok(!mediaSource.audioBuffer_, 'did not create an audio buffer');
- _qunit2['default'].ok(mediaSource.videoBuffer_, 'created an video buffer');
- _qunit2['default'].equal(mediaSource.videoBuffer_.type, 'video/mp4;codecs="avc1.4d400d"', 'video buffer has the codec that was passed');
- _qunit2['default'].equal(mediaSource.sourceBuffers.length, 1, 'created one virtual buffer');
- _qunit2['default'].equal(mediaSource.sourceBuffers[0], sourceBuffer, 'returned the virtual buffer');
- });
- _qunit2['default'].test('handles invalid codec string', function () {
- var mediaSource = new _videoJs2['default'].MediaSource();
- var sourceBuffer = mediaSource.addSourceBuffer('video/mp2t; codecs="nope"');
- initializeNativeSourceBuffers(sourceBuffer);
- _qunit2['default'].ok(mediaSource.videoBuffer_, 'created a video buffer');
- _qunit2['default'].equal(mediaSource.videoBuffer_.type, 'video/mp4;codecs="avc1.4d400d"', 'video buffer has the default codec');
- _qunit2['default'].ok(mediaSource.audioBuffer_, 'created an audio buffer');
- _qunit2['default'].equal(mediaSource.audioBuffer_.type, 'audio/mp4;codecs="mp4a.40.2"', 'audio buffer has the default codec');
- _qunit2['default'].equal(mediaSource.sourceBuffers.length, 1, 'created one virtual buffer');
- _qunit2['default'].equal(mediaSource.sourceBuffers[0], sourceBuffer, 'returned the virtual buffer');
- });
- _qunit2['default'].test('handles codec strings in reverse order', function () {
- var mediaSource = new _videoJs2['default'].MediaSource();
- var sourceBuffer = mediaSource.addSourceBuffer('video/mp2t; codecs="mp4a.40.5,avc1.64001f"');
- initializeNativeSourceBuffers(sourceBuffer);
- _qunit2['default'].ok(mediaSource.videoBuffer_, 'created a video buffer');
- _qunit2['default'].equal(mediaSource.videoBuffer_.type, 'video/mp4;codecs="avc1.64001f"', 'video buffer has the passed codec');
- _qunit2['default'].ok(mediaSource.audioBuffer_, 'created an audio buffer');
- _qunit2['default'].equal(mediaSource.audioBuffer_.type, 'audio/mp4;codecs="mp4a.40.5"', 'audio buffer has the passed codec');
- _qunit2['default'].equal(mediaSource.sourceBuffers.length, 1, 'created one virtual buffer');
- _qunit2['default'].equal(mediaSource.sourceBuffers[0], sourceBuffer, 'returned the virtual buffer');
- _qunit2['default'].ok(sourceBuffer.transmuxer_, 'created a transmuxer');
- });
- _qunit2['default'].test('forwards codec strings to native buffers when specified', function () {
- var mediaSource = new _videoJs2['default'].MediaSource();
- var sourceBuffer = mediaSource.addSourceBuffer('video/mp2t; codecs="avc1.64001f,mp4a.40.5"');
- initializeNativeSourceBuffers(sourceBuffer);
- _qunit2['default'].ok(mediaSource.videoBuffer_, 'created a video buffer');
- _qunit2['default'].equal(mediaSource.videoBuffer_.type, 'video/mp4;codecs="avc1.64001f"', 'passed the video codec along');
- _qunit2['default'].ok(mediaSource.audioBuffer_, 'created a video buffer');
- _qunit2['default'].equal(mediaSource.audioBuffer_.type, 'audio/mp4;codecs="mp4a.40.5"', 'passed the audio codec along');
- });
- _qunit2['default'].test('parses old-school apple codec strings to the modern standard', function () {
- var mediaSource = new _videoJs2['default'].MediaSource();
- var sourceBuffer = mediaSource.addSourceBuffer('video/mp2t; codecs="avc1.100.31,mp4a.40.5"');
- initializeNativeSourceBuffers(sourceBuffer);
- _qunit2['default'].ok(mediaSource.videoBuffer_, 'created a video buffer');
- _qunit2['default'].equal(mediaSource.videoBuffer_.type, 'video/mp4;codecs="avc1.64001f"', 'passed the video codec along');
- _qunit2['default'].ok(mediaSource.audioBuffer_, 'created a video buffer');
- _qunit2['default'].equal(mediaSource.audioBuffer_.type, 'audio/mp4;codecs="mp4a.40.5"', 'passed the audio codec along');
- });
- _qunit2['default'].test('specifies reasonable codecs if none are specified', function () {
- var mediaSource = new _videoJs2['default'].MediaSource();
- var sourceBuffer = mediaSource.addSourceBuffer('video/mp2t');
- initializeNativeSourceBuffers(sourceBuffer);
- _qunit2['default'].ok(mediaSource.videoBuffer_, 'created a video buffer');
- _qunit2['default'].equal(mediaSource.videoBuffer_.type, 'video/mp4;codecs="avc1.4d400d"', 'passed the video codec along');
- _qunit2['default'].ok(mediaSource.audioBuffer_, 'created a video buffer');
- _qunit2['default'].equal(mediaSource.audioBuffer_.type, 'audio/mp4;codecs="mp4a.40.2"', 'passed the audio codec along');
- });
- _qunit2['default'].test('virtual buffers are updating if either native buffer is', function () {
- var mediaSource = new _videoJs2['default'].MediaSource();
- var sourceBuffer = mediaSource.addSourceBuffer('video/mp2t');
- initializeNativeSourceBuffers(sourceBuffer);
- mediaSource.videoBuffer_.updating = true;
- mediaSource.audioBuffer_.updating = false;
- _qunit2['default'].equal(sourceBuffer.updating, true, 'virtual buffer is updating');
- mediaSource.audioBuffer_.updating = true;
- _qunit2['default'].equal(sourceBuffer.updating, true, 'virtual buffer is updating');
- mediaSource.videoBuffer_.updating = false;
- _qunit2['default'].equal(sourceBuffer.updating, true, 'virtual buffer is updating');
- mediaSource.audioBuffer_.updating = false;
- _qunit2['default'].equal(sourceBuffer.updating, false, 'virtual buffer is not updating');
- });
- _qunit2['default'].test('virtual buffers have a position buffered if both native buffers do', function () {
- var mediaSource = new _videoJs2['default'].MediaSource();
- var sourceBuffer = mediaSource.addSourceBuffer('video/mp2t');
- initializeNativeSourceBuffers(sourceBuffer);
- mediaSource.videoBuffer_.buffered = _videoJs2['default'].createTimeRanges([[0, 10], [20, 30]]);
- mediaSource.audioBuffer_.buffered = _videoJs2['default'].createTimeRanges([[0, 7], [11, 15], [16, 40]]);
- _qunit2['default'].equal(sourceBuffer.buffered.length, 2, 'two buffered ranges');
- _qunit2['default'].equal(sourceBuffer.buffered.start(0), 0, 'first starts at zero');
- _qunit2['default'].equal(sourceBuffer.buffered.end(0), 7, 'first ends at seven');
- _qunit2['default'].equal(sourceBuffer.buffered.start(1), 20, 'second starts at twenty');
- _qunit2['default'].equal(sourceBuffer.buffered.end(1), 30, 'second ends at 30');
- });
- _qunit2['default'].test('disabled audio does not affect buffered property', function () {
- var mediaSource = new _videoJs2['default'].MediaSource();
- var muxedBuffer = mediaSource.addSourceBuffer('video/mp2t');
- // creating a separate audio buffer disables audio on the muxed one
- var audioBuffer = mediaSource.addSourceBuffer('audio/mp2t; codecs="mp4a.40.2"');
- initializeNativeSourceBuffers(muxedBuffer);
- mediaSource.videoBuffer_.buffered = _videoJs2['default'].createTimeRanges([[1, 10]]);
- mediaSource.audioBuffer_.buffered = _videoJs2['default'].createTimeRanges([[2, 11]]);
- _qunit2['default'].equal(audioBuffer.buffered.length, 1, 'one buffered range');
- _qunit2['default'].equal(audioBuffer.buffered.start(0), 2, 'starts at two');
- _qunit2['default'].equal(audioBuffer.buffered.end(0), 11, 'ends at eleven');
- _qunit2['default'].equal(muxedBuffer.buffered.length, 1, 'one buffered range');
- _qunit2['default'].equal(muxedBuffer.buffered.start(0), 1, 'starts at one');
- _qunit2['default'].equal(muxedBuffer.buffered.end(0), 10, 'ends at ten');
- });
- _qunit2['default'].test('sets transmuxer baseMediaDecodeTime on appends', function () {
- var mediaSource = new _videoJs2['default'].MediaSource();
- var sourceBuffer = mediaSource.addSourceBuffer('video/mp2t');
- var resets = [];
- sourceBuffer.transmuxer_.postMessage = function (message) {
- if (message.action === 'setTimestampOffset') {
- resets.push(message.timestampOffset);
- }
- };
- sourceBuffer.timestampOffset = 42;
- _qunit2['default'].equal(resets.length, 1, 'reset called');
- _qunit2['default'].equal(resets[0], 42, 'set the baseMediaDecodeTime based on timestampOffset');
- });
- _qunit2['default'].test('aggregates source buffer update events', function () {
- var mediaSource = new _videoJs2['default'].MediaSource();
- var sourceBuffer = mediaSource.addSourceBuffer('video/mp2t');
- var updates = 0;
- var updateends = 0;
- var updatestarts = 0;
- initializeNativeSourceBuffers(sourceBuffer);
- mediaSource.player_ = this.player;
- sourceBuffer.addEventListener('updatestart', function () {
- updatestarts++;
- });
- sourceBuffer.addEventListener('update', function () {
- updates++;
- });
- sourceBuffer.addEventListener('updateend', function () {
- updateends++;
- });
- _qunit2['default'].equal(updatestarts, 0, 'no updatestarts before a `done` message is received');
- _qunit2['default'].equal(updates, 0, 'no updates before a `done` message is received');
- _qunit2['default'].equal(updateends, 0, 'no updateends before a `done` message is received');
- // the video buffer begins updating first:
- sourceBuffer.videoBuffer_.updating = true;
- sourceBuffer.audioBuffer_.updating = false;
- sourceBuffer.videoBuffer_.trigger('updatestart');
- _qunit2['default'].equal(updatestarts, 1, 'aggregated updatestart');
- sourceBuffer.audioBuffer_.updating = true;
- sourceBuffer.audioBuffer_.trigger('updatestart');
- _qunit2['default'].equal(updatestarts, 1, 'aggregated updatestart');
- // the audio buffer finishes first:
- sourceBuffer.audioBuffer_.updating = false;
- sourceBuffer.videoBuffer_.updating = true;
- sourceBuffer.audioBuffer_.trigger('update');
- _qunit2['default'].equal(updates, 0, 'waited for the second update');
- sourceBuffer.videoBuffer_.updating = false;
- sourceBuffer.videoBuffer_.trigger('update');
- _qunit2['default'].equal(updates, 1, 'aggregated update');
- // audio finishes first:
- sourceBuffer.videoBuffer_.updating = true;
- sourceBuffer.audioBuffer_.updating = false;
- sourceBuffer.audioBuffer_.trigger('updateend');
- _qunit2['default'].equal(updateends, 0, 'waited for the second updateend');
- sourceBuffer.videoBuffer_.updating = false;
- sourceBuffer.videoBuffer_.trigger('updateend');
- _qunit2['default'].equal(updateends, 1, 'aggregated updateend');
- });
- _qunit2['default'].test('translates caption events into WebVTT cues', function () {
- var mediaSource = new _videoJs2['default'].MediaSource();
- var sourceBuffer = mediaSource.addSourceBuffer('video/mp2t');
- var types = [];
- var hls608 = 0;
- mediaSource.player_ = {
- addRemoteTextTrack: function addRemoteTextTrack(options) {
- types.push(options.kind);
- return {
- track: {
- kind: options.kind,
- label: options.label,
- cues: [],
- addCue: function addCue(cue) {
- this.cues.push(cue);
- }
- }
- };
- },
- textTracks: function textTracks() {
- return {
- getTrackById: function getTrackById() {}
- };
- },
- remoteTextTracks: function remoteTextTracks() {},
- tech_: new _videoJs2['default'].EventTarget()
- };
- mediaSource.player_.tech_.on('usage', function (event) {
- if (event.name === 'hls-608') {
- hls608++;
- }
- });
- sourceBuffer.timestampOffset = 10;
- sourceBuffer.transmuxer_.onmessage(createDataMessage('video', new Uint8Array(1), {
- captions: [{
- startTime: 1,
- endTime: 3,
- text: 'This is an in-band caption in CC1',
- stream: 'CC1'
- }],
- captionStreams: { CC1: true }
- }));
- sourceBuffer.transmuxer_.onmessage(doneMessage);
- var cues = sourceBuffer.inbandTextTracks_.CC1.cues;
- _qunit2['default'].equal(hls608, 1, 'one hls-608 event was triggered');
- _qunit2['default'].equal(types.length, 1, 'created one text track');
- _qunit2['default'].equal(types[0], 'captions', 'the type was captions');
- _qunit2['default'].equal(cues.length, 1, 'created one cue');
- _qunit2['default'].equal(cues[0].text, 'This is an in-band caption in CC1', 'included the text');
- _qunit2['default'].equal(cues[0].startTime, 11, 'started at eleven');
- _qunit2['default'].equal(cues[0].endTime, 13, 'ended at thirteen');
- });
- _qunit2['default'].test('captions use existing tracks with id equal to CC#', function () {
- var mediaSource = new _videoJs2['default'].MediaSource();
- var sourceBuffer = mediaSource.addSourceBuffer('video/mp2t');
- var addTrackCalled = 0;
- var tracks = {
- CC1: {
- kind: 'captions',
- label: 'CC1',
- id: 'CC1',
- cues: [],
- addCue: function addCue(cue) {
- this.cues.push(cue);
- }
- },
- CC2: {
- kind: 'captions',
- label: 'CC2',
- id: 'CC2',
- cues: [],
- addCue: function addCue(cue) {
- this.cues.push(cue);
- }
- }
- };
- mediaSource.player_ = {
- addRemoteTextTrack: function addRemoteTextTrack(options) {
- addTrackCalled++;
- },
- textTracks: function textTracks() {
- return {
- getTrackById: function getTrackById(id) {
- return tracks[id];
- }
- };
- },
- remoteTextTracks: function remoteTextTracks() {},
- tech_: new _videoJs2['default'].EventTarget()
- };
- sourceBuffer.timestampOffset = 10;
- sourceBuffer.transmuxer_.onmessage(createDataMessage('video', new Uint8Array(1), {
- captions: [{
- stream: 'CC1',
- startTime: 1,
- endTime: 3,
- text: 'This is an in-band caption in CC1'
- }, {
- stream: 'CC2',
- startTime: 1,
- endTime: 3,
- text: 'This is an in-band caption in CC2'
- }],
- captionStreams: { CC1: true, CC2: true }
- }));
- sourceBuffer.transmuxer_.onmessage(doneMessage);
- var cues = sourceBuffer.inbandTextTracks_.CC1.cues;
- _qunit2['default'].equal(addTrackCalled, 0, 'no tracks were created');
- _qunit2['default'].equal(tracks.CC1.cues.length, 1, 'CC1 contains 1 cue');
- _qunit2['default'].equal(tracks.CC2.cues.length, 1, 'CC2 contains 1 cue');
- _qunit2['default'].equal(tracks.CC1.cues[0].text, 'This is an in-band caption in CC1', 'CC1 contains the right cue');
- _qunit2['default'].equal(tracks.CC2.cues[0].text, 'This is an in-band caption in CC2', 'CC2 contains the right cue');
- });
- _qunit2['default'].test('translates metadata events into WebVTT cues', function () {
- var mediaSource = new _videoJs2['default'].MediaSource();
- var sourceBuffer = mediaSource.addSourceBuffer('video/mp2t');
- mediaSource.duration = Infinity;
- mediaSource.nativeMediaSource_.duration = 60;
- var types = [];
- var metadata = [{
- cueTime: 2,
- frames: [{
- url: 'This is a url tag'
- }, {
- value: 'This is a text tag'
- }]
- }, {
- cueTime: 12,
- frames: [{
- data: 'This is a priv tag'
- }]
- }];
- metadata.dispatchType = 0x10;
- mediaSource.player_ = {
- addRemoteTextTrack: function addRemoteTextTrack(options) {
- types.push(options.kind);
- return {
- track: {
- kind: options.kind,
- label: options.label,
- cues: [],
- addCue: function addCue(cue) {
- this.cues.push(cue);
- }
- }
- };
- },
- remoteTextTracks: function remoteTextTracks() {}
- };
- sourceBuffer.timestampOffset = 10;
- sourceBuffer.transmuxer_.onmessage(createDataMessage('video', new Uint8Array(1), {
- metadata: metadata
- }));
- sourceBuffer.transmuxer_.onmessage(doneMessage);
- _qunit2['default'].equal(sourceBuffer.metadataTrack_.inBandMetadataTrackDispatchType, 16, 'in-band metadata track dispatch type correctly set');
- var cues = sourceBuffer.metadataTrack_.cues;
- _qunit2['default'].equal(types.length, 1, 'created one text track');
- _qunit2['default'].equal(types[0], 'metadata', 'the type was metadata');
- _qunit2['default'].equal(cues.length, 3, 'created three cues');
- _qunit2['default'].equal(cues[0].text, 'This is a url tag', 'included the text');
- _qunit2['default'].equal(cues[0].startTime, 12, 'started at twelve');
- _qunit2['default'].equal(cues[0].endTime, 22, 'ended at StartTime of next cue(22)');
- _qunit2['default'].equal(cues[1].text, 'This is a text tag', 'included the text');
- _qunit2['default'].equal(cues[1].startTime, 12, 'started at twelve');
- _qunit2['default'].equal(cues[1].endTime, 22, 'ended at the startTime of next cue(22)');
- _qunit2['default'].equal(cues[2].text, 'This is a priv tag', 'included the text');
- _qunit2['default'].equal(cues[2].startTime, 22, 'started at twenty two');
- _qunit2['default'].equal(cues[2].endTime, Number.MAX_VALUE, 'ended at the maximum value');
- mediaSource.duration = 100;
- mediaSource.trigger('sourceended');
- _qunit2['default'].equal(cues[2].endTime, mediaSource.duration, 'sourceended is fired');
- });
- _qunit2['default'].test('does not wrap mp4 source buffers', function () {
- var mediaSource = new _videoJs2['default'].MediaSource();
- mediaSource.addSourceBuffer('video/mp4;codecs=avc1.4d400d');
- mediaSource.addSourceBuffer('audio/mp4;codecs=mp4a.40.2');
- _qunit2['default'].equal(mediaSource.sourceBuffers.length, mediaSource.nativeMediaSource_.sourceBuffers.length, 'did not need virtual buffers');
- _qunit2['default'].equal(mediaSource.sourceBuffers.length, 2, 'created native buffers');
- });
- _qunit2['default'].test('can get activeSourceBuffers', function () {
- var mediaSource = new _videoJs2['default'].MediaSource();
- // although activeSourceBuffers should technically be a SourceBufferList, we are
- // returning it as an array, and users may expect it to behave as such
- _qunit2['default'].ok(Array.isArray(mediaSource.activeSourceBuffers));
- });
- _qunit2['default'].test('active source buffers are updated on each buffer\'s updateend', function () {
- var mediaSource = new _videoJs2['default'].MediaSource();
- var updateCallCount = 0;
- var sourceBuffer = undefined;
- mediaSource.updateActiveSourceBuffers_ = function () {
- updateCallCount++;
- };
- sourceBuffer = mediaSource.addSourceBuffer('video/mp2t');
- mediaSource.player_ = this.player;
- mediaSource.url_ = this.url;
- mediaSource.trigger('sourceopen');
- _qunit2['default'].equal(updateCallCount, 0, 'active source buffers not updated on adding source buffer');
- mediaSource.player_.audioTracks().trigger('addtrack');
- _qunit2['default'].equal(updateCallCount, 1, 'active source buffers updated after addtrack');
- sourceBuffer = mediaSource.addSourceBuffer('video/mp2t');
- _qunit2['default'].equal(updateCallCount, 1, 'active source buffers not updated on adding second source buffer');
- mediaSource.player_.audioTracks().trigger('removetrack');
- _qunit2['default'].equal(updateCallCount, 2, 'active source buffers updated after removetrack');
- mediaSource.player_.audioTracks().trigger('change');
- _qunit2['default'].equal(updateCallCount, 3, 'active source buffers updated after change');
- });
- _qunit2['default'].test('combined buffer is the only active buffer when main track enabled', function () {
- var mediaSource = new _videoJs2['default'].MediaSource();
- var sourceBufferAudio = undefined;
- var sourceBufferCombined = undefined;
- var audioTracks = [{
- enabled: true,
- kind: 'main',
- label: 'main'
- }, {
- enabled: false,
- kind: 'alternative',
- label: 'English (UK)'
- }];
- this.player.audioTracks = function () {
- return audioTracks;
- };
- mediaSource.player_ = this.player;
- sourceBufferCombined = mediaSource.addSourceBuffer('video/m2pt');
- sourceBufferCombined.videoCodec_ = true;
- sourceBufferCombined.audioCodec_ = true;
- sourceBufferAudio = mediaSource.addSourceBuffer('video/m2pt');
- sourceBufferAudio.videoCodec_ = false;
- sourceBufferAudio.audioCodec_ = true;
- mediaSource.updateActiveSourceBuffers_();
- _qunit2['default'].equal(mediaSource.activeSourceBuffers.length, 1, 'active source buffers starts with one source buffer');
- _qunit2['default'].equal(mediaSource.activeSourceBuffers[0], sourceBufferCombined, 'active source buffers starts with combined source buffer');
- });
- _qunit2['default'].test('combined & audio buffers are active when alternative track enabled', function () {
- var mediaSource = new _videoJs2['default'].MediaSource();
- var sourceBufferAudio = undefined;
- var sourceBufferCombined = undefined;
- var audioTracks = [{
- enabled: false,
- kind: 'main',
- label: 'main'
- }, {
- enabled: true,
- kind: 'alternative',
- label: 'English (UK)'
- }];
- this.player.audioTracks = function () {
- return audioTracks;
- };
- mediaSource.player_ = this.player;
- sourceBufferCombined = mediaSource.addSourceBuffer('video/m2pt');
- sourceBufferCombined.videoCodec_ = true;
- sourceBufferCombined.audioCodec_ = true;
- sourceBufferAudio = mediaSource.addSourceBuffer('video/m2pt');
- sourceBufferAudio.videoCodec_ = false;
- sourceBufferAudio.audioCodec_ = true;
- mediaSource.updateActiveSourceBuffers_();
- _qunit2['default'].equal(mediaSource.activeSourceBuffers.length, 2, 'active source buffers includes both source buffers');
- // maintains same order as source buffers were created
- _qunit2['default'].equal(mediaSource.activeSourceBuffers[0], sourceBufferCombined, 'active source buffers starts with combined source buffer');
- _qunit2['default'].equal(mediaSource.activeSourceBuffers[1], sourceBufferAudio, 'active source buffers ends with audio source buffer');
- });
- _qunit2['default'].test('video only & audio only buffers are always active', function () {
- var mediaSource = new _videoJs2['default'].MediaSource();
- var sourceBufferAudio = undefined;
- var sourceBufferCombined = undefined;
- var audioTracks = [{
- enabled: false,
- kind: 'main',
- label: 'main'
- }, {
- enabled: true,
- kind: 'alternative',
- label: 'English (UK)'
- }];
- this.player.audioTracks = function () {
- return audioTracks;
- };
- mediaSource.player_ = this.player;
- sourceBufferCombined = mediaSource.addSourceBuffer('video/m2pt');
- sourceBufferCombined.videoCodec_ = true;
- sourceBufferCombined.audioCodec_ = false;
- sourceBufferAudio = mediaSource.addSourceBuffer('video/m2pt');
- sourceBufferAudio.videoCodec_ = false;
- sourceBufferAudio.audioCodec_ = true;
- mediaSource.updateActiveSourceBuffers_();
- _qunit2['default'].equal(mediaSource.activeSourceBuffers.length, 2, 'active source buffers includes both source buffers');
- // maintains same order as source buffers were created
- _qunit2['default'].equal(mediaSource.activeSourceBuffers[0], sourceBufferCombined, 'active source buffers starts with combined source buffer');
- _qunit2['default'].equal(mediaSource.activeSourceBuffers[1], sourceBufferAudio, 'active source buffers ends with audio source buffer');
- audioTracks[0].enabled = true;
- audioTracks[1].enabled = false;
- mediaSource.updateActiveSourceBuffers_();
- _qunit2['default'].equal(mediaSource.activeSourceBuffers.length, 2, 'active source buffers includes both source buffers');
- // maintains same order as source buffers were created
- _qunit2['default'].equal(mediaSource.activeSourceBuffers[0], sourceBufferCombined, 'active source buffers starts with combined source buffer');
- _qunit2['default'].equal(mediaSource.activeSourceBuffers[1], sourceBufferAudio, 'active source buffers ends with audio source buffer');
- });
- _qunit2['default'].test('Single buffer always active. Audio disabled depends on audio codec', function () {
- var mediaSource = new _videoJs2['default'].MediaSource();
- var audioTracks = [{
- enabled: true,
- kind: 'main',
- label: 'main'
- }];
- this.player.audioTracks = function () {
- return audioTracks;
- };
- mediaSource.player_ = this.player;
- var sourceBuffer = mediaSource.addSourceBuffer('video/m2pt');
- // video only
- sourceBuffer.videoCodec_ = true;
- sourceBuffer.audioCodec_ = false;
- mediaSource.updateActiveSourceBuffers_();
- _qunit2['default'].equal(mediaSource.activeSourceBuffers.length, 1, 'sourceBuffer is active');
- _qunit2['default'].ok(mediaSource.activeSourceBuffers[0].audioDisabled_, 'audio is disabled on video only active sourceBuffer');
- // audio only
- sourceBuffer.videoCodec_ = false;
- sourceBuffer.audioCodec_ = true;
- mediaSource.updateActiveSourceBuffers_();
- _qunit2['default'].equal(mediaSource.activeSourceBuffers.length, 1, 'sourceBuffer is active');
- _qunit2['default'].notOk(mediaSource.activeSourceBuffers[0].audioDisabled_, 'audio not disabled on audio only active sourceBuffer');
- });
- _qunit2['default'].test('video segments with info trigger videooinfo event', function () {
- var data = new Uint8Array(1);
- var infoEvents = [];
- var mediaSource = new _videoJs2['default'].MediaSource();
- var sourceBuffer = mediaSource.addSourceBuffer('video/mp2t');
- var info = { width: 100 };
- var newinfo = { width: 225 };
- mediaSource.on('videoinfo', function (e) {
- return infoEvents.push(e);
- });
- // send an audio segment with info, then send done
- sourceBuffer.transmuxer_.onmessage(createDataMessage('video', data, { info: info }));
- sourceBuffer.transmuxer_.onmessage(doneMessage);
- _qunit2['default'].equal(infoEvents.length, 1, 'video info should trigger');
- _qunit2['default'].deepEqual(infoEvents[0].info, info, 'video info = muxed info');
- // send an audio segment with info, then send done
- sourceBuffer.transmuxer_.onmessage(createDataMessage('video', data, { info: newinfo }));
- sourceBuffer.transmuxer_.onmessage(doneMessage);
- _qunit2['default'].equal(infoEvents.length, 2, 'video info should trigger');
- _qunit2['default'].deepEqual(infoEvents[1].info, newinfo, 'video info = muxed info');
- });
- _qunit2['default'].test('audio segments with info trigger audioinfo event', function () {
- var data = new Uint8Array(1);
- var infoEvents = [];
- var mediaSource = new _videoJs2['default'].MediaSource();
- var sourceBuffer = mediaSource.addSourceBuffer('video/mp2t');
- var info = { width: 100 };
- var newinfo = { width: 225 };
- mediaSource.on('audioinfo', function (e) {
- return infoEvents.push(e);
- });
- // send an audio segment with info, then send done
- sourceBuffer.transmuxer_.onmessage(createDataMessage('audio', data, { info: info }));
- sourceBuffer.transmuxer_.onmessage(doneMessage);
- _qunit2['default'].equal(infoEvents.length, 1, 'audio info should trigger');
- _qunit2['default'].deepEqual(infoEvents[0].info, info, 'audio info = muxed info');
- // send an audio segment with info, then send done
- sourceBuffer.transmuxer_.onmessage(createDataMessage('audio', data, { info: newinfo }));
- sourceBuffer.transmuxer_.onmessage(doneMessage);
- _qunit2['default'].equal(infoEvents.length, 2, 'audio info should trigger');
- _qunit2['default'].deepEqual(infoEvents[1].info, newinfo, 'audio info = muxed info');
- });
- _qunit2['default'].test('creates native SourceBuffers immediately if a second ' + 'VirtualSourceBuffer is created', function () {
- var mediaSource = new _videoJs2['default'].MediaSource();
- var sourceBuffer = mediaSource.addSourceBuffer('video/mp2t; codecs="avc1.64001f,mp4a.40.5"');
- var sourceBuffer2 = mediaSource.addSourceBuffer('video/mp2t; codecs="mp4a.40.5"');
- _qunit2['default'].ok(mediaSource.videoBuffer_, 'created a video buffer');
- _qunit2['default'].equal(mediaSource.videoBuffer_.type, 'video/mp4;codecs="avc1.64001f"', 'video buffer has the specified codec');
- _qunit2['default'].ok(mediaSource.audioBuffer_, 'created an audio buffer');
- _qunit2['default'].equal(mediaSource.audioBuffer_.type, 'audio/mp4;codecs="mp4a.40.5"', 'audio buffer has the specified codec');
- _qunit2['default'].equal(mediaSource.sourceBuffers.length, 2, 'created two virtual buffers');
- _qunit2['default'].equal(mediaSource.sourceBuffers[0], sourceBuffer, 'returned the virtual buffer');
- _qunit2['default'].equal(mediaSource.sourceBuffers[1], sourceBuffer2, 'returned the virtual buffer');
- _qunit2['default'].equal(sourceBuffer.audioDisabled_, true, 'first source buffer\'s audio is automatically disabled');
- _qunit2['default'].ok(sourceBuffer2.audioBuffer_, 'second source buffer has an audio source buffer');
- });
- _qunit2['default'].module('VirtualSourceBuffer - Isolated Functions');
- _qunit2['default'].test('gopsSafeToAlignWith returns correct list', function () {
- // gopsSafeToAlignWith uses a 3 second safetyNet so that gops very close to the playhead
- // are not considered safe to append to
- var safetyNet = 3;
- var pts = function pts(time) {
- return Math.ceil(time * 90000);
- };
- var mapping = 0;
- var _currentTime = 0;
- var buffer = [];
- var player = undefined;
- var actual = undefined;
- var expected = undefined;
- expected = [];
- actual = (0, _srcVirtualSourceBuffer.gopsSafeToAlignWith)(buffer, player, mapping);
- _qunit2['default'].deepEqual(actual, expected, 'empty array when player is undefined');
- player = { currentTime: function currentTime() {
- return _currentTime;
- } };
- actual = (0, _srcVirtualSourceBuffer.gopsSafeToAlignWith)(buffer, player, mapping);
- _qunit2['default'].deepEqual(actual, expected, 'empty array when buffer is empty');
- buffer = expected = [{ pts: pts(_currentTime + safetyNet + 1) }, { pts: pts(_currentTime + safetyNet + 2) }, { pts: pts(_currentTime + safetyNet + 3) }];
- actual = (0, _srcVirtualSourceBuffer.gopsSafeToAlignWith)(buffer, player, mapping);
- _qunit2['default'].deepEqual(actual, expected, 'entire buffer considered safe when all gops come after currentTime + safetyNet');
- buffer = [{ pts: pts(_currentTime + safetyNet) }, { pts: pts(_currentTime + safetyNet + 1) }, { pts: pts(_currentTime + safetyNet + 2) }];
- expected = [{ pts: pts(_currentTime + safetyNet + 1) }, { pts: pts(_currentTime + safetyNet + 2) }];
- actual = (0, _srcVirtualSourceBuffer.gopsSafeToAlignWith)(buffer, player, mapping);
- _qunit2['default'].deepEqual(actual, expected, 'safetyNet comparison is not inclusive');
- _currentTime = 10;
- mapping = -5;
- buffer = [{ pts: pts(_currentTime - mapping + safetyNet - 2) }, { pts: pts(_currentTime - mapping + safetyNet - 1) }, { pts: pts(_currentTime - mapping + safetyNet) }, { pts: pts(_currentTime - mapping + safetyNet + 1) }, { pts: pts(_currentTime - mapping + safetyNet + 2) }];
- expected = [{ pts: pts(_currentTime - mapping + safetyNet + 1) }, { pts: pts(_currentTime - mapping + safetyNet + 2) }];
- actual = (0, _srcVirtualSourceBuffer.gopsSafeToAlignWith)(buffer, player, mapping);
- _qunit2['default'].deepEqual(actual, expected, 'uses mapping to shift currentTime');
- _currentTime = 20;
- expected = [];
- actual = (0, _srcVirtualSourceBuffer.gopsSafeToAlignWith)(buffer, player, mapping);
- _qunit2['default'].deepEqual(actual, expected, 'empty array when no gops in buffer come after currentTime');
- });
- _qunit2['default'].test('updateGopBuffer correctly processes new gop information', function () {
- var buffer = [];
- var gops = [];
- var replace = true;
- var actual = undefined;
- var expected = undefined;
- buffer = expected = [{ pts: 100 }, { pts: 200 }];
- actual = (0, _srcVirtualSourceBuffer.updateGopBuffer)(buffer, gops, replace);
- _qunit2['default'].deepEqual(actual, expected, 'returns buffer when no new gops');
- gops = expected = [{ pts: 300 }, { pts: 400 }];
- actual = (0, _srcVirtualSourceBuffer.updateGopBuffer)(buffer, gops, replace);
- _qunit2['default'].deepEqual(actual, expected, 'returns only new gops when replace is true');
- replace = false;
- buffer = [];
- gops = [{ pts: 100 }];
- expected = [{ pts: 100 }];
- actual = (0, _srcVirtualSourceBuffer.updateGopBuffer)(buffer, gops, replace);
- _qunit2['default'].deepEqual(actual, expected, 'appends new gops to empty buffer');
- buffer = [{ pts: 100 }, { pts: 200 }];
- gops = [{ pts: 300 }, { pts: 400 }];
- expected = [{ pts: 100 }, { pts: 200 }, { pts: 300 }, { pts: 400 }];
- actual = (0, _srcVirtualSourceBuffer.updateGopBuffer)(buffer, gops, replace);
- _qunit2['default'].deepEqual(actual, expected, 'appends new gops at end of buffer when no overlap');
- buffer = [{ pts: 100 }, { pts: 200 }, { pts: 300 }, { pts: 400 }];
- gops = [{ pts: 250 }, { pts: 300 }, { pts: 350 }];
- expected = [{ pts: 100 }, { pts: 200 }, { pts: 250 }, { pts: 300 }, { pts: 350 }];
- actual = (0, _srcVirtualSourceBuffer.updateGopBuffer)(buffer, gops, replace);
- _qunit2['default'].deepEqual(actual, expected, 'slices buffer at point of overlap and appends new gops');
- buffer = [{ pts: 100 }, { pts: 200 }, { pts: 300 }, { pts: 400 }];
- gops = [{ pts: 200 }, { pts: 300 }, { pts: 350 }];
- expected = [{ pts: 100 }, { pts: 200 }, { pts: 300 }, { pts: 350 }];
- actual = (0, _srcVirtualSourceBuffer.updateGopBuffer)(buffer, gops, replace);
- _qunit2['default'].deepEqual(actual, expected, 'overlap slice is inclusive');
- buffer = [{ pts: 300 }, { pts: 400 }, { pts: 500 }, { pts: 600 }];
- gops = [{ pts: 100 }, { pts: 200 }, { pts: 250 }];
- expected = [{ pts: 100 }, { pts: 200 }, { pts: 250 }];
- actual = (0, _srcVirtualSourceBuffer.updateGopBuffer)(buffer, gops, replace);
- _qunit2['default'].deepEqual(actual, expected, 'completely replaces buffer with new gops when all gops come before buffer');
- });
- _qunit2['default'].test('removeGopBuffer correctly removes range from buffer', function () {
- var pts = function pts(time) {
- return Math.ceil(time * 90000);
- };
- var buffer = [];
- var start = 0;
- var end = 0;
- var mapping = -5;
- var actual = undefined;
- var expected = undefined;
- expected = [];
- actual = (0, _srcVirtualSourceBuffer.removeGopBuffer)(buffer, start, end, mapping);
- _qunit2['default'].deepEqual(actual, expected, 'returns empty array when buffer empty');
- start = 0;
- end = 8;
- buffer = expected = [{ pts: pts(10 - mapping) }, { pts: pts(11 - mapping) }, { pts: pts(12 - mapping) }, { pts: pts(15 - mapping) }, { pts: pts(18 - mapping) }, { pts: pts(20 - mapping) }];
- actual = (0, _srcVirtualSourceBuffer.removeGopBuffer)(buffer, start, end, mapping);
- _qunit2['default'].deepEqual(actual, expected, 'no removal when remove range comes before start of buffer');
- start = 22;
- end = 30;
- buffer = [{ pts: pts(10 - mapping) }, { pts: pts(11 - mapping) }, { pts: pts(12 - mapping) }, { pts: pts(15 - mapping) }, { pts: pts(18 - mapping) }, { pts: pts(20 - mapping) }];
- expected = [{ pts: pts(10 - mapping) }, { pts: pts(11 - mapping) }, { pts: pts(12 - mapping) }, { pts: pts(15 - mapping) }, { pts: pts(18 - mapping) }];
- actual = (0, _srcVirtualSourceBuffer.removeGopBuffer)(buffer, start, end, mapping);
- _qunit2['default'].deepEqual(actual, expected, 'removes last gop when remove range is after end of buffer');
- start = 0;
- end = 10;
- buffer = [{ pts: pts(10 - mapping) }, { pts: pts(11 - mapping) }, { pts: pts(12 - mapping) }, { pts: pts(15 - mapping) }, { pts: pts(18 - mapping) }, { pts: pts(20 - mapping) }];
- expected = [{ pts: pts(11 - mapping) }, { pts: pts(12 - mapping) }, { pts: pts(15 - mapping) }, { pts: pts(18 - mapping) }, { pts: pts(20 - mapping) }];
- actual = (0, _srcVirtualSourceBuffer.removeGopBuffer)(buffer, start, end, mapping);
- _qunit2['default'].deepEqual(actual, expected, 'clamps start range to begining of buffer');
- start = 0;
- end = 12;
- buffer = [{ pts: pts(10 - mapping) }, { pts: pts(11 - mapping) }, { pts: pts(12 - mapping) }, { pts: pts(15 - mapping) }, { pts: pts(18 - mapping) }, { pts: pts(20 - mapping) }];
- expected = [{ pts: pts(15 - mapping) }, { pts: pts(18 - mapping) }, { pts: pts(20 - mapping) }];
- actual = (0, _srcVirtualSourceBuffer.removeGopBuffer)(buffer, start, end, mapping);
- _qunit2['default'].deepEqual(actual, expected, 'clamps start range to begining of buffer');
- start = 0;
- end = 14;
- buffer = [{ pts: pts(10 - mapping) }, { pts: pts(11 - mapping) }, { pts: pts(12 - mapping) }, { pts: pts(15 - mapping) }, { pts: pts(18 - mapping) }, { pts: pts(20 - mapping) }];
- expected = [{ pts: pts(15 - mapping) }, { pts: pts(18 - mapping) }, { pts: pts(20 - mapping) }];
- actual = (0, _srcVirtualSourceBuffer.removeGopBuffer)(buffer, start, end, mapping);
- _qunit2['default'].deepEqual(actual, expected, 'clamps start range to begining of buffer');
- start = 15;
- end = 30;
- buffer = [{ pts: pts(10 - mapping) }, { pts: pts(11 - mapping) }, { pts: pts(12 - mapping) }, { pts: pts(15 - mapping) }, { pts: pts(18 - mapping) }, { pts: pts(20 - mapping) }];
- expected = [{ pts: pts(10 - mapping) }, { pts: pts(11 - mapping) }, { pts: pts(12 - mapping) }];
- actual = (0, _srcVirtualSourceBuffer.removeGopBuffer)(buffer, start, end, mapping);
- _qunit2['default'].deepEqual(actual, expected, 'clamps end range to end of buffer');
- start = 17;
- end = 30;
- buffer = [{ pts: pts(10 - mapping) }, { pts: pts(11 - mapping) }, { pts: pts(12 - mapping) }, { pts: pts(15 - mapping) }, { pts: pts(18 - mapping) }, { pts: pts(20 - mapping) }];
- expected = [{ pts: pts(10 - mapping) }, { pts: pts(11 - mapping) }, { pts: pts(12 - mapping) }];
- actual = (0, _srcVirtualSourceBuffer.removeGopBuffer)(buffer, start, end, mapping);
- _qunit2['default'].deepEqual(actual, expected, 'clamps end range to end of buffer');
- start = 20;
- end = 30;
- buffer = [{ pts: pts(10 - mapping) }, { pts: pts(11 - mapping) }, { pts: pts(12 - mapping) }, { pts: pts(15 - mapping) }, { pts: pts(18 - mapping) }, { pts: pts(20 - mapping) }];
- expected = [{ pts: pts(10 - mapping) }, { pts: pts(11 - mapping) }, { pts: pts(12 - mapping) }, { pts: pts(15 - mapping) }, { pts: pts(18 - mapping) }];
- actual = (0, _srcVirtualSourceBuffer.removeGopBuffer)(buffer, start, end, mapping);
- _qunit2['default'].deepEqual(actual, expected, 'clamps end range to end of buffer');
- buffer = [{ pts: pts(10 - mapping) }, { pts: pts(11 - mapping) }, { pts: pts(12 - mapping) }, { pts: pts(15 - mapping) }, { pts: pts(18 - mapping) }, { pts: pts(20 - mapping) }];
- start = 12;
- end = 15;
- expected = [{ pts: pts(10 - mapping) }, { pts: pts(11 - mapping) }, { pts: pts(18 - mapping) }, { pts: pts(20 - mapping) }];
- actual = (0, _srcVirtualSourceBuffer.removeGopBuffer)(buffer, start, end, mapping);
- _qunit2['default'].deepEqual(actual, expected, 'removes gops that remove range intersects with');
- buffer = [{ pts: pts(10 - mapping) }, { pts: pts(11 - mapping) }, { pts: pts(12 - mapping) }, { pts: pts(15 - mapping) }, { pts: pts(18 - mapping) }, { pts: pts(20 - mapping) }];
- start = 12;
- end = 14;
- expected = [{ pts: pts(10 - mapping) }, { pts: pts(11 - mapping) }, { pts: pts(15 - mapping) }, { pts: pts(18 - mapping) }, { pts: pts(20 - mapping) }];
- actual = (0, _srcVirtualSourceBuffer.removeGopBuffer)(buffer, start, end, mapping);
- _qunit2['default'].deepEqual(actual, expected, 'removes gops that remove range intersects with');
- buffer = [{ pts: pts(10 - mapping) }, { pts: pts(11 - mapping) }, { pts: pts(12 - mapping) }, { pts: pts(15 - mapping) }, { pts: pts(18 - mapping) }, { pts: pts(20 - mapping) }];
- start = 13;
- end = 14;
- expected = [{ pts: pts(10 - mapping) }, { pts: pts(11 - mapping) }, { pts: pts(15 - mapping) }, { pts: pts(18 - mapping) }, { pts: pts(20 - mapping) }];
- actual = (0, _srcVirtualSourceBuffer.removeGopBuffer)(buffer, start, end, mapping);
- _qunit2['default'].deepEqual(actual, expected, 'removes gops that remove range intersects with');
- buffer = [{ pts: pts(10 - mapping) }, { pts: pts(11 - mapping) }, { pts: pts(12 - mapping) }, { pts: pts(15 - mapping) }, { pts: pts(18 - mapping) }, { pts: pts(20 - mapping) }];
- start = 13;
- end = 15;
- expected = [{ pts: pts(10 - mapping) }, { pts: pts(11 - mapping) }, { pts: pts(18 - mapping) }, { pts: pts(20 - mapping) }];
- actual = (0, _srcVirtualSourceBuffer.removeGopBuffer)(buffer, start, end, mapping);
- _qunit2['default'].deepEqual(actual, expected, 'removes gops that remove range intersects with');
- buffer = [{ pts: pts(10 - mapping) }, { pts: pts(11 - mapping) }, { pts: pts(12 - mapping) }, { pts: pts(15 - mapping) }, { pts: pts(18 - mapping) }, { pts: pts(20 - mapping) }];
- start = 12;
- end = 17;
- expected = [{ pts: pts(10 - mapping) }, { pts: pts(11 - mapping) }, { pts: pts(18 - mapping) }, { pts: pts(20 - mapping) }];
- actual = (0, _srcVirtualSourceBuffer.removeGopBuffer)(buffer, start, end, mapping);
- _qunit2['default'].deepEqual(actual, expected, 'removes gops that remove range intersects with');
- buffer = [{ pts: pts(10 - mapping) }, { pts: pts(11 - mapping) }, { pts: pts(12 - mapping) }, { pts: pts(15 - mapping) }, { pts: pts(18 - mapping) }, { pts: pts(20 - mapping) }];
- start = 13;
- end = 16;
- expected = [{ pts: pts(10 - mapping) }, { pts: pts(11 - mapping) }, { pts: pts(18 - mapping) }, { pts: pts(20 - mapping) }];
- actual = (0, _srcVirtualSourceBuffer.removeGopBuffer)(buffer, start, end, mapping);
- _qunit2['default'].deepEqual(actual, expected, 'removes gops that remove range intersects with');
- start = 10;
- end = 20;
- buffer = [{ pts: pts(10 - mapping) }, { pts: pts(11 - mapping) }, { pts: pts(12 - mapping) }, { pts: pts(15 - mapping) }, { pts: pts(18 - mapping) }, { pts: pts(20 - mapping) }];
- expected = [];
- actual = (0, _srcVirtualSourceBuffer.removeGopBuffer)(buffer, start, end, mapping);
- _qunit2['default'].deepEqual(actual, expected, 'removes entire buffer when buffer inside remove range');
- start = 0;
- end = 30;
- buffer = [{ pts: pts(10 - mapping) }, { pts: pts(11 - mapping) }, { pts: pts(12 - mapping) }, { pts: pts(15 - mapping) }, { pts: pts(18 - mapping) }, { pts: pts(20 - mapping) }];
- expected = [];
- actual = (0, _srcVirtualSourceBuffer.removeGopBuffer)(buffer, start, end, mapping);
- _qunit2['default'].deepEqual(actual, expected, 'removes entire buffer when buffer inside remove range');
- });
- }).call(this,typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {})
- },{"../src/html-media-source":42,"../src/videojs-contrib-media-sources.js":45,"../src/virtual-source-buffer":46,"global/document":2,"global/window":3}]},{},[47,48,49,50,51]);
|