hls.light.mjs 712 KB

430914310143111431214313143141431514316143171431814319143201432114322143231432414325143261432714328143291433014331143321433314334143351433614337143381433914340143411434214343143441434514346143471434814349143501435114352143531435414355143561435714358143591436014361143621436314364143651436614367143681436914370143711437214373143741437514376143771437814379143801438114382143831438414385143861438714388143891439014391143921439314394143951439614397143981439914400144011440214403144041440514406144071440814409144101441114412144131441414415144161441714418144191442014421144221442314424144251442614427144281442914430144311443214433144341443514436144371443814439144401444114442144431444414445144461444714448144491445014451144521445314454144551445614457144581445914460144611446214463144641446514466144671446814469144701447114472144731447414475144761447714478144791448014481144821448314484144851448614487144881448914490144911449214493144941449514496144971449814499145001450114502145031450414505145061450714508145091451014511145121451314514145151451614517145181451914520145211452214523145241452514526145271452814529145301453114532145331453414535145361453714538145391454014541145421454314544145451454614547145481454914550145511455214553145541455514556145571455814559145601456114562145631456414565145661456714568145691457014571145721457314574145751457614577145781457914580145811458214583145841458514586145871458814589145901459114592145931459414595145961459714598145991460014601146021460314604146051460614607146081460914610146111461214613146141461514616146171461814619146201462114622146231462414625146261462714628146291463014631146321463314634146351463614637146381463914640146411464214643146441464514646146471464814649146501465114652146531465414655146561465714658146591466014661146621466314664146651466614667146681466914670146711467214673146741467514676146771467814679146801468114682146831468414685146861468714688146891469014691146921469314694146951469614697146981469914700147011470214703147041470514706147071470814709147101471114712147131471414715147161471714718147191472014721147221472314724147251472614727147281472914730147311473214733147341473514736147371473814739147401474114742147431474414745147461474714748147491475014751147521475314754147551475614757147581475914760147611476214763147641476514766147671476814769147701477114772147731477414775147761477714778147791478014781147821478314784147851478614787147881478914790147911479214793147941479514796147971479814799148001480114802148031480414805148061480714808148091481014811148121481314814148151481614817148181481914820148211482214823148241482514826148271482814829148301483114832148331483414835148361483714838148391484014841148421484314844148451484614847148481484914850148511485214853148541485514856148571485814859148601486114862148631486414865148661486714868148691487014871148721487314874148751487614877148781487914880148811488214883148841488514886148871488814889148901489114892148931489414895148961489714898148991490014901149021490314904149051490614907149081490914910149111491214913149141491514916149171491814919149201492114922149231492414925149261492714928149291493014931149321493314934149351493614937149381493914940149411494214943149441494514946149471494814949149501495114952149531495414955149561495714958149591496014961149621496314964149651496614967149681496914970149711497214973149741497514976149771497814979149801498114982149831498414985149861498714988149891499014991149921499314994149951499614997149981499915000150011500215003150041500515006150071500815009150101501115012150131501415015150161501715018150191
502015021150221502315024150251502615027150281502915030150311503215033150341503515036150371503815039150401504115042150431504415045150461504715048150491505015051150521505315054150551505615057150581505915060150611506215063150641506515066150671506815069150701507115072150731507415075150761507715078150791508015081150821508315084150851508615087150881508915090150911509215093150941509515096150971509815099151001510115102151031510415105151061510715108151091511015111151121511315114151151511615117151181511915120151211512215123151241512515126151271512815129151301513115132151331513415135151361513715138151391514015141151421514315144151451514615147151481514915150151511515215153151541515515156151571515815159151601516115162151631516415165151661516715168151691517015171151721517315174151751517615177151781517915180151811518215183151841518515186151871518815189151901519115192151931519415195151961519715198151991520015201152021520315204152051520615207152081520915210152111521215213152141521515216152171521815219152201522115222152231522415225152261522715228152291523015231152321523315234152351523615237152381523915240152411524215243152441524515246152471524815249152501525115252152531525415255152561525715258152591526015261152621526315264152651526615267152681526915270152711527215273152741527515276152771527815279152801528115282152831528415285152861528715288152891529015291152921529315294152951529615297152981529915300153011530215303153041530515306153071530815309153101531115312153131531415315153161531715318153191532015321153221532315324153251532615327153281532915330153311533215333153341533515336153371533815339153401534115342153431534415345153461534715348153491535015351153521535315354153551535615357153581535915360153611536215363153641536515366153671536815369153701537115372153731537415375153761537715378153791538015381153821538315384153851538615387153881538915390153911539215393153941539515396153971539815399154001540115402154031540415405154061540715408154091541015411154121541315414154151541615417154181541915420154211542215423154241542515426154271542815429154301543115432154331543415435154361543715438154391544015441154421544315444154451544615447154481544915450154511545215453154541545515456154571545815459154601546115462154631546415465154661546715468154691547015471154721547315474154751547615477154781547915480154811548215483154841548515486154871548815489154901549115492154931549415495154961549715498154991550015501155021550315504155051550615507155081550915510155111551215513155141551515516155171551815519155201552115522155231552415525155261552715528155291553015531155321553315534155351553615537155381553915540155411554215543155441554515546155471554815549155501555115552155531555415555155561555715558155591556015561155621556315564155651556615567155681556915570155711557215573155741557515576155771557815579155801558115582155831558415585155861558715588155891559015591155921559315594155951559615597155981559915600156011560215603156041560515606156071560815609156101561115612156131561415615156161561715618156191562015621156221562315624156251562615627156281562915630156311563215633156341563515636156371563815639156401564115642156431564415645156461564715648156491565015651156521565315654156551565615657156581565915660156611566215663156641566515666156671566815669156701567115672156731567415675156761567715678156791568015681156821568315684156851568615687156881568915690156911569215693156941569515696156971569815699157001570115702157031570415705157061570715708157091571015711157121571315714157151571615717157181571915720157211572215723157241572515726157271572815729157301
573115732157331573415735157361573715738157391574015741157421574315744157451574615747157481574915750157511575215753157541575515756157571575815759157601576115762157631576415765157661576715768157691577015771157721577315774157751577615777157781577915780157811578215783157841578515786157871578815789157901579115792157931579415795157961579715798157991580015801158021580315804158051580615807158081580915810158111581215813158141581515816158171581815819158201582115822158231582415825158261582715828158291583015831158321583315834158351583615837158381583915840158411584215843158441584515846158471584815849158501585115852158531585415855158561585715858158591586015861158621586315864158651586615867158681586915870158711587215873158741587515876158771587815879158801588115882158831588415885158861588715888158891589015891158921589315894158951589615897158981589915900159011590215903159041590515906159071590815909159101591115912159131591415915159161591715918159191592015921159221592315924159251592615927159281592915930159311593215933159341593515936159371593815939159401594115942159431594415945159461594715948159491595015951159521595315954159551595615957159581595915960159611596215963159641596515966159671596815969159701597115972159731597415975159761597715978159791598015981159821598315984159851598615987159881598915990159911599215993159941599515996159971599815999160001600116002160031600416005160061600716008160091601016011160121601316014160151601616017160181601916020160211602216023160241602516026160271602816029160301603116032160331603416035160361603716038160391604016041160421604316044160451604616047160481604916050160511605216053160541605516056160571605816059160601606116062160631606416065160661606716068160691607016071160721607316074160751607616077160781607916080160811608216083160841608516086160871608816089160901609116092160931609416095160961609716098160991610016101161021610316104161051610616107161081610916110161111611216113161141611516116161171611816119161201612116122161231612416125161261612716128161291613016131161321613316134161351613616137161381613916140161411614216143161441614516146161471614816149161501615116152161531615416155161561615716158161591616016161161621616316164161651616616167161681616916170161711617216173161741617516176161771617816179161801618116182161831618416185161861618716188161891619016191161921619316194161951619616197161981619916200162011620216203162041620516206162071620816209162101621116212162131621416215162161621716218162191622016221162221622316224162251622616227162281622916230162311623216233162341623516236162371623816239162401624116242162431624416245162461624716248162491625016251162521625316254162551625616257162581625916260162611626216263162641626516266162671626816269162701627116272162731627416275162761627716278162791628016281162821628316284162851628616287162881628916290162911629216293162941629516296162971629816299163001630116302163031630416305163061630716308163091631016311163121631316314163151631616317163181631916320163211632216323163241632516326163271632816329163301633116332163331633416335163361633716338163391634016341163421634316344163451634616347163481634916350163511635216353163541635516356163571635816359163601636116362163631636416365163661636716368163691637016371163721637316374163751637616377163781637916380163811638216383163841638516386163871638816389163901639116392163931639416395163961639716398163991640016401164021640316404164051640616407164081640916410164111641216413164141641516416164171641816419164201642116422164231642416425164261642716428164291643016431164321643316434164351643616437164381643916440164411
644216443164441644516446164471644816449164501645116452164531645416455164561645716458164591646016461164621646316464164651646616467164681646916470164711647216473164741647516476164771647816479164801648116482164831648416485164861648716488164891649016491164921649316494164951649616497164981649916500165011650216503165041650516506165071650816509165101651116512165131651416515165161651716518165191652016521165221652316524165251652616527165281652916530165311653216533165341653516536165371653816539165401654116542165431654416545165461654716548165491655016551165521655316554165551655616557165581655916560165611656216563165641656516566165671656816569165701657116572165731657416575165761657716578165791658016581165821658316584165851658616587165881658916590165911659216593165941659516596165971659816599166001660116602166031660416605166061660716608166091661016611166121661316614166151661616617166181661916620166211662216623166241662516626166271662816629166301663116632166331663416635166361663716638166391664016641166421664316644166451664616647166481664916650166511665216653166541665516656166571665816659166601666116662166631666416665166661666716668166691667016671166721667316674166751667616677166781667916680166811668216683166841668516686166871668816689166901669116692166931669416695166961669716698166991670016701167021670316704167051670616707167081670916710167111671216713167141671516716167171671816719167201672116722167231672416725167261672716728167291673016731167321673316734167351673616737167381673916740167411674216743167441674516746167471674816749167501675116752167531675416755167561675716758167591676016761167621676316764167651676616767167681676916770167711677216773167741677516776167771677816779167801678116782167831678416785167861678716788167891679016791167921679316794167951679616797167981679916800168011680216803168041680516806168071680816809168101681116812168131681416815168161681716818168191682016821168221682316824168251682616827168281682916830168311683216833168341683516836168371683816839168401684116842168431684416845168461684716848168491685016851168521685316854168551685616857168581685916860168611686216863168641686516866168671686816869168701687116872168731687416875168761687716878168791688016881168821688316884168851688616887168881688916890168911689216893168941689516896168971689816899169001690116902169031690416905169061690716908169091691016911169121691316914169151691616917169181691916920169211692216923169241692516926169271692816929169301693116932169331693416935169361693716938169391694016941169421694316944169451694616947169481694916950169511695216953169541695516956169571695816959169601696116962169631696416965169661696716968169691697016971169721697316974169751697616977169781697916980169811698216983169841698516986169871698816989169901699116992169931699416995169961699716998169991700017001170021700317004170051700617007170081700917010170111701217013170141701517016170171701817019170201702117022170231702417025170261702717028170291703017031170321703317034170351703617037170381703917040170411704217043170441704517046170471704817049170501705117052170531705417055170561705717058170591706017061170621706317064170651706617067170681706917070170711707217073170741707517076170771707817079170801708117082170831708417085170861708717088170891709017091170921709317094170951709617097170981709917100171011710217103171041710517106171071710817109171101711117112171131711417115171161711717118171191712017121171221712317124171251712617127171281712917130171311713217133171341713517136171371713817139171401714117142171431714417145171461714717148171491715017151171521
715317154171551715617157171581715917160171611716217163171641716517166171671716817169171701717117172171731717417175171761717717178171791718017181171821718317184171851718617187171881718917190171911719217193171941719517196171971719817199172001720117202172031720417205172061720717208172091721017211172121721317214172151721617217172181721917220172211722217223172241722517226172271722817229172301723117232172331723417235172361723717238172391724017241172421724317244172451724617247172481724917250172511725217253172541725517256172571725817259172601726117262172631726417265172661726717268172691727017271172721727317274172751727617277172781727917280172811728217283172841728517286172871728817289172901729117292172931729417295172961729717298172991730017301173021730317304173051730617307173081730917310173111731217313173141731517316173171731817319173201732117322173231732417325173261732717328173291733017331173321733317334173351733617337173381733917340173411734217343173441734517346173471734817349173501735117352173531735417355173561735717358173591736017361173621736317364173651736617367173681736917370173711737217373173741737517376173771737817379173801738117382173831738417385173861738717388173891739017391173921739317394173951739617397173981739917400174011740217403174041740517406174071740817409174101741117412174131741417415174161741717418174191742017421174221742317424174251742617427174281742917430174311743217433174341743517436174371743817439174401744117442174431744417445174461744717448174491745017451174521745317454174551745617457174581745917460174611746217463174641746517466174671746817469174701747117472174731747417475174761747717478174791748017481174821748317484174851748617487174881748917490174911749217493174941749517496174971749817499175001750117502175031750417505175061750717508175091751017511175121751317514175151751617517175181751917520175211752217523175241752517526175271752817529175301753117532175331753417535175361753717538175391754017541175421754317544175451754617547175481754917550175511755217553175541755517556175571755817559175601756117562175631756417565175661756717568175691757017571175721757317574175751757617577175781757917580175811758217583175841758517586175871758817589175901759117592175931759417595175961759717598175991760017601176021760317604176051760617607176081760917610176111761217613176141761517616176171761817619176201762117622176231762417625176261762717628176291763017631176321763317634176351763617637176381763917640176411764217643176441764517646176471764817649176501765117652176531765417655176561765717658176591766017661176621766317664176651766617667176681766917670176711767217673176741767517676176771767817679176801768117682176831768417685176861768717688176891769017691176921769317694176951769617697176981769917700177011770217703177041770517706177071770817709177101771117712177131771417715177161771717718177191772017721177221772317724177251772617727177281772917730177311773217733177341773517736177371773817739177401774117742177431774417745177461774717748177491775017751177521775317754177551775617757177581775917760177611776217763177641776517766177671776817769177701777117772177731777417775177761777717778177791778017781177821778317784177851778617787177881778917790177911779217793177941779517796177971779817799178001780117802178031780417805178061780717808178091781017811178121781317814178151781617817178181781917820178211782217823178241782517826178271782817829178301783117832178331783417835178361783717838178391784017841178421784317844178451784617847178481784917850178511785217853178541785517856178571785817859178601786117862178631
786417865178661786717868178691787017871178721787317874178751787617877178781787917880178811788217883178841788517886178871788817889178901789117892178931789417895178961789717898178991790017901179021790317904179051790617907179081790917910179111791217913179141791517916179171791817919179201792117922179231792417925179261792717928179291793017931179321793317934179351793617937179381793917940179411794217943179441794517946179471794817949179501795117952179531795417955179561795717958179591796017961179621796317964179651796617967179681796917970179711797217973179741797517976179771797817979179801798117982179831798417985179861798717988179891799017991179921799317994179951799617997179981799918000180011800218003180041800518006180071800818009180101801118012180131801418015180161801718018180191802018021180221802318024180251802618027180281802918030180311803218033180341803518036180371803818039180401804118042180431804418045180461804718048180491805018051180521805318054180551805618057180581805918060180611806218063180641806518066180671806818069180701807118072180731807418075180761807718078180791808018081180821808318084180851808618087180881808918090180911809218093180941809518096180971809818099181001810118102181031810418105181061810718108181091811018111181121811318114181151811618117181181811918120181211812218123181241812518126181271812818129181301813118132181331813418135181361813718138181391814018141181421814318144181451814618147181481814918150181511815218153181541815518156181571815818159181601816118162181631816418165181661816718168181691817018171181721817318174181751817618177181781817918180181811818218183181841818518186181871818818189181901819118192181931819418195181961819718198181991820018201182021820318204182051820618207182081820918210182111821218213182141821518216182171821818219182201822118222182231822418225182261822718228182291823018231182321823318234182351823618237182381823918240182411824218243182441824518246182471824818249182501825118252182531825418255182561825718258182591826018261182621826318264182651826618267182681826918270182711827218273182741827518276182771827818279182801828118282182831828418285182861828718288182891829018291182921829318294182951829618297182981829918300183011830218303183041830518306183071830818309183101831118312183131831418315183161831718318183191832018321183221832318324183251832618327183281832918330183311833218333183341833518336183371833818339183401834118342183431834418345183461834718348183491835018351183521835318354183551835618357183581835918360183611836218363183641836518366183671836818369183701837118372183731837418375183761837718378183791838018381183821838318384183851838618387183881838918390183911839218393183941839518396183971839818399184001840118402184031840418405184061840718408184091841018411184121841318414184151841618417184181841918420184211842218423184241842518426184271842818429184301843118432184331843418435184361843718438184391844018441184421844318444184451844618447184481844918450184511845218453184541845518456184571845818459184601846118462184631846418465184661846718468184691847018471184721847318474184751847618477184781847918480184811848218483184841848518486184871848818489184901849118492184931849418495184961849718498184991850018501185021850318504185051850618507185081850918510185111851218513185141851518516185171851818519185201852118522185231852418525185261852718528185291853018531185321853318534185351853618537185381853918540185411854218543185441854518546185471854818549185501855118552185531855418555185561855718558185591856018561185621856318564185651856618567185681856918570185711857218573185741
857518576185771857818579185801858118582185831858418585185861858718588185891859018591185921859318594185951859618597185981859918600186011860218603186041860518606186071860818609186101861118612186131861418615186161861718618186191862018621186221862318624186251862618627186281862918630186311863218633186341863518636186371863818639186401864118642186431864418645186461864718648186491865018651186521865318654186551865618657186581865918660186611866218663186641866518666186671866818669186701867118672186731867418675186761867718678186791868018681186821868318684186851868618687186881868918690186911869218693186941869518696186971869818699187001870118702187031870418705187061870718708187091871018711187121871318714187151871618717187181871918720187211872218723187241872518726187271872818729187301873118732187331873418735187361873718738187391874018741187421874318744187451874618747187481874918750187511875218753187541875518756187571875818759187601876118762187631876418765187661876718768187691877018771187721877318774187751877618777187781877918780187811878218783187841878518786187871878818789187901879118792187931879418795187961879718798187991880018801188021880318804188051880618807188081880918810188111881218813188141881518816188171881818819188201882118822188231882418825188261882718828188291883018831188321883318834188351883618837188381883918840188411884218843188441884518846188471884818849188501885118852188531885418855188561885718858188591886018861188621886318864188651886618867188681886918870188711887218873188741887518876188771887818879188801888118882188831888418885188861888718888188891889018891188921889318894188951889618897188981889918900189011890218903189041890518906189071890818909189101891118912189131891418915189161891718918189191892018921189221892318924189251892618927189281892918930189311893218933189341893518936189371893818939189401894118942189431894418945189461894718948189491895018951189521895318954189551895618957189581895918960189611896218963189641896518966189671896818969189701897118972189731897418975189761897718978189791898018981189821898318984189851898618987189881898918990189911899218993189941899518996189971899818999190001900119002190031900419005190061900719008190091901019011190121901319014190151901619017190181901919020190211902219023190241902519026190271902819029190301903119032190331903419035190361903719038190391904019041190421904319044190451904619047190481904919050190511905219053190541905519056190571905819059190601906119062190631906419065190661906719068190691907019071190721907319074190751907619077190781907919080190811908219083190841908519086190871908819089190901909119092190931909419095190961909719098190991910019101191021910319104191051910619107191081910919110191111911219113191141911519116191171911819119191201912119122191231912419125191261912719128191291913019131191321913319134191351913619137191381913919140191411914219143191441914519146191471914819149191501915119152191531915419155191561915719158191591916019161191621916319164191651916619167191681916919170191711917219173191741917519176191771917819179191801918119182191831918419185191861918719188191891919019191191921919319194191951919619197191981919919200192011920219203192041920519206192071920819209192101921119212192131921419215192161921719218192191922019221192221922319224192251922619227192281922919230192311923219233192341923519236192371923819239192401924119242192431924419245192461924719248192491925019251192521925319254192551925619257192581925919260192611926219263192641926519266192671926819269192701927119272192731927419275192761927719278192791928019281192821928319284192851
928619287192881928919290192911929219293192941929519296192971929819299193001930119302193031930419305193061930719308193091931019311193121931319314193151931619317193181931919320193211932219323193241932519326193271932819329193301933119332193331933419335193361933719338193391934019341193421934319344193451934619347193481934919350193511935219353193541935519356193571935819359193601936119362193631936419365193661936719368193691937019371193721937319374193751937619377193781937919380193811938219383193841938519386193871938819389193901939119392193931939419395193961939719398193991940019401194021940319404194051940619407194081940919410194111941219413194141941519416194171941819419194201942119422194231942419425194261942719428194291943019431194321943319434194351943619437194381943919440194411944219443194441944519446194471944819449194501945119452194531945419455194561945719458194591946019461194621946319464194651946619467194681946919470194711947219473194741947519476194771947819479194801948119482194831948419485194861948719488194891949019491194921949319494194951949619497194981949919500195011950219503195041950519506195071950819509195101951119512195131951419515195161951719518195191952019521195221952319524195251952619527195281952919530195311953219533195341953519536195371953819539195401954119542195431954419545195461954719548195491955019551195521955319554195551955619557195581955919560195611956219563195641956519566195671956819569195701957119572195731957419575195761957719578195791958019581195821958319584195851958619587195881958919590195911959219593195941959519596195971959819599196001960119602196031960419605196061960719608196091961019611196121961319614196151961619617196181961919620196211962219623196241962519626196271962819629196301963119632196331963419635196361963719638196391964019641196421964319644196451964619647196481964919650196511965219653196541965519656196571965819659196601966119662196631966419665196661966719668196691967019671196721967319674196751967619677196781967919680196811968219683196841968519686196871968819689196901969119692196931969419695196961969719698196991970019701197021970319704197051970619707197081970919710197111971219713197141971519716197171971819719197201972119722197231972419725197261972719728197291973019731197321973319734197351973619737197381973919740197411974219743197441974519746197471974819749197501975119752197531975419755197561975719758197591976019761197621976319764197651976619767197681976919770197711977219773197741977519776197771977819779197801978119782197831978419785197861978719788197891979019791197921979319794197951979619797197981979919800198011980219803198041980519806198071980819809198101981119812198131981419815198161981719818198191982019821198221982319824198251982619827198281982919830198311983219833198341983519836198371983819839198401984119842198431984419845198461984719848198491985019851198521985319854198551985619857198581985919860198611986219863198641986519866198671986819869198701987119872198731987419875198761987719878198791988019881198821988319884198851988619887198881988919890198911989219893198941989519896198971989819899199001990119902199031990419905199061990719908199091991019911199121991319914199151991619917199181991919920199211992219923199241992519926199271992819929199301993119932199331993419935199361993719938199391994019941199421994319944199451994619947199481994919950199511995219953199541995519956199571995819959199601996119962199631996419965199661996719968199691997019971199721997319974199751997619977199781997919980199811998219983199841998519986199871998819989199901999119992199931999419995199961
99971999819999200002000120002200032000420005200062000720008200092001020011200122001320014200152001620017200182001920020200212002220023200242002520026200272002820029200302003120032200332003420035200362003720038200392004020041200422004320044200452004620047200482004920050200512005220053200542005520056200572005820059200602006120062200632006420065200662006720068200692007020071200722007320074200752007620077200782007920080200812008220083200842008520086200872008820089200902009120092200932009420095200962009720098200992010020101201022010320104201052010620107201082010920110201112011220113201142011520116201172011820119201202012120122201232012420125201262012720128201292013020131201322013320134201352013620137201382013920140201412014220143201442014520146201472014820149201502015120152201532015420155201562015720158201592016020161201622016320164201652016620167201682016920170201712017220173201742017520176201772017820179201802018120182201832018420185201862018720188201892019020191201922019320194201952019620197201982019920200202012020220203202042020520206202072020820209202102021120212202132021420215202162021720218202192022020221202222022320224202252022620227202282022920230202312023220233202342023520236202372023820239202402024120242202432024420245202462024720248202492025020251202522025320254202552025620257202582025920260202612026220263202642026520266202672026820269202702027120272202732027420275202762027720278202792028020281202822028320284202852028620287202882028920290202912029220293202942029520296202972029820299203002030120302203032030420305203062030720308203092031020311203122031320314203152031620317203182031920320203212032220323203242032520326203272032820329203302033120332203332033420335203362033720338203392034020341203422034320344203452034620347203482034920350203512035220353203542035520356203572035820359203602036120362203632036420365203662036720368203692037020371203722037320374203752037620377203782037920380203812038220383203842038520386203872038820389203902039120392203932039420395203962039720398203992040020401204022040320404204052040620407204082040920410204112041220413204142041520416204172041820419204202042120422204232042420425204262042720428204292043020431204322043320434204352043620437204382043920440204412044220443204442044520446204472044820449204502045120452204532045420455204562045720458204592046020461204622046320464204652046620467204682046920470204712047220473204742047520476204772047820479204802048120482204832048420485204862048720488204892049020491204922049320494204952049620497204982049920500205012050220503205042050520506205072050820509205102051120512205132051420515205162051720518205192052020521205222052320524205252052620527205282052920530205312053220533205342053520536205372053820539205402054120542205432054420545
function getDefaultExportFromCjs (x) {
  return x && x.__esModule && Object.prototype.hasOwnProperty.call(x, 'default') ? x['default'] : x;
}

var urlToolkit = {exports: {}};

(function (module, exports) {
  // see https://tools.ietf.org/html/rfc1808
  (function (root) {
    var URL_REGEX =
      /^(?=((?:[a-zA-Z0-9+\-.]+:)?))\1(?=((?:\/\/[^\/?#]*)?))\2(?=((?:(?:[^?#\/]*\/)*[^;?#\/]*)?))\3((?:;[^?#]*)?)(\?[^#]*)?(#[^]*)?$/;
    var FIRST_SEGMENT_REGEX = /^(?=([^\/?#]*))\1([^]*)$/;
    var SLASH_DOT_REGEX = /(?:\/|^)\.(?=\/)/g;
    var SLASH_DOT_DOT_REGEX = /(?:\/|^)\.\.\/(?!\.\.\/)[^\/]*(?=\/)/g;
    var URLToolkit = {
      // If opts.alwaysNormalize is true then the path will always be normalized even when it starts with / or //
      // E.g.
      // With opts.alwaysNormalize = false (default, spec compliant)
      // http://a.com/b/cd + /e/f/../g => http://a.com/e/f/../g
      // With opts.alwaysNormalize = true (not spec compliant)
      // http://a.com/b/cd + /e/f/../g => http://a.com/e/g
      buildAbsoluteURL: function (baseURL, relativeURL, opts) {
        opts = opts || {};
        // remove any remaining space and CRLF
        baseURL = baseURL.trim();
        relativeURL = relativeURL.trim();
        if (!relativeURL) {
          // 2a) If the embedded URL is entirely empty, it inherits the
          // entire base URL (i.e., is set equal to the base URL)
          // and we are done.
          if (!opts.alwaysNormalize) {
            return baseURL;
          }
          var basePartsForNormalise = URLToolkit.parseURL(baseURL);
          if (!basePartsForNormalise) {
            throw new Error('Error trying to parse base URL.');
          }
          basePartsForNormalise.path = URLToolkit.normalizePath(
            basePartsForNormalise.path
          );
          return URLToolkit.buildURLFromParts(basePartsForNormalise);
        }
        var relativeParts = URLToolkit.parseURL(relativeURL);
        if (!relativeParts) {
          throw new Error('Error trying to parse relative URL.');
        }
        if (relativeParts.scheme) {
          // 2b) If the embedded URL starts with a scheme name, it is
          // interpreted as an absolute URL and we are done.
          if (!opts.alwaysNormalize) {
            return relativeURL;
          }
          relativeParts.path = URLToolkit.normalizePath(relativeParts.path);
          return URLToolkit.buildURLFromParts(relativeParts);
        }
        var baseParts = URLToolkit.parseURL(baseURL);
        if (!baseParts) {
          throw new Error('Error trying to parse base URL.');
        }
        if (!baseParts.netLoc && baseParts.path && baseParts.path[0] !== '/') {
          // If netLoc missing and path doesn't start with '/', assume everything before the first '/' is the netLoc
          // This causes 'example.com/a' to be handled as '//example.com/a' instead of '/example.com/a'
          var pathParts = FIRST_SEGMENT_REGEX.exec(baseParts.path);
          baseParts.netLoc = pathParts[1];
          baseParts.path = pathParts[2];
        }
        if (baseParts.netLoc && !baseParts.path) {
          baseParts.path = '/';
        }
        var builtParts = {
          // 2c) Otherwise, the embedded URL inherits the scheme of
          // the base URL.
          scheme: baseParts.scheme,
          netLoc: relativeParts.netLoc,
          path: null,
          params: relativeParts.params,
          query: relativeParts.query,
          fragment: relativeParts.fragment,
        };
        if (!relativeParts.netLoc) {
          // 3) If the embedded URL's <net_loc> is non-empty, we skip to
          // Step 7. Otherwise, the embedded URL inherits the <net_loc>
          // (if any) of the base URL.
          builtParts.netLoc = baseParts.netLoc;
          // 4) If the embedded URL path is preceded by a slash "/", the
          // path is not relative and we skip to Step 7.
          if (relativeParts.path[0] !== '/') {
            if (!relativeParts.path) {
              // 5) If the embedded URL path is empty (and not preceded by a
              // slash), then the embedded URL inherits the base URL path
              builtParts.path = baseParts.path;
              // 5a) if the embedded URL's <params> is non-empty, we skip to
              // step 7; otherwise, it inherits the <params> of the base
              // URL (if any) and
              if (!relativeParts.params) {
                builtParts.params = baseParts.params;
                // 5b) if the embedded URL's <query> is non-empty, we skip to
                // step 7; otherwise, it inherits the <query> of the base
                // URL (if any) and we skip to step 7.
                if (!relativeParts.query) {
                  builtParts.query = baseParts.query;
                }
              }
            } else {
              // 6) The last segment of the base URL's path (anything
              // following the rightmost slash "/", or the entire path if no
              // slash is present) is removed and the embedded URL's path is
              // appended in its place.
              var baseURLPath = baseParts.path;
              var newPath =
                baseURLPath.substring(0, baseURLPath.lastIndexOf('/') + 1) +
                relativeParts.path;
              builtParts.path = URLToolkit.normalizePath(newPath);
            }
          }
        }
        if (builtParts.path === null) {
          builtParts.path = opts.alwaysNormalize
            ? URLToolkit.normalizePath(relativeParts.path)
            : relativeParts.path;
        }
        return URLToolkit.buildURLFromParts(builtParts);
      },
      parseURL: function (url) {
        var parts = URL_REGEX.exec(url);
        if (!parts) {
          return null;
        }
        return {
          scheme: parts[1] || '',
          netLoc: parts[2] || '',
          path: parts[3] || '',
          params: parts[4] || '',
          query: parts[5] || '',
          fragment: parts[6] || '',
        };
      },
      normalizePath: function (path) {
        // The following operations are
        // then applied, in order, to the new path:
        // 6a) All occurrences of "./", where "." is a complete path
        // segment, are removed.
        // 6b) If the path ends with "." as a complete path segment,
        // that "." is removed.
        path = path.split('').reverse().join('').replace(SLASH_DOT_REGEX, '');
        // 6c) All occurrences of "<segment>/../", where <segment> is a
        // complete path segment not equal to "..", are removed.
        // Removal of these path segments is performed iteratively,
        // removing the leftmost matching pattern on each iteration,
        // until no matching pattern remains.
        // 6d) If the path ends with "<segment>/..", where <segment> is a
        // complete path segment not equal to "..", that
        // "<segment>/.." is removed.
        while (
          path.length !== (path = path.replace(SLASH_DOT_DOT_REGEX, '')).length
        ) {}
        return path.split('').reverse().join('');
      },
      buildURLFromParts: function (parts) {
        return (
          parts.scheme +
          parts.netLoc +
          parts.path +
          parts.params +
          parts.query +
          parts.fragment
        );
      },
    };
    module.exports = URLToolkit;
  })();
} (urlToolkit));

var urlToolkitExports = urlToolkit.exports;
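// Usage sketch (illustrative only, not part of the bundled library): the two cases below
// mirror the examples already given in the comments above buildAbsoluteURL.
//   urlToolkitExports.buildAbsoluteURL('http://a.com/b/cd', '/e/f/../g');
//   // => 'http://a.com/e/f/../g' (default, spec compliant)
//   urlToolkitExports.buildAbsoluteURL('http://a.com/b/cd', '/e/f/../g', { alwaysNormalize: true });
//   // => 'http://a.com/e/g' (normalization forced, not spec compliant)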
function ownKeys(e, r) {
  var t = Object.keys(e);
  if (Object.getOwnPropertySymbols) {
    var o = Object.getOwnPropertySymbols(e);
    r && (o = o.filter(function (r) {
      return Object.getOwnPropertyDescriptor(e, r).enumerable;
    })), t.push.apply(t, o);
  }
  return t;
}
function _objectSpread2(e) {
  for (var r = 1; r < arguments.length; r++) {
    var t = null != arguments[r] ? arguments[r] : {};
    r % 2 ? ownKeys(Object(t), !0).forEach(function (r) {
      _defineProperty(e, r, t[r]);
    }) : Object.getOwnPropertyDescriptors ? Object.defineProperties(e, Object.getOwnPropertyDescriptors(t)) : ownKeys(Object(t)).forEach(function (r) {
      Object.defineProperty(e, r, Object.getOwnPropertyDescriptor(t, r));
    });
  }
  return e;
}
function _toPrimitive(t, r) {
  if ("object" != typeof t || !t) return t;
  var e = t[Symbol.toPrimitive];
  if (void 0 !== e) {
    var i = e.call(t, r || "default");
    if ("object" != typeof i) return i;
    throw new TypeError("@@toPrimitive must return a primitive value.");
  }
  return ("string" === r ? String : Number)(t);
}
function _toPropertyKey(t) {
  var i = _toPrimitive(t, "string");
  return "symbol" == typeof i ? i : String(i);
}
function _defineProperty(obj, key, value) {
  key = _toPropertyKey(key);
  if (key in obj) {
    Object.defineProperty(obj, key, {
      value: value,
      enumerable: true,
      configurable: true,
      writable: true
    });
  } else {
    obj[key] = value;
  }
  return obj;
}
function _extends() {
  _extends = Object.assign ? Object.assign.bind() : function (target) {
    for (var i = 1; i < arguments.length; i++) {
      var source = arguments[i];
      for (var key in source) {
        if (Object.prototype.hasOwnProperty.call(source, key)) {
          target[key] = source[key];
        }
      }
    }
    return target;
  };
  return _extends.apply(this, arguments);
}

// https://caniuse.com/mdn-javascript_builtins_number_isfinite
const isFiniteNumber = Number.isFinite || function (value) {
  return typeof value === 'number' && isFinite(value);
};
// https://caniuse.com/mdn-javascript_builtins_number_issafeinteger
const isSafeInteger = Number.isSafeInteger || function (value) {
  return typeof value === 'number' && Math.abs(value) <= MAX_SAFE_INTEGER;
};
const MAX_SAFE_INTEGER = Number.MAX_SAFE_INTEGER || 9007199254740991;

let Events = /*#__PURE__*/function (Events) {
  Events["MEDIA_ATTACHING"] = "hlsMediaAttaching";
  Events["MEDIA_ATTACHED"] = "hlsMediaAttached";
  Events["MEDIA_DETACHING"] = "hlsMediaDetaching";
  Events["MEDIA_DETACHED"] = "hlsMediaDetached";
  Events["BUFFER_RESET"] = "hlsBufferReset";
  Events["BUFFER_CODECS"] = "hlsBufferCodecs";
  Events["BUFFER_CREATED"] = "hlsBufferCreated";
  Events["BUFFER_APPENDING"] = "hlsBufferAppending";
  Events["BUFFER_APPENDED"] = "hlsBufferAppended";
  Events["BUFFER_EOS"] = "hlsBufferEos";
  Events["BUFFER_FLUSHING"] = "hlsBufferFlushing";
  Events["BUFFER_FLUSHED"] = "hlsBufferFlushed";
  Events["MANIFEST_LOADING"] = "hlsManifestLoading";
  Events["MANIFEST_LOADED"] = "hlsManifestLoaded";
  Events["MANIFEST_PARSED"] = "hlsManifestParsed";
  Events["LEVEL_SWITCHING"] = "hlsLevelSwitching";
  Events["LEVEL_SWITCHED"] = "hlsLevelSwitched";
  Events["LEVEL_LOADING"] = "hlsLevelLoading";
  Events["LEVEL_LOADED"] = "hlsLevelLoaded";
  Events["LEVEL_UPDATED"] = "hlsLevelUpdated";
  Events["LEVEL_PTS_UPDATED"] = "hlsLevelPtsUpdated";
  Events["LEVELS_UPDATED"] = "hlsLevelsUpdated";
  Events["AUDIO_TRACKS_UPDATED"] = "hlsAudioTracksUpdated";
  Events["AUDIO_TRACK_SWITCHING"] = "hlsAudioTrackSwitching";
  Events["AUDIO_TRACK_SWITCHED"] = "hlsAudioTrackSwitched";
  Events["AUDIO_TRACK_LOADING"] = "hlsAudioTrackLoading";
  Events["AUDIO_TRACK_LOADED"] = "hlsAudioTrackLoaded";
  Events["SUBTITLE_TRACKS_UPDATED"] = "hlsSubtitleTracksUpdated";
  Events["SUBTITLE_TRACKS_CLEARED"] = "hlsSubtitleTracksCleared";
  Events["SUBTITLE_TRACK_SWITCH"] = "hlsSubtitleTrackSwitch";
  Events["SUBTITLE_TRACK_LOADING"] = "hlsSubtitleTrackLoading";
  Events["SUBTITLE_TRACK_LOADED"] = "hlsSubtitleTrackLoaded";
  Events["SUBTITLE_FRAG_PROCESSED"] = "hlsSubtitleFragProcessed";
  Events["CUES_PARSED"] = "hlsCuesParsed";
  Events["NON_NATIVE_TEXT_TRACKS_FOUND"] = "hlsNonNativeTextTracksFound";
  Events["INIT_PTS_FOUND"] = "hlsInitPtsFound";
  Events["FRAG_LOADING"] = "hlsFragLoading";
  Events["FRAG_LOAD_EMERGENCY_ABORTED"] = "hlsFragLoadEmergencyAborted";
  Events["FRAG_LOADED"] = "hlsFragLoaded";
  Events["FRAG_DECRYPTED"] = "hlsFragDecrypted";
  Events["FRAG_PARSING_INIT_SEGMENT"] = "hlsFragParsingInitSegment";
  Events["FRAG_PARSING_USERDATA"] = "hlsFragParsingUserdata";
  Events["FRAG_PARSING_METADATA"] = "hlsFragParsingMetadata";
  Events["FRAG_PARSED"] = "hlsFragParsed";
  Events["FRAG_BUFFERED"] = "hlsFragBuffered";
  Events["FRAG_CHANGED"] = "hlsFragChanged";
  Events["FPS_DROP"] = "hlsFpsDrop";
  Events["FPS_DROP_LEVEL_CAPPING"] = "hlsFpsDropLevelCapping";
  Events["MAX_AUTO_LEVEL_UPDATED"] = "hlsMaxAutoLevelUpdated";
  Events["ERROR"] = "hlsError";
  Events["DESTROYING"] = "hlsDestroying";
  Events["KEY_LOADING"] = "hlsKeyLoading";
  Events["KEY_LOADED"] = "hlsKeyLoaded";
  Events["LIVE_BACK_BUFFER_REACHED"] = "hlsLiveBackBufferReached";
  Events["BACK_BUFFER_REACHED"] = "hlsBackBufferReached";
  Events["STEERING_MANIFEST_LOADED"] = "hlsSteeringManifestLoaded";
  return Events;
}({});
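// Usage sketch (illustrative, not part of the bundle): these string values are the event
// names an Hls instance emits; in application code the enum is typically reached as
// Hls.Events. Assumes `hls` is an existing Hls instance.
//   hls.on(Events.MANIFEST_PARSED, (event, data) => {
//     // event === 'hlsManifestParsed'; data describes the parsed levels/tracks
//   });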
/**
 * Defines each Event type and payload by Event name. Used in {@link hls.js#HlsEventEmitter} to strongly type the event listener API.
 */
let ErrorTypes = /*#__PURE__*/function (ErrorTypes) {
  ErrorTypes["NETWORK_ERROR"] = "networkError";
  ErrorTypes["MEDIA_ERROR"] = "mediaError";
  ErrorTypes["KEY_SYSTEM_ERROR"] = "keySystemError";
  ErrorTypes["MUX_ERROR"] = "muxError";
  ErrorTypes["OTHER_ERROR"] = "otherError";
  return ErrorTypes;
}({});
let ErrorDetails = /*#__PURE__*/function (ErrorDetails) {
  ErrorDetails["KEY_SYSTEM_NO_KEYS"] = "keySystemNoKeys";
  ErrorDetails["KEY_SYSTEM_NO_ACCESS"] = "keySystemNoAccess";
  ErrorDetails["KEY_SYSTEM_NO_SESSION"] = "keySystemNoSession";
  ErrorDetails["KEY_SYSTEM_NO_CONFIGURED_LICENSE"] = "keySystemNoConfiguredLicense";
  ErrorDetails["KEY_SYSTEM_LICENSE_REQUEST_FAILED"] = "keySystemLicenseRequestFailed";
  ErrorDetails["KEY_SYSTEM_SERVER_CERTIFICATE_REQUEST_FAILED"] = "keySystemServerCertificateRequestFailed";
  ErrorDetails["KEY_SYSTEM_SERVER_CERTIFICATE_UPDATE_FAILED"] = "keySystemServerCertificateUpdateFailed";
  ErrorDetails["KEY_SYSTEM_SESSION_UPDATE_FAILED"] = "keySystemSessionUpdateFailed";
  ErrorDetails["KEY_SYSTEM_STATUS_OUTPUT_RESTRICTED"] = "keySystemStatusOutputRestricted";
  ErrorDetails["KEY_SYSTEM_STATUS_INTERNAL_ERROR"] = "keySystemStatusInternalError";
  ErrorDetails["MANIFEST_LOAD_ERROR"] = "manifestLoadError";
  ErrorDetails["MANIFEST_LOAD_TIMEOUT"] = "manifestLoadTimeOut";
  ErrorDetails["MANIFEST_PARSING_ERROR"] = "manifestParsingError";
  ErrorDetails["MANIFEST_INCOMPATIBLE_CODECS_ERROR"] = "manifestIncompatibleCodecsError";
  ErrorDetails["LEVEL_EMPTY_ERROR"] = "levelEmptyError";
  ErrorDetails["LEVEL_LOAD_ERROR"] = "levelLoadError";
  ErrorDetails["LEVEL_LOAD_TIMEOUT"] = "levelLoadTimeOut";
  ErrorDetails["LEVEL_PARSING_ERROR"] = "levelParsingError";
  ErrorDetails["LEVEL_SWITCH_ERROR"] = "levelSwitchError";
  ErrorDetails["AUDIO_TRACK_LOAD_ERROR"] = "audioTrackLoadError";
  ErrorDetails["AUDIO_TRACK_LOAD_TIMEOUT"] = "audioTrackLoadTimeOut";
  ErrorDetails["SUBTITLE_LOAD_ERROR"] = "subtitleTrackLoadError";
  ErrorDetails["SUBTITLE_TRACK_LOAD_TIMEOUT"] = "subtitleTrackLoadTimeOut";
  ErrorDetails["FRAG_LOAD_ERROR"] = "fragLoadError";
  ErrorDetails["FRAG_LOAD_TIMEOUT"] = "fragLoadTimeOut";
  ErrorDetails["FRAG_DECRYPT_ERROR"] = "fragDecryptError";
  ErrorDetails["FRAG_PARSING_ERROR"] = "fragParsingError";
  ErrorDetails["FRAG_GAP"] = "fragGap";
  ErrorDetails["REMUX_ALLOC_ERROR"] = "remuxAllocError";
  ErrorDetails["KEY_LOAD_ERROR"] = "keyLoadError";
  ErrorDetails["KEY_LOAD_TIMEOUT"] = "keyLoadTimeOut";
  ErrorDetails["BUFFER_ADD_CODEC_ERROR"] = "bufferAddCodecError";
  ErrorDetails["BUFFER_INCOMPATIBLE_CODECS_ERROR"] = "bufferIncompatibleCodecsError";
  ErrorDetails["BUFFER_APPEND_ERROR"] = "bufferAppendError";
  ErrorDetails["BUFFER_APPENDING_ERROR"] = "bufferAppendingError";
  ErrorDetails["BUFFER_STALLED_ERROR"] = "bufferStalledError";
  ErrorDetails["BUFFER_FULL_ERROR"] = "bufferFullError";
  ErrorDetails["BUFFER_SEEK_OVER_HOLE"] = "bufferSeekOverHole";
  ErrorDetails["BUFFER_NUDGE_ON_STALL"] = "bufferNudgeOnStall";
  ErrorDetails["INTERNAL_EXCEPTION"] = "internalException";
  ErrorDetails["INTERNAL_ABORTED"] = "aborted";
  ErrorDetails["UNKNOWN"] = "unknown";
  return ErrorDetails;
}({});
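// Usage sketch (illustrative, not part of the bundle): error handling keys off these
// strings via the ERROR event payload. Assumes `hls` is an existing Hls instance.
//   hls.on(Events.ERROR, (event, data) => {
//     if (data.type === ErrorTypes.NETWORK_ERROR && data.details === ErrorDetails.MANIFEST_LOAD_ERROR) {
//       // e.g. retry loading the manifest; data.fatal indicates whether playback can continue
//     }
//   });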
const noop = function noop() {};
const fakeLogger = {
  trace: noop,
  debug: noop,
  log: noop,
  warn: noop,
  info: noop,
  error: noop
};
let exportedLogger = fakeLogger;

// let lastCallTime;
// function formatMsgWithTimeInfo(type, msg) {
//   const now = Date.now();
//   const diff = lastCallTime ? '+' + (now - lastCallTime) : '0';
//   lastCallTime = now;
//   msg = (new Date(now)).toISOString() + ' | [' + type + '] > ' + msg + ' ( ' + diff + ' ms )';
//   return msg;
// }

function consolePrintFn(type) {
  const func = self.console[type];
  if (func) {
    return func.bind(self.console, `[${type}] >`);
  }
  return noop;
}
function exportLoggerFunctions(debugConfig, ...functions) {
  functions.forEach(function (type) {
    exportedLogger[type] = debugConfig[type] ? debugConfig[type].bind(debugConfig) : consolePrintFn(type);
  });
}
function enableLogs(debugConfig, id) {
  // check that console is available
  if (typeof console === 'object' && debugConfig === true || typeof debugConfig === 'object') {
    exportLoggerFunctions(debugConfig,
    // Remove from this list to hard-disable a log level
    // 'trace',
    'debug', 'log', 'info', 'warn', 'error');
    // Some browsers don't allow bind to be used on the console object;
    // fall back to the default if needed
    try {
      exportedLogger.log(`Debug logs enabled for "${id}" in hls.js version ${"1.5.11"}`);
    } catch (e) {
      exportedLogger = fakeLogger;
    }
  } else {
    exportedLogger = fakeLogger;
  }
}
const logger = exportedLogger;
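// Usage sketch (illustrative, not part of the bundle): `enableLogs` decides whether the
// shared `logger` binds to the console, to a caller-supplied logger object, or stays a
// no-op; in application code this is normally driven by the `debug` config option.
//   enableLogs(true, 'my-player');   // bind console debug/log/info/warn/error, tagged per level
//   enableLogs(false, 'my-player');  // keep the silent fakeLogger
//   enableLogs({ debug: noop, log: noop, info: noop, warn: console.warn, error: console.error }, 'my-player');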
const DECIMAL_RESOLUTION_REGEX = /^(\d+)x(\d+)$/;
const ATTR_LIST_REGEX = /(.+?)=(".*?"|.*?)(?:,|$)/g;

// adapted from https://github.com/kanongil/node-m3u8parse/blob/master/attrlist.js
class AttrList {
  constructor(attrs) {
    if (typeof attrs === 'string') {
      attrs = AttrList.parseAttrList(attrs);
    }
    _extends(this, attrs);
  }
  get clientAttrs() {
    return Object.keys(this).filter(attr => attr.substring(0, 2) === 'X-');
  }
  decimalInteger(attrName) {
    const intValue = parseInt(this[attrName], 10);
    if (intValue > Number.MAX_SAFE_INTEGER) {
      return Infinity;
    }
    return intValue;
  }
  hexadecimalInteger(attrName) {
    if (this[attrName]) {
      let stringValue = (this[attrName] || '0x').slice(2);
      stringValue = (stringValue.length & 1 ? '0' : '') + stringValue;
      const value = new Uint8Array(stringValue.length / 2);
      for (let i = 0; i < stringValue.length / 2; i++) {
        value[i] = parseInt(stringValue.slice(i * 2, i * 2 + 2), 16);
      }
      return value;
    } else {
      return null;
    }
  }
  hexadecimalIntegerAsNumber(attrName) {
    const intValue = parseInt(this[attrName], 16);
    if (intValue > Number.MAX_SAFE_INTEGER) {
      return Infinity;
    }
    return intValue;
  }
  decimalFloatingPoint(attrName) {
    return parseFloat(this[attrName]);
  }
  optionalFloat(attrName, defaultValue) {
    const value = this[attrName];
    return value ? parseFloat(value) : defaultValue;
  }
  enumeratedString(attrName) {
    return this[attrName];
  }
  bool(attrName) {
    return this[attrName] === 'YES';
  }
  decimalResolution(attrName) {
    const res = DECIMAL_RESOLUTION_REGEX.exec(this[attrName]);
    if (res === null) {
      return undefined;
    }
    return {
      width: parseInt(res[1], 10),
      height: parseInt(res[2], 10)
    };
  }
  static parseAttrList(input) {
    let match;
    const attrs = {};
    const quote = '"';
    ATTR_LIST_REGEX.lastIndex = 0;
    while ((match = ATTR_LIST_REGEX.exec(input)) !== null) {
      let value = match[2];
      if (value.indexOf(quote) === 0 && value.lastIndexOf(quote) === value.length - 1) {
        value = value.slice(1, -1);
      }
      const name = match[1].trim();
      attrs[name] = value;
    }
    return attrs;
  }
}
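// Usage sketch (illustrative, not part of the bundle): AttrList parses the comma-separated
// attribute syntax used by M3U8 tags. The attribute names below are typical
// EXT-X-STREAM-INF attributes, shown only as sample input.
//   const attrs = new AttrList('BANDWIDTH=1280000,RESOLUTION=1920x1080,CODECS="avc1.4d401f,mp4a.40.2"');
//   attrs.decimalInteger('BANDWIDTH');     // 1280000
//   attrs.decimalResolution('RESOLUTION'); // { width: 1920, height: 1080 }
//   attrs.CODECS;                          // 'avc1.4d401f,mp4a.40.2' (surrounding quotes stripped)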
  487. // Avoid exporting const enum so that these values can be inlined
  488. function isDateRangeCueAttribute(attrName) {
  489. return attrName !== "ID" && attrName !== "CLASS" && attrName !== "START-DATE" && attrName !== "DURATION" && attrName !== "END-DATE" && attrName !== "END-ON-NEXT";
  490. }
  491. function isSCTE35Attribute(attrName) {
  492. return attrName === "SCTE35-OUT" || attrName === "SCTE35-IN";
  493. }
  494. class DateRange {
  495. constructor(dateRangeAttr, dateRangeWithSameId) {
  496. this.attr = void 0;
  497. this._startDate = void 0;
  498. this._endDate = void 0;
  499. this._badValueForSameId = void 0;
  500. if (dateRangeWithSameId) {
  501. const previousAttr = dateRangeWithSameId.attr;
  502. for (const key in previousAttr) {
  503. if (Object.prototype.hasOwnProperty.call(dateRangeAttr, key) && dateRangeAttr[key] !== previousAttr[key]) {
  504. logger.warn(`DATERANGE tag attribute: "${key}" does not match for tags with ID: "${dateRangeAttr.ID}"`);
  505. this._badValueForSameId = key;
  506. break;
  507. }
  508. }
  509. // Merge DateRange tags with the same ID
  510. dateRangeAttr = _extends(new AttrList({}), previousAttr, dateRangeAttr);
  511. }
  512. this.attr = dateRangeAttr;
  513. this._startDate = new Date(dateRangeAttr["START-DATE"]);
  514. if ("END-DATE" in this.attr) {
  515. const endDate = new Date(this.attr["END-DATE"]);
  516. if (isFiniteNumber(endDate.getTime())) {
  517. this._endDate = endDate;
  518. }
  519. }
  520. }
  521. get id() {
  522. return this.attr.ID;
  523. }
  524. get class() {
  525. return this.attr.CLASS;
  526. }
  527. get startDate() {
  528. return this._startDate;
  529. }
  530. get endDate() {
  531. if (this._endDate) {
  532. return this._endDate;
  533. }
  534. const duration = this.duration;
  535. if (duration !== null) {
  536. return new Date(this._startDate.getTime() + duration * 1000);
  537. }
  538. return null;
  539. }
  540. get duration() {
  541. if ("DURATION" in this.attr) {
  542. const duration = this.attr.decimalFloatingPoint("DURATION");
  543. if (isFiniteNumber(duration)) {
  544. return duration;
  545. }
  546. } else if (this._endDate) {
  547. return (this._endDate.getTime() - this._startDate.getTime()) / 1000;
  548. }
  549. return null;
  550. }
  551. get plannedDuration() {
  552. if ("PLANNED-DURATION" in this.attr) {
  553. return this.attr.decimalFloatingPoint("PLANNED-DURATION");
  554. }
  555. return null;
  556. }
  557. get endOnNext() {
  558. return this.attr.bool("END-ON-NEXT");
  559. }
  560. get isValid() {
  561. return !!this.id && !this._badValueForSameId && isFiniteNumber(this.startDate.getTime()) && (this.duration === null || this.duration >= 0) && (!this.endOnNext || !!this.class);
  562. }
  563. }
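/*
 * Illustrative sketch (attribute values are made up): constructing a DateRange from a parsed
 * EXT-X-DATERANGE attribute list and reading the derived getters.
 *
 *   const attr = new AttrList('ID="splice-1",START-DATE="2024-01-01T00:00:00.000Z",DURATION=30.0');
 *   const range = new DateRange(attr);
 *   range.duration; // 30
 *   range.endDate;  // startDate + 30s, since END-DATE is absent and DURATION is finite
 *   range.isValid;  // true (has an ID, a finite start date and a non-negative duration)
 */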
  564. class LoadStats {
  565. constructor() {
  566. this.aborted = false;
  567. this.loaded = 0;
  568. this.retry = 0;
  569. this.total = 0;
  570. this.chunkCount = 0;
  571. this.bwEstimate = 0;
  572. this.loading = {
  573. start: 0,
  574. first: 0,
  575. end: 0
  576. };
  577. this.parsing = {
  578. start: 0,
  579. end: 0
  580. };
  581. this.buffering = {
  582. start: 0,
  583. first: 0,
  584. end: 0
  585. };
  586. }
  587. }
  588. var ElementaryStreamTypes = {
  589. AUDIO: "audio",
  590. VIDEO: "video",
  591. AUDIOVIDEO: "audiovideo"
  592. };
  593. class BaseSegment {
  594. constructor(baseurl) {
  595. this._byteRange = null;
  596. this._url = null;
  597. // baseurl is the URL to the playlist
  598. this.baseurl = void 0;
  599. // relurl is the portion of the URL that comes from inside the playlist.
  600. this.relurl = void 0;
  601. // Holds the types of data this fragment supports
  602. this.elementaryStreams = {
  603. [ElementaryStreamTypes.AUDIO]: null,
  604. [ElementaryStreamTypes.VIDEO]: null,
  605. [ElementaryStreamTypes.AUDIOVIDEO]: null
  606. };
  607. this.baseurl = baseurl;
  608. }
610. // setByteRange converts an EXT-X-BYTERANGE attribute into a two-element array

  610. setByteRange(value, previous) {
  611. const params = value.split('@', 2);
  612. let start;
  613. if (params.length === 1) {
  614. start = (previous == null ? void 0 : previous.byteRangeEndOffset) || 0;
  615. } else {
  616. start = parseInt(params[1]);
  617. }
  618. this._byteRange = [start, parseInt(params[0]) + start];
  619. }
  620. get byteRange() {
  621. if (!this._byteRange) {
  622. return [];
  623. }
  624. return this._byteRange;
  625. }
  626. get byteRangeStartOffset() {
  627. return this.byteRange[0];
  628. }
  629. get byteRangeEndOffset() {
  630. return this.byteRange[1];
  631. }
  632. get url() {
  633. if (!this._url && this.baseurl && this.relurl) {
  634. this._url = urlToolkitExports.buildAbsoluteURL(this.baseurl, this.relurl, {
  635. alwaysNormalize: true
  636. });
  637. }
  638. return this._url || '';
  639. }
  640. set url(value) {
  641. this._url = value;
  642. }
  643. }
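/*
 * Illustrative sketch of setByteRange (segment and next stand for hypothetical BaseSegment or
 * Fragment instances): EXT-X-BYTERANGE values have the form "<length>[@<offset>]". With an
 * explicit offset the range is [offset, offset + length]; without one it continues from the
 * previous segment's byteRangeEndOffset (or 0).
 *
 *   segment.setByteRange('1000@2000'); // byteRange: [2000, 3000]
 *   next.setByteRange('500', segment); // byteRange: [3000, 3500]
 */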
  644. /**
  645. * Object representing parsed data from an HLS Segment. Found in {@link hls.js#LevelDetails.fragments}.
  646. */
  647. class Fragment extends BaseSegment {
  648. constructor(type, baseurl) {
  649. super(baseurl);
  650. this._decryptdata = null;
  651. this.rawProgramDateTime = null;
  652. this.programDateTime = null;
  653. this.tagList = [];
654. // EXTINF has to be present for an m3u8 to be considered valid
  655. this.duration = 0;
656. // sn denotes the sequence number of a segment; when set to a string it can be 'initSegment'
  657. this.sn = 0;
  658. // levelkeys are the EXT-X-KEY tags that apply to this segment for decryption
659. // the core difference from the private field _decryptdata is that levelkeys lack the initialized IV;
660. // _decryptdata sets the IV for this segment based on the segment's sequence number
  661. this.levelkeys = void 0;
  662. // A string representing the fragment type
  663. this.type = void 0;
  664. // A reference to the loader. Set while the fragment is loading, and removed afterwards. Used to abort fragment loading
  665. this.loader = null;
  666. // A reference to the key loader. Set while the key is loading, and removed afterwards. Used to abort key loading
  667. this.keyLoader = null;
  668. // The level/track index to which the fragment belongs
  669. this.level = -1;
  670. // The continuity counter of the fragment
  671. this.cc = 0;
  672. // The starting Presentation Time Stamp (PTS) of the fragment. Set after transmux complete.
  673. this.startPTS = void 0;
  674. // The ending Presentation Time Stamp (PTS) of the fragment. Set after transmux complete.
  675. this.endPTS = void 0;
  676. // The starting Decode Time Stamp (DTS) of the fragment. Set after transmux complete.
  677. this.startDTS = void 0;
  678. // The ending Decode Time Stamp (DTS) of the fragment. Set after transmux complete.
  679. this.endDTS = void 0;
  680. // The start time of the fragment, as listed in the manifest. Updated after transmux complete.
  681. this.start = 0;
  682. // Set by `updateFragPTSDTS` in level-helper
  683. this.deltaPTS = void 0;
  684. // The maximum starting Presentation Time Stamp (audio/video PTS) of the fragment. Set after transmux complete.
  685. this.maxStartPTS = void 0;
  686. // The minimum ending Presentation Time Stamp (audio/video PTS) of the fragment. Set after transmux complete.
  687. this.minEndPTS = void 0;
  688. // Load/parse timing information
  689. this.stats = new LoadStats();
  690. // Init Segment bytes (unset for media segments)
  691. this.data = void 0;
  692. // A flag indicating whether the segment was downloaded in order to test bitrate, and was not buffered
  693. this.bitrateTest = false;
  694. // #EXTINF segment title
  695. this.title = null;
  696. // The Media Initialization Section for this segment
  697. this.initSegment = null;
  698. // Fragment is the last fragment in the media playlist
  699. this.endList = void 0;
  700. // Fragment is marked by an EXT-X-GAP tag indicating that it does not contain media data and should not be loaded
  701. this.gap = void 0;
  702. // Deprecated
  703. this.urlId = 0;
  704. this.type = type;
  705. }
  706. get decryptdata() {
  707. const {
  708. levelkeys
  709. } = this;
  710. if (!levelkeys && !this._decryptdata) {
  711. return null;
  712. }
  713. if (!this._decryptdata && this.levelkeys && !this.levelkeys.NONE) {
  714. const key = this.levelkeys.identity;
  715. if (key) {
  716. this._decryptdata = key.getDecryptData(this.sn);
  717. } else {
  718. const keyFormats = Object.keys(this.levelkeys);
  719. if (keyFormats.length === 1) {
  720. return this._decryptdata = this.levelkeys[keyFormats[0]].getDecryptData(this.sn);
  721. }
  722. }
  723. }
  724. return this._decryptdata;
  725. }
  726. get end() {
  727. return this.start + this.duration;
  728. }
  729. get endProgramDateTime() {
  730. if (this.programDateTime === null) {
  731. return null;
  732. }
  733. if (!isFiniteNumber(this.programDateTime)) {
  734. return null;
  735. }
  736. const duration = !isFiniteNumber(this.duration) ? 0 : this.duration;
  737. return this.programDateTime + duration * 1000;
  738. }
  739. get encrypted() {
  740. var _this$_decryptdata;
  741. // At the m3u8-parser level we need to add support for manifest signalled keyformats
  742. // when we want the fragment to start reporting that it is encrypted.
  743. // Currently, keyFormat will only be set for identity keys
  744. if ((_this$_decryptdata = this._decryptdata) != null && _this$_decryptdata.encrypted) {
  745. return true;
  746. } else if (this.levelkeys) {
  747. const keyFormats = Object.keys(this.levelkeys);
  748. const len = keyFormats.length;
  749. if (len > 1 || len === 1 && this.levelkeys[keyFormats[0]].encrypted) {
  750. return true;
  751. }
  752. }
  753. return false;
  754. }
  755. setKeyFormat(keyFormat) {
  756. if (this.levelkeys) {
  757. const key = this.levelkeys[keyFormat];
  758. if (key && !this._decryptdata) {
  759. this._decryptdata = key.getDecryptData(this.sn);
  760. }
  761. }
  762. }
  763. abortRequests() {
  764. var _this$loader, _this$keyLoader;
  765. (_this$loader = this.loader) == null ? void 0 : _this$loader.abort();
  766. (_this$keyLoader = this.keyLoader) == null ? void 0 : _this$keyLoader.abort();
  767. }
  768. setElementaryStreamInfo(type, startPTS, endPTS, startDTS, endDTS, partial = false) {
  769. const {
  770. elementaryStreams
  771. } = this;
  772. const info = elementaryStreams[type];
  773. if (!info) {
  774. elementaryStreams[type] = {
  775. startPTS,
  776. endPTS,
  777. startDTS,
  778. endDTS,
  779. partial
  780. };
  781. return;
  782. }
  783. info.startPTS = Math.min(info.startPTS, startPTS);
  784. info.endPTS = Math.max(info.endPTS, endPTS);
  785. info.startDTS = Math.min(info.startDTS, startDTS);
  786. info.endDTS = Math.max(info.endDTS, endDTS);
  787. }
  788. clearElementaryStreamInfo() {
  789. const {
  790. elementaryStreams
  791. } = this;
  792. elementaryStreams[ElementaryStreamTypes.AUDIO] = null;
  793. elementaryStreams[ElementaryStreamTypes.VIDEO] = null;
  794. elementaryStreams[ElementaryStreamTypes.AUDIOVIDEO] = null;
  795. }
  796. }
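/*
 * Note on units (values below are illustrative): programDateTime is kept in milliseconds while
 * duration is in seconds, so endProgramDateTime mixes the two via `duration * 1000`.
 *
 *   // frag.programDateTime = 1700000000000 (ms), frag.duration = 6 (s)
 *   // frag.endProgramDateTime === 1700000006000
 */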
  797. /**
  798. * Object representing parsed data from an HLS Partial Segment. Found in {@link hls.js#LevelDetails.partList}.
  799. */
  800. class Part extends BaseSegment {
  801. constructor(partAttrs, frag, baseurl, index, previous) {
  802. super(baseurl);
  803. this.fragOffset = 0;
  804. this.duration = 0;
  805. this.gap = false;
  806. this.independent = false;
  807. this.relurl = void 0;
  808. this.fragment = void 0;
  809. this.index = void 0;
  810. this.stats = new LoadStats();
  811. this.duration = partAttrs.decimalFloatingPoint('DURATION');
  812. this.gap = partAttrs.bool('GAP');
  813. this.independent = partAttrs.bool('INDEPENDENT');
  814. this.relurl = partAttrs.enumeratedString('URI');
  815. this.fragment = frag;
  816. this.index = index;
  817. const byteRange = partAttrs.enumeratedString('BYTERANGE');
  818. if (byteRange) {
  819. this.setByteRange(byteRange, previous);
  820. }
  821. if (previous) {
  822. this.fragOffset = previous.fragOffset + previous.duration;
  823. }
  824. }
  825. get start() {
  826. return this.fragment.start + this.fragOffset;
  827. }
  828. get end() {
  829. return this.start + this.duration;
  830. }
  831. get loaded() {
  832. const {
  833. elementaryStreams
  834. } = this;
  835. return !!(elementaryStreams.audio || elementaryStreams.video || elementaryStreams.audiovideo);
  836. }
  837. }
  838. const DEFAULT_TARGET_DURATION = 10;
  839. /**
  840. * Object representing parsed data from an HLS Media Playlist. Found in {@link hls.js#Level.details}.
  841. */
  842. class LevelDetails {
  843. constructor(baseUrl) {
  844. this.PTSKnown = false;
  845. this.alignedSliding = false;
  846. this.averagetargetduration = void 0;
  847. this.endCC = 0;
  848. this.endSN = 0;
  849. this.fragments = void 0;
  850. this.fragmentHint = void 0;
  851. this.partList = null;
  852. this.dateRanges = void 0;
  853. this.live = true;
  854. this.ageHeader = 0;
  855. this.advancedDateTime = void 0;
  856. this.updated = true;
  857. this.advanced = true;
  858. this.availabilityDelay = void 0;
  859. // Manifest reload synchronization
  860. this.misses = 0;
  861. this.startCC = 0;
  862. this.startSN = 0;
  863. this.startTimeOffset = null;
  864. this.targetduration = 0;
  865. this.totalduration = 0;
  866. this.type = null;
  867. this.url = void 0;
  868. this.m3u8 = '';
  869. this.version = null;
  870. this.canBlockReload = false;
  871. this.canSkipUntil = 0;
  872. this.canSkipDateRanges = false;
  873. this.skippedSegments = 0;
  874. this.recentlyRemovedDateranges = void 0;
  875. this.partHoldBack = 0;
  876. this.holdBack = 0;
  877. this.partTarget = 0;
  878. this.preloadHint = void 0;
  879. this.renditionReports = void 0;
  880. this.tuneInGoal = 0;
  881. this.deltaUpdateFailed = void 0;
  882. this.driftStartTime = 0;
  883. this.driftEndTime = 0;
  884. this.driftStart = 0;
  885. this.driftEnd = 0;
  886. this.encryptedFragments = void 0;
  887. this.playlistParsingError = null;
  888. this.variableList = null;
  889. this.hasVariableRefs = false;
  890. this.fragments = [];
  891. this.encryptedFragments = [];
  892. this.dateRanges = {};
  893. this.url = baseUrl;
  894. }
  895. reloaded(previous) {
  896. if (!previous) {
  897. this.advanced = true;
  898. this.updated = true;
  899. return;
  900. }
  901. const partSnDiff = this.lastPartSn - previous.lastPartSn;
  902. const partIndexDiff = this.lastPartIndex - previous.lastPartIndex;
  903. this.updated = this.endSN !== previous.endSN || !!partIndexDiff || !!partSnDiff || !this.live;
  904. this.advanced = this.endSN > previous.endSN || partSnDiff > 0 || partSnDiff === 0 && partIndexDiff > 0;
  905. if (this.updated || this.advanced) {
  906. this.misses = Math.floor(previous.misses * 0.6);
  907. } else {
  908. this.misses = previous.misses + 1;
  909. }
  910. this.availabilityDelay = previous.availabilityDelay;
  911. }
  912. get hasProgramDateTime() {
  913. if (this.fragments.length) {
  914. return isFiniteNumber(this.fragments[this.fragments.length - 1].programDateTime);
  915. }
  916. return false;
  917. }
  918. get levelTargetDuration() {
  919. return this.averagetargetduration || this.targetduration || DEFAULT_TARGET_DURATION;
  920. }
  921. get drift() {
  922. const runTime = this.driftEndTime - this.driftStartTime;
  923. if (runTime > 0) {
  924. const runDuration = this.driftEnd - this.driftStart;
  925. return runDuration * 1000 / runTime;
  926. }
  927. return 1;
  928. }
  929. get edge() {
  930. return this.partEnd || this.fragmentEnd;
  931. }
  932. get partEnd() {
  933. var _this$partList;
  934. if ((_this$partList = this.partList) != null && _this$partList.length) {
  935. return this.partList[this.partList.length - 1].end;
  936. }
  937. return this.fragmentEnd;
  938. }
  939. get fragmentEnd() {
  940. var _this$fragments;
  941. if ((_this$fragments = this.fragments) != null && _this$fragments.length) {
  942. return this.fragments[this.fragments.length - 1].end;
  943. }
  944. return 0;
  945. }
  946. get age() {
  947. if (this.advancedDateTime) {
  948. return Math.max(Date.now() - this.advancedDateTime, 0) / 1000;
  949. }
  950. return 0;
  951. }
  952. get lastPartIndex() {
  953. var _this$partList2;
  954. if ((_this$partList2 = this.partList) != null && _this$partList2.length) {
  955. return this.partList[this.partList.length - 1].index;
  956. }
  957. return -1;
  958. }
  959. get lastPartSn() {
  960. var _this$partList3;
  961. if ((_this$partList3 = this.partList) != null && _this$partList3.length) {
  962. return this.partList[this.partList.length - 1].fragment.sn;
  963. }
  964. return this.endSN;
  965. }
  966. }
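/*
 * Illustrative sketch of the drift getter, assuming driftStartTime/driftEndTime hold wall-clock
 * milliseconds captured across playlist reloads and driftStart/driftEnd hold media time in
 * seconds: a live playlist that adds 6 s of media every 6000 ms of real time has a drift of 1.
 *
 *   // driftEndTime - driftStartTime = 6000 (ms), driftEnd - driftStart = 6 (s)
 *   // drift = 6 * 1000 / 6000 = 1
 */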
  967. // This file is inserted as a shim for modules which we do not want to include into the distro.
  968. // This replacement is done in the "alias" plugin of the rollup config.
  969. // Use a ES dedicated file as Rollup assigns an object in the output
  970. // For example: "var KeySystemFormats = emptyEs.KeySystemFormats;"
  971. var emptyEs = {};
  972. var Cues = /*@__PURE__*/getDefaultExportFromCjs(emptyEs);
  973. function sliceUint8(array, start, end) {
  974. // @ts-expect-error This polyfills IE11 usage of Uint8Array slice.
  975. // It always exists in the TypeScript definition so fails, but it fails at runtime on IE11.
  976. return Uint8Array.prototype.slice ? array.slice(start, end) : new Uint8Array(Array.prototype.slice.call(array, start, end));
  977. }
  978. // breaking up those two types in order to clarify what is happening in the decoding path.
  979. /**
  980. * Returns true if an ID3 header can be found at offset in data
  981. * @param data - The data to search
  982. * @param offset - The offset at which to start searching
  983. */
  984. const isHeader$2 = (data, offset) => {
  985. /*
  986. * http://id3.org/id3v2.3.0
  987. * [0] = 'I'
  988. * [1] = 'D'
  989. * [2] = '3'
  990. * [3,4] = {Version}
  991. * [5] = {Flags}
  992. * [6-9] = {ID3 Size}
  993. *
  994. * An ID3v2 tag can be detected with the following pattern:
  995. * $49 44 33 yy yy xx zz zz zz zz
  996. * Where yy is less than $FF, xx is the 'flags' byte and zz is less than $80
  997. */
  998. if (offset + 10 <= data.length) {
  999. // look for 'ID3' identifier
  1000. if (data[offset] === 0x49 && data[offset + 1] === 0x44 && data[offset + 2] === 0x33) {
  1001. // check version is within range
  1002. if (data[offset + 3] < 0xff && data[offset + 4] < 0xff) {
  1003. // check size is within range
  1004. if (data[offset + 6] < 0x80 && data[offset + 7] < 0x80 && data[offset + 8] < 0x80 && data[offset + 9] < 0x80) {
  1005. return true;
  1006. }
  1007. }
  1008. }
  1009. }
  1010. return false;
  1011. };
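/*
 * Illustrative sketch: a minimal ID3v2 header (made-up bytes) that satisfies the checks above.
 *
 *   // 'I'   'D'   '3'   ver   rev  flags  size (4 synchsafe bytes, each < 0x80)
 *   const header = new Uint8Array([0x49, 0x44, 0x33, 0x04, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00]);
 *   isHeader$2(header, 0); // true
 */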
  1012. /**
  1013. * Returns true if an ID3 footer can be found at offset in data
  1014. * @param data - The data to search
  1015. * @param offset - The offset at which to start searching
  1016. */
  1017. const isFooter = (data, offset) => {
  1018. /*
  1019. * The footer is a copy of the header, but with a different identifier
  1020. */
  1021. if (offset + 10 <= data.length) {
  1022. // look for '3DI' identifier
  1023. if (data[offset] === 0x33 && data[offset + 1] === 0x44 && data[offset + 2] === 0x49) {
  1024. // check version is within range
  1025. if (data[offset + 3] < 0xff && data[offset + 4] < 0xff) {
  1026. // check size is within range
  1027. if (data[offset + 6] < 0x80 && data[offset + 7] < 0x80 && data[offset + 8] < 0x80 && data[offset + 9] < 0x80) {
  1028. return true;
  1029. }
  1030. }
  1031. }
  1032. }
  1033. return false;
  1034. };
  1035. /**
  1036. * Returns any adjacent ID3 tags found in data starting at offset, as one block of data
  1037. * @param data - The data to search in
  1038. * @param offset - The offset at which to start searching
  1039. * @returns the block of data containing any ID3 tags found
  1040. * or *undefined* if no header is found at the starting offset
  1041. */
  1042. const getID3Data = (data, offset) => {
  1043. const front = offset;
  1044. let length = 0;
  1045. while (isHeader$2(data, offset)) {
  1046. // ID3 header is 10 bytes
  1047. length += 10;
  1048. const size = readSize(data, offset + 6);
  1049. length += size;
  1050. if (isFooter(data, offset + 10)) {
  1051. // ID3 footer is 10 bytes
  1052. length += 10;
  1053. }
  1054. offset += length;
  1055. }
  1056. if (length > 0) {
  1057. return data.subarray(front, front + length);
  1058. }
  1059. return undefined;
  1060. };
  1061. const readSize = (data, offset) => {
  1062. let size = 0;
  1063. size = (data[offset] & 0x7f) << 21;
  1064. size |= (data[offset + 1] & 0x7f) << 14;
  1065. size |= (data[offset + 2] & 0x7f) << 7;
  1066. size |= data[offset + 3] & 0x7f;
  1067. return size;
  1068. };
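/*
 * readSize decodes an ID3 "synchsafe" integer: four bytes, 7 significant bits each.
 * Illustrative bytes: [0x00, 0x00, 0x02, 0x01] gives (0x02 << 7) | 0x01 = 257.
 *
 *   readSize(new Uint8Array([0x00, 0x00, 0x02, 0x01]), 0); // 257
 */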
  1069. const canParse$2 = (data, offset) => {
  1070. return isHeader$2(data, offset) && readSize(data, offset + 6) + 10 <= data.length - offset;
  1071. };
  1072. /**
  1073. * Searches for the Elementary Stream timestamp found in the ID3 data chunk
  1074. * @param data - Block of data containing one or more ID3 tags
  1075. */
  1076. const getTimeStamp = data => {
  1077. const frames = getID3Frames(data);
  1078. for (let i = 0; i < frames.length; i++) {
  1079. const frame = frames[i];
  1080. if (isTimeStampFrame(frame)) {
  1081. return readTimeStamp(frame);
  1082. }
  1083. }
  1084. return undefined;
  1085. };
  1086. /**
  1087. * Returns true if the ID3 frame is an Elementary Stream timestamp frame
  1088. */
  1089. const isTimeStampFrame = frame => {
  1090. return frame && frame.key === 'PRIV' && frame.info === 'com.apple.streaming.transportStreamTimestamp';
  1091. };
  1092. const getFrameData = data => {
  1093. /*
  1094. Frame ID $xx xx xx xx (four characters)
  1095. Size $xx xx xx xx
  1096. Flags $xx xx
  1097. */
  1098. const type = String.fromCharCode(data[0], data[1], data[2], data[3]);
  1099. const size = readSize(data, 4);
  1100. // skip frame id, size, and flags
  1101. const offset = 10;
  1102. return {
  1103. type,
  1104. size,
  1105. data: data.subarray(offset, offset + size)
  1106. };
  1107. };
  1108. /**
  1109. * Returns an array of ID3 frames found in all the ID3 tags in the id3Data
  1110. * @param id3Data - The ID3 data containing one or more ID3 tags
  1111. */
  1112. const getID3Frames = id3Data => {
  1113. let offset = 0;
  1114. const frames = [];
  1115. while (isHeader$2(id3Data, offset)) {
  1116. const size = readSize(id3Data, offset + 6);
  1117. // skip past ID3 header
  1118. offset += 10;
  1119. const end = offset + size;
  1120. // loop through frames in the ID3 tag
  1121. while (offset + 8 < end) {
  1122. const frameData = getFrameData(id3Data.subarray(offset));
  1123. const frame = decodeFrame(frameData);
  1124. if (frame) {
  1125. frames.push(frame);
  1126. }
  1127. // skip frame header and frame data
  1128. offset += frameData.size + 10;
  1129. }
  1130. if (isFooter(id3Data, offset)) {
  1131. offset += 10;
  1132. }
  1133. }
  1134. return frames;
  1135. };
  1136. const decodeFrame = frame => {
  1137. if (frame.type === 'PRIV') {
  1138. return decodePrivFrame(frame);
  1139. } else if (frame.type[0] === 'W') {
  1140. return decodeURLFrame(frame);
  1141. }
  1142. return decodeTextFrame(frame);
  1143. };
  1144. const decodePrivFrame = frame => {
  1145. /*
  1146. Format: <text string>\0<binary data>
  1147. */
  1148. if (frame.size < 2) {
  1149. return undefined;
  1150. }
  1151. const owner = utf8ArrayToStr(frame.data, true);
  1152. const privateData = new Uint8Array(frame.data.subarray(owner.length + 1));
  1153. return {
  1154. key: frame.type,
  1155. info: owner,
  1156. data: privateData.buffer
  1157. };
  1158. };
  1159. const decodeTextFrame = frame => {
  1160. if (frame.size < 2) {
  1161. return undefined;
  1162. }
  1163. if (frame.type === 'TXXX') {
  1164. /*
  1165. Format:
  1166. [0] = {Text Encoding}
  1167. [1-?] = {Description}\0{Value}
  1168. */
  1169. let index = 1;
  1170. const description = utf8ArrayToStr(frame.data.subarray(index), true);
  1171. index += description.length + 1;
  1172. const value = utf8ArrayToStr(frame.data.subarray(index));
  1173. return {
  1174. key: frame.type,
  1175. info: description,
  1176. data: value
  1177. };
  1178. }
  1179. /*
  1180. Format:
  1181. [0] = {Text Encoding}
  1182. [1-?] = {Value}
  1183. */
  1184. const text = utf8ArrayToStr(frame.data.subarray(1));
  1185. return {
  1186. key: frame.type,
  1187. data: text
  1188. };
  1189. };
  1190. const decodeURLFrame = frame => {
  1191. if (frame.type === 'WXXX') {
  1192. /*
  1193. Format:
  1194. [0] = {Text Encoding}
  1195. [1-?] = {Description}\0{URL}
  1196. */
  1197. if (frame.size < 2) {
  1198. return undefined;
  1199. }
  1200. let index = 1;
  1201. const description = utf8ArrayToStr(frame.data.subarray(index), true);
  1202. index += description.length + 1;
  1203. const value = utf8ArrayToStr(frame.data.subarray(index));
  1204. return {
  1205. key: frame.type,
  1206. info: description,
  1207. data: value
  1208. };
  1209. }
  1210. /*
  1211. Format:
  1212. [0-?] = {URL}
  1213. */
  1214. const url = utf8ArrayToStr(frame.data);
  1215. return {
  1216. key: frame.type,
  1217. data: url
  1218. };
  1219. };
  1220. const readTimeStamp = timeStampFrame => {
  1221. if (timeStampFrame.data.byteLength === 8) {
  1222. const data = new Uint8Array(timeStampFrame.data);
  1223. // timestamp is 33 bit expressed as a big-endian eight-octet number,
  1224. // with the upper 31 bits set to zero.
  1225. const pts33Bit = data[3] & 0x1;
  1226. let timestamp = (data[4] << 23) + (data[5] << 15) + (data[6] << 7) + data[7];
  1227. timestamp /= 45;
  1228. if (pts33Bit) {
  1229. timestamp += 47721858.84;
  1230. } // 2^32 / 90
  1231. return Math.round(timestamp);
  1232. }
  1233. return undefined;
  1234. };
  1235. // http://stackoverflow.com/questions/8936984/uint8array-to-string-in-javascript/22373197
  1236. // http://www.onicos.com/staff/iz/amuse/javascript/expert/utf.txt
1237. /* utf.js - UTF-8 <=> UTF-16 conversion
  1238. *
  1239. * Copyright (C) 1999 Masanao Izumo <iz@onicos.co.jp>
  1240. * Version: 1.0
  1241. * LastModified: Dec 25 1999
  1242. * This library is free. You can redistribute it and/or modify it.
  1243. */
  1244. const utf8ArrayToStr = (array, exitOnNull = false) => {
  1245. const decoder = getTextDecoder();
  1246. if (decoder) {
  1247. const decoded = decoder.decode(array);
  1248. if (exitOnNull) {
  1249. // grab up to the first null
  1250. const idx = decoded.indexOf('\0');
  1251. return idx !== -1 ? decoded.substring(0, idx) : decoded;
  1252. }
  1253. // remove any null characters
  1254. return decoded.replace(/\0/g, '');
  1255. }
  1256. const len = array.length;
  1257. let c;
  1258. let char2;
  1259. let char3;
  1260. let out = '';
  1261. let i = 0;
  1262. while (i < len) {
  1263. c = array[i++];
  1264. if (c === 0x00 && exitOnNull) {
  1265. return out;
  1266. } else if (c === 0x00 || c === 0x03) {
  1267. // If the character is 3 (END_OF_TEXT) or 0 (NULL) then skip it
  1268. continue;
  1269. }
  1270. switch (c >> 4) {
  1271. case 0:
  1272. case 1:
  1273. case 2:
  1274. case 3:
  1275. case 4:
  1276. case 5:
  1277. case 6:
  1278. case 7:
  1279. // 0xxxxxxx
  1280. out += String.fromCharCode(c);
  1281. break;
  1282. case 12:
  1283. case 13:
  1284. // 110x xxxx 10xx xxxx
  1285. char2 = array[i++];
  1286. out += String.fromCharCode((c & 0x1f) << 6 | char2 & 0x3f);
  1287. break;
  1288. case 14:
  1289. // 1110 xxxx 10xx xxxx 10xx xxxx
  1290. char2 = array[i++];
  1291. char3 = array[i++];
  1292. out += String.fromCharCode((c & 0x0f) << 12 | (char2 & 0x3f) << 6 | (char3 & 0x3f) << 0);
  1293. break;
  1294. }
  1295. }
  1296. return out;
  1297. };
  1298. let decoder;
  1299. function getTextDecoder() {
1300. // On PlayStation 4, TextDecoder is defined but only partially implemented,
1301. // so manual decoding is preferable
  1302. if (navigator.userAgent.includes('PlayStation 4')) {
  1303. return;
  1304. }
  1305. if (!decoder && typeof self.TextDecoder !== 'undefined') {
  1306. decoder = new self.TextDecoder('utf-8');
  1307. }
  1308. return decoder;
  1309. }
  1310. /**
  1311. * hex dump helper class
  1312. */
  1313. const Hex = {
  1314. hexDump: function (array) {
  1315. let str = '';
  1316. for (let i = 0; i < array.length; i++) {
  1317. let h = array[i].toString(16);
  1318. if (h.length < 2) {
  1319. h = '0' + h;
  1320. }
  1321. str += h;
  1322. }
  1323. return str;
  1324. }
  1325. };
  1326. const UINT32_MAX$1 = Math.pow(2, 32) - 1;
  1327. const push = [].push;
  1328. // We are using fixed track IDs for driving the MP4 remuxer
  1329. // instead of following the TS PIDs.
1330. // There is no reason not to do this, and some browsers/SourceBuffer-demuxers
1331. // may not like it if there are TrackID "switches"
1332. // See https://github.com/video-dev/hls.js/issues/1331
1333. // Here we are mapping our internal track types to constant MP4 track IDs
1334. // With MSE, currently only one track of each type is possible, and we mux
1335. // whatever video/audio rendition is selected into it.
  1336. const RemuxerTrackIdConfig = {
  1337. video: 1,
  1338. audio: 2,
  1339. id3: 3,
  1340. text: 4
  1341. };
  1342. function bin2str(data) {
  1343. return String.fromCharCode.apply(null, data);
  1344. }
  1345. function readUint16(buffer, offset) {
  1346. const val = buffer[offset] << 8 | buffer[offset + 1];
  1347. return val < 0 ? 65536 + val : val;
  1348. }
  1349. function readUint32(buffer, offset) {
  1350. const val = readSint32(buffer, offset);
  1351. return val < 0 ? 4294967296 + val : val;
  1352. }
  1353. function readUint64(buffer, offset) {
  1354. let result = readUint32(buffer, offset);
  1355. result *= Math.pow(2, 32);
  1356. result += readUint32(buffer, offset + 4);
  1357. return result;
  1358. }
  1359. function readSint32(buffer, offset) {
  1360. return buffer[offset] << 24 | buffer[offset + 1] << 16 | buffer[offset + 2] << 8 | buffer[offset + 3];
  1361. }
  1362. function writeUint32(buffer, offset, value) {
  1363. buffer[offset] = value >> 24;
  1364. buffer[offset + 1] = value >> 16 & 0xff;
  1365. buffer[offset + 2] = value >> 8 & 0xff;
  1366. buffer[offset + 3] = value & 0xff;
  1367. }
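/*
 * Illustrative sketch of the integer readers on made-up bytes:
 *
 *   const buf = new Uint8Array([0xff, 0xff, 0xff, 0xff, 0x00, 0x01]);
 *   readSint32(buf, 0); // -1          (interpreted as signed 32-bit)
 *   readUint32(buf, 0); // 4294967295  (wrapped back into the unsigned range)
 *   readUint16(buf, 4); // 1
 */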
  1368. // Find "moof" box
  1369. function hasMoofData(data) {
  1370. const end = data.byteLength;
  1371. for (let i = 0; i < end;) {
  1372. const size = readUint32(data, i);
  1373. if (size > 8 && data[i + 4] === 0x6d && data[i + 5] === 0x6f && data[i + 6] === 0x6f && data[i + 7] === 0x66) {
  1374. return true;
  1375. }
  1376. i = size > 1 ? i + size : end;
  1377. }
  1378. return false;
  1379. }
  1380. // Find the data for a box specified by its path
  1381. function findBox(data, path) {
  1382. const results = [];
  1383. if (!path.length) {
  1384. // short-circuit the search for empty paths
  1385. return results;
  1386. }
  1387. const end = data.byteLength;
  1388. for (let i = 0; i < end;) {
  1389. const size = readUint32(data, i);
  1390. const type = bin2str(data.subarray(i + 4, i + 8));
  1391. const endbox = size > 1 ? i + size : end;
  1392. if (type === path[0]) {
  1393. if (path.length === 1) {
  1394. // this is the end of the path and we've found the box we were
  1395. // looking for
  1396. results.push(data.subarray(i + 8, endbox));
  1397. } else {
  1398. // recursively search for the next box along the path
  1399. const subresults = findBox(data.subarray(i + 8, endbox), path.slice(1));
  1400. if (subresults.length) {
  1401. push.apply(results, subresults);
  1402. }
  1403. }
  1404. }
  1405. i = endbox;
  1406. }
  1407. // we've finished searching all of data
  1408. return results;
  1409. }
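/*
 * Illustrative sketch on a made-up 16-byte buffer: an empty 'trak' box nested in a 'moov' box.
 * findBox returns the payload of each match, i.e. the bytes after the 8-byte size/type header.
 *
 *   const data = new Uint8Array([
 *     0x00, 0x00, 0x00, 0x10, 0x6d, 0x6f, 0x6f, 0x76, // 'moov', size 16
 *     0x00, 0x00, 0x00, 0x08, 0x74, 0x72, 0x61, 0x6b  // 'trak', size 8 (no payload)
 *   ]);
 *   findBox(data, ['moov', 'trak']); // [ Uint8Array(0) ]
 */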
  1410. function parseSegmentIndex(sidx) {
  1411. const references = [];
  1412. const version = sidx[0];
1413. // set the initial offset; skip the reference ID (not needed)
  1414. let index = 8;
  1415. const timescale = readUint32(sidx, index);
  1416. index += 4;
  1417. let earliestPresentationTime = 0;
  1418. let firstOffset = 0;
  1419. if (version === 0) {
  1420. earliestPresentationTime = readUint32(sidx, index);
  1421. firstOffset = readUint32(sidx, index + 4);
  1422. index += 8;
  1423. } else {
  1424. earliestPresentationTime = readUint64(sidx, index);
  1425. firstOffset = readUint64(sidx, index + 8);
  1426. index += 16;
  1427. }
  1428. // skip reserved
  1429. index += 2;
  1430. let startByte = sidx.length + firstOffset;
  1431. const referencesCount = readUint16(sidx, index);
  1432. index += 2;
  1433. for (let i = 0; i < referencesCount; i++) {
  1434. let referenceIndex = index;
  1435. const referenceInfo = readUint32(sidx, referenceIndex);
  1436. referenceIndex += 4;
  1437. const referenceSize = referenceInfo & 0x7fffffff;
  1438. const referenceType = (referenceInfo & 0x80000000) >>> 31;
  1439. if (referenceType === 1) {
  1440. logger.warn('SIDX has hierarchical references (not supported)');
  1441. return null;
  1442. }
  1443. const subsegmentDuration = readUint32(sidx, referenceIndex);
  1444. referenceIndex += 4;
  1445. references.push({
  1446. referenceSize,
  1447. subsegmentDuration,
  1448. // unscaled
  1449. info: {
  1450. duration: subsegmentDuration / timescale,
  1451. start: startByte,
  1452. end: startByte + referenceSize - 1
  1453. }
  1454. });
  1455. startByte += referenceSize;
  1456. // Skipping 1 bit for |startsWithSap|, 3 bits for |sapType|, and 28 bits
  1457. // for |sapDelta|.
  1458. referenceIndex += 4;
  1459. // skip to next ref
  1460. index = referenceIndex;
  1461. }
  1462. return {
  1463. earliestPresentationTime,
  1464. timescale,
  1465. version,
  1466. referencesCount,
  1467. references
  1468. };
  1469. }
  1470. /**
  1471. * Parses an MP4 initialization segment and extracts stream type and
  1472. * timescale values for any declared tracks. Timescale values indicate the
  1473. * number of clock ticks per second to assume for time-based values
  1474. * elsewhere in the MP4.
  1475. *
  1476. * To determine the start time of an MP4, you need two pieces of
  1477. * information: the timescale unit and the earliest base media decode
  1478. * time. Multiple timescales can be specified within an MP4 but the
  1479. * base media decode time is always expressed in the timescale from
  1480. * the media header box for the track:
  1481. * ```
  1482. * moov > trak > mdia > mdhd.timescale
  1483. * moov > trak > mdia > hdlr
  1484. * ```
  1485. * @param initSegment the bytes of the init segment
  1486. * @returns a hash of track type to timescale values or null if
  1487. * the init segment is malformed.
  1488. */
  1489. function parseInitSegment(initSegment) {
  1490. const result = [];
  1491. const traks = findBox(initSegment, ['moov', 'trak']);
  1492. for (let i = 0; i < traks.length; i++) {
  1493. const trak = traks[i];
  1494. const tkhd = findBox(trak, ['tkhd'])[0];
  1495. if (tkhd) {
  1496. let version = tkhd[0];
  1497. const trackId = readUint32(tkhd, version === 0 ? 12 : 20);
  1498. const mdhd = findBox(trak, ['mdia', 'mdhd'])[0];
  1499. if (mdhd) {
  1500. version = mdhd[0];
  1501. const timescale = readUint32(mdhd, version === 0 ? 12 : 20);
  1502. const hdlr = findBox(trak, ['mdia', 'hdlr'])[0];
  1503. if (hdlr) {
  1504. const hdlrType = bin2str(hdlr.subarray(8, 12));
  1505. const type = {
  1506. soun: ElementaryStreamTypes.AUDIO,
  1507. vide: ElementaryStreamTypes.VIDEO
  1508. }[hdlrType];
  1509. if (type) {
  1510. // Parse codec details
  1511. const stsd = findBox(trak, ['mdia', 'minf', 'stbl', 'stsd'])[0];
  1512. const stsdData = parseStsd(stsd);
  1513. result[trackId] = {
  1514. timescale,
  1515. type
  1516. };
  1517. result[type] = _objectSpread2({
  1518. timescale,
  1519. id: trackId
  1520. }, stsdData);
  1521. }
  1522. }
  1523. }
  1524. }
  1525. }
  1526. const trex = findBox(initSegment, ['moov', 'mvex', 'trex']);
  1527. trex.forEach(trex => {
  1528. const trackId = readUint32(trex, 4);
  1529. const track = result[trackId];
  1530. if (track) {
  1531. track.default = {
  1532. duration: readUint32(trex, 12),
  1533. flags: readUint32(trex, 20)
  1534. };
  1535. }
  1536. });
  1537. return result;
  1538. }
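/*
 * Shape of the returned initData (all values below are illustrative): tracks are indexed both by
 * numeric track ID and by elementary stream type, and trex defaults, when present, are attached
 * to the numeric-ID entry.
 *
 *   // {
 *   //   1: { timescale: 90000, type: 'video', default: { duration: 3000, flags: 0 } },
 *   //   video: { timescale: 90000, id: 1, codec: 'avc1.64001F', encrypted: false },
 *   //   2: { timescale: 44100, type: 'audio' },
 *   //   audio: { timescale: 44100, id: 2, codec: 'mp4a.40.2', encrypted: false }
 *   // }
 */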
  1539. function parseStsd(stsd) {
  1540. const sampleEntries = stsd.subarray(8);
  1541. const sampleEntriesEnd = sampleEntries.subarray(8 + 78);
  1542. const fourCC = bin2str(sampleEntries.subarray(4, 8));
  1543. let codec = fourCC;
  1544. const encrypted = fourCC === 'enca' || fourCC === 'encv';
  1545. if (encrypted) {
  1546. const encBox = findBox(sampleEntries, [fourCC])[0];
  1547. const encBoxChildren = encBox.subarray(fourCC === 'enca' ? 28 : 78);
  1548. const sinfs = findBox(encBoxChildren, ['sinf']);
  1549. sinfs.forEach(sinf => {
  1550. const schm = findBox(sinf, ['schm'])[0];
  1551. if (schm) {
  1552. const scheme = bin2str(schm.subarray(4, 8));
  1553. if (scheme === 'cbcs' || scheme === 'cenc') {
  1554. const frma = findBox(sinf, ['frma'])[0];
  1555. if (frma) {
  1556. // for encrypted content codec fourCC will be in frma
  1557. codec = bin2str(frma);
  1558. }
  1559. }
  1560. }
  1561. });
  1562. }
  1563. switch (codec) {
  1564. case 'avc1':
  1565. case 'avc2':
  1566. case 'avc3':
  1567. case 'avc4':
  1568. {
  1569. // extract profile + compatibility + level out of avcC box
  1570. const avcCBox = findBox(sampleEntriesEnd, ['avcC'])[0];
  1571. codec += '.' + toHex(avcCBox[1]) + toHex(avcCBox[2]) + toHex(avcCBox[3]);
  1572. break;
  1573. }
  1574. case 'mp4a':
  1575. {
  1576. const codecBox = findBox(sampleEntries, [fourCC])[0];
  1577. const esdsBox = findBox(codecBox.subarray(28), ['esds'])[0];
  1578. if (esdsBox && esdsBox.length > 12) {
  1579. let i = 4;
  1580. // ES Descriptor tag
  1581. if (esdsBox[i++] !== 0x03) {
  1582. break;
  1583. }
  1584. i = skipBERInteger(esdsBox, i);
  1585. i += 2; // skip es_id;
  1586. const flags = esdsBox[i++];
  1587. if (flags & 0x80) {
  1588. i += 2; // skip dependency es_id
  1589. }
  1590. if (flags & 0x40) {
  1591. i += esdsBox[i++]; // skip URL
  1592. }
  1593. // Decoder config descriptor
  1594. if (esdsBox[i++] !== 0x04) {
  1595. break;
  1596. }
  1597. i = skipBERInteger(esdsBox, i);
  1598. const objectType = esdsBox[i++];
  1599. if (objectType === 0x40) {
  1600. codec += '.' + toHex(objectType);
  1601. } else {
  1602. break;
  1603. }
  1604. i += 12;
  1605. // Decoder specific info
  1606. if (esdsBox[i++] !== 0x05) {
  1607. break;
  1608. }
  1609. i = skipBERInteger(esdsBox, i);
  1610. const firstByte = esdsBox[i++];
  1611. let audioObjectType = (firstByte & 0xf8) >> 3;
  1612. if (audioObjectType === 31) {
  1613. audioObjectType += 1 + ((firstByte & 0x7) << 3) + ((esdsBox[i] & 0xe0) >> 5);
  1614. }
  1615. codec += '.' + audioObjectType;
  1616. }
  1617. break;
  1618. }
  1619. case 'hvc1':
  1620. case 'hev1':
  1621. {
  1622. const hvcCBox = findBox(sampleEntriesEnd, ['hvcC'])[0];
  1623. const profileByte = hvcCBox[1];
  1624. const profileSpace = ['', 'A', 'B', 'C'][profileByte >> 6];
  1625. const generalProfileIdc = profileByte & 0x1f;
  1626. const profileCompat = readUint32(hvcCBox, 2);
  1627. const tierFlag = (profileByte & 0x20) >> 5 ? 'H' : 'L';
  1628. const levelIDC = hvcCBox[12];
  1629. const constraintIndicator = hvcCBox.subarray(6, 12);
  1630. codec += '.' + profileSpace + generalProfileIdc;
  1631. codec += '.' + profileCompat.toString(16).toUpperCase();
  1632. codec += '.' + tierFlag + levelIDC;
  1633. let constraintString = '';
  1634. for (let i = constraintIndicator.length; i--;) {
  1635. const byte = constraintIndicator[i];
  1636. if (byte || constraintString) {
  1637. const encodedByte = byte.toString(16).toUpperCase();
  1638. constraintString = '.' + encodedByte + constraintString;
  1639. }
  1640. }
  1641. codec += constraintString;
  1642. break;
  1643. }
  1644. case 'dvh1':
  1645. case 'dvhe':
  1646. {
  1647. const dvcCBox = findBox(sampleEntriesEnd, ['dvcC'])[0];
  1648. const profile = dvcCBox[2] >> 1 & 0x7f;
  1649. const level = dvcCBox[2] << 5 & 0x20 | dvcCBox[3] >> 3 & 0x1f;
  1650. codec += '.' + addLeadingZero(profile) + '.' + addLeadingZero(level);
  1651. break;
  1652. }
  1653. case 'vp09':
  1654. {
  1655. const vpcCBox = findBox(sampleEntriesEnd, ['vpcC'])[0];
  1656. const profile = vpcCBox[4];
  1657. const level = vpcCBox[5];
  1658. const bitDepth = vpcCBox[6] >> 4 & 0x0f;
  1659. codec += '.' + addLeadingZero(profile) + '.' + addLeadingZero(level) + '.' + addLeadingZero(bitDepth);
  1660. break;
  1661. }
  1662. case 'av01':
  1663. {
  1664. const av1CBox = findBox(sampleEntriesEnd, ['av1C'])[0];
  1665. const profile = av1CBox[1] >>> 5;
  1666. const level = av1CBox[1] & 0x1f;
  1667. const tierFlag = av1CBox[2] >>> 7 ? 'H' : 'M';
  1668. const highBitDepth = (av1CBox[2] & 0x40) >> 6;
  1669. const twelveBit = (av1CBox[2] & 0x20) >> 5;
  1670. const bitDepth = profile === 2 && highBitDepth ? twelveBit ? 12 : 10 : highBitDepth ? 10 : 8;
  1671. const monochrome = (av1CBox[2] & 0x10) >> 4;
  1672. const chromaSubsamplingX = (av1CBox[2] & 0x08) >> 3;
  1673. const chromaSubsamplingY = (av1CBox[2] & 0x04) >> 2;
  1674. const chromaSamplePosition = av1CBox[2] & 0x03;
  1675. // TODO: parse color_description_present_flag
  1676. // default it to BT.709/limited range for now
  1677. // more info https://aomediacodec.github.io/av1-isobmff/#av1codecconfigurationbox-syntax
  1678. const colorPrimaries = 1;
  1679. const transferCharacteristics = 1;
  1680. const matrixCoefficients = 1;
  1681. const videoFullRangeFlag = 0;
  1682. codec += '.' + profile + '.' + addLeadingZero(level) + tierFlag + '.' + addLeadingZero(bitDepth) + '.' + monochrome + '.' + chromaSubsamplingX + chromaSubsamplingY + chromaSamplePosition + '.' + addLeadingZero(colorPrimaries) + '.' + addLeadingZero(transferCharacteristics) + '.' + addLeadingZero(matrixCoefficients) + '.' + videoFullRangeFlag;
  1683. break;
  1684. }
  1685. }
  1686. return {
  1687. codec,
  1688. encrypted
  1689. };
  1690. }
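/*
 * Illustrative outputs (made-up box contents): the codec string is assembled from the sample
 * entry fourCC plus codec-specific configuration, for example
 *
 *   // avcC profile/compat/level bytes 0x64, 0x00, 0x1f     → { codec: 'avc1.64001F', encrypted: false }
 *   // esds with objectType 0x40 and audioObjectType 2      → { codec: 'mp4a.40.2', encrypted: false }
 *   // 'encv' sample entry with a cenc/cbcs 'sinf'           → encrypted: true, codec taken from 'frma'
 */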
  1691. function skipBERInteger(bytes, i) {
  1692. const limit = i + 5;
  1693. while (bytes[i++] & 0x80 && i < limit) {}
  1694. return i;
  1695. }
  1696. function toHex(x) {
  1697. return ('0' + x.toString(16).toUpperCase()).slice(-2);
  1698. }
  1699. function addLeadingZero(num) {
  1700. return (num < 10 ? '0' : '') + num;
  1701. }
  1702. function patchEncyptionData(initSegment, decryptdata) {
  1703. if (!initSegment || !decryptdata) {
  1704. return initSegment;
  1705. }
  1706. const keyId = decryptdata.keyId;
  1707. if (keyId && decryptdata.isCommonEncryption) {
  1708. const traks = findBox(initSegment, ['moov', 'trak']);
  1709. traks.forEach(trak => {
  1710. const stsd = findBox(trak, ['mdia', 'minf', 'stbl', 'stsd'])[0];
  1711. // skip the sample entry count
  1712. const sampleEntries = stsd.subarray(8);
  1713. let encBoxes = findBox(sampleEntries, ['enca']);
  1714. const isAudio = encBoxes.length > 0;
  1715. if (!isAudio) {
  1716. encBoxes = findBox(sampleEntries, ['encv']);
  1717. }
  1718. encBoxes.forEach(enc => {
  1719. const encBoxChildren = isAudio ? enc.subarray(28) : enc.subarray(78);
  1720. const sinfBoxes = findBox(encBoxChildren, ['sinf']);
  1721. sinfBoxes.forEach(sinf => {
  1722. const tenc = parseSinf(sinf);
  1723. if (tenc) {
  1724. // Look for default key id (keyID offset is always 8 within the tenc box):
  1725. const tencKeyId = tenc.subarray(8, 24);
  1726. if (!tencKeyId.some(b => b !== 0)) {
  1727. logger.log(`[eme] Patching keyId in 'enc${isAudio ? 'a' : 'v'}>sinf>>tenc' box: ${Hex.hexDump(tencKeyId)} -> ${Hex.hexDump(keyId)}`);
  1728. tenc.set(keyId, 8);
  1729. }
  1730. }
  1731. });
  1732. });
  1733. });
  1734. }
  1735. return initSegment;
  1736. }
  1737. function parseSinf(sinf) {
  1738. const schm = findBox(sinf, ['schm'])[0];
  1739. if (schm) {
  1740. const scheme = bin2str(schm.subarray(4, 8));
  1741. if (scheme === 'cbcs' || scheme === 'cenc') {
  1742. return findBox(sinf, ['schi', 'tenc'])[0];
  1743. }
  1744. }
  1745. logger.error(`[eme] missing 'schm' box`);
  1746. return null;
  1747. }
  1748. /**
  1749. * Determine the base media decode start time, in seconds, for an MP4
  1750. * fragment. If multiple fragments are specified, the earliest time is
  1751. * returned.
  1752. *
  1753. * The base media decode time can be parsed from track fragment
  1754. * metadata:
  1755. * ```
  1756. * moof > traf > tfdt.baseMediaDecodeTime
  1757. * ```
  1758. * It requires the timescale value from the mdhd to interpret.
  1759. *
  1760. * @param initData - a hash of track type to timescale values
  1761. * @param fmp4 - the bytes of the mp4 fragment
  1762. * @returns the earliest base media decode start time for the
  1763. * fragment, in seconds
  1764. */
  1765. function getStartDTS(initData, fmp4) {
  1766. // we need info from two children of each track fragment box
  1767. return findBox(fmp4, ['moof', 'traf']).reduce((result, traf) => {
  1768. const tfdt = findBox(traf, ['tfdt'])[0];
  1769. const version = tfdt[0];
  1770. const start = findBox(traf, ['tfhd']).reduce((result, tfhd) => {
  1771. // get the track id from the tfhd
  1772. const id = readUint32(tfhd, 4);
  1773. const track = initData[id];
  1774. if (track) {
  1775. let baseTime = readUint32(tfdt, 4);
  1776. if (version === 1) {
  1777. // If value is too large, assume signed 64-bit. Negative track fragment decode times are invalid, but they exist in the wild.
  1778. // This prevents large values from being used for initPTS, which can cause playlist sync issues.
  1779. // https://github.com/video-dev/hls.js/issues/5303
  1780. if (baseTime === UINT32_MAX$1) {
  1781. logger.warn(`[mp4-demuxer]: Ignoring assumed invalid signed 64-bit track fragment decode time`);
  1782. return result;
  1783. }
  1784. baseTime *= UINT32_MAX$1 + 1;
  1785. baseTime += readUint32(tfdt, 8);
  1786. }
  1787. // assume a 90kHz clock if no timescale was specified
  1788. const scale = track.timescale || 90e3;
  1789. // convert base time to seconds
  1790. const startTime = baseTime / scale;
  1791. if (isFiniteNumber(startTime) && (result === null || startTime < result)) {
  1792. return startTime;
  1793. }
  1794. }
  1795. return result;
  1796. }, null);
  1797. if (start !== null && isFiniteNumber(start) && (result === null || start < result)) {
  1798. return start;
  1799. }
  1800. return result;
  1801. }, null);
  1802. }
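/*
 * Illustrative example: with initData[1].timescale === 90000 and a version-0 tfdt whose
 * baseMediaDecodeTime is 900000, the fragment's start DTS is 900000 / 90000 = 10 seconds.
 * Across multiple traf boxes the smallest finite value wins.
 */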
  1803. /*
  1804. For Reference:
  1805. aligned(8) class TrackFragmentHeaderBox
  1806. extends FullBox(‘tfhd’, 0, tf_flags){
  1807. unsigned int(32) track_ID;
  1808. // all the following are optional fields
  1809. unsigned int(64) base_data_offset;
  1810. unsigned int(32) sample_description_index;
  1811. unsigned int(32) default_sample_duration;
  1812. unsigned int(32) default_sample_size;
  1813. unsigned int(32) default_sample_flags
  1814. }
  1815. */
  1816. function getDuration(data, initData) {
  1817. let rawDuration = 0;
  1818. let videoDuration = 0;
  1819. let audioDuration = 0;
  1820. const trafs = findBox(data, ['moof', 'traf']);
  1821. for (let i = 0; i < trafs.length; i++) {
  1822. const traf = trafs[i];
  1823. // There is only one tfhd & trun per traf
  1824. // This is true for CMAF style content, and we should perhaps check the ftyp
  1825. // and only look for a single trun then, but for ISOBMFF we should check
  1826. // for multiple track runs.
  1827. const tfhd = findBox(traf, ['tfhd'])[0];
  1828. // get the track id from the tfhd
  1829. const id = readUint32(tfhd, 4);
  1830. const track = initData[id];
  1831. if (!track) {
  1832. continue;
  1833. }
  1834. const trackDefault = track.default;
  1835. const tfhdFlags = readUint32(tfhd, 0) | (trackDefault == null ? void 0 : trackDefault.flags);
  1836. let sampleDuration = trackDefault == null ? void 0 : trackDefault.duration;
  1837. if (tfhdFlags & 0x000008) {
  1838. // 0x000008 indicates the presence of the default_sample_duration field
  1839. if (tfhdFlags & 0x000002) {
  1840. // 0x000002 indicates the presence of the sample_description_index field, which precedes default_sample_duration
  1841. // If present, the default_sample_duration exists at byte offset 12
  1842. sampleDuration = readUint32(tfhd, 12);
  1843. } else {
  1844. // Otherwise, the duration is at byte offset 8
  1845. sampleDuration = readUint32(tfhd, 8);
  1846. }
  1847. }
  1848. // assume a 90kHz clock if no timescale was specified
  1849. const timescale = track.timescale || 90e3;
  1850. const truns = findBox(traf, ['trun']);
  1851. for (let j = 0; j < truns.length; j++) {
  1852. rawDuration = computeRawDurationFromSamples(truns[j]);
  1853. if (!rawDuration && sampleDuration) {
  1854. const sampleCount = readUint32(truns[j], 4);
  1855. rawDuration = sampleDuration * sampleCount;
  1856. }
  1857. if (track.type === ElementaryStreamTypes.VIDEO) {
  1858. videoDuration += rawDuration / timescale;
  1859. } else if (track.type === ElementaryStreamTypes.AUDIO) {
  1860. audioDuration += rawDuration / timescale;
  1861. }
  1862. }
  1863. }
  1864. if (videoDuration === 0 && audioDuration === 0) {
  1865. // If duration samples are not available in the traf use sidx subsegment_duration
  1866. let sidxMinStart = Infinity;
  1867. let sidxMaxEnd = 0;
  1868. let sidxDuration = 0;
  1869. const sidxs = findBox(data, ['sidx']);
  1870. for (let i = 0; i < sidxs.length; i++) {
  1871. const sidx = parseSegmentIndex(sidxs[i]);
  1872. if (sidx != null && sidx.references) {
  1873. sidxMinStart = Math.min(sidxMinStart, sidx.earliestPresentationTime / sidx.timescale);
  1874. const subSegmentDuration = sidx.references.reduce((dur, ref) => dur + ref.info.duration || 0, 0);
  1875. sidxMaxEnd = Math.max(sidxMaxEnd, subSegmentDuration + sidx.earliestPresentationTime / sidx.timescale);
  1876. sidxDuration = sidxMaxEnd - sidxMinStart;
  1877. }
  1878. }
  1879. if (sidxDuration && isFiniteNumber(sidxDuration)) {
  1880. return sidxDuration;
  1881. }
  1882. }
  1883. if (videoDuration) {
  1884. return videoDuration;
  1885. }
  1886. return audioDuration;
  1887. }
  1888. /*
  1889. For Reference:
  1890. aligned(8) class TrackRunBox
  1891. extends FullBox(‘trun’, version, tr_flags) {
  1892. unsigned int(32) sample_count;
  1893. // the following are optional fields
  1894. signed int(32) data_offset;
  1895. unsigned int(32) first_sample_flags;
  1896. // all fields in the following array are optional
  1897. {
  1898. unsigned int(32) sample_duration;
  1899. unsigned int(32) sample_size;
  1900. unsigned int(32) sample_flags
  1901. if (version == 0)
1902. { unsigned int(32) sample_composition_time_offset; }
1903. else
1904. { signed int(32) sample_composition_time_offset; }
1905. }[ sample_count ]
  1906. }
  1907. */
  1908. function computeRawDurationFromSamples(trun) {
  1909. const flags = readUint32(trun, 0);
  1910. // Flags are at offset 0, non-optional sample_count is at offset 4. Therefore we start 8 bytes in.
  1911. // Each field is an int32, which is 4 bytes
  1912. let offset = 8;
  1913. // data-offset-present flag
  1914. if (flags & 0x000001) {
  1915. offset += 4;
  1916. }
  1917. // first-sample-flags-present flag
  1918. if (flags & 0x000004) {
  1919. offset += 4;
  1920. }
  1921. let duration = 0;
  1922. const sampleCount = readUint32(trun, 4);
  1923. for (let i = 0; i < sampleCount; i++) {
  1924. // sample-duration-present flag
  1925. if (flags & 0x000100) {
  1926. const sampleDuration = readUint32(trun, offset);
  1927. duration += sampleDuration;
  1928. offset += 4;
  1929. }
  1930. // sample-size-present flag
  1931. if (flags & 0x000200) {
  1932. offset += 4;
  1933. }
  1934. // sample-flags-present flag
  1935. if (flags & 0x000400) {
  1936. offset += 4;
  1937. }
  1938. // sample-composition-time-offsets-present flag
  1939. if (flags & 0x000800) {
  1940. offset += 4;
  1941. }
  1942. }
  1943. return duration;
  1944. }
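/*
 * Illustrative example: a trun with tr_flags 0x000301 (data-offset, sample-duration and
 * sample-size present) and sample_count = 2, where each sample's duration field is 3000,
 * returns 6000. The value is still in track timescale units; getDuration divides by the timescale.
 */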
  1945. function offsetStartDTS(initData, fmp4, timeOffset) {
  1946. findBox(fmp4, ['moof', 'traf']).forEach(traf => {
  1947. findBox(traf, ['tfhd']).forEach(tfhd => {
  1948. // get the track id from the tfhd
  1949. const id = readUint32(tfhd, 4);
  1950. const track = initData[id];
  1951. if (!track) {
  1952. return;
  1953. }
  1954. // assume a 90kHz clock if no timescale was specified
  1955. const timescale = track.timescale || 90e3;
  1956. // get the base media decode time from the tfdt
  1957. findBox(traf, ['tfdt']).forEach(tfdt => {
  1958. const version = tfdt[0];
  1959. const offset = timeOffset * timescale;
  1960. if (offset) {
  1961. let baseMediaDecodeTime = readUint32(tfdt, 4);
  1962. if (version === 0) {
  1963. baseMediaDecodeTime -= offset;
  1964. baseMediaDecodeTime = Math.max(baseMediaDecodeTime, 0);
  1965. writeUint32(tfdt, 4, baseMediaDecodeTime);
  1966. } else {
  1967. baseMediaDecodeTime *= Math.pow(2, 32);
  1968. baseMediaDecodeTime += readUint32(tfdt, 8);
  1969. baseMediaDecodeTime -= offset;
  1970. baseMediaDecodeTime = Math.max(baseMediaDecodeTime, 0);
  1971. const upper = Math.floor(baseMediaDecodeTime / (UINT32_MAX$1 + 1));
  1972. const lower = Math.floor(baseMediaDecodeTime % (UINT32_MAX$1 + 1));
  1973. writeUint32(tfdt, 4, upper);
  1974. writeUint32(tfdt, 8, lower);
  1975. }
  1976. }
  1977. });
  1978. });
  1979. });
  1980. }
  1981. // TODO: Check if the last moof+mdat pair is part of the valid range
  1982. function segmentValidRange(data) {
  1983. const segmentedRange = {
  1984. valid: null,
  1985. remainder: null
  1986. };
  1987. const moofs = findBox(data, ['moof']);
  1988. if (moofs.length < 2) {
  1989. segmentedRange.remainder = data;
  1990. return segmentedRange;
  1991. }
  1992. const last = moofs[moofs.length - 1];
1993. // Subtract 8 bytes: findBox returns the payload after the 8-byte box header, so byteOffset points just past it
  1994. segmentedRange.valid = sliceUint8(data, 0, last.byteOffset - 8);
  1995. segmentedRange.remainder = sliceUint8(data, last.byteOffset - 8);
  1996. return segmentedRange;
  1997. }
  1998. function appendUint8Array(data1, data2) {
  1999. const temp = new Uint8Array(data1.length + data2.length);
  2000. temp.set(data1);
  2001. temp.set(data2, data1.length);
  2002. return temp;
  2003. }
  2004. function parseSamples(timeOffset, track) {
  2005. const seiSamples = [];
  2006. const videoData = track.samples;
  2007. const timescale = track.timescale;
  2008. const trackId = track.id;
  2009. let isHEVCFlavor = false;
  2010. const moofs = findBox(videoData, ['moof']);
  2011. moofs.map(moof => {
  2012. const moofOffset = moof.byteOffset - 8;
  2013. const trafs = findBox(moof, ['traf']);
  2014. trafs.map(traf => {
  2015. // get the base media decode time from the tfdt
  2016. const baseTime = findBox(traf, ['tfdt']).map(tfdt => {
  2017. const version = tfdt[0];
  2018. let result = readUint32(tfdt, 4);
  2019. if (version === 1) {
  2020. result *= Math.pow(2, 32);
  2021. result += readUint32(tfdt, 8);
  2022. }
  2023. return result / timescale;
  2024. })[0];
  2025. if (baseTime !== undefined) {
  2026. timeOffset = baseTime;
  2027. }
  2028. return findBox(traf, ['tfhd']).map(tfhd => {
  2029. const id = readUint32(tfhd, 4);
  2030. const tfhdFlags = readUint32(tfhd, 0) & 0xffffff;
  2031. const baseDataOffsetPresent = (tfhdFlags & 0x000001) !== 0;
  2032. const sampleDescriptionIndexPresent = (tfhdFlags & 0x000002) !== 0;
  2033. const defaultSampleDurationPresent = (tfhdFlags & 0x000008) !== 0;
  2034. let defaultSampleDuration = 0;
  2035. const defaultSampleSizePresent = (tfhdFlags & 0x000010) !== 0;
  2036. let defaultSampleSize = 0;
  2037. const defaultSampleFlagsPresent = (tfhdFlags & 0x000020) !== 0;
  2038. let tfhdOffset = 8;
  2039. if (id === trackId) {
  2040. if (baseDataOffsetPresent) {
  2041. tfhdOffset += 8;
  2042. }
  2043. if (sampleDescriptionIndexPresent) {
  2044. tfhdOffset += 4;
  2045. }
  2046. if (defaultSampleDurationPresent) {
  2047. defaultSampleDuration = readUint32(tfhd, tfhdOffset);
  2048. tfhdOffset += 4;
  2049. }
  2050. if (defaultSampleSizePresent) {
  2051. defaultSampleSize = readUint32(tfhd, tfhdOffset);
  2052. tfhdOffset += 4;
  2053. }
  2054. if (defaultSampleFlagsPresent) {
  2055. tfhdOffset += 4;
  2056. }
  2057. if (track.type === 'video') {
  2058. isHEVCFlavor = isHEVC(track.codec);
  2059. }
  2060. findBox(traf, ['trun']).map(trun => {
  2061. const version = trun[0];
  2062. const flags = readUint32(trun, 0) & 0xffffff;
  2063. const dataOffsetPresent = (flags & 0x000001) !== 0;
  2064. let dataOffset = 0;
  2065. const firstSampleFlagsPresent = (flags & 0x000004) !== 0;
  2066. const sampleDurationPresent = (flags & 0x000100) !== 0;
  2067. let sampleDuration = 0;
  2068. const sampleSizePresent = (flags & 0x000200) !== 0;
  2069. let sampleSize = 0;
  2070. const sampleFlagsPresent = (flags & 0x000400) !== 0;
  2071. const sampleCompositionOffsetsPresent = (flags & 0x000800) !== 0;
  2072. let compositionOffset = 0;
  2073. const sampleCount = readUint32(trun, 4);
  2074. let trunOffset = 8; // past version, flags, and sample count
  2075. if (dataOffsetPresent) {
  2076. dataOffset = readUint32(trun, trunOffset);
  2077. trunOffset += 4;
  2078. }
  2079. if (firstSampleFlagsPresent) {
  2080. trunOffset += 4;
  2081. }
  2082. let sampleOffset = dataOffset + moofOffset;
  2083. for (let ix = 0; ix < sampleCount; ix++) {
  2084. if (sampleDurationPresent) {
  2085. sampleDuration = readUint32(trun, trunOffset);
  2086. trunOffset += 4;
  2087. } else {
  2088. sampleDuration = defaultSampleDuration;
  2089. }
  2090. if (sampleSizePresent) {
  2091. sampleSize = readUint32(trun, trunOffset);
  2092. trunOffset += 4;
  2093. } else {
  2094. sampleSize = defaultSampleSize;
  2095. }
  2096. if (sampleFlagsPresent) {
  2097. trunOffset += 4;
  2098. }
  2099. if (sampleCompositionOffsetsPresent) {
  2100. if (version === 0) {
  2101. compositionOffset = readUint32(trun, trunOffset);
  2102. } else {
  2103. compositionOffset = readSint32(trun, trunOffset);
  2104. }
  2105. trunOffset += 4;
  2106. }
  2107. if (track.type === ElementaryStreamTypes.VIDEO) {
  2108. let naluTotalSize = 0;
  2109. while (naluTotalSize < sampleSize) {
  2110. const naluSize = readUint32(videoData, sampleOffset);
  2111. sampleOffset += 4;
  2112. if (isSEIMessage(isHEVCFlavor, videoData[sampleOffset])) {
  2113. const data = videoData.subarray(sampleOffset, sampleOffset + naluSize);
  2114. parseSEIMessageFromNALu(data, isHEVCFlavor ? 2 : 1, timeOffset + compositionOffset / timescale, seiSamples);
  2115. }
  2116. sampleOffset += naluSize;
  2117. naluTotalSize += naluSize + 4;
  2118. }
  2119. }
  2120. timeOffset += sampleDuration / timescale;
  2121. }
  2122. });
  2123. }
  2124. });
  2125. });
  2126. });
  2127. return seiSamples;
  2128. }
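/**
 * Returns true when the codec string denotes an HEVC flavor
 * (hvc1/hev1, or the Dolby Vision variants dvh1/dvhe), based on the
 * four-character code before the first '.'.
 */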
  2129. function isHEVC(codec) {
  2130. if (!codec) {
  2131. return false;
  2132. }
  2133. const delimit = codec.indexOf('.');
  2134. const baseCodec = delimit < 0 ? codec : codec.substring(0, delimit);
  2135. return baseCodec === 'hvc1' || baseCodec === 'hev1' ||
  2136. // Dolby Vision
  2137. baseCodec === 'dvh1' || baseCodec === 'dvhe';
  2138. }
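/**
 * Check whether a NAL unit header starts an SEI message:
 * HEVC uses NAL unit types 39 (PREFIX_SEI) and 40 (SUFFIX_SEI),
 * AVC/H.264 uses NAL unit type 6.
 */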
  2139. function isSEIMessage(isHEVCFlavor, naluHeader) {
  2140. if (isHEVCFlavor) {
  2141. const naluType = naluHeader >> 1 & 0x3f;
  2142. return naluType === 39 || naluType === 40;
  2143. } else {
  2144. const naluType = naluHeader & 0x1f;
  2145. return naluType === 6;
  2146. }
  2147. }
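/**
 * Parse the SEI messages contained in a single NAL unit and append any
 * recognized payloads to `samples`. Payload type and size use the standard
 * 0xff-continuation byte coding. Payload type 4 (ITU-T T.35 user data) is
 * checked for CEA-708-wrapped CEA-608 caption bytes; payload type 5
 * (unregistered user data) yields a UUID plus the remaining user data bytes.
 */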
  2148. function parseSEIMessageFromNALu(unescapedData, headerSize, pts, samples) {
  2149. const data = discardEPB(unescapedData);
  2150. let seiPtr = 0;
  2151. // skip nal header
  2152. seiPtr += headerSize;
  2153. let payloadType = 0;
  2154. let payloadSize = 0;
  2155. let b = 0;
  2156. while (seiPtr < data.length) {
  2157. payloadType = 0;
  2158. do {
  2159. if (seiPtr >= data.length) {
  2160. break;
  2161. }
  2162. b = data[seiPtr++];
  2163. payloadType += b;
  2164. } while (b === 0xff);
  2165. // Parse payload size.
  2166. payloadSize = 0;
  2167. do {
  2168. if (seiPtr >= data.length) {
  2169. break;
  2170. }
  2171. b = data[seiPtr++];
  2172. payloadSize += b;
  2173. } while (b === 0xff);
  2174. const leftOver = data.length - seiPtr;
  2175. // Create a variable to process the payload
  2176. let payPtr = seiPtr;
  2177. // Increment the seiPtr to the end of the payload
  2178. if (payloadSize < leftOver) {
  2179. seiPtr += payloadSize;
  2180. } else if (payloadSize > leftOver) {
  2181. // Some type of corruption has happened?
2182. logger.error(`Malformed SEI payload. Payload size ${payloadSize} exceeds the ${leftOver} bytes left to parse.`);
  2183. // We might be able to parse some data, but let's be safe and ignore it.
  2184. break;
  2185. }
  2186. if (payloadType === 4) {
  2187. const countryCode = data[payPtr++];
  2188. if (countryCode === 181) {
  2189. const providerCode = readUint16(data, payPtr);
  2190. payPtr += 2;
  2191. if (providerCode === 49) {
  2192. const userStructure = readUint32(data, payPtr);
  2193. payPtr += 4;
  2194. if (userStructure === 0x47413934) {
  2195. const userDataType = data[payPtr++];
  2196. // Raw CEA-608 bytes wrapped in CEA-708 packet
  2197. if (userDataType === 3) {
  2198. const firstByte = data[payPtr++];
  2199. const totalCCs = 0x1f & firstByte;
  2200. const enabled = 0x40 & firstByte;
  2201. const totalBytes = enabled ? 2 + totalCCs * 3 : 0;
  2202. const byteArray = new Uint8Array(totalBytes);
  2203. if (enabled) {
  2204. byteArray[0] = firstByte;
  2205. for (let i = 1; i < totalBytes; i++) {
  2206. byteArray[i] = data[payPtr++];
  2207. }
  2208. }
  2209. samples.push({
  2210. type: userDataType,
  2211. payloadType,
  2212. pts,
  2213. bytes: byteArray
  2214. });
  2215. }
  2216. }
  2217. }
  2218. }
  2219. } else if (payloadType === 5) {
  2220. if (payloadSize > 16) {
  2221. const uuidStrArray = [];
  2222. for (let i = 0; i < 16; i++) {
  2223. const _b = data[payPtr++].toString(16);
  2224. uuidStrArray.push(_b.length == 1 ? '0' + _b : _b);
  2225. if (i === 3 || i === 5 || i === 7 || i === 9) {
  2226. uuidStrArray.push('-');
  2227. }
  2228. }
  2229. const length = payloadSize - 16;
  2230. const userDataBytes = new Uint8Array(length);
  2231. for (let i = 0; i < length; i++) {
  2232. userDataBytes[i] = data[payPtr++];
  2233. }
  2234. samples.push({
  2235. payloadType,
  2236. pts,
  2237. uuid: uuidStrArray.join(''),
  2238. userData: utf8ArrayToStr(userDataBytes),
  2239. userDataBytes
  2240. });
  2241. }
  2242. }
  2243. }
  2244. }
  2245. /**
  2246. * remove Emulation Prevention bytes from a RBSP
  2247. */
  2248. function discardEPB(data) {
  2249. const length = data.byteLength;
  2250. const EPBPositions = [];
  2251. let i = 1;
  2252. // Find all `Emulation Prevention Bytes`
  2253. while (i < length - 2) {
  2254. if (data[i] === 0 && data[i + 1] === 0 && data[i + 2] === 0x03) {
  2255. EPBPositions.push(i + 2);
  2256. i += 2;
  2257. } else {
  2258. i++;
  2259. }
  2260. }
  2261. // If no Emulation Prevention Bytes were found just return the original
  2262. // array
  2263. if (EPBPositions.length === 0) {
  2264. return data;
  2265. }
  2266. // Create a new array to hold the NAL unit data
  2267. const newLength = length - EPBPositions.length;
  2268. const newData = new Uint8Array(newLength);
  2269. let sourceIndex = 0;
  2270. for (i = 0; i < newLength; sourceIndex++, i++) {
  2271. if (sourceIndex === EPBPositions[0]) {
  2272. // Skip this byte
  2273. sourceIndex++;
  2274. // Remove this position index
  2275. EPBPositions.shift();
  2276. }
  2277. newData[i] = data[sourceIndex];
  2278. }
  2279. return newData;
  2280. }
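/**
 * Parse a DASH 'emsg' (event message) box into its scheme_id_uri, value,
 * timing fields and raw message payload. Version 0 carries a
 * presentation_time_delta; version 1 carries a 64-bit absolute
 * presentation_time (clamped to Number.MAX_SAFE_INTEGER on overflow).
 */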
  2281. function parseEmsg(data) {
  2282. const version = data[0];
  2283. let schemeIdUri = '';
  2284. let value = '';
  2285. let timeScale = 0;
  2286. let presentationTimeDelta = 0;
  2287. let presentationTime = 0;
  2288. let eventDuration = 0;
  2289. let id = 0;
  2290. let offset = 0;
  2291. if (version === 0) {
  2292. while (bin2str(data.subarray(offset, offset + 1)) !== '\0') {
  2293. schemeIdUri += bin2str(data.subarray(offset, offset + 1));
  2294. offset += 1;
  2295. }
  2296. schemeIdUri += bin2str(data.subarray(offset, offset + 1));
  2297. offset += 1;
  2298. while (bin2str(data.subarray(offset, offset + 1)) !== '\0') {
  2299. value += bin2str(data.subarray(offset, offset + 1));
  2300. offset += 1;
  2301. }
  2302. value += bin2str(data.subarray(offset, offset + 1));
  2303. offset += 1;
  2304. timeScale = readUint32(data, 12);
  2305. presentationTimeDelta = readUint32(data, 16);
  2306. eventDuration = readUint32(data, 20);
  2307. id = readUint32(data, 24);
  2308. offset = 28;
  2309. } else if (version === 1) {
  2310. offset += 4;
  2311. timeScale = readUint32(data, offset);
  2312. offset += 4;
  2313. const leftPresentationTime = readUint32(data, offset);
  2314. offset += 4;
  2315. const rightPresentationTime = readUint32(data, offset);
  2316. offset += 4;
  2317. presentationTime = 2 ** 32 * leftPresentationTime + rightPresentationTime;
  2318. if (!isSafeInteger(presentationTime)) {
  2319. presentationTime = Number.MAX_SAFE_INTEGER;
  2320. logger.warn('Presentation time exceeds safe integer limit and wrapped to max safe integer in parsing emsg box');
  2321. }
  2322. eventDuration = readUint32(data, offset);
  2323. offset += 4;
  2324. id = readUint32(data, offset);
  2325. offset += 4;
  2326. while (bin2str(data.subarray(offset, offset + 1)) !== '\0') {
  2327. schemeIdUri += bin2str(data.subarray(offset, offset + 1));
  2328. offset += 1;
  2329. }
  2330. schemeIdUri += bin2str(data.subarray(offset, offset + 1));
  2331. offset += 1;
  2332. while (bin2str(data.subarray(offset, offset + 1)) !== '\0') {
  2333. value += bin2str(data.subarray(offset, offset + 1));
  2334. offset += 1;
  2335. }
  2336. value += bin2str(data.subarray(offset, offset + 1));
  2337. offset += 1;
  2338. }
  2339. const payload = data.subarray(offset, data.byteLength);
  2340. return {
  2341. schemeIdUri,
  2342. value,
  2343. timeScale,
  2344. presentationTime,
  2345. presentationTimeDelta,
  2346. eventDuration,
  2347. id,
  2348. payload
  2349. };
  2350. }
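/**
 * Represents an HLS EXT-X-KEY / EXT-X-SESSION-KEY: decryption method, key URI,
 * key format and format versions, plus the IV when one is declared.
 * `isCommonEncryption` is true for any encrypted method other than AES-128.
 */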
  2351. class LevelKey {
  2352. static clearKeyUriToKeyIdMap() {
  2353. }
  2354. constructor(method, uri, format, formatversions = [1], iv = null) {
  2355. this.uri = void 0;
  2356. this.method = void 0;
  2357. this.keyFormat = void 0;
  2358. this.keyFormatVersions = void 0;
  2359. this.encrypted = void 0;
  2360. this.isCommonEncryption = void 0;
  2361. this.iv = null;
  2362. this.key = null;
  2363. this.keyId = null;
  2364. this.pssh = null;
  2365. this.method = method;
  2366. this.uri = uri;
  2367. this.keyFormat = format;
  2368. this.keyFormatVersions = formatversions;
  2369. this.iv = iv;
  2370. this.encrypted = method ? method !== 'NONE' : false;
  2371. this.isCommonEncryption = this.encrypted && method !== 'AES-128';
  2372. }
  2373. isSupported() {
2374. // Segment encryption (AES-128) and no encryption are always supported
  2375. if (this.method) {
  2376. if (this.method === 'AES-128' || this.method === 'NONE') {
  2377. return true;
  2378. }
  2379. if (this.keyFormat === 'identity') {
2380. // Maintain support for clear SAMPLE-AES with MPEG-2 TS
  2381. return this.method === 'SAMPLE-AES';
  2382. }
  2383. }
  2384. return false;
  2385. }
  2386. getDecryptData(sn) {
  2387. if (!this.encrypted || !this.uri) {
  2388. return null;
  2389. }
  2390. if (this.method === 'AES-128' && this.uri && !this.iv) {
  2391. if (typeof sn !== 'number') {
2392. // We are fetching decryption data for an initialization segment.
2393. // If the segment was encrypted with AES-128,
2394. // it must have an IV defined; we cannot substitute the Segment Number in.
  2395. if (this.method === 'AES-128' && !this.iv) {
  2396. logger.warn(`missing IV for initialization segment with method="${this.method}" - compliance issue`);
  2397. }
2398. // Explicitly set sn to 0, the value the implicit conversion of 'initSegment' would produce, for IV generation.
  2399. sn = 0;
  2400. }
  2401. const iv = createInitializationVector(sn);
  2402. const decryptdata = new LevelKey(this.method, this.uri, 'identity', this.keyFormatVersions, iv);
  2403. return decryptdata;
  2404. }
  2405. {
  2406. return this;
  2407. }
  2408. }
  2409. }
  2410. function createInitializationVector(segmentNumber) {
  2411. const uint8View = new Uint8Array(16);
  2412. for (let i = 12; i < 16; i++) {
  2413. uint8View[i] = segmentNumber >> 8 * (15 - i) & 0xff;
  2414. }
  2415. return uint8View;
  2416. }
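// Example: createInitializationVector(3) returns a 16-byte IV whose last four
// bytes encode the segment number big-endian, i.e. bytes 12-15 are [0, 0, 0, 3]
// and all other bytes are 0.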
  2417. /**
  2418. * MediaSource helper
  2419. */
  2420. function getMediaSource(preferManagedMediaSource = true) {
  2421. if (typeof self === 'undefined') return undefined;
  2422. const mms = (preferManagedMediaSource || !self.MediaSource) && self.ManagedMediaSource;
  2423. return mms || self.MediaSource || self.WebKitMediaSource;
  2424. }
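// Illustrative usage (the variable name is an example, not part of the library):
// const MediaSourceImpl = getMediaSource(true);
// // -> self.ManagedMediaSource when it exists and is preferred (or when
// //    MediaSource is absent), otherwise self.MediaSource or
// //    self.WebKitMediaSource, else undefined.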
  2425. function isManagedMediaSource(source) {
  2426. return typeof self !== 'undefined' && source === self.ManagedMediaSource;
  2427. }
  2428. // from http://mp4ra.org/codecs.html
  2429. // values indicate codec selection preference (lower is higher priority)
  2430. const sampleEntryCodesISO = {
  2431. audio: {
  2432. a3ds: 1,
  2433. 'ac-3': 0.95,
  2434. 'ac-4': 1,
  2435. alac: 0.9,
  2436. alaw: 1,
  2437. dra1: 1,
  2438. 'dts+': 1,
  2439. 'dts-': 1,
  2440. dtsc: 1,
  2441. dtse: 1,
  2442. dtsh: 1,
  2443. 'ec-3': 0.9,
  2444. enca: 1,
  2445. fLaC: 0.9,
  2446. // MP4-RA listed codec entry for FLAC
  2447. flac: 0.9,
  2448. // legacy browser codec name for FLAC
  2449. FLAC: 0.9,
  2450. // some manifests may list "FLAC" with Apple's tools
  2451. g719: 1,
  2452. g726: 1,
  2453. m4ae: 1,
  2454. mha1: 1,
  2455. mha2: 1,
  2456. mhm1: 1,
  2457. mhm2: 1,
  2458. mlpa: 1,
  2459. mp4a: 1,
  2460. 'raw ': 1,
  2461. Opus: 1,
  2462. opus: 1,
2463. // browsers expect this to be lowercase even though MP4RA lists 'Opus'
  2464. samr: 1,
  2465. sawb: 1,
  2466. sawp: 1,
  2467. sevc: 1,
  2468. sqcp: 1,
  2469. ssmv: 1,
  2470. twos: 1,
  2471. ulaw: 1
  2472. },
  2473. video: {
  2474. avc1: 1,
  2475. avc2: 1,
  2476. avc3: 1,
  2477. avc4: 1,
  2478. avcp: 1,
  2479. av01: 0.8,
  2480. drac: 1,
  2481. dva1: 1,
  2482. dvav: 1,
  2483. dvh1: 0.7,
  2484. dvhe: 0.7,
  2485. encv: 1,
  2486. hev1: 0.75,
  2487. hvc1: 0.75,
  2488. mjp2: 1,
  2489. mp4v: 1,
  2490. mvc1: 1,
  2491. mvc2: 1,
  2492. mvc3: 1,
  2493. mvc4: 1,
  2494. resv: 1,
  2495. rv60: 1,
  2496. s263: 1,
  2497. svc1: 1,
  2498. svc2: 1,
  2499. 'vc-1': 1,
  2500. vp08: 1,
  2501. vp09: 0.9
  2502. },
  2503. text: {
  2504. stpp: 1,
  2505. wvtt: 1
  2506. }
  2507. };
  2508. function isCodecType(codec, type) {
  2509. const typeCodes = sampleEntryCodesISO[type];
  2510. return !!typeCodes && !!typeCodes[codec.slice(0, 4)];
  2511. }
  2512. function areCodecsMediaSourceSupported(codecs, type, preferManagedMediaSource = true) {
  2513. return !codecs.split(',').some(codec => !isCodecMediaSourceSupported(codec, type, preferManagedMediaSource));
  2514. }
  2515. function isCodecMediaSourceSupported(codec, type, preferManagedMediaSource = true) {
  2516. var _MediaSource$isTypeSu;
  2517. const MediaSource = getMediaSource(preferManagedMediaSource);
  2518. return (_MediaSource$isTypeSu = MediaSource == null ? void 0 : MediaSource.isTypeSupported(mimeTypeForCodec(codec, type))) != null ? _MediaSource$isTypeSu : false;
  2519. }
  2520. function mimeTypeForCodec(codec, type) {
  2521. return `${type}/mp4;codecs="${codec}"`;
  2522. }
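// Example: mimeTypeForCodec('avc1.42E01E', 'video') returns
// 'video/mp4;codecs="avc1.42E01E"', the string passed to
// MediaSource.isTypeSupported() above.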
  2523. function videoCodecPreferenceValue(videoCodec) {
  2524. if (videoCodec) {
  2525. const fourCC = videoCodec.substring(0, 4);
  2526. return sampleEntryCodesISO.video[fourCC];
  2527. }
  2528. return 2;
  2529. }
  2530. function codecsSetSelectionPreferenceValue(codecSet) {
  2531. return codecSet.split(',').reduce((num, fourCC) => {
  2532. const preferenceValue = sampleEntryCodesISO.video[fourCC];
  2533. if (preferenceValue) {
  2534. return (preferenceValue * 2 + num) / (num ? 3 : 2);
  2535. }
  2536. return (sampleEntryCodesISO.audio[fourCC] + num) / (num ? 2 : 1);
  2537. }, 0);
  2538. }
  2539. const CODEC_COMPATIBLE_NAMES = {};
  2540. function getCodecCompatibleNameLower(lowerCaseCodec, preferManagedMediaSource = true) {
  2541. if (CODEC_COMPATIBLE_NAMES[lowerCaseCodec]) {
  2542. return CODEC_COMPATIBLE_NAMES[lowerCaseCodec];
  2543. }
2544. // Ideally fLaC and Opus would be first (spec-compliant) but
2545. // some browsers will report that fLaC is supported and then fail.
  2546. // see: https://bugs.chromium.org/p/chromium/issues/detail?id=1422728
  2547. const codecsToCheck = {
  2548. flac: ['flac', 'fLaC', 'FLAC'],
  2549. opus: ['opus', 'Opus']
  2550. }[lowerCaseCodec];
  2551. for (let i = 0; i < codecsToCheck.length; i++) {
  2552. if (isCodecMediaSourceSupported(codecsToCheck[i], 'audio', preferManagedMediaSource)) {
  2553. CODEC_COMPATIBLE_NAMES[lowerCaseCodec] = codecsToCheck[i];
  2554. return codecsToCheck[i];
  2555. }
  2556. }
  2557. return lowerCaseCodec;
  2558. }
  2559. const AUDIO_CODEC_REGEXP = /flac|opus/i;
  2560. function getCodecCompatibleName(codec, preferManagedMediaSource = true) {
  2561. return codec.replace(AUDIO_CODEC_REGEXP, m => getCodecCompatibleNameLower(m.toLowerCase(), preferManagedMediaSource));
  2562. }
  2563. function pickMostCompleteCodecName(parsedCodec, levelCodec) {
2564. // Parsing of mp4a codec strings in mp4-tools from media is incomplete as of d8c6c7a
2565. // so use the level codec if the parsed codec is unavailable or incomplete
  2566. if (parsedCodec && parsedCodec !== 'mp4a') {
  2567. return parsedCodec;
  2568. }
  2569. return levelCodec ? levelCodec.split(',')[0] : levelCodec;
  2570. }
  2571. function convertAVC1ToAVCOTI(codec) {
  2572. // Convert avc1 codec string from RFC-4281 to RFC-6381 for MediaSource.isTypeSupported
  2573. const avcdata = codec.split('.');
  2574. if (avcdata.length > 2) {
  2575. let result = avcdata.shift() + '.';
  2576. result += parseInt(avcdata.shift()).toString(16);
  2577. result += ('000' + parseInt(avcdata.shift()).toString(16)).slice(-4);
  2578. return result;
  2579. }
  2580. return codec;
  2581. }
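// Example: convertAVC1ToAVCOTI('avc1.66.30') returns 'avc1.42001e'
// (profile 66 -> 0x42, level 30 -> 0x001e); strings already in the RFC-6381
// form, such as 'avc1.42001e', are returned unchanged.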
  2582. const MASTER_PLAYLIST_REGEX = /#EXT-X-STREAM-INF:([^\r\n]*)(?:[\r\n](?:#[^\r\n]*)?)*([^\r\n]+)|#EXT-X-(SESSION-DATA|SESSION-KEY|DEFINE|CONTENT-STEERING|START):([^\r\n]*)[\r\n]+/g;
  2583. const MASTER_PLAYLIST_MEDIA_REGEX = /#EXT-X-MEDIA:(.*)/g;
  2584. const IS_MEDIA_PLAYLIST = /^#EXT(?:INF|-X-TARGETDURATION):/m; // Handle empty Media Playlist (first EXTINF not signaled, but TARGETDURATION present)
  2585. const LEVEL_PLAYLIST_REGEX_FAST = new RegExp([/#EXTINF:\s*(\d*(?:\.\d+)?)(?:,(.*)\s+)?/.source,
  2586. // duration (#EXTINF:<duration>,<title>), group 1 => duration, group 2 => title
  2587. /(?!#) *(\S[^\r\n]*)/.source,
  2588. // segment URI, group 3 => the URI (note newline is not eaten)
  2589. /#EXT-X-BYTERANGE:*(.+)/.source,
  2590. // next segment's byterange, group 4 => range spec (x@y)
  2591. /#EXT-X-PROGRAM-DATE-TIME:(.+)/.source,
  2592. // next segment's program date/time group 5 => the datetime spec
  2593. /#.*/.source // All other non-segment oriented tags will match with all groups empty
  2594. ].join('|'), 'g');
  2595. const LEVEL_PLAYLIST_REGEX_SLOW = new RegExp([/#(EXTM3U)/.source, /#EXT-X-(DATERANGE|DEFINE|KEY|MAP|PART|PART-INF|PLAYLIST-TYPE|PRELOAD-HINT|RENDITION-REPORT|SERVER-CONTROL|SKIP|START):(.+)/.source, /#EXT-X-(BITRATE|DISCONTINUITY-SEQUENCE|MEDIA-SEQUENCE|TARGETDURATION|VERSION): *(\d+)/.source, /#EXT-X-(DISCONTINUITY|ENDLIST|GAP|INDEPENDENT-SEGMENTS)/.source, /(#)([^:]*):(.*)/.source, /(#)(.*)(?:.*)\r?\n?/.source].join('|'));
  2596. class M3U8Parser {
  2597. static findGroup(groups, mediaGroupId) {
  2598. for (let i = 0; i < groups.length; i++) {
  2599. const group = groups[i];
  2600. if (group.id === mediaGroupId) {
  2601. return group;
  2602. }
  2603. }
  2604. }
  2605. static resolve(url, baseUrl) {
  2606. return urlToolkitExports.buildAbsoluteURL(baseUrl, url, {
  2607. alwaysNormalize: true
  2608. });
  2609. }
  2610. static isMediaPlaylist(str) {
  2611. return IS_MEDIA_PLAYLIST.test(str);
  2612. }
  2613. static parseMasterPlaylist(string, baseurl) {
  2614. const hasVariableRefs = false;
  2615. const parsed = {
  2616. contentSteering: null,
  2617. levels: [],
  2618. playlistParsingError: null,
  2619. sessionData: null,
  2620. sessionKeys: null,
  2621. startTimeOffset: null,
  2622. variableList: null,
  2623. hasVariableRefs
  2624. };
  2625. const levelsWithKnownCodecs = [];
  2626. MASTER_PLAYLIST_REGEX.lastIndex = 0;
  2627. let result;
  2628. while ((result = MASTER_PLAYLIST_REGEX.exec(string)) != null) {
  2629. if (result[1]) {
  2630. var _level$unknownCodecs;
  2631. // '#EXT-X-STREAM-INF' is found, parse level tag in group 1
  2632. const attrs = new AttrList(result[1]);
  2633. const uri = result[2];
  2634. const level = {
  2635. attrs,
  2636. bitrate: attrs.decimalInteger('BANDWIDTH') || attrs.decimalInteger('AVERAGE-BANDWIDTH'),
  2637. name: attrs.NAME,
  2638. url: M3U8Parser.resolve(uri, baseurl)
  2639. };
  2640. const resolution = attrs.decimalResolution('RESOLUTION');
  2641. if (resolution) {
  2642. level.width = resolution.width;
  2643. level.height = resolution.height;
  2644. }
  2645. setCodecs(attrs.CODECS, level);
  2646. if (!((_level$unknownCodecs = level.unknownCodecs) != null && _level$unknownCodecs.length)) {
  2647. levelsWithKnownCodecs.push(level);
  2648. }
  2649. parsed.levels.push(level);
  2650. } else if (result[3]) {
  2651. const tag = result[3];
  2652. const attributes = result[4];
  2653. switch (tag) {
  2654. case 'SESSION-DATA':
  2655. {
  2656. // #EXT-X-SESSION-DATA
  2657. const sessionAttrs = new AttrList(attributes);
  2658. const dataId = sessionAttrs['DATA-ID'];
  2659. if (dataId) {
  2660. if (parsed.sessionData === null) {
  2661. parsed.sessionData = {};
  2662. }
  2663. parsed.sessionData[dataId] = sessionAttrs;
  2664. }
  2665. break;
  2666. }
  2667. case 'SESSION-KEY':
  2668. {
  2669. // #EXT-X-SESSION-KEY
  2670. const sessionKey = parseKey(attributes, baseurl);
  2671. if (sessionKey.encrypted && sessionKey.isSupported()) {
  2672. if (parsed.sessionKeys === null) {
  2673. parsed.sessionKeys = [];
  2674. }
  2675. parsed.sessionKeys.push(sessionKey);
  2676. } else {
  2677. logger.warn(`[Keys] Ignoring invalid EXT-X-SESSION-KEY tag: "${attributes}"`);
  2678. }
  2679. break;
  2680. }
  2681. case 'DEFINE':
  2682. {
  2683. break;
  2684. }
  2685. case 'CONTENT-STEERING':
  2686. {
  2687. // #EXT-X-CONTENT-STEERING
  2688. const contentSteeringAttributes = new AttrList(attributes);
  2689. parsed.contentSteering = {
  2690. uri: M3U8Parser.resolve(contentSteeringAttributes['SERVER-URI'], baseurl),
  2691. pathwayId: contentSteeringAttributes['PATHWAY-ID'] || '.'
  2692. };
  2693. break;
  2694. }
  2695. case 'START':
  2696. {
  2697. // #EXT-X-START
  2698. parsed.startTimeOffset = parseStartTimeOffset(attributes);
  2699. break;
  2700. }
  2701. }
  2702. }
  2703. }
2704. // Filter out levels with unknown codecs if doing so does not remove all levels
  2705. const stripUnknownCodecLevels = levelsWithKnownCodecs.length > 0 && levelsWithKnownCodecs.length < parsed.levels.length;
  2706. parsed.levels = stripUnknownCodecLevels ? levelsWithKnownCodecs : parsed.levels;
  2707. if (parsed.levels.length === 0) {
  2708. parsed.playlistParsingError = new Error('no levels found in manifest');
  2709. }
  2710. return parsed;
  2711. }
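/**
 * Parse the EXT-X-MEDIA tags of a multivariant playlist into AUDIO, SUBTITLES
 * and CLOSED-CAPTIONS track lists, resolving each URI against `baseurl` and
 * borrowing a codec string from the matching (or first) rendition group.
 */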
  2712. static parseMasterPlaylistMedia(string, baseurl, parsed) {
  2713. let result;
  2714. const results = {};
  2715. const levels = parsed.levels;
  2716. const groupsByType = {
  2717. AUDIO: levels.map(level => ({
  2718. id: level.attrs.AUDIO,
  2719. audioCodec: level.audioCodec
  2720. })),
  2721. SUBTITLES: levels.map(level => ({
  2722. id: level.attrs.SUBTITLES,
  2723. textCodec: level.textCodec
  2724. })),
  2725. 'CLOSED-CAPTIONS': []
  2726. };
  2727. let id = 0;
  2728. MASTER_PLAYLIST_MEDIA_REGEX.lastIndex = 0;
  2729. while ((result = MASTER_PLAYLIST_MEDIA_REGEX.exec(string)) !== null) {
  2730. const attrs = new AttrList(result[1]);
  2731. const type = attrs.TYPE;
  2732. if (type) {
  2733. const groups = groupsByType[type];
  2734. const medias = results[type] || [];
  2735. results[type] = medias;
  2736. const lang = attrs.LANGUAGE;
  2737. const assocLang = attrs['ASSOC-LANGUAGE'];
  2738. const channels = attrs.CHANNELS;
  2739. const characteristics = attrs.CHARACTERISTICS;
  2740. const instreamId = attrs['INSTREAM-ID'];
  2741. const media = {
  2742. attrs,
  2743. bitrate: 0,
  2744. id: id++,
  2745. groupId: attrs['GROUP-ID'] || '',
  2746. name: attrs.NAME || lang || '',
  2747. type,
  2748. default: attrs.bool('DEFAULT'),
  2749. autoselect: attrs.bool('AUTOSELECT'),
  2750. forced: attrs.bool('FORCED'),
  2751. lang,
  2752. url: attrs.URI ? M3U8Parser.resolve(attrs.URI, baseurl) : ''
  2753. };
  2754. if (assocLang) {
  2755. media.assocLang = assocLang;
  2756. }
  2757. if (channels) {
  2758. media.channels = channels;
  2759. }
  2760. if (characteristics) {
  2761. media.characteristics = characteristics;
  2762. }
  2763. if (instreamId) {
  2764. media.instreamId = instreamId;
  2765. }
  2766. if (groups != null && groups.length) {
  2767. // If there are audio or text groups signalled in the manifest, let's look for a matching codec string for this track
2768. // If we don't find the track signalled, let's use the first group's codec we have
2769. // as a best guess
  2770. const groupCodec = M3U8Parser.findGroup(groups, media.groupId) || groups[0];
  2771. assignCodec(media, groupCodec, 'audioCodec');
  2772. assignCodec(media, groupCodec, 'textCodec');
  2773. }
  2774. medias.push(media);
  2775. }
  2776. }
  2777. return results;
  2778. }
  2779. static parseLevelPlaylist(string, baseurl, id, type, levelUrlId, multivariantVariableList) {
  2780. const level = new LevelDetails(baseurl);
  2781. const fragments = level.fragments;
  2782. // The most recent init segment seen (applies to all subsequent segments)
  2783. let currentInitSegment = null;
  2784. let currentSN = 0;
  2785. let currentPart = 0;
  2786. let totalduration = 0;
  2787. let discontinuityCounter = 0;
  2788. let prevFrag = null;
  2789. let frag = new Fragment(type, baseurl);
  2790. let result;
  2791. let i;
  2792. let levelkeys;
  2793. let firstPdtIndex = -1;
  2794. let createNextFrag = false;
  2795. let nextByteRange = null;
  2796. LEVEL_PLAYLIST_REGEX_FAST.lastIndex = 0;
  2797. level.m3u8 = string;
  2798. level.hasVariableRefs = false;
  2799. while ((result = LEVEL_PLAYLIST_REGEX_FAST.exec(string)) !== null) {
  2800. if (createNextFrag) {
  2801. createNextFrag = false;
  2802. frag = new Fragment(type, baseurl);
  2803. // setup the next fragment for part loading
  2804. frag.start = totalduration;
  2805. frag.sn = currentSN;
  2806. frag.cc = discontinuityCounter;
  2807. frag.level = id;
  2808. if (currentInitSegment) {
  2809. frag.initSegment = currentInitSegment;
  2810. frag.rawProgramDateTime = currentInitSegment.rawProgramDateTime;
  2811. currentInitSegment.rawProgramDateTime = null;
  2812. if (nextByteRange) {
  2813. frag.setByteRange(nextByteRange);
  2814. nextByteRange = null;
  2815. }
  2816. }
  2817. }
  2818. const duration = result[1];
  2819. if (duration) {
  2820. // INF
  2821. frag.duration = parseFloat(duration);
  2822. // avoid sliced strings https://github.com/video-dev/hls.js/issues/939
  2823. const title = (' ' + result[2]).slice(1);
  2824. frag.title = title || null;
  2825. frag.tagList.push(title ? ['INF', duration, title] : ['INF', duration]);
  2826. } else if (result[3]) {
  2827. // url
  2828. if (isFiniteNumber(frag.duration)) {
  2829. frag.start = totalduration;
  2830. if (levelkeys) {
  2831. setFragLevelKeys(frag, levelkeys, level);
  2832. }
  2833. frag.sn = currentSN;
  2834. frag.level = id;
  2835. frag.cc = discontinuityCounter;
  2836. fragments.push(frag);
  2837. // avoid sliced strings https://github.com/video-dev/hls.js/issues/939
  2838. const uri = (' ' + result[3]).slice(1);
  2839. frag.relurl = uri;
  2840. assignProgramDateTime(frag, prevFrag);
  2841. prevFrag = frag;
  2842. totalduration += frag.duration;
  2843. currentSN++;
  2844. currentPart = 0;
  2845. createNextFrag = true;
  2846. }
  2847. } else if (result[4]) {
  2848. // X-BYTERANGE
  2849. const data = (' ' + result[4]).slice(1);
  2850. if (prevFrag) {
  2851. frag.setByteRange(data, prevFrag);
  2852. } else {
  2853. frag.setByteRange(data);
  2854. }
  2855. } else if (result[5]) {
  2856. // PROGRAM-DATE-TIME
  2857. // avoid sliced strings https://github.com/video-dev/hls.js/issues/939
  2858. frag.rawProgramDateTime = (' ' + result[5]).slice(1);
  2859. frag.tagList.push(['PROGRAM-DATE-TIME', frag.rawProgramDateTime]);
  2860. if (firstPdtIndex === -1) {
  2861. firstPdtIndex = fragments.length;
  2862. }
  2863. } else {
  2864. result = result[0].match(LEVEL_PLAYLIST_REGEX_SLOW);
  2865. if (!result) {
  2866. logger.warn('No matches on slow regex match for level playlist!');
  2867. continue;
  2868. }
  2869. for (i = 1; i < result.length; i++) {
  2870. if (typeof result[i] !== 'undefined') {
  2871. break;
  2872. }
  2873. }
  2874. // avoid sliced strings https://github.com/video-dev/hls.js/issues/939
  2875. const tag = (' ' + result[i]).slice(1);
  2876. const value1 = (' ' + result[i + 1]).slice(1);
  2877. const value2 = result[i + 2] ? (' ' + result[i + 2]).slice(1) : '';
  2878. switch (tag) {
  2879. case 'PLAYLIST-TYPE':
  2880. level.type = value1.toUpperCase();
  2881. break;
  2882. case 'MEDIA-SEQUENCE':
  2883. currentSN = level.startSN = parseInt(value1);
  2884. break;
  2885. case 'SKIP':
  2886. {
  2887. const skipAttrs = new AttrList(value1);
  2888. const skippedSegments = skipAttrs.decimalInteger('SKIPPED-SEGMENTS');
  2889. if (isFiniteNumber(skippedSegments)) {
  2890. level.skippedSegments = skippedSegments;
  2891. // This will result in fragments[] containing undefined values, which we will fill in with `mergeDetails`
  2892. for (let _i = skippedSegments; _i--;) {
  2893. fragments.unshift(null);
  2894. }
  2895. currentSN += skippedSegments;
  2896. }
  2897. const recentlyRemovedDateranges = skipAttrs.enumeratedString('RECENTLY-REMOVED-DATERANGES');
  2898. if (recentlyRemovedDateranges) {
  2899. level.recentlyRemovedDateranges = recentlyRemovedDateranges.split('\t');
  2900. }
  2901. break;
  2902. }
  2903. case 'TARGETDURATION':
  2904. level.targetduration = Math.max(parseInt(value1), 1);
  2905. break;
  2906. case 'VERSION':
  2907. level.version = parseInt(value1);
  2908. break;
  2909. case 'INDEPENDENT-SEGMENTS':
  2910. case 'EXTM3U':
  2911. break;
  2912. case 'ENDLIST':
  2913. level.live = false;
  2914. break;
  2915. case '#':
  2916. if (value1 || value2) {
  2917. frag.tagList.push(value2 ? [value1, value2] : [value1]);
  2918. }
  2919. break;
  2920. case 'DISCONTINUITY':
  2921. discontinuityCounter++;
  2922. frag.tagList.push(['DIS']);
  2923. break;
  2924. case 'GAP':
  2925. frag.gap = true;
  2926. frag.tagList.push([tag]);
  2927. break;
  2928. case 'BITRATE':
  2929. frag.tagList.push([tag, value1]);
  2930. break;
  2931. case 'DATERANGE':
  2932. {
  2933. const dateRangeAttr = new AttrList(value1);
  2934. const dateRange = new DateRange(dateRangeAttr, level.dateRanges[dateRangeAttr.ID]);
  2935. if (dateRange.isValid || level.skippedSegments) {
  2936. level.dateRanges[dateRange.id] = dateRange;
  2937. } else {
  2938. logger.warn(`Ignoring invalid DATERANGE tag: "${value1}"`);
  2939. }
  2940. // Add to fragment tag list for backwards compatibility (< v1.2.0)
  2941. frag.tagList.push(['EXT-X-DATERANGE', value1]);
  2942. break;
  2943. }
  2944. case 'DEFINE':
  2945. {
  2946. break;
  2947. }
  2948. case 'DISCONTINUITY-SEQUENCE':
  2949. discontinuityCounter = parseInt(value1);
  2950. break;
  2951. case 'KEY':
  2952. {
  2953. const levelKey = parseKey(value1, baseurl);
  2954. if (levelKey.isSupported()) {
  2955. if (levelKey.method === 'NONE') {
  2956. levelkeys = undefined;
  2957. break;
  2958. }
  2959. if (!levelkeys) {
  2960. levelkeys = {};
  2961. }
  2962. if (levelkeys[levelKey.keyFormat]) {
  2963. levelkeys = _extends({}, levelkeys);
  2964. }
  2965. levelkeys[levelKey.keyFormat] = levelKey;
  2966. } else {
  2967. logger.warn(`[Keys] Ignoring invalid EXT-X-KEY tag: "${value1}"`);
  2968. }
  2969. break;
  2970. }
  2971. case 'START':
  2972. level.startTimeOffset = parseStartTimeOffset(value1);
  2973. break;
  2974. case 'MAP':
  2975. {
  2976. const mapAttrs = new AttrList(value1);
  2977. if (frag.duration) {
  2978. // Initial segment tag is after segment duration tag.
  2979. // #EXTINF: 6.0
2980. // #EXT-X-MAP:URI="init.mp4"
  2981. const init = new Fragment(type, baseurl);
  2982. setInitSegment(init, mapAttrs, id, levelkeys);
  2983. currentInitSegment = init;
  2984. frag.initSegment = currentInitSegment;
  2985. if (currentInitSegment.rawProgramDateTime && !frag.rawProgramDateTime) {
  2986. frag.rawProgramDateTime = currentInitSegment.rawProgramDateTime;
  2987. }
  2988. } else {
  2989. // Initial segment tag is before segment duration tag
  2990. // Handle case where EXT-X-MAP is declared after EXT-X-BYTERANGE
  2991. const end = frag.byteRangeEndOffset;
  2992. if (end) {
  2993. const start = frag.byteRangeStartOffset;
  2994. nextByteRange = `${end - start}@${start}`;
  2995. } else {
  2996. nextByteRange = null;
  2997. }
  2998. setInitSegment(frag, mapAttrs, id, levelkeys);
  2999. currentInitSegment = frag;
  3000. createNextFrag = true;
  3001. }
  3002. break;
  3003. }
  3004. case 'SERVER-CONTROL':
  3005. {
  3006. const serverControlAttrs = new AttrList(value1);
  3007. level.canBlockReload = serverControlAttrs.bool('CAN-BLOCK-RELOAD');
  3008. level.canSkipUntil = serverControlAttrs.optionalFloat('CAN-SKIP-UNTIL', 0);
  3009. level.canSkipDateRanges = level.canSkipUntil > 0 && serverControlAttrs.bool('CAN-SKIP-DATERANGES');
  3010. level.partHoldBack = serverControlAttrs.optionalFloat('PART-HOLD-BACK', 0);
  3011. level.holdBack = serverControlAttrs.optionalFloat('HOLD-BACK', 0);
  3012. break;
  3013. }
  3014. case 'PART-INF':
  3015. {
  3016. const partInfAttrs = new AttrList(value1);
  3017. level.partTarget = partInfAttrs.decimalFloatingPoint('PART-TARGET');
  3018. break;
  3019. }
  3020. case 'PART':
  3021. {
  3022. let partList = level.partList;
  3023. if (!partList) {
  3024. partList = level.partList = [];
  3025. }
  3026. const previousFragmentPart = currentPart > 0 ? partList[partList.length - 1] : undefined;
  3027. const index = currentPart++;
  3028. const partAttrs = new AttrList(value1);
  3029. const part = new Part(partAttrs, frag, baseurl, index, previousFragmentPart);
  3030. partList.push(part);
  3031. frag.duration += part.duration;
  3032. break;
  3033. }
  3034. case 'PRELOAD-HINT':
  3035. {
  3036. const preloadHintAttrs = new AttrList(value1);
  3037. level.preloadHint = preloadHintAttrs;
  3038. break;
  3039. }
  3040. case 'RENDITION-REPORT':
  3041. {
  3042. const renditionReportAttrs = new AttrList(value1);
  3043. level.renditionReports = level.renditionReports || [];
  3044. level.renditionReports.push(renditionReportAttrs);
  3045. break;
  3046. }
  3047. default:
  3048. logger.warn(`line parsed but not handled: ${result}`);
  3049. break;
  3050. }
  3051. }
  3052. }
  3053. if (prevFrag && !prevFrag.relurl) {
  3054. fragments.pop();
  3055. totalduration -= prevFrag.duration;
  3056. if (level.partList) {
  3057. level.fragmentHint = prevFrag;
  3058. }
  3059. } else if (level.partList) {
  3060. assignProgramDateTime(frag, prevFrag);
  3061. frag.cc = discontinuityCounter;
  3062. level.fragmentHint = frag;
  3063. if (levelkeys) {
  3064. setFragLevelKeys(frag, levelkeys, level);
  3065. }
  3066. }
  3067. const fragmentLength = fragments.length;
  3068. const firstFragment = fragments[0];
  3069. const lastFragment = fragments[fragmentLength - 1];
  3070. totalduration += level.skippedSegments * level.targetduration;
  3071. if (totalduration > 0 && fragmentLength && lastFragment) {
  3072. level.averagetargetduration = totalduration / fragmentLength;
  3073. const lastSn = lastFragment.sn;
  3074. level.endSN = lastSn !== 'initSegment' ? lastSn : 0;
  3075. if (!level.live) {
  3076. lastFragment.endList = true;
  3077. }
  3078. if (firstFragment) {
  3079. level.startCC = firstFragment.cc;
  3080. }
  3081. } else {
  3082. level.endSN = 0;
  3083. level.startCC = 0;
  3084. }
  3085. if (level.fragmentHint) {
  3086. totalduration += level.fragmentHint.duration;
  3087. }
  3088. level.totalduration = totalduration;
  3089. level.endCC = discontinuityCounter;
  3090. /**
  3091. * Backfill any missing PDT values
  3092. * "If the first EXT-X-PROGRAM-DATE-TIME tag in a Playlist appears after
  3093. * one or more Media Segment URIs, the client SHOULD extrapolate
  3094. * backward from that tag (using EXTINF durations and/or media
  3095. * timestamps) to associate dates with those segments."
  3096. * We have already extrapolated forward, but all fragments up to the first instance of PDT do not have their PDTs
  3097. * computed.
  3098. */
  3099. if (firstPdtIndex > 0) {
  3100. backfillProgramDateTimes(fragments, firstPdtIndex);
  3101. }
  3102. return level;
  3103. }
  3104. }
  3105. function parseKey(keyTagAttributes, baseurl, parsed) {
  3106. var _keyAttrs$METHOD, _keyAttrs$KEYFORMAT;
  3107. // https://tools.ietf.org/html/rfc8216#section-4.3.2.4
  3108. const keyAttrs = new AttrList(keyTagAttributes);
  3109. const decryptmethod = (_keyAttrs$METHOD = keyAttrs.METHOD) != null ? _keyAttrs$METHOD : '';
  3110. const decrypturi = keyAttrs.URI;
  3111. const decryptiv = keyAttrs.hexadecimalInteger('IV');
  3112. const decryptkeyformatversions = keyAttrs.KEYFORMATVERSIONS;
  3113. // From RFC: This attribute is OPTIONAL; its absence indicates an implicit value of "identity".
  3114. const decryptkeyformat = (_keyAttrs$KEYFORMAT = keyAttrs.KEYFORMAT) != null ? _keyAttrs$KEYFORMAT : 'identity';
  3115. if (decrypturi && keyAttrs.IV && !decryptiv) {
  3116. logger.error(`Invalid IV: ${keyAttrs.IV}`);
  3117. }
  3118. // If decrypturi is a URI with a scheme, then baseurl will be ignored
  3119. // No uri is allowed when METHOD is NONE
  3120. const resolvedUri = decrypturi ? M3U8Parser.resolve(decrypturi, baseurl) : '';
  3121. const keyFormatVersions = (decryptkeyformatversions ? decryptkeyformatversions : '1').split('/').map(Number).filter(Number.isFinite);
  3122. return new LevelKey(decryptmethod, resolvedUri, decryptkeyformat, keyFormatVersions, decryptiv);
  3123. }
  3124. function parseStartTimeOffset(startAttributes) {
  3125. const startAttrs = new AttrList(startAttributes);
  3126. const startTimeOffset = startAttrs.decimalFloatingPoint('TIME-OFFSET');
  3127. if (isFiniteNumber(startTimeOffset)) {
  3128. return startTimeOffset;
  3129. }
  3130. return null;
  3131. }
  3132. function setCodecs(codecsAttributeValue, level) {
  3133. let codecs = (codecsAttributeValue || '').split(/[ ,]+/).filter(c => c);
  3134. ['video', 'audio', 'text'].forEach(type => {
  3135. const filtered = codecs.filter(codec => isCodecType(codec, type));
  3136. if (filtered.length) {
  3137. // Comma separated list of all codecs for type
  3138. level[`${type}Codec`] = filtered.join(',');
  3139. // Remove known codecs so that only unknownCodecs are left after iterating through each type
  3140. codecs = codecs.filter(codec => filtered.indexOf(codec) === -1);
  3141. }
  3142. });
  3143. level.unknownCodecs = codecs;
  3144. }
  3145. function assignCodec(media, groupItem, codecProperty) {
  3146. const codecValue = groupItem[codecProperty];
  3147. if (codecValue) {
  3148. media[codecProperty] = codecValue;
  3149. }
  3150. }
  3151. function backfillProgramDateTimes(fragments, firstPdtIndex) {
  3152. let fragPrev = fragments[firstPdtIndex];
  3153. for (let i = firstPdtIndex; i--;) {
  3154. const frag = fragments[i];
  3155. // Exit on delta-playlist skipped segments
  3156. if (!frag) {
  3157. return;
  3158. }
  3159. frag.programDateTime = fragPrev.programDateTime - frag.duration * 1000;
  3160. fragPrev = frag;
  3161. }
  3162. }
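/**
 * Derive a fragment's programDateTime: parse its own EXT-X-PROGRAM-DATE-TIME
 * tag when present, otherwise extend the previous fragment's end time; reset
 * both fields when the result is not a finite number.
 */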
  3163. function assignProgramDateTime(frag, prevFrag) {
  3164. if (frag.rawProgramDateTime) {
  3165. frag.programDateTime = Date.parse(frag.rawProgramDateTime);
  3166. } else if (prevFrag != null && prevFrag.programDateTime) {
  3167. frag.programDateTime = prevFrag.endProgramDateTime;
  3168. }
  3169. if (!isFiniteNumber(frag.programDateTime)) {
  3170. frag.programDateTime = null;
  3171. frag.rawProgramDateTime = null;
  3172. }
  3173. }
  3174. function setInitSegment(frag, mapAttrs, id, levelkeys) {
  3175. frag.relurl = mapAttrs.URI;
  3176. if (mapAttrs.BYTERANGE) {
  3177. frag.setByteRange(mapAttrs.BYTERANGE);
  3178. }
  3179. frag.level = id;
  3180. frag.sn = 'initSegment';
  3181. if (levelkeys) {
  3182. frag.levelkeys = levelkeys;
  3183. }
  3184. frag.initSegment = null;
  3185. }
  3186. function setFragLevelKeys(frag, levelkeys, level) {
  3187. frag.levelkeys = levelkeys;
  3188. const {
  3189. encryptedFragments
  3190. } = level;
  3191. if ((!encryptedFragments.length || encryptedFragments[encryptedFragments.length - 1].levelkeys !== levelkeys) && Object.keys(levelkeys).some(format => levelkeys[format].isCommonEncryption)) {
  3192. encryptedFragments.push(frag);
  3193. }
  3194. }
  3195. var PlaylistContextType = {
  3196. MANIFEST: "manifest",
  3197. LEVEL: "level",
  3198. AUDIO_TRACK: "audioTrack",
  3199. SUBTITLE_TRACK: "subtitleTrack"
  3200. };
  3201. var PlaylistLevelType = {
  3202. MAIN: "main",
  3203. AUDIO: "audio",
  3204. SUBTITLE: "subtitle"
  3205. };
  3206. function mapContextToLevelType(context) {
  3207. const {
  3208. type
  3209. } = context;
  3210. switch (type) {
  3211. case PlaylistContextType.AUDIO_TRACK:
  3212. return PlaylistLevelType.AUDIO;
  3213. case PlaylistContextType.SUBTITLE_TRACK:
  3214. return PlaylistLevelType.SUBTITLE;
  3215. default:
  3216. return PlaylistLevelType.MAIN;
  3217. }
  3218. }
  3219. function getResponseUrl(response, context) {
  3220. let url = response.url;
  3221. // responseURL not supported on some browsers (it is used to detect URL redirection)
  3222. // data-uri mode also not supported (but no need to detect redirection)
  3223. if (url === undefined || url.indexOf('data:') === 0) {
  3224. // fallback to initial URL
  3225. url = context.url;
  3226. }
  3227. return url;
  3228. }
  3229. class PlaylistLoader {
  3230. constructor(hls) {
  3231. this.hls = void 0;
  3232. this.loaders = Object.create(null);
  3233. this.variableList = null;
  3234. this.hls = hls;
  3235. this.registerListeners();
  3236. }
  3237. startLoad(startPosition) {}
  3238. stopLoad() {
  3239. this.destroyInternalLoaders();
  3240. }
  3241. registerListeners() {
  3242. const {
  3243. hls
  3244. } = this;
  3245. hls.on(Events.MANIFEST_LOADING, this.onManifestLoading, this);
  3246. hls.on(Events.LEVEL_LOADING, this.onLevelLoading, this);
  3247. hls.on(Events.AUDIO_TRACK_LOADING, this.onAudioTrackLoading, this);
  3248. hls.on(Events.SUBTITLE_TRACK_LOADING, this.onSubtitleTrackLoading, this);
  3249. }
  3250. unregisterListeners() {
  3251. const {
  3252. hls
  3253. } = this;
  3254. hls.off(Events.MANIFEST_LOADING, this.onManifestLoading, this);
  3255. hls.off(Events.LEVEL_LOADING, this.onLevelLoading, this);
  3256. hls.off(Events.AUDIO_TRACK_LOADING, this.onAudioTrackLoading, this);
  3257. hls.off(Events.SUBTITLE_TRACK_LOADING, this.onSubtitleTrackLoading, this);
  3258. }
  3259. /**
  3260. * Returns defaults or configured loader-type overloads (pLoader and loader config params)
  3261. */
  3262. createInternalLoader(context) {
  3263. const config = this.hls.config;
  3264. const PLoader = config.pLoader;
  3265. const Loader = config.loader;
  3266. const InternalLoader = PLoader || Loader;
  3267. const loader = new InternalLoader(config);
  3268. this.loaders[context.type] = loader;
  3269. return loader;
  3270. }
  3271. getInternalLoader(context) {
  3272. return this.loaders[context.type];
  3273. }
  3274. resetInternalLoader(contextType) {
  3275. if (this.loaders[contextType]) {
  3276. delete this.loaders[contextType];
  3277. }
  3278. }
  3279. /**
  3280. * Call `destroy` on all internal loader instances mapped (one per context type)
  3281. */
  3282. destroyInternalLoaders() {
  3283. for (const contextType in this.loaders) {
  3284. const loader = this.loaders[contextType];
  3285. if (loader) {
  3286. loader.destroy();
  3287. }
  3288. this.resetInternalLoader(contextType);
  3289. }
  3290. }
  3291. destroy() {
  3292. this.variableList = null;
  3293. this.unregisterListeners();
  3294. this.destroyInternalLoaders();
  3295. }
  3296. onManifestLoading(event, data) {
  3297. const {
  3298. url
  3299. } = data;
  3300. this.variableList = null;
  3301. this.load({
  3302. id: null,
  3303. level: 0,
  3304. responseType: 'text',
  3305. type: PlaylistContextType.MANIFEST,
  3306. url,
  3307. deliveryDirectives: null
  3308. });
  3309. }
  3310. onLevelLoading(event, data) {
  3311. const {
  3312. id,
  3313. level,
  3314. pathwayId,
  3315. url,
  3316. deliveryDirectives
  3317. } = data;
  3318. this.load({
  3319. id,
  3320. level,
  3321. pathwayId,
  3322. responseType: 'text',
  3323. type: PlaylistContextType.LEVEL,
  3324. url,
  3325. deliveryDirectives
  3326. });
  3327. }
  3328. onAudioTrackLoading(event, data) {
  3329. const {
  3330. id,
  3331. groupId,
  3332. url,
  3333. deliveryDirectives
  3334. } = data;
  3335. this.load({
  3336. id,
  3337. groupId,
  3338. level: null,
  3339. responseType: 'text',
  3340. type: PlaylistContextType.AUDIO_TRACK,
  3341. url,
  3342. deliveryDirectives
  3343. });
  3344. }
  3345. onSubtitleTrackLoading(event, data) {
  3346. const {
  3347. id,
  3348. groupId,
  3349. url,
  3350. deliveryDirectives
  3351. } = data;
  3352. this.load({
  3353. id,
  3354. groupId,
  3355. level: null,
  3356. responseType: 'text',
  3357. type: PlaylistContextType.SUBTITLE_TRACK,
  3358. url,
  3359. deliveryDirectives
  3360. });
  3361. }
  3362. load(context) {
  3363. var _context$deliveryDire;
  3364. const config = this.hls.config;
  3365. // logger.debug(`[playlist-loader]: Loading playlist of type ${context.type}, level: ${context.level}, id: ${context.id}`);
  3366. // Check if a loader for this context already exists
  3367. let loader = this.getInternalLoader(context);
  3368. if (loader) {
  3369. const loaderContext = loader.context;
  3370. if (loaderContext && loaderContext.url === context.url && loaderContext.level === context.level) {
  3371. // same URL can't overlap
  3372. logger.trace('[playlist-loader]: playlist request ongoing');
  3373. return;
  3374. }
  3375. logger.log(`[playlist-loader]: aborting previous loader for type: ${context.type}`);
  3376. loader.abort();
  3377. }
  3378. // apply different configs for retries depending on
  3379. // context (manifest, level, audio/subs playlist)
  3380. let loadPolicy;
  3381. if (context.type === PlaylistContextType.MANIFEST) {
  3382. loadPolicy = config.manifestLoadPolicy.default;
  3383. } else {
  3384. loadPolicy = _extends({}, config.playlistLoadPolicy.default, {
  3385. timeoutRetry: null,
  3386. errorRetry: null
  3387. });
  3388. }
  3389. loader = this.createInternalLoader(context);
  3390. // Override level/track timeout for LL-HLS requests
3391. // (the default of 10000ms is counterproductive to blocking playlist reload requests)
  3392. if (isFiniteNumber((_context$deliveryDire = context.deliveryDirectives) == null ? void 0 : _context$deliveryDire.part)) {
  3393. let levelDetails;
  3394. if (context.type === PlaylistContextType.LEVEL && context.level !== null) {
  3395. levelDetails = this.hls.levels[context.level].details;
  3396. } else if (context.type === PlaylistContextType.AUDIO_TRACK && context.id !== null) {
  3397. levelDetails = this.hls.audioTracks[context.id].details;
  3398. } else if (context.type === PlaylistContextType.SUBTITLE_TRACK && context.id !== null) {
  3399. levelDetails = this.hls.subtitleTracks[context.id].details;
  3400. }
  3401. if (levelDetails) {
  3402. const partTarget = levelDetails.partTarget;
  3403. const targetDuration = levelDetails.targetduration;
  3404. if (partTarget && targetDuration) {
  3405. const maxLowLatencyPlaylistRefresh = Math.max(partTarget * 3, targetDuration * 0.8) * 1000;
  3406. loadPolicy = _extends({}, loadPolicy, {
  3407. maxTimeToFirstByteMs: Math.min(maxLowLatencyPlaylistRefresh, loadPolicy.maxTimeToFirstByteMs),
  3408. maxLoadTimeMs: Math.min(maxLowLatencyPlaylistRefresh, loadPolicy.maxTimeToFirstByteMs)
  3409. });
  3410. }
  3411. }
  3412. }
  3413. const legacyRetryCompatibility = loadPolicy.errorRetry || loadPolicy.timeoutRetry || {};
  3414. const loaderConfig = {
  3415. loadPolicy,
  3416. timeout: loadPolicy.maxLoadTimeMs,
  3417. maxRetry: legacyRetryCompatibility.maxNumRetry || 0,
  3418. retryDelay: legacyRetryCompatibility.retryDelayMs || 0,
  3419. maxRetryDelay: legacyRetryCompatibility.maxRetryDelayMs || 0
  3420. };
  3421. const loaderCallbacks = {
  3422. onSuccess: (response, stats, context, networkDetails) => {
  3423. const loader = this.getInternalLoader(context);
  3424. this.resetInternalLoader(context.type);
  3425. const string = response.data;
  3426. // Validate if it is an M3U8 at all
  3427. if (string.indexOf('#EXTM3U') !== 0) {
  3428. this.handleManifestParsingError(response, context, new Error('no EXTM3U delimiter'), networkDetails || null, stats);
  3429. return;
  3430. }
  3431. stats.parsing.start = performance.now();
  3432. if (M3U8Parser.isMediaPlaylist(string)) {
  3433. this.handleTrackOrLevelPlaylist(response, stats, context, networkDetails || null, loader);
  3434. } else {
  3435. this.handleMasterPlaylist(response, stats, context, networkDetails);
  3436. }
  3437. },
  3438. onError: (response, context, networkDetails, stats) => {
  3439. this.handleNetworkError(context, networkDetails, false, response, stats);
  3440. },
  3441. onTimeout: (stats, context, networkDetails) => {
  3442. this.handleNetworkError(context, networkDetails, true, undefined, stats);
  3443. }
  3444. };
  3445. // logger.debug(`[playlist-loader]: Calling internal loader delegate for URL: ${context.url}`);
  3446. loader.load(context, loaderConfig, loaderCallbacks);
  3447. }
  3448. handleMasterPlaylist(response, stats, context, networkDetails) {
  3449. const hls = this.hls;
  3450. const string = response.data;
  3451. const url = getResponseUrl(response, context);
  3452. const parsedResult = M3U8Parser.parseMasterPlaylist(string, url);
  3453. if (parsedResult.playlistParsingError) {
  3454. this.handleManifestParsingError(response, context, parsedResult.playlistParsingError, networkDetails, stats);
  3455. return;
  3456. }
  3457. const {
  3458. contentSteering,
  3459. levels,
  3460. sessionData,
  3461. sessionKeys,
  3462. startTimeOffset,
  3463. variableList
  3464. } = parsedResult;
  3465. this.variableList = variableList;
  3466. const {
  3467. AUDIO: audioTracks = [],
  3468. SUBTITLES: subtitles,
  3469. 'CLOSED-CAPTIONS': captions
  3470. } = M3U8Parser.parseMasterPlaylistMedia(string, url, parsedResult);
  3471. if (audioTracks.length) {
  3472. // check if we have found an audio track embedded in main playlist (audio track without URI attribute)
  3473. const embeddedAudioFound = audioTracks.some(audioTrack => !audioTrack.url);
3474. // if no embedded audio track is defined, but an audio codec is signaled in the quality level,
3475. // we need to signal this main audio track. This can happen with playlists that have an
3476. // alt audio rendition in which the quality levels (main)
3477. // contain both audio and video, but the embedded (muxed) audio track is not signaled
  3478. if (!embeddedAudioFound && levels[0].audioCodec && !levels[0].attrs.AUDIO) {
  3479. logger.log('[playlist-loader]: audio codec signaled in quality level, but no embedded audio track signaled, create one');
  3480. audioTracks.unshift({
  3481. type: 'main',
  3482. name: 'main',
  3483. groupId: 'main',
  3484. default: false,
  3485. autoselect: false,
  3486. forced: false,
  3487. id: -1,
  3488. attrs: new AttrList({}),
  3489. bitrate: 0,
  3490. url: ''
  3491. });
  3492. }
  3493. }
  3494. hls.trigger(Events.MANIFEST_LOADED, {
  3495. levels,
  3496. audioTracks,
  3497. subtitles,
  3498. captions,
  3499. contentSteering,
  3500. url,
  3501. stats,
  3502. networkDetails,
  3503. sessionData,
  3504. sessionKeys,
  3505. startTimeOffset,
  3506. variableList
  3507. });
  3508. }
  3509. handleTrackOrLevelPlaylist(response, stats, context, networkDetails, loader) {
  3510. const hls = this.hls;
  3511. const {
  3512. id,
  3513. level,
  3514. type
  3515. } = context;
  3516. const url = getResponseUrl(response, context);
  3517. const levelUrlId = 0;
  3518. const levelId = isFiniteNumber(level) ? level : isFiniteNumber(id) ? id : 0;
  3519. const levelType = mapContextToLevelType(context);
  3520. const levelDetails = M3U8Parser.parseLevelPlaylist(response.data, url, levelId, levelType, levelUrlId, this.variableList);
3521. // We have done our first request (Manifest-type) and received
3522. // not a master playlist but a chunk-list (track/level).
  3523. // We fire the manifest-loaded event anyway with the parsed level-details
  3524. // by creating a single-level structure for it.
  3525. if (type === PlaylistContextType.MANIFEST) {
  3526. const singleLevel = {
  3527. attrs: new AttrList({}),
  3528. bitrate: 0,
  3529. details: levelDetails,
  3530. name: '',
  3531. url
  3532. };
  3533. hls.trigger(Events.MANIFEST_LOADED, {
  3534. levels: [singleLevel],
  3535. audioTracks: [],
  3536. url,
  3537. stats,
  3538. networkDetails,
  3539. sessionData: null,
  3540. sessionKeys: null,
  3541. contentSteering: null,
  3542. startTimeOffset: null,
  3543. variableList: null
  3544. });
  3545. }
  3546. // save parsing time
  3547. stats.parsing.end = performance.now();
  3548. // extend the context with the new levelDetails property
  3549. context.levelDetails = levelDetails;
  3550. this.handlePlaylistLoaded(levelDetails, response, stats, context, networkDetails, loader);
  3551. }
  3552. handleManifestParsingError(response, context, error, networkDetails, stats) {
  3553. this.hls.trigger(Events.ERROR, {
  3554. type: ErrorTypes.NETWORK_ERROR,
  3555. details: ErrorDetails.MANIFEST_PARSING_ERROR,
  3556. fatal: context.type === PlaylistContextType.MANIFEST,
  3557. url: response.url,
  3558. err: error,
  3559. error,
  3560. reason: error.message,
  3561. response,
  3562. context,
  3563. networkDetails,
  3564. stats
  3565. });
  3566. }
  3567. handleNetworkError(context, networkDetails, timeout = false, response, stats) {
  3568. let message = `A network ${timeout ? 'timeout' : 'error' + (response ? ' (status ' + response.code + ')' : '')} occurred while loading ${context.type}`;
  3569. if (context.type === PlaylistContextType.LEVEL) {
  3570. message += `: ${context.level} id: ${context.id}`;
  3571. } else if (context.type === PlaylistContextType.AUDIO_TRACK || context.type === PlaylistContextType.SUBTITLE_TRACK) {
  3572. message += ` id: ${context.id} group-id: "${context.groupId}"`;
  3573. }
  3574. const error = new Error(message);
  3575. logger.warn(`[playlist-loader]: ${message}`);
  3576. let details = ErrorDetails.UNKNOWN;
  3577. let fatal = false;
  3578. const loader = this.getInternalLoader(context);
  3579. switch (context.type) {
  3580. case PlaylistContextType.MANIFEST:
  3581. details = timeout ? ErrorDetails.MANIFEST_LOAD_TIMEOUT : ErrorDetails.MANIFEST_LOAD_ERROR;
  3582. fatal = true;
  3583. break;
  3584. case PlaylistContextType.LEVEL:
  3585. details = timeout ? ErrorDetails.LEVEL_LOAD_TIMEOUT : ErrorDetails.LEVEL_LOAD_ERROR;
  3586. fatal = false;
  3587. break;
  3588. case PlaylistContextType.AUDIO_TRACK:
  3589. details = timeout ? ErrorDetails.AUDIO_TRACK_LOAD_TIMEOUT : ErrorDetails.AUDIO_TRACK_LOAD_ERROR;
  3590. fatal = false;
  3591. break;
  3592. case PlaylistContextType.SUBTITLE_TRACK:
  3593. details = timeout ? ErrorDetails.SUBTITLE_TRACK_LOAD_TIMEOUT : ErrorDetails.SUBTITLE_LOAD_ERROR;
  3594. fatal = false;
  3595. break;
  3596. }
  3597. if (loader) {
  3598. this.resetInternalLoader(context.type);
  3599. }
  3600. const errorData = {
  3601. type: ErrorTypes.NETWORK_ERROR,
  3602. details,
  3603. fatal,
  3604. url: context.url,
  3605. loader,
  3606. context,
  3607. error,
  3608. networkDetails,
  3609. stats
  3610. };
  3611. if (response) {
  3612. const url = (networkDetails == null ? void 0 : networkDetails.url) || context.url;
  3613. errorData.response = _objectSpread2({
  3614. url,
  3615. data: undefined
  3616. }, response);
  3617. }
  3618. this.hls.trigger(Events.ERROR, errorData);
  3619. }
  3620. handlePlaylistLoaded(levelDetails, response, stats, context, networkDetails, loader) {
  3621. const hls = this.hls;
  3622. const {
  3623. type,
  3624. level,
  3625. id,
  3626. groupId,
  3627. deliveryDirectives
  3628. } = context;
  3629. const url = getResponseUrl(response, context);
  3630. const parent = mapContextToLevelType(context);
  3631. const levelIndex = typeof context.level === 'number' && parent === PlaylistLevelType.MAIN ? level : undefined;
  3632. if (!levelDetails.fragments.length) {
  3633. const _error = new Error('No Segments found in Playlist');
  3634. hls.trigger(Events.ERROR, {
  3635. type: ErrorTypes.NETWORK_ERROR,
  3636. details: ErrorDetails.LEVEL_EMPTY_ERROR,
  3637. fatal: false,
  3638. url,
  3639. error: _error,
  3640. reason: _error.message,
  3641. response,
  3642. context,
  3643. level: levelIndex,
  3644. parent,
  3645. networkDetails,
  3646. stats
  3647. });
  3648. return;
  3649. }
  3650. if (!levelDetails.targetduration) {
  3651. levelDetails.playlistParsingError = new Error('Missing Target Duration');
  3652. }
  3653. const error = levelDetails.playlistParsingError;
  3654. if (error) {
  3655. hls.trigger(Events.ERROR, {
  3656. type: ErrorTypes.NETWORK_ERROR,
  3657. details: ErrorDetails.LEVEL_PARSING_ERROR,
  3658. fatal: false,
  3659. url,
  3660. error,
  3661. reason: error.message,
  3662. response,
  3663. context,
  3664. level: levelIndex,
  3665. parent,
  3666. networkDetails,
  3667. stats
  3668. });
  3669. return;
  3670. }
  3671. if (levelDetails.live && loader) {
  3672. if (loader.getCacheAge) {
  3673. levelDetails.ageHeader = loader.getCacheAge() || 0;
  3674. }
  3675. if (!loader.getCacheAge || isNaN(levelDetails.ageHeader)) {
  3676. levelDetails.ageHeader = 0;
  3677. }
  3678. }
  3679. switch (type) {
  3680. case PlaylistContextType.MANIFEST:
  3681. case PlaylistContextType.LEVEL:
  3682. hls.trigger(Events.LEVEL_LOADED, {
  3683. details: levelDetails,
  3684. level: levelIndex || 0,
  3685. id: id || 0,
  3686. stats,
  3687. networkDetails,
  3688. deliveryDirectives
  3689. });
  3690. break;
  3691. case PlaylistContextType.AUDIO_TRACK:
  3692. hls.trigger(Events.AUDIO_TRACK_LOADED, {
  3693. details: levelDetails,
  3694. id: id || 0,
  3695. groupId: groupId || '',
  3696. stats,
  3697. networkDetails,
  3698. deliveryDirectives
  3699. });
  3700. break;
  3701. case PlaylistContextType.SUBTITLE_TRACK:
  3702. hls.trigger(Events.SUBTITLE_TRACK_LOADED, {
  3703. details: levelDetails,
  3704. id: id || 0,
  3705. groupId: groupId || '',
  3706. stats,
  3707. networkDetails,
  3708. deliveryDirectives
  3709. });
  3710. break;
  3711. }
  3712. }
  3713. }
  3714. function sendAddTrackEvent(track, videoEl) {
  3715. let event;
  3716. try {
  3717. event = new Event('addtrack');
  3718. } catch (err) {
  3719. // for IE11
  3720. event = document.createEvent('Event');
  3721. event.initEvent('addtrack', false, false);
  3722. }
  3723. event.track = track;
  3724. videoEl.dispatchEvent(event);
  3725. }
  3726. function clearCurrentCues(track) {
  3727. // When track.mode is disabled, track.cues will be null.
  3728. // To guarantee the removal of cues, we need to temporarily
  3729. // change the mode to hidden
  3730. const mode = track.mode;
  3731. if (mode === 'disabled') {
  3732. track.mode = 'hidden';
  3733. }
  3734. if (track.cues) {
  3735. for (let i = track.cues.length; i--;) {
  3736. track.removeCue(track.cues[i]);
  3737. }
  3738. }
  3739. if (mode === 'disabled') {
  3740. track.mode = mode;
  3741. }
  3742. }
  3743. function removeCuesInRange(track, start, end, predicate) {
  3744. const mode = track.mode;
  3745. if (mode === 'disabled') {
  3746. track.mode = 'hidden';
  3747. }
  3748. if (track.cues && track.cues.length > 0) {
  3749. const cues = getCuesInRange(track.cues, start, end);
  3750. for (let i = 0; i < cues.length; i++) {
  3751. if (!predicate || predicate(cues[i])) {
  3752. track.removeCue(cues[i]);
  3753. }
  3754. }
  3755. }
  3756. if (mode === 'disabled') {
  3757. track.mode = mode;
  3758. }
  3759. }
  3760. // Find first cue starting after given time.
  3761. // Modified version of binary search O(log(n)).
  3762. function getFirstCueIndexAfterTime(cues, time) {
  3763. // If first cue starts after time, start there
  3764. if (time < cues[0].startTime) {
  3765. return 0;
  3766. }
  3767. // If the last cue ends before time there is no overlap
  3768. const len = cues.length - 1;
  3769. if (time > cues[len].endTime) {
  3770. return -1;
  3771. }
  3772. let left = 0;
  3773. let right = len;
  3774. while (left <= right) {
  3775. const mid = Math.floor((right + left) / 2);
  3776. if (time < cues[mid].startTime) {
  3777. right = mid - 1;
  3778. } else if (time > cues[mid].startTime && left < len) {
  3779. left = mid + 1;
  3780. } else {
  3781. // If it's not lower or higher, it must be equal.
  3782. return mid;
  3783. }
  3784. }
  3785. // At this point, left and right have swapped.
  3786. // No direct match was found, left or right element must be the closest. Check which one has the smallest diff.
  3787. return cues[left].startTime - time < time - cues[right].startTime ? left : right;
  3788. }
  3789. function getCuesInRange(cues, start, end) {
  3790. const cuesFound = [];
  3791. const firstCueInRange = getFirstCueIndexAfterTime(cues, start);
  3792. if (firstCueInRange > -1) {
  3793. for (let i = firstCueInRange, len = cues.length; i < len; i++) {
  3794. const cue = cues[i];
  3795. if (cue.startTime >= start && cue.endTime <= end) {
  3796. cuesFound.push(cue);
  3797. } else if (cue.startTime > end) {
  3798. return cuesFound;
  3799. }
  3800. }
  3801. }
  3802. return cuesFound;
  3803. }
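// Illustrative sketch (not part of the hls.js bundle): how getFirstCueIndexAfterTime and
// getCuesInRange behave on a small, hypothetical cue list. Only cues fully contained in
// [start, end] are returned by getCuesInRange.
function exampleGetCuesInRangeUsage() {
  // Hypothetical cue-like objects; real callers pass entries from a TextTrackCueList.
  const cues = [
    { startTime: 0, endTime: 5 },
    { startTime: 5, endTime: 10 },
    { startTime: 12, endTime: 20 }
  ];
  getFirstCueIndexAfterTime(cues, 4); // -> 1 (index of the cue starting at 5)
  return getCuesInRange(cues, 4, 11); // -> [{ startTime: 5, endTime: 10 }]
}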
  3804. var MetadataSchema = {
  3805. audioId3: "org.id3",
  3806. dateRange: "com.apple.quicktime.HLS",
  3807. emsg: "https://aomedia.org/emsg/ID3"
  3808. };
  3809. const MIN_CUE_DURATION = 0.25;
  3810. function getCueClass() {
  3811. if (typeof self === 'undefined') return undefined;
  3812. return self.VTTCue || self.TextTrackCue;
  3813. }
  3814. function createCueWithDataFields(Cue, startTime, endTime, data, type) {
  3815. let cue = new Cue(startTime, endTime, '');
  3816. try {
  3817. cue.value = data;
  3818. if (type) {
  3819. cue.type = type;
  3820. }
  3821. } catch (e) {
  3822. cue = new Cue(startTime, endTime, JSON.stringify(type ? _objectSpread2({
  3823. type
  3824. }, data) : data));
  3825. }
  3826. return cue;
  3827. }
  3828. // VTTCue latest draft allows an infinite duration, fallback
  3829. // to MAX_VALUE if necessary
  3830. const MAX_CUE_ENDTIME = (() => {
  3831. const Cue = getCueClass();
  3832. try {
  3833. Cue && new Cue(0, Number.POSITIVE_INFINITY, '');
  3834. } catch (e) {
  3835. return Number.MAX_VALUE;
  3836. }
  3837. return Number.POSITIVE_INFINITY;
  3838. })();
  3839. function dateRangeDateToTimelineSeconds(date, offset) {
  3840. return date.getTime() / 1000 - offset;
  3841. }
  3842. function hexToArrayBuffer(str) {
  3843. return Uint8Array.from(str.replace(/^0x/, '').replace(/([\da-fA-F]{2}) ?/g, '0x$1 ').replace(/ +$/, '').split(' ')).buffer;
  3844. }
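// Illustrative sketch (not part of the hls.js bundle): hexToArrayBuffer turns a hex string,
// with or without a leading '0x', into raw bytes, as used for SCTE35 DateRange attributes below.
function exampleHexToArrayBufferUsage() {
  const buffer = hexToArrayBuffer('0xFC3025'); // hypothetical SCTE35 payload prefix
  return new Uint8Array(buffer); // -> Uint8Array [0xfc, 0x30, 0x25]
}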
  3845. class ID3TrackController {
  3846. constructor(hls) {
  3847. this.hls = void 0;
  3848. this.id3Track = null;
  3849. this.media = null;
  3850. this.dateRangeCuesAppended = {};
  3851. this.hls = hls;
  3852. this._registerListeners();
  3853. }
  3854. destroy() {
  3855. this._unregisterListeners();
  3856. this.id3Track = null;
  3857. this.media = null;
  3858. this.dateRangeCuesAppended = {};
  3859. // @ts-ignore
  3860. this.hls = null;
  3861. }
  3862. _registerListeners() {
  3863. const {
  3864. hls
  3865. } = this;
  3866. hls.on(Events.MEDIA_ATTACHED, this.onMediaAttached, this);
  3867. hls.on(Events.MEDIA_DETACHING, this.onMediaDetaching, this);
  3868. hls.on(Events.MANIFEST_LOADING, this.onManifestLoading, this);
  3869. hls.on(Events.FRAG_PARSING_METADATA, this.onFragParsingMetadata, this);
  3870. hls.on(Events.BUFFER_FLUSHING, this.onBufferFlushing, this);
  3871. hls.on(Events.LEVEL_UPDATED, this.onLevelUpdated, this);
  3872. }
  3873. _unregisterListeners() {
  3874. const {
  3875. hls
  3876. } = this;
  3877. hls.off(Events.MEDIA_ATTACHED, this.onMediaAttached, this);
  3878. hls.off(Events.MEDIA_DETACHING, this.onMediaDetaching, this);
  3879. hls.off(Events.MANIFEST_LOADING, this.onManifestLoading, this);
  3880. hls.off(Events.FRAG_PARSING_METADATA, this.onFragParsingMetadata, this);
  3881. hls.off(Events.BUFFER_FLUSHING, this.onBufferFlushing, this);
  3882. hls.off(Events.LEVEL_UPDATED, this.onLevelUpdated, this);
  3883. }
3884. // Add ID3 metadata text track.
  3885. onMediaAttached(event, data) {
  3886. this.media = data.media;
  3887. }
  3888. onMediaDetaching() {
  3889. if (!this.id3Track) {
  3890. return;
  3891. }
  3892. clearCurrentCues(this.id3Track);
  3893. this.id3Track = null;
  3894. this.media = null;
  3895. this.dateRangeCuesAppended = {};
  3896. }
  3897. onManifestLoading() {
  3898. this.dateRangeCuesAppended = {};
  3899. }
  3900. createTrack(media) {
  3901. const track = this.getID3Track(media.textTracks);
  3902. track.mode = 'hidden';
  3903. return track;
  3904. }
  3905. getID3Track(textTracks) {
  3906. if (!this.media) {
  3907. return;
  3908. }
  3909. for (let i = 0; i < textTracks.length; i++) {
  3910. const textTrack = textTracks[i];
  3911. if (textTrack.kind === 'metadata' && textTrack.label === 'id3') {
  3912. // send 'addtrack' when reusing the textTrack for metadata,
  3913. // same as what we do for captions
  3914. sendAddTrackEvent(textTrack, this.media);
  3915. return textTrack;
  3916. }
  3917. }
  3918. return this.media.addTextTrack('metadata', 'id3');
  3919. }
  3920. onFragParsingMetadata(event, data) {
  3921. if (!this.media) {
  3922. return;
  3923. }
  3924. const {
  3925. hls: {
  3926. config: {
  3927. enableEmsgMetadataCues,
  3928. enableID3MetadataCues
  3929. }
  3930. }
  3931. } = this;
  3932. if (!enableEmsgMetadataCues && !enableID3MetadataCues) {
  3933. return;
  3934. }
  3935. const {
  3936. samples
  3937. } = data;
  3938. // create track dynamically
  3939. if (!this.id3Track) {
  3940. this.id3Track = this.createTrack(this.media);
  3941. }
  3942. const Cue = getCueClass();
  3943. if (!Cue) {
  3944. return;
  3945. }
  3946. for (let i = 0; i < samples.length; i++) {
  3947. const type = samples[i].type;
  3948. if (type === MetadataSchema.emsg && !enableEmsgMetadataCues || !enableID3MetadataCues) {
  3949. continue;
  3950. }
  3951. const frames = getID3Frames(samples[i].data);
  3952. if (frames) {
  3953. const startTime = samples[i].pts;
  3954. let endTime = startTime + samples[i].duration;
  3955. if (endTime > MAX_CUE_ENDTIME) {
  3956. endTime = MAX_CUE_ENDTIME;
  3957. }
  3958. const timeDiff = endTime - startTime;
  3959. if (timeDiff <= 0) {
  3960. endTime = startTime + MIN_CUE_DURATION;
  3961. }
  3962. for (let j = 0; j < frames.length; j++) {
  3963. const frame = frames[j];
  3964. // Safari doesn't put the timestamp frame in the TextTrack
  3965. if (!isTimeStampFrame(frame)) {
3966. // add a bound to any unbounded cues
  3967. this.updateId3CueEnds(startTime, type);
  3968. const cue = createCueWithDataFields(Cue, startTime, endTime, frame, type);
  3969. if (cue) {
  3970. this.id3Track.addCue(cue);
  3971. }
  3972. }
  3973. }
  3974. }
  3975. }
  3976. }
  3977. updateId3CueEnds(startTime, type) {
  3978. var _this$id3Track;
  3979. const cues = (_this$id3Track = this.id3Track) == null ? void 0 : _this$id3Track.cues;
  3980. if (cues) {
  3981. for (let i = cues.length; i--;) {
  3982. const cue = cues[i];
  3983. if (cue.type === type && cue.startTime < startTime && cue.endTime === MAX_CUE_ENDTIME) {
  3984. cue.endTime = startTime;
  3985. }
  3986. }
  3987. }
  3988. }
  3989. onBufferFlushing(event, {
  3990. startOffset,
  3991. endOffset,
  3992. type
  3993. }) {
  3994. const {
  3995. id3Track,
  3996. hls
  3997. } = this;
  3998. if (!hls) {
  3999. return;
  4000. }
  4001. const {
  4002. config: {
  4003. enableEmsgMetadataCues,
  4004. enableID3MetadataCues
  4005. }
  4006. } = hls;
  4007. if (id3Track && (enableEmsgMetadataCues || enableID3MetadataCues)) {
  4008. let predicate;
  4009. if (type === 'audio') {
  4010. predicate = cue => cue.type === MetadataSchema.audioId3 && enableID3MetadataCues;
  4011. } else if (type === 'video') {
  4012. predicate = cue => cue.type === MetadataSchema.emsg && enableEmsgMetadataCues;
  4013. } else {
  4014. predicate = cue => cue.type === MetadataSchema.audioId3 && enableID3MetadataCues || cue.type === MetadataSchema.emsg && enableEmsgMetadataCues;
  4015. }
  4016. removeCuesInRange(id3Track, startOffset, endOffset, predicate);
  4017. }
  4018. }
  4019. onLevelUpdated(event, {
  4020. details
  4021. }) {
  4022. if (!this.media || !details.hasProgramDateTime || !this.hls.config.enableDateRangeMetadataCues) {
  4023. return;
  4024. }
  4025. const {
  4026. dateRangeCuesAppended,
  4027. id3Track
  4028. } = this;
  4029. const {
  4030. dateRanges
  4031. } = details;
  4032. const ids = Object.keys(dateRanges);
  4033. // Remove cues from track not found in details.dateRanges
  4034. if (id3Track) {
  4035. const idsToRemove = Object.keys(dateRangeCuesAppended).filter(id => !ids.includes(id));
  4036. for (let i = idsToRemove.length; i--;) {
  4037. const id = idsToRemove[i];
  4038. Object.keys(dateRangeCuesAppended[id].cues).forEach(key => {
  4039. id3Track.removeCue(dateRangeCuesAppended[id].cues[key]);
  4040. });
  4041. delete dateRangeCuesAppended[id];
  4042. }
  4043. }
  4044. // Exit if the playlist does not have Date Ranges or does not have Program Date Time
  4045. const lastFragment = details.fragments[details.fragments.length - 1];
  4046. if (ids.length === 0 || !isFiniteNumber(lastFragment == null ? void 0 : lastFragment.programDateTime)) {
  4047. return;
  4048. }
  4049. if (!this.id3Track) {
  4050. this.id3Track = this.createTrack(this.media);
  4051. }
  4052. const dateTimeOffset = lastFragment.programDateTime / 1000 - lastFragment.start;
  4053. const Cue = getCueClass();
  4054. for (let i = 0; i < ids.length; i++) {
  4055. const id = ids[i];
  4056. const dateRange = dateRanges[id];
  4057. const startTime = dateRangeDateToTimelineSeconds(dateRange.startDate, dateTimeOffset);
  4058. // Process DateRanges to determine end-time (known DURATION, END-DATE, or END-ON-NEXT)
  4059. const appendedDateRangeCues = dateRangeCuesAppended[id];
  4060. const cues = (appendedDateRangeCues == null ? void 0 : appendedDateRangeCues.cues) || {};
  4061. let durationKnown = (appendedDateRangeCues == null ? void 0 : appendedDateRangeCues.durationKnown) || false;
  4062. let endTime = MAX_CUE_ENDTIME;
  4063. const endDate = dateRange.endDate;
  4064. if (endDate) {
  4065. endTime = dateRangeDateToTimelineSeconds(endDate, dateTimeOffset);
  4066. durationKnown = true;
  4067. } else if (dateRange.endOnNext && !durationKnown) {
  4068. const nextDateRangeWithSameClass = ids.reduce((candidateDateRange, id) => {
  4069. if (id !== dateRange.id) {
  4070. const otherDateRange = dateRanges[id];
  4071. if (otherDateRange.class === dateRange.class && otherDateRange.startDate > dateRange.startDate && (!candidateDateRange || dateRange.startDate < candidateDateRange.startDate)) {
  4072. return otherDateRange;
  4073. }
  4074. }
  4075. return candidateDateRange;
  4076. }, null);
  4077. if (nextDateRangeWithSameClass) {
  4078. endTime = dateRangeDateToTimelineSeconds(nextDateRangeWithSameClass.startDate, dateTimeOffset);
  4079. durationKnown = true;
  4080. }
  4081. }
  4082. // Create TextTrack Cues for each MetadataGroup Item (select DateRange attribute)
  4083. // This is to emulate Safari HLS playback handling of DateRange tags
  4084. const attributes = Object.keys(dateRange.attr);
  4085. for (let j = 0; j < attributes.length; j++) {
  4086. const key = attributes[j];
  4087. if (!isDateRangeCueAttribute(key)) {
  4088. continue;
  4089. }
  4090. const cue = cues[key];
  4091. if (cue) {
  4092. if (durationKnown && !appendedDateRangeCues.durationKnown) {
  4093. cue.endTime = endTime;
  4094. }
  4095. } else if (Cue) {
  4096. let data = dateRange.attr[key];
  4097. if (isSCTE35Attribute(key)) {
  4098. data = hexToArrayBuffer(data);
  4099. }
  4100. const _cue = createCueWithDataFields(Cue, startTime, endTime, {
  4101. key,
  4102. data
  4103. }, MetadataSchema.dateRange);
  4104. if (_cue) {
  4105. _cue.id = id;
  4106. this.id3Track.addCue(_cue);
  4107. cues[key] = _cue;
  4108. }
  4109. }
  4110. }
  4111. // Keep track of processed DateRanges by ID for updating cues with new DateRange tag attributes
  4112. dateRangeCuesAppended[id] = {
  4113. cues,
  4114. dateRange,
  4115. durationKnown
  4116. };
  4117. }
  4118. }
  4119. }
  4120. class LatencyController {
  4121. constructor(hls) {
  4122. this.hls = void 0;
  4123. this.config = void 0;
  4124. this.media = null;
  4125. this.levelDetails = null;
  4126. this.currentTime = 0;
  4127. this.stallCount = 0;
  4128. this._latency = null;
  4129. this.timeupdateHandler = () => this.timeupdate();
  4130. this.hls = hls;
  4131. this.config = hls.config;
  4132. this.registerListeners();
  4133. }
  4134. get latency() {
  4135. return this._latency || 0;
  4136. }
  4137. get maxLatency() {
  4138. const {
  4139. config,
  4140. levelDetails
  4141. } = this;
  4142. if (config.liveMaxLatencyDuration !== undefined) {
  4143. return config.liveMaxLatencyDuration;
  4144. }
  4145. return levelDetails ? config.liveMaxLatencyDurationCount * levelDetails.targetduration : 0;
  4146. }
  4147. get targetLatency() {
  4148. const {
  4149. levelDetails
  4150. } = this;
  4151. if (levelDetails === null) {
  4152. return null;
  4153. }
  4154. const {
  4155. holdBack,
  4156. partHoldBack,
  4157. targetduration
  4158. } = levelDetails;
  4159. const {
  4160. liveSyncDuration,
  4161. liveSyncDurationCount,
  4162. lowLatencyMode
  4163. } = this.config;
  4164. const userConfig = this.hls.userConfig;
  4165. let targetLatency = lowLatencyMode ? partHoldBack || holdBack : holdBack;
  4166. if (userConfig.liveSyncDuration || userConfig.liveSyncDurationCount || targetLatency === 0) {
  4167. targetLatency = liveSyncDuration !== undefined ? liveSyncDuration : liveSyncDurationCount * targetduration;
  4168. }
  4169. const maxLiveSyncOnStallIncrease = targetduration;
  4170. const liveSyncOnStallIncrease = 1.0;
  4171. return targetLatency + Math.min(this.stallCount * liveSyncOnStallIncrease, maxLiveSyncOnStallIncrease);
  4172. }
  4173. get liveSyncPosition() {
  4174. const liveEdge = this.estimateLiveEdge();
  4175. const targetLatency = this.targetLatency;
  4176. const levelDetails = this.levelDetails;
  4177. if (liveEdge === null || targetLatency === null || levelDetails === null) {
  4178. return null;
  4179. }
  4180. const edge = levelDetails.edge;
  4181. const syncPosition = liveEdge - targetLatency - this.edgeStalled;
  4182. const min = edge - levelDetails.totalduration;
  4183. const max = edge - (this.config.lowLatencyMode && levelDetails.partTarget || levelDetails.targetduration);
  4184. return Math.min(Math.max(min, syncPosition), max);
  4185. }
  4186. get drift() {
  4187. const {
  4188. levelDetails
  4189. } = this;
  4190. if (levelDetails === null) {
  4191. return 1;
  4192. }
  4193. return levelDetails.drift;
  4194. }
  4195. get edgeStalled() {
  4196. const {
  4197. levelDetails
  4198. } = this;
  4199. if (levelDetails === null) {
  4200. return 0;
  4201. }
  4202. const maxLevelUpdateAge = (this.config.lowLatencyMode && levelDetails.partTarget || levelDetails.targetduration) * 3;
  4203. return Math.max(levelDetails.age - maxLevelUpdateAge, 0);
  4204. }
  4205. get forwardBufferLength() {
  4206. const {
  4207. media,
  4208. levelDetails
  4209. } = this;
  4210. if (!media || !levelDetails) {
  4211. return 0;
  4212. }
  4213. const bufferedRanges = media.buffered.length;
  4214. return (bufferedRanges ? media.buffered.end(bufferedRanges - 1) : levelDetails.edge) - this.currentTime;
  4215. }
  4216. destroy() {
  4217. this.unregisterListeners();
  4218. this.onMediaDetaching();
  4219. this.levelDetails = null;
  4220. // @ts-ignore
  4221. this.hls = this.timeupdateHandler = null;
  4222. }
  4223. registerListeners() {
  4224. this.hls.on(Events.MEDIA_ATTACHED, this.onMediaAttached, this);
  4225. this.hls.on(Events.MEDIA_DETACHING, this.onMediaDetaching, this);
  4226. this.hls.on(Events.MANIFEST_LOADING, this.onManifestLoading, this);
  4227. this.hls.on(Events.LEVEL_UPDATED, this.onLevelUpdated, this);
  4228. this.hls.on(Events.ERROR, this.onError, this);
  4229. }
  4230. unregisterListeners() {
  4231. this.hls.off(Events.MEDIA_ATTACHED, this.onMediaAttached, this);
  4232. this.hls.off(Events.MEDIA_DETACHING, this.onMediaDetaching, this);
  4233. this.hls.off(Events.MANIFEST_LOADING, this.onManifestLoading, this);
  4234. this.hls.off(Events.LEVEL_UPDATED, this.onLevelUpdated, this);
  4235. this.hls.off(Events.ERROR, this.onError, this);
  4236. }
  4237. onMediaAttached(event, data) {
  4238. this.media = data.media;
  4239. this.media.addEventListener('timeupdate', this.timeupdateHandler);
  4240. }
  4241. onMediaDetaching() {
  4242. if (this.media) {
  4243. this.media.removeEventListener('timeupdate', this.timeupdateHandler);
  4244. this.media = null;
  4245. }
  4246. }
  4247. onManifestLoading() {
  4248. this.levelDetails = null;
  4249. this._latency = null;
  4250. this.stallCount = 0;
  4251. }
  4252. onLevelUpdated(event, {
  4253. details
  4254. }) {
  4255. this.levelDetails = details;
  4256. if (details.advanced) {
  4257. this.timeupdate();
  4258. }
  4259. if (!details.live && this.media) {
  4260. this.media.removeEventListener('timeupdate', this.timeupdateHandler);
  4261. }
  4262. }
  4263. onError(event, data) {
  4264. var _this$levelDetails;
  4265. if (data.details !== ErrorDetails.BUFFER_STALLED_ERROR) {
  4266. return;
  4267. }
  4268. this.stallCount++;
  4269. if ((_this$levelDetails = this.levelDetails) != null && _this$levelDetails.live) {
  4270. logger.warn('[playback-rate-controller]: Stall detected, adjusting target latency');
  4271. }
  4272. }
  4273. timeupdate() {
  4274. const {
  4275. media,
  4276. levelDetails
  4277. } = this;
  4278. if (!media || !levelDetails) {
  4279. return;
  4280. }
  4281. this.currentTime = media.currentTime;
  4282. const latency = this.computeLatency();
  4283. if (latency === null) {
  4284. return;
  4285. }
  4286. this._latency = latency;
  4287. // Adapt playbackRate to meet target latency in low-latency mode
  4288. const {
  4289. lowLatencyMode,
  4290. maxLiveSyncPlaybackRate
  4291. } = this.config;
  4292. if (!lowLatencyMode || maxLiveSyncPlaybackRate === 1 || !levelDetails.live) {
  4293. return;
  4294. }
  4295. const targetLatency = this.targetLatency;
  4296. if (targetLatency === null) {
  4297. return;
  4298. }
  4299. const distanceFromTarget = latency - targetLatency;
  4300. // Only adjust playbackRate when within one target duration of targetLatency
  4301. // and more than one second from under-buffering.
  4302. // Playback further than one target duration from target can be considered DVR playback.
  4303. const liveMinLatencyDuration = Math.min(this.maxLatency, targetLatency + levelDetails.targetduration);
  4304. const inLiveRange = distanceFromTarget < liveMinLatencyDuration;
  4305. if (inLiveRange && distanceFromTarget > 0.05 && this.forwardBufferLength > 1) {
  4306. const max = Math.min(2, Math.max(1.0, maxLiveSyncPlaybackRate));
  4307. const rate = Math.round(2 / (1 + Math.exp(-0.75 * distanceFromTarget - this.edgeStalled)) * 20) / 20;
  4308. media.playbackRate = Math.min(max, Math.max(1, rate));
  4309. } else if (media.playbackRate !== 1 && media.playbackRate !== 0) {
  4310. media.playbackRate = 1;
  4311. }
  4312. }
  4313. estimateLiveEdge() {
  4314. const {
  4315. levelDetails
  4316. } = this;
  4317. if (levelDetails === null) {
  4318. return null;
  4319. }
  4320. return levelDetails.edge + levelDetails.age;
  4321. }
  4322. computeLatency() {
  4323. const liveEdge = this.estimateLiveEdge();
  4324. if (liveEdge === null) {
  4325. return null;
  4326. }
  4327. return liveEdge - this.currentTime;
  4328. }
  4329. }
  4330. const HdcpLevels = ['NONE', 'TYPE-0', 'TYPE-1', null];
  4331. function isHdcpLevel(value) {
  4332. return HdcpLevels.indexOf(value) > -1;
  4333. }
  4334. const VideoRangeValues = ['SDR', 'PQ', 'HLG'];
  4335. function isVideoRange(value) {
  4336. return !!value && VideoRangeValues.indexOf(value) > -1;
  4337. }
  4338. var HlsSkip = {
  4339. No: "",
  4340. Yes: "YES",
  4341. v2: "v2"
  4342. };
  4343. function getSkipValue(details) {
  4344. const {
  4345. canSkipUntil,
  4346. canSkipDateRanges,
  4347. age
  4348. } = details;
  4349. // A Client SHOULD NOT request a Playlist Delta Update unless it already
  4350. // has a version of the Playlist that is no older than one-half of the Skip Boundary.
  4351. // @see: https://datatracker.ietf.org/doc/html/draft-pantos-hls-rfc8216bis#section-6.3.7
  4352. const playlistRecentEnough = age < canSkipUntil / 2;
  4353. if (canSkipUntil && playlistRecentEnough) {
  4354. if (canSkipDateRanges) {
  4355. return HlsSkip.v2;
  4356. }
  4357. return HlsSkip.Yes;
  4358. }
  4359. return HlsSkip.No;
  4360. }
  4361. class HlsUrlParameters {
  4362. constructor(msn, part, skip) {
  4363. this.msn = void 0;
  4364. this.part = void 0;
  4365. this.skip = void 0;
  4366. this.msn = msn;
  4367. this.part = part;
  4368. this.skip = skip;
  4369. }
  4370. addDirectives(uri) {
  4371. const url = new self.URL(uri);
  4372. if (this.msn !== undefined) {
  4373. url.searchParams.set('_HLS_msn', this.msn.toString());
  4374. }
  4375. if (this.part !== undefined) {
  4376. url.searchParams.set('_HLS_part', this.part.toString());
  4377. }
  4378. if (this.skip) {
  4379. url.searchParams.set('_HLS_skip', this.skip);
  4380. }
  4381. return url.href;
  4382. }
  4383. }
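// Illustrative sketch (not part of the hls.js bundle): how getSkipValue and HlsUrlParameters
// can combine into an LL-HLS delivery-directive URL. The details object and playlist URL are
// hypothetical.
function exampleDeliveryDirectivesUsage() {
  // Playlist advertises CAN-SKIP-UNTIL=36 and was fetched 4s ago, so age < canSkipUntil / 2
  // and a delta update may be requested.
  const details = { canSkipUntil: 36, canSkipDateRanges: false, age: 4 };
  const skip = getSkipValue(details); // -> HlsSkip.Yes ('YES')
  const directives = new HlsUrlParameters(116, 2, skip);
  return directives.addDirectives('https://example.com/live/playlist.m3u8');
  // -> 'https://example.com/live/playlist.m3u8?_HLS_msn=116&_HLS_part=2&_HLS_skip=YES'
}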
  4384. class Level {
  4385. constructor(data) {
  4386. this._attrs = void 0;
  4387. this.audioCodec = void 0;
  4388. this.bitrate = void 0;
  4389. this.codecSet = void 0;
  4390. this.url = void 0;
  4391. this.frameRate = void 0;
  4392. this.height = void 0;
  4393. this.id = void 0;
  4394. this.name = void 0;
  4395. this.videoCodec = void 0;
  4396. this.width = void 0;
  4397. this.details = void 0;
  4398. this.fragmentError = 0;
  4399. this.loadError = 0;
  4400. this.loaded = void 0;
  4401. this.realBitrate = 0;
  4402. this.supportedPromise = void 0;
  4403. this.supportedResult = void 0;
  4404. this._avgBitrate = 0;
  4405. this._audioGroups = void 0;
  4406. this._subtitleGroups = void 0;
  4407. // Deprecated (retained for backwards compatibility)
  4408. this._urlId = 0;
  4409. this.url = [data.url];
  4410. this._attrs = [data.attrs];
  4411. this.bitrate = data.bitrate;
  4412. if (data.details) {
  4413. this.details = data.details;
  4414. }
  4415. this.id = data.id || 0;
  4416. this.name = data.name;
  4417. this.width = data.width || 0;
  4418. this.height = data.height || 0;
  4419. this.frameRate = data.attrs.optionalFloat('FRAME-RATE', 0);
  4420. this._avgBitrate = data.attrs.decimalInteger('AVERAGE-BANDWIDTH');
  4421. this.audioCodec = data.audioCodec;
  4422. this.videoCodec = data.videoCodec;
  4423. this.codecSet = [data.videoCodec, data.audioCodec].filter(c => !!c).map(s => s.substring(0, 4)).join(',');
  4424. this.addGroupId('audio', data.attrs.AUDIO);
  4425. this.addGroupId('text', data.attrs.SUBTITLES);
  4426. }
  4427. get maxBitrate() {
  4428. return Math.max(this.realBitrate, this.bitrate);
  4429. }
  4430. get averageBitrate() {
  4431. return this._avgBitrate || this.realBitrate || this.bitrate;
  4432. }
  4433. get attrs() {
  4434. return this._attrs[0];
  4435. }
  4436. get codecs() {
  4437. return this.attrs.CODECS || '';
  4438. }
  4439. get pathwayId() {
  4440. return this.attrs['PATHWAY-ID'] || '.';
  4441. }
  4442. get videoRange() {
  4443. return this.attrs['VIDEO-RANGE'] || 'SDR';
  4444. }
  4445. get score() {
  4446. return this.attrs.optionalFloat('SCORE', 0);
  4447. }
  4448. get uri() {
  4449. return this.url[0] || '';
  4450. }
  4451. hasAudioGroup(groupId) {
  4452. return hasGroup(this._audioGroups, groupId);
  4453. }
  4454. hasSubtitleGroup(groupId) {
  4455. return hasGroup(this._subtitleGroups, groupId);
  4456. }
  4457. get audioGroups() {
  4458. return this._audioGroups;
  4459. }
  4460. get subtitleGroups() {
  4461. return this._subtitleGroups;
  4462. }
  4463. addGroupId(type, groupId) {
  4464. if (!groupId) {
  4465. return;
  4466. }
  4467. if (type === 'audio') {
  4468. let audioGroups = this._audioGroups;
  4469. if (!audioGroups) {
  4470. audioGroups = this._audioGroups = [];
  4471. }
  4472. if (audioGroups.indexOf(groupId) === -1) {
  4473. audioGroups.push(groupId);
  4474. }
  4475. } else if (type === 'text') {
  4476. let subtitleGroups = this._subtitleGroups;
  4477. if (!subtitleGroups) {
  4478. subtitleGroups = this._subtitleGroups = [];
  4479. }
  4480. if (subtitleGroups.indexOf(groupId) === -1) {
  4481. subtitleGroups.push(groupId);
  4482. }
  4483. }
  4484. }
  4485. // Deprecated methods (retained for backwards compatibility)
  4486. get urlId() {
  4487. return 0;
  4488. }
  4489. set urlId(value) {}
  4490. get audioGroupIds() {
  4491. return this.audioGroups ? [this.audioGroupId] : undefined;
  4492. }
  4493. get textGroupIds() {
  4494. return this.subtitleGroups ? [this.textGroupId] : undefined;
  4495. }
  4496. get audioGroupId() {
  4497. var _this$audioGroups;
  4498. return (_this$audioGroups = this.audioGroups) == null ? void 0 : _this$audioGroups[0];
  4499. }
  4500. get textGroupId() {
  4501. var _this$subtitleGroups;
  4502. return (_this$subtitleGroups = this.subtitleGroups) == null ? void 0 : _this$subtitleGroups[0];
  4503. }
  4504. addFallback() {}
  4505. }
  4506. function hasGroup(groups, groupId) {
  4507. if (!groupId || !groups) {
  4508. return false;
  4509. }
  4510. return groups.indexOf(groupId) !== -1;
  4511. }
  4512. function updateFromToPTS(fragFrom, fragTo) {
  4513. const fragToPTS = fragTo.startPTS;
  4514. // if we know startPTS[toIdx]
  4515. if (isFiniteNumber(fragToPTS)) {
  4516. // update fragment duration.
4517. // it helps to fix drifts between the playlist-reported duration and the fragment's real duration
  4518. let duration = 0;
  4519. let frag;
  4520. if (fragTo.sn > fragFrom.sn) {
  4521. duration = fragToPTS - fragFrom.start;
  4522. frag = fragFrom;
  4523. } else {
  4524. duration = fragFrom.start - fragToPTS;
  4525. frag = fragTo;
  4526. }
  4527. if (frag.duration !== duration) {
  4528. frag.duration = duration;
  4529. }
4530. // we don't know startPTS[toIdx]
  4531. } else if (fragTo.sn > fragFrom.sn) {
  4532. const contiguous = fragFrom.cc === fragTo.cc;
  4533. // TODO: With part-loading end/durations we need to confirm the whole fragment is loaded before using (or setting) minEndPTS
  4534. if (contiguous && fragFrom.minEndPTS) {
  4535. fragTo.start = fragFrom.start + (fragFrom.minEndPTS - fragFrom.start);
  4536. } else {
  4537. fragTo.start = fragFrom.start + fragFrom.duration;
  4538. }
  4539. } else {
  4540. fragTo.start = Math.max(fragFrom.start - fragTo.duration, 0);
  4541. }
  4542. }
  4543. function updateFragPTSDTS(details, frag, startPTS, endPTS, startDTS, endDTS) {
  4544. const parsedMediaDuration = endPTS - startPTS;
  4545. if (parsedMediaDuration <= 0) {
  4546. logger.warn('Fragment should have a positive duration', frag);
  4547. endPTS = startPTS + frag.duration;
  4548. endDTS = startDTS + frag.duration;
  4549. }
  4550. let maxStartPTS = startPTS;
  4551. let minEndPTS = endPTS;
  4552. const fragStartPts = frag.startPTS;
  4553. const fragEndPts = frag.endPTS;
  4554. if (isFiniteNumber(fragStartPts)) {
  4555. // delta PTS between audio and video
  4556. const deltaPTS = Math.abs(fragStartPts - startPTS);
  4557. if (!isFiniteNumber(frag.deltaPTS)) {
  4558. frag.deltaPTS = deltaPTS;
  4559. } else {
  4560. frag.deltaPTS = Math.max(deltaPTS, frag.deltaPTS);
  4561. }
  4562. maxStartPTS = Math.max(startPTS, fragStartPts);
  4563. startPTS = Math.min(startPTS, fragStartPts);
  4564. startDTS = Math.min(startDTS, frag.startDTS);
  4565. minEndPTS = Math.min(endPTS, fragEndPts);
  4566. endPTS = Math.max(endPTS, fragEndPts);
  4567. endDTS = Math.max(endDTS, frag.endDTS);
  4568. }
  4569. const drift = startPTS - frag.start;
  4570. if (frag.start !== 0) {
  4571. frag.start = startPTS;
  4572. }
  4573. frag.duration = endPTS - frag.start;
  4574. frag.startPTS = startPTS;
  4575. frag.maxStartPTS = maxStartPTS;
  4576. frag.startDTS = startDTS;
  4577. frag.endPTS = endPTS;
  4578. frag.minEndPTS = minEndPTS;
  4579. frag.endDTS = endDTS;
  4580. const sn = frag.sn; // 'initSegment'
  4581. // exit if sn out of range
  4582. if (!details || sn < details.startSN || sn > details.endSN) {
  4583. return 0;
  4584. }
  4585. let i;
  4586. const fragIdx = sn - details.startSN;
  4587. const fragments = details.fragments;
  4588. // update frag reference in fragments array
  4589. // rationale is that fragments array might not contain this frag object.
  4590. // this will happen if playlist has been refreshed between frag loading and call to updateFragPTSDTS()
  4591. // if we don't update frag, we won't be able to propagate PTS info on the playlist
  4592. // resulting in invalid sliding computation
  4593. fragments[fragIdx] = frag;
  4594. // adjust fragment PTS/duration from seqnum-1 to frag 0
  4595. for (i = fragIdx; i > 0; i--) {
  4596. updateFromToPTS(fragments[i], fragments[i - 1]);
  4597. }
  4598. // adjust fragment PTS/duration from seqnum to last frag
  4599. for (i = fragIdx; i < fragments.length - 1; i++) {
  4600. updateFromToPTS(fragments[i], fragments[i + 1]);
  4601. }
  4602. if (details.fragmentHint) {
  4603. updateFromToPTS(fragments[fragments.length - 1], details.fragmentHint);
  4604. }
  4605. details.PTSKnown = details.alignedSliding = true;
  4606. return drift;
  4607. }
  4608. function mergeDetails(oldDetails, newDetails) {
  4609. // Track the last initSegment processed. Initialize it to the last one on the timeline.
  4610. let currentInitSegment = null;
  4611. const oldFragments = oldDetails.fragments;
  4612. for (let i = oldFragments.length - 1; i >= 0; i--) {
  4613. const oldInit = oldFragments[i].initSegment;
  4614. if (oldInit) {
  4615. currentInitSegment = oldInit;
  4616. break;
  4617. }
  4618. }
  4619. if (oldDetails.fragmentHint) {
  4620. // prevent PTS and duration from being adjusted on the next hint
  4621. delete oldDetails.fragmentHint.endPTS;
  4622. }
  4623. // check if old/new playlists have fragments in common
4624. // loop through overlapping SN and update startPTS, cc, and duration if any found
  4625. let ccOffset = 0;
  4626. let PTSFrag;
  4627. mapFragmentIntersection(oldDetails, newDetails, (oldFrag, newFrag) => {
  4628. if (oldFrag.relurl) {
  4629. // Do not compare CC if the old fragment has no url. This is a level.fragmentHint used by LL-HLS parts.
4630. // It may be off by 1 if it was created before any parts or discontinuity tags were appended to the end
  4631. // of the playlist.
  4632. ccOffset = oldFrag.cc - newFrag.cc;
  4633. }
  4634. if (isFiniteNumber(oldFrag.startPTS) && isFiniteNumber(oldFrag.endPTS)) {
  4635. newFrag.start = newFrag.startPTS = oldFrag.startPTS;
  4636. newFrag.startDTS = oldFrag.startDTS;
  4637. newFrag.maxStartPTS = oldFrag.maxStartPTS;
  4638. newFrag.endPTS = oldFrag.endPTS;
  4639. newFrag.endDTS = oldFrag.endDTS;
  4640. newFrag.minEndPTS = oldFrag.minEndPTS;
  4641. newFrag.duration = oldFrag.endPTS - oldFrag.startPTS;
  4642. if (newFrag.duration) {
  4643. PTSFrag = newFrag;
  4644. }
  4645. // PTS is known when any segment has startPTS and endPTS
  4646. newDetails.PTSKnown = newDetails.alignedSliding = true;
  4647. }
  4648. newFrag.elementaryStreams = oldFrag.elementaryStreams;
  4649. newFrag.loader = oldFrag.loader;
  4650. newFrag.stats = oldFrag.stats;
  4651. if (oldFrag.initSegment) {
  4652. newFrag.initSegment = oldFrag.initSegment;
  4653. currentInitSegment = oldFrag.initSegment;
  4654. }
  4655. });
  4656. if (currentInitSegment) {
  4657. const fragmentsToCheck = newDetails.fragmentHint ? newDetails.fragments.concat(newDetails.fragmentHint) : newDetails.fragments;
  4658. fragmentsToCheck.forEach(frag => {
  4659. var _currentInitSegment;
  4660. if (frag && (!frag.initSegment || frag.initSegment.relurl === ((_currentInitSegment = currentInitSegment) == null ? void 0 : _currentInitSegment.relurl))) {
  4661. frag.initSegment = currentInitSegment;
  4662. }
  4663. });
  4664. }
  4665. if (newDetails.skippedSegments) {
  4666. newDetails.deltaUpdateFailed = newDetails.fragments.some(frag => !frag);
  4667. if (newDetails.deltaUpdateFailed) {
  4668. logger.warn('[level-helper] Previous playlist missing segments skipped in delta playlist');
  4669. for (let i = newDetails.skippedSegments; i--;) {
  4670. newDetails.fragments.shift();
  4671. }
  4672. newDetails.startSN = newDetails.fragments[0].sn;
  4673. newDetails.startCC = newDetails.fragments[0].cc;
  4674. } else if (newDetails.canSkipDateRanges) {
  4675. newDetails.dateRanges = mergeDateRanges(oldDetails.dateRanges, newDetails.dateRanges, newDetails.recentlyRemovedDateranges);
  4676. }
  4677. }
  4678. const newFragments = newDetails.fragments;
  4679. if (ccOffset) {
  4680. logger.warn('discontinuity sliding from playlist, take drift into account');
  4681. for (let i = 0; i < newFragments.length; i++) {
  4682. newFragments[i].cc += ccOffset;
  4683. }
  4684. }
  4685. if (newDetails.skippedSegments) {
  4686. newDetails.startCC = newDetails.fragments[0].cc;
  4687. }
  4688. // Merge parts
  4689. mapPartIntersection(oldDetails.partList, newDetails.partList, (oldPart, newPart) => {
  4690. newPart.elementaryStreams = oldPart.elementaryStreams;
  4691. newPart.stats = oldPart.stats;
  4692. });
  4693. // if at least one fragment contains PTS info, recompute PTS information for all fragments
  4694. if (PTSFrag) {
  4695. updateFragPTSDTS(newDetails, PTSFrag, PTSFrag.startPTS, PTSFrag.endPTS, PTSFrag.startDTS, PTSFrag.endDTS);
  4696. } else {
  4697. // ensure that delta is within oldFragments range
  4698. // also adjust sliding in case delta is 0 (we could have old=[50-60] and new=old=[50-61])
  4699. // in that case we also need to adjust start offset of all fragments
  4700. adjustSliding(oldDetails, newDetails);
  4701. }
  4702. if (newFragments.length) {
  4703. newDetails.totalduration = newDetails.edge - newFragments[0].start;
  4704. }
  4705. newDetails.driftStartTime = oldDetails.driftStartTime;
  4706. newDetails.driftStart = oldDetails.driftStart;
  4707. const advancedDateTime = newDetails.advancedDateTime;
  4708. if (newDetails.advanced && advancedDateTime) {
  4709. const edge = newDetails.edge;
  4710. if (!newDetails.driftStart) {
  4711. newDetails.driftStartTime = advancedDateTime;
  4712. newDetails.driftStart = edge;
  4713. }
  4714. newDetails.driftEndTime = advancedDateTime;
  4715. newDetails.driftEnd = edge;
  4716. } else {
  4717. newDetails.driftEndTime = oldDetails.driftEndTime;
  4718. newDetails.driftEnd = oldDetails.driftEnd;
  4719. newDetails.advancedDateTime = oldDetails.advancedDateTime;
  4720. }
  4721. }
  4722. function mergeDateRanges(oldDateRanges, deltaDateRanges, recentlyRemovedDateranges) {
  4723. const dateRanges = _extends({}, oldDateRanges);
  4724. if (recentlyRemovedDateranges) {
  4725. recentlyRemovedDateranges.forEach(id => {
  4726. delete dateRanges[id];
  4727. });
  4728. }
  4729. Object.keys(deltaDateRanges).forEach(id => {
  4730. const dateRange = new DateRange(deltaDateRanges[id].attr, dateRanges[id]);
  4731. if (dateRange.isValid) {
  4732. dateRanges[id] = dateRange;
  4733. } else {
  4734. logger.warn(`Ignoring invalid Playlist Delta Update DATERANGE tag: "${JSON.stringify(deltaDateRanges[id].attr)}"`);
  4735. }
  4736. });
  4737. return dateRanges;
  4738. }
  4739. function mapPartIntersection(oldParts, newParts, intersectionFn) {
  4740. if (oldParts && newParts) {
  4741. let delta = 0;
  4742. for (let i = 0, len = oldParts.length; i <= len; i++) {
  4743. const oldPart = oldParts[i];
  4744. const newPart = newParts[i + delta];
  4745. if (oldPart && newPart && oldPart.index === newPart.index && oldPart.fragment.sn === newPart.fragment.sn) {
  4746. intersectionFn(oldPart, newPart);
  4747. } else {
  4748. delta--;
  4749. }
  4750. }
  4751. }
  4752. }
  4753. function mapFragmentIntersection(oldDetails, newDetails, intersectionFn) {
  4754. const skippedSegments = newDetails.skippedSegments;
  4755. const start = Math.max(oldDetails.startSN, newDetails.startSN) - newDetails.startSN;
  4756. const end = (oldDetails.fragmentHint ? 1 : 0) + (skippedSegments ? newDetails.endSN : Math.min(oldDetails.endSN, newDetails.endSN)) - newDetails.startSN;
  4757. const delta = newDetails.startSN - oldDetails.startSN;
  4758. const newFrags = newDetails.fragmentHint ? newDetails.fragments.concat(newDetails.fragmentHint) : newDetails.fragments;
  4759. const oldFrags = oldDetails.fragmentHint ? oldDetails.fragments.concat(oldDetails.fragmentHint) : oldDetails.fragments;
  4760. for (let i = start; i <= end; i++) {
  4761. const oldFrag = oldFrags[delta + i];
  4762. let newFrag = newFrags[i];
  4763. if (skippedSegments && !newFrag && i < skippedSegments) {
  4764. // Fill in skipped segments in delta playlist
  4765. newFrag = newDetails.fragments[i] = oldFrag;
  4766. }
  4767. if (oldFrag && newFrag) {
  4768. intersectionFn(oldFrag, newFrag);
  4769. }
  4770. }
  4771. }
  4772. function adjustSliding(oldDetails, newDetails) {
  4773. const delta = newDetails.startSN + newDetails.skippedSegments - oldDetails.startSN;
  4774. const oldFragments = oldDetails.fragments;
  4775. if (delta < 0 || delta >= oldFragments.length) {
  4776. return;
  4777. }
  4778. addSliding(newDetails, oldFragments[delta].start);
  4779. }
  4780. function addSliding(details, start) {
  4781. if (start) {
  4782. const fragments = details.fragments;
  4783. for (let i = details.skippedSegments; i < fragments.length; i++) {
  4784. fragments[i].start += start;
  4785. }
  4786. if (details.fragmentHint) {
  4787. details.fragmentHint.start += start;
  4788. }
  4789. }
  4790. }
  4791. function computeReloadInterval(newDetails, distanceToLiveEdgeMs = Infinity) {
  4792. let reloadInterval = 1000 * newDetails.targetduration;
  4793. if (newDetails.updated) {
  4794. // Use last segment duration when shorter than target duration and near live edge
  4795. const fragments = newDetails.fragments;
  4796. const liveEdgeMaxTargetDurations = 4;
  4797. if (fragments.length && reloadInterval * liveEdgeMaxTargetDurations > distanceToLiveEdgeMs) {
  4798. const lastSegmentDuration = fragments[fragments.length - 1].duration * 1000;
  4799. if (lastSegmentDuration < reloadInterval) {
  4800. reloadInterval = lastSegmentDuration;
  4801. }
  4802. }
  4803. } else {
  4804. // estimate = 'miss half average';
  4805. // follow HLS Spec, If the client reloads a Playlist file and finds that it has not
  4806. // changed then it MUST wait for a period of one-half the target
  4807. // duration before retrying.
  4808. reloadInterval /= 2;
  4809. }
  4810. return Math.round(reloadInterval);
  4811. }
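// Illustrative sketch (not part of the hls.js bundle): reload pacing produced by
// computeReloadInterval. The level-details objects are hypothetical and reduced to the
// fields the function reads.
function exampleComputeReloadIntervalUsage() {
  // Updated playlist near the live edge: the short last segment drives the interval.
  const updated = {
    targetduration: 6,
    updated: true,
    fragments: [{ duration: 6 }, { duration: 6 }, { duration: 2 }]
  };
  computeReloadInterval(updated, 5000); // -> 2000 (last segment duration in ms)
  // Unchanged playlist: wait half the target duration, per the HLS spec rule cited above.
  const unchanged = { targetduration: 6, updated: false, fragments: [] };
  return computeReloadInterval(unchanged); // -> 3000
}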
  4812. function getFragmentWithSN(level, sn, fragCurrent) {
  4813. if (!(level != null && level.details)) {
  4814. return null;
  4815. }
  4816. const levelDetails = level.details;
  4817. let fragment = levelDetails.fragments[sn - levelDetails.startSN];
  4818. if (fragment) {
  4819. return fragment;
  4820. }
  4821. fragment = levelDetails.fragmentHint;
  4822. if (fragment && fragment.sn === sn) {
  4823. return fragment;
  4824. }
  4825. if (sn < levelDetails.startSN && fragCurrent && fragCurrent.sn === sn) {
  4826. return fragCurrent;
  4827. }
  4828. return null;
  4829. }
  4830. function getPartWith(level, sn, partIndex) {
  4831. var _level$details;
  4832. if (!(level != null && level.details)) {
  4833. return null;
  4834. }
  4835. return findPart((_level$details = level.details) == null ? void 0 : _level$details.partList, sn, partIndex);
  4836. }
  4837. function findPart(partList, sn, partIndex) {
  4838. if (partList) {
  4839. for (let i = partList.length; i--;) {
  4840. const part = partList[i];
  4841. if (part.index === partIndex && part.fragment.sn === sn) {
  4842. return part;
  4843. }
  4844. }
  4845. }
  4846. return null;
  4847. }
  4848. function reassignFragmentLevelIndexes(levels) {
  4849. levels.forEach((level, index) => {
  4850. const {
  4851. details
  4852. } = level;
  4853. if (details != null && details.fragments) {
  4854. details.fragments.forEach(fragment => {
  4855. fragment.level = index;
  4856. });
  4857. }
  4858. });
  4859. }
  4860. function isTimeoutError(error) {
  4861. switch (error.details) {
  4862. case ErrorDetails.FRAG_LOAD_TIMEOUT:
  4863. case ErrorDetails.KEY_LOAD_TIMEOUT:
  4864. case ErrorDetails.LEVEL_LOAD_TIMEOUT:
  4865. case ErrorDetails.MANIFEST_LOAD_TIMEOUT:
  4866. return true;
  4867. }
  4868. return false;
  4869. }
  4870. function getRetryConfig(loadPolicy, error) {
  4871. const isTimeout = isTimeoutError(error);
  4872. return loadPolicy.default[`${isTimeout ? 'timeout' : 'error'}Retry`];
  4873. }
  4874. function getRetryDelay(retryConfig, retryCount) {
  4875. // exponential backoff capped to max retry delay
  4876. const backoffFactor = retryConfig.backoff === 'linear' ? 1 : Math.pow(2, retryCount);
  4877. return Math.min(backoffFactor * retryConfig.retryDelayMs, retryConfig.maxRetryDelayMs);
  4878. }
  4879. function getLoaderConfigWithoutReties(loderConfig) {
  4880. return _objectSpread2(_objectSpread2({}, loderConfig), {
  4881. errorRetry: null,
  4882. timeoutRetry: null
  4883. });
  4884. }
  4885. function shouldRetry(retryConfig, retryCount, isTimeout, loaderResponse) {
  4886. if (!retryConfig) {
  4887. return false;
  4888. }
  4889. const httpStatus = loaderResponse == null ? void 0 : loaderResponse.code;
  4890. const retry = retryCount < retryConfig.maxNumRetry && (retryForHttpStatus(httpStatus) || !!isTimeout);
  4891. return retryConfig.shouldRetry ? retryConfig.shouldRetry(retryConfig, retryCount, isTimeout, loaderResponse, retry) : retry;
  4892. }
  4893. function retryForHttpStatus(httpStatus) {
  4894. // Do not retry on status 4xx, status 0 (CORS error), or undefined (decrypt/gap/parse error)
  4895. return httpStatus === 0 && navigator.onLine === false || !!httpStatus && (httpStatus < 400 || httpStatus > 499);
  4896. }
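// Illustrative sketch (not part of the hls.js bundle): how shouldRetry and getRetryDelay
// combine for a retriable HTTP error. The retry config is hypothetical; real values come
// from the load policies in the hls.js config.
function exampleRetryHelpersUsage(retryCount) {
  const retryConfig = {
    maxNumRetry: 6,
    retryDelayMs: 1000,
    maxRetryDelayMs: 8000,
    backoff: 'exponential'
  };
  const loaderResponse = { code: 503 }; // 5xx responses are retriable
  if (shouldRetry(retryConfig, retryCount, false, loaderResponse)) {
    // Exponential backoff capped at maxRetryDelayMs: 1000, 2000, 4000, 8000, 8000, ... ms
    return getRetryDelay(retryConfig, retryCount);
  }
  return -1; // out of retries
}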
  4897. const BinarySearch = {
  4898. /**
  4899. * Searches for an item in an array which matches a certain condition.
  4900. * This requires the condition to only match one item in the array,
  4901. * and for the array to be ordered.
  4902. *
  4903. * @param list The array to search.
  4904. * @param comparisonFn
  4905. * Called and provided a candidate item as the first argument.
  4906. * Should return:
  4907. * > -1 if the item should be located at a lower index than the provided item.
  4908. * > 1 if the item should be located at a higher index than the provided item.
  4909. * > 0 if the item is the item you're looking for.
  4910. *
  4911. * @returns the object if found, otherwise returns null
  4912. */
  4913. search: function (list, comparisonFn) {
  4914. let minIndex = 0;
  4915. let maxIndex = list.length - 1;
  4916. let currentIndex = null;
  4917. let currentElement = null;
  4918. while (minIndex <= maxIndex) {
  4919. currentIndex = (minIndex + maxIndex) / 2 | 0;
  4920. currentElement = list[currentIndex];
  4921. const comparisonResult = comparisonFn(currentElement);
  4922. if (comparisonResult > 0) {
  4923. minIndex = currentIndex + 1;
  4924. } else if (comparisonResult < 0) {
  4925. maxIndex = currentIndex - 1;
  4926. } else {
  4927. return currentElement;
  4928. }
  4929. }
  4930. return null;
  4931. }
  4932. };
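// Illustrative sketch (not part of the hls.js bundle): the comparison-function convention
// expected by BinarySearch.search, shown on a sorted array of hypothetical { start, duration }
// items.
function exampleBinarySearchUsage(position) {
  const items = [
    { start: 0, duration: 10 },
    { start: 10, duration: 10 },
    { start: 20, duration: 10 }
  ];
  return BinarySearch.search(items, item => {
    if (item.start + item.duration <= position) {
      return 1; // item ends before position: the match is at a higher index
    }
    if (item.start > position) {
      return -1; // item starts after position: the match is at a lower index
    }
    return 0; // position falls inside this item
  });
}
// exampleBinarySearchUsage(25) -> { start: 20, duration: 10 }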
  4933. /**
  4934. * Returns first fragment whose endPdt value exceeds the given PDT, or null.
  4935. * @param fragments - The array of candidate fragments
  4936. * @param PDTValue - The PDT value which must be exceeded
  4937. * @param maxFragLookUpTolerance - The amount of time that a fragment's start/end can be within in order to be considered contiguous
  4938. */
  4939. function findFragmentByPDT(fragments, PDTValue, maxFragLookUpTolerance) {
  4940. if (PDTValue === null || !Array.isArray(fragments) || !fragments.length || !isFiniteNumber(PDTValue)) {
  4941. return null;
  4942. }
  4943. // if less than start
  4944. const startPDT = fragments[0].programDateTime;
  4945. if (PDTValue < (startPDT || 0)) {
  4946. return null;
  4947. }
  4948. const endPDT = fragments[fragments.length - 1].endProgramDateTime;
  4949. if (PDTValue >= (endPDT || 0)) {
  4950. return null;
  4951. }
  4952. maxFragLookUpTolerance = maxFragLookUpTolerance || 0;
  4953. for (let seg = 0; seg < fragments.length; ++seg) {
  4954. const frag = fragments[seg];
  4955. if (pdtWithinToleranceTest(PDTValue, maxFragLookUpTolerance, frag)) {
  4956. return frag;
  4957. }
  4958. }
  4959. return null;
  4960. }
  4961. /**
  4962. * Finds a fragment based on the SN of the previous fragment; or based on the needs of the current buffer.
  4963. * This method compensates for small buffer gaps by applying a tolerance to the start of any candidate fragment, thus
  4964. * breaking any traps which would cause the same fragment to be continuously selected within a small range.
  4965. * @param fragPrevious - The last frag successfully appended
  4966. * @param fragments - The array of candidate fragments
  4967. * @param bufferEnd - The end of the contiguous buffered range the playhead is currently within
  4968. * @param maxFragLookUpTolerance - The amount of time that a fragment's start/end can be within in order to be considered contiguous
  4969. * @returns a matching fragment or null
  4970. */
  4971. function findFragmentByPTS(fragPrevious, fragments, bufferEnd = 0, maxFragLookUpTolerance = 0, nextFragLookupTolerance = 0.005) {
  4972. let fragNext = null;
  4973. if (fragPrevious) {
  4974. fragNext = fragments[fragPrevious.sn - fragments[0].sn + 1] || null;
  4975. // check for buffer-end rounding error
  4976. const bufferEdgeError = fragPrevious.endDTS - bufferEnd;
  4977. if (bufferEdgeError > 0 && bufferEdgeError < 0.0000015) {
  4978. bufferEnd += 0.0000015;
  4979. }
  4980. } else if (bufferEnd === 0 && fragments[0].start === 0) {
  4981. fragNext = fragments[0];
  4982. }
  4983. // Prefer the next fragment if it's within tolerance
  4984. if (fragNext && ((!fragPrevious || fragPrevious.level === fragNext.level) && fragmentWithinToleranceTest(bufferEnd, maxFragLookUpTolerance, fragNext) === 0 || fragmentWithinFastStartSwitch(fragNext, fragPrevious, Math.min(nextFragLookupTolerance, maxFragLookUpTolerance)))) {
  4985. return fragNext;
  4986. }
  4987. // We might be seeking past the tolerance so find the best match
  4988. const foundFragment = BinarySearch.search(fragments, fragmentWithinToleranceTest.bind(null, bufferEnd, maxFragLookUpTolerance));
  4989. if (foundFragment && (foundFragment !== fragPrevious || !fragNext)) {
  4990. return foundFragment;
  4991. }
  4992. // If no match was found return the next fragment after fragPrevious, or null
  4993. return fragNext;
  4994. }
  4995. function fragmentWithinFastStartSwitch(fragNext, fragPrevious, nextFragLookupTolerance) {
  4996. if (fragPrevious && fragPrevious.start === 0 && fragPrevious.level < fragNext.level && (fragPrevious.endPTS || 0) > 0) {
  4997. const firstDuration = fragPrevious.tagList.reduce((duration, tag) => {
  4998. if (tag[0] === 'INF') {
  4999. duration += parseFloat(tag[1]);
  5000. }
  5001. return duration;
  5002. }, nextFragLookupTolerance);
  5003. return fragNext.start <= firstDuration;
  5004. }
  5005. return false;
  5006. }
  5007. /**
5008. * The test function used by findFragmentByPTS's BinarySearch to look for the best match to the current buffer conditions.
  5009. * @param candidate - The fragment to test
  5010. * @param bufferEnd - The end of the current buffered range the playhead is currently within
  5011. * @param maxFragLookUpTolerance - The amount of time that a fragment's start can be within in order to be considered contiguous
  5012. * @returns 0 if it matches, 1 if too low, -1 if too high
  5013. */
  5014. function fragmentWithinToleranceTest(bufferEnd = 0, maxFragLookUpTolerance = 0, candidate) {
  5015. // eagerly accept an accurate match (no tolerance)
  5016. if (candidate.start <= bufferEnd && candidate.start + candidate.duration > bufferEnd) {
  5017. return 0;
  5018. }
  5019. // offset should be within fragment boundary - config.maxFragLookUpTolerance
  5020. // this is to cope with situations like
  5021. // bufferEnd = 9.991
5022. // frag[0] : [0,10]
  5023. // frag[1] : [10,20]
  5024. // bufferEnd is within frag[0] range ... although what we are expecting is to return frag[1] here
  5025. // frag start frag start+duration
  5026. // |-----------------------------|
  5027. // <---> <--->
  5028. // ...--------><-----------------------------><---------....
  5029. // previous frag matching fragment next frag
  5030. // return -1 return 0 return 1
  5031. // logger.log(`level/sn/start/end/bufEnd:${level}/${candidate.sn}/${candidate.start}/${(candidate.start+candidate.duration)}/${bufferEnd}`);
  5032. // Set the lookup tolerance to be small enough to detect the current segment - ensures we don't skip over very small segments
  5033. const candidateLookupTolerance = Math.min(maxFragLookUpTolerance, candidate.duration + (candidate.deltaPTS ? candidate.deltaPTS : 0));
  5034. if (candidate.start + candidate.duration - candidateLookupTolerance <= bufferEnd) {
  5035. return 1;
  5036. } else if (candidate.start - candidateLookupTolerance > bufferEnd && candidate.start) {
5037. // if maxFragLookUpTolerance has a negative value, don't return -1 for the first element
  5038. return -1;
  5039. }
  5040. return 0;
  5041. }
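// Illustrative sketch (not part of the hls.js bundle): the three return values of
// fragmentWithinToleranceTest for a hypothetical buffer end of 25s and a 0.25s tolerance.
function exampleFragmentToleranceUsage() {
  const tolerance = 0.25;
  fragmentWithinToleranceTest(25, tolerance, { start: 0, duration: 10 }); // -> 1 (ends before the buffer end)
  fragmentWithinToleranceTest(25, tolerance, { start: 30, duration: 10 }); // -> -1 (starts after the buffer end)
  return fragmentWithinToleranceTest(25, tolerance, { start: 20, duration: 10 }); // -> 0 (contains the buffer end)
}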
  5042. /**
5043. * The test function used by findFragmentByPDT to look for the best match to the current buffer conditions.
  5044. * This function tests the candidate's program date time values, as represented in Unix time
  5045. * @param candidate - The fragment to test
  5046. * @param pdtBufferEnd - The Unix time representing the end of the current buffered range
  5047. * @param maxFragLookUpTolerance - The amount of time that a fragment's start can be within in order to be considered contiguous
  5048. * @returns true if contiguous, false otherwise
  5049. */
  5050. function pdtWithinToleranceTest(pdtBufferEnd, maxFragLookUpTolerance, candidate) {
  5051. const candidateLookupTolerance = Math.min(maxFragLookUpTolerance, candidate.duration + (candidate.deltaPTS ? candidate.deltaPTS : 0)) * 1000;
  5052. // endProgramDateTime can be null, default to zero
  5053. const endProgramDateTime = candidate.endProgramDateTime || 0;
  5054. return endProgramDateTime - candidateLookupTolerance > pdtBufferEnd;
  5055. }
  5056. function findFragWithCC(fragments, cc) {
  5057. return BinarySearch.search(fragments, candidate => {
  5058. if (candidate.cc < cc) {
  5059. return 1;
  5060. } else if (candidate.cc > cc) {
  5061. return -1;
  5062. } else {
  5063. return 0;
  5064. }
  5065. });
  5066. }
  5067. var NetworkErrorAction = {
  5068. DoNothing: 0,
  5069. SendEndCallback: 1,
  5070. SendAlternateToPenaltyBox: 2,
  5071. RemoveAlternatePermanently: 3,
  5072. InsertDiscontinuity: 4,
  5073. RetryRequest: 5
  5074. };
  5075. var ErrorActionFlags = {
  5076. None: 0,
  5077. MoveAllAlternatesMatchingHost: 1,
  5078. MoveAllAlternatesMatchingHDCP: 2,
  5079. SwitchToSDR: 4
  5080. }; // Reserved for future use
  5081. class ErrorController {
  5082. constructor(hls) {
  5083. this.hls = void 0;
  5084. this.playlistError = 0;
  5085. this.penalizedRenditions = {};
  5086. this.log = void 0;
  5087. this.warn = void 0;
  5088. this.error = void 0;
  5089. this.hls = hls;
  5090. this.log = logger.log.bind(logger, `[info]:`);
  5091. this.warn = logger.warn.bind(logger, `[warning]:`);
  5092. this.error = logger.error.bind(logger, `[error]:`);
  5093. this.registerListeners();
  5094. }
  5095. registerListeners() {
  5096. const hls = this.hls;
  5097. hls.on(Events.ERROR, this.onError, this);
  5098. hls.on(Events.MANIFEST_LOADING, this.onManifestLoading, this);
  5099. hls.on(Events.LEVEL_UPDATED, this.onLevelUpdated, this);
  5100. }
  5101. unregisterListeners() {
  5102. const hls = this.hls;
  5103. if (!hls) {
  5104. return;
  5105. }
  5106. hls.off(Events.ERROR, this.onError, this);
  5107. hls.off(Events.ERROR, this.onErrorOut, this);
  5108. hls.off(Events.MANIFEST_LOADING, this.onManifestLoading, this);
  5109. hls.off(Events.LEVEL_UPDATED, this.onLevelUpdated, this);
  5110. }
  5111. destroy() {
  5112. this.unregisterListeners();
  5113. // @ts-ignore
  5114. this.hls = null;
  5115. this.penalizedRenditions = {};
  5116. }
  5117. startLoad(startPosition) {}
  5118. stopLoad() {
  5119. this.playlistError = 0;
  5120. }
  5121. getVariantLevelIndex(frag) {
  5122. return (frag == null ? void 0 : frag.type) === PlaylistLevelType.MAIN ? frag.level : this.hls.loadLevel;
  5123. }
  5124. onManifestLoading() {
  5125. this.playlistError = 0;
  5126. this.penalizedRenditions = {};
  5127. }
  5128. onLevelUpdated() {
  5129. this.playlistError = 0;
  5130. }
  5131. onError(event, data) {
  5132. var _data$frag, _data$level;
  5133. if (data.fatal) {
  5134. return;
  5135. }
  5136. const hls = this.hls;
  5137. const context = data.context;
  5138. switch (data.details) {
  5139. case ErrorDetails.FRAG_LOAD_ERROR:
  5140. case ErrorDetails.FRAG_LOAD_TIMEOUT:
  5141. case ErrorDetails.KEY_LOAD_ERROR:
  5142. case ErrorDetails.KEY_LOAD_TIMEOUT:
  5143. data.errorAction = this.getFragRetryOrSwitchAction(data);
  5144. return;
  5145. case ErrorDetails.FRAG_PARSING_ERROR:
  5146. // ignore empty segment errors marked as gap
  5147. if ((_data$frag = data.frag) != null && _data$frag.gap) {
  5148. data.errorAction = {
  5149. action: NetworkErrorAction.DoNothing,
  5150. flags: ErrorActionFlags.None
  5151. };
  5152. return;
  5153. }
  5154. // falls through
  5155. case ErrorDetails.FRAG_GAP:
  5156. case ErrorDetails.FRAG_DECRYPT_ERROR:
  5157. {
  5158. // Switch level if possible, otherwise allow retry count to reach max error retries
  5159. data.errorAction = this.getFragRetryOrSwitchAction(data);
  5160. data.errorAction.action = NetworkErrorAction.SendAlternateToPenaltyBox;
  5161. return;
  5162. }
  5163. case ErrorDetails.LEVEL_EMPTY_ERROR:
  5164. case ErrorDetails.LEVEL_PARSING_ERROR:
  5165. {
  5166. var _data$context, _data$context$levelDe;
  5167. // Only retry when empty and live
  5168. const levelIndex = data.parent === PlaylistLevelType.MAIN ? data.level : hls.loadLevel;
  5169. if (data.details === ErrorDetails.LEVEL_EMPTY_ERROR && !!((_data$context = data.context) != null && (_data$context$levelDe = _data$context.levelDetails) != null && _data$context$levelDe.live)) {
  5170. data.errorAction = this.getPlaylistRetryOrSwitchAction(data, levelIndex);
  5171. } else {
  5172. // Escalate to fatal if not retrying or switching
  5173. data.levelRetry = false;
  5174. data.errorAction = this.getLevelSwitchAction(data, levelIndex);
  5175. }
  5176. }
  5177. return;
  5178. case ErrorDetails.LEVEL_LOAD_ERROR:
  5179. case ErrorDetails.LEVEL_LOAD_TIMEOUT:
  5180. if (typeof (context == null ? void 0 : context.level) === 'number') {
  5181. data.errorAction = this.getPlaylistRetryOrSwitchAction(data, context.level);
  5182. }
  5183. return;
  5184. case ErrorDetails.AUDIO_TRACK_LOAD_ERROR:
  5185. case ErrorDetails.AUDIO_TRACK_LOAD_TIMEOUT:
  5186. case ErrorDetails.SUBTITLE_LOAD_ERROR:
  5187. case ErrorDetails.SUBTITLE_TRACK_LOAD_TIMEOUT:
  5188. if (context) {
  5189. const level = hls.levels[hls.loadLevel];
  5190. if (level && (context.type === PlaylistContextType.AUDIO_TRACK && level.hasAudioGroup(context.groupId) || context.type === PlaylistContextType.SUBTITLE_TRACK && level.hasSubtitleGroup(context.groupId))) {
  5191. // Perform Pathway switch or Redundant failover if possible for fastest recovery
  5192. // otherwise allow playlist retry count to reach max error retries
  5193. data.errorAction = this.getPlaylistRetryOrSwitchAction(data, hls.loadLevel);
  5194. data.errorAction.action = NetworkErrorAction.SendAlternateToPenaltyBox;
  5195. data.errorAction.flags = ErrorActionFlags.MoveAllAlternatesMatchingHost;
  5196. return;
  5197. }
  5198. }
  5199. return;
  5200. case ErrorDetails.KEY_SYSTEM_STATUS_OUTPUT_RESTRICTED:
  5201. {
  5202. const level = hls.levels[hls.loadLevel];
  5203. const restrictedHdcpLevel = level == null ? void 0 : level.attrs['HDCP-LEVEL'];
  5204. if (restrictedHdcpLevel) {
  5205. data.errorAction = {
  5206. action: NetworkErrorAction.SendAlternateToPenaltyBox,
  5207. flags: ErrorActionFlags.MoveAllAlternatesMatchingHDCP,
  5208. hdcpLevel: restrictedHdcpLevel
  5209. };
  5210. } else {
  5211. this.keySystemError(data);
  5212. }
  5213. }
  5214. return;
  5215. case ErrorDetails.BUFFER_ADD_CODEC_ERROR:
  5216. case ErrorDetails.REMUX_ALLOC_ERROR:
  5217. case ErrorDetails.BUFFER_APPEND_ERROR:
  5218. data.errorAction = this.getLevelSwitchAction(data, (_data$level = data.level) != null ? _data$level : hls.loadLevel);
  5219. return;
  5220. case ErrorDetails.INTERNAL_EXCEPTION:
  5221. case ErrorDetails.BUFFER_APPENDING_ERROR:
  5222. case ErrorDetails.BUFFER_FULL_ERROR:
  5223. case ErrorDetails.LEVEL_SWITCH_ERROR:
  5224. case ErrorDetails.BUFFER_STALLED_ERROR:
  5225. case ErrorDetails.BUFFER_SEEK_OVER_HOLE:
  5226. case ErrorDetails.BUFFER_NUDGE_ON_STALL:
  5227. data.errorAction = {
  5228. action: NetworkErrorAction.DoNothing,
  5229. flags: ErrorActionFlags.None
  5230. };
  5231. return;
  5232. }
  5233. if (data.type === ErrorTypes.KEY_SYSTEM_ERROR) {
  5234. this.keySystemError(data);
  5235. }
  5236. }
  5237. keySystemError(data) {
  5238. const levelIndex = this.getVariantLevelIndex(data.frag);
  5239. // Do not retry level. Escalate to fatal if switching levels fails.
  5240. data.levelRetry = false;
  5241. data.errorAction = this.getLevelSwitchAction(data, levelIndex);
  5242. }
  5243. getPlaylistRetryOrSwitchAction(data, levelIndex) {
  5244. const hls = this.hls;
  5245. const retryConfig = getRetryConfig(hls.config.playlistLoadPolicy, data);
  5246. const retryCount = this.playlistError++;
  5247. const retry = shouldRetry(retryConfig, retryCount, isTimeoutError(data), data.response);
  5248. if (retry) {
  5249. return {
  5250. action: NetworkErrorAction.RetryRequest,
  5251. flags: ErrorActionFlags.None,
  5252. retryConfig,
  5253. retryCount
  5254. };
  5255. }
  5256. const errorAction = this.getLevelSwitchAction(data, levelIndex);
  5257. if (retryConfig) {
  5258. errorAction.retryConfig = retryConfig;
  5259. errorAction.retryCount = retryCount;
  5260. }
  5261. return errorAction;
  5262. }
  5263. getFragRetryOrSwitchAction(data) {
  5264. const hls = this.hls;
5265. // Share fragment error count across media options (main, audio, subs)
  5266. // This allows for level based rendition switching when media option assets fail
  5267. const variantLevelIndex = this.getVariantLevelIndex(data.frag);
  5268. const level = hls.levels[variantLevelIndex];
  5269. const {
  5270. fragLoadPolicy,
  5271. keyLoadPolicy
  5272. } = hls.config;
  5273. const retryConfig = getRetryConfig(data.details.startsWith('key') ? keyLoadPolicy : fragLoadPolicy, data);
  5274. const fragmentErrors = hls.levels.reduce((acc, level) => acc + level.fragmentError, 0);
5275. // Switch levels when out of retries or when the level index is out of bounds
  5276. if (level) {
  5277. if (data.details !== ErrorDetails.FRAG_GAP) {
  5278. level.fragmentError++;
  5279. }
  5280. const retry = shouldRetry(retryConfig, fragmentErrors, isTimeoutError(data), data.response);
  5281. if (retry) {
  5282. return {
  5283. action: NetworkErrorAction.RetryRequest,
  5284. flags: ErrorActionFlags.None,
  5285. retryConfig,
  5286. retryCount: fragmentErrors
  5287. };
  5288. }
  5289. }
5290. // Reached max retry count, or missing level reference
5291. // Switch to a valid index
  5292. const errorAction = this.getLevelSwitchAction(data, variantLevelIndex);
  5293. // Add retry details to allow skipping of FRAG_PARSING_ERROR
  5294. if (retryConfig) {
  5295. errorAction.retryConfig = retryConfig;
  5296. errorAction.retryCount = fragmentErrors;
  5297. }
  5298. return errorAction;
  5299. }
  5300. getLevelSwitchAction(data, levelIndex) {
  5301. const hls = this.hls;
  5302. if (levelIndex === null || levelIndex === undefined) {
  5303. levelIndex = hls.loadLevel;
  5304. }
  5305. const level = this.hls.levels[levelIndex];
  5306. if (level) {
  5307. var _data$frag2, _data$context2;
  5308. const errorDetails = data.details;
  5309. level.loadError++;
  5310. if (errorDetails === ErrorDetails.BUFFER_APPEND_ERROR) {
  5311. level.fragmentError++;
  5312. }
  5313. // Search for next level to retry
  5314. let nextLevel = -1;
  5315. const {
  5316. levels,
  5317. loadLevel,
  5318. minAutoLevel,
  5319. maxAutoLevel
  5320. } = hls;
  5321. if (!hls.autoLevelEnabled) {
  5322. hls.loadLevel = -1;
  5323. }
  5324. const fragErrorType = (_data$frag2 = data.frag) == null ? void 0 : _data$frag2.type;
  5325. // Find alternate audio codec if available on audio codec error
  5326. const isAudioCodecError = fragErrorType === PlaylistLevelType.AUDIO && errorDetails === ErrorDetails.FRAG_PARSING_ERROR || data.sourceBufferName === 'audio' && (errorDetails === ErrorDetails.BUFFER_ADD_CODEC_ERROR || errorDetails === ErrorDetails.BUFFER_APPEND_ERROR);
  5327. const findAudioCodecAlternate = isAudioCodecError && levels.some(({
  5328. audioCodec
  5329. }) => level.audioCodec !== audioCodec);
  5330. // Find alternate video codec if available on video codec error
  5331. const isVideoCodecError = data.sourceBufferName === 'video' && (errorDetails === ErrorDetails.BUFFER_ADD_CODEC_ERROR || errorDetails === ErrorDetails.BUFFER_APPEND_ERROR);
  5332. const findVideoCodecAlternate = isVideoCodecError && levels.some(({
  5333. codecSet,
  5334. audioCodec
  5335. }) => level.codecSet !== codecSet && level.audioCodec === audioCodec);
  5336. const {
  5337. type: playlistErrorType,
  5338. groupId: playlistErrorGroupId
  5339. } = (_data$context2 = data.context) != null ? _data$context2 : {};
  5340. for (let i = levels.length; i--;) {
  5341. const candidate = (i + loadLevel) % levels.length;
  5342. if (candidate !== loadLevel && candidate >= minAutoLevel && candidate <= maxAutoLevel && levels[candidate].loadError === 0) {
  5343. var _level$audioGroups, _level$subtitleGroups;
  5344. const levelCandidate = levels[candidate];
  5345. // Skip level switch if GAP tag is found in next level at same position
  5346. if (errorDetails === ErrorDetails.FRAG_GAP && fragErrorType === PlaylistLevelType.MAIN && data.frag) {
  5347. const levelDetails = levels[candidate].details;
  5348. if (levelDetails) {
  5349. const fragCandidate = findFragmentByPTS(data.frag, levelDetails.fragments, data.frag.start);
  5350. if (fragCandidate != null && fragCandidate.gap) {
  5351. continue;
  5352. }
  5353. }
  5354. } else if (playlistErrorType === PlaylistContextType.AUDIO_TRACK && levelCandidate.hasAudioGroup(playlistErrorGroupId) || playlistErrorType === PlaylistContextType.SUBTITLE_TRACK && levelCandidate.hasSubtitleGroup(playlistErrorGroupId)) {
5355. // For audio/subs playlist errors find another group ID or fall through to redundant fail-over
  5356. continue;
  5357. } else if (fragErrorType === PlaylistLevelType.AUDIO && (_level$audioGroups = level.audioGroups) != null && _level$audioGroups.some(groupId => levelCandidate.hasAudioGroup(groupId)) || fragErrorType === PlaylistLevelType.SUBTITLE && (_level$subtitleGroups = level.subtitleGroups) != null && _level$subtitleGroups.some(groupId => levelCandidate.hasSubtitleGroup(groupId)) || findAudioCodecAlternate && level.audioCodec === levelCandidate.audioCodec || !findAudioCodecAlternate && level.audioCodec !== levelCandidate.audioCodec || findVideoCodecAlternate && level.codecSet === levelCandidate.codecSet) {
5358. // For video/audio/subs frag errors find another group ID or fall through to redundant fail-over
  5359. continue;
  5360. }
  5361. nextLevel = candidate;
  5362. break;
  5363. }
  5364. }
  5365. if (nextLevel > -1 && hls.loadLevel !== nextLevel) {
  5366. data.levelRetry = true;
  5367. this.playlistError = 0;
  5368. return {
  5369. action: NetworkErrorAction.SendAlternateToPenaltyBox,
  5370. flags: ErrorActionFlags.None,
  5371. nextAutoLevel: nextLevel
  5372. };
  5373. }
  5374. }
  5375. // No levels to switch / Manual level selection / Level not found
  5376. // Resolve with Pathway switch, Redundant fail-over, or stay on lowest Level
  5377. return {
  5378. action: NetworkErrorAction.SendAlternateToPenaltyBox,
  5379. flags: ErrorActionFlags.MoveAllAlternatesMatchingHost
  5380. };
  5381. }
  5382. onErrorOut(event, data) {
  5383. var _data$errorAction;
  5384. switch ((_data$errorAction = data.errorAction) == null ? void 0 : _data$errorAction.action) {
  5385. case NetworkErrorAction.DoNothing:
  5386. break;
  5387. case NetworkErrorAction.SendAlternateToPenaltyBox:
  5388. this.sendAlternateToPenaltyBox(data);
  5389. if (!data.errorAction.resolved && data.details !== ErrorDetails.FRAG_GAP) {
  5390. data.fatal = true;
  5391. } else if (/MediaSource readyState: ended/.test(data.error.message)) {
  5392. this.warn(`MediaSource ended after "${data.sourceBufferName}" sourceBuffer append error. Attempting to recover from media error.`);
  5393. this.hls.recoverMediaError();
  5394. }
  5395. break;
  5396. case NetworkErrorAction.RetryRequest:
  5397. // handled by stream and playlist/level controllers
  5398. break;
  5399. }
  5400. if (data.fatal) {
  5401. this.hls.stopLoad();
  5402. return;
  5403. }
  5404. }
  5405. sendAlternateToPenaltyBox(data) {
  5406. const hls = this.hls;
  5407. const errorAction = data.errorAction;
  5408. if (!errorAction) {
  5409. return;
  5410. }
  5411. const {
  5412. flags,
  5413. hdcpLevel,
  5414. nextAutoLevel
  5415. } = errorAction;
  5416. switch (flags) {
  5417. case ErrorActionFlags.None:
  5418. this.switchLevel(data, nextAutoLevel);
  5419. break;
  5420. case ErrorActionFlags.MoveAllAlternatesMatchingHDCP:
  5421. if (hdcpLevel) {
  5422. hls.maxHdcpLevel = HdcpLevels[HdcpLevels.indexOf(hdcpLevel) - 1];
  5423. errorAction.resolved = true;
  5424. }
  5425. this.warn(`Restricting playback to HDCP-LEVEL of "${hls.maxHdcpLevel}" or lower`);
  5426. break;
  5427. }
  5428. // If not resolved by previous actions try to switch to next level
  5429. if (!errorAction.resolved) {
  5430. this.switchLevel(data, nextAutoLevel);
  5431. }
  5432. }
  5433. switchLevel(data, levelIndex) {
  5434. if (levelIndex !== undefined && data.errorAction) {
  5435. this.warn(`switching to level ${levelIndex} after ${data.details}`);
  5436. this.hls.nextAutoLevel = levelIndex;
  5437. data.errorAction.resolved = true;
  5438. // Stream controller is responsible for this but won't switch on false start
  5439. this.hls.nextLoadLevel = this.hls.nextAutoLevel;
  5440. }
  5441. }
  5442. }
  5443. class BasePlaylistController {
  5444. constructor(hls, logPrefix) {
  5445. this.hls = void 0;
  5446. this.timer = -1;
  5447. this.requestScheduled = -1;
  5448. this.canLoad = false;
  5449. this.log = void 0;
  5450. this.warn = void 0;
  5451. this.log = logger.log.bind(logger, `${logPrefix}:`);
  5452. this.warn = logger.warn.bind(logger, `${logPrefix}:`);
  5453. this.hls = hls;
  5454. }
  5455. destroy() {
  5456. this.clearTimer();
  5457. // @ts-ignore
  5458. this.hls = this.log = this.warn = null;
  5459. }
  5460. clearTimer() {
  5461. if (this.timer !== -1) {
  5462. self.clearTimeout(this.timer);
  5463. this.timer = -1;
  5464. }
  5465. }
  5466. startLoad() {
  5467. this.canLoad = true;
  5468. this.requestScheduled = -1;
  5469. this.loadPlaylist();
  5470. }
  5471. stopLoad() {
  5472. this.canLoad = false;
  5473. this.clearTimer();
  5474. }
  5475. switchParams(playlistUri, previous, current) {
  5476. const renditionReports = previous == null ? void 0 : previous.renditionReports;
  5477. if (renditionReports) {
  5478. let foundIndex = -1;
  5479. for (let i = 0; i < renditionReports.length; i++) {
  5480. const attr = renditionReports[i];
  5481. let uri;
  5482. try {
  5483. uri = new self.URL(attr.URI, previous.url).href;
  5484. } catch (error) {
  5485. logger.warn(`Could not construct new URL for Rendition Report: ${error}`);
  5486. uri = attr.URI || '';
  5487. }
  5488. // Use exact match. Otherwise, the last partial match, if any, will be used
  5489. // (Playlist URI includes a query string that the Rendition Report does not)
  5490. if (uri === playlistUri) {
  5491. foundIndex = i;
  5492. break;
  5493. } else if (uri === playlistUri.substring(0, uri.length)) {
  5494. foundIndex = i;
  5495. }
  5496. }
  5497. if (foundIndex !== -1) {
  5498. const attr = renditionReports[foundIndex];
  5499. const msn = parseInt(attr['LAST-MSN']) || (previous == null ? void 0 : previous.lastPartSn);
  5500. let part = parseInt(attr['LAST-PART']) || (previous == null ? void 0 : previous.lastPartIndex);
  5501. if (this.hls.config.lowLatencyMode) {
  5502. const currentGoal = Math.min(previous.age - previous.partTarget, previous.targetduration);
  5503. if (part >= 0 && currentGoal > previous.partTarget) {
  5504. part += 1;
  5505. }
  5506. }
  5507. const skip = current && getSkipValue(current);
  5508. return new HlsUrlParameters(msn, part >= 0 ? part : undefined, skip);
  5509. }
  5510. }
  5511. }
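/*
 * Illustrative outcome (values are made up): given a Rendition Report with LAST-MSN=101 and
 * LAST-PART=2 matching playlistUri, this returns new HlsUrlParameters(101, 2, skip), with the
 * part bumped to 3 in low-latency mode when the playlist age indicates we are more than one
 * part-target behind. The playlist loader is expected to serialize these as
 * _HLS_msn/_HLS_part/_HLS_skip delivery directives on the request.
 */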
  5512. loadPlaylist(hlsUrlParameters) {
  5513. if (this.requestScheduled === -1) {
  5514. this.requestScheduled = self.performance.now();
  5515. }
  5516. // Loading is handled by the subclasses
  5517. }
  5518. shouldLoadPlaylist(playlist) {
  5519. return this.canLoad && !!playlist && !!playlist.url && (!playlist.details || playlist.details.live);
  5520. }
  5521. shouldReloadPlaylist(playlist) {
  5522. return this.timer === -1 && this.requestScheduled === -1 && this.shouldLoadPlaylist(playlist);
  5523. }
  5524. playlistLoaded(index, data, previousDetails) {
  5525. const {
  5526. details,
  5527. stats
  5528. } = data;
  5529. // Set last updated date-time
  5530. const now = self.performance.now();
  5531. const elapsed = stats.loading.first ? Math.max(0, now - stats.loading.first) : 0;
  5532. details.advancedDateTime = Date.now() - elapsed;
  5533. // if current playlist is a live playlist, arm a timer to reload it
  5534. if (details.live || previousDetails != null && previousDetails.live) {
  5535. details.reloaded(previousDetails);
  5536. if (previousDetails) {
  5537. this.log(`live playlist ${index} ${details.advanced ? 'REFRESHED ' + details.lastPartSn + '-' + details.lastPartIndex : details.updated ? 'UPDATED' : 'MISSED'}`);
  5538. }
  5539. // Merge live playlists to adjust fragment starts and fill in delta playlist skipped segments
  5540. if (previousDetails && details.fragments.length > 0) {
  5541. mergeDetails(previousDetails, details);
  5542. }
  5543. if (!this.canLoad || !details.live) {
  5544. return;
  5545. }
  5546. let deliveryDirectives;
  5547. let msn = undefined;
  5548. let part = undefined;
  5549. if (details.canBlockReload && details.endSN && details.advanced) {
  5550. // Load level with LL-HLS delivery directives
  5551. const lowLatencyMode = this.hls.config.lowLatencyMode;
  5552. const lastPartSn = details.lastPartSn;
  5553. const endSn = details.endSN;
  5554. const lastPartIndex = details.lastPartIndex;
  5555. const hasParts = lastPartIndex !== -1;
  5556. const lastPart = lastPartSn === endSn;
  5557. // When low latency mode is disabled, we'll skip part requests once the last part index is found
  5558. const nextSnStartIndex = lowLatencyMode ? 0 : lastPartIndex;
  5559. if (hasParts) {
  5560. msn = lastPart ? endSn + 1 : lastPartSn;
  5561. part = lastPart ? nextSnStartIndex : lastPartIndex + 1;
  5562. } else {
  5563. msn = endSn + 1;
  5564. }
5565. // Low-Latency CDN Tune-in: "age" header and time since load indicate we're behind by more than one part
  5566. // Update directives to obtain the Playlist that has the estimated additional duration of media
  5567. const lastAdvanced = details.age;
  5568. const cdnAge = lastAdvanced + details.ageHeader;
  5569. let currentGoal = Math.min(cdnAge - details.partTarget, details.targetduration * 1.5);
  5570. if (currentGoal > 0) {
  5571. if (previousDetails && currentGoal > previousDetails.tuneInGoal) {
  5572. // If we attempted to get the next or latest playlist update, but currentGoal increased,
5573. // then we either can't catch up, or the "age" header cannot be trusted.
  5574. this.warn(`CDN Tune-in goal increased from: ${previousDetails.tuneInGoal} to: ${currentGoal} with playlist age: ${details.age}`);
  5575. currentGoal = 0;
  5576. } else {
  5577. const segments = Math.floor(currentGoal / details.targetduration);
  5578. msn += segments;
  5579. if (part !== undefined) {
  5580. const parts = Math.round(currentGoal % details.targetduration / details.partTarget);
  5581. part += parts;
  5582. }
  5583. this.log(`CDN Tune-in age: ${details.ageHeader}s last advanced ${lastAdvanced.toFixed(2)}s goal: ${currentGoal} skip sn ${segments} to part ${part}`);
  5584. }
  5585. details.tuneInGoal = currentGoal;
  5586. }
  5587. deliveryDirectives = this.getDeliveryDirectives(details, data.deliveryDirectives, msn, part);
  5588. if (lowLatencyMode || !lastPart) {
  5589. this.loadPlaylist(deliveryDirectives);
  5590. return;
  5591. }
  5592. } else if (details.canBlockReload || details.canSkipUntil) {
  5593. deliveryDirectives = this.getDeliveryDirectives(details, data.deliveryDirectives, msn, part);
  5594. }
  5595. const bufferInfo = this.hls.mainForwardBufferInfo;
  5596. const position = bufferInfo ? bufferInfo.end - bufferInfo.len : 0;
  5597. const distanceToLiveEdgeMs = (details.edge - position) * 1000;
  5598. const reloadInterval = computeReloadInterval(details, distanceToLiveEdgeMs);
  5599. if (details.updated && now > this.requestScheduled + reloadInterval) {
  5600. this.requestScheduled = stats.loading.start;
  5601. }
  5602. if (msn !== undefined && details.canBlockReload) {
  5603. this.requestScheduled = stats.loading.first + reloadInterval - (details.partTarget * 1000 || 1000);
  5604. } else if (this.requestScheduled === -1 || this.requestScheduled + reloadInterval < now) {
  5605. this.requestScheduled = now;
  5606. } else if (this.requestScheduled - now <= 0) {
  5607. this.requestScheduled += reloadInterval;
  5608. }
  5609. let estimatedTimeUntilUpdate = this.requestScheduled - now;
  5610. estimatedTimeUntilUpdate = Math.max(0, estimatedTimeUntilUpdate);
  5611. this.log(`reload live playlist ${index} in ${Math.round(estimatedTimeUntilUpdate)} ms`);
  5612. // this.log(
  5613. // `live reload ${details.updated ? 'REFRESHED' : 'MISSED'}
  5614. // reload in ${estimatedTimeUntilUpdate / 1000}
  5615. // round trip ${(stats.loading.end - stats.loading.start) / 1000}
  5616. // diff ${
  5617. // (reloadInterval -
  5618. // (estimatedTimeUntilUpdate +
  5619. // stats.loading.end -
  5620. // stats.loading.start)) /
  5621. // 1000
  5622. // }
  5623. // reload interval ${reloadInterval / 1000}
  5624. // target duration ${details.targetduration}
  5625. // distance to edge ${distanceToLiveEdgeMs / 1000}`
  5626. // );
  5627. this.timer = self.setTimeout(() => this.loadPlaylist(deliveryDirectives), estimatedTimeUntilUpdate);
  5628. } else {
  5629. this.clearTimer();
  5630. }
  5631. }
  5632. getDeliveryDirectives(details, previousDeliveryDirectives, msn, part) {
  5633. let skip = getSkipValue(details);
  5634. if (previousDeliveryDirectives != null && previousDeliveryDirectives.skip && details.deltaUpdateFailed) {
  5635. msn = previousDeliveryDirectives.msn;
  5636. part = previousDeliveryDirectives.part;
  5637. skip = HlsSkip.No;
  5638. }
  5639. return new HlsUrlParameters(msn, part, skip);
  5640. }
  5641. checkRetry(errorEvent) {
  5642. const errorDetails = errorEvent.details;
  5643. const isTimeout = isTimeoutError(errorEvent);
  5644. const errorAction = errorEvent.errorAction;
  5645. const {
  5646. action,
  5647. retryCount = 0,
  5648. retryConfig
  5649. } = errorAction || {};
  5650. const retry = !!errorAction && !!retryConfig && (action === NetworkErrorAction.RetryRequest || !errorAction.resolved && action === NetworkErrorAction.SendAlternateToPenaltyBox);
  5651. if (retry) {
  5652. var _errorEvent$context;
  5653. this.requestScheduled = -1;
  5654. if (retryCount >= retryConfig.maxNumRetry) {
  5655. return false;
  5656. }
  5657. if (isTimeout && (_errorEvent$context = errorEvent.context) != null && _errorEvent$context.deliveryDirectives) {
  5658. // The LL-HLS request already timed out so retry immediately
  5659. this.warn(`Retrying playlist loading ${retryCount + 1}/${retryConfig.maxNumRetry} after "${errorDetails}" without delivery-directives`);
  5660. this.loadPlaylist();
  5661. } else {
  5662. const delay = getRetryDelay(retryConfig, retryCount);
  5663. // Schedule level/track reload
  5664. this.timer = self.setTimeout(() => this.loadPlaylist(), delay);
  5665. this.warn(`Retrying playlist loading ${retryCount + 1}/${retryConfig.maxNumRetry} after "${errorDetails}" in ${delay}ms`);
  5666. }
  5667. // `levelRetry = true` used to inform other controllers that a retry is happening
  5668. errorEvent.levelRetry = true;
  5669. errorAction.resolved = true;
  5670. }
  5671. return retry;
  5672. }
  5673. }
  5674. /*
  5675. * compute an Exponential Weighted moving average
  5676. * - https://en.wikipedia.org/wiki/Moving_average#Exponential_moving_average
  5677. * - heavily inspired from shaka-player
  5678. */
  5679. class EWMA {
  5680. // About half of the estimated value will be from the last |halfLife| samples by weight.
  5681. constructor(halfLife, estimate = 0, weight = 0) {
  5682. this.halfLife = void 0;
  5683. this.alpha_ = void 0;
  5684. this.estimate_ = void 0;
  5685. this.totalWeight_ = void 0;
  5686. this.halfLife = halfLife;
  5687. // Larger values of alpha expire historical data more slowly.
  5688. this.alpha_ = halfLife ? Math.exp(Math.log(0.5) / halfLife) : 0;
  5689. this.estimate_ = estimate;
  5690. this.totalWeight_ = weight;
  5691. }
  5692. sample(weight, value) {
  5693. const adjAlpha = Math.pow(this.alpha_, weight);
  5694. this.estimate_ = value * (1 - adjAlpha) + adjAlpha * this.estimate_;
  5695. this.totalWeight_ += weight;
  5696. }
  5697. getTotalWeight() {
  5698. return this.totalWeight_;
  5699. }
  5700. getEstimate() {
  5701. if (this.alpha_) {
  5702. const zeroFactor = 1 - Math.pow(this.alpha_, this.totalWeight_);
  5703. if (zeroFactor) {
  5704. return this.estimate_ / zeroFactor;
  5705. }
  5706. }
  5707. return this.estimate_;
  5708. }
  5709. }
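/*
 * Usage sketch (illustrative only): the half-life is expressed in units of accumulated weight,
 * so after that much weight roughly half of the estimate comes from newer samples.
 * const ewma = new EWMA(3);
 * ewma.sample(1, 500000);  // weight of 1 (e.g. one second of data), value of 500 kbps
 * ewma.sample(1, 1000000);
 * ewma.getEstimate();      // weighted average; getEstimate() also corrects start-up bias
 */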
  5710. /*
  5711. * EWMA Bandwidth Estimator
  5712. * - heavily inspired from shaka-player
  5713. * Tracks bandwidth samples and estimates available bandwidth.
  5714. * Based on the minimum of two exponentially-weighted moving averages with
  5715. * different half-lives.
  5716. */
  5717. class EwmaBandWidthEstimator {
  5718. constructor(slow, fast, defaultEstimate, defaultTTFB = 100) {
  5719. this.defaultEstimate_ = void 0;
  5720. this.minWeight_ = void 0;
  5721. this.minDelayMs_ = void 0;
  5722. this.slow_ = void 0;
  5723. this.fast_ = void 0;
  5724. this.defaultTTFB_ = void 0;
  5725. this.ttfb_ = void 0;
  5726. this.defaultEstimate_ = defaultEstimate;
  5727. this.minWeight_ = 0.001;
  5728. this.minDelayMs_ = 50;
  5729. this.slow_ = new EWMA(slow);
  5730. this.fast_ = new EWMA(fast);
  5731. this.defaultTTFB_ = defaultTTFB;
  5732. this.ttfb_ = new EWMA(slow);
  5733. }
  5734. update(slow, fast) {
  5735. const {
  5736. slow_,
  5737. fast_,
  5738. ttfb_
  5739. } = this;
  5740. if (slow_.halfLife !== slow) {
  5741. this.slow_ = new EWMA(slow, slow_.getEstimate(), slow_.getTotalWeight());
  5742. }
  5743. if (fast_.halfLife !== fast) {
  5744. this.fast_ = new EWMA(fast, fast_.getEstimate(), fast_.getTotalWeight());
  5745. }
  5746. if (ttfb_.halfLife !== slow) {
  5747. this.ttfb_ = new EWMA(slow, ttfb_.getEstimate(), ttfb_.getTotalWeight());
  5748. }
  5749. }
  5750. sample(durationMs, numBytes) {
  5751. durationMs = Math.max(durationMs, this.minDelayMs_);
  5752. const numBits = 8 * numBytes;
  5753. // weight is duration in seconds
  5754. const durationS = durationMs / 1000;
  5755. // value is bandwidth in bits/s
  5756. const bandwidthInBps = numBits / durationS;
  5757. this.fast_.sample(durationS, bandwidthInBps);
  5758. this.slow_.sample(durationS, bandwidthInBps);
  5759. }
  5760. sampleTTFB(ttfb) {
  5761. // weight is frequency curve applied to TTFB in seconds
  5762. // (longer times have less weight with expected input under 1 second)
  5763. const seconds = ttfb / 1000;
  5764. const weight = Math.sqrt(2) * Math.exp(-Math.pow(seconds, 2) / 2);
  5765. this.ttfb_.sample(weight, Math.max(ttfb, 5));
  5766. }
  5767. canEstimate() {
  5768. return this.fast_.getTotalWeight() >= this.minWeight_;
  5769. }
  5770. getEstimate() {
  5771. if (this.canEstimate()) {
  5772. // console.log('slow estimate:'+ Math.round(this.slow_.getEstimate()));
  5773. // console.log('fast estimate:'+ Math.round(this.fast_.getEstimate()));
  5774. // Take the minimum of these two estimates. This should have the effect of
  5775. // adapting down quickly, but up more slowly.
  5776. return Math.min(this.fast_.getEstimate(), this.slow_.getEstimate());
  5777. } else {
  5778. return this.defaultEstimate_;
  5779. }
  5780. }
  5781. getEstimateTTFB() {
  5782. if (this.ttfb_.getTotalWeight() >= this.minWeight_) {
  5783. return this.ttfb_.getEstimate();
  5784. } else {
  5785. return this.defaultTTFB_;
  5786. }
  5787. }
  5788. destroy() {}
  5789. }
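/*
 * Usage sketch (illustrative, numbers are made up):
 * const bwe = new EwmaBandWidthEstimator(9, 3, 500000); // slow/fast half-lives, 500 kbps default
 * bwe.sample(2000, 1000000);  // 1,000,000 bytes over 2000 ms => a 4 Mbps bandwidth sample
 * bwe.sampleTTFB(120);        // 120 ms time-to-first-byte sample
 * if (bwe.canEstimate()) bwe.getEstimate(); // min(fast, slow): adapts down quickly, up slowly
 */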
  5790. /**
  5791. * @returns Whether we can detect and validate HDR capability within the window context
  5792. */
  5793. function isHdrSupported() {
  5794. if (typeof matchMedia === 'function') {
  5795. const mediaQueryList = matchMedia('(dynamic-range: high)');
  5796. const badQuery = matchMedia('bad query');
  5797. if (mediaQueryList.media !== badQuery.media) {
  5798. return mediaQueryList.matches === true;
  5799. }
  5800. }
  5801. return false;
  5802. }
  5803. /**
  5804. * Sanitizes inputs to return the active video selection options for HDR/SDR.
  5805. * When both inputs are null:
  5806. *
  5807. * `{ preferHDR: false, allowedVideoRanges: [] }`
  5808. *
  5809. * When `currentVideoRange` non-null, maintain the active range:
  5810. *
  5811. * `{ preferHDR: currentVideoRange !== 'SDR', allowedVideoRanges: [currentVideoRange] }`
  5812. *
  5813. * When VideoSelectionOption non-null:
  5814. *
  5815. * - Allow all video ranges if `allowedVideoRanges` unspecified.
  5816. * - If `preferHDR` is non-null use the value to filter `allowedVideoRanges`.
  5817. * - Else check window for HDR support and set `preferHDR` to the result.
  5818. *
  5819. * @param currentVideoRange
  5820. * @param videoPreference
  5821. */
  5822. function getVideoSelectionOptions(currentVideoRange, videoPreference) {
  5823. let preferHDR = false;
  5824. let allowedVideoRanges = [];
  5825. if (currentVideoRange) {
  5826. preferHDR = currentVideoRange !== 'SDR';
  5827. allowedVideoRanges = [currentVideoRange];
  5828. }
  5829. if (videoPreference) {
  5830. allowedVideoRanges = videoPreference.allowedVideoRanges || VideoRangeValues.slice(0);
  5831. preferHDR = videoPreference.preferHDR !== undefined ? videoPreference.preferHDR : isHdrSupported();
  5832. if (preferHDR) {
  5833. allowedVideoRanges = allowedVideoRanges.filter(range => range !== 'SDR');
  5834. } else {
  5835. allowedVideoRanges = ['SDR'];
  5836. }
  5837. }
  5838. return {
  5839. preferHDR,
  5840. allowedVideoRanges
  5841. };
  5842. }
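/*
 * Examples of the cases documented above (illustrative):
 * getVideoSelectionOptions(undefined, undefined)            => { preferHDR: false, allowedVideoRanges: [] }
 * getVideoSelectionOptions('PQ', undefined)                 => { preferHDR: true, allowedVideoRanges: ['PQ'] }
 * getVideoSelectionOptions(undefined, { preferHDR: false }) => { preferHDR: false, allowedVideoRanges: ['SDR'] }
 */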
  5843. function getStartCodecTier(codecTiers, currentVideoRange, currentBw, audioPreference, videoPreference) {
  5844. const codecSets = Object.keys(codecTiers);
  5845. const channelsPreference = audioPreference == null ? void 0 : audioPreference.channels;
  5846. const audioCodecPreference = audioPreference == null ? void 0 : audioPreference.audioCodec;
  5847. const preferStereo = channelsPreference && parseInt(channelsPreference) === 2;
  5848. // Use first level set to determine stereo, and minimum resolution and framerate
  5849. let hasStereo = true;
  5850. let hasCurrentVideoRange = false;
  5851. let minHeight = Infinity;
  5852. let minFramerate = Infinity;
  5853. let minBitrate = Infinity;
  5854. let selectedScore = 0;
  5855. let videoRanges = [];
  5856. const {
  5857. preferHDR,
  5858. allowedVideoRanges
  5859. } = getVideoSelectionOptions(currentVideoRange, videoPreference);
  5860. for (let i = codecSets.length; i--;) {
  5861. const tier = codecTiers[codecSets[i]];
  5862. hasStereo = tier.channels[2] > 0;
  5863. minHeight = Math.min(minHeight, tier.minHeight);
  5864. minFramerate = Math.min(minFramerate, tier.minFramerate);
  5865. minBitrate = Math.min(minBitrate, tier.minBitrate);
  5866. const matchingVideoRanges = allowedVideoRanges.filter(range => tier.videoRanges[range] > 0);
  5867. if (matchingVideoRanges.length > 0) {
  5868. hasCurrentVideoRange = true;
  5869. videoRanges = matchingVideoRanges;
  5870. }
  5871. }
  5872. minHeight = isFiniteNumber(minHeight) ? minHeight : 0;
  5873. minFramerate = isFiniteNumber(minFramerate) ? minFramerate : 0;
  5874. const maxHeight = Math.max(1080, minHeight);
  5875. const maxFramerate = Math.max(30, minFramerate);
  5876. minBitrate = isFiniteNumber(minBitrate) ? minBitrate : currentBw;
  5877. currentBw = Math.max(minBitrate, currentBw);
  5878. // If there are no variants with matching preference, set currentVideoRange to undefined
  5879. if (!hasCurrentVideoRange) {
  5880. currentVideoRange = undefined;
  5881. videoRanges = [];
  5882. }
  5883. const codecSet = codecSets.reduce((selected, candidate) => {
5884. // Remove candidates which do not meet bitrate, default audio, stereo or channels preference, 1080p or lower, 30fps or lower, or SDR/HDR selection if present
  5885. const candidateTier = codecTiers[candidate];
  5886. if (candidate === selected) {
  5887. return selected;
  5888. }
  5889. if (candidateTier.minBitrate > currentBw) {
  5890. logStartCodecCandidateIgnored(candidate, `min bitrate of ${candidateTier.minBitrate} > current estimate of ${currentBw}`);
  5891. return selected;
  5892. }
  5893. if (!candidateTier.hasDefaultAudio) {
  5894. logStartCodecCandidateIgnored(candidate, `no renditions with default or auto-select sound found`);
  5895. return selected;
  5896. }
  5897. if (audioCodecPreference && candidate.indexOf(audioCodecPreference.substring(0, 4)) % 5 !== 0) {
  5898. logStartCodecCandidateIgnored(candidate, `audio codec preference "${audioCodecPreference}" not found`);
  5899. return selected;
  5900. }
  5901. if (channelsPreference && !preferStereo) {
  5902. if (!candidateTier.channels[channelsPreference]) {
  5903. logStartCodecCandidateIgnored(candidate, `no renditions with ${channelsPreference} channel sound found (channels options: ${Object.keys(candidateTier.channels)})`);
  5904. return selected;
  5905. }
  5906. } else if ((!audioCodecPreference || preferStereo) && hasStereo && candidateTier.channels['2'] === 0) {
  5907. logStartCodecCandidateIgnored(candidate, `no renditions with stereo sound found`);
  5908. return selected;
  5909. }
  5910. if (candidateTier.minHeight > maxHeight) {
  5911. logStartCodecCandidateIgnored(candidate, `min resolution of ${candidateTier.minHeight} > maximum of ${maxHeight}`);
  5912. return selected;
  5913. }
  5914. if (candidateTier.minFramerate > maxFramerate) {
  5915. logStartCodecCandidateIgnored(candidate, `min framerate of ${candidateTier.minFramerate} > maximum of ${maxFramerate}`);
  5916. return selected;
  5917. }
  5918. if (!videoRanges.some(range => candidateTier.videoRanges[range] > 0)) {
  5919. logStartCodecCandidateIgnored(candidate, `no variants with VIDEO-RANGE of ${JSON.stringify(videoRanges)} found`);
  5920. return selected;
  5921. }
  5922. if (candidateTier.maxScore < selectedScore) {
  5923. logStartCodecCandidateIgnored(candidate, `max score of ${candidateTier.maxScore} < selected max of ${selectedScore}`);
  5924. return selected;
  5925. }
5926. // Remove candidates with less preferred codecs or more errors
  5927. if (selected && (codecsSetSelectionPreferenceValue(candidate) >= codecsSetSelectionPreferenceValue(selected) || candidateTier.fragmentError > codecTiers[selected].fragmentError)) {
  5928. return selected;
  5929. }
  5930. selectedScore = candidateTier.maxScore;
  5931. return candidate;
  5932. }, undefined);
  5933. return {
  5934. codecSet,
  5935. videoRanges,
  5936. preferHDR,
  5937. minFramerate,
  5938. minBitrate
  5939. };
  5940. }
  5941. function logStartCodecCandidateIgnored(codeSet, reason) {
  5942. logger.log(`[abr] start candidates with "${codeSet}" ignored because ${reason}`);
  5943. }
  5944. function getAudioTracksByGroup(allAudioTracks) {
  5945. return allAudioTracks.reduce((audioTracksByGroup, track) => {
  5946. let trackGroup = audioTracksByGroup.groups[track.groupId];
  5947. if (!trackGroup) {
  5948. trackGroup = audioTracksByGroup.groups[track.groupId] = {
  5949. tracks: [],
  5950. channels: {
  5951. 2: 0
  5952. },
  5953. hasDefault: false,
  5954. hasAutoSelect: false
  5955. };
  5956. }
  5957. trackGroup.tracks.push(track);
  5958. const channelsKey = track.channels || '2';
  5959. trackGroup.channels[channelsKey] = (trackGroup.channels[channelsKey] || 0) + 1;
  5960. trackGroup.hasDefault = trackGroup.hasDefault || track.default;
  5961. trackGroup.hasAutoSelect = trackGroup.hasAutoSelect || track.autoselect;
  5962. if (trackGroup.hasDefault) {
  5963. audioTracksByGroup.hasDefaultAudio = true;
  5964. }
  5965. if (trackGroup.hasAutoSelect) {
  5966. audioTracksByGroup.hasAutoSelectAudio = true;
  5967. }
  5968. return audioTracksByGroup;
  5969. }, {
  5970. hasDefaultAudio: false,
  5971. hasAutoSelectAudio: false,
  5972. groups: {}
  5973. });
  5974. }
  5975. function getCodecTiers(levels, audioTracksByGroup, minAutoLevel, maxAutoLevel) {
  5976. return levels.slice(minAutoLevel, maxAutoLevel + 1).reduce((tiers, level) => {
  5977. if (!level.codecSet) {
  5978. return tiers;
  5979. }
  5980. const audioGroups = level.audioGroups;
  5981. let tier = tiers[level.codecSet];
  5982. if (!tier) {
  5983. tiers[level.codecSet] = tier = {
  5984. minBitrate: Infinity,
  5985. minHeight: Infinity,
  5986. minFramerate: Infinity,
  5987. maxScore: 0,
  5988. videoRanges: {
  5989. SDR: 0
  5990. },
  5991. channels: {
  5992. '2': 0
  5993. },
  5994. hasDefaultAudio: !audioGroups,
  5995. fragmentError: 0
  5996. };
  5997. }
  5998. tier.minBitrate = Math.min(tier.minBitrate, level.bitrate);
  5999. const lesserWidthOrHeight = Math.min(level.height, level.width);
  6000. tier.minHeight = Math.min(tier.minHeight, lesserWidthOrHeight);
  6001. tier.minFramerate = Math.min(tier.minFramerate, level.frameRate);
  6002. tier.maxScore = Math.max(tier.maxScore, level.score);
  6003. tier.fragmentError += level.fragmentError;
  6004. tier.videoRanges[level.videoRange] = (tier.videoRanges[level.videoRange] || 0) + 1;
  6005. return tiers;
  6006. }, {});
  6007. }
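/*
 * Illustrative shape of the returned map, keyed by codec set (key and values are examples only):
 * {
 *   'avc1,mp4a': { minBitrate: 800000, minHeight: 360, minFramerate: 30, maxScore: 0,
 *                  videoRanges: { SDR: 4 }, channels: { '2': 0 },
 *                  hasDefaultAudio: true, fragmentError: 0 }
 * }
 */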
  6008. class AbrController {
  6009. constructor(_hls) {
  6010. this.hls = void 0;
  6011. this.lastLevelLoadSec = 0;
  6012. this.lastLoadedFragLevel = -1;
  6013. this.firstSelection = -1;
  6014. this._nextAutoLevel = -1;
  6015. this.nextAutoLevelKey = '';
  6016. this.audioTracksByGroup = null;
  6017. this.codecTiers = null;
  6018. this.timer = -1;
  6019. this.fragCurrent = null;
  6020. this.partCurrent = null;
  6021. this.bitrateTestDelay = 0;
  6022. this.bwEstimator = void 0;
  6023. /*
  6024. This method monitors the download rate of the current fragment, and will downswitch if that fragment will not load
  6025. quickly enough to prevent underbuffering
  6026. */
  6027. this._abandonRulesCheck = () => {
  6028. const {
  6029. fragCurrent: frag,
  6030. partCurrent: part,
  6031. hls
  6032. } = this;
  6033. const {
  6034. autoLevelEnabled,
  6035. media
  6036. } = hls;
  6037. if (!frag || !media) {
  6038. return;
  6039. }
  6040. const now = performance.now();
  6041. const stats = part ? part.stats : frag.stats;
  6042. const duration = part ? part.duration : frag.duration;
  6043. const timeLoading = now - stats.loading.start;
  6044. const minAutoLevel = hls.minAutoLevel;
  6045. // If frag loading is aborted, complete, or from lowest level, stop timer and return
  6046. if (stats.aborted || stats.loaded && stats.loaded === stats.total || frag.level <= minAutoLevel) {
  6047. this.clearTimer();
  6048. // reset forced auto level value so that next level will be selected
  6049. this._nextAutoLevel = -1;
  6050. return;
  6051. }
  6052. // This check only runs if we're in ABR mode and actually playing
  6053. if (!autoLevelEnabled || media.paused || !media.playbackRate || !media.readyState) {
  6054. return;
  6055. }
  6056. const bufferInfo = hls.mainForwardBufferInfo;
  6057. if (bufferInfo === null) {
  6058. return;
  6059. }
  6060. const ttfbEstimate = this.bwEstimator.getEstimateTTFB();
  6061. const playbackRate = Math.abs(media.playbackRate);
  6062. // To maintain stable adaptive playback, only begin monitoring frag loading after half or more of its playback duration has passed
  6063. if (timeLoading <= Math.max(ttfbEstimate, 1000 * (duration / (playbackRate * 2)))) {
  6064. return;
  6065. }
6066. // bufferStarvationDelay is an estimate of the amount of time (in seconds) it will take to exhaust the buffer
  6067. const bufferStarvationDelay = bufferInfo.len / playbackRate;
  6068. const ttfb = stats.loading.first ? stats.loading.first - stats.loading.start : -1;
  6069. const loadedFirstByte = stats.loaded && ttfb > -1;
  6070. const bwEstimate = this.getBwEstimate();
  6071. const levels = hls.levels;
  6072. const level = levels[frag.level];
  6073. const expectedLen = stats.total || Math.max(stats.loaded, Math.round(duration * level.averageBitrate / 8));
  6074. let timeStreaming = loadedFirstByte ? timeLoading - ttfb : timeLoading;
  6075. if (timeStreaming < 1 && loadedFirstByte) {
  6076. timeStreaming = Math.min(timeLoading, stats.loaded * 8 / bwEstimate);
  6077. }
  6078. const loadRate = loadedFirstByte ? stats.loaded * 1000 / timeStreaming : 0;
6079. // fragLoadedDelay is an estimate of the time (in seconds) it will take to buffer the remainder of the fragment
  6080. const fragLoadedDelay = loadRate ? (expectedLen - stats.loaded) / loadRate : expectedLen * 8 / bwEstimate + ttfbEstimate / 1000;
  6081. // Only downswitch if the time to finish loading the current fragment is greater than the amount of buffer left
  6082. if (fragLoadedDelay <= bufferStarvationDelay) {
  6083. return;
  6084. }
  6085. const bwe = loadRate ? loadRate * 8 : bwEstimate;
  6086. let fragLevelNextLoadedDelay = Number.POSITIVE_INFINITY;
  6087. let nextLoadLevel;
6088. // Iterate through lower levels and try to find the highest one that avoids rebuffering
  6089. for (nextLoadLevel = frag.level - 1; nextLoadLevel > minAutoLevel; nextLoadLevel--) {
  6090. // compute time to load next fragment at lower level
  6091. // 8 = bits per byte (bps/Bps)
  6092. const levelNextBitrate = levels[nextLoadLevel].maxBitrate;
  6093. fragLevelNextLoadedDelay = this.getTimeToLoadFrag(ttfbEstimate / 1000, bwe, duration * levelNextBitrate, !levels[nextLoadLevel].details);
  6094. if (fragLevelNextLoadedDelay < bufferStarvationDelay) {
  6095. break;
  6096. }
  6097. }
  6098. // Only emergency switch down if it takes less time to load a new fragment at lowest level instead of continuing
  6099. // to load the current one
  6100. if (fragLevelNextLoadedDelay >= fragLoadedDelay) {
  6101. return;
  6102. }
  6103. // if estimated load time of new segment is completely unreasonable, ignore and do not emergency switch down
  6104. if (fragLevelNextLoadedDelay > duration * 10) {
  6105. return;
  6106. }
  6107. hls.nextLoadLevel = hls.nextAutoLevel = nextLoadLevel;
  6108. if (loadedFirstByte) {
  6109. // If there has been loading progress, sample bandwidth using loading time offset by minimum TTFB time
  6110. this.bwEstimator.sample(timeLoading - Math.min(ttfbEstimate, ttfb), stats.loaded);
  6111. } else {
  6112. // If there has been no loading progress, sample TTFB
  6113. this.bwEstimator.sampleTTFB(timeLoading);
  6114. }
  6115. const nextLoadLevelBitrate = levels[nextLoadLevel].maxBitrate;
  6116. if (this.getBwEstimate() * this.hls.config.abrBandWidthUpFactor > nextLoadLevelBitrate) {
  6117. this.resetEstimator(nextLoadLevelBitrate);
  6118. }
  6119. this.clearTimer();
  6120. logger.warn(`[abr] Fragment ${frag.sn}${part ? ' part ' + part.index : ''} of level ${frag.level} is loading too slowly;
  6121. Time to underbuffer: ${bufferStarvationDelay.toFixed(3)} s
  6122. Estimated load time for current fragment: ${fragLoadedDelay.toFixed(3)} s
  6123. Estimated load time for down switch fragment: ${fragLevelNextLoadedDelay.toFixed(3)} s
  6124. TTFB estimate: ${ttfb | 0} ms
  6125. Current BW estimate: ${isFiniteNumber(bwEstimate) ? bwEstimate | 0 : 'Unknown'} bps
  6126. New BW estimate: ${this.getBwEstimate() | 0} bps
  6127. Switching to level ${nextLoadLevel} @ ${nextLoadLevelBitrate | 0} bps`);
  6128. hls.trigger(Events.FRAG_LOAD_EMERGENCY_ABORTED, {
  6129. frag,
  6130. part,
  6131. stats
  6132. });
  6133. };
  6134. this.hls = _hls;
  6135. this.bwEstimator = this.initEstimator();
  6136. this.registerListeners();
  6137. }
  6138. resetEstimator(abrEwmaDefaultEstimate) {
  6139. if (abrEwmaDefaultEstimate) {
  6140. logger.log(`setting initial bwe to ${abrEwmaDefaultEstimate}`);
  6141. this.hls.config.abrEwmaDefaultEstimate = abrEwmaDefaultEstimate;
  6142. }
  6143. this.firstSelection = -1;
  6144. this.bwEstimator = this.initEstimator();
  6145. }
  6146. initEstimator() {
  6147. const config = this.hls.config;
  6148. return new EwmaBandWidthEstimator(config.abrEwmaSlowVoD, config.abrEwmaFastVoD, config.abrEwmaDefaultEstimate);
  6149. }
  6150. registerListeners() {
  6151. const {
  6152. hls
  6153. } = this;
  6154. hls.on(Events.MANIFEST_LOADING, this.onManifestLoading, this);
  6155. hls.on(Events.FRAG_LOADING, this.onFragLoading, this);
  6156. hls.on(Events.FRAG_LOADED, this.onFragLoaded, this);
  6157. hls.on(Events.FRAG_BUFFERED, this.onFragBuffered, this);
  6158. hls.on(Events.LEVEL_SWITCHING, this.onLevelSwitching, this);
  6159. hls.on(Events.LEVEL_LOADED, this.onLevelLoaded, this);
  6160. hls.on(Events.LEVELS_UPDATED, this.onLevelsUpdated, this);
  6161. hls.on(Events.MAX_AUTO_LEVEL_UPDATED, this.onMaxAutoLevelUpdated, this);
  6162. hls.on(Events.ERROR, this.onError, this);
  6163. }
  6164. unregisterListeners() {
  6165. const {
  6166. hls
  6167. } = this;
  6168. if (!hls) {
  6169. return;
  6170. }
  6171. hls.off(Events.MANIFEST_LOADING, this.onManifestLoading, this);
  6172. hls.off(Events.FRAG_LOADING, this.onFragLoading, this);
  6173. hls.off(Events.FRAG_LOADED, this.onFragLoaded, this);
  6174. hls.off(Events.FRAG_BUFFERED, this.onFragBuffered, this);
  6175. hls.off(Events.LEVEL_SWITCHING, this.onLevelSwitching, this);
  6176. hls.off(Events.LEVEL_LOADED, this.onLevelLoaded, this);
  6177. hls.off(Events.LEVELS_UPDATED, this.onLevelsUpdated, this);
  6178. hls.off(Events.MAX_AUTO_LEVEL_UPDATED, this.onMaxAutoLevelUpdated, this);
  6179. hls.off(Events.ERROR, this.onError, this);
  6180. }
  6181. destroy() {
  6182. this.unregisterListeners();
  6183. this.clearTimer();
  6184. // @ts-ignore
  6185. this.hls = this._abandonRulesCheck = null;
  6186. this.fragCurrent = this.partCurrent = null;
  6187. }
  6188. onManifestLoading(event, data) {
  6189. this.lastLoadedFragLevel = -1;
  6190. this.firstSelection = -1;
  6191. this.lastLevelLoadSec = 0;
  6192. this.fragCurrent = this.partCurrent = null;
  6193. this.onLevelsUpdated();
  6194. this.clearTimer();
  6195. }
  6196. onLevelsUpdated() {
  6197. if (this.lastLoadedFragLevel > -1 && this.fragCurrent) {
  6198. this.lastLoadedFragLevel = this.fragCurrent.level;
  6199. }
  6200. this._nextAutoLevel = -1;
  6201. this.onMaxAutoLevelUpdated();
  6202. this.codecTiers = null;
  6203. this.audioTracksByGroup = null;
  6204. }
  6205. onMaxAutoLevelUpdated() {
  6206. this.firstSelection = -1;
  6207. this.nextAutoLevelKey = '';
  6208. }
  6209. onFragLoading(event, data) {
  6210. const frag = data.frag;
  6211. if (this.ignoreFragment(frag)) {
  6212. return;
  6213. }
  6214. if (!frag.bitrateTest) {
  6215. var _data$part;
  6216. this.fragCurrent = frag;
  6217. this.partCurrent = (_data$part = data.part) != null ? _data$part : null;
  6218. }
  6219. this.clearTimer();
  6220. this.timer = self.setInterval(this._abandonRulesCheck, 100);
  6221. }
  6222. onLevelSwitching(event, data) {
  6223. this.clearTimer();
  6224. }
  6225. onError(event, data) {
  6226. if (data.fatal) {
  6227. return;
  6228. }
  6229. switch (data.details) {
  6230. case ErrorDetails.BUFFER_ADD_CODEC_ERROR:
  6231. case ErrorDetails.BUFFER_APPEND_ERROR:
  6232. // Reset last loaded level so that a new selection can be made after calling recoverMediaError
  6233. this.lastLoadedFragLevel = -1;
  6234. this.firstSelection = -1;
  6235. break;
  6236. case ErrorDetails.FRAG_LOAD_TIMEOUT:
  6237. {
  6238. const frag = data.frag;
  6239. const {
  6240. fragCurrent,
  6241. partCurrent: part
  6242. } = this;
  6243. if (frag && fragCurrent && frag.sn === fragCurrent.sn && frag.level === fragCurrent.level) {
  6244. const now = performance.now();
  6245. const stats = part ? part.stats : frag.stats;
  6246. const timeLoading = now - stats.loading.start;
  6247. const ttfb = stats.loading.first ? stats.loading.first - stats.loading.start : -1;
  6248. const loadedFirstByte = stats.loaded && ttfb > -1;
  6249. if (loadedFirstByte) {
  6250. const ttfbEstimate = this.bwEstimator.getEstimateTTFB();
  6251. this.bwEstimator.sample(timeLoading - Math.min(ttfbEstimate, ttfb), stats.loaded);
  6252. } else {
  6253. this.bwEstimator.sampleTTFB(timeLoading);
  6254. }
  6255. }
  6256. break;
  6257. }
  6258. }
  6259. }
  6260. getTimeToLoadFrag(timeToFirstByteSec, bandwidth, fragSizeBits, isSwitch) {
  6261. const fragLoadSec = timeToFirstByteSec + fragSizeBits / bandwidth;
  6262. const playlistLoadSec = isSwitch ? this.lastLevelLoadSec : 0;
  6263. return fragLoadSec + playlistLoadSec;
  6264. }
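/*
 * Worked example (illustrative numbers): with a 0.1s TTFB estimate, a 4 Mbps bandwidth estimate
 * and a 6s fragment at 2 Mbps (12,000,000 bits), the load estimate is
 * 0.1 + 12000000 / 4000000 = 3.1s, plus lastLevelLoadSec when a level switch means the new
 * level playlist must be fetched first.
 */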
  6265. onLevelLoaded(event, data) {
  6266. const config = this.hls.config;
  6267. const {
  6268. loading
  6269. } = data.stats;
  6270. const timeLoadingMs = loading.end - loading.start;
  6271. if (isFiniteNumber(timeLoadingMs)) {
  6272. this.lastLevelLoadSec = timeLoadingMs / 1000;
  6273. }
  6274. if (data.details.live) {
  6275. this.bwEstimator.update(config.abrEwmaSlowLive, config.abrEwmaFastLive);
  6276. } else {
  6277. this.bwEstimator.update(config.abrEwmaSlowVoD, config.abrEwmaFastVoD);
  6278. }
  6279. }
  6280. onFragLoaded(event, {
  6281. frag,
  6282. part
  6283. }) {
  6284. const stats = part ? part.stats : frag.stats;
  6285. if (frag.type === PlaylistLevelType.MAIN) {
  6286. this.bwEstimator.sampleTTFB(stats.loading.first - stats.loading.start);
  6287. }
  6288. if (this.ignoreFragment(frag)) {
  6289. return;
  6290. }
  6291. // stop monitoring bw once frag loaded
  6292. this.clearTimer();
  6293. // reset forced auto level value so that next level will be selected
  6294. if (frag.level === this._nextAutoLevel) {
  6295. this._nextAutoLevel = -1;
  6296. }
  6297. this.firstSelection = -1;
  6298. // compute level average bitrate
  6299. if (this.hls.config.abrMaxWithRealBitrate) {
  6300. const duration = part ? part.duration : frag.duration;
  6301. const level = this.hls.levels[frag.level];
  6302. const loadedBytes = (level.loaded ? level.loaded.bytes : 0) + stats.loaded;
  6303. const loadedDuration = (level.loaded ? level.loaded.duration : 0) + duration;
  6304. level.loaded = {
  6305. bytes: loadedBytes,
  6306. duration: loadedDuration
  6307. };
  6308. level.realBitrate = Math.round(8 * loadedBytes / loadedDuration);
  6309. }
  6310. if (frag.bitrateTest) {
  6311. const fragBufferedData = {
  6312. stats,
  6313. frag,
  6314. part,
  6315. id: frag.type
  6316. };
  6317. this.onFragBuffered(Events.FRAG_BUFFERED, fragBufferedData);
  6318. frag.bitrateTest = false;
  6319. } else {
  6320. // store level id after successful fragment load for playback
  6321. this.lastLoadedFragLevel = frag.level;
  6322. }
  6323. }
  6324. onFragBuffered(event, data) {
  6325. const {
  6326. frag,
  6327. part
  6328. } = data;
  6329. const stats = part != null && part.stats.loaded ? part.stats : frag.stats;
  6330. if (stats.aborted) {
  6331. return;
  6332. }
  6333. if (this.ignoreFragment(frag)) {
  6334. return;
  6335. }
  6336. // Use the difference between parsing and request instead of buffering and request to compute fragLoadingProcessing;
  6337. // rationale is that buffer appending only happens once media is attached. This can happen when config.startFragPrefetch
6338. // is used. If we used buffering in that case, our BW estimate sample would be very large.
  6339. const processingMs = stats.parsing.end - stats.loading.start - Math.min(stats.loading.first - stats.loading.start, this.bwEstimator.getEstimateTTFB());
  6340. this.bwEstimator.sample(processingMs, stats.loaded);
  6341. stats.bwEstimate = this.getBwEstimate();
  6342. if (frag.bitrateTest) {
  6343. this.bitrateTestDelay = processingMs / 1000;
  6344. } else {
  6345. this.bitrateTestDelay = 0;
  6346. }
  6347. }
  6348. ignoreFragment(frag) {
  6349. // Only count non-alt-audio frags which were actually buffered in our BW calculations
  6350. return frag.type !== PlaylistLevelType.MAIN || frag.sn === 'initSegment';
  6351. }
  6352. clearTimer() {
  6353. if (this.timer > -1) {
  6354. self.clearInterval(this.timer);
  6355. this.timer = -1;
  6356. }
  6357. }
  6358. get firstAutoLevel() {
  6359. const {
  6360. maxAutoLevel,
  6361. minAutoLevel
  6362. } = this.hls;
  6363. const bwEstimate = this.getBwEstimate();
  6364. const maxStartDelay = this.hls.config.maxStarvationDelay;
  6365. const abrAutoLevel = this.findBestLevel(bwEstimate, minAutoLevel, maxAutoLevel, 0, maxStartDelay, 1, 1);
  6366. if (abrAutoLevel > -1) {
  6367. return abrAutoLevel;
  6368. }
  6369. const firstLevel = this.hls.firstLevel;
  6370. const clamped = Math.min(Math.max(firstLevel, minAutoLevel), maxAutoLevel);
  6371. logger.warn(`[abr] Could not find best starting auto level. Defaulting to first in playlist ${firstLevel} clamped to ${clamped}`);
  6372. return clamped;
  6373. }
  6374. get forcedAutoLevel() {
  6375. if (this.nextAutoLevelKey) {
  6376. return -1;
  6377. }
  6378. return this._nextAutoLevel;
  6379. }
  6380. // return next auto level
  6381. get nextAutoLevel() {
  6382. const forcedAutoLevel = this.forcedAutoLevel;
  6383. const bwEstimator = this.bwEstimator;
  6384. const useEstimate = bwEstimator.canEstimate();
  6385. const loadedFirstFrag = this.lastLoadedFragLevel > -1;
  6386. // in case next auto level has been forced, and bw not available or not reliable, return forced value
  6387. if (forcedAutoLevel !== -1 && (!useEstimate || !loadedFirstFrag || this.nextAutoLevelKey === this.getAutoLevelKey())) {
  6388. return forcedAutoLevel;
  6389. }
  6390. // compute next level using ABR logic
  6391. const nextABRAutoLevel = useEstimate && loadedFirstFrag ? this.getNextABRAutoLevel() : this.firstAutoLevel;
  6392. // use forced auto level while it hasn't errored more than ABR selection
  6393. if (forcedAutoLevel !== -1) {
  6394. const levels = this.hls.levels;
  6395. if (levels.length > Math.max(forcedAutoLevel, nextABRAutoLevel) && levels[forcedAutoLevel].loadError <= levels[nextABRAutoLevel].loadError) {
  6396. return forcedAutoLevel;
  6397. }
  6398. }
  6399. // save result until state has changed
  6400. this._nextAutoLevel = nextABRAutoLevel;
  6401. this.nextAutoLevelKey = this.getAutoLevelKey();
  6402. return nextABRAutoLevel;
  6403. }
  6404. getAutoLevelKey() {
  6405. return `${this.getBwEstimate()}_${this.getStarvationDelay().toFixed(2)}`;
  6406. }
  6407. getNextABRAutoLevel() {
  6408. const {
  6409. fragCurrent,
  6410. partCurrent,
  6411. hls
  6412. } = this;
  6413. const {
  6414. maxAutoLevel,
  6415. config,
  6416. minAutoLevel
  6417. } = hls;
  6418. const currentFragDuration = partCurrent ? partCurrent.duration : fragCurrent ? fragCurrent.duration : 0;
  6419. const avgbw = this.getBwEstimate();
  6420. // bufferStarvationDelay is the wall-clock time left until the playback buffer is exhausted.
  6421. const bufferStarvationDelay = this.getStarvationDelay();
  6422. let bwFactor = config.abrBandWidthFactor;
  6423. let bwUpFactor = config.abrBandWidthUpFactor;
  6424. // First, look to see if we can find a level matching with our avg bandwidth AND that could also guarantee no rebuffering at all
  6425. if (bufferStarvationDelay) {
  6426. const _bestLevel = this.findBestLevel(avgbw, minAutoLevel, maxAutoLevel, bufferStarvationDelay, 0, bwFactor, bwUpFactor);
  6427. if (_bestLevel >= 0) {
  6428. return _bestLevel;
  6429. }
  6430. }
  6431. // not possible to get rid of rebuffering... try to find level that will guarantee less than maxStarvationDelay of rebuffering
  6432. let maxStarvationDelay = currentFragDuration ? Math.min(currentFragDuration, config.maxStarvationDelay) : config.maxStarvationDelay;
  6433. if (!bufferStarvationDelay) {
  6434. // in case buffer is empty, let's check if previous fragment was loaded to perform a bitrate test
  6435. const bitrateTestDelay = this.bitrateTestDelay;
  6436. if (bitrateTestDelay) {
// if so, we need to adjust our max starvation delay using the maxLoadingDelay config value
// maxLoadingDelay is the max video loading delay used in automatic start level selection:
// in that mode the ABR controller ensures that the video loading time (i.e. the time to fetch the first fragment
// at the lowest quality level + the time to fetch the fragment at the appropriate quality level) is less than maxLoadingDelay
// cap maxLoadingDelay and ensure it is not bigger than the 'bitrate test' frag duration
  6442. const maxLoadingDelay = currentFragDuration ? Math.min(currentFragDuration, config.maxLoadingDelay) : config.maxLoadingDelay;
  6443. maxStarvationDelay = maxLoadingDelay - bitrateTestDelay;
  6444. logger.info(`[abr] bitrate test took ${Math.round(1000 * bitrateTestDelay)}ms, set first fragment max fetchDuration to ${Math.round(1000 * maxStarvationDelay)} ms`);
  6445. // don't use conservative factor on bitrate test
  6446. bwFactor = bwUpFactor = 1;
  6447. }
  6448. }
  6449. const bestLevel = this.findBestLevel(avgbw, minAutoLevel, maxAutoLevel, bufferStarvationDelay, maxStarvationDelay, bwFactor, bwUpFactor);
  6450. logger.info(`[abr] ${bufferStarvationDelay ? 'rebuffering expected' : 'buffer is empty'}, optimal quality level ${bestLevel}`);
  6451. if (bestLevel > -1) {
  6452. return bestLevel;
  6453. }
  6454. // If no matching level found, see if min auto level would be a better option
  6455. const minLevel = hls.levels[minAutoLevel];
  6456. const autoLevel = hls.levels[hls.loadLevel];
  6457. if ((minLevel == null ? void 0 : minLevel.bitrate) < (autoLevel == null ? void 0 : autoLevel.bitrate)) {
  6458. return minAutoLevel;
  6459. }
  6460. // or if bitrate is not lower, continue to use loadLevel
  6461. return hls.loadLevel;
  6462. }
  6463. getStarvationDelay() {
  6464. const hls = this.hls;
  6465. const media = hls.media;
  6466. if (!media) {
  6467. return Infinity;
  6468. }
  6469. // playbackRate is the absolute value of the playback rate; if media.playbackRate is 0, we use 1 to load as
  6470. // if we're playing back at the normal rate.
  6471. const playbackRate = media && media.playbackRate !== 0 ? Math.abs(media.playbackRate) : 1.0;
  6472. const bufferInfo = hls.mainForwardBufferInfo;
  6473. return (bufferInfo ? bufferInfo.len : 0) / playbackRate;
  6474. }
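// Worked example (illustrative, not part of hls.js): with 12 s of forward buffer
// (hls.mainForwardBufferInfo.len === 12) and media.playbackRate === 1.5, the starvation
// delay returned above is 12 / 1.5 = 8 seconds of wall-clock time before the buffer runs dry.
// If media.playbackRate is 0, a rate of 1 is assumed, as noted in the comment above.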
  6475. getBwEstimate() {
  6476. return this.bwEstimator.canEstimate() ? this.bwEstimator.getEstimate() : this.hls.config.abrEwmaDefaultEstimate;
  6477. }
  6478. findBestLevel(currentBw, minAutoLevel, maxAutoLevel, bufferStarvationDelay, maxStarvationDelay, bwFactor, bwUpFactor) {
  6479. var _level$details;
  6480. const maxFetchDuration = bufferStarvationDelay + maxStarvationDelay;
  6481. const lastLoadedFragLevel = this.lastLoadedFragLevel;
  6482. const selectionBaseLevel = lastLoadedFragLevel === -1 ? this.hls.firstLevel : lastLoadedFragLevel;
  6483. const {
  6484. fragCurrent,
  6485. partCurrent
  6486. } = this;
  6487. const {
  6488. levels,
  6489. allAudioTracks,
  6490. loadLevel,
  6491. config
  6492. } = this.hls;
  6493. if (levels.length === 1) {
  6494. return 0;
  6495. }
  6496. const level = levels[selectionBaseLevel];
  6497. const live = !!(level != null && (_level$details = level.details) != null && _level$details.live);
  6498. const firstSelection = loadLevel === -1 || lastLoadedFragLevel === -1;
  6499. let currentCodecSet;
  6500. let currentVideoRange = 'SDR';
  6501. let currentFrameRate = (level == null ? void 0 : level.frameRate) || 0;
  6502. const {
  6503. audioPreference,
  6504. videoPreference
  6505. } = config;
  6506. const audioTracksByGroup = this.audioTracksByGroup || (this.audioTracksByGroup = getAudioTracksByGroup(allAudioTracks));
  6507. if (firstSelection) {
  6508. if (this.firstSelection !== -1) {
  6509. return this.firstSelection;
  6510. }
  6511. const codecTiers = this.codecTiers || (this.codecTiers = getCodecTiers(levels, audioTracksByGroup, minAutoLevel, maxAutoLevel));
  6512. const startTier = getStartCodecTier(codecTiers, currentVideoRange, currentBw, audioPreference, videoPreference);
  6513. const {
  6514. codecSet,
  6515. videoRanges,
  6516. minFramerate,
  6517. minBitrate,
  6518. preferHDR
  6519. } = startTier;
  6520. currentCodecSet = codecSet;
  6521. currentVideoRange = preferHDR ? videoRanges[videoRanges.length - 1] : videoRanges[0];
  6522. currentFrameRate = minFramerate;
  6523. currentBw = Math.max(currentBw, minBitrate);
  6524. logger.log(`[abr] picked start tier ${JSON.stringify(startTier)}`);
  6525. } else {
  6526. currentCodecSet = level == null ? void 0 : level.codecSet;
  6527. currentVideoRange = level == null ? void 0 : level.videoRange;
  6528. }
  6529. const currentFragDuration = partCurrent ? partCurrent.duration : fragCurrent ? fragCurrent.duration : 0;
  6530. const ttfbEstimateSec = this.bwEstimator.getEstimateTTFB() / 1000;
  6531. const levelsSkipped = [];
  6532. for (let i = maxAutoLevel; i >= minAutoLevel; i--) {
  6533. var _levelInfo$supportedR;
  6534. const levelInfo = levels[i];
  6535. const upSwitch = i > selectionBaseLevel;
  6536. if (!levelInfo) {
  6537. continue;
  6538. }
// skip candidates which change codec-family or video-range,
// and which decrease or increase frame-rate for up- and down-switches respectively
  6541. if (currentCodecSet && levelInfo.codecSet !== currentCodecSet || currentVideoRange && levelInfo.videoRange !== currentVideoRange || upSwitch && currentFrameRate > levelInfo.frameRate || !upSwitch && currentFrameRate > 0 && currentFrameRate < levelInfo.frameRate || levelInfo.supportedResult && !((_levelInfo$supportedR = levelInfo.supportedResult.decodingInfoResults) != null && _levelInfo$supportedR[0].smooth)) {
  6542. levelsSkipped.push(i);
  6543. continue;
  6544. }
  6545. const levelDetails = levelInfo.details;
  6546. const avgDuration = (partCurrent ? levelDetails == null ? void 0 : levelDetails.partTarget : levelDetails == null ? void 0 : levelDetails.averagetargetduration) || currentFragDuration;
  6547. let adjustedbw;
  6548. // follow algorithm captured from stagefright :
  6549. // https://android.googlesource.com/platform/frameworks/av/+/master/media/libstagefright/httplive/LiveSession.cpp
  6550. // Pick the highest bandwidth stream below or equal to estimated bandwidth.
  6551. // consider only 80% of the available bandwidth, but if we are switching up,
  6552. // be even more conservative (70%) to avoid overestimating and immediately
  6553. // switching back.
  6554. if (!upSwitch) {
  6555. adjustedbw = bwFactor * currentBw;
  6556. } else {
  6557. adjustedbw = bwUpFactor * currentBw;
  6558. }
// Use the average bitrate when the starvation delay (buffer length) is at least two segment durations and rebuffering is not expected (maxStarvationDelay === 0)
  6560. const bitrate = currentFragDuration && bufferStarvationDelay >= currentFragDuration * 2 && maxStarvationDelay === 0 ? levels[i].averageBitrate : levels[i].maxBitrate;
  6561. const fetchDuration = this.getTimeToLoadFrag(ttfbEstimateSec, adjustedbw, bitrate * avgDuration, levelDetails === undefined);
  6562. const canSwitchWithinTolerance =
  6563. // if adjusted bw is greater than level bitrate AND
  6564. adjustedbw >= bitrate && (
  6565. // no level change, or new level has no error history
  6566. i === lastLoadedFragLevel || levelInfo.loadError === 0 && levelInfo.fragmentError === 0) && (
  6567. // fragment fetchDuration unknown OR live stream OR fragment fetchDuration less than max allowed fetch duration, then this level matches
  6568. // we don't account for max Fetch Duration for live streams, this is to avoid switching down when near the edge of live sliding window ...
  6569. // special case to support startLevel = -1 (bitrateTest) on live streams : in that case we should not exit loop so that findBestLevel will return -1
  6570. fetchDuration <= ttfbEstimateSec || !isFiniteNumber(fetchDuration) || live && !this.bitrateTestDelay || fetchDuration < maxFetchDuration);
  6571. if (canSwitchWithinTolerance) {
  6572. const forcedAutoLevel = this.forcedAutoLevel;
  6573. if (i !== loadLevel && (forcedAutoLevel === -1 || forcedAutoLevel !== loadLevel)) {
  6574. if (levelsSkipped.length) {
  6575. logger.trace(`[abr] Skipped level(s) ${levelsSkipped.join(',')} of ${maxAutoLevel} max with CODECS and VIDEO-RANGE:"${levels[levelsSkipped[0]].codecs}" ${levels[levelsSkipped[0]].videoRange}; not compatible with "${level.codecs}" ${currentVideoRange}`);
  6576. }
  6577. logger.info(`[abr] switch candidate:${selectionBaseLevel}->${i} adjustedbw(${Math.round(adjustedbw)})-bitrate=${Math.round(adjustedbw - bitrate)} ttfb:${ttfbEstimateSec.toFixed(1)} avgDuration:${avgDuration.toFixed(1)} maxFetchDuration:${maxFetchDuration.toFixed(1)} fetchDuration:${fetchDuration.toFixed(1)} firstSelection:${firstSelection} codecSet:${currentCodecSet} videoRange:${currentVideoRange} hls.loadLevel:${loadLevel}`);
  6578. }
  6579. if (firstSelection) {
  6580. this.firstSelection = i;
  6581. }
  6582. // as we are looping from highest to lowest, this will return the best achievable quality level
  6583. return i;
  6584. }
  6585. }
  6586. // not enough time budget even with quality level 0 ... rebuffering might happen
  6587. return -1;
  6588. }
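// Worked example (illustrative numbers, not derived from a specific stream): with an estimate of
// 5 Mbps, a down-switch candidate is compared against adjustedbw = bwFactor * 5 (e.g. 0.8 * 5 = 4 Mbps),
// while an up-switch uses the more conservative bwUpFactor (e.g. 0.7 * 5 = 3.5 Mbps). For a 4 s
// segment at 3 Mbps, fetchDuration is roughly ttfb + (3 * 4) / 4 = 0.1 + 3.0 = 3.1 s, which must stay
// below maxFetchDuration (bufferStarvationDelay + maxStarvationDelay), with adjustedbw also at or
// above the level bitrate, for that level to be returned.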
  6589. set nextAutoLevel(nextLevel) {
  6590. const {
  6591. maxAutoLevel,
  6592. minAutoLevel
  6593. } = this.hls;
  6594. const value = Math.min(Math.max(nextLevel, minAutoLevel), maxAutoLevel);
  6595. if (this._nextAutoLevel !== value) {
  6596. this.nextAutoLevelKey = '';
  6597. this._nextAutoLevel = value;
  6598. }
  6599. }
  6600. }
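// Illustrative sketch (not part of hls.js): forcing the next auto level through the setter above.
// The value is clamped to [minAutoLevel, maxAutoLevel] and the saved key is cleared, so the next
// read of `nextAutoLevel` re-evaluates ABR state. Names below are assumptions for the example.
function forceNextAutoLevelExample(abrController, levelIndex) {
  // uses the `set nextAutoLevel` accessor defined above; out-of-range values are clamped
  abrController.nextAutoLevel = levelIndex;
  // reading the getter afterwards returns either the forced level (while it has not errored
  // more than the ABR selection) or a freshly computed ABR level
  return abrController.nextAutoLevel;
}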
  6601. /**
  6602. * Provides methods dealing with buffer length retrieval for example.
  6603. *
  6604. * In general, a helper around HTML5 MediaElement TimeRanges gathered from `buffered` property.
  6605. *
  6606. * Also @see https://developer.mozilla.org/en-US/docs/Web/API/HTMLMediaElement/buffered
  6607. */
  6608. const noopBuffered = {
  6609. length: 0,
  6610. start: () => 0,
  6611. end: () => 0
  6612. };
  6613. class BufferHelper {
  6614. /**
  6615. * Return true if `media`'s buffered include `position`
  6616. */
  6617. static isBuffered(media, position) {
  6618. try {
  6619. if (media) {
  6620. const buffered = BufferHelper.getBuffered(media);
  6621. for (let i = 0; i < buffered.length; i++) {
  6622. if (position >= buffered.start(i) && position <= buffered.end(i)) {
  6623. return true;
  6624. }
  6625. }
  6626. }
  6627. } catch (error) {
  6628. // this is to catch
  6629. // InvalidStateError: Failed to read the 'buffered' property from 'SourceBuffer':
  6630. // This SourceBuffer has been removed from the parent media source
  6631. }
  6632. return false;
  6633. }
  6634. static bufferInfo(media, pos, maxHoleDuration) {
  6635. try {
  6636. if (media) {
  6637. const vbuffered = BufferHelper.getBuffered(media);
  6638. const buffered = [];
  6639. let i;
  6640. for (i = 0; i < vbuffered.length; i++) {
  6641. buffered.push({
  6642. start: vbuffered.start(i),
  6643. end: vbuffered.end(i)
  6644. });
  6645. }
  6646. return this.bufferedInfo(buffered, pos, maxHoleDuration);
  6647. }
  6648. } catch (error) {
  6649. // this is to catch
  6650. // InvalidStateError: Failed to read the 'buffered' property from 'SourceBuffer':
  6651. // This SourceBuffer has been removed from the parent media source
  6652. }
  6653. return {
  6654. len: 0,
  6655. start: pos,
  6656. end: pos,
  6657. nextStart: undefined
  6658. };
  6659. }
  6660. static bufferedInfo(buffered, pos, maxHoleDuration) {
  6661. pos = Math.max(0, pos);
  6662. // sort on buffer.start/smaller end (IE does not always return sorted buffered range)
  6663. buffered.sort(function (a, b) {
  6664. const diff = a.start - b.start;
  6665. if (diff) {
  6666. return diff;
  6667. } else {
  6668. return b.end - a.end;
  6669. }
  6670. });
  6671. let buffered2 = [];
  6672. if (maxHoleDuration) {
// there might be some small holes between buffered time ranges
// consider that holes smaller than maxHoleDuration are irrelevant and build another
// buffered time range representation that discards those holes
  6676. for (let i = 0; i < buffered.length; i++) {
  6677. const buf2len = buffered2.length;
  6678. if (buf2len) {
  6679. const buf2end = buffered2[buf2len - 1].end;
// if small hole (value between 0 and maxHoleDuration) or overlapping (negative)
  6681. if (buffered[i].start - buf2end < maxHoleDuration) {
  6682. // merge overlapping time ranges
  6683. // update lastRange.end only if smaller than item.end
  6684. // e.g. [ 1, 15] with [ 2,8] => [ 1,15] (no need to modify lastRange.end)
  6685. // whereas [ 1, 8] with [ 2,15] => [ 1,15] ( lastRange should switch from [1,8] to [1,15])
  6686. if (buffered[i].end > buf2end) {
  6687. buffered2[buf2len - 1].end = buffered[i].end;
  6688. }
  6689. } else {
  6690. // big hole
  6691. buffered2.push(buffered[i]);
  6692. }
  6693. } else {
  6694. // first value
  6695. buffered2.push(buffered[i]);
  6696. }
  6697. }
  6698. } else {
  6699. buffered2 = buffered;
  6700. }
  6701. let bufferLen = 0;
  6702. // bufferStartNext can possibly be undefined based on the conditional logic below
  6703. let bufferStartNext;
  6704. // bufferStart and bufferEnd are buffer boundaries around current video position
  6705. let bufferStart = pos;
  6706. let bufferEnd = pos;
  6707. for (let i = 0; i < buffered2.length; i++) {
  6708. const start = buffered2[i].start;
  6709. const end = buffered2[i].end;
  6710. // logger.log('buf start/end:' + buffered.start(i) + '/' + buffered.end(i));
  6711. if (pos + maxHoleDuration >= start && pos < end) {
  6712. // play position is inside this buffer TimeRange, retrieve end of buffer position and buffer length
  6713. bufferStart = start;
  6714. bufferEnd = end;
  6715. bufferLen = bufferEnd - pos;
  6716. } else if (pos + maxHoleDuration < start) {
  6717. bufferStartNext = start;
  6718. break;
  6719. }
  6720. }
  6721. return {
  6722. len: bufferLen,
  6723. start: bufferStart || 0,
  6724. end: bufferEnd || 0,
  6725. nextStart: bufferStartNext
  6726. };
  6727. }
  6728. /**
  6729. * Safe method to get buffered property.
* SourceBuffer.buffered may throw if the SourceBuffer is removed from its MediaSource
  6731. */
  6732. static getBuffered(media) {
  6733. try {
  6734. return media.buffered;
  6735. } catch (e) {
  6736. logger.log('failed to get media.buffered', e);
  6737. return noopBuffered;
  6738. }
  6739. }
  6740. }
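// Illustrative sketch (not part of hls.js): how BufferHelper is typically consulted. Given a media
// element, `bufferInfo` returns the contiguous buffered length ahead of `pos`, tolerating gaps
// smaller than `maxHoleDuration`. The function name and 0.5 s tolerance below are assumptions.
function logForwardBuffer(videoElement) {
  const pos = videoElement.currentTime;
  const info = BufferHelper.bufferInfo(videoElement, pos, 0.5);
  // info.len: seconds buffered ahead of pos; info.nextStart: start of the next range, if any
  console.log(`buffered ${info.len.toFixed(2)}s ahead of ${pos.toFixed(2)}`, info.nextStart);
  return info;
}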
  6741. class BufferOperationQueue {
  6742. constructor(sourceBufferReference) {
  6743. this.buffers = void 0;
  6744. this.queues = {
  6745. video: [],
  6746. audio: [],
  6747. audiovideo: []
  6748. };
  6749. this.buffers = sourceBufferReference;
  6750. }
  6751. append(operation, type, pending) {
  6752. const queue = this.queues[type];
  6753. queue.push(operation);
  6754. if (queue.length === 1 && !pending) {
  6755. this.executeNext(type);
  6756. }
  6757. }
  6758. insertAbort(operation, type) {
  6759. const queue = this.queues[type];
  6760. queue.unshift(operation);
  6761. this.executeNext(type);
  6762. }
  6763. appendBlocker(type) {
  6764. let execute;
  6765. const promise = new Promise(resolve => {
  6766. execute = resolve;
  6767. });
  6768. const operation = {
  6769. execute,
  6770. onStart: () => {},
  6771. onComplete: () => {},
  6772. onError: () => {}
  6773. };
  6774. this.append(operation, type);
  6775. return promise;
  6776. }
  6777. executeNext(type) {
  6778. const queue = this.queues[type];
  6779. if (queue.length) {
  6780. const operation = queue[0];
  6781. try {
  6782. // Operations are expected to result in an 'updateend' event being fired. If not, the queue will lock. Operations
  6783. // which do not end with this event must call _onSBUpdateEnd manually
  6784. operation.execute();
  6785. } catch (error) {
  6786. logger.warn(`[buffer-operation-queue]: Exception executing "${type}" SourceBuffer operation: ${error}`);
  6787. operation.onError(error);
  6788. // Only shift the current operation off, otherwise the updateend handler will do this for us
  6789. const sb = this.buffers[type];
  6790. if (!(sb != null && sb.updating)) {
  6791. this.shiftAndExecuteNext(type);
  6792. }
  6793. }
  6794. }
  6795. }
  6796. shiftAndExecuteNext(type) {
  6797. this.queues[type].shift();
  6798. this.executeNext(type);
  6799. }
  6800. current(type) {
  6801. return this.queues[type][0];
  6802. }
  6803. }
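// Illustrative sketch (not part of hls.js): how BufferOperationQueue serializes SourceBuffer work.
// Each operation's `execute` is expected to end in an 'updateend' event, after which the controller
// calls `shiftAndExecuteNext` to run the next queued operation. The names below are assumptions.
function enqueueAppendExample(operationQueue, sourceBuffers, type, data) {
  operationQueue.append({
    execute: () => sourceBuffers[type].appendBuffer(data), // fires 'updateend' when done
    onStart: () => {},
    onComplete: () => {},
    onError: (error) => console.warn(`append to ${type} failed`, error)
  }, type);
}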
  6804. const VIDEO_CODEC_PROFILE_REPLACE = /(avc[1234]|hvc1|hev1|dvh[1e]|vp09|av01)(?:\.[^.,]+)+/;
  6805. class BufferController {
  6806. constructor(hls) {
  6807. // The level details used to determine duration, target-duration and live
  6808. this.details = null;
  6809. // cache the self generated object url to detect hijack of video tag
  6810. this._objectUrl = null;
  6811. // A queue of buffer operations which require the SourceBuffer to not be updating upon execution
  6812. this.operationQueue = void 0;
  6813. // References to event listeners for each SourceBuffer, so that they can be referenced for event removal
  6814. this.listeners = void 0;
  6815. this.hls = void 0;
  6816. // The number of BUFFER_CODEC events received before any sourceBuffers are created
  6817. this.bufferCodecEventsExpected = 0;
  6818. // The total number of BUFFER_CODEC events received
  6819. this._bufferCodecEventsTotal = 0;
  6820. // A reference to the attached media element
  6821. this.media = null;
  6822. // A reference to the active media source
  6823. this.mediaSource = null;
  6824. // Last MP3 audio chunk appended
  6825. this.lastMpegAudioChunk = null;
  6826. this.appendSource = void 0;
  6827. // counters
  6828. this.appendErrors = {
  6829. audio: 0,
  6830. video: 0,
  6831. audiovideo: 0
  6832. };
  6833. this.tracks = {};
  6834. this.pendingTracks = {};
  6835. this.sourceBuffer = void 0;
  6836. this.log = void 0;
  6837. this.warn = void 0;
  6838. this.error = void 0;
  6839. this._onEndStreaming = event => {
  6840. if (!this.hls) {
  6841. return;
  6842. }
  6843. this.hls.pauseBuffering();
  6844. };
  6845. this._onStartStreaming = event => {
  6846. if (!this.hls) {
  6847. return;
  6848. }
  6849. this.hls.resumeBuffering();
  6850. };
// Keep these as arrow functions so that they can be referenced directly as event listeners
  6852. this._onMediaSourceOpen = () => {
  6853. const {
  6854. media,
  6855. mediaSource
  6856. } = this;
  6857. this.log('Media source opened');
  6858. if (media) {
  6859. media.removeEventListener('emptied', this._onMediaEmptied);
  6860. this.updateMediaElementDuration();
  6861. this.hls.trigger(Events.MEDIA_ATTACHED, {
  6862. media,
  6863. mediaSource: mediaSource
  6864. });
  6865. }
  6866. if (mediaSource) {
  6867. // once received, don't listen anymore to sourceopen event
  6868. mediaSource.removeEventListener('sourceopen', this._onMediaSourceOpen);
  6869. }
  6870. this.checkPendingTracks();
  6871. };
  6872. this._onMediaSourceClose = () => {
  6873. this.log('Media source closed');
  6874. };
  6875. this._onMediaSourceEnded = () => {
  6876. this.log('Media source ended');
  6877. };
  6878. this._onMediaEmptied = () => {
  6879. const {
  6880. mediaSrc,
  6881. _objectUrl
  6882. } = this;
  6883. if (mediaSrc !== _objectUrl) {
  6884. logger.error(`Media element src was set while attaching MediaSource (${_objectUrl} > ${mediaSrc})`);
  6885. }
  6886. };
  6887. this.hls = hls;
  6888. const logPrefix = '[buffer-controller]';
  6889. this.appendSource = isManagedMediaSource(getMediaSource(hls.config.preferManagedMediaSource));
  6890. this.log = logger.log.bind(logger, logPrefix);
  6891. this.warn = logger.warn.bind(logger, logPrefix);
  6892. this.error = logger.error.bind(logger, logPrefix);
  6893. this._initSourceBuffer();
  6894. this.registerListeners();
  6895. }
  6896. hasSourceTypes() {
  6897. return this.getSourceBufferTypes().length > 0 || Object.keys(this.pendingTracks).length > 0;
  6898. }
  6899. destroy() {
  6900. this.unregisterListeners();
  6901. this.details = null;
  6902. this.lastMpegAudioChunk = null;
  6903. // @ts-ignore
  6904. this.hls = null;
  6905. }
  6906. registerListeners() {
  6907. const {
  6908. hls
  6909. } = this;
  6910. hls.on(Events.MEDIA_ATTACHING, this.onMediaAttaching, this);
  6911. hls.on(Events.MEDIA_DETACHING, this.onMediaDetaching, this);
  6912. hls.on(Events.MANIFEST_LOADING, this.onManifestLoading, this);
  6913. hls.on(Events.MANIFEST_PARSED, this.onManifestParsed, this);
  6914. hls.on(Events.BUFFER_RESET, this.onBufferReset, this);
  6915. hls.on(Events.BUFFER_APPENDING, this.onBufferAppending, this);
  6916. hls.on(Events.BUFFER_CODECS, this.onBufferCodecs, this);
  6917. hls.on(Events.BUFFER_EOS, this.onBufferEos, this);
  6918. hls.on(Events.BUFFER_FLUSHING, this.onBufferFlushing, this);
  6919. hls.on(Events.LEVEL_UPDATED, this.onLevelUpdated, this);
  6920. hls.on(Events.FRAG_PARSED, this.onFragParsed, this);
  6921. hls.on(Events.FRAG_CHANGED, this.onFragChanged, this);
  6922. }
  6923. unregisterListeners() {
  6924. const {
  6925. hls
  6926. } = this;
  6927. hls.off(Events.MEDIA_ATTACHING, this.onMediaAttaching, this);
  6928. hls.off(Events.MEDIA_DETACHING, this.onMediaDetaching, this);
  6929. hls.off(Events.MANIFEST_LOADING, this.onManifestLoading, this);
  6930. hls.off(Events.MANIFEST_PARSED, this.onManifestParsed, this);
  6931. hls.off(Events.BUFFER_RESET, this.onBufferReset, this);
  6932. hls.off(Events.BUFFER_APPENDING, this.onBufferAppending, this);
  6933. hls.off(Events.BUFFER_CODECS, this.onBufferCodecs, this);
  6934. hls.off(Events.BUFFER_EOS, this.onBufferEos, this);
  6935. hls.off(Events.BUFFER_FLUSHING, this.onBufferFlushing, this);
  6936. hls.off(Events.LEVEL_UPDATED, this.onLevelUpdated, this);
  6937. hls.off(Events.FRAG_PARSED, this.onFragParsed, this);
  6938. hls.off(Events.FRAG_CHANGED, this.onFragChanged, this);
  6939. }
  6940. _initSourceBuffer() {
  6941. this.sourceBuffer = {};
  6942. this.operationQueue = new BufferOperationQueue(this.sourceBuffer);
  6943. this.listeners = {
  6944. audio: [],
  6945. video: [],
  6946. audiovideo: []
  6947. };
  6948. this.appendErrors = {
  6949. audio: 0,
  6950. video: 0,
  6951. audiovideo: 0
  6952. };
  6953. this.lastMpegAudioChunk = null;
  6954. }
  6955. onManifestLoading() {
  6956. this.bufferCodecEventsExpected = this._bufferCodecEventsTotal = 0;
  6957. this.details = null;
  6958. }
  6959. onManifestParsed(event, data) {
// when alt audio is used, 2 BUFFER_CODECS events will be triggered, one per stream controller;
// source buffers will then be created all at once, when the expected number of tracks has been reached
// when alt audio is not used, only one BUFFER_CODECS event will be fired from the main stream controller;
// it will contain the expected number of source buffers, so there is no need to compute it
  6964. let codecEvents = 2;
  6965. if (data.audio && !data.video || !data.altAudio || !false) {
  6966. codecEvents = 1;
  6967. }
  6968. this.bufferCodecEventsExpected = this._bufferCodecEventsTotal = codecEvents;
  6969. this.log(`${this.bufferCodecEventsExpected} bufferCodec event(s) expected`);
  6970. }
  6971. onMediaAttaching(event, data) {
  6972. const media = this.media = data.media;
  6973. const MediaSource = getMediaSource(this.appendSource);
  6974. if (media && MediaSource) {
  6975. var _ms$constructor;
  6976. const ms = this.mediaSource = new MediaSource();
  6977. this.log(`created media source: ${(_ms$constructor = ms.constructor) == null ? void 0 : _ms$constructor.name}`);
  6978. // MediaSource listeners are arrow functions with a lexical scope, and do not need to be bound
  6979. ms.addEventListener('sourceopen', this._onMediaSourceOpen);
  6980. ms.addEventListener('sourceended', this._onMediaSourceEnded);
  6981. ms.addEventListener('sourceclose', this._onMediaSourceClose);
  6982. if (this.appendSource) {
  6983. ms.addEventListener('startstreaming', this._onStartStreaming);
  6984. ms.addEventListener('endstreaming', this._onEndStreaming);
  6985. }
  6986. // cache the locally generated object url
  6987. const objectUrl = this._objectUrl = self.URL.createObjectURL(ms);
  6988. // link video and media Source
  6989. if (this.appendSource) {
  6990. try {
  6991. media.removeAttribute('src');
  6992. // ManagedMediaSource will not open without disableRemotePlayback set to false or source alternatives
  6993. const MMS = self.ManagedMediaSource;
  6994. media.disableRemotePlayback = media.disableRemotePlayback || MMS && ms instanceof MMS;
  6995. removeSourceChildren(media);
  6996. addSource(media, objectUrl);
  6997. media.load();
  6998. } catch (error) {
  6999. media.src = objectUrl;
  7000. }
  7001. } else {
  7002. media.src = objectUrl;
  7003. }
  7004. media.addEventListener('emptied', this._onMediaEmptied);
  7005. }
  7006. }
  7007. onMediaDetaching() {
  7008. const {
  7009. media,
  7010. mediaSource,
  7011. _objectUrl
  7012. } = this;
  7013. if (mediaSource) {
  7014. this.log('media source detaching');
  7015. if (mediaSource.readyState === 'open') {
  7016. try {
  7017. // endOfStream could trigger exception if any sourcebuffer is in updating state
  7018. // we don't really care about checking sourcebuffer state here,
  7019. // as we are anyway detaching the MediaSource
  7020. // let's just avoid this exception to propagate
  7021. mediaSource.endOfStream();
  7022. } catch (err) {
  7023. this.warn(`onMediaDetaching: ${err.message} while calling endOfStream`);
  7024. }
  7025. }
  7026. // Clean up the SourceBuffers by invoking onBufferReset
  7027. this.onBufferReset();
  7028. mediaSource.removeEventListener('sourceopen', this._onMediaSourceOpen);
  7029. mediaSource.removeEventListener('sourceended', this._onMediaSourceEnded);
  7030. mediaSource.removeEventListener('sourceclose', this._onMediaSourceClose);
  7031. if (this.appendSource) {
  7032. mediaSource.removeEventListener('startstreaming', this._onStartStreaming);
  7033. mediaSource.removeEventListener('endstreaming', this._onEndStreaming);
  7034. }
  7035. // Detach properly the MediaSource from the HTMLMediaElement as
  7036. // suggested in https://github.com/w3c/media-source/issues/53.
  7037. if (media) {
  7038. media.removeEventListener('emptied', this._onMediaEmptied);
  7039. if (_objectUrl) {
  7040. self.URL.revokeObjectURL(_objectUrl);
  7041. }
  7042. // clean up video tag src only if it's our own url. some external libraries might
  7043. // hijack the video tag and change its 'src' without destroying the Hls instance first
  7044. if (this.mediaSrc === _objectUrl) {
  7045. media.removeAttribute('src');
  7046. if (this.appendSource) {
  7047. removeSourceChildren(media);
  7048. }
  7049. media.load();
  7050. } else {
  7051. this.warn('media|source.src was changed by a third party - skip cleanup');
  7052. }
  7053. }
  7054. this.mediaSource = null;
  7055. this.media = null;
  7056. this._objectUrl = null;
  7057. this.bufferCodecEventsExpected = this._bufferCodecEventsTotal;
  7058. this.pendingTracks = {};
  7059. this.tracks = {};
  7060. }
  7061. this.hls.trigger(Events.MEDIA_DETACHED, undefined);
  7062. }
  7063. onBufferReset() {
  7064. this.getSourceBufferTypes().forEach(type => {
  7065. this.resetBuffer(type);
  7066. });
  7067. this._initSourceBuffer();
  7068. }
  7069. resetBuffer(type) {
  7070. const sb = this.sourceBuffer[type];
  7071. try {
  7072. if (sb) {
  7073. var _this$mediaSource;
  7074. this.removeBufferListeners(type);
  7075. // Synchronously remove the SB from the map before the next call in order to prevent an async function from
  7076. // accessing it
  7077. this.sourceBuffer[type] = undefined;
  7078. if ((_this$mediaSource = this.mediaSource) != null && _this$mediaSource.sourceBuffers.length) {
  7079. this.mediaSource.removeSourceBuffer(sb);
  7080. }
  7081. }
  7082. } catch (err) {
  7083. this.warn(`onBufferReset ${type}`, err);
  7084. }
  7085. }
  7086. onBufferCodecs(event, data) {
  7087. const sourceBufferCount = this.getSourceBufferTypes().length;
  7088. const trackNames = Object.keys(data);
  7089. trackNames.forEach(trackName => {
  7090. if (sourceBufferCount) {
  7091. // check if SourceBuffer codec needs to change
  7092. const track = this.tracks[trackName];
  7093. if (track && typeof track.buffer.changeType === 'function') {
  7094. var _trackCodec;
  7095. const {
  7096. id,
  7097. codec,
  7098. levelCodec,
  7099. container,
  7100. metadata
  7101. } = data[trackName];
  7102. const currentCodecFull = pickMostCompleteCodecName(track.codec, track.levelCodec);
  7103. const currentCodec = currentCodecFull == null ? void 0 : currentCodecFull.replace(VIDEO_CODEC_PROFILE_REPLACE, '$1');
  7104. let trackCodec = pickMostCompleteCodecName(codec, levelCodec);
  7105. const nextCodec = (_trackCodec = trackCodec) == null ? void 0 : _trackCodec.replace(VIDEO_CODEC_PROFILE_REPLACE, '$1');
  7106. if (trackCodec && currentCodec !== nextCodec) {
  7107. if (trackName.slice(0, 5) === 'audio') {
  7108. trackCodec = getCodecCompatibleName(trackCodec, this.appendSource);
  7109. }
  7110. const mimeType = `${container};codecs=${trackCodec}`;
  7111. this.appendChangeType(trackName, mimeType);
  7112. this.log(`switching codec ${currentCodecFull} to ${trackCodec}`);
  7113. this.tracks[trackName] = {
  7114. buffer: track.buffer,
  7115. codec,
  7116. container,
  7117. levelCodec,
  7118. metadata,
  7119. id
  7120. };
  7121. }
  7122. }
  7123. } else {
// if source buffer(s) are not created yet, store the track data in this.pendingTracks
  7125. this.pendingTracks[trackName] = data[trackName];
  7126. }
  7127. });
  7128. // if sourcebuffers already created, do nothing ...
  7129. if (sourceBufferCount) {
  7130. return;
  7131. }
  7132. const bufferCodecEventsExpected = Math.max(this.bufferCodecEventsExpected - 1, 0);
  7133. if (this.bufferCodecEventsExpected !== bufferCodecEventsExpected) {
  7134. this.log(`${bufferCodecEventsExpected} bufferCodec event(s) expected ${trackNames.join(',')}`);
  7135. this.bufferCodecEventsExpected = bufferCodecEventsExpected;
  7136. }
  7137. if (this.mediaSource && this.mediaSource.readyState === 'open') {
  7138. this.checkPendingTracks();
  7139. }
  7140. }
  7141. appendChangeType(type, mimeType) {
  7142. const {
  7143. operationQueue
  7144. } = this;
  7145. const operation = {
  7146. execute: () => {
  7147. const sb = this.sourceBuffer[type];
  7148. if (sb) {
  7149. this.log(`changing ${type} sourceBuffer type to ${mimeType}`);
  7150. sb.changeType(mimeType);
  7151. }
  7152. operationQueue.shiftAndExecuteNext(type);
  7153. },
  7154. onStart: () => {},
  7155. onComplete: () => {},
  7156. onError: error => {
  7157. this.warn(`Failed to change ${type} SourceBuffer type`, error);
  7158. }
  7159. };
  7160. operationQueue.append(operation, type, !!this.pendingTracks[type]);
  7161. }
  7162. onBufferAppending(event, eventData) {
  7163. const {
  7164. hls,
  7165. operationQueue,
  7166. tracks
  7167. } = this;
  7168. const {
  7169. data,
  7170. type,
  7171. frag,
  7172. part,
  7173. chunkMeta
  7174. } = eventData;
  7175. const chunkStats = chunkMeta.buffering[type];
  7176. const bufferAppendingStart = self.performance.now();
  7177. chunkStats.start = bufferAppendingStart;
  7178. const fragBuffering = frag.stats.buffering;
  7179. const partBuffering = part ? part.stats.buffering : null;
  7180. if (fragBuffering.start === 0) {
  7181. fragBuffering.start = bufferAppendingStart;
  7182. }
  7183. if (partBuffering && partBuffering.start === 0) {
  7184. partBuffering.start = bufferAppendingStart;
  7185. }
  7186. // TODO: Only update timestampOffset when audio/mpeg fragment or part is not contiguous with previously appended
  7187. // Adjusting `SourceBuffer.timestampOffset` (desired point in the timeline where the next frames should be appended)
  7188. // in Chrome browser when we detect MPEG audio container and time delta between level PTS and `SourceBuffer.timestampOffset`
  7189. // is greater than 100ms (this is enough to handle seek for VOD or level change for LIVE videos).
  7190. // More info here: https://github.com/video-dev/hls.js/issues/332#issuecomment-257986486
  7191. const audioTrack = tracks.audio;
  7192. let checkTimestampOffset = false;
  7193. if (type === 'audio' && (audioTrack == null ? void 0 : audioTrack.container) === 'audio/mpeg') {
  7194. checkTimestampOffset = !this.lastMpegAudioChunk || chunkMeta.id === 1 || this.lastMpegAudioChunk.sn !== chunkMeta.sn;
  7195. this.lastMpegAudioChunk = chunkMeta;
  7196. }
  7197. const fragStart = frag.start;
  7198. const operation = {
  7199. execute: () => {
  7200. chunkStats.executeStart = self.performance.now();
  7201. if (checkTimestampOffset) {
  7202. const sb = this.sourceBuffer[type];
  7203. if (sb) {
  7204. const delta = fragStart - sb.timestampOffset;
  7205. if (Math.abs(delta) >= 0.1) {
  7206. this.log(`Updating audio SourceBuffer timestampOffset to ${fragStart} (delta: ${delta}) sn: ${frag.sn})`);
  7207. sb.timestampOffset = fragStart;
  7208. }
  7209. }
  7210. }
  7211. this.appendExecutor(data, type);
  7212. },
  7213. onStart: () => {
  7214. // logger.debug(`[buffer-controller]: ${type} SourceBuffer updatestart`);
  7215. },
  7216. onComplete: () => {
  7217. // logger.debug(`[buffer-controller]: ${type} SourceBuffer updateend`);
  7218. const end = self.performance.now();
  7219. chunkStats.executeEnd = chunkStats.end = end;
  7220. if (fragBuffering.first === 0) {
  7221. fragBuffering.first = end;
  7222. }
  7223. if (partBuffering && partBuffering.first === 0) {
  7224. partBuffering.first = end;
  7225. }
  7226. const {
  7227. sourceBuffer
  7228. } = this;
  7229. const timeRanges = {};
  7230. for (const type in sourceBuffer) {
  7231. timeRanges[type] = BufferHelper.getBuffered(sourceBuffer[type]);
  7232. }
  7233. this.appendErrors[type] = 0;
  7234. if (type === 'audio' || type === 'video') {
  7235. this.appendErrors.audiovideo = 0;
  7236. } else {
  7237. this.appendErrors.audio = 0;
  7238. this.appendErrors.video = 0;
  7239. }
  7240. this.hls.trigger(Events.BUFFER_APPENDED, {
  7241. type,
  7242. frag,
  7243. part,
  7244. chunkMeta,
  7245. parent: frag.type,
  7246. timeRanges
  7247. });
  7248. },
  7249. onError: error => {
// in case any error occurred while appending, put the segment back in the segments table
  7251. const event = {
  7252. type: ErrorTypes.MEDIA_ERROR,
  7253. parent: frag.type,
  7254. details: ErrorDetails.BUFFER_APPEND_ERROR,
  7255. sourceBufferName: type,
  7256. frag,
  7257. part,
  7258. chunkMeta,
  7259. error,
  7260. err: error,
  7261. fatal: false
  7262. };
  7263. if (error.code === DOMException.QUOTA_EXCEEDED_ERR) {
  7264. // QuotaExceededError: http://www.w3.org/TR/html5/infrastructure.html#quotaexceedederror
  7265. // let's stop appending any segments, and report BUFFER_FULL_ERROR error
  7266. event.details = ErrorDetails.BUFFER_FULL_ERROR;
  7267. } else {
  7268. const appendErrorCount = ++this.appendErrors[type];
  7269. event.details = ErrorDetails.BUFFER_APPEND_ERROR;
/* with UHD content, we could get a loop of quota-exceeded errors until the
browser is able to evict some data from the SourceBuffer. Retrying can help recover.
*/
  7273. this.warn(`Failed ${appendErrorCount}/${hls.config.appendErrorMaxRetry} times to append segment in "${type}" sourceBuffer`);
  7274. if (appendErrorCount >= hls.config.appendErrorMaxRetry) {
  7275. event.fatal = true;
  7276. }
  7277. }
  7278. hls.trigger(Events.ERROR, event);
  7279. }
  7280. };
  7281. operationQueue.append(operation, type, !!this.pendingTracks[type]);
  7282. }
  7283. onBufferFlushing(event, data) {
  7284. const {
  7285. operationQueue
  7286. } = this;
  7287. const flushOperation = type => ({
  7288. execute: this.removeExecutor.bind(this, type, data.startOffset, data.endOffset),
  7289. onStart: () => {
  7290. // logger.debug(`[buffer-controller]: Started flushing ${data.startOffset} -> ${data.endOffset} for ${type} Source Buffer`);
  7291. },
  7292. onComplete: () => {
  7293. // logger.debug(`[buffer-controller]: Finished flushing ${data.startOffset} -> ${data.endOffset} for ${type} Source Buffer`);
  7294. this.hls.trigger(Events.BUFFER_FLUSHED, {
  7295. type
  7296. });
  7297. },
  7298. onError: error => {
  7299. this.warn(`Failed to remove from ${type} SourceBuffer`, error);
  7300. }
  7301. });
  7302. if (data.type) {
  7303. operationQueue.append(flushOperation(data.type), data.type);
  7304. } else {
  7305. this.getSourceBufferTypes().forEach(type => {
  7306. operationQueue.append(flushOperation(type), type);
  7307. });
  7308. }
  7309. }
  7310. onFragParsed(event, data) {
  7311. const {
  7312. frag,
  7313. part
  7314. } = data;
  7315. const buffersAppendedTo = [];
  7316. const elementaryStreams = part ? part.elementaryStreams : frag.elementaryStreams;
  7317. if (elementaryStreams[ElementaryStreamTypes.AUDIOVIDEO]) {
  7318. buffersAppendedTo.push('audiovideo');
  7319. } else {
  7320. if (elementaryStreams[ElementaryStreamTypes.AUDIO]) {
  7321. buffersAppendedTo.push('audio');
  7322. }
  7323. if (elementaryStreams[ElementaryStreamTypes.VIDEO]) {
  7324. buffersAppendedTo.push('video');
  7325. }
  7326. }
  7327. const onUnblocked = () => {
  7328. const now = self.performance.now();
  7329. frag.stats.buffering.end = now;
  7330. if (part) {
  7331. part.stats.buffering.end = now;
  7332. }
  7333. const stats = part ? part.stats : frag.stats;
  7334. this.hls.trigger(Events.FRAG_BUFFERED, {
  7335. frag,
  7336. part,
  7337. stats,
  7338. id: frag.type
  7339. });
  7340. };
  7341. if (buffersAppendedTo.length === 0) {
  7342. this.warn(`Fragments must have at least one ElementaryStreamType set. type: ${frag.type} level: ${frag.level} sn: ${frag.sn}`);
  7343. }
  7344. this.blockBuffers(onUnblocked, buffersAppendedTo);
  7345. }
  7346. onFragChanged(event, data) {
  7347. this.trimBuffers();
  7348. }
  7349. // on BUFFER_EOS mark matching sourcebuffer(s) as ended and trigger checkEos()
  7350. // an undefined data.type will mark all buffers as EOS.
  7351. onBufferEos(event, data) {
  7352. const ended = this.getSourceBufferTypes().reduce((acc, type) => {
  7353. const sb = this.sourceBuffer[type];
  7354. if (sb && (!data.type || data.type === type)) {
  7355. sb.ending = true;
  7356. if (!sb.ended) {
  7357. sb.ended = true;
  7358. this.log(`${type} sourceBuffer now EOS`);
  7359. }
  7360. }
  7361. return acc && !!(!sb || sb.ended);
  7362. }, true);
  7363. if (ended) {
  7364. this.log(`Queueing mediaSource.endOfStream()`);
  7365. this.blockBuffers(() => {
  7366. this.getSourceBufferTypes().forEach(type => {
  7367. const sb = this.sourceBuffer[type];
  7368. if (sb) {
  7369. sb.ending = false;
  7370. }
  7371. });
  7372. const {
  7373. mediaSource
  7374. } = this;
  7375. if (!mediaSource || mediaSource.readyState !== 'open') {
  7376. if (mediaSource) {
  7377. this.log(`Could not call mediaSource.endOfStream(). mediaSource.readyState: ${mediaSource.readyState}`);
  7378. }
  7379. return;
  7380. }
  7381. this.log(`Calling mediaSource.endOfStream()`);
  7382. // Allow this to throw and be caught by the enqueueing function
  7383. mediaSource.endOfStream();
  7384. });
  7385. }
  7386. }
  7387. onLevelUpdated(event, {
  7388. details
  7389. }) {
  7390. if (!details.fragments.length) {
  7391. return;
  7392. }
  7393. this.details = details;
  7394. if (this.getSourceBufferTypes().length) {
  7395. this.blockBuffers(this.updateMediaElementDuration.bind(this));
  7396. } else {
  7397. this.updateMediaElementDuration();
  7398. }
  7399. }
  7400. trimBuffers() {
  7401. const {
  7402. hls,
  7403. details,
  7404. media
  7405. } = this;
  7406. if (!media || details === null) {
  7407. return;
  7408. }
  7409. const sourceBufferTypes = this.getSourceBufferTypes();
  7410. if (!sourceBufferTypes.length) {
  7411. return;
  7412. }
  7413. const config = hls.config;
  7414. const currentTime = media.currentTime;
  7415. const targetDuration = details.levelTargetDuration;
  7416. // Support for deprecated liveBackBufferLength
  7417. const backBufferLength = details.live && config.liveBackBufferLength !== null ? config.liveBackBufferLength : config.backBufferLength;
  7418. if (isFiniteNumber(backBufferLength) && backBufferLength > 0) {
  7419. const maxBackBufferLength = Math.max(backBufferLength, targetDuration);
  7420. const targetBackBufferPosition = Math.floor(currentTime / targetDuration) * targetDuration - maxBackBufferLength;
  7421. this.flushBackBuffer(currentTime, targetDuration, targetBackBufferPosition);
  7422. }
  7423. if (isFiniteNumber(config.frontBufferFlushThreshold) && config.frontBufferFlushThreshold > 0) {
  7424. const frontBufferLength = Math.max(config.maxBufferLength, config.frontBufferFlushThreshold);
  7425. const maxFrontBufferLength = Math.max(frontBufferLength, targetDuration);
  7426. const targetFrontBufferPosition = Math.floor(currentTime / targetDuration) * targetDuration + maxFrontBufferLength;
  7427. this.flushFrontBuffer(currentTime, targetDuration, targetFrontBufferPosition);
  7428. }
  7429. }
  7430. flushBackBuffer(currentTime, targetDuration, targetBackBufferPosition) {
  7431. const {
  7432. details,
  7433. sourceBuffer
  7434. } = this;
  7435. const sourceBufferTypes = this.getSourceBufferTypes();
  7436. sourceBufferTypes.forEach(type => {
  7437. const sb = sourceBuffer[type];
  7438. if (sb) {
  7439. const buffered = BufferHelper.getBuffered(sb);
  7440. // when target buffer start exceeds actual buffer start
  7441. if (buffered.length > 0 && targetBackBufferPosition > buffered.start(0)) {
  7442. this.hls.trigger(Events.BACK_BUFFER_REACHED, {
  7443. bufferEnd: targetBackBufferPosition
  7444. });
  7445. // Support for deprecated event:
  7446. if (details != null && details.live) {
  7447. this.hls.trigger(Events.LIVE_BACK_BUFFER_REACHED, {
  7448. bufferEnd: targetBackBufferPosition
  7449. });
  7450. } else if (sb.ended && buffered.end(buffered.length - 1) - currentTime < targetDuration * 2) {
  7451. this.log(`Cannot flush ${type} back buffer while SourceBuffer is in ended state`);
  7452. return;
  7453. }
  7454. this.hls.trigger(Events.BUFFER_FLUSHING, {
  7455. startOffset: 0,
  7456. endOffset: targetBackBufferPosition,
  7457. type
  7458. });
  7459. }
  7460. }
  7461. });
  7462. }
  7463. flushFrontBuffer(currentTime, targetDuration, targetFrontBufferPosition) {
  7464. const {
  7465. sourceBuffer
  7466. } = this;
  7467. const sourceBufferTypes = this.getSourceBufferTypes();
  7468. sourceBufferTypes.forEach(type => {
  7469. const sb = sourceBuffer[type];
  7470. if (sb) {
  7471. const buffered = BufferHelper.getBuffered(sb);
  7472. const numBufferedRanges = buffered.length;
  7473. // The buffer is either empty or contiguous
  7474. if (numBufferedRanges < 2) {
  7475. return;
  7476. }
  7477. const bufferStart = buffered.start(numBufferedRanges - 1);
  7478. const bufferEnd = buffered.end(numBufferedRanges - 1);
// Don't flush if we can tolerate the current buffer length, or if the buffer range we would flush is contiguous with the current position
  7480. if (targetFrontBufferPosition > bufferStart || currentTime >= bufferStart && currentTime <= bufferEnd) {
  7481. return;
  7482. } else if (sb.ended && currentTime - bufferEnd < 2 * targetDuration) {
  7483. this.log(`Cannot flush ${type} front buffer while SourceBuffer is in ended state`);
  7484. return;
  7485. }
  7486. this.hls.trigger(Events.BUFFER_FLUSHING, {
  7487. startOffset: bufferStart,
  7488. endOffset: Infinity,
  7489. type
  7490. });
  7491. }
  7492. });
  7493. }
  7494. /**
* Update Media Source duration to current level duration, or override it to Infinity if the configuration parameter
* `liveDurationInfinity` is set to `true`
  7497. * More details: https://github.com/video-dev/hls.js/issues/355
  7498. */
  7499. updateMediaElementDuration() {
  7500. if (!this.details || !this.media || !this.mediaSource || this.mediaSource.readyState !== 'open') {
  7501. return;
  7502. }
  7503. const {
  7504. details,
  7505. hls,
  7506. media,
  7507. mediaSource
  7508. } = this;
  7509. const levelDuration = details.fragments[0].start + details.totalduration;
  7510. const mediaDuration = media.duration;
  7511. const msDuration = isFiniteNumber(mediaSource.duration) ? mediaSource.duration : 0;
  7512. if (details.live && hls.config.liveDurationInfinity) {
  7513. // Override duration to Infinity
  7514. mediaSource.duration = Infinity;
  7515. this.updateSeekableRange(details);
  7516. } else if (levelDuration > msDuration && levelDuration > mediaDuration || !isFiniteNumber(mediaDuration)) {
// levelDuration was the last value we set.
// not using mediaSource.duration as the browser may tweak this value
// only update the Media Source duration if its value increases; this is to avoid
// flushing the already buffered portion when switching between quality levels
  7521. this.log(`Updating Media Source duration to ${levelDuration.toFixed(3)}`);
  7522. mediaSource.duration = levelDuration;
  7523. }
  7524. }
  7525. updateSeekableRange(levelDetails) {
  7526. const mediaSource = this.mediaSource;
  7527. const fragments = levelDetails.fragments;
  7528. const len = fragments.length;
  7529. if (len && levelDetails.live && mediaSource != null && mediaSource.setLiveSeekableRange) {
  7530. const start = Math.max(0, fragments[0].start);
  7531. const end = Math.max(start, start + levelDetails.totalduration);
  7532. this.log(`Media Source duration is set to ${mediaSource.duration}. Setting seekable range to ${start}-${end}.`);
  7533. mediaSource.setLiveSeekableRange(start, end);
  7534. }
  7535. }
  7536. checkPendingTracks() {
  7537. const {
  7538. bufferCodecEventsExpected,
  7539. operationQueue,
  7540. pendingTracks
  7541. } = this;
  7542. // Check if we've received all of the expected bufferCodec events. When none remain, create all the sourceBuffers at once.
  7543. // This is important because the MSE spec allows implementations to throw QuotaExceededErrors if creating new sourceBuffers after
  7544. // data has been appended to existing ones.
// 2 tracks is the max (one for audio, one for video). If we've reached this max, go ahead and create the buffers.
  7546. const pendingTracksCount = Object.keys(pendingTracks).length;
  7547. if (pendingTracksCount && (!bufferCodecEventsExpected || pendingTracksCount === 2 || 'audiovideo' in pendingTracks)) {
  7548. // ok, let's create them now !
  7549. this.createSourceBuffers(pendingTracks);
  7550. this.pendingTracks = {};
  7551. // append any pending segments now !
  7552. const buffers = this.getSourceBufferTypes();
  7553. if (buffers.length) {
  7554. this.hls.trigger(Events.BUFFER_CREATED, {
  7555. tracks: this.tracks
  7556. });
  7557. buffers.forEach(type => {
  7558. operationQueue.executeNext(type);
  7559. });
  7560. } else {
  7561. const error = new Error('could not create source buffer for media codec(s)');
  7562. this.hls.trigger(Events.ERROR, {
  7563. type: ErrorTypes.MEDIA_ERROR,
  7564. details: ErrorDetails.BUFFER_INCOMPATIBLE_CODECS_ERROR,
  7565. fatal: true,
  7566. error,
  7567. reason: error.message
  7568. });
  7569. }
  7570. }
  7571. }
  7572. createSourceBuffers(tracks) {
  7573. const {
  7574. sourceBuffer,
  7575. mediaSource
  7576. } = this;
  7577. if (!mediaSource) {
  7578. throw Error('createSourceBuffers called when mediaSource was null');
  7579. }
  7580. for (const trackName in tracks) {
  7581. if (!sourceBuffer[trackName]) {
  7582. var _track$levelCodec;
  7583. const track = tracks[trackName];
  7584. if (!track) {
  7585. throw Error(`source buffer exists for track ${trackName}, however track does not`);
  7586. }
  7587. // use levelCodec as first priority unless it contains multiple comma-separated codec values
  7588. let codec = ((_track$levelCodec = track.levelCodec) == null ? void 0 : _track$levelCodec.indexOf(',')) === -1 ? track.levelCodec : track.codec;
  7589. if (codec) {
  7590. if (trackName.slice(0, 5) === 'audio') {
  7591. codec = getCodecCompatibleName(codec, this.appendSource);
  7592. }
  7593. }
  7594. const mimeType = `${track.container};codecs=${codec}`;
  7595. this.log(`creating sourceBuffer(${mimeType})`);
  7596. try {
  7597. const sb = sourceBuffer[trackName] = mediaSource.addSourceBuffer(mimeType);
  7598. const sbName = trackName;
  7599. this.addBufferListener(sbName, 'updatestart', this._onSBUpdateStart);
  7600. this.addBufferListener(sbName, 'updateend', this._onSBUpdateEnd);
  7601. this.addBufferListener(sbName, 'error', this._onSBUpdateError);
  7602. // ManagedSourceBuffer bufferedchange event
  7603. if (this.appendSource) {
  7604. this.addBufferListener(sbName, 'bufferedchange', (type, event) => {
// If media was ejected, check for a change. Added ranges are redundant with changes handled on the 'updateend' event.
  7606. const removedRanges = event.removedRanges;
  7607. if (removedRanges != null && removedRanges.length) {
  7608. this.hls.trigger(Events.BUFFER_FLUSHED, {
  7609. type: trackName
  7610. });
  7611. }
  7612. });
  7613. }
  7614. this.tracks[trackName] = {
  7615. buffer: sb,
  7616. codec: codec,
  7617. container: track.container,
  7618. levelCodec: track.levelCodec,
  7619. metadata: track.metadata,
  7620. id: track.id
  7621. };
  7622. } catch (err) {
  7623. this.error(`error while trying to add sourceBuffer: ${err.message}`);
  7624. this.hls.trigger(Events.ERROR, {
  7625. type: ErrorTypes.MEDIA_ERROR,
  7626. details: ErrorDetails.BUFFER_ADD_CODEC_ERROR,
  7627. fatal: false,
  7628. error: err,
  7629. sourceBufferName: trackName,
  7630. mimeType: mimeType
  7631. });
  7632. }
  7633. }
  7634. }
  7635. }
  7636. get mediaSrc() {
  7637. var _this$media;
  7638. const media = ((_this$media = this.media) == null ? void 0 : _this$media.firstChild) || this.media;
  7639. return media == null ? void 0 : media.src;
  7640. }
  7641. _onSBUpdateStart(type) {
  7642. const {
  7643. operationQueue
  7644. } = this;
  7645. const operation = operationQueue.current(type);
  7646. operation.onStart();
  7647. }
  7648. _onSBUpdateEnd(type) {
  7649. var _this$mediaSource2;
  7650. if (((_this$mediaSource2 = this.mediaSource) == null ? void 0 : _this$mediaSource2.readyState) === 'closed') {
  7651. this.resetBuffer(type);
  7652. return;
  7653. }
  7654. const {
  7655. operationQueue
  7656. } = this;
  7657. const operation = operationQueue.current(type);
  7658. operation.onComplete();
  7659. operationQueue.shiftAndExecuteNext(type);
  7660. }
  7661. _onSBUpdateError(type, event) {
  7662. var _this$mediaSource3;
  7663. const error = new Error(`${type} SourceBuffer error. MediaSource readyState: ${(_this$mediaSource3 = this.mediaSource) == null ? void 0 : _this$mediaSource3.readyState}`);
  7664. this.error(`${error}`, event);
  7665. // according to http://www.w3.org/TR/media-source/#sourcebuffer-append-error
// SourceBuffer errors are not necessarily fatal; if they are, the HTMLMediaElement will fire an error event
  7667. this.hls.trigger(Events.ERROR, {
  7668. type: ErrorTypes.MEDIA_ERROR,
  7669. details: ErrorDetails.BUFFER_APPENDING_ERROR,
  7670. sourceBufferName: type,
  7671. error,
  7672. fatal: false
  7673. });
  7674. // updateend is always fired after error, so we'll allow that to shift the current operation off of the queue
  7675. const operation = this.operationQueue.current(type);
  7676. if (operation) {
  7677. operation.onError(error);
  7678. }
  7679. }
  7680. // This method must result in an updateend event; if remove is not called, _onSBUpdateEnd must be called manually
  7681. removeExecutor(type, startOffset, endOffset) {
  7682. const {
  7683. media,
  7684. mediaSource,
  7685. operationQueue,
  7686. sourceBuffer
  7687. } = this;
  7688. const sb = sourceBuffer[type];
  7689. if (!media || !mediaSource || !sb) {
  7690. this.warn(`Attempting to remove from the ${type} SourceBuffer, but it does not exist`);
  7691. operationQueue.shiftAndExecuteNext(type);
  7692. return;
  7693. }
  7694. const mediaDuration = isFiniteNumber(media.duration) ? media.duration : Infinity;
  7695. const msDuration = isFiniteNumber(mediaSource.duration) ? mediaSource.duration : Infinity;
  7696. const removeStart = Math.max(0, startOffset);
  7697. const removeEnd = Math.min(endOffset, mediaDuration, msDuration);
  7698. if (removeEnd > removeStart && (!sb.ending || sb.ended)) {
  7699. sb.ended = false;
  7700. this.log(`Removing [${removeStart},${removeEnd}] from the ${type} SourceBuffer`);
  7701. sb.remove(removeStart, removeEnd);
  7702. } else {
  7703. // Cycle the queue
  7704. operationQueue.shiftAndExecuteNext(type);
  7705. }
  7706. }
  7707. // This method must result in an updateend event; if append is not called, _onSBUpdateEnd must be called manually
  7708. appendExecutor(data, type) {
  7709. const sb = this.sourceBuffer[type];
  7710. if (!sb) {
  7711. if (!this.pendingTracks[type]) {
  7712. throw new Error(`Attempting to append to the ${type} SourceBuffer, but it does not exist`);
  7713. }
  7714. return;
  7715. }
  7716. sb.ended = false;
  7717. sb.appendBuffer(data);
  7718. }
  7719. // Enqueues an operation to each SourceBuffer queue which, upon execution, resolves a promise. When all promises
  7720. // resolve, the onUnblocked function is executed. Functions calling this method do not need to unblock the queue
  7721. // upon completion, since we already do it here
  7722. blockBuffers(onUnblocked, buffers = this.getSourceBufferTypes()) {
  7723. if (!buffers.length) {
  7724. this.log('Blocking operation requested, but no SourceBuffers exist');
  7725. Promise.resolve().then(onUnblocked);
  7726. return;
  7727. }
  7728. const {
  7729. operationQueue
  7730. } = this;
  7731. // logger.debug(`[buffer-controller]: Blocking ${buffers} SourceBuffer`);
  7732. const blockingOperations = buffers.map(type => operationQueue.appendBlocker(type));
  7733. Promise.all(blockingOperations).then(() => {
  7734. // logger.debug(`[buffer-controller]: Blocking operation resolved; unblocking ${buffers} SourceBuffer`);
  7735. onUnblocked();
  7736. buffers.forEach(type => {
  7737. const sb = this.sourceBuffer[type];
  7738. // Only cycle the queue if the SB is not updating. There's a bug in Chrome which sets the SB updating flag to
  7739. // true when changing the MediaSource duration (https://bugs.chromium.org/p/chromium/issues/detail?id=959359&can=2&q=mediasource%20duration)
  7740. // While this is a workaround, it's probably useful to have around
  7741. if (!(sb != null && sb.updating)) {
  7742. operationQueue.shiftAndExecuteNext(type);
  7743. }
  7744. });
  7745. });
  7746. }
  7747. getSourceBufferTypes() {
  7748. return Object.keys(this.sourceBuffer);
  7749. }
  7750. addBufferListener(type, event, fn) {
  7751. const buffer = this.sourceBuffer[type];
  7752. if (!buffer) {
  7753. return;
  7754. }
  7755. const listener = fn.bind(this, type);
  7756. this.listeners[type].push({
  7757. event,
  7758. listener
  7759. });
  7760. buffer.addEventListener(event, listener);
  7761. }
  7762. removeBufferListeners(type) {
  7763. const buffer = this.sourceBuffer[type];
  7764. if (!buffer) {
  7765. return;
  7766. }
  7767. this.listeners[type].forEach(l => {
  7768. buffer.removeEventListener(l.event, l.listener);
  7769. });
  7770. }
  7771. }
  7772. function removeSourceChildren(node) {
  7773. const sourceChildren = node.querySelectorAll('source');
  7774. [].slice.call(sourceChildren).forEach(source => {
  7775. node.removeChild(source);
  7776. });
  7777. }
  7778. function addSource(media, url) {
  7779. const source = self.document.createElement('source');
  7780. source.type = 'video/mp4';
  7781. source.src = url;
  7782. media.appendChild(source);
  7783. }
  7784. class CapLevelController {
  7785. constructor(hls) {
  7786. this.hls = void 0;
  7787. this.autoLevelCapping = void 0;
  7788. this.firstLevel = void 0;
  7789. this.media = void 0;
  7790. this.restrictedLevels = void 0;
  7791. this.timer = void 0;
  7792. this.clientRect = void 0;
  7793. this.streamController = void 0;
  7794. this.hls = hls;
  7795. this.autoLevelCapping = Number.POSITIVE_INFINITY;
  7796. this.firstLevel = -1;
  7797. this.media = null;
  7798. this.restrictedLevels = [];
  7799. this.timer = undefined;
  7800. this.clientRect = null;
  7801. this.registerListeners();
  7802. }
  7803. setStreamController(streamController) {
  7804. this.streamController = streamController;
  7805. }
  7806. destroy() {
  7807. if (this.hls) {
  7808. this.unregisterListener();
  7809. }
  7810. if (this.timer) {
  7811. this.stopCapping();
  7812. }
  7813. this.media = null;
  7814. this.clientRect = null;
  7815. // @ts-ignore
  7816. this.hls = this.streamController = null;
  7817. }
  7818. registerListeners() {
  7819. const {
  7820. hls
  7821. } = this;
  7822. hls.on(Events.FPS_DROP_LEVEL_CAPPING, this.onFpsDropLevelCapping, this);
  7823. hls.on(Events.MEDIA_ATTACHING, this.onMediaAttaching, this);
  7824. hls.on(Events.MANIFEST_PARSED, this.onManifestParsed, this);
  7825. hls.on(Events.LEVELS_UPDATED, this.onLevelsUpdated, this);
  7826. hls.on(Events.BUFFER_CODECS, this.onBufferCodecs, this);
  7827. hls.on(Events.MEDIA_DETACHING, this.onMediaDetaching, this);
  7828. }
  7829. unregisterListener() {
  7830. const {
  7831. hls
  7832. } = this;
  7833. hls.off(Events.FPS_DROP_LEVEL_CAPPING, this.onFpsDropLevelCapping, this);
  7834. hls.off(Events.MEDIA_ATTACHING, this.onMediaAttaching, this);
  7835. hls.off(Events.MANIFEST_PARSED, this.onManifestParsed, this);
  7836. hls.off(Events.LEVELS_UPDATED, this.onLevelsUpdated, this);
  7837. hls.off(Events.BUFFER_CODECS, this.onBufferCodecs, this);
  7838. hls.off(Events.MEDIA_DETACHING, this.onMediaDetaching, this);
  7839. }
  7840. onFpsDropLevelCapping(event, data) {
  7841. // Don't add a restricted level more than once
  7842. const level = this.hls.levels[data.droppedLevel];
  7843. if (this.isLevelAllowed(level)) {
  7844. this.restrictedLevels.push({
  7845. bitrate: level.bitrate,
  7846. height: level.height,
  7847. width: level.width
  7848. });
  7849. }
  7850. }
  7851. onMediaAttaching(event, data) {
  7852. this.media = data.media instanceof HTMLVideoElement ? data.media : null;
  7853. this.clientRect = null;
  7854. if (this.timer && this.hls.levels.length) {
  7855. this.detectPlayerSize();
  7856. }
  7857. }
  7858. onManifestParsed(event, data) {
  7859. const hls = this.hls;
  7860. this.restrictedLevels = [];
  7861. this.firstLevel = data.firstLevel;
  7862. if (hls.config.capLevelToPlayerSize && data.video) {
  7863. // Start capping immediately if the manifest has signaled video codecs
  7864. this.startCapping();
  7865. }
  7866. }
  7867. onLevelsUpdated(event, data) {
  7868. if (this.timer && isFiniteNumber(this.autoLevelCapping)) {
  7869. this.detectPlayerSize();
  7870. }
  7871. }
  7872. // Only activate capping when playing a video stream; otherwise, multi-bitrate audio-only streams will be restricted
  7873. // to the first level
  7874. onBufferCodecs(event, data) {
  7875. const hls = this.hls;
  7876. if (hls.config.capLevelToPlayerSize && data.video) {
7877. // If the manifest did not signal a video codec, capping was deferred until we are certain video is present
  7878. this.startCapping();
  7879. }
  7880. }
  7881. onMediaDetaching() {
  7882. this.stopCapping();
  7883. }
  7884. detectPlayerSize() {
  7885. if (this.media) {
  7886. if (this.mediaHeight <= 0 || this.mediaWidth <= 0) {
  7887. this.clientRect = null;
  7888. return;
  7889. }
  7890. const levels = this.hls.levels;
  7891. if (levels.length) {
  7892. const hls = this.hls;
  7893. const maxLevel = this.getMaxLevel(levels.length - 1);
  7894. if (maxLevel !== this.autoLevelCapping) {
  7895. logger.log(`Setting autoLevelCapping to ${maxLevel}: ${levels[maxLevel].height}p@${levels[maxLevel].bitrate} for media ${this.mediaWidth}x${this.mediaHeight}`);
  7896. }
  7897. hls.autoLevelCapping = maxLevel;
  7898. if (hls.autoLevelCapping > this.autoLevelCapping && this.streamController) {
7899. // if the new auto level capping is higher than the previous one, flush the buffer using nextLevelSwitch;
7900. // this usually happens when the user enters fullscreen mode.
  7901. this.streamController.nextLevelSwitch();
  7902. }
  7903. this.autoLevelCapping = hls.autoLevelCapping;
  7904. }
  7905. }
  7906. }
7907. /*
7908. * The returned level should be the one whose dimensions are equal to or greater than the media (player) dimensions (so the video is downscaled rather than upscaled)
7909. */
  7910. getMaxLevel(capLevelIndex) {
  7911. const levels = this.hls.levels;
  7912. if (!levels.length) {
  7913. return -1;
  7914. }
  7915. const validLevels = levels.filter((level, index) => this.isLevelAllowed(level) && index <= capLevelIndex);
  7916. this.clientRect = null;
  7917. return CapLevelController.getMaxLevelByMediaSize(validLevels, this.mediaWidth, this.mediaHeight);
  7918. }
  7919. startCapping() {
  7920. if (this.timer) {
  7921. // Don't reset capping if started twice; this can happen if the manifest signals a video codec
  7922. return;
  7923. }
  7924. this.autoLevelCapping = Number.POSITIVE_INFINITY;
  7925. self.clearInterval(this.timer);
  7926. this.timer = self.setInterval(this.detectPlayerSize.bind(this), 1000);
  7927. this.detectPlayerSize();
  7928. }
  7929. stopCapping() {
  7930. this.restrictedLevels = [];
  7931. this.firstLevel = -1;
  7932. this.autoLevelCapping = Number.POSITIVE_INFINITY;
  7933. if (this.timer) {
  7934. self.clearInterval(this.timer);
  7935. this.timer = undefined;
  7936. }
  7937. }
  7938. getDimensions() {
  7939. if (this.clientRect) {
  7940. return this.clientRect;
  7941. }
  7942. const media = this.media;
  7943. const boundsRect = {
  7944. width: 0,
  7945. height: 0
  7946. };
  7947. if (media) {
  7948. const clientRect = media.getBoundingClientRect();
  7949. boundsRect.width = clientRect.width;
  7950. boundsRect.height = clientRect.height;
  7951. if (!boundsRect.width && !boundsRect.height) {
  7952. // When the media element has no width or height (equivalent to not being in the DOM),
  7953. // then use its width and height attributes (media.width, media.height)
  7954. boundsRect.width = clientRect.right - clientRect.left || media.width || 0;
  7955. boundsRect.height = clientRect.bottom - clientRect.top || media.height || 0;
  7956. }
  7957. }
  7958. this.clientRect = boundsRect;
  7959. return boundsRect;
  7960. }
  7961. get mediaWidth() {
  7962. return this.getDimensions().width * this.contentScaleFactor;
  7963. }
  7964. get mediaHeight() {
  7965. return this.getDimensions().height * this.contentScaleFactor;
  7966. }
  7967. get contentScaleFactor() {
  7968. let pixelRatio = 1;
  7969. if (!this.hls.config.ignoreDevicePixelRatio) {
  7970. try {
  7971. pixelRatio = self.devicePixelRatio;
  7972. } catch (e) {
  7973. /* no-op */
  7974. }
  7975. }
  7976. return pixelRatio;
  7977. }
  7978. isLevelAllowed(level) {
  7979. const restrictedLevels = this.restrictedLevels;
  7980. return !restrictedLevels.some(restrictedLevel => {
  7981. return level.bitrate === restrictedLevel.bitrate && level.width === restrictedLevel.width && level.height === restrictedLevel.height;
  7982. });
  7983. }
  7984. static getMaxLevelByMediaSize(levels, width, height) {
  7985. if (!(levels != null && levels.length)) {
  7986. return -1;
  7987. }
  7988. // Levels can have the same dimensions but differing bandwidths - since levels are ordered, we can look to the next
  7989. // to determine whether we've chosen the greatest bandwidth for the media's dimensions
  7990. const atGreatestBandwidth = (curLevel, nextLevel) => {
  7991. if (!nextLevel) {
  7992. return true;
  7993. }
  7994. return curLevel.width !== nextLevel.width || curLevel.height !== nextLevel.height;
  7995. };
  7996. // If we run through the loop without breaking, the media's dimensions are greater than every level, so default to
  7997. // the max level
  7998. let maxLevelIndex = levels.length - 1;
  7999. // Prevent changes in aspect-ratio from causing capping to toggle back and forth
  8000. const squareSize = Math.max(width, height);
  8001. for (let i = 0; i < levels.length; i += 1) {
  8002. const level = levels[i];
  8003. if ((level.width >= squareSize || level.height >= squareSize) && atGreatestBandwidth(level, levels[i + 1])) {
  8004. maxLevelIndex = i;
  8005. break;
  8006. }
  8007. }
  8008. return maxLevelIndex;
  8009. }
  8010. }
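// ---------------------------------------------------------------------------
// Usage sketch (illustrative only): CapLevelController is enabled through the
// capLevelToPlayerSize config option and re-measures the media element once per
// second. The element id and manifest URL below are hypothetical.
//
//   import Hls from 'hls.js';
//   const video = document.getElementById('video');
//   const hls = new Hls({
//     capLevelToPlayerSize: true,     // cap auto quality to the player's size
//     ignoreDevicePixelRatio: false   // include devicePixelRatio in the measurement
//   });
//   hls.loadSource('https://example.com/master.m3u8');
//   hls.attachMedia(video);
// ---------------------------------------------------------------------------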
  8011. class FPSController {
  8012. constructor(hls) {
  8013. this.hls = void 0;
  8014. this.isVideoPlaybackQualityAvailable = false;
  8015. this.timer = void 0;
  8016. this.media = null;
  8017. this.lastTime = void 0;
  8018. this.lastDroppedFrames = 0;
  8019. this.lastDecodedFrames = 0;
  8020. // stream controller must be provided as a dependency!
  8021. this.streamController = void 0;
  8022. this.hls = hls;
  8023. this.registerListeners();
  8024. }
  8025. setStreamController(streamController) {
  8026. this.streamController = streamController;
  8027. }
  8028. registerListeners() {
  8029. this.hls.on(Events.MEDIA_ATTACHING, this.onMediaAttaching, this);
  8030. }
  8031. unregisterListeners() {
  8032. this.hls.off(Events.MEDIA_ATTACHING, this.onMediaAttaching, this);
  8033. }
  8034. destroy() {
  8035. if (this.timer) {
  8036. clearInterval(this.timer);
  8037. }
  8038. this.unregisterListeners();
  8039. this.isVideoPlaybackQualityAvailable = false;
  8040. this.media = null;
  8041. }
  8042. onMediaAttaching(event, data) {
  8043. const config = this.hls.config;
  8044. if (config.capLevelOnFPSDrop) {
  8045. const media = data.media instanceof self.HTMLVideoElement ? data.media : null;
  8046. this.media = media;
  8047. if (media && typeof media.getVideoPlaybackQuality === 'function') {
  8048. this.isVideoPlaybackQualityAvailable = true;
  8049. }
  8050. self.clearInterval(this.timer);
  8051. this.timer = self.setInterval(this.checkFPSInterval.bind(this), config.fpsDroppedMonitoringPeriod);
  8052. }
  8053. }
  8054. checkFPS(video, decodedFrames, droppedFrames) {
  8055. const currentTime = performance.now();
  8056. if (decodedFrames) {
  8057. if (this.lastTime) {
  8058. const currentPeriod = currentTime - this.lastTime;
  8059. const currentDropped = droppedFrames - this.lastDroppedFrames;
  8060. const currentDecoded = decodedFrames - this.lastDecodedFrames;
  8061. const droppedFPS = 1000 * currentDropped / currentPeriod;
  8062. const hls = this.hls;
  8063. hls.trigger(Events.FPS_DROP, {
  8064. currentDropped: currentDropped,
  8065. currentDecoded: currentDecoded,
  8066. totalDroppedFrames: droppedFrames
  8067. });
  8068. if (droppedFPS > 0) {
  8069. // logger.log('checkFPS : droppedFPS/decodedFPS:' + droppedFPS/(1000 * currentDecoded / currentPeriod));
  8070. if (currentDropped > hls.config.fpsDroppedMonitoringThreshold * currentDecoded) {
  8071. let currentLevel = hls.currentLevel;
  8072. logger.warn('drop FPS ratio greater than max allowed value for currentLevel: ' + currentLevel);
  8073. if (currentLevel > 0 && (hls.autoLevelCapping === -1 || hls.autoLevelCapping >= currentLevel)) {
  8074. currentLevel = currentLevel - 1;
  8075. hls.trigger(Events.FPS_DROP_LEVEL_CAPPING, {
  8076. level: currentLevel,
  8077. droppedLevel: hls.currentLevel
  8078. });
  8079. hls.autoLevelCapping = currentLevel;
  8080. this.streamController.nextLevelSwitch();
  8081. }
  8082. }
  8083. }
  8084. }
  8085. this.lastTime = currentTime;
  8086. this.lastDroppedFrames = droppedFrames;
  8087. this.lastDecodedFrames = decodedFrames;
  8088. }
  8089. }
  8090. checkFPSInterval() {
  8091. const video = this.media;
  8092. if (video) {
  8093. if (this.isVideoPlaybackQualityAvailable) {
  8094. const videoPlaybackQuality = video.getVideoPlaybackQuality();
  8095. this.checkFPS(video, videoPlaybackQuality.totalVideoFrames, videoPlaybackQuality.droppedVideoFrames);
  8096. } else {
  8097. // HTMLVideoElement doesn't include the webkit types
  8098. this.checkFPS(video, video.webkitDecodedFrameCount, video.webkitDroppedFrameCount);
  8099. }
  8100. }
  8101. }
  8102. }
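// ---------------------------------------------------------------------------
// Usage sketch (illustrative only): FPSController is driven by capLevelOnFPSDrop
// and emits FPS_DROP / FPS_DROP_LEVEL_CAPPING events. The listener below is a
// hypothetical way an application might observe dropped frames.
//
//   const hls = new Hls({
//     capLevelOnFPSDrop: true,
//     fpsDroppedMonitoringPeriod: 5000,    // ms between checks
//     fpsDroppedMonitoringThreshold: 0.2   // dropped/decoded ratio that triggers capping
//   });
//   hls.on(Hls.Events.FPS_DROP, (event, data) => {
//     console.log(`dropped ${data.currentDropped} of ${data.currentDecoded} frames`);
//   });
// ---------------------------------------------------------------------------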
  8103. const PATHWAY_PENALTY_DURATION_MS = 300000;
  8104. class ContentSteeringController {
  8105. constructor(hls) {
  8106. this.hls = void 0;
  8107. this.log = void 0;
  8108. this.loader = null;
  8109. this.uri = null;
  8110. this.pathwayId = '.';
  8111. this.pathwayPriority = null;
  8112. this.timeToLoad = 300;
  8113. this.reloadTimer = -1;
  8114. this.updated = 0;
  8115. this.started = false;
  8116. this.enabled = true;
  8117. this.levels = null;
  8118. this.audioTracks = null;
  8119. this.subtitleTracks = null;
  8120. this.penalizedPathways = {};
  8121. this.hls = hls;
  8122. this.log = logger.log.bind(logger, `[content-steering]:`);
  8123. this.registerListeners();
  8124. }
  8125. registerListeners() {
  8126. const hls = this.hls;
  8127. hls.on(Events.MANIFEST_LOADING, this.onManifestLoading, this);
  8128. hls.on(Events.MANIFEST_LOADED, this.onManifestLoaded, this);
  8129. hls.on(Events.MANIFEST_PARSED, this.onManifestParsed, this);
  8130. hls.on(Events.ERROR, this.onError, this);
  8131. }
  8132. unregisterListeners() {
  8133. const hls = this.hls;
  8134. if (!hls) {
  8135. return;
  8136. }
  8137. hls.off(Events.MANIFEST_LOADING, this.onManifestLoading, this);
  8138. hls.off(Events.MANIFEST_LOADED, this.onManifestLoaded, this);
  8139. hls.off(Events.MANIFEST_PARSED, this.onManifestParsed, this);
  8140. hls.off(Events.ERROR, this.onError, this);
  8141. }
  8142. startLoad() {
  8143. this.started = true;
  8144. this.clearTimeout();
  8145. if (this.enabled && this.uri) {
  8146. if (this.updated) {
  8147. const ttl = this.timeToLoad * 1000 - (performance.now() - this.updated);
  8148. if (ttl > 0) {
  8149. this.scheduleRefresh(this.uri, ttl);
  8150. return;
  8151. }
  8152. }
  8153. this.loadSteeringManifest(this.uri);
  8154. }
  8155. }
  8156. stopLoad() {
  8157. this.started = false;
  8158. if (this.loader) {
  8159. this.loader.destroy();
  8160. this.loader = null;
  8161. }
  8162. this.clearTimeout();
  8163. }
  8164. clearTimeout() {
  8165. if (this.reloadTimer !== -1) {
  8166. self.clearTimeout(this.reloadTimer);
  8167. this.reloadTimer = -1;
  8168. }
  8169. }
  8170. destroy() {
  8171. this.unregisterListeners();
  8172. this.stopLoad();
  8173. // @ts-ignore
  8174. this.hls = null;
  8175. this.levels = this.audioTracks = this.subtitleTracks = null;
  8176. }
  8177. removeLevel(levelToRemove) {
  8178. const levels = this.levels;
  8179. if (levels) {
  8180. this.levels = levels.filter(level => level !== levelToRemove);
  8181. }
  8182. }
  8183. onManifestLoading() {
  8184. this.stopLoad();
  8185. this.enabled = true;
  8186. this.timeToLoad = 300;
  8187. this.updated = 0;
  8188. this.uri = null;
  8189. this.pathwayId = '.';
  8190. this.levels = this.audioTracks = this.subtitleTracks = null;
  8191. }
  8192. onManifestLoaded(event, data) {
  8193. const {
  8194. contentSteering
  8195. } = data;
  8196. if (contentSteering === null) {
  8197. return;
  8198. }
  8199. this.pathwayId = contentSteering.pathwayId;
  8200. this.uri = contentSteering.uri;
  8201. if (this.started) {
  8202. this.startLoad();
  8203. }
  8204. }
  8205. onManifestParsed(event, data) {
  8206. this.audioTracks = data.audioTracks;
  8207. this.subtitleTracks = data.subtitleTracks;
  8208. }
  8209. onError(event, data) {
  8210. const {
  8211. errorAction
  8212. } = data;
  8213. if ((errorAction == null ? void 0 : errorAction.action) === NetworkErrorAction.SendAlternateToPenaltyBox && errorAction.flags === ErrorActionFlags.MoveAllAlternatesMatchingHost) {
  8214. const levels = this.levels;
  8215. let pathwayPriority = this.pathwayPriority;
  8216. let errorPathway = this.pathwayId;
  8217. if (data.context) {
  8218. const {
  8219. groupId,
  8220. pathwayId,
  8221. type
  8222. } = data.context;
  8223. if (groupId && levels) {
  8224. errorPathway = this.getPathwayForGroupId(groupId, type, errorPathway);
  8225. } else if (pathwayId) {
  8226. errorPathway = pathwayId;
  8227. }
  8228. }
  8229. if (!(errorPathway in this.penalizedPathways)) {
  8230. this.penalizedPathways[errorPathway] = performance.now();
  8231. }
  8232. if (!pathwayPriority && levels) {
  8233. // If PATHWAY-PRIORITY was not provided, list pathways for error handling
  8234. pathwayPriority = levels.reduce((pathways, level) => {
  8235. if (pathways.indexOf(level.pathwayId) === -1) {
  8236. pathways.push(level.pathwayId);
  8237. }
  8238. return pathways;
  8239. }, []);
  8240. }
  8241. if (pathwayPriority && pathwayPriority.length > 1) {
  8242. this.updatePathwayPriority(pathwayPriority);
  8243. errorAction.resolved = this.pathwayId !== errorPathway;
  8244. }
  8245. if (!errorAction.resolved) {
  8246. logger.warn(`Could not resolve ${data.details} ("${data.error.message}") with content-steering for Pathway: ${errorPathway} levels: ${levels ? levels.length : levels} priorities: ${JSON.stringify(pathwayPriority)} penalized: ${JSON.stringify(this.penalizedPathways)}`);
  8247. }
  8248. }
  8249. }
  8250. filterParsedLevels(levels) {
  8251. // Filter levels to only include those that are in the initial pathway
  8252. this.levels = levels;
  8253. let pathwayLevels = this.getLevelsForPathway(this.pathwayId);
  8254. if (pathwayLevels.length === 0) {
  8255. const pathwayId = levels[0].pathwayId;
  8256. this.log(`No levels found in Pathway ${this.pathwayId}. Setting initial Pathway to "${pathwayId}"`);
  8257. pathwayLevels = this.getLevelsForPathway(pathwayId);
  8258. this.pathwayId = pathwayId;
  8259. }
  8260. if (pathwayLevels.length !== levels.length) {
  8261. this.log(`Found ${pathwayLevels.length}/${levels.length} levels in Pathway "${this.pathwayId}"`);
  8262. return pathwayLevels;
  8263. }
  8264. return levels;
  8265. }
  8266. getLevelsForPathway(pathwayId) {
  8267. if (this.levels === null) {
  8268. return [];
  8269. }
  8270. return this.levels.filter(level => pathwayId === level.pathwayId);
  8271. }
  8272. updatePathwayPriority(pathwayPriority) {
  8273. this.pathwayPriority = pathwayPriority;
  8274. let levels;
  8275. // Evaluate if we should remove the pathway from the penalized list
  8276. const penalizedPathways = this.penalizedPathways;
  8277. const now = performance.now();
  8278. Object.keys(penalizedPathways).forEach(pathwayId => {
  8279. if (now - penalizedPathways[pathwayId] > PATHWAY_PENALTY_DURATION_MS) {
  8280. delete penalizedPathways[pathwayId];
  8281. }
  8282. });
  8283. for (let i = 0; i < pathwayPriority.length; i++) {
  8284. const pathwayId = pathwayPriority[i];
  8285. if (pathwayId in penalizedPathways) {
  8286. continue;
  8287. }
  8288. if (pathwayId === this.pathwayId) {
  8289. return;
  8290. }
  8291. const selectedIndex = this.hls.nextLoadLevel;
  8292. const selectedLevel = this.hls.levels[selectedIndex];
  8293. levels = this.getLevelsForPathway(pathwayId);
  8294. if (levels.length > 0) {
  8295. this.log(`Setting Pathway to "${pathwayId}"`);
  8296. this.pathwayId = pathwayId;
  8297. reassignFragmentLevelIndexes(levels);
  8298. this.hls.trigger(Events.LEVELS_UPDATED, {
  8299. levels
  8300. });
  8301. // Set LevelController's level to trigger LEVEL_SWITCHING which loads playlist if needed
  8302. const levelAfterChange = this.hls.levels[selectedIndex];
  8303. if (selectedLevel && levelAfterChange && this.levels) {
  8304. if (levelAfterChange.attrs['STABLE-VARIANT-ID'] !== selectedLevel.attrs['STABLE-VARIANT-ID'] && levelAfterChange.bitrate !== selectedLevel.bitrate) {
  8305. this.log(`Unstable Pathways change from bitrate ${selectedLevel.bitrate} to ${levelAfterChange.bitrate}`);
  8306. }
  8307. this.hls.nextLoadLevel = selectedIndex;
  8308. }
  8309. break;
  8310. }
  8311. }
  8312. }
  8313. getPathwayForGroupId(groupId, type, defaultPathway) {
  8314. const levels = this.getLevelsForPathway(defaultPathway).concat(this.levels || []);
  8315. for (let i = 0; i < levels.length; i++) {
  8316. if (type === PlaylistContextType.AUDIO_TRACK && levels[i].hasAudioGroup(groupId) || type === PlaylistContextType.SUBTITLE_TRACK && levels[i].hasSubtitleGroup(groupId)) {
  8317. return levels[i].pathwayId;
  8318. }
  8319. }
  8320. return defaultPathway;
  8321. }
  8322. clonePathways(pathwayClones) {
  8323. const levels = this.levels;
  8324. if (!levels) {
  8325. return;
  8326. }
  8327. const audioGroupCloneMap = {};
  8328. const subtitleGroupCloneMap = {};
  8329. pathwayClones.forEach(pathwayClone => {
  8330. const {
  8331. ID: cloneId,
  8332. 'BASE-ID': baseId,
  8333. 'URI-REPLACEMENT': uriReplacement
  8334. } = pathwayClone;
  8335. if (levels.some(level => level.pathwayId === cloneId)) {
  8336. return;
  8337. }
  8338. const clonedVariants = this.getLevelsForPathway(baseId).map(baseLevel => {
  8339. const attributes = new AttrList(baseLevel.attrs);
  8340. attributes['PATHWAY-ID'] = cloneId;
  8341. const clonedAudioGroupId = attributes.AUDIO && `${attributes.AUDIO}_clone_${cloneId}`;
  8342. const clonedSubtitleGroupId = attributes.SUBTITLES && `${attributes.SUBTITLES}_clone_${cloneId}`;
  8343. if (clonedAudioGroupId) {
  8344. audioGroupCloneMap[attributes.AUDIO] = clonedAudioGroupId;
  8345. attributes.AUDIO = clonedAudioGroupId;
  8346. }
  8347. if (clonedSubtitleGroupId) {
  8348. subtitleGroupCloneMap[attributes.SUBTITLES] = clonedSubtitleGroupId;
  8349. attributes.SUBTITLES = clonedSubtitleGroupId;
  8350. }
  8351. const url = performUriReplacement(baseLevel.uri, attributes['STABLE-VARIANT-ID'], 'PER-VARIANT-URIS', uriReplacement);
  8352. const clonedLevel = new Level({
  8353. attrs: attributes,
  8354. audioCodec: baseLevel.audioCodec,
  8355. bitrate: baseLevel.bitrate,
  8356. height: baseLevel.height,
  8357. name: baseLevel.name,
  8358. url,
  8359. videoCodec: baseLevel.videoCodec,
  8360. width: baseLevel.width
  8361. });
  8362. if (baseLevel.audioGroups) {
  8363. for (let i = 1; i < baseLevel.audioGroups.length; i++) {
  8364. clonedLevel.addGroupId('audio', `${baseLevel.audioGroups[i]}_clone_${cloneId}`);
  8365. }
  8366. }
  8367. if (baseLevel.subtitleGroups) {
  8368. for (let i = 1; i < baseLevel.subtitleGroups.length; i++) {
  8369. clonedLevel.addGroupId('text', `${baseLevel.subtitleGroups[i]}_clone_${cloneId}`);
  8370. }
  8371. }
  8372. return clonedLevel;
  8373. });
  8374. levels.push(...clonedVariants);
  8375. cloneRenditionGroups(this.audioTracks, audioGroupCloneMap, uriReplacement, cloneId);
  8376. cloneRenditionGroups(this.subtitleTracks, subtitleGroupCloneMap, uriReplacement, cloneId);
  8377. });
  8378. }
  8379. loadSteeringManifest(uri) {
  8380. const config = this.hls.config;
  8381. const Loader = config.loader;
  8382. if (this.loader) {
  8383. this.loader.destroy();
  8384. }
  8385. this.loader = new Loader(config);
  8386. let url;
  8387. try {
  8388. url = new self.URL(uri);
  8389. } catch (error) {
  8390. this.enabled = false;
  8391. this.log(`Failed to parse Steering Manifest URI: ${uri}`);
  8392. return;
  8393. }
  8394. if (url.protocol !== 'data:') {
  8395. const throughput = (this.hls.bandwidthEstimate || config.abrEwmaDefaultEstimate) | 0;
  8396. url.searchParams.set('_HLS_pathway', this.pathwayId);
  8397. url.searchParams.set('_HLS_throughput', '' + throughput);
  8398. }
  8399. const context = {
  8400. responseType: 'json',
  8401. url: url.href
  8402. };
  8403. const loadPolicy = config.steeringManifestLoadPolicy.default;
  8404. const legacyRetryCompatibility = loadPolicy.errorRetry || loadPolicy.timeoutRetry || {};
  8405. const loaderConfig = {
  8406. loadPolicy,
  8407. timeout: loadPolicy.maxLoadTimeMs,
  8408. maxRetry: legacyRetryCompatibility.maxNumRetry || 0,
  8409. retryDelay: legacyRetryCompatibility.retryDelayMs || 0,
  8410. maxRetryDelay: legacyRetryCompatibility.maxRetryDelayMs || 0
  8411. };
  8412. const callbacks = {
  8413. onSuccess: (response, stats, context, networkDetails) => {
  8414. this.log(`Loaded steering manifest: "${url}"`);
  8415. const steeringData = response.data;
  8416. if (steeringData.VERSION !== 1) {
  8417. this.log(`Steering VERSION ${steeringData.VERSION} not supported!`);
  8418. return;
  8419. }
  8420. this.updated = performance.now();
  8421. this.timeToLoad = steeringData.TTL;
  8422. const {
  8423. 'RELOAD-URI': reloadUri,
  8424. 'PATHWAY-CLONES': pathwayClones,
  8425. 'PATHWAY-PRIORITY': pathwayPriority
  8426. } = steeringData;
  8427. if (reloadUri) {
  8428. try {
  8429. this.uri = new self.URL(reloadUri, url).href;
  8430. } catch (error) {
  8431. this.enabled = false;
  8432. this.log(`Failed to parse Steering Manifest RELOAD-URI: ${reloadUri}`);
  8433. return;
  8434. }
  8435. }
  8436. this.scheduleRefresh(this.uri || context.url);
  8437. if (pathwayClones) {
  8438. this.clonePathways(pathwayClones);
  8439. }
  8440. const loadedSteeringData = {
  8441. steeringManifest: steeringData,
  8442. url: url.toString()
  8443. };
  8444. this.hls.trigger(Events.STEERING_MANIFEST_LOADED, loadedSteeringData);
  8445. if (pathwayPriority) {
  8446. this.updatePathwayPriority(pathwayPriority);
  8447. }
  8448. },
  8449. onError: (error, context, networkDetails, stats) => {
  8450. this.log(`Error loading steering manifest: ${error.code} ${error.text} (${context.url})`);
  8451. this.stopLoad();
  8452. if (error.code === 410) {
  8453. this.enabled = false;
  8454. this.log(`Steering manifest ${context.url} no longer available`);
  8455. return;
  8456. }
  8457. let ttl = this.timeToLoad * 1000;
  8458. if (error.code === 429) {
  8459. const loader = this.loader;
  8460. if (typeof (loader == null ? void 0 : loader.getResponseHeader) === 'function') {
  8461. const retryAfter = loader.getResponseHeader('Retry-After');
  8462. if (retryAfter) {
  8463. ttl = parseFloat(retryAfter) * 1000;
  8464. }
  8465. }
  8466. this.log(`Steering manifest ${context.url} rate limited`);
  8467. return;
  8468. }
  8469. this.scheduleRefresh(this.uri || context.url, ttl);
  8470. },
  8471. onTimeout: (stats, context, networkDetails) => {
  8472. this.log(`Timeout loading steering manifest (${context.url})`);
  8473. this.scheduleRefresh(this.uri || context.url);
  8474. }
  8475. };
  8476. this.log(`Requesting steering manifest: ${url}`);
  8477. this.loader.load(context, loaderConfig, callbacks);
  8478. }
  8479. scheduleRefresh(uri, ttlMs = this.timeToLoad * 1000) {
  8480. this.clearTimeout();
  8481. this.reloadTimer = self.setTimeout(() => {
  8482. var _this$hls;
  8483. const media = (_this$hls = this.hls) == null ? void 0 : _this$hls.media;
  8484. if (media && !media.ended) {
  8485. this.loadSteeringManifest(uri);
  8486. return;
  8487. }
  8488. this.scheduleRefresh(uri, this.timeToLoad * 1000);
  8489. }, ttlMs);
  8490. }
  8491. }
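// ---------------------------------------------------------------------------
// Illustrative steering-manifest response consumed by the onSuccess callback in
// loadSteeringManifest() above. Field names come from the handler; hostnames and
// pathway ids are hypothetical.
//
//   {
//     "VERSION": 1,
//     "TTL": 300,
//     "RELOAD-URI": "https://steering.example.com/manifest.json",
//     "PATHWAY-PRIORITY": ["CDN-A", "CDN-B"],
//     "PATHWAY-CLONES": [{
//       "ID": "CDN-B",
//       "BASE-ID": "CDN-A",
//       "URI-REPLACEMENT": { "HOST": "cdn-b.example.com", "PARAMS": { "token": "abc" } }
//     }]
//   }
// ---------------------------------------------------------------------------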
  8492. function cloneRenditionGroups(tracks, groupCloneMap, uriReplacement, cloneId) {
  8493. if (!tracks) {
  8494. return;
  8495. }
  8496. Object.keys(groupCloneMap).forEach(audioGroupId => {
  8497. const clonedTracks = tracks.filter(track => track.groupId === audioGroupId).map(track => {
  8498. const clonedTrack = _extends({}, track);
  8499. clonedTrack.details = undefined;
  8500. clonedTrack.attrs = new AttrList(clonedTrack.attrs);
  8501. clonedTrack.url = clonedTrack.attrs.URI = performUriReplacement(track.url, track.attrs['STABLE-RENDITION-ID'], 'PER-RENDITION-URIS', uriReplacement);
  8502. clonedTrack.groupId = clonedTrack.attrs['GROUP-ID'] = groupCloneMap[audioGroupId];
  8503. clonedTrack.attrs['PATHWAY-ID'] = cloneId;
  8504. return clonedTrack;
  8505. });
  8506. tracks.push(...clonedTracks);
  8507. });
  8508. }
  8509. function performUriReplacement(uri, stableId, perOptionKey, uriReplacement) {
  8510. const {
  8511. HOST: host,
  8512. PARAMS: params,
  8513. [perOptionKey]: perOptionUris
  8514. } = uriReplacement;
  8515. let perVariantUri;
  8516. if (stableId) {
  8517. perVariantUri = perOptionUris == null ? void 0 : perOptionUris[stableId];
  8518. if (perVariantUri) {
  8519. uri = perVariantUri;
  8520. }
  8521. }
  8522. const url = new self.URL(uri);
  8523. if (host && !perVariantUri) {
  8524. url.host = host;
  8525. }
  8526. if (params) {
  8527. Object.keys(params).sort().forEach(key => {
  8528. if (key) {
  8529. url.searchParams.set(key, params[key]);
  8530. }
  8531. });
  8532. }
  8533. return url.href;
  8534. }
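// ---------------------------------------------------------------------------
// Worked example (illustrative) for performUriReplacement(): with a hypothetical
// URI and replacement object, the HOST is swapped and PARAMS are appended in
// sorted key order, unless a per-variant URI replaces the whole URL.
//
//   performUriReplacement(
//     'https://cdn-a.example.com/video/variant1.m3u8',
//     undefined,            // no STABLE-VARIANT-ID lookup
//     'PER-VARIANT-URIS',
//     { HOST: 'cdn-b.example.com', PARAMS: { token: 'abc' } }
//   );
//   // -> 'https://cdn-b.example.com/video/variant1.m3u8?token=abc'
// ---------------------------------------------------------------------------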
  8535. const AGE_HEADER_LINE_REGEX = /^age:\s*[\d.]+\s*$/im;
  8536. class XhrLoader {
  8537. constructor(config) {
  8538. this.xhrSetup = void 0;
  8539. this.requestTimeout = void 0;
  8540. this.retryTimeout = void 0;
  8541. this.retryDelay = void 0;
  8542. this.config = null;
  8543. this.callbacks = null;
  8544. this.context = null;
  8545. this.loader = null;
  8546. this.stats = void 0;
  8547. this.xhrSetup = config ? config.xhrSetup || null : null;
  8548. this.stats = new LoadStats();
  8549. this.retryDelay = 0;
  8550. }
  8551. destroy() {
  8552. this.callbacks = null;
  8553. this.abortInternal();
  8554. this.loader = null;
  8555. this.config = null;
  8556. this.context = null;
  8557. this.xhrSetup = null;
  8558. }
  8559. abortInternal() {
  8560. const loader = this.loader;
  8561. self.clearTimeout(this.requestTimeout);
  8562. self.clearTimeout(this.retryTimeout);
  8563. if (loader) {
  8564. loader.onreadystatechange = null;
  8565. loader.onprogress = null;
  8566. if (loader.readyState !== 4) {
  8567. this.stats.aborted = true;
  8568. loader.abort();
  8569. }
  8570. }
  8571. }
  8572. abort() {
  8573. var _this$callbacks;
  8574. this.abortInternal();
  8575. if ((_this$callbacks = this.callbacks) != null && _this$callbacks.onAbort) {
  8576. this.callbacks.onAbort(this.stats, this.context, this.loader);
  8577. }
  8578. }
  8579. load(context, config, callbacks) {
  8580. if (this.stats.loading.start) {
  8581. throw new Error('Loader can only be used once.');
  8582. }
  8583. this.stats.loading.start = self.performance.now();
  8584. this.context = context;
  8585. this.config = config;
  8586. this.callbacks = callbacks;
  8587. this.loadInternal();
  8588. }
  8589. loadInternal() {
  8590. const {
  8591. config,
  8592. context
  8593. } = this;
  8594. if (!config || !context) {
  8595. return;
  8596. }
  8597. const xhr = this.loader = new self.XMLHttpRequest();
  8598. const stats = this.stats;
  8599. stats.loading.first = 0;
  8600. stats.loaded = 0;
  8601. stats.aborted = false;
  8602. const xhrSetup = this.xhrSetup;
  8603. if (xhrSetup) {
  8604. Promise.resolve().then(() => {
  8605. if (this.loader !== xhr || this.stats.aborted) return;
  8606. return xhrSetup(xhr, context.url);
  8607. }).catch(error => {
  8608. if (this.loader !== xhr || this.stats.aborted) return;
  8609. xhr.open('GET', context.url, true);
  8610. return xhrSetup(xhr, context.url);
  8611. }).then(() => {
  8612. if (this.loader !== xhr || this.stats.aborted) return;
  8613. this.openAndSendXhr(xhr, context, config);
  8614. }).catch(error => {
  8615. // IE11 throws an exception on xhr.open if attempting to access an HTTP resource over HTTPS
  8616. this.callbacks.onError({
  8617. code: xhr.status,
  8618. text: error.message
  8619. }, context, xhr, stats);
  8620. return;
  8621. });
  8622. } else {
  8623. this.openAndSendXhr(xhr, context, config);
  8624. }
  8625. }
  8626. openAndSendXhr(xhr, context, config) {
  8627. if (!xhr.readyState) {
  8628. xhr.open('GET', context.url, true);
  8629. }
  8630. const headers = context.headers;
  8631. const {
  8632. maxTimeToFirstByteMs,
  8633. maxLoadTimeMs
  8634. } = config.loadPolicy;
  8635. if (headers) {
  8636. for (const header in headers) {
  8637. xhr.setRequestHeader(header, headers[header]);
  8638. }
  8639. }
  8640. if (context.rangeEnd) {
  8641. xhr.setRequestHeader('Range', 'bytes=' + context.rangeStart + '-' + (context.rangeEnd - 1));
  8642. }
  8643. xhr.onreadystatechange = this.readystatechange.bind(this);
  8644. xhr.onprogress = this.loadprogress.bind(this);
  8645. xhr.responseType = context.responseType;
8646. // set up the timeout before we perform the request
  8647. self.clearTimeout(this.requestTimeout);
  8648. config.timeout = maxTimeToFirstByteMs && isFiniteNumber(maxTimeToFirstByteMs) ? maxTimeToFirstByteMs : maxLoadTimeMs;
  8649. this.requestTimeout = self.setTimeout(this.loadtimeout.bind(this), config.timeout);
  8650. xhr.send();
  8651. }
  8652. readystatechange() {
  8653. const {
  8654. context,
  8655. loader: xhr,
  8656. stats
  8657. } = this;
  8658. if (!context || !xhr) {
  8659. return;
  8660. }
  8661. const readyState = xhr.readyState;
  8662. const config = this.config;
  8663. // don't proceed if xhr has been aborted
  8664. if (stats.aborted) {
  8665. return;
  8666. }
  8667. // >= HEADERS_RECEIVED
  8668. if (readyState >= 2) {
  8669. if (stats.loading.first === 0) {
  8670. stats.loading.first = Math.max(self.performance.now(), stats.loading.start);
8671. // readyState >= 2 AND readyState !== 4 (HEADERS_RECEIVED or LOADING): re-arm the timeout as the xhr has not finished yet
  8672. if (config.timeout !== config.loadPolicy.maxLoadTimeMs) {
  8673. self.clearTimeout(this.requestTimeout);
  8674. config.timeout = config.loadPolicy.maxLoadTimeMs;
  8675. this.requestTimeout = self.setTimeout(this.loadtimeout.bind(this), config.loadPolicy.maxLoadTimeMs - (stats.loading.first - stats.loading.start));
  8676. }
  8677. }
  8678. if (readyState === 4) {
  8679. self.clearTimeout(this.requestTimeout);
  8680. xhr.onreadystatechange = null;
  8681. xhr.onprogress = null;
  8682. const status = xhr.status;
8683. // HTTP statuses between 200 and 299 are all successful
  8684. const useResponse = xhr.responseType !== 'text';
  8685. if (status >= 200 && status < 300 && (useResponse && xhr.response || xhr.responseText !== null)) {
  8686. stats.loading.end = Math.max(self.performance.now(), stats.loading.first);
  8687. const data = useResponse ? xhr.response : xhr.responseText;
  8688. const len = xhr.responseType === 'arraybuffer' ? data.byteLength : data.length;
  8689. stats.loaded = stats.total = len;
  8690. stats.bwEstimate = stats.total * 8000 / (stats.loading.end - stats.loading.first);
  8691. if (!this.callbacks) {
  8692. return;
  8693. }
  8694. const onProgress = this.callbacks.onProgress;
  8695. if (onProgress) {
  8696. onProgress(stats, context, data, xhr);
  8697. }
  8698. if (!this.callbacks) {
  8699. return;
  8700. }
  8701. const response = {
  8702. url: xhr.responseURL,
  8703. data: data,
  8704. code: status
  8705. };
  8706. this.callbacks.onSuccess(response, stats, context, xhr);
  8707. } else {
  8708. const retryConfig = config.loadPolicy.errorRetry;
  8709. const retryCount = stats.retry;
8710. // if the max number of retries is reached, or the HTTP status is between 400 and 499 (such errors cannot be recovered, so retrying is useless), return an error
  8711. const response = {
  8712. url: context.url,
  8713. data: undefined,
  8714. code: status
  8715. };
  8716. if (shouldRetry(retryConfig, retryCount, false, response)) {
  8717. this.retry(retryConfig);
  8718. } else {
  8719. logger.error(`${status} while loading ${context.url}`);
  8720. this.callbacks.onError({
  8721. code: status,
  8722. text: xhr.statusText
  8723. }, context, xhr, stats);
  8724. }
  8725. }
  8726. }
  8727. }
  8728. }
  8729. loadtimeout() {
  8730. if (!this.config) return;
  8731. const retryConfig = this.config.loadPolicy.timeoutRetry;
  8732. const retryCount = this.stats.retry;
  8733. if (shouldRetry(retryConfig, retryCount, true)) {
  8734. this.retry(retryConfig);
  8735. } else {
  8736. var _this$context;
  8737. logger.warn(`timeout while loading ${(_this$context = this.context) == null ? void 0 : _this$context.url}`);
  8738. const callbacks = this.callbacks;
  8739. if (callbacks) {
  8740. this.abortInternal();
  8741. callbacks.onTimeout(this.stats, this.context, this.loader);
  8742. }
  8743. }
  8744. }
  8745. retry(retryConfig) {
  8746. const {
  8747. context,
  8748. stats
  8749. } = this;
  8750. this.retryDelay = getRetryDelay(retryConfig, stats.retry);
  8751. stats.retry++;
8752. logger.warn(`${this.loader && this.loader.status ? 'HTTP Status ' + this.loader.status : 'Timeout'} while loading ${context == null ? void 0 : context.url}, retrying ${stats.retry}/${retryConfig.maxNumRetry} in ${this.retryDelay}ms`);
  8753. // abort and reset internal state
  8754. this.abortInternal();
  8755. this.loader = null;
  8756. // schedule retry
  8757. self.clearTimeout(this.retryTimeout);
  8758. this.retryTimeout = self.setTimeout(this.loadInternal.bind(this), this.retryDelay);
  8759. }
  8760. loadprogress(event) {
  8761. const stats = this.stats;
  8762. stats.loaded = event.loaded;
  8763. if (event.lengthComputable) {
  8764. stats.total = event.total;
  8765. }
  8766. }
  8767. getCacheAge() {
  8768. let result = null;
  8769. if (this.loader && AGE_HEADER_LINE_REGEX.test(this.loader.getAllResponseHeaders())) {
  8770. const ageHeader = this.loader.getResponseHeader('age');
  8771. result = ageHeader ? parseFloat(ageHeader) : null;
  8772. }
  8773. return result;
  8774. }
  8775. getResponseHeader(name) {
  8776. if (this.loader && new RegExp(`^${name}:\\s*[\\d.]+\\s*$`, 'im').test(this.loader.getAllResponseHeaders())) {
  8777. return this.loader.getResponseHeader(name);
  8778. }
  8779. return null;
  8780. }
  8781. }
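// ---------------------------------------------------------------------------
// Usage sketch (illustrative only): XhrLoader honours the optional xhrSetup hook
// from the config, which may also return a Promise (see loadInternal above).
//
//   const hls = new Hls({
//     xhrSetup: (xhr, url) => {
//       // Send cookies with every playlist/fragment/key request
//       xhr.withCredentials = true;
//     }
//   });
//
// If the hook throws (for example by calling setRequestHeader before open), the
// loader opens the request itself and invokes the hook a second time, as shown
// by the catch/then chain in loadInternal() above.
// ---------------------------------------------------------------------------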
  8782. class ChunkCache {
  8783. constructor() {
  8784. this.chunks = [];
  8785. this.dataLength = 0;
  8786. }
  8787. push(chunk) {
  8788. this.chunks.push(chunk);
  8789. this.dataLength += chunk.length;
  8790. }
  8791. flush() {
  8792. const {
  8793. chunks,
  8794. dataLength
  8795. } = this;
  8796. let result;
  8797. if (!chunks.length) {
  8798. return new Uint8Array(0);
  8799. } else if (chunks.length === 1) {
  8800. result = chunks[0];
  8801. } else {
  8802. result = concatUint8Arrays(chunks, dataLength);
  8803. }
  8804. this.reset();
  8805. return result;
  8806. }
  8807. reset() {
  8808. this.chunks.length = 0;
  8809. this.dataLength = 0;
  8810. }
  8811. }
  8812. function concatUint8Arrays(chunks, dataLength) {
  8813. const result = new Uint8Array(dataLength);
  8814. let offset = 0;
  8815. for (let i = 0; i < chunks.length; i++) {
  8816. const chunk = chunks[i];
  8817. result.set(chunk, offset);
  8818. offset += chunk.length;
  8819. }
  8820. return result;
  8821. }
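// ---------------------------------------------------------------------------
// Minimal sketch (illustrative) of ChunkCache as used by the progressive fetch
// path below: small chunks accumulate until the high-water mark is reached, then
// flush() concatenates them into a single Uint8Array and resets the cache.
//
//   const cache = new ChunkCache();
//   cache.push(new Uint8Array([1, 2, 3]));
//   cache.push(new Uint8Array([4, 5]));
//   cache.dataLength;   // 5
//   cache.flush();      // Uint8Array [1, 2, 3, 4, 5]
// ---------------------------------------------------------------------------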
  8822. function fetchSupported() {
  8823. if (
  8824. // @ts-ignore
  8825. self.fetch && self.AbortController && self.ReadableStream && self.Request) {
  8826. try {
  8827. new self.ReadableStream({}); // eslint-disable-line no-new
  8828. return true;
  8829. } catch (e) {
  8830. /* noop */
  8831. }
  8832. }
  8833. return false;
  8834. }
  8835. const BYTERANGE = /(\d+)-(\d+)\/(\d+)/;
  8836. class FetchLoader {
  8837. constructor(config /* HlsConfig */) {
  8838. this.fetchSetup = void 0;
  8839. this.requestTimeout = void 0;
  8840. this.request = null;
  8841. this.response = null;
  8842. this.controller = void 0;
  8843. this.context = null;
  8844. this.config = null;
  8845. this.callbacks = null;
  8846. this.stats = void 0;
  8847. this.loader = null;
  8848. this.fetchSetup = config.fetchSetup || getRequest;
  8849. this.controller = new self.AbortController();
  8850. this.stats = new LoadStats();
  8851. }
  8852. destroy() {
  8853. this.loader = this.callbacks = this.context = this.config = this.request = null;
  8854. this.abortInternal();
  8855. this.response = null;
  8856. // @ts-ignore
  8857. this.fetchSetup = this.controller = this.stats = null;
  8858. }
  8859. abortInternal() {
  8860. if (this.controller && !this.stats.loading.end) {
  8861. this.stats.aborted = true;
  8862. this.controller.abort();
  8863. }
  8864. }
  8865. abort() {
  8866. var _this$callbacks;
  8867. this.abortInternal();
  8868. if ((_this$callbacks = this.callbacks) != null && _this$callbacks.onAbort) {
  8869. this.callbacks.onAbort(this.stats, this.context, this.response);
  8870. }
  8871. }
  8872. load(context, config, callbacks) {
  8873. const stats = this.stats;
  8874. if (stats.loading.start) {
  8875. throw new Error('Loader can only be used once.');
  8876. }
  8877. stats.loading.start = self.performance.now();
  8878. const initParams = getRequestParameters(context, this.controller.signal);
  8879. const onProgress = callbacks.onProgress;
  8880. const isArrayBuffer = context.responseType === 'arraybuffer';
  8881. const LENGTH = isArrayBuffer ? 'byteLength' : 'length';
  8882. const {
  8883. maxTimeToFirstByteMs,
  8884. maxLoadTimeMs
  8885. } = config.loadPolicy;
  8886. this.context = context;
  8887. this.config = config;
  8888. this.callbacks = callbacks;
  8889. this.request = this.fetchSetup(context, initParams);
  8890. self.clearTimeout(this.requestTimeout);
  8891. config.timeout = maxTimeToFirstByteMs && isFiniteNumber(maxTimeToFirstByteMs) ? maxTimeToFirstByteMs : maxLoadTimeMs;
  8892. this.requestTimeout = self.setTimeout(() => {
  8893. this.abortInternal();
  8894. callbacks.onTimeout(stats, context, this.response);
  8895. }, config.timeout);
  8896. self.fetch(this.request).then(response => {
  8897. this.response = this.loader = response;
  8898. const first = Math.max(self.performance.now(), stats.loading.start);
  8899. self.clearTimeout(this.requestTimeout);
  8900. config.timeout = maxLoadTimeMs;
  8901. this.requestTimeout = self.setTimeout(() => {
  8902. this.abortInternal();
  8903. callbacks.onTimeout(stats, context, this.response);
  8904. }, maxLoadTimeMs - (first - stats.loading.start));
  8905. if (!response.ok) {
  8906. const {
  8907. status,
  8908. statusText
  8909. } = response;
  8910. throw new FetchError(statusText || 'fetch, bad network response', status, response);
  8911. }
  8912. stats.loading.first = first;
  8913. stats.total = getContentLength(response.headers) || stats.total;
  8914. if (onProgress && isFiniteNumber(config.highWaterMark)) {
  8915. return this.loadProgressively(response, stats, context, config.highWaterMark, onProgress);
  8916. }
  8917. if (isArrayBuffer) {
  8918. return response.arrayBuffer();
  8919. }
  8920. if (context.responseType === 'json') {
  8921. return response.json();
  8922. }
  8923. return response.text();
  8924. }).then(responseData => {
  8925. const response = this.response;
  8926. if (!response) {
  8927. throw new Error('loader destroyed');
  8928. }
  8929. self.clearTimeout(this.requestTimeout);
  8930. stats.loading.end = Math.max(self.performance.now(), stats.loading.first);
  8931. const total = responseData[LENGTH];
  8932. if (total) {
  8933. stats.loaded = stats.total = total;
  8934. }
  8935. const loaderResponse = {
  8936. url: response.url,
  8937. data: responseData,
  8938. code: response.status
  8939. };
  8940. if (onProgress && !isFiniteNumber(config.highWaterMark)) {
  8941. onProgress(stats, context, responseData, response);
  8942. }
  8943. callbacks.onSuccess(loaderResponse, stats, context, response);
  8944. }).catch(error => {
  8945. self.clearTimeout(this.requestTimeout);
  8946. if (stats.aborted) {
  8947. return;
  8948. }
8949. // CORS errors result in an undefined code; set it to 0 here to align with XHR's behavior.
8950. // When destroying, 'error' itself can be undefined.
  8951. const code = !error ? 0 : error.code || 0;
  8952. const text = !error ? null : error.message;
  8953. callbacks.onError({
  8954. code,
  8955. text
  8956. }, context, error ? error.details : null, stats);
  8957. });
  8958. }
  8959. getCacheAge() {
  8960. let result = null;
  8961. if (this.response) {
  8962. const ageHeader = this.response.headers.get('age');
  8963. result = ageHeader ? parseFloat(ageHeader) : null;
  8964. }
  8965. return result;
  8966. }
  8967. getResponseHeader(name) {
  8968. return this.response ? this.response.headers.get(name) : null;
  8969. }
  8970. loadProgressively(response, stats, context, highWaterMark = 0, onProgress) {
  8971. const chunkCache = new ChunkCache();
  8972. const reader = response.body.getReader();
  8973. const pump = () => {
  8974. return reader.read().then(data => {
  8975. if (data.done) {
  8976. if (chunkCache.dataLength) {
  8977. onProgress(stats, context, chunkCache.flush(), response);
  8978. }
  8979. return Promise.resolve(new ArrayBuffer(0));
  8980. }
  8981. const chunk = data.value;
  8982. const len = chunk.length;
  8983. stats.loaded += len;
  8984. if (len < highWaterMark || chunkCache.dataLength) {
8985. // The current chunk is too small to be emitted, or the cache already has data
  8986. // Push it to the cache
  8987. chunkCache.push(chunk);
  8988. if (chunkCache.dataLength >= highWaterMark) {
  8989. // flush in order to join the typed arrays
  8990. onProgress(stats, context, chunkCache.flush(), response);
  8991. }
  8992. } else {
8993. // If there's nothing cached already and the chunk is large enough,
  8994. // just emit the progress event
  8995. onProgress(stats, context, chunk, response);
  8996. }
  8997. return pump();
  8998. }).catch(() => {
  8999. /* aborted */
  9000. return Promise.reject();
  9001. });
  9002. };
  9003. return pump();
  9004. }
  9005. }
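// ---------------------------------------------------------------------------
// Usage sketch (illustrative only): FetchLoader accepts an optional fetchSetup
// hook to customise the Request, and streams fragments progressively when an
// onProgress callback and a finite highWaterMark are configured (see load()
// above). The credentials tweak below is hypothetical.
//
//   const hls = new Hls({
//     progressive: true,   // opt in to progressive streaming (see enableStreamingMode below)
//     fetchSetup: (context, initParams) => {
//       initParams.credentials = 'include';
//       return new Request(context.url, initParams);
//     }
//   });
// ---------------------------------------------------------------------------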
  9006. function getRequestParameters(context, signal) {
  9007. const initParams = {
  9008. method: 'GET',
  9009. mode: 'cors',
  9010. credentials: 'same-origin',
  9011. signal,
  9012. headers: new self.Headers(_extends({}, context.headers))
  9013. };
  9014. if (context.rangeEnd) {
  9015. initParams.headers.set('Range', 'bytes=' + context.rangeStart + '-' + String(context.rangeEnd - 1));
  9016. }
  9017. return initParams;
  9018. }
  9019. function getByteRangeLength(byteRangeHeader) {
  9020. const result = BYTERANGE.exec(byteRangeHeader);
  9021. if (result) {
  9022. return parseInt(result[2]) - parseInt(result[1]) + 1;
  9023. }
  9024. }
  9025. function getContentLength(headers) {
  9026. const contentRange = headers.get('Content-Range');
  9027. if (contentRange) {
  9028. const byteRangeLength = getByteRangeLength(contentRange);
  9029. if (isFiniteNumber(byteRangeLength)) {
  9030. return byteRangeLength;
  9031. }
  9032. }
  9033. const contentLength = headers.get('Content-Length');
  9034. if (contentLength) {
  9035. return parseInt(contentLength);
  9036. }
  9037. }
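// ---------------------------------------------------------------------------
// Worked example (illustrative) for the helpers above: a Content-Range header of
// "bytes 100-599/146515" yields 599 - 100 + 1 = 500 bytes, which takes
// precedence over Content-Length when both headers are present.
//
//   getByteRangeLength('bytes 100-599/146515');   // -> 500
// ---------------------------------------------------------------------------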
  9038. function getRequest(context, initParams) {
  9039. return new self.Request(context.url, initParams);
  9040. }
  9041. class FetchError extends Error {
  9042. constructor(message, code, details) {
  9043. super(message);
  9044. this.code = void 0;
  9045. this.details = void 0;
  9046. this.code = code;
  9047. this.details = details;
  9048. }
  9049. }
  9050. /**
  9051. * @deprecated use fragLoadPolicy.default
  9052. */
  9053. /**
  9054. * @deprecated use manifestLoadPolicy.default and playlistLoadPolicy.default
  9055. */
  9056. const defaultLoadPolicy = {
  9057. maxTimeToFirstByteMs: 8000,
  9058. maxLoadTimeMs: 20000,
  9059. timeoutRetry: null,
  9060. errorRetry: null
  9061. };
  9062. /**
  9063. * @ignore
  9064. * If possible, keep hlsDefaultConfig shallow
9065. * It is cloned whenever a new Hls instance is created; by keeping the config
9066. * shallow, the properties are cloned and we don't end up manipulating the defaults
  9067. */
  9068. const hlsDefaultConfig = _objectSpread2(_objectSpread2({
  9069. autoStartLoad: true,
  9070. // used by stream-controller
  9071. startPosition: -1,
  9072. // used by stream-controller
  9073. defaultAudioCodec: undefined,
  9074. // used by stream-controller
  9075. debug: false,
  9076. // used by logger
  9077. capLevelOnFPSDrop: false,
  9078. // used by fps-controller
  9079. capLevelToPlayerSize: false,
  9080. // used by cap-level-controller
  9081. ignoreDevicePixelRatio: false,
  9082. // used by cap-level-controller
  9083. preferManagedMediaSource: true,
  9084. initialLiveManifestSize: 1,
  9085. // used by stream-controller
  9086. maxBufferLength: 30,
  9087. // used by stream-controller
  9088. backBufferLength: Infinity,
  9089. // used by buffer-controller
  9090. frontBufferFlushThreshold: Infinity,
  9091. maxBufferSize: 60 * 1000 * 1000,
  9092. // used by stream-controller
  9093. maxBufferHole: 0.1,
  9094. // used by stream-controller
  9095. highBufferWatchdogPeriod: 2,
  9096. // used by stream-controller
  9097. nudgeOffset: 0.1,
  9098. // used by stream-controller
  9099. nudgeMaxRetry: 3,
  9100. // used by stream-controller
  9101. maxFragLookUpTolerance: 0.25,
  9102. // used by stream-controller
  9103. liveSyncDurationCount: 3,
  9104. // used by latency-controller
  9105. liveMaxLatencyDurationCount: Infinity,
  9106. // used by latency-controller
  9107. liveSyncDuration: undefined,
  9108. // used by latency-controller
  9109. liveMaxLatencyDuration: undefined,
  9110. // used by latency-controller
  9111. maxLiveSyncPlaybackRate: 1,
  9112. // used by latency-controller
  9113. liveDurationInfinity: false,
  9114. // used by buffer-controller
  9115. /**
  9116. * @deprecated use backBufferLength
  9117. */
  9118. liveBackBufferLength: null,
  9119. // used by buffer-controller
  9120. maxMaxBufferLength: 600,
  9121. // used by stream-controller
  9122. enableWorker: true,
  9123. // used by transmuxer
  9124. workerPath: null,
  9125. // used by transmuxer
  9126. enableSoftwareAES: true,
  9127. // used by decrypter
  9128. startLevel: undefined,
  9129. // used by level-controller
  9130. startFragPrefetch: false,
  9131. // used by stream-controller
  9132. fpsDroppedMonitoringPeriod: 5000,
  9133. // used by fps-controller
  9134. fpsDroppedMonitoringThreshold: 0.2,
  9135. // used by fps-controller
  9136. appendErrorMaxRetry: 3,
  9137. // used by buffer-controller
  9138. loader: XhrLoader,
  9139. // loader: FetchLoader,
  9140. fLoader: undefined,
  9141. // used by fragment-loader
  9142. pLoader: undefined,
  9143. // used by playlist-loader
  9144. xhrSetup: undefined,
  9145. // used by xhr-loader
  9146. licenseXhrSetup: undefined,
  9147. // used by eme-controller
  9148. licenseResponseCallback: undefined,
  9149. // used by eme-controller
  9150. abrController: AbrController,
  9151. bufferController: BufferController,
  9152. capLevelController: CapLevelController,
  9153. errorController: ErrorController,
  9154. fpsController: FPSController,
  9155. stretchShortVideoTrack: false,
  9156. // used by mp4-remuxer
  9157. maxAudioFramesDrift: 1,
  9158. // used by mp4-remuxer
  9159. forceKeyFrameOnDiscontinuity: true,
  9160. // used by ts-demuxer
  9161. abrEwmaFastLive: 3,
  9162. // used by abr-controller
  9163. abrEwmaSlowLive: 9,
  9164. // used by abr-controller
  9165. abrEwmaFastVoD: 3,
  9166. // used by abr-controller
  9167. abrEwmaSlowVoD: 9,
  9168. // used by abr-controller
  9169. abrEwmaDefaultEstimate: 5e5,
  9170. // 500 kbps // used by abr-controller
  9171. abrEwmaDefaultEstimateMax: 5e6,
  9172. // 5 mbps
  9173. abrBandWidthFactor: 0.95,
  9174. // used by abr-controller
  9175. abrBandWidthUpFactor: 0.7,
  9176. // used by abr-controller
  9177. abrMaxWithRealBitrate: false,
  9178. // used by abr-controller
  9179. maxStarvationDelay: 4,
  9180. // used by abr-controller
  9181. maxLoadingDelay: 4,
  9182. // used by abr-controller
  9183. minAutoBitrate: 0,
  9184. // used by hls
  9185. emeEnabled: false,
  9186. // used by eme-controller
  9187. widevineLicenseUrl: undefined,
  9188. // used by eme-controller
  9189. drmSystems: {},
  9190. // used by eme-controller
  9191. drmSystemOptions: {},
  9192. // used by eme-controller
  9193. requestMediaKeySystemAccessFunc: null,
  9194. // used by eme-controller
  9195. testBandwidth: true,
  9196. progressive: false,
  9197. lowLatencyMode: true,
  9198. cmcd: undefined,
  9199. enableDateRangeMetadataCues: true,
  9200. enableEmsgMetadataCues: true,
  9201. enableID3MetadataCues: true,
  9202. useMediaCapabilities: false,
  9203. certLoadPolicy: {
  9204. default: defaultLoadPolicy
  9205. },
  9206. keyLoadPolicy: {
  9207. default: {
  9208. maxTimeToFirstByteMs: 8000,
  9209. maxLoadTimeMs: 20000,
  9210. timeoutRetry: {
  9211. maxNumRetry: 1,
  9212. retryDelayMs: 1000,
  9213. maxRetryDelayMs: 20000,
  9214. backoff: 'linear'
  9215. },
  9216. errorRetry: {
  9217. maxNumRetry: 8,
  9218. retryDelayMs: 1000,
  9219. maxRetryDelayMs: 20000,
  9220. backoff: 'linear'
  9221. }
  9222. }
  9223. },
  9224. manifestLoadPolicy: {
  9225. default: {
  9226. maxTimeToFirstByteMs: Infinity,
  9227. maxLoadTimeMs: 20000,
  9228. timeoutRetry: {
  9229. maxNumRetry: 2,
  9230. retryDelayMs: 0,
  9231. maxRetryDelayMs: 0
  9232. },
  9233. errorRetry: {
  9234. maxNumRetry: 1,
  9235. retryDelayMs: 1000,
  9236. maxRetryDelayMs: 8000
  9237. }
  9238. }
  9239. },
  9240. playlistLoadPolicy: {
  9241. default: {
  9242. maxTimeToFirstByteMs: 10000,
  9243. maxLoadTimeMs: 20000,
  9244. timeoutRetry: {
  9245. maxNumRetry: 2,
  9246. retryDelayMs: 0,
  9247. maxRetryDelayMs: 0
  9248. },
  9249. errorRetry: {
  9250. maxNumRetry: 2,
  9251. retryDelayMs: 1000,
  9252. maxRetryDelayMs: 8000
  9253. }
  9254. }
  9255. },
  9256. fragLoadPolicy: {
  9257. default: {
  9258. maxTimeToFirstByteMs: 10000,
  9259. maxLoadTimeMs: 120000,
  9260. timeoutRetry: {
  9261. maxNumRetry: 4,
  9262. retryDelayMs: 0,
  9263. maxRetryDelayMs: 0
  9264. },
  9265. errorRetry: {
  9266. maxNumRetry: 6,
  9267. retryDelayMs: 1000,
  9268. maxRetryDelayMs: 8000
  9269. }
  9270. }
  9271. },
  9272. steeringManifestLoadPolicy: {
  9273. default: {
  9274. maxTimeToFirstByteMs: 10000,
  9275. maxLoadTimeMs: 20000,
  9276. timeoutRetry: {
  9277. maxNumRetry: 2,
  9278. retryDelayMs: 0,
  9279. maxRetryDelayMs: 0
  9280. },
  9281. errorRetry: {
  9282. maxNumRetry: 1,
  9283. retryDelayMs: 1000,
  9284. maxRetryDelayMs: 8000
  9285. }
  9286. }
  9287. },
  9288. // These default settings are deprecated in favor of the above policies
  9289. // and are maintained for backwards compatibility
  9290. manifestLoadingTimeOut: 10000,
  9291. manifestLoadingMaxRetry: 1,
  9292. manifestLoadingRetryDelay: 1000,
  9293. manifestLoadingMaxRetryTimeout: 64000,
  9294. levelLoadingTimeOut: 10000,
  9295. levelLoadingMaxRetry: 4,
  9296. levelLoadingRetryDelay: 1000,
  9297. levelLoadingMaxRetryTimeout: 64000,
  9298. fragLoadingTimeOut: 20000,
  9299. fragLoadingMaxRetry: 6,
  9300. fragLoadingRetryDelay: 1000,
  9301. fragLoadingMaxRetryTimeout: 64000
  9302. }, timelineConfig()), {}, {
  9303. subtitleStreamController: undefined,
  9304. subtitleTrackController: undefined,
  9305. timelineController: undefined,
  9306. audioStreamController: undefined,
  9307. audioTrackController: undefined,
  9308. emeController: undefined,
  9309. cmcdController: undefined,
  9310. contentSteeringController: ContentSteeringController
  9311. });
  9312. function timelineConfig() {
  9313. return {
  9314. cueHandler: Cues,
  9315. // used by timeline-controller
  9316. enableWebVTT: false,
  9317. // used by timeline-controller
  9318. enableIMSC1: false,
  9319. // used by timeline-controller
  9320. enableCEA708Captions: false,
  9321. // used by timeline-controller
  9322. captionsTextTrack1Label: 'English',
  9323. // used by timeline-controller
  9324. captionsTextTrack1LanguageCode: 'en',
  9325. // used by timeline-controller
  9326. captionsTextTrack2Label: 'Spanish',
  9327. // used by timeline-controller
  9328. captionsTextTrack2LanguageCode: 'es',
  9329. // used by timeline-controller
  9330. captionsTextTrack3Label: 'Unknown CC',
  9331. // used by timeline-controller
  9332. captionsTextTrack3LanguageCode: '',
  9333. // used by timeline-controller
  9334. captionsTextTrack4Label: 'Unknown CC',
  9335. // used by timeline-controller
  9336. captionsTextTrack4LanguageCode: '',
  9337. // used by timeline-controller
  9338. renderTextTracksNatively: true
  9339. };
  9340. }
  9341. /**
  9342. * @ignore
  9343. */
  9344. function mergeConfig(defaultConfig, userConfig) {
  9345. if ((userConfig.liveSyncDurationCount || userConfig.liveMaxLatencyDurationCount) && (userConfig.liveSyncDuration || userConfig.liveMaxLatencyDuration)) {
  9346. throw new Error("Illegal hls.js config: don't mix up liveSyncDurationCount/liveMaxLatencyDurationCount and liveSyncDuration/liveMaxLatencyDuration");
  9347. }
  9348. if (userConfig.liveMaxLatencyDurationCount !== undefined && (userConfig.liveSyncDurationCount === undefined || userConfig.liveMaxLatencyDurationCount <= userConfig.liveSyncDurationCount)) {
  9349. throw new Error('Illegal hls.js config: "liveMaxLatencyDurationCount" must be greater than "liveSyncDurationCount"');
  9350. }
  9351. if (userConfig.liveMaxLatencyDuration !== undefined && (userConfig.liveSyncDuration === undefined || userConfig.liveMaxLatencyDuration <= userConfig.liveSyncDuration)) {
  9352. throw new Error('Illegal hls.js config: "liveMaxLatencyDuration" must be greater than "liveSyncDuration"');
  9353. }
  9354. const defaultsCopy = deepCpy(defaultConfig);
  9355. // Backwards compatibility with deprecated config values
  9356. const deprecatedSettingTypes = ['manifest', 'level', 'frag'];
  9357. const deprecatedSettings = ['TimeOut', 'MaxRetry', 'RetryDelay', 'MaxRetryTimeout'];
  9358. deprecatedSettingTypes.forEach(type => {
  9359. const policyName = `${type === 'level' ? 'playlist' : type}LoadPolicy`;
  9360. const policyNotSet = userConfig[policyName] === undefined;
  9361. const report = [];
  9362. deprecatedSettings.forEach(setting => {
  9363. const deprecatedSetting = `${type}Loading${setting}`;
  9364. const value = userConfig[deprecatedSetting];
  9365. if (value !== undefined && policyNotSet) {
  9366. report.push(deprecatedSetting);
  9367. const settings = defaultsCopy[policyName].default;
  9368. userConfig[policyName] = {
  9369. default: settings
  9370. };
  9371. switch (setting) {
  9372. case 'TimeOut':
  9373. settings.maxLoadTimeMs = value;
  9374. settings.maxTimeToFirstByteMs = value;
  9375. break;
  9376. case 'MaxRetry':
  9377. settings.errorRetry.maxNumRetry = value;
  9378. settings.timeoutRetry.maxNumRetry = value;
  9379. break;
  9380. case 'RetryDelay':
  9381. settings.errorRetry.retryDelayMs = value;
  9382. settings.timeoutRetry.retryDelayMs = value;
  9383. break;
  9384. case 'MaxRetryTimeout':
  9385. settings.errorRetry.maxRetryDelayMs = value;
  9386. settings.timeoutRetry.maxRetryDelayMs = value;
  9387. break;
  9388. }
  9389. }
  9390. });
  9391. if (report.length) {
  9392. logger.warn(`hls.js config: "${report.join('", "')}" setting(s) are deprecated, use "${policyName}": ${JSON.stringify(userConfig[policyName])}`);
  9393. }
  9394. });
  9395. return _objectSpread2(_objectSpread2({}, defaultsCopy), userConfig);
  9396. }
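/*
 * Illustrative sketch (not part of the library source): mergeConfig() maps the deprecated
 * flat options onto the corresponding load policy when no policy was supplied, and logs a
 * deprecation warning. It is normally invoked internally when an Hls instance is
 * constructed; calling it directly with a legacy option shows the translation:
 *
 *   const merged = mergeConfig(hlsDefaultConfig, { fragLoadingMaxRetry: 3 });
 *   // merged.fragLoadPolicy.default.errorRetry.maxNumRetry === 3
 *   // merged.fragLoadPolicy.default.timeoutRetry.maxNumRetry === 3
 */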
  9397. function deepCpy(obj) {
  9398. if (obj && typeof obj === 'object') {
  9399. if (Array.isArray(obj)) {
  9400. return obj.map(deepCpy);
  9401. }
  9402. return Object.keys(obj).reduce((result, key) => {
  9403. result[key] = deepCpy(obj[key]);
  9404. return result;
  9405. }, {});
  9406. }
  9407. return obj;
  9408. }
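/*
 * Illustrative sketch (not part of the library source): deepCpy() recursively clones plain
 * objects and arrays via their enumerable keys (prototypes are not preserved); primitives
 * and functions are returned as-is.
 *
 *   const src = { retry: { delays: [0, 1000] } };
 *   const copy = deepCpy(src);
 *   copy.retry.delays.push(8000); // src.retry.delays is untouched
 */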
  9409. /**
  9410. * @ignore
  9411. */
  9412. function enableStreamingMode(config) {
  9413. const currentLoader = config.loader;
  9414. if (currentLoader !== FetchLoader && currentLoader !== XhrLoader) {
  9415. // If a developer has configured their own loader, respect that choice
  9416. logger.log('[config]: Custom loader detected, cannot enable progressive streaming');
  9417. config.progressive = false;
  9418. } else {
  9419. const canStreamProgressively = fetchSupported();
  9420. if (canStreamProgressively) {
  9421. config.loader = FetchLoader;
  9422. config.progressive = true;
  9423. config.enableSoftwareAES = true;
  9424. logger.log('[config]: Progressive streaming enabled, using FetchLoader');
  9425. }
  9426. }
  9427. }
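/*
 * Illustrative sketch (not part of the library source): enableStreamingMode() mutates the
 * config it is handed. With one of the built-in loaders and a browser whose fetch() can
 * stream response bodies, it swaps in FetchLoader and enables progressive loading and
 * software AES; with a custom loader it logs and forces `progressive` off instead.
 *
 *   const config = mergeConfig(hlsDefaultConfig, { progressive: true });
 *   enableStreamingMode(config);
 *   // fetchSupported() === true -> config.loader === FetchLoader, config.progressive === true
 *   // custom config.loader      -> config.progressive === false, loader left untouched
 */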
  9428. let chromeOrFirefox;
  9429. class LevelController extends BasePlaylistController {
  9430. constructor(hls, contentSteeringController) {
  9431. super(hls, '[level-controller]');
  9432. this._levels = [];
  9433. this._firstLevel = -1;
  9434. this._maxAutoLevel = -1;
  9435. this._startLevel = void 0;
  9436. this.currentLevel = null;
  9437. this.currentLevelIndex = -1;
  9438. this.manualLevelIndex = -1;
  9439. this.steering = void 0;
  9440. this.onParsedComplete = void 0;
  9441. this.steering = contentSteeringController;
  9442. this._registerListeners();
  9443. }
  9444. _registerListeners() {
  9445. const {
  9446. hls
  9447. } = this;
  9448. hls.on(Events.MANIFEST_LOADING, this.onManifestLoading, this);
  9449. hls.on(Events.MANIFEST_LOADED, this.onManifestLoaded, this);
  9450. hls.on(Events.LEVEL_LOADED, this.onLevelLoaded, this);
  9451. hls.on(Events.LEVELS_UPDATED, this.onLevelsUpdated, this);
  9452. hls.on(Events.FRAG_BUFFERED, this.onFragBuffered, this);
  9453. hls.on(Events.ERROR, this.onError, this);
  9454. }
  9455. _unregisterListeners() {
  9456. const {
  9457. hls
  9458. } = this;
  9459. hls.off(Events.MANIFEST_LOADING, this.onManifestLoading, this);
  9460. hls.off(Events.MANIFEST_LOADED, this.onManifestLoaded, this);
  9461. hls.off(Events.LEVEL_LOADED, this.onLevelLoaded, this);
  9462. hls.off(Events.LEVELS_UPDATED, this.onLevelsUpdated, this);
  9463. hls.off(Events.FRAG_BUFFERED, this.onFragBuffered, this);
  9464. hls.off(Events.ERROR, this.onError, this);
  9465. }
  9466. destroy() {
  9467. this._unregisterListeners();
  9468. this.steering = null;
  9469. this.resetLevels();
  9470. super.destroy();
  9471. }
  9472. stopLoad() {
  9473. const levels = this._levels;
  9474. // clean up live level details to force them to be reloaded, and reset load errors
  9475. levels.forEach(level => {
  9476. level.loadError = 0;
  9477. level.fragmentError = 0;
  9478. });
  9479. super.stopLoad();
  9480. }
  9481. resetLevels() {
  9482. this._startLevel = undefined;
  9483. this.manualLevelIndex = -1;
  9484. this.currentLevelIndex = -1;
  9485. this.currentLevel = null;
  9486. this._levels = [];
  9487. this._maxAutoLevel = -1;
  9488. }
  9489. onManifestLoading(event, data) {
  9490. this.resetLevels();
  9491. }
  9492. onManifestLoaded(event, data) {
  9493. const preferManagedMediaSource = this.hls.config.preferManagedMediaSource;
  9494. const levels = [];
  9495. const redundantSet = {};
  9496. const generatePathwaySet = {};
  9497. let resolutionFound = false;
  9498. let videoCodecFound = false;
  9499. let audioCodecFound = false;
  9500. data.levels.forEach(levelParsed => {
  9501. var _audioCodec, _videoCodec;
  9502. const attributes = levelParsed.attrs;
  9503. // erase audio codec info if the browser does not support mp4a.40.34;
  9504. // the demuxer will autodetect the codec and fall back to mpeg/audio
  9505. let {
  9506. audioCodec,
  9507. videoCodec
  9508. } = levelParsed;
  9509. if (((_audioCodec = audioCodec) == null ? void 0 : _audioCodec.indexOf('mp4a.40.34')) !== -1) {
  9510. chromeOrFirefox || (chromeOrFirefox = /chrome|firefox/i.test(navigator.userAgent));
  9511. if (chromeOrFirefox) {
  9512. levelParsed.audioCodec = audioCodec = undefined;
  9513. }
  9514. }
  9515. if (audioCodec) {
  9516. levelParsed.audioCodec = audioCodec = getCodecCompatibleName(audioCodec, preferManagedMediaSource);
  9517. }
  9518. if (((_videoCodec = videoCodec) == null ? void 0 : _videoCodec.indexOf('avc1')) === 0) {
  9519. videoCodec = levelParsed.videoCodec = convertAVC1ToAVCOTI(videoCodec);
  9520. }
  9521. // only keep levels with supported audio/video codecs
  9522. const {
  9523. width,
  9524. height,
  9525. unknownCodecs
  9526. } = levelParsed;
  9527. resolutionFound || (resolutionFound = !!(width && height));
  9528. videoCodecFound || (videoCodecFound = !!videoCodec);
  9529. audioCodecFound || (audioCodecFound = !!audioCodec);
  9530. if (unknownCodecs != null && unknownCodecs.length || audioCodec && !areCodecsMediaSourceSupported(audioCodec, 'audio', preferManagedMediaSource) || videoCodec && !areCodecsMediaSourceSupported(videoCodec, 'video', preferManagedMediaSource)) {
  9531. return;
  9532. }
  9533. const {
  9534. CODECS,
  9535. 'FRAME-RATE': FRAMERATE,
  9536. 'HDCP-LEVEL': HDCP,
  9537. 'PATHWAY-ID': PATHWAY,
  9538. RESOLUTION,
  9539. 'VIDEO-RANGE': VIDEO_RANGE
  9540. } = attributes;
  9541. const contentSteeringPrefix = `${PATHWAY || '.'}-`;
  9542. const levelKey = `${contentSteeringPrefix}${levelParsed.bitrate}-${RESOLUTION}-${FRAMERATE}-${CODECS}-${VIDEO_RANGE}-${HDCP}`;
  9543. if (!redundantSet[levelKey]) {
  9544. const level = new Level(levelParsed);
  9545. redundantSet[levelKey] = level;
  9546. generatePathwaySet[levelKey] = 1;
  9547. levels.push(level);
  9548. } else if (redundantSet[levelKey].uri !== levelParsed.url && !levelParsed.attrs['PATHWAY-ID']) {
  9549. // Assign Pathway IDs to Redundant Streams (the default Pathway is "."; Redundant Streams get "..", "...", and so on).
  9550. // The Content Steering controller handles Pathway fallback on error.
  9551. const pathwayCount = generatePathwaySet[levelKey] += 1;
  9552. levelParsed.attrs['PATHWAY-ID'] = new Array(pathwayCount + 1).join('.');
  9553. const level = new Level(levelParsed);
  9554. redundantSet[levelKey] = level;
  9555. levels.push(level);
  9556. } else {
  9557. redundantSet[levelKey].addGroupId('audio', attributes.AUDIO);
  9558. redundantSet[levelKey].addGroupId('text', attributes.SUBTITLES);
  9559. }
  9560. });
  9561. this.filterAndSortMediaOptions(levels, data, resolutionFound, videoCodecFound, audioCodecFound);
  9562. }
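/*
 * Illustrative sketch (not part of the library source): variants are de-duplicated with a
 * key of the form `${PATHWAY || '.'}-${bitrate}-${RESOLUTION}-${FRAME-RATE}-${CODECS}-${VIDEO-RANGE}-${HDCP-LEVEL}`
 * (missing attributes stringify as "undefined"). Two variants that share this key but point
 * at different URIs are treated as redundant streams: the first keeps the default Pathway
 * ".", the next gets the synthetic PATHWAY-ID "..", then "...", and so on, so content
 * steering can fail over between them. Variants with the same key and URI only contribute
 * their AUDIO/SUBTITLES group ids to the existing level.
 */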
  9563. filterAndSortMediaOptions(filteredLevels, data, resolutionFound, videoCodecFound, audioCodecFound) {
  9564. let audioTracks = [];
  9565. let subtitleTracks = [];
  9566. let levels = filteredLevels;
  9567. // remove audio-only and invalid video-range levels if we also have levels with video codecs or RESOLUTION signalled
  9568. if ((resolutionFound || videoCodecFound) && audioCodecFound) {
  9569. levels = levels.filter(({
  9570. videoCodec,
  9571. videoRange,
  9572. width,
  9573. height
  9574. }) => (!!videoCodec || !!(width && height)) && isVideoRange(videoRange));
  9575. }
  9576. if (levels.length === 0) {
  9577. // Dispatch error after MANIFEST_LOADED is done propagating
  9578. Promise.resolve().then(() => {
  9579. if (this.hls) {
  9580. if (data.levels.length) {
  9581. this.warn(`One or more CODECS in variant not supported: ${JSON.stringify(data.levels[0].attrs)}`);
  9582. }
  9583. const error = new Error('no level with compatible codecs found in manifest');
  9584. this.hls.trigger(Events.ERROR, {
  9585. type: ErrorTypes.MEDIA_ERROR,
  9586. details: ErrorDetails.MANIFEST_INCOMPATIBLE_CODECS_ERROR,
  9587. fatal: true,
  9588. url: data.url,
  9589. error,
  9590. reason: error.message
  9591. });
  9592. }
  9593. });
  9594. return;
  9595. }
  9596. if (data.audioTracks) {
  9597. const {
  9598. preferManagedMediaSource
  9599. } = this.hls.config;
  9600. audioTracks = data.audioTracks.filter(track => !track.audioCodec || areCodecsMediaSourceSupported(track.audioCodec, 'audio', preferManagedMediaSource));
  9601. // Assign ids after filtering, using array indices within each group-id
  9602. assignTrackIdsByGroup(audioTracks);
  9603. }
  9604. if (data.subtitles) {
  9605. subtitleTracks = data.subtitles;
  9606. assignTrackIdsByGroup(subtitleTracks);
  9607. }
  9608. // start bitrate is the first bitrate of the manifest
  9609. const unsortedLevels = levels.slice(0);
  9610. // sort levels from lowest to highest
  9611. levels.sort((a, b) => {
  9612. if (a.attrs['HDCP-LEVEL'] !== b.attrs['HDCP-LEVEL']) {
  9613. return (a.attrs['HDCP-LEVEL'] || '') > (b.attrs['HDCP-LEVEL'] || '') ? 1 : -1;
  9614. }
  9615. // sort on height before bitrate for cap-level-controller
  9616. if (resolutionFound && a.height !== b.height) {
  9617. return a.height - b.height;
  9618. }
  9619. if (a.frameRate !== b.frameRate) {
  9620. return a.frameRate - b.frameRate;
  9621. }
  9622. if (a.videoRange !== b.videoRange) {
  9623. return VideoRangeValues.indexOf(a.videoRange) - VideoRangeValues.indexOf(b.videoRange);
  9624. }
  9625. if (a.videoCodec !== b.videoCodec) {
  9626. const valueA = videoCodecPreferenceValue(a.videoCodec);
  9627. const valueB = videoCodecPreferenceValue(b.videoCodec);
  9628. if (valueA !== valueB) {
  9629. return valueB - valueA;
  9630. }
  9631. }
  9632. if (a.uri === b.uri && a.codecSet !== b.codecSet) {
  9633. const valueA = codecsSetSelectionPreferenceValue(a.codecSet);
  9634. const valueB = codecsSetSelectionPreferenceValue(b.codecSet);
  9635. if (valueA !== valueB) {
  9636. return valueB - valueA;
  9637. }
  9638. }
  9639. if (a.averageBitrate !== b.averageBitrate) {
  9640. return a.averageBitrate - b.averageBitrate;
  9641. }
  9642. return 0;
  9643. });
  9644. let firstLevelInPlaylist = unsortedLevels[0];
  9645. if (this.steering) {
  9646. levels = this.steering.filterParsedLevels(levels);
  9647. if (levels.length !== unsortedLevels.length) {
  9648. for (let i = 0; i < unsortedLevels.length; i++) {
  9649. if (unsortedLevels[i].pathwayId === levels[0].pathwayId) {
  9650. firstLevelInPlaylist = unsortedLevels[i];
  9651. break;
  9652. }
  9653. }
  9654. }
  9655. }
  9656. this._levels = levels;
  9657. // find index of first level in sorted levels
  9658. for (let i = 0; i < levels.length; i++) {
  9659. if (levels[i] === firstLevelInPlaylist) {
  9660. var _this$hls$userConfig;
  9661. this._firstLevel = i;
  9662. const firstLevelBitrate = firstLevelInPlaylist.bitrate;
  9663. const bandwidthEstimate = this.hls.bandwidthEstimate;
  9664. this.log(`manifest loaded, ${levels.length} level(s) found, first bitrate: ${firstLevelBitrate}`);
  9665. // Update the default bandwidth estimate (bwe) to the first variant's bitrate, as long as it has not been configured or set
  9666. if (((_this$hls$userConfig = this.hls.userConfig) == null ? void 0 : _this$hls$userConfig.abrEwmaDefaultEstimate) === undefined) {
  9667. const startingBwEstimate = Math.min(firstLevelBitrate, this.hls.config.abrEwmaDefaultEstimateMax);
  9668. if (startingBwEstimate > bandwidthEstimate && bandwidthEstimate === hlsDefaultConfig.abrEwmaDefaultEstimate) {
  9669. this.hls.bandwidthEstimate = startingBwEstimate;
  9670. }
  9671. }
  9672. break;
  9673. }
  9674. }
  9675. // Audio is only alternate if the manifest includes a URI along with the audio group tag,
  9676. // and this is not an audio-only stream where the levels themselves are audio-only
  9677. const audioOnly = audioCodecFound && !videoCodecFound;
  9678. const edata = {
  9679. levels,
  9680. audioTracks,
  9681. subtitleTracks,
  9682. sessionData: data.sessionData,
  9683. sessionKeys: data.sessionKeys,
  9684. firstLevel: this._firstLevel,
  9685. stats: data.stats,
  9686. audio: audioCodecFound,
  9687. video: videoCodecFound,
  9688. altAudio: !audioOnly && audioTracks.some(t => !!t.url)
  9689. };
  9690. this.hls.trigger(Events.MANIFEST_PARSED, edata);
  9691. // Initiate loading after all controllers have received MANIFEST_PARSED
  9692. if (this.hls.config.autoStartLoad || this.hls.forceStartLoad) {
  9693. this.hls.startLoad(this.hls.config.startPosition);
  9694. }
  9695. }
  9696. get levels() {
  9697. if (this._levels.length === 0) {
  9698. return null;
  9699. }
  9700. return this._levels;
  9701. }
  9702. get level() {
  9703. return this.currentLevelIndex;
  9704. }
  9705. set level(newLevel) {
  9706. const levels = this._levels;
  9707. if (levels.length === 0) {
  9708. return;
  9709. }
  9710. // check if level idx is valid
  9711. if (newLevel < 0 || newLevel >= levels.length) {
  9712. // invalid level id given, trigger error
  9713. const error = new Error('invalid level idx');
  9714. const fatal = newLevel < 0;
  9715. this.hls.trigger(Events.ERROR, {
  9716. type: ErrorTypes.OTHER_ERROR,
  9717. details: ErrorDetails.LEVEL_SWITCH_ERROR,
  9718. level: newLevel,
  9719. fatal,
  9720. error,
  9721. reason: error.message
  9722. });
  9723. if (fatal) {
  9724. return;
  9725. }
  9726. newLevel = Math.min(newLevel, levels.length - 1);
  9727. }
  9728. const lastLevelIndex = this.currentLevelIndex;
  9729. const lastLevel = this.currentLevel;
  9730. const lastPathwayId = lastLevel ? lastLevel.attrs['PATHWAY-ID'] : undefined;
  9731. const level = levels[newLevel];
  9732. const pathwayId = level.attrs['PATHWAY-ID'];
  9733. this.currentLevelIndex = newLevel;
  9734. this.currentLevel = level;
  9735. if (lastLevelIndex === newLevel && level.details && lastLevel && lastPathwayId === pathwayId) {
  9736. return;
  9737. }
  9738. this.log(`Switching to level ${newLevel} (${level.height ? level.height + 'p ' : ''}${level.videoRange ? level.videoRange + ' ' : ''}${level.codecSet ? level.codecSet + ' ' : ''}@${level.bitrate})${pathwayId ? ' with Pathway ' + pathwayId : ''} from level ${lastLevelIndex}${lastPathwayId ? ' with Pathway ' + lastPathwayId : ''}`);
  9739. const levelSwitchingData = {
  9740. level: newLevel,
  9741. attrs: level.attrs,
  9742. details: level.details,
  9743. bitrate: level.bitrate,
  9744. averageBitrate: level.averageBitrate,
  9745. maxBitrate: level.maxBitrate,
  9746. realBitrate: level.realBitrate,
  9747. width: level.width,
  9748. height: level.height,
  9749. codecSet: level.codecSet,
  9750. audioCodec: level.audioCodec,
  9751. videoCodec: level.videoCodec,
  9752. audioGroups: level.audioGroups,
  9753. subtitleGroups: level.subtitleGroups,
  9754. loaded: level.loaded,
  9755. loadError: level.loadError,
  9756. fragmentError: level.fragmentError,
  9757. name: level.name,
  9758. id: level.id,
  9759. uri: level.uri,
  9760. url: level.url,
  9761. urlId: 0,
  9762. audioGroupIds: level.audioGroupIds,
  9763. textGroupIds: level.textGroupIds
  9764. };
  9765. this.hls.trigger(Events.LEVEL_SWITCHING, levelSwitchingData);
  9766. // check if we need to load playlist for this level
  9767. const levelDetails = level.details;
  9768. if (!levelDetails || levelDetails.live) {
  9769. // level not retrieved yet, or a live playlist: we need to (re)load it
  9770. const hlsUrlParameters = this.switchParams(level.uri, lastLevel == null ? void 0 : lastLevel.details, levelDetails);
  9771. this.loadPlaylist(hlsUrlParameters);
  9772. }
  9773. }
  9774. get manualLevel() {
  9775. return this.manualLevelIndex;
  9776. }
  9777. set manualLevel(newLevel) {
  9778. this.manualLevelIndex = newLevel;
  9779. if (this._startLevel === undefined) {
  9780. this._startLevel = newLevel;
  9781. }
  9782. if (newLevel !== -1) {
  9783. this.level = newLevel;
  9784. }
  9785. }
  9786. get firstLevel() {
  9787. return this._firstLevel;
  9788. }
  9789. set firstLevel(newLevel) {
  9790. this._firstLevel = newLevel;
  9791. }
  9792. get startLevel() {
  9793. // Setting hls.startLevel (this._startLevel) overrides config.startLevel
  9794. if (this._startLevel === undefined) {
  9795. const configStartLevel = this.hls.config.startLevel;
  9796. if (configStartLevel !== undefined) {
  9797. return configStartLevel;
  9798. }
  9799. return this.hls.firstAutoLevel;
  9800. }
  9801. return this._startLevel;
  9802. }
  9803. set startLevel(newLevel) {
  9804. this._startLevel = newLevel;
  9805. }
  9806. onError(event, data) {
  9807. if (data.fatal || !data.context) {
  9808. return;
  9809. }
  9810. if (data.context.type === PlaylistContextType.LEVEL && data.context.level === this.level) {
  9811. this.checkRetry(data);
  9812. }
  9813. }
  9814. // reset errors on the successful load of a fragment
  9815. onFragBuffered(event, {
  9816. frag
  9817. }) {
  9818. if (frag !== undefined && frag.type === PlaylistLevelType.MAIN) {
  9819. const el = frag.elementaryStreams;
  9820. if (!Object.keys(el).some(type => !!el[type])) {
  9821. return;
  9822. }
  9823. const level = this._levels[frag.level];
  9824. if (level != null && level.loadError) {
  9825. this.log(`Resetting level error count of ${level.loadError} on frag buffered`);
  9826. level.loadError = 0;
  9827. }
  9828. }
  9829. }
  9830. onLevelLoaded(event, data) {
  9831. var _data$deliveryDirecti2;
  9832. const {
  9833. level,
  9834. details
  9835. } = data;
  9836. const curLevel = this._levels[level];
  9837. if (!curLevel) {
  9838. var _data$deliveryDirecti;
  9839. this.warn(`Invalid level index ${level}`);
  9840. if ((_data$deliveryDirecti = data.deliveryDirectives) != null && _data$deliveryDirecti.skip) {
  9841. details.deltaUpdateFailed = true;
  9842. }
  9843. return;
  9844. }
  9845. // only process level loaded events matching the expected level
  9846. if (level === this.currentLevelIndex) {
  9847. // reset the level load error counter on a successful level load, but only if there are no issues with fragments
  9848. if (curLevel.fragmentError === 0) {
  9849. curLevel.loadError = 0;
  9850. }
  9851. this.playlistLoaded(level, data, curLevel.details);
  9852. } else if ((_data$deliveryDirecti2 = data.deliveryDirectives) != null && _data$deliveryDirecti2.skip) {
  9853. // received a delta playlist update that cannot be merged
  9854. details.deltaUpdateFailed = true;
  9855. }
  9856. }
  9857. loadPlaylist(hlsUrlParameters) {
  9858. super.loadPlaylist();
  9859. const currentLevelIndex = this.currentLevelIndex;
  9860. const currentLevel = this.currentLevel;
  9861. if (currentLevel && this.shouldLoadPlaylist(currentLevel)) {
  9862. let url = currentLevel.uri;
  9863. if (hlsUrlParameters) {
  9864. try {
  9865. url = hlsUrlParameters.addDirectives(url);
  9866. } catch (error) {
  9867. this.warn(`Could not construct new URL with HLS Delivery Directives: ${error}`);
  9868. }
  9869. }
  9870. const pathwayId = currentLevel.attrs['PATHWAY-ID'];
  9871. this.log(`Loading level index ${currentLevelIndex}${(hlsUrlParameters == null ? void 0 : hlsUrlParameters.msn) !== undefined ? ' at sn ' + hlsUrlParameters.msn + ' part ' + hlsUrlParameters.part : ''} with${pathwayId ? ' Pathway ' + pathwayId : ''} ${url}`);
  9872. // console.log('Current audio track group ID:', this.hls.audioTracks[this.hls.audioTrack].groupId);
  9873. // console.log('New video quality level audio group id:', levelObject.attrs.AUDIO, level);
  9874. this.clearTimer();
  9875. this.hls.trigger(Events.LEVEL_LOADING, {
  9876. url,
  9877. level: currentLevelIndex,
  9878. pathwayId: currentLevel.attrs['PATHWAY-ID'],
  9879. id: 0,
  9880. // Deprecated Level urlId
  9881. deliveryDirectives: hlsUrlParameters || null
  9882. });
  9883. }
  9884. }
  9885. get nextLoadLevel() {
  9886. if (this.manualLevelIndex !== -1) {
  9887. return this.manualLevelIndex;
  9888. } else {
  9889. return this.hls.nextAutoLevel;
  9890. }
  9891. }
  9892. set nextLoadLevel(nextLevel) {
  9893. this.level = nextLevel;
  9894. if (this.manualLevelIndex === -1) {
  9895. this.hls.nextAutoLevel = nextLevel;
  9896. }
  9897. }
  9898. removeLevel(levelIndex) {
  9899. var _this$currentLevel;
  9900. const levels = this._levels.filter((level, index) => {
  9901. if (index !== levelIndex) {
  9902. return true;
  9903. }
  9904. if (this.steering) {
  9905. this.steering.removeLevel(level);
  9906. }
  9907. if (level === this.currentLevel) {
  9908. this.currentLevel = null;
  9909. this.currentLevelIndex = -1;
  9910. if (level.details) {
  9911. level.details.fragments.forEach(f => f.level = -1);
  9912. }
  9913. }
  9914. return false;
  9915. });
  9916. reassignFragmentLevelIndexes(levels);
  9917. this._levels = levels;
  9918. if (this.currentLevelIndex > -1 && (_this$currentLevel = this.currentLevel) != null && _this$currentLevel.details) {
  9919. this.currentLevelIndex = this.currentLevel.details.fragments[0].level;
  9920. }
  9921. this.hls.trigger(Events.LEVELS_UPDATED, {
  9922. levels
  9923. });
  9924. }
  9925. onLevelsUpdated(event, {
  9926. levels
  9927. }) {
  9928. this._levels = levels;
  9929. }
  9930. checkMaxAutoUpdated() {
  9931. const {
  9932. autoLevelCapping,
  9933. maxAutoLevel,
  9934. maxHdcpLevel
  9935. } = this.hls;
  9936. if (this._maxAutoLevel !== maxAutoLevel) {
  9937. this._maxAutoLevel = maxAutoLevel;
  9938. this.hls.trigger(Events.MAX_AUTO_LEVEL_UPDATED, {
  9939. autoLevelCapping,
  9940. levels: this.levels,
  9941. maxAutoLevel,
  9942. minAutoLevel: this.hls.minAutoLevel,
  9943. maxHdcpLevel
  9944. });
  9945. }
  9946. }
  9947. }
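/*
 * Illustrative usage sketch (not part of the library source): LevelController backs the
 * quality-level API on the Hls facade. Assuming the standard public accessors and event
 * names (`hls.currentLevel`, `Hls.Events.*`) delegate to this controller, a manual switch
 * and the events it produces look roughly like this:
 *
 *   const hls = new Hls();
 *   hls.on(Hls.Events.MANIFEST_PARSED, (event, data) => {
 *     console.log(`found ${data.levels.length} level(s), first level ${data.firstLevel}`);
 *     hls.currentLevel = 0;  // pin the lowest level; -1 hands control back to ABR
 *   });
 *   hls.on(Hls.Events.LEVEL_SWITCHING, (event, data) => {
 *     console.log(`switching to level ${data.level} @ ${data.bitrate} bps`);
 *   });
 */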
  9948. function assignTrackIdsByGroup(tracks) {
  9949. const groups = {};
  9950. tracks.forEach(track => {
  9951. const groupId = track.groupId || '';
  9952. track.id = groups[groupId] = groups[groupId] || 0;
  9953. groups[groupId]++;
  9954. });
  9955. }
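/*
 * Illustrative sketch (not part of the library source): ids restart from 0 within each
 * group-id, matching the "array index per group" semantics used after filtering.
 *
 *   const tracks = [{ groupId: 'aac' }, { groupId: 'aac' }, { groupId: 'ec3' }];
 *   assignTrackIdsByGroup(tracks);
 *   // tracks[0].id === 0, tracks[1].id === 1, tracks[2].id === 0
 */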
  9956. var FragmentState = {
  9957. NOT_LOADED: "NOT_LOADED",
  9958. APPENDING: "APPENDING",
  9959. PARTIAL: "PARTIAL",
  9960. OK: "OK"
  9961. };
  9962. class FragmentTracker {
  9963. constructor(hls) {
  9964. this.activePartLists = Object.create(null);
  9965. this.endListFragments = Object.create(null);
  9966. this.fragments = Object.create(null);
  9967. this.timeRanges = Object.create(null);
  9968. this.bufferPadding = 0.2;
  9969. this.hls = void 0;
  9970. this.hasGaps = false;
  9971. this.hls = hls;
  9972. this._registerListeners();
  9973. }
  9974. _registerListeners() {
  9975. const {
  9976. hls
  9977. } = this;
  9978. hls.on(Events.BUFFER_APPENDED, this.onBufferAppended, this);
  9979. hls.on(Events.FRAG_BUFFERED, this.onFragBuffered, this);
  9980. hls.on(Events.FRAG_LOADED, this.onFragLoaded, this);
  9981. }
  9982. _unregisterListeners() {
  9983. const {
  9984. hls
  9985. } = this;
  9986. hls.off(Events.BUFFER_APPENDED, this.onBufferAppended, this);
  9987. hls.off(Events.FRAG_BUFFERED, this.onFragBuffered, this);
  9988. hls.off(Events.FRAG_LOADED, this.onFragLoaded, this);
  9989. }
  9990. destroy() {
  9991. this._unregisterListeners();
  9992. // @ts-ignore
  9993. this.fragments =
  9994. // @ts-ignore
  9995. this.activePartLists =
  9996. // @ts-ignore
  9997. this.endListFragments = this.timeRanges = null;
  9998. }
  9999. /**
  10000. * Return a Fragment or Part with an appended range that matches the position and levelType
  10001. * Otherwise, return null
  10002. */
  10003. getAppendedFrag(position, levelType) {
  10004. const activeParts = this.activePartLists[levelType];
  10005. if (activeParts) {
  10006. for (let i = activeParts.length; i--;) {
  10007. const activePart = activeParts[i];
  10008. if (!activePart) {
  10009. break;
  10010. }
  10011. const appendedPTS = activePart.end;
  10012. if (activePart.start <= position && appendedPTS !== null && position <= appendedPTS) {
  10013. return activePart;
  10014. }
  10015. }
  10016. }
  10017. return this.getBufferedFrag(position, levelType);
  10018. }
  10019. /**
  10020. * Return a buffered Fragment that matches the position and levelType.
  10021. * A buffered Fragment is one whose loading, parsing and appending is done (completed or "partial" meaning aborted).
  10022. * If no matching Fragment is found, return null
  10023. */
  10024. getBufferedFrag(position, levelType) {
  10025. const {
  10026. fragments
  10027. } = this;
  10028. const keys = Object.keys(fragments);
  10029. for (let i = keys.length; i--;) {
  10030. const fragmentEntity = fragments[keys[i]];
  10031. if ((fragmentEntity == null ? void 0 : fragmentEntity.body.type) === levelType && fragmentEntity.buffered) {
  10032. const frag = fragmentEntity.body;
  10033. if (frag.start <= position && position <= frag.end) {
  10034. return frag;
  10035. }
  10036. }
  10037. }
  10038. return null;
  10039. }
  10040. /**
  10041. * Partial fragments affected by coded frame eviction will be removed.
  10042. * The browser will unload parts of the buffer to free up memory for new buffer data.
  10043. * Fragments will need to be reloaded when the buffer is freed up; removing partial fragments allows them to be reloaded (since there might be parts that are still playable)
  10044. */
  10045. detectEvictedFragments(elementaryStream, timeRange, playlistType, appendedPart) {
  10046. if (this.timeRanges) {
  10047. this.timeRanges[elementaryStream] = timeRange;
  10048. }
  10049. // Check if any flagged fragments have been unloaded
  10050. // excluding anything newer than appendedPartSn
  10051. const appendedPartSn = (appendedPart == null ? void 0 : appendedPart.fragment.sn) || -1;
  10052. Object.keys(this.fragments).forEach(key => {
  10053. const fragmentEntity = this.fragments[key];
  10054. if (!fragmentEntity) {
  10055. return;
  10056. }
  10057. if (appendedPartSn >= fragmentEntity.body.sn) {
  10058. return;
  10059. }
  10060. if (!fragmentEntity.buffered && !fragmentEntity.loaded) {
  10061. if (fragmentEntity.body.type === playlistType) {
  10062. this.removeFragment(fragmentEntity.body);
  10063. }
  10064. return;
  10065. }
  10066. const esData = fragmentEntity.range[elementaryStream];
  10067. if (!esData) {
  10068. return;
  10069. }
  10070. esData.time.some(time => {
  10071. const isNotBuffered = !this.isTimeBuffered(time.startPTS, time.endPTS, timeRange);
  10072. if (isNotBuffered) {
  10073. // Unregister partial fragment as it needs to load again to be reused
  10074. this.removeFragment(fragmentEntity.body);
  10075. }
  10076. return isNotBuffered;
  10077. });
  10078. });
  10079. }
  10080. /**
  10081. * Checks if the fragment passed in is loaded in the buffer properly
  10082. * Partially loaded fragments will be registered as a partial fragment
  10083. */
  10084. detectPartialFragments(data) {
  10085. const timeRanges = this.timeRanges;
  10086. const {
  10087. frag,
  10088. part
  10089. } = data;
  10090. if (!timeRanges || frag.sn === 'initSegment') {
  10091. return;
  10092. }
  10093. const fragKey = getFragmentKey(frag);
  10094. const fragmentEntity = this.fragments[fragKey];
  10095. if (!fragmentEntity || fragmentEntity.buffered && frag.gap) {
  10096. return;
  10097. }
  10098. const isFragHint = !frag.relurl;
  10099. Object.keys(timeRanges).forEach(elementaryStream => {
  10100. const streamInfo = frag.elementaryStreams[elementaryStream];
  10101. if (!streamInfo) {
  10102. return;
  10103. }
  10104. const timeRange = timeRanges[elementaryStream];
  10105. const partial = isFragHint || streamInfo.partial === true;
  10106. fragmentEntity.range[elementaryStream] = this.getBufferedTimes(frag, part, partial, timeRange);
  10107. });
  10108. fragmentEntity.loaded = null;
  10109. if (Object.keys(fragmentEntity.range).length) {
  10110. fragmentEntity.buffered = true;
  10111. const endList = fragmentEntity.body.endList = frag.endList || fragmentEntity.body.endList;
  10112. if (endList) {
  10113. this.endListFragments[fragmentEntity.body.type] = fragmentEntity;
  10114. }
  10115. if (!isPartial(fragmentEntity)) {
  10116. // Remove older fragment parts from lookup after frag is tracked as buffered
  10117. this.removeParts(frag.sn - 1, frag.type);
  10118. }
  10119. } else {
  10120. // remove fragment if nothing was appended
  10121. this.removeFragment(fragmentEntity.body);
  10122. }
  10123. }
  10124. removeParts(snToKeep, levelType) {
  10125. const activeParts = this.activePartLists[levelType];
  10126. if (!activeParts) {
  10127. return;
  10128. }
  10129. this.activePartLists[levelType] = activeParts.filter(part => part.fragment.sn >= snToKeep);
  10130. }
  10131. fragBuffered(frag, force) {
  10132. const fragKey = getFragmentKey(frag);
  10133. let fragmentEntity = this.fragments[fragKey];
  10134. if (!fragmentEntity && force) {
  10135. fragmentEntity = this.fragments[fragKey] = {
  10136. body: frag,
  10137. appendedPTS: null,
  10138. loaded: null,
  10139. buffered: false,
  10140. range: Object.create(null)
  10141. };
  10142. if (frag.gap) {
  10143. this.hasGaps = true;
  10144. }
  10145. }
  10146. if (fragmentEntity) {
  10147. fragmentEntity.loaded = null;
  10148. fragmentEntity.buffered = true;
  10149. }
  10150. }
  10151. getBufferedTimes(fragment, part, partial, timeRange) {
  10152. const buffered = {
  10153. time: [],
  10154. partial
  10155. };
  10156. const startPTS = fragment.start;
  10157. const endPTS = fragment.end;
  10158. const minEndPTS = fragment.minEndPTS || endPTS;
  10159. const maxStartPTS = fragment.maxStartPTS || startPTS;
  10160. for (let i = 0; i < timeRange.length; i++) {
  10161. const startTime = timeRange.start(i) - this.bufferPadding;
  10162. const endTime = timeRange.end(i) + this.bufferPadding;
  10163. if (maxStartPTS >= startTime && minEndPTS <= endTime) {
  10164. // Fragment is entirely contained in buffer
  10165. // No need to check the other timeRange times since it's completely playable
  10166. buffered.time.push({
  10167. startPTS: Math.max(startPTS, timeRange.start(i)),
  10168. endPTS: Math.min(endPTS, timeRange.end(i))
  10169. });
  10170. break;
  10171. } else if (startPTS < endTime && endPTS > startTime) {
  10172. const start = Math.max(startPTS, timeRange.start(i));
  10173. const end = Math.min(endPTS, timeRange.end(i));
  10174. if (end > start) {
  10175. buffered.partial = true;
  10176. // Check for intersection with buffer
  10177. // Get playable sections of the fragment
  10178. buffered.time.push({
  10179. startPTS: start,
  10180. endPTS: end
  10181. });
  10182. }
  10183. } else if (endPTS <= startTime) {
  10184. // No need to check the rest of the timeRange as it is in order
  10185. break;
  10186. }
  10187. }
  10188. return buffered;
  10189. }
  10190. /**
  10191. * Gets the partial fragment for a certain time
  10192. */
  10193. getPartialFragment(time) {
  10194. let bestFragment = null;
  10195. let timePadding;
  10196. let startTime;
  10197. let endTime;
  10198. let bestOverlap = 0;
  10199. const {
  10200. bufferPadding,
  10201. fragments
  10202. } = this;
  10203. Object.keys(fragments).forEach(key => {
  10204. const fragmentEntity = fragments[key];
  10205. if (!fragmentEntity) {
  10206. return;
  10207. }
  10208. if (isPartial(fragmentEntity)) {
  10209. startTime = fragmentEntity.body.start - bufferPadding;
  10210. endTime = fragmentEntity.body.end + bufferPadding;
  10211. if (time >= startTime && time <= endTime) {
  10212. // Use the fragment that has the most padding from start and end time
  10213. timePadding = Math.min(time - startTime, endTime - time);
  10214. if (bestOverlap <= timePadding) {
  10215. bestFragment = fragmentEntity.body;
  10216. bestOverlap = timePadding;
  10217. }
  10218. }
  10219. }
  10220. });
  10221. return bestFragment;
  10222. }
  10223. isEndListAppended(type) {
  10224. const lastFragmentEntity = this.endListFragments[type];
  10225. return lastFragmentEntity !== undefined && (lastFragmentEntity.buffered || isPartial(lastFragmentEntity));
  10226. }
  10227. getState(fragment) {
  10228. const fragKey = getFragmentKey(fragment);
  10229. const fragmentEntity = this.fragments[fragKey];
  10230. if (fragmentEntity) {
  10231. if (!fragmentEntity.buffered) {
  10232. return FragmentState.APPENDING;
  10233. } else if (isPartial(fragmentEntity)) {
  10234. return FragmentState.PARTIAL;
  10235. } else {
  10236. return FragmentState.OK;
  10237. }
  10238. }
  10239. return FragmentState.NOT_LOADED;
  10240. }
  10241. isTimeBuffered(startPTS, endPTS, timeRange) {
  10242. let startTime;
  10243. let endTime;
  10244. for (let i = 0; i < timeRange.length; i++) {
  10245. startTime = timeRange.start(i) - this.bufferPadding;
  10246. endTime = timeRange.end(i) + this.bufferPadding;
  10247. if (startPTS >= startTime && endPTS <= endTime) {
  10248. return true;
  10249. }
  10250. if (endPTS <= startTime) {
  10251. // No need to check the rest of the timeRange as it is in order
  10252. return false;
  10253. }
  10254. }
  10255. return false;
  10256. }
  10257. onFragLoaded(event, data) {
  10258. const {
  10259. frag,
  10260. part
  10261. } = data;
  10262. // don't track initsegment (for which sn is not a number)
  10263. // don't track frags used for bitrateTest, they're irrelevant.
  10264. if (frag.sn === 'initSegment' || frag.bitrateTest) {
  10265. return;
  10266. }
  10267. // The fragment entity's `loaded` FragLoadedData is null when loading parts
  10268. const loaded = part ? null : data;
  10269. const fragKey = getFragmentKey(frag);
  10270. this.fragments[fragKey] = {
  10271. body: frag,
  10272. appendedPTS: null,
  10273. loaded,
  10274. buffered: false,
  10275. range: Object.create(null)
  10276. };
  10277. }
  10278. onBufferAppended(event, data) {
  10279. const {
  10280. frag,
  10281. part,
  10282. timeRanges
  10283. } = data;
  10284. if (frag.sn === 'initSegment') {
  10285. return;
  10286. }
  10287. const playlistType = frag.type;
  10288. if (part) {
  10289. let activeParts = this.activePartLists[playlistType];
  10290. if (!activeParts) {
  10291. this.activePartLists[playlistType] = activeParts = [];
  10292. }
  10293. activeParts.push(part);
  10294. }
  10295. // Store the latest timeRanges loaded in the buffer
  10296. this.timeRanges = timeRanges;
  10297. Object.keys(timeRanges).forEach(elementaryStream => {
  10298. const timeRange = timeRanges[elementaryStream];
  10299. this.detectEvictedFragments(elementaryStream, timeRange, playlistType, part);
  10300. });
  10301. }
  10302. onFragBuffered(event, data) {
  10303. this.detectPartialFragments(data);
  10304. }
  10305. hasFragment(fragment) {
  10306. const fragKey = getFragmentKey(fragment);
  10307. return !!this.fragments[fragKey];
  10308. }
  10309. hasParts(type) {
  10310. var _this$activePartLists;
  10311. return !!((_this$activePartLists = this.activePartLists[type]) != null && _this$activePartLists.length);
  10312. }
  10313. removeFragmentsInRange(start, end, playlistType, withGapOnly, unbufferedOnly) {
  10314. if (withGapOnly && !this.hasGaps) {
  10315. return;
  10316. }
  10317. Object.keys(this.fragments).forEach(key => {
  10318. const fragmentEntity = this.fragments[key];
  10319. if (!fragmentEntity) {
  10320. return;
  10321. }
  10322. const frag = fragmentEntity.body;
  10323. if (frag.type !== playlistType || withGapOnly && !frag.gap) {
  10324. return;
  10325. }
  10326. if (frag.start < end && frag.end > start && (fragmentEntity.buffered || unbufferedOnly)) {
  10327. this.removeFragment(frag);
  10328. }
  10329. });
  10330. }
  10331. removeFragment(fragment) {
  10332. const fragKey = getFragmentKey(fragment);
  10333. fragment.stats.loaded = 0;
  10334. fragment.clearElementaryStreamInfo();
  10335. const activeParts = this.activePartLists[fragment.type];
  10336. if (activeParts) {
  10337. const snToRemove = fragment.sn;
  10338. this.activePartLists[fragment.type] = activeParts.filter(part => part.fragment.sn !== snToRemove);
  10339. }
  10340. delete this.fragments[fragKey];
  10341. if (fragment.endList) {
  10342. delete this.endListFragments[fragment.type];
  10343. }
  10344. }
  10345. removeAllFragments() {
  10346. this.fragments = Object.create(null);
  10347. this.endListFragments = Object.create(null);
  10348. this.activePartLists = Object.create(null);
  10349. this.hasGaps = false;
  10350. }
  10351. }
  10352. function isPartial(fragmentEntity) {
  10353. var _fragmentEntity$range, _fragmentEntity$range2, _fragmentEntity$range3;
  10354. return fragmentEntity.buffered && (fragmentEntity.body.gap || ((_fragmentEntity$range = fragmentEntity.range.video) == null ? void 0 : _fragmentEntity$range.partial) || ((_fragmentEntity$range2 = fragmentEntity.range.audio) == null ? void 0 : _fragmentEntity$range2.partial) || ((_fragmentEntity$range3 = fragmentEntity.range.audiovideo) == null ? void 0 : _fragmentEntity$range3.partial));
  10355. }
  10356. function getFragmentKey(fragment) {
  10357. return `${fragment.type}_${fragment.level}_${fragment.sn}`;
  10358. }
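/*
 * Illustrative usage sketch (not part of the library source): the tracker keys fragments
 * by "<type>_<level>_<sn>" (see getFragmentKey above) and moves them through the
 * FragmentState values as loading/buffering events arrive. A stream controller typically
 * consults it before (re)loading a fragment:
 *
 *   const tracker = new FragmentTracker(hls); // `hls` is an existing Hls instance
 *   // ... FRAG_LOADED / BUFFER_APPENDED / FRAG_BUFFERED events update the tracker ...
 *   switch (tracker.getState(frag)) {
 *     case FragmentState.NOT_LOADED: // safe to request the fragment
 *     case FragmentState.PARTIAL:    // only partially buffered, may need reloading
 *       break;
 *     case FragmentState.APPENDING:  // loaded, waiting to be (fully) buffered
 *     case FragmentState.OK:         // fully buffered
 *       break;
 *   }
 */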
  10359. const MIN_CHUNK_SIZE = Math.pow(2, 17); // 128kb
  10360. class FragmentLoader {
  10361. constructor(config) {
  10362. this.config = void 0;
  10363. this.loader = null;
  10364. this.partLoadTimeout = -1;
  10365. this.config = config;
  10366. }
  10367. destroy() {
  10368. if (this.loader) {
  10369. this.loader.destroy();
  10370. this.loader = null;
  10371. }
  10372. }
  10373. abort() {
  10374. if (this.loader) {
  10375. // Abort the loader for current fragment. Only one may load at any given time
  10376. this.loader.abort();
  10377. }
  10378. }
  10379. load(frag, onProgress) {
  10380. const url = frag.url;
  10381. if (!url) {
  10382. return Promise.reject(new LoadError({
  10383. type: ErrorTypes.NETWORK_ERROR,
  10384. details: ErrorDetails.FRAG_LOAD_ERROR,
  10385. fatal: false,
  10386. frag,
  10387. error: new Error(`Fragment does not have a ${url ? 'part list' : 'url'}`),
  10388. networkDetails: null
  10389. }));
  10390. }
  10391. this.abort();
  10392. const config = this.config;
  10393. const FragmentILoader = config.fLoader;
  10394. const DefaultILoader = config.loader;
  10395. return new Promise((resolve, reject) => {
  10396. if (this.loader) {
  10397. this.loader.destroy();
  10398. }
  10399. if (frag.gap) {
  10400. if (frag.tagList.some(tags => tags[0] === 'GAP')) {
  10401. reject(createGapLoadError(frag));
  10402. return;
  10403. } else {
  10404. // Reset temporary treatment as GAP tag
  10405. frag.gap = false;
  10406. }
  10407. }
  10408. const loader = this.loader = frag.loader = FragmentILoader ? new FragmentILoader(config) : new DefaultILoader(config);
  10409. const loaderContext = createLoaderContext(frag);
  10410. const loadPolicy = getLoaderConfigWithoutReties(config.fragLoadPolicy.default);
  10411. const loaderConfig = {
  10412. loadPolicy,
  10413. timeout: loadPolicy.maxLoadTimeMs,
  10414. maxRetry: 0,
  10415. retryDelay: 0,
  10416. maxRetryDelay: 0,
  10417. highWaterMark: frag.sn === 'initSegment' ? Infinity : MIN_CHUNK_SIZE
  10418. };
  10419. // Assign frag stats to the loader's stats reference
  10420. frag.stats = loader.stats;
  10421. loader.load(loaderContext, loaderConfig, {
  10422. onSuccess: (response, stats, context, networkDetails) => {
  10423. this.resetLoader(frag, loader);
  10424. let payload = response.data;
  10425. if (context.resetIV && frag.decryptdata) {
  10426. frag.decryptdata.iv = new Uint8Array(payload.slice(0, 16));
  10427. payload = payload.slice(16);
  10428. }
  10429. resolve({
  10430. frag,
  10431. part: null,
  10432. payload,
  10433. networkDetails
  10434. });
  10435. },
  10436. onError: (response, context, networkDetails, stats) => {
  10437. this.resetLoader(frag, loader);
  10438. reject(new LoadError({
  10439. type: ErrorTypes.NETWORK_ERROR,
  10440. details: ErrorDetails.FRAG_LOAD_ERROR,
  10441. fatal: false,
  10442. frag,
  10443. response: _objectSpread2({
  10444. url,
  10445. data: undefined
  10446. }, response),
  10447. error: new Error(`HTTP Error ${response.code} ${response.text}`),
  10448. networkDetails,
  10449. stats
  10450. }));
  10451. },
  10452. onAbort: (stats, context, networkDetails) => {
  10453. this.resetLoader(frag, loader);
  10454. reject(new LoadError({
  10455. type: ErrorTypes.NETWORK_ERROR,
  10456. details: ErrorDetails.INTERNAL_ABORTED,
  10457. fatal: false,
  10458. frag,
  10459. error: new Error('Aborted'),
  10460. networkDetails,
  10461. stats
  10462. }));
  10463. },
  10464. onTimeout: (stats, context, networkDetails) => {
  10465. this.resetLoader(frag, loader);
  10466. reject(new LoadError({
  10467. type: ErrorTypes.NETWORK_ERROR,
  10468. details: ErrorDetails.FRAG_LOAD_TIMEOUT,
  10469. fatal: false,
  10470. frag,
  10471. error: new Error(`Timeout after ${loaderConfig.timeout}ms`),
  10472. networkDetails,
  10473. stats
  10474. }));
  10475. },
  10476. onProgress: (stats, context, data, networkDetails) => {
  10477. if (onProgress) {
  10478. onProgress({
  10479. frag,
  10480. part: null,
  10481. payload: data,
  10482. networkDetails
  10483. });
  10484. }
  10485. }
  10486. });
  10487. });
  10488. }
  10489. loadPart(frag, part, onProgress) {
  10490. this.abort();
  10491. const config = this.config;
  10492. const FragmentILoader = config.fLoader;
  10493. const DefaultILoader = config.loader;
  10494. return new Promise((resolve, reject) => {
  10495. if (this.loader) {
  10496. this.loader.destroy();
  10497. }
  10498. if (frag.gap || part.gap) {
  10499. reject(createGapLoadError(frag, part));
  10500. return;
  10501. }
  10502. const loader = this.loader = frag.loader = FragmentILoader ? new FragmentILoader(config) : new DefaultILoader(config);
  10503. const loaderContext = createLoaderContext(frag, part);
  10504. // Should we define another load policy for parts?
  10505. const loadPolicy = getLoaderConfigWithoutReties(config.fragLoadPolicy.default);
  10506. const loaderConfig = {
  10507. loadPolicy,
  10508. timeout: loadPolicy.maxLoadTimeMs,
  10509. maxRetry: 0,
  10510. retryDelay: 0,
  10511. maxRetryDelay: 0,
  10512. highWaterMark: MIN_CHUNK_SIZE
  10513. };
  10514. // Assign part stats to the loader's stats reference
  10515. part.stats = loader.stats;
  10516. loader.load(loaderContext, loaderConfig, {
  10517. onSuccess: (response, stats, context, networkDetails) => {
  10518. this.resetLoader(frag, loader);
  10519. this.updateStatsFromPart(frag, part);
  10520. const partLoadedData = {
  10521. frag,
  10522. part,
  10523. payload: response.data,
  10524. networkDetails
  10525. };
  10526. onProgress(partLoadedData);
  10527. resolve(partLoadedData);
  10528. },
  10529. onError: (response, context, networkDetails, stats) => {
  10530. this.resetLoader(frag, loader);
  10531. reject(new LoadError({
  10532. type: ErrorTypes.NETWORK_ERROR,
  10533. details: ErrorDetails.FRAG_LOAD_ERROR,
  10534. fatal: false,
  10535. frag,
  10536. part,
  10537. response: _objectSpread2({
  10538. url: loaderContext.url,
  10539. data: undefined
  10540. }, response),
  10541. error: new Error(`HTTP Error ${response.code} ${response.text}`),
  10542. networkDetails,
  10543. stats
  10544. }));
  10545. },
  10546. onAbort: (stats, context, networkDetails) => {
  10547. frag.stats.aborted = part.stats.aborted;
  10548. this.resetLoader(frag, loader);
  10549. reject(new LoadError({
  10550. type: ErrorTypes.NETWORK_ERROR,
  10551. details: ErrorDetails.INTERNAL_ABORTED,
  10552. fatal: false,
  10553. frag,
  10554. part,
  10555. error: new Error('Aborted'),
  10556. networkDetails,
  10557. stats
  10558. }));
  10559. },
  10560. onTimeout: (stats, context, networkDetails) => {
  10561. this.resetLoader(frag, loader);
  10562. reject(new LoadError({
  10563. type: ErrorTypes.NETWORK_ERROR,
  10564. details: ErrorDetails.FRAG_LOAD_TIMEOUT,
  10565. fatal: false,
  10566. frag,
  10567. part,
  10568. error: new Error(`Timeout after ${loaderConfig.timeout}ms`),
  10569. networkDetails,
  10570. stats
  10571. }));
  10572. }
  10573. });
  10574. });
  10575. }
  10576. updateStatsFromPart(frag, part) {
  10577. const fragStats = frag.stats;
  10578. const partStats = part.stats;
  10579. const partTotal = partStats.total;
  10580. fragStats.loaded += partStats.loaded;
  10581. if (partTotal) {
  10582. const estTotalParts = Math.round(frag.duration / part.duration);
  10583. const estLoadedParts = Math.min(Math.round(fragStats.loaded / partTotal), estTotalParts);
  10584. const estRemainingParts = estTotalParts - estLoadedParts;
  10585. const estRemainingBytes = estRemainingParts * Math.round(fragStats.loaded / estLoadedParts);
  10586. fragStats.total = fragStats.loaded + estRemainingBytes;
  10587. } else {
  10588. fragStats.total = Math.max(fragStats.loaded, fragStats.total);
  10589. }
  10590. const fragLoading = fragStats.loading;
  10591. const partLoading = partStats.loading;
  10592. if (fragLoading.start) {
  10593. // add to fragment loader latency
  10594. fragLoading.first += partLoading.first - partLoading.start;
  10595. } else {
  10596. fragLoading.start = partLoading.start;
  10597. fragLoading.first = partLoading.first;
  10598. }
  10599. fragLoading.end = partLoading.end;
  10600. }
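/*
 * Worked example (illustrative, values are made up): for a 6 s fragment delivered as 1 s
 * parts, after the third 100000-byte part has loaded:
 *   estTotalParts     = round(6 / 1)                   = 6
 *   estLoadedParts    = min(round(300000 / 100000), 6) = 3
 *   estRemainingParts = 6 - 3                          = 3
 *   estRemainingBytes = 3 * round(300000 / 3)          = 300000
 *   fragStats.total   = 300000 + 300000                = 600000
 * i.e. the fragment's total size is extrapolated from the parts seen so far.
 */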
  10601. resetLoader(frag, loader) {
  10602. frag.loader = null;
  10603. if (this.loader === loader) {
  10604. self.clearTimeout(this.partLoadTimeout);
  10605. this.loader = null;
  10606. }
  10607. loader.destroy();
  10608. }
  10609. }
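/*
 * Illustrative usage sketch (not part of the library source): FragmentLoader wraps the
 * configured loader (or config.fLoader when provided) behind a promise API. `frag` is
 * assumed to be a parsed fragment with a url and stats object, as produced elsewhere in
 * this module.
 *
 *   const fragmentLoader = new FragmentLoader(hls.config);
 *   fragmentLoader
 *     .load(frag, (progress) => {
 *       // chunks arrive here when the loader reports progress
 *       console.log(`received ${progress.payload.byteLength} bytes`);
 *     })
 *     .then(({ frag, payload }) => console.log(`loaded sn ${frag.sn}`, payload.byteLength))
 *     .catch((loadError) => console.warn(loadError.data.details));
 */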
  10610. function createLoaderContext(frag, part = null) {
  10611. const segment = part || frag;
  10612. const loaderContext = {
  10613. frag,
  10614. part,
  10615. responseType: 'arraybuffer',
  10616. url: segment.url,
  10617. headers: {},
  10618. rangeStart: 0,
  10619. rangeEnd: 0
  10620. };
  10621. const start = segment.byteRangeStartOffset;
  10622. const end = segment.byteRangeEndOffset;
  10623. if (isFiniteNumber(start) && isFiniteNumber(end)) {
  10624. var _frag$decryptdata;
  10625. let byteRangeStart = start;
  10626. let byteRangeEnd = end;
  10627. if (frag.sn === 'initSegment' && ((_frag$decryptdata = frag.decryptdata) == null ? void 0 : _frag$decryptdata.method) === 'AES-128') {
  10628. // MAP segment encrypted with method 'AES-128', when served with HTTP Range,
  10629. // has the unencrypted size specified in the range.
  10630. // Ref: https://tools.ietf.org/html/draft-pantos-hls-rfc8216bis-08#section-6.3.6
  10631. const fragmentLen = end - start;
  10632. if (fragmentLen % 16) {
  10633. byteRangeEnd = end + (16 - fragmentLen % 16);
  10634. }
  10635. if (start !== 0) {
  10636. loaderContext.resetIV = true;
  10637. byteRangeStart = start - 16;
  10638. }
  10639. }
  10640. loaderContext.rangeStart = byteRangeStart;
  10641. loaderContext.rangeEnd = byteRangeEnd;
  10642. }
  10643. return loaderContext;
  10644. }
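/*
 * Worked example (illustrative byte offsets): for an AES-128-encrypted init segment served
 * with a byte range, the playlist describes the unencrypted size, so the request range is
 * padded out to the 16-byte cipher block and widened by one block at the start so the
 * preceding block can be used as IV (loaderContext.resetIV, consumed in FragmentLoader's
 * onSuccess handler above):
 *
 *   byteRangeStartOffset = 500, byteRangeEndOffset = 800
 *   fragmentLen = 800 - 500 = 300; 300 % 16 = 12 -> rangeEnd   = 800 + (16 - 12) = 804
 *   start !== 0                                  -> rangeStart = 500 - 16        = 484,
 *                                                   resetIV    = true
 */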
  10645. function createGapLoadError(frag, part) {
  10646. const error = new Error(`GAP ${frag.gap ? 'tag' : 'attribute'} found`);
  10647. const errorData = {
  10648. type: ErrorTypes.MEDIA_ERROR,
  10649. details: ErrorDetails.FRAG_GAP,
  10650. fatal: false,
  10651. frag,
  10652. error,
  10653. networkDetails: null
  10654. };
  10655. if (part) {
  10656. errorData.part = part;
  10657. }
  10658. (part ? part : frag).stats.aborted = true;
  10659. return new LoadError(errorData);
  10660. }
  10661. class LoadError extends Error {
  10662. constructor(data) {
  10663. super(data.error.message);
  10664. this.data = void 0;
  10665. this.data = data;
  10666. }
  10667. }
  10668. class KeyLoader {
  10669. constructor(config) {
  10670. this.config = void 0;
  10671. this.keyUriToKeyInfo = {};
  10672. this.emeController = null;
  10673. this.config = config;
  10674. }
  10675. abort(type) {
  10676. for (const uri in this.keyUriToKeyInfo) {
  10677. const loader = this.keyUriToKeyInfo[uri].loader;
  10678. if (loader) {
  10679. var _loader$context;
  10680. if (type && type !== ((_loader$context = loader.context) == null ? void 0 : _loader$context.frag.type)) {
  10681. return;
  10682. }
  10683. loader.abort();
  10684. }
  10685. }
  10686. }
  10687. detach() {
  10688. for (const uri in this.keyUriToKeyInfo) {
  10689. const keyInfo = this.keyUriToKeyInfo[uri];
  10690. // Remove cached EME keys on detach
  10691. if (keyInfo.mediaKeySessionContext || keyInfo.decryptdata.isCommonEncryption) {
  10692. delete this.keyUriToKeyInfo[uri];
  10693. }
  10694. }
  10695. }
  10696. destroy() {
  10697. this.detach();
  10698. for (const uri in this.keyUriToKeyInfo) {
  10699. const loader = this.keyUriToKeyInfo[uri].loader;
  10700. if (loader) {
  10701. loader.destroy();
  10702. }
  10703. }
  10704. this.keyUriToKeyInfo = {};
  10705. }
  10706. createKeyLoadError(frag, details = ErrorDetails.KEY_LOAD_ERROR, error, networkDetails, response) {
  10707. return new LoadError({
  10708. type: ErrorTypes.NETWORK_ERROR,
  10709. details,
  10710. fatal: false,
  10711. frag,
  10712. response,
  10713. error,
  10714. networkDetails
  10715. });
  10716. }
  10717. loadClear(loadingFrag, encryptedFragments) {
  10718. if (this.emeController && this.config.emeEnabled) {
  10719. // access key-system with the nearest key on start (the loading frag is unencrypted)
  10720. const {
  10721. sn,
  10722. cc
  10723. } = loadingFrag;
  10724. for (let i = 0; i < encryptedFragments.length; i++) {
  10725. const frag = encryptedFragments[i];
  10726. if (cc <= frag.cc && (sn === 'initSegment' || frag.sn === 'initSegment' || sn < frag.sn)) {
  10727. this.emeController.selectKeySystemFormat(frag).then(keySystemFormat => {
  10728. frag.setKeyFormat(keySystemFormat);
  10729. });
  10730. break;
  10731. }
  10732. }
  10733. }
  10734. }
  10735. load(frag) {
  10736. if (!frag.decryptdata && frag.encrypted && this.emeController) {
  10737. // Multiple keys, but none selected, resolve in eme-controller
  10738. return this.emeController.selectKeySystemFormat(frag).then(keySystemFormat => {
  10739. return this.loadInternal(frag, keySystemFormat);
  10740. });
  10741. }
  10742. return this.loadInternal(frag);
  10743. }
  10744. loadInternal(frag, keySystemFormat) {
  10745. var _keyInfo, _keyInfo2;
  10746. if (keySystemFormat) {
  10747. frag.setKeyFormat(keySystemFormat);
  10748. }
  10749. const decryptdata = frag.decryptdata;
  10750. if (!decryptdata) {
  10751. const error = new Error(keySystemFormat ? `Expected frag.decryptdata to be defined after setting format ${keySystemFormat}` : 'Missing decryption data on fragment in onKeyLoading');
  10752. return Promise.reject(this.createKeyLoadError(frag, ErrorDetails.KEY_LOAD_ERROR, error));
  10753. }
  10754. const uri = decryptdata.uri;
  10755. if (!uri) {
  10756. return Promise.reject(this.createKeyLoadError(frag, ErrorDetails.KEY_LOAD_ERROR, new Error(`Invalid key URI: "${uri}"`)));
  10757. }
  10758. let keyInfo = this.keyUriToKeyInfo[uri];
  10759. if ((_keyInfo = keyInfo) != null && _keyInfo.decryptdata.key) {
  10760. decryptdata.key = keyInfo.decryptdata.key;
  10761. return Promise.resolve({
  10762. frag,
  10763. keyInfo
  10764. });
  10765. }
  10766. // Return key load promise as long as it does not have a mediakey session with an unusable key status
  10767. if ((_keyInfo2 = keyInfo) != null && _keyInfo2.keyLoadPromise) {
  10768. var _keyInfo$mediaKeySess;
  10769. switch ((_keyInfo$mediaKeySess = keyInfo.mediaKeySessionContext) == null ? void 0 : _keyInfo$mediaKeySess.keyStatus) {
  10770. case undefined:
  10771. case 'status-pending':
  10772. case 'usable':
  10773. case 'usable-in-future':
  10774. return keyInfo.keyLoadPromise.then(keyLoadedData => {
  10775. // Return the correct fragment with updated decryptdata key and loaded keyInfo
  10776. decryptdata.key = keyLoadedData.keyInfo.decryptdata.key;
  10777. return {
  10778. frag,
  10779. keyInfo
  10780. };
  10781. });
  10782. }
  10783. // If we have a key session and status and it is not pending or usable, continue
  10784. // This will go back to the eme-controller for expired keys to get a new keyLoadPromise
  10785. }
  10786. // Load the key or return the loading promise
  10787. keyInfo = this.keyUriToKeyInfo[uri] = {
  10788. decryptdata,
  10789. keyLoadPromise: null,
  10790. loader: null,
  10791. mediaKeySessionContext: null
  10792. };
  10793. switch (decryptdata.method) {
  10794. case 'ISO-23001-7':
  10795. case 'SAMPLE-AES':
  10796. case 'SAMPLE-AES-CENC':
  10797. case 'SAMPLE-AES-CTR':
  10798. if (decryptdata.keyFormat === 'identity') {
  10799. // loadKeyHTTP handles http(s) and data URLs
  10800. return this.loadKeyHTTP(keyInfo, frag);
  10801. }
  10802. return this.loadKeyEME(keyInfo, frag);
  10803. case 'AES-128':
  10804. return this.loadKeyHTTP(keyInfo, frag);
  10805. default:
  10806. return Promise.reject(this.createKeyLoadError(frag, ErrorDetails.KEY_LOAD_ERROR, new Error(`Key supplied with unsupported METHOD: "${decryptdata.method}"`)));
  10807. }
  10808. }
  10809. loadKeyEME(keyInfo, frag) {
  10810. const keyLoadedData = {
  10811. frag,
  10812. keyInfo
  10813. };
  10814. if (this.emeController && this.config.emeEnabled) {
  10815. const keySessionContextPromise = this.emeController.loadKey(keyLoadedData);
  10816. if (keySessionContextPromise) {
  10817. return (keyInfo.keyLoadPromise = keySessionContextPromise.then(keySessionContext => {
  10818. keyInfo.mediaKeySessionContext = keySessionContext;
  10819. return keyLoadedData;
  10820. })).catch(error => {
  10821. // Remove promise for license renewal or retry
  10822. keyInfo.keyLoadPromise = null;
  10823. throw error;
  10824. });
  10825. }
  10826. }
  10827. return Promise.resolve(keyLoadedData);
  10828. }
  10829. loadKeyHTTP(keyInfo, frag) {
  10830. const config = this.config;
  10831. const Loader = config.loader;
  10832. const keyLoader = new Loader(config);
  10833. frag.keyLoader = keyInfo.loader = keyLoader;
  10834. return keyInfo.keyLoadPromise = new Promise((resolve, reject) => {
  10835. const loaderContext = {
  10836. keyInfo,
  10837. frag,
  10838. responseType: 'arraybuffer',
  10839. url: keyInfo.decryptdata.uri
  10840. };
  10841. // maxRetry is 0 so that instead of retrying the same key on the same variant multiple times,
  10842. // key-loader will trigger an error and rely on stream-controller to handle retry logic.
  10843. // this will also align retry logic with fragment-loader
  10844. const loadPolicy = config.keyLoadPolicy.default;
  10845. const loaderConfig = {
  10846. loadPolicy,
  10847. timeout: loadPolicy.maxLoadTimeMs,
  10848. maxRetry: 0,
  10849. retryDelay: 0,
  10850. maxRetryDelay: 0
  10851. };
  10852. const loaderCallbacks = {
  10853. onSuccess: (response, stats, context, networkDetails) => {
  10854. const {
  10855. frag,
  10856. keyInfo,
  10857. url: uri
  10858. } = context;
  10859. if (!frag.decryptdata || keyInfo !== this.keyUriToKeyInfo[uri]) {
  10860. return reject(this.createKeyLoadError(frag, ErrorDetails.KEY_LOAD_ERROR, new Error('after key load, decryptdata unset or changed'), networkDetails));
  10861. }
  10862. keyInfo.decryptdata.key = frag.decryptdata.key = new Uint8Array(response.data);
  10863. // detach fragment key loader on load success
  10864. frag.keyLoader = null;
  10865. keyInfo.loader = null;
  10866. resolve({
  10867. frag,
  10868. keyInfo
  10869. });
  10870. },
  10871. onError: (response, context, networkDetails, stats) => {
  10872. this.resetLoader(context);
  10873. reject(this.createKeyLoadError(frag, ErrorDetails.KEY_LOAD_ERROR, new Error(`HTTP Error ${response.code} loading key ${response.text}`), networkDetails, _objectSpread2({
  10874. url: loaderContext.url,
  10875. data: undefined
  10876. }, response)));
  10877. },
  10878. onTimeout: (stats, context, networkDetails) => {
  10879. this.resetLoader(context);
  10880. reject(this.createKeyLoadError(frag, ErrorDetails.KEY_LOAD_TIMEOUT, new Error('key loading timed out'), networkDetails));
  10881. },
  10882. onAbort: (stats, context, networkDetails) => {
  10883. this.resetLoader(context);
  10884. reject(this.createKeyLoadError(frag, ErrorDetails.INTERNAL_ABORTED, new Error('key loading aborted'), networkDetails));
  10885. }
  10886. };
  10887. keyLoader.load(loaderContext, loaderConfig, loaderCallbacks);
  10888. });
  10889. }
  10890. resetLoader(context) {
  10891. const {
  10892. frag,
  10893. keyInfo,
  10894. url: uri
  10895. } = context;
  10896. const loader = keyInfo.loader;
  10897. if (frag.keyLoader === loader) {
  10898. frag.keyLoader = null;
  10899. keyInfo.loader = null;
  10900. }
  10901. delete this.keyUriToKeyInfo[uri];
  10902. if (loader) {
  10903. loader.destroy();
  10904. }
  10905. }
  10906. }
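// A minimal sketch (not part of hls.js) of how the METHOD/KEYFORMAT switch in load() above resolves
// to a loading path. `selectKeyLoadPath` is a hypothetical helper that only mirrors that routing.
function selectKeyLoadPath(decryptdata) {
  switch (decryptdata.method) {
    case 'ISO-23001-7':
    case 'SAMPLE-AES':
    case 'SAMPLE-AES-CENC':
    case 'SAMPLE-AES-CTR':
      // 'identity' keys are plain key files fetched over http(s)/data URLs; other key formats go through EME
      return decryptdata.keyFormat === 'identity' ? 'http' : 'eme';
    case 'AES-128':
      return 'http';
    default:
      return 'unsupported';
  }
}
// selectKeyLoadPath({ method: 'AES-128', keyFormat: 'identity' })  -> 'http'
// selectKeyLoadPath({ method: 'SAMPLE-AES', keyFormat: 'com.apple.streamingkeydelivery' }) -> 'eme'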
  10907. /**
  10908. * @ignore
  10909. * Sub-class specialization of EventHandler base class.
  10910. *
10911. * TaskLoop allows scheduling a task function to be called (optionally repeatedly) on the main loop,
10912. * scheduled asynchronously, avoiding recursive calls in the same tick.
  10913. *
  10914. * The task itself is implemented in `doTick`. It can be requested and called for single execution
  10915. * using the `tick` method.
  10916. *
  10917. * It will be assured that the task execution method (`tick`) only gets called once per main loop "tick",
  10918. * no matter how often it gets requested for execution. Execution in further ticks will be scheduled accordingly.
  10919. *
  10920. * If further execution requests have already been scheduled on the next tick, it can be checked with `hasNextTick`,
  10921. * and cancelled with `clearNextTick`.
  10922. *
  10923. * The task can be scheduled as an interval repeatedly with a period as parameter (see `setInterval`, `clearInterval`).
  10924. *
  10925. * Sub-classes need to implement the `doTick` method which will effectively have the task execution routine.
  10926. *
  10927. * Further explanations:
  10928. *
10929. * The base class has a `tick` method that will schedule the doTick call. It may be called synchronously
10930. * only for a stack-depth of one. On re-entrant calls, subsequent calls are scheduled for the next main loop ticks.
10931. *
10932. * When the task execution (`tick` method) is called in a re-entrant way, this is detected and
10933. * task execution per call stack is limited to exactly one, while further task processing is scheduled/postponed
10934. * to the next main loop iteration (also known as "next tick" in the Node/JS runtime lingo).
  10935. */
  10936. class TaskLoop {
  10937. constructor() {
  10938. this._boundTick = void 0;
  10939. this._tickTimer = null;
  10940. this._tickInterval = null;
  10941. this._tickCallCount = 0;
  10942. this._boundTick = this.tick.bind(this);
  10943. }
  10944. destroy() {
  10945. this.onHandlerDestroying();
  10946. this.onHandlerDestroyed();
  10947. }
  10948. onHandlerDestroying() {
  10949. // clear all timers before unregistering from event bus
  10950. this.clearNextTick();
  10951. this.clearInterval();
  10952. }
  10953. onHandlerDestroyed() {}
  10954. hasInterval() {
  10955. return !!this._tickInterval;
  10956. }
  10957. hasNextTick() {
  10958. return !!this._tickTimer;
  10959. }
  10960. /**
  10961. * @param millis - Interval time (ms)
10962. * @returns True when interval has been scheduled, false when already scheduled (no effect)
  10963. */
  10964. setInterval(millis) {
  10965. if (!this._tickInterval) {
  10966. this._tickCallCount = 0;
  10967. this._tickInterval = self.setInterval(this._boundTick, millis);
  10968. return true;
  10969. }
  10970. return false;
  10971. }
  10972. /**
  10973. * @returns True when interval was cleared, false when none was set (no effect)
  10974. */
  10975. clearInterval() {
  10976. if (this._tickInterval) {
  10977. self.clearInterval(this._tickInterval);
  10978. this._tickInterval = null;
  10979. return true;
  10980. }
  10981. return false;
  10982. }
  10983. /**
  10984. * @returns True when timeout was cleared, false when none was set (no effect)
  10985. */
  10986. clearNextTick() {
  10987. if (this._tickTimer) {
  10988. self.clearTimeout(this._tickTimer);
  10989. this._tickTimer = null;
  10990. return true;
  10991. }
  10992. return false;
  10993. }
  10994. /**
  10995. * Will call the subclass doTick implementation in this main loop tick
  10996. * or in the next one (via setTimeout(,0)) in case it has already been called
  10997. * in this tick (in case this is a re-entrant call).
  10998. */
  10999. tick() {
  11000. this._tickCallCount++;
  11001. if (this._tickCallCount === 1) {
  11002. this.doTick();
  11003. // re-entrant call to tick from previous doTick call stack
  11004. // -> schedule a call on the next main loop iteration to process this task processing request
  11005. if (this._tickCallCount > 1) {
  11006. // make sure only one timer exists at any time at max
  11007. this.tickImmediate();
  11008. }
  11009. this._tickCallCount = 0;
  11010. }
  11011. }
  11012. tickImmediate() {
  11013. this.clearNextTick();
  11014. this._tickTimer = self.setTimeout(this._boundTick, 0);
  11015. }
  11016. /**
  11017. * For subclass to implement task logic
  11018. * @abstract
  11019. */
  11020. doTick() {}
  11021. }
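// A minimal usage sketch for TaskLoop, assuming the class above is in scope: a subclass implements
// doTick() and schedules it either per request or on a fixed interval (names below are hypothetical).
class PollingTaskExample extends TaskLoop {
  doTick() {
    // runs at most once per main-loop tick, no matter how often tick() is requested
    console.log('polling at', Date.now());
  }
}
// const task = new PollingTaskExample();
// task.setInterval(250); // call doTick roughly every 250 ms
// task.tick();           // or request a single, de-duplicated execution
// task.destroy();        // clears the interval and any pending next-tick timer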
  11022. class ChunkMetadata {
  11023. constructor(level, sn, id, size = 0, part = -1, partial = false) {
  11024. this.level = void 0;
  11025. this.sn = void 0;
  11026. this.part = void 0;
  11027. this.id = void 0;
  11028. this.size = void 0;
  11029. this.partial = void 0;
  11030. this.transmuxing = getNewPerformanceTiming();
  11031. this.buffering = {
  11032. audio: getNewPerformanceTiming(),
  11033. video: getNewPerformanceTiming(),
  11034. audiovideo: getNewPerformanceTiming()
  11035. };
  11036. this.level = level;
  11037. this.sn = sn;
  11038. this.id = id;
  11039. this.size = size;
  11040. this.part = part;
  11041. this.partial = partial;
  11042. }
  11043. }
  11044. function getNewPerformanceTiming() {
  11045. return {
  11046. start: 0,
  11047. executeStart: 0,
  11048. executeEnd: 0,
  11049. end: 0
  11050. };
  11051. }
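// Illustrative sketch (hypothetical values): a ChunkMetadata for the second chunk of segment 42 on
// level 2; its transmuxing/buffering slots can then be stamped with performance.now() timestamps.
const exampleChunkMeta = new ChunkMetadata(2, 42, /* id: running chunk count */ 2);
exampleChunkMeta.transmuxing.start = self.performance.now();
// ... transmux work for the chunk would happen here ...
exampleChunkMeta.transmuxing.end = self.performance.now();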
  11052. function findFirstFragWithCC(fragments, cc) {
  11053. for (let i = 0, len = fragments.length; i < len; i++) {
  11054. var _fragments$i;
  11055. if (((_fragments$i = fragments[i]) == null ? void 0 : _fragments$i.cc) === cc) {
  11056. return fragments[i];
  11057. }
  11058. }
  11059. return null;
  11060. }
  11061. function shouldAlignOnDiscontinuities(lastFrag, switchDetails, details) {
  11062. if (switchDetails) {
  11063. if (details.endCC > details.startCC || lastFrag && lastFrag.cc < details.startCC) {
  11064. return true;
  11065. }
  11066. }
  11067. return false;
  11068. }
  11069. // Find the first frag in the previous level which matches the CC of the first frag of the new level
  11070. function findDiscontinuousReferenceFrag(prevDetails, curDetails) {
  11071. const prevFrags = prevDetails.fragments;
  11072. const curFrags = curDetails.fragments;
  11073. if (!curFrags.length || !prevFrags.length) {
  11074. logger.log('No fragments to align');
  11075. return;
  11076. }
  11077. const prevStartFrag = findFirstFragWithCC(prevFrags, curFrags[0].cc);
  11078. if (!prevStartFrag || prevStartFrag && !prevStartFrag.startPTS) {
  11079. logger.log('No frag in previous level to align on');
  11080. return;
  11081. }
  11082. return prevStartFrag;
  11083. }
  11084. function adjustFragmentStart(frag, sliding) {
  11085. if (frag) {
  11086. const start = frag.start + sliding;
  11087. frag.start = frag.startPTS = start;
  11088. frag.endPTS = start + frag.duration;
  11089. }
  11090. }
  11091. function adjustSlidingStart(sliding, details) {
  11092. // Update segments
  11093. const fragments = details.fragments;
  11094. for (let i = 0, len = fragments.length; i < len; i++) {
  11095. adjustFragmentStart(fragments[i], sliding);
  11096. }
  11097. // Update LL-HLS parts at the end of the playlist
  11098. if (details.fragmentHint) {
  11099. adjustFragmentStart(details.fragmentHint, sliding);
  11100. }
  11101. details.alignedSliding = true;
  11102. }
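// Worked example (hypothetical numbers): applying a sliding offset of 10 s to a 6 s fragment that
// previously started at 20 s moves start/startPTS to 30 and endPTS to 36.
const exampleSlidFrag = { start: 20, startPTS: 20, endPTS: 26, duration: 6 };
adjustFragmentStart(exampleSlidFrag, 10);
// exampleSlidFrag -> { start: 30, startPTS: 30, endPTS: 36, duration: 6 }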
  11103. /**
11104. * Using the parameters of the last level, this function computes the PTS values of the new fragments so that they form a
  11105. * contiguous stream with the last fragments.
  11106. * The PTS of a fragment lets Hls.js know where it fits into a stream - by knowing every PTS, we know which fragment to
  11107. * download at any given time. PTS is normally computed when the fragment is demuxed, so taking this step saves us time
  11108. * and an extra download.
  11109. * @param lastFrag
11110. * @param switchDetails
  11111. * @param details
  11112. */
  11113. function alignStream(lastFrag, switchDetails, details) {
  11114. if (!switchDetails) {
  11115. return;
  11116. }
  11117. alignDiscontinuities(lastFrag, details, switchDetails);
  11118. if (!details.alignedSliding && switchDetails) {
  11119. // If the PTS wasn't figured out via discontinuity sequence that means there was no CC increase within the level.
  11120. // Aligning via Program Date Time should therefore be reliable, since PDT should be the same within the same
  11121. // discontinuity sequence.
  11122. alignMediaPlaylistByPDT(details, switchDetails);
  11123. }
  11124. if (!details.alignedSliding && switchDetails && !details.skippedSegments) {
  11125. // Try to align on sn so that we pick a better start fragment.
  11126. // Do not perform this on playlists with delta updates as this is only to align levels on switch
  11127. // and adjustSliding only adjusts fragments after skippedSegments.
  11128. adjustSliding(switchDetails, details);
  11129. }
  11130. }
  11131. /**
11132. * Computes the PTS of a new level's fragments using the PTS of a fragment in the last level which shares the same
  11133. * discontinuity sequence.
  11134. * @param lastFrag - The last Fragment which shares the same discontinuity sequence
11135. * @param switchDetails - The details of the last loaded level
  11136. * @param details - The details of the new level
  11137. */
  11138. function alignDiscontinuities(lastFrag, details, switchDetails) {
  11139. if (shouldAlignOnDiscontinuities(lastFrag, switchDetails, details)) {
  11140. const referenceFrag = findDiscontinuousReferenceFrag(switchDetails, details);
  11141. if (referenceFrag && isFiniteNumber(referenceFrag.start)) {
  11142. logger.log(`Adjusting PTS using last level due to CC increase within current level ${details.url}`);
  11143. adjustSlidingStart(referenceFrag.start, details);
  11144. }
  11145. }
  11146. }
  11147. /**
  11148. * Ensures appropriate time-alignment between renditions based on PDT.
  11149. * This function assumes the timelines represented in `refDetails` are accurate, including the PDTs
  11150. * for the last discontinuity sequence number shared by both playlists when present,
  11151. * and uses the "wallclock"/PDT timeline as a cross-reference to `details`, adjusting the presentation
  11152. * times/timelines of `details` accordingly.
  11153. * Given the asynchronous nature of fetches and initial loads of live `main` and audio/subtitle tracks,
  11154. * the primary purpose of this function is to ensure the "local timelines" of audio/subtitle tracks
  11155. * are aligned to the main/video timeline, using PDT as the cross-reference/"anchor" that should
  11156. * be consistent across playlists, per the HLS spec.
  11157. * @param details - The details of the rendition you'd like to time-align (e.g. an audio rendition).
  11158. * @param refDetails - The details of the reference rendition with start and PDT times for alignment.
  11159. */
  11160. function alignMediaPlaylistByPDT(details, refDetails) {
  11161. if (!details.hasProgramDateTime || !refDetails.hasProgramDateTime) {
  11162. return;
  11163. }
  11164. const fragments = details.fragments;
  11165. const refFragments = refDetails.fragments;
  11166. if (!fragments.length || !refFragments.length) {
  11167. return;
  11168. }
  11169. // Calculate a delta to apply to all fragments according to the delta in PDT times and start times
  11170. // of a fragment in the reference details, and a fragment in the target details of the same discontinuity.
  11171. // If a fragment of the same discontinuity was not found use the middle fragment of both.
  11172. let refFrag;
  11173. let frag;
  11174. const targetCC = Math.min(refDetails.endCC, details.endCC);
  11175. if (refDetails.startCC < targetCC && details.startCC < targetCC) {
  11176. refFrag = findFirstFragWithCC(refFragments, targetCC);
  11177. frag = findFirstFragWithCC(fragments, targetCC);
  11178. }
  11179. if (!refFrag || !frag) {
  11180. refFrag = refFragments[Math.floor(refFragments.length / 2)];
  11181. frag = findFirstFragWithCC(fragments, refFrag.cc) || fragments[Math.floor(fragments.length / 2)];
  11182. }
  11183. const refPDT = refFrag.programDateTime;
  11184. const targetPDT = frag.programDateTime;
  11185. if (!refPDT || !targetPDT) {
  11186. return;
  11187. }
  11188. const delta = (targetPDT - refPDT) / 1000 - (frag.start - refFrag.start);
  11189. adjustSlidingStart(delta, details);
  11190. }
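// Worked example of the delta above (hypothetical values): the reference fragment has
// programDateTime = 1700000000000 ms and start = 100 s; the matching fragment in the playlist being
// aligned has programDateTime = 1700000004000 ms (4 s later in wall-clock time) but start = 0 on its
// own, not-yet-aligned timeline.
const examplePdtDelta = (1700000004000 - 1700000000000) / 1000 - (0 - 100); // 4 + 100 = 104
// adjustSlidingStart(104, details) then shifts every fragment of that playlist by 104 s, so the
// fragment starts at 104 s, i.e. 4 s after the 100 s reference fragment, matching the PDT relationship.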
  11191. class AESCrypto {
  11192. constructor(subtle, iv) {
  11193. this.subtle = void 0;
  11194. this.aesIV = void 0;
  11195. this.subtle = subtle;
  11196. this.aesIV = iv;
  11197. }
  11198. decrypt(data, key) {
  11199. return this.subtle.decrypt({
  11200. name: 'AES-CBC',
  11201. iv: this.aesIV
  11202. }, key, data);
  11203. }
  11204. }
  11205. class FastAESKey {
  11206. constructor(subtle, key) {
  11207. this.subtle = void 0;
  11208. this.key = void 0;
  11209. this.subtle = subtle;
  11210. this.key = key;
  11211. }
  11212. expandKey() {
  11213. return this.subtle.importKey('raw', this.key, {
  11214. name: 'AES-CBC'
  11215. }, false, ['encrypt', 'decrypt']);
  11216. }
  11217. }
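// Usage sketch for the two WebCrypto wrappers above; assumes a secure browser/worker context with
// crypto.subtle, a raw 16-byte AES-128 key and 16-byte IV (Uint8Array), and CBC ciphertext (ArrayBuffer).
async function webCryptoDecryptExample(cipherData, rawKey, iv) {
  const subtle = self.crypto.subtle;
  const aesKey = await new FastAESKey(subtle, rawKey).expandKey(); // importKey('raw', ..., { name: 'AES-CBC' })
  // subtle.decrypt validates and strips the PKCS7 padding itself, so the result is plain payload bytes
  return new AESCrypto(subtle, iv).decrypt(cipherData, aesKey);
}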
  11218. // PKCS7
  11219. function removePadding(array) {
  11220. const outputBytes = array.byteLength;
  11221. const paddingBytes = outputBytes && new DataView(array.buffer).getUint8(outputBytes - 1);
  11222. if (paddingBytes) {
  11223. return sliceUint8(array, 0, outputBytes - paddingBytes);
  11224. }
  11225. return array;
  11226. }
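// Quick illustration of the PKCS7 helper above: the final byte gives the padding length, which is
// simply trimmed from the end (hypothetical 8-byte input carrying 3 bytes of padding).
const examplePadded = new Uint8Array([0x68, 0x6c, 0x73, 0x2e, 0x6a, 0x03, 0x03, 0x03]);
const exampleUnpadded = removePadding(examplePadded); // -> first 5 bytes only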
  11227. class AESDecryptor {
  11228. constructor() {
  11229. this.rcon = [0x0, 0x1, 0x2, 0x4, 0x8, 0x10, 0x20, 0x40, 0x80, 0x1b, 0x36];
  11230. this.subMix = [new Uint32Array(256), new Uint32Array(256), new Uint32Array(256), new Uint32Array(256)];
  11231. this.invSubMix = [new Uint32Array(256), new Uint32Array(256), new Uint32Array(256), new Uint32Array(256)];
  11232. this.sBox = new Uint32Array(256);
  11233. this.invSBox = new Uint32Array(256);
  11234. this.key = new Uint32Array(0);
  11235. this.ksRows = 0;
  11236. this.keySize = 0;
  11237. this.keySchedule = void 0;
  11238. this.invKeySchedule = void 0;
  11239. this.initTable();
  11240. }
  11241. // Using view.getUint32() also swaps the byte order.
  11242. uint8ArrayToUint32Array_(arrayBuffer) {
  11243. const view = new DataView(arrayBuffer);
  11244. const newArray = new Uint32Array(4);
  11245. for (let i = 0; i < 4; i++) {
  11246. newArray[i] = view.getUint32(i * 4);
  11247. }
  11248. return newArray;
  11249. }
  11250. initTable() {
  11251. const sBox = this.sBox;
  11252. const invSBox = this.invSBox;
  11253. const subMix = this.subMix;
  11254. const subMix0 = subMix[0];
  11255. const subMix1 = subMix[1];
  11256. const subMix2 = subMix[2];
  11257. const subMix3 = subMix[3];
  11258. const invSubMix = this.invSubMix;
  11259. const invSubMix0 = invSubMix[0];
  11260. const invSubMix1 = invSubMix[1];
  11261. const invSubMix2 = invSubMix[2];
  11262. const invSubMix3 = invSubMix[3];
  11263. const d = new Uint32Array(256);
  11264. let x = 0;
  11265. let xi = 0;
  11266. let i = 0;
  11267. for (i = 0; i < 256; i++) {
  11268. if (i < 128) {
  11269. d[i] = i << 1;
  11270. } else {
  11271. d[i] = i << 1 ^ 0x11b;
  11272. }
  11273. }
  11274. for (i = 0; i < 256; i++) {
  11275. let sx = xi ^ xi << 1 ^ xi << 2 ^ xi << 3 ^ xi << 4;
  11276. sx = sx >>> 8 ^ sx & 0xff ^ 0x63;
  11277. sBox[x] = sx;
  11278. invSBox[sx] = x;
  11279. // Compute multiplication
  11280. const x2 = d[x];
  11281. const x4 = d[x2];
  11282. const x8 = d[x4];
  11283. // Compute sub/invSub bytes, mix columns tables
  11284. let t = d[sx] * 0x101 ^ sx * 0x1010100;
  11285. subMix0[x] = t << 24 | t >>> 8;
  11286. subMix1[x] = t << 16 | t >>> 16;
  11287. subMix2[x] = t << 8 | t >>> 24;
  11288. subMix3[x] = t;
  11289. // Compute inv sub bytes, inv mix columns tables
  11290. t = x8 * 0x1010101 ^ x4 * 0x10001 ^ x2 * 0x101 ^ x * 0x1010100;
  11291. invSubMix0[sx] = t << 24 | t >>> 8;
  11292. invSubMix1[sx] = t << 16 | t >>> 16;
  11293. invSubMix2[sx] = t << 8 | t >>> 24;
  11294. invSubMix3[sx] = t;
  11295. // Compute next counter
  11296. if (!x) {
  11297. x = xi = 1;
  11298. } else {
  11299. x = x2 ^ d[d[d[x8 ^ x2]]];
  11300. xi ^= d[d[xi]];
  11301. }
  11302. }
  11303. }
  11304. expandKey(keyBuffer) {
  11305. // convert keyBuffer to Uint32Array
  11306. const key = this.uint8ArrayToUint32Array_(keyBuffer);
  11307. let sameKey = true;
  11308. let offset = 0;
  11309. while (offset < key.length && sameKey) {
  11310. sameKey = key[offset] === this.key[offset];
  11311. offset++;
  11312. }
  11313. if (sameKey) {
  11314. return;
  11315. }
  11316. this.key = key;
  11317. const keySize = this.keySize = key.length;
  11318. if (keySize !== 4 && keySize !== 6 && keySize !== 8) {
  11319. throw new Error('Invalid aes key size=' + keySize);
  11320. }
  11321. const ksRows = this.ksRows = (keySize + 6 + 1) * 4;
  11322. let ksRow;
  11323. let invKsRow;
  11324. const keySchedule = this.keySchedule = new Uint32Array(ksRows);
  11325. const invKeySchedule = this.invKeySchedule = new Uint32Array(ksRows);
  11326. const sbox = this.sBox;
  11327. const rcon = this.rcon;
  11328. const invSubMix = this.invSubMix;
  11329. const invSubMix0 = invSubMix[0];
  11330. const invSubMix1 = invSubMix[1];
  11331. const invSubMix2 = invSubMix[2];
  11332. const invSubMix3 = invSubMix[3];
  11333. let prev;
  11334. let t;
  11335. for (ksRow = 0; ksRow < ksRows; ksRow++) {
  11336. if (ksRow < keySize) {
  11337. prev = keySchedule[ksRow] = key[ksRow];
  11338. continue;
  11339. }
  11340. t = prev;
  11341. if (ksRow % keySize === 0) {
  11342. // Rot word
  11343. t = t << 8 | t >>> 24;
  11344. // Sub word
  11345. t = sbox[t >>> 24] << 24 | sbox[t >>> 16 & 0xff] << 16 | sbox[t >>> 8 & 0xff] << 8 | sbox[t & 0xff];
  11346. // Mix Rcon
  11347. t ^= rcon[ksRow / keySize | 0] << 24;
  11348. } else if (keySize > 6 && ksRow % keySize === 4) {
  11349. // Sub word
  11350. t = sbox[t >>> 24] << 24 | sbox[t >>> 16 & 0xff] << 16 | sbox[t >>> 8 & 0xff] << 8 | sbox[t & 0xff];
  11351. }
  11352. keySchedule[ksRow] = prev = (keySchedule[ksRow - keySize] ^ t) >>> 0;
  11353. }
  11354. for (invKsRow = 0; invKsRow < ksRows; invKsRow++) {
  11355. ksRow = ksRows - invKsRow;
  11356. if (invKsRow & 3) {
  11357. t = keySchedule[ksRow];
  11358. } else {
  11359. t = keySchedule[ksRow - 4];
  11360. }
  11361. if (invKsRow < 4 || ksRow <= 4) {
  11362. invKeySchedule[invKsRow] = t;
  11363. } else {
  11364. invKeySchedule[invKsRow] = invSubMix0[sbox[t >>> 24]] ^ invSubMix1[sbox[t >>> 16 & 0xff]] ^ invSubMix2[sbox[t >>> 8 & 0xff]] ^ invSubMix3[sbox[t & 0xff]];
  11365. }
  11366. invKeySchedule[invKsRow] = invKeySchedule[invKsRow] >>> 0;
  11367. }
  11368. }
  11369. // Adding this as a method greatly improves performance.
  11370. networkToHostOrderSwap(word) {
  11371. return word << 24 | (word & 0xff00) << 8 | (word & 0xff0000) >> 8 | word >>> 24;
  11372. }
  11373. decrypt(inputArrayBuffer, offset, aesIV) {
  11374. const nRounds = this.keySize + 6;
  11375. const invKeySchedule = this.invKeySchedule;
  11376. const invSBOX = this.invSBox;
  11377. const invSubMix = this.invSubMix;
  11378. const invSubMix0 = invSubMix[0];
  11379. const invSubMix1 = invSubMix[1];
  11380. const invSubMix2 = invSubMix[2];
  11381. const invSubMix3 = invSubMix[3];
  11382. const initVector = this.uint8ArrayToUint32Array_(aesIV);
  11383. let initVector0 = initVector[0];
  11384. let initVector1 = initVector[1];
  11385. let initVector2 = initVector[2];
  11386. let initVector3 = initVector[3];
  11387. const inputInt32 = new Int32Array(inputArrayBuffer);
  11388. const outputInt32 = new Int32Array(inputInt32.length);
  11389. let t0, t1, t2, t3;
  11390. let s0, s1, s2, s3;
  11391. let inputWords0, inputWords1, inputWords2, inputWords3;
  11392. let ksRow, i;
  11393. const swapWord = this.networkToHostOrderSwap;
  11394. while (offset < inputInt32.length) {
  11395. inputWords0 = swapWord(inputInt32[offset]);
  11396. inputWords1 = swapWord(inputInt32[offset + 1]);
  11397. inputWords2 = swapWord(inputInt32[offset + 2]);
  11398. inputWords3 = swapWord(inputInt32[offset + 3]);
  11399. s0 = inputWords0 ^ invKeySchedule[0];
  11400. s1 = inputWords3 ^ invKeySchedule[1];
  11401. s2 = inputWords2 ^ invKeySchedule[2];
  11402. s3 = inputWords1 ^ invKeySchedule[3];
  11403. ksRow = 4;
  11404. // Iterate through the rounds of decryption
  11405. for (i = 1; i < nRounds; i++) {
  11406. t0 = invSubMix0[s0 >>> 24] ^ invSubMix1[s1 >> 16 & 0xff] ^ invSubMix2[s2 >> 8 & 0xff] ^ invSubMix3[s3 & 0xff] ^ invKeySchedule[ksRow];
  11407. t1 = invSubMix0[s1 >>> 24] ^ invSubMix1[s2 >> 16 & 0xff] ^ invSubMix2[s3 >> 8 & 0xff] ^ invSubMix3[s0 & 0xff] ^ invKeySchedule[ksRow + 1];
  11408. t2 = invSubMix0[s2 >>> 24] ^ invSubMix1[s3 >> 16 & 0xff] ^ invSubMix2[s0 >> 8 & 0xff] ^ invSubMix3[s1 & 0xff] ^ invKeySchedule[ksRow + 2];
  11409. t3 = invSubMix0[s3 >>> 24] ^ invSubMix1[s0 >> 16 & 0xff] ^ invSubMix2[s1 >> 8 & 0xff] ^ invSubMix3[s2 & 0xff] ^ invKeySchedule[ksRow + 3];
  11410. // Update state
  11411. s0 = t0;
  11412. s1 = t1;
  11413. s2 = t2;
  11414. s3 = t3;
  11415. ksRow = ksRow + 4;
  11416. }
  11417. // Shift rows, sub bytes, add round key
  11418. t0 = invSBOX[s0 >>> 24] << 24 ^ invSBOX[s1 >> 16 & 0xff] << 16 ^ invSBOX[s2 >> 8 & 0xff] << 8 ^ invSBOX[s3 & 0xff] ^ invKeySchedule[ksRow];
  11419. t1 = invSBOX[s1 >>> 24] << 24 ^ invSBOX[s2 >> 16 & 0xff] << 16 ^ invSBOX[s3 >> 8 & 0xff] << 8 ^ invSBOX[s0 & 0xff] ^ invKeySchedule[ksRow + 1];
  11420. t2 = invSBOX[s2 >>> 24] << 24 ^ invSBOX[s3 >> 16 & 0xff] << 16 ^ invSBOX[s0 >> 8 & 0xff] << 8 ^ invSBOX[s1 & 0xff] ^ invKeySchedule[ksRow + 2];
  11421. t3 = invSBOX[s3 >>> 24] << 24 ^ invSBOX[s0 >> 16 & 0xff] << 16 ^ invSBOX[s1 >> 8 & 0xff] << 8 ^ invSBOX[s2 & 0xff] ^ invKeySchedule[ksRow + 3];
  11422. // Write
  11423. outputInt32[offset] = swapWord(t0 ^ initVector0);
  11424. outputInt32[offset + 1] = swapWord(t3 ^ initVector1);
  11425. outputInt32[offset + 2] = swapWord(t2 ^ initVector2);
  11426. outputInt32[offset + 3] = swapWord(t1 ^ initVector3);
  11427. // reset initVector to last 4 unsigned int
  11428. initVector0 = inputWords0;
  11429. initVector1 = inputWords1;
  11430. initVector2 = inputWords2;
  11431. initVector3 = inputWords3;
  11432. offset = offset + 4;
  11433. }
  11434. return outputInt32.buffer;
  11435. }
  11436. }
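// Usage sketch for the software decryptor above (hypothetical helper and inputs): ArrayBuffers for a
// 16/24/32-byte key, a 16-byte IV and ciphertext whose byteLength is a multiple of 16; PKCS7 padding
// removal is left to the caller, e.g. via removePadding().
function softwareAesCbcDecryptExample(cipherBuffer, keyBuffer, ivBuffer) {
  const aes = new AESDecryptor();
  aes.expandKey(keyBuffer);                       // no-op if the same key was expanded previously
  return aes.decrypt(cipherBuffer, 0, ivBuffer);  // ArrayBuffer in, ArrayBuffer of plaintext out
}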
  11437. const CHUNK_SIZE = 16; // 16 bytes, 128 bits
  11438. class Decrypter {
  11439. constructor(config, {
  11440. removePKCS7Padding = true
  11441. } = {}) {
  11442. this.logEnabled = true;
  11443. this.removePKCS7Padding = void 0;
  11444. this.subtle = null;
  11445. this.softwareDecrypter = null;
  11446. this.key = null;
  11447. this.fastAesKey = null;
  11448. this.remainderData = null;
  11449. this.currentIV = null;
  11450. this.currentResult = null;
  11451. this.useSoftware = void 0;
  11452. this.useSoftware = config.enableSoftwareAES;
  11453. this.removePKCS7Padding = removePKCS7Padding;
  11454. // built in decryptor expects PKCS7 padding
  11455. if (removePKCS7Padding) {
  11456. try {
  11457. const browserCrypto = self.crypto;
  11458. if (browserCrypto) {
  11459. this.subtle = browserCrypto.subtle || browserCrypto.webkitSubtle;
  11460. }
  11461. } catch (e) {
  11462. /* no-op */
  11463. }
  11464. }
  11465. this.useSoftware = !this.subtle;
  11466. }
  11467. destroy() {
  11468. this.subtle = null;
  11469. this.softwareDecrypter = null;
  11470. this.key = null;
  11471. this.fastAesKey = null;
  11472. this.remainderData = null;
  11473. this.currentIV = null;
  11474. this.currentResult = null;
  11475. }
  11476. isSync() {
  11477. return this.useSoftware;
  11478. }
  11479. flush() {
  11480. const {
  11481. currentResult,
  11482. remainderData
  11483. } = this;
  11484. if (!currentResult || remainderData) {
  11485. this.reset();
  11486. return null;
  11487. }
  11488. const data = new Uint8Array(currentResult);
  11489. this.reset();
  11490. if (this.removePKCS7Padding) {
  11491. return removePadding(data);
  11492. }
  11493. return data;
  11494. }
  11495. reset() {
  11496. this.currentResult = null;
  11497. this.currentIV = null;
  11498. this.remainderData = null;
  11499. if (this.softwareDecrypter) {
  11500. this.softwareDecrypter = null;
  11501. }
  11502. }
  11503. decrypt(data, key, iv) {
  11504. if (this.useSoftware) {
  11505. return new Promise((resolve, reject) => {
  11506. this.softwareDecrypt(new Uint8Array(data), key, iv);
  11507. const decryptResult = this.flush();
  11508. if (decryptResult) {
  11509. resolve(decryptResult.buffer);
  11510. } else {
  11511. reject(new Error('[softwareDecrypt] Failed to decrypt data'));
  11512. }
  11513. });
  11514. }
  11515. return this.webCryptoDecrypt(new Uint8Array(data), key, iv);
  11516. }
  11517. // Software decryption is progressive. Progressive decryption may not return a result on each call. Any cached
  11518. // data is handled in the flush() call
  11519. softwareDecrypt(data, key, iv) {
  11520. const {
  11521. currentIV,
  11522. currentResult,
  11523. remainderData
  11524. } = this;
  11525. this.logOnce('JS AES decrypt');
  11526. // The output is staggered during progressive parsing - the current result is cached, and emitted on the next call
  11527. // This is done in order to strip PKCS7 padding, which is found at the end of each segment. We only know we've reached
  11528. // the end on flush(), but by that time we have already received all bytes for the segment.
  11529. // Progressive decryption does not work with WebCrypto
  11530. if (remainderData) {
  11531. data = appendUint8Array(remainderData, data);
  11532. this.remainderData = null;
  11533. }
  11534. // Byte length must be a multiple of 16 (AES-128 = 128 bit blocks = 16 bytes)
  11535. const currentChunk = this.getValidChunk(data);
  11536. if (!currentChunk.length) {
  11537. return null;
  11538. }
  11539. if (currentIV) {
  11540. iv = currentIV;
  11541. }
  11542. let softwareDecrypter = this.softwareDecrypter;
  11543. if (!softwareDecrypter) {
  11544. softwareDecrypter = this.softwareDecrypter = new AESDecryptor();
  11545. }
  11546. softwareDecrypter.expandKey(key);
  11547. const result = currentResult;
  11548. this.currentResult = softwareDecrypter.decrypt(currentChunk.buffer, 0, iv);
  11549. this.currentIV = sliceUint8(currentChunk, -16).buffer;
  11550. if (!result) {
  11551. return null;
  11552. }
  11553. return result;
  11554. }
  11555. webCryptoDecrypt(data, key, iv) {
  11556. if (this.key !== key || !this.fastAesKey) {
  11557. if (!this.subtle) {
  11558. return Promise.resolve(this.onWebCryptoError(data, key, iv));
  11559. }
  11560. this.key = key;
  11561. this.fastAesKey = new FastAESKey(this.subtle, key);
  11562. }
  11563. return this.fastAesKey.expandKey().then(aesKey => {
  11564. // decrypt using web crypto
  11565. if (!this.subtle) {
  11566. return Promise.reject(new Error('web crypto not initialized'));
  11567. }
  11568. this.logOnce('WebCrypto AES decrypt');
  11569. const crypto = new AESCrypto(this.subtle, new Uint8Array(iv));
  11570. return crypto.decrypt(data.buffer, aesKey);
  11571. }).catch(err => {
  11572. logger.warn(`[decrypter]: WebCrypto Error, disable WebCrypto API, ${err.name}: ${err.message}`);
  11573. return this.onWebCryptoError(data, key, iv);
  11574. });
  11575. }
  11576. onWebCryptoError(data, key, iv) {
  11577. this.useSoftware = true;
  11578. this.logEnabled = true;
  11579. this.softwareDecrypt(data, key, iv);
  11580. const decryptResult = this.flush();
  11581. if (decryptResult) {
  11582. return decryptResult.buffer;
  11583. }
  11584. throw new Error('WebCrypto and softwareDecrypt: failed to decrypt data');
  11585. }
  11586. getValidChunk(data) {
  11587. let currentChunk = data;
  11588. const splitPoint = data.length - data.length % CHUNK_SIZE;
  11589. if (splitPoint !== data.length) {
  11590. currentChunk = sliceUint8(data, 0, splitPoint);
  11591. this.remainderData = sliceUint8(data, splitPoint);
  11592. }
  11593. return currentChunk;
  11594. }
  11595. logOnce(msg) {
  11596. if (!this.logEnabled) {
  11597. return;
  11598. }
  11599. logger.log(`[decrypter]: ${msg}`);
  11600. this.logEnabled = false;
  11601. }
  11602. }
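// A high-level usage sketch for Decrypter (hypothetical inputs: ArrayBuffers for the data, a 16-byte
// key and a 16-byte IV). It uses WebCrypto when crypto.subtle is available and falls back to the
// pure-JS AES-CBC implementation above otherwise; decrypt() resolves with a plaintext ArrayBuffer.
async function decryptSegmentExample(encryptedBuffer, keyBuffer, ivBuffer) {
  const decrypter = new Decrypter({ enableSoftwareAES: true });
  const plain = await decrypter.decrypt(encryptedBuffer, keyBuffer, ivBuffer);
  decrypter.destroy();
  return plain;
}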
  11603. /**
  11604. * TimeRanges to string helper
  11605. */
  11606. const TimeRanges = {
  11607. toString: function (r) {
  11608. let log = '';
  11609. const len = r.length;
  11610. for (let i = 0; i < len; i++) {
  11611. log += `[${r.start(i).toFixed(3)}-${r.end(i).toFixed(3)}]`;
  11612. }
  11613. return log;
  11614. }
  11615. };
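// Quick illustration of the output format, using a tiny stand-in for a native TimeRanges object with
// two buffered ranges (0-4 s and 8-12 s):
const exampleRanges = { length: 2, start: i => [0, 8][i], end: i => [4, 12][i] };
// TimeRanges.toString(exampleRanges) -> '[0.000-4.000][8.000-12.000]'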
  11616. const State = {
  11617. STOPPED: 'STOPPED',
  11618. IDLE: 'IDLE',
  11619. KEY_LOADING: 'KEY_LOADING',
  11620. FRAG_LOADING: 'FRAG_LOADING',
  11621. FRAG_LOADING_WAITING_RETRY: 'FRAG_LOADING_WAITING_RETRY',
  11622. WAITING_TRACK: 'WAITING_TRACK',
  11623. PARSING: 'PARSING',
  11624. PARSED: 'PARSED',
  11625. ENDED: 'ENDED',
  11626. ERROR: 'ERROR',
  11627. WAITING_INIT_PTS: 'WAITING_INIT_PTS',
  11628. WAITING_LEVEL: 'WAITING_LEVEL'
  11629. };
  11630. class BaseStreamController extends TaskLoop {
  11631. constructor(hls, fragmentTracker, keyLoader, logPrefix, playlistType) {
  11632. super();
  11633. this.hls = void 0;
  11634. this.fragPrevious = null;
  11635. this.fragCurrent = null;
  11636. this.fragmentTracker = void 0;
  11637. this.transmuxer = null;
  11638. this._state = State.STOPPED;
  11639. this.playlistType = void 0;
  11640. this.media = null;
  11641. this.mediaBuffer = null;
  11642. this.config = void 0;
  11643. this.bitrateTest = false;
  11644. this.lastCurrentTime = 0;
  11645. this.nextLoadPosition = 0;
  11646. this.startPosition = 0;
  11647. this.startTimeOffset = null;
  11648. this.loadedmetadata = false;
  11649. this.retryDate = 0;
  11650. this.levels = null;
  11651. this.fragmentLoader = void 0;
  11652. this.keyLoader = void 0;
  11653. this.levelLastLoaded = null;
  11654. this.startFragRequested = false;
  11655. this.decrypter = void 0;
  11656. this.initPTS = [];
  11657. this.onvseeking = null;
  11658. this.onvended = null;
  11659. this.logPrefix = '';
  11660. this.log = void 0;
  11661. this.warn = void 0;
  11662. this.playlistType = playlistType;
  11663. this.logPrefix = logPrefix;
  11664. this.log = logger.log.bind(logger, `${logPrefix}:`);
  11665. this.warn = logger.warn.bind(logger, `${logPrefix}:`);
  11666. this.hls = hls;
  11667. this.fragmentLoader = new FragmentLoader(hls.config);
  11668. this.keyLoader = keyLoader;
  11669. this.fragmentTracker = fragmentTracker;
  11670. this.config = hls.config;
  11671. this.decrypter = new Decrypter(hls.config);
  11672. hls.on(Events.MANIFEST_LOADED, this.onManifestLoaded, this);
  11673. }
  11674. doTick() {
  11675. this.onTickEnd();
  11676. }
  11677. onTickEnd() {}
  11678. // eslint-disable-next-line @typescript-eslint/no-unused-vars
  11679. startLoad(startPosition) {}
  11680. stopLoad() {
  11681. this.fragmentLoader.abort();
  11682. this.keyLoader.abort(this.playlistType);
  11683. const frag = this.fragCurrent;
  11684. if (frag != null && frag.loader) {
  11685. frag.abortRequests();
  11686. this.fragmentTracker.removeFragment(frag);
  11687. }
  11688. this.resetTransmuxer();
  11689. this.fragCurrent = null;
  11690. this.fragPrevious = null;
  11691. this.clearInterval();
  11692. this.clearNextTick();
  11693. this.state = State.STOPPED;
  11694. }
  11695. _streamEnded(bufferInfo, levelDetails) {
11696. // Return false if the playlist is live, there is another buffered range after the current range, nothing is buffered,
11697. // media is detached, or nothing is loading/loaded
  11698. if (levelDetails.live || bufferInfo.nextStart || !bufferInfo.end || !this.media) {
  11699. return false;
  11700. }
  11701. const partList = levelDetails.partList;
  11702. // Since the last part isn't guaranteed to correspond to the last playlist segment for Low-Latency HLS,
  11703. // check instead if the last part is buffered.
  11704. if (partList != null && partList.length) {
  11705. const lastPart = partList[partList.length - 1];
11706. // Check the midpoint of the part to allow for a margin of error and related issues.
11707. // NOTE: Technically I believe parts could yield content that is < the computed duration (including, potentially, a duration of 0)
11708. // and still be spec-compliant, so there may still be edge cases here. Likewise, there could be end-of-stream
11709. // part mismatches for independent audio and video playlists/segments.
  11710. const lastPartBuffered = BufferHelper.isBuffered(this.media, lastPart.start + lastPart.duration / 2);
  11711. return lastPartBuffered;
  11712. }
  11713. const playlistType = levelDetails.fragments[levelDetails.fragments.length - 1].type;
  11714. return this.fragmentTracker.isEndListAppended(playlistType);
  11715. }
  11716. getLevelDetails() {
  11717. if (this.levels && this.levelLastLoaded !== null) {
  11718. var _this$levelLastLoaded;
  11719. return (_this$levelLastLoaded = this.levelLastLoaded) == null ? void 0 : _this$levelLastLoaded.details;
  11720. }
  11721. }
  11722. onMediaAttached(event, data) {
  11723. const media = this.media = this.mediaBuffer = data.media;
  11724. this.onvseeking = this.onMediaSeeking.bind(this);
  11725. this.onvended = this.onMediaEnded.bind(this);
  11726. media.addEventListener('seeking', this.onvseeking);
  11727. media.addEventListener('ended', this.onvended);
  11728. const config = this.config;
  11729. if (this.levels && config.autoStartLoad && this.state === State.STOPPED) {
  11730. this.startLoad(config.startPosition);
  11731. }
  11732. }
  11733. onMediaDetaching() {
  11734. const media = this.media;
  11735. if (media != null && media.ended) {
  11736. this.log('MSE detaching and video ended, reset startPosition');
  11737. this.startPosition = this.lastCurrentTime = 0;
  11738. }
  11739. // remove video listeners
  11740. if (media && this.onvseeking && this.onvended) {
  11741. media.removeEventListener('seeking', this.onvseeking);
  11742. media.removeEventListener('ended', this.onvended);
  11743. this.onvseeking = this.onvended = null;
  11744. }
  11745. if (this.keyLoader) {
  11746. this.keyLoader.detach();
  11747. }
  11748. this.media = this.mediaBuffer = null;
  11749. this.loadedmetadata = false;
  11750. this.fragmentTracker.removeAllFragments();
  11751. this.stopLoad();
  11752. }
  11753. onMediaSeeking() {
  11754. const {
  11755. config,
  11756. fragCurrent,
  11757. media,
  11758. mediaBuffer,
  11759. state
  11760. } = this;
  11761. const currentTime = media ? media.currentTime : 0;
  11762. const bufferInfo = BufferHelper.bufferInfo(mediaBuffer ? mediaBuffer : media, currentTime, config.maxBufferHole);
  11763. this.log(`media seeking to ${isFiniteNumber(currentTime) ? currentTime.toFixed(3) : currentTime}, state: ${state}`);
  11764. if (this.state === State.ENDED) {
  11765. this.resetLoadingState();
  11766. } else if (fragCurrent) {
  11767. // Seeking while frag load is in progress
  11768. const tolerance = config.maxFragLookUpTolerance;
  11769. const fragStartOffset = fragCurrent.start - tolerance;
  11770. const fragEndOffset = fragCurrent.start + fragCurrent.duration + tolerance;
  11771. // if seeking out of buffered range or into new one
  11772. if (!bufferInfo.len || fragEndOffset < bufferInfo.start || fragStartOffset > bufferInfo.end) {
  11773. const pastFragment = currentTime > fragEndOffset;
  11774. // if the seek position is outside the current fragment range
  11775. if (currentTime < fragStartOffset || pastFragment) {
  11776. if (pastFragment && fragCurrent.loader) {
  11777. this.log('seeking outside of buffer while fragment load in progress, cancel fragment load');
  11778. fragCurrent.abortRequests();
  11779. this.resetLoadingState();
  11780. }
  11781. this.fragPrevious = null;
  11782. }
  11783. }
  11784. }
  11785. if (media) {
  11786. // Remove gap fragments
  11787. this.fragmentTracker.removeFragmentsInRange(currentTime, Infinity, this.playlistType, true);
  11788. this.lastCurrentTime = currentTime;
  11789. }
11790. // in case seeking occurs although no media is buffered, adjust startPosition and nextLoadPosition to the seek target
  11791. if (!this.loadedmetadata && !bufferInfo.len) {
  11792. this.nextLoadPosition = this.startPosition = currentTime;
  11793. }
  11794. // Async tick to speed up processing
  11795. this.tickImmediate();
  11796. }
  11797. onMediaEnded() {
  11798. // reset startPosition and lastCurrentTime to restart playback @ stream beginning
  11799. this.startPosition = this.lastCurrentTime = 0;
  11800. }
  11801. onManifestLoaded(event, data) {
  11802. this.startTimeOffset = data.startTimeOffset;
  11803. this.initPTS = [];
  11804. }
  11805. onHandlerDestroying() {
  11806. this.hls.off(Events.MANIFEST_LOADED, this.onManifestLoaded, this);
  11807. this.stopLoad();
  11808. super.onHandlerDestroying();
  11809. // @ts-ignore
  11810. this.hls = null;
  11811. }
  11812. onHandlerDestroyed() {
  11813. this.state = State.STOPPED;
  11814. if (this.fragmentLoader) {
  11815. this.fragmentLoader.destroy();
  11816. }
  11817. if (this.keyLoader) {
  11818. this.keyLoader.destroy();
  11819. }
  11820. if (this.decrypter) {
  11821. this.decrypter.destroy();
  11822. }
  11823. this.hls = this.log = this.warn = this.decrypter = this.keyLoader = this.fragmentLoader = this.fragmentTracker = null;
  11824. super.onHandlerDestroyed();
  11825. }
  11826. loadFragment(frag, level, targetBufferTime) {
  11827. this._loadFragForPlayback(frag, level, targetBufferTime);
  11828. }
  11829. _loadFragForPlayback(frag, level, targetBufferTime) {
  11830. const progressCallback = data => {
  11831. if (this.fragContextChanged(frag)) {
  11832. this.warn(`Fragment ${frag.sn}${data.part ? ' p: ' + data.part.index : ''} of level ${frag.level} was dropped during download.`);
  11833. this.fragmentTracker.removeFragment(frag);
  11834. return;
  11835. }
  11836. frag.stats.chunkCount++;
  11837. this._handleFragmentLoadProgress(data);
  11838. };
  11839. this._doFragLoad(frag, level, targetBufferTime, progressCallback).then(data => {
  11840. if (!data) {
  11841. // if we're here we probably needed to backtrack or are waiting for more parts
  11842. return;
  11843. }
  11844. const state = this.state;
  11845. if (this.fragContextChanged(frag)) {
  11846. if (state === State.FRAG_LOADING || !this.fragCurrent && state === State.PARSING) {
  11847. this.fragmentTracker.removeFragment(frag);
  11848. this.state = State.IDLE;
  11849. }
  11850. return;
  11851. }
  11852. if ('payload' in data) {
  11853. this.log(`Loaded fragment ${frag.sn} of level ${frag.level}`);
  11854. this.hls.trigger(Events.FRAG_LOADED, data);
  11855. }
  11856. // Pass through the whole payload; controllers not implementing progressive loading receive data from this callback
  11857. this._handleFragmentLoadComplete(data);
  11858. }).catch(reason => {
  11859. if (this.state === State.STOPPED || this.state === State.ERROR) {
  11860. return;
  11861. }
  11862. this.warn(`Frag error: ${(reason == null ? void 0 : reason.message) || reason}`);
  11863. this.resetFragmentLoading(frag);
  11864. });
  11865. }
  11866. clearTrackerIfNeeded(frag) {
  11867. var _this$mediaBuffer;
  11868. const {
  11869. fragmentTracker
  11870. } = this;
  11871. const fragState = fragmentTracker.getState(frag);
  11872. if (fragState === FragmentState.APPENDING) {
  11873. // Lower the max buffer length and try again
  11874. const playlistType = frag.type;
  11875. const bufferedInfo = this.getFwdBufferInfo(this.mediaBuffer, playlistType);
  11876. const minForwardBufferLength = Math.max(frag.duration, bufferedInfo ? bufferedInfo.len : this.config.maxBufferLength);
  11877. // If backtracking, always remove from the tracker without reducing max buffer length
  11878. const backtrackFragment = this.backtrackFragment;
  11879. const backtracked = backtrackFragment ? frag.sn - backtrackFragment.sn : 0;
  11880. if (backtracked === 1 || this.reduceMaxBufferLength(minForwardBufferLength)) {
  11881. fragmentTracker.removeFragment(frag);
  11882. }
  11883. } else if (((_this$mediaBuffer = this.mediaBuffer) == null ? void 0 : _this$mediaBuffer.buffered.length) === 0) {
11884. // Stopgap for bad tracker / buffer flush behavior
  11885. fragmentTracker.removeAllFragments();
  11886. } else if (fragmentTracker.hasParts(frag.type)) {
  11887. // In low latency mode, remove fragments for which only some parts were buffered
  11888. fragmentTracker.detectPartialFragments({
  11889. frag,
  11890. part: null,
  11891. stats: frag.stats,
  11892. id: frag.type
  11893. });
  11894. if (fragmentTracker.getState(frag) === FragmentState.PARTIAL) {
  11895. fragmentTracker.removeFragment(frag);
  11896. }
  11897. }
  11898. }
  11899. checkLiveUpdate(details) {
  11900. if (details.updated && !details.live) {
  11901. // Live stream ended, update fragment tracker
  11902. const lastFragment = details.fragments[details.fragments.length - 1];
  11903. this.fragmentTracker.detectPartialFragments({
  11904. frag: lastFragment,
  11905. part: null,
  11906. stats: lastFragment.stats,
  11907. id: lastFragment.type
  11908. });
  11909. }
  11910. if (!details.fragments[0]) {
  11911. details.deltaUpdateFailed = true;
  11912. }
  11913. }
  11914. flushMainBuffer(startOffset, endOffset, type = null) {
  11915. if (!(startOffset - endOffset)) {
  11916. return;
  11917. }
  11918. // When alternate audio is playing, the audio-stream-controller is responsible for the audio buffer. Otherwise,
  11919. // passing a null type flushes both buffers
  11920. const flushScope = {
  11921. startOffset,
  11922. endOffset,
  11923. type
  11924. };
  11925. this.hls.trigger(Events.BUFFER_FLUSHING, flushScope);
  11926. }
  11927. _loadInitSegment(frag, level) {
  11928. this._doFragLoad(frag, level).then(data => {
  11929. if (!data || this.fragContextChanged(frag) || !this.levels) {
  11930. throw new Error('init load aborted');
  11931. }
  11932. return data;
  11933. }).then(data => {
  11934. const {
  11935. hls
  11936. } = this;
  11937. const {
  11938. payload
  11939. } = data;
  11940. const decryptData = frag.decryptdata;
  11941. // check to see if the payload needs to be decrypted
  11942. if (payload && payload.byteLength > 0 && decryptData != null && decryptData.key && decryptData.iv && decryptData.method === 'AES-128') {
  11943. const startTime = self.performance.now();
  11944. // decrypt init segment data
  11945. return this.decrypter.decrypt(new Uint8Array(payload), decryptData.key.buffer, decryptData.iv.buffer).catch(err => {
  11946. hls.trigger(Events.ERROR, {
  11947. type: ErrorTypes.MEDIA_ERROR,
  11948. details: ErrorDetails.FRAG_DECRYPT_ERROR,
  11949. fatal: false,
  11950. error: err,
  11951. reason: err.message,
  11952. frag
  11953. });
  11954. throw err;
  11955. }).then(decryptedData => {
  11956. const endTime = self.performance.now();
  11957. hls.trigger(Events.FRAG_DECRYPTED, {
  11958. frag,
  11959. payload: decryptedData,
  11960. stats: {
  11961. tstart: startTime,
  11962. tdecrypt: endTime
  11963. }
  11964. });
  11965. data.payload = decryptedData;
  11966. return this.completeInitSegmentLoad(data);
  11967. });
  11968. }
  11969. return this.completeInitSegmentLoad(data);
  11970. }).catch(reason => {
  11971. if (this.state === State.STOPPED || this.state === State.ERROR) {
  11972. return;
  11973. }
  11974. this.warn(reason);
  11975. this.resetFragmentLoading(frag);
  11976. });
  11977. }
  11978. completeInitSegmentLoad(data) {
  11979. const {
  11980. levels
  11981. } = this;
  11982. if (!levels) {
  11983. throw new Error('init load aborted, missing levels');
  11984. }
  11985. const stats = data.frag.stats;
  11986. this.state = State.IDLE;
  11987. data.frag.data = new Uint8Array(data.payload);
  11988. stats.parsing.start = stats.buffering.start = self.performance.now();
  11989. stats.parsing.end = stats.buffering.end = self.performance.now();
  11990. this.tick();
  11991. }
  11992. fragContextChanged(frag) {
  11993. const {
  11994. fragCurrent
  11995. } = this;
  11996. return !frag || !fragCurrent || frag.sn !== fragCurrent.sn || frag.level !== fragCurrent.level;
  11997. }
  11998. fragBufferedComplete(frag, part) {
  11999. var _frag$startPTS, _frag$endPTS, _this$fragCurrent, _this$fragPrevious;
  12000. const media = this.mediaBuffer ? this.mediaBuffer : this.media;
  12001. this.log(`Buffered ${frag.type} sn: ${frag.sn}${part ? ' part: ' + part.index : ''} of ${this.playlistType === PlaylistLevelType.MAIN ? 'level' : 'track'} ${frag.level} (frag:[${((_frag$startPTS = frag.startPTS) != null ? _frag$startPTS : NaN).toFixed(3)}-${((_frag$endPTS = frag.endPTS) != null ? _frag$endPTS : NaN).toFixed(3)}] > buffer:${media ? TimeRanges.toString(BufferHelper.getBuffered(media)) : '(detached)'})`);
  12002. if (frag.sn !== 'initSegment') {
  12003. var _this$levels;
  12004. if (frag.type !== PlaylistLevelType.SUBTITLE) {
  12005. const el = frag.elementaryStreams;
  12006. if (!Object.keys(el).some(type => !!el[type])) {
  12007. // empty segment
  12008. this.state = State.IDLE;
  12009. return;
  12010. }
  12011. }
  12012. const level = (_this$levels = this.levels) == null ? void 0 : _this$levels[frag.level];
  12013. if (level != null && level.fragmentError) {
  12014. this.log(`Resetting level fragment error count of ${level.fragmentError} on frag buffered`);
  12015. level.fragmentError = 0;
  12016. }
  12017. }
  12018. this.state = State.IDLE;
  12019. if (!media) {
  12020. return;
  12021. }
  12022. if (!this.loadedmetadata && frag.type == PlaylistLevelType.MAIN && media.buffered.length && ((_this$fragCurrent = this.fragCurrent) == null ? void 0 : _this$fragCurrent.sn) === ((_this$fragPrevious = this.fragPrevious) == null ? void 0 : _this$fragPrevious.sn)) {
  12023. this.loadedmetadata = true;
  12024. this.seekToStartPos();
  12025. }
  12026. this.tick();
  12027. }
  12028. seekToStartPos() {}
  12029. _handleFragmentLoadComplete(fragLoadedEndData) {
  12030. const {
  12031. transmuxer
  12032. } = this;
  12033. if (!transmuxer) {
  12034. return;
  12035. }
  12036. const {
  12037. frag,
  12038. part,
  12039. partsLoaded
  12040. } = fragLoadedEndData;
  12041. // If we did not load parts, or loaded all parts, we have complete (not partial) fragment data
  12042. const complete = !partsLoaded || partsLoaded.length === 0 || partsLoaded.some(fragLoaded => !fragLoaded);
  12043. const chunkMeta = new ChunkMetadata(frag.level, frag.sn, frag.stats.chunkCount + 1, 0, part ? part.index : -1, !complete);
  12044. transmuxer.flush(chunkMeta);
  12045. }
  12046. // eslint-disable-next-line @typescript-eslint/no-unused-vars
  12047. _handleFragmentLoadProgress(frag) {}
  12048. _doFragLoad(frag, level, targetBufferTime = null, progressCallback) {
  12049. var _frag$decryptdata;
  12050. const details = level == null ? void 0 : level.details;
  12051. if (!this.levels || !details) {
  12052. throw new Error(`frag load aborted, missing level${details ? '' : ' detail'}s`);
  12053. }
  12054. let keyLoadingPromise = null;
  12055. if (frag.encrypted && !((_frag$decryptdata = frag.decryptdata) != null && _frag$decryptdata.key)) {
  12056. this.log(`Loading key for ${frag.sn} of [${details.startSN}-${details.endSN}], ${this.logPrefix === '[stream-controller]' ? 'level' : 'track'} ${frag.level}`);
  12057. this.state = State.KEY_LOADING;
  12058. this.fragCurrent = frag;
  12059. keyLoadingPromise = this.keyLoader.load(frag).then(keyLoadedData => {
  12060. if (!this.fragContextChanged(keyLoadedData.frag)) {
  12061. this.hls.trigger(Events.KEY_LOADED, keyLoadedData);
  12062. if (this.state === State.KEY_LOADING) {
  12063. this.state = State.IDLE;
  12064. }
  12065. return keyLoadedData;
  12066. }
  12067. });
  12068. this.hls.trigger(Events.KEY_LOADING, {
  12069. frag
  12070. });
  12071. if (this.fragCurrent === null) {
  12072. keyLoadingPromise = Promise.reject(new Error(`frag load aborted, context changed in KEY_LOADING`));
  12073. }
  12074. } else if (!frag.encrypted && details.encryptedFragments.length) {
  12075. this.keyLoader.loadClear(frag, details.encryptedFragments);
  12076. }
  12077. targetBufferTime = Math.max(frag.start, targetBufferTime || 0);
  12078. if (this.config.lowLatencyMode && frag.sn !== 'initSegment') {
  12079. const partList = details.partList;
  12080. if (partList && progressCallback) {
  12081. if (targetBufferTime > frag.end && details.fragmentHint) {
  12082. frag = details.fragmentHint;
  12083. }
  12084. const partIndex = this.getNextPart(partList, frag, targetBufferTime);
  12085. if (partIndex > -1) {
  12086. const part = partList[partIndex];
  12087. this.log(`Loading part sn: ${frag.sn} p: ${part.index} cc: ${frag.cc} of playlist [${details.startSN}-${details.endSN}] parts [0-${partIndex}-${partList.length - 1}] ${this.logPrefix === '[stream-controller]' ? 'level' : 'track'}: ${frag.level}, target: ${parseFloat(targetBufferTime.toFixed(3))}`);
  12088. this.nextLoadPosition = part.start + part.duration;
  12089. this.state = State.FRAG_LOADING;
  12090. let _result;
  12091. if (keyLoadingPromise) {
  12092. _result = keyLoadingPromise.then(keyLoadedData => {
  12093. if (!keyLoadedData || this.fragContextChanged(keyLoadedData.frag)) {
  12094. return null;
  12095. }
  12096. return this.doFragPartsLoad(frag, part, level, progressCallback);
  12097. }).catch(error => this.handleFragLoadError(error));
  12098. } else {
  12099. _result = this.doFragPartsLoad(frag, part, level, progressCallback).catch(error => this.handleFragLoadError(error));
  12100. }
  12101. this.hls.trigger(Events.FRAG_LOADING, {
  12102. frag,
  12103. part,
  12104. targetBufferTime
  12105. });
  12106. if (this.fragCurrent === null) {
  12107. return Promise.reject(new Error(`frag load aborted, context changed in FRAG_LOADING parts`));
  12108. }
  12109. return _result;
  12110. } else if (!frag.url || this.loadedEndOfParts(partList, targetBufferTime)) {
  12111. // Fragment hint has no parts
  12112. return Promise.resolve(null);
  12113. }
  12114. }
  12115. }
  12116. this.log(`Loading fragment ${frag.sn} cc: ${frag.cc} ${details ? 'of [' + details.startSN + '-' + details.endSN + '] ' : ''}${this.logPrefix === '[stream-controller]' ? 'level' : 'track'}: ${frag.level}, target: ${parseFloat(targetBufferTime.toFixed(3))}`);
  12117. // Don't update nextLoadPosition for fragments which are not buffered
  12118. if (isFiniteNumber(frag.sn) && !this.bitrateTest) {
  12119. this.nextLoadPosition = frag.start + frag.duration;
  12120. }
  12121. this.state = State.FRAG_LOADING;
  12122. // Load key before streaming fragment data
  12123. const dataOnProgress = this.config.progressive;
  12124. let result;
  12125. if (dataOnProgress && keyLoadingPromise) {
  12126. result = keyLoadingPromise.then(keyLoadedData => {
  12127. if (!keyLoadedData || this.fragContextChanged(keyLoadedData == null ? void 0 : keyLoadedData.frag)) {
  12128. return null;
  12129. }
  12130. return this.fragmentLoader.load(frag, progressCallback);
  12131. }).catch(error => this.handleFragLoadError(error));
  12132. } else {
  12133. // load unencrypted fragment data with progress event,
  12134. // or handle fragment result after key and fragment are finished loading
  12135. result = Promise.all([this.fragmentLoader.load(frag, dataOnProgress ? progressCallback : undefined), keyLoadingPromise]).then(([fragLoadedData]) => {
  12136. if (!dataOnProgress && fragLoadedData && progressCallback) {
  12137. progressCallback(fragLoadedData);
  12138. }
  12139. return fragLoadedData;
  12140. }).catch(error => this.handleFragLoadError(error));
  12141. }
  12142. this.hls.trigger(Events.FRAG_LOADING, {
  12143. frag,
  12144. targetBufferTime
  12145. });
  12146. if (this.fragCurrent === null) {
  12147. return Promise.reject(new Error(`frag load aborted, context changed in FRAG_LOADING`));
  12148. }
  12149. return result;
  12150. }
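/*
 * A condensed sketch of the non-progressive branch above (hypothetical standalone helper, not a class
 * member): the key and the fragment are requested in parallel, and the fragment payload is handed to
 * the progress callback only once both promises have settled successfully.
 *
 *   function loadKeyAndFragment(fragmentLoader, keyLoadingPromise, frag, progressCallback) {
 *     return Promise.all([fragmentLoader.load(frag), keyLoadingPromise]).then(([fragLoadedData]) => {
 *       if (fragLoadedData && progressCallback) {
 *         progressCallback(fragLoadedData); // payload delivered only after the key is ready
 *       }
 *       return fragLoadedData;
 *     });
 *   }
 */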
  12151. doFragPartsLoad(frag, fromPart, level, progressCallback) {
  12152. return new Promise((resolve, reject) => {
  12153. var _level$details;
  12154. const partsLoaded = [];
  12155. const initialPartList = (_level$details = level.details) == null ? void 0 : _level$details.partList;
  12156. const loadPart = part => {
  12157. this.fragmentLoader.loadPart(frag, part, progressCallback).then(partLoadedData => {
  12158. partsLoaded[part.index] = partLoadedData;
  12159. const loadedPart = partLoadedData.part;
  12160. this.hls.trigger(Events.FRAG_LOADED, partLoadedData);
  12161. const nextPart = getPartWith(level, frag.sn, part.index + 1) || findPart(initialPartList, frag.sn, part.index + 1);
  12162. if (nextPart) {
  12163. loadPart(nextPart);
  12164. } else {
  12165. return resolve({
  12166. frag,
  12167. part: loadedPart,
  12168. partsLoaded
  12169. });
  12170. }
  12171. }).catch(reject);
  12172. };
  12173. loadPart(fromPart);
  12174. });
  12175. }
  12176. handleFragLoadError(error) {
  12177. if ('data' in error) {
  12178. const data = error.data;
  12179. if (error.data && data.details === ErrorDetails.INTERNAL_ABORTED) {
  12180. this.handleFragLoadAborted(data.frag, data.part);
  12181. } else {
  12182. this.hls.trigger(Events.ERROR, data);
  12183. }
  12184. } else {
  12185. this.hls.trigger(Events.ERROR, {
  12186. type: ErrorTypes.OTHER_ERROR,
  12187. details: ErrorDetails.INTERNAL_EXCEPTION,
  12188. err: error,
  12189. error,
  12190. fatal: true
  12191. });
  12192. }
  12193. return null;
  12194. }
  12195. _handleTransmuxerFlush(chunkMeta) {
  12196. const context = this.getCurrentContext(chunkMeta);
  12197. if (!context || this.state !== State.PARSING) {
  12198. if (!this.fragCurrent && this.state !== State.STOPPED && this.state !== State.ERROR) {
  12199. this.state = State.IDLE;
  12200. }
  12201. return;
  12202. }
  12203. const {
  12204. frag,
  12205. part,
  12206. level
  12207. } = context;
  12208. const now = self.performance.now();
  12209. frag.stats.parsing.end = now;
  12210. if (part) {
  12211. part.stats.parsing.end = now;
  12212. }
  12213. this.updateLevelTiming(frag, part, level, chunkMeta.partial);
  12214. }
  12215. getCurrentContext(chunkMeta) {
  12216. const {
  12217. levels,
  12218. fragCurrent
  12219. } = this;
  12220. const {
  12221. level: levelIndex,
  12222. sn,
  12223. part: partIndex
  12224. } = chunkMeta;
  12225. if (!(levels != null && levels[levelIndex])) {
  12226. this.warn(`Levels object was unset while buffering fragment ${sn} of level ${levelIndex}. The current chunk will not be buffered.`);
  12227. return null;
  12228. }
  12229. const level = levels[levelIndex];
  12230. const part = partIndex > -1 ? getPartWith(level, sn, partIndex) : null;
  12231. const frag = part ? part.fragment : getFragmentWithSN(level, sn, fragCurrent);
  12232. if (!frag) {
  12233. return null;
  12234. }
  12235. if (fragCurrent && fragCurrent !== frag) {
  12236. frag.stats = fragCurrent.stats;
  12237. }
  12238. return {
  12239. frag,
  12240. part,
  12241. level
  12242. };
  12243. }
  12244. bufferFragmentData(data, frag, part, chunkMeta, noBacktracking) {
  12245. var _buffer;
  12246. if (!data || this.state !== State.PARSING) {
  12247. return;
  12248. }
  12249. const {
  12250. data1,
  12251. data2
  12252. } = data;
  12253. let buffer = data1;
  12254. if (data1 && data2) {
  12255. // Combine the moof + mdat so that we buffer with a single append
  12256. buffer = appendUint8Array(data1, data2);
  12257. }
  12258. if (!((_buffer = buffer) != null && _buffer.length)) {
  12259. return;
  12260. }
  12261. const segment = {
  12262. type: data.type,
  12263. frag,
  12264. part,
  12265. chunkMeta,
  12266. parent: frag.type,
  12267. data: buffer
  12268. };
  12269. this.hls.trigger(Events.BUFFER_APPENDING, segment);
  12270. if (data.dropped && data.independent && !part) {
  12271. if (noBacktracking) {
  12272. return;
  12273. }
  12274. // Clear buffer so that we reload previous segments sequentially if required
  12275. this.flushBufferGap(frag);
  12276. }
  12277. }
  12278. flushBufferGap(frag) {
  12279. const media = this.media;
  12280. if (!media) {
  12281. return;
  12282. }
  12283. // If currentTime is not buffered, clear the back buffer so that we can backtrack as much as needed
  12284. if (!BufferHelper.isBuffered(media, media.currentTime)) {
  12285. this.flushMainBuffer(0, frag.start);
  12286. return;
  12287. }
  12288. // Remove back-buffer without interrupting playback to allow back tracking
  12289. const currentTime = media.currentTime;
  12290. const bufferInfo = BufferHelper.bufferInfo(media, currentTime, 0);
  12291. const fragDuration = frag.duration;
  12292. const segmentFraction = Math.min(this.config.maxFragLookUpTolerance * 2, fragDuration * 0.25);
  12293. const start = Math.max(Math.min(frag.start - segmentFraction, bufferInfo.end - segmentFraction), currentTime + segmentFraction);
  12294. if (frag.start - start > segmentFraction) {
  12295. this.flushMainBuffer(start, frag.start);
  12296. }
  12297. }
  12298. getFwdBufferInfo(bufferable, type) {
  12299. const pos = this.getLoadPosition();
  12300. if (!isFiniteNumber(pos)) {
  12301. return null;
  12302. }
  12303. return this.getFwdBufferInfoAtPos(bufferable, pos, type);
  12304. }
  12305. getFwdBufferInfoAtPos(bufferable, pos, type) {
  12306. const {
  12307. config: {
  12308. maxBufferHole
  12309. }
  12310. } = this;
  12311. const bufferInfo = BufferHelper.bufferInfo(bufferable, pos, maxBufferHole);
  12312. // Workaround flaw in getting forward buffer when maxBufferHole is smaller than gap at current pos
  12313. if (bufferInfo.len === 0 && bufferInfo.nextStart !== undefined) {
  12314. const bufferedFragAtPos = this.fragmentTracker.getBufferedFrag(pos, type);
  12315. if (bufferedFragAtPos && bufferInfo.nextStart < bufferedFragAtPos.end) {
  12316. return BufferHelper.bufferInfo(bufferable, pos, Math.max(bufferInfo.nextStart, maxBufferHole));
  12317. }
  12318. }
  12319. return bufferInfo;
  12320. }
  12321. getMaxBufferLength(levelBitrate) {
  12322. const {
  12323. config
  12324. } = this;
  12325. let maxBufLen;
  12326. if (levelBitrate) {
  12327. maxBufLen = Math.max(8 * config.maxBufferSize / levelBitrate, config.maxBufferLength);
  12328. } else {
  12329. maxBufLen = config.maxBufferLength;
  12330. }
  12331. return Math.min(maxBufLen, config.maxMaxBufferLength);
  12332. }
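// Worked example (editorial note, assuming the documented defaults maxBufferSize = 60 MB,
// maxBufferLength = 30 s and maxMaxBufferLength = 600 s): for a 5 Mbps level,
// 8 * 60_000_000 / 5_000_000 = 96, so getMaxBufferLength(5_000_000) yields
// min(max(96, 30), 600) = 96 seconds of forward buffer target.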
  12333. reduceMaxBufferLength(threshold) {
  12334. const config = this.config;
  12335. const minLength = threshold || config.maxBufferLength;
  12336. const reducedLength = config.maxMaxBufferLength / 2;
  12337. if (reducedLength >= minLength) {
  12338. // reduce max buffer length as it might be too high. we do this to avoid loop flushing ...
  12339. config.maxMaxBufferLength = reducedLength;
  12340. this.warn(`Reduce max buffer length to ${reducedLength}s`);
  12341. return true;
  12342. }
  12343. return false;
  12344. }
  12345. getAppendedFrag(position, playlistType = PlaylistLevelType.MAIN) {
  12346. const fragOrPart = this.fragmentTracker.getAppendedFrag(position, PlaylistLevelType.MAIN);
  12347. if (fragOrPart && 'fragment' in fragOrPart) {
  12348. return fragOrPart.fragment;
  12349. }
  12350. return fragOrPart;
  12351. }
  12352. getNextFragment(pos, levelDetails) {
  12353. const fragments = levelDetails.fragments;
  12354. const fragLen = fragments.length;
  12355. if (!fragLen) {
  12356. return null;
  12357. }
  12358. // find fragment index, contiguous with end of buffer position
  12359. const {
  12360. config
  12361. } = this;
  12362. const start = fragments[0].start;
  12363. let frag;
  12364. if (levelDetails.live) {
  12365. const initialLiveManifestSize = config.initialLiveManifestSize;
  12366. if (fragLen < initialLiveManifestSize) {
  12367. this.warn(`Not enough fragments to start playback (have: ${fragLen}, need: ${initialLiveManifestSize})`);
  12368. return null;
  12369. }
  12370. // The real fragment start times for a live stream are only known after the PTS range for that level is known.
  12371. // In order to discover the range, we load the best matching fragment for that level and demux it.
  12372. // Do not load using live logic if the starting frag is requested - we want to use getFragmentAtPosition() so that
  12373. // we get the fragment matching that start time
  12374. if (!levelDetails.PTSKnown && !this.startFragRequested && this.startPosition === -1 || pos < start) {
  12375. frag = this.getInitialLiveFragment(levelDetails, fragments);
  12376. this.startPosition = this.nextLoadPosition = frag ? this.hls.liveSyncPosition || frag.start : pos;
  12377. }
  12378. } else if (pos <= start) {
  12379. // VoD playlist: if loadPosition before start of playlist, load first fragment
  12380. frag = fragments[0];
  12381. }
  12382. // If we haven't run into any special cases already, just load the fragment most closely matching the requested position
  12383. if (!frag) {
  12384. const end = config.lowLatencyMode ? levelDetails.partEnd : levelDetails.fragmentEnd;
  12385. frag = this.getFragmentAtPosition(pos, end, levelDetails);
  12386. }
  12387. return this.mapToInitFragWhenRequired(frag);
  12388. }
  12389. isLoopLoading(frag, targetBufferTime) {
  12390. const trackerState = this.fragmentTracker.getState(frag);
  12391. return (trackerState === FragmentState.OK || trackerState === FragmentState.PARTIAL && !!frag.gap) && this.nextLoadPosition > targetBufferTime;
  12392. }
  12393. getNextFragmentLoopLoading(frag, levelDetails, bufferInfo, playlistType, maxBufLen) {
  12394. const gapStart = frag.gap;
  12395. const nextFragment = this.getNextFragment(this.nextLoadPosition, levelDetails);
  12396. if (nextFragment === null) {
  12397. return nextFragment;
  12398. }
  12399. frag = nextFragment;
  12400. if (gapStart && frag && !frag.gap && bufferInfo.nextStart) {
  12401. // Media buffered after GAP tags should not make the next buffer timerange exceed forward buffer length
  12402. const nextbufferInfo = this.getFwdBufferInfoAtPos(this.mediaBuffer ? this.mediaBuffer : this.media, bufferInfo.nextStart, playlistType);
  12403. if (nextbufferInfo !== null && bufferInfo.len + nextbufferInfo.len >= maxBufLen) {
12404. // Returning here might result in not finding an audio and video candidate to skip to
  12405. this.log(`buffer full after gaps in "${playlistType}" playlist starting at sn: ${frag.sn}`);
  12406. return null;
  12407. }
  12408. }
  12409. return frag;
  12410. }
  12411. mapToInitFragWhenRequired(frag) {
  12412. // If an initSegment is present, it must be buffered first
  12413. if (frag != null && frag.initSegment && !(frag != null && frag.initSegment.data) && !this.bitrateTest) {
  12414. return frag.initSegment;
  12415. }
  12416. return frag;
  12417. }
  12418. getNextPart(partList, frag, targetBufferTime) {
  12419. let nextPart = -1;
  12420. let contiguous = false;
  12421. let independentAttrOmitted = true;
  12422. for (let i = 0, len = partList.length; i < len; i++) {
  12423. const part = partList[i];
  12424. independentAttrOmitted = independentAttrOmitted && !part.independent;
  12425. if (nextPart > -1 && targetBufferTime < part.start) {
  12426. break;
  12427. }
  12428. const loaded = part.loaded;
  12429. if (loaded) {
  12430. nextPart = -1;
  12431. } else if ((contiguous || part.independent || independentAttrOmitted) && part.fragment === frag) {
  12432. nextPart = i;
  12433. }
  12434. contiguous = loaded;
  12435. }
  12436. return nextPart;
  12437. }
  12438. loadedEndOfParts(partList, targetBufferTime) {
  12439. const lastPart = partList[partList.length - 1];
  12440. return lastPart && targetBufferTime > lastPart.start && lastPart.loaded;
  12441. }
  12442. /*
12443. This method is used to find the best matching first fragment for a live playlist. This fragment is used to calculate the
  12444. "sliding" of the playlist, which is its offset from the start of playback. After sliding we can compute the real
  12445. start and end times for each fragment in the playlist (after which this method will not need to be called).
  12446. */
  12447. getInitialLiveFragment(levelDetails, fragments) {
  12448. const fragPrevious = this.fragPrevious;
  12449. let frag = null;
  12450. if (fragPrevious) {
  12451. if (levelDetails.hasProgramDateTime) {
  12452. // Prefer using PDT, because it can be accurate enough to choose the correct fragment without knowing the level sliding
  12453. this.log(`Live playlist, switching playlist, load frag with same PDT: ${fragPrevious.programDateTime}`);
  12454. frag = findFragmentByPDT(fragments, fragPrevious.endProgramDateTime, this.config.maxFragLookUpTolerance);
  12455. }
  12456. if (!frag) {
  12457. // SN does not need to be accurate between renditions, but depending on the packaging it may be so.
  12458. const targetSN = fragPrevious.sn + 1;
  12459. if (targetSN >= levelDetails.startSN && targetSN <= levelDetails.endSN) {
  12460. const fragNext = fragments[targetSN - levelDetails.startSN];
  12461. // Ensure that we're staying within the continuity range, since PTS resets upon a new range
  12462. if (fragPrevious.cc === fragNext.cc) {
  12463. frag = fragNext;
  12464. this.log(`Live playlist, switching playlist, load frag with next SN: ${frag.sn}`);
  12465. }
  12466. }
  12467. // It's important to stay within the continuity range if available; otherwise the fragments in the playlist
  12468. // will have the wrong start times
  12469. if (!frag) {
  12470. frag = findFragWithCC(fragments, fragPrevious.cc);
  12471. if (frag) {
  12472. this.log(`Live playlist, switching playlist, load frag with same CC: ${frag.sn}`);
  12473. }
  12474. }
  12475. }
  12476. } else {
  12477. // Find a new start fragment when fragPrevious is null
  12478. const liveStart = this.hls.liveSyncPosition;
  12479. if (liveStart !== null) {
  12480. frag = this.getFragmentAtPosition(liveStart, this.bitrateTest ? levelDetails.fragmentEnd : levelDetails.edge, levelDetails);
  12481. }
  12482. }
  12483. return frag;
  12484. }
  12485. /*
  12486. This method finds the best matching fragment given the provided position.
  12487. */
  12488. getFragmentAtPosition(bufferEnd, end, levelDetails) {
  12489. const {
  12490. config
  12491. } = this;
  12492. let {
  12493. fragPrevious
  12494. } = this;
  12495. let {
  12496. fragments,
  12497. endSN
  12498. } = levelDetails;
  12499. const {
  12500. fragmentHint
  12501. } = levelDetails;
  12502. const {
  12503. maxFragLookUpTolerance
  12504. } = config;
  12505. const partList = levelDetails.partList;
  12506. const loadingParts = !!(config.lowLatencyMode && partList != null && partList.length && fragmentHint);
  12507. if (loadingParts && fragmentHint && !this.bitrateTest) {
  12508. // Include incomplete fragment with parts at end
  12509. fragments = fragments.concat(fragmentHint);
  12510. endSN = fragmentHint.sn;
  12511. }
  12512. let frag;
  12513. if (bufferEnd < end) {
  12514. const lookupTolerance = bufferEnd > end - maxFragLookUpTolerance ? 0 : maxFragLookUpTolerance;
  12515. // Remove the tolerance if it would put the bufferEnd past the actual end of stream
  12516. // Uses buffer and sequence number to calculate switch segment (required if using EXT-X-DISCONTINUITY-SEQUENCE)
  12517. frag = findFragmentByPTS(fragPrevious, fragments, bufferEnd, lookupTolerance);
  12518. } else {
  12519. // reach end of playlist
  12520. frag = fragments[fragments.length - 1];
  12521. }
  12522. if (frag) {
  12523. const curSNIdx = frag.sn - levelDetails.startSN;
  12524. // Move fragPrevious forward to support forcing the next fragment to load
  12525. // when the buffer catches up to a previously buffered range.
  12526. const fragState = this.fragmentTracker.getState(frag);
  12527. if (fragState === FragmentState.OK || fragState === FragmentState.PARTIAL && frag.gap) {
  12528. fragPrevious = frag;
  12529. }
  12530. if (fragPrevious && frag.sn === fragPrevious.sn && (!loadingParts || partList[0].fragment.sn > frag.sn)) {
  12531. // Force the next fragment to load if the previous one was already selected. This can occasionally happen with
  12532. // non-uniform fragment durations
  12533. const sameLevel = fragPrevious && frag.level === fragPrevious.level;
  12534. if (sameLevel) {
  12535. const nextFrag = fragments[curSNIdx + 1];
  12536. if (frag.sn < endSN && this.fragmentTracker.getState(nextFrag) !== FragmentState.OK) {
  12537. frag = nextFrag;
  12538. } else {
  12539. frag = null;
  12540. }
  12541. }
  12542. }
  12543. }
  12544. return frag;
  12545. }
  12546. synchronizeToLiveEdge(levelDetails) {
  12547. const {
  12548. config,
  12549. media
  12550. } = this;
  12551. if (!media) {
  12552. return;
  12553. }
  12554. const liveSyncPosition = this.hls.liveSyncPosition;
  12555. const currentTime = media.currentTime;
  12556. const start = levelDetails.fragments[0].start;
  12557. const end = levelDetails.edge;
  12558. const withinSlidingWindow = currentTime >= start - config.maxFragLookUpTolerance && currentTime <= end;
  12559. // Continue if we can seek forward to sync position or if current time is outside of sliding window
  12560. if (liveSyncPosition !== null && media.duration > liveSyncPosition && (currentTime < liveSyncPosition || !withinSlidingWindow)) {
  12561. // Continue if buffer is starving or if current time is behind max latency
  12562. const maxLatency = config.liveMaxLatencyDuration !== undefined ? config.liveMaxLatencyDuration : config.liveMaxLatencyDurationCount * levelDetails.targetduration;
  12563. if (!withinSlidingWindow && media.readyState < 4 || currentTime < end - maxLatency) {
  12564. if (!this.loadedmetadata) {
  12565. this.nextLoadPosition = liveSyncPosition;
  12566. }
  12567. // Only seek if ready and there is not a significant forward buffer available for playback
  12568. if (media.readyState) {
  12569. this.warn(`Playback: ${currentTime.toFixed(3)} is located too far from the end of live sliding playlist: ${end}, reset currentTime to : ${liveSyncPosition.toFixed(3)}`);
  12570. media.currentTime = liveSyncPosition;
  12571. }
  12572. }
  12573. }
  12574. }
  12575. alignPlaylists(details, previousDetails, switchDetails) {
  12576. // FIXME: If not for `shouldAlignOnDiscontinuities` requiring fragPrevious.cc,
  12577. // this could all go in level-helper mergeDetails()
  12578. const length = details.fragments.length;
  12579. if (!length) {
  12580. this.warn(`No fragments in live playlist`);
  12581. return 0;
  12582. }
  12583. const slidingStart = details.fragments[0].start;
  12584. const firstLevelLoad = !previousDetails;
  12585. const aligned = details.alignedSliding && isFiniteNumber(slidingStart);
  12586. if (firstLevelLoad || !aligned && !slidingStart) {
  12587. const {
  12588. fragPrevious
  12589. } = this;
  12590. alignStream(fragPrevious, switchDetails, details);
  12591. const alignedSlidingStart = details.fragments[0].start;
  12592. this.log(`Live playlist sliding: ${alignedSlidingStart.toFixed(2)} start-sn: ${previousDetails ? previousDetails.startSN : 'na'}->${details.startSN} prev-sn: ${fragPrevious ? fragPrevious.sn : 'na'} fragments: ${length}`);
  12593. return alignedSlidingStart;
  12594. }
  12595. return slidingStart;
  12596. }
  12597. waitForCdnTuneIn(details) {
  12598. // Wait for Low-Latency CDN Tune-in to get an updated playlist
  12599. const advancePartLimit = 3;
  12600. return details.live && details.canBlockReload && details.partTarget && details.tuneInGoal > Math.max(details.partHoldBack, details.partTarget * advancePartLimit);
  12601. }
  12602. setStartPosition(details, sliding) {
  12603. // compute start position if set to -1. use it straight away if value is defined
  12604. let startPosition = this.startPosition;
  12605. if (startPosition < sliding) {
  12606. startPosition = -1;
  12607. }
  12608. if (startPosition === -1 || this.lastCurrentTime === -1) {
  12609. // Use Playlist EXT-X-START:TIME-OFFSET when set
  12610. // Prioritize Multivariant Playlist offset so that main, audio, and subtitle stream-controller start times match
  12611. const offsetInMultivariantPlaylist = this.startTimeOffset !== null;
  12612. const startTimeOffset = offsetInMultivariantPlaylist ? this.startTimeOffset : details.startTimeOffset;
  12613. if (startTimeOffset !== null && isFiniteNumber(startTimeOffset)) {
  12614. startPosition = sliding + startTimeOffset;
  12615. if (startTimeOffset < 0) {
  12616. startPosition += details.totalduration;
  12617. }
  12618. startPosition = Math.min(Math.max(sliding, startPosition), sliding + details.totalduration);
  12619. this.log(`Start time offset ${startTimeOffset} found in ${offsetInMultivariantPlaylist ? 'multivariant' : 'media'} playlist, adjust startPosition to ${startPosition}`);
  12620. this.startPosition = startPosition;
  12621. } else if (details.live) {
  12622. // Leave this.startPosition at -1, so that we can use `getInitialLiveFragment` logic when startPosition has
12623. // not been specified via the config or as an argument to startLoad (#3736).
  12624. startPosition = this.hls.liveSyncPosition || sliding;
  12625. } else {
  12626. this.startPosition = startPosition = 0;
  12627. }
  12628. this.lastCurrentTime = startPosition;
  12629. }
  12630. this.nextLoadPosition = startPosition;
  12631. }
  12632. getLoadPosition() {
  12633. const {
  12634. media
  12635. } = this;
  12636. // if we have not yet loaded any fragment, start loading from start position
  12637. let pos = 0;
  12638. if (this.loadedmetadata && media) {
  12639. pos = media.currentTime;
  12640. } else if (this.nextLoadPosition) {
  12641. pos = this.nextLoadPosition;
  12642. }
  12643. return pos;
  12644. }
  12645. handleFragLoadAborted(frag, part) {
  12646. if (this.transmuxer && frag.sn !== 'initSegment' && frag.stats.aborted) {
  12647. this.warn(`Fragment ${frag.sn}${part ? ' part ' + part.index : ''} of level ${frag.level} was aborted`);
  12648. this.resetFragmentLoading(frag);
  12649. }
  12650. }
  12651. resetFragmentLoading(frag) {
  12652. if (!this.fragCurrent || !this.fragContextChanged(frag) && this.state !== State.FRAG_LOADING_WAITING_RETRY) {
  12653. this.state = State.IDLE;
  12654. }
  12655. }
  12656. onFragmentOrKeyLoadError(filterType, data) {
  12657. if (data.chunkMeta && !data.frag) {
  12658. const context = this.getCurrentContext(data.chunkMeta);
  12659. if (context) {
  12660. data.frag = context.frag;
  12661. }
  12662. }
  12663. const frag = data.frag;
  12664. // Handle frag error related to caller's filterType
  12665. if (!frag || frag.type !== filterType || !this.levels) {
  12666. return;
  12667. }
  12668. if (this.fragContextChanged(frag)) {
  12669. var _this$fragCurrent2;
  12670. this.warn(`Frag load error must match current frag to retry ${frag.url} > ${(_this$fragCurrent2 = this.fragCurrent) == null ? void 0 : _this$fragCurrent2.url}`);
  12671. return;
  12672. }
  12673. const gapTagEncountered = data.details === ErrorDetails.FRAG_GAP;
  12674. if (gapTagEncountered) {
  12675. this.fragmentTracker.fragBuffered(frag, true);
  12676. }
12677. // keep retrying until the retry limit is reached
  12678. const errorAction = data.errorAction;
  12679. const {
  12680. action,
  12681. retryCount = 0,
  12682. retryConfig
  12683. } = errorAction || {};
  12684. if (errorAction && action === NetworkErrorAction.RetryRequest && retryConfig) {
  12685. this.resetStartWhenNotLoaded(this.levelLastLoaded);
  12686. const delay = getRetryDelay(retryConfig, retryCount);
  12687. this.warn(`Fragment ${frag.sn} of ${filterType} ${frag.level} errored with ${data.details}, retrying loading ${retryCount + 1}/${retryConfig.maxNumRetry} in ${delay}ms`);
  12688. errorAction.resolved = true;
  12689. this.retryDate = self.performance.now() + delay;
  12690. this.state = State.FRAG_LOADING_WAITING_RETRY;
  12691. } else if (retryConfig && errorAction) {
  12692. this.resetFragmentErrors(filterType);
  12693. if (retryCount < retryConfig.maxNumRetry) {
  12694. // Network retry is skipped when level switch is preferred
  12695. if (!gapTagEncountered && action !== NetworkErrorAction.RemoveAlternatePermanently) {
  12696. errorAction.resolved = true;
  12697. }
  12698. } else {
  12699. logger.warn(`${data.details} reached or exceeded max retry (${retryCount})`);
  12700. return;
  12701. }
  12702. } else if ((errorAction == null ? void 0 : errorAction.action) === NetworkErrorAction.SendAlternateToPenaltyBox) {
  12703. this.state = State.WAITING_LEVEL;
  12704. } else {
  12705. this.state = State.ERROR;
  12706. }
  12707. // Perform next async tick sooner to speed up error action resolution
  12708. this.tickImmediate();
  12709. }
  12710. reduceLengthAndFlushBuffer(data) {
  12711. // if in appending state
  12712. if (this.state === State.PARSING || this.state === State.PARSED) {
  12713. const playlistType = data.parent;
  12714. const bufferedInfo = this.getFwdBufferInfo(this.mediaBuffer, playlistType);
12715. // 0.5 : tolerance needed as some browsers stall playback before reaching buffered end
  12716. // reduce max buf len if current position is buffered
  12717. const buffered = bufferedInfo && bufferedInfo.len > 0.5;
  12718. if (buffered) {
  12719. this.reduceMaxBufferLength(bufferedInfo.len);
  12720. }
  12721. const flushBuffer = !buffered;
  12722. if (flushBuffer) {
  12723. // current position is not buffered, but browser is still complaining about buffer full error
  12724. // this happens on IE/Edge, refer to https://github.com/video-dev/hls.js/pull/708
  12725. // in that case flush the whole audio buffer to recover
  12726. this.warn(`Buffer full error while media.currentTime is not buffered, flush ${playlistType} buffer`);
  12727. }
  12728. if (data.frag) {
  12729. this.fragmentTracker.removeFragment(data.frag);
  12730. this.nextLoadPosition = data.frag.start;
  12731. }
  12732. this.resetLoadingState();
  12733. return flushBuffer;
  12734. }
  12735. return false;
  12736. }
  12737. resetFragmentErrors(filterType) {
  12738. if (filterType === PlaylistLevelType.AUDIO) {
12739. // Reset the current fragment, since audio-track audio is essential and may not have a fail-over track
  12740. this.fragCurrent = null;
  12741. }
  12742. // Fragment errors that result in a level switch or redundant fail-over
  12743. // should reset the stream controller state to idle
  12744. if (!this.loadedmetadata) {
  12745. this.startFragRequested = false;
  12746. }
  12747. if (this.state !== State.STOPPED) {
  12748. this.state = State.IDLE;
  12749. }
  12750. }
  12751. afterBufferFlushed(media, bufferType, playlistType) {
  12752. if (!media) {
  12753. return;
  12754. }
12755. // After successful buffer flushing, filter flushed fragments from bufferedFrags; use mediaBuffered instead of media
12756. // (so that we check against video.buffered ranges in case of an alt audio track)
  12757. const bufferedTimeRanges = BufferHelper.getBuffered(media);
  12758. this.fragmentTracker.detectEvictedFragments(bufferType, bufferedTimeRanges, playlistType);
  12759. if (this.state === State.ENDED) {
  12760. this.resetLoadingState();
  12761. }
  12762. }
  12763. resetLoadingState() {
  12764. this.log('Reset loading state');
  12765. this.fragCurrent = null;
  12766. this.fragPrevious = null;
  12767. this.state = State.IDLE;
  12768. }
  12769. resetStartWhenNotLoaded(level) {
12770. // if loadedmetadata is not set, it means that the first frag request failed
  12771. // in that case, reset startFragRequested flag
  12772. if (!this.loadedmetadata) {
  12773. this.startFragRequested = false;
  12774. const details = level ? level.details : null;
  12775. if (details != null && details.live) {
  12776. // Update the start position and return to IDLE to recover live start
  12777. this.startPosition = -1;
  12778. this.setStartPosition(details, 0);
  12779. this.resetLoadingState();
  12780. } else {
  12781. this.nextLoadPosition = this.startPosition;
  12782. }
  12783. }
  12784. }
  12785. resetWhenMissingContext(chunkMeta) {
  12786. this.warn(`The loading context changed while buffering fragment ${chunkMeta.sn} of level ${chunkMeta.level}. This chunk will not be buffered.`);
  12787. this.removeUnbufferedFrags();
  12788. this.resetStartWhenNotLoaded(this.levelLastLoaded);
  12789. this.resetLoadingState();
  12790. }
  12791. removeUnbufferedFrags(start = 0) {
  12792. this.fragmentTracker.removeFragmentsInRange(start, Infinity, this.playlistType, false, true);
  12793. }
  12794. updateLevelTiming(frag, part, level, partial) {
  12795. var _this$transmuxer;
  12796. const details = level.details;
  12797. if (!details) {
  12798. this.warn('level.details undefined');
  12799. return;
  12800. }
  12801. const parsed = Object.keys(frag.elementaryStreams).reduce((result, type) => {
  12802. const info = frag.elementaryStreams[type];
  12803. if (info) {
  12804. const parsedDuration = info.endPTS - info.startPTS;
  12805. if (parsedDuration <= 0) {
12806. // Destroy the transmuxer after its next time offset failed to advance because duration was <= 0.
  12807. // The new transmuxer will be configured with a time offset matching the next fragment start,
  12808. // preventing the timeline from shifting.
  12809. this.warn(`Could not parse fragment ${frag.sn} ${type} duration reliably (${parsedDuration})`);
  12810. return result || false;
  12811. }
  12812. const drift = partial ? 0 : updateFragPTSDTS(details, frag, info.startPTS, info.endPTS, info.startDTS, info.endDTS);
  12813. this.hls.trigger(Events.LEVEL_PTS_UPDATED, {
  12814. details,
  12815. level,
  12816. drift,
  12817. type,
  12818. frag,
  12819. start: info.startPTS,
  12820. end: info.endPTS
  12821. });
  12822. return true;
  12823. }
  12824. return result;
  12825. }, false);
  12826. if (!parsed && ((_this$transmuxer = this.transmuxer) == null ? void 0 : _this$transmuxer.error) === null) {
  12827. const error = new Error(`Found no media in fragment ${frag.sn} of level ${frag.level} resetting transmuxer to fallback to playlist timing`);
  12828. if (level.fragmentError === 0) {
  12829. // Mark and track the odd empty segment as a gap to avoid reloading
  12830. level.fragmentError++;
  12831. frag.gap = true;
  12832. this.fragmentTracker.removeFragment(frag);
  12833. this.fragmentTracker.fragBuffered(frag, true);
  12834. }
  12835. this.warn(error.message);
  12836. this.hls.trigger(Events.ERROR, {
  12837. type: ErrorTypes.MEDIA_ERROR,
  12838. details: ErrorDetails.FRAG_PARSING_ERROR,
  12839. fatal: false,
  12840. error,
  12841. frag,
  12842. reason: `Found no media in msn ${frag.sn} of level "${level.url}"`
  12843. });
  12844. if (!this.hls) {
  12845. return;
  12846. }
  12847. this.resetTransmuxer();
12848. // For this error, fall through. Marking parsed will allow advancing to the next fragment.
  12849. }
  12850. this.state = State.PARSED;
  12851. this.hls.trigger(Events.FRAG_PARSED, {
  12852. frag,
  12853. part
  12854. });
  12855. }
  12856. resetTransmuxer() {
  12857. if (this.transmuxer) {
  12858. this.transmuxer.destroy();
  12859. this.transmuxer = null;
  12860. }
  12861. }
  12862. recoverWorkerError(data) {
  12863. if (data.event === 'demuxerWorker') {
  12864. this.fragmentTracker.removeAllFragments();
  12865. this.resetTransmuxer();
  12866. this.resetStartWhenNotLoaded(this.levelLastLoaded);
  12867. this.resetLoadingState();
  12868. }
  12869. }
  12870. set state(nextState) {
  12871. const previousState = this._state;
  12872. if (previousState !== nextState) {
  12873. this._state = nextState;
  12874. this.log(`${previousState}->${nextState}`);
  12875. }
  12876. }
  12877. get state() {
  12878. return this._state;
  12879. }
  12880. }
  12881. function getSourceBuffer() {
  12882. return self.SourceBuffer || self.WebKitSourceBuffer;
  12883. }
  12884. function isMSESupported() {
  12885. const mediaSource = getMediaSource();
  12886. if (!mediaSource) {
  12887. return false;
  12888. }
  12889. // if SourceBuffer is exposed ensure its API is valid
  12890. // Older browsers do not expose SourceBuffer globally so checking SourceBuffer.prototype is impossible
  12891. const sourceBuffer = getSourceBuffer();
  12892. return !sourceBuffer || sourceBuffer.prototype && typeof sourceBuffer.prototype.appendBuffer === 'function' && typeof sourceBuffer.prototype.remove === 'function';
  12893. }
  12894. function isSupported() {
  12895. if (!isMSESupported()) {
  12896. return false;
  12897. }
  12898. const mediaSource = getMediaSource();
  12899. return typeof (mediaSource == null ? void 0 : mediaSource.isTypeSupported) === 'function' && (['avc1.42E01E,mp4a.40.2', 'av01.0.01M.08', 'vp09.00.50.08'].some(codecsForVideoContainer => mediaSource.isTypeSupported(mimeTypeForCodec(codecsForVideoContainer, 'video'))) || ['mp4a.40.2', 'fLaC'].some(codecForAudioContainer => mediaSource.isTypeSupported(mimeTypeForCodec(codecForAudioContainer, 'audio'))));
  12900. }
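/*
 * Illustrative usage sketch (editorial note, not part of the bundle): a typical MSE feature
 * check before constructing the player. `Hls` refers to this module's exported class and
 * `video` is assumed to be an HTMLMediaElement obtained elsewhere.
 *
 *   if (Hls.isSupported()) {
 *     const hls = new Hls();
 *     hls.loadSource('https://example.com/stream.m3u8');
 *     hls.attachMedia(video);
 *   } else if (video.canPlayType('application/vnd.apple.mpegurl')) {
 *     video.src = 'https://example.com/stream.m3u8'; // native HLS playback (e.g. Safari)
 *   }
 */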
  12901. function changeTypeSupported() {
  12902. var _sourceBuffer$prototy;
  12903. const sourceBuffer = getSourceBuffer();
  12904. return typeof (sourceBuffer == null ? void 0 : (_sourceBuffer$prototy = sourceBuffer.prototype) == null ? void 0 : _sourceBuffer$prototy.changeType) === 'function';
  12905. }
  12906. // ensure the worker ends up in the bundle
  12907. // If the worker should not be included this gets aliased to empty.js
  12908. function hasUMDWorker() {
  12909. return typeof __HLS_WORKER_BUNDLE__ === 'function';
  12910. }
  12911. function injectWorker() {
  12912. const blob = new self.Blob([`var exports={};var module={exports:exports};function define(f){f()};define.amd=true;(${__HLS_WORKER_BUNDLE__.toString()})(true);`], {
  12913. type: 'text/javascript'
  12914. });
  12915. const objectURL = self.URL.createObjectURL(blob);
  12916. const worker = new self.Worker(objectURL);
  12917. return {
  12918. worker,
  12919. objectURL
  12920. };
  12921. }
  12922. function loadWorker(path) {
  12923. const scriptURL = new self.URL(path, self.location.href).href;
  12924. const worker = new self.Worker(scriptURL);
  12925. return {
  12926. worker,
  12927. scriptURL
  12928. };
  12929. }
  12930. function dummyTrack(type = '', inputTimeScale = 90000) {
  12931. return {
  12932. type,
  12933. id: -1,
  12934. pid: -1,
  12935. inputTimeScale,
  12936. sequenceNumber: -1,
  12937. samples: [],
  12938. dropped: 0
  12939. };
  12940. }
  12941. class BaseAudioDemuxer {
  12942. constructor() {
  12943. this._audioTrack = void 0;
  12944. this._id3Track = void 0;
  12945. this.frameIndex = 0;
  12946. this.cachedData = null;
  12947. this.basePTS = null;
  12948. this.initPTS = null;
  12949. this.lastPTS = null;
  12950. }
  12951. resetInitSegment(initSegment, audioCodec, videoCodec, trackDuration) {
  12952. this._id3Track = {
  12953. type: 'id3',
  12954. id: 3,
  12955. pid: -1,
  12956. inputTimeScale: 90000,
  12957. sequenceNumber: 0,
  12958. samples: [],
  12959. dropped: 0
  12960. };
  12961. }
12962. resetTimeStamp(defaultTimestamp) {
12963. this.initPTS = defaultTimestamp;
  12964. this.resetContiguity();
  12965. }
  12966. resetContiguity() {
  12967. this.basePTS = null;
  12968. this.lastPTS = null;
  12969. this.frameIndex = 0;
  12970. }
  12971. canParse(data, offset) {
  12972. return false;
  12973. }
  12974. appendFrame(track, data, offset) {}
  12975. // feed incoming data to the front of the parsing pipeline
  12976. demux(data, timeOffset) {
  12977. if (this.cachedData) {
  12978. data = appendUint8Array(this.cachedData, data);
  12979. this.cachedData = null;
  12980. }
  12981. let id3Data = getID3Data(data, 0);
  12982. let offset = id3Data ? id3Data.length : 0;
  12983. let lastDataIndex;
  12984. const track = this._audioTrack;
  12985. const id3Track = this._id3Track;
  12986. const timestamp = id3Data ? getTimeStamp(id3Data) : undefined;
  12987. const length = data.length;
  12988. if (this.basePTS === null || this.frameIndex === 0 && isFiniteNumber(timestamp)) {
  12989. this.basePTS = initPTSFn(timestamp, timeOffset, this.initPTS);
  12990. this.lastPTS = this.basePTS;
  12991. }
  12992. if (this.lastPTS === null) {
  12993. this.lastPTS = this.basePTS;
  12994. }
  12995. // more expressive than alternative: id3Data?.length
  12996. if (id3Data && id3Data.length > 0) {
  12997. id3Track.samples.push({
  12998. pts: this.lastPTS,
  12999. dts: this.lastPTS,
  13000. data: id3Data,
  13001. type: MetadataSchema.audioId3,
  13002. duration: Number.POSITIVE_INFINITY
  13003. });
  13004. }
  13005. while (offset < length) {
  13006. if (this.canParse(data, offset)) {
  13007. const frame = this.appendFrame(track, data, offset);
  13008. if (frame) {
  13009. this.frameIndex++;
  13010. this.lastPTS = frame.sample.pts;
  13011. offset += frame.length;
  13012. lastDataIndex = offset;
  13013. } else {
  13014. offset = length;
  13015. }
  13016. } else if (canParse$2(data, offset)) {
13017. // after an ID3.canParse, a call to ID3.getID3Data *should* always return some data
  13018. id3Data = getID3Data(data, offset);
  13019. id3Track.samples.push({
  13020. pts: this.lastPTS,
  13021. dts: this.lastPTS,
  13022. data: id3Data,
  13023. type: MetadataSchema.audioId3,
  13024. duration: Number.POSITIVE_INFINITY
  13025. });
  13026. offset += id3Data.length;
  13027. lastDataIndex = offset;
  13028. } else {
  13029. offset++;
  13030. }
  13031. if (offset === length && lastDataIndex !== length) {
  13032. const partialData = sliceUint8(data, lastDataIndex);
  13033. if (this.cachedData) {
  13034. this.cachedData = appendUint8Array(this.cachedData, partialData);
  13035. } else {
  13036. this.cachedData = partialData;
  13037. }
  13038. }
  13039. }
  13040. return {
  13041. audioTrack: track,
  13042. videoTrack: dummyTrack(),
  13043. id3Track,
  13044. textTrack: dummyTrack()
  13045. };
  13046. }
  13047. demuxSampleAes(data, keyData, timeOffset) {
  13048. return Promise.reject(new Error(`[${this}] This demuxer does not support Sample-AES decryption`));
  13049. }
  13050. flush(timeOffset) {
  13051. // Parse cache in case of remaining frames.
  13052. const cachedData = this.cachedData;
  13053. if (cachedData) {
  13054. this.cachedData = null;
  13055. this.demux(cachedData, 0);
  13056. }
  13057. return {
  13058. audioTrack: this._audioTrack,
  13059. videoTrack: dummyTrack(),
  13060. id3Track: this._id3Track,
  13061. textTrack: dummyTrack()
  13062. };
  13063. }
  13064. destroy() {}
  13065. }
  13066. /**
  13067. * Initialize PTS
  13068. * <p>
  13069. * use timestamp unless it is undefined, NaN or Infinity
  13070. * </p>
  13071. */
  13072. const initPTSFn = (timestamp, timeOffset, initPTS) => {
  13073. if (isFiniteNumber(timestamp)) {
  13074. return timestamp * 90;
  13075. }
  13076. const init90kHz = initPTS ? initPTS.baseTime * 90000 / initPTS.timescale : 0;
  13077. return timeOffset * 90000 + init90kHz;
  13078. };
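/*
 * Worked example (editorial note): the returned PTS is in 90 kHz ticks, assuming `timestamp`
 * is the ID3 timestamp in milliseconds (as the `* 90` conversion implies).
 * - With an ID3 timestamp of 10_000 ms, initPTSFn returns 10_000 * 90 = 900_000 ticks (10 s).
 * - Without a usable timestamp, with timeOffset = 10 s and initPTS = { baseTime: 90_000, timescale: 90_000 },
 *   it returns 10 * 90_000 + (90_000 * 90_000 / 90_000) = 990_000 ticks (11 s).
 */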
  13079. /**
  13080. * ADTS parser helper
  13081. * @link https://wiki.multimedia.cx/index.php?title=ADTS
  13082. */
  13083. function getAudioConfig(observer, data, offset, audioCodec) {
  13084. let adtsObjectType;
  13085. let adtsExtensionSamplingIndex;
  13086. let adtsChannelConfig;
  13087. let config;
  13088. const userAgent = navigator.userAgent.toLowerCase();
  13089. const manifestCodec = audioCodec;
  13090. const adtsSamplingRates = [96000, 88200, 64000, 48000, 44100, 32000, 24000, 22050, 16000, 12000, 11025, 8000, 7350];
  13091. // byte 2
  13092. adtsObjectType = ((data[offset + 2] & 0xc0) >>> 6) + 1;
  13093. const adtsSamplingIndex = (data[offset + 2] & 0x3c) >>> 2;
  13094. if (adtsSamplingIndex > adtsSamplingRates.length - 1) {
  13095. const error = new Error(`invalid ADTS sampling index:${adtsSamplingIndex}`);
  13096. observer.emit(Events.ERROR, Events.ERROR, {
  13097. type: ErrorTypes.MEDIA_ERROR,
  13098. details: ErrorDetails.FRAG_PARSING_ERROR,
  13099. fatal: true,
  13100. error,
  13101. reason: error.message
  13102. });
  13103. return;
  13104. }
  13105. adtsChannelConfig = (data[offset + 2] & 0x01) << 2;
  13106. // byte 3
  13107. adtsChannelConfig |= (data[offset + 3] & 0xc0) >>> 6;
  13108. logger.log(`manifest codec:${audioCodec}, ADTS type:${adtsObjectType}, samplingIndex:${adtsSamplingIndex}`);
  13109. // firefox: freq less than 24kHz = AAC SBR (HE-AAC)
  13110. if (/firefox/i.test(userAgent)) {
  13111. if (adtsSamplingIndex >= 6) {
  13112. adtsObjectType = 5;
  13113. config = new Array(4);
13114. // HE-AAC uses SBR (Spectral Band Replication), high frequencies are constructed from low frequencies
13115. // there is a factor 2 between frame sample rate and output sample rate
13116. // multiply frequency by 2 (see table below, equivalent to subtract 3)
  13117. adtsExtensionSamplingIndex = adtsSamplingIndex - 3;
  13118. } else {
  13119. adtsObjectType = 2;
  13120. config = new Array(2);
  13121. adtsExtensionSamplingIndex = adtsSamplingIndex;
  13122. }
  13123. // Android : always use AAC
  13124. } else if (userAgent.indexOf('android') !== -1) {
  13125. adtsObjectType = 2;
  13126. config = new Array(2);
  13127. adtsExtensionSamplingIndex = adtsSamplingIndex;
  13128. } else {
  13129. /* for other browsers (Chrome/Vivaldi/Opera ...)
  13130. always force audio type to be HE-AAC SBR, as some browsers do not support audio codec switch properly (like Chrome ...)
  13131. */
  13132. adtsObjectType = 5;
  13133. config = new Array(4);
  13134. // if (manifest codec is HE-AAC or HE-AACv2) OR (manifest codec not specified AND frequency less than 24kHz)
  13135. if (audioCodec && (audioCodec.indexOf('mp4a.40.29') !== -1 || audioCodec.indexOf('mp4a.40.5') !== -1) || !audioCodec && adtsSamplingIndex >= 6) {
13136. // HE-AAC uses SBR (Spectral Band Replication), high frequencies are constructed from low frequencies
13137. // there is a factor 2 between frame sample rate and output sample rate
13138. // multiply frequency by 2 (see table below, equivalent to subtract 3)
  13139. adtsExtensionSamplingIndex = adtsSamplingIndex - 3;
  13140. } else {
  13141. // if (manifest codec is AAC) AND (frequency less than 24kHz AND nb channel is 1) OR (manifest codec not specified and mono audio)
  13142. // Chrome fails to play back with low frequency AAC LC mono when initialized with HE-AAC. This is not a problem with stereo.
  13143. if (audioCodec && audioCodec.indexOf('mp4a.40.2') !== -1 && (adtsSamplingIndex >= 6 && adtsChannelConfig === 1 || /vivaldi/i.test(userAgent)) || !audioCodec && adtsChannelConfig === 1) {
  13144. adtsObjectType = 2;
  13145. config = new Array(2);
  13146. }
  13147. adtsExtensionSamplingIndex = adtsSamplingIndex;
  13148. }
  13149. }
  13150. /* refer to http://wiki.multimedia.cx/index.php?title=MPEG-4_Audio#Audio_Specific_Config
  13151. ISO 14496-3 (AAC).pdf - Table 1.13 — Syntax of AudioSpecificConfig()
  13152. Audio Profile / Audio Object Type
  13153. 0: Null
  13154. 1: AAC Main
  13155. 2: AAC LC (Low Complexity)
  13156. 3: AAC SSR (Scalable Sample Rate)
  13157. 4: AAC LTP (Long Term Prediction)
  13158. 5: SBR (Spectral Band Replication)
  13159. 6: AAC Scalable
  13160. sampling freq
  13161. 0: 96000 Hz
  13162. 1: 88200 Hz
  13163. 2: 64000 Hz
  13164. 3: 48000 Hz
  13165. 4: 44100 Hz
  13166. 5: 32000 Hz
  13167. 6: 24000 Hz
  13168. 7: 22050 Hz
  13169. 8: 16000 Hz
  13170. 9: 12000 Hz
  13171. 10: 11025 Hz
  13172. 11: 8000 Hz
  13173. 12: 7350 Hz
  13174. 13: Reserved
  13175. 14: Reserved
13176. 15: frequency is written explicitly
  13177. Channel Configurations
  13178. These are the channel configurations:
13179. 0: Defined in AOT Specific Config
  13180. 1: 1 channel: front-center
  13181. 2: 2 channels: front-left, front-right
  13182. */
  13183. // audioObjectType = profile => profile, the MPEG-4 Audio Object Type minus 1
  13184. config[0] = adtsObjectType << 3;
  13185. // samplingFrequencyIndex
  13186. config[0] |= (adtsSamplingIndex & 0x0e) >> 1;
  13187. config[1] |= (adtsSamplingIndex & 0x01) << 7;
  13188. // channelConfiguration
  13189. config[1] |= adtsChannelConfig << 3;
  13190. if (adtsObjectType === 5) {
  13191. // adtsExtensionSamplingIndex
  13192. config[1] |= (adtsExtensionSamplingIndex & 0x0e) >> 1;
  13193. config[2] = (adtsExtensionSamplingIndex & 0x01) << 7;
13194. // adtsObjectType (force to 2, chrome is checking that object type is less than 5 ???)
  13195. // https://chromium.googlesource.com/chromium/src.git/+/master/media/formats/mp4/aac.cc
  13196. config[2] |= 2 << 2;
  13197. config[3] = 0;
  13198. }
  13199. return {
  13200. config,
  13201. samplerate: adtsSamplingRates[adtsSamplingIndex],
  13202. channelCount: adtsChannelConfig,
  13203. codec: 'mp4a.40.' + adtsObjectType,
  13204. manifestCodec
  13205. };
  13206. }
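/*
 * Worked example (editorial note): when adtsObjectType resolves to 2 (AAC LC),
 * adtsSamplingIndex is 4 (44100 Hz) and adtsChannelConfig is 2 (stereo), the two-byte
 * AudioSpecificConfig built above is
 *   config[0] = (2 << 3) | ((4 & 0x0e) >> 1) = 0x12
 *   config[1] = ((4 & 0x01) << 7) | (2 << 3) = 0x10
 * i.e. [0x12, 0x10], returned with samplerate 44100, channelCount 2 and codec 'mp4a.40.2'.
 */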
  13207. function isHeaderPattern$1(data, offset) {
  13208. return data[offset] === 0xff && (data[offset + 1] & 0xf6) === 0xf0;
  13209. }
  13210. function getHeaderLength(data, offset) {
  13211. return data[offset + 1] & 0x01 ? 7 : 9;
  13212. }
  13213. function getFullFrameLength(data, offset) {
  13214. return (data[offset + 3] & 0x03) << 11 | data[offset + 4] << 3 | (data[offset + 5] & 0xe0) >>> 5;
  13215. }
  13216. function canGetFrameLength(data, offset) {
  13217. return offset + 5 < data.length;
  13218. }
  13219. function isHeader$1(data, offset) {
  13220. // Look for ADTS header | 1111 1111 | 1111 X00X | where X can be either 0 or 1
  13221. // Layer bits (position 14 and 15) in header should be always 0 for ADTS
  13222. // More info https://wiki.multimedia.cx/index.php?title=ADTS
  13223. return offset + 1 < data.length && isHeaderPattern$1(data, offset);
  13224. }
  13225. function canParse$1(data, offset) {
  13226. return canGetFrameLength(data, offset) && isHeaderPattern$1(data, offset) && getFullFrameLength(data, offset) <= data.length - offset;
  13227. }
  13228. function probe$1(data, offset) {
  13229. // same as isHeader but we also check that ADTS frame follows last ADTS frame
  13230. // or end of data is reached
  13231. if (isHeader$1(data, offset)) {
  13232. // ADTS header Length
  13233. const headerLength = getHeaderLength(data, offset);
  13234. if (offset + headerLength >= data.length) {
  13235. return false;
  13236. }
  13237. // ADTS frame Length
  13238. const frameLength = getFullFrameLength(data, offset);
  13239. if (frameLength <= headerLength) {
  13240. return false;
  13241. }
  13242. const newOffset = offset + frameLength;
  13243. return newOffset === data.length || isHeader$1(data, newOffset);
  13244. }
  13245. return false;
  13246. }
  13247. function initTrackConfig(track, observer, data, offset, audioCodec) {
  13248. if (!track.samplerate) {
  13249. const config = getAudioConfig(observer, data, offset, audioCodec);
  13250. if (!config) {
  13251. return;
  13252. }
  13253. track.config = config.config;
  13254. track.samplerate = config.samplerate;
  13255. track.channelCount = config.channelCount;
  13256. track.codec = config.codec;
  13257. track.manifestCodec = config.manifestCodec;
  13258. logger.log(`parsed codec:${track.codec}, rate:${config.samplerate}, channels:${config.channelCount}`);
  13259. }
  13260. }
  13261. function getFrameDuration(samplerate) {
  13262. return 1024 * 90000 / samplerate;
  13263. }
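// Worked example (editorial note): an AAC frame carries 1024 samples, so at 44100 Hz
// getFrameDuration(44100) = 1024 * 90000 / 44100 ≈ 2089.8 ticks on the 90 kHz clock,
// i.e. roughly 23.2 ms per frame.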
  13264. function parseFrameHeader(data, offset) {
  13265. // The protection skip bit tells us if we have 2 bytes of CRC data at the end of the ADTS header
  13266. const headerLength = getHeaderLength(data, offset);
  13267. if (offset + headerLength <= data.length) {
  13268. // retrieve frame size
  13269. const frameLength = getFullFrameLength(data, offset) - headerLength;
  13270. if (frameLength > 0) {
  13271. // logger.log(`AAC frame, offset/length/total/pts:${offset+headerLength}/${frameLength}/${data.byteLength}`);
  13272. return {
  13273. headerLength,
  13274. frameLength
  13275. };
  13276. }
  13277. }
  13278. }
  13279. function appendFrame$1(track, data, offset, pts, frameIndex) {
  13280. const frameDuration = getFrameDuration(track.samplerate);
  13281. const stamp = pts + frameIndex * frameDuration;
  13282. const header = parseFrameHeader(data, offset);
  13283. let unit;
  13284. if (header) {
  13285. const {
  13286. frameLength,
  13287. headerLength
  13288. } = header;
  13289. const _length = headerLength + frameLength;
  13290. const missing = Math.max(0, offset + _length - data.length);
  13291. // logger.log(`AAC frame ${frameIndex}, pts:${stamp} length@offset/total: ${frameLength}@${offset+headerLength}/${data.byteLength} missing: ${missing}`);
  13292. if (missing) {
  13293. unit = new Uint8Array(_length - headerLength);
  13294. unit.set(data.subarray(offset + headerLength, data.length), 0);
  13295. } else {
  13296. unit = data.subarray(offset + headerLength, offset + _length);
  13297. }
  13298. const _sample = {
  13299. unit,
  13300. pts: stamp
  13301. };
  13302. if (!missing) {
  13303. track.samples.push(_sample);
  13304. }
  13305. return {
  13306. sample: _sample,
  13307. length: _length,
  13308. missing
  13309. };
  13310. }
  13311. // overflow incomplete header
  13312. const length = data.length - offset;
  13313. unit = new Uint8Array(length);
  13314. unit.set(data.subarray(offset, data.length), 0);
  13315. const sample = {
  13316. unit,
  13317. pts: stamp
  13318. };
  13319. return {
  13320. sample,
  13321. length,
  13322. missing: -1
  13323. };
  13324. }
  13325. /**
  13326. * MPEG parser helper
  13327. */
  13328. let chromeVersion$1 = null;
  13329. const BitratesMap = [32, 64, 96, 128, 160, 192, 224, 256, 288, 320, 352, 384, 416, 448, 32, 48, 56, 64, 80, 96, 112, 128, 160, 192, 224, 256, 320, 384, 32, 40, 48, 56, 64, 80, 96, 112, 128, 160, 192, 224, 256, 320, 32, 48, 56, 64, 80, 96, 112, 128, 144, 160, 176, 192, 224, 256, 8, 16, 24, 32, 40, 48, 56, 64, 80, 96, 112, 128, 144, 160];
  13330. const SamplingRateMap = [44100, 48000, 32000, 22050, 24000, 16000, 11025, 12000, 8000];
  13331. const SamplesCoefficients = [
  13332. // MPEG 2.5
  13333. [0,
  13334. // Reserved
  13335. 72,
  13336. // Layer3
  13337. 144,
  13338. // Layer2
  13339. 12 // Layer1
  13340. ],
  13341. // Reserved
  13342. [0,
  13343. // Reserved
  13344. 0,
  13345. // Layer3
  13346. 0,
  13347. // Layer2
  13348. 0 // Layer1
  13349. ],
  13350. // MPEG 2
  13351. [0,
  13352. // Reserved
  13353. 72,
  13354. // Layer3
  13355. 144,
  13356. // Layer2
  13357. 12 // Layer1
  13358. ],
  13359. // MPEG 1
  13360. [0,
  13361. // Reserved
  13362. 144,
  13363. // Layer3
  13364. 144,
  13365. // Layer2
  13366. 12 // Layer1
  13367. ]];
  13368. const BytesInSlot = [0,
  13369. // Reserved
  13370. 1,
  13371. // Layer3
  13372. 1,
  13373. // Layer2
  13374. 4 // Layer1
  13375. ];
  13376. function appendFrame(track, data, offset, pts, frameIndex) {
  13377. // Using http://www.datavoyage.com/mpgscript/mpeghdr.htm as a reference
  13378. if (offset + 24 > data.length) {
  13379. return;
  13380. }
  13381. const header = parseHeader(data, offset);
  13382. if (header && offset + header.frameLength <= data.length) {
  13383. const frameDuration = header.samplesPerFrame * 90000 / header.sampleRate;
  13384. const stamp = pts + frameIndex * frameDuration;
  13385. const sample = {
  13386. unit: data.subarray(offset, offset + header.frameLength),
  13387. pts: stamp,
  13388. dts: stamp
  13389. };
  13390. track.config = [];
  13391. track.channelCount = header.channelCount;
  13392. track.samplerate = header.sampleRate;
  13393. track.samples.push(sample);
  13394. return {
  13395. sample,
  13396. length: header.frameLength,
  13397. missing: 0
  13398. };
  13399. }
  13400. }
  13401. function parseHeader(data, offset) {
  13402. const mpegVersion = data[offset + 1] >> 3 & 3;
  13403. const mpegLayer = data[offset + 1] >> 1 & 3;
  13404. const bitRateIndex = data[offset + 2] >> 4 & 15;
  13405. const sampleRateIndex = data[offset + 2] >> 2 & 3;
  13406. if (mpegVersion !== 1 && bitRateIndex !== 0 && bitRateIndex !== 15 && sampleRateIndex !== 3) {
  13407. const paddingBit = data[offset + 2] >> 1 & 1;
  13408. const channelMode = data[offset + 3] >> 6;
  13409. const columnInBitrates = mpegVersion === 3 ? 3 - mpegLayer : mpegLayer === 3 ? 3 : 4;
  13410. const bitRate = BitratesMap[columnInBitrates * 14 + bitRateIndex - 1] * 1000;
  13411. const columnInSampleRates = mpegVersion === 3 ? 0 : mpegVersion === 2 ? 1 : 2;
  13412. const sampleRate = SamplingRateMap[columnInSampleRates * 3 + sampleRateIndex];
  13413. const channelCount = channelMode === 3 ? 1 : 2; // If bits of channel mode are `11` then it is a single channel (Mono)
  13414. const sampleCoefficient = SamplesCoefficients[mpegVersion][mpegLayer];
  13415. const bytesInSlot = BytesInSlot[mpegLayer];
  13416. const samplesPerFrame = sampleCoefficient * 8 * bytesInSlot;
  13417. const frameLength = Math.floor(sampleCoefficient * bitRate / sampleRate + paddingBit) * bytesInSlot;
  13418. if (chromeVersion$1 === null) {
  13419. const userAgent = navigator.userAgent || '';
  13420. const result = userAgent.match(/Chrome\/(\d+)/i);
  13421. chromeVersion$1 = result ? parseInt(result[1]) : 0;
  13422. }
  13423. const needChromeFix = !!chromeVersion$1 && chromeVersion$1 <= 87;
  13424. if (needChromeFix && mpegLayer === 2 && bitRate >= 224000 && channelMode === 0) {
  13425. // Work around bug in Chromium by setting channelMode to dual-channel (01) instead of stereo (00)
  13426. data[offset + 3] = data[offset + 3] | 0x80;
  13427. }
  13428. return {
  13429. sampleRate,
  13430. channelCount,
  13431. frameLength,
  13432. samplesPerFrame
  13433. };
  13434. }
  13435. }
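/*
 * Worked example (editorial note): for an MPEG-1 Layer III frame at 128 kbps, 44100 Hz,
 * padding bit 0, the tables above give sampleCoefficient = 144 and bytesInSlot = 1, so
 *   frameLength = Math.floor(144 * 128000 / 44100 + 0) * 1 = 417 bytes
 *   samplesPerFrame = 144 * 8 * 1 = 1152
 * matching the well-known 417/418-byte frame sizes of 128 kbps MP3.
 */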
  13436. function isHeaderPattern(data, offset) {
  13437. return data[offset] === 0xff && (data[offset + 1] & 0xe0) === 0xe0 && (data[offset + 1] & 0x06) !== 0x00;
  13438. }
  13439. function isHeader(data, offset) {
  13440. // Look for MPEG header | 1111 1111 | 111X XYZX | where X can be either 0 or 1 and Y or Z should be 1
  13441. // Layer bits (position 14 and 15) in header should be always different from 0 (Layer I or Layer II or Layer III)
  13442. // More info http://www.mp3-tech.org/programmer/frame_header.html
  13443. return offset + 1 < data.length && isHeaderPattern(data, offset);
  13444. }
  13445. function canParse(data, offset) {
  13446. const headerSize = 4;
  13447. return isHeaderPattern(data, offset) && headerSize <= data.length - offset;
  13448. }
  13449. function probe(data, offset) {
  13450. // same as isHeader but we also check that MPEG frame follows last MPEG frame
  13451. // or end of data is reached
  13452. if (offset + 1 < data.length && isHeaderPattern(data, offset)) {
  13453. // MPEG header Length
  13454. const headerLength = 4;
  13455. // MPEG frame Length
  13456. const header = parseHeader(data, offset);
  13457. let frameLength = headerLength;
  13458. if (header != null && header.frameLength) {
  13459. frameLength = header.frameLength;
  13460. }
  13461. const newOffset = offset + frameLength;
  13462. return newOffset === data.length || isHeader(data, newOffset);
  13463. }
  13464. return false;
  13465. }
  13466. /**
  13467. * AAC demuxer
  13468. */
  13469. class AACDemuxer extends BaseAudioDemuxer {
  13470. constructor(observer, config) {
  13471. super();
  13472. this.observer = void 0;
  13473. this.config = void 0;
  13474. this.observer = observer;
  13475. this.config = config;
  13476. }
  13477. resetInitSegment(initSegment, audioCodec, videoCodec, trackDuration) {
  13478. super.resetInitSegment(initSegment, audioCodec, videoCodec, trackDuration);
  13479. this._audioTrack = {
  13480. container: 'audio/adts',
  13481. type: 'audio',
  13482. id: 2,
  13483. pid: -1,
  13484. sequenceNumber: 0,
  13485. segmentCodec: 'aac',
  13486. samples: [],
  13487. manifestCodec: audioCodec,
  13488. duration: trackDuration,
  13489. inputTimeScale: 90000,
  13490. dropped: 0
  13491. };
  13492. }
  13493. // Source for probe info - https://wiki.multimedia.cx/index.php?title=ADTS
  13494. static probe(data) {
  13495. if (!data) {
  13496. return false;
  13497. }
  13498. // Check for the ADTS sync word
  13499. // Look for ADTS header | 1111 1111 | 1111 X00X | where X can be either 0 or 1
  13500. // Layer bits (position 14 and 15) in header should be always 0 for ADTS
  13501. // More info https://wiki.multimedia.cx/index.php?title=ADTS
  13502. const id3Data = getID3Data(data, 0);
  13503. let offset = (id3Data == null ? void 0 : id3Data.length) || 0;
  13504. if (probe(data, offset)) {
  13505. return false;
  13506. }
  13507. for (let length = data.length; offset < length; offset++) {
  13508. if (probe$1(data, offset)) {
  13509. logger.log('ADTS sync word found !');
  13510. return true;
  13511. }
  13512. }
  13513. return false;
  13514. }
  13515. canParse(data, offset) {
  13516. return canParse$1(data, offset);
  13517. }
  13518. appendFrame(track, data, offset) {
  13519. initTrackConfig(track, this.observer, data, offset, track.manifestCodec);
  13520. const frame = appendFrame$1(track, data, offset, this.basePTS, this.frameIndex);
  13521. if (frame && frame.missing === 0) {
  13522. return frame;
  13523. }
  13524. }
  13525. }
  13526. const emsgSchemePattern = /\/emsg[-/]ID3/i;
  13527. class MP4Demuxer {
  13528. constructor(observer, config) {
  13529. this.remainderData = null;
  13530. this.timeOffset = 0;
  13531. this.config = void 0;
  13532. this.videoTrack = void 0;
  13533. this.audioTrack = void 0;
  13534. this.id3Track = void 0;
  13535. this.txtTrack = void 0;
  13536. this.config = config;
  13537. }
  13538. resetTimeStamp() {}
  13539. resetInitSegment(initSegment, audioCodec, videoCodec, trackDuration) {
  13540. const videoTrack = this.videoTrack = dummyTrack('video', 1);
  13541. const audioTrack = this.audioTrack = dummyTrack('audio', 1);
  13542. const captionTrack = this.txtTrack = dummyTrack('text', 1);
  13543. this.id3Track = dummyTrack('id3', 1);
  13544. this.timeOffset = 0;
  13545. if (!(initSegment != null && initSegment.byteLength)) {
  13546. return;
  13547. }
  13548. const initData = parseInitSegment(initSegment);
  13549. if (initData.video) {
  13550. const {
  13551. id,
  13552. timescale,
  13553. codec
  13554. } = initData.video;
  13555. videoTrack.id = id;
  13556. videoTrack.timescale = captionTrack.timescale = timescale;
  13557. videoTrack.codec = codec;
  13558. }
  13559. if (initData.audio) {
  13560. const {
  13561. id,
  13562. timescale,
  13563. codec
  13564. } = initData.audio;
  13565. audioTrack.id = id;
  13566. audioTrack.timescale = timescale;
  13567. audioTrack.codec = codec;
  13568. }
  13569. captionTrack.id = RemuxerTrackIdConfig.text;
  13570. videoTrack.sampleDuration = 0;
  13571. videoTrack.duration = audioTrack.duration = trackDuration;
  13572. }
  13573. resetContiguity() {
  13574. this.remainderData = null;
  13575. }
  13576. static probe(data) {
  13577. return hasMoofData(data);
  13578. }
  13579. demux(data, timeOffset) {
  13580. this.timeOffset = timeOffset;
  13581. // Load all data into the avc track. The CMAF remuxer will look for the data in the samples object; the rest of the fields do not matter
  13582. let videoSamples = data;
  13583. const videoTrack = this.videoTrack;
  13584. const textTrack = this.txtTrack;
  13585. if (this.config.progressive) {
  13586. // Split the bytestream into two ranges: one encompassing all data up until the start of the last moof, and everything else.
  13587. // This is done to guarantee that we're sending valid data to MSE - when demuxing progressively, we have no guarantee
13588. // that the fetch loader gives us complete moof+mdat pairs. If we push jagged data to MSE, it will throw an exception.
  13589. if (this.remainderData) {
  13590. videoSamples = appendUint8Array(this.remainderData, data);
  13591. }
  13592. const segmentedData = segmentValidRange(videoSamples);
  13593. this.remainderData = segmentedData.remainder;
  13594. videoTrack.samples = segmentedData.valid || new Uint8Array();
  13595. } else {
  13596. videoTrack.samples = videoSamples;
  13597. }
  13598. const id3Track = this.extractID3Track(videoTrack, timeOffset);
  13599. textTrack.samples = parseSamples(timeOffset, videoTrack);
  13600. return {
  13601. videoTrack,
  13602. audioTrack: this.audioTrack,
  13603. id3Track,
  13604. textTrack: this.txtTrack
  13605. };
  13606. }
  13607. flush() {
  13608. const timeOffset = this.timeOffset;
  13609. const videoTrack = this.videoTrack;
  13610. const textTrack = this.txtTrack;
  13611. videoTrack.samples = this.remainderData || new Uint8Array();
  13612. this.remainderData = null;
  13613. const id3Track = this.extractID3Track(videoTrack, this.timeOffset);
  13614. textTrack.samples = parseSamples(timeOffset, videoTrack);
  13615. return {
  13616. videoTrack,
  13617. audioTrack: dummyTrack(),
  13618. id3Track,
  13619. textTrack: dummyTrack()
  13620. };
  13621. }
  13622. extractID3Track(videoTrack, timeOffset) {
  13623. const id3Track = this.id3Track;
  13624. if (videoTrack.samples.length) {
  13625. const emsgs = findBox(videoTrack.samples, ['emsg']);
  13626. if (emsgs) {
  13627. emsgs.forEach(data => {
  13628. const emsgInfo = parseEmsg(data);
  13629. if (emsgSchemePattern.test(emsgInfo.schemeIdUri)) {
  13630. const pts = isFiniteNumber(emsgInfo.presentationTime) ? emsgInfo.presentationTime / emsgInfo.timeScale : timeOffset + emsgInfo.presentationTimeDelta / emsgInfo.timeScale;
  13631. let duration = emsgInfo.eventDuration === 0xffffffff ? Number.POSITIVE_INFINITY : emsgInfo.eventDuration / emsgInfo.timeScale;
  13632. // Safari takes anything <= 0.001 seconds and maps it to Infinity
  13633. if (duration <= 0.001) {
  13634. duration = Number.POSITIVE_INFINITY;
  13635. }
  13636. const payload = emsgInfo.payload;
  13637. id3Track.samples.push({
  13638. data: payload,
  13639. len: payload.byteLength,
  13640. dts: pts,
  13641. pts: pts,
  13642. type: MetadataSchema.emsg,
  13643. duration: duration
  13644. });
  13645. }
  13646. });
  13647. }
  13648. }
  13649. return id3Track;
  13650. }
  13651. demuxSampleAes(data, keyData, timeOffset) {
  13652. return Promise.reject(new Error('The MP4 demuxer does not support SAMPLE-AES decryption'));
  13653. }
  13654. destroy() {}
  13655. }
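/*
 * Editor's note: extractID3Track() only keeps `emsg` boxes whose schemeIdUri
 * matches emsgSchemePattern above, e.g. the common "https://aomedia.org/emsg/ID3"
 * scheme. The sample pts is presentationTime / timeScale when an absolute time
 * is present, otherwise timeOffset + presentationTimeDelta / timeScale, so a
 * hypothetical emsg with timeScale 1000 and presentationTimeDelta 2500 arriving
 * at timeOffset 10 yields pts = 10 + 2.5 = 12.5 seconds.
 */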
  13656. const getAudioBSID = (data, offset) => {
  13657. // check the bsid to confirm ac-3 | ec-3
  13658. let bsid = 0;
  13659. let numBits = 5;
  13660. offset += numBits;
  13661. const temp = new Uint32Array(1); // unsigned 32 bit for temporary storage
  13662. const mask = new Uint32Array(1); // unsigned 32 bit mask value
  13663. const byte = new Uint8Array(1); // unsigned 8 bit for temporary storage
  13664. while (numBits > 0) {
  13665. byte[0] = data[offset];
13666. // read the remaining bits, up to 8 bits at a time
  13667. const bits = Math.min(numBits, 8);
  13668. const shift = 8 - bits;
  13669. mask[0] = 0xff000000 >>> 24 + shift << shift;
  13670. temp[0] = (byte[0] & mask[0]) >> shift;
  13671. bsid = !bsid ? temp[0] : bsid << bits | temp[0];
  13672. offset += 1;
  13673. numBits -= bits;
  13674. }
  13675. return bsid;
  13676. };
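/*
 * Editor's note: getAudioBSID() skips the five bytes that precede the bsid
 * field of an (E-)AC-3 syncframe and then reads the 5-bit bsid. MP3Demuxer.probe
 * below uses it to reject buffers that start with the 0x0B 0x77 AC-3/EC-3 sync
 * word (bsid <= 16) rather than an MPEG audio frame. A hypothetical call:
 *
 *   const bsid = getAudioBSID(data, offset); // offset points at the 0x0B 0x77 sync word
 */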
  13677. class BaseVideoParser {
  13678. constructor() {
  13679. this.VideoSample = null;
  13680. }
  13681. createVideoSample(key, pts, dts, debug) {
  13682. return {
  13683. key,
  13684. frame: false,
  13685. pts,
  13686. dts,
  13687. units: [],
  13688. debug,
  13689. length: 0
  13690. };
  13691. }
  13692. getLastNalUnit(samples) {
  13693. var _VideoSample;
  13694. let VideoSample = this.VideoSample;
  13695. let lastUnit;
13696. // try to fall back to the previous sample if the current one is empty
  13697. if (!VideoSample || VideoSample.units.length === 0) {
  13698. VideoSample = samples[samples.length - 1];
  13699. }
  13700. if ((_VideoSample = VideoSample) != null && _VideoSample.units) {
  13701. const units = VideoSample.units;
  13702. lastUnit = units[units.length - 1];
  13703. }
  13704. return lastUnit;
  13705. }
  13706. pushAccessUnit(VideoSample, videoTrack) {
  13707. if (VideoSample.units.length && VideoSample.frame) {
  13708. // if sample does not have PTS/DTS, patch with last sample PTS/DTS
  13709. if (VideoSample.pts === undefined) {
  13710. const samples = videoTrack.samples;
  13711. const nbSamples = samples.length;
  13712. if (nbSamples) {
  13713. const lastSample = samples[nbSamples - 1];
  13714. VideoSample.pts = lastSample.pts;
  13715. VideoSample.dts = lastSample.dts;
  13716. } else {
  13717. // dropping samples, no timestamp found
  13718. videoTrack.dropped++;
  13719. return;
  13720. }
  13721. }
  13722. videoTrack.samples.push(VideoSample);
  13723. }
  13724. if (VideoSample.debug.length) {
  13725. logger.log(VideoSample.pts + '/' + VideoSample.dts + ':' + VideoSample.debug);
  13726. }
  13727. }
  13728. }
  13729. /**
  13730. * Parser for exponential Golomb codes, a variable-bitwidth number encoding scheme used by h264.
  13731. */
  13732. class ExpGolomb {
  13733. constructor(data) {
  13734. this.data = void 0;
  13735. this.bytesAvailable = void 0;
  13736. this.word = void 0;
  13737. this.bitsAvailable = void 0;
  13738. this.data = data;
  13739. // the number of bytes left to examine in this.data
  13740. this.bytesAvailable = data.byteLength;
  13741. // the current word being examined
  13742. this.word = 0; // :uint
  13743. // the number of bits left to examine in the current word
  13744. this.bitsAvailable = 0; // :uint
  13745. }
  13746. // ():void
  13747. loadWord() {
  13748. const data = this.data;
  13749. const bytesAvailable = this.bytesAvailable;
  13750. const position = data.byteLength - bytesAvailable;
  13751. const workingBytes = new Uint8Array(4);
  13752. const availableBytes = Math.min(4, bytesAvailable);
  13753. if (availableBytes === 0) {
  13754. throw new Error('no bytes available');
  13755. }
  13756. workingBytes.set(data.subarray(position, position + availableBytes));
  13757. this.word = new DataView(workingBytes.buffer).getUint32(0);
  13758. // track the amount of this.data that has been processed
  13759. this.bitsAvailable = availableBytes * 8;
  13760. this.bytesAvailable -= availableBytes;
  13761. }
  13762. // (count:int):void
  13763. skipBits(count) {
  13764. let skipBytes; // :int
  13765. count = Math.min(count, this.bytesAvailable * 8 + this.bitsAvailable);
  13766. if (this.bitsAvailable > count) {
  13767. this.word <<= count;
  13768. this.bitsAvailable -= count;
  13769. } else {
  13770. count -= this.bitsAvailable;
  13771. skipBytes = count >> 3;
  13772. count -= skipBytes << 3;
  13773. this.bytesAvailable -= skipBytes;
  13774. this.loadWord();
  13775. this.word <<= count;
  13776. this.bitsAvailable -= count;
  13777. }
  13778. }
  13779. // (size:int):uint
  13780. readBits(size) {
  13781. let bits = Math.min(this.bitsAvailable, size); // :uint
  13782. const valu = this.word >>> 32 - bits; // :uint
  13783. if (size > 32) {
  13784. logger.error('Cannot read more than 32 bits at a time');
  13785. }
  13786. this.bitsAvailable -= bits;
  13787. if (this.bitsAvailable > 0) {
  13788. this.word <<= bits;
  13789. } else if (this.bytesAvailable > 0) {
  13790. this.loadWord();
  13791. } else {
  13792. throw new Error('no bits available');
  13793. }
  13794. bits = size - bits;
  13795. if (bits > 0 && this.bitsAvailable) {
  13796. return valu << bits | this.readBits(bits);
  13797. } else {
  13798. return valu;
  13799. }
  13800. }
  13801. // ():uint
  13802. skipLZ() {
  13803. let leadingZeroCount; // :uint
  13804. for (leadingZeroCount = 0; leadingZeroCount < this.bitsAvailable; ++leadingZeroCount) {
  13805. if ((this.word & 0x80000000 >>> leadingZeroCount) !== 0) {
  13806. // the first bit of working word is 1
  13807. this.word <<= leadingZeroCount;
  13808. this.bitsAvailable -= leadingZeroCount;
  13809. return leadingZeroCount;
  13810. }
  13811. }
  13812. // we exhausted word and still have not found a 1
  13813. this.loadWord();
  13814. return leadingZeroCount + this.skipLZ();
  13815. }
  13816. // ():void
  13817. skipUEG() {
  13818. this.skipBits(1 + this.skipLZ());
  13819. }
  13820. // ():void
  13821. skipEG() {
  13822. this.skipBits(1 + this.skipLZ());
  13823. }
  13824. // ():uint
  13825. readUEG() {
  13826. const clz = this.skipLZ(); // :uint
  13827. return this.readBits(clz + 1) - 1;
  13828. }
  13829. // ():int
  13830. readEG() {
  13831. const valu = this.readUEG(); // :int
  13832. if (0x01 & valu) {
  13833. // the number is odd if the low order bit is set
  13834. return 1 + valu >>> 1; // add 1 to make it even, and divide by 2
  13835. } else {
  13836. return -1 * (valu >>> 1); // divide by two then make it negative
  13837. }
  13838. }
  13839. // Some convenience functions
  13840. // :Boolean
  13841. readBoolean() {
  13842. return this.readBits(1) === 1;
  13843. }
  13844. // ():int
  13845. readUByte() {
  13846. return this.readBits(8);
  13847. }
  13848. // ():int
  13849. readUShort() {
  13850. return this.readBits(16);
  13851. }
  13852. // ():int
  13853. readUInt() {
  13854. return this.readBits(32);
  13855. }
  13856. /**
  13857. * Advance the ExpGolomb decoder past a scaling list. The scaling
  13858. * list is optionally transmitted as part of a sequence parameter
  13859. * set and is not relevant to transmuxing.
  13860. * @param count the number of entries in this scaling list
  13861. * @see Recommendation ITU-T H.264, Section 7.3.2.1.1.1
  13862. */
  13863. skipScalingList(count) {
  13864. let lastScale = 8;
  13865. let nextScale = 8;
  13866. let deltaScale;
  13867. for (let j = 0; j < count; j++) {
  13868. if (nextScale !== 0) {
  13869. deltaScale = this.readEG();
  13870. nextScale = (lastScale + deltaScale + 256) % 256;
  13871. }
  13872. lastScale = nextScale === 0 ? lastScale : nextScale;
  13873. }
  13874. }
  13875. /**
  13876. * Read a sequence parameter set and return some interesting video
  13877. * properties. A sequence parameter set is the H264 metadata that
  13878. * describes the properties of upcoming video frames.
  13879. * @returns an object with configuration parsed from the
  13880. * sequence parameter set, including the dimensions of the
  13881. * associated video frames.
  13882. */
  13883. readSPS() {
  13884. let frameCropLeftOffset = 0;
  13885. let frameCropRightOffset = 0;
  13886. let frameCropTopOffset = 0;
  13887. let frameCropBottomOffset = 0;
  13888. let numRefFramesInPicOrderCntCycle;
  13889. let scalingListCount;
  13890. let i;
  13891. const readUByte = this.readUByte.bind(this);
  13892. const readBits = this.readBits.bind(this);
  13893. const readUEG = this.readUEG.bind(this);
  13894. const readBoolean = this.readBoolean.bind(this);
  13895. const skipBits = this.skipBits.bind(this);
  13896. const skipEG = this.skipEG.bind(this);
  13897. const skipUEG = this.skipUEG.bind(this);
  13898. const skipScalingList = this.skipScalingList.bind(this);
  13899. readUByte();
  13900. const profileIdc = readUByte(); // profile_idc
  13901. readBits(5); // profileCompat constraint_set[0-4]_flag, u(5)
  13902. skipBits(3); // reserved_zero_3bits u(3),
  13903. readUByte(); // level_idc u(8)
  13904. skipUEG(); // seq_parameter_set_id
  13905. // some profiles have more optional data we don't need
  13906. if (profileIdc === 100 || profileIdc === 110 || profileIdc === 122 || profileIdc === 244 || profileIdc === 44 || profileIdc === 83 || profileIdc === 86 || profileIdc === 118 || profileIdc === 128) {
  13907. const chromaFormatIdc = readUEG();
  13908. if (chromaFormatIdc === 3) {
  13909. skipBits(1);
  13910. } // separate_colour_plane_flag
  13911. skipUEG(); // bit_depth_luma_minus8
  13912. skipUEG(); // bit_depth_chroma_minus8
  13913. skipBits(1); // qpprime_y_zero_transform_bypass_flag
  13914. if (readBoolean()) {
  13915. // seq_scaling_matrix_present_flag
  13916. scalingListCount = chromaFormatIdc !== 3 ? 8 : 12;
  13917. for (i = 0; i < scalingListCount; i++) {
  13918. if (readBoolean()) {
  13919. // seq_scaling_list_present_flag[ i ]
  13920. if (i < 6) {
  13921. skipScalingList(16);
  13922. } else {
  13923. skipScalingList(64);
  13924. }
  13925. }
  13926. }
  13927. }
  13928. }
  13929. skipUEG(); // log2_max_frame_num_minus4
  13930. const picOrderCntType = readUEG();
  13931. if (picOrderCntType === 0) {
  13932. readUEG(); // log2_max_pic_order_cnt_lsb_minus4
  13933. } else if (picOrderCntType === 1) {
  13934. skipBits(1); // delta_pic_order_always_zero_flag
  13935. skipEG(); // offset_for_non_ref_pic
  13936. skipEG(); // offset_for_top_to_bottom_field
  13937. numRefFramesInPicOrderCntCycle = readUEG();
  13938. for (i = 0; i < numRefFramesInPicOrderCntCycle; i++) {
  13939. skipEG();
  13940. } // offset_for_ref_frame[ i ]
  13941. }
  13942. skipUEG(); // max_num_ref_frames
  13943. skipBits(1); // gaps_in_frame_num_value_allowed_flag
  13944. const picWidthInMbsMinus1 = readUEG();
  13945. const picHeightInMapUnitsMinus1 = readUEG();
  13946. const frameMbsOnlyFlag = readBits(1);
  13947. if (frameMbsOnlyFlag === 0) {
  13948. skipBits(1);
  13949. } // mb_adaptive_frame_field_flag
  13950. skipBits(1); // direct_8x8_inference_flag
  13951. if (readBoolean()) {
  13952. // frame_cropping_flag
  13953. frameCropLeftOffset = readUEG();
  13954. frameCropRightOffset = readUEG();
  13955. frameCropTopOffset = readUEG();
  13956. frameCropBottomOffset = readUEG();
  13957. }
  13958. let pixelRatio = [1, 1];
  13959. if (readBoolean()) {
  13960. // vui_parameters_present_flag
  13961. if (readBoolean()) {
  13962. // aspect_ratio_info_present_flag
  13963. const aspectRatioIdc = readUByte();
  13964. switch (aspectRatioIdc) {
  13965. case 1:
  13966. pixelRatio = [1, 1];
  13967. break;
  13968. case 2:
  13969. pixelRatio = [12, 11];
  13970. break;
  13971. case 3:
  13972. pixelRatio = [10, 11];
  13973. break;
  13974. case 4:
  13975. pixelRatio = [16, 11];
  13976. break;
  13977. case 5:
  13978. pixelRatio = [40, 33];
  13979. break;
  13980. case 6:
  13981. pixelRatio = [24, 11];
  13982. break;
  13983. case 7:
  13984. pixelRatio = [20, 11];
  13985. break;
  13986. case 8:
  13987. pixelRatio = [32, 11];
  13988. break;
  13989. case 9:
  13990. pixelRatio = [80, 33];
  13991. break;
  13992. case 10:
  13993. pixelRatio = [18, 11];
  13994. break;
  13995. case 11:
  13996. pixelRatio = [15, 11];
  13997. break;
  13998. case 12:
  13999. pixelRatio = [64, 33];
  14000. break;
  14001. case 13:
  14002. pixelRatio = [160, 99];
  14003. break;
  14004. case 14:
  14005. pixelRatio = [4, 3];
  14006. break;
  14007. case 15:
  14008. pixelRatio = [3, 2];
  14009. break;
  14010. case 16:
  14011. pixelRatio = [2, 1];
  14012. break;
  14013. case 255:
  14014. {
  14015. pixelRatio = [readUByte() << 8 | readUByte(), readUByte() << 8 | readUByte()];
  14016. break;
  14017. }
  14018. }
  14019. }
  14020. }
  14021. return {
  14022. width: Math.ceil((picWidthInMbsMinus1 + 1) * 16 - frameCropLeftOffset * 2 - frameCropRightOffset * 2),
  14023. height: (2 - frameMbsOnlyFlag) * (picHeightInMapUnitsMinus1 + 1) * 16 - (frameMbsOnlyFlag ? 2 : 4) * (frameCropTopOffset + frameCropBottomOffset),
  14024. pixelRatio: pixelRatio
  14025. };
  14026. }
  14027. readSliceType() {
  14028. // skip NALu type
  14029. this.readUByte();
  14030. // discard first_mb_in_slice
  14031. this.readUEG();
  14032. // return slice_type
  14033. return this.readUEG();
  14034. }
  14035. }
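/*
 * Exp-Golomb refresher (editor's note): a ue(v) value is coded as n leading
 * zero bits, a 1, then an n-bit suffix; readUEG() therefore returns
 * readBits(n + 1) - 1. For the bit pattern 0 0 1 0 1 the decoder skips two
 * zeros, reads 0b101 = 5 and returns 4. A minimal, hypothetical check:
 *
 *   const eg = new ExpGolomb(new Uint8Array([0x28])); // 0b00101000
 *   eg.readUEG(); // 4
 *
 * readSPS() chains these primitives to recover width, height and pixelRatio
 * from an H.264 sequence parameter set NAL unit.
 */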
  14036. class AvcVideoParser extends BaseVideoParser {
  14037. parseAVCPES(track, textTrack, pes, last, duration) {
  14038. const units = this.parseAVCNALu(track, pes.data);
  14039. let VideoSample = this.VideoSample;
  14040. let push;
  14041. let spsfound = false;
  14042. // free pes.data to save up some memory
  14043. pes.data = null;
  14044. // if new NAL units found and last sample still there, let's push ...
  14045. // this helps parsing streams with missing AUD (only do this if AUD never found)
  14046. if (VideoSample && units.length && !track.audFound) {
  14047. this.pushAccessUnit(VideoSample, track);
  14048. VideoSample = this.VideoSample = this.createVideoSample(false, pes.pts, pes.dts, '');
  14049. }
  14050. units.forEach(unit => {
  14051. var _VideoSample2;
  14052. switch (unit.type) {
  14053. // NDR
  14054. case 1:
  14055. {
  14056. let iskey = false;
  14057. push = true;
  14058. const data = unit.data;
  14059. // only check slice type to detect KF in case SPS found in same packet (any keyframe is preceded by SPS ...)
  14060. if (spsfound && data.length > 4) {
  14061. // retrieve slice type by parsing beginning of NAL unit (follow H264 spec, slice_header definition) to detect keyframe embedded in NDR
  14062. const sliceType = new ExpGolomb(data).readSliceType();
  14063. // 2 : I slice, 4 : SI slice, 7 : I slice, 9: SI slice
  14064. // SI slice : A slice that is coded using intra prediction only and using quantisation of the prediction samples.
  14065. // An SI slice can be coded such that its decoded samples can be constructed identically to an SP slice.
  14066. // I slice: A slice that is not an SI slice that is decoded using intra prediction only.
  14067. // if (sliceType === 2 || sliceType === 7) {
  14068. if (sliceType === 2 || sliceType === 4 || sliceType === 7 || sliceType === 9) {
  14069. iskey = true;
  14070. }
  14071. }
  14072. if (iskey) {
  14073. var _VideoSample;
  14074. // if we have non-keyframe data already, that cannot belong to the same frame as a keyframe, so force a push
  14075. if ((_VideoSample = VideoSample) != null && _VideoSample.frame && !VideoSample.key) {
  14076. this.pushAccessUnit(VideoSample, track);
  14077. VideoSample = this.VideoSample = null;
  14078. }
  14079. }
  14080. if (!VideoSample) {
  14081. VideoSample = this.VideoSample = this.createVideoSample(true, pes.pts, pes.dts, '');
  14082. }
  14083. VideoSample.frame = true;
  14084. VideoSample.key = iskey;
  14085. break;
  14086. // IDR
  14087. }
  14088. case 5:
  14089. push = true;
  14090. // handle PES not starting with AUD
  14091. // if we have frame data already, that cannot belong to the same frame, so force a push
  14092. if ((_VideoSample2 = VideoSample) != null && _VideoSample2.frame && !VideoSample.key) {
  14093. this.pushAccessUnit(VideoSample, track);
  14094. VideoSample = this.VideoSample = null;
  14095. }
  14096. if (!VideoSample) {
  14097. VideoSample = this.VideoSample = this.createVideoSample(true, pes.pts, pes.dts, '');
  14098. }
  14099. VideoSample.key = true;
  14100. VideoSample.frame = true;
  14101. break;
  14102. // SEI
  14103. case 6:
  14104. {
  14105. push = true;
  14106. parseSEIMessageFromNALu(unit.data, 1, pes.pts, textTrack.samples);
  14107. break;
  14108. // SPS
  14109. }
  14110. case 7:
  14111. {
  14112. var _track$pixelRatio, _track$pixelRatio2;
  14113. push = true;
  14114. spsfound = true;
  14115. const sps = unit.data;
  14116. const expGolombDecoder = new ExpGolomb(sps);
  14117. const config = expGolombDecoder.readSPS();
  14118. if (!track.sps || track.width !== config.width || track.height !== config.height || ((_track$pixelRatio = track.pixelRatio) == null ? void 0 : _track$pixelRatio[0]) !== config.pixelRatio[0] || ((_track$pixelRatio2 = track.pixelRatio) == null ? void 0 : _track$pixelRatio2[1]) !== config.pixelRatio[1]) {
  14119. track.width = config.width;
  14120. track.height = config.height;
  14121. track.pixelRatio = config.pixelRatio;
  14122. track.sps = [sps];
  14123. track.duration = duration;
  14124. const codecarray = sps.subarray(1, 4);
  14125. let codecstring = 'avc1.';
  14126. for (let i = 0; i < 3; i++) {
  14127. let h = codecarray[i].toString(16);
  14128. if (h.length < 2) {
  14129. h = '0' + h;
  14130. }
  14131. codecstring += h;
  14132. }
  14133. track.codec = codecstring;
  14134. }
  14135. break;
  14136. }
  14137. // PPS
  14138. case 8:
  14139. push = true;
  14140. track.pps = [unit.data];
  14141. break;
  14142. // AUD
  14143. case 9:
  14144. push = true;
  14145. track.audFound = true;
  14146. if (VideoSample) {
  14147. this.pushAccessUnit(VideoSample, track);
  14148. }
  14149. VideoSample = this.VideoSample = this.createVideoSample(false, pes.pts, pes.dts, '');
  14150. break;
  14151. // Filler Data
  14152. case 12:
  14153. push = true;
  14154. break;
  14155. default:
  14156. push = false;
  14157. if (VideoSample) {
  14158. VideoSample.debug += 'unknown NAL ' + unit.type + ' ';
  14159. }
  14160. break;
  14161. }
  14162. if (VideoSample && push) {
  14163. const units = VideoSample.units;
  14164. units.push(unit);
  14165. }
  14166. });
  14167. // if last PES packet, push samples
  14168. if (last && VideoSample) {
  14169. this.pushAccessUnit(VideoSample, track);
  14170. this.VideoSample = null;
  14171. }
  14172. }
  14173. parseAVCNALu(track, array) {
  14174. const len = array.byteLength;
  14175. let state = track.naluState || 0;
  14176. const lastState = state;
  14177. const units = [];
  14178. let i = 0;
  14179. let value;
  14180. let overflow;
  14181. let unitType;
  14182. let lastUnitStart = -1;
  14183. let lastUnitType = 0;
  14184. // logger.log('PES:' + Hex.hexDump(array));
  14185. if (state === -1) {
  14186. // special use case where we found 3 or 4-byte start codes exactly at the end of previous PES packet
  14187. lastUnitStart = 0;
  14188. // NALu type is value read from offset 0
  14189. lastUnitType = array[0] & 0x1f;
  14190. state = 0;
  14191. i = 1;
  14192. }
  14193. while (i < len) {
  14194. value = array[i++];
  14195. // optimization. state 0 and 1 are the predominant case. let's handle them outside of the switch/case
  14196. if (!state) {
  14197. state = value ? 0 : 1;
  14198. continue;
  14199. }
  14200. if (state === 1) {
  14201. state = value ? 0 : 2;
  14202. continue;
  14203. }
  14204. // here we have state either equal to 2 or 3
  14205. if (!value) {
  14206. state = 3;
  14207. } else if (value === 1) {
  14208. overflow = i - state - 1;
  14209. if (lastUnitStart >= 0) {
  14210. const unit = {
  14211. data: array.subarray(lastUnitStart, overflow),
  14212. type: lastUnitType
  14213. };
  14214. // logger.log('pushing NALU, type/size:' + unit.type + '/' + unit.data.byteLength);
  14215. units.push(unit);
  14216. } else {
  14217. // lastUnitStart is undefined => this is the first start code found in this PES packet
  14218. // first check if start code delimiter is overlapping between 2 PES packets,
  14219. // ie it started in last packet (lastState not zero)
  14220. // and ended at the beginning of this PES packet (i <= 4 - lastState)
  14221. const lastUnit = this.getLastNalUnit(track.samples);
  14222. if (lastUnit) {
  14223. if (lastState && i <= 4 - lastState) {
  14224. // start delimiter overlapping between PES packets
  14225. // strip start delimiter bytes from the end of last NAL unit
  14226. // check if lastUnit had a state different from zero
  14227. if (lastUnit.state) {
  14228. // strip last bytes
  14229. lastUnit.data = lastUnit.data.subarray(0, lastUnit.data.byteLength - lastState);
  14230. }
  14231. }
  14232. // If NAL units are not starting right at the beginning of the PES packet, push preceding data into previous NAL unit.
  14233. if (overflow > 0) {
  14234. // logger.log('first NALU found with overflow:' + overflow);
  14235. lastUnit.data = appendUint8Array(lastUnit.data, array.subarray(0, overflow));
  14236. lastUnit.state = 0;
  14237. }
  14238. }
  14239. }
  14240. // check if we can read unit type
  14241. if (i < len) {
  14242. unitType = array[i] & 0x1f;
  14243. // logger.log('find NALU @ offset:' + i + ',type:' + unitType);
  14244. lastUnitStart = i;
  14245. lastUnitType = unitType;
  14246. state = 0;
  14247. } else {
14248. // not enough bytes to read the unit type; read it on the next PES parse
  14249. state = -1;
  14250. }
  14251. } else {
  14252. state = 0;
  14253. }
  14254. }
  14255. if (lastUnitStart >= 0 && state >= 0) {
  14256. const unit = {
  14257. data: array.subarray(lastUnitStart, len),
  14258. type: lastUnitType,
  14259. state: state
  14260. };
  14261. units.push(unit);
  14262. // logger.log('pushing NALU, type/size/state:' + unit.type + '/' + unit.data.byteLength + '/' + state);
  14263. }
  14264. // no NALu found
  14265. if (units.length === 0) {
  14266. // append pes.data to previous NAL unit
  14267. const lastUnit = this.getLastNalUnit(track.samples);
  14268. if (lastUnit) {
  14269. lastUnit.data = appendUint8Array(lastUnit.data, array);
  14270. }
  14271. }
  14272. track.naluState = state;
  14273. return units;
  14274. }
  14275. }
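/*
 * Editor's note: parseAVCNALu() is a byte-wise state machine over Annex-B
 * 0x000001 / 0x00000001 start codes; each emitted unit carries the NAL type
 * from the low 5 bits of the byte after the start code (1 = non-IDR slice,
 * 5 = IDR, 6 = SEI, 7 = SPS, 8 = PPS, 9 = AUD). A hypothetical PES payload of
 * [0,0,0,1, 0x67, ...SPS..., 0,0,1, 0x65, ...IDR...] would yield
 * [{ type: 7, data: <SPS bytes> }, { type: 5, data: <IDR bytes> }].
 */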
  14276. /**
  14277. * SAMPLE-AES decrypter
  14278. */
  14279. class SampleAesDecrypter {
  14280. constructor(observer, config, keyData) {
  14281. this.keyData = void 0;
  14282. this.decrypter = void 0;
  14283. this.keyData = keyData;
  14284. this.decrypter = new Decrypter(config, {
  14285. removePKCS7Padding: false
  14286. });
  14287. }
  14288. decryptBuffer(encryptedData) {
  14289. return this.decrypter.decrypt(encryptedData, this.keyData.key.buffer, this.keyData.iv.buffer);
  14290. }
14291. // AAC - all full 16-byte blocks starting at offset 16 are encrypted
  14292. decryptAacSample(samples, sampleIndex, callback) {
  14293. const curUnit = samples[sampleIndex].unit;
  14294. if (curUnit.length <= 16) {
14295. // No encrypted portion in this sample (the first 16 bytes are not
14296. // encrypted, see https://developer.apple.com/library/archive/documentation/AudioVideo/Conceptual/HLS_Sample_Encryption/Encryption/Encryption.html)
  14297. return;
  14298. }
  14299. const encryptedData = curUnit.subarray(16, curUnit.length - curUnit.length % 16);
  14300. const encryptedBuffer = encryptedData.buffer.slice(encryptedData.byteOffset, encryptedData.byteOffset + encryptedData.length);
  14301. this.decryptBuffer(encryptedBuffer).then(decryptedBuffer => {
  14302. const decryptedData = new Uint8Array(decryptedBuffer);
  14303. curUnit.set(decryptedData, 16);
  14304. if (!this.decrypter.isSync()) {
  14305. this.decryptAacSamples(samples, sampleIndex + 1, callback);
  14306. }
  14307. });
  14308. }
  14309. decryptAacSamples(samples, sampleIndex, callback) {
  14310. for (;; sampleIndex++) {
  14311. if (sampleIndex >= samples.length) {
  14312. callback();
  14313. return;
  14314. }
  14315. if (samples[sampleIndex].unit.length < 32) {
  14316. continue;
  14317. }
  14318. this.decryptAacSample(samples, sampleIndex, callback);
  14319. if (!this.decrypter.isSync()) {
  14320. return;
  14321. }
  14322. }
  14323. }
14324. // AVC - one 16-byte block out of every ten is encrypted, starting at offset 32
  14325. getAvcEncryptedData(decodedData) {
  14326. const encryptedDataLen = Math.floor((decodedData.length - 48) / 160) * 16 + 16;
  14327. const encryptedData = new Int8Array(encryptedDataLen);
  14328. let outputPos = 0;
  14329. for (let inputPos = 32; inputPos < decodedData.length - 16; inputPos += 160, outputPos += 16) {
  14330. encryptedData.set(decodedData.subarray(inputPos, inputPos + 16), outputPos);
  14331. }
  14332. return encryptedData;
  14333. }
  14334. getAvcDecryptedUnit(decodedData, decryptedData) {
  14335. const uint8DecryptedData = new Uint8Array(decryptedData);
  14336. let inputPos = 0;
  14337. for (let outputPos = 32; outputPos < decodedData.length - 16; outputPos += 160, inputPos += 16) {
  14338. decodedData.set(uint8DecryptedData.subarray(inputPos, inputPos + 16), outputPos);
  14339. }
  14340. return decodedData;
  14341. }
  14342. decryptAvcSample(samples, sampleIndex, unitIndex, callback, curUnit) {
  14343. const decodedData = discardEPB(curUnit.data);
  14344. const encryptedData = this.getAvcEncryptedData(decodedData);
  14345. this.decryptBuffer(encryptedData.buffer).then(decryptedBuffer => {
  14346. curUnit.data = this.getAvcDecryptedUnit(decodedData, decryptedBuffer);
  14347. if (!this.decrypter.isSync()) {
  14348. this.decryptAvcSamples(samples, sampleIndex, unitIndex + 1, callback);
  14349. }
  14350. });
  14351. }
  14352. decryptAvcSamples(samples, sampleIndex, unitIndex, callback) {
  14353. if (samples instanceof Uint8Array) {
  14354. throw new Error('Cannot decrypt samples of type Uint8Array');
  14355. }
  14356. for (;; sampleIndex++, unitIndex = 0) {
  14357. if (sampleIndex >= samples.length) {
  14358. callback();
  14359. return;
  14360. }
  14361. const curUnits = samples[sampleIndex].units;
  14362. for (;; unitIndex++) {
  14363. if (unitIndex >= curUnits.length) {
  14364. break;
  14365. }
  14366. const curUnit = curUnits[unitIndex];
  14367. if (curUnit.data.length <= 48 || curUnit.type !== 1 && curUnit.type !== 5) {
  14368. continue;
  14369. }
  14370. this.decryptAvcSample(samples, sampleIndex, unitIndex, callback, curUnit);
  14371. if (!this.decrypter.isSync()) {
  14372. return;
  14373. }
  14374. }
  14375. }
  14376. }
  14377. }
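/*
 * Editor's note on the SAMPLE-AES pattern handled above: for AAC every full
 * 16-byte block from offset 16 is encrypted; for AVC one 16-byte block out of
 * every 160 bytes is encrypted, starting at offset 32. So for a hypothetical
 * 512-byte slice NAL, getAvcEncryptedData() gathers
 * Math.floor((512 - 48) / 160) * 16 + 16 = 48 bytes (the blocks at offsets 32,
 * 192 and 352) into a single decrypt call.
 */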
  14378. const PACKET_LENGTH = 188;
  14379. class TSDemuxer {
  14380. constructor(observer, config, typeSupported) {
  14381. this.observer = void 0;
  14382. this.config = void 0;
  14383. this.typeSupported = void 0;
  14384. this.sampleAes = null;
  14385. this.pmtParsed = false;
  14386. this.audioCodec = void 0;
  14387. this.videoCodec = void 0;
  14388. this._duration = 0;
  14389. this._pmtId = -1;
  14390. this._videoTrack = void 0;
  14391. this._audioTrack = void 0;
  14392. this._id3Track = void 0;
  14393. this._txtTrack = void 0;
  14394. this.aacOverFlow = null;
  14395. this.remainderData = null;
  14396. this.videoParser = void 0;
  14397. this.observer = observer;
  14398. this.config = config;
  14399. this.typeSupported = typeSupported;
  14400. this.videoParser = new AvcVideoParser();
  14401. }
  14402. static probe(data) {
  14403. const syncOffset = TSDemuxer.syncOffset(data);
  14404. if (syncOffset > 0) {
  14405. logger.warn(`MPEG2-TS detected but first sync word found @ offset ${syncOffset}`);
  14406. }
  14407. return syncOffset !== -1;
  14408. }
  14409. static syncOffset(data) {
  14410. const length = data.length;
  14411. let scanwindow = Math.min(PACKET_LENGTH * 5, length - PACKET_LENGTH) + 1;
  14412. let i = 0;
  14413. while (i < scanwindow) {
  14414. // a TS init segment should contain at least 2 TS packets: PAT and PMT, each starting with 0x47
  14415. let foundPat = false;
  14416. let packetStart = -1;
  14417. let tsPackets = 0;
  14418. for (let j = i; j < length; j += PACKET_LENGTH) {
  14419. if (data[j] === 0x47 && (length - j === PACKET_LENGTH || data[j + PACKET_LENGTH] === 0x47)) {
  14420. tsPackets++;
  14421. if (packetStart === -1) {
  14422. packetStart = j;
  14423. // First sync word found at offset, increase scan length (#5251)
  14424. if (packetStart !== 0) {
  14425. scanwindow = Math.min(packetStart + PACKET_LENGTH * 99, data.length - PACKET_LENGTH) + 1;
  14426. }
  14427. }
  14428. if (!foundPat) {
  14429. foundPat = parsePID(data, j) === 0;
  14430. }
14431. // Sync word found at 0 with 3 packets, or found at an offset with at least 2 packets up to scanwindow (#5501)
  14432. if (foundPat && tsPackets > 1 && (packetStart === 0 && tsPackets > 2 || j + PACKET_LENGTH > scanwindow)) {
  14433. return packetStart;
  14434. }
  14435. } else if (tsPackets) {
  14436. // Exit if sync word found, but does not contain contiguous packets
  14437. return -1;
  14438. } else {
  14439. break;
  14440. }
  14441. }
  14442. i++;
  14443. }
  14444. return -1;
  14445. }
  14446. /**
  14447. * Creates a track model internal to demuxer used to drive remuxing input
  14448. */
  14449. static createTrack(type, duration) {
  14450. return {
  14451. container: type === 'video' || type === 'audio' ? 'video/mp2t' : undefined,
  14452. type,
  14453. id: RemuxerTrackIdConfig[type],
  14454. pid: -1,
  14455. inputTimeScale: 90000,
  14456. sequenceNumber: 0,
  14457. samples: [],
  14458. dropped: 0,
  14459. duration: type === 'audio' ? duration : undefined
  14460. };
  14461. }
  14462. /**
  14463. * Initializes a new init segment on the demuxer/remuxer interface. Needed for discontinuities/track-switches (or at stream start)
  14464. * Resets all internal track instances of the demuxer.
  14465. */
  14466. resetInitSegment(initSegment, audioCodec, videoCodec, trackDuration) {
  14467. this.pmtParsed = false;
  14468. this._pmtId = -1;
  14469. this._videoTrack = TSDemuxer.createTrack('video');
  14470. this._audioTrack = TSDemuxer.createTrack('audio', trackDuration);
  14471. this._id3Track = TSDemuxer.createTrack('id3');
  14472. this._txtTrack = TSDemuxer.createTrack('text');
  14473. this._audioTrack.segmentCodec = 'aac';
  14474. // flush any partial content
  14475. this.aacOverFlow = null;
  14476. this.remainderData = null;
  14477. this.audioCodec = audioCodec;
  14478. this.videoCodec = videoCodec;
  14479. this._duration = trackDuration;
  14480. }
  14481. resetTimeStamp() {}
  14482. resetContiguity() {
  14483. const {
  14484. _audioTrack,
  14485. _videoTrack,
  14486. _id3Track
  14487. } = this;
  14488. if (_audioTrack) {
  14489. _audioTrack.pesData = null;
  14490. }
  14491. if (_videoTrack) {
  14492. _videoTrack.pesData = null;
  14493. }
  14494. if (_id3Track) {
  14495. _id3Track.pesData = null;
  14496. }
  14497. this.aacOverFlow = null;
  14498. this.remainderData = null;
  14499. }
  14500. demux(data, timeOffset, isSampleAes = false, flush = false) {
  14501. if (!isSampleAes) {
  14502. this.sampleAes = null;
  14503. }
  14504. let pes;
  14505. const videoTrack = this._videoTrack;
  14506. const audioTrack = this._audioTrack;
  14507. const id3Track = this._id3Track;
  14508. const textTrack = this._txtTrack;
  14509. let videoPid = videoTrack.pid;
  14510. let videoData = videoTrack.pesData;
  14511. let audioPid = audioTrack.pid;
  14512. let id3Pid = id3Track.pid;
  14513. let audioData = audioTrack.pesData;
  14514. let id3Data = id3Track.pesData;
  14515. let unknownPID = null;
  14516. let pmtParsed = this.pmtParsed;
  14517. let pmtId = this._pmtId;
  14518. let len = data.length;
  14519. if (this.remainderData) {
  14520. data = appendUint8Array(this.remainderData, data);
  14521. len = data.length;
  14522. this.remainderData = null;
  14523. }
  14524. if (len < PACKET_LENGTH && !flush) {
  14525. this.remainderData = data;
  14526. return {
  14527. audioTrack,
  14528. videoTrack,
  14529. id3Track,
  14530. textTrack
  14531. };
  14532. }
  14533. const syncOffset = Math.max(0, TSDemuxer.syncOffset(data));
  14534. len -= (len - syncOffset) % PACKET_LENGTH;
  14535. if (len < data.byteLength && !flush) {
  14536. this.remainderData = new Uint8Array(data.buffer, len, data.buffer.byteLength - len);
  14537. }
  14538. // loop through TS packets
  14539. let tsPacketErrors = 0;
  14540. for (let start = syncOffset; start < len; start += PACKET_LENGTH) {
  14541. if (data[start] === 0x47) {
  14542. const stt = !!(data[start + 1] & 0x40);
  14543. const pid = parsePID(data, start);
  14544. const atf = (data[start + 3] & 0x30) >> 4;
14545. // if an adaptation field is present, its length is specified by the fifth byte of the TS packet header.
  14546. let offset;
  14547. if (atf > 1) {
  14548. offset = start + 5 + data[start + 4];
  14549. // continue if there is only adaptation field
  14550. if (offset === start + PACKET_LENGTH) {
  14551. continue;
  14552. }
  14553. } else {
  14554. offset = start + 4;
  14555. }
  14556. switch (pid) {
  14557. case videoPid:
  14558. if (stt) {
  14559. if (videoData && (pes = parsePES(videoData))) {
  14560. this.videoParser.parseAVCPES(videoTrack, textTrack, pes, false, this._duration);
  14561. }
  14562. videoData = {
  14563. data: [],
  14564. size: 0
  14565. };
  14566. }
  14567. if (videoData) {
  14568. videoData.data.push(data.subarray(offset, start + PACKET_LENGTH));
  14569. videoData.size += start + PACKET_LENGTH - offset;
  14570. }
  14571. break;
  14572. case audioPid:
  14573. if (stt) {
  14574. if (audioData && (pes = parsePES(audioData))) {
  14575. switch (audioTrack.segmentCodec) {
  14576. case 'aac':
  14577. this.parseAACPES(audioTrack, pes);
  14578. break;
  14579. case 'mp3':
  14580. this.parseMPEGPES(audioTrack, pes);
  14581. break;
  14582. }
  14583. }
  14584. audioData = {
  14585. data: [],
  14586. size: 0
  14587. };
  14588. }
  14589. if (audioData) {
  14590. audioData.data.push(data.subarray(offset, start + PACKET_LENGTH));
  14591. audioData.size += start + PACKET_LENGTH - offset;
  14592. }
  14593. break;
  14594. case id3Pid:
  14595. if (stt) {
  14596. if (id3Data && (pes = parsePES(id3Data))) {
  14597. this.parseID3PES(id3Track, pes);
  14598. }
  14599. id3Data = {
  14600. data: [],
  14601. size: 0
  14602. };
  14603. }
  14604. if (id3Data) {
  14605. id3Data.data.push(data.subarray(offset, start + PACKET_LENGTH));
  14606. id3Data.size += start + PACKET_LENGTH - offset;
  14607. }
  14608. break;
  14609. case 0:
  14610. if (stt) {
  14611. offset += data[offset] + 1;
  14612. }
  14613. pmtId = this._pmtId = parsePAT(data, offset);
  14614. // logger.log('PMT PID:' + this._pmtId);
  14615. break;
  14616. case pmtId:
  14617. {
  14618. if (stt) {
  14619. offset += data[offset] + 1;
  14620. }
  14621. const parsedPIDs = parsePMT(data, offset, this.typeSupported, isSampleAes, this.observer);
  14622. // only update track id if track PID found while parsing PMT
  14623. // this is to avoid resetting the PID to -1 in case
  14624. // track PID transiently disappears from the stream
  14625. // this could happen in case of transient missing audio samples for example
  14626. // NOTE this is only the PID of the track as found in TS,
  14627. // but we are not using this for MP4 track IDs.
  14628. videoPid = parsedPIDs.videoPid;
  14629. if (videoPid > 0) {
  14630. videoTrack.pid = videoPid;
  14631. videoTrack.segmentCodec = parsedPIDs.segmentVideoCodec;
  14632. }
  14633. audioPid = parsedPIDs.audioPid;
  14634. if (audioPid > 0) {
  14635. audioTrack.pid = audioPid;
  14636. audioTrack.segmentCodec = parsedPIDs.segmentAudioCodec;
  14637. }
  14638. id3Pid = parsedPIDs.id3Pid;
  14639. if (id3Pid > 0) {
  14640. id3Track.pid = id3Pid;
  14641. }
  14642. if (unknownPID !== null && !pmtParsed) {
  14643. logger.warn(`MPEG-TS PMT found at ${start} after unknown PID '${unknownPID}'. Backtracking to sync byte @${syncOffset} to parse all TS packets.`);
  14644. unknownPID = null;
14645. // set start to syncOffset - 188; the += 188 in the for loop restarts parsing at syncOffset
  14646. start = syncOffset - 188;
  14647. }
  14648. pmtParsed = this.pmtParsed = true;
  14649. break;
  14650. }
  14651. case 0x11:
  14652. case 0x1fff:
  14653. break;
  14654. default:
  14655. unknownPID = pid;
  14656. break;
  14657. }
  14658. } else {
  14659. tsPacketErrors++;
  14660. }
  14661. }
  14662. if (tsPacketErrors > 0) {
  14663. emitParsingError(this.observer, new Error(`Found ${tsPacketErrors} TS packet/s that do not start with 0x47`));
  14664. }
  14665. videoTrack.pesData = videoData;
  14666. audioTrack.pesData = audioData;
  14667. id3Track.pesData = id3Data;
  14668. const demuxResult = {
  14669. audioTrack,
  14670. videoTrack,
  14671. id3Track,
  14672. textTrack
  14673. };
  14674. if (flush) {
  14675. this.extractRemainingSamples(demuxResult);
  14676. }
  14677. return demuxResult;
  14678. }
  14679. flush() {
  14680. const {
  14681. remainderData
  14682. } = this;
  14683. this.remainderData = null;
  14684. let result;
  14685. if (remainderData) {
  14686. result = this.demux(remainderData, -1, false, true);
  14687. } else {
  14688. result = {
  14689. videoTrack: this._videoTrack,
  14690. audioTrack: this._audioTrack,
  14691. id3Track: this._id3Track,
  14692. textTrack: this._txtTrack
  14693. };
  14694. }
  14695. this.extractRemainingSamples(result);
  14696. if (this.sampleAes) {
  14697. return this.decrypt(result, this.sampleAes);
  14698. }
  14699. return result;
  14700. }
  14701. extractRemainingSamples(demuxResult) {
  14702. const {
  14703. audioTrack,
  14704. videoTrack,
  14705. id3Track,
  14706. textTrack
  14707. } = demuxResult;
  14708. const videoData = videoTrack.pesData;
  14709. const audioData = audioTrack.pesData;
  14710. const id3Data = id3Track.pesData;
  14711. // try to parse last PES packets
  14712. let pes;
  14713. if (videoData && (pes = parsePES(videoData))) {
  14714. this.videoParser.parseAVCPES(videoTrack, textTrack, pes, true, this._duration);
  14715. videoTrack.pesData = null;
  14716. } else {
14717. // either videoData is null or the PES is truncated; keep it for the next fragment parse
  14718. videoTrack.pesData = videoData;
  14719. }
  14720. if (audioData && (pes = parsePES(audioData))) {
  14721. switch (audioTrack.segmentCodec) {
  14722. case 'aac':
  14723. this.parseAACPES(audioTrack, pes);
  14724. break;
  14725. case 'mp3':
  14726. this.parseMPEGPES(audioTrack, pes);
  14727. break;
  14728. }
  14729. audioTrack.pesData = null;
  14730. } else {
  14731. if (audioData != null && audioData.size) {
14732. logger.log('last AAC PES packet truncated, might overlap between fragments');
  14733. }
  14734. // either audioData null or PES truncated, keep it for next frag parsing
  14735. audioTrack.pesData = audioData;
  14736. }
  14737. if (id3Data && (pes = parsePES(id3Data))) {
  14738. this.parseID3PES(id3Track, pes);
  14739. id3Track.pesData = null;
  14740. } else {
  14741. // either id3Data null or PES truncated, keep it for next frag parsing
  14742. id3Track.pesData = id3Data;
  14743. }
  14744. }
  14745. demuxSampleAes(data, keyData, timeOffset) {
  14746. const demuxResult = this.demux(data, timeOffset, true, !this.config.progressive);
  14747. const sampleAes = this.sampleAes = new SampleAesDecrypter(this.observer, this.config, keyData);
  14748. return this.decrypt(demuxResult, sampleAes);
  14749. }
  14750. decrypt(demuxResult, sampleAes) {
  14751. return new Promise(resolve => {
  14752. const {
  14753. audioTrack,
  14754. videoTrack
  14755. } = demuxResult;
  14756. if (audioTrack.samples && audioTrack.segmentCodec === 'aac') {
  14757. sampleAes.decryptAacSamples(audioTrack.samples, 0, () => {
  14758. if (videoTrack.samples) {
  14759. sampleAes.decryptAvcSamples(videoTrack.samples, 0, 0, () => {
  14760. resolve(demuxResult);
  14761. });
  14762. } else {
  14763. resolve(demuxResult);
  14764. }
  14765. });
  14766. } else if (videoTrack.samples) {
  14767. sampleAes.decryptAvcSamples(videoTrack.samples, 0, 0, () => {
  14768. resolve(demuxResult);
  14769. });
  14770. }
  14771. });
  14772. }
  14773. destroy() {
  14774. this._duration = 0;
  14775. }
  14776. parseAACPES(track, pes) {
  14777. let startOffset = 0;
  14778. const aacOverFlow = this.aacOverFlow;
  14779. let data = pes.data;
  14780. if (aacOverFlow) {
  14781. this.aacOverFlow = null;
  14782. const frameMissingBytes = aacOverFlow.missing;
  14783. const sampleLength = aacOverFlow.sample.unit.byteLength;
  14784. // logger.log(`AAC: append overflowing ${sampleLength} bytes to beginning of new PES`);
  14785. if (frameMissingBytes === -1) {
  14786. data = appendUint8Array(aacOverFlow.sample.unit, data);
  14787. } else {
  14788. const frameOverflowBytes = sampleLength - frameMissingBytes;
  14789. aacOverFlow.sample.unit.set(data.subarray(0, frameMissingBytes), frameOverflowBytes);
  14790. track.samples.push(aacOverFlow.sample);
  14791. startOffset = aacOverFlow.missing;
  14792. }
  14793. }
  14794. // look for ADTS header (0xFFFx)
  14795. let offset;
  14796. let len;
  14797. for (offset = startOffset, len = data.length; offset < len - 1; offset++) {
  14798. if (isHeader$1(data, offset)) {
  14799. break;
  14800. }
  14801. }
  14802. // if ADTS header does not start straight from the beginning of the PES payload, raise an error
  14803. if (offset !== startOffset) {
  14804. let reason;
  14805. const recoverable = offset < len - 1;
  14806. if (recoverable) {
14807. reason = `AAC PES did not start with ADTS header, offset: ${offset}`;
  14808. } else {
  14809. reason = 'No ADTS header found in AAC PES';
  14810. }
  14811. emitParsingError(this.observer, new Error(reason), recoverable);
  14812. if (!recoverable) {
  14813. return;
  14814. }
  14815. }
  14816. initTrackConfig(track, this.observer, data, offset, this.audioCodec);
  14817. let pts;
  14818. if (pes.pts !== undefined) {
  14819. pts = pes.pts;
  14820. } else if (aacOverFlow) {
  14821. // if last AAC frame is overflowing, we should ensure timestamps are contiguous:
  14822. // first sample PTS should be equal to last sample PTS + frameDuration
  14823. const frameDuration = getFrameDuration(track.samplerate);
  14824. pts = aacOverFlow.sample.pts + frameDuration;
  14825. } else {
  14826. logger.warn('[tsdemuxer]: AAC PES unknown PTS');
  14827. return;
  14828. }
  14829. // scan for aac samples
  14830. let frameIndex = 0;
  14831. let frame;
  14832. while (offset < len) {
  14833. frame = appendFrame$1(track, data, offset, pts, frameIndex);
  14834. offset += frame.length;
  14835. if (!frame.missing) {
  14836. frameIndex++;
  14837. for (; offset < len - 1; offset++) {
  14838. if (isHeader$1(data, offset)) {
  14839. break;
  14840. }
  14841. }
  14842. } else {
  14843. this.aacOverFlow = frame;
  14844. break;
  14845. }
  14846. }
  14847. }
  14848. parseMPEGPES(track, pes) {
  14849. const data = pes.data;
  14850. const length = data.length;
  14851. let frameIndex = 0;
  14852. let offset = 0;
  14853. const pts = pes.pts;
  14854. if (pts === undefined) {
  14855. logger.warn('[tsdemuxer]: MPEG PES unknown PTS');
  14856. return;
  14857. }
  14858. while (offset < length) {
  14859. if (isHeader(data, offset)) {
  14860. const frame = appendFrame(track, data, offset, pts, frameIndex);
  14861. if (frame) {
  14862. offset += frame.length;
  14863. frameIndex++;
  14864. } else {
  14865. // logger.log('Unable to parse Mpeg audio frame');
  14866. break;
  14867. }
  14868. } else {
  14869. // nothing found, keep looking
  14870. offset++;
  14871. }
  14872. }
  14873. }
  14874. parseAC3PES(track, pes) {
  14875. }
  14876. parseID3PES(id3Track, pes) {
  14877. if (pes.pts === undefined) {
  14878. logger.warn('[tsdemuxer]: ID3 PES unknown PTS');
  14879. return;
  14880. }
  14881. const id3Sample = _extends({}, pes, {
  14882. type: this._videoTrack ? MetadataSchema.emsg : MetadataSchema.audioId3,
  14883. duration: Number.POSITIVE_INFINITY
  14884. });
  14885. id3Track.samples.push(id3Sample);
  14886. }
  14887. }
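/*
 * Editor's sketch: aligning an arbitrary chunk of MPEG-TS bytes before
 * demuxing. `chunk` is a hypothetical Uint8Array.
 *
 *   const offset = TSDemuxer.syncOffset(chunk); // -1 unless contiguous 188-byte packets are found
 *   if (offset !== -1) {
 *     const aligned = chunk.subarray(offset); // first byte is now a 0x47 sync byte
 *   }
 */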
  14888. function parsePID(data, offset) {
  14889. // pid is a 13-bit field starting at the last bit of TS[1]
  14890. return ((data[offset + 1] & 0x1f) << 8) + data[offset + 2];
  14891. }
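/*
 * Editor's note: in the 4-byte TS packet header the 13-bit PID spans the low
 * 5 bits of byte 1 and all of byte 2. For a hypothetical header
 * [0x47, 0x41, 0x00, 0x10], parsePID(data, 0) returns ((0x41 & 0x1f) << 8) + 0x00 = 256.
 */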
  14892. function parsePAT(data, offset) {
  14893. // skip the PSI header and parse the first PMT entry
  14894. return (data[offset + 10] & 0x1f) << 8 | data[offset + 11];
  14895. }
  14896. function parsePMT(data, offset, typeSupported, isSampleAes, observer) {
  14897. const result = {
  14898. audioPid: -1,
  14899. videoPid: -1,
  14900. id3Pid: -1,
  14901. segmentVideoCodec: 'avc',
  14902. segmentAudioCodec: 'aac'
  14903. };
  14904. const sectionLength = (data[offset + 1] & 0x0f) << 8 | data[offset + 2];
  14905. const tableEnd = offset + 3 + sectionLength - 4;
  14906. // to determine where the table is, we have to figure out how
  14907. // long the program info descriptors are
  14908. const programInfoLength = (data[offset + 10] & 0x0f) << 8 | data[offset + 11];
  14909. // advance the offset to the first entry in the mapping table
  14910. offset += 12 + programInfoLength;
  14911. while (offset < tableEnd) {
  14912. const pid = parsePID(data, offset);
  14913. const esInfoLength = (data[offset + 3] & 0x0f) << 8 | data[offset + 4];
  14914. switch (data[offset]) {
  14915. case 0xcf:
  14916. // SAMPLE-AES AAC
  14917. if (!isSampleAes) {
  14918. logEncryptedSamplesFoundInUnencryptedStream('ADTS AAC');
  14919. break;
  14920. }
  14921. /* falls through */
  14922. case 0x0f:
  14923. // ISO/IEC 13818-7 ADTS AAC (MPEG-2 lower bit-rate audio)
  14924. // logger.log('AAC PID:' + pid);
  14925. if (result.audioPid === -1) {
  14926. result.audioPid = pid;
  14927. }
  14928. break;
  14929. // Packetized metadata (ID3)
  14930. case 0x15:
  14931. // logger.log('ID3 PID:' + pid);
  14932. if (result.id3Pid === -1) {
  14933. result.id3Pid = pid;
  14934. }
  14935. break;
  14936. case 0xdb:
  14937. // SAMPLE-AES AVC
  14938. if (!isSampleAes) {
  14939. logEncryptedSamplesFoundInUnencryptedStream('H.264');
  14940. break;
  14941. }
  14942. /* falls through */
  14943. case 0x1b:
  14944. // ITU-T Rec. H.264 and ISO/IEC 14496-10 (lower bit-rate video)
  14945. // logger.log('AVC PID:' + pid);
  14946. if (result.videoPid === -1) {
  14947. result.videoPid = pid;
  14948. result.segmentVideoCodec = 'avc';
  14949. }
  14950. break;
  14951. // ISO/IEC 11172-3 (MPEG-1 audio)
  14952. // or ISO/IEC 13818-3 (MPEG-2 halved sample rate audio)
  14953. case 0x03:
  14954. case 0x04:
  14955. // logger.log('MPEG PID:' + pid);
  14956. if (!typeSupported.mpeg && !typeSupported.mp3) {
  14957. logger.log('MPEG audio found, not supported in this browser');
  14958. } else if (result.audioPid === -1) {
  14959. result.audioPid = pid;
  14960. result.segmentAudioCodec = 'mp3';
  14961. }
  14962. break;
  14963. case 0xc1:
  14964. // SAMPLE-AES AC3
  14965. if (!isSampleAes) {
  14966. logEncryptedSamplesFoundInUnencryptedStream('AC-3');
  14967. break;
  14968. }
  14969. /* falls through */
  14970. case 0x81:
  14971. {
  14972. logger.warn('AC-3 in M2TS support not included in build');
  14973. }
  14974. break;
  14975. case 0x06:
  14976. // stream_type 6 can mean a lot of different things in case of DVB.
  14977. // We need to look at the descriptors. Right now, we're only interested
  14978. // in AC-3 audio, so we do the descriptor parsing only when we don't have
  14979. // an audio PID yet.
  14980. if (result.audioPid === -1 && esInfoLength > 0) {
  14981. let parsePos = offset + 5;
  14982. let remaining = esInfoLength;
  14983. while (remaining > 2) {
  14984. const descriptorId = data[parsePos];
  14985. switch (descriptorId) {
  14986. case 0x6a:
  14987. // DVB Descriptor for AC-3
  14988. {
  14989. logger.warn('AC-3 in M2TS support not included in build');
  14990. }
  14991. break;
  14992. }
  14993. const descriptorLen = data[parsePos + 1] + 2;
  14994. parsePos += descriptorLen;
  14995. remaining -= descriptorLen;
  14996. }
  14997. }
  14998. break;
  14999. case 0xc2: // SAMPLE-AES EC3
  15000. /* falls through */
  15001. case 0x87:
  15002. emitParsingError(observer, new Error('Unsupported EC-3 in M2TS found'));
  15003. return result;
  15004. case 0x24:
  15005. emitParsingError(observer, new Error('Unsupported HEVC in M2TS found'));
  15006. return result;
  15007. }
  15008. // move to the next table entry
  15009. // skip past the elementary stream descriptors, if present
  15010. offset += esInfoLength + 5;
  15011. }
  15012. return result;
  15013. }
  15014. function emitParsingError(observer, error, levelRetry) {
  15015. logger.warn(`parsing error: ${error.message}`);
  15016. observer.emit(Events.ERROR, Events.ERROR, {
  15017. type: ErrorTypes.MEDIA_ERROR,
  15018. details: ErrorDetails.FRAG_PARSING_ERROR,
  15019. fatal: false,
  15020. levelRetry,
  15021. error,
  15022. reason: error.message
  15023. });
  15024. }
  15025. function logEncryptedSamplesFoundInUnencryptedStream(type) {
  15026. logger.log(`${type} with AES-128-CBC encryption found in unencrypted stream`);
  15027. }
  15028. function parsePES(stream) {
  15029. let i = 0;
  15030. let frag;
  15031. let pesLen;
  15032. let pesHdrLen;
  15033. let pesPts;
  15034. let pesDts;
15035. // safety check: bail out before dereferencing an empty stream
15036. if (!stream || stream.size === 0) {
15037. return null;
15038. }
15039. const data = stream.data;
  15040. // we might need up to 19 bytes to read PES header
  15041. // if first chunk of data is less than 19 bytes, let's merge it with following ones until we get 19 bytes
  15042. // usually only one merge is needed (and this is rare ...)
  15043. while (data[0].length < 19 && data.length > 1) {
  15044. data[0] = appendUint8Array(data[0], data[1]);
  15045. data.splice(1, 1);
  15046. }
  15047. // retrieve PTS/DTS from first fragment
  15048. frag = data[0];
  15049. const pesPrefix = (frag[0] << 16) + (frag[1] << 8) + frag[2];
  15050. if (pesPrefix === 1) {
  15051. pesLen = (frag[4] << 8) + frag[5];
  15052. // if PES parsed length is not zero and greater than total received length, stop parsing. PES might be truncated
  15053. // minus 6 : PES header size
  15054. if (pesLen && pesLen > stream.size - 6) {
  15055. return null;
  15056. }
  15057. const pesFlags = frag[7];
  15058. if (pesFlags & 0xc0) {
  15059. /* PES header described here : http://dvd.sourceforge.net/dvdinfo/pes-hdr.html
  15060. as PTS / DTS is 33 bit we cannot use bitwise operator in JS,
  15061. as Bitwise operators treat their operands as a sequence of 32 bits */
  15062. pesPts = (frag[9] & 0x0e) * 536870912 +
  15063. // 1 << 29
  15064. (frag[10] & 0xff) * 4194304 +
  15065. // 1 << 22
  15066. (frag[11] & 0xfe) * 16384 +
  15067. // 1 << 14
  15068. (frag[12] & 0xff) * 128 +
  15069. // 1 << 7
  15070. (frag[13] & 0xfe) / 2;
  15071. if (pesFlags & 0x40) {
  15072. pesDts = (frag[14] & 0x0e) * 536870912 +
  15073. // 1 << 29
  15074. (frag[15] & 0xff) * 4194304 +
  15075. // 1 << 22
  15076. (frag[16] & 0xfe) * 16384 +
  15077. // 1 << 14
  15078. (frag[17] & 0xff) * 128 +
  15079. // 1 << 7
  15080. (frag[18] & 0xfe) / 2;
  15081. if (pesPts - pesDts > 60 * 90000) {
  15082. logger.warn(`${Math.round((pesPts - pesDts) / 90000)}s delta between PTS and DTS, align them`);
  15083. pesPts = pesDts;
  15084. }
  15085. } else {
  15086. pesDts = pesPts;
  15087. }
  15088. }
  15089. pesHdrLen = frag[8];
  15090. // 9 bytes : 6 bytes for PES header + 3 bytes for PES extension
  15091. let payloadStartOffset = pesHdrLen + 9;
  15092. if (stream.size <= payloadStartOffset) {
  15093. return null;
  15094. }
  15095. stream.size -= payloadStartOffset;
  15096. // reassemble PES packet
  15097. const pesData = new Uint8Array(stream.size);
  15098. for (let j = 0, dataLen = data.length; j < dataLen; j++) {
  15099. frag = data[j];
  15100. let len = frag.byteLength;
  15101. if (payloadStartOffset) {
  15102. if (payloadStartOffset > len) {
  15103. // trim full frag if PES header bigger than frag
  15104. payloadStartOffset -= len;
  15105. continue;
  15106. } else {
  15107. // trim partial frag if PES header smaller than frag
  15108. frag = frag.subarray(payloadStartOffset);
  15109. len -= payloadStartOffset;
  15110. payloadStartOffset = 0;
  15111. }
  15112. }
  15113. pesData.set(frag, i);
  15114. i += len;
  15115. }
  15116. if (pesLen) {
  15117. // payload size : remove PES header + PES extension
  15118. pesLen -= pesHdrLen + 3;
  15119. }
  15120. return {
  15121. data: pesData,
  15122. pts: pesPts,
  15123. dts: pesDts,
  15124. len: pesLen
  15125. };
  15126. }
  15127. return null;
  15128. }
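/* Illustrative sketch (not part of the bundled module): the PTS/DTS maths above avoid
   JS bitwise operators because they truncate to 32 bits, while MPEG-TS timestamps are
   33 bits wide. For the five header bytes b0..b4 (frag[9]..frag[13] for PTS):
     const pts =
       (b0 & 0x0e) * 2 ** 29 + // 536870912
       (b1 & 0xff) * 2 ** 22 + // 4194304
       (b2 & 0xfe) * 2 ** 14 + // 16384
       (b3 & 0xff) * 2 ** 7 +  // 128
       (b4 & 0xfe) / 2;
   The `60 * 90000` comparison above is 60 seconds expressed in the 90 kHz MPEG-TS clock. */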
  15129. /**
  15130. * MP3 demuxer
  15131. */
  15132. class MP3Demuxer extends BaseAudioDemuxer {
  15133. resetInitSegment(initSegment, audioCodec, videoCodec, trackDuration) {
  15134. super.resetInitSegment(initSegment, audioCodec, videoCodec, trackDuration);
  15135. this._audioTrack = {
  15136. container: 'audio/mpeg',
  15137. type: 'audio',
  15138. id: 2,
  15139. pid: -1,
  15140. sequenceNumber: 0,
  15141. segmentCodec: 'mp3',
  15142. samples: [],
  15143. manifestCodec: audioCodec,
  15144. duration: trackDuration,
  15145. inputTimeScale: 90000,
  15146. dropped: 0
  15147. };
  15148. }
  15149. static probe(data) {
  15150. if (!data) {
  15151. return false;
  15152. }
  15153. // check if data contains ID3 timestamp and MPEG sync word
  15154. // Look for MPEG header | 1111 1111 | 111X XYZX | where X can be either 0 or 1 and Y or Z should be 1
15155. // Layer bits (positions 14 and 15) in the header should always be different from 0 (Layer I, II or III)
  15156. // More info http://www.mp3-tech.org/programmer/frame_header.html
  15157. const id3Data = getID3Data(data, 0);
  15158. let offset = (id3Data == null ? void 0 : id3Data.length) || 0;
  15159. // Check for ac-3|ec-3 sync bytes and return false if present
  15160. if (id3Data && data[offset] === 0x0b && data[offset + 1] === 0x77 && getTimeStamp(id3Data) !== undefined &&
  15161. // check the bsid to confirm ac-3 or ec-3 (not mp3)
  15162. getAudioBSID(data, offset) <= 16) {
  15163. return false;
  15164. }
  15165. for (let length = data.length; offset < length; offset++) {
  15166. if (probe(data, offset)) {
  15167. logger.log('MPEG Audio sync word found !');
  15168. return true;
  15169. }
  15170. }
  15171. return false;
  15172. }
  15173. canParse(data, offset) {
  15174. return canParse(data, offset);
  15175. }
  15176. appendFrame(track, data, offset) {
  15177. if (this.basePTS === null) {
  15178. return;
  15179. }
  15180. return appendFrame(track, data, offset, this.basePTS, this.frameIndex);
  15181. }
  15182. }
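/* Illustrative sketch (hypothetical helper, not the bundled `probe`/`canParse`): the
   sync-word test described in the comments above boils down to:
     function looksLikeMpegAudioHeader(data, offset) {
       return (
         data[offset] === 0xff &&              // 1111 1111
         (data[offset + 1] & 0xe0) === 0xe0 && // 111X XXXX sync continuation
         (data[offset + 1] & 0x06) !== 0x00    // layer bits must not be 00
       );
     }
   This is only the minimal check; the bundled helpers inspect more of the frame header. */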
  15183. /**
  15184. * AAC helper
  15185. */
  15186. class AAC {
  15187. static getSilentFrame(codec, channelCount) {
  15188. switch (codec) {
  15189. case 'mp4a.40.2':
  15190. if (channelCount === 1) {
  15191. return new Uint8Array([0x00, 0xc8, 0x00, 0x80, 0x23, 0x80]);
  15192. } else if (channelCount === 2) {
  15193. return new Uint8Array([0x21, 0x00, 0x49, 0x90, 0x02, 0x19, 0x00, 0x23, 0x80]);
  15194. } else if (channelCount === 3) {
  15195. return new Uint8Array([0x00, 0xc8, 0x00, 0x80, 0x20, 0x84, 0x01, 0x26, 0x40, 0x08, 0x64, 0x00, 0x8e]);
  15196. } else if (channelCount === 4) {
  15197. return new Uint8Array([0x00, 0xc8, 0x00, 0x80, 0x20, 0x84, 0x01, 0x26, 0x40, 0x08, 0x64, 0x00, 0x80, 0x2c, 0x80, 0x08, 0x02, 0x38]);
  15198. } else if (channelCount === 5) {
  15199. return new Uint8Array([0x00, 0xc8, 0x00, 0x80, 0x20, 0x84, 0x01, 0x26, 0x40, 0x08, 0x64, 0x00, 0x82, 0x30, 0x04, 0x99, 0x00, 0x21, 0x90, 0x02, 0x38]);
  15200. } else if (channelCount === 6) {
  15201. return new Uint8Array([0x00, 0xc8, 0x00, 0x80, 0x20, 0x84, 0x01, 0x26, 0x40, 0x08, 0x64, 0x00, 0x82, 0x30, 0x04, 0x99, 0x00, 0x21, 0x90, 0x02, 0x00, 0xb2, 0x00, 0x20, 0x08, 0xe0]);
  15202. }
  15203. break;
  15204. // handle HE-AAC below (mp4a.40.5 / mp4a.40.29)
  15205. default:
  15206. if (channelCount === 1) {
  15207. // ffmpeg -y -f lavfi -i "aevalsrc=0:d=0.05" -c:a libfdk_aac -profile:a aac_he -b:a 4k output.aac && hexdump -v -e '16/1 "0x%x," "\n"' -v output.aac
  15208. return new Uint8Array([0x1, 0x40, 0x22, 0x80, 0xa3, 0x4e, 0xe6, 0x80, 0xba, 0x8, 0x0, 0x0, 0x0, 0x1c, 0x6, 0xf1, 0xc1, 0xa, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5e]);
  15209. } else if (channelCount === 2) {
  15210. // ffmpeg -y -f lavfi -i "aevalsrc=0|0:d=0.05" -c:a libfdk_aac -profile:a aac_he_v2 -b:a 4k output.aac && hexdump -v -e '16/1 "0x%x," "\n"' -v output.aac
  15211. return new Uint8Array([0x1, 0x40, 0x22, 0x80, 0xa3, 0x5e, 0xe6, 0x80, 0xba, 0x8, 0x0, 0x0, 0x0, 0x0, 0x95, 0x0, 0x6, 0xf1, 0xa1, 0xa, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5e]);
  15212. } else if (channelCount === 3) {
  15213. // ffmpeg -y -f lavfi -i "aevalsrc=0|0|0:d=0.05" -c:a libfdk_aac -profile:a aac_he_v2 -b:a 4k output.aac && hexdump -v -e '16/1 "0x%x," "\n"' -v output.aac
  15214. return new Uint8Array([0x1, 0x40, 0x22, 0x80, 0xa3, 0x5e, 0xe6, 0x80, 0xba, 0x8, 0x0, 0x0, 0x0, 0x0, 0x95, 0x0, 0x6, 0xf1, 0xa1, 0xa, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5a, 0x5e]);
  15215. }
  15216. break;
  15217. }
  15218. return undefined;
  15219. }
  15220. }
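/* Illustrative usage (hypothetical call site): the remuxer uses these pre-encoded
   frames to fill audio gaps, e.g.
     const silent = AAC.getSilentFrame('mp4a.40.2', 2); // 9-byte stereo AAC-LC frame
     // returns undefined for unsupported channel counts, in which case the caller
     // falls back to duplicating the previous frame (see remuxAudio below).
   Each returned Uint8Array is a raw AAC access unit without an ADTS header. */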
  15221. /**
  15222. * Generate MP4 Box
  15223. */
  15224. const UINT32_MAX = Math.pow(2, 32) - 1;
  15225. class MP4 {
  15226. static init() {
  15227. MP4.types = {
  15228. avc1: [],
  15229. // codingname
  15230. avcC: [],
  15231. btrt: [],
  15232. dinf: [],
  15233. dref: [],
  15234. esds: [],
  15235. ftyp: [],
  15236. hdlr: [],
  15237. mdat: [],
  15238. mdhd: [],
  15239. mdia: [],
  15240. mfhd: [],
  15241. minf: [],
  15242. moof: [],
  15243. moov: [],
  15244. mp4a: [],
  15245. '.mp3': [],
  15246. dac3: [],
  15247. 'ac-3': [],
  15248. mvex: [],
  15249. mvhd: [],
  15250. pasp: [],
  15251. sdtp: [],
  15252. stbl: [],
  15253. stco: [],
  15254. stsc: [],
  15255. stsd: [],
  15256. stsz: [],
  15257. stts: [],
  15258. tfdt: [],
  15259. tfhd: [],
  15260. traf: [],
  15261. trak: [],
  15262. trun: [],
  15263. trex: [],
  15264. tkhd: [],
  15265. vmhd: [],
  15266. smhd: []
  15267. };
  15268. let i;
  15269. for (i in MP4.types) {
  15270. if (MP4.types.hasOwnProperty(i)) {
  15271. MP4.types[i] = [i.charCodeAt(0), i.charCodeAt(1), i.charCodeAt(2), i.charCodeAt(3)];
  15272. }
  15273. }
  15274. const videoHdlr = new Uint8Array([0x00,
  15275. // version 0
  15276. 0x00, 0x00, 0x00,
  15277. // flags
  15278. 0x00, 0x00, 0x00, 0x00,
  15279. // pre_defined
  15280. 0x76, 0x69, 0x64, 0x65,
  15281. // handler_type: 'vide'
  15282. 0x00, 0x00, 0x00, 0x00,
  15283. // reserved
  15284. 0x00, 0x00, 0x00, 0x00,
  15285. // reserved
  15286. 0x00, 0x00, 0x00, 0x00,
  15287. // reserved
  15288. 0x56, 0x69, 0x64, 0x65, 0x6f, 0x48, 0x61, 0x6e, 0x64, 0x6c, 0x65, 0x72, 0x00 // name: 'VideoHandler'
  15289. ]);
  15290. const audioHdlr = new Uint8Array([0x00,
  15291. // version 0
  15292. 0x00, 0x00, 0x00,
  15293. // flags
  15294. 0x00, 0x00, 0x00, 0x00,
  15295. // pre_defined
  15296. 0x73, 0x6f, 0x75, 0x6e,
  15297. // handler_type: 'soun'
  15298. 0x00, 0x00, 0x00, 0x00,
  15299. // reserved
  15300. 0x00, 0x00, 0x00, 0x00,
  15301. // reserved
  15302. 0x00, 0x00, 0x00, 0x00,
  15303. // reserved
  15304. 0x53, 0x6f, 0x75, 0x6e, 0x64, 0x48, 0x61, 0x6e, 0x64, 0x6c, 0x65, 0x72, 0x00 // name: 'SoundHandler'
  15305. ]);
  15306. MP4.HDLR_TYPES = {
  15307. video: videoHdlr,
  15308. audio: audioHdlr
  15309. };
  15310. const dref = new Uint8Array([0x00,
  15311. // version 0
  15312. 0x00, 0x00, 0x00,
  15313. // flags
  15314. 0x00, 0x00, 0x00, 0x01,
  15315. // entry_count
  15316. 0x00, 0x00, 0x00, 0x0c,
  15317. // entry_size
  15318. 0x75, 0x72, 0x6c, 0x20,
  15319. // 'url' type
  15320. 0x00,
  15321. // version 0
  15322. 0x00, 0x00, 0x01 // entry_flags
  15323. ]);
  15324. const stco = new Uint8Array([0x00,
  15325. // version
  15326. 0x00, 0x00, 0x00,
  15327. // flags
  15328. 0x00, 0x00, 0x00, 0x00 // entry_count
  15329. ]);
  15330. MP4.STTS = MP4.STSC = MP4.STCO = stco;
  15331. MP4.STSZ = new Uint8Array([0x00,
  15332. // version
  15333. 0x00, 0x00, 0x00,
  15334. // flags
  15335. 0x00, 0x00, 0x00, 0x00,
  15336. // sample_size
  15337. 0x00, 0x00, 0x00, 0x00 // sample_count
  15338. ]);
  15339. MP4.VMHD = new Uint8Array([0x00,
  15340. // version
  15341. 0x00, 0x00, 0x01,
  15342. // flags
  15343. 0x00, 0x00,
  15344. // graphicsmode
  15345. 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 // opcolor
  15346. ]);
  15347. MP4.SMHD = new Uint8Array([0x00,
  15348. // version
  15349. 0x00, 0x00, 0x00,
  15350. // flags
  15351. 0x00, 0x00,
  15352. // balance
  15353. 0x00, 0x00 // reserved
  15354. ]);
  15355. MP4.STSD = new Uint8Array([0x00,
  15356. // version 0
  15357. 0x00, 0x00, 0x00,
  15358. // flags
  15359. 0x00, 0x00, 0x00, 0x01]); // entry_count
  15360. const majorBrand = new Uint8Array([105, 115, 111, 109]); // isom
  15361. const avc1Brand = new Uint8Array([97, 118, 99, 49]); // avc1
  15362. const minorVersion = new Uint8Array([0, 0, 0, 1]);
  15363. MP4.FTYP = MP4.box(MP4.types.ftyp, majorBrand, minorVersion, majorBrand, avc1Brand);
  15364. MP4.DINF = MP4.box(MP4.types.dinf, MP4.box(MP4.types.dref, dref));
  15365. }
  15366. static box(type, ...payload) {
  15367. let size = 8;
  15368. let i = payload.length;
  15369. const len = i;
  15370. // calculate the total size we need to allocate
  15371. while (i--) {
  15372. size += payload[i].byteLength;
  15373. }
  15374. const result = new Uint8Array(size);
  15375. result[0] = size >> 24 & 0xff;
  15376. result[1] = size >> 16 & 0xff;
  15377. result[2] = size >> 8 & 0xff;
  15378. result[3] = size & 0xff;
  15379. result.set(type, 4);
  15380. // copy the payload into the result
  15381. for (i = 0, size = 8; i < len; i++) {
  15382. // copy payload[i] array @ offset size
  15383. result.set(payload[i], size);
  15384. size += payload[i].byteLength;
  15385. }
  15386. return result;
  15387. }
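/* Illustrative sketch (hypothetical values, assumes MP4.init() has populated MP4.types):
   `box` emits an ISO BMFF box, i.e. a 32-bit big-endian size (header included), the
   4-character type, then the concatenated payloads:
     MP4.box(MP4.types.mdat, new Uint8Array([0xaa]));
     // -> [0x00, 0x00, 0x00, 0x09,  0x6d, 0x64, 0x61, 0x74,  0xaa]
     //     |------ size = 9 ------|  'm'   'd'   'a'   't'   payload */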
  15388. static hdlr(type) {
  15389. return MP4.box(MP4.types.hdlr, MP4.HDLR_TYPES[type]);
  15390. }
  15391. static mdat(data) {
  15392. return MP4.box(MP4.types.mdat, data);
  15393. }
  15394. static mdhd(timescale, duration) {
  15395. duration *= timescale;
  15396. const upperWordDuration = Math.floor(duration / (UINT32_MAX + 1));
  15397. const lowerWordDuration = Math.floor(duration % (UINT32_MAX + 1));
  15398. return MP4.box(MP4.types.mdhd, new Uint8Array([0x01,
  15399. // version 1
  15400. 0x00, 0x00, 0x00,
  15401. // flags
  15402. 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02,
  15403. // creation_time
  15404. 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x03,
  15405. // modification_time
  15406. timescale >> 24 & 0xff, timescale >> 16 & 0xff, timescale >> 8 & 0xff, timescale & 0xff,
  15407. // timescale
  15408. upperWordDuration >> 24, upperWordDuration >> 16 & 0xff, upperWordDuration >> 8 & 0xff, upperWordDuration & 0xff, lowerWordDuration >> 24, lowerWordDuration >> 16 & 0xff, lowerWordDuration >> 8 & 0xff, lowerWordDuration & 0xff, 0x55, 0xc4,
  15409. // 'und' language (undetermined)
  15410. 0x00, 0x00]));
  15411. }
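/* Illustrative sketch: the version-1 mdhd (and mvhd below) carries a 64-bit duration,
   so the scaled duration is split into two 32-bit words using UINT32_MAX + 1 (= 2^32):
     const scaled = duration * timescale;        // e.g. 3600 s * 90000 = 324000000
     const upper = Math.floor(scaled / 2 ** 32); // 0 in this example
     const lower = Math.floor(scaled % 2 ** 32); // 324000000
   Only durations beyond roughly 13 hours at 90 kHz need a non-zero upper word. */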
  15412. static mdia(track) {
  15413. return MP4.box(MP4.types.mdia, MP4.mdhd(track.timescale, track.duration), MP4.hdlr(track.type), MP4.minf(track));
  15414. }
  15415. static mfhd(sequenceNumber) {
  15416. return MP4.box(MP4.types.mfhd, new Uint8Array([0x00, 0x00, 0x00, 0x00,
  15417. // flags
  15418. sequenceNumber >> 24, sequenceNumber >> 16 & 0xff, sequenceNumber >> 8 & 0xff, sequenceNumber & 0xff // sequence_number
  15419. ]));
  15420. }
  15421. static minf(track) {
  15422. if (track.type === 'audio') {
  15423. return MP4.box(MP4.types.minf, MP4.box(MP4.types.smhd, MP4.SMHD), MP4.DINF, MP4.stbl(track));
  15424. } else {
  15425. return MP4.box(MP4.types.minf, MP4.box(MP4.types.vmhd, MP4.VMHD), MP4.DINF, MP4.stbl(track));
  15426. }
  15427. }
  15428. static moof(sn, baseMediaDecodeTime, track) {
  15429. return MP4.box(MP4.types.moof, MP4.mfhd(sn), MP4.traf(track, baseMediaDecodeTime));
  15430. }
  15431. static moov(tracks) {
  15432. let i = tracks.length;
  15433. const boxes = [];
  15434. while (i--) {
  15435. boxes[i] = MP4.trak(tracks[i]);
  15436. }
  15437. return MP4.box.apply(null, [MP4.types.moov, MP4.mvhd(tracks[0].timescale, tracks[0].duration)].concat(boxes).concat(MP4.mvex(tracks)));
  15438. }
  15439. static mvex(tracks) {
  15440. let i = tracks.length;
  15441. const boxes = [];
  15442. while (i--) {
  15443. boxes[i] = MP4.trex(tracks[i]);
  15444. }
  15445. return MP4.box.apply(null, [MP4.types.mvex, ...boxes]);
  15446. }
  15447. static mvhd(timescale, duration) {
  15448. duration *= timescale;
  15449. const upperWordDuration = Math.floor(duration / (UINT32_MAX + 1));
  15450. const lowerWordDuration = Math.floor(duration % (UINT32_MAX + 1));
  15451. const bytes = new Uint8Array([0x01,
  15452. // version 1
  15453. 0x00, 0x00, 0x00,
  15454. // flags
  15455. 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02,
  15456. // creation_time
  15457. 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x03,
  15458. // modification_time
  15459. timescale >> 24 & 0xff, timescale >> 16 & 0xff, timescale >> 8 & 0xff, timescale & 0xff,
  15460. // timescale
  15461. upperWordDuration >> 24, upperWordDuration >> 16 & 0xff, upperWordDuration >> 8 & 0xff, upperWordDuration & 0xff, lowerWordDuration >> 24, lowerWordDuration >> 16 & 0xff, lowerWordDuration >> 8 & 0xff, lowerWordDuration & 0xff, 0x00, 0x01, 0x00, 0x00,
  15462. // 1.0 rate
  15463. 0x01, 0x00,
  15464. // 1.0 volume
  15465. 0x00, 0x00,
  15466. // reserved
  15467. 0x00, 0x00, 0x00, 0x00,
  15468. // reserved
  15469. 0x00, 0x00, 0x00, 0x00,
  15470. // reserved
  15471. 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x40, 0x00, 0x00, 0x00,
  15472. // transformation: unity matrix
  15473. 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
  15474. // pre_defined
  15475. 0xff, 0xff, 0xff, 0xff // next_track_ID
  15476. ]);
  15477. return MP4.box(MP4.types.mvhd, bytes);
  15478. }
  15479. static sdtp(track) {
  15480. const samples = track.samples || [];
  15481. const bytes = new Uint8Array(4 + samples.length);
  15482. let i;
  15483. let flags;
  15484. // leave the full box header (4 bytes) all zero
  15485. // write the sample table
  15486. for (i = 0; i < samples.length; i++) {
  15487. flags = samples[i].flags;
  15488. bytes[i + 4] = flags.dependsOn << 4 | flags.isDependedOn << 2 | flags.hasRedundancy;
  15489. }
  15490. return MP4.box(MP4.types.sdtp, bytes);
  15491. }
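/* Illustrative sketch (hypothetical flag values): each sdtp entry packs three sample
   dependency fields into one byte as (dependsOn << 4) | (isDependedOn << 2) | hasRedundancy:
     // keyframe:     dependsOn = 2 (does not depend on others) -> 0x20
     // non-keyframe: dependsOn = 1, isDependedOn = 0, hasRedundancy = 0 -> 0x10 */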
  15492. static stbl(track) {
  15493. return MP4.box(MP4.types.stbl, MP4.stsd(track), MP4.box(MP4.types.stts, MP4.STTS), MP4.box(MP4.types.stsc, MP4.STSC), MP4.box(MP4.types.stsz, MP4.STSZ), MP4.box(MP4.types.stco, MP4.STCO));
  15494. }
  15495. static avc1(track) {
  15496. let sps = [];
  15497. let pps = [];
  15498. let i;
  15499. let data;
  15500. let len;
  15501. // assemble the SPSs
  15502. for (i = 0; i < track.sps.length; i++) {
  15503. data = track.sps[i];
  15504. len = data.byteLength;
  15505. sps.push(len >>> 8 & 0xff);
  15506. sps.push(len & 0xff);
  15507. // SPS
  15508. sps = sps.concat(Array.prototype.slice.call(data));
  15509. }
  15510. // assemble the PPSs
  15511. for (i = 0; i < track.pps.length; i++) {
  15512. data = track.pps[i];
  15513. len = data.byteLength;
  15514. pps.push(len >>> 8 & 0xff);
  15515. pps.push(len & 0xff);
  15516. pps = pps.concat(Array.prototype.slice.call(data));
  15517. }
  15518. const avcc = MP4.box(MP4.types.avcC, new Uint8Array([0x01,
  15519. // version
  15520. sps[3],
  15521. // profile
  15522. sps[4],
  15523. // profile compat
  15524. sps[5],
  15525. // level
  15526. 0xfc | 3,
  15527. // lengthSizeMinusOne, hard-coded to 4 bytes
  15528. 0xe0 | track.sps.length // 3bit reserved (111) + numOfSequenceParameterSets
  15529. ].concat(sps).concat([track.pps.length // numOfPictureParameterSets
  15530. ]).concat(pps))); // "PPS"
  15531. const width = track.width;
  15532. const height = track.height;
  15533. const hSpacing = track.pixelRatio[0];
  15534. const vSpacing = track.pixelRatio[1];
  15535. return MP4.box(MP4.types.avc1, new Uint8Array([0x00, 0x00, 0x00,
  15536. // reserved
  15537. 0x00, 0x00, 0x00,
  15538. // reserved
  15539. 0x00, 0x01,
  15540. // data_reference_index
  15541. 0x00, 0x00,
  15542. // pre_defined
  15543. 0x00, 0x00,
  15544. // reserved
  15545. 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
  15546. // pre_defined
  15547. width >> 8 & 0xff, width & 0xff,
  15548. // width
  15549. height >> 8 & 0xff, height & 0xff,
  15550. // height
  15551. 0x00, 0x48, 0x00, 0x00,
  15552. // horizresolution
  15553. 0x00, 0x48, 0x00, 0x00,
  15554. // vertresolution
  15555. 0x00, 0x00, 0x00, 0x00,
  15556. // reserved
  15557. 0x00, 0x01,
  15558. // frame_count
  15559. 0x12, 0x64, 0x61, 0x69, 0x6c,
  15560. // dailymotion/hls.js
  15561. 0x79, 0x6d, 0x6f, 0x74, 0x69, 0x6f, 0x6e, 0x2f, 0x68, 0x6c, 0x73, 0x2e, 0x6a, 0x73, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
  15562. // compressorname
  15563. 0x00, 0x18,
  15564. // depth = 24
  15565. 0x11, 0x11]),
  15566. // pre_defined = -1
  15567. avcc, MP4.box(MP4.types.btrt, new Uint8Array([0x00, 0x1c, 0x9c, 0x80,
  15568. // bufferSizeDB
  15569. 0x00, 0x2d, 0xc6, 0xc0,
  15570. // maxBitrate
  15571. 0x00, 0x2d, 0xc6, 0xc0])),
  15572. // avgBitrate
  15573. MP4.box(MP4.types.pasp, new Uint8Array([hSpacing >> 24,
  15574. // hSpacing
  15575. hSpacing >> 16 & 0xff, hSpacing >> 8 & 0xff, hSpacing & 0xff, vSpacing >> 24,
  15576. // vSpacing
  15577. vSpacing >> 16 & 0xff, vSpacing >> 8 & 0xff, vSpacing & 0xff])));
  15578. }
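/* Illustrative sketch: inside avcC every parameter set is stored with a 2-byte
   big-endian length prefix, which is what the sps/pps loops above build:
     // a 20-byte SPS becomes [0x00, 0x14, ...20 SPS bytes...]
   The resulting avcC layout is: version, profile, compat, level, lengthSizeMinusOne
   (0xfc | 3, i.e. 4-byte NALU length fields), SPS count (0xe0 | n), the SPS entries,
   PPS count, then the PPS entries. */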
  15579. static esds(track) {
  15580. const configlen = track.config.length;
  15581. return new Uint8Array([0x00,
  15582. // version 0
  15583. 0x00, 0x00, 0x00,
  15584. // flags
  15585. 0x03,
  15586. // descriptor_type
  15587. 0x17 + configlen,
  15588. // length
  15589. 0x00, 0x01,
  15590. // es_id
  15591. 0x00,
  15592. // stream_priority
  15593. 0x04,
  15594. // descriptor_type
  15595. 0x0f + configlen,
  15596. // length
  15597. 0x40,
  15598. // codec : mpeg4_audio
  15599. 0x15,
  15600. // stream_type
  15601. 0x00, 0x00, 0x00,
  15602. // buffer_size
  15603. 0x00, 0x00, 0x00, 0x00,
  15604. // maxBitrate
  15605. 0x00, 0x00, 0x00, 0x00,
  15606. // avgBitrate
  15607. 0x05 // descriptor_type
  15608. ].concat([configlen]).concat(track.config).concat([0x06, 0x01, 0x02])); // GASpecificConfig)); // length + audio config descriptor
  15609. }
  15610. static audioStsd(track) {
  15611. const samplerate = track.samplerate;
  15612. return new Uint8Array([0x00, 0x00, 0x00,
  15613. // reserved
  15614. 0x00, 0x00, 0x00,
  15615. // reserved
  15616. 0x00, 0x01,
  15617. // data_reference_index
  15618. 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
  15619. // reserved
  15620. 0x00, track.channelCount,
  15621. // channelcount
  15622. 0x00, 0x10,
  15623. // sampleSize:16bits
  15624. 0x00, 0x00, 0x00, 0x00,
  15625. // reserved2
  15626. samplerate >> 8 & 0xff, samplerate & 0xff,
  15627. //
  15628. 0x00, 0x00]);
  15629. }
  15630. static mp4a(track) {
  15631. return MP4.box(MP4.types.mp4a, MP4.audioStsd(track), MP4.box(MP4.types.esds, MP4.esds(track)));
  15632. }
  15633. static mp3(track) {
  15634. return MP4.box(MP4.types['.mp3'], MP4.audioStsd(track));
  15635. }
  15636. static ac3(track) {
  15637. return MP4.box(MP4.types['ac-3'], MP4.audioStsd(track), MP4.box(MP4.types.dac3, track.config));
  15638. }
  15639. static stsd(track) {
  15640. if (track.type === 'audio') {
  15641. if (track.segmentCodec === 'mp3' && track.codec === 'mp3') {
  15642. return MP4.box(MP4.types.stsd, MP4.STSD, MP4.mp3(track));
  15643. }
  15644. if (track.segmentCodec === 'ac3') {
  15645. return MP4.box(MP4.types.stsd, MP4.STSD, MP4.ac3(track));
  15646. }
  15647. return MP4.box(MP4.types.stsd, MP4.STSD, MP4.mp4a(track));
  15648. } else {
  15649. return MP4.box(MP4.types.stsd, MP4.STSD, MP4.avc1(track));
  15650. }
  15651. }
  15652. static tkhd(track) {
  15653. const id = track.id;
  15654. const duration = track.duration * track.timescale;
  15655. const width = track.width;
  15656. const height = track.height;
  15657. const upperWordDuration = Math.floor(duration / (UINT32_MAX + 1));
  15658. const lowerWordDuration = Math.floor(duration % (UINT32_MAX + 1));
  15659. return MP4.box(MP4.types.tkhd, new Uint8Array([0x01,
  15660. // version 1
  15661. 0x00, 0x00, 0x07,
  15662. // flags
  15663. 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02,
  15664. // creation_time
  15665. 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x03,
  15666. // modification_time
  15667. id >> 24 & 0xff, id >> 16 & 0xff, id >> 8 & 0xff, id & 0xff,
  15668. // track_ID
  15669. 0x00, 0x00, 0x00, 0x00,
  15670. // reserved
  15671. upperWordDuration >> 24, upperWordDuration >> 16 & 0xff, upperWordDuration >> 8 & 0xff, upperWordDuration & 0xff, lowerWordDuration >> 24, lowerWordDuration >> 16 & 0xff, lowerWordDuration >> 8 & 0xff, lowerWordDuration & 0xff, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
  15672. // reserved
  15673. 0x00, 0x00,
  15674. // layer
  15675. 0x00, 0x00,
  15676. // alternate_group
  15677. 0x00, 0x00,
  15678. // non-audio track volume
  15679. 0x00, 0x00,
  15680. // reserved
  15681. 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x40, 0x00, 0x00, 0x00,
  15682. // transformation: unity matrix
  15683. width >> 8 & 0xff, width & 0xff, 0x00, 0x00,
  15684. // width
  15685. height >> 8 & 0xff, height & 0xff, 0x00, 0x00 // height
  15686. ]));
  15687. }
  15688. static traf(track, baseMediaDecodeTime) {
  15689. const sampleDependencyTable = MP4.sdtp(track);
  15690. const id = track.id;
  15691. const upperWordBaseMediaDecodeTime = Math.floor(baseMediaDecodeTime / (UINT32_MAX + 1));
  15692. const lowerWordBaseMediaDecodeTime = Math.floor(baseMediaDecodeTime % (UINT32_MAX + 1));
  15693. return MP4.box(MP4.types.traf, MP4.box(MP4.types.tfhd, new Uint8Array([0x00,
  15694. // version 0
  15695. 0x00, 0x00, 0x00,
  15696. // flags
  15697. id >> 24, id >> 16 & 0xff, id >> 8 & 0xff, id & 0xff // track_ID
  15698. ])), MP4.box(MP4.types.tfdt, new Uint8Array([0x01,
  15699. // version 1
  15700. 0x00, 0x00, 0x00,
  15701. // flags
  15702. upperWordBaseMediaDecodeTime >> 24, upperWordBaseMediaDecodeTime >> 16 & 0xff, upperWordBaseMediaDecodeTime >> 8 & 0xff, upperWordBaseMediaDecodeTime & 0xff, lowerWordBaseMediaDecodeTime >> 24, lowerWordBaseMediaDecodeTime >> 16 & 0xff, lowerWordBaseMediaDecodeTime >> 8 & 0xff, lowerWordBaseMediaDecodeTime & 0xff])), MP4.trun(track, sampleDependencyTable.length + 16 +
  15703. // tfhd
  15704. 20 +
  15705. // tfdt
  15706. 8 +
  15707. // traf header
  15708. 16 +
  15709. // mfhd
  15710. 8 +
  15711. // moof header
  15712. 8),
  15713. // mdat header
  15714. sampleDependencyTable);
  15715. }
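/* Illustrative sketch: the second argument passed to MP4.trun above is the byte
   distance from the start of the moof box to the first media byte inside mdat,
   accumulated from the enclosing box sizes:
     // sdtp size + tfhd (16) + tfdt (20) + traf header (8) + mfhd (16)
     // + moof header (8) + mdat header (8)
   trun() then adds its own size (8 + arraylen) before writing data_offset, so the
   offset ends up pointing just past the mdat header. */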
  15716. /**
  15717. * Generate a track box.
  15718. * @param track a track definition
  15719. */
  15720. static trak(track) {
  15721. track.duration = track.duration || 0xffffffff;
  15722. return MP4.box(MP4.types.trak, MP4.tkhd(track), MP4.mdia(track));
  15723. }
  15724. static trex(track) {
  15725. const id = track.id;
  15726. return MP4.box(MP4.types.trex, new Uint8Array([0x00,
  15727. // version 0
  15728. 0x00, 0x00, 0x00,
  15729. // flags
  15730. id >> 24, id >> 16 & 0xff, id >> 8 & 0xff, id & 0xff,
  15731. // track_ID
  15732. 0x00, 0x00, 0x00, 0x01,
  15733. // default_sample_description_index
  15734. 0x00, 0x00, 0x00, 0x00,
  15735. // default_sample_duration
  15736. 0x00, 0x00, 0x00, 0x00,
  15737. // default_sample_size
  15738. 0x00, 0x01, 0x00, 0x01 // default_sample_flags
  15739. ]));
  15740. }
  15741. static trun(track, offset) {
  15742. const samples = track.samples || [];
  15743. const len = samples.length;
  15744. const arraylen = 12 + 16 * len;
  15745. const array = new Uint8Array(arraylen);
  15746. let i;
  15747. let sample;
  15748. let duration;
  15749. let size;
  15750. let flags;
  15751. let cts;
  15752. offset += 8 + arraylen;
  15753. array.set([track.type === 'video' ? 0x01 : 0x00,
  15754. // version 1 for video with signed-int sample_composition_time_offset
  15755. 0x00, 0x0f, 0x01,
  15756. // flags
  15757. len >>> 24 & 0xff, len >>> 16 & 0xff, len >>> 8 & 0xff, len & 0xff,
  15758. // sample_count
  15759. offset >>> 24 & 0xff, offset >>> 16 & 0xff, offset >>> 8 & 0xff, offset & 0xff // data_offset
  15760. ], 0);
  15761. for (i = 0; i < len; i++) {
  15762. sample = samples[i];
  15763. duration = sample.duration;
  15764. size = sample.size;
  15765. flags = sample.flags;
  15766. cts = sample.cts;
  15767. array.set([duration >>> 24 & 0xff, duration >>> 16 & 0xff, duration >>> 8 & 0xff, duration & 0xff,
  15768. // sample_duration
  15769. size >>> 24 & 0xff, size >>> 16 & 0xff, size >>> 8 & 0xff, size & 0xff,
  15770. // sample_size
  15771. flags.isLeading << 2 | flags.dependsOn, flags.isDependedOn << 6 | flags.hasRedundancy << 4 | flags.paddingValue << 1 | flags.isNonSync, flags.degradPrio & 0xf0 << 8, flags.degradPrio & 0x0f,
  15772. // sample_flags
  15773. cts >>> 24 & 0xff, cts >>> 16 & 0xff, cts >>> 8 & 0xff, cts & 0xff // sample_composition_time_offset
  15774. ], 12 + 16 * i);
  15775. }
  15776. return MP4.box(MP4.types.trun, array);
  15777. }
  15778. static initSegment(tracks) {
  15779. if (!MP4.types) {
  15780. MP4.init();
  15781. }
  15782. const movie = MP4.moov(tracks);
  15783. const result = appendUint8Array(MP4.FTYP, movie);
  15784. return result;
  15785. }
  15786. }
  15787. MP4.types = void 0;
  15788. MP4.HDLR_TYPES = void 0;
  15789. MP4.STTS = void 0;
  15790. MP4.STSC = void 0;
  15791. MP4.STCO = void 0;
  15792. MP4.STSZ = void 0;
  15793. MP4.VMHD = void 0;
  15794. MP4.SMHD = void 0;
  15795. MP4.STSD = void 0;
  15796. MP4.FTYP = void 0;
  15797. MP4.DINF = void 0;
  15798. const MPEG_TS_CLOCK_FREQ_HZ = 90000;
  15799. function toTimescaleFromBase(baseTime, destScale, srcBase = 1, round = false) {
  15800. const result = baseTime * destScale * srcBase; // equivalent to `(value * scale) / (1 / base)`
  15801. return round ? Math.round(result) : result;
  15802. }
  15803. function toMsFromMpegTsClock(baseTime, round = false) {
  15804. return toTimescaleFromBase(baseTime, 1000, 1 / MPEG_TS_CLOCK_FREQ_HZ, round);
  15805. }
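/* Illustrative sketch: MPEG-TS timestamps tick at 90 kHz, so for example
     toMsFromMpegTsClock(90000);      // -> 1000 (ms)
     toMsFromMpegTsClock(4501, true); // -> 50 (rounded from 50.011...)
   via toTimescaleFromBase(baseTime, 1000, 1 / MPEG_TS_CLOCK_FREQ_HZ). */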
  15806. const MAX_SILENT_FRAME_DURATION = 10 * 1000; // 10 seconds
  15807. const AAC_SAMPLES_PER_FRAME = 1024;
  15808. const MPEG_AUDIO_SAMPLE_PER_FRAME = 1152;
  15809. const AC3_SAMPLES_PER_FRAME = 1536;
  15810. let chromeVersion = null;
  15811. let safariWebkitVersion = null;
  15812. class MP4Remuxer {
  15813. constructor(observer, config, typeSupported, vendor = '') {
  15814. this.observer = void 0;
  15815. this.config = void 0;
  15816. this.typeSupported = void 0;
  15817. this.ISGenerated = false;
  15818. this._initPTS = null;
  15819. this._initDTS = null;
  15820. this.nextAvcDts = null;
  15821. this.nextAudioPts = null;
  15822. this.videoSampleDuration = null;
  15823. this.isAudioContiguous = false;
  15824. this.isVideoContiguous = false;
  15825. this.videoTrackConfig = void 0;
  15826. this.observer = observer;
  15827. this.config = config;
  15828. this.typeSupported = typeSupported;
  15829. this.ISGenerated = false;
  15830. if (chromeVersion === null) {
  15831. const userAgent = navigator.userAgent || '';
  15832. const result = userAgent.match(/Chrome\/(\d+)/i);
  15833. chromeVersion = result ? parseInt(result[1]) : 0;
  15834. }
  15835. if (safariWebkitVersion === null) {
  15836. const result = navigator.userAgent.match(/Safari\/(\d+)/i);
  15837. safariWebkitVersion = result ? parseInt(result[1]) : 0;
  15838. }
  15839. }
  15840. destroy() {
  15841. // @ts-ignore
  15842. this.config = this.videoTrackConfig = this._initPTS = this._initDTS = null;
  15843. }
  15844. resetTimeStamp(defaultTimeStamp) {
  15845. logger.log('[mp4-remuxer]: initPTS & initDTS reset');
  15846. this._initPTS = this._initDTS = defaultTimeStamp;
  15847. }
  15848. resetNextTimestamp() {
  15849. logger.log('[mp4-remuxer]: reset next timestamp');
  15850. this.isVideoContiguous = false;
  15851. this.isAudioContiguous = false;
  15852. }
  15853. resetInitSegment() {
  15854. logger.log('[mp4-remuxer]: ISGenerated flag reset');
  15855. this.ISGenerated = false;
  15856. this.videoTrackConfig = undefined;
  15857. }
  15858. getVideoStartPts(videoSamples) {
  15859. let rolloverDetected = false;
  15860. const startPTS = videoSamples.reduce((minPTS, sample) => {
  15861. const delta = sample.pts - minPTS;
  15862. if (delta < -4294967296) {
  15863. // 2^32, see PTSNormalize for reasoning, but we're hitting a rollover here, and we don't want that to impact the timeOffset calculation
  15864. rolloverDetected = true;
  15865. return normalizePts(minPTS, sample.pts);
  15866. } else if (delta > 0) {
  15867. return minPTS;
  15868. } else {
  15869. return sample.pts;
  15870. }
  15871. }, videoSamples[0].pts);
  15872. if (rolloverDetected) {
  15873. logger.debug('PTS rollover detected');
  15874. }
  15875. return startPTS;
  15876. }
  15877. remux(audioTrack, videoTrack, id3Track, textTrack, timeOffset, accurateTimeOffset, flush, playlistType) {
  15878. let video;
  15879. let audio;
  15880. let initSegment;
  15881. let text;
  15882. let id3;
  15883. let independent;
  15884. let audioTimeOffset = timeOffset;
  15885. let videoTimeOffset = timeOffset;
  15886. // If we're remuxing audio and video progressively, wait until we've received enough samples for each track before proceeding.
15887. // This is done to synchronize the audio and video streams. We know the current segment will have samples for a track if its "pid"
15888. // is greater than -1. The pid is set when the PMT, which contains the track list, is parsed.
  15889. // However, if the initSegment has already been generated, or we've reached the end of a segment (flush),
  15890. // then we can remux one track without waiting for the other.
  15891. const hasAudio = audioTrack.pid > -1;
  15892. const hasVideo = videoTrack.pid > -1;
  15893. const length = videoTrack.samples.length;
  15894. const enoughAudioSamples = audioTrack.samples.length > 0;
  15895. const enoughVideoSamples = flush && length > 0 || length > 1;
  15896. const canRemuxAvc = (!hasAudio || enoughAudioSamples) && (!hasVideo || enoughVideoSamples) || this.ISGenerated || flush;
  15897. if (canRemuxAvc) {
  15898. if (this.ISGenerated) {
  15899. var _videoTrack$pixelRati, _config$pixelRatio, _videoTrack$pixelRati2, _config$pixelRatio2;
  15900. const config = this.videoTrackConfig;
  15901. if (config && (videoTrack.width !== config.width || videoTrack.height !== config.height || ((_videoTrack$pixelRati = videoTrack.pixelRatio) == null ? void 0 : _videoTrack$pixelRati[0]) !== ((_config$pixelRatio = config.pixelRatio) == null ? void 0 : _config$pixelRatio[0]) || ((_videoTrack$pixelRati2 = videoTrack.pixelRatio) == null ? void 0 : _videoTrack$pixelRati2[1]) !== ((_config$pixelRatio2 = config.pixelRatio) == null ? void 0 : _config$pixelRatio2[1]))) {
  15902. this.resetInitSegment();
  15903. }
  15904. } else {
  15905. initSegment = this.generateIS(audioTrack, videoTrack, timeOffset, accurateTimeOffset);
  15906. }
  15907. const isVideoContiguous = this.isVideoContiguous;
  15908. let firstKeyFrameIndex = -1;
  15909. let firstKeyFramePTS;
  15910. if (enoughVideoSamples) {
  15911. firstKeyFrameIndex = findKeyframeIndex(videoTrack.samples);
  15912. if (!isVideoContiguous && this.config.forceKeyFrameOnDiscontinuity) {
  15913. independent = true;
  15914. if (firstKeyFrameIndex > 0) {
  15915. logger.warn(`[mp4-remuxer]: Dropped ${firstKeyFrameIndex} out of ${length} video samples due to a missing keyframe`);
  15916. const startPTS = this.getVideoStartPts(videoTrack.samples);
  15917. videoTrack.samples = videoTrack.samples.slice(firstKeyFrameIndex);
  15918. videoTrack.dropped += firstKeyFrameIndex;
  15919. videoTimeOffset += (videoTrack.samples[0].pts - startPTS) / videoTrack.inputTimeScale;
  15920. firstKeyFramePTS = videoTimeOffset;
  15921. } else if (firstKeyFrameIndex === -1) {
  15922. logger.warn(`[mp4-remuxer]: No keyframe found out of ${length} video samples`);
  15923. independent = false;
  15924. }
  15925. }
  15926. }
  15927. if (this.ISGenerated) {
  15928. if (enoughAudioSamples && enoughVideoSamples) {
  15929. // timeOffset is expected to be the offset of the first timestamp of this fragment (first DTS)
  15930. // if first audio DTS is not aligned with first video DTS then we need to take that into account
  15931. // when providing timeOffset to remuxAudio / remuxVideo. if we don't do that, there might be a permanent / small
  15932. // drift between audio and video streams
  15933. const startPTS = this.getVideoStartPts(videoTrack.samples);
  15934. const tsDelta = normalizePts(audioTrack.samples[0].pts, startPTS) - startPTS;
  15935. const audiovideoTimestampDelta = tsDelta / videoTrack.inputTimeScale;
  15936. audioTimeOffset += Math.max(0, audiovideoTimestampDelta);
  15937. videoTimeOffset += Math.max(0, -audiovideoTimestampDelta);
  15938. }
  15939. // Purposefully remuxing audio before video, so that remuxVideo can use nextAudioPts, which is calculated in remuxAudio.
  15940. if (enoughAudioSamples) {
  15941. // if initSegment was generated without audio samples, regenerate it again
  15942. if (!audioTrack.samplerate) {
  15943. logger.warn('[mp4-remuxer]: regenerate InitSegment as audio detected');
  15944. initSegment = this.generateIS(audioTrack, videoTrack, timeOffset, accurateTimeOffset);
  15945. }
  15946. audio = this.remuxAudio(audioTrack, audioTimeOffset, this.isAudioContiguous, accurateTimeOffset, hasVideo || enoughVideoSamples || playlistType === PlaylistLevelType.AUDIO ? videoTimeOffset : undefined);
  15947. if (enoughVideoSamples) {
  15948. const audioTrackLength = audio ? audio.endPTS - audio.startPTS : 0;
  15949. // if initSegment was generated without video samples, regenerate it again
  15950. if (!videoTrack.inputTimeScale) {
  15951. logger.warn('[mp4-remuxer]: regenerate InitSegment as video detected');
  15952. initSegment = this.generateIS(audioTrack, videoTrack, timeOffset, accurateTimeOffset);
  15953. }
  15954. video = this.remuxVideo(videoTrack, videoTimeOffset, isVideoContiguous, audioTrackLength);
  15955. }
  15956. } else if (enoughVideoSamples) {
  15957. video = this.remuxVideo(videoTrack, videoTimeOffset, isVideoContiguous, 0);
  15958. }
  15959. if (video) {
  15960. video.firstKeyFrame = firstKeyFrameIndex;
  15961. video.independent = firstKeyFrameIndex !== -1;
  15962. video.firstKeyFramePTS = firstKeyFramePTS;
  15963. }
  15964. }
  15965. }
  15966. // Allow ID3 and text to remux, even if more audio/video samples are required
  15967. if (this.ISGenerated && this._initPTS && this._initDTS) {
  15968. if (id3Track.samples.length) {
  15969. id3 = flushTextTrackMetadataCueSamples(id3Track, timeOffset, this._initPTS, this._initDTS);
  15970. }
  15971. if (textTrack.samples.length) {
  15972. text = flushTextTrackUserdataCueSamples(textTrack, timeOffset, this._initPTS);
  15973. }
  15974. }
  15975. return {
  15976. audio,
  15977. video,
  15978. initSegment,
  15979. independent,
  15980. text,
  15981. id3
  15982. };
  15983. }
  15984. generateIS(audioTrack, videoTrack, timeOffset, accurateTimeOffset) {
  15985. const audioSamples = audioTrack.samples;
  15986. const videoSamples = videoTrack.samples;
  15987. const typeSupported = this.typeSupported;
  15988. const tracks = {};
  15989. const _initPTS = this._initPTS;
  15990. let computePTSDTS = !_initPTS || accurateTimeOffset;
  15991. let container = 'audio/mp4';
  15992. let initPTS;
  15993. let initDTS;
  15994. let timescale;
  15995. if (computePTSDTS) {
  15996. initPTS = initDTS = Infinity;
  15997. }
  15998. if (audioTrack.config && audioSamples.length) {
  15999. // let's use audio sampling rate as MP4 time scale.
16000. // rationale is that there is an integer number of audio samples per audio frame (1024 for AAC)
  16001. // using audio sampling rate here helps having an integer MP4 frame duration
  16002. // this avoids potential rounding issue and AV sync issue
  16003. audioTrack.timescale = audioTrack.samplerate;
  16004. switch (audioTrack.segmentCodec) {
  16005. case 'mp3':
  16006. if (typeSupported.mpeg) {
  16007. // Chrome and Safari
  16008. container = 'audio/mpeg';
  16009. audioTrack.codec = '';
  16010. } else if (typeSupported.mp3) {
  16011. // Firefox
  16012. audioTrack.codec = 'mp3';
  16013. }
  16014. break;
  16015. case 'ac3':
  16016. audioTrack.codec = 'ac-3';
  16017. break;
  16018. }
  16019. tracks.audio = {
  16020. id: 'audio',
  16021. container: container,
  16022. codec: audioTrack.codec,
  16023. initSegment: audioTrack.segmentCodec === 'mp3' && typeSupported.mpeg ? new Uint8Array(0) : MP4.initSegment([audioTrack]),
  16024. metadata: {
  16025. channelCount: audioTrack.channelCount
  16026. }
  16027. };
  16028. if (computePTSDTS) {
  16029. timescale = audioTrack.inputTimeScale;
  16030. if (!_initPTS || timescale !== _initPTS.timescale) {
  16031. // remember first PTS of this demuxing context. for audio, PTS = DTS
  16032. initPTS = initDTS = audioSamples[0].pts - Math.round(timescale * timeOffset);
  16033. } else {
  16034. computePTSDTS = false;
  16035. }
  16036. }
  16037. }
  16038. if (videoTrack.sps && videoTrack.pps && videoSamples.length) {
  16039. // let's use input time scale as MP4 video timescale
  16040. // we use input time scale straight away to avoid rounding issues on frame duration / cts computation
  16041. videoTrack.timescale = videoTrack.inputTimeScale;
  16042. tracks.video = {
  16043. id: 'main',
  16044. container: 'video/mp4',
  16045. codec: videoTrack.codec,
  16046. initSegment: MP4.initSegment([videoTrack]),
  16047. metadata: {
  16048. width: videoTrack.width,
  16049. height: videoTrack.height
  16050. }
  16051. };
  16052. if (computePTSDTS) {
  16053. timescale = videoTrack.inputTimeScale;
  16054. if (!_initPTS || timescale !== _initPTS.timescale) {
  16055. const startPTS = this.getVideoStartPts(videoSamples);
  16056. const startOffset = Math.round(timescale * timeOffset);
  16057. initDTS = Math.min(initDTS, normalizePts(videoSamples[0].dts, startPTS) - startOffset);
  16058. initPTS = Math.min(initPTS, startPTS - startOffset);
  16059. } else {
  16060. computePTSDTS = false;
  16061. }
  16062. }
  16063. this.videoTrackConfig = {
  16064. width: videoTrack.width,
  16065. height: videoTrack.height,
  16066. pixelRatio: videoTrack.pixelRatio
  16067. };
  16068. }
  16069. if (Object.keys(tracks).length) {
  16070. this.ISGenerated = true;
  16071. if (computePTSDTS) {
  16072. this._initPTS = {
  16073. baseTime: initPTS,
  16074. timescale: timescale
  16075. };
  16076. this._initDTS = {
  16077. baseTime: initDTS,
  16078. timescale: timescale
  16079. };
  16080. } else {
  16081. initPTS = timescale = undefined;
  16082. }
  16083. return {
  16084. tracks,
  16085. initPTS,
  16086. timescale
  16087. };
  16088. }
  16089. }
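/* Illustrative sketch: using the audio sampling rate as the MP4 timescale (as done
   above) keeps every AAC frame an exact integer number of timescale units:
     // 1024 samples per AAC frame at a 44100 Hz timescale -> duration = 1024 units
     // at a 90000 Hz timescale the same frame would last 1024 / 44100 * 90000 ≈ 2089.8
     // units, which would need rounding and could slowly drift A/V sync. */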
  16090. remuxVideo(track, timeOffset, contiguous, audioTrackLength) {
  16091. const timeScale = track.inputTimeScale;
  16092. const inputSamples = track.samples;
  16093. const outputSamples = [];
  16094. const nbSamples = inputSamples.length;
  16095. const initPTS = this._initPTS;
  16096. let nextAvcDts = this.nextAvcDts;
  16097. let offset = 8;
  16098. let mp4SampleDuration = this.videoSampleDuration;
  16099. let firstDTS;
  16100. let lastDTS;
  16101. let minPTS = Number.POSITIVE_INFINITY;
  16102. let maxPTS = Number.NEGATIVE_INFINITY;
  16103. let sortSamples = false;
  16104. // if parsed fragment is contiguous with last one, let's use last DTS value as reference
  16105. if (!contiguous || nextAvcDts === null) {
  16106. const pts = timeOffset * timeScale;
  16107. const cts = inputSamples[0].pts - normalizePts(inputSamples[0].dts, inputSamples[0].pts);
  16108. if (chromeVersion && nextAvcDts !== null && Math.abs(pts - cts - nextAvcDts) < 15000) {
16109. // treat as contiguous to adjust samples that would otherwise produce video buffer gaps in Chrome
  16110. contiguous = true;
  16111. } else {
  16112. // if not contiguous, let's use target timeOffset
  16113. nextAvcDts = pts - cts;
  16114. }
  16115. }
16116. // PTS is coded on 33 bits, and can loop from -2^32 to 2^32
  16117. // PTSNormalize will make PTS/DTS value monotonic, we use last known DTS value as reference value
  16118. const initTime = initPTS.baseTime * timeScale / initPTS.timescale;
  16119. for (let i = 0; i < nbSamples; i++) {
  16120. const sample = inputSamples[i];
  16121. sample.pts = normalizePts(sample.pts - initTime, nextAvcDts);
  16122. sample.dts = normalizePts(sample.dts - initTime, nextAvcDts);
  16123. if (sample.dts < inputSamples[i > 0 ? i - 1 : i].dts) {
  16124. sortSamples = true;
  16125. }
  16126. }
  16127. // sort video samples by DTS then PTS then demux id order
  16128. if (sortSamples) {
  16129. inputSamples.sort(function (a, b) {
  16130. const deltadts = a.dts - b.dts;
  16131. const deltapts = a.pts - b.pts;
  16132. return deltadts || deltapts;
  16133. });
  16134. }
  16135. // Get first/last DTS
  16136. firstDTS = inputSamples[0].dts;
  16137. lastDTS = inputSamples[inputSamples.length - 1].dts;
16138. // Sample duration (as expected by trun MP4 boxes) should be the delta between sample DTS values;
16139. // use the average delta between consecutive DTS as that constant duration.
  16140. const inputDuration = lastDTS - firstDTS;
  16141. const averageSampleDuration = inputDuration ? Math.round(inputDuration / (nbSamples - 1)) : mp4SampleDuration || track.inputTimeScale / 30;
16142. // if fragments are contiguous, detect holes/overlaps between fragments
  16143. if (contiguous) {
  16144. // check timestamp continuity across consecutive fragments (this is to remove inter-fragment gap/hole)
  16145. const delta = firstDTS - nextAvcDts;
  16146. const foundHole = delta > averageSampleDuration;
  16147. const foundOverlap = delta < -1;
  16148. if (foundHole || foundOverlap) {
  16149. if (foundHole) {
  16150. logger.warn(`AVC: ${toMsFromMpegTsClock(delta, true)} ms (${delta}dts) hole between fragments detected at ${timeOffset.toFixed(3)}`);
  16151. } else {
  16152. logger.warn(`AVC: ${toMsFromMpegTsClock(-delta, true)} ms (${delta}dts) overlapping between fragments detected at ${timeOffset.toFixed(3)}`);
  16153. }
  16154. if (!foundOverlap || nextAvcDts >= inputSamples[0].pts || chromeVersion) {
  16155. firstDTS = nextAvcDts;
  16156. const firstPTS = inputSamples[0].pts - delta;
  16157. if (foundHole) {
  16158. inputSamples[0].dts = firstDTS;
  16159. inputSamples[0].pts = firstPTS;
  16160. } else {
  16161. for (let i = 0; i < inputSamples.length; i++) {
  16162. if (inputSamples[i].dts > firstPTS) {
  16163. break;
  16164. }
  16165. inputSamples[i].dts -= delta;
  16166. inputSamples[i].pts -= delta;
  16167. }
  16168. }
  16169. logger.log(`Video: Initial PTS/DTS adjusted: ${toMsFromMpegTsClock(firstPTS, true)}/${toMsFromMpegTsClock(firstDTS, true)}, delta: ${toMsFromMpegTsClock(delta, true)} ms`);
  16170. }
  16171. }
  16172. }
  16173. firstDTS = Math.max(0, firstDTS);
  16174. let nbNalu = 0;
  16175. let naluLen = 0;
  16176. let dtsStep = firstDTS;
  16177. for (let i = 0; i < nbSamples; i++) {
  16178. // compute total/avc sample length and nb of NAL units
  16179. const sample = inputSamples[i];
  16180. const units = sample.units;
  16181. const nbUnits = units.length;
  16182. let sampleLen = 0;
  16183. for (let j = 0; j < nbUnits; j++) {
  16184. sampleLen += units[j].data.length;
  16185. }
  16186. naluLen += sampleLen;
  16187. nbNalu += nbUnits;
  16188. sample.length = sampleLen;
  16189. // ensure sample monotonic DTS
  16190. if (sample.dts < dtsStep) {
  16191. sample.dts = dtsStep;
  16192. dtsStep += averageSampleDuration / 4 | 0 || 1;
  16193. } else {
  16194. dtsStep = sample.dts;
  16195. }
  16196. minPTS = Math.min(sample.pts, minPTS);
  16197. maxPTS = Math.max(sample.pts, maxPTS);
  16198. }
  16199. lastDTS = inputSamples[nbSamples - 1].dts;
  16200. /* concatenate the video data and construct the mdat in place
16201. (need 8 more bytes for the box length and mdat type) */
  16202. const mdatSize = naluLen + 4 * nbNalu + 8;
  16203. let mdat;
  16204. try {
  16205. mdat = new Uint8Array(mdatSize);
  16206. } catch (err) {
  16207. this.observer.emit(Events.ERROR, Events.ERROR, {
  16208. type: ErrorTypes.MUX_ERROR,
  16209. details: ErrorDetails.REMUX_ALLOC_ERROR,
  16210. fatal: false,
  16211. error: err,
  16212. bytes: mdatSize,
  16213. reason: `fail allocating video mdat ${mdatSize}`
  16214. });
  16215. return;
  16216. }
  16217. const view = new DataView(mdat.buffer);
  16218. view.setUint32(0, mdatSize);
  16219. mdat.set(MP4.types.mdat, 4);
  16220. let stretchedLastFrame = false;
  16221. let minDtsDelta = Number.POSITIVE_INFINITY;
  16222. let minPtsDelta = Number.POSITIVE_INFINITY;
  16223. let maxDtsDelta = Number.NEGATIVE_INFINITY;
  16224. let maxPtsDelta = Number.NEGATIVE_INFINITY;
  16225. for (let i = 0; i < nbSamples; i++) {
  16226. const VideoSample = inputSamples[i];
  16227. const VideoSampleUnits = VideoSample.units;
  16228. let mp4SampleLength = 0;
  16229. // convert NALU bitstream to MP4 format (prepend NALU with size field)
  16230. for (let j = 0, nbUnits = VideoSampleUnits.length; j < nbUnits; j++) {
  16231. const unit = VideoSampleUnits[j];
  16232. const unitData = unit.data;
  16233. const unitDataLen = unit.data.byteLength;
  16234. view.setUint32(offset, unitDataLen);
  16235. offset += 4;
  16236. mdat.set(unitData, offset);
  16237. offset += unitDataLen;
  16238. mp4SampleLength += 4 + unitDataLen;
  16239. }
  16240. // expected sample duration is the Decoding Timestamp diff of consecutive samples
  16241. let ptsDelta;
  16242. if (i < nbSamples - 1) {
  16243. mp4SampleDuration = inputSamples[i + 1].dts - VideoSample.dts;
  16244. ptsDelta = inputSamples[i + 1].pts - VideoSample.pts;
  16245. } else {
  16246. const config = this.config;
  16247. const lastFrameDuration = i > 0 ? VideoSample.dts - inputSamples[i - 1].dts : averageSampleDuration;
  16248. ptsDelta = i > 0 ? VideoSample.pts - inputSamples[i - 1].pts : averageSampleDuration;
  16249. if (config.stretchShortVideoTrack && this.nextAudioPts !== null) {
  16250. // In some cases, a segment's audio track duration may exceed the video track duration.
  16251. // Since we've already remuxed audio, and we know how long the audio track is, we look to
  16252. // see if the delta to the next segment is longer than maxBufferHole.
  16253. // If so, playback would potentially get stuck, so we artificially inflate
  16254. // the duration of the last frame to minimize any potential gap between segments.
  16255. const gapTolerance = Math.floor(config.maxBufferHole * timeScale);
  16256. const deltaToFrameEnd = (audioTrackLength ? minPTS + audioTrackLength * timeScale : this.nextAudioPts) - VideoSample.pts;
  16257. if (deltaToFrameEnd > gapTolerance) {
  16258. // We subtract lastFrameDuration from deltaToFrameEnd to try to prevent any video
  16259. // frame overlap. maxBufferHole should be >> lastFrameDuration anyway.
  16260. mp4SampleDuration = deltaToFrameEnd - lastFrameDuration;
  16261. if (mp4SampleDuration < 0) {
  16262. mp4SampleDuration = lastFrameDuration;
  16263. } else {
  16264. stretchedLastFrame = true;
  16265. }
  16266. logger.log(`[mp4-remuxer]: It is approximately ${deltaToFrameEnd / 90} ms to the next segment; using duration ${mp4SampleDuration / 90} ms for the last video frame.`);
  16267. } else {
  16268. mp4SampleDuration = lastFrameDuration;
  16269. }
  16270. } else {
  16271. mp4SampleDuration = lastFrameDuration;
  16272. }
  16273. }
  16274. const compositionTimeOffset = Math.round(VideoSample.pts - VideoSample.dts);
  16275. minDtsDelta = Math.min(minDtsDelta, mp4SampleDuration);
  16276. maxDtsDelta = Math.max(maxDtsDelta, mp4SampleDuration);
  16277. minPtsDelta = Math.min(minPtsDelta, ptsDelta);
  16278. maxPtsDelta = Math.max(maxPtsDelta, ptsDelta);
  16279. outputSamples.push(new Mp4Sample(VideoSample.key, mp4SampleDuration, mp4SampleLength, compositionTimeOffset));
  16280. }
  16281. if (outputSamples.length) {
  16282. if (chromeVersion) {
  16283. if (chromeVersion < 70) {
  16284. // Chrome workaround, mark first sample as being a Random Access Point (keyframe) to avoid sourcebuffer append issue
  16285. // https://code.google.com/p/chromium/issues/detail?id=229412
  16286. const flags = outputSamples[0].flags;
  16287. flags.dependsOn = 2;
  16288. flags.isNonSync = 0;
  16289. }
  16290. } else if (safariWebkitVersion) {
  16291. // Fix for "CNN special report, with CC" in test-streams (Safari browser only)
16292. // Ignore DTS when frame durations are irregular. Safari MSE does not handle this, leading to gaps.
  16293. if (maxPtsDelta - minPtsDelta < maxDtsDelta - minDtsDelta && averageSampleDuration / maxDtsDelta < 0.025 && outputSamples[0].cts === 0) {
  16294. logger.warn('Found irregular gaps in sample duration. Using PTS instead of DTS to determine MP4 sample duration.');
  16295. let dts = firstDTS;
  16296. for (let i = 0, len = outputSamples.length; i < len; i++) {
  16297. const nextDts = dts + outputSamples[i].duration;
  16298. const pts = dts + outputSamples[i].cts;
  16299. if (i < len - 1) {
  16300. const nextPts = nextDts + outputSamples[i + 1].cts;
  16301. outputSamples[i].duration = nextPts - pts;
  16302. } else {
  16303. outputSamples[i].duration = i ? outputSamples[i - 1].duration : averageSampleDuration;
  16304. }
  16305. outputSamples[i].cts = 0;
  16306. dts = nextDts;
  16307. }
  16308. }
  16309. }
  16310. }
  16311. // next AVC sample DTS should be equal to last sample DTS + last sample duration (in PES timescale)
  16312. mp4SampleDuration = stretchedLastFrame || !mp4SampleDuration ? averageSampleDuration : mp4SampleDuration;
  16313. this.nextAvcDts = nextAvcDts = lastDTS + mp4SampleDuration;
  16314. this.videoSampleDuration = mp4SampleDuration;
  16315. this.isVideoContiguous = true;
  16316. const moof = MP4.moof(track.sequenceNumber++, firstDTS, _extends({}, track, {
  16317. samples: outputSamples
  16318. }));
  16319. const type = 'video';
  16320. const data = {
  16321. data1: moof,
  16322. data2: mdat,
  16323. startPTS: minPTS / timeScale,
  16324. endPTS: (maxPTS + mp4SampleDuration) / timeScale,
  16325. startDTS: firstDTS / timeScale,
  16326. endDTS: nextAvcDts / timeScale,
  16327. type,
  16328. hasAudio: false,
  16329. hasVideo: true,
  16330. nb: outputSamples.length,
  16331. dropped: track.dropped
  16332. };
  16333. track.samples = [];
  16334. track.dropped = 0;
  16335. return data;
  16336. }
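/* Illustrative sketch: cross-fragment continuity is tracked through nextAvcDts.
     // end of this fragment:  nextAvcDts = lastDTS + mp4SampleDuration
     // next fragment (contiguous): delta = firstDTS - nextAvcDts
     //   delta > averageSampleDuration -> hole    (first sample is pulled back to nextAvcDts)
     //   delta < -1                    -> overlap (leading samples are shifted forward by -delta)
   Both cases are logged in milliseconds via toMsFromMpegTsClock. */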
  16337. getSamplesPerFrame(track) {
  16338. switch (track.segmentCodec) {
  16339. case 'mp3':
  16340. return MPEG_AUDIO_SAMPLE_PER_FRAME;
  16341. case 'ac3':
  16342. return AC3_SAMPLES_PER_FRAME;
  16343. default:
  16344. return AAC_SAMPLES_PER_FRAME;
  16345. }
  16346. }
  16347. remuxAudio(track, timeOffset, contiguous, accurateTimeOffset, videoTimeOffset) {
  16348. const inputTimeScale = track.inputTimeScale;
  16349. const mp4timeScale = track.samplerate ? track.samplerate : inputTimeScale;
  16350. const scaleFactor = inputTimeScale / mp4timeScale;
  16351. const mp4SampleDuration = this.getSamplesPerFrame(track);
  16352. const inputSampleDuration = mp4SampleDuration * scaleFactor;
  16353. const initPTS = this._initPTS;
  16354. const rawMPEG = track.segmentCodec === 'mp3' && this.typeSupported.mpeg;
  16355. const outputSamples = [];
  16356. const alignedWithVideo = videoTimeOffset !== undefined;
  16357. let inputSamples = track.samples;
  16358. let offset = rawMPEG ? 0 : 8;
  16359. let nextAudioPts = this.nextAudioPts || -1;
  16360. // window.audioSamples ? window.audioSamples.push(inputSamples.map(s => s.pts)) : (window.audioSamples = [inputSamples.map(s => s.pts)]);
  16361. // for audio samples, also consider consecutive fragments as being contiguous (even if a level switch occurs),
16362. // for the sake of clarity:
16363. // consecutive fragments are frags with
16364. // - less than a 100 ms gap between the new time offset (if accurate) and the next expected PTS, OR
16365. // - a distance of less than 20 audio frames
16366. // contiguous fragments are consecutive fragments from the same quality level (same level, new SN = old SN + 1)
16367. // this helps ensure audio continuity
16368. // and also avoids audio glitches/cuts when switching quality, or reporting a wrong duration on the first audio frame
  16369. const timeOffsetMpegTS = timeOffset * inputTimeScale;
  16370. const initTime = initPTS.baseTime * inputTimeScale / initPTS.timescale;
  16371. this.isAudioContiguous = contiguous = contiguous || inputSamples.length && nextAudioPts > 0 && (accurateTimeOffset && Math.abs(timeOffsetMpegTS - nextAudioPts) < 9000 || Math.abs(normalizePts(inputSamples[0].pts - initTime, timeOffsetMpegTS) - nextAudioPts) < 20 * inputSampleDuration);
  16372. // compute normalized PTS
  16373. inputSamples.forEach(function (sample) {
  16374. sample.pts = normalizePts(sample.pts - initTime, timeOffsetMpegTS);
  16375. });
  16376. if (!contiguous || nextAudioPts < 0) {
16377. // filter out samples with negative PTS that are not playable anyway
16378. // if we don't remove these negative samples, they will shift all audio samples forward,
16379. // leading to audio overlap between the current and next fragment
  16380. inputSamples = inputSamples.filter(sample => sample.pts >= 0);
  16381. // in case all samples have negative PTS, and have been filtered out, return now
  16382. if (!inputSamples.length) {
  16383. return;
  16384. }
  16385. if (videoTimeOffset === 0) {
  16386. // Set the start to 0 to match video so that start gaps larger than inputSampleDuration are filled with silence
  16387. nextAudioPts = 0;
  16388. } else if (accurateTimeOffset && !alignedWithVideo) {
  16389. // When not seeking, not live, and LevelDetails.PTSKnown, use fragment start as predicted next audio PTS
  16390. nextAudioPts = Math.max(0, timeOffsetMpegTS);
  16391. } else {
16392. // if frags are not contiguous and we can't trust the time offset, let's use the first sample PTS as the next audio PTS
  16393. nextAudioPts = inputSamples[0].pts;
  16394. }
  16395. }
  16396. // If the audio track is missing samples, the frames seem to get "left-shifted" within the
  16397. // resulting mp4 segment, causing sync issues and leaving gaps at the end of the audio segment.
  16398. // In an effort to prevent this from happening, we inject frames here where there are gaps.
  16399. // When possible, we inject a silent frame; when that's not possible, we duplicate the last
  16400. // frame.
  16401. if (track.segmentCodec === 'aac') {
  16402. const maxAudioFramesDrift = this.config.maxAudioFramesDrift;
  16403. for (let i = 0, nextPts = nextAudioPts; i < inputSamples.length; i++) {
  16404. // First, let's see how far off this frame is from where we expect it to be
  16405. const sample = inputSamples[i];
  16406. const pts = sample.pts;
  16407. const delta = pts - nextPts;
  16408. const duration = Math.abs(1000 * delta / inputTimeScale);
  16409. // When remuxing with video, if we're overlapping by more than a duration, drop this sample to stay in sync
  16410. if (delta <= -maxAudioFramesDrift * inputSampleDuration && alignedWithVideo) {
  16411. if (i === 0) {
  16412. logger.warn(`Audio frame @ ${(pts / inputTimeScale).toFixed(3)}s overlaps nextAudioPts by ${Math.round(1000 * delta / inputTimeScale)} ms.`);
  16413. this.nextAudioPts = nextAudioPts = nextPts = pts;
  16414. }
  16415. } // eslint-disable-line brace-style
  16416. // Insert missing frames if:
16417. // 1: We're more than maxAudioFramesDrift frames away
  16418. // 2: Not more than MAX_SILENT_FRAME_DURATION away
  16419. // 3: currentTime (aka nextPtsNorm) is not 0
  16420. // 4: remuxing with video (videoTimeOffset !== undefined)
  16421. else if (delta >= maxAudioFramesDrift * inputSampleDuration && duration < MAX_SILENT_FRAME_DURATION && alignedWithVideo) {
  16422. let missing = Math.round(delta / inputSampleDuration);
  16423. // Adjust nextPts so that silent samples are aligned with media pts. This will prevent media samples from
  16424. // later being shifted if nextPts is based on timeOffset and delta is not a multiple of inputSampleDuration.
  16425. nextPts = pts - missing * inputSampleDuration;
  16426. if (nextPts < 0) {
  16427. missing--;
  16428. nextPts += inputSampleDuration;
  16429. }
  16430. if (i === 0) {
  16431. this.nextAudioPts = nextAudioPts = nextPts;
  16432. }
  16433. logger.warn(`[mp4-remuxer]: Injecting ${missing} audio frame @ ${(nextPts / inputTimeScale).toFixed(3)}s due to ${Math.round(1000 * delta / inputTimeScale)} ms gap.`);
  16434. for (let j = 0; j < missing; j++) {
  16435. const newStamp = Math.max(nextPts, 0);
  16436. let fillFrame = AAC.getSilentFrame(track.manifestCodec || track.codec, track.channelCount);
  16437. if (!fillFrame) {
  16438. logger.log('[mp4-remuxer]: Unable to get silent frame for given audio codec; duplicating last frame instead.');
  16439. fillFrame = sample.unit.subarray();
  16440. }
  16441. inputSamples.splice(i, 0, {
  16442. unit: fillFrame,
  16443. pts: newStamp
  16444. });
  16445. nextPts += inputSampleDuration;
  16446. i++;
  16447. }
  16448. }
  16449. sample.pts = nextPts;
  16450. nextPts += inputSampleDuration;
  16451. }
  16452. }
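// --- Illustrative note (not part of hls.js; numbers assume the default config) ---
// Worked example of the gap-filling loop above, assuming a 90 kHz input timescale
// and 44.1 kHz AAC, so inputSampleDuration = 1024 * 90000 / 44100 ≈ 2090 ticks (~23 ms):
//   delta = pts - nextPts = 6270 ticks  ->  duration ≈ 1000 * 6270 / 90000 ≈ 70 ms
//   missing = Math.round(6270 / 2090) = 3 silent frames are spliced in, and nextPts
//   is pulled back to pts - 3 * 2090 so the real samples keep their timestamps.
// Gaps of MAX_SILENT_FRAME_DURATION or more are not filled; the sample is simply
// re-timestamped to nextPts at the bottom of the loop.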
  16453. let firstPTS = null;
  16454. let lastPTS = null;
  16455. let mdat;
  16456. let mdatSize = 0;
  16457. let sampleLength = inputSamples.length;
  16458. while (sampleLength--) {
  16459. mdatSize += inputSamples[sampleLength].unit.byteLength;
  16460. }
  16461. for (let j = 0, _nbSamples = inputSamples.length; j < _nbSamples; j++) {
  16462. const audioSample = inputSamples[j];
  16463. const unit = audioSample.unit;
  16464. let pts = audioSample.pts;
  16465. if (lastPTS !== null) {
  16466. // If we have more than one sample, set the duration of the sample to the "real" duration; the PTS diff with
  16467. // the previous sample
  16468. const prevSample = outputSamples[j - 1];
  16469. prevSample.duration = Math.round((pts - lastPTS) / scaleFactor);
  16470. } else {
  16471. if (contiguous && track.segmentCodec === 'aac') {
  16472. // set PTS/DTS to expected PTS/DTS
  16473. pts = nextAudioPts;
  16474. }
  16475. // remember first PTS of our audioSamples
  16476. firstPTS = pts;
  16477. if (mdatSize > 0) {
  16478. /* concatenate the audio data and construct the mdat in place
  16479. (need 8 more bytes to fill length and mdat type) */
  16480. mdatSize += offset;
  16481. try {
  16482. mdat = new Uint8Array(mdatSize);
  16483. } catch (err) {
  16484. this.observer.emit(Events.ERROR, Events.ERROR, {
  16485. type: ErrorTypes.MUX_ERROR,
  16486. details: ErrorDetails.REMUX_ALLOC_ERROR,
  16487. fatal: false,
  16488. error: err,
  16489. bytes: mdatSize,
  16490. reason: `fail allocating audio mdat ${mdatSize}`
  16491. });
  16492. return;
  16493. }
  16494. if (!rawMPEG) {
  16495. const view = new DataView(mdat.buffer);
  16496. view.setUint32(0, mdatSize);
  16497. mdat.set(MP4.types.mdat, 4);
  16498. }
  16499. } else {
  16500. // no audio samples
  16501. return;
  16502. }
  16503. }
  16504. mdat.set(unit, offset);
  16505. const unitLen = unit.byteLength;
  16506. offset += unitLen;
  16507. // Default the sample's duration to the computed mp4SampleDuration, which will either be 1024 for AAC or 1152 for MPEG
  16508. // In the case that we have 1 sample, this will be the duration. If we have more than one sample, the duration
  16509. // becomes the PTS diff with the previous sample
  16510. outputSamples.push(new Mp4Sample(true, mp4SampleDuration, unitLen, 0));
  16511. lastPTS = pts;
  16512. }
  16513. // We could end up with no audio samples if all input samples were overlapping with the previously remuxed ones
  16514. const nbSamples = outputSamples.length;
  16515. if (!nbSamples) {
  16516. return;
  16517. }
  16518. // The next audio sample PTS should be equal to last sample PTS + duration
  16519. const lastSample = outputSamples[outputSamples.length - 1];
  16520. this.nextAudioPts = nextAudioPts = lastPTS + scaleFactor * lastSample.duration;
  16521. // Set the track samples from inputSamples to outputSamples before remuxing
  16522. const moof = rawMPEG ? new Uint8Array(0) : MP4.moof(track.sequenceNumber++, firstPTS / scaleFactor, _extends({}, track, {
  16523. samples: outputSamples
  16524. }));
  16525. // Clear the track samples. This also clears the samples array in the demuxer, since the reference is shared
  16526. track.samples = [];
  16527. const start = firstPTS / inputTimeScale;
  16528. const end = nextAudioPts / inputTimeScale;
  16529. const type = 'audio';
  16530. const audioData = {
  16531. data1: moof,
  16532. data2: mdat,
  16533. startPTS: start,
  16534. endPTS: end,
  16535. startDTS: start,
  16536. endDTS: end,
  16537. type,
  16538. hasAudio: true,
  16539. hasVideo: false,
  16540. nb: nbSamples
  16541. };
  16542. this.isAudioContiguous = true;
  16543. return audioData;
  16544. }
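// --- Illustrative note (not part of hls.js) ---
// remuxAudio() returns one fmp4 media chunk: data1 is the 'moof' box (an empty
// array for raw MPEG audio passthrough) and data2 the 'mdat'. For AAC, the mdat's
// first 8 bytes are the big-endian box size and the 'mdat' fourcc, followed by the
// concatenated frames; raw MPEG audio is emitted without the box header. The
// start/end PTS and DTS fields are reported in seconds.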
  16545. remuxEmptyAudio(track, timeOffset, contiguous, videoData) {
  16546. const inputTimeScale = track.inputTimeScale;
  16547. const mp4timeScale = track.samplerate ? track.samplerate : inputTimeScale;
  16548. const scaleFactor = inputTimeScale / mp4timeScale;
  16549. const nextAudioPts = this.nextAudioPts;
  16550. // sync with video's timestamp
  16551. const initDTS = this._initDTS;
  16552. const init90kHz = initDTS.baseTime * 90000 / initDTS.timescale;
  16553. const startDTS = (nextAudioPts !== null ? nextAudioPts : videoData.startDTS * inputTimeScale) + init90kHz;
  16554. const endDTS = videoData.endDTS * inputTimeScale + init90kHz;
  16555. // one sample's duration value
  16556. const frameDuration = scaleFactor * AAC_SAMPLES_PER_FRAME;
16557. // number of samples needed to cover this segment's duration
  16558. const nbSamples = Math.ceil((endDTS - startDTS) / frameDuration);
  16559. // silent frame
  16560. const silentFrame = AAC.getSilentFrame(track.manifestCodec || track.codec, track.channelCount);
  16561. logger.warn('[mp4-remuxer]: remux empty Audio');
  16562. // Can't remux if we can't generate a silent frame...
  16563. if (!silentFrame) {
  16564. logger.trace('[mp4-remuxer]: Unable to remuxEmptyAudio since we were unable to get a silent frame for given audio codec');
  16565. return;
  16566. }
  16567. const samples = [];
  16568. for (let i = 0; i < nbSamples; i++) {
  16569. const stamp = startDTS + i * frameDuration;
  16570. samples.push({
  16571. unit: silentFrame,
  16572. pts: stamp,
  16573. dts: stamp
  16574. });
  16575. }
  16576. track.samples = samples;
  16577. return this.remuxAudio(track, timeOffset, contiguous, false);
  16578. }
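// --- Illustrative note (not part of hls.js) ---
// Example of the silent-frame math in remuxEmptyAudio above, assuming a 90 kHz
// input timescale and a 48 kHz AAC track:
//   scaleFactor   = 90000 / 48000 = 1.875
//   frameDuration = 1.875 * 1024  = 1920 ticks (~21.3 ms per AAC frame)
// so a 2 s video-only span (180000 ticks) yields nbSamples = Math.ceil(180000 / 1920) = 94
// silent frames, which are then handed to remuxAudio().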
  16579. }
  16580. function normalizePts(value, reference) {
  16581. let offset;
  16582. if (reference === null) {
  16583. return value;
  16584. }
  16585. if (reference < value) {
  16586. // - 2^33
  16587. offset = -8589934592;
  16588. } else {
  16589. // + 2^33
  16590. offset = 8589934592;
  16591. }
  16592. /* PTS is 33bit (from 0 to 2^33 -1)
  16593. if diff between value and reference is bigger than half of the amplitude (2^32) then it means that
16594. PTS looping occurred. Fill the gap */
  16595. while (Math.abs(value - reference) > 4294967296) {
  16596. value += offset;
  16597. }
  16598. return value;
  16599. }
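// --- Illustrative note (not part of hls.js) ---
// normalizePts() compensates for the 33-bit MPEG-TS PTS rollover. For example,
// with a reference close to the wrap point and a value that has already wrapped:
//   normalizePts(500, 8589933000)  ->  500 + 8589934592 = 8589935092
// because |500 - 8589933000| > 2^32, the value is shifted by +2^33 until it lands
// within 2^32 of the reference; values ahead of the reference by more than 2^32
// are shifted by -2^33 instead.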
  16600. function findKeyframeIndex(samples) {
  16601. for (let i = 0; i < samples.length; i++) {
  16602. if (samples[i].key) {
  16603. return i;
  16604. }
  16605. }
  16606. return -1;
  16607. }
  16608. function flushTextTrackMetadataCueSamples(track, timeOffset, initPTS, initDTS) {
  16609. const length = track.samples.length;
  16610. if (!length) {
  16611. return;
  16612. }
  16613. const inputTimeScale = track.inputTimeScale;
  16614. for (let index = 0; index < length; index++) {
  16615. const sample = track.samples[index];
  16616. // setting id3 pts, dts to relative time
  16617. // using this._initPTS and this._initDTS to calculate relative time
  16618. sample.pts = normalizePts(sample.pts - initPTS.baseTime * inputTimeScale / initPTS.timescale, timeOffset * inputTimeScale) / inputTimeScale;
  16619. sample.dts = normalizePts(sample.dts - initDTS.baseTime * inputTimeScale / initDTS.timescale, timeOffset * inputTimeScale) / inputTimeScale;
  16620. }
  16621. const samples = track.samples;
  16622. track.samples = [];
  16623. return {
  16624. samples
  16625. };
  16626. }
  16627. function flushTextTrackUserdataCueSamples(track, timeOffset, initPTS) {
  16628. const length = track.samples.length;
  16629. if (!length) {
  16630. return;
  16631. }
  16632. const inputTimeScale = track.inputTimeScale;
  16633. for (let index = 0; index < length; index++) {
  16634. const sample = track.samples[index];
  16635. // setting text pts, dts to relative time
  16636. // using this._initPTS and this._initDTS to calculate relative time
  16637. sample.pts = normalizePts(sample.pts - initPTS.baseTime * inputTimeScale / initPTS.timescale, timeOffset * inputTimeScale) / inputTimeScale;
  16638. }
  16639. track.samples.sort((a, b) => a.pts - b.pts);
  16640. const samples = track.samples;
  16641. track.samples = [];
  16642. return {
  16643. samples
  16644. };
  16645. }
  16646. class Mp4Sample {
  16647. constructor(isKeyframe, duration, size, cts) {
  16648. this.size = void 0;
  16649. this.duration = void 0;
  16650. this.cts = void 0;
  16651. this.flags = void 0;
  16652. this.duration = duration;
  16653. this.size = size;
  16654. this.cts = cts;
  16655. this.flags = {
  16656. isLeading: 0,
  16657. isDependedOn: 0,
  16658. hasRedundancy: 0,
  16659. degradPrio: 0,
  16660. dependsOn: isKeyframe ? 2 : 1,
  16661. isNonSync: isKeyframe ? 0 : 1
  16662. };
  16663. }
  16664. }
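// --- Illustrative note (not part of hls.js) ---
// Each Mp4Sample becomes one sample entry in the 'trun' box written by MP4.moof.
// For example, an AAC frame of 371 bytes remuxed at the track timescale would be
// described as:
//   new Mp4Sample(true, 1024, 371, 0)
// i.e. a sync sample (dependsOn = 2, isNonSync = 0) lasting 1024 timescale units
// with a composition time offset of 0.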
  16665. class PassThroughRemuxer {
  16666. constructor() {
  16667. this.emitInitSegment = false;
  16668. this.audioCodec = void 0;
  16669. this.videoCodec = void 0;
  16670. this.initData = void 0;
  16671. this.initPTS = null;
  16672. this.initTracks = void 0;
  16673. this.lastEndTime = null;
  16674. }
  16675. destroy() {}
  16676. resetTimeStamp(defaultInitPTS) {
  16677. this.initPTS = defaultInitPTS;
  16678. this.lastEndTime = null;
  16679. }
  16680. resetNextTimestamp() {
  16681. this.lastEndTime = null;
  16682. }
  16683. resetInitSegment(initSegment, audioCodec, videoCodec, decryptdata) {
  16684. this.audioCodec = audioCodec;
  16685. this.videoCodec = videoCodec;
  16686. this.generateInitSegment(patchEncyptionData(initSegment, decryptdata));
  16687. this.emitInitSegment = true;
  16688. }
  16689. generateInitSegment(initSegment) {
  16690. let {
  16691. audioCodec,
  16692. videoCodec
  16693. } = this;
  16694. if (!(initSegment != null && initSegment.byteLength)) {
  16695. this.initTracks = undefined;
  16696. this.initData = undefined;
  16697. return;
  16698. }
  16699. const initData = this.initData = parseInitSegment(initSegment);
  16700. // Get codec from initSegment or fallback to default
  16701. if (initData.audio) {
  16702. audioCodec = getParsedTrackCodec(initData.audio, ElementaryStreamTypes.AUDIO);
  16703. }
  16704. if (initData.video) {
  16705. videoCodec = getParsedTrackCodec(initData.video, ElementaryStreamTypes.VIDEO);
  16706. }
  16707. const tracks = {};
  16708. if (initData.audio && initData.video) {
  16709. tracks.audiovideo = {
  16710. container: 'video/mp4',
  16711. codec: audioCodec + ',' + videoCodec,
  16712. initSegment,
  16713. id: 'main'
  16714. };
  16715. } else if (initData.audio) {
  16716. tracks.audio = {
  16717. container: 'audio/mp4',
  16718. codec: audioCodec,
  16719. initSegment,
  16720. id: 'audio'
  16721. };
  16722. } else if (initData.video) {
  16723. tracks.video = {
  16724. container: 'video/mp4',
  16725. codec: videoCodec,
  16726. initSegment,
  16727. id: 'main'
  16728. };
  16729. } else {
  16730. logger.warn('[passthrough-remuxer.ts]: initSegment does not contain moov or trak boxes.');
  16731. }
  16732. this.initTracks = tracks;
  16733. }
  16734. remux(audioTrack, videoTrack, id3Track, textTrack, timeOffset, accurateTimeOffset) {
  16735. var _initData, _initData2;
  16736. let {
  16737. initPTS,
  16738. lastEndTime
  16739. } = this;
  16740. const result = {
  16741. audio: undefined,
  16742. video: undefined,
  16743. text: textTrack,
  16744. id3: id3Track,
  16745. initSegment: undefined
  16746. };
16747. // If we haven't yet set lastEndTime, or it was reset, set it to the provided timeOffset. We want to use
16748. // lastEndTime over timeOffset whenever possible; during progressive playback, the media source will not update
  16749. // the media duration (which is what timeOffset is provided as) before we need to process the next chunk.
  16750. if (!isFiniteNumber(lastEndTime)) {
  16751. lastEndTime = this.lastEndTime = timeOffset || 0;
  16752. }
  16753. // The binary segment data is added to the videoTrack in the mp4demuxer. We don't check to see if the data is only
  16754. // audio or video (or both); adding it to video was an arbitrary choice.
  16755. const data = videoTrack.samples;
  16756. if (!(data != null && data.length)) {
  16757. return result;
  16758. }
  16759. const initSegment = {
  16760. initPTS: undefined,
  16761. timescale: 1
  16762. };
  16763. let initData = this.initData;
  16764. if (!((_initData = initData) != null && _initData.length)) {
  16765. this.generateInitSegment(data);
  16766. initData = this.initData;
  16767. }
  16768. if (!((_initData2 = initData) != null && _initData2.length)) {
  16769. // We can't remux if the initSegment could not be generated
  16770. logger.warn('[passthrough-remuxer.ts]: Failed to generate initSegment.');
  16771. return result;
  16772. }
  16773. if (this.emitInitSegment) {
  16774. initSegment.tracks = this.initTracks;
  16775. this.emitInitSegment = false;
  16776. }
  16777. const duration = getDuration(data, initData);
  16778. const startDTS = getStartDTS(initData, data);
  16779. const decodeTime = startDTS === null ? timeOffset : startDTS;
  16780. if (isInvalidInitPts(initPTS, decodeTime, timeOffset, duration) || initSegment.timescale !== initPTS.timescale && accurateTimeOffset) {
  16781. initSegment.initPTS = decodeTime - timeOffset;
  16782. if (initPTS && initPTS.timescale === 1) {
  16783. logger.warn(`Adjusting initPTS by ${initSegment.initPTS - initPTS.baseTime}`);
  16784. }
  16785. this.initPTS = initPTS = {
  16786. baseTime: initSegment.initPTS,
  16787. timescale: 1
  16788. };
  16789. }
  16790. const startTime = audioTrack ? decodeTime - initPTS.baseTime / initPTS.timescale : lastEndTime;
  16791. const endTime = startTime + duration;
  16792. offsetStartDTS(initData, data, initPTS.baseTime / initPTS.timescale);
  16793. if (duration > 0) {
  16794. this.lastEndTime = endTime;
  16795. } else {
  16796. logger.warn('Duration parsed from mp4 should be greater than zero');
  16797. this.resetNextTimestamp();
  16798. }
  16799. const hasAudio = !!initData.audio;
  16800. const hasVideo = !!initData.video;
  16801. let type = '';
  16802. if (hasAudio) {
  16803. type += 'audio';
  16804. }
  16805. if (hasVideo) {
  16806. type += 'video';
  16807. }
  16808. const track = {
  16809. data1: data,
  16810. startPTS: startTime,
  16811. startDTS: startTime,
  16812. endPTS: endTime,
  16813. endDTS: endTime,
  16814. type,
  16815. hasAudio,
  16816. hasVideo,
  16817. nb: 1,
  16818. dropped: 0
  16819. };
  16820. result.audio = track.type === 'audio' ? track : undefined;
  16821. result.video = track.type !== 'audio' ? track : undefined;
  16822. result.initSegment = initSegment;
  16823. result.id3 = flushTextTrackMetadataCueSamples(id3Track, timeOffset, initPTS, initPTS);
  16824. if (textTrack.samples.length) {
  16825. result.text = flushTextTrackUserdataCueSamples(textTrack, timeOffset, initPTS);
  16826. }
  16827. return result;
  16828. }
  16829. }
  16830. function isInvalidInitPts(initPTS, startDTS, timeOffset, duration) {
  16831. if (initPTS === null) {
  16832. return true;
  16833. }
16834. // initPTS is invalid when the resulting start time drifts from timeOffset by more than the segment duration (at least one second)
  16835. const minDuration = Math.max(duration, 1);
  16836. const startTime = startDTS - initPTS.baseTime / initPTS.timescale;
  16837. return Math.abs(startTime - timeOffset) > minDuration;
  16838. }
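// --- Illustrative note (not part of hls.js) ---
// Example of the check above with initPTS = { baseTime: 9000, timescale: 90000 }
// (i.e. 0.1 s), startDTS = 10.2 s, timeOffset = 10 s and duration = 6 s:
//   startTime = 10.2 - 0.1 = 10.1;  |10.1 - 10| = 0.1 <= max(6, 1)  ->  still valid
// Had startDTS been 20.2 s, the 10.1 s drift would exceed the 6 s limit, the
// function would return true, and remux() would derive a fresh initPTS.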
  16839. function getParsedTrackCodec(track, type) {
  16840. const parsedCodec = track == null ? void 0 : track.codec;
  16841. if (parsedCodec && parsedCodec.length > 4) {
  16842. return parsedCodec;
  16843. }
  16844. if (type === ElementaryStreamTypes.AUDIO) {
  16845. if (parsedCodec === 'ec-3' || parsedCodec === 'ac-3' || parsedCodec === 'alac') {
  16846. return parsedCodec;
  16847. }
  16848. if (parsedCodec === 'fLaC' || parsedCodec === 'Opus') {
  16849. // Opting not to get `preferManagedMediaSource` from player config for isSupported() check for simplicity
  16850. const preferManagedMediaSource = false;
  16851. return getCodecCompatibleName(parsedCodec, preferManagedMediaSource);
  16852. }
  16853. const result = 'mp4a.40.5';
  16854. logger.info(`Parsed audio codec "${parsedCodec}" or audio object type not handled. Using "${result}"`);
  16855. return result;
  16856. }
  16857. // Provide defaults based on codec type
  16858. // This allows for some playback of some fmp4 playlists without CODECS defined in manifest
  16859. logger.warn(`Unhandled video codec "${parsedCodec}"`);
  16860. if (parsedCodec === 'hvc1' || parsedCodec === 'hev1') {
  16861. return 'hvc1.1.6.L120.90';
  16862. }
  16863. if (parsedCodec === 'av01') {
  16864. return 'av01.0.04M.08';
  16865. }
  16866. return 'avc1.42e01e';
  16867. }
16868. /** returns `undefined` if `self` is missing, e.g. in node */
  16869. const optionalSelf = typeof self !== 'undefined' ? self : undefined;
  16870. let now;
  16871. // performance.now() not available on WebWorker, at least on Safari Desktop
  16872. try {
  16873. now = self.performance.now.bind(self.performance);
  16874. } catch (err) {
  16875. logger.debug('Unable to use Performance API on this environment');
  16876. now = optionalSelf == null ? void 0 : optionalSelf.Date.now;
  16877. }
  16878. const muxConfig = [{
  16879. demux: MP4Demuxer,
  16880. remux: PassThroughRemuxer
  16881. }, {
  16882. demux: TSDemuxer,
  16883. remux: MP4Remuxer
  16884. }, {
  16885. demux: AACDemuxer,
  16886. remux: MP4Remuxer
  16887. }, {
  16888. demux: MP3Demuxer,
  16889. remux: MP4Remuxer
  16890. }];
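// --- Illustrative note (not part of hls.js) ---
// configureTransmuxer() below walks this array in order and picks the first entry
// whose demuxer's static probe() recognises the payload: fragmented MP4 is passed
// through untouched by PassThroughRemuxer, while MPEG-TS, AAC and MP3 elementary
// streams are demuxed and remuxed into fmp4 by MP4Remuxer.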
  16891. class Transmuxer {
  16892. constructor(observer, typeSupported, config, vendor, id) {
  16893. this.async = false;
  16894. this.observer = void 0;
  16895. this.typeSupported = void 0;
  16896. this.config = void 0;
  16897. this.vendor = void 0;
  16898. this.id = void 0;
  16899. this.demuxer = void 0;
  16900. this.remuxer = void 0;
  16901. this.decrypter = void 0;
  16902. this.probe = void 0;
  16903. this.decryptionPromise = null;
  16904. this.transmuxConfig = void 0;
  16905. this.currentTransmuxState = void 0;
  16906. this.observer = observer;
  16907. this.typeSupported = typeSupported;
  16908. this.config = config;
  16909. this.vendor = vendor;
  16910. this.id = id;
  16911. }
  16912. configure(transmuxConfig) {
  16913. this.transmuxConfig = transmuxConfig;
  16914. if (this.decrypter) {
  16915. this.decrypter.reset();
  16916. }
  16917. }
  16918. push(data, decryptdata, chunkMeta, state) {
  16919. const stats = chunkMeta.transmuxing;
  16920. stats.executeStart = now();
  16921. let uintData = new Uint8Array(data);
  16922. const {
  16923. currentTransmuxState,
  16924. transmuxConfig
  16925. } = this;
  16926. if (state) {
  16927. this.currentTransmuxState = state;
  16928. }
  16929. const {
  16930. contiguous,
  16931. discontinuity,
  16932. trackSwitch,
  16933. accurateTimeOffset,
  16934. timeOffset,
  16935. initSegmentChange
  16936. } = state || currentTransmuxState;
  16937. const {
  16938. audioCodec,
  16939. videoCodec,
  16940. defaultInitPts,
  16941. duration,
  16942. initSegmentData
  16943. } = transmuxConfig;
  16944. const keyData = getEncryptionType(uintData, decryptdata);
  16945. if (keyData && keyData.method === 'AES-128') {
  16946. const decrypter = this.getDecrypter();
  16947. // Software decryption is synchronous; webCrypto is not
  16948. if (decrypter.isSync()) {
  16949. // Software decryption is progressive. Progressive decryption may not return a result on each call. Any cached
  16950. // data is handled in the flush() call
  16951. let decryptedData = decrypter.softwareDecrypt(uintData, keyData.key.buffer, keyData.iv.buffer);
  16952. // For Low-Latency HLS Parts, decrypt in place, since part parsing is expected on push progress
  16953. const loadingParts = chunkMeta.part > -1;
  16954. if (loadingParts) {
  16955. decryptedData = decrypter.flush();
  16956. }
  16957. if (!decryptedData) {
  16958. stats.executeEnd = now();
  16959. return emptyResult(chunkMeta);
  16960. }
  16961. uintData = new Uint8Array(decryptedData);
  16962. } else {
  16963. this.decryptionPromise = decrypter.webCryptoDecrypt(uintData, keyData.key.buffer, keyData.iv.buffer).then(decryptedData => {
  16964. // Calling push here is important; if flush() is called while this is still resolving, this ensures that
  16965. // the decrypted data has been transmuxed
  16966. const result = this.push(decryptedData, null, chunkMeta);
  16967. this.decryptionPromise = null;
  16968. return result;
  16969. });
  16970. return this.decryptionPromise;
  16971. }
  16972. }
  16973. const resetMuxers = this.needsProbing(discontinuity, trackSwitch);
  16974. if (resetMuxers) {
  16975. const error = this.configureTransmuxer(uintData);
  16976. if (error) {
  16977. logger.warn(`[transmuxer] ${error.message}`);
  16978. this.observer.emit(Events.ERROR, Events.ERROR, {
  16979. type: ErrorTypes.MEDIA_ERROR,
  16980. details: ErrorDetails.FRAG_PARSING_ERROR,
  16981. fatal: false,
  16982. error,
  16983. reason: error.message
  16984. });
  16985. stats.executeEnd = now();
  16986. return emptyResult(chunkMeta);
  16987. }
  16988. }
  16989. if (discontinuity || trackSwitch || initSegmentChange || resetMuxers) {
  16990. this.resetInitSegment(initSegmentData, audioCodec, videoCodec, duration, decryptdata);
  16991. }
  16992. if (discontinuity || initSegmentChange || resetMuxers) {
  16993. this.resetInitialTimestamp(defaultInitPts);
  16994. }
  16995. if (!contiguous) {
  16996. this.resetContiguity();
  16997. }
  16998. const result = this.transmux(uintData, keyData, timeOffset, accurateTimeOffset, chunkMeta);
  16999. const currentState = this.currentTransmuxState;
  17000. currentState.contiguous = true;
  17001. currentState.discontinuity = false;
  17002. currentState.trackSwitch = false;
  17003. stats.executeEnd = now();
  17004. return result;
  17005. }
  17006. // Due to data caching, flush calls can produce more than one TransmuxerResult (hence the Array type)
  17007. flush(chunkMeta) {
  17008. const stats = chunkMeta.transmuxing;
  17009. stats.executeStart = now();
  17010. const {
  17011. decrypter,
  17012. currentTransmuxState,
  17013. decryptionPromise
  17014. } = this;
  17015. if (decryptionPromise) {
  17016. // Upon resolution, the decryption promise calls push() and returns its TransmuxerResult up the stack. Therefore
  17017. // only flushing is required for async decryption
  17018. return decryptionPromise.then(() => {
  17019. return this.flush(chunkMeta);
  17020. });
  17021. }
  17022. const transmuxResults = [];
  17023. const {
  17024. timeOffset
  17025. } = currentTransmuxState;
  17026. if (decrypter) {
  17027. // The decrypter may have data cached, which needs to be demuxed. In this case we'll have two TransmuxResults
  17028. // This happens in the case that we receive only 1 push call for a segment (either for non-progressive downloads,
  17029. // or for progressive downloads with small segments)
  17030. const decryptedData = decrypter.flush();
  17031. if (decryptedData) {
  17032. // Push always returns a TransmuxerResult if decryptdata is null
  17033. transmuxResults.push(this.push(decryptedData, null, chunkMeta));
  17034. }
  17035. }
  17036. const {
  17037. demuxer,
  17038. remuxer
  17039. } = this;
  17040. if (!demuxer || !remuxer) {
17041. // If probing failed, then Hls.js has been given content it's not able to handle
  17042. stats.executeEnd = now();
  17043. return [emptyResult(chunkMeta)];
  17044. }
  17045. const demuxResultOrPromise = demuxer.flush(timeOffset);
  17046. if (isPromise(demuxResultOrPromise)) {
  17047. // Decrypt final SAMPLE-AES samples
  17048. return demuxResultOrPromise.then(demuxResult => {
  17049. this.flushRemux(transmuxResults, demuxResult, chunkMeta);
  17050. return transmuxResults;
  17051. });
  17052. }
  17053. this.flushRemux(transmuxResults, demuxResultOrPromise, chunkMeta);
  17054. return transmuxResults;
  17055. }
  17056. flushRemux(transmuxResults, demuxResult, chunkMeta) {
  17057. const {
  17058. audioTrack,
  17059. videoTrack,
  17060. id3Track,
  17061. textTrack
  17062. } = demuxResult;
  17063. const {
  17064. accurateTimeOffset,
  17065. timeOffset
  17066. } = this.currentTransmuxState;
  17067. logger.log(`[transmuxer.ts]: Flushed fragment ${chunkMeta.sn}${chunkMeta.part > -1 ? ' p: ' + chunkMeta.part : ''} of level ${chunkMeta.level}`);
  17068. const remuxResult = this.remuxer.remux(audioTrack, videoTrack, id3Track, textTrack, timeOffset, accurateTimeOffset, true, this.id);
  17069. transmuxResults.push({
  17070. remuxResult,
  17071. chunkMeta
  17072. });
  17073. chunkMeta.transmuxing.executeEnd = now();
  17074. }
  17075. resetInitialTimestamp(defaultInitPts) {
  17076. const {
  17077. demuxer,
  17078. remuxer
  17079. } = this;
  17080. if (!demuxer || !remuxer) {
  17081. return;
  17082. }
  17083. demuxer.resetTimeStamp(defaultInitPts);
  17084. remuxer.resetTimeStamp(defaultInitPts);
  17085. }
  17086. resetContiguity() {
  17087. const {
  17088. demuxer,
  17089. remuxer
  17090. } = this;
  17091. if (!demuxer || !remuxer) {
  17092. return;
  17093. }
  17094. demuxer.resetContiguity();
  17095. remuxer.resetNextTimestamp();
  17096. }
  17097. resetInitSegment(initSegmentData, audioCodec, videoCodec, trackDuration, decryptdata) {
  17098. const {
  17099. demuxer,
  17100. remuxer
  17101. } = this;
  17102. if (!demuxer || !remuxer) {
  17103. return;
  17104. }
  17105. demuxer.resetInitSegment(initSegmentData, audioCodec, videoCodec, trackDuration);
  17106. remuxer.resetInitSegment(initSegmentData, audioCodec, videoCodec, decryptdata);
  17107. }
  17108. destroy() {
  17109. if (this.demuxer) {
  17110. this.demuxer.destroy();
  17111. this.demuxer = undefined;
  17112. }
  17113. if (this.remuxer) {
  17114. this.remuxer.destroy();
  17115. this.remuxer = undefined;
  17116. }
  17117. }
  17118. transmux(data, keyData, timeOffset, accurateTimeOffset, chunkMeta) {
  17119. let result;
  17120. if (keyData && keyData.method === 'SAMPLE-AES') {
  17121. result = this.transmuxSampleAes(data, keyData, timeOffset, accurateTimeOffset, chunkMeta);
  17122. } else {
  17123. result = this.transmuxUnencrypted(data, timeOffset, accurateTimeOffset, chunkMeta);
  17124. }
  17125. return result;
  17126. }
  17127. transmuxUnencrypted(data, timeOffset, accurateTimeOffset, chunkMeta) {
  17128. const {
  17129. audioTrack,
  17130. videoTrack,
  17131. id3Track,
  17132. textTrack
  17133. } = this.demuxer.demux(data, timeOffset, false, !this.config.progressive);
  17134. const remuxResult = this.remuxer.remux(audioTrack, videoTrack, id3Track, textTrack, timeOffset, accurateTimeOffset, false, this.id);
  17135. return {
  17136. remuxResult,
  17137. chunkMeta
  17138. };
  17139. }
  17140. transmuxSampleAes(data, decryptData, timeOffset, accurateTimeOffset, chunkMeta) {
  17141. return this.demuxer.demuxSampleAes(data, decryptData, timeOffset).then(demuxResult => {
  17142. const remuxResult = this.remuxer.remux(demuxResult.audioTrack, demuxResult.videoTrack, demuxResult.id3Track, demuxResult.textTrack, timeOffset, accurateTimeOffset, false, this.id);
  17143. return {
  17144. remuxResult,
  17145. chunkMeta
  17146. };
  17147. });
  17148. }
  17149. configureTransmuxer(data) {
  17150. const {
  17151. config,
  17152. observer,
  17153. typeSupported,
  17154. vendor
  17155. } = this;
  17156. // probe for content type
  17157. let mux;
  17158. for (let i = 0, len = muxConfig.length; i < len; i++) {
  17159. var _muxConfig$i$demux;
  17160. if ((_muxConfig$i$demux = muxConfig[i].demux) != null && _muxConfig$i$demux.probe(data)) {
  17161. mux = muxConfig[i];
  17162. break;
  17163. }
  17164. }
  17165. if (!mux) {
  17166. return new Error('Failed to find demuxer by probing fragment data');
  17167. }
17168. // a matching demuxer was found, so check that the current remuxer and demuxer are still valid
  17169. const demuxer = this.demuxer;
  17170. const remuxer = this.remuxer;
  17171. const Remuxer = mux.remux;
  17172. const Demuxer = mux.demux;
  17173. if (!remuxer || !(remuxer instanceof Remuxer)) {
  17174. this.remuxer = new Remuxer(observer, config, typeSupported, vendor);
  17175. }
  17176. if (!demuxer || !(demuxer instanceof Demuxer)) {
  17177. this.demuxer = new Demuxer(observer, config, typeSupported);
  17178. this.probe = Demuxer.probe;
  17179. }
  17180. }
  17181. needsProbing(discontinuity, trackSwitch) {
17182. // in case of a discontinuity or track switch,
17183. // we might switch content type (for example from an AAC elementary stream to a TS container, or from TS to fmp4)
  17184. return !this.demuxer || !this.remuxer || discontinuity || trackSwitch;
  17185. }
  17186. getDecrypter() {
  17187. let decrypter = this.decrypter;
  17188. if (!decrypter) {
  17189. decrypter = this.decrypter = new Decrypter(this.config);
  17190. }
  17191. return decrypter;
  17192. }
  17193. }
  17194. function getEncryptionType(data, decryptData) {
  17195. let encryptionType = null;
  17196. if (data.byteLength > 0 && (decryptData == null ? void 0 : decryptData.key) != null && decryptData.iv !== null && decryptData.method != null) {
  17197. encryptionType = decryptData;
  17198. }
  17199. return encryptionType;
  17200. }
  17201. const emptyResult = chunkMeta => ({
  17202. remuxResult: {},
  17203. chunkMeta
  17204. });
  17205. function isPromise(p) {
  17206. return 'then' in p && p.then instanceof Function;
  17207. }
  17208. class TransmuxConfig {
  17209. constructor(audioCodec, videoCodec, initSegmentData, duration, defaultInitPts) {
  17210. this.audioCodec = void 0;
  17211. this.videoCodec = void 0;
  17212. this.initSegmentData = void 0;
  17213. this.duration = void 0;
  17214. this.defaultInitPts = void 0;
  17215. this.audioCodec = audioCodec;
  17216. this.videoCodec = videoCodec;
  17217. this.initSegmentData = initSegmentData;
  17218. this.duration = duration;
  17219. this.defaultInitPts = defaultInitPts || null;
  17220. }
  17221. }
  17222. class TransmuxState {
  17223. constructor(discontinuity, contiguous, accurateTimeOffset, trackSwitch, timeOffset, initSegmentChange) {
  17224. this.discontinuity = void 0;
  17225. this.contiguous = void 0;
  17226. this.accurateTimeOffset = void 0;
  17227. this.trackSwitch = void 0;
  17228. this.timeOffset = void 0;
  17229. this.initSegmentChange = void 0;
  17230. this.discontinuity = discontinuity;
  17231. this.contiguous = contiguous;
  17232. this.accurateTimeOffset = accurateTimeOffset;
  17233. this.trackSwitch = trackSwitch;
  17234. this.timeOffset = timeOffset;
  17235. this.initSegmentChange = initSegmentChange;
  17236. }
  17237. }
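// --- Illustrative note (not part of hls.js) ---
// TransmuxerInterface.push() below builds one TransmuxState per pushed chunk, e.g.
//   new TransmuxState(discontinuity, contiguous, accurateTimeOffset, trackSwitch,
//                     timeOffset, initSegmentChange)
// whereas a TransmuxConfig is only rebuilt when a new transmux session starts
// (discontinuity, non-contiguous append, or init segment change).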
  17238. var eventemitter3 = {exports: {}};
  17239. (function (module) {
  17240. var has = Object.prototype.hasOwnProperty
  17241. , prefix = '~';
  17242. /**
  17243. * Constructor to create a storage for our `EE` objects.
  17244. * An `Events` instance is a plain object whose properties are event names.
  17245. *
  17246. * @constructor
  17247. * @private
  17248. */
  17249. function Events() {}
  17250. //
  17251. // We try to not inherit from `Object.prototype`. In some engines creating an
  17252. // instance in this way is faster than calling `Object.create(null)` directly.
  17253. // If `Object.create(null)` is not supported we prefix the event names with a
  17254. // character to make sure that the built-in object properties are not
  17255. // overridden or used as an attack vector.
  17256. //
  17257. if (Object.create) {
  17258. Events.prototype = Object.create(null);
  17259. //
  17260. // This hack is needed because the `__proto__` property is still inherited in
  17261. // some old browsers like Android 4, iPhone 5.1, Opera 11 and Safari 5.
  17262. //
  17263. if (!new Events().__proto__) prefix = false;
  17264. }
  17265. /**
  17266. * Representation of a single event listener.
  17267. *
  17268. * @param {Function} fn The listener function.
  17269. * @param {*} context The context to invoke the listener with.
  17270. * @param {Boolean} [once=false] Specify if the listener is a one-time listener.
  17271. * @constructor
  17272. * @private
  17273. */
  17274. function EE(fn, context, once) {
  17275. this.fn = fn;
  17276. this.context = context;
  17277. this.once = once || false;
  17278. }
  17279. /**
  17280. * Add a listener for a given event.
  17281. *
  17282. * @param {EventEmitter} emitter Reference to the `EventEmitter` instance.
  17283. * @param {(String|Symbol)} event The event name.
  17284. * @param {Function} fn The listener function.
  17285. * @param {*} context The context to invoke the listener with.
  17286. * @param {Boolean} once Specify if the listener is a one-time listener.
  17287. * @returns {EventEmitter}
  17288. * @private
  17289. */
  17290. function addListener(emitter, event, fn, context, once) {
  17291. if (typeof fn !== 'function') {
  17292. throw new TypeError('The listener must be a function');
  17293. }
  17294. var listener = new EE(fn, context || emitter, once)
  17295. , evt = prefix ? prefix + event : event;
  17296. if (!emitter._events[evt]) emitter._events[evt] = listener, emitter._eventsCount++;
  17297. else if (!emitter._events[evt].fn) emitter._events[evt].push(listener);
  17298. else emitter._events[evt] = [emitter._events[evt], listener];
  17299. return emitter;
  17300. }
  17301. /**
  17302. * Clear event by name.
  17303. *
  17304. * @param {EventEmitter} emitter Reference to the `EventEmitter` instance.
  17305. * @param {(String|Symbol)} evt The Event name.
  17306. * @private
  17307. */
  17308. function clearEvent(emitter, evt) {
  17309. if (--emitter._eventsCount === 0) emitter._events = new Events();
  17310. else delete emitter._events[evt];
  17311. }
  17312. /**
  17313. * Minimal `EventEmitter` interface that is molded against the Node.js
  17314. * `EventEmitter` interface.
  17315. *
  17316. * @constructor
  17317. * @public
  17318. */
  17319. function EventEmitter() {
  17320. this._events = new Events();
  17321. this._eventsCount = 0;
  17322. }
  17323. /**
  17324. * Return an array listing the events for which the emitter has registered
  17325. * listeners.
  17326. *
  17327. * @returns {Array}
  17328. * @public
  17329. */
  17330. EventEmitter.prototype.eventNames = function eventNames() {
  17331. var names = []
  17332. , events
  17333. , name;
  17334. if (this._eventsCount === 0) return names;
  17335. for (name in (events = this._events)) {
  17336. if (has.call(events, name)) names.push(prefix ? name.slice(1) : name);
  17337. }
  17338. if (Object.getOwnPropertySymbols) {
  17339. return names.concat(Object.getOwnPropertySymbols(events));
  17340. }
  17341. return names;
  17342. };
  17343. /**
  17344. * Return the listeners registered for a given event.
  17345. *
  17346. * @param {(String|Symbol)} event The event name.
  17347. * @returns {Array} The registered listeners.
  17348. * @public
  17349. */
  17350. EventEmitter.prototype.listeners = function listeners(event) {
  17351. var evt = prefix ? prefix + event : event
  17352. , handlers = this._events[evt];
  17353. if (!handlers) return [];
  17354. if (handlers.fn) return [handlers.fn];
  17355. for (var i = 0, l = handlers.length, ee = new Array(l); i < l; i++) {
  17356. ee[i] = handlers[i].fn;
  17357. }
  17358. return ee;
  17359. };
  17360. /**
  17361. * Return the number of listeners listening to a given event.
  17362. *
  17363. * @param {(String|Symbol)} event The event name.
  17364. * @returns {Number} The number of listeners.
  17365. * @public
  17366. */
  17367. EventEmitter.prototype.listenerCount = function listenerCount(event) {
  17368. var evt = prefix ? prefix + event : event
  17369. , listeners = this._events[evt];
  17370. if (!listeners) return 0;
  17371. if (listeners.fn) return 1;
  17372. return listeners.length;
  17373. };
  17374. /**
  17375. * Calls each of the listeners registered for a given event.
  17376. *
  17377. * @param {(String|Symbol)} event The event name.
  17378. * @returns {Boolean} `true` if the event had listeners, else `false`.
  17379. * @public
  17380. */
  17381. EventEmitter.prototype.emit = function emit(event, a1, a2, a3, a4, a5) {
  17382. var evt = prefix ? prefix + event : event;
  17383. if (!this._events[evt]) return false;
  17384. var listeners = this._events[evt]
  17385. , len = arguments.length
  17386. , args
  17387. , i;
  17388. if (listeners.fn) {
  17389. if (listeners.once) this.removeListener(event, listeners.fn, undefined, true);
  17390. switch (len) {
  17391. case 1: return listeners.fn.call(listeners.context), true;
  17392. case 2: return listeners.fn.call(listeners.context, a1), true;
  17393. case 3: return listeners.fn.call(listeners.context, a1, a2), true;
  17394. case 4: return listeners.fn.call(listeners.context, a1, a2, a3), true;
  17395. case 5: return listeners.fn.call(listeners.context, a1, a2, a3, a4), true;
  17396. case 6: return listeners.fn.call(listeners.context, a1, a2, a3, a4, a5), true;
  17397. }
  17398. for (i = 1, args = new Array(len -1); i < len; i++) {
  17399. args[i - 1] = arguments[i];
  17400. }
  17401. listeners.fn.apply(listeners.context, args);
  17402. } else {
  17403. var length = listeners.length
  17404. , j;
  17405. for (i = 0; i < length; i++) {
  17406. if (listeners[i].once) this.removeListener(event, listeners[i].fn, undefined, true);
  17407. switch (len) {
  17408. case 1: listeners[i].fn.call(listeners[i].context); break;
  17409. case 2: listeners[i].fn.call(listeners[i].context, a1); break;
  17410. case 3: listeners[i].fn.call(listeners[i].context, a1, a2); break;
  17411. case 4: listeners[i].fn.call(listeners[i].context, a1, a2, a3); break;
  17412. default:
  17413. if (!args) for (j = 1, args = new Array(len -1); j < len; j++) {
  17414. args[j - 1] = arguments[j];
  17415. }
  17416. listeners[i].fn.apply(listeners[i].context, args);
  17417. }
  17418. }
  17419. }
  17420. return true;
  17421. };
  17422. /**
  17423. * Add a listener for a given event.
  17424. *
  17425. * @param {(String|Symbol)} event The event name.
  17426. * @param {Function} fn The listener function.
  17427. * @param {*} [context=this] The context to invoke the listener with.
  17428. * @returns {EventEmitter} `this`.
  17429. * @public
  17430. */
  17431. EventEmitter.prototype.on = function on(event, fn, context) {
  17432. return addListener(this, event, fn, context, false);
  17433. };
  17434. /**
  17435. * Add a one-time listener for a given event.
  17436. *
  17437. * @param {(String|Symbol)} event The event name.
  17438. * @param {Function} fn The listener function.
  17439. * @param {*} [context=this] The context to invoke the listener with.
  17440. * @returns {EventEmitter} `this`.
  17441. * @public
  17442. */
  17443. EventEmitter.prototype.once = function once(event, fn, context) {
  17444. return addListener(this, event, fn, context, true);
  17445. };
  17446. /**
  17447. * Remove the listeners of a given event.
  17448. *
  17449. * @param {(String|Symbol)} event The event name.
  17450. * @param {Function} fn Only remove the listeners that match this function.
  17451. * @param {*} context Only remove the listeners that have this context.
  17452. * @param {Boolean} once Only remove one-time listeners.
  17453. * @returns {EventEmitter} `this`.
  17454. * @public
  17455. */
  17456. EventEmitter.prototype.removeListener = function removeListener(event, fn, context, once) {
  17457. var evt = prefix ? prefix + event : event;
  17458. if (!this._events[evt]) return this;
  17459. if (!fn) {
  17460. clearEvent(this, evt);
  17461. return this;
  17462. }
  17463. var listeners = this._events[evt];
  17464. if (listeners.fn) {
  17465. if (
  17466. listeners.fn === fn &&
  17467. (!once || listeners.once) &&
  17468. (!context || listeners.context === context)
  17469. ) {
  17470. clearEvent(this, evt);
  17471. }
  17472. } else {
  17473. for (var i = 0, events = [], length = listeners.length; i < length; i++) {
  17474. if (
  17475. listeners[i].fn !== fn ||
  17476. (once && !listeners[i].once) ||
  17477. (context && listeners[i].context !== context)
  17478. ) {
  17479. events.push(listeners[i]);
  17480. }
  17481. }
  17482. //
  17483. // Reset the array, or remove it completely if we have no more listeners.
  17484. //
  17485. if (events.length) this._events[evt] = events.length === 1 ? events[0] : events;
  17486. else clearEvent(this, evt);
  17487. }
  17488. return this;
  17489. };
  17490. /**
  17491. * Remove all listeners, or those of the specified event.
  17492. *
  17493. * @param {(String|Symbol)} [event] The event name.
  17494. * @returns {EventEmitter} `this`.
  17495. * @public
  17496. */
  17497. EventEmitter.prototype.removeAllListeners = function removeAllListeners(event) {
  17498. var evt;
  17499. if (event) {
  17500. evt = prefix ? prefix + event : event;
  17501. if (this._events[evt]) clearEvent(this, evt);
  17502. } else {
  17503. this._events = new Events();
  17504. this._eventsCount = 0;
  17505. }
  17506. return this;
  17507. };
  17508. //
17509. // Alias method names because people roll like that.
  17510. //
  17511. EventEmitter.prototype.off = EventEmitter.prototype.removeListener;
  17512. EventEmitter.prototype.addListener = EventEmitter.prototype.on;
  17513. //
  17514. // Expose the prefix.
  17515. //
  17516. EventEmitter.prefixed = prefix;
  17517. //
  17518. // Allow `EventEmitter` to be imported as module namespace.
  17519. //
  17520. EventEmitter.EventEmitter = EventEmitter;
  17521. //
  17522. // Expose the module.
  17523. //
  17524. {
  17525. module.exports = EventEmitter;
  17526. }
  17527. } (eventemitter3));
  17528. var eventemitter3Exports = eventemitter3.exports;
  17529. var EventEmitter = /*@__PURE__*/getDefaultExportFromCjs(eventemitter3Exports);
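// --- Illustrative note (not part of hls.js) ---
// This is the vendored eventemitter3 package; TransmuxerInterface below uses it as
// a lightweight observer. A typical (hypothetical) exchange looks like:
//   const observer = new EventEmitter();
//   observer.on(Events.ERROR, (event, data) => { /* forward to hls.trigger */ });
//   observer.emit(Events.ERROR, Events.ERROR, { fatal: false });
// The event name is passed twice because listeners receive it again as their first
// argument.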
  17530. class TransmuxerInterface {
  17531. constructor(hls, id, onTransmuxComplete, onFlush) {
  17532. this.error = null;
  17533. this.hls = void 0;
  17534. this.id = void 0;
  17535. this.observer = void 0;
  17536. this.frag = null;
  17537. this.part = null;
  17538. this.useWorker = void 0;
  17539. this.workerContext = null;
  17540. this.onwmsg = void 0;
  17541. this.transmuxer = null;
  17542. this.onTransmuxComplete = void 0;
  17543. this.onFlush = void 0;
  17544. const config = hls.config;
  17545. this.hls = hls;
  17546. this.id = id;
  17547. this.useWorker = !!config.enableWorker;
  17548. this.onTransmuxComplete = onTransmuxComplete;
  17549. this.onFlush = onFlush;
  17550. const forwardMessage = (ev, data) => {
  17551. data = data || {};
  17552. data.frag = this.frag;
  17553. data.id = this.id;
  17554. if (ev === Events.ERROR) {
  17555. this.error = data.error;
  17556. }
  17557. this.hls.trigger(ev, data);
  17558. };
  17559. // forward events to main thread
  17560. this.observer = new EventEmitter();
  17561. this.observer.on(Events.FRAG_DECRYPTED, forwardMessage);
  17562. this.observer.on(Events.ERROR, forwardMessage);
  17563. const MediaSource = getMediaSource(config.preferManagedMediaSource) || {
  17564. isTypeSupported: () => false
  17565. };
  17566. const m2tsTypeSupported = {
  17567. mpeg: MediaSource.isTypeSupported('audio/mpeg'),
  17568. mp3: MediaSource.isTypeSupported('audio/mp4; codecs="mp3"'),
  17569. ac3: false
  17570. };
  17571. if (this.useWorker && typeof Worker !== 'undefined') {
  17572. const canCreateWorker = config.workerPath || hasUMDWorker();
  17573. if (canCreateWorker) {
  17574. try {
  17575. if (config.workerPath) {
  17576. logger.log(`loading Web Worker ${config.workerPath} for "${id}"`);
  17577. this.workerContext = loadWorker(config.workerPath);
  17578. } else {
  17579. logger.log(`injecting Web Worker for "${id}"`);
  17580. this.workerContext = injectWorker();
  17581. }
  17582. this.onwmsg = event => this.onWorkerMessage(event);
  17583. const {
  17584. worker
  17585. } = this.workerContext;
  17586. worker.addEventListener('message', this.onwmsg);
  17587. worker.onerror = event => {
  17588. const error = new Error(`${event.message} (${event.filename}:${event.lineno})`);
  17589. config.enableWorker = false;
  17590. logger.warn(`Error in "${id}" Web Worker, fallback to inline`);
  17591. this.hls.trigger(Events.ERROR, {
  17592. type: ErrorTypes.OTHER_ERROR,
  17593. details: ErrorDetails.INTERNAL_EXCEPTION,
  17594. fatal: false,
  17595. event: 'demuxerWorker',
  17596. error
  17597. });
  17598. };
  17599. worker.postMessage({
  17600. cmd: 'init',
  17601. typeSupported: m2tsTypeSupported,
  17602. vendor: '',
  17603. id: id,
  17604. config: JSON.stringify(config)
  17605. });
  17606. } catch (err) {
  17607. logger.warn(`Error setting up "${id}" Web Worker, fallback to inline`, err);
  17608. this.resetWorker();
  17609. this.error = null;
  17610. this.transmuxer = new Transmuxer(this.observer, m2tsTypeSupported, config, '', id);
  17611. }
  17612. return;
  17613. }
  17614. }
  17615. this.transmuxer = new Transmuxer(this.observer, m2tsTypeSupported, config, '', id);
  17616. }
  17617. resetWorker() {
  17618. if (this.workerContext) {
  17619. const {
  17620. worker,
  17621. objectURL
  17622. } = this.workerContext;
  17623. if (objectURL) {
  17624. // revoke the Object URL that was used to create transmuxer worker, so as not to leak it
  17625. self.URL.revokeObjectURL(objectURL);
  17626. }
  17627. worker.removeEventListener('message', this.onwmsg);
  17628. worker.onerror = null;
  17629. worker.terminate();
  17630. this.workerContext = null;
  17631. }
  17632. }
  17633. destroy() {
  17634. if (this.workerContext) {
  17635. this.resetWorker();
  17636. this.onwmsg = undefined;
  17637. } else {
  17638. const transmuxer = this.transmuxer;
  17639. if (transmuxer) {
  17640. transmuxer.destroy();
  17641. this.transmuxer = null;
  17642. }
  17643. }
  17644. const observer = this.observer;
  17645. if (observer) {
  17646. observer.removeAllListeners();
  17647. }
  17648. this.frag = null;
  17649. // @ts-ignore
  17650. this.observer = null;
  17651. // @ts-ignore
  17652. this.hls = null;
  17653. }
  17654. push(data, initSegmentData, audioCodec, videoCodec, frag, part, duration, accurateTimeOffset, chunkMeta, defaultInitPTS) {
  17655. var _frag$initSegment, _lastFrag$initSegment;
  17656. chunkMeta.transmuxing.start = self.performance.now();
  17657. const {
  17658. transmuxer
  17659. } = this;
  17660. const timeOffset = part ? part.start : frag.start;
  17661. // TODO: push "clear-lead" decrypt data for unencrypted fragments in streams with encrypted ones
  17662. const decryptdata = frag.decryptdata;
  17663. const lastFrag = this.frag;
  17664. const discontinuity = !(lastFrag && frag.cc === lastFrag.cc);
  17665. const trackSwitch = !(lastFrag && chunkMeta.level === lastFrag.level);
  17666. const snDiff = lastFrag ? chunkMeta.sn - lastFrag.sn : -1;
  17667. const partDiff = this.part ? chunkMeta.part - this.part.index : -1;
  17668. const progressive = snDiff === 0 && chunkMeta.id > 1 && chunkMeta.id === (lastFrag == null ? void 0 : lastFrag.stats.chunkCount);
  17669. const contiguous = !trackSwitch && (snDiff === 1 || snDiff === 0 && (partDiff === 1 || progressive && partDiff <= 0));
  17670. const now = self.performance.now();
  17671. if (trackSwitch || snDiff || frag.stats.parsing.start === 0) {
  17672. frag.stats.parsing.start = now;
  17673. }
  17674. if (part && (partDiff || !contiguous)) {
  17675. part.stats.parsing.start = now;
  17676. }
  17677. const initSegmentChange = !(lastFrag && ((_frag$initSegment = frag.initSegment) == null ? void 0 : _frag$initSegment.url) === ((_lastFrag$initSegment = lastFrag.initSegment) == null ? void 0 : _lastFrag$initSegment.url));
  17678. const state = new TransmuxState(discontinuity, contiguous, accurateTimeOffset, trackSwitch, timeOffset, initSegmentChange);
  17679. if (!contiguous || discontinuity || initSegmentChange) {
  17680. logger.log(`[transmuxer-interface, ${frag.type}]: Starting new transmux session for sn: ${chunkMeta.sn} p: ${chunkMeta.part} level: ${chunkMeta.level} id: ${chunkMeta.id}
  17681. discontinuity: ${discontinuity}
  17682. trackSwitch: ${trackSwitch}
  17683. contiguous: ${contiguous}
  17684. accurateTimeOffset: ${accurateTimeOffset}
  17685. timeOffset: ${timeOffset}
  17686. initSegmentChange: ${initSegmentChange}`);
  17687. const config = new TransmuxConfig(audioCodec, videoCodec, initSegmentData, duration, defaultInitPTS);
  17688. this.configureTransmuxer(config);
  17689. }
  17690. this.frag = frag;
  17691. this.part = part;
  17692. // Frags with sn of 'initSegment' are not transmuxed
  17693. if (this.workerContext) {
  17694. // post fragment payload as transferable objects for ArrayBuffer (no copy)
  17695. this.workerContext.worker.postMessage({
  17696. cmd: 'demux',
  17697. data,
  17698. decryptdata,
  17699. chunkMeta,
  17700. state
  17701. }, data instanceof ArrayBuffer ? [data] : []);
  17702. } else if (transmuxer) {
  17703. const transmuxResult = transmuxer.push(data, decryptdata, chunkMeta, state);
  17704. if (isPromise(transmuxResult)) {
  17705. transmuxer.async = true;
  17706. transmuxResult.then(data => {
  17707. this.handleTransmuxComplete(data);
  17708. }).catch(error => {
  17709. this.transmuxerError(error, chunkMeta, 'transmuxer-interface push error');
  17710. });
  17711. } else {
  17712. transmuxer.async = false;
  17713. this.handleTransmuxComplete(transmuxResult);
  17714. }
  17715. }
  17716. }
  17717. flush(chunkMeta) {
  17718. chunkMeta.transmuxing.start = self.performance.now();
  17719. const {
  17720. transmuxer
  17721. } = this;
  17722. if (this.workerContext) {
  17723. this.workerContext.worker.postMessage({
  17724. cmd: 'flush',
  17725. chunkMeta
  17726. });
  17727. } else if (transmuxer) {
  17728. let transmuxResult = transmuxer.flush(chunkMeta);
  17729. const asyncFlush = isPromise(transmuxResult);
  17730. if (asyncFlush || transmuxer.async) {
  17731. if (!isPromise(transmuxResult)) {
  17732. transmuxResult = Promise.resolve(transmuxResult);
  17733. }
  17734. transmuxResult.then(data => {
  17735. this.handleFlushResult(data, chunkMeta);
  17736. }).catch(error => {
  17737. this.transmuxerError(error, chunkMeta, 'transmuxer-interface flush error');
  17738. });
  17739. } else {
  17740. this.handleFlushResult(transmuxResult, chunkMeta);
  17741. }
  17742. }
  17743. }
  17744. transmuxerError(error, chunkMeta, reason) {
  17745. if (!this.hls) {
  17746. return;
  17747. }
  17748. this.error = error;
  17749. this.hls.trigger(Events.ERROR, {
  17750. type: ErrorTypes.MEDIA_ERROR,
  17751. details: ErrorDetails.FRAG_PARSING_ERROR,
  17752. chunkMeta,
  17753. frag: this.frag || undefined,
  17754. fatal: false,
  17755. error,
  17756. err: error,
  17757. reason
  17758. });
  17759. }
  17760. handleFlushResult(results, chunkMeta) {
  17761. results.forEach(result => {
  17762. this.handleTransmuxComplete(result);
  17763. });
  17764. this.onFlush(chunkMeta);
  17765. }
  17766. onWorkerMessage(event) {
  17767. const data = event.data;
  17768. if (!(data != null && data.event)) {
  17769. logger.warn(`worker message received with no ${data ? 'event name' : 'data'}`);
  17770. return;
  17771. }
  17772. const hls = this.hls;
  17773. if (!this.hls) {
  17774. return;
  17775. }
  17776. switch (data.event) {
  17777. case 'init':
  17778. {
  17779. var _this$workerContext;
  17780. const objectURL = (_this$workerContext = this.workerContext) == null ? void 0 : _this$workerContext.objectURL;
  17781. if (objectURL) {
  17782. // revoke the Object URL that was used to create transmuxer worker, so as not to leak it
  17783. self.URL.revokeObjectURL(objectURL);
  17784. }
  17785. break;
  17786. }
  17787. case 'transmuxComplete':
  17788. {
  17789. this.handleTransmuxComplete(data.data);
  17790. break;
  17791. }
  17792. case 'flush':
  17793. {
  17794. this.onFlush(data.data);
  17795. break;
  17796. }
  17797. // pass logs from the worker thread to the main logger
  17798. case 'workerLog':
  17799. if (logger[data.data.logType]) {
  17800. logger[data.data.logType](data.data.message);
  17801. }
  17802. break;
  17803. default:
  17804. {
  17805. data.data = data.data || {};
  17806. data.data.frag = this.frag;
  17807. data.data.id = this.id;
  17808. hls.trigger(data.event, data.data);
  17809. break;
  17810. }
  17811. }
  17812. }
  17813. configureTransmuxer(config) {
  17814. const {
  17815. transmuxer
  17816. } = this;
  17817. if (this.workerContext) {
  17818. this.workerContext.worker.postMessage({
  17819. cmd: 'configure',
  17820. config
  17821. });
  17822. } else if (transmuxer) {
  17823. transmuxer.configure(config);
  17824. }
  17825. }
  17826. handleTransmuxComplete(result) {
  17827. result.chunkMeta.transmuxing.end = self.performance.now();
  17828. this.onTransmuxComplete(result);
  17829. }
  17830. }
  17831. const STALL_MINIMUM_DURATION_MS = 250;
  17832. const MAX_START_GAP_JUMP = 2.0;
  17833. const SKIP_BUFFER_HOLE_STEP_SECONDS = 0.1;
  17834. const SKIP_BUFFER_RANGE_START = 0.05;
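// --- Illustrative note (not part of hls.js) ---
// Example of the start-gap handling in GapController.poll() below: with playback
// stuck at currentTime = 10.0 and the first buffered range starting at 10.8,
// startJump = 0.8 <= MAX_START_GAP_JUMP (2 s), so the controller skips the hole
// rather than letting the stall run past STALL_MINIMUM_DURATION_MS and be reported.
// For live streams the jump threshold is widened to two target durations.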
  17835. class GapController {
  17836. constructor(config, media, fragmentTracker, hls) {
  17837. this.config = void 0;
  17838. this.media = null;
  17839. this.fragmentTracker = void 0;
  17840. this.hls = void 0;
  17841. this.nudgeRetry = 0;
  17842. this.stallReported = false;
  17843. this.stalled = null;
  17844. this.moved = false;
  17845. this.seeking = false;
  17846. this.config = config;
  17847. this.media = media;
  17848. this.fragmentTracker = fragmentTracker;
  17849. this.hls = hls;
  17850. }
  17851. destroy() {
  17852. this.media = null;
  17853. // @ts-ignore
  17854. this.hls = this.fragmentTracker = null;
  17855. }
  17856. /**
  17857. * Checks if the playhead is stuck within a gap, and if so, attempts to free it.
  17858. * A gap is an unbuffered range between two buffered ranges (or the start and the first buffered range).
  17859. *
  17860. * @param lastCurrentTime - Previously read playhead position
  17861. */
  17862. poll(lastCurrentTime, activeFrag) {
  17863. const {
  17864. config,
  17865. media,
  17866. stalled
  17867. } = this;
  17868. if (media === null) {
  17869. return;
  17870. }
  17871. const {
  17872. currentTime,
  17873. seeking
  17874. } = media;
  17875. const seeked = this.seeking && !seeking;
  17876. const beginSeek = !this.seeking && seeking;
  17877. this.seeking = seeking;
  17878. // The playhead is moving, no-op
  17879. if (currentTime !== lastCurrentTime) {
  17880. this.moved = true;
  17881. if (!seeking) {
  17882. this.nudgeRetry = 0;
  17883. }
  17884. if (stalled !== null) {
  17885. // The playhead is now moving, but was previously stalled
  17886. if (this.stallReported) {
  17887. const _stalledDuration = self.performance.now() - stalled;
  17888. logger.warn(`playback not stuck anymore @${currentTime}, after ${Math.round(_stalledDuration)}ms`);
  17889. this.stallReported = false;
  17890. }
  17891. this.stalled = null;
  17892. }
  17893. return;
  17894. }
  17895. // Clear stalled state when beginning or finishing seeking so that we don't report stalls coming out of a seek
  17896. if (beginSeek || seeked) {
  17897. this.stalled = null;
  17898. return;
  17899. }
  17900. // The playhead should not be moving
  17901. if (media.paused && !seeking || media.ended || media.playbackRate === 0 || !BufferHelper.getBuffered(media).length) {
  17902. this.nudgeRetry = 0;
  17903. return;
  17904. }
  17905. const bufferInfo = BufferHelper.bufferInfo(media, currentTime, 0);
  17906. const nextStart = bufferInfo.nextStart || 0;
  17907. if (seeking) {
  17908. // Waiting for seeking in a buffered range to complete
  17909. const hasEnoughBuffer = bufferInfo.len > MAX_START_GAP_JUMP;
  17910. // Next buffered range is too far ahead to jump to while still seeking
  17911. const noBufferGap = !nextStart || activeFrag && activeFrag.start <= currentTime || nextStart - currentTime > MAX_START_GAP_JUMP && !this.fragmentTracker.getPartialFragment(currentTime);
  17912. if (hasEnoughBuffer || noBufferGap) {
  17913. return;
  17914. }
  17915. // Reset moved state when seeking to a point in or before a gap
  17916. this.moved = false;
  17917. }
  17918. // Skip start gaps if we haven't played, but the last poll detected the start of a stall
17919. // The additional poll gives the browser a chance to jump the gap for us
  17920. if (!this.moved && this.stalled !== null) {
  17921. var _level$details;
  17922. // There is no playable buffer (seeked, waiting for buffer)
  17923. const isBuffered = bufferInfo.len > 0;
  17924. if (!isBuffered && !nextStart) {
  17925. return;
  17926. }
  17927. // Jump start gaps within jump threshold
  17928. const startJump = Math.max(nextStart, bufferInfo.start || 0) - currentTime;
  17929. // When joining a live stream with audio tracks, account for live playlist window sliding by allowing
  17930. // a larger jump over start gaps caused by the audio-stream-controller buffering a start fragment
  17931. // that begins over 1 target duration after the video start position.
  17932. const level = this.hls.levels ? this.hls.levels[this.hls.currentLevel] : null;
  17933. const isLive = level == null ? void 0 : (_level$details = level.details) == null ? void 0 : _level$details.live;
  17934. const maxStartGapJump = isLive ? level.details.targetduration * 2 : MAX_START_GAP_JUMP;
  17935. const partialOrGap = this.fragmentTracker.getPartialFragment(currentTime);
  17936. if (startJump > 0 && (startJump <= maxStartGapJump || partialOrGap)) {
  17937. if (!media.paused) {
  17938. this._trySkipBufferHole(partialOrGap);
  17939. }
  17940. return;
  17941. }
  17942. }
  17943. // Start tracking stall time
  17944. const tnow = self.performance.now();
  17945. if (stalled === null) {
  17946. this.stalled = tnow;
  17947. return;
  17948. }
  17949. const stalledDuration = tnow - stalled;
  17950. if (!seeking && stalledDuration >= STALL_MINIMUM_DURATION_MS) {
  17951. // Report stalling after trying to fix
  17952. this._reportStall(bufferInfo);
  17953. if (!this.media) {
  17954. return;
  17955. }
  17956. }
  17957. const bufferedWithHoles = BufferHelper.bufferInfo(media, currentTime, config.maxBufferHole);
  17958. this._tryFixBufferStall(bufferedWithHoles, stalledDuration);
  17959. }
  17960. /**
  17961. * Detects and attempts to fix known buffer stalling issues.
  17962. * @param bufferInfo - The properties of the current buffer.
  17963. * @param stalledDurationMs - The amount of time Hls.js has been stalling for.
  17964. * @private
  17965. */
  17966. _tryFixBufferStall(bufferInfo, stalledDurationMs) {
  17967. const {
  17968. config,
  17969. fragmentTracker,
  17970. media
  17971. } = this;
  17972. if (media === null) {
  17973. return;
  17974. }
  17975. const currentTime = media.currentTime;
  17976. const partial = fragmentTracker.getPartialFragment(currentTime);
  17977. if (partial) {
  17978. // Try to skip over the buffer hole caused by a partial fragment
  17979. // This method isn't limited by the size of the gap between buffered ranges
  17980. const targetTime = this._trySkipBufferHole(partial);
  17981. // we return here in this case, meaning
  17982. // the branch below only executes when we haven't seeked to a new position
  17983. if (targetTime || !this.media) {
  17984. return;
  17985. }
  17986. }
17987. // if we haven't had to skip over a buffer hole of a partial fragment,
17988. // we may just have to "nudge" the playhead, as the browser's decoding/rendering engine
17989. // needs to cross some sort of threshold covering all source-buffer content
17990. // before it starts playing properly.
  17991. if ((bufferInfo.len > config.maxBufferHole || bufferInfo.nextStart && bufferInfo.nextStart - currentTime < config.maxBufferHole) && stalledDurationMs > config.highBufferWatchdogPeriod * 1000) {
  17992. logger.warn('Trying to nudge playhead over buffer-hole');
17993. // Try to nudge currentTime over a buffer hole if we've been stalling for the configured number of seconds
17994. // We only try to jump the hole if it's under the configured size
17995. // Reset stalled so as to rearm the watchdog timer
  17996. this.stalled = null;
  17997. this._tryNudgeBuffer();
  17998. }
  17999. }
  18000. /**
  18001. * Triggers a BUFFER_STALLED_ERROR event, but only once per stall period.
18002. * @param bufferInfo - The properties of the current buffer (its length is included in the reported error).
  18003. * @private
  18004. */
  18005. _reportStall(bufferInfo) {
  18006. const {
  18007. hls,
  18008. media,
  18009. stallReported
  18010. } = this;
  18011. if (!stallReported && media) {
  18012. // Report stalled error once
  18013. this.stallReported = true;
  18014. const error = new Error(`Playback stalling at @${media.currentTime} due to low buffer (${JSON.stringify(bufferInfo)})`);
  18015. logger.warn(error.message);
  18016. hls.trigger(Events.ERROR, {
  18017. type: ErrorTypes.MEDIA_ERROR,
  18018. details: ErrorDetails.BUFFER_STALLED_ERROR,
  18019. fatal: false,
  18020. error,
  18021. buffer: bufferInfo.len
  18022. });
  18023. }
  18024. }
  18025. /**
  18026. * Attempts to fix buffer stalls by jumping over known gaps caused by partial fragments
  18027. * @param partial - The partial fragment found at the current time (where playback is stalling).
  18028. * @private
  18029. */
  18030. _trySkipBufferHole(partial) {
  18031. const {
  18032. config,
  18033. hls,
  18034. media
  18035. } = this;
  18036. if (media === null) {
  18037. return 0;
  18038. }
  18039. // Check if currentTime is between unbuffered regions of partial fragments
  18040. const currentTime = media.currentTime;
  18041. const bufferInfo = BufferHelper.bufferInfo(media, currentTime, 0);
  18042. const startTime = currentTime < bufferInfo.start ? bufferInfo.start : bufferInfo.nextStart;
  18043. if (startTime) {
  18044. const bufferStarved = bufferInfo.len <= config.maxBufferHole;
  18045. const waiting = bufferInfo.len > 0 && bufferInfo.len < 1 && media.readyState < 3;
  18046. const gapLength = startTime - currentTime;
  18047. if (gapLength > 0 && (bufferStarved || waiting)) {
  18048. // Only allow large gaps to be skipped if it is a start gap, or all fragments in skip range are partial
  18049. if (gapLength > config.maxBufferHole) {
  18050. const {
  18051. fragmentTracker
  18052. } = this;
  18053. let startGap = false;
  18054. if (currentTime === 0) {
  18055. const startFrag = fragmentTracker.getAppendedFrag(0, PlaylistLevelType.MAIN);
  18056. if (startFrag && startTime < startFrag.end) {
  18057. startGap = true;
  18058. }
  18059. }
  18060. if (!startGap) {
  18061. const startProvisioned = partial || fragmentTracker.getAppendedFrag(currentTime, PlaylistLevelType.MAIN);
  18062. if (startProvisioned) {
  18063. let moreToLoad = false;
  18064. let pos = startProvisioned.end;
  18065. while (pos < startTime) {
  18066. const provisioned = fragmentTracker.getPartialFragment(pos);
  18067. if (provisioned) {
  18068. pos += provisioned.duration;
  18069. } else {
  18070. moreToLoad = true;
  18071. break;
  18072. }
  18073. }
  18074. if (moreToLoad) {
  18075. return 0;
  18076. }
  18077. }
  18078. }
  18079. }
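// Seek just past the start of the next buffered range (SKIP_BUFFER_RANGE_START into it),
// but never less than SKIP_BUFFER_HOLE_STEP_SECONDS ahead of the current position.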
  18080. const targetTime = Math.max(startTime + SKIP_BUFFER_RANGE_START, currentTime + SKIP_BUFFER_HOLE_STEP_SECONDS);
  18081. logger.warn(`skipping hole, adjusting currentTime from ${currentTime} to ${targetTime}`);
  18082. this.moved = true;
  18083. this.stalled = null;
  18084. media.currentTime = targetTime;
  18085. if (partial && !partial.gap) {
  18086. const error = new Error(`fragment loaded with buffer holes, seeking from ${currentTime} to ${targetTime}`);
  18087. hls.trigger(Events.ERROR, {
  18088. type: ErrorTypes.MEDIA_ERROR,
  18089. details: ErrorDetails.BUFFER_SEEK_OVER_HOLE,
  18090. fatal: false,
  18091. error,
  18092. reason: error.message,
  18093. frag: partial
  18094. });
  18095. }
  18096. return targetTime;
  18097. }
  18098. }
  18099. return 0;
  18100. }
  18101. /**
  18102. * Attempts to fix buffer stalls by advancing the mediaElement's current time by a small amount.
  18103. * @private
  18104. */
  18105. _tryNudgeBuffer() {
  18106. const {
  18107. config,
  18108. hls,
  18109. media,
  18110. nudgeRetry
  18111. } = this;
  18112. if (media === null) {
  18113. return;
  18114. }
  18115. const currentTime = media.currentTime;
  18116. this.nudgeRetry++;
  18117. if (nudgeRetry < config.nudgeMaxRetry) {
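// Each retry nudges a little further: targetTime = currentTime + (retry + 1) * nudgeOffset.
// For illustration, with an assumed nudgeOffset of 0.1 s this advances the playhead by
// 0.1 s, then 0.2 s, then 0.3 s on successive stalls (actual values come from the config).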
  18118. const targetTime = currentTime + (nudgeRetry + 1) * config.nudgeOffset;
  18119. // playback stalled in buffered area ... let's nudge currentTime to try to overcome this
  18120. const error = new Error(`Nudging 'currentTime' from ${currentTime} to ${targetTime}`);
  18121. logger.warn(error.message);
  18122. media.currentTime = targetTime;
  18123. hls.trigger(Events.ERROR, {
  18124. type: ErrorTypes.MEDIA_ERROR,
  18125. details: ErrorDetails.BUFFER_NUDGE_ON_STALL,
  18126. error,
  18127. fatal: false
  18128. });
  18129. } else {
  18130. const error = new Error(`Playhead still not moving while enough data buffered @${currentTime} after ${config.nudgeMaxRetry} nudges`);
  18131. logger.error(error.message);
  18132. hls.trigger(Events.ERROR, {
  18133. type: ErrorTypes.MEDIA_ERROR,
  18134. details: ErrorDetails.BUFFER_STALLED_ERROR,
  18135. error,
  18136. fatal: true
  18137. });
  18138. }
  18139. }
  18140. }
  18141. const TICK_INTERVAL = 100; // how often to tick in ms
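// startLoad() below calls this.setInterval(TICK_INTERVAL), so the state machine in doTick()
// is re-evaluated roughly every 100 ms while loading is active.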
  18142. class StreamController extends BaseStreamController {
  18143. constructor(hls, fragmentTracker, keyLoader) {
  18144. super(hls, fragmentTracker, keyLoader, '[stream-controller]', PlaylistLevelType.MAIN);
  18145. this.audioCodecSwap = false;
  18146. this.gapController = null;
  18147. this.level = -1;
  18148. this._forceStartLoad = false;
  18149. this.altAudio = false;
  18150. this.audioOnly = false;
  18151. this.fragPlaying = null;
  18152. this.onvplaying = null;
  18153. this.onvseeked = null;
  18154. this.fragLastKbps = 0;
  18155. this.couldBacktrack = false;
  18156. this.backtrackFragment = null;
  18157. this.audioCodecSwitch = false;
  18158. this.videoBuffer = null;
  18159. this._registerListeners();
  18160. }
  18161. _registerListeners() {
  18162. const {
  18163. hls
  18164. } = this;
  18165. hls.on(Events.MEDIA_ATTACHED, this.onMediaAttached, this);
  18166. hls.on(Events.MEDIA_DETACHING, this.onMediaDetaching, this);
  18167. hls.on(Events.MANIFEST_LOADING, this.onManifestLoading, this);
  18168. hls.on(Events.MANIFEST_PARSED, this.onManifestParsed, this);
  18169. hls.on(Events.LEVEL_LOADING, this.onLevelLoading, this);
  18170. hls.on(Events.LEVEL_LOADED, this.onLevelLoaded, this);
  18171. hls.on(Events.FRAG_LOAD_EMERGENCY_ABORTED, this.onFragLoadEmergencyAborted, this);
  18172. hls.on(Events.ERROR, this.onError, this);
  18173. hls.on(Events.AUDIO_TRACK_SWITCHING, this.onAudioTrackSwitching, this);
  18174. hls.on(Events.AUDIO_TRACK_SWITCHED, this.onAudioTrackSwitched, this);
  18175. hls.on(Events.BUFFER_CREATED, this.onBufferCreated, this);
  18176. hls.on(Events.BUFFER_FLUSHED, this.onBufferFlushed, this);
  18177. hls.on(Events.LEVELS_UPDATED, this.onLevelsUpdated, this);
  18178. hls.on(Events.FRAG_BUFFERED, this.onFragBuffered, this);
  18179. }
  18180. _unregisterListeners() {
  18181. const {
  18182. hls
  18183. } = this;
  18184. hls.off(Events.MEDIA_ATTACHED, this.onMediaAttached, this);
  18185. hls.off(Events.MEDIA_DETACHING, this.onMediaDetaching, this);
  18186. hls.off(Events.MANIFEST_LOADING, this.onManifestLoading, this);
  18187. hls.off(Events.MANIFEST_PARSED, this.onManifestParsed, this);
  18188. hls.off(Events.LEVEL_LOADED, this.onLevelLoaded, this);
  18189. hls.off(Events.FRAG_LOAD_EMERGENCY_ABORTED, this.onFragLoadEmergencyAborted, this);
  18190. hls.off(Events.ERROR, this.onError, this);
  18191. hls.off(Events.AUDIO_TRACK_SWITCHING, this.onAudioTrackSwitching, this);
  18192. hls.off(Events.AUDIO_TRACK_SWITCHED, this.onAudioTrackSwitched, this);
  18193. hls.off(Events.BUFFER_CREATED, this.onBufferCreated, this);
  18194. hls.off(Events.BUFFER_FLUSHED, this.onBufferFlushed, this);
  18195. hls.off(Events.LEVELS_UPDATED, this.onLevelsUpdated, this);
  18196. hls.off(Events.FRAG_BUFFERED, this.onFragBuffered, this);
  18197. }
  18198. onHandlerDestroying() {
  18199. this._unregisterListeners();
  18200. super.onHandlerDestroying();
  18201. }
  18202. startLoad(startPosition) {
  18203. if (this.levels) {
  18204. const {
  18205. lastCurrentTime,
  18206. hls
  18207. } = this;
  18208. this.stopLoad();
  18209. this.setInterval(TICK_INTERVAL);
  18210. this.level = -1;
  18211. if (!this.startFragRequested) {
  18212. // determine load level
  18213. let startLevel = hls.startLevel;
  18214. if (startLevel === -1) {
  18215. if (hls.config.testBandwidth && this.levels.length > 1) {
18216. // -1 : guess the start level by running a bitrate test: load the first fragment of the lowest quality level
  18217. startLevel = 0;
  18218. this.bitrateTest = true;
  18219. } else {
  18220. startLevel = hls.firstAutoLevel;
  18221. }
  18222. }
  18223. // set new level to playlist loader : this will trigger start level load
  18224. // hls.nextLoadLevel remains until it is set to a new value or until a new frag is successfully loaded
  18225. hls.nextLoadLevel = startLevel;
  18226. this.level = hls.loadLevel;
  18227. this.loadedmetadata = false;
  18228. }
18229. // if startPosition is unset (-1) but lastCurrentTime is set, use lastCurrentTime as the startPosition
  18230. if (lastCurrentTime > 0 && startPosition === -1) {
  18231. this.log(`Override startPosition with lastCurrentTime @${lastCurrentTime.toFixed(3)}`);
  18232. startPosition = lastCurrentTime;
  18233. }
  18234. this.state = State.IDLE;
  18235. this.nextLoadPosition = this.startPosition = this.lastCurrentTime = startPosition;
  18236. this.tick();
  18237. } else {
  18238. this._forceStartLoad = true;
  18239. this.state = State.STOPPED;
  18240. }
  18241. }
  18242. stopLoad() {
  18243. this._forceStartLoad = false;
  18244. super.stopLoad();
  18245. }
  18246. doTick() {
  18247. switch (this.state) {
  18248. case State.WAITING_LEVEL:
  18249. {
  18250. const {
  18251. levels,
  18252. level
  18253. } = this;
  18254. const currentLevel = levels == null ? void 0 : levels[level];
  18255. const details = currentLevel == null ? void 0 : currentLevel.details;
  18256. if (details && (!details.live || this.levelLastLoaded === currentLevel)) {
  18257. if (this.waitForCdnTuneIn(details)) {
  18258. break;
  18259. }
  18260. this.state = State.IDLE;
  18261. break;
  18262. } else if (this.hls.nextLoadLevel !== this.level) {
  18263. this.state = State.IDLE;
  18264. break;
  18265. }
  18266. break;
  18267. }
  18268. case State.FRAG_LOADING_WAITING_RETRY:
  18269. {
  18270. var _this$media;
  18271. const now = self.performance.now();
  18272. const retryDate = this.retryDate;
18273. // if current time is greater than retryDate, or if media is seeking, switch to IDLE state to retry loading
  18274. if (!retryDate || now >= retryDate || (_this$media = this.media) != null && _this$media.seeking) {
  18275. const {
  18276. levels,
  18277. level
  18278. } = this;
  18279. const currentLevel = levels == null ? void 0 : levels[level];
  18280. this.resetStartWhenNotLoaded(currentLevel || null);
  18281. this.state = State.IDLE;
  18282. }
  18283. }
  18284. break;
  18285. }
  18286. if (this.state === State.IDLE) {
  18287. this.doTickIdle();
  18288. }
  18289. this.onTickEnd();
  18290. }
  18291. onTickEnd() {
  18292. super.onTickEnd();
  18293. this.checkBuffer();
  18294. this.checkFragmentChanged();
  18295. }
  18296. doTickIdle() {
  18297. const {
  18298. hls,
  18299. levelLastLoaded,
  18300. levels,
  18301. media
  18302. } = this;
  18303. // if start level not parsed yet OR
  18304. // if video not attached AND start fragment already requested OR start frag prefetch not enabled
  18305. // exit loop, as we either need more info (level not parsed) or we need media to be attached to load new fragment
  18306. if (levelLastLoaded === null || !media && (this.startFragRequested || !hls.config.startFragPrefetch)) {
  18307. return;
  18308. }
  18309. // If the "main" level is audio-only but we are loading an alternate track in the same group, do not load anything
  18310. if (this.altAudio && this.audioOnly) {
  18311. return;
  18312. }
  18313. const level = hls.nextLoadLevel;
  18314. if (!(levels != null && levels[level])) {
  18315. return;
  18316. }
  18317. const levelInfo = levels[level];
  18318. // if buffer length is less than maxBufLen try to load a new fragment
  18319. const bufferInfo = this.getMainFwdBufferInfo();
  18320. if (bufferInfo === null) {
  18321. return;
  18322. }
  18323. const lastDetails = this.getLevelDetails();
  18324. if (lastDetails && this._streamEnded(bufferInfo, lastDetails)) {
  18325. const data = {};
  18326. if (this.altAudio) {
  18327. data.type = 'video';
  18328. }
  18329. this.hls.trigger(Events.BUFFER_EOS, data);
  18330. this.state = State.ENDED;
  18331. return;
  18332. }
  18333. // set next load level : this will trigger a playlist load if needed
  18334. if (hls.loadLevel !== level && hls.manualLevel === -1) {
  18335. this.log(`Adapting to level ${level} from level ${this.level}`);
  18336. }
  18337. this.level = hls.nextLoadLevel = level;
  18338. const levelDetails = levelInfo.details;
  18339. // if level info not retrieved yet, switch state and wait for level retrieval
  18340. // if live playlist, ensure that new playlist has been refreshed to avoid loading/try to load
  18341. // a useless and outdated fragment (that might even introduce load error if it is already out of the live playlist)
  18342. if (!levelDetails || this.state === State.WAITING_LEVEL || levelDetails.live && this.levelLastLoaded !== levelInfo) {
  18343. this.level = level;
  18344. this.state = State.WAITING_LEVEL;
  18345. return;
  18346. }
  18347. const bufferLen = bufferInfo.len;
  18348. // compute max Buffer Length that we could get from this load level, based on level bitrate. don't buffer more than 60 MB and more than 30s
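// (getMaxBufferLength derives this from the configured buffer size/length limits and the level
// bitrate; the 60 MB / 30 s figures above correspond to the default maxBufferSize / maxBufferLength.)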
  18349. const maxBufLen = this.getMaxBufferLength(levelInfo.maxBitrate);
18350. // Stay idle if we are still within buffer margins
  18351. if (bufferLen >= maxBufLen) {
  18352. return;
  18353. }
  18354. if (this.backtrackFragment && this.backtrackFragment.start > bufferInfo.end) {
  18355. this.backtrackFragment = null;
  18356. }
  18357. const targetBufferTime = this.backtrackFragment ? this.backtrackFragment.start : bufferInfo.end;
  18358. let frag = this.getNextFragment(targetBufferTime, levelDetails);
  18359. // Avoid backtracking by loading an earlier segment in streams with segments that do not start with a key frame (flagged by `couldBacktrack`)
  18360. if (this.couldBacktrack && !this.fragPrevious && frag && frag.sn !== 'initSegment' && this.fragmentTracker.getState(frag) !== FragmentState.OK) {
  18361. var _this$backtrackFragme;
  18362. const backtrackSn = ((_this$backtrackFragme = this.backtrackFragment) != null ? _this$backtrackFragme : frag).sn;
  18363. const fragIdx = backtrackSn - levelDetails.startSN;
  18364. const backtrackFrag = levelDetails.fragments[fragIdx - 1];
  18365. if (backtrackFrag && frag.cc === backtrackFrag.cc) {
  18366. frag = backtrackFrag;
  18367. this.fragmentTracker.removeFragment(backtrackFrag);
  18368. }
  18369. } else if (this.backtrackFragment && bufferInfo.len) {
  18370. this.backtrackFragment = null;
  18371. }
  18372. // Avoid loop loading by using nextLoadPosition set for backtracking and skipping consecutive GAP tags
  18373. if (frag && this.isLoopLoading(frag, targetBufferTime)) {
  18374. const gapStart = frag.gap;
  18375. if (!gapStart) {
  18376. // Cleanup the fragment tracker before trying to find the next unbuffered fragment
  18377. const type = this.audioOnly && !this.altAudio ? ElementaryStreamTypes.AUDIO : ElementaryStreamTypes.VIDEO;
  18378. const mediaBuffer = (type === ElementaryStreamTypes.VIDEO ? this.videoBuffer : this.mediaBuffer) || this.media;
  18379. if (mediaBuffer) {
  18380. this.afterBufferFlushed(mediaBuffer, type, PlaylistLevelType.MAIN);
  18381. }
  18382. }
  18383. frag = this.getNextFragmentLoopLoading(frag, levelDetails, bufferInfo, PlaylistLevelType.MAIN, maxBufLen);
  18384. }
  18385. if (!frag) {
  18386. return;
  18387. }
  18388. if (frag.initSegment && !frag.initSegment.data && !this.bitrateTest) {
  18389. frag = frag.initSegment;
  18390. }
  18391. this.loadFragment(frag, levelInfo, targetBufferTime);
  18392. }
  18393. loadFragment(frag, level, targetBufferTime) {
  18394. // Check if fragment is not loaded
  18395. const fragState = this.fragmentTracker.getState(frag);
  18396. this.fragCurrent = frag;
  18397. if (fragState === FragmentState.NOT_LOADED || fragState === FragmentState.PARTIAL) {
  18398. if (frag.sn === 'initSegment') {
  18399. this._loadInitSegment(frag, level);
  18400. } else if (this.bitrateTest) {
  18401. this.log(`Fragment ${frag.sn} of level ${frag.level} is being downloaded to test bitrate and will not be buffered`);
  18402. this._loadBitrateTestFrag(frag, level);
  18403. } else {
  18404. this.startFragRequested = true;
  18405. super.loadFragment(frag, level, targetBufferTime);
  18406. }
  18407. } else {
  18408. this.clearTrackerIfNeeded(frag);
  18409. }
  18410. }
  18411. getBufferedFrag(position) {
  18412. return this.fragmentTracker.getBufferedFrag(position, PlaylistLevelType.MAIN);
  18413. }
  18414. followingBufferedFrag(frag) {
  18415. if (frag) {
  18416. // try to get range of next fragment (500ms after this range)
  18417. return this.getBufferedFrag(frag.end + 0.5);
  18418. }
  18419. return null;
  18420. }
  18421. /*
  18422. on immediate level switch :
  18423. - pause playback if playing
  18424. - cancel any pending load request
  18425. - and trigger a buffer flush
  18426. */
  18427. immediateLevelSwitch() {
  18428. this.abortCurrentFrag();
  18429. this.flushMainBuffer(0, Number.POSITIVE_INFINITY);
  18430. }
  18431. /**
  18432. * try to switch ASAP without breaking video playback:
  18433. * in order to ensure smooth but quick level switching,
  18434. * we need to find the next flushable buffer range
  18435. * we should take into account new segment fetch time
  18436. */
  18437. nextLevelSwitch() {
  18438. const {
  18439. levels,
  18440. media
  18441. } = this;
18442. // ensure that media is defined and that metadata is available (to retrieve currentTime)
  18443. if (media != null && media.readyState) {
  18444. let fetchdelay;
  18445. const fragPlayingCurrent = this.getAppendedFrag(media.currentTime);
  18446. if (fragPlayingCurrent && fragPlayingCurrent.start > 1) {
18447. // flush the buffer preceding the current fragment (flush until the current fragment's start offset)
18448. // minus 1 s to avoid video freezing, which could happen if we flush the keyframe of the current video ...
  18449. this.flushMainBuffer(0, fragPlayingCurrent.start - 1);
  18450. }
  18451. const levelDetails = this.getLevelDetails();
  18452. if (levelDetails != null && levelDetails.live) {
  18453. const bufferInfo = this.getMainFwdBufferInfo();
  18454. // Do not flush in live stream with low buffer
  18455. if (!bufferInfo || bufferInfo.len < levelDetails.targetduration * 2) {
  18456. return;
  18457. }
  18458. }
  18459. if (!media.paused && levels) {
  18460. // add a safety delay of 1s
  18461. const nextLevelId = this.hls.nextLoadLevel;
  18462. const nextLevel = levels[nextLevelId];
  18463. const fragLastKbps = this.fragLastKbps;
  18464. if (fragLastKbps && this.fragCurrent) {
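// Estimate how long the next fragment will take to download at the new level:
// duration * nextLevel.maxBitrate gives the fragment size in bits; dividing by the last
// measured throughput (fragLastKbps in kbit/s, i.e. *1000 for bit/s) gives seconds,
// plus the 1 s safety delay mentioned above.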
  18465. fetchdelay = this.fragCurrent.duration * nextLevel.maxBitrate / (1000 * fragLastKbps) + 1;
  18466. } else {
  18467. fetchdelay = 0;
  18468. }
  18469. } else {
  18470. fetchdelay = 0;
  18471. }
  18472. // this.log('fetchdelay:'+fetchdelay);
18473. // find the buffer range that will be reached once the new fragment has been fetched
  18474. const bufferedFrag = this.getBufferedFrag(media.currentTime + fetchdelay);
  18475. if (bufferedFrag) {
  18476. // we can flush buffer range following this one without stalling playback
  18477. const nextBufferedFrag = this.followingBufferedFrag(bufferedFrag);
  18478. if (nextBufferedFrag) {
  18479. // if we are here, we can also cancel any loading/demuxing in progress, as they are useless
  18480. this.abortCurrentFrag();
  18481. // start flush position is in next buffered frag. Leave some padding for non-independent segments and smoother playback.
  18482. const maxStart = nextBufferedFrag.maxStartPTS ? nextBufferedFrag.maxStartPTS : nextBufferedFrag.start;
  18483. const fragDuration = nextBufferedFrag.duration;
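// Flush from the later of: the end of the fragment we are currently in, or a point some way
// into the next buffered fragment. The padding is fragDuration - maxFragLookUpTolerance,
// clamped between 12.5% and 25% of the fragment duration (50%-75% when backtracking is
// possible), so part of the next buffered fragment is preserved for smoother playback.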
  18484. const startPts = Math.max(bufferedFrag.end, maxStart + Math.min(Math.max(fragDuration - this.config.maxFragLookUpTolerance, fragDuration * (this.couldBacktrack ? 0.5 : 0.125)), fragDuration * (this.couldBacktrack ? 0.75 : 0.25)));
  18485. this.flushMainBuffer(startPts, Number.POSITIVE_INFINITY);
  18486. }
  18487. }
  18488. }
  18489. }
  18490. abortCurrentFrag() {
  18491. const fragCurrent = this.fragCurrent;
  18492. this.fragCurrent = null;
  18493. this.backtrackFragment = null;
  18494. if (fragCurrent) {
  18495. fragCurrent.abortRequests();
  18496. this.fragmentTracker.removeFragment(fragCurrent);
  18497. }
  18498. switch (this.state) {
  18499. case State.KEY_LOADING:
  18500. case State.FRAG_LOADING:
  18501. case State.FRAG_LOADING_WAITING_RETRY:
  18502. case State.PARSING:
  18503. case State.PARSED:
  18504. this.state = State.IDLE;
  18505. break;
  18506. }
  18507. this.nextLoadPosition = this.getLoadPosition();
  18508. }
  18509. flushMainBuffer(startOffset, endOffset) {
  18510. super.flushMainBuffer(startOffset, endOffset, this.altAudio ? 'video' : null);
  18511. }
  18512. onMediaAttached(event, data) {
  18513. super.onMediaAttached(event, data);
  18514. const media = data.media;
  18515. this.onvplaying = this.onMediaPlaying.bind(this);
  18516. this.onvseeked = this.onMediaSeeked.bind(this);
  18517. media.addEventListener('playing', this.onvplaying);
  18518. media.addEventListener('seeked', this.onvseeked);
  18519. this.gapController = new GapController(this.config, media, this.fragmentTracker, this.hls);
  18520. }
  18521. onMediaDetaching() {
  18522. const {
  18523. media
  18524. } = this;
  18525. if (media && this.onvplaying && this.onvseeked) {
  18526. media.removeEventListener('playing', this.onvplaying);
  18527. media.removeEventListener('seeked', this.onvseeked);
  18528. this.onvplaying = this.onvseeked = null;
  18529. this.videoBuffer = null;
  18530. }
  18531. this.fragPlaying = null;
  18532. if (this.gapController) {
  18533. this.gapController.destroy();
  18534. this.gapController = null;
  18535. }
  18536. super.onMediaDetaching();
  18537. }
  18538. onMediaPlaying() {
  18539. // tick to speed up FRAG_CHANGED triggering
  18540. this.tick();
  18541. }
  18542. onMediaSeeked() {
  18543. const media = this.media;
  18544. const currentTime = media ? media.currentTime : null;
  18545. if (isFiniteNumber(currentTime)) {
  18546. this.log(`Media seeked to ${currentTime.toFixed(3)}`);
  18547. }
18548. // If "seeked" was issued before the buffer was appended, do not tick immediately
  18549. const bufferInfo = this.getMainFwdBufferInfo();
  18550. if (bufferInfo === null || bufferInfo.len === 0) {
  18551. this.warn(`Main forward buffer length on "seeked" event ${bufferInfo ? bufferInfo.len : 'empty'})`);
  18552. return;
  18553. }
  18554. // tick to speed up FRAG_CHANGED triggering
  18555. this.tick();
  18556. }
  18557. onManifestLoading() {
  18558. // reset buffer on manifest loading
  18559. this.log('Trigger BUFFER_RESET');
  18560. this.hls.trigger(Events.BUFFER_RESET, undefined);
  18561. this.fragmentTracker.removeAllFragments();
  18562. this.couldBacktrack = false;
  18563. this.startPosition = this.lastCurrentTime = this.fragLastKbps = 0;
  18564. this.levels = this.fragPlaying = this.backtrackFragment = this.levelLastLoaded = null;
  18565. this.altAudio = this.audioOnly = this.startFragRequested = false;
  18566. }
  18567. onManifestParsed(event, data) {
18568. // detect whether different kinds of audio codecs are used among the playlists
  18569. let aac = false;
  18570. let heaac = false;
  18571. data.levels.forEach(level => {
  18572. const codec = level.audioCodec;
  18573. if (codec) {
  18574. aac = aac || codec.indexOf('mp4a.40.2') !== -1;
  18575. heaac = heaac || codec.indexOf('mp4a.40.5') !== -1;
  18576. }
  18577. });
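// A codec-switch workaround is only needed when both AAC and HE-AAC variants exist and
// SourceBuffer.changeType() is unavailable (changeTypeSupported() is assumed to test for
// that API); otherwise the source buffer can change codec in place.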
  18578. this.audioCodecSwitch = aac && heaac && !changeTypeSupported();
  18579. if (this.audioCodecSwitch) {
  18580. this.log('Both AAC/HE-AAC audio found in levels; declaring level codec as HE-AAC');
  18581. }
  18582. this.levels = data.levels;
  18583. this.startFragRequested = false;
  18584. }
  18585. onLevelLoading(event, data) {
  18586. const {
  18587. levels
  18588. } = this;
  18589. if (!levels || this.state !== State.IDLE) {
  18590. return;
  18591. }
  18592. const level = levels[data.level];
  18593. if (!level.details || level.details.live && this.levelLastLoaded !== level || this.waitForCdnTuneIn(level.details)) {
  18594. this.state = State.WAITING_LEVEL;
  18595. }
  18596. }
  18597. onLevelLoaded(event, data) {
  18598. var _curLevel$details;
  18599. const {
  18600. levels
  18601. } = this;
  18602. const newLevelId = data.level;
  18603. const newDetails = data.details;
  18604. const duration = newDetails.totalduration;
  18605. if (!levels) {
  18606. this.warn(`Levels were reset while loading level ${newLevelId}`);
  18607. return;
  18608. }
  18609. this.log(`Level ${newLevelId} loaded [${newDetails.startSN},${newDetails.endSN}]${newDetails.lastPartSn ? `[part-${newDetails.lastPartSn}-${newDetails.lastPartIndex}]` : ''}, cc [${newDetails.startCC}, ${newDetails.endCC}] duration:${duration}`);
  18610. const curLevel = levels[newLevelId];
  18611. const fragCurrent = this.fragCurrent;
  18612. if (fragCurrent && (this.state === State.FRAG_LOADING || this.state === State.FRAG_LOADING_WAITING_RETRY)) {
  18613. if (fragCurrent.level !== data.level && fragCurrent.loader) {
  18614. this.abortCurrentFrag();
  18615. }
  18616. }
  18617. let sliding = 0;
  18618. if (newDetails.live || (_curLevel$details = curLevel.details) != null && _curLevel$details.live) {
  18619. var _this$levelLastLoaded;
  18620. this.checkLiveUpdate(newDetails);
  18621. if (newDetails.deltaUpdateFailed) {
  18622. return;
  18623. }
  18624. sliding = this.alignPlaylists(newDetails, curLevel.details, (_this$levelLastLoaded = this.levelLastLoaded) == null ? void 0 : _this$levelLastLoaded.details);
  18625. }
  18626. // override level info
  18627. curLevel.details = newDetails;
  18628. this.levelLastLoaded = curLevel;
  18629. this.hls.trigger(Events.LEVEL_UPDATED, {
  18630. details: newDetails,
  18631. level: newLevelId
  18632. });
  18633. // only switch back to IDLE state if we were waiting for level to start downloading a new fragment
  18634. if (this.state === State.WAITING_LEVEL) {
  18635. if (this.waitForCdnTuneIn(newDetails)) {
  18636. // Wait for Low-Latency CDN Tune-in
  18637. return;
  18638. }
  18639. this.state = State.IDLE;
  18640. }
  18641. if (!this.startFragRequested) {
  18642. this.setStartPosition(newDetails, sliding);
  18643. } else if (newDetails.live) {
  18644. this.synchronizeToLiveEdge(newDetails);
  18645. }
  18646. // trigger handler right now
  18647. this.tick();
  18648. }
  18649. _handleFragmentLoadProgress(data) {
  18650. var _frag$initSegment;
  18651. const {
  18652. frag,
  18653. part,
  18654. payload
  18655. } = data;
  18656. const {
  18657. levels
  18658. } = this;
  18659. if (!levels) {
  18660. this.warn(`Levels were reset while fragment load was in progress. Fragment ${frag.sn} of level ${frag.level} will not be buffered`);
  18661. return;
  18662. }
  18663. const currentLevel = levels[frag.level];
  18664. const details = currentLevel.details;
  18665. if (!details) {
  18666. this.warn(`Dropping fragment ${frag.sn} of level ${frag.level} after level details were reset`);
  18667. this.fragmentTracker.removeFragment(frag);
  18668. return;
  18669. }
  18670. const videoCodec = currentLevel.videoCodec;
  18671. // time Offset is accurate if level PTS is known, or if playlist is not sliding (not live)
  18672. const accurateTimeOffset = details.PTSKnown || !details.live;
  18673. const initSegmentData = (_frag$initSegment = frag.initSegment) == null ? void 0 : _frag$initSegment.data;
  18674. const audioCodec = this._getAudioCodec(currentLevel);
  18675. // transmux the MPEG-TS data to ISO-BMFF segments
  18676. // this.log(`Transmuxing ${frag.sn} of [${details.startSN} ,${details.endSN}],level ${frag.level}, cc ${frag.cc}`);
  18677. const transmuxer = this.transmuxer = this.transmuxer || new TransmuxerInterface(this.hls, PlaylistLevelType.MAIN, this._handleTransmuxComplete.bind(this), this._handleTransmuxerFlush.bind(this));
  18678. const partIndex = part ? part.index : -1;
  18679. const partial = partIndex !== -1;
  18680. const chunkMeta = new ChunkMetadata(frag.level, frag.sn, frag.stats.chunkCount, payload.byteLength, partIndex, partial);
  18681. const initPTS = this.initPTS[frag.cc];
  18682. transmuxer.push(payload, initSegmentData, audioCodec, videoCodec, frag, part, details.totalduration, accurateTimeOffset, chunkMeta, initPTS);
  18683. }
  18684. onAudioTrackSwitching(event, data) {
18685. // if a URL is found on the new audio track, it is an alternate audio track
  18686. const fromAltAudio = this.altAudio;
  18687. const altAudio = !!data.url;
18688. // if we switch to main audio, ensure that main fragment scheduling is synced with media.buffered
18689. // don't do anything if we switch to alt audio: the audio stream controller handles it.
18690. // we will just have to change buffer scheduling on audioTrackSwitched
  18691. if (!altAudio) {
  18692. if (this.mediaBuffer !== this.media) {
  18693. this.log('Switching on main audio, use media.buffered to schedule main fragment loading');
  18694. this.mediaBuffer = this.media;
  18695. const fragCurrent = this.fragCurrent;
  18696. // we need to refill audio buffer from main: cancel any frag loading to speed up audio switch
  18697. if (fragCurrent) {
  18698. this.log('Switching to main audio track, cancel main fragment load');
  18699. fragCurrent.abortRequests();
  18700. this.fragmentTracker.removeFragment(fragCurrent);
  18701. }
  18702. // destroy transmuxer to force init segment generation (following audio switch)
  18703. this.resetTransmuxer();
  18704. // switch to IDLE state to load new fragment
  18705. this.resetLoadingState();
  18706. } else if (this.audioOnly) {
  18707. // Reset audio transmuxer so when switching back to main audio we're not still appending where we left off
  18708. this.resetTransmuxer();
  18709. }
  18710. const hls = this.hls;
  18711. // If switching from alt to main audio, flush all audio and trigger track switched
  18712. if (fromAltAudio) {
  18713. hls.trigger(Events.BUFFER_FLUSHING, {
  18714. startOffset: 0,
  18715. endOffset: Number.POSITIVE_INFINITY,
  18716. type: null
  18717. });
  18718. this.fragmentTracker.removeAllFragments();
  18719. }
  18720. hls.trigger(Events.AUDIO_TRACK_SWITCHED, data);
  18721. }
  18722. }
  18723. onAudioTrackSwitched(event, data) {
  18724. const trackId = data.id;
  18725. const altAudio = !!this.hls.audioTracks[trackId].url;
  18726. if (altAudio) {
  18727. const videoBuffer = this.videoBuffer;
  18728. // if we switched on alternate audio, ensure that main fragment scheduling is synced with video sourcebuffer buffered
  18729. if (videoBuffer && this.mediaBuffer !== videoBuffer) {
  18730. this.log('Switching on alternate audio, use video.buffered to schedule main fragment loading');
  18731. this.mediaBuffer = videoBuffer;
  18732. }
  18733. }
  18734. this.altAudio = altAudio;
  18735. this.tick();
  18736. }
  18737. onBufferCreated(event, data) {
  18738. const tracks = data.tracks;
  18739. let mediaTrack;
  18740. let name;
  18741. let alternate = false;
  18742. for (const type in tracks) {
  18743. const track = tracks[type];
  18744. if (track.id === 'main') {
  18745. name = type;
  18746. mediaTrack = track;
  18747. // keep video source buffer reference
  18748. if (type === 'video') {
  18749. const videoTrack = tracks[type];
  18750. if (videoTrack) {
  18751. this.videoBuffer = videoTrack.buffer;
  18752. }
  18753. }
  18754. } else {
  18755. alternate = true;
  18756. }
  18757. }
  18758. if (alternate && mediaTrack) {
  18759. this.log(`Alternate track found, use ${name}.buffered to schedule main fragment loading`);
  18760. this.mediaBuffer = mediaTrack.buffer;
  18761. } else {
  18762. this.mediaBuffer = this.media;
  18763. }
  18764. }
  18765. onFragBuffered(event, data) {
  18766. const {
  18767. frag,
  18768. part
  18769. } = data;
  18770. if (frag && frag.type !== PlaylistLevelType.MAIN) {
  18771. return;
  18772. }
  18773. if (this.fragContextChanged(frag)) {
  18774. // If a level switch was requested while a fragment was buffering, it will emit the FRAG_BUFFERED event upon completion
  18775. // Avoid setting state back to IDLE, since that will interfere with a level switch
  18776. this.warn(`Fragment ${frag.sn}${part ? ' p: ' + part.index : ''} of level ${frag.level} finished buffering, but was aborted. state: ${this.state}`);
  18777. if (this.state === State.PARSED) {
  18778. this.state = State.IDLE;
  18779. }
  18780. return;
  18781. }
  18782. const stats = part ? part.stats : frag.stats;
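// Throughput for this fragment in kbit/s: total bytes * 8 = bits, divided by the elapsed
// time in ms from first byte loaded to end of buffering (bits/ms == kbit/s).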
  18783. this.fragLastKbps = Math.round(8 * stats.total / (stats.buffering.end - stats.loading.first));
  18784. if (frag.sn !== 'initSegment') {
  18785. this.fragPrevious = frag;
  18786. }
  18787. this.fragBufferedComplete(frag, part);
  18788. }
  18789. onError(event, data) {
  18790. var _data$context;
  18791. if (data.fatal) {
  18792. this.state = State.ERROR;
  18793. return;
  18794. }
  18795. switch (data.details) {
  18796. case ErrorDetails.FRAG_GAP:
  18797. case ErrorDetails.FRAG_PARSING_ERROR:
  18798. case ErrorDetails.FRAG_DECRYPT_ERROR:
  18799. case ErrorDetails.FRAG_LOAD_ERROR:
  18800. case ErrorDetails.FRAG_LOAD_TIMEOUT:
  18801. case ErrorDetails.KEY_LOAD_ERROR:
  18802. case ErrorDetails.KEY_LOAD_TIMEOUT:
  18803. this.onFragmentOrKeyLoadError(PlaylistLevelType.MAIN, data);
  18804. break;
  18805. case ErrorDetails.LEVEL_LOAD_ERROR:
  18806. case ErrorDetails.LEVEL_LOAD_TIMEOUT:
  18807. case ErrorDetails.LEVEL_PARSING_ERROR:
18808. // in case of a non-fatal error while loading a level, if the level controller is not retrying the load, switch back to IDLE
  18809. if (!data.levelRetry && this.state === State.WAITING_LEVEL && ((_data$context = data.context) == null ? void 0 : _data$context.type) === PlaylistContextType.LEVEL) {
  18810. this.state = State.IDLE;
  18811. }
  18812. break;
  18813. case ErrorDetails.BUFFER_APPEND_ERROR:
  18814. case ErrorDetails.BUFFER_FULL_ERROR:
  18815. if (!data.parent || data.parent !== 'main') {
  18816. return;
  18817. }
  18818. if (data.details === ErrorDetails.BUFFER_APPEND_ERROR) {
  18819. this.resetLoadingState();
  18820. return;
  18821. }
  18822. if (this.reduceLengthAndFlushBuffer(data)) {
  18823. this.flushMainBuffer(0, Number.POSITIVE_INFINITY);
  18824. }
  18825. break;
  18826. case ErrorDetails.INTERNAL_EXCEPTION:
  18827. this.recoverWorkerError(data);
  18828. break;
  18829. }
  18830. }
  18831. // Checks the health of the buffer and attempts to resolve playback stalls.
  18832. checkBuffer() {
  18833. const {
  18834. media,
  18835. gapController
  18836. } = this;
  18837. if (!media || !gapController || !media.readyState) {
  18838. // Exit early if we don't have media or if the media hasn't buffered anything yet (readyState 0)
  18839. return;
  18840. }
  18841. if (this.loadedmetadata || !BufferHelper.getBuffered(media).length) {
  18842. // Resolve gaps using the main buffer, whose ranges are the intersections of the A/V sourcebuffers
  18843. const activeFrag = this.state !== State.IDLE ? this.fragCurrent : null;
  18844. gapController.poll(this.lastCurrentTime, activeFrag);
  18845. }
  18846. this.lastCurrentTime = media.currentTime;
  18847. }
  18848. onFragLoadEmergencyAborted() {
  18849. this.state = State.IDLE;
18850. // if loadedmetadata is not set, it means we performed an emergency switch-down on the first frag
  18851. // in that case, reset startFragRequested flag
  18852. if (!this.loadedmetadata) {
  18853. this.startFragRequested = false;
  18854. this.nextLoadPosition = this.startPosition;
  18855. }
  18856. this.tickImmediate();
  18857. }
  18858. onBufferFlushed(event, {
  18859. type
  18860. }) {
  18861. if (type !== ElementaryStreamTypes.AUDIO || this.audioOnly && !this.altAudio) {
  18862. const mediaBuffer = (type === ElementaryStreamTypes.VIDEO ? this.videoBuffer : this.mediaBuffer) || this.media;
  18863. this.afterBufferFlushed(mediaBuffer, type, PlaylistLevelType.MAIN);
  18864. this.tick();
  18865. }
  18866. }
  18867. onLevelsUpdated(event, data) {
  18868. if (this.level > -1 && this.fragCurrent) {
  18869. this.level = this.fragCurrent.level;
  18870. }
  18871. this.levels = data.levels;
  18872. }
  18873. swapAudioCodec() {
  18874. this.audioCodecSwap = !this.audioCodecSwap;
  18875. }
  18876. /**
  18877. * Seeks to the set startPosition if not equal to the mediaElement's current time.
  18878. */
  18879. seekToStartPos() {
  18880. const {
  18881. media
  18882. } = this;
  18883. if (!media) {
  18884. return;
  18885. }
  18886. const currentTime = media.currentTime;
  18887. let startPosition = this.startPosition;
18888. // only adjust currentTime if it differs from startPosition or if startPosition is not buffered
18889. // at that stage there should be only one buffered range, as we reach this code after the first fragment has been buffered
  18890. if (startPosition >= 0 && currentTime < startPosition) {
  18891. if (media.seeking) {
  18892. this.log(`could not seek to ${startPosition}, already seeking at ${currentTime}`);
  18893. return;
  18894. }
  18895. const buffered = BufferHelper.getBuffered(media);
  18896. const bufferStart = buffered.length ? buffered.start(0) : 0;
  18897. const delta = bufferStart - startPosition;
  18898. if (delta > 0 && (delta < this.config.maxBufferHole || delta < this.config.maxFragLookUpTolerance)) {
  18899. this.log(`adjusting start position by ${delta} to match buffer start`);
  18900. startPosition += delta;
  18901. this.startPosition = startPosition;
  18902. }
  18903. this.log(`seek to target start position ${startPosition} from current time ${currentTime}`);
  18904. media.currentTime = startPosition;
  18905. }
  18906. }
  18907. _getAudioCodec(currentLevel) {
  18908. let audioCodec = this.config.defaultAudioCodec || currentLevel.audioCodec;
  18909. if (this.audioCodecSwap && audioCodec) {
  18910. this.log('Swapping audio codec');
  18911. if (audioCodec.indexOf('mp4a.40.5') !== -1) {
  18912. audioCodec = 'mp4a.40.2';
  18913. } else {
  18914. audioCodec = 'mp4a.40.5';
  18915. }
  18916. }
  18917. return audioCodec;
  18918. }
  18919. _loadBitrateTestFrag(frag, level) {
  18920. frag.bitrateTest = true;
  18921. this._doFragLoad(frag, level).then(data => {
  18922. const {
  18923. hls
  18924. } = this;
  18925. if (!data || this.fragContextChanged(frag)) {
  18926. return;
  18927. }
  18928. level.fragmentError = 0;
  18929. this.state = State.IDLE;
  18930. this.startFragRequested = false;
  18931. this.bitrateTest = false;
  18932. const stats = frag.stats;
18933. // Bitrate test fragments are neither parsed nor buffered
  18934. stats.parsing.start = stats.parsing.end = stats.buffering.start = stats.buffering.end = self.performance.now();
  18935. hls.trigger(Events.FRAG_LOADED, data);
  18936. frag.bitrateTest = false;
  18937. });
  18938. }
  18939. _handleTransmuxComplete(transmuxResult) {
  18940. var _id3$samples;
  18941. const id = 'main';
  18942. const {
  18943. hls
  18944. } = this;
  18945. const {
  18946. remuxResult,
  18947. chunkMeta
  18948. } = transmuxResult;
  18949. const context = this.getCurrentContext(chunkMeta);
  18950. if (!context) {
  18951. this.resetWhenMissingContext(chunkMeta);
  18952. return;
  18953. }
  18954. const {
  18955. frag,
  18956. part,
  18957. level
  18958. } = context;
  18959. const {
  18960. video,
  18961. text,
  18962. id3,
  18963. initSegment
  18964. } = remuxResult;
  18965. const {
  18966. details
  18967. } = level;
  18968. // The audio-stream-controller handles audio buffering if Hls.js is playing an alternate audio track
  18969. const audio = this.altAudio ? undefined : remuxResult.audio;
  18970. // Check if the current fragment has been aborted. We check this by first seeing if we're still playing the current level.
  18971. // If we are, subsequently check if the currently loading fragment (fragCurrent) has changed.
  18972. if (this.fragContextChanged(frag)) {
  18973. this.fragmentTracker.removeFragment(frag);
  18974. return;
  18975. }
  18976. this.state = State.PARSING;
  18977. if (initSegment) {
  18978. if (initSegment != null && initSegment.tracks) {
  18979. const mapFragment = frag.initSegment || frag;
  18980. this._bufferInitSegment(level, initSegment.tracks, mapFragment, chunkMeta);
  18981. hls.trigger(Events.FRAG_PARSING_INIT_SEGMENT, {
  18982. frag: mapFragment,
  18983. id,
  18984. tracks: initSegment.tracks
  18985. });
  18986. }
  18987. // This would be nice if Number.isFinite acted as a typeguard, but it doesn't. See: https://github.com/Microsoft/TypeScript/issues/10038
  18988. const initPTS = initSegment.initPTS;
  18989. const timescale = initSegment.timescale;
  18990. if (isFiniteNumber(initPTS)) {
  18991. this.initPTS[frag.cc] = {
  18992. baseTime: initPTS,
  18993. timescale
  18994. };
  18995. hls.trigger(Events.INIT_PTS_FOUND, {
  18996. frag,
  18997. id,
  18998. initPTS,
  18999. timescale
  19000. });
  19001. }
  19002. }
  19003. // Avoid buffering if backtracking this fragment
  19004. if (video && details && frag.sn !== 'initSegment') {
  19005. const prevFrag = details.fragments[frag.sn - 1 - details.startSN];
  19006. const isFirstFragment = frag.sn === details.startSN;
  19007. const isFirstInDiscontinuity = !prevFrag || frag.cc > prevFrag.cc;
  19008. if (remuxResult.independent !== false) {
  19009. const {
  19010. startPTS,
  19011. endPTS,
  19012. startDTS,
  19013. endDTS
  19014. } = video;
  19015. if (part) {
  19016. part.elementaryStreams[video.type] = {
  19017. startPTS,
  19018. endPTS,
  19019. startDTS,
  19020. endDTS
  19021. };
  19022. } else {
  19023. if (video.firstKeyFrame && video.independent && chunkMeta.id === 1 && !isFirstInDiscontinuity) {
  19024. this.couldBacktrack = true;
  19025. }
  19026. if (video.dropped && video.independent) {
  19027. // Backtrack if dropped frames create a gap after currentTime
  19028. const bufferInfo = this.getMainFwdBufferInfo();
  19029. const targetBufferTime = (bufferInfo ? bufferInfo.end : this.getLoadPosition()) + this.config.maxBufferHole;
  19030. const startTime = video.firstKeyFramePTS ? video.firstKeyFramePTS : startPTS;
  19031. if (!isFirstFragment && targetBufferTime < startTime - this.config.maxBufferHole && !isFirstInDiscontinuity) {
  19032. this.backtrack(frag);
  19033. return;
  19034. } else if (isFirstInDiscontinuity) {
  19035. // Mark segment with a gap to avoid loop loading
  19036. frag.gap = true;
  19037. }
  19038. // Set video stream start to fragment start so that truncated samples do not distort the timeline, and mark it partial
  19039. frag.setElementaryStreamInfo(video.type, frag.start, endPTS, frag.start, endDTS, true);
  19040. } else if (isFirstFragment && startPTS > MAX_START_GAP_JUMP) {
  19041. // Mark segment with a gap to skip large start gap
  19042. frag.gap = true;
  19043. }
  19044. }
  19045. frag.setElementaryStreamInfo(video.type, startPTS, endPTS, startDTS, endDTS);
  19046. if (this.backtrackFragment) {
  19047. this.backtrackFragment = frag;
  19048. }
  19049. this.bufferFragmentData(video, frag, part, chunkMeta, isFirstFragment || isFirstInDiscontinuity);
  19050. } else if (isFirstFragment || isFirstInDiscontinuity) {
  19051. // Mark segment with a gap to avoid loop loading
  19052. frag.gap = true;
  19053. } else {
  19054. this.backtrack(frag);
  19055. return;
  19056. }
  19057. }
  19058. if (audio) {
  19059. const {
  19060. startPTS,
  19061. endPTS,
  19062. startDTS,
  19063. endDTS
  19064. } = audio;
  19065. if (part) {
  19066. part.elementaryStreams[ElementaryStreamTypes.AUDIO] = {
  19067. startPTS,
  19068. endPTS,
  19069. startDTS,
  19070. endDTS
  19071. };
  19072. }
  19073. frag.setElementaryStreamInfo(ElementaryStreamTypes.AUDIO, startPTS, endPTS, startDTS, endDTS);
  19074. this.bufferFragmentData(audio, frag, part, chunkMeta);
  19075. }
  19076. if (details && id3 != null && (_id3$samples = id3.samples) != null && _id3$samples.length) {
  19077. const emittedID3 = {
  19078. id,
  19079. frag,
  19080. details,
  19081. samples: id3.samples
  19082. };
  19083. hls.trigger(Events.FRAG_PARSING_METADATA, emittedID3);
  19084. }
  19085. if (details && text) {
  19086. const emittedText = {
  19087. id,
  19088. frag,
  19089. details,
  19090. samples: text.samples
  19091. };
  19092. hls.trigger(Events.FRAG_PARSING_USERDATA, emittedText);
  19093. }
  19094. }
  19095. _bufferInitSegment(currentLevel, tracks, frag, chunkMeta) {
  19096. if (this.state !== State.PARSING) {
  19097. return;
  19098. }
  19099. this.audioOnly = !!tracks.audio && !tracks.video;
  19100. // if audio track is expected to come from audio stream controller, discard any coming from main
  19101. if (this.altAudio && !this.audioOnly) {
  19102. delete tracks.audio;
  19103. }
  19104. // include levelCodec in audio and video tracks
  19105. const {
  19106. audio,
  19107. video,
  19108. audiovideo
  19109. } = tracks;
  19110. if (audio) {
  19111. let audioCodec = currentLevel.audioCodec;
  19112. const ua = navigator.userAgent.toLowerCase();
  19113. if (this.audioCodecSwitch) {
  19114. if (audioCodec) {
  19115. if (audioCodec.indexOf('mp4a.40.5') !== -1) {
  19116. audioCodec = 'mp4a.40.2';
  19117. } else {
  19118. audioCodec = 'mp4a.40.5';
  19119. }
  19120. }
  19121. // In the case that AAC and HE-AAC audio codecs are signalled in manifest,
19122. // force HE-AAC, as it seems that most browsers prefer it.
19123. // don't force HE-AAC for mono streams, or in Firefox
  19124. const audioMetadata = audio.metadata;
  19125. if (audioMetadata && 'channelCount' in audioMetadata && (audioMetadata.channelCount || 1) !== 1 && ua.indexOf('firefox') === -1) {
  19126. audioCodec = 'mp4a.40.5';
  19127. }
  19128. }
  19129. // HE-AAC is broken on Android, always signal audio codec as AAC even if variant manifest states otherwise
  19130. if (audioCodec && audioCodec.indexOf('mp4a.40.5') !== -1 && ua.indexOf('android') !== -1 && audio.container !== 'audio/mpeg') {
  19131. // Exclude mpeg audio
  19132. audioCodec = 'mp4a.40.2';
  19133. this.log(`Android: force audio codec to ${audioCodec}`);
  19134. }
  19135. if (currentLevel.audioCodec && currentLevel.audioCodec !== audioCodec) {
  19136. this.log(`Swapping manifest audio codec "${currentLevel.audioCodec}" for "${audioCodec}"`);
  19137. }
  19138. audio.levelCodec = audioCodec;
  19139. audio.id = 'main';
  19140. this.log(`Init audio buffer, container:${audio.container}, codecs[selected/level/parsed]=[${audioCodec || ''}/${currentLevel.audioCodec || ''}/${audio.codec}]`);
  19141. }
  19142. if (video) {
  19143. video.levelCodec = currentLevel.videoCodec;
  19144. video.id = 'main';
  19145. this.log(`Init video buffer, container:${video.container}, codecs[level/parsed]=[${currentLevel.videoCodec || ''}/${video.codec}]`);
  19146. }
  19147. if (audiovideo) {
  19148. this.log(`Init audiovideo buffer, container:${audiovideo.container}, codecs[level/parsed]=[${currentLevel.codecs}/${audiovideo.codec}]`);
  19149. }
  19150. this.hls.trigger(Events.BUFFER_CODECS, tracks);
  19151. // loop through tracks that are going to be provided to bufferController
  19152. Object.keys(tracks).forEach(trackName => {
  19153. const track = tracks[trackName];
  19154. const initSegment = track.initSegment;
  19155. if (initSegment != null && initSegment.byteLength) {
  19156. this.hls.trigger(Events.BUFFER_APPENDING, {
  19157. type: trackName,
  19158. data: initSegment,
  19159. frag,
  19160. part: null,
  19161. chunkMeta,
  19162. parent: frag.type
  19163. });
  19164. }
  19165. });
  19166. // trigger handler right now
  19167. this.tickImmediate();
  19168. }
  19169. getMainFwdBufferInfo() {
  19170. return this.getFwdBufferInfo(this.mediaBuffer ? this.mediaBuffer : this.media, PlaylistLevelType.MAIN);
  19171. }
  19172. backtrack(frag) {
  19173. this.couldBacktrack = true;
  19174. // Causes findFragments to backtrack through fragments to find the keyframe
  19175. this.backtrackFragment = frag;
  19176. this.resetTransmuxer();
  19177. this.flushBufferGap(frag);
  19178. this.fragmentTracker.removeFragment(frag);
  19179. this.fragPrevious = null;
  19180. this.nextLoadPosition = frag.start;
  19181. this.state = State.IDLE;
  19182. }
  19183. checkFragmentChanged() {
  19184. const video = this.media;
  19185. let fragPlayingCurrent = null;
  19186. if (video && video.readyState > 1 && video.seeking === false) {
  19187. const currentTime = video.currentTime;
19188. /* if the video element is in a seeked state, currentTime can only increase
19189. (assuming that the playback rate is positive ...).
19190. Since currentTime sometimes jumps back to zero after a
19191. media decode error, verify that the position is buffered
19192. to avoid seeking back to the wrong position after such an error.
19193. */
  19194. if (BufferHelper.isBuffered(video, currentTime)) {
  19195. fragPlayingCurrent = this.getAppendedFrag(currentTime);
  19196. } else if (BufferHelper.isBuffered(video, currentTime + 0.1)) {
  19197. /* ensure that FRAG_CHANGED event is triggered at startup,
  19198. when first video frame is displayed and playback is paused.
  19199. add a tolerance of 100ms, in case current position is not buffered,
  19200. check if current pos+100ms is buffered and use that buffer range
  19201. for FRAG_CHANGED event reporting */
  19202. fragPlayingCurrent = this.getAppendedFrag(currentTime + 0.1);
  19203. }
  19204. if (fragPlayingCurrent) {
  19205. this.backtrackFragment = null;
  19206. const fragPlaying = this.fragPlaying;
  19207. const fragCurrentLevel = fragPlayingCurrent.level;
  19208. if (!fragPlaying || fragPlayingCurrent.sn !== fragPlaying.sn || fragPlaying.level !== fragCurrentLevel) {
  19209. this.fragPlaying = fragPlayingCurrent;
  19210. this.hls.trigger(Events.FRAG_CHANGED, {
  19211. frag: fragPlayingCurrent
  19212. });
  19213. if (!fragPlaying || fragPlaying.level !== fragCurrentLevel) {
  19214. this.hls.trigger(Events.LEVEL_SWITCHED, {
  19215. level: fragCurrentLevel
  19216. });
  19217. }
  19218. }
  19219. }
  19220. }
  19221. }
  get nextLevel() {
    const frag = this.nextBufferedFrag;
    if (frag) {
      return frag.level;
    }
    return -1;
  }
  get currentFrag() {
    const media = this.media;
    if (media) {
      return this.fragPlaying || this.getAppendedFrag(media.currentTime);
    }
    return null;
  }
  get currentProgramDateTime() {
    const media = this.media;
    if (media) {
      const currentTime = media.currentTime;
      const frag = this.currentFrag;
      if (frag && isFiniteNumber(currentTime) && isFiniteNumber(frag.programDateTime)) {
        const epocMs = frag.programDateTime + (currentTime - frag.start) * 1000;
        return new Date(epocMs);
      }
    }
    return null;
  }
  get currentLevel() {
    const frag = this.currentFrag;
    if (frag) {
      return frag.level;
    }
    return -1;
  }
  get nextBufferedFrag() {
    const frag = this.currentFrag;
    if (frag) {
      return this.followingBufferedFrag(frag);
    }
    return null;
  }
  get forceStartLoad() {
    return this._forceStartLoad;
  }
}
/**
 * The `Hls` class is the core of the HLS.js library used to instantiate player instances.
 * @public
 */
class Hls {
  /**
   * Get the video-dev/hls.js package version.
   */
  static get version() {
    return "1.5.11";
  }
  /**
   * Check if the required MediaSource Extensions are available.
   */
  static isMSESupported() {
    return isMSESupported();
  }
  /**
   * Check if MediaSource Extensions are available and isTypeSupported checks pass for any baseline codecs.
   */
  static isSupported() {
    return isSupported();
  }
  /**
   * Get the MediaSource global used for MSE playback (ManagedMediaSource, MediaSource, or WebKitMediaSource).
   */
  static getMediaSource() {
    return getMediaSource();
  }
  static get Events() {
    return Events;
  }
  static get ErrorTypes() {
    return ErrorTypes;
  }
  static get ErrorDetails() {
    return ErrorDetails;
  }
  /**
   * Get the default configuration applied to new instances.
   */
  static get DefaultConfig() {
    if (!Hls.defaultConfig) {
      return hlsDefaultConfig;
    }
    return Hls.defaultConfig;
  }
  /**
   * Replace the default configuration applied to new instances.
   */
  static set DefaultConfig(defaultConfig) {
    Hls.defaultConfig = defaultConfig;
  }
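  /* Usage sketch for the static configuration hooks above (illustrative, not part of the
     library source): overriding one default before any instances are created.
     `maxBufferLength` is a standard hls.js config option; the value 60 is an example.

     Hls.DefaultConfig = {
       ...Hls.DefaultConfig,
       maxBufferLength: 60 // buffer up to 60s ahead by default
     };
  */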
  /**
   * Creates an instance of an HLS client that can attach to exactly one `HTMLMediaElement`.
   * @param userConfig - Configuration options applied over `Hls.DefaultConfig`
   */
  constructor(userConfig = {}) {
    /**
     * The runtime configuration used by the player. At instantiation this is a combination of `hls.userConfig` merged over `Hls.DefaultConfig`.
     */
    this.config = void 0;
    /**
     * The configuration object provided on player instantiation.
     */
    this.userConfig = void 0;
    this.coreComponents = void 0;
    this.networkControllers = void 0;
    this.started = false;
    this._emitter = new EventEmitter();
    this._autoLevelCapping = -1;
    this._maxHdcpLevel = null;
    this.abrController = void 0;
    this.bufferController = void 0;
    this.capLevelController = void 0;
    this.latencyController = void 0;
    this.levelController = void 0;
    this.streamController = void 0;
    this.audioTrackController = void 0;
    this.subtitleTrackController = void 0;
    this.emeController = void 0;
    this.cmcdController = void 0;
    this._media = null;
    this.url = null;
    this.triggeringException = void 0;
    enableLogs(userConfig.debug || false, 'Hls instance');
    const config = this.config = mergeConfig(Hls.DefaultConfig, userConfig);
    this.userConfig = userConfig;
    if (config.progressive) {
      enableStreamingMode(config);
    }
    // core controllers and network loaders
    const {
      abrController: ConfigAbrController,
      bufferController: ConfigBufferController,
      capLevelController: ConfigCapLevelController,
      errorController: ConfigErrorController,
      fpsController: ConfigFpsController
    } = config;
    const errorController = new ConfigErrorController(this);
    const abrController = this.abrController = new ConfigAbrController(this);
    const bufferController = this.bufferController = new ConfigBufferController(this);
    const capLevelController = this.capLevelController = new ConfigCapLevelController(this);
    const fpsController = new ConfigFpsController(this);
    const playListLoader = new PlaylistLoader(this);
    const id3TrackController = new ID3TrackController(this);
    const ConfigContentSteeringController = config.contentSteeringController;
    // ContentSteeringController is defined before LevelController to receive Multivariant Playlist events first
    const contentSteering = ConfigContentSteeringController ? new ConfigContentSteeringController(this) : null;
    const levelController = this.levelController = new LevelController(this, contentSteering);
    // FragmentTracker must be defined before StreamController because the order of event handling is important
    const fragmentTracker = new FragmentTracker(this);
    const keyLoader = new KeyLoader(this.config);
    const streamController = this.streamController = new StreamController(this, fragmentTracker, keyLoader);
    // Cap level controller uses streamController to flush the buffer
    capLevelController.setStreamController(streamController);
    // fpsController uses streamController to switch when frames are being dropped
    fpsController.setStreamController(streamController);
    const networkControllers = [playListLoader, levelController, streamController];
    if (contentSteering) {
      networkControllers.splice(1, 0, contentSteering);
    }
    this.networkControllers = networkControllers;
    const coreComponents = [abrController, bufferController, capLevelController, fpsController, id3TrackController, fragmentTracker];
    this.audioTrackController = this.createController(config.audioTrackController, networkControllers);
    const AudioStreamControllerClass = config.audioStreamController;
    if (AudioStreamControllerClass) {
      networkControllers.push(new AudioStreamControllerClass(this, fragmentTracker, keyLoader));
    }
    // subtitleTrackController must be defined before subtitleStreamController because the order of event handling is important
    this.subtitleTrackController = this.createController(config.subtitleTrackController, networkControllers);
    const SubtitleStreamControllerClass = config.subtitleStreamController;
    if (SubtitleStreamControllerClass) {
      networkControllers.push(new SubtitleStreamControllerClass(this, fragmentTracker, keyLoader));
    }
    this.createController(config.timelineController, coreComponents);
    keyLoader.emeController = this.emeController = this.createController(config.emeController, coreComponents);
    this.cmcdController = this.createController(config.cmcdController, coreComponents);
    this.latencyController = this.createController(LatencyController, coreComponents);
    this.coreComponents = coreComponents;
    // Error controller handles errors before and after all other controllers
    // This listener will be invoked after all other controllers' error listeners
    networkControllers.push(errorController);
    const onErrorOut = errorController.onErrorOut;
    if (typeof onErrorOut === 'function') {
      this.on(Events.ERROR, onErrorOut, errorController);
    }
  }
  createController(ControllerClass, components) {
    if (ControllerClass) {
      const controllerInstance = new ControllerClass(this);
      if (components) {
        components.push(controllerInstance);
      }
      return controllerInstance;
    }
    return null;
  }
  // Delegate the EventEmitter through the public API of Hls.js
  on(event, listener, context = this) {
    this._emitter.on(event, listener, context);
  }
  once(event, listener, context = this) {
    this._emitter.once(event, listener, context);
  }
  removeAllListeners(event) {
    this._emitter.removeAllListeners(event);
  }
  off(event, listener, context = this, once) {
    this._emitter.off(event, listener, context, once);
  }
  listeners(event) {
    return this._emitter.listeners(event);
  }
  emit(event, name, eventObject) {
    return this._emitter.emit(event, name, eventObject);
  }
  trigger(event, eventObject) {
    if (this.config.debug) {
      return this.emit(event, event, eventObject);
    } else {
      try {
        return this.emit(event, event, eventObject);
      } catch (error) {
        logger.error('An internal error happened while handling event ' + event + '. Error message: "' + error.message + '". Here is a stacktrace:', error);
        // Prevent recursion in error event handlers that throw #5497
        if (!this.triggeringException) {
          this.triggeringException = true;
          const fatal = event === Events.ERROR;
          this.trigger(Events.ERROR, {
            type: ErrorTypes.OTHER_ERROR,
            details: ErrorDetails.INTERNAL_EXCEPTION,
            fatal,
            event,
            error
          });
          this.triggeringException = false;
        }
      }
    }
    return false;
  }
  listenerCount(event) {
    return this._emitter.listenerCount(event);
  }
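  /* The listener API above mirrors EventEmitter semantics: listeners receive the event
     name followed by the event payload. A minimal sketch (illustrative; `hls` is an
     existing instance created by the embedding application):

     const onParsed = (event, data) => {
       console.log('manifest parsed,', data.levels.length, 'quality levels');
     };
     hls.on(Hls.Events.MANIFEST_PARSED, onParsed);
     // ...later, detach the same listener:
     hls.off(Hls.Events.MANIFEST_PARSED, onParsed);
  */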
  /**
   * Dispose of the instance
   */
  destroy() {
    logger.log('destroy');
    this.trigger(Events.DESTROYING, undefined);
    this.detachMedia();
    this.removeAllListeners();
    this._autoLevelCapping = -1;
    this.url = null;
    this.networkControllers.forEach(component => component.destroy());
    this.networkControllers.length = 0;
    this.coreComponents.forEach(component => component.destroy());
    this.coreComponents.length = 0;
    // Remove any references that could be held in config options or callbacks
    const config = this.config;
    config.xhrSetup = config.fetchSetup = undefined;
    // @ts-ignore
    this.userConfig = null;
  }
  /**
   * Attaches Hls.js to a media element
   */
  attachMedia(media) {
    logger.log('attachMedia');
    this._media = media;
    this.trigger(Events.MEDIA_ATTACHING, {
      media: media
    });
  }
  /**
   * Detach Hls.js from the media
   */
  detachMedia() {
    logger.log('detachMedia');
    this.trigger(Events.MEDIA_DETACHING, undefined);
    this._media = null;
  }
  /**
   * Set the source URL. Can be relative or absolute.
   */
  loadSource(url) {
    this.stopLoad();
    const media = this.media;
    const loadedSource = this.url;
    const loadingSource = this.url = urlToolkitExports.buildAbsoluteURL(self.location.href, url, {
      alwaysNormalize: true
    });
    this._autoLevelCapping = -1;
    this._maxHdcpLevel = null;
    logger.log(`loadSource:${loadingSource}`);
    if (media && loadedSource && (loadedSource !== loadingSource || this.bufferController.hasSourceTypes())) {
      this.detachMedia();
      this.attachMedia(media);
    }
    // when attaching to a source URL, trigger a playlist load
    this.trigger(Events.MANIFEST_LOADING, {
      url: url
    });
  }
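  /* Typical start-up sequence for the two methods above (illustrative sketch; the
     element id and manifest URL are assumptions of the example, not part of the library):

     const video = document.getElementById('video');
     if (Hls.isSupported()) {
       const hls = new Hls();
       hls.loadSource('https://example.com/playlist.m3u8');
       hls.attachMedia(video);
     } else if (video.canPlayType('application/vnd.apple.mpegurl')) {
       // Safari can play HLS natively without MSE
       video.src = 'https://example.com/playlist.m3u8';
     }
  */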
  /**
   * Start loading data from the stream source.
   * Depending on default config, client starts loading automatically when a source is set.
   *
   * @param startPosition - Set the start position to stream from.
   * Defaults to -1 (None: starts from earliest point)
   */
  startLoad(startPosition = -1) {
    logger.log(`startLoad(${startPosition})`);
    this.started = true;
    this.networkControllers.forEach(controller => {
      controller.startLoad(startPosition);
    });
  }
  /**
   * Stop loading of any stream data.
   */
  stopLoad() {
    logger.log('stopLoad');
    this.started = false;
    this.networkControllers.forEach(controller => {
      controller.stopLoad();
    });
  }
  /**
   * Resumes stream controller segment loading if previously started.
   */
  resumeBuffering() {
    if (this.started) {
      this.networkControllers.forEach(controller => {
        if ('fragmentLoader' in controller) {
          controller.startLoad(-1);
        }
      });
    }
  }
  /**
   * Stops stream controller segment loading without changing the 'started' state (unlike stopLoad()).
   * This allows media buffering to be paused without interrupting playlist loading.
   */
  pauseBuffering() {
    this.networkControllers.forEach(controller => {
      if ('fragmentLoader' in controller) {
        controller.stopLoad();
      }
    });
  }
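  /* Sketch of pausing and resuming segment loading around an ad break while playlist
     refreshes continue (illustrative; `playAd` is an assumed application callback):

     hls.pauseBuffering();
     playAd().then(() => {
       hls.resumeBuffering();
     });
  */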
  /**
   * Swap through possible audio codecs in the stream (for example to switch from stereo to 5.1)
   */
  swapAudioCodec() {
    logger.log('swapAudioCodec');
    this.streamController.swapAudioCodec();
  }
  /**
   * When the media element fails, this convenience method detaches and then
   * re-attaches it in one call.
   *
   * Automatic recovery of media errors by this process is configurable.
   */
  recoverMediaError() {
    logger.log('recoverMediaError');
    const media = this._media;
    this.detachMedia();
    if (media) {
      this.attachMedia(media);
    }
  }
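  /* Common recovery pattern built on the method above (illustrative sketch; how often
     to retry is an application-level decision, not library behaviour):

     hls.on(Hls.Events.ERROR, (event, data) => {
       if (!data.fatal) return;
       if (data.type === Hls.ErrorTypes.MEDIA_ERROR) {
         hls.recoverMediaError();
       } else if (data.type === Hls.ErrorTypes.NETWORK_ERROR) {
         hls.startLoad();
       } else {
         hls.destroy();
       }
     });
  */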
  removeLevel(levelIndex) {
    this.levelController.removeLevel(levelIndex);
  }
  /**
   * @returns an array of levels (variants) sorted by HDCP-LEVEL, RESOLUTION (height), FRAME-RATE, CODECS, VIDEO-RANGE, and BANDWIDTH
   */
  get levels() {
    const levels = this.levelController.levels;
    return levels ? levels : [];
  }
  /**
   * Index of quality level (variant) currently played
   */
  get currentLevel() {
    return this.streamController.currentLevel;
  }
  /**
   * Set quality level index immediately. This will flush the current buffer to replace the quality asap. That means playback will interrupt at least shortly to re-buffer and re-sync eventually. Set to -1 for automatic level selection.
   */
  set currentLevel(newLevel) {
    logger.log(`set currentLevel:${newLevel}`);
    this.levelController.manualLevel = newLevel;
    this.streamController.immediateLevelSwitch();
  }
  /**
   * Index of next quality level loaded as scheduled by stream controller.
   */
  get nextLevel() {
    return this.streamController.nextLevel;
  }
  /**
   * Set quality level index for next loaded data.
   * This will switch the video quality asap, without interrupting playback.
   * May abort current loading of data, and flush parts of buffer (outside currently played fragment region).
   * @param newLevel - Pass -1 for automatic level selection
   */
  set nextLevel(newLevel) {
    logger.log(`set nextLevel:${newLevel}`);
    this.levelController.manualLevel = newLevel;
    this.streamController.nextLevelSwitch();
  }
  /**
   * Return the quality level of the currently loading segment, or of the last loaded segment if none is loading.
   */
  get loadLevel() {
    return this.levelController.level;
  }
  /**
   * Set quality level index for next loaded data in a conservative way.
   * This will switch the quality without flushing, but interrupt current loading.
   * The switch will therefore only take effect after the already buffered media has played out.
   * @param newLevel - Pass -1 for automatic level selection
   */
  set loadLevel(newLevel) {
    logger.log(`set loadLevel:${newLevel}`);
    this.levelController.manualLevel = newLevel;
  }
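  /* Sketch contrasting the three level setters above (illustrative; the level index 2
     is an arbitrary example value):

     hls.currentLevel = 2;  // flush and re-buffer: visible immediately, playback may stall briefly
     hls.nextLevel = 2;     // abort in-flight loads and flush forward buffer: fast, no stall
     hls.loadLevel = 2;     // only affects future loads: takes effect after the existing buffer plays out
     hls.currentLevel = -1; // return to automatic ABR selection
  */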
  /**
   * get next quality level loaded
   */
  get nextLoadLevel() {
    return this.levelController.nextLoadLevel;
  }
  /**
   * Set quality level of next loaded segment in a fully "non-destructive" way.
   * Same as `loadLevel` but will wait for next switch (until current loading is done).
   */
  set nextLoadLevel(level) {
    this.levelController.nextLoadLevel = level;
  }
  /**
   * Return "first level": like a default level, if not set,
   * falls back to index of first level referenced in manifest
   */
  get firstLevel() {
    return Math.max(this.levelController.firstLevel, this.minAutoLevel);
  }
  /**
   * Sets "first-level", see getter.
   */
  set firstLevel(newLevel) {
    logger.log(`set firstLevel:${newLevel}`);
    this.levelController.firstLevel = newLevel;
  }
  /**
   * Return the desired start level for the first fragment that will be loaded.
   * The default value of -1 indicates automatic start level selection.
   * Setting hls.nextAutoLevel without setting a startLevel will result in
   * the nextAutoLevel value being used for one fragment load.
   */
  get startLevel() {
    const startLevel = this.levelController.startLevel;
    if (startLevel === -1 && this.abrController.forcedAutoLevel > -1) {
      return this.abrController.forcedAutoLevel;
    }
    return startLevel;
  }
  /**
   * Set start level (level of first fragment that will be played back).
   * If not overridden by the user, the first level appearing in the manifest will be used as start level.
   * If -1: automatic start level selection, playback will start from the level matching download bandwidth
   * (determined from the download of the first segment).
   */
  set startLevel(newLevel) {
    logger.log(`set startLevel:${newLevel}`);
    // if not in automatic start level detection, ensure startLevel is greater than minAutoLevel
    if (newLevel !== -1) {
      newLevel = Math.max(newLevel, this.minAutoLevel);
    }
    this.levelController.startLevel = newLevel;
  }
  /**
   * Whether level capping is enabled.
   * Default value is set via `config.capLevelToPlayerSize`.
   */
  get capLevelToPlayerSize() {
    return this.config.capLevelToPlayerSize;
  }
  /**
   * Enables or disables level capping. If disabled after previously enabled, `nextLevelSwitch` will be immediately called.
   */
  set capLevelToPlayerSize(shouldStartCapping) {
    const newCapLevelToPlayerSize = !!shouldStartCapping;
    if (newCapLevelToPlayerSize !== this.config.capLevelToPlayerSize) {
      if (newCapLevelToPlayerSize) {
        this.capLevelController.startCapping(); // If capping occurs, nextLevelSwitch will happen based on size.
      } else {
        this.capLevelController.stopCapping();
        this.autoLevelCapping = -1;
        this.streamController.nextLevelSwitch(); // Now we're uncapped, get the next level asap.
      }
      this.config.capLevelToPlayerSize = newCapLevelToPlayerSize;
    }
  }
  /**
   * Capping/max level value that should be used by automatic level selection algorithm (`ABRController`)
   */
  get autoLevelCapping() {
    return this._autoLevelCapping;
  }
  /**
   * Returns the current bandwidth estimate in bits per second, when available. Otherwise, `NaN` is returned.
   */
  get bandwidthEstimate() {
    const {
      bwEstimator
    } = this.abrController;
    if (!bwEstimator) {
      return NaN;
    }
    return bwEstimator.getEstimate();
  }
  set bandwidthEstimate(abrEwmaDefaultEstimate) {
    this.abrController.resetEstimator(abrEwmaDefaultEstimate);
  }
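  /* Sketch of seeding ABR with a previous session's bandwidth estimate via the setter
     above (illustrative; persisting to localStorage and the storage key are assumptions
     of the example, not library behaviour):

     const saved = Number(localStorage.getItem('hlsBwEstimate'));
     if (Number.isFinite(saved) && saved > 0) {
       hls.bandwidthEstimate = saved; // seed ABR before the first fragment loads
     }
     window.addEventListener('beforeunload', () => {
       localStorage.setItem('hlsBwEstimate', String(hls.bandwidthEstimate));
     });
  */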
  /**
   * get time to first byte estimate
   * @type {number}
   */
  get ttfbEstimate() {
    const {
      bwEstimator
    } = this.abrController;
    if (!bwEstimator) {
      return NaN;
    }
    return bwEstimator.getEstimateTTFB();
  }
  /**
   * Capping/max level value that should be used by automatic level selection algorithm (`ABRController`)
   */
  set autoLevelCapping(newLevel) {
    if (this._autoLevelCapping !== newLevel) {
      logger.log(`set autoLevelCapping:${newLevel}`);
      this._autoLevelCapping = newLevel;
      this.levelController.checkMaxAutoUpdated();
    }
  }
  get maxHdcpLevel() {
    return this._maxHdcpLevel;
  }
  set maxHdcpLevel(value) {
    if (isHdcpLevel(value) && this._maxHdcpLevel !== value) {
      this._maxHdcpLevel = value;
      this.levelController.checkMaxAutoUpdated();
    }
  }
  /**
   * True when automatic level selection enabled
   */
  get autoLevelEnabled() {
    return this.levelController.manualLevel === -1;
  }
  /**
   * Level set manually (if any)
   */
  get manualLevel() {
    return this.levelController.manualLevel;
  }
  /**
   * min level selectable in auto mode according to config.minAutoBitrate
   */
  get minAutoLevel() {
    const {
      levels,
      config: {
        minAutoBitrate
      }
    } = this;
    if (!levels) return 0;
    const len = levels.length;
    for (let i = 0; i < len; i++) {
      if (levels[i].maxBitrate >= minAutoBitrate) {
        return i;
      }
    }
    return 0;
  }
  /**
   * max level selectable in auto mode according to autoLevelCapping
   */
  get maxAutoLevel() {
    const {
      levels,
      autoLevelCapping,
      maxHdcpLevel
    } = this;
    let maxAutoLevel;
    if (autoLevelCapping === -1 && levels != null && levels.length) {
      maxAutoLevel = levels.length - 1;
    } else {
      maxAutoLevel = autoLevelCapping;
    }
    if (maxHdcpLevel) {
      for (let i = maxAutoLevel; i--;) {
        const hdcpLevel = levels[i].attrs['HDCP-LEVEL'];
        if (hdcpLevel && hdcpLevel <= maxHdcpLevel) {
          return i;
        }
      }
    }
    return maxAutoLevel;
  }
  get firstAutoLevel() {
    return this.abrController.firstAutoLevel;
  }
  /**
   * next automatically selected quality level
   */
  get nextAutoLevel() {
    return this.abrController.nextAutoLevel;
  }
  /**
   * This setter is used to force the next auto level.
   * It is useful to force a switch down in auto mode: in case of a load error on level N,
   * hls.js can set nextAutoLevel to N-1, for example.
   * The forced value is valid for one fragment. Upon successful fragment loading at the forced level,
   * this value will be reset to -1 by the ABR controller.
   */
  set nextAutoLevel(nextLevel) {
    this.abrController.nextAutoLevel = nextLevel;
  }
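  /* Sketch of the forced-switch-down pattern described above (illustrative; reacting to
     a non-fatal fragment load error this way is an application-level choice, not built-in
     library behaviour):

     hls.on(Hls.Events.ERROR, (event, data) => {
       if (data.details === Hls.ErrorDetails.FRAG_LOAD_ERROR && !data.fatal && hls.autoLevelEnabled) {
         hls.nextAutoLevel = Math.max(hls.loadLevel - 1, hls.minAutoLevel);
       }
     });
  */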
  /**
   * get the datetime value relative to media.currentTime for the active level Program Date Time if present
   */
  get playingDate() {
    return this.streamController.currentProgramDateTime;
  }
  get mainForwardBufferInfo() {
    return this.streamController.getMainFwdBufferInfo();
  }
  /**
   * Find and select the best matching audio track, making a level switch when a Group change is necessary.
   * Updates `hls.config.audioPreference`. Returns the selected track, or null when no matching track is found.
   */
  setAudioOption(audioOption) {
    var _this$audioTrackContr;
    return (_this$audioTrackContr = this.audioTrackController) == null ? void 0 : _this$audioTrackContr.setAudioOption(audioOption);
  }
  /**
   * Find and select the best matching subtitle track, making a level switch when a Group change is necessary.
   * Updates `hls.config.subtitlePreference`. Returns the selected track, or null when no matching track is found.
   */
  setSubtitleOption(subtitleOption) {
    var _this$subtitleTrackCo;
    (_this$subtitleTrackCo = this.subtitleTrackController) == null ? void 0 : _this$subtitleTrackCo.setSubtitleOption(subtitleOption);
    return null;
  }
  /**
   * Get the complete list of audio tracks across all media groups
   */
  get allAudioTracks() {
    const audioTrackController = this.audioTrackController;
    return audioTrackController ? audioTrackController.allAudioTracks : [];
  }
  /**
   * Get the list of selectable audio tracks
   */
  get audioTracks() {
    const audioTrackController = this.audioTrackController;
    return audioTrackController ? audioTrackController.audioTracks : [];
  }
  /**
   * index of the selected audio track (index in audio track lists)
   */
  get audioTrack() {
    const audioTrackController = this.audioTrackController;
    return audioTrackController ? audioTrackController.audioTrack : -1;
  }
  /**
   * selects an audio track, based on its index in audio track lists
   */
  set audioTrack(audioTrackId) {
    const audioTrackController = this.audioTrackController;
    if (audioTrackController) {
      audioTrackController.audioTrack = audioTrackId;
    }
  }
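  /* Sketch of selecting an audio track by language using the list and index accessors
     above (illustrative; 'fr' is an example language code):

     const frenchIndex = hls.audioTracks.findIndex(track => track.lang === 'fr');
     if (frenchIndex !== -1) {
       hls.audioTrack = frenchIndex;
     }
  */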
  /**
   * get the complete list of subtitle tracks across all media groups
   */
  get allSubtitleTracks() {
    const subtitleTrackController = this.subtitleTrackController;
    return subtitleTrackController ? subtitleTrackController.allSubtitleTracks : [];
  }
  /**
   * get alternate subtitle tracks list from playlist
   */
  get subtitleTracks() {
    const subtitleTrackController = this.subtitleTrackController;
    return subtitleTrackController ? subtitleTrackController.subtitleTracks : [];
  }
  /**
   * index of the selected subtitle track (index in subtitle track lists)
   */
  get subtitleTrack() {
    const subtitleTrackController = this.subtitleTrackController;
    return subtitleTrackController ? subtitleTrackController.subtitleTrack : -1;
  }
  get media() {
    return this._media;
  }
  /**
   * select a subtitle track, based on its index in subtitle track lists
   */
  set subtitleTrack(subtitleTrackId) {
    const subtitleTrackController = this.subtitleTrackController;
    if (subtitleTrackController) {
      subtitleTrackController.subtitleTrack = subtitleTrackId;
    }
  }
  /**
   * Whether subtitle display is enabled or not
   */
  get subtitleDisplay() {
    const subtitleTrackController = this.subtitleTrackController;
    return subtitleTrackController ? subtitleTrackController.subtitleDisplay : false;
  }
  /**
   * Enable/disable subtitle display rendering
   */
  set subtitleDisplay(value) {
    const subtitleTrackController = this.subtitleTrackController;
    if (subtitleTrackController) {
      subtitleTrackController.subtitleDisplay = value;
    }
  }
  /**
   * get mode for Low-Latency HLS loading
   */
  get lowLatencyMode() {
    return this.config.lowLatencyMode;
  }
  /**
   * Enable/disable Low-Latency HLS part playlist and segment loading, and start live streams at playlist PART-HOLD-BACK rather than HOLD-BACK.
   */
  set lowLatencyMode(mode) {
    this.config.lowLatencyMode = mode;
  }
  /**
   * Position (in seconds) of the live sync point (i.e. edge of live position minus the safety delay defined by ```hls.config.liveSyncDuration```)
   * @returns null prior to loading live Playlist
   */
  get liveSyncPosition() {
    return this.latencyController.liveSyncPosition;
  }
  /**
   * Estimated position (in seconds) of the live edge (i.e. edge of the live playlist plus the time the playlist has advanced since sync)
   * @returns 0 before first playlist is loaded
   */
  get latency() {
    return this.latencyController.latency;
  }
  /**
   * maximum distance from the edge before the player seeks forward to ```hls.liveSyncPosition```
   * configured using ```liveMaxLatencyDurationCount``` (multiple of target duration) or ```liveMaxLatencyDuration```
   * @returns 0 before first playlist is loaded
   */
  get maxLatency() {
    return this.latencyController.maxLatency;
  }
  /**
   * target distance from the edge as calculated by the latency controller
   */
  get targetLatency() {
    return this.latencyController.targetLatency;
  }
  /**
   * the rate at which the edge of the current live playlist is advancing, or 1 if there is none
   */
  get drift() {
    return this.latencyController.drift;
  }
  /**
   * set to true when startLoad is called before MANIFEST_PARSED event
   */
  get forceStartLoad() {
    return this.streamController.forceStartLoad;
  }
}
Hls.defaultConfig = void 0;
var KeySystemFormats = emptyEs.KeySystemFormats;
var KeySystems = emptyEs.KeySystems;
var SubtitleStreamController = emptyEs.SubtitleStreamController;
var TimelineController = emptyEs.TimelineController;
export { AbrController, AttrList, Cues as AudioStreamController, Cues as AudioTrackController, BasePlaylistController, BaseSegment, BaseStreamController, BufferController, Cues as CMCDController, CapLevelController, ChunkMetadata, ContentSteeringController, DateRange, Cues as EMEController, ErrorActionFlags, ErrorController, ErrorDetails, ErrorTypes, Events, FPSController, Fragment, Hls, HlsSkip, HlsUrlParameters, KeySystemFormats, KeySystems, Level, LevelDetails, LevelKey, LoadStats, MetadataSchema, NetworkErrorAction, Part, PlaylistLevelType, SubtitleStreamController, Cues as SubtitleTrackController, TimelineController, Hls as default, getMediaSource, isMSESupported, isSupported };
//# sourceMappingURL=hls.light.mjs.map