src/controller/buffer-controller.ts

import { Events } from '../events';
import { logger } from '../utils/logger';
import { ErrorDetails, ErrorTypes } from '../errors';
import { BufferHelper } from '../utils/buffer-helper';
import { getMediaSource } from '../utils/mediasource-helper';
import { ElementaryStreamTypes } from '../loader/fragment';
import type { TrackSet } from '../types/track';
import BufferOperationQueue from './buffer-operation-queue';
import {
  BufferOperation,
  SourceBuffers,
  SourceBufferName,
  SourceBufferListeners,
} from '../types/buffer';
import type {
  LevelUpdatedData,
  BufferAppendingData,
  MediaAttachingData,
  ManifestParsedData,
  BufferCodecsData,
  BufferEOSData,
  BufferFlushingData,
  FragParsedData,
  FragChangedData,
} from '../types/events';
import type { ComponentAPI } from '../types/component-api';
import type Hls from '../hls';
import { LevelDetails } from '../loader/level-details';

const MediaSource = getMediaSource();
const VIDEO_CODEC_PROFILE_REPACE = /([ha]vc.)(?:\.[^.,]+)+/;
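// e.g. 'avc1.64001f'.replace(VIDEO_CODEC_PROFILE_REPACE, '$1') === 'avc1' and
// 'hvc1.1.6.L93.B0' becomes 'hvc1'; codecs without an avc/hvc prefix (such as
// 'mp4a.40.2') are left unchanged. Used in onBufferCodecs to compare base codecs
// while ignoring profile/level when deciding whether SourceBuffer.changeType() is needed.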

export default class BufferController implements ComponentAPI {
  // The level details used to determine duration, target-duration and live
  private details: LevelDetails | null = null;
  // cache the self generated object url to detect hijack of video tag
  private _objectUrl: string | null = null;
  // A queue of buffer operations which require the SourceBuffer to not be updating upon execution
  private operationQueue!: BufferOperationQueue;
  // References to event listeners for each SourceBuffer, so that they can be referenced for event removal
  private listeners!: SourceBufferListeners;

  private hls: Hls;

  // The number of BUFFER_CODEC events received before any sourceBuffers are created
  public bufferCodecEventsExpected: number = 0;

  // The total number of BUFFER_CODEC events received
  private _bufferCodecEventsTotal: number = 0;

  // A reference to the attached media element
  public media: HTMLMediaElement | null = null;

  // A reference to the active media source
  public mediaSource: MediaSource | null = null;

  // counters
  public appendError: number = 0;

  public tracks: TrackSet = {};
  public pendingTracks: TrackSet = {};
  public sourceBuffer!: SourceBuffers;

  constructor(hls: Hls) {
    this.hls = hls;
    this._initSourceBuffer();
    this.registerListeners();
  }

  public hasSourceTypes(): boolean {
    return (
      this.getSourceBufferTypes().length > 0 ||
      Object.keys(this.pendingTracks).length > 0
    );
  }

  public destroy() {
    this.unregisterListeners();
    this.details = null;
  }

  protected registerListeners() {
    const { hls } = this;
    hls.on(Events.MEDIA_ATTACHING, this.onMediaAttaching, this);
    hls.on(Events.MEDIA_DETACHING, this.onMediaDetaching, this);
    hls.on(Events.MANIFEST_PARSED, this.onManifestParsed, this);
    hls.on(Events.BUFFER_RESET, this.onBufferReset, this);
    hls.on(Events.BUFFER_APPENDING, this.onBufferAppending, this);
    hls.on(Events.BUFFER_CODECS, this.onBufferCodecs, this);
    hls.on(Events.BUFFER_EOS, this.onBufferEos, this);
    hls.on(Events.BUFFER_FLUSHING, this.onBufferFlushing, this);
    hls.on(Events.LEVEL_UPDATED, this.onLevelUpdated, this);
    hls.on(Events.FRAG_PARSED, this.onFragParsed, this);
    hls.on(Events.FRAG_CHANGED, this.onFragChanged, this);
  }

  protected unregisterListeners() {
    const { hls } = this;
    hls.off(Events.MEDIA_ATTACHING, this.onMediaAttaching, this);
    hls.off(Events.MEDIA_DETACHING, this.onMediaDetaching, this);
    hls.off(Events.MANIFEST_PARSED, this.onManifestParsed, this);
    hls.off(Events.BUFFER_RESET, this.onBufferReset, this);
    hls.off(Events.BUFFER_APPENDING, this.onBufferAppending, this);
    hls.off(Events.BUFFER_CODECS, this.onBufferCodecs, this);
    hls.off(Events.BUFFER_EOS, this.onBufferEos, this);
    hls.off(Events.BUFFER_FLUSHING, this.onBufferFlushing, this);
    hls.off(Events.LEVEL_UPDATED, this.onLevelUpdated, this);
    hls.off(Events.FRAG_PARSED, this.onFragParsed, this);
    hls.off(Events.FRAG_CHANGED, this.onFragChanged, this);
  }

  private _initSourceBuffer() {
    this.sourceBuffer = {};
    this.operationQueue = new BufferOperationQueue(this.sourceBuffer);
    this.listeners = {
      audio: [],
      video: [],
      audiovideo: [],
    };
  }

  protected onManifestParsed(
    event: Events.MANIFEST_PARSED,
    data: ManifestParsedData
  ) {
    // in case of alt audio, 2 BUFFER_CODECS events will be triggered, one per stream controller
    // sourcebuffers will be created all at once when the expected number of tracks is reached
    // in case alt audio is not used, only one BUFFER_CODECS event will be fired from the main stream controller
    // it will contain the expected number of source buffers, no need to compute it
    let codecEvents: number = 2;
    if ((data.audio && !data.video) || !data.altAudio) {
      codecEvents = 1;
    }
    this.bufferCodecEventsExpected = this._bufferCodecEventsTotal = codecEvents;
    this.details = null;
    logger.log(
      `${this.bufferCodecEventsExpected} bufferCodec event(s) expected`
    );
  }

  protected onMediaAttaching(
    event: Events.MEDIA_ATTACHING,
    data: MediaAttachingData
  ) {
    const media = (this.media = data.media);
    if (media && MediaSource) {
      const ms = (this.mediaSource = new MediaSource());
      // MediaSource listeners are arrow functions with a lexical scope, and do not need to be bound
      ms.addEventListener('sourceopen', this._onMediaSourceOpen);
      ms.addEventListener('sourceended', this._onMediaSourceEnded);
      ms.addEventListener('sourceclose', this._onMediaSourceClose);
      // link video and media Source
      media.src = self.URL.createObjectURL(ms);
      // cache the locally generated object url
      this._objectUrl = media.src;
    }
  }

  protected onMediaDetaching() {
    const { media, mediaSource, _objectUrl } = this;
    if (mediaSource) {
      logger.log('[buffer-controller]: media source detaching');
      if (mediaSource.readyState === 'open') {
        try {
          // endOfStream could trigger an exception if any sourcebuffer is in updating state
          // we don't really care about checking sourcebuffer state here,
          // as we are anyway detaching the MediaSource
          // let's just prevent this exception from propagating
          mediaSource.endOfStream();
        } catch (err) {
          logger.warn(
            `[buffer-controller]: onMediaDetaching: ${err.message} while calling endOfStream`
          );
        }
      }
      // Clean up the SourceBuffers by invoking onBufferReset
      this.onBufferReset();
      mediaSource.removeEventListener('sourceopen', this._onMediaSourceOpen);
      mediaSource.removeEventListener('sourceended', this._onMediaSourceEnded);
      mediaSource.removeEventListener('sourceclose', this._onMediaSourceClose);

      // Properly detach the MediaSource from the HTMLMediaElement as
      // suggested in https://github.com/w3c/media-source/issues/53.
      if (media) {
        if (_objectUrl) {
          self.URL.revokeObjectURL(_objectUrl);
        }

        // clean up the video tag src only if it's our own url. some external libraries might
        // hijack the video tag and change its 'src' without destroying the Hls instance first
        if (media.src === _objectUrl) {
          media.removeAttribute('src');
          media.load();
        } else {
          logger.warn(
            '[buffer-controller]: media.src was changed by a third party - skip cleanup'
          );
        }
      }

      this.mediaSource = null;
      this.media = null;
      this._objectUrl = null;
      this.bufferCodecEventsExpected = this._bufferCodecEventsTotal;
      this.pendingTracks = {};
      this.tracks = {};
    }

    this.hls.trigger(Events.MEDIA_DETACHED, undefined);
  }

  protected onBufferReset() {
    this.getSourceBufferTypes().forEach((type) => {
      const sb = this.sourceBuffer[type];
      try {
        if (sb) {
          this.removeBufferListeners(type);
          if (this.mediaSource) {
            this.mediaSource.removeSourceBuffer(sb);
          }
          // Synchronously remove the SB from the map before the next call in order to prevent an async function from
          // accessing it
          this.sourceBuffer[type] = undefined;
        }
      } catch (err) {
        logger.warn(
          `[buffer-controller]: Failed to reset the ${type} buffer`,
          err
        );
      }
    });
    this._initSourceBuffer();
  }

  protected onBufferCodecs(
    event: Events.BUFFER_CODECS,
    data: BufferCodecsData
  ) {
    const sourceBufferCount = this.getSourceBufferTypes().length;

    Object.keys(data).forEach((trackName) => {
      if (sourceBufferCount) {
        // check if the SourceBuffer codec needs to change
        const track = this.tracks[trackName];
        if (track && typeof track.buffer.changeType === 'function') {
          const { codec, levelCodec, container } = data[trackName];
          const currentCodec = (track.levelCodec || track.codec).replace(
            VIDEO_CODEC_PROFILE_REPACE,
            '$1'
          );
          const nextCodec = (levelCodec || codec).replace(
            VIDEO_CODEC_PROFILE_REPACE,
            '$1'
          );
          if (currentCodec !== nextCodec) {
            const mimeType = `${container};codecs=${levelCodec || codec}`;
            this.appendChangeType(trackName, mimeType);
          }
        }
      } else {
        // if the source buffer(s) have not been created yet, store the buffer tracks in this.pendingTracks
        this.pendingTracks[trackName] = data[trackName];
      }
    });

    // if sourcebuffers have already been created, do nothing ...
    if (sourceBufferCount) {
      return;
    }

    this.bufferCodecEventsExpected = Math.max(
      this.bufferCodecEventsExpected - 1,
      0
    );
    if (this.mediaSource && this.mediaSource.readyState === 'open') {
      this.checkPendingTracks();
    }
  }

  protected appendChangeType(type, mimeType) {
    const { operationQueue } = this;
    const operation: BufferOperation = {
      execute: () => {
        const sb = this.sourceBuffer[type];
        if (sb) {
          logger.log(
            `[buffer-controller]: changing ${type} sourceBuffer type to ${mimeType}`
          );
          sb.changeType(mimeType);
        }
        operationQueue.shiftAndExecuteNext(type);
      },
      onStart: () => {},
      onComplete: () => {},
      onError: (e) => {
        logger.warn(
          `[buffer-controller]: Failed to change ${type} SourceBuffer type`,
          e
        );
      },
    };

    operationQueue.append(operation, type);
  }

  protected onBufferAppending(
    event: Events.BUFFER_APPENDING,
    eventData: BufferAppendingData
  ) {
    const { hls, operationQueue, tracks } = this;
    const { data, type, frag, part, chunkMeta } = eventData;
    const chunkStats = chunkMeta.buffering[type];

    const bufferAppendingStart = self.performance.now();
    chunkStats.start = bufferAppendingStart;
    const fragBuffering = frag.stats.buffering;
    const partBuffering = part ? part.stats.buffering : null;
    if (fragBuffering.start === 0) {
      fragBuffering.start = bufferAppendingStart;
    }
    if (partBuffering && partBuffering.start === 0) {
      partBuffering.start = bufferAppendingStart;
    }

    // TODO: Only update timestampOffset when an audio/mpeg fragment or part is not contiguous with the previously appended one
    // We adjust `SourceBuffer.timestampOffset` (the desired point in the timeline where the next frames should be appended)
    // in Chrome when we detect an MPEG audio container and the time delta between the level PTS and `SourceBuffer.timestampOffset`
    // is greater than 100ms (this is enough to handle seeking in VOD or a level change in LIVE videos).
    // More info here: https://github.com/video-dev/hls.js/issues/332#issuecomment-257986486
    const audioTrack = tracks.audio;
    const checkTimestampOffset =
      type === 'audio' &&
      chunkMeta.id === 1 &&
      audioTrack?.container === 'audio/mpeg';

    const operation: BufferOperation = {
      execute: () => {
        chunkStats.executeStart = self.performance.now();
        if (checkTimestampOffset) {
          const sb = this.sourceBuffer[type];
          if (sb) {
            const delta = frag.start - sb.timestampOffset;
            if (Math.abs(delta) >= 0.1) {
              logger.log(
                `[buffer-controller]: Updating audio SourceBuffer timestampOffset to ${frag.start} (delta: ${delta}) sn: ${frag.sn}`
              );
              sb.timestampOffset = frag.start;
            }
          }
        }
        this.appendExecutor(data, type);
      },
      onStart: () => {
        // logger.debug(`[buffer-controller]: ${type} SourceBuffer updatestart`);
      },
      onComplete: () => {
        // logger.debug(`[buffer-controller]: ${type} SourceBuffer updateend`);
        const end = self.performance.now();
        chunkStats.executeEnd = chunkStats.end = end;
        if (fragBuffering.first === 0) {
          fragBuffering.first = end;
        }
        if (partBuffering && partBuffering.first === 0) {
          partBuffering.first = end;
        }

        const { sourceBuffer } = this;
        const timeRanges = {};
        for (const type in sourceBuffer) {
          timeRanges[type] = BufferHelper.getBuffered(sourceBuffer[type]);
        }
        this.appendError = 0;
        this.hls.trigger(Events.BUFFER_APPENDED, {
          type,
          frag,
          part,
          chunkMeta,
          parent: frag.type,
          timeRanges,
        });
      },
      onError: (err) => {
        // in case any error occurred while appending, put the segment back in the segments table
        logger.error(
          `[buffer-controller]: Error encountered while trying to append to the ${type} SourceBuffer`,
          err
        );
        const event = {
          type: ErrorTypes.MEDIA_ERROR,
          parent: frag.type,
          details: ErrorDetails.BUFFER_APPEND_ERROR,
          err,
          fatal: false,
        };

        if (err.code === DOMException.QUOTA_EXCEEDED_ERR) {
          // QuotaExceededError: http://www.w3.org/TR/html5/infrastructure.html#quotaexceedederror
          // let's stop appending any segments and report a BUFFER_FULL_ERROR
          event.details = ErrorDetails.BUFFER_FULL_ERROR;
        } else {
          this.appendError++;
          event.details = ErrorDetails.BUFFER_APPEND_ERROR;
          /* with UHD content, we could get a loop of quota exceeded errors until
             the browser is able to evict some data from the sourcebuffer. Retrying can help recover.
          */
          if (this.appendError > hls.config.appendErrorMaxRetry) {
            logger.error(
              `[buffer-controller]: Failed ${hls.config.appendErrorMaxRetry} times to append segment in sourceBuffer`
            );
            event.fatal = true;
          }
        }
        hls.trigger(Events.ERROR, event);
      },
    };
    operationQueue.append(operation, type);
  }

  protected onBufferFlushing(
    event: Events.BUFFER_FLUSHING,
    data: BufferFlushingData
  ) {
    const { operationQueue } = this;
    const flushOperation = (type: SourceBufferName): BufferOperation => ({
      execute: this.removeExecutor.bind(
        this,
        type,
        data.startOffset,
        data.endOffset
      ),
      onStart: () => {
        // logger.debug(`[buffer-controller]: Started flushing ${data.startOffset} -> ${data.endOffset} for ${type} Source Buffer`);
      },
      onComplete: () => {
        // logger.debug(`[buffer-controller]: Finished flushing ${data.startOffset} -> ${data.endOffset} for ${type} Source Buffer`);
        this.hls.trigger(Events.BUFFER_FLUSHED, { type });
      },
      onError: (e) => {
        logger.warn(
          `[buffer-controller]: Failed to remove from ${type} SourceBuffer`,
          e
        );
      },
    });

    if (data.type) {
      operationQueue.append(flushOperation(data.type), data.type);
    } else {
      this.getSourceBufferTypes().forEach((type: SourceBufferName) => {
        operationQueue.append(flushOperation(type), type);
      });
    }
  }

  protected onFragParsed(event: Events.FRAG_PARSED, data: FragParsedData) {
    const { frag, part } = data;
    const buffersAppendedTo: Array<SourceBufferName> = [];
    const elementaryStreams = part
      ? part.elementaryStreams
      : frag.elementaryStreams;
    if (elementaryStreams[ElementaryStreamTypes.AUDIOVIDEO]) {
      buffersAppendedTo.push('audiovideo');
    } else {
      if (elementaryStreams[ElementaryStreamTypes.AUDIO]) {
        buffersAppendedTo.push('audio');
      }
      if (elementaryStreams[ElementaryStreamTypes.VIDEO]) {
        buffersAppendedTo.push('video');
      }
    }

    const onUnblocked = () => {
      const now = self.performance.now();
      frag.stats.buffering.end = now;
      if (part) {
        part.stats.buffering.end = now;
      }
      const stats = part ? part.stats : frag.stats;
      this.hls.trigger(Events.FRAG_BUFFERED, {
        frag,
        part,
        stats,
        id: frag.type,
      });
    };

    if (buffersAppendedTo.length === 0) {
      logger.warn(
        `Fragments must have at least one ElementaryStreamType set. type: ${frag.type} level: ${frag.level} sn: ${frag.sn}`
      );
    }

    this.blockBuffers(onUnblocked, buffersAppendedTo);
  }

  private onFragChanged(event: Events.FRAG_CHANGED, data: FragChangedData) {
    this.flushBackBuffer();
  }

  // on BUFFER_EOS mark matching sourcebuffer(s) as ended and call endOfStream() once all of them have ended
  // an undefined data.type will mark all buffers as EOS.
  protected onBufferEos(event: Events.BUFFER_EOS, data: BufferEOSData) {
    const ended = this.getSourceBufferTypes().reduce((acc, type) => {
      const sb = this.sourceBuffer[type];
      if (!data.type || data.type === type) {
        if (sb && !sb.ended) {
          sb.ended = true;
          logger.log(`[buffer-controller]: ${type} sourceBuffer now EOS`);
        }
      }
      return acc && !!(!sb || sb.ended);
    }, true);

    if (ended) {
      this.blockBuffers(() => {
        const { mediaSource } = this;
        if (!mediaSource || mediaSource.readyState !== 'open') {
          return;
        }
        // Allow this to throw and be caught by the enqueueing function
        mediaSource.endOfStream();
      });
    }
  }

  protected onLevelUpdated(
    event: Events.LEVEL_UPDATED,
    { details }: LevelUpdatedData
  ) {
    if (!details.fragments.length) {
      return;
    }
    this.details = details;

    if (this.getSourceBufferTypes().length) {
      this.blockBuffers(this.updateMediaElementDuration.bind(this));
    } else {
      this.updateMediaElementDuration();
    }
  }

  flushBackBuffer() {
    const { hls, details, media, sourceBuffer } = this;
    if (!media || details === null) {
      return;
    }

    const sourceBufferTypes = this.getSourceBufferTypes();
    if (!sourceBufferTypes.length) {
      return;
    }

    // Support for the deprecated liveBackBufferLength
    const backBufferLength =
      details.live && hls.config.liveBackBufferLength !== null
        ? hls.config.liveBackBufferLength
        : hls.config.backBufferLength;

    if (!Number.isFinite(backBufferLength) || backBufferLength < 0) {
      return;
    }

    const currentTime = media.currentTime;
    const targetDuration = details.levelTargetDuration;
    const maxBackBufferLength = Math.max(backBufferLength, targetDuration);
    const targetBackBufferPosition =
      Math.floor(currentTime / targetDuration) * targetDuration -
      maxBackBufferLength;
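    // For example (illustrative values): with currentTime = 125, levelTargetDuration = 10 and
    // backBufferLength = 90, targetBackBufferPosition = Math.floor(125 / 10) * 10 - 90 = 30,
    // so media buffered before t = 30s becomes eligible for flushing below.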
    sourceBufferTypes.forEach((type: SourceBufferName) => {
      const sb = sourceBuffer[type];
      if (sb) {
        const buffered = BufferHelper.getBuffered(sb);
        // when the target buffer start exceeds the actual buffer start
        if (
          buffered.length > 0 &&
          targetBackBufferPosition > buffered.start(0)
        ) {
          hls.trigger(Events.BACK_BUFFER_REACHED, {
            bufferEnd: targetBackBufferPosition,
          });

          // Support for the deprecated event:
          if (details.live) {
            hls.trigger(Events.LIVE_BACK_BUFFER_REACHED, {
              bufferEnd: targetBackBufferPosition,
            });
          }

          hls.trigger(Events.BUFFER_FLUSHING, {
            startOffset: 0,
            endOffset: targetBackBufferPosition,
            type,
          });
        }
      }
    });
  }

  /**
   * Update the Media Source duration to the current level duration, or override it to Infinity
   * if the configuration parameter `liveDurationInfinity` is set to `true`
   * More details: https://github.com/video-dev/hls.js/issues/355
   */
  private updateMediaElementDuration() {
    if (
      !this.details ||
      !this.media ||
      !this.mediaSource ||
      this.mediaSource.readyState !== 'open'
    ) {
      return;
    }
    const { details, hls, media, mediaSource } = this;
    const levelDuration = details.fragments[0].start + details.totalduration;
    const mediaDuration = media.duration;
    const msDuration = Number.isFinite(mediaSource.duration)
      ? mediaSource.duration
      : 0;

    if (details.live && hls.config.liveDurationInfinity) {
      // Override duration to Infinity
      logger.log(
        '[buffer-controller]: Media Source duration is set to Infinity'
      );
      mediaSource.duration = Infinity;
      this.updateSeekableRange(details);
    } else if (
      (levelDuration > msDuration && levelDuration > mediaDuration) ||
      !Number.isFinite(mediaDuration)
    ) {
      // levelDuration was the last value we set.
      // not using mediaSource.duration as the browser may tweak this value
      // only update the Media Source duration if its value increases; this is to avoid
      // flushing the already buffered portion when switching between quality levels
      logger.log(
        `[buffer-controller]: Updating Media Source duration to ${levelDuration.toFixed(
          3
        )}`
      );
      mediaSource.duration = levelDuration;
    }
  }

  updateSeekableRange(levelDetails) {
    const mediaSource = this.mediaSource;
    const fragments = levelDetails.fragments;
    const len = fragments.length;
    if (len && levelDetails.live && mediaSource?.setLiveSeekableRange) {
      const start = Math.max(0, fragments[0].start);
      const end = Math.max(start, start + levelDetails.totalduration);
      mediaSource.setLiveSeekableRange(start, end);
    }
  }

  protected checkPendingTracks() {
    const { bufferCodecEventsExpected, operationQueue, pendingTracks } = this;

    // Check if we've received all of the expected bufferCodec events. When none remain, create all the sourceBuffers at once.
    // This is important because the MSE spec allows implementations to throw QuotaExceededErrors if creating new sourceBuffers after
    // data has been appended to existing ones.
    // 2 tracks is the max (one for audio, one for video). If we've reached this max, go ahead and create the buffers.
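    // For example, with no alt audio a single BUFFER_CODECS event is expected; once it arrives,
    // bufferCodecEventsExpected drops to 0 and the pending track(s) are turned into SourceBuffers.
    // With alt audio, creation waits until both the main and audio stream controllers have announced
    // their codecs (two events received, or two tracks already pending).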
    const pendingTracksCount = Object.keys(pendingTracks).length;
    if (
      (pendingTracksCount && !bufferCodecEventsExpected) ||
      pendingTracksCount === 2
    ) {
      // ok, let's create them now !
      this.createSourceBuffers(pendingTracks);
      this.pendingTracks = {};
      // append any pending segments now !
      const buffers = this.getSourceBufferTypes();
      if (buffers.length === 0) {
        this.hls.trigger(Events.ERROR, {
          type: ErrorTypes.MEDIA_ERROR,
          details: ErrorDetails.BUFFER_INCOMPATIBLE_CODECS_ERROR,
          fatal: true,
          reason: 'could not create source buffer for media codec(s)',
        });
        return;
      }
      buffers.forEach((type: SourceBufferName) => {
        operationQueue.executeNext(type);
      });
    }
  }

  protected createSourceBuffers(tracks: TrackSet) {
    const { sourceBuffer, mediaSource } = this;
    if (!mediaSource) {
      throw Error('createSourceBuffers called when mediaSource was null');
    }
    let tracksCreated = 0;
    for (const trackName in tracks) {
      if (!sourceBuffer[trackName]) {
        const track = tracks[trackName as keyof TrackSet];
        if (!track) {
          throw Error(
            `source buffer exists for track ${trackName}, however track does not`
          );
        }
        // use levelCodec as first priority
        const codec = track.levelCodec || track.codec;
        const mimeType = `${track.container};codecs=${codec}`;
        logger.log(`[buffer-controller]: creating sourceBuffer(${mimeType})`);
        try {
          const sb = (sourceBuffer[trackName] =
            mediaSource.addSourceBuffer(mimeType));
          const sbName = trackName as SourceBufferName;
          this.addBufferListener(sbName, 'updatestart', this._onSBUpdateStart);
          this.addBufferListener(sbName, 'updateend', this._onSBUpdateEnd);
          this.addBufferListener(sbName, 'error', this._onSBUpdateError);
          this.tracks[trackName] = {
            buffer: sb,
            codec: codec,
            container: track.container,
            levelCodec: track.levelCodec,
            id: track.id,
          };
          tracksCreated++;
        } catch (err) {
          logger.error(
            `[buffer-controller]: error while trying to add sourceBuffer: ${err.message}`
          );
          this.hls.trigger(Events.ERROR, {
            type: ErrorTypes.MEDIA_ERROR,
            details: ErrorDetails.BUFFER_ADD_CODEC_ERROR,
            fatal: false,
            error: err,
            mimeType: mimeType,
          });
        }
      }
    }
    if (tracksCreated) {
      this.hls.trigger(Events.BUFFER_CREATED, { tracks: this.tracks });
    }
  }

  // Keep as arrow functions so that we can reference these functions directly as event listeners
  private _onMediaSourceOpen = () => {
    const { hls, media, mediaSource } = this;
    logger.log('[buffer-controller]: Media source opened');
    if (media) {
      this.updateMediaElementDuration();
      hls.trigger(Events.MEDIA_ATTACHED, { media });
    }

    if (mediaSource) {
      // once received, stop listening to the sourceopen event
      mediaSource.removeEventListener('sourceopen', this._onMediaSourceOpen);
    }
    this.checkPendingTracks();
  };

  private _onMediaSourceClose = () => {
    logger.log('[buffer-controller]: Media source closed');
  };

  private _onMediaSourceEnded = () => {
    logger.log('[buffer-controller]: Media source ended');
  };

  private _onSBUpdateStart(type: SourceBufferName) {
    const { operationQueue } = this;
    const operation = operationQueue.current(type);
    operation.onStart();
  }

  private _onSBUpdateEnd(type: SourceBufferName) {
    const { operationQueue } = this;
    const operation = operationQueue.current(type);
    operation.onComplete();
    operationQueue.shiftAndExecuteNext(type);
  }

  private _onSBUpdateError(type: SourceBufferName, event: Event) {
    logger.error(`[buffer-controller]: ${type} SourceBuffer error`, event);
    // according to http://www.w3.org/TR/media-source/#sourcebuffer-append-error
    // SourceBuffer errors are not necessarily fatal; when they are, the HTMLMediaElement will fire an error event
    this.hls.trigger(Events.ERROR, {
      type: ErrorTypes.MEDIA_ERROR,
      details: ErrorDetails.BUFFER_APPENDING_ERROR,
      fatal: false,
    });
    // updateend is always fired after error, so we'll allow that to shift the current operation off of the queue
    const operation = this.operationQueue.current(type);
    if (operation) {
      operation.onError(event);
    }
  }

  // This method must result in an updateend event; if remove is not called, _onSBUpdateEnd must be called manually
  private removeExecutor(
    type: SourceBufferName,
    startOffset: number,
    endOffset: number
  ) {
    const { media, mediaSource, operationQueue, sourceBuffer } = this;
    const sb = sourceBuffer[type];
    if (!media || !mediaSource || !sb) {
      logger.warn(
        `[buffer-controller]: Attempting to remove from the ${type} SourceBuffer, but it does not exist`
      );
      operationQueue.shiftAndExecuteNext(type);
      return;
    }
    const mediaDuration = Number.isFinite(media.duration)
      ? media.duration
      : Infinity;
    const msDuration = Number.isFinite(mediaSource.duration)
      ? mediaSource.duration
      : Infinity;
    const removeStart = Math.max(0, startOffset);
    const removeEnd = Math.min(endOffset, mediaDuration, msDuration);
    if (removeEnd > removeStart) {
      logger.log(
        `[buffer-controller]: Removing [${removeStart},${removeEnd}] from the ${type} SourceBuffer`
      );
      console.assert(!sb.updating, `${type} sourceBuffer must not be updating`);
      sb.remove(removeStart, removeEnd);
    } else {
      // Cycle the queue
      operationQueue.shiftAndExecuteNext(type);
    }
  }

  // This method must result in an updateend event; if append is not called, _onSBUpdateEnd must be called manually
  private appendExecutor(data: Uint8Array, type: SourceBufferName) {
    const { operationQueue, sourceBuffer } = this;
    const sb = sourceBuffer[type];
    if (!sb) {
      logger.warn(
        `[buffer-controller]: Attempting to append to the ${type} SourceBuffer, but it does not exist`
      );
      operationQueue.shiftAndExecuteNext(type);
      return;
    }

    sb.ended = false;
    console.assert(!sb.updating, `${type} sourceBuffer must not be updating`);
    sb.appendBuffer(data);
  }

  // Enqueues an operation to each SourceBuffer queue which, upon execution, resolves a promise. When all promises
  // resolve, the onUnblocked function is executed. Functions calling this method do not need to unblock the queue
  // upon completion, since we already do it here
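  // Note (assumed behaviour of BufferOperationQueue): appendBlocker() is expected to enqueue an operation
  // whose returned promise resolves once it reaches the front of that SourceBuffer's queue, so the
  // Promise.all() below settles only after every listed SourceBuffer has drained its prior operations.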
  private blockBuffers(
    onUnblocked: Function,
    buffers: Array<SourceBufferName> = this.getSourceBufferTypes()
  ) {
    if (!buffers.length) {
      logger.log(
        '[buffer-controller]: Blocking operation requested, but no SourceBuffers exist'
      );
      // nothing to block on; invoke the callback asynchronously instead
      Promise.resolve().then(() => onUnblocked());
      return;
    }
    const { operationQueue } = this;

    // logger.debug(`[buffer-controller]: Blocking ${buffers} SourceBuffer`);
    const blockingOperations = buffers.map((type) =>
      operationQueue.appendBlocker(type as SourceBufferName)
    );
    Promise.all(blockingOperations).then(() => {
      // logger.debug(`[buffer-controller]: Blocking operation resolved; unblocking ${buffers} SourceBuffer`);
      onUnblocked();
      buffers.forEach((type) => {
        const sb = this.sourceBuffer[type];
        // Only cycle the queue if the SB is not updating. There's a bug in Chrome which sets the SB updating flag to
        // true when changing the MediaSource duration (https://bugs.chromium.org/p/chromium/issues/detail?id=959359&can=2&q=mediasource%20duration)
        // While this is a workaround, it's probably useful to have around
        if (!sb || !sb.updating) {
          operationQueue.shiftAndExecuteNext(type);
        }
      });
    });
  }

  private getSourceBufferTypes(): Array<SourceBufferName> {
    return Object.keys(this.sourceBuffer) as Array<SourceBufferName>;
  }

  private addBufferListener(
    type: SourceBufferName,
    event: string,
    fn: Function
  ) {
    const buffer = this.sourceBuffer[type];
    if (!buffer) {
      return;
    }
    const listener = fn.bind(this, type);
    this.listeners[type].push({ event, listener });
    buffer.addEventListener(event, listener);
  }

  private removeBufferListeners(type: SourceBufferName) {
    const buffer = this.sourceBuffer[type];
    if (!buffer) {
      return;
    }
    this.listeners[type].forEach((l) => {
      buffer.removeEventListener(l.event, l.listener);
    });
  }
  906. }