Source: lib/media/media_source_engine.js

  1. /*! @license
  2. * Shaka Player
  3. * Copyright 2016 Google LLC
  4. * SPDX-License-Identifier: Apache-2.0
  5. */
  6. goog.provide('shaka.media.MediaSourceEngine');
  7. goog.require('goog.asserts');
  8. goog.require('shaka.log');
  9. goog.require('shaka.media.ContentWorkarounds');
  10. goog.require('shaka.media.IClosedCaptionParser');
  11. goog.require('shaka.media.TimeRangesUtils');
  12. goog.require('shaka.media.Transmuxer');
  13. goog.require('shaka.text.TextEngine');
  14. goog.require('shaka.util.Destroyer');
  15. goog.require('shaka.util.Error');
  16. goog.require('shaka.util.EventManager');
  17. goog.require('shaka.util.Functional');
  18. goog.require('shaka.util.IDestroyable');
  19. goog.require('shaka.util.ManifestParserUtils');
  20. goog.require('shaka.util.MimeUtils');
  21. goog.require('shaka.util.Platform');
  22. goog.require('shaka.util.PublicPromise');
  23. /**
  24. * @summary
  25. * MediaSourceEngine wraps all operations on MediaSource and SourceBuffers.
  26. * All asynchronous operations return a Promise, and all operations are
  27. * internally synchronized and serialized as needed. Operations that can
  28. * be done in parallel will be done in parallel.
  29. *
  30. * @implements {shaka.util.IDestroyable}
  31. */
  32. shaka.media.MediaSourceEngine = class {
  33. /**
  34. * @param {HTMLMediaElement} video The video element, whose source is tied to
  35. * MediaSource during the lifetime of the MediaSourceEngine.
  36. * @param {!shaka.media.IClosedCaptionParser} closedCaptionParser
  37. * The closed caption parser that should be used to parser closed captions
  38. * from the video stream. MediaSourceEngine takes ownership of the parser.
  39. * When MediaSourceEngine is destroyed, it will destroy the parser.
  40. * @param {!shaka.extern.TextDisplayer} textDisplayer
  41. * The text displayer that will be used with the text engine.
  42. * MediaSourceEngine takes ownership of the displayer. When
  43. * MediaSourceEngine is destroyed, it will destroy the displayer.
  44. * @param {!function(!Array.<shaka.extern.ID3Metadata>, number, ?number)=}
  45. * onMetadata
  46. */
  47. constructor(video, closedCaptionParser, textDisplayer, onMetadata) {
  48. /** @private {HTMLMediaElement} */
  49. this.video_ = video;
  50. /** @private {shaka.extern.TextDisplayer} */
  51. this.textDisplayer_ = textDisplayer;
  52. /** @private {!Object.<shaka.util.ManifestParserUtils.ContentType,
  53. SourceBuffer>} */
  54. this.sourceBuffers_ = {};
  55. /** @private {!Object.<shaka.util.ManifestParserUtils.ContentType,
  56. string>} */
  57. this.sourceBufferTypes_ = {};
  58. /** @private {!Object.<shaka.util.ManifestParserUtils.ContentType,
  59. boolean>} */
  60. this.expectedEncryption_ = {};
  61. /** @private {shaka.text.TextEngine} */
  62. this.textEngine_ = null;
  63. /** @private {boolean} */
  64. this.segmentRelativeVttTiming_ = false;
  65. const onMetadataNoOp = (metadata, timestampOffset, segmentEnd) => {};
  66. /** @private {!function(!Array.<shaka.extern.ID3Metadata>,
  67. number, ?number)} */
  68. this.onMetadata_ = onMetadata || onMetadataNoOp;
  69. /**
  70. * @private {!Object.<string,
  71. * !Array.<shaka.media.MediaSourceEngine.Operation>>}
  72. */
  73. this.queues_ = {};
  74. /** @private {shaka.util.EventManager} */
  75. this.eventManager_ = new shaka.util.EventManager();
  76. /** @private {!Object.<string, !shaka.media.Transmuxer>} */
  77. this.transmuxers_ = {};
  78. /** @private {shaka.media.IClosedCaptionParser} */
  79. this.captionParser_ = closedCaptionParser;
  80. /** @private {!shaka.util.PublicPromise} */
  81. this.mediaSourceOpen_ = new shaka.util.PublicPromise();
  82. /** @private {MediaSource} */
  83. this.mediaSource_ = this.createMediaSource(this.mediaSourceOpen_);
  84. /** @type {!shaka.util.Destroyer} */
  85. this.destroyer_ = new shaka.util.Destroyer(() => this.doDestroy_());
  86. /** @private {string} */
  87. this.url_ = '';
  88. /** @type {boolean} */
  89. this.lowLatencyMode_ = false;
  90. /** @private {boolean} */
  91. this.sequenceMode_ = false;
  92. /** @private {!shaka.util.PublicPromise.<number>} */
  93. this.textSequenceModeOffset_ = new shaka.util.PublicPromise();
  94. }
  /**
   * Create a MediaSource object, attach it to the video element, and return
   * it.  Resolves the given promise when the MediaSource is ready.
   *
   * Replaced by unit tests.
   *
   * @param {!shaka.util.PublicPromise} p Resolved once the 'sourceopen'
   *   event fires (see onSourceOpen_).
   * @return {!MediaSource}
   */
  createMediaSource(p) {
    const mediaSource = new MediaSource();

    // Set up MediaSource on the video element.  The listener is registered
    // before the src is assigned, so the 'sourceopen' event cannot be missed.
    this.eventManager_.listenOnce(
        mediaSource, 'sourceopen', () => this.onSourceOpen_(p));

    // Store the object URL for releasing it later.
    this.url_ = shaka.media.MediaSourceEngine.createObjectURL(mediaSource);

    this.video_.src = this.url_;

    return mediaSource;
  }
  114. /**
  115. * @param {!shaka.util.PublicPromise} p
  116. * @private
  117. */
  118. onSourceOpen_(p) {
  119. // Release the object URL that was previously created, to prevent memory
  120. // leak.
  121. // createObjectURL creates a strong reference to the MediaSource object
  122. // inside the browser. Setting the src of the video then creates another
  123. // reference within the video element. revokeObjectURL will remove the
  124. // strong reference to the MediaSource object, and allow it to be
  125. // garbage-collected later.
  126. URL.revokeObjectURL(this.url_);
  127. p.resolve();
  128. }
  129. /**
  130. * Checks if a certain type is supported.
  131. *
  132. * @param {shaka.extern.Stream} stream
  133. * @return {boolean}
  134. */
  135. static isStreamSupported(stream) {
  136. const fullMimeType = shaka.util.MimeUtils.getFullType(
  137. stream.mimeType, stream.codecs);
  138. const extendedMimeType = shaka.util.MimeUtils.getExtendedType(stream);
  139. return shaka.text.TextEngine.isTypeSupported(fullMimeType) ||
  140. MediaSource.isTypeSupported(extendedMimeType) ||
  141. shaka.media.Transmuxer.isSupported(fullMimeType, stream.type);
  142. }
  143. /**
  144. * Returns a map of MediaSource support for well-known types.
  145. *
  146. * @return {!Object.<string, boolean>}
  147. */
  148. static probeSupport() {
  149. const testMimeTypes = [
  150. // MP4 types
  151. 'video/mp4; codecs="avc1.42E01E"',
  152. 'video/mp4; codecs="avc3.42E01E"',
  153. 'video/mp4; codecs="hev1.1.6.L93.90"',
  154. 'video/mp4; codecs="hvc1.1.6.L93.90"',
  155. 'video/mp4; codecs="hev1.2.4.L153.B0"; eotf="smpte2084"', // HDR HEVC
  156. 'video/mp4; codecs="hvc1.2.4.L153.B0"; eotf="smpte2084"', // HDR HEVC
  157. 'video/mp4; codecs="vp9"',
  158. 'video/mp4; codecs="vp09.00.10.08"',
  159. 'video/mp4; codecs="av01.0.01M.08"',
  160. 'audio/mp4; codecs="mp4a.40.2"',
  161. 'audio/mp4; codecs="ac-3"',
  162. 'audio/mp4; codecs="ec-3"',
  163. 'audio/mp4; codecs="opus"',
  164. 'audio/mp4; codecs="flac"',
  165. // WebM types
  166. 'video/webm; codecs="vp8"',
  167. 'video/webm; codecs="vp9"',
  168. 'video/webm; codecs="vp09.00.10.08"',
  169. 'audio/webm; codecs="vorbis"',
  170. 'audio/webm; codecs="opus"',
  171. // MPEG2 TS types (video/ is also used for audio: https://bit.ly/TsMse)
  172. 'video/mp2t; codecs="avc1.42E01E"',
  173. 'video/mp2t; codecs="avc3.42E01E"',
  174. 'video/mp2t; codecs="hvc1.1.6.L93.90"',
  175. 'video/mp2t; codecs="mp4a.40.2"',
  176. 'video/mp2t; codecs="ac-3"',
  177. 'video/mp2t; codecs="ec-3"',
  178. // WebVTT types
  179. 'text/vtt',
  180. 'application/mp4; codecs="wvtt"',
  181. // TTML types
  182. 'application/ttml+xml',
  183. 'application/mp4; codecs="stpp"',
  184. // Containerless types
  185. ...shaka.media.MediaSourceEngine.RAW_FORMATS,
  186. ];
  187. const support = {};
  188. for (const type of testMimeTypes) {
  189. if (shaka.util.Platform.supportsMediaSource()) {
  190. // Our TextEngine is only effective for MSE platforms at the moment.
  191. if (shaka.text.TextEngine.isTypeSupported(type)) {
  192. support[type] = true;
  193. } else {
  194. support[type] = MediaSource.isTypeSupported(type) ||
  195. shaka.media.Transmuxer.isSupported(type);
  196. }
  197. } else {
  198. support[type] = shaka.util.Platform.supportsMediaType(type);
  199. }
  200. const basicType = type.split(';')[0];
  201. support[basicType] = support[basicType] || support[type];
  202. }
  203. return support;
  204. }
  /** @override */
  destroy() {
    // Delegate to the Destroyer, which runs doDestroy_() (below) for the
    // actual teardown work.
    return this.destroyer_.destroy();
  }
  /**
   * The actual teardown, run via the Destroyer: waits for in-flight
   * SourceBuffer operations, rejects any still-queued ones, destroys owned
   * components (text engine, text displayer, transmuxers), and detaches from
   * the video element.
   * @private
   */
  async doDestroy_() {
    const Functional = shaka.util.Functional;

    const cleanup = [];

    for (const contentType in this.queues_) {
      // Make a local copy of the queue and the first item.
      const q = this.queues_[contentType];
      const inProgress = q[0];

      // Drop everything else out of the original queue.
      this.queues_[contentType] = q.slice(0, 1);

      // We will wait for this item to complete/fail.
      if (inProgress) {
        cleanup.push(inProgress.p.catch(Functional.noop));
      }

      // The rest will be rejected silently if possible.
      for (const item of q.slice(1)) {
        item.p.reject(shaka.util.Destroyer.destroyedError());
      }
    }

    if (this.textEngine_) {
      cleanup.push(this.textEngine_.destroy());
    }
    if (this.textDisplayer_) {
      cleanup.push(this.textDisplayer_.destroy());
    }

    for (const contentType in this.transmuxers_) {
      cleanup.push(this.transmuxers_[contentType].destroy());
    }

    // All of the above can be torn down in parallel.
    await Promise.all(cleanup);

    if (this.eventManager_) {
      this.eventManager_.release();
      this.eventManager_ = null;
    }

    if (this.video_) {
      // "unload" the video element.
      this.video_.removeAttribute('src');
      this.video_.load();
      this.video_ = null;
    }

    this.mediaSource_ = null;
    this.textEngine_ = null;
    this.textDisplayer_ = null;
    this.sourceBuffers_ = {};
    this.transmuxers_ = {};
    this.captionParser_ = null;

    if (goog.DEBUG) {
      // In debug builds, verify that waiting on the in-progress operations
      // above drained every queue.
      for (const contentType in this.queues_) {
        goog.asserts.assert(
            this.queues_[contentType].length == 0,
            contentType + ' queue should be empty after destroy!');
      }
    }

    this.queues_ = {};
  }
  /**
   * @return {!Promise} Resolved when MediaSource is open and attached to the
   *   media element.  This process is actually initiated by the constructor.
   */
  open() {
    // mediaSourceOpen_ is a PublicPromise resolved by onSourceOpen_().
    return this.mediaSourceOpen_;
  }
  /**
   * Initialize MediaSourceEngine.
   *
   * Note that it is not valid to call this multiple times, except to add or
   * reinitialize text streams.
   *
   * @param {!Map.<shaka.util.ManifestParserUtils.ContentType,
   *               shaka.extern.Stream>} streamsByType
   *   A map of content types to streams.  All streams must be supported
   *   according to MediaSourceEngine.isStreamSupported.
   * @param {boolean} forceTransmuxTS
   *   If true, this will transmux TS content even if it is natively supported.
   * @param {boolean=} sequenceMode
   *   If true, the media segments are appended to the SourceBuffer in strict
   *   sequence.
   *
   * @return {!Promise}
   */
  async init(streamsByType, forceTransmuxTS, sequenceMode=false) {
    const ContentType = shaka.util.ManifestParserUtils.ContentType;

    // SourceBuffers can only be added once the MediaSource is open.
    await this.mediaSourceOpen_;

    this.sequenceMode_ = sequenceMode;

    for (const contentType of streamsByType.keys()) {
      const stream = streamsByType.get(contentType);
      goog.asserts.assert(
          shaka.media.MediaSourceEngine.isStreamSupported(stream),
          'Type negotiation should happen before MediaSourceEngine.init!');

      let mimeType = shaka.util.MimeUtils.getFullType(
          stream.mimeType, stream.codecs);
      if (contentType == ContentType.TEXT) {
        // Text streams are handled by the TextEngine, not a SourceBuffer.
        this.reinitText(mimeType, sequenceMode);
      } else {
        // Use a transmuxer when forced, or when MSE can't play the type
        // natively but the transmuxer can convert it to one it can.
        if ((forceTransmuxTS || !MediaSource.isTypeSupported(mimeType)) &&
            shaka.media.Transmuxer.isSupported(mimeType, contentType)) {
          this.transmuxers_[contentType] = new shaka.media.Transmuxer();
          mimeType =
              shaka.media.Transmuxer.convertTsCodecs(contentType, mimeType);
        }
        const sourceBuffer = this.mediaSource_.addSourceBuffer(mimeType);
        this.eventManager_.listen(
            sourceBuffer, 'error',
            () => this.onError_(contentType));
        this.eventManager_.listen(
            sourceBuffer, 'updateend',
            () => this.onUpdateEnd_(contentType));
        this.sourceBuffers_[contentType] = sourceBuffer;
        this.sourceBufferTypes_[contentType] = mimeType;
        // Start with an empty operation queue for this type.
        this.queues_[contentType] = [];
        this.expectedEncryption_[contentType] = !!stream.drmInfos.length;
      }
    }
  }
  322. /**
  323. * Reinitialize the TextEngine for a new text type.
  324. * @param {string} mimeType
  325. * @param {boolean} sequenceMode
  326. */
  327. reinitText(mimeType, sequenceMode) {
  328. if (!this.textEngine_) {
  329. this.textEngine_ = new shaka.text.TextEngine(this.textDisplayer_);
  330. }
  331. this.textEngine_.initParser(mimeType, sequenceMode,
  332. this.segmentRelativeVttTiming_);
  333. }
  334. /**
  335. * @return {boolean} True if the MediaSource is in an "ended" state, or if the
  336. * object has been destroyed.
  337. */
  338. ended() {
  339. return this.mediaSource_ ? this.mediaSource_.readyState == 'ended' : true;
  340. }
  341. /**
  342. * Gets the first timestamp in buffer for the given content type.
  343. *
  344. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  345. * @return {?number} The timestamp in seconds, or null if nothing is buffered.
  346. */
  347. bufferStart(contentType) {
  348. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  349. if (contentType == ContentType.TEXT) {
  350. return this.textEngine_.bufferStart();
  351. }
  352. return shaka.media.TimeRangesUtils.bufferStart(
  353. this.getBuffered_(contentType));
  354. }
  355. /**
  356. * Gets the last timestamp in buffer for the given content type.
  357. *
  358. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  359. * @return {?number} The timestamp in seconds, or null if nothing is buffered.
  360. */
  361. bufferEnd(contentType) {
  362. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  363. if (contentType == ContentType.TEXT) {
  364. return this.textEngine_.bufferEnd();
  365. }
  366. return shaka.media.TimeRangesUtils.bufferEnd(
  367. this.getBuffered_(contentType));
  368. }
  369. /**
  370. * Determines if the given time is inside the buffered range of the given
  371. * content type.
  372. *
  373. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  374. * @param {number} time Playhead time
  375. * @return {boolean}
  376. */
  377. isBuffered(contentType, time) {
  378. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  379. if (contentType == ContentType.TEXT) {
  380. return this.textEngine_.isBuffered(time);
  381. } else {
  382. const buffered = this.getBuffered_(contentType);
  383. return shaka.media.TimeRangesUtils.isBuffered(buffered, time);
  384. }
  385. }
  386. /**
  387. * Computes how far ahead of the given timestamp is buffered for the given
  388. * content type.
  389. *
  390. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  391. * @param {number} time
  392. * @return {number} The amount of time buffered ahead in seconds.
  393. */
  394. bufferedAheadOf(contentType, time) {
  395. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  396. if (contentType == ContentType.TEXT) {
  397. return this.textEngine_.bufferedAheadOf(time);
  398. } else {
  399. const buffered = this.getBuffered_(contentType);
  400. return shaka.media.TimeRangesUtils.bufferedAheadOf(buffered, time);
  401. }
  402. }
  403. /**
  404. * Returns info about what is currently buffered.
  405. * @return {shaka.extern.BufferedInfo}
  406. */
  407. getBufferedInfo() {
  408. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  409. const TimeRangeUtils = shaka.media.TimeRangesUtils;
  410. const info = {
  411. total: TimeRangeUtils.getBufferedInfo(this.video_.buffered),
  412. audio: TimeRangeUtils.getBufferedInfo(
  413. this.getBuffered_(ContentType.AUDIO)),
  414. video: TimeRangeUtils.getBufferedInfo(
  415. this.getBuffered_(ContentType.VIDEO)),
  416. text: [],
  417. };
  418. if (this.textEngine_) {
  419. const start = this.textEngine_.bufferStart();
  420. const end = this.textEngine_.bufferEnd();
  421. if (start != null && end != null) {
  422. info.text.push({start: start, end: end});
  423. }
  424. }
  425. return info;
  426. }
  /**
   * @param {shaka.util.ManifestParserUtils.ContentType} contentType
   * @return {TimeRanges} The buffered ranges for the given content type, or
   *   null if the buffered ranges could not be obtained.
   * @private
   */
  getBuffered_(contentType) {
    try {
      return this.sourceBuffers_[contentType].buffered;
    } catch (exception) {
      // Only log when a buffer for this type actually exists; if the content
      // type has no SourceBuffer, the property access throws and we quietly
      // return null.
      if (contentType in this.sourceBuffers_) {
        // Note: previous MediaSource errors may cause access to |buffered| to
        // throw.
        shaka.log.error('failed to get buffered range for ' + contentType,
            exception);
      }
      return null;
    }
  }
  /**
   * Enqueue an operation to append data to the SourceBuffer.
   * Start and end times are needed for TextEngine, but not for MediaSource.
   * Start and end times may be null for initialization segments; if present
   * they are relative to the presentation timeline.
   *
   * @param {shaka.util.ManifestParserUtils.ContentType} contentType
   * @param {!BufferSource} data
   * @param {?number} startTime relative to the start of the presentation
   * @param {?number} endTime relative to the start of the presentation
   * @param {?boolean} hasClosedCaptions True if the buffer contains CEA closed
   *   captions
   * @param {boolean=} seeked True if we just seeked
   * @return {!Promise}
   */
  async appendBuffer(
      contentType, data, startTime, endTime, hasClosedCaptions, seeked) {
    const ContentType = shaka.util.ManifestParserUtils.ContentType;

    if (contentType == ContentType.TEXT) {
      if (this.sequenceMode_) {
        // This won't be known until the first video segment is appended.
        const offset = await this.textSequenceModeOffset_;
        this.textEngine_.setTimestampOffset(offset);
      }
      await this.textEngine_.appendBuffer(data, startTime, endTime);
      return;
    }

    if (this.transmuxers_[contentType]) {
      const transmuxedData =
          await this.transmuxers_[contentType].transmux(data);
      // For HLS CEA-608/708 CLOSED-CAPTIONS, text data is embedded in
      // the video stream, so textEngine may not have been initialized.
      if (!this.textEngine_) {
        this.reinitText('text/vtt', this.sequenceMode_);
      }

      if (transmuxedData.metadata) {
        const timestampOffset =
            this.sourceBuffers_[contentType].timestampOffset;
        this.onMetadata_(transmuxedData.metadata, timestampOffset, endTime);
      }
      // This doesn't work for native TS support (ex. Edge/Chromecast),
      // since no transmuxing is needed for native TS.
      if (transmuxedData.captions && transmuxedData.captions.length) {
        const videoOffset =
            this.sourceBuffers_[ContentType.VIDEO].timestampOffset;
        const closedCaptions = this.textEngine_
            .convertMuxjsCaptionsToShakaCaptions(transmuxedData.captions);
        this.textEngine_.storeAndAppendClosedCaptions(
            closedCaptions, startTime, endTime, videoOffset);
      }

      // From here on, append the transmuxed output instead of the raw input.
      data = transmuxedData.data;

      if (!this.lowLatencyMode_) {
        // Queue an abort() to reset MSE parser state before the append below.
        await this.enqueueOperation_(
            contentType,
            () => this.abort_(contentType));
      }
    } else if (hasClosedCaptions) {
      if (!this.textEngine_) {
        this.reinitText('text/vtt', this.sequenceMode_);
      }
      // If it is the init segment for closed captions, initialize the closed
      // caption parser.
      if (startTime == null && endTime == null) {
        this.captionParser_.init(data);
      } else {
        const closedCaptions = this.captionParser_.parseFrom(data);
        if (closedCaptions.length) {
          const videoOffset =
              this.sourceBuffers_[ContentType.VIDEO].timestampOffset;
          this.textEngine_.storeAndAppendClosedCaptions(
              closedCaptions, startTime, endTime, videoOffset);
        }
      }
    }

    data = this.workAroundBrokenPlatforms_(data, startTime, contentType);

    const sourceBuffer = this.sourceBuffers_[contentType];
    const SEQUENCE = shaka.media.MediaSourceEngine.SourceBufferMode_.SEQUENCE;

    if (this.sequenceMode_ && sourceBuffer.mode != SEQUENCE &&
        startTime != null) {
      // This is the first media segment to be appended to a SourceBuffer in
      // sequence mode.  We set the mode late so that we can trick MediaSource
      // into extracting a timestamp for us to align text segments in sequence
      // mode.

      // Timestamps can only be reliably extracted from video, not audio.
      // Packed audio formats do not have internal timestamps at all.
      // Prefer video for this when available.
      const isBestSourceBufferForTimestamps =
          contentType == ContentType.VIDEO ||
          !(ContentType.VIDEO in this.sourceBuffers_);
      if (isBestSourceBufferForTimestamps) {
        // Append the segment in segments mode first, with offset of 0 and an
        // open append window.
        const originalRange =
            [sourceBuffer.appendWindowStart, sourceBuffer.appendWindowEnd];
        sourceBuffer.appendWindowStart = 0;
        sourceBuffer.appendWindowEnd = Infinity;

        const originalOffset = sourceBuffer.timestampOffset;
        sourceBuffer.timestampOffset = 0;

        await this.enqueueOperation_(
            contentType, () => this.append_(contentType, data));

        // Reset the offset and append window.
        sourceBuffer.timestampOffset = originalOffset;
        sourceBuffer.appendWindowStart = originalRange[0];
        sourceBuffer.appendWindowEnd = originalRange[1];

        // Now get the timestamp of the segment and compute the offset for
        // text segments.
        const mediaStartTime = shaka.media.TimeRangesUtils.bufferStart(
            this.getBuffered_(contentType));
        const textOffset = (startTime || 0) - (mediaStartTime || 0);
        this.textSequenceModeOffset_.resolve(textOffset);

        // Finally, clear the buffer.
        await this.enqueueOperation_(
            contentType,
            () => this.remove_(contentType, 0, this.mediaSource_.duration));
      }

      // Now switch to sequence mode and fall through to our normal
      // operations.
      sourceBuffer.mode = SEQUENCE;
    }

    if (!this.lowLatencyMode_) {
      // Queue an abort() to help MSE splice together overlapping segments.
      await this.enqueueOperation_(
          contentType,
          () => this.abort_(contentType));
    }

    if (startTime != null && this.sequenceMode_ &&
        contentType != ContentType.TEXT) {
      // In sequence mode, for non-text streams, if we just cleared the buffer
      // and are performing an unbuffered seek, we need to set a new
      // timestampOffset on the sourceBuffer.
      if (seeked) {
        const timestampOffset = /** @type {number} */ (startTime);
        // NOTE(review): this enqueued operation is deliberately not awaited;
        // ordering is preserved by the per-type queue.
        this.enqueueOperation_(
            contentType,
            () => this.setTimestampOffset_(contentType, timestampOffset));
      }
    }

    await this.enqueueOperation_(
        contentType,
        () => this.append_(contentType, data));
  }
  592. /**
  593. * Set the selected closed captions Id and language.
  594. *
  595. * @param {string} id
  596. */
  597. setSelectedClosedCaptionId(id) {
  598. const VIDEO = shaka.util.ManifestParserUtils.ContentType.VIDEO;
  599. const videoBufferEndTime = this.bufferEnd(VIDEO) || 0;
  600. this.textEngine_.setSelectedClosedCaptionId(id, videoBufferEndTime);
  601. }
  602. /** Disable embedded closed captions. */
  603. clearSelectedClosedCaptionId() {
  604. if (this.textEngine_) {
  605. this.textEngine_.setSelectedClosedCaptionId('', 0);
  606. }
  607. }
  608. /**
  609. * Enqueue an operation to remove data from the SourceBuffer.
  610. *
  611. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  612. * @param {number} startTime relative to the start of the presentation
  613. * @param {number} endTime relative to the start of the presentation
  614. * @return {!Promise}
  615. */
  616. async remove(contentType, startTime, endTime) {
  617. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  618. if (contentType == ContentType.TEXT) {
  619. await this.textEngine_.remove(startTime, endTime);
  620. } else {
  621. await this.enqueueOperation_(
  622. contentType,
  623. () => this.remove_(contentType, startTime, endTime));
  624. }
  625. }
  626. /**
  627. * Enqueue an operation to clear the SourceBuffer.
  628. *
  629. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  630. * @return {!Promise}
  631. */
  632. async clear(contentType) {
  633. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  634. if (contentType == ContentType.TEXT) {
  635. if (!this.textEngine_) {
  636. return;
  637. }
  638. await this.textEngine_.remove(0, Infinity);
  639. } else {
  640. // Note that not all platforms allow clearing to Infinity.
  641. await this.enqueueOperation_(
  642. contentType,
  643. () => this.remove_(contentType, 0, this.mediaSource_.duration));
  644. }
  645. }
  /**
   * Fully reset the state of the caption parser owned by MediaSourceEngine.
   */
  resetCaptionParser() {
    this.captionParser_.reset();
  }
  652. /**
  653. * Enqueue an operation to flush the SourceBuffer.
  654. * This is a workaround for what we believe is a Chromecast bug.
  655. *
  656. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  657. * @return {!Promise}
  658. */
  659. async flush(contentType) {
  660. // Flush the pipeline. Necessary on Chromecast, even though we have removed
  661. // everything.
  662. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  663. if (contentType == ContentType.TEXT) {
  664. // Nothing to flush for text.
  665. return;
  666. }
  667. await this.enqueueOperation_(
  668. contentType,
  669. () => this.flush_(contentType));
  670. }
  /**
   * Sets the timestamp offset and append window end for the given content
   * type.
   *
   * @param {shaka.util.ManifestParserUtils.ContentType} contentType
   * @param {number} timestampOffset The timestamp offset.  Segments which
   *   start at time t will be inserted at time t + timestampOffset instead.
   *   This value does not affect segments which have already been inserted.
   * @param {number} appendWindowStart The timestamp to set the append window
   *   start to.  For future appends, frames/samples with timestamps less than
   *   this value will be dropped.
   * @param {number} appendWindowEnd The timestamp to set the append window
   *   end to.  For future appends, frames/samples with timestamps greater
   *   than this value will be dropped.
   * @param {boolean} sequenceMode If true, the timestampOffset will not be
   *   applied in this step.
   * @return {!Promise}
   */
  async setStreamProperties(
      contentType, timestampOffset, appendWindowStart, appendWindowEnd,
      sequenceMode) {
    const ContentType = shaka.util.ManifestParserUtils.ContentType;
    if (contentType == ContentType.TEXT) {
      if (!sequenceMode) {
        this.textEngine_.setTimestampOffset(timestampOffset);
      }
      this.textEngine_.setAppendWindow(appendWindowStart, appendWindowEnd);
      return;
    }

    // The operations below share one per-type queue, so they run in order;
    // Promise.all just waits for all three to settle.
    await Promise.all([
      // Queue an abort() to help MSE splice together overlapping segments.
      // We set appendWindowEnd when we change periods in DASH content, and the
      // period transition may result in overlap.
      //
      // An abort() also helps with MPEG2-TS.  When we append a TS segment, we
      // always enter a PARSING_MEDIA_SEGMENT state and we can't change the
      // timestamp offset.  By calling abort(), we reset the state so we can
      // set it.
      this.enqueueOperation_(
          contentType,
          () => this.abort_(contentType)),
      // Don't set the timestampOffset here when in sequenceMode, since we
      // use timestampOffset for a different purpose in that mode (e.g. to
      // indicate where the current segment is).
      sequenceMode ? Promise.resolve() : this.enqueueOperation_(
          contentType,
          () => this.setTimestampOffset_(contentType, timestampOffset)),
      this.enqueueOperation_(
          contentType,
          () => this.setAppendWindow_(
              contentType, appendWindowStart, appendWindowEnd)),
    ]);
  }
  723. /**
  724. * @param {string=} reason Valid reasons are 'network' and 'decode'.
  725. * @return {!Promise}
  726. * @see http://w3c.github.io/media-source/#idl-def-EndOfStreamError
  727. */
  728. async endOfStream(reason) {
  729. await this.enqueueBlockingOperation_(() => {
  730. // If endOfStream() has already been called on the media source,
  731. // don't call it again.
  732. if (this.ended()) {
  733. return;
  734. }
  735. // Tizen won't let us pass undefined, but it will let us omit the
  736. // argument.
  737. if (reason) {
  738. this.mediaSource_.endOfStream(reason);
  739. } else {
  740. this.mediaSource_.endOfStream();
  741. }
  742. });
  743. }
  744. /**
  745. * We only support increasing duration at this time. Decreasing duration
  746. * causes the MSE removal algorithm to run, which results in an 'updateend'
  747. * event. Supporting this scenario would be complicated, and is not currently
  748. * needed.
  749. *
  750. * @param {number} duration
  751. * @return {!Promise}
  752. */
  753. async setDuration(duration) {
  754. goog.asserts.assert(
  755. isNaN(this.mediaSource_.duration) ||
  756. this.mediaSource_.duration <= duration,
  757. 'duration cannot decrease: ' + this.mediaSource_.duration + ' -> ' +
  758. duration);
  759. await this.enqueueBlockingOperation_(() => {
  760. this.mediaSource_.duration = duration;
  761. });
  762. }
  /**
   * Get the current MediaSource duration.
   *
   * @return {number}
   */
  getDuration() {
    return this.mediaSource_.duration;
  }
  /**
   * Append data to the SourceBuffer.
   * @param {shaka.util.ManifestParserUtils.ContentType} contentType
   * @param {BufferSource} data
   * @private
   */
  append_(contentType, data) {
    // This will trigger an 'updateend' event, which resolves the enqueued
    // operation for this content type.
    this.sourceBuffers_[contentType].appendBuffer(data);
  }
  781. /**
  782. * Remove data from the SourceBuffer.
  783. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  784. * @param {number} startTime relative to the start of the presentation
  785. * @param {number} endTime relative to the start of the presentation
  786. * @private
  787. */
  788. remove_(contentType, startTime, endTime) {
  789. if (endTime <= startTime) {
  790. // Ignore removal of inverted or empty ranges.
  791. // Fake 'updateend' event to resolve the operation.
  792. this.onUpdateEnd_(contentType);
  793. return;
  794. }
  795. // This will trigger an 'updateend' event.
  796. this.sourceBuffers_[contentType].remove(startTime, endTime);
  797. }
  798. /**
  799. * Call abort() on the SourceBuffer.
  800. * This resets MSE's last_decode_timestamp on all track buffers, which should
  801. * trigger the splicing logic for overlapping segments.
  802. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  803. * @private
  804. */
  805. abort_(contentType) {
  806. // Save the append window, which is reset on abort().
  807. const appendWindowStart =
  808. this.sourceBuffers_[contentType].appendWindowStart;
  809. const appendWindowEnd = this.sourceBuffers_[contentType].appendWindowEnd;
  810. // This will not trigger an 'updateend' event, since nothing is happening.
  811. // This is only to reset MSE internals, not to abort an actual operation.
  812. this.sourceBuffers_[contentType].abort();
  813. // Restore the append window.
  814. this.sourceBuffers_[contentType].appendWindowStart = appendWindowStart;
  815. this.sourceBuffers_[contentType].appendWindowEnd = appendWindowEnd;
  816. // Fake an 'updateend' event to resolve the operation.
  817. this.onUpdateEnd_(contentType);
  818. }
  819. /**
  820. * Nudge the playhead to force the media pipeline to be flushed.
  821. * This seems to be necessary on Chromecast to get new content to replace old
  822. * content.
  823. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  824. * @private
  825. */
  826. flush_(contentType) {
  827. // Never use flush_ if there's data. It causes a hiccup in playback.
  828. goog.asserts.assert(
  829. this.video_.buffered.length == 0, 'MediaSourceEngine.flush_ should ' +
  830. 'only be used after clearing all data!');
  831. // Seeking forces the pipeline to be flushed.
  832. this.video_.currentTime -= 0.001;
  833. // Fake an 'updateend' event to resolve the operation.
  834. this.onUpdateEnd_(contentType);
  835. }
  836. /**
  837. * Set the SourceBuffer's timestamp offset.
  838. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  839. * @param {number} timestampOffset
  840. * @private
  841. */
  842. setTimestampOffset_(contentType, timestampOffset) {
  843. // Work around for
  844. // https://github.com/shaka-project/shaka-player/issues/1281:
  845. // TODO(https://bit.ly/2ttKiBU): follow up when this is fixed in Edge
  846. if (timestampOffset < 0) {
  847. // Try to prevent rounding errors in Edge from removing the first
  848. // keyframe.
  849. timestampOffset += 0.001;
  850. }
  851. this.sourceBuffers_[contentType].timestampOffset = timestampOffset;
  852. // Fake an 'updateend' event to resolve the operation.
  853. this.onUpdateEnd_(contentType);
  854. }
  855. /**
  856. * Set the SourceBuffer's append window end.
  857. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  858. * @param {number} appendWindowStart
  859. * @param {number} appendWindowEnd
  860. * @private
  861. */
  862. setAppendWindow_(contentType, appendWindowStart, appendWindowEnd) {
  863. // You can't set start > end, so first set start to 0, then set the new
  864. // end, then set the new start. That way, there are no intermediate
  865. // states which are invalid.
  866. this.sourceBuffers_[contentType].appendWindowStart = 0;
  867. this.sourceBuffers_[contentType].appendWindowEnd = appendWindowEnd;
  868. this.sourceBuffers_[contentType].appendWindowStart = appendWindowStart;
  869. // Fake an 'updateend' event to resolve the operation.
  870. this.onUpdateEnd_(contentType);
  871. }
  872. /**
  873. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  874. * @private
  875. */
  876. onError_(contentType) {
  877. const operation = this.queues_[contentType][0];
  878. goog.asserts.assert(operation, 'Spurious error event!');
  879. goog.asserts.assert(!this.sourceBuffers_[contentType].updating,
  880. 'SourceBuffer should not be updating on error!');
  881. const code = this.video_.error ? this.video_.error.code : 0;
  882. operation.p.reject(new shaka.util.Error(
  883. shaka.util.Error.Severity.CRITICAL,
  884. shaka.util.Error.Category.MEDIA,
  885. shaka.util.Error.Code.MEDIA_SOURCE_OPERATION_FAILED,
  886. code));
  887. // Do not pop from queue. An 'updateend' event will fire next, and to
  888. // avoid synchronizing these two event handlers, we will allow that one to
  889. // pop from the queue as normal. Note that because the operation has
  890. // already been rejected, the call to resolve() in the 'updateend' handler
  891. // will have no effect.
  892. }
  893. /**
  894. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  895. * @private
  896. */
  897. onUpdateEnd_(contentType) {
  898. const operation = this.queues_[contentType][0];
  899. goog.asserts.assert(operation, 'Spurious updateend event!');
  900. if (!operation) {
  901. return;
  902. }
  903. goog.asserts.assert(!this.sourceBuffers_[contentType].updating,
  904. 'SourceBuffer should not be updating on updateend!');
  905. operation.p.resolve();
  906. this.popFromQueue_(contentType);
  907. }
  908. /**
  909. * Enqueue an operation and start it if appropriate.
  910. *
  911. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  912. * @param {function()} start
  913. * @return {!Promise}
  914. * @private
  915. */
  916. enqueueOperation_(contentType, start) {
  917. this.destroyer_.ensureNotDestroyed();
  918. const operation = {
  919. start: start,
  920. p: new shaka.util.PublicPromise(),
  921. };
  922. this.queues_[contentType].push(operation);
  923. if (this.queues_[contentType].length == 1) {
  924. this.startOperation_(contentType);
  925. }
  926. return operation.p;
  927. }
  928. /**
  929. * Enqueue an operation which must block all other operations on all
  930. * SourceBuffers.
  931. *
  932. * @param {function()} run
  933. * @return {!Promise}
  934. * @private
  935. */
  936. async enqueueBlockingOperation_(run) {
  937. this.destroyer_.ensureNotDestroyed();
  938. /** @type {!Array.<!shaka.util.PublicPromise>} */
  939. const allWaiters = [];
  940. // Enqueue a 'wait' operation onto each queue.
  941. // This operation signals its readiness when it starts.
  942. // When all wait operations are ready, the real operation takes place.
  943. for (const contentType in this.sourceBuffers_) {
  944. const ready = new shaka.util.PublicPromise();
  945. const operation = {
  946. start: () => ready.resolve(),
  947. p: ready,
  948. };
  949. this.queues_[contentType].push(operation);
  950. allWaiters.push(ready);
  951. if (this.queues_[contentType].length == 1) {
  952. operation.start();
  953. }
  954. }
  955. // Return a Promise to the real operation, which waits to begin until
  956. // there are no other in-progress operations on any SourceBuffers.
  957. try {
  958. await Promise.all(allWaiters);
  959. } catch (error) {
  960. // One of the waiters failed, which means we've been destroyed.
  961. goog.asserts.assert(
  962. this.destroyer_.destroyed(), 'Should be destroyed by now');
  963. // We haven't popped from the queue. Canceled waiters have been removed
  964. // by destroy. What's left now should just be resolved waiters. In
  965. // uncompiled mode, we will maintain good hygiene and make sure the
  966. // assert at the end of destroy passes. In compiled mode, the queues
  967. // are wiped in destroy.
  968. if (goog.DEBUG) {
  969. for (const contentType in this.sourceBuffers_) {
  970. if (this.queues_[contentType].length) {
  971. goog.asserts.assert(
  972. this.queues_[contentType].length == 1,
  973. 'Should be at most one item in queue!');
  974. goog.asserts.assert(
  975. allWaiters.includes(this.queues_[contentType][0].p),
  976. 'The item in queue should be one of our waiters!');
  977. this.queues_[contentType].shift();
  978. }
  979. }
  980. }
  981. throw error;
  982. }
  983. if (goog.DEBUG) {
  984. // If we did it correctly, nothing is updating.
  985. for (const contentType in this.sourceBuffers_) {
  986. goog.asserts.assert(
  987. this.sourceBuffers_[contentType].updating == false,
  988. 'SourceBuffers should not be updating after a blocking op!');
  989. }
  990. }
  991. // Run the real operation, which is synchronous.
  992. try {
  993. run();
  994. } catch (exception) {
  995. throw new shaka.util.Error(
  996. shaka.util.Error.Severity.CRITICAL,
  997. shaka.util.Error.Category.MEDIA,
  998. shaka.util.Error.Code.MEDIA_SOURCE_OPERATION_THREW,
  999. exception);
  1000. } finally {
  1001. // Unblock the queues.
  1002. for (const contentType in this.sourceBuffers_) {
  1003. this.popFromQueue_(contentType);
  1004. }
  1005. }
  1006. }
  1007. /**
  1008. * Pop from the front of the queue and start a new operation.
  1009. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  1010. * @private
  1011. */
  1012. popFromQueue_(contentType) {
  1013. // Remove the in-progress operation, which is now complete.
  1014. this.queues_[contentType].shift();
  1015. this.startOperation_(contentType);
  1016. }
  1017. /**
  1018. * Starts the next operation in the queue.
  1019. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  1020. * @private
  1021. */
  1022. startOperation_(contentType) {
  1023. // Retrieve the next operation, if any, from the queue and start it.
  1024. const next = this.queues_[contentType][0];
  1025. if (next) {
  1026. try {
  1027. next.start();
  1028. } catch (exception) {
  1029. if (exception.name == 'QuotaExceededError') {
  1030. next.p.reject(new shaka.util.Error(
  1031. shaka.util.Error.Severity.CRITICAL,
  1032. shaka.util.Error.Category.MEDIA,
  1033. shaka.util.Error.Code.QUOTA_EXCEEDED_ERROR,
  1034. contentType));
  1035. } else {
  1036. next.p.reject(new shaka.util.Error(
  1037. shaka.util.Error.Severity.CRITICAL,
  1038. shaka.util.Error.Category.MEDIA,
  1039. shaka.util.Error.Code.MEDIA_SOURCE_OPERATION_THREW,
  1040. exception));
  1041. }
  1042. this.popFromQueue_(contentType);
  1043. }
  1044. }
  1045. }
  1046. /**
  1047. * @return {!shaka.extern.TextDisplayer}
  1048. */
  1049. getTextDisplayer() {
  1050. goog.asserts.assert(
  1051. this.textDisplayer_,
  1052. 'TextDisplayer should only be null when this is destroyed');
  1053. return this.textDisplayer_;
  1054. }
  1055. /**
  1056. * @param {!shaka.extern.TextDisplayer} textDisplayer
  1057. */
  1058. setTextDisplayer(textDisplayer) {
  1059. const oldTextDisplayer = this.textDisplayer_;
  1060. this.textDisplayer_ = textDisplayer;
  1061. if (oldTextDisplayer) {
  1062. textDisplayer.setTextVisibility(oldTextDisplayer.isTextVisible());
  1063. oldTextDisplayer.destroy();
  1064. }
  1065. if (this.textEngine_) {
  1066. this.textEngine_.setDisplayer(textDisplayer);
  1067. }
  1068. }
  1069. /**
  1070. * @param {boolean} segmentRelativeVttTiming
  1071. */
  1072. setSegmentRelativeVttTiming(segmentRelativeVttTiming) {
  1073. this.segmentRelativeVttTiming_ = segmentRelativeVttTiming;
  1074. }
  1075. /**
  1076. * Apply platform-specific transformations to this segment to work around
  1077. * issues in the platform.
  1078. *
  1079. * @param {!BufferSource} segment
  1080. * @param {?number} startTime
  1081. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  1082. * @return {!BufferSource}
  1083. * @private
  1084. */
  1085. workAroundBrokenPlatforms_(segment, startTime, contentType) {
  1086. const isInitSegment = startTime == null;
  1087. const encryptionExpected = this.expectedEncryption_[contentType];
  1088. // If:
  1089. // 1. this is an init segment,
  1090. // 2. and encryption is expected,
  1091. // 3. and the platform requires encryption in all init segments,
  1092. // 4. and the content is MP4 (mimeType == "video/mp4" or "audio/mp4"),
  1093. // then insert fake encryption metadata for init segments that lack it.
  1094. // The MP4 requirement is because we can currently only do this
  1095. // transformation on MP4 containers.
  1096. // See: https://github.com/shaka-project/shaka-player/issues/2759
  1097. if (isInitSegment &&
  1098. encryptionExpected &&
  1099. shaka.util.Platform.requiresEncryptionInfoInAllInitSegments() &&
  1100. shaka.util.MimeUtils.getContainerType(
  1101. this.sourceBufferTypes_[contentType]) == 'mp4') {
  1102. shaka.log.debug('Forcing fake encryption information in init segment.');
  1103. segment = shaka.media.ContentWorkarounds.fakeEncryption(segment);
  1104. }
  1105. return segment;
  1106. }
  1107. /**
  1108. * @param {!boolean} lowLatencyMode
  1109. */
  1110. setLowLatencyMode(lowLatencyMode) {
  1111. this.lowLatencyMode_ = lowLatencyMode;
  1112. }
  1113. };
  1114. /**
  1115. * Internal reference to window.URL.createObjectURL function to avoid
  1116. * compatibility issues with other libraries and frameworks such as React
  1117. * Native. For use in unit tests only, not meant for external use.
  1118. *
  1119. * @type {function(?):string}
  1120. */
  1121. shaka.media.MediaSourceEngine.createObjectURL = window.URL.createObjectURL;
  1122. /**
  1123. * @typedef {{
  1124. * start: function(),
  1125. * p: !shaka.util.PublicPromise
  1126. * }}
  1127. *
  1128. * @summary An operation in queue.
  1129. * @property {function()} start
  1130. * The function which starts the operation.
  1131. * @property {!shaka.util.PublicPromise} p
  1132. * The PublicPromise which is associated with this operation.
  1133. */
  1134. shaka.media.MediaSourceEngine.Operation;
  1135. /**
  1136. * @enum {string}
  1137. * @private
  1138. */
  1139. shaka.media.MediaSourceEngine.SourceBufferMode_ = {
  1140. SEQUENCE: 'sequence',
  1141. SEGMENTS: 'segments',
  1142. };
  1143. /**
  1144. * MIME types of raw formats.
  1145. *
  1146. * @const {!Array.<string>}
  1147. */
  1148. shaka.media.MediaSourceEngine.RAW_FORMATS = [
  1149. 'audio/aac',
  1150. 'audio/ac3',
  1151. 'audio/ec3',
  1152. 'audio/mpeg',
  1153. ];