Source: lib/media/media_source_engine.js

  1. /*! @license
  2. * Shaka Player
  3. * Copyright 2016 Google LLC
  4. * SPDX-License-Identifier: Apache-2.0
  5. */
  6. goog.provide('shaka.media.MediaSourceEngine');
  7. goog.require('goog.asserts');
  8. goog.require('shaka.log');
  9. goog.require('shaka.media.ContentWorkarounds');
  10. goog.require('shaka.media.IClosedCaptionParser');
  11. goog.require('shaka.media.TimeRangesUtils');
  12. goog.require('shaka.media.Transmuxer');
  13. goog.require('shaka.text.TextEngine');
  14. goog.require('shaka.util.Destroyer');
  15. goog.require('shaka.util.Error');
  16. goog.require('shaka.util.EventManager');
  17. goog.require('shaka.util.Functional');
  18. goog.require('shaka.util.IDestroyable');
  19. goog.require('shaka.util.ManifestParserUtils');
  20. goog.require('shaka.util.MimeUtils');
  21. goog.require('shaka.util.Platform');
  22. goog.require('shaka.util.PublicPromise');
  23. /**
  24. * @summary
  25. * MediaSourceEngine wraps all operations on MediaSource and SourceBuffers.
  26. * All asynchronous operations return a Promise, and all operations are
  27. * internally synchronized and serialized as needed. Operations that can
  28. * be done in parallel will be done in parallel.
  29. *
  30. * @implements {shaka.util.IDestroyable}
  31. */
  32. shaka.media.MediaSourceEngine = class {
  33. /**
  34. * @param {HTMLMediaElement} video The video element, whose source is tied to
  35. * MediaSource during the lifetime of the MediaSourceEngine.
  36. * @param {!shaka.media.IClosedCaptionParser} closedCaptionParser
  37. * The closed caption parser that should be used to parse closed captions
  38. * from the video stream. MediaSourceEngine takes ownership of the parser.
  39. * When MediaSourceEngine is destroyed, it will destroy the parser.
  40. * @param {!shaka.extern.TextDisplayer} textDisplayer
  41. * The text displayer that will be used with the text engine.
  42. * MediaSourceEngine takes ownership of the displayer. When
  43. * MediaSourceEngine is destroyed, it will destroy the displayer.
  44. * @param {!function(!Array.<shaka.extern.ID3Metadata>, number, ?number)=}
  45. * onMetadata The callback invoked when ID3 metadata is found while transmuxing a segment.
  46. */
  47. constructor(video, closedCaptionParser, textDisplayer, onMetadata) {
  48. /** @private {HTMLMediaElement} */
  49. this.video_ = video;
  50. /** @private {shaka.extern.TextDisplayer} */
  51. this.textDisplayer_ = textDisplayer;
  52. /** @private {!Object.<shaka.util.ManifestParserUtils.ContentType,
  53. SourceBuffer>} */
  54. this.sourceBuffers_ = {};
  55. /** @private {!Object.<shaka.util.ManifestParserUtils.ContentType,
  56. string>} */
  57. this.sourceBufferTypes_ = {};
  58. /** @private {!Object.<shaka.util.ManifestParserUtils.ContentType,
  59. boolean>} */
  60. this.expectedEncryption_ = {};
  61. /** @private {shaka.text.TextEngine} */
  62. this.textEngine_ = null;
  63. const onMetadataNoOp = (metadata, timestampOffset, segmentEnd) => {};
  64. /** @private {!function(!Array.<shaka.extern.ID3Metadata>,
  65. number, ?number)} */
  66. this.onMetadata_ = onMetadata || onMetadataNoOp;
  67. /**
  68. * @private {!Object.<string,
  69. * !Array.<shaka.media.MediaSourceEngine.Operation>>}
  70. */
  71. this.queues_ = {};
  72. /** @private {shaka.util.EventManager} */
  73. this.eventManager_ = new shaka.util.EventManager();
  74. /** @private {!Object.<string, !shaka.media.Transmuxer>} */
  75. this.transmuxers_ = {};
  76. /** @private {shaka.media.IClosedCaptionParser} */
  77. this.captionParser_ = closedCaptionParser;
  78. /** @private {!shaka.util.PublicPromise} */
  79. this.mediaSourceOpen_ = new shaka.util.PublicPromise();
  80. /** @private {MediaSource} */
  81. this.mediaSource_ = this.createMediaSource(this.mediaSourceOpen_);
  82. /** @type {!shaka.util.Destroyer} */
  83. this.destroyer_ = new shaka.util.Destroyer(() => this.doDestroy_());
  84. /** @private {string} */
  85. this.url_ = '';
  86. }
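
A rough construction sketch, not taken from this file: MyClosedCaptionParser and MyTextDisplayer are hypothetical stand-ins for any implementations of shaka.media.IClosedCaptionParser and shaka.extern.TextDisplayer, and the engine takes ownership of both.

    const video = document.getElementById('video');  // an HTMLMediaElement
    const captionParser = new MyClosedCaptionParser();
    const textDisplayer = new MyTextDisplayer(video);
    const onMetadata = (metadata, timestampOffset, segmentEnd) => {
      // React to ID3 metadata found while transmuxing, if desired.
    };
    const mse = new shaka.media.MediaSourceEngine(
        video, captionParser, textDisplayer, onMetadata);
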
  87. /**
  88. * Create a MediaSource object, attach it to the video element, and return it.
  89. * Resolves the given promise when the MediaSource is ready.
  90. *
  91. * Replaced by unit tests.
  92. *
  93. * @param {!shaka.util.PublicPromise} p
  94. * @return {!MediaSource}
  95. */
  96. createMediaSource(p) {
  97. const mediaSource = new MediaSource();
  98. // Set up MediaSource on the video element.
  99. this.eventManager_.listenOnce(
  100. mediaSource, 'sourceopen', () => this.onSourceOpen_(p));
  101. // Store the object URL for releasing it later.
  102. this.url_ = shaka.media.MediaSourceEngine.createObjectURL(mediaSource);
  103. this.video_.src = this.url_;
  104. return mediaSource;
  105. }
  106. /**
  107. * @param {!shaka.util.PublicPromise} p
  108. * @private
  109. */
  110. onSourceOpen_(p) {
  111. // Release the object URL that was previously created, to prevent a memory
  112. // leak.
  113. // createObjectURL creates a strong reference to the MediaSource object
  114. // inside the browser. Setting the src of the video then creates another
  115. // reference within the video element. revokeObjectURL will remove the
  116. // strong reference to the MediaSource object, and allow it to be
  117. // garbage-collected later.
  118. URL.revokeObjectURL(this.url_);
  119. p.resolve();
  120. }
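
The same attach-and-release pattern, shown standalone with plain DOM APIs as a rough sketch; videoElement is a hypothetical HTMLMediaElement.

    const mediaSource = new MediaSource();
    const objectUrl = URL.createObjectURL(mediaSource);
    videoElement.src = objectUrl;
    mediaSource.addEventListener('sourceopen', () => {
      // The media element now references the MediaSource directly, so the
      // object URL can be revoked without breaking playback.
      URL.revokeObjectURL(objectUrl);
    }, {once: true});
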
  121. /**
  122. * Checks whether a given stream's MIME type and codecs are supported.
  123. *
  124. * @param {shaka.extern.Stream} stream
  125. * @return {boolean}
  126. */
  127. static isStreamSupported(stream) {
  128. const fullMimeType = shaka.util.MimeUtils.getFullType(
  129. stream.mimeType, stream.codecs);
  130. const extendedMimeType = shaka.util.MimeUtils.getExtendedType(stream);
  131. return shaka.text.TextEngine.isTypeSupported(fullMimeType) ||
  132. MediaSource.isTypeSupported(extendedMimeType) ||
  133. shaka.media.Transmuxer.isSupported(fullMimeType, stream.type);
  134. }
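
A hedged sketch of a call site; the object below is a minimal stand-in for a shaka.extern.Stream, populated only with the most relevant fields.

    const stream = /** @type {shaka.extern.Stream} */ ({
      mimeType: 'video/mp4',
      codecs: 'avc1.42E01E',
      type: 'video',
    });
    if (!shaka.media.MediaSourceEngine.isStreamSupported(stream)) {
      throw new Error('Stream type was not negotiated correctly.');
    }
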
  135. /**
  136. * Returns a map of MediaSource support for well-known types.
  137. *
  138. * @return {!Object.<string, boolean>}
  139. */
  140. static probeSupport() {
  141. const testMimeTypes = [
  142. // MP4 types
  143. 'video/mp4; codecs="avc1.42E01E"',
  144. 'video/mp4; codecs="avc3.42E01E"',
  145. 'video/mp4; codecs="hev1.1.6.L93.90"',
  146. 'video/mp4; codecs="hvc1.1.6.L93.90"',
  147. 'video/mp4; codecs="hev1.2.4.L153.B0"; eotf="smpte2084"', // HDR HEVC
  148. 'video/mp4; codecs="hvc1.2.4.L153.B0"; eotf="smpte2084"', // HDR HEVC
  149. 'video/mp4; codecs="vp9"',
  150. 'video/mp4; codecs="vp09.00.10.08"',
  151. 'video/mp4; codecs="av01.0.01M.08"',
  152. 'audio/mp4; codecs="mp4a.40.2"',
  153. 'audio/mp4; codecs="ac-3"',
  154. 'audio/mp4; codecs="ec-3"',
  155. 'audio/mp4; codecs="opus"',
  156. 'audio/mp4; codecs="flac"',
  157. // WebM types
  158. 'video/webm; codecs="vp8"',
  159. 'video/webm; codecs="vp9"',
  160. 'video/webm; codecs="vp09.00.10.08"',
  161. 'audio/webm; codecs="vorbis"',
  162. 'audio/webm; codecs="opus"',
  163. // MPEG2 TS types (video/ is also used for audio: https://bit.ly/TsMse)
  164. 'video/mp2t; codecs="avc1.42E01E"',
  165. 'video/mp2t; codecs="avc3.42E01E"',
  166. 'video/mp2t; codecs="hvc1.1.6.L93.90"',
  167. 'video/mp2t; codecs="mp4a.40.2"',
  168. 'video/mp2t; codecs="ac-3"',
  169. 'video/mp2t; codecs="ec-3"',
  170. // WebVTT types
  171. 'text/vtt',
  172. 'application/mp4; codecs="wvtt"',
  173. // TTML types
  174. 'application/ttml+xml',
  175. 'application/mp4; codecs="stpp"',
  176. ];
  177. const support = {};
  178. for (const type of testMimeTypes) {
  179. if (shaka.util.Platform.supportsMediaSource()) {
  180. // Our TextEngine is only effective for MSE platforms at the moment.
  181. if (shaka.text.TextEngine.isTypeSupported(type)) {
  182. support[type] = true;
  183. } else {
  184. support[type] = MediaSource.isTypeSupported(type) ||
  185. shaka.media.Transmuxer.isSupported(type);
  186. }
  187. } else {
  188. support[type] = shaka.util.Platform.supportsMediaType(type);
  189. }
  190. const basicType = type.split(';')[0];
  191. support[basicType] = support[basicType] || support[type];
  192. }
  193. return support;
  194. }
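
The returned map contains both the full test strings and their basic types, so a caller might check either; a rough sketch:

    const support = shaka.media.MediaSourceEngine.probeSupport();
    if (support['video/mp4; codecs="avc1.42E01E"']) {
      console.log('H.264 in MP4 is playable on this platform.');
    }
    console.log('Any MP4 video support:', support['video/mp4']);
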
  195. /** @override */
  196. destroy() {
  197. return this.destroyer_.destroy();
  198. }
  199. /** @private */
  200. async doDestroy_() {
  201. const Functional = shaka.util.Functional;
  202. const cleanup = [];
  203. for (const contentType in this.queues_) {
  204. // Make a local copy of the queue and the first item.
  205. const q = this.queues_[contentType];
  206. const inProgress = q[0];
  207. // Drop everything else out of the original queue.
  208. this.queues_[contentType] = q.slice(0, 1);
  209. // We will wait for this item to complete/fail.
  210. if (inProgress) {
  211. cleanup.push(inProgress.p.catch(Functional.noop));
  212. }
  213. // The rest will be rejected silently if possible.
  214. for (const item of q.slice(1)) {
  215. item.p.reject(shaka.util.Destroyer.destroyedError());
  216. }
  217. }
  218. if (this.textEngine_) {
  219. cleanup.push(this.textEngine_.destroy());
  220. }
  221. if (this.textDisplayer_) {
  222. cleanup.push(this.textDisplayer_.destroy());
  223. }
  224. for (const contentType in this.transmuxers_) {
  225. cleanup.push(this.transmuxers_[contentType].destroy());
  226. }
  227. await Promise.all(cleanup);
  228. if (this.eventManager_) {
  229. this.eventManager_.release();
  230. this.eventManager_ = null;
  231. }
  232. if (this.video_) {
  233. // "unload" the video element.
  234. this.video_.removeAttribute('src');
  235. this.video_.load();
  236. this.video_ = null;
  237. }
  238. this.mediaSource_ = null;
  239. this.textEngine_ = null;
  240. this.textDisplayer_ = null;
  241. this.sourceBuffers_ = {};
  242. this.transmuxers_ = {};
  243. this.captionParser_ = null;
  244. if (goog.DEBUG) {
  245. for (const contentType in this.queues_) {
  246. goog.asserts.assert(
  247. this.queues_[contentType].length == 0,
  248. contentType + ' queue should be empty after destroy!');
  249. }
  250. }
  251. this.queues_ = {};
  252. }
  253. /**
  254. * @return {!Promise} Resolved when MediaSource is open and attached to the
  255. * media element. This process is actually initiated by the constructor.
  256. */
  257. open() {
  258. return this.mediaSourceOpen_;
  259. }
  260. /**
  261. * Initialize MediaSourceEngine.
  262. *
  263. * Note that it is not valid to call this multiple times, except to add or
  264. * reinitialize text streams.
  265. *
  266. * @param {!Map.<shaka.util.ManifestParserUtils.ContentType,
  267. * shaka.extern.Stream>} streamsByType
  268. * A map of content types to streams. All streams must be supported
  269. * according to MediaSourceEngine.isStreamSupported.
  270. * @param {boolean} forceTransmuxTS
  271. * If true, this will transmux TS content even if it is natively supported.
  272. *
  273. * @return {!Promise}
  274. */
  275. async init(streamsByType, forceTransmuxTS) {
  276. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  277. await this.mediaSourceOpen_;
  278. for (const contentType of streamsByType.keys()) {
  279. const stream = streamsByType.get(contentType);
  280. goog.asserts.assert(
  281. shaka.media.MediaSourceEngine.isStreamSupported(stream),
  282. 'Type negotiation should happen before MediaSourceEngine.init!');
  283. let mimeType = shaka.util.MimeUtils.getFullType(
  284. stream.mimeType, stream.codecs);
  285. if (contentType == ContentType.TEXT) {
  286. this.reinitText(mimeType);
  287. } else {
  288. if ((forceTransmuxTS || !MediaSource.isTypeSupported(mimeType)) &&
  289. shaka.media.Transmuxer.isSupported(mimeType, contentType)) {
  290. this.transmuxers_[contentType] = new shaka.media.Transmuxer();
  291. mimeType =
  292. shaka.media.Transmuxer.convertTsCodecs(contentType, mimeType);
  293. }
  294. const sourceBuffer = this.mediaSource_.addSourceBuffer(mimeType);
  295. this.eventManager_.listen(
  296. sourceBuffer, 'error',
  297. () => this.onError_(contentType));
  298. this.eventManager_.listen(
  299. sourceBuffer, 'updateend',
  300. () => this.onUpdateEnd_(contentType));
  301. this.sourceBuffers_[contentType] = sourceBuffer;
  302. this.sourceBufferTypes_[contentType] = mimeType;
  303. this.queues_[contentType] = [];
  304. this.expectedEncryption_[contentType] = !!stream.drmInfos.length;
  305. }
  306. }
  307. }
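
A hedged initialization sketch; audioStream and videoStream are hypothetical shaka.extern.Stream objects that have already passed isStreamSupported, and mse is an engine instance as in the construction sketch above.

    const ContentType = shaka.util.ManifestParserUtils.ContentType;
    const streamsByType = new Map();
    streamsByType.set(ContentType.AUDIO, audioStream);
    streamsByType.set(ContentType.VIDEO, videoStream);
    await mse.init(streamsByType, /* forceTransmuxTS= */ false);
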
  308. /**
  309. * Reinitialize the TextEngine for a new text type.
  310. * @param {string} mimeType
  311. */
  312. reinitText(mimeType) {
  313. if (!this.textEngine_) {
  314. this.textEngine_ = new shaka.text.TextEngine(this.textDisplayer_);
  315. }
  316. this.textEngine_.initParser(mimeType);
  317. }
  318. /**
  319. * @return {boolean} True if the MediaSource is in an "ended" state, or if the
  320. * object has been destroyed.
  321. */
  322. ended() {
  323. return this.mediaSource_ ? this.mediaSource_.readyState == 'ended' : true;
  324. }
  325. /**
  326. * Gets the first timestamp in buffer for the given content type.
  327. *
  328. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  329. * @return {?number} The timestamp in seconds, or null if nothing is buffered.
  330. */
  331. bufferStart(contentType) {
  332. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  333. if (contentType == ContentType.TEXT) {
  334. return this.textEngine_.bufferStart();
  335. }
  336. return shaka.media.TimeRangesUtils.bufferStart(
  337. this.getBuffered_(contentType));
  338. }
  339. /**
  340. * Gets the last timestamp in buffer for the given content type.
  341. *
  342. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  343. * @return {?number} The timestamp in seconds, or null if nothing is buffered.
  344. */
  345. bufferEnd(contentType) {
  346. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  347. if (contentType == ContentType.TEXT) {
  348. return this.textEngine_.bufferEnd();
  349. }
  350. return shaka.media.TimeRangesUtils.bufferEnd(
  351. this.getBuffered_(contentType));
  352. }
  353. /**
  354. * Determines if the given time is inside the buffered range of the given
  355. * content type.
  356. *
  357. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  358. * @param {number} time Playhead time
  359. * @param {number=} smallGapLimit
  360. * @return {boolean}
  361. */
  362. isBuffered(contentType, time, smallGapLimit) {
  363. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  364. if (contentType == ContentType.TEXT) {
  365. return this.textEngine_.isBuffered(time);
  366. } else {
  367. const buffered = this.getBuffered_(contentType);
  368. return shaka.media.TimeRangesUtils.isBuffered(
  369. buffered, time, smallGapLimit);
  370. }
  371. }
  372. /**
  373. * Computes how much content is buffered ahead of the given timestamp for the
  374. * given content type.
  375. *
  376. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  377. * @param {number} time
  378. * @return {number} The amount of time buffered ahead in seconds.
  379. */
  380. bufferedAheadOf(contentType, time) {
  381. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  382. if (contentType == ContentType.TEXT) {
  383. return this.textEngine_.bufferedAheadOf(time);
  384. } else {
  385. const buffered = this.getBuffered_(contentType);
  386. return shaka.media.TimeRangesUtils.bufferedAheadOf(buffered, time);
  387. }
  388. }
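
These queries could drive a simple buffering heuristic; a rough sketch with a hypothetical 10-second target, where video is the media element attached to the engine.

    const ContentType = shaka.util.ManifestParserUtils.ContentType;
    const ahead = mse.bufferedAheadOf(ContentType.VIDEO, video.currentTime);
    if (ahead < 10) {
      // A caller such as a streaming controller would fetch and append
      // more segments here.
    }
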
  389. /**
  390. * Returns info about what is currently buffered.
  391. * @return {shaka.extern.BufferedInfo}
  392. */
  393. getBufferedInfo() {
  394. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  395. const TimeRangeUtils = shaka.media.TimeRangesUtils;
  396. const info = {
  397. total: TimeRangeUtils.getBufferedInfo(this.video_.buffered),
  398. audio: TimeRangeUtils.getBufferedInfo(
  399. this.getBuffered_(ContentType.AUDIO)),
  400. video: TimeRangeUtils.getBufferedInfo(
  401. this.getBuffered_(ContentType.VIDEO)),
  402. text: [],
  403. };
  404. if (this.textEngine_) {
  405. const start = this.textEngine_.bufferStart();
  406. const end = this.textEngine_.bufferEnd();
  407. if (start != null && end != null) {
  408. info.text.push({start: start, end: end});
  409. }
  410. }
  411. return info;
  412. }
  413. /**
  414. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  415. * @return {TimeRanges} The buffered ranges for the given content type, or
  416. * null if the buffered ranges could not be obtained.
  417. * @private
  418. */
  419. getBuffered_(contentType) {
  420. try {
  421. return this.sourceBuffers_[contentType].buffered;
  422. } catch (exception) {
  423. if (contentType in this.sourceBuffers_) {
  424. // Note: previous MediaSource errors may cause access to |buffered| to
  425. // throw.
  426. shaka.log.error('failed to get buffered range for ' + contentType,
  427. exception);
  428. }
  429. return null;
  430. }
  431. }
  432. /**
  433. * Enqueue an operation to append data to the SourceBuffer.
  434. * Start and end times are needed for TextEngine, but not for MediaSource.
  435. * Start and end times may be null for initialization segments; if present
  436. * they are relative to the presentation timeline.
  437. *
  438. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  439. * @param {!BufferSource} data
  440. * @param {?number} startTime relative to the start of the presentation
  441. * @param {?number} endTime relative to the start of the presentation
  442. * @param {?boolean} hasClosedCaptions True if the buffer contains CEA closed
  443. * captions
  444. * @return {!Promise}
  445. */
  446. async appendBuffer(contentType, data, startTime, endTime, hasClosedCaptions) {
  447. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  448. if (contentType == ContentType.TEXT) {
  449. await this.textEngine_.appendBuffer(data, startTime, endTime);
  450. } else if (this.transmuxers_[contentType]) {
  451. const transmuxedData =
  452. await this.transmuxers_[contentType].transmux(data);
  453. // For HLS CEA-608/708 CLOSED-CAPTIONS, text data is embedded in
  454. // the video stream, so textEngine may not have been initialized.
  455. if (!this.textEngine_) {
  456. this.reinitText('text/vtt');
  457. }
  458. if (transmuxedData.metadata) {
  459. const timestampOffset =
  460. this.sourceBuffers_[contentType].timestampOffset;
  461. this.onMetadata_(transmuxedData.metadata, timestampOffset, endTime);
  462. }
  463. // This doesn't work for native TS support (e.g. Edge/Chromecast),
  464. // since no transmuxing is needed for native TS.
  465. if (transmuxedData.captions && transmuxedData.captions.length) {
  466. const videoOffset =
  467. this.sourceBuffers_[ContentType.VIDEO].timestampOffset;
  468. const closedCaptions = this.textEngine_
  469. .convertMuxjsCaptionsToShakaCaptions(transmuxedData.captions);
  470. this.textEngine_.storeAndAppendClosedCaptions(
  471. closedCaptions, startTime, endTime, videoOffset);
  472. }
  473. let transmuxedSegment = transmuxedData.data;
  474. transmuxedSegment = this.workAroundBrokenPlatforms_(
  475. transmuxedSegment, startTime, contentType);
  476. await this.enqueueOperation_(
  477. contentType, () => this.append_(contentType, transmuxedSegment));
  478. } else if (hasClosedCaptions) {
  479. if (!this.textEngine_) {
  480. this.reinitText('text/vtt');
  481. }
  482. // If it is the init segment for closed captions, initialize the closed
  483. // caption parser.
  484. if (startTime == null && endTime == null) {
  485. this.captionParser_.init(data);
  486. } else {
  487. const closedCaptions = this.captionParser_.parseFrom(data);
  488. if (closedCaptions.length) {
  489. const videoOffset =
  490. this.sourceBuffers_[ContentType.VIDEO].timestampOffset;
  491. this.textEngine_.storeAndAppendClosedCaptions(
  492. closedCaptions, startTime, endTime, videoOffset);
  493. }
  494. }
  495. data = this.workAroundBrokenPlatforms_(data, startTime, contentType);
  496. await this.enqueueOperation_(
  497. contentType,
  498. () => this.append_(contentType, data));
  499. } else {
  500. data = this.workAroundBrokenPlatforms_(data, startTime, contentType);
  501. await this.enqueueOperation_(
  502. contentType,
  503. () => this.append_(contentType, data));
  504. }
  505. }
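
A hedged sketch of the append sequence described above: the init segment carries null start/end times, and media segments carry their presentation-time range. initSegment and mediaSegment are hypothetical BufferSource payloads.

    const ContentType = shaka.util.ManifestParserUtils.ContentType;
    await mse.appendBuffer(
        ContentType.VIDEO, initSegment, null, null,
        /* hasClosedCaptions= */ false);
    await mse.appendBuffer(
        ContentType.VIDEO, mediaSegment, /* startTime= */ 0, /* endTime= */ 4,
        /* hasClosedCaptions= */ false);
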
  506. /**
  507. * Set the selected closed captions Id and language.
  508. *
  509. * @param {string} id
  510. */
  511. setSelectedClosedCaptionId(id) {
  512. const VIDEO = shaka.util.ManifestParserUtils.ContentType.VIDEO;
  513. const videoBufferEndTime = this.bufferEnd(VIDEO) || 0;
  514. this.textEngine_.setSelectedClosedCaptionId(id, videoBufferEndTime);
  515. }
  516. /** Disable embedded closed captions. */
  517. clearSelectedClosedCaptionId() {
  518. if (this.textEngine_) {
  519. this.textEngine_.setSelectedClosedCaptionId('', 0);
  520. }
  521. }
  522. /**
  523. * Enqueue an operation to remove data from the SourceBuffer.
  524. *
  525. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  526. * @param {number} startTime relative to the start of the presentation
  527. * @param {number} endTime relative to the start of the presentation
  528. * @return {!Promise}
  529. */
  530. async remove(contentType, startTime, endTime) {
  531. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  532. if (contentType == ContentType.TEXT) {
  533. await this.textEngine_.remove(startTime, endTime);
  534. } else {
  535. await this.enqueueOperation_(
  536. contentType,
  537. () => this.remove_(contentType, startTime, endTime));
  538. }
  539. }
  540. /**
  541. * Enqueue an operation to clear the SourceBuffer.
  542. *
  543. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  544. * @return {!Promise}
  545. */
  546. async clear(contentType) {
  547. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  548. if (contentType == ContentType.TEXT) {
  549. if (!this.textEngine_) {
  550. return;
  551. }
  552. await this.textEngine_.remove(0, Infinity);
  553. } else {
  554. // Note that not all platforms allow clearing to Infinity.
  555. await this.enqueueOperation_(
  556. contentType,
  557. () => this.remove_(contentType, 0, this.mediaSource_.duration));
  558. }
  559. }
  560. /**
  561. * Fully reset the state of the caption parser owned by MediaSourceEngine.
  562. */
  563. resetCaptionParser() {
  564. this.captionParser_.reset();
  565. }
  566. /**
  567. * Enqueue an operation to flush the SourceBuffer.
  568. * This is a workaround for what we believe is a Chromecast bug.
  569. *
  570. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  571. * @return {!Promise}
  572. */
  573. async flush(contentType) {
  574. // Flush the pipeline. Necessary on Chromecast, even though we have removed
  575. // everything.
  576. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  577. if (contentType == ContentType.TEXT) {
  578. // Nothing to flush for text.
  579. return;
  580. }
  581. await this.enqueueOperation_(
  582. contentType,
  583. () => this.flush_(contentType));
  584. }
  585. /**
  586. * Sets the timestamp offset and append window start and end for the given content type.
  587. *
  588. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  589. * @param {number} timestampOffset The timestamp offset. Segments which start
  590. * at time t will be inserted at time t + timestampOffset instead. This
  591. * value does not affect segments which have already been inserted.
  592. * @param {number} appendWindowStart The timestamp to set the append window
  593. * start to. For future appends, frames/samples with timestamps less than
  594. * this value will be dropped.
  595. * @param {number} appendWindowEnd The timestamp to set the append window end
  596. * to. For future appends, frames/samples with timestamps greater than this
  597. * value will be dropped.
  598. * @return {!Promise}
  599. */
  600. async setStreamProperties(
  601. contentType, timestampOffset, appendWindowStart, appendWindowEnd) {
  602. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  603. if (contentType == ContentType.TEXT) {
  604. this.textEngine_.setTimestampOffset(timestampOffset);
  605. this.textEngine_.setAppendWindow(appendWindowStart, appendWindowEnd);
  606. return;
  607. }
  608. await Promise.all([
  609. // Queue an abort() to help MSE splice together overlapping segments.
  610. // We set appendWindowEnd when we change periods in DASH content, and the
  611. // period transition may result in overlap.
  612. //
  613. // An abort() also helps with MPEG2-TS. When we append a TS segment, we
  614. // always enter a PARSING_MEDIA_SEGMENT state and we can't change the
  615. // timestamp offset. By calling abort(), we reset the state so we can
  616. // set it.
  617. this.enqueueOperation_(
  618. contentType,
  619. () => this.abort_(contentType)),
  620. this.enqueueOperation_(
  621. contentType,
  622. () => this.setTimestampOffset_(contentType, timestampOffset)),
  623. this.enqueueOperation_(
  624. contentType,
  625. () => this.setAppendWindow_(
  626. contentType, appendWindowStart, appendWindowEnd)),
  627. ]);
  628. }
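
For example, at a DASH period boundary a caller might re-point the SourceBuffer before appending the new period's segments; a rough sketch with hypothetical times:

    const ContentType = shaka.util.ManifestParserUtils.ContentType;
    // The new period starts at 120s; its segments' media times start near 0.
    await mse.setStreamProperties(
        ContentType.VIDEO,
        /* timestampOffset= */ 120,
        /* appendWindowStart= */ 120,
        /* appendWindowEnd= */ 180);
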
  629. /**
  630. * @param {string=} reason Valid reasons are 'network' and 'decode'.
  631. * @return {!Promise}
  632. * @see http://w3c.github.io/media-source/#idl-def-EndOfStreamError
  633. */
  634. async endOfStream(reason) {
  635. await this.enqueueBlockingOperation_(() => {
  636. // If endOfStream() has already been called on the media source,
  637. // don't call it again.
  638. if (this.ended()) {
  639. return;
  640. }
  641. // Tizen won't let us pass undefined, but it will let us omit the
  642. // argument.
  643. if (reason) {
  644. this.mediaSource_.endOfStream(reason);
  645. } else {
  646. this.mediaSource_.endOfStream();
  647. }
  648. });
  649. }
  650. /**
  651. * We only support increasing duration at this time. Decreasing duration
  652. * causes the MSE removal algorithm to run, which results in an 'updateend'
  653. * event. Supporting this scenario would be complicated, and is not currently
  654. * needed.
  655. *
  656. * @param {number} duration
  657. * @return {!Promise}
  658. */
  659. async setDuration(duration) {
  660. goog.asserts.assert(
  661. isNaN(this.mediaSource_.duration) ||
  662. this.mediaSource_.duration <= duration,
  663. 'duration cannot decrease: ' + this.mediaSource_.duration + ' -> ' +
  664. duration);
  665. await this.enqueueBlockingOperation_(() => {
  666. this.mediaSource_.duration = duration;
  667. });
  668. }
  669. /**
  670. * Get the current MediaSource duration.
  671. *
  672. * @return {number}
  673. */
  674. getDuration() {
  675. return this.mediaSource_.duration;
  676. }
  677. /**
  678. * Append data to the SourceBuffer.
  679. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  680. * @param {BufferSource} data
  681. * @private
  682. */
  683. append_(contentType, data) {
  684. // This will trigger an 'updateend' event.
  685. this.sourceBuffers_[contentType].appendBuffer(data);
  686. }
  687. /**
  688. * Remove data from the SourceBuffer.
  689. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  690. * @param {number} startTime relative to the start of the presentation
  691. * @param {number} endTime relative to the start of the presentation
  692. * @private
  693. */
  694. remove_(contentType, startTime, endTime) {
  695. if (endTime <= startTime) {
  696. // Ignore removal of inverted or empty ranges.
  697. // Fake an 'updateend' event to resolve the operation.
  698. this.onUpdateEnd_(contentType);
  699. return;
  700. }
  701. // This will trigger an 'updateend' event.
  702. this.sourceBuffers_[contentType].remove(startTime, endTime);
  703. }
  704. /**
  705. * Call abort() on the SourceBuffer.
  706. * This resets MSE's last_decode_timestamp on all track buffers, which should
  707. * trigger the splicing logic for overlapping segments.
  708. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  709. * @private
  710. */
  711. abort_(contentType) {
  712. // Save the append window, which is reset on abort().
  713. const appendWindowStart =
  714. this.sourceBuffers_[contentType].appendWindowStart;
  715. const appendWindowEnd = this.sourceBuffers_[contentType].appendWindowEnd;
  716. // This will not trigger an 'updateend' event, since nothing is happening.
  717. // This is only to reset MSE internals, not to abort an actual operation.
  718. this.sourceBuffers_[contentType].abort();
  719. // Restore the append window.
  720. this.sourceBuffers_[contentType].appendWindowStart = appendWindowStart;
  721. this.sourceBuffers_[contentType].appendWindowEnd = appendWindowEnd;
  722. // Fake an 'updateend' event to resolve the operation.
  723. this.onUpdateEnd_(contentType);
  724. }
  725. /**
  726. * Nudge the playhead to force the media pipeline to be flushed.
  727. * This seems to be necessary on Chromecast to get new content to replace old
  728. * content.
  729. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  730. * @private
  731. */
  732. flush_(contentType) {
  733. // Never use flush_ if there's data. It causes a hiccup in playback.
  734. goog.asserts.assert(
  735. this.video_.buffered.length == 0, 'MediaSourceEngine.flush_ should ' +
  736. 'only be used after clearing all data!');
  737. // Seeking forces the pipeline to be flushed.
  738. this.video_.currentTime -= 0.001;
  739. // Fake an 'updateend' event to resolve the operation.
  740. this.onUpdateEnd_(contentType);
  741. }
  742. /**
  743. * Set the SourceBuffer's timestamp offset.
  744. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  745. * @param {number} timestampOffset
  746. * @private
  747. */
  748. setTimestampOffset_(contentType, timestampOffset) {
  749. // Workaround for
  750. // https://github.com/shaka-project/shaka-player/issues/1281:
  751. // TODO(https://bit.ly/2ttKiBU): follow up when this is fixed in Edge
  752. if (timestampOffset < 0) {
  753. // Try to prevent rounding errors in Edge from removing the first
  754. // keyframe.
  755. timestampOffset += 0.001;
  756. }
  757. this.sourceBuffers_[contentType].timestampOffset = timestampOffset;
  758. // Fake an 'updateend' event to resolve the operation.
  759. this.onUpdateEnd_(contentType);
  760. }
  761. /**
  762. * Set the SourceBuffer's append window start and end.
  763. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  764. * @param {number} appendWindowStart
  765. * @param {number} appendWindowEnd
  766. * @private
  767. */
  768. setAppendWindow_(contentType, appendWindowStart, appendWindowEnd) {
  769. // You can't set start > end, so first set start to 0, then set the new
  770. // end, then set the new start. That way, there are no intermediate
  771. // states which are invalid.
  772. this.sourceBuffers_[contentType].appendWindowStart = 0;
  773. this.sourceBuffers_[contentType].appendWindowEnd = appendWindowEnd;
  774. this.sourceBuffers_[contentType].appendWindowStart = appendWindowStart;
  775. // Fake an 'updateend' event to resolve the operation.
  776. this.onUpdateEnd_(contentType);
  777. }
  778. /**
  779. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  780. * @private
  781. */
  782. onError_(contentType) {
  783. const operation = this.queues_[contentType][0];
  784. goog.asserts.assert(operation, 'Spurious error event!');
  785. goog.asserts.assert(!this.sourceBuffers_[contentType].updating,
  786. 'SourceBuffer should not be updating on error!');
  787. const code = this.video_.error ? this.video_.error.code : 0;
  788. operation.p.reject(new shaka.util.Error(
  789. shaka.util.Error.Severity.CRITICAL,
  790. shaka.util.Error.Category.MEDIA,
  791. shaka.util.Error.Code.MEDIA_SOURCE_OPERATION_FAILED,
  792. code));
  793. // Do not pop from queue. An 'updateend' event will fire next, and to
  794. // avoid synchronizing these two event handlers, we will allow that one to
  795. // pop from the queue as normal. Note that because the operation has
  796. // already been rejected, the call to resolve() in the 'updateend' handler
  797. // will have no effect.
  798. }
  799. /**
  800. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  801. * @private
  802. */
  803. onUpdateEnd_(contentType) {
  804. const operation = this.queues_[contentType][0];
  805. goog.asserts.assert(operation, 'Spurious updateend event!');
  806. if (!operation) {
  807. return;
  808. }
  809. goog.asserts.assert(!this.sourceBuffers_[contentType].updating,
  810. 'SourceBuffer should not be updating on updateend!');
  811. operation.p.resolve();
  812. this.popFromQueue_(contentType);
  813. }
  814. /**
  815. * Enqueue an operation and start it if appropriate.
  816. *
  817. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  818. * @param {function()} start
  819. * @return {!Promise}
  820. * @private
  821. */
  822. enqueueOperation_(contentType, start) {
  823. this.destroyer_.ensureNotDestroyed();
  824. const operation = {
  825. start: start,
  826. p: new shaka.util.PublicPromise(),
  827. };
  828. this.queues_[contentType].push(operation);
  829. if (this.queues_[contentType].length == 1) {
  830. this.startOperation_(contentType);
  831. }
  832. return operation.p;
  833. }
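
From a caller's point of view, the queues serialize operations per content type while different types proceed independently; a rough sketch in which seg1, seg2, and audioSeg are hypothetical BufferSource payloads.

    const ContentType = shaka.util.ManifestParserUtils.ContentType;
    // The two video appends share one queue and run one after the other;
    // the audio append is queued separately and can run in parallel.
    const p1 = mse.appendBuffer(ContentType.VIDEO, seg1, 0, 4, false);
    const p2 = mse.appendBuffer(ContentType.VIDEO, seg2, 4, 8, false);
    const p3 = mse.appendBuffer(ContentType.AUDIO, audioSeg, 0, 4, false);
    await Promise.all([p1, p2, p3]);
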
  834. /**
  835. * Enqueue an operation which must block all other operations on all
  836. * SourceBuffers.
  837. *
  838. * @param {function()} run
  839. * @return {!Promise}
  840. * @private
  841. */
  842. async enqueueBlockingOperation_(run) {
  843. this.destroyer_.ensureNotDestroyed();
  844. /** @type {!Array.<!shaka.util.PublicPromise>} */
  845. const allWaiters = [];
  846. // Enqueue a 'wait' operation onto each queue.
  847. // This operation signals its readiness when it starts.
  848. // When all wait operations are ready, the real operation takes place.
  849. for (const contentType in this.sourceBuffers_) {
  850. const ready = new shaka.util.PublicPromise();
  851. const operation = {
  852. start: () => ready.resolve(),
  853. p: ready,
  854. };
  855. this.queues_[contentType].push(operation);
  856. allWaiters.push(ready);
  857. if (this.queues_[contentType].length == 1) {
  858. operation.start();
  859. }
  860. }
  861. // Return a Promise to the real operation, which waits to begin until
  862. // there are no other in-progress operations on any SourceBuffers.
  863. try {
  864. await Promise.all(allWaiters);
  865. } catch (error) {
  866. // One of the waiters failed, which means we've been destroyed.
  867. goog.asserts.assert(
  868. this.destroyer_.destroyed(), 'Should be destroyed by now');
  869. // We haven't popped from the queue. Canceled waiters have been removed
  870. // by destroy. What's left now should just be resolved waiters. In
  871. // uncompiled mode, we will maintain good hygiene and make sure the
  872. // assert at the end of destroy passes. In compiled mode, the queues
  873. // are wiped in destroy.
  874. if (goog.DEBUG) {
  875. for (const contentType in this.sourceBuffers_) {
  876. if (this.queues_[contentType].length) {
  877. goog.asserts.assert(
  878. this.queues_[contentType].length == 1,
  879. 'Should be at most one item in queue!');
  880. goog.asserts.assert(
  881. allWaiters.includes(this.queues_[contentType][0].p),
  882. 'The item in queue should be one of our waiters!');
  883. this.queues_[contentType].shift();
  884. }
  885. }
  886. }
  887. throw error;
  888. }
  889. if (goog.DEBUG) {
  890. // If we did it correctly, nothing is updating.
  891. for (const contentType in this.sourceBuffers_) {
  892. goog.asserts.assert(
  893. this.sourceBuffers_[contentType].updating == false,
  894. 'SourceBuffers should not be updating after a blocking op!');
  895. }
  896. }
  897. // Run the real operation, which is synchronous.
  898. try {
  899. run();
  900. } catch (exception) {
  901. throw new shaka.util.Error(
  902. shaka.util.Error.Severity.CRITICAL,
  903. shaka.util.Error.Category.MEDIA,
  904. shaka.util.Error.Code.MEDIA_SOURCE_OPERATION_THREW,
  905. exception);
  906. } finally {
  907. // Unblock the queues.
  908. for (const contentType in this.sourceBuffers_) {
  909. this.popFromQueue_(contentType);
  910. }
  911. }
  912. }
  913. /**
  914. * Pop from the front of the queue and start a new operation.
  915. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  916. * @private
  917. */
  918. popFromQueue_(contentType) {
  919. // Remove the in-progress operation, which is now complete.
  920. this.queues_[contentType].shift();
  921. this.startOperation_(contentType);
  922. }
  923. /**
  924. * Starts the next operation in the queue.
  925. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  926. * @private
  927. */
  928. startOperation_(contentType) {
  929. // Retrieve the next operation, if any, from the queue and start it.
  930. const next = this.queues_[contentType][0];
  931. if (next) {
  932. try {
  933. next.start();
  934. } catch (exception) {
  935. if (exception.name == 'QuotaExceededError') {
  936. next.p.reject(new shaka.util.Error(
  937. shaka.util.Error.Severity.CRITICAL,
  938. shaka.util.Error.Category.MEDIA,
  939. shaka.util.Error.Code.QUOTA_EXCEEDED_ERROR,
  940. contentType));
  941. } else {
  942. next.p.reject(new shaka.util.Error(
  943. shaka.util.Error.Severity.CRITICAL,
  944. shaka.util.Error.Category.MEDIA,
  945. shaka.util.Error.Code.MEDIA_SOURCE_OPERATION_THREW,
  946. exception));
  947. }
  948. this.popFromQueue_(contentType);
  949. }
  950. }
  951. }
  952. /**
  953. * @return {!shaka.extern.TextDisplayer}
  954. */
  955. getTextDisplayer() {
  956. goog.asserts.assert(
  957. this.textDisplayer_,
  958. 'TextDisplayer should only be null when this is destroyed');
  959. return this.textDisplayer_;
  960. }
  961. /**
  962. * @param {!shaka.extern.TextDisplayer} textDisplayer
  963. */
  964. setTextDisplayer(textDisplayer) {
  965. const oldTextDisplayer = this.textDisplayer_;
  966. this.textDisplayer_ = textDisplayer;
  967. if (oldTextDisplayer) {
  968. textDisplayer.setTextVisibility(oldTextDisplayer.isTextVisible());
  969. oldTextDisplayer.destroy();
  970. }
  971. if (this.textEngine_) {
  972. this.textEngine_.setDisplayer(textDisplayer);
  973. }
  974. }
  975. /**
  976. * Apply platform-specific transformations to this segment to work around
  977. * issues in the platform.
  978. *
  979. * @param {!BufferSource} segment
  980. * @param {?number} startTime
  981. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  982. * @return {!BufferSource}
  983. * @private
  984. */
  985. workAroundBrokenPlatforms_(segment, startTime, contentType) {
  986. const isInitSegment = startTime == null;
  987. const encryptionExpected = this.expectedEncryption_[contentType];
  988. // If:
  989. // 1. this is an init segment,
  990. // 2. and encryption is expected,
  991. // 3. and the platform requires encryption in all init segments,
  992. // 4. and the content is MP4 (mimeType == "video/mp4" or "audio/mp4"),
  993. // then insert fake encryption metadata for init segments that lack it.
  994. // The MP4 requirement is because we can currently only do this
  995. // transformation on MP4 containers.
  996. // See: https://github.com/shaka-project/shaka-player/issues/2759
  997. if (isInitSegment &&
  998. encryptionExpected &&
  999. shaka.util.Platform.requiresEncryptionInfoInAllInitSegments() &&
  1000. shaka.util.MimeUtils.getContainerType(
  1001. this.sourceBufferTypes_[contentType]) == 'mp4') {
  1002. shaka.log.debug('Forcing fake encryption information in init segment.');
  1003. segment = shaka.media.ContentWorkarounds.fakeEncryption(segment);
  1004. }
  1005. return segment;
  1006. }
  1007. };
  1008. /**
  1009. * Internal reference to window.URL.createObjectURL function to avoid
  1010. * compatibility issues with other libraries and frameworks such as React
  1011. * Native. For use in unit tests only, not meant for external use.
  1012. *
  1013. * @type {function(?):string}
  1014. */
  1015. shaka.media.MediaSourceEngine.createObjectURL = window.URL.createObjectURL;
  1016. /**
  1017. * @typedef {{
  1018. * start: function(),
  1019. * p: !shaka.util.PublicPromise
  1020. * }}
  1021. *
  1022. * @summary An operation in queue.
  1023. * @property {function()} start
  1024. * The function which starts the operation.
  1025. * @property {!shaka.util.PublicPromise} p
  1026. * The PublicPromise which is associated with this operation.
  1027. */
  1028. shaka.media.MediaSourceEngine.Operation;