Source: lib/media/media_source_engine.js

  1. /*! @license
  2. * Shaka Player
  3. * Copyright 2016 Google LLC
  4. * SPDX-License-Identifier: Apache-2.0
  5. */
  6. goog.provide('shaka.media.MediaSourceEngine');
  7. goog.require('goog.asserts');
  8. goog.require('shaka.log');
  9. goog.require('shaka.config.CodecSwitchingStrategy');
  10. goog.require('shaka.device.DeviceFactory');
  11. goog.require('shaka.device.IDevice');
  12. goog.require('shaka.media.Capabilities');
  13. goog.require('shaka.media.ContentWorkarounds');
  14. goog.require('shaka.media.ClosedCaptionParser');
  15. goog.require('shaka.media.IClosedCaptionParser');
  16. goog.require('shaka.media.ManifestParser');
  17. goog.require('shaka.media.SegmentReference');
  18. goog.require('shaka.media.TimeRangesUtils');
  19. goog.require('shaka.text.TextEngine');
  20. goog.require('shaka.transmuxer.TransmuxerEngine');
  21. goog.require('shaka.util.BufferUtils');
  22. goog.require('shaka.util.Destroyer');
  23. goog.require('shaka.util.Dom');
  24. goog.require('shaka.util.Error');
  25. goog.require('shaka.util.EventManager');
  26. goog.require('shaka.util.FakeEvent');
  27. goog.require('shaka.util.Functional');
  28. goog.require('shaka.util.IDestroyable');
  29. goog.require('shaka.util.Id3Utils');
  30. goog.require('shaka.util.ManifestParserUtils');
  31. goog.require('shaka.util.MimeUtils');
  32. goog.require('shaka.util.Mp4BoxParsers');
  33. goog.require('shaka.util.Mp4Parser');
  34. goog.require('shaka.util.PublicPromise');
  35. goog.require('shaka.util.StreamUtils');
  36. goog.require('shaka.util.TimeUtils');
  37. goog.require('shaka.util.TsParser');
  38. goog.require('shaka.lcevc.Dec');
  39. /**
  40. * @summary
  41. * MediaSourceEngine wraps all operations on MediaSource and SourceBuffers.
  42. * All asynchronous operations return a Promise, and all operations are
  43. * internally synchronized and serialized as needed. Operations that can
  44. * be done in parallel will be done in parallel.
  45. *
  46. * @implements {shaka.util.IDestroyable}
  47. */
  48. shaka.media.MediaSourceEngine = class {
  49. /**
  50. * @param {HTMLMediaElement} video The video element, whose source is tied to
  51. * MediaSource during the lifetime of the MediaSourceEngine.
  52. * @param {!shaka.extern.TextDisplayer} textDisplayer
  53. * The text displayer that will be used with the text engine.
  54. * MediaSourceEngine takes ownership of the displayer. When
  55. * MediaSourceEngine is destroyed, it will destroy the displayer.
  56. * @param {!shaka.media.MediaSourceEngine.PlayerInterface} playerInterface
  57. * Interface for common player methods.
  58. * @param {shaka.extern.MediaSourceConfiguration} config
  59. * @param {?shaka.lcevc.Dec} [lcevcDec] Optional - LCEVC Decoder Object
  60. */
  61. constructor(video, textDisplayer, playerInterface, config, lcevcDec) {
  62. /** @private {HTMLMediaElement} */
  63. this.video_ = video;
  64. /** @private {?shaka.media.MediaSourceEngine.PlayerInterface} */
  65. this.playerInterface_ = playerInterface;
  66. /** @private {?shaka.extern.MediaSourceConfiguration} */
  67. this.config_ = config;
  68. /** @private {shaka.extern.TextDisplayer} */
  69. this.textDisplayer_ = textDisplayer;
  70. /**
  71. * @private {!Map<shaka.util.ManifestParserUtils.ContentType, SourceBuffer>}
  72. */
  73. this.sourceBuffers_ = new Map();
  74. /**
  75. * @private {!Map<shaka.util.ManifestParserUtils.ContentType, string>}
  76. */
  77. this.sourceBufferTypes_ = new Map();
  78. /**
  79. * @private {!Map<shaka.util.ManifestParserUtils.ContentType,
  80. * boolean>}
  81. */
  82. this.expectedEncryption_ = new Map();
  83. /** @private {shaka.text.TextEngine} */
  84. this.textEngine_ = null;
  85. /** @private {boolean} */
  86. this.segmentRelativeVttTiming_ = false;
  87. /** @private {?shaka.lcevc.Dec} */
  88. this.lcevcDec_ = lcevcDec || null;
  89. /**
  90. * @private {!Map<string, !Array<shaka.media.MediaSourceEngine.Operation>>}
  91. */
  92. this.queues_ = new Map();
  93. /** @private {shaka.util.EventManager} */
  94. this.eventManager_ = new shaka.util.EventManager();
  95. /**
  96. * @private {!Map<shaka.util.ManifestParserUtils.ContentType,
  97. !shaka.extern.Transmuxer>} */
  98. this.transmuxers_ = new Map();
  99. /** @private {?shaka.media.IClosedCaptionParser} */
  100. this.captionParser_ = null;
  101. /** @private {!shaka.util.PublicPromise} */
  102. this.mediaSourceOpen_ = new shaka.util.PublicPromise();
  103. /** @private {string} */
  104. this.url_ = '';
  105. /** @private {boolean} */
  106. this.playbackHasBegun_ = false;
  107. /** @private {boolean} */
  108. this.streamingAllowed_ = true;
  109. /** @private {boolean} */
  110. this.usingRemotePlayback_ = false;
  111. /** @private {HTMLSourceElement} */
  112. this.source_ = null;
  113. /**
  114. * Fallback source element with direct media URI, used for casting
  115. * purposes.
  116. * @private {HTMLSourceElement}
  117. */
  118. this.secondarySource_ = null;
  119. /** @private {MediaSource} */
  120. this.mediaSource_ = this.createMediaSource(this.mediaSourceOpen_);
  121. /** @private {boolean} */
  122. this.reloadingMediaSource_ = false;
  123. /** @private {boolean} */
  124. this.playAfterReset_ = false;
  125. /** @type {!shaka.util.Destroyer} */
  126. this.destroyer_ = new shaka.util.Destroyer(() => this.doDestroy_());
  127. /** @private {boolean} */
  128. this.sequenceMode_ = false;
  129. /** @private {string} */
  130. this.manifestType_ = shaka.media.ManifestParser.UNKNOWN;
  131. /** @private {boolean} */
  132. this.ignoreManifestTimestampsInSegmentsMode_ = false;
  133. /** @private {boolean} */
  134. this.attemptTimestampOffsetCalculation_ = false;
  135. /** @private {!shaka.util.PublicPromise<number>} */
  136. this.textSequenceModeOffset_ = new shaka.util.PublicPromise();
  137. /** @private {boolean} */
  138. this.needSplitMuxedContent_ = false;
  139. /** @private {?number} */
  140. this.lastDuration_ = null;
  141. /**
  142. * @private {!Map<shaka.util.ManifestParserUtils.ContentType,
  143. * !shaka.util.TsParser>}
  144. */
  145. this.tsParsers_ = new Map();
  146. /** @private {?number} */
  147. this.firstVideoTimestamp_ = null;
  148. /** @private {?number} */
  149. this.firstVideoReferenceStartTime_ = null;
  150. /** @private {?number} */
  151. this.firstAudioTimestamp_ = null;
  152. /** @private {?number} */
  153. this.firstAudioReferenceStartTime_ = null;
  154. /** @private {!shaka.util.PublicPromise<number>} */
  155. this.audioCompensation_ = new shaka.util.PublicPromise();
  156. if (this.video_.remote) {
  157. this.usingRemotePlayback_ = this.video_.remote.state != 'disconnected';
  158. this.eventManager_.listen(this.video_.remote, 'connect', () => {
  159. this.usingRemotePlayback_ = this.video_.remote.state != 'disconnected';
  160. });
  161. this.eventManager_.listen(this.video_.remote, 'connecting', () => {
  162. this.usingRemotePlayback_ = this.video_.remote.state != 'disconnected';
  163. });
  164. this.eventManager_.listen(this.video_.remote, 'disconnect', () => {
  165. this.usingRemotePlayback_ = this.video_.remote.state != 'disconnected';
  166. });
  167. }
  168. }
  169. /**
  170. * Create a MediaSource object, attach it to the video element, and return it.
  171. * Resolves the given promise when the MediaSource is ready.
  172. *
  173. * Replaced by unit tests.
  174. *
  175. * @param {!shaka.util.PublicPromise} p
  176. * @return {!MediaSource}
  177. */
  178. createMediaSource(p) {
  179. this.streamingAllowed_ = true;
  180. /** @type {!MediaSource} */
  181. let mediaSource;
  182. if (window.ManagedMediaSource) {
  183. if (!this.secondarySource_) {
  184. this.video_.disableRemotePlayback = true;
  185. }
  186. mediaSource = new ManagedMediaSource();
  187. this.eventManager_.listen(
  188. mediaSource, 'startstreaming', () => {
  189. shaka.log.info('MMS startstreaming');
  190. this.streamingAllowed_ = true;
  191. });
  192. this.eventManager_.listen(
  193. mediaSource, 'endstreaming', () => {
  194. shaka.log.info('MMS endstreaming');
  195. this.streamingAllowed_ = false;
  196. });
  197. } else {
  198. mediaSource = new MediaSource();
  199. }
  200. // Set up MediaSource on the video element.
  201. this.eventManager_.listenOnce(
  202. mediaSource, 'sourceopen', () => this.onSourceOpen_(p));
  203. // Correctly set when playback has begun.
  204. this.eventManager_.listenOnce(this.video_, 'playing', () => {
  205. this.playbackHasBegun_ = true;
  206. });
  207. // Store the object URL for releasing it later.
  208. this.url_ = shaka.media.MediaSourceEngine.createObjectURL(mediaSource);
  209. if (this.config_.useSourceElements) {
  210. this.video_.removeAttribute('src');
  211. if (this.source_) {
  212. this.video_.removeChild(this.source_);
  213. }
  214. if (this.secondarySource_) {
  215. this.video_.removeChild(this.secondarySource_);
  216. }
  217. this.source_ = shaka.util.Dom.createSourceElement(this.url_);
  218. this.video_.appendChild(this.source_);
  219. if (this.secondarySource_) {
  220. this.video_.appendChild(this.secondarySource_);
  221. }
  222. this.video_.load();
  223. } else {
  224. this.video_.src = this.url_;
  225. }
  226. return mediaSource;
  227. }
  228. /**
  229. * @param {string} uri
  230. * @param {string} mimeType
  231. */
  232. addSecondarySource(uri, mimeType) {
  233. if (!this.video_ || !window.ManagedMediaSource || !this.mediaSource_) {
  234. shaka.log.warning(
  235. 'Secondary source is used only with ManagedMediaSource');
  236. return;
  237. }
  238. if (!this.config_.useSourceElements) {
  239. return;
  240. }
  241. if (this.secondarySource_) {
  242. this.video_.removeChild(this.secondarySource_);
  243. }
  244. this.secondarySource_ = shaka.util.Dom.createSourceElement(uri, mimeType);
  245. this.video_.appendChild(this.secondarySource_);
  246. this.video_.disableRemotePlayback = false;
  247. }
  248. /**
  249. * @param {shaka.util.PublicPromise} p
  250. * @private
  251. */
  252. onSourceOpen_(p) {
  253. goog.asserts.assert(this.url_, 'Must have object URL');
  254. // Release the object URL that was previously created, to prevent memory
  255. // leak.
  256. // createObjectURL creates a strong reference to the MediaSource object
  257. // inside the browser. Setting the src of the video then creates another
  258. // reference within the video element. revokeObjectURL will remove the
  259. // strong reference to the MediaSource object, and allow it to be
  260. // garbage-collected later.
  261. URL.revokeObjectURL(this.url_);
  262. p.resolve();
  263. }
  264. /**
  265. * Returns a map of MediaSource support for well-known types.
  266. *
  267. * @return {!Object<string, boolean>}
  268. */
  269. static probeSupport() {
  270. const testMimeTypes = [
  271. // MP4 types
  272. 'video/mp4; codecs="avc1.42E01E"',
  273. 'video/mp4; codecs="avc3.42E01E"',
  274. 'video/mp4; codecs="hev1.1.6.L93.90"',
  275. 'video/mp4; codecs="hvc1.1.6.L93.90"',
  276. 'video/mp4; codecs="hev1.2.4.L153.B0"; eotf="smpte2084"', // HDR HEVC
  277. 'video/mp4; codecs="hvc1.2.4.L153.B0"; eotf="smpte2084"', // HDR HEVC
  278. 'video/mp4; codecs="vp9"',
  279. 'video/mp4; codecs="vp09.00.10.08"',
  280. 'video/mp4; codecs="av01.0.01M.08"',
  281. 'video/mp4; codecs="dvh1.05.01"',
  282. 'video/mp4; codecs="dvh1.20.01"',
  283. 'audio/mp4; codecs="mp4a.40.2"',
  284. 'audio/mp4; codecs="ac-3"',
  285. 'audio/mp4; codecs="ec-3"',
  286. 'audio/mp4; codecs="ac-4.02.01.01"',
  287. 'audio/mp4; codecs="opus"',
  288. 'audio/mp4; codecs="flac"',
  289. 'audio/mp4; codecs="dtsc"', // DTS Digital Surround
  290. 'audio/mp4; codecs="dtse"', // DTS Express
  291. 'audio/mp4; codecs="dtsx"', // DTS:X
  292. 'audio/mp4; codecs="apac.31.00"',
  293. // WebM types
  294. 'video/webm; codecs="vp8"',
  295. 'video/webm; codecs="vp9"',
  296. 'video/webm; codecs="vp09.00.10.08"',
  297. 'audio/webm; codecs="vorbis"',
  298. 'audio/webm; codecs="opus"',
  299. // MPEG2 TS types (video/ is also used for audio: https://bit.ly/TsMse)
  300. 'video/mp2t; codecs="avc1.42E01E"',
  301. 'video/mp2t; codecs="avc3.42E01E"',
  302. 'video/mp2t; codecs="hvc1.1.6.L93.90"',
  303. 'video/mp2t; codecs="mp4a.40.2"',
  304. 'video/mp2t; codecs="ac-3"',
  305. 'video/mp2t; codecs="ec-3"',
  306. // WebVTT types
  307. 'text/vtt',
  308. 'application/mp4; codecs="wvtt"',
  309. // TTML types
  310. 'application/ttml+xml',
  311. 'application/mp4; codecs="stpp"',
  312. // Containerless types
  313. ...shaka.util.MimeUtils.RAW_FORMATS,
  314. ];
  315. const support = {};
  316. const device = shaka.device.DeviceFactory.getDevice();
  317. for (const type of testMimeTypes) {
  318. if (shaka.text.TextEngine.isTypeSupported(type)) {
  319. support[type] = true;
  320. } else if (device.supportsMediaSource()) {
  321. support[type] = shaka.media.Capabilities.isTypeSupported(type) ||
  322. shaka.transmuxer.TransmuxerEngine.isSupported(type);
  323. } else {
  324. support[type] = device.supportsMediaType(type);
  325. }
  326. const basicType = type.split(';')[0];
  327. support[basicType] = support[basicType] || support[type];
  328. }
  329. return support;
  330. }
  331. /** @override */
  332. destroy() {
  333. return this.destroyer_.destroy();
  334. }
/**
 * Actual teardown, invoked once by the Destroyer.  Waits for in-flight
 * SourceBuffer operations, rejects pending ones, then releases every owned
 * resource.  The order below matters: queued operations are settled before
 * the objects they reference are dropped.
 * @private
 */
async doDestroy_() {
  const Functional = shaka.util.Functional;
  const cleanup = [];
  for (const [key, q] of this.queues_) {
    // Make a local copy of the queue and the first item.
    const inProgress = q[0];
    const contentType = /** @type {string} */(key);
    // Drop everything else out of the original queue.
    this.queues_.set(contentType, q.slice(0, 1));
    // We will wait for this item to complete/fail.
    if (inProgress) {
      cleanup.push(inProgress.p.catch(Functional.noop));
    }
    // The rest will be rejected silently if possible.
    for (const item of q.slice(1)) {
      item.p.reject(shaka.util.Destroyer.destroyedError());
    }
  }
  if (this.textEngine_) {
    cleanup.push(this.textEngine_.destroy());
  }
  await Promise.all(cleanup);
  for (const transmuxer of this.transmuxers_.values()) {
    transmuxer.destroy();
  }
  if (this.eventManager_) {
    this.eventManager_.release();
    this.eventManager_ = null;
  }
  if (this.video_ && this.secondarySource_) {
    this.video_.removeChild(this.secondarySource_);
  }
  if (this.video_ && this.source_) {
    // "unload" the video element.
    this.video_.removeChild(this.source_);
    this.video_.load();
    this.video_.disableRemotePlayback = false;
  }
  this.video_ = null;
  this.source_ = null;
  this.secondarySource_ = null;
  this.config_ = null;
  this.mediaSource_ = null;
  this.textEngine_ = null;
  this.textDisplayer_ = null;
  this.sourceBuffers_.clear();
  this.expectedEncryption_.clear();
  this.transmuxers_.clear();
  this.captionParser_ = null;
  if (goog.DEBUG) {
    // Sanity-check: every queue should have drained above.
    for (const [contentType, q] of this.queues_) {
      goog.asserts.assert(
          q.length == 0,
          contentType + ' queue should be empty after destroy!');
    }
  }
  this.queues_.clear();
  // This object is owned by Player
  this.lcevcDec_ = null;
  this.tsParsers_.clear();
  this.playerInterface_ = null;
}
  398. /**
  399. * @return {!Promise} Resolved when MediaSource is open and attached to the
  400. * media element. This process is actually initiated by the constructor.
  401. */
  402. open() {
  403. return this.mediaSourceOpen_;
  404. }
  405. /**
  406. * Initialize MediaSourceEngine.
  407. *
  408. * Note that it is not valid to call this multiple times, except to add or
  409. * reinitialize text streams.
  410. *
  411. * @param {!Map<shaka.util.ManifestParserUtils.ContentType,
  412. * shaka.extern.Stream>} streamsByType
  413. * A map of content types to streams.
  414. * @param {boolean=} sequenceMode
  415. * If true, the media segments are appended to the SourceBuffer in strict
  416. * sequence.
  417. * @param {string=} manifestType
  418. * Indicates the type of the manifest.
  419. * @param {boolean=} ignoreManifestTimestampsInSegmentsMode
  420. * If true, don't adjust the timestamp offset to account for manifest
  421. * segment durations being out of sync with segment durations. In other
  422. * words, assume that there are no gaps in the segments when appending
  423. * to the SourceBuffer, even if the manifest and segment times disagree.
  424. * Indicates if the manifest has text streams.
  425. *
  426. * @return {!Promise}
  427. */
  428. async init(streamsByType, sequenceMode=false,
  429. manifestType=shaka.media.ManifestParser.UNKNOWN,
  430. ignoreManifestTimestampsInSegmentsMode=false) {
  431. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  432. await this.mediaSourceOpen_;
  433. if (this.ended() || this.closed()) {
  434. shaka.log.alwaysError('Expected MediaSource to be open during init(); ' +
  435. 'reopening the media source.');
  436. this.mediaSourceOpen_ = new shaka.util.PublicPromise();
  437. this.mediaSource_ = this.createMediaSource(this.mediaSourceOpen_);
  438. await this.mediaSourceOpen_;
  439. }
  440. this.sequenceMode_ = sequenceMode;
  441. this.manifestType_ = manifestType;
  442. this.ignoreManifestTimestampsInSegmentsMode_ =
  443. ignoreManifestTimestampsInSegmentsMode;
  444. this.attemptTimestampOffsetCalculation_ = !this.sequenceMode_ &&
  445. this.manifestType_ == shaka.media.ManifestParser.HLS &&
  446. !this.ignoreManifestTimestampsInSegmentsMode_;
  447. this.tsParsers_.clear();
  448. this.firstVideoTimestamp_ = null;
  449. this.firstVideoReferenceStartTime_ = null;
  450. this.firstAudioTimestamp_ = null;
  451. this.firstAudioReferenceStartTime_ = null;
  452. this.audioCompensation_ = new shaka.util.PublicPromise();
  453. for (const contentType of streamsByType.keys()) {
  454. const stream = streamsByType.get(contentType);
  455. this.initSourceBuffer_(contentType, stream, stream.codecs);
  456. if (this.needSplitMuxedContent_) {
  457. this.queues_.set(ContentType.AUDIO, []);
  458. this.queues_.set(ContentType.VIDEO, []);
  459. } else {
  460. this.queues_.set(contentType, []);
  461. }
  462. }
  463. const audio = streamsByType.get(ContentType.AUDIO);
  464. if (audio && audio.isAudioMuxedInVideo) {
  465. this.needSplitMuxedContent_ = true;
  466. }
  467. }
  468. /**
  469. * Initialize a specific SourceBuffer.
  470. *
  471. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  472. * @param {shaka.extern.Stream} stream
  473. * @param {string} codecs
  474. * @private
  475. */
  476. initSourceBuffer_(contentType, stream, codecs) {
  477. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  478. if (contentType == ContentType.AUDIO && codecs) {
  479. codecs = shaka.util.StreamUtils.getCorrectAudioCodecs(
  480. codecs, stream.mimeType);
  481. }
  482. let mimeType = shaka.util.MimeUtils.getFullType(
  483. stream.mimeType, codecs);
  484. if (contentType == ContentType.TEXT) {
  485. this.reinitText(mimeType, this.sequenceMode_, stream.external);
  486. } else {
  487. let needTransmux = this.config_.forceTransmux;
  488. if (!shaka.media.Capabilities.isTypeSupported(mimeType) ||
  489. (!this.sequenceMode_ &&
  490. shaka.util.MimeUtils.RAW_FORMATS.includes(mimeType))) {
  491. needTransmux = true;
  492. }
  493. const mimeTypeWithAllCodecs =
  494. shaka.util.MimeUtils.getFullTypeWithAllCodecs(
  495. stream.mimeType, codecs);
  496. if (needTransmux) {
  497. const audioCodec = shaka.util.ManifestParserUtils.guessCodecsSafe(
  498. ContentType.AUDIO, (codecs || '').split(','));
  499. const videoCodec = shaka.util.ManifestParserUtils.guessCodecsSafe(
  500. ContentType.VIDEO, (codecs || '').split(','));
  501. if (audioCodec && videoCodec) {
  502. this.needSplitMuxedContent_ = true;
  503. this.initSourceBuffer_(ContentType.AUDIO, stream, audioCodec);
  504. this.initSourceBuffer_(ContentType.VIDEO, stream, videoCodec);
  505. return;
  506. }
  507. const transmuxerPlugin = shaka.transmuxer.TransmuxerEngine
  508. .findTransmuxer(mimeTypeWithAllCodecs);
  509. if (transmuxerPlugin) {
  510. const transmuxer = transmuxerPlugin();
  511. this.transmuxers_.set(contentType, transmuxer);
  512. mimeType =
  513. transmuxer.convertCodecs(contentType, mimeTypeWithAllCodecs);
  514. }
  515. }
  516. const type = this.addExtraFeaturesToMimeType_(mimeType);
  517. this.destroyer_.ensureNotDestroyed();
  518. let sourceBuffer;
  519. try {
  520. sourceBuffer = this.mediaSource_.addSourceBuffer(type);
  521. } catch (exception) {
  522. throw new shaka.util.Error(
  523. shaka.util.Error.Severity.CRITICAL,
  524. shaka.util.Error.Category.MEDIA,
  525. shaka.util.Error.Code.MEDIA_SOURCE_OPERATION_THREW,
  526. exception,
  527. 'The mediaSource_ status was ' + this.mediaSource_.readyState +
  528. ' expected \'open\'',
  529. null);
  530. }
  531. if (this.sequenceMode_) {
  532. sourceBuffer.mode =
  533. shaka.media.MediaSourceEngine.SourceBufferMode_.SEQUENCE;
  534. }
  535. this.eventManager_.listen(
  536. sourceBuffer, 'error',
  537. () => this.onError_(contentType));
  538. this.eventManager_.listen(
  539. sourceBuffer, 'updateend',
  540. () => this.onUpdateEnd_(contentType));
  541. this.sourceBuffers_.set(contentType, sourceBuffer);
  542. this.sourceBufferTypes_.set(contentType, mimeType);
  543. this.expectedEncryption_.set(contentType, !!stream.drmInfos.length);
  544. }
  545. }
  546. /**
  547. * Called by the Player to provide an updated configuration any time it
  548. * changes. Must be called at least once before init().
  549. *
  550. * @param {shaka.extern.MediaSourceConfiguration} config
  551. */
  552. configure(config) {
  553. this.config_ = config;
  554. if (this.textEngine_) {
  555. this.textEngine_.setModifyCueCallback(config.modifyCueCallback);
  556. }
  557. }
  558. /**
  559. * Indicate if the streaming is allowed by MediaSourceEngine.
  560. * If we using MediaSource we always returns true.
  561. *
  562. * @return {boolean}
  563. */
  564. isStreamingAllowed() {
  565. return this.streamingAllowed_ && !this.usingRemotePlayback_ &&
  566. !this.reloadingMediaSource_;
  567. }
  568. /**
  569. * Reinitialize the TextEngine for a new text type.
  570. * @param {string} mimeType
  571. * @param {boolean} sequenceMode
  572. * @param {boolean} external
  573. */
  574. reinitText(mimeType, sequenceMode, external) {
  575. if (!this.textEngine_) {
  576. this.textEngine_ = new shaka.text.TextEngine(this.textDisplayer_);
  577. if (this.textEngine_) {
  578. this.textEngine_.setModifyCueCallback(this.config_.modifyCueCallback);
  579. }
  580. }
  581. this.textEngine_.initParser(mimeType, sequenceMode,
  582. external || this.segmentRelativeVttTiming_, this.manifestType_);
  583. }
  584. /**
  585. * @return {boolean} True if the MediaSource is in an "ended" state, or if the
  586. * object has been destroyed.
  587. */
  588. ended() {
  589. if (this.reloadingMediaSource_) {
  590. return false;
  591. }
  592. return this.mediaSource_ ? this.mediaSource_.readyState == 'ended' : true;
  593. }
  594. /**
  595. * @return {boolean} True if the MediaSource is in an "closed" state, or if
  596. * the object has been destroyed.
  597. */
  598. closed() {
  599. if (this.reloadingMediaSource_) {
  600. return false;
  601. }
  602. return this.mediaSource_ ? this.mediaSource_.readyState == 'closed' : true;
  603. }
  604. /**
  605. * Gets the first timestamp in buffer for the given content type.
  606. *
  607. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  608. * @return {?number} The timestamp in seconds, or null if nothing is buffered.
  609. */
  610. bufferStart(contentType) {
  611. if (!this.sourceBuffers_.size) {
  612. return null;
  613. }
  614. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  615. if (contentType == ContentType.TEXT) {
  616. return this.textEngine_.bufferStart();
  617. }
  618. return shaka.media.TimeRangesUtils.bufferStart(
  619. this.getBuffered_(contentType));
  620. }
  621. /**
  622. * Gets the last timestamp in buffer for the given content type.
  623. *
  624. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  625. * @return {?number} The timestamp in seconds, or null if nothing is buffered.
  626. */
  627. bufferEnd(contentType) {
  628. if (!this.sourceBuffers_.size) {
  629. return null;
  630. }
  631. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  632. if (contentType == ContentType.TEXT) {
  633. return this.textEngine_.bufferEnd();
  634. }
  635. return shaka.media.TimeRangesUtils.bufferEnd(
  636. this.getBuffered_(contentType));
  637. }
  638. /**
  639. * Determines if the given time is inside the buffered range of the given
  640. * content type.
  641. *
  642. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  643. * @param {number} time Playhead time
  644. * @return {boolean}
  645. */
  646. isBuffered(contentType, time) {
  647. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  648. if (contentType == ContentType.TEXT) {
  649. return this.textEngine_.isBuffered(time);
  650. } else {
  651. const buffered = this.getBuffered_(contentType);
  652. return shaka.media.TimeRangesUtils.isBuffered(buffered, time);
  653. }
  654. }
  655. /**
  656. * Computes how far ahead of the given timestamp is buffered for the given
  657. * content type.
  658. *
  659. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  660. * @param {number} time
  661. * @return {number} The amount of time buffered ahead in seconds.
  662. */
  663. bufferedAheadOf(contentType, time) {
  664. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  665. if (contentType == ContentType.TEXT) {
  666. return this.textEngine_.bufferedAheadOf(time);
  667. } else {
  668. const buffered = this.getBuffered_(contentType);
  669. return shaka.media.TimeRangesUtils.bufferedAheadOf(buffered, time);
  670. }
  671. }
  672. /**
  673. * Returns info about what is currently buffered.
  674. * @return {shaka.extern.BufferedInfo}
  675. */
  676. getBufferedInfo() {
  677. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  678. const TimeRangesUtils = shaka.media.TimeRangesUtils;
  679. const info = {
  680. total: this.reloadingMediaSource_ ? [] :
  681. TimeRangesUtils.getBufferedInfo(this.video_.buffered),
  682. audio:
  683. TimeRangesUtils.getBufferedInfo(this.getBuffered_(ContentType.AUDIO)),
  684. video:
  685. TimeRangesUtils.getBufferedInfo(this.getBuffered_(ContentType.VIDEO)),
  686. text: [],
  687. };
  688. if (this.textEngine_) {
  689. const start = this.textEngine_.bufferStart();
  690. const end = this.textEngine_.bufferEnd();
  691. if (start != null && end != null) {
  692. info.text.push({start: start, end: end});
  693. }
  694. }
  695. return info;
  696. }
  697. /**
  698. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  699. * @return {TimeRanges} The buffered ranges for the given content type, or
  700. * null if the buffered ranges could not be obtained.
  701. * @private
  702. */
  703. getBuffered_(contentType) {
  704. if (this.reloadingMediaSource_ || this.usingRemotePlayback_) {
  705. return null;
  706. }
  707. try {
  708. return this.sourceBuffers_.get(contentType).buffered;
  709. } catch (exception) {
  710. if (this.sourceBuffers_.has(contentType)) {
  711. // Note: previous MediaSource errors may cause access to |buffered| to
  712. // throw.
  713. shaka.log.error('failed to get buffered range for ' + contentType,
  714. exception);
  715. }
  716. return null;
  717. }
  718. }
  719. /**
  720. * Create a new closed caption parser. This will ONLY be replaced by tests as
  721. * a way to inject fake closed caption parser instances.
  722. *
  723. * @param {string} mimeType
  724. * @return {!shaka.media.IClosedCaptionParser}
  725. */
  726. getCaptionParser(mimeType) {
  727. return new shaka.media.ClosedCaptionParser(mimeType);
  728. }
  729. /**
  730. * This method is only public for testing.
  731. *
  732. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  733. * @param {!BufferSource} data
  734. * @param {!shaka.media.SegmentReference} reference The segment reference
  735. * we are appending
  736. * @param {shaka.extern.Stream} stream
  737. * @param {!string} mimeType
  738. * @return {{timestamp: ?number, metadata: !Array<shaka.extern.ID3Metadata>}}
  739. */
  740. getTimestampAndDispatchMetadata(contentType, data, reference, stream,
  741. mimeType) {
  742. let timestamp = null;
  743. let metadata = [];
  744. const uint8ArrayData = shaka.util.BufferUtils.toUint8(data);
  745. if (shaka.util.MimeUtils.RAW_FORMATS.includes(mimeType)) {
  746. const frames = shaka.util.Id3Utils.getID3Frames(uint8ArrayData);
  747. if (frames.length && reference) {
  748. const metadataTimestamp = frames.find((frame) => {
  749. return frame.description ===
  750. 'com.apple.streaming.transportStreamTimestamp';
  751. });
  752. if (metadataTimestamp && typeof metadataTimestamp.data == 'number') {
  753. timestamp = Math.round(metadataTimestamp.data) / 1000;
  754. }
  755. /** @private {shaka.extern.ID3Metadata} */
  756. const id3Metadata = {
  757. cueTime: reference.startTime,
  758. data: uint8ArrayData,
  759. frames: frames,
  760. dts: reference.startTime,
  761. pts: reference.startTime,
  762. };
  763. this.playerInterface_.onMetadata(
  764. [id3Metadata], /* offset= */ 0, reference.endTime);
  765. }
  766. } else if (mimeType.includes('/mp4') &&
  767. reference &&
  768. reference.initSegmentReference &&
  769. reference.initSegmentReference.timescale) {
  770. const timescale = reference.initSegmentReference.timescale;
  771. if (!isNaN(timescale)) {
  772. const hasEmsg = ((stream.emsgSchemeIdUris != null &&
  773. stream.emsgSchemeIdUris.length > 0) ||
  774. this.config_.dispatchAllEmsgBoxes);
  775. const Mp4Parser = shaka.util.Mp4Parser;
  776. let startTime = 0;
  777. let parsedMedia = false;
  778. const parser = new Mp4Parser();
  779. if (hasEmsg) {
  780. parser.fullBox('emsg', (box) =>
  781. this.parseEMSG_(reference, stream.emsgSchemeIdUris, box));
  782. }
  783. parser.fullBox('prft', (box) => this.parsePrft_(timescale, box))
  784. .box('moof', Mp4Parser.children)
  785. .box('traf', Mp4Parser.children)
  786. .fullBox('tfdt', (box) => {
  787. if (!parsedMedia) {
  788. goog.asserts.assert(
  789. box.version == 0 || box.version == 1,
  790. 'TFDT version can only be 0 or 1');
  791. const parsed = shaka.util.Mp4BoxParsers.parseTFDTInaccurate(
  792. box.reader, box.version);
  793. startTime = parsed.baseMediaDecodeTime / timescale;
  794. parsedMedia = true;
  795. if (!hasEmsg) {
  796. box.parser.stop();
  797. }
  798. }
  799. }).parse(data, /* partialOkay= */ true);
  800. if (parsedMedia && reference.timestampOffset == 0) {
  801. timestamp = startTime;
  802. }
  803. }
  804. } else if (!mimeType.includes('/mp4') && !mimeType.includes('/webm') &&
  805. shaka.util.TsParser.probe(uint8ArrayData)) {
  806. if (!this.tsParsers_.has(contentType)) {
  807. this.tsParsers_.set(contentType, new shaka.util.TsParser());
  808. } else {
  809. this.tsParsers_.get(contentType).clearData();
  810. }
  811. const tsParser = this.tsParsers_.get(contentType).parse(uint8ArrayData);
  812. const startTime = tsParser.getStartTime(contentType);
  813. if (startTime != null) {
  814. timestamp = startTime;
  815. }
  816. metadata = tsParser.getMetadata();
  817. }
  818. return {timestamp, metadata};
  819. }
  820. /**
  821. * Parse the EMSG box from a MP4 container.
  822. *
  823. * @param {!shaka.media.SegmentReference} reference
  824. * @param {?Array<string>} emsgSchemeIdUris Array of emsg
  825. * scheme_id_uri for which emsg boxes should be parsed.
  826. * @param {!shaka.extern.ParsedBox} box
  827. * @private
  828. * https://dashif-documents.azurewebsites.net/Events/master/event.html#emsg-format
  829. * aligned(8) class DASHEventMessageBox
  830. * extends FullBox(‘emsg’, version, flags = 0){
  831. * if (version==0) {
  832. * string scheme_id_uri;
  833. * string value;
  834. * unsigned int(32) timescale;
  835. * unsigned int(32) presentation_time_delta;
  836. * unsigned int(32) event_duration;
  837. * unsigned int(32) id;
  838. * } else if (version==1) {
  839. * unsigned int(32) timescale;
  840. * unsigned int(64) presentation_time;
  841. * unsigned int(32) event_duration;
  842. * unsigned int(32) id;
  843. * string scheme_id_uri;
  844. * string value;
  845. * }
  846. * unsigned int(8) message_data[];
  847. */
  848. parseEMSG_(reference, emsgSchemeIdUris, box) {
  849. let timescale;
  850. let id;
  851. let eventDuration;
  852. let schemeId;
  853. let startTime;
  854. let presentationTimeDelta;
  855. let value;
  856. if (box.version === 0) {
  857. schemeId = box.reader.readTerminatedString();
  858. value = box.reader.readTerminatedString();
  859. timescale = box.reader.readUint32();
  860. presentationTimeDelta = box.reader.readUint32();
  861. eventDuration = box.reader.readUint32();
  862. id = box.reader.readUint32();
  863. startTime = reference.startTime + (presentationTimeDelta / timescale);
  864. } else {
  865. timescale = box.reader.readUint32();
  866. const pts = box.reader.readUint64();
  867. startTime = (pts / timescale) + reference.timestampOffset;
  868. presentationTimeDelta = startTime - reference.startTime;
  869. eventDuration = box.reader.readUint32();
  870. id = box.reader.readUint32();
  871. schemeId = box.reader.readTerminatedString();
  872. value = box.reader.readTerminatedString();
  873. }
  874. const messageData = box.reader.readBytes(
  875. box.reader.getLength() - box.reader.getPosition());
  876. // See DASH sec. 5.10.3.3.1
  877. // If a DASH client detects an event message box with a scheme that is not
  878. // defined in MPD, the client is expected to ignore it.
  879. if ((emsgSchemeIdUris && emsgSchemeIdUris.includes(schemeId)) ||
  880. this.config_.dispatchAllEmsgBoxes) {
  881. // See DASH sec. 5.10.4.1
  882. // A special scheme in DASH used to signal manifest updates.
  883. if (schemeId == 'urn:mpeg:dash:event:2012') {
  884. this.playerInterface_.onManifestUpdate();
  885. } else {
  886. // All other schemes are dispatched as a general 'emsg' event.
  887. const endTime = startTime + (eventDuration / timescale);
  888. /** @type {shaka.extern.EmsgInfo} */
  889. const emsg = {
  890. startTime: startTime,
  891. endTime: endTime,
  892. schemeIdUri: schemeId,
  893. value: value,
  894. timescale: timescale,
  895. presentationTimeDelta: presentationTimeDelta,
  896. eventDuration: eventDuration,
  897. id: id,
  898. messageData: messageData,
  899. };
  900. // Dispatch an event to notify the application about the emsg box.
  901. const eventName = shaka.util.FakeEvent.EventName.Emsg;
  902. const data = (new Map()).set('detail', emsg);
  903. const event = new shaka.util.FakeEvent(eventName, data);
  904. // A user can call preventDefault() on a cancelable event.
  905. event.cancelable = true;
  906. this.playerInterface_.onEmsg(emsg);
  907. // Additionally, ID3 events generate a 'metadata' event. This is a
  908. // pre-parsed version of the metadata blob already dispatched in the
  909. // 'emsg' event.
  910. if (schemeId == 'https://aomedia.org/emsg/ID3' ||
  911. schemeId == 'https://developer.apple.com/streaming/emsg-id3') {
  912. // See https://aomediacodec.github.io/id3-emsg/
  913. const frames = shaka.util.Id3Utils.getID3Frames(messageData);
  914. if (frames.length) {
  915. /** @private {shaka.extern.ID3Metadata} */
  916. const metadata = {
  917. cueTime: startTime,
  918. data: messageData,
  919. frames: frames,
  920. dts: startTime,
  921. pts: startTime,
  922. };
  923. this.playerInterface_.onMetadata(
  924. [metadata], /* offset= */ 0, endTime);
  925. }
  926. }
  927. }
  928. }
  929. }
  930. /**
  931. * Parse PRFT box.
  932. * @param {number} timescale
  933. * @param {!shaka.extern.ParsedBox} box
  934. * @private
  935. */
  936. parsePrft_(timescale, box) {
  937. goog.asserts.assert(
  938. box.version == 0 || box.version == 1,
  939. 'PRFT version can only be 0 or 1');
  940. const parsed = shaka.util.Mp4BoxParsers.parsePRFTInaccurate(
  941. box.reader, box.version);
  942. const wallClockTime = shaka.util.TimeUtils.convertNtp(parsed.ntpTimestamp);
  943. const programStartDate = new Date(wallClockTime -
  944. (parsed.mediaTime / timescale) * 1000);
  945. /** @type {shaka.extern.ProducerReferenceTime} */
  946. const prftInfo = {
  947. wallClockTime,
  948. programStartDate,
  949. };
  950. const eventName = shaka.util.FakeEvent.EventName.Prft;
  951. const data = (new Map()).set('detail', prftInfo);
  952. const event = new shaka.util.FakeEvent(
  953. eventName, data);
  954. this.playerInterface_.onEvent(event);
  955. }
/**
 * Enqueue an operation to append data to the SourceBuffer.
 * Start and end times are needed for TextEngine, but not for MediaSource.
 * Start and end times may be null for initialization segments; if present
 * they are relative to the presentation timeline.
 *
 * @param {shaka.util.ManifestParserUtils.ContentType} contentType
 * @param {!BufferSource} data
 * @param {?shaka.media.SegmentReference} reference The segment reference
 *   we are appending, or null for init segments
 * @param {shaka.extern.Stream} stream
 * @param {?boolean} hasClosedCaptions True if the buffer contains CEA closed
 *   captions
 * @param {boolean=} seeked True if we just seeked
 * @param {boolean=} adaptation True if we just automatically switched active
 *   variant(s).
 * @param {boolean=} isChunkedData True if we add to the buffer a partial
 *   chunk of a segment rather than a whole segment (presumably low-latency
 *   streaming — TODO confirm with callers).
 * @param {boolean=} fromSplit True on the recursive calls used to append
 *   muxed content to both the audio and video source buffers.
 * @param {number=} continuityTimeline an optional continuity timeline
 * @return {!Promise}
 */
async appendBuffer(
    contentType, data, reference, stream, hasClosedCaptions, seeked = false,
    adaptation = false, isChunkedData = false, fromSplit = false,
    continuityTimeline) {
  const ContentType = shaka.util.ManifestParserUtils.ContentType;
  // Text goes to TextEngine, not to a MediaSource SourceBuffer.
  if (contentType == ContentType.TEXT) {
    if (this.manifestType_ == shaka.media.ManifestParser.HLS) {
      // This won't be known until the first video segment is appended.
      const offset = await this.textSequenceModeOffset_;
      this.textEngine_.setTimestampOffset(offset);
    }
    await this.textEngine_.appendBuffer(
        data,
        reference ? reference.startTime : null,
        reference ? reference.endTime : null,
        reference ? reference.getUris()[0] : null);
    return;
  }
  // Muxed content must be appended to both the audio and the video source
  // buffers; recurse once per type with fromSplit set to stop the recursion.
  if (!fromSplit && this.needSplitMuxedContent_) {
    await this.appendBuffer(ContentType.AUDIO, data, reference, stream,
        hasClosedCaptions, seeked, adaptation, isChunkedData,
        /* fromSplit= */ true);
    await this.appendBuffer(ContentType.VIDEO, data, reference, stream,
        hasClosedCaptions, seeked, adaptation, isChunkedData,
        /* fromSplit= */ true);
    return;
  }
  if (!this.sourceBuffers_.has(contentType)) {
    shaka.log.warning('Attempted to restore a non-existent source buffer');
    return;
  }
  let timestampOffset = this.sourceBuffers_.get(contentType).timestampOffset;
  // When transmuxing, timestamps must be extracted from the original
  // (pre-transmux) MIME type.
  let mimeType = this.sourceBufferTypes_.get(contentType);
  if (this.transmuxers_.has(contentType)) {
    mimeType = this.transmuxers_.get(contentType).getOriginalMimeType();
  }
  if (reference) {
    // Media segment: probe the data for its internal timestamp and any
    // in-band metadata (e.g. ID3 in TS).
    const {timestamp, metadata} = this.getTimestampAndDispatchMetadata(
        contentType, data, reference, stream, mimeType);
    if (timestamp != null) {
      // Record the first timestamp seen per type; once both are known,
      // resolve audioCompensation_ with the video/audio skew (only when
      // the segments' reference start times match).
      if (this.firstVideoTimestamp_ == null &&
          contentType == ContentType.VIDEO) {
        this.firstVideoTimestamp_ = timestamp;
        this.firstVideoReferenceStartTime_ = reference.startTime;
        if (this.firstAudioTimestamp_ != null) {
          let compensation = 0;
          // Only apply compensation if video and audio segment startTime
          // match, to avoid introducing sync issues.
          if (this.firstVideoReferenceStartTime_ ==
              this.firstAudioReferenceStartTime_) {
            compensation =
                this.firstVideoTimestamp_ - this.firstAudioTimestamp_;
          }
          this.audioCompensation_.resolve(compensation);
        }
      }
      if (this.firstAudioTimestamp_ == null &&
          contentType == ContentType.AUDIO) {
        this.firstAudioTimestamp_ = timestamp;
        this.firstAudioReferenceStartTime_ = reference.startTime;
        if (this.firstVideoTimestamp_ != null) {
          let compensation = 0;
          // Only apply compensation if video and audio segment startTime
          // match, to avoid introducing sync issues.
          if (this.firstVideoReferenceStartTime_ ==
              this.firstAudioReferenceStartTime_) {
            compensation =
                this.firstVideoTimestamp_ - this.firstAudioTimestamp_;
          }
          this.audioCompensation_.resolve(compensation);
        }
      }
      let realTimestamp = timestamp;
      const RAW_FORMATS = shaka.util.MimeUtils.RAW_FORMATS;
      // For formats without containers and using segments mode, we need to
      // adjust TimestampOffset relative to 0 because segments do not have
      // any timestamp information.
      if (!this.sequenceMode_ &&
          RAW_FORMATS.includes(this.sourceBufferTypes_.get(contentType))) {
        realTimestamp = 0;
      }
      const calculatedTimestampOffset = reference.startTime - realTimestamp;
      const timestampOffsetDifference =
          Math.abs(timestampOffset - calculatedTimestampOffset);
      // Only rewrite the offset when it drifted by >= 1ms, or after a seek
      // or adaptation; chunked data additionally requires a positive or
      // unset offset to avoid churn mid-segment.
      if ((timestampOffsetDifference >= 0.001 || seeked || adaptation) &&
          (!isChunkedData || calculatedTimestampOffset > 0 ||
          !timestampOffset)) {
        timestampOffset = calculatedTimestampOffset;
        if (this.attemptTimestampOffsetCalculation_) {
          // abort() first: the offset cannot change while the SourceBuffer
          // is mid-parse. These enqueues are intentionally not awaited;
          // they are ordered ahead of the append enqueued below.
          this.enqueueOperation_(
              contentType,
              () => this.abort_(contentType),
              null);
          this.enqueueOperation_(
              contentType,
              () => this.setTimestampOffset_(contentType, timestampOffset),
              null);
        }
      }
      // Timestamps can only be reliably extracted from video, not audio.
      // Packed audio formats do not have internal timestamps at all.
      // Prefer video for this when available.
      const isBestSourceBufferForTimestamps =
          contentType == ContentType.VIDEO ||
          !(this.sourceBuffers_.has(ContentType.VIDEO));
      if (isBestSourceBufferForTimestamps) {
        this.textSequenceModeOffset_.resolve(timestampOffset);
      }
    }
    if (metadata.length) {
      this.playerInterface_.onMetadata(metadata, timestampOffset,
          reference ? reference.endTime : null);
    }
  }
  if (hasClosedCaptions && contentType == ContentType.VIDEO) {
    // Lazily create the text engine and caption parser on first use.
    if (!this.textEngine_) {
      this.reinitText(shaka.util.MimeUtils.CEA608_CLOSED_CAPTION_MIMETYPE,
          this.sequenceMode_, /* external= */ false);
    }
    if (!this.captionParser_) {
      const basicType = mimeType.split(';', 1)[0];
      this.captionParser_ = this.getCaptionParser(basicType);
    }
    // If it is the init segment for closed captions, initialize the closed
    // caption parser.
    if (!reference) {
      this.captionParser_.init(data, adaptation, continuityTimeline);
    } else {
      const closedCaptions = this.captionParser_.parseFrom(data);
      if (closedCaptions.length) {
        this.textEngine_.storeAndAppendClosedCaptions(
            closedCaptions,
            reference.startTime,
            reference.endTime,
            timestampOffset);
      }
    }
  }
  // Transmux (e.g. TS -> MP4) before handing the bytes to MSE.
  if (this.transmuxers_.has(contentType)) {
    data = await this.transmuxers_.get(contentType).transmux(
        data, stream, reference, this.mediaSource_.duration, contentType);
  }
  data = this.workAroundBrokenPlatforms_(
      stream, data, reference, contentType);
  if (reference && this.sequenceMode_ && contentType != ContentType.TEXT) {
    // In sequence mode, for non-text streams, if we just cleared the buffer
    // and are either performing an unbuffered seek or handling an automatic
    // adaptation, we need to set a new timestampOffset on the sourceBuffer.
    if (seeked || adaptation) {
      let timestampOffset = reference.startTime;
      // Audio and video may not be aligned, so we will compensate for audio
      // if necessary.
      if (this.manifestType_ == shaka.media.ManifestParser.HLS &&
          !this.needSplitMuxedContent_ &&
          contentType == ContentType.AUDIO &&
          this.sourceBuffers_.has(ContentType.VIDEO)) {
        const compensation = await this.audioCompensation_;
        // Only apply compensation if the difference is greater than 150ms
        if (Math.abs(compensation) > 0.15) {
          timestampOffset -= compensation;
        }
      }
      // The logic to call abort() before setting the timestampOffset is
      // extended during unbuffered seeks or automatic adaptations; it is
      // possible for the append state to be PARSING_MEDIA_SEGMENT from the
      // previous SourceBuffer#appendBuffer() call.
      this.enqueueOperation_(
          contentType,
          () => this.abort_(contentType),
          null);
      this.enqueueOperation_(
          contentType,
          () => this.setTimestampOffset_(contentType, timestampOffset),
          null);
    }
  }
  let bufferedBefore = null;
  // The actual append; the buffered snapshot before/after is a debug-only
  // sanity check for encoding problems.
  await this.enqueueOperation_(contentType, () => {
    if (goog.DEBUG && reference && !reference.isPreload() && !isChunkedData) {
      bufferedBefore = this.getBuffered_(contentType);
    }
    this.append_(contentType, data, timestampOffset, stream);
  }, reference ? reference.getUris()[0] : null);
  if (goog.DEBUG && reference && !reference.isPreload() && !isChunkedData) {
    const bufferedAfter = this.getBuffered_(contentType);
    const newBuffered = shaka.media.TimeRangesUtils.computeAddedRange(
        bufferedBefore, bufferedAfter);
    if (newBuffered) {
      const segmentDuration = reference.endTime - reference.startTime;
      const timeAdded = newBuffered.end - newBuffered.start;
      // Check end times instead of start times. We may be overwriting a
      // buffer and only the end changes, and that would be fine.
      // Also, exclude tiny segments. Sometimes alignment segments as small
      // as 33ms are seen in Google DAI content. For such tiny segments,
      // half a segment duration would be no issue.
      const offset = Math.abs(newBuffered.end - reference.endTime);
      if (segmentDuration > 0.100 && (offset > segmentDuration / 2 ||
          Math.abs(segmentDuration - timeAdded) > 0.030)) {
        shaka.log.error('Possible encoding problem detected!',
            'Unexpected buffered range for reference', reference,
            'from URIs', reference.getUris(),
            'should be', {start: reference.startTime, end: reference.endTime},
            'but got', newBuffered);
      }
    }
  }
}
  1184. /**
  1185. * Set the selected closed captions Id and language.
  1186. *
  1187. * @param {string} id
  1188. */
  1189. setSelectedClosedCaptionId(id) {
  1190. const VIDEO = shaka.util.ManifestParserUtils.ContentType.VIDEO;
  1191. const videoBufferEndTime = this.bufferEnd(VIDEO) || 0;
  1192. this.textEngine_.setSelectedClosedCaptionId(id, videoBufferEndTime);
  1193. }
  1194. /** Disable embedded closed captions. */
  1195. clearSelectedClosedCaptionId() {
  1196. if (this.textEngine_) {
  1197. this.textEngine_.setSelectedClosedCaptionId('', 0);
  1198. }
  1199. }
  1200. /**
  1201. * Enqueue an operation to remove data from the SourceBuffer.
  1202. *
  1203. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  1204. * @param {number} startTime relative to the start of the presentation
  1205. * @param {number} endTime relative to the start of the presentation
  1206. * @param {Array<number>=} continuityTimelines a list of continuity timelines
  1207. * that are still available on the stream.
  1208. * @return {!Promise}
  1209. */
  1210. async remove(contentType, startTime, endTime, continuityTimelines) {
  1211. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  1212. if (contentType == ContentType.VIDEO && this.captionParser_) {
  1213. this.captionParser_.remove(continuityTimelines);
  1214. }
  1215. if (contentType == ContentType.TEXT) {
  1216. await this.textEngine_.remove(startTime, endTime);
  1217. } else if (endTime > startTime) {
  1218. await this.enqueueOperation_(
  1219. contentType,
  1220. () => this.remove_(contentType, startTime, endTime),
  1221. null);
  1222. if (this.needSplitMuxedContent_) {
  1223. await this.enqueueOperation_(
  1224. ContentType.AUDIO,
  1225. () => this.remove_(ContentType.AUDIO, startTime, endTime),
  1226. null);
  1227. }
  1228. }
  1229. }
  1230. /**
  1231. * Enqueue an operation to clear the SourceBuffer.
  1232. *
  1233. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  1234. * @return {!Promise}
  1235. */
  1236. async clear(contentType) {
  1237. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  1238. if (contentType == ContentType.TEXT) {
  1239. if (!this.textEngine_) {
  1240. return;
  1241. }
  1242. await this.textEngine_.remove(0, Infinity);
  1243. } else {
  1244. // Note that not all platforms allow clearing to Infinity.
  1245. await this.enqueueOperation_(
  1246. contentType,
  1247. () => this.remove_(contentType, 0, this.mediaSource_.duration),
  1248. null);
  1249. if (this.needSplitMuxedContent_) {
  1250. await this.enqueueOperation_(
  1251. ContentType.AUDIO,
  1252. () => this.remove_(
  1253. ContentType.AUDIO, 0, this.mediaSource_.duration),
  1254. null);
  1255. }
  1256. }
  1257. }
  1258. /**
  1259. * Fully reset the state of the caption parser owned by MediaSourceEngine.
  1260. */
  1261. resetCaptionParser() {
  1262. if (this.captionParser_) {
  1263. this.captionParser_.reset();
  1264. }
  1265. }
  1266. /**
  1267. * Enqueue an operation to flush the SourceBuffer.
  1268. * This is a workaround for what we believe is a Chromecast bug.
  1269. *
  1270. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  1271. * @return {!Promise}
  1272. */
  1273. async flush(contentType) {
  1274. // Flush the pipeline. Necessary on Chromecast, even though we have removed
  1275. // everything.
  1276. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  1277. if (contentType == ContentType.TEXT) {
  1278. // Nothing to flush for text.
  1279. return;
  1280. }
  1281. await this.enqueueOperation_(
  1282. contentType,
  1283. () => this.flush_(contentType),
  1284. null);
  1285. if (this.needSplitMuxedContent_) {
  1286. await this.enqueueOperation_(
  1287. ContentType.AUDIO,
  1288. () => this.flush_(ContentType.AUDIO),
  1289. null);
  1290. }
  1291. }
  1292. /**
  1293. * Sets the timestamp offset and append window end for the given content type.
  1294. *
  1295. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  1296. * @param {number} timestampOffset The timestamp offset. Segments which start
  1297. * at time t will be inserted at time t + timestampOffset instead. This
  1298. * value does not affect segments which have already been inserted.
  1299. * @param {number} appendWindowStart The timestamp to set the append window
  1300. * start to. For future appends, frames/samples with timestamps less than
  1301. * this value will be dropped.
  1302. * @param {number} appendWindowEnd The timestamp to set the append window end
  1303. * to. For future appends, frames/samples with timestamps greater than this
  1304. * value will be dropped.
  1305. * @param {boolean} ignoreTimestampOffset If true, the timestampOffset will
  1306. * not be applied in this step.
  1307. * @param {string} mimeType
  1308. * @param {string} codecs
  1309. * @param {!Map<shaka.util.ManifestParserUtils.ContentType,
  1310. * shaka.extern.Stream>} streamsByType
  1311. * A map of content types to streams.
  1312. *
  1313. * @return {!Promise}
  1314. */
  1315. async setStreamProperties(
  1316. contentType, timestampOffset, appendWindowStart, appendWindowEnd,
  1317. ignoreTimestampOffset, mimeType, codecs, streamsByType) {
  1318. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  1319. if (contentType == ContentType.TEXT) {
  1320. if (!ignoreTimestampOffset) {
  1321. this.textEngine_.setTimestampOffset(timestampOffset);
  1322. }
  1323. this.textEngine_.setAppendWindow(appendWindowStart, appendWindowEnd);
  1324. return;
  1325. }
  1326. const operations = [];
  1327. const hasChangedCodecs = await this.codecSwitchIfNecessary_(
  1328. contentType, mimeType, codecs, streamsByType);
  1329. if (!hasChangedCodecs) {
  1330. // Queue an abort() to help MSE splice together overlapping segments.
  1331. // We set appendWindowEnd when we change periods in DASH content, and the
  1332. // period transition may result in overlap.
  1333. //
  1334. // An abort() also helps with MPEG2-TS. When we append a TS segment, we
  1335. // always enter a PARSING_MEDIA_SEGMENT state and we can't change the
  1336. // timestamp offset. By calling abort(), we reset the state so we can
  1337. // set it.
  1338. operations.push(this.enqueueOperation_(
  1339. contentType,
  1340. () => this.abort_(contentType),
  1341. null));
  1342. if (this.needSplitMuxedContent_) {
  1343. operations.push(this.enqueueOperation_(
  1344. ContentType.AUDIO,
  1345. () => this.abort_(ContentType.AUDIO),
  1346. null));
  1347. }
  1348. }
  1349. if (!ignoreTimestampOffset) {
  1350. operations.push(this.enqueueOperation_(
  1351. contentType,
  1352. () => this.setTimestampOffset_(contentType, timestampOffset),
  1353. null));
  1354. if (this.needSplitMuxedContent_) {
  1355. operations.push(this.enqueueOperation_(
  1356. ContentType.AUDIO,
  1357. () => this.setTimestampOffset_(
  1358. ContentType.AUDIO, timestampOffset),
  1359. null));
  1360. }
  1361. }
  1362. if (appendWindowStart != 0 || appendWindowEnd != Infinity) {
  1363. operations.push(this.enqueueOperation_(
  1364. contentType,
  1365. () => this.setAppendWindow_(
  1366. contentType, appendWindowStart, appendWindowEnd),
  1367. null));
  1368. if (this.needSplitMuxedContent_) {
  1369. operations.push(this.enqueueOperation_(
  1370. ContentType.AUDIO,
  1371. () => this.setAppendWindow_(
  1372. ContentType.AUDIO, appendWindowStart, appendWindowEnd),
  1373. null));
  1374. }
  1375. }
  1376. if (operations.length) {
  1377. await Promise.all(operations);
  1378. }
  1379. }
  1380. /**
  1381. * Adjust timestamp offset to maintain AV sync across discontinuities.
  1382. *
  1383. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  1384. * @param {number} timestampOffset
  1385. * @return {!Promise}
  1386. */
  1387. async resync(contentType, timestampOffset) {
  1388. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  1389. if (contentType == ContentType.TEXT) {
  1390. // This operation is for audio and video only.
  1391. return;
  1392. }
  1393. // Reset the promise in case the timestamp offset changed during
  1394. // a period/discontinuity transition.
  1395. if (contentType == ContentType.VIDEO) {
  1396. this.textSequenceModeOffset_ = new shaka.util.PublicPromise();
  1397. }
  1398. if (!this.sequenceMode_) {
  1399. return;
  1400. }
  1401. // Avoid changing timestampOffset when the difference is less than 100 ms
  1402. // from the end of the current buffer.
  1403. const bufferEnd = this.bufferEnd(contentType);
  1404. if (bufferEnd && Math.abs(bufferEnd - timestampOffset) < 0.1) {
  1405. return;
  1406. }
  1407. // Queue an abort() to help MSE splice together overlapping segments.
  1408. // We set appendWindowEnd when we change periods in DASH content, and the
  1409. // period transition may result in overlap.
  1410. //
  1411. // An abort() also helps with MPEG2-TS. When we append a TS segment, we
  1412. // always enter a PARSING_MEDIA_SEGMENT state and we can't change the
  1413. // timestamp offset. By calling abort(), we reset the state so we can
  1414. // set it.
  1415. this.enqueueOperation_(
  1416. contentType,
  1417. () => this.abort_(contentType),
  1418. null);
  1419. if (this.needSplitMuxedContent_) {
  1420. this.enqueueOperation_(
  1421. ContentType.AUDIO,
  1422. () => this.abort_(ContentType.AUDIO),
  1423. null);
  1424. }
  1425. await this.enqueueOperation_(
  1426. contentType,
  1427. () => this.setTimestampOffset_(contentType, timestampOffset),
  1428. null);
  1429. if (this.needSplitMuxedContent_) {
  1430. await this.enqueueOperation_(
  1431. ContentType.AUDIO,
  1432. () => this.setTimestampOffset_(ContentType.AUDIO, timestampOffset),
  1433. null);
  1434. }
  1435. }
  1436. /**
  1437. * @param {string=} reason Valid reasons are 'network' and 'decode'.
  1438. * @return {!Promise}
  1439. * @see http://w3c.github.io/media-source/#idl-def-EndOfStreamError
  1440. */
  1441. async endOfStream(reason) {
  1442. await this.enqueueBlockingOperation_(() => {
  1443. // If endOfStream() has already been called on the media source,
  1444. // don't call it again. Also do not call if readyState is
  1445. // 'closed' (not attached to video element) since it is not a
  1446. // valid operation.
  1447. if (this.ended() || this.closed()) {
  1448. return;
  1449. }
  1450. // Tizen won't let us pass undefined, but it will let us omit the
  1451. // argument.
  1452. if (reason) {
  1453. this.mediaSource_.endOfStream(reason);
  1454. } else {
  1455. this.mediaSource_.endOfStream();
  1456. }
  1457. });
  1458. }
  1459. /**
  1460. * @param {number} duration
  1461. * @return {!Promise}
  1462. */
  1463. async setDuration(duration) {
  1464. await this.enqueueBlockingOperation_(() => {
  1465. // https://www.w3.org/TR/media-source-2/#duration-change-algorithm
  1466. // "Duration reductions that would truncate currently buffered media
  1467. // are disallowed.
  1468. // When truncation is necessary, use remove() to reduce the buffered
  1469. // range before updating duration."
  1470. // But in some platforms, truncating the duration causes the
  1471. // buffer range removal algorithm to run which triggers an
  1472. // 'updateend' event to fire.
  1473. // To handle this scenario, we have to insert a dummy operation into
  1474. // the beginning of each queue, which the 'updateend' handler will remove.
  1475. // Using config to disable it by default and enable only
  1476. // on relevant platforms.
  1477. if (this.config_.durationReductionEmitsUpdateEnd &&
  1478. duration < this.mediaSource_.duration) {
  1479. for (const contentType of this.sourceBuffers_.keys()) {
  1480. const dummyOperation = {
  1481. start: () => {},
  1482. p: new shaka.util.PublicPromise(),
  1483. uri: null,
  1484. };
  1485. this.queues_.get(contentType).unshift(dummyOperation);
  1486. }
  1487. }
  1488. this.mediaSource_.duration = duration;
  1489. this.lastDuration_ = duration;
  1490. });
  1491. }
  1492. /**
  1493. * Get the current MediaSource duration.
  1494. *
  1495. * @return {number}
  1496. */
  1497. getDuration() {
  1498. return this.mediaSource_.duration;
  1499. }
  1500. /**
  1501. * Updates the live seekable range.
  1502. *
  1503. * @param {number} startTime
  1504. * @param {number} endTime
  1505. */
  1506. async setLiveSeekableRange(startTime, endTime) {
  1507. if (this.destroyer_.destroyed() || this.video_.error ||
  1508. this.usingRemotePlayback_ || this.reloadingMediaSource_) {
  1509. return;
  1510. }
  1511. goog.asserts.assert('setLiveSeekableRange' in this.mediaSource_,
  1512. 'Using setLiveSeekableRange on not supported platform');
  1513. if (this.ended() || this.closed()) {
  1514. return;
  1515. }
  1516. await this.enqueueBlockingOperation_(() => {
  1517. if (this.ended() || this.closed()) {
  1518. return;
  1519. }
  1520. this.mediaSource_.setLiveSeekableRange(startTime, endTime);
  1521. });
  1522. }
  1523. /**
  1524. * Clear the current live seekable range.
  1525. */
  1526. async clearLiveSeekableRange() {
  1527. if (this.destroyer_.destroyed() || this.video_.error ||
  1528. this.usingRemotePlayback_ || this.reloadingMediaSource_) {
  1529. return;
  1530. }
  1531. goog.asserts.assert('clearLiveSeekableRange' in this.mediaSource_,
  1532. 'Using clearLiveSeekableRange on not supported platform');
  1533. if (this.ended() || this.closed()) {
  1534. return;
  1535. }
  1536. await this.enqueueBlockingOperation_(() => {
  1537. if (this.ended() || this.closed()) {
  1538. return;
  1539. }
  1540. this.mediaSource_.clearLiveSeekableRange();
  1541. });
  1542. }
  1543. /**
  1544. * Append dependency data.
  1545. * @param {BufferSource} data
  1546. * @param {number} timestampOffset
  1547. * @param {shaka.extern.Stream} stream
  1548. */
  1549. appendDependency(data, timestampOffset, stream) {
  1550. if (this.lcevcDec_) {
  1551. // Append buffers to the LCEVC Dec for parsing and storing
  1552. // of LCEVC data.
  1553. this.lcevcDec_.appendBuffer(data, timestampOffset, stream);
  1554. }
  1555. }
  1556. /**
  1557. * Append data to the SourceBuffer.
  1558. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  1559. * @param {BufferSource} data
  1560. * @param {number} timestampOffset
  1561. * @param {shaka.extern.Stream} stream
  1562. * @private
  1563. */
  1564. append_(contentType, data, timestampOffset, stream) {
  1565. this.appendDependency(data, timestampOffset, stream);
  1566. // This will trigger an 'updateend' event.
  1567. this.sourceBuffers_.get(contentType).appendBuffer(data);
  1568. }
  1569. /**
  1570. * Remove data from the SourceBuffer.
  1571. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  1572. * @param {number} startTime relative to the start of the presentation
  1573. * @param {number} endTime relative to the start of the presentation
  1574. * @private
  1575. */
  1576. remove_(contentType, startTime, endTime) {
  1577. if (endTime <= startTime) {
  1578. // Ignore removal of inverted or empty ranges.
  1579. // Fake 'updateend' event to resolve the operation.
  1580. this.onUpdateEnd_(contentType);
  1581. return;
  1582. }
  1583. // This will trigger an 'updateend' event.
  1584. this.sourceBuffers_.get(contentType).remove(startTime, endTime);
  1585. }
  1586. /**
  1587. * Call abort() on the SourceBuffer.
  1588. * This resets MSE's last_decode_timestamp on all track buffers, which should
  1589. * trigger the splicing logic for overlapping segments.
  1590. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  1591. * @private
  1592. */
  1593. abort_(contentType) {
  1594. const sourceBuffer = this.sourceBuffers_.get(contentType);
  1595. // Save the append window, which is reset on abort().
  1596. const appendWindowStart = sourceBuffer.appendWindowStart;
  1597. const appendWindowEnd = sourceBuffer.appendWindowEnd;
  1598. // This will not trigger an 'updateend' event, since nothing is happening.
  1599. // This is only to reset MSE internals, not to abort an actual operation.
  1600. sourceBuffer.abort();
  1601. // Restore the append window.
  1602. sourceBuffer.appendWindowStart = appendWindowStart;
  1603. sourceBuffer.appendWindowEnd = appendWindowEnd;
  1604. // Fake an 'updateend' event to resolve the operation.
  1605. this.onUpdateEnd_(contentType);
  1606. }
  1607. /**
  1608. * Nudge the playhead to force the media pipeline to be flushed.
  1609. * This seems to be necessary on Chromecast to get new content to replace old
  1610. * content.
  1611. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  1612. * @private
  1613. */
  1614. flush_(contentType) {
  1615. // Never use flush_ if there's data. It causes a hiccup in playback.
  1616. goog.asserts.assert(
  1617. this.video_.buffered.length == 0, 'MediaSourceEngine.flush_ should ' +
  1618. 'only be used after clearing all data!');
  1619. // Seeking forces the pipeline to be flushed.
  1620. this.video_.currentTime -= 0.001;
  1621. // Fake an 'updateend' event to resolve the operation.
  1622. this.onUpdateEnd_(contentType);
  1623. }
  1624. /**
  1625. * Set the SourceBuffer's timestamp offset.
  1626. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  1627. * @param {number} timestampOffset
  1628. * @private
  1629. */
  1630. setTimestampOffset_(contentType, timestampOffset) {
  1631. // Work around for
  1632. // https://github.com/shaka-project/shaka-player/issues/1281:
  1633. // TODO(https://bit.ly/2ttKiBU): follow up when this is fixed in Edge
  1634. if (timestampOffset < 0) {
  1635. // Try to prevent rounding errors in Edge from removing the first
  1636. // keyframe.
  1637. timestampOffset += 0.001;
  1638. }
  1639. let shouldChangeTimestampOffset = true;
  1640. if (this.manifestType_ == shaka.media.ManifestParser.HLS) {
  1641. // Avoid changing timestampOffset when the difference is less than 150 ms
  1642. // from the end of the current buffer when using sequenceMode
  1643. const bufferEnd = this.bufferEnd(contentType);
  1644. if (!bufferEnd || Math.abs(bufferEnd - timestampOffset) > 0.15) {
  1645. shouldChangeTimestampOffset = true;
  1646. } else {
  1647. shouldChangeTimestampOffset = false;
  1648. }
  1649. }
  1650. if (shouldChangeTimestampOffset) {
  1651. this.sourceBuffers_.get(contentType).timestampOffset = timestampOffset;
  1652. }
  1653. // Fake an 'updateend' event to resolve the operation.
  1654. this.onUpdateEnd_(contentType);
  1655. }
  /**
   * Set the SourceBuffer's append window start and end.
   * (The previous summary mentioned only the end, but both are set.)
   * @param {shaka.util.ManifestParserUtils.ContentType} contentType
   * @param {number} appendWindowStart
   * @param {number} appendWindowEnd
   * @private
   */
  setAppendWindow_(contentType, appendWindowStart, appendWindowEnd) {
    const sourceBuffer = this.sourceBuffers_.get(contentType);
    // Only touch the SourceBuffer when something actually changes.
    if (sourceBuffer.appendWindowEnd !== appendWindowEnd ||
        sourceBuffer.appendWindowStart !== appendWindowStart) {
      // You can't set start > end, so first set start to 0, then set the new
      // end, then set the new start. That way, there are no intermediate
      // states which are invalid.
      sourceBuffer.appendWindowStart = 0;
      sourceBuffer.appendWindowEnd = appendWindowEnd;
      sourceBuffer.appendWindowStart = appendWindowStart;
    }
    // Fake an 'updateend' event to resolve the operation.
    this.onUpdateEnd_(contentType);
  }
  1677. /**
  1678. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  1679. * @private
  1680. */
  1681. onError_(contentType) {
  1682. const operation = this.queues_.get(contentType)[0];
  1683. goog.asserts.assert(operation, 'Spurious error event!');
  1684. goog.asserts.assert(!this.sourceBuffers_.get(contentType).updating,
  1685. 'SourceBuffer should not be updating on error!');
  1686. const code = this.video_.error ? this.video_.error.code : 0;
  1687. operation.p.reject(new shaka.util.Error(
  1688. shaka.util.Error.Severity.CRITICAL,
  1689. shaka.util.Error.Category.MEDIA,
  1690. shaka.util.Error.Code.MEDIA_SOURCE_OPERATION_FAILED,
  1691. code, operation.uri));
  1692. // Do not pop from queue. An 'updateend' event will fire next, and to
  1693. // avoid synchronizing these two event handlers, we will allow that one to
  1694. // pop from the queue as normal. Note that because the operation has
  1695. // already been rejected, the call to resolve() in the 'updateend' handler
  1696. // will have no effect.
  1697. }
  1698. /**
  1699. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  1700. * @private
  1701. */
  1702. onUpdateEnd_(contentType) {
  1703. // If we're reloading or have been destroyed, clear the queue for this
  1704. // content type.
  1705. if (this.reloadingMediaSource_ || this.destroyer_.destroyed()) {
  1706. // Resolve any pending operations in this content type's queue
  1707. const queue = this.queues_.get(contentType);
  1708. if (queue && queue.length) {
  1709. // Resolve the first operation that triggered this updateEnd
  1710. const firstOperation = queue[0];
  1711. if (firstOperation && firstOperation.p) {
  1712. firstOperation.p.resolve();
  1713. }
  1714. // Clear the rest of the queue
  1715. this.queues_.set(contentType, []);
  1716. }
  1717. return;
  1718. }
  1719. const operation = this.queues_.get(contentType)[0];
  1720. goog.asserts.assert(operation, 'Spurious updateend event!');
  1721. if (!operation) {
  1722. return;
  1723. }
  1724. goog.asserts.assert(!this.sourceBuffers_.get(contentType).updating,
  1725. 'SourceBuffer should not be updating on updateend!');
  1726. operation.p.resolve();
  1727. this.popFromQueue_(contentType);
  1728. }
  1729. /**
  1730. * Enqueue an operation and start it if appropriate.
  1731. *
  1732. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  1733. * @param {function()} start
  1734. * @param {?string} uri
  1735. * @return {!Promise}
  1736. * @private
  1737. */
  1738. enqueueOperation_(contentType, start, uri) {
  1739. this.destroyer_.ensureNotDestroyed();
  1740. const operation = {
  1741. start: start,
  1742. p: new shaka.util.PublicPromise(),
  1743. uri,
  1744. };
  1745. this.queues_.get(contentType).push(operation);
  1746. if (this.queues_.get(contentType).length == 1) {
  1747. this.startOperation_(contentType);
  1748. }
  1749. return operation.p;
  1750. }
  /**
   * Enqueue an operation which must block all other operations on all
   * SourceBuffers.
   *
   * @param {function():(Promise|undefined)} run The blocking operation,
   *   which may be asynchronous.  It runs only once every queue is idle.
   * @return {!Promise} Resolved when the operation completes; rejected if
   *   we are destroyed while waiting, or if run() throws.
   * @private
   */
  async enqueueBlockingOperation_(run) {
    this.destroyer_.ensureNotDestroyed();
    /** @type {!Array<!shaka.util.PublicPromise>} */
    const allWaiters = [];
    /** @type {!Array<!shaka.util.ManifestParserUtils.ContentType>} */
    const contentTypes = Array.from(this.sourceBuffers_.keys());
    // Enqueue a 'wait' operation onto each queue.
    // This operation signals its readiness when it starts.
    // When all wait operations are ready, the real operation takes place.
    for (const contentType of contentTypes) {
      const ready = new shaka.util.PublicPromise();
      // The waiter's promise doubles as its readiness signal: start()
      // resolves it, which both marks the queue head "done" and tells us
      // this queue has drained.
      const operation = {
        start: () => ready.resolve(),
        p: ready,
        uri: null,
      };
      const queue = this.queues_.get(contentType);
      queue.push(operation);
      allWaiters.push(ready);
      // If the queue was empty, start the waiter immediately.
      if (queue.length == 1) {
        operation.start();
      }
    }
    // Return a Promise to the real operation, which waits to begin until
    // there are no other in-progress operations on any SourceBuffers.
    try {
      await Promise.all(allWaiters);
    } catch (error) {
      // One of the waiters failed, which means we've been destroyed.
      goog.asserts.assert(
          this.destroyer_.destroyed(), 'Should be destroyed by now');
      // We haven't popped from the queue. Canceled waiters have been removed
      // by destroy. What's left now should just be resolved waiters. In
      // uncompiled mode, we will maintain good hygiene and make sure the
      // assert at the end of destroy passes. In compiled mode, the queues
      // are wiped in destroy.
      if (goog.DEBUG) {
        for (const contentType of contentTypes) {
          const queue = this.queues_.get(contentType);
          if (queue.length) {
            goog.asserts.assert(queue.length == 1,
                'Should be at most one item in queue!');
            goog.asserts.assert(allWaiters.includes(queue[0].p),
                'The item in queue should be one of our waiters!');
            queue.shift();
          }
        }
      }
      throw error;
    }
    if (goog.DEBUG) {
      // If we did it correctly, nothing is updating.
      for (const contentType of contentTypes) {
        goog.asserts.assert(
            this.sourceBuffers_.get(contentType).updating == false,
            'SourceBuffers should not be updating after a blocking op!');
      }
    }
    // Run the real operation, which can be asynchronous.
    try {
      await run();
    } catch (exception) {
      throw new shaka.util.Error(
          shaka.util.Error.Severity.CRITICAL,
          shaka.util.Error.Category.MEDIA,
          shaka.util.Error.Code.MEDIA_SOURCE_OPERATION_THREW,
          exception,
          this.video_.error || 'No error in the media element',
          null);
    } finally {
      // Unblock the queues.  Popping each waiter also starts the next
      // pending operation (if any) on that queue.
      for (const contentType of contentTypes) {
        this.popFromQueue_(contentType);
      }
    }
  }
  1835. /**
  1836. * Pop from the front of the queue and start a new operation.
  1837. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  1838. * @private
  1839. */
  1840. popFromQueue_(contentType) {
  1841. goog.asserts.assert(this.queues_.has(contentType), 'Queue should exist');
  1842. // Remove the in-progress operation, which is now complete.
  1843. this.queues_.get(contentType).shift();
  1844. this.startOperation_(contentType);
  1845. }
  1846. /**
  1847. * Starts the next operation in the queue.
  1848. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  1849. * @private
  1850. */
  1851. startOperation_(contentType) {
  1852. // Retrieve the next operation, if any, from the queue and start it.
  1853. const next = this.queues_.get(contentType)[0];
  1854. if (next) {
  1855. try {
  1856. next.start();
  1857. } catch (exception) {
  1858. if (exception.name == 'QuotaExceededError') {
  1859. next.p.reject(new shaka.util.Error(
  1860. shaka.util.Error.Severity.CRITICAL,
  1861. shaka.util.Error.Category.MEDIA,
  1862. shaka.util.Error.Code.QUOTA_EXCEEDED_ERROR,
  1863. contentType));
  1864. } else if (!this.isStreamingAllowed()) {
  1865. next.p.reject(new shaka.util.Error(
  1866. shaka.util.Error.Severity.CRITICAL,
  1867. shaka.util.Error.Category.MEDIA,
  1868. shaka.util.Error.Code.STREAMING_NOT_ALLOWED,
  1869. contentType));
  1870. } else {
  1871. next.p.reject(new shaka.util.Error(
  1872. shaka.util.Error.Severity.CRITICAL,
  1873. shaka.util.Error.Category.MEDIA,
  1874. shaka.util.Error.Code.MEDIA_SOURCE_OPERATION_THREW,
  1875. exception,
  1876. this.video_.error || 'No error in the media element',
  1877. next.uri));
  1878. }
  1879. this.popFromQueue_(contentType);
  1880. }
  1881. }
  1882. }
  /**
   * Gets the current text displayer.
   * @return {!shaka.extern.TextDisplayer}
   */
  getTextDisplayer() {
    goog.asserts.assert(
        this.textDisplayer_,
        'TextDisplayer should only be null when this is destroyed');
    return this.textDisplayer_;
  }
  1892. /**
  1893. * @param {!shaka.extern.TextDisplayer} textDisplayer
  1894. */
  1895. setTextDisplayer(textDisplayer) {
  1896. this.textDisplayer_ = textDisplayer;
  1897. if (this.textEngine_) {
  1898. this.textEngine_.setDisplayer(textDisplayer);
  1899. }
  1900. }
  /**
   * Sets the segment-relative VTT timing flag.  This only records the
   * value; consumers read this.segmentRelativeVttTiming_ later.
   * @param {boolean} segmentRelativeVttTiming
   */
  setSegmentRelativeVttTiming(segmentRelativeVttTiming) {
    this.segmentRelativeVttTiming_ = segmentRelativeVttTiming;
  }
  /**
   * Apply platform-specific transformations to this segment to work around
   * issues in the platform.
   *
   * @param {shaka.extern.Stream} stream
   * @param {!BufferSource} segment
   * @param {?shaka.media.SegmentReference} reference Null for init segments.
   * @param {shaka.util.ManifestParserUtils.ContentType} contentType
   * @return {!BufferSource} The (possibly rewritten) segment data.
   * @private
   */
  workAroundBrokenPlatforms_(stream, segment, reference, contentType) {
    const ContentType = shaka.util.ManifestParserUtils.ContentType;
    // All workarounds below rewrite MP4 boxes, so non-MP4 content passes
    // through untouched.
    const isMp4 = shaka.util.MimeUtils.getContainerType(
        this.sourceBufferTypes_.get(contentType)) == 'mp4';
    if (!isMp4) {
      return segment;
    }
    // A null reference means this is an init segment.
    const isInitSegment = reference === null;
    const encryptionExpected = this.expectedEncryption_.get(contentType);
    const keySystem = this.playerInterface_.getKeySystem();
    let isEncrypted = false;
    if (reference && reference.initSegmentReference) {
      isEncrypted = reference.initSegmentReference.encrypted;
    }
    const uri = reference ? reference.getUris()[0] : null;
    const device = shaka.device.DeviceFactory.getDevice();
    // Optionally correct 'enca' boxes in audio init segments.
    if (this.config_.correctEc3Enca &&
        isInitSegment &&
        contentType === ContentType.AUDIO) {
      segment = shaka.media.ContentWorkarounds.correctEnca(segment);
    }
    // If:
    //   1. the configuration tells to insert fake encryption,
    //   2. and this is an init segment or media segment,
    //   3. and encryption is expected,
    //   4. and the platform requires encryption in all init or media segments
    //      of current content type,
    // then insert fake encryption metadata for init segments that lack it.
    // The MP4 requirement is because we can currently only do this
    // transformation on MP4 containers.
    // See: https://github.com/shaka-project/shaka-player/issues/2759
    if (this.config_.insertFakeEncryptionInInit && encryptionExpected &&
        device.requiresEncryptionInfoInAllInitSegments(keySystem,
            contentType)) {
      if (isInitSegment) {
        shaka.log.debug(
            'Forcing fake encryption information in init segment.');
        segment =
            shaka.media.ContentWorkarounds.fakeEncryption(
                stream, segment, uri);
      } else if (!isEncrypted && device.requiresTfhdFix(contentType)) {
        shaka.log.debug(
            'Forcing fake encryption information in media segment.');
        segment = shaka.media.ContentWorkarounds.fakeMediaEncryption(segment);
      }
    }
    // Some devices require EC-3 boxes to be present in every init segment.
    if (isInitSegment && device.requiresEC3InitSegments()) {
      shaka.log.debug('Forcing fake EC-3 information in init segment.');
      segment = shaka.media.ContentWorkarounds.fakeEC3(segment);
    }
    return segment;
  }
  1968. /**
  1969. * Prepare the SourceBuffer to parse a potentially new type or codec.
  1970. *
  1971. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  1972. * @param {string} mimeType
  1973. * @param {?shaka.extern.Transmuxer} transmuxer
  1974. * @private
  1975. */
  1976. change_(contentType, mimeType, transmuxer) {
  1977. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  1978. if (contentType === ContentType.TEXT) {
  1979. shaka.log.debug(`Change not supported for ${contentType}`);
  1980. return;
  1981. }
  1982. const sourceBuffer = this.sourceBufferTypes_.get(contentType);
  1983. shaka.log.debug(
  1984. `Change Type: ${sourceBuffer} -> ${mimeType}`);
  1985. if (shaka.media.Capabilities.isChangeTypeSupported()) {
  1986. if (this.transmuxers_.has(contentType)) {
  1987. this.transmuxers_.get(contentType).destroy();
  1988. this.transmuxers_.delete(contentType);
  1989. }
  1990. if (transmuxer) {
  1991. this.transmuxers_.set(contentType, transmuxer);
  1992. }
  1993. const type = this.addExtraFeaturesToMimeType_(mimeType);
  1994. this.sourceBuffers_.get(contentType).changeType(type);
  1995. this.sourceBufferTypes_.set(contentType, mimeType);
  1996. } else {
  1997. shaka.log.debug('Change Type not supported');
  1998. }
  1999. // Fake an 'updateend' event to resolve the operation.
  2000. this.onUpdateEnd_(contentType);
  2001. }
  2002. /**
  2003. * Enqueue an operation to prepare the SourceBuffer to parse a potentially new
  2004. * type or codec.
  2005. *
  2006. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  2007. * @param {string} mimeType
  2008. * @param {?shaka.extern.Transmuxer} transmuxer
  2009. * @return {!Promise}
  2010. */
  2011. changeType(contentType, mimeType, transmuxer) {
  2012. return this.enqueueOperation_(
  2013. contentType,
  2014. () => this.change_(contentType, mimeType, transmuxer),
  2015. null);
  2016. }
  /**
   * Resets the MediaSource and re-adds source buffers due to codec mismatch.
   * Tears down all SourceBuffers and transmuxers, creates a fresh
   * MediaSource, and re-initializes one SourceBuffer per stream, restoring
   * the previous duration, playhead position, and play/autoplay state.
   *
   * @param {!Map<shaka.util.ManifestParserUtils.ContentType,
   *               shaka.extern.Stream>} streamsByType
   * @private
   */
  async reset_(streamsByType) {
    // Re-entrant resets and remote playback (where we don't own the
    // pipeline) are no-ops.
    if (this.reloadingMediaSource_ || this.usingRemotePlayback_) {
      return;
    }
    const ContentType = shaka.util.ManifestParserUtils.ContentType;
    this.reloadingMediaSource_ = true;
    this.needSplitMuxedContent_ = false;
    // Remember the playhead so we can restore it after the reload.
    const currentTime = this.video_.currentTime;
    // When codec switching if the user is currently paused we don't want
    // to trigger a play when switching codec.
    // Playing can also end up in a paused state after a codec switch
    // so we need to remember the current states.
    const previousAutoPlayState = this.video_.autoplay;
    if (!this.video_.paused) {
      this.playAfterReset_ = true;
    }
    if (this.playbackHasBegun_) {
      // Only set autoplay to false if the video playback has already begun.
      // When a codec switch happens before playback has begun this can cause
      // autoplay not to work as expected.
      this.video_.autoplay = false;
    }
    try {
      this.eventManager_.removeAll();
      // Tear down everything attached to the old MediaSource.
      for (const transmuxer of this.transmuxers_.values()) {
        transmuxer.destroy();
      }
      for (const sourceBuffer of this.sourceBuffers_.values()) {
        try {
          this.mediaSource_.removeSourceBuffer(sourceBuffer);
        } catch (e) {
          // Best-effort removal; the old MediaSource is being discarded.
          shaka.log.debug('Exception on removeSourceBuffer', e);
        }
      }
      this.transmuxers_.clear();
      this.sourceBuffers_.clear();
      const previousDuration = this.mediaSource_.duration;
      // Create a fresh MediaSource and wait for it to open.
      this.mediaSourceOpen_ = new shaka.util.PublicPromise();
      this.mediaSource_ = this.createMediaSource(this.mediaSourceOpen_);
      await this.mediaSourceOpen_;
      // Carry the duration over; fall back to the last-known duration if
      // the old one was NaN or zero.
      if (!isNaN(previousDuration) && previousDuration) {
        this.mediaSource_.duration = previousDuration;
      } else if (!isNaN(this.lastDuration_) && this.lastDuration_) {
        this.mediaSource_.duration = this.lastDuration_;
      }
      // Wait until every expected SourceBuffer has been added before
      // considering the reset complete.
      const sourceBufferAdded = new shaka.util.PublicPromise();
      const sourceBuffers =
          /** @type {EventTarget} */(this.mediaSource_.sourceBuffers);
      const totalOfBuffers = streamsByType.size;
      let numberOfSourceBufferAdded = 0;
      const onSourceBufferAdded = () => {
        numberOfSourceBufferAdded++;
        if (numberOfSourceBufferAdded === totalOfBuffers) {
          sourceBufferAdded.resolve();
          this.eventManager_.unlisten(sourceBuffers, 'addsourcebuffer',
              onSourceBufferAdded);
        }
      };
      this.eventManager_.listen(sourceBuffers, 'addsourcebuffer',
          onSourceBufferAdded);
      for (const contentType of streamsByType.keys()) {
        const stream = streamsByType.get(contentType);
        this.initSourceBuffer_(contentType, stream, stream.codecs);
      }
      const audio = streamsByType.get(ContentType.AUDIO);
      if (audio && audio.isAudioMuxedInVideo) {
        this.needSplitMuxedContent_ = true;
      }
      // Muxed-in audio needs its own operation queue even though it shares
      // the video SourceBuffer.
      if (this.needSplitMuxedContent_ &&
          !this.queues_.has(ContentType.AUDIO)) {
        this.queues_.set(ContentType.AUDIO, []);
      }
      // Fake a seek to catchup the playhead.
      this.video_.currentTime = currentTime;
      await sourceBufferAdded;
    } finally {
      this.reloadingMediaSource_ = false;
      this.destroyer_.ensureNotDestroyed();
      // Restore autoplay/play state once the element can play through.
      this.eventManager_.listenOnce(this.video_, 'canplaythrough', () => {
        // Don't use ensureNotDestroyed() from this event listener, because
        // that results in an uncaught exception. Instead, just check the
        // flag.
        if (this.destroyer_.destroyed()) {
          return;
        }
        this.video_.autoplay = previousAutoPlayState;
        if (this.playAfterReset_) {
          this.playAfterReset_ = false;
          this.video_.play();
        }
      });
    }
  }
  2116. /**
  2117. * Resets the Media Source
  2118. * @param {!Map<shaka.util.ManifestParserUtils.ContentType,
  2119. * shaka.extern.Stream>} streamsByType
  2120. * @return {!Promise}
  2121. */
  2122. reset(streamsByType) {
  2123. return this.enqueueBlockingOperation_(
  2124. () => this.reset_(streamsByType));
  2125. }
  /**
   * Computes the effective mime type/codec info for a stream, accounting
   * for transmuxing when the platform cannot handle the content natively.
   *
   * @param {shaka.util.ManifestParserUtils.ContentType} contentType
   * @param {string} mimeType
   * @param {string} codecs
   * @return {{transmuxer: ?shaka.extern.Transmuxer,
   *           transmuxerMuxed: boolean, basicType: string, codec: string,
   *           mimeType: string}} The caller owns the returned transmuxer
   *          (if any) and must destroy it when unused.
   * @private
   */
  getRealInfo_(contentType, mimeType, codecs) {
    const ContentType = shaka.util.ManifestParserUtils.ContentType;
    const MimeUtils = shaka.util.MimeUtils;
    /** @type {?shaka.extern.Transmuxer} */
    let transmuxer;
    let transmuxerMuxed = false;
    // The codecs string may contain both audio and video codecs; pick the
    // one matching this content type.
    const audioCodec = shaka.util.ManifestParserUtils.guessCodecsSafe(
        ContentType.AUDIO, (codecs || '').split(','));
    const videoCodec = shaka.util.ManifestParserUtils.guessCodecsSafe(
        ContentType.VIDEO, (codecs || '').split(','));
    let codec = videoCodec;
    if (contentType == ContentType.AUDIO) {
      codec = audioCodec;
    }
    if (!codec) {
      // Fall back to the raw codecs string if no guess succeeded.
      codec = codecs;
    }
    let newMimeType = shaka.util.MimeUtils.getFullType(mimeType, codec);
    const currentBasicType = MimeUtils.getBasicType(
        this.sourceBufferTypes_.get(contentType));
    // Transmux when forced by config, when the platform can't play the
    // type, or when it's a raw format we only support in sequence mode.
    let needTransmux = this.config_.forceTransmux;
    if (!shaka.media.Capabilities.isTypeSupported(newMimeType) ||
        (!this.sequenceMode_ &&
        shaka.util.MimeUtils.RAW_FORMATS.includes(newMimeType))) {
      needTransmux = true;
    } else if (!needTransmux && mimeType != currentBasicType) {
      // On WebKit, raw formats also need transmuxing when the basic type
      // changes.
      const device = shaka.device.DeviceFactory.getDevice();
      needTransmux = device.getBrowserEngine() ===
          shaka.device.IDevice.BrowserEngine.WEBKIT &&
          shaka.util.MimeUtils.RAW_FORMATS.includes(mimeType);
    }
    const TransmuxerEngine = shaka.transmuxer.TransmuxerEngine;
    if (needTransmux) {
      const newMimeTypeWithAllCodecs =
          shaka.util.MimeUtils.getFullTypeWithAllCodecs(mimeType, codec);
      const transmuxerPlugin =
          TransmuxerEngine.findTransmuxer(newMimeTypeWithAllCodecs);
      if (transmuxerPlugin) {
        transmuxer = transmuxerPlugin();
        // Both codecs present means the transmuxer output is muxed content.
        if (audioCodec && videoCodec) {
          transmuxerMuxed = true;
        }
        // The transmuxer may change the container/codecs it outputs.
        newMimeType =
            transmuxer.convertCodecs(contentType, newMimeTypeWithAllCodecs);
      }
    }
    const newCodec = MimeUtils.getNormalizedCodec(
        MimeUtils.getCodecs(newMimeType));
    const newBasicType = MimeUtils.getBasicType(newMimeType);
    return {
      transmuxer,
      transmuxerMuxed,
      basicType: newBasicType,
      codec: newCodec,
      mimeType: newMimeType,
    };
  }
  /**
   * Determines how to adapt to a new mime type/codec: no action, a
   * changeType() call, or a full MediaSource reset.  This is synchronous
   * and performs no switch itself.  (The previous summary was copied from
   * the async codec-switch method and was misleading.)
   *
   * @param {shaka.util.ManifestParserUtils.ContentType} contentType
   * @param {string} mimeType
   * @param {string} codecs
   * @param {!Map<shaka.util.ManifestParserUtils.ContentType,
   *               shaka.extern.Stream>} streamsByType
   * @return {{type: string, newMimeType: string,
   *           transmuxer: ?shaka.extern.Transmuxer}} The caller owns the
   *          returned transmuxer (if any) and must destroy it when unused.
   * @private
   */
  getInfoAboutResetOrChangeType_(contentType, mimeType, codecs, streamsByType) {
    const ContentType = shaka.util.ManifestParserUtils.ContentType;
    if (contentType == ContentType.TEXT) {
      // Text never needs a SourceBuffer change or a reset.
      return {
        type: shaka.media.MediaSourceEngine.ResetMode_.NONE,
        newMimeType: mimeType,
        transmuxer: null,
      };
    }
    const MimeUtils = shaka.util.MimeUtils;
    const currentCodec = MimeUtils.getNormalizedCodec(
        MimeUtils.getCodecs(this.sourceBufferTypes_.get(contentType)));
    const currentBasicType = MimeUtils.getBasicType(
        this.sourceBufferTypes_.get(contentType));
    const realInfo = this.getRealInfo_(contentType, mimeType, codecs);
    const transmuxer = realInfo.transmuxer;
    const transmuxerMuxed = realInfo.transmuxerMuxed;
    const newBasicType = realInfo.basicType;
    const newCodec = realInfo.codec;
    const newMimeType = realInfo.mimeType;
    // For muxed output, the audio SourceBuffer must also still match.
    let muxedContentCheck = true;
    if (transmuxerMuxed &&
        this.sourceBufferTypes_.has(ContentType.AUDIO)) {
      const muxedRealInfo =
          this.getRealInfo_(ContentType.AUDIO, mimeType, codecs);
      const muxedCurrentCodec = MimeUtils.getNormalizedCodec(
          MimeUtils.getCodecs(this.sourceBufferTypes_.get(ContentType.AUDIO)));
      const muxedCurrentBasicType = MimeUtils.getBasicType(
          this.sourceBufferTypes_.get(ContentType.AUDIO));
      muxedContentCheck = muxedCurrentCodec == muxedRealInfo.codec &&
          muxedCurrentBasicType == muxedRealInfo.basicType;
      // The secondary info's transmuxer is only used for the comparison.
      if (muxedRealInfo.transmuxer) {
        muxedRealInfo.transmuxer.destroy();
      }
    }
    // Current/new codecs base and basic type match then no need to switch
    if (currentCodec === newCodec && currentBasicType === newBasicType &&
        muxedContentCheck) {
      return {
        type: shaka.media.MediaSourceEngine.ResetMode_.NONE,
        newMimeType,
        transmuxer,
      };
    }
    // changeType() is not viable when muxed content lost its audio stream
    // or when a new muxed transmuxer has no established counterpart.
    let allowChangeType = true;
    if ((this.needSplitMuxedContent_ &&
        !streamsByType.has(ContentType.AUDIO)) || (transmuxerMuxed &&
        transmuxer && !this.transmuxers_.has(contentType))) {
      allowChangeType = false;
    }
    if (allowChangeType && this.config_.codecSwitchingStrategy ===
        shaka.config.CodecSwitchingStrategy.SMOOTH &&
        shaka.media.Capabilities.isChangeTypeSupported()) {
      return {
        type: shaka.media.MediaSourceEngine.ResetMode_.CHANGE_TYPE,
        newMimeType,
        transmuxer,
      };
    } else {
      // A full reset re-creates everything, so the transmuxer is not needed.
      if (transmuxer) {
        transmuxer.destroy();
      }
      return {
        type: shaka.media.MediaSourceEngine.ResetMode_.RESET,
        newMimeType,
        transmuxer: null,
      };
    }
  }
  /**
   * Codec switch if necessary, this will not resolve until the codec
   * switch is over.
   * @param {shaka.util.ManifestParserUtils.ContentType} contentType
   * @param {string} mimeType
   * @param {string} codecs
   * @param {!Map<shaka.util.ManifestParserUtils.ContentType,
   *               shaka.extern.Stream>} streamsByType
   * @return {!Promise<boolean>} true if there was a codec switch,
   *                             false otherwise.
   * @private
   */
  async codecSwitchIfNecessary_(contentType, mimeType, codecs, streamsByType) {
    const ContentType = shaka.util.ManifestParserUtils.ContentType;
    const {type, transmuxer, newMimeType} = this.getInfoAboutResetOrChangeType_(
        contentType, mimeType, codecs, streamsByType);
    const newAudioStream = streamsByType.get(ContentType.AUDIO);
    if (newAudioStream) {
      this.needSplitMuxedContent_ = newAudioStream.isAudioMuxedInVideo;
    }
    if (type == shaka.media.MediaSourceEngine.ResetMode_.NONE) {
      // No SourceBuffer change is needed, but the stored transmuxer may
      // still need reconciling with the one the new info produced.
      if (this.transmuxers_.has(contentType) && !transmuxer) {
        // Transmuxing is no longer needed.
        this.transmuxers_.get(contentType).destroy();
        this.transmuxers_.delete(contentType);
      } else if (!this.transmuxers_.has(contentType) && transmuxer) {
        // Transmuxing is newly needed.
        this.transmuxers_.set(contentType, transmuxer);
      } else if (transmuxer) {
        // Compare if the transmuxer is different
        if (this.transmuxers_.has(contentType) &&
            this.transmuxers_.get(contentType).transmux !==
            transmuxer.transmux) {
          this.transmuxers_.get(contentType).destroy();
          this.transmuxers_.set(contentType, transmuxer);
        } else {
          // Same transmuxer; the new instance is redundant.
          transmuxer.destroy();
        }
      }
      return false;
    }
    if (type == shaka.media.MediaSourceEngine.ResetMode_.CHANGE_TYPE) {
      await this.changeType(contentType, newMimeType, transmuxer);
    } else if (type == shaka.media.MediaSourceEngine.ResetMode_.RESET) {
      // reset() rebuilds all SourceBuffers, so the transmuxer is not kept.
      if (transmuxer) {
        transmuxer.destroy();
      }
      await this.reset(streamsByType);
    }
    return true;
  }
  /**
   * Returns true if it's necessary reset the media source to load the
   * new stream.
   *
   * @param {shaka.util.ManifestParserUtils.ContentType} contentType
   * @param {string} mimeType
   * @param {string} codecs
   * @param {!Map<shaka.util.ManifestParserUtils.ContentType,
   *               shaka.extern.Stream>} streamsByType
   * @return {boolean}
   */
  isResetMediaSourceNecessary(contentType, mimeType, codecs, streamsByType) {
    const info = this.getInfoAboutResetOrChangeType_(
        contentType, mimeType, codecs, streamsByType);
    // The returned transmuxer is owned by us and unused here; destroy it.
    if (info.transmuxer) {
      info.transmuxer.destroy();
    }
    return info.type == shaka.media.MediaSourceEngine.ResetMode_.RESET;
  }
  /**
   * Update LCEVC Decoder object when ready for LCEVC Decode.
   * The decoder is fed from appendDependency() on each append.
   * @param {?shaka.lcevc.Dec} lcevcDec Null to clear the decoder.
   */
  updateLcevcDec(lcevcDec) {
    this.lcevcDec_ = lcevcDec;
  }
  2346. /**
  2347. * @param {string} mimeType
  2348. * @return {string}
  2349. * @private
  2350. */
  2351. addExtraFeaturesToMimeType_(mimeType) {
  2352. const extraFeatures = this.config_.addExtraFeaturesToSourceBuffer(mimeType);
  2353. const extendedType = mimeType + extraFeatures;
  2354. shaka.log.debug('Using full mime type', extendedType);
  2355. return extendedType;
  2356. }
  2357. };
/**
 * Internal reference to window.URL.createObjectURL function to avoid
 * compatibility issues with other libraries and frameworks such as React
 * Native. For use in unit tests only, not meant for external use.
 * Captured at load time so later monkey-patching does not affect us.
 *
 * @type {function(?):string}
 */
shaka.media.MediaSourceEngine.createObjectURL = window.URL.createObjectURL;
/**
 * @typedef {{
 *   start: function(),
 *   p: !shaka.util.PublicPromise,
 *   uri: ?string
 * }}
 *
 * @summary An operation in one of the per-content-type queues.
 * @property {function()} start
 *   The function which starts the operation.
 * @property {!shaka.util.PublicPromise} p
 *   The PublicPromise which is associated with this operation; resolved or
 *   rejected when the operation completes.
 * @property {?string} uri
 *   A segment URI (if any) associated with this operation, used in error
 *   reporting.
 */
shaka.media.MediaSourceEngine.Operation;
/**
 * MSE SourceBuffer.mode values.
 * @enum {string}
 * @private
 */
shaka.media.MediaSourceEngine.SourceBufferMode_ = {
  SEQUENCE: 'sequence',
  SEGMENTS: 'segments',
};
/**
 * How to adapt to a new mime type/codec.
 * @enum {string}
 * @private
 */
shaka.media.MediaSourceEngine.ResetMode_ = {
  NONE: 'none',              // No action needed.
  RESET: 'reset',            // Full MediaSource reset.
  CHANGE_TYPE: 'changeType', // SourceBuffer.changeType() is sufficient.
};
/**
 * @typedef {{
 *   getKeySystem: function():?string,
 *   onMetadata: function(!Array<shaka.extern.ID3Metadata>, number, ?number),
 *   onEmsg: function(!shaka.extern.EmsgInfo),
 *   onEvent: function(!Event),
 *   onManifestUpdate: function()
 * }}
 *
 * @summary Callbacks the player provides to MediaSourceEngine.
 * @property {function():?string} getKeySystem
 *   Gets currently used key system or null if not used.
 * @property {function(
 *     !Array<shaka.extern.ID3Metadata>, number, ?number)} onMetadata
 *   Callback to use when ID3 metadata arrives.
 * @property {function(!shaka.extern.EmsgInfo)} onEmsg
 *   Callback to use when an EMSG box arrives.
 * @property {function(!Event)} onEvent
 *   Called when an event occurs that should be sent to the app.
 * @property {function()} onManifestUpdate
 *   Called when an embedded 'emsg' box should trigger a manifest update.
 */
shaka.media.MediaSourceEngine.PlayerInterface;