Source: lib/media/media_source_engine.js

  1. /*! @license
  2. * Shaka Player
  3. * Copyright 2016 Google LLC
  4. * SPDX-License-Identifier: Apache-2.0
  5. */
  6. goog.provide('shaka.media.MediaSourceEngine');
  7. goog.require('goog.asserts');
  8. goog.require('shaka.log');
  9. goog.require('shaka.config.CodecSwitchingStrategy');
  10. goog.require('shaka.device.DeviceFactory');
  11. goog.require('shaka.media.Capabilities');
  12. goog.require('shaka.media.ContentWorkarounds');
  13. goog.require('shaka.media.ClosedCaptionParser');
  14. goog.require('shaka.media.IClosedCaptionParser');
  15. goog.require('shaka.media.ManifestParser');
  16. goog.require('shaka.media.SegmentReference');
  17. goog.require('shaka.media.TimeRangesUtils');
  18. goog.require('shaka.text.TextEngine');
  19. goog.require('shaka.transmuxer.TransmuxerEngine');
  20. goog.require('shaka.util.BufferUtils');
  21. goog.require('shaka.util.Destroyer');
  22. goog.require('shaka.util.Dom');
  23. goog.require('shaka.util.Error');
  24. goog.require('shaka.util.EventManager');
  25. goog.require('shaka.util.FakeEvent');
  26. goog.require('shaka.util.Functional');
  27. goog.require('shaka.util.IDestroyable');
  28. goog.require('shaka.util.Id3Utils');
  29. goog.require('shaka.util.ManifestParserUtils');
  30. goog.require('shaka.util.MimeUtils');
  31. goog.require('shaka.util.Mp4BoxParsers');
  32. goog.require('shaka.util.Mp4Parser');
  33. goog.require('shaka.util.PublicPromise');
  34. goog.require('shaka.util.StreamUtils');
  35. goog.require('shaka.util.TimeUtils');
  36. goog.require('shaka.util.TsParser');
  37. goog.require('shaka.lcevc.Dec');
  38. /**
  39. * @summary
  40. * MediaSourceEngine wraps all operations on MediaSource and SourceBuffers.
  41. * All asynchronous operations return a Promise, and all operations are
  42. * internally synchronized and serialized as needed. Operations that can
  43. * be done in parallel will be done in parallel.
  44. *
  45. * @implements {shaka.util.IDestroyable}
  46. */
  47. shaka.media.MediaSourceEngine = class {
  /**
   * @param {HTMLMediaElement} video The video element, whose source is tied to
   *   MediaSource during the lifetime of the MediaSourceEngine.
   * @param {!shaka.extern.TextDisplayer} textDisplayer
   *   The text displayer that will be used with the text engine.
   *   MediaSourceEngine takes ownership of the displayer. When
   *   MediaSourceEngine is destroyed, it will destroy the displayer.
   * @param {!shaka.media.MediaSourceEngine.PlayerInterface} playerInterface
   *   Interface for common player methods.
   * @param {shaka.extern.MediaSourceConfiguration} config
   * @param {?shaka.lcevc.Dec} [lcevcDec] Optional - LCEVC Decoder Object
   */
  constructor(video, textDisplayer, playerInterface, config, lcevcDec) {
    /** @private {HTMLMediaElement} */
    this.video_ = video;

    /** @private {?shaka.media.MediaSourceEngine.PlayerInterface} */
    this.playerInterface_ = playerInterface;

    /** @private {?shaka.extern.MediaSourceConfiguration} */
    this.config_ = config;

    /** @private {shaka.extern.TextDisplayer} */
    this.textDisplayer_ = textDisplayer;

    /**
     * SourceBuffers keyed by content type (audio/video/...).
     * @private {!Map<shaka.util.ManifestParserUtils.ContentType, SourceBuffer>}
     */
    this.sourceBuffers_ = new Map();

    /**
     * The full MIME type each SourceBuffer was created with.
     * @private {!Map<shaka.util.ManifestParserUtils.ContentType, string>}
     */
    this.sourceBufferTypes_ = new Map();

    /**
     * Whether each content type's stream carries DRM info.
     * @private {!Map<shaka.util.ManifestParserUtils.ContentType,
     *                boolean>}
     */
    this.expectedEncryption_ = new Map();

    /** @private {shaka.text.TextEngine} */
    this.textEngine_ = null;

    /** @private {boolean} */
    this.segmentRelativeVttTiming_ = false;

    /** @private {?shaka.lcevc.Dec} */
    this.lcevcDec_ = lcevcDec || null;

    /**
     * Per-content-type queues of pending SourceBuffer operations; see
     * doDestroy_() for how in-progress items are drained.
     * @private {!Map<string, !Array<shaka.media.MediaSourceEngine.Operation>>}
     */
    this.queues_ = new Map();

    /** @private {shaka.util.EventManager} */
    this.eventManager_ = new shaka.util.EventManager();

    /**
     * @private {!Map<shaka.util.ManifestParserUtils.ContentType,
                      !shaka.extern.Transmuxer>} */
    this.transmuxers_ = new Map();

    /** @private {?shaka.media.IClosedCaptionParser} */
    this.captionParser_ = null;

    /** @private {!shaka.util.PublicPromise} */
    this.mediaSourceOpen_ = new shaka.util.PublicPromise();

    // Object URL for the MediaSource; revoked in onSourceOpen_().
    /** @private {string} */
    this.url_ = '';

    /** @private {boolean} */
    this.playbackHasBegun_ = false;

    // ManagedMediaSource can toggle this via startstreaming/endstreaming.
    /** @private {boolean} */
    this.streamingAllowed_ = true;

    /** @private {boolean} */
    this.usingRemotePlayback_ = false;

    /** @private {HTMLSourceElement} */
    this.source_ = null;

    /**
     * Fallback source element with direct media URI, used for casting
     * purposes.
     * @private {HTMLSourceElement}
     */
    this.secondarySource_ = null;

    // NOTE: createMediaSource() reads video_, config_, eventManager_,
    // secondarySource_, and streamingAllowed_, so those fields must be
    // initialized before this line.
    /** @private {MediaSource} */
    this.mediaSource_ = this.createMediaSource(this.mediaSourceOpen_);

    /** @private {boolean} */
    this.reloadingMediaSource_ = false;

    // Presumably consulted by reload/reset logic outside this chunk — confirm.
    /** @private {boolean} */
    this.playAfterReset_ = false;

    /** @type {!shaka.util.Destroyer} */
    this.destroyer_ = new shaka.util.Destroyer(() => this.doDestroy_());

    /** @private {boolean} */
    this.sequenceMode_ = false;

    /** @private {string} */
    this.manifestType_ = shaka.media.ManifestParser.UNKNOWN;

    /** @private {boolean} */
    this.ignoreManifestTimestampsInSegmentsMode_ = false;

    /** @private {boolean} */
    this.attemptTimestampOffsetCalculation_ = false;

    /** @private {!shaka.util.PublicPromise<number>} */
    this.textSequenceModeOffset_ = new shaka.util.PublicPromise();

    /** @private {boolean} */
    this.needSplitMuxedContent_ = false;

    // Last duration set on the MediaSource (not shown in this chunk) — verify.
    /** @private {?number} */
    this.lastDuration_ = null;

    /**
     * One TS parser per content type; reset in init().
     * @private {!Map<shaka.util.ManifestParserUtils.ContentType,
     *                !shaka.util.TsParser>}
     */
    this.tsParsers_ = new Map();

    /** @private {?number} */
    this.firstVideoTimestamp_ = null;

    /** @private {?number} */
    this.firstVideoReferenceStartTime_ = null;

    /** @private {?number} */
    this.firstAudioTimestamp_ = null;

    /** @private {?number} */
    this.firstAudioReferenceStartTime_ = null;

    /** @private {!shaka.util.PublicPromise<number>} */
    this.audioCompensation_ = new shaka.util.PublicPromise();

    // Track remote (e.g. cast) playback state; buffered ranges and streaming
    // decisions differ while a remote session is active or being established.
    if (this.video_.remote) {
      this.usingRemotePlayback_ = this.video_.remote.state != 'disconnected';

      this.eventManager_.listen(this.video_.remote, 'connect', () => {
        this.usingRemotePlayback_ = this.video_.remote.state != 'disconnected';
      });

      this.eventManager_.listen(this.video_.remote, 'connecting', () => {
        this.usingRemotePlayback_ = this.video_.remote.state != 'disconnected';
      });

      this.eventManager_.listen(this.video_.remote, 'disconnect', () => {
        this.usingRemotePlayback_ = this.video_.remote.state != 'disconnected';
      });
    }
  }
  168. /**
  169. * Create a MediaSource object, attach it to the video element, and return it.
  170. * Resolves the given promise when the MediaSource is ready.
  171. *
  172. * Replaced by unit tests.
  173. *
  174. * @param {!shaka.util.PublicPromise} p
  175. * @return {!MediaSource}
  176. */
  177. createMediaSource(p) {
  178. this.streamingAllowed_ = true;
  179. /** @type {!MediaSource} */
  180. let mediaSource;
  181. if (window.ManagedMediaSource) {
  182. if (!this.secondarySource_) {
  183. this.video_.disableRemotePlayback = true;
  184. }
  185. mediaSource = new ManagedMediaSource();
  186. this.eventManager_.listen(
  187. mediaSource, 'startstreaming', () => {
  188. shaka.log.info('MMS startstreaming');
  189. this.streamingAllowed_ = true;
  190. });
  191. this.eventManager_.listen(
  192. mediaSource, 'endstreaming', () => {
  193. shaka.log.info('MMS endstreaming');
  194. this.streamingAllowed_ = false;
  195. });
  196. } else {
  197. mediaSource = new MediaSource();
  198. }
  199. // Set up MediaSource on the video element.
  200. this.eventManager_.listenOnce(
  201. mediaSource, 'sourceopen', () => this.onSourceOpen_(p));
  202. // Correctly set when playback has begun.
  203. this.eventManager_.listenOnce(this.video_, 'playing', () => {
  204. this.playbackHasBegun_ = true;
  205. });
  206. // Store the object URL for releasing it later.
  207. this.url_ = shaka.media.MediaSourceEngine.createObjectURL(mediaSource);
  208. if (this.config_.useSourceElements) {
  209. this.video_.removeAttribute('src');
  210. if (this.source_) {
  211. this.video_.removeChild(this.source_);
  212. }
  213. if (this.secondarySource_) {
  214. this.video_.removeChild(this.secondarySource_);
  215. }
  216. this.source_ = shaka.util.Dom.createSourceElement(this.url_);
  217. this.video_.appendChild(this.source_);
  218. if (this.secondarySource_) {
  219. this.video_.appendChild(this.secondarySource_);
  220. }
  221. this.video_.load();
  222. } else {
  223. this.video_.src = this.url_;
  224. }
  225. return mediaSource;
  226. }
  227. /**
  228. * @param {string} uri
  229. * @param {string} mimeType
  230. */
  231. addSecondarySource(uri, mimeType) {
  232. if (!this.video_ || !window.ManagedMediaSource || !this.mediaSource_) {
  233. shaka.log.warning(
  234. 'Secondary source is used only with ManagedMediaSource');
  235. return;
  236. }
  237. if (!this.config_.useSourceElements) {
  238. return;
  239. }
  240. if (this.secondarySource_) {
  241. this.video_.removeChild(this.secondarySource_);
  242. }
  243. this.secondarySource_ = shaka.util.Dom.createSourceElement(uri, mimeType);
  244. this.video_.appendChild(this.secondarySource_);
  245. this.video_.disableRemotePlayback = false;
  246. }
  247. /**
  248. * @param {shaka.util.PublicPromise} p
  249. * @private
  250. */
  251. onSourceOpen_(p) {
  252. goog.asserts.assert(this.url_, 'Must have object URL');
  253. // Release the object URL that was previously created, to prevent memory
  254. // leak.
  255. // createObjectURL creates a strong reference to the MediaSource object
  256. // inside the browser. Setting the src of the video then creates another
  257. // reference within the video element. revokeObjectURL will remove the
  258. // strong reference to the MediaSource object, and allow it to be
  259. // garbage-collected later.
  260. URL.revokeObjectURL(this.url_);
  261. p.resolve();
  262. }
  263. /**
  264. * Returns a map of MediaSource support for well-known types.
  265. *
  266. * @return {!Object<string, boolean>}
  267. */
  268. static probeSupport() {
  269. const testMimeTypes = [
  270. // MP4 types
  271. 'video/mp4; codecs="avc1.42E01E"',
  272. 'video/mp4; codecs="avc3.42E01E"',
  273. 'video/mp4; codecs="hev1.1.6.L93.90"',
  274. 'video/mp4; codecs="hvc1.1.6.L93.90"',
  275. 'video/mp4; codecs="hev1.2.4.L153.B0"; eotf="smpte2084"', // HDR HEVC
  276. 'video/mp4; codecs="hvc1.2.4.L153.B0"; eotf="smpte2084"', // HDR HEVC
  277. 'video/mp4; codecs="vp9"',
  278. 'video/mp4; codecs="vp09.00.10.08"',
  279. 'video/mp4; codecs="av01.0.01M.08"',
  280. 'video/mp4; codecs="dvh1.05.01"',
  281. 'video/mp4; codecs="dvh1.20.01"',
  282. 'audio/mp4; codecs="mp4a.40.2"',
  283. 'audio/mp4; codecs="ac-3"',
  284. 'audio/mp4; codecs="ec-3"',
  285. 'audio/mp4; codecs="ac-4.02.01.01"',
  286. 'audio/mp4; codecs="opus"',
  287. 'audio/mp4; codecs="flac"',
  288. 'audio/mp4; codecs="dtsc"', // DTS Digital Surround
  289. 'audio/mp4; codecs="dtse"', // DTS Express
  290. 'audio/mp4; codecs="dtsx"', // DTS:X
  291. // WebM types
  292. 'video/webm; codecs="vp8"',
  293. 'video/webm; codecs="vp9"',
  294. 'video/webm; codecs="vp09.00.10.08"',
  295. 'audio/webm; codecs="vorbis"',
  296. 'audio/webm; codecs="opus"',
  297. // MPEG2 TS types (video/ is also used for audio: https://bit.ly/TsMse)
  298. 'video/mp2t; codecs="avc1.42E01E"',
  299. 'video/mp2t; codecs="avc3.42E01E"',
  300. 'video/mp2t; codecs="hvc1.1.6.L93.90"',
  301. 'video/mp2t; codecs="mp4a.40.2"',
  302. 'video/mp2t; codecs="ac-3"',
  303. 'video/mp2t; codecs="ec-3"',
  304. // WebVTT types
  305. 'text/vtt',
  306. 'application/mp4; codecs="wvtt"',
  307. // TTML types
  308. 'application/ttml+xml',
  309. 'application/mp4; codecs="stpp"',
  310. // Containerless types
  311. ...shaka.util.MimeUtils.RAW_FORMATS,
  312. ];
  313. const support = {};
  314. const device = shaka.device.DeviceFactory.getDevice();
  315. for (const type of testMimeTypes) {
  316. if (shaka.text.TextEngine.isTypeSupported(type)) {
  317. support[type] = true;
  318. } else if (device.supportsMediaSource()) {
  319. support[type] = shaka.media.Capabilities.isTypeSupported(type) ||
  320. shaka.transmuxer.TransmuxerEngine.isSupported(type);
  321. } else {
  322. support[type] = device.supportsMediaType(type);
  323. }
  324. const basicType = type.split(';')[0];
  325. support[basicType] = support[basicType] || support[type];
  326. }
  327. return support;
  328. }
  /**
   * Destroys the engine.  Safe to call repeatedly; the shared Destroyer
   * serializes requests and runs doDestroy_() only once.
   * @override
   */
  destroy() {
    return this.destroyer_.destroy();
  }
  /**
   * Actual teardown, invoked exactly once via the Destroyer.  Waits for any
   * in-progress SourceBuffer operation, rejects the rest, then releases
   * engines, listeners, DOM nodes, and references — in that order.
   * @private
   */
  async doDestroy_() {
    const Functional = shaka.util.Functional;

    const cleanup = [];

    for (const [key, q] of this.queues_) {
      // Make a local copy of the queue and the first item.
      const inProgress = q[0];
      const contentType = /** @type {string} */(key);

      // Drop everything else out of the original queue.
      this.queues_.set(contentType, q.slice(0, 1));

      // We will wait for this item to complete/fail.
      if (inProgress) {
        cleanup.push(inProgress.p.catch(Functional.noop));
      }

      // The rest will be rejected silently if possible.
      for (const item of q.slice(1)) {
        item.p.reject(shaka.util.Destroyer.destroyedError());
      }
    }

    if (this.textEngine_) {
      cleanup.push(this.textEngine_.destroy());
    }

    // Wait for in-flight work before tearing anything down.
    await Promise.all(cleanup);

    for (const transmuxer of this.transmuxers_.values()) {
      transmuxer.destroy();
    }

    if (this.eventManager_) {
      this.eventManager_.release();
      this.eventManager_ = null;
    }

    if (this.video_ && this.secondarySource_) {
      this.video_.removeChild(this.secondarySource_);
    }

    if (this.video_ && this.source_) {
      // "unload" the video element.
      this.video_.removeChild(this.source_);
      this.video_.load();
      this.video_.disableRemotePlayback = false;
    }

    this.video_ = null;
    this.source_ = null;
    this.secondarySource_ = null;
    this.config_ = null;
    this.mediaSource_ = null;
    this.textEngine_ = null;
    this.textDisplayer_ = null;
    this.sourceBuffers_.clear();
    this.expectedEncryption_.clear();
    this.transmuxers_.clear();
    this.captionParser_ = null;

    if (goog.DEBUG) {
      // By this point every queue should hold at most the (now settled)
      // in-progress item copied above; anything more indicates a logic error.
      for (const [contentType, q] of this.queues_) {
        goog.asserts.assert(
            q.length == 0,
            contentType + ' queue should be empty after destroy!');
      }
    }

    this.queues_.clear();

    // This object is owned by Player
    this.lcevcDec_ = null;

    this.tsParsers_.clear();
    this.playerInterface_ = null;
  }
  /**
   * @return {!Promise} Resolved when MediaSource is open and attached to the
   *   media element.  This process is actually initiated by the constructor.
   */
  open() {
    // This PublicPromise is resolved in onSourceOpen_().
    return this.mediaSourceOpen_;
  }
  403. /**
  404. * Initialize MediaSourceEngine.
  405. *
  406. * Note that it is not valid to call this multiple times, except to add or
  407. * reinitialize text streams.
  408. *
  409. * @param {!Map<shaka.util.ManifestParserUtils.ContentType,
  410. * shaka.extern.Stream>} streamsByType
  411. * A map of content types to streams.
  412. * @param {boolean=} sequenceMode
  413. * If true, the media segments are appended to the SourceBuffer in strict
  414. * sequence.
  415. * @param {string=} manifestType
  416. * Indicates the type of the manifest.
  417. * @param {boolean=} ignoreManifestTimestampsInSegmentsMode
  418. * If true, don't adjust the timestamp offset to account for manifest
  419. * segment durations being out of sync with segment durations. In other
  420. * words, assume that there are no gaps in the segments when appending
  421. * to the SourceBuffer, even if the manifest and segment times disagree.
  422. * Indicates if the manifest has text streams.
  423. *
  424. * @return {!Promise}
  425. */
  426. async init(streamsByType, sequenceMode=false,
  427. manifestType=shaka.media.ManifestParser.UNKNOWN,
  428. ignoreManifestTimestampsInSegmentsMode=false) {
  429. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  430. await this.mediaSourceOpen_;
  431. if (this.ended() || this.closed()) {
  432. shaka.log.alwaysError('Expected MediaSource to be open during init(); ' +
  433. 'reopening the media source.');
  434. this.mediaSourceOpen_ = new shaka.util.PublicPromise();
  435. this.mediaSource_ = this.createMediaSource(this.mediaSourceOpen_);
  436. await this.mediaSourceOpen_;
  437. }
  438. this.sequenceMode_ = sequenceMode;
  439. this.manifestType_ = manifestType;
  440. this.ignoreManifestTimestampsInSegmentsMode_ =
  441. ignoreManifestTimestampsInSegmentsMode;
  442. this.attemptTimestampOffsetCalculation_ = !this.sequenceMode_ &&
  443. this.manifestType_ == shaka.media.ManifestParser.HLS &&
  444. !this.ignoreManifestTimestampsInSegmentsMode_;
  445. this.tsParsers_.clear();
  446. this.firstVideoTimestamp_ = null;
  447. this.firstVideoReferenceStartTime_ = null;
  448. this.firstAudioTimestamp_ = null;
  449. this.firstAudioReferenceStartTime_ = null;
  450. this.audioCompensation_ = new shaka.util.PublicPromise();
  451. for (const contentType of streamsByType.keys()) {
  452. const stream = streamsByType.get(contentType);
  453. this.initSourceBuffer_(contentType, stream, stream.codecs);
  454. if (this.needSplitMuxedContent_) {
  455. this.queues_.set(ContentType.AUDIO, []);
  456. this.queues_.set(ContentType.VIDEO, []);
  457. } else {
  458. this.queues_.set(contentType, []);
  459. }
  460. }
  461. const audio = streamsByType.get(ContentType.AUDIO);
  462. if (audio && audio.isAudioMuxedInVideo) {
  463. this.needSplitMuxedContent_ = true;
  464. }
  465. }
  /**
   * Initialize a specific SourceBuffer.  May recurse once to split muxed
   * audio+video codecs into two separate SourceBuffers, and may route the
   * stream through a transmuxer when the native type is unsupported.
   *
   * @param {shaka.util.ManifestParserUtils.ContentType} contentType
   * @param {shaka.extern.Stream} stream
   * @param {string} codecs
   * @private
   */
  initSourceBuffer_(contentType, stream, codecs) {
    const ContentType = shaka.util.ManifestParserUtils.ContentType;

    if (contentType == ContentType.AUDIO && codecs) {
      codecs = shaka.util.StreamUtils.getCorrectAudioCodecs(
          codecs, stream.mimeType);
    }

    let mimeType = shaka.util.MimeUtils.getFullType(
        stream.mimeType, codecs);
    if (contentType == ContentType.TEXT) {
      // Text goes through TextEngine, not a SourceBuffer.
      this.reinitText(mimeType, this.sequenceMode_, stream.external);
    } else {
      let needTransmux = this.config_.forceTransmux;
      // Raw formats in segments mode also require transmuxing, even when the
      // platform claims support for the type.
      if (!shaka.media.Capabilities.isTypeSupported(mimeType) ||
          (!this.sequenceMode_ &&
          shaka.util.MimeUtils.RAW_FORMATS.includes(mimeType))) {
        needTransmux = true;
      }
      const mimeTypeWithAllCodecs =
          shaka.util.MimeUtils.getFullTypeWithAllCodecs(
              stream.mimeType, codecs);
      if (needTransmux) {
        const audioCodec = shaka.util.ManifestParserUtils.guessCodecsSafe(
            ContentType.AUDIO, (codecs || '').split(','));
        const videoCodec = shaka.util.ManifestParserUtils.guessCodecsSafe(
            ContentType.VIDEO, (codecs || '').split(','));
        if (audioCodec && videoCodec) {
          // The codec list names both audio and video: muxed content.  Split
          // it into one SourceBuffer per type and stop here.
          this.needSplitMuxedContent_ = true;
          this.initSourceBuffer_(ContentType.AUDIO, stream, audioCodec);
          this.initSourceBuffer_(ContentType.VIDEO, stream, videoCodec);
          return;
        }
        const transmuxerPlugin = shaka.transmuxer.TransmuxerEngine
            .findTransmuxer(mimeTypeWithAllCodecs);
        if (transmuxerPlugin) {
          const transmuxer = transmuxerPlugin();
          this.transmuxers_.set(contentType, transmuxer);
          // The SourceBuffer will be fed the transmuxer's output type.
          mimeType =
              transmuxer.convertCodecs(contentType, mimeTypeWithAllCodecs);
        }
      }

      const type = this.addExtraFeaturesToMimeType_(mimeType);

      this.destroyer_.ensureNotDestroyed();

      let sourceBuffer;

      try {
        sourceBuffer = this.mediaSource_.addSourceBuffer(type);
      } catch (exception) {
        throw new shaka.util.Error(
            shaka.util.Error.Severity.CRITICAL,
            shaka.util.Error.Category.MEDIA,
            shaka.util.Error.Code.MEDIA_SOURCE_OPERATION_THREW,
            exception,
            'The mediaSource_ status was ' + this.mediaSource_.readyState +
            ' expected \'open\'',
            null);
      }

      if (this.sequenceMode_) {
        sourceBuffer.mode =
            shaka.media.MediaSourceEngine.SourceBufferMode_.SEQUENCE;
      }

      this.eventManager_.listen(
          sourceBuffer, 'error',
          () => this.onError_(contentType));
      this.eventManager_.listen(
          sourceBuffer, 'updateend',
          () => this.onUpdateEnd_(contentType));
      this.sourceBuffers_.set(contentType, sourceBuffer);
      this.sourceBufferTypes_.set(contentType, mimeType);
      this.expectedEncryption_.set(contentType, !!stream.drmInfos.length);
    }
  }
  544. /**
  545. * Called by the Player to provide an updated configuration any time it
  546. * changes. Must be called at least once before init().
  547. *
  548. * @param {shaka.extern.MediaSourceConfiguration} config
  549. */
  550. configure(config) {
  551. this.config_ = config;
  552. if (this.textEngine_) {
  553. this.textEngine_.setModifyCueCallback(config.modifyCueCallback);
  554. }
  555. }
  556. /**
  557. * Indicate if the streaming is allowed by MediaSourceEngine.
  558. * If we using MediaSource we always returns true.
  559. *
  560. * @return {boolean}
  561. */
  562. isStreamingAllowed() {
  563. return this.streamingAllowed_ && !this.usingRemotePlayback_ &&
  564. !this.reloadingMediaSource_;
  565. }
  566. /**
  567. * Reinitialize the TextEngine for a new text type.
  568. * @param {string} mimeType
  569. * @param {boolean} sequenceMode
  570. * @param {boolean} external
  571. */
  572. reinitText(mimeType, sequenceMode, external) {
  573. if (!this.textEngine_) {
  574. this.textEngine_ = new shaka.text.TextEngine(this.textDisplayer_);
  575. if (this.textEngine_) {
  576. this.textEngine_.setModifyCueCallback(this.config_.modifyCueCallback);
  577. }
  578. }
  579. this.textEngine_.initParser(mimeType, sequenceMode,
  580. external || this.segmentRelativeVttTiming_, this.manifestType_);
  581. }
  582. /**
  583. * @return {boolean} True if the MediaSource is in an "ended" state, or if the
  584. * object has been destroyed.
  585. */
  586. ended() {
  587. if (this.reloadingMediaSource_) {
  588. return false;
  589. }
  590. return this.mediaSource_ ? this.mediaSource_.readyState == 'ended' : true;
  591. }
  592. /**
  593. * @return {boolean} True if the MediaSource is in an "closed" state, or if
  594. * the object has been destroyed.
  595. */
  596. closed() {
  597. if (this.reloadingMediaSource_) {
  598. return false;
  599. }
  600. return this.mediaSource_ ? this.mediaSource_.readyState == 'closed' : true;
  601. }
  602. /**
  603. * Gets the first timestamp in buffer for the given content type.
  604. *
  605. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  606. * @return {?number} The timestamp in seconds, or null if nothing is buffered.
  607. */
  608. bufferStart(contentType) {
  609. if (!this.sourceBuffers_.size) {
  610. return null;
  611. }
  612. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  613. if (contentType == ContentType.TEXT) {
  614. return this.textEngine_.bufferStart();
  615. }
  616. return shaka.media.TimeRangesUtils.bufferStart(
  617. this.getBuffered_(contentType));
  618. }
  619. /**
  620. * Gets the last timestamp in buffer for the given content type.
  621. *
  622. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  623. * @return {?number} The timestamp in seconds, or null if nothing is buffered.
  624. */
  625. bufferEnd(contentType) {
  626. if (!this.sourceBuffers_.size) {
  627. return null;
  628. }
  629. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  630. if (contentType == ContentType.TEXT) {
  631. return this.textEngine_.bufferEnd();
  632. }
  633. return shaka.media.TimeRangesUtils.bufferEnd(
  634. this.getBuffered_(contentType));
  635. }
  636. /**
  637. * Determines if the given time is inside the buffered range of the given
  638. * content type.
  639. *
  640. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  641. * @param {number} time Playhead time
  642. * @return {boolean}
  643. */
  644. isBuffered(contentType, time) {
  645. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  646. if (contentType == ContentType.TEXT) {
  647. return this.textEngine_.isBuffered(time);
  648. } else {
  649. const buffered = this.getBuffered_(contentType);
  650. return shaka.media.TimeRangesUtils.isBuffered(buffered, time);
  651. }
  652. }
  653. /**
  654. * Computes how far ahead of the given timestamp is buffered for the given
  655. * content type.
  656. *
  657. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  658. * @param {number} time
  659. * @return {number} The amount of time buffered ahead in seconds.
  660. */
  661. bufferedAheadOf(contentType, time) {
  662. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  663. if (contentType == ContentType.TEXT) {
  664. return this.textEngine_.bufferedAheadOf(time);
  665. } else {
  666. const buffered = this.getBuffered_(contentType);
  667. return shaka.media.TimeRangesUtils.bufferedAheadOf(buffered, time);
  668. }
  669. }
  670. /**
  671. * Returns info about what is currently buffered.
  672. * @return {shaka.extern.BufferedInfo}
  673. */
  674. getBufferedInfo() {
  675. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  676. const TimeRangesUtils = shaka.media.TimeRangesUtils;
  677. const info = {
  678. total: this.reloadingMediaSource_ ? [] :
  679. TimeRangesUtils.getBufferedInfo(this.video_.buffered),
  680. audio:
  681. TimeRangesUtils.getBufferedInfo(this.getBuffered_(ContentType.AUDIO)),
  682. video:
  683. TimeRangesUtils.getBufferedInfo(this.getBuffered_(ContentType.VIDEO)),
  684. text: [],
  685. };
  686. if (this.textEngine_) {
  687. const start = this.textEngine_.bufferStart();
  688. const end = this.textEngine_.bufferEnd();
  689. if (start != null && end != null) {
  690. info.text.push({start: start, end: end});
  691. }
  692. }
  693. return info;
  694. }
  695. /**
  696. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  697. * @return {TimeRanges} The buffered ranges for the given content type, or
  698. * null if the buffered ranges could not be obtained.
  699. * @private
  700. */
  701. getBuffered_(contentType) {
  702. if (this.reloadingMediaSource_ || this.usingRemotePlayback_) {
  703. return null;
  704. }
  705. try {
  706. return this.sourceBuffers_.get(contentType).buffered;
  707. } catch (exception) {
  708. if (this.sourceBuffers_.has(contentType)) {
  709. // Note: previous MediaSource errors may cause access to |buffered| to
  710. // throw.
  711. shaka.log.error('failed to get buffered range for ' + contentType,
  712. exception);
  713. }
  714. return null;
  715. }
  716. }
  /**
   * Create a new closed caption parser.  This will ONLY be replaced by tests
   * as a way to inject fake closed caption parser instances.
   *
   * @param {string} mimeType
   * @return {!shaka.media.IClosedCaptionParser}
   */
  getCaptionParser(mimeType) {
    // Factory method kept overridable for test injection.
    return new shaka.media.ClosedCaptionParser(mimeType);
  }
  /**
   * Extracts a start timestamp from segment data and dispatches any timed
   * metadata found along the way.  Three container paths: raw/containerless
   * formats (ID3 frames), MP4 (tfdt box, plus emsg/prft), and MPEG-2 TS.
   *
   * This method is only public for testing.
   *
   * @param {shaka.util.ManifestParserUtils.ContentType} contentType
   * @param {!BufferSource} data
   * @param {!shaka.media.SegmentReference} reference The segment reference
   *   we are appending
   * @param {shaka.extern.Stream} stream
   * @param {!string} mimeType
   * @return {{timestamp: ?number,
   *           metadata: !Array<shaka.extern.ID3Metadata>}}
   */
  getTimestampAndDispatchMetadata(contentType, data, reference, stream,
      mimeType) {
    let timestamp = null;
    let metadata = [];

    const uint8ArrayData = shaka.util.BufferUtils.toUint8(data);
    if (shaka.util.MimeUtils.RAW_FORMATS.includes(mimeType)) {
      // Containerless formats: look for ID3 frames for both the timestamp and
      // the timed-metadata dispatch.
      const frames = shaka.util.Id3Utils.getID3Frames(uint8ArrayData);
      if (frames.length && reference) {
        // Apple's HLS convention carries the transport stream timestamp in a
        // dedicated ID3 frame, in milliseconds.
        const metadataTimestamp = frames.find((frame) => {
          return frame.description ===
              'com.apple.streaming.transportStreamTimestamp';
        });
        if (metadataTimestamp && typeof metadataTimestamp.data == 'number') {
          timestamp = Math.round(metadataTimestamp.data) / 1000;
        }
        /** @private {shaka.extern.ID3Metadata} */
        const id3Metadata = {
          cueTime: reference.startTime,
          data: uint8ArrayData,
          frames: frames,
          dts: reference.startTime,
          pts: reference.startTime,
        };
        this.playerInterface_.onMetadata(
            [id3Metadata], /* offset= */ 0, reference.endTime);
      }
    } else if (mimeType.includes('/mp4') &&
        reference &&
        reference.initSegmentReference &&
        reference.initSegmentReference.timescale) {
      // MP4: derive the media start time from the tfdt box, scaled by the
      // timescale from the init segment.
      const timescale = reference.initSegmentReference.timescale;
      if (!isNaN(timescale)) {
        const hasEmsg = ((stream.emsgSchemeIdUris != null &&
            stream.emsgSchemeIdUris.length > 0) ||
            this.config_.dispatchAllEmsgBoxes);
        const Mp4Parser = shaka.util.Mp4Parser;
        let startTime = 0;
        let parsedMedia = false;
        const parser = new Mp4Parser();
        if (hasEmsg) {
          parser.fullBox('emsg', (box) =>
              this.parseEMSG_(reference, stream.emsgSchemeIdUris, box));
        }
        parser.fullBox('prft', (box) => this.parsePrft_(timescale, box))
            .box('moof', Mp4Parser.children)
            .box('traf', Mp4Parser.children)
            .fullBox('tfdt', (box) => {
              // Only the first tfdt is needed for the start time.
              if (!parsedMedia) {
                goog.asserts.assert(
                    box.version == 0 || box.version == 1,
                    'TFDT version can only be 0 or 1');
                const parsed = shaka.util.Mp4BoxParsers.parseTFDTInaccurate(
                    box.reader, box.version);
                startTime = parsed.baseMediaDecodeTime / timescale;
                parsedMedia = true;
                // If emsg boxes are also wanted, keep parsing; otherwise we
                // can stop as soon as the start time is known.
                if (!hasEmsg) {
                  box.parser.stop();
                }
              }
            }).parse(data, /* partialOkay= */ true);
        if (parsedMedia && reference.timestampOffset == 0) {
          timestamp = startTime;
        }
      }
    } else if (!mimeType.includes('/mp4') && !mimeType.includes('/webm') &&
        shaka.util.TsParser.probe(uint8ArrayData)) {
      // MPEG-2 TS: reuse the per-content-type parser, clearing old data.
      if (!this.tsParsers_.has(contentType)) {
        this.tsParsers_.set(contentType, new shaka.util.TsParser());
      } else {
        this.tsParsers_.get(contentType).clearData();
      }
      const tsParser = this.tsParsers_.get(contentType).parse(uint8ArrayData);
      const startTime = tsParser.getStartTime(contentType);
      if (startTime != null) {
        timestamp = startTime;
      }
      metadata = tsParser.getMetadata();
    }
    return {timestamp, metadata};
  }
  818. /**
  819. * Parse the EMSG box from a MP4 container.
  820. *
  821. * @param {!shaka.media.SegmentReference} reference
  822. * @param {?Array<string>} emsgSchemeIdUris Array of emsg
  823. * scheme_id_uri for which emsg boxes should be parsed.
  824. * @param {!shaka.extern.ParsedBox} box
  825. * @private
  826. * https://dashif-documents.azurewebsites.net/Events/master/event.html#emsg-format
  827. * aligned(8) class DASHEventMessageBox
  828. * extends FullBox(‘emsg’, version, flags = 0){
  829. * if (version==0) {
  830. * string scheme_id_uri;
  831. * string value;
  832. * unsigned int(32) timescale;
  833. * unsigned int(32) presentation_time_delta;
  834. * unsigned int(32) event_duration;
  835. * unsigned int(32) id;
  836. * } else if (version==1) {
  837. * unsigned int(32) timescale;
  838. * unsigned int(64) presentation_time;
  839. * unsigned int(32) event_duration;
  840. * unsigned int(32) id;
  841. * string scheme_id_uri;
  842. * string value;
  843. * }
  844. * unsigned int(8) message_data[];
  845. */
  846. parseEMSG_(reference, emsgSchemeIdUris, box) {
  847. let timescale;
  848. let id;
  849. let eventDuration;
  850. let schemeId;
  851. let startTime;
  852. let presentationTimeDelta;
  853. let value;
  854. if (box.version === 0) {
  855. schemeId = box.reader.readTerminatedString();
  856. value = box.reader.readTerminatedString();
  857. timescale = box.reader.readUint32();
  858. presentationTimeDelta = box.reader.readUint32();
  859. eventDuration = box.reader.readUint32();
  860. id = box.reader.readUint32();
  861. startTime = reference.startTime + (presentationTimeDelta / timescale);
  862. } else {
  863. timescale = box.reader.readUint32();
  864. const pts = box.reader.readUint64();
  865. startTime = (pts / timescale) + reference.timestampOffset;
  866. presentationTimeDelta = startTime - reference.startTime;
  867. eventDuration = box.reader.readUint32();
  868. id = box.reader.readUint32();
  869. schemeId = box.reader.readTerminatedString();
  870. value = box.reader.readTerminatedString();
  871. }
  872. const messageData = box.reader.readBytes(
  873. box.reader.getLength() - box.reader.getPosition());
  874. // See DASH sec. 5.10.3.3.1
  875. // If a DASH client detects an event message box with a scheme that is not
  876. // defined in MPD, the client is expected to ignore it.
  877. if ((emsgSchemeIdUris && emsgSchemeIdUris.includes(schemeId)) ||
  878. this.config_.dispatchAllEmsgBoxes) {
  879. // See DASH sec. 5.10.4.1
  880. // A special scheme in DASH used to signal manifest updates.
  881. if (schemeId == 'urn:mpeg:dash:event:2012') {
  882. this.playerInterface_.onManifestUpdate();
  883. } else {
  884. // All other schemes are dispatched as a general 'emsg' event.
  885. const endTime = startTime + (eventDuration / timescale);
  886. /** @type {shaka.extern.EmsgInfo} */
  887. const emsg = {
  888. startTime: startTime,
  889. endTime: endTime,
  890. schemeIdUri: schemeId,
  891. value: value,
  892. timescale: timescale,
  893. presentationTimeDelta: presentationTimeDelta,
  894. eventDuration: eventDuration,
  895. id: id,
  896. messageData: messageData,
  897. };
  898. // Dispatch an event to notify the application about the emsg box.
  899. const eventName = shaka.util.FakeEvent.EventName.Emsg;
  900. const data = (new Map()).set('detail', emsg);
  901. const event = new shaka.util.FakeEvent(eventName, data);
  902. // A user can call preventDefault() on a cancelable event.
  903. event.cancelable = true;
  904. this.playerInterface_.onEmsg(emsg);
  905. // Additionally, ID3 events generate a 'metadata' event. This is a
  906. // pre-parsed version of the metadata blob already dispatched in the
  907. // 'emsg' event.
  908. if (schemeId == 'https://aomedia.org/emsg/ID3' ||
  909. schemeId == 'https://developer.apple.com/streaming/emsg-id3') {
  910. // See https://aomediacodec.github.io/id3-emsg/
  911. const frames = shaka.util.Id3Utils.getID3Frames(messageData);
  912. if (frames.length) {
  913. /** @private {shaka.extern.ID3Metadata} */
  914. const metadata = {
  915. cueTime: startTime,
  916. data: messageData,
  917. frames: frames,
  918. dts: startTime,
  919. pts: startTime,
  920. };
  921. this.playerInterface_.onMetadata(
  922. [metadata], /* offset= */ 0, endTime);
  923. }
  924. }
  925. }
  926. }
  927. }
  928. /**
  929. * Parse PRFT box.
  930. * @param {number} timescale
  931. * @param {!shaka.extern.ParsedBox} box
  932. * @private
  933. */
  934. parsePrft_(timescale, box) {
  935. goog.asserts.assert(
  936. box.version == 0 || box.version == 1,
  937. 'PRFT version can only be 0 or 1');
  938. const parsed = shaka.util.Mp4BoxParsers.parsePRFTInaccurate(
  939. box.reader, box.version);
  940. const wallClockTime = shaka.util.TimeUtils.convertNtp(parsed.ntpTimestamp);
  941. const programStartDate = new Date(wallClockTime -
  942. (parsed.mediaTime / timescale) * 1000);
  943. /** @type {shaka.extern.ProducerReferenceTime} */
  944. const prftInfo = {
  945. wallClockTime,
  946. programStartDate,
  947. };
  948. const eventName = shaka.util.FakeEvent.EventName.Prft;
  949. const data = (new Map()).set('detail', prftInfo);
  950. const event = new shaka.util.FakeEvent(
  951. eventName, data);
  952. this.playerInterface_.onEvent(event);
  953. }
  /**
   * Enqueue an operation to append data to the SourceBuffer.
   * Start and end times are needed for TextEngine, but not for MediaSource.
   * Start and end times may be null for initialization segments; if present
   * they are relative to the presentation timeline.
   *
   * @param {shaka.util.ManifestParserUtils.ContentType} contentType
   * @param {!BufferSource} data
   * @param {?shaka.media.SegmentReference} reference The segment reference
   *   we are appending, or null for init segments
   * @param {shaka.extern.Stream} stream
   * @param {?boolean} hasClosedCaptions True if the buffer contains CEA closed
   *   captions
   * @param {boolean=} seeked True if we just seeked
   * @param {boolean=} adaptation True if we just automatically switched active
   *   variant(s).
   * @param {boolean=} isChunkedData True if we add to the buffer from a
   *   partial read of the segment rather than the whole segment.
   * @param {boolean=} fromSplit True when this is a recursive call made to
   *   split muxed content into separate audio/video appends.
   * @param {number=} continuityTimeline an optional continuity timeline
   * @return {!Promise}
   */
  async appendBuffer(
      contentType, data, reference, stream, hasClosedCaptions, seeked = false,
      adaptation = false, isChunkedData = false, fromSplit = false,
      continuityTimeline) {
    const ContentType = shaka.util.ManifestParserUtils.ContentType;

    // Text is handled entirely by TextEngine; it never touches MSE buffers.
    if (contentType == ContentType.TEXT) {
      if (this.manifestType_ == shaka.media.ManifestParser.HLS) {
        // This won't be known until the first video segment is appended.
        const offset = await this.textSequenceModeOffset_;
        this.textEngine_.setTimestampOffset(offset);
      }
      await this.textEngine_.appendBuffer(
          data,
          reference ? reference.startTime : null,
          reference ? reference.endTime : null,
          reference ? reference.getUris()[0] : null);
      return;
    }

    // On platforms that need muxed content split into separate buffers,
    // append the same data once per buffer, marking the calls as splits so
    // this branch is not re-entered.
    if (!fromSplit && this.needSplitMuxedContent_) {
      await this.appendBuffer(ContentType.AUDIO, data, reference, stream,
          hasClosedCaptions, seeked, adaptation, isChunkedData,
          /* fromSplit= */ true);
      await this.appendBuffer(ContentType.VIDEO, data, reference, stream,
          hasClosedCaptions, seeked, adaptation, isChunkedData,
          /* fromSplit= */ true);
      return;
    }

    if (!this.sourceBuffers_.has(contentType)) {
      shaka.log.warning('Attempted to restore a non-existent source buffer');
      return;
    }

    let timestampOffset = this.sourceBuffers_.get(contentType).timestampOffset;

    // When transmuxing, timestamps must be parsed from the original
    // (pre-transmux) container format.
    let mimeType = this.sourceBufferTypes_.get(contentType);
    if (this.transmuxers_.has(contentType)) {
      mimeType = this.transmuxers_.get(contentType).getOriginalMimeType();
    }
    if (reference) {
      // Extract the first media timestamp (and any in-band metadata) from the
      // segment itself so we can compute an accurate timestampOffset.
      const {timestamp, metadata} = this.getTimestampAndDispatchMetadata(
          contentType, data, reference, stream, mimeType);
      if (timestamp != null) {
        // Record the first video timestamp and, once both A/V firsts are
        // known, resolve the audio compensation promise.
        if (this.firstVideoTimestamp_ == null &&
            contentType == ContentType.VIDEO) {
          this.firstVideoTimestamp_ = timestamp;
          this.firstVideoReferenceStartTime_ = reference.startTime;
          if (this.firstAudioTimestamp_ != null) {
            let compensation = 0;
            // Only apply compensation if video and audio segment startTime
            // match, to avoid introducing sync issues.
            if (this.firstVideoReferenceStartTime_ ==
                this.firstAudioReferenceStartTime_) {
              compensation =
                  this.firstVideoTimestamp_ - this.firstAudioTimestamp_;
            }
            this.audioCompensation_.resolve(compensation);
          }
        }
        // Mirror of the block above, for the first audio timestamp.
        if (this.firstAudioTimestamp_ == null &&
            contentType == ContentType.AUDIO) {
          this.firstAudioTimestamp_ = timestamp;
          this.firstAudioReferenceStartTime_ = reference.startTime;
          if (this.firstVideoTimestamp_ != null) {
            let compensation = 0;
            // Only apply compensation if video and audio segment startTime
            // match, to avoid introducing sync issues.
            if (this.firstVideoReferenceStartTime_ ==
                this.firstAudioReferenceStartTime_) {
              compensation =
                  this.firstVideoTimestamp_ - this.firstAudioTimestamp_;
            }
            this.audioCompensation_.resolve(compensation);
          }
        }

        let realTimestamp = timestamp;
        const RAW_FORMATS = shaka.util.MimeUtils.RAW_FORMATS;
        // For formats without containers and using segments mode, we need to
        // adjust TimestampOffset relative to 0 because segments do not have
        // any timestamp information.
        if (!this.sequenceMode_ &&
            RAW_FORMATS.includes(this.sourceBufferTypes_.get(contentType))) {
          realTimestamp = 0;
        }

        // Re-derive the offset from the segment's own timestamp, and update
        // the SourceBuffer if it drifted (>= 1ms) or we seeked/adapted.
        const calculatedTimestampOffset = reference.startTime - realTimestamp;
        const timestampOffsetDifference =
            Math.abs(timestampOffset - calculatedTimestampOffset);
        if ((timestampOffsetDifference >= 0.001 || seeked || adaptation) &&
            (!isChunkedData || calculatedTimestampOffset > 0 ||
            !timestampOffset)) {
          timestampOffset = calculatedTimestampOffset;
          if (this.attemptTimestampOffsetCalculation_) {
            // abort() first so the SourceBuffer leaves any parsing state and
            // accepts the timestampOffset change.
            this.enqueueOperation_(
                contentType,
                () => this.abort_(contentType),
                null);
            this.enqueueOperation_(
                contentType,
                () => this.setTimestampOffset_(contentType, timestampOffset),
                null);
          }
        }
        // Timestamps can only be reliably extracted from video, not audio.
        // Packed audio formats do not have internal timestamps at all.
        // Prefer video for this when available.
        const isBestSourceBufferForTimestamps =
            contentType == ContentType.VIDEO ||
            !(this.sourceBuffers_.has(ContentType.VIDEO));
        if (isBestSourceBufferForTimestamps) {
          this.textSequenceModeOffset_.resolve(timestampOffset);
        }
      }
      if (metadata.length) {
        this.playerInterface_.onMetadata(metadata, timestampOffset,
            reference ? reference.endTime : null);
      }
    }
    // CEA closed captions ride inside the video stream; extract and forward
    // them to TextEngine.
    if (hasClosedCaptions && contentType == ContentType.VIDEO) {
      if (!this.textEngine_) {
        this.reinitText(shaka.util.MimeUtils.CEA608_CLOSED_CAPTION_MIMETYPE,
            this.sequenceMode_, /* external= */ false);
      }
      if (!this.captionParser_) {
        const basicType = mimeType.split(';', 1)[0];
        this.captionParser_ = this.getCaptionParser(basicType);
      }
      // If it is the init segment for closed captions, initialize the closed
      // caption parser.
      if (!reference) {
        this.captionParser_.init(data, adaptation, continuityTimeline);
      } else {
        const closedCaptions = this.captionParser_.parseFrom(data);
        if (closedCaptions.length) {
          this.textEngine_.storeAndAppendClosedCaptions(
              closedCaptions,
              reference.startTime,
              reference.endTime,
              timestampOffset);
        }
      }
    }

    // Convert the container format if this content type needs transmuxing.
    if (this.transmuxers_.has(contentType)) {
      data = await this.transmuxers_.get(contentType).transmux(
          data, stream, reference, this.mediaSource_.duration, contentType);
    }

    data = this.workAroundBrokenPlatforms_(
        stream, data, reference, contentType);

    if (reference && this.sequenceMode_ && contentType != ContentType.TEXT) {
      // In sequence mode, for non-text streams, if we just cleared the buffer
      // and are either performing an unbuffered seek or handling an automatic
      // adaptation, we need to set a new timestampOffset on the sourceBuffer.
      if (seeked || adaptation) {
        let timestampOffset = reference.startTime;
        // Audio and video may not be aligned, so we will compensate for audio
        // if necessary.
        if (this.manifestType_ == shaka.media.ManifestParser.HLS &&
            !this.needSplitMuxedContent_ &&
            contentType == ContentType.AUDIO &&
            this.sourceBuffers_.has(ContentType.VIDEO)) {
          const compensation = await this.audioCompensation_;
          // Only apply compensation if the difference is greater than 150ms
          if (Math.abs(compensation) > 0.15) {
            timestampOffset -= compensation;
          }
        }
        // The logic to call abort() before setting the timestampOffset is
        // extended during unbuffered seeks or automatic adaptations; it is
        // possible for the append state to be PARSING_MEDIA_SEGMENT from the
        // previous SourceBuffer#appendBuffer() call.
        this.enqueueOperation_(
            contentType,
            () => this.abort_(contentType),
            null);
        this.enqueueOperation_(
            contentType,
            () => this.setTimestampOffset_(contentType, timestampOffset),
            null);
      }
    }

    // Snapshot the buffered ranges right before the append (inside the queued
    // operation) so we can sanity-check the result in debug builds.
    let bufferedBefore = null;

    await this.enqueueOperation_(contentType, () => {
      if (goog.DEBUG && reference && !reference.isPreload() && !isChunkedData) {
        bufferedBefore = this.getBuffered_(contentType);
      }
      this.append_(contentType, data, timestampOffset, stream);
    }, reference ? reference.getUris()[0] : null);

    if (goog.DEBUG && reference && !reference.isPreload() && !isChunkedData) {
      const bufferedAfter = this.getBuffered_(contentType);
      const newBuffered = shaka.media.TimeRangesUtils.computeAddedRange(
          bufferedBefore, bufferedAfter);
      if (newBuffered) {
        const segmentDuration = reference.endTime - reference.startTime;
        const timeAdded = newBuffered.end - newBuffered.start;
        // Check end times instead of start times.  We may be overwriting a
        // buffer and only the end changes, and that would be fine.
        // Also, exclude tiny segments.  Sometimes alignment segments as small
        // as 33ms are seen in Google DAI content.  For such tiny segments,
        // half a segment duration would be no issue.
        const offset = Math.abs(newBuffered.end - reference.endTime);
        if (segmentDuration > 0.100 && (offset > segmentDuration / 2 ||
            Math.abs(segmentDuration - timeAdded) > 0.030)) {
          shaka.log.error('Possible encoding problem detected!',
              'Unexpected buffered range for reference', reference,
              'from URIs', reference.getUris(),
              'should be', {start: reference.startTime, end: reference.endTime},
              'but got', newBuffered);
        }
      }
    }
  }
  1182. /**
  1183. * Set the selected closed captions Id and language.
  1184. *
  1185. * @param {string} id
  1186. */
  1187. setSelectedClosedCaptionId(id) {
  1188. const VIDEO = shaka.util.ManifestParserUtils.ContentType.VIDEO;
  1189. const videoBufferEndTime = this.bufferEnd(VIDEO) || 0;
  1190. this.textEngine_.setSelectedClosedCaptionId(id, videoBufferEndTime);
  1191. }
  1192. /** Disable embedded closed captions. */
  1193. clearSelectedClosedCaptionId() {
  1194. if (this.textEngine_) {
  1195. this.textEngine_.setSelectedClosedCaptionId('', 0);
  1196. }
  1197. }
  1198. /**
  1199. * Enqueue an operation to remove data from the SourceBuffer.
  1200. *
  1201. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  1202. * @param {number} startTime relative to the start of the presentation
  1203. * @param {number} endTime relative to the start of the presentation
  1204. * @param {Array<number>=} continuityTimelines a list of continuity timelines
  1205. * that are still available on the stream.
  1206. * @return {!Promise}
  1207. */
  1208. async remove(contentType, startTime, endTime, continuityTimelines) {
  1209. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  1210. if (contentType == ContentType.VIDEO && this.captionParser_) {
  1211. this.captionParser_.remove(continuityTimelines);
  1212. }
  1213. if (contentType == ContentType.TEXT) {
  1214. await this.textEngine_.remove(startTime, endTime);
  1215. } else if (endTime > startTime) {
  1216. await this.enqueueOperation_(
  1217. contentType,
  1218. () => this.remove_(contentType, startTime, endTime),
  1219. null);
  1220. if (this.needSplitMuxedContent_) {
  1221. await this.enqueueOperation_(
  1222. ContentType.AUDIO,
  1223. () => this.remove_(ContentType.AUDIO, startTime, endTime),
  1224. null);
  1225. }
  1226. }
  1227. }
  1228. /**
  1229. * Enqueue an operation to clear the SourceBuffer.
  1230. *
  1231. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  1232. * @return {!Promise}
  1233. */
  1234. async clear(contentType) {
  1235. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  1236. if (contentType == ContentType.TEXT) {
  1237. if (!this.textEngine_) {
  1238. return;
  1239. }
  1240. await this.textEngine_.remove(0, Infinity);
  1241. } else {
  1242. // Note that not all platforms allow clearing to Infinity.
  1243. await this.enqueueOperation_(
  1244. contentType,
  1245. () => this.remove_(contentType, 0, this.mediaSource_.duration),
  1246. null);
  1247. if (this.needSplitMuxedContent_) {
  1248. await this.enqueueOperation_(
  1249. ContentType.AUDIO,
  1250. () => this.remove_(
  1251. ContentType.AUDIO, 0, this.mediaSource_.duration),
  1252. null);
  1253. }
  1254. }
  1255. }
  1256. /**
  1257. * Fully reset the state of the caption parser owned by MediaSourceEngine.
  1258. */
  1259. resetCaptionParser() {
  1260. if (this.captionParser_) {
  1261. this.captionParser_.reset();
  1262. }
  1263. }
  1264. /**
  1265. * Enqueue an operation to flush the SourceBuffer.
  1266. * This is a workaround for what we believe is a Chromecast bug.
  1267. *
  1268. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  1269. * @return {!Promise}
  1270. */
  1271. async flush(contentType) {
  1272. // Flush the pipeline. Necessary on Chromecast, even though we have removed
  1273. // everything.
  1274. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  1275. if (contentType == ContentType.TEXT) {
  1276. // Nothing to flush for text.
  1277. return;
  1278. }
  1279. await this.enqueueOperation_(
  1280. contentType,
  1281. () => this.flush_(contentType),
  1282. null);
  1283. if (this.needSplitMuxedContent_) {
  1284. await this.enqueueOperation_(
  1285. ContentType.AUDIO,
  1286. () => this.flush_(ContentType.AUDIO),
  1287. null);
  1288. }
  1289. }
  1290. /**
  1291. * Sets the timestamp offset and append window end for the given content type.
  1292. *
  1293. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  1294. * @param {number} timestampOffset The timestamp offset. Segments which start
  1295. * at time t will be inserted at time t + timestampOffset instead. This
  1296. * value does not affect segments which have already been inserted.
  1297. * @param {number} appendWindowStart The timestamp to set the append window
  1298. * start to. For future appends, frames/samples with timestamps less than
  1299. * this value will be dropped.
  1300. * @param {number} appendWindowEnd The timestamp to set the append window end
  1301. * to. For future appends, frames/samples with timestamps greater than this
  1302. * value will be dropped.
  1303. * @param {boolean} ignoreTimestampOffset If true, the timestampOffset will
  1304. * not be applied in this step.
  1305. * @param {string} mimeType
  1306. * @param {string} codecs
  1307. * @param {!Map<shaka.util.ManifestParserUtils.ContentType,
  1308. * shaka.extern.Stream>} streamsByType
  1309. * A map of content types to streams.
  1310. *
  1311. * @return {!Promise}
  1312. */
  1313. async setStreamProperties(
  1314. contentType, timestampOffset, appendWindowStart, appendWindowEnd,
  1315. ignoreTimestampOffset, mimeType, codecs, streamsByType) {
  1316. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  1317. if (contentType == ContentType.TEXT) {
  1318. if (!ignoreTimestampOffset) {
  1319. this.textEngine_.setTimestampOffset(timestampOffset);
  1320. }
  1321. this.textEngine_.setAppendWindow(appendWindowStart, appendWindowEnd);
  1322. return;
  1323. }
  1324. const operations = [];
  1325. const hasChangedCodecs = await this.codecSwitchIfNecessary_(
  1326. contentType, mimeType, codecs, streamsByType);
  1327. if (!hasChangedCodecs) {
  1328. // Queue an abort() to help MSE splice together overlapping segments.
  1329. // We set appendWindowEnd when we change periods in DASH content, and the
  1330. // period transition may result in overlap.
  1331. //
  1332. // An abort() also helps with MPEG2-TS. When we append a TS segment, we
  1333. // always enter a PARSING_MEDIA_SEGMENT state and we can't change the
  1334. // timestamp offset. By calling abort(), we reset the state so we can
  1335. // set it.
  1336. operations.push(this.enqueueOperation_(
  1337. contentType,
  1338. () => this.abort_(contentType),
  1339. null));
  1340. if (this.needSplitMuxedContent_) {
  1341. operations.push(this.enqueueOperation_(
  1342. ContentType.AUDIO,
  1343. () => this.abort_(ContentType.AUDIO),
  1344. null));
  1345. }
  1346. }
  1347. if (!ignoreTimestampOffset) {
  1348. operations.push(this.enqueueOperation_(
  1349. contentType,
  1350. () => this.setTimestampOffset_(contentType, timestampOffset),
  1351. null));
  1352. if (this.needSplitMuxedContent_) {
  1353. operations.push(this.enqueueOperation_(
  1354. ContentType.AUDIO,
  1355. () => this.setTimestampOffset_(
  1356. ContentType.AUDIO, timestampOffset),
  1357. null));
  1358. }
  1359. }
  1360. if (appendWindowStart != 0 || appendWindowEnd != Infinity) {
  1361. operations.push(this.enqueueOperation_(
  1362. contentType,
  1363. () => this.setAppendWindow_(
  1364. contentType, appendWindowStart, appendWindowEnd),
  1365. null));
  1366. if (this.needSplitMuxedContent_) {
  1367. operations.push(this.enqueueOperation_(
  1368. ContentType.AUDIO,
  1369. () => this.setAppendWindow_(
  1370. ContentType.AUDIO, appendWindowStart, appendWindowEnd),
  1371. null));
  1372. }
  1373. }
  1374. if (operations.length) {
  1375. await Promise.all(operations);
  1376. }
  1377. }
  1378. /**
  1379. * Adjust timestamp offset to maintain AV sync across discontinuities.
  1380. *
  1381. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  1382. * @param {number} timestampOffset
  1383. * @return {!Promise}
  1384. */
  1385. async resync(contentType, timestampOffset) {
  1386. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  1387. if (contentType == ContentType.TEXT) {
  1388. // This operation is for audio and video only.
  1389. return;
  1390. }
  1391. // Reset the promise in case the timestamp offset changed during
  1392. // a period/discontinuity transition.
  1393. if (contentType == ContentType.VIDEO) {
  1394. this.textSequenceModeOffset_ = new shaka.util.PublicPromise();
  1395. }
  1396. if (!this.sequenceMode_) {
  1397. return;
  1398. }
  1399. // Avoid changing timestampOffset when the difference is less than 100 ms
  1400. // from the end of the current buffer.
  1401. const bufferEnd = this.bufferEnd(contentType);
  1402. if (bufferEnd && Math.abs(bufferEnd - timestampOffset) < 0.1) {
  1403. return;
  1404. }
  1405. // Queue an abort() to help MSE splice together overlapping segments.
  1406. // We set appendWindowEnd when we change periods in DASH content, and the
  1407. // period transition may result in overlap.
  1408. //
  1409. // An abort() also helps with MPEG2-TS. When we append a TS segment, we
  1410. // always enter a PARSING_MEDIA_SEGMENT state and we can't change the
  1411. // timestamp offset. By calling abort(), we reset the state so we can
  1412. // set it.
  1413. this.enqueueOperation_(
  1414. contentType,
  1415. () => this.abort_(contentType),
  1416. null);
  1417. if (this.needSplitMuxedContent_) {
  1418. this.enqueueOperation_(
  1419. ContentType.AUDIO,
  1420. () => this.abort_(ContentType.AUDIO),
  1421. null);
  1422. }
  1423. await this.enqueueOperation_(
  1424. contentType,
  1425. () => this.setTimestampOffset_(contentType, timestampOffset),
  1426. null);
  1427. if (this.needSplitMuxedContent_) {
  1428. await this.enqueueOperation_(
  1429. ContentType.AUDIO,
  1430. () => this.setTimestampOffset_(ContentType.AUDIO, timestampOffset),
  1431. null);
  1432. }
  1433. }
  1434. /**
  1435. * @param {string=} reason Valid reasons are 'network' and 'decode'.
  1436. * @return {!Promise}
  1437. * @see http://w3c.github.io/media-source/#idl-def-EndOfStreamError
  1438. */
  1439. async endOfStream(reason) {
  1440. await this.enqueueBlockingOperation_(() => {
  1441. // If endOfStream() has already been called on the media source,
  1442. // don't call it again. Also do not call if readyState is
  1443. // 'closed' (not attached to video element) since it is not a
  1444. // valid operation.
  1445. if (this.ended() || this.closed()) {
  1446. return;
  1447. }
  1448. // Tizen won't let us pass undefined, but it will let us omit the
  1449. // argument.
  1450. if (reason) {
  1451. this.mediaSource_.endOfStream(reason);
  1452. } else {
  1453. this.mediaSource_.endOfStream();
  1454. }
  1455. });
  1456. }
  1457. /**
  1458. * @param {number} duration
  1459. * @return {!Promise}
  1460. */
  1461. async setDuration(duration) {
  1462. await this.enqueueBlockingOperation_(() => {
  1463. // https://www.w3.org/TR/media-source-2/#duration-change-algorithm
  1464. // "Duration reductions that would truncate currently buffered media
  1465. // are disallowed.
  1466. // When truncation is necessary, use remove() to reduce the buffered
  1467. // range before updating duration."
  1468. // But in some platforms, truncating the duration causes the
  1469. // buffer range removal algorithm to run which triggers an
  1470. // 'updateend' event to fire.
  1471. // To handle this scenario, we have to insert a dummy operation into
  1472. // the beginning of each queue, which the 'updateend' handler will remove.
  1473. // Using config to disable it by default and enable only
  1474. // on relevant platforms.
  1475. if (this.config_.durationReductionEmitsUpdateEnd &&
  1476. duration < this.mediaSource_.duration) {
  1477. for (const contentType of this.sourceBuffers_.keys()) {
  1478. const dummyOperation = {
  1479. start: () => {},
  1480. p: new shaka.util.PublicPromise(),
  1481. uri: null,
  1482. };
  1483. this.queues_.get(contentType).unshift(dummyOperation);
  1484. }
  1485. }
  1486. this.mediaSource_.duration = duration;
  1487. this.lastDuration_ = duration;
  1488. });
  1489. }
  1490. /**
  1491. * Get the current MediaSource duration.
  1492. *
  1493. * @return {number}
  1494. */
  1495. getDuration() {
  1496. return this.mediaSource_.duration;
  1497. }
  1498. /**
  1499. * Updates the live seekable range.
  1500. *
  1501. * @param {number} startTime
  1502. * @param {number} endTime
  1503. */
  1504. async setLiveSeekableRange(startTime, endTime) {
  1505. if (this.destroyer_.destroyed() || this.video_.error ||
  1506. this.usingRemotePlayback_ || this.reloadingMediaSource_) {
  1507. return;
  1508. }
  1509. goog.asserts.assert('setLiveSeekableRange' in this.mediaSource_,
  1510. 'Using setLiveSeekableRange on not supported platform');
  1511. if (this.ended() || this.closed()) {
  1512. return;
  1513. }
  1514. await this.enqueueBlockingOperation_(() => {
  1515. if (this.ended() || this.closed()) {
  1516. return;
  1517. }
  1518. this.mediaSource_.setLiveSeekableRange(startTime, endTime);
  1519. });
  1520. }
  1521. /**
  1522. * Clear the current live seekable range.
  1523. */
  1524. async clearLiveSeekableRange() {
  1525. if (this.destroyer_.destroyed() || this.video_.error ||
  1526. this.usingRemotePlayback_ || this.reloadingMediaSource_) {
  1527. return;
  1528. }
  1529. goog.asserts.assert('clearLiveSeekableRange' in this.mediaSource_,
  1530. 'Using clearLiveSeekableRange on not supported platform');
  1531. if (this.ended() || this.closed()) {
  1532. return;
  1533. }
  1534. await this.enqueueBlockingOperation_(() => {
  1535. if (this.ended() || this.closed()) {
  1536. return;
  1537. }
  1538. this.mediaSource_.clearLiveSeekableRange();
  1539. });
  1540. }
  1541. /**
  1542. * Append dependency data.
  1543. * @param {BufferSource} data
  1544. * @param {number} timestampOffset
  1545. * @param {shaka.extern.Stream} stream
  1546. */
  1547. appendDependency(data, timestampOffset, stream) {
  1548. if (this.lcevcDec_) {
  1549. // Append buffers to the LCEVC Dec for parsing and storing
  1550. // of LCEVC data.
  1551. this.lcevcDec_.appendBuffer(data, timestampOffset, stream);
  1552. }
  1553. }
  1554. /**
  1555. * Append data to the SourceBuffer.
  1556. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  1557. * @param {BufferSource} data
  1558. * @param {number} timestampOffset
  1559. * @param {shaka.extern.Stream} stream
  1560. * @private
  1561. */
  1562. append_(contentType, data, timestampOffset, stream) {
  1563. this.appendDependency(data, timestampOffset, stream);
  1564. // This will trigger an 'updateend' event.
  1565. this.sourceBuffers_.get(contentType).appendBuffer(data);
  1566. }
  1567. /**
  1568. * Remove data from the SourceBuffer.
  1569. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  1570. * @param {number} startTime relative to the start of the presentation
  1571. * @param {number} endTime relative to the start of the presentation
  1572. * @private
  1573. */
  1574. remove_(contentType, startTime, endTime) {
  1575. if (endTime <= startTime) {
  1576. // Ignore removal of inverted or empty ranges.
  1577. // Fake 'updateend' event to resolve the operation.
  1578. this.onUpdateEnd_(contentType);
  1579. return;
  1580. }
  1581. // This will trigger an 'updateend' event.
  1582. this.sourceBuffers_.get(contentType).remove(startTime, endTime);
  1583. }
  1584. /**
  1585. * Call abort() on the SourceBuffer.
  1586. * This resets MSE's last_decode_timestamp on all track buffers, which should
  1587. * trigger the splicing logic for overlapping segments.
  1588. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  1589. * @private
  1590. */
  1591. abort_(contentType) {
  1592. const sourceBuffer = this.sourceBuffers_.get(contentType);
  1593. // Save the append window, which is reset on abort().
  1594. const appendWindowStart = sourceBuffer.appendWindowStart;
  1595. const appendWindowEnd = sourceBuffer.appendWindowEnd;
  1596. // This will not trigger an 'updateend' event, since nothing is happening.
  1597. // This is only to reset MSE internals, not to abort an actual operation.
  1598. sourceBuffer.abort();
  1599. // Restore the append window.
  1600. sourceBuffer.appendWindowStart = appendWindowStart;
  1601. sourceBuffer.appendWindowEnd = appendWindowEnd;
  1602. // Fake an 'updateend' event to resolve the operation.
  1603. this.onUpdateEnd_(contentType);
  1604. }
  1605. /**
  1606. * Nudge the playhead to force the media pipeline to be flushed.
  1607. * This seems to be necessary on Chromecast to get new content to replace old
  1608. * content.
  1609. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  1610. * @private
  1611. */
  1612. flush_(contentType) {
  1613. // Never use flush_ if there's data. It causes a hiccup in playback.
  1614. goog.asserts.assert(
  1615. this.video_.buffered.length == 0, 'MediaSourceEngine.flush_ should ' +
  1616. 'only be used after clearing all data!');
  1617. // Seeking forces the pipeline to be flushed.
  1618. this.video_.currentTime -= 0.001;
  1619. // Fake an 'updateend' event to resolve the operation.
  1620. this.onUpdateEnd_(contentType);
  1621. }
  1622. /**
  1623. * Set the SourceBuffer's timestamp offset.
  1624. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  1625. * @param {number} timestampOffset
  1626. * @private
  1627. */
  1628. setTimestampOffset_(contentType, timestampOffset) {
  1629. // Work around for
  1630. // https://github.com/shaka-project/shaka-player/issues/1281:
  1631. // TODO(https://bit.ly/2ttKiBU): follow up when this is fixed in Edge
  1632. if (timestampOffset < 0) {
  1633. // Try to prevent rounding errors in Edge from removing the first
  1634. // keyframe.
  1635. timestampOffset += 0.001;
  1636. }
  1637. let shouldChangeTimestampOffset = true;
  1638. if (this.manifestType_ == shaka.media.ManifestParser.HLS) {
  1639. // Avoid changing timestampOffset when the difference is less than 150 ms
  1640. // from the end of the current buffer when using sequenceMode
  1641. const bufferEnd = this.bufferEnd(contentType);
  1642. if (!bufferEnd || Math.abs(bufferEnd - timestampOffset) > 0.15) {
  1643. shouldChangeTimestampOffset = true;
  1644. } else {
  1645. shouldChangeTimestampOffset = false;
  1646. }
  1647. }
  1648. if (shouldChangeTimestampOffset) {
  1649. this.sourceBuffers_.get(contentType).timestampOffset = timestampOffset;
  1650. }
  1651. // Fake an 'updateend' event to resolve the operation.
  1652. this.onUpdateEnd_(contentType);
  1653. }
  1654. /**
  1655. * Set the SourceBuffer's append window end.
  1656. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  1657. * @param {number} appendWindowStart
  1658. * @param {number} appendWindowEnd
  1659. * @private
  1660. */
  1661. setAppendWindow_(contentType, appendWindowStart, appendWindowEnd) {
  1662. const sourceBuffer = this.sourceBuffers_.get(contentType);
  1663. if (sourceBuffer.appendWindowEnd !== appendWindowEnd ||
  1664. sourceBuffer.appendWindowStart !== appendWindowStart) {
  1665. // You can't set start > end, so first set start to 0, then set the new
  1666. // end, then set the new start. That way, there are no intermediate
  1667. // states which are invalid.
  1668. sourceBuffer.appendWindowStart = 0;
  1669. sourceBuffer.appendWindowEnd = appendWindowEnd;
  1670. sourceBuffer.appendWindowStart = appendWindowStart;
  1671. }
  1672. // Fake an 'updateend' event to resolve the operation.
  1673. this.onUpdateEnd_(contentType);
  1674. }
  1675. /**
  1676. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  1677. * @private
  1678. */
  1679. onError_(contentType) {
  1680. const operation = this.queues_.get(contentType)[0];
  1681. goog.asserts.assert(operation, 'Spurious error event!');
  1682. goog.asserts.assert(!this.sourceBuffers_.get(contentType).updating,
  1683. 'SourceBuffer should not be updating on error!');
  1684. const code = this.video_.error ? this.video_.error.code : 0;
  1685. operation.p.reject(new shaka.util.Error(
  1686. shaka.util.Error.Severity.CRITICAL,
  1687. shaka.util.Error.Category.MEDIA,
  1688. shaka.util.Error.Code.MEDIA_SOURCE_OPERATION_FAILED,
  1689. code, operation.uri));
  1690. // Do not pop from queue. An 'updateend' event will fire next, and to
  1691. // avoid synchronizing these two event handlers, we will allow that one to
  1692. // pop from the queue as normal. Note that because the operation has
  1693. // already been rejected, the call to resolve() in the 'updateend' handler
  1694. // will have no effect.
  1695. }
  1696. /**
  1697. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  1698. * @private
  1699. */
  1700. onUpdateEnd_(contentType) {
  1701. // If we're reloading or have been destroyed, clear the queue for this
  1702. // content type.
  1703. if (this.reloadingMediaSource_ || this.destroyer_.destroyed()) {
  1704. // Resolve any pending operations in this content type's queue
  1705. const queue = this.queues_.get(contentType);
  1706. if (queue && queue.length) {
  1707. // Resolve the first operation that triggered this updateEnd
  1708. const firstOperation = queue[0];
  1709. if (firstOperation && firstOperation.p) {
  1710. firstOperation.p.resolve();
  1711. }
  1712. // Clear the rest of the queue
  1713. this.queues_.set(contentType, []);
  1714. }
  1715. return;
  1716. }
  1717. const operation = this.queues_.get(contentType)[0];
  1718. goog.asserts.assert(operation, 'Spurious updateend event!');
  1719. if (!operation) {
  1720. return;
  1721. }
  1722. goog.asserts.assert(!this.sourceBuffers_.get(contentType).updating,
  1723. 'SourceBuffer should not be updating on updateend!');
  1724. operation.p.resolve();
  1725. this.popFromQueue_(contentType);
  1726. }
  1727. /**
  1728. * Enqueue an operation and start it if appropriate.
  1729. *
  1730. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  1731. * @param {function()} start
  1732. * @param {?string} uri
  1733. * @return {!Promise}
  1734. * @private
  1735. */
  1736. enqueueOperation_(contentType, start, uri) {
  1737. this.destroyer_.ensureNotDestroyed();
  1738. const operation = {
  1739. start: start,
  1740. p: new shaka.util.PublicPromise(),
  1741. uri,
  1742. };
  1743. this.queues_.get(contentType).push(operation);
  1744. if (this.queues_.get(contentType).length == 1) {
  1745. this.startOperation_(contentType);
  1746. }
  1747. return operation.p;
  1748. }
  /**
   * Enqueue an operation which must block all other operations on all
   * SourceBuffers.
   *
   * A 'wait' placeholder is pushed onto every content type's queue; the real
   * operation only runs once every placeholder has reached the front of its
   * queue, guaranteeing no other SourceBuffer operation is in flight.
   *
   * @param {function():(Promise|undefined)} run The blocking operation; may be
   *   asynchronous.
   * @return {!Promise} Resolved when the operation completes; rejected if the
   *   operation throws or if this engine is destroyed while waiting.
   * @private
   */
  async enqueueBlockingOperation_(run) {
    this.destroyer_.ensureNotDestroyed();

    /** @type {!Array<!shaka.util.PublicPromise>} */
    const allWaiters = [];
    /** @type {!Array<!shaka.util.ManifestParserUtils.ContentType>} */
    const contentTypes = Array.from(this.sourceBuffers_.keys());

    // Enqueue a 'wait' operation onto each queue.
    // This operation signals its readiness when it starts.
    // When all wait operations are ready, the real operation takes place.
    for (const contentType of contentTypes) {
      const ready = new shaka.util.PublicPromise();
      const operation = {
        start: () => ready.resolve(),
        p: ready,
        uri: null,
      };
      const queue = this.queues_.get(contentType);
      queue.push(operation);
      allWaiters.push(ready);
      // If the queue was empty, the waiter is already at the front: start it
      // immediately so its readiness promise resolves.
      if (queue.length == 1) {
        operation.start();
      }
    }

    // Return a Promise to the real operation, which waits to begin until
    // there are no other in-progress operations on any SourceBuffers.
    try {
      await Promise.all(allWaiters);
    } catch (error) {
      // One of the waiters failed, which means we've been destroyed.
      goog.asserts.assert(
          this.destroyer_.destroyed(), 'Should be destroyed by now');
      // We haven't popped from the queue. Canceled waiters have been removed
      // by destroy. What's left now should just be resolved waiters. In
      // uncompiled mode, we will maintain good hygiene and make sure the
      // assert at the end of destroy passes. In compiled mode, the queues
      // are wiped in destroy.
      if (goog.DEBUG) {
        for (const contentType of contentTypes) {
          const queue = this.queues_.get(contentType);
          if (queue.length) {
            goog.asserts.assert(queue.length == 1,
                'Should be at most one item in queue!');
            goog.asserts.assert(allWaiters.includes(queue[0].p),
                'The item in queue should be one of our waiters!');
            queue.shift();
          }
        }
      }
      throw error;
    }

    if (goog.DEBUG) {
      // If we did it correctly, nothing is updating.
      for (const contentType of contentTypes) {
        goog.asserts.assert(
            this.sourceBuffers_.get(contentType).updating == false,
            'SourceBuffers should not be updating after a blocking op!');
      }
    }

    // Run the real operation, which can be asynchronous.
    try {
      await run();
    } catch (exception) {
      throw new shaka.util.Error(
          shaka.util.Error.Severity.CRITICAL,
          shaka.util.Error.Category.MEDIA,
          shaka.util.Error.Code.MEDIA_SOURCE_OPERATION_THREW,
          exception,
          this.video_.error || 'No error in the media element',
          null);
    } finally {
      // Unblock the queues: pop each waiter so queued operations resume even
      // if run() threw.
      for (const contentType of contentTypes) {
        this.popFromQueue_(contentType);
      }
    }
  }
  1833. /**
  1834. * Pop from the front of the queue and start a new operation.
  1835. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  1836. * @private
  1837. */
  1838. popFromQueue_(contentType) {
  1839. goog.asserts.assert(this.queues_.has(contentType), 'Queue should exist');
  1840. // Remove the in-progress operation, which is now complete.
  1841. this.queues_.get(contentType).shift();
  1842. this.startOperation_(contentType);
  1843. }
  1844. /**
  1845. * Starts the next operation in the queue.
  1846. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  1847. * @private
  1848. */
  1849. startOperation_(contentType) {
  1850. // Retrieve the next operation, if any, from the queue and start it.
  1851. const next = this.queues_.get(contentType)[0];
  1852. if (next) {
  1853. try {
  1854. next.start();
  1855. } catch (exception) {
  1856. if (exception.name == 'QuotaExceededError') {
  1857. next.p.reject(new shaka.util.Error(
  1858. shaka.util.Error.Severity.CRITICAL,
  1859. shaka.util.Error.Category.MEDIA,
  1860. shaka.util.Error.Code.QUOTA_EXCEEDED_ERROR,
  1861. contentType));
  1862. } else if (!this.isStreamingAllowed()) {
  1863. next.p.reject(new shaka.util.Error(
  1864. shaka.util.Error.Severity.CRITICAL,
  1865. shaka.util.Error.Category.MEDIA,
  1866. shaka.util.Error.Code.STREAMING_NOT_ALLOWED,
  1867. contentType));
  1868. } else {
  1869. next.p.reject(new shaka.util.Error(
  1870. shaka.util.Error.Severity.CRITICAL,
  1871. shaka.util.Error.Category.MEDIA,
  1872. shaka.util.Error.Code.MEDIA_SOURCE_OPERATION_THREW,
  1873. exception,
  1874. this.video_.error || 'No error in the media element',
  1875. next.uri));
  1876. }
  1877. this.popFromQueue_(contentType);
  1878. }
  1879. }
  1880. }
  1881. /**
  1882. * @return {!shaka.extern.TextDisplayer}
  1883. */
  1884. getTextDisplayer() {
  1885. goog.asserts.assert(
  1886. this.textDisplayer_,
  1887. 'TextDisplayer should only be null when this is destroyed');
  1888. return this.textDisplayer_;
  1889. }
  1890. /**
  1891. * @param {!shaka.extern.TextDisplayer} textDisplayer
  1892. */
  1893. setTextDisplayer(textDisplayer) {
  1894. this.textDisplayer_ = textDisplayer;
  1895. if (this.textEngine_) {
  1896. this.textEngine_.setDisplayer(textDisplayer);
  1897. }
  1898. }
  /**
   * Stores the segment-relative VTT timing flag.
   * NOTE(review): presumably consumed later when parsing VTT text segments —
   * the consumer is not visible from here.
   * @param {boolean} segmentRelativeVttTiming
   */
  setSegmentRelativeVttTiming(segmentRelativeVttTiming) {
    this.segmentRelativeVttTiming_ = segmentRelativeVttTiming;
  }
  1905. /**
  1906. * Apply platform-specific transformations to this segment to work around
  1907. * issues in the platform.
  1908. *
  1909. * @param {shaka.extern.Stream} stream
  1910. * @param {!BufferSource} segment
  1911. * @param {?shaka.media.SegmentReference} reference
  1912. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  1913. * @return {!BufferSource}
  1914. * @private
  1915. */
  1916. workAroundBrokenPlatforms_(stream, segment, reference, contentType) {
  1917. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  1918. const isMp4 = shaka.util.MimeUtils.getContainerType(
  1919. this.sourceBufferTypes_.get(contentType)) == 'mp4';
  1920. if (!isMp4) {
  1921. return segment;
  1922. }
  1923. const isInitSegment = reference === null;
  1924. const encryptionExpected = this.expectedEncryption_.get(contentType);
  1925. const keySystem = this.playerInterface_.getKeySystem();
  1926. let isEncrypted = false;
  1927. if (reference && reference.initSegmentReference) {
  1928. isEncrypted = reference.initSegmentReference.encrypted;
  1929. }
  1930. const uri = reference ? reference.getUris()[0] : null;
  1931. const device = shaka.device.DeviceFactory.getDevice();
  1932. if (this.config_.correctEc3Enca &&
  1933. isInitSegment &&
  1934. contentType === ContentType.AUDIO) {
  1935. segment = shaka.media.ContentWorkarounds.correctEnca(segment);
  1936. }
  1937. // If:
  1938. // 1. the configuration tells to insert fake encryption,
  1939. // 2. and this is an init segment or media segment,
  1940. // 3. and encryption is expected,
  1941. // 4. and the platform requires encryption in all init or media segments
  1942. // of current content type,
  1943. // then insert fake encryption metadata for init segments that lack it.
  1944. // The MP4 requirement is because we can currently only do this
  1945. // transformation on MP4 containers.
  1946. // See: https://github.com/shaka-project/shaka-player/issues/2759
  1947. if (this.config_.insertFakeEncryptionInInit && encryptionExpected &&
  1948. device.requiresEncryptionInfoInAllInitSegments(keySystem,
  1949. contentType)) {
  1950. if (isInitSegment) {
  1951. shaka.log.debug('Forcing fake encryption information in init segment.');
  1952. segment =
  1953. shaka.media.ContentWorkarounds.fakeEncryption(stream, segment, uri);
  1954. } else if (!isEncrypted && device.requiresTfhdFix(contentType)) {
  1955. shaka.log.debug(
  1956. 'Forcing fake encryption information in media segment.');
  1957. segment = shaka.media.ContentWorkarounds.fakeMediaEncryption(segment);
  1958. }
  1959. }
  1960. if (isInitSegment && device.requiresEC3InitSegments()) {
  1961. shaka.log.debug('Forcing fake EC-3 information in init segment.');
  1962. segment = shaka.media.ContentWorkarounds.fakeEC3(segment);
  1963. }
  1964. return segment;
  1965. }
  1966. /**
  1967. * Prepare the SourceBuffer to parse a potentially new type or codec.
  1968. *
  1969. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  1970. * @param {string} mimeType
  1971. * @param {?shaka.extern.Transmuxer} transmuxer
  1972. * @private
  1973. */
  1974. change_(contentType, mimeType, transmuxer) {
  1975. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  1976. if (contentType === ContentType.TEXT) {
  1977. shaka.log.debug(`Change not supported for ${contentType}`);
  1978. return;
  1979. }
  1980. const sourceBuffer = this.sourceBufferTypes_.get(contentType);
  1981. shaka.log.debug(
  1982. `Change Type: ${sourceBuffer} -> ${mimeType}`);
  1983. if (shaka.media.Capabilities.isChangeTypeSupported()) {
  1984. if (this.transmuxers_.has(contentType)) {
  1985. this.transmuxers_.get(contentType).destroy();
  1986. this.transmuxers_.delete(contentType);
  1987. }
  1988. if (transmuxer) {
  1989. this.transmuxers_.set(contentType, transmuxer);
  1990. }
  1991. const type = this.addExtraFeaturesToMimeType_(mimeType);
  1992. this.sourceBuffers_.get(contentType).changeType(type);
  1993. this.sourceBufferTypes_.set(contentType, mimeType);
  1994. } else {
  1995. shaka.log.debug('Change Type not supported');
  1996. }
  1997. // Fake an 'updateend' event to resolve the operation.
  1998. this.onUpdateEnd_(contentType);
  1999. }
  2000. /**
  2001. * Enqueue an operation to prepare the SourceBuffer to parse a potentially new
  2002. * type or codec.
  2003. *
  2004. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  2005. * @param {string} mimeType
  2006. * @param {?shaka.extern.Transmuxer} transmuxer
  2007. * @return {!Promise}
  2008. */
  2009. changeType(contentType, mimeType, transmuxer) {
  2010. return this.enqueueOperation_(
  2011. contentType,
  2012. () => this.change_(contentType, mimeType, transmuxer),
  2013. null);
  2014. }
  /**
   * Resets the MediaSource and re-adds source buffers due to codec mismatch.
   *
   * Tears down the current SourceBuffers and transmuxers, attaches a brand-new
   * MediaSource, restores the previous duration and playhead position, and
   * re-creates one SourceBuffer per stream.
   *
   * @param {!Map<shaka.util.ManifestParserUtils.ContentType,
   *              shaka.extern.Stream>} streamsByType
   * @private
   */
  async reset_(streamsByType) {
    // Re-entry guard: skip if a reset is already in flight, or if playback is
    // remote (where we do not own the pipeline).
    if (this.reloadingMediaSource_ || this.usingRemotePlayback_) {
      return;
    }
    const ContentType = shaka.util.ManifestParserUtils.ContentType;
    this.reloadingMediaSource_ = true;
    this.needSplitMuxedContent_ = false;
    const currentTime = this.video_.currentTime;
    // When codec switching if the user is currently paused we don't want
    // to trigger a play when switching codec.
    // Playing can also end up in a paused state after a codec switch
    // so we need to remember the current states.
    const previousAutoPlayState = this.video_.autoplay;
    if (!this.video_.paused) {
      this.playAfterReset_ = true;
    }
    if (this.playbackHasBegun_) {
      // Only set autoplay to false if the video playback has already begun.
      // When a codec switch happens before playback has begun this can cause
      // autoplay not to work as expected.
      this.video_.autoplay = false;
    }
    try {
      // Tear down everything attached to the old MediaSource.
      this.eventManager_.removeAll();
      for (const transmuxer of this.transmuxers_.values()) {
        transmuxer.destroy();
      }
      for (const sourceBuffer of this.sourceBuffers_.values()) {
        try {
          this.mediaSource_.removeSourceBuffer(sourceBuffer);
        } catch (e) {
          // Best-effort: the buffer may already be detached.
          shaka.log.debug('Exception on removeSourceBuffer', e);
        }
      }
      this.transmuxers_.clear();
      this.sourceBuffers_.clear();
      const previousDuration = this.mediaSource_.duration;
      // Create and attach a fresh MediaSource, then wait for it to open.
      this.mediaSourceOpen_ = new shaka.util.PublicPromise();
      this.mediaSource_ = this.createMediaSource(this.mediaSourceOpen_);
      await this.mediaSourceOpen_;
      // Carry the known duration over to the new MediaSource.
      if (!isNaN(previousDuration) && previousDuration) {
        this.mediaSource_.duration = previousDuration;
      } else if (!isNaN(this.lastDuration_) && this.lastDuration_) {
        this.mediaSource_.duration = this.lastDuration_;
      }
      // Wait until the browser reports that every new SourceBuffer has been
      // added before continuing.
      const sourceBufferAdded = new shaka.util.PublicPromise();
      const sourceBuffers =
        /** @type {EventTarget} */(this.mediaSource_.sourceBuffers);
      const totalOfBuffers = streamsByType.size;
      let numberOfSourceBufferAdded = 0;
      const onSourceBufferAdded = () => {
        numberOfSourceBufferAdded++;
        if (numberOfSourceBufferAdded === totalOfBuffers) {
          sourceBufferAdded.resolve();
          this.eventManager_.unlisten(sourceBuffers, 'addsourcebuffer',
              onSourceBufferAdded);
        }
      };
      this.eventManager_.listen(sourceBuffers, 'addsourcebuffer',
          onSourceBufferAdded);
      for (const contentType of streamsByType.keys()) {
        const stream = streamsByType.get(contentType);
        this.initSourceBuffer_(contentType, stream, stream.codecs);
      }
      const audio = streamsByType.get(ContentType.AUDIO);
      if (audio && audio.isAudioMuxedInVideo) {
        this.needSplitMuxedContent_ = true;
      }
      // Muxed audio shares the video SourceBuffer but still needs its own
      // operation queue.
      if (this.needSplitMuxedContent_ && !this.queues_.has(ContentType.AUDIO)) {
        this.queues_.set(ContentType.AUDIO, []);
      }
      // Fake a seek to catchup the playhead.
      this.video_.currentTime = currentTime;
      await sourceBufferAdded;
    } finally {
      this.reloadingMediaSource_ = false;
      this.destroyer_.ensureNotDestroyed();
      // Restore autoplay and resume playback (if it was playing) once the
      // element can play through again.
      this.eventManager_.listenOnce(this.video_, 'canplaythrough', () => {
        // Don't use ensureNotDestroyed() from this event listener, because
        // that results in an uncaught exception. Instead, just check the
        // flag.
        if (this.destroyer_.destroyed()) {
          return;
        }
        this.video_.autoplay = previousAutoPlayState;
        if (this.playAfterReset_) {
          this.playAfterReset_ = false;
          this.video_.play();
        }
      });
    }
  }
  2114. /**
  2115. * Resets the Media Source
  2116. * @param {!Map<shaka.util.ManifestParserUtils.ContentType,
  2117. * shaka.extern.Stream>} streamsByType
  2118. * @return {!Promise}
  2119. */
  2120. reset(streamsByType) {
  2121. return this.enqueueBlockingOperation_(
  2122. () => this.reset_(streamsByType));
  2123. }
  2124. /**
  2125. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  2126. * @param {string} mimeType
  2127. * @param {string} codecs
  2128. * @return {{transmuxer: ?shaka.extern.Transmuxer,
  2129. * transmuxerMuxed: boolean, basicType: string, codec: string,
  2130. * mimeType: string}}
  2131. * @private
  2132. */
  2133. getRealInfo_(contentType, mimeType, codecs) {
  2134. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  2135. const MimeUtils = shaka.util.MimeUtils;
  2136. /** @type {?shaka.extern.Transmuxer} */
  2137. let transmuxer;
  2138. let transmuxerMuxed = false;
  2139. const audioCodec = shaka.util.ManifestParserUtils.guessCodecsSafe(
  2140. ContentType.AUDIO, (codecs || '').split(','));
  2141. const videoCodec = shaka.util.ManifestParserUtils.guessCodecsSafe(
  2142. ContentType.VIDEO, (codecs || '').split(','));
  2143. let codec = videoCodec;
  2144. if (contentType == ContentType.AUDIO) {
  2145. codec = audioCodec;
  2146. }
  2147. if (!codec) {
  2148. codec = codecs;
  2149. }
  2150. let newMimeType = shaka.util.MimeUtils.getFullType(mimeType, codec);
  2151. let needTransmux = this.config_.forceTransmux;
  2152. if (!shaka.media.Capabilities.isTypeSupported(newMimeType) ||
  2153. (!this.sequenceMode_ &&
  2154. shaka.util.MimeUtils.RAW_FORMATS.includes(newMimeType))) {
  2155. needTransmux = true;
  2156. }
  2157. const TransmuxerEngine = shaka.transmuxer.TransmuxerEngine;
  2158. if (needTransmux) {
  2159. const newMimeTypeWithAllCodecs =
  2160. shaka.util.MimeUtils.getFullTypeWithAllCodecs(mimeType, codec);
  2161. const transmuxerPlugin =
  2162. TransmuxerEngine.findTransmuxer(newMimeTypeWithAllCodecs);
  2163. if (transmuxerPlugin) {
  2164. transmuxer = transmuxerPlugin();
  2165. if (audioCodec && videoCodec) {
  2166. transmuxerMuxed = true;
  2167. }
  2168. newMimeType =
  2169. transmuxer.convertCodecs(contentType, newMimeTypeWithAllCodecs);
  2170. }
  2171. }
  2172. const newCodec = MimeUtils.getNormalizedCodec(
  2173. MimeUtils.getCodecs(newMimeType));
  2174. const newBasicType = MimeUtils.getBasicType(newMimeType);
  2175. return {
  2176. transmuxer,
  2177. transmuxerMuxed,
  2178. basicType: newBasicType,
  2179. codec: newCodec,
  2180. mimeType: newMimeType,
  2181. };
  2182. }
  /**
   * Codec switch if necessary, this will not resolve until the codec
   * switch is over.
   *
   * If the normalized codec and basic type already match the current
   * SourceBuffer, no switch happens (though transmuxer ownership may still be
   * updated).  Otherwise the switch is done via changeType() when the SMOOTH
   * strategy is configured and supported, or via a full MediaSource reset.
   *
   * @param {shaka.util.ManifestParserUtils.ContentType} contentType
   * @param {string} mimeType
   * @param {string} codecs
   * @param {!Map<shaka.util.ManifestParserUtils.ContentType,
   *              shaka.extern.Stream>} streamsByType
   * @return {!Promise<boolean>} true if there was a codec switch,
   *   false otherwise.
   * @private
   */
  async codecSwitchIfNecessary_(contentType, mimeType, codecs, streamsByType) {
    const ContentType = shaka.util.ManifestParserUtils.ContentType;
    // Text never goes through a SourceBuffer, so there is nothing to switch.
    if (contentType == ContentType.TEXT) {
      return false;
    }
    const MimeUtils = shaka.util.MimeUtils;
    const currentCodec = MimeUtils.getNormalizedCodec(
        MimeUtils.getCodecs(this.sourceBufferTypes_.get(contentType)));
    const currentBasicType = MimeUtils.getBasicType(
        this.sourceBufferTypes_.get(contentType));
    const realInfo = this.getRealInfo_(contentType, mimeType, codecs);
    const transmuxer = realInfo.transmuxer;
    const transmuxerMuxed = realInfo.transmuxerMuxed;
    const newBasicType = realInfo.basicType;
    const newCodec = realInfo.codec;
    const newMimeType = realInfo.mimeType;
    // For muxed content, the audio SourceBuffer must also still match;
    // otherwise a switch is required even if this content type matches.
    let muxedContentCheck = true;
    if (transmuxerMuxed &&
        this.sourceBufferTypes_.has(ContentType.AUDIO)) {
      const muxedRealInfo =
          this.getRealInfo_(ContentType.AUDIO, mimeType, codecs);
      const muxedCurrentCodec = MimeUtils.getNormalizedCodec(
          MimeUtils.getCodecs(this.sourceBufferTypes_.get(ContentType.AUDIO)));
      const muxedCurrentBasicType = MimeUtils.getBasicType(
          this.sourceBufferTypes_.get(ContentType.AUDIO));
      muxedContentCheck = muxedCurrentCodec == muxedRealInfo.codec &&
        muxedCurrentBasicType == muxedRealInfo.basicType;
      // This probe transmuxer was only created for the comparison above.
      if (muxedRealInfo.transmuxer) {
        muxedRealInfo.transmuxer.destroy();
      }
    }
    // Current/new codecs base and basic type match then no need to switch
    if (currentCodec === newCodec && currentBasicType === newBasicType &&
        muxedContentCheck) {
      // Even without a switch, reconcile transmuxer ownership: drop it when no
      // longer needed, adopt a new one, or replace a different one.
      if (this.transmuxers_.has(contentType) && !transmuxer) {
        this.transmuxers_.get(contentType).destroy();
        this.transmuxers_.delete(contentType);
      } else if (!this.transmuxers_.has(contentType) && transmuxer) {
        this.transmuxers_.set(contentType, transmuxer);
      } else if (transmuxer) {
        // Compare if the transmuxer is different
        if (this.transmuxers_.has(contentType) &&
          this.transmuxers_.get(contentType).transmux != transmuxer.transmux) {
          this.transmuxers_.get(contentType).destroy();
          this.transmuxers_.delete(contentType);
          this.transmuxers_.set(contentType, transmuxer);
        } else {
          transmuxer.destroy();
        }
      }
      return false;
    }
    // changeType() cannot be used for muxed content getting a brand-new
    // transmuxer, nor when audio is muxed into the video buffer.
    let allowChangeType = true;
    if (this.needSplitMuxedContent_ || (transmuxerMuxed &&
      transmuxer && !this.transmuxers_.has(contentType))) {
      allowChangeType = false;
    }
    if (allowChangeType && this.config_.codecSwitchingStrategy ===
        shaka.config.CodecSwitchingStrategy.SMOOTH &&
          shaka.media.Capabilities.isChangeTypeSupported()) {
      await this.changeType(contentType, newMimeType, transmuxer);
    } else {
      // Full reset path: reset() re-creates SourceBuffers, so this transmuxer
      // instance is not needed.
      if (transmuxer) {
        transmuxer.destroy();
      }
      await this.reset(streamsByType);
    }
    return true;
  }
  2265. /**
  2266. * Returns true if it's necessary codec switch to load the new stream.
  2267. *
  2268. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  2269. * @param {string} refMimeType
  2270. * @param {string} refCodecs
  2271. * @return {boolean}
  2272. * @private
  2273. */
  2274. isCodecSwitchNecessary_(contentType, refMimeType, refCodecs) {
  2275. if (contentType == shaka.util.ManifestParserUtils.ContentType.TEXT) {
  2276. return false;
  2277. }
  2278. const MimeUtils = shaka.util.MimeUtils;
  2279. const currentCodec = MimeUtils.getNormalizedCodec(
  2280. MimeUtils.getCodecs(this.sourceBufferTypes_.get(contentType)));
  2281. const currentBasicType = MimeUtils.getBasicType(
  2282. this.sourceBufferTypes_.get(contentType));
  2283. let newMimeType = shaka.util.MimeUtils.getFullType(refMimeType, refCodecs);
  2284. let needTransmux = this.config_.forceTransmux;
  2285. if (!shaka.media.Capabilities.isTypeSupported(newMimeType) ||
  2286. (!this.sequenceMode_ &&
  2287. shaka.util.MimeUtils.RAW_FORMATS.includes(newMimeType))) {
  2288. needTransmux = true;
  2289. }
  2290. const newMimeTypeWithAllCodecs =
  2291. shaka.util.MimeUtils.getFullTypeWithAllCodecs(
  2292. refMimeType, refCodecs);
  2293. const TransmuxerEngine = shaka.transmuxer.TransmuxerEngine;
  2294. if (needTransmux) {
  2295. const transmuxerPlugin =
  2296. TransmuxerEngine.findTransmuxer(newMimeTypeWithAllCodecs);
  2297. if (transmuxerPlugin) {
  2298. const transmuxer = transmuxerPlugin();
  2299. newMimeType =
  2300. transmuxer.convertCodecs(contentType, newMimeTypeWithAllCodecs);
  2301. transmuxer.destroy();
  2302. }
  2303. }
  2304. const newCodec = MimeUtils.getNormalizedCodec(
  2305. MimeUtils.getCodecs(newMimeType));
  2306. const newBasicType = MimeUtils.getBasicType(newMimeType);
  2307. return currentCodec !== newCodec || currentBasicType !== newBasicType;
  2308. }
  2309. /**
  2310. * Returns true if it's necessary reset the media source to load the
  2311. * new stream.
  2312. *
  2313. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  2314. * @param {string} mimeType
  2315. * @param {string} codecs
  2316. * @return {boolean}
  2317. */
  2318. isResetMediaSourceNecessary(contentType, mimeType, codecs) {
  2319. if (!this.isCodecSwitchNecessary_(contentType, mimeType, codecs)) {
  2320. return false;
  2321. }
  2322. return this.config_.codecSwitchingStrategy !==
  2323. shaka.config.CodecSwitchingStrategy.SMOOTH ||
  2324. !shaka.media.Capabilities.isChangeTypeSupported() ||
  2325. this.needSplitMuxedContent_;
  2326. }
  /**
   * Update LCEVC Decoder object when ready for LCEVC Decode.
   * Simply stores the decoder reference (may be null to clear it).
   * @param {?shaka.lcevc.Dec} lcevcDec
   */
  updateLcevcDec(lcevcDec) {
    this.lcevcDec_ = lcevcDec;
  }
  2334. /**
  2335. * @param {string} mimeType
  2336. * @return {string}
  2337. * @private
  2338. */
  2339. addExtraFeaturesToMimeType_(mimeType) {
  2340. const extraFeatures = this.config_.addExtraFeaturesToSourceBuffer(mimeType);
  2341. const extendedType = mimeType + extraFeatures;
  2342. shaka.log.debug('Using full mime type', extendedType);
  2343. return extendedType;
  2344. }
  2345. };
/**
 * Internal reference to window.URL.createObjectURL function to avoid
 * compatibility issues with other libraries and frameworks such as React
 * Native. For use in unit tests only, not meant for external use.
 *
 * @type {function(?):string}
 */
shaka.media.MediaSourceEngine.createObjectURL = window.URL.createObjectURL;

/**
 * @typedef {{
 *   start: function(),
 *   p: !shaka.util.PublicPromise,
 *   uri: ?string
 * }}
 *
 * @summary An operation in queue.
 * @property {function()} start
 *   The function which starts the operation.
 * @property {!shaka.util.PublicPromise} p
 *   The PublicPromise which is associated with this operation.
 * @property {?string} uri
 *   A segment URI (if any) associated with this operation.
 */
shaka.media.MediaSourceEngine.Operation;

/**
 * The two SourceBuffer 'mode' values defined by Media Source Extensions.
 *
 * @enum {string}
 * @private
 */
shaka.media.MediaSourceEngine.SourceBufferMode_ = {
  SEQUENCE: 'sequence',
  SEGMENTS: 'segments',
};

/**
 * @typedef {{
 *   getKeySystem: function():?string,
 *   onMetadata: function(!Array<shaka.extern.ID3Metadata>, number, ?number),
 *   onEmsg: function(!shaka.extern.EmsgInfo),
 *   onEvent: function(!Event),
 *   onManifestUpdate: function()
 * }}
 *
 * @summary Player interface
 * @property {function():?string} getKeySystem
 *   Gets currently used key system or null if not used.
 * @property {function(
 *     !Array<shaka.extern.ID3Metadata>, number, ?number)} onMetadata
 *   Callback to use when metadata arrives.
 * @property {function(!shaka.extern.EmsgInfo)} onEmsg
 *   Callback to use when EMSG arrives.
 * @property {function(!Event)} onEvent
 *   Called when an event occurs that should be sent to the app.
 * @property {function()} onManifestUpdate
 *   Called when an embedded 'emsg' box should trigger a manifest update.
 */
shaka.media.MediaSourceEngine.PlayerInterface;