Source: lib/media/media_source_engine.js

  1. /*! @license
  2. * Shaka Player
  3. * Copyright 2016 Google LLC
  4. * SPDX-License-Identifier: Apache-2.0
  5. */
  6. goog.provide('shaka.media.MediaSourceEngine');
  7. goog.require('goog.asserts');
  8. goog.require('shaka.log');
  9. goog.require('shaka.config.CodecSwitchingStrategy');
  10. goog.require('shaka.device.DeviceFactory');
  11. goog.require('shaka.device.IDevice');
  12. goog.require('shaka.media.Capabilities');
  13. goog.require('shaka.media.ContentWorkarounds');
  14. goog.require('shaka.media.ClosedCaptionParser');
  15. goog.require('shaka.media.IClosedCaptionParser');
  16. goog.require('shaka.media.ManifestParser');
  17. goog.require('shaka.media.SegmentReference');
  18. goog.require('shaka.media.TimeRangesUtils');
  19. goog.require('shaka.text.TextEngine');
  20. goog.require('shaka.transmuxer.TransmuxerEngine');
  21. goog.require('shaka.util.BufferUtils');
  22. goog.require('shaka.util.Destroyer');
  23. goog.require('shaka.util.Dom');
  24. goog.require('shaka.util.Error');
  25. goog.require('shaka.util.EventManager');
  26. goog.require('shaka.util.FakeEvent');
  27. goog.require('shaka.util.Functional');
  28. goog.require('shaka.util.IDestroyable');
  29. goog.require('shaka.util.Id3Utils');
  30. goog.require('shaka.util.ManifestParserUtils');
  31. goog.require('shaka.util.MimeUtils');
  32. goog.require('shaka.util.Mp4BoxParsers');
  33. goog.require('shaka.util.Mp4Parser');
  34. goog.require('shaka.util.PublicPromise');
  35. goog.require('shaka.util.StreamUtils');
  36. goog.require('shaka.util.TimeUtils');
  37. goog.require('shaka.util.TsParser');
  38. goog.require('shaka.lcevc.Dec');
  39. /**
  40. * @summary
  41. * MediaSourceEngine wraps all operations on MediaSource and SourceBuffers.
  42. * All asynchronous operations return a Promise, and all operations are
  43. * internally synchronized and serialized as needed. Operations that can
  44. * be done in parallel will be done in parallel.
  45. *
  46. * @implements {shaka.util.IDestroyable}
  47. */
  48. shaka.media.MediaSourceEngine = class {
  /**
   * @param {HTMLMediaElement} video The video element, whose source is tied to
   *   MediaSource during the lifetime of the MediaSourceEngine.
   * @param {!shaka.extern.TextDisplayer} textDisplayer
   *   The text displayer that will be used with the text engine.
   *   MediaSourceEngine takes ownership of the displayer. When
   *   MediaSourceEngine is destroyed, it will destroy the displayer.
   * @param {!shaka.media.MediaSourceEngine.PlayerInterface} playerInterface
   *   Interface for common player methods.
   * @param {shaka.extern.MediaSourceConfiguration} config
   * @param {?shaka.lcevc.Dec} [lcevcDec] Optional - LCEVC Decoder Object
   */
  constructor(video, textDisplayer, playerInterface, config, lcevcDec) {
    /** @private {HTMLMediaElement} */
    this.video_ = video;

    /** @private {?shaka.media.MediaSourceEngine.PlayerInterface} */
    this.playerInterface_ = playerInterface;

    /** @private {?shaka.extern.MediaSourceConfiguration} */
    this.config_ = config;

    /** @private {shaka.extern.TextDisplayer} */
    this.textDisplayer_ = textDisplayer;

    /**
     * The active SourceBuffer for each content type (audio/video).
     * @private {!Map<shaka.util.ManifestParserUtils.ContentType, SourceBuffer>}
     */
    this.sourceBuffers_ = new Map();

    /**
     * The full MIME type each SourceBuffer was created with.
     * @private {!Map<shaka.util.ManifestParserUtils.ContentType, string>}
     */
    this.sourceBufferTypes_ = new Map();

    /**
     * Whether each content type's stream carries DRM info (set in
     * initSourceBuffer_).
     * @private {!Map<shaka.util.ManifestParserUtils.ContentType,
     *                boolean>}
     */
    this.expectedEncryption_ = new Map();

    /** @private {shaka.text.TextEngine} */
    this.textEngine_ = null;

    /** @private {boolean} */
    this.segmentRelativeVttTiming_ = false;

    // Optional LCEVC decoder; owned by Player, not by this class.
    /** @private {?shaka.lcevc.Dec} */
    this.lcevcDec_ = lcevcDec || null;

    /**
     * Per-content-type queues of pending SourceBuffer operations; operations
     * for a given type run serially.
     * @private {!Map<string, !Array<shaka.media.MediaSourceEngine.Operation>>}
     */
    this.queues_ = new Map();

    /** @private {shaka.util.EventManager} */
    this.eventManager_ = new shaka.util.EventManager();

    /**
     * @private {!Map<shaka.util.ManifestParserUtils.ContentType,
                      !shaka.extern.Transmuxer>} */
    this.transmuxers_ = new Map();

    /** @private {?shaka.media.IClosedCaptionParser} */
    this.captionParser_ = null;

    // Resolved by onSourceOpen_ once the MediaSource fires 'sourceopen'.
    /** @private {!shaka.util.PublicPromise} */
    this.mediaSourceOpen_ = new shaka.util.PublicPromise();

    // Object URL created for the MediaSource; revoked in onSourceOpen_.
    /** @private {string} */
    this.url_ = '';

    /** @private {boolean} */
    this.playbackHasBegun_ = false;

    /** @private {boolean} */
    this.streamingAllowed_ = true;

    /** @private {boolean} */
    this.usingRemotePlayback_ = false;

    /** @private {HTMLSourceElement} */
    this.source_ = null;

    /**
     * Fallback source element with direct media URI, used for casting
     * purposes.
     * @private {HTMLSourceElement}
     */
    this.secondarySource_ = null;

    // NOTE: createMediaSource reads several of the fields above
    // (eventManager_, config_, secondarySource_), so it must run after they
    // are initialized.
    /** @private {MediaSource} */
    this.mediaSource_ = this.createMediaSource(this.mediaSourceOpen_);

    /** @private {boolean} */
    this.reloadingMediaSource_ = false;

    /** @private {boolean} */
    this.playAfterReset_ = false;

    /** @type {!shaka.util.Destroyer} */
    this.destroyer_ = new shaka.util.Destroyer(() => this.doDestroy_());

    /** @private {boolean} */
    this.sequenceMode_ = false;

    /** @private {string} */
    this.manifestType_ = shaka.media.ManifestParser.UNKNOWN;

    /** @private {boolean} */
    this.ignoreManifestTimestampsInSegmentsMode_ = false;

    /** @private {boolean} */
    this.attemptTimestampOffsetCalculation_ = false;

    /** @private {!shaka.util.PublicPromise<number>} */
    this.textSequenceModeOffset_ = new shaka.util.PublicPromise();

    /** @private {boolean} */
    this.needSplitMuxedContent_ = false;

    /** @private {?number} */
    this.lastDuration_ = null;

    /**
     * @private {!Map<shaka.util.ManifestParserUtils.ContentType,
     *                !shaka.util.TsParser>}
     */
    this.tsParsers_ = new Map();

    /** @private {?number} */
    this.firstVideoTimestamp_ = null;

    /** @private {?number} */
    this.firstVideoReferenceStartTime_ = null;

    /** @private {?number} */
    this.firstAudioTimestamp_ = null;

    /** @private {?number} */
    this.firstAudioReferenceStartTime_ = null;

    /** @private {!shaka.util.PublicPromise<number>} */
    this.audioCompensation_ = new shaka.util.PublicPromise();

    // Track Remote Playback (e.g. AirPlay/casting) state: streaming into a
    // local MediaSource is not meaningful while a remote device renders the
    // media, so isStreamingAllowed() consults usingRemotePlayback_.
    if (this.video_.remote) {
      this.usingRemotePlayback_ = this.video_.remote.state != 'disconnected';

      this.eventManager_.listen(this.video_.remote, 'connect', () => {
        this.usingRemotePlayback_ = this.video_.remote.state != 'disconnected';
      });

      this.eventManager_.listen(this.video_.remote, 'connecting', () => {
        this.usingRemotePlayback_ = this.video_.remote.state != 'disconnected';
      });

      this.eventManager_.listen(this.video_.remote, 'disconnect', () => {
        this.usingRemotePlayback_ = this.video_.remote.state != 'disconnected';
      });
    }
  }
  /**
   * Create a MediaSource object, attach it to the video element, and return it.
   * Resolves the given promise when the MediaSource is ready.
   *
   * Replaced by unit tests.
   *
   * @param {!shaka.util.PublicPromise} p
   * @return {!MediaSource}
   */
  createMediaSource(p) {
    this.streamingAllowed_ = true;
    /** @type {!MediaSource} */
    let mediaSource;
    // Prefer ManagedMediaSource where available; the browser then signals
    // when it wants us to start/stop feeding data via the
    // 'startstreaming'/'endstreaming' events below.
    if (window.ManagedMediaSource) {
      if (!this.secondarySource_) {
        // NOTE(review): presumably required by ManagedMediaSource when no
        // direct-URI fallback <source> exists — confirm against platform docs.
        this.video_.disableRemotePlayback = true;
      }

      mediaSource = new ManagedMediaSource();

      this.eventManager_.listen(
          mediaSource, 'startstreaming', () => {
            shaka.log.info('MMS startstreaming');
            this.streamingAllowed_ = true;
          });

      this.eventManager_.listen(
          mediaSource, 'endstreaming', () => {
            shaka.log.info('MMS endstreaming');
            this.streamingAllowed_ = false;
          });
    } else {
      mediaSource = new MediaSource();
    }

    // Set up MediaSource on the video element.
    this.eventManager_.listenOnce(
        mediaSource, 'sourceopen', () => this.onSourceOpen_(p));

    // Correctly set when playback has begun.
    this.eventManager_.listenOnce(this.video_, 'playing', () => {
      this.playbackHasBegun_ = true;
    });

    // Store the object URL for releasing it later.
    this.url_ = shaka.media.MediaSourceEngine.createObjectURL(mediaSource);

    if (this.config_.useSourceElements) {
      // Attach through <source> children instead of video.src; remove any
      // previous source elements first, keeping the secondary (direct-URI)
      // source after the MediaSource one.
      this.video_.removeAttribute('src');
      if (this.source_) {
        this.video_.removeChild(this.source_);
      }
      if (this.secondarySource_) {
        this.video_.removeChild(this.secondarySource_);
      }
      this.source_ = shaka.util.Dom.createSourceElement(this.url_);
      this.video_.appendChild(this.source_);
      if (this.secondarySource_) {
        this.video_.appendChild(this.secondarySource_);
      }
      // Re-evaluate the <source> list.
      this.video_.load();
    } else {
      this.video_.src = this.url_;
    }

    return mediaSource;
  }
  228. /**
  229. * @param {string} uri
  230. * @param {string} mimeType
  231. */
  232. addSecondarySource(uri, mimeType) {
  233. if (!this.video_ || !window.ManagedMediaSource || !this.mediaSource_) {
  234. shaka.log.warning(
  235. 'Secondary source is used only with ManagedMediaSource');
  236. return;
  237. }
  238. if (!this.config_.useSourceElements) {
  239. return;
  240. }
  241. if (this.secondarySource_) {
  242. this.video_.removeChild(this.secondarySource_);
  243. }
  244. this.secondarySource_ = shaka.util.Dom.createSourceElement(uri, mimeType);
  245. this.video_.appendChild(this.secondarySource_);
  246. this.video_.disableRemotePlayback = false;
  247. }
  /**
   * Handles the MediaSource 'sourceopen' event: releases the object URL and
   * resolves the "media source open" promise.
   *
   * @param {shaka.util.PublicPromise} p
   * @private
   */
  onSourceOpen_(p) {
    goog.asserts.assert(this.url_, 'Must have object URL');

    // Release the object URL that was previously created, to prevent memory
    // leak.
    // createObjectURL creates a strong reference to the MediaSource object
    // inside the browser.  Setting the src of the video then creates another
    // reference within the video element.  revokeObjectURL will remove the
    // strong reference to the MediaSource object, and allow it to be
    // garbage-collected later.
    URL.revokeObjectURL(this.url_);
    p.resolve();
  }
  264. /**
  265. * Returns a map of MediaSource support for well-known types.
  266. *
  267. * @return {!Object<string, boolean>}
  268. */
  269. static probeSupport() {
  270. const testMimeTypes = [
  271. // MP4 types
  272. 'video/mp4; codecs="avc1.42E01E"',
  273. 'video/mp4; codecs="avc3.42E01E"',
  274. 'video/mp4; codecs="hev1.1.6.L93.90"',
  275. 'video/mp4; codecs="hvc1.1.6.L93.90"',
  276. 'video/mp4; codecs="hev1.2.4.L153.B0"; eotf="smpte2084"', // HDR HEVC
  277. 'video/mp4; codecs="hvc1.2.4.L153.B0"; eotf="smpte2084"', // HDR HEVC
  278. 'video/mp4; codecs="vp9"',
  279. 'video/mp4; codecs="vp09.00.10.08"',
  280. 'video/mp4; codecs="av01.0.01M.08"',
  281. 'video/mp4; codecs="dvh1.05.01"',
  282. 'video/mp4; codecs="dvh1.20.01"',
  283. 'audio/mp4; codecs="mp4a.40.2"',
  284. 'audio/mp4; codecs="ac-3"',
  285. 'audio/mp4; codecs="ec-3"',
  286. 'audio/mp4; codecs="ac-4.02.01.01"',
  287. 'audio/mp4; codecs="opus"',
  288. 'audio/mp4; codecs="flac"',
  289. 'audio/mp4; codecs="dtsc"', // DTS Digital Surround
  290. 'audio/mp4; codecs="dtse"', // DTS Express
  291. 'audio/mp4; codecs="dtsx"', // DTS:X
  292. 'audio/mp4; codecs="apac.31.00"',
  293. // WebM types
  294. 'video/webm; codecs="vp8"',
  295. 'video/webm; codecs="vp9"',
  296. 'video/webm; codecs="vp09.00.10.08"',
  297. 'audio/webm; codecs="vorbis"',
  298. 'audio/webm; codecs="opus"',
  299. // MPEG2 TS types (video/ is also used for audio: https://bit.ly/TsMse)
  300. 'video/mp2t; codecs="avc1.42E01E"',
  301. 'video/mp2t; codecs="avc3.42E01E"',
  302. 'video/mp2t; codecs="hvc1.1.6.L93.90"',
  303. 'video/mp2t; codecs="mp4a.40.2"',
  304. 'video/mp2t; codecs="ac-3"',
  305. 'video/mp2t; codecs="ec-3"',
  306. // WebVTT types
  307. 'text/vtt',
  308. 'application/mp4; codecs="wvtt"',
  309. // TTML types
  310. 'application/ttml+xml',
  311. 'application/mp4; codecs="stpp"',
  312. // Containerless types
  313. ...shaka.util.MimeUtils.RAW_FORMATS,
  314. ];
  315. const support = {};
  316. const device = shaka.device.DeviceFactory.getDevice();
  317. for (const type of testMimeTypes) {
  318. if (shaka.text.TextEngine.isTypeSupported(type)) {
  319. support[type] = true;
  320. } else if (device.supportsMediaSource()) {
  321. support[type] = shaka.media.Capabilities.isTypeSupported(type) ||
  322. shaka.transmuxer.TransmuxerEngine.isSupported(type);
  323. } else {
  324. support[type] = device.supportsMediaType(type);
  325. }
  326. const basicType = type.split(';')[0];
  327. support[basicType] = support[basicType] || support[type];
  328. }
  329. return support;
  330. }
  /**
   * Destroys this instance.  Delegates to the Destroyer so that destruction
   * runs exactly once (see doDestroy_ for the actual teardown).
   * @override
   */
  destroy() {
    return this.destroyer_.destroy();
  }
  /**
   * Actual teardown, invoked exactly once via this.destroyer_ (see destroy()).
   * Waits for any in-progress SourceBuffer operation per queue, rejects the
   * rest, destroys the text engine and transmuxers, detaches from the media
   * element, and nulls out references.
   * @private
   */
  async doDestroy_() {
    const Functional = shaka.util.Functional;

    const cleanup = [];

    for (const [key, q] of this.queues_) {
      // Make a local copy of the queue and the first item.
      const inProgress = q[0];
      const contentType = /** @type {string} */(key);

      // Drop everything else out of the original queue.
      this.queues_.set(contentType, q.slice(0, 1));

      // We will wait for this item to complete/fail.
      if (inProgress) {
        cleanup.push(inProgress.p.catch(Functional.noop));
      }

      // The rest will be rejected silently if possible.
      for (const item of q.slice(1)) {
        item.p.reject(shaka.util.Destroyer.destroyedError());
      }
    }

    if (this.textEngine_) {
      cleanup.push(this.textEngine_.destroy());
    }

    // Wait for pending operations and text-engine teardown before releasing
    // anything they might still touch.
    await Promise.all(cleanup);

    for (const transmuxer of this.transmuxers_.values()) {
      transmuxer.destroy();
    }

    if (this.eventManager_) {
      this.eventManager_.release();
      this.eventManager_ = null;
    }

    if (this.video_ && this.secondarySource_) {
      this.video_.removeChild(this.secondarySource_);
    }

    if (this.video_ && this.source_) {
      // "unload" the video element.
      this.video_.removeChild(this.source_);
      this.video_.load();
      this.video_.disableRemotePlayback = false;
    }

    this.video_ = null;
    this.source_ = null;
    this.secondarySource_ = null;
    this.config_ = null;
    this.mediaSource_ = null;
    this.textEngine_ = null;
    this.textDisplayer_ = null;
    this.sourceBuffers_.clear();
    this.expectedEncryption_.clear();
    this.transmuxers_.clear();
    this.captionParser_ = null;

    // In debug builds, verify the queue-draining above actually left every
    // queue with at most the (now settled) in-progress item.
    if (goog.DEBUG) {
      for (const [contentType, q] of this.queues_) {
        goog.asserts.assert(
            q.length == 0,
            contentType + ' queue should be empty after destroy!');
      }
    }

    this.queues_.clear();

    // This object is owned by Player
    this.lcevcDec_ = null;

    this.tsParsers_.clear();
    this.playerInterface_ = null;
  }
  /**
   * @return {!Promise} Resolved when MediaSource is open and attached to the
   *   media element.  This process is actually initiated by the constructor
   *   (via createMediaSource); this method only exposes the promise.
   */
  open() {
    return this.mediaSourceOpen_;
  }
  /**
   * Initialize MediaSourceEngine.
   *
   * Note that it is not valid to call this multiple times, except to add or
   * reinitialize text streams.
   *
   * @param {!Map<shaka.util.ManifestParserUtils.ContentType,
   *              shaka.extern.Stream>} streamsByType
   *   A map of content types to streams.
   * @param {boolean=} sequenceMode
   *   If true, the media segments are appended to the SourceBuffer in strict
   *   sequence.
   * @param {string=} manifestType
   *   Indicates the type of the manifest.
   * @param {boolean=} ignoreManifestTimestampsInSegmentsMode
   *   If true, don't adjust the timestamp offset to account for manifest
   *   segment durations being out of sync with segment durations.  In other
   *   words, assume that there are no gaps in the segments when appending
   *   to the SourceBuffer, even if the manifest and segment times disagree.
   *
   * @return {!Promise}
   */
  async init(streamsByType, sequenceMode=false,
      manifestType=shaka.media.ManifestParser.UNKNOWN,
      ignoreManifestTimestampsInSegmentsMode=false) {
    const ContentType = shaka.util.ManifestParserUtils.ContentType;

    await this.mediaSourceOpen_;
    // The MediaSource may have ended or closed while we awaited (e.g. after
    // an earlier session); if so, build a fresh one and wait for it to open.
    if (this.ended() || this.closed()) {
      shaka.log.alwaysError('Expected MediaSource to be open during init(); ' +
          'reopening the media source.');
      this.mediaSourceOpen_ = new shaka.util.PublicPromise();
      this.mediaSource_ = this.createMediaSource(this.mediaSourceOpen_);
      await this.mediaSourceOpen_;
    }

    this.sequenceMode_ = sequenceMode;
    this.manifestType_ = manifestType;
    this.ignoreManifestTimestampsInSegmentsMode_ =
        ignoreManifestTimestampsInSegmentsMode;

    // Timestamp-offset calculation from segment data is only attempted for
    // HLS in segments (non-sequence) mode when manifest timestamps are
    // trusted.
    this.attemptTimestampOffsetCalculation_ = !this.sequenceMode_ &&
        this.manifestType_ == shaka.media.ManifestParser.HLS &&
        !this.ignoreManifestTimestampsInSegmentsMode_;

    // Reset per-session timestamp state.
    this.tsParsers_.clear();
    this.firstVideoTimestamp_ = null;
    this.firstVideoReferenceStartTime_ = null;
    this.firstAudioTimestamp_ = null;
    this.firstAudioReferenceStartTime_ = null;
    this.audioCompensation_ = new shaka.util.PublicPromise();

    for (const contentType of streamsByType.keys()) {
      const stream = streamsByType.get(contentType);
      // May set needSplitMuxedContent_ (see initSourceBuffer_), which is why
      // it is checked immediately afterwards.
      this.initSourceBuffer_(contentType, stream, stream.codecs);
      if (this.needSplitMuxedContent_) {
        this.queues_.set(ContentType.AUDIO, []);
        this.queues_.set(ContentType.VIDEO, []);
      } else {
        this.queues_.set(contentType, []);
      }
    }
    const audio = streamsByType.get(ContentType.AUDIO);
    if (audio && audio.isAudioMuxedInVideo) {
      this.needSplitMuxedContent_ = true;
    }
  }
  /**
   * Initialize a specific SourceBuffer.
   *
   * For TEXT, delegates to reinitText.  Otherwise decides whether the content
   * must be transmuxed; muxed audio+video content is split into two recursive
   * calls (one per type), and raw/unsupported formats get a transmuxer plugin.
   * Finally creates the SourceBuffer and wires up its event handlers.
   *
   * @param {shaka.util.ManifestParserUtils.ContentType} contentType
   * @param {shaka.extern.Stream} stream
   * @param {string} codecs
   * @private
   */
  initSourceBuffer_(contentType, stream, codecs) {
    const ContentType = shaka.util.ManifestParserUtils.ContentType;

    if (contentType == ContentType.AUDIO && codecs) {
      codecs = shaka.util.StreamUtils.getCorrectAudioCodecs(
          codecs, stream.mimeType);
    }

    let mimeType = shaka.util.MimeUtils.getFullType(
        stream.mimeType, codecs);
    if (contentType == ContentType.TEXT) {
      this.reinitText(mimeType, this.sequenceMode_, stream.external);
    } else {
      // Transmux when forced by config, when MSE can't play the type
      // directly, or for raw (containerless) formats outside sequence mode.
      let needTransmux = this.config_.forceTransmux;
      if (!shaka.media.Capabilities.isTypeSupported(mimeType) ||
          (!this.sequenceMode_ &&
          shaka.util.MimeUtils.RAW_FORMATS.includes(mimeType))) {
        needTransmux = true;
      }
      const mimeTypeWithAllCodecs =
          shaka.util.MimeUtils.getFullTypeWithAllCodecs(
              stream.mimeType, codecs);
      if (needTransmux) {
        const audioCodec = shaka.util.ManifestParserUtils.guessCodecsSafe(
            ContentType.AUDIO, (codecs || '').split(','));
        const videoCodec = shaka.util.ManifestParserUtils.guessCodecsSafe(
            ContentType.VIDEO, (codecs || '').split(','));
        if (audioCodec && videoCodec) {
          // Muxed audio+video: split into one SourceBuffer per type.
          this.needSplitMuxedContent_ = true;
          this.initSourceBuffer_(ContentType.AUDIO, stream, audioCodec);
          this.initSourceBuffer_(ContentType.VIDEO, stream, videoCodec);
          return;
        }
        const transmuxerPlugin = shaka.transmuxer.TransmuxerEngine
            .findTransmuxer(mimeTypeWithAllCodecs);
        if (transmuxerPlugin) {
          const transmuxer = transmuxerPlugin();
          this.transmuxers_.set(contentType, transmuxer);
          // The SourceBuffer is created with the transmuxer's output type.
          mimeType =
              transmuxer.convertCodecs(contentType, mimeTypeWithAllCodecs);
        }
      }

      const type = this.addExtraFeaturesToMimeType_(mimeType);

      this.destroyer_.ensureNotDestroyed();

      let sourceBuffer;

      try {
        sourceBuffer = this.mediaSource_.addSourceBuffer(type);
      } catch (exception) {
        throw new shaka.util.Error(
            shaka.util.Error.Severity.CRITICAL,
            shaka.util.Error.Category.MEDIA,
            shaka.util.Error.Code.MEDIA_SOURCE_OPERATION_THREW,
            exception,
            'The mediaSource_ status was ' + this.mediaSource_.readyState +
            ' expected \'open\'',
            null);
      }

      if (this.sequenceMode_) {
        sourceBuffer.mode =
            shaka.media.MediaSourceEngine.SourceBufferMode_.SEQUENCE;
      }

      this.eventManager_.listen(
          sourceBuffer, 'error',
          () => this.onError_(contentType));
      this.eventManager_.listen(
          sourceBuffer, 'updateend',
          () => this.onUpdateEnd_(contentType));
      this.sourceBuffers_.set(contentType, sourceBuffer);
      this.sourceBufferTypes_.set(contentType, mimeType);
      this.expectedEncryption_.set(contentType, !!stream.drmInfos.length);
    }
  }
  546. /**
  547. * Called by the Player to provide an updated configuration any time it
  548. * changes. Must be called at least once before init().
  549. *
  550. * @param {shaka.extern.MediaSourceConfiguration} config
  551. */
  552. configure(config) {
  553. this.config_ = config;
  554. if (this.textEngine_) {
  555. this.textEngine_.setModifyCueCallback(config.modifyCueCallback);
  556. }
  557. }
  558. /**
  559. * Indicate if the streaming is allowed by MediaSourceEngine.
  560. * If we using MediaSource we always returns true.
  561. *
  562. * @return {boolean}
  563. */
  564. isStreamingAllowed() {
  565. return this.streamingAllowed_ && !this.usingRemotePlayback_ &&
  566. !this.reloadingMediaSource_;
  567. }
  568. /**
  569. * Reinitialize the TextEngine for a new text type.
  570. * @param {string} mimeType
  571. * @param {boolean} sequenceMode
  572. * @param {boolean} external
  573. */
  574. reinitText(mimeType, sequenceMode, external) {
  575. if (!this.textEngine_) {
  576. this.textEngine_ = new shaka.text.TextEngine(this.textDisplayer_);
  577. if (this.textEngine_) {
  578. this.textEngine_.setModifyCueCallback(this.config_.modifyCueCallback);
  579. }
  580. }
  581. this.textEngine_.initParser(mimeType, sequenceMode,
  582. external || this.segmentRelativeVttTiming_, this.manifestType_);
  583. }
  584. /**
  585. * @return {boolean} True if the MediaSource is in an "ended" state, or if the
  586. * object has been destroyed.
  587. */
  588. ended() {
  589. if (this.reloadingMediaSource_) {
  590. return false;
  591. }
  592. return this.mediaSource_ ? this.mediaSource_.readyState == 'ended' : true;
  593. }
  594. /**
  595. * @return {boolean} True if the MediaSource is in an "closed" state, or if
  596. * the object has been destroyed.
  597. */
  598. closed() {
  599. if (this.reloadingMediaSource_) {
  600. return false;
  601. }
  602. return this.mediaSource_ ? this.mediaSource_.readyState == 'closed' : true;
  603. }
  604. /**
  605. * Gets the first timestamp in buffer for the given content type.
  606. *
  607. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  608. * @return {?number} The timestamp in seconds, or null if nothing is buffered.
  609. */
  610. bufferStart(contentType) {
  611. if (!this.sourceBuffers_.size) {
  612. return null;
  613. }
  614. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  615. if (contentType == ContentType.TEXT) {
  616. return this.textEngine_.bufferStart();
  617. }
  618. return shaka.media.TimeRangesUtils.bufferStart(
  619. this.getBuffered_(contentType));
  620. }
  621. /**
  622. * Gets the last timestamp in buffer for the given content type.
  623. *
  624. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  625. * @return {?number} The timestamp in seconds, or null if nothing is buffered.
  626. */
  627. bufferEnd(contentType) {
  628. if (!this.sourceBuffers_.size) {
  629. return null;
  630. }
  631. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  632. if (contentType == ContentType.TEXT) {
  633. return this.textEngine_.bufferEnd();
  634. }
  635. return shaka.media.TimeRangesUtils.bufferEnd(
  636. this.getBuffered_(contentType));
  637. }
  638. /**
  639. * Determines if the given time is inside the buffered range of the given
  640. * content type.
  641. *
  642. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  643. * @param {number} time Playhead time
  644. * @return {boolean}
  645. */
  646. isBuffered(contentType, time) {
  647. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  648. if (contentType == ContentType.TEXT) {
  649. return this.textEngine_.isBuffered(time);
  650. } else {
  651. const buffered = this.getBuffered_(contentType);
  652. return shaka.media.TimeRangesUtils.isBuffered(buffered, time);
  653. }
  654. }
  655. /**
  656. * Computes how far ahead of the given timestamp is buffered for the given
  657. * content type.
  658. *
  659. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  660. * @param {number} time
  661. * @return {number} The amount of time buffered ahead in seconds.
  662. */
  663. bufferedAheadOf(contentType, time) {
  664. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  665. if (contentType == ContentType.TEXT) {
  666. return this.textEngine_.bufferedAheadOf(time);
  667. } else {
  668. const buffered = this.getBuffered_(contentType);
  669. return shaka.media.TimeRangesUtils.bufferedAheadOf(buffered, time);
  670. }
  671. }
  672. /**
  673. * Returns info about what is currently buffered.
  674. * @return {shaka.extern.BufferedInfo}
  675. */
  676. getBufferedInfo() {
  677. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  678. const TimeRangesUtils = shaka.media.TimeRangesUtils;
  679. const info = {
  680. total: this.reloadingMediaSource_ ? [] :
  681. TimeRangesUtils.getBufferedInfo(this.video_.buffered),
  682. audio:
  683. TimeRangesUtils.getBufferedInfo(this.getBuffered_(ContentType.AUDIO)),
  684. video:
  685. TimeRangesUtils.getBufferedInfo(this.getBuffered_(ContentType.VIDEO)),
  686. text: [],
  687. };
  688. if (this.textEngine_) {
  689. const start = this.textEngine_.bufferStart();
  690. const end = this.textEngine_.bufferEnd();
  691. if (start != null && end != null) {
  692. info.text.push({start: start, end: end});
  693. }
  694. }
  695. return info;
  696. }
  697. /**
  698. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  699. * @return {TimeRanges} The buffered ranges for the given content type, or
  700. * null if the buffered ranges could not be obtained.
  701. * @private
  702. */
  703. getBuffered_(contentType) {
  704. if (this.reloadingMediaSource_ || this.usingRemotePlayback_) {
  705. return null;
  706. }
  707. try {
  708. return this.sourceBuffers_.get(contentType).buffered;
  709. } catch (exception) {
  710. if (this.sourceBuffers_.has(contentType)) {
  711. // Note: previous MediaSource errors may cause access to |buffered| to
  712. // throw.
  713. shaka.log.error('failed to get buffered range for ' + contentType,
  714. exception);
  715. }
  716. return null;
  717. }
  718. }
  /**
   * Create a new closed caption parser.  This will ONLY be replaced by tests
   * as a way to inject fake closed caption parser instances.
   * (It is an instance method, rather than inlined at the call site, so tests
   * can override it.)
   *
   * @param {string} mimeType
   * @return {!shaka.media.IClosedCaptionParser}
   */
  getCaptionParser(mimeType) {
    return new shaka.media.ClosedCaptionParser(mimeType);
  }
  /**
   * Extracts a media timestamp from segment data and dispatches any embedded
   * timed metadata (ID3 / emsg) to the player.  This method is only public
   * for testing.
   *
   * Three format families are handled:
   *  - Raw (containerless) formats: scan ID3 frames, derive the Apple
   *    transport-stream timestamp when present, and dispatch the frames as
   *    ID3 metadata.
   *  - MP4: parse emsg/prft/tfdt boxes; the tfdt baseMediaDecodeTime gives
   *    the media start time.
   *  - MPEG-2 TS (detected by probing; not mp4/webm): use TsParser for both
   *    the start time and metadata.
   *
   * @param {shaka.util.ManifestParserUtils.ContentType} contentType
   * @param {!BufferSource} data
   * @param {!shaka.media.SegmentReference} reference The segment reference
   *   we are appending
   * @param {shaka.extern.Stream} stream
   * @param {!string} mimeType
   * @return {{timestamp: ?number, metadata: !Array<shaka.extern.ID3Metadata>}}
   */
  getTimestampAndDispatchMetadata(contentType, data, reference, stream,
      mimeType) {
    let timestamp = null;
    let metadata = [];

    const uint8ArrayData = shaka.util.BufferUtils.toUint8(data);
    if (shaka.util.MimeUtils.RAW_FORMATS.includes(mimeType)) {
      // Raw formats: timestamps and metadata ride in ID3 frames.
      const frames = shaka.util.Id3Utils.getID3Frames(uint8ArrayData);
      if (frames.length && reference) {
        const metadataTimestamp = frames.find((frame) => {
          return frame.description ===
              'com.apple.streaming.transportStreamTimestamp';
        });
        if (metadataTimestamp && typeof metadataTimestamp.data == 'number') {
          // The frame carries milliseconds; convert to seconds.
          timestamp = Math.round(metadataTimestamp.data) / 1000;
        }
        /** @private {shaka.extern.ID3Metadata} */
        const id3Metadata = {
          cueTime: reference.startTime,
          data: uint8ArrayData,
          frames: frames,
          dts: reference.startTime,
          pts: reference.startTime,
        };
        this.playerInterface_.onMetadata(
            [id3Metadata], /* offset= */ 0, reference.endTime);
      }
    } else if (mimeType.includes('/mp4') &&
        reference &&
        reference.initSegmentReference &&
        reference.initSegmentReference.timescale) {
      const timescale = reference.initSegmentReference.timescale;
      if (!isNaN(timescale)) {
        // emsg parsing is enabled if the stream declares scheme IDs or the
        // config asks for all emsg boxes.
        const hasEmsg = ((stream.emsgSchemeIdUris != null &&
            stream.emsgSchemeIdUris.length > 0) ||
            this.config_.dispatchAllEmsgBoxes);
        const Mp4Parser = shaka.util.Mp4Parser;
        let startTime = 0;
        let parsedMedia = false;
        const parser = new Mp4Parser();
        if (hasEmsg) {
          parser.fullBox('emsg', (box) =>
              this.parseEMSG_(reference, stream.emsgSchemeIdUris, box));
        }
        parser.fullBox('prft', (box) => this.parsePrft_(timescale, box))
            .box('moof', Mp4Parser.children)
            .box('traf', Mp4Parser.children)
            .fullBox('tfdt', (box) => {
              // Only the first tfdt is needed for the start time.
              if (!parsedMedia) {
                goog.asserts.assert(
                    box.version == 0 || box.version == 1,
                    'TFDT version can only be 0 or 1');
                const parsed = shaka.util.Mp4BoxParsers.parseTFDTInaccurate(
                    box.reader, box.version);
                startTime = parsed.baseMediaDecodeTime / timescale;
                parsedMedia = true;
                // If we don't also need emsg boxes, stop parsing early.
                if (!hasEmsg) {
                  box.parser.stop();
                }
              }
            }).parse(data, /* partialOkay= */ true);
        if (parsedMedia && reference.timestampOffset == 0) {
          timestamp = startTime;
        }
      }
    } else if (!mimeType.includes('/mp4') && !mimeType.includes('/webm') &&
        shaka.util.TsParser.probe(uint8ArrayData)) {
      // MPEG-2 TS: one parser per content type, reset for each segment.
      if (!this.tsParsers_.has(contentType)) {
        this.tsParsers_.set(contentType, new shaka.util.TsParser());
      }
      const tsParser = this.tsParsers_.get(contentType);
      tsParser.clearData();
      tsParser.setDiscontinuitySequence(reference.discontinuitySequence);
      tsParser.parse(uint8ArrayData);
      const startTime = tsParser.getStartTime(contentType);
      if (startTime != null) {
        timestamp = startTime;
      }
      metadata = tsParser.getMetadata();
    }
    return {timestamp, metadata};
  }
  /**
   * Parse the EMSG box from an MP4 container and dispatch the contained
   * event(s) to the application.
   *
   * @param {!shaka.media.SegmentReference} reference
   * @param {?Array<string>} emsgSchemeIdUris Array of emsg
   *     scheme_id_uri for which emsg boxes should be parsed.
   * @param {!shaka.extern.ParsedBox} box
   * @private
   * https://dashif-documents.azurewebsites.net/Events/master/event.html#emsg-format
   * aligned(8) class DASHEventMessageBox
   *    extends FullBox('emsg', version, flags = 0){
   * if (version==0) {
   *   string scheme_id_uri;
   *   string value;
   *   unsigned int(32) timescale;
   *   unsigned int(32) presentation_time_delta;
   *   unsigned int(32) event_duration;
   *   unsigned int(32) id;
   * } else if (version==1) {
   *   unsigned int(32) timescale;
   *   unsigned int(64) presentation_time;
   *   unsigned int(32) event_duration;
   *   unsigned int(32) id;
   *   string scheme_id_uri;
   *   string value;
   * }
   * unsigned int(8) message_data[];
   */
  parseEMSG_(reference, emsgSchemeIdUris, box) {
    let timescale;
    let id;
    let eventDuration;
    let schemeId;
    let startTime;
    let presentationTimeDelta;
    let value;
    if (box.version === 0) {
      // Version 0: strings come first, and the event time is a delta
      // relative to the segment's start time.
      schemeId = box.reader.readTerminatedString();
      value = box.reader.readTerminatedString();
      timescale = box.reader.readUint32();
      presentationTimeDelta = box.reader.readUint32();
      eventDuration = box.reader.readUint32();
      id = box.reader.readUint32();
      startTime = reference.startTime + (presentationTimeDelta / timescale);
    } else {
      // Version 1: the event time is an absolute 64-bit presentation time;
      // derive the delta from the reference for the EmsgInfo payload.
      timescale = box.reader.readUint32();
      const pts = box.reader.readUint64();
      startTime = (pts / timescale) + reference.timestampOffset;
      presentationTimeDelta = startTime - reference.startTime;
      eventDuration = box.reader.readUint32();
      id = box.reader.readUint32();
      schemeId = box.reader.readTerminatedString();
      value = box.reader.readTerminatedString();
    }
    // Whatever remains in the box is the raw message payload.
    const messageData = box.reader.readBytes(
        box.reader.getLength() - box.reader.getPosition());
    // See DASH sec. 5.10.3.3.1
    // If a DASH client detects an event message box with a scheme that is not
    // defined in MPD, the client is expected to ignore it.
    if ((emsgSchemeIdUris && emsgSchemeIdUris.includes(schemeId)) ||
        this.config_.dispatchAllEmsgBoxes) {
      // See DASH sec. 5.10.4.1
      // A special scheme in DASH used to signal manifest updates.
      if (schemeId == 'urn:mpeg:dash:event:2012') {
        this.playerInterface_.onManifestUpdate();
      } else {
        // All other schemes are dispatched as a general 'emsg' event.
        const endTime = startTime + (eventDuration / timescale);
        /** @type {shaka.extern.EmsgInfo} */
        const emsg = {
          startTime: startTime,
          endTime: endTime,
          schemeIdUri: schemeId,
          value: value,
          timescale: timescale,
          presentationTimeDelta: presentationTimeDelta,
          eventDuration: eventDuration,
          id: id,
          messageData: messageData,
        };
        // Dispatch an event to notify the application about the emsg box.
        const eventName = shaka.util.FakeEvent.EventName.Emsg;
        const data = (new Map()).set('detail', emsg);
        const event = new shaka.util.FakeEvent(eventName, data);
        // A user can call preventDefault() on a cancelable event.
        event.cancelable = true;
        // NOTE(review): |event| is constructed but never dispatched here; only
        // onEmsg(emsg) is invoked below. Confirm whether dispatching the
        // FakeEvent was intentionally delegated to onEmsg or this is dead
        // code.
        this.playerInterface_.onEmsg(emsg);
        // Additionally, ID3 events generate a 'metadata' event.  This is a
        // pre-parsed version of the metadata blob already dispatched in the
        // 'emsg' event.
        if (schemeId == 'https://aomedia.org/emsg/ID3' ||
            schemeId == 'https://developer.apple.com/streaming/emsg-id3') {
          // See https://aomediacodec.github.io/id3-emsg/
          const frames = shaka.util.Id3Utils.getID3Frames(messageData);
          if (frames.length) {
            /** @private {shaka.extern.ID3Metadata} */
            const metadata = {
              cueTime: startTime,
              data: messageData,
              frames: frames,
              dts: startTime,
              pts: startTime,
            };
            this.playerInterface_.onMetadata(
                [metadata], /* offset= */ 0, endTime);
          }
        }
      }
    }
  }
  931. /**
  932. * Parse PRFT box.
  933. * @param {number} timescale
  934. * @param {!shaka.extern.ParsedBox} box
  935. * @private
  936. */
  937. parsePrft_(timescale, box) {
  938. goog.asserts.assert(
  939. box.version == 0 || box.version == 1,
  940. 'PRFT version can only be 0 or 1');
  941. const parsed = shaka.util.Mp4BoxParsers.parsePRFTInaccurate(
  942. box.reader, box.version);
  943. const wallClockTime = shaka.util.TimeUtils.convertNtp(parsed.ntpTimestamp);
  944. const programStartDate = new Date(wallClockTime -
  945. (parsed.mediaTime / timescale) * 1000);
  946. /** @type {shaka.extern.ProducerReferenceTime} */
  947. const prftInfo = {
  948. wallClockTime,
  949. programStartDate,
  950. };
  951. const eventName = shaka.util.FakeEvent.EventName.Prft;
  952. const data = (new Map()).set('detail', prftInfo);
  953. const event = new shaka.util.FakeEvent(
  954. eventName, data);
  955. this.playerInterface_.onEvent(event);
  956. }
  /**
   * Enqueue an operation to append data to the SourceBuffer.
   * Start and end times are needed for TextEngine, but not for MediaSource.
   * Start and end times may be null for initialization segments; if present
   * they are relative to the presentation timeline.
   *
   * @param {shaka.util.ManifestParserUtils.ContentType} contentType
   * @param {!BufferSource} data
   * @param {?shaka.media.SegmentReference} reference The segment reference
   *     we are appending, or null for init segments
   * @param {shaka.extern.Stream} stream
   * @param {?boolean} hasClosedCaptions True if the buffer contains CEA closed
   *     captions
   * @param {boolean=} seeked True if we just seeked
   * @param {boolean=} adaptation True if we just automatically switched active
   *     variant(s).
   * @param {boolean=} isChunkedData True if we add to the buffer from a
   *     chunked (partial) read of the segment.
   * @param {boolean=} fromSplit True when this call is one half of a muxed
   *     audio+video split append (see needSplitMuxedContent_).
   * @param {number=} continuityTimeline an optional continuity timeline
   * @return {!Promise}
   */
  async appendBuffer(
      contentType, data, reference, stream, hasClosedCaptions, seeked = false,
      adaptation = false, isChunkedData = false, fromSplit = false,
      continuityTimeline) {
    const ContentType = shaka.util.ManifestParserUtils.ContentType;
    // Text goes to TextEngine, not to a SourceBuffer.
    if (contentType == ContentType.TEXT) {
      if (this.manifestType_ == shaka.media.ManifestParser.HLS) {
        // This won't be known until the first video segment is appended.
        const offset = await this.textSequenceModeOffset_;
        this.textEngine_.setTimestampOffset(offset);
      }
      await this.textEngine_.appendBuffer(
          data,
          reference ? reference.startTime : null,
          reference ? reference.endTime : null,
          reference ? reference.getUris()[0] : null);
      return;
    }
    // Muxed content with separate audio/video SourceBuffers: append the same
    // bytes to both, marking the calls so they don't split again.
    // NOTE(review): continuityTimeline is not forwarded to these recursive
    // calls; confirm whether caption-parser init needs it on the split path.
    if (!fromSplit && this.needSplitMuxedContent_) {
      await this.appendBuffer(ContentType.AUDIO, data, reference, stream,
          hasClosedCaptions, seeked, adaptation, isChunkedData,
          /* fromSplit= */ true);
      await this.appendBuffer(ContentType.VIDEO, data, reference, stream,
          hasClosedCaptions, seeked, adaptation, isChunkedData,
          /* fromSplit= */ true);
      return;
    }
    if (!this.sourceBuffers_.has(contentType)) {
      shaka.log.warning('Attempted to restore a non-existent source buffer');
      return;
    }
    let timestampOffset = this.sourceBuffers_.get(contentType).timestampOffset;
    let mimeType = this.sourceBufferTypes_.get(contentType);
    if (this.transmuxers_.has(contentType)) {
      // Timestamp parsing below must see the pre-transmux container type.
      mimeType = this.transmuxers_.get(contentType).getOriginalMimeType();
    }
    if (reference) {
      // Extract the media timestamp from the segment itself (and any inband
      // metadata) so we can compute an accurate timestampOffset.
      const {timestamp, metadata} = this.getTimestampAndDispatchMetadata(
          contentType, data, reference, stream, mimeType);
      if (timestamp != null) {
        // Record the first video timestamp and, once both A/V firsts are
        // known, resolve the audio compensation used to align them.
        if (this.firstVideoTimestamp_ == null &&
            contentType == ContentType.VIDEO) {
          this.firstVideoTimestamp_ = timestamp;
          this.firstVideoReferenceStartTime_ = reference.startTime;
          if (this.firstAudioTimestamp_ != null) {
            let compensation = 0;
            // Only apply compensation if video and audio segment startTime
            // match, to avoid introducing sync issues.
            if (this.firstVideoReferenceStartTime_ ==
                this.firstAudioReferenceStartTime_) {
              compensation =
                  this.firstVideoTimestamp_ - this.firstAudioTimestamp_;
            }
            this.audioCompensation_.resolve(compensation);
          }
        }
        // Same bookkeeping for the first audio timestamp.
        if (this.firstAudioTimestamp_ == null &&
            contentType == ContentType.AUDIO) {
          this.firstAudioTimestamp_ = timestamp;
          this.firstAudioReferenceStartTime_ = reference.startTime;
          if (this.firstVideoTimestamp_ != null) {
            let compensation = 0;
            // Only apply compensation if video and audio segment startTime
            // match, to avoid introducing sync issues.
            if (this.firstVideoReferenceStartTime_ ==
                this.firstAudioReferenceStartTime_) {
              compensation =
                  this.firstVideoTimestamp_ - this.firstAudioTimestamp_;
            }
            this.audioCompensation_.resolve(compensation);
          }
        }
        let realTimestamp = timestamp;
        const RAW_FORMATS = shaka.util.MimeUtils.RAW_FORMATS;
        // For formats without containers and using segments mode, we need to
        // adjust TimestampOffset relative to 0 because segments do not have
        // any timestamp information.
        if (!this.sequenceMode_ &&
            RAW_FORMATS.includes(this.sourceBufferTypes_.get(contentType))) {
          realTimestamp = 0;
        }
        const calculatedTimestampOffset = reference.startTime - realTimestamp;
        const timestampOffsetDifference =
            Math.abs(timestampOffset - calculatedTimestampOffset);
        // Only adjust the offset when it differs meaningfully (>= 1ms), or
        // after a seek/adaptation; for chunked data, avoid resetting an
        // already-established offset with a partial segment.
        if ((timestampOffsetDifference >= 0.001 || seeked || adaptation) &&
            (!isChunkedData || calculatedTimestampOffset > 0 ||
            !timestampOffset)) {
          timestampOffset = calculatedTimestampOffset;
          if (this.attemptTimestampOffsetCalculation_) {
            // abort() first: the SourceBuffer may be mid-parse, and the
            // timestampOffset cannot change in that state.
            this.enqueueOperation_(
                contentType,
                () => this.abort_(contentType),
                null);
            this.enqueueOperation_(
                contentType,
                () => this.setTimestampOffset_(contentType, timestampOffset),
                null);
          }
        }
        // Timestamps can only be reliably extracted from video, not audio.
        // Packed audio formats do not have internal timestamps at all.
        // Prefer video for this when available.
        const isBestSourceBufferForTimestamps =
            contentType == ContentType.VIDEO ||
            !(this.sourceBuffers_.has(ContentType.VIDEO));
        if (isBestSourceBufferForTimestamps) {
          this.textSequenceModeOffset_.resolve(timestampOffset);
        }
      }
      if (metadata.length) {
        this.playerInterface_.onMetadata(metadata, timestampOffset,
            reference ? reference.endTime : null);
      }
    }
    // CEA closed captions ride inside the video stream; extract them and hand
    // them to TextEngine.
    if (hasClosedCaptions && contentType == ContentType.VIDEO) {
      if (!this.textEngine_) {
        this.reinitText(shaka.util.MimeUtils.CEA608_CLOSED_CAPTION_MIMETYPE,
            this.sequenceMode_, /* external= */ false);
      }
      if (!this.captionParser_) {
        const basicType = mimeType.split(';', 1)[0];
        this.captionParser_ = this.getCaptionParser(basicType);
      }
      // If it is the init segment for closed captions, initialize the closed
      // caption parser.
      if (!reference) {
        this.captionParser_.init(data, adaptation, continuityTimeline);
      } else {
        const closedCaptions = this.captionParser_.parseFrom(data);
        if (closedCaptions.length) {
          this.textEngine_.storeAndAppendClosedCaptions(
              closedCaptions,
              reference.startTime,
              reference.endTime,
              timestampOffset);
        }
      }
    }
    // Transmux (e.g. TS -> MP4) before handing the bytes to MSE.
    if (this.transmuxers_.has(contentType)) {
      data = await this.transmuxers_.get(contentType).transmux(
          data, stream, reference, this.mediaSource_.duration, contentType);
    }
    data = this.workAroundBrokenPlatforms_(
        stream, data, reference, contentType);
    if (reference && this.sequenceMode_ && contentType != ContentType.TEXT) {
      // In sequence mode, for non-text streams, if we just cleared the buffer
      // and are either performing an unbuffered seek or handling an automatic
      // adaptation, we need to set a new timestampOffset on the sourceBuffer.
      if (seeked || adaptation) {
        let timestampOffset = reference.startTime;
        // Audio and video may not be aligned, so we will compensate for audio
        // if necessary.
        if (this.manifestType_ == shaka.media.ManifestParser.HLS &&
            !this.needSplitMuxedContent_ &&
            contentType == ContentType.AUDIO &&
            this.sourceBuffers_.has(ContentType.VIDEO)) {
          const compensation = await this.audioCompensation_;
          // Only apply compensation if the difference is greater than 150ms
          if (Math.abs(compensation) > 0.15) {
            timestampOffset -= compensation;
          }
        }
        // The logic to call abort() before setting the timestampOffset is
        // extended during unbuffered seeks or automatic adaptations; it is
        // possible for the append state to be PARSING_MEDIA_SEGMENT from the
        // previous SourceBuffer#appendBuffer() call.
        this.enqueueOperation_(
            contentType,
            () => this.abort_(contentType),
            null);
        this.enqueueOperation_(
            contentType,
            () => this.setTimestampOffset_(contentType, timestampOffset),
            null);
      }
    }
    let bufferedBefore = null;
    // Finally, queue the actual append.  bufferedBefore/After are only used
    // in debug builds for the encoding sanity check below.
    await this.enqueueOperation_(contentType, () => {
      if (goog.DEBUG && reference && !reference.isPreload() && !isChunkedData) {
        bufferedBefore = this.getBuffered_(contentType);
      }
      this.append_(contentType, data, timestampOffset, stream);
    }, reference ? reference.getUris()[0] : null);
    if (goog.DEBUG && reference && !reference.isPreload() && !isChunkedData) {
      const bufferedAfter = this.getBuffered_(contentType);
      const newBuffered = shaka.media.TimeRangesUtils.computeAddedRange(
          bufferedBefore, bufferedAfter);
      if (newBuffered) {
        const segmentDuration = reference.endTime - reference.startTime;
        const timeAdded = newBuffered.end - newBuffered.start;
        // Check end times instead of start times.  We may be overwriting a
        // buffer and only the end changes, and that would be fine.
        // Also, exclude tiny segments.  Sometimes alignment segments as small
        // as 33ms are seen in Google DAI content.  For such tiny segments,
        // half a segment duration would be no issue.
        const offset = Math.abs(newBuffered.end - reference.endTime);
        if (segmentDuration > 0.100 && (offset > segmentDuration / 2 ||
            Math.abs(segmentDuration - timeAdded) > 0.030)) {
          shaka.log.error('Possible encoding problem detected!',
              'Unexpected buffered range for reference', reference,
              'from URIs', reference.getUris(),
              'should be', {start: reference.startTime, end: reference.endTime},
              'but got', newBuffered);
        }
      }
    }
  }
  1185. /**
  1186. * Set the selected closed captions Id and language.
  1187. *
  1188. * @param {string} id
  1189. */
  1190. setSelectedClosedCaptionId(id) {
  1191. const VIDEO = shaka.util.ManifestParserUtils.ContentType.VIDEO;
  1192. const videoBufferEndTime = this.bufferEnd(VIDEO) || 0;
  1193. this.textEngine_.setSelectedClosedCaptionId(id, videoBufferEndTime);
  1194. }
  1195. /** Disable embedded closed captions. */
  1196. clearSelectedClosedCaptionId() {
  1197. if (this.textEngine_) {
  1198. this.textEngine_.setSelectedClosedCaptionId('', 0);
  1199. }
  1200. }
  1201. /**
  1202. * Enqueue an operation to remove data from the SourceBuffer.
  1203. *
  1204. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  1205. * @param {number} startTime relative to the start of the presentation
  1206. * @param {number} endTime relative to the start of the presentation
  1207. * @param {Array<number>=} continuityTimelines a list of continuity timelines
  1208. * that are still available on the stream.
  1209. * @return {!Promise}
  1210. */
  1211. async remove(contentType, startTime, endTime, continuityTimelines) {
  1212. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  1213. if (contentType == ContentType.VIDEO && this.captionParser_) {
  1214. this.captionParser_.remove(continuityTimelines);
  1215. }
  1216. if (contentType == ContentType.TEXT) {
  1217. await this.textEngine_.remove(startTime, endTime);
  1218. } else if (endTime > startTime) {
  1219. await this.enqueueOperation_(
  1220. contentType,
  1221. () => this.remove_(contentType, startTime, endTime),
  1222. null);
  1223. if (this.needSplitMuxedContent_) {
  1224. await this.enqueueOperation_(
  1225. ContentType.AUDIO,
  1226. () => this.remove_(ContentType.AUDIO, startTime, endTime),
  1227. null);
  1228. }
  1229. }
  1230. }
  1231. /**
  1232. * Enqueue an operation to clear the SourceBuffer.
  1233. *
  1234. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  1235. * @return {!Promise}
  1236. */
  1237. async clear(contentType) {
  1238. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  1239. if (contentType == ContentType.TEXT) {
  1240. if (!this.textEngine_) {
  1241. return;
  1242. }
  1243. await this.textEngine_.remove(0, Infinity);
  1244. } else {
  1245. // Note that not all platforms allow clearing to Infinity.
  1246. await this.enqueueOperation_(
  1247. contentType,
  1248. () => this.remove_(contentType, 0, this.mediaSource_.duration),
  1249. null);
  1250. if (this.needSplitMuxedContent_) {
  1251. await this.enqueueOperation_(
  1252. ContentType.AUDIO,
  1253. () => this.remove_(
  1254. ContentType.AUDIO, 0, this.mediaSource_.duration),
  1255. null);
  1256. }
  1257. }
  1258. }
  1259. /**
  1260. * Fully reset the state of the caption parser owned by MediaSourceEngine.
  1261. */
  1262. resetCaptionParser() {
  1263. if (this.captionParser_) {
  1264. this.captionParser_.reset();
  1265. }
  1266. }
  1267. /**
  1268. * Enqueue an operation to flush the SourceBuffer.
  1269. * This is a workaround for what we believe is a Chromecast bug.
  1270. *
  1271. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  1272. * @return {!Promise}
  1273. */
  1274. async flush(contentType) {
  1275. // Flush the pipeline. Necessary on Chromecast, even though we have removed
  1276. // everything.
  1277. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  1278. if (contentType == ContentType.TEXT) {
  1279. // Nothing to flush for text.
  1280. return;
  1281. }
  1282. await this.enqueueOperation_(
  1283. contentType,
  1284. () => this.flush_(contentType),
  1285. null);
  1286. if (this.needSplitMuxedContent_) {
  1287. await this.enqueueOperation_(
  1288. ContentType.AUDIO,
  1289. () => this.flush_(ContentType.AUDIO),
  1290. null);
  1291. }
  1292. }
  /**
   * Sets the timestamp offset and append window end for the given content
   * type.
   *
   * @param {shaka.util.ManifestParserUtils.ContentType} contentType
   * @param {number} timestampOffset The timestamp offset.  Segments which
   *     start at time t will be inserted at time t + timestampOffset instead.
   *     This value does not affect segments which have already been inserted.
   * @param {number} appendWindowStart The timestamp to set the append window
   *     start to.  For future appends, frames/samples with timestamps less
   *     than this value will be dropped.
   * @param {number} appendWindowEnd The timestamp to set the append window end
   *     to.  For future appends, frames/samples with timestamps greater than
   *     this value will be dropped.
   * @param {boolean} ignoreTimestampOffset If true, the timestampOffset will
   *     not be applied in this step.
   * @param {string} mimeType
   * @param {string} codecs
   * @param {!Map<shaka.util.ManifestParserUtils.ContentType,
   *     shaka.extern.Stream>} streamsByType
   *     A map of content types to streams.
   *
   * @return {!Promise}
   */
  async setStreamProperties(
      contentType, timestampOffset, appendWindowStart, appendWindowEnd,
      ignoreTimestampOffset, mimeType, codecs, streamsByType) {
    const ContentType = shaka.util.ManifestParserUtils.ContentType;
    // Text properties go straight to TextEngine; there is no SourceBuffer.
    if (contentType == ContentType.TEXT) {
      if (!ignoreTimestampOffset) {
        this.textEngine_.setTimestampOffset(timestampOffset);
      }
      this.textEngine_.setAppendWindow(appendWindowStart, appendWindowEnd);
      return;
    }
    const operations = [];
    // A codec switch may recreate the SourceBuffer, in which case the abort()
    // below is unnecessary (and would target the new buffer).
    const hasChangedCodecs = await this.codecSwitchIfNecessary_(
        contentType, mimeType, codecs, streamsByType);
    if (!hasChangedCodecs) {
      // Queue an abort() to help MSE splice together overlapping segments.
      // We set appendWindowEnd when we change periods in DASH content, and the
      // period transition may result in overlap.
      //
      // An abort() also helps with MPEG2-TS.  When we append a TS segment, we
      // always enter a PARSING_MEDIA_SEGMENT state and we can't change the
      // timestamp offset.  By calling abort(), we reset the state so we can
      // set it.
      operations.push(this.enqueueOperation_(
          contentType,
          () => this.abort_(contentType),
          null));
      if (this.needSplitMuxedContent_) {
        operations.push(this.enqueueOperation_(
            ContentType.AUDIO,
            () => this.abort_(ContentType.AUDIO),
            null));
      }
    }
    if (!ignoreTimestampOffset) {
      operations.push(this.enqueueOperation_(
          contentType,
          () => this.setTimestampOffset_(contentType, timestampOffset),
          null));
      if (this.needSplitMuxedContent_) {
        operations.push(this.enqueueOperation_(
            ContentType.AUDIO,
            () => this.setTimestampOffset_(
                ContentType.AUDIO, timestampOffset),
            null));
      }
    }
    // Skip the no-op case of a fully-open append window.
    if (appendWindowStart != 0 || appendWindowEnd != Infinity) {
      operations.push(this.enqueueOperation_(
          contentType,
          () => this.setAppendWindow_(
              contentType, appendWindowStart, appendWindowEnd),
          null));
      if (this.needSplitMuxedContent_) {
        operations.push(this.enqueueOperation_(
            ContentType.AUDIO,
            () => this.setAppendWindow_(
                ContentType.AUDIO, appendWindowStart, appendWindowEnd),
            null));
      }
    }
    if (operations.length) {
      await Promise.all(operations);
    }
  }
  /**
   * Adjust timestamp offset to maintain AV sync across discontinuities.
   *
   * @param {shaka.util.ManifestParserUtils.ContentType} contentType
   * @param {number} timestampOffset
   * @return {!Promise}
   */
  async resync(contentType, timestampOffset) {
    const ContentType = shaka.util.ManifestParserUtils.ContentType;
    if (contentType == ContentType.TEXT) {
      // This operation is for audio and video only.
      return;
    }
    // Reset the promise in case the timestamp offset changed during
    // a period/discontinuity transition.
    if (contentType == ContentType.VIDEO) {
      this.textSequenceModeOffset_ = new shaka.util.PublicPromise();
    }
    // Without sequence mode, the offset is derived from the media itself, so
    // there is nothing to resync.
    if (!this.sequenceMode_) {
      return;
    }
    // Avoid changing timestampOffset when the difference is less than 150 ms
    // (0.15 s) from the end of the current buffer.
    const bufferEnd = this.bufferEnd(contentType);
    if (bufferEnd && Math.abs(bufferEnd - timestampOffset) < 0.15) {
      return;
    }
    // Queue an abort() to help MSE splice together overlapping segments.
    // We set appendWindowEnd when we change periods in DASH content, and the
    // period transition may result in overlap.
    //
    // An abort() also helps with MPEG2-TS.  When we append a TS segment, we
    // always enter a PARSING_MEDIA_SEGMENT state and we can't change the
    // timestamp offset.  By calling abort(), we reset the state so we can
    // set it.
    this.enqueueOperation_(
        contentType,
        () => this.abort_(contentType),
        null);
    if (this.needSplitMuxedContent_) {
      this.enqueueOperation_(
          ContentType.AUDIO,
          () => this.abort_(ContentType.AUDIO),
          null);
    }
    await this.enqueueOperation_(
        contentType,
        () => this.setTimestampOffset_(contentType, timestampOffset),
        null);
    if (this.needSplitMuxedContent_) {
      await this.enqueueOperation_(
          ContentType.AUDIO,
          () => this.setTimestampOffset_(ContentType.AUDIO, timestampOffset),
          null);
    }
  }
  1437. /**
  1438. * @param {string=} reason Valid reasons are 'network' and 'decode'.
  1439. * @return {!Promise}
  1440. * @see http://w3c.github.io/media-source/#idl-def-EndOfStreamError
  1441. */
  1442. async endOfStream(reason) {
  1443. await this.enqueueBlockingOperation_(() => {
  1444. // If endOfStream() has already been called on the media source,
  1445. // don't call it again. Also do not call if readyState is
  1446. // 'closed' (not attached to video element) since it is not a
  1447. // valid operation.
  1448. if (this.ended() || this.closed()) {
  1449. return;
  1450. }
  1451. // Tizen won't let us pass undefined, but it will let us omit the
  1452. // argument.
  1453. if (reason) {
  1454. this.mediaSource_.endOfStream(reason);
  1455. } else {
  1456. this.mediaSource_.endOfStream();
  1457. }
  1458. });
  1459. }
  1460. /**
  1461. * @param {number} duration
  1462. * @return {!Promise}
  1463. */
  1464. async setDuration(duration) {
  1465. await this.enqueueBlockingOperation_(() => {
  1466. // https://www.w3.org/TR/media-source-2/#duration-change-algorithm
  1467. // "Duration reductions that would truncate currently buffered media
  1468. // are disallowed.
  1469. // When truncation is necessary, use remove() to reduce the buffered
  1470. // range before updating duration."
  1471. // But in some platforms, truncating the duration causes the
  1472. // buffer range removal algorithm to run which triggers an
  1473. // 'updateend' event to fire.
  1474. // To handle this scenario, we have to insert a dummy operation into
  1475. // the beginning of each queue, which the 'updateend' handler will remove.
  1476. // Using config to disable it by default and enable only
  1477. // on relevant platforms.
  1478. if (this.config_.durationReductionEmitsUpdateEnd &&
  1479. duration < this.mediaSource_.duration) {
  1480. for (const contentType of this.sourceBuffers_.keys()) {
  1481. const dummyOperation = {
  1482. start: () => {},
  1483. p: new shaka.util.PublicPromise(),
  1484. uri: null,
  1485. };
  1486. this.queues_.get(contentType).unshift(dummyOperation);
  1487. }
  1488. }
  1489. this.mediaSource_.duration = duration;
  1490. this.lastDuration_ = duration;
  1491. });
  1492. }
  1493. /**
  1494. * Get the current MediaSource duration.
  1495. *
  1496. * @return {number}
  1497. */
  1498. getDuration() {
  1499. return this.mediaSource_.duration;
  1500. }
  1501. /**
  1502. * Updates the live seekable range.
  1503. *
  1504. * @param {number} startTime
  1505. * @param {number} endTime
  1506. */
  1507. async setLiveSeekableRange(startTime, endTime) {
  1508. if (this.destroyer_.destroyed() || this.video_.error ||
  1509. this.usingRemotePlayback_ || this.reloadingMediaSource_) {
  1510. return;
  1511. }
  1512. goog.asserts.assert('setLiveSeekableRange' in this.mediaSource_,
  1513. 'Using setLiveSeekableRange on not supported platform');
  1514. if (this.ended() || this.closed()) {
  1515. return;
  1516. }
  1517. await this.enqueueBlockingOperation_(() => {
  1518. if (this.ended() || this.closed()) {
  1519. return;
  1520. }
  1521. this.mediaSource_.setLiveSeekableRange(startTime, endTime);
  1522. });
  1523. }
  1524. /**
  1525. * Clear the current live seekable range.
  1526. */
  1527. async clearLiveSeekableRange() {
  1528. if (this.destroyer_.destroyed() || this.video_.error ||
  1529. this.usingRemotePlayback_ || this.reloadingMediaSource_) {
  1530. return;
  1531. }
  1532. goog.asserts.assert('clearLiveSeekableRange' in this.mediaSource_,
  1533. 'Using clearLiveSeekableRange on not supported platform');
  1534. if (this.ended() || this.closed()) {
  1535. return;
  1536. }
  1537. await this.enqueueBlockingOperation_(() => {
  1538. if (this.ended() || this.closed()) {
  1539. return;
  1540. }
  1541. this.mediaSource_.clearLiveSeekableRange();
  1542. });
  1543. }
  1544. /**
  1545. * Append dependency data.
  1546. * @param {BufferSource} data
  1547. * @param {number} timestampOffset
  1548. * @param {shaka.extern.Stream} stream
  1549. */
  1550. appendDependency(data, timestampOffset, stream) {
  1551. if (this.lcevcDec_) {
  1552. // Append buffers to the LCEVC Dec for parsing and storing
  1553. // of LCEVC data.
  1554. this.lcevcDec_.appendBuffer(data, timestampOffset, stream);
  1555. }
  1556. }
  1557. /**
  1558. * Append data to the SourceBuffer.
  1559. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  1560. * @param {BufferSource} data
  1561. * @param {number} timestampOffset
  1562. * @param {shaka.extern.Stream} stream
  1563. * @private
  1564. */
  1565. append_(contentType, data, timestampOffset, stream) {
  1566. this.appendDependency(data, timestampOffset, stream);
  1567. // This will trigger an 'updateend' event.
  1568. this.sourceBuffers_.get(contentType).appendBuffer(data);
  1569. }
  1570. /**
  1571. * Remove data from the SourceBuffer.
  1572. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  1573. * @param {number} startTime relative to the start of the presentation
  1574. * @param {number} endTime relative to the start of the presentation
  1575. * @private
  1576. */
  1577. remove_(contentType, startTime, endTime) {
  1578. if (endTime <= startTime) {
  1579. // Ignore removal of inverted or empty ranges.
  1580. // Fake 'updateend' event to resolve the operation.
  1581. this.onUpdateEnd_(contentType);
  1582. return;
  1583. }
  1584. // This will trigger an 'updateend' event.
  1585. this.sourceBuffers_.get(contentType).remove(startTime, endTime);
  1586. }
  1587. /**
  1588. * Call abort() on the SourceBuffer.
  1589. * This resets MSE's last_decode_timestamp on all track buffers, which should
  1590. * trigger the splicing logic for overlapping segments.
  1591. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  1592. * @private
  1593. */
  1594. abort_(contentType) {
  1595. const sourceBuffer = this.sourceBuffers_.get(contentType);
  1596. // Save the append window, which is reset on abort().
  1597. const appendWindowStart = sourceBuffer.appendWindowStart;
  1598. const appendWindowEnd = sourceBuffer.appendWindowEnd;
  1599. // This will not trigger an 'updateend' event, since nothing is happening.
  1600. // This is only to reset MSE internals, not to abort an actual operation.
  1601. sourceBuffer.abort();
  1602. // Restore the append window.
  1603. sourceBuffer.appendWindowStart = appendWindowStart;
  1604. sourceBuffer.appendWindowEnd = appendWindowEnd;
  1605. // Fake an 'updateend' event to resolve the operation.
  1606. this.onUpdateEnd_(contentType);
  1607. }
  1608. /**
  1609. * Nudge the playhead to force the media pipeline to be flushed.
  1610. * This seems to be necessary on Chromecast to get new content to replace old
  1611. * content.
  1612. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  1613. * @private
  1614. */
  1615. flush_(contentType) {
  1616. // Never use flush_ if there's data. It causes a hiccup in playback.
  1617. goog.asserts.assert(
  1618. this.video_.buffered.length == 0, 'MediaSourceEngine.flush_ should ' +
  1619. 'only be used after clearing all data!');
  1620. // Seeking forces the pipeline to be flushed.
  1621. this.video_.currentTime -= 0.001;
  1622. // Fake an 'updateend' event to resolve the operation.
  1623. this.onUpdateEnd_(contentType);
  1624. }
  1625. /**
  1626. * Set the SourceBuffer's timestamp offset.
  1627. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  1628. * @param {number} timestampOffset
  1629. * @private
  1630. */
  1631. setTimestampOffset_(contentType, timestampOffset) {
  1632. // Work around for
  1633. // https://github.com/shaka-project/shaka-player/issues/1281:
  1634. // TODO(https://bit.ly/2ttKiBU): follow up when this is fixed in Edge
  1635. if (timestampOffset < 0) {
  1636. // Try to prevent rounding errors in Edge from removing the first
  1637. // keyframe.
  1638. timestampOffset += 0.001;
  1639. }
  1640. this.sourceBuffers_.get(contentType).timestampOffset = timestampOffset;
  1641. // Fake an 'updateend' event to resolve the operation.
  1642. this.onUpdateEnd_(contentType);
  1643. }
  1644. /**
  1645. * Set the SourceBuffer's append window end.
  1646. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  1647. * @param {number} appendWindowStart
  1648. * @param {number} appendWindowEnd
  1649. * @private
  1650. */
  1651. setAppendWindow_(contentType, appendWindowStart, appendWindowEnd) {
  1652. const sourceBuffer = this.sourceBuffers_.get(contentType);
  1653. if (sourceBuffer.appendWindowEnd !== appendWindowEnd ||
  1654. sourceBuffer.appendWindowStart !== appendWindowStart) {
  1655. // You can't set start > end, so first set start to 0, then set the new
  1656. // end, then set the new start. That way, there are no intermediate
  1657. // states which are invalid.
  1658. sourceBuffer.appendWindowStart = 0;
  1659. sourceBuffer.appendWindowEnd = appendWindowEnd;
  1660. sourceBuffer.appendWindowStart = appendWindowStart;
  1661. }
  1662. // Fake an 'updateend' event to resolve the operation.
  1663. this.onUpdateEnd_(contentType);
  1664. }
  1665. /**
  1666. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  1667. * @private
  1668. */
  1669. onError_(contentType) {
  1670. const operation = this.queues_.get(contentType)[0];
  1671. goog.asserts.assert(operation, 'Spurious error event!');
  1672. goog.asserts.assert(!this.sourceBuffers_.get(contentType).updating,
  1673. 'SourceBuffer should not be updating on error!');
  1674. const code = this.video_.error ? this.video_.error.code : 0;
  1675. operation.p.reject(new shaka.util.Error(
  1676. shaka.util.Error.Severity.CRITICAL,
  1677. shaka.util.Error.Category.MEDIA,
  1678. shaka.util.Error.Code.MEDIA_SOURCE_OPERATION_FAILED,
  1679. code, operation.uri));
  1680. // Do not pop from queue. An 'updateend' event will fire next, and to
  1681. // avoid synchronizing these two event handlers, we will allow that one to
  1682. // pop from the queue as normal. Note that because the operation has
  1683. // already been rejected, the call to resolve() in the 'updateend' handler
  1684. // will have no effect.
  1685. }
  /**
   * Handles an 'updateend' event from a SourceBuffer: resolves the operation
   * at the head of this content type's queue and starts the next one.
   * Also used as a "fake" updateend to resolve operations that do not cause
   * a real SourceBuffer update (see abort_, flush_, setTimestampOffset_).
   * @param {shaka.util.ManifestParserUtils.ContentType} contentType
   * @private
   */
  onUpdateEnd_(contentType) {
    // If we're reloading or have been destroyed, clear the queue for this
    // content type.
    if (this.reloadingMediaSource_ || this.destroyer_.destroyed()) {
      // Resolve any pending operations in this content type's queue
      const queue = this.queues_.get(contentType);
      if (queue && queue.length) {
        // Resolve the first operation that triggered this updateEnd
        const firstOperation = queue[0];
        if (firstOperation && firstOperation.p) {
          firstOperation.p.resolve();
        }
        // Clear the rest of the queue
        this.queues_.set(contentType, []);
      }
      return;
    }
    const operation = this.queues_.get(contentType)[0];
    goog.asserts.assert(operation, 'Spurious updateend event!');
    // Guard for compiled mode, where the assert above is stripped.
    if (!operation) {
      return;
    }
    goog.asserts.assert(!this.sourceBuffers_.get(contentType).updating,
        'SourceBuffer should not be updating on updateend!');
    // Resolving unblocks whoever enqueued this operation; popFromQueue_ then
    // starts the next queued operation, if any.
    operation.p.resolve();
    this.popFromQueue_(contentType);
  }
  1717. /**
  1718. * Enqueue an operation and start it if appropriate.
  1719. *
  1720. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  1721. * @param {function()} start
  1722. * @param {?string} uri
  1723. * @return {!Promise}
  1724. * @private
  1725. */
  1726. enqueueOperation_(contentType, start, uri) {
  1727. this.destroyer_.ensureNotDestroyed();
  1728. const operation = {
  1729. start: start,
  1730. p: new shaka.util.PublicPromise(),
  1731. uri,
  1732. };
  1733. this.queues_.get(contentType).push(operation);
  1734. if (this.queues_.get(contentType).length == 1) {
  1735. this.startOperation_(contentType);
  1736. }
  1737. return operation.p;
  1738. }
  /**
   * Enqueue an operation which must block all other operations on all
   * SourceBuffers.
   *
   * The mechanism: a "wait" operation is pushed onto every content type's
   * queue; each one resolves when it reaches the head of its queue.  Once
   * all of them have resolved, every queue is blocked and the real
   * operation runs.  The queues are unblocked in the finally clause.
   *
   * @param {function():(Promise|undefined)} run
   * @return {!Promise}
   * @private
   */
  async enqueueBlockingOperation_(run) {
    this.destroyer_.ensureNotDestroyed();
    /** @type {!Array<!shaka.util.PublicPromise>} */
    const allWaiters = [];
    /** @type {!Array<!shaka.util.ManifestParserUtils.ContentType>} */
    const contentTypes = Array.from(this.sourceBuffers_.keys());
    // Enqueue a 'wait' operation onto each queue.
    // This operation signals its readiness when it starts.
    // When all wait operations are ready, the real operation takes place.
    for (const contentType of contentTypes) {
      const ready = new shaka.util.PublicPromise();
      const operation = {
        start: () => ready.resolve(),
        p: ready,
        uri: null,
      };
      const queue = this.queues_.get(contentType);
      queue.push(operation);
      allWaiters.push(ready);
      // If the queue was empty, this waiter is already at the head: start it.
      if (queue.length == 1) {
        operation.start();
      }
    }
    // Return a Promise to the real operation, which waits to begin until
    // there are no other in-progress operations on any SourceBuffers.
    try {
      await Promise.all(allWaiters);
    } catch (error) {
      // One of the waiters failed, which means we've been destroyed.
      goog.asserts.assert(
          this.destroyer_.destroyed(), 'Should be destroyed by now');
      // We haven't popped from the queue. Canceled waiters have been removed
      // by destroy. What's left now should just be resolved waiters. In
      // uncompiled mode, we will maintain good hygiene and make sure the
      // assert at the end of destroy passes. In compiled mode, the queues
      // are wiped in destroy.
      if (goog.DEBUG) {
        for (const contentType of contentTypes) {
          const queue = this.queues_.get(contentType);
          if (queue.length) {
            goog.asserts.assert(queue.length == 1,
                'Should be at most one item in queue!');
            goog.asserts.assert(allWaiters.includes(queue[0].p),
                'The item in queue should be one of our waiters!');
            queue.shift();
          }
        }
      }
      throw error;
    }
    if (goog.DEBUG) {
      // If we did it correctly, nothing is updating.
      for (const contentType of contentTypes) {
        goog.asserts.assert(
            this.sourceBuffers_.get(contentType).updating == false,
            'SourceBuffers should not be updating after a blocking op!');
      }
    }
    // Run the real operation, which can be asynchronous.
    try {
      await run();
    } catch (exception) {
      throw new shaka.util.Error(
          shaka.util.Error.Severity.CRITICAL,
          shaka.util.Error.Category.MEDIA,
          shaka.util.Error.Code.MEDIA_SOURCE_OPERATION_THREW,
          exception,
          this.video_.error || 'No error in the media element',
          null);
    } finally {
      // Unblock the queues.
      for (const contentType of contentTypes) {
        this.popFromQueue_(contentType);
      }
    }
  }
  1823. /**
  1824. * Pop from the front of the queue and start a new operation.
  1825. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  1826. * @private
  1827. */
  1828. popFromQueue_(contentType) {
  1829. goog.asserts.assert(this.queues_.has(contentType), 'Queue should exist');
  1830. // Remove the in-progress operation, which is now complete.
  1831. this.queues_.get(contentType).shift();
  1832. this.startOperation_(contentType);
  1833. }
  1834. /**
  1835. * Starts the next operation in the queue.
  1836. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  1837. * @private
  1838. */
  1839. startOperation_(contentType) {
  1840. // Retrieve the next operation, if any, from the queue and start it.
  1841. const next = this.queues_.get(contentType)[0];
  1842. if (next) {
  1843. try {
  1844. next.start();
  1845. } catch (exception) {
  1846. if (exception.name == 'QuotaExceededError') {
  1847. next.p.reject(new shaka.util.Error(
  1848. shaka.util.Error.Severity.CRITICAL,
  1849. shaka.util.Error.Category.MEDIA,
  1850. shaka.util.Error.Code.QUOTA_EXCEEDED_ERROR,
  1851. contentType));
  1852. } else if (!this.isStreamingAllowed()) {
  1853. next.p.reject(new shaka.util.Error(
  1854. shaka.util.Error.Severity.CRITICAL,
  1855. shaka.util.Error.Category.MEDIA,
  1856. shaka.util.Error.Code.STREAMING_NOT_ALLOWED,
  1857. contentType));
  1858. } else {
  1859. next.p.reject(new shaka.util.Error(
  1860. shaka.util.Error.Severity.CRITICAL,
  1861. shaka.util.Error.Category.MEDIA,
  1862. shaka.util.Error.Code.MEDIA_SOURCE_OPERATION_THREW,
  1863. exception,
  1864. this.video_.error || 'No error in the media element',
  1865. next.uri));
  1866. }
  1867. this.popFromQueue_(contentType);
  1868. }
  1869. }
  1870. }
  1871. /**
  1872. * @return {!shaka.extern.TextDisplayer}
  1873. */
  1874. getTextDisplayer() {
  1875. goog.asserts.assert(
  1876. this.textDisplayer_,
  1877. 'TextDisplayer should only be null when this is destroyed');
  1878. return this.textDisplayer_;
  1879. }
  1880. /**
  1881. * @param {!shaka.extern.TextDisplayer} textDisplayer
  1882. */
  1883. setTextDisplayer(textDisplayer) {
  1884. this.textDisplayer_ = textDisplayer;
  1885. if (this.textEngine_) {
  1886. this.textEngine_.setDisplayer(textDisplayer);
  1887. }
  1888. }
  /**
   * Sets whether VTT cue timings are interpreted relative to the segment
   * start; the flag is read elsewhere when creating the text engine.
   * @param {boolean} segmentRelativeVttTiming
   */
  setSegmentRelativeVttTiming(segmentRelativeVttTiming) {
    this.segmentRelativeVttTiming_ = segmentRelativeVttTiming;
  }
  /**
   * Apply platform-specific transformations to this segment to work around
   * issues in the platform.
   *
   * Only MP4 content is transformed; any other container is returned
   * unchanged.  Transformations (each may replace the segment buffer):
   * EC-3/enca correction, fake encryption metadata, and fake EC-3 info.
   *
   * @param {shaka.extern.Stream} stream
   * @param {!BufferSource} segment
   * @param {?shaka.media.SegmentReference} reference null for init segments
   * @param {shaka.util.ManifestParserUtils.ContentType} contentType
   * @return {!BufferSource} the (possibly replaced) segment data
   * @private
   */
  workAroundBrokenPlatforms_(stream, segment, reference, contentType) {
    const ContentType = shaka.util.ManifestParserUtils.ContentType;
    const isMp4 = shaka.util.MimeUtils.getContainerType(
        this.sourceBufferTypes_.get(contentType)) == 'mp4';
    if (!isMp4) {
      // We can only transform MP4 containers.
      return segment;
    }
    // A null reference means this is an init segment.
    const isInitSegment = reference === null;
    const encryptionExpected = this.expectedEncryption_.get(contentType);
    const keySystem = this.playerInterface_.getKeySystem();
    let isEncrypted = false;
    if (reference && reference.initSegmentReference) {
      isEncrypted = reference.initSegmentReference.encrypted;
    }
    const uri = reference ? reference.getUris()[0] : null;
    const device = shaka.device.DeviceFactory.getDevice();
    if (this.config_.correctEc3Enca &&
        isInitSegment &&
        contentType === ContentType.AUDIO) {
      segment = shaka.media.ContentWorkarounds.correctEnca(segment);
    }
    // If:
    //   1. the configuration tells to insert fake encryption,
    //   2. and this is an init segment or media segment,
    //   3. and encryption is expected,
    //   4. and the platform requires encryption in all init or media segments
    //      of current content type,
    // then insert fake encryption metadata for init segments that lack it.
    // The MP4 requirement is because we can currently only do this
    // transformation on MP4 containers.
    // See: https://github.com/shaka-project/shaka-player/issues/2759
    if (this.config_.insertFakeEncryptionInInit && encryptionExpected &&
        device.requiresEncryptionInfoInAllInitSegments(keySystem,
            contentType)) {
      if (isInitSegment) {
        shaka.log.debug('Forcing fake encryption information in init segment.');
        segment =
            shaka.media.ContentWorkarounds.fakeEncryption(stream, segment, uri);
      } else if (!isEncrypted && device.requiresTfhdFix(contentType)) {
        shaka.log.debug(
            'Forcing fake encryption information in media segment.');
        segment = shaka.media.ContentWorkarounds.fakeMediaEncryption(segment);
      }
    }
    if (isInitSegment && device.requiresEC3InitSegments()) {
      shaka.log.debug('Forcing fake EC-3 information in init segment.');
      segment = shaka.media.ContentWorkarounds.fakeEC3(segment);
    }
    return segment;
  }
  1956. /**
  1957. * Prepare the SourceBuffer to parse a potentially new type or codec.
  1958. *
  1959. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  1960. * @param {string} mimeType
  1961. * @param {?shaka.extern.Transmuxer} transmuxer
  1962. * @private
  1963. */
  1964. change_(contentType, mimeType, transmuxer) {
  1965. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  1966. if (contentType === ContentType.TEXT) {
  1967. shaka.log.debug(`Change not supported for ${contentType}`);
  1968. return;
  1969. }
  1970. const sourceBuffer = this.sourceBufferTypes_.get(contentType);
  1971. shaka.log.debug(
  1972. `Change Type: ${sourceBuffer} -> ${mimeType}`);
  1973. if (shaka.media.Capabilities.isChangeTypeSupported()) {
  1974. if (this.transmuxers_.has(contentType)) {
  1975. this.transmuxers_.get(contentType).destroy();
  1976. this.transmuxers_.delete(contentType);
  1977. }
  1978. if (transmuxer) {
  1979. this.transmuxers_.set(contentType, transmuxer);
  1980. }
  1981. const type = this.addExtraFeaturesToMimeType_(mimeType);
  1982. this.sourceBuffers_.get(contentType).changeType(type);
  1983. this.sourceBufferTypes_.set(contentType, mimeType);
  1984. } else {
  1985. shaka.log.debug('Change Type not supported');
  1986. }
  1987. // Fake an 'updateend' event to resolve the operation.
  1988. this.onUpdateEnd_(contentType);
  1989. }
  1990. /**
  1991. * Enqueue an operation to prepare the SourceBuffer to parse a potentially new
  1992. * type or codec.
  1993. *
  1994. * @param {shaka.util.ManifestParserUtils.ContentType} contentType
  1995. * @param {string} mimeType
  1996. * @param {?shaka.extern.Transmuxer} transmuxer
  1997. * @return {!Promise}
  1998. */
  1999. changeType(contentType, mimeType, transmuxer) {
  2000. return this.enqueueOperation_(
  2001. contentType,
  2002. () => this.change_(contentType, mimeType, transmuxer),
  2003. null);
  2004. }
  /**
   * Resets the MediaSource and re-adds source buffers due to codec mismatch.
   *
   * Tears down all SourceBuffers and transmuxers, creates a fresh
   * MediaSource, re-initializes a SourceBuffer per stream, and restores the
   * playhead/duration/play state afterwards.
   *
   * @param {!Map<shaka.util.ManifestParserUtils.ContentType,
   *              shaka.extern.Stream>} streamsByType
   * @private
   */
  async reset_(streamsByType) {
    // A reset is already in progress, or playback is remote (where MSE is not
    // in use): nothing to do.
    if (this.reloadingMediaSource_ || this.usingRemotePlayback_) {
      return;
    }
    const ContentType = shaka.util.ManifestParserUtils.ContentType;
    this.reloadingMediaSource_ = true;
    this.needSplitMuxedContent_ = false;
    const currentTime = this.video_.currentTime;
    // When codec switching if the user is currently paused we don't want
    // to trigger a play when switching codec.
    // Playing can also end up in a paused state after a codec switch
    // so we need to remember the current states.
    const previousAutoPlayState = this.video_.autoplay;
    if (!this.video_.paused) {
      this.playAfterReset_ = true;
    }
    if (this.playbackHasBegun_) {
      // Only set autoplay to false if the video playback has already begun.
      // When a codec switch happens before playback has begun this can cause
      // autoplay not to work as expected.
      this.video_.autoplay = false;
    }
    try {
      this.eventManager_.removeAll();
      for (const transmuxer of this.transmuxers_.values()) {
        transmuxer.destroy();
      }
      for (const sourceBuffer of this.sourceBuffers_.values()) {
        try {
          this.mediaSource_.removeSourceBuffer(sourceBuffer);
        } catch (e) {
          // Best-effort cleanup: the MediaSource may already be closed.
          shaka.log.debug('Exception on removeSourceBuffer', e);
        }
      }
      this.transmuxers_.clear();
      this.sourceBuffers_.clear();
      const previousDuration = this.mediaSource_.duration;
      this.mediaSourceOpen_ = new shaka.util.PublicPromise();
      this.mediaSource_ = this.createMediaSource(this.mediaSourceOpen_);
      await this.mediaSourceOpen_;
      // Carry the old duration over to the new MediaSource, falling back to
      // the last known duration.
      if (!isNaN(previousDuration) && previousDuration) {
        this.mediaSource_.duration = previousDuration;
      } else if (!isNaN(this.lastDuration_) && this.lastDuration_) {
        this.mediaSource_.duration = this.lastDuration_;
      }
      // Wait until one 'addsourcebuffer' event has fired per stream before
      // declaring the reset complete.
      const sourceBufferAdded = new shaka.util.PublicPromise();
      const sourceBuffers =
        /** @type {EventTarget} */(this.mediaSource_.sourceBuffers);
      const totalOfBuffers = streamsByType.size;
      let numberOfSourceBufferAdded = 0;
      const onSourceBufferAdded = () => {
        numberOfSourceBufferAdded++;
        if (numberOfSourceBufferAdded === totalOfBuffers) {
          sourceBufferAdded.resolve();
          this.eventManager_.unlisten(sourceBuffers, 'addsourcebuffer',
              onSourceBufferAdded);
        }
      };
      this.eventManager_.listen(sourceBuffers, 'addsourcebuffer',
          onSourceBufferAdded);
      for (const contentType of streamsByType.keys()) {
        const stream = streamsByType.get(contentType);
        this.initSourceBuffer_(contentType, stream, stream.codecs);
      }
      const audio = streamsByType.get(ContentType.AUDIO);
      if (audio && audio.isAudioMuxedInVideo) {
        this.needSplitMuxedContent_ = true;
      }
      if (this.needSplitMuxedContent_ && !this.queues_.has(ContentType.AUDIO)) {
        this.queues_.set(ContentType.AUDIO, []);
      }
      // Fake a seek to catchup the playhead.
      this.video_.currentTime = currentTime;
      await sourceBufferAdded;
    } finally {
      this.reloadingMediaSource_ = false;
      this.destroyer_.ensureNotDestroyed();
      this.eventManager_.listenOnce(this.video_, 'canplaythrough', () => {
        // Don't use ensureNotDestroyed() from this event listener, because
        // that results in an uncaught exception. Instead, just check the
        // flag.
        if (this.destroyer_.destroyed()) {
          return;
        }
        this.video_.autoplay = previousAutoPlayState;
        if (this.playAfterReset_) {
          this.playAfterReset_ = false;
          this.video_.play();
        }
      });
    }
  }
  2104. /**
  2105. * Resets the Media Source
  2106. * @param {!Map<shaka.util.ManifestParserUtils.ContentType,
  2107. * shaka.extern.Stream>} streamsByType
  2108. * @return {!Promise}
  2109. */
  2110. reset(streamsByType) {
  2111. return this.enqueueBlockingOperation_(
  2112. () => this.reset_(streamsByType));
  2113. }
  /**
   * Computes the effective MIME type, codec, and transmuxer needed to play
   * the given type/codecs on this platform.
   *
   * Note: when a transmuxer is returned, the CALLER owns it and must either
   * store it or destroy() it.
   *
   * @param {shaka.util.ManifestParserUtils.ContentType} contentType
   * @param {string} mimeType
   * @param {string} codecs
   * @return {{transmuxer: ?shaka.extern.Transmuxer,
   *           transmuxerMuxed: boolean, basicType: string, codec: string,
   *           mimeType: string}}
   * @private
   */
  getRealInfo_(contentType, mimeType, codecs) {
    const ContentType = shaka.util.ManifestParserUtils.ContentType;
    const MimeUtils = shaka.util.MimeUtils;
    /** @type {?shaka.extern.Transmuxer} */
    let transmuxer;
    let transmuxerMuxed = false;
    // The codecs string may contain both audio and video codecs; pick the one
    // matching this content type.
    const audioCodec = shaka.util.ManifestParserUtils.guessCodecsSafe(
        ContentType.AUDIO, (codecs || '').split(','));
    const videoCodec = shaka.util.ManifestParserUtils.guessCodecsSafe(
        ContentType.VIDEO, (codecs || '').split(','));
    let codec = videoCodec;
    if (contentType == ContentType.AUDIO) {
      codec = audioCodec;
    }
    if (!codec) {
      codec = codecs;
    }
    let newMimeType = shaka.util.MimeUtils.getFullType(mimeType, codec);
    const currentBasicType = MimeUtils.getBasicType(
        this.sourceBufferTypes_.get(contentType));
    // Transmux when forced by config, when MSE can't play this type directly,
    // or for raw formats that can't be used in non-sequence mode.
    let needTransmux = this.config_.forceTransmux;
    if (!shaka.media.Capabilities.isTypeSupported(newMimeType) ||
        (!this.sequenceMode_ &&
        shaka.util.MimeUtils.RAW_FORMATS.includes(newMimeType))) {
      needTransmux = true;
    } else if (!needTransmux && mimeType != currentBasicType) {
      const device = shaka.device.DeviceFactory.getDevice();
      needTransmux = device.getBrowserEngine() ===
          shaka.device.IDevice.BrowserEngine.WEBKIT &&
          shaka.util.MimeUtils.RAW_FORMATS.includes(mimeType);
    }
    const TransmuxerEngine = shaka.transmuxer.TransmuxerEngine;
    if (needTransmux) {
      const newMimeTypeWithAllCodecs =
          shaka.util.MimeUtils.getFullTypeWithAllCodecs(mimeType, codec);
      const transmuxerPlugin =
          TransmuxerEngine.findTransmuxer(newMimeTypeWithAllCodecs);
      if (transmuxerPlugin) {
        transmuxer = transmuxerPlugin();
        // Both audio and video codecs present: the transmuxer output is muxed.
        if (audioCodec && videoCodec) {
          transmuxerMuxed = true;
        }
        newMimeType =
            transmuxer.convertCodecs(contentType, newMimeTypeWithAllCodecs);
      }
    }
    const newCodec = MimeUtils.getNormalizedCodec(
        MimeUtils.getCodecs(newMimeType));
    const newBasicType = MimeUtils.getBasicType(newMimeType);
    return {
      transmuxer,
      transmuxerMuxed,
      basicType: newBasicType,
      codec: newCodec,
      mimeType: newMimeType,
    };
  }
  /**
   * Decides, synchronously, how to handle a potential codec change for this
   * content type: do nothing (NONE), use SourceBuffer.changeType
   * (CHANGE_TYPE), or rebuild the MediaSource (RESET).
   *
   * Note: when a non-null transmuxer is returned, the caller owns it and must
   * store or destroy() it.
   *
   * @param {shaka.util.ManifestParserUtils.ContentType} contentType
   * @param {string} mimeType
   * @param {string} codecs
   * @param {!Map<shaka.util.ManifestParserUtils.ContentType,
   *              shaka.extern.Stream>} streamsByType
   * @return {{type: string, newMimeType: string,
   *           transmuxer: ?shaka.extern.Transmuxer}}
   * @private
   */
  getInfoAboutResetOrChangeType_(contentType, mimeType, codecs, streamsByType) {
    const ContentType = shaka.util.ManifestParserUtils.ContentType;
    if (contentType == ContentType.TEXT) {
      // Text is not backed by a SourceBuffer; no switch is ever needed.
      return {
        type: shaka.media.MediaSourceEngine.ResetMode_.NONE,
        newMimeType: mimeType,
        transmuxer: null,
      };
    }
    const MimeUtils = shaka.util.MimeUtils;
    const currentCodec = MimeUtils.getNormalizedCodec(
        MimeUtils.getCodecs(this.sourceBufferTypes_.get(contentType)));
    const currentBasicType = MimeUtils.getBasicType(
        this.sourceBufferTypes_.get(contentType));
    const realInfo = this.getRealInfo_(contentType, mimeType, codecs);
    const transmuxer = realInfo.transmuxer;
    const transmuxerMuxed = realInfo.transmuxerMuxed;
    const newBasicType = realInfo.basicType;
    const newCodec = realInfo.codec;
    const newMimeType = realInfo.mimeType;
    // For muxed output, the audio SourceBuffer must also match; otherwise a
    // switch is required even if this content type's codec is unchanged.
    let muxedContentCheck = true;
    if (transmuxerMuxed &&
        this.sourceBufferTypes_.has(ContentType.AUDIO)) {
      const muxedRealInfo =
          this.getRealInfo_(ContentType.AUDIO, mimeType, codecs);
      const muxedCurrentCodec = MimeUtils.getNormalizedCodec(
          MimeUtils.getCodecs(this.sourceBufferTypes_.get(ContentType.AUDIO)));
      const muxedCurrentBasicType = MimeUtils.getBasicType(
          this.sourceBufferTypes_.get(ContentType.AUDIO));
      muxedContentCheck = muxedCurrentCodec == muxedRealInfo.codec &&
          muxedCurrentBasicType == muxedRealInfo.basicType;
      if (muxedRealInfo.transmuxer) {
        muxedRealInfo.transmuxer.destroy();
      }
    }
    // Current/new codecs base and basic type match then no need to switch
    if (currentCodec === newCodec && currentBasicType === newBasicType &&
        muxedContentCheck) {
      return {
        type: shaka.media.MediaSourceEngine.ResetMode_.NONE,
        newMimeType,
        transmuxer,
      };
    }
    let allowChangeType = true;
    if ((this.needSplitMuxedContent_ &&
        !streamsByType.has(ContentType.AUDIO)) || (transmuxerMuxed &&
        transmuxer && !this.transmuxers_.has(contentType))) {
      allowChangeType = false;
    }
    if (allowChangeType && this.config_.codecSwitchingStrategy ===
        shaka.config.CodecSwitchingStrategy.SMOOTH &&
        shaka.media.Capabilities.isChangeTypeSupported()) {
      return {
        type: shaka.media.MediaSourceEngine.ResetMode_.CHANGE_TYPE,
        newMimeType,
        transmuxer,
      };
    } else {
      // A full reset re-creates the transmuxer, so the one we made here is
      // not needed.
      if (transmuxer) {
        transmuxer.destroy();
      }
      return {
        type: shaka.media.MediaSourceEngine.ResetMode_.RESET,
        newMimeType,
        transmuxer: null,
      };
    }
  }
  /**
   * Codec switch if necessary, this will not resolve until the codec
   * switch is over.
   * @param {shaka.util.ManifestParserUtils.ContentType} contentType
   * @param {string} mimeType
   * @param {string} codecs
   * @param {!Map<shaka.util.ManifestParserUtils.ContentType,
   *              shaka.extern.Stream>} streamsByType
   * @return {!Promise<boolean>} true if there was a codec switch,
   *                             false otherwise.
   * @private
   */
  async codecSwitchIfNecessary_(contentType, mimeType, codecs, streamsByType) {
    const ContentType = shaka.util.ManifestParserUtils.ContentType;
    const {type, transmuxer, newMimeType} = this.getInfoAboutResetOrChangeType_(
        contentType, mimeType, codecs, streamsByType);
    const newAudioStream = streamsByType.get(ContentType.AUDIO);
    if (newAudioStream) {
      this.needSplitMuxedContent_ = newAudioStream.isAudioMuxedInVideo;
    }
    if (type == shaka.media.MediaSourceEngine.ResetMode_.NONE) {
      // No SourceBuffer change needed, but the stored transmuxer may still
      // need to be added, removed, or replaced to match the new stream.
      if (this.transmuxers_.has(contentType) && !transmuxer) {
        this.transmuxers_.get(contentType).destroy();
        this.transmuxers_.delete(contentType);
      } else if (!this.transmuxers_.has(contentType) && transmuxer) {
        this.transmuxers_.set(contentType, transmuxer);
      } else if (transmuxer) {
        // Compare if the transmuxer is different
        if (this.transmuxers_.has(contentType) &&
            this.transmuxers_.get(contentType).transmux !==
            transmuxer.transmux) {
          this.transmuxers_.get(contentType).destroy();
          this.transmuxers_.set(contentType, transmuxer);
        } else {
          transmuxer.destroy();
        }
      }
      return false;
    }
    if (type == shaka.media.MediaSourceEngine.ResetMode_.CHANGE_TYPE) {
      await this.changeType(contentType, newMimeType, transmuxer);
    } else if (type == shaka.media.MediaSourceEngine.ResetMode_.RESET) {
      // reset_ re-creates all SourceBuffers and transmuxers; this one is
      // not needed.
      if (transmuxer) {
        transmuxer.destroy();
      }
      await this.reset(streamsByType);
    }
    return true;
  }
  /**
   * Returns true if it's necessary reset the media source to load the
   * new stream.
   *
   * @param {shaka.util.ManifestParserUtils.ContentType} contentType
   * @param {string} mimeType
   * @param {string} codecs
   * @param {!Map<shaka.util.ManifestParserUtils.ContentType,
   *              shaka.extern.Stream>} streamsByType
   * @return {boolean}
   */
  isResetMediaSourceNecessary(contentType, mimeType, codecs, streamsByType) {
    const info = this.getInfoAboutResetOrChangeType_(
        contentType, mimeType, codecs, streamsByType);
    // We only needed the decision; release the transmuxer created as a
    // side effect.
    if (info.transmuxer) {
      info.transmuxer.destroy();
    }
    return info.type == shaka.media.MediaSourceEngine.ResetMode_.RESET;
  }
  /**
   * Update LCEVC Decoder object when ready for LCEVC Decode.
   * The decoder is later fed by appendDependency() on each append.
   * @param {?shaka.lcevc.Dec} lcevcDec null to detach the decoder
   */
  updateLcevcDec(lcevcDec) {
    this.lcevcDec_ = lcevcDec;
  }
  2334. /**
  2335. * @param {string} mimeType
  2336. * @return {string}
  2337. * @private
  2338. */
  2339. addExtraFeaturesToMimeType_(mimeType) {
  2340. const extraFeatures = this.config_.addExtraFeaturesToSourceBuffer(mimeType);
  2341. const extendedType = mimeType + extraFeatures;
  2342. shaka.log.debug('Using full mime type', extendedType);
  2343. return extendedType;
  2344. }
  2345. };
/**
 * Internal reference to window.URL.createObjectURL function to avoid
 * compatibility issues with other libraries and frameworks such as React
 * Native. For use in unit tests only, not meant for external use.
 *
 * Captured at load time so later monkey-patching of window.URL does not
 * affect MediaSourceEngine.
 *
 * @type {function(?):string}
 */
shaka.media.MediaSourceEngine.createObjectURL = window.URL.createObjectURL;
/**
 * @typedef {{
 *   start: function(),
 *   p: !shaka.util.PublicPromise,
 *   uri: ?string
 * }}
 *
 * @summary An operation in queue.
 * @property {function()} start
 *   The function which starts the operation.
 * @property {!shaka.util.PublicPromise} p
 *   The PublicPromise which is associated with this operation; resolved or
 *   rejected when the operation completes.
 * @property {?string} uri
 *   A segment URI (if any) associated with this operation, used in error
 *   reporting.
 */
shaka.media.MediaSourceEngine.Operation;
/**
 * SourceBuffer AppendMode values, as defined by MSE.
 * @enum {string}
 * @private
 */
shaka.media.MediaSourceEngine.SourceBufferMode_ = {
  SEQUENCE: 'sequence',
  SEGMENTS: 'segments',
};
/**
 * How to handle a codec change: no action needed, full MediaSource reset, or
 * SourceBuffer.changeType().
 * @enum {string}
 * @private
 */
shaka.media.MediaSourceEngine.ResetMode_ = {
  NONE: 'none',
  RESET: 'reset',
  CHANGE_TYPE: 'changeType',
};
/**
 * @typedef {{
 *   getKeySystem: function():?string,
 *   onMetadata: function(!Array<shaka.extern.ID3Metadata>, number, ?number),
 *   onEmsg: function(!shaka.extern.EmsgInfo),
 *   onEvent: function(!Event),
 *   onManifestUpdate: function()
 * }}
 *
 * @summary Player interface
 * @property {function():?string} getKeySystem
 *   Gets currently used key system or null if not used.
 * @property {function(
 *     !Array<shaka.extern.ID3Metadata>, number, ?number)} onMetadata
 *   Callback to use when metadata arrives.
 * @property {function(!shaka.extern.EmsgInfo)} onEmsg
 *   Callback to use when EMSG arrives.
 * @property {function(!Event)} onEvent
 *   Called when an event occurs that should be sent to the app.
 * @property {function()} onManifestUpdate
 *   Called when an embedded 'emsg' box should trigger a manifest update.
 */
shaka.media.MediaSourceEngine.PlayerInterface;