Source: lib/hls/hls_parser.js

  1. /*! @license
  2. * Shaka Player
  3. * Copyright 2016 Google LLC
  4. * SPDX-License-Identifier: Apache-2.0
  5. */
  6. goog.provide('shaka.hls.HlsParser');
  7. goog.require('goog.Uri');
  8. goog.require('goog.asserts');
  9. goog.require('shaka.hls.ManifestTextParser');
  10. goog.require('shaka.hls.Playlist');
  11. goog.require('shaka.hls.PlaylistType');
  12. goog.require('shaka.hls.Tag');
  13. goog.require('shaka.hls.Utils');
  14. goog.require('shaka.log');
  15. goog.require('shaka.media.DrmEngine');
  16. goog.require('shaka.media.InitSegmentReference');
  17. goog.require('shaka.media.ManifestParser');
  18. goog.require('shaka.media.MediaSourceEngine');
  19. goog.require('shaka.media.PresentationTimeline');
  20. goog.require('shaka.media.SegmentIndex');
  21. goog.require('shaka.media.SegmentReference');
  22. goog.require('shaka.net.DataUriPlugin');
  23. goog.require('shaka.net.NetworkingEngine');
  24. goog.require('shaka.util.ArrayUtils');
  25. goog.require('shaka.util.BufferUtils');
  26. goog.require('shaka.util.CmcdManager');
  27. goog.require('shaka.util.Error');
  28. goog.require('shaka.util.FakeEvent');
  29. goog.require('shaka.util.Functional');
  30. goog.require('shaka.util.LanguageUtils');
  31. goog.require('shaka.util.ManifestParserUtils');
  32. goog.require('shaka.util.MimeUtils');
  33. goog.require('shaka.util.OperationManager');
  34. goog.require('shaka.util.Pssh');
  35. goog.require('shaka.util.Timer');
  36. goog.require('shaka.util.Platform');
  37. goog.require('shaka.util.Uint8ArrayUtils');
  38. goog.require('shaka.util.XmlUtils');
  39. goog.requireType('shaka.hls.Segment');
  40. /**
  41. * HLS parser.
  42. *
  43. * @implements {shaka.extern.ManifestParser}
  44. * @export
  45. */
  46. shaka.hls.HlsParser = class {
  47. /**
48. * Creates an HLS parser object.
  49. */
  50. constructor() {
  51. /** @private {?shaka.extern.ManifestParser.PlayerInterface} */
  52. this.playerInterface_ = null;
  53. /** @private {?shaka.extern.ManifestConfiguration} */
  54. this.config_ = null;
  55. /** @private {number} */
  56. this.globalId_ = 1;
  57. /** @private {!Map.<string, string>} */
  58. this.globalVariables_ = new Map();
  59. /**
  60. * A map from group id to stream infos created from the media tags.
  61. * @private {!Map.<string, !Array.<?shaka.hls.HlsParser.StreamInfo>>}
  62. */
  63. this.groupIdToStreamInfosMap_ = new Map();
  64. /**
  65. * For media playlist lazy-loading to work in livestreams, we have to assume
  66. * that each stream of a type (video, audio, etc) has the same mappings of
  67. * sequence number to start time.
  68. * This map stores those relationships.
  69. * Only used during livestreams; we do not assume that VOD content is
  70. * aligned in that way.
  71. * @private {!Map.<string, !Map.<number, number>>}
  72. */
  73. this.mediaSequenceToStartTimeByType_ = new Map();
  74. // Set initial maps.
  75. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  76. this.mediaSequenceToStartTimeByType_.set(ContentType.VIDEO, new Map());
  77. this.mediaSequenceToStartTimeByType_.set(ContentType.AUDIO, new Map());
  78. this.mediaSequenceToStartTimeByType_.set(ContentType.TEXT, new Map());
  79. this.mediaSequenceToStartTimeByType_.set(ContentType.IMAGE, new Map());
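// Illustrative shape only (hypothetical values): the inner map for VIDEO
// might look like {100 => 0, 101 => 6.006, 102 => 12.012}, i.e. a media
// sequence number mapped to that segment's start time in seconds.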
  80. /**
  81. * The values are strings of the form "<VIDEO URI> - <AUDIO URI>",
  82. * where the URIs are the verbatim media playlist URIs as they appeared in
  83. * the master playlist.
  84. *
  85. * Used to avoid duplicates that vary only in their text stream.
  86. *
  87. * @private {!Set.<string>}
  88. */
  89. this.variantUriSet_ = new Set();
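// Illustrative key only (hypothetical URIs):
//   "https://example.com/video/1080p.m3u8 - https://example.com/audio/en.m3u8"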
  90. /**
  91. * A map from (verbatim) media playlist URI to stream infos representing the
  92. * playlists.
  93. *
  94. * On update, used to iterate through and update from media playlists.
  95. *
  96. * On initial parse, used to iterate through and determine minimum
  97. * timestamps, offsets, and to handle TS rollover.
  98. *
  99. * During parsing, used to avoid duplicates in the async methods
  100. * createStreamInfoFromMediaTag_, createStreamInfoFromImageTag_ and
  101. * createStreamInfoFromVariantTag_.
  102. *
  103. * @private {!Map.<string, shaka.hls.HlsParser.StreamInfo>}
  104. */
  105. this.uriToStreamInfosMap_ = new Map();
  106. /** @private {?shaka.media.PresentationTimeline} */
  107. this.presentationTimeline_ = null;
  108. /**
  109. * The master playlist URI, after redirects.
  110. *
  111. * @private {string}
  112. */
  113. this.masterPlaylistUri_ = '';
  114. /** @private {shaka.hls.ManifestTextParser} */
  115. this.manifestTextParser_ = new shaka.hls.ManifestTextParser();
  116. /**
  117. * This is the number of seconds we want to wait between finishing a
  118. * manifest update and starting the next one. This will be set when we parse
  119. * the manifest.
  120. *
  121. * @private {number}
  122. */
  123. this.updatePlaylistDelay_ = 0;
  124. /**
  125. * The minimum sequence number for generated segments, when ignoring
  126. * EXT-X-PROGRAM-DATE-TIME.
  127. *
  128. * @private {number}
  129. */
  130. this.minSequenceNumber_ = -1;
  131. /**
  132. * The lowest time value for any of the streams, as defined by the
  133. * EXT-X-PROGRAM-DATE-TIME value. Measured in seconds since January 1, 1970.
  134. *
  135. * @private {number}
  136. */
  137. this.lowestSyncTime_ = Infinity;
  138. /**
  139. * Whether the streams have previously been "finalized"; that is to say,
  140. * whether we have loaded enough streams to know information about the asset
  141. * such as timing information, live status, etc.
  142. *
  143. * @private {boolean}
  144. */
  145. this.streamsFinalized_ = false;
  146. /**
  147. * This timer is used to trigger the start of a manifest update. A manifest
  148. * update is async. Once the update is finished, the timer will be restarted
  149. * to trigger the next update. The timer will only be started if the content
  150. * is live content.
  151. *
  152. * @private {shaka.util.Timer}
  153. */
  154. this.updatePlaylistTimer_ = new shaka.util.Timer(() => {
  155. this.onUpdate_();
  156. });
  157. /** @private {shaka.hls.HlsParser.PresentationType_} */
  158. this.presentationType_ = shaka.hls.HlsParser.PresentationType_.VOD;
  159. /** @private {?shaka.extern.Manifest} */
  160. this.manifest_ = null;
  161. /** @private {number} */
  162. this.maxTargetDuration_ = 0;
  163. /** @private {number} */
  164. this.minTargetDuration_ = Infinity;
165. /** The target duration for partial segments.
  166. * @private {number}
  167. */
  168. this.partialTargetDuration_ = 0;
  169. /** @private {number} */
  170. this.lowLatencyPresentationDelay_ = 0;
  171. /** @private {shaka.util.OperationManager} */
  172. this.operationManager_ = new shaka.util.OperationManager();
173. /** A map from closed captions' group id to a map of closed captions info.
  174. * {group id -> {closed captions channel id -> language}}
  175. * @private {Map.<string, Map.<string, string>>}
  176. */
  177. this.groupIdToClosedCaptionsMap_ = new Map();
  178. /** @private {Map.<string, string>} */
  179. this.groupIdToCodecsMap_ = new Map();
  180. /** A cache mapping EXT-X-MAP tag info to the InitSegmentReference created
  181. * from the tag.
  182. * The key is a string combining the EXT-X-MAP tag's absolute uri, and
  183. * its BYTERANGE if available.
184. * @private {!Map.<string, !shaka.media.InitSegmentReference>} */
  185. this.mapTagToInitSegmentRefMap_ = new Map();
  186. /** @private {boolean} */
  187. this.lowLatencyMode_ = false;
  188. }
  189. /**
  190. * @override
  191. * @exportInterface
  192. */
  193. configure(config) {
  194. this.config_ = config;
  195. }
  196. /**
  197. * @override
  198. * @exportInterface
  199. */
  200. async start(uri, playerInterface) {
  201. goog.asserts.assert(this.config_, 'Must call configure() before start()!');
  202. this.playerInterface_ = playerInterface;
  203. this.lowLatencyMode_ = playerInterface.isLowLatencyMode();
  204. const response = await this.requestManifest_(uri);
  205. // Record the master playlist URI after redirects.
  206. this.masterPlaylistUri_ = response.uri;
  207. goog.asserts.assert(response.data, 'Response data should be non-null!');
  208. await this.parseManifest_(response.data, uri);
  209. goog.asserts.assert(this.manifest_, 'Manifest should be non-null');
  210. return this.manifest_;
  211. }
  212. /**
  213. * @override
  214. * @exportInterface
  215. */
  216. stop() {
  217. // Make sure we don't update the manifest again. Even if the timer is not
  218. // running, this is safe to call.
  219. if (this.updatePlaylistTimer_) {
  220. this.updatePlaylistTimer_.stop();
  221. this.updatePlaylistTimer_ = null;
  222. }
  223. /** @type {!Array.<!Promise>} */
  224. const pending = [];
  225. if (this.operationManager_) {
  226. pending.push(this.operationManager_.destroy());
  227. this.operationManager_ = null;
  228. }
  229. this.playerInterface_ = null;
  230. this.config_ = null;
  231. this.variantUriSet_.clear();
  232. this.manifest_ = null;
  233. this.uriToStreamInfosMap_.clear();
  234. this.groupIdToStreamInfosMap_.clear();
  235. this.groupIdToCodecsMap_.clear();
  236. this.globalVariables_.clear();
  237. return Promise.all(pending);
  238. }
  239. /**
  240. * @override
  241. * @exportInterface
  242. */
  243. async update() {
  244. if (!this.isLive_()) {
  245. return;
  246. }
  247. /** @type {!Array.<!Promise>} */
  248. const updates = [];
  249. const streamInfos = Array.from(this.uriToStreamInfosMap_.values());
  250. // Only update active streams.
  251. const activeStreamInfos = streamInfos.filter((s) => s.stream.segmentIndex);
  252. for (const streamInfo of activeStreamInfos) {
  253. updates.push(this.updateStream_(streamInfo));
  254. }
  255. await Promise.all(updates);
  256. // Now that streams have been updated, notify the presentation timeline.
  257. this.notifySegmentsForStreams_(activeStreamInfos.map((s) => s.stream));
  258. // If any hasEndList is false, the stream is still live.
  259. const stillLive = activeStreamInfos.some((s) => s.hasEndList == false);
  260. if (activeStreamInfos.length && !stillLive) {
  261. // Convert the presentation to VOD and set the duration.
  262. const PresentationType = shaka.hls.HlsParser.PresentationType_;
  263. this.setPresentationType_(PresentationType.VOD);
  264. const maxTimestamps = streamInfos.map((s) => s.maxTimestamp);
  265. // The duration is the minimum of the end times of all streams.
  266. this.presentationTimeline_.setDuration(Math.min(...maxTimestamps));
  267. this.playerInterface_.updateDuration();
  268. }
  269. }
  270. /**
  271. * @param {!shaka.hls.HlsParser.StreamInfo} streamInfo
  272. * @return {!Map.<number, number>}
  273. * @private
  274. */
  275. getMediaSequenceToStartTimeFor_(streamInfo) {
  276. if (this.isLive_()) {
  277. return this.mediaSequenceToStartTimeByType_.get(streamInfo.type);
  278. } else {
  279. return streamInfo.mediaSequenceToStartTime;
  280. }
  281. }
  282. /**
  283. * Updates a stream.
  284. *
  285. * @param {!shaka.hls.HlsParser.StreamInfo} streamInfo
  286. * @return {!Promise}
  287. * @private
  288. */
  289. async updateStream_(streamInfo) {
  290. const manifestUri = streamInfo.absoluteMediaPlaylistUri;
  291. const uriObj = new goog.Uri(manifestUri);
  292. if (this.lowLatencyMode_ && streamInfo.canSkipSegments) {
293. // Enable delta updates. This will replace older segments with an
294. // 'EXT-X-SKIP' tag in the media playlist.
  295. uriObj.setQueryData(new goog.Uri.QueryData('_HLS_skip=YES'));
  296. }
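// With the skip parameter applied, the update request would look like
// (hypothetical playlist URI): https://example.com/media.m3u8?_HLS_skip=YES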
  297. const response = await this.requestManifest_(uriObj.toString());
  298. if (!streamInfo.stream.segmentIndex) {
  299. // The stream was closed since the update was first requested.
  300. return;
  301. }
  302. /** @type {shaka.hls.Playlist} */
  303. const playlist = this.manifestTextParser_.parsePlaylist(
  304. response.data, response.uri);
  305. if (playlist.type != shaka.hls.PlaylistType.MEDIA) {
  306. throw new shaka.util.Error(
  307. shaka.util.Error.Severity.CRITICAL,
  308. shaka.util.Error.Category.MANIFEST,
  309. shaka.util.Error.Code.HLS_INVALID_PLAYLIST_HIERARCHY);
  310. }
  311. /** @type {!Array.<!shaka.hls.Tag>} */
  312. const variablesTags = shaka.hls.Utils.filterTagsByName(playlist.tags,
  313. 'EXT-X-DEFINE');
  314. const mediaVariables = this.parseMediaVariables_(variablesTags);
  315. const stream = streamInfo.stream;
  316. const mediaSequenceToStartTime =
  317. this.getMediaSequenceToStartTimeFor_(streamInfo);
  318. const {keyIds, drmInfos} = this.parseDrmInfo_(playlist, stream.mimeType);
  319. const keysAreEqual =
  320. (a, b) => a.size === b.size && [...a].every((value) => b.has(value));
  321. if (!keysAreEqual(stream.keyIds, keyIds)) {
  322. stream.keyIds = keyIds;
  323. stream.drmInfos = drmInfos;
  324. this.playerInterface_.newDrmInfo(stream);
  325. }
  326. const segments = this.createSegments_(
  327. streamInfo.verbatimMediaPlaylistUri, playlist, stream.type,
  328. stream.mimeType, mediaSequenceToStartTime, mediaVariables);
  329. stream.segmentIndex.mergeAndEvict(
  330. segments, this.presentationTimeline_.getSegmentAvailabilityStart());
  331. if (segments.length) {
  332. const mediaSequenceNumber = shaka.hls.Utils.getFirstTagWithNameAsNumber(
  333. playlist.tags, 'EXT-X-MEDIA-SEQUENCE', 0);
  334. const playlistStartTime = mediaSequenceToStartTime.get(
  335. mediaSequenceNumber);
  336. stream.segmentIndex.evict(playlistStartTime);
  337. }
  338. const newestSegment = segments[segments.length - 1];
  339. goog.asserts.assert(newestSegment, 'Should have segments!');
340. // Once the last segment has been added to the playlist, the
341. // #EXT-X-ENDLIST tag will be appended.
342. // If that happens, treat the rest of the EVENT presentation as VOD.
  343. const endListTag =
  344. shaka.hls.Utils.getFirstTagWithName(playlist.tags, 'EXT-X-ENDLIST');
  345. if (endListTag) {
  346. // Flag this for later. We don't convert the whole presentation into VOD
  347. // until we've seen the ENDLIST tag for all active playlists.
  348. streamInfo.hasEndList = true;
  349. streamInfo.maxTimestamp = newestSegment.endTime;
  350. }
  351. }
  352. /**
  353. * @override
  354. * @exportInterface
  355. */
  356. onExpirationUpdated(sessionId, expiration) {
  357. // No-op
  358. }
  359. /**
  360. * Align the streams by sequence number by dropping early segments. Then
  361. * offset the streams to begin at presentation time 0.
  362. * @param {!Array.<!shaka.hls.HlsParser.StreamInfo>} streamInfos
  363. * @private
  364. */
  365. syncStreamsWithSequenceNumber_(streamInfos) {
  366. // We assume that, when this is first called, we have enough info to
  367. // determine how to use the program date times (e.g. we have both a video
  368. // and an audio, and all other videos and audios match those).
  369. // Thus, we only need to calculate this once.
  370. const updateMinSequenceNumber = this.minSequenceNumber_ == -1;
  371. // Sync using media sequence number. Find the highest starting sequence
  372. // number among all streams. Later, we will drop any references to
  373. // earlier segments in other streams, then offset everything back to 0.
  374. for (const streamInfo of streamInfos) {
  375. const segmentIndex = streamInfo.stream.segmentIndex;
  376. goog.asserts.assert(segmentIndex,
  377. 'Only loaded streams should be synced');
  378. const mediaSequenceToStartTime =
  379. this.getMediaSequenceToStartTimeFor_(streamInfo);
  380. const segment0 = segmentIndex.earliestReference();
  381. if (segment0) {
  382. // This looks inefficient, but iteration order is insertion order.
  383. // So the very first entry should be the one we want.
  384. // We assert that this holds true so that we are alerted by debug
  385. // builds and tests if it changes. We still do a loop, though, so
  386. // that the code functions correctly in production no matter what.
  387. if (goog.DEBUG) {
  388. const firstSequenceStartTime =
  389. mediaSequenceToStartTime.values().next().value;
  390. goog.asserts.assert(
  391. firstSequenceStartTime == segment0.startTime,
  392. 'Sequence number map is not ordered as expected!');
  393. }
  394. for (const [sequence, start] of mediaSequenceToStartTime) {
  395. if (start == segment0.startTime) {
  396. if (updateMinSequenceNumber) {
  397. this.minSequenceNumber_ = Math.max(
  398. this.minSequenceNumber_, sequence);
  399. }
  400. // Even if we already have decided on a value for
  401. // |this.minSequenceNumber_|, we still need to determine the first
  402. // sequence number for the stream, to offset it in the code below.
  403. streamInfo.firstSequenceNumber = sequence;
  404. break;
  405. }
  406. }
  407. }
  408. }
  409. if (this.minSequenceNumber_ < 0) {
  410. // Nothing to sync.
  411. return;
  412. }
  413. shaka.log.debug('Syncing HLS streams against base sequence number:',
  414. this.minSequenceNumber_);
  415. for (const streamInfo of streamInfos) {
  416. const segmentIndex = streamInfo.stream.segmentIndex;
  417. if (segmentIndex) {
  418. // Drop any earlier references.
  419. const numSegmentsToDrop =
  420. this.minSequenceNumber_ - streamInfo.firstSequenceNumber;
  421. segmentIndex.dropFirstReferences(numSegmentsToDrop);
  422. // Now adjust timestamps back to begin at 0.
  423. const segmentN = segmentIndex.earliestReference();
  424. if (segmentN) {
  425. this.offsetStream_(streamInfo, -segmentN.startTime);
  426. }
  427. }
  428. }
  429. }
  430. /**
  431. * Synchronize streams by the EXT-X-PROGRAM-DATE-TIME tags attached to their
  432. * segments. Also normalizes segment times so that the earliest segment in
  433. * any stream is at time 0.
  434. * @param {!Array.<!shaka.hls.HlsParser.StreamInfo>} streamInfos
  435. * @private
  436. */
  437. syncStreamsWithProgramDateTime_(streamInfos) {
  438. // We assume that, when this is first called, we have enough info to
  439. // determine how to use the program date times (e.g. we have both a video
  440. // and an audio, and all other videos and audios match those).
  441. // Thus, we only need to calculate this once.
  442. if (this.lowestSyncTime_ == Infinity) {
  443. for (const streamInfo of streamInfos) {
  444. const segmentIndex = streamInfo.stream.segmentIndex;
  445. goog.asserts.assert(segmentIndex,
  446. 'Only loaded streams should be synced');
  447. const segment0 = segmentIndex.earliestReference();
  448. if (segment0 != null && segment0.syncTime != null) {
  449. this.lowestSyncTime_ =
  450. Math.min(this.lowestSyncTime_, segment0.syncTime);
  451. }
  452. }
  453. }
  454. const lowestSyncTime = this.lowestSyncTime_;
  455. if (lowestSyncTime == Infinity) {
  456. // Nothing to sync.
  457. return;
  458. }
  459. shaka.log.debug('Syncing HLS streams against base time:', lowestSyncTime);
  460. for (const streamInfo of this.uriToStreamInfosMap_.values()) {
  461. const segmentIndex = streamInfo.stream.segmentIndex;
  462. if (segmentIndex != null) {
  463. const segment0 = segmentIndex.earliestReference();
  464. if (segment0.syncTime == null) {
  465. shaka.log.alwaysError('Missing EXT-X-PROGRAM-DATE-TIME for stream',
  466. streamInfo.verbatimMediaPlaylistUri,
  467. 'Expect AV sync issues!');
  468. } else {
  469. // The first segment's target startTime should be based entirely on
  470. // its syncTime. The rest of the stream will be based on that
  471. // starting point. The earliest segment sync time from any stream
  472. // will become presentation time 0. If two streams start e.g. 6
  473. // seconds apart in syncTime, then their first segments will also
  474. // start 6 seconds apart in presentation time.
  475. const segment0TargetTime = segment0.syncTime - lowestSyncTime;
  476. const streamOffset = segment0TargetTime - segment0.startTime;
  477. this.offsetStream_(streamInfo, streamOffset);
  478. }
  479. }
  480. }
  481. }
  482. /**
  483. * @param {!shaka.hls.HlsParser.StreamInfo} streamInfo
  484. * @param {number} offset
  485. * @private
  486. */
  487. offsetStream_(streamInfo, offset) {
  488. streamInfo.stream.segmentIndex.offset(offset);
  489. streamInfo.maxTimestamp += offset;
  490. goog.asserts.assert(streamInfo.maxTimestamp >= 0,
  491. 'Negative maxTimestamp after adjustment!');
  492. const mediaSequenceToStartTime =
  493. this.getMediaSequenceToStartTimeFor_(streamInfo);
  494. for (const [key, value] of mediaSequenceToStartTime) {
  495. mediaSequenceToStartTime.set(key, value + offset);
  496. }
  497. shaka.log.debug('Offset', offset, 'applied to',
  498. streamInfo.verbatimMediaPlaylistUri);
  499. }
  500. /**
  501. * Parses the manifest.
  502. *
  503. * @param {BufferSource} data
  504. * @param {string} uri
  505. * @return {!Promise}
  506. * @private
  507. */
  508. async parseManifest_(data, uri) {
  509. const HlsParser = shaka.hls.HlsParser;
  510. const Utils = shaka.hls.Utils;
  511. goog.asserts.assert(this.masterPlaylistUri_,
  512. 'Master playlist URI must be set before calling parseManifest_!');
  513. const playlist = this.manifestTextParser_.parsePlaylist(
  514. data, this.masterPlaylistUri_);
  515. /** @type {!Array.<!shaka.hls.Tag>} */
  516. const variablesTags = Utils.filterTagsByName(playlist.tags, 'EXT-X-DEFINE');
  517. this.parseMasterVariables_(variablesTags);
  518. /** @type {!Array.<!shaka.extern.Variant>} */
  519. let variants = [];
  520. /** @type {!Array.<!shaka.extern.Stream>} */
  521. let textStreams = [];
  522. /** @type {!Array.<!shaka.extern.Stream>} */
  523. let imageStreams = [];
  524. // Parsing a media playlist results in a single-variant stream.
  525. if (playlist.type == shaka.hls.PlaylistType.MEDIA) {
  526. // Get necessary info for this stream, from the config. These are things
  527. // we would normally find from the master playlist (e.g. from values on
  528. // EXT-X-MEDIA tags).
  529. let fullMimeType = this.config_.hls.mediaPlaylistFullMimeType;
  530. // Try to infer the full mimetype better.
  531. if (playlist.segments.length) {
  532. const parsedUri = new goog.Uri(playlist.segments[0].absoluteUri);
  533. const extension = parsedUri.getPath().split('.').pop();
  534. const mimeType = HlsParser.RAW_FORMATS_TO_MIME_TYPES_[extension];
  535. if (mimeType) {
  536. fullMimeType = mimeType;
  537. }
  538. }
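// For example (hypothetical URI), a first segment ending in ".aac" would,
// assuming that extension appears in RAW_FORMATS_TO_MIME_TYPES_, override the
// configured mediaPlaylistFullMimeType default with the raw-format MIME type.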
  539. const mimeType = shaka.util.MimeUtils.getBasicType(fullMimeType);
  540. const type = mimeType.split('/')[0];
  541. const codecs = shaka.util.MimeUtils.getCodecs(fullMimeType);
  542. // Some values we cannot figure out, and aren't important enough to ask
  543. // the user to provide through config values. A lot of these are only
  544. // relevant to ABR, which isn't necessary if there's only one variant.
  545. // So these unknowns should be set to false or null, largely.
  546. const language = '';
  547. const channelsCount = null;
  548. const spatialAudio = false;
  549. const characteristics = null;
  550. const closedCaptions = new Map();
  551. const forced = false; // Only relevant for text.
  552. const primary = true; // This is the only stream!
  553. const name = 'Media Playlist';
  554. // Make the stream info, with those values.
  555. const streamInfo = await this.convertParsedPlaylistIntoStreamInfo_(
  556. playlist, uri, uri, codecs, type, language, primary, name,
  557. channelsCount, closedCaptions, characteristics, forced, spatialAudio,
  558. mimeType);
  559. this.uriToStreamInfosMap_.set(uri, streamInfo);
  560. // Wrap the stream from that stream info with a variant.
  561. variants.push({
  562. id: 0,
  563. language: 'und',
  564. disabledUntilTime: 0,
  565. primary: true,
  566. audio: type == 'audio' ? streamInfo.stream : null,
  567. video: type == 'video' ? streamInfo.stream : null,
  568. bandwidth: 0,
  569. allowedByApplication: true,
  570. allowedByKeySystem: true,
  571. decodingInfos: [],
  572. });
  573. } else {
  574. /** @type {!Array.<!shaka.hls.Tag>} */
  575. const mediaTags = Utils.filterTagsByName(playlist.tags, 'EXT-X-MEDIA');
  576. /** @type {!Array.<!shaka.hls.Tag>} */
  577. const variantTags = Utils.filterTagsByName(
  578. playlist.tags, 'EXT-X-STREAM-INF');
  579. /** @type {!Array.<!shaka.hls.Tag>} */
  580. const imageTags = Utils.filterTagsByName(
  581. playlist.tags, 'EXT-X-IMAGE-STREAM-INF');
  582. /** @type {!Array.<!shaka.hls.Tag>} */
  583. const sessionKeyTags = Utils.filterTagsByName(
  584. playlist.tags, 'EXT-X-SESSION-KEY');
  585. this.parseCodecs_(variantTags);
  586. /** @type {!Array.<!shaka.hls.Tag>} */
587. const sessionDataTags =
588. Utils.filterTagsByName(playlist.tags, 'EXT-X-SESSION-DATA');
589. for (const tag of sessionDataTags) {
  590. const id = tag.getAttributeValue('DATA-ID');
  591. const uri = tag.getAttributeValue('URI');
  592. const language = tag.getAttributeValue('LANGUAGE');
  593. const value = tag.getAttributeValue('VALUE');
  594. const data = (new Map()).set('id', id);
  595. if (uri) {
  596. data.set('uri', shaka.hls.Utils.constructAbsoluteUri(
  597. this.masterPlaylistUri_, uri));
  598. }
  599. if (language) {
  600. data.set('language', language);
  601. }
  602. if (value) {
  603. data.set('value', value);
  604. }
  605. const event = new shaka.util.FakeEvent('sessiondata', data);
  606. if (this.playerInterface_) {
  607. this.playerInterface_.onEvent(event);
  608. }
  609. }
  610. // Parse audio and video media tags first, so that we can extract segment
611. // start times from audio/video streams and reuse them for text streams.
  612. this.createStreamInfosFromMediaTags_(mediaTags);
  613. this.parseClosedCaptions_(mediaTags);
  614. variants = this.createVariantsForTags_(variantTags, sessionKeyTags);
  615. textStreams = this.parseTexts_(mediaTags);
  616. imageStreams = await this.parseImages_(imageTags);
  617. }
  618. // Make sure that the parser has not been destroyed.
  619. if (!this.playerInterface_) {
  620. throw new shaka.util.Error(
  621. shaka.util.Error.Severity.CRITICAL,
  622. shaka.util.Error.Category.PLAYER,
  623. shaka.util.Error.Code.OPERATION_ABORTED);
  624. }
  625. // This assert is our own sanity check.
  626. goog.asserts.assert(this.presentationTimeline_ == null,
  627. 'Presentation timeline created early!');
  628. // We don't know if the presentation is VOD or live until we parse at least
  629. // one media playlist, so make a VOD-style presentation timeline for now
  630. // and change the type later if we discover this is live.
  631. // Since the player will load the first variant chosen early in the process,
  632. // there isn't a window during playback where the live-ness is unknown.
  633. this.presentationTimeline_ = new shaka.media.PresentationTimeline(
  634. /* presentationStartTime= */ null, /* delay= */ 0);
  635. this.presentationTimeline_.setStatic(true);
  636. // Single-variant streams aren't lazy-loaded, so for them we already have
  637. // enough info here to determine the presentation type and duration.
  638. if (playlist.type == shaka.hls.PlaylistType.MEDIA) {
  639. if (this.isLive_()) {
  640. this.changePresentationTimelineToLive_();
  641. const delay = this.updatePlaylistDelay_;
  642. this.updatePlaylistTimer_.tickAfter(/* seconds= */ delay);
  643. }
  644. const streamInfos = Array.from(this.uriToStreamInfosMap_.values());
  645. this.finalizeStreams_(streamInfos);
  646. this.determineDuration_();
  647. }
  648. this.manifest_ = {
  649. presentationTimeline: this.presentationTimeline_,
  650. variants,
  651. textStreams,
  652. imageStreams,
  653. offlineSessionIds: [],
  654. minBufferTime: 0,
  655. sequenceMode: true,
  656. };
  657. this.playerInterface_.makeTextStreamsForClosedCaptions(this.manifest_);
  658. }
  659. /** @private */
  660. determineDuration_() {
  661. goog.asserts.assert(this.presentationTimeline_,
  662. 'Presentation timeline not created!');
  663. if (this.isLive_()) {
  664. // The HLS spec (RFC 8216) states in 6.3.4:
  665. // "the client MUST wait for at least the target duration before
  666. // attempting to reload the Playlist file again".
  667. // For LL-HLS, the server must add a new partial segment to the Playlist
  668. // every part target duration.
  669. this.updatePlaylistDelay_ = this.minTargetDuration_;
  670. // The spec says nothing much about seeking in live content, but Safari's
  671. // built-in HLS implementation does not allow it. Therefore we will set
  672. // the availability window equal to the presentation delay. The player
  673. // will be able to buffer ahead three segments, but the seek window will
  674. // be zero-sized.
  675. const PresentationType = shaka.hls.HlsParser.PresentationType_;
  676. if (this.presentationType_ == PresentationType.LIVE) {
  677. let segmentAvailabilityDuration = this.getMinDuration_();
  678. // This defaults to the presentation delay, which has the effect of
  679. // making the live stream unseekable. This is consistent with Apple's
  680. // HLS implementation.
  681. if (this.config_.hls.useSafariBehaviorForLive) {
  682. segmentAvailabilityDuration = this.presentationTimeline_.getDelay();
  683. }
  684. // The app can override that with a longer duration, to allow seeking.
  685. if (!isNaN(this.config_.availabilityWindowOverride)) {
  686. segmentAvailabilityDuration = this.config_.availabilityWindowOverride;
  687. }
  688. this.presentationTimeline_.setSegmentAvailabilityDuration(
  689. segmentAvailabilityDuration);
  690. }
  691. } else {
  692. // Use the minimum duration as the presentation duration.
  693. this.presentationTimeline_.setDuration(this.getMinDuration_());
  694. }
  695. // This is the first point where we have a meaningful presentation start
  696. // time, and we need to tell PresentationTimeline that so that it can
  697. // maintain consistency from here on.
  698. this.presentationTimeline_.lockStartTime();
  699. // This asserts that the live edge is being calculated from segment times.
  700. // For VOD and event streams, this check should still pass.
  701. goog.asserts.assert(
  702. !this.presentationTimeline_.usingPresentationStartTime(),
  703. 'We should not be using the presentation start time in HLS!');
  704. }
  705. /**
706. * Get the variables of each EXT-X-DEFINE tag, and store them in a map.
707. * @param {!Array.<!shaka.hls.Tag>} tags EXT-X-DEFINE tags from the playlist.
  708. * @private
  709. */
  710. parseMasterVariables_(tags) {
  711. for (const variableTag of tags) {
  712. const name = variableTag.getAttributeValue('NAME');
  713. const value = variableTag.getAttributeValue('VALUE');
  714. if (name && value) {
  715. if (!this.globalVariables_.has(name)) {
  716. this.globalVariables_.set(name, value);
  717. }
  718. }
  719. }
  720. }
  721. /**
722. * Get the variables of each EXT-X-DEFINE tag, and store them in a map.
723. * @param {!Array.<!shaka.hls.Tag>} tags EXT-X-DEFINE tags from the playlist.
  724. * @return {!Map.<string, string>}
  725. * @private
  726. */
  727. parseMediaVariables_(tags) {
  728. const mediaVariables = new Map();
  729. for (const variableTag of tags) {
  730. const name = variableTag.getAttributeValue('NAME');
  731. const value = variableTag.getAttributeValue('VALUE');
  732. const mediaImport = variableTag.getAttributeValue('IMPORT');
  733. if (name && value) {
  734. mediaVariables.set(name, value);
  735. }
  736. if (mediaImport) {
  737. const globalValue = this.globalVariables_.get(mediaImport);
  738. if (globalValue) {
  739. mediaVariables.set(mediaImport, globalValue);
  740. }
  741. }
  742. }
  743. return mediaVariables;
  744. }
  745. /**
746. * Get the codecs of each variant tag, and store them in a map from
747. * audio/video/subtitle group id to the codecs string.
  748. * @param {!Array.<!shaka.hls.Tag>} tags Variant tags from the playlist.
  749. * @private
  750. */
  751. parseCodecs_(tags) {
  752. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  753. for (const variantTag of tags) {
  754. const audioGroupId = variantTag.getAttributeValue('AUDIO');
  755. const videoGroupId = variantTag.getAttributeValue('VIDEO');
  756. const subGroupId = variantTag.getAttributeValue('SUBTITLES');
  757. const allCodecs = this.getCodecsForVariantTag_(variantTag);
  758. if (subGroupId) {
  759. const textCodecs = shaka.util.ManifestParserUtils.guessCodecsSafe(
  760. ContentType.TEXT, allCodecs);
  761. goog.asserts.assert(textCodecs != null, 'Text codecs should be valid.');
  762. this.groupIdToCodecsMap_.set(subGroupId, textCodecs);
  763. shaka.util.ArrayUtils.remove(allCodecs, textCodecs);
  764. }
  765. if (audioGroupId) {
  766. let codecs = shaka.util.ManifestParserUtils.guessCodecsSafe(
  767. ContentType.AUDIO, allCodecs);
  768. if (!codecs) {
  769. codecs = this.config_.hls.defaultAudioCodec;
  770. }
  771. this.groupIdToCodecsMap_.set(audioGroupId, codecs);
  772. }
  773. if (videoGroupId) {
  774. let codecs = shaka.util.ManifestParserUtils.guessCodecsSafe(
  775. ContentType.VIDEO, allCodecs);
  776. if (!codecs) {
  777. codecs = this.config_.hls.defaultVideoCodec;
  778. }
  779. this.groupIdToCodecsMap_.set(videoGroupId, codecs);
  780. }
  781. }
  782. }
  783. /**
  784. * Parse Subtitles and Closed Captions from 'EXT-X-MEDIA' tags.
  785. * Create text streams for Subtitles, but not Closed Captions.
  786. *
  787. * @param {!Array.<!shaka.hls.Tag>} mediaTags Media tags from the playlist.
  788. * @return {!Array.<!shaka.extern.Stream>}
  789. * @private
  790. */
  791. parseTexts_(mediaTags) {
  792. // Create text stream for each Subtitle media tag.
  793. const subtitleTags =
  794. shaka.hls.Utils.filterTagsByType(mediaTags, 'SUBTITLES');
  795. const textStreams = subtitleTags.map((tag) => {
  796. const disableText = this.config_.disableText;
  797. if (disableText) {
  798. return null;
  799. }
  800. try {
  801. return this.createStreamInfoFromMediaTag_(tag).stream;
  802. } catch (e) {
  803. if (this.config_.hls.ignoreTextStreamFailures) {
  804. return null;
  805. }
  806. throw e;
  807. }
  808. });
  809. // Set the codecs for text streams.
  810. for (const tag of subtitleTags) {
  811. const groupId = tag.getRequiredAttrValue('GROUP-ID');
  812. const codecs = this.groupIdToCodecsMap_.get(groupId);
  813. if (codecs) {
  814. const textStreamInfos = this.groupIdToStreamInfosMap_.get(groupId);
  815. if (textStreamInfos) {
  816. for (const textStreamInfo of textStreamInfos) {
  817. textStreamInfo.stream.codecs = codecs;
  818. }
  819. }
  820. }
  821. }
  822. // Do not create text streams for Closed captions.
  823. return textStreams.filter((s) => s);
  824. }
  825. /**
826. * @param {!Array.<!shaka.hls.Tag>} imageTags Image tags from the playlist.
  827. * @return {!Promise.<!Array.<!shaka.extern.Stream>>}
  828. * @private
  829. */
  830. async parseImages_(imageTags) {
  831. // Create image stream for each image tag.
  832. const imageStreamPromises = imageTags.map(async (tag) => {
  833. const disableThumbnails = this.config_.disableThumbnails;
  834. if (disableThumbnails) {
  835. return null;
  836. }
  837. try {
  838. const streamInfo = await this.createStreamInfoFromImageTag_(tag);
  839. return streamInfo.stream;
  840. } catch (e) {
  841. if (this.config_.hls.ignoreImageStreamFailures) {
  842. return null;
  843. }
  844. throw e;
  845. }
  846. });
  847. const imageStreams = await Promise.all(imageStreamPromises);
  848. return imageStreams.filter((s) => s);
  849. }
  850. /**
  851. * @param {!Array.<!shaka.hls.Tag>} mediaTags Media tags from the playlist.
  852. * @private
  853. */
  854. createStreamInfosFromMediaTags_(mediaTags) {
  855. // Filter out subtitles and media tags without uri.
  856. mediaTags = mediaTags.filter((tag) => {
  857. const uri = tag.getAttributeValue('URI') || '';
  858. const type = tag.getAttributeValue('TYPE');
  859. return type != 'SUBTITLES' && uri != '';
  860. });
  861. // Create stream info for each audio / video media tag.
  862. for (const tag of mediaTags) {
  863. this.createStreamInfoFromMediaTag_(tag);
  864. }
  865. }
  866. /**
  867. * @param {!Array.<!shaka.hls.Tag>} tags Variant tags from the playlist.
  868. * @param {!Array.<!shaka.hls.Tag>} sessionKeyTags EXT-X-SESSION-KEY tags
  869. * from the playlist.
  870. * @return {!Array.<!shaka.extern.Variant>}
  871. * @private
  872. */
  873. createVariantsForTags_(tags, sessionKeyTags) {
  874. // EXT-X-SESSION-KEY processing
  875. const drmInfos = [];
  876. const keyIds = new Set();
  877. if (sessionKeyTags.length > 0) {
  878. for (const drmTag of sessionKeyTags) {
  879. const method = drmTag.getRequiredAttrValue('METHOD');
  880. if (method != 'NONE' && method != 'AES-128') {
  881. // According to the HLS spec, KEYFORMAT is optional and implicitly
  882. // defaults to "identity".
  883. // https://datatracker.ietf.org/doc/html/draft-pantos-hls-rfc8216bis-11#section-4.4.4.4
  884. const keyFormat =
  885. drmTag.getAttributeValue('KEYFORMAT') || 'identity';
  886. const drmParser =
  887. shaka.hls.HlsParser.KEYFORMATS_TO_DRM_PARSERS_[keyFormat];
  888. const drmInfo = drmParser ?
  889. drmParser(drmTag, /* mimeType= */ '') : null;
  890. if (drmInfo) {
  891. if (drmInfo.keyIds) {
  892. for (const keyId of drmInfo.keyIds) {
  893. keyIds.add(keyId);
  894. }
  895. }
  896. drmInfos.push(drmInfo);
  897. } else {
  898. shaka.log.warning('Unsupported HLS KEYFORMAT', keyFormat);
  899. }
  900. }
  901. }
  902. }
  903. // Create variants for each variant tag.
  904. const allVariants = tags.map((tag) => {
  905. const frameRate = tag.getAttributeValue('FRAME-RATE');
  906. const bandwidth = Number(tag.getAttributeValue('AVERAGE-BANDWIDTH')) ||
  907. Number(tag.getRequiredAttrValue('BANDWIDTH'));
  908. const resolution = tag.getAttributeValue('RESOLUTION');
  909. const [width, height] = resolution ? resolution.split('x') : [null, null];
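// e.g. RESOLUTION="1280x720" (illustrative) yields width "1280" and
// height "720"; both are kept as strings and passed to createVariants_.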
  910. const videoRange = tag.getAttributeValue('VIDEO-RANGE');
  911. const streamInfos = this.createStreamInfosForVariantTag_(tag,
  912. resolution, frameRate);
  913. goog.asserts.assert(streamInfos.audio.length ||
  914. streamInfos.video.length, 'We should have created a stream!');
  915. return this.createVariants_(
  916. streamInfos.audio,
  917. streamInfos.video,
  918. bandwidth,
  919. width,
  920. height,
  921. frameRate,
  922. videoRange,
  923. drmInfos,
  924. keyIds);
  925. });
  926. let variants = allVariants.reduce(shaka.util.Functional.collapseArrays, []);
  927. // Filter out null variants.
  928. variants = variants.filter((variant) => variant != null);
  929. return variants;
  930. }
  931. /**
  932. * Create audio and video streamInfos from an 'EXT-X-STREAM-INF' tag and its
  933. * related media tags.
  934. *
  935. * @param {!shaka.hls.Tag} tag
  936. * @param {?string} resolution
  937. * @param {?string} frameRate
  938. * @return {!shaka.hls.HlsParser.StreamInfos}
  939. * @private
  940. */
  941. createStreamInfosForVariantTag_(tag, resolution, frameRate) {
  942. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  943. /** @type {!Array.<string>} */
  944. let allCodecs = this.getCodecsForVariantTag_(tag);
  945. const audioGroupId = tag.getAttributeValue('AUDIO');
  946. const videoGroupId = tag.getAttributeValue('VIDEO');
  947. goog.asserts.assert(audioGroupId == null || videoGroupId == null,
  948. 'Unexpected: both video and audio described by media tags!');
  949. const groupId = audioGroupId || videoGroupId;
  950. const streamInfos =
  951. (groupId && this.groupIdToStreamInfosMap_.has(groupId)) ?
  952. this.groupIdToStreamInfosMap_.get(groupId) : [];
  953. /** @type {shaka.hls.HlsParser.StreamInfos} */
  954. const res = {
  955. audio: audioGroupId ? streamInfos : [],
  956. video: videoGroupId ? streamInfos : [],
  957. };
  958. // Make an educated guess about the stream type.
  959. shaka.log.debug('Guessing stream type for', tag.toString());
  960. let type;
  961. let ignoreStream = false;
  962. // The Microsoft HLS manifest generators will make audio-only variants
  963. // that link to their URI both directly and through an audio tag.
  964. // In that case, ignore the local URI and use the version in the
  965. // AUDIO tag, so you inherit its language.
  966. // As an example, see the manifest linked in issue #860.
  967. const streamURI = tag.getRequiredAttrValue('URI');
  968. const hasSameUri = res.audio.find((audio) => {
  969. return audio && audio.verbatimMediaPlaylistUri == streamURI;
  970. });
  971. const videoCodecs = shaka.util.ManifestParserUtils.guessCodecsSafe(
  972. ContentType.VIDEO, allCodecs);
  973. const audioCodecs = shaka.util.ManifestParserUtils.guessCodecsSafe(
  974. ContentType.AUDIO, allCodecs);
  975. if (audioCodecs && !videoCodecs) {
  976. // There are no associated media tags, and there's only audio codec,
  977. // and no video codec, so it should be audio.
  978. type = ContentType.AUDIO;
  979. shaka.log.debug('Guessing audio-only.');
  980. } else if (!streamInfos.length && audioCodecs && videoCodecs) {
  981. // There are both audio and video codecs, so assume multiplexed content.
  982. // Note that the default used when CODECS is missing assumes multiple
  983. // (and therefore multiplexed).
  984. // Recombine the codec strings into one so that MediaSource isn't
  985. // lied to later. (That would trigger an error in Chrome.)
  986. shaka.log.debug('Guessing multiplexed audio+video.');
  987. type = ContentType.VIDEO;
  988. allCodecs = [[videoCodecs, audioCodecs].join(',')];
  989. } else if (res.audio.length && hasSameUri) {
  990. shaka.log.debug('Guessing audio-only.');
  991. type = ContentType.AUDIO;
  992. ignoreStream = true;
  993. } else if (res.video.length) {
  994. // There are associated video streams. Assume this is audio.
  995. shaka.log.debug('Guessing audio-only.');
  996. type = ContentType.AUDIO;
  997. } else {
  998. shaka.log.debug('Guessing video-only.');
  999. type = ContentType.VIDEO;
  1000. }
  1001. if (!ignoreStream) {
  1002. const streamInfo =
  1003. this.createStreamInfoFromVariantTag_(tag, allCodecs, type);
  1004. res[streamInfo.stream.type] = [streamInfo];
  1005. }
  1006. return res;
  1007. }
  1008. /**
  1009. * Get the codecs from the 'EXT-X-STREAM-INF' tag.
  1010. *
  1011. * @param {!shaka.hls.Tag} tag
  1012. * @return {!Array.<string>} codecs
  1013. * @private
  1014. */
  1015. getCodecsForVariantTag_(tag) {
  1016. // These are the default codecs to assume if none are specified.
  1017. const defaultCodecsArray = [];
  1018. if (!this.config_.disableVideo) {
  1019. defaultCodecsArray.push(this.config_.hls.defaultVideoCodec);
  1020. }
  1021. if (!this.config_.disableAudio) {
  1022. defaultCodecsArray.push(this.config_.hls.defaultAudioCodec);
  1023. }
  1024. const defaultCodecs = defaultCodecsArray.join(',');
  1025. const codecsString = tag.getAttributeValue('CODECS', defaultCodecs);
  1026. // Strip out internal whitespace while splitting on commas:
  1027. /** @type {!Array.<string>} */
  1028. const codecs = codecsString.split(/\s*,\s*/);
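// e.g. CODECS="avc1.4d401f, mp4a.40.2" (illustrative) becomes
// ['avc1.4d401f', 'mp4a.40.2'].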
  1029. // Filter out duplicate codecs.
  1030. const seen = new Set();
  1031. const ret = [];
  1032. for (const codec of codecs) {
  1033. // HLS says the CODECS field needs to include all codecs that appear in
  1034. // the content. This means that if the content changes profiles, it should
  1035. // include both. Since all known browsers support changing profiles
  1036. // without any other work, just ignore them. See also:
  1037. // https://github.com/shaka-project/shaka-player/issues/1817
  1038. const shortCodec = shaka.util.MimeUtils.getCodecBase(codec);
  1039. if (!seen.has(shortCodec)) {
  1040. ret.push(codec);
  1041. seen.add(shortCodec);
  1042. } else {
  1043. shaka.log.debug('Ignoring duplicate codec');
  1044. }
  1045. }
  1046. return ret;
  1047. }
  1048. /**
  1049. * Get the channel count information for an HLS audio track.
  1050. * CHANNELS specifies an ordered, "/" separated list of parameters.
  1051. * If the type is audio, the first parameter will be a decimal integer
  1052. * specifying the number of independent, simultaneous audio channels.
  1053. * No other channels parameters are currently defined.
  1054. *
  1055. * @param {!shaka.hls.Tag} tag
  1056. * @return {?number}
  1057. * @private
  1058. */
  1059. getChannelsCount_(tag) {
  1060. const channels = tag.getAttributeValue('CHANNELS');
  1061. if (!channels) {
  1062. return null;
  1063. }
  1064. const channelcountstring = channels.split('/')[0];
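// e.g. CHANNELS="2" gives "2", and CHANNELS="16/JOC" gives "16"; the "/JOC"
// part is handled separately by isSpatialAudio_.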
  1065. const count = parseInt(channelcountstring, 10);
  1066. return count;
  1067. }
  1068. /**
  1069. * Get the spatial audio information for an HLS audio track.
  1070. * In HLS the channels field indicates the number of audio channels that the
  1071. * stream has (eg: 2). In the case of Dolby Atmos, the complexity is
  1072. * expressed with the number of channels followed by the word JOC
  1073. * (eg: 16/JOC), so 16 would be the number of channels (eg: 7.3.6 layout),
  1074. * and JOC indicates that the stream has spatial audio.
  1075. * @see https://developer.apple.com/documentation/http_live_streaming/hls_authoring_specification_for_apple_devices/hls_authoring_specification_for_apple_devices_appendixes
  1076. *
  1077. * @param {!shaka.hls.Tag} tag
  1078. * @return {boolean}
  1079. * @private
  1080. */
  1081. isSpatialAudio_(tag) {
  1082. const channels = tag.getAttributeValue('CHANNELS');
  1083. if (!channels) {
  1084. return false;
  1085. }
  1086. return channels.includes('/JOC');
  1087. }
  1088. /**
  1089. * Get the closed captions map information for the EXT-X-STREAM-INF tag, to
  1090. * create the stream info.
  1091. * @param {!shaka.hls.Tag} tag
  1092. * @param {string} type
  1093. * @return {Map.<string, string>} closedCaptions
  1094. * @private
  1095. */
  1096. getClosedCaptions_(tag, type) {
  1097. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  1098. // The attribute of closed captions is optional, and the value may be
  1099. // 'NONE'.
  1100. const closedCaptionsAttr = tag.getAttributeValue('CLOSED-CAPTIONS');
  1101. // EXT-X-STREAM-INF tags may have CLOSED-CAPTIONS attributes.
  1102. // The value can be either a quoted-string or an enumerated-string with
  1103. // the value NONE. If the value is a quoted-string, it MUST match the
  1104. // value of the GROUP-ID attribute of an EXT-X-MEDIA tag elsewhere in the
  1105. // Playlist whose TYPE attribute is CLOSED-CAPTIONS.
  1106. if (type == ContentType.VIDEO && closedCaptionsAttr &&
  1107. closedCaptionsAttr != 'NONE') {
  1108. return this.groupIdToClosedCaptionsMap_.get(closedCaptionsAttr);
  1109. }
  1110. return null;
  1111. }
  1112. /**
  1113. * Get the language value.
  1114. *
  1115. * @param {!shaka.hls.Tag} tag
  1116. * @return {string}
  1117. * @private
  1118. */
  1119. getLanguage_(tag) {
  1120. const LanguageUtils = shaka.util.LanguageUtils;
  1121. const languageValue = tag.getAttributeValue('LANGUAGE') || 'und';
  1122. return LanguageUtils.normalize(languageValue);
  1123. }
  1124. /**
  1125. * Get the type value.
  1126. * Shaka recognizes the content types 'audio', 'video', 'text', and 'image'.
  1127. * The HLS 'subtitles' type needs to be mapped to 'text'.
  1128. * @param {!shaka.hls.Tag} tag
  1129. * @return {string}
  1130. * @private
  1131. */
  1132. getType_(tag) {
  1133. let type = tag.getRequiredAttrValue('TYPE').toLowerCase();
  1134. if (type == 'subtitles') {
  1135. type = shaka.util.ManifestParserUtils.ContentType.TEXT;
  1136. }
  1137. return type;
  1138. }
  1139. /**
  1140. * @param {!Array.<shaka.hls.HlsParser.StreamInfo>} audioInfos
  1141. * @param {!Array.<shaka.hls.HlsParser.StreamInfo>} videoInfos
  1142. * @param {number} bandwidth
  1143. * @param {?string} width
  1144. * @param {?string} height
  1145. * @param {?string} frameRate
  1146. * @param {?string} videoRange
  1147. * @param {!Array.<shaka.extern.DrmInfo>} drmInfos
  1148. * @param {!Set.<string>} keyIds
  1149. * @return {!Array.<!shaka.extern.Variant>}
  1150. * @private
  1151. */
  1152. createVariants_(
  1153. audioInfos, videoInfos, bandwidth, width, height, frameRate, videoRange,
  1154. drmInfos, keyIds) {
  1155. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  1156. const DrmEngine = shaka.media.DrmEngine;
  1157. for (const info of videoInfos) {
  1158. this.addVideoAttributes_(
  1159. info.stream, width, height, frameRate, videoRange);
  1160. }
1161. // In the case of audio-only or video-only content, or when audio/video is
1162. // disabled by the config, we create an array containing a single null.
1163. // This way, the double-loop below works for all kinds of content.
  1164. // NOTE: we currently don't have support for audio-only content.
  1165. const disableAudio = this.config_.disableAudio;
  1166. if (!audioInfos.length || disableAudio) {
  1167. audioInfos = [null];
  1168. }
  1169. const disableVideo = this.config_.disableVideo;
  1170. if (!videoInfos.length || disableVideo) {
  1171. videoInfos = [null];
  1172. }
  1173. const variants = [];
  1174. for (const audioInfo of audioInfos) {
  1175. for (const videoInfo of videoInfos) {
  1176. const audioStream = audioInfo ? audioInfo.stream : null;
  1177. if (audioStream) {
  1178. audioStream.drmInfos = drmInfos;
  1179. audioStream.keyIds = keyIds;
  1180. }
  1181. const videoStream = videoInfo ? videoInfo.stream : null;
  1182. if (videoStream) {
  1183. videoStream.drmInfos = drmInfos;
  1184. videoStream.keyIds = keyIds;
  1185. }
  1186. const audioDrmInfos = audioInfo ? audioInfo.stream.drmInfos : null;
  1187. const videoDrmInfos = videoInfo ? videoInfo.stream.drmInfos : null;
  1188. const videoStreamUri =
  1189. videoInfo ? videoInfo.verbatimMediaPlaylistUri : '';
  1190. const audioStreamUri =
  1191. audioInfo ? audioInfo.verbatimMediaPlaylistUri : '';
  1192. const variantUriKey = videoStreamUri + ' - ' + audioStreamUri;
  1193. if (audioStream && videoStream) {
  1194. if (!DrmEngine.areDrmCompatible(audioDrmInfos, videoDrmInfos)) {
  1195. shaka.log.warning(
  1196. 'Incompatible DRM info in HLS variant. Skipping.');
  1197. continue;
  1198. }
  1199. }
  1200. if (this.variantUriSet_.has(variantUriKey)) {
  1201. // This happens when two variants only differ in their text streams.
  1202. shaka.log.debug(
  1203. 'Skipping variant which only differs in text streams.');
  1204. continue;
  1205. }
  1206. // Since both audio and video are of the same type, this assertion will
  1207. // catch certain mistakes at runtime that the compiler would miss.
  1208. goog.asserts.assert(!audioStream ||
  1209. audioStream.type == ContentType.AUDIO, 'Audio parameter mismatch!');
  1210. goog.asserts.assert(!videoStream ||
  1211. videoStream.type == ContentType.VIDEO, 'Video parameter mismatch!');
  1212. const variant = {
  1213. id: this.globalId_++,
  1214. language: audioStream ? audioStream.language : 'und',
  1215. primary: (!!audioStream && audioStream.primary) ||
  1216. (!!videoStream && videoStream.primary),
  1217. audio: audioStream,
  1218. video: videoStream,
  1219. bandwidth,
  1220. allowedByApplication: true,
  1221. allowedByKeySystem: true,
  1222. decodingInfos: [],
  1223. };
  1224. variants.push(variant);
  1225. this.variantUriSet_.add(variantUriKey);
  1226. }
  1227. }
  1228. return variants;
  1229. }
  1230. /**
  1231. * Parses an array of EXT-X-MEDIA tags, then stores the values of all tags
  1232. * with TYPE="CLOSED-CAPTIONS" into a map of group id to closed captions.
  1233. *
  1234. * @param {!Array.<!shaka.hls.Tag>} mediaTags
  1235. * @private
  1236. */
  1237. parseClosedCaptions_(mediaTags) {
  1238. const closedCaptionsTags =
  1239. shaka.hls.Utils.filterTagsByType(mediaTags, 'CLOSED-CAPTIONS');
  1240. for (const tag of closedCaptionsTags) {
  1241. goog.asserts.assert(tag.name == 'EXT-X-MEDIA',
  1242. 'Should only be called on media tags!');
  1243. const language = this.getLanguage_(tag);
  1244. // The GROUP-ID value is a quoted-string that specifies the group to which
  1245. // the Rendition belongs.
  1246. const groupId = tag.getRequiredAttrValue('GROUP-ID');
  1247. // The value of INSTREAM-ID is a quoted-string that specifies a Rendition
  1248. // within the segments in the Media Playlist. This attribute is REQUIRED
  1249. // if the TYPE attribute is CLOSED-CAPTIONS.
  1250. const instreamId = tag.getRequiredAttrValue('INSTREAM-ID');
  1251. if (!this.groupIdToClosedCaptionsMap_.get(groupId)) {
  1252. this.groupIdToClosedCaptionsMap_.set(groupId, new Map());
  1253. }
  1254. this.groupIdToClosedCaptionsMap_.get(groupId).set(instreamId, language);
  1255. }
  1256. }
  1257. /**
1258. * Parse an EXT-X-MEDIA tag into a Stream object.
  1259. *
  1260. * @param {shaka.hls.Tag} tag
  1261. * @return {!shaka.hls.HlsParser.StreamInfo}
  1262. * @private
  1263. */
  1264. createStreamInfoFromMediaTag_(tag) {
  1265. goog.asserts.assert(tag.name == 'EXT-X-MEDIA',
  1266. 'Should only be called on media tags!');
  1267. const groupId = tag.getRequiredAttrValue('GROUP-ID');
  1268. let codecs = '';
  1269. /** @type {string} */
  1270. const type = this.getType_(tag);
  1271. // Text does not require a codec.
  1272. if (type != shaka.util.ManifestParserUtils.ContentType.TEXT && groupId &&
  1273. this.groupIdToCodecsMap_.has(groupId)) {
  1274. codecs = this.groupIdToCodecsMap_.get(groupId);
  1275. }
  1276. const verbatimMediaPlaylistUri = this.variableSubstitution_(
  1277. tag.getRequiredAttrValue('URI'), this.globalVariables_);
  1278. // Check if the stream has already been created as part of another Variant
  1279. // and return it if it has.
  1280. if (this.uriToStreamInfosMap_.has(verbatimMediaPlaylistUri)) {
  1281. return this.uriToStreamInfosMap_.get(verbatimMediaPlaylistUri);
  1282. }
  1283. const language = this.getLanguage_(tag);
  1284. const name = tag.getAttributeValue('NAME');
  1285. // NOTE: According to the HLS spec, "DEFAULT=YES" requires "AUTOSELECT=YES".
  1286. // However, we don't bother to validate "AUTOSELECT", since we don't
  1287. // actually use it in our streaming model, and we treat everything as
  1288. // "AUTOSELECT=YES". A value of "AUTOSELECT=NO" would imply that it may
  1289. // only be selected explicitly by the user, and we don't have a way to
  1290. // represent that in our model.
  1291. const defaultAttrValue = tag.getAttributeValue('DEFAULT');
  1292. const primary = defaultAttrValue == 'YES';
  1293. const channelsCount = type == 'audio' ? this.getChannelsCount_(tag) : null;
  1294. const spatialAudio = type == 'audio' ? this.isSpatialAudio_(tag) : false;
  1295. const characteristics = tag.getAttributeValue('CHARACTERISTICS');
  1296. const forcedAttrValue = tag.getAttributeValue('FORCED');
  1297. const forced = forcedAttrValue == 'YES';
  1298. // TODO: Should we take into account some of the currently ignored
  1299. // attributes: INSTREAM-ID, Attribute descriptions: https://bit.ly/2lpjOhj
  1300. const streamInfo = this.createStreamInfo_(
  1301. verbatimMediaPlaylistUri, codecs, type, language, primary, name,
  1302. channelsCount, /* closedCaptions= */ null, characteristics, forced,
  1303. spatialAudio);
  1304. if (this.groupIdToStreamInfosMap_.has(groupId)) {
  1305. this.groupIdToStreamInfosMap_.get(groupId).push(streamInfo);
  1306. } else {
  1307. this.groupIdToStreamInfosMap_.set(groupId, [streamInfo]);
  1308. }
  1309. // TODO: This check is necessary because of the possibility of multiple
  1310. // calls to createStreamInfoFromMediaTag_ before either has resolved.
  1311. if (this.uriToStreamInfosMap_.has(verbatimMediaPlaylistUri)) {
  1312. return this.uriToStreamInfosMap_.get(verbatimMediaPlaylistUri);
  1313. }
  1314. this.uriToStreamInfosMap_.set(verbatimMediaPlaylistUri, streamInfo);
  1315. return streamInfo;
  1316. }
  1317. /**
1318. * Parse an EXT-X-IMAGE-STREAM-INF tag into a Stream object.
  1319. *
  1320. * @param {shaka.hls.Tag} tag
  1321. * @return {!Promise.<!shaka.hls.HlsParser.StreamInfo>}
  1322. * @private
  1323. */
  1324. async createStreamInfoFromImageTag_(tag) {
  1325. goog.asserts.assert(tag.name == 'EXT-X-IMAGE-STREAM-INF',
  1326. 'Should only be called on image tags!');
  1327. /** @type {string} */
  1328. const type = shaka.util.ManifestParserUtils.ContentType.IMAGE;
  1329. const verbatimImagePlaylistUri = this.variableSubstitution_(
  1330. tag.getRequiredAttrValue('URI'), this.globalVariables_);
  1331. const codecs = tag.getAttributeValue('CODECS', 'jpeg') || '';
  1332. // Check if the stream has already been created as part of another Variant
  1333. // and return it if it has.
  1334. if (this.uriToStreamInfosMap_.has(verbatimImagePlaylistUri)) {
  1335. return this.uriToStreamInfosMap_.get(verbatimImagePlaylistUri);
  1336. }
  1337. const language = this.getLanguage_(tag);
  1338. const name = tag.getAttributeValue('NAME');
  1339. const characteristics = tag.getAttributeValue('CHARACTERISTICS');
  1340. const streamInfo = this.createStreamInfo_(
  1341. verbatimImagePlaylistUri, codecs, type, language, /* primary= */ false,
  1342. name, /* channelsCount= */ null, /* closedCaptions= */ null,
  1343. characteristics, /* forced= */ false, /* spatialAudio= */ false);
  1344. // TODO: This check is necessary because of the possibility of multiple
  1345. // calls to createStreamInfoFromImageTag_ before either has resolved.
  1346. if (this.uriToStreamInfosMap_.has(verbatimImagePlaylistUri)) {
  1347. return this.uriToStreamInfosMap_.get(verbatimImagePlaylistUri);
  1348. }
  1349. // Parse misc attributes.
  1350. const resolution = tag.getAttributeValue('RESOLUTION');
  1351. if (resolution) {
  1352. // The RESOLUTION tag represents the resolution of a single thumbnail, not
  1353. // of the entire sheet at once (like we expect in the output).
  1354. // So multiply by the layout size.
  1355. // Since we need to have generated the segment index for this, we can't
  1356. // lazy-load in this situation.
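// For example (hypothetical values): RESOLUTION=320x180 with a 5x4 tile
// layout describes a 1600x720 thumbnail sheet.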
  1357. await streamInfo.stream.createSegmentIndex();
  1358. const reference = streamInfo.stream.segmentIndex.get(0);
  1359. const layout = reference.getTilesLayout();
  1360. if (layout) {
  1361. streamInfo.stream.width =
  1362. Number(resolution.split('x')[0]) * Number(layout.split('x')[0]);
  1363. streamInfo.stream.height =
  1364. Number(resolution.split('x')[1]) * Number(layout.split('x')[1]);
  1365. // TODO: What happens if there are multiple grids, with different
  1366. // layout sizes, inside this image stream?
  1367. }
  1368. }
  1369. const bandwidth = tag.getAttributeValue('BANDWIDTH');
  1370. if (bandwidth) {
  1371. streamInfo.stream.bandwidth = Number(bandwidth);
  1372. }
  1373. this.uriToStreamInfosMap_.set(verbatimImagePlaylistUri, streamInfo);
  1374. return streamInfo;
  1375. }
  1376. /**
1377. * Parse an EXT-X-STREAM-INF variant tag into a Stream object.
  1378. *
  1379. * @param {!shaka.hls.Tag} tag
  1380. * @param {!Array.<string>} allCodecs
  1381. * @param {string} type
  1382. * @return {!shaka.hls.HlsParser.StreamInfo}
  1383. * @private
  1384. */
  1385. createStreamInfoFromVariantTag_(tag, allCodecs, type) {
  1386. goog.asserts.assert(tag.name == 'EXT-X-STREAM-INF',
  1387. 'Should only be called on variant tags!');
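// An illustrative EXT-X-STREAM-INF tag (hypothetical values), for reference:
//   #EXT-X-STREAM-INF:BANDWIDTH=2560000,RESOLUTION=1280x720,
//     CODECS="avc1.64001f,mp4a.40.2",AUDIO="aud1"
// In the master playlist the media playlist URI follows on the next line;
// by this point it is available as the tag's URI attribute.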
  1388. const verbatimMediaPlaylistUri = this.variableSubstitution_(
  1389. tag.getRequiredAttrValue('URI'), this.globalVariables_);
  1390. if (this.uriToStreamInfosMap_.has(verbatimMediaPlaylistUri)) {
  1391. return this.uriToStreamInfosMap_.get(verbatimMediaPlaylistUri);
  1392. }
  1393. const closedCaptions = this.getClosedCaptions_(tag, type);
  1394. const codecs = shaka.util.ManifestParserUtils.guessCodecs(type, allCodecs);
  1395. const streamInfo = this.createStreamInfo_(verbatimMediaPlaylistUri,
  1396. codecs, type, /* language= */ 'und', /* primary= */ false,
1397. /* name= */ null, /* channelsCount= */ null, closedCaptions,
  1398. /* characteristics= */ null, /* forced= */ false,
  1399. /* spatialAudio= */ false);
  1400. // TODO: This check is necessary because of the possibility of multiple
  1401. // calls to createStreamInfoFromVariantTag_ before either has resolved.
  1402. if (this.uriToStreamInfosMap_.has(verbatimMediaPlaylistUri)) {
  1403. return this.uriToStreamInfosMap_.get(verbatimMediaPlaylistUri);
  1404. }
  1405. this.uriToStreamInfosMap_.set(verbatimMediaPlaylistUri, streamInfo);
  1406. return streamInfo;
  1407. }
  1408. /**
  1409. * @param {string} verbatimMediaPlaylistUri
  1410. * @param {string} codecs
  1411. * @param {string} type
  1412. * @param {string} language
  1413. * @param {boolean} primary
  1414. * @param {?string} name
  1415. * @param {?number} channelsCount
  1416. * @param {Map.<string, string>} closedCaptions
  1417. * @param {?string} characteristics
  1418. * @param {boolean} forced
  1419. * @param {boolean} spatialAudio
  1420. * @return {!shaka.hls.HlsParser.StreamInfo}
  1421. * @private
  1422. */
  1423. createStreamInfo_(verbatimMediaPlaylistUri, codecs, type, language,
  1424. primary, name, channelsCount, closedCaptions, characteristics, forced,
  1425. spatialAudio) {
  1426. // TODO: Refactor, too many parameters
  1427. const initialMediaPlaylistUri = shaka.hls.Utils.constructAbsoluteUri(
  1428. this.masterPlaylistUri_, verbatimMediaPlaylistUri);
  1429. // This stream is lazy-loaded inside the createSegmentIndex function.
  1430. // So we start out with a stream object that does not contain the actual
  1431. // segment index, then download when createSegmentIndex is called.
  1432. const stream = this.makeStreamObject_(codecs, type, language, primary, name,
  1433. channelsCount, closedCaptions, characteristics, forced, spatialAudio);
  1434. if (shaka.media.MediaSourceEngine.RAW_FORMATS.includes(stream.mimeType)) {
  1435. stream.codecs = '';
  1436. }
  1437. const streamInfo = {
  1438. stream,
  1439. type,
  1440. verbatimMediaPlaylistUri,
  1441. // These values are filled out or updated after lazy-loading:
  1442. absoluteMediaPlaylistUri: initialMediaPlaylistUri,
  1443. maxTimestamp: 0,
  1444. mediaSequenceToStartTime: new Map(),
  1445. canSkipSegments: false,
  1446. hasEndList: false,
  1447. firstSequenceNumber: -1,
  1448. loadedOnce: false,
  1449. };
  1450. /** @param {!AbortSignal} abortSignal */
  1451. const downloadSegmentIndex = async (abortSignal) => {
  1452. // Download the actual manifest.
  1453. const response = await this.requestManifest_(
  1454. streamInfo.absoluteMediaPlaylistUri);
  1455. if (abortSignal.aborted) {
  1456. return;
  1457. }
  1458. // Record the final URI after redirects.
  1459. const absoluteMediaPlaylistUri = response.uri;
1460. // Parse the playlist with that redirected, final URI, so that relative
1461. // segment URIs resolve correctly.
  1462. /** @type {!shaka.hls.Playlist} */
  1463. const playlist = this.manifestTextParser_.parsePlaylist(
  1464. response.data, absoluteMediaPlaylistUri);
  1465. const wasLive = this.isLive_();
  1466. const realStreamInfo = await this.convertParsedPlaylistIntoStreamInfo_(
  1467. playlist, verbatimMediaPlaylistUri, absoluteMediaPlaylistUri, codecs,
  1468. type, language, primary, name, channelsCount, closedCaptions,
  1469. characteristics, forced, spatialAudio);
  1470. if (abortSignal.aborted) {
  1471. return;
  1472. }
  1473. const realStream = realStreamInfo.stream;
  1474. if (this.isLive_() && !wasLive) {
  1475. // Now that we know that the presentation is live, convert the timeline
  1476. // to live.
  1477. this.changePresentationTimelineToLive_();
  1478. }
  1479. // Copy values from the real stream info to our initial one.
  1480. streamInfo.absoluteMediaPlaylistUri = absoluteMediaPlaylistUri;
  1481. streamInfo.maxTimestamp = realStreamInfo.maxTimestamp;
  1482. streamInfo.canSkipSegments = realStreamInfo.canSkipSegments;
  1483. streamInfo.hasEndList = realStreamInfo.hasEndList;
  1484. streamInfo.mediaSequenceToStartTime =
  1485. realStreamInfo.mediaSequenceToStartTime;
  1486. streamInfo.loadedOnce = true;
  1487. stream.segmentIndex = realStream.segmentIndex;
  1488. stream.encrypted = realStream.encrypted;
  1489. stream.drmInfos = realStream.drmInfos;
  1490. stream.keyIds = realStream.keyIds;
  1491. stream.kind = realStream.kind;
  1492. stream.roles = realStream.roles;
  1493. stream.mimeType = realStream.mimeType;
  1494. // Since we lazy-loaded this content, the player may need to create new
  1495. // sessions for the DRM info in this stream.
  1496. if (stream.drmInfos.length) {
  1497. this.playerInterface_.newDrmInfo(stream);
  1498. }
  1499. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  1500. if (type == ContentType.VIDEO || type == ContentType.AUDIO) {
  1501. for (const otherStreamInfo of this.uriToStreamInfosMap_.values()) {
  1502. if (!otherStreamInfo.loadedOnce && otherStreamInfo.type == type) {
  1503. // To aid manifest filtering, assume before loading that all video
  1504. // renditions have the same MIME type. (And likewise for audio.)
  1505. otherStreamInfo.stream.mimeType = realStream.mimeType;
  1506. }
  1507. }
  1508. }
  1509. // Add finishing touches to the stream that can only be done once we have
  1510. // more full context on the media as a whole.
  1511. if (this.hasEnoughInfoToFinalizeStreams_()) {
  1512. if (!this.streamsFinalized_) {
  1513. // Mark this manifest as having been finalized, so we don't go through
  1514. // this whole process of finishing touches a second time.
  1515. this.streamsFinalized_ = true;
  1516. // Finalize all of the currently-loaded streams.
  1517. const streamInfos = Array.from(this.uriToStreamInfosMap_.values());
  1518. const activeStreamInfos =
  1519. streamInfos.filter((s) => s.stream.segmentIndex);
  1520. this.finalizeStreams_(activeStreamInfos);
  1521. // With the addition of this new stream, we now have enough info to
  1522. // figure out how long the streams should be. So process all streams
  1523. // we have downloaded up until this point.
  1524. this.determineDuration_();
  1525. // Finally, start the update timer, if this asset has been determined
  1526. // to be a livestream.
  1527. const delay = this.updatePlaylistDelay_;
  1528. if (delay > 0) {
  1529. this.updatePlaylistTimer_.tickAfter(/* seconds= */ delay);
  1530. }
  1531. } else {
  1532. // We don't need to go through the full process; just finalize this
  1533. // single stream.
  1534. this.finalizeStreams_([streamInfo]);
  1535. }
  1536. }
  1537. };
  1538. /** @type {Promise} */
  1539. let creationPromise = null;
  1540. /** @type {!AbortController} */
  1541. let abortController = new AbortController();
  1542. const safeCreateSegmentIndex = () => {
  1543. // An operation is already in progress. The second and subsequent
  1544. // callers receive the same Promise as the first caller, and only one
  1545. // download operation will occur.
  1546. if (creationPromise) {
  1547. return creationPromise;
  1548. }
  1549. // Create a new AbortController to be able to cancel this specific
  1550. // download.
  1551. abortController = new AbortController();
  1552. // Create a Promise tied to the outcome of downloadSegmentIndex(). If
  1553. // downloadSegmentIndex is rejected, creationPromise will also be
  1554. // rejected.
  1555. creationPromise = new Promise((resolve) => {
  1556. resolve(downloadSegmentIndex(abortController.signal));
  1557. });
  1558. return creationPromise;
  1559. };
  1560. stream.createSegmentIndex = safeCreateSegmentIndex;
  1561. stream.closeSegmentIndex = () => {
  1562. // If we're mid-creation, cancel it.
  1563. if (creationPromise && !stream.segmentIndex) {
  1564. abortController.abort();
  1565. }
  1566. // If we have a segment index, release it.
  1567. if (stream.segmentIndex) {
  1568. stream.segmentIndex.release();
  1569. stream.segmentIndex = null;
  1570. }
  1571. // Clear the creation Promise so that a new operation can begin.
  1572. creationPromise = null;
  1573. };
  1574. return streamInfo;
  1575. }
  1576. /**
  1577. * @return {number}
  1578. * @private
  1579. */
  1580. getMinDuration_() {
  1581. let minDuration = Infinity;
  1582. for (const streamInfo of this.uriToStreamInfosMap_.values()) {
  1583. if (streamInfo.stream.segmentIndex && streamInfo.stream.type != 'text') {
  1584. // Since everything is already offset to 0 (either by sync or by being
  1585. // VOD), only maxTimestamp is necessary to compute the duration.
  1586. minDuration = Math.min(minDuration, streamInfo.maxTimestamp);
  1587. }
  1588. }
  1589. return minDuration;
  1590. }
  1591. /**
  1592. * @param {!Array.<!shaka.extern.Stream>} streams
  1593. * @private
  1594. */
  1595. notifySegmentsForStreams_(streams) {
  1596. const references = [];
  1597. for (const stream of streams) {
  1598. if (!stream.segmentIndex) {
  1599. // The stream was closed since the list of streams was built.
  1600. continue;
  1601. }
  1602. stream.segmentIndex.forEachTopLevelReference((reference) => {
  1603. references.push(reference);
  1604. });
  1605. }
  1606. this.presentationTimeline_.notifySegments(references);
  1607. }
  1608. /**
  1609. * @param {!Array.<!shaka.hls.HlsParser.StreamInfo>} streamInfos
  1610. * @private
  1611. */
  1612. finalizeStreams_(streamInfos) {
  1613. if (!this.isLive_()) {
  1614. const minDuration = this.getMinDuration_();
  1615. for (const streamInfo of streamInfos) {
  1616. streamInfo.stream.segmentIndex.fit(/* periodStart= */ 0, minDuration);
  1617. }
  1618. }
1619. // MediaSource expects raw formats to be used without codec strings.
  1620. for (const streamInfo of streamInfos) {
  1621. const stream = streamInfo.stream;
  1622. if (shaka.media.MediaSourceEngine.RAW_FORMATS.includes(stream.mimeType)) {
  1623. stream.codecs = '';
  1624. }
  1625. }
  1626. this.notifySegmentsForStreams_(streamInfos.map((s) => s.stream));
  1627. if (this.config_.hls.ignoreManifestProgramDateTime) {
  1628. this.syncStreamsWithSequenceNumber_(streamInfos);
  1629. } else {
  1630. this.syncStreamsWithProgramDateTime_(streamInfos);
  1631. }
  1632. }
  1633. /**
  1634. * There are some values on streams that can only be set once we know about
  1635. * both the video and audio content, if present.
1636. * This checks that at least one video stream has been downloaded (if the
1637. * media has video), and that at least one audio stream has been downloaded
1638. * (if the media has audio).
  1639. * @return {boolean}
  1640. * @private
  1641. */
  1642. hasEnoughInfoToFinalizeStreams_() {
  1643. if (!this.manifest_) {
  1644. return false;
  1645. }
  1646. const videos = [];
  1647. const audios = [];
  1648. for (const variant of this.manifest_.variants) {
  1649. if (variant.video) {
  1650. videos.push(variant.video);
  1651. }
  1652. if (variant.audio) {
  1653. audios.push(variant.audio);
  1654. }
  1655. }
  1656. if (videos.length > 0 && !videos.some((stream) => stream.segmentIndex)) {
  1657. return false;
  1658. }
  1659. if (audios.length > 0 && !audios.some((stream) => stream.segmentIndex)) {
  1660. return false;
  1661. }
  1662. return true;
  1663. }
  1664. /**
  1665. * @param {!shaka.hls.Playlist} playlist
  1666. * @param {string} verbatimMediaPlaylistUri
  1667. * @param {string} absoluteMediaPlaylistUri
  1668. * @param {string} codecs
  1669. * @param {string} type
  1670. * @param {string} language
  1671. * @param {boolean} primary
  1672. * @param {?string} name
  1673. * @param {?number} channelsCount
  1674. * @param {Map.<string, string>} closedCaptions
  1675. * @param {?string} characteristics
  1676. * @param {boolean} forced
  1677. * @param {boolean} spatialAudio
  1678. * @param {(string|undefined)} mimeType
  1679. * @return {!Promise.<!shaka.hls.HlsParser.StreamInfo>}
  1680. * @private
  1681. */
  1682. async convertParsedPlaylistIntoStreamInfo_(playlist, verbatimMediaPlaylistUri,
  1683. absoluteMediaPlaylistUri, codecs, type, language, primary, name,
  1684. channelsCount, closedCaptions, characteristics, forced, spatialAudio,
  1685. mimeType = undefined) {
  1686. if (playlist.type != shaka.hls.PlaylistType.MEDIA) {
  1687. // EXT-X-MEDIA and EXT-X-IMAGE-STREAM-INF tags should point to media
  1688. // playlists.
  1689. throw new shaka.util.Error(
  1690. shaka.util.Error.Severity.CRITICAL,
  1691. shaka.util.Error.Category.MANIFEST,
  1692. shaka.util.Error.Code.HLS_INVALID_PLAYLIST_HIERARCHY);
  1693. }
  1694. /** @type {!Array.<!shaka.hls.Tag>} */
  1695. const variablesTags = shaka.hls.Utils.filterTagsByName(playlist.tags,
  1696. 'EXT-X-DEFINE');
  1697. const mediaVariables = this.parseMediaVariables_(variablesTags);
  1698. goog.asserts.assert(playlist.segments != null,
  1699. 'Media playlist should have segments!');
  1700. this.determinePresentationType_(playlist);
  1701. if (!mimeType) {
  1702. mimeType = await this.guessMimeType_(type, codecs, playlist,
  1703. mediaVariables);
  1704. }
  1705. const {drmInfos, keyIds, encrypted, aesEncrypted} =
  1706. this.parseDrmInfo_(playlist, mimeType);
  1707. if (encrypted && !drmInfos.length && !aesEncrypted) {
  1708. throw new shaka.util.Error(
  1709. shaka.util.Error.Severity.CRITICAL,
  1710. shaka.util.Error.Category.MANIFEST,
  1711. shaka.util.Error.Code.HLS_KEYFORMATS_NOT_SUPPORTED);
  1712. }
  1713. const mediaSequenceToStartTime = this.isLive_() ?
  1714. this.mediaSequenceToStartTimeByType_.get(type) : new Map();
  1715. const segments = this.createSegments_(verbatimMediaPlaylistUri, playlist,
  1716. type, mimeType, mediaSequenceToStartTime, mediaVariables);
  1717. const lastEndTime = segments[segments.length - 1].endTime;
  1718. /** @type {!shaka.media.SegmentIndex} */
  1719. const segmentIndex = new shaka.media.SegmentIndex(segments);
  1720. const kind = (type == shaka.util.ManifestParserUtils.ContentType.TEXT) ?
  1721. shaka.util.ManifestParserUtils.TextStreamKind.SUBTITLE : undefined;
  1722. const roles = [];
  1723. if (characteristics) {
  1724. for (const characteristic of characteristics.split(',')) {
  1725. roles.push(characteristic);
  1726. }
  1727. }
  1728. const serverControlTag = shaka.hls.Utils.getFirstTagWithName(
  1729. playlist.tags, 'EXT-X-SERVER-CONTROL');
  1730. const canSkipSegments = serverControlTag ?
  1731. serverControlTag.getAttribute('CAN-SKIP-UNTIL') != null : false;
  1732. const stream = this.makeStreamObject_(codecs, type, language, primary, name,
  1733. channelsCount, closedCaptions, characteristics, forced, spatialAudio);
  1734. stream.segmentIndex = segmentIndex;
  1735. stream.encrypted = encrypted;
  1736. stream.drmInfos = drmInfos;
  1737. stream.keyIds = keyIds;
  1738. stream.kind = kind;
  1739. stream.roles = roles;
  1740. stream.mimeType = mimeType;
  1741. return {
  1742. stream,
  1743. type,
  1744. verbatimMediaPlaylistUri,
  1745. absoluteMediaPlaylistUri,
  1746. maxTimestamp: lastEndTime,
  1747. canSkipSegments,
  1748. hasEndList: false,
  1749. firstSequenceNumber: -1,
  1750. mediaSequenceToStartTime,
  1751. loadedOnce: false,
  1752. };
  1753. }
  1754. /**
  1755. * Creates a stream object with the given parameters.
  1756. * The parameters that are passed into here are only the things that can be
  1757. * known without downloading the media playlist; other values must be set
  1758. * manually on the object after creation.
  1759. * @param {string} codecs
  1760. * @param {string} type
  1761. * @param {string} language
  1762. * @param {boolean} primary
  1763. * @param {?string} name
  1764. * @param {?number} channelsCount
  1765. * @param {Map.<string, string>} closedCaptions
  1766. * @param {?string} characteristics
  1767. * @param {boolean} forced
  1768. * @param {boolean} spatialAudio
  1769. * @return {!shaka.extern.Stream}
  1770. * @private
  1771. */
  1772. makeStreamObject_(codecs, type, language, primary, name, channelsCount,
  1773. closedCaptions, characteristics, forced, spatialAudio) {
  1774. // Fill out a "best-guess" mimeType, for now. It will be replaced once the
  1775. // stream is lazy-loaded.
  1776. const mimeType = this.guessMimeTypeBeforeLoading_(type, codecs) ||
  1777. this.guessMimeTypeFallback_(type);
  1778. return {
  1779. id: this.globalId_++,
  1780. originalId: name,
  1781. createSegmentIndex: () => Promise.resolve(),
  1782. segmentIndex: null,
  1783. mimeType,
  1784. codecs,
  1785. kind: undefined,
  1786. encrypted: false,
  1787. drmInfos: [],
  1788. keyIds: new Set(),
  1789. language,
  1790. label: name, // For historical reasons, since before "originalId".
  1791. type,
  1792. primary,
  1793. // TODO: trick mode
  1794. trickModeVideo: null,
  1795. emsgSchemeIdUris: null,
  1796. frameRate: undefined,
  1797. pixelAspectRatio: undefined,
  1798. width: undefined,
  1799. height: undefined,
  1800. bandwidth: undefined,
  1801. roles: [],
  1802. forced,
  1803. channelsCount,
  1804. audioSamplingRate: null,
  1805. spatialAudio,
  1806. closedCaptions,
  1807. hdr: undefined,
  1808. tilesLayout: undefined,
  1809. };
  1810. }
  1811. /**
  1812. * @param {!shaka.hls.Playlist} playlist
  1813. * @param {string} mimeType
  1814. * @return {{
  1815. * drmInfos: !Array.<shaka.extern.DrmInfo>,
  1816. * keyIds: !Set.<string>,
  1817. * encrypted: boolean,
  1818. * aesEncrypted: boolean
  1819. * }}
  1820. * @private
  1821. */
  1822. parseDrmInfo_(playlist, mimeType) {
  1823. /** @type {!Array.<!shaka.hls.Tag>} */
  1824. const drmTags = [];
  1825. if (playlist.segments) {
  1826. for (const segment of playlist.segments) {
  1827. const segmentKeyTags = shaka.hls.Utils.filterTagsByName(segment.tags,
  1828. 'EXT-X-KEY');
  1829. drmTags.push(...segmentKeyTags);
  1830. }
  1831. }
  1832. let encrypted = false;
  1833. let aesEncrypted = false;
  1834. /** @type {!Array.<shaka.extern.DrmInfo>}*/
  1835. const drmInfos = [];
  1836. const keyIds = new Set();
  1837. for (const drmTag of drmTags) {
  1838. const method = drmTag.getRequiredAttrValue('METHOD');
  1839. if (method != 'NONE') {
  1840. encrypted = true;
  1841. if (method == 'AES-128') {
  1842. // These keys are handled separately.
  1843. aesEncrypted = true;
  1844. } else {
  1845. // According to the HLS spec, KEYFORMAT is optional and implicitly
  1846. // defaults to "identity".
  1847. // https://datatracker.ietf.org/doc/html/draft-pantos-hls-rfc8216bis-11#section-4.4.4.4
  1848. const keyFormat =
  1849. drmTag.getAttributeValue('KEYFORMAT') || 'identity';
  1850. const drmParser =
  1851. shaka.hls.HlsParser.KEYFORMATS_TO_DRM_PARSERS_[keyFormat];
  1852. const drmInfo = drmParser ? drmParser(drmTag, mimeType) : null;
  1853. if (drmInfo) {
  1854. if (drmInfo.keyIds) {
  1855. for (const keyId of drmInfo.keyIds) {
  1856. keyIds.add(keyId);
  1857. }
  1858. }
  1859. drmInfos.push(drmInfo);
  1860. } else {
  1861. shaka.log.warning('Unsupported HLS KEYFORMAT', keyFormat);
  1862. }
  1863. }
  1864. }
  1865. }
  1866. return {drmInfos, keyIds, encrypted, aesEncrypted};
  1867. }
  1868. /**
  1869. * @param {!shaka.hls.Tag} drmTag
  1870. * @param {!shaka.hls.Playlist} playlist
  1871. * @return {!shaka.extern.HlsAes128Key}
  1872. * @private
  1873. */
  1874. parseAES128DrmTag_(drmTag, playlist) {
  1875. // Check if the Web Crypto API is available.
  1876. if (!window.crypto || !window.crypto.subtle) {
  1877. shaka.log.alwaysWarn('Web Crypto API is not available to decrypt ' +
  1878. 'AES-128. (Web Crypto only exists in secure origins like https)');
  1879. throw new shaka.util.Error(
  1880. shaka.util.Error.Severity.CRITICAL,
  1881. shaka.util.Error.Category.MANIFEST,
  1882. shaka.util.Error.Code.NO_WEB_CRYPTO_API);
  1883. }
  1884. // HLS RFC 8216 Section 5.2:
  1885. // An EXT-X-KEY tag with a KEYFORMAT of "identity" that does not have an IV
  1886. // attribute indicates that the Media Sequence Number is to be used as the
  1887. // IV when decrypting a Media Segment, by putting its big-endian binary
  1888. // representation into a 16-octet (128-bit) buffer and padding (on the left)
  1889. // with zeros.
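// An illustrative AES-128 key tag (hypothetical values), for reference:
//   #EXT-X-KEY:METHOD=AES-128,URI="https://example.com/key.bin",
//     IV=0x9c7db8778570d05c3f9ae7d94eee2f37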
  1890. let firstMediaSequenceNumber = 0;
  1891. let iv;
  1892. const ivHex = drmTag.getAttributeValue('IV', '');
  1893. if (!ivHex) {
  1894. // Media Sequence Number will be used as IV.
  1895. firstMediaSequenceNumber = shaka.hls.Utils.getFirstTagWithNameAsNumber(
  1896. playlist.tags, 'EXT-X-MEDIA-SEQUENCE', 0);
  1897. } else {
1898. // Exclude the '0x' prefix at the start of the string.
  1899. iv = shaka.util.Uint8ArrayUtils.fromHex(ivHex.substr(2));
  1900. if (iv.byteLength != 16) {
  1901. throw new shaka.util.Error(
  1902. shaka.util.Error.Severity.CRITICAL,
  1903. shaka.util.Error.Category.MANIFEST,
  1904. shaka.util.Error.Code.HLS_AES_128_INVALID_IV_LENGTH);
  1905. }
  1906. }
  1907. const keyUri = shaka.hls.Utils.constructAbsoluteUri(
  1908. playlist.absoluteUri, drmTag.getRequiredAttrValue('URI'));
  1909. const requestType = shaka.net.NetworkingEngine.RequestType.KEY;
  1910. const request = shaka.net.NetworkingEngine.makeRequest(
  1911. [keyUri], this.config_.retryParameters);
  1912. const keyInfo = {method: 'AES-128', iv, firstMediaSequenceNumber};
  1913. // Don't download the key object until the segment is parsed, to avoid a
  1914. // startup delay for long manifests with lots of keys.
  1915. keyInfo.fetchKey = async () => {
  1916. const keyResponse = await this.makeNetworkRequest_(request, requestType);
  1917. // keyResponse.status is undefined when URI is "data:text/plain;base64,"
  1918. if (!keyResponse.data || keyResponse.data.byteLength != 16) {
  1919. throw new shaka.util.Error(
  1920. shaka.util.Error.Severity.CRITICAL,
  1921. shaka.util.Error.Category.MANIFEST,
  1922. shaka.util.Error.Code.HLS_AES_128_INVALID_KEY_LENGTH);
  1923. }
  1924. keyInfo.cryptoKey = await window.crypto.subtle.importKey(
  1925. 'raw', keyResponse.data, 'AES-CBC', true, ['decrypt']);
  1926. keyInfo.fetchKey = undefined; // No longer needed.
  1927. };
  1928. return keyInfo;
  1929. }
  1930. /**
  1931. * @param {!shaka.hls.Playlist} playlist
  1932. * @private
  1933. */
  1934. determinePresentationType_(playlist) {
  1935. const PresentationType = shaka.hls.HlsParser.PresentationType_;
  1936. const presentationTypeTag =
  1937. shaka.hls.Utils.getFirstTagWithName(playlist.tags,
  1938. 'EXT-X-PLAYLIST-TYPE');
  1939. const endListTag =
  1940. shaka.hls.Utils.getFirstTagWithName(playlist.tags, 'EXT-X-ENDLIST');
  1941. const isVod = (presentationTypeTag && presentationTypeTag.value == 'VOD') ||
  1942. endListTag;
  1943. const isEvent = presentationTypeTag &&
  1944. presentationTypeTag.value == 'EVENT' && !isVod;
  1945. const isLive = !isVod && !isEvent;
  1946. if (isVod) {
  1947. this.setPresentationType_(PresentationType.VOD);
  1948. } else {
  1949. // If it's not VOD, it must be presentation type LIVE or an ongoing EVENT.
  1950. if (isLive) {
  1951. this.setPresentationType_(PresentationType.LIVE);
  1952. } else {
  1953. this.setPresentationType_(PresentationType.EVENT);
  1954. }
  1955. const targetDurationTag = this.getRequiredTag_(playlist.tags,
  1956. 'EXT-X-TARGETDURATION');
  1957. const targetDuration = Number(targetDurationTag.value);
  1958. const partialTargetDurationTag =
  1959. shaka.hls.Utils.getFirstTagWithName(playlist.tags, 'EXT-X-PART-INF');
  1960. // According to the HLS spec, updates should not happen more often than
  1961. // once in targetDuration. It also requires us to only update the active
  1962. // variant. We might implement that later, but for now every variant
  1963. // will be updated. To get the update period, choose the smallest
  1964. // targetDuration value across all playlists.
1965. // 1. Track the shortest target duration, to use as the update period and
1966. // segment availability time (for LIVE).
  1967. if (this.lowLatencyMode_ && partialTargetDurationTag) {
  1968. // For low latency streaming, use the partial segment target duration.
  1969. this.partialTargetDuration_ = Number(
  1970. partialTargetDurationTag.getRequiredAttrValue('PART-TARGET'));
  1971. this.minTargetDuration_ = Math.min(
  1972. this.partialTargetDuration_, this.minTargetDuration_);
  1973. // Get the server-recommended min distance from the live edge.
  1974. const serverControlTag = shaka.hls.Utils.getFirstTagWithName(
  1975. playlist.tags, 'EXT-X-SERVER-CONTROL');
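// e.g. #EXT-X-SERVER-CONTROL:CAN-BLOCK-RELOAD=YES,PART-HOLD-BACK=1.002
// (illustrative values)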
  1976. // Use 'PART-HOLD-BACK' as the presentation delay for low latency mode.
  1977. this.lowLatencyPresentationDelay_ = serverControlTag ? Number(
  1978. serverControlTag.getRequiredAttrValue('PART-HOLD-BACK')) : 0;
  1979. } else {
  1980. // For regular HLS, use the target duration of regular segments.
  1981. this.minTargetDuration_ = Math.min(
  1982. targetDuration, this.minTargetDuration_);
  1983. }
  1984. // 2. Update the longest target duration if need be to use as a
  1985. // presentation delay later.
  1986. this.maxTargetDuration_ = Math.max(
  1987. targetDuration, this.maxTargetDuration_);
  1988. }
  1989. }
  1990. /**
  1991. * @private
  1992. */
  1993. changePresentationTimelineToLive_() {
  1994. // The live edge will be calculated from segments, so we don't need to
  1995. // set a presentation start time. We will assert later that this is
  1996. // working as expected.
  1997. // The HLS spec (RFC 8216) states in 6.3.3:
  1998. //
  1999. // "The client SHALL choose which Media Segment to play first ... the
  2000. // client SHOULD NOT choose a segment that starts less than three target
  2001. // durations from the end of the Playlist file. Doing so can trigger
  2002. // playback stalls."
  2003. //
  2004. // We accomplish this in our DASH-y model by setting a presentation
2005. // delay to the configured value, or to a few segments' duration otherwise.
  2006. // This will be the "live edge" of the presentation.
  2007. let presentationDelay;
  2008. if (this.config_.defaultPresentationDelay) {
  2009. presentationDelay = this.config_.defaultPresentationDelay;
  2010. } else if (this.lowLatencyPresentationDelay_) {
  2011. presentationDelay = this.lowLatencyPresentationDelay_;
  2012. } else {
  2013. const numberOfSegments = this.config_.hls.liveSegmentsDelay;
  2014. presentationDelay = this.maxTargetDuration_ * numberOfSegments;
  2015. }
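// For example (hypothetical values): with a maximum target duration of 6
// seconds and liveSegmentsDelay of 3, the presentation delay is 18 seconds.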
  2016. this.presentationTimeline_.setPresentationStartTime(0);
  2017. this.presentationTimeline_.setDelay(presentationDelay);
  2018. this.presentationTimeline_.setStatic(false);
  2019. }
  2020. /**
  2021. * Get the InitSegmentReference for a segment if it has a EXT-X-MAP tag.
  2022. * @param {string} playlistUri The absolute uri of the media playlist.
  2023. * @param {!Array.<!shaka.hls.Tag>} tags Segment tags
  2024. * @param {!Map.<string, string>} variables
  2025. * @return {shaka.media.InitSegmentReference}
  2026. * @private
  2027. */
  2028. getInitSegmentReference_(playlistUri, tags, variables) {
  2029. /** @type {?shaka.hls.Tag} */
  2030. const mapTag = shaka.hls.Utils.getFirstTagWithName(tags, 'EXT-X-MAP');
  2031. if (!mapTag) {
  2032. return null;
  2033. }
  2034. // Map tag example: #EXT-X-MAP:URI="main.mp4",BYTERANGE="720@0"
  2035. const verbatimInitSegmentUri = mapTag.getRequiredAttrValue('URI');
  2036. const absoluteInitSegmentUri = this.variableSubstitution_(
  2037. shaka.hls.Utils.constructAbsoluteUri(
  2038. playlistUri, verbatimInitSegmentUri),
  2039. variables);
  2040. const mapTagKey = [
  2041. absoluteInitSegmentUri,
  2042. mapTag.getAttributeValue('BYTERANGE', ''),
  2043. ].join('-');
  2044. if (!this.mapTagToInitSegmentRefMap_.has(mapTagKey)) {
  2045. const initSegmentRef = this.createInitSegmentReference_(
  2046. absoluteInitSegmentUri, mapTag);
  2047. this.mapTagToInitSegmentRefMap_.set(mapTagKey, initSegmentRef);
  2048. }
  2049. return this.mapTagToInitSegmentRefMap_.get(mapTagKey);
  2050. }
  2051. /**
  2052. * Create an InitSegmentReference object for the EXT-X-MAP tag in the media
  2053. * playlist.
  2054. * @param {string} absoluteInitSegmentUri
  2055. * @param {!shaka.hls.Tag} mapTag EXT-X-MAP
  2056. * @return {!shaka.media.InitSegmentReference}
  2057. * @private
  2058. */
  2059. createInitSegmentReference_(absoluteInitSegmentUri, mapTag) {
  2060. let startByte = 0;
  2061. let endByte = null;
  2062. const byterange = mapTag.getAttributeValue('BYTERANGE');
  2063. // If a BYTERANGE attribute is not specified, the segment consists
  2064. // of the entire resource.
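// e.g. BYTERANGE="720@0" (illustrative) means 720 bytes starting at offset
// 0, i.e. startByte 0 and endByte 719.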
  2065. if (byterange) {
  2066. const blocks = byterange.split('@');
  2067. const byteLength = Number(blocks[0]);
  2068. startByte = Number(blocks[1]);
  2069. endByte = startByte + byteLength - 1;
  2070. }
  2071. const initSegmentRef = new shaka.media.InitSegmentReference(
  2072. () => [absoluteInitSegmentUri],
  2073. startByte,
  2074. endByte);
  2075. return initSegmentRef;
  2076. }
  2077. /**
  2078. * Parses one shaka.hls.Segment object into a shaka.media.SegmentReference.
  2079. *
  2080. * @param {shaka.media.InitSegmentReference} initSegmentReference
  2081. * @param {shaka.media.SegmentReference} previousReference
  2082. * @param {!shaka.hls.Segment} hlsSegment
  2083. * @param {number} startTime
  2084. * @param {!Map.<string, string>} variables
  2085. * @param {string} absoluteMediaPlaylistUri
  2086. * @param {string} type
  2087. * @param {shaka.extern.HlsAes128Key=} hlsAes128Key
  2088. * @return {shaka.media.SegmentReference}
  2089. * @private
  2090. */
  2091. createSegmentReference_(
  2092. initSegmentReference, previousReference, hlsSegment, startTime,
  2093. variables, absoluteMediaPlaylistUri, type, hlsAes128Key) {
  2094. const tags = hlsSegment.tags;
  2095. const absoluteSegmentUri = this.variableSubstitution_(
  2096. hlsSegment.absoluteUri, variables);
  2097. const extinfTag =
  2098. shaka.hls.Utils.getFirstTagWithName(tags, 'EXTINF');
  2099. let endTime = 0;
  2100. let startByte = 0;
  2101. let endByte = null;
  2102. if (hlsSegment.partialSegments.length && !this.lowLatencyMode_) {
  2103. shaka.log.alwaysWarn('Low-latency HLS live stream detected, but ' +
  2104. 'low-latency streaming mode is not enabled in Shaka ' +
  2105. 'Player. Set streaming.lowLatencyMode configuration to ' +
  2106. 'true, and see https://bit.ly/3clctcj for details.');
  2107. }
  2108. let syncTime = null;
  2109. if (!this.config_.hls.ignoreManifestProgramDateTime) {
  2110. const dateTimeTag =
  2111. shaka.hls.Utils.getFirstTagWithName(tags, 'EXT-X-PROGRAM-DATE-TIME');
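// e.g. #EXT-X-PROGRAM-DATE-TIME:2023-04-01T12:00:00.000Z (illustrative value)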
  2112. if (dateTimeTag && dateTimeTag.value) {
  2113. syncTime = shaka.util.XmlUtils.parseDate(dateTimeTag.value);
  2114. goog.asserts.assert(syncTime != null,
  2115. 'EXT-X-PROGRAM-DATE-TIME format not valid');
  2116. }
  2117. }
  2118. let status = shaka.media.SegmentReference.Status.AVAILABLE;
  2119. if (shaka.hls.Utils.getFirstTagWithName(tags, 'EXT-X-GAP')) {
  2120. status = shaka.media.SegmentReference.Status.MISSING;
  2121. }
  2122. if (!extinfTag) {
  2123. if (hlsSegment.partialSegments.length == 0) {
  2124. // EXTINF tag must be available if the segment has no partial segments.
  2125. throw new shaka.util.Error(
  2126. shaka.util.Error.Severity.CRITICAL,
  2127. shaka.util.Error.Category.MANIFEST,
  2128. shaka.util.Error.Code.HLS_REQUIRED_TAG_MISSING, 'EXTINF');
  2129. } else if (!this.lowLatencyMode_) {
  2130. // Without EXTINF and without low-latency mode, partial segments get
  2131. // ignored.
  2132. return null;
  2133. }
  2134. }
  2135. // Create SegmentReferences for the partial segments.
  2136. const partialSegmentRefs = [];
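// Illustrative partial-segment tags (hypothetical values), for reference:
//   #EXT-X-PART:DURATION=0.333,URI="seg1.part1.mp4",BYTERANGE="20000@0"
//   #EXT-X-PRELOAD-HINT:TYPE=PART,URI="seg1.part2.mp4",BYTERANGE-START=20000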
  2137. if (this.lowLatencyMode_) {
  2138. for (let i = 0; i < hlsSegment.partialSegments.length; i++) {
  2139. const item = hlsSegment.partialSegments[i];
  2140. const pPreviousReference = i == 0 ?
  2141. previousReference : partialSegmentRefs[partialSegmentRefs.length - 1];
  2142. const pStartTime = (i == 0) ? startTime : pPreviousReference.endTime;
  2143. // If DURATION is missing from this partial segment, use the target
  2144. // partial duration from the top of the playlist, which is a required
  2145. // attribute for content with partial segments.
  2146. const pDuration = Number(item.getAttributeValue('DURATION')) ||
  2147. this.partialTargetDuration_;
  2148. // If for some reason we have neither an explicit duration, nor a target
  2149. // partial duration, we should SKIP this partial segment to avoid
  2150. // duplicating content in the presentation timeline.
  2151. if (!pDuration) {
  2152. continue;
  2153. }
  2154. const pEndTime = pStartTime + pDuration;
  2155. let pStartByte = 0;
  2156. let pEndByte = null;
  2157. if (item.name == 'EXT-X-PRELOAD-HINT') {
  2158. // A preload hinted partial segment may have byterange start info.
  2159. const pByterangeStart = item.getAttributeValue('BYTERANGE-START');
  2160. pStartByte = pByterangeStart ? Number(pByterangeStart) : 0;
  2161. } else {
  2162. const pByterange = item.getAttributeValue('BYTERANGE');
  2163. [pStartByte, pEndByte] =
  2164. this.parseByteRange_(pPreviousReference, pByterange);
  2165. }
  2166. const pUri = item.getAttributeValue('URI');
  2167. if (!pUri) {
  2168. continue;
  2169. }
  2170. const pAbsoluteUri = shaka.hls.Utils.constructAbsoluteUri(
  2171. absoluteMediaPlaylistUri, pUri);
  2172. let partialStatus = shaka.media.SegmentReference.Status.AVAILABLE;
  2173. if (item.getAttributeValue('GAP') == 'YES') {
  2174. partialStatus = shaka.media.SegmentReference.Status.MISSING;
  2175. }
  2176. // We do not set the AES-128 key information for partial segments, as we
  2177. // do not support AES-128 and low-latency at the same time.
  2178. const partial = new shaka.media.SegmentReference(
  2179. pStartTime,
  2180. pEndTime,
  2181. () => [pAbsoluteUri],
  2182. pStartByte,
  2183. pEndByte,
  2184. initSegmentReference,
  2185. /* timestampOffset= */ 0, // This value is ignored in sequence mode.
  2186. /* appendWindowStart= */ 0,
  2187. /* appendWindowEnd= */ Infinity,
  2188. /* partialReferences= */ [],
  2189. /* tilesLayout= */ '',
  2190. /* tileDuration= */ null,
  2191. /* syncTime= */ null,
  2192. partialStatus);
  2193. partialSegmentRefs.push(partial);
  2194. } // for-loop of hlsSegment.partialSegments
  2195. }
  2196. // If the segment has EXTINF tag, set the segment's end time, start byte
  2197. // and end byte based on the duration and byterange information.
  2198. // Otherwise, calculate the end time, start / end byte based on its partial
  2199. // segments.
2200. // Note that the sum of partial segment durations may be slightly different
  2201. // from the parent segment's duration. In this case, use the duration from
  2202. // the parent segment tag.
  2203. if (extinfTag) {
  2204. // The EXTINF tag format is '#EXTINF:<duration>,[<title>]'.
  2205. // We're interested in the duration part.
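// e.g. #EXTINF:6.006,segment-title (illustrative) yields a duration of 6.006
// seconds.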
  2206. const extinfValues = extinfTag.value.split(',');
  2207. const duration = Number(extinfValues[0]);
  2208. endTime = startTime + duration;
  2209. } else {
  2210. endTime = partialSegmentRefs[partialSegmentRefs.length - 1].endTime;
  2211. }
  2212. // If the segment has EXT-X-BYTERANGE tag, set the start byte and end byte
  2213. // base on the byterange information. If segment has no EXT-X-BYTERANGE tag
  2214. // and has partial segments, set the start byte and end byte base on the
  2215. // partial segments.
  2216. const byterangeTag =
  2217. shaka.hls.Utils.getFirstTagWithName(tags, 'EXT-X-BYTERANGE');
  2218. if (byterangeTag) {
  2219. [startByte, endByte] =
  2220. this.parseByteRange_(previousReference, byterangeTag.value);
  2221. } else if (partialSegmentRefs.length) {
  2222. startByte = partialSegmentRefs[0].startByte;
  2223. endByte = partialSegmentRefs[partialSegmentRefs.length - 1].endByte;
  2224. }
  2225. let tilesLayout = '';
  2226. let tileDuration = null;
  2227. if (type == shaka.util.ManifestParserUtils.ContentType.IMAGE) {
  2228. // By default in HLS the tilesLayout is 1x1
  2229. tilesLayout = '1x1';
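// An illustrative EXT-X-TILES tag (hypothetical values), for reference:
//   #EXT-X-TILES:LAYOUT=5x4,DURATION=6.006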
  2230. const tilesTag =
  2231. shaka.hls.Utils.getFirstTagWithName(tags, 'EXT-X-TILES');
  2232. if (tilesTag) {
  2233. tilesLayout = tilesTag.getRequiredAttrValue('LAYOUT');
  2234. const duration = tilesTag.getAttributeValue('DURATION');
  2235. if (duration) {
  2236. tileDuration = Number(duration);
  2237. }
  2238. }
  2239. }
  2240. return new shaka.media.SegmentReference(
  2241. startTime,
  2242. endTime,
  2243. () => absoluteSegmentUri.length ? [absoluteSegmentUri] : [],
  2244. startByte,
  2245. endByte,
  2246. initSegmentReference,
  2247. /* timestampOffset= */ 0, // This value is ignored in sequence mode.
  2248. /* appendWindowStart= */ 0,
  2249. /* appendWindowEnd= */ Infinity,
  2250. partialSegmentRefs,
  2251. tilesLayout,
  2252. tileDuration,
  2253. syncTime,
  2254. status,
  2255. hlsAes128Key,
  2256. );
  2257. }
  2258. /**
  2259. * Parse the startByte and endByte.
  2260. * @param {shaka.media.SegmentReference} previousReference
  2261. * @param {?string} byterange
  2262. * @return {!Array.<number>} An array with the start byte and end byte.
  2263. * @private
  2264. */
  2265. parseByteRange_(previousReference, byterange) {
  2266. let startByte = 0;
  2267. let endByte = null;
  2268. // If BYTERANGE is not specified, the segment consists of the entire
  2269. // resource.
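// For example (hypothetical values): BYTERANGE "1000@2000" means 1000 bytes
// starting at offset 2000, i.e. startByte 2000 and endByte 2999. A value of
// "1000" with no offset continues from the previous reference's endByte.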
  2270. if (byterange) {
  2271. const blocks = byterange.split('@');
  2272. const byteLength = Number(blocks[0]);
  2273. if (blocks[1]) {
  2274. startByte = Number(blocks[1]);
  2275. } else {
  2276. goog.asserts.assert(previousReference,
  2277. 'Cannot refer back to previous HLS segment!');
  2278. startByte = previousReference.endByte + 1;
  2279. }
  2280. endByte = startByte + byteLength - 1;
  2281. }
  2282. return [startByte, endByte];
  2283. }
  2284. /**
  2285. * Parses shaka.hls.Segment objects into shaka.media.SegmentReferences.
  2286. *
  2287. * @param {string} verbatimMediaPlaylistUri
  2288. * @param {!shaka.hls.Playlist} playlist
  2289. * @param {string} type
  2290. * @param {string} mimeType
  2291. * @param {!Map.<number, number>} mediaSequenceToStartTime
  2292. * @param {!Map.<string, string>} variables
  2293. * @return {!Array.<!shaka.media.SegmentReference>}
  2294. * @private
  2295. */
  2296. createSegments_(verbatimMediaPlaylistUri, playlist, type, mimeType,
  2297. mediaSequenceToStartTime, variables) {
  2298. /** @type {Array.<!shaka.hls.Segment>} */
  2299. const hlsSegments = playlist.segments;
  2300. goog.asserts.assert(hlsSegments.length, 'Playlist should have segments!');
  2301. /** @type {shaka.media.InitSegmentReference} */
  2302. let initSegmentRef;
  2303. /** @type {shaka.extern.HlsAes128Key|undefined} */
  2304. let hlsAes128Key = undefined;
  2305. // We may need to look at the media itself to determine a segment start
  2306. // time.
  2307. const mediaSequenceNumber = shaka.hls.Utils.getFirstTagWithNameAsNumber(
  2308. playlist.tags, 'EXT-X-MEDIA-SEQUENCE', 0);
  2309. const skipTag = shaka.hls.Utils.getFirstTagWithName(playlist.tags,
  2310. 'EXT-X-SKIP');
  2311. const skippedSegments =
  2312. skipTag ? Number(skipTag.getAttributeValue('SKIPPED-SEGMENTS')) : 0;
  2313. let position = mediaSequenceNumber + skippedSegments;
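// For example (hypothetical values): #EXT-X-MEDIA-SEQUENCE:264 with
// #EXT-X-SKIP:SKIPPED-SEGMENTS=3 places the first listed segment at
// position 267.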
  2314. let firstStartTime = 0;
  2315. // For live stream, use the cached value in the mediaSequenceToStartTime
  2316. // map if available.
  2317. if (this.isLive_() && mediaSequenceToStartTime.has(position)) {
  2318. firstStartTime = mediaSequenceToStartTime.get(position);
  2319. }
  2320. /** @type {!Array.<!shaka.media.SegmentReference>} */
  2321. const references = [];
  2322. let previousReference = null;
  2323. for (let i = 0; i < hlsSegments.length; i++) {
  2324. const item = hlsSegments[i];
  2325. const startTime =
  2326. (i == 0) ? firstStartTime : previousReference.endTime;
  2327. position = mediaSequenceNumber + skippedSegments + i;
2328. // Apply new AES-128 key tags as they appear, keeping the most recent key
2328. // for subsequent segments.
  2329. for (const drmTag of item.tags) {
  2330. if (drmTag.name == 'EXT-X-KEY' &&
  2331. drmTag.getRequiredAttrValue('METHOD') == 'AES-128') {
  2332. hlsAes128Key = this.parseAES128DrmTag_(drmTag, playlist);
  2333. }
  2334. }
  2335. mediaSequenceToStartTime.set(position, startTime);
  2336. initSegmentRef = this.getInitSegmentReference_(playlist.absoluteUri,
  2337. item.tags, variables);
  2338. // If the stream is low latency and the user has not configured the
2339. // lowLatencyMode, but the player has been configured to activate
2340. // lowLatencyMode automatically when such a stream is detected, then
2341. // enable lowLatencyMode now.
  2342. if (!this.lowLatencyMode_) {
  2343. const autoLowLatencyMode = this.playerInterface_.isAutoLowLatencyMode();
  2344. if (autoLowLatencyMode) {
  2345. this.playerInterface_.enableLowLatencyMode();
  2346. this.lowLatencyMode_ = this.playerInterface_.isLowLatencyMode();
  2347. }
  2348. }
  2349. const reference = this.createSegmentReference_(
  2350. initSegmentRef,
  2351. previousReference,
  2352. item,
  2353. startTime,
  2354. variables,
  2355. playlist.absoluteUri,
  2356. type,
  2357. hlsAes128Key);
  2358. previousReference = reference;
  2359. if (reference) {
  2360. if (this.config_.hls.ignoreManifestProgramDateTime &&
  2361. this.minSequenceNumber_ != null &&
  2362. position < this.minSequenceNumber_) {
  2363. // This segment is ignored as part of our fallback synchronization
  2364. // method.
  2365. } else {
  2366. references.push(reference);
  2367. }
  2368. }
  2369. }
  2370. // If some segments have sync times, but not all, extrapolate the sync
  2371. // times of the ones with none.
  2372. const someSyncTime = references.some((ref) => ref.syncTime != null);
  2373. if (someSyncTime) {
  2374. for (let i = 0; i < references.length; i++) {
  2375. const reference = references[i];
  2376. if (reference.syncTime != null) {
  2377. // No need to extrapolate.
  2378. continue;
  2379. }
  2380. // Find the nearest segment with syncTime, in either direction.
2381. // This looks forward and backward simultaneously, keeping track of the
2382. // offset to apply to any syncTime it finds along the way.
  2383. let forwardAdd = 0;
  2384. let forwardI = i;
  2385. /**
  2386. * Look forwards one reference at a time, summing all durations as we
  2387. * go, until we find a reference with a syncTime to use as a basis.
  2388. * This DOES count the original reference, but DOESN'T count the first
  2389. * reference with a syncTime (as we approach it from behind).
  2390. * @return {?number}
  2391. */
  2392. const lookForward = () => {
  2393. const other = references[forwardI];
  2394. if (other) {
  2395. if (other.syncTime != null) {
  2396. return other.syncTime + forwardAdd;
  2397. }
  2398. forwardAdd -= other.endTime - other.startTime;
  2399. forwardI += 1;
  2400. }
  2401. return null;
  2402. };
  2403. let backwardAdd = 0;
  2404. let backwardI = i;
  2405. /**
  2406. * Look backwards one reference at a time, summing all durations as we
  2407. * go, until we find a reference with a syncTime to use as a basis.
  2408. * This DOESN'T count the original reference, but DOES count the first
  2409. * reference with a syncTime (as we approach it from ahead).
  2410. * @return {?number}
  2411. */
  2412. const lookBackward = () => {
  2413. const other = references[backwardI];
  2414. if (other) {
  2415. if (other != reference) {
  2416. backwardAdd += other.endTime - other.startTime;
  2417. }
  2418. if (other.syncTime != null) {
  2419. return other.syncTime + backwardAdd;
  2420. }
  2421. backwardI -= 1;
  2422. }
  2423. return null;
  2424. };
  2425. while (reference.syncTime == null) {
  2426. reference.syncTime = lookBackward();
  2427. if (reference.syncTime == null) {
  2428. reference.syncTime = lookForward();
  2429. }
  2430. }
  2431. }
  2432. }
  2433. // Split the sync times properly among partial segments.
  2434. if (someSyncTime) {
  2435. for (const reference of references) {
  2436. let syncTime = reference.syncTime;
  2437. for (const partial of reference.partialReferences) {
  2438. partial.syncTime = syncTime;
  2439. syncTime += partial.endTime - partial.startTime;
  2440. }
  2441. }
  2442. }
  2443. return references;
  2444. }
  2445. /**
  2446. * Replaces the variables of a given URI.
  2447. *
  2448. * @param {string} uri
  2449. * @param {!Map.<string, string>} variables
  2450. * @return {string}
  2451. * @private
  2452. */
  2453. variableSubstitution_(uri, variables) {
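// For example (hypothetical values): "https://example.com/{$path}/media.m3u8"
// with an EXT-X-DEFINE of NAME="path",VALUE="audio" becomes
// "https://example.com/audio/media.m3u8".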
  2454. let newUri = String(uri).replace(/%7B/g, '{').replace(/%7D/g, '}');
  2455. const uriVariables = newUri.match(/{\$\w*}/g);
  2456. if (uriVariables) {
  2457. for (const variable of uriVariables) {
  2458. // Note: All variables have the structure {$...}
  2459. const variableName = variable.slice(2, variable.length - 1);
  2460. const replaceValue = variables.get(variableName);
  2461. if (replaceValue) {
  2462. newUri = newUri.replace(variable, replaceValue);
  2463. } else {
  2464. shaka.log.error('A variable has been found that is not declared',
  2465. variableName);
  2466. throw new shaka.util.Error(
  2467. shaka.util.Error.Severity.CRITICAL,
  2468. shaka.util.Error.Category.MANIFEST,
  2469. shaka.util.Error.Code.HLS_VARIABLE_NOT_FOUND,
  2470. variableName);
  2471. }
  2472. }
  2473. }
  2474. return newUri;
  2475. }
  2476. /**
  2477. * Attempts to guess stream's mime type based on content type and URI.
  2478. *
  2479. * @param {string} contentType
  2480. * @param {string} codecs
  2481. * @return {?string}
  2482. * @private
  2483. */
  2484. guessMimeTypeBeforeLoading_(contentType, codecs) {
  2485. if (contentType == shaka.util.ManifestParserUtils.ContentType.TEXT) {
  2486. if (codecs == 'vtt' || codecs == 'wvtt') {
2487. // If codecs is 'vtt' or 'wvtt', it's WebVTT.
  2488. return 'text/vtt';
  2489. } else if (codecs && codecs !== '') {
  2490. // Otherwise, assume MP4-embedded text, since text-based formats tend
  2491. // not to have a codecs string at all.
  2492. return 'application/mp4';
  2493. }
  2494. }
  2495. if (contentType == shaka.util.ManifestParserUtils.ContentType.IMAGE) {
  2496. if (!codecs || codecs == 'jpeg') {
  2497. return 'image/jpeg';
  2498. }
  2499. }
  2500. if (contentType == shaka.util.ManifestParserUtils.ContentType.AUDIO) {
  2501. // See: https://bugs.chromium.org/p/chromium/issues/detail?id=489520
  2502. if (codecs == 'mp4a.40.34') {
  2503. return 'audio/mpeg';
  2504. }
  2505. }
  2506. // Not enough information to guess from the content type and codecs.
  2507. return null;
  2508. }
  2509. /**
  2510. * Get a fallback mime type for the content. Used if all the better methods
  2511. * for determining the mime type have failed.
  2512. *
  2513. * @param {string} contentType
  2514. * @return {string}
  2515. * @private
  2516. */
  2517. guessMimeTypeFallback_(contentType) {
  2518. if (contentType == shaka.util.ManifestParserUtils.ContentType.TEXT) {
  2519. // If there was no codecs string and no content-type, assume HLS text
  2520. // streams are WebVTT.
  2521. return 'text/vtt';
  2522. }
  2523. // If the HLS content is lacking in both MIME type metadata and
  2524. // segment file extensions, we fall back to assuming it's MP4.
  2525. const map = shaka.hls.HlsParser.EXTENSION_MAP_BY_CONTENT_TYPE_[contentType];
  2526. return map['mp4'];
  2527. }
  2528. /**
  2529. * Attempts to guess stream's mime type based on content type, URI, and
  2530. * contents of the playlist.
  2531. *
  2532. * @param {string} contentType
  2533. * @param {string} codecs
  2534. * @param {!shaka.hls.Playlist} playlist
  2535. * @param {!Map.<string, string>} variables
  2536. * @return {!Promise.<string>}
  2537. * @private
  2538. */
  2539. async guessMimeType_(contentType, codecs, playlist, variables) {
  2540. const HlsParser = shaka.hls.HlsParser;
  2541. const requestType = shaka.net.NetworkingEngine.RequestType.SEGMENT;
  2542. // If you wait long enough, requesting the first segment can fail
2543. // because it has fallen off the left edge of the DVR window, so to be safer,
  2544. // let's request the middle segment.
  2545. goog.asserts.assert(playlist.segments.length,
  2546. 'Playlist should have segments!');
  2547. const middleSegmentIdx = Math.trunc((playlist.segments.length - 1) / 2);
  2548. const middleSegmentUri = this.variableSubstitution_(
  2549. playlist.segments[middleSegmentIdx].absoluteUri, variables);
  2550. const parsedUri = new goog.Uri(middleSegmentUri);
  2551. const extension = parsedUri.getPath().split('.').pop();
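// e.g. (hypothetical) ".../segment_042.ts" yields extension "ts", which the
// extension map resolves to "video/mp2t" for video content.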
  2552. const map = HlsParser.EXTENSION_MAP_BY_CONTENT_TYPE_[contentType];
  2553. let mimeType = map[extension];
  2554. if (mimeType) {
  2555. return mimeType;
  2556. }
  2557. mimeType = HlsParser.RAW_FORMATS_TO_MIME_TYPES_[extension];
  2558. if (mimeType) {
  2559. return mimeType;
  2560. }
  2561. // The extension map didn't work, so guess based on codecs.
  2562. mimeType = this.guessMimeTypeBeforeLoading_(contentType, codecs);
  2563. if (mimeType) {
  2564. return mimeType;
  2565. }
  2566. // If unable to guess mime type, request a segment and try getting it
2567. // from the response headers.
  2568. const headRequest = shaka.net.NetworkingEngine.makeRequest(
  2569. [middleSegmentUri], this.config_.retryParameters);
  2570. headRequest.method = 'HEAD';
  2571. const response = await this.makeNetworkRequest_(
  2572. headRequest, requestType);
  2573. const contentMimeType = response.headers['content-type'];
  2574. if (contentMimeType) {
  2575. // Split the MIME type in case the server sent additional parameters.
  2576. return contentMimeType.split(';')[0];
  2577. }
  2578. return this.guessMimeTypeFallback_(contentType);
  2579. }
  2580. /**
  2581. * Returns a tag with a given name.
  2582. * Throws an error if tag was not found.
  2583. *
  2584. * @param {!Array.<shaka.hls.Tag>} tags
  2585. * @param {string} tagName
  2586. * @return {!shaka.hls.Tag}
  2587. * @private
  2588. */
  2589. getRequiredTag_(tags, tagName) {
  2590. const tag = shaka.hls.Utils.getFirstTagWithName(tags, tagName);
  2591. if (!tag) {
  2592. throw new shaka.util.Error(
  2593. shaka.util.Error.Severity.CRITICAL,
  2594. shaka.util.Error.Category.MANIFEST,
  2595. shaka.util.Error.Code.HLS_REQUIRED_TAG_MISSING, tagName);
  2596. }
  2597. return tag;
  2598. }
  2599. /**
  2600. * @param {shaka.extern.Stream} stream
  2601. * @param {?string} width
  2602. * @param {?string} height
  2603. * @param {?string} frameRate
  2604. * @param {?string} videoRange
  2605. * @private
  2606. */
  2607. addVideoAttributes_(stream, width, height, frameRate, videoRange) {
  2608. if (stream) {
  2609. stream.width = Number(width) || undefined;
  2610. stream.height = Number(height) || undefined;
  2611. stream.frameRate = Number(frameRate) || undefined;
  2612. stream.hdr = videoRange || undefined;
  2613. }
  2614. }
  2615. /**
  2616. * Makes a network request for the manifest and returns a Promise
  2617. * with the resulting data.
  2618. *
  2619. * @param {string} absoluteUri
  2620. * @return {!Promise.<!shaka.extern.Response>}
  2621. * @private
  2622. */
  2623. requestManifest_(absoluteUri) {
  2624. const requestType = shaka.net.NetworkingEngine.RequestType.MANIFEST;
  2625. const request = shaka.net.NetworkingEngine.makeRequest(
  2626. [absoluteUri], this.config_.retryParameters);
  2627. const format = shaka.util.CmcdManager.StreamingFormat.HLS;
  2628. this.playerInterface_.modifyManifestRequest(request, {format: format});
  2629. return this.makeNetworkRequest_(request, requestType);
  2630. }
  2631. /**
  2632. * Called when the update timer ticks. Because parsing a manifest is async,
2633. * this method is async. To work with this, this method schedules the next
2634. * update when it finishes, instead of using a repeating timer.
  2635. *
  2636. * @return {!Promise}
  2637. * @private
  2638. */
  2639. async onUpdate_() {
  2640. shaka.log.info('Updating manifest...');
  2641. goog.asserts.assert(
  2642. this.updatePlaylistDelay_ > 0,
2643. 'We should only call |onUpdate_| when we are supposed to be updating.');
  2644. // Detect a call to stop()
  2645. if (!this.playerInterface_) {
  2646. return;
  2647. }
  2648. try {
  2649. await this.update();
  2650. // This may have converted to VOD, in which case we stop updating.
  2651. if (this.isLive_()) {
  2652. const delay = this.updatePlaylistDelay_;
  2653. this.updatePlaylistTimer_.tickAfter(/* seconds= */ delay);
  2654. }
  2655. } catch (error) {
  2656. // Detect a call to stop() during this.update()
  2657. if (!this.playerInterface_) {
  2658. return;
  2659. }
  2660. goog.asserts.assert(error instanceof shaka.util.Error,
  2661. 'Should only receive a Shaka error');
  2662. // We will retry updating, so override the severity of the error.
  2663. error.severity = shaka.util.Error.Severity.RECOVERABLE;
  2664. this.playerInterface_.onError(error);
  2665. // Try again very soon.
  2666. this.updatePlaylistTimer_.tickAfter(/* seconds= */ 0.1);
  2667. }
  2668. }
  2669. /**
  2670. * @return {boolean}
  2671. * @private
  2672. */
  2673. isLive_() {
  2674. const PresentationType = shaka.hls.HlsParser.PresentationType_;
  2675. return this.presentationType_ != PresentationType.VOD;
  2676. }
  2677. /**
  2678. * @param {shaka.hls.HlsParser.PresentationType_} type
  2679. * @private
  2680. */
  2681. setPresentationType_(type) {
  2682. this.presentationType_ = type;
  2683. if (this.presentationTimeline_) {
  2684. this.presentationTimeline_.setStatic(!this.isLive_());
  2685. }
  2686. // If this manifest is not for live content, then we have no reason to
  2687. // update it.
  2688. if (!this.isLive_()) {
  2689. this.updatePlaylistTimer_.stop();
  2690. }
  2691. }
  2692. /**
  2693. * Create a networking request. This will manage the request using the
  2694. * parser's operation manager. If the parser has already been stopped, the
  2695. * request will not be made.
  2696. *
  2697. * @param {shaka.extern.Request} request
  2698. * @param {shaka.net.NetworkingEngine.RequestType} type
  2699. * @return {!Promise.<shaka.extern.Response>}
  2700. * @private
  2701. */
  2702. makeNetworkRequest_(request, type) {
  2703. if (!this.operationManager_) {
  2704. throw new shaka.util.Error(
  2705. shaka.util.Error.Severity.CRITICAL,
  2706. shaka.util.Error.Category.PLAYER,
  2707. shaka.util.Error.Code.OPERATION_ABORTED);
  2708. }
  2709. const op = this.playerInterface_.networkingEngine.request(type, request);
  2710. this.operationManager_.manage(op);
  2711. return op.promise;
  2712. }
  2713. /**
  2714. * @param {!shaka.hls.Tag} drmTag
  2715. * @param {string} mimeType
  2716. * @return {?shaka.extern.DrmInfo}
  2717. * @private
  2718. */
  2719. static fairplayDrmParser_(drmTag, mimeType) {
  2720. if (mimeType == 'video/mp2t') {
  2721. throw new shaka.util.Error(
  2722. shaka.util.Error.Severity.CRITICAL,
  2723. shaka.util.Error.Category.MANIFEST,
  2724. shaka.util.Error.Code.HLS_MSE_ENCRYPTED_MP2T_NOT_SUPPORTED);
  2725. }
  2726. if (shaka.util.Platform.isMediaKeysPolyfilled()) {
  2727. throw new shaka.util.Error(
  2728. shaka.util.Error.Severity.CRITICAL,
  2729. shaka.util.Error.Category.MANIFEST,
  2730. shaka.util.Error.Code
  2731. .HLS_MSE_ENCRYPTED_LEGACY_APPLE_MEDIA_KEYS_NOT_SUPPORTED);
  2732. }
  2733. /*
2734. * Even if we're not able to construct initData through the HLS tag, adding
2735. * a DrmInfo allows DrmEngine to request media key system access with the
2736. * correct keySystem and initDataType.
  2737. */
  2738. const drmInfo = shaka.util.ManifestParserUtils.createDrmInfo(
  2739. 'com.apple.fps', [
  2740. {initDataType: 'sinf', initData: new Uint8Array(0), keyId: null},
  2741. ]);
  2742. return drmInfo;
  2743. }
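// A representative (hypothetical) FairPlay key tag that fairplayDrmParser_
// above would accept (shown wrapped for readability; a real EXT-X-KEY tag is
// a single line). The "skd://" URI scheme is Apple's usual convention, but it
// is not required by this parser:
//
//   #EXT-X-KEY:METHOD=SAMPLE-AES,
//       URI="skd://example.license.server/asset-id",
//       KEYFORMAT="com.apple.streamingkeydelivery",KEYFORMATVERSIONS="1"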
  2744. /**
  2745. * @param {!shaka.hls.Tag} drmTag
  2746. * @return {?shaka.extern.DrmInfo}
  2747. * @private
  2748. */
  2749. static widevineDrmParser_(drmTag) {
  2750. const method = drmTag.getRequiredAttrValue('METHOD');
  2751. const VALID_METHODS = ['SAMPLE-AES', 'SAMPLE-AES-CTR'];
  2752. if (!VALID_METHODS.includes(method)) {
  2753. shaka.log.error('Widevine in HLS is only supported with [',
  2754. VALID_METHODS.join(', '), '], not', method);
  2755. return null;
  2756. }
  2757. const uri = drmTag.getRequiredAttrValue('URI');
  2758. const parsedData = shaka.net.DataUriPlugin.parseRaw(uri);
  2759. // The data encoded in the URI is a PSSH box to be used as init data.
  2760. const pssh = shaka.util.BufferUtils.toUint8(parsedData.data);
  2761. const drmInfo = shaka.util.ManifestParserUtils.createDrmInfo(
  2762. 'com.widevine.alpha', [
  2763. {initDataType: 'cenc', initData: pssh},
  2764. ]);
  2765. const keyId = drmTag.getAttributeValue('KEYID');
  2766. if (keyId) {
  2767. const keyIdLowerCase = keyId.toLowerCase();
  2768. // This value should begin with '0x':
  2769. goog.asserts.assert(
  2770. keyIdLowerCase.startsWith('0x'), 'Incorrect KEYID format!');
  2771. // But the output should not contain the '0x':
  2772. drmInfo.keyIds = new Set([keyIdLowerCase.substr(2)]);
  2773. }
  2774. return drmInfo;
  2775. }
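// A representative (hypothetical) Widevine key tag handled by
// widevineDrmParser_ above (shown wrapped for readability). The data: URI
// carries a base64-encoded PSSH box, and KEYID uses the '0x' prefix that the
// parser asserts on:
//
//   #EXT-X-KEY:METHOD=SAMPLE-AES-CTR,
//       KEYFORMAT="urn:uuid:edef8ba9-79d6-4ace-a3c8-27dcd51d21ed",
//       URI="data:text/plain;base64,<base64-encoded PSSH box>",
//       KEYID=0x11223344556677889900aabbccddeeff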
  2776. /**
  2777. * See: https://docs.microsoft.com/en-us/playready/packaging/mp4-based-formats-supported-by-playready-clients?tabs=case4
  2778. *
  2779. * @param {!shaka.hls.Tag} drmTag
  2780. * @return {?shaka.extern.DrmInfo}
  2781. * @private
  2782. */
  2783. static playreadyDrmParser_(drmTag) {
  2784. const method = drmTag.getRequiredAttrValue('METHOD');
  2785. const VALID_METHODS = ['SAMPLE-AES', 'SAMPLE-AES-CTR'];
  2786. if (!VALID_METHODS.includes(method)) {
  2787. shaka.log.error('PlayReady in HLS is only supported with [',
  2788. VALID_METHODS.join(', '), '], not', method);
  2789. return null;
  2790. }
  2791. const uri = drmTag.getRequiredAttrValue('URI');
  2792. const parsedData = shaka.net.DataUriPlugin.parseRaw(uri);
2793. // The data encoded in the URI is a PlayReady Object (PRO), so we need to
2794. // convert it to a PSSH box.
  2795. const data = shaka.util.BufferUtils.toUint8(parsedData.data);
  2796. const systemId = new Uint8Array([
  2797. 0x9a, 0x04, 0xf0, 0x79, 0x98, 0x40, 0x42, 0x86,
  2798. 0xab, 0x92, 0xe6, 0x5b, 0xe0, 0x88, 0x5f, 0x95,
  2799. ]);
  2800. const keyIds = new Set();
  2801. const psshVersion = 0;
  2802. const pssh =
  2803. shaka.util.Pssh.createPssh(data, systemId, keyIds, psshVersion);
  2804. const drmInfo = shaka.util.ManifestParserUtils.createDrmInfo(
  2805. 'com.microsoft.playready', [
  2806. {initDataType: 'cenc', initData: pssh},
  2807. ]);
  2808. return drmInfo;
  2809. }
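// A representative (hypothetical) PlayReady key tag handled by
// playreadyDrmParser_ above (shown wrapped for readability). The data: URI
// carries the base64-encoded PlayReady object, which this parser wraps into a
// version-0 PSSH box with the PlayReady system ID:
//
//   #EXT-X-KEY:METHOD=SAMPLE-AES-CTR,
//       KEYFORMAT="com.microsoft.playready",
//       URI="data:text/plain;base64,<base64-encoded PlayReady object>"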
  2810. /**
  2811. * See: https://datatracker.ietf.org/doc/html/draft-pantos-hls-rfc8216bis-11#section-5.1
  2812. *
  2813. * @param {!shaka.hls.Tag} drmTag
  2814. * @return {?shaka.extern.DrmInfo}
  2815. * @private
  2816. */
  2817. static identityDrmParser_(drmTag) {
  2818. const method = drmTag.getRequiredAttrValue('METHOD');
  2819. const VALID_METHODS = ['SAMPLE-AES', 'SAMPLE-AES-CTR'];
  2820. if (!VALID_METHODS.includes(method)) {
  2821. shaka.log.error('Identity (ClearKey) in HLS is only supported with [',
  2822. VALID_METHODS.join(', '), '], not', method);
  2823. return null;
  2824. }
  2825. // NOTE: The ClearKey CDM requires a key-id to key mapping. HLS doesn't
  2826. // provide a key ID anywhere. So although we could use the 'URI' attribute
  2827. // to fetch the actual 16-byte key, without a key ID, we can't provide this
  2828. // automatically to the ClearKey CDM. Instead, the application will have
  2829. // to use player.configure('drm.clearKeys', { ... }) to provide the key IDs
  2830. // and keys or player.configure('drm.servers.org\.w3\.clearkey', ...) to
  2831. // provide a ClearKey license server URI.
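// For example, an application might provide a (hypothetical) key-id/key pair
// directly:
//
//   player.configure('drm.clearKeys', {
//     // 16-byte key ID (hex): 16-byte key (hex)
//     'deadbeefdeadbeefdeadbeefdeadbeef': '0123456789abcdef0123456789abcdef',
//   });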
  2832. return shaka.util.ManifestParserUtils.createDrmInfo(
  2833. 'org.w3.clearkey', /* initDatas= */ null);
  2834. }
  2835. };
  2836. /**
  2837. * @typedef {{
  2838. * stream: !shaka.extern.Stream,
  2839. * type: string,
  2840. * verbatimMediaPlaylistUri: string,
  2841. * absoluteMediaPlaylistUri: string,
  2842. * maxTimestamp: number,
  2843. * mediaSequenceToStartTime: !Map.<number, number>,
  2844. * canSkipSegments: boolean,
  2845. * hasEndList: boolean,
  2846. * firstSequenceNumber: number,
  2847. * loadedOnce: boolean
  2848. * }}
  2849. *
  2850. * @description
  2851. * Contains a stream and information about it.
  2852. *
  2853. * @property {!shaka.extern.Stream} stream
  2854. * The Stream itself.
  2855. * @property {string} type
  2856. * The type value. Could be 'video', 'audio', 'text', or 'image'.
  2857. * @property {string} verbatimMediaPlaylistUri
  2858. * The verbatim media playlist URI, as it appeared in the master playlist.
  2859. * This has not been canonicalized into an absolute URI. This gives us a
  2860. * consistent key for this playlist, even if redirects cause us to update
  2861. * from different origins each time.
  2862. * @property {string} absoluteMediaPlaylistUri
  2863. * The absolute media playlist URI, resolved relative to the master playlist
  2864. * and updated to reflect any redirects.
  2865. * @property {number} maxTimestamp
  2866. * The maximum timestamp found in the stream.
  2867. * @property {!Map.<number, number>} mediaSequenceToStartTime
  2868. * A map of media sequence numbers to media start times.
  2869. * Only used for VOD content.
  2870. * @property {boolean} canSkipSegments
  2871. * True if the server supports delta playlist updates, and we can send a
  2872. * request for a playlist that can skip older media segments.
  2873. * @property {boolean} hasEndList
  2874. * True if the stream has an EXT-X-ENDLIST tag.
  2875. * @property {number} firstSequenceNumber
  2876. * The sequence number of the first reference. Only calculated if needed.
  2877. * @property {boolean} loadedOnce
  2878. * True if the stream has been loaded at least once.
  2879. */
  2880. shaka.hls.HlsParser.StreamInfo;
  2881. /**
  2882. * @typedef {{
  2883. * audio: !Array.<shaka.hls.HlsParser.StreamInfo>,
  2884. * video: !Array.<shaka.hls.HlsParser.StreamInfo>
  2885. * }}
  2886. *
  2887. * @description Audio and video stream infos.
  2888. * @property {!Array.<shaka.hls.HlsParser.StreamInfo>} audio
  2889. * @property {!Array.<shaka.hls.HlsParser.StreamInfo>} video
  2890. */
  2891. shaka.hls.HlsParser.StreamInfos;
  2892. /**
  2893. * @const {!Object.<string, string>}
  2894. * @private
  2895. */
  2896. shaka.hls.HlsParser.RAW_FORMATS_TO_MIME_TYPES_ = {
  2897. 'aac': 'audio/aac',
  2898. 'ac3': 'audio/ac3',
  2899. 'ec3': 'audio/ec3',
  2900. 'mp3': 'audio/mpeg',
  2901. };
  2902. /**
  2903. * @const {!Object.<string, string>}
  2904. * @private
  2905. */
  2906. shaka.hls.HlsParser.AUDIO_EXTENSIONS_TO_MIME_TYPES_ = {
  2907. 'mp4': 'audio/mp4',
  2908. 'mp4a': 'audio/mp4',
  2909. 'm4s': 'audio/mp4',
  2910. 'm4i': 'audio/mp4',
  2911. 'm4a': 'audio/mp4',
  2912. 'm4f': 'audio/mp4',
  2913. 'cmfa': 'audio/mp4',
  2914. // MPEG2-TS also uses video/ for audio: https://bit.ly/TsMse
  2915. 'ts': 'video/mp2t',
  2916. };
  2917. /**
  2918. * @const {!Object.<string, string>}
  2919. * @private
  2920. */
  2921. shaka.hls.HlsParser.VIDEO_EXTENSIONS_TO_MIME_TYPES_ = {
  2922. 'mp4': 'video/mp4',
  2923. 'mp4v': 'video/mp4',
  2924. 'm4s': 'video/mp4',
  2925. 'm4i': 'video/mp4',
  2926. 'm4v': 'video/mp4',
  2927. 'm4f': 'video/mp4',
  2928. 'cmfv': 'video/mp4',
  2929. 'ts': 'video/mp2t',
  2930. };
  2931. /**
  2932. * @const {!Object.<string, string>}
  2933. * @private
  2934. */
  2935. shaka.hls.HlsParser.TEXT_EXTENSIONS_TO_MIME_TYPES_ = {
  2936. 'mp4': 'application/mp4',
  2937. 'm4s': 'application/mp4',
  2938. 'm4i': 'application/mp4',
  2939. 'm4f': 'application/mp4',
  2940. 'cmft': 'application/mp4',
  2941. 'vtt': 'text/vtt',
2942. 'webvtt': 'text/vtt',
  2943. 'ttml': 'application/ttml+xml',
  2944. };
  2945. /**
  2946. * @const {!Object.<string, string>}
  2947. * @private
  2948. */
  2949. shaka.hls.HlsParser.IMAGE_EXTENSIONS_TO_MIME_TYPES_ = {
  2950. 'jpg': 'image/jpeg',
  2951. 'png': 'image/png',
  2952. 'svg': 'image/svg+xml',
  2953. 'webp': 'image/webp',
  2954. 'avif': 'image/avif',
  2955. };
  2956. /**
  2957. * @const {!Object.<string, !Object.<string, string>>}
  2958. * @private
  2959. */
  2960. shaka.hls.HlsParser.EXTENSION_MAP_BY_CONTENT_TYPE_ = {
  2961. 'audio': shaka.hls.HlsParser.AUDIO_EXTENSIONS_TO_MIME_TYPES_,
  2962. 'video': shaka.hls.HlsParser.VIDEO_EXTENSIONS_TO_MIME_TYPES_,
  2963. 'text': shaka.hls.HlsParser.TEXT_EXTENSIONS_TO_MIME_TYPES_,
  2964. 'image': shaka.hls.HlsParser.IMAGE_EXTENSIONS_TO_MIME_TYPES_,
  2965. };
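// For example, a hypothetical media segment named "seg_00042.cmfv" in a
// 'video' playlist (with no usable MIME information elsewhere) would map to
// 'video/mp4' via the tables above:
//
//   const map = shaka.hls.HlsParser.EXTENSION_MAP_BY_CONTENT_TYPE_['video'];
//   const mimeType = map['cmfv'];  // 'video/mp4'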
  2966. /**
  2967. * @typedef {function(!shaka.hls.Tag, string):?shaka.extern.DrmInfo}
  2968. * @private
  2969. */
  2970. shaka.hls.HlsParser.DrmParser_;
  2971. /**
  2972. * @const {!Object.<string, shaka.hls.HlsParser.DrmParser_>}
  2973. * @private
  2974. */
  2975. shaka.hls.HlsParser.KEYFORMATS_TO_DRM_PARSERS_ = {
  2976. 'com.apple.streamingkeydelivery':
  2977. shaka.hls.HlsParser.fairplayDrmParser_,
  2978. 'urn:uuid:edef8ba9-79d6-4ace-a3c8-27dcd51d21ed':
  2979. shaka.hls.HlsParser.widevineDrmParser_,
  2980. 'com.microsoft.playready':
  2981. shaka.hls.HlsParser.playreadyDrmParser_,
  2982. 'identity':
  2983. shaka.hls.HlsParser.identityDrmParser_,
  2984. };
  2985. /**
  2986. * @enum {string}
  2987. * @private
  2988. */
  2989. shaka.hls.HlsParser.PresentationType_ = {
  2990. VOD: 'VOD',
  2991. EVENT: 'EVENT',
  2992. LIVE: 'LIVE',
  2993. };
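// Note: the registrations below are skipped on Tizen 2/3 and webOS 3;
// presumably HLS on those older devices is meant to be handled natively
// rather than through this MSE-based parser.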
  2994. if (!shaka.util.Platform.isTizen3() &&
  2995. !shaka.util.Platform.isTizen2() &&
  2996. !shaka.util.Platform.isWebOS3()) {
  2997. shaka.media.ManifestParser.registerParserByExtension(
  2998. 'm3u8', () => new shaka.hls.HlsParser());
  2999. shaka.media.ManifestParser.registerParserByMime(
  3000. 'application/x-mpegurl', () => new shaka.hls.HlsParser());
  3001. shaka.media.ManifestParser.registerParserByMime(
  3002. 'application/vnd.apple.mpegurl', () => new shaka.hls.HlsParser());
  3003. }
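// With the registrations above in place, applications do not use this class
// directly; loading an HLS URI (or one of the MIME types above) selects this
// parser automatically. A minimal sketch, assuming a hypothetical asset URI:
//
//   const player = new shaka.Player(videoElement);
//   await player.load('https://example.com/master.m3u8');
//   // Or, when the URI has no usable extension, pass the MIME type:
//   await player.load('https://example.com/manifest', /* startTime= */ null,
//       'application/x-mpegurl');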