diff --git a/demo/common/assets.js b/demo/common/assets.js
index 4e32720023..836bd48bea 100644
--- a/demo/common/assets.js
+++ b/demo/common/assets.js
@@ -936,8 +936,6 @@ shakaAssets.testAssets = [
       /* iconUri= */ 'https://storage.googleapis.com/shaka-asset-icons/apple_test_pattern.png',
       /* manifestUri= */ 'https://storage.googleapis.com/shaka-demo-assets/apple-advanced-stream-ts/master.m3u8',
       /* source= */ shakaAssets.Source.APPLE)
-      // Disabled until we support raw AAC: https://github.com/google/shaka-player/issues/2337
-      .markAsDisabled()
       .addFeature(shakaAssets.Feature.HLS)
       .addFeature(shakaAssets.Feature.MP2TS)
       .addFeature(shakaAssets.Feature.CAPTIONS)
diff --git a/lib/hls/hls_parser.js b/lib/hls/hls_parser.js
index 09b81bf95c..60977b1d67 100644
--- a/lib/hls/hls_parser.js
+++ b/lib/hls/hls_parser.js
@@ -660,7 +660,7 @@ shaka.hls.HlsParser = class {
     // As an example, see the manifest linked in issue #860.
     const streamURI = tag.getRequiredAttrValue('URI');
     const hasSameUri = res.audio.find((audio) => {
-      return audio.verbatimMediaPlaylistUri == streamURI;
+      return audio && audio.verbatimMediaPlaylistUri == streamURI;
     });
 
     const videoCodecs = this.guessCodecsSafe_(ContentType.VIDEO, allCodecs);
@@ -831,6 +831,9 @@
    */
   filterLegacyCodecs_(streamInfos) {
     for (const streamInfo of streamInfos.audio.concat(streamInfos.video)) {
+      if (!streamInfo) {
+        continue;
+      }
       let codecs = streamInfo.stream.codecs.split(',');
       codecs = codecs.filter((codec) => {
         // mp4a.40.34 is a nonstandard codec string that is sometimes used in
@@ -844,8 +847,8 @@
   }
 
   /**
-   * @param {!Array.<shaka.hls.HlsParser.StreamInfo>} audioInfos
-   * @param {!Array.<shaka.hls.HlsParser.StreamInfo>} videoInfos
+   * @param {!Array.<?shaka.hls.HlsParser.StreamInfo>} audioInfos
+   * @param {!Array.<?shaka.hls.HlsParser.StreamInfo>} videoInfos
    * @param {number} bandwidth
    * @param {?string} width
    * @param {?string} height
@@ -1007,15 +1010,15 @@
     const streamInfo = await this.createStreamInfo_(
         verbatimMediaPlaylistUri, codecs, type, language, primary, name,
         channelsCount, /* closedCaptions= */ null);
-    if (streamInfo == null) {
-      return null;
-    }
-
     if (this.groupIdToStreamInfosMap_.has(groupId)) {
       this.groupIdToStreamInfosMap_.get(groupId).push(streamInfo);
     } else {
       this.groupIdToStreamInfosMap_.set(groupId, [streamInfo]);
     }
+    if (streamInfo == null) {
+      return null;
+    }
+
     // TODO: This check is necessary because of the possibility of multiple
     // calls to createStreamInfoFromMediaTag_ before either has resolved.
     if (this.uriToStreamInfosMap_.has(verbatimMediaPlaylistUri)) {
@@ -1160,8 +1163,8 @@
     const mimeType = await this.guessMimeType_(type, codecs, playlist);
 
     // MediaSource expects no codec strings combined with raw formats.
-    // TODO(#2337): Replace with a flag indicating a raw format.
-    if (mimeType == 'audio/mpeg' || mimeType == 'audio/aac') {
+    // TODO(#2337): Instead, create a Stream flag indicating a raw format.
+    if (shaka.hls.HlsParser.RAW_FORMATS_.includes(mimeType)) {
       codecs = '';
     }
@@ -1170,8 +1173,19 @@
     const startPosition = mediaSequenceTag ?
         Number(mediaSequenceTag.value) : 0;
-    const segments = await this.createSegments_(verbatimMediaPlaylistUri,
-        playlist, startPosition, type, mimeType, codecs);
+    let segments;
+    try {
+      segments = await this.createSegments_(verbatimMediaPlaylistUri,
+          playlist, startPosition, type, mimeType, codecs);
+    } catch (error) {
+      if (error.code == shaka.util.Error.Code.HLS_INTERNAL_SKIP_STREAM) {
+        shaka.log.alwaysWarn('Skipping unsupported HLS stream',
+            mimeType, verbatimMediaPlaylistUri);
+        return null;
+      }
+
+      throw error;
+    }
 
     const minTimestamp = segments[0].startTime;
     const lastEndTime = segments[segments.length - 1].endTime;
@@ -1628,12 +1642,27 @@
     shaka.log.v1('Fetching segment to find start time');
 
-    if (mimeType == 'audio/mpeg' || mimeType == 'audio/aac') {
-      // Raw MP3 and AAC files contain no timestamps.
-      // Don't return a false timestamp. We want to treat them as aligning to
-      // their corresponding video segments.
-      // TODO(#2337): Avoid trying to fetch timestamps for raw formats.
-      return null;
+    if (shaka.hls.HlsParser.RAW_FORMATS_.includes(mimeType)) {
+      // Raw formats contain no timestamps. Even if there is an ID3 tag with a
+      // timestamp, that's not going to be honored by MediaSource, which will
+      // use sequence mode for these segments. We don't yet support sequence
+      // mode, so we must reject these streams.
+      // TODO(#2337): Support sequence mode and align raw format timestamps to
+      // other streams.
+      shaka.log.alwaysWarn(
+          'Raw formats are not yet supported. Skipping ' + mimeType);
+      throw new shaka.util.Error(
+          shaka.util.Error.Severity.RECOVERABLE,
+          shaka.util.Error.Category.MANIFEST,
+          shaka.util.Error.Code.HLS_INTERNAL_SKIP_STREAM);
+    }
+
+    if (mimeType == 'video/webm') {
+      shaka.log.alwaysWarn('WebM in HLS is not yet supported. Skipping.');
+      throw new shaka.util.Error(
+          shaka.util.Error.Severity.RECOVERABLE,
+          shaka.util.Error.Category.MANIFEST,
+          shaka.util.Error.Code.HLS_INTERNAL_SKIP_STREAM);
     }
 
     if (mimeType == 'video/mp4' || mimeType == 'audio/mp4') {
@@ -1653,36 +1682,35 @@
       const initSegmentResponse = responses[1] || responses[0];
 
       return this.getStartTimeFromMp4Segment_(
+          verbatimMediaPlaylistUri, segmentResponse.uri,
           segmentResponse.data, initSegmentResponse.data);
     }
 
     if (mimeType == 'video/mp2t') {
       const response = await this.fetchPartialSegment_(segmentRef);
       goog.asserts.assert(response.data, 'Should have a response body!');
-      return this.getStartTimeFromTsSegment_(response.data);
-    }
-
-    if (mimeType == 'video/webm') {
-      shaka.log.warning(
-          'Hls+WebM combination is not supported at the moment. Skipping.');
-      return null;
+      return this.getStartTimeFromTsSegment_(
+          verbatimMediaPlaylistUri, response.uri, response.data);
     }
 
     throw new shaka.util.Error(
         shaka.util.Error.Severity.CRITICAL,
         shaka.util.Error.Category.MANIFEST,
-        shaka.util.Error.Code.HLS_COULD_NOT_PARSE_SEGMENT_START_TIME);
+        shaka.util.Error.Code.HLS_COULD_NOT_PARSE_SEGMENT_START_TIME,
+        verbatimMediaPlaylistUri);
   }
 
   /**
    * Parses an mp4 segment to get its start time.
    *
+   * @param {string} playlistUri
+   * @param {string} segmentUri
    * @param {BufferSource} mediaData
    * @param {BufferSource} initData
   * @return {number}
    * @private
    */
-  getStartTimeFromMp4Segment_(mediaData, initData) {
+  getStartTimeFromMp4Segment_(playlistUri, segmentUri, mediaData, initData) {
     const Mp4Parser = shaka.util.Mp4Parser;
 
     let timescale = 0;
@@ -1709,7 +1737,8 @@
       throw new shaka.util.Error(
           shaka.util.Error.Severity.CRITICAL,
           shaka.util.Error.Category.MANIFEST,
-          shaka.util.Error.Code.HLS_COULD_NOT_PARSE_SEGMENT_START_TIME);
+          shaka.util.Error.Code.HLS_COULD_NOT_PARSE_SEGMENT_START_TIME,
+          playlistUri, segmentUri);
     }
 
     let startTime = 0;
@@ -1733,7 +1762,8 @@
       throw new shaka.util.Error(
           shaka.util.Error.Severity.CRITICAL,
           shaka.util.Error.Category.MANIFEST,
-          shaka.util.Error.Code.HLS_COULD_NOT_PARSE_SEGMENT_START_TIME);
+          shaka.util.Error.Code.HLS_COULD_NOT_PARSE_SEGMENT_START_TIME,
+          playlistUri, segmentUri);
     }
     return startTime;
   }
@@ -1741,11 +1771,13 @@
   /**
    * Parses a TS segment to get its start time.
    *
+   * @param {string} playlistUri
+   * @param {string} segmentUri
    * @param {BufferSource} data
    * @return {number}
    * @private
    */
-  getStartTimeFromTsSegment_(data) {
+  getStartTimeFromTsSegment_(playlistUri, segmentUri, data) {
     const reader = new shaka.util.DataViewReader(
         data, shaka.util.DataViewReader.Endianness.BIG_ENDIAN);
@@ -1753,7 +1785,8 @@
       throw new shaka.util.Error(
           shaka.util.Error.Severity.CRITICAL,
           shaka.util.Error.Category.MANIFEST,
-          shaka.util.Error.Code.HLS_COULD_NOT_PARSE_SEGMENT_START_TIME);
+          shaka.util.Error.Code.HLS_COULD_NOT_PARSE_SEGMENT_START_TIME,
+          playlistUri, segmentUri);
     };
 
     let packetStart = 0;
@@ -2273,10 +2306,30 @@ shaka.hls.HlsParser.AUDIO_EXTENSIONS_TO_MIME_TYPES_ = {
   'm4a': 'audio/mp4',
   // MPEG2-TS also uses video/ for audio: https://bit.ly/TsMse
   'ts': 'video/mp2t',
+
+  // Raw formats:
   'aac': 'audio/aac',
+  'ac3': 'audio/ac3',
+  'ec3': 'audio/ec3',
+  'mp3': 'audio/mpeg',
 };
+/**
+ * MIME types of raw formats.
+ * TODO(#2337): Support raw formats and share this list among parsers.
+ *
+ * @const {!Array.<string>}
+ * @private
+ */
+shaka.hls.HlsParser.RAW_FORMATS_ = [
+  'audio/aac',
+  'audio/ac3',
+  'audio/ec3',
+  'audio/mpeg',
+];
+
+
 /**
  * @const {!Object.<string, string>}
  * @private
  */
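
Read together, the extension table and the new RAW_FORMATS_ list drive the skip decision: a rendition's segment file extension is mapped to a MIME type, and a raw-format MIME type makes the parser warn and drop the rendition (the recoverable HLS_INTERNAL_SKIP_STREAM error thrown above is caught in createStreamInfo_ and converted into a null stream info). Below is a minimal standalone sketch of that lookup; the table entries are abridged from the constants above, and the helper name shouldSkipAudioPlaylist is hypothetical rather than part of the parser.

// Illustrative sketch only; table entries abridged from the constants above.
const AUDIO_EXTENSIONS_TO_MIME_TYPES = {
  'm4a': 'audio/mp4',
  'ts': 'video/mp2t',  // MPEG2-TS also uses video/ for audio.
  // Raw formats:
  'aac': 'audio/aac',
  'ac3': 'audio/ac3',
  'ec3': 'audio/ec3',
  'mp3': 'audio/mpeg',
};

const RAW_FORMATS = ['audio/aac', 'audio/ac3', 'audio/ec3', 'audio/mpeg'];

// Hypothetical helper: true if the rendition would be skipped, judging by the
// file extension of its first media segment.
function shouldSkipAudioPlaylist(firstSegmentUri) {
  const extension = firstSegmentUri.split('.').pop().toLowerCase();
  const mimeType = AUDIO_EXTENSIONS_TO_MIME_TYPES[extension];
  // Raw formats carry no usable timestamps, and sequence mode is not yet
  // supported, so such renditions are skipped with a warning.
  return RAW_FORMATS.includes(mimeType);
}

console.log(shouldSkipAudioPlaylist('a1.aac'));  // true
console.log(shouldSkipAudioPlaylist('a1.m4a'));  // false
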
diff --git a/lib/util/error.js b/lib/util/error.js
index 59ce5176e6..8793e648cc 100644
--- a/lib/util/error.js
+++ b/lib/util/error.js
@@ -587,6 +587,8 @@ shaka.util.Error.Code = {
 
   /**
    * The HLS parser was unable to parse segment start time from the media.
+   * error.data[0] is the failed media playlist URI.
+   * error.data[1] is the failed media segment URI (if any).
    */
   'HLS_COULD_NOT_PARSE_SEGMENT_START_TIME': 4030,
@@ -615,6 +617,12 @@ shaka.util.Error.Code = {
    */
   'HLS_AES_128_ENCRYPTION_NOT_SUPPORTED': 4034,
 
+  /**
+   * An internal error code that should never be seen by applications, thrown
+   * to force the HLS parser to skip an unsupported stream.
+   */
+  'HLS_INTERNAL_SKIP_STREAM': 4035,
+
   // RETIRED: 'INCONSISTENT_BUFFER_STATE': 5000,
   // RETIRED: 'INVALID_SEGMENT_INDEX': 5001,
   // RETIRED: 'SEGMENT_DOES_NOT_EXIST': 5002,
diff --git a/test/hls/hls_parser_unit.js b/test/hls/hls_parser_unit.js
index 72013d86a8..3cd96cf306 100644
--- a/test/hls/hls_parser_unit.js
+++ b/test/hls/hls_parser_unit.js
@@ -8,6 +8,7 @@ describe('HlsParser', () => {
   const ManifestParser = shaka.test.ManifestParser;
   const TextStreamKind = shaka.util.ManifestParserUtils.TextStreamKind;
   const Util = shaka.test.Util;
+  const originalAlwaysWarn = shaka.log.alwaysWarn;
 
   const vttText = [
     'WEBVTT\n',
@@ -31,6 +32,10 @@
   /** @type {!Uint8Array} */
   let selfInitializingSegmentData;
 
+  afterEach(() => {
+    shaka.log.alwaysWarn = originalAlwaysWarn;
+  });
+
   beforeEach(() => {
     // TODO: use StreamGenerator?
     initSegmentData = new Uint8Array([
@@ -2412,4 +2417,73 @@
 
     await testHlsParser(master, media, manifest);
   });
+
+  it('skips raw audio formats', async () => {
+    const master = [
+      '#EXTM3U\n',
+      '#EXT-X-MEDIA:TYPE=AUDIO,GROUP-ID="audio",URI="audio1"\n',
+      '#EXT-X-MEDIA:TYPE=AUDIO,GROUP-ID="audio",URI="audio2"\n',
+      '#EXT-X-MEDIA:TYPE=AUDIO,GROUP-ID="audio",URI="audio3"\n',
+      '#EXT-X-MEDIA:TYPE=AUDIO,GROUP-ID="audio",URI="audio4"\n',
+      '#EXT-X-STREAM-INF:BANDWIDTH=400,CODECS="avc1,mp4a",',
+      'RESOLUTION=1280x720,AUDIO="audio"\n',
+      'video\n',
+    ].join('');
+
+    const videoMedia = [
+      '#EXTM3U\n',
+      '#EXT-X-PLAYLIST-TYPE:VOD\n',
+      '#EXT-X-MAP:URI="v-init.mp4"\n',
+      '#EXTINF:5,\n',
+      'v1.mp4',
+    ].join('');
+
+    const audioMedia1 = [
+      '#EXTM3U\n',
+      '#EXT-X-PLAYLIST-TYPE:VOD\n',
+      '#EXTINF:5,\n',
+      'a1.mp3',
+    ].join('');
+
+    const audioMedia2 = [
+      '#EXTM3U\n',
+      '#EXT-X-PLAYLIST-TYPE:VOD\n',
+      '#EXTINF:5,\n',
+      'a1.aac',
+    ].join('');
+
+    const audioMedia3 = [
+      '#EXTM3U\n',
+      '#EXT-X-PLAYLIST-TYPE:VOD\n',
+      '#EXTINF:5,\n',
+      'a1.ac3',
+    ].join('');
+
+    const audioMedia4 = [
+      '#EXTM3U\n',
+      '#EXT-X-PLAYLIST-TYPE:VOD\n',
+      '#EXTINF:5,\n',
+      'a1.ec3',
+    ].join('');
+
+    fakeNetEngine
+        .setResponseText('test:/master', master)
+        .setResponseText('test:/video', videoMedia)
+        .setResponseText('test:/audio1', audioMedia1)
+        .setResponseText('test:/audio2', audioMedia2)
+        .setResponseText('test:/audio3', audioMedia3)
+        .setResponseText('test:/audio4', audioMedia4)
+        .setResponseValue('test:/v-init.mp4', initSegmentData)
+        .setResponseValue('test:/v1.mp4', segmentData);
+
+    const alwaysWarnSpy = jasmine.createSpy('shaka.log.alwaysWarn');
+    shaka.log.alwaysWarn = shaka.test.Util.spyFunc(alwaysWarnSpy);
+
+    const manifest = await parser.start('test:/master', playerInterface);
+    expect(manifest.periods[0].variants.length).toBe(1);
+    expect(manifest.periods[0].variants[0].audio).toBe(null);
+
+    // We should log a warning when this happens.
+    expect(alwaysWarnSpy).toHaveBeenCalled();
+  });
 });
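
For context, one way an application could observe the behavior exercised by the new test is through the public shaka.Player API. This is a hypothetical usage sketch rather than part of the change set: the element lookup and the manifest URI are placeholders, and a null audioCodec on each variant track is the expected outcome when every audio rendition was a raw format and was therefore skipped.

// Hypothetical usage sketch; assumes a page with a <video> element and a
// master playlist whose audio renditions are all raw AAC/AC-3/E-AC-3/MP3.
const video = document.querySelector('video');
const player = new shaka.Player(video);

async function inspectVariants(manifestUri) {
  await player.load(manifestUri);
  for (const track of player.getVariantTracks()) {
    // With the raw audio renditions skipped, the remaining variants are
    // video-only, so audioCodec is expected to be null here.
    console.log(track.videoCodec, track.audioCodec);
  }
}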