diff --git a/index.d.ts b/index.d.ts index a7a1c4df76..f174bfd824 100644 --- a/index.d.ts +++ b/index.d.ts @@ -115,6 +115,7 @@ declare namespace dashjs { bufferToKeep?: number; bufferAheadToKeep?: number; jumpGaps?: boolean; + jumpLargeGaps?: boolean; smallGapLimit?: number; stableBufferTime?: number; bufferTimeAtTopQuality?: number; @@ -125,7 +126,7 @@ declare namespace dashjs { keepProtectionMediaKeys?: boolean; useManifestDateHeaderTimeSource?: boolean; useSuggestedPresentationDelay?: boolean; - useAppendWindowEnd?: boolean, + useAppendWindow?: boolean, manifestUpdateRetryInterval?: number; liveCatchUpMinDrift?: number; liveCatchUpMaxDrift?: number; diff --git a/samples/dash-if-reference-player/app/main.js b/samples/dash-if-reference-player/app/main.js index f19dd036b8..fc9cbe501c 100644 --- a/samples/dash-if-reference-player/app/main.js +++ b/samples/dash-if-reference-player/app/main.js @@ -61,11 +61,11 @@ app.controller('DashController', function ($scope, sources, contributors, dashif // Add provider to beginning of each Vector var provider = data.provider; $scope.availableStreams.forEach(function (item) { - if(item && item.submenu && item.submenu.length > 0) { + if (item && item.submenu && item.submenu.length > 0) { item.submenu.forEach(function (subitem) { - if(subitem && subitem.name && subitem.provider && provider[subitem.provider] && provider[subitem.provider].acronym) { - subitem.name = '[' + provider[subitem.provider].acronym + '] ' + subitem.name; - } + if (subitem && subitem.name && subitem.provider && provider[subitem.provider] && provider[subitem.provider].acronym) { + subitem.name = '[' + provider[subitem.provider].acronym + '] ' + subitem.name; + } }); } }); @@ -853,7 +853,8 @@ app.controller('DashController', function ($scope, sources, contributors, dashif var dashAdapter = $scope.player.getDashAdapter(); if (dashMetrics && $scope.streamInfo) { - var periodIdx = $scope.streamInfo.index; + var period = dashAdapter.getPeriodById($scope.streamInfo.id); + var periodIdx = period ? period.index : $scope.streamInfo.index; var maxIndex = dashAdapter.getMaxIndexForBufferType(type, periodIdx); var repSwitch = dashMetrics.getCurrentRepresentationSwitch(type, true); diff --git a/samples/dash-if-reference-player/dashjs_config.json b/samples/dash-if-reference-player/dashjs_config.json index 5385eb4085..b01e2defd1 100644 --- a/samples/dash-if-reference-player/dashjs_config.json +++ b/samples/dash-if-reference-player/dashjs_config.json @@ -1,6 +1,6 @@ { "debug": { - "logLevel": 4 + "logLevel": 5 }, "streaming": { "metricsMaxListDepth": 50, diff --git a/src/core/Settings.js b/src/core/Settings.js index 4b6b932fbe..c22f38f89c 100644 --- a/src/core/Settings.js +++ b/src/core/Settings.js @@ -62,6 +62,7 @@ import {HTTPRequest} from '../streaming/vo/metrics/HTTPRequest'; * bufferToKeep: 20, * bufferAheadToKeep: 80, * jumpGaps: true, + * jumpLargeGaps: true, * smallGapLimit: 1.5, * stableBufferTime: 12, * bufferTimeAtTopQuality: 30, @@ -72,7 +73,7 @@ import {HTTPRequest} from '../streaming/vo/metrics/HTTPRequest'; * keepProtectionMediaKeys: false, * useManifestDateHeaderTimeSource: true, * useSuggestedPresentationDelay: true, - * useAppendWindowEnd: true, + * useAppendWindow: true, * manifestUpdateRetryInterval: 100, * liveCatchUpMinDrift: 0.02, * liveCatchUpMaxDrift: 0, @@ -243,6 +244,7 @@ import {HTTPRequest} from '../streaming/vo/metrics/HTTPRequest'; * Allows you to modify the buffer ahead of current time position that is kept in source buffer in seconds. *
0|--------|currentTime|-----bufferAheadToKeep----|----bufferToPrune-----------|end|
* @property {boolean} [jumpGaps=true] Sets whether player should jump small gaps (discontinuities) in the buffer. + * @property {boolean} [jumpLargeGaps=true] Sets whether player should jump large gaps (discontinuities) in the buffer. * @property {number} [smallGapLimit=1.8] Time in seconds for a gap to be considered small. * @property {number} [stableBufferTime=12] * The time that the internal buffer target will be set to post startup/seeks (NOT top quality). @@ -270,8 +272,8 @@ import {HTTPRequest} from '../streaming/vo/metrics/HTTPRequest'; * use of the date header will happen only after the other timing source that take precedence fail or are omitted as described. * @property {boolean} [useSuggestedPresentationDelay=true] *

Set to true if you would like to override the default live delay and honor the SuggestedPresentationDelay attribute in the manifest.

- * @property {boolean} [useAppendWindowEnd=true] - * Specifies if the appendWindowEnd attribute of the MSE SourceBuffers should be set according to content duration from manifest. + * @property {boolean} [useAppendWindow=true] + * Specifies if the appendWindow attributes of the MSE SourceBuffers should be set according to content duration from manifest. * @property {number} [manifestUpdateRetryInterval=100] * For live streams, set the interval-frequency in milliseconds at which * dash.js will check if the current manifest is still processed before @@ -392,6 +394,7 @@ function Settings() { bufferToKeep: 20, bufferAheadToKeep: 80, jumpGaps: true, + jumpLargeGaps: true, smallGapLimit: 1.5, stableBufferTime: 12, bufferTimeAtTopQuality: 30, @@ -402,7 +405,7 @@ function Settings() { keepProtectionMediaKeys: false, useManifestDateHeaderTimeSource: true, useSuggestedPresentationDelay: true, - useAppendWindowEnd: true, + useAppendWindow: true, manifestUpdateRetryInterval: 100, liveCatchUpMinDrift: 0.02, liveCatchUpMaxDrift: 0, diff --git a/src/dash/DashAdapter.js b/src/dash/DashAdapter.js index 4016055b9c..564266daa9 100644 --- a/src/dash/DashAdapter.js +++ b/src/dash/DashAdapter.js @@ -635,14 +635,14 @@ function DashAdapter() { /** * Returns the bandwidth for a given representation id * @param {number} representationId - * @param {number} periodId + * @param {number} periodIdx * @returns {number} bandwidth * @memberOf module:DashAdapter * @instance */ - function getBandwidthForRepresentation(representationId, periodId) { + function getBandwidthForRepresentation(representationId, periodIdx) { let representation; - let period = getPeriod(periodId); + let period = getPeriod(periodIdx); representation = findRepresentation(period, representationId); @@ -678,6 +678,26 @@ function DashAdapter() { return findMaxBufferIndex(period, bufferType); } + /** + * Returns the voPeriod object for a given id + * @param {String} id + * @returns {object|null} + */ + function getPeriodById(id) { + if (!id || voPeriods.length === 0) { + return null; + } + const periods = voPeriods.filter((p) => { + return p.id === id; + }); + + if (periods && periods.length > 0) { + return periods[0]; + } + + return null; + } + function reset() { voPeriods = []; voAdaptations = {}; @@ -821,8 +841,8 @@ function DashAdapter() { } } - function getPeriod(periodId) { - return voPeriods.length > 0 ? voPeriods[0].mpd.manifest.Period_asArray[periodId] : null; + function getPeriod(periodIdx) { + return voPeriods.length > 0 ? voPeriods[0].mpd.manifest.Period_asArray[periodIdx] : null; } function findRepresentationIndex(period, representationId) { @@ -915,6 +935,7 @@ function DashAdapter() { getCodec: getCodec, getVoAdaptations: getVoAdaptations, getVoPeriods: getVoPeriods, + getPeriodById, setCurrentMediaInfo: setCurrentMediaInfo, reset: reset }; diff --git a/src/dash/DashHandler.js b/src/dash/DashHandler.js index f6698d9eaf..087018fc21 100644 --- a/src/dash/DashHandler.js +++ b/src/dash/DashHandler.js @@ -269,7 +269,6 @@ function DashHandler(config) { isFinished = endTime >= duration; } } - return isFinished; } @@ -329,7 +328,7 @@ function DashHandler(config) { // check that there is a segment in this index const segment = segmentsController.getSegmentByIndex(representation, indexToRequest, lastSegment ? lastSegment.mediaStartTime : -1); if (!segment && isEndlessMedia(representation) && !dynamicStreamCompleted) { - logger.debug('No segment found at index: ' + indexToRequest + '. 
Wait for next loop'); + logger.debug(getType() + ' No segment found at index: ' + indexToRequest + '. Wait for next loop'); return null; } else { if (segment) { diff --git a/src/dash/controllers/RepresentationController.js b/src/dash/controllers/RepresentationController.js index 073d9a6686..d091786b5c 100644 --- a/src/dash/controllers/RepresentationController.js +++ b/src/dash/controllers/RepresentationController.js @@ -239,7 +239,7 @@ function RepresentationController(config) { err, repSwitch; - if (r.adaptation.period.mpd.manifest.type === dashConstants.DYNAMIC && !r.adaptation.period.mpd.manifest.ignorePostponeTimePeriod) { + if (r.adaptation.period.mpd.manifest.type === dashConstants.DYNAMIC && !r.adaptation.period.mpd.manifest.ignorePostponeTimePeriod && playbackController.getStreamController().getStreams().length <= 1) { // We must put things to sleep unless till e.g. the startTime calculation in ScheduleController.onLiveEdgeSearchCompleted fall after the segmentAvailabilityRange.start postponeTimePeriod = getRepresentationUpdatePostponeTimePeriod(r, streamInfo); } diff --git a/src/streaming/MediaPlayer.js b/src/streaming/MediaPlayer.js index 340fde8f94..7b4915fa3d 100644 --- a/src/streaming/MediaPlayer.js +++ b/src/streaming/MediaPlayer.js @@ -34,6 +34,7 @@ import DashConstants from '../dash/constants/DashConstants'; import MetricsConstants from './constants/MetricsConstants'; import PlaybackController from './controllers/PlaybackController'; import StreamController from './controllers/StreamController'; +import GapController from './controllers/GapController'; import MediaController from './controllers/MediaController'; import BaseURLController from './controllers/BaseURLController'; import ManifestLoader from './ManifestLoader'; @@ -145,6 +146,7 @@ function MediaPlayer() { baseURLController, capabilities, streamController, + gapController, playbackController, dashMetrics, manifestModel, @@ -196,6 +198,9 @@ function MediaPlayer() { if (config.streamController) { streamController = config.streamController; } + if (config.gapController) { + gapController = config.gapController; + } if (config.playbackController) { playbackController = config.playbackController; } @@ -272,6 +277,10 @@ function MediaPlayer() { streamController = StreamController(context).getInstance(); } + if (!gapController) { + gapController = GapController(context).getInstance(); + } + adapter = DashAdapter(context).getInstance(); manifestModel = ManifestModel(context).getInstance(); @@ -1903,6 +1912,7 @@ function MediaPlayer() { streamingInitialized = false; adapter.reset(); streamController.reset(); + gapController.reset(); playbackController.reset(); abrController.reset(); mediaController.reset(); @@ -1951,6 +1961,13 @@ function MediaPlayer() { baseURLController: baseURLController }); + gapController.setConfig({ + settings, + playbackController, + streamController, + videoModel + }); + playbackController.setConfig({ streamController: streamController, dashMetrics: dashMetrics, @@ -1990,6 +2007,7 @@ function MediaPlayer() { // initialises controller streamController.initialize(autoPlay, protectionData); + gapController.initialize(); cmcdModel.initialize(); } diff --git a/src/streaming/MediaPlayerEvents.js b/src/streaming/MediaPlayerEvents.js index 6c65171fc8..8d2141b351 100644 --- a/src/streaming/MediaPlayerEvents.js +++ b/src/streaming/MediaPlayerEvents.js @@ -335,6 +335,12 @@ class MediaPlayerEvents extends EventsBase { * @event MediaPlayerEvents#MANIFEST_VALIDITY_CHANGED */ this.MANIFEST_VALIDITY_CHANGED = 
'manifestValidityChanged'; + + /** + * A gap occured in the timeline which requires a seek + * @event MediaPlayerEvents#MANIFEST_VALIDITY_CHANGED + */ + this.GAP_CAUSED_PLAYBACK_SEEK = 'gapCausedPlaybackSeek'; } } diff --git a/src/streaming/SourceBufferSink.js b/src/streaming/SourceBufferSink.js index 15c6b78a10..36ad729075 100644 --- a/src/streaming/SourceBufferSink.js +++ b/src/streaming/SourceBufferSink.js @@ -43,7 +43,7 @@ const MAX_ALLOWED_DISCONTINUITY = 0.1; // 100 milliseconds * @ignore * @implements FragmentSink */ -function SourceBufferSink(mediaSource, mediaInfo, onAppendedCallback, useAppendWindowEnd, oldBuffer) { +function SourceBufferSink(mediaSource, mediaInfo, onAppendedCallback, oldBuffer) { const context = this.context; const eventBus = EventBus(context).getInstance(); @@ -56,7 +56,6 @@ function SourceBufferSink(mediaSource, mediaInfo, onAppendedCallback, useAppendW let callbacks = []; let appendQueue = []; let onAppended = onAppendedCallback; - let setAppendWindowEnd = (useAppendWindowEnd === false) ? false : true; function setup() { logger = Debug(context).getInstance().getLogger(instance); @@ -77,10 +76,6 @@ function SourceBufferSink(mediaSource, mediaInfo, onAppendedCallback, useAppendW buffer.changeType(codec); } - if (setAppendWindowEnd && buffer) { - buffer.appendWindowEnd = mediaSource.duration; - } - const CHECK_INTERVAL = 50; // use updateend event if possible if (typeof buffer.addEventListener === 'function') { @@ -116,7 +111,12 @@ function SourceBufferSink(mediaSource, mediaInfo, onAppendedCallback, useAppendW buffer.removeEventListener('abort', errHandler, false); } clearInterval(intervalId); - buffer.appendWindowEnd = Infinity; + try { + buffer.appendWindowEnd = Infinity; + buffer.appendWindowStart = 0; + } catch (e) { + logger.error('Failed to reset append window'); + } if (!keepBuffer) { try { if (!buffer.getClassName || buffer.getClassName() !== 'TextSourceBuffer') { @@ -181,11 +181,34 @@ function SourceBufferSink(mediaSource, mediaInfo, onAppendedCallback, useAppendW function updateTimestampOffset(MSETimeOffset) { if (buffer.timestampOffset !== MSETimeOffset && !isNaN(MSETimeOffset)) { waitForUpdateEnd(() => { + if (MSETimeOffset < 0) { + MSETimeOffset += 0.001; + } buffer.timestampOffset = MSETimeOffset; }); } } + function updateAppendWindow(sInfo) { + if (!buffer) { + return; + } + waitForUpdateEnd(() => { + let appendWindowEnd = mediaSource.duration; + let appendWindowStart = 0; + if (sInfo.start && sInfo.duration && isFinite(sInfo.duration)) { + appendWindowEnd = sInfo.start + sInfo.duration; + } + if (sInfo.start) { + appendWindowStart = sInfo.start; + } + buffer.appendWindowStart = 0; + buffer.appendWindowEnd = appendWindowEnd; + buffer.appendWindowStart = appendWindowStart; + logger.debug(`Updated append window for ${mediaInfo.type}. Set start to ${buffer.appendWindowStart} and end to ${buffer.appendWindowEnd}`); + }); + } + function remove(start, end, forceRemoval) { const sourceBufferSink = this; // make sure that the given time range is correct. Otherwise we will get InvalidAccessError @@ -221,7 +244,7 @@ function SourceBufferSink(mediaSource, mediaInfo, onAppendedCallback, useAppendW if (appendQueue.length > 0) { isAppendingInProgress = true; const nextChunk = appendQueue[0]; - appendQueue.splice(0,1); + appendQueue.splice(0, 1); let oldRanges = []; const afterSuccess = function () { // Safari sometimes drops a portion of a buffer after appending. 
Handle these situations here @@ -284,10 +307,10 @@ function SourceBufferSink(mediaSource, mediaInfo, onAppendedCallback, useAppendW } function isChunkAlignedWithRange(oldRanges, chunk) { - for (let i = 0; i < oldRanges.length; i++ ) { + for (let i = 0; i < oldRanges.length; i++) { const start = Math.round(oldRanges.start(i)); const end = Math.round(oldRanges.end(i)); - if (end === chunk.start || start === chunk.end || (chunk.start >= start && chunk.end <= end) ) { + if (end === chunk.start || start === chunk.end || (chunk.start >= start && chunk.end <= end)) { return true; } } @@ -354,7 +377,8 @@ function SourceBufferSink(mediaSource, mediaInfo, onAppendedCallback, useAppendW reset: reset, updateTimestampOffset: updateTimestampOffset, hasDiscontinuitiesAfter: hasDiscontinuitiesAfter, - waitForUpdateEnd: waitForUpdateEnd + waitForUpdateEnd: waitForUpdateEnd, + updateAppendWindow }; setup(); diff --git a/src/streaming/Stream.js b/src/streaming/Stream.js index 985bafd837..4866f53cbb 100644 --- a/src/streaming/Stream.js +++ b/src/streaming/Stream.js @@ -81,7 +81,9 @@ function Stream(config) { thumbnailController, preloaded, boxParser, + preloadingScheduled, debug, + isEndedEventSignaled, trackChangedEvent; const codecCompatibilityTable = [ @@ -233,6 +235,8 @@ function Stream(config) { hasAudioTrack = false; updateError = {}; isUpdating = false; + preloadingScheduled = false; + isEndedEventSignaled = false; } function reset() { @@ -259,10 +263,26 @@ function Stream(config) { return streamInfo ? streamInfo.duration : NaN; } + function getIsEndedEventSignaled() { + return isEndedEventSignaled; + } + + function setIsEndedEventSignaled(value) { + isEndedEventSignaled = value; + } + function getStartTime() { return streamInfo ? streamInfo.start : NaN; } + function getPreloadingScheduled() { + return preloadingScheduled; + } + + function setPreloadingScheduled(value) { + preloadingScheduled = value; + } + function getLiveStartTime() { if (!streamInfo.manifestInfo.isDynamic) return NaN; // Get live start time of the video stream (1st in array of streams) @@ -284,11 +304,11 @@ function Stream(config) { return streamInfo; } - function getHasAudioTrack () { + function getHasAudioTrack() { return hasAudioTrack; } - function getHasVideoTrack () { + function getHasVideoTrack() { return hasVideoTrack; } @@ -390,7 +410,7 @@ function Stream(config) { function createStreamProcessor(mediaInfo, allMediaForType, mediaSource, optionalSettings) { - let fragmentModel = fragmentController.getModel(getId(), mediaInfo ? mediaInfo.type : null); + let fragmentModel = fragmentController.getModel(getId(), mediaInfo ? 
mediaInfo.type : null); let streamProcessor = StreamProcessor(context).create({ streamInfo: streamInfo, @@ -864,24 +884,26 @@ function Stream(config) { } function preload(mediaSource, previousBuffers) { - addInlineEvents(); + if (!getPreloaded()) { + addInlineEvents(); - initializeMediaForType(Constants.VIDEO, mediaSource); - initializeMediaForType(Constants.AUDIO, mediaSource); - initializeMediaForType(Constants.TEXT, mediaSource); - initializeMediaForType(Constants.FRAGMENTED_TEXT, mediaSource); - initializeMediaForType(Constants.EMBEDDED_TEXT, mediaSource); - initializeMediaForType(Constants.MUXED, mediaSource); - initializeMediaForType(Constants.IMAGE, mediaSource); + initializeMediaForType(Constants.VIDEO, mediaSource); + initializeMediaForType(Constants.AUDIO, mediaSource); + initializeMediaForType(Constants.TEXT, mediaSource); + initializeMediaForType(Constants.FRAGMENTED_TEXT, mediaSource); + initializeMediaForType(Constants.EMBEDDED_TEXT, mediaSource); + initializeMediaForType(Constants.MUXED, mediaSource); + initializeMediaForType(Constants.IMAGE, mediaSource); - createBuffers(previousBuffers); + createBuffers(previousBuffers); - eventBus.on(Events.CURRENT_TRACK_CHANGED, onCurrentTrackChanged, instance); - for (let i = 0; i < streamProcessors.length && streamProcessors[i]; i++) { - streamProcessors[i].getScheduleController().start(); - } + eventBus.on(Events.CURRENT_TRACK_CHANGED, onCurrentTrackChanged, instance); + for (let i = 0; i < streamProcessors.length && streamProcessors[i]; i++) { + streamProcessors[i].getScheduleController().start(); + } - setPreloaded(true); + setPreloaded(true); + } } @@ -905,7 +927,11 @@ function Stream(config) { setMediaSource: setMediaSource, isMediaCodecCompatible: isMediaCodecCompatible, isProtectionCompatible: isProtectionCompatible, - getPreloaded: getPreloaded + getPreloaded: getPreloaded, + getPreloadingScheduled, + setPreloadingScheduled, + getIsEndedEventSignaled, + setIsEndedEventSignaled }; setup(); diff --git a/src/streaming/StreamProcessor.js b/src/streaming/StreamProcessor.js index a8043d807f..fc41652e66 100644 --- a/src/streaming/StreamProcessor.js +++ b/src/streaming/StreamProcessor.js @@ -314,6 +314,9 @@ function StreamProcessor(config) { function updateStreamInfo(newStreamInfo) { streamInfo = newStreamInfo; + if (settings.get().streaming.useAppendWindow) { + bufferController.updateAppendWindow(); + } } function getStreamInfo() { @@ -431,10 +434,12 @@ function StreamProcessor(config) { } function onMediaFragmentNeeded(e) { - if (!e.sender || e.mediaType !== type || e.streamId !== streamInfo.id) return; - + if (!e.sender || e.mediaType !== type || e.streamId !== streamInfo.id) { + return; + } let request; + // Don't schedule next fragments while pruning to avoid buffer inconsistencies if (!bufferController.getIsPruningInProgress()) { request = findNextRequest(e.seekTarget, e.replacement); @@ -615,6 +620,11 @@ function StreamProcessor(config) { let liveStartTime = NaN; const currentRepresentationInfo = getRepresentationInfo(); const liveEdge = liveEdgeFinder.getLiveEdge(currentRepresentationInfo); + + if (isNaN(liveEdge)) { + return NaN; + } + const request = findRequestForLiveEdge(liveEdge, currentRepresentationInfo); if (request) { diff --git a/src/streaming/controllers/BufferController.js b/src/streaming/controllers/BufferController.js index 020178dd93..165ce9dd2d 100644 --- a/src/streaming/controllers/BufferController.js +++ b/src/streaming/controllers/BufferController.js @@ -42,7 +42,7 @@ import Debug from '../../core/Debug'; 
import InitCache from '../utils/InitCache'; import DashJSError from '../vo/DashJSError'; import Errors from '../../core/errors/Errors'; -import { HTTPRequest } from '../vo/metrics/HTTPRequest'; +import {HTTPRequest} from '../vo/metrics/HTTPRequest'; const STALL_THRESHOLD = 0.5; const BUFFER_END_THRESHOLD = 0.5; @@ -135,9 +135,12 @@ function BufferController(config) { if (mediaSource) { try { if (oldBuffers && oldBuffers[type]) { - buffer = SourceBufferSink(context).create(mediaSource, mediaInfo, onAppended.bind(this), settings.get().streaming.useAppendWindowEnd, oldBuffers[type]); + buffer = SourceBufferSink(context).create(mediaSource, mediaInfo, onAppended.bind(this), oldBuffers[type]); } else { - buffer = SourceBufferSink(context).create(mediaSource, mediaInfo, onAppended.bind(this), settings.get().streaming.useAppendWindowEnd); + buffer = SourceBufferSink(context).create(mediaSource, mediaInfo, onAppended.bind(this), null); + } + if (settings.get().streaming.useAppendWindow) { + buffer.updateAppendWindow(streamInfo); } if (typeof buffer.getBuffer().initialize === 'function') { buffer.getBuffer().initialize(type, streamInfo, mediaInfoArr, fragmentModel); @@ -191,7 +194,7 @@ function BufferController(config) { logger.info('Init fragment finished loading saving to', type + '\'s init cache'); initCache.save(e.chunk); - logger.debug('Append Init fragment', type, ' with representationId:', e.chunk.representationId, ' and quality:', e.chunk.quality, ', data size:', e.chunk.bytes.byteLength); + logger.debug('Append Init fragment', type, ' with representationId:', e.chunk.representationId, ' and quality:', e.chunk.quality, ', data size:', e.chunk.bytes.byteLength); appendToBuffer(e.chunk); } @@ -212,7 +215,7 @@ function BufferController(config) { function onMediaFragmentLoaded(e) { const chunk = e.chunk; - if (chunk.streamId !== streamInfo.id || chunk.mediaInfo.type != type) return; + if (chunk.streamId !== streamInfo.id || chunk.mediaInfo.type !== type) return; if (replacingBuffer) { mediaChunk = chunk; @@ -234,7 +237,7 @@ function BufferController(config) { buffer.append(chunk); if (chunk.mediaInfo.type === Constants.VIDEO) { - triggerEvent(Events.VIDEO_CHUNK_RECEIVED, { chunk: chunk }); + triggerEvent(Events.VIDEO_CHUNK_RECEIVED, {chunk: chunk}); } } @@ -257,14 +260,18 @@ function BufferController(config) { // recalculate buffer lengths to keep (bufferToKeep, bufferAheadToKeep, bufferTimeAtTopQuality) according to criticalBufferLevel const bufferToKeep = Math.max(0.2 * criticalBufferLevel, 1); const bufferAhead = criticalBufferLevel - bufferToKeep; - const s = { streaming: { bufferToKeep: parseFloat(bufferToKeep.toFixed(5)), - bufferAheadToKeep: parseFloat(bufferAhead.toFixed(5))}}; + const s = { + streaming: { + bufferToKeep: parseFloat(bufferToKeep.toFixed(5)), + bufferAheadToKeep: parseFloat(bufferAhead.toFixed(5)) + } + }; settings.update(s); } } if (e.error.code === QUOTA_EXCEEDED_ERROR_CODE || !hasEnoughSpaceToAppend()) { logger.warn('Clearing playback buffer to overcome quota exceed situation'); - triggerEvent(Events.QUOTA_EXCEEDED, { criticalBufferLevel: criticalBufferLevel }); //Tells ScheduleController to stop scheduling. + triggerEvent(Events.QUOTA_EXCEEDED, {criticalBufferLevel: criticalBufferLevel}); //Tells ScheduleController to stop scheduling. pruneAllSafely(); // Then we clear the buffer and onCleared event will tell ScheduleController to start scheduling again. 
} return; @@ -301,7 +308,7 @@ function BufferController(config) { } } - function adjustSeekTarget () { + function adjustSeekTarget() { // Check buffered data only for audio and video if (type !== Constants.AUDIO && type !== Constants.VIDEO) return; if (isNaN(seekTarget)) return; @@ -334,7 +341,7 @@ function BufferController(config) { } function onQualityChanged(e) { - if (e.streamInfo.id != streamInfo.id || e.mediaType !== type || requiredQuality === e.newQuality) return; + if (e.streamInfo.id !== streamInfo.id || e.mediaType !== type || requiredQuality === e.newQuality) return; updateBufferTimestampOffset(this.getRepresentationInfo(e.newQuality)); requiredQuality = e.newQuality; @@ -525,7 +532,7 @@ function BufferController(config) { function updateBufferLevel() { if (playbackController) { bufferLevel = getBufferLength(getWorkingTime() || 0); - triggerEvent(Events.BUFFER_LEVEL_UPDATED, { bufferLevel: bufferLevel }); + triggerEvent(Events.BUFFER_LEVEL_UPDATED, {bufferLevel: bufferLevel}); checkIfSufficientBuffer(); } } @@ -534,7 +541,7 @@ function BufferController(config) { const isLastIdxAppended = maxAppendedIndex >= lastIndex - 1; // Handles 0 and non 0 based request index if (isLastIdxAppended && !isBufferingCompleted && buffer.discharge === undefined) { isBufferingCompleted = true; - logger.debug('checkIfBufferingCompleted trigger BUFFERING_COMPLETED'); + logger.debug('checkIfBufferingCompleted trigger BUFFERING_COMPLETED for ' + type); triggerEvent(Events.BUFFERING_COMPLETED); } } @@ -546,7 +553,8 @@ function BufferController(config) { if (seekClearedBufferingCompleted && !isBufferingCompleted && bufferLevel > 0 && playbackController && playbackController.getTimeToStreamEnd() - bufferLevel < STALL_THRESHOLD) { seekClearedBufferingCompleted = false; isBufferingCompleted = true; - logger.debug('checkIfSufficientBuffer trigger BUFFERING_COMPLETED'); + logger.debug('checkIfSufficientBuffer trigger BUFFERING_COMPLETED for type ' + type); + logger.debug('checkIfSufficientBuffer trigger BUFFERING_COMPLETED for type ' + type); triggerEvent(Events.BUFFERING_COMPLETED); } @@ -571,7 +579,7 @@ function BufferController(config) { bufferState = state; - triggerEvent(Events.BUFFER_LEVEL_STATE_CHANGED, { state: state }); + triggerEvent(Events.BUFFER_LEVEL_STATE_CHANGED, {state: state}); triggerEvent(state === MetricsConstants.BUFFER_LOADED ? Events.BUFFER_LOADED : Events.BUFFER_EMPTY); logger.debug(state === MetricsConstants.BUFFER_LOADED ? 
'Got enough buffer to start' : 'Waiting for more buffer before starting playback'); } @@ -718,9 +726,9 @@ function BufferController(config) { to: e.to, unintended: e.unintended, hasEnoughSpaceToAppend: hasEnoughSpaceToAppend(), - quotaExceeded: isQuotaExceeded }); + quotaExceeded: isQuotaExceeded + }); } - //TODO - REMEMBER removed a timerout hack calling clearBuffer after manifestInfo.minBufferTime * 1000 if !hasEnoughSpaceToAppend() Aug 04 2016 } function updateBufferTimestampOffset(representationInfo) { @@ -732,9 +740,18 @@ function BufferController(config) { } } + function updateAppendWindow() { + if (buffer && !isBufferingCompleted) { + buffer.updateAppendWindow(streamInfo); + } + } + function onDataUpdateCompleted(e) { if (e.sender.getStreamId() !== streamInfo.id || e.sender.getType() !== type) return; if (e.error) return; + if (isBufferingCompleted) { + return; + } updateBufferTimestampOffset(e.currentRepresentation); } @@ -890,23 +907,24 @@ function BufferController(config) { } instance = { - getBufferControllerType: getBufferControllerType, - getRepresentationInfo: getRepresentationInfo, - initialize: initialize, - createBuffer: createBuffer, - dischargePreBuffer: dischargePreBuffer, - getType: getType, - getBuffer: getBuffer, - setBuffer: setBuffer, - getBufferLevel: getBufferLevel, - getRangeAt: getRangeAt, - setMediaSource: setMediaSource, - getMediaSource: getMediaSource, - appendInitSegment: appendInitSegment, - replaceBuffer: replaceBuffer, - getIsBufferingCompleted: getIsBufferingCompleted, - getIsPruningInProgress: getIsPruningInProgress, - reset: reset + getBufferControllerType, + getRepresentationInfo, + initialize, + createBuffer, + dischargePreBuffer, + getType, + getBuffer, + setBuffer, + getBufferLevel, + getRangeAt, + setMediaSource, + getMediaSource, + appendInitSegment, + replaceBuffer, + getIsBufferingCompleted, + getIsPruningInProgress, + reset, + updateAppendWindow }; setup(); diff --git a/src/streaming/controllers/GapController.js b/src/streaming/controllers/GapController.js new file mode 100644 index 0000000000..9815d584b6 --- /dev/null +++ b/src/streaming/controllers/GapController.js @@ -0,0 +1,231 @@ +/** + * The copyright in this software is being made available under the BSD License, + * included below. This software may be subject to other third party and contributor + * rights, including patent rights, and no such rights are granted under this license. + * + * Copyright (c) 2013, Dash Industry Forum. + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without modification, + * are permitted provided that the following conditions are met: + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation and/or + * other materials provided with the distribution. + * * Neither the name of Dash Industry Forum nor the names of its + * contributors may be used to endorse or promote products derived from this software + * without specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS AS IS AND ANY + * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED + * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
+ * IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, + * INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT + * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR + * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, + * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ +import FactoryMaker from '../../core/FactoryMaker'; +import Debug from '../../core/Debug'; +import Events from '../../core/events/Events'; +import EventBus from '../../core/EventBus'; + +const GAP_HANDLER_INTERVAL = 100; +const THRESHOLD_TO_STALLS = 10; +const GAP_THRESHOLD = 0.1; + +function GapController() { + const context = this.context; + const eventBus = EventBus(context).getInstance(); + + let instance, + lastPlaybackTime, + settings, + wallclockTicked, + gapHandlerInterval, + lastGapJumpPosition, + playbackController, + streamController, + videoModel, + logger; + + function initialize() { + registerEvents(); + } + + function setup() { + logger = Debug(context).getInstance().getLogger(instance); + + reset(); + } + + function reset() { + stopGapHandler(); + unregisterEvents(); + resetInitialSettings(); + } + + function resetInitialSettings() { + gapHandlerInterval = null; + lastGapJumpPosition = NaN; + wallclockTicked = 0; + } + + function setConfig(config) { + if (!config) { + return; + } + if (config.settings) { + settings = config.settings; + } + if (config.playbackController) { + playbackController = config.playbackController; + } + if (config.streamController) { + streamController = config.streamController; + } + if (config.videoModel) { + videoModel = config.videoModel; + } + } + + function registerEvents() { + eventBus.on(Events.WALLCLOCK_TIME_UPDATED, onWallclockTimeUpdated, this); + eventBus.on(Events.BYTES_APPENDED_END_FRAGMENT, onBytesAppended, this); + } + + function unregisterEvents() { + eventBus.off(Events.WALLCLOCK_TIME_UPDATED, onWallclockTimeUpdated, this); + eventBus.off(Events.BYTES_APPENDED_END_FRAGMENT, onBytesAppended, this); + } + + function onBytesAppended() { + if (!gapHandlerInterval) { + startGapHandler(); + } + } + + function _shouldCheckForGaps() { + return settings.get().streaming.jumpGaps && streamController.getActiveStreamProcessors().length > 0 && + !playbackController.isSeeking() && !playbackController.isPaused() && !streamController.getIsStreamSwitchInProgress() && + !streamController.getHasMediaOrIntialisationError(); + } + + function onWallclockTimeUpdated(/*e*/) { + if (!_shouldCheckForGaps()) { + return; + } + + wallclockTicked++; + if (wallclockTicked >= THRESHOLD_TO_STALLS) { + const currentTime = playbackController.getTime(); + if (lastPlaybackTime === currentTime) { + jumpGap(currentTime, true); + } else { + lastPlaybackTime = currentTime; + } + wallclockTicked = 0; + } + } + + function getNextRangeStartTime(currentTime) { + try { + const ranges = videoModel.getBufferRange(); + if (!ranges || (ranges.length <= 1 && currentTime > 0)) { + return null; + } + let nextRangeStartTime = null; + let j = 0; + + while (!nextRangeStartTime && j < ranges.length) { + const rangeEnd = j > 0 ? 
ranges.end(j - 1) : 0; + if (currentTime < ranges.start(j) && rangeEnd - currentTime < GAP_THRESHOLD) { + nextRangeStartTime = ranges.start(j); + } + j += 1; + } + return nextRangeStartTime; + + } catch (e) { + return null; + } + } + + + function startGapHandler() { + try { + if (!gapHandlerInterval) { + logger.debug('Starting the gap controller'); + gapHandlerInterval = setInterval(() => { + if (!_shouldCheckForGaps()) { + return; + } + const currentTime = playbackController.getTime(); + jumpGap(currentTime); + + }, GAP_HANDLER_INTERVAL); + } + } catch (e) { + } + } + + function stopGapHandler() { + logger.debug('Stopping the gap controller'); + if (gapHandlerInterval) { + clearInterval(gapHandlerInterval); + gapHandlerInterval = null; + } + } + + function jumpGap(currentTime, playbackStalled = false) { + const smallGapLimit = settings.get().streaming.smallGapLimit; + const jumpLargeGaps = settings.get().streaming.jumpLargeGaps; + let nextRangeStartTime = null; + let seekToPosition = NaN; + let jumpToStreamEnd = false; + + + // Get the range just after current time position + nextRangeStartTime = getNextRangeStartTime(currentTime); + + if (nextRangeStartTime && nextRangeStartTime > 0) { + const gap = nextRangeStartTime - currentTime; + if (gap > 0 && (gap <= smallGapLimit || jumpLargeGaps)) { + seekToPosition = nextRangeStartTime; + } + } + + const timeToStreamEnd = playbackController.getTimeToStreamEnd(); + if (isNaN(seekToPosition) && playbackStalled && isFinite(timeToStreamEnd) && !isNaN(timeToStreamEnd) && (timeToStreamEnd < smallGapLimit)) { + seekToPosition = parseFloat(currentTime + timeToStreamEnd).toFixed(5); + jumpToStreamEnd = true; + } + + if (seekToPosition > 0 && lastGapJumpPosition !== seekToPosition) { + if (jumpToStreamEnd) { + logger.warn(`Jumping to end of stream because of gap from ${currentTime} to ${seekToPosition}. Gap duration: ${seekToPosition - currentTime}`); + eventBus.trigger(Events.GAP_CAUSED_PLAYBACK_SEEK, {seekTime: seekToPosition}); + } else { + logger.warn(`Jumping gap from ${currentTime} to ${seekToPosition}. Gap duration: ${seekToPosition - currentTime}`); + playbackController.seek(seekToPosition, true, true); + } + lastGapJumpPosition = seekToPosition; + } + } + + instance = { + reset, + setConfig, + initialize + }; + + setup(); + + return instance; +} + +GapController.__dashjs_factory_name = 'GapController'; +export default FactoryMaker.getSingletonFactory(GapController); diff --git a/src/streaming/controllers/PlaybackController.js b/src/streaming/controllers/PlaybackController.js index 221b31dc4f..bb5337dd24 100644 --- a/src/streaming/controllers/PlaybackController.js +++ b/src/streaming/controllers/PlaybackController.js @@ -491,7 +491,6 @@ function PlaybackController() { function onPlaybackSeeking() { let seekTime = getTime(); - // On some browsers/devices, in case of live streams, setting current time on video element fails when there is no buffered data at requested time // Then re-set seek target time and video element will be seeked afterwhile once data is buffered (see BufferContoller) if (!isNaN(seekTarget) && seekTarget !== seekTime) { @@ -561,7 +560,8 @@ function PlaybackController() { if (wallclockTimeIntervalId && e.isLast) { // PLAYBACK_ENDED was triggered elsewhere, react. logger.info('onPlaybackEnded -- PLAYBACK_ENDED but native video element didn\'t fire ended'); - videoModel.setCurrentTime(getStreamEndTime()); + const seekTime = e.seekTime ? 
e.seekTime : getStreamEndTime(); + videoModel.setCurrentTime(seekTime); pause(); stopUpdatingWallclockTime(); } diff --git a/src/streaming/controllers/ScheduleController.js b/src/streaming/controllers/ScheduleController.js index 5108e82000..f8196cbb65 100644 --- a/src/streaming/controllers/ScheduleController.js +++ b/src/streaming/controllers/ScheduleController.js @@ -137,6 +137,7 @@ function ScheduleController(config) { if (isStopped) return; logger.debug('Schedule Controller stops'); + logger.debug(type + ' Schedule Controller stops'); isStopped = true; clearTimeout(scheduleTimeout); } @@ -169,7 +170,6 @@ function ScheduleController(config) { if (replacingBuffer || isNaN(lastInitQuality) || switchTrack || isReplacement || hasTopQualityChanged(type, streamId) || bufferLevelRule.execute(type, currentRepresentationInfo, hasVideoTrack)) { - const getNextFragment = function () { if ((currentRepresentationInfo.quality !== lastInitQuality || switchTrack) && (!replacingBuffer)) { if (switchTrack) { @@ -485,7 +485,7 @@ function ScheduleController(config) { } function getBufferTarget() { - return bufferLevelRule.getBufferTarget(type, currentRepresentationInfo, hasVideoTrack); + return bufferLevelRule.getBufferTarget(type, currentRepresentationInfo); } function getType() { diff --git a/src/streaming/controllers/StreamController.js b/src/streaming/controllers/StreamController.js index a908585094..f891027493 100644 --- a/src/streaming/controllers/StreamController.js +++ b/src/streaming/controllers/StreamController.js @@ -49,10 +49,10 @@ import DashJSError from '../vo/DashJSError'; import Errors from '../../core/errors/Errors'; import EventController from './EventController'; +const PLAYBACK_ENDED_TIMER_INTERVAL = 200; +const PREBUFFERING_CAN_START_INTERVAL = 500; + function StreamController() { - // Check whether there is a gap every 40 wallClockUpdateEvent times - const STALL_THRESHOLD_TO_CHECK_GAPS = 40; - const PERIOD_PREFETCH_TIME = 2000; const context = this.context; const eventBus = EventBus(context).getInstance(); @@ -91,14 +91,13 @@ function StreamController() { isPaused, initialPlayback, isPeriodSwitchInProgress, - playbackEndedTimerId, - prefetchTimerId, - wallclockTicked, + playbackEndedTimerInterval, + prebufferingCanStartInterval, buffers, - preloading, - lastPlaybackTime, + preloadingStreams, supportsChangeType, - settings; + settings, + preBufferingCheckInProgress; function setup() { logger = Debug(context).getInstance().getLogger(instance); @@ -140,32 +139,30 @@ function StreamController() { function registerEvents() { eventBus.on(Events.PLAYBACK_TIME_UPDATED, onPlaybackTimeUpdated, this); eventBus.on(Events.PLAYBACK_SEEKING, onPlaybackSeeking, this); + eventBus.on(Events.GAP_CAUSED_PLAYBACK_SEEK, onGapCausedPlaybackSeek, this); eventBus.on(Events.PLAYBACK_ERROR, onPlaybackError, this); eventBus.on(Events.PLAYBACK_STARTED, onPlaybackStarted, this); eventBus.on(Events.PLAYBACK_PAUSED, onPlaybackPaused, this); eventBus.on(Events.PLAYBACK_ENDED, onEnded, this); eventBus.on(Events.MANIFEST_UPDATED, onManifestUpdated, this); - eventBus.on(Events.BUFFERING_COMPLETED, onTrackBufferingCompleted, this); eventBus.on(Events.STREAM_BUFFERING_COMPLETED, onStreamBufferingCompleted, this); eventBus.on(Events.MANIFEST_VALIDITY_CHANGED, onManifestValidityChanged, this); eventBus.on(Events.TIME_SYNCHRONIZATION_COMPLETED, onTimeSyncCompleted, this); - eventBus.on(Events.WALLCLOCK_TIME_UPDATED, onWallclockTimeUpdated, this); eventBus.on(MediaPlayerEvents.METRIC_ADDED, onMetricAdded, this); } function 
unRegisterEvents() { eventBus.off(Events.PLAYBACK_TIME_UPDATED, onPlaybackTimeUpdated, this); eventBus.off(Events.PLAYBACK_SEEKING, onPlaybackSeeking, this); + eventBus.off(Events.GAP_CAUSED_PLAYBACK_SEEK, onGapCausedPlaybackSeek, this); eventBus.off(Events.PLAYBACK_ERROR, onPlaybackError, this); eventBus.off(Events.PLAYBACK_STARTED, onPlaybackStarted, this); eventBus.off(Events.PLAYBACK_PAUSED, onPlaybackPaused, this); eventBus.off(Events.PLAYBACK_ENDED, onEnded, this); eventBus.off(Events.MANIFEST_UPDATED, onManifestUpdated, this); - eventBus.off(Events.BUFFERING_COMPLETED, onTrackBufferingCompleted, this); eventBus.off(Events.STREAM_BUFFERING_COMPLETED, onStreamBufferingCompleted, this); eventBus.off(Events.MANIFEST_VALIDITY_CHANGED, onManifestValidityChanged, this); eventBus.off(Events.TIME_SYNCHRONIZATION_COMPLETED, onTimeSyncCompleted, this); - eventBus.off(Events.WALLCLOCK_TIME_UPDATED, onWallclockTimeUpdated, this); eventBus.off(MediaPlayerEvents.METRIC_ADDED, onMetricAdded, this); } @@ -182,92 +179,17 @@ function StreamController() { } } - function onWallclockTimeUpdated(/*e*/) { - if (!settings.get().streaming.jumpGaps || getActiveStreamProcessors() === 0 || - playbackController.isSeeking() || isPaused || isStreamSwitchingInProgress || - hasMediaError || hasInitialisationError) { - return; - } - - wallclockTicked++; - if (wallclockTicked >= STALL_THRESHOLD_TO_CHECK_GAPS) { - const currentTime = playbackController.getTime(); - if (lastPlaybackTime === currentTime) { - jumpGap(currentTime); - } else { - lastPlaybackTime = currentTime; - } - wallclockTicked = 0; - } - } - - function jumpGap(time) { - const streamProcessors = getActiveStreamProcessors(); - const smallGapLimit = settings.get().streaming.smallGapLimit; - let seekToPosition; - - // Find out what is the right time position to jump to taking - // into account state of buffer - for (let i = 0; i < streamProcessors.length; i++) { - const mediaBuffer = streamProcessors[i].getBuffer(); - const ranges = mediaBuffer.getAllBufferRanges(); - let nextRangeStartTime; - if (!ranges || ranges.length <= 1) continue; - - // Get the range just after current time position - for (let j = 0; j < ranges.length; j++) { - if (time < ranges.start(j)) { - nextRangeStartTime = ranges.start(j); - break; - } - } - - if (nextRangeStartTime > 0) { - const gap = nextRangeStartTime - time; - if (gap > 0 && gap <= smallGapLimit) { - if (seekToPosition === undefined || nextRangeStartTime > seekToPosition) { - seekToPosition = nextRangeStartTime; - } - } - } - } - - const timeToStreamEnd = playbackController.getTimeToStreamEnd(); - if (seekToPosition === undefined && !isNaN(timeToStreamEnd) && timeToStreamEnd < smallGapLimit) { - seekToPosition = time + timeToStreamEnd; - } - - // If there is a safe position to jump to, do the seeking - if (seekToPosition > 0) { - if (!isNaN(timeToStreamEnd) && seekToPosition >= time + timeToStreamEnd) { - logger.info('Jumping media gap (discontinuity) at time ', time, '. Jumping to end of the stream'); - eventBus.trigger(Events.PLAYBACK_ENDED, {'isLast': getActiveStreamInfo().isLast}); - } else { - logger.info('Jumping media gap (discontinuity) at time ', time, '. 
Jumping to time position', seekToPosition); - playbackController.seek(seekToPosition, true, true); - } - } - } - function onPlaybackSeeking(e) { const seekingStream = getStreamForTime(e.seekTime); - //if end period has been detected, stop timer and reset isPeriodSwitchInProgress - if (playbackEndedTimerId) { - stopEndPeriodTimer(); - isPeriodSwitchInProgress = false; - } - - if (prefetchTimerId) { - stopPreloadTimer(); - } - - if (seekingStream === activeStream && preloading) { + if (seekingStream === activeStream && preloadingStreams && preloadingStreams.length > 0) { // Seeking to the current period was requested while preloading the next one, deactivate preloading one - preloading.deactivate(true); + preloadingStreams.forEach((s) => { + s.deactivate(true); + }); } - if (seekingStream && (seekingStream !== activeStream || (preloading && !activeStream.isActive()))) { + if (seekingStream && seekingStream !== activeStream) { // If we're preloading other stream, the active one was deactivated and we need to switch back flushPlaylistMetrics(PlayListTrace.END_OF_PERIOD_STOP_REASON); switchStream(seekingStream, activeStream, e.seekTime); @@ -278,6 +200,13 @@ function StreamController() { createPlaylistMetrics(PlayList.SEEK_START_REASON); } + function onGapCausedPlaybackSeek(e) { + const nextStream = getNextStream(); + flushPlaylistMetrics(PlayListTrace.END_OF_PERIOD_STOP_REASON); + switchStream(nextStream, activeStream, e.seekTime); + createPlaylistMetrics(PlayList.SEEK_START_REASON); + } + function onPlaybackStarted( /*e*/) { logger.debug('[onPlaybackStarted]'); if (initialPlayback) { @@ -287,7 +216,6 @@ function StreamController() { if (isPaused) { isPaused = false; createPlaylistMetrics(PlayList.RESUME_FROM_PAUSE_START_REASON); - toggleEndPeriodTimer(); } } } @@ -297,54 +225,99 @@ function StreamController() { if (!e.ended) { isPaused = true; flushPlaylistMetrics(PlayListTrace.USER_REQUEST_STOP_REASON); - toggleEndPeriodTimer(); } } - function stopEndPeriodTimer() { - logger.debug('[toggleEndPeriodTimer] stop end period timer.'); - clearTimeout(playbackEndedTimerId); - playbackEndedTimerId = undefined; + function startPlaybackEndedTimerInterval() { + if (!playbackEndedTimerInterval) { + playbackEndedTimerInterval = setInterval(function () { + if (!isStreamSwitchingInProgress && playbackController.getTimeToStreamEnd() <= 0) { + eventBus.trigger(Events.PLAYBACK_ENDED, {'isLast': getActiveStreamInfo().isLast}); + } + }, PLAYBACK_ENDED_TIMER_INTERVAL); + } } - function stopPreloadTimer() { - logger.debug('[PreloadTimer] stop period preload timer.'); - clearTimeout(prefetchTimerId); - prefetchTimerId = undefined; + function stopPlaybackEndedTimerInterval() { + clearInterval(playbackEndedTimerInterval); + playbackEndedTimerInterval = null; } - function toggleEndPeriodTimer() { - //stream buffering completed has not been detected, nothing to do.... - if (isPeriodSwitchInProgress) { - //stream buffering completed has been detected, if end period timer is running, stop it, otherwise start it.... - if (playbackEndedTimerId) { - stopEndPeriodTimer(); - } else { - const timeToEnd = playbackController.getTimeToStreamEnd(); - const delayPlaybackEnded = timeToEnd > 0 ? timeToEnd * 1000 : 0; - const prefetchDelay = delayPlaybackEnded < PERIOD_PREFETCH_TIME ? 
delayPlaybackEnded / 4 : delayPlaybackEnded - PERIOD_PREFETCH_TIME; - logger.debug('[toggleEndPeriodTimer] Going to fire preload in', prefetchDelay, 'milliseconds'); - prefetchTimerId = setTimeout(onStreamCanLoadNext, prefetchDelay); - logger.debug('[toggleEndPeriodTimer] start-up of timer to notify PLAYBACK_ENDED event. It will be triggered in', delayPlaybackEnded, 'milliseconds'); - playbackEndedTimerId = setTimeout(function () { - eventBus.trigger(Events.PLAYBACK_ENDED, {'isLast': getActiveStreamInfo().isLast}); - }, delayPlaybackEnded); + function startCheckIfPrebufferingCanStartInterval() { + if (!prebufferingCanStartInterval) { + prebufferingCanStartInterval = setInterval(function () { + checkIfPrebufferingCanStart(); + }, PREBUFFERING_CAN_START_INTERVAL); + } + } + + function stopCheckIfPrebufferingCanStartInterval() { + clearInterval(prebufferingCanStartInterval); + prebufferingCanStartInterval = null; + } + + function checkIfPrebufferingCanStart() { + // In multiperiod situations, we constantly check if the streams have finished buffering so we can immediately start buffering the next stream + if (!activeStream || !hasStreamFinishedBuffering(activeStream)) { + return; + } + const upcomingStreams = getNextStreams(activeStream); + let i = 0; + + while (i < upcomingStreams.length) { + const stream = upcomingStreams[i]; + const previousStream = i === 0 ? activeStream : upcomingStreams[i - 1]; + // If the preloading for the current stream is not scheduled, but its predecessor has finished buffering we can start prebuffering this stream + if (!stream.getPreloadingScheduled() && (hasStreamFinishedBuffering(previousStream))) { + + if (mediaSource) { + // We can not start prebuffering if the start of the next period is in the future. This will cause problems when calculating the segmentAvailabilityRange and updating the representations in the RepresentationController + // As long as the timeline converter returns an invalid range we do not start the prebuffering + const mediaTypes = [Constants.VIDEO, Constants.AUDIO]; + let segmentAvailabilityRangeIsOk = true; + + mediaTypes.forEach((mediaType) => { + const mediaInfo = adapter.getMediaInfoForType(stream.getStreamInfo(), mediaType); + const voRepresentations = adapter.getVoRepresentations(mediaInfo); + voRepresentations.forEach((voRep) => { + const range = timelineConverter.calcSegmentAvailabilityRange(voRep, true); + + if (range.end < range.start) { + segmentAvailabilityRangeIsOk = false; + } + }); + }); + + if (segmentAvailabilityRangeIsOk) { + onStreamCanLoadNext(stream, previousStream); + } + } } + i += 1; } } - function onTrackBufferingCompleted(e) { - // In multiperiod situations, as soon as one of the tracks (AUDIO, VIDEO) is finished we should - // start doing prefetching of the next period - if (e.mediaType !== Constants.AUDIO && e.mediaType !== Constants.VIDEO) return; + function hasStreamFinishedBuffering(stream) { + try { + if (!stream) { + return false; + } + const streamProcessors = stream.getProcessors().filter((sp) => { + return sp.getType() === Constants.AUDIO || sp.getType() === Constants.VIDEO; + }); - const isLast = getActiveStreamInfo().isLast; - if (mediaSource && !isLast && playbackEndedTimerId === undefined) { - logger.info('[onTrackBufferingCompleted] end of period detected. 
Track', e.mediaType, 'has finished'); - isPeriodSwitchInProgress = true; - if (isPaused === false) { - toggleEndPeriodTimer(); + if (!streamProcessors || streamProcessors.length === 0) { + return false; } + + const unfinishedStreamProcessors = streamProcessors.filter((sp) => { + return !sp.isBufferingCompleted(); + }); + + return unfinishedStreamProcessors && unfinishedStreamProcessors.length === 0; + + } catch (e) { + return false; } } @@ -356,45 +329,48 @@ function StreamController() { } } - function onStreamCanLoadNext() { - const isLast = getActiveStreamInfo().isLast; - - if (mediaSource && !isLast) { - const newStream = getNextStream(); + function onStreamCanLoadNext(nextStream, previousStream = null) { + if (mediaSource && !nextStream.getPreloaded()) { // Seamless period switch allowed only if: // - none of the periods uses contentProtection. // - AND changeType method implemented by browser or periods use the same codec. - let seamlessPeriodSwitch = activeStream.isProtectionCompatible(newStream) && - (supportsChangeType || activeStream.isMediaCodecCompatible(newStream)); + let seamlessPeriodSwitch = previousStream.isProtectionCompatible(nextStream) && + (supportsChangeType || previousStream.isMediaCodecCompatible(nextStream)); if (seamlessPeriodSwitch) { + nextStream.setPreloadingScheduled(true); logger.info('[onStreamCanLoadNext] Preloading next stream'); - activeStream.deactivate(true); - newStream.preload(mediaSource, buffers); - preloading = newStream; - newStream.getProcessors().forEach(p => { - p.setIndexHandlerTime(newStream.getStartTime()); + isPeriodSwitchInProgress = true; + nextStream.preload(mediaSource, buffers); + preloadingStreams.push(nextStream); + nextStream.getProcessors().forEach(p => { + p.setIndexHandlerTime(nextStream.getStartTime()); }); } } } function getStreamForTime(time) { - let duration = 0; + + if (isNaN(time)) { + return null; + } + + let streamDuration = 0; let stream = null; const ln = streams.length; if (ln > 0) { - duration += streams[0].getStartTime(); + streamDuration += streams[0].getStartTime(); } for (let i = 0; i < ln; i++) { stream = streams[i]; - duration = parseFloat((duration + stream.getDuration()).toFixed(5)); + streamDuration = parseFloat(streamDuration + stream.getDuration()); - if (time < duration) { + if (time < streamDuration) { return stream; } } @@ -443,26 +419,49 @@ function StreamController() { } function onEnded() { - const nextStream = getNextStream(); - if (nextStream) { - switchStream(nextStream, activeStream, NaN); - } else { - logger.debug('StreamController no next stream found'); + if (!activeStream.getIsEndedEventSignaled()) { + activeStream.setIsEndedEventSignaled(true); + const nextStream = getNextStream(); + if (nextStream) { + switchStream(nextStream, activeStream, NaN); + } else { + logger.debug('StreamController no next stream found'); + activeStream.setIsEndedEventSignaled(false); + } + flushPlaylistMetrics(nextStream ? PlayListTrace.END_OF_PERIOD_STOP_REASON : PlayListTrace.END_OF_CONTENT_STOP_REASON); + isPeriodSwitchInProgress = false; } - flushPlaylistMetrics(nextStream ? PlayListTrace.END_OF_PERIOD_STOP_REASON : PlayListTrace.END_OF_CONTENT_STOP_REASON); - playbackEndedTimerId = undefined; - isPeriodSwitchInProgress = false; } - function getNextStream() { - if (activeStream) { - const start = getActiveStreamInfo().start; - const duration = getActiveStreamInfo().duration; + function getNextStream(stream = null) { + const refStream = stream ? stream : activeStream ? 
activeStream : null; + if (refStream) { + const start = refStream.getStreamInfo().start; + const duration = refStream.getStreamInfo().duration; + const streamEnd = parseFloat((start + duration).toFixed(5)); return streams.filter(function (stream) { - return (stream.getStreamInfo().start === parseFloat((start + duration).toFixed(5))); + return (Math.abs(stream.getStreamInfo().start - streamEnd) <= 0.1); })[0]; } + + return null; + } + + function getNextStreams(stream) { + try { + const refStream = stream ? stream : activeStream ? activeStream : null; + + if (refStream) { + const start = refStream.getStreamInfo().start; + + return streams.filter(function (stream) { + return (stream.getStreamInfo().start > start); + }); + } + } catch (e) { + return []; + } } function switchStream(stream, previousStream, seekTime) { @@ -490,7 +489,9 @@ function StreamController() { seekTime = !isNaN(seekTime) ? seekTime : (!seamlessPeriodSwitch && previousStream ? stream.getStreamInfo().start : NaN); activeStream = stream; - preloading = false; + preloadingStreams = preloadingStreams.filter((s) => { + return s.getId() !== activeStream.getId(); + }); playbackController.initialize(getActiveStreamInfo(), !!previousStream, seekTime); if (videoModel.getElement()) { //TODO detect if we should close jump to activateStream. @@ -498,6 +499,7 @@ function StreamController() { } else { activateStream(seekTime, seamlessPeriodSwitch); } + isPeriodSwitchInProgress = false; } function switchToVideoElement(seekTime) { @@ -590,8 +592,11 @@ function StreamController() { function setMediaDuration(duration) { const manifestDuration = duration ? duration : getActiveStreamInfo().manifestInfo.duration; - const mediaDuration = mediaSourceController.setDuration(mediaSource, manifestDuration); - logger.debug('Duration successfully set to: ' + mediaDuration); + + if (manifestDuration && !isNaN(manifestDuration)) { + const mediaDuration = mediaSourceController.setDuration(mediaSource, manifestDuration); + logger.debug('Duration successfully set to: ' + mediaDuration); + } } function getComposedStream(streamInfo) { @@ -661,16 +666,18 @@ function StreamController() { // we need to figure out what the correct starting period is let initialStream = null; - const startTimeFormUri = playbackController.getStartTimeFromUriParameters(streamsInfo[0].start, adapter.getIsDynamic()); - if (!isNaN(startTimeFormUri)) { - initialStream = getStreamForTime(startTimeFormUri); - } + const startTimeFromUri = playbackController.getStartTimeFromUriParameters(streamsInfo[0].start, adapter.getIsDynamic()); + + initialStream = getStreamForTime(startTimeFromUri); + // For multiperiod streams we should avoid a switch of streams after the seek to the live edge. So we do a calculation of the expected seek time to find the right stream object. if (!initialStream && adapter.getIsDynamic() && streams.length) { logger.debug('Dynamic stream: Trying to find the correct starting period'); initialStream = getInitialStream(); } switchStream(initialStream !== null ? initialStream : streams[0], null, NaN); + startPlaybackEndedTimerInterval(); + startCheckIfPrebufferingCanStartInterval(); } eventBus.trigger(Events.STREAMS_COMPOSED); @@ -884,6 +891,14 @@ function StreamController() { return activeStream ? 
diff --git a/src/streaming/rules/abr/BolaRule.js b/src/streaming/rules/abr/BolaRule.js
index 2ab77c96d1..9f976b480e 100644
--- a/src/streaming/rules/abr/BolaRule.js
+++ b/src/streaming/rules/abr/BolaRule.js
@@ -73,7 +73,6 @@ function BolaRule(config) {
         eventBus.on(Events.BUFFER_EMPTY, onBufferEmpty, instance);
         eventBus.on(Events.PLAYBACK_SEEKING, onPlaybackSeeking, instance);
-        eventBus.on(Events.PERIOD_SWITCH_STARTED, onPeriodSwitchStarted, instance);
         eventBus.on(Events.MEDIA_FRAGMENT_LOADED, onMediaFragmentLoaded, instance);
         eventBus.on(Events.METRIC_ADDED, onMetricAdded, instance);
         eventBus.on(Events.QUALITY_CHANGE_REQUESTED, onQualityChangeRequested, instance);
@@ -280,10 +279,6 @@ function BolaRule(config) {
         }
     }

-    function onPeriodSwitchStarted() {
-        // TODO: does this have to be handled here?
-    }
-
     function onMediaFragmentLoaded(e) {
         if (e && e.chunk && e.chunk.mediaInfo) {
             const bolaState = bolaStateDict[e.chunk.mediaInfo.type];
@@ -521,7 +516,6 @@ function BolaRule(config) {
         eventBus.off(Events.BUFFER_EMPTY, onBufferEmpty, instance);
         eventBus.off(Events.PLAYBACK_SEEKING, onPlaybackSeeking, instance);
-        eventBus.off(Events.PERIOD_SWITCH_STARTED, onPeriodSwitchStarted, instance);
         eventBus.off(Events.MEDIA_FRAGMENT_LOADED, onMediaFragmentLoaded, instance);
         eventBus.off(Events.METRIC_ADDED, onMetricAdded, instance);
         eventBus.off(Events.QUALITY_CHANGE_REQUESTED, onQualityChangeRequested, instance);
diff --git a/src/streaming/rules/scheduling/BufferLevelRule.js b/src/streaming/rules/scheduling/BufferLevelRule.js
index c61f408b70..a9de8d22c7 100644
--- a/src/streaming/rules/scheduling/BufferLevelRule.js
+++ b/src/streaming/rules/scheduling/BufferLevelRule.js
@@ -44,15 +44,15 @@ function BufferLevelRule(config) {
     function setup() {
     }

-    function execute(type, representationInfo, hasVideoTrack) {
+    function execute(type, representationInfo) {
         if (!type || !representationInfo) {
             return true;
         }
         const bufferLevel = dashMetrics.getCurrentBufferLevel(type);
-        return bufferLevel < getBufferTarget(type, representationInfo, hasVideoTrack);
+        return bufferLevel < getBufferTarget(type, representationInfo);
     }

-    function getBufferTarget(type, representationInfo, hasVideoTrack) {
+    function getBufferTarget(type, representationInfo) {
         let bufferTarget = NaN;

         if (!type || !representationInfo) {
@@ -72,14 +72,7 @@ function BufferLevelRule(config) {
             } else { // text is disabled, rule will return false
                 bufferTarget = 0;
             }
-        } else if (type === Constants.AUDIO && hasVideoTrack) {
-            const videoBufferLevel = dashMetrics.getCurrentBufferLevel(Constants.VIDEO);
-            if (isNaN(representationInfo.fragmentDuration)) {
-                bufferTarget = videoBufferLevel;
-            } else {
-                bufferTarget = Math.max(videoBufferLevel, representationInfo.fragmentDuration);
-            }
-        } else {
+        } else {
             const streamInfo = representationInfo.mediaInfo.streamInfo;
             if (abrController.isPlayingAtTopQuality(streamInfo)) {
                 const isLongFormContent = streamInfo.manifestInfo.duration >= settings.get().streaming.longFormContentDurationThreshold;
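With the audio-specific branch above removed, getBufferTarget() no longer ties the audio target to the current video buffer level; audio falls through to the same branch as video. A rough sketch of the resulting decision, using placeholder option and setting names rather than the actual BufferLevelRule internals:

// Rough sketch, not the real implementation: after this change the target depends
// only on the media type, text enablement, and top-quality/long-form state.
function sketchBufferTarget(type, opts, cfg) {
    if (type === 'fragmentedText') {
        // Text either buffers like other media or, when disabled, contributes nothing.
        return opts.textEnabled ? cfg.stableBufferTime : 0;
    }
    if (opts.isPlayingAtTopQuality) {
        // Larger targets at the top rendition, larger still for long-form content.
        return opts.isLongFormContent ? cfg.bufferTimeAtTopQualityLongForm : cfg.bufferTimeAtTopQuality;
    }
    return cfg.stableBufferTime; // audio now uses the same default target as video
}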
diff --git a/src/streaming/text/TextBufferController.js b/src/streaming/text/TextBufferController.js
index 427354458a..d153e4db27 100644
--- a/src/streaming/text/TextBufferController.js
+++ b/src/streaming/text/TextBufferController.js
@@ -83,8 +83,8 @@ function TextBufferController(config) {
         return _BufferControllerImpl.initialize(source, StreamProcessor);
     }

-    function createBuffer(mediaInfoArr) {
-        return _BufferControllerImpl.createBuffer(mediaInfoArr);
+    function createBuffer(mediaInfoArr, previousBuffers) {
+        return _BufferControllerImpl.createBuffer(mediaInfoArr, previousBuffers);
     }

     function getType() {
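createBuffer() now forwards a previousBuffers argument to the underlying buffer controller, which suggests buffers created for an earlier period can be handed over instead of being torn down during a seamless period switch. A hypothetical sketch of that hand-over pattern (the real BufferController logic is not part of this patch, so the names here are illustrative):

// Hypothetical illustration only: reuse a SourceBuffer from the previous period
// when one exists for the same media type, otherwise create a fresh one.
function createOrReuseBuffer(mediaSource, mediaInfo, previousBuffers) {
    const previous = previousBuffers && previousBuffers[mediaInfo.type];
    if (previous) {
        // Continuing to append to the existing SourceBuffer avoids re-initialising
        // the media pipeline at the period boundary.
        return previous;
    }
    return mediaSource.addSourceBuffer(mediaInfo.codec);
}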
diff --git a/test/unit/streaming.rules.scheduling.BufferLevelRule.js b/test/unit/streaming.rules.scheduling.BufferLevelRule.js
index 130a40c4bd..7270448b9a 100644
--- a/test/unit/streaming.rules.scheduling.BufferLevelRule.js
+++ b/test/unit/streaming.rules.scheduling.BufferLevelRule.js
@@ -61,7 +61,7 @@ describe('BufferLevelRule', function () {
     it('should return 15 (value returns by getCurrentBufferLevel of DashMetricsMock) if streamProcessor is defined and current representation is audio and videoTrackPresent is true', function () {
         const result = bufferLevelRule.getBufferTarget(testAudioType, representationInfo, true);

-        expect(result).to.be.equal(15); // jshint ignore:line
+        expect(result).to.be.equal(12); // jshint ignore:line
     });

     it('should return 12 (DEFAULT_MIN_BUFFER_TIME of MediaPlayerModelMock) if streamProcessor is defined and current representation is audio and videoTrackPresent is false', function () {