Diff
Modified: trunk/LayoutTests/ChangeLog (254669 => 254670)
--- trunk/LayoutTests/ChangeLog 2020-01-16 10:32:35 UTC (rev 254669)
+++ trunk/LayoutTests/ChangeLog 2020-01-16 10:43:19 UTC (rev 254670)
@@ -1,3 +1,14 @@
+2020-01-16 Alicia Boya García <ab...@igalia.com>
+
+ [MSE] Don't enqueue samples that start at a big discontinuity
+ https://bugs.webkit.org/show_bug.cgi?id=201323
+
+ Reviewed by Xabier Rodriguez-Calvar.
+
+ * media/media-source/media-source-append-presentation-durations.html: Added.
+ * media/media-source/media-source-first-append-not-starting-at-zero.html: Added.
+ * media/media-source/media-source-remove-b-frame.html: Added.
+
2020-01-15 Lauro Moura <lmo...@igalia.com>
[GTK] Gardening tests using language override
Added: trunk/LayoutTests/media/media-source/media-source-append-presentation-durations-expected.txt (0 => 254670)
--- trunk/LayoutTests/media/media-source/media-source-append-presentation-durations-expected.txt (rev 0)
+++ trunk/LayoutTests/media/media-source/media-source-append-presentation-durations-expected.txt 2020-01-16 10:43:19 UTC (rev 254670)
@@ -0,0 +1,24 @@
+
+EXPECTED (source.readyState == 'closed') OK
+EVENT(sourceopen)
+RUN(sourceBuffer = source.addSourceBuffer("video/mock; codecs=mock"))
+RUN(sourceBuffer.appendBuffer(initSegment))
+EVENT(updateend)
+RUN(sourceBuffer.appendBuffer(doubleIbpSampleRun()))
+EVENT(updateend)
+Buffered:
+{PTS({0/10 = 0.000000}), DTS({0/10 = 0.000000}), duration({5/10 = 0.500000}), flags(1), generation(0)}
+{PTS({20/10 = 2.000000}), DTS({5/10 = 0.500000}), duration({5/10 = 0.500000}), flags(0), generation(0)}
+{PTS({10/10 = 1.000000}), DTS({10/10 = 1.000000}), duration({20/10 = 2.000000}), flags(0), generation(0)}
+{PTS({30/10 = 3.000000}), DTS({30/10 = 3.000000}), duration({5/10 = 0.500000}), flags(1), generation(0)}
+{PTS({50/10 = 5.000000}), DTS({35/10 = 3.500000}), duration({5/10 = 0.500000}), flags(0), generation(0)}
+{PTS({40/10 = 4.000000}), DTS({40/10 = 4.000000}), duration({20/10 = 2.000000}), flags(0), generation(0)}
+Enqueued:
+{PTS({0/10 = 0.000000}), DTS({0/10 = 0.000000}), duration({5/10 = 0.500000}), flags(1), generation(0)}
+{PTS({20/10 = 2.000000}), DTS({5/10 = 0.500000}), duration({5/10 = 0.500000}), flags(0), generation(0)}
+{PTS({10/10 = 1.000000}), DTS({10/10 = 1.000000}), duration({20/10 = 2.000000}), flags(0), generation(0)}
+{PTS({30/10 = 3.000000}), DTS({30/10 = 3.000000}), duration({5/10 = 0.500000}), flags(1), generation(0)}
+{PTS({50/10 = 5.000000}), DTS({35/10 = 3.500000}), duration({5/10 = 0.500000}), flags(0), generation(0)}
+{PTS({40/10 = 4.000000}), DTS({40/10 = 4.000000}), duration({20/10 = 2.000000}), flags(0), generation(0)}
+END OF TEST
+
Added: trunk/LayoutTests/media/media-source/media-source-append-presentation-durations.html (0 => 254670)
--- trunk/LayoutTests/media/media-source/media-source-append-presentation-durations.html (rev 0)
+++ trunk/LayoutTests/media/media-source/media-source-append-presentation-durations.html 2020-01-16 10:43:19 UTC (rev 254670)
@@ -0,0 +1,64 @@
+<!DOCTYPE html>
+<html>
+<head>
+ <title>media-source-append-acb-no-frame-lost</title>
+ <script src="mock-media-source.js"></script>
+ <script src="../video-test.js"></script>
+ <script>
+ var source;
+ var sourceBuffer;
+ var initSegment;
+
+ if (window.internals)
+ internals.initializeMockMediaSource();
+
+ function doubleIbpSampleRun() {
+ // IBP in presentation order, IPB in decode order.
+ // Some MP4 files do a trick where they play with the decode durations to have B-frames with the presentation
+ // timeline starting at zero without using edit lists.
+ // This test checks these kind of files are handled correctly.
+ return concatenateSamples([
+ makeASample( 0, 0, 5, 10, 1, SAMPLE_FLAG.SYNC), // I
+ makeASample(20, 5, 5, 10, 1, SAMPLE_FLAG.NONE), // P
+ makeASample(10, 10, 20, 10, 1, SAMPLE_FLAG.NONE), // B
+ makeASample(30, 30, 5, 10, 1, SAMPLE_FLAG.SYNC), // I
+ makeASample(50, 35, 5, 10, 1, SAMPLE_FLAG.NONE), // P
+ makeASample(40, 40, 20, 10, 1, SAMPLE_FLAG.NONE), // B
+ ]);
+ }
+
+ window.addEventListener('load', async () => {
+ findMediaElement();
+ source = new MediaSource();
+ testExpected('source.readyState', 'closed');
+ const sourceOpened = waitFor(source, 'sourceopen');
+
+ const videoSource = document.createElement('source');
+ videoSource.type = 'video/mock; codecs=mock';
+ videoSource.src = URL.createObjectURL(source);
+ video.appendChild(videoSource);
+
+ await sourceOpened;
+ run('sourceBuffer = source.addSourceBuffer("video/mock; codecs=mock")');
+
+ initSegment = makeAInit(20, [makeATrack(1, 'mock', TRACK_KIND.VIDEO)]);
+
+ run('sourceBuffer.appendBuffer(initSegment)');
+ await waitFor(sourceBuffer, 'updateend');
+
+ run('sourceBuffer.appendBuffer(doubleIbpSampleRun())');
+ await waitFor(sourceBuffer, 'updateend');
+
+ consoleWrite("Buffered:");
+ internals.bufferedSamplesForTrackID(sourceBuffer, 1).forEach(consoleWrite);
+ consoleWrite("Enqueued:");
+ internals.enqueuedSamplesForTrackID(sourceBuffer, 1).forEach(consoleWrite);
+
+ endTest();
+ });
+ </script>
+</head>
+<body>
+ <video controls></video>
+</body>
+</html>
\ No newline at end of file
Added: trunk/LayoutTests/media/media-source/media-source-first-append-not-starting-at-zero-expected.txt (0 => 254670)
--- trunk/LayoutTests/media/media-source/media-source-first-append-not-starting-at-zero-expected.txt (rev 0)
+++ trunk/LayoutTests/media/media-source/media-source-first-append-not-starting-at-zero-expected.txt 2020-01-16 10:43:19 UTC (rev 254670)
@@ -0,0 +1,24 @@
+
+EXPECTED (source.readyState == 'closed') OK
+EVENT(sourceopen)
+RUN(sourceBuffer = source.addSourceBuffer("video/mock; codecs=mock"))
+RUN(sourceBuffer.appendBuffer(initSegment))
+EVENT(updateend)
+RUN(sourceBuffer.appendBuffer(syncSampleRun(5, 10)))
+EVENT(updateend)
+Enqueued so far: (expecting no frames yet)
+RUN(sourceBuffer.appendBuffer(syncSampleRun(0, 5)))
+EVENT(updateend)
+Enqueued now:
+{PTS({0/1 = 0.000000}), DTS({0/1 = 0.000000}), duration({1/1 = 1.000000}), flags(1), generation(0)}
+{PTS({1/1 = 1.000000}), DTS({1/1 = 1.000000}), duration({1/1 = 1.000000}), flags(0), generation(0)}
+{PTS({2/1 = 2.000000}), DTS({2/1 = 2.000000}), duration({1/1 = 1.000000}), flags(0), generation(0)}
+{PTS({3/1 = 3.000000}), DTS({3/1 = 3.000000}), duration({1/1 = 1.000000}), flags(0), generation(0)}
+{PTS({4/1 = 4.000000}), DTS({4/1 = 4.000000}), duration({1/1 = 1.000000}), flags(0), generation(0)}
+{PTS({5/1 = 5.000000}), DTS({5/1 = 5.000000}), duration({1/1 = 1.000000}), flags(1), generation(0)}
+{PTS({6/1 = 6.000000}), DTS({6/1 = 6.000000}), duration({1/1 = 1.000000}), flags(0), generation(0)}
+{PTS({7/1 = 7.000000}), DTS({7/1 = 7.000000}), duration({1/1 = 1.000000}), flags(0), generation(0)}
+{PTS({8/1 = 8.000000}), DTS({8/1 = 8.000000}), duration({1/1 = 1.000000}), flags(0), generation(0)}
+{PTS({9/1 = 9.000000}), DTS({9/1 = 9.000000}), duration({1/1 = 1.000000}), flags(0), generation(0)}
+END OF TEST
+
Added: trunk/LayoutTests/media/media-source/media-source-first-append-not-starting-at-zero.html (0 => 254670)
--- trunk/LayoutTests/media/media-source/media-source-first-append-not-starting-at-zero.html (rev 0)
+++ trunk/LayoutTests/media/media-source/media-source-first-append-not-starting-at-zero.html 2020-01-16 10:43:19 UTC (rev 254670)
@@ -0,0 +1,60 @@
+<!DOCTYPE html>
+<html>
+<head>
+ <title>media-source-append-not-starting-at-zero</title>
+ <script src="mock-media-source.js"></script>
+ <script src="../video-test.js"></script>
+ <script>
+ var source;
+ var sourceBuffer;
+ var initSegment;
+
+ if (window.internals)
+ internals.initializeMockMediaSource();
+
+ function syncSampleRun(start, end) {
+ const samples = [];
+ for (let time = start; time < end; time++)
+ samples.push(makeASample(time, time, 1, 1, 1, time === start ? SAMPLE_FLAG.SYNC : SAMPLE_FLAG.NONE));
+ return concatenateSamples(samples);
+ }
+
+ window.addEventListener('load', async () => {
+ findMediaElement();
+ source = new MediaSource();
+ testExpected('source.readyState', 'closed');
+ const sourceOpened = waitFor(source, 'sourceopen');
+
+ const videoSource = document.createElement('source');
+ videoSource.type = 'video/mock; codecs=mock';
+ videoSource.src = URL.createObjectURL(source);
+ video.appendChild(videoSource);
+
+ await sourceOpened;
+ run('sourceBuffer = source.addSourceBuffer("video/mock; codecs=mock")');
+
+ initSegment = makeAInit(10, [makeATrack(1, 'mock', TRACK_KIND.VIDEO)]);
+
+ run('sourceBuffer.appendBuffer(initSegment)');
+ await waitFor(sourceBuffer, 'updateend');
+
+ run('sourceBuffer.appendBuffer(syncSampleRun(5, 10))');
+ await waitFor(sourceBuffer, 'updateend');
+
+ consoleWrite("Enqueued so far: (expecting no frames yet)");
+ internals.enqueuedSamplesForTrackID(sourceBuffer, 1).forEach(consoleWrite);
+
+ run('sourceBuffer.appendBuffer(syncSampleRun(0, 5))');
+ await waitFor(sourceBuffer, 'updateend');
+
+ consoleWrite("Enqueued now:");
+ internals.enqueuedSamplesForTrackID(sourceBuffer, 1).forEach(consoleWrite);
+
+ endTest();
+ });
+ </script>
+</head>
+<body>
+ <video controls></video>
+</body>
+</html>
\ No newline at end of file
Added: trunk/LayoutTests/media/media-source/media-source-remove-b-frame-expected.txt (0 => 254670)
--- trunk/LayoutTests/media/media-source/media-source-remove-b-frame-expected.txt (rev 0)
+++ trunk/LayoutTests/media/media-source/media-source-remove-b-frame-expected.txt 2020-01-16 10:43:19 UTC (rev 254670)
@@ -0,0 +1,24 @@
+
+EXPECTED (source.readyState == 'closed') OK
+EVENT(sourceopen)
+RUN(sourceBuffer = source.addSourceBuffer("video/mock; codecs=mock"))
+RUN(sourceBuffer.appendBuffer(initSegment))
+EVENT(updateend)
+RUN(sourceBuffer.appendBuffer(ibpSampleRun()))
+EVENT(updateend)
+{PTS({0/1 = 0.000000}), DTS({-1/1 = -1.000000}), duration({1/1 = 1.000000}), flags(1), generation(0)}
+{PTS({2/1 = 2.000000}), DTS({0/1 = 0.000000}), duration({1/1 = 1.000000}), flags(0), generation(0)}
+{PTS({1/1 = 1.000000}), DTS({1/1 = 1.000000}), duration({1/1 = 1.000000}), flags(0), generation(0)}
+RUN(sourceBuffer.remove(1, 2))
+EVENT(updateend)
+RUN(sourceBuffer.appendBuffer(new Uint8Array()))
+EVENT(updateend)
+The frame with presentation time [1, 2) should have been deleted from the buffered samples and a reenqueue must have happened. That frame should not have been enqueued.
+Buffered:
+{PTS({0/1 = 0.000000}), DTS({-1/1 = -1.000000}), duration({1/1 = 1.000000}), flags(1), generation(0)}
+{PTS({2/1 = 2.000000}), DTS({0/1 = 0.000000}), duration({1/1 = 1.000000}), flags(0), generation(0)}
+Enqueued:
+{PTS({0/1 = 0.000000}), DTS({-1/1 = -1.000000}), duration({1/1 = 1.000000}), flags(1), generation(0)}
+{PTS({2/1 = 2.000000}), DTS({0/1 = 0.000000}), duration({1/1 = 1.000000}), flags(0), generation(0)}
+END OF TEST
+
Added: trunk/LayoutTests/media/media-source/media-source-remove-b-frame.html (0 => 254670)
--- trunk/LayoutTests/media/media-source/media-source-remove-b-frame.html (rev 0)
+++ trunk/LayoutTests/media/media-source/media-source-remove-b-frame.html 2020-01-16 10:43:19 UTC (rev 254670)
@@ -0,0 +1,69 @@
+<!DOCTYPE html>
+<html>
+<head>
+ <title>media-source-append-remove-b-frame</title>
+ <script src="mock-media-source.js"></script>
+ <script src="../video-test.js"></script>
+ <script>
+ var source;
+ var sourceBuffer;
+ var initSegment;
+
+ if (window.internals)
+ internals.initializeMockMediaSource();
+
+ function ibpSampleRun() {
+ // IBP in presentation order, IPB in decode order.
+ return concatenateSamples([
+ makeASample(0, -1, 1, 1, 1, SAMPLE_FLAG.SYNC), // I
+ makeASample(2, 0, 1, 1, 1, SAMPLE_FLAG.NONE), // P
+ makeASample(1, 1, 1, 1, 1, SAMPLE_FLAG.NONE), // B
+ ]);
+ }
+
+ window.addEventListener('load', async () => {
+ findMediaElement();
+ source = new MediaSource();
+ testExpected('source.readyState', 'closed');
+ const sourceOpened = waitFor(source, 'sourceopen');
+
+ const videoSource = document.createElement('source');
+ videoSource.type = 'video/mock; codecs=mock';
+ videoSource.src = URL.createObjectURL(source);
+ video.appendChild(videoSource);
+
+ await sourceOpened;
+ run('sourceBuffer = source.addSourceBuffer("video/mock; codecs=mock")');
+
+ initSegment = makeAInit(10, [makeATrack(1, 'mock', TRACK_KIND.VIDEO)]);
+
+ run('sourceBuffer.appendBuffer(initSegment)');
+ await waitFor(sourceBuffer, 'updateend');
+
+ run('sourceBuffer.appendBuffer(ibpSampleRun())');
+ await waitFor(sourceBuffer, 'updateend');
+ internals.enqueuedSamplesForTrackID(sourceBuffer, 1).forEach(consoleWrite);
+
+ run('sourceBuffer.remove(1, 2)');
+ await waitFor(sourceBuffer, 'updateend');
+
+ // Even if .remove() sets trackBuffer.needsReenqueueing to true, reenqueueing will only happen after an append.
+ // So let's make an empty append.
+ run('sourceBuffer.appendBuffer(new Uint8Array())');
+ await waitFor(sourceBuffer, 'updateend');
+
+ consoleWrite("The frame with presentation time [1, 2) should have been deleted from the buffered samples and " +
+ "a reenqueue must have happened. That frame should not have been enqueued.");
+ consoleWrite("Buffered:");
+ internals.bufferedSamplesForTrackID(sourceBuffer, 1).forEach(consoleWrite);
+ consoleWrite("Enqueued:");
+ internals.enqueuedSamplesForTrackID(sourceBuffer, 1).forEach(consoleWrite);
+
+ endTest();
+ });
+ </script>
+</head>
+<body>
+ <video controls></video>
+</body>
+</html>
\ No newline at end of file
Modified: trunk/Source/WebCore/ChangeLog (254669 => 254670)
--- trunk/Source/WebCore/ChangeLog 2020-01-16 10:32:35 UTC (rev 254669)
+++ trunk/Source/WebCore/ChangeLog 2020-01-16 10:43:19 UTC (rev 254670)
@@ -1,3 +1,46 @@
+2020-01-16 Alicia Boya García <ab...@igalia.com>
+
+ [MSE] Don't enqueue samples that start at a big discontinuity
+ https://bugs.webkit.org/show_bug.cgi?id=201323
+
+ With the old logic SourceBuffer was enqueueing the first frame to be
+ appended in any circumstances. This was a bug because the user could
+ append first [5, 10) and then [0, 5). With the old behavior [5, 10)
+ would be enqueued first despite being clearly ahead of the initial
+ playback time (zero). By the time [0, 5) is enqueued it can't be
+ enqueued anymore because the decodeQueue is already ahead.
+
+ This patch fixes that logic to work when the first segments are
+ appended unordered. The test media-source-first-append-not-starting-at-zero.html
+ validates it.
+
+ The test media-source-append-presentation-durations.html checks the
+ new logic does not break in presence of presentation duration !=
+ decode duration.
+
+ As part of the same logic block, the lastEnqueuedPresentationTime was
+ used to decide when it's necessary to perform reenqueue after an
+ .erase() (it is necessary if any enqueued frames are replaced). Using
+ lastEnqueuedPresentationTime was not entirely accurate in presence of
+ B-frames, as you could erase a frame that has a presentation time
+ higher than the last enqueued one. That logic is replaced with a
+ monotonically increasing highestEnqueuedPresentationTime and is tested
+ by media-source-remove-b-frame.html.
+
+ Reviewed by Xabier Rodriguez-Calvar.
+
+ Tests: media/media-source/media-source-append-presentation-durations.html
+ media/media-source/media-source-first-append-not-starting-at-zero.html
+ media/media-source/media-source-remove-b-frame.html
+
+ * Modules/mediasource/SourceBuffer.cpp:
+ (WebCore::SourceBuffer::TrackBuffer::TrackBuffer):
+ (WebCore::SourceBuffer::removeCodedFrames):
+ (WebCore::SourceBuffer::sourceBufferPrivateDidReceiveSample):
+ (WebCore::SourceBuffer::provideMediaData):
+ (WebCore::SourceBuffer::reenqueueMediaForTime):
+ (WebCore::SourceBuffer::TrackBuffer::lastEnqueuedDecodeDuration): Deleted.
+
2020-01-16 Cathie Chen <cathiec...@igalia.com>
Mapping HTML attributes width/height to the default aspect ratio of <img>
Modified: trunk/Source/WebCore/Modules/mediasource/SourceBuffer.cpp (254669 => 254670)
--- trunk/Source/WebCore/Modules/mediasource/SourceBuffer.cpp 2020-01-16 10:32:35 UTC (rev 254669)
+++ trunk/Source/WebCore/Modules/mediasource/SourceBuffer.cpp 2020-01-16 10:43:19 UTC (rev 254670)
@@ -66,15 +66,24 @@
static const double ExponentialMovingAverageCoefficient = 0.1;
+// Do not enqueue samples spanning a significant unbuffered gap.
+// NOTE: one second is somewhat arbitrary. MediaSource::monitorSourceBuffers() is run
+// on the playbackTimer, which is effectively every 350ms. Allowing > 350ms gap between
+// enqueued samples allows for situations where we overrun the end of a buffered range
+// but don't notice for 350s of playback time, and the client can enqueue data for the
+// new current time without triggering this early return.
+// FIXME(135867): Make this gap detection logic less arbitrary.
+static const MediaTime discontinuityTolerance = MediaTime(1, 1);
+
struct SourceBuffer::TrackBuffer {
MediaTime lastDecodeTimestamp;
MediaTime greatestDecodeDuration;
MediaTime lastFrameDuration;
MediaTime highestPresentationTimestamp;
- MediaTime lastEnqueuedPresentationTime;
+ MediaTime highestEnqueuedPresentationTime;
MediaTime minimumEnqueuedPresentationTime;
DecodeOrderSampleMap::KeyType lastEnqueuedDecodeKey;
- MediaTime lastEnqueuedDecodeDuration;
+ MediaTime enqueueDiscontinuityBoundary { MediaTime::zeroTime() };
MediaTime roundedTimestampOffset;
uint32_t lastFrameTimescale { 0 };
bool needRandomAccessFlag { true };
@@ -91,9 +100,9 @@
, greatestDecodeDuration(MediaTime::invalidTime())
, lastFrameDuration(MediaTime::invalidTime())
, highestPresentationTimestamp(MediaTime::invalidTime())
- , lastEnqueuedPresentationTime(MediaTime::invalidTime())
+ , highestEnqueuedPresentationTime(MediaTime::invalidTime())
, lastEnqueuedDecodeKey({MediaTime::invalidTime(), MediaTime::invalidTime()})
- , lastEnqueuedDecodeDuration(MediaTime::invalidTime())
+ , enqueueDiscontinuityBoundary(discontinuityTolerance)
{
}
};
@@ -864,8 +873,8 @@
// Only force the TrackBuffer to re-enqueue if the removed ranges overlap with enqueued and possibly
// not yet displayed samples.
- if (trackBuffer.lastEnqueuedPresentationTime.isValid() && currentMediaTime < trackBuffer.lastEnqueuedPresentationTime) {
- PlatformTimeRanges possiblyEnqueuedRanges(currentMediaTime, trackBuffer.lastEnqueuedPresentationTime);
+ if (trackBuffer.highestEnqueuedPresentationTime.isValid() && currentMediaTime < trackBuffer.highestEnqueuedPresentationTime) {
+ PlatformTimeRanges possiblyEnqueuedRanges(currentMediaTime, trackBuffer.highestEnqueuedPresentationTime);
possiblyEnqueuedRanges.intersectWith(erasedRanges);
if (possiblyEnqueuedRanges.length()) {
trackBuffer.needsReenqueueing = true;
@@ -1737,8 +1746,8 @@
// Only force the TrackBuffer to re-enqueue if the removed ranges overlap with enqueued and possibly
// not yet displayed samples.
MediaTime currentMediaTime = m_source->currentTime();
- if (trackBuffer.lastEnqueuedPresentationTime.isValid() && currentMediaTime < trackBuffer.lastEnqueuedPresentationTime) {
- PlatformTimeRanges possiblyEnqueuedRanges(currentMediaTime, trackBuffer.lastEnqueuedPresentationTime);
+ if (trackBuffer.highestEnqueuedPresentationTime.isValid() && currentMediaTime < trackBuffer.highestEnqueuedPresentationTime) {
+ PlatformTimeRanges possiblyEnqueuedRanges(currentMediaTime, trackBuffer.highestEnqueuedPresentationTime);
possiblyEnqueuedRanges.intersectWith(erasedRanges);
if (possiblyEnqueuedRanges.length())
trackBuffer.needsReenqueueing = true;
@@ -1760,11 +1769,15 @@
trackBuffer.samples.addSample(sample);
// Note: The terminology here is confusing: "enqueuing" means providing a frame to the inner media framework.
- // First, frames are inserted in the decode queue; later, at the end of the append all the frames in the decode
- // queue are "enqueued" (sent to the inner media framework) in `provideMediaData()`.
+ // First, frames are inserted in the decode queue; later, at the end of the append some of the frames in the
+ // decode may be "enqueued" (sent to the inner media framework) in `provideMediaData()`.
//
- // In order to check whether a frame should be added to the decode queue we check whether it starts after the
- // lastEnqueuedDecodeKey.
+ // In order to check whether a frame should be added to the decode queue we check that it does not precede any
+ // frame already enqueued.
+ //
+ // Note that adding a frame to the decode queue is no guarantee that it will be actually enqueued at that point.
+ // If the frame is after the discontinuity boundary, the enqueueing algorithm will hold it there until samples
+ // with earlier timestamps are enqueued. The decode queue is not FIFO, but rather an ordered map.
DecodeOrderSampleMap::KeyType decodeKey(sample.decodeTime(), sample.presentationTime());
if (trackBuffer.lastEnqueuedDecodeKey.first.isInvalid() || decodeKey > trackBuffer.lastEnqueuedDecodeKey) {
trackBuffer.decodeQueue.insert(DecodeOrderSampleMap::MapType::value_type(decodeKey, &sample));
@@ -2019,19 +2032,8 @@
// rather than when all samples have been enqueued.
auto sample = trackBuffer.decodeQueue.begin()->second;
- // Do not enqueue samples spanning a significant unbuffered gap.
- // NOTE: one second is somewhat arbitrary. MediaSource::monitorSourceBuffers() is run
- // on the playbackTimer, which is effectively every 350ms. Allowing > 350ms gap between
- // enqueued samples allows for situations where we overrun the end of a buffered range
- // but don't notice for 350s of playback time, and the client can enqueue data for the
- // new current time without triggering this early return.
- // FIXME(135867): Make this gap detection logic less arbitrary.
- MediaTime oneSecond(1, 1);
- if (trackBuffer.lastEnqueuedDecodeKey.first.isValid()
- && trackBuffer.lastEnqueuedDecodeDuration.isValid()
- && sample->decodeTime() - trackBuffer.lastEnqueuedDecodeKey.first > oneSecond + trackBuffer.lastEnqueuedDecodeDuration) {
-
- DEBUG_LOG(LOGIDENTIFIER, "bailing early because of unbuffered gap, new sample: ", sample->decodeTime(), ", last enqueued sample ends: ", trackBuffer.lastEnqueuedDecodeKey.first + trackBuffer.lastEnqueuedDecodeDuration);
+ if (sample->decodeTime() > trackBuffer.enqueueDiscontinuityBoundary) {
+ DEBUG_LOG(LOGIDENTIFIER, "bailing early because of unbuffered gap, new sample: ", sample->decodeTime(), " >= the current discontinuity boundary: ", trackBuffer.enqueueDiscontinuityBoundary);
break;
}
@@ -2038,9 +2040,13 @@
// Remove the sample from the decode queue now.
trackBuffer.decodeQueue.erase(trackBuffer.decodeQueue.begin());
- trackBuffer.lastEnqueuedPresentationTime = sample->presentationTime();
+ MediaTime samplePresentationEnd = sample->presentationTime() + sample->duration();
+ if (trackBuffer.highestEnqueuedPresentationTime.isInvalid() || samplePresentationEnd > trackBuffer.highestEnqueuedPresentationTime)
+ trackBuffer.highestEnqueuedPresentationTime = samplePresentationEnd;
+
trackBuffer.lastEnqueuedDecodeKey = {sample->decodeTime(), sample->presentationTime()};
- trackBuffer.lastEnqueuedDecodeDuration = sample->duration();
+ trackBuffer.enqueueDiscontinuityBoundary = sample->decodeTime() + sample->duration() + discontinuityTolerance;
+
m_private->enqueueSample(sample.releaseNonNull(), trackID);
#if !RELEASE_LOG_DISABLED
++enqueuedSamples;
@@ -2110,6 +2116,10 @@
m_private->flush(trackID);
trackBuffer.decodeQueue.clear();
+ trackBuffer.highestEnqueuedPresentationTime = MediaTime::invalidTime();
+ trackBuffer.lastEnqueuedDecodeKey = {MediaTime::invalidTime(), MediaTime::invalidTime()};
+ trackBuffer.enqueueDiscontinuityBoundary = time + discontinuityTolerance;
+
// Find the sample which contains the current presentation time.
auto currentSamplePTSIterator = trackBuffer.samples.presentationOrder().findSampleContainingPresentationTime(time);
@@ -2137,19 +2147,6 @@
trackBuffer.decodeQueue.insert(DecodeOrderSampleMap::MapType::value_type(decodeKey, WTFMove(copy)));
}
- if (!trackBuffer.decodeQueue.empty()) {
- auto lastSampleIter = trackBuffer.decodeQueue.rbegin();
- auto lastSampleDecodeKey = lastSampleIter->first;
- auto lastSampleDuration = lastSampleIter->second->duration();
- trackBuffer.lastEnqueuedPresentationTime = lastSampleDecodeKey.second;
- trackBuffer.lastEnqueuedDecodeKey = lastSampleDecodeKey;
- trackBuffer.lastEnqueuedDecodeDuration = lastSampleDuration;
- } else {
- trackBuffer.lastEnqueuedPresentationTime = MediaTime::invalidTime();
- trackBuffer.lastEnqueuedDecodeKey = {MediaTime::invalidTime(), MediaTime::invalidTime()};
- trackBuffer.lastEnqueuedDecodeDuration = MediaTime::invalidTime();
- }
-
// Fill the decode queue with the remaining samples.
for (auto iter = currentSampleDTSIterator; iter != trackBuffer.samples.decodeOrder().end(); ++iter)
trackBuffer.decodeQueue.insert(*iter);