Diff
Modified: trunk/LayoutTests/ChangeLog (202828 => 202829)
--- trunk/LayoutTests/ChangeLog 2016-07-05 20:05:02 UTC (rev 202828)
+++ trunk/LayoutTests/ChangeLog 2016-07-05 20:22:58 UTC (rev 202829)
@@ -1,3 +1,13 @@
+2016-07-05 Jer Noble <jer.noble@apple.com>
+
+ REGRESSION (r202641): Netflix playback stalls after a few seconds
+ https://bugs.webkit.org/show_bug.cgi?id=159365
+
+ Reviewed by Eric Carlson.
+
+ * media/media-source/media-source-small-gap-expected.txt: Added.
+ * media/media-source/media-source-small-gap.html: Added.
+
2016-07-05 Myles C. Maxfield <mmaxfi...@apple.com>
[Sierra] Rebaseline tests to use un-mocked system font metrics
Added: trunk/LayoutTests/media/media-source/media-source-small-gap-expected.txt (0 => 202829)
--- trunk/LayoutTests/media/media-source/media-source-small-gap-expected.txt (rev 0)
+++ trunk/LayoutTests/media/media-source/media-source-small-gap-expected.txt 2016-07-05 20:22:58 UTC (rev 202829)
@@ -0,0 +1,17 @@
+This tests the SourceBuffer.buffered() API. This test will add 8 samples with a small gap (0.01s) between each sample. The SourceBuffer should coalesce such small gaps to make a single contiguous range. The video element should also play continuously from the beginning to the end.
+
+RUN(video.src = URL.createObjectURL(source))
+EVENT(sourceopen)
+RUN(sourceBuffer = source.addSourceBuffer("video/mock; codecs=mock"))
+RUN(sourceBuffer.appendBuffer(initSegment))
+EVENT(updateend)
+RUN(sourceBuffer.appendBuffer(samples))
+EVENT(updateend)
+EXPECTED (sourceBuffer.buffered.length == '1') OK
+EXPECTED (sourceBuffer.buffered.start(0) == '0') OK
+EXPECTED (sourceBuffer.buffered.end(0) == '8') OK
+RUN(video.play())
+EVENT(ended)
+EXPECTED (video.currentTime == '8') OK
+END OF TEST
+
Added: trunk/LayoutTests/media/media-source/media-source-small-gap.html (0 => 202829)
--- trunk/LayoutTests/media/media-source/media-source-small-gap.html (rev 0)
+++ trunk/LayoutTests/media/media-source/media-source-small-gap.html 2016-07-05 20:22:58 UTC (rev 202829)
@@ -0,0 +1,65 @@
+<!DOCTYPE html>
+<html>
+<head>
+ <title>mock-media-source</title>
+ <script src="mock-media-source.js"></script>
+ <script src="../video-test.js"></script>
+ <script>
+ var source;
+ var sourceBuffer;
+ var initSegment;
+
+ if (window.internals)
+ internals.initializeMockMediaSource();
+
+ function runTest() {
+ findMediaElement();
+
+ source = new MediaSource();
+ waitForEventOn(source, 'sourceopen', sourceOpen);
+ run('video.src = URL.createObjectURL(source)');
+ }
+
+ function sourceOpen() {
+ run('sourceBuffer = source.addSourceBuffer("video/mock; codecs=mock")');
+ waitForEventOn(sourceBuffer, 'updateend', loadSamples, false, true);
+ initSegment = makeAInit(8, [makeATrack(1, 'mock', TRACK_KIND.VIDEO)]);
+ run('sourceBuffer.appendBuffer(initSegment)');
+ }
+
+ function loadSamples() {
+ samples = concatenateSamples([
+ makeASample(0, 0, 0.99, 1, SAMPLE_FLAG.SYNC),
+ makeASample(1, 1, 0.99, 1, SAMPLE_FLAG.NONE),
+ makeASample(2, 2, 0.99, 1, SAMPLE_FLAG.NONE),
+ makeASample(3, 3, 0.99, 1, SAMPLE_FLAG.NONE),
+ makeASample(4, 4, 0.99, 1, SAMPLE_FLAG.SYNC),
+ makeASample(5, 5, 0.99, 1, SAMPLE_FLAG.NONE),
+ makeASample(6, 6, 0.99, 1, SAMPLE_FLAG.NONE),
+ makeASample(7, 7, 1, 1, SAMPLE_FLAG.NONE),
+ ]);
+ waitForEventOn(sourceBuffer, 'updateend', checkBuffered, false, true);
+ run('sourceBuffer.appendBuffer(samples)');
+ }
+
+ function checkBuffered() {
+ testExpected('sourceBuffer.buffered.length', 1);
+ testExpected('sourceBuffer.buffered.start(0)', 0);
+ testExpected('sourceBuffer.buffered.end(0)', 8);
+
+ run('video.play()');
+ waitForEvent('ended', ended);
+ }
+
+ function ended() {
+ testExpected('video.currentTime', 8);
+ endTest();
+ }
+
+ </script>
+</head>
+<body onload="runTest()">
+ <div>This tests the SourceBuffer.buffered() API. This test will add 8 samples with a small gap (0.01s) between each sample. The SourceBuffer should coalesce such small gaps to make a single contiguous range. The video element should also play continuously from the beginning to the end.</div>
+ <video></video>
+</body>
+</html>
Modified: trunk/Source/WTF/ChangeLog (202828 => 202829)
--- trunk/Source/WTF/ChangeLog 2016-07-05 20:05:02 UTC (rev 202828)
+++ trunk/Source/WTF/ChangeLog 2016-07-05 20:22:58 UTC (rev 202829)
@@ -1,3 +1,16 @@
+2016-07-01 Jer Noble <jer.noble@apple.com>
+
+ REGRESSION (r202641): Netflix playback stalls after a few seconds
+ https://bugs.webkit.org/show_bug.cgi?id=159365
+
+ Reviewed by Eric Carlson.
+
+ Add a isBetween() convenience method.
+
+ * wtf/MediaTime.cpp:
+ (WTF::MediaTime::isBetween):
+ * wtf/MediaTime.h:
+
2016-07-03 Per Arne Vollan <pvol...@apple.com>
[Win] DLLs are missing version information.
Modified: trunk/Source/WTF/wtf/MediaTime.cpp (202828 => 202829)
--- trunk/Source/WTF/wtf/MediaTime.cpp 2016-07-05 20:05:02 UTC (rev 202828)
+++ trunk/Source/WTF/wtf/MediaTime.cpp 2016-07-05 20:22:58 UTC (rev 202829)
@@ -433,6 +433,13 @@
return lhsFactor > rhsFactor ? GreaterThan : LessThan;
}
+bool MediaTime::isBetween(const MediaTime& a, const MediaTime& b) const
+{
+ if (a > b)
+ return *this > b && *this < a;
+ return *this > a && *this < b;
+}
+
const MediaTime& MediaTime::zeroTime()
{
static const MediaTime* time = new MediaTime(0, 1, Valid);
Modified: trunk/Source/WTF/wtf/MediaTime.h (202828 => 202829)
--- trunk/Source/WTF/wtf/MediaTime.h 2016-07-05 20:05:02 UTC (rev 202828)
+++ trunk/Source/WTF/wtf/MediaTime.h 2016-07-05 20:22:58 UTC (rev 202829)
@@ -88,6 +88,7 @@
} ComparisonFlags;
ComparisonFlags compare(const MediaTime& rhs) const;
+ bool isBetween(const MediaTime&, const MediaTime&) const;
bool isValid() const { return m_timeFlags & Valid; }
bool isInvalid() const { return !isValid(); }
Modified: trunk/Source/WebCore/ChangeLog (202828 => 202829)
--- trunk/Source/WebCore/ChangeLog 2016-07-05 20:05:02 UTC (rev 202828)
+++ trunk/Source/WebCore/ChangeLog 2016-07-05 20:22:58 UTC (rev 202829)
@@ -1,3 +1,25 @@
+2016-07-01 Jer Noble <jer.noble@apple.com>
+
+ REGRESSION (r202641): Netflix playback stalls after a few seconds
+ https://bugs.webkit.org/show_bug.cgi?id=159365
+
+ Reviewed by Eric Carlson.
+
+ Test: LayoutTests/media/media-source/media-source-small-gap.html
+
+ In r202641, we removed a "fudge factor" of 1 millisecond added onto the duration
+ of every sample for the purposes of calculating a SourceBuffer's buffered ranges.
+ Netflix (and likely other providers) have streams that have 1 "timeScale" gaps
+ between segments (e.g., 1/9000s, 1/3003s, etc.). Fill those gaps by looking for
+ the previous and next samples and extending the buffered range to cover the gaps
+ if they're short enough. We have to ensure that we correctly remove those extended
+ durations when we remove samples from the SourceBuffer as well.
+
+ * Modules/mediasource/SourceBuffer.cpp:
+ (WebCore::removeSamplesFromTrackBuffer):
+ (WebCore::SourceBuffer::removeCodedFrames):
+ (WebCore::SourceBuffer::sourceBufferPrivateDidReceiveSample):
+
2016-07-05 Brady Eidson <beid...@apple.com>
Database process crashes deleting a corrupt SQLite database file (null deref).
Modified: trunk/Source/WebCore/Modules/mediasource/SourceBuffer.cpp (202828 => 202829)
--- trunk/Source/WebCore/Modules/mediasource/SourceBuffer.cpp 2016-07-05 20:05:02 UTC (rev 202828)
+++ trunk/Source/WebCore/Modules/mediasource/SourceBuffer.cpp 2016-07-05 20:22:58 UTC (rev 202829)
@@ -652,7 +652,7 @@
return a.second->decodeTime() < b.second->decodeTime();
}
-static PassRefPtr<TimeRanges> removeSamplesFromTrackBuffer(const DecodeOrderSampleMap::MapType& samples, SourceBuffer::TrackBuffer& trackBuffer, const SourceBuffer* buffer, const char* logPrefix)
+static PlatformTimeRanges removeSamplesFromTrackBuffer(const DecodeOrderSampleMap::MapType& samples, SourceBuffer::TrackBuffer& trackBuffer, const SourceBuffer* buffer, const char* logPrefix)
{
#if !LOG_DISABLED
MediaTime earliestSample = MediaTime::positiveInfiniteTime();
@@ -663,7 +663,7 @@
UNUSED_PARAM(buffer);
#endif
- auto erasedRanges = TimeRanges::create();
+ PlatformTimeRanges erasedRanges;
for (auto sampleIt : samples) {
const DecodeOrderSampleMap::KeyType& decodeKey = sampleIt.first;
#if !LOG_DISABLED
@@ -681,7 +681,7 @@
auto startTime = sample->presentationTime();
auto endTime = startTime + sample->duration();
- erasedRanges->ranges().add(startTime, endTime);
+ erasedRanges.add(startTime, endTime);
#if !LOG_DISABLED
bytesRemoved += startBufferSize - trackBuffer.samples.sizeInBytes();
@@ -692,12 +692,40 @@
#endif
}
+ // Because we may have added artificial padding in the buffered ranges when adding samples, we may
+ // need to remove that padding when removing those same samples. Walk over the erased ranges looking
+ // for unbuffered areas and expand erasedRanges to encompass those areas.
+ PlatformTimeRanges additionalErasedRanges;
+ for (unsigned i = 0; i < erasedRanges.length(); ++i) {
+ auto erasedStart = erasedRanges.start(i);
+ auto erasedEnd = erasedRanges.end(i);
+ auto startIterator = trackBuffer.samples.presentationOrder().reverseFindSampleBeforePresentationTime(erasedStart);
+ if (startIterator == trackBuffer.samples.presentationOrder().rend())
+ additionalErasedRanges.add(MediaTime::zeroTime(), erasedStart);
+ else {
+ auto& previousSample = *startIterator->second;
+ if (previousSample.presentationTime() + previousSample.duration() < erasedStart)
+ additionalErasedRanges.add(previousSample.presentationTime() + previousSample.duration(), erasedStart);
+ }
+
+ auto endIterator = trackBuffer.samples.presentationOrder().findSampleOnOrAfterPresentationTime(erasedEnd);
+ if (endIterator == trackBuffer.samples.presentationOrder().end())
+ additionalErasedRanges.add(erasedEnd, MediaTime::positiveInfiniteTime());
+ else {
+ auto& nextSample = *endIterator->second;
+ if (nextSample.presentationTime() > erasedEnd)
+ additionalErasedRanges.add(erasedEnd, nextSample.presentationTime());
+ }
+ }
+ if (additionalErasedRanges.length())
+ erasedRanges.unionWith(additionalErasedRanges);
+
#if !LOG_DISABLED
if (bytesRemoved)
LOG(MediaSource, "SourceBuffer::%s(%p) removed %zu bytes, start(%lf), end(%lf)", logPrefix, buffer, bytesRemoved, earliestSample.toDouble(), latestSample.toDouble());
#endif
- return WTFMove(erasedRanges);
+ return erasedRanges;
}
void SourceBuffer::removeCodedFrames(const MediaTime& start, const MediaTime& end)
@@ -742,19 +770,19 @@
DecodeOrderSampleMap::iterator removeDecodeStart = trackBuffer.samples.decodeOrder().findSampleWithDecodeKey(decodeKey);
DecodeOrderSampleMap::MapType erasedSamples(removeDecodeStart, removeDecodeEnd);
- RefPtr<TimeRanges> erasedRanges = removeSamplesFromTrackBuffer(erasedSamples, trackBuffer, this, "removeCodedFrames");
+ PlatformTimeRanges erasedRanges = removeSamplesFromTrackBuffer(erasedSamples, trackBuffer, this, "removeCodedFrames");
// Only force the TrackBuffer to re-enqueue if the removed ranges overlap with enqueued and possibly
// not yet displayed samples.
if (currentMediaTime < trackBuffer.lastEnqueuedPresentationTime) {
PlatformTimeRanges possiblyEnqueuedRanges(currentMediaTime, trackBuffer.lastEnqueuedPresentationTime);
- possiblyEnqueuedRanges.intersectWith(erasedRanges->ranges());
+ possiblyEnqueuedRanges.intersectWith(erasedRanges);
if (possiblyEnqueuedRanges.length())
trackBuffer.needsReenqueueing = true;
}
- erasedRanges->invert();
- m_buffered->intersectWith(*erasedRanges);
+ erasedRanges.invert();
+ m_buffered->ranges().intersectWith(erasedRanges);
setBufferedDirty(true);
// 3.4 If this object is in activeSourceBuffers, the current playback position is greater than or equal to start
@@ -1532,7 +1560,7 @@
auto nextSyncIter = trackBuffer.samples.decodeOrder().findSyncSampleAfterDecodeIterator(lastDecodeIter);
dependentSamples.insert(firstDecodeIter, nextSyncIter);
- RefPtr<TimeRanges> erasedRanges = removeSamplesFromTrackBuffer(dependentSamples, trackBuffer, this, "sourceBufferPrivateDidReceiveSample");
+ PlatformTimeRanges erasedRanges = removeSamplesFromTrackBuffer(dependentSamples, trackBuffer, this, "sourceBufferPrivateDidReceiveSample");
// Only force the TrackBuffer to re-enqueue if the removed ranges overlap with enqueued and possibly
// not yet displayed samples.
@@ -1539,13 +1567,13 @@
MediaTime currentMediaTime = m_source->currentTime();
if (currentMediaTime < trackBuffer.lastEnqueuedPresentationTime) {
PlatformTimeRanges possiblyEnqueuedRanges(currentMediaTime, trackBuffer.lastEnqueuedPresentationTime);
- possiblyEnqueuedRanges.intersectWith(erasedRanges->ranges());
+ possiblyEnqueuedRanges.intersectWith(erasedRanges);
if (possiblyEnqueuedRanges.length())
trackBuffer.needsReenqueueing = true;
}
- erasedRanges->invert();
- m_buffered->intersectWith(*erasedRanges);
+ erasedRanges.invert();
+ m_buffered->ranges().intersectWith(erasedRanges);
setBufferedDirty(true);
}
@@ -1585,7 +1613,18 @@
if (m_shouldGenerateTimestamps)
m_timestampOffset = frameEndTimestamp;
- m_buffered->ranges().add(presentationTimestamp, presentationTimestamp + frameDuration);
+ // Eliminate small gaps between buffered ranges by coalescing
+ // disjoint ranges separated by less than a "fudge factor".
+ auto presentationEndTime = presentationTimestamp + frameDuration;
+ auto nearestToPresentationStartTime = m_buffered->ranges().nearest(presentationTimestamp);
+ if ((presentationTimestamp - nearestToPresentationStartTime).isBetween(MediaTime::zeroTime(), currentTimeFudgeFactor()))
+ presentationTimestamp = nearestToPresentationStartTime;
+
+ auto nearestToPresentationEndTime = m_buffered->ranges().nearest(presentationEndTime);
+ if ((nearestToPresentationEndTime - presentationEndTime).isBetween(MediaTime::zeroTime(), currentTimeFudgeFactor()))
+ presentationEndTime = nearestToPresentationEndTime;
+
+ m_buffered->ranges().add(presentationTimestamp, presentationEndTime);
m_bufferedSinceLastMonitor += frameDuration.toDouble();
setBufferedDirty(true);