Diff
Modified: trunk/LayoutTests/ChangeLog (178437 => 178438)
--- trunk/LayoutTests/ChangeLog 2015-01-14 20:36:01 UTC (rev 178437)
+++ trunk/LayoutTests/ChangeLog 2015-01-14 20:36:25 UTC (rev 178438)
@@ -1,3 +1,18 @@
+2015-01-14 Bartlomiej Gajda <b.ga...@samsung.com>
+
+ [MSE] Implement per TrackBuffer buffered.
+ https://bugs.webkit.org/show_bug.cgi?id=139813.
+
+ Reviewed by Jer Noble.
+
+ Added tests which checks how buffered behaves for two trackBuffers in single SourceBuffer
+ after appends, and with MediaSource readyState switching to ended and back to open.
+
+ * media/media-source/media-source-multiple-trackBuffers-buffered-expected.txt: Added.
+ * media/media-source/media-source-multiple-trackBuffers-buffered-in-ended-state-expected.txt: Added.
+ * media/media-source/media-source-multiple-trackBuffers-buffered-in-ended-state.html: Added.
+ * media/media-source/media-source-multiple-trackBuffers-buffered.html: Added.
+
2015-01-14 Commit Queue <commit-qu...@webkit.org>
Unreviewed, rolling out r178432.
Added: trunk/LayoutTests/media/media-source/media-source-multiple-trackBuffers-buffered-expected.txt (0 => 178438)
--- trunk/LayoutTests/media/media-source/media-source-multiple-trackBuffers-buffered-expected.txt (rev 0)
+++ trunk/LayoutTests/media/media-source/media-source-multiple-trackBuffers-buffered-expected.txt 2015-01-14 20:36:25 UTC (rev 178438)
@@ -0,0 +1,21 @@
+
+RUN(video.src = URL.createObjectURL(source))
+RUN(sourceBuffer.appendBuffer(init);)
+EVENT(updateend)
+RUN(sourceBuffer.appendBuffer(samples))
+EVENT(updateend)
+EXPECTED (sourceBuffer.buffered.length == '0') OK
+RUN(sourceBuffer.appendBuffer(samples))
+EVENT(updateend)
+EXPECTED (sourceBuffer.buffered.length == '1') OK
+EXPECTED (sourceBuffer.buffered.end(0).toFixed(3) == '3') OK
+RUN(sourceBuffer.appendBuffer(samples))
+EVENT(updateend)
+EXPECTED (sourceBuffer.buffered.length == '1') OK
+EXPECTED (sourceBuffer.buffered.end(0).toFixed(3) == '6') OK
+RUN(sourceBuffer.remove(0, 2.5))
+EVENT(updateend)
+EXPECTED (sourceBuffer.buffered.length == '1') OK
+EXPECTED (sourceBuffer.buffered.start(0).toFixed(3) == '3') OK
+END OF TEST
+
Added: trunk/LayoutTests/media/media-source/media-source-multiple-trackBuffers-buffered-in-ended-state-expected.txt (0 => 178438)
--- trunk/LayoutTests/media/media-source/media-source-multiple-trackBuffers-buffered-in-ended-state-expected.txt (rev 0)
+++ trunk/LayoutTests/media/media-source/media-source-multiple-trackBuffers-buffered-in-ended-state-expected.txt 2015-01-14 20:36:25 UTC (rev 178438)
@@ -0,0 +1,28 @@
+
+RUN(video.src = URL.createObjectURL(source))
+EXPECTED (sourceBuffer.updating == 'false') OK
+RUN(sourceBuffer.appendBuffer(init);)
+EVENT(updateend)
+RUN(sourceBuffer.appendBuffer(samples))
+EVENT(updateend)
+EXPECTED (sourceBuffer.buffered.length == '0') OK
+RUN(sourceBuffer.appendBuffer(samples))
+EVENT(updateend)
+EXPECTED (sourceBuffer.buffered.length == '1') OK
+EXPECTED (sourceBuffer.buffered.start(0).toFixed(3) == '3') OK
+EXPECTED (sourceBuffer.buffered.end(0).toFixed(3) == '4') OK
+RUN(source.endOfStream())
+EVENT(sourceended)
+EXPECTED (source.readyState == 'ended') OK
+EXPECTED (sourceBuffer.buffered.length == '2') OK
+EXPECTED (sourceBuffer.buffered.start(1).toFixed(3) == '7') OK
+EXPECTED (sourceBuffer.buffered.end(1).toFixed(3) == '10') OK
+EVENT(updateend)
+EXPECTED (source.readyState == 'ended') OK
+RUN(sourceBuffer.timestampOffset = 0;)
+EXPECTED (source.readyState == 'open') OK
+EXPECTED (sourceBuffer.buffered.length == '1') OK
+EXPECTED (sourceBuffer.buffered.start(0).toFixed(3) == '3') OK
+EXPECTED (sourceBuffer.buffered.end(0).toFixed(3) == '4') OK
+END OF TEST
+
Added: trunk/LayoutTests/media/media-source/media-source-multiple-trackBuffers-buffered-in-ended-state.html (0 => 178438)
--- trunk/LayoutTests/media/media-source/media-source-multiple-trackBuffers-buffered-in-ended-state.html (rev 0)
+++ trunk/LayoutTests/media/media-source/media-source-multiple-trackBuffers-buffered-in-ended-state.html 2015-01-14 20:36:25 UTC (rev 178438)
@@ -0,0 +1,95 @@
+<!DOCTYPE html>
+<html>
+<head>
+ <title>mock-media-source</title>
+    <script src="mock-media-source.js"></script>
+    <script src="../video-test.js"></script>
+ <script>
+ var source;
+ var sourceBuffer;
+ var init;
+ var samples;
+
+ if (window.internals)
+ internals.initializeMockMediaSource();
+
+ function runTest() {
+ findMediaElement();
+ source = new MediaSource();
+ source.addEventListener('sourceopen', startLoad);
+        run('video.src = URL.createObjectURL(source)');
+ }
+
+ function startLoad() {
+ sourceBuffer = source.addSourceBuffer('video/mock; codecs="mock"');
+ waitForEventOn(sourceBuffer, 'updateend', loadSamples1, false, true);
+
+ // Make an init segment with 1 video track and 1 audio track
+ init = makeAInit(0, [
+ makeATrack(1, 'mock', TRACK_KIND.VIDEO),
+ makeATrack(2, 'mock', TRACK_KIND.AUDIO),
+ ]);
+ testExpected('sourceBuffer.updating', false);
+ run('sourceBuffer.appendBuffer(init);');
+ }
+
+ function loadSamples1() {
+ samples = concatenateSamples([
+ makeASample(3, 3, 1, 1, SAMPLE_FLAG.SYNC),
+ makeASample(4, 4, 1, 1, SAMPLE_FLAG.NONE),
+ makeASample(5, 5, 1, 1, SAMPLE_FLAG.NONE),
+ ]);
+ waitForEventOn(sourceBuffer, 'updateend', loadSamples2, false, true);
+ run('sourceBuffer.appendBuffer(samples)');
+ }
+
+ function loadSamples2() {
+ testExpected('sourceBuffer.buffered.length', 0);
+
+ samples = concatenateSamples([
+ makeASample(0, 0, 1, 2, SAMPLE_FLAG.SYNC),
+ makeASample(1, 1, 1, 2, SAMPLE_FLAG.SYNC),
+ makeASample(2, 2, 1, 2, SAMPLE_FLAG.SYNC),
+ makeASample(3, 3, 1, 2, SAMPLE_FLAG.SYNC),
+
+ makeASample(7, 7, 1, 2, SAMPLE_FLAG.SYNC),
+ makeASample(8, 8, 1, 2, SAMPLE_FLAG.SYNC),
+ makeASample(9, 9, 1, 2, SAMPLE_FLAG.SYNC),
+ ]);
+ waitForEventOn(sourceBuffer, 'updateend', loadSamples3, false, true);
+ run('sourceBuffer.appendBuffer(samples)');
+ }
+
+ function loadSamples3() {
+ testExpected('sourceBuffer.buffered.length', 1);
+ testExpected('sourceBuffer.buffered.start(0).toFixed(3)', 3);
+ testExpected('sourceBuffer.buffered.end(0).toFixed(3)', 4);
+
+ waitForEventOn(source, 'sourceended', ended, false, true);
+ waitForEventOn(sourceBuffer, 'updateend', finishTest, false, true);
+ run('source.endOfStream()');
+ }
+
+ function ended() {
+ testExpected('source.readyState', "ended");
+ testExpected('sourceBuffer.buffered.length', 2);
+ testExpected('sourceBuffer.buffered.start(1).toFixed(3)', 7);
+ testExpected('sourceBuffer.buffered.end(1).toFixed(3)', 10);
+ }
+
+ function finishTest() {
+ testExpected('source.readyState', "ended");
+ run('sourceBuffer.timestampOffset = 0;'); // this opens MediaSource
+ testExpected('source.readyState', "open");
+ testExpected('sourceBuffer.buffered.length', 1);
+ testExpected('sourceBuffer.buffered.start(0).toFixed(3)', 3);
+ testExpected('sourceBuffer.buffered.end(0).toFixed(3)', 4);
+ endTest();
+ }
+
+ </script>
+</head>
+<body onload="runTest()">
+ <video></video>
+</body>
+</html>
Added: trunk/LayoutTests/media/media-source/media-source-multiple-trackBuffers-buffered.html (0 => 178438)
--- trunk/LayoutTests/media/media-source/media-source-multiple-trackBuffers-buffered.html (rev 0)
+++ trunk/LayoutTests/media/media-source/media-source-multiple-trackBuffers-buffered.html 2015-01-14 20:36:25 UTC (rev 178438)
@@ -0,0 +1,97 @@
+<!DOCTYPE html>
+<html>
+<head>
+ <title>mock-media-source</title>
+    <script src="mock-media-source.js"></script>
+    <script src="../video-test.js"></script>
+ <script>
+ var source;
+ var sourceBuffer;
+ var init;
+ var samples;
+
+ if (window.internals)
+ internals.initializeMockMediaSource();
+
+ function runTest() {
+ findMediaElement();
+ source = new MediaSource();
+ source.addEventListener('sourceopen', startLoad);
+        run('video.src = URL.createObjectURL(source)');
+ }
+
+ function startLoad() {
+ sourceBuffer = source.addSourceBuffer('video/mock; codecs="mock"');
+ waitForEventOn(sourceBuffer, 'updateend', loadSamples1, false, true);
+
+ // Make an init segment with 1 video track and 1 audio track
+ init = makeAInit(0, [
+ makeATrack(1, 'mock', TRACK_KIND.VIDEO),
+ makeATrack(2, 'mock', TRACK_KIND.AUDIO),
+ ]);
+ run('sourceBuffer.appendBuffer(init);');
+ }
+
+ function loadSamples1() {
+ samples = concatenateSamples([
+ makeASample(0, 0, 1, 1, SAMPLE_FLAG.SYNC),
+ makeASample(1, 1, 1, 1, SAMPLE_FLAG.NONE),
+ makeASample(2, 2, 1, 1, SAMPLE_FLAG.NONE),
+ makeASample(3, 3, 1, 1, SAMPLE_FLAG.SYNC),
+ makeASample(4, 4, 1, 1, SAMPLE_FLAG.NONE),
+ makeASample(5, 5, 1, 1, SAMPLE_FLAG.NONE),
+ ]);
+ waitForEventOn(sourceBuffer, 'updateend', loadSamples2, false, true);
+ run('sourceBuffer.appendBuffer(samples)');
+ }
+
+ function loadSamples2() {
+ testExpected('sourceBuffer.buffered.length', 0);
+
+ samples = concatenateSamples([
+ makeASample(0, 0, 1, 2, SAMPLE_FLAG.SYNC),
+ makeASample(1, 1, 1, 2, SAMPLE_FLAG.SYNC),
+ makeASample(2, 2, 1, 2, SAMPLE_FLAG.SYNC),
+ ]);
+ waitForEventOn(sourceBuffer, 'updateend', loadSamples3, false, true);
+ run('sourceBuffer.appendBuffer(samples)');
+ }
+
+ function loadSamples3() {
+ testExpected('sourceBuffer.buffered.length', 1);
+ testExpected('sourceBuffer.buffered.end(0).toFixed(3)', 3);
+
+ samples = concatenateSamples([
+ makeASample(3, 3, 1, 2, SAMPLE_FLAG.SYNC),
+ makeASample(4, 4, 1, 2, SAMPLE_FLAG.SYNC),
+ makeASample(5, 5, 1, 2, SAMPLE_FLAG.SYNC),
+ makeASample(6, 6, 1, 2, SAMPLE_FLAG.SYNC),
+ makeASample(7, 7, 1, 2, SAMPLE_FLAG.SYNC),
+ makeASample(8, 8, 1, 2, SAMPLE_FLAG.SYNC),
+
+ ]);
+ waitForEventOn(sourceBuffer, 'updateend', removeSamples1, false, true);
+ run('sourceBuffer.appendBuffer(samples)');
+ }
+
+
+ function removeSamples1() {
+ testExpected('sourceBuffer.buffered.length', 1);
+ testExpected('sourceBuffer.buffered.end(0).toFixed(3)', 6);
+
+ waitForEventOn(sourceBuffer, 'updateend', doEnd, false, true);
+ run('sourceBuffer.remove(0, 2.5)');
+ }
+
+ function doEnd() {
+ testExpected('sourceBuffer.buffered.length', 1);
+ testExpected('sourceBuffer.buffered.start(0).toFixed(3)', 3);
+ endTest();
+ }
+
+ </script>
+</head>
+<body onload="runTest()">
+ <video></video>
+</body>
+</html>
Modified: trunk/Source/WebCore/ChangeLog (178437 => 178438)
--- trunk/Source/WebCore/ChangeLog 2015-01-14 20:36:01 UTC (rev 178437)
+++ trunk/Source/WebCore/ChangeLog 2015-01-14 20:36:25 UTC (rev 178438)
@@ -1,3 +1,33 @@
+2015-01-14 Bartlomiej Gajda <b.ga...@samsung.com>
+
+ [MSE] Implement per TrackBuffer buffered.
+ https://bugs.webkit.org/show_bug.cgi?id=139813.
+
+ Reviewed by Jer Noble.
+
+ Added implementation of SourceBuffer buffered() algorithm as per specification,
+ and caching mechanism called when samples are added, removed, or MediaSource
+ readyState is changed.
+
+ Tests: media/media-source/media-source-multiple-trackBuffers-buffered-in-ended-state.html
+ media/media-source/media-source-multiple-trackBuffers-buffered.html
+
+ * Modules/mediasource/MediaSource.cpp:
+ (WebCore::MediaSource::setReadyState):
+ * Modules/mediasource/SourceBuffer.cpp:
+ (WebCore::SourceBuffer::TrackBuffer::TrackBuffer):
+ (WebCore::SourceBuffer::SourceBuffer):
+ (WebCore::SourceBuffer::buffered):
+ (WebCore::SourceBuffer::invalidateBuffered):
+ (WebCore::SourceBuffer::recalculateBuffered):
+ (WebCore::SourceBuffer::sourceBufferPrivateAppendComplete):
+ (WebCore::SourceBuffer::removeCodedFrames):
+ (WebCore::SourceBuffer::evictCodedFrames):
+ (WebCore::SourceBuffer::sourceBufferPrivateDidReceiveSample):
+ (WebCore::SourceBuffer::bufferedAccountingForEndOfStream):
+ (WebCore::SourceBuffer::hasCurrentTime):
+ * Modules/mediasource/SourceBuffer.h:
+
2015-01-14 Benjamin Poulain <benja...@webkit.org>
Do not create new set for every sub-operation when converting a NFA to DFA
Modified: trunk/Source/WebCore/Modules/mediasource/MediaSource.cpp (178437 => 178438)
--- trunk/Source/WebCore/Modules/mediasource/MediaSource.cpp 2015-01-14 20:36:01 UTC (rev 178437)
+++ trunk/Source/WebCore/Modules/mediasource/MediaSource.cpp 2015-01-14 20:36:25 UTC (rev 178438)
@@ -405,6 +405,9 @@
m_readyState = state;
onReadyStateChange(oldState, state);
+
+ for (auto& sourceBuffer : *m_sourceBuffers)
+ sourceBuffer->invalidateBuffered();
}
static bool SourceBufferIsUpdating(RefPtr<SourceBuffer>& sourceBuffer)
Modified: trunk/Source/WebCore/Modules/mediasource/SourceBuffer.cpp (178437 => 178438)
--- trunk/Source/WebCore/Modules/mediasource/SourceBuffer.cpp 2015-01-14 20:36:01 UTC (rev 178437)
+++ trunk/Source/WebCore/Modules/mediasource/SourceBuffer.cpp 2015-01-14 20:36:25 UTC (rev 178438)
@@ -89,6 +89,7 @@
MediaTime highestPresentationTimestamp;
MediaTime lastEnqueuedPresentationTime;
MediaTime lastEnqueuedDecodeEndTime;
+ RefPtr<TimeRanges> m_buffered;
bool needRandomAccessFlag;
bool enabled;
bool needsReenqueueing;
@@ -102,6 +103,7 @@
, highestPresentationTimestamp(MediaTime::invalidTime())
, lastEnqueuedPresentationTime(MediaTime::invalidTime())
, lastEnqueuedDecodeEndTime(MediaTime::invalidTime())
+ , m_buffered(TimeRanges::create())
, needRandomAccessFlag(true)
, enabled(false)
, needsReenqueueing(false)
@@ -141,6 +143,7 @@
, m_active(false)
, m_bufferFull(false)
, m_shouldGenerateTimestamps(false)
+ , m_shouldRecalculateBuffered(false)
{
ASSERT(m_source);
@@ -165,15 +168,68 @@
return nullptr;
}
- // 2. Return a new static normalized TimeRanges object for the media segments buffered.
+ // Note: Steps 2-4 are handled by recalculateBuffered
+ if (m_shouldRecalculateBuffered)
+ recalculateBuffered();
+
+ // 5. Return the intersection ranges.
return m_buffered->copy();
}
const RefPtr<TimeRanges>& SourceBuffer::buffered() const
{
+ if (m_shouldRecalculateBuffered)
+ recalculateBuffered();
+
return m_buffered;
}
+void SourceBuffer::invalidateBuffered()
+{
+ m_shouldRecalculateBuffered = true;
+ // FIXME: for caching buffered in MediaSource should add here :
+ // m_source->invalidateBuffered();
+}
+
+void SourceBuffer::recalculateBuffered() const
+{
+ // Section 3.1 buffered attribute steps.
+ m_shouldRecalculateBuffered = false;
+
+ // 2. Let highest end time be the largest track buffer ranges end time across all the track buffers managed by this SourceBuffer object.
+ MediaTime highestEndTime = MediaTime::zeroTime();
+ for (auto& trackBuffer : m_trackBufferMap.values()) {
+ PlatformTimeRanges& trackRanges = trackBuffer.m_buffered->ranges();
+
+ if (trackRanges.length())
+ highestEndTime = std::max(highestEndTime, trackRanges.maximumBufferedTime());
+ }
+
+ // Return an empty range if all ranges are empty.
+ if (!highestEndTime) {
+ m_buffered = TimeRanges::create();
+ return;
+ }
+
+ // 3. Let intersection ranges equal a TimeRange object containing a single range from 0 to highest end time.
+ PlatformTimeRanges intersectionRanges(MediaTime::zeroTime(), highestEndTime);
+
+ // 4. For each track buffer managed by this SourceBuffer, run the following steps:
+ bool ended = m_source->readyState() == MediaSource::endedKeyword();
+ for (auto& trackBuffer : m_trackBufferMap.values()) {
+ // 4.1 Let track ranges equal the track buffer ranges for the current track buffer.
+ PlatformTimeRanges trackRanges = trackBuffer.m_buffered->ranges();
+ // 4.2 If readyState is "ended", then set the end time on the last range in track ranges to highest end time.
+ if (ended)
+ trackRanges.add(trackRanges.maximumBufferedTime(), highestEndTime);
+ // 4.3 Let new intersection ranges equal the intersection between the intersection ranges and the track ranges.
+ // 4.4 Replace the ranges in intersection ranges with the new intersection ranges.
+ intersectionRanges.intersectWith(trackRanges);
+ }
+
+ m_buffered = TimeRanges::create(intersectionRanges);
+}
+
double SourceBuffer::timestampOffset() const
{
return m_timestampOffset.toDouble();
@@ -612,6 +668,8 @@
// 7. Need more data: Return control to the calling algorithm.
+ invalidateBuffered();
+
// NOTE: return to Section 3.5.5
// 2.If the segment parser loop algorithm in the previous step was aborted, then abort this algorithm.
if (result != AppendSucceeded)
@@ -645,7 +703,7 @@
if (extraMemoryCost() > this->maximumBufferSize())
m_bufferFull = true;
- LOG(Media, "SourceBuffer::sourceBufferPrivateAppendComplete(%p) - buffered = %s", this, toString(m_buffered->ranges()).utf8().data());
+ LOG(Media, "SourceBuffer::sourceBufferPrivateAppendComplete(%p) - buffered = %s", this, toString(buffered()->ranges()).utf8().data());
}
void SourceBuffer::sourceBufferPrivateDidReceiveRenderingError(SourceBufferPrivate*, int error)
@@ -768,7 +826,7 @@
}
erasedRanges->invert();
- m_buffered->intersectWith(*erasedRanges);
+ trackBuffer.m_buffered->intersectWith(*erasedRanges);
// 3.4 If this object is in activeSourceBuffers, the current playback position is greater than or equal to start
// and less than the remove end timestamp, and HTMLMediaElement.readyState is greater than HAVE_METADATA, then set
@@ -777,10 +835,11 @@
m_private->setReadyState(MediaPlayer::HaveMetadata);
}
+ invalidateBuffered();
// 4. If buffer full flag equals true and this object is ready to accept more bytes, then set the buffer full flag to false.
// No-op
- LOG(Media, "SourceBuffer::removeCodedFrames(%p) - buffered = %s", this, toString(m_buffered->ranges()).utf8().data());
+ LOG(Media, "SourceBuffer::removeCodedFrames(%p) - buffered = %s", this, toString(buffered()->ranges()).utf8().data());
}
void SourceBuffer::removeTimerFired()
@@ -860,9 +919,9 @@
// If there still isn't enough free space and there buffers in time ranges after the current range (ie. there is a gap after
// the current buffered range), delete 30 seconds at a time from duration back to the current time range or 30 seconds after
// currenTime whichever we hit first.
- auto buffered = m_buffered->ranges();
- size_t currentTimeRange = buffered.find(currentTime);
- if (currentTimeRange == notFound || currentTimeRange == buffered.length() - 1) {
+ auto bufferedRegion = buffered()->ranges();
+ size_t currentTimeRange = bufferedRegion.find(currentTime);
+ if (currentTimeRange == notFound || currentTimeRange == bufferedRegion.length() - 1) {
LOG(MediaSource, "SourceBuffer::evictCodedFrames(%p) - evicted %zu bytes but FAILED to free enough", this, initialBufferedSize - extraMemoryCost());
return;
}
@@ -874,13 +933,13 @@
while (rangeStart > minimumRangeStart) {
// Do not evict data from the time range that contains currentTime.
- size_t startTimeRange = buffered.find(rangeStart);
+ size_t startTimeRange = bufferedRegion.find(rangeStart);
if (startTimeRange == currentTimeRange) {
- size_t endTimeRange = buffered.find(rangeEnd);
+ size_t endTimeRange = bufferedRegion.find(rangeEnd);
if (endTimeRange == currentTimeRange)
break;
- rangeEnd = buffered.start(endTimeRange);
+ rangeEnd = bufferedRegion.start(endTimeRange);
}
// 4. For each range in removal ranges, run the coded frame removal algorithm with start and
@@ -1480,14 +1539,14 @@
// NOTE: Searching from the end of the trackBuffer will be vastly more efficient if the search range is
// near the end of the buffered range. Use a linear-backwards search if the search range is within one
// frame duration of the end:
- if (!m_buffered)
+ if (!trackBuffer.m_buffered)
break;
- unsigned bufferedLength = m_buffered->ranges().length();
+ unsigned bufferedLength = trackBuffer.m_buffered->ranges().length();
if (!bufferedLength)
break;
- MediaTime highestBufferedTime = m_buffered->ranges().maximumBufferedTime();
+ MediaTime highestBufferedTime = trackBuffer.m_buffered->ranges().maximumBufferedTime();
PresentationOrderSampleMap::iterator_range range;
if (highestBufferedTime - trackBuffer.highestPresentationTimestamp < trackBuffer.lastFrameDuration)
@@ -1526,7 +1585,7 @@
}
erasedRanges->invert();
- m_buffered->intersectWith(*erasedRanges);
+ trackBuffer.m_buffered->intersectWith(*erasedRanges);
}
// 1.17 If spliced audio frame is set:
@@ -1565,7 +1624,7 @@
if (m_shouldGenerateTimestamps)
m_timestampOffset = frameEndTimestamp;
- m_buffered->add(presentationTimestamp.toDouble(), (presentationTimestamp + frameDuration + microsecond).toDouble());
+ trackBuffer.m_buffered->add(presentationTimestamp.toDouble(), (presentationTimestamp + frameDuration + microsecond).toDouble());
m_bufferedSinceLastMonitor += frameDuration.toDouble();
break;
@@ -1826,7 +1885,7 @@
std::unique_ptr<PlatformTimeRanges> SourceBuffer::bufferedAccountingForEndOfStream() const
{
// FIXME: Revisit this method once the spec bug <https://www.w3.org/Bugs/Public/show_bug.cgi?id=26436> is resolved.
- std::unique_ptr<PlatformTimeRanges> virtualRanges = PlatformTimeRanges::create(m_buffered->ranges());
+ std::unique_ptr<PlatformTimeRanges> virtualRanges = PlatformTimeRanges::create(buffered()->ranges());
if (m_source->isEnded()) {
MediaTime start = virtualRanges->maximumBufferedTime();
MediaTime end = m_source->duration();
@@ -1838,7 +1897,7 @@
bool SourceBuffer::hasCurrentTime() const
{
- if (isRemoved() || !m_buffered->length())
+ if (isRemoved() || !buffered()->length())
return false;
MediaTime currentTime = m_source->currentTime();
Modified: trunk/Source/WebCore/Modules/mediasource/SourceBuffer.h (178437 => 178438)
--- trunk/Source/WebCore/Modules/mediasource/SourceBuffer.h 2015-01-14 20:36:01 UTC (rev 178437)
+++ trunk/Source/WebCore/Modules/mediasource/SourceBuffer.h 2015-01-14 20:36:25 UTC (rev 178438)
@@ -126,6 +126,7 @@
bool shouldGenerateTimestamps() const { return m_shouldGenerateTimestamps; }
void setShouldGenerateTimestamps(bool flag) { m_shouldGenerateTimestamps = flag; }
+ void invalidateBuffered();
protected:
// EventTarget interface
virtual void refEventTarget() override { ref(); }
@@ -185,6 +186,7 @@
void removeTimerFired();
void removeCodedFrames(const MediaTime& start, const MediaTime& end);
+ void recalculateBuffered() const;
size_t extraMemoryCost() const;
void reportExtraMemoryCost();
@@ -219,7 +221,7 @@
MediaTime m_groupEndTimestamp;
HashMap<AtomicString, TrackBuffer> m_trackBufferMap;
- RefPtr<TimeRanges> m_buffered;
+ mutable RefPtr<TimeRanges> m_buffered;
enum AppendStateType { WaitingForSegment, ParsingInitSegment, ParsingMediaSegment };
AppendStateType m_appendState;
@@ -239,6 +241,7 @@
bool m_active;
bool m_bufferFull;
bool m_shouldGenerateTimestamps;
+ mutable bool m_shouldRecalculateBuffered;
};
} // namespace WebCore