Diff
Modified: trunk/LayoutTests/ChangeLog (219418 => 219419)
--- trunk/LayoutTests/ChangeLog 2017-07-12 19:16:44 UTC (rev 219418)
+++ trunk/LayoutTests/ChangeLog 2017-07-12 20:19:44 UTC (rev 219419)
@@ -1,3 +1,13 @@
+2017-07-12 Eric Carlson <eric.carl...@apple.com>
+
+ [MediaStream] a capture source failure should end the MediaStreamTrack
+ https://bugs.webkit.org/show_bug.cgi?id=174375
+
+ Reviewed by Youenn Fablet.
+
+ * fast/mediastream/media-stream-track-source-failure-expected.txt: Added.
+ * fast/mediastream/media-stream-track-source-failure.html: Added.
+
2017-07-12 Antoine Quint <grao...@apple.com>
Playback controls should not hide while AirPlay is active
Added: trunk/LayoutTests/fast/mediastream/media-stream-track-source-failure-expected.txt (0 => 219419)
--- trunk/LayoutTests/fast/mediastream/media-stream-track-source-failure-expected.txt (rev 0)
+++ trunk/LayoutTests/fast/mediastream/media-stream-track-source-failure-expected.txt 2017-07-12 20:19:44 UTC (rev 219419)
@@ -0,0 +1,6 @@
+
+
+PASS Create stream
+PASS End video track only
+PASS End audio track only
+
Added: trunk/LayoutTests/fast/mediastream/media-stream-track-source-failure.html (0 => 219419)
--- trunk/LayoutTests/fast/mediastream/media-stream-track-source-failure.html (rev 0)
+++ trunk/LayoutTests/fast/mediastream/media-stream-track-source-failure.html 2017-07-12 20:19:44 UTC (rev 219419)
@@ -0,0 +1,90 @@
+<!DOCTYPE html>
+<html>
+<head>
+ <meta charset="utf-8">
+ <title>Capture source failure.</title>
+    <script src="../../resources/testharness.js"></script>
+    <script src="../../resources/testharnessreport.js"></script>
+ </head>
+<body>
+ <video width=320 height=240></video>
+
+ <script>
+
+ let video;
+
+ function waitForPageStateChange(numberOfTries, originalState, resolve, reject)
+ {
+ let newState = internals.pageMediaState();
+ if (newState != originalState) {
+ resolve(newState);
+ return;
+ }
+
+ if (numberOfTries <= 0) {
+ reject('Page state did not change in time.');
+ return;
+ }
+
+ setTimeout(() => { waitForPageStateChange(--numberOfTries, originalState, resolve, reject); }, 10);
+ }
+
+ function testTrack(track, title)
+ {
+ promise_test((test) => {
+ return new Promise((resolve, reject) => {
+
+        track.onended = () => {
+ new Promise((innerResolve, innerReject) => {
+ waitForPageStateChange(10, internals.pageMediaState(), innerResolve, innerReject)
+ }).then((pageMediaState) => {
+ assert_equals(video.videoTracks.length, 1, "Unexpected video track count");
+ assert_equals(video.audioTracks.length, 1, "Unexpected audio track count");
+ assert_equals(video.videoHeight + video.videoWidth, 0, "Video track should have no size");
+
+ if (window.internals) {
+ if (track.kind === "video") {
+ assert_false(pageMediaState.includes('HasMutedVideoCaptureDevice'), "Unexpected HasMutedVideoCaptureDevice");
+ assert_false(pageMediaState.includes('HasMutedAudioCaptureDevice'), "Unexpected HasMutedAudioCaptureDevice");
+ assert_true(pageMediaState.includes('HasActiveAudioCaptureDevice'), "Unexpected HasActiveAudioCaptureDevice");
+ assert_false(pageMediaState.includes('HasActiveVideoCaptureDevice'), "Unexpected HasActiveVideoCaptureDevice");
+ } else {
+ assert_false(pageMediaState.includes('HasMutedAudioCaptureDevice'), "Unexpected HasMutedAudioCaptureDevice");
+ assert_false(pageMediaState.includes('HasMutedVideoCaptureDevice'), "Unexpected HasMutedVideoCaptureDevice");
+ assert_false(pageMediaState.includes('HasActiveAudioCaptureDevice'), "Unexpected HasActiveAudioCaptureDevice");
+ assert_false(pageMediaState.includes('HasActiveVideoCaptureDevice'), "Unexpected HasActiveVideoCaptureDevice");
+ }
+ }
+ resolve();
+ })
+ }
+
+ if (window.internals)
+ internals.simulateMediaStreamTrackCaptureSourceFailure(track);
+ setTimeout(() => reject("Device state did not change in 5 second"), 5000);
+
+ });
+ }, title);
+ }
+
+ promise_test((test) => {
+ let stream;
+ return navigator.mediaDevices.getUserMedia({ video: true, audio: true})
+ .then(s => {
+ stream = s;
+ video = document.getElementsByTagName("video")[0];
+ video.srcObject = stream;
+ })
+        .then(() => new Promise(resolve => video.onloadedmetadata = resolve))
+ .then(() => {
+ assert_equals(video.videoTracks.length, 1, "Expected one video track");
+ assert_equals(video.audioTracks.length, 1, "Expected one audio track");
+ testTrack(stream.getVideoTracks()[0], "End video track only");
+ testTrack(stream.getAudioTracks()[0], "End audio track only");
+ })
+ }, "Create stream");
+
+ </script>
+
+</body>
+</html>
Modified: trunk/Source/WebCore/ChangeLog (219418 => 219419)
--- trunk/Source/WebCore/ChangeLog 2017-07-12 19:16:44 UTC (rev 219418)
+++ trunk/Source/WebCore/ChangeLog 2017-07-12 20:19:44 UTC (rev 219419)
@@ -1,3 +1,31 @@
+2017-07-12 Eric Carlson <eric.carl...@apple.com>
+
+ [MediaStream] a capture source failure should end the MediaStreamTrack
+ https://bugs.webkit.org/show_bug.cgi?id=174375
+
+ Reviewed by Youenn Fablet.
+
+ Test: fast/mediastream/media-stream-track-source-failure.html
+
+ * platform/mediastream/RealtimeMediaSource.cpp:
+ (WebCore::RealtimeMediaSource::captureFailed): New, signal observers that the source has ended.
+ * platform/mediastream/RealtimeMediaSource.h:
+
+ * platform/mediastream/mac/AVMediaCaptureSource.h:
+ * platform/mediastream/mac/AVMediaCaptureSource.mm:
+ (WebCore::AVMediaCaptureSource::setupSession): Call captureFailed if setupCaptureSession fails.
+
+ * platform/mediastream/mac/AVVideoCaptureSource.h:
+ * platform/mediastream/mac/AVVideoCaptureSource.mm:
+ (WebCore::AVVideoCaptureSource::setupCaptureSession): Return false on failure.
+ (WebCore::AVVideoCaptureSource::shutdownCaptureSession): Delete unused instance variable.
+ (WebCore::AVVideoCaptureSource::processNewFrame): Ditto.
+
+ * testing/Internals.cpp:
+ (WebCore::Internals::endMediaStreamTrackCaptureSource): Call track.source.captureFailed().
+ * testing/Internals.h:
+ * testing/Internals.idl:
+
2017-07-12 Timothy Hatcher <timo...@hatcher.name>
Improve font matching with FontConfig and FreeType
Modified: trunk/Source/WebCore/page/MediaProducer.h (219418 => 219419)
--- trunk/Source/WebCore/page/MediaProducer.h 2017-07-12 19:16:44 UTC (rev 219418)
+++ trunk/Source/WebCore/page/MediaProducer.h 2017-07-12 20:19:44 UTC (rev 219419)
@@ -47,7 +47,11 @@
HasMutedAudioCaptureDevice = 1 << 13,
HasMutedVideoCaptureDevice = 1 << 14,
HasInterruptedAudioCaptureDevice = 1 << 15,
- HasInterruptedVideoCaptureDevice = 1 << 15,
+ HasInterruptedVideoCaptureDevice = 1 << 16,
+
+ AudioCaptureMask = HasActiveAudioCaptureDevice | HasMutedAudioCaptureDevice | HasInterruptedAudioCaptureDevice,
+ VideoCaptureMask = HasActiveVideoCaptureDevice | HasMutedVideoCaptureDevice | HasInterruptedVideoCaptureDevice,
+ MediaCaptureMask = AudioCaptureMask | VideoCaptureMask,
};
typedef unsigned MediaStateFlags;
Modified: trunk/Source/WebCore/platform/mediastream/RealtimeMediaSource.cpp (219418 => 219419)
--- trunk/Source/WebCore/platform/mediastream/RealtimeMediaSource.cpp 2017-07-12 19:16:44 UTC (rev 219418)
+++ trunk/Source/WebCore/platform/mediastream/RealtimeMediaSource.cpp 2017-07-12 20:19:44 UTC (rev 219419)
@@ -152,6 +152,9 @@
m_isProducingData = true;
startProducingData();
+ if (!m_isProducingData)
+ return;
+
for (Observer& observer : m_observers)
observer.sourceStarted();
}
@@ -183,6 +186,14 @@
}
}
+void RealtimeMediaSource::captureFailed()
+{
+ m_isProducingData = false;
+
+ for (Observer& observer : m_observers)
+ observer.sourceStopped();
+}
+
bool RealtimeMediaSource::supportsSizeAndFrameRate(std::optional<int>, std::optional<int>, std::optional<double>)
{
// The size and frame rate are within the capability limits, so they are supported.
Modified: trunk/Source/WebCore/platform/mediastream/RealtimeMediaSource.h (219418 => 219419)
--- trunk/Source/WebCore/platform/mediastream/RealtimeMediaSource.h 2017-07-12 19:16:44 UTC (rev 219418)
+++ trunk/Source/WebCore/platform/mediastream/RealtimeMediaSource.h 2017-07-12 20:19:44 UTC (rev 219419)
@@ -206,6 +206,8 @@
virtual void monitorOrientation(OrientationNotifier&) { }
+ void captureFailed();
+
// Testing only
virtual void delaySamples(float) { };
Modified: trunk/Source/WebCore/platform/mediastream/mac/AVMediaCaptureSource.h (219418 => 219419)
--- trunk/Source/WebCore/platform/mediastream/mac/AVMediaCaptureSource.h 2017-07-12 19:16:44 UTC (rev 219418)
+++ trunk/Source/WebCore/platform/mediastream/mac/AVMediaCaptureSource.h 2017-07-12 20:19:44 UTC (rev 219419)
@@ -72,7 +72,7 @@
protected:
AVMediaCaptureSource(AVCaptureDevice*, const AtomicString&, RealtimeMediaSource::Type);
- virtual void setupCaptureSession() = 0;
+ virtual bool setupCaptureSession() = 0;
virtual void shutdownCaptureSession() = 0;
virtual void updateSettings(RealtimeMediaSourceSettings&) = 0;
virtual void initializeCapabilities(RealtimeMediaSourceCapabilities&) = 0;
@@ -87,7 +87,7 @@
void setAudioSampleBufferDelegate(AVCaptureAudioDataOutput*);
private:
- void setupSession();
+ bool setupSession();
void beginConfiguration() final;
void commitConfiguration() final;
Modified: trunk/Source/WebCore/platform/mediastream/mac/AVMediaCaptureSource.mm (219418 => 219419)
--- trunk/Source/WebCore/platform/mediastream/mac/AVMediaCaptureSource.mm 2017-07-12 19:16:44 UTC (rev 219418)
+++ trunk/Source/WebCore/platform/mediastream/mac/AVMediaCaptureSource.mm 2017-07-12 20:19:44 UTC (rev 219419)
@@ -168,9 +168,11 @@
void AVMediaCaptureSource::startProducingData()
{
- if (!m_session)
- setupSession();
-
+ if (!m_session) {
+ if (!setupSession())
+ return;
+ }
+
if ([m_session isRunning])
return;
@@ -192,7 +194,6 @@
#if PLATFORM(IOS)
m_session = nullptr;
#endif
-
}
void AVMediaCaptureSource::beginConfiguration()
@@ -248,10 +249,10 @@
return *m_capabilities;
}
-void AVMediaCaptureSource::setupSession()
+bool AVMediaCaptureSource::setupSession()
{
if (m_session)
- return;
+ return true;
m_session = adoptNS([allocAVCaptureSessionInstance() init]);
for (NSString* keyName in sessionKVOProperties())
@@ -258,8 +259,13 @@
[m_session addObserver:m_objcObserver.get() forKeyPath:keyName options:NSKeyValueObservingOptionNew context:(void *)nil];
[m_session beginConfiguration];
- setupCaptureSession();
+ bool success = setupCaptureSession();
[m_session commitConfiguration];
+
+ if (!success)
+ captureFailed();
+
+ return success;
}
void AVMediaCaptureSource::captureSessionIsRunningDidChange(bool state)
Modified: trunk/Source/WebCore/platform/mediastream/mac/AVVideoCaptureSource.h (219418 => 219419)
--- trunk/Source/WebCore/platform/mediastream/mac/AVVideoCaptureSource.h 2017-07-12 19:16:44 UTC (rev 219418)
+++ trunk/Source/WebCore/platform/mediastream/mac/AVVideoCaptureSource.h 2017-07-12 20:19:44 UTC (rev 219419)
@@ -56,7 +56,7 @@
AVVideoCaptureSource(AVCaptureDevice*, const AtomicString&);
virtual ~AVVideoCaptureSource();
- void setupCaptureSession() final;
+ bool setupCaptureSession() final;
void shutdownCaptureSession() final;
void updateSettings(RealtimeMediaSourceSettings&) final;
@@ -87,7 +87,6 @@
RetainPtr<NSString> m_pendingPreset;
RetainPtr<CMSampleBufferRef> m_buffer;
- RetainPtr<CGImageRef> m_lastImage;
RetainPtr<AVCaptureVideoDataOutput> m_videoOutput;
std::unique_ptr<PixelBufferConformerCV> m_pixelBufferConformer;
Modified: trunk/Source/WebCore/platform/mediastream/mac/AVVideoCaptureSource.mm (219418 => 219419)
--- trunk/Source/WebCore/platform/mediastream/mac/AVVideoCaptureSource.mm 2017-07-12 19:16:44 UTC (rev 219418)
+++ trunk/Source/WebCore/platform/mediastream/mac/AVVideoCaptureSource.mm 2017-07-12 20:19:44 UTC (rev 219419)
@@ -410,7 +410,7 @@
return connection ? sensorOrientation([connection videoOrientation]) : 0;
}
-void AVVideoCaptureSource::setupCaptureSession()
+bool AVVideoCaptureSource::setupCaptureSession()
{
#if PLATFORM(IOS)
avVideoCaptureSourceFactory().setActiveSource(*this);
@@ -420,12 +420,12 @@
RetainPtr<AVCaptureDeviceInputType> videoIn = adoptNS([allocAVCaptureDeviceInputInstance() initWithDevice:device() error:&error]);
if (error) {
LOG(Media, "AVVideoCaptureSource::setupCaptureSession(%p), failed to allocate AVCaptureDeviceInput: %s", this, [[error localizedDescription] UTF8String]);
- return;
+ return false;
}
if (![session() canAddInput:videoIn.get()]) {
LOG(Media, "AVVideoCaptureSource::setupCaptureSession(%p), unable to add video input device", this);
- return;
+ return false;
}
[session() addInput:videoIn.get()];
@@ -445,7 +445,7 @@
if (![session() canAddOutput:m_videoOutput.get()]) {
LOG(Media, "AVVideoCaptureSource::setupCaptureSession(%p), unable to add video sample buffer output delegate", this);
- return;
+ return false;
}
[session() addOutput:m_videoOutput.get()];
@@ -455,12 +455,13 @@
m_sensorOrientation = sensorOrientationFromVideoOutput(m_videoOutput.get());
computeSampleRotation();
+
+ return true;
}
void AVVideoCaptureSource::shutdownCaptureSession()
{
m_buffer = nullptr;
- m_lastImage = nullptr;
m_width = 0;
m_height = 0;
}
@@ -508,9 +509,7 @@
void AVVideoCaptureSource::processNewFrame(RetainPtr<CMSampleBufferRef> sampleBuffer, RetainPtr<AVCaptureConnectionType> connection)
{
- // Ignore frames delivered when the session is not running, we want to hang onto the last image
- // delivered before it stopped.
- if (m_lastImage && (!isProducingData() || muted()))
+ if (!isProducingData() || muted())
return;
CMFormatDescriptionRef formatDescription = CMSampleBufferGetFormatDescription(sampleBuffer.get());
@@ -518,8 +517,6 @@
return;
m_buffer = sampleBuffer;
- m_lastImage = nullptr;
-
CMVideoDimensions dimensions = CMVideoFormatDescriptionGetDimensions(formatDescription);
if (m_sampleRotation == MediaSample::VideoRotation::Left || m_sampleRotation == MediaSample::VideoRotation::Right)
std::swap(dimensions.width, dimensions.height);
Modified: trunk/Source/WebCore/testing/Internals.cpp (219418 => 219419)
--- trunk/Source/WebCore/testing/Internals.cpp 2017-07-12 19:16:44 UTC (rev 219418)
+++ trunk/Source/WebCore/testing/Internals.cpp 2017-07-12 20:19:44 UTC (rev 219419)
@@ -4161,6 +4161,11 @@
stream.internalRemoveTrack(track.id(), MediaStream::StreamModifier::Platform);
}
+void Internals::simulateMediaStreamTrackCaptureSourceFailure(MediaStreamTrack& track)
+{
+ track.source().captureFailed();
+}
+
#endif
String Internals::audioSessionCategory() const
Modified: trunk/Source/WebCore/testing/Internals.h (219418 => 219419)
--- trunk/Source/WebCore/testing/Internals.h 2017-07-12 19:16:44 UTC (rev 219418)
+++ trunk/Source/WebCore/testing/Internals.h 2017-07-12 20:19:44 UTC (rev 219419)
@@ -595,6 +595,7 @@
void delayMediaStreamTrackSamples(MediaStreamTrack&, float);
void setMediaStreamTrackMuted(MediaStreamTrack&, bool);
void removeMediaStreamTrack(MediaStream&, MediaStreamTrack&);
+ void simulateMediaStreamTrackCaptureSourceFailure(MediaStreamTrack&);
#endif
String audioSessionCategory() const;
Modified: trunk/Source/WebCore/testing/Internals.idl (219418 => 219419)
--- trunk/Source/WebCore/testing/Internals.idl 2017-07-12 19:16:44 UTC (rev 219418)
+++ trunk/Source/WebCore/testing/Internals.idl 2017-07-12 20:19:44 UTC (rev 219419)
@@ -544,6 +544,7 @@
[Conditional=MEDIA_STREAM] void delayMediaStreamTrackSamples(MediaStreamTrack track, float delay);
[Conditional=MEDIA_STREAM] void setMediaStreamTrackMuted(MediaStreamTrack track, boolean muted);
[Conditional=MEDIA_STREAM] void removeMediaStreamTrack(MediaStream stream, MediaStreamTrack track);
+ [Conditional=MEDIA_STREAM] void simulateMediaStreamTrackCaptureSourceFailure(MediaStreamTrack track);
DOMString audioSessionCategory();
};
Modified: trunk/Source/WebKit2/ChangeLog (219418 => 219419)
--- trunk/Source/WebKit2/ChangeLog 2017-07-12 19:16:44 UTC (rev 219418)
+++ trunk/Source/WebKit2/ChangeLog 2017-07-12 20:19:44 UTC (rev 219419)
@@ -1,3 +1,20 @@
+2017-07-12 Eric Carlson <eric.carl...@apple.com>
+
+ [MediaStream] a capture source failure should end the MediaStreamTrack
+ https://bugs.webkit.org/show_bug.cgi?id=174375
+
+ Reviewed by Youenn Fablet.
+
+ * UIProcess/UserMediaPermissionRequestManagerProxy.cpp:
+ (WebKit::UserMediaPermissionRequestManagerProxy::captureStateChanged): Consolidate start/end.
+ (WebKit::UserMediaPermissionRequestManagerProxy::startedCaptureSession): Deleted.
+ (WebKit::UserMediaPermissionRequestManagerProxy::endedCaptureSession): Deleted.
+ * UIProcess/UserMediaPermissionRequestManagerProxy.h:
+
+ * UIProcess/WebPageProxy.cpp:
+ (WebKit::WebPageProxy::isPlayingMediaDidChange): Call captureStateChanged if the capture
+ state changes. Consider interrupted flags.
+
2017-07-12 Daniel Bates <daba...@apple.com>
NavigationAction should track whether the navigation was initiated by the main frame
Modified: trunk/Source/WebKit2/UIProcess/UserMediaPermissionRequestManagerProxy.cpp (219418 => 219419)
--- trunk/Source/WebKit2/UIProcess/UserMediaPermissionRequestManagerProxy.cpp 2017-07-12 19:16:44 UTC (rev 219418)
+++ trunk/Source/WebKit2/UIProcess/UserMediaPermissionRequestManagerProxy.cpp 2017-07-12 20:19:44 UTC (rev 219419)
@@ -382,23 +382,21 @@
#endif
}
-void UserMediaPermissionRequestManagerProxy::startedCaptureSession()
+void UserMediaPermissionRequestManagerProxy::captureStateChanged(WebCore::MediaProducer::MediaStateFlags oldState, WebCore::MediaProducer::MediaStateFlags newState)
{
if (!m_page.isValid())
return;
#if ENABLE(MEDIA_STREAM)
- UserMediaProcessManager::singleton().startedCaptureSession(*this);
-#endif
-}
+ bool wasCapturingAudio = oldState & WebCore::MediaProducer::AudioCaptureMask;
+ bool wasCapturingVideo = oldState & WebCore::MediaProducer::VideoCaptureMask;
+ bool isCapturingAudio = newState & WebCore::MediaProducer::AudioCaptureMask;
+ bool isCapturingVideo = newState & WebCore::MediaProducer::VideoCaptureMask;
-void UserMediaPermissionRequestManagerProxy::endedCaptureSession()
-{
- if (!m_page.isValid())
- return;
-
-#if ENABLE(MEDIA_STREAM)
- UserMediaProcessManager::singleton().endedCaptureSession(*this);
+ if ((wasCapturingAudio && !isCapturingAudio) || (wasCapturingVideo && !isCapturingVideo))
+ UserMediaProcessManager::singleton().endedCaptureSession(*this);
+ if ((!wasCapturingAudio && isCapturingAudio) || (!wasCapturingVideo && isCapturingVideo))
+ UserMediaProcessManager::singleton().startedCaptureSession(*this);
#endif
}
Modified: trunk/Source/WebKit2/UIProcess/UserMediaPermissionRequestManagerProxy.h (219418 => 219419)
--- trunk/Source/WebKit2/UIProcess/UserMediaPermissionRequestManagerProxy.h 2017-07-12 19:16:44 UTC (rev 219418)
+++ trunk/Source/WebKit2/UIProcess/UserMediaPermissionRequestManagerProxy.h 2017-07-12 20:19:44 UTC (rev 219419)
@@ -21,6 +21,7 @@
#include "UserMediaPermissionCheckProxy.h"
#include "UserMediaPermissionRequestProxy.h"
+#include <WebCore/MediaProducer.h>
#include <WebCore/SecurityOrigin.h>
#include <WebCore/Timer.h>
#include <wtf/HashMap.h>
@@ -60,8 +61,7 @@
void rejectionTimerFired();
void clearCachedState();
- void startedCaptureSession();
- void endedCaptureSession();
+ void captureStateChanged(WebCore::MediaProducer::MediaStateFlags oldState, WebCore::MediaProducer::MediaStateFlags newState);
private:
Ref<UserMediaPermissionRequestProxy> createRequest(uint64_t userMediaID, uint64_t mainFrameID, uint64_t frameID, Ref<WebCore::SecurityOrigin>&& userMediaDocumentOrigin, Ref<WebCore::SecurityOrigin>&& topLevelDocumentOrigin, Vector<String>&& audioDeviceUIDs, Vector<String>&& videoDeviceUIDs, String&&);
Modified: trunk/Source/WebKit2/UIProcess/WebPageProxy.cpp (219418 => 219419)
--- trunk/Source/WebKit2/UIProcess/WebPageProxy.cpp 2017-07-12 19:16:44 UTC (rev 219418)
+++ trunk/Source/WebKit2/UIProcess/WebPageProxy.cpp 2017-07-12 20:19:44 UTC (rev 219419)
@@ -6463,10 +6463,9 @@
if (state == m_mediaState)
return;
- WebCore::MediaProducer::MediaStateFlags activeCaptureMask = WebCore::MediaProducer::HasActiveAudioCaptureDevice | WebCore::MediaProducer::HasActiveVideoCaptureDevice | WebCore::MediaProducer::HasMutedAudioCaptureDevice | WebCore::MediaProducer::HasMutedVideoCaptureDevice;
#if ENABLE(MEDIA_STREAM)
- WebCore::MediaProducer::MediaStateFlags oldMediaStateHasActiveCapture = m_mediaState & activeCaptureMask;
- WebCore::MediaProducer::MediaStateFlags newMediaStateHasActiveCapture = state & activeCaptureMask;
+ WebCore::MediaProducer::MediaStateFlags oldMediaCaptureState = m_mediaState & WebCore::MediaProducer::MediaCaptureMask;
+ WebCore::MediaProducer::MediaStateFlags newMediaCaptureState = state & WebCore::MediaProducer::MediaCaptureMask;
#endif
MediaProducer::MediaStateFlags playingMediaMask = MediaProducer::IsPlayingAudio | MediaProducer::IsPlayingVideo;
@@ -6474,17 +6473,15 @@
m_mediaState = state;
#if ENABLE(MEDIA_STREAM)
- if (oldMediaStateHasActiveCapture != newMediaStateHasActiveCapture)
+ if (oldMediaCaptureState != newMediaCaptureState) {
m_uiClient->mediaCaptureStateDidChange(m_mediaState);
- if (!oldMediaStateHasActiveCapture && newMediaStateHasActiveCapture)
- userMediaPermissionRequestManager().startedCaptureSession();
- else if (oldMediaStateHasActiveCapture && !newMediaStateHasActiveCapture)
- userMediaPermissionRequestManager().endedCaptureSession();
+ userMediaPermissionRequestManager().captureStateChanged(oldMediaCaptureState, newMediaCaptureState);
+ }
#endif
activityStateDidChange(ActivityState::IsAudible | ActivityState::IsCapturingMedia);
- playingMediaMask |= activeCaptureMask;
+ playingMediaMask |= WebCore::MediaProducer::MediaCaptureMask;
if ((oldState & playingMediaMask) != (m_mediaState & playingMediaMask))
m_uiClient->isPlayingAudioDidChange(*this);