Title: [257039] trunk
Revision: 257039
Author: you...@apple.com
Date: 2020-02-19 19:51:21 -0800 (Wed, 19 Feb 2020)

Log Message

Add support for AudioSession handling in GPUProcess for capture
https://bugs.webkit.org/show_bug.cgi?id=207950

Reviewed by Eric Carlson.

Source/WebCore:

Add a dedicated interface for handling audio capture in the session manager.
This was previously PlatformMediaSessionClient, but that interface is more related to audio/video rendering.
Use this new interface for MediaStreamTrack capture sources.
The session manager now owns a WeakHashSet of capture sources to compute the correct AudioSession category.
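
In outline, the new interface and the manager-side bookkeeping look like this (condensed from the PlatformMediaSession.h and PlatformMediaSessionManager hunks further down; surrounding declarations are omitted):

// Nested in PlatformMediaSession:
class AudioCaptureSource : public CanMakeWeakPtr<AudioCaptureSource> {
public:
    virtual ~AudioCaptureSource() = default;
    virtual bool isCapturingAudio() const = 0;
};

// The manager keeps sources in a WeakHashSet<PlatformMediaSession::AudioCaptureSource>
// and recomputes the session state whenever one is added or removed.
void PlatformMediaSessionManager::addAudioCaptureSource(PlatformMediaSession::AudioCaptureSource& source)
{
    ASSERT(!m_audioCaptureSources.contains(source));
    m_audioCaptureSources.add(source);
    updateSessionState();
}

// MediaSessionManagerCocoa::updateSessionState() now counts active capture sources
// instead of MediaStreamCapturingAudio sessions when picking the AudioSession category.
int PlatformMediaSessionManager::countActiveAudioCaptureSources()
{
    int count = 0;
    for (const auto& source : m_audioCaptureSources) {
        if (source.isCapturingAudio())
            ++count;
    }
    return count;
}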

Modernize the code, for instance by using an enum class for MediaType to remove some debug asserts.
Add a new PlatformMediaSessionManager::create that is used by sharedManager.
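
MediaType now looks like this, and sharedManager goes through the new create() factory (condensed from the diff below):

enum class MediaType {
    None = 0,
    Video,
    VideoAudio,
    Audio,
    WebAudio,
    // MediaStreamCapturingAudio is gone; audio capture is tracked via AudioCaptureSource instead.
};

PlatformMediaSessionManager& PlatformMediaSessionManager::sharedManager()
{
    auto& manager = sharedPlatformMediaSessionManager();
    if (!manager)
        manager = PlatformMediaSessionManager::create();
    return *manager;
}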

Remove the direct use of PlatformMediaSessionManager::sharedManager from classes that can be used outside of WebProcess.
This includes BaseAudioSharedUnit, for which we add an audioUnitWillStart callback
so that we compute the right AudioSession category before starting the capture.
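
Roughly, the capture unit now notifies its clients right before starting, and a debug assert checks that the AudioSession category came out right (condensed from the BaseAudioSharedUnit.cpp and CoreAudioCaptureSource.cpp hunks below; the rest of startUnit is unchanged):

OSStatus BaseAudioSharedUnit::startUnit()
{
    // Let clients update the AudioSession category before capture actually starts.
    forEachClient([](auto& client) {
        client.audioUnitWillStart();
    });
    ASSERT(!DeprecatedGlobalSettings::shouldManageAudioSessionCategory() || AudioSession::sharedSession().category() == AudioSession::PlayAndRecord);
    // ... then start the unit via startInternal(), as before.
}

// CoreAudioCaptureSource forwards the notification to its RealtimeMediaSource observers
// (MediaStreamTrackPrivate in the WebProcess, SourceProxy in the UIProcess/GPUProcess).
void CoreAudioCaptureSource::audioUnitWillStart()
{
    forEachObserver([](auto& observer) {
        observer.audioUnitWillStart();
    });
}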

Covered by existing tests and by debug assertions that check the AudioSession category computation.

* Modules/mediastream/MediaStreamTrack.cpp:
(WebCore::MediaStreamTrack::MediaStreamTrack):
(WebCore::MediaStreamTrack::~MediaStreamTrack):
(WebCore::MediaStreamTrack::configureTrackRendering):
(WebCore::MediaStreamTrack::isCapturingAudio const):
* Modules/mediastream/MediaStreamTrack.h:
* Modules/webaudio/AudioContext.h:
* html/HTMLAudioElement.h:
* html/HTMLMediaElement.cpp:
(WebCore::HTMLMediaElement::shouldDisableSleep const):
(WebCore::HTMLMediaElement::mediaType const):
(WebCore::HTMLMediaElement::presentationType const):
* html/HTMLVideoElement.h:
* html/MediaElementSession.cpp:
(WebCore::MediaElementSession::canShowControlsManager const):
* html/MediaElementSession.h:
* platform/audio/PlatformMediaSession.cpp:
(WebCore::PlatformMediaSession::PlatformMediaSession):
(WebCore::PlatformMediaSession::activeAudioSessionRequired const):
* platform/audio/PlatformMediaSession.h:
* platform/audio/PlatformMediaSessionManager.cpp:
(WebCore::sharedPlatformMediaSessionManager):
(WebCore::PlatformMediaSessionManager::sharedManager):
(WebCore::PlatformMediaSessionManager::sharedManagerIfExists):
(WebCore::PlatformMediaSessionManager::create):
(WebCore::indexFromMediaType):
(WebCore::PlatformMediaSessionManager::resetRestrictions):
(WebCore::PlatformMediaSessionManager::has const):
(WebCore::PlatformMediaSessionManager::count const):
(WebCore::PlatformMediaSessionManager::countActiveAudioCaptureSources):
(WebCore::PlatformMediaSessionManager::addRestriction):
(WebCore::PlatformMediaSessionManager::removeRestriction):
(WebCore::PlatformMediaSessionManager::restrictions):
(WebCore::PlatformMediaSessionManager::sessionWillBeginPlayback):
(WebCore::PlatformMediaSessionManager::applicationWillBecomeInactive):
(WebCore::PlatformMediaSessionManager::applicationDidBecomeActive):
(WebCore::PlatformMediaSessionManager::applicationDidEnterBackground):
(WebCore::PlatformMediaSessionManager::applicationWillEnterForeground):
(WebCore::PlatformMediaSessionManager::sessionIsPlayingToWirelessPlaybackTargetChanged):
(WebCore::PlatformMediaSessionManager::addAudioCaptureSource):
(WebCore::PlatformMediaSessionManager::removeAudioCaptureSource):
* platform/audio/PlatformMediaSessionManager.h:
* platform/audio/cocoa/MediaSessionManagerCocoa.mm:
(PlatformMediaSessionManager::create):
(MediaSessionManagerCocoa::updateSessionState):
* platform/audio/ios/MediaSessionManagerIOS.mm:
(WebCore::PlatformMediaSessionManager::create):
(WebCore::MediaSessionManageriOS::resetRestrictions):
* platform/mediastream/MediaStreamTrackPrivate.cpp:
(WebCore::MediaStreamTrackPrivate::audioUnitWillStart):
* platform/mediastream/MediaStreamTrackPrivate.h:
* platform/mediastream/RealtimeMediaSource.h:
* platform/mediastream/mac/BaseAudioSharedUnit.cpp:
(WebCore::BaseAudioSharedUnit::startUnit):
* platform/mediastream/mac/BaseAudioSharedUnit.h:
* platform/mediastream/mac/CoreAudioCaptureSource.cpp:
(WebCore::CoreAudioCaptureSource::audioUnitWillStart):
* platform/mediastream/mac/CoreAudioCaptureSource.h:
* testing/Internals.cpp:
(WebCore::mediaTypeFromString):
(WebCore::Internals::setMediaSessionRestrictions):
(WebCore::Internals::mediaSessionRestrictions const):

Source/WebKit:

Create a session manager for the GPUConnectionToWebProcess.
Pass it to any audio capture source proxy.
For UIProcess audio capture, we still use the shared manager.
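
In outline (condensed from the GPUConnectionToWebProcess.cpp and UserMediaCaptureManagerProxy.cpp hunks below; ring-buffer setup is omitted from the constructor):

PlatformMediaSessionManager& GPUConnectionToWebProcess::sessionManager()
{
    if (!m_sessionManager)
        m_sessionManager = PlatformMediaSessionManager::create();
    return *m_sessionManager;
}

// SourceProxy implements PlatformMediaSession::AudioCaptureSource and registers its
// audio sources with whichever manager the ConnectionProxy hands back: the shared one
// for WebProcessProxy, the per-connection one for GPUConnectionToWebProcess.
SourceProxy(RealtimeMediaSourceIdentifier id, PlatformMediaSessionManager& sessionManager, Ref<IPC::Connection>&& connection, Ref<RealtimeMediaSource>&& source)
    : m_id(id)
    , m_sessionManager(makeWeakPtr(sessionManager))
    , m_connection(WTFMove(connection))
    , m_source(WTFMove(source))
{
    m_source->addObserver(*this);

    if (m_source->type() == RealtimeMediaSource::Type::Audio)
        sessionManager.addAudioCaptureSource(*this);
}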

* GPUProcess/GPUConnectionToWebProcess.cpp:
(WebKit::GPUConnectionToWebProcess::sessionManager):
* GPUProcess/GPUConnectionToWebProcess.h:
* GPUProcess/GPUProcess.cpp:
(WebKit::GPUProcess::setMockCaptureDevicesEnabled):
* UIProcess/Cocoa/UserMediaCaptureManagerProxy.cpp:
(WebKit::UserMediaCaptureManagerProxy::SourceProxy::SourceProxy):
(WebKit::UserMediaCaptureManagerProxy::SourceProxy::~SourceProxy):
(WebKit::UserMediaCaptureManagerProxy::createMediaSourceForCaptureDeviceWithConstraints):
(WebKit::UserMediaCaptureManagerProxy::clone):
* UIProcess/Cocoa/UserMediaCaptureManagerProxy.h:
* UIProcess/WebProcessProxy.cpp:

LayoutTests:

* platform/ios/TestExpectations:

Diff

Modified: trunk/LayoutTests/ChangeLog (257038 => 257039)


--- trunk/LayoutTests/ChangeLog	2020-02-20 03:48:16 UTC (rev 257038)
+++ trunk/LayoutTests/ChangeLog	2020-02-20 03:51:21 UTC (rev 257039)
@@ -1,3 +1,12 @@
+2020-02-19  Youenn Fablet  <you...@apple.com>
+
+        Add support for AudioSession handling in GPUProcess for capture
+        https://bugs.webkit.org/show_bug.cgi?id=207950
+
+        Reviewed by Eric Carlson.
+
+        * platform/ios/TestExpectations:
+
 2020-02-19  Sunny He  <sunny...@apple.com>
 
         Fix crash when Node::normalize() triggers mutation event that modifies child order

Modified: trunk/LayoutTests/platform/ios/TestExpectations (257038 => 257039)


--- trunk/LayoutTests/platform/ios/TestExpectations	2020-02-20 03:48:16 UTC (rev 257038)
+++ trunk/LayoutTests/platform/ios/TestExpectations	2020-02-20 03:51:21 UTC (rev 257039)
@@ -106,6 +106,7 @@
 # Some media stream tests crash because of OS issues, so skip all but one while we investigate.
 fast/mediastream [ Skip ]
 fast/mediastream/getUserMedia-default.html [ Pass ]
+fast/mediastream/captureInGPUProcess.html [ Pass ]
 
 # ENABLE(MEDIA_CAPTURE) is not enabled
 fast/forms/file/file-input-capture.html

Modified: trunk/Source/WebCore/ChangeLog (257038 => 257039)


--- trunk/Source/WebCore/ChangeLog	2020-02-20 03:48:16 UTC (rev 257038)
+++ trunk/Source/WebCore/ChangeLog	2020-02-20 03:51:21 UTC (rev 257039)
@@ -1,3 +1,87 @@
+2020-02-19  Youenn Fablet  <you...@apple.com>
+
+        Add support for AudioSession handling in GPUProcess for capture
+        https://bugs.webkit.org/show_bug.cgi?id=207950
+
+        Reviewed by Eric Carlson.
+
+        Add a dedicated interface for handling audio capture in the session manager.
+        This was previously PlatformMediaSessionClient, but that interface is more related to audio/video rendering.
+        Use this new interface for MediaStreamTrack capture sources.
+        The session manager now owns a WeakHashSet of capture sources to compute the correct AudioSession category.
+
+        Modernize the code, for instance by using an enum class for MediaType to remove some debug asserts.
+        Add a new PlatformMediaSessionManager::create that is used by sharedManager.
+
+        Remove the direct use of PlatformMediaSessionManager::sharedManager from classes that can be used outside of WebProcess.
+        This includes BaseAudioSharedUnit, for which we add an audioUnitWillStart callback
+        so that we compute the right AudioSession category before starting the capture.
+
+        Covered by existing tests and by debug assertions that check the AudioSession category computation.
+
+        * Modules/mediastream/MediaStreamTrack.cpp:
+        (WebCore::MediaStreamTrack::MediaStreamTrack):
+        (WebCore::MediaStreamTrack::~MediaStreamTrack):
+        (WebCore::MediaStreamTrack::configureTrackRendering):
+        (WebCore::MediaStreamTrack::isCapturingAudio const):
+        * Modules/mediastream/MediaStreamTrack.h:
+        * Modules/webaudio/AudioContext.h:
+        * html/HTMLAudioElement.h:
+        * html/HTMLMediaElement.cpp:
+        (WebCore::HTMLMediaElement::shouldDisableSleep const):
+        (WebCore::HTMLMediaElement::mediaType const):
+        (WebCore::HTMLMediaElement::presentationType const):
+        * html/HTMLVideoElement.h:
+        * html/MediaElementSession.cpp:
+        (WebCore::MediaElementSession::canShowControlsManager const):
+        * html/MediaElementSession.h:
+        * platform/audio/PlatformMediaSession.cpp:
+        (WebCore::PlatformMediaSession::PlatformMediaSession):
+        (WebCore::PlatformMediaSession::activeAudioSessionRequired const):
+        * platform/audio/PlatformMediaSession.h:
+        * platform/audio/PlatformMediaSessionManager.cpp:
+        (WebCore::sharedPlatformMediaSessionManager):
+        (WebCore::PlatformMediaSessionManager::sharedManager):
+        (WebCore::PlatformMediaSessionManager::sharedManagerIfExists):
+        (WebCore::PlatformMediaSessionManager::create):
+        (WebCore::indexFromMediaType):
+        (WebCore::PlatformMediaSessionManager::resetRestrictions):
+        (WebCore::PlatformMediaSessionManager::has const):
+        (WebCore::PlatformMediaSessionManager::count const):
+        (WebCore::PlatformMediaSessionManager::countActiveAudioCaptureSources):
+        (WebCore::PlatformMediaSessionManager::addRestriction):
+        (WebCore::PlatformMediaSessionManager::removeRestriction):
+        (WebCore::PlatformMediaSessionManager::restrictions):
+        (WebCore::PlatformMediaSessionManager::sessionWillBeginPlayback):
+        (WebCore::PlatformMediaSessionManager::applicationWillBecomeInactive):
+        (WebCore::PlatformMediaSessionManager::applicationDidBecomeActive):
+        (WebCore::PlatformMediaSessionManager::applicationDidEnterBackground):
+        (WebCore::PlatformMediaSessionManager::applicationWillEnterForeground):
+        (WebCore::PlatformMediaSessionManager::sessionIsPlayingToWirelessPlaybackTargetChanged):
+        (WebCore::PlatformMediaSessionManager::addAudioCaptureSource):
+        (WebCore::PlatformMediaSessionManager::removeAudioCaptureSource):
+        * platform/audio/PlatformMediaSessionManager.h:
+        * platform/audio/cocoa/MediaSessionManagerCocoa.mm:
+        (PlatformMediaSessionManager::create):
+        (MediaSessionManagerCocoa::updateSessionState):
+        * platform/audio/ios/MediaSessionManagerIOS.mm:
+        (WebCore::PlatformMediaSessionManager::create):
+        (WebCore::MediaSessionManageriOS::resetRestrictions):
+        * platform/mediastream/MediaStreamTrackPrivate.cpp:
+        (WebCore::MediaStreamTrackPrivate::audioUnitWillStart):
+        * platform/mediastream/MediaStreamTrackPrivate.h:
+        * platform/mediastream/RealtimeMediaSource.h:
+        * platform/mediastream/mac/BaseAudioSharedUnit.cpp:
+        (WebCore::BaseAudioSharedUnit::startUnit):
+        * platform/mediastream/mac/BaseAudioSharedUnit.h:
+        * platform/mediastream/mac/CoreAudioCaptureSource.cpp:
+        (WebCore::CoreAudioCaptureSource::audioUnitWillStart):
+        * platform/mediastream/mac/CoreAudioCaptureSource.h:
+        * testing/Internals.cpp:
+        (WebCore::mediaTypeFromString):
+        (WebCore::Internals::setMediaSessionRestrictions):
+        (WebCore::Internals::mediaSessionRestrictions const):
+
 2020-02-19  Sunny He  <sunny...@apple.com>
 
         Fix crash when Node::normalize() triggers mutation event that modifies child order

Modified: trunk/Source/WebCore/Modules/mediastream/MediaStreamTrack.cpp (257038 => 257039)


--- trunk/Source/WebCore/Modules/mediastream/MediaStreamTrack.cpp	2020-02-20 03:48:16 UTC (rev 257038)
+++ trunk/Source/WebCore/Modules/mediastream/MediaStreamTrack.cpp	2020-02-20 03:51:21 UTC (rev 257039)
@@ -75,7 +75,6 @@
     : ActiveDOMObject(&context)
     , m_private(WTFMove(privateTrack))
     , m_isCaptureTrack(m_private->isCaptureTrack())
-    , m_mediaSession(PlatformMediaSession::create(PlatformMediaSessionManager::sharedManager(), *this))
 {
     ALWAYS_LOG(LOGIDENTIFIER);
 
@@ -86,6 +85,9 @@
 
     allCaptureTracks().add(this);
 
+    if (m_private->type() == RealtimeMediaSource::Type::Audio)
+        PlatformMediaSessionManager::sharedManager().addAudioCaptureSource(*this);
+
     if (auto document = this->document()) {
         if (document->page() && document->page()->mutedState())
             setMuted(true);
@@ -100,6 +102,9 @@
         return;
 
     allCaptureTracks().remove(this);
+
+    if (m_private->type() == RealtimeMediaSource::Type::Audio)
+        PlatformMediaSessionManager::sharedManager().removeAudioCaptureSource(*this);
 }
 
 const AtomString& MediaStreamTrack::kind() const
@@ -536,9 +541,6 @@
 
 void MediaStreamTrack::configureTrackRendering()
 {
-    if (m_mediaSession && m_private->type() == RealtimeMediaSource::Type::Audio)
-        m_mediaSession->canProduceAudioChanged();
-
     if (auto document = this->document())
         document->updateIsPlayingMedia();
 
@@ -580,48 +582,12 @@
     return downcast<Document>(scriptExecutionContext());
 }
 
-PlatformMediaSession::MediaType MediaStreamTrack::mediaType() const
+bool MediaStreamTrack::isCapturingAudio() const
 {
-    return (isCaptureTrack() && canProduceAudio()) ? PlatformMediaSession::MediaStreamCapturingAudio : PlatformMediaSession::None;
+    ASSERT(isCaptureTrack() && m_private->type() == RealtimeMediaSource::Type::Audio);
+    return !ended() && !muted();
 }
 
-PlatformMediaSession::MediaType MediaStreamTrack::presentationType() const
-{
-    return mediaType();
-}
-
-void MediaStreamTrack::mayResumePlayback(bool)
-{
-    // FIXME: should a media stream track pay attention to this directly, or only when attached to a media element?
-}
-
-void MediaStreamTrack::suspendPlayback()
-{
-    // FIXME: should a media stream track pay attention to this directly, or only when attached to a media element?
-}
-
-String MediaStreamTrack::sourceApplicationIdentifier() const
-{
-    auto* document = this->document();
-    if (document && document->frame()) {
-        if (auto* networkingContext = document->frame()->loader().networkingContext())
-            return networkingContext->sourceApplicationIdentifier();
-    }
-
-    return emptyString();
-}
-
-bool MediaStreamTrack::canProduceAudio() const
-{
-    return m_private->type() == RealtimeMediaSource::Type::Audio && !ended() && !muted();
-}
-
-DocumentIdentifier MediaStreamTrack::hostingDocumentIdentifier() const
-{
-    auto* document = downcast<Document>(m_scriptExecutionContext);
-    return document ? document->identifier() : DocumentIdentifier { };
-}
-
 #if !RELEASE_LOG_DISABLED
 WTFLogChannel& MediaStreamTrack::logChannel() const
 {

Modified: trunk/Source/WebCore/Modules/mediastream/MediaStreamTrack.h (257038 => 257039)


--- trunk/Source/WebCore/Modules/mediastream/MediaStreamTrack.h	2020-02-20 03:48:16 UTC (rev 257038)
+++ trunk/Source/WebCore/Modules/mediastream/MediaStreamTrack.h	2020-02-20 03:51:21 UTC (rev 257039)
@@ -53,7 +53,7 @@
     , public ActiveDOMObject
     , public EventTargetWithInlineData
     , private MediaStreamTrackPrivate::Observer
-    , private PlatformMediaSessionClient
+    , private PlatformMediaSession::AudioCaptureSource
 #if !RELEASE_LOG_DISABLED
     , private LoggerHelper
 #endif
@@ -190,19 +190,8 @@
     void trackSettingsChanged(MediaStreamTrackPrivate&) final;
     void trackEnabledChanged(MediaStreamTrackPrivate&) final;
 
-    // PlatformMediaSessionClient
-    PlatformMediaSession::MediaType mediaType() const final;
-    PlatformMediaSession::MediaType presentationType() const final;
-    void mayResumePlayback(bool shouldResume) final;
-    void suspendPlayback() final;
-    bool canReceiveRemoteControlCommands() const final { return false; }
-    void didReceiveRemoteControlCommand(PlatformMediaSession::RemoteControlCommandType, const PlatformMediaSession::RemoteCommandArgument*) final { }
-    bool supportsSeeking() const final { return false; }
-    bool shouldOverrideBackgroundPlaybackRestriction(PlatformMediaSession::InterruptionType) const final { return false; }
-    String sourceApplicationIdentifier() const final;
-    bool canProduceAudio() const final;
-    DocumentIdentifier hostingDocumentIdentifier() const final;
-    bool shouldOverridePauseDuringRouteChange() const final { return true; }
+    // PlatformMediaSession::AudioCaptureSource
+    bool isCapturingAudio() const final;
 
 #if !RELEASE_LOG_DISABLED
     const char* logClassName() const final { return "MediaStreamTrack"; }
@@ -216,7 +205,6 @@
 
     bool m_ended { false };
     const bool m_isCaptureTrack { false };
-    std::unique_ptr<PlatformMediaSession> m_mediaSession;
 };
 
 typedef Vector<RefPtr<MediaStreamTrack>> MediaStreamTrackVector;

Modified: trunk/Source/WebCore/Modules/webaudio/AudioContext.h (257038 => 257039)


--- trunk/Source/WebCore/Modules/webaudio/AudioContext.h	2020-02-20 03:48:16 UTC (rev 257038)
+++ trunk/Source/WebCore/Modules/webaudio/AudioContext.h	2020-02-20 03:51:21 UTC (rev 257039)
@@ -347,8 +347,8 @@
     void derefUnfinishedSourceNodes();
 
     // PlatformMediaSessionClient
-    PlatformMediaSession::MediaType mediaType() const override { return PlatformMediaSession::WebAudio; }
-    PlatformMediaSession::MediaType presentationType() const override { return PlatformMediaSession::WebAudio; }
+    PlatformMediaSession::MediaType mediaType() const override { return PlatformMediaSession::MediaType::WebAudio; }
+    PlatformMediaSession::MediaType presentationType() const override { return PlatformMediaSession::MediaType::WebAudio; }
     void mayResumePlayback(bool shouldResume) override;
     void suspendPlayback() override;
     bool canReceiveRemoteControlCommands() const override { return false; }

Modified: trunk/Source/WebCore/html/HTMLAudioElement.h (257038 => 257039)


--- trunk/Source/WebCore/html/HTMLAudioElement.h	2020-02-20 03:48:16 UTC (rev 257038)
+++ trunk/Source/WebCore/html/HTMLAudioElement.h	2020-02-20 03:51:21 UTC (rev 257039)
@@ -43,7 +43,7 @@
 private:
     HTMLAudioElement(const QualifiedName&, Document&, bool);
 
-    PlatformMediaSession::MediaType presentationType() const final { return PlatformMediaSession::Audio; }
+    PlatformMediaSession::MediaType presentationType() const final { return PlatformMediaSession::MediaType::Audio; }
 };
 
 } // namespace WebCore

Modified: trunk/Source/WebCore/html/HTMLMediaElement.cpp (257038 => 257039)


--- trunk/Source/WebCore/html/HTMLMediaElement.cpp	2020-02-20 03:48:16 UTC (rev 257038)
+++ trunk/Source/WebCore/html/HTMLMediaElement.cpp	2020-02-20 03:51:21 UTC (rev 257039)
@@ -6916,7 +6916,7 @@
     if (PlatformMediaSessionManager::sharedManager().processIsSuspended())
         return SleepType::None;
 
-    bool shouldBeAbleToSleep = mediaType() != PlatformMediaSession::VideoAudio;
+    bool shouldBeAbleToSleep = mediaType() != PlatformMediaSession::MediaType::VideoAudio;
 #if ENABLE(MEDIA_STREAM)
     // Remote media stream video tracks may have their corresponding audio tracks being played outside of the media element. Let's ensure to not IDLE the screen in that case.
     // FIXME: We should check that audio is being/to be played. Ideally, we would come up with a media stream agnostic heuristisc.
@@ -7517,8 +7517,8 @@
 {
     if (m_player && m_readyState >= HAVE_METADATA) {
         if (hasVideo() && hasAudio() && !muted())
-            return PlatformMediaSession::VideoAudio;
-        return hasVideo() ? PlatformMediaSession::Video : PlatformMediaSession::Audio;
+            return PlatformMediaSession::MediaType::VideoAudio;
+        return hasVideo() ? PlatformMediaSession::MediaType::Video : PlatformMediaSession::MediaType::Audio;
     }
 
     return presentationType();
@@ -7527,9 +7527,9 @@
 PlatformMediaSession::MediaType HTMLMediaElement::presentationType() const
 {
     if (hasTagName(HTMLNames::videoTag))
-        return muted() ? PlatformMediaSession::Video : PlatformMediaSession::VideoAudio;
+        return muted() ? PlatformMediaSession::MediaType::Video : PlatformMediaSession::MediaType::VideoAudio;
 
-    return PlatformMediaSession::Audio;
+    return PlatformMediaSession::MediaType::Audio;
 }
 
 PlatformMediaSession::DisplayType HTMLMediaElement::displayType() const

Modified: trunk/Source/WebCore/html/HTMLVideoElement.h (257038 => 257039)


--- trunk/Source/WebCore/html/HTMLVideoElement.h	2020-02-20 03:48:16 UTC (rev 257038)
+++ trunk/Source/WebCore/html/HTMLVideoElement.h	2020-02-20 03:51:21 UTC (rev 257039)
@@ -127,7 +127,7 @@
     void didMoveToNewDocument(Document& oldDocument, Document& newDocument) final;
     void setDisplayMode(DisplayMode) final;
 
-    PlatformMediaSession::MediaType presentationType() const final { return PlatformMediaSession::Video; }
+    PlatformMediaSession::MediaType presentationType() const final { return PlatformMediaSession::MediaType::Video; }
 
     std::unique_ptr<HTMLImageLoader> m_imageLoader;
 

Modified: trunk/Source/WebCore/html/MediaElementSession.cpp (257038 => 257039)


--- trunk/Source/WebCore/html/MediaElementSession.cpp	2020-02-20 03:48:16 UTC (rev 257038)
+++ trunk/Source/WebCore/html/MediaElementSession.cpp	2020-02-20 03:51:21 UTC (rev 257039)
@@ -436,7 +436,7 @@
         return true;
     }
 
-    if (client().presentationType() == Audio && purpose == PlaybackControlsPurpose::ControlsManager) {
+    if (client().presentationType() == MediaType::Audio && purpose == PlaybackControlsPurpose::ControlsManager) {
         if (!hasBehaviorRestriction(RequireUserGestureToControlControlsManager) || m_element.document().processingUserGestureForMedia()) {
             INFO_LOG(LOGIDENTIFIER, "returning TRUE: audio element with user gesture");
             return true;

Modified: trunk/Source/WebCore/html/MediaElementSession.h (257038 => 257039)


--- trunk/Source/WebCore/html/MediaElementSession.h	2020-02-20 03:48:16 UTC (rev 257038)
+++ trunk/Source/WebCore/html/MediaElementSession.h	2020-02-20 03:51:21 UTC (rev 257039)
@@ -153,9 +153,9 @@
 
     static bool isMediaElementSessionMediaType(MediaType type)
     {
-        return type == Video
-            || type == Audio
-            || type == VideoAudio;
+        return type == MediaType::Video
+            || type == MediaType::Audio
+            || type == MediaType::VideoAudio;
     }
 
 #if !RELEASE_LOG_DISABLED

Modified: trunk/Source/WebCore/platform/audio/PlatformMediaSession.cpp (257038 => 257039)


--- trunk/Source/WebCore/platform/audio/PlatformMediaSession.cpp	2020-02-20 03:48:16 UTC (rev 257038)
+++ trunk/Source/WebCore/platform/audio/PlatformMediaSession.cpp	2020-02-20 03:51:21 UTC (rev 257039)
@@ -123,7 +123,6 @@
     , m_logIdentifier(uniqueLogIdentifier())
 #endif
 {
-    ASSERT(m_client.mediaType() >= None && m_client.mediaType() <= MediaStreamCapturingAudio);
     manager.addSession(*this);
 }
 
@@ -351,7 +350,7 @@
 
 bool PlatformMediaSession::activeAudioSessionRequired() const
 {
-    if (mediaType() == PlatformMediaSession::None)
+    if (mediaType() == PlatformMediaSession::MediaType::None)
         return false;
     if (state() != PlatformMediaSession::State::Playing)
         return false;

Modified: trunk/Source/WebCore/platform/audio/PlatformMediaSession.h (257038 => 257039)


--- trunk/Source/WebCore/platform/audio/PlatformMediaSession.h	2020-02-20 03:48:16 UTC (rev 257038)
+++ trunk/Source/WebCore/platform/audio/PlatformMediaSession.h	2020-02-20 03:51:21 UTC (rev 257039)
@@ -59,13 +59,12 @@
 
     virtual ~PlatformMediaSession();
 
-    enum MediaType {
+    enum class MediaType {
         None = 0,
         Video,
         VideoAudio,
         Audio,
         WebAudio,
-        MediaStreamCapturingAudio,
     };
     MediaType mediaType() const;
     MediaType presentationType() const;
@@ -190,6 +189,12 @@
     bool canPlayConcurrently(const PlatformMediaSession&) const;
     bool shouldOverridePauseDuringRouteChange() const;
 
+    class AudioCaptureSource : public CanMakeWeakPtr<AudioCaptureSource> {
+    public:
+        virtual ~AudioCaptureSource() = default;
+        virtual bool isCapturingAudio() const = 0;
+    };
+
 protected:
     PlatformMediaSession(PlatformMediaSessionManager&, PlatformMediaSessionClient&);
     PlatformMediaSessionClient& client() const { return m_client; }

Modified: trunk/Source/WebCore/platform/audio/PlatformMediaSessionManager.cpp (257038 => 257039)


--- trunk/Source/WebCore/platform/audio/PlatformMediaSessionManager.cpp	2020-02-20 03:48:16 UTC (rev 257038)
+++ trunk/Source/WebCore/platform/audio/PlatformMediaSessionManager.cpp	2020-02-20 03:51:21 UTC (rev 257039)
@@ -35,20 +35,30 @@
 
 #if ENABLE(VIDEO) || ENABLE(WEB_AUDIO)
 
-#if !PLATFORM(COCOA)
-static PlatformMediaSessionManager* platformMediaSessionManager = nullptr;
+static std::unique_ptr<PlatformMediaSessionManager>& sharedPlatformMediaSessionManager()
+{
+    static NeverDestroyed<std::unique_ptr<PlatformMediaSessionManager>> platformMediaSessionManager;
+    return platformMediaSessionManager.get();
+}
 
 PlatformMediaSessionManager& PlatformMediaSessionManager::sharedManager()
 {
-    if (!platformMediaSessionManager)
-        platformMediaSessionManager = new PlatformMediaSessionManager;
-    return *platformMediaSessionManager;
+    auto& manager = sharedPlatformMediaSessionManager();
+    if (!manager)
+        manager = PlatformMediaSessionManager::create();
+    return *manager;
 }
 
 PlatformMediaSessionManager* PlatformMediaSessionManager::sharedManagerIfExists()
 {
-    return platformMediaSessionManager;
+    return sharedPlatformMediaSessionManager().get();
 }
+
+#if !PLATFORM(COCOA)
+std::unique_ptr<PlatformMediaSessionManager> PlatformMediaSessionManager::create()
+{
+    return std::unique_ptr<PlatformMediaSessionManager>(new PlatformMediaSessionManager);
+}
 #endif // !PLATFORM(COCOA)
 
 void PlatformMediaSessionManager::updateNowPlayingInfoIfNecessary()
@@ -66,19 +76,21 @@
     resetRestrictions();
 }
 
+static inline unsigned indexFromMediaType(PlatformMediaSession::MediaType type)
+{
+    return static_cast<unsigned>(type);
+}
+
 void PlatformMediaSessionManager::resetRestrictions()
 {
-    m_restrictions[PlatformMediaSession::Video] = NoRestrictions;
-    m_restrictions[PlatformMediaSession::Audio] = NoRestrictions;
-    m_restrictions[PlatformMediaSession::VideoAudio] = NoRestrictions;
-    m_restrictions[PlatformMediaSession::WebAudio] = NoRestrictions;
-    m_restrictions[PlatformMediaSession::MediaStreamCapturingAudio] = NoRestrictions;
+    m_restrictions[indexFromMediaType(PlatformMediaSession::MediaType::Video)] = NoRestrictions;
+    m_restrictions[indexFromMediaType(PlatformMediaSession::MediaType::Audio)] = NoRestrictions;
+    m_restrictions[indexFromMediaType(PlatformMediaSession::MediaType::VideoAudio)] = NoRestrictions;
+    m_restrictions[indexFromMediaType(PlatformMediaSession::MediaType::WebAudio)] = NoRestrictions;
 }
 
 bool PlatformMediaSessionManager::has(PlatformMediaSession::MediaType type) const
 {
-    ASSERT(type >= PlatformMediaSession::None && type <= PlatformMediaSession::MediaStreamCapturingAudio);
-
     return anyOfSessions([type] (auto& session) {
         return session.mediaType() == type;
     });
@@ -100,8 +112,6 @@
 
 int PlatformMediaSessionManager::count(PlatformMediaSession::MediaType type) const
 {
-    ASSERT(type >= PlatformMediaSession::None && type <= PlatformMediaSession::MediaStreamCapturingAudio);
-
     int count = 0;
     for (const auto& session : m_sessions) {
         if (session->mediaType() == type)
@@ -111,6 +121,16 @@
     return count;
 }
 
+int PlatformMediaSessionManager::countActiveAudioCaptureSources()
+{
+    int count = 0;
+    for (const auto& source : m_audioCaptureSources) {
+        if (source.isCapturingAudio())
+            ++count;
+    }
+    return count;
+}
+
 void PlatformMediaSessionManager::beginInterruption(PlatformMediaSession::InterruptionType type)
 {
     ALWAYS_LOG(LOGIDENTIFIER);
@@ -179,20 +199,17 @@
 
 void PlatformMediaSessionManager::addRestriction(PlatformMediaSession::MediaType type, SessionRestrictions restriction)
 {
-    ASSERT(type > PlatformMediaSession::None && type <= PlatformMediaSession::MediaStreamCapturingAudio);
-    m_restrictions[type] |= restriction;
+    m_restrictions[indexFromMediaType(type)] |= restriction;
 }
 
 void PlatformMediaSessionManager::removeRestriction(PlatformMediaSession::MediaType type, SessionRestrictions restriction)
 {
-    ASSERT(type > PlatformMediaSession::None && type <= PlatformMediaSession::MediaStreamCapturingAudio);
-    m_restrictions[type] &= ~restriction;
+    m_restrictions[indexFromMediaType(type)] &= ~restriction;
 }
 
 PlatformMediaSessionManager::SessionRestrictions PlatformMediaSessionManager::restrictions(PlatformMediaSession::MediaType type)
 {
-    ASSERT(type > PlatformMediaSession::None && type <= PlatformMediaSession::MediaStreamCapturingAudio);
-    return m_restrictions[type];
+    return m_restrictions[indexFromMediaType(type)];
 }
 
 bool PlatformMediaSessionManager::sessionWillBeginPlayback(PlatformMediaSession& session)
@@ -199,8 +216,8 @@
 {
     setCurrentSession(session);
 
-    PlatformMediaSession::MediaType sessionType = session.mediaType();
-    SessionRestrictions restrictions = m_restrictions[sessionType];
+    auto sessionType = session.mediaType();
+    auto restrictions = this->restrictions(sessionType);
     if (session.state() == PlatformMediaSession::Interrupted && restrictions & InterruptedPlaybackNotPermitted) {
         ALWAYS_LOG(LOGIDENTIFIER, session.logIdentifier(), " returning false because session.state() is Interrupted, and InterruptedPlaybackNotPermitted");
         return false;
@@ -304,7 +321,7 @@
     ALWAYS_LOG(LOGIDENTIFIER);
 
     forEachMatchingSession([&](auto& session) {
-        return m_restrictions[session.mediaType()] & InactiveProcessPlaybackRestricted;
+        return restrictions(session.mediaType()) & InactiveProcessPlaybackRestricted;
     }, [](auto& session) {
         session.beginInterruption(PlatformMediaSession::ProcessInactive);
     });
@@ -315,7 +332,7 @@
     ALWAYS_LOG(LOGIDENTIFIER);
 
     forEachMatchingSession([&](auto& session) {
-        return m_restrictions[session.mediaType()] & InactiveProcessPlaybackRestricted;
+        return restrictions(session.mediaType()) & InactiveProcessPlaybackRestricted;
     }, [](auto& session) {
         session.endInterruption(PlatformMediaSession::MayResumePlaying);
     });
@@ -331,9 +348,9 @@
     m_isApplicationInBackground = true;
 
     forEachSession([&] (auto& session) {
-        if (suspendedUnderLock && m_restrictions[session.mediaType()] & SuspendedUnderLockPlaybackRestricted)
+        if (suspendedUnderLock && restrictions(session.mediaType()) & SuspendedUnderLockPlaybackRestricted)
             session.beginInterruption(PlatformMediaSession::SuspendedUnderLock);
-        else if (m_restrictions[session.mediaType()] & BackgroundProcessPlaybackRestricted)
+        else if (restrictions(session.mediaType()) & BackgroundProcessPlaybackRestricted)
             session.beginInterruption(PlatformMediaSession::EnteringBackground);
     });
 }
@@ -348,7 +365,7 @@
     m_isApplicationInBackground = false;
 
     forEachMatchingSession([&](auto& session) {
-        return (suspendedUnderLock && m_restrictions[session.mediaType()] & SuspendedUnderLockPlaybackRestricted) || m_restrictions[session.mediaType()] & BackgroundProcessPlaybackRestricted;
+        return (suspendedUnderLock && restrictions(session.mediaType()) & SuspendedUnderLockPlaybackRestricted) || restrictions(session.mediaType()) & BackgroundProcessPlaybackRestricted;
     }, [](auto& session) {
         session.endInterruption(PlatformMediaSession::MayResumePlaying);
     });
@@ -400,7 +417,7 @@
 
 void PlatformMediaSessionManager::sessionIsPlayingToWirelessPlaybackTargetChanged(PlatformMediaSession& session)
 {
-    if (!m_isApplicationInBackground || !(m_restrictions[session.mediaType()] & BackgroundProcessPlaybackRestricted))
+    if (!m_isApplicationInBackground || !(restrictions(session.mediaType()) & BackgroundProcessPlaybackRestricted))
         return;
 
     if (session.state() != PlatformMediaSession::Interrupted)
@@ -540,6 +557,21 @@
     });
 }
 
+void PlatformMediaSessionManager::addAudioCaptureSource(PlatformMediaSession::AudioCaptureSource& source)
+{
+    ASSERT(!m_audioCaptureSources.contains(source));
+    m_audioCaptureSources.add(source);
+    updateSessionState();
+}
+
+
+void PlatformMediaSessionManager::removeAudioCaptureSource(PlatformMediaSession::AudioCaptureSource& source)
+{
+    ASSERT(m_audioCaptureSources.contains(source));
+    m_audioCaptureSources.remove(source);
+    updateSessionState();
+}
+
 #if USE(AUDIO_SESSION)
 void PlatformMediaSessionManager::maybeDeactivateAudioSession()
 {

Modified: trunk/Source/WebCore/platform/audio/PlatformMediaSessionManager.h (257038 => 257039)


--- trunk/Source/WebCore/platform/audio/PlatformMediaSessionManager.h	2020-02-20 03:48:16 UTC (rev 257038)
+++ trunk/Source/WebCore/platform/audio/PlatformMediaSessionManager.h	2020-02-20 03:51:21 UTC (rev 257039)
@@ -34,6 +34,7 @@
 #include <pal/system/SystemSleepListener.h>
 #include <wtf/AggregateLogger.h>
 #include <wtf/Vector.h>
+#include <wtf/WeakHashSet.h>
 #include <wtf/WeakPtr.h>
 
 namespace WebCore {
@@ -54,6 +55,7 @@
 public:
     WEBCORE_EXPORT static PlatformMediaSessionManager* sharedManagerIfExists();
     WEBCORE_EXPORT static PlatformMediaSessionManager& sharedManager();
+    WEBCORE_EXPORT static std::unique_ptr<PlatformMediaSessionManager> create();
 
     static void updateNowPlayingInfoIfNecessary();
 
@@ -138,6 +140,9 @@
 
     bool processIsSuspended() const { return m_processIsSuspended; }
 
+    WEBCORE_EXPORT void addAudioCaptureSource(PlatformMediaSession::AudioCaptureSource&);
+    WEBCORE_EXPORT void removeAudioCaptureSource(PlatformMediaSession::AudioCaptureSource&);
+
 protected:
     friend class PlatformMediaSession;
     explicit PlatformMediaSessionManager();
@@ -163,6 +168,8 @@
     WTFLogChannel& logChannel() const final;
 #endif
 
+    int countActiveAudioCaptureSources();
+
 private:
     friend class Internals;
 
@@ -183,7 +190,7 @@
 
     Vector<WeakPtr<PlatformMediaSession>> sessionsMatching(const Function<bool(const PlatformMediaSession&)>&) const;
 
-    SessionRestrictions m_restrictions[PlatformMediaSession::MediaStreamCapturingAudio + 1];
+    SessionRestrictions m_restrictions[static_cast<unsigned>(PlatformMediaSession::MediaType::WebAudio) + 1];
     mutable Vector<WeakPtr<PlatformMediaSession>> m_sessions;
     std::unique_ptr<RemoteCommandListener> m_remoteCommandListener;
     std::unique_ptr<PAL::SystemSleepListener> m_systemSleepListener;
@@ -204,6 +211,8 @@
     bool m_becameActive { false };
 #endif
 
+    WeakHashSet<PlatformMediaSession::AudioCaptureSource> m_audioCaptureSources;
+
 #if !RELEASE_LOG_DISABLED
     Ref<AggregateLogger> m_logger;
 #endif

Modified: trunk/Source/WebCore/platform/audio/cocoa/MediaSessionManagerCocoa.mm (257038 => 257039)


--- trunk/Source/WebCore/platform/audio/cocoa/MediaSessionManagerCocoa.mm	2020-02-20 03:48:16 UTC (rev 257038)
+++ trunk/Source/WebCore/platform/audio/cocoa/MediaSessionManagerCocoa.mm	2020-02-20 03:51:21 UTC (rev 257039)
@@ -45,28 +45,19 @@
 static const size_t kLowPowerVideoBufferSize = 4096;
 
 #if PLATFORM(MAC)
-static MediaSessionManagerCocoa* platformMediaSessionManager = nullptr;
-
-PlatformMediaSessionManager& PlatformMediaSessionManager::sharedManager()
+std::unique_ptr<PlatformMediaSessionManager> PlatformMediaSessionManager::create()
 {
-    if (!platformMediaSessionManager)
-        platformMediaSessionManager = new MediaSessionManagerCocoa;
-    return *platformMediaSessionManager;
+    return makeUnique<MediaSessionManagerCocoa>();
 }
+#endif // !PLATFORM(MAC)
 
-PlatformMediaSessionManager* PlatformMediaSessionManager::sharedManagerIfExists()
-{
-    return platformMediaSessionManager;
-}
-#endif
-
 void MediaSessionManagerCocoa::updateSessionState()
 {
-    int videoCount = count(PlatformMediaSession::Video);
-    int videoAudioCount = count(PlatformMediaSession::VideoAudio);
-    int audioCount = count(PlatformMediaSession::Audio);
-    int webAudioCount = count(PlatformMediaSession::WebAudio);
-    int captureCount = count(PlatformMediaSession::MediaStreamCapturingAudio);
+    int videoCount = count(PlatformMediaSession::MediaType::Video);
+    int videoAudioCount = count(PlatformMediaSession::MediaType::VideoAudio);
+    int audioCount = count(PlatformMediaSession::MediaType::Audio);
+    int webAudioCount = count(PlatformMediaSession::MediaType::WebAudio);
+    int captureCount = countActiveAudioCaptureSources();
     ALWAYS_LOG(LOGIDENTIFIER, "types: "
         "AudioCapture(", captureCount, "), "
         "Video(", videoCount, "), "
@@ -99,7 +90,7 @@
     bool hasAudibleAudioOrVideoMediaType = false;
     forEachSession([&hasAudibleAudioOrVideoMediaType] (auto& session) mutable {
         auto type = session.mediaType();
-        if ((type == PlatformMediaSession::VideoAudio || type == PlatformMediaSession::Audio) && session.canProduceAudio() && session.hasPlayedSinceLastInterruption())
+        if ((type == PlatformMediaSession::MediaType::VideoAudio || type == PlatformMediaSession::MediaType::Audio) && session.canProduceAudio() && session.hasPlayedSinceLastInterruption())
             hasAudibleAudioOrVideoMediaType = true;
         if (session.isPlayingToWirelessPlaybackTarget())
             hasAudibleAudioOrVideoMediaType = true;

Modified: trunk/Source/WebCore/platform/audio/ios/MediaSessionManagerIOS.mm (257038 => 257039)


--- trunk/Source/WebCore/platform/audio/ios/MediaSessionManagerIOS.mm	2020-02-20 03:48:16 UTC (rev 257038)
+++ trunk/Source/WebCore/platform/audio/ios/MediaSessionManagerIOS.mm	2020-02-20 03:51:21 UTC (rev 257039)
@@ -98,20 +98,11 @@
 
 namespace WebCore {
 
-static MediaSessionManageriOS* platformMediaSessionManager = nullptr;
-
-PlatformMediaSessionManager& PlatformMediaSessionManager::sharedManager()
+std::unique_ptr<PlatformMediaSessionManager> PlatformMediaSessionManager::create()
 {
-    if (!platformMediaSessionManager)
-        platformMediaSessionManager = new MediaSessionManageriOS;
-    return *platformMediaSessionManager;
+    return std::unique_ptr<MediaSessionManageriOS>(new MediaSessionManageriOS);
 }
 
-PlatformMediaSessionManager* PlatformMediaSessionManager::sharedManagerIfExists()
-{
-    return platformMediaSessionManager;
-}
-
 MediaSessionManageriOS::MediaSessionManageriOS()
     : MediaSessionManagerCocoa()
 {
@@ -143,11 +134,11 @@
 
     if (ramSize() < systemMemoryRequiredForVideoInBackgroundTabs) {
         ALWAYS_LOG(LOGIDENTIFIER, "restricting video in background tabs because system memory = ", ramSize());
-        addRestriction(PlatformMediaSession::Video, BackgroundTabPlaybackRestricted);
+        addRestriction(PlatformMediaSession::MediaType::Video, BackgroundTabPlaybackRestricted);
     }
 
-    addRestriction(PlatformMediaSession::Video, BackgroundProcessPlaybackRestricted);
-    addRestriction(PlatformMediaSession::VideoAudio, ConcurrentPlaybackNotPermitted | BackgroundProcessPlaybackRestricted | SuspendedUnderLockPlaybackRestricted);
+    addRestriction(PlatformMediaSession::MediaType::Video, BackgroundProcessPlaybackRestricted);
+    addRestriction(PlatformMediaSession::MediaType::VideoAudio, ConcurrentPlaybackNotPermitted | BackgroundProcessPlaybackRestricted | SuspendedUnderLockPlaybackRestricted);
 }
 
 bool MediaSessionManageriOS::hasWirelessTargetsAvailable()

Modified: trunk/Source/WebCore/platform/mediastream/MediaStreamTrackPrivate.cpp (257038 => 257039)


--- trunk/Source/WebCore/platform/mediastream/MediaStreamTrackPrivate.cpp	2020-02-20 03:48:16 UTC (rev 257038)
+++ trunk/Source/WebCore/platform/mediastream/MediaStreamTrackPrivate.cpp	2020-02-20 03:51:21 UTC (rev 257039)
@@ -33,6 +33,7 @@
 #include "GraphicsContext.h"
 #include "IntRect.h"
 #include "Logging.h"
+#include "PlatformMediaSessionManager.h"
 #include <wtf/UUID.h>
 
 #if PLATFORM(COCOA)
@@ -302,6 +303,12 @@
     });
 }
 
+void MediaStreamTrackPrivate::audioUnitWillStart()
+{
+    if (!m_isEnded)
+        PlatformMediaSessionManager::sharedManager().sessionCanProduceAudioChanged();
+}
+
 #if !RELEASE_LOG_DISABLED
 WTFLogChannel& MediaStreamTrackPrivate::logChannel() const
 {

Modified: trunk/Source/WebCore/platform/mediastream/MediaStreamTrackPrivate.h (257038 => 257039)


--- trunk/Source/WebCore/platform/mediastream/MediaStreamTrackPrivate.h	2020-02-20 03:48:16 UTC (rev 257038)
+++ trunk/Source/WebCore/platform/mediastream/MediaStreamTrackPrivate.h	2020-02-20 03:51:21 UTC (rev 257039)
@@ -140,6 +140,7 @@
     bool preventSourceFromStopping() final;
     void videoSampleAvailable(MediaSample&) final;
     void audioSamplesAvailable(const MediaTime&, const PlatformAudioData&, const AudioStreamDescription&, size_t) final;
+    void audioUnitWillStart() final;
 
     void updateReadyState();
 

Modified: trunk/Source/WebCore/platform/mediastream/RealtimeMediaSource.h (257038 => 257039)


--- trunk/Source/WebCore/platform/mediastream/RealtimeMediaSource.h	2020-02-20 03:48:16 UTC (rev 257038)
+++ trunk/Source/WebCore/platform/mediastream/RealtimeMediaSource.h	2020-02-20 03:51:21 UTC (rev 257039)
@@ -83,6 +83,7 @@
         virtual void sourceStopped() { }
         virtual void sourceMutedChanged() { }
         virtual void sourceSettingsChanged() { }
+        virtual void audioUnitWillStart() { }
 
         // Observer state queries.
         virtual bool preventSourceFromStopping() { return false; }

Modified: trunk/Source/WebCore/platform/mediastream/mac/BaseAudioSharedUnit.cpp (257038 => 257039)


--- trunk/Source/WebCore/platform/mediastream/mac/BaseAudioSharedUnit.cpp	2020-02-20 03:48:16 UTC (rev 257038)
+++ trunk/Source/WebCore/platform/mediastream/mac/BaseAudioSharedUnit.cpp	2020-02-20 03:51:21 UTC (rev 257039)
@@ -30,6 +30,7 @@
 
 #include "AudioSession.h"
 #include "CoreAudioCaptureSource.h"
+#include "DeprecatedGlobalSettings.h"
 #include "Logging.h"
 #include "PlatformMediaSessionManager.h"
 
@@ -93,12 +94,10 @@
 
 OSStatus BaseAudioSharedUnit::startUnit()
 {
-#if PLATFORM(IOS_FAMILY)
-    if (!m_disableAudioSessionCheck) {
-        PlatformMediaSessionManager::sharedManager().sessionCanProduceAudioChanged();
-        ASSERT(AudioSession::sharedSession().category() == AudioSession::PlayAndRecord);
-    }
-#endif
+    forEachClient([](auto& client) {
+        client.audioUnitWillStart();
+    });
+    ASSERT(!DeprecatedGlobalSettings::shouldManageAudioSessionCategory() || AudioSession::sharedSession().category() == AudioSession::PlayAndRecord);
 
     if (auto error = startInternal()) {
         captureFailed();

Modified: trunk/Source/WebCore/platform/mediastream/mac/BaseAudioSharedUnit.h (257038 => 257039)


--- trunk/Source/WebCore/platform/mediastream/mac/BaseAudioSharedUnit.h	2020-02-20 03:48:16 UTC (rev 257038)
+++ trunk/Source/WebCore/platform/mediastream/mac/BaseAudioSharedUnit.h	2020-02-20 03:51:21 UTC (rev 257039)
@@ -76,8 +76,6 @@
 
     virtual CapabilityValueOrRange sampleRateCapacities() const = 0;
 
-    void setDisableAudioSessionCheck(bool value) { m_disableAudioSessionCheck = value; };
-
 protected:
     void forEachClient(const Function<void(CoreAudioCaptureSource&)>&) const;
     bool hasClients() const { return !m_clients.isEmpty(); }
@@ -105,7 +103,6 @@
 
     HashSet<CoreAudioCaptureSource*> m_clients;
     mutable RecursiveLock m_clientsLock;
-    bool m_disableAudioSessionCheck { false };
 };
 
 } // namespace WebCore

Modified: trunk/Source/WebCore/platform/mediastream/mac/CoreAudioCaptureSource.cpp (257038 => 257039)


--- trunk/Source/WebCore/platform/mediastream/mac/CoreAudioCaptureSource.cpp	2020-02-20 03:48:16 UTC (rev 257038)
+++ trunk/Source/WebCore/platform/mediastream/mac/CoreAudioCaptureSource.cpp	2020-02-20 03:51:21 UTC (rev 257039)
@@ -803,6 +803,13 @@
     unit().delaySamples(seconds);
 }
 
+void CoreAudioCaptureSource::audioUnitWillStart()
+{
+    forEachObserver([](auto& observer) {
+        observer.audioUnitWillStart();
+    });
+}
+
 } // namespace WebCore
 
 #endif // ENABLE(MEDIA_STREAM)

Modified: trunk/Source/WebCore/platform/mediastream/mac/CoreAudioCaptureSource.h (257038 => 257039)


--- trunk/Source/WebCore/platform/mediastream/mac/CoreAudioCaptureSource.h	2020-02-20 03:48:16 UTC (rev 257038)
+++ trunk/Source/WebCore/platform/mediastream/mac/CoreAudioCaptureSource.h	2020-02-20 03:51:21 UTC (rev 257039)
@@ -89,6 +89,7 @@
     CaptureDevice::DeviceType deviceType() const final { return CaptureDevice::DeviceType::Microphone; }
 
     void initializeToStartProducingData();
+    void audioUnitWillStart();
 
 #if !RELEASE_LOG_DISABLED
     const char* logClassName() const override { return "CoreAudioCaptureSource"; }

Modified: trunk/Source/WebCore/testing/Internals.cpp (257038 => 257039)


--- trunk/Source/WebCore/testing/Internals.cpp	2020-02-20 03:48:16 UTC (rev 257038)
+++ trunk/Source/WebCore/testing/Internals.cpp	2020-02-20 03:51:21 UTC (rev 257039)
@@ -3914,26 +3914,24 @@
 static PlatformMediaSession::MediaType mediaTypeFromString(const String& mediaTypeString)
 {
     if (equalLettersIgnoringASCIICase(mediaTypeString, "video"))
-        return PlatformMediaSession::Video;
+        return PlatformMediaSession::MediaType::Video;
     if (equalLettersIgnoringASCIICase(mediaTypeString, "audio"))
-        return PlatformMediaSession::Audio;
+        return PlatformMediaSession::MediaType::Audio;
     if (equalLettersIgnoringASCIICase(mediaTypeString, "videoaudio"))
-        return PlatformMediaSession::VideoAudio;
+        return PlatformMediaSession::MediaType::VideoAudio;
     if (equalLettersIgnoringASCIICase(mediaTypeString, "webaudio"))
-        return PlatformMediaSession::WebAudio;
-    if (equalLettersIgnoringASCIICase(mediaTypeString, "mediastreamcapturingaudio"))
-        return PlatformMediaSession::MediaStreamCapturingAudio;
+        return PlatformMediaSession::MediaType::WebAudio;
 
-    return PlatformMediaSession::None;
+    return PlatformMediaSession::MediaType::None;
 }
 
 ExceptionOr<void> Internals::setMediaSessionRestrictions(const String& mediaTypeString, StringView restrictionsString)
 {
-    PlatformMediaSession::MediaType mediaType = mediaTypeFromString(mediaTypeString);
-    if (mediaType == PlatformMediaSession::None)
+    auto mediaType = mediaTypeFromString(mediaTypeString);
+    if (mediaType == PlatformMediaSession::MediaType::None)
         return Exception { InvalidAccessError };
 
-    PlatformMediaSessionManager::SessionRestrictions restrictions = PlatformMediaSessionManager::sharedManager().restrictions(mediaType);
+    auto restrictions = PlatformMediaSessionManager::sharedManager().restrictions(mediaType);
     PlatformMediaSessionManager::sharedManager().removeRestriction(mediaType, restrictions);
 
     restrictions = PlatformMediaSessionManager::NoRestrictions;
@@ -3959,7 +3957,7 @@
 ExceptionOr<String> Internals::mediaSessionRestrictions(const String& mediaTypeString) const
 {
     PlatformMediaSession::MediaType mediaType = mediaTypeFromString(mediaTypeString);
-    if (mediaType == PlatformMediaSession::None)
+    if (mediaType == PlatformMediaSession::MediaType::None)
         return Exception { InvalidAccessError };
 
     PlatformMediaSessionManager::SessionRestrictions restrictions = PlatformMediaSessionManager::sharedManager().restrictions(mediaType);

Modified: trunk/Source/WebKit/ChangeLog (257038 => 257039)


--- trunk/Source/WebKit/ChangeLog	2020-02-20 03:48:16 UTC (rev 257038)
+++ trunk/Source/WebKit/ChangeLog	2020-02-20 03:51:21 UTC (rev 257039)
@@ -1,3 +1,27 @@
+2020-02-19  Youenn Fablet  <you...@apple.com>
+
+        Add support for AudioSession handling in GPUProcess for capture
+        https://bugs.webkit.org/show_bug.cgi?id=207950
+
+        Reviewed by Eric Carlson.
+
+        Create a session manager for the GPUConnectionToWebProcess.
+        Pass it to any audio capture source proxy.
+        For UIProcess audio capture, we still use the shared manager.
+
+        * GPUProcess/GPUConnectionToWebProcess.cpp:
+        (WebKit::GPUConnectionToWebProcess::sessionManager):
+        * GPUProcess/GPUConnectionToWebProcess.h:
+        * GPUProcess/GPUProcess.cpp:
+        (WebKit::GPUProcess::setMockCaptureDevicesEnabled):
+        * UIProcess/Cocoa/UserMediaCaptureManagerProxy.cpp:
+        (WebKit::UserMediaCaptureManagerProxy::SourceProxy::SourceProxy):
+        (WebKit::UserMediaCaptureManagerProxy::SourceProxy::~SourceProxy):
+        (WebKit::UserMediaCaptureManagerProxy::createMediaSourceForCaptureDeviceWithConstraints):
+        (WebKit::UserMediaCaptureManagerProxy::clone):
+        * UIProcess/Cocoa/UserMediaCaptureManagerProxy.h:
+        * UIProcess/WebProcessProxy.cpp:
+
 2020-02-19  Per Arne Vollan  <pvol...@apple.com>
 
         [iOS] Rename NSUserDefaultsSPI.h

Modified: trunk/Source/WebKit/GPUProcess/GPUConnectionToWebProcess.cpp (257038 => 257039)


--- trunk/Source/WebKit/GPUProcess/GPUConnectionToWebProcess.cpp	2020-02-20 03:48:16 UTC (rev 257038)
+++ trunk/Source/WebKit/GPUProcess/GPUConnectionToWebProcess.cpp	2020-02-20 03:51:21 UTC (rev 257039)
@@ -60,6 +60,7 @@
 #include "WebProcessMessages.h"
 
 #include <WebCore/MockRealtimeMediaSourceCenter.h>
+#include <WebCore/PlatformMediaSessionManager.h>
 
 namespace WebKit {
 using namespace WebCore;
@@ -77,6 +78,7 @@
     void addMessageReceiver(IPC::StringReference messageReceiverName, IPC::MessageReceiver& receiver) final { }
     void removeMessageReceiver(IPC::StringReference messageReceiverName) final { }
     IPC::Connection& connection() final { return m_process.connection(); }
+    PlatformMediaSessionManager& sessionManager() final { return m_process.sessionManager(); }
 
     GPUConnectionToWebProcess& m_process;
 };
@@ -273,6 +275,13 @@
 }
 #endif
 
+PlatformMediaSessionManager& GPUConnectionToWebProcess::sessionManager()
+{
+    if (!m_sessionManager)
+        m_sessionManager = PlatformMediaSessionManager::create();
+    return *m_sessionManager;
+}
+
 } // namespace WebKit
 
 #endif // ENABLE(GPU_PROCESS)

Modified: trunk/Source/WebKit/GPUProcess/GPUConnectionToWebProcess.h (257038 => 257039)


--- trunk/Source/WebKit/GPUProcess/GPUConnectionToWebProcess.h	2020-02-20 03:48:16 UTC (rev 257038)
+++ trunk/Source/WebKit/GPUProcess/GPUConnectionToWebProcess.h	2020-02-20 03:51:21 UTC (rev 257039)
@@ -36,6 +36,10 @@
 #include <wtf/RefCounted.h>
 #include <wtf/UniqueRef.h>
 
+namespace WebCore {
+class PlatformMediaSessionManager;
+}
+
 namespace WebKit {
 
 class GPUProcess;
@@ -70,6 +74,8 @@
     const String& mediaKeysStorageDirectory() const;
 #endif
 
+    WebCore::PlatformMediaSessionManager& sessionManager();
+
 private:
     GPUConnectionToWebProcess(GPUProcess&, WebCore::ProcessIdentifier, IPC::Connection::Identifier, PAL::SessionID);
 
@@ -111,6 +117,7 @@
 #if PLATFORM(COCOA) && USE(LIBWEBRTC)
     std::unique_ptr<LibWebRTCCodecsProxy> m_libWebRTCCodecsProxy;
 #endif
+    std::unique_ptr<WebCore::PlatformMediaSessionManager> m_sessionManager;
 };
 
 } // namespace WebKit

Modified: trunk/Source/WebKit/GPUProcess/GPUProcess.cpp (257038 => 257039)


--- trunk/Source/WebKit/GPUProcess/GPUProcess.cpp	2020-02-20 03:48:16 UTC (rev 257038)
+++ trunk/Source/WebKit/GPUProcess/GPUProcess.cpp	2020-02-20 03:51:21 UTC (rev 257039)
@@ -151,8 +151,6 @@
 void GPUProcess::setMockCaptureDevicesEnabled(bool isEnabled)
 {
 #if ENABLE(MEDIA_STREAM)
-    // FIXME: Enable the audio session check by implementing an AudioSession for the GPUProcess.
-    MockAudioSharedUnit::singleton().setDisableAudioSessionCheck(isEnabled);
     MockRealtimeMediaSourceCenter::setMockRealtimeMediaSourceCenterEnabled(isEnabled);
 #endif
 }

Modified: trunk/Source/WebKit/UIProcess/Cocoa/UserMediaCaptureManagerProxy.cpp (257038 => 257039)


--- trunk/Source/WebKit/UIProcess/Cocoa/UserMediaCaptureManagerProxy.cpp	2020-02-20 03:48:16 UTC (rev 257038)
+++ trunk/Source/WebKit/UIProcess/Cocoa/UserMediaCaptureManagerProxy.cpp	2020-02-20 03:51:21 UTC (rev 257039)
@@ -35,6 +35,7 @@
 #include "WebProcessProxy.h"
 #include <WebCore/CARingBuffer.h>
 #include <WebCore/MediaConstraints.h>
+#include <WebCore/PlatformMediaSessionManager.h>
 #include <WebCore/RealtimeMediaSourceCenter.h>
 #include <WebCore/RemoteVideoSample.h>
 #include <WebCore/WebAudioBufferList.h>
@@ -43,16 +44,23 @@
 namespace WebKit {
 using namespace WebCore;
 
-class UserMediaCaptureManagerProxy::SourceProxy : public RealtimeMediaSource::Observer, public SharedRingBufferStorage::Client {
+class UserMediaCaptureManagerProxy::SourceProxy
+    : public RealtimeMediaSource::Observer
+    , public SharedRingBufferStorage::Client
+    , public PlatformMediaSession::AudioCaptureSource {
     WTF_MAKE_FAST_ALLOCATED;
 public:
-    SourceProxy(RealtimeMediaSourceIdentifier id, Ref<IPC::Connection>&& connection, Ref<RealtimeMediaSource>&& source)
+    SourceProxy(RealtimeMediaSourceIdentifier id, PlatformMediaSessionManager& sessionManager, Ref<IPC::Connection>&& connection, Ref<RealtimeMediaSource>&& source)
         : m_id(id)
+        , m_sessionManager(makeWeakPtr(sessionManager))
         , m_connection(WTFMove(connection))
         , m_source(WTFMove(source))
         , m_ringBuffer(makeUniqueRef<SharedRingBufferStorage>(makeUniqueRef<SharedRingBufferStorage>(this)))
     {
         m_source->addObserver(*this);
+
+        if (m_source->type() == RealtimeMediaSource::Type::Audio)
+            sessionManager.addAudioCaptureSource(*this);
     }
 
     ~SourceProxy()
@@ -59,6 +67,9 @@
     {
         storage().invalidate();
         m_source->removeObserver(*this);
+
+        if (m_source->type() == RealtimeMediaSource::Type::Audio && m_sessionManager)
+            m_sessionManager->removeAudioCaptureSource(*this);
     }
 
     RealtimeMediaSource& source() { return m_source; }
@@ -66,6 +77,14 @@
     CAAudioStreamDescription& description() { return m_description; }
     int64_t numberOfFrames() { return m_numberOfFrames; }
 
+    bool isCapturingAudio() const final { return !m_isEnded && m_source->isProducingData(); }
+
+    void audioUnitWillStart() final
+    {
+        if (!m_isEnded && m_sessionManager)
+            m_sessionManager->sessionCanProduceAudioChanged();
+    }
+
     void start()
     {
         m_isEnded = false;
@@ -145,6 +164,7 @@
     }
 
     RealtimeMediaSourceIdentifier m_id;
+    WeakPtr<PlatformMediaSessionManager> m_sessionManager;
     Ref<IPC::Connection> m_connection;
     Ref<RealtimeMediaSource> m_source;
     CARingBuffer m_ringBuffer;
@@ -192,7 +212,7 @@
         auto source = sourceOrError.source();
         settings = source->settings();
         ASSERT(!m_proxies.contains(id));
-        m_proxies.add(id, makeUnique<SourceProxy>(id, m_connectionProxy->connection(), WTFMove(source)));
+        m_proxies.add(id, makeUnique<SourceProxy>(id, m_connectionProxy->sessionManager(), m_connectionProxy->connection(), WTFMove(source)));
     } else
         invalidConstraints = WTFMove(sourceOrError.errorMessage);
     completionHandler(succeeded, invalidConstraints, WTFMove(settings));
@@ -248,7 +268,7 @@
     ASSERT(m_proxies.contains(clonedID));
     ASSERT(!m_proxies.contains(newSourceID));
     if (auto* proxy = m_proxies.get(clonedID))
-        m_proxies.add(newSourceID, makeUnique<SourceProxy>(newSourceID, m_connectionProxy->connection(), proxy->source().clone()));
+        m_proxies.add(newSourceID, makeUnique<SourceProxy>(newSourceID, m_connectionProxy->sessionManager(), m_connectionProxy->connection(), proxy->source().clone()));
 }
 
 void UserMediaCaptureManagerProxy::requestToEnd(RealtimeMediaSourceIdentifier sourceID)

Modified: trunk/Source/WebKit/UIProcess/Cocoa/UserMediaCaptureManagerProxy.h (257038 => 257039)


--- trunk/Source/WebKit/UIProcess/Cocoa/UserMediaCaptureManagerProxy.h	2020-02-20 03:48:16 UTC (rev 257038)
+++ trunk/Source/WebKit/UIProcess/Cocoa/UserMediaCaptureManagerProxy.h	2020-02-20 03:51:21 UTC (rev 257039)
@@ -35,6 +35,10 @@
 #include <WebCore/RealtimeMediaSourceIdentifier.h>
 #include <wtf/UniqueRef.h>
 
+namespace WebCore {
+class PlatformMediaSessionManager;
+}
+
 namespace WebKit {
 
 class SharedMemory;
@@ -49,6 +53,7 @@
         virtual void addMessageReceiver(IPC::StringReference, IPC::MessageReceiver&) = 0;
         virtual void removeMessageReceiver(IPC::StringReference) = 0;
         virtual IPC::Connection& connection() = 0;
+        virtual WebCore::PlatformMediaSessionManager& sessionManager() = 0;
     };
     explicit UserMediaCaptureManagerProxy(UniqueRef<ConnectionProxy>&&);
     ~UserMediaCaptureManagerProxy();

Modified: trunk/Source/WebKit/UIProcess/WebProcessProxy.cpp (257038 => 257039)


--- trunk/Source/WebKit/UIProcess/WebProcessProxy.cpp	2020-02-20 03:48:16 UTC (rev 257038)
+++ trunk/Source/WebKit/UIProcess/WebProcessProxy.cpp	2020-02-20 03:51:21 UTC (rev 257039)
@@ -59,6 +59,7 @@
 #include "WebsiteData.h"
 #include "WebsiteDataFetchOption.h"
 #include <WebCore/DiagnosticLoggingKeys.h>
+#include <WebCore/PlatformMediaSessionManager.h>
 #include <WebCore/PrewarmInformation.h>
 #include <WebCore/PublicSuffix.h>
 #include <WebCore/SuddenTermination.h>
@@ -159,6 +160,7 @@
     void addMessageReceiver(IPC::StringReference messageReceiverName, IPC::MessageReceiver& receiver) final { m_process.addMessageReceiver(messageReceiverName, receiver); }
     void removeMessageReceiver(IPC::StringReference messageReceiverName) final { m_process.removeMessageReceiver(messageReceiverName); }
     IPC::Connection& connection() final { return *m_process.connection(); }
+    PlatformMediaSessionManager& sessionManager() final { return PlatformMediaSessionManager::sharedManager(); }
 
     WebProcessProxy& m_process;
 };