Title: [273253] branches/safari-611-branch
Revision: 273253
Author: alanc...@apple.com
Date: 2021-02-22 09:54:51 -0800 (Mon, 22 Feb 2021)

Log Message

Cherry-pick r271636. rdar://problem/74452635

    Update media state for active speech recognition as it uses audio capture
    https://bugs.webkit.org/show_bug.cgi?id=220667

    Patch by Sihui Liu <sihui_...@appe.com> on 2021-01-19
    Reviewed by Youenn Fablet.

    Source/WebCore:

    To make sure the media capture state is correctly sent to the client.

    API test: WebKit2.SpeechRecognitionMediaCaptureStateChange

    * Modules/speech/SpeechRecognition.cpp:
    (WebCore::SpeechRecognition::startRecognition):
    (WebCore::SpeechRecognition::stop):
    (WebCore::SpeechRecognition::didStartCapturingAudio):
    (WebCore::SpeechRecognition::didStopCapturingAudio):
    * Modules/speech/SpeechRecognition.h:
    * Modules/speech/SpeechRecognitionConnection.h:
    * dom/Document.cpp:
    (WebCore::Document::setActiveSpeechRecognition):
    (WebCore::Document::updateIsPlayingMedia):
    * dom/Document.h:
    * page/DummySpeechRecognitionProvider.h:

    Source/WebKit:

    * WebProcess/WebCoreSupport/WebSpeechRecognitionConnection.cpp:
    (WebKit::WebSpeechRecognitionConnection::unregisterClient):
    * WebProcess/WebCoreSupport/WebSpeechRecognitionConnection.h:

    Tools:

    * TestWebKitAPI/Tests/WebKitCocoa/SpeechRecognition.mm:
    (-[SpeechRecognitionUIDelegate _webView:mediaCaptureStateDidChange:]):
    (TestWebKitAPI::TEST):
    (-[SpeechRecognitionPermissionUIDelegate _webView:requestSpeechRecognitionPermissionForOrigin:decisionHandler:]): Deleted.
    (-[SpeechRecognitionPermissionUIDelegate _webView:requestMediaCaptureAuthorization:decisionHandler:]): Deleted.
    (-[SpeechRecognitionPermissionUIDelegate _webView:checkUserMediaPermissionForURL:mainFrameURL:frameIdentifier:decisionHandler:]): Deleted.
    (-[SpeechRecognitionPermissionUIDelegate webView:createWebViewWithConfiguration:forNavigationAction:windowFeatures:]): Deleted.

    git-svn-id: https://svn.webkit.org/repository/webkit/trunk@271636 268f45cc-cd09-0410-ab3c-d52691b4dbfc
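
In short: SpeechRecognition now tells its Document when it starts and stops capturing audio, and Document::updateIsPlayingMedia() folds an active recognition into the media-state flags reported to the client. The following is a condensed, standalone C++ model of that flow, for illustration only; plain pointers and a uint32_t flag word stand in for WebKit's WeakPtr and MediaProducer::MediaStateFlags, and the real hunks follow in the diff below.

    #include <cassert>
    #include <cstdint>

    // Flag name mirrors MediaProducer::HasActiveAudioCaptureDevice from the patch.
    enum MediaStateFlags : uint32_t {
        IsNotPlaying = 0,
        HasActiveAudioCaptureDevice = 1 << 0,
    };

    struct SpeechRecognition;

    struct Document {
        SpeechRecognition* activeSpeechRecognition { nullptr }; // WebKit holds a WeakPtr<SpeechRecognition>
        uint32_t mediaState { IsNotPlaying };

        void setActiveSpeechRecognition(SpeechRecognition* recognition)
        {
            if (activeSpeechRecognition == recognition)
                return;
            activeSpeechRecognition = recognition;
            updateIsPlayingMedia();
        }

        void updateIsPlayingMedia()
        {
            uint32_t state = IsNotPlaying;
            if (activeSpeechRecognition)
                state |= HasActiveAudioCaptureDevice; // active recognition counts as audio capture
            mediaState = state; // WebKit also notifies the page/UI client at this point
        }
    };

    struct SpeechRecognition {
        Document& document;
        // Mark the document when capture starts and stops, as the
        // SpeechRecognition.cpp hunks below do.
        void didStartCapturingAudio() { document.setActiveSpeechRecognition(this); }
        void didStopCapturingAudio() { document.setActiveSpeechRecognition(nullptr); }
    };

    int main()
    {
        Document document;
        SpeechRecognition recognition { document };

        recognition.didStartCapturingAudio();
        assert(document.mediaState & HasActiveAudioCaptureDevice);

        recognition.didStopCapturingAudio();
        assert(!(document.mediaState & HasActiveAudioCaptureDevice));
        return 0;
    }

The WebKit2.SpeechRecognitionMediaCaptureStateChange API test added below exercises exactly this transition through the _webView:mediaCaptureStateDidChange: UI delegate callback.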

Modified Paths

branches/safari-611-branch/Source/WebCore/ChangeLog
branches/safari-611-branch/Source/WebCore/Modules/speech/SpeechRecognition.cpp
branches/safari-611-branch/Source/WebCore/Modules/speech/SpeechRecognition.h
branches/safari-611-branch/Source/WebCore/Modules/speech/SpeechRecognitionConnection.h
branches/safari-611-branch/Source/WebCore/dom/Document.cpp
branches/safari-611-branch/Source/WebCore/dom/Document.h
branches/safari-611-branch/Source/WebCore/page/DummySpeechRecognitionProvider.h
branches/safari-611-branch/Source/WebKit/ChangeLog
branches/safari-611-branch/Source/WebKit/WebProcess/WebCoreSupport/WebSpeechRecognitionConnection.cpp
branches/safari-611-branch/Source/WebKit/WebProcess/WebCoreSupport/WebSpeechRecognitionConnection.h
branches/safari-611-branch/Tools/ChangeLog
branches/safari-611-branch/Tools/TestWebKitAPI/Tests/WebKitCocoa/SpeechRecognition.mm

Diff

Modified: branches/safari-611-branch/Source/WebCore/ChangeLog (273252 => 273253)


--- branches/safari-611-branch/Source/WebCore/ChangeLog	2021-02-22 17:54:45 UTC (rev 273252)
+++ branches/safari-611-branch/Source/WebCore/ChangeLog	2021-02-22 17:54:51 UTC (rev 273253)
@@ -1,5 +1,76 @@
 2021-02-17  Ruben Turcios  <rubent...@apple.com>
 
+        Cherry-pick r271636. rdar://problem/74452635
+
+    Update media state for active speech recognition as it uses audio capture
+    https://bugs.webkit.org/show_bug.cgi?id=220667
+    
+    Patch by Sihui Liu <sihui_...@appe.com> on 2021-01-19
+    Reviewed by Youenn Fablet.
+    
+    Source/WebCore:
+    
+    To make sure the media capture state is correctly sent to client.
+    
+    API test: WebKit2.SpeechRecognitionMediaCaptureStateChange
+    
+    * Modules/speech/SpeechRecognition.cpp:
+    (WebCore::SpeechRecognition::startRecognition):
+    (WebCore::SpeechRecognition::stop):
+    (WebCore::SpeechRecognition::didStartCapturingAudio):
+    (WebCore::SpeechRecognition::didStopCapturingAudio):
+    * Modules/speech/SpeechRecognition.h:
+    * Modules/speech/SpeechRecognitionConnection.h:
+    * dom/Document.cpp:
+    (WebCore::Document::setActiveSpeechRecognition):
+    (WebCore::Document::updateIsPlayingMedia):
+    * dom/Document.h:
+    * page/DummySpeechRecognitionProvider.h:
+    
+    Source/WebKit:
+    
+    * WebProcess/WebCoreSupport/WebSpeechRecognitionConnection.cpp:
+    (WebKit::WebSpeechRecognitionConnection::unregisterClient):
+    * WebProcess/WebCoreSupport/WebSpeechRecognitionConnection.h:
+    
+    Tools:
+    
+    * TestWebKitAPI/Tests/WebKitCocoa/SpeechRecognition.mm:
+    (-[SpeechRecognitionUIDelegate _webView:mediaCaptureStateDidChange:]):
+    (TestWebKitAPI::TEST):
+    (-[SpeechRecognitionPermissionUIDelegate _webView:requestSpeechRecognitionPermissionForOrigin:decisionHandler:]): Deleted.
+    (-[SpeechRecognitionPermissionUIDelegate _webView:requestMediaCaptureAuthorization:decisionHandler:]): Deleted.
+    (-[SpeechRecognitionPermissionUIDelegate _webView:checkUserMediaPermissionForURL:mainFrameURL:frameIdentifier:decisionHandler:]): Deleted.
+    (-[SpeechRecognitionPermissionUIDelegate webView:createWebViewWithConfiguration:forNavigationAction:windowFeatures:]): Deleted.
+    
+    git-svn-id: https://svn.webkit.org/repository/webkit/trunk@271636 268f45cc-cd09-0410-ab3c-d52691b4dbfc
+
+    2021-01-19  Sihui Liu  <sihui_...@appe.com>
+
+            Update media state for active speech recognition as it uses audio capture
+            https://bugs.webkit.org/show_bug.cgi?id=220667
+
+            Reviewed by Youenn Fablet.
+
+            To make sure the media capture state is correctly sent to client.
+
+            API test: WebKit2.SpeechRecognitionMediaCaptureStateChange
+
+            * Modules/speech/SpeechRecognition.cpp:
+            (WebCore::SpeechRecognition::startRecognition):
+            (WebCore::SpeechRecognition::stop):
+            (WebCore::SpeechRecognition::didStartCapturingAudio):
+            (WebCore::SpeechRecognition::didStopCapturingAudio):
+            * Modules/speech/SpeechRecognition.h:
+            * Modules/speech/SpeechRecognitionConnection.h:
+            * dom/Document.cpp:
+            (WebCore::Document::setActiveSpeechRecognition):
+            (WebCore::Document::updateIsPlayingMedia):
+            * dom/Document.h:
+            * page/DummySpeechRecognitionProvider.h:
+
+2021-02-17  Ruben Turcios  <rubent...@apple.com>
+
         Cherry-pick r272490. rdar://problem/74409784
 
     Add support for aria-sort change notifications.

Modified: branches/safari-611-branch/Source/WebCore/Modules/speech/SpeechRecognition.cpp (273252 => 273253)


--- branches/safari-611-branch/Source/WebCore/Modules/speech/SpeechRecognition.cpp	2021-02-22 17:54:45 UTC (rev 273252)
+++ branches/safari-611-branch/Source/WebCore/Modules/speech/SpeechRecognition.cpp	2021-02-22 17:54:51 UTC (rev 273253)
@@ -99,6 +99,15 @@
     return "SpeechRecognition";
 }
 
+void SpeechRecognition::stop()
+{
+    abortRecognition();
+    m_connection->unregisterClient(*this);
+
+    auto& document = downcast<Document>(*scriptExecutionContext());
+    document.setActiveSpeechRecognition(nullptr);
+}
+
 void SpeechRecognition::didStart()
 {
     if (m_state == State::Starting)
@@ -109,6 +118,9 @@
 
 void SpeechRecognition::didStartCapturingAudio()
 {
+    auto& document = downcast<Document>(*scriptExecutionContext());
+    document.setActiveSpeechRecognition(this);
+
     queueTaskToDispatchEvent(*this, TaskSource::Speech, Event::create(eventNames().audiostartEvent, Event::CanBubble::No, Event::IsCancelable::No));
 }
 
@@ -134,6 +146,9 @@
 
 void SpeechRecognition::didStopCapturingAudio()
 {
+    auto& document = downcast<Document>(*scriptExecutionContext());
+    document.setActiveSpeechRecognition(nullptr);
+
     queueTaskToDispatchEvent(*this, TaskSource::Speech, Event::create(eventNames().audioendEvent, Event::CanBubble::No, Event::IsCancelable::No));
 }
 

Modified: branches/safari-611-branch/Source/WebCore/Modules/speech/SpeechRecognition.h (273252 => 273253)


--- branches/safari-611-branch/Source/WebCore/Modules/speech/SpeechRecognition.h	2021-02-22 17:54:45 UTC (rev 273252)
+++ branches/safari-611-branch/Source/WebCore/Modules/speech/SpeechRecognition.h	2021-02-22 17:54:51 UTC (rev 273253)
@@ -85,8 +85,9 @@
     void didEnd() final;
 
     // ActiveDOMObject
-    const char* activeDOMObjectName() const;
-    void suspend(ReasonForSuspension);
+    const char* activeDOMObjectName() const final;
+    void suspend(ReasonForSuspension) final;
+    void stop() final;
 
     // EventTarget
     ScriptExecutionContext* scriptExecutionContext() const final { return ActiveDOMObject::scriptExecutionContext(); }

Modified: branches/safari-611-branch/Source/WebCore/Modules/speech/SpeechRecognitionConnection.h (273252 => 273253)


--- branches/safari-611-branch/Source/WebCore/Modules/speech/SpeechRecognitionConnection.h	2021-02-22 17:54:45 UTC (rev 273252)
+++ branches/safari-611-branch/Source/WebCore/Modules/speech/SpeechRecognitionConnection.h	2021-02-22 17:54:51 UTC (rev 273253)
@@ -37,6 +37,7 @@
 public:
     virtual ~SpeechRecognitionConnection() { }
     virtual void registerClient(SpeechRecognitionConnectionClient&) = 0;
+    virtual void unregisterClient(SpeechRecognitionConnectionClient&) = 0;
     virtual void start(SpeechRecognitionConnectionClientIdentifier, const String& lang, bool continuous, bool interimResults, uint64_t maxAlternatives, ClientOrigin&&) = 0;
     virtual void stop(SpeechRecognitionConnectionClientIdentifier) = 0;
     virtual void abort(SpeechRecognitionConnectionClientIdentifier) = 0;

Modified: branches/safari-611-branch/Source/WebCore/dom/Document.cpp (273252 => 273253)


--- branches/safari-611-branch/Source/WebCore/dom/Document.cpp	2021-02-22 17:54:45 UTC (rev 273252)
+++ branches/safari-611-branch/Source/WebCore/dom/Document.cpp	2021-02-22 17:54:51 UTC (rev 273253)
@@ -209,6 +209,7 @@
 #include "Settings.h"
 #include "ShadowRoot.h"
 #include "SocketProvider.h"
+#include "SpeechRecognition.h"
 #include "StorageEvent.h"
 #include "StringCallback.h"
 #include "StyleAdjuster.h"
@@ -4251,6 +4252,15 @@
     updateIsPlayingMedia();
 }
 
+void Document::setActiveSpeechRecognition(SpeechRecognition* speechRecognition)
+{
+    if (m_activeSpeechRecognition == speechRecognition)
+        return;
+
+    m_activeSpeechRecognition = makeWeakPtr(speechRecognition);
+    updateIsPlayingMedia();
+}
+
 void Document::noteUserInteractionWithMediaElement()
 {
     if (m_userHasInteractedWithMediaElement)
@@ -4272,6 +4282,8 @@
 
 #if ENABLE(MEDIA_STREAM)
     state |= MediaStreamTrack::captureState(*this);
+    if (m_activeSpeechRecognition)
+        state |= MediaProducer::HasActiveAudioCaptureDevice;
 #endif
 
     if (m_userHasInteractedWithMediaElement)

Modified: branches/safari-611-branch/Source/WebCore/dom/Document.h (273252 => 273253)


--- branches/safari-611-branch/Source/WebCore/dom/Document.h	2021-02-22 17:54:45 UTC (rev 273252)
+++ branches/safari-611-branch/Source/WebCore/dom/Document.h	2021-02-22 17:54:51 UTC (rev 273253)
@@ -208,6 +208,7 @@
 class SelectorQueryCache;
 class SerializedScriptValue;
 class Settings;
+class SpeechRecognition;
 class StringCallback;
 class StyleSheet;
 class StyleSheetContents;
@@ -1383,6 +1384,7 @@
 
     WEBCORE_EXPORT void addAudioProducer(MediaProducer&);
     WEBCORE_EXPORT void removeAudioProducer(MediaProducer&);
+    void setActiveSpeechRecognition(SpeechRecognition*);
     MediaProducer::MediaStateFlags mediaState() const { return m_mediaState; }
     void noteUserInteractionWithMediaElement();
     bool isCapturing() const { return MediaProducer::isCapturing(m_mediaState); }
@@ -1975,6 +1977,7 @@
     Ref<CSSFontSelector> m_fontSelector;
 
     WeakHashSet<MediaProducer> m_audioProducers;
+    WeakPtr<SpeechRecognition> m_activeSpeechRecognition;
 
     HashSet<ShadowRoot*> m_inDocumentShadowRoots;
 

Modified: branches/safari-611-branch/Source/WebCore/page/DummySpeechRecognitionProvider.h (273252 => 273253)


--- branches/safari-611-branch/Source/WebCore/page/DummySpeechRecognitionProvider.h	2021-02-22 17:54:45 UTC (rev 273252)
+++ branches/safari-611-branch/Source/WebCore/page/DummySpeechRecognitionProvider.h	2021-02-22 17:54:51 UTC (rev 273253)
@@ -39,6 +39,7 @@
             return adoptRef(*new DummySpeechRecognitionConnection());
         }
         void registerClient(SpeechRecognitionConnectionClient&) final { }
+        void unregisterClient(SpeechRecognitionConnectionClient&) final { }
         void start(SpeechRecognitionConnectionClientIdentifier, const String&, bool, bool, uint64_t, ClientOrigin&&) final { }
         void stop(SpeechRecognitionConnectionClientIdentifier) final { }
         void abort(SpeechRecognitionConnectionClientIdentifier) final { }

Modified: branches/safari-611-branch/Source/WebKit/ChangeLog (273252 => 273253)


--- branches/safari-611-branch/Source/WebKit/ChangeLog	2021-02-22 17:54:45 UTC (rev 273252)
+++ branches/safari-611-branch/Source/WebKit/ChangeLog	2021-02-22 17:54:51 UTC (rev 273253)
@@ -1,5 +1,63 @@
 2021-02-17  Ruben Turcios  <rubent...@apple.com>
 
+        Cherry-pick r271636. rdar://problem/74452635
+
+    Update media state for active speech recognition as it uses audio capture
+    https://bugs.webkit.org/show_bug.cgi?id=220667
+    
+    Patch by Sihui Liu <sihui_...@appe.com> on 2021-01-19
+    Reviewed by Youenn Fablet.
+    
+    Source/WebCore:
+    
+    To make sure the media capture state is correctly sent to client.
+    
+    API test: WebKit2.SpeechRecognitionMediaCaptureStateChange
+    
+    * Modules/speech/SpeechRecognition.cpp:
+    (WebCore::SpeechRecognition::startRecognition):
+    (WebCore::SpeechRecognition::stop):
+    (WebCore::SpeechRecognition::didStartCapturingAudio):
+    (WebCore::SpeechRecognition::didStopCapturingAudio):
+    * Modules/speech/SpeechRecognition.h:
+    * Modules/speech/SpeechRecognitionConnection.h:
+    * dom/Document.cpp:
+    (WebCore::Document::setActiveSpeechRecognition):
+    (WebCore::Document::updateIsPlayingMedia):
+    * dom/Document.h:
+    * page/DummySpeechRecognitionProvider.h:
+    
+    Source/WebKit:
+    
+    * WebProcess/WebCoreSupport/WebSpeechRecognitionConnection.cpp:
+    (WebKit::WebSpeechRecognitionConnection::unregisterClient):
+    * WebProcess/WebCoreSupport/WebSpeechRecognitionConnection.h:
+    
+    Tools:
+    
+    * TestWebKitAPI/Tests/WebKitCocoa/SpeechRecognition.mm:
+    (-[SpeechRecognitionUIDelegate _webView:mediaCaptureStateDidChange:]):
+    (TestWebKitAPI::TEST):
+    (-[SpeechRecognitionPermissionUIDelegate _webView:requestSpeechRecognitionPermissionForOrigin:decisionHandler:]): Deleted.
+    (-[SpeechRecognitionPermissionUIDelegate _webView:requestMediaCaptureAuthorization:decisionHandler:]): Deleted.
+    (-[SpeechRecognitionPermissionUIDelegate _webView:checkUserMediaPermissionForURL:mainFrameURL:frameIdentifier:decisionHandler:]): Deleted.
+    (-[SpeechRecognitionPermissionUIDelegate webView:createWebViewWithConfiguration:forNavigationAction:windowFeatures:]): Deleted.
+    
+    git-svn-id: https://svn.webkit.org/repository/webkit/trunk@271636 268f45cc-cd09-0410-ab3c-d52691b4dbfc
+
+    2021-01-19  Sihui Liu  <sihui_...@appe.com>
+
+            Update media state for active speech recognition as it uses audio capture
+            https://bugs.webkit.org/show_bug.cgi?id=220667
+
+            Reviewed by Youenn Fablet.
+
+            * WebProcess/WebCoreSupport/WebSpeechRecognitionConnection.cpp:
+            (WebKit::WebSpeechRecognitionConnection::unregisterClient):
+            * WebProcess/WebCoreSupport/WebSpeechRecognitionConnection.h:
+
+2021-02-17  Ruben Turcios  <rubent...@apple.com>
+
         Cherry-pick r271381. rdar://problem/74451875
 
     Make SpeechRecognition permission error more informative

Modified: branches/safari-611-branch/Source/WebKit/WebProcess/WebCoreSupport/WebSpeechRecognitionConnection.cpp (273252 => 273253)


--- branches/safari-611-branch/Source/WebKit/WebProcess/WebCoreSupport/WebSpeechRecognitionConnection.cpp	2021-02-22 17:54:45 UTC (rev 273252)
+++ branches/safari-611-branch/Source/WebKit/WebProcess/WebCoreSupport/WebSpeechRecognitionConnection.cpp	2021-02-22 17:54:51 UTC (rev 273253)
@@ -63,6 +63,11 @@
     m_clientMap.add(client.identifier(), makeWeakPtr(client));
 }
 
+void WebSpeechRecognitionConnection::unregisterClient(WebCore::SpeechRecognitionConnectionClient& client)
+{
+    m_clientMap.remove(client.identifier());
+}
+
 void WebSpeechRecognitionConnection::start(WebCore::SpeechRecognitionConnectionClientIdentifier clientIdentifier, const String& lang, bool continuous, bool interimResults, uint64_t maxAlternatives, WebCore::ClientOrigin&& clientOrigin)
 {
     send(Messages::SpeechRecognitionServer::Start(clientIdentifier, lang, continuous, interimResults, maxAlternatives, WTFMove(clientOrigin)));

Modified: branches/safari-611-branch/Source/WebKit/WebProcess/WebCoreSupport/WebSpeechRecognitionConnection.h (273252 => 273253)


--- branches/safari-611-branch/Source/WebKit/WebProcess/WebCoreSupport/WebSpeechRecognitionConnection.h	2021-02-22 17:54:45 UTC (rev 273252)
+++ branches/safari-611-branch/Source/WebKit/WebProcess/WebCoreSupport/WebSpeechRecognitionConnection.h	2021-02-22 17:54:51 UTC (rev 273253)
@@ -56,6 +56,7 @@
     ~WebSpeechRecognitionConnection();
 
     void registerClient(WebCore::SpeechRecognitionConnectionClient&) final;
+    void unregisterClient(WebCore::SpeechRecognitionConnectionClient&) final;
     void didReceiveUpdate(WebCore::SpeechRecognitionUpdate&&) final;
     void invalidate(WebCore::SpeechRecognitionConnectionClientIdentifier);
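
On the WebKit side, the new unregisterClient() simply mirrors registerClient(): the connection keeps a map from client identifier to a weak reference, and SpeechRecognition::stop() removes its entry so a stopped recognition no longer lingers in the map. A minimal standalone sketch of that pairing follows, for illustration only; std::unordered_map and raw pointers stand in for WebKit's HashMap and WeakPtr keyed by SpeechRecognitionConnectionClientIdentifier.

    #include <cstdint>
    #include <unordered_map>

    struct Client {
        uint64_t identifier; // stands in for SpeechRecognitionConnectionClientIdentifier
    };

    class Connection {
    public:
        void registerClient(Client& client) { m_clientMap[client.identifier] = &client; }
        void unregisterClient(Client& client) { m_clientMap.erase(client.identifier); }
        bool hasClient(uint64_t identifier) const { return m_clientMap.count(identifier) > 0; }

    private:
        std::unordered_map<uint64_t, Client*> m_clientMap; // WebKit: HashMap of WeakPtrs
    };

    int main()
    {
        Connection connection;
        Client client { 7 };

        connection.registerClient(client);
        connection.unregisterClient(client); // what SpeechRecognition::stop() now does
        return connection.hasClient(client.identifier) ? 1 : 0;
    }

In the WebCore hunk above, SpeechRecognition::stop() calls unregisterClient(*this) right before clearing the document's active recognition.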
 

Modified: branches/safari-611-branch/Tools/ChangeLog (273252 => 273253)


--- branches/safari-611-branch/Tools/ChangeLog	2021-02-22 17:54:45 UTC (rev 273252)
+++ branches/safari-611-branch/Tools/ChangeLog	2021-02-22 17:54:51 UTC (rev 273253)
@@ -1,5 +1,67 @@
 2021-02-17  Ruben Turcios  <rubent...@apple.com>
 
+        Cherry-pick r271636. rdar://problem/74452635
+
+    Update media state for active speech recognition as it uses audio capture
+    https://bugs.webkit.org/show_bug.cgi?id=220667
+    
+    Patch by Sihui Liu <sihui_...@appe.com> on 2021-01-19
+    Reviewed by Youenn Fablet.
+    
+    Source/WebCore:
+    
+    To make sure the media capture state is correctly sent to client.
+    
+    API test: WebKit2.SpeechRecognitionMediaCaptureStateChange
+    
+    * Modules/speech/SpeechRecognition.cpp:
+    (WebCore::SpeechRecognition::startRecognition):
+    (WebCore::SpeechRecognition::stop):
+    (WebCore::SpeechRecognition::didStartCapturingAudio):
+    (WebCore::SpeechRecognition::didStopCapturingAudio):
+    * Modules/speech/SpeechRecognition.h:
+    * Modules/speech/SpeechRecognitionConnection.h:
+    * dom/Document.cpp:
+    (WebCore::Document::setActiveSpeechRecognition):
+    (WebCore::Document::updateIsPlayingMedia):
+    * dom/Document.h:
+    * page/DummySpeechRecognitionProvider.h:
+    
+    Source/WebKit:
+    
+    * WebProcess/WebCoreSupport/WebSpeechRecognitionConnection.cpp:
+    (WebKit::WebSpeechRecognitionConnection::unregisterClient):
+    * WebProcess/WebCoreSupport/WebSpeechRecognitionConnection.h:
+    
+    Tools:
+    
+    * TestWebKitAPI/Tests/WebKitCocoa/SpeechRecognition.mm:
+    (-[SpeechRecognitionUIDelegate _webView:mediaCaptureStateDidChange:]):
+    (TestWebKitAPI::TEST):
+    (-[SpeechRecognitionPermissionUIDelegate _webView:requestSpeechRecognitionPermissionForOrigin:decisionHandler:]): Deleted.
+    (-[SpeechRecognitionPermissionUIDelegate _webView:requestMediaCaptureAuthorization:decisionHandler:]): Deleted.
+    (-[SpeechRecognitionPermissionUIDelegate _webView:checkUserMediaPermissionForURL:mainFrameURL:frameIdentifier:decisionHandler:]): Deleted.
+    (-[SpeechRecognitionPermissionUIDelegate webView:createWebViewWithConfiguration:forNavigationAction:windowFeatures:]): Deleted.
+    
+    git-svn-id: https://svn.webkit.org/repository/webkit/trunk@271636 268f45cc-cd09-0410-ab3c-d52691b4dbfc
+
+    2021-01-19  Sihui Liu  <sihui_...@appe.com>
+
+            Update media state for active speech recognition as it uses audio capture
+            https://bugs.webkit.org/show_bug.cgi?id=220667
+
+            Reviewed by Youenn Fablet.
+
+            * TestWebKitAPI/Tests/WebKitCocoa/SpeechRecognition.mm:
+            (-[SpeechRecognitionUIDelegate _webView:mediaCaptureStateDidChange:]):
+            (TestWebKitAPI::TEST):
+            (-[SpeechRecognitionPermissionUIDelegate _webView:requestSpeechRecognitionPermissionForOrigin:decisionHandler:]): Deleted.
+            (-[SpeechRecognitionPermissionUIDelegate _webView:requestMediaCaptureAuthorization:decisionHandler:]): Deleted.
+            (-[SpeechRecognitionPermissionUIDelegate _webView:checkUserMediaPermissionForURL:mainFrameURL:frameIdentifier:decisionHandler:]): Deleted.
+            (-[SpeechRecognitionPermissionUIDelegate webView:createWebViewWithConfiguration:forNavigationAction:windowFeatures:]): Deleted.
+
+2021-02-17  Ruben Turcios  <rubent...@apple.com>
+
         Cherry-pick r272490. rdar://problem/74409784
 
     Add support for aria-sort change notifications.

Modified: branches/safari-611-branch/Tools/TestWebKitAPI/Tests/WebKitCocoa/SpeechRecognition.mm (273252 => 273253)


--- branches/safari-611-branch/Tools/TestWebKitAPI/Tests/WebKitCocoa/SpeechRecognition.mm	2021-02-22 17:54:45 UTC (rev 273252)
+++ branches/safari-611-branch/Tools/TestWebKitAPI/Tests/WebKitCocoa/SpeechRecognition.mm	2021-02-22 17:54:51 UTC (rev 273253)
@@ -38,17 +38,20 @@
 static bool permissionRequested = false;
 static bool receivedScriptMessage;
 static bool didFinishNavigation;
+static bool captureStateDidChange;
+static bool isCapturing;
 static RetainPtr<WKScriptMessage> lastScriptMessage;
 static RetainPtr<WKWebView> createdWebView;
 
-@interface SpeechRecognitionPermissionUIDelegate : NSObject<WKUIDelegatePrivate>
+@interface SpeechRecognitionUIDelegate : NSObject<WKUIDelegatePrivate>
 - (void)_webView:(WKWebView *)webView requestSpeechRecognitionPermissionForOrigin:(WKSecurityOrigin *)origin decisionHandler:(void (^)(BOOL))decisionHandler;
 - (void)_webView:(WKWebView *)webView requestMediaCaptureAuthorization: (_WKCaptureDevices)devices decisionHandler:(void (^)(BOOL))decisionHandler;
 - (void)_webView:(WKWebView *)webView checkUserMediaPermissionForURL:(NSURL *)url mainFrameURL:(NSURL *)mainFrameURL frameIdentifier:(NSUInteger)frameIdentifier decisionHandler:(void (^)(NSString *salt, BOOL authorized))decisionHandler;
 - (WKWebView *)webView:(WKWebView *)webView createWebViewWithConfiguration:(WKWebViewConfiguration *)configuration forNavigationAction:(WKNavigationAction *)navigationAction windowFeatures:(WKWindowFeatures *)windowFeatures;
+- (void)_webView:(WKWebView *)webView mediaCaptureStateDidChange:(_WKMediaCaptureState)state;
 @end
 
-@implementation SpeechRecognitionPermissionUIDelegate
+@implementation SpeechRecognitionUIDelegate
 - (void)_webView:(WKWebView *)webView requestSpeechRecognitionPermissionForOrigin:(WKSecurityOrigin *)origin decisionHandler:(void (^)(BOOL))decisionHandler
 {
     permissionRequested = true;
@@ -70,6 +73,12 @@
     createdWebView = adoptNS([[WKWebView alloc] initWithFrame:CGRectMake(0, 0, 800, 600) configuration:configuration]);
     return createdWebView.get();
 }
+
+- (void)_webView:(WKWebView *)webView mediaCaptureStateDidChange:(_WKMediaCaptureState)state
+{
+    isCapturing = state == _WKMediaCaptureStateActiveMicrophone;
+    captureStateDidChange = true;
+}
 @end
 
 @interface SpeechRecognitionMessageHandler : NSObject <WKScriptMessageHandler>
@@ -111,7 +120,7 @@
     preferences._mockCaptureDevicesEnabled = YES;
     preferences._speechRecognitionEnabled = YES;
     auto webView = adoptNS([[TestWKWebView alloc] initWithFrame:CGRectMake(0, 0, 800, 600) configuration:configuration.get()]);
-    auto delegate = adoptNS([[SpeechRecognitionPermissionUIDelegate alloc] init]);
+    auto delegate = adoptNS([[SpeechRecognitionUIDelegate alloc] init]);
     [webView setUIDelegate:delegate.get()];
 
     shouldGrantPermissionRequest = false;
@@ -154,7 +163,7 @@
     preferences._mockCaptureDevicesEnabled = YES;
     preferences._speechRecognitionEnabled = YES;
     preferences._mediaCaptureRequiresSecureConnection = NO;
-    auto delegate = adoptNS([[SpeechRecognitionPermissionUIDelegate alloc] init]);
+    auto delegate = adoptNS([[SpeechRecognitionUIDelegate alloc] init]);
     auto firstWebView = adoptNS([[TestWKWebView alloc] initWithFrame:CGRectMake(0, 0, 100, 100) configuration:configuration.get()]);
     [firstWebView setUIDelegate:delegate.get()];
     auto secondWebView = adoptNS([[TestWKWebView alloc] initWithFrame:CGRectMake(100, 0, 100, 100) configuration:configuration.get()]);
@@ -206,7 +215,7 @@
     preferences._mockCaptureDevicesEnabled = YES;
     preferences._speechRecognitionEnabled = YES;
     auto webView = adoptNS([[TestWKWebView alloc] initWithFrame:CGRectMake(0, 0, 800, 600) configuration:configuration.get()]);
-    auto delegate = adoptNS([[SpeechRecognitionPermissionUIDelegate alloc] init]);
+    auto delegate = adoptNS([[SpeechRecognitionUIDelegate alloc] init]);
     [webView setUIDelegate:delegate.get()];
 
     // Page is visible.
@@ -241,7 +250,7 @@
     preferences._mockCaptureDevicesEnabled = YES;
     preferences._speechRecognitionEnabled = YES;
     preferences._javascript_CanOpenWindowsAutomatically = YES;
-    auto delegate = adoptNS([[SpeechRecognitionPermissionUIDelegate alloc] init]);
+    auto delegate = adoptNS([[SpeechRecognitionUIDelegate alloc] init]);
     auto navigationDelegate = adoptNS([[SpeechRecognitionNavigationDelegate alloc] init]);
     shouldGrantPermissionRequest = true;
     createdWebView = nullptr;
@@ -268,6 +277,31 @@
     EXPECT_TRUE(!!createdWebView);
 }
 
+TEST(WebKit2, SpeechRecognitionMediaCaptureStateChange)
+{
+    auto configuration = adoptNS([[WKWebViewConfiguration alloc] init]);
+    auto handler = adoptNS([[SpeechRecognitionMessageHandler alloc] init]);
+    [[configuration userContentController] addScriptMessageHandler:handler.get() name:@"testHandler"];
+    auto preferences = [configuration preferences];
+    preferences._mockCaptureDevicesEnabled = YES;
+    preferences._speechRecognitionEnabled = YES;
+    auto delegate = adoptNS([[SpeechRecognitionUIDelegate alloc] init]);
+    auto webView = adoptNS([[TestWKWebView alloc] initWithFrame:CGRectMake(0, 0, 800, 600) configuration:configuration.get()]);
+    [webView setUIDelegate:delegate.get()];
+    shouldGrantPermissionRequest = true;
+
+    captureStateDidChange = false;
+    [webView synchronouslyLoadTestPageNamed:@"speechrecognition-basic"];
+    [webView stringByEvaluatingJavaScript:@"start()"];
+    TestWebKitAPI::Util::run(&captureStateDidChange);
+    EXPECT_TRUE(isCapturing);
+
+    captureStateDidChange = false;
+    [webView stringByEvaluatingJavaScript:@"stop()"];
+    TestWebKitAPI::Util::run(&captureStateDidChange);
+    EXPECT_FALSE(isCapturing);
+}
+
 #endif
 
 } // namespace TestWebKitAPI
_______________________________________________
webkit-changes mailing list
webkit-changes@lists.webkit.org
https://lists.webkit.org/mailman/listinfo/webkit-changes
