Diff
Modified: trunk/LayoutTests/ChangeLog (286258 => 286259)
--- trunk/LayoutTests/ChangeLog 2021-11-29 20:37:40 UTC (rev 286258)
+++ trunk/LayoutTests/ChangeLog 2021-11-29 20:48:59 UTC (rev 286259)
@@ -1,3 +1,17 @@
+2021-11-29 Chris Fleizach <cfleiz...@apple.com>
+
+ AX: Unify speech synthesizer platform usage for Mac/iOS
+ https://bugs.webkit.org/show_bug.cgi?id=231895
+ <rdar://problem/84372479>
+
+ Reviewed by Andres Gonzalez.
+
+ * fast/speechsynthesis/speech-synthesis-speak-empty-string-expected.txt:
+ * platform/mac-catalina-wk1/TestExpectations: Added.
+ * platform/mac-wk1/TestExpectations:
+ * platform/mac-wk2/TestExpectations:
+ * platform/mac/TestExpectations:
+
2021-11-29 Myles C. Maxfield <mmaxfi...@apple.com>
[Cocoa] REGRESSION(r281291): Text Style fonts don't have the correct weight set
Modified: trunk/LayoutTests/fast/speechsynthesis/speech-synthesis-speak-empty-string-expected.txt (286258 => 286259)
--- trunk/LayoutTests/fast/speechsynthesis/speech-synthesis-speak-empty-string-expected.txt 2021-11-29 20:37:40 UTC (rev 286258)
+++ trunk/LayoutTests/fast/speechsynthesis/speech-synthesis-speak-empty-string-expected.txt 2021-11-29 20:48:59 UTC (rev 286259)
@@ -5,6 +5,8 @@
PASS speechSynthesis.speaking is false
PASS event.elapsedTime > 0 is true
+PASS speechSynthesis.speaking is true
+PASS event.elapsedTime > 0 is true
PASS speechSynthesis.speaking is false
PASS successfullyParsed is true
Modified: trunk/LayoutTests/imported/w3c/ChangeLog (286258 => 286259)
--- trunk/LayoutTests/imported/w3c/ChangeLog 2021-11-29 20:37:40 UTC (rev 286258)
+++ trunk/LayoutTests/imported/w3c/ChangeLog 2021-11-29 20:48:59 UTC (rev 286259)
@@ -1,3 +1,15 @@
+2021-11-29 Chris Fleizach <cfleiz...@apple.com>
+
+ AX: Unify speech synthesizer platform usage for Mac/iOS
+ https://bugs.webkit.org/show_bug.cgi?id=231895
+ <rdar://problem/84372479>
+
+ Reviewed by Andres Gonzalez.
+
+ Update expectations now that the test passes.
+
+ * web-platform-tests/speech-api/SpeechSynthesis-speak-events-expected.txt:
+
2021-11-29 Antti Koivisto <an...@apple.com>
[:has() pseudo-class] id invalidation support
Modified: trunk/LayoutTests/imported/w3c/web-platform-tests/speech-api/SpeechSynthesis-speak-events-expected.txt (286258 => 286259)
--- trunk/LayoutTests/imported/w3c/web-platform-tests/speech-api/SpeechSynthesis-speak-events-expected.txt 2021-11-29 20:37:40 UTC (rev 286258)
+++ trunk/LayoutTests/imported/w3c/web-platform-tests/speech-api/SpeechSynthesis-speak-events-expected.txt 2021-11-29 20:48:59 UTC (rev 286259)
@@ -1,4 +1,4 @@
-FAIL speechSynthesis.speak() fires start and end events with empty utterance assert_true: Not expecting event, but got end event expected true got false
+PASS speechSynthesis.speak() fires start and end events with empty utterance
PASS speechSynthesis.speak() fires start and end events
Modified: trunk/LayoutTests/platform/mac/TestExpectations (286258 => 286259)
--- trunk/LayoutTests/platform/mac/TestExpectations 2021-11-29 20:37:40 UTC (rev 286258)
+++ trunk/LayoutTests/platform/mac/TestExpectations 2021-11-29 20:48:59 UTC (rev 286259)
@@ -2031,10 +2031,6 @@
webkit.org/b/217620 inspector/audit/basic-async.html [ Pass Timeout ]
-imported/w3c/web-platform-tests/speech-api/SpeechSynthesis-speak-twice.html [ Skip ]
-imported/w3c/web-platform-tests/speech-api/SpeechSynthesis-speak-without-activation-fails.tentative.html [ Skip ]
-webkit.org/b/227501 [ Debug ] imported/w3c/web-platform-tests/speech-api/SpeechSynthesis-speak-events.html [ Skip ]
-
webkit.org/b/217669 http/wpt/service-workers/service-worker-spinning-message.https.html [ Pass Failure ]
webkit.org/b/217994 imported/w3c/web-platform-tests/webaudio/the-audio-api/the-audioworklet-interface/audioworkletnode-output-channel-count.https.html [ Pass Failure ]
@@ -2301,8 +2297,6 @@
#rdar://82146367 ([Mac, iOS Release] imported/w3c/web-platform-tests/worklets/layout-worklet-csp.https.html is a flaky failure) imported/w3c/web-platform-tests/worklets/layout-worklet-csp.https.html [ Pass Failure ]
-webkit.org/b/228396 fast/speechsynthesis/speech-synthesis-speak-empty-string.html [ Pass Failure ]
-
webkit.org/b/228176 [ Mojave Catalina BigSur ] fast/text/variable-system-font.html [ ImageOnlyFailure ]
webkit.org/b/228176 [ Monterey ] fast/text/variable-system-font.html [ Pass ]
Added: trunk/LayoutTests/platform/mac-catalina-wk1/TestExpectations (0 => 286259)
--- trunk/LayoutTests/platform/mac-catalina-wk1/TestExpectations (rev 0)
+++ trunk/LayoutTests/platform/mac-catalina-wk1/TestExpectations 2021-11-29 20:48:59 UTC (rev 286259)
@@ -0,0 +1,5 @@
+# This file should contain entries for expectations that are specific
+# to the Apple Mac Catalina port running WebKit1 (DumpRenderTree)
+
+imported/w3c/web-platform-tests/speech-api/SpeechSynthesis-speak-events.html [ Skip ]
+imported/w3c/web-platform-tests/speech-api/SpeechSynthesis-pause-resume.tentative.html [ Skip ]
Modified: trunk/LayoutTests/platform/mac-wk1/TestExpectations (286258 => 286259)
--- trunk/LayoutTests/platform/mac-wk1/TestExpectations 2021-11-29 20:37:40 UTC (rev 286258)
+++ trunk/LayoutTests/platform/mac-wk1/TestExpectations 2021-11-29 20:48:59 UTC (rev 286259)
@@ -72,7 +72,6 @@
fast/speechrecognition/start-recognition-after-gum.html [ Skip ]
fast/speechrecognition/start-recognition-after-denied-gum.html [ Skip ]
-imported/w3c/web-platform-tests/speech-api/SpeechSynthesis-pause-resume.tentative.html [ Skip ]
# Datalist is unsupported in WK1
accessibility/datalist.html [ WontFix ]
Modified: trunk/LayoutTests/platform/mac-wk2/TestExpectations (286258 => 286259)
--- trunk/LayoutTests/platform/mac-wk2/TestExpectations 2021-11-29 20:37:40 UTC (rev 286258)
+++ trunk/LayoutTests/platform/mac-wk2/TestExpectations 2021-11-29 20:48:59 UTC (rev 286259)
@@ -1208,9 +1208,6 @@
# <rdar://problem/64700658> imported/w3c/web-platform-tests/css/css-font-loading/idlharness.https.html is a flaky timeout and flaky failure
[ BigSur+ ] imported/w3c/web-platform-tests/css/css-font-loading/idlharness.https.html [ Pass Failure Timeout ]
-# rdar://66701815 (REGRESSION (20A2321a-20A2348b): fast/speechsynthesis/speech-synthesis-speak-empty-string.html is a flaky failure)
-[ BigSur+ ] fast/speechsynthesis/speech-synthesis-speak-empty-string.html [ Pass Failure ]
-
# rdar://66703773 (REGRESSION (20A2316-20A2348b): http/tests/security/contentSecurityPolicy/plugin-blocked-in-about-blank-window.html is a constant failure)
[ arm64 ] http/tests/security/contentSecurityPolicy/plugin-blocked-in-about-blank-window.html [ Skip ]
Modified: trunk/Source/WTF/ChangeLog (286258 => 286259)
--- trunk/Source/WTF/ChangeLog 2021-11-29 20:37:40 UTC (rev 286258)
+++ trunk/Source/WTF/ChangeLog 2021-11-29 20:48:59 UTC (rev 286259)
@@ -1,3 +1,13 @@
+2021-11-29 Chris Fleizach <cfleiz...@apple.com>
+
+ AX: Unify speech synthesizer platform usage for Mac/iOS
+ https://bugs.webkit.org/show_bug.cgi?id=231895
+ <rdar://problem/84372479>
+
+ Reviewed by Andres Gonzalez.
+
+ * wtf/PlatformHave.h:
+
2021-11-29 Yusuke Suzuki <ysuz...@apple.com>
[JSC] Public Class Field initialization is slow
Modified: trunk/Source/WTF/wtf/PlatformHave.h (286258 => 286259)
--- trunk/Source/WTF/wtf/PlatformHave.h 2021-11-29 20:37:40 UTC (rev 286258)
+++ trunk/Source/WTF/wtf/PlatformHave.h 2021-11-29 20:48:59 UTC (rev 286259)
@@ -757,7 +757,7 @@
#endif
#endif
-#if (PLATFORM(IOS) || PLATFORM(MACCATALYST)) && __IPHONE_OS_VERSION_MIN_REQUIRED >= 140300
+#if ((PLATFORM(IOS) || PLATFORM(MACCATALYST)) && __IPHONE_OS_VERSION_MAX_ALLOWED >= 140300) || (PLATFORM(MAC) && __MAC_OS_X_VERSION_MIN_REQUIRED > 120000)
#if !defined(HAVE_AVSPEECHSYNTHESIS_SYSTEMVOICE)
#define HAVE_AVSPEECHSYNTHESIS_SYSTEMVOICE 1
#endif
@@ -769,12 +769,6 @@
#endif
#endif
-#if PLATFORM(MAC) && __MAC_OS_X_VERSION_MIN_REQUIRED >= 120000
-#if !defined(HAVE_SPEECHSYNTHESIS_MONTEREY_SPI)
-#define HAVE_SPEECHSYNTHESIS_MONTEREY_SPI 1
-#endif
-#endif
-
#if COMPILER(GCC_COMPATIBLE) && defined(__has_attribute)
#if __has_attribute(objc_direct) && (!PLATFORM(MAC) || __MAC_OS_X_VERSION_MIN_REQUIRED >= 120000)
#if !defined(HAVE_NS_DIRECT_SUPPORT)
@@ -1110,3 +1104,7 @@
#if PLATFORM(MAC)
#define HAVE_SCENEKIT 1
#endif
+
+#if (PLATFORM(MAC) && __MAC_OS_X_VERSION_MIN_REQUIRED >= 130000)
+#define HAVE_UNIFIED_SPEECHSYNTHESIS_FIX_FOR_81465164 1
+#endif
Modified: trunk/Source/WebCore/ChangeLog (286258 => 286259)
--- trunk/Source/WebCore/ChangeLog 2021-11-29 20:37:40 UTC (rev 286258)
+++ trunk/Source/WebCore/ChangeLog 2021-11-29 20:48:59 UTC (rev 286259)
@@ -1,3 +1,28 @@
+2021-11-29 Chris Fleizach <cfleiz...@apple.com>
+
+ AX: Unify speech synthesizer platform usage for Mac/iOS
+ https://bugs.webkit.org/show_bug.cgi?id=231895
+ <rdar://problem/84372479>
+
+ Reviewed by Andres Gonzalez.
+
+ AVSpeechSynthesizer has been fully supported on macOS for a number of years, which allows us to unify the Mac and iOS platform implementations on top of it.
+ This also fixes a number of speech synthesis tests that had previously been marked as failing or flaky.
+
+ * Modules/speech/SpeechSynthesis.cpp:
+ (WebCore::SpeechSynthesis::SpeechSynthesis):
+ (WebCore::SpeechSynthesis::startSpeakingImmediately):
+ (WebCore::SpeechSynthesis::speak):
+ * SourcesCocoa.txt:
+ * WebCore.xcodeproj/project.pbxproj:
+ * page/SpeechSynthesisClient.h:
+ * platform/PlatformSpeechSynthesizer.h:
+ * platform/graphics/cocoa/FontCacheCoreText.cpp:
+ (WebCore::variationAxes):
+ * platform/ios/PlatformSpeechSynthesizerIOS.mm: Removed.
+ * platform/mac/PlatformSpeechSynthesizerMac.mm: Removed.
+ * platform/cocoa/PlatformSpeechSynthesizerCocoa.mm: Added.
+
2021-11-29 Myles C. Maxfield <mmaxfi...@apple.com>
[Cocoa] REGRESSION(r281291): Text Style fonts don't have the correct weight set
Modified: trunk/Source/WebCore/Modules/speech/SpeechSynthesis.cpp (286258 => 286259)
--- trunk/Source/WebCore/Modules/speech/SpeechSynthesis.cpp 2021-11-29 20:37:40 UTC (rev 286258)
+++ trunk/Source/WebCore/Modules/speech/SpeechSynthesis.cpp 2021-11-29 20:48:59 UTC (rev 286259)
@@ -59,8 +59,10 @@
UNUSED_PARAM(document);
#endif
- if (m_speechSynthesisClient)
+ if (m_speechSynthesisClient) {
m_speechSynthesisClient->setObserver(*this);
+ m_speechSynthesisClient->resetState();
+ }
}
void SpeechSynthesis::setPlatformSynthesizer(std::unique_ptr<PlatformSpeechSynthesizer> synthesizer)
@@ -122,12 +124,6 @@
m_currentSpeechUtterance = &utterance;
m_isPaused = false;
- // Zero lengthed strings should immediately notify that the event is complete.
- if (utterance.text().isEmpty()) {
- handleSpeakingCompleted(utterance, false);
- return;
- }
-
if (m_speechSynthesisClient)
m_speechSynthesisClient->speak(utterance.platformUtterance());
else
@@ -145,7 +141,6 @@
#endif
m_utteranceQueue.append(utterance);
-
// If the queue was empty, speak this immediately and add it to the queue.
if (m_utteranceQueue.size() == 1)
startSpeakingImmediately(m_utteranceQueue.first());
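For orientation (an editorial sketch, not part of the changeset): after the hunk above, startSpeakingImmediately() no longer completes zero-length utterances synchronously, so empty strings travel the same client/platform path as any other text and the platform layer reports their start and end events itself (see the empty-string handling in the new PlatformSpeechSynthesizerCocoa.mm further down, and the updated speech-synthesis-speak-empty-string and SpeechSynthesis-speak-events expectations above). The sketch is condensed from the surrounding context lines; the exact signature and the body of the fallback branch are assumptions.

    void SpeechSynthesis::startSpeakingImmediately(SpeechSynthesisUtterance& utterance)
    {
        m_currentSpeechUtterance = &utterance;
        m_isPaused = false;

        if (m_speechSynthesisClient)
            m_speechSynthesisClient->speak(utterance.platformUtterance());
        else {
            // Fall back to the in-process platform synthesizer (elided in the hunk above).
        }
    }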
Modified: trunk/Source/WebCore/PAL/ChangeLog (286258 => 286259)
--- trunk/Source/WebCore/PAL/ChangeLog 2021-11-29 20:37:40 UTC (rev 286258)
+++ trunk/Source/WebCore/PAL/ChangeLog 2021-11-29 20:48:59 UTC (rev 286259)
@@ -1,3 +1,17 @@
+2021-11-29 Chris Fleizach <cfleiz...@apple.com>
+
+ AX: Unify speech synthesizer platform usage for Mac/iOS
+ https://bugs.webkit.org/show_bug.cgi?id=231895
+ <rdar://problem/84372479>
+
+ Reviewed by Andres Gonzalez.
+
+ * PAL.xcodeproj/project.pbxproj:
+ * pal/cocoa/AVFoundationSoftLink.h:
+ * pal/cocoa/AVFoundationSoftLink.mm:
+ * pal/spi/cocoa/AXSpeechManagerSPI.h:
+ * pal/spi/mac/SpeechSynthesisSPI.h: Removed.
+
2021-11-29 Myles C. Maxfield <mmaxfi...@apple.com>
[WebGPU] Provide default values for descriptor struct members
Modified: trunk/Source/WebCore/PAL/PAL.xcodeproj/project.pbxproj (286258 => 286259)
--- trunk/Source/WebCore/PAL/PAL.xcodeproj/project.pbxproj 2021-11-29 20:37:40 UTC (rev 286258)
+++ trunk/Source/WebCore/PAL/PAL.xcodeproj/project.pbxproj 2021-11-29 20:48:59 UTC (rev 286259)
@@ -323,7 +323,6 @@
A10826F91F576292004772AC /* WebPanel.h in Headers */ = {isa = PBXBuildFile; fileRef = A10826F71F576292004772AC /* WebPanel.h */; };
A10826FA1F576292004772AC /* WebPanel.mm in Sources */ = {isa = PBXBuildFile; fileRef = A10826F81F576292004772AC /* WebPanel.mm */; };
A10826FE1F58A433004772AC /* NSGraphicsSPI.h in Headers */ = {isa = PBXBuildFile; fileRef = A10826FD1F58A433004772AC /* NSGraphicsSPI.h */; };
- A1175B491F6AFF8E00C4B9F0 /* SpeechSynthesisSPI.h in Headers */ = {isa = PBXBuildFile; fileRef = A1175B481F6AFF8E00C4B9F0 /* SpeechSynthesisSPI.h */; };
A1175B4B1F6B2D7E00C4B9F0 /* NSCellSPI.h in Headers */ = {isa = PBXBuildFile; fileRef = A1175B4A1F6B2D7E00C4B9F0 /* NSCellSPI.h */; };
A1175B4E1F6B337300C4B9F0 /* PopupMenu.h in Headers */ = {isa = PBXBuildFile; fileRef = A1175B4C1F6B337300C4B9F0 /* PopupMenu.h */; };
A1175B4F1F6B337300C4B9F0 /* PopupMenu.mm in Sources */ = {isa = PBXBuildFile; fileRef = A1175B4D1F6B337300C4B9F0 /* PopupMenu.mm */; };
@@ -728,7 +727,6 @@
A10826F71F576292004772AC /* WebPanel.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = WebPanel.h; sourceTree = "<group>"; };
A10826F81F576292004772AC /* WebPanel.mm */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.cpp.objcpp; path = WebPanel.mm; sourceTree = "<group>"; };
A10826FD1F58A433004772AC /* NSGraphicsSPI.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = NSGraphicsSPI.h; sourceTree = "<group>"; };
- A1175B481F6AFF8E00C4B9F0 /* SpeechSynthesisSPI.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = SpeechSynthesisSPI.h; sourceTree = "<group>"; };
A1175B4A1F6B2D7E00C4B9F0 /* NSCellSPI.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = NSCellSPI.h; sourceTree = "<group>"; };
A1175B4C1F6B337300C4B9F0 /* PopupMenu.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = PopupMenu.h; sourceTree = "<group>"; };
A1175B4D1F6B337300C4B9F0 /* PopupMenu.mm */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.cpp.objcpp; path = PopupMenu.mm; sourceTree = "<group>"; };
@@ -978,7 +976,6 @@
0C7785841F45130F00F4EBB6 /* NSWindowSPI.h */,
0C7785851F45130F00F4EBB6 /* PIPSPI.h */,
0C7785871F45130F00F4EBB6 /* QuickLookMacSPI.h */,
- A1175B481F6AFF8E00C4B9F0 /* SpeechSynthesisSPI.h */,
71B1141F26823ACD004D6701 /* SystemPreviewSPI.h */,
0C7785881F45130F00F4EBB6 /* TelephonyUtilitiesSPI.h */,
);
@@ -1549,7 +1546,6 @@
A3788E981F05B6CE00679425 /* Sound.h in Headers */,
93B38EBE25821CB600198E63 /* SpeechSoftLink.h in Headers */,
93B38EC225821D2200198E63 /* SpeechSPI.h in Headers */,
- A1175B491F6AFF8E00C4B9F0 /* SpeechSynthesisSPI.h in Headers */,
0C5AF9211F43A4C7002EAC02 /* SQLite3SPI.h in Headers */,
71B1142026823ACD004D6701 /* SystemPreviewSPI.h in Headers */,
31308B1420A21705003FB929 /* SystemPreviewSPI.h in Headers */,
Modified: trunk/Source/WebCore/PAL/pal/cocoa/AVFoundationSoftLink.h (286258 => 286259)
--- trunk/Source/WebCore/PAL/pal/cocoa/AVFoundationSoftLink.h 2021-11-29 20:37:40 UTC (rev 286258)
+++ trunk/Source/WebCore/PAL/pal/cocoa/AVFoundationSoftLink.h 2021-11-29 20:48:59 UTC (rev 286259)
@@ -76,6 +76,9 @@
#if PLATFORM(IOS_FAMILY)
SOFT_LINK_CLASS_FOR_HEADER(PAL, AVAudioSession)
SOFT_LINK_CLASS_FOR_HEADER(PAL, AVPersistableContentKeyRequest)
+#endif
+
+#if PLATFORM(COCOA)
SOFT_LINK_CLASS_FOR_HEADER(PAL, AVSpeechSynthesisVoice)
SOFT_LINK_CLASS_FOR_HEADER(PAL, AVSpeechSynthesizer)
SOFT_LINK_CLASS_FOR_HEADER(PAL, AVSpeechUtterance)
Modified: trunk/Source/WebCore/PAL/pal/cocoa/AVFoundationSoftLink.mm (286258 => 286259)
--- trunk/Source/WebCore/PAL/pal/cocoa/AVFoundationSoftLink.mm 2021-11-29 20:37:40 UTC (rev 286258)
+++ trunk/Source/WebCore/PAL/pal/cocoa/AVFoundationSoftLink.mm 2021-11-29 20:48:59 UTC (rev 286259)
@@ -102,6 +102,9 @@
#if PLATFORM(IOS_FAMILY)
SOFT_LINK_CLASS_FOR_SOURCE_WITH_EXPORT(PAL, AVFoundation, AVAudioSession, PAL_EXPORT)
SOFT_LINK_CLASS_FOR_SOURCE_WITH_EXPORT(PAL, AVFoundation, AVPersistableContentKeyRequest, PAL_EXPORT)
+#endif
+
+#if PLATFORM(COCOA)
SOFT_LINK_CLASS_FOR_SOURCE_WITH_EXPORT(PAL, AVFoundation, AVSpeechSynthesisVoice, PAL_EXPORT)
SOFT_LINK_CLASS_FOR_SOURCE_WITH_EXPORT(PAL, AVFoundation, AVSpeechSynthesizer, PAL_EXPORT)
SOFT_LINK_CLASS_FOR_SOURCE_WITH_EXPORT(PAL, AVFoundation, AVSpeechUtterance, PAL_EXPORT)
Modified: trunk/Source/WebCore/PAL/pal/spi/cocoa/AXSpeechManagerSPI.h (286258 => 286259)
--- trunk/Source/WebCore/PAL/pal/spi/cocoa/AXSpeechManagerSPI.h 2021-11-29 20:37:40 UTC (rev 286258)
+++ trunk/Source/WebCore/PAL/pal/spi/cocoa/AXSpeechManagerSPI.h 2021-11-29 20:48:59 UTC (rev 286259)
@@ -25,7 +25,7 @@
#pragma once
-#if PLATFORM(IOS_FAMILY)
+#if PLATFORM(COCOA)
// FIXME: Undo when isSystemVoice is available in all SDKs.
#if USE(APPLE_INTERNAL_SDK) && 0
@@ -42,5 +42,5 @@
#endif // USE(APPLE_INTERNAL_SDK)
-#endif // PLATFORM(IOS_FAMILY)
+#endif // PLATFORM(COCOA)
Deleted: trunk/Source/WebCore/PAL/pal/spi/mac/SpeechSynthesisSPI.h (286258 => 286259)
--- trunk/Source/WebCore/PAL/pal/spi/mac/SpeechSynthesisSPI.h 2021-11-29 20:37:40 UTC (rev 286258)
+++ trunk/Source/WebCore/PAL/pal/spi/mac/SpeechSynthesisSPI.h 2021-11-29 20:48:59 UTC (rev 286259)
@@ -1,43 +0,0 @@
-/*
- * Copyright (C) 2017-2021 Apple Inc. All rights reserved.
- *
- * Redistribution and use in source and binary forms, with or without
- * modification, are permitted provided that the following conditions
- * are met:
- * 1. Redistributions of source code must retain the above copyright
- * notice, this list of conditions and the following disclaimer.
- * 2. Redistributions in binary form must reproduce the above copyright
- * notice, this list of conditions and the following disclaimer in the
- * documentation and/or other materials provided with the distribution.
- *
- * THIS SOFTWARE IS PROVIDED BY APPLE INC. AND ITS CONTRIBUTORS ``AS IS''
- * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
- * THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
- * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR ITS CONTRIBUTORS
- * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
- * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
- * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
- * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
- * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
- * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
- * THE POSSIBILITY OF SUCH DAMAGE.
- */
-
-#pragma once
-
-#if USE(APPLE_INTERNAL_SDK)
-
-#include <ApplicationServices/ApplicationServicesPriv.h>
-
-#endif
-
-WTF_EXTERN_C_BEGIN
-
-CFArrayRef CopySpeechSynthesisVoicesForMode(CFTypeRef mode);
-#if HAVE(SPEECHSYNTHESIS_MONTEREY_SPI)
-CFStringRef CopyIdentifierStringForPreferredVoiceInListWithLocale(CFArrayRef voices, CFLocaleRef);
-#else
-CFStringRef GetIdentifierStringForPreferredVoiceInListWithLocale(CFArrayRef voices, CFLocaleRef);
-#endif
-
-WTF_EXTERN_C_END
Modified: trunk/Source/WebCore/SourcesCocoa.txt (286258 => 286259)
--- trunk/Source/WebCore/SourcesCocoa.txt 2021-11-29 20:37:40 UTC (rev 286258)
+++ trunk/Source/WebCore/SourcesCocoa.txt 2021-11-29 20:48:59 UTC (rev 286259)
@@ -259,6 +259,7 @@
platform/cocoa/PasteboardCocoa.mm
platform/cocoa/PasteboardCustomDataCocoa.mm
platform/cocoa/PlatformPasteboardCocoa.mm
+platform/cocoa/PlatformSpeechSynthesizerCocoa.mm @no-unify
platform/cocoa/PlaybackSessionModelMediaElement.mm
platform/cocoa/PowerSourceNotifier.mm
platform/cocoa/RemoteCommandListenerCocoa.mm
@@ -444,7 +445,6 @@
platform/ios/PlatformEventFactoryIOS.mm @no-unify
platform/ios/PlatformPasteboardIOS.mm
platform/ios/PlatformScreenIOS.mm
-platform/ios/PlatformSpeechSynthesizerIOS.mm @no-unify
platform/ios/PlaybackSessionInterfaceAVKit.mm @no-unify
platform/ios/PreviewConverterIOS.mm
platform/ios/QuickLook.mm
@@ -491,7 +491,6 @@
platform/mac/PlatformEventFactoryMac.mm
platform/mac/PlatformPasteboardMac.mm
platform/mac/PlatformScreenMac.mm
-platform/mac/PlatformSpeechSynthesizerMac.mm
platform/mac/PlaybackSessionInterfaceMac.mm @no-unify
platform/mac/PluginBlocklist.mm
platform/mac/PowerObserverMac.cpp
Modified: trunk/Source/WebCore/WebCore.xcodeproj/project.pbxproj (286258 => 286259)
--- trunk/Source/WebCore/WebCore.xcodeproj/project.pbxproj 2021-11-29 20:37:40 UTC (rev 286258)
+++ trunk/Source/WebCore/WebCore.xcodeproj/project.pbxproj 2021-11-29 20:48:59 UTC (rev 286259)
@@ -3705,7 +3705,7 @@
A9D248010D757E6900FDF959 /* JSDOMPluginArray.h in Headers */ = {isa = PBXBuildFile; fileRef = A9D247FD0D757E6900FDF959 /* JSDOMPluginArray.h */; };
A9D248070D757E7D00FDF959 /* JSDOMMimeType.h in Headers */ = {isa = PBXBuildFile; fileRef = A9D248030D757E7D00FDF959 /* JSDOMMimeType.h */; };
A9D248090D757E7D00FDF959 /* JSDOMMimeTypeArray.h in Headers */ = {isa = PBXBuildFile; fileRef = A9D248050D757E7D00FDF959 /* JSDOMMimeTypeArray.h */; };
- AA12DF491743DF83004DAFDF /* PlatformSpeechSynthesizerIOS.mm in Sources */ = {isa = PBXBuildFile; fileRef = AAE3755D17429BCC006200C2 /* PlatformSpeechSynthesizerIOS.mm */; };
+ AA12DF491743DF83004DAFDF /* PlatformSpeechSynthesizerCocoa.mm in Sources */ = {isa = PBXBuildFile; fileRef = AAE3755D17429BCC006200C2 /* PlatformSpeechSynthesizerCocoa.mm */; };
AA21ECCD0ABF0FC6002B834C /* CSSCursorImageValue.h in Headers */ = {isa = PBXBuildFile; fileRef = AA0978EE0ABAA6E100874480 /* CSSCursorImageValue.h */; settings = {ATTRIBUTES = (Private, ); }; };
AA2A5ACE16A485FD00975A25 /* SpeechSynthesisVoice.h in Headers */ = {isa = PBXBuildFile; fileRef = AA2A5AC716A485D500975A25 /* SpeechSynthesisVoice.h */; };
AA2A5AD016A4860400975A25 /* SpeechSynthesisUtterance.h in Headers */ = {isa = PBXBuildFile; fileRef = AA2A5AC516A485D500975A25 /* SpeechSynthesisUtterance.h */; };
@@ -7846,7 +7846,6 @@
29D7BCF91444AF7D0070619C /* AccessibilitySpinButton.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = AccessibilitySpinButton.h; sourceTree = "<group>"; };
29E04A27BED2F81F98E9022B /* JSBeforeUnloadEvent.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = JSBeforeUnloadEvent.h; sourceTree = "<group>"; };
29E4D8DF16B0940F00C84704 /* PlatformSpeechSynthesizer.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = PlatformSpeechSynthesizer.h; sourceTree = "<group>"; };
- 29E4D8E016B0959800C84704 /* PlatformSpeechSynthesizerMac.mm */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.objcpp; path = PlatformSpeechSynthesizerMac.mm; sourceTree = "<group>"; };
29FAF4B5195AB08900A522DC /* TextUndoInsertionMarkupMac.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = TextUndoInsertionMarkupMac.h; sourceTree = "<group>"; };
2A4107A026CB66ED003BF797 /* CSSKeywordValue.cpp */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.cpp.cpp; path = CSSKeywordValue.cpp; sourceTree = "<group>"; };
2A4107A226CB66ED003BF797 /* CSSKeywordValue.idl */ = {isa = PBXFileReference; lastKnownFileType = text; path = CSSKeywordValue.idl; sourceTree = "<group>"; };
@@ -14249,7 +14248,7 @@
AADEFE4325AF4FCB0040DD67 /* FocusOptions.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = FocusOptions.h; sourceTree = "<group>"; };
AAE27B7416CBFC0D00623043 /* PlatformSpeechSynthesizerMock.cpp */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; path = PlatformSpeechSynthesizerMock.cpp; sourceTree = "<group>"; };
AAE27B7516CBFC0D00623043 /* PlatformSpeechSynthesizerMock.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = PlatformSpeechSynthesizerMock.h; sourceTree = "<group>"; };
- AAE3755D17429BCC006200C2 /* PlatformSpeechSynthesizerIOS.mm */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.objcpp; path = PlatformSpeechSynthesizerIOS.mm; sourceTree = "<group>"; };
+ AAE3755D17429BCC006200C2 /* PlatformSpeechSynthesizerCocoa.mm */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.objcpp; path = PlatformSpeechSynthesizerCocoa.mm; sourceTree = "<group>"; };
AAF5B7B11524B4BD0004CB49 /* WebSocketFrame.cpp */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; path = WebSocketFrame.cpp; sourceTree = "<group>"; };
AB23A32509BBA7D00067CC53 /* BeforeTextInsertedEvent.cpp */ = {isa = PBXFileReference; fileEncoding = 30; lastKnownFileType = sourcecode.cpp.cpp; path = BeforeTextInsertedEvent.cpp; sourceTree = "<group>"; };
AB23A32609BBA7D00067CC53 /* BeforeTextInsertedEvent.h */ = {isa = PBXFileReference; fileEncoding = 30; lastKnownFileType = sourcecode.c.h; path = BeforeTextInsertedEvent.h; sourceTree = "<group>"; };
@@ -23595,7 +23594,6 @@
BCAA486D14A052530088FAC4 /* PlatformEventFactoryMac.mm */,
C5F765BA14E1ECF4006C899B /* PlatformPasteboardMac.mm */,
BC94D1070C274F88006BC617 /* PlatformScreenMac.mm */,
- 29E4D8E016B0959800C84704 /* PlatformSpeechSynthesizerMac.mm */,
CDA29A151CBDA56C00901CCF /* PlaybackSessionInterfaceMac.h */,
CDA29A141CBDA56C00901CCF /* PlaybackSessionInterfaceMac.mm */,
1AFFC44F1D5E7EC700267A66 /* PluginBlocklist.h */,
@@ -26333,7 +26331,6 @@
26601EBE14B3B9AD0012C0FE /* PlatformEventFactoryIOS.mm */,
C5278B0B17F212EA003A2998 /* PlatformPasteboardIOS.mm */,
E45390320EAFD637003695C8 /* PlatformScreenIOS.mm */,
- AAE3755D17429BCC006200C2 /* PlatformSpeechSynthesizerIOS.mm */,
CDA29A2E1CBF73FC00901CCF /* PlaybackSessionInterfaceAVKit.h */,
CDA29A2F1CBF73FC00901CCF /* PlaybackSessionInterfaceAVKit.mm */,
A1C150771E3F2B3E0032C98C /* PreviewConverterIOS.mm */,
@@ -26492,6 +26489,7 @@
9BED2CAF1F7CC06200666018 /* PasteboardCocoa.mm */,
F4FB35002350C96200F0094A /* PasteboardCustomDataCocoa.mm */,
F4628A9E234D3BBF00BC884C /* PlatformPasteboardCocoa.mm */,
+ AAE3755D17429BCC006200C2 /* PlatformSpeechSynthesizerCocoa.mm */,
52B0D4BD1C57FD1E0077CE53 /* PlatformView.h */,
95BA4FAB26D981AA002A0E62 /* PlatformViewController.h */,
CDA29A0A1CBD9A7400901CCF /* PlaybackSessionModel.h */,
@@ -38223,7 +38221,7 @@
1A569D1A0D7E2B82007C3983 /* objc_utility.mm in Sources */,
E16982601134636A00894115 /* ObjCRuntimeObject.mm in Sources */,
CEA284662141E84900E407E8 /* PlatformEventFactoryIOS.mm in Sources */,
- AA12DF491743DF83004DAFDF /* PlatformSpeechSynthesizerIOS.mm in Sources */,
+ AA12DF491743DF83004DAFDF /* PlatformSpeechSynthesizerCocoa.mm in Sources */,
CDA29A301CBF74D400901CCF /* PlaybackSessionInterfaceAVKit.mm in Sources */,
CDA29A161CBDA56C00901CCF /* PlaybackSessionInterfaceMac.mm in Sources */,
419242492127B93E00634FCF /* RealtimeOutgoingVideoSourceCocoa.mm in Sources */,
Modified: trunk/Source/WebCore/page/SpeechSynthesisClient.h (286258 => 286259)
--- trunk/Source/WebCore/page/SpeechSynthesisClient.h 2021-11-29 20:37:40 UTC (rev 286258)
+++ trunk/Source/WebCore/page/SpeechSynthesisClient.h 2021-11-29 20:48:59 UTC (rev 286259)
@@ -47,6 +47,7 @@
virtual void cancel() = 0;
virtual void pause() = 0;
virtual void resume() = 0;
+ virtual void resetState() = 0;
};
Modified: trunk/Source/WebCore/platform/PlatformSpeechSynthesizer.h (286258 => 286259)
--- trunk/Source/WebCore/platform/PlatformSpeechSynthesizer.h 2021-11-29 20:37:40 UTC (rev 286258)
+++ trunk/Source/WebCore/platform/PlatformSpeechSynthesizer.h 2021-11-29 20:48:59 UTC (rev 286259)
@@ -72,8 +72,8 @@
virtual void pause();
virtual void resume();
virtual void cancel();
+ virtual void resetState();
- void resetState();
PlatformSpeechSynthesizerClient* client() const { return m_speechSynthesizerClient; }
protected:
Copied: trunk/Source/WebCore/platform/cocoa/PlatformSpeechSynthesizerCocoa.mm (from rev 286258, trunk/Source/WebCore/platform/ios/PlatformSpeechSynthesizerIOS.mm) (0 => 286259)
--- trunk/Source/WebCore/platform/cocoa/PlatformSpeechSynthesizerCocoa.mm (rev 0)
+++ trunk/Source/WebCore/platform/cocoa/PlatformSpeechSynthesizerCocoa.mm 2021-11-29 20:48:59 UTC (rev 286259)
@@ -0,0 +1,334 @@
+/*
+ * Copyright (C) 2013 Apple Inc. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY APPLE INC. AND ITS CONTRIBUTORS ``AS IS'' AND ANY
+ * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+ * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+ * DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR ITS CONTRIBUTORS BE LIABLE FOR ANY
+ * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+ * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+ * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
+ * ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+ * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#import "config.h"
+#import "PlatformSpeechSynthesizer.h"
+
+#if ENABLE(SPEECH_SYNTHESIS) && PLATFORM(COCOA)
+
+#import "PlatformSpeechSynthesisUtterance.h"
+#import "PlatformSpeechSynthesisVoice.h"
+
+#if __has_include(<AVFAudio/AVSpeechSynthesis.h>)
+#import <AVFAudio/AVSpeechSynthesis.h>
+#else
+#import <AVFoundation/AVFoundation.h>
+#endif
+
+#import <pal/spi/cocoa/AXSpeechManagerSPI.h>
+#import <wtf/BlockObjCExceptions.h>
+#import <wtf/RetainPtr.h>
+
+#import <pal/cocoa/AVFoundationSoftLink.h>
+
+static float getAVSpeechUtteranceDefaultSpeechRate()
+{
+ static float value;
+ static void* symbol;
+ if (!symbol) {
+ symbol = dlsym(PAL::AVFoundationLibrary(), "AVSpeechUtteranceDefaultSpeechRate");
+ RELEASE_ASSERT_WITH_MESSAGE(symbol, "%s", dlerror());
+ value = *static_cast<float const *>(symbol);
+ }
+ return value;
+}
+
+static float getAVSpeechUtteranceMaximumSpeechRate()
+{
+ static float value;
+ static void* symbol;
+ if (!symbol) {
+ symbol = dlsym(PAL::AVFoundationLibrary(), "AVSpeechUtteranceMaximumSpeechRate");
+ RELEASE_ASSERT_WITH_MESSAGE(symbol, "%s", dlerror());
+ value = *static_cast<float const *>(symbol);
+ }
+ return value;
+}
+
+#define AVSpeechUtteranceDefaultSpeechRate getAVSpeechUtteranceDefaultSpeechRate()
+#define AVSpeechUtteranceMaximumSpeechRate getAVSpeechUtteranceMaximumSpeechRate()
+
+@interface WebSpeechSynthesisWrapper : NSObject<AVSpeechSynthesizerDelegate> {
+ WebCore::PlatformSpeechSynthesizer* m_synthesizerObject;
+ // Hold a Ref to the utterance so that it won't disappear until the synth is done with it.
+ RefPtr<WebCore::PlatformSpeechSynthesisUtterance> m_utterance;
+
+ RetainPtr<AVSpeechSynthesizer> m_synthesizer;
+}
+
+- (WebSpeechSynthesisWrapper *)initWithSpeechSynthesizer:(WebCore::PlatformSpeechSynthesizer*)synthesizer;
+- (void)speakUtterance:(RefPtr<WebCore::PlatformSpeechSynthesisUtterance>&&)utterance;
+
+@end
+
+@implementation WebSpeechSynthesisWrapper
+
+- (WebSpeechSynthesisWrapper *)initWithSpeechSynthesizer:(WebCore::PlatformSpeechSynthesizer*)synthesizer
+{
+ if (!(self = [super init]))
+ return nil;
+
+ m_synthesizerObject = synthesizer;
+ return self;
+}
+
+- (float)mapSpeechRateToPlatformRate:(float)rate
+{
+ // WebSpeech says to go from 0.1 -> 10 (default 1)
+ // AVSpeechSynthesizer asks for 0 -> 1 (default 0.5)
+ if (rate < 1)
+ rate *= AVSpeechUtteranceDefaultSpeechRate;
+ else
+ rate = AVSpeechUtteranceDefaultSpeechRate + ((rate - 1) * (AVSpeechUtteranceMaximumSpeechRate - AVSpeechUtteranceDefaultSpeechRate));
+
+ return rate;
+}
+
+- (void)speakUtterance:(RefPtr<WebCore::PlatformSpeechSynthesisUtterance>&&)utterance
+{
+ // When speak is called we should not have an existing speech utterance outstanding.
+ ASSERT(!m_utterance);
+ ASSERT(utterance);
+ if (!utterance)
+ return;
+
+ BEGIN_BLOCK_OBJC_EXCEPTIONS
+ if (!m_synthesizer) {
+ m_synthesizer = adoptNS([PAL::allocAVSpeechSynthesizerInstance() init]);
+ [m_synthesizer setDelegate:self];
+ }
+
+ // Choose the best voice, by first looking at the utterance voice, then the utterance language,
+ // then choose the default language.
+ WebCore::PlatformSpeechSynthesisVoice* utteranceVoice = utterance->voice();
+ NSString *voiceLanguage = nil;
+ if (!utteranceVoice) {
+ if (utterance->lang().isEmpty())
+ voiceLanguage = [PAL::getAVSpeechSynthesisVoiceClass() currentLanguageCode];
+ else
+ voiceLanguage = utterance->lang();
+ } else
+ voiceLanguage = utterance->voice()->lang();
+
+ AVSpeechSynthesisVoice *avVoice = nil;
+ if (voiceLanguage)
+ avVoice = [PAL::getAVSpeechSynthesisVoiceClass() voiceWithLanguage:voiceLanguage];
+
+ AVSpeechUtterance *avUtterance = [PAL::getAVSpeechUtteranceClass() speechUtteranceWithString:utterance->text()];
+
+ [avUtterance setRate:[self mapSpeechRateToPlatformRate:utterance->rate()]];
+ [avUtterance setVolume:utterance->volume()];
+ [avUtterance setPitchMultiplier:utterance->pitch()];
+ [avUtterance setVoice:avVoice];
+ m_utterance = WTFMove(utterance);
+
+ // macOS won't send a did start speaking callback for empty strings.
+#if !HAVE(UNIFIED_SPEECHSYNTHESIS_FIX_FOR_81465164)
+ if (!m_utterance->text().length())
+ m_synthesizerObject->client()->didStartSpeaking(*m_utterance);
+#endif
+
+ [m_synthesizer speakUtterance:avUtterance];
+ END_BLOCK_OBJC_EXCEPTIONS
+}
+
+- (void)pause
+{
+ if (!m_utterance)
+ return;
+
+ BEGIN_BLOCK_OBJC_EXCEPTIONS
+ [m_synthesizer pauseSpeakingAtBoundary:AVSpeechBoundaryImmediate];
+ END_BLOCK_OBJC_EXCEPTIONS
+}
+
+- (void)resume
+{
+ if (!m_utterance)
+ return;
+
+ BEGIN_BLOCK_OBJC_EXCEPTIONS
+ [m_synthesizer continueSpeaking];
+ END_BLOCK_OBJC_EXCEPTIONS
+}
+
+- (void)resetState
+{
+ // On a reset, cancel the current utterance and clear it immediately so the next speech job can proceed without waiting for a cancellation callback
+ [self cancel];
+ m_utterance = nil;
+}
+
+- (void)cancel
+{
+ if (!m_utterance)
+ return;
+
+ BEGIN_BLOCK_OBJC_EXCEPTIONS
+ [m_synthesizer stopSpeakingAtBoundary:AVSpeechBoundaryImmediate];
+ END_BLOCK_OBJC_EXCEPTIONS
+}
+
+- (void)speechSynthesizer:(AVSpeechSynthesizer *)synthesizer didStartSpeechUtterance:(AVSpeechUtterance *)utterance
+{
+ UNUSED_PARAM(synthesizer);
+ UNUSED_PARAM(utterance);
+ if (!m_utterance)
+ return;
+
+ m_synthesizerObject->client()->didStartSpeaking(*m_utterance);
+}
+
+- (void)speechSynthesizer:(AVSpeechSynthesizer *)synthesizer didFinishSpeechUtterance:(AVSpeechUtterance *)utterance
+{
+ UNUSED_PARAM(synthesizer);
+ UNUSED_PARAM(utterance);
+ if (!m_utterance)
+ return;
+
+ // Clear the m_utterance variable in case finish speaking kicks off a new speaking job immediately.
+ RefPtr<WebCore::PlatformSpeechSynthesisUtterance> protectedUtterance = m_utterance;
+ m_utterance = nullptr;
+
+ m_synthesizerObject->client()->didFinishSpeaking(*protectedUtterance);
+}
+
+- (void)speechSynthesizer:(AVSpeechSynthesizer *)synthesizer didPauseSpeechUtterance:(AVSpeechUtterance *)utterance
+{
+ UNUSED_PARAM(synthesizer);
+ UNUSED_PARAM(utterance);
+ if (!m_utterance)
+ return;
+
+ m_synthesizerObject->client()->didPauseSpeaking(*m_utterance);
+}
+
+- (void)speechSynthesizer:(AVSpeechSynthesizer *)synthesizer didContinueSpeechUtterance:(AVSpeechUtterance *)utterance
+{
+ UNUSED_PARAM(synthesizer);
+ UNUSED_PARAM(utterance);
+ if (!m_utterance)
+ return;
+
+ m_synthesizerObject->client()->didResumeSpeaking(*m_utterance);
+}
+
+- (void)speechSynthesizer:(AVSpeechSynthesizer *)synthesizer didCancelSpeechUtterance:(AVSpeechUtterance *)utterance
+{
+ UNUSED_PARAM(synthesizer);
+ UNUSED_PARAM(utterance);
+ if (!m_utterance)
+ return;
+
+ // Clear the m_utterance variable in case finish speaking kicks off a new speaking job immediately.
+ RefPtr<WebCore::PlatformSpeechSynthesisUtterance> protectedUtterance = m_utterance;
+ m_utterance = nullptr;
+
+ m_synthesizerObject->client()->didFinishSpeaking(*protectedUtterance);
+}
+
+- (void)speechSynthesizer:(AVSpeechSynthesizer *)synthesizer willSpeakRangeOfSpeechString:(NSRange)characterRange utterance:(AVSpeechUtterance *)utterance
+{
+ UNUSED_PARAM(synthesizer);
+ UNUSED_PARAM(utterance);
+
+ if (!m_utterance)
+ return;
+
+ // AVSpeechSynthesizer only supports word boundaries.
+ m_synthesizerObject->client()->boundaryEventOccurred(*m_utterance, WebCore::SpeechBoundary::SpeechWordBoundary, characterRange.location);
+}
+
+@end
+
+namespace WebCore {
+
+PlatformSpeechSynthesizer::PlatformSpeechSynthesizer(PlatformSpeechSynthesizerClient* client)
+ : m_speechSynthesizerClient(client)
+{
+}
+
+PlatformSpeechSynthesizer::~PlatformSpeechSynthesizer()
+{
+}
+
+void PlatformSpeechSynthesizer::initializeVoiceList()
+{
+ BEGIN_BLOCK_OBJC_EXCEPTIONS
+ for (AVSpeechSynthesisVoice *voice in [PAL::getAVSpeechSynthesisVoiceClass() speechVoices]) {
+ NSString *language = [voice language];
+ bool isDefault = true;
+ NSString *voiceURI = [voice identifier];
+ NSString *name = [voice name];
+
+ // Only show built-in voices when requesting through WebKit to reduce fingerprinting surface area.
+#if HAVE(AVSPEECHSYNTHESIS_SYSTEMVOICE)
+ // FIXME: Remove respondsToSelector check when isSystemVoice is available on all SDKs.
+ BOOL includeVoice = NO;
+ if ([voice respondsToSelector:@selector(isSystemVoice)])
+ includeVoice = voice.isSystemVoice;
+ else
+ includeVoice = voice.quality == AVSpeechSynthesisVoiceQualityDefault;
+ if (includeVoice)
+#else
+ // AVSpeechSynthesis on macOS does not support the quality property correctly.
+ if (voice.quality == AVSpeechSynthesisVoiceQualityDefault
+ || (TARGET_OS_OSX && ![voiceURI hasSuffix:@"premium"]))
+#endif
+ m_voiceList.append(PlatformSpeechSynthesisVoice::create(voiceURI, name, language, true, isDefault));
+ }
+ END_BLOCK_OBJC_EXCEPTIONS
+}
+
+void PlatformSpeechSynthesizer::pause()
+{
+ [m_platformSpeechWrapper pause];
+}
+
+void PlatformSpeechSynthesizer::resume()
+{
+ [m_platformSpeechWrapper resume];
+}
+
+void PlatformSpeechSynthesizer::speak(RefPtr<PlatformSpeechSynthesisUtterance>&& utterance)
+{
+ if (!m_platformSpeechWrapper)
+ m_platformSpeechWrapper = adoptNS([[WebSpeechSynthesisWrapper alloc] initWithSpeechSynthesizer:this]);
+
+ [m_platformSpeechWrapper speakUtterance:utterance.get()];
+}
+
+void PlatformSpeechSynthesizer::cancel()
+{
+ [m_platformSpeechWrapper cancel];
+}
+
+void PlatformSpeechSynthesizer::resetState()
+{
+ [m_platformSpeechWrapper resetState];
+}
+
+} // namespace WebCore
+
+#endif // ENABLE(SPEECH_SYNTHESIS) && PLATFORM(COCOA)
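As a worked example of the rate mapping in -mapSpeechRateToPlatformRate: above, here is a standalone sketch (not part of the changeset). The 0.5 and 1.0 constants stand in for AVSpeechUtteranceDefaultSpeechRate and AVSpeechUtteranceMaximumSpeechRate — assumed values; the shipping code resolves the real constants at runtime via dlsym — and rates that map above the platform maximum are presumably clamped by AVFoundation.

    // Sketch only: fixed stand-in constants instead of the dlsym-resolved AVFoundation values.
    #include <cstdio>

    static float mapSpeechRateToPlatformRate(float rate)
    {
        const float defaultRate = 0.5f; // AVSpeechUtteranceDefaultSpeechRate (assumed value)
        const float maximumRate = 1.0f; // AVSpeechUtteranceMaximumSpeechRate (assumed value)
        if (rate < 1)
            return rate * defaultRate;
        return defaultRate + ((rate - 1) * (maximumRate - defaultRate));
    }

    int main()
    {
        // WebSpeech rate 1 maps to the platform default; 2 already reaches the platform
        // maximum; larger values overshoot and are presumably clamped by AVFoundation.
        std::printf("%.3f %.3f %.3f %.3f\n",
            mapSpeechRateToPlatformRate(0.5f),   // 0.250
            mapSpeechRateToPlatformRate(1.0f),   // 0.500
            mapSpeechRateToPlatformRate(2.0f),   // 1.000
            mapSpeechRateToPlatformRate(10.0f)); // 5.000
        return 0;
    }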
Deleted: trunk/Source/WebCore/platform/ios/PlatformSpeechSynthesizerIOS.mm (286258 => 286259)
--- trunk/Source/WebCore/platform/ios/PlatformSpeechSynthesizerIOS.mm 2021-11-29 20:37:40 UTC (rev 286258)
+++ trunk/Source/WebCore/platform/ios/PlatformSpeechSynthesizerIOS.mm 2021-11-29 20:48:59 UTC (rev 286259)
@@ -1,315 +0,0 @@
-/*
- * Copyright (C) 2013 Apple Inc. All rights reserved.
- *
- * Redistribution and use in source and binary forms, with or without
- * modification, are permitted provided that the following conditions
- * are met:
- * 1. Redistributions of source code must retain the above copyright
- * notice, this list of conditions and the following disclaimer.
- * 2. Redistributions in binary form must reproduce the above copyright
- * notice, this list of conditions and the following disclaimer in the
- * documentation and/or other materials provided with the distribution.
- *
- * THIS SOFTWARE IS PROVIDED BY APPLE INC. AND ITS CONTRIBUTORS ``AS IS'' AND ANY
- * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
- * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
- * DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR ITS CONTRIBUTORS BE LIABLE FOR ANY
- * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
- * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
- * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
- * ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
- * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
- * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- */
-
-#import "config.h"
-#import "PlatformSpeechSynthesizer.h"
-
-#if ENABLE(SPEECH_SYNTHESIS) && PLATFORM(IOS_FAMILY)
-
-#import "PlatformSpeechSynthesisUtterance.h"
-#import "PlatformSpeechSynthesisVoice.h"
-#import <AVFoundation/AVSpeechSynthesis.h>
-#import <pal/spi/cocoa/AXSpeechManagerSPI.h>
-#import <wtf/BlockObjCExceptions.h>
-#import <wtf/RetainPtr.h>
-
-#import <pal/cocoa/AVFoundationSoftLink.h>
-
-static float getAVSpeechUtteranceDefaultSpeechRate()
-{
- static float value;
- static void* symbol;
- if (!symbol) {
- void* symbol = dlsym(PAL::AVFoundationLibrary(), "AVSpeechUtteranceDefaultSpeechRate");
- RELEASE_ASSERT_WITH_MESSAGE(symbol, "%s", dlerror());
- value = *static_cast<float const *>(symbol);
- }
- return value;
-}
-
-static float getAVSpeechUtteranceMaximumSpeechRate()
-{
- static float value;
- static void* symbol;
- if (!symbol) {
- void* symbol = dlsym(PAL::AVFoundationLibrary(), "AVSpeechUtteranceMaximumSpeechRate");
- RELEASE_ASSERT_WITH_MESSAGE(symbol, "%s", dlerror());
- value = *static_cast<float const *>(symbol);
- }
- return value;
-}
-
-#define AVSpeechUtteranceDefaultSpeechRate getAVSpeechUtteranceDefaultSpeechRate()
-#define AVSpeechUtteranceMaximumSpeechRate getAVSpeechUtteranceMaximumSpeechRate()
-
-@interface WebSpeechSynthesisWrapper : NSObject<AVSpeechSynthesizerDelegate>
-{
- WebCore::PlatformSpeechSynthesizer* m_synthesizerObject;
- // Hold a Ref to the utterance so that it won't disappear until the synth is done with it.
- RefPtr<WebCore::PlatformSpeechSynthesisUtterance> m_utterance;
-
- RetainPtr<AVSpeechSynthesizer> m_synthesizer;
-}
-
-- (WebSpeechSynthesisWrapper *)initWithSpeechSynthesizer:(WebCore::PlatformSpeechSynthesizer*)synthesizer;
-- (void)speakUtterance:(RefPtr<WebCore::PlatformSpeechSynthesisUtterance>&&)utterance;
-
-@end
-
-@implementation WebSpeechSynthesisWrapper
-
-- (WebSpeechSynthesisWrapper *)initWithSpeechSynthesizer:(WebCore::PlatformSpeechSynthesizer*)synthesizer
-{
- if (!(self = [super init]))
- return nil;
-
- m_synthesizerObject = synthesizer;
- return self;
-}
-
-- (float)mapSpeechRateToPlatformRate:(float)rate
-{
- // WebSpeech says to go from .1 -> 10 (default 1)
- // AVSpeechSynthesizer asks for 0 -> 1 (default. 5)
- if (rate < 1)
- rate *= AVSpeechUtteranceDefaultSpeechRate;
- else
- rate = AVSpeechUtteranceDefaultSpeechRate + ((rate - 1) * (AVSpeechUtteranceMaximumSpeechRate - AVSpeechUtteranceDefaultSpeechRate));
-
- return rate;
-}
-
-- (void)speakUtterance:(RefPtr<WebCore::PlatformSpeechSynthesisUtterance>&&)utterance
-{
- // When speak is called we should not have an existing speech utterance outstanding.
- ASSERT(!m_utterance);
- ASSERT(utterance);
-
- if (!utterance)
- return;
-
- BEGIN_BLOCK_OBJC_EXCEPTIONS
- if (!m_synthesizer) {
- m_synthesizer = adoptNS([PAL::allocAVSpeechSynthesizerInstance() init]);
- [m_synthesizer setDelegate:self];
- }
-
- // Choose the best voice, by first looking at the utterance voice, then the utterance language,
- // then choose the default language.
- WebCore::PlatformSpeechSynthesisVoice* utteranceVoice = utterance->voice();
- NSString *voiceLanguage = nil;
- if (!utteranceVoice) {
- if (utterance->lang().isEmpty())
- voiceLanguage = [PAL::getAVSpeechSynthesisVoiceClass() currentLanguageCode];
- else
- voiceLanguage = utterance->lang();
- } else
- voiceLanguage = utterance->voice()->lang();
-
- AVSpeechSynthesisVoice *avVoice = nil;
- if (voiceLanguage)
- avVoice = [PAL::getAVSpeechSynthesisVoiceClass() voiceWithLanguage:voiceLanguage];
-
- AVSpeechUtterance *avUtterance = [PAL::getAVSpeechUtteranceClass() speechUtteranceWithString:utterance->text()];
-
- [avUtterance setRate:[self mapSpeechRateToPlatformRate:utterance->rate()]];
- [avUtterance setVolume:utterance->volume()];
- [avUtterance setPitchMultiplier:utterance->pitch()];
- [avUtterance setVoice:avVoice];
- m_utterance = WTFMove(utterance);
-
- [m_synthesizer speakUtterance:avUtterance];
- END_BLOCK_OBJC_EXCEPTIONS
-}
-
-- (void)pause
-{
- if (!m_utterance)
- return;
-
- BEGIN_BLOCK_OBJC_EXCEPTIONS
- [m_synthesizer pauseSpeakingAtBoundary:AVSpeechBoundaryImmediate];
- END_BLOCK_OBJC_EXCEPTIONS
-}
-
-- (void)resume
-{
- if (!m_utterance)
- return;
-
- BEGIN_BLOCK_OBJC_EXCEPTIONS
- [m_synthesizer continueSpeaking];
- END_BLOCK_OBJC_EXCEPTIONS
-}
-
-- (void)cancel
-{
- if (!m_utterance)
- return;
-
- BEGIN_BLOCK_OBJC_EXCEPTIONS
- [m_synthesizer stopSpeakingAtBoundary:AVSpeechBoundaryImmediate];
- END_BLOCK_OBJC_EXCEPTIONS
-}
-
-- (void)speechSynthesizer:(AVSpeechSynthesizer *)synthesizer didStartSpeechUtterance:(AVSpeechUtterance *)utterance
-{
- UNUSED_PARAM(synthesizer);
- UNUSED_PARAM(utterance);
- if (!m_utterance)
- return;
-
- m_synthesizerObject->client()->didStartSpeaking(*m_utterance);
-}
-
-- (void)speechSynthesizer:(AVSpeechSynthesizer *)synthesizer didFinishSpeechUtterance:(AVSpeechUtterance *)utterance
-{
- UNUSED_PARAM(synthesizer);
- UNUSED_PARAM(utterance);
- if (!m_utterance)
- return;
-
- // Clear the m_utterance variable in case finish speaking kicks off a new speaking job immediately.
- RefPtr<WebCore::PlatformSpeechSynthesisUtterance> platformUtterance = m_utterance;
- m_utterance = nullptr;
-
- m_synthesizerObject->client()->didFinishSpeaking(*platformUtterance);
-}
-
-- (void)speechSynthesizer:(AVSpeechSynthesizer *)synthesizer didPauseSpeechUtterance:(AVSpeechUtterance *)utterance
-{
- UNUSED_PARAM(synthesizer);
- UNUSED_PARAM(utterance);
- if (!m_utterance)
- return;
-
- m_synthesizerObject->client()->didPauseSpeaking(*m_utterance);
-}
-
-- (void)speechSynthesizer:(AVSpeechSynthesizer *)synthesizer didContinueSpeechUtterance:(AVSpeechUtterance *)utterance
-{
- UNUSED_PARAM(synthesizer);
- UNUSED_PARAM(utterance);
- if (!m_utterance)
- return;
-
- m_synthesizerObject->client()->didResumeSpeaking(*m_utterance);
-}
-
-- (void)speechSynthesizer:(AVSpeechSynthesizer *)synthesizer didCancelSpeechUtterance:(AVSpeechUtterance *)utterance
-{
- UNUSED_PARAM(synthesizer);
- UNUSED_PARAM(utterance);
- if (!m_utterance)
- return;
-
- // Clear the m_utterance variable in case finish speaking kicks off a new speaking job immediately.
- RefPtr<WebCore::PlatformSpeechSynthesisUtterance> platformUtterance = m_utterance;
- m_utterance = nullptr;
-
- m_synthesizerObject->client()->didFinishSpeaking(*platformUtterance);
-}
-
-- (void)speechSynthesizer:(AVSpeechSynthesizer *)synthesizer willSpeakRangeOfSpeechString:(NSRange)characterRange utterance:(AVSpeechUtterance *)utterance
-{
- UNUSED_PARAM(synthesizer);
- UNUSED_PARAM(utterance);
-
- if (!m_utterance)
- return;
-
- // iOS only supports word boundaries.
- m_synthesizerObject->client()->boundaryEventOccurred(*m_utterance, WebCore::SpeechBoundary::SpeechWordBoundary, characterRange.location);
-}
-
-@end
-
-namespace WebCore {
-
-PlatformSpeechSynthesizer::PlatformSpeechSynthesizer(PlatformSpeechSynthesizerClient* client)
- : m_speechSynthesizerClient(client)
-{
-}
-
-PlatformSpeechSynthesizer::~PlatformSpeechSynthesizer()
-{
-}
-
-void PlatformSpeechSynthesizer::initializeVoiceList()
-{
- BEGIN_BLOCK_OBJC_EXCEPTIONS
- for (AVSpeechSynthesisVoice *voice in [PAL::getAVSpeechSynthesisVoiceClass() speechVoices]) {
- NSString *language = [voice language];
- bool isDefault = true;
- NSString *voiceURI = [voice identifier];
- NSString *name = [voice name];
-
- // Only show built-in voices when requesting through WebKit to reduce fingerprinting surface area.
-#if HAVE(AVSPEECHSYNTHESIS_SYSTEMVOICE)
- // FIXME: Remove respondsToSelector check when is available on all SDKs.
- BOOL includeVoice = NO;
- if ([voice respondsToSelector:@selector(isSystemVoice)])
- includeVoice = voice.isSystemVoice;
- else
- includeVoice = voice.quality == AVSpeechSynthesisVoiceQualityDefault;
- if (includeVoice)
-#else
- if (voice.quality == AVSpeechSynthesisVoiceQualityDefault)
-#endif
- m_voiceList.append(PlatformSpeechSynthesisVoice::create(voiceURI, name, language, true, isDefault));
- }
- END_BLOCK_OBJC_EXCEPTIONS
-}
-
-void PlatformSpeechSynthesizer::pause()
-{
- [m_platformSpeechWrapper pause];
-}
-
-void PlatformSpeechSynthesizer::resume()
-{
- [m_platformSpeechWrapper resume];
-}
-
-void PlatformSpeechSynthesizer::speak(RefPtr<PlatformSpeechSynthesisUtterance>&& utterance)
-{
- if (!m_platformSpeechWrapper)
- m_platformSpeechWrapper = adoptNS([[WebSpeechSynthesisWrapper alloc] initWithSpeechSynthesizer:this]);
-
- [m_platformSpeechWrapper speakUtterance:utterance.get()];
-}
-
-void PlatformSpeechSynthesizer::cancel()
-{
- [m_platformSpeechWrapper cancel];
-}
-
-void PlatformSpeechSynthesizer::resetState()
-{
- [m_platformSpeechWrapper cancel];
-}
-
-} // namespace WebCore
-
-#endif // ENABLE(SPEECH_SYNTHESIS) && PLATFORM(IOS_FAMILY)
Deleted: trunk/Source/WebCore/platform/mac/PlatformSpeechSynthesizerMac.mm (286258 => 286259)
--- trunk/Source/WebCore/platform/mac/PlatformSpeechSynthesizerMac.mm 2021-11-29 20:37:40 UTC (rev 286258)
+++ trunk/Source/WebCore/platform/mac/PlatformSpeechSynthesizerMac.mm 2021-11-29 20:48:59 UTC (rev 286259)
@@ -1,289 +0,0 @@
-/*
- * Copyright (C) 2013-2021 Apple Inc. All rights reserved.
- *
- * Redistribution and use in source and binary forms, with or without
- * modification, are permitted provided that the following conditions
- * are met:
- * 1. Redistributions of source code must retain the above copyright
- * notice, this list of conditions and the following disclaimer.
- * 2. Redistributions in binary form must reproduce the above copyright
- * notice, this list of conditions and the following disclaimer in the
- * documentation and/or other materials provided with the distribution.
- *
- * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
- * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
- * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
- * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
- * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
- * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
- * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
- * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
- * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
- * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
- * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- */
-
-#import "config.h"
-#import "PlatformSpeechSynthesizer.h"
-
-#if ENABLE(SPEECH_SYNTHESIS) && PLATFORM(MAC)
-
-#import "PlatformSpeechSynthesisUtterance.h"
-#import "PlatformSpeechSynthesisVoice.h"
-#import <AppKit/NSSpeechSynthesizer.h>
-#import <pal/spi/mac/SpeechSynthesisSPI.h>
-#import <wtf/RetainPtr.h>
-
-@interface WebSpeechSynthesisWrapper : NSObject<NSSpeechSynthesizerDelegate>
-{
- WebCore::PlatformSpeechSynthesizer* m_synthesizerObject;
- // Hold a Ref to the utterance so that it won't disappear until the synth is done with it.
- WebCore::PlatformSpeechSynthesisUtterance* m_utterance;
-
- RetainPtr<NSSpeechSynthesizer> m_synthesizer;
- float m_basePitch;
-}
-
-- (WebSpeechSynthesisWrapper *)initWithSpeechSynthesizer:(WebCore::PlatformSpeechSynthesizer *)synthesizer;
-- (void)speakUtterance:(WebCore::PlatformSpeechSynthesisUtterance *)utterance;
-
-@end
-
-@implementation WebSpeechSynthesisWrapper
-
-- (WebSpeechSynthesisWrapper *)initWithSpeechSynthesizer:(WebCore::PlatformSpeechSynthesizer *)synthesizer
-{
- if (!(self = [super init]))
- return nil;
-
- m_synthesizerObject = synthesizer;
- [self updateBasePitchForSynthesizer];
- return self;
-}
-
-// NSSpeechSynthesizer expects a Words per Minute (WPM) rate. There is no preset default
-// but they recommend that normal speaking is 180-220 WPM
-- (float)convertRateToWPM:(float)rate
-{
- // We'll say 200 WPM is the default 1x value.
- return 200.0f * rate;
-}
-
-- (float)convertPitchToNSSpeechValue:(float)pitch
-{
- // This allows the base pitch to range from 0% - 200% of the normal pitch.
- return m_basePitch * pitch;
-}
-
-- (void)updateBasePitchForSynthesizer
-{
- // Reset the base pitch whenever we change voices, since the base pitch is different for each voice.
- [m_synthesizer setObject:nil forProperty:NSSpeechResetProperty error:nil];
- m_basePitch = [[m_synthesizer objectForProperty:NSSpeechPitchBaseProperty error:nil] floatValue];
-}
-
-- (void)speakUtterance:(WebCore::PlatformSpeechSynthesisUtterance *)utterance
-{
- // When speak is called we should not have an existing speech utterance outstanding.
- ASSERT(!m_utterance);
- ASSERT(utterance);
-
- if (!m_synthesizer) {
- m_synthesizer = adoptNS([[NSSpeechSynthesizer alloc] initWithVoice:nil]);
- [m_synthesizer setDelegate:self];
- }
-
- // Find if we should use a specific voice based on the voiceURI in utterance.
- // Otherwise, find the voice that matches the language. The Mac doesn't have a default voice per language, so the first
- // one will have to do.
-
- WebCore::PlatformSpeechSynthesisVoice* utteranceVoice = utterance->voice();
- // If no voice was specified, try to match by language.
- if (!utteranceVoice && !utterance->lang().isEmpty()) {
- for (auto& voice : m_synthesizerObject->voiceList()) {
- if (equalIgnoringASCIICase(utterance->lang(), voice->lang())) {
- utteranceVoice = voice.get();
- if (voice->isDefault())
- break;
- }
- }
- }
-
- NSString *voiceURI = nil;
- if (utteranceVoice)
- voiceURI = utteranceVoice->voiceURI();
- else
- voiceURI = [NSSpeechSynthesizer defaultVoice];
-
- // Don't set the voice unless necessary. There's a bug in NSSpeechSynthesizer such that
- // setting the voice for the first time will cause the first speechDone callback to report it was unsuccessful.
- BOOL updatePitch = NO;
- if (![[m_synthesizer voice] isEqualToString:voiceURI]) {
- [m_synthesizer setVoice:voiceURI];
- // Reset the base pitch whenever we change voices.
- updatePitch = YES;
- }
-
- if (m_basePitch == 0 || updatePitch)
- [self updateBasePitchForSynthesizer];
-
- [m_synthesizer setObject:[NSNumber numberWithFloat:[self convertPitchToNSSpeechValue:utterance->pitch()]] forProperty:NSSpeechPitchBaseProperty error:nil];
- [m_synthesizer setRate:[self convertRateToWPM:utterance->rate()]];
- [m_synthesizer setVolume:utterance->volume()];
-
- m_utterance = utterance;
- [m_synthesizer startSpeakingString:utterance->text()];
- m_synthesizerObject->client()->didStartSpeaking(*m_utterance);
-}
-
-- (void)pause
-{
- if (!m_utterance)
- return;
-
- [m_synthesizer pauseSpeakingAtBoundary:NSSpeechImmediateBoundary];
- m_synthesizerObject->client()->didPauseSpeaking(*m_utterance);
-}
-
-- (void)resume
-{
- if (!m_utterance)
- return;
-
- [m_synthesizer continueSpeaking];
- m_synthesizerObject->client()->didResumeSpeaking(*m_utterance);
-}
-
-- (void)cancel
-{
- if (!m_utterance)
- return;
-
- [m_synthesizer stopSpeakingAtBoundary:NSSpeechImmediateBoundary];
- m_synthesizerObject->client()->speakingErrorOccurred(*m_utterance);
- m_utterance = 0;
-}
-
-- (void)invalidate
-{
- m_utterance = 0;
- [m_synthesizer setDelegate:nil];
- [m_synthesizer stopSpeakingAtBoundary:NSSpeechImmediateBoundary];
-}
-
-- (void)speechSynthesizer:(NSSpeechSynthesizer *)sender didFinishSpeaking:(BOOL)finishedSpeaking
-{
- if (!m_utterance)
- return;
-
- UNUSED_PARAM(sender);
-
- // Clear the m_utterance variable in case finish speaking kicks off a new speaking job immediately.
- WebCore::PlatformSpeechSynthesisUtterance* utterance = m_utterance;
- m_utterance = 0;
-
- if (finishedSpeaking)
- m_synthesizerObject->client()->didFinishSpeaking(*utterance);
- else
- m_synthesizerObject->client()->speakingErrorOccurred(*utterance);
-}
-
-- (void)speechSynthesizer:(NSSpeechSynthesizer *)sender willSpeakWord:(NSRange)characterRange ofString:(NSString *)string
-{
- UNUSED_PARAM(sender);
- UNUSED_PARAM(string);
-
- if (!m_utterance)
- return;
-
- // Mac platform only supports word boundaries.
- m_synthesizerObject->client()->boundaryEventOccurred(*m_utterance, WebCore::SpeechBoundary::SpeechWordBoundary, characterRange.location);
-}
-
-@end
-
-namespace WebCore {
-
-PlatformSpeechSynthesizer::PlatformSpeechSynthesizer(PlatformSpeechSynthesizerClient* client)
- : m_speechSynthesizerClient(client)
-{
-}
-
-PlatformSpeechSynthesizer::~PlatformSpeechSynthesizer()
-{
- [m_platformSpeechWrapper invalidate];
-}
-
-static RetainPtr<CFArrayRef> speechSynthesisGetVoiceIdentifiers()
-{
- // Get all the voices offered by TTS.
- // By default speech only returns "premium" voices, which does not include all the
- // international voices. This allows us to offer speech synthesis for all supported languages.
- return adoptCF(CopySpeechSynthesisVoicesForMode((__bridge CFArrayRef)@[ @"VoiceGroupDefault", @"VoiceGroupCompact" ]));
-}
-
-static RetainPtr<CFStringRef> speechSynthesisGetDefaultVoiceIdentifierForLocale(NSLocale *userLocale)
-{
- if (!userLocale)
- return nil;
-
-#if HAVE(SPEECHSYNTHESIS_MONTEREY_SPI)
- return adoptCF(CopyIdentifierStringForPreferredVoiceInListWithLocale(speechSynthesisGetVoiceIdentifiers().get(), (__bridge CFLocaleRef)userLocale));
-#else
- return GetIdentifierStringForPreferredVoiceInListWithLocale(speechSynthesisGetVoiceIdentifiers().get(), (__bridge CFLocaleRef)userLocale);
-#endif
-}
-
-void PlatformSpeechSynthesizer::initializeVoiceList()
-{
- auto availableVoices = speechSynthesisGetVoiceIdentifiers();
- NSUInteger count = [(__bridge NSArray *)availableVoices.get() count];
- for (NSUInteger k = 0; k < count; k++) {
- NSString *voiceName = [(__bridge NSArray *)availableVoices.get() objectAtIndex:k];
- NSDictionary *attributes = [NSSpeechSynthesizer attributesForVoice:voiceName];
-
- NSString *voiceURI = [attributes objectForKey:NSVoiceIdentifier];
- NSString *name = [attributes objectForKey:NSVoiceName];
- NSString *language = [attributes objectForKey:NSVoiceLocaleIdentifier];
- auto defaultVoiceURI = speechSynthesisGetDefaultVoiceIdentifierForLocale(adoptNS([[NSLocale alloc] initWithLocaleIdentifier:language]).get());
-
- // Change to BCP-47 format as defined by spec.
- language = [language stringByReplacingOccurrencesOfString:@"_" withString:@"-"];
-
- bool isDefault = [(__bridge NSString *)defaultVoiceURI.get() isEqualToString:voiceURI];
-
- m_voiceList.append(PlatformSpeechSynthesisVoice::create(voiceURI, name, language, true, isDefault));
- }
-}
-
-void PlatformSpeechSynthesizer::pause()
-{
- [m_platformSpeechWrapper pause];
-}
-
-void PlatformSpeechSynthesizer::resume()
-{
- [m_platformSpeechWrapper resume];
-}
-
-void PlatformSpeechSynthesizer::speak(RefPtr<PlatformSpeechSynthesisUtterance>&& utterance)
-{
- if (!m_platformSpeechWrapper)
- m_platformSpeechWrapper = adoptNS([[WebSpeechSynthesisWrapper alloc] initWithSpeechSynthesizer:this]);
-
- [m_platformSpeechWrapper speakUtterance:utterance.get()];
-}
-
-void PlatformSpeechSynthesizer::cancel()
-{
- [m_platformSpeechWrapper cancel];
-}
-
-void PlatformSpeechSynthesizer::resetState()
-{
- [m_platformSpeechWrapper cancel];
-}
-
-} // namespace WebCore
-
-#endif // ENABLE(SPEECH_SYNTHESIS) && PLATFORM(MAC)
Modified: trunk/Source/WebKit/ChangeLog (286258 => 286259)
--- trunk/Source/WebKit/ChangeLog 2021-11-29 20:37:40 UTC (rev 286258)
+++ trunk/Source/WebKit/ChangeLog 2021-11-29 20:48:59 UTC (rev 286259)
@@ -1,3 +1,22 @@
+2021-11-29 Chris Fleizach <cfleiz...@apple.com>
+
+ AX: Unify speech synthesizer platform usage for Mac/iOS
+ https://bugs.webkit.org/show_bug.cgi?id=231895
+ <rdar://problem/84372479>
+
+ Reviewed by Andres Gonzalez.
+
+ Create a new message for resetting speech synthesis state when the DOMWindow changes
+ but the Page (and its referenced speech synthesis) stays the same.
+
+ * UIProcess/WebPageProxy.cpp:
+ (WebKit::WebPageProxy::speechSynthesisResetState):
+ * UIProcess/WebPageProxy.h:
+ * UIProcess/WebPageProxy.messages.in:
+ * WebProcess/WebCoreSupport/WebSpeechSynthesisClient.cpp:
+ (WebKit::WebSpeechSynthesisClient::resetState):
+ * WebProcess/WebCoreSupport/WebSpeechSynthesisClient.h:
+
2021-11-29 Brent Fulgham <bfulg...@apple.com>
REGRESSION(286219): Build fix.
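To make the new plumbing easier to follow, here is the SpeechSynthesisResetState round trip condensed from the hunks below (a sketch stitched from the diff, not additional code in the changeset): WebCore's SpeechSynthesis now calls resetState() on its client when a new DOMWindow reuses an existing Page, the WebProcess client turns that into an IPC message, and the UIProcess forwards it to the platform synthesizer, whose Cocoa implementation cancels and clears any in-flight utterance.

    // WebProcess side (WebSpeechSynthesisClient.cpp, below):
    void WebSpeechSynthesisClient::resetState()
    {
        m_page.send(Messages::WebPageProxy::SpeechSynthesisResetState());
    }

    // UIProcess side (WebPageProxy.cpp, below), reached via the new
    // SpeechSynthesisResetState() entry in WebPageProxy.messages.in:
    void WebPageProxy::speechSynthesisResetState()
    {
        speechSynthesisData().synthesizer->resetState();
    }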
Modified: trunk/Source/WebKit/UIProcess/WebPageProxy.cpp (286258 => 286259)
--- trunk/Source/WebKit/UIProcess/WebPageProxy.cpp 2021-11-29 20:37:40 UTC (rev 286258)
+++ trunk/Source/WebKit/UIProcess/WebPageProxy.cpp 2021-11-29 20:48:59 UTC (rev 286259)
@@ -10515,6 +10515,11 @@
speechSynthesisData().synthesizer->cancel();
}
+void WebPageProxy::speechSynthesisResetState()
+{
+ speechSynthesisData().synthesizer->resetState();
+}
+
void WebPageProxy::speechSynthesisPause(CompletionHandler<void()>&& completionHandler)
{
speechSynthesisData().speakingPausedCompletionHandler = WTFMove(completionHandler);
Modified: trunk/Source/WebKit/UIProcess/WebPageProxy.h (286258 => 286259)
--- trunk/Source/WebKit/UIProcess/WebPageProxy.h 2021-11-29 20:37:40 UTC (rev 286258)
+++ trunk/Source/WebKit/UIProcess/WebPageProxy.h 2021-11-29 20:48:59 UTC (rev 286259)
@@ -1818,6 +1818,7 @@
void speechSynthesisCancel();
void speechSynthesisPause(CompletionHandler<void()>&&);
void speechSynthesisResume(CompletionHandler<void()>&&);
+ void speechSynthesisResetState();
#endif
void configureLoggingChannel(const String&, WTFLogChannelState, WTFLogLevel);
Modified: trunk/Source/WebKit/UIProcess/WebPageProxy.messages.in (286258 => 286259)
--- trunk/Source/WebKit/UIProcess/WebPageProxy.messages.in 2021-11-29 20:37:40 UTC (rev 286258)
+++ trunk/Source/WebKit/UIProcess/WebPageProxy.messages.in 2021-11-29 20:48:59 UTC (rev 286259)
@@ -544,6 +544,7 @@
SpeechSynthesisCancel()
SpeechSynthesisPause() -> () Async
SpeechSynthesisResume() -> () Async
+ SpeechSynthesisResetState()
#endif
#if ENABLE(UI_PROCESS_PDF_HUD)
Modified: trunk/Source/WebKit/WebProcess/WebCoreSupport/WebSpeechSynthesisClient.cpp (286258 => 286259)
--- trunk/Source/WebKit/WebProcess/WebCoreSupport/WebSpeechSynthesisClient.cpp 2021-11-29 20:37:40 UTC (rev 286258)
+++ trunk/Source/WebKit/WebProcess/WebCoreSupport/WebSpeechSynthesisClient.cpp 2021-11-29 20:48:59 UTC (rev 286259)
@@ -55,6 +55,11 @@
return nullptr;
}
+void WebSpeechSynthesisClient::resetState()
+{
+ m_page.send(Messages::WebPageProxy::SpeechSynthesisResetState());
+}
+
void WebSpeechSynthesisClient::speak(RefPtr<WebCore::PlatformSpeechSynthesisUtterance> utterance)
{
WTF::CompletionHandler<void()> startedCompletionHandler = [this, weakThis = WeakPtr { *this }]() mutable {
Modified: trunk/Source/WebKit/WebProcess/WebCoreSupport/WebSpeechSynthesisClient.h (286258 => 286259)
--- trunk/Source/WebKit/WebProcess/WebCoreSupport/WebSpeechSynthesisClient.h 2021-11-29 20:37:40 UTC (rev 286258)
+++ trunk/Source/WebKit/WebProcess/WebCoreSupport/WebSpeechSynthesisClient.h 2021-11-29 20:48:59 UTC (rev 286259)
@@ -53,6 +53,7 @@
private:
void setObserver(WeakPtr<WebCore::SpeechSynthesisClientObserver> observer) override { m_observer = observer; }
WeakPtr<WebCore::SpeechSynthesisClientObserver> observer() const override { return m_observer; }
+ void resetState() override;
WebCore::SpeechSynthesisClientObserver* corePageObserver() const;