- Revision
- 151301
- Author
- jer.no...@apple.com
- Date
- 2013-06-06 16:53:41 -0700 (Thu, 06 Jun 2013)
Log Message
Upstream iOS WebAudio behavior restrictions.
https://bugs.webkit.org/show_bug.cgi?id=117151
Reviewed by Eric Carlson.
2012-06-25 Jeffrey Pfau <jp...@apple.com>
<rdar://problem/11041007> WebAudio should restrict playback to user interaction on iOS
Added a behavior restrictions field in AudioContext that behaves similarly to
the one in HTMLMediaElement. Restrictions are lifted when JavaScript calls noteOn,
noteGrainOn or startRendering from a user gesture event.
Reviewed by Dean Jackson.
No new tests, testing framework doesn't support web audio.
* Modules/webaudio/AudioBufferSourceNode.cpp:
(WebCore::AudioBufferSourceNode::startGrain):
* Modules/webaudio/AudioContext.cpp:
(WebCore::AudioContext::AudioContext):
(WebCore::AudioContext::constructCommon):
(WebCore::AudioContext::lazyInitialize):
(WebCore::AudioContext::startRendering):
* Modules/webaudio/AudioContext.h:
(WebCore::AudioContext::userGestureRequiredForAudioStart):
(WebCore::AudioContext::addBehaviorRestriction):
(WebCore::AudioContext::removeBehaviorRestriction):
* Modules/webaudio/AudioDestinationNode.cpp:
(WebCore::AudioDestinationNode::render):
* Modules/webaudio/AudioScheduledSourceNode.cpp:
(WebCore::AudioScheduledSourceNode::start):
Modified Paths
Diff
Modified: trunk/Source/WebCore/ChangeLog (151300 => 151301)
--- trunk/Source/WebCore/ChangeLog 2013-06-06 23:37:24 UTC (rev 151300)
+++ trunk/Source/WebCore/ChangeLog 2013-06-06 23:53:41 UTC (rev 151301)
@@ -1,3 +1,38 @@
+2013-06-03 Jer Noble <jer.no...@apple.com>
+
+ Upstream iOS WebAudio behavior restrictions.
+ https://bugs.webkit.org/show_bug.cgi?id=117151
+
+ Reviewed by Eric Carlson.
+
+ 2012-06-25 Jeffrey Pfau <jp...@apple.com>
+
+ <rdar://problem/11041007> WebAudio should restrict playback to user interaction on iOS
+
+ Added a behavior restrictions field in AudioContext that behaves similarly to
+ the one in HTMLMediaElement. Restrictions are lifted when JavaScript calls noteOn,
+ noteGrainOn or startRendering from a user gesture event.
+
+ Reviewed by Dean Jackson.
+
+ No new tests, testing framework doesn't support web audio.
+
+ * Modules/webaudio/AudioBufferSourceNode.cpp:
+ (WebCore::AudioBufferSourceNode::startGrain):
+ * Modules/webaudio/AudioContext.cpp:
+ (WebCore::AudioContext::AudioContext):
+ (WebCore::AudioContext::constructCommon):
+ (WebCore::AudioContext::lazyInitialize):
+ (WebCore::AudioContext::startRendering):
+ * Modules/webaudio/AudioContext.h:
+ (WebCore::AudioContext::userGestureRequiredForAudioStart):
+ (WebCore::AudioContext::addBehaviorRestriction):
+ (WebCore::AudioContext::removeBehaviorRestriction):
+ * Modules/webaudio/AudioDestinationNode.cpp:
+ (WebCore::AudioDestinationNode::render):
+ * Modules/webaudio/AudioScheduledSourceNode.cpp:
+ (WebCore::AudioScheduledSourceNode::start):
+
2013-06-06 Ruth Fong <ruth_f...@apple.com>
Expose HTMLCanvasElement supportsContext
Modified: trunk/Source/WebCore/Modules/webaudio/AudioBufferSourceNode.cpp (151300 => 151301)
--- trunk/Source/WebCore/Modules/webaudio/AudioBufferSourceNode.cpp 2013-06-06 23:37:24 UTC (rev 151300)
+++ trunk/Source/WebCore/Modules/webaudio/AudioBufferSourceNode.cpp 2013-06-06 23:53:41 UTC (rev 151301)
@@ -33,6 +33,7 @@
#include "AudioUtilities.h"
#include "FloatConversion.h"
#include "ScriptCallStack.h"
+#include "ScriptController.h"
#include "ScriptExecutionContext.h"
#include <algorithm>
#include <wtf/MainThread.h>
@@ -382,6 +383,9 @@
{
ASSERT(isMainThread());
+ if (ScriptController::processingUserGesture())
+ context()->removeBehaviorRestriction(AudioContext::RequireUserGestureForAudioStartRestriction);
+
if (m_playbackState != UNSCHEDULED_STATE)
return;
Modified: trunk/Source/WebCore/Modules/webaudio/AudioContext.cpp (151300 => 151301)
--- trunk/Source/WebCore/Modules/webaudio/AudioContext.cpp 2013-06-06 23:37:24 UTC (rev 151300)
+++ trunk/Source/WebCore/Modules/webaudio/AudioContext.cpp 2013-06-06 23:53:41 UTC (rev 151301)
@@ -54,6 +54,7 @@
#include "OscillatorNode.h"
#include "PannerNode.h"
#include "ScriptCallStack.h"
+#include "ScriptController.h"
#include "ScriptProcessorNode.h"
#include "WaveShaperNode.h"
#include "WaveTable.h"
@@ -131,6 +132,7 @@
, m_graphOwnerThread(UndefinedThreadIdentifier)
, m_isOfflineContext(false)
, m_activeSourceCount(0)
+ , m_restrictions(NoRestrictions)
{
constructCommon();
@@ -156,6 +158,7 @@
, m_graphOwnerThread(UndefinedThreadIdentifier)
, m_isOfflineContext(true)
, m_activeSourceCount(0)
+ , m_restrictions(NoRestrictions)
{
constructCommon();
@@ -180,6 +183,13 @@
FFTFrame::initialize();
m_listener = AudioListener::create();
+
+#if PLATFORM(IOS)
+ if (!document()->settings() || document()->settings()->mediaPlaybackRequiresUserGesture())
+ addBehaviorRestriction(RequireUserGestureForAudioStartRestriction);
+ else
+ m_restrictions = NoRestrictions;
+#endif
}
AudioContext::~AudioContext()
@@ -213,7 +223,7 @@
// Each time provideInput() is called, a portion of the audio stream is rendered. Let's call this time period a "render quantum".
// NOTE: for now default AudioContext does not need an explicit startRendering() call from JavaScript.
// We may want to consider requiring it for symmetry with OfflineAudioContext.
- m_destinationNode->startRendering();
+ m_destinationNode->startRendering();
++s_hardwareContextCount;
}
@@ -943,6 +953,9 @@
void AudioContext::startRendering()
{
+ if (ScriptController::processingUserGesture())
+ removeBehaviorRestriction(AudioContext::RequireUserGestureForAudioStartRestriction);
+
destination()->startRendering();
}
Modified: trunk/Source/WebCore/Modules/webaudio/AudioContext.h (151300 => 151301)
--- trunk/Source/WebCore/Modules/webaudio/AudioContext.h 2013-06-06 23:37:24 UTC (rev 151300)
+++ trunk/Source/WebCore/Modules/webaudio/AudioContext.h 2013-06-06 23:53:41 UTC (rev 151301)
@@ -250,6 +250,19 @@
static unsigned s_hardwareContextCount;
+
+ // Restrictions to change default behaviors.
+ enum BehaviorRestrictionFlags {
+ NoRestrictions = 0,
+ RequireUserGestureForAudioStartRestriction = 1 << 0,
+ };
+ typedef unsigned BehaviorRestrictions;
+
+ bool userGestureRequiredForAudioStart() const { return m_restrictions & RequireUserGestureForAudioStartRestriction; }
+
+ void addBehaviorRestriction(BehaviorRestrictions restriction) { m_restrictions |= restriction; }
+ void removeBehaviorRestriction(BehaviorRestrictions restriction) { m_restrictions &= ~restriction; }
+
protected:
explicit AudioContext(Document*);
AudioContext(Document*, unsigned numberOfChannels, size_t numberOfFrames, float sampleRate);
@@ -350,6 +363,8 @@
// Number of AudioBufferSourceNodes that are active (playing).
int m_activeSourceCount;
+
+ BehaviorRestrictions m_restrictions;
};
} // WebCore
Modified: trunk/Source/WebCore/Modules/webaudio/AudioDestinationNode.cpp (151300 => 151301)
--- trunk/Source/WebCore/Modules/webaudio/AudioDestinationNode.cpp 2013-06-06 23:37:24 UTC (rev 151300)
+++ trunk/Source/WebCore/Modules/webaudio/AudioDestinationNode.cpp 2013-06-06 23:53:41 UTC (rev 151301)
@@ -64,6 +64,11 @@
return;
}
+ if (context()->userGestureRequiredForAudioStart()) {
+ destinationBus->zero();
+ return;
+ }
+
// Let the context take care of any business at the start of each render quantum.
context()->handlePreRenderTasks();
Modified: trunk/Source/WebCore/Modules/webaudio/AudioScheduledSourceNode.cpp (151300 => 151301)
--- trunk/Source/WebCore/Modules/webaudio/AudioScheduledSourceNode.cpp 2013-06-06 23:37:24 UTC (rev 151300)
+++ trunk/Source/WebCore/Modules/webaudio/AudioScheduledSourceNode.cpp 2013-06-06 23:53:41 UTC (rev 151301)
@@ -31,6 +31,7 @@
#include "AudioContext.h"
#include "AudioUtilities.h"
#include "Event.h"
+#include "ScriptController.h"
#include <algorithm>
#include <wtf/MathExtras.h>
@@ -137,6 +138,10 @@
void AudioScheduledSourceNode::start(double when)
{
ASSERT(isMainThread());
+
+ if (ScriptController::processingUserGesture())
+ context()->removeBehaviorRestriction(AudioContext::RequireUserGestureForAudioStartRestriction);
+
if (m_playbackState != UNSCHEDULED_STATE)
return;