Diff
Modified: trunk/Source/WTF/ChangeLog (173627 => 173628)
--- trunk/Source/WTF/ChangeLog 2014-09-15 19:50:03 UTC (rev 173627)
+++ trunk/Source/WTF/ChangeLog 2014-09-15 20:05:49 UTC (rev 173628)
@@ -1,3 +1,12 @@
+2014-09-15 Jer Noble <[email protected]>
+
+ [Mac] Support audioSourceProvider() in MediaPlayerPrivateAVFoundationObjC
+ https://bugs.webkit.org/show_bug.cgi?id=135042
+
+ Reviewed by Eric Carlson.
+
+ * wtf/Platform.h: Add WTF_USE_MEDIATOOLBOX.
+
2014-09-15 [email protected] <[email protected]>
[WinCairo] Make it easier to enable/disable GStreamer.
Modified: trunk/Source/WTF/wtf/Platform.h (173627 => 173628)
--- trunk/Source/WTF/wtf/Platform.h 2014-09-15 19:50:03 UTC (rev 173627)
+++ trunk/Source/WTF/wtf/Platform.h 2014-09-15 20:05:49 UTC (rev 173628)
@@ -1085,4 +1085,8 @@
#define TARGET_OS_IPHONE 0
#endif
+#if PLATFORM(IOS) || (PLATFORM(COCOA) && __MAC_OS_X_VERSION_MIN_REQUIRED >= 1090)
+#define WTF_USE_MEDIATOOLBOX 1
+#endif
+
#endif /* WTF_Platform_h */
Modified: trunk/Source/WebCore/ChangeLog (173627 => 173628)
--- trunk/Source/WebCore/ChangeLog 2014-09-15 19:50:03 UTC (rev 173627)
+++ trunk/Source/WebCore/ChangeLog 2014-09-15 20:05:49 UTC (rev 173628)
@@ -1,3 +1,92 @@
+2014-09-15 Jer Noble <[email protected]>
+
+ [Mac] Support audioSourceProvider() in MediaPlayerPrivateAVFoundationObjC
+ https://bugs.webkit.org/show_bug.cgi?id=135042
+
+ Reviewed by Eric Carlson.
+
+ Add support for AudioSourceProvider in MediaPlayerPrivateAVFoundationObjC, thereby
+ supporting MediaElementAudioSourceNode.
+
+ Import the CARingBuffer from CoreAudio sample code, making the necessary modifications
+ for WebKit style and coding conventions. The threading assumptions for this class are
+ that there will be a single writer thread and a single reader thread.
+
+ * platform/audio/mac/CARingBuffer.cpp: Added.
+ (WebCore::CARingBuffer::create): Create a unique_ptr.
+ (WebCore::CARingBuffer::CARingBuffer): Simple constructor.
+ (WebCore::CARingBuffer::~CARingBuffer): Simple destructor.
+ (WebCore::CARingBuffer::allocate): Initialize the buffers and time bounds structures.
+ (WebCore::CARingBuffer::deallocate): Destroy same.
+ (WebCore::ZeroRange): Static utility method.
+ (WebCore::StoreABL): Ditto.
+ (WebCore::FetchABL): Ditto.
+ (WebCore::ZeroABL): Ditto.
+ (WebCore::CARingBuffer::store): Store the passed in buffer in the ring buffer, wrapping
+ if necessary.
+ (WebCore::CARingBuffer::setTimeBounds): Set the new time bounds by incrementing the
+ bounds index locally, filling in the new values, then atomically incrementing the
+ reader-visible index.
+ (WebCore::CARingBuffer::getTimeBounds): Get the new time bounds by masking the time
+ bounds index, and using it as an offset to the time bounds array. Protect against
+ CPU starvation on the reading thread by checking the index against the stored
+ index in the array entry. This check will fail if the writing thread has wrapped
+ more than the size of the array.
+ (WebCore::CARingBuffer::clipTimeBounds): Clamp the passed values to the available bounds.
+ (WebCore::CARingBuffer::startFrame): Simple accessor.
+ (WebCore::CARingBuffer::endFrame): Ditto.
+ (WebCore::CARingBuffer::fetch): Read an audio buffer list from the ring buffer, possibly
+ wrapping around to the start of the ring buffer.
+ * platform/audio/mac/CARingBuffer.h: Added.
+ (WebCore::CARingBuffer::numberOfChannels): Simple accessor.
+ (WebCore::CARingBuffer::frameOffset): Ditto.
+ (WebCore::CARingBuffer::TimeBounds::TimeBounds): Struct holding the start and end frame values.
+
+ Add an implementation of AudioSourceProvider, using AVAudioMix and MTAudioProcessingTap
+ to tap an AVPlayerItem's audio output and feed it to a MediaElementAudioSourceNode.
+
+ * platform/graphics/avfoundation/AudioSourceProviderAVFObjC.h: Added.
+ * platform/graphics/avfoundation/AudioSourceProviderAVFObjC.mm: Added.
+ (WebCore::AudioSourceProviderAVFObjC::create): Simple factory.
+ (WebCore::AudioSourceProviderAVFObjC::AudioSourceProviderAVFObjC): Simple constructor.
+ (WebCore::AudioSourceProviderAVFObjC::~AudioSourceProviderAVFObjC): Simple destructor.
+ (WebCore::AudioSourceProviderAVFObjC::provideInput): Push audio into the ring buffer.
+ (WebCore::AudioSourceProviderAVFObjC::setClient): Call destroyMix()/createMix() as appropriate.
+ (WebCore::AudioSourceProviderAVFObjC::setPlayerItem): Ditto.
+ (WebCore::AudioSourceProviderAVFObjC::destroyMix): Detach and destroy the AVAudioMix and tap.
+ (WebCore::AudioSourceProviderAVFObjC::createMix): Set up the AVAudioMix and tap and attach to
+ the AVPlayerItem.
+ (WebCore::AudioSourceProviderAVFObjC::initCallback): Pass to class method.
+ (WebCore::AudioSourceProviderAVFObjC::finalizeCallback): Ditto.
+ (WebCore::AudioSourceProviderAVFObjC::prepareCallback): Ditto.
+ (WebCore::AudioSourceProviderAVFObjC::unprepareCallback): Ditto.
+ (WebCore::AudioSourceProviderAVFObjC::processCallback): Ditto.
+ (WebCore::AudioSourceProviderAVFObjC::init): Set up the storage pointer for the tap.
+ (WebCore::AudioSourceProviderAVFObjC::finalize): No-op.
+ (WebCore::operator==): Compare two AudioStreamBasicDescription objects.
+ (WebCore::operator!=): Ditto.
+ (WebCore::AudioSourceProviderAVFObjC::prepare): Create an AudioConverter object if necessary,
+ create the ring buffer, and notify the client of format changes.
+ (WebCore::AudioSourceProviderAVFObjC::unprepare): Destroy same.
+ (WebCore::AudioSourceProviderAVFObjC::process): Copy samples out of the ring buffer,
+ format converting them if necessary. Mute original audio.
+
+ Create the AudioSourceProviderAVFObjC object and notify it when the current AVPlayerItem changes.
+
+ * platform/graphics/avfoundation/objc/MediaPlayerPrivateAVFoundationObjC.h:
+ * platform/graphics/avfoundation/objc/MediaPlayerPrivateAVFoundationObjC.mm:
+ (WebCore::MediaPlayerPrivateAVFoundationObjC::cancelLoad):
+ (WebCore::MediaPlayerPrivateAVFoundationObjC::createAVPlayerItem):
+ (WebCore::MediaPlayerPrivateAVFoundationObjC::audioSourceProvider):
+
+ Enable SOFT_LINK_MAY_FAIL on the Mac platform.
+
+ * platform/mac/SoftLinking.h:
+
+ Add new files to the project.
+
+ * WebCore.xcodeproj/project.pbxproj:
+
2014-09-15 Chris Dumez <[email protected]>
Avoid redundant isElementNode() checks in Traversal<HTML*Element> / Traversal<SVG*Element>
Modified: trunk/Source/WebCore/WebCore.xcodeproj/project.pbxproj (173627 => 173628)
--- trunk/Source/WebCore/WebCore.xcodeproj/project.pbxproj 2014-09-15 19:50:03 UTC (rev 173627)
+++ trunk/Source/WebCore/WebCore.xcodeproj/project.pbxproj 2014-09-15 20:05:49 UTC (rev 173628)
@@ -5631,6 +5631,8 @@
CD82030D1395AB6A00F956C6 /* WebVideoFullscreenHUDWindowController.mm in Sources */ = {isa = PBXBuildFile; fileRef = CD8203091395AB6A00F956C6 /* WebVideoFullscreenHUDWindowController.mm */; };
CD8203101395ACE700F956C6 /* WebWindowAnimation.h in Headers */ = {isa = PBXBuildFile; fileRef = CD82030E1395ACE700F956C6 /* WebWindowAnimation.h */; settings = {ATTRIBUTES = (Private, ); }; };
CD8203111395ACE700F956C6 /* WebWindowAnimation.mm in Sources */ = {isa = PBXBuildFile; fileRef = CD82030F1395ACE700F956C6 /* WebWindowAnimation.mm */; };
+ CD8A7BBB197735FE00CBD643 /* AudioSourceProviderAVFObjC.mm in Sources */ = {isa = PBXBuildFile; fileRef = CD8A7BB9197735FE00CBD643 /* AudioSourceProviderAVFObjC.mm */; };
+ CD8A7BBC197735FE00CBD643 /* AudioSourceProviderAVFObjC.h in Headers */ = {isa = PBXBuildFile; fileRef = CD8A7BBA197735FE00CBD643 /* AudioSourceProviderAVFObjC.h */; };
CD8B5A42180D149A008B8E65 /* VideoTrackPrivateMediaSourceAVFObjC.mm in Sources */ = {isa = PBXBuildFile; fileRef = CD8B5A40180D149A008B8E65 /* VideoTrackPrivateMediaSourceAVFObjC.mm */; };
CD8B5A43180D149A008B8E65 /* VideoTrackPrivateMediaSourceAVFObjC.h in Headers */ = {isa = PBXBuildFile; fileRef = CD8B5A41180D149A008B8E65 /* VideoTrackPrivateMediaSourceAVFObjC.h */; };
CD8B5A46180DFF4E008B8E65 /* VideoTrackMediaSource.h in Headers */ = {isa = PBXBuildFile; fileRef = CD8B5A45180DFF4E008B8E65 /* VideoTrackMediaSource.h */; };
@@ -5676,6 +5678,8 @@
CDC69DD71632026C007C38DF /* WebCoreFullScreenWarningView.mm in Sources */ = {isa = PBXBuildFile; fileRef = CDC69DD51632026C007C38DF /* WebCoreFullScreenWarningView.mm */; };
CDC69DDA16371FD4007C38DF /* WebCoreFullScreenPlaceholderView.h in Headers */ = {isa = PBXBuildFile; fileRef = CDC69DD816371FD3007C38DF /* WebCoreFullScreenPlaceholderView.h */; settings = {ATTRIBUTES = (Private, ); }; };
CDC69DDB16371FD4007C38DF /* WebCoreFullScreenPlaceholderView.mm in Sources */ = {isa = PBXBuildFile; fileRef = CDC69DD916371FD3007C38DF /* WebCoreFullScreenPlaceholderView.mm */; };
+ CDC734141977896C0046BFC5 /* CARingBuffer.cpp in Sources */ = {isa = PBXBuildFile; fileRef = CDC734121977896C0046BFC5 /* CARingBuffer.cpp */; };
+ CDC734151977896D0046BFC5 /* CARingBuffer.h in Headers */ = {isa = PBXBuildFile; fileRef = CDC734131977896C0046BFC5 /* CARingBuffer.h */; };
CDC8B5A2180463470016E685 /* MediaPlayerPrivateMediaSourceAVFObjC.mm in Sources */ = {isa = PBXBuildFile; fileRef = CDC8B5A0180463470016E685 /* MediaPlayerPrivateMediaSourceAVFObjC.mm */; };
CDC8B5A3180463470016E685 /* MediaPlayerPrivateMediaSourceAVFObjC.h in Headers */ = {isa = PBXBuildFile; fileRef = CDC8B5A1180463470016E685 /* MediaPlayerPrivateMediaSourceAVFObjC.h */; };
CDC8B5A6180474F70016E685 /* MediaSourcePrivateAVFObjC.mm in Sources */ = {isa = PBXBuildFile; fileRef = CDC8B5A4180474F70016E685 /* MediaSourcePrivateAVFObjC.mm */; };
@@ -13051,6 +13055,8 @@
CD8203091395AB6A00F956C6 /* WebVideoFullscreenHUDWindowController.mm */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.objcpp; path = WebVideoFullscreenHUDWindowController.mm; sourceTree = "<group>"; };
CD82030E1395ACE700F956C6 /* WebWindowAnimation.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = WebWindowAnimation.h; sourceTree = "<group>"; };
CD82030F1395ACE700F956C6 /* WebWindowAnimation.mm */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.objcpp; path = WebWindowAnimation.mm; sourceTree = "<group>"; };
+ CD8A7BB9197735FE00CBD643 /* AudioSourceProviderAVFObjC.mm */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.objcpp; path = AudioSourceProviderAVFObjC.mm; sourceTree = "<group>"; };
+ CD8A7BBA197735FE00CBD643 /* AudioSourceProviderAVFObjC.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = AudioSourceProviderAVFObjC.h; sourceTree = "<group>"; };
CD8B5A40180D149A008B8E65 /* VideoTrackPrivateMediaSourceAVFObjC.mm */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.objcpp; name = VideoTrackPrivateMediaSourceAVFObjC.mm; path = objc/VideoTrackPrivateMediaSourceAVFObjC.mm; sourceTree = "<group>"; };
CD8B5A41180D149A008B8E65 /* VideoTrackPrivateMediaSourceAVFObjC.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = VideoTrackPrivateMediaSourceAVFObjC.h; path = objc/VideoTrackPrivateMediaSourceAVFObjC.h; sourceTree = "<group>"; };
CD8B5A44180DD8D6008B8E65 /* VideoTrackMediaSource.idl */ = {isa = PBXFileReference; lastKnownFileType = text; path = VideoTrackMediaSource.idl; sourceTree = "<group>"; };
@@ -13126,6 +13132,8 @@
CDC69DD51632026C007C38DF /* WebCoreFullScreenWarningView.mm */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.objcpp; path = WebCoreFullScreenWarningView.mm; sourceTree = "<group>"; };
CDC69DD816371FD3007C38DF /* WebCoreFullScreenPlaceholderView.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = WebCoreFullScreenPlaceholderView.h; sourceTree = "<group>"; };
CDC69DD916371FD3007C38DF /* WebCoreFullScreenPlaceholderView.mm */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.objcpp; path = WebCoreFullScreenPlaceholderView.mm; sourceTree = "<group>"; };
+ CDC734121977896C0046BFC5 /* CARingBuffer.cpp */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; path = CARingBuffer.cpp; sourceTree = "<group>"; };
+ CDC734131977896C0046BFC5 /* CARingBuffer.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = CARingBuffer.h; sourceTree = "<group>"; };
CDC8B5A0180463470016E685 /* MediaPlayerPrivateMediaSourceAVFObjC.mm */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.objcpp; name = MediaPlayerPrivateMediaSourceAVFObjC.mm; path = objc/MediaPlayerPrivateMediaSourceAVFObjC.mm; sourceTree = "<group>"; };
CDC8B5A1180463470016E685 /* MediaPlayerPrivateMediaSourceAVFObjC.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = MediaPlayerPrivateMediaSourceAVFObjC.h; path = objc/MediaPlayerPrivateMediaSourceAVFObjC.h; sourceTree = "<group>"; };
CDC8B5A4180474F70016E685 /* MediaSourcePrivateAVFObjC.mm */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.objcpp; name = MediaSourcePrivateAVFObjC.mm; path = objc/MediaSourcePrivateAVFObjC.mm; sourceTree = "<group>"; };
@@ -21591,6 +21599,8 @@
CD8B5A40180D149A008B8E65 /* VideoTrackPrivateMediaSourceAVFObjC.mm */,
CD7E05201651A84100C1201F /* WebCoreAVFResourceLoader.h */,
CD7E05211651A84100C1201F /* WebCoreAVFResourceLoader.mm */,
+ CD8A7BB9197735FE00CBD643 /* AudioSourceProviderAVFObjC.mm */,
+ CD8A7BBA197735FE00CBD643 /* AudioSourceProviderAVFObjC.h */,
);
name = objc;
sourceTree = "<group>";
@@ -23103,6 +23113,8 @@
CD54DE4517468B6F005E5B36 /* MediaSessionManagerMac.cpp */,
CD2F4A2518D8A3490063746D /* AudioHardwareListenerMac.cpp */,
CD2F4A2618D8A3490063746D /* AudioHardwareListenerMac.h */,
+ CDC734121977896C0046BFC5 /* CARingBuffer.cpp */,
+ CDC734131977896C0046BFC5 /* CARingBuffer.h */,
);
path = mac;
sourceTree = "<group>";
@@ -24327,6 +24339,7 @@
A8EA79FA0A1916DF00A8EF5F /* HTMLDirectoryElement.h in Headers */,
A8EA7CB70A192B9C00A8EF5F /* HTMLDivElement.h in Headers */,
A8EA79F70A1916DF00A8EF5F /* HTMLDListElement.h in Headers */,
+ CDC734151977896D0046BFC5 /* CARingBuffer.h in Headers */,
93F198E508245E59001E9ABC /* HTMLDocument.h in Headers */,
977B3867122883E900B81FF8 /* HTMLDocumentParser.h in Headers */,
93309DE8099E64920056E581 /* htmlediting.h in Headers */,
@@ -25014,6 +25027,7 @@
B2FA3DA30AB75A6F000E5AC4 /* JSSVGLengthList.h in Headers */,
B2FA3DA50AB75A6F000E5AC4 /* JSSVGLinearGradientElement.h in Headers */,
B2FA3DA70AB75A6F000E5AC4 /* JSSVGLineElement.h in Headers */,
+ CD8A7BBC197735FE00CBD643 /* AudioSourceProviderAVFObjC.h in Headers */,
B2FA3DA90AB75A6F000E5AC4 /* JSSVGMarkerElement.h in Headers */,
B2FA3DAB0AB75A6F000E5AC4 /* JSSVGMaskElement.h in Headers */,
B2FA3DAD0AB75A6F000E5AC4 /* JSSVGMatrix.h in Headers */,
@@ -29276,6 +29290,7 @@
1A4A95520B4EDCFF002D8C3C /* SharedBufferMac.mm in Sources */,
E45390460EAFD637003695C8 /* SharedTimerIOS.mm in Sources */,
93309EA2099EB78C0056E581 /* SharedTimerMac.mm in Sources */,
+ CD8A7BBB197735FE00CBD643 /* AudioSourceProviderAVFObjC.mm in Sources */,
41E1B1D30FF5986900576B3B /* SharedWorker.cpp in Sources */,
41D168E710226E89009BC827 /* SharedWorkerGlobalScope.cpp in Sources */,
E1B784201639CBBE0007B692 /* SharedWorkerRepository.cpp in Sources */,
@@ -29442,6 +29457,7 @@
B22279A40D00BF220071B782 /* SVGComponentTransferFunctionElement.cpp in Sources */,
B2227B050D00BFF10071B782 /* SVGCSSComputedStyleDeclaration.cpp in Sources */,
B2227B060D00BFF10071B782 /* SVGCSSParser.cpp in Sources */,
+ CDC734141977896C0046BFC5 /* CARingBuffer.cpp in Sources */,
B2227B080D00BFF10071B782 /* SVGCSSStyleSelector.cpp in Sources */,
B22279A70D00BF220071B782 /* SVGCursorElement.cpp in Sources */,
B22279AD0D00BF220071B782 /* SVGDefsElement.cpp in Sources */,
Added: trunk/Source/WebCore/platform/audio/mac/CARingBuffer.cpp (0 => 173628)
--- trunk/Source/WebCore/platform/audio/mac/CARingBuffer.cpp (rev 0)
+++ trunk/Source/WebCore/platform/audio/mac/CARingBuffer.cpp 2014-09-15 20:05:49 UTC (rev 173628)
@@ -0,0 +1,311 @@
+/*
+ * Copyright (C) 2014 Apple Inc. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY APPLE INC. AND ITS CONTRIBUTORS ``AS IS''
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
+ * THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+ * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR ITS CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+ * THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include "config.h"
+#include "CARingBuffer.h"
+
+#include <CoreAudio/CoreAudioTypes.h>
+#include <libkern/OSAtomic.h>
+#include <wtf/MathExtras.h>
+
+const uint32_t kGeneralRingTimeBoundsQueueSize = 32;
+const uint32_t kGeneralRingTimeBoundsQueueMask = kGeneralRingTimeBoundsQueueSize - 1;
+
+namespace WebCore {
+
+std::unique_ptr<CARingBuffer> CARingBuffer::create()
+{
+ return std::unique_ptr<CARingBuffer>(new CARingBuffer());
+}
+
+CARingBuffer::CARingBuffer()
+ : m_channelCount(0)
+ , m_frameCount(0)
+ , m_capacityBytes(0)
+ , m_timeBoundsQueue(kGeneralRingTimeBoundsQueueSize)
+ , m_timeBoundsQueuePtr(0)
+{
+}
+
+CARingBuffer::~CARingBuffer()
+{
+ deallocate();
+}
+
+void CARingBuffer::allocate(uint32_t channelCount, size_t bytesPerFrame, size_t frameCount)
+{
+ deallocate();
+
+ frameCount = WTF::roundUpToPowerOfTwo(frameCount);
+
+ m_channelCount = channelCount;
+ m_bytesPerFrame = bytesPerFrame;
+ m_frameCount = frameCount;
+ m_frameCountMask = frameCount - 1;
+ m_capacityBytes = bytesPerFrame * frameCount;
+
+ size_t pointersSize = channelCount * sizeof(Byte*);
+ size_t allocSize = pointersSize + (m_capacityBytes * channelCount);
+ m_buffers = ArrayBuffer::create(allocSize, 1);
+
+ Byte** pointers = static_cast<Byte**>(m_buffers->data());
+ Byte* channelData = static_cast<Byte*>(m_buffers->data()) + pointersSize;
+
+ for (unsigned i = 0; i < channelCount; ++i) {
+ pointers[i] = channelData;
+ channelData += m_capacityBytes;
+ }
+
+ for (auto timeBounds : m_timeBoundsQueue) {
+ timeBounds.m_startFrame = 0;
+ timeBounds.m_endFrame = 0;
+ timeBounds.m_updateCounter = 0;
+ }
+ m_timeBoundsQueuePtr = 0;
+}
+
+void CARingBuffer::deallocate()
+{
+ if (m_buffers)
+ m_buffers.clear();
+
+ m_channelCount = 0;
+ m_capacityBytes = 0;
+ m_frameCount = 0;
+}
+
+static void ZeroRange(Byte** buffers, int channelCount, size_t offset, size_t nbytes)
+{
+ while (--channelCount >= 0) {
+ memset(*buffers + offset, 0, nbytes);
+ ++buffers;
+ }
+}
+
+static void StoreABL(Byte** buffers, size_t destOffset, const AudioBufferList* list, size_t srcOffset, size_t nbytes)
+{
+ int channelCount = list->mNumberBuffers;
+ const AudioBuffer* src = list->mBuffers;
+ while (--channelCount >= 0) {
+ if (srcOffset > src->mDataByteSize)
+ continue;
+ memcpy(*buffers + destOffset, static_cast<Byte*>(src->mData) + srcOffset, std::min<size_t>(nbytes, src->mDataByteSize - srcOffset));
+ ++buffers;
+ ++src;
+ }
+}
+
+static void FetchABL(AudioBufferList* list, size_t destOffset, Byte** buffers, size_t srcOffset, size_t nbytes)
+{
+ int channelCount = list->mNumberBuffers;
+ AudioBuffer* dest = list->mBuffers;
+ while (--channelCount >= 0) {
+ if (destOffset > dest->mDataByteSize)
+ continue;
+ memcpy(static_cast<Byte*>(dest->mData) + destOffset, *buffers + srcOffset, std::min<size_t>(nbytes, dest->mDataByteSize - destOffset));
+ ++buffers;
+ ++dest;
+ }
+}
+
+inline void ZeroABL(AudioBufferList* list, size_t destOffset, size_t nbytes)
+{
+ int nBuffers = list->mNumberBuffers;
+ AudioBuffer* dest = list->mBuffers;
+ while (--nBuffers >= 0) {
+ if (destOffset > dest->mDataByteSize)
+ continue;
+ memset(static_cast<Byte*>(dest->mData) + destOffset, 0, std::min<size_t>(nbytes, dest->mDataByteSize - destOffset));
+ ++dest;
+ }
+}
+
+CARingBuffer::Error CARingBuffer::store(const AudioBufferList* list, size_t framesToWrite, uint64_t startFrame)
+{
+ if (!framesToWrite)
+ return Ok;
+
+ if (framesToWrite > m_frameCount)
+ return TooMuch;
+
+ uint64_t endFrame = startFrame + framesToWrite;
+
+ if (startFrame < currentEndFrame()) {
+ // Throw everything out when going backwards.
+ setCurrentFrameBounds(startFrame, startFrame);
+ } else if (endFrame - currentStartFrame() <= m_frameCount) {
+ // The buffer has not yet wrapped and will not need to.
+ // No-op.
+ } else {
+ // Advance the start time past the region we are about to overwrite
+ // starting one buffer of time behind where we're writing.
+ uint64_t newStartFrame = endFrame - m_frameCount;
+ uint64_t newEndFrame = std::max(newStartFrame, currentEndFrame());
+ setCurrentFrameBounds(newStartFrame, newEndFrame);
+ }
+
+ // Write the new frames.
+ Byte** buffers = static_cast<Byte**>(m_buffers->data());
+ size_t offset0;
+ size_t offset1;
+ uint64_t curEnd = currentEndFrame();
+
+ if (startFrame > curEnd) {
+ // We are skipping some samples, so zero the range we are skipping.
+ offset0 = frameOffset(curEnd);
+ offset1 = frameOffset(startFrame);
+ if (offset0 < offset1)
+ ZeroRange(buffers, m_channelCount, offset0, offset1 - offset0);
+ else {
+ ZeroRange(buffers, m_channelCount, offset0, m_capacityBytes - offset0);
+ ZeroRange(buffers, m_channelCount, 0, offset1);
+ }
+ offset0 = offset1;
+ } else
+ offset0 = frameOffset(startFrame);
+
+ offset1 = frameOffset(endFrame);
+ if (offset0 < offset1)
+ StoreABL(buffers, offset0, list, 0, offset1 - offset0);
+ else {
+ size_t nbytes = m_capacityBytes - offset0;
+ StoreABL(buffers, offset0, list, 0, nbytes);
+ StoreABL(buffers, 0, list, nbytes, offset1);
+ }
+
+ // Now update the end time.
+ setCurrentFrameBounds(currentStartFrame(), endFrame);
+
+ return Ok;
+}
+
+void CARingBuffer::setCurrentFrameBounds(uint64_t startTime, uint64_t endTime)
+{
+ ByteSpinLocker locker(m_currentFrameBoundsLock);
+ uint32_t nextPtr = m_timeBoundsQueuePtr + 1;
+ uint32_t index = nextPtr & kGeneralRingTimeBoundsQueueMask;
+
+ m_timeBoundsQueue[index].m_startFrame = startTime;
+ m_timeBoundsQueue[index].m_endFrame = endTime;
+ m_timeBoundsQueue[index].m_updateCounter = nextPtr;
+ OSAtomicIncrement32Barrier(static_cast<int32_t*>(&m_timeBoundsQueuePtr));
+}
+
+void CARingBuffer::getCurrentFrameBounds(uint64_t &startTime, uint64_t &endTime)
+{
+ ByteSpinLocker locker(m_currentFrameBoundsLock);
+ uint32_t curPtr = m_timeBoundsQueuePtr;
+ uint32_t index = curPtr & kGeneralRingTimeBoundsQueueMask;
+ CARingBuffer::TimeBounds& bounds = m_timeBoundsQueue[index];
+
+ startTime = bounds.m_startFrame;
+ endTime = bounds.m_endFrame;
+}
+
+void CARingBuffer::clipTimeBounds(uint64_t& startRead, uint64_t& endRead)
+{
+ uint64_t startTime;
+ uint64_t endTime;
+
+ getCurrentFrameBounds(startTime, endTime);
+
+ if (startRead > endTime || endRead < startTime) {
+ endRead = startRead;
+ return;
+ }
+
+ startRead = std::max(startRead, startTime);
+ endRead = std::min(endRead, endTime);
+ endRead = std::max(endRead, startRead);
+}
+
+uint64_t CARingBuffer::currentStartFrame() const
+{
+ uint32_t index = m_timeBoundsQueuePtr & kGeneralRingTimeBoundsQueueMask;
+ return m_timeBoundsQueue[index].m_startFrame;
+}
+
+uint64_t CARingBuffer::currentEndFrame() const
+{
+ uint32_t index = m_timeBoundsQueuePtr & kGeneralRingTimeBoundsQueueMask;
+ return m_timeBoundsQueue[index].m_endFrame;
+}
+
+CARingBuffer::Error CARingBuffer::fetch(AudioBufferList* list, size_t nFrames, uint64_t startRead)
+{
+ if (!nFrames)
+ return Ok;
+
+ startRead = std::max<uint64_t>(0, startRead);
+
+ uint64_t endRead = startRead + nFrames;
+
+ uint64_t startRead0 = startRead;
+ uint64_t endRead0 = endRead;
+
+ clipTimeBounds(startRead, endRead);
+
+ if (startRead == endRead) {
+ ZeroABL(list, 0, nFrames * m_bytesPerFrame);
+ return Ok;
+ }
+
+ size_t byteSize = (endRead - startRead) * m_bytesPerFrame;
+
+ size_t destStartByteOffset = std::max<size_t>(0, (startRead - startRead0) * m_bytesPerFrame);
+
+ if (destStartByteOffset > 0)
+ ZeroABL(list, 0, std::min<size_t>(nFrames * m_bytesPerFrame, destStartByteOffset));
+
+ size_t destEndSize = std::max<size_t>(0, endRead0 - endRead);
+ if (destEndSize > 0)
+ ZeroABL(list, destStartByteOffset + byteSize, destEndSize * m_bytesPerFrame);
+
+ Byte **buffers = static_cast<Byte**>(m_buffers->data());
+ size_t offset0 = frameOffset(startRead);
+ size_t offset1 = frameOffset(endRead);
+ size_t nbytes;
+
+ if (offset0 < offset1) {
+ nbytes = offset1 - offset0;
+ FetchABL(list, destStartByteOffset, buffers, offset0, nbytes);
+ } else {
+ nbytes = m_capacityBytes - offset0;
+ FetchABL(list, destStartByteOffset, buffers, offset0, nbytes);
+ FetchABL(list, destStartByteOffset + nbytes, buffers, 0, offset1);
+ nbytes += offset1;
+ }
+
+ int channelCount = list->mNumberBuffers;
+ AudioBuffer* dest = list->mBuffers;
+ while (--channelCount >= 0) {
+ dest->mDataByteSize = nbytes;
+ dest++;
+ }
+
+ return Ok;
+}
+
+}
Added: trunk/Source/WebCore/platform/audio/mac/CARingBuffer.h (0 => 173628)
--- trunk/Source/WebCore/platform/audio/mac/CARingBuffer.h (rev 0)
+++ trunk/Source/WebCore/platform/audio/mac/CARingBuffer.h 2014-09-15 20:05:49 UTC (rev 173628)
@@ -0,0 +1,94 @@
+/*
+ * Copyright (C) 2014 Apple Inc. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY APPLE INC. AND ITS CONTRIBUTORS ``AS IS''
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
+ * THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+ * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR ITS CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+ * THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#ifndef CARingBuffer_h
+#define CARingBuffer_h
+
+#include <runtime/ArrayBuffer.h>
+#include <wtf/ByteSpinLock.h>
+#include <wtf/Vector.h>
+
+typedef struct AudioBufferList AudioBufferList;
+
+namespace WebCore {
+
+class CARingBuffer {
+public:
+ static std::unique_ptr<CARingBuffer> create();
+ ~CARingBuffer();
+
+ enum Error {
+ Ok,
+ TooMuch, // fetch start time is earlier than buffer start time and fetch end time is later than buffer end time
+ CPUOverload, // the reader is unable to get enough CPU cycles to capture a consistent snapshot of the time bounds
+ };
+
+ void allocate(uint32_t m_channelCount, size_t bytesPerFrame, size_t frameCount);
+ void deallocate();
+ Error store(const AudioBufferList*, size_t frameCount, uint64_t startFrame);
+ Error fetch(AudioBufferList*, size_t frameCount, uint64_t startFrame);
+ void getCurrentFrameBounds(uint64_t &startTime, uint64_t &endTime);
+
+ uint32_t channelCount() const { return m_channelCount; }
+
+private:
+ CARingBuffer();
+
+ size_t frameOffset(uint64_t frameNumber) { return (frameNumber & m_frameCountMask) * m_bytesPerFrame; }
+
+ void clipTimeBounds(uint64_t& startRead, uint64_t& endRead);
+
+ uint64_t currentStartFrame() const;
+ uint64_t currentEndFrame() const;
+ void setCurrentFrameBounds(uint64_t startFrame, uint64_t endFrame);
+
+ RefPtr<ArrayBuffer> m_buffers;
+ uint32_t m_channelCount;
+ size_t m_bytesPerFrame;
+ uint32_t m_frameCount;
+ uint32_t m_frameCountMask;
+ size_t m_capacityBytes;
+
+ struct TimeBounds {
+ TimeBounds()
+ : m_startFrame(0)
+ , m_endFrame(0)
+ , m_updateCounter(0)
+ {
+ }
+ volatile uint64_t m_startFrame;
+ volatile uint64_t m_endFrame;
+ volatile uint32_t m_updateCounter;
+ };
+
+ Vector<TimeBounds> m_timeBoundsQueue;
+ ByteSpinLock m_currentFrameBoundsLock;
+ int32_t m_timeBoundsQueuePtr;
+};
+
+}
+
+
+#endif // CARingBuffer_h
Added: trunk/Source/WebCore/platform/graphics/avfoundation/AudioSourceProviderAVFObjC.h (0 => 173628)
--- trunk/Source/WebCore/platform/graphics/avfoundation/AudioSourceProviderAVFObjC.h (rev 0)
+++ trunk/Source/WebCore/platform/graphics/avfoundation/AudioSourceProviderAVFObjC.h 2014-09-15 20:05:49 UTC (rev 173628)
@@ -0,0 +1,102 @@
+/*
+ * Copyright (C) 2014 Apple Inc. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY APPLE COMPUTER, INC. ``AS IS'' AND ANY
+ * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+ * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE COMPUTER, INC. OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+ * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+ * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
+ * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#ifndef AudioSourceProviderAVFObjC_h
+#define AudioSourceProviderAVFObjC_h
+
+#if ENABLE(WEB_AUDIO) && USE(MEDIATOOLBOX)
+
+#include "AudioSourceProvider.h"
+#include <wtf/MediaTime.h>
+#include <wtf/RefCounted.h>
+#include <wtf/RefPtr.h>
+#include <wtf/RetainPtr.h>
+
+OBJC_CLASS AVPlayerItem;
+OBJC_CLASS AVMutableAudioMix;
+
+typedef const struct opaqueMTAudioProcessingTap *MTAudioProcessingTapRef;
+typedef struct AudioBufferList AudioBufferList;
+typedef struct AudioStreamBasicDescription AudioStreamBasicDescription;
+typedef struct OpaqueAudioConverter* AudioConverterRef;
+typedef uint32_t MTAudioProcessingTapFlags;
+typedef signed long CMItemCount;
+
+namespace WebCore {
+
+class CARingBuffer;
+
+class AudioSourceProviderAVFObjC : public RefCounted<AudioSourceProviderAVFObjC>, public AudioSourceProvider {
+public:
+ static PassRefPtr<AudioSourceProviderAVFObjC> create(AVPlayerItem*);
+ virtual ~AudioSourceProviderAVFObjC();
+
+ void setPlayerItem(AVPlayerItem *);
+
+private:
+ AudioSourceProviderAVFObjC(AVPlayerItem *);
+
+ void destroyMix();
+ void createMix();
+
+ // AudioSourceProvider
+ virtual void provideInput(AudioBus*, size_t framesToProcess) override;
+ virtual void setClient(AudioSourceProviderClient*) override;
+
+ static void initCallback(MTAudioProcessingTapRef, void*, void**);
+ static void finalizeCallback(MTAudioProcessingTapRef);
+ static void prepareCallback(MTAudioProcessingTapRef, CMItemCount, const AudioStreamBasicDescription*);
+ static void unprepareCallback(MTAudioProcessingTapRef);
+ static void processCallback(MTAudioProcessingTapRef, CMItemCount, MTAudioProcessingTapFlags, AudioBufferList*, CMItemCount*, MTAudioProcessingTapFlags*);
+
+ void init(void* clientInfo, void** tapStorageOut);
+ void finalize();
+ void prepare(CMItemCount maxFrames, const AudioStreamBasicDescription *processingFormat);
+ void unprepare();
+ void process(CMItemCount numberFrames, MTAudioProcessingTapFlags flagsIn, AudioBufferList *bufferListInOut, CMItemCount *numberFramesOut, MTAudioProcessingTapFlags *flagsOut);
+
+ RetainPtr<AVPlayerItem> m_avPlayerItem;
+ RetainPtr<AVMutableAudioMix> m_avAudioMix;
+ RetainPtr<MTAudioProcessingTapRef> m_tap;
+ RetainPtr<AudioConverterRef> m_converter;
+ std::unique_ptr<AudioBufferList> m_list;
+ std::unique_ptr<AudioStreamBasicDescription> m_tapDescription;
+ std::unique_ptr<AudioStreamBasicDescription> m_outputDescription;
+ std::unique_ptr<CARingBuffer> m_ringBuffer;
+
+ MediaTime m_startTimeAtLastProcess;
+ MediaTime m_endTimeAtLastProcess;
+ uint64_t m_writeAheadCount;
+ uint64_t m_writeCount;
+ uint64_t m_readCount;
+ bool m_paused;
+ AudioSourceProviderClient* m_client;
+};
+
+}
+
+#endif
+
+#endif
Added: trunk/Source/WebCore/platform/graphics/avfoundation/AudioSourceProviderAVFObjC.mm (0 => 173628)
--- trunk/Source/WebCore/platform/graphics/avfoundation/AudioSourceProviderAVFObjC.mm (rev 0)
+++ trunk/Source/WebCore/platform/graphics/avfoundation/AudioSourceProviderAVFObjC.mm 2014-09-15 20:05:49 UTC (rev 173628)
@@ -0,0 +1,398 @@
+/*
+ * Copyright (C) 2014 Apple Inc. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY APPLE COMPUTER, INC. ``AS IS'' AND ANY
+ * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+ * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE COMPUTER, INC. OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+ * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+ * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
+ * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#import "config.h"
+#import "AudioSourceProviderAVFObjC.h"
+
+#if ENABLE(WEB_AUDIO) && USE(MEDIATOOLBOX)
+
+#import "AudioBus.h"
+#import "AudioChannel.h"
+#import "AudioSourceProviderClient.h"
+#import "CARingBuffer.h"
+#import "Logging.h"
+#import "MediaTimeAVFoundation.h"
+#import "SoftLinking.h"
+#import <AVFoundation/AVAssetTrack.h>
+#import <AVFoundation/AVAudioMix.h>
+#import <AVFoundation/AVMediaFormat.h>
+#import <AVFoundation/AVPlayerItem.h>
+#import <AVFoundation/AVPlayerItemTrack.h>
+#import <objc/runtime.h>
+#import <wtf/MainThread.h>
+
+#if !LOG_DISABLED
+#include <wtf/StringPrintStream.h>
+#endif
+
+SOFT_LINK_FRAMEWORK(AVFoundation)
+SOFT_LINK_FRAMEWORK(MediaToolbox)
+SOFT_LINK_FRAMEWORK(AudioToolbox)
+SOFT_LINK_FRAMEWORK(CoreMedia)
+
+SOFT_LINK_CLASS(AVFoundation, AVPlayerItem)
+SOFT_LINK_CLASS(AVFoundation, AVMutableAudioMix)
+SOFT_LINK_CLASS(AVFoundation, AVMutableAudioMixInputParameters)
+
+typedef struct opaqueCMNotificationCenter *CMNotificationCenterRef;
+typedef void (*CMNotificationCallback)(CMNotificationCenterRef inCenter, const void *inListener, CFStringRef inNotificationName, const void *inNotifyingObject, CFTypeRef inNotificationPayload);
+
+SOFT_LINK(CoreMedia, CMNotificationCenterGetDefaultLocalCenter, CMNotificationCenterRef, (void), ());
+SOFT_LINK(CoreMedia, CMNotificationCenterAddListener, OSStatus, (CMNotificationCenterRef center, const void* listener, CMNotificationCallback callback, CFStringRef notification, const void* object, UInt32 flags), (center, listener, callback, notification, object, flags))
+SOFT_LINK(CoreMedia, CMNotificationCenterRemoveListener, OSStatus, (CMNotificationCenterRef center, const void* listener, CMNotificationCallback callback, CFStringRef notification, const void* object), (center, listener, callback, notification, object))
+SOFT_LINK(CoreMedia, CMTimebaseGetTime, CMTime, (CMTimebaseRef timebase), (timebase))
+SOFT_LINK(CoreMedia, CMTimebaseGetEffectiveRate, Float64, (CMTimebaseRef timebase), (timebase))
+
+SOFT_LINK_CONSTANT(CoreMedia, kCMTimebaseNotification_EffectiveRateChanged, CFStringRef)
+SOFT_LINK_CONSTANT(CoreMedia, kCMTimebaseNotification_TimeJumped, CFStringRef)
+#define kCMTimebaseNotification_EffectiveRateChanged getkCMTimebaseNotification_EffectiveRateChanged()
+#define kCMTimebaseNotification_TimeJumped getkCMTimebaseNotification_TimeJumped()
+
+SOFT_LINK(AudioToolbox, AudioConverterConvertComplexBuffer, OSStatus, (AudioConverterRef inAudioConverter, UInt32 inNumberPCMFrames, const AudioBufferList* inInputData, AudioBufferList* outOutputData), (inAudioConverter, inNumberPCMFrames, inInputData, outOutputData))
+SOFT_LINK(AudioToolbox, AudioConverterNew, OSStatus, (const AudioStreamBasicDescription* inSourceFormat, const AudioStreamBasicDescription* inDestinationFormat, AudioConverterRef* outAudioConverter), (inSourceFormat, inDestinationFormat, outAudioConverter))
+
+SOFT_LINK(MediaToolbox, MTAudioProcessingTapGetStorage, void*, (MTAudioProcessingTapRef tap), (tap))
+SOFT_LINK(MediaToolbox, MTAudioProcessingTapGetSourceAudio, OSStatus, (MTAudioProcessingTapRef tap, CMItemCount numberFrames, AudioBufferList *bufferListInOut, MTAudioProcessingTapFlags *flagsOut, CMTimeRange *timeRangeOut, CMItemCount *numberFramesOut), (tap, numberFrames, bufferListInOut, flagsOut, timeRangeOut, numberFramesOut))
+SOFT_LINK_MAY_FAIL(MediaToolbox, MTAudioProcessingTapCreate, OSStatus, (CFAllocatorRef allocator, const MTAudioProcessingTapCallbacks *callbacks, MTAudioProcessingTapCreationFlags flags, MTAudioProcessingTapRef *tapOut), (allocator, callbacks, flags, tapOut))
+
+SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicAudible, NSString *)
+#define AVMediaCharacteristicAudible getAVMediaCharacteristicAudible()
+
+namespace WebCore {
+
+static double kRingBufferDuration = 1;
+
+// Factory. Returns nullptr when MTAudioProcessingTapCreate cannot be
+// soft-linked (the MediaToolbox tap API is unavailable on this system), so
+// callers must handle a null provider.
+PassRefPtr<AudioSourceProviderAVFObjC> AudioSourceProviderAVFObjC::create(AVPlayerItem *item)
+{
+    if (!canLoadMTAudioProcessingTapCreate())
+        return nullptr;
+    return adoptRef(new AudioSourceProviderAVFObjC(item));
+}
+
+// Constructor. Note: m_writeAheadCount must be zero-initialized here — it is
+// read by provideInput() on the render thread, but is only assigned in
+// process() when playback transitions out of the paused state. Without the
+// initializer it is read uninitialized on the first provideInput() call.
+AudioSourceProviderAVFObjC::AudioSourceProviderAVFObjC(AVPlayerItem *item)
+    : m_avPlayerItem(item)
+    , m_writeAheadCount(0)
+    , m_writeCount(0)
+    , m_readCount(0)
+    , m_paused(true)
+    , m_client(nullptr)
+{
+}
+
+AudioSourceProviderAVFObjC::~AudioSourceProviderAVFObjC()
+{
+    // Clearing the client tears down the audio mix and tap via setClient()'s
+    // destroyMix() path, restoring the item's normal audio output.
+    setClient(nullptr);
+}
+
+// AudioSourceProvider interface: called on the audio render thread (the single
+// reader of the ring buffer) to fill |bus| with framesToProcess frames of
+// de-interleaved float audio.
+void AudioSourceProviderAVFObjC::provideInput(AudioBus* bus, size_t framesToProcess)
+{
+    // The ring buffer is created in prepare() on the tap thread and released in
+    // unprepare(); the render thread can call in before the tap has been
+    // prepared (or after unprepare), so bail out with silence rather than
+    // dereferencing a null ring buffer. Zeroing also avoids handing the graph
+    // stale data left in the bus from a previous render quantum.
+    if (!m_avPlayerItem || !m_ringBuffer) {
+        bus->zero();
+        return;
+    }
+
+    uint64_t startFrame = 0;
+    uint64_t endFrame = 0;
+    m_ringBuffer->getCurrentFrameBounds(startFrame, endFrame);
+
+    // Nothing new to read (reader has caught up to the writer, accounting for
+    // the write-ahead margin); emit silence.
+    if (m_writeCount <= m_readCount + m_writeAheadCount) {
+        bus->zero();
+        return;
+    }
+
+    // If fewer frames are buffered than requested, zero the whole bus first so
+    // the tail beyond the fetched frames is silence, then fetch what we have.
+    size_t framesAvailable = endFrame - (m_readCount + m_writeAheadCount);
+    if (framesAvailable < framesToProcess) {
+        framesToProcess = framesAvailable;
+        bus->zero();
+    }
+
+    ASSERT(bus->numberOfChannels() == m_ringBuffer->channelCount());
+
+    // Point the scratch AudioBufferList at the bus's channel storage so the
+    // ring-buffer fetch (and in-place conversion below) writes directly into it.
+    for (unsigned i = 0; i < m_list->mNumberBuffers; ++i) {
+        AudioChannel* channel = bus->channel(i);
+        m_list->mBuffers[i].mNumberChannels = 1;
+        m_list->mBuffers[i].mData = channel->mutableData();
+        m_list->mBuffers[i].mDataByteSize = channel->length() * sizeof(float);
+    }
+
+    m_ringBuffer->fetch(m_list.get(), framesToProcess, m_readCount);
+    m_readCount += framesToProcess;
+
+    // Convert in place from the tap's processing format to our output format
+    // (packed native float, non-interleaved) when the two differ.
+    if (m_converter)
+        AudioConverterConvertComplexBuffer(m_converter.get(), framesToProcess, m_list.get(), m_list.get());
+}
+
+// AudioSourceProvider interface. Attaching a client installs the tap (via
+// createMix()); detaching (client == nullptr) tears it down, restoring the
+// element's normal audio output.
+void AudioSourceProviderAVFObjC::setClient(AudioSourceProviderClient* client)
+{
+    if (m_client == client)
+        return;
+
+    if (m_avAudioMix)
+        destroyMix();
+
+    m_client = client;
+
+    // The mix requires both a client and a player item to attach to.
+    if (m_client && m_avPlayerItem)
+        createMix();
+}
+
+// Moves the provider onto a new (possibly nil) AVPlayerItem, tearing down any
+// existing audio mix and rebuilding it on the new item when a client is
+// attached. Called by MediaPlayerPrivateAVFoundationObjC when its item changes.
+void AudioSourceProviderAVFObjC::setPlayerItem(AVPlayerItem *avPlayerItem)
+{
+    if (m_avPlayerItem == avPlayerItem)
+        return;
+
+    if (m_avAudioMix)
+        destroyMix();
+
+    m_avPlayerItem = avPlayerItem;
+
+    if (m_client && m_avPlayerItem)
+        createMix();
+}
+
+// Detaches the audio mix from the player item (restoring normal audio output)
+// and drops our references to the mix and the processing tap. Safe to call when
+// nothing is attached: messaging nil and clearing null RetainPtrs are no-ops.
+void AudioSourceProviderAVFObjC::destroyMix()
+{
+    if (m_avPlayerItem)
+        [m_avPlayerItem setAudioMix:nil];
+    [m_avAudioMix setInputParameters:nil];
+    m_avAudioMix.clear();
+    m_tap.clear();
+}
+
+// Builds an AVMutableAudioMix whose single input-parameters object installs our
+// MTAudioProcessingTap on the first enabled audible track of the player item.
+void AudioSourceProviderAVFObjC::createMix()
+{
+    ASSERT(!m_avAudioMix);
+    ASSERT(m_avPlayerItem);
+    ASSERT(m_client);
+
+    m_avAudioMix = adoptNS([[getAVMutableAudioMixClass() alloc] init]);
+
+    MTAudioProcessingTapCallbacks callbacks = {
+        0,
+        this,
+        initCallback,
+        finalizeCallback,
+        prepareCallback,
+        unprepareCallback,
+        processCallback,
+    };
+
+    // Flag 1 == kMTAudioProcessingTapCreationFlag_PreEffects: tap the samples
+    // before any audio-mix effects are applied. initCallback() stores the new
+    // tap into m_tap before Create returns.
+    // NOTE(review): Create hands back a +1 reference and m_tap retains again in
+    // initCallback; confirm the extra reference is balanced somewhere.
+    MTAudioProcessingTapRef tap = nullptr;
+    OSStatus status = MTAudioProcessingTapCreate(kCFAllocatorDefault, &callbacks, 1, &tap);
+    if (status != noErr || !tap) {
+        // Tap creation can fail at runtime; the ASSERTs compile away in release
+        // builds, so bail out rather than installing a mix with no tap processor.
+        m_avAudioMix.clear();
+        return;
+    }
+    ASSERT(m_tap == tap);
+
+    RetainPtr<AVMutableAudioMixInputParameters> parameters = adoptNS([[getAVMutableAudioMixInputParametersClass() alloc] init]);
+    [parameters setAudioTapProcessor:m_tap.get()];
+
+    // Attach the tap to the first enabled track that has audible content;
+    // kCMPersistentTrackID_Invalid is used when no such track exists.
+    CMPersistentTrackID firstEnabledAudioTrackID = kCMPersistentTrackID_Invalid;
+    NSArray* tracks = [m_avPlayerItem tracks];
+    for (AVPlayerItemTrack* track in tracks) {
+        if ([track.assetTrack hasMediaCharacteristic:AVMediaCharacteristicAudible] && track.enabled) {
+            firstEnabledAudioTrackID = track.assetTrack.trackID;
+            break;
+        }
+    }
+    [parameters setTrackID:firstEnabledAudioTrackID];
+
+    [m_avAudioMix setInputParameters:@[parameters.get()]];
+    [m_avPlayerItem setAudioMix:m_avAudioMix.get()];
+}
+
+// C trampoline for the tap's "init" callback. clientInfo is the provider (set
+// as MTAudioProcessingTapCallbacks.clientInfo in createMix()). Remember the tap
+// here so member functions can call MTAudioProcessingTap* APIs on it.
+void AudioSourceProviderAVFObjC::initCallback(MTAudioProcessingTapRef tap, void* clientInfo, void** tapStorageOut)
+{
+    AudioSourceProviderAVFObjC* _this = static_cast<AudioSourceProviderAVFObjC*>(clientInfo);
+    _this->m_tap = tap;
+    _this->init(clientInfo, tapStorageOut);
+}
+
+// C trampoline: recover the provider from the tap's storage (published by
+// init()) and forward to the member function.
+void AudioSourceProviderAVFObjC::finalizeCallback(MTAudioProcessingTapRef tap)
+{
+    ASSERT(tap);
+    AudioSourceProviderAVFObjC* _this = static_cast<AudioSourceProviderAVFObjC*>(MTAudioProcessingTapGetStorage(tap));
+    _this->finalize();
+}
+
+// C trampoline: recover the provider from the tap's storage and forward.
+void AudioSourceProviderAVFObjC::prepareCallback(MTAudioProcessingTapRef tap, CMItemCount maxFrames, const AudioStreamBasicDescription *processingFormat)
+{
+    ASSERT(tap);
+    AudioSourceProviderAVFObjC* _this = static_cast<AudioSourceProviderAVFObjC*>(MTAudioProcessingTapGetStorage(tap));
+    _this->prepare(maxFrames, processingFormat);
+}
+
+// C trampoline: recover the provider from the tap's storage and forward.
+void AudioSourceProviderAVFObjC::unprepareCallback(MTAudioProcessingTapRef tap)
+{
+    ASSERT(tap);
+    AudioSourceProviderAVFObjC* _this = static_cast<AudioSourceProviderAVFObjC*>(MTAudioProcessingTapGetStorage(tap));
+    _this->unprepare();
+}
+
+// C trampoline: recover the provider from the tap's storage and forward.
+void AudioSourceProviderAVFObjC::processCallback(MTAudioProcessingTapRef tap, CMItemCount numberFrames, MTAudioProcessingTapFlags flags, AudioBufferList *bufferListInOut, CMItemCount *numberFramesOut, MTAudioProcessingTapFlags *flagsOut)
+{
+    ASSERT(tap);
+    AudioSourceProviderAVFObjC* _this = static_cast<AudioSourceProviderAVFObjC*>(MTAudioProcessingTapGetStorage(tap));
+    _this->process(numberFrames, flags, bufferListInOut, numberFramesOut, flagsOut);
+}
+
+// Tap "init": publish |this| as the tap's storage so the other C callbacks can
+// recover the provider via MTAudioProcessingTapGetStorage().
+void AudioSourceProviderAVFObjC::init(void* clientInfo, void** tapStorageOut)
+{
+    ASSERT(clientInfo == this);
+    UNUSED_PARAM(clientInfo);
+    *tapStorageOut = this;
+}
+
+// Tap "finalize": nothing to release — the tap's storage is the provider
+// itself, whose lifetime is managed by RefPtr, not by the tap.
+void AudioSourceProviderAVFObjC::finalize()
+{
+}
+
+// Field-wise equality for Core Audio stream descriptions (the system headers
+// provide no operator==); used by prepare() to decide whether an
+// AudioConverter is needed between the tap and output formats.
+static bool operator==(const AudioStreamBasicDescription& a, const AudioStreamBasicDescription& b)
+{
+    return a.mSampleRate == b.mSampleRate
+        && a.mFormatID == b.mFormatID
+        && a.mFormatFlags == b.mFormatFlags
+        && a.mBytesPerPacket == b.mBytesPerPacket
+        && a.mFramesPerPacket == b.mFramesPerPacket
+        && a.mBytesPerFrame == b.mBytesPerFrame
+        && a.mChannelsPerFrame == b.mChannelsPerFrame
+        && a.mBitsPerChannel == b.mBitsPerChannel;
+}
+
+// Inequality in terms of the field-wise operator== above.
+static bool operator!=(const AudioStreamBasicDescription& a, const AudioStreamBasicDescription& b)
+{
+    return !(a == b);
+}
+
+// Tap "prepare" callback body: record the tap's processing format, build the
+// desired output format (packed native float, non-interleaved), create a
+// converter if the two differ, and allocate the ring buffer plus the scratch
+// AudioBufferList used by provideInput(). Finally notify the client of the
+// format on the main thread.
+void AudioSourceProviderAVFObjC::prepare(CMItemCount maxFrames, const AudioStreamBasicDescription *processingFormat)
+{
+    ASSERT(maxFrames >= 0);
+
+    m_tapDescription = std::make_unique<AudioStreamBasicDescription>(*processingFormat);
+    int numberOfChannels = processingFormat->mChannelsPerFrame;
+    size_t bytesPerFrame = processingFormat->mBytesPerFrame;
+    double sampleRate = processingFormat->mSampleRate;
+    ASSERT(sampleRate >= 0);
+
+    // Non-interleaved float: one buffer per channel, so bytes-per-frame and
+    // bytes-per-packet each cover a single Float32 sample.
+    m_outputDescription = std::make_unique<AudioStreamBasicDescription>();
+    m_outputDescription->mSampleRate = sampleRate;
+    m_outputDescription->mFormatID = kAudioFormatLinearPCM;
+    m_outputDescription->mFormatFlags = kAudioFormatFlagsNativeFloatPacked;
+    m_outputDescription->mBitsPerChannel = 8 * sizeof(Float32);
+    m_outputDescription->mChannelsPerFrame = numberOfChannels;
+    m_outputDescription->mFramesPerPacket = 1;
+    m_outputDescription->mBytesPerPacket = sizeof(Float32);
+    m_outputDescription->mBytesPerFrame = sizeof(Float32);
+    m_outputDescription->mFormatFlags |= kAudioFormatFlagIsNonInterleaved;
+
+    // Only pay for a converter when the tap's format differs from ours.
+    if (*m_tapDescription != *m_outputDescription) {
+        AudioConverterRef outConverter = nullptr;
+        AudioConverterNew(m_tapDescription.get(), m_outputDescription.get(), &outConverter);
+        m_converter = outConverter;
+    }
+
+    // Make the ringbuffer large enough to store at least two callbacks worth of audio, or 1s, whichever is larger.
+    size_t capacity = std::max(static_cast<size_t>(2 * maxFrames), static_cast<size_t>(kRingBufferDuration * sampleRate));
+
+    m_ringBuffer = CARingBuffer::create();
+    m_ringBuffer->allocate(numberOfChannels, bytesPerFrame, capacity);
+
+    // AudioBufferList is a variable-length struct, so create on the heap with a generic new() operator
+    // with a custom size, and initialize the struct manually.
+    size_t bufferListSize = sizeof(AudioBufferList) + (sizeof(AudioBuffer) * std::max(1, numberOfChannels - 1));
+    m_list = std::unique_ptr<AudioBufferList>((AudioBufferList*) ::operator new (bufferListSize));
+    memset(m_list.get(), 0, bufferListSize);
+    m_list->mNumberBuffers = numberOfChannels;
+
+    RefPtr<AudioSourceProviderAVFObjC> strongThis = this;
+    callOnMainThread([strongThis, numberOfChannels, sampleRate] {
+        // The client can be cleared via setClient(nullptr) between the tap
+        // thread scheduling this task and the main thread running it; don't
+        // dereference a null client.
+        if (strongThis->m_client)
+            strongThis->m_client->setFormat(numberOfChannels, sampleRate);
+    });
+}
+
+// Tap "unprepare": release everything allocated in prepare().
+// NOTE(review): provideInput() on the render thread dereferences m_ringBuffer
+// and m_list without null checks — confirm the tear-down ordering guarantees
+// that the render thread cannot race this.
+void AudioSourceProviderAVFObjC::unprepare()
+{
+    m_tapDescription = nullptr;
+    m_outputDescription = nullptr;
+    m_ringBuffer = nullptr;
+    m_list = nullptr;
+}
+
+// Tap "process" callback body; per the class's threading assumptions this is
+// the single writer of the ring buffer. Pulls decoded audio from the tap,
+// tracks pause/seek discontinuities, stores the samples for provideInput()
+// (the single reader), and then mutes the tap's own output.
+void AudioSourceProviderAVFObjC::process(CMItemCount numberOfFrames, MTAudioProcessingTapFlags flags, AudioBufferList* bufferListInOut, CMItemCount* numberFramesOut, MTAudioProcessingTapFlags* flagsOut)
+{
+    UNUSED_PARAM(flags);
+
+    CMItemCount itemCount = 0;
+    CMTimeRange rangeOut;
+    OSStatus status = MTAudioProcessingTapGetSourceAudio(m_tap.get(), numberOfFrames, bufferListInOut, flagsOut, &rangeOut, &itemCount);
+    if (status != noErr || !itemCount)
+        return;
+
+    MediaTime rangeStart = toMediaTime(rangeOut.start);
+    MediaTime rangeDuration = toMediaTime(rangeOut.duration);
+
+    if (rangeStart.isInvalid())
+        return;
+
+    MediaTime currentTime = toMediaTime(CMTimebaseGetTime([m_avPlayerItem timebase]));
+    if (currentTime.isInvalid())
+        return;
+
+    // The audio tap will generate silence when the media is paused, and will not advance the
+    // tap currentTime.
+    if (rangeStart == m_startTimeAtLastProcess || rangeDuration == MediaTime::zeroTime()) {
+        m_paused = true;
+        return;
+    }
+
+    if (m_paused) {
+        // Only check the write-ahead time when playback begins.
+        m_paused = false;
+        MediaTime earlyBy = rangeStart - currentTime;
+        // Frames the tap is running ahead of the item's timebase; provideInput()
+        // holds reads back by this many frames so it stays behind real time.
+        m_writeAheadCount = m_tapDescription->mSampleRate * earlyBy.toDouble();
+    }
+
+    // Check to see if the underlying media has seeked, which would require us to "flush"
+    // our outstanding buffers.
+    if (rangeStart != m_endTimeAtLastProcess)
+        m_readCount = m_writeCount;
+
+    m_startTimeAtLastProcess = rangeStart;
+    m_endTimeAtLastProcess = rangeStart + rangeDuration;
+
+    // StartOfStream indicates a discontinuity, such as when an AVPlayerItem is re-added
+    // to an AVPlayer, so "flush" outstanding buffers.
+    if (flagsOut && *flagsOut & kMTAudioProcessingTapFlag_StartOfStream)
+        m_readCount = m_writeCount;
+
+    m_ringBuffer->store(bufferListInOut, itemCount, m_writeCount);
+    m_writeCount += itemCount;
+
+    // Mute the default audio playback by zeroing the tap-owned buffers.
+    for (uint32_t i = 0; i < bufferListInOut->mNumberBuffers; ++i) {
+        AudioBuffer& buffer = bufferListInOut->mBuffers[i];
+        memset(buffer.mData, 0, buffer.mDataByteSize);
+    }
+    // Report zero frames out so AVFoundation renders silence downstream.
+    *numberFramesOut = 0;
+}
+
+}
+
+#endif // ENABLE(WEB_AUDIO) && USE(MEDIATOOLBOX)
Modified: trunk/Source/WebCore/platform/graphics/avfoundation/objc/MediaPlayerPrivateAVFoundationObjC.h (173627 => 173628)
--- trunk/Source/WebCore/platform/graphics/avfoundation/objc/MediaPlayerPrivateAVFoundationObjC.h 2014-09-15 19:50:03 UTC (rev 173627)
+++ trunk/Source/WebCore/platform/graphics/avfoundation/objc/MediaPlayerPrivateAVFoundationObjC.h 2014-09-15 20:05:49 UTC (rev 173628)
@@ -62,6 +62,7 @@
class WebCoreAVFResourceLoader;
class InbandMetadataTextTrackPrivateAVF;
class InbandTextTrackPrivateAVFObjC;
+class AudioSourceProviderAVFObjC;
class AudioTrackPrivateAVFObjC;
class VideoTrackPrivateAVFObjC;
@@ -209,6 +210,10 @@
void setAVPlayerItem(AVPlayerItem *);
+#if ENABLE(WEB_AUDIO) && USE(MEDIATOOLBOX)
+ virtual AudioSourceProvider* audioSourceProvider();
+#endif
+
void createImageGenerator();
void destroyImageGenerator();
RetainPtr<CGImageRef> createImageForTimeInRect(float, const IntRect&);
@@ -289,6 +294,10 @@
bool m_videoFrameHasDrawn;
bool m_haveCheckedPlayability;
+#if ENABLE(WEB_AUDIO) && USE(MEDIATOOLBOX)
+ RefPtr<AudioSourceProviderAVFObjC> m_provider;
+#endif
+
RetainPtr<AVAssetImageGenerator> m_imageGenerator;
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
RetainPtr<AVPlayerItemVideoOutput> m_videoOutput;
Modified: trunk/Source/WebCore/platform/graphics/avfoundation/objc/MediaPlayerPrivateAVFoundationObjC.mm (173627 => 173628)
--- trunk/Source/WebCore/platform/graphics/avfoundation/objc/MediaPlayerPrivateAVFoundationObjC.mm 2014-09-15 19:50:03 UTC (rev 173627)
+++ trunk/Source/WebCore/platform/graphics/avfoundation/objc/MediaPlayerPrivateAVFoundationObjC.mm 2014-09-15 20:05:49 UTC (rev 173628)
@@ -29,6 +29,7 @@
#import "MediaPlayerPrivateAVFoundationObjC.h"
#import "AVTrackPrivateAVFObjCImpl.h"
+#import "AudioSourceProviderAVFObjC.h"
#import "AudioTrackPrivateAVFObjC.h"
#import "AuthenticationChallenge.h"
#import "BlockExceptions.h"
@@ -499,6 +500,11 @@
[track removeObserver:m_objcObserver.get() forKeyPath:@"enabled"];
m_cachedTracks = nullptr;
+#if ENABLE(WEB_AUDIO) && USE(MEDIATOOLBOX)
+ if (m_provider)
+ m_provider->setPlayerItem(nullptr);
+#endif
+
setIgnoreLoadStateChanges(false);
}
@@ -909,6 +915,11 @@
[m_avPlayerItem.get() addOutput:m_legibleOutput.get()];
#endif
+#if ENABLE(WEB_AUDIO) && USE(MEDIATOOLBOX)
+ if (m_provider)
+ m_provider->setPlayerItem(m_avPlayerItem.get());
+#endif
+
setDelayCallbacks(false);
}
@@ -1837,6 +1848,15 @@
}
#endif // ENABLE(VIDEO_TRACK)
+#if ENABLE(WEB_AUDIO) && USE(MEDIATOOLBOX)
+// MediaPlayerPrivate interface: lazily create the tap-backed provider for
+// MediaElementAudioSourceNode. create() returns null when the MediaToolbox tap
+// API is unavailable, in which case m_provider stays null and creation is
+// retried on the next call.
+AudioSourceProvider* MediaPlayerPrivateAVFoundationObjC::audioSourceProvider()
+{
+    if (!m_provider)
+        m_provider = AudioSourceProviderAVFObjC::create(m_avPlayerItem.get());
+    return m_provider.get();
+}
+#endif
+
void MediaPlayerPrivateAVFoundationObjC::sizeChanged()
{
if (!m_avAsset)
Modified: trunk/Source/WebCore/platform/mac/SoftLinking.h (173627 => 173628)
--- trunk/Source/WebCore/platform/mac/SoftLinking.h 2014-09-15 19:50:03 UTC (rev 173627)
+++ trunk/Source/WebCore/platform/mac/SoftLinking.h 2014-09-15 20:05:49 UTC (rev 173628)
@@ -110,7 +110,6 @@
return softLink##functionName parameterNames; \
}
-#if PLATFORM(IOS)
#define SOFT_LINK_MAY_FAIL(framework, functionName, resultType, parameterDeclarations, parameterNames) \
static resultType (*softLink##functionName) parameterDeclarations = 0; \
\
@@ -132,7 +131,6 @@
ASSERT(softLink##functionName); \
return softLink##functionName parameterNames; \
}
-#endif
/* callingConvention is unused on Mac but is here to keep the macro prototype the same between Mac and Windows. */
#define SOFT_LINK_OPTIONAL(framework, functionName, resultType, callingConvention, parameterDeclarations) \