Title: [211728] trunk/Source/WebCore
Revision
211728
Author
eric.carl...@apple.com
Date
2017-02-06 09:22:27 -0800 (Mon, 06 Feb 2017)

Log Message

[MediaStream Mac] Stop using AVSampleBufferAudioRenderer
https://bugs.webkit.org/show_bug.cgi?id=167821

Reviewed by Jer Noble.

* WebCore.xcodeproj/project.pbxproj: Add new files.

* platform/audio/mac/AudioSampleDataSource.cpp:
(WebCore::AudioSampleDataSource::pullSamplesInternal): Don't assume the first timestamp from the
render proc after a pause is zero.

Stop using an audio renderer for each audio track. No audio renderers means we don't need to use
an AVSampleBufferRenderSynchronizer.
* platform/graphics/avfoundation/objc/MediaPlayerPrivateMediaStreamAVFObjC.h:
* platform/graphics/avfoundation/objc/MediaPlayerPrivateMediaStreamAVFObjC.mm:
(-[WebAVSampleBufferStatusChangeListener invalidate]): No more audio renderers.
(-[WebAVSampleBufferStatusChangeListener observeValueForKeyPath:ofObject:change:context:]): Ditto.
(WebCore::MediaPlayerPrivateMediaStreamAVFObjC::MediaPlayerPrivateMediaStreamAVFObjC): Ditto.
(WebCore::MediaPlayerPrivateMediaStreamAVFObjC::~MediaPlayerPrivateMediaStreamAVFObjC): Pause
  audio tracks explicitly.
(WebCore::MediaPlayerPrivateMediaStreamAVFObjC::audioSourceProvider): Remove the existing code;
  it was incorrect and not thread-safe.
(WebCore::MediaPlayerPrivateMediaStreamAVFObjC::flushRenderers): No more audio renderers.
(WebCore::MediaPlayerPrivateMediaStreamAVFObjC::ensureLayer): No more render synchronizer.
(WebCore::MediaPlayerPrivateMediaStreamAVFObjC::destroyLayer): Ditto.
(WebCore::MediaPlayerPrivateMediaStreamAVFObjC::play): Start each audio track.
(WebCore::MediaPlayerPrivateMediaStreamAVFObjC::pause): Pause each audio track.
(WebCore::MediaPlayerPrivateMediaStreamAVFObjC::setVolume): Pass the command to each audio track.
(WebCore::MediaPlayerPrivateMediaStreamAVFObjC::setMuted): Ditto.
(WebCore::MediaPlayerPrivateMediaStreamAVFObjC::streamTime): No more render synchronizer.
(WebCore::MediaPlayerPrivateMediaStreamAVFObjC::sampleBufferUpdated): Don't handle audio samples.
(WebCore::MediaPlayerPrivateMediaStreamAVFObjC::updateTracks): Update for audio track class change. No
more render synchronizer.
(-[WebAVSampleBufferStatusChangeListener beginObservingRenderer:]): Deleted.
(-[WebAVSampleBufferStatusChangeListener stopObservingRenderer:]): Deleted.
(WebCore::MediaPlayerPrivateMediaStreamAVFObjC::enqueueAudioSample): Deleted.
(WebCore::MediaPlayerPrivateMediaStreamAVFObjC::requestNotificationWhenReadyForAudioData): Deleted.
(WebCore::MediaPlayerPrivateMediaStreamAVFObjC::createAudioRenderer): Deleted.
(WebCore::MediaPlayerPrivateMediaStreamAVFObjC::destroyAudioRenderer): Deleted.
(WebCore::MediaPlayerPrivateMediaStreamAVFObjC::destroyAudioRenderers): Deleted.
(WebCore::MediaPlayerPrivateMediaStreamAVFObjC::rendererStatusDidChange): Deleted.

* platform/mediastream/AudioTrackPrivateMediaStream.h:

* platform/mediastream/MediaStreamTrackPrivate.cpp:
(WebCore::MediaStreamTrackPrivate::MediaStreamTrackPrivate): add/removeObserver take a reference,
not a pointer.
(WebCore::MediaStreamTrackPrivate::~MediaStreamTrackPrivate): Ditto.
(WebCore::MediaStreamTrackPrivate::videoSampleAvailable): Renamed from sourceHasMoreMediaData.
(WebCore::MediaStreamTrackPrivate::sourceHasMoreMediaData): Deleted.
* platform/mediastream/MediaStreamTrackPrivate.h:

* platform/mediastream/RealtimeMediaSource.cpp:
(WebCore::RealtimeMediaSource::addObserver): Take a reference, not a pointer.
(WebCore::RealtimeMediaSource::removeObserver): Ditto.
(WebCore::RealtimeMediaSource::videoSampleAvailable): Renamed from mediaDataUpdated.
(WebCore::RealtimeMediaSource::audioSamplesAvailable): New.
(WebCore::RealtimeMediaSource::stop): Drive-by cleanup.
(WebCore::RealtimeMediaSource::requestStop): Ditto.
(WebCore::RealtimeMediaSource::mediaDataUpdated): Deleted.
* platform/mediastream/RealtimeMediaSource.h:

* platform/mediastream/mac/AVAudioCaptureSource.h:
* platform/mediastream/mac/AVAudioCaptureSource.mm:
(WebCore::AVAudioCaptureSource::AVAudioCaptureSource):
(WebCore::AVAudioCaptureSource::addObserver):
(WebCore::AVAudioCaptureSource::shutdownCaptureSession):
(WebCore::AVAudioCaptureSource::captureOutputDidOutputSampleBufferFromConnection):
(WebCore::operator==): Deleted.
(WebCore::operator!=): Deleted.

* platform/mediastream/mac/AVVideoCaptureSource.mm:
(WebCore::AVVideoCaptureSource::processNewFrame): Call videoSampleAvailable, not mediaDataUpdated.

Render audio with a CoreAudio output unit.
* platform/mediastream/mac/AudioTrackPrivateMediaStreamCocoa.cpp: Added.
(WebCore::AudioTrackPrivateMediaStreamCocoa::AudioTrackPrivateMediaStreamCocoa):
(WebCore::AudioTrackPrivateMediaStreamCocoa::~AudioTrackPrivateMediaStreamCocoa):
(WebCore::AudioTrackPrivateMediaStreamCocoa::playInternal):
(WebCore::AudioTrackPrivateMediaStreamCocoa::play):
(WebCore::AudioTrackPrivateMediaStreamCocoa::pause):
(WebCore::AudioTrackPrivateMediaStreamCocoa::setVolume):
(WebCore::AudioTrackPrivateMediaStreamCocoa::setupAudioUnit):
(WebCore::AudioTrackPrivateMediaStreamCocoa::audioSamplesAvailable):
(WebCore::AudioTrackPrivateMediaStreamCocoa::sourceStopped):
(WebCore::AudioTrackPrivateMediaStreamCocoa::render):
(WebCore::AudioTrackPrivateMediaStreamCocoa::inputProc):
* platform/mediastream/mac/AudioTrackPrivateMediaStreamCocoa.h: Added.

* platform/mediastream/mac/MockRealtimeAudioSourceMac.h:
* platform/mediastream/mac/MockRealtimeAudioSourceMac.mm:
(WebCore::alignTo16Bytes):
(WebCore::MockRealtimeAudioSourceMac::emitSampleBuffers):
(WebCore::MockRealtimeAudioSourceMac::reconfigure): Minor cleanup.
(WebCore::MockRealtimeAudioSourceMac::render): Ditto.

* platform/mediastream/mac/MockRealtimeVideoSourceMac.mm:
(WebCore::MockRealtimeVideoSourceMac::updateSampleBuffer): Call videoSampleAvailable, not mediaDataUpdated.

* platform/mediastream/mac/WebAudioSourceProviderAVFObjC.h:
* platform/mediastream/mac/WebAudioSourceProviderAVFObjC.mm:
(WebCore::WebAudioSourceProviderAVFObjC::~WebAudioSourceProviderAVFObjC):
(WebCore::WebAudioSourceProviderAVFObjC::provideInput): Use a mutex. Get rid of m_writeAheadCount;
it is always 0.
(WebCore::WebAudioSourceProviderAVFObjC::prepare): Use a lock.
(WebCore::WebAudioSourceProviderAVFObjC::unprepare): Ditto.
(WebCore::WebAudioSourceProviderAVFObjC::process): Ditto.
* platform/mock/MockRealtimeAudioSource.h:
(WebCore::MockRealtimeAudioSource::renderInterval): Decrease the render interval.

Modified Paths

Added Paths

Diff

Modified: trunk/Source/WebCore/ChangeLog (211727 => 211728)


--- trunk/Source/WebCore/ChangeLog	2017-02-06 17:07:24 UTC (rev 211727)
+++ trunk/Source/WebCore/ChangeLog	2017-02-06 17:22:27 UTC (rev 211728)
@@ -1,3 +1,115 @@
+2017-02-06  Eric Carlson  <eric.carl...@apple.com>
+
+        [MediaStream Mac] Stop using AVSampleBufferAudioRenderer
+        https://bugs.webkit.org/show_bug.cgi?id=167821
+
+        Reviewed by Jer Noble.
+
+        * WebCore.xcodeproj/project.pbxproj: Add new files.
+
+        * platform/audio/mac/AudioSampleDataSource.cpp:
+        (WebCore::AudioSampleDataSource::pullSamplesInternal): Don't assume the first timestamp from the
+        render proc after a pause is zero.
+
+        Stop using an audio renderer for each audio track. No audio renderers means we don't need to use
+        an AVSampleBufferRenderSynchronizer.
+        * platform/graphics/avfoundation/objc/MediaPlayerPrivateMediaStreamAVFObjC.h:
+        * platform/graphics/avfoundation/objc/MediaPlayerPrivateMediaStreamAVFObjC.mm:
+        (-[WebAVSampleBufferStatusChangeListener invalidate]): No more audio renderers.
+        (-[WebAVSampleBufferStatusChangeListener observeValueForKeyPath:ofObject:change:context:]): Ditto.
+        (WebCore::MediaPlayerPrivateMediaStreamAVFObjC::MediaPlayerPrivateMediaStreamAVFObjC): Ditto.
+        (WebCore::MediaPlayerPrivateMediaStreamAVFObjC::~MediaPlayerPrivateMediaStreamAVFObjC): Pause
+          audio tracks explicitly.
+        (WebCore::MediaPlayerPrivateMediaStreamAVFObjC::audioSourceProvider): Remove the existing code,
+          it was incorrect and not thread safe.
+        (WebCore::MediaPlayerPrivateMediaStreamAVFObjC::flushRenderers): No more audio renderers.
+        (WebCore::MediaPlayerPrivateMediaStreamAVFObjC::ensureLayer): No more render synchronizer.
+        (WebCore::MediaPlayerPrivateMediaStreamAVFObjC::destroyLayer): Ditto.
+        (WebCore::MediaPlayerPrivateMediaStreamAVFObjC::play): Start each audio track.
+        (WebCore::MediaPlayerPrivateMediaStreamAVFObjC::pause): Pause each audio track.
+        (WebCore::MediaPlayerPrivateMediaStreamAVFObjC::setVolume): Pass the command to each audio track.
+        (WebCore::MediaPlayerPrivateMediaStreamAVFObjC::setMuted): Ditto.
+        (WebCore::MediaPlayerPrivateMediaStreamAVFObjC::streamTime): No more render synchronizer.
+        (WebCore::MediaPlayerPrivateMediaStreamAVFObjC::sampleBufferUpdated): Don't handle audio samples.
+        (WebCore::MediaPlayerPrivateMediaStreamAVFObjC::updateTracks): Update for audio track class change. No
+        more render synchronizer.
+        (-[WebAVSampleBufferStatusChangeListener beginObservingRenderer:]): Deleted.
+        (-[WebAVSampleBufferStatusChangeListener stopObservingRenderer:]): Deleted.
+        (WebCore::MediaPlayerPrivateMediaStreamAVFObjC::enqueueAudioSample): Deleted.
+        (WebCore::MediaPlayerPrivateMediaStreamAVFObjC::requestNotificationWhenReadyForAudioData): Deleted.
+        (WebCore::MediaPlayerPrivateMediaStreamAVFObjC::createAudioRenderer): Deleted.
+        (WebCore::MediaPlayerPrivateMediaStreamAVFObjC::destroyAudioRenderer): Deleted.
+        (WebCore::MediaPlayerPrivateMediaStreamAVFObjC::destroyAudioRenderers): Deleted.
+        (WebCore::MediaPlayerPrivateMediaStreamAVFObjC::rendererStatusDidChange): Deleted.
+
+        * platform/mediastream/AudioTrackPrivateMediaStream.h:
+
+        * platform/mediastream/MediaStreamTrackPrivate.cpp:
+        (WebCore::MediaStreamTrackPrivate::MediaStreamTrackPrivate): add/removeObserver takes a reference,
+        not a pointer.
+        (WebCore::MediaStreamTrackPrivate::~MediaStreamTrackPrivate): Ditto.
+        (WebCore::MediaStreamTrackPrivate::videoSampleAvailable): Renamed from sourceHasMoreMediaData.
+        (WebCore::MediaStreamTrackPrivate::sourceHasMoreMediaData): Deleted.
+        * platform/mediastream/MediaStreamTrackPrivate.h:
+
+        * platform/mediastream/RealtimeMediaSource.cpp:
+        (WebCore::RealtimeMediaSource::addObserver): Take a reference, not a pointer.
+        (WebCore::RealtimeMediaSource::removeObserver): Ditto.
+        (WebCore::RealtimeMediaSource::videoSampleAvailable): Renamed from mediaDataUpdated.
+        (WebCore::RealtimeMediaSource::audioSamplesAvailable): New.
+        (WebCore::RealtimeMediaSource::stop): Drive-by cleanup.
+        (WebCore::RealtimeMediaSource::requestStop): Ditto.
+        (WebCore::RealtimeMediaSource::mediaDataUpdated): Deleted.
+        * platform/mediastream/RealtimeMediaSource.h:
+
+        * platform/mediastream/mac/AVAudioCaptureSource.h:
+        * platform/mediastream/mac/AVAudioCaptureSource.mm:
+        (WebCore::AVAudioCaptureSource::AVAudioCaptureSource):
+        (WebCore::AVAudioCaptureSource::addObserver):
+        (WebCore::AVAudioCaptureSource::shutdownCaptureSession):
+        (WebCore::AVAudioCaptureSource::captureOutputDidOutputSampleBufferFromConnection):
+        (WebCore::operator==): Deleted.
+        (WebCore::operator!=): Deleted.
+
+        * platform/mediastream/mac/AVVideoCaptureSource.mm:
+        (WebCore::AVVideoCaptureSource::processNewFrame): Call videoSampleAvailable, not mediaDataUpdated.
+
+        Render audio with a CoreAudio output unit.
+        * platform/mediastream/mac/AudioTrackPrivateMediaStreamCocoa.cpp: Added.
+        (WebCore::AudioTrackPrivateMediaStreamCocoa::AudioTrackPrivateMediaStreamCocoa):
+        (WebCore::AudioTrackPrivateMediaStreamCocoa::~AudioTrackPrivateMediaStreamCocoa):
+        (WebCore::AudioTrackPrivateMediaStreamCocoa::playInternal):
+        (WebCore::AudioTrackPrivateMediaStreamCocoa::play):
+        (WebCore::AudioTrackPrivateMediaStreamCocoa::pause):
+        (WebCore::AudioTrackPrivateMediaStreamCocoa::setVolume):
+        (WebCore::AudioTrackPrivateMediaStreamCocoa::setupAudioUnit):
+        (WebCore::AudioTrackPrivateMediaStreamCocoa::audioSamplesAvailable):
+        (WebCore::AudioTrackPrivateMediaStreamCocoa::sourceStopped):
+        (WebCore::AudioTrackPrivateMediaStreamCocoa::render):
+        (WebCore::AudioTrackPrivateMediaStreamCocoa::inputProc):
+        * platform/mediastream/mac/AudioTrackPrivateMediaStreamCocoa.h: Added.
+
+        * platform/mediastream/mac/MockRealtimeAudioSourceMac.h:
+        * platform/mediastream/mac/MockRealtimeAudioSourceMac.mm:
+        (WebCore::alignTo16Bytes):
+        (WebCore::MockRealtimeAudioSourceMac::emitSampleBuffers):
+        (WebCore::MockRealtimeAudioSourceMac::reconfigure): Minor cleanup.
+        (WebCore::MockRealtimeAudioSourceMac::render): Ditto.
+
+        * platform/mediastream/mac/MockRealtimeVideoSourceMac.mm:
+        (WebCore::MockRealtimeVideoSourceMac::updateSampleBuffer): Call videoSampleAvailable, not mediaDataUpdated.
+
+        * platform/mediastream/mac/WebAudioSourceProviderAVFObjC.h:
+        * platform/mediastream/mac/WebAudioSourceProviderAVFObjC.mm:
+        (WebCore::WebAudioSourceProviderAVFObjC::~WebAudioSourceProviderAVFObjC):
+        (WebCore::WebAudioSourceProviderAVFObjC::provideInput): Use a mutex. Get rid of m_writeAheadCount,
+        it is always 0.
+        (WebCore::WebAudioSourceProviderAVFObjC::prepare): Use a lock.
+        (WebCore::WebAudioSourceProviderAVFObjC::unprepare): Ditto.
+        (WebCore::WebAudioSourceProviderAVFObjC::process): Ditto.
+        * platform/mock/MockRealtimeAudioSource.h:
+        (WebCore::MockRealtimeAudioSource::renderInterval): Decrease the render interval.
+
 2017-02-06  Antoine Quint  <grao...@apple.com>
 
         [Modern Media Controls] Add a backdrop filter to the start button on macOS

Modified: trunk/Source/WebCore/WebCore.xcodeproj/project.pbxproj (211727 => 211728)


--- trunk/Source/WebCore/WebCore.xcodeproj/project.pbxproj	2017-02-06 17:07:24 UTC (rev 211727)
+++ trunk/Source/WebCore/WebCore.xcodeproj/project.pbxproj	2017-02-06 17:22:27 UTC (rev 211728)
@@ -159,6 +159,7 @@
 		07638A9A1884487200E15A1B /* MediaSessionManagerIOS.mm in Sources */ = {isa = PBXBuildFile; fileRef = 07638A981884487200E15A1B /* MediaSessionManagerIOS.mm */; };
 		076970861463AD8700F502CF /* TextTrackList.cpp in Sources */ = {isa = PBXBuildFile; fileRef = 076970841463AD8700F502CF /* TextTrackList.cpp */; };
 		076970871463AD8700F502CF /* TextTrackList.h in Headers */ = {isa = PBXBuildFile; fileRef = 076970851463AD8700F502CF /* TextTrackList.h */; };
+		076EC1331E44F56D00E5D813 /* AudioTrackPrivateMediaStreamCocoa.cpp in Sources */ = {isa = PBXBuildFile; fileRef = 076EC1321E44F2CB00E5D813 /* AudioTrackPrivateMediaStreamCocoa.cpp */; };
 		076F0D0E12B8192700C26AA4 /* MediaPlayerPrivateAVFoundation.h in Headers */ = {isa = PBXBuildFile; fileRef = 076F0D0A12B8192700C26AA4 /* MediaPlayerPrivateAVFoundation.h */; };
 		07707CB01E205EE300005BF7 /* AudioSourceObserverObjC.h in Headers */ = {isa = PBXBuildFile; fileRef = 07707CAF1E205EC400005BF7 /* AudioSourceObserverObjC.h */; };
 		077664FC183E6B5C00133B92 /* JSQuickTimePluginReplacement.cpp in Sources */ = {isa = PBXBuildFile; fileRef = 077664FA183E6B5C00133B92 /* JSQuickTimePluginReplacement.cpp */; };
@@ -284,7 +285,6 @@
 		07B7116F1D899E63009F0FFB /* CaptureDeviceManager.h in Headers */ = {isa = PBXBuildFile; fileRef = 07B7116C1D899E63009F0FFB /* CaptureDeviceManager.h */; };
 		07C046C31E42508B007201E7 /* CAAudioStreamDescription.cpp in Sources */ = {isa = PBXBuildFile; fileRef = 073B87571E40DCFD0071C0EC /* CAAudioStreamDescription.cpp */; };
 		07C046C41E42508B007201E7 /* CAAudioStreamDescription.h in Headers */ = {isa = PBXBuildFile; fileRef = 073B87581E40DCFD0071C0EC /* CAAudioStreamDescription.h */; settings = {ATTRIBUTES = (Private, ); }; };
-		07C046C71E425155007201E7 /* AudioTrackPrivateMediaStreamCocoa.cpp in Sources */ = {isa = PBXBuildFile; fileRef = 07C046C51E42512F007201E7 /* AudioTrackPrivateMediaStreamCocoa.cpp */; };
 		07C046C81E425155007201E7 /* AudioTrackPrivateMediaStreamCocoa.h in Headers */ = {isa = PBXBuildFile; fileRef = 07C046C61E42512F007201E7 /* AudioTrackPrivateMediaStreamCocoa.h */; };
 		07C046CB1E426413007201E7 /* AudioStreamDescription.h in Headers */ = {isa = PBXBuildFile; fileRef = 073B87561E40DCE50071C0EC /* AudioStreamDescription.h */; settings = {ATTRIBUTES = (Private, ); }; };
 		07C1C0E21BFB600100BD2256 /* MediaTrackSupportedConstraints.h in Headers */ = {isa = PBXBuildFile; fileRef = 07C1C0E01BFB600100BD2256 /* MediaTrackSupportedConstraints.h */; };
@@ -7256,6 +7256,7 @@
 		07638A981884487200E15A1B /* MediaSessionManagerIOS.mm */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.objcpp; path = MediaSessionManagerIOS.mm; sourceTree = "<group>"; };
 		076970841463AD8700F502CF /* TextTrackList.cpp */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; path = TextTrackList.cpp; sourceTree = "<group>"; };
 		076970851463AD8700F502CF /* TextTrackList.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = TextTrackList.h; sourceTree = "<group>"; };
+		076EC1321E44F2CB00E5D813 /* AudioTrackPrivateMediaStreamCocoa.cpp */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; path = AudioTrackPrivateMediaStreamCocoa.cpp; sourceTree = "<group>"; };
 		076F0D0912B8192700C26AA4 /* MediaPlayerPrivateAVFoundation.cpp */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; path = MediaPlayerPrivateAVFoundation.cpp; sourceTree = "<group>"; };
 		076F0D0A12B8192700C26AA4 /* MediaPlayerPrivateAVFoundation.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = MediaPlayerPrivateAVFoundation.h; sourceTree = "<group>"; };
 		07707CAF1E205EC400005BF7 /* AudioSourceObserverObjC.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = AudioSourceObserverObjC.h; sourceTree = "<group>"; };
@@ -7337,6 +7338,7 @@
 		07B7116A1D899E63009F0FFB /* CaptureDevice.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = CaptureDevice.h; sourceTree = "<group>"; };
 		07B7116B1D899E63009F0FFB /* CaptureDeviceManager.cpp */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; path = CaptureDeviceManager.cpp; sourceTree = "<group>"; };
 		07B7116C1D899E63009F0FFB /* CaptureDeviceManager.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = CaptureDeviceManager.h; sourceTree = "<group>"; };
+		07C046C61E42512F007201E7 /* AudioTrackPrivateMediaStreamCocoa.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = AudioTrackPrivateMediaStreamCocoa.h; sourceTree = "<group>"; };
 		07C1C0E01BFB600100BD2256 /* MediaTrackSupportedConstraints.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = MediaTrackSupportedConstraints.h; sourceTree = "<group>"; };
 		07C1C0E11BFB600100BD2256 /* MediaTrackSupportedConstraints.idl */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text; path = MediaTrackSupportedConstraints.idl; sourceTree = "<group>"; };
 		07C1C0E41BFB60ED00BD2256 /* RealtimeMediaSourceSupportedConstraints.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = RealtimeMediaSourceSupportedConstraints.h; sourceTree = "<group>"; };
@@ -15402,6 +15404,7 @@
 		0729B14D17CFCCA0004F1D60 /* mac */ = {
 			isa = PBXGroup;
 			children = (
+				076EC1321E44F2CB00E5D813 /* AudioTrackPrivateMediaStreamCocoa.cpp */,
 				5CDD83391E4324BB00621E92 /* RealtimeIncomingVideoSource.cpp */,
 				5CDD833A1E4324BB00621E92 /* RealtimeIncomingVideoSource.h */,
 				5CDD833B1E4324BB00621E92 /* RealtimeOutgoingVideoSource.cpp */,
@@ -15408,7 +15411,6 @@
 				5CDD833C1E4324BB00621E92 /* RealtimeOutgoingVideoSource.h */,
 				07707CB11E20649C00005BF7 /* AudioCaptureSourceProviderObjC.h */,
 				07707CAF1E205EC400005BF7 /* AudioSourceObserverObjC.h */,
-				07C046C51E42512F007201E7 /* AudioTrackPrivateMediaStreamCocoa.cpp */,
 				07C046C61E42512F007201E7 /* AudioTrackPrivateMediaStreamCocoa.h */,
 				070363D8181A1CDC00C074A5 /* AVAudioCaptureSource.h */,
 				070363D9181A1CDC00C074A5 /* AVAudioCaptureSource.mm */,
@@ -25245,6 +25247,7 @@
 				CDE3A85417F5FCE600C5BE20 /* AudioTrackPrivateAVF.h in Headers */,
 				CDE3A85817F6020400C5BE20 /* AudioTrackPrivateAVFObjC.h in Headers */,
 				CD54A763180F9F7000B076C9 /* AudioTrackPrivateMediaSourceAVFObjC.h in Headers */,
+				07C046C81E425155007201E7 /* AudioTrackPrivateMediaStreamCocoa.h in Headers */,
 				07D6A4F81BF2307D00174146 /* AudioTrackPrivateMediaStream.h in Headers */,
 				FD31608B12B026F700C1A359 /* AudioUtilities.h in Headers */,
 				7EE6846012D26E3800E79415 /* AuthenticationCF.h in Headers */,
@@ -31870,6 +31873,7 @@
 				7C39C3741DDBB8D300FEFB29 /* SVGTransformListValues.cpp in Sources */,
 				7CE58D571DD7D96D00128552 /* SVGTransformValue.cpp in Sources */,
 				B2227AE10D00BF220071B782 /* SVGTRefElement.cpp in Sources */,
+				076EC1331E44F56D00E5D813 /* AudioTrackPrivateMediaStreamCocoa.cpp in Sources */,
 				B2227AE40D00BF220071B782 /* SVGTSpanElement.cpp in Sources */,
 				B2227AE90D00BF220071B782 /* SVGURIReference.cpp in Sources */,
 				B2227AEC0D00BF220071B782 /* SVGUseElement.cpp in Sources */,
@@ -32119,7 +32123,6 @@
 				49C7B9E51042D32F0009D447 /* WebGLTexture.cpp in Sources */,
 				6F995A231A7078B100A735F4 /* WebGLTransformFeedback.cpp in Sources */,
 				0C3F1F5A10C8871200D72CE1 /* WebGLUniformLocation.cpp in Sources */,
-				07C046C71E425155007201E7 /* AudioTrackPrivateMediaStreamCocoa.cpp in Sources */,
 				6F995A251A7078B100A735F4 /* WebGLVertexArrayObject.cpp in Sources */,
 				6F222B761AB52D8A0094651A /* WebGLVertexArrayObjectBase.cpp in Sources */,
 				77A17A7712F28642004E02F6 /* WebGLVertexArrayObjectOES.cpp in Sources */,

Modified: trunk/Source/WebCore/platform/audio/mac/AudioSampleDataSource.cpp (211727 => 211728)


--- trunk/Source/WebCore/platform/audio/mac/AudioSampleDataSource.cpp	2017-02-06 17:07:24 UTC (rev 211727)
+++ trunk/Source/WebCore/platform/audio/mac/AudioSampleDataSource.cpp	2017-02-06 17:22:27 UTC (rev 211728)
@@ -255,14 +255,13 @@
         const double tenMS = .01;
         const double fiveMS = .005;
         double sampleRate = m_outputDescription->sampleRate();
+        m_outputSampleOffset = timeStamp + m_timeStamp;
         if (buffered > sampleRate * twentyMS)
-            m_outputSampleOffset = m_timeStamp - sampleRate * twentyMS;
+            m_outputSampleOffset -= sampleRate * twentyMS;
         else if (buffered > sampleRate * tenMS)
-            m_outputSampleOffset = m_timeStamp - sampleRate * tenMS;
+            m_outputSampleOffset -= sampleRate * tenMS;
         else if (buffered > sampleRate * fiveMS)
-            m_outputSampleOffset = m_timeStamp - sampleRate * fiveMS;
-        else
-            m_outputSampleOffset = m_timeStamp;
+            m_outputSampleOffset -= sampleRate * fiveMS;
 
         m_transitioningFromPaused = false;
     }

Modified: trunk/Source/WebCore/platform/graphics/avfoundation/objc/MediaPlayerPrivateMediaStreamAVFObjC.h (211727 => 211728)


--- trunk/Source/WebCore/platform/graphics/avfoundation/objc/MediaPlayerPrivateMediaStreamAVFObjC.h	2017-02-06 17:07:24 UTC (rev 211727)
+++ trunk/Source/WebCore/platform/graphics/avfoundation/objc/MediaPlayerPrivateMediaStreamAVFObjC.h	2017-02-06 17:22:27 UTC (rev 211728)
@@ -45,7 +45,7 @@
 
 namespace WebCore {
 
-class AudioTrackPrivateMediaStream;
+class AudioTrackPrivateMediaStreamCocoa;
 class AVVideoCaptureSource;
 class Clock;
 class MediaSourcePrivateClient;
@@ -55,10 +55,6 @@
 class VideoFullscreenLayerManager;
 #endif
 
-#if __has_include(<AVFoundation/AVSampleBufferRenderSynchronizer.h>)
-#define USE_RENDER_SYNCHRONIZER 1
-#endif
-
 class MediaPlayerPrivateMediaStreamAVFObjC final : public MediaPlayerPrivateInterface, private MediaStreamPrivate::Observer, private MediaStreamTrackPrivate::Observer {
 public:
     explicit MediaPlayerPrivateMediaStreamAVFObjC(MediaPlayer*);
@@ -81,7 +77,6 @@
     void ensureLayer();
     void destroyLayer();
 
-    void rendererStatusDidChange(AVSampleBufferAudioRenderer*, NSNumber*);
     void layerStatusDidChange(AVSampleBufferDisplayLayer*, NSNumber*);
 
 private:
@@ -144,13 +139,6 @@
     void flushAndRemoveVideoSampleBuffers();
     void requestNotificationWhenReadyForVideoData();
 
-    void enqueueAudioSample(MediaStreamTrackPrivate&, MediaSample&);
-    void createAudioRenderer(AtomicString);
-    void destroyAudioRenderer(AVSampleBufferAudioRenderer*);
-    void destroyAudioRenderer(AtomicString);
-    void destroyAudioRenderers();
-    void requestNotificationWhenReadyForAudioData(AtomicString);
-
     void paint(GraphicsContext&, const FloatRect&) override;
     void paintCurrentFrameInContext(GraphicsContext&, const FloatRect&) override;
     bool metaDataAvailable() const { return m_mediaStreamPrivate && m_readyState >= MediaPlayer::HaveMetadata; }
@@ -210,9 +198,7 @@
 
     MediaTime streamTime() const;
 
-#if USE(RENDER_SYNCHRONIZER)
     AudioSourceProvider* audioSourceProvider() final;
-#endif
 
     MediaPlayer* m_player { nullptr };
     WeakPtrFactory<MediaPlayerPrivateMediaStreamAVFObjC> m_weakPtrFactory;
@@ -222,22 +208,14 @@
 
     RetainPtr<WebAVSampleBufferStatusChangeListener> m_statusChangeListener;
     RetainPtr<AVSampleBufferDisplayLayer> m_sampleBufferDisplayLayer;
-#if USE(RENDER_SYNCHRONIZER)
-    HashMap<String, RetainPtr<AVSampleBufferAudioRenderer>> m_audioRenderers;
-    RetainPtr<AVSampleBufferRenderSynchronizer> m_synchronizer;
-#else
     std::unique_ptr<Clock> m_clock;
-#endif
 
     MediaTime m_pausedTime;
     RetainPtr<CGImageRef> m_pausedImage;
 
-    HashMap<String, RefPtr<AudioTrackPrivateMediaStream>> m_audioTrackMap;
+    HashMap<String, RefPtr<AudioTrackPrivateMediaStreamCocoa>> m_audioTrackMap;
     HashMap<String, RefPtr<VideoTrackPrivateMediaStream>> m_videoTrackMap;
     PendingSampleQueue m_pendingVideoSampleQueue;
-#if USE(RENDER_SYNCHRONIZER)
-    PendingSampleQueue m_pendingAudioSampleQueue;
-#endif
 
     MediaPlayer::NetworkState m_networkState { MediaPlayer::Empty };
     MediaPlayer::ReadyState m_readyState { MediaPlayer::HaveNothing };

Modified: trunk/Source/WebCore/platform/graphics/avfoundation/objc/MediaPlayerPrivateMediaStreamAVFObjC.mm (211727 => 211728)


--- trunk/Source/WebCore/platform/graphics/avfoundation/objc/MediaPlayerPrivateMediaStreamAVFObjC.mm	2017-02-06 17:07:24 UTC (rev 211727)
+++ trunk/Source/WebCore/platform/graphics/avfoundation/objc/MediaPlayerPrivateMediaStreamAVFObjC.mm	2017-02-06 17:22:27 UTC (rev 211728)
@@ -29,7 +29,7 @@
 #if ENABLE(MEDIA_STREAM) && USE(AVFOUNDATION)
 
 #import "AVFoundationSPI.h"
-#import "AudioTrackPrivateMediaStream.h"
+#import "AudioTrackPrivateMediaStreamCocoa.h"
 #import "Clock.h"
 #import "CoreMediaSoftLink.h"
 #import "GraphicsContext.h"
@@ -52,7 +52,6 @@
 
 SOFT_LINK_FRAMEWORK_OPTIONAL(AVFoundation)
 
-SOFT_LINK_CLASS_OPTIONAL(AVFoundation, AVSampleBufferAudioRenderer)
 SOFT_LINK_CLASS_OPTIONAL(AVFoundation, AVSampleBufferDisplayLayer)
 SOFT_LINK_CLASS_OPTIONAL(AVFoundation, AVSampleBufferRenderSynchronizer)
 
@@ -67,7 +66,6 @@
 @interface WebAVSampleBufferStatusChangeListener : NSObject {
     MediaPlayerPrivateMediaStreamAVFObjC* _parent;
     Vector<RetainPtr<AVSampleBufferDisplayLayer>> _layers;
-    Vector<RetainPtr<AVSampleBufferAudioRenderer>> _renderers;
 }
 
 - (id)initWithParent:(MediaPlayerPrivateMediaStreamAVFObjC*)callback;
@@ -74,8 +72,6 @@
 - (void)invalidate;
 - (void)beginObservingLayer:(AVSampleBufferDisplayLayer *)layer;
 - (void)stopObservingLayer:(AVSampleBufferDisplayLayer *)layer;
-- (void)beginObservingRenderer:(AVSampleBufferAudioRenderer *)renderer;
-- (void)stopObservingRenderer:(AVSampleBufferAudioRenderer *)renderer;
 @end
 
 @implementation WebAVSampleBufferStatusChangeListener
@@ -101,10 +97,6 @@
         [layer removeObserver:self forKeyPath:@"status"];
     _layers.clear();
 
-    for (auto& renderer : _renderers)
-        [renderer removeObserver:self forKeyPath:@"status"];
-    _renderers.clear();
-
     [[NSNotificationCenter defaultCenter] removeObserver:self];
 
     _parent = nullptr;
@@ -128,24 +120,6 @@
     _layers.remove(_layers.find(layer));
 }
 
-- (void)beginObservingRenderer:(AVSampleBufferAudioRenderer*)renderer
-{
-    ASSERT(_parent);
-    ASSERT(!_renderers.contains(renderer));
-
-    _renderers.append(renderer);
-    [renderer addObserver:self forKeyPath:@"status" options:NSKeyValueObservingOptionNew context:nullptr];
-}
-
-- (void)stopObservingRenderer:(AVSampleBufferAudioRenderer*)renderer
-{
-    ASSERT(_parent);
-    ASSERT(_renderers.contains(renderer));
-
-    [renderer removeObserver:self forKeyPath:@"status"];
-    _renderers.remove(_renderers.find(renderer));
-}
-
 - (void)observeValueForKeyPath:(NSString *)keyPath ofObject:(id)object change:(NSDictionary *)change context:(void *)context
 {
     UNUSED_PARAM(context);
@@ -167,19 +141,6 @@
             protectedSelf->_parent->layerStatusDidChange(layer.get(), status.get());
         });
 
-    } else if ([object isKindOfClass:getAVSampleBufferAudioRendererClass()]) {
-        RetainPtr<AVSampleBufferAudioRenderer> renderer = (AVSampleBufferAudioRenderer *)object;
-        RetainPtr<NSNumber> status = [change valueForKey:NSKeyValueChangeNewKey];
-
-        ASSERT(_renderers.contains(renderer.get()));
-        ASSERT([keyPath isEqualToString:@"status"]);
-
-        callOnMainThread([protectedSelf = WTFMove(protectedSelf), renderer = WTFMove(renderer), status = WTFMove(status)] {
-            if (!protectedSelf->_parent)
-                return;
-
-            protectedSelf->_parent->rendererStatusDidChange(renderer.get(), status.get());
-        });
     } else
         ASSERT_NOT_REACHED();
 }
@@ -196,11 +157,7 @@
     : m_player(player)
     , m_weakPtrFactory(this)
     , m_statusChangeListener(adoptNS([[WebAVSampleBufferStatusChangeListener alloc] initWithParent:this]))
-#if USE(RENDER_SYNCHRONIZER)
-    , m_synchronizer(adoptNS([allocAVSampleBufferRenderSynchronizerInstance() init]))
-#else
     , m_clock(Clock::create())
-#endif
 #if PLATFORM(MAC) && ENABLE(VIDEO_PRESENTATION_MODE)
     , m_videoFullscreenLayerManager(VideoFullscreenLayerManager::create())
 #endif
@@ -211,6 +168,9 @@
 MediaPlayerPrivateMediaStreamAVFObjC::~MediaPlayerPrivateMediaStreamAVFObjC()
 {
     LOG(Media, "MediaPlayerPrivateMediaStreamAVFObjC::~MediaPlayerPrivateMediaStreamAVFObjC(%p)", this);
+    for (const auto& track : m_audioTrackMap.values())
+        track->pause();
+
     if (m_mediaStreamPrivate) {
         m_mediaStreamPrivate->removeObserver(*this);
 
@@ -219,9 +179,6 @@
     }
 
     destroyLayer();
-#if USE(RENDER_SYNCHRONIZER)
-    destroyAudioRenderers();
-#endif
 
     m_audioTrackMap.clear();
     m_videoTrackMap.clear();
@@ -315,33 +272,6 @@
     return timelineOffset;
 }
 
-#if USE(RENDER_SYNCHRONIZER)
-void MediaPlayerPrivateMediaStreamAVFObjC::enqueueAudioSample(MediaStreamTrackPrivate& track, MediaSample& sample)
-{
-    ASSERT(m_audioTrackMap.contains(track.id()));
-    ASSERT(m_audioRenderers.contains(sample.trackID()));
-
-    auto audioTrack = m_audioTrackMap.get(track.id());
-    MediaTime timelineOffset = audioTrack->timelineOffset();
-    if (timelineOffset == MediaTime::invalidTime()) {
-        timelineOffset = calculateTimelineOffset(sample, rendererLatency);
-        audioTrack->setTimelineOffset(timelineOffset);
-        LOG(MediaCaptureSamples, "MediaPlayerPrivateMediaStreamAVFObjC::enqueueAudioSample: timeline offset for track %s set to %s", track.id().utf8().data(), toString(timelineOffset).utf8().data());
-    }
-
-    updateSampleTimes(sample, timelineOffset, "MediaPlayerPrivateMediaStreamAVFObjC::enqueueAudioSample");
-
-    auto renderer = m_audioRenderers.get(sample.trackID());
-    if (![renderer isReadyForMoreMediaData]) {
-        addSampleToPendingQueue(m_pendingAudioSampleQueue, sample);
-        requestNotificationWhenReadyForAudioData(sample.trackID());
-        return;
-    }
-
-    [renderer enqueueSampleBuffer:sample.platformSample().sample.cmSampleBuffer];
-}
-#endif
-
 void MediaPlayerPrivateMediaStreamAVFObjC::enqueueVideoSample(MediaStreamTrackPrivate& track, MediaSample& sample)
 {
     ASSERT(m_videoTrackMap.contains(track.id()));
@@ -400,102 +330,12 @@
     }];
 }
 
-#if USE(RENDER_SYNCHRONIZER)
-void MediaPlayerPrivateMediaStreamAVFObjC::requestNotificationWhenReadyForAudioData(AtomicString trackID)
-{
-    if (!m_audioRenderers.contains(trackID))
-        return;
-
-    auto renderer = m_audioRenderers.get(trackID);
-    [renderer requestMediaDataWhenReadyOnQueue:dispatch_get_main_queue() usingBlock:^ {
-        [renderer stopRequestingMediaData];
-
-        auto audioTrack = m_audioTrackMap.get(trackID);
-        while (!m_pendingAudioSampleQueue.isEmpty()) {
-            if (![renderer isReadyForMoreMediaData]) {
-                requestNotificationWhenReadyForAudioData(trackID);
-                return;
-            }
-
-            auto sample = m_pendingAudioSampleQueue.takeFirst();
-            enqueueAudioSample(audioTrack->streamTrack(), sample.get());
-        }
-    }];
-}
-
-void MediaPlayerPrivateMediaStreamAVFObjC::createAudioRenderer(AtomicString trackID)
-{
-    ASSERT(!m_audioRenderers.contains(trackID));
-    auto renderer = adoptNS([allocAVSampleBufferAudioRendererInstance() init]);
-    [renderer setAudioTimePitchAlgorithm:(m_player->preservesPitch() ? AVAudioTimePitchAlgorithmSpectral : AVAudioTimePitchAlgorithmVarispeed)];
-    m_audioRenderers.set(trackID, renderer);
-    [m_synchronizer addRenderer:renderer.get()];
-    [m_statusChangeListener beginObservingRenderer:renderer.get()];
-    if (m_audioRenderers.size() == 1)
-        renderingModeChanged();
-}
-
-void MediaPlayerPrivateMediaStreamAVFObjC::destroyAudioRenderer(AVSampleBufferAudioRenderer* renderer)
-{
-    [m_statusChangeListener stopObservingRenderer:renderer];
-    [renderer flush];
-    [renderer stopRequestingMediaData];
-
-    CMTime now = CMTimebaseGetTime([m_synchronizer timebase]);
-    [m_synchronizer removeRenderer:renderer atTime:now withCompletionHandler:^(BOOL) { }];
-}
-
-void MediaPlayerPrivateMediaStreamAVFObjC::destroyAudioRenderer(AtomicString trackID)
-{
-    if (!m_audioRenderers.contains(trackID))
-        return;
-
-    destroyAudioRenderer(m_audioRenderers.get(trackID).get());
-    m_audioRenderers.remove(trackID);
-    if (!m_audioRenderers.size())
-        renderingModeChanged();
-}
-
-void MediaPlayerPrivateMediaStreamAVFObjC::destroyAudioRenderers()
-{
-    m_pendingAudioSampleQueue.clear();
-    for (auto& renderer : m_audioRenderers.values())
-        destroyAudioRenderer(renderer.get());
-    m_audioRenderers.clear();
-}
-
 AudioSourceProvider* MediaPlayerPrivateMediaStreamAVFObjC::audioSourceProvider()
 {
     // FIXME: This should return a mix of all audio tracks - https://bugs.webkit.org/show_bug.cgi?id=160305
-    for (const auto& track : m_audioTrackMap.values()) {
-        if (track->streamTrack().ended() || !track->streamTrack().enabled() || track->streamTrack().muted())
-            continue;
-
-        return track->streamTrack().audioSourceProvider();
-    }
     return nullptr;
 }
-#endif
 
-void MediaPlayerPrivateMediaStreamAVFObjC::rendererStatusDidChange(AVSampleBufferAudioRenderer* renderer, NSNumber* status)
-{
-#if USE(RENDER_SYNCHRONIZER)
-    String trackID;
-    for (auto& pair : m_audioRenderers) {
-        if (pair.value == renderer) {
-            trackID = pair.key;
-            break;
-        }
-    }
-    ASSERT(!trackID.isEmpty());
-    if (status.integerValue == AVQueuedSampleBufferRenderingStatusRendering)
-        m_audioTrackMap.get(trackID)->setTimelineOffset(MediaTime::invalidTime());
-#else
-    UNUSED_PARAM(renderer);
-    UNUSED_PARAM(status);
-#endif
-}
-
 void MediaPlayerPrivateMediaStreamAVFObjC::layerStatusDidChange(AVSampleBufferDisplayLayer* layer, NSNumber* status)
 {
     if (status.integerValue != AVQueuedSampleBufferRenderingStatusRendering)
@@ -513,11 +353,6 @@
 {
     if (m_sampleBufferDisplayLayer)
         [m_sampleBufferDisplayLayer flush];
-
-#if USE(RENDER_SYNCHRONIZER)
-    for (auto& renderer : m_audioRenderers.values())
-        [renderer flush];
-#endif
 }
 
 bool MediaPlayerPrivateMediaStreamAVFObjC::shouldEnqueueVideoSampleBuffer() const
@@ -549,10 +384,6 @@
     m_sampleBufferDisplayLayer.get().backgroundColor = cachedCGColor(Color::black);
     [m_statusChangeListener beginObservingLayer:m_sampleBufferDisplayLayer.get()];
 
-#if USE(RENDER_SYNCHRONIZER)
-    [m_synchronizer addRenderer:m_sampleBufferDisplayLayer.get()];
-#endif
-
     renderingModeChanged();
     
 #if PLATFORM(MAC) && ENABLE(VIDEO_PRESENTATION_MODE)
@@ -570,13 +401,6 @@
         [m_statusChangeListener stopObservingLayer:m_sampleBufferDisplayLayer.get()];
         [m_sampleBufferDisplayLayer stopRequestingMediaData];
         [m_sampleBufferDisplayLayer flush];
-#if USE(RENDER_SYNCHRONIZER)
-        CMTime currentTime = CMTimebaseGetTime([m_synchronizer timebase]);
-        [m_synchronizer removeRenderer:m_sampleBufferDisplayLayer.get() atTime:currentTime withCompletionHandler:^(BOOL) {
-            // No-op.
-        }];
-        m_sampleBufferDisplayLayer = nullptr;
-#endif
     }
 
     renderingModeChanged();
@@ -700,14 +524,12 @@
         return;
 
     m_playing = true;
-#if USE(RENDER_SYNCHRONIZER)
-    if (!m_synchronizer.get().rate)
-        [m_synchronizer setRate:1 ]; // streamtime
-#else
     if (!m_clock->isRunning())
         m_clock->start();
-#endif
 
+    for (const auto& track : m_audioTrackMap.values())
+        track->play();
+
     m_haveEverPlayed = true;
     scheduleDeferredTask([this] {
         updateDisplayMode();
@@ -725,6 +547,9 @@
     m_pausedTime = currentMediaTime();
     m_playing = false;
 
+    for (const auto& track : m_audioTrackMap.values())
+        track->pause();
+
     updateDisplayMode();
     updatePausedImage();
     flushRenderers();
@@ -743,11 +568,8 @@
         return;
 
     m_volume = volume;
-
-#if USE(RENDER_SYNCHRONIZER)
-    for (auto& renderer : m_audioRenderers.values())
-        [renderer setVolume:volume];
-#endif
+    for (const auto& track : m_audioTrackMap.values())
+        track->setVolume(m_volume);
 }
 
 void MediaPlayerPrivateMediaStreamAVFObjC::setMuted(bool muted)
@@ -758,11 +580,6 @@
         return;
 
     m_muted = muted;
-    
-#if USE(RENDER_SYNCHRONIZER)
-    for (auto& renderer : m_audioRenderers.values())
-        [renderer setMuted:muted];
-#endif
 }
 
 bool MediaPlayerPrivateMediaStreamAVFObjC::hasVideo() const
@@ -796,11 +613,7 @@
 
 MediaTime MediaPlayerPrivateMediaStreamAVFObjC::streamTime() const
 {
-#if USE(RENDER_SYNCHRONIZER)
-    return toMediaTime(CMTimebaseGetTime([m_synchronizer timebase]));
-#else
     return MediaTime::createWithDouble(m_clock->currentTime());
-#endif
 }
 
 MediaPlayer::NetworkState MediaPlayerPrivateMediaStreamAVFObjC::networkState() const
@@ -925,19 +738,11 @@
     if (!m_playing || streamTime().toDouble() < 0)
         return;
 
-#if USE(RENDER_SYNCHRONIZER)
-    if (!CMTimebaseGetEffectiveRate([m_synchronizer timebase]))
-        return;
-#endif
-
     switch (track.type()) {
     case RealtimeMediaSource::None:
         // Do nothing.
         break;
     case RealtimeMediaSource::Audio:
-#if USE(RENDER_SYNCHRONIZER)
-        enqueueAudioSample(track, mediaSample);
-#endif
         break;
     case RealtimeMediaSource::Video:
         if (&track == m_activeVideoTrack.get())
@@ -1037,36 +842,23 @@
 {
     MediaStreamTrackPrivateVector currentTracks = m_mediaStreamPrivate->tracks();
 
-    Function<void(RefPtr<AudioTrackPrivateMediaStream>, int, TrackState)>  setAudioTrackState = [this](auto track, int index, TrackState state)
+    Function<void(RefPtr<AudioTrackPrivateMediaStreamCocoa>, int, TrackState)>  setAudioTrackState = [this](auto track, int index, TrackState state)
     {
         switch (state) {
         case TrackState::Remove:
-            track->streamTrack().removeObserver(*this);
             m_player->removeAudioTrack(*track);
-#if USE(RENDER_SYNCHRONIZER)
-            destroyAudioRenderer(track->id());
-#endif
             break;
         case TrackState::Add:
-            track->streamTrack().addObserver(*this);
             m_player->addAudioTrack(*track);
-#if USE(RENDER_SYNCHRONIZER)
-            createAudioRenderer(track->id());
-#endif
             break;
         case TrackState::Configure:
             track->setTrackIndex(index);
             bool enabled = track->streamTrack().enabled() && !track->streamTrack().muted();
             track->setEnabled(enabled);
-#if USE(RENDER_SYNCHRONIZER)
-            auto renderer = m_audioRenderers.get(track->id());
-            ASSERT(renderer);
-            renderer.get().muted = !enabled;
-#endif
             break;
         }
     };
-    updateTracksOfType(m_audioTrackMap, RealtimeMediaSource::Audio, currentTracks, &AudioTrackPrivateMediaStream::create, setAudioTrackState);
+    updateTracksOfType(m_audioTrackMap, RealtimeMediaSource::Audio, currentTracks, &AudioTrackPrivateMediaStreamCocoa::create, setAudioTrackState);
 
     Function<void(RefPtr<VideoTrackPrivateMediaStream>, int, TrackState)> setVideoTrackState = [&](auto track, int index, TrackState state)
     {

Modified: trunk/Source/WebCore/platform/mediastream/AudioTrackPrivateMediaStream.h (211727 => 211728)


--- trunk/Source/WebCore/platform/mediastream/AudioTrackPrivateMediaStream.h	2017-02-06 17:07:24 UTC (rev 211727)
+++ trunk/Source/WebCore/platform/mediastream/AudioTrackPrivateMediaStream.h	2017-02-06 17:22:27 UTC (rev 211728)
@@ -32,7 +32,7 @@
 
 namespace WebCore {
 
-class AudioTrackPrivateMediaStream final : public AudioTrackPrivate {
+class AudioTrackPrivateMediaStream : public AudioTrackPrivate {
     WTF_MAKE_NONCOPYABLE(AudioTrackPrivateMediaStream)
 public:
     static RefPtr<AudioTrackPrivateMediaStream> create(MediaStreamTrackPrivate& streamTrack)
@@ -53,7 +53,7 @@
     MediaTime timelineOffset() const { return m_timelineOffset; }
     void setTimelineOffset(const MediaTime& offset) { m_timelineOffset = offset; }
 
-private:
+protected:
     AudioTrackPrivateMediaStream(MediaStreamTrackPrivate& track)
         : m_streamTrack(track)
         , m_id(track.id())

Modified: trunk/Source/WebCore/platform/mediastream/MediaStreamTrackPrivate.cpp (211727 => 211728)


--- trunk/Source/WebCore/platform/mediastream/MediaStreamTrackPrivate.cpp	2017-02-06 17:07:24 UTC (rev 211727)
+++ trunk/Source/WebCore/platform/mediastream/MediaStreamTrackPrivate.cpp	2017-02-06 17:22:27 UTC (rev 211728)
@@ -52,12 +52,12 @@
     , m_isEnabled(true)
     , m_isEnded(false)
 {
-    m_source->addObserver(this);
+    m_source->addObserver(*this);
 }
 
 MediaStreamTrackPrivate::~MediaStreamTrackPrivate()
 {
-    m_source->removeObserver(this);
+    m_source->removeObserver(*this);
 }
 
 void MediaStreamTrackPrivate::addObserver(MediaStreamTrackPrivate::Observer& observer)
@@ -198,7 +198,7 @@
     return !m_isEnded;
 }
 
-void MediaStreamTrackPrivate::sourceHasMoreMediaData(MediaSample& mediaSample)
+void MediaStreamTrackPrivate::videoSampleAvailable(MediaSample& mediaSample)
 {
     mediaSample.setTrackID(id());
     for (auto& observer : m_observers)

Modified: trunk/Source/WebCore/platform/mediastream/MediaStreamTrackPrivate.h (211727 => 211728)


--- trunk/Source/WebCore/platform/mediastream/MediaStreamTrackPrivate.h	2017-02-06 17:07:24 UTC (rev 211727)
+++ trunk/Source/WebCore/platform/mediastream/MediaStreamTrackPrivate.h	2017-02-06 17:22:27 UTC (rev 211728)
@@ -100,7 +100,7 @@
     void sourceMutedChanged() final;
     void sourceSettingsChanged() final;
     bool preventSourceFromStopping() final;
-    void sourceHasMoreMediaData(MediaSample&) final;
+    void videoSampleAvailable(MediaSample&) final;
 
     Vector<Observer*> m_observers;
     Ref<RealtimeMediaSource> m_source;

Modified: trunk/Source/WebCore/platform/mediastream/RealtimeMediaSource.cpp (211727 => 211728)


--- trunk/Source/WebCore/platform/mediastream/RealtimeMediaSource.cpp	2017-02-06 17:07:24 UTC (rev 211727)
+++ trunk/Source/WebCore/platform/mediastream/RealtimeMediaSource.cpp	2017-02-06 17:22:27 UTC (rev 211728)
@@ -67,16 +67,16 @@
     m_remote = false;
 }
 
-void RealtimeMediaSource::addObserver(RealtimeMediaSource::Observer* observer)
+void RealtimeMediaSource::addObserver(RealtimeMediaSource::Observer& observer)
 {
-    m_observers.append(observer);
+    m_observers.append(&observer);
 }
 
-void RealtimeMediaSource::removeObserver(RealtimeMediaSource::Observer* observer)
+void RealtimeMediaSource::removeObserver(RealtimeMediaSource::Observer& observer)
 {
-    size_t pos = m_observers.find(observer);
-    if (pos != notFound)
-        m_observers.remove(pos);
+    m_observers.removeFirstMatching([&observer](auto* anObserver) {
+        return anObserver == &observer;
+    });
 
     if (!m_observers.size())
         stop();
@@ -112,12 +112,19 @@
     });
 }
 
-void RealtimeMediaSource::mediaDataUpdated(MediaSample& mediaSample)
+void RealtimeMediaSource::videoSampleAvailable(MediaSample& mediaSample)
 {
-    for (auto& observer : m_observers)
-        observer->sourceHasMoreMediaData(mediaSample);
+    ASSERT(isMainThread());
+    for (const auto& observer : m_observers)
+        observer->videoSampleAvailable(mediaSample);
 }
 
+void RealtimeMediaSource::audioSamplesAvailable(const MediaTime& time, void* audioData, const AudioStreamDescription& description, size_t numberOfFrames)
+{
+    for (const auto& observer : m_observers)
+        observer->audioSamplesAvailable(time, audioData, description, numberOfFrames);
+}
+
 bool RealtimeMediaSource::readonly() const
 {
     return m_readonly;
@@ -130,7 +137,7 @@
 
     m_stopped = true;
 
-    for (auto* observer : m_observers) {
+    for (const auto& observer : m_observers) {
         if (observer != callingObserver)
             observer->sourceStopped();
     }
@@ -143,7 +150,7 @@
     if (stopped())
         return;
 
-    for (auto* observer : m_observers) {
+    for (const auto& observer : m_observers) {
         if (observer->preventSourceFromStopping())
             return;
     }

Modified: trunk/Source/WebCore/platform/mediastream/RealtimeMediaSource.h (211727 => 211728)


--- trunk/Source/WebCore/platform/mediastream/RealtimeMediaSource.h	2017-02-06 17:07:24 UTC (rev 211727)
+++ trunk/Source/WebCore/platform/mediastream/RealtimeMediaSource.h	2017-02-06 17:22:27 UTC (rev 211728)
@@ -47,8 +47,13 @@
 #include <wtf/WeakPtr.h>
 #include <wtf/text/WTFString.h>
 
+namespace WTF {
+class MediaTime;
+}
+
 namespace WebCore {
 
+class AudioStreamDescription;
 class FloatRect;
 class GraphicsContext;
 class MediaStreamPrivate;
@@ -68,8 +73,11 @@
         // Observer state queries.
         virtual bool preventSourceFromStopping() = 0;
         
-        // Media data changes.
-        virtual void sourceHasMoreMediaData(MediaSample&) = 0;
+        // Called on the main thread.
+        virtual void videoSampleAvailable(MediaSample&) { }
+
+        // May be called on a background thread.
+        virtual void audioSamplesAvailable(const MediaTime&, void* /*audioData*/, const AudioStreamDescription&, size_t /*numberOfFrames*/) { }
     };
 
     virtual ~RealtimeMediaSource() { }
@@ -99,7 +107,9 @@
     virtual bool supportsConstraints(const MediaConstraints&, String&);
 
     virtual void settingsDidChange();
-    void mediaDataUpdated(MediaSample&);
+
+    void videoSampleAvailable(MediaSample&);
+    void audioSamplesAvailable(const MediaTime&, void*, const AudioStreamDescription&, size_t);
     
     bool stopped() const { return m_stopped; }
 
@@ -112,8 +122,8 @@
     virtual bool remote() const { return m_remote; }
     virtual void setRemote(bool remote) { m_remote = remote; }
 
-    void addObserver(Observer*);
-    void removeObserver(Observer*);
+    void addObserver(Observer&);
+    void removeObserver(Observer&);
 
     virtual void startProducingData() { }
     virtual void stopProducingData() { }

Modified: trunk/Source/WebCore/platform/mediastream/mac/AVAudioCaptureSource.h (211727 => 211728)


--- trunk/Source/WebCore/platform/mediastream/mac/AVAudioCaptureSource.h	2017-02-06 17:07:24 UTC (rev 211727)
+++ trunk/Source/WebCore/platform/mediastream/mac/AVAudioCaptureSource.h	2017-02-06 17:22:27 UTC (rev 211728)
@@ -30,8 +30,10 @@
 
 #include "AVMediaCaptureSource.h"
 #include "AudioCaptureSourceProviderObjC.h"
+#include "CAAudioStreamDescription.h"
 #include <wtf/Lock.h>
 
+typedef struct AudioBufferList AudioBufferList;
 typedef struct AudioStreamBasicDescription AudioStreamBasicDescription;
 typedef const struct opaqueCMFormatDescription *CMFormatDescriptionRef;
 
@@ -64,9 +66,11 @@
     AudioSourceProvider* audioSourceProvider() override;
 
     RetainPtr<AVCaptureConnection> m_audioConnection;
+    size_t m_listBufferSize { 0 };
+    std::unique_ptr<AudioBufferList> m_list;
 
     RefPtr<WebAudioSourceProviderAVFObjC> m_audioSourceProvider;
-    std::unique_ptr<AudioStreamBasicDescription> m_inputDescription;
+    std::unique_ptr<CAAudioStreamDescription> m_inputDescription;
     Vector<AudioSourceObserverObjC*> m_observers;
     Lock m_lock;
 };

Modified: trunk/Source/WebCore/platform/mediastream/mac/AVAudioCaptureSource.mm (211727 => 211728)


--- trunk/Source/WebCore/platform/mediastream/mac/AVAudioCaptureSource.mm	2017-02-06 17:07:24 UTC (rev 211727)
+++ trunk/Source/WebCore/platform/mediastream/mac/AVAudioCaptureSource.mm	2017-02-06 17:22:27 UTC (rev 211728)
@@ -28,7 +28,9 @@
 
 #if ENABLE(MEDIA_STREAM) && USE(AVFOUNDATION)
 
+#import "AudioSampleBufferList.h"
 #import "AudioSourceObserverObjC.h"
+#import "CAAudioStreamDescription.h"
 #import "Logging.h"
 #import "MediaConstraints.h"
 #import "MediaSampleAVFObjC.h"
@@ -91,7 +93,6 @@
 AVAudioCaptureSource::AVAudioCaptureSource(AVCaptureDeviceTypedef* device, const AtomicString& id)
     : AVMediaCaptureSource(device, id, RealtimeMediaSource::Audio)
 {
-    m_inputDescription = std::make_unique<AudioStreamBasicDescription>();
 }
     
 AVAudioCaptureSource::~AVAudioCaptureSource()
@@ -120,8 +121,8 @@
 {
     LockHolder lock(m_lock);
     m_observers.append(&observer);
-    if (m_inputDescription->mSampleRate)
-        observer.prepare(m_inputDescription.get());
+    if (m_inputDescription)
+        observer.prepare(&m_inputDescription->streamDescription());
 }
 
 void AVAudioCaptureSource::removeObserver(AudioSourceObserverObjC& observer)
@@ -162,7 +163,7 @@
         LockHolder lock(m_lock);
 
         m_audioConnection = nullptr;
-        m_inputDescription = std::make_unique<AudioStreamBasicDescription>();
+        m_inputDescription = nullptr;
 
         for (auto& observer : m_observers)
             observer->unprepare();
@@ -174,23 +175,6 @@
     m_audioSourceProvider = nullptr;
 }
 
-static bool operator==(const AudioStreamBasicDescription& a, const AudioStreamBasicDescription& b)
-{
-    return a.mSampleRate == b.mSampleRate
-        && a.mFormatID == b.mFormatID
-        && a.mFormatFlags == b.mFormatFlags
-        && a.mBytesPerPacket == b.mBytesPerPacket
-        && a.mFramesPerPacket == b.mFramesPerPacket
-        && a.mBytesPerFrame == b.mBytesPerFrame
-        && a.mChannelsPerFrame == b.mChannelsPerFrame
-        && a.mBitsPerChannel == b.mBitsPerChannel;
-}
-
-static bool operator!=(const AudioStreamBasicDescription& a, const AudioStreamBasicDescription& b)
-{
-    return !(a == b);
-}
-
 void AVAudioCaptureSource::captureOutputDidOutputSampleBufferFromConnection(AVCaptureOutputType*, CMSampleBufferRef sampleBuffer, AVCaptureConnectionType*)
 {
     if (muted())
@@ -200,11 +184,6 @@
     if (!formatDescription)
         return;
 
-    RetainPtr<CMSampleBufferRef> buffer = sampleBuffer;
-    scheduleDeferredTask([this, buffer] {
-        mediaDataUpdated(MediaSampleAVFObjC::create(buffer.get()));
-    });
-
     std::unique_lock<Lock> lock(m_lock, std::try_to_lock);
     if (!lock.owns_lock()) {
         // Failed to acquire the lock, just return instead of blocking.
@@ -211,16 +190,31 @@
         return;
     }
 
+    const AudioStreamBasicDescription* streamDescription = CMAudioFormatDescriptionGetStreamBasicDescription(formatDescription);
+    if (!m_inputDescription || *m_inputDescription != *streamDescription) {
+        m_inputDescription = std::make_unique<CAAudioStreamDescription>(*streamDescription);
+        m_listBufferSize = AudioSampleBufferList::audioBufferListSizeForStream(*m_inputDescription.get());
+        m_list = std::unique_ptr<AudioBufferList>(static_cast<AudioBufferList*>(::operator new (m_listBufferSize)));
+        memset(m_list.get(), 0, m_listBufferSize);
+        m_list->mNumberBuffers = m_inputDescription->numberOfChannelStreams();
+
+        if (!m_observers.isEmpty()) {
+            for (auto& observer : m_observers)
+                observer->prepare(streamDescription);
+        }
+    }
+
+    CMItemCount frameCount = CMSampleBufferGetNumSamples(sampleBuffer);
+    CMBlockBufferRef buffer = nil;
+    OSStatus err = CMSampleBufferGetAudioBufferListWithRetainedBlockBuffer(sampleBuffer, nullptr, m_list.get(), m_listBufferSize, kCFAllocatorSystemDefault, kCFAllocatorSystemDefault, kCMSampleBufferFlag_AudioBufferList_Assure16ByteAlignment, &buffer);
+    if (!err)
+        audioSamplesAvailable(toMediaTime(CMSampleBufferGetPresentationTimeStamp(sampleBuffer)), m_list->mBuffers[0].mData, CAAudioStreamDescription(*streamDescription), frameCount);
+    else
+        LOG_ERROR("AVAudioCaptureSource::captureOutputDidOutputSampleBufferFromConnection(%p) - CMSampleBufferGetAudioBufferListWithRetainedBlockBuffer returned error %d (%.4s)", this, (int)err, (char*)&err);
+
     if (m_observers.isEmpty())
         return;
 
-    const AudioStreamBasicDescription* streamDescription = CMAudioFormatDescriptionGetStreamBasicDescription(formatDescription);
-    if (*m_inputDescription != *streamDescription) {
-        m_inputDescription = std::make_unique<AudioStreamBasicDescription>(*streamDescription);
-        for (auto& observer : m_observers)
-            observer->prepare(m_inputDescription.get());
-    }
-
     for (auto& observer : m_observers)
         observer->process(formatDescription, sampleBuffer);
 }

Modified: trunk/Source/WebCore/platform/mediastream/mac/AVVideoCaptureSource.mm (211727 => 211728)


--- trunk/Source/WebCore/platform/mediastream/mac/AVVideoCaptureSource.mm	2017-02-06 17:07:24 UTC (rev 211727)
+++ trunk/Source/WebCore/platform/mediastream/mac/AVVideoCaptureSource.mm	2017-02-06 17:22:27 UTC (rev 211728)
@@ -423,7 +423,7 @@
     if (settingsChanged)
         settingsDidChange();
 
-    mediaDataUpdated(MediaSampleAVFObjC::create(m_buffer.get()));
+    videoSampleAvailable(MediaSampleAVFObjC::create(m_buffer.get()));
 }
 
 void AVVideoCaptureSource::captureOutputDidOutputSampleBufferFromConnection(AVCaptureOutputType*, CMSampleBufferRef sampleBuffer, AVCaptureConnectionType*)

Added: trunk/Source/WebCore/platform/mediastream/mac/AudioTrackPrivateMediaStreamCocoa.cpp (0 => 211728)


--- trunk/Source/WebCore/platform/mediastream/mac/AudioTrackPrivateMediaStreamCocoa.cpp	                        (rev 0)
+++ trunk/Source/WebCore/platform/mediastream/mac/AudioTrackPrivateMediaStreamCocoa.cpp	2017-02-06 17:22:27 UTC (rev 211728)
@@ -0,0 +1,254 @@
+/*
+ * Copyright (C) 2017 Apple Inc. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ *    notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ *    notice, this list of conditions and the following disclaimer in the
+ *    documentation and/or other materials provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY APPLE INC. AND ITS CONTRIBUTORS ``AS IS''
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
+ * THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+ * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR ITS CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+ * THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include "config.h"
+#include "AudioTrackPrivateMediaStreamCocoa.h"
+
+#include "AudioSampleBufferList.h"
+#include "AudioSampleDataSource.h"
+#include "AudioSession.h"
+#include "CAAudioStreamDescription.h"
+#include "Logging.h"
+
+#include "CoreMediaSoftLink.h"
+
+#if ENABLE(VIDEO_TRACK)
+
+namespace WebCore {
+
+const int renderBufferSize = 128;
+
+AudioTrackPrivateMediaStreamCocoa::AudioTrackPrivateMediaStreamCocoa(MediaStreamTrackPrivate& track)
+    : AudioTrackPrivateMediaStream(track)
+{
+    track.source().addObserver(*this);
+}
+
+AudioTrackPrivateMediaStreamCocoa::~AudioTrackPrivateMediaStreamCocoa()
+{
+    std::lock_guard<Lock> lock(m_internalStateLock);
+
+    streamTrack().source().removeObserver(*this);
+
+    if (m_dataSource)
+        m_dataSource->setPaused(true);
+
+    if (m_remoteIOUnit) {
+        AudioOutputUnitStop(m_remoteIOUnit);
+        AudioComponentInstanceDispose(m_remoteIOUnit);
+        m_remoteIOUnit = nullptr;
+    }
+
+    m_dataSource = nullptr;
+    m_inputDescription = nullptr;
+    m_outputDescription = nullptr;
+}
+
+void AudioTrackPrivateMediaStreamCocoa::playInternal()
+{
+    ASSERT(m_internalStateLock.isHeld());
+
+    if (m_isPlaying)
+        return;
+
+    if (m_remoteIOUnit) {
+        ASSERT(m_dataSource);
+        m_dataSource->setPaused(false);
+        if (!AudioOutputUnitStart(m_remoteIOUnit))
+            m_isPlaying = true;
+    }
+
+    m_autoPlay = !m_isPlaying;
+}
+
+void AudioTrackPrivateMediaStreamCocoa::play()
+{
+    std::lock_guard<Lock> lock(m_internalStateLock);
+    playInternal();
+}
+
+void AudioTrackPrivateMediaStreamCocoa::pause()
+{
+    std::lock_guard<Lock> lock(m_internalStateLock);
+
+    m_isPlaying = false;
+    m_autoPlay = false;
+
+    if (m_remoteIOUnit)
+        AudioOutputUnitStop(m_remoteIOUnit);
+    if (m_dataSource)
+        m_dataSource->setPaused(true);
+}
+
+void AudioTrackPrivateMediaStreamCocoa::setVolume(float volume)
+{
+    m_volume = volume;
+    if (m_dataSource)
+        m_dataSource->setVolume(m_volume);
+}
+
+OSStatus AudioTrackPrivateMediaStreamCocoa::setupAudioUnit()
+{
+    ASSERT(m_internalStateLock.isHeld());
+
+    AudioComponentDescription ioUnitDescription { kAudioUnitType_Output, 0, kAudioUnitManufacturer_Apple, 0, 0 };
+#if PLATFORM(IOS)
+    ioUnitDescription.componentSubType = kAudioUnitSubType_RemoteIO;
+#else
+    ioUnitDescription.componentSubType = kAudioUnitSubType_DefaultOutput;
+#endif
+
+    AudioComponent ioComponent = AudioComponentFindNext(nullptr, &ioUnitDescription);
+    ASSERT(ioComponent);
+    if (!ioComponent) {
+        LOG(Media, "AudioTrackPrivateMediaStreamCocoa::setupAudioUnit(%p) unable to find remote IO unit component", this);
+        return -1;
+    }
+
+    OSStatus err = AudioComponentInstanceNew(ioComponent, &m_remoteIOUnit);
+    if (err) {
+        LOG(Media, "AudioTrackPrivateMediaStreamCocoa::setupAudioUnit(%p) unable to open vpio unit, error %d (%.4s)", this, (int)err, (char*)&err);
+        return -1;
+    }
+
+#if PLATFORM(IOS)
+    UInt32 param = 1;
+    err = AudioUnitSetProperty(m_remoteIOUnit, kAudioOutputUnitProperty_EnableIO, kAudioUnitScope_Output, 0, &param, sizeof(param));
+    if (err) {
+        LOG(Media, "AudioTrackPrivateMediaStreamCocoa::setupAudioUnit(%p) unable to enable vpio unit output, error %d (%.4s)", this, (int)err, (char*)&err);
+        return err;
+    }
+#endif
+
+    AURenderCallbackStruct callback = { inputProc, this };
+    err = AudioUnitSetProperty(m_remoteIOUnit, kAudioUnitProperty_SetRenderCallback, kAudioUnitScope_Global, 0, &callback, sizeof(callback));
+    if (err) {
+        LOG(Media, "AudioTrackPrivateMediaStreamCocoa::setupAudioUnit(%p) unable to set vpio unit speaker proc, error %d (%.4s)", this, (int)err, (char*)&err);
+        return err;
+    }
+
+    AudioStreamBasicDescription outputDescription = { };
+    UInt32 size = sizeof(outputDescription);
+    err  = AudioUnitGetProperty(m_remoteIOUnit, kAudioUnitProperty_StreamFormat, kAudioUnitScope_Input, 0, &outputDescription, &size);
+    if (err) {
+        LOG(Media, "AudioTrackPrivateMediaStreamCocoa::setupAudioUnits(%p) unable to get input stream format, error %d (%.4s)", this, (int)err, (char*)&err);
+        return err;
+    }
+
+    outputDescription = m_inputDescription->streamDescription();
+    outputDescription.mSampleRate = AudioSession::sharedSession().sampleRate();
+
+    err = AudioUnitSetProperty(m_remoteIOUnit, kAudioUnitProperty_StreamFormat, kAudioUnitScope_Input, 0, &outputDescription, sizeof(outputDescription));
+    if (err) {
+        LOG(Media, "AudioTrackPrivateMediaStreamCocoa::setupAudioUnits(%p) unable to set input stream format, error %d (%.4s)", this, (int)err, (char*)&err);
+        return err;
+    }
+    m_outputDescription = std::make_unique<CAAudioStreamDescription>(outputDescription);
+
+    err = AudioUnitInitialize(m_remoteIOUnit);
+    if (err) {
+        LOG(Media, "AudioTrackPrivateMediaStreamCocoa::setupAudioUnits(%p) AudioUnitInitialize() failed, error %d (%.4s)", this, (int)err, (char*)&err);
+        return err;
+    }
+
+    AudioSession::sharedSession().setPreferredBufferSize(renderBufferSize);
+
+    return err;
+}
+
+void AudioTrackPrivateMediaStreamCocoa::audioSamplesAvailable(const MediaTime& sampleTime, void* audioData, const AudioStreamDescription& description, size_t sampleCount)
+{
+    ASSERT(description.platformDescription().type == PlatformDescription::CAAudioStreamBasicType);
+
+    std::lock_guard<Lock> lock(m_internalStateLock);
+
+    CAAudioStreamDescription streamDescription = toCAAudioStreamDescription(description);
+    if (!m_inputDescription || *m_inputDescription != description) {
+
+        m_inputDescription = nullptr;
+        m_outputDescription = nullptr;
+
+        if (m_remoteIOUnit) {
+            AudioOutputUnitStop(m_remoteIOUnit);
+            AudioComponentInstanceDispose(m_remoteIOUnit);
+            m_remoteIOUnit = nullptr;
+        }
+
+        m_inputDescription = std::make_unique<CAAudioStreamDescription>(streamDescription);
+        if (setupAudioUnit()) {
+            m_inputDescription = nullptr;
+            return;
+        }
+
+        if (!m_dataSource)
+            m_dataSource = AudioSampleDataSource::create(description.sampleRate() * 2);
+        if (!m_dataSource)
+            return;
+
+        if (m_dataSource->setInputFormat(streamDescription))
+            return;
+        if (m_dataSource->setOutputFormat(*m_outputDescription.get()))
+            return;
+
+        m_dataSource->setVolume(m_volume);
+    }
+
+    m_dataSource->pushSamples(m_inputDescription->streamDescription(), sampleTime, audioData, sampleCount);
+
+    if (m_autoPlay)
+        playInternal();
+}
+
+void AudioTrackPrivateMediaStreamCocoa::sourceStopped()
+{
+    pause();
+}
+
+OSStatus AudioTrackPrivateMediaStreamCocoa::render(UInt32 sampleCount, AudioBufferList& ioData, UInt32 /*inBusNumber*/, const AudioTimeStamp& timeStamp, AudioUnitRenderActionFlags& actionFlags)
+{
+    std::unique_lock<Lock> lock(m_internalStateLock, std::try_to_lock);
+    if (!lock.owns_lock())
+        return kAudioConverterErr_UnspecifiedError;
+
+    if (!m_isPlaying || m_muted || !m_dataSource || streamTrack().muted() || streamTrack().ended() || !streamTrack().enabled()) {
+        AudioSampleBufferList::zeroABL(ioData, static_cast<size_t>(sampleCount));
+        actionFlags = kAudioUnitRenderAction_OutputIsSilence;
+        return 0;
+    }
+
+    m_dataSource->pullSamples(ioData, static_cast<size_t>(sampleCount), timeStamp.mSampleTime, timeStamp.mHostTime, AudioSampleDataSource::Copy);
+
+    return 0;
+}
+
+OSStatus AudioTrackPrivateMediaStreamCocoa::inputProc(void* userData, AudioUnitRenderActionFlags* actionFlags, const AudioTimeStamp* timeStamp, UInt32 inBusNumber, UInt32 sampleCount, AudioBufferList* ioData)
+{
+    return static_cast<AudioTrackPrivateMediaStreamCocoa*>(userData)->render(sampleCount, *ioData, inBusNumber, *timeStamp, *actionFlags);
+}
+
+
+} // namespace WebCore
+
+#endif // ENABLE(VIDEO_TRACK)

Added: trunk/Source/WebCore/platform/mediastream/mac/AudioTrackPrivateMediaStreamCocoa.h (0 => 211728)


--- trunk/Source/WebCore/platform/mediastream/mac/AudioTrackPrivateMediaStreamCocoa.h	                        (rev 0)
+++ trunk/Source/WebCore/platform/mediastream/mac/AudioTrackPrivateMediaStreamCocoa.h	2017-02-06 17:22:27 UTC (rev 211728)
@@ -0,0 +1,94 @@
+/*
+ * Copyright (C) 2017 Apple Inc. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ *    notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ *    notice, this list of conditions and the following disclaimer in the
+ *    documentation and/or other materials provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY APPLE INC. AND ITS CONTRIBUTORS ``AS IS''
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
+ * THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+ * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR ITS CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+ * THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#pragma once
+
+#if ENABLE(VIDEO_TRACK) && ENABLE(MEDIA_STREAM)
+
+#include "AudioSourceObserverObjC.h"
+#include "AudioTrackPrivateMediaStream.h"
+#include <AudioToolbox/AudioToolbox.h>
+#include <CoreAudio/CoreAudioTypes.h>
+#include <wtf/Lock.h>
+
+namespace WebCore {
+
+class AudioSampleDataSource;
+class AudioSampleBufferList;
+class CAAudioStreamDescription;
+
+class AudioTrackPrivateMediaStreamCocoa final : public AudioTrackPrivateMediaStream, private RealtimeMediaSource::Observer {
+    WTF_MAKE_NONCOPYABLE(AudioTrackPrivateMediaStreamCocoa)
+public:
+    static RefPtr<AudioTrackPrivateMediaStreamCocoa> create(MediaStreamTrackPrivate& streamTrack)
+    {
+        return adoptRef(*new AudioTrackPrivateMediaStreamCocoa(streamTrack));
+    }
+
+    void play();
+    void pause();
+    bool isPlaying() { return m_isPlaying; }
+
+    void setVolume(float);
+    float volume() const { return m_volume; }
+
+    void setMuted(bool muted) { m_muted = muted; }
+    bool muted() const { return m_muted; }
+
+private:
+    AudioTrackPrivateMediaStreamCocoa(MediaStreamTrackPrivate&);
+    ~AudioTrackPrivateMediaStreamCocoa();
+
+    // RealtimeMediaSource::Observer
+    void sourceStopped() final;
+    void sourceMutedChanged()  final { }
+    void sourceSettingsChanged() final { }
+    bool preventSourceFromStopping() final { return false; }
+    void audioSamplesAvailable(const MediaTime&, void*, const AudioStreamDescription&, size_t) final;
+
+    static OSStatus inputProc(void*, AudioUnitRenderActionFlags*, const AudioTimeStamp*, UInt32 inBusNumber, UInt32 numberOfFrames, AudioBufferList*);
+    OSStatus render(UInt32 sampleCount, AudioBufferList&, UInt32 inBusNumber, const AudioTimeStamp&, AudioUnitRenderActionFlags&);
+
+    OSStatus setupAudioUnit();
+    void cleanup();
+    void zeroBufferList(AudioBufferList&, size_t);
+    void playInternal();
+
+    AudioComponentInstance m_remoteIOUnit { nullptr };
+    std::unique_ptr<CAAudioStreamDescription> m_inputDescription;
+    std::unique_ptr<CAAudioStreamDescription> m_outputDescription;
+
+    RefPtr<AudioSampleDataSource> m_dataSource;
+
+    Lock m_internalStateLock;
+    float m_volume { 1 };
+    bool m_isPlaying { false };
+    bool m_autoPlay { false };
+    bool m_muted { false };
+};
+
+}
+
+#endif // ENABLE(VIDEO_TRACK) && ENABLE(MEDIA_STREAM)

Modified: trunk/Source/WebCore/platform/mediastream/mac/MockRealtimeAudioSourceMac.h (211727 => 211728)


--- trunk/Source/WebCore/platform/mediastream/mac/MockRealtimeAudioSourceMac.h	2017-02-06 17:07:24 UTC (rev 211727)
+++ trunk/Source/WebCore/platform/mediastream/mac/MockRealtimeAudioSourceMac.h	2017-02-06 17:22:27 UTC (rev 211728)
@@ -72,7 +72,7 @@
 
     uint32_t m_maximiumFrameCount;
     uint32_t m_sampleRate { 44100 };
-    double m_bytesPerFrame { sizeof(Float32) };
+    uint64_t m_bytesEmitted { 0 };
 
     RetainPtr<CMFormatDescriptionRef> m_formatDescription;
     AudioStreamBasicDescription m_streamFormat;

Modified: trunk/Source/WebCore/platform/mediastream/mac/MockRealtimeAudioSourceMac.mm (211727 => 211728)


--- trunk/Source/WebCore/platform/mediastream/mac/MockRealtimeAudioSourceMac.mm	2017-02-06 17:07:24 UTC (rev 211727)
+++ trunk/Source/WebCore/platform/mediastream/mac/MockRealtimeAudioSourceMac.mm	2017-02-06 17:22:27 UTC (rev 211728)
@@ -32,6 +32,8 @@
 #import "MockRealtimeAudioSourceMac.h"
 
 #if ENABLE(MEDIA_STREAM)
+#import "AudioSampleBufferList.h"
+#import "CAAudioStreamDescription.h"
 #import "MediaConstraints.h"
 #import "MediaSampleAVFObjC.h"
 #import "NotImplemented.h"
@@ -49,6 +51,11 @@
 
 namespace WebCore {
 
+static inline size_t alignTo16Bytes(size_t size)
+{
+    return (size + 15) & ~15;
+}
+
 RefPtr<MockRealtimeAudioSource> MockRealtimeAudioSource::create(const String& name, const MediaConstraints* constraints)
 {
     auto source = adoptRef(new MockRealtimeAudioSourceMac(name));
@@ -92,7 +99,11 @@
 {
     ASSERT(m_formatDescription);
 
-    CMTime startTime = CMTimeMake(elapsedTime() * m_sampleRate, m_sampleRate);
+    CMTime startTime = CMTimeMake(m_bytesEmitted, m_sampleRate);
+    m_bytesEmitted += frameCount;
+
+    audioSamplesAvailable(toMediaTime(startTime), m_audioBufferList->mBuffers[0].mData, CAAudioStreamDescription(m_streamFormat), frameCount);
+
     CMSampleBufferRef sampleBuffer;
     OSStatus result = CMAudioSampleBufferCreateWithPacketDescriptions(nullptr, nullptr, true, nullptr, nullptr, m_formatDescription.get(), frameCount, startTime, nullptr, &sampleBuffer);
     ASSERT(sampleBuffer);
@@ -108,9 +119,7 @@
     result = CMSampleBufferSetDataReady(sampleBuffer);
     ASSERT(!result);
 
-    mediaDataUpdated(MediaSampleAVFObjC::create(sampleBuffer));
-
-    for (auto& observer : m_observers)
+    for (const auto& observer : m_observers)
         observer->process(m_formatDescription.get(), sampleBuffer);
 }
 
@@ -119,10 +128,24 @@
     m_maximiumFrameCount = WTF::roundUpToPowerOfTwo(renderInterval() / 1000. * m_sampleRate * 2);
     ASSERT(m_maximiumFrameCount);
 
+    const int bytesPerFloat = sizeof(Float32);
+    const int bitsPerByte = 8;
+    int channelCount = 1;
+    m_streamFormat = { };
+    m_streamFormat.mSampleRate = m_sampleRate;
+    m_streamFormat.mFormatID = kAudioFormatLinearPCM;
+    m_streamFormat.mFormatFlags = kAudioFormatFlagsNativeFloatPacked;
+    m_streamFormat.mBytesPerPacket = bytesPerFloat * channelCount;
+    m_streamFormat.mFramesPerPacket = 1;
+    m_streamFormat.mBytesPerFrame = bytesPerFloat * channelCount;
+    m_streamFormat.mChannelsPerFrame = channelCount;
+    m_streamFormat.mBitsPerChannel = bitsPerByte * bytesPerFloat;
+
     // AudioBufferList is a variable-length struct, so create on the heap with a generic new() operator
     // with a custom size, and initialize the struct manually.
-    uint32_t bufferDataSize = m_bytesPerFrame * m_maximiumFrameCount;
-    uint32_t baseSize = offsetof(AudioBufferList, mBuffers) + sizeof(AudioBuffer);
+    uint32_t bufferDataSize = m_streamFormat.mBytesPerFrame * m_maximiumFrameCount;
+    uint32_t baseSize = AudioSampleBufferList::audioBufferListSizeForStream(m_streamFormat);
+
     uint64_t bufferListSize = baseSize + bufferDataSize;
     ASSERT(bufferListSize <= SIZE_MAX);
     if (bufferListSize > SIZE_MAX)
@@ -132,24 +155,9 @@
     m_audioBufferList = std::unique_ptr<AudioBufferList>(static_cast<AudioBufferList*>(::operator new (m_audioBufferListBufferSize)));
     memset(m_audioBufferList.get(), 0, m_audioBufferListBufferSize);
 
-    m_audioBufferList->mNumberBuffers = 1;
-    auto& buffer = m_audioBufferList->mBuffers[0];
-    buffer.mNumberChannels = 1;
-    buffer.mDataByteSize = bufferDataSize;
-    buffer.mData = reinterpret_cast<uint8_t*>(m_audioBufferList.get()) + baseSize;
+    uint8_t* bufferData = reinterpret_cast<uint8_t*>(m_audioBufferList.get()) + baseSize;
+    AudioSampleBufferList::configureBufferListForStream(*m_audioBufferList.get(), m_streamFormat, bufferData, bufferDataSize);
 
-    const int bytesPerFloat = sizeof(Float32);
-    const int bitsPerByte = 8;
-    m_streamFormat = { };
-    m_streamFormat.mSampleRate = m_sampleRate;
-    m_streamFormat.mFormatID = kAudioFormatLinearPCM;
-    m_streamFormat.mFormatFlags = kAudioFormatFlagsNativeFloatPacked | kAudioFormatFlagIsNonInterleaved;
-    m_streamFormat.mBytesPerPacket = bytesPerFloat;
-    m_streamFormat.mFramesPerPacket = 1;
-    m_streamFormat.mBytesPerFrame = bytesPerFloat;
-    m_streamFormat.mChannelsPerFrame = 1;
-    m_streamFormat.mBitsPerChannel = bitsPerByte * bytesPerFloat;
-
     CMFormatDescriptionRef formatDescription;
     CMAudioFormatDescriptionCreate(NULL, &m_streamFormat, 0, NULL, 0, NULL, NULL, &formatDescription);
     m_formatDescription = adoptCF(formatDescription);
@@ -162,11 +170,12 @@
 {
     static double theta;
     static const double frequencies[] = { 1500., 500. };
+    static const double tau = 2 * M_PI;
 
     if (!m_audioBufferList)
         reconfigure();
 
-    uint32_t totalFrameCount = delta * m_sampleRate;
+    uint32_t totalFrameCount = alignTo16Bytes(delta * m_sampleRate);
     uint32_t frameCount = std::min(totalFrameCount, m_maximiumFrameCount);
     double elapsed = elapsedTime();
     while (frameCount) {
@@ -180,7 +189,7 @@
             case 0:
             case 14: {
                 int index = fmod(elapsed, 1) * 2;
-                increment = 2.0 * M_PI * frequencies[index] / m_sampleRate;
+                increment = tau * frequencies[index] / m_sampleRate;
                 silent = false;
                 break;
             }
@@ -193,10 +202,12 @@
                 continue;
             }
 
-            buffer[frame] = sin(theta) * 0.25;
-            theta += increment;
-            if (theta > 2.0 * M_PI)
-                theta -= 2.0 * M_PI;
+            float tone = sin(theta) * 0.25;
+            buffer[frame] = tone;
+
+                theta += increment;
+            if (theta > tau)
+                theta -= tau;
             elapsed += 1 / m_sampleRate;
         }
 

Modified: trunk/Source/WebCore/platform/mediastream/mac/MockRealtimeVideoSourceMac.mm (211727 => 211728)


--- trunk/Source/WebCore/platform/mediastream/mac/MockRealtimeVideoSourceMac.mm	2017-02-06 17:07:24 UTC (rev 211727)
+++ trunk/Source/WebCore/platform/mediastream/mac/MockRealtimeVideoSourceMac.mm	2017-02-06 17:22:27 UTC (rev 211728)
@@ -125,7 +125,7 @@
     auto pixelBuffer = pixelBufferFromCGImage(imageBuffer()->copyImage()->nativeImage().get());
     auto sampleBuffer = CMSampleBufferFromPixelBuffer(pixelBuffer.get());
     
-    mediaDataUpdated(MediaSampleAVFObjC::create(sampleBuffer.get()));
+    videoSampleAvailable(MediaSampleAVFObjC::create(sampleBuffer.get()));
 }
 
 } // namespace WebCore

Modified: trunk/Source/WebCore/platform/mediastream/mac/WebAudioSourceProviderAVFObjC.h (211727 => 211728)


--- trunk/Source/WebCore/platform/mediastream/mac/WebAudioSourceProviderAVFObjC.h	2017-02-06 17:07:24 UTC (rev 211727)
+++ trunk/Source/WebCore/platform/mediastream/mac/WebAudioSourceProviderAVFObjC.h	2017-02-06 17:22:27 UTC (rev 211728)
@@ -23,8 +23,7 @@
  * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
  */
 
-#ifndef WebAudioSourceProviderAVFObjC_h
-#define WebAudioSourceProviderAVFObjC_h
+#pragma once
 
 #if ENABLE(WEB_AUDIO) && ENABLE(MEDIA_STREAM)
 
@@ -31,6 +30,7 @@
 #include "AudioCaptureSourceProviderObjC.h"
 #include "AudioSourceObserverObjC.h"
 #include "AudioSourceProvider.h"
+#include <wtf/Lock.h>
 #include <wtf/RefCounted.h>
 #include <wtf/RefPtr.h>
 
@@ -68,11 +68,11 @@
     std::unique_ptr<AudioStreamBasicDescription> m_outputDescription;
     std::unique_ptr<CARingBuffer> m_ringBuffer;
 
-    uint64_t m_writeAheadCount { 0 };
     uint64_t m_writeCount { 0 };
     uint64_t m_readCount { 0 };
     AudioSourceProviderClient* m_client { nullptr };
     AudioCaptureSourceProviderObjC* m_captureSource { nullptr };
+    Lock m_mutex;
     bool m_connected { false };
 };
     
@@ -79,5 +79,3 @@
 }
 
 #endif
-
-#endif

Modified: trunk/Source/WebCore/platform/mediastream/mac/WebAudioSourceProviderAVFObjC.mm (211727 => 211728)


--- trunk/Source/WebCore/platform/mediastream/mac/WebAudioSourceProviderAVFObjC.mm	2017-02-06 17:07:24 UTC (rev 211727)
+++ trunk/Source/WebCore/platform/mediastream/mac/WebAudioSourceProviderAVFObjC.mm	2017-02-06 17:22:27 UTC (rev 211728)
@@ -1,5 +1,5 @@
 /*
- * Copyright (C) 2015 Apple Inc. All rights reserved.
+ * Copyright (C) 2015-2017 Apple Inc. All rights reserved.
  *
  * Redistribution and use in source and binary forms, with or without
  * modification, are permitted provided that the following conditions
@@ -65,6 +65,8 @@
 
 WebAudioSourceProviderAVFObjC::~WebAudioSourceProviderAVFObjC()
 {
+    std::lock_guard<Lock> lock(m_mutex);
+
     if (m_converter) {
         // FIXME: make and use a smart pointer for AudioConverter
         AudioConverterDispose(m_converter);
@@ -76,7 +78,8 @@
 
 void WebAudioSourceProviderAVFObjC::provideInput(AudioBus* bus, size_t framesToProcess)
 {
-    if (!m_ringBuffer) {
+    std::unique_lock<Lock> lock(m_mutex, std::try_to_lock);
+    if (!lock.owns_lock() || !m_ringBuffer) {
         bus->zero();
         return;
     }
@@ -85,12 +88,12 @@
     uint64_t endFrame = 0;
     m_ringBuffer->getCurrentFrameBounds(startFrame, endFrame);
 
-    if (m_writeCount <= m_readCount + m_writeAheadCount) {
+    if (m_writeCount <= m_readCount) {
         bus->zero();
         return;
     }
 
-    uint64_t framesAvailable = endFrame - (m_readCount + m_writeAheadCount);
+    uint64_t framesAvailable = endFrame - m_readCount;
     if (framesAvailable < framesToProcess) {
         framesToProcess = static_cast<size_t>(framesAvailable);
         bus->zero();
@@ -136,6 +139,8 @@
 
 void WebAudioSourceProviderAVFObjC::prepare(const AudioStreamBasicDescription* format)
 {
+    std::lock_guard<Lock> lock(m_mutex);
+
     LOG(Media, "WebAudioSourceProviderAVFObjC::prepare(%p)", this);
 
     m_inputDescription = std::make_unique<AudioStreamBasicDescription>(*format);
@@ -200,6 +205,8 @@
 
 void WebAudioSourceProviderAVFObjC::unprepare()
 {
+    std::lock_guard<Lock> lock(m_mutex);
+
     m_inputDescription = nullptr;
     m_outputDescription = nullptr;
     m_ringBuffer = nullptr;
@@ -214,6 +221,8 @@
 
 void WebAudioSourceProviderAVFObjC::process(CMFormatDescriptionRef, CMSampleBufferRef sampleBuffer)
 {
+    std::lock_guard<Lock> lock(m_mutex);
+
     if (!m_ringBuffer)
         return;
 

Modified: trunk/Source/WebCore/platform/mock/MockRealtimeAudioSource.h (211727 => 211728)


--- trunk/Source/WebCore/platform/mock/MockRealtimeAudioSource.h	2017-02-06 17:07:24 UTC (rev 211727)
+++ trunk/Source/WebCore/platform/mock/MockRealtimeAudioSource.h	2017-02-06 17:22:27 UTC (rev 211728)
@@ -56,7 +56,7 @@
     virtual void render(double) { }
 
     double elapsedTime();
-    static int renderInterval() { return 125; }
+    static int renderInterval() { return 60; }
 
 private:
 
_______________________________________________
webkit-changes mailing list
webkit-changes@lists.webkit.org
https://lists.webkit.org/mailman/listinfo/webkit-changes

Reply via email to