Diff
Modified: trunk/Source/WebKit/ChangeLog (289037 => 289038)
--- trunk/Source/WebKit/ChangeLog 2022-02-03 10:07:16 UTC (rev 289037)
+++ trunk/Source/WebKit/ChangeLog 2022-02-03 10:13:42 UTC (rev 289038)
@@ -1,3 +1,19 @@
+2022-02-03 Youenn Fablet <you...@apple.com>
+
+ SampleBufferDisplayLayer should not need to create IOSurfaces
+ https://bugs.webkit.org/show_bug.cgi?id=235954
+
+ Reviewed by Eric Carlson.
+
+ Make use of SharedVideoFrameReader and SharedVideoFrameWriter between SampleBufferDisplayLayer and RemoteSampleBufferDisplayLayer.
+ Manually tested.
+
+ * GPUProcess/webrtc/RemoteSampleBufferDisplayLayer.cpp:
+ * GPUProcess/webrtc/RemoteSampleBufferDisplayLayer.h:
+ * GPUProcess/webrtc/RemoteSampleBufferDisplayLayer.messages.in:
+ * WebProcess/GPU/webrtc/SampleBufferDisplayLayer.cpp:
+ * WebProcess/GPU/webrtc/SampleBufferDisplayLayer.h:
+
2022-02-03 Carlos Garcia Campos <cgar...@igalia.com>
Unreviewed. Update OptionsGTK.cmake and NEWS for 2.35.2 release
Modified: trunk/Source/WebKit/GPUProcess/webrtc/RemoteSampleBufferDisplayLayer.cpp (289037 => 289038)
--- trunk/Source/WebKit/GPUProcess/webrtc/RemoteSampleBufferDisplayLayer.cpp 2022-02-03 10:07:16 UTC (rev 289037)
+++ trunk/Source/WebKit/GPUProcess/webrtc/RemoteSampleBufferDisplayLayer.cpp 2022-02-03 10:13:42 UTC (rev 289038)
@@ -113,17 +113,27 @@
m_sampleBufferDisplayLayer->pause();
}
-void RemoteSampleBufferDisplayLayer::enqueueSample(WebCore::RemoteVideoSample&& remoteSample)
+void RemoteSampleBufferDisplayLayer::enqueueSample(RemoteVideoSample&& remoteSample)
{
- if (!m_imageTransferSession || m_imageTransferSession->pixelFormat() != remoteSample.videoFormat())
- m_imageTransferSession = ImageTransferSessionVT::create(remoteSample.videoFormat());
+ RefPtr<MediaSample> sample;
+ if (!remoteSample.surface()) {
+ auto pixelBuffer = m_sharedVideoFrameReader.read();
+ if (!pixelBuffer)
+ return;
- ASSERT(m_imageTransferSession);
- if (!m_imageTransferSession)
- return;
+ sample = MediaSampleAVFObjC::createImageSample(WTFMove(pixelBuffer), remoteSample.rotation(), remoteSample.mirrored());
+ sample->setTimestamps(remoteSample.time(), MediaTime { });
+ } else {
+ if (!m_imageTransferSession || m_imageTransferSession->pixelFormat() != remoteSample.videoFormat())
+ m_imageTransferSession = ImageTransferSessionVT::create(remoteSample.videoFormat());
- auto sample = m_imageTransferSession->createMediaSample(remoteSample);
+ ASSERT(m_imageTransferSession);
+ if (!m_imageTransferSession)
+ return;
+ sample = m_imageTransferSession->createMediaSample(remoteSample);
+ }
+
ASSERT(sample);
if (!sample)
return;
@@ -147,6 +157,20 @@
send(Messages::SampleBufferDisplayLayer::SetDidFail { m_sampleBufferDisplayLayer->didFail() });
}
+void RemoteSampleBufferDisplayLayer::setSharedVideoFrameSemaphore(IPC::Semaphore&& semaphore)
+{
+ m_sharedVideoFrameReader.setSemaphore(WTFMove(semaphore));
}
+void RemoteSampleBufferDisplayLayer::setSharedVideoFrameMemory(const SharedMemory::IPCHandle& ipcHandle)
+{
+ auto memory = SharedMemory::map(ipcHandle.handle, SharedMemory::Protection::ReadOnly);
+ if (!memory)
+ return;
+
+ m_sharedVideoFrameReader.setSharedMemory(memory.releaseNonNull());
+}
+
+}
+
#endif // PLATFORM(COCOA) && ENABLE(GPU_PROCESS) && ENABLE(MEDIA_STREAM)
Modified: trunk/Source/WebKit/GPUProcess/webrtc/RemoteSampleBufferDisplayLayer.h (289037 => 289038)
--- trunk/Source/WebKit/GPUProcess/webrtc/RemoteSampleBufferDisplayLayer.h 2022-02-03 10:07:16 UTC (rev 289037)
+++ trunk/Source/WebKit/GPUProcess/webrtc/RemoteSampleBufferDisplayLayer.h 2022-02-03 10:13:42 UTC (rev 289038)
@@ -31,6 +31,7 @@
#include "MessageReceiver.h"
#include "MessageSender.h"
#include "SampleBufferDisplayLayerIdentifier.h"
+#include "SharedVideoFrame.h"
#include <WebCore/SampleBufferDisplayLayer.h>
#include <wtf/MediaTime.h>
@@ -74,6 +75,8 @@
void pause();
void enqueueSample(WebCore::RemoteVideoSample&&);
void clearEnqueuedSamples();
+ void setSharedVideoFrameSemaphore(IPC::Semaphore&&);
+ void setSharedVideoFrameMemory(const SharedMemory::IPCHandle&);
// IPC::MessageSender
IPC::Connection* messageSenderConnection() const final;
@@ -87,6 +90,7 @@
std::unique_ptr<WebCore::ImageTransferSessionVT> m_imageTransferSession;
std::unique_ptr<WebCore::LocalSampleBufferDisplayLayer> m_sampleBufferDisplayLayer;
std::unique_ptr<LayerHostingContext> m_layerHostingContext;
+ SharedVideoFrameReader m_sharedVideoFrameReader;
};
}
Modified: trunk/Source/WebKit/GPUProcess/webrtc/RemoteSampleBufferDisplayLayer.messages.in (289037 => 289038)
--- trunk/Source/WebKit/GPUProcess/webrtc/RemoteSampleBufferDisplayLayer.messages.in 2022-02-03 10:07:16 UTC (rev 289037)
+++ trunk/Source/WebKit/GPUProcess/webrtc/RemoteSampleBufferDisplayLayer.messages.in 2022-02-03 10:13:42 UTC (rev 289038)
@@ -36,6 +36,8 @@
ClearEnqueuedSamples()
Play()
Pause()
+ SetSharedVideoFrameSemaphore(IPC::Semaphore semaphore)
+ SetSharedVideoFrameMemory(WebKit::SharedMemory::IPCHandle storageHandle)
}
#endif
Modified: trunk/Source/WebKit/WebProcess/GPU/webrtc/SampleBufferDisplayLayer.cpp (289037 => 289038)
--- trunk/Source/WebKit/WebProcess/GPU/webrtc/SampleBufferDisplayLayer.cpp 2022-02-03 10:07:16 UTC (rev 289037)
+++ trunk/Source/WebKit/WebProcess/GPU/webrtc/SampleBufferDisplayLayer.cpp 2022-02-03 10:13:42 UTC (rev 289038)
@@ -126,12 +126,29 @@
m_connection->send(Messages::RemoteSampleBufferDisplayLayer::Pause { }, m_identifier);
}
+bool SampleBufferDisplayLayer::copySharedVideoFrame(CVPixelBufferRef pixelBuffer)
+{
+ if (!pixelBuffer)
+ return false;
+ return m_sharedVideoFrameWriter.write(pixelBuffer,
+ [this](auto& semaphore) { m_connection->send(Messages::RemoteSampleBufferDisplayLayer::SetSharedVideoFrameSemaphore { semaphore }, m_identifier); },
+ [this](auto& handle) { m_connection->send(Messages::RemoteSampleBufferDisplayLayer::SetSharedVideoFrameMemory { handle }, m_identifier); }
+ );
+}
+
void SampleBufferDisplayLayer::enqueueSample(MediaSample& sample)
{
if (m_paused)
return;
- if (auto remoteSample = RemoteVideoSample::create(sample))
- m_connection->send(Messages::RemoteSampleBufferDisplayLayer::EnqueueSample { *remoteSample }, m_identifier);
+
+ auto remoteSample = RemoteVideoSample::create(sample, RemoteVideoSample::ShouldCheckForIOSurface::No);
+ if (!remoteSample->surface()) {
+ // The buffer is not an IOSurface, so copy it to the shared video frame.
+ if (!copySharedVideoFrame(remoteSample->imageBuffer()))
+ return;
+ }
+
+ m_connection->send(Messages::RemoteSampleBufferDisplayLayer::EnqueueSample { *remoteSample }, m_identifier);
}
void SampleBufferDisplayLayer::clearEnqueuedSamples()
Modified: trunk/Source/WebKit/WebProcess/GPU/webrtc/SampleBufferDisplayLayer.h (289037 => 289038)
--- trunk/Source/WebKit/WebProcess/GPU/webrtc/SampleBufferDisplayLayer.h 2022-02-03 10:07:16 UTC (rev 289037)
+++ trunk/Source/WebKit/WebProcess/GPU/webrtc/SampleBufferDisplayLayer.h 2022-02-03 10:13:42 UTC (rev 289038)
@@ -30,6 +30,7 @@
#include "GPUProcessConnection.h"
#include "MessageReceiver.h"
#include "SampleBufferDisplayLayerIdentifier.h"
+#include "SharedVideoFrame.h"
#include <WebCore/SampleBufferDisplayLayer.h>
#include <wtf/WeakPtr.h>
@@ -73,6 +74,7 @@
void gpuProcessConnectionDidClose(GPUProcessConnection&) final;
void setDidFail(bool);
+ bool copySharedVideoFrame(CVPixelBufferRef);
WeakPtr<SampleBufferDisplayLayerManager> m_manager;
Ref<IPC::Connection> m_connection;
@@ -81,6 +83,8 @@
PlatformLayerContainer m_videoLayer;
bool m_didFail { false };
bool m_paused { false };
+
+ SharedVideoFrameWriter m_sharedVideoFrameWriter;
};
}