Diff
Modified: trunk/Source/WebCore/ChangeLog (291467 => 291468)
--- trunk/Source/WebCore/ChangeLog 2022-03-18 07:33:15 UTC (rev 291467)
+++ trunk/Source/WebCore/ChangeLog 2022-03-18 07:35:02 UTC (rev 291468)
@@ -1,5 +1,34 @@
2022-03-18 Youenn Fablet <you...@apple.com>
+ Remove MediaSample usage from canvas capture code
+ https://bugs.webkit.org/show_bug.cgi?id=238015
+
+ Reviewed by Eric Carlson.
+
+ Refactoring to use VideoFrame as interface/wrapper around video frames instead of the more generic MediaSample.
+ Covered by existing tests.
+
+ * Modules/mediastream/CanvasCaptureMediaStreamTrack.cpp:
+ (WebCore::CanvasCaptureMediaStreamTrack::Source::captureCanvas):
+ * html/HTMLCanvasElement.cpp:
+ (WebCore::HTMLCanvasElement::toVideoFrame):
+ (WebCore::HTMLCanvasElement::toMediaSample): Deleted.
+ * html/HTMLCanvasElement.h:
+ * html/canvas/WebGLRenderingContextBase.cpp:
+ (WebCore::WebGLRenderingContextBase::paintCompositedResultsToVideoFrame):
+ (WebCore::WebGLRenderingContextBase::paintCompositedResultsToMediaSample): Deleted.
+ * html/canvas/WebGLRenderingContextBase.h:
+ * platform/graphics/GraphicsContextGL.h:
+ * platform/graphics/cocoa/GraphicsContextGLCocoa.h:
+ * platform/graphics/cocoa/GraphicsContextGLCocoa.mm:
+ (WebCore::GraphicsContextGLCocoa::paintCompositedResultsToVideoFrame):
+ (WebCore::GraphicsContextGLCocoa::paintCompositedResultsToMediaSample): Deleted.
+ * platform/graphics/cv/VideoFrameCV.h:
+ * platform/graphics/cv/VideoFrameCV.mm:
+ (WebCore::VideoFrameCV::createFromPixelBuffer):
+
+2022-03-18 Youenn Fablet <you...@apple.com>
+
Keep service workers alive when they are inspected even though they should be terminated
https://bugs.webkit.org/show_bug.cgi?id=237827
<rdar://88313935>
Modified: trunk/Source/WebCore/Modules/mediastream/CanvasCaptureMediaStreamTrack.cpp (291467 => 291468)
--- trunk/Source/WebCore/Modules/mediastream/CanvasCaptureMediaStreamTrack.cpp 2022-03-18 07:33:15 UTC (rev 291467)
+++ trunk/Source/WebCore/Modules/mediastream/CanvasCaptureMediaStreamTrack.cpp 2022-03-18 07:35:02 UTC (rev 291468)
@@ -25,13 +25,14 @@
#include "config.h"
#include "CanvasCaptureMediaStreamTrack.h"
+#if ENABLE(MEDIA_STREAM)
+
#include "GraphicsContext.h"
#include "HTMLCanvasElement.h"
+#include "VideoFrame.h"
#include "WebGLRenderingContextBase.h"
#include <wtf/IsoMallocInlines.h>
-#if ENABLE(MEDIA_STREAM)
-
namespace WebCore {
WTF_MAKE_ISO_ALLOCATED_IMPL(CanvasCaptureMediaStreamTrack);
@@ -192,13 +193,13 @@
if (!m_canvas->originClean())
return;
- auto sample = m_canvas->toMediaSample();
- if (!sample)
+ auto videoFrame = m_canvas->toVideoFrame();
+ if (!videoFrame)
return;
VideoFrameTimeMetadata metadata;
metadata.captureTime = MonotonicTime::now().secondsSinceEpoch();
- videoSampleAvailable(*sample, metadata);
+ videoSampleAvailable(*videoFrame, metadata);
}
RefPtr<MediaStreamTrack> CanvasCaptureMediaStreamTrack::clone()
Modified: trunk/Source/WebCore/html/HTMLCanvasElement.cpp (291467 => 291468)
--- trunk/Source/WebCore/html/HTMLCanvasElement.cpp 2022-03-18 07:33:15 UTC (rev 291467)
+++ trunk/Source/WebCore/html/HTMLCanvasElement.cpp 2022-03-18 07:35:02 UTC (rev 291468)
@@ -98,7 +98,7 @@
#endif
#if PLATFORM(COCOA)
-#include "MediaSampleAVFObjC.h"
+#include "VideoFrameCV.h"
#include <pal/cf/CoreMediaSoftLink.h>
#endif
@@ -772,7 +772,7 @@
#if ENABLE(MEDIA_STREAM)
-RefPtr<MediaSample> HTMLCanvasElement::toMediaSample()
+RefPtr<VideoFrame> HTMLCanvasElement::toVideoFrame()
{
#if PLATFORM(COCOA) || USE(GSTREAMER)
#if ENABLE(WEBGL)
@@ -779,7 +779,7 @@
if (is<WebGLRenderingContextBase>(m_context.get())) {
if (RuntimeEnabledFeatures::sharedFeatures().webAPIStatisticsEnabled())
ResourceLoadObserver::shared().logCanvasRead(document());
- return downcast<WebGLRenderingContextBase>(*m_context).paintCompositedResultsToMediaSample();
+ return downcast<WebGLRenderingContextBase>(*m_context).paintCompositedResultsToVideoFrame();
}
#endif
auto* imageBuffer = buffer();
@@ -792,13 +792,13 @@
// FIXME: This can likely be optimized quite a bit, especially in the cases where
// the ImageBuffer is backed by GPU memory already and/or is in the GPU process by
- // specializing toMediaSample() in ImageBufferBackend to not use getPixelBuffer().
+ // specializing toVideoFrame() in ImageBufferBackend to not use getPixelBuffer().
auto pixelBuffer = imageBuffer->getPixelBuffer({ AlphaPremultiplication::Unpremultiplied, PixelFormat::BGRA8, DestinationColorSpace::SRGB() }, { { }, imageBuffer->truncatedLogicalSize() });
if (!pixelBuffer)
return nullptr;
#if PLATFORM(COCOA)
- return MediaSampleAVFObjC::createFromPixelBuffer(WTFMove(*pixelBuffer));
+ return VideoFrameCV::createFromPixelBuffer(WTFMove(*pixelBuffer));
#elif USE(GSTREAMER)
return VideoFrameGStreamer::createFromPixelBuffer(WTFMove(*pixelBuffer));
#endif
Modified: trunk/Source/WebCore/html/HTMLCanvasElement.h (291467 => 291468)
--- trunk/Source/WebCore/html/HTMLCanvasElement.h 2022-03-18 07:33:15 UTC (rev 291467)
+++ trunk/Source/WebCore/html/HTMLCanvasElement.h 2022-03-18 07:35:02 UTC (rev 291468)
@@ -48,9 +48,9 @@
class Image;
class ImageBuffer;
class ImageData;
-class MediaSample;
class MediaStream;
class OffscreenCanvas;
+class VideoFrame;
class WebGLRenderingContextBase;
class GPUCanvasContext;
struct CanvasRenderingContext2DSettings;
@@ -107,7 +107,7 @@
void paint(GraphicsContext&, const LayoutRect&);
#if ENABLE(MEDIA_STREAM)
- RefPtr<MediaSample> toMediaSample();
+ RefPtr<VideoFrame> toVideoFrame();
ExceptionOr<Ref<MediaStream>> captureStream(std::optional<double>&& frameRequestRate);
#endif
Modified: trunk/Source/WebCore/html/canvas/WebGLRenderingContextBase.cpp (291467 => 291468)
--- trunk/Source/WebCore/html/canvas/WebGLRenderingContextBase.cpp 2022-03-18 07:33:15 UTC (rev 291467)
+++ trunk/Source/WebCore/html/canvas/WebGLRenderingContextBase.cpp 2022-03-18 07:35:02 UTC (rev 291468)
@@ -1462,11 +1462,11 @@
}
#if ENABLE(MEDIA_STREAM)
-RefPtr<MediaSample> WebGLRenderingContextBase::paintCompositedResultsToMediaSample()
+RefPtr<VideoFrame> WebGLRenderingContextBase::paintCompositedResultsToVideoFrame()
{
if (isContextLostOrPending())
return nullptr;
- return m_context->paintCompositedResultsToMediaSample();
+ return m_context->paintCompositedResultsToVideoFrame();
}
#endif
Modified: trunk/Source/WebCore/html/canvas/WebGLRenderingContextBase.h (291467 => 291468)
--- trunk/Source/WebCore/html/canvas/WebGLRenderingContextBase.h 2022-03-18 07:33:15 UTC (rev 291467)
+++ trunk/Source/WebCore/html/canvas/WebGLRenderingContextBase.h 2022-03-18 07:35:02 UTC (rev 291468)
@@ -130,7 +130,7 @@
#endif
#if ENABLE(MEDIA_STREAM)
-class MediaSample;
+class VideoFrame;
#endif
class WebGLRenderingContextBase : public GraphicsContextGL::Client, public GPUBasedCanvasRenderingContext, private ActivityStateChangeObserver {
@@ -394,7 +394,7 @@
void paintRenderingResultsToCanvas() final;
std::optional<PixelBuffer> paintRenderingResultsToPixelBuffer();
#if ENABLE(MEDIA_STREAM)
- RefPtr<MediaSample> paintCompositedResultsToMediaSample();
+ RefPtr<VideoFrame> paintCompositedResultsToVideoFrame();
#endif
void removeSharedObject(WebGLSharedObject&);
Modified: trunk/Source/WebCore/platform/graphics/GraphicsContextGL.h (291467 => 291468)
--- trunk/Source/WebCore/platform/graphics/GraphicsContextGL.h 2022-03-18 07:33:15 UTC (rev 291467)
+++ trunk/Source/WebCore/platform/graphics/GraphicsContextGL.h 2022-03-18 07:35:02 UTC (rev 291468)
@@ -55,7 +55,7 @@
class MediaPlayer;
#endif
#if ENABLE(MEDIA_STREAM)
-class MediaSample;
+class VideoFrame;
#endif
// Base class for graphics context for implementing WebGL rendering model.
@@ -1439,7 +1439,7 @@
virtual std::optional<PixelBuffer> paintRenderingResultsToPixelBuffer() = 0;
virtual void paintCompositedResultsToCanvas(ImageBuffer&) = 0;
#if ENABLE(MEDIA_STREAM)
- virtual RefPtr<MediaSample> paintCompositedResultsToMediaSample() = 0;
+ virtual RefPtr<VideoFrame> paintCompositedResultsToVideoFrame() = 0;
#endif
// FIXME: this should be removed. The layer should be marked composited by
Modified: trunk/Source/WebCore/platform/graphics/cocoa/GraphicsContextGLCocoa.h (291467 => 291468)
--- trunk/Source/WebCore/platform/graphics/cocoa/GraphicsContextGLCocoa.h 2022-03-18 07:33:15 UTC (rev 291467)
+++ trunk/Source/WebCore/platform/graphics/cocoa/GraphicsContextGLCocoa.h 2022-03-18 07:35:02 UTC (rev 291468)
@@ -76,7 +76,7 @@
GraphicsContextGLCV* asCV() final;
#endif
#if ENABLE(MEDIA_STREAM)
- RefPtr<MediaSample> paintCompositedResultsToMediaSample() final;
+ RefPtr<VideoFrame> paintCompositedResultsToVideoFrame() final;
#endif
void setContextVisibility(bool) final;
void prepareForDisplay() override;
Modified: trunk/Source/WebCore/platform/graphics/cocoa/GraphicsContextGLCocoa.mm (291467 => 291468)
--- trunk/Source/WebCore/platform/graphics/cocoa/GraphicsContextGLCocoa.mm 2022-03-18 07:33:15 UTC (rev 291467)
+++ trunk/Source/WebCore/platform/graphics/cocoa/GraphicsContextGLCocoa.mm 2022-03-18 07:35:02 UTC (rev 291468)
@@ -55,7 +55,6 @@
#if ENABLE(MEDIA_STREAM)
#import "ImageRotationSessionVT.h"
-#import "MediaSampleAVFObjC.h"
#endif
namespace WebCore {
@@ -762,7 +761,7 @@
}
#if ENABLE(MEDIA_STREAM)
-RefPtr<MediaSample> GraphicsContextGLCocoa::paintCompositedResultsToMediaSample()
+RefPtr<VideoFrame> GraphicsContextGLCocoa::paintCompositedResultsToVideoFrame()
{
auto &displayBuffer = m_swapChain.displayBuffer();
if (!displayBuffer.surface || !displayBuffer.handle)
@@ -782,7 +781,7 @@
return nullptr;
if (m_resourceOwner)
setOwnershipIdentityForCVPixelBuffer(mediaSamplePixelBuffer.get(), m_resourceOwner);
- return MediaSampleAVFObjC::createFromPixelBuffer(WTFMove(mediaSamplePixelBuffer), MediaSampleAVFObjC::VideoRotation::None, false);
+ return VideoFrameCV::create({ }, false, VideoFrame::VideoRotation::None, WTFMove(mediaSamplePixelBuffer));
}
#endif
Modified: trunk/Source/WebCore/platform/graphics/cv/VideoFrameCV.h (291467 => 291468)
--- trunk/Source/WebCore/platform/graphics/cv/VideoFrameCV.h 2022-03-18 07:33:15 UTC (rev 291467)
+++ trunk/Source/WebCore/platform/graphics/cv/VideoFrameCV.h 2022-03-18 07:35:02 UTC (rev 291468)
@@ -35,9 +35,12 @@
namespace WebCore {
+class PixelBuffer;
+
class VideoFrameCV : public VideoFrame {
public:
WEBCORE_EXPORT static Ref<VideoFrameCV> create(MediaTime presentationTime, bool isMirrored, VideoRotation, RetainPtr<CVPixelBufferRef>&&);
+ static RefPtr<VideoFrameCV> createFromPixelBuffer(PixelBuffer&&);
WEBCORE_EXPORT ~VideoFrameCV();
CVPixelBufferRef pixelBuffer() const final { return m_pixelBuffer.get(); }
Modified: trunk/Source/WebCore/platform/graphics/cv/VideoFrameCV.mm (291467 => 291468)
--- trunk/Source/WebCore/platform/graphics/cv/VideoFrameCV.mm 2022-03-18 07:33:15 UTC (rev 291467)
+++ trunk/Source/WebCore/platform/graphics/cv/VideoFrameCV.mm 2022-03-18 07:35:02 UTC (rev 291468)
@@ -39,6 +39,32 @@
return adoptRef(*new VideoFrameCV(presentationTime, isMirrored, rotation, WTFMove(pixelBuffer)));
}
+RefPtr<VideoFrameCV> VideoFrameCV::createFromPixelBuffer(PixelBuffer&& pixelBuffer)
+{
+ auto size = pixelBuffer.size();
+ auto width = size.width();
+ auto height = size.height();
+
+ auto data = pixelBuffer.takeData();
+ auto dataBaseAddress = data->data();
+ auto leakedData = &data.leakRef();
+
+ auto derefBuffer = [] (void* context, const void*) {
+ static_cast<JSC::Uint8ClampedArray*>(context)->deref();
+ };
+
+ CVPixelBufferRef cvPixelBufferRaw = nullptr;
+ auto status = CVPixelBufferCreateWithBytes(kCFAllocatorDefault, width, height, kCVPixelFormatType_32BGRA, dataBaseAddress, width * 4, derefBuffer, leakedData, nullptr, &cvPixelBufferRaw);
+
+ auto cvPixelBuffer = adoptCF(cvPixelBufferRaw);
+ if (!cvPixelBuffer) {
+ derefBuffer(leakedData, nullptr);
+ return nullptr;
+ }
+ ASSERT_UNUSED(status, !status);
+ return create({ }, false, VideoRotation::None, WTFMove(cvPixelBuffer));
+}
+
VideoFrameCV::VideoFrameCV(MediaTime presentationTime, bool isMirrored, VideoRotation rotation, RetainPtr<CVPixelBufferRef>&& pixelBuffer)
: VideoFrame(presentationTime, isMirrored, rotation)
, m_pixelBuffer(WTFMove(pixelBuffer))
Modified: trunk/Source/WebCore/platform/graphics/texmap/GraphicsContextGLTextureMapper.cpp (291467 => 291468)
--- trunk/Source/WebCore/platform/graphics/texmap/GraphicsContextGLTextureMapper.cpp 2022-03-18 07:33:15 UTC (rev 291467)
+++ trunk/Source/WebCore/platform/graphics/texmap/GraphicsContextGLTextureMapper.cpp 2022-03-18 07:35:02 UTC (rev 291468)
@@ -59,7 +59,7 @@
#endif
#if ENABLE(MEDIA_STREAM)
-#include "MediaSample.h"
+#include "VideoFrame.h"
#endif
#if USE(GSTREAMER) && ENABLE(MEDIA_STREAM)
@@ -118,7 +118,7 @@
}
#if ENABLE(MEDIA_STREAM)
-RefPtr<MediaSample> GraphicsContextGLTextureMapper::paintCompositedResultsToMediaSample()
+RefPtr<VideoFrame> GraphicsContextGLTextureMapper::paintCompositedResultsToVideoFrame()
{
#if USE(GSTREAMER)
if (auto pixelBuffer = readCompositedResults())
Modified: trunk/Source/WebCore/platform/graphics/texmap/GraphicsContextGLTextureMapper.h (291467 => 291468)
--- trunk/Source/WebCore/platform/graphics/texmap/GraphicsContextGLTextureMapper.h 2022-03-18 07:33:15 UTC (rev 291467)
+++ trunk/Source/WebCore/platform/graphics/texmap/GraphicsContextGLTextureMapper.h 2022-03-18 07:35:02 UTC (rev 291468)
@@ -52,7 +52,7 @@
bool copyTextureFromMedia(MediaPlayer&, PlatformGLObject texture, GCGLenum target, GCGLint level, GCGLenum internalFormat, GCGLenum format, GCGLenum type, bool premultiplyAlpha, bool flipY) final;
#endif
#if ENABLE(MEDIA_STREAM)
- RefPtr<MediaSample> paintCompositedResultsToMediaSample() final;
+ RefPtr<VideoFrame> paintCompositedResultsToVideoFrame() final;
#endif
#if USE(ANGLE)
void setContextVisibility(bool) final;
Modified: trunk/Source/WebKit/ChangeLog (291467 => 291468)
--- trunk/Source/WebKit/ChangeLog 2022-03-18 07:33:15 UTC (rev 291467)
+++ trunk/Source/WebKit/ChangeLog 2022-03-18 07:35:02 UTC (rev 291468)
@@ -1,5 +1,22 @@
2022-03-18 Youenn Fablet <you...@apple.com>
+ Remove MediaSample usage from canvas capture code
+ https://bugs.webkit.org/show_bug.cgi?id=238015
+
+ Reviewed by Eric Carlson.
+
+ * GPUProcess/graphics/RemoteGraphicsContextGL.cpp:
+ (WebKit::RemoteGraphicsContextGL::paintCompositedResultsToVideoFrame):
+ (WebKit::RemoteGraphicsContextGL::paintCompositedResultsToMediaSample): Deleted.
+ * GPUProcess/graphics/RemoteGraphicsContextGL.h:
+ * GPUProcess/graphics/RemoteGraphicsContextGL.messages.in:
+ * WebProcess/GPU/graphics/RemoteGraphicsContextGLProxy.cpp:
+ (WebKit::RemoteGraphicsContextGLProxy::paintCompositedResultsToVideoFrame):
+ (WebKit::RemoteGraphicsContextGLProxy::paintCompositedResultsToMediaSample): Deleted.
+ * WebProcess/GPU/graphics/RemoteGraphicsContextGLProxy.h:
+
+2022-03-18 Youenn Fablet <you...@apple.com>
+
Keep service workers alive when they are inspected even though they should be terminated
https://bugs.webkit.org/show_bug.cgi?id=237827
<rdar://88313935>
Modified: trunk/Source/WebKit/GPUProcess/graphics/RemoteGraphicsContextGL.cpp (291467 => 291468)
--- trunk/Source/WebKit/GPUProcess/graphics/RemoteGraphicsContextGL.cpp 2022-03-18 07:33:15 UTC (rev 291467)
+++ trunk/Source/WebKit/GPUProcess/graphics/RemoteGraphicsContextGL.cpp 2022-03-18 07:35:02 UTC (rev 291468)
@@ -220,11 +220,11 @@
}
#if ENABLE(MEDIA_STREAM)
-void RemoteGraphicsContextGL::paintCompositedResultsToMediaSample(CompletionHandler<void(std::optional<WebKit::RemoteVideoFrameProxy::Properties>&&)>&& completionHandler)
+void RemoteGraphicsContextGL::paintCompositedResultsToVideoFrame(CompletionHandler<void(std::optional<WebKit::RemoteVideoFrameProxy::Properties>&&)>&& completionHandler)
{
assertIsCurrent(workQueue());
std::optional<WebKit::RemoteVideoFrameProxy::Properties> result;
- if (auto videoFrame = m_context->paintCompositedResultsToMediaSample())
+ if (auto videoFrame = m_context->paintCompositedResultsToVideoFrame())
result = m_videoFrameObjectHeap->add(videoFrame.releaseNonNull());
completionHandler(WTFMove(result));
}
Modified: trunk/Source/WebKit/GPUProcess/graphics/RemoteGraphicsContextGL.h (291467 => 291468)
--- trunk/Source/WebKit/GPUProcess/graphics/RemoteGraphicsContextGL.h 2022-03-18 07:33:15 UTC (rev 291467)
+++ trunk/Source/WebKit/GPUProcess/graphics/RemoteGraphicsContextGL.h 2022-03-18 07:35:02 UTC (rev 291468)
@@ -119,7 +119,7 @@
void paintRenderingResultsToCanvas(WebCore::RenderingResourceIdentifier, CompletionHandler<void()>&&);
void paintCompositedResultsToCanvas(WebCore::RenderingResourceIdentifier, CompletionHandler<void()>&&);
#if ENABLE(MEDIA_STREAM)
- void paintCompositedResultsToMediaSample(CompletionHandler<void(std::optional<WebKit::RemoteVideoFrameProxy::Properties>&&)>&&);
+ void paintCompositedResultsToVideoFrame(CompletionHandler<void(std::optional<WebKit::RemoteVideoFrameProxy::Properties>&&)>&&);
#endif
#if ENABLE(VIDEO)
void copyTextureFromVideoFrame(RemoteVideoFrameReadReference, uint32_t texture, uint32_t target, int32_t level, uint32_t internalFormat, uint32_t format, uint32_t type, bool premultiplyAlpha, bool flipY, CompletionHandler<void(bool)>&&);
Modified: trunk/Source/WebKit/GPUProcess/graphics/RemoteGraphicsContextGL.messages.in (291467 => 291468)
--- trunk/Source/WebKit/GPUProcess/graphics/RemoteGraphicsContextGL.messages.in 2022-03-18 07:33:15 UTC (rev 291467)
+++ trunk/Source/WebKit/GPUProcess/graphics/RemoteGraphicsContextGL.messages.in 2022-03-18 07:35:02 UTC (rev 291468)
@@ -45,7 +45,7 @@
void CopyTextureFromVideoFrame(WebKit::RemoteVideoFrameReadReference videoFrame, uint32_t texture, uint32_t target, int32_t level, uint32_t internalFormat, uint32_t format, uint32_t type, bool premultiplyAlpha, bool flipY) -> (bool success) Synchronous
#endif
#if ENABLE(MEDIA_STREAM)
- void PaintCompositedResultsToMediaSample() -> (std::optional<WebKit::RemoteVideoFrameProxy::Properties> properties) Synchronous
+ void PaintCompositedResultsToVideoFrame() -> (std::optional<WebKit::RemoteVideoFrameProxy::Properties> properties) Synchronous
#endif
void SimulateEventForTesting(WebCore::GraphicsContextGL::SimulatedEventForTesting event)
void ReadnPixels0(int32_t x, int32_t y, int32_t width, int32_t height, uint32_t format, uint32_t type, IPC::ArrayReference<uint8_t> data) -> (IPC::ArrayReference<uint8_t> data) Synchronous
Modified: trunk/Source/WebKit/WebProcess/GPU/graphics/RemoteGraphicsContextGLProxy.cpp (291467 => 291468)
--- trunk/Source/WebKit/WebProcess/GPU/graphics/RemoteGraphicsContextGLProxy.cpp 2022-03-18 07:33:15 UTC (rev 291467)
+++ trunk/Source/WebKit/WebProcess/GPU/graphics/RemoteGraphicsContextGLProxy.cpp 2022-03-18 07:35:02 UTC (rev 291468)
@@ -171,12 +171,12 @@
}
#if ENABLE(MEDIA_STREAM)
-RefPtr<WebCore::MediaSample> RemoteGraphicsContextGLProxy::paintCompositedResultsToMediaSample()
+RefPtr<WebCore::VideoFrame> RemoteGraphicsContextGLProxy::paintCompositedResultsToVideoFrame()
{
if (isContextLost())
return nullptr;
std::optional<RemoteVideoFrameProxy::Properties> result;
- auto sendResult = sendSync(Messages::RemoteGraphicsContextGL::PaintCompositedResultsToMediaSample(), Messages::RemoteGraphicsContextGL::PaintCompositedResultsToMediaSample::Reply(result));
+ auto sendResult = sendSync(Messages::RemoteGraphicsContextGL::PaintCompositedResultsToVideoFrame(), Messages::RemoteGraphicsContextGL::PaintCompositedResultsToVideoFrame::Reply(result));
if (!sendResult) {
markContextLost();
return nullptr;
Modified: trunk/Source/WebKit/WebProcess/GPU/graphics/RemoteGraphicsContextGLProxy.h (291467 => 291468)
--- trunk/Source/WebKit/WebProcess/GPU/graphics/RemoteGraphicsContextGLProxy.h 2022-03-18 07:33:15 UTC (rev 291467)
+++ trunk/Source/WebKit/WebProcess/GPU/graphics/RemoteGraphicsContextGLProxy.h 2022-03-18 07:35:02 UTC (rev 291468)
@@ -69,7 +69,7 @@
void paintRenderingResultsToCanvas(WebCore::ImageBuffer&) final;
void paintCompositedResultsToCanvas(WebCore::ImageBuffer&) final;
#if ENABLE(MEDIA_STREAM)
- RefPtr<WebCore::MediaSample> paintCompositedResultsToMediaSample() final;
+ RefPtr<WebCore::VideoFrame> paintCompositedResultsToVideoFrame() final;
#endif
void synthesizeGLError(GCGLenum error) final;
GCGLenum getError() final;
Modified: trunk/Source/WebKit/WebProcess/GPU/graphics/wc/RemoteGraphicsContextGLProxyWC.cpp (291467 => 291468)
--- trunk/Source/WebKit/WebProcess/GPU/graphics/wc/RemoteGraphicsContextGLProxyWC.cpp 2022-03-18 07:33:15 UTC (rev 291467)
+++ trunk/Source/WebKit/WebProcess/GPU/graphics/wc/RemoteGraphicsContextGLProxyWC.cpp 2022-03-18 07:35:02 UTC (rev 291468)
@@ -67,7 +67,7 @@
void prepareForDisplay() final;
RefPtr<WebCore::GraphicsLayerContentsDisplayDelegate> layerContentsDisplayDelegate() final { return m_layerContentsDisplayDelegate.ptr(); }
#if ENABLE(MEDIA_STREAM)
- RefPtr<WebCore::MediaSample> paintCompositedResultsToMediaSample() final { return nullptr; }
+ RefPtr<WebCore::VideoFrame> paintCompositedResultsToVideoFrame() final { return nullptr; }
#endif
private:
RemoteGraphicsContextGLProxyWC(GPUProcessConnection& gpuProcessConnection, const WebCore::GraphicsContextGLAttributes& attributes, RenderingBackendIdentifier renderingBackend)