Title: [254256] trunk
Revision: 254256
Author: you...@apple.com
Date: 2020-01-09 06:31:09 -0800 (Thu, 09 Jan 2020)

Log Message

RemoteVideoSample should be able to support canvas video samples
https://bugs.webkit.org/show_bug.cgi?id=205922

Reviewed by Eric Carlson.

Source/WebCore:

Canvas capture produces video samples that are not IOSurface-backed, which makes it
impossible to send them through IPC via RemoteVideoSample.
Beef up RemoteVideoSample to create an IOSurface for these RGBA video samples.
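
A minimal caller-side sketch of what this enables (not part of the patch; the helper
name is hypothetical, only RemoteVideoSample::create() comes from this change):

    #include "MediaSample.h"
    #include "RemoteVideoSample.h"

    // Hypothetical helper for illustration: wrap a sample so it can be sent over IPC.
    // With this patch, RemoteVideoSample::create() also succeeds when the sample's
    // CVPixelBuffer is plain 32BGRA memory (e.g. canvas capture), because it creates
    // and fills an IOSurface internally.
    std::unique_ptr<WebCore::RemoteVideoSample> wrapSampleForIPC(WebCore::MediaSample& sample)
    {
        auto remoteSample = WebCore::RemoteVideoSample::create(sample);
        if (!remoteSample) {
            // Samples that are neither IOSurface-backed nor 32BGRA are still rejected.
            return nullptr;
        }
        return remoteSample;
    }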

Test: http/wpt/mediarecorder/MediaRecorder-AV-audio-video-dataavailable-gpuprocess.html

* platform/graphics/RemoteVideoSample.cpp:
(WebCore::transferBGRAPixelBufferToIOSurface):
(WebCore::RemoteVideoSample::create):

LayoutTests:

* http/wpt/mediarecorder/MediaRecorder-AV-audio-video-dataavailable-gpuprocess-expected.txt: Added.
* http/wpt/mediarecorder/MediaRecorder-AV-audio-video-dataavailable-gpuprocess.html: Added.
* http/wpt/mediarecorder/MediaRecorder-AV-audio-video-dataavailable.html:
Remove a comment that is no longer valid and beef up the test so that assertion failures are reported.

Modified Paths

    trunk/LayoutTests/ChangeLog
    trunk/LayoutTests/http/wpt/mediarecorder/MediaRecorder-AV-audio-video-dataavailable.html
    trunk/Source/WebCore/ChangeLog
    trunk/Source/WebCore/platform/graphics/RemoteVideoSample.cpp

Added Paths

    trunk/LayoutTests/http/wpt/mediarecorder/MediaRecorder-AV-audio-video-dataavailable-gpuprocess-expected.txt
    trunk/LayoutTests/http/wpt/mediarecorder/MediaRecorder-AV-audio-video-dataavailable-gpuprocess.html

Diff

Modified: trunk/LayoutTests/ChangeLog (254255 => 254256)


--- trunk/LayoutTests/ChangeLog	2020-01-09 14:23:52 UTC (rev 254255)
+++ trunk/LayoutTests/ChangeLog	2020-01-09 14:31:09 UTC (rev 254256)
@@ -1,3 +1,15 @@
+2020-01-09  youenn fablet  <you...@apple.com>
+
+        RemoteVideoSample should be able to support canvas video samples
+        https://bugs.webkit.org/show_bug.cgi?id=205922
+
+        Reviewed by Eric Carlson.
+
+        * http/wpt/mediarecorder/MediaRecorder-AV-audio-video-dataavailable-gpuprocess-expected.txt: Added.
+        * http/wpt/mediarecorder/MediaRecorder-AV-audio-video-dataavailable-gpuprocess.html: Added.
+        * http/wpt/mediarecorder/MediaRecorder-AV-audio-video-dataavailable.html:
+        Remove no longer valid comment and beef up test in case of assertion failure.
+
 2020-01-08  Diego Pino Garcia  <dp...@igalia.com>
 
         Fix test transferToImageBitmap-empty.html after r253099

Added: trunk/LayoutTests/http/wpt/mediarecorder/MediaRecorder-AV-audio-video-dataavailable-gpuprocess-expected.txt (0 => 254256)


--- trunk/LayoutTests/http/wpt/mediarecorder/MediaRecorder-AV-audio-video-dataavailable-gpuprocess-expected.txt	                        (rev 0)
+++ trunk/LayoutTests/http/wpt/mediarecorder/MediaRecorder-AV-audio-video-dataavailable-gpuprocess-expected.txt	2020-01-09 14:31:09 UTC (rev 254256)
@@ -0,0 +1,4 @@
+
+
+PASS Verify MediaRecorder is working in GPUProcess 
+

Copied: trunk/LayoutTests/http/wpt/mediarecorder/MediaRecorder-AV-audio-video-dataavailable-gpuprocess.html (from rev 254255, trunk/LayoutTests/http/wpt/mediarecorder/MediaRecorder-AV-audio-video-dataavailable.html) (0 => 254256)


--- trunk/LayoutTests/http/wpt/mediarecorder/MediaRecorder-AV-audio-video-dataavailable-gpuprocess.html	                        (rev 0)
+++ trunk/LayoutTests/http/wpt/mediarecorder/MediaRecorder-AV-audio-video-dataavailable-gpuprocess.html	2020-01-09 14:31:09 UTC (rev 254256)
@@ -0,0 +1,92 @@
+<!doctype html>
+<html>
+<head>
+    <title>MediaRecorder Dataavailable</title>
+    <link rel="help" href=""
+    <script src=""
+    <script src=""
+    <script src=""
+    <link rel="stylesheet" href=""
+</head>
+<body>
+<div>
+    <video id="player">
+    </video>
+</div>
+<div>
+    <canvas id="canvas" width="200" height="200">
+    </canvas>
+    <canvas id="frame" width="200" height="200">
+    </canvas>
+</div>
+<script>
+    if (window.internals)
+        window.internals.setUseGPUProcessForWebRTC(true);
+
+    var context;
+    var drawStartTime;
+
+    function createVideoStream() {
+        const canvas = document.getElementById("canvas");
+        context = canvas.getContext('2d');
+        return canvas.captureStream();
+    }
+
+    function doRedImageDraw() {
+        if (context) {
+            context.fillStyle = "#ff0000";
+            context.fillRect(0, 0, 200, 200);
+            if (Date.now() - drawStartTime < 500) {
+                window.requestAnimationFrame(doRedImageDraw);
+            } else {
+                drawStartTime = Date.now();
+                doGreenImageDraw();
+            }
+        }
+    }
+
+    function doGreenImageDraw() {
+        if (context) {
+            context.fillStyle = "#00ff00";
+            context.fillRect(0, 0, 200, 200);
+            if (Date.now() - drawStartTime < 2000) {
+                window.requestAnimationFrame(doGreenImageDraw);
+            }
+        }
+    }
+
+    async_test(t => {
+        const ac = new AudioContext();
+        const osc = ac.createOscillator();
+        const dest = ac.createMediaStreamDestination();
+        const audio = dest.stream;
+        osc.connect(dest);
+
+        const video = createVideoStream();
+        assert_equals(video.getAudioTracks().length, 0, "video mediastream starts with no audio track");
+        assert_equals(audio.getAudioTracks().length, 1, "audio mediastream starts with one audio track");
+        video.addTrack(audio.getAudioTracks()[0]);
+        assert_equals(video.getAudioTracks().length, 1, "video mediastream starts with one audio track");
+        const recorder = new MediaRecorder(video);
+        let mode = 0;
+
+        recorder.ondataavailable = t.step_func(blobEvent => {
+            assert_true(blobEvent instanceof BlobEvent, 'the type of event should be BlobEvent');
+            assert_equals(blobEvent.type, 'dataavailable', 'the event type should be dataavailable');
+            assert_true(blobEvent.isTrusted, 'isTrusted should be true when the event is created by C++');
+            assert_true(blobEvent.data instanceof Blob, 'the type of data should be Blob');
+            assert_true(blobEvent.data.size > 0, 'the blob should contain some buffers');
+            t.done();
+        });
+        drawStartTime = Date.now();
+        doRedImageDraw();
+        recorder.start();
+        assert_equals(recorder.state, 'recording', 'MediaRecorder has been started successfully');
+        setTimeout(() => {
+            recorder.stop();
+        }, 2000);
+    }, 'Verify MediaRecorder is working in GPUProcess');
+
+</script>
+</body>
+</html>

Modified: trunk/LayoutTests/http/wpt/mediarecorder/MediaRecorder-AV-audio-video-dataavailable.html (254255 => 254256)


--- trunk/LayoutTests/http/wpt/mediarecorder/MediaRecorder-AV-audio-video-dataavailable.html	2020-01-09 14:23:52 UTC (rev 254255)
+++ trunk/LayoutTests/http/wpt/mediarecorder/MediaRecorder-AV-audio-video-dataavailable.html	2020-01-09 14:31:09 UTC (rev 254256)
@@ -20,7 +20,6 @@
     </canvas>
 </div>
 <script>
-    // This test is not passing with GPUProcess as long as capture frames are not IOSurfaced backed.
     if (window.internals)
         window.internals.setUseGPUProcessForWebRTC(false);
 
@@ -81,15 +80,15 @@
             const resFrame = document.getElementById("frame");
             const resContext = resFrame.getContext('2d');
 
-            player.oncanplay = () => {
+            player.oncanplay = t.step_func(() => {
                 assert_greater_than(player.duration, 0.1, 'the duration should be greater than 100ms');
                 player.play();
-            };
+            });
             player.onplay = () => {
                 player.pause();
                 player.currentTime = 0.05;
             };
-            player.onseeked = () => {
+            player.onseeked = t.step_func(() => {
                 resContext.drawImage(player, 0, 0);
                 if (!mode) {
                     _assertPixelApprox(resFrame, 25, 25, 255, 0, 0, 255, "25, 25", "255, 0, 0, 255", 20);
@@ -101,7 +100,7 @@
                     _assertPixelApprox(resFrame, 199, 199, 0, 255, 0, 255, "199, 199", "0, 255, 0, 255", 20);
                     t.done();
                 }
-            };
+            });
             player.load();
         });
         drawStartTime = Date.now();

Modified: trunk/Source/WebCore/ChangeLog (254255 => 254256)


--- trunk/Source/WebCore/ChangeLog	2020-01-09 14:23:52 UTC (rev 254255)
+++ trunk/Source/WebCore/ChangeLog	2020-01-09 14:31:09 UTC (rev 254256)
@@ -1,3 +1,20 @@
+2020-01-09  youenn fablet  <you...@apple.com>
+
+        RemoteVideoSample should be able to support canvas video samples
+        https://bugs.webkit.org/show_bug.cgi?id=205922
+
+        Reviewed by Eric Carlson.
+
+        Canvas capture is producing video samples that are not IOSurface backed.
+        This makes it impossible to send them through IPC via RemoteVideoSample.
+        We beef up RemoteVideoSample to create an IOSurface for RGBA video samples.
+
+        Test: http/wpt/mediarecorder/MediaRecorder-AV-audio-video-dataavailable-gpuprocess.html
+
+        * platform/graphics/RemoteVideoSample.cpp:
+        (WebCore::transferBGRAPixelBufferToIOSurface):
+        (WebCore::RemoteVideoSample::create):
+
 2020-01-08  Keith Miller  <keith_mil...@apple.com>
 
         [JSC] Introduce JSArrayIterator

Modified: trunk/Source/WebCore/platform/graphics/RemoteVideoSample.cpp (254255 => 254256)


--- trunk/Source/WebCore/platform/graphics/RemoteVideoSample.cpp	2020-01-09 14:23:52 UTC (rev 254255)
+++ trunk/Source/WebCore/platform/graphics/RemoteVideoSample.cpp	2020-01-09 14:31:09 UTC (rev 254256)
@@ -28,9 +28,14 @@
 
 #if ENABLE(MEDIA_STREAM) && PLATFORM(COCOA)
 
+#include "IOSurface.h"
 #include "Logging.h"
 #include "MediaSample.h"
 
+#if USE(ACCELERATE)
+#include <Accelerate/Accelerate.h>
+#endif
+
 #if HAVE(IOSURFACE)
 #include "GraphicsContextCG.h"
 #endif
@@ -42,6 +47,49 @@
 using namespace PAL;
 
 #if HAVE(IOSURFACE)
+static inline std::unique_ptr<IOSurface> transferBGRAPixelBufferToIOSurface(CVPixelBufferRef pixelBuffer)
+{
+#if USE(ACCELERATE)
+    ASSERT(CVPixelBufferGetPixelFormatType(pixelBuffer) == kCVPixelFormatType_32BGRA);
+
+    auto result = CVPixelBufferLockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly);
+    ASSERT(result == kCVReturnSuccess);
+    if (result != kCVReturnSuccess) {
+        RELEASE_LOG_ERROR(Media, "transferBGRAPixelBufferToIOSurface CVPixelBufferLockBaseAddress() returned error code %d", result);
+        return nullptr;
+    }
+
+    IntSize size { static_cast<int>(CVPixelBufferGetWidth(pixelBuffer)), static_cast<int>(CVPixelBufferGetHeight(pixelBuffer)) };
+    auto ioSurface =  IOSurface::create(size, sRGBColorSpaceRef(), IOSurface::Format::RGBA);
+
+    IOSurface::Locker lock(*ioSurface);
+    vImage_Buffer src;
+    src.width = size.width();
+    src.height = size.height();
+    src.rowBytes = CVPixelBufferGetBytesPerRow(pixelBuffer);
+    src.data = CVPixelBufferGetBaseAddress(pixelBuffer);
+
+    vImage_Buffer dest;
+    dest.width = size.width();
+    dest.height = size.height();
+    dest.rowBytes = ioSurface->bytesPerRow();
+    dest.data = lock.surfaceBaseAddress();
+
+    vImageUnpremultiplyData_BGRA8888(&src, &dest, kvImageNoFlags);
+
+    result = CVPixelBufferUnlockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly);
+    ASSERT(result == kCVReturnSuccess);
+    if (result != kCVReturnSuccess) {
+        RELEASE_LOG_ERROR(Media, "transferBGRAPixelBufferToIOSurface CVPixelBufferUnlockBaseAddress() returned error code %d", result);
+        return nullptr;
+    }
+    return ioSurface;
+#else
+    RELEASE_LOG_ERROR(Media, "transferBGRAPixelBufferToIOSurface cannot convert to IOSurface");
+    return nullptr;
+#endif
+}
+
 std::unique_ptr<RemoteVideoSample> RemoteVideoSample::create(MediaSample& sample)
 {
     ASSERT(sample.platformSample().type == PlatformSample::CMSampleBufferType);
@@ -52,10 +100,21 @@
         return nullptr;
     }
 
+    std::unique_ptr<IOSurface> ioSurface;
     auto surface = CVPixelBufferGetIOSurface(imageBuffer);
     if (!surface) {
-        RELEASE_LOG_ERROR(Media, "RemoteVideoSample::create: CVPixelBufferGetIOSurface returned nullptr");
-        return nullptr;
+        // Special case for canvas data that is RGBA, not IOSurface backed.
+        auto pixelFormatType = CVPixelBufferGetPixelFormatType(imageBuffer);
+        if (pixelFormatType != kCVPixelFormatType_32BGRA) {
+            RELEASE_LOG_ERROR(Media, "RemoteVideoSample::create does not support non IOSurface backed samples that are not BGRA");
+            return nullptr;
+        }
+
+        ioSurface = transferBGRAPixelBufferToIOSurface(imageBuffer);
+        if (!ioSurface)
+            return nullptr;
+
+        surface = ioSurface->surface();
     }
 
     return std::unique_ptr<RemoteVideoSample>(new RemoteVideoSample(surface, sRGBColorSpaceRef(), sample.presentationTime(), sample.videoRotation(), sample.videoMirrored()));
@@ -93,4 +152,4 @@
 
 }
 
-#endif // ENABLE(MEDIA_STREAM)
+#endif // ENABLE(MEDIA_STREAM) && PLATFORM(COCOA)
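
For readers unfamiliar with the Accelerate call above, here is a self-contained sketch of
the same conversion step, with plain memory buffers standing in for the locked
CVPixelBuffer and IOSurface (the helper and its parameters are made up for illustration):

    #include <Accelerate/Accelerate.h>
    #include <cstddef>

    // Sketch only: unpremultiply-copy a BGRA8888 image, as the patch does when moving
    // canvas pixels from a locked CVPixelBuffer into a freshly created IOSurface.
    static bool unpremultiplyCopyBGRA(const void* srcPixels, size_t srcRowBytes,
        void* destPixels, size_t destRowBytes, size_t width, size_t height)
    {
        vImage_Buffer src;
        src.data = const_cast<void*>(srcPixels);
        src.width = width;
        src.height = height;
        src.rowBytes = srcRowBytes;

        vImage_Buffer dest;
        dest.data = destPixels;
        dest.width = width;
        dest.height = height;
        dest.rowBytes = destRowBytes;

        // Divides the color components by alpha while copying; both buffers must stay
        // valid (i.e. locked) for the duration of the call.
        return vImageUnpremultiplyData_BGRA8888(&src, &dest, kvImageNoFlags) == kvImageNoError;
    }

In the patch, the source is the locked CVPixelBuffer's base address and the destination is
the base address exposed by the IOSurface::Locker, with the lock/unlock calls bracketing
the conversion.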