Modified: trunk/LayoutTests/ChangeLog (273950 => 273951)
--- trunk/LayoutTests/ChangeLog 2021-03-05 07:05:47 UTC (rev 273950)
+++ trunk/LayoutTests/ChangeLog 2021-03-05 09:38:00 UTC (rev 273951)
@@ -1,3 +1,13 @@
+2021-03-05 Philippe Normand <pnorm...@igalia.com>
+
+ REGRESSION(r273309) [GStreamer] webrtc/captureCanvas-webrtc-software-h264-baseline.html is flaky crashing inside libwebrtc
+ https://bugs.webkit.org/show_bug.cgi?id=222451
+
+ Reviewed by Xabier Rodriguez-Calvar.
+
+ * platform/glib/TestExpectations: Update expectations for webrtc h264 tests that are
+ slightly less broken now.
+
2021-03-04 Peng Liu <peng.l...@apple.com>
[GPUP] Some tests in imported/w3c/web-platform-tests/html/semantics/embedded-content/media-elements fail when media in GPU Process is enabled
Modified: trunk/LayoutTests/platform/glib/TestExpectations (273950 => 273951)
--- trunk/LayoutTests/platform/glib/TestExpectations 2021-03-05 07:05:47 UTC (rev 273950)
+++ trunk/LayoutTests/platform/glib/TestExpectations 2021-03-05 09:38:00 UTC (rev 273951)
@@ -1015,9 +1015,7 @@
webkit.org/b/194611 http/wpt/webrtc/getUserMedia-processSwapping.html [ Failure ]
-# Uncomment after 222451 is fixed
-# webkit.org/b/201267 [ Release ] webrtc/video-h264.html [ Failure ]
-webkit.org/b/222451 [ Release ] webrtc/video-h264.html [ Timeout ]
+webkit.org/b/201267 [ Release ] webrtc/video-h264.html [ Timeout Failure ]
webkit.org/b/222585 [ Debug ] webrtc/video-h264.html [ Crash ]
webkit.org/b/210272 webrtc/datachannel/multiple-connections.html [ Timeout Pass ]
@@ -1039,7 +1037,7 @@
webkit.org/b/209163 webrtc/audio-video-element-playing.html [ Crash Failure Pass ]
-webkit.org/b/216538 webkit.org/b/222451 webrtc/captureCanvas-webrtc-software-h264-baseline.html [ Failure Crash ]
+webkit.org/b/216538 webrtc/captureCanvas-webrtc-software-h264-baseline.html [ Slow Failure ]
webkit.org/b/216763 webrtc/captureCanvas-webrtc-software-h264-high.html [ Crash Failure ]
Modified: trunk/Source/WebCore/ChangeLog (273950 => 273951)
--- trunk/Source/WebCore/ChangeLog 2021-03-05 07:05:47 UTC (rev 273950)
+++ trunk/Source/WebCore/ChangeLog 2021-03-05 09:38:00 UTC (rev 273951)
@@ -1,3 +1,39 @@
+2021-03-05 Philippe Normand <pnorm...@igalia.com>
+
+ REGRESSION(r273309) [GStreamer] webrtc/captureCanvas-webrtc-software-h264-baseline.html is flaky crashing inside libwebrtc
+ https://bugs.webkit.org/show_bug.cgi?id=222451
+
+ Reviewed by Xabier Rodriguez-Calvar.
+
+ The main issue here was bad memory management in the VideoFrameLibWebRTC
+ implementation: the video converter was writing into a buffer allocated by the
+ libwebrtc aligned-malloc allocator, which was triggering various issues. There's
+ little benefit to using a buffer pool anyway, since we end up doing buffer copies.
+ A sketch of the safer boundary copy follows this ChangeLog section.
+
+ The patch also includes a number of coding style fixes, a few smart-pointer
+ improvements, and improved error handling in the decoder factory, which previously
+ handled only warnings.
+
+ * platform/mediastream/gstreamer/GStreamerVideoFrameLibWebRTC.cpp:
+ (WebCore::GStreamerSampleFromLibWebRTCVideoFrame):
+ (WebCore::GStreamerVideoFrameLibWebRTC::create):
+ (WebCore::LibWebRTCVideoFrameFromGStreamerSample):
+ (WebCore::GStreamerVideoFrameLibWebRTC::ToI420):
+ * platform/mediastream/gstreamer/GStreamerVideoFrameLibWebRTC.h:
+ (WebCore::GStreamerVideoFrameLibWebRTC::GStreamerVideoFrameLibWebRTC):
+ (WebCore::GStreamerVideoFrameLibWebRTC::takeSample):
+ * platform/mediastream/gstreamer/RealtimeIncomingVideoSourceLibWebRTC.cpp:
+ (WebCore::RealtimeIncomingVideoSourceLibWebRTC::OnFrame):
+ * platform/mediastream/gstreamer/RealtimeOutgoingVideoSourceLibWebRTC.cpp:
+ (WebCore::RealtimeOutgoingVideoSourceLibWebRTC::videoSampleAvailable):
+ * platform/mediastream/libwebrtc/GStreamerVideoDecoderFactory.cpp:
+ (WebCore::GStreamerVideoDecoder::handleError):
+ (WebCore::GStreamerVideoDecoder::pullSample):
+ * platform/mediastream/libwebrtc/GStreamerVideoEncoderFactory.cpp:
+ (WebCore::GStreamerEncodedImageBuffer::create):
+ (WebCore::GStreamerEncodedImageBuffer::getBuffer const):
+ (WebCore::GStreamerEncodedImageBuffer::getVideoResolution const):
+ (WebCore::GStreamerEncodedImageBuffer::GStreamerEncodedImageBuffer):
+
2021-03-04 Aditya Keerthi <akeer...@apple.com>
[iOS] Add legacy prefix to the form controls UA stylesheet
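The crash described in the WebCore ChangeLog entry above comes down to GStreamer code writing into memory owned by libwebrtc's aligned-malloc allocator. A minimal sketch of the safer boundary this patch adopts, assuming a mapped GstVideoFrame as input; the helper name copyToLibWebRTC is illustrative, not WebKit API:

    #include <gst/video/video.h>
    #include <webrtc/api/video/i420_buffer.h>

    // Keep intermediate memory on the GStreamer side; copy into memory that
    // libwebrtc allocated itself only at the boundary.
    static rtc::scoped_refptr<webrtc::I420Buffer> copyToLibWebRTC(GstVideoFrame* frame)
    {
        return webrtc::I420Buffer::Copy(
            GST_VIDEO_FRAME_WIDTH(frame), GST_VIDEO_FRAME_HEIGHT(frame),
            static_cast<const uint8_t*>(GST_VIDEO_FRAME_COMP_DATA(frame, 0)), GST_VIDEO_FRAME_COMP_STRIDE(frame, 0),
            static_cast<const uint8_t*>(GST_VIDEO_FRAME_COMP_DATA(frame, 1)), GST_VIDEO_FRAME_COMP_STRIDE(frame, 1),
            static_cast<const uint8_t*>(GST_VIDEO_FRAME_COMP_DATA(frame, 2)), GST_VIDEO_FRAME_COMP_STRIDE(frame, 2));
    }

The copy costs one memcpy per plane, but it removes any aliasing between the two allocators, which is what the ChangeLog attributes the flaky crashes to.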
Modified: trunk/Source/WebCore/platform/mediastream/gstreamer/GStreamerVideoFrameLibWebRTC.cpp (273950 => 273951)
--- trunk/Source/WebCore/platform/mediastream/gstreamer/GStreamerVideoFrameLibWebRTC.cpp 2021-03-05 07:05:47 UTC (rev 273950)
+++ trunk/Source/WebCore/platform/mediastream/gstreamer/GStreamerVideoFrameLibWebRTC.cpp 2021-03-05 09:38:00 UTC (rev 273951)
@@ -21,40 +21,39 @@
#if USE(GSTREAMER) && USE(LIBWEBRTC)
#include "GStreamerVideoFrameLibWebRTC.h"
+#include <gst/video/video-format.h>
+#include <gst/video/video-info.h>
#include <thread>
namespace WebCore {
-const GRefPtr<GstSample> GStreamerSampleFromLibWebRTCVideoFrame(const webrtc::VideoFrame& frame)
+WARN_UNUSED_RETURN GRefPtr<GstSample> GStreamerSampleFromLibWebRTCVideoFrame(const webrtc::VideoFrame& frame)
{
if (frame.video_frame_buffer()->type() == webrtc::VideoFrameBuffer::Type::kNative) {
- auto framebuffer = static_cast<GStreamerVideoFrameLibWebRTC*>(frame.video_frame_buffer().get());
- auto gstsample = framebuffer->getSample();
-
- GST_LOG("Reusing native GStreamer sample: %p", gstsample.get());
-
- return gstsample;
+ auto* framebuffer = static_cast<GStreamerVideoFrameLibWebRTC*>(frame.video_frame_buffer().get());
+ return framebuffer->takeSample();
}
- auto webrtcbuffer = frame.video_frame_buffer().get()->ToI420();
- // FIXME - Check lifetime of those buffers.
- const uint8_t* comps[3] = {
- webrtcbuffer->DataY(),
- webrtcbuffer->DataU(),
- webrtcbuffer->DataV()
+ auto* i420Buffer = frame.video_frame_buffer()->ToI420().release();
+ int height = i420Buffer->height();
+ int strides[3] = {
+ i420Buffer->StrideY(),
+ i420Buffer->StrideU(),
+ i420Buffer->StrideV()
};
-
+ size_t offsets[3] = {
+ 0,
+ static_cast<gsize>(i420Buffer->StrideY() * height),
+ static_cast<gsize>(i420Buffer->StrideY() * height + i420Buffer->StrideU() * ((height + 1) / 2))
+ };
GstVideoInfo info;
gst_video_info_set_format(&info, GST_VIDEO_FORMAT_I420, frame.width(), frame.height());
- auto buffer = adoptGRef(gst_buffer_new());
- for (gint i = 0; i < 3; i++) {
- gsize compsize = GST_VIDEO_INFO_COMP_STRIDE(&info, i) * GST_VIDEO_INFO_COMP_HEIGHT(&info, i);
+ auto buffer = adoptGRef(gst_buffer_new_wrapped_full(static_cast<GstMemoryFlags>(GST_MEMORY_FLAG_NO_SHARE | GST_MEMORY_FLAG_READONLY),
+ const_cast<gpointer>(reinterpret_cast<const void*>(i420Buffer->DataY())), info.size, 0, info.size, i420Buffer, [](gpointer buffer) {
+ reinterpret_cast<webrtc::I420Buffer*>(buffer)->Release();
+ }));
- GstMemory* comp = gst_memory_new_wrapped(
- static_cast<GstMemoryFlags>(GST_MEMORY_FLAG_PHYSICALLY_CONTIGUOUS | GST_MEMORY_FLAG_READONLY),
- const_cast<gpointer>(reinterpret_cast<const void*>(comps[i])), compsize, 0, compsize, webrtcbuffer, nullptr);
- gst_buffer_append_memory(buffer.get(), comp);
- }
+ gst_buffer_add_video_meta_full(buffer.get(), GST_VIDEO_FRAME_FLAG_NONE, GST_VIDEO_FORMAT_I420, frame.width(), frame.height(), 3, offsets, strides);
auto caps = adoptGRef(gst_video_info_to_caps(&info));
auto sample = adoptGRef(gst_sample_new(buffer.get(), caps.get(), nullptr, nullptr));
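Two details in the hunk above are worth calling out. The destroy notify ties the GstBuffer's lifetime to the webrtc::I420Buffer reference count, and gst_buffer_add_video_meta_full records the real plane layout, since libwebrtc strides need not match GStreamer's defaults for I420. A sketch of the layout computation, assuming the three planes sit contiguously after DataY() (true for buffers allocated by webrtc::I420Buffer, and what this code relies on):

    // Offsets are relative to DataY(); chroma planes are (height + 1) / 2
    // rows tall, so the V plane starts after Y plus one full U plane.
    int height = i420Buffer->height();
    gsize offsets[3] = {
        0,
        static_cast<gsize>(i420Buffer->StrideY() * height),
        static_cast<gsize>(i420Buffer->StrideY() * height + i420Buffer->StrideU() * ((height + 1) / 2))
    };
    gint strides[3] = { i420Buffer->StrideY(), i420Buffer->StrideU(), i420Buffer->StrideV() };
    gst_buffer_add_video_meta_full(buffer.get(), GST_VIDEO_FRAME_FLAG_NONE,
        GST_VIDEO_FORMAT_I420, frame.width(), frame.height(), 3, offsets, strides);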
@@ -61,88 +60,54 @@
return sample;
}
-rtc::scoped_refptr<webrtc::VideoFrameBuffer> GStreamerVideoFrameLibWebRTC::create(GstSample * sample)
+rtc::scoped_refptr<webrtc::VideoFrameBuffer> GStreamerVideoFrameLibWebRTC::create(GRefPtr<GstSample>&& sample)
{
GstVideoInfo info;
- if (!gst_video_info_from_caps(&info, gst_sample_get_caps(sample)))
+ if (!gst_video_info_from_caps(&info, gst_sample_get_caps(sample.get())))
ASSERT_NOT_REACHED();
- return rtc::scoped_refptr<webrtc::VideoFrameBuffer>(new GStreamerVideoFrameLibWebRTC(sample, info));
+ return rtc::scoped_refptr<webrtc::VideoFrameBuffer>(new GStreamerVideoFrameLibWebRTC(WTFMove(sample), info));
}
-std::unique_ptr<webrtc::VideoFrame> LibWebRTCVideoFrameFromGStreamerSample(GstSample* sample, webrtc::VideoRotation rotation,
+std::unique_ptr<webrtc::VideoFrame> LibWebRTCVideoFrameFromGStreamerSample(GRefPtr<GstSample>&& sample, webrtc::VideoRotation rotation,
int64_t timestamp, int64_t renderTimeMs)
{
- auto frameBuffer(GStreamerVideoFrameLibWebRTC::create(sample));
-
- return std::unique_ptr<webrtc::VideoFrame>(
- new webrtc::VideoFrame(frameBuffer, timestamp, renderTimeMs, rotation));
+ auto frameBuffer(GStreamerVideoFrameLibWebRTC::create(WTFMove(sample)));
+ return std::unique_ptr<webrtc::VideoFrame>(new webrtc::VideoFrame(frameBuffer, timestamp, renderTimeMs, rotation));
}
-webrtc::VideoFrameBuffer::Type GStreamerVideoFrameLibWebRTC::type() const
-{
- return Type::kNative;
-}
-
-GRefPtr<GstSample> GStreamerVideoFrameLibWebRTC::getSample()
-{
- return m_sample.get();
-}
-
rtc::scoped_refptr<webrtc::I420BufferInterface> GStreamerVideoFrameLibWebRTC::ToI420()
{
GstMappedFrame inFrame(m_sample, GST_MAP_READ);
-
if (!inFrame) {
GST_WARNING("Could not map frame");
-
return nullptr;
}
- auto newBuffer = m_bufferPool.CreateI420Buffer(inFrame.width(), inFrame.height());
- ASSERT(newBuffer);
- if (!newBuffer) {
- GST_WARNING("RealtimeOutgoingVideoSourceGStreamer::videoSampleAvailable unable to allocate buffer for conversion to YUV");
- return nullptr;
- }
-
if (inFrame.format() != GST_VIDEO_FORMAT_I420) {
GstVideoInfo outInfo;
+ gst_video_info_set_format(&outInfo, GST_VIDEO_FORMAT_I420, inFrame.width(), inFrame.height());
- gst_video_info_set_format(&outInfo, GST_VIDEO_FORMAT_I420, inFrame.width(),
- inFrame.height());
auto info = inFrame.info();
outInfo.fps_n = info->fps_n;
outInfo.fps_d = info->fps_d;
- GRefPtr<GstBuffer> buffer = adoptGRef(gst_buffer_new_wrapped_full(GST_MEMORY_FLAG_NO_SHARE, newBuffer->MutableDataY(),
- outInfo.size, 0, outInfo.size, nullptr, nullptr));
-
+ auto buffer = adoptGRef(gst_buffer_new_allocate(nullptr, GST_VIDEO_INFO_SIZE(&outInfo), nullptr));
GstMappedFrame outFrame(buffer.get(), outInfo, GST_MAP_WRITE);
-
- GUniquePtr<GstVideoConverter> videoConverter(gst_video_converter_new(inFrame.info(),
- &outInfo, gst_structure_new("GstVideoConvertConfig",
+ GUniquePtr<GstVideoConverter> videoConverter(gst_video_converter_new(inFrame.info(), &outInfo, gst_structure_new("GstVideoConvertConfig",
GST_VIDEO_CONVERTER_OPT_THREADS, G_TYPE_UINT, std::max(std::thread::hardware_concurrency(), 1u), nullptr)));
ASSERT(videoConverter);
-
gst_video_converter_frame(videoConverter.get(), inFrame.get(), outFrame.get());
-
- return newBuffer;
+ return webrtc::I420Buffer::Copy(outFrame.width(), outFrame.height(), outFrame.ComponentData(0), outFrame.ComponentStride(0),
+ outFrame.ComponentData(1), outFrame.ComponentStride(1), outFrame.ComponentData(2), outFrame.ComponentStride(2));
}
- newBuffer->Copy(
- inFrame.width(),
- inFrame.height(),
- inFrame.ComponentData(0),
- inFrame.ComponentStride(0),
- inFrame.ComponentData(1),
- inFrame.ComponentStride(1),
- inFrame.ComponentData(2),
- inFrame.ComponentStride(2));
+ return webrtc::I420Buffer::Copy(inFrame.width(), inFrame.height(), inFrame.ComponentData(0), inFrame.ComponentStride(0),
+ inFrame.ComponentData(1), inFrame.ComponentStride(1), inFrame.ComponentData(2), inFrame.ComponentStride(2));
+}
- return newBuffer;
}
-}
+
#endif // USE(LIBWEBRTC)
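For non-I420 inputs, ToI420() now converts into a buffer GStreamer allocated itself and copies into libwebrtc memory afterwards. A self-contained sketch of the converter setup used above, assuming inInfo and outInfo are filled-in GstVideoInfo structures and inFrame/outFrame are mapped for reading and writing respectively:

    #include <gst/video/video.h>
    #include <algorithm>
    #include <thread>

    // Spread the conversion over all hardware threads; the structure name
    // "GstVideoConvertConfig" is arbitrary, only the option fields matter.
    GstVideoConverter* converter = gst_video_converter_new(&inInfo, &outInfo,
        gst_structure_new("GstVideoConvertConfig",
            GST_VIDEO_CONVERTER_OPT_THREADS, G_TYPE_UINT,
            std::max(std::thread::hardware_concurrency(), 1u), nullptr));
    gst_video_converter_frame(converter, &inFrame, &outFrame);
    gst_video_converter_free(converter);

The committed code wraps the converter in a GUniquePtr so the free happens automatically when the converter goes out of scope.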
Modified: trunk/Source/WebCore/platform/mediastream/gstreamer/GStreamerVideoFrameLibWebRTC.h (273950 => 273951)
--- trunk/Source/WebCore/platform/mediastream/gstreamer/GStreamerVideoFrameLibWebRTC.h 2021-03-05 07:05:47 UTC (rev 273950)
+++ trunk/Source/WebCore/platform/mediastream/gstreamer/GStreamerVideoFrameLibWebRTC.h 2021-03-05 09:38:00 UTC (rev 273951)
@@ -21,39 +21,39 @@
#pragma once
#if USE(GSTREAMER) && USE(LIBWEBRTC)
+
#include "GStreamerCommon.h"
#include "LibWebRTCMacros.h"
#include "webrtc/api/video/i420_buffer.h"
#include "webrtc/api/video/video_frame.h"
-#include "webrtc/common_video/include/video_frame_buffer_pool.h"
#include "webrtc/common_video/include/video_frame_buffer.h"
#include "webrtc/rtc_base/ref_counted_object.h"
namespace WebCore {
-const GRefPtr<GstSample> GStreamerSampleFromLibWebRTCVideoFrame(const webrtc::VideoFrame&);
-std::unique_ptr<webrtc::VideoFrame> LibWebRTCVideoFrameFromGStreamerSample(GstSample*, webrtc::VideoRotation, int64_t timestamp, int64_t renderTimeMs);
+GRefPtr<GstSample> GStreamerSampleFromLibWebRTCVideoFrame(const webrtc::VideoFrame&);
+std::unique_ptr<webrtc::VideoFrame> LibWebRTCVideoFrameFromGStreamerSample(GRefPtr<GstSample>&&, webrtc::VideoRotation, int64_t timestamp, int64_t renderTimeMs);
+
class GStreamerVideoFrameLibWebRTC : public rtc::RefCountedObject<webrtc::VideoFrameBuffer> {
public:
- GStreamerVideoFrameLibWebRTC(GstSample* sample, GstVideoInfo info)
- : m_sample(adoptGRef(sample))
+ GStreamerVideoFrameLibWebRTC(GRefPtr<GstSample>&& sample, GstVideoInfo info)
+ : m_sample(WTFMove(sample))
, m_info(info) { }
- static rtc::scoped_refptr<webrtc::VideoFrameBuffer> create(GstSample*);
+ static rtc::scoped_refptr<webrtc::VideoFrameBuffer> create(GRefPtr<GstSample>&&);
- GRefPtr<GstSample> getSample();
+ GRefPtr<GstSample>&& takeSample() { return WTFMove(m_sample); }
rtc::scoped_refptr<webrtc::I420BufferInterface> ToI420() final;
- int width() const override { return GST_VIDEO_INFO_WIDTH(&m_info); }
- int height() const override { return GST_VIDEO_INFO_HEIGHT(&m_info); }
+ int width() const final { return GST_VIDEO_INFO_WIDTH(&m_info); }
+ int height() const final { return GST_VIDEO_INFO_HEIGHT(&m_info); }
private:
- webrtc::VideoFrameBuffer::Type type() const override;
+ webrtc::VideoFrameBuffer::Type type() const final { return Type::kNative; }
GRefPtr<GstSample> m_sample;
GstVideoInfo m_info;
- webrtc::VideoFrameBufferPool m_bufferPool;
};
}
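A usage note on the header changes: takeSample() moves the GRefPtr out of the frame buffer, so the sample can be consumed at most once per frame. A sketch of the consuming side, mirroring GStreamerSampleFromLibWebRTCVideoFrame above:

    // takeSample() transfers ownership; m_sample is null afterwards, so a
    // second call would hand back an empty GRefPtr.
    auto* framebuffer = static_cast<GStreamerVideoFrameLibWebRTC*>(frame.video_frame_buffer().get());
    GRefPtr<GstSample> sample = framebuffer->takeSample();
    ASSERT(sample);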
Modified: trunk/Source/WebCore/platform/mediastream/gstreamer/RealtimeIncomingVideoSourceLibWebRTC.cpp (273950 => 273951)
--- trunk/Source/WebCore/platform/mediastream/gstreamer/RealtimeIncomingVideoSourceLibWebRTC.cpp 2021-03-05 07:05:47 UTC (rev 273950)
+++ trunk/Source/WebCore/platform/mediastream/gstreamer/RealtimeIncomingVideoSourceLibWebRTC.cpp 2021-03-05 09:38:00 UTC (rev 273951)
@@ -33,7 +33,6 @@
#include "GStreamerVideoFrameLibWebRTC.h"
#include "MediaSampleGStreamer.h"
-#include <gst/video/video.h>
namespace WebCore {
@@ -59,10 +58,10 @@
if (!isProducingData())
return;
- auto sample = GStreamerSampleFromLibWebRTCVideoFrame(frame);
- callOnMainThread([protectedThis = makeRef(*this), sample] {
- protectedThis->videoSampleAvailable(MediaSampleGStreamer::create(sample.get(),
- WebCore::FloatSize(), String()));
+ callOnMainThread([protectedThis = makeRef(*this), frame] {
+ auto gstSample = GStreamerSampleFromLibWebRTCVideoFrame(frame);
+ auto sample = MediaSampleGStreamer::create(WTFMove(gstSample), { }, { });
+ protectedThis->videoSampleAvailable(sample.get());
});
}
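Note the threading change in this hunk: the GstSample is now created on the main thread rather than on libwebrtc's delivery thread. Copying the webrtc::VideoFrame into the lambda is cheap, since it is a small handle that refs the underlying frame buffer. The shape of the pattern, annotated:

    // The frame handle keeps its buffer alive until the task runs; all
    // GStreamer sample construction now happens on a single (the main) thread.
    callOnMainThread([protectedThis = makeRef(*this), frame] {
        auto gstSample = GStreamerSampleFromLibWebRTCVideoFrame(frame);
        auto sample = MediaSampleGStreamer::create(WTFMove(gstSample), { }, { });
        protectedThis->videoSampleAvailable(sample.get());
    });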
@@ -69,4 +68,3 @@
} // namespace WebCore
#endif // USE(LIBWEBRTC)
-
Modified: trunk/Source/WebCore/platform/mediastream/gstreamer/RealtimeOutgoingVideoSourceLibWebRTC.cpp (273950 => 273951)
--- trunk/Source/WebCore/platform/mediastream/gstreamer/RealtimeOutgoingVideoSourceLibWebRTC.cpp 2021-03-05 07:05:47 UTC (rev 273950)
+++ trunk/Source/WebCore/platform/mediastream/gstreamer/RealtimeOutgoingVideoSourceLibWebRTC.cpp 2021-03-05 09:38:00 UTC (rev 273951)
@@ -69,7 +69,7 @@
ASSERT(sample.platformSample().type == PlatformSample::GStreamerSampleType);
auto& mediaSample = static_cast<MediaSampleGStreamer&>(sample);
- auto frameBuffer(GStreamerVideoFrameLibWebRTC::create(gst_sample_ref(mediaSample.platformSample().sample.gstSample)));
+ auto frameBuffer = GStreamerVideoFrameLibWebRTC::create(gst_sample_ref(mediaSample.platformSample().sample.gstSample));
sendFrame(WTFMove(frameBuffer));
}
Modified: trunk/Source/WebCore/platform/mediastream/libwebrtc/GStreamerVideoDecoderFactory.cpp (273950 => 273951)
--- trunk/Source/WebCore/platform/mediastream/libwebrtc/GStreamerVideoDecoderFactory.cpp 2021-03-05 07:05:47 UTC (rev 273950)
+++ trunk/Source/WebCore/platform/mediastream/libwebrtc/GStreamerVideoDecoderFactory.cpp 2021-03-05 09:38:00 UTC (rev 273951)
@@ -61,9 +61,7 @@
{
}
- static void decodebinPadAddedCb(GstElement*,
- GstPad* srcpad,
- GstPad* sinkpad)
+ static void decodebinPadAddedCb(GstElement*, GstPad* srcpad, GstPad* sinkpad)
{
GST_INFO_OBJECT(srcpad, "connecting pad with %" GST_PTR_FORMAT, sinkpad);
if (gst_pad_link(srcpad, sinkpad) != GST_PAD_LINK_OK)
@@ -82,6 +80,14 @@
return gst_element_factory_make(factoryName, name.get());
}
+ void handleError(GError* error)
+ {
+ if (!g_error_matches(error, GST_STREAM_ERROR, GST_STREAM_ERROR_DECODE))
+ return;
+ GST_INFO_OBJECT(pipeline(), "--> needs keyframe (%s)", error->message);
+ m_needsKeyframe = true;
+ }
+
int32_t InitDecode(const webrtc::VideoCodec* codecSettings, int32_t) override
{
m_src = makeElement("appsrc");
@@ -105,33 +111,19 @@
// happening in the main pipeline.
if (m_requireParse) {
caps = gst_caps_new_simple(Caps(), "parsed", G_TYPE_BOOLEAN, TRUE, nullptr);
- GRefPtr<GstBus> bus = adoptGRef(gst_pipeline_get_bus(GST_PIPELINE(m_pipeline.get())));
+ auto bus = adoptGRef(gst_pipeline_get_bus(GST_PIPELINE(m_pipeline.get())));
gst_bus_enable_sync_message_emission(bus.get());
- g_signal_connect(bus.get(), "sync-message::warning",
- G_CALLBACK(+[](GstBus*, GstMessage* message, GStreamerVideoDecoder* justThis) {
- GUniqueOutPtr<GError> err;
-
- switch (GST_MESSAGE_TYPE(message)) {
- case GST_MESSAGE_WARNING: {
- gst_message_parse_warning(message, &err.outPtr(), nullptr);
- FALLTHROUGH;
- }
- case GST_MESSAGE_ERROR: {
- if (!err)
- gst_message_parse_error(message, &err.outPtr(), nullptr);
-
- if (g_error_matches(err.get(), GST_STREAM_ERROR, GST_STREAM_ERROR_DECODE)) {
- GST_INFO_OBJECT(justThis->pipeline(), "--> needs keyframe (%s)",
- err->message);
- justThis->m_needsKeyframe = true;
- }
- break;
- }
- default:
- break;
- }
- }), this);
+ g_signal_connect_swapped(bus.get(), "sync-message::warning", G_CALLBACK(+[](GStreamerVideoDecoder* decoder, GstMessage* message) {
+ GUniqueOutPtr<GError> error;
+ gst_message_parse_warning(message, &error.outPtr(), nullptr);
+ decoder->handleError(error.get());
+ }), this);
+ g_signal_connect_swapped(bus.get(), "sync-message::error", G_CALLBACK(+[](GStreamerVideoDecoder* decoder, GstMessage* message) {
+ GUniqueOutPtr<GError> error;
+ gst_message_parse_error(message, &error.outPtr(), nullptr);
+ decoder->handleError(error.get());
+ }), this);
} else {
/* FIXME - How could we handle missing keyframes in case we do not plug parsers? */
caps = gst_caps_new_empty_simple(Caps());
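The rewritten bus handling above relies on g_signal_connect_swapped, which swaps the instance and user-data arguments so the decoder pointer arrives first and the lambda reads like a method call. The same pattern in isolation, assuming `self` points at the decoder owning `pipeline`:

    // Sync messages are delivered on the posting thread; emission has to be
    // enabled explicitly, and the swapped connect puts `self` first.
    auto bus = adoptGRef(gst_pipeline_get_bus(GST_PIPELINE(pipeline)));
    gst_bus_enable_sync_message_emission(bus.get());
    g_signal_connect_swapped(bus.get(), "sync-message::error",
        G_CALLBACK(+[](GStreamerVideoDecoder* decoder, GstMessage* message) {
            GUniqueOutPtr<GError> error;
            gst_message_parse_error(message, &error.outPtr(), nullptr);
            decoder->handleError(error.get());
        }), self);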
@@ -177,10 +169,10 @@
int32_t Release() final
{
- if (m_pipeline.get()) {
- GRefPtr<GstBus> bus = adoptGRef(gst_pipeline_get_bus(GST_PIPELINE(m_pipeline.get())));
+ if (m_pipeline) {
+ disconnectSimpleBusMessageCallback(m_pipeline.get());
+ auto bus = adoptGRef(gst_pipeline_get_bus(GST_PIPELINE(m_pipeline.get())));
gst_bus_set_sync_handler(bus.get(), nullptr, nullptr, nullptr);
-
gst_element_set_state(m_pipeline.get(), GST_STATE_NULL);
m_src = nullptr;
m_sink = nullptr;
@@ -244,12 +236,12 @@
int32_t pullSample()
{
- auto sample = gst_app_sink_try_pull_sample(GST_APP_SINK(m_sink), GST_SECOND / 30);
+ auto sample = adoptGRef(gst_app_sink_try_pull_sample(GST_APP_SINK(m_sink), GST_SECOND / 30));
if (!sample) {
GST_ERROR("Needs more data");
return WEBRTC_VIDEO_CODEC_OK;
}
- auto buffer = gst_sample_get_buffer(sample);
+ auto buffer = gst_sample_get_buffer(sample.get());
// Make sure that the frame.timestamp matches the previously input frame's
// _timeStamp, as required by the VideoDecoder base class.
@@ -256,7 +248,7 @@
auto timestamps = m_dtsPtsMap[GST_BUFFER_PTS(buffer)];
m_dtsPtsMap.erase(GST_BUFFER_PTS(buffer));
- auto frame(LibWebRTCVideoFrameFromGStreamerSample(sample, webrtc::kVideoRotation_0,
+ auto frame(LibWebRTCVideoFrameFromGStreamerSample(WTFMove(sample), webrtc::kVideoRotation_0,
timestamps.timestamp, timestamps.renderTimeMs));
GST_BUFFER_DTS(buffer) = GST_CLOCK_TIME_NONE;
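gst_app_sink_try_pull_sample() returns a full (transfer-full) reference. Previously the raw pointer's ownership was only taken over deep inside the call chain (the frame buffer's constructor adopted it); adopting at the pull site makes the ownership explicit and keeps early returns safe. The general rule, sketched:

    #include <gst/app/gstappsink.h>

    // try_pull_sample hands back nullptr on timeout, or a reference the
    // caller owns; adoptGRef stores it without taking an extra ref and
    // unrefs it when the GRefPtr goes out of scope.
    auto sample = adoptGRef(gst_app_sink_try_pull_sample(GST_APP_SINK(sink), GST_SECOND / 30));
    if (!sample)
        return; // timed out; appsink has nothing decoded yet
    GstBuffer* buffer = gst_sample_get_buffer(sample.get()); // borrowed pointer, do not unref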
Modified: trunk/Source/WebCore/platform/mediastream/libwebrtc/GStreamerVideoEncoderFactory.cpp (273950 => 273951)
--- trunk/Source/WebCore/platform/mediastream/libwebrtc/GStreamerVideoEncoderFactory.cpp 2021-03-05 07:05:47 UTC (rev 273950)
+++ trunk/Source/WebCore/platform/mediastream/libwebrtc/GStreamerVideoEncoderFactory.cpp 2021-03-05 09:38:00 UTC (rev 273951)
@@ -35,9 +35,11 @@
#include <gst/app/gstappsink.h>
#include <gst/app/gstappsrc.h>
-#define GST_USE_UNSTABLE_API 1
+
+#define GST_USE_UNSTABLE_API
#include <gst/codecparsers/gsth264parser.h>
#undef GST_USE_UNSTABLE_API
+
#include <gst/pbutils/encoding-profile.h>
#include <gst/video/video.h>
#include <wtf/Atomics.h>
@@ -46,11 +48,6 @@
#include <wtf/StdMap.h>
#include <wtf/text/StringConcatenateNumbers.h>
-// Required for unified builds
-#ifdef GST_CAT_DEFAULT
-#undef GST_CAT_DEFAULT
-#endif
-
GST_DEBUG_CATEGORY(webkit_webrtcenc_debug);
#define GST_CAT_DEFAULT webkit_webrtcenc_debug
@@ -60,40 +57,28 @@
WTF_MAKE_FAST_ALLOCATED;
public:
- static rtc::scoped_refptr<GStreamerEncodedImageBuffer> create(GRefPtr<GstBuffer>&& buffer)
+ static rtc::scoped_refptr<GStreamerEncodedImageBuffer> create(GRefPtr<GstSample>&& sample)
{
- return new rtc::RefCountedObject<GStreamerEncodedImageBuffer>(GstMappedOwnedBuffer::create(buffer));
+ return new rtc::RefCountedObject<GStreamerEncodedImageBuffer>(WTFMove(sample));
}
- static rtc::scoped_refptr<GStreamerEncodedImageBuffer> create(GstBuffer* buffer)
- {
- return new rtc::RefCountedObject<GStreamerEncodedImageBuffer>(GstMappedOwnedBuffer::create(buffer));
- }
+ const uint8_t* data() const final { return m_mappedBuffer->data(); }
+ uint8_t* data() final { return m_mappedBuffer->data(); }
+ size_t size() const final { return m_mappedBuffer->size(); }
- virtual const uint8_t* data() const final
- {
- return m_mappedBuffer->data();
- }
+ const GstBuffer* getBuffer() const { return gst_sample_get_buffer(m_sample.get()); }
+ Optional<FloatSize> getVideoResolution() const { return getVideoResolutionFromCaps(gst_sample_get_caps(m_sample.get())); }
- virtual uint8_t* data() final
- {
- return m_mappedBuffer->data();
- }
-
- virtual size_t size() const final
- {
- return m_mappedBuffer->size();
- }
-
-
protected:
GStreamerEncodedImageBuffer() = default;
~GStreamerEncodedImageBuffer() = default;
- GStreamerEncodedImageBuffer(RefPtr<GstMappedOwnedBuffer> mappedBuffer)
- : m_mappedBuffer(mappedBuffer)
+ GStreamerEncodedImageBuffer(GRefPtr<GstSample>&& sample)
+ : m_sample(sample)
{
+ m_mappedBuffer = GstMappedOwnedBuffer::create(gst_sample_get_buffer(m_sample.get()));
}
+ GRefPtr<GstSample> m_sample;
RefPtr<GstMappedOwnedBuffer> m_mappedBuffer;
};
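Retaining the whole GRefPtr<GstSample> (instead of only a mapped buffer) keeps the encoded bytes and their caps alive for as long as libwebrtc holds the EncodedImageBuffer, which is what lets getVideoResolution() consult the caps later. One nuance: the constructor above copies the GRefPtr out of an rvalue, which works but takes an extra ref/unref pair; a WTFMove-based variant, sketched:

    // The sample owns both buffer and caps; the mapped view is created from
    // the stored sample so it cannot outlive it.
    GStreamerEncodedImageBuffer(GRefPtr<GstSample>&& sample)
        : m_sample(WTFMove(sample))
        , m_mappedBuffer(GstMappedOwnedBuffer::create(gst_sample_get_buffer(m_sample.get())))
    {
    }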
@@ -162,6 +147,7 @@
g_object_set(m_src, "is-live", true, "format", GST_FORMAT_TIME, nullptr);
auto videoconvert = makeElement("videoconvert");
+ auto videoscale = makeElement("videoscale");
m_sink = makeElement("appsink");
g_object_set(m_sink, "sync", FALSE, nullptr);
@@ -169,8 +155,8 @@
if (m_restrictionCaps)
g_object_set(m_capsFilter, "caps", m_restrictionCaps.get(), nullptr);
- gst_bin_add_many(GST_BIN(m_pipeline.get()), m_src, videoconvert, m_capsFilter, encoder.leakRef(), m_sink, nullptr);
- if (!gst_element_link_many(m_src, videoconvert, m_capsFilter, m_encoder, m_sink, nullptr)) {
+ gst_bin_add_many(GST_BIN(m_pipeline.get()), m_src, videoconvert, videoscale, m_capsFilter, encoder.leakRef(), m_sink, nullptr);
+ if (!gst_element_link_many(m_src, videoconvert, videoscale, m_capsFilter, m_encoder, m_sink, nullptr)) {
GST_DEBUG_BIN_TO_DOT_FILE_WITH_TS(GST_BIN(m_pipeline.get()), GST_DEBUG_GRAPH_SHOW_VERBOSE, "webkit-webrtc-encoder.error");
ASSERT_NOT_REACHED();
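The hunk above inserts a videoscale element between videoconvert and the caps filter, so frames whose dimensions don't match the restriction caps get rescaled instead of failing caps negotiation. The resulting chain, sketched with gst_parse_launch for illustration only (x264enc stands in for whichever encoder the factory actually instantiates):

    // appsrc ! videoconvert ! videoscale ! capsfilter ! encoder ! appsink
    GError* error = nullptr;
    GstElement* pipeline = gst_parse_launch(
        "appsrc name=src is-live=true format=time ! videoconvert ! videoscale "
        "! capsfilter name=filter ! x264enc ! appsink name=sink sync=false",
        &error);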
@@ -192,9 +178,7 @@
{
m_encodedFrame.ClearEncodedData();
if (m_pipeline) {
- GRefPtr<GstBus> bus = adoptGRef(gst_pipeline_get_bus(GST_PIPELINE(m_pipeline.get())));
- gst_bus_set_sync_handler(bus.get(), nullptr, nullptr, nullptr);
-
+ disconnectSimpleBusMessageCallback(m_pipeline.get());
gst_element_set_state(m_pipeline.get(), GST_STATE_NULL);
m_src = nullptr;
m_encoder = nullptr;
@@ -279,18 +263,16 @@
return WEBRTC_VIDEO_CODEC_ERROR;
}
- auto encodedBuffer = gst_sample_get_buffer(encodedSample.get());
- auto encodedCaps = gst_sample_get_caps(encodedSample.get());
-
- m_encodedFrame.SetEncodedData(GStreamerEncodedImageBuffer::create(encodedBuffer));
+ auto encodedData = GStreamerEncodedImageBuffer::create(WTFMove(encodedSample));
+ const auto* encodedBuffer = encodedData->getBuffer();
+ auto resolution = encodedData->getVideoResolution();
+ m_encodedFrame.SetEncodedData(encodedData);
if (!m_encodedFrame.size())
return WEBRTC_VIDEO_CODEC_OK;
- gst_structure_get(gst_caps_get_structure(encodedCaps, 0),
- "width", G_TYPE_INT, &m_encodedFrame._encodedWidth,
- "height", G_TYPE_INT, &m_encodedFrame._encodedHeight,
- nullptr);
-
+ ASSERT(resolution);
+ m_encodedFrame._encodedWidth = resolution->width();
+ m_encodedFrame._encodedHeight = resolution->height();
m_encodedFrame._frameType = GST_BUFFER_FLAG_IS_SET(encodedBuffer, GST_BUFFER_FLAG_DELTA_UNIT) ? webrtc::VideoFrameType::kVideoFrameDelta : webrtc::VideoFrameType::kVideoFrameKey;
m_encodedFrame._completeFrame = true;
m_encodedFrame.capture_time_ms_ = frame.render_time_ms();
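getVideoResolution() replaces the manual gst_structure_get() calls with WebKit's getVideoResolutionFromCaps() helper. The underlying extraction amounts to roughly the following (the real helper may also account for pixel aspect ratio; this is a simplified sketch):

    // Pull the encoded frame's dimensions out of the sample caps.
    const GstStructure* structure = gst_caps_get_structure(caps, 0);
    int width = 0, height = 0;
    if (!gst_structure_get_int(structure, "width", &width)
        || !gst_structure_get_int(structure, "height", &height))
        return WTF::nullopt;
    return FloatSize(width, height);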
@@ -344,7 +326,7 @@
}
virtual webrtc::VideoCodecType CodecType() = 0;
- virtual void PopulateCodecSpecific(webrtc::CodecSpecificInfo*, GstBuffer*) = 0;
+ virtual void PopulateCodecSpecific(webrtc::CodecSpecificInfo*, const GstBuffer*) = 0;
virtual const gchar* Name() = 0;
virtual int KeyframeInterval(const webrtc::VideoCodec* codecSettings) = 0;
@@ -400,7 +382,7 @@
GstH264NalParser* m_parser;
webrtc::VideoCodecType CodecType() final { return webrtc::kVideoCodecH264; }
- void PopulateCodecSpecific(webrtc::CodecSpecificInfo* codecSpecificInfos, GstBuffer*) final
+ void PopulateCodecSpecific(webrtc::CodecSpecificInfo* codecSpecificInfos, const GstBuffer*) final
{
codecSpecificInfos->codecType = CodecType();
webrtc::CodecSpecificInfoH264* h264Info = &(codecSpecificInfos->codecSpecific.H264);
@@ -423,7 +405,7 @@
return codecSettings->VP8().keyFrameInterval;
}
- void PopulateCodecSpecific(webrtc::CodecSpecificInfo* codecSpecificInfos, GstBuffer* buffer) final
+ void PopulateCodecSpecific(webrtc::CodecSpecificInfo* codecSpecificInfos, const GstBuffer* buffer) final
{
codecSpecificInfos->codecType = webrtc::kVideoCodecVP8;
webrtc::CodecSpecificInfoVP8* vp8Info = &(codecSpecificInfos->codecSpecific.VP8);