/*
 *  Copyright (C) 2012, 2015, 2016, 2018 Igalia S.L
 *  Copyright (C) 2015, 2016, 2018 Metrological Group B.V.
 *
 *  This library is free software; you can redistribute it and/or
 *  modify it under the terms of the GNU Lesser General Public
 *  License as published by the Free Software Foundation; either
 *  version 2 of the License, or (at your option) any later version.
 *
 *  This library is distributed in the hope that it will be useful,
 *  but WITHOUT ANY WARRANTY; without even the implied warranty of
 *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 *  Lesser General Public License for more details.
 *
 *  You should have received a copy of the GNU Lesser General Public
 *  License along with this library; if not, write to the Free Software
 *  Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301  USA
 */
#include "config.h"

#if USE(GSTREAMER) && USE(LIBWEBRTC)
#include "GStreamerVideoFrameLibWebRTC.h"

#include <algorithm>
#include <gst/video/video-format.h>
#include <gst/video/video-info.h>
#include <thread>

namespace WebCore {

GRefPtr<GstSample> convertLibWebRTCVideoFrameToGStreamerSample(const webrtc::VideoFrame& frame)
{
    // Native (platform-backed) buffers cannot be wrapped here; only CPU-backed
    // frames are supported by this path.
    RELEASE_ASSERT(frame.video_frame_buffer()->type() != webrtc::VideoFrameBuffer::Type::kNative);

    // Detach the reference held by the scoped_refptr. It is released in the
    // GstBuffer destroy notify below, so the pixel data stays alive for as long
    // as the wrapping GstBuffer does.
    auto* i420Buffer = frame.video_frame_buffer()->ToI420().release();
    int height = i420Buffer->height();
    int strides[3] = {
        i420Buffer->StrideY(),
        i420Buffer->StrideU(),
        i420Buffer->StrideV()
    };
    // The three planes are laid out contiguously starting at DataY(): Y plane
    // (StrideY * height bytes), then U, then V (each covering (height + 1) / 2 rows).
    size_t offsets[3] = {
        0,
        static_cast<gsize>(i420Buffer->StrideY() * height),
        static_cast<gsize>(i420Buffer->StrideY() * height + i420Buffer->StrideU() * ((height + 1) / 2))
    };
    GstVideoInfo info;
    gst_video_info_set_format(&info, GST_VIDEO_FORMAT_I420, frame.width(), frame.height());
    auto buffer = adoptGRef(gst_buffer_new_wrapped_full(static_cast<GstMemoryFlags>(GST_MEMORY_FLAG_NO_SHARE | GST_MEMORY_FLAG_READONLY),
        const_cast<gpointer>(reinterpret_cast<const void*>(i420Buffer->DataY())), info.size, 0, info.size, i420Buffer, [](gpointer buffer) {
            // ToI420() returns an I420BufferInterface, which is not necessarily a
            // concrete webrtc::I420Buffer, so release through the interface type
            // the pointer was stored as.
            static_cast<webrtc::I420BufferInterface*>(buffer)->Release();
    }));

    // Attach explicit per-plane offsets/strides so downstream elements interpret
    // the wrapped memory correctly even when strides differ from the defaults.
    gst_buffer_add_video_meta_full(buffer.get(), GST_VIDEO_FRAME_FLAG_NONE, GST_VIDEO_FORMAT_I420, frame.width(), frame.height(), 3, offsets, strides);

    auto caps = adoptGRef(gst_video_info_to_caps(&info));
    auto sample = adoptGRef(gst_sample_new(buffer.get(), caps.get(), nullptr, nullptr));
    return sample;
}

std::unique_ptr<webrtc::VideoFrame> convertGStreamerSampleToLibWebRTCVideoFrame(GRefPtr<GstSample>& sample, webrtc::VideoRotation rotation, int64_t timestamp, int64_t renderTimeMs)
{
    // Wrap the GStreamer sample in a GStreamer-backed frame buffer and hand it
    // to a libwebrtc VideoFrame carrying the given timing and rotation metadata.
    auto buffer = GStreamerVideoFrameLibWebRTC::create(sample);
    return std::make_unique<webrtc::VideoFrame>(buffer, timestamp, renderTimeMs, rotation);
}

rtc::scoped_refptr<webrtc::VideoFrameBuffer> GStreamerVideoFrameLibWebRTC::create(const GRefPtr<GstSample>& sample)
{
    // The sample's caps must describe a valid video format; anything else is a
    // programming error on the caller's side.
    auto* caps = gst_sample_get_caps(sample.get());

    GstVideoInfo videoInfo;
    if (!gst_video_info_from_caps(&videoInfo, caps))
        ASSERT_NOT_REACHED();

    return rtc::scoped_refptr<webrtc::VideoFrameBuffer>(new GStreamerVideoFrameLibWebRTC(sample, videoInfo));
}

rtc::scoped_refptr<webrtc::I420BufferInterface> GStreamerVideoFrameLibWebRTC::ToI420()
{
    // Map the underlying sample for reading; bail out if mapping fails.
    GstMappedFrame inFrame(m_sample, GST_MAP_READ);
    if (!inFrame) {
        GST_WARNING("Could not map frame");
        return nullptr;
    }

    if (inFrame.format() != GST_VIDEO_FORMAT_I420) {
        // Source is not I420: convert into a freshly allocated I420 buffer
        // with a GstVideoConverter before handing pixels to libwebrtc.
        GstVideoInfo outInfo;
        gst_video_info_set_format(&outInfo, GST_VIDEO_FORMAT_I420, inFrame.width(), inFrame.height());

        // Carry the source framerate over to the destination info.
        auto info = inFrame.info();
        outInfo.fps_n = info->fps_n;
        outInfo.fps_d = info->fps_d;

        auto buffer = adoptGRef(gst_buffer_new_allocate(nullptr, GST_VIDEO_INFO_SIZE(&outInfo), nullptr));
        GstMappedFrame outFrame(buffer.get(), outInfo, GST_MAP_WRITE);
        // Unlike inFrame above, this mapping was previously unchecked; a failed
        // write-map would feed an invalid frame to the converter.
        if (!outFrame) {
            GST_WARNING("Could not map output frame");
            return nullptr;
        }

        // Use one conversion thread per hardware thread, with a floor of 1 in
        // case hardware_concurrency() is unknown and returns 0.
        GUniquePtr<GstVideoConverter> videoConverter(gst_video_converter_new(inFrame.info(), &outInfo, gst_structure_new("GstVideoConvertConfig",
            GST_VIDEO_CONVERTER_OPT_THREADS, G_TYPE_UINT, std::max(std::thread::hardware_concurrency(), 1u), nullptr)));

        ASSERT(videoConverter);
        gst_video_converter_frame(videoConverter.get(), inFrame.get(), outFrame.get());

        // Copy the converted planes into a libwebrtc-owned I420 buffer.
        return webrtc::I420Buffer::Copy(outFrame.width(), outFrame.height(), outFrame.ComponentData(0), outFrame.ComponentStride(0),
            outFrame.ComponentData(1), outFrame.ComponentStride(1), outFrame.ComponentData(2), outFrame.ComponentStride(2));
    }

    // Already I420: copy the mapped planes directly.
    return webrtc::I420Buffer::Copy(inFrame.width(), inFrame.height(), inFrame.ComponentData(0), inFrame.ComponentStride(0),
        inFrame.ComponentData(1), inFrame.ComponentStride(1), inFrame.ComponentData(2), inFrame.ComponentStride(2));
}

}

#endif // USE(GSTREAMER) && USE(LIBWEBRTC)