File: ImageGStreamerCairo.cpp

Package: webkit2gtk 2.48.5-1
/*
 * Copyright (C) 2010 Igalia S.L
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Library General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Library General Public License for more details.
 *
 * You should have received a copy of the GNU Library General Public License
 * along with this library; see the file COPYING.LIB.  If not, write to
 * the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
 * Boston, MA 02110-1301, USA.
 */

#include "config.h"
#include "ImageGStreamer.h"

#if ENABLE(VIDEO) && USE(GSTREAMER) && USE(CAIRO)

#include "GStreamerCommon.h"

#include <cairo.h>
#include <gst/gst.h>
#include <gst/video/gstvideometa.h>


namespace WebCore {

ImageGStreamer::ImageGStreamer(GRefPtr<GstSample>&& sample)
    : m_sample(WTFMove(sample))
{
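    // Read the sample's caps into a GstVideoInfo so we know the frame's pixel format, dimensions and stride.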
    GstCaps* caps = gst_sample_get_caps(m_sample.get());
    GstVideoInfo videoInfo;
    gst_video_info_init(&videoInfo);
    if (!gst_video_info_from_caps(&videoInfo, caps))
        return;

    // The frame has to be RGB so we can paint it.
    ASSERT(GST_VIDEO_INFO_IS_RGB(&videoInfo));

    GstBuffer* buffer = gst_sample_get_buffer(m_sample.get());
    if (UNLIKELY(!GST_IS_BUFFER(buffer)))
        return;

    m_frameMapped = gst_video_frame_map(&m_videoFrame, &videoInfo, buffer, GST_MAP_READ);
    if (!m_frameMapped)
        return;

    // The video buffer may have these formats in these cases:
    // { BGRx, BGRA } on little endian:
    //   - When GStreamer-gl is disabled (whether accelerated compositing is enabled or not), since VideoSinkGStreamer is used.
    //   - When GStreamer-gl is enabled but the caps used in the sink are not RGB, so the player converts the frame in order to paint it.
    // { xRGB, ARGB } on big endian:
    //   - When GStreamer-gl is disabled (whether accelerated compositing is enabled or not), since VideoSinkGStreamer is used.
    //   - When GStreamer-gl is enabled but the caps used in the sink are not RGB, so the player converts the frame in order to paint it.
    // { RGBx, RGBA }:
    //   - When GStreamer-gl is enabled and the caps used in the sink are RGBx/RGBA.
    //
    // Internally cairo uses BGRA for CAIRO_FORMAT_ARGB32 on little endian and ARGB on big endian, so both { BGRx, BGRA }
    // and { xRGB, ARGB } can be passed directly to cairo, while for { RGBx, RGBA } we need to swap the R and B components.
    // Also, GStreamer uses straight alpha while cairo requires it to be premultiplied, so if the format has alpha
    // we need to premultiply the color components. In either of these cases we need to create a modified copy of the
    // original buffer.
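    //
    // For example, an opaque red pixel arriving as RGBA is laid out in memory as { 0xff, 0x00, 0x00, 0xff } and has
    // to be rewritten as { 0x00, 0x00, 0xff, 0xff } (BGRA) before cairo can use it on little endian.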
    m_hasAlpha = GST_VIDEO_INFO_HAS_ALPHA(&videoInfo);
    bool componentSwapRequired = GST_VIDEO_FRAME_FORMAT(&m_videoFrame) == GST_VIDEO_FORMAT_RGBA || GST_VIDEO_FRAME_FORMAT(&m_videoFrame) == GST_VIDEO_FORMAT_RGBx;
    unsigned char* bufferData = reinterpret_cast<unsigned char*>(GST_VIDEO_FRAME_PLANE_DATA(&m_videoFrame, 0));
    int stride = GST_VIDEO_FRAME_PLANE_STRIDE(&m_videoFrame, 0);
    int width = GST_VIDEO_FRAME_WIDTH(&m_videoFrame);
    int height = GST_VIDEO_FRAME_HEIGHT(&m_videoFrame);
    RefPtr<cairo_surface_t> surface;

    m_size = { static_cast<float>(width), static_cast<float>(height) };

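    // Slow path: build a converted copy, visiting all width * height pixels; both the source and destination
    // pointers advance linearly, four bytes per pixel.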
    if (m_hasAlpha || componentSwapRequired) {
        uint8_t* surfaceData = static_cast<uint8_t*>(fastMalloc(height * stride));
        uint8_t* surfacePixel = surfaceData;

        for (int x = 0; x < width; x++) {
            for (int y = 0; y < height; y++) {
                // These store the source pixel components.
                uint16_t red;
                uint16_t green;
                uint16_t blue;
                uint16_t alpha;
                // These store the component offset inside the pixel for the destination surface.
                uint8_t redIndex;
                uint8_t greenIndex;
                uint8_t blueIndex;
                uint8_t alphaIndex;
                if (componentSwapRequired) {
                    // Source is RGBA or RGBx.
                    red = bufferData[0];
                    green = bufferData[1];
                    blue = bufferData[2];
                    alpha = bufferData[3];
#if G_BYTE_ORDER == G_LITTLE_ENDIAN
                    // Destination is BGRA.
                    redIndex = 2;
                    greenIndex = 1;
                    blueIndex = 0;
                    alphaIndex = 3;
#else
                    // Destination is ARGB.
                    redIndex = 1;
                    greenIndex = 2;
                    blueIndex = 3;
                    alphaIndex = 0;
#endif
                } else {
#if G_BYTE_ORDER == G_LITTLE_ENDIAN
                    // Both source and destination are BGRA or BGRx.
                    red = bufferData[2];
                    green = bufferData[1];
                    blue = bufferData[0];
                    alpha = bufferData[3];
                    redIndex = 2;
                    greenIndex = 1;
                    blueIndex = 0;
                    alphaIndex = 3;
#else
                    // Both source and destination are ARGB or xRGB.
                    red = bufferData[1];
                    green = bufferData[2];
                    blue = bufferData[3];
                    alpha = bufferData[0];
                    redIndex = 1;
                    greenIndex = 2;
                    blueIndex = 3;
                    alphaIndex = 0;
#endif
                }

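                // Cairo expects premultiplied alpha, so scale each color component by alpha when the source has an
                // alpha channel, e.g. half-transparent white (255, 255, 255, 128) becomes (128, 128, 128, 128).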
                if (m_hasAlpha) {
                    surfacePixel[redIndex] = red * alpha / 255;
                    surfacePixel[greenIndex] = green * alpha / 255;
                    surfacePixel[blueIndex] = blue * alpha / 255;
                    surfacePixel[alphaIndex] = alpha;
                } else {
                    surfacePixel[redIndex] = red;
                    surfacePixel[greenIndex] = green;
                    surfacePixel[blueIndex] = blue;
                    surfacePixel[alphaIndex] = alpha;
                }

                bufferData += 4;
                surfacePixel += 4;
            }
        }
        surface = adoptRef(cairo_image_surface_create_for_data(surfaceData, CAIRO_FORMAT_ARGB32, width, height, stride));
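        // Hand ownership of the copied pixels to the surface so fastFree() runs when the surface is destroyed.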
        static cairo_user_data_key_t s_surfaceDataKey;
        cairo_surface_set_user_data(surface.get(), &s_surfaceDataKey, surfaceData, [](void* data) { fastFree(data); });
    } else
        surface = adoptRef(cairo_image_surface_create_for_data(bufferData, CAIRO_FORMAT_ARGB32, width, height, stride));

    ASSERT(cairo_surface_status(surface.get()) == CAIRO_STATUS_SUCCESS);
    m_image = WTFMove(surface);

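    // Honor any crop rectangle attached to the buffer as GstVideoCropMeta.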
    if (GstVideoCropMeta* cropMeta = gst_buffer_get_video_crop_meta(buffer))
        setCropRect(FloatRect(cropMeta->x, cropMeta->y, cropMeta->width, cropMeta->height));
}

ImageGStreamer::~ImageGStreamer()
{
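    // Drop the cairo surface first: in the non-copying path it points directly into the mapped frame data.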
    if (m_image)
        m_image = nullptr;

    // We keep the buffer memory mapped until the image is destroyed because the internal
    // cairo_surface_t was created using cairo_image_surface_create_for_data().
    if (m_frameMapped)
        gst_video_frame_unmap(&m_videoFrame);

    m_sample.clear();
}

} // namespace WebCore

#endif // ENABLE(VIDEO) && USE(GSTREAMER) && USE(CAIRO)