// File: VideoFrameGStreamer.h
// Source: webkit2gtk 2.51.3-1 (Debian package listing scaffolding removed).
/*
 * Copyright (C) 2022 Igalia S.L
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Library General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Library General Public License for more details.
 *
 * You should have received a copy of the GNU Library General Public License
 * along with this library; see the file COPYING.LIB.  If not, write to
 * the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
 * Boston, MA 02110-1301, USA.
 */

#pragma once

#if ENABLE(VIDEO) && USE(GSTREAMER)

#include "VideoFrame.h"
#include "VideoFrameContentHint.h"
#include "VideoFrameMetadataGStreamer.h"
#include <gst/video/video-format.h>
#include <gst/video/video-info.h>
#include <wtf/glib/GRefPtr.h>

typedef struct _GstSample GstSample;

namespace WebCore {

class PixelBuffer;
class IntSize;
class ImageGStreamer;

using DMABufFormat = std::pair<uint32_t, uint64_t>;

class VideoFrameGStreamer final : public VideoFrame {
public:
    struct Info {
        GstVideoInfo info;
        std::optional<DMABufFormat> dmaBufFormat { std::nullopt };
    };
    static Info infoFromCaps(const GRefPtr<GstCaps>&);

    struct CreateOptions {
        CreateOptions() = default;
        CreateOptions(IntSize&& presentationSize, std::optional<Info>&& info = { })
            : presentationSize(WTFMove(presentationSize))
            , info(WTFMove(info))
        { }
        IntSize presentationSize;
        std::optional<Info> info;
        Rotation rotation { VideoFrame::Rotation::None };
        MediaTime presentationTime { MediaTime::invalidTime() };
        std::optional<VideoFrameTimeMetadata> timeMetadata;
        bool isMirrored { false };
        VideoFrameContentHint contentHint { VideoFrameContentHint::None };
    };

    static Ref<VideoFrameGStreamer> create(GRefPtr<GstSample>&&, const CreateOptions&, PlatformVideoColorSpace&& = { });

    static Ref<VideoFrameGStreamer> createWrappedSample(const GRefPtr<GstSample>&, const MediaTime& presentationTime = MediaTime::invalidTime(), Rotation videoRotation = Rotation::None);

    static RefPtr<VideoFrameGStreamer> createFromPixelBuffer(Ref<PixelBuffer>&&, const IntSize& destinationSize, double frameRate, const CreateOptions&, PlatformVideoColorSpace&& = { });
    ~VideoFrameGStreamer();

    void setFrameRate(double);
    void setMaxFrameRate(double);

    void setPresentationTime(const MediaTime&);
    void setMetadataAndContentHint(std::optional<VideoFrameTimeMetadata>, VideoFrameContentHint);

    RefPtr<VideoFrameGStreamer> resizeTo(const IntSize&);

    GRefPtr<GstSample> resizedSample(const IntSize&);

    GRefPtr<GstSample> downloadSample(std::optional<GstVideoFormat> = { });

    GstSample* sample() const { return m_sample.get(); }

    RefPtr<ImageGStreamer> convertToImage();

    IntSize presentationSize() const final { return m_presentationSize; }
    uint32_t pixelFormat() const final { return GST_VIDEO_INFO_FORMAT(&m_info.info); }
    RefPtr<NativeImage> copyNativeImage() const final;

    enum class MemoryType : uint8_t {
        Unsupported,
        System,
#if USE(GSTREAMER_GL)
        GL,
#if USE(GBM)
        DMABuf
#endif
#endif
    };
    MemoryType memoryType() const { return m_memoryType; }

    const GstVideoInfo& info() const { return m_info.info; }
    std::optional<DMABufFormat> dmaBufFormat() const { return m_info.dmaBufFormat; }

    VideoFrameContentHint contentHint() const;

private:
    VideoFrameGStreamer(GRefPtr<GstSample>&&, const CreateOptions&, PlatformVideoColorSpace&&);
    VideoFrameGStreamer(const GRefPtr<GstSample>&, const CreateOptions&, PlatformVideoColorSpace&&);

    bool isGStreamer() const final { return true; }
    Ref<VideoFrame> clone() final;

    GRefPtr<GstSample> convert(GstVideoFormat, const IntSize&);

    void setMemoryTypeFromCaps();

    GRefPtr<GstSample> m_sample;
    Info m_info;
    IntSize m_presentationSize;
    MemoryType m_memoryType;
};

} // namespace WebCore

// Registers VideoFrameGStreamer with WTF's type-traits machinery so that
// checked downcasts from VideoFrame (is<>/downcast<>) dispatch on
// isGStreamer(), which this subclass overrides to return true.
SPECIALIZE_TYPE_TRAITS_BEGIN(WebCore::VideoFrameGStreamer)
static bool isType(const WebCore::VideoFrame& frame) { return frame.isGStreamer(); }
SPECIALIZE_TYPE_TRAITS_END()

#endif // ENABLE(VIDEO) && USE(GSTREAMER)