GStreamerCompatibility.hpp
#pragma once
#include <Media/Libav.hpp>
#if SCORE_HAS_LIBAV
#include <ossia/detail/hash_map.hpp>

#include <QDebug>

#include <score_plugin_media_export.h>

#include <climits>
#include <cstddef>
#include <cstdint>
#include <string>

extern "C" {
#include <libavformat/avformat.h>
#include <libavutil/pixdesc.h>
}

namespace Video
{

inline const ossia::hash_map<std::string, AVPixelFormat>& gstreamerToLibav()
{
  static const auto map = [] {
    ossia::hash_map<std::string, AVPixelFormat> format_map;

    /*
     * @GST_VIDEO_FORMAT_I420: planar 4:2:0 YUV
     * @GST_VIDEO_FORMAT_YV12: planar 4:2:0 YVU (like I420 but UV planes swapped)
     * @GST_VIDEO_FORMAT_YUY2: packed 4:2:2 YUV (Y0-U0-Y1-V0 Y2-U2-Y3-V2 Y4 ...)
     * @GST_VIDEO_FORMAT_UYVY: packed 4:2:2 YUV (U0-Y0-V0-Y1 U2-Y2-V2-Y3 U4 ...)
     * @GST_VIDEO_FORMAT_VYUY: packed 4:2:2 YUV (V0-Y0-U0-Y1 V2-Y2-U2-Y3 V4 ...)
     * @GST_VIDEO_FORMAT_AYUV: packed 4:4:4 YUV with alpha channel (A0-Y0-U0-V0 ...)
     * @GST_VIDEO_FORMAT_RGBx: sparse rgb packed into 32 bit, space last
     * @GST_VIDEO_FORMAT_BGRx: sparse reverse rgb packed into 32 bit, space last
     * @GST_VIDEO_FORMAT_xRGB: sparse rgb packed into 32 bit, space first
     * @GST_VIDEO_FORMAT_xBGR: sparse reverse rgb packed into 32 bit, space first
     * @GST_VIDEO_FORMAT_RGBA: rgb with alpha channel last
     * @GST_VIDEO_FORMAT_BGRA: reverse rgb with alpha channel last
     * @GST_VIDEO_FORMAT_ARGB: rgb with alpha channel first
     * @GST_VIDEO_FORMAT_ABGR: reverse rgb with alpha channel first
     * @GST_VIDEO_FORMAT_RGB: RGB packed into 24 bits without padding (`R-G-B-R-G-B`)
     * @GST_VIDEO_FORMAT_BGR: reverse RGB packed into 24 bits without padding (`B-G-R-B-G-R`)
     * @GST_VIDEO_FORMAT_Y41B: planar 4:1:1 YUV
     * @GST_VIDEO_FORMAT_Y42B: planar 4:2:2 YUV
     * @GST_VIDEO_FORMAT_YVYU: packed 4:2:2 YUV (Y0-V0-Y1-U0 Y2-V2-Y3-U2 Y4 ...)
     * @GST_VIDEO_FORMAT_Y444: planar 4:4:4 YUV
     * @GST_VIDEO_FORMAT_v210: packed 4:2:2 10-bit YUV, complex format
     * @GST_VIDEO_FORMAT_v216: packed 4:2:2 16-bit YUV, Y0-U0-Y1-V1 order
     * @GST_VIDEO_FORMAT_NV12: planar 4:2:0 YUV with interleaved UV plane
     * @GST_VIDEO_FORMAT_NV21: planar 4:2:0 YUV with interleaved VU plane
     * @GST_VIDEO_FORMAT_NV12_10LE32: 10-bit variant of @GST_VIDEO_FORMAT_NV12, packed into 32bit words (MSB 2 bits padding) (Since: 1.14)
     * @GST_VIDEO_FORMAT_GRAY8: 8-bit grayscale
     * @GST_VIDEO_FORMAT_GRAY10_LE32: 10-bit grayscale, packed into 32bit words (2 bits padding) (Since: 1.14)
     * @GST_VIDEO_FORMAT_GRAY16_BE: 16-bit grayscale, most significant byte first
     * @GST_VIDEO_FORMAT_GRAY16_LE: 16-bit grayscale, least significant byte first
     * @GST_VIDEO_FORMAT_v308: packed 4:4:4 YUV (Y-U-V ...)
     * @GST_VIDEO_FORMAT_IYU2: packed 4:4:4 YUV (U-Y-V ...) (Since: 1.10)
     * @GST_VIDEO_FORMAT_RGB16: rgb 5-6-5 bits per component
     * @GST_VIDEO_FORMAT_BGR16: reverse rgb 5-6-5 bits per component
     * @GST_VIDEO_FORMAT_RGB15: rgb 5-5-5 bits per component
     * @GST_VIDEO_FORMAT_BGR15: reverse rgb 5-5-5 bits per component
     * @GST_VIDEO_FORMAT_UYVP: packed 10-bit 4:2:2 YUV (U0-Y0-V0-Y1 U2-Y2-V2-Y3 U4 ...)
     * @GST_VIDEO_FORMAT_A420: planar 4:4:2:0 AYUV
     * @GST_VIDEO_FORMAT_RGB8P: 8-bit paletted RGB
     * @GST_VIDEO_FORMAT_YUV9: planar 4:1:0 YUV
     * @GST_VIDEO_FORMAT_YVU9: planar 4:1:0 YUV (like YUV9 but UV planes swapped)
     * @GST_VIDEO_FORMAT_IYU1: packed 4:1:1 YUV (Cb-Y0-Y1-Cr-Y2-Y3 ...)
     * @GST_VIDEO_FORMAT_ARGB64: rgb with alpha channel first, 16 bits per channel
     * @GST_VIDEO_FORMAT_AYUV64: packed 4:4:4 YUV with alpha channel, 16 bits per channel (A0-Y0-U0-V0 ...)
     * @GST_VIDEO_FORMAT_r210: packed 4:4:4 RGB, 10 bits per channel
     * @GST_VIDEO_FORMAT_I420_10BE: planar 4:2:0 YUV, 10 bits per channel
     * @GST_VIDEO_FORMAT_I420_10LE: planar 4:2:0 YUV, 10 bits per channel
     * @GST_VIDEO_FORMAT_I422_10BE: planar 4:2:2 YUV, 10 bits per channel
     * @GST_VIDEO_FORMAT_I422_10LE: planar 4:2:2 YUV, 10 bits per channel
     * @GST_VIDEO_FORMAT_Y444_10BE: planar 4:4:4 YUV, 10 bits per channel (Since: 1.2)
     * @GST_VIDEO_FORMAT_Y444_10LE: planar 4:4:4 YUV, 10 bits per channel (Since: 1.2)
     * @GST_VIDEO_FORMAT_GBR: planar 4:4:4 RGB, 8 bits per channel (Since: 1.2)
     * @GST_VIDEO_FORMAT_GBR_10BE: planar 4:4:4 RGB, 10 bits per channel (Since: 1.2)
     * @GST_VIDEO_FORMAT_GBR_10LE: planar 4:4:4 RGB, 10 bits per channel (Since: 1.2)
     * @GST_VIDEO_FORMAT_NV16: planar 4:2:2 YUV with interleaved UV plane (Since: 1.2)
     * @GST_VIDEO_FORMAT_NV16_10LE32: 10-bit variant of @GST_VIDEO_FORMAT_NV16, packed into 32bit words (MSB 2 bits padding) (Since: 1.14)
     * @GST_VIDEO_FORMAT_NV24: planar 4:4:4 YUV with interleaved UV plane (Since: 1.2)
     * @GST_VIDEO_FORMAT_NV12_64Z32: NV12 with 64x32 tiling in zigzag pattern (Since: 1.4)
     * @GST_VIDEO_FORMAT_A420_10BE: planar 4:4:2:0 YUV, 10 bits per channel (Since: 1.6)
     * @GST_VIDEO_FORMAT_A420_10LE: planar 4:4:2:0 YUV, 10 bits per channel (Since: 1.6)
     * @GST_VIDEO_FORMAT_A422_10BE: planar 4:4:2:2 YUV, 10 bits per channel (Since: 1.6)
     * @GST_VIDEO_FORMAT_A422_10LE: planar 4:4:2:2 YUV, 10 bits per channel (Since: 1.6)
     * @GST_VIDEO_FORMAT_A444_10BE: planar 4:4:4:4 YUV, 10 bits per channel (Since: 1.6)
     * @GST_VIDEO_FORMAT_A444_10LE: planar 4:4:4:4 YUV, 10 bits per channel (Since: 1.6)
     * @GST_VIDEO_FORMAT_NV61: planar 4:2:2 YUV with interleaved VU plane (Since: 1.6)
     * @GST_VIDEO_FORMAT_P010_10BE: planar 4:2:0 YUV with interleaved UV plane, 10 bits per channel (Since: 1.10)
     * @GST_VIDEO_FORMAT_P010_10LE: planar 4:2:0 YUV with interleaved UV plane, 10 bits per channel (Since: 1.10)
     * @GST_VIDEO_FORMAT_GBRA: planar 4:4:4:4 ARGB, 8 bits per channel (Since: 1.12)
     * @GST_VIDEO_FORMAT_GBRA_10BE: planar 4:4:4:4 ARGB, 10 bits per channel (Since: 1.12)
     * @GST_VIDEO_FORMAT_GBRA_10LE: planar 4:4:4:4 ARGB, 10 bits per channel (Since: 1.12)
     * @GST_VIDEO_FORMAT_GBR_12BE: planar 4:4:4 RGB, 12 bits per channel (Since: 1.12)
     * @GST_VIDEO_FORMAT_GBR_12LE: planar 4:4:4 RGB, 12 bits per channel (Since: 1.12)
     * @GST_VIDEO_FORMAT_GBRA_12BE: planar 4:4:4:4 ARGB, 12 bits per channel (Since: 1.12)
     * @GST_VIDEO_FORMAT_GBRA_12LE: planar 4:4:4:4 ARGB, 12 bits per channel (Since: 1.12)
     * @GST_VIDEO_FORMAT_I420_12BE: planar 4:2:0 YUV, 12 bits per channel (Since: 1.12)
     * @GST_VIDEO_FORMAT_I420_12LE: planar 4:2:0 YUV, 12 bits per channel (Since: 1.12)
     * @GST_VIDEO_FORMAT_I422_12BE: planar 4:2:2 YUV, 12 bits per channel (Since: 1.12)
     * @GST_VIDEO_FORMAT_I422_12LE: planar 4:2:2 YUV, 12 bits per channel (Since: 1.12)
     * @GST_VIDEO_FORMAT_Y444_12BE: planar 4:4:4 YUV, 12 bits per channel (Since: 1.12)
     * @GST_VIDEO_FORMAT_Y444_12LE: planar 4:4:4 YUV, 12 bits per channel (Since: 1.12)
     * @GST_VIDEO_FORMAT_NV12_10LE40: Fully packed variant of NV12_10LE32 (Since: 1.16)
     * @GST_VIDEO_FORMAT_Y210: packed 4:2:2 YUV, 10 bits per channel (Since: 1.16)
     * @GST_VIDEO_FORMAT_Y410: packed 4:4:4 YUV, 10 bits per channel(A-V-Y-U...) (Since: 1.16)
     * @GST_VIDEO_FORMAT_VUYA: packed 4:4:4 YUV with alpha channel (V0-U0-Y0-A0...) (Since: 1.16)
     * @GST_VIDEO_FORMAT_BGR10A2_LE: packed 4:4:4 RGB with alpha channel(B-G-R-A), 10 bits for R/G/B channel and MSB 2 bits for alpha channel (Since: 1.16)
     * @GST_VIDEO_FORMAT_RGB10A2_LE: packed 4:4:4 RGB with alpha channel(R-G-B-A), 10 bits for R/G/B channel and MSB 2 bits for alpha channel (Since: 1.18)
     * @GST_VIDEO_FORMAT_Y444_16BE: planar 4:4:4 YUV, 16 bits per channel (Since: 1.18)
     * @GST_VIDEO_FORMAT_Y444_16LE: planar 4:4:4 YUV, 16 bits per channel (Since: 1.18)
     * @GST_VIDEO_FORMAT_P016_BE: planar 4:2:0 YUV with interleaved UV plane, 16 bits per channel (Since: 1.18)
     * @GST_VIDEO_FORMAT_P016_LE: planar 4:2:0 YUV with interleaved UV plane, 16 bits per channel (Since: 1.18)
     * @GST_VIDEO_FORMAT_P012_BE: planar 4:2:0 YUV with interleaved UV plane, 12 bits per channel (Since: 1.18)
     * @GST_VIDEO_FORMAT_P012_LE: planar 4:2:0 YUV with interleaved UV plane, 12 bits per channel (Since: 1.18)
     * @GST_VIDEO_FORMAT_Y212_BE: packed 4:2:2 YUV, 12 bits per channel (Y-U-Y-V) (Since: 1.18)
     * @GST_VIDEO_FORMAT_Y212_LE: packed 4:2:2 YUV, 12 bits per channel (Y-U-Y-V) (Since: 1.18)
     * @GST_VIDEO_FORMAT_Y412_BE: packed 4:4:4:4 YUV, 12 bits per channel(U-Y-V-A...) (Since: 1.18)
     * @GST_VIDEO_FORMAT_Y412_LE: packed 4:4:4:4 YUV, 12 bits per channel(U-Y-V-A...) (Since: 1.18)
     * @GST_VIDEO_FORMAT_NV12_4L4: NV12 with 4x4 tiles in linear order (Since: 1.18)
     * @GST_VIDEO_FORMAT_NV12_32L32: NV12 with 32x32 tiles in linear order (Since: 1.18)
     * @GST_VIDEO_FORMAT_RGBP: planar 4:4:4 RGB, 8 bits per channel (Since: 1.20)
     * @GST_VIDEO_FORMAT_BGRP: planar 4:4:4 RGB, 8 bits per channel (Since: 1.20)
     * @GST_VIDEO_FORMAT_AV12: Planar 4:2:0 YUV with interleaved UV plane with alpha as 3rd plane (Since: 1.20)
     */

    format_map["A420"] = AV_PIX_FMT_YUVA420P;
    format_map["A420_10BE"] = AV_PIX_FMT_YUVA420P10BE;
    format_map["A420_10LE"] = AV_PIX_FMT_YUVA420P10LE;
    format_map["A422_10BE"] = AV_PIX_FMT_YUVA422P10BE;
    format_map["A422_10LE"] = AV_PIX_FMT_YUVA422P10LE;
    format_map["A444_10BE"] = AV_PIX_FMT_YUVA444P10BE;
134 format_map["A444_10LE"] = AV_PIX_FMT_YUVA444P10BE;
135 format_map["ABGR"] = AV_PIX_FMT_ABGR;
136 // format_map["ABGR64_BE"] = AV_PIX_FMT_ABGR64BE;
137 // format_map["ABGR64_LE"] = AV_PIX_FMT_ABGR64LE;
138 format_map["ARGB"] = AV_PIX_FMT_ARGB;
139 // format_map["ARGB64"] = AV_PIX_FMT_ARGB64;
140 // format_map["ARGB64_BE"] = AV_PIX_FMT_ARGB64BE;
141 // format_map["ARGB64_LE"] = AV_PIX_FMT_ARGB64LE;
142 // format_map["AV12"] = AV_PIX_FMT_AV12;
143 // format_map["AYUV"] = AV_PIX_FMT_AYUV;
144 format_map["AYUV64"] = AV_PIX_FMT_AYUV64;
145 // format_map["BGR"] = AV_PIX_FMT_BGR;
146 // format_map["BGR10A2_LE"] = AV_PIX_FMT_BGR10A2LE;
147 // format_map["BGR15"] = AV_PIX_FMT_BGR15;
148 // format_map["BGR16"] = AV_PIX_FMT_BGR16;
149 format_map["BGRA"] = AV_PIX_FMT_BGRA;
150 // format_map["BGRA64_BE"] = AV_PIX_FMT_BGRA64BE;
151 // format_map["BGRA64_LE"] = AV_PIX_FMT_BGRA64LE;
152 // format_map["BGRP"] = AV_PIX_FMT_BGRP;
153 format_map["BGRX"] = AV_PIX_FMT_BGR0;
154 // format_map["ENCODED"] = AV_PIX_FMT_ENCODED;
155 format_map["GBR"] = AV_PIX_FMT_GBRP;
156 format_map["GBRA"] = AV_PIX_FMT_GBRAP;
157 // format_map["GBRA_10BE"] = AV_PIX_FMT_GBRA_10BE;
158 // format_map["GBRA_10LE"] = AV_PIX_FMT_GBRA_10LE;
159 // format_map["GBRA_12BE"] = AV_PIX_FMT_GBRA_12BE;
160 // format_map["GBRA_12LE"] = AV_PIX_FMT_GBRA_12LE;
161 // format_map["GBR_10BE"] = AV_PIX_FMT_GBR_10BE;
162 // format_map["GBR_10LE"] = AV_PIX_FMT_GBR_10LE;
163 // format_map["GBR_12BE"] = AV_PIX_FMT_GBR_12BE;
164 // format_map["GBR_12LE"] = AV_PIX_FMT_GBR_12LE;
165 // format_map["GRAY10_LE32"] = AV_PIX_FMT_GRAY10_LE32;
166 format_map["GRAY16_BE"] = AV_PIX_FMT_GRAY16BE;
167 format_map["GRAY16_LE"] = AV_PIX_FMT_GRAY16LE;
168 format_map["GRAY8"] = AV_PIX_FMT_GRAY8;
169 format_map["I420"] = AV_PIX_FMT_YUV420P;
170 format_map["I420_10BE"] = AV_PIX_FMT_YUV420P10BE;
171 format_map["I420_10LE"] = AV_PIX_FMT_YUV420P10LE;
172 format_map["I420_12BE"] = AV_PIX_FMT_YUV420P12BE;
173 format_map["I420_12LE"] = AV_PIX_FMT_YUV420P12LE;
174 format_map["I422_10BE"] = AV_PIX_FMT_YUV422P10BE;
175 format_map["I422_10LE"] = AV_PIX_FMT_YUV422P10LE;
176 format_map["I422_12BE"] = AV_PIX_FMT_YUV422P12BE;
177 format_map["I422_12LE"] = AV_PIX_FMT_YUV422P12LE;
178 // format_map["IYU1"] = AV_PIX_FMT_IYU1;
179 // format_map["IYU2"] = AV_PIX_FMT_IYU2;
180 format_map["NV12"] = AV_PIX_FMT_NV12;
181 // format_map["NV12_10LE32"] = AV_PIX_FMT_NV12_10LE32;
182 // format_map["NV12_10LE40"] = AV_PIX_FMT_NV12_10LE40;
183 // format_map["NV12_32L32"] = AV_PIX_FMT_NV12_32L32;
184 // format_map["NV12_4L4"] = AV_PIX_FMT_NV12_4L4;
185 // format_map["NV12_64Z32"] = AV_PIX_FMT_NV12_64Z32;
186 format_map["NV16"] = AV_PIX_FMT_NV16;
187 // format_map["NV16_10LE32"] = AV_PIX_FMT_NV16_10LE32;
188 format_map["NV21"] = AV_PIX_FMT_NV21;
189 format_map["NV24"] = AV_PIX_FMT_NV24;
190 // format_map["NV61"] = AV_PIX_FMT_NV61;
191 format_map["P010_10BE"] = AV_PIX_FMT_P010BE;
192 format_map["P010_10LE"] = AV_PIX_FMT_P010LE;
193 // format_map["P012_BE"] = AV_PIX_FMT_P012BE;
194 // format_map["P012_LE"] = AV_PIX_FMT_P012LE;
195 format_map["P016_BE"] = AV_PIX_FMT_P016BE;
196 format_map["P016_LE"] = AV_PIX_FMT_P016LE;
197 // format_map["R210"] = AV_PIX_FMT_R210;
198 format_map["RGB"] = AV_PIX_FMT_RGB24;
199 // format_map["RGB10A2_LE"] = AV_PIX_FMT_RGB10A2LE;
200 // format_map["RGB15"] = AV_PIX_FMT_RGB15;
201 // format_map["RGB16"] = AV_PIX_FMT_RGB16;
202 // format_map["RGB8P"] = AV_PIX_FMT_RGB8P;
203 format_map["RGBA"] = AV_PIX_FMT_RGBA;
204 format_map["RGBA64_BE"] = AV_PIX_FMT_RGBA64BE;
205 format_map["RGBA64_LE"] = AV_PIX_FMT_RGBA64LE;
206 format_map["RGBP"] = AV_PIX_FMT_RGB24;
207 format_map["RGBX"] = AV_PIX_FMT_RGB0;
208 // format_map["UNKNOWN"] = AV_PIX_FMT_UNKNOWN;
209 // format_map["UYVP"] = AV_PIX_FMT_UYVP;
210 format_map["UYVY"] = AV_PIX_FMT_UYVY422;
211 // format_map["V210"] = AV_PIX_FMT_V210;
212 // format_map["V216"] = AV_PIX_FMT_V216;
213 // format_map["V308"] = AV_PIX_FMT_V308;
214 // format_map["VUYA"] = AV_PIX_FMT_VUYA;
215 // format_map["VYUY"] = AV_PIX_FMT_VYUY;
216 format_map["XBGR"] = AV_PIX_FMT_0BGR;
217 format_map["XRGB"] = AV_PIX_FMT_0RGB;
218 format_map["Y210"] = AV_PIX_FMT_Y210;
219 // format_map["Y212_BE"] = AV_PIX_FMT_Y212BE;
220 // format_map["Y212_LE"] = AV_PIX_FMT_Y212LE;
221 format_map["Y410"] = AV_PIX_FMT_YUV410P;
222 // format_map["Y412_BE"] = AV_PIX_FMT_Y412BE;
223 // format_map["Y412_LE"] = AV_PIX_FMT_Y412LE;
224 // format_map["Y41B"] = AV_PIX_FMT_Y41B;
225 // format_map["Y42B"] = AV_PIX_FMT_Y42B;
226 format_map["Y444"] = AV_PIX_FMT_YUV444P;
227 format_map["Y444_10BE"] = AV_PIX_FMT_YUV444P10BE;
228 format_map["Y444_10LE"] = AV_PIX_FMT_YUV444P10LE;
229 format_map["Y444_12BE"] = AV_PIX_FMT_YUV444P12BE;
230 format_map["Y444_12LE"] = AV_PIX_FMT_YUV444P12LE;
231 format_map["Y444_16BE"] = AV_PIX_FMT_YUV444P16BE;
232 format_map["Y444_16LE"] = AV_PIX_FMT_YUV444P16LE;
233 // format_map["YUV9"] = AV_PIX_FMT_YUV9;
234 format_map["YUY2"] = AV_PIX_FMT_YUYV422;
235 // format_map["YV12"] = AV_PIX_FMT_YV12;
236 // format_map["YVU9"] = AV_PIX_FMT_YVU9;
237 format_map["YVYU"] = AV_PIX_FMT_YVYU422;
238
239 return format_map;
240 }();

  return map;
}

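// Illustrative usage sketch (not part of the upstream API): convenience
// lookup that falls back to AV_PIX_FMT_NONE when a GStreamer caps format
// string has no mapping in the table above.
inline AVPixelFormat gstreamerFormatToLibav(const std::string& gst_format)
{
  const auto& map = gstreamerToLibav();
  if(auto it = map.find(gst_format); it != map.end())
    return it->second;
  return AV_PIX_FMT_NONE;
}
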
// This is used when we do not have stride info; we make a best guess...
[[nodiscard]]
inline bool initFrameFromRawData(AVFrame* frame, uint8_t* p, std::size_t sz)
{
  switch(frame->format)
  {
    case AV_PIX_FMT_YUV420P:
    case AV_PIX_FMT_YUVJ420P:
    {
      // assuming 640x480:
      // first plane is 640x480 Y
      frame->data[0] = p;
      frame->linesize[0] = frame->width;

      // second plane is 320x240 U
      frame->data[1] = frame->data[0] + frame->linesize[0] * frame->height;
      frame->linesize[1] = frame->width / 2;

      // third plane is 320x240 V
      frame->data[2] = frame->data[1] + frame->linesize[1] * frame->height / 2;
      frame->linesize[2] = frame->width / 2;
      break;
    }

    case AV_PIX_FMT_NV12:
    case AV_PIX_FMT_NV21:
    {
      // assuming 640x480:
      // first plane is 640x480 Y
      frame->data[0] = p;
      frame->linesize[0] = frame->width;

      // second plane is 640x240 interleaved UV
      frame->data[1] = frame->data[0] + frame->linesize[0] * frame->height;
      frame->linesize[1] = frame->width;

      break;
    }

    case AV_PIX_FMT_P016LE:
    case AV_PIX_FMT_P016BE:
    {
      constexpr int byte_per_component = 2;
      // assuming 640x480:
      // first plane is 640x480 Y
      frame->data[0] = p;
      frame->linesize[0] = byte_per_component * frame->width;

      // second plane is 640x240 interleaved UV
      frame->data[1] = frame->data[0] + frame->linesize[0] * frame->height;
      frame->linesize[1] = byte_per_component * frame->width;

      break;
    }

    case AV_PIX_FMT_YUV420P16LE:
    case AV_PIX_FMT_YUV420P16BE:
    {
      constexpr int byte_per_component = 2;
      // assuming 640x480:
      // first plane is 640x480 Y
      frame->data[0] = p;
      frame->linesize[0] = byte_per_component * frame->width;

      // second plane is 320x240 U
      frame->data[1] = frame->data[0] + frame->linesize[0] * frame->height;
      frame->linesize[1] = byte_per_component * frame->width / 2;

      // third plane is 320x240 V
      frame->data[2] = frame->data[1] + frame->linesize[1] * frame->height / 2;
      frame->linesize[2] = byte_per_component * frame->width / 2;
      break;
    }

    case AV_PIX_FMT_YUV444P:
    case AV_PIX_FMT_YUVJ444P:
    case AV_PIX_FMT_GBRP:
    {
      // assuming 640x480:
      // first plane is 640x480 Y
      frame->data[0] = p;
      frame->linesize[0] = frame->width;

      // second plane is 640x480 U
      frame->data[1] = frame->data[0] + frame->linesize[0] * frame->height;
      frame->linesize[1] = frame->width;

      // third plane is 640x480 V
      frame->data[2] = frame->data[1] + frame->linesize[1] * frame->height;
      frame->linesize[2] = frame->width;
      break;
    }

    case AV_PIX_FMT_YUV444P16LE:
    case AV_PIX_FMT_YUV444P16BE:
    {
      constexpr int byte_per_component = 2;
      // assuming 640x480:
      // first plane is 640x480 Y
      frame->data[0] = p;
      frame->linesize[0] = byte_per_component * frame->width;

      // second plane is 640x480 U
      frame->data[1] = frame->data[0] + frame->linesize[0] * frame->height;
      frame->linesize[1] = byte_per_component * frame->width;

      // third plane is 640x480 V
      frame->data[2] = frame->data[1] + frame->linesize[1] * frame->height;
      frame->linesize[2] = byte_per_component * frame->width;
      break;
    }

    case AV_PIX_FMT_YUVA444P:
    case AV_PIX_FMT_GBRAP:
    {
      frame->data[0] = p;
      frame->linesize[0] = frame->width;

      frame->data[1] = frame->data[0] + frame->linesize[0] * frame->height;
      frame->linesize[1] = frame->width;

      frame->data[2] = frame->data[1] + frame->linesize[1] * frame->height;
      frame->linesize[2] = frame->width;

      frame->data[3] = frame->data[2] + frame->linesize[2] * frame->height;
      frame->linesize[3] = frame->width;
      break;
    }

    case AV_PIX_FMT_GBRAP16BE:
    case AV_PIX_FMT_GBRAP16LE:
    {
      // 16 bits per component: strides are twice as wide as the 8-bit case above
      constexpr int byte_per_component = 2;
      frame->data[0] = p;
      frame->linesize[0] = byte_per_component * frame->width;

      frame->data[1] = frame->data[0] + frame->linesize[0] * frame->height;
      frame->linesize[1] = byte_per_component * frame->width;

      frame->data[2] = frame->data[1] + frame->linesize[1] * frame->height;
      frame->linesize[2] = byte_per_component * frame->width;

      frame->data[3] = frame->data[2] + frame->linesize[2] * frame->height;
      frame->linesize[3] = byte_per_component * frame->width;
      break;
    }

    case AV_PIX_FMT_YUV422P:
    case AV_PIX_FMT_YUVJ422P:
    {
      // assuming 640x480:
      // first plane is 640x480 Y
      frame->data[0] = p;
      frame->linesize[0] = frame->width;

      // second plane is 320x480 U
      frame->data[1] = frame->data[0] + frame->linesize[0] * frame->height;
      frame->linesize[1] = frame->width / 2;

      // third plane is 320x480 V
      frame->data[2] = frame->data[1] + frame->linesize[1] * frame->height;
      frame->linesize[2] = frame->width / 2;
      break;
    }

    case AV_PIX_FMT_YUV422P16LE:
    case AV_PIX_FMT_YUV422P16BE:
    {
      constexpr int byte_per_component = 2;
      // assuming 640x480:
      // first plane is 640x480 Y
      frame->data[0] = p;
      frame->linesize[0] = byte_per_component * frame->width;

      // second plane is 320x480 U
      frame->data[1] = frame->data[0] + frame->linesize[0] * frame->height;
      frame->linesize[1] = byte_per_component * frame->width / 2;

      // third plane is 320x480 V
      frame->data[2] = frame->data[1] + frame->linesize[1] * frame->height;
      frame->linesize[2] = byte_per_component * frame->width / 2;
      break;
    }

    case AV_PIX_FMT_NV24:
    case AV_PIX_FMT_NV42:
    {
      // assuming 640x480:
      // first plane is 640x480 Y
      frame->data[0] = p;
      frame->linesize[0] = frame->width;

      // second plane is 640x480 interleaved UV (two bytes per pixel)
      frame->data[1] = frame->data[0] + frame->linesize[0] * frame->height;
      frame->linesize[1] = frame->width * 2;
      break;
    }

    case AV_PIX_FMT_YUV410P:
    {
      // assuming 640x480:
      // first plane is 640x480 Y
      frame->data[0] = p;
      frame->linesize[0] = frame->width;

      // second plane is 160x120 U
      frame->data[1] = frame->data[0] + frame->linesize[0] * frame->height;
      frame->linesize[1] = frame->width / 4;

      // third plane is 160x120 V
      frame->data[2] = frame->data[1] + frame->linesize[1] * frame->height / 4;
      frame->linesize[2] = frame->width / 4;
      break;
    }

    case AV_PIX_FMT_YUV411P:
    {
      // assuming 640x480:
      // first plane is 640x480 Y
      frame->data[0] = p;
      frame->linesize[0] = frame->width;

      // second plane is 160x480 U
      frame->data[1] = frame->data[0] + frame->linesize[0] * frame->height;
      frame->linesize[1] = frame->width / 4;

      // third plane is 160x480 V
      frame->data[2] = frame->data[1] + frame->linesize[1] * frame->height;
      frame->linesize[2] = frame->width / 4;
      break;
    }

    case AV_PIX_FMT_YUV440P:
    case AV_PIX_FMT_YUVJ440P:
    case AV_PIX_FMT_YUVA420P:

    case AV_PIX_FMT_YUV420P9BE:
    case AV_PIX_FMT_YUV420P9LE:
    case AV_PIX_FMT_YUV420P10BE:
    case AV_PIX_FMT_YUV420P10LE:
    case AV_PIX_FMT_YUV422P10BE:
    case AV_PIX_FMT_YUV422P10LE:
    case AV_PIX_FMT_YUV444P9BE:
    case AV_PIX_FMT_YUV444P9LE:
    case AV_PIX_FMT_YUV444P10BE:
    case AV_PIX_FMT_YUV444P10LE:
    case AV_PIX_FMT_YUV422P9BE:
    case AV_PIX_FMT_YUV422P9LE:

    case AV_PIX_FMT_GBRP9BE:
    case AV_PIX_FMT_GBRP9LE:
    case AV_PIX_FMT_GBRP10BE:
    case AV_PIX_FMT_GBRP10LE:
    case AV_PIX_FMT_GBRP16BE:
    case AV_PIX_FMT_GBRP16LE:
    case AV_PIX_FMT_YUVA422P:
    case AV_PIX_FMT_YUVA420P9BE:
    case AV_PIX_FMT_YUVA420P9LE:
    case AV_PIX_FMT_YUVA422P9BE:
    case AV_PIX_FMT_YUVA422P9LE:
    case AV_PIX_FMT_YUVA444P9BE:
    case AV_PIX_FMT_YUVA444P9LE:
    case AV_PIX_FMT_YUVA420P10BE:
    case AV_PIX_FMT_YUVA420P10LE:
    case AV_PIX_FMT_YUVA422P10BE:
    case AV_PIX_FMT_YUVA422P10LE:
    case AV_PIX_FMT_YUVA444P10BE:
    case AV_PIX_FMT_YUVA444P10LE:
    case AV_PIX_FMT_YUVA420P16BE:
    case AV_PIX_FMT_YUVA420P16LE:
    case AV_PIX_FMT_YUVA422P16BE:
    case AV_PIX_FMT_YUVA422P16LE:
    case AV_PIX_FMT_YUVA444P16BE:
    case AV_PIX_FMT_YUVA444P16LE:

    case AV_PIX_FMT_YUV420P12BE:
    case AV_PIX_FMT_YUV420P12LE:
    case AV_PIX_FMT_YUV420P14BE:
    case AV_PIX_FMT_YUV420P14LE:
    case AV_PIX_FMT_YUV422P12BE:
    case AV_PIX_FMT_YUV422P12LE:
    case AV_PIX_FMT_YUV422P14BE:
    case AV_PIX_FMT_YUV422P14LE:
    case AV_PIX_FMT_YUV444P12BE:
    case AV_PIX_FMT_YUV444P12LE:
    case AV_PIX_FMT_YUV444P14BE:
    case AV_PIX_FMT_YUV444P14LE:
    case AV_PIX_FMT_GBRP12BE:
    case AV_PIX_FMT_GBRP12LE:
    case AV_PIX_FMT_GBRP14BE:
    case AV_PIX_FMT_GBRP14LE:
    case AV_PIX_FMT_YUVJ411P:

    case AV_PIX_FMT_YUV440P10LE:
    case AV_PIX_FMT_YUV440P10BE:
    case AV_PIX_FMT_YUV440P12LE:
    case AV_PIX_FMT_YUV440P12BE:

    case AV_PIX_FMT_GBRAP12BE:
    case AV_PIX_FMT_GBRAP12LE:

    case AV_PIX_FMT_GBRAP10BE:
    case AV_PIX_FMT_GBRAP10LE:

    case AV_PIX_FMT_GBRPF32BE:
    case AV_PIX_FMT_GBRPF32LE:
    case AV_PIX_FMT_GBRAPF32BE:
    case AV_PIX_FMT_GBRAPF32LE:

    case AV_PIX_FMT_YUVA422P12BE:
    case AV_PIX_FMT_YUVA422P12LE:
    case AV_PIX_FMT_YUVA444P12BE:
    case AV_PIX_FMT_YUVA444P12LE:
    {
      qDebug() << "TODO unhandled video format";
      return false;
    }

    case AV_PIX_FMT_MONOWHITE:
    case AV_PIX_FMT_MONOBLACK:
      frame->data[0] = p;
      frame->linesize[0] = frame->width / CHAR_BIT;
      break;

    case AV_PIX_FMT_BGR4:
    case AV_PIX_FMT_RGB4:
      frame->data[0] = p;
      frame->linesize[0] = frame->width * 4. / 8.;
      break;

    case AV_PIX_FMT_GRAY8:
    case AV_PIX_FMT_BGR8:
    case AV_PIX_FMT_RGB8:
    case AV_PIX_FMT_BGR4_BYTE:
    case AV_PIX_FMT_RGB4_BYTE:
      frame->data[0] = p;
      frame->linesize[0] = frame->width * 8. / 8.;
      break;

    case AV_PIX_FMT_GRAY9BE:
    case AV_PIX_FMT_GRAY9LE:
      frame->data[0] = p;
      frame->linesize[0] = frame->width * 9. / 8.; // ?? wtf
      break;

    case AV_PIX_FMT_P010LE:
    case AV_PIX_FMT_P010BE:
    case AV_PIX_FMT_GRAY10BE:
    case AV_PIX_FMT_GRAY10LE:
      frame->data[0] = p;
      frame->linesize[0] = frame->width * 10. / 8.; // ??
      break;

    case AV_PIX_FMT_GRAY12BE:
    case AV_PIX_FMT_GRAY12LE:
    case AV_PIX_FMT_UYYVYY411:
      frame->data[0] = p;
      frame->linesize[0] = frame->width * 12. / 8.; // ??
      break;

    case AV_PIX_FMT_GRAY14BE:
    case AV_PIX_FMT_GRAY14LE:
      frame->data[0] = p;
      frame->linesize[0] = frame->width * 14. / 8.; // ??
      break;

    case AV_PIX_FMT_NV16:
    case AV_PIX_FMT_YUYV422:
    case AV_PIX_FMT_UYVY422:
    case AV_PIX_FMT_YVYU422:
    case AV_PIX_FMT_GRAY16BE:
    case AV_PIX_FMT_GRAY16LE:
    case AV_PIX_FMT_YA8:

    case AV_PIX_FMT_RGB565BE:
    case AV_PIX_FMT_RGB565LE:
    case AV_PIX_FMT_RGB555BE:
    case AV_PIX_FMT_RGB555LE:

    case AV_PIX_FMT_BGR565BE:
    case AV_PIX_FMT_BGR565LE:
    case AV_PIX_FMT_BGR555BE:
    case AV_PIX_FMT_BGR555LE:

    case AV_PIX_FMT_RGB444LE:
    case AV_PIX_FMT_RGB444BE:
    case AV_PIX_FMT_BGR444LE:
    case AV_PIX_FMT_BGR444BE:
      frame->data[0] = p;
      frame->linesize[0] = frame->width * 16. / 8.;
      break;

    case AV_PIX_FMT_NV20LE:
    case AV_PIX_FMT_NV20BE:
    case AV_PIX_FMT_Y210BE:
    case AV_PIX_FMT_Y210LE:
      frame->data[0] = p;
      frame->linesize[0] = frame->width * 20. / 8.;
      break;

    case AV_PIX_FMT_RGB24:
    case AV_PIX_FMT_BGR24:
      frame->data[0] = p;
      frame->linesize[0] = frame->width * 24. / 8.;
      break;

    // needs a too-recent ffmpeg?
    // case AV_PIX_FMT_X2RGB10LE: ///< packed RGB 10:10:10: 30bpp: (msb)2X 10R 10G 10B(lsb): little-endian: X=unused/undefined
    // case AV_PIX_FMT_X2RGB10BE: ///< packed RGB 10:10:10: 30bpp: (msb)2X 10R 10G 10B(lsb): big-endian: X=unused/undefined
    //   frame->data[0] = p;
    //   frame->linesize[0] = frame->width * 30. / 8.;
    //   break;

    case AV_PIX_FMT_ARGB:
    case AV_PIX_FMT_RGBA:
    case AV_PIX_FMT_ABGR:
    case AV_PIX_FMT_BGRA:
    case AV_PIX_FMT_0RGB:
    case AV_PIX_FMT_RGB0:
    case AV_PIX_FMT_0BGR:
    case AV_PIX_FMT_BGR0:
    case AV_PIX_FMT_GRAYF32BE:
    case AV_PIX_FMT_GRAYF32LE:
    case AV_PIX_FMT_YA16BE:
    case AV_PIX_FMT_YA16LE:
      frame->data[0] = p;
      frame->linesize[0] = frame->width * 32. / 8.;
      break;

    case AV_PIX_FMT_XYZ12LE:
    case AV_PIX_FMT_XYZ12BE:
      frame->data[0] = p;
      frame->linesize[0] = frame->width * 36. / 8.;
      break;

    case AV_PIX_FMT_RGB48BE:
    case AV_PIX_FMT_RGB48LE:

    case AV_PIX_FMT_BGR48BE:
    case AV_PIX_FMT_BGR48LE:
      frame->data[0] = p;
      frame->linesize[0] = frame->width * 48. / 8.;
      break;

    case AV_PIX_FMT_RGBA64BE:
    case AV_PIX_FMT_RGBA64LE:
    case AV_PIX_FMT_BGRA64BE:
    case AV_PIX_FMT_BGRA64LE:
    case AV_PIX_FMT_AYUV64LE:
    case AV_PIX_FMT_AYUV64BE:
      frame->data[0] = p;
      frame->linesize[0] = frame->width * 64. / 8.;
      break;

    case AV_PIX_FMT_BAYER_BGGR8:
    case AV_PIX_FMT_BAYER_RGGB8:
    case AV_PIX_FMT_BAYER_GBRG8:
    case AV_PIX_FMT_BAYER_GRBG8:
    case AV_PIX_FMT_BAYER_BGGR16LE:
    case AV_PIX_FMT_BAYER_BGGR16BE:
    case AV_PIX_FMT_BAYER_RGGB16LE:
    case AV_PIX_FMT_BAYER_RGGB16BE:
    case AV_PIX_FMT_BAYER_GBRG16LE:
    case AV_PIX_FMT_BAYER_GBRG16BE:
    case AV_PIX_FMT_BAYER_GRBG16LE:
    case AV_PIX_FMT_BAYER_GRBG16BE:
    case AV_PIX_FMT_PAL8:
    default: {
      qDebug() << "TODO unhandled video format";
      return false;
    }
  }
  return true;
}

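// Illustrative usage sketch (not part of the upstream API): wraps an existing
// raw buffer in a freshly allocated AVFrame using the best-guess strides
// computed above. The buffer is not copied and must outlive the frame;
// assumes <libavutil/frame.h> is reachable through the includes above.
inline AVFrame* frameFromRawData(AVPixelFormat fmt, int w, int h, uint8_t* p, std::size_t sz)
{
  AVFrame* frame = av_frame_alloc();
  if(!frame)
    return nullptr;

  frame->format = fmt;
  frame->width = w;
  frame->height = h;
  if(!initFrameFromRawData(frame, p, sz))
  {
    av_frame_free(&frame);
    return nullptr;
  }
  return frame;
}
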
}
#endif