GStreamerCompatibility.hpp
#pragma once
#include <Media/Libav.hpp>
#if SCORE_HAS_LIBAV
#include <ossia/detail/hash_map.hpp>

#include <QDebug>

#include <score_plugin_media_export.h>

#include <climits>
#include <cstddef>
#include <cstdint>
#include <cstdlib>
#include <string>

extern "C" {
#include <libavformat/avformat.h>
#include <libavutil/pixdesc.h>
}

namespace Video
{

inline const ossia::hash_map<std::string, AVPixelFormat>& gstreamerToLibav()
{
  static const auto map = [] {
    ossia::hash_map<std::string, AVPixelFormat> format_map;

    /*
     * @GST_VIDEO_FORMAT_I420: planar 4:2:0 YUV
     * @GST_VIDEO_FORMAT_YV12: planar 4:2:0 YVU (like I420 but UV planes swapped)
     * @GST_VIDEO_FORMAT_YUY2: packed 4:2:2 YUV (Y0-U0-Y1-V0 Y2-U2-Y3-V2 Y4 ...)
     * @GST_VIDEO_FORMAT_UYVY: packed 4:2:2 YUV (U0-Y0-V0-Y1 U2-Y2-V2-Y3 U4 ...)
     * @GST_VIDEO_FORMAT_VYUY: packed 4:2:2 YUV (V0-Y0-U0-Y1 V2-Y2-U2-Y3 V4 ...)
     * @GST_VIDEO_FORMAT_AYUV: packed 4:4:4 YUV with alpha channel (A0-Y0-U0-V0 ...)
     * @GST_VIDEO_FORMAT_RGBx: sparse rgb packed into 32 bit, space last
     * @GST_VIDEO_FORMAT_BGRx: sparse reverse rgb packed into 32 bit, space last
     * @GST_VIDEO_FORMAT_xRGB: sparse rgb packed into 32 bit, space first
     * @GST_VIDEO_FORMAT_xBGR: sparse reverse rgb packed into 32 bit, space first
     * @GST_VIDEO_FORMAT_RGBA: rgb with alpha channel last
     * @GST_VIDEO_FORMAT_BGRA: reverse rgb with alpha channel last
     * @GST_VIDEO_FORMAT_ARGB: rgb with alpha channel first
     * @GST_VIDEO_FORMAT_ABGR: reverse rgb with alpha channel first
     * @GST_VIDEO_FORMAT_RGB: RGB packed into 24 bits without padding (`R-G-B-R-G-B`)
     * @GST_VIDEO_FORMAT_BGR: reverse RGB packed into 24 bits without padding (`B-G-R-B-G-R`)
     * @GST_VIDEO_FORMAT_Y41B: planar 4:1:1 YUV
     * @GST_VIDEO_FORMAT_Y42B: planar 4:2:2 YUV
     * @GST_VIDEO_FORMAT_YVYU: packed 4:2:2 YUV (Y0-V0-Y1-U0 Y2-V2-Y3-U2 Y4 ...)
     * @GST_VIDEO_FORMAT_Y444: planar 4:4:4 YUV
     * @GST_VIDEO_FORMAT_v210: packed 4:2:2 10-bit YUV, complex format
     * @GST_VIDEO_FORMAT_v216: packed 4:2:2 16-bit YUV, Y0-U0-Y1-V1 order
     * @GST_VIDEO_FORMAT_NV12: planar 4:2:0 YUV with interleaved UV plane
     * @GST_VIDEO_FORMAT_NV21: planar 4:2:0 YUV with interleaved VU plane
     * @GST_VIDEO_FORMAT_NV12_10LE32: 10-bit variant of @GST_VIDEO_FORMAT_NV12, packed into 32bit words (MSB 2 bits padding) (Since: 1.14)
     * @GST_VIDEO_FORMAT_GRAY8: 8-bit grayscale
     * @GST_VIDEO_FORMAT_GRAY10_LE32: 10-bit grayscale, packed into 32bit words (2 bits padding) (Since: 1.14)
     * @GST_VIDEO_FORMAT_GRAY16_BE: 16-bit grayscale, most significant byte first
     * @GST_VIDEO_FORMAT_GRAY16_LE: 16-bit grayscale, least significant byte first
     * @GST_VIDEO_FORMAT_v308: packed 4:4:4 YUV (Y-U-V ...)
     * @GST_VIDEO_FORMAT_IYU2: packed 4:4:4 YUV (U-Y-V ...) (Since: 1.10)
     * @GST_VIDEO_FORMAT_RGB16: rgb 5-6-5 bits per component
     * @GST_VIDEO_FORMAT_BGR16: reverse rgb 5-6-5 bits per component
     * @GST_VIDEO_FORMAT_RGB15: rgb 5-5-5 bits per component
     * @GST_VIDEO_FORMAT_BGR15: reverse rgb 5-5-5 bits per component
     * @GST_VIDEO_FORMAT_UYVP: packed 10-bit 4:2:2 YUV (U0-Y0-V0-Y1 U2-Y2-V2-Y3 U4 ...)
     * @GST_VIDEO_FORMAT_A420: planar 4:4:2:0 AYUV
     * @GST_VIDEO_FORMAT_RGB8P: 8-bit paletted RGB
     * @GST_VIDEO_FORMAT_YUV9: planar 4:1:0 YUV
     * @GST_VIDEO_FORMAT_YVU9: planar 4:1:0 YUV (like YUV9 but UV planes swapped)
     * @GST_VIDEO_FORMAT_IYU1: packed 4:1:1 YUV (Cb-Y0-Y1-Cr-Y2-Y3 ...)
     * @GST_VIDEO_FORMAT_ARGB64: rgb with alpha channel first, 16 bits per channel
     * @GST_VIDEO_FORMAT_AYUV64: packed 4:4:4 YUV with alpha channel, 16 bits per channel (A0-Y0-U0-V0 ...)
     * @GST_VIDEO_FORMAT_r210: packed 4:4:4 RGB, 10 bits per channel
     * @GST_VIDEO_FORMAT_I420_10BE: planar 4:2:0 YUV, 10 bits per channel
     * @GST_VIDEO_FORMAT_I420_10LE: planar 4:2:0 YUV, 10 bits per channel
     * @GST_VIDEO_FORMAT_I422_10BE: planar 4:2:2 YUV, 10 bits per channel
     * @GST_VIDEO_FORMAT_I422_10LE: planar 4:2:2 YUV, 10 bits per channel
     * @GST_VIDEO_FORMAT_Y444_10BE: planar 4:4:4 YUV, 10 bits per channel (Since: 1.2)
     * @GST_VIDEO_FORMAT_Y444_10LE: planar 4:4:4 YUV, 10 bits per channel (Since: 1.2)
     * @GST_VIDEO_FORMAT_GBR: planar 4:4:4 RGB, 8 bits per channel (Since: 1.2)
     * @GST_VIDEO_FORMAT_GBR_10BE: planar 4:4:4 RGB, 10 bits per channel (Since: 1.2)
     * @GST_VIDEO_FORMAT_GBR_10LE: planar 4:4:4 RGB, 10 bits per channel (Since: 1.2)
     * @GST_VIDEO_FORMAT_NV16: planar 4:2:2 YUV with interleaved UV plane (Since: 1.2)
     * @GST_VIDEO_FORMAT_NV16_10LE32: 10-bit variant of @GST_VIDEO_FORMAT_NV16, packed into 32bit words (MSB 2 bits padding) (Since: 1.14)
     * @GST_VIDEO_FORMAT_NV24: planar 4:4:4 YUV with interleaved UV plane (Since: 1.2)
     * @GST_VIDEO_FORMAT_NV12_64Z32: NV12 with 64x32 tiling in zigzag pattern (Since: 1.4)
     * @GST_VIDEO_FORMAT_A420_10BE: planar 4:4:2:0 YUV, 10 bits per channel (Since: 1.6)
     * @GST_VIDEO_FORMAT_A420_10LE: planar 4:4:2:0 YUV, 10 bits per channel (Since: 1.6)
     * @GST_VIDEO_FORMAT_A422_10BE: planar 4:4:2:2 YUV, 10 bits per channel (Since: 1.6)
     * @GST_VIDEO_FORMAT_A422_10LE: planar 4:4:2:2 YUV, 10 bits per channel (Since: 1.6)
     * @GST_VIDEO_FORMAT_A444_10BE: planar 4:4:4:4 YUV, 10 bits per channel (Since: 1.6)
     * @GST_VIDEO_FORMAT_A444_10LE: planar 4:4:4:4 YUV, 10 bits per channel (Since: 1.6)
     * @GST_VIDEO_FORMAT_NV61: planar 4:2:2 YUV with interleaved VU plane (Since: 1.6)
     * @GST_VIDEO_FORMAT_P010_10BE: planar 4:2:0 YUV with interleaved UV plane, 10 bits per channel (Since: 1.10)
     * @GST_VIDEO_FORMAT_P010_10LE: planar 4:2:0 YUV with interleaved UV plane, 10 bits per channel (Since: 1.10)
     * @GST_VIDEO_FORMAT_GBRA: planar 4:4:4:4 ARGB, 8 bits per channel (Since: 1.12)
     * @GST_VIDEO_FORMAT_GBRA_10BE: planar 4:4:4:4 ARGB, 10 bits per channel (Since: 1.12)
     * @GST_VIDEO_FORMAT_GBRA_10LE: planar 4:4:4:4 ARGB, 10 bits per channel (Since: 1.12)
     * @GST_VIDEO_FORMAT_GBR_12BE: planar 4:4:4 RGB, 12 bits per channel (Since: 1.12)
     * @GST_VIDEO_FORMAT_GBR_12LE: planar 4:4:4 RGB, 12 bits per channel (Since: 1.12)
     * @GST_VIDEO_FORMAT_GBRA_12BE: planar 4:4:4:4 ARGB, 12 bits per channel (Since: 1.12)
     * @GST_VIDEO_FORMAT_GBRA_12LE: planar 4:4:4:4 ARGB, 12 bits per channel (Since: 1.12)
     * @GST_VIDEO_FORMAT_I420_12BE: planar 4:2:0 YUV, 12 bits per channel (Since: 1.12)
     * @GST_VIDEO_FORMAT_I420_12LE: planar 4:2:0 YUV, 12 bits per channel (Since: 1.12)
     * @GST_VIDEO_FORMAT_I422_12BE: planar 4:2:2 YUV, 12 bits per channel (Since: 1.12)
     * @GST_VIDEO_FORMAT_I422_12LE: planar 4:2:2 YUV, 12 bits per channel (Since: 1.12)
     * @GST_VIDEO_FORMAT_Y444_12BE: planar 4:4:4 YUV, 12 bits per channel (Since: 1.12)
     * @GST_VIDEO_FORMAT_Y444_12LE: planar 4:4:4 YUV, 12 bits per channel (Since: 1.12)
     * @GST_VIDEO_FORMAT_NV12_10LE40: Fully packed variant of NV12_10LE32 (Since: 1.16)
     * @GST_VIDEO_FORMAT_Y210: packed 4:2:2 YUV, 10 bits per channel (Since: 1.16)
     * @GST_VIDEO_FORMAT_Y410: packed 4:4:4 YUV, 10 bits per channel(A-V-Y-U...) (Since: 1.16)
     * @GST_VIDEO_FORMAT_VUYA: packed 4:4:4 YUV with alpha channel (V0-U0-Y0-A0...) (Since: 1.16)
     * @GST_VIDEO_FORMAT_BGR10A2_LE: packed 4:4:4 RGB with alpha channel(B-G-R-A), 10 bits for R/G/B channel and MSB 2 bits for alpha channel (Since: 1.16)
     * @GST_VIDEO_FORMAT_RGB10A2_LE: packed 4:4:4 RGB with alpha channel(R-G-B-A), 10 bits for R/G/B channel and MSB 2 bits for alpha channel (Since: 1.18)
     * @GST_VIDEO_FORMAT_Y444_16BE: planar 4:4:4 YUV, 16 bits per channel (Since: 1.18)
     * @GST_VIDEO_FORMAT_Y444_16LE: planar 4:4:4 YUV, 16 bits per channel (Since: 1.18)
     * @GST_VIDEO_FORMAT_P016_BE: planar 4:2:0 YUV with interleaved UV plane, 16 bits per channel (Since: 1.18)
     * @GST_VIDEO_FORMAT_P016_LE: planar 4:2:0 YUV with interleaved UV plane, 16 bits per channel (Since: 1.18)
     * @GST_VIDEO_FORMAT_P012_BE: planar 4:2:0 YUV with interleaved UV plane, 12 bits per channel (Since: 1.18)
     * @GST_VIDEO_FORMAT_P012_LE: planar 4:2:0 YUV with interleaved UV plane, 12 bits per channel (Since: 1.18)
     * @GST_VIDEO_FORMAT_Y212_BE: packed 4:2:2 YUV, 12 bits per channel (Y-U-Y-V) (Since: 1.18)
     * @GST_VIDEO_FORMAT_Y212_LE: packed 4:2:2 YUV, 12 bits per channel (Y-U-Y-V) (Since: 1.18)
     * @GST_VIDEO_FORMAT_Y412_BE: packed 4:4:4:4 YUV, 12 bits per channel(U-Y-V-A...) (Since: 1.18)
     * @GST_VIDEO_FORMAT_Y412_LE: packed 4:4:4:4 YUV, 12 bits per channel(U-Y-V-A...) (Since: 1.18)
     * @GST_VIDEO_FORMAT_NV12_4L4: NV12 with 4x4 tiles in linear order (Since: 1.18)
     * @GST_VIDEO_FORMAT_NV12_32L32: NV12 with 32x32 tiles in linear order (Since: 1.18)
     * @GST_VIDEO_FORMAT_RGBP: planar 4:4:4 RGB, 8 bits per channel (Since: 1.20)
     * @GST_VIDEO_FORMAT_BGRP: planar 4:4:4 RGB, 8 bits per channel (Since: 1.20)
     * @GST_VIDEO_FORMAT_AV12: Planar 4:2:0 YUV with interleaved UV plane with alpha as 3rd plane (Since: 1.20)
     */

    format_map["A420"] = AV_PIX_FMT_YUVA420P;
    format_map["A420_10BE"] = AV_PIX_FMT_YUVA420P10BE;
    format_map["A420_10LE"] = AV_PIX_FMT_YUVA420P10LE;
    format_map["A422_10BE"] = AV_PIX_FMT_YUVA422P10BE;
    format_map["A422_10LE"] = AV_PIX_FMT_YUVA422P10LE;
    format_map["A444_10BE"] = AV_PIX_FMT_YUVA444P10BE;
    format_map["A444_10LE"] = AV_PIX_FMT_YUVA444P10LE;
    format_map["ABGR"] = AV_PIX_FMT_ABGR;
    // format_map["ABGR64_BE"] = AV_PIX_FMT_ABGR64BE;
    // format_map["ABGR64_LE"] = AV_PIX_FMT_ABGR64LE;
    format_map["ARGB"] = AV_PIX_FMT_ARGB;
    // format_map["ARGB64"] = AV_PIX_FMT_ARGB64;
    // format_map["ARGB64_BE"] = AV_PIX_FMT_ARGB64BE;
    // format_map["ARGB64_LE"] = AV_PIX_FMT_ARGB64LE;
    // format_map["AV12"] = AV_PIX_FMT_AV12;
    // format_map["AYUV"] = AV_PIX_FMT_AYUV;
    format_map["AYUV64"] = AV_PIX_FMT_AYUV64;
    // format_map["BGR"] = AV_PIX_FMT_BGR;
    // format_map["BGR10A2_LE"] = AV_PIX_FMT_BGR10A2LE;
    // format_map["BGR15"] = AV_PIX_FMT_BGR15;
    // format_map["BGR16"] = AV_PIX_FMT_BGR16;
    format_map["BGRA"] = AV_PIX_FMT_BGRA;
    // format_map["BGRA64_BE"] = AV_PIX_FMT_BGRA64BE;
    // format_map["BGRA64_LE"] = AV_PIX_FMT_BGRA64LE;
    // format_map["BGRP"] = AV_PIX_FMT_BGRP;
    format_map["BGRX"] = AV_PIX_FMT_BGR0;
    // format_map["ENCODED"] = AV_PIX_FMT_ENCODED;
    format_map["GBR"] = AV_PIX_FMT_GBRP;
    format_map["GBRA"] = AV_PIX_FMT_GBRAP;
    // format_map["GBRA_10BE"] = AV_PIX_FMT_GBRA_10BE;
    // format_map["GBRA_10LE"] = AV_PIX_FMT_GBRA_10LE;
    // format_map["GBRA_12BE"] = AV_PIX_FMT_GBRA_12BE;
    // format_map["GBRA_12LE"] = AV_PIX_FMT_GBRA_12LE;
    // format_map["GBR_10BE"] = AV_PIX_FMT_GBR_10BE;
    // format_map["GBR_10LE"] = AV_PIX_FMT_GBR_10LE;
    // format_map["GBR_12BE"] = AV_PIX_FMT_GBR_12BE;
    // format_map["GBR_12LE"] = AV_PIX_FMT_GBR_12LE;
    // format_map["GRAY10_LE32"] = AV_PIX_FMT_GRAY10_LE32;
    format_map["GRAY16_BE"] = AV_PIX_FMT_GRAY16BE;
    format_map["GRAY16_LE"] = AV_PIX_FMT_GRAY16LE;
    format_map["GRAY8"] = AV_PIX_FMT_GRAY8;
    format_map["I420"] = AV_PIX_FMT_YUV420P;
    format_map["I420_10BE"] = AV_PIX_FMT_YUV420P10BE;
    format_map["I420_10LE"] = AV_PIX_FMT_YUV420P10LE;
    format_map["I420_12BE"] = AV_PIX_FMT_YUV420P12BE;
    format_map["I420_12LE"] = AV_PIX_FMT_YUV420P12LE;
    format_map["I422_10BE"] = AV_PIX_FMT_YUV422P10BE;
    format_map["I422_10LE"] = AV_PIX_FMT_YUV422P10LE;
    format_map["I422_12BE"] = AV_PIX_FMT_YUV422P12BE;
    format_map["I422_12LE"] = AV_PIX_FMT_YUV422P12LE;
    // format_map["IYU1"] = AV_PIX_FMT_IYU1;
    // format_map["IYU2"] = AV_PIX_FMT_IYU2;
    format_map["NV12"] = AV_PIX_FMT_NV12;
    // format_map["NV12_10LE32"] = AV_PIX_FMT_NV12_10LE32;
    // format_map["NV12_10LE40"] = AV_PIX_FMT_NV12_10LE40;
    // format_map["NV12_32L32"] = AV_PIX_FMT_NV12_32L32;
    // format_map["NV12_4L4"] = AV_PIX_FMT_NV12_4L4;
    // format_map["NV12_64Z32"] = AV_PIX_FMT_NV12_64Z32;
    format_map["NV16"] = AV_PIX_FMT_NV16;
    // format_map["NV16_10LE32"] = AV_PIX_FMT_NV16_10LE32;
    format_map["NV21"] = AV_PIX_FMT_NV21;
    format_map["NV24"] = AV_PIX_FMT_NV24;
    // format_map["NV61"] = AV_PIX_FMT_NV61;
    format_map["P010_10BE"] = AV_PIX_FMT_P010BE;
    format_map["P010_10LE"] = AV_PIX_FMT_P010LE;
    // format_map["P012_BE"] = AV_PIX_FMT_P012BE;
    // format_map["P012_LE"] = AV_PIX_FMT_P012LE;
    format_map["P016_BE"] = AV_PIX_FMT_P016BE;
    format_map["P016_LE"] = AV_PIX_FMT_P016LE;
    // format_map["R210"] = AV_PIX_FMT_R210;
    format_map["RGB"] = AV_PIX_FMT_RGB24;
    // format_map["RGB10A2_LE"] = AV_PIX_FMT_RGB10A2LE;
    // format_map["RGB15"] = AV_PIX_FMT_RGB15;
    // format_map["RGB16"] = AV_PIX_FMT_RGB16;
    // format_map["RGB8P"] = AV_PIX_FMT_RGB8P;
    format_map["RGBA"] = AV_PIX_FMT_RGBA;
    format_map["RGBA64_BE"] = AV_PIX_FMT_RGBA64BE;
    format_map["RGBA64_LE"] = AV_PIX_FMT_RGBA64LE;
    // GStreamer RGBP is planar R-G-B; libav's only planar RGB layout is G-B-R (AV_PIX_FMT_GBRP),
    // so there is no exact equivalent. Mapping it to packed AV_PIX_FMT_RGB24 was incorrect.
    // format_map["RGBP"] = AV_PIX_FMT_RGB24;
    format_map["RGBX"] = AV_PIX_FMT_RGB0;
    // format_map["UNKNOWN"] = AV_PIX_FMT_UNKNOWN;
    // format_map["UYVP"] = AV_PIX_FMT_UYVP;
    format_map["UYVY"] = AV_PIX_FMT_UYVY422;
    // format_map["V210"] = AV_PIX_FMT_V210;
    // format_map["V216"] = AV_PIX_FMT_V216;
    // format_map["V308"] = AV_PIX_FMT_V308;
    // format_map["VUYA"] = AV_PIX_FMT_VUYA;
    // format_map["VYUY"] = AV_PIX_FMT_VYUY;
    format_map["XBGR"] = AV_PIX_FMT_0BGR;
    format_map["XRGB"] = AV_PIX_FMT_0RGB;
    format_map["Y210"] = AV_PIX_FMT_Y210;
    // format_map["Y212_BE"] = AV_PIX_FMT_Y212BE;
    // format_map["Y212_LE"] = AV_PIX_FMT_Y212LE;
    // GStreamer Y410 is packed 4:4:4 YUV, 10 bits per channel; libav's AV_PIX_FMT_YUV410P
    // is planar 4:1:0, so this mapping was wrong and there is no direct equivalent here.
    // format_map["Y410"] = AV_PIX_FMT_YUV410P;
    // format_map["Y412_BE"] = AV_PIX_FMT_Y412BE;
    // format_map["Y412_LE"] = AV_PIX_FMT_Y412LE;
    // format_map["Y41B"] = AV_PIX_FMT_Y41B;
    // format_map["Y42B"] = AV_PIX_FMT_Y42B;
    format_map["Y444"] = AV_PIX_FMT_YUV444P;
    format_map["Y444_10BE"] = AV_PIX_FMT_YUV444P10BE;
    format_map["Y444_10LE"] = AV_PIX_FMT_YUV444P10LE;
    format_map["Y444_12BE"] = AV_PIX_FMT_YUV444P12BE;
    format_map["Y444_12LE"] = AV_PIX_FMT_YUV444P12LE;
    format_map["Y444_16BE"] = AV_PIX_FMT_YUV444P16BE;
    format_map["Y444_16LE"] = AV_PIX_FMT_YUV444P16LE;
    // format_map["YUV9"] = AV_PIX_FMT_YUV9;
    // format_map["YUY2"] = AV_PIX_FMT_YUY2;
    // format_map["YV12"] = AV_PIX_FMT_YV12;
    // format_map["YVU9"] = AV_PIX_FMT_YVU9;
    format_map["YVYU"] = AV_PIX_FMT_YVYU422;

    return format_map;
  }();

  return map;
}
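
// A minimal usage sketch: look a GStreamer format name up in the table above and fall
// back to AV_PIX_FMT_NONE when there is no known libav equivalent. This helper is
// illustrative only and is not referenced elsewhere.
inline AVPixelFormat gstreamerFormatToLibav(const std::string& gst_format)
{
  const auto& map = gstreamerToLibav();
  if(auto it = map.find(gst_format); it != map.end())
    return it->second;
  return AV_PIX_FMT_NONE;
}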

// This is used when we do not have stride info: we make a best guess, assuming the
// buffer is tightly packed, with no padding between rows or between planes.
inline void initFrameFromRawData(AVFrame* frame, uint8_t* p, std::size_t sz)
{
  switch(frame->format)
  {
    case AV_PIX_FMT_YUV420P:
    case AV_PIX_FMT_YUVJ420P:
    {
      // assuming 640x480:
      // first plane is 640x480 Y
      frame->data[0] = p;
      frame->linesize[0] = frame->width;

      // second plane is 320x240 U
      frame->data[1] = frame->data[0] + frame->linesize[0] * frame->height;
      frame->linesize[1] = frame->width / 2;

      // third plane is 320x240 V
      frame->data[2] = frame->data[1] + frame->linesize[1] * frame->height / 2;
      frame->linesize[2] = frame->width / 2;
      break;
    }

    case AV_PIX_FMT_NV12:
    case AV_PIX_FMT_NV21:
    {
      // assuming 640x480:
      // first plane is 640x480 Y
      frame->data[0] = p;
      frame->linesize[0] = frame->width;

      // second plane is 640x240: interleaved UV (VU for NV21) at half vertical resolution
      frame->data[1] = frame->data[0] + frame->linesize[0] * frame->height;
      frame->linesize[1] = frame->width;

      break;
    }

    case AV_PIX_FMT_P010LE:
    case AV_PIX_FMT_P010BE:
    case AV_PIX_FMT_P016LE:
    case AV_PIX_FMT_P016BE:
    {
      // P010 and P016 share the same memory layout: each sample occupies a 16-bit word
      constexpr int byte_per_component = 2;
      // assuming 640x480:
      // first plane is 640x480 Y
      frame->data[0] = p;
      frame->linesize[0] = byte_per_component * frame->width;

      // second plane is 640x240: interleaved UV at half vertical resolution
      frame->data[1] = frame->data[0] + frame->linesize[0] * frame->height;
      frame->linesize[1] = byte_per_component * frame->width;

      break;
    }

    case AV_PIX_FMT_YUV420P16LE:
    case AV_PIX_FMT_YUV420P16BE:
    {
      constexpr int byte_per_component = 2;
      // assuming 640x480:
      // first plane is 640x480 Y
      frame->data[0] = p;
      frame->linesize[0] = byte_per_component * frame->width;

      // second plane is 320x240 U
      frame->data[1] = frame->data[0] + frame->linesize[0] * frame->height;
      frame->linesize[1] = byte_per_component * frame->width / 2;

      // third plane is 320x240 V
      frame->data[2] = frame->data[1] + frame->linesize[1] * frame->height / 2;
      frame->linesize[2] = byte_per_component * frame->width / 2;
      break;
    }

    case AV_PIX_FMT_YUV444P:
    case AV_PIX_FMT_YUVJ444P:
    case AV_PIX_FMT_GBRP:
    {
      // assuming 640x480:
      // first plane is 640x480 Y
      frame->data[0] = p;
      frame->linesize[0] = frame->width;

      // second plane is 640x480 U
      frame->data[1] = frame->data[0] + frame->linesize[0] * frame->height;
      frame->linesize[1] = frame->width;

      // third plane is 640x480 V
      frame->data[2] = frame->data[1] + frame->linesize[1] * frame->height;
      frame->linesize[2] = frame->width;
      break;
    }

    case AV_PIX_FMT_YUV444P16LE:
    case AV_PIX_FMT_YUV444P16BE:
    {
      constexpr int byte_per_component = 2;
      // assuming 640x480:
      // first plane is 640x480 Y
      frame->data[0] = p;
      frame->linesize[0] = byte_per_component * frame->width;

      // second plane is 640x480 U
      frame->data[1] = frame->data[0] + frame->linesize[0] * frame->height;
      frame->linesize[1] = byte_per_component * frame->width;

      // third plane is 640x480 V
      frame->data[2] = frame->data[1] + frame->linesize[1] * frame->height;
      frame->linesize[2] = byte_per_component * frame->width;
      break;
    }

    case AV_PIX_FMT_YUVA444P:
    case AV_PIX_FMT_GBRAP:
    {
      // four full-resolution planes, 1 byte per component
      frame->data[0] = p;
      frame->linesize[0] = frame->width;

      frame->data[1] = frame->data[0] + frame->linesize[0] * frame->height;
      frame->linesize[1] = frame->width;

      frame->data[2] = frame->data[1] + frame->linesize[1] * frame->height;
      frame->linesize[2] = frame->width;

      frame->data[3] = frame->data[2] + frame->linesize[2] * frame->height;
      frame->linesize[3] = frame->width;
      break;
    }

    case AV_PIX_FMT_GBRAP16BE:
    case AV_PIX_FMT_GBRAP16LE:
    {
      // four full-resolution planes, 2 bytes per component
      constexpr int byte_per_component = 2;
      frame->data[0] = p;
      frame->linesize[0] = byte_per_component * frame->width;

      frame->data[1] = frame->data[0] + frame->linesize[0] * frame->height;
      frame->linesize[1] = byte_per_component * frame->width;

      frame->data[2] = frame->data[1] + frame->linesize[1] * frame->height;
      frame->linesize[2] = byte_per_component * frame->width;

      frame->data[3] = frame->data[2] + frame->linesize[2] * frame->height;
      frame->linesize[3] = byte_per_component * frame->width;
      break;
    }

    case AV_PIX_FMT_YUV422P:
    case AV_PIX_FMT_YUVJ422P:
    {
      // assuming 640x480:
      // first plane is 640x480 Y
      frame->data[0] = p;
      frame->linesize[0] = frame->width;

      // second plane is 320x480 U
      frame->data[1] = frame->data[0] + frame->linesize[0] * frame->height;
      frame->linesize[1] = frame->width / 2;

      // third plane is 320x480 V
      frame->data[2] = frame->data[1] + frame->linesize[1] * frame->height;
      frame->linesize[2] = frame->width / 2;
      break;
    }

    case AV_PIX_FMT_YUV422P16LE:
    case AV_PIX_FMT_YUV422P16BE:
    {
      constexpr int byte_per_component = 2;
      // assuming 640x480:
      // first plane is 640x480 Y
      frame->data[0] = p;
      frame->linesize[0] = byte_per_component * frame->width;

      // second plane is 320x480 U
      frame->data[1] = frame->data[0] + frame->linesize[0] * frame->height;
      frame->linesize[1] = byte_per_component * frame->width / 2;

      // third plane is 320x480 V
      frame->data[2] = frame->data[1] + frame->linesize[1] * frame->height;
      frame->linesize[2] = byte_per_component * frame->width / 2;
      break;
    }

    case AV_PIX_FMT_NV24:
    case AV_PIX_FMT_NV42:
    {
      // assuming 640x480:
      // first plane is 640x480 Y
      frame->data[0] = p;
      frame->linesize[0] = frame->width;

      // second plane is 640x480 interleaved UV (VU for NV42), 2 bytes per pixel
      frame->data[1] = frame->data[0] + frame->linesize[0] * frame->height;
      frame->linesize[1] = frame->width * 2;
      break;
    }

    case AV_PIX_FMT_YUV410P:
    {
      // assuming 640x480:
      // first plane is 640x480 Y
      frame->data[0] = p;
      frame->linesize[0] = frame->width;

      // second plane is 160x120 U
      frame->data[1] = frame->data[0] + frame->linesize[0] * frame->height;
      frame->linesize[1] = frame->width / 4;

      // third plane is 160x120 V
      frame->data[2] = frame->data[1] + frame->linesize[1] * frame->height / 4;
      frame->linesize[2] = frame->width / 4;
      break;
    }

    case AV_PIX_FMT_YUV411P:
    {
      // assuming 640x480:
      // first plane is 640x480 Y
      frame->data[0] = p;
      frame->linesize[0] = frame->width;

      // second plane is 160x480 U
      frame->data[1] = frame->data[0] + frame->linesize[0] * frame->height;
      frame->linesize[1] = frame->width / 4;

      // third plane is 160x480 V
      frame->data[2] = frame->data[1] + frame->linesize[1] * frame->height;
      frame->linesize[2] = frame->width / 4;
      break;
    }

    case AV_PIX_FMT_YUV440P:
    case AV_PIX_FMT_YUVJ440P:
    case AV_PIX_FMT_YUVA420P:

    case AV_PIX_FMT_YUV420P9BE:
    case AV_PIX_FMT_YUV420P9LE:
    case AV_PIX_FMT_YUV420P10BE:
    case AV_PIX_FMT_YUV420P10LE:
    case AV_PIX_FMT_YUV422P10BE:
    case AV_PIX_FMT_YUV422P10LE:
    case AV_PIX_FMT_YUV444P9BE:
    case AV_PIX_FMT_YUV444P9LE:
    case AV_PIX_FMT_YUV444P10BE:
    case AV_PIX_FMT_YUV444P10LE:
    case AV_PIX_FMT_YUV422P9BE:
    case AV_PIX_FMT_YUV422P9LE:

    case AV_PIX_FMT_GBRP9BE:
    case AV_PIX_FMT_GBRP9LE:
    case AV_PIX_FMT_GBRP10BE:
    case AV_PIX_FMT_GBRP10LE:
    case AV_PIX_FMT_GBRP16BE:
    case AV_PIX_FMT_GBRP16LE:
    case AV_PIX_FMT_YUVA422P:
    case AV_PIX_FMT_YUVA420P9BE:
    case AV_PIX_FMT_YUVA420P9LE:
    case AV_PIX_FMT_YUVA422P9BE:
    case AV_PIX_FMT_YUVA422P9LE:
    case AV_PIX_FMT_YUVA444P9BE:
    case AV_PIX_FMT_YUVA444P9LE:
    case AV_PIX_FMT_YUVA420P10BE:
    case AV_PIX_FMT_YUVA420P10LE:
    case AV_PIX_FMT_YUVA422P10BE:
    case AV_PIX_FMT_YUVA422P10LE:
    case AV_PIX_FMT_YUVA444P10BE:
    case AV_PIX_FMT_YUVA444P10LE:
    case AV_PIX_FMT_YUVA420P16BE:
    case AV_PIX_FMT_YUVA420P16LE:
    case AV_PIX_FMT_YUVA422P16BE:
    case AV_PIX_FMT_YUVA422P16LE:
    case AV_PIX_FMT_YUVA444P16BE:
    case AV_PIX_FMT_YUVA444P16LE:

    case AV_PIX_FMT_YUV420P12BE:
    case AV_PIX_FMT_YUV420P12LE:
    case AV_PIX_FMT_YUV420P14BE:
    case AV_PIX_FMT_YUV420P14LE:
    case AV_PIX_FMT_YUV422P12BE:
    case AV_PIX_FMT_YUV422P12LE:
    case AV_PIX_FMT_YUV422P14BE:
    case AV_PIX_FMT_YUV422P14LE:
    case AV_PIX_FMT_YUV444P12BE:
    case AV_PIX_FMT_YUV444P12LE:
    case AV_PIX_FMT_YUV444P14BE:
    case AV_PIX_FMT_YUV444P14LE:
    case AV_PIX_FMT_GBRP12BE:
    case AV_PIX_FMT_GBRP12LE:
    case AV_PIX_FMT_GBRP14BE:
    case AV_PIX_FMT_GBRP14LE:
    case AV_PIX_FMT_YUVJ411P:

    case AV_PIX_FMT_YUV440P10LE:
    case AV_PIX_FMT_YUV440P10BE:
    case AV_PIX_FMT_YUV440P12LE:
    case AV_PIX_FMT_YUV440P12BE:

    case AV_PIX_FMT_GBRAP12BE:
    case AV_PIX_FMT_GBRAP12LE:

    case AV_PIX_FMT_GBRAP10BE:
    case AV_PIX_FMT_GBRAP10LE:

    case AV_PIX_FMT_GBRPF32BE:
    case AV_PIX_FMT_GBRPF32LE:
    case AV_PIX_FMT_GBRAPF32BE:
    case AV_PIX_FMT_GBRAPF32LE:

    case AV_PIX_FMT_YUVA422P12BE:
    case AV_PIX_FMT_YUVA422P12LE:
    case AV_PIX_FMT_YUVA444P12BE:
    case AV_PIX_FMT_YUVA444P12LE:
    {
      qDebug() << "TODO unhandled video format";
      free(p);
      break;
    }

    case AV_PIX_FMT_MONOWHITE:
    case AV_PIX_FMT_MONOBLACK:
      frame->data[0] = p;
      frame->linesize[0] = frame->width / CHAR_BIT;
      break;

    case AV_PIX_FMT_BGR4:
    case AV_PIX_FMT_RGB4:
      frame->data[0] = p;
      frame->linesize[0] = frame->width * 4. / 8.;
      break;

    case AV_PIX_FMT_GRAY8:
    case AV_PIX_FMT_BGR8:
    case AV_PIX_FMT_RGB8:
    case AV_PIX_FMT_BGR4_BYTE:
    case AV_PIX_FMT_RGB4_BYTE:
      frame->data[0] = p;
      frame->linesize[0] = frame->width * 8. / 8.;
      break;

    case AV_PIX_FMT_GRAY9BE:
    case AV_PIX_FMT_GRAY9LE:
    case AV_PIX_FMT_GRAY10BE:
    case AV_PIX_FMT_GRAY10LE:
    case AV_PIX_FMT_GRAY12BE:
    case AV_PIX_FMT_GRAY12LE:
    case AV_PIX_FMT_GRAY14BE:
    case AV_PIX_FMT_GRAY14LE:
      // libav stores 9- to 14-bit grayscale samples in 16-bit words
      frame->data[0] = p;
      frame->linesize[0] = frame->width * 2;
      break;

    case AV_PIX_FMT_UYYVYY411:
      // packed 4:1:1, 12 bits per pixel on average
      frame->data[0] = p;
      frame->linesize[0] = frame->width * 12. / 8.;
      break;

    case AV_PIX_FMT_NV16:
    {
      // semi-planar 4:2:2: full-resolution Y plane followed by a full-height interleaved UV plane
      frame->data[0] = p;
      frame->linesize[0] = frame->width;

      frame->data[1] = frame->data[0] + frame->linesize[0] * frame->height;
      frame->linesize[1] = frame->width;
      break;
    }

    case AV_PIX_FMT_YUYV422:
    case AV_PIX_FMT_UYVY422:
    case AV_PIX_FMT_YVYU422:
    case AV_PIX_FMT_GRAY16BE:
    case AV_PIX_FMT_GRAY16LE:
    case AV_PIX_FMT_YA8:

    case AV_PIX_FMT_RGB565BE:
    case AV_PIX_FMT_RGB565LE:
    case AV_PIX_FMT_RGB555BE:
    case AV_PIX_FMT_RGB555LE:

    case AV_PIX_FMT_BGR565BE:
    case AV_PIX_FMT_BGR565LE:
    case AV_PIX_FMT_BGR555BE:
    case AV_PIX_FMT_BGR555LE:

    case AV_PIX_FMT_RGB444LE:
    case AV_PIX_FMT_RGB444BE:
    case AV_PIX_FMT_BGR444LE:
    case AV_PIX_FMT_BGR444BE:
      frame->data[0] = p;
      frame->linesize[0] = frame->width * 16. / 8.;
      break;

    case AV_PIX_FMT_NV20LE:
    case AV_PIX_FMT_NV20BE:
      frame->data[0] = p;
      frame->linesize[0] = frame->width * 20. / 8.;
      break;

    case AV_PIX_FMT_Y210BE:
    case AV_PIX_FMT_Y210LE:
      // packed 4:2:2 with 16-bit samples: 4 bytes per pixel
      frame->data[0] = p;
      frame->linesize[0] = frame->width * 4;
      break;

    case AV_PIX_FMT_RGB24:
    case AV_PIX_FMT_BGR24:
      frame->data[0] = p;
      frame->linesize[0] = frame->width * 24. / 8.;
      break;

    // requires a newer FFmpeg than we target:
    // case AV_PIX_FMT_X2RGB10LE: ///< packed RGB 10:10:10: 30bpp: (msb)2X 10R 10G 10B(lsb): little-endian: X=unused/undefined
    // case AV_PIX_FMT_X2RGB10BE: ///< packed RGB 10:10:10: 30bpp: (msb)2X 10R 10G 10B(lsb): big-endian: X=unused/undefined
    //   frame->data[0] = p;
    //   frame->linesize[0] = frame->width * 30. / 8.;
    //   break;

    case AV_PIX_FMT_ARGB:
    case AV_PIX_FMT_RGBA:
    case AV_PIX_FMT_ABGR:
    case AV_PIX_FMT_BGRA:
    case AV_PIX_FMT_0RGB:
    case AV_PIX_FMT_RGB0:
    case AV_PIX_FMT_0BGR:
    case AV_PIX_FMT_BGR0:
    case AV_PIX_FMT_GRAYF32BE:
    case AV_PIX_FMT_GRAYF32LE:
    case AV_PIX_FMT_YA16BE:
    case AV_PIX_FMT_YA16LE:
      frame->data[0] = p;
      frame->linesize[0] = frame->width * 32. / 8.;
      break;

    case AV_PIX_FMT_XYZ12LE:
    case AV_PIX_FMT_XYZ12BE:
      // each of X, Y, Z is stored in a 16-bit word, so 6 bytes per pixel
      frame->data[0] = p;
      frame->linesize[0] = frame->width * 6;
      break;

    case AV_PIX_FMT_RGB48BE:
    case AV_PIX_FMT_RGB48LE:

    case AV_PIX_FMT_BGR48BE:
    case AV_PIX_FMT_BGR48LE:
      frame->data[0] = p;
      frame->linesize[0] = frame->width * 48. / 8.;
      break;

    case AV_PIX_FMT_RGBA64BE:
    case AV_PIX_FMT_RGBA64LE:
    case AV_PIX_FMT_BGRA64BE:
    case AV_PIX_FMT_BGRA64LE:
    case AV_PIX_FMT_AYUV64LE:
    case AV_PIX_FMT_AYUV64BE:
      frame->data[0] = p;
      frame->linesize[0] = frame->width * 64. / 8.;
      break;

    case AV_PIX_FMT_BAYER_BGGR8:
    case AV_PIX_FMT_BAYER_RGGB8:
    case AV_PIX_FMT_BAYER_GBRG8:
    case AV_PIX_FMT_BAYER_GRBG8:
    case AV_PIX_FMT_BAYER_BGGR16LE:
    case AV_PIX_FMT_BAYER_BGGR16BE:
    case AV_PIX_FMT_BAYER_RGGB16LE:
    case AV_PIX_FMT_BAYER_RGGB16BE:
    case AV_PIX_FMT_BAYER_GBRG16LE:
    case AV_PIX_FMT_BAYER_GBRG16BE:
    case AV_PIX_FMT_BAYER_GRBG16LE:
    case AV_PIX_FMT_BAYER_GRBG16BE:
    case AV_PIX_FMT_PAL8:
    default: {
      qDebug() << "TODO unhandled video format";
      free(p);
      break;
    }
  }
}
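
// Alternative sketch (not used above): libavutil can compute the same tightly-packed
// plane layout directly from the pixel format, under the same "no padding" assumption
// (align == 1), with the caller keeping ownership of the buffer:
//
//   #include <libavutil/imgutils.h>
//
//   if(av_image_fill_arrays(
//          frame->data, frame->linesize, p,
//          static_cast<AVPixelFormat>(frame->format),
//          frame->width, frame->height, /* align: */ 1) < 0)
//     qDebug() << "unsupported pixel format";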

}
#endif