FFmpeg  4.4.4
cfhd.c
1 /*
2  * Copyright (c) 2015-2016 Kieran Kunhya <kieran@kunhya.com>
3  *
4  * This file is part of FFmpeg.
5  *
6  * FFmpeg is free software; you can redistribute it and/or
7  * modify it under the terms of the GNU Lesser General Public
8  * License as published by the Free Software Foundation; either
9  * version 2.1 of the License, or (at your option) any later version.
10  *
11  * FFmpeg is distributed in the hope that it will be useful,
12  * but WITHOUT ANY WARRANTY; without even the implied warranty of
13  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14  * Lesser General Public License for more details.
15  *
16  * You should have received a copy of the GNU Lesser General Public
17  * License along with FFmpeg; if not, write to the Free Software
18  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
19  */
20 
21 /**
22  * @file
23  * Cineform HD video decoder
24  */
25 
26 #include "libavutil/attributes.h"
27 #include "libavutil/buffer.h"
28 #include "libavutil/common.h"
29 #include "libavutil/imgutils.h"
30 #include "libavutil/intreadwrite.h"
31 #include "libavutil/opt.h"
32 
33 #include "avcodec.h"
34 #include "bytestream.h"
35 #include "get_bits.h"
36 #include "internal.h"
37 #include "thread.h"
38 #include "cfhd.h"
39 
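/* Constants for the companded alpha channel: process_alpha() below removes the
 * DC offset and rescales by this gain to bring alpha back to its 12-bit range. */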
40 #define ALPHA_COMPAND_DC_OFFSET 256
41 #define ALPHA_COMPAND_GAIN 9400
42 
43 static av_cold int cfhd_init(AVCodecContext *avctx)
44 {
45  CFHDContext *s = avctx->priv_data;
46 
47  s->avctx = avctx;
48 
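 /* Build the two decompanding look-up tables used by dequant_and_decompand():
  * lut[0] expands codebook-0 magnitudes piecewise (identity below 40, then
  * progressively steeper), lut[1] adds a cubic term to codebook-1 magnitudes. */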
49  for (int i = 0; i < 64; i++) {
50  int val = i;
51 
52  if (val >= 40) {
53  if (val >= 54) {
54  val -= 54;
55  val <<= 2;
56  val += 54;
57  }
58 
59  val -= 40;
60  val <<= 2;
61  val += 40;
62  }
63 
64  s->lut[0][i] = val;
65  }
66 
67  for (int i = 0; i < 256; i++)
68  s->lut[1][i] = i + ((768LL * i * i * i) / (256 * 256 * 256));
69 
70  return ff_cfhd_init_vlcs(s);
71 }
 72 
 73 static void init_plane_defaults(CFHDContext *s)
 74 {
75  s->subband_num = 0;
76  s->level = 0;
77  s->subband_num_actual = 0;
78 }
 79 
 80 static void init_peak_table_defaults(CFHDContext *s)
 81 {
82  s->peak.level = 0;
83  s->peak.offset = 0;
84  memset(&s->peak.base, 0, sizeof(s->peak.base));
85 }
 86 
 87 static void init_frame_defaults(CFHDContext *s)
 88 {
89  s->coded_width = 0;
90  s->coded_height = 0;
91  s->coded_format = AV_PIX_FMT_YUV422P10;
92  s->cropped_height = 0;
93  s->bpc = 10;
94  s->channel_cnt = 3;
95  s->subband_cnt = SUBBAND_COUNT;
96  s->channel_num = 0;
97  s->lowpass_precision = 16;
98  s->quantisation = 1;
99  s->codebook = 0;
100  s->difference_coding = 0;
101  s->frame_type = 0;
102  s->sample_type = 0;
103  if (s->transform_type != 2)
 104  s->transform_type = -1;
 105  init_plane_defaults(s);
 106  init_peak_table_defaults(s);
 107 }
108 
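/* Map a decoded VLC level back to a coefficient: codebooks 0 and 1 pass the
 * magnitude through the companding LUT built in cfhd_init(), reapply the sign
 * and multiply by the quantisation factor; other codebooks are purely linear. */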
109 static inline int dequant_and_decompand(CFHDContext *s, int level, int quantisation, int codebook)
110 {
111  if (codebook == 0 || codebook == 1) {
112  return s->lut[codebook][abs(level)] * FFSIGN(level) * quantisation;
113  } else
114  return level * quantisation;
115 }
116 
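/* Horizontal DPCM used by some subbands: each coefficient in a row is coded
 * as a difference from its left neighbour, so integrate left to right. */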
117 static inline void difference_coding(int16_t *band, int width, int height)
118 {
119 
120  int i,j;
121  for (i = 0; i < height; i++) {
122  for (j = 1; j < width; j++) {
123  band[j] += band[j-1];
124  }
125  band += width;
126  }
127 }
128 
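/* Peak-table pass: coefficients whose magnitude exceeds the signalled peak
 * level are replaced by exact 16-bit values read from the peak table. */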
129 static inline void peak_table(int16_t *band, Peak *peak, int length)
130 {
131  int i;
132  for (i = 0; i < length; i++)
133  if (abs(band[i]) > peak->level)
134  band[i] = bytestream2_get_le16(&peak->base);
135 }
136 
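/* Undo alpha companding: remove the DC offset, rescale by the compand gain
 * and clip the result back to 12 bits. */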
137 static inline void process_alpha(int16_t *alpha, int width)
138 {
139  int i, channel;
140  for (i = 0; i < width; i++) {
141  channel = alpha[i];
 142  channel -= ALPHA_COMPAND_DC_OFFSET;
 143  channel <<= 3;
 144  channel *= ALPHA_COMPAND_GAIN;
 145  channel >>= 16;
 146  channel = av_clip_uintp2(channel, 12);
 147  alpha[i] = channel;
148  }
149 }
150 
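/* Convert the decoded Bayer planes (stored as a green average plus red/blue
 * and green differences) back to an RGGB mosaic, rescaling to 16-bit range. */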
151 static inline void process_bayer(AVFrame *frame, int bpc)
152 {
153  const int linesize = frame->linesize[0];
154  uint16_t *r = (uint16_t *)frame->data[0];
155  uint16_t *g1 = (uint16_t *)(frame->data[0] + 2);
156  uint16_t *g2 = (uint16_t *)(frame->data[0] + frame->linesize[0]);
157  uint16_t *b = (uint16_t *)(frame->data[0] + frame->linesize[0] + 2);
158  const int mid = 1 << (bpc - 1);
159  const int factor = 1 << (16 - bpc);
160 
161  for (int y = 0; y < frame->height >> 1; y++) {
162  for (int x = 0; x < frame->width; x += 2) {
163  int R, G1, G2, B;
164  int g, rg, bg, gd;
165 
166  g = r[x];
167  rg = g1[x];
168  bg = g2[x];
169  gd = b[x];
170  gd -= mid;
171 
172  R = (rg - mid) * 2 + g;
173  G1 = g + gd;
174  G2 = g - gd;
175  B = (bg - mid) * 2 + g;
176 
177  R = av_clip_uintp2(R * factor, 16);
178  G1 = av_clip_uintp2(G1 * factor, 16);
179  G2 = av_clip_uintp2(G2 * factor, 16);
180  B = av_clip_uintp2(B * factor, 16);
181 
182  r[x] = R;
183  g1[x] = G1;
184  g2[x] = G2;
185  b[x] = B;
186  }
187 
188  r += linesize;
189  g1 += linesize;
190  g2 += linesize;
191  b += linesize;
192  }
193 }
194 
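/* Final vertical synthesis for interlaced content: each low/high coefficient
 * pair produces one line of the even field and one line of the odd field. */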
195 static inline void interlaced_vertical_filter(int16_t *output, int16_t *low, int16_t *high,
196  int width, int linesize, int plane)
197 {
198  int i;
199  int16_t even, odd;
200  for (i = 0; i < width; i++) {
201  even = (low[i] - high[i])/2;
202  odd = (low[i] + high[i])/2;
203  output[i] = av_clip_uintp2(even, 10);
204  output[i + linesize] = av_clip_uintp2(odd, 10);
205  }
206 }
207 
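/* Temporal synthesis for the 3D transform: split each low/high coefficient
 * pair in place into the samples belonging to the two coded frames. */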
208 static inline void inverse_temporal_filter(int16_t *low, int16_t *high, int width)
209 {
210  for (int i = 0; i < width; i++) {
211  int even = (low[i] - high[i]) / 2;
212  int odd = (low[i] + high[i]) / 2;
213 
214  low[i] = even;
215  high[i] = odd;
216  }
217 }
 218 
 219 static void free_buffers(CFHDContext *s)
 220 {
221  int i, j;
222 
223  for (i = 0; i < FF_ARRAY_ELEMS(s->plane); i++) {
224  Plane *p = &s->plane[i];
225  av_freep(&s->plane[i].idwt_buf);
226  av_freep(&s->plane[i].idwt_tmp);
227  s->plane[i].idwt_size = 0;
228 
229  for (j = 0; j < SUBBAND_COUNT_3D; j++)
230  s->plane[i].subband[j] = NULL;
231 
232  for (j = 0; j < 10; j++)
233  s->plane[i].l_h[j] = NULL;
234 
235  for (j = 0; j < DWT_LEVELS_3D; j++)
236  p->band[j][0].read_ok =
237  p->band[j][1].read_ok =
238  p->band[j][2].read_ok =
239  p->band[j][3].read_ok = 0;
240  }
241  s->a_height = 0;
242  s->a_width = 0;
243  s->a_transform_type = INT_MIN;
244 }
245 
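/* Allocate per-plane IDWT coefficient and scratch buffers and lay out the
 * subband and intermediate (l_h) pointers for the 2D (transform_type 0) and
 * 3D (transform_type 2) wavelet transforms. */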
246 static int alloc_buffers(AVCodecContext *avctx)
247 {
248  CFHDContext *s = avctx->priv_data;
249  int i, j, ret, planes, bayer = 0;
250  int chroma_x_shift, chroma_y_shift;
251  unsigned k;
252 
253  if ((ret = ff_set_dimensions(avctx, s->coded_width, s->coded_height)) < 0)
254  return ret;
255  avctx->pix_fmt = s->coded_format;
256 
257  ff_cfhddsp_init(&s->dsp, s->bpc, avctx->pix_fmt == AV_PIX_FMT_BAYER_RGGB16);
258 
259  if ((ret = av_pix_fmt_get_chroma_sub_sample(s->coded_format,
260  &chroma_x_shift,
261  &chroma_y_shift)) < 0)
262  return ret;
263  planes = av_pix_fmt_count_planes(s->coded_format);
264  if (s->coded_format == AV_PIX_FMT_BAYER_RGGB16) {
265  planes = 4;
266  chroma_x_shift = 1;
267  chroma_y_shift = 1;
268  bayer = 1;
269  }
270 
271  for (i = 0; i < planes; i++) {
272  int w8, h8, w4, h4, w2, h2;
273  int width = (i || bayer) ? s->coded_width >> chroma_x_shift : s->coded_width;
274  int height = (i || bayer) ? s->coded_height >> chroma_y_shift : s->coded_height;
275  ptrdiff_t stride = (FFALIGN(width / 8, 8) + 64) * 8;
276 
277  if (chroma_y_shift && !bayer)
278  height = FFALIGN(height / 8, 2) * 8;
279  s->plane[i].width = width;
280  s->plane[i].height = height;
281  s->plane[i].stride = stride;
282 
283  w8 = FFALIGN(s->plane[i].width / 8, 8) + 64;
284  h8 = FFALIGN(height, 8) / 8;
285  w4 = w8 * 2;
286  h4 = h8 * 2;
287  w2 = w4 * 2;
288  h2 = h4 * 2;
289 
290  if (s->transform_type == 0) {
291  s->plane[i].idwt_size = FFALIGN(height, 8) * stride;
292  s->plane[i].idwt_buf =
293  av_mallocz_array(s->plane[i].idwt_size, sizeof(*s->plane[i].idwt_buf));
294  s->plane[i].idwt_tmp =
295  av_malloc_array(s->plane[i].idwt_size, sizeof(*s->plane[i].idwt_tmp));
296  } else {
297  s->plane[i].idwt_size = FFALIGN(height, 8) * stride * 2;
298  s->plane[i].idwt_buf =
299  av_mallocz_array(s->plane[i].idwt_size, sizeof(*s->plane[i].idwt_buf));
300  s->plane[i].idwt_tmp =
301  av_malloc_array(s->plane[i].idwt_size, sizeof(*s->plane[i].idwt_tmp));
302  }
303 
304  if (!s->plane[i].idwt_buf || !s->plane[i].idwt_tmp)
305  return AVERROR(ENOMEM);
306 
307  s->plane[i].subband[0] = s->plane[i].idwt_buf;
308  s->plane[i].subband[1] = s->plane[i].idwt_buf + 2 * w8 * h8;
309  s->plane[i].subband[2] = s->plane[i].idwt_buf + 1 * w8 * h8;
310  s->plane[i].subband[3] = s->plane[i].idwt_buf + 3 * w8 * h8;
311  s->plane[i].subband[4] = s->plane[i].idwt_buf + 2 * w4 * h4;
312  s->plane[i].subband[5] = s->plane[i].idwt_buf + 1 * w4 * h4;
313  s->plane[i].subband[6] = s->plane[i].idwt_buf + 3 * w4 * h4;
314  if (s->transform_type == 0) {
315  s->plane[i].subband[7] = s->plane[i].idwt_buf + 2 * w2 * h2;
316  s->plane[i].subband[8] = s->plane[i].idwt_buf + 1 * w2 * h2;
317  s->plane[i].subband[9] = s->plane[i].idwt_buf + 3 * w2 * h2;
318  } else {
319  int16_t *frame2 =
320  s->plane[i].subband[7] = s->plane[i].idwt_buf + 4 * w2 * h2;
321  s->plane[i].subband[8] = frame2 + 2 * w4 * h4;
322  s->plane[i].subband[9] = frame2 + 1 * w4 * h4;
323  s->plane[i].subband[10] = frame2 + 3 * w4 * h4;
324  s->plane[i].subband[11] = frame2 + 2 * w2 * h2;
325  s->plane[i].subband[12] = frame2 + 1 * w2 * h2;
326  s->plane[i].subband[13] = frame2 + 3 * w2 * h2;
327  s->plane[i].subband[14] = s->plane[i].idwt_buf + 2 * w2 * h2;
328  s->plane[i].subband[15] = s->plane[i].idwt_buf + 1 * w2 * h2;
329  s->plane[i].subband[16] = s->plane[i].idwt_buf + 3 * w2 * h2;
330  }
331 
332  if (s->transform_type == 0) {
333  for (j = 0; j < DWT_LEVELS; j++) {
334  for (k = 0; k < FF_ARRAY_ELEMS(s->plane[i].band[j]); k++) {
335  s->plane[i].band[j][k].a_width = w8 << j;
336  s->plane[i].band[j][k].a_height = h8 << j;
337  }
338  }
339  } else {
340  for (j = 0; j < DWT_LEVELS_3D; j++) {
341  int t = j < 1 ? 0 : (j < 3 ? 1 : 2);
342 
343  for (k = 0; k < FF_ARRAY_ELEMS(s->plane[i].band[j]); k++) {
344  s->plane[i].band[j][k].a_width = w8 << t;
345  s->plane[i].band[j][k].a_height = h8 << t;
346  }
347  }
348  }
349 
350  /* ll2 and ll1 commented out because they are done in-place */
351  s->plane[i].l_h[0] = s->plane[i].idwt_tmp;
352  s->plane[i].l_h[1] = s->plane[i].idwt_tmp + 2 * w8 * h8;
353  // s->plane[i].l_h[2] = ll2;
354  s->plane[i].l_h[3] = s->plane[i].idwt_tmp;
355  s->plane[i].l_h[4] = s->plane[i].idwt_tmp + 2 * w4 * h4;
356  // s->plane[i].l_h[5] = ll1;
357  s->plane[i].l_h[6] = s->plane[i].idwt_tmp;
358  s->plane[i].l_h[7] = s->plane[i].idwt_tmp + 2 * w2 * h2;
359  if (s->transform_type != 0) {
360  int16_t *frame2 = s->plane[i].idwt_tmp + 4 * w2 * h2;
361 
362  s->plane[i].l_h[8] = frame2;
363  s->plane[i].l_h[9] = frame2 + 2 * w2 * h2;
364  }
365  }
366 
367  s->a_transform_type = s->transform_type;
368  s->a_height = s->coded_height;
369  s->a_width = s->coded_width;
370  s->a_format = s->coded_format;
371 
372  return 0;
373 }
374 
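/* Main decode entry point: walk the 16-bit tag/value pairs of the sample,
 * read lowpass and run-length coded highpass coefficients into the subband
 * buffers, then run the inverse wavelet transforms for each plane. */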
375 static int cfhd_decode(AVCodecContext *avctx, void *data, int *got_frame,
376  AVPacket *avpkt)
377 {
378  CFHDContext *s = avctx->priv_data;
379  CFHDDSPContext *dsp = &s->dsp;
380  GetByteContext gb;
381  ThreadFrame frame = { .f = data };
382  AVFrame *pic = data;
383  int ret = 0, i, j, plane, got_buffer = 0;
384  int16_t *coeff_data;
 385 
 386  init_frame_defaults(s);
 387  s->planes = av_pix_fmt_count_planes(s->coded_format);
388 
389  bytestream2_init(&gb, avpkt->data, avpkt->size);
390 
391  while (bytestream2_get_bytes_left(&gb) >= 4) {
392  /* Bit weird but implement the tag parsing as the spec says */
393  uint16_t tagu = bytestream2_get_be16(&gb);
394  int16_t tag = (int16_t)tagu;
395  int8_t tag8 = (int8_t)(tagu >> 8);
396  uint16_t abstag = abs(tag);
397  int8_t abs_tag8 = abs(tag8);
398  uint16_t data = bytestream2_get_be16(&gb);
399  if (abs_tag8 >= 0x60 && abs_tag8 <= 0x6f) {
400  av_log(avctx, AV_LOG_DEBUG, "large len %x\n", ((tagu & 0xff) << 16) | data);
401  } else if (tag == SampleFlags) {
402  av_log(avctx, AV_LOG_DEBUG, "Progressive? %"PRIu16"\n", data);
403  s->progressive = data & 0x0001;
404  } else if (tag == FrameType) {
405  s->frame_type = data;
406  av_log(avctx, AV_LOG_DEBUG, "Frame type %"PRIu16"\n", data);
407  } else if (abstag == VersionMajor) {
408  av_log(avctx, AV_LOG_DEBUG, "Version major %"PRIu16"\n", data);
409  } else if (abstag == VersionMinor) {
410  av_log(avctx, AV_LOG_DEBUG, "Version minor %"PRIu16"\n", data);
411  } else if (abstag == VersionRevision) {
412  av_log(avctx, AV_LOG_DEBUG, "Version revision %"PRIu16"\n", data);
413  } else if (abstag == VersionEdit) {
414  av_log(avctx, AV_LOG_DEBUG, "Version edit %"PRIu16"\n", data);
415  } else if (abstag == Version) {
416  av_log(avctx, AV_LOG_DEBUG, "Version %"PRIu16"\n", data);
417  } else if (tag == ImageWidth) {
418  av_log(avctx, AV_LOG_DEBUG, "Width %"PRIu16"\n", data);
419  s->coded_width = data;
420  } else if (tag == ImageHeight) {
421  av_log(avctx, AV_LOG_DEBUG, "Height %"PRIu16"\n", data);
422  s->coded_height = data;
423  } else if (tag == ChannelCount) {
424  av_log(avctx, AV_LOG_DEBUG, "Channel Count: %"PRIu16"\n", data);
425  s->channel_cnt = data;
426  if (data > 4) {
427  av_log(avctx, AV_LOG_ERROR, "Channel Count of %"PRIu16" is unsupported\n", data);
428  ret = AVERROR_PATCHWELCOME;
429  goto end;
430  }
431  } else if (tag == SubbandCount) {
432  av_log(avctx, AV_LOG_DEBUG, "Subband Count: %"PRIu16"\n", data);
433  if (data != SUBBAND_COUNT && data != SUBBAND_COUNT_3D) {
434  av_log(avctx, AV_LOG_ERROR, "Subband Count of %"PRIu16" is unsupported\n", data);
435  ret = AVERROR_PATCHWELCOME;
436  goto end;
437  }
438  } else if (tag == ChannelNumber) {
439  s->channel_num = data;
440  av_log(avctx, AV_LOG_DEBUG, "Channel number %"PRIu16"\n", data);
441  if (s->channel_num >= s->planes) {
442  av_log(avctx, AV_LOG_ERROR, "Invalid channel number\n");
443  ret = AVERROR(EINVAL);
444  goto end;
 445  }
 446  init_plane_defaults(s);
 447  } else if (tag == SubbandNumber) {
448  if (s->subband_num != 0 && data == 1 && (s->transform_type == 0 || s->transform_type == 2)) // hack
449  s->level++;
450  av_log(avctx, AV_LOG_DEBUG, "Subband number %"PRIu16"\n", data);
451  s->subband_num = data;
452  if ((s->transform_type == 0 && s->level >= DWT_LEVELS) ||
453  (s->transform_type == 2 && s->level >= DWT_LEVELS_3D)) {
454  av_log(avctx, AV_LOG_ERROR, "Invalid level\n");
455  ret = AVERROR(EINVAL);
456  goto end;
457  }
458  if (s->subband_num > 3) {
459  av_log(avctx, AV_LOG_ERROR, "Invalid subband number\n");
460  ret = AVERROR(EINVAL);
461  goto end;
462  }
463  } else if (tag == SubbandBand) {
464  av_log(avctx, AV_LOG_DEBUG, "Subband number actual %"PRIu16"\n", data);
465  if ((s->transform_type == 0 && data >= SUBBAND_COUNT) ||
466  (s->transform_type == 2 && data >= SUBBAND_COUNT_3D && data != 255)) {
467  av_log(avctx, AV_LOG_ERROR, "Invalid subband number actual\n");
468  ret = AVERROR(EINVAL);
469  goto end;
470  }
471  if (s->transform_type == 0 || s->transform_type == 2)
472  s->subband_num_actual = data;
473  else
474  av_log(avctx, AV_LOG_WARNING, "Ignoring subband num actual %"PRIu16"\n", data);
475  } else if (tag == LowpassPrecision)
476  av_log(avctx, AV_LOG_DEBUG, "Lowpass precision bits: %"PRIu16"\n", data);
477  else if (tag == Quantization) {
478  s->quantisation = data;
479  av_log(avctx, AV_LOG_DEBUG, "Quantisation: %"PRIu16"\n", data);
480  } else if (tag == PrescaleTable) {
481  for (i = 0; i < 8; i++)
482  s->prescale_table[i] = (data >> (14 - i * 2)) & 0x3;
483  av_log(avctx, AV_LOG_DEBUG, "Prescale table: %x\n", data);
484  } else if (tag == BandEncoding) {
485  if (!data || data > 5) {
486  av_log(avctx, AV_LOG_ERROR, "Invalid band encoding\n");
487  ret = AVERROR(EINVAL);
488  goto end;
489  }
490  s->band_encoding = data;
491  av_log(avctx, AV_LOG_DEBUG, "Encode Method for Subband %d : %x\n", s->subband_num_actual, data);
492  } else if (tag == LowpassWidth) {
493  av_log(avctx, AV_LOG_DEBUG, "Lowpass width %"PRIu16"\n", data);
494  s->plane[s->channel_num].band[0][0].width = data;
495  s->plane[s->channel_num].band[0][0].stride = data;
496  } else if (tag == LowpassHeight) {
497  av_log(avctx, AV_LOG_DEBUG, "Lowpass height %"PRIu16"\n", data);
498  s->plane[s->channel_num].band[0][0].height = data;
499  } else if (tag == SampleType) {
500  s->sample_type = data;
501  av_log(avctx, AV_LOG_DEBUG, "Sample type? %"PRIu16"\n", data);
502  } else if (tag == TransformType) {
503  if (data > 2) {
504  av_log(avctx, AV_LOG_ERROR, "Invalid transform type\n");
505  ret = AVERROR(EINVAL);
506  goto end;
507  } else if (data == 1) {
508  av_log(avctx, AV_LOG_ERROR, "unsupported transform type\n");
509  ret = AVERROR_PATCHWELCOME;
510  goto end;
511  }
512  if (s->transform_type == -1) {
513  s->transform_type = data;
514  av_log(avctx, AV_LOG_DEBUG, "Transform type %"PRIu16"\n", data);
515  } else {
516  av_log(avctx, AV_LOG_DEBUG, "Ignoring additional transform type %"PRIu16"\n", data);
517  }
518  } else if (abstag >= 0x4000 && abstag <= 0x40ff) {
519  if (abstag == 0x4001)
520  s->peak.level = 0;
521  av_log(avctx, AV_LOG_DEBUG, "Small chunk length %d %s\n", data * 4, tag < 0 ? "optional" : "required");
522  bytestream2_skipu(&gb, data * 4);
523  } else if (tag == FrameIndex) {
524  av_log(avctx, AV_LOG_DEBUG, "Frame index %"PRIu16"\n", data);
525  s->frame_index = data;
526  } else if (tag == SampleIndexTable) {
527  av_log(avctx, AV_LOG_DEBUG, "Sample index table - skipping %i values\n", data);
528  if (data > bytestream2_get_bytes_left(&gb) / 4) {
529  av_log(avctx, AV_LOG_ERROR, "too many values (%d)\n", data);
530  ret = AVERROR_INVALIDDATA;
531  goto end;
532  }
533  for (i = 0; i < data; i++) {
534  uint32_t offset = bytestream2_get_be32(&gb);
535  av_log(avctx, AV_LOG_DEBUG, "Offset = %"PRIu32"\n", offset);
536  }
537  } else if (tag == HighpassWidth) {
538  av_log(avctx, AV_LOG_DEBUG, "Highpass width %i channel %i level %i subband %i\n", data, s->channel_num, s->level, s->subband_num);
539  if (data < 3) {
540  av_log(avctx, AV_LOG_ERROR, "Invalid highpass width\n");
541  ret = AVERROR(EINVAL);
542  goto end;
543  }
544  s->plane[s->channel_num].band[s->level][s->subband_num].width = data;
545  s->plane[s->channel_num].band[s->level][s->subband_num].stride = FFALIGN(data, 8);
546  } else if (tag == HighpassHeight) {
547  av_log(avctx, AV_LOG_DEBUG, "Highpass height %i\n", data);
548  if (data < 3) {
549  av_log(avctx, AV_LOG_ERROR, "Invalid highpass height\n");
550  ret = AVERROR(EINVAL);
551  goto end;
552  }
553  s->plane[s->channel_num].band[s->level][s->subband_num].height = data;
554  } else if (tag == BandWidth) {
555  av_log(avctx, AV_LOG_DEBUG, "Highpass width2 %i\n", data);
556  if (data < 3) {
557  av_log(avctx, AV_LOG_ERROR, "Invalid highpass width2\n");
558  ret = AVERROR(EINVAL);
559  goto end;
560  }
561  s->plane[s->channel_num].band[s->level][s->subband_num].width = data;
562  s->plane[s->channel_num].band[s->level][s->subband_num].stride = FFALIGN(data, 8);
563  } else if (tag == BandHeight) {
564  av_log(avctx, AV_LOG_DEBUG, "Highpass height2 %i\n", data);
565  if (data < 3) {
566  av_log(avctx, AV_LOG_ERROR, "Invalid highpass height2\n");
567  ret = AVERROR(EINVAL);
568  goto end;
569  }
570  s->plane[s->channel_num].band[s->level][s->subband_num].height = data;
571  } else if (tag == InputFormat) {
572  av_log(avctx, AV_LOG_DEBUG, "Input format %i\n", data);
573  if (s->coded_format == AV_PIX_FMT_NONE ||
574  s->coded_format == AV_PIX_FMT_YUV422P10) {
575  if (data >= 100 && data <= 105) {
576  s->coded_format = AV_PIX_FMT_BAYER_RGGB16;
577  } else if (data >= 122 && data <= 128) {
578  s->coded_format = AV_PIX_FMT_GBRP12;
579  } else if (data == 30) {
580  s->coded_format = AV_PIX_FMT_GBRAP12;
581  } else {
582  s->coded_format = AV_PIX_FMT_YUV422P10;
583  }
584  s->planes = s->coded_format == AV_PIX_FMT_BAYER_RGGB16 ? 4 : av_pix_fmt_count_planes(s->coded_format);
585  }
586  } else if (tag == BandCodingFlags) {
587  s->codebook = data & 0xf;
588  s->difference_coding = (data >> 4) & 1;
589  av_log(avctx, AV_LOG_DEBUG, "Other codebook? %i\n", s->codebook);
590  } else if (tag == Precision) {
591  av_log(avctx, AV_LOG_DEBUG, "Precision %i\n", data);
592  if (!(data == 10 || data == 12)) {
593  av_log(avctx, AV_LOG_ERROR, "Invalid bits per channel\n");
594  ret = AVERROR(EINVAL);
595  goto end;
596  }
597  avctx->bits_per_raw_sample = s->bpc = data;
598  } else if (tag == EncodedFormat) {
599  av_log(avctx, AV_LOG_DEBUG, "Sample format? %i\n", data);
600  if (data == 1) {
601  s->coded_format = AV_PIX_FMT_YUV422P10;
602  } else if (data == 2) {
603  s->coded_format = AV_PIX_FMT_BAYER_RGGB16;
604  } else if (data == 3) {
605  s->coded_format = AV_PIX_FMT_GBRP12;
606  } else if (data == 4) {
607  s->coded_format = AV_PIX_FMT_GBRAP12;
608  } else {
609  avpriv_report_missing_feature(avctx, "Sample format of %"PRIu16, data);
610  ret = AVERROR_PATCHWELCOME;
611  goto end;
612  }
613  s->planes = data == 2 ? 4 : av_pix_fmt_count_planes(s->coded_format);
614  } else if (tag == -DisplayHeight) {
615  av_log(avctx, AV_LOG_DEBUG, "Cropped height %"PRIu16"\n", data);
616  s->cropped_height = data;
617  } else if (tag == -PeakOffsetLow) {
618  s->peak.offset &= ~0xffff;
619  s->peak.offset |= (data & 0xffff);
620  s->peak.base = gb;
621  s->peak.level = 0;
622  } else if (tag == -PeakOffsetHigh) {
623  s->peak.offset &= 0xffff;
624  s->peak.offset |= (data & 0xffffU)<<16;
625  s->peak.base = gb;
626  s->peak.level = 0;
627  } else if (tag == -PeakLevel && s->peak.offset) {
628  s->peak.level = data;
629  if (s->peak.offset < 4 - bytestream2_tell(&s->peak.base) ||
630  s->peak.offset > 4 + bytestream2_get_bytes_left(&s->peak.base)
631  ) {
632  ret = AVERROR_INVALIDDATA;
633  goto end;
634  }
635  bytestream2_seek(&s->peak.base, s->peak.offset - 4, SEEK_CUR);
636  } else
637  av_log(avctx, AV_LOG_DEBUG, "Unknown tag %i data %x\n", tag, data);
638 
639  if (tag == BitstreamMarker && data == 0xf0f &&
640  s->coded_format != AV_PIX_FMT_NONE) {
641  int lowpass_height = s->plane[s->channel_num].band[0][0].height;
642  int lowpass_width = s->plane[s->channel_num].band[0][0].width;
643  int factor = s->coded_format == AV_PIX_FMT_BAYER_RGGB16 ? 2 : 1;
644 
645  if (s->coded_width) {
646  s->coded_width *= factor;
647  }
648 
649  if (s->coded_height) {
650  s->coded_height *= factor;
651  }
652 
653  if (!s->a_width && !s->coded_width) {
654  s->coded_width = lowpass_width * factor * 8;
655  }
656 
657  if (!s->a_height && !s->coded_height) {
658  s->coded_height = lowpass_height * factor * 8;
659  }
660 
661  if (s->a_width && !s->coded_width)
662  s->coded_width = s->a_width;
663  if (s->a_height && !s->coded_height)
664  s->coded_height = s->a_height;
665 
666  if (s->a_width != s->coded_width || s->a_height != s->coded_height ||
667  s->a_format != s->coded_format ||
668  s->transform_type != s->a_transform_type) {
669  free_buffers(s);
670  if ((ret = alloc_buffers(avctx)) < 0) {
671  free_buffers(s);
672  return ret;
673  }
674  }
675  ret = ff_set_dimensions(avctx, s->coded_width, s->coded_height);
676  if (ret < 0)
677  return ret;
678  if (s->cropped_height) {
679  unsigned height = s->cropped_height << (avctx->pix_fmt == AV_PIX_FMT_BAYER_RGGB16);
680  if (avctx->height < height)
681  return AVERROR_INVALIDDATA;
682  avctx->height = height;
683  }
684  frame.f->width =
685  frame.f->height = 0;
686 
687  if ((ret = ff_thread_get_buffer(avctx, &frame, 0)) < 0)
688  return ret;
689 
690  s->coded_width = 0;
691  s->coded_height = 0;
692  s->coded_format = AV_PIX_FMT_NONE;
693  got_buffer = 1;
694  } else if (tag == FrameIndex && data == 1 && s->sample_type == 1 && s->frame_type == 2) {
695  frame.f->width =
696  frame.f->height = 0;
697 
698  if ((ret = ff_thread_get_buffer(avctx, &frame, 0)) < 0)
699  return ret;
700  s->coded_width = 0;
701  s->coded_height = 0;
702  s->coded_format = AV_PIX_FMT_NONE;
703  got_buffer = 1;
704  }
705 
706  if (s->subband_num_actual == 255)
707  goto finish;
708  coeff_data = s->plane[s->channel_num].subband[s->subband_num_actual];
709 
710  /* Lowpass coefficients */
711  if (tag == BitstreamMarker && data == 0xf0f) {
712  int lowpass_height, lowpass_width, lowpass_a_height, lowpass_a_width;
713 
714  if (!s->a_width || !s->a_height) {
715  ret = AVERROR_INVALIDDATA;
716  goto end;
717  }
718 
719  lowpass_height = s->plane[s->channel_num].band[0][0].height;
720  lowpass_width = s->plane[s->channel_num].band[0][0].width;
721  lowpass_a_height = s->plane[s->channel_num].band[0][0].a_height;
722  lowpass_a_width = s->plane[s->channel_num].band[0][0].a_width;
723 
724  if (lowpass_width < 3 ||
725  lowpass_width > lowpass_a_width) {
726  av_log(avctx, AV_LOG_ERROR, "Invalid lowpass width\n");
727  ret = AVERROR(EINVAL);
728  goto end;
729  }
730 
731  if (lowpass_height < 3 ||
732  lowpass_height > lowpass_a_height) {
733  av_log(avctx, AV_LOG_ERROR, "Invalid lowpass height\n");
734  ret = AVERROR(EINVAL);
735  goto end;
736  }
737 
738  if (!got_buffer) {
739  av_log(avctx, AV_LOG_ERROR, "No end of header tag found\n");
740  ret = AVERROR(EINVAL);
741  goto end;
742  }
743 
744  if (lowpass_height > lowpass_a_height || lowpass_width > lowpass_a_width ||
745  lowpass_width * lowpass_height * sizeof(int16_t) > bytestream2_get_bytes_left(&gb)) {
746  av_log(avctx, AV_LOG_ERROR, "Too many lowpass coefficients\n");
747  ret = AVERROR(EINVAL);
748  goto end;
749  }
750 
751  av_log(avctx, AV_LOG_DEBUG, "Start of lowpass coeffs component %d height:%d, width:%d\n", s->channel_num, lowpass_height, lowpass_width);
752  for (i = 0; i < lowpass_height; i++) {
753  for (j = 0; j < lowpass_width; j++)
754  coeff_data[j] = bytestream2_get_be16u(&gb);
755 
756  coeff_data += lowpass_width;
757  }
758 
759  /* Align to mod-4 position to continue reading tags */
760  bytestream2_seek(&gb, bytestream2_tell(&gb) & 3, SEEK_CUR);
761 
762  /* Copy last line of coefficients if odd height */
763  if (lowpass_height & 1) {
764  memcpy(&coeff_data[lowpass_height * lowpass_width],
765  &coeff_data[(lowpass_height - 1) * lowpass_width],
766  lowpass_width * sizeof(*coeff_data));
767  }
768 
769  s->plane[s->channel_num].band[0][0].read_ok = 1;
770 
771  av_log(avctx, AV_LOG_DEBUG, "Lowpass coefficients %d\n", lowpass_width * lowpass_height);
772  }
773 
774  av_assert0(s->subband_num_actual != 255);
775  if (tag == BandHeader || tag == BandSecondPass) {
776  int highpass_height, highpass_width, highpass_a_width, highpass_a_height, highpass_stride, a_expected;
777  int expected;
778  int level, run, coeff;
779  int count = 0, bytes;
780 
781  if (!s->a_width || !s->a_height) {
782  ret = AVERROR_INVALIDDATA;
783  goto end;
784  }
785 
786  highpass_height = s->plane[s->channel_num].band[s->level][s->subband_num].height;
787  highpass_width = s->plane[s->channel_num].band[s->level][s->subband_num].width;
788  highpass_a_width = s->plane[s->channel_num].band[s->level][s->subband_num].a_width;
789  highpass_a_height = s->plane[s->channel_num].band[s->level][s->subband_num].a_height;
790  highpass_stride = s->plane[s->channel_num].band[s->level][s->subband_num].stride;
791  a_expected = highpass_a_height * highpass_a_width;
792 
793  if (!got_buffer) {
794  av_log(avctx, AV_LOG_ERROR, "No end of header tag found\n");
795  ret = AVERROR(EINVAL);
796  goto end;
797  }
798 
799  if (highpass_height > highpass_a_height || highpass_width > highpass_a_width || a_expected < highpass_height * (uint64_t)highpass_stride) {
800  av_log(avctx, AV_LOG_ERROR, "Too many highpass coefficients\n");
801  ret = AVERROR(EINVAL);
802  goto end;
803  }
804  expected = highpass_height * highpass_stride;
805 
806  av_log(avctx, AV_LOG_DEBUG, "Start subband coeffs plane %i level %i codebook %i expected %i\n", s->channel_num, s->level, s->codebook, expected);
807 
808  ret = init_get_bits8(&s->gb, gb.buffer, bytestream2_get_bytes_left(&gb));
809  if (ret < 0)
810  goto end;
811  {
812  OPEN_READER(re, &s->gb);
813 
814  const int lossless = s->band_encoding == 5;
815 
816  if (s->codebook == 0 && s->transform_type == 2 && s->subband_num_actual == 7)
817  s->codebook = 1;
818  if (!s->codebook) {
819  while (1) {
820  UPDATE_CACHE(re, &s->gb);
821  GET_RL_VLC(level, run, re, &s->gb, s->table_9_rl_vlc,
822  VLC_BITS, 3, 1);
823 
824  /* escape */
825  if (level == 64)
826  break;
827 
828  count += run;
829 
830  if (count > expected)
831  break;
832 
833  if (!lossless)
834  coeff = dequant_and_decompand(s, level, s->quantisation, 0);
835  else
836  coeff = level;
837  if (tag == BandSecondPass) {
838  const uint16_t q = s->quantisation;
839 
840  for (i = 0; i < run; i++) {
841  *coeff_data |= coeff * 256U;
842  *coeff_data++ *= q;
843  }
844  } else {
845  for (i = 0; i < run; i++)
846  *coeff_data++ = coeff;
847  }
848  }
849  } else {
850  while (1) {
851  UPDATE_CACHE(re, &s->gb);
852  GET_RL_VLC(level, run, re, &s->gb, s->table_18_rl_vlc,
853  VLC_BITS, 3, 1);
854 
855  /* escape */
856  if (level == 255 && run == 2)
857  break;
858 
859  count += run;
860 
861  if (count > expected)
862  break;
863 
864  if (!lossless)
865  coeff = dequant_and_decompand(s, level, s->quantisation, s->codebook);
866  else
867  coeff = level;
868  if (tag == BandSecondPass) {
869  const uint16_t q = s->quantisation;
870 
871  for (i = 0; i < run; i++) {
872  *coeff_data |= coeff * 256U;
873  *coeff_data++ *= q;
874  }
875  } else {
876  for (i = 0; i < run; i++)
877  *coeff_data++ = coeff;
878  }
879  }
880  }
881  CLOSE_READER(re, &s->gb);
882  }
883 
884  if (count > expected) {
885  av_log(avctx, AV_LOG_ERROR, "Escape codeword not found, probably corrupt data\n");
886  ret = AVERROR(EINVAL);
887  goto end;
888  }
889  if (s->peak.level)
890  peak_table(coeff_data - count, &s->peak, count);
891  if (s->difference_coding)
892  difference_coding(s->plane[s->channel_num].subband[s->subband_num_actual], highpass_width, highpass_height);
893 
894  bytes = FFALIGN(AV_CEIL_RSHIFT(get_bits_count(&s->gb), 3), 4);
895  if (bytes > bytestream2_get_bytes_left(&gb)) {
896  av_log(avctx, AV_LOG_ERROR, "Bitstream overread error\n");
897  ret = AVERROR(EINVAL);
898  goto end;
899  } else
900  bytestream2_seek(&gb, bytes, SEEK_CUR);
901 
902  av_log(avctx, AV_LOG_DEBUG, "End subband coeffs %i extra %i\n", count, count - expected);
903  s->plane[s->channel_num].band[s->level][s->subband_num].read_ok = 1;
904 finish:
905  if (s->subband_num_actual != 255)
906  s->codebook = 0;
907  }
908  }
909 
910  s->planes = av_pix_fmt_count_planes(avctx->pix_fmt);
911  if (avctx->pix_fmt == AV_PIX_FMT_BAYER_RGGB16) {
912  s->progressive = 1;
913  s->planes = 4;
914  }
915 
916  ff_thread_finish_setup(avctx);
917 
918  if (!s->a_width || !s->a_height || s->a_format == AV_PIX_FMT_NONE ||
919  s->a_transform_type == INT_MIN ||
920  s->coded_width || s->coded_height || s->coded_format != AV_PIX_FMT_NONE) {
921  av_log(avctx, AV_LOG_ERROR, "Invalid dimensions\n");
922  ret = AVERROR(EINVAL);
923  goto end;
924  }
925 
926  if (!got_buffer) {
927  av_log(avctx, AV_LOG_ERROR, "No end of header tag found\n");
928  ret = AVERROR(EINVAL);
929  goto end;
930  }
931 
932  for (plane = 0; plane < s->planes; plane++) {
933  int o, level;
934 
935  for (level = 0; level < (s->transform_type == 0 ? DWT_LEVELS : DWT_LEVELS_3D) ; level++) {
936  if (s->transform_type == 2)
937  if (level == 2 || level == 5)
938  continue;
939  for (o = !!level; o < 4 ; o++) {
940  if (!s->plane[plane].band[level][o].read_ok) {
941  ret = AVERROR_INVALIDDATA;
942  goto end;
943  }
944  }
945  }
946  }
947 
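 /* Spatial-only (transform_type 0) reconstruction: three levels of vertical
  * then horizontal inverse filtering per plane, with the last level writing
  * either progressive lines or two interlaced fields into the output frame. */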
948  if (s->transform_type == 0 && s->sample_type != 1) {
949  for (plane = 0; plane < s->planes && !ret; plane++) {
950  /* level 1 */
951  int lowpass_height = s->plane[plane].band[0][0].height;
952  int output_stride = s->plane[plane].band[0][0].a_width;
953  int lowpass_width = s->plane[plane].band[0][0].width;
954  int highpass_stride = s->plane[plane].band[0][1].stride;
955  int act_plane = plane == 1 ? 2 : plane == 2 ? 1 : plane;
956  ptrdiff_t dst_linesize;
957  int16_t *low, *high, *output, *dst;
958 
959  if (avctx->pix_fmt == AV_PIX_FMT_BAYER_RGGB16) {
960  act_plane = 0;
961  dst_linesize = pic->linesize[act_plane];
962  } else {
963  dst_linesize = pic->linesize[act_plane] / 2;
964  }
965 
966  if (lowpass_height > s->plane[plane].band[0][0].a_height || lowpass_width > s->plane[plane].band[0][0].a_width ||
967  !highpass_stride || s->plane[plane].band[0][1].width > s->plane[plane].band[0][1].a_width ||
968  lowpass_width < 3 || lowpass_height < 3) {
969  av_log(avctx, AV_LOG_ERROR, "Invalid plane dimensions\n");
970  ret = AVERROR(EINVAL);
971  goto end;
972  }
973 
974  av_log(avctx, AV_LOG_DEBUG, "Decoding level 1 plane %i %i %i %i\n", plane, lowpass_height, lowpass_width, highpass_stride);
975 
976  low = s->plane[plane].subband[0];
977  high = s->plane[plane].subband[2];
978  output = s->plane[plane].l_h[0];
979  dsp->vert_filter(output, output_stride, low, lowpass_width, high, highpass_stride, lowpass_width, lowpass_height);
980 
981  low = s->plane[plane].subband[1];
982  high = s->plane[plane].subband[3];
983  output = s->plane[plane].l_h[1];
984 
985  dsp->vert_filter(output, output_stride, low, highpass_stride, high, highpass_stride, lowpass_width, lowpass_height);
986 
987  low = s->plane[plane].l_h[0];
988  high = s->plane[plane].l_h[1];
989  output = s->plane[plane].subband[0];
990  dsp->horiz_filter(output, output_stride, low, output_stride, high, output_stride, lowpass_width, lowpass_height * 2);
991  if (s->bpc == 12) {
992  output = s->plane[plane].subband[0];
993  for (i = 0; i < lowpass_height * 2; i++) {
994  for (j = 0; j < lowpass_width * 2; j++)
995  output[j] *= 4;
996 
997  output += output_stride * 2;
998  }
999  }
1000 
1001  /* level 2 */
1002  lowpass_height = s->plane[plane].band[1][1].height;
1003  output_stride = s->plane[plane].band[1][1].a_width;
1004  lowpass_width = s->plane[plane].band[1][1].width;
1005  highpass_stride = s->plane[plane].band[1][1].stride;
1006 
1007  if (lowpass_height > s->plane[plane].band[1][1].a_height || lowpass_width > s->plane[plane].band[1][1].a_width ||
1008  !highpass_stride || s->plane[plane].band[1][1].width > s->plane[plane].band[1][1].a_width ||
1009  lowpass_width < 3 || lowpass_height < 3) {
1010  av_log(avctx, AV_LOG_ERROR, "Invalid plane dimensions\n");
1011  ret = AVERROR(EINVAL);
1012  goto end;
1013  }
1014 
1015  av_log(avctx, AV_LOG_DEBUG, "Level 2 plane %i %i %i %i\n", plane, lowpass_height, lowpass_width, highpass_stride);
1016 
1017  low = s->plane[plane].subband[0];
1018  high = s->plane[plane].subband[5];
1019  output = s->plane[plane].l_h[3];
1020  dsp->vert_filter(output, output_stride, low, output_stride, high, highpass_stride, lowpass_width, lowpass_height);
1021 
1022  low = s->plane[plane].subband[4];
1023  high = s->plane[plane].subband[6];
1024  output = s->plane[plane].l_h[4];
1025  dsp->vert_filter(output, output_stride, low, highpass_stride, high, highpass_stride, lowpass_width, lowpass_height);
1026 
1027  low = s->plane[plane].l_h[3];
1028  high = s->plane[plane].l_h[4];
1029  output = s->plane[plane].subband[0];
1030  dsp->horiz_filter(output, output_stride, low, output_stride, high, output_stride, lowpass_width, lowpass_height * 2);
1031 
1032  output = s->plane[plane].subband[0];
1033  for (i = 0; i < lowpass_height * 2; i++) {
1034  for (j = 0; j < lowpass_width * 2; j++)
1035  output[j] *= 4;
1036 
1037  output += output_stride * 2;
1038  }
1039 
1040  /* level 3 */
1041  lowpass_height = s->plane[plane].band[2][1].height;
1042  output_stride = s->plane[plane].band[2][1].a_width;
1043  lowpass_width = s->plane[plane].band[2][1].width;
1044  highpass_stride = s->plane[plane].band[2][1].stride;
1045 
1046  if (lowpass_height > s->plane[plane].band[2][1].a_height || lowpass_width > s->plane[plane].band[2][1].a_width ||
1047  !highpass_stride || s->plane[plane].band[2][1].width > s->plane[plane].band[2][1].a_width ||
1048  lowpass_height < 3 || lowpass_width < 3 || lowpass_width * 2 > s->plane[plane].width) {
1049  av_log(avctx, AV_LOG_ERROR, "Invalid plane dimensions\n");
1050  ret = AVERROR(EINVAL);
1051  goto end;
1052  }
1053 
1054  av_log(avctx, AV_LOG_DEBUG, "Level 3 plane %i %i %i %i\n", plane, lowpass_height, lowpass_width, highpass_stride);
1055  if (s->progressive) {
1056  low = s->plane[plane].subband[0];
1057  high = s->plane[plane].subband[8];
1058  output = s->plane[plane].l_h[6];
1059  dsp->vert_filter(output, output_stride, low, output_stride, high, highpass_stride, lowpass_width, lowpass_height);
1060 
1061  low = s->plane[plane].subband[7];
1062  high = s->plane[plane].subband[9];
1063  output = s->plane[plane].l_h[7];
1064  dsp->vert_filter(output, output_stride, low, highpass_stride, high, highpass_stride, lowpass_width, lowpass_height);
1065 
1066  dst = (int16_t *)pic->data[act_plane];
1067  if (avctx->pix_fmt == AV_PIX_FMT_BAYER_RGGB16) {
1068  if (plane & 1)
1069  dst++;
1070  if (plane > 1)
1071  dst += pic->linesize[act_plane] >> 1;
1072  }
1073  low = s->plane[plane].l_h[6];
1074  high = s->plane[plane].l_h[7];
1075 
1076  if (avctx->pix_fmt == AV_PIX_FMT_BAYER_RGGB16 &&
1077  (lowpass_height * 2 > avctx->coded_height / 2 ||
1078  lowpass_width * 2 > avctx->coded_width / 2 )
1079  ) {
1080  ret = AVERROR_INVALIDDATA;
1081  goto end;
1082  }
1083 
1084  for (i = 0; i < s->plane[act_plane].height; i++) {
1085  dsp->horiz_filter_clip(dst, low, high, lowpass_width, s->bpc);
1086  if (avctx->pix_fmt == AV_PIX_FMT_GBRAP12 && act_plane == 3)
1087  process_alpha(dst, lowpass_width * 2);
1088  low += output_stride;
1089  high += output_stride;
1090  dst += dst_linesize;
1091  }
1092  } else {
1093  av_log(avctx, AV_LOG_DEBUG, "interlaced frame ? %d", pic->interlaced_frame);
1094  pic->interlaced_frame = 1;
1095  low = s->plane[plane].subband[0];
1096  high = s->plane[plane].subband[7];
1097  output = s->plane[plane].l_h[6];
1098  dsp->horiz_filter(output, output_stride, low, output_stride, high, highpass_stride, lowpass_width, lowpass_height);
1099 
1100  low = s->plane[plane].subband[8];
1101  high = s->plane[plane].subband[9];
1102  output = s->plane[plane].l_h[7];
1103  dsp->horiz_filter(output, output_stride, low, highpass_stride, high, highpass_stride, lowpass_width, lowpass_height);
1104 
1105  dst = (int16_t *)pic->data[act_plane];
1106  low = s->plane[plane].l_h[6];
1107  high = s->plane[plane].l_h[7];
1108  for (i = 0; i < s->plane[act_plane].height / 2; i++) {
1109  interlaced_vertical_filter(dst, low, high, lowpass_width * 2, pic->linesize[act_plane]/2, act_plane);
1110  low += output_stride * 2;
1111  high += output_stride * 2;
1112  dst += pic->linesize[act_plane];
1113  }
1114  }
1115  }
1116  } else if (s->transform_type == 2 && (avctx->internal->is_copy || s->frame_index == 1 || s->sample_type != 1)) {
1117  for (plane = 0; plane < s->planes && !ret; plane++) {
1118  int lowpass_height = s->plane[plane].band[0][0].height;
1119  int output_stride = s->plane[plane].band[0][0].a_width;
1120  int lowpass_width = s->plane[plane].band[0][0].width;
1121  int highpass_stride = s->plane[plane].band[0][1].stride;
1122  int act_plane = plane == 1 ? 2 : plane == 2 ? 1 : plane;
1123  int16_t *low, *high, *output, *dst;
1124  ptrdiff_t dst_linesize;
1125 
1126  if (avctx->pix_fmt == AV_PIX_FMT_BAYER_RGGB16) {
1127  act_plane = 0;
1128  dst_linesize = pic->linesize[act_plane];
1129  } else {
1130  dst_linesize = pic->linesize[act_plane] / 2;
1131  }
1132 
1133  if (lowpass_height > s->plane[plane].band[0][0].a_height || lowpass_width > s->plane[plane].band[0][0].a_width ||
1134  !highpass_stride || s->plane[plane].band[0][1].width > s->plane[plane].band[0][1].a_width ||
1135  lowpass_width < 3 || lowpass_height < 3) {
1136  av_log(avctx, AV_LOG_ERROR, "Invalid plane dimensions\n");
1137  ret = AVERROR(EINVAL);
1138  goto end;
1139  }
1140 
1141  av_log(avctx, AV_LOG_DEBUG, "Decoding level 1 plane %i %i %i %i\n", plane, lowpass_height, lowpass_width, highpass_stride);
1142 
1143  low = s->plane[plane].subband[0];
1144  high = s->plane[plane].subband[2];
1145  output = s->plane[plane].l_h[0];
1146  dsp->vert_filter(output, output_stride, low, lowpass_width, high, highpass_stride, lowpass_width, lowpass_height);
1147 
1148  low = s->plane[plane].subband[1];
1149  high = s->plane[plane].subband[3];
1150  output = s->plane[plane].l_h[1];
1151  dsp->vert_filter(output, output_stride, low, highpass_stride, high, highpass_stride, lowpass_width, lowpass_height);
1152 
1153  low = s->plane[plane].l_h[0];
1154  high = s->plane[plane].l_h[1];
1155  output = s->plane[plane].l_h[7];
1156  dsp->horiz_filter(output, output_stride, low, output_stride, high, output_stride, lowpass_width, lowpass_height * 2);
1157  if (s->bpc == 12) {
1158  output = s->plane[plane].l_h[7];
1159  for (i = 0; i < lowpass_height * 2; i++) {
1160  for (j = 0; j < lowpass_width * 2; j++)
1161  output[j] *= 4;
1162 
1163  output += output_stride * 2;
1164  }
1165  }
1166 
1167  lowpass_height = s->plane[plane].band[1][1].height;
1168  output_stride = s->plane[plane].band[1][1].a_width;
1169  lowpass_width = s->plane[plane].band[1][1].width;
1170  highpass_stride = s->plane[plane].band[1][1].stride;
1171 
1172  if (lowpass_height > s->plane[plane].band[1][1].a_height || lowpass_width > s->plane[plane].band[1][1].a_width ||
1173  !highpass_stride || s->plane[plane].band[1][1].width > s->plane[plane].band[1][1].a_width ||
1174  lowpass_width < 3 || lowpass_height < 3) {
1175  av_log(avctx, AV_LOG_ERROR, "Invalid plane dimensions\n");
1176  ret = AVERROR(EINVAL);
1177  goto end;
1178  }
1179 
1180  av_log(avctx, AV_LOG_DEBUG, "Level 2 lowpass plane %i %i %i %i\n", plane, lowpass_height, lowpass_width, highpass_stride);
1181 
1182  low = s->plane[plane].l_h[7];
1183  high = s->plane[plane].subband[5];
1184  output = s->plane[plane].l_h[3];
1185  dsp->vert_filter(output, output_stride, low, output_stride, high, highpass_stride, lowpass_width, lowpass_height);
1186 
1187  low = s->plane[plane].subband[4];
1188  high = s->plane[plane].subband[6];
1189  output = s->plane[plane].l_h[4];
1190  dsp->vert_filter(output, output_stride, low, highpass_stride, high, highpass_stride, lowpass_width, lowpass_height);
1191 
1192  low = s->plane[plane].l_h[3];
1193  high = s->plane[plane].l_h[4];
1194  output = s->plane[plane].l_h[7];
1195  dsp->horiz_filter(output, output_stride, low, output_stride, high, output_stride, lowpass_width, lowpass_height * 2);
1196 
1197  output = s->plane[plane].l_h[7];
1198  for (i = 0; i < lowpass_height * 2; i++) {
1199  for (j = 0; j < lowpass_width * 2; j++)
1200  output[j] *= 4;
1201  output += output_stride * 2;
1202  }
1203 
1204  low = s->plane[plane].subband[7];
1205  high = s->plane[plane].subband[9];
1206  output = s->plane[plane].l_h[3];
1207  dsp->vert_filter(output, output_stride, low, highpass_stride, high, highpass_stride, lowpass_width, lowpass_height);
1208 
1209  low = s->plane[plane].subband[8];
1210  high = s->plane[plane].subband[10];
1211  output = s->plane[plane].l_h[4];
1212  dsp->vert_filter(output, output_stride, low, highpass_stride, high, highpass_stride, lowpass_width, lowpass_height);
1213 
1214  low = s->plane[plane].l_h[3];
1215  high = s->plane[plane].l_h[4];
1216  output = s->plane[plane].l_h[9];
1217  dsp->horiz_filter(output, output_stride, low, output_stride, high, output_stride, lowpass_width, lowpass_height * 2);
1218 
1219  lowpass_height = s->plane[plane].band[4][1].height;
1220  output_stride = s->plane[plane].band[4][1].a_width;
1221  lowpass_width = s->plane[plane].band[4][1].width;
1222  highpass_stride = s->plane[plane].band[4][1].stride;
1223  av_log(avctx, AV_LOG_DEBUG, "temporal level %i %i %i %i\n", plane, lowpass_height, lowpass_width, highpass_stride);
1224 
1225  if (lowpass_height > s->plane[plane].band[4][1].a_height || lowpass_width > s->plane[plane].band[4][1].a_width ||
1226  !highpass_stride || s->plane[plane].band[4][1].width > s->plane[plane].band[4][1].a_width ||
1227  lowpass_width < 3 || lowpass_height < 3) {
1228  av_log(avctx, AV_LOG_ERROR, "Invalid plane dimensions\n");
1229  ret = AVERROR(EINVAL);
1230  goto end;
1231  }
1232 
1233  low = s->plane[plane].l_h[7];
1234  high = s->plane[plane].l_h[9];
1235  output = s->plane[plane].l_h[7];
1236  for (i = 0; i < lowpass_height; i++) {
1237  inverse_temporal_filter(low, high, lowpass_width);
1238  low += output_stride;
1239  high += output_stride;
1240  }
1241  if (s->progressive) {
1242  low = s->plane[plane].l_h[7];
1243  high = s->plane[plane].subband[15];
1244  output = s->plane[plane].l_h[6];
1245  dsp->vert_filter(output, output_stride, low, output_stride, high, highpass_stride, lowpass_width, lowpass_height);
1246 
1247  low = s->plane[plane].subband[14];
1248  high = s->plane[plane].subband[16];
1249  output = s->plane[plane].l_h[7];
1250  dsp->vert_filter(output, output_stride, low, highpass_stride, high, highpass_stride, lowpass_width, lowpass_height);
1251 
1252  low = s->plane[plane].l_h[9];
1253  high = s->plane[plane].subband[12];
1254  output = s->plane[plane].l_h[8];
1255  dsp->vert_filter(output, output_stride, low, output_stride, high, highpass_stride, lowpass_width, lowpass_height);
1256 
1257  low = s->plane[plane].subband[11];
1258  high = s->plane[plane].subband[13];
1259  output = s->plane[plane].l_h[9];
1260  dsp->vert_filter(output, output_stride, low, highpass_stride, high, highpass_stride, lowpass_width, lowpass_height);
1261 
1262  if (s->sample_type == 1)
1263  continue;
1264 
1265  dst = (int16_t *)pic->data[act_plane];
1266  if (avctx->pix_fmt == AV_PIX_FMT_BAYER_RGGB16) {
1267  if (plane & 1)
1268  dst++;
1269  if (plane > 1)
1270  dst += pic->linesize[act_plane] >> 1;
1271  }
1272 
1273  if (avctx->pix_fmt == AV_PIX_FMT_BAYER_RGGB16 &&
1274  (lowpass_height * 2 > avctx->coded_height / 2 ||
1275  lowpass_width * 2 > avctx->coded_width / 2 )
1276  ) {
1277  ret = AVERROR_INVALIDDATA;
1278  goto end;
1279  }
1280 
1281  low = s->plane[plane].l_h[6];
1282  high = s->plane[plane].l_h[7];
1283  for (i = 0; i < s->plane[act_plane].height; i++) {
1284  dsp->horiz_filter_clip(dst, low, high, lowpass_width, s->bpc);
1285  low += output_stride;
1286  high += output_stride;
1287  dst += dst_linesize;
1288  }
1289  } else {
1290  pic->interlaced_frame = 1;
1291  low = s->plane[plane].l_h[7];
1292  high = s->plane[plane].subband[14];
1293  output = s->plane[plane].l_h[6];
1294  dsp->horiz_filter(output, output_stride, low, output_stride, high, highpass_stride, lowpass_width, lowpass_height);
1295 
1296  low = s->plane[plane].subband[15];
1297  high = s->plane[plane].subband[16];
1298  output = s->plane[plane].l_h[7];
1299  dsp->horiz_filter(output, output_stride, low, highpass_stride, high, highpass_stride, lowpass_width, lowpass_height);
1300 
1301  low = s->plane[plane].l_h[9];
1302  high = s->plane[plane].subband[11];
1303  output = s->plane[plane].l_h[8];
1304  dsp->horiz_filter(output, output_stride, low, output_stride, high, highpass_stride, lowpass_width, lowpass_height);
1305 
1306  low = s->plane[plane].subband[12];
1307  high = s->plane[plane].subband[13];
1308  output = s->plane[plane].l_h[9];
1309  dsp->horiz_filter(output, output_stride, low, highpass_stride, high, highpass_stride, lowpass_width, lowpass_height);
1310 
1311  if (s->sample_type == 1)
1312  continue;
1313 
1314  dst = (int16_t *)pic->data[act_plane];
1315  low = s->plane[plane].l_h[6];
1316  high = s->plane[plane].l_h[7];
1317  for (i = 0; i < s->plane[act_plane].height / 2; i++) {
1318  interlaced_vertical_filter(dst, low, high, lowpass_width * 2, pic->linesize[act_plane]/2, act_plane);
1319  low += output_stride * 2;
1320  high += output_stride * 2;
1321  dst += pic->linesize[act_plane];
1322  }
1323  }
1324  }
1325  }
1326 
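 /* Output stage for the second sample of a 3D (temporal) group: the earlier
  * passes left the result in l_h[8]/l_h[9], so only the final horizontal
  * synthesis (or interlaced interleave) into the picture remains. */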
1327  if (s->transform_type == 2 && s->sample_type == 1) {
1328  int16_t *low, *high, *dst;
1329  int output_stride, lowpass_height, lowpass_width;
1330  ptrdiff_t dst_linesize;
1331 
1332  for (plane = 0; plane < s->planes; plane++) {
1333  int act_plane = plane == 1 ? 2 : plane == 2 ? 1 : plane;
1334 
1335  if (avctx->pix_fmt == AV_PIX_FMT_BAYER_RGGB16) {
1336  act_plane = 0;
1337  dst_linesize = pic->linesize[act_plane];
1338  } else {
1339  dst_linesize = pic->linesize[act_plane] / 2;
1340  }
1341 
1342  lowpass_height = s->plane[plane].band[4][1].height;
1343  output_stride = s->plane[plane].band[4][1].a_width;
1344  lowpass_width = s->plane[plane].band[4][1].width;
1345 
1346  if (lowpass_height > s->plane[plane].band[4][1].a_height || lowpass_width > s->plane[plane].band[4][1].a_width ||
1347  s->plane[plane].band[4][1].width > s->plane[plane].band[4][1].a_width ||
1348  lowpass_width < 3 || lowpass_height < 3) {
1349  av_log(avctx, AV_LOG_ERROR, "Invalid plane dimensions\n");
1350  ret = AVERROR(EINVAL);
1351  goto end;
1352  }
1353 
1354  if (s->progressive) {
1355  dst = (int16_t *)pic->data[act_plane];
1356  low = s->plane[plane].l_h[8];
1357  high = s->plane[plane].l_h[9];
1358 
1359  if (avctx->pix_fmt == AV_PIX_FMT_BAYER_RGGB16) {
1360  if (plane & 1)
1361  dst++;
1362  if (plane > 1)
1363  dst += pic->linesize[act_plane] >> 1;
1364  }
1365 
1366  if (avctx->pix_fmt == AV_PIX_FMT_BAYER_RGGB16 &&
1367  (lowpass_height * 2 > avctx->coded_height / 2 ||
1368  lowpass_width * 2 > avctx->coded_width / 2 )
1369  ) {
1370  ret = AVERROR_INVALIDDATA;
1371  goto end;
1372  }
1373 
1374  for (i = 0; i < s->plane[act_plane].height; i++) {
1375  dsp->horiz_filter_clip(dst, low, high, lowpass_width, s->bpc);
1376  low += output_stride;
1377  high += output_stride;
1378  dst += dst_linesize;
1379  }
1380  } else {
1381  dst = (int16_t *)pic->data[act_plane];
1382  low = s->plane[plane].l_h[8];
1383  high = s->plane[plane].l_h[9];
1384  for (i = 0; i < s->plane[act_plane].height / 2; i++) {
1385  interlaced_vertical_filter(dst, low, high, lowpass_width * 2, pic->linesize[act_plane]/2, act_plane);
1386  low += output_stride * 2;
1387  high += output_stride * 2;
1388  dst += pic->linesize[act_plane];
1389  }
1390  }
1391  }
1392  }
1393 
1394  if (avctx->pix_fmt == AV_PIX_FMT_BAYER_RGGB16)
1395  process_bayer(pic, s->bpc);
1396 end:
1397  if (ret < 0)
1398  return ret;
1399 
1400  *got_frame = 1;
1401  return avpkt->size;
1402 }
 1403 
 1404 static av_cold int cfhd_close(AVCodecContext *avctx)
 1405 {
1406  CFHDContext *s = avctx->priv_data;
1407 
1408  free_buffers(s);
1409 
1410  ff_free_vlc(&s->vlc_9);
1411  ff_free_vlc(&s->vlc_18);
1412 
1413  return 0;
1414 }
1415 
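/* Frame-threading support for the 3D transform: pass the band metadata and
 * IDWT coefficient buffers to the next thread so it can reconstruct the
 * remaining frame of a temporal pair. */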
1416 #if HAVE_THREADS
1417 static int update_thread_context(AVCodecContext *dst, const AVCodecContext *src)
1418 {
1419  CFHDContext *psrc = src->priv_data;
1420  CFHDContext *pdst = dst->priv_data;
1421  int ret;
1422 
1423  if (dst == src || psrc->transform_type == 0)
1424  return 0;
1425 
1426  if (pdst->plane[0].idwt_size != psrc->plane[0].idwt_size ||
1427  pdst->a_format != psrc->a_format ||
1428  pdst->a_width != psrc->a_width ||
1429  pdst->a_height != psrc->a_height ||
1430  pdst->a_transform_type != psrc->a_transform_type)
1431  free_buffers(pdst);
1432 
1433  pdst->a_format = psrc->a_format;
1434  pdst->a_width = psrc->a_width;
1435  pdst->a_height = psrc->a_height;
1436  pdst->a_transform_type = psrc->a_transform_type;
1437  pdst->transform_type = psrc->transform_type;
1438  pdst->progressive = psrc->progressive;
1439  pdst->planes = psrc->planes;
1440 
1441  if (!pdst->plane[0].idwt_buf) {
1442  pdst->coded_width = pdst->a_width;
1443  pdst->coded_height = pdst->a_height;
1444  pdst->coded_format = pdst->a_format;
1445  pdst->transform_type = pdst->a_transform_type;
1446  ret = alloc_buffers(dst);
1447  if (ret < 0)
1448  return ret;
1449  }
1450 
1451  for (int plane = 0; plane < pdst->planes; plane++) {
1452  memcpy(pdst->plane[plane].band, psrc->plane[plane].band, sizeof(pdst->plane[plane].band));
1453  memcpy(pdst->plane[plane].idwt_buf, psrc->plane[plane].idwt_buf,
1454  pdst->plane[plane].idwt_size * sizeof(int16_t));
1455  }
1456 
1457  return 0;
1458 }
1459 #endif
 1460 
 1461 AVCodec ff_cfhd_decoder = {
 1462  .name = "cfhd",
1463  .long_name = NULL_IF_CONFIG_SMALL("GoPro CineForm HD"),
1464  .type = AVMEDIA_TYPE_VIDEO,
1465  .id = AV_CODEC_ID_CFHD,
1466  .priv_data_size = sizeof(CFHDContext),
1467  .init = cfhd_init,
1468  .close = cfhd_close,
1469  .decode = cfhd_decode,
1470  .update_thread_context = ONLY_IF_THREADS_ENABLED(update_thread_context),
 1471  .capabilities = AV_CODEC_CAP_DR1 | AV_CODEC_CAP_FRAME_THREADS,
 1472  .caps_internal = FF_CODEC_CAP_INIT_THREADSAFE | FF_CODEC_CAP_INIT_CLEANUP,
 1473 };
int32_t SampleType
Definition: ac3enc.h:63
static double val(void *priv, double ch)
Definition: aeval.c:76
Macro definitions for various function/variable attributes.
#define av_cold
Definition: attributes.h:88
static av_always_inline int even(uint64_t layout)
#define av_assert0(cond)
assert() equivalent, that is always enabled.
Definition: avassert.h:37
Libavcodec external API header.
static av_cold int init(AVCodecContext *avctx)
Definition: avrndec.c:31
void ff_free_vlc(VLC *vlc)
Definition: bitstream.c:431
refcounted data buffer API
static av_always_inline void bytestream2_skipu(GetByteContext *g, unsigned int size)
Definition: bytestream.h:174
static av_always_inline int bytestream2_get_bytes_left(GetByteContext *g)
Definition: bytestream.h:158
static av_always_inline void bytestream2_init(GetByteContext *g, const uint8_t *buf, int buf_size)
Definition: bytestream.h:137
static av_always_inline int bytestream2_seek(GetByteContext *g, int offset, int whence)
Definition: bytestream.h:212
static av_always_inline int bytestream2_tell(GetByteContext *g)
Definition: bytestream.h:192
#define s(width, name)
Definition: cbs_vp9.c:257
static void difference_coding(int16_t *band, int width, int height)
Definition: cfhd.c:117
static av_cold int cfhd_init(AVCodecContext *avctx)
Definition: cfhd.c:43
static void interlaced_vertical_filter(int16_t *output, int16_t *low, int16_t *high, int width, int linesize, int plane)
Definition: cfhd.c:195
static void inverse_temporal_filter(int16_t *low, int16_t *high, int width)
Definition: cfhd.c:208
#define ALPHA_COMPAND_GAIN
Definition: cfhd.c:41
static void init_plane_defaults(CFHDContext *s)
Definition: cfhd.c:73
static void process_alpha(int16_t *alpha, int width)
Definition: cfhd.c:137
static int cfhd_decode(AVCodecContext *avctx, void *data, int *got_frame, AVPacket *avpkt)
Definition: cfhd.c:375
static int alloc_buffers(AVCodecContext *avctx)
Definition: cfhd.c:246
static void init_frame_defaults(CFHDContext *s)
Definition: cfhd.c:87
#define ALPHA_COMPAND_DC_OFFSET
Definition: cfhd.c:40
static void process_bayer(AVFrame *frame, int bpc)
Definition: cfhd.c:151
static void peak_table(int16_t *band, Peak *peak, int length)
Definition: cfhd.c:129
static av_cold int cfhd_close(AVCodecContext *avctx)
Definition: cfhd.c:1404
static void init_peak_table_defaults(CFHDContext *s)
Definition: cfhd.c:80
static void free_buffers(CFHDContext *s)
Definition: cfhd.c:219
AVCodec ff_cfhd_decoder
Definition: cfhd.c:1461
static int dequant_and_decompand(CFHDContext *s, int level, int quantisation, int codebook)
Definition: cfhd.c:109
#define DWT_LEVELS
Definition: cfhd.h:108
int ff_cfhd_init_vlcs(CFHDContext *s)
Definition: cfhddata.c:276
#define SUBBAND_COUNT
Definition: cfhd.h:99
#define VLC_BITS
Definition: cfhd.h:98
@ Quantization
Definition: cfhd.h:76
@ SampleFlags
Definition: cfhd.h:81
@ BandHeight
Definition: cfhd.h:73
@ HighpassWidth
Definition: cfhd.h:65
@ SampleIndexTable
Definition: cfhd.h:36
@ VersionMajor
Definition: cfhd.h:38
@ EncodedFormat
Definition: cfhd.h:92
@ BandWidth
Definition: cfhd.h:72
@ PeakOffsetLow
Definition: cfhd.h:87
@ LowpassHeight
Definition: cfhd.h:57
@ BandSecondPass
Definition: cfhd.h:90
@ LowpassPrecision
Definition: cfhd.h:60
@ SubbandNumber
Definition: cfhd.h:71
@ BandHeader
Definition: cfhd.h:78
@ BitstreamMarker
Definition: cfhd.h:37
@ PeakLevel
Definition: cfhd.h:86
@ PrescaleTable
Definition: cfhd.h:91
@ HighpassHeight
Definition: cfhd.h:66
@ VersionEdit
Definition: cfhd.h:41
@ ImageWidth
Definition: cfhd.h:51
@ LowpassWidth
Definition: cfhd.h:56
@ BandEncoding
Definition: cfhd.h:75
@ DisplayHeight
Definition: cfhd.h:93
@ PeakOffsetHigh
Definition: cfhd.h:88
@ BandCodingFlags
Definition: cfhd.h:85
@ Version
Definition: cfhd.h:89
@ Precision
Definition: cfhd.h:83
@ VersionRevision
Definition: cfhd.h:40
@ ChannelCount
Definition: cfhd.h:44
@ ImageHeight
Definition: cfhd.h:52
@ SubbandBand
Definition: cfhd.h:74
@ ChannelNumber
Definition: cfhd.h:80
@ VersionMinor
Definition: cfhd.h:39
@ SubbandCount
Definition: cfhd.h:46
@ InputFormat
Definition: cfhd.h:84
@ FrameIndex
Definition: cfhd.h:53
#define DWT_LEVELS_3D
Definition: cfhd.h:109
#define SUBBAND_COUNT_3D
Definition: cfhd.h:100
av_cold void ff_cfhddsp_init(CFHDDSPContext *c, int depth, int bayer)
Definition: cfhddsp.c:106
static const unsigned codebook[256][2]
Definition: cfhdenc.c:42
common internal and external API header
#define AV_CEIL_RSHIFT(a, b)
Definition: common.h:58
#define av_clip_uintp2
Definition: common.h:146
#define FFSIGN(a)
Definition: common.h:73
#define NULL
Definition: coverity.c:32
#define abs(x)
Definition: cuda_runtime.h:35
static void decode(AVCodecContext *dec_ctx, AVPacket *pkt, AVFrame *frame, FILE *outfile)
Definition: decode_audio.c:71
static AVFrame * frame
channel
Use these values when setting the channel map with ebur128_set_channel().
Definition: ebur128.h:39
float re
Definition: fft.c:82
FrameType
G723.1 frame types.
Definition: g723_1.h:63
bitstream reader API header.
#define CLOSE_READER(name, gb)
Definition: get_bits.h:149
#define OPEN_READER(name, gb)
Definition: get_bits.h:138
#define GET_RL_VLC(level, run, name, gb, table, bits, max_depth, need_update)
Definition: get_bits.h:738
#define UPDATE_CACHE(name, gb)
Definition: get_bits.h:178
static int init_get_bits8(GetBitContext *s, const uint8_t *buffer, int byte_size)
Initialize GetBitContext.
Definition: get_bits.h:677
static int get_bits_count(const GetBitContext *s)
Definition: get_bits.h:219
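A minimal sketch, not the decoder's actual band parser, of the cached bitstream-reader pattern these macros form; it assumes an RL_VLC_ELEM table "rl_vlc" built elsewhere, the VLC_BITS width from cfhd.h, and illustrative names (read_levels_sketch, dst, count):

#include "get_bits.h"
#include "cfhd.h"

static int read_levels_sketch(const uint8_t *buf, int size,
                              const RL_VLC_ELEM *rl_vlc,
                              int16_t *dst, int count)
{
    GetBitContext gb;
    int i, level, run;
    int ret = init_get_bits8(&gb, buf, size);   /* byte-aligned initialization */
    if (ret < 0)
        return ret;

    OPEN_READER(re, &gb);                       /* hoist the bit cache into locals */
    for (i = 0; i < count; i++) {
        UPDATE_CACHE(re, &gb);                  /* refill the cache before each lookup */
        GET_RL_VLC(level, run, re, &gb, rl_vlc, VLC_BITS, 3, 1);
        dst[i] = level;                         /* run-length expansion omitted here */
    }
    CLOSE_READER(re, &gb);                      /* commit the reader position */

    return get_bits_count(&gb) >> 3;            /* bytes consumed so far */
}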
#define AV_CODEC_CAP_DR1
Codec uses get_buffer() or get_encode_buffer() for allocating buffers and supports custom allocators.
Definition: codec.h:52
#define AV_CODEC_CAP_FRAME_THREADS
Codec supports frame-level multithreading.
Definition: codec.h:108
@ AV_CODEC_ID_CFHD
Definition: codec_id.h:266
#define AVERROR_PATCHWELCOME
Not yet implemented in FFmpeg, patches welcome.
Definition: error.h:62
#define AVERROR_INVALIDDATA
Invalid data found when processing input.
Definition: error.h:59
#define AVERROR(e)
Definition: error.h:43
#define AV_LOG_DEBUG
Stuff which is only useful for libav* developers.
Definition: log.h:215
#define AV_LOG_WARNING
Something somehow does not look correct.
Definition: log.h:200
#define AV_LOG_ERROR
Something went wrong and cannot losslessly be recovered.
Definition: log.h:194
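A hedged illustration of how a decoder typically grades messages across these levels; the "tag" field, the thresholds and the strings are made up for the example, not taken from cfhd.c:

#include "avcodec.h"

static int handle_tag_sketch(AVCodecContext *avctx, int tag)
{
    if (tag == 0) {
        av_log(avctx, AV_LOG_ERROR, "Invalid zero tag\n");
        return AVERROR_INVALIDDATA;               /* hard error: data cannot be recovered */
    }
    if (tag > 100) {
        av_log(avctx, AV_LOG_WARNING, "Unknown tag %d, skipping\n", tag);
        return 0;                                 /* suspicious but recoverable */
    }
    av_log(avctx, AV_LOG_DEBUG, "tag %d\n", tag); /* developer-level tracing only */
    return 0;
}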
void * av_mallocz_array(size_t nmemb, size_t size)
Allocate a memory block for an array with av_mallocz().
Definition: mem.c:190
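A minimal sketch, not cfhd.c's actual alloc_buffers(), of the zeroed-array allocation and cleanup these helpers provide; the function and buffer names are illustrative:

#include "libavutil/mem.h"
#include "libavutil/error.h"

static int alloc_band_sketch(int16_t **buf, int width, int height)
{
    *buf = av_mallocz_array(width * height, sizeof(**buf));  /* zero-filled array */
    if (!*buf)
        return AVERROR(ENOMEM);
    return 0;
}

/* On close, av_freep(buf) releases the block and resets the pointer to NULL. */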
@ AVMEDIA_TYPE_VIDEO
Definition: avutil.h:201
#define B
Definition: huffyuvdsp.h:32
#define R
Definition: huffyuvdsp.h:34
static const int16_t alpha[]
Definition: ilbcdata.h:55
misc image utilities
int i
Definition: input.c:407
#define FF_CODEC_CAP_INIT_THREADSAFE
The codec does not modify any global variables in the init function, allowing to call the init function without locking any global mutexes.
Definition: internal.h:41
#define FF_CODEC_CAP_INIT_CLEANUP
The codec allows calling the close function for deallocation even if the init function returned a failure.
Definition: internal.h:49
int ff_set_dimensions(AVCodecContext *s, int width, int height)
Check that the provided frame dimensions are valid and set them on the codec context.
Definition: utils.c:84
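A hedged sketch of how a decoder typically applies bitstream-derived dimensions; "coded_width"/"coded_height" here stand in for values parsed from the sample headers:

#include "avcodec.h"
#include "internal.h"

static int apply_dimensions_sketch(AVCodecContext *avctx,
                                   int coded_width, int coded_height)
{
    int ret = ff_set_dimensions(avctx, coded_width, coded_height);
    if (ret < 0)            /* rejects invalid or overflowing sizes */
        return ret;
    return 0;
}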
common internal API header
#define NULL_IF_CONFIG_SMALL(x)
Return NULL if CONFIG_SMALL is true, otherwise the argument without modification.
Definition: internal.h:117
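A hedged sketch, not a verbatim copy of ff_cfhd_decoder, of how NULL_IF_CONFIG_SMALL and the capability flags listed in this index typically come together in a decoder's AVCodec definition:

#include "avcodec.h"
#include "internal.h"
#include "cfhd.h"

AVCodec ff_example_decoder = {
    .name           = "cfhd",
    .long_name      = NULL_IF_CONFIG_SMALL("GoPro CineForm HD"),
    .type           = AVMEDIA_TYPE_VIDEO,
    .id             = AV_CODEC_ID_CFHD,
    .priv_data_size = sizeof(CFHDContext),
    .init           = cfhd_init,
    .close          = cfhd_close,
    .decode         = cfhd_decode,
    .capabilities   = AV_CODEC_CAP_DR1 | AV_CODEC_CAP_FRAME_THREADS,
    .caps_internal  = FF_CODEC_CAP_INIT_THREADSAFE | FF_CODEC_CAP_INIT_CLEANUP,
};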
void avpriv_report_missing_feature(void *avc, const char *msg,...) av_printf_format(2, 3)
Log a generic warning message about a missing feature.
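A hedged fragment showing the usual pairing of this helper with an AVERROR_PATCHWELCOME return; the transform-type check mirrors the 2D/3D transforms cfhd accepts, but the function itself is illustrative:

#include "avcodec.h"
#include "libavutil/internal.h"   /* avpriv_report_missing_feature() */

static int check_transform_sketch(AVCodecContext *avctx, int transform_type)
{
    if (transform_type != 0 && transform_type != 2) {
        avpriv_report_missing_feature(avctx, "Transform type %d", transform_type);
        return AVERROR_PATCHWELCOME;   /* unimplemented, as opposed to invalid data */
    }
    return 0;
}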
#define ONLY_IF_THREADS_ENABLED(x)
Define a function with only the non-default version specified.
Definition: internal.h:156
static const struct @322 planes[]
int stride
Definition: mace.c:144
#define FFALIGN(x, a)
Definition: macros.h:48
uint32_t tag
Definition: movenc.c:1611
const char data[16]
Definition: mxf.c:142
AVOptions.
int av_pix_fmt_count_planes(enum AVPixelFormat pix_fmt)
Definition: pixdesc.c:2613
int av_pix_fmt_get_chroma_sub_sample(enum AVPixelFormat pix_fmt, int *h_shift, int *v_shift)
Utility function to access log2_chroma_w log2_chroma_h from the pixel format AVPixFmtDescriptor.
Definition: pixdesc.c:2601
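A minimal sketch, loosely in the spirit of a planar decoder's buffer sizing, of what these two queries return; the function and variable names are illustrative:

#include "libavutil/pixdesc.h"
#include "libavutil/common.h"

static void plane_geometry_sketch(enum AVPixelFormat fmt, int coded_width,
                                  int *nb_planes, int *chroma_width)
{
    int h_shift, v_shift;

    *nb_planes = av_pix_fmt_count_planes(fmt);             /* e.g. 3 for YUV422P10 */
    av_pix_fmt_get_chroma_sub_sample(fmt, &h_shift, &v_shift);
    *chroma_width = AV_CEIL_RSHIFT(coded_width, h_shift);  /* rounded-up chroma width */
}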
#define AV_PIX_FMT_GBRAP12
Definition: pixfmt.h:420
#define AV_PIX_FMT_YUV422P10
Definition: pixfmt.h:400
#define AV_PIX_FMT_GBRP12
Definition: pixfmt.h:416
#define AV_PIX_FMT_BAYER_RGGB16
Definition: pixfmt.h:424
@ AV_PIX_FMT_NONE
Definition: pixfmt.h:65
FF_ENABLE_DEPRECATION_WARNINGS int ff_thread_get_buffer(AVCodecContext *avctx, ThreadFrame *f, int flags)
Wrapper around get_buffer() for frame-multithreaded codecs.
void ff_thread_finish_setup(AVCodecContext *avctx)
If the codec defines update_thread_context(), call this when they are ready for the next thread to start decoding the next frame.
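A hedged sketch of the usual ordering in a frame-threaded decoder: the output frame is obtained through the thread-aware wrapper, then setup is declared finished so the next decoding thread may start; the function name is illustrative:

#include "avcodec.h"
#include "thread.h"

static int get_output_frame_sketch(AVCodecContext *avctx, AVFrame *pic)
{
    ThreadFrame tframe = { .f = pic };
    int ret = ff_thread_get_buffer(avctx, &tframe, 0);  /* thread-safe get_buffer() */
    if (ret < 0)
        return ret;
    ff_thread_finish_setup(avctx);  /* header parsing done; no-op without frame threads */
    return 0;
}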
#define FF_ARRAY_ELEMS(a)
main external API structure.
Definition: avcodec.h:536
enum AVPixelFormat pix_fmt
Pixel format, see AV_PIX_FMT_xxx.
Definition: avcodec.h:746
int bits_per_raw_sample
Bits per sample/pixel of internal libavcodec pixel/sample format.
Definition: avcodec.h:1747
int coded_height
Definition: avcodec.h:724
int coded_width
Bitstream width / height, may be different from width/height e.g. when the decoded frame is cropped before being output or lowres is enabled.
Definition: avcodec.h:724
struct AVCodecInternal * internal
Private context used for internal data.
Definition: avcodec.h:571
void * priv_data
Definition: avcodec.h:563
int is_copy
Whether the parent AVCodecContext is a copy of the context which had init() called on it.
Definition: internal.h:136
AVCodec.
Definition: codec.h:197
const char * name
Name of the codec implementation.
Definition: codec.h:204
This structure describes decoded (raw) audio or video data.
Definition: frame.h:318
uint8_t * data[AV_NUM_DATA_POINTERS]
pointer to the picture/channel planes.
Definition: frame.h:332
int width
Definition: frame.h:376
int height
Definition: frame.h:376
int interlaced_frame
The content of the picture is interlaced.
Definition: frame.h:465
int linesize[AV_NUM_DATA_POINTERS]
For video, size in bytes of each picture line.
Definition: frame.h:349
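A small illustration, not decoder code, of how data[], linesize[] and height address a decoded plane; it walks only the first plane and assumes the frame is already filled:

#include "libavutil/frame.h"

static void touch_luma_sketch(const AVFrame *pic)
{
    const uint8_t *row = pic->data[0];          /* plane 0 is luma for planar YUV */
    for (int y = 0; y < pic->height; y++) {
        /* process one line here; 10-bit formats store two bytes per sample */
        row += pic->linesize[0];                /* stride in bytes, may exceed the width */
    }
}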
This structure stores compressed data.
Definition: packet.h:346
int size
Definition: packet.h:370
uint8_t * data
Definition: packet.h:369
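A hedged sketch of walking a packet's payload with the bytestream2 reader in the 16-bit big-endian tag/value style the CineForm headers use; the dispatch on each tag is left out and the function name is illustrative:

#include "avcodec.h"
#include "bytestream.h"

static void scan_tags_sketch(AVCodecContext *avctx, const AVPacket *avpkt)
{
    GetByteContext gb;
    bytestream2_init(&gb, avpkt->data, avpkt->size);

    while (bytestream2_get_bytes_left(&gb) >= 4) {
        int16_t  tag = bytestream2_get_be16(&gb);  /* signed: negative marks optional tags */
        uint16_t val = bytestream2_get_be16(&gb);
        av_log(avctx, AV_LOG_DEBUG, "tag %d val %d\n", tag, val);
        /* real code dispatches on tag/val here */
    }
}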
enum AVPixelFormat coded_format
Definition: cfhd.h:163
int a_format
Definition: cfhd.h:168
Plane plane[4]
Definition: cfhd.h:186
int coded_width
Definition: cfhd.h:160
int planes
Definition: cfhd.h:155
int a_transform_type
Definition: cfhd.h:169
int progressive
Definition: cfhd.h:164
int transform_type
Definition: cfhd.h:159
int a_height
Definition: cfhd.h:167
int a_width
Definition: cfhd.h:166
int coded_height
Definition: cfhd.h:161
void(* horiz_filter_clip)(int16_t *output, const int16_t *low, const int16_t *high, int width, int bpc)
Definition: cfhddsp.h:36
void(* vert_filter)(int16_t *output, ptrdiff_t out_stride, const int16_t *low, ptrdiff_t low_stride, const int16_t *high, ptrdiff_t high_stride, int width, int height)
Definition: cfhddsp.h:31
void(* horiz_filter)(int16_t *output, ptrdiff_t out_stride, const int16_t *low, ptrdiff_t low_stride, const int16_t *high, ptrdiff_t high_stride, int width, int height)
Definition: cfhddsp.h:26
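A hedged, heavily simplified sketch of how these three DSP entry points are chained for one inverse-wavelet recombination step; band pointers, strides and sizes are purely illustrative, and the real per-level wiring in cfhd.c is more involved:

#include "cfhddsp.h"

static void idwt_level_sketch(CFHDDSPContext *dsp,
                              int16_t *ll, int16_t *lh, int16_t *hl, int16_t *hh,
                              int16_t *tmp_l, int16_t *tmp_h, int16_t *out,
                              ptrdiff_t stride, int width, int height)
{
    /* combine band pairs vertically ... */
    dsp->vert_filter(tmp_l, stride, ll, stride, lh, stride, width, height);
    dsp->vert_filter(tmp_h, stride, hl, stride, hh, stride, width, height);
    /* ... then horizontally, producing an output twice as wide and tall */
    dsp->horiz_filter(out, stride * 2, tmp_l, stride, tmp_h, stride, width, height * 2);
}

/* dsp would have been set up once beforehand, e.g. ff_cfhddsp_init(dsp, 10, 0); */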
const uint8_t * buffer
Definition: bytestream.h:34
Definition: cfhd.h:136
GetByteContext base
Definition: cfhd.h:139
int level
Definition: cfhd.h:137
Definition: cfhd.h:120
int idwt_size
Definition: cfhd.h:127
SubBand band[DWT_LEVELS_3D][4]
Definition: cfhd.h:133
int16_t * idwt_buf
Definition: cfhd.h:125
int8_t read_ok
Definition: cfhd.h:117
uint8_t run
Definition: svq3.c:205
uint8_t level
Definition: svq3.c:206
#define av_malloc_array(a, b)
#define av_freep(p)
#define av_log(a,...)
#define src
Definition: vp8dsp.c:255
static void finish(void)
Definition: movenc.c:342
#define height
#define width
const char * b
Definition: vf_curves.c:118
const char * g
Definition: vf_curves.c:117
const char * r
Definition: vf_curves.c:116
if(ret< 0)
Definition: vf_mcdeint.c:282
static const double coeff[2][5]
Definition: vf_owdenoise.c:73
static const int factor[16]
Definition: vf_pp7.c:77
static const uint8_t offset[127][2]
Definition: vf_spp.c:107
TransformType
Definition: webp.c:110