FFmpeg  4.4.6
vf_deband.c
/*
 * Copyright (c) 2015 Niklas Haas
 * Copyright (c) 2015 Paul B Mahol
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 * SOFTWARE.
 */

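/*
 * Debanding filter: each pixel is compared against four reference samples
 * taken at mirrored pseudo-random offsets around it; when the differences
 * stay below the per-plane threshold, the pixel is replaced by the average
 * of the references, smoothing banding steps while leaving real edges alone.
 *
 * Illustrative command line (option names as declared below):
 *   ffmpeg -i input.mkv -vf deband=range=22 output.mkv
 */
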
#include "libavutil/opt.h"
#include "libavutil/pixdesc.h"
#include "avfilter.h"
#include "internal.h"
#include "video.h"

typedef struct DebandContext {
    const AVClass *class;

    int coupling;
    float threshold[4];
    int range;
    int blur;
    float direction;

    int nb_components;
    int planewidth[4];
    int planeheight[4];
    int shift[2];
    int thr[4];

    int *x_pos;
    int *y_pos;

    int (*deband)(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs);
} DebandContext;

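/*
 * Thresholds are given as a fraction of the full sample range and converted
 * to the integer thr[] values in config_input(); range and direction bound
 * the pseudo-random reference offsets (negative values select a fixed
 * distance or angle instead of a random one).
 */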
#define OFFSET(x) offsetof(DebandContext, x)
#define FLAGS AV_OPT_FLAG_FILTERING_PARAM|AV_OPT_FLAG_VIDEO_PARAM|AV_OPT_FLAG_RUNTIME_PARAM

static const AVOption deband_options[] = {
    { "1thr",      "set 1st plane threshold", OFFSET(threshold[0]), AV_OPT_TYPE_FLOAT, {.dbl=0.02},   0.00003, 0.5,     FLAGS },
    { "2thr",      "set 2nd plane threshold", OFFSET(threshold[1]), AV_OPT_TYPE_FLOAT, {.dbl=0.02},   0.00003, 0.5,     FLAGS },
    { "3thr",      "set 3rd plane threshold", OFFSET(threshold[2]), AV_OPT_TYPE_FLOAT, {.dbl=0.02},   0.00003, 0.5,     FLAGS },
    { "4thr",      "set 4th plane threshold", OFFSET(threshold[3]), AV_OPT_TYPE_FLOAT, {.dbl=0.02},   0.00003, 0.5,     FLAGS },
    { "range",     "set range",               OFFSET(range),        AV_OPT_TYPE_INT,   {.i64=16},     INT_MIN, INT_MAX, FLAGS },
    { "r",         "set range",               OFFSET(range),        AV_OPT_TYPE_INT,   {.i64=16},     INT_MIN, INT_MAX, FLAGS },
    { "direction", "set direction",           OFFSET(direction),    AV_OPT_TYPE_FLOAT, {.dbl=2*M_PI}, -2*M_PI, 2*M_PI,  FLAGS },
    { "d",         "set direction",           OFFSET(direction),    AV_OPT_TYPE_FLOAT, {.dbl=2*M_PI}, -2*M_PI, 2*M_PI,  FLAGS },
    { "blur",      "set blur",                OFFSET(blur),         AV_OPT_TYPE_BOOL,  {.i64=1},      0,       1,       FLAGS },
    { "b",         "set blur",                OFFSET(blur),         AV_OPT_TYPE_BOOL,  {.i64=1},      0,       1,       FLAGS },
    { "coupling",  "set plane coupling",      OFFSET(coupling),     AV_OPT_TYPE_BOOL,  {.i64=0},      0,       1,       FLAGS },
    { "c",         "set plane coupling",      OFFSET(coupling),     AV_OPT_TYPE_BOOL,  {.i64=0},      0,       1,       FLAGS },
    { NULL }
};

AVFILTER_DEFINE_CLASS(deband);

static int query_formats(AVFilterContext *ctx)
{
    DebandContext *s = ctx->priv;

    static const enum AVPixelFormat pix_fmts[] = {
        /* ... 8- to 16-bit grayscale/YUV/GBR formats (list elided in this listing) ... */
    };

    static const enum AVPixelFormat cpix_fmts[] = {
        /* ... formats supported with plane coupling (list elided in this listing) ... */
    };

    AVFilterFormats *fmts_list = ff_make_format_list(s->coupling ? cpix_fmts : pix_fmts);
    if (!fmts_list)
        return AVERROR(ENOMEM);

    return ff_set_common_formats(ctx, fmts_list);
}

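/*
 * Cheap hash-style pseudo-random generator (the classic "sin hash" from
 * shader code): returns the fractional part of a large sine product, i.e. a
 * deterministic value in [0, 1) for every (x, y) pair.
 */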
static float frand(int x, int y)
{
    const float r = sinf(x * 12.9898f + y * 78.233f) * 43758.545f;

    return r - floorf(r);
}

static int inline get_avg(int ref0, int ref1, int ref2, int ref3)
{
    return (ref0 + ref1 + ref2 + ref3) / 4;
}

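/* Per-frame data handed to the slice-threaded deband callbacks. */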
typedef struct ThreadData {
    AVFrame *in, *out;
} ThreadData;

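/*
 * 8-bit, per-plane kernel. For every pixel, four reference samples are read
 * at mirrored offsets (+/-x_pos, +/-y_pos), clipped to the plane. In blur
 * mode the pixel becomes the reference average when it differs from that
 * average by less than the plane threshold; otherwise all four individual
 * differences must stay below the threshold before the average is used.
 */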
static int deband_8_c(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
{
    DebandContext *s = ctx->priv;
    ThreadData *td = arg;
    AVFrame *in = td->in;
    AVFrame *out = td->out;
    int x, y, p;

    for (p = 0; p < s->nb_components; p++) {
        const uint8_t *src_ptr = (const uint8_t *)in->data[p];
        uint8_t *dst_ptr = (uint8_t *)out->data[p];
        const int dst_linesize = out->linesize[p];
        const int src_linesize = in->linesize[p];
        const int thr = s->thr[p];
        const int start = (s->planeheight[p] *  jobnr   ) / nb_jobs;
        const int end   = (s->planeheight[p] * (jobnr+1)) / nb_jobs;
        const int w = s->planewidth[p] - 1;
        const int h = s->planeheight[p] - 1;

        for (y = start; y < end; y++) {
            const int pos = y * s->planewidth[0];

            for (x = 0; x < s->planewidth[p]; x++) {
                const int x_pos = s->x_pos[pos + x];
                const int y_pos = s->y_pos[pos + x];
                const int ref0 = src_ptr[av_clip(y +  y_pos, 0, h) * src_linesize + av_clip(x +  x_pos, 0, w)];
                const int ref1 = src_ptr[av_clip(y + -y_pos, 0, h) * src_linesize + av_clip(x +  x_pos, 0, w)];
                const int ref2 = src_ptr[av_clip(y + -y_pos, 0, h) * src_linesize + av_clip(x + -x_pos, 0, w)];
                const int ref3 = src_ptr[av_clip(y +  y_pos, 0, h) * src_linesize + av_clip(x + -x_pos, 0, w)];
                const int src0 = src_ptr[y * src_linesize + x];

                if (s->blur) {
                    const int avg = get_avg(ref0, ref1, ref2, ref3);
                    const int diff = FFABS(src0 - avg);

                    dst_ptr[y * dst_linesize + x] = diff < thr ? avg : src0;
                } else {
                    dst_ptr[y * dst_linesize + x] = (FFABS(src0 - ref0) < thr) &&
                                                    (FFABS(src0 - ref1) < thr) &&
                                                    (FFABS(src0 - ref2) < thr) &&
                                                    (FFABS(src0 - ref3) < thr) ? get_avg(ref0, ref1, ref2, ref3) : src0;
                }
            }
        }
    }

    return 0;
}

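/*
 * 8-bit coupling kernel: the threshold test is evaluated for every plane at
 * the same pixel position, and the reference averages are written only if
 * all planes pass; otherwise every plane keeps its source value.
 */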
static int deband_8_coupling_c(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
{
    DebandContext *s = ctx->priv;
    ThreadData *td = arg;
    AVFrame *in = td->in;
    AVFrame *out = td->out;
    const int start = (s->planeheight[0] *  jobnr   ) / nb_jobs;
    const int end   = (s->planeheight[0] * (jobnr+1)) / nb_jobs;
    int x, y, p;

    for (y = start; y < end; y++) {
        const int pos = y * s->planewidth[0];

        for (x = 0; x < s->planewidth[0]; x++) {
            const int x_pos = s->x_pos[pos + x];
            const int y_pos = s->y_pos[pos + x];
            int avg[4], cmp[4] = { 0 }, src[4];

            for (p = 0; p < s->nb_components; p++) {
                const uint8_t *src_ptr = (const uint8_t *)in->data[p];
                const int src_linesize = in->linesize[p];
                const int thr = s->thr[p];
                const int w = s->planewidth[p] - 1;
                const int h = s->planeheight[p] - 1;
                const int ref0 = src_ptr[av_clip(y +  y_pos, 0, h) * src_linesize + av_clip(x +  x_pos, 0, w)];
                const int ref1 = src_ptr[av_clip(y + -y_pos, 0, h) * src_linesize + av_clip(x +  x_pos, 0, w)];
                const int ref2 = src_ptr[av_clip(y + -y_pos, 0, h) * src_linesize + av_clip(x + -x_pos, 0, w)];
                const int ref3 = src_ptr[av_clip(y +  y_pos, 0, h) * src_linesize + av_clip(x + -x_pos, 0, w)];
                const int src0 = src_ptr[y * src_linesize + x];

                src[p] = src0;
                avg[p] = get_avg(ref0, ref1, ref2, ref3);

                if (s->blur) {
                    cmp[p] = FFABS(src0 - avg[p]) < thr;
                } else {
                    cmp[p] = (FFABS(src0 - ref0) < thr) &&
                             (FFABS(src0 - ref1) < thr) &&
                             (FFABS(src0 - ref2) < thr) &&
                             (FFABS(src0 - ref3) < thr);
                }
            }

            for (p = 0; p < s->nb_components; p++)
                if (!cmp[p])
                    break;
            if (p == s->nb_components) {
                for (p = 0; p < s->nb_components; p++) {
                    const int dst_linesize = out->linesize[p];

                    out->data[p][y * dst_linesize + x] = avg[p];
                }
            } else {
                for (p = 0; p < s->nb_components; p++) {
                    const int dst_linesize = out->linesize[p];

                    out->data[p][y * dst_linesize + x] = src[p];
                }
            }
        }
    }

    return 0;
}

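/*
 * 16-bit variant of the coupling kernel; linesizes are divided by two so the
 * uint16_t pointers can be indexed in samples rather than bytes.
 */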
static int deband_16_coupling_c(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
{
    DebandContext *s = ctx->priv;
    ThreadData *td = arg;
    AVFrame *in = td->in;
    AVFrame *out = td->out;
    const int start = (s->planeheight[0] *  jobnr   ) / nb_jobs;
    const int end   = (s->planeheight[0] * (jobnr+1)) / nb_jobs;
    int x, y, p, z;

    for (y = start; y < end; y++) {
        const int pos = y * s->planewidth[0];

        for (x = 0; x < s->planewidth[0]; x++) {
            const int x_pos = s->x_pos[pos + x];
            const int y_pos = s->y_pos[pos + x];
            int avg[4], cmp[4] = { 0 }, src[4];

            for (p = 0; p < s->nb_components; p++) {
                const uint16_t *src_ptr = (const uint16_t *)in->data[p];
                const int src_linesize = in->linesize[p] / 2;
                const int thr = s->thr[p];
                const int w = s->planewidth[p] - 1;
                const int h = s->planeheight[p] - 1;
                const int ref0 = src_ptr[av_clip(y +  y_pos, 0, h) * src_linesize + av_clip(x +  x_pos, 0, w)];
                const int ref1 = src_ptr[av_clip(y + -y_pos, 0, h) * src_linesize + av_clip(x +  x_pos, 0, w)];
                const int ref2 = src_ptr[av_clip(y + -y_pos, 0, h) * src_linesize + av_clip(x + -x_pos, 0, w)];
                const int ref3 = src_ptr[av_clip(y +  y_pos, 0, h) * src_linesize + av_clip(x + -x_pos, 0, w)];
                const int src0 = src_ptr[y * src_linesize + x];

                src[p] = src0;
                avg[p] = get_avg(ref0, ref1, ref2, ref3);

                if (s->blur) {
                    cmp[p] = FFABS(src0 - avg[p]) < thr;
                } else {
                    cmp[p] = (FFABS(src0 - ref0) < thr) &&
                             (FFABS(src0 - ref1) < thr) &&
                             (FFABS(src0 - ref2) < thr) &&
                             (FFABS(src0 - ref3) < thr);
                }
            }

            for (z = 0; z < s->nb_components; z++)
                if (!cmp[z])
                    break;
            if (z == s->nb_components) {
                for (p = 0; p < s->nb_components; p++) {
                    const int dst_linesize = out->linesize[p] / 2;
                    uint16_t *dst = (uint16_t *)out->data[p] + y * dst_linesize + x;

                    dst[0] = avg[p];
                }
            } else {
                for (p = 0; p < s->nb_components; p++) {
                    const int dst_linesize = out->linesize[p] / 2;
                    uint16_t *dst = (uint16_t *)out->data[p] + y * dst_linesize + x;

                    dst[0] = src[p];
                }
            }
        }
    }

    return 0;
}

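/* 16-bit, per-plane kernel; the same logic as deband_8_c on uint16_t samples. */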
static int deband_16_c(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
{
    DebandContext *s = ctx->priv;
    ThreadData *td = arg;
    AVFrame *in = td->in;
    AVFrame *out = td->out;
    int x, y, p;

    for (p = 0; p < s->nb_components; p++) {
        const uint16_t *src_ptr = (const uint16_t *)in->data[p];
        uint16_t *dst_ptr = (uint16_t *)out->data[p];
        const int dst_linesize = out->linesize[p] / 2;
        const int src_linesize = in->linesize[p] / 2;
        const int thr = s->thr[p];
        const int start = (s->planeheight[p] *  jobnr   ) / nb_jobs;
        const int end   = (s->planeheight[p] * (jobnr+1)) / nb_jobs;
        const int w = s->planewidth[p] - 1;
        const int h = s->planeheight[p] - 1;

        for (y = start; y < end; y++) {
            const int pos = y * s->planewidth[0];

            for (x = 0; x < s->planewidth[p]; x++) {
                const int x_pos = s->x_pos[pos + x];
                const int y_pos = s->y_pos[pos + x];
                const int ref0 = src_ptr[av_clip(y +  y_pos, 0, h) * src_linesize + av_clip(x +  x_pos, 0, w)];
                const int ref1 = src_ptr[av_clip(y + -y_pos, 0, h) * src_linesize + av_clip(x +  x_pos, 0, w)];
                const int ref2 = src_ptr[av_clip(y + -y_pos, 0, h) * src_linesize + av_clip(x + -x_pos, 0, w)];
                const int ref3 = src_ptr[av_clip(y +  y_pos, 0, h) * src_linesize + av_clip(x + -x_pos, 0, w)];
                const int src0 = src_ptr[y * src_linesize + x];

                if (s->blur) {
                    const int avg = get_avg(ref0, ref1, ref2, ref3);
                    const int diff = FFABS(src0 - avg);

                    dst_ptr[y * dst_linesize + x] = diff < thr ? avg : src0;
                } else {
                    dst_ptr[y * dst_linesize + x] = (FFABS(src0 - ref0) < thr) &&
                                                    (FFABS(src0 - ref1) < thr) &&
                                                    (FFABS(src0 - ref2) < thr) &&
                                                    (FFABS(src0 - ref3) < thr) ? get_avg(ref0, ref1, ref2, ref3) : src0;
                }
            }
        }
    }

    return 0;
}

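/*
 * Link configuration: derive plane dimensions from the pixel format, select
 * the 8- or 16-bit kernel, convert the float thresholds into integer
 * per-plane thresholds against the maximum sample value, and precompute one
 * pseudo-random (x, y) reference offset per luma-plane pixel from the
 * "direction" and "range" options. Also rerun by process_command().
 */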
static int config_input(AVFilterLink *inlink)
{
    const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(inlink->format);
    AVFilterContext *ctx = inlink->dst;
    DebandContext *s = ctx->priv;
    const float direction = s->direction;
    const int range = s->range;
    int x, y;

    s->nb_components = desc->nb_components;

    s->planeheight[1] = s->planeheight[2] = AV_CEIL_RSHIFT(inlink->h, desc->log2_chroma_h);
    s->planeheight[0] = s->planeheight[3] = inlink->h;
    s->planewidth[1]  = s->planewidth[2]  = AV_CEIL_RSHIFT(inlink->w, desc->log2_chroma_w);
    s->planewidth[0]  = s->planewidth[3]  = inlink->w;
    s->shift[0] = desc->log2_chroma_w;
    s->shift[1] = desc->log2_chroma_h;

    if (s->coupling)
        s->deband = desc->comp[0].depth > 8 ? deband_16_coupling_c : deband_8_coupling_c;
    else
        s->deband = desc->comp[0].depth > 8 ? deband_16_c : deband_8_c;

    s->thr[0] = ((1 << desc->comp[0].depth) - 1) * s->threshold[0];
    s->thr[1] = ((1 << desc->comp[1].depth) - 1) * s->threshold[1];
    s->thr[2] = ((1 << desc->comp[2].depth) - 1) * s->threshold[2];
    s->thr[3] = ((1 << desc->comp[3].depth) - 1) * s->threshold[3];

    if (!s->x_pos)
        s->x_pos = av_malloc(s->planewidth[0] * s->planeheight[0] * sizeof(*s->x_pos));
    if (!s->y_pos)
        s->y_pos = av_malloc(s->planewidth[0] * s->planeheight[0] * sizeof(*s->y_pos));
    if (!s->x_pos || !s->y_pos)
        return AVERROR(ENOMEM);

    for (y = 0; y < s->planeheight[0]; y++) {
        for (x = 0; x < s->planewidth[0]; x++) {
            const float r = frand(x, y);
            const float dir = direction < 0 ? -direction : r * direction;
            const int dist = range < 0 ? -range : r * range;

            s->x_pos[y * s->planewidth[0] + x] = cosf(dir) * dist;
            s->y_pos[y * s->planewidth[0] + x] = sinf(dir) * dist;
        }
    }

    return 0;
}

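/*
 * Per-frame entry point: allocate an output frame, copy the frame
 * properties, run the selected deband kernel over horizontal slices, then
 * forward the result downstream.
 */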
static int filter_frame(AVFilterLink *inlink, AVFrame *in)
{
    AVFilterContext *ctx = inlink->dst;
    AVFilterLink *outlink = ctx->outputs[0];
    DebandContext *s = ctx->priv;
    AVFrame *out;
    ThreadData td;

    out = ff_get_video_buffer(outlink, outlink->w, outlink->h);
    if (!out) {
        av_frame_free(&in);
        return AVERROR(ENOMEM);
    }
    av_frame_copy_props(out, in);

    td.in = in; td.out = out;
    ctx->internal->execute(ctx, s->deband, &td, NULL, FFMIN3(s->planeheight[1],
                                                             s->planeheight[2],
                                                             ff_filter_get_nb_threads(ctx)));

    av_frame_free(&in);
    return ff_filter_frame(outlink, out);
}

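/*
 * Runtime commands: let the generic option handler update the fields, then
 * rerun config_input() so thresholds and offset tables reflect the new values.
 */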
static int process_command(AVFilterContext *ctx, const char *cmd, const char *args,
                           char *res, int res_len, int flags)
{
    int ret = ff_filter_process_command(ctx, cmd, args, res, res_len, flags);

    if (ret < 0)
        return ret;

    return config_input(ctx->inputs[0]);
}

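/* Release the per-pixel offset tables allocated in config_input(). */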
static av_cold void uninit(AVFilterContext *ctx)
{
    DebandContext *s = ctx->priv;

    av_freep(&s->x_pos);
    av_freep(&s->y_pos);
}

static const AVFilterPad avfilter_vf_deband_inputs[] = {
    {
        .name         = "default",
        .type         = AVMEDIA_TYPE_VIDEO,
        .config_props = config_input,
        .filter_frame = filter_frame,
    },
    { NULL }
};

static const AVFilterPad avfilter_vf_deband_outputs[] = {
    {
        .name = "default",
        .type = AVMEDIA_TYPE_VIDEO,
    },
    { NULL }
};

AVFilter ff_vf_deband = {
    .name            = "deband",
    .description     = NULL_IF_CONFIG_SMALL("Debands video."),
    .priv_size       = sizeof(DebandContext),
    .priv_class      = &deband_class,
    .uninit          = uninit,
    .query_formats   = query_formats,
    .inputs          = avfilter_vf_deband_inputs,
    .outputs         = avfilter_vf_deband_outputs,
    .process_command = process_command,
    .flags           = AVFILTER_FLAG_SUPPORT_TIMELINE_GENERIC | AVFILTER_FLAG_SLICE_THREADS,
};