FFmpeg  4.4.6
vf_scale.c
1 /*
2  * Copyright (c) 2007 Bobby Bingham
3  *
4  * This file is part of FFmpeg.
5  *
6  * FFmpeg is free software; you can redistribute it and/or
7  * modify it under the terms of the GNU Lesser General Public
8  * License as published by the Free Software Foundation; either
9  * version 2.1 of the License, or (at your option) any later version.
10  *
11  * FFmpeg is distributed in the hope that it will be useful,
12  * but WITHOUT ANY WARRANTY; without even the implied warranty of
13  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14  * Lesser General Public License for more details.
15  *
16  * You should have received a copy of the GNU Lesser General Public
17  * License along with FFmpeg; if not, write to the Free Software
18  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
19  */
20 
21 /**
22  * @file
23  * scale video filter
24  */
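/*
 * Illustrative usage (editor's addition, not part of the original source):
 * this filter is normally driven through the libavfilter graph syntax, e.g.
 *
 *     ffmpeg -i input.mp4 -vf "scale=w=1280:h=720" output.mp4
 *     ffmpeg -i input.mp4 -vf "scale=iw/2:ih/2"    output.mp4
 *
 * The option values may be plain integers or expressions over the
 * variables listed in var_names[] below.
 */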
25 
26 #include <stdio.h>
27 #include <string.h>
28 
29 #include "avfilter.h"
30 #include "formats.h"
31 #include "internal.h"
32 #include "scale_eval.h"
33 #include "video.h"
34 #include "libavutil/avstring.h"
35 #include "libavutil/eval.h"
36 #include "libavutil/internal.h"
37 #include "libavutil/mathematics.h"
38 #include "libavutil/opt.h"
39 #include "libavutil/parseutils.h"
40 #include "libavutil/pixdesc.h"
41 #include "libavutil/imgutils.h"
42 #include "libavutil/avassert.h"
43 #include "libswscale/swscale.h"
44 
45 static const char *const var_names[] = {
46  "in_w", "iw",
47  "in_h", "ih",
48  "out_w", "ow",
49  "out_h", "oh",
50  "a",
51  "sar",
52  "dar",
53  "hsub",
54  "vsub",
55  "ohsub",
56  "ovsub",
57  "n",
58  "t",
59  "pos",
60  "main_w",
61  "main_h",
62  "main_a",
63  "main_sar",
64  "main_dar", "mdar",
65  "main_hsub",
66  "main_vsub",
67  "main_n",
68  "main_t",
69  "main_pos",
70  NULL
71 };
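/*
 * Editor's note: the strings above are the variables usable in the "w" and
 * "h" option expressions. A small illustrative sketch of typical usage:
 *
 *     scale=w='min(iw\,1920)':h=-2        cap the width at 1920, keep aspect
 *     scale2ref=w=oh*mdar:h=ih            scale2ref-only main_* / mdar names
 *
 * The main_* and mdar variables are only meaningful for the scale2ref
 * filter; check_exprs() below rejects them for plain scale.
 */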
72 
73 enum var_name {
74  VAR_IN_W, VAR_IW,
75  VAR_IN_H, VAR_IH,
76  VAR_OUT_W, VAR_OW,
77  VAR_OUT_H, VAR_OH,
78  VAR_A,
79  VAR_SAR,
80  VAR_DAR,
81  VAR_HSUB,
82  VAR_VSUB,
83  VAR_OHSUB,
84  VAR_OVSUB,
85  VAR_N,
86  VAR_T,
87  VAR_POS,
88  VAR_S2R_MAIN_W,
89  VAR_S2R_MAIN_H,
90  VAR_S2R_MAIN_A,
91  VAR_S2R_MAIN_SAR,
92  VAR_S2R_MAIN_DAR, VAR_S2R_MDAR,
93  VAR_S2R_MAIN_HSUB,
94  VAR_S2R_MAIN_VSUB,
95  VAR_S2R_MAIN_N,
96  VAR_S2R_MAIN_T,
97  VAR_S2R_MAIN_POS,
98  VARS_NB
99 };
100 
101 enum EvalMode {
102  EVAL_MODE_INIT,
103  EVAL_MODE_FRAME,
104  EVAL_MODE_NB
105 };
106 
107 typedef struct ScaleContext {
108  const AVClass *class;
109  struct SwsContext *sws; ///< software scaler context
110  struct SwsContext *isws[2]; ///< software scaler context for interlaced material
111  AVDictionary *opts;
112 
113  /**
114  * New dimensions. Special values are:
115  * 0 = original width/height
116  * -1 = keep original aspect
117  * -N = try to keep aspect but make sure it is divisible by N
118  */
119  int w, h;
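 /* Editor's note (illustrative): with "scale=w=-2:h=720" the height is forced
  * to 720 while the width is derived from the input aspect ratio and rounded
  * to a multiple of 2, per the special values documented above. */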
120  char *size_str;
121  unsigned int flags; ///sws flags
122  double param[2]; // sws params
123 
124  int hsub, vsub; ///< chroma subsampling
125  int slice_y; ///< top of current output slice
126  int input_is_pal; ///< set to 1 if the input format is paletted
127  int output_is_pal; ///< set to 1 if the output format is paletted
128  int interlaced;
129 
130  char *w_expr; ///< width expression string
131  char *h_expr; ///< height expression string
132  AVExpr *w_pexpr;
133  AVExpr *h_pexpr;
134  double var_values[VARS_NB];
135 
136  char *flags_str;
137 
138  char *in_color_matrix;
139  char *out_color_matrix;
140 
141  int in_range;
142  int out_range;
143 
144  int out_h_chr_pos;
145  int out_v_chr_pos;
146  int in_h_chr_pos;
147  int in_v_chr_pos;
148 
149  int force_original_aspect_ratio;
150  int force_divisible_by;
151 
152  int nb_slices;
153 
154  int eval_mode; ///< expression evaluation mode
155 
156 } ScaleContext;
157 
158 AVFilter ff_vf_scale2ref;
159 
160 static int config_props(AVFilterLink *outlink);
161 
162 static int check_exprs(AVFilterContext *ctx)
163 {
164  ScaleContext *scale = ctx->priv;
165  unsigned vars_w[VARS_NB] = { 0 }, vars_h[VARS_NB] = { 0 };
166 
167  if (!scale->w_pexpr && !scale->h_pexpr)
168  return AVERROR(EINVAL);
169 
170  if (scale->w_pexpr)
171  av_expr_count_vars(scale->w_pexpr, vars_w, VARS_NB);
172  if (scale->h_pexpr)
173  av_expr_count_vars(scale->h_pexpr, vars_h, VARS_NB);
174 
175  if (vars_w[VAR_OUT_W] || vars_w[VAR_OW]) {
176  av_log(ctx, AV_LOG_ERROR, "Width expression cannot be self-referencing: '%s'.\n", scale->w_expr);
177  return AVERROR(EINVAL);
178  }
179 
180  if (vars_h[VAR_OUT_H] || vars_h[VAR_OH]) {
181  av_log(ctx, AV_LOG_ERROR, "Height expression cannot be self-referencing: '%s'.\n", scale->h_expr);
182  return AVERROR(EINVAL);
183  }
184 
185  if ((vars_w[VAR_OUT_H] || vars_w[VAR_OH]) &&
186  (vars_h[VAR_OUT_W] || vars_h[VAR_OW])) {
187  av_log(ctx, AV_LOG_WARNING, "Circular references detected for width '%s' and height '%s' - possibly invalid.\n", scale->w_expr, scale->h_expr);
188  }
189 
190  if (ctx->filter != &ff_vf_scale2ref &&
191  (vars_w[VAR_S2R_MAIN_W] || vars_h[VAR_S2R_MAIN_W] ||
192  vars_w[VAR_S2R_MAIN_H] || vars_h[VAR_S2R_MAIN_H] ||
193  vars_w[VAR_S2R_MAIN_A] || vars_h[VAR_S2R_MAIN_A] ||
194  vars_w[VAR_S2R_MAIN_SAR] || vars_h[VAR_S2R_MAIN_SAR] ||
195  vars_w[VAR_S2R_MAIN_DAR] || vars_h[VAR_S2R_MAIN_DAR] ||
196  vars_w[VAR_S2R_MDAR] || vars_h[VAR_S2R_MDAR] ||
197  vars_w[VAR_S2R_MAIN_HSUB] || vars_h[VAR_S2R_MAIN_HSUB] ||
198  vars_w[VAR_S2R_MAIN_VSUB] || vars_h[VAR_S2R_MAIN_VSUB] ||
199  vars_w[VAR_S2R_MAIN_N] || vars_h[VAR_S2R_MAIN_N] ||
200  vars_w[VAR_S2R_MAIN_T] || vars_h[VAR_S2R_MAIN_T] ||
201  vars_w[VAR_S2R_MAIN_POS] || vars_h[VAR_S2R_MAIN_POS]) ) {
202  av_log(ctx, AV_LOG_ERROR, "Expressions with scale2ref variables are not valid in scale filter.\n");
203  return AVERROR(EINVAL);
204  }
205 
206  if (scale->eval_mode == EVAL_MODE_INIT &&
207  (vars_w[VAR_N] || vars_h[VAR_N] ||
208  vars_w[VAR_T] || vars_h[VAR_T] ||
209  vars_w[VAR_POS] || vars_h[VAR_POS] ||
210  vars_w[VAR_S2R_MAIN_N] || vars_h[VAR_S2R_MAIN_N] ||
211  vars_w[VAR_S2R_MAIN_T] || vars_h[VAR_S2R_MAIN_T] ||
212  vars_w[VAR_S2R_MAIN_POS] || vars_h[VAR_S2R_MAIN_POS]) ) {
213  av_log(ctx, AV_LOG_ERROR, "Expressions with frame variables 'n', 't', 'pos' are not valid in init eval_mode.\n");
214  return AVERROR(EINVAL);
215  }
216 
217  return 0;
218 }
219 
220 static int scale_parse_expr(AVFilterContext *ctx, char *str_expr, AVExpr **pexpr_ptr, const char *var, const char *args)
221 {
222  ScaleContext *scale = ctx->priv;
223  int ret, is_inited = 0;
224  char *old_str_expr = NULL;
225  AVExpr *old_pexpr = NULL;
226 
227  if (str_expr) {
228  old_str_expr = av_strdup(str_expr);
229  if (!old_str_expr)
230  return AVERROR(ENOMEM);
231  av_opt_set(scale, var, args, 0);
232  }
233 
234  if (*pexpr_ptr) {
235  old_pexpr = *pexpr_ptr;
236  *pexpr_ptr = NULL;
237  is_inited = 1;
238  }
239 
240  ret = av_expr_parse(pexpr_ptr, args, var_names,
241  NULL, NULL, NULL, NULL, 0, ctx);
242  if (ret < 0) {
243  av_log(ctx, AV_LOG_ERROR, "Cannot parse expression for %s: '%s'\n", var, args);
244  goto revert;
245  }
246 
247  ret = check_exprs(ctx);
248  if (ret < 0)
249  goto revert;
250 
251  if (is_inited && (ret = config_props(ctx->outputs[0])) < 0)
252  goto revert;
253 
254  av_expr_free(old_pexpr);
255  old_pexpr = NULL;
256  av_freep(&old_str_expr);
257 
258  return 0;
259 
260 revert:
261  av_expr_free(*pexpr_ptr);
262  *pexpr_ptr = NULL;
263  if (old_str_expr) {
264  av_opt_set(scale, var, old_str_expr, 0);
265  av_free(old_str_expr);
266  }
267  if (old_pexpr)
268  *pexpr_ptr = old_pexpr;
269 
270  return ret;
271 }
272 
273 static av_cold int init_dict(AVFilterContext *ctx, AVDictionary **opts)
274 {
275  ScaleContext *scale = ctx->priv;
276  int ret;
277 
278  if (scale->size_str && (scale->w_expr || scale->h_expr)) {
279  av_log(ctx, AV_LOG_ERROR,
280  "Size and width/height expressions cannot be set at the same time.\n");
281  return AVERROR(EINVAL);
282  }
283 
284  if (scale->w_expr && !scale->h_expr)
285  FFSWAP(char *, scale->w_expr, scale->size_str);
286 
287  if (scale->size_str) {
288  char buf[32];
289  if ((ret = av_parse_video_size(&scale->w, &scale->h, scale->size_str)) < 0) {
290  av_log(ctx, AV_LOG_ERROR,
291  "Invalid size '%s'\n", scale->size_str);
292  return ret;
293  }
294  snprintf(buf, sizeof(buf)-1, "%d", scale->w);
295  av_opt_set(scale, "w", buf, 0);
296  snprintf(buf, sizeof(buf)-1, "%d", scale->h);
297  av_opt_set(scale, "h", buf, 0);
298  }
299  if (!scale->w_expr)
300  av_opt_set(scale, "w", "iw", 0);
301  if (!scale->h_expr)
302  av_opt_set(scale, "h", "ih", 0);
303 
304  ret = scale_parse_expr(ctx, NULL, &scale->w_pexpr, "width", scale->w_expr);
305  if (ret < 0)
306  return ret;
307 
308  ret = scale_parse_expr(ctx, NULL, &scale->h_pexpr, "height", scale->h_expr);
309  if (ret < 0)
310  return ret;
311 
312  av_log(ctx, AV_LOG_VERBOSE, "w:%s h:%s flags:'%s' interl:%d\n",
313  scale->w_expr, scale->h_expr, (char *)av_x_if_null(scale->flags_str, ""), scale->interlaced);
314 
315  scale->flags = 0;
316 
317  if (scale->flags_str) {
318  const AVClass *class = sws_get_class();
319  const AVOption *o = av_opt_find(&class, "sws_flags", NULL, 0,
320  AV_OPT_SEARCH_FAKE_OBJ);
321  int ret = av_opt_eval_flags(&class, o, scale->flags_str, &scale->flags);
322  if (ret < 0)
323  return ret;
324  }
325  scale->opts = *opts;
326  *opts = NULL;
327 
328  return 0;
329 }
330 
331 static av_cold void uninit(AVFilterContext *ctx)
332 {
333  ScaleContext *scale = ctx->priv;
334  av_expr_free(scale->w_pexpr);
335  av_expr_free(scale->h_pexpr);
336  scale->w_pexpr = scale->h_pexpr = NULL;
337  sws_freeContext(scale->sws);
338  sws_freeContext(scale->isws[0]);
339  sws_freeContext(scale->isws[1]);
340  scale->sws = NULL;
341  av_dict_free(&scale->opts);
342 }
343 
344 static int query_formats(AVFilterContext *ctx)
345 {
346  AVFilterFormats *formats;
347  enum AVPixelFormat pix_fmt;
348  int ret;
349 
350  if (ctx->inputs[0]) {
351  const AVPixFmtDescriptor *desc = NULL;
352  formats = NULL;
353  while ((desc = av_pix_fmt_desc_next(desc))) {
354  pix_fmt = av_pix_fmt_desc_get_id(desc);
355  if ((sws_isSupportedInput(pix_fmt) ||
356  sws_isSupportedEndiannessConversion(pix_fmt))
357  && (ret = ff_add_format(&formats, pix_fmt)) < 0) {
358  return ret;
359  }
360  }
361  if ((ret = ff_formats_ref(formats, &ctx->inputs[0]->outcfg.formats)) < 0)
362  return ret;
363  }
364  if (ctx->outputs[0]) {
365  const AVPixFmtDescriptor *desc = NULL;
366  formats = NULL;
367  while ((desc = av_pix_fmt_desc_next(desc))) {
368  pix_fmt = av_pix_fmt_desc_get_id(desc);
369  if ((sws_isSupportedOutput(pix_fmt) || pix_fmt == AV_PIX_FMT_PAL8 ||
370  sws_isSupportedEndiannessConversion(pix_fmt))
371  && (ret = ff_add_format(&formats, pix_fmt)) < 0) {
372  return ret;
373  }
374  }
375  if ((ret = ff_formats_ref(formats, &ctx->outputs[0]->incfg.formats)) < 0)
376  return ret;
377  }
378 
379  return 0;
380 }
381 
382 static const int *parse_yuv_type(const char *s, enum AVColorSpace colorspace)
383 {
384  if (!s)
385  s = "bt601";
386 
387  if (s && strstr(s, "bt709")) {
388  colorspace = AVCOL_SPC_BT709;
389  } else if (s && strstr(s, "fcc")) {
390  colorspace = AVCOL_SPC_FCC;
391  } else if (s && strstr(s, "smpte240m")) {
392  colorspace = AVCOL_SPC_SMPTE240M;
393  } else if (s && (strstr(s, "bt601") || strstr(s, "bt470") || strstr(s, "smpte170m"))) {
394  colorspace = AVCOL_SPC_BT470BG;
395  } else if (s && strstr(s, "bt2020")) {
396  colorspace = AVCOL_SPC_BT2020_NCL;
397  }
398 
399  if (colorspace < 1 || colorspace > 10 || colorspace == 8) {
400  colorspace = AVCOL_SPC_BT470BG;
401  }
402 
403  return sws_getCoefficients(colorspace);
404 }
405 
406 static int scale_eval_dimensions(AVFilterContext *ctx)
407 {
408  ScaleContext *scale = ctx->priv;
409  const char scale2ref = ctx->filter == &ff_vf_scale2ref;
410  const AVFilterLink *inlink = scale2ref ? ctx->inputs[1] : ctx->inputs[0];
411  const AVFilterLink *outlink = ctx->outputs[0];
412  const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(inlink->format);
413  const AVPixFmtDescriptor *out_desc = av_pix_fmt_desc_get(outlink->format);
414  char *expr;
415  int eval_w, eval_h;
416  int ret;
417  double res;
418  const AVPixFmtDescriptor *main_desc;
419  const AVFilterLink *main_link;
420 
421  if (scale2ref) {
422  main_link = ctx->inputs[0];
423  main_desc = av_pix_fmt_desc_get(main_link->format);
424  }
425 
426  scale->var_values[VAR_IN_W] = scale->var_values[VAR_IW] = inlink->w;
427  scale->var_values[VAR_IN_H] = scale->var_values[VAR_IH] = inlink->h;
428  scale->var_values[VAR_OUT_W] = scale->var_values[VAR_OW] = NAN;
429  scale->var_values[VAR_OUT_H] = scale->var_values[VAR_OH] = NAN;
430  scale->var_values[VAR_A] = (double) inlink->w / inlink->h;
431  scale->var_values[VAR_SAR] = inlink->sample_aspect_ratio.num ?
432  (double) inlink->sample_aspect_ratio.num / inlink->sample_aspect_ratio.den : 1;
433  scale->var_values[VAR_DAR] = scale->var_values[VAR_A] * scale->var_values[VAR_SAR];
434  scale->var_values[VAR_HSUB] = 1 << desc->log2_chroma_w;
435  scale->var_values[VAR_VSUB] = 1 << desc->log2_chroma_h;
436  scale->var_values[VAR_OHSUB] = 1 << out_desc->log2_chroma_w;
437  scale->var_values[VAR_OVSUB] = 1 << out_desc->log2_chroma_h;
438 
439  if (scale2ref) {
440  scale->var_values[VAR_S2R_MAIN_W] = main_link->w;
441  scale->var_values[VAR_S2R_MAIN_H] = main_link->h;
442  scale->var_values[VAR_S2R_MAIN_A] = (double) main_link->w / main_link->h;
443  scale->var_values[VAR_S2R_MAIN_SAR] = main_link->sample_aspect_ratio.num ?
444  (double) main_link->sample_aspect_ratio.num / main_link->sample_aspect_ratio.den : 1;
445  scale->var_values[VAR_S2R_MAIN_DAR] = scale->var_values[VAR_S2R_MDAR] =
446  scale->var_values[VAR_S2R_MAIN_A] * scale->var_values[VAR_S2R_MAIN_SAR];
447  scale->var_values[VAR_S2R_MAIN_HSUB] = 1 << main_desc->log2_chroma_w;
448  scale->var_values[VAR_S2R_MAIN_VSUB] = 1 << main_desc->log2_chroma_h;
449  }
450 
451  res = av_expr_eval(scale->w_pexpr, scale->var_values, NULL);
452  eval_w = scale->var_values[VAR_OUT_W] = scale->var_values[VAR_OW] = (int) res == 0 ? inlink->w : (int) res;
453 
454  res = av_expr_eval(scale->h_pexpr, scale->var_values, NULL);
455  if (isnan(res)) {
456  expr = scale->h_expr;
457  ret = AVERROR(EINVAL);
458  goto fail;
459  }
460  eval_h = scale->var_values[VAR_OUT_H] = scale->var_values[VAR_OH] = (int) res == 0 ? inlink->h : (int) res;
461 
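 /* Editor's note: the width expression is evaluated once more below, now that
  * out_h/oh carry the evaluated height, so a width expression referencing the
  * output height (e.g. w=oh*a) picks up the final value. */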
462  res = av_expr_eval(scale->w_pexpr, scale->var_values, NULL);
463  if (isnan(res)) {
464  expr = scale->w_expr;
465  ret = AVERROR(EINVAL);
466  goto fail;
467  }
468  eval_w = scale->var_values[VAR_OUT_W] = scale->var_values[VAR_OW] = (int) res == 0 ? inlink->w : (int) res;
469 
470  scale->w = eval_w;
471  scale->h = eval_h;
472 
473  return 0;
474 
475 fail:
476  av_log(ctx, AV_LOG_ERROR,
477  "Error when evaluating the expression '%s'.\n", expr);
478  return ret;
479 }
480 
481 static int config_props(AVFilterLink *outlink)
482 {
483  AVFilterContext *ctx = outlink->src;
484  AVFilterLink *inlink0 = outlink->src->inputs[0];
485  AVFilterLink *inlink = ctx->filter == &ff_vf_scale2ref ?
486  outlink->src->inputs[1] :
487  outlink->src->inputs[0];
488  enum AVPixelFormat outfmt = outlink->format;
489  const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(inlink->format);
490  ScaleContext *scale = ctx->priv;
491  int ret;
492 
493  if ((ret = scale_eval_dimensions(ctx)) < 0)
494  goto fail;
495 
496  outlink->w = scale->w;
497  outlink->h = scale->h;
498 
499  ret = ff_scale_adjust_dimensions(inlink, &outlink->w, &outlink->h,
500  scale->force_original_aspect_ratio,
501  scale->force_divisible_by);
502 
503  if (ret < 0)
504  goto fail;
505 
506  if (outlink->w > INT_MAX ||
507  outlink->h > INT_MAX ||
508  (outlink->h * inlink->w) > INT_MAX ||
509  (outlink->w * inlink->h) > INT_MAX)
510  av_log(ctx, AV_LOG_ERROR, "Rescaled value for width or height is too big.\n");
511 
512  /* TODO: make algorithm configurable */
513 
514  scale->input_is_pal = desc->flags & AV_PIX_FMT_FLAG_PAL;
515  if (outfmt == AV_PIX_FMT_PAL8) outfmt = AV_PIX_FMT_BGR8;
516  scale->output_is_pal= av_pix_fmt_desc_get(outfmt)->flags & AV_PIX_FMT_FLAG_PAL ||
517  av_pix_fmt_desc_get(outfmt)->flags & FF_PSEUDOPAL;
518 
519  if (scale->sws)
520  sws_freeContext(scale->sws);
521  if (scale->isws[0])
522  sws_freeContext(scale->isws[0]);
523  if (scale->isws[1])
524  sws_freeContext(scale->isws[1]);
525  scale->isws[0] = scale->isws[1] = scale->sws = NULL;
526  if (inlink0->w == outlink->w &&
527  inlink0->h == outlink->h &&
528  !scale->out_color_matrix &&
529  scale->in_range == scale->out_range &&
530  inlink0->format == outlink->format)
531  ;
532  else {
533  struct SwsContext **swscs[3] = {&scale->sws, &scale->isws[0], &scale->isws[1]};
534  int i;
535 
536  for (i = 0; i < 3; i++) {
537  int in_v_chr_pos = scale->in_v_chr_pos, out_v_chr_pos = scale->out_v_chr_pos;
538  struct SwsContext **s = swscs[i];
539  *s = sws_alloc_context();
540  if (!*s)
541  return AVERROR(ENOMEM);
542 
543  av_opt_set_int(*s, "srcw", inlink0 ->w, 0);
544  av_opt_set_int(*s, "srch", inlink0 ->h >> !!i, 0);
545  av_opt_set_int(*s, "src_format", inlink0->format, 0);
546  av_opt_set_int(*s, "dstw", outlink->w, 0);
547  av_opt_set_int(*s, "dsth", outlink->h >> !!i, 0);
548  av_opt_set_int(*s, "dst_format", outfmt, 0);
549  av_opt_set_int(*s, "sws_flags", scale->flags, 0);
550  av_opt_set_int(*s, "param0", scale->param[0], 0);
551  av_opt_set_int(*s, "param1", scale->param[1], 0);
552  if (scale->in_range != AVCOL_RANGE_UNSPECIFIED)
553  av_opt_set_int(*s, "src_range",
554  scale->in_range == AVCOL_RANGE_JPEG, 0);
555  if (scale->out_range != AVCOL_RANGE_UNSPECIFIED)
556  av_opt_set_int(*s, "dst_range",
557  scale->out_range == AVCOL_RANGE_JPEG, 0);
558 
559  if (scale->opts) {
560  AVDictionaryEntry *e = NULL;
561  while ((e = av_dict_get(scale->opts, "", e, AV_DICT_IGNORE_SUFFIX))) {
562  if ((ret = av_opt_set(*s, e->key, e->value, 0)) < 0)
563  return ret;
564  }
565  }
566  /* Override YUV420P default settings to have the correct (MPEG-2) chroma positions
567  * MPEG-2 chroma positions are used by convention
568  * XXX: support other 4:2:0 pixel formats */
569  if (inlink0->format == AV_PIX_FMT_YUV420P && scale->in_v_chr_pos == -513) {
570  in_v_chr_pos = (i == 0) ? 128 : (i == 1) ? 64 : 192;
571  }
572 
573  if (outlink->format == AV_PIX_FMT_YUV420P && scale->out_v_chr_pos == -513) {
574  out_v_chr_pos = (i == 0) ? 128 : (i == 1) ? 64 : 192;
575  }
576 
577  av_opt_set_int(*s, "src_h_chr_pos", scale->in_h_chr_pos, 0);
578  av_opt_set_int(*s, "src_v_chr_pos", in_v_chr_pos, 0);
579  av_opt_set_int(*s, "dst_h_chr_pos", scale->out_h_chr_pos, 0);
580  av_opt_set_int(*s, "dst_v_chr_pos", out_v_chr_pos, 0);
581 
582  if ((ret = sws_init_context(*s, NULL, NULL)) < 0)
583  return ret;
584  if (!scale->interlaced)
585  break;
586  }
587  }
588 
589  if (inlink0->sample_aspect_ratio.num){
590  outlink->sample_aspect_ratio = av_mul_q((AVRational){outlink->h * inlink0->w, outlink->w * inlink0->h}, inlink0->sample_aspect_ratio);
591  } else
592  outlink->sample_aspect_ratio = inlink0->sample_aspect_ratio;
593 
594  av_log(ctx, AV_LOG_VERBOSE, "w:%d h:%d fmt:%s sar:%d/%d -> w:%d h:%d fmt:%s sar:%d/%d flags:0x%0x\n",
595  inlink ->w, inlink ->h, av_get_pix_fmt_name( inlink->format),
596  inlink->sample_aspect_ratio.num, inlink->sample_aspect_ratio.den,
597  outlink->w, outlink->h, av_get_pix_fmt_name(outlink->format),
598  outlink->sample_aspect_ratio.num, outlink->sample_aspect_ratio.den,
599  scale->flags);
600  return 0;
601 
602 fail:
603  return ret;
604 }
605 
606 static int config_props_ref(AVFilterLink *outlink)
607 {
608  AVFilterLink *inlink = outlink->src->inputs[1];
609 
610  outlink->w = inlink->w;
611  outlink->h = inlink->h;
612  outlink->sample_aspect_ratio = inlink->sample_aspect_ratio;
613  outlink->time_base = inlink->time_base;
614  outlink->frame_rate = inlink->frame_rate;
615 
616  return 0;
617 }
618 
619 static int request_frame(AVFilterLink *outlink)
620 {
621  return ff_request_frame(outlink->src->inputs[0]);
622 }
623 
624 static int request_frame_ref(AVFilterLink *outlink)
625 {
626  return ff_request_frame(outlink->src->inputs[1]);
627 }
628 
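/*
 * Editor's note: scale_slice() feeds one slice to libswscale. For progressive
 * frames it is called with mul=1, field=0 over the full height; for interlaced
 * handling it is called twice with mul=2 and field=0/1, so each field is
 * scaled through its own SwsContext (isws[0]/isws[1]) using doubled strides.
 */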
629 static int scale_slice(AVFilterLink *link, AVFrame *out_buf, AVFrame *cur_pic, struct SwsContext *sws, int y, int h, int mul, int field)
630 {
631  ScaleContext *scale = link->dst->priv;
632  const uint8_t *in[4];
633  uint8_t *out[4];
634  int in_stride[4],out_stride[4];
635  int i;
636 
637  for (i=0; i<4; i++) {
638  int vsub= ((i+1)&2) ? scale->vsub : 0;
639  in_stride[i] = cur_pic->linesize[i] * mul;
640  out_stride[i] = out_buf->linesize[i] * mul;
641  in[i] = FF_PTR_ADD(cur_pic->data[i], ((y>>vsub)+field) * cur_pic->linesize[i]);
642  out[i] = FF_PTR_ADD(out_buf->data[i], field * out_buf->linesize[i]);
643  }
644  if (scale->input_is_pal)
645  in[1] = cur_pic->data[1];
646  if (scale->output_is_pal)
647  out[1] = out_buf->data[1];
648 
649  return sws_scale(sws, in, in_stride, y/mul, h,
650  out,out_stride);
651 }
652 
653 static int scale_frame(AVFilterLink *link, AVFrame *in, AVFrame **frame_out)
654 {
655  AVFilterContext *ctx = link->dst;
656  ScaleContext *scale = ctx->priv;
657  AVFilterLink *outlink = ctx->outputs[0];
658  AVFrame *out;
659  const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(link->format);
660  char buf[32];
661  int in_range;
662  int frame_changed;
663 
664  *frame_out = NULL;
665  if (in->colorspace == AVCOL_SPC_YCGCO)
666  av_log(link->dst, AV_LOG_WARNING, "Detected unsupported YCgCo colorspace.\n");
667 
668  frame_changed = in->width != link->w ||
669  in->height != link->h ||
670  in->format != link->format ||
671  in->sample_aspect_ratio.den != link->sample_aspect_ratio.den ||
672  in->sample_aspect_ratio.num != link->sample_aspect_ratio.num;
673 
674  if (scale->eval_mode == EVAL_MODE_FRAME || frame_changed) {
675  int ret;
676  unsigned vars_w[VARS_NB] = { 0 }, vars_h[VARS_NB] = { 0 };
677 
678  av_expr_count_vars(scale->w_pexpr, vars_w, VARS_NB);
679  av_expr_count_vars(scale->h_pexpr, vars_h, VARS_NB);
680 
681  if (scale->eval_mode == EVAL_MODE_FRAME &&
682  !frame_changed &&
683  ctx->filter != &ff_vf_scale2ref &&
684  !(vars_w[VAR_N] || vars_w[VAR_T] || vars_w[VAR_POS]) &&
685  !(vars_h[VAR_N] || vars_h[VAR_T] || vars_h[VAR_POS]) &&
686  scale->w && scale->h)
687  goto scale;
688 
689  if (scale->eval_mode == EVAL_MODE_INIT) {
690  snprintf(buf, sizeof(buf) - 1, "%d", scale->w);
691  av_opt_set(scale, "w", buf, 0);
692  snprintf(buf, sizeof(buf) - 1, "%d", scale->h);
693  av_opt_set(scale, "h", buf, 0);
694 
695  ret = scale_parse_expr(ctx, NULL, &scale->w_pexpr, "width", scale->w_expr);
696  if (ret < 0)
697  return ret;
698 
699  ret = scale_parse_expr(ctx, NULL, &scale->h_pexpr, "height", scale->h_expr);
700  if (ret < 0)
701  return ret;
702  }
703 
704  if (ctx->filter == &ff_vf_scale2ref) {
705  scale->var_values[VAR_S2R_MAIN_N] = link->frame_count_out;
706  scale->var_values[VAR_S2R_MAIN_T] = TS2T(in->pts, link->time_base);
707  scale->var_values[VAR_S2R_MAIN_POS] = in->pkt_pos == -1 ? NAN : in->pkt_pos;
708  } else {
709  scale->var_values[VAR_N] = link->frame_count_out;
710  scale->var_values[VAR_T] = TS2T(in->pts, link->time_base);
711  scale->var_values[VAR_POS] = in->pkt_pos == -1 ? NAN : in->pkt_pos;
712  }
713 
714  link->dst->inputs[0]->format = in->format;
715  link->dst->inputs[0]->w = in->width;
716  link->dst->inputs[0]->h = in->height;
717 
718  link->dst->inputs[0]->sample_aspect_ratio.den = in->sample_aspect_ratio.den;
719  link->dst->inputs[0]->sample_aspect_ratio.num = in->sample_aspect_ratio.num;
720 
721  if ((ret = config_props(outlink)) < 0)
722  return ret;
723  }
724 
725 scale:
726  if (!scale->sws) {
727  *frame_out = in;
728  return 0;
729  }
730 
731  scale->hsub = desc->log2_chroma_w;
732  scale->vsub = desc->log2_chroma_h;
733 
734  out = ff_get_video_buffer(outlink, outlink->w, outlink->h);
735  if (!out) {
736  av_frame_free(&in);
737  return AVERROR(ENOMEM);
738  }
739  *frame_out = out;
740 
741  av_frame_copy_props(out, in);
742  out->width = outlink->w;
743  out->height = outlink->h;
744 
745  // Sanity checks:
746  // 1. If the output is RGB, set the matrix coefficients to RGB.
747  // 2. If the output is not RGB and we've got the RGB/XYZ (identity)
748  // matrix configured, unset the matrix.
749  // In theory these should be in swscale itself as the AVFrame
750  // based API gets in, so that not every swscale API user has
751  // to go through duplicating such sanity checks.
752  if (av_pix_fmt_desc_get(out->format)->flags & AV_PIX_FMT_FLAG_RGB)
753  out->colorspace = AVCOL_SPC_RGB;
754  else if (out->colorspace == AVCOL_SPC_RGB)
755  out->colorspace = AVCOL_SPC_UNSPECIFIED;
756 
757  if (scale->output_is_pal)
758  avpriv_set_systematic_pal2((uint32_t*)out->data[1], outlink->format == AV_PIX_FMT_PAL8 ? AV_PIX_FMT_BGR8 : outlink->format);
759 
760  in_range = in->color_range;
761 
762  if ( scale->in_color_matrix
763  || scale->out_color_matrix
764  || scale-> in_range != AVCOL_RANGE_UNSPECIFIED
765  || in_range != AVCOL_RANGE_UNSPECIFIED
766  || scale->out_range != AVCOL_RANGE_UNSPECIFIED) {
767  int in_full, out_full, brightness, contrast, saturation;
768  const int *inv_table, *table;
769 
770  sws_getColorspaceDetails(scale->sws, (int **)&inv_table, &in_full,
771  (int **)&table, &out_full,
772  &brightness, &contrast, &saturation);
773 
774  if (scale->in_color_matrix)
775  inv_table = parse_yuv_type(scale->in_color_matrix, in->colorspace);
776  if (scale->out_color_matrix)
777  table = parse_yuv_type(scale->out_color_matrix, AVCOL_SPC_UNSPECIFIED);
778  else if (scale->in_color_matrix)
779  table = inv_table;
780 
781  if (scale-> in_range != AVCOL_RANGE_UNSPECIFIED)
782  in_full = (scale-> in_range == AVCOL_RANGE_JPEG);
783  else if (in_range != AVCOL_RANGE_UNSPECIFIED)
784  in_full = (in_range == AVCOL_RANGE_JPEG);
785  if (scale->out_range != AVCOL_RANGE_UNSPECIFIED)
786  out_full = (scale->out_range == AVCOL_RANGE_JPEG);
787 
788  sws_setColorspaceDetails(scale->sws, inv_table, in_full,
789  table, out_full,
790  brightness, contrast, saturation);
791  if (scale->isws[0])
792  sws_setColorspaceDetails(scale->isws[0], inv_table, in_full,
793  table, out_full,
794  brightness, contrast, saturation);
795  if (scale->isws[1])
796  sws_setColorspaceDetails(scale->isws[1], inv_table, in_full,
797  table, out_full,
798  brightness, contrast, saturation);
799 
800  out->color_range = out_full ? AVCOL_RANGE_JPEG : AVCOL_RANGE_MPEG;
801  }
802 
803  av_reduce(&out->sample_aspect_ratio.num, &out->sample_aspect_ratio.den,
804  (int64_t)in->sample_aspect_ratio.num * outlink->h * link->w,
805  (int64_t)in->sample_aspect_ratio.den * outlink->w * link->h,
806  INT_MAX);
807 
808  if (scale->interlaced>0 || (scale->interlaced<0 && in->interlaced_frame)) {
809  scale_slice(link, out, in, scale->isws[0], 0, (link->h+1)/2, 2, 0);
810  scale_slice(link, out, in, scale->isws[1], 0, link->h /2, 2, 1);
811  } else if (scale->nb_slices) {
812  int i, slice_h, slice_start, slice_end = 0;
813  const int nb_slices = FFMIN(scale->nb_slices, link->h);
814  for (i = 0; i < nb_slices; i++) {
815  slice_start = slice_end;
816  slice_end = (link->h * (i+1)) / nb_slices;
817  slice_h = slice_end - slice_start;
818  scale_slice(link, out, in, scale->sws, slice_start, slice_h, 1, 0);
819  }
820  } else {
821  scale_slice(link, out, in, scale->sws, 0, link->h, 1, 0);
822  }
823 
824  av_frame_free(&in);
825  return 0;
826 }
827 
828 static int filter_frame(AVFilterLink *link, AVFrame *in)
829 {
830  AVFilterContext *ctx = link->dst;
831  AVFilterLink *outlink = ctx->outputs[0];
832  AVFrame *out;
833  int ret;
834 
835  ret = scale_frame(link, in, &out);
836  if (out)
837  return ff_filter_frame(outlink, out);
838 
839  return ret;
840 }
841 
842 static int filter_frame_ref(AVFilterLink *link, AVFrame *in)
843 {
844  ScaleContext *scale = link->dst->priv;
845  AVFilterLink *outlink = link->dst->outputs[1];
846  int frame_changed;
847 
848  frame_changed = in->width != link->w ||
849  in->height != link->h ||
850  in->format != link->format ||
851  in->sample_aspect_ratio.den != link->sample_aspect_ratio.den ||
852  in->sample_aspect_ratio.num != link->sample_aspect_ratio.num;
853 
854  if (frame_changed) {
855  link->format = in->format;
856  link->w = in->width;
857  link->h = in->height;
858  link->sample_aspect_ratio.num = in->sample_aspect_ratio.num;
859  link->sample_aspect_ratio.den = in->sample_aspect_ratio.den;
860 
861  config_props_ref(outlink);
862  }
863 
864  if (scale->eval_mode == EVAL_MODE_FRAME) {
865  scale->var_values[VAR_N] = link->frame_count_out;
866  scale->var_values[VAR_T] = TS2T(in->pts, link->time_base);
867  scale->var_values[VAR_POS] = in->pkt_pos == -1 ? NAN : in->pkt_pos;
868  }
869 
870  return ff_filter_frame(outlink, in);
871 }
872 
873 static int process_command(AVFilterContext *ctx, const char *cmd, const char *args,
874  char *res, int res_len, int flags)
875 {
876  ScaleContext *scale = ctx->priv;
877  char *str_expr;
878  AVExpr **pexpr_ptr;
879  int ret, w, h;
880 
881  w = !strcmp(cmd, "width") || !strcmp(cmd, "w");
882  h = !strcmp(cmd, "height") || !strcmp(cmd, "h");
883 
884  if (w || h) {
885  str_expr = w ? scale->w_expr : scale->h_expr;
886  pexpr_ptr = w ? &scale->w_pexpr : &scale->h_pexpr;
887 
888  ret = scale_parse_expr(ctx, str_expr, pexpr_ptr, cmd, args);
889  } else
890  ret = AVERROR(ENOSYS);
891 
892  if (ret < 0)
893  av_log(ctx, AV_LOG_ERROR, "Failed to process command. Continuing with existing parameters.\n");
894 
895  return ret;
896 }
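/*
 * Illustrative only (editor's addition): the "w"/"width" and "h"/"height"
 * commands accepted above can be issued at runtime, for example through the
 * sendcmd filter or from application code; the snippet assumes an already
 * configured AVFilterGraph *graph containing a filter instance named "scale":
 *
 *     // re-evaluate the width expression on a live graph
 *     avfilter_graph_send_command(graph, "scale", "w", "1280", NULL, 0, 0);
 *
 * A failed command leaves the previous parameters in place, as logged above.
 */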
897 
898 #if FF_API_CHILD_CLASS_NEXT
899 static const AVClass *child_class_next(const AVClass *prev)
900 {
901  return prev ? NULL : sws_get_class();
902 }
903 #endif
904 
905 static const AVClass *child_class_iterate(void **iter)
906 {
907  const AVClass *c = *iter ? NULL : sws_get_class();
908  *iter = (void*)(uintptr_t)c;
909  return c;
910 }
911 
912 #define OFFSET(x) offsetof(ScaleContext, x)
913 #define FLAGS AV_OPT_FLAG_VIDEO_PARAM|AV_OPT_FLAG_FILTERING_PARAM
914 #define TFLAGS AV_OPT_FLAG_VIDEO_PARAM|AV_OPT_FLAG_FILTERING_PARAM|AV_OPT_FLAG_RUNTIME_PARAM
915 
916 static const AVOption scale_options[] = {
917  { "w", "Output video width", OFFSET(w_expr), AV_OPT_TYPE_STRING, .flags = TFLAGS },
918  { "width", "Output video width", OFFSET(w_expr), AV_OPT_TYPE_STRING, .flags = TFLAGS },
919  { "h", "Output video height", OFFSET(h_expr), AV_OPT_TYPE_STRING, .flags = TFLAGS },
920  { "height","Output video height", OFFSET(h_expr), AV_OPT_TYPE_STRING, .flags = TFLAGS },
921  { "flags", "Flags to pass to libswscale", OFFSET(flags_str), AV_OPT_TYPE_STRING, { .str = "bilinear" }, .flags = FLAGS },
922  { "interl", "set interlacing", OFFSET(interlaced), AV_OPT_TYPE_BOOL, {.i64 = 0 }, -1, 1, FLAGS },
923  { "size", "set video size", OFFSET(size_str), AV_OPT_TYPE_STRING, {.str = NULL}, 0, FLAGS },
924  { "s", "set video size", OFFSET(size_str), AV_OPT_TYPE_STRING, {.str = NULL}, 0, FLAGS },
925  { "in_color_matrix", "set input YCbCr type", OFFSET(in_color_matrix), AV_OPT_TYPE_STRING, { .str = "auto" }, .flags = FLAGS, "color" },
926  { "out_color_matrix", "set output YCbCr type", OFFSET(out_color_matrix), AV_OPT_TYPE_STRING, { .str = NULL }, .flags = FLAGS, "color"},
927  { "auto", NULL, 0, AV_OPT_TYPE_CONST, { .str = "auto" }, 0, 0, FLAGS, "color" },
928  { "bt601", NULL, 0, AV_OPT_TYPE_CONST, { .str = "bt601" }, 0, 0, FLAGS, "color" },
929  { "bt470", NULL, 0, AV_OPT_TYPE_CONST, { .str = "bt470" }, 0, 0, FLAGS, "color" },
930  { "smpte170m", NULL, 0, AV_OPT_TYPE_CONST, { .str = "smpte170m" }, 0, 0, FLAGS, "color" },
931  { "bt709", NULL, 0, AV_OPT_TYPE_CONST, { .str = "bt709" }, 0, 0, FLAGS, "color" },
932  { "fcc", NULL, 0, AV_OPT_TYPE_CONST, { .str = "fcc" }, 0, 0, FLAGS, "color" },
933  { "smpte240m", NULL, 0, AV_OPT_TYPE_CONST, { .str = "smpte240m" }, 0, 0, FLAGS, "color" },
934  { "bt2020", NULL, 0, AV_OPT_TYPE_CONST, { .str = "bt2020" }, 0, 0, FLAGS, "color" },
935  { "in_range", "set input color range", OFFSET( in_range), AV_OPT_TYPE_INT, {.i64 = AVCOL_RANGE_UNSPECIFIED }, 0, 2, FLAGS, "range" },
936  { "out_range", "set output color range", OFFSET(out_range), AV_OPT_TYPE_INT, {.i64 = AVCOL_RANGE_UNSPECIFIED }, 0, 2, FLAGS, "range" },
937  { "auto", NULL, 0, AV_OPT_TYPE_CONST, {.i64 = AVCOL_RANGE_UNSPECIFIED }, 0, 0, FLAGS, "range" },
938  { "unknown", NULL, 0, AV_OPT_TYPE_CONST, {.i64 = AVCOL_RANGE_UNSPECIFIED }, 0, 0, FLAGS, "range" },
939  { "full", NULL, 0, AV_OPT_TYPE_CONST, {.i64 = AVCOL_RANGE_JPEG}, 0, 0, FLAGS, "range" },
940  { "limited",NULL, 0, AV_OPT_TYPE_CONST, {.i64 = AVCOL_RANGE_MPEG}, 0, 0, FLAGS, "range" },
941  { "jpeg", NULL, 0, AV_OPT_TYPE_CONST, {.i64 = AVCOL_RANGE_JPEG}, 0, 0, FLAGS, "range" },
942  { "mpeg", NULL, 0, AV_OPT_TYPE_CONST, {.i64 = AVCOL_RANGE_MPEG}, 0, 0, FLAGS, "range" },
943  { "tv", NULL, 0, AV_OPT_TYPE_CONST, {.i64 = AVCOL_RANGE_MPEG}, 0, 0, FLAGS, "range" },
944  { "pc", NULL, 0, AV_OPT_TYPE_CONST, {.i64 = AVCOL_RANGE_JPEG}, 0, 0, FLAGS, "range" },
945  { "in_v_chr_pos", "input vertical chroma position in luma grid/256" , OFFSET(in_v_chr_pos), AV_OPT_TYPE_INT, { .i64 = -513}, -513, 512, FLAGS },
946  { "in_h_chr_pos", "input horizontal chroma position in luma grid/256", OFFSET(in_h_chr_pos), AV_OPT_TYPE_INT, { .i64 = -513}, -513, 512, FLAGS },
947  { "out_v_chr_pos", "output vertical chroma position in luma grid/256" , OFFSET(out_v_chr_pos), AV_OPT_TYPE_INT, { .i64 = -513}, -513, 512, FLAGS },
948  { "out_h_chr_pos", "output horizontal chroma position in luma grid/256", OFFSET(out_h_chr_pos), AV_OPT_TYPE_INT, { .i64 = -513}, -513, 512, FLAGS },
949  { "force_original_aspect_ratio", "decrease or increase w/h if necessary to keep the original AR", OFFSET(force_original_aspect_ratio), AV_OPT_TYPE_INT, { .i64 = 0}, 0, 2, FLAGS, "force_oar" },
950  { "disable", NULL, 0, AV_OPT_TYPE_CONST, {.i64 = 0 }, 0, 0, FLAGS, "force_oar" },
951  { "decrease", NULL, 0, AV_OPT_TYPE_CONST, {.i64 = 1 }, 0, 0, FLAGS, "force_oar" },
952  { "increase", NULL, 0, AV_OPT_TYPE_CONST, {.i64 = 2 }, 0, 0, FLAGS, "force_oar" },
953  { "force_divisible_by", "enforce that the output resolution is divisible by a defined integer when force_original_aspect_ratio is used", OFFSET(force_divisible_by), AV_OPT_TYPE_INT, { .i64 = 1}, 1, 256, FLAGS },
954  { "param0", "Scaler param 0", OFFSET(param[0]), AV_OPT_TYPE_DOUBLE, { .dbl = SWS_PARAM_DEFAULT }, INT_MIN, INT_MAX, FLAGS },
955  { "param1", "Scaler param 1", OFFSET(param[1]), AV_OPT_TYPE_DOUBLE, { .dbl = SWS_PARAM_DEFAULT }, INT_MIN, INT_MAX, FLAGS },
956  { "nb_slices", "set the number of slices (debug purpose only)", OFFSET(nb_slices), AV_OPT_TYPE_INT, { .i64 = 0 }, 0, INT_MAX, FLAGS },
957  { "eval", "specify when to evaluate expressions", OFFSET(eval_mode), AV_OPT_TYPE_INT, {.i64 = EVAL_MODE_INIT}, 0, EVAL_MODE_NB-1, FLAGS, "eval" },
958  { "init", "eval expressions once during initialization", 0, AV_OPT_TYPE_CONST, {.i64=EVAL_MODE_INIT}, .flags = FLAGS, .unit = "eval" },
959  { "frame", "eval expressions during initialization and per-frame", 0, AV_OPT_TYPE_CONST, {.i64=EVAL_MODE_FRAME}, .flags = FLAGS, .unit = "eval" },
960  { NULL }
961 };
962 
963 static const AVClass scale_class = {
964  .class_name = "scale",
965  .item_name = av_default_item_name,
966  .option = scale_options,
967  .version = LIBAVUTIL_VERSION_INT,
968  .category = AV_CLASS_CATEGORY_FILTER,
969 #if FF_API_CHILD_CLASS_NEXT
970  .child_class_next = child_class_next,
971 #endif
972  .child_class_iterate = child_class_iterate,
973 };
974 
975 static const AVFilterPad avfilter_vf_scale_inputs[] = {
976  {
977  .name = "default",
978  .type = AVMEDIA_TYPE_VIDEO,
979  .filter_frame = filter_frame,
980  },
981  { NULL }
982 };
983 
984 static const AVFilterPad avfilter_vf_scale_outputs[] = {
985  {
986  .name = "default",
987  .type = AVMEDIA_TYPE_VIDEO,
988  .config_props = config_props,
989  },
990  { NULL }
991 };
992 
993 AVFilter ff_vf_scale = {
994  .name = "scale",
995  .description = NULL_IF_CONFIG_SMALL("Scale the input video size and/or convert the image format."),
996  .init_dict = init_dict,
997  .uninit = uninit,
998  .query_formats = query_formats,
999  .priv_size = sizeof(ScaleContext),
1000  .priv_class = &scale_class,
1001  .inputs = avfilter_vf_scale_inputs,
1002  .outputs = avfilter_vf_scale_outputs,
1003  .process_command = process_command,
1004 };
1005 
1006 static const AVClass scale2ref_class = {
1007  .class_name = "scale2ref",
1008  .item_name = av_default_item_name,
1009  .option = scale_options,
1010  .version = LIBAVUTIL_VERSION_INT,
1011  .category = AV_CLASS_CATEGORY_FILTER,
1012 #if FF_API_CHILD_CLASS_NEXT
1013  .child_class_next = child_class_next,
1014 #endif
1015  .child_class_iterate = child_class_iterate,
1016 };
1017 
1018 static const AVFilterPad avfilter_vf_scale2ref_inputs[] = {
1019  {
1020  .name = "default",
1021  .type = AVMEDIA_TYPE_VIDEO,
1022  .filter_frame = filter_frame,
1023  },
1024  {
1025  .name = "ref",
1026  .type = AVMEDIA_TYPE_VIDEO,
1027  .filter_frame = filter_frame_ref,
1028  },
1029  { NULL }
1030 };
1031 
1032 static const AVFilterPad avfilter_vf_scale2ref_outputs[] = {
1033  {
1034  .name = "default",
1035  .type = AVMEDIA_TYPE_VIDEO,
1036  .config_props = config_props,
1037  .request_frame= request_frame,
1038  },
1039  {
1040  .name = "ref",
1041  .type = AVMEDIA_TYPE_VIDEO,
1042  .config_props = config_props_ref,
1043  .request_frame= request_frame_ref,
1044  },
1045  { NULL }
1046 };
1047 
1048 AVFilter ff_vf_scale2ref = {
1049  .name = "scale2ref",
1050  .description = NULL_IF_CONFIG_SMALL("Scale the input video size and/or convert the image format to the given reference."),
1051  .init_dict = init_dict,
1052  .uninit = uninit,
1053  .query_formats = query_formats,
1054  .priv_size = sizeof(ScaleContext),
1055  .priv_class = &scale2ref_class,
1056  .inputs = avfilter_vf_scale2ref_inputs,
1057  .outputs = avfilter_vf_scale2ref_outputs,
1058  .process_command = process_command,
1059 };