#define CGROUPS (int [3]){ 32, 32, 1 }
/* scale_bilinear[]: shader helper that samples the input with bilinear
 * filtering, mapping each output pixel centre into the crop window */
    C(1,     vec2 npos = (vec2(pos) + 0.5f) / imageSize(output_img[idx]); )
    C(1,     npos *= crop_range; )
    C(1,     npos += crop_off; )
    C(1,     return texture(input_img[idx], npos); )
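/* A minimal standalone sketch (not part of the filter; sample_pos is a
 * hypothetical name) of the coordinate math scale_bilinear() performs: the
 * output pixel centre is normalized against the output size, then scaled and
 * offset into the crop window before being handed to texture(). */
#include <stdio.h>

static void sample_pos(int x, int y, int out_w, int out_h,
                       float crop_range_u, float crop_range_v,
                       float crop_off_u, float crop_off_v,
                       float *u, float *v)
{
    *u = ((float)x + 0.5f) / (float)out_w * crop_range_u + crop_off_u;
    *v = ((float)y + 0.5f) / (float)out_h * crop_range_v + crop_off_v;
}

int main(void)
{
    float u, v;
    /* centre output pixel of a 1280x720 target, no cropping applied */
    sample_pos(640, 360, 1280, 720, 1.0f, 1.0f, 0.0f, 0.0f, &u, &v);
    printf("u=%f v=%f\n", u, v);   /* both values land very close to 0.5 */
    return 0;
}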
/* rgb2yuv[]: shader helper that converts an RGB sample with the uploaded
 * yuv_matrix and then maps it to full or limited range */
    C(1,     src *= yuv_matrix; )
    C(1,     if (fullrange == 1) { )
    C(2,         src += vec4(0.0, 0.5, 0.5, 0.0); )
    C(1,     } else { )
    C(2,         src *= vec4(219.0 / 255.0, 224.0 / 255.0, 224.0 / 255.0, 1.0); )
    C(2,         src += vec4(16.0 / 255.0, 128.0 / 255.0, 128.0 / 255.0, 0.0); )
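/* A standalone sketch (apply_range is a hypothetical helper) of the range
 * mapping above, applied to a normalized Y'CbCr triple (Y in [0,1], Cb/Cr
 * centred on 0): full range only re-centres the chroma, limited range also
 * compresses into the 16-235 / 16-240 code range. */
#include <stdio.h>

static void apply_range(float yuv[3], int fullrange)
{
    if (fullrange) {
        yuv[1] += 0.5f;
        yuv[2] += 0.5f;
    } else {
        yuv[0] = yuv[0] * 219.0f / 255.0f +  16.0f / 255.0f;
        yuv[1] = yuv[1] * 224.0f / 255.0f + 128.0f / 255.0f;
        yuv[2] = yuv[2] * 224.0f / 255.0f + 128.0f / 255.0f;
    }
}

int main(void)
{
    float white[3] = { 1.0f, 0.0f, 0.0f };   /* white after the RGB->YUV matrix */
    apply_range(white, 0);
    /* limited range: Y lands on code 235, chroma on 128 */
    printf("%.1f %.1f %.1f\n",
           white[0] * 255.0f, white[1] * 255.0f, white[2] * 255.0f);
    return 0;
}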
/* write_nv12[]: luma goes to plane 0, Cb and Cr are interleaved in the
 * half-resolution plane 1 */
    C(1,     imageStore(output_img[0], pos, vec4(src.r, 0.0, 0.0, 0.0)); )
    C(1,     pos /= ivec2(2); )
    C(1,     imageStore(output_img[1], pos, vec4(src.g, src.b, 0.0, 0.0)); )

/* write_420[]: three planes, with Cb and Cr stored at half resolution */
    C(1,     imageStore(output_img[0], pos, vec4(src.r, 0.0, 0.0, 0.0)); )
    C(1,     pos /= ivec2(2); )
    C(1,     imageStore(output_img[1], pos, vec4(src.g, 0.0, 0.0, 0.0)); )
    C(1,     imageStore(output_img[2], pos, vec4(src.b, 0.0, 0.0, 0.0)); )

/* write_444[]: three full-resolution planes */
    C(1,     imageStore(output_img[0], pos, vec4(src.r, 0.0, 0.0, 0.0)); )
    C(1,     imageStore(output_img[1], pos, vec4(src.g, 0.0, 0.0, 0.0)); )
    C(1,     imageStore(output_img[2], pos, vec4(src.b, 0.0, 0.0, 0.0)); )
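/* Illustrative only (not part of the filter): the plane layouts the three
 * writers above target, which is why write_nv12() and write_420() halve pos
 * before storing chroma while write_444() does not. */
#include <stdio.h>

int main(void)
{
    const int w = 1920, h = 1080;

    printf("yuv444p: 3 planes, chroma %dx%d\n", w, h);
    printf("yuv420p: 3 planes, chroma %dx%d\n", w / 2, h / 2);
    printf("nv12:    2 planes, chroma %dx%d (Cb and Cr interleaved)\n",
           w / 2, h / 2);
    return 0;
}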
/* init_filter(): derive the crop rectangle from the input frame, select the
 * compute queue family and pick the sampler filtering mode */
    VkFilter sampler_mode;

    int crop_x = in->crop_left;
    int crop_y = in->crop_top;
    int crop_w = in->width  - (in->crop_left + in->crop_right);
    int crop_h = in->height - (in->crop_top  + in->crop_bottom);

    s->vkctx.queue_family_idx = s->vkctx.hwctx->queue_family_comp_index;

    /* scaler selection: exactly one of the two modes below is chosen */
    sampler_mode = VK_FILTER_NEAREST;   /* nearest-neighbour */
    sampler_mode = VK_FILTER_LINEAR;    /* bilinear */
/* init_filter(): descriptor set bindings for the compute shader */

    /* sampled input planes ("input_img" in the shader) */
    .type    = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER,
    .stages  = VK_SHADER_STAGE_COMPUTE_BIT,
    .updater = s->input_images,

    /* storage images for the output planes */
    .name      = "output_img",
    .type      = VK_DESCRIPTOR_TYPE_STORAGE_IMAGE,
    .mem_quali = "writeonly",
    .stages    = VK_SHADER_STAGE_COMPUTE_BIT,
    .updater   = s->output_images,

    /* parameter buffer holding the RGB->YUV conversion matrix */
    .type        = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER,
    .mem_quali   = "readonly",
    .mem_layout  = "std430",
    .stages      = VK_SHADER_STAGE_COMPUTE_BIT,
    .updater     = &s->params_desc,
    .buf_content = "mat4 yuv_matrix;",

    /* stage argument of the compute-shader creation (ff_vk_init_shader()) */
                              VK_SHADER_STAGE_COMPUTE_BIT);
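/* Illustrative only: roughly the GLSL interface that the bindings above expand
 * to once ff_vk_add_descriptor_set() has emitted them into the shader.  Set and
 * binding numbers, the image format qualifier and the array sizes all depend on
 * the pixel formats actually negotiated, so treat this as a sketch; the variable
 * name is ours. */
static const char example_descriptor_glsl[] =
    "layout(set = 0, binding = 0) uniform sampler2D input_img[3];\n"
    "layout(set = 0, binding = 1, rgba8) uniform writeonly image2D output_img[3];\n"
    "layout(set = 1, binding = 0, std430) readonly buffer parameters {\n"
    "    mat4 yuv_matrix;\n"
    "};\n";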
/* init_filter(): assemble the compute shader; the conversion and writer
 * helpers defined above are spliced in here depending on the output format */
    if (s->vkctx.output_format != s->vkctx.input_format) {
    switch (s->vkctx.output_format) {

    GLSLC(1, ivec2 pos = ivec2(gl_GlobalInvocationID.xy); );
    GLSLF(1, vec2 in_d = vec2(%i, %i); , in->width, in->height);
    GLSLF(1, vec2 c_r = vec2(%i, %i) / in_d; , crop_w, crop_h);
    GLSLF(1, vec2 c_o = vec2(%i, %i) / in_d; , crop_x, crop_y);

    /* same input and output format: scale every plane directly */
    if (s->vkctx.output_format == s->vkctx.input_format) {
        for (int i = 0; i < desc_i[1].elems; i++) {
            GLSLF(2, imageStore(output_img[%i], pos, res); , i);   /* res produced by scale_bilinear() for plane i */

    /* conversion path: the writer call is chosen by output pixel format */
    switch (s->vkctx.output_format) {
    default:
        return AVERROR(EINVAL);
/* init_filter(): when converting formats, upload the RGB->YUV matrix through a
 * host-visible storage buffer that backs the params descriptor */
    if (s->vkctx.output_format != s->vkctx.input_format) {
        double tmp_mat[3][3];

        float yuv_matrix[4][4];     /* member of the mapped parameter struct (*par) */

        /* usage and memory-property flags passed to ff_vk_create_buf() */
                          VK_BUFFER_USAGE_STORAGE_BUFFER_BIT,
                          VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT);

        memset(par, 0, sizeof(*par));

        /* copy the 3x3 conversion matrix into the padded mat4 the shader reads */
        for (int y = 0; y < 3; y++)
            for (int x = 0; x < 3; x++)
                par->yuv_matrix[x][y] = tmp_mat[x][y];

        par->yuv_matrix[3][3] = 1.0;

        s->params_desc.buffer = s->params_buf.buf;
        s->params_desc.range  = VK_WHOLE_SIZE;
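/* A standalone sketch (pack_yuv_matrix is a hypothetical name) of the copy
 * above: the 3x3 double matrix from ff_fill_rgb2yuv_table() is packed into the
 * float mat4 the shader reads, with the fourth row/column zeroed except for the
 * trailing 1.0. */
#include <stdio.h>

static void pack_yuv_matrix(const double src[3][3], float dst[4][4])
{
    for (int i = 0; i < 4; i++)
        for (int j = 0; j < 4; j++)
            dst[i][j] = 0.0f;

    for (int y = 0; y < 3; y++)
        for (int x = 0; x < 3; x++)
            dst[x][y] = (float)src[x][y];

    dst[3][3] = 1.0f;
}

int main(void)
{
    /* placeholder values, not real conversion coefficients */
    const double m[3][3] = { { 1, 0, 0 }, { 0, 1, 0 }, { 0, 0, 1 } };
    float out[4][4];

    pack_yuv_matrix(m, out);
    printf("%f %f\n", out[0][0], out[3][3]);   /* 1.000000 1.000000 */
    return 0;
}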
/* process_frames(): record per-plane layout transitions, then dispatch the
 * compute shader over the output image */
    VkCommandBuffer cmd_buf;
    int barrier_count = 0;

    s->input_images[i].imageLayout  = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
    s->output_images[i].imageLayout = VK_IMAGE_LAYOUT_GENERAL;

    /* one barrier per input plane, making it readable from the shader */
    VkImageMemoryBarrier bar = {
        .sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
        .dstAccessMask = VK_ACCESS_SHADER_READ_BIT,
        .oldLayout = in->layout[i],
        .newLayout = s->input_images[i].imageLayout,
        .srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED,
        .dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED,
        .subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT,
        .subresourceRange.levelCount = 1,
        .subresourceRange.layerCount = 1,
    };

    memcpy(&barriers[barrier_count++], &bar, sizeof(VkImageMemoryBarrier));

    in->layout[i] = bar.newLayout;
    in->access[i] = bar.dstAccessMask;

    /* one barrier per output plane, making it writable from the shader */
    VkImageMemoryBarrier bar = {
        .sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
        .dstAccessMask = VK_ACCESS_SHADER_WRITE_BIT,
        .oldLayout = out->layout[i],
        .newLayout = s->output_images[i].imageLayout,
        .srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED,
        .dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED,
        .image = out->img[i],
        .subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT,
        .subresourceRange.levelCount = 1,
        .subresourceRange.layerCount = 1,
    };

    memcpy(&barriers[barrier_count++], &bar, sizeof(VkImageMemoryBarrier));

    out->layout[i] = bar.newLayout;
    out->access[i] = bar.dstAccessMask;

    vkCmdPipelineBarrier(cmd_buf, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT,
                         VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT, 0,
                         0, NULL, 0, NULL, barrier_count, barriers);

    vkCmdDispatch(cmd_buf,
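/* A sketch of the workgroup-count arithmetic presumably supplied in the
 * remaining arguments of the vkCmdDispatch() call above: with the 32x32x1
 * local size from the CGROUPS define at the top, each output dimension is
 * divided by the group size, rounded up.  group_count is a hypothetical
 * helper, not part of the filter. */
#include <stdio.h>

static int group_count(int size, int local_size)
{
    return (size + local_size - 1) / local_size;
}

int main(void)
{
    const int groups[3] = { 32, 32, 1 };   /* mirrors CGROUPS */
    const int out_w = 1280, out_h = 720;

    printf("dispatch %d x %d x 1 workgroups\n",
           group_count(out_w, groups[0]),
           group_count(out_h, groups[1]));   /* 40 x 23 x 1 */
    return 0;
}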
/* scale_vulkan_filter_frame(): propagate the requested colour range and, on
 * format conversion, the chroma sample location */
    out->color_range = s->out_range;    /* only when an out_range was requested */
    if (s->vkctx.output_format != s->vkctx.input_format)
        out->chroma_location = AVCHROMA_LOC_TOPLEFT;

/* scale_vulkan_config_output(): evaluate the w/h expressions and decide the
 * output pixel format */
                                   &s->vkctx.output_width,
                                   &s->vkctx.output_height);   /* trailing arguments of ff_scale_eval_dimensions() */

    if (s->out_format_string) {

        s->vkctx.output_format = s->vkctx.input_format;   /* fallback: keep the input format */

    if (s->vkctx.output_format != s->vkctx.input_format) {

/* option table and filter definition */
#define OFFSET(x) offsetof(ScaleVulkanContext, x)
#define FLAGS (AV_OPT_FLAG_FILTERING_PARAM | AV_OPT_FLAG_VIDEO_PARAM)

    /* AVFilter ff_vf_scale_vulkan */
    .name        = "scale_vulkan",
    .priv_class  = &scale_vulkan_class,
int ff_filter_frame(AVFilterLink *link, AVFrame *frame)
Send a frame of data to the next filter.
#define AV_NUM_DATA_POINTERS
uint32_t av_get_random_seed(void)
Get a seed to use in conjunction with random functions.
#define AVERROR_EXTERNAL
Generic error in an external library.
void av_frame_free(AVFrame **frame)
Free the frame and any dynamically allocated objects in it, e.g.
int av_frame_copy_props(AVFrame *dst, const AVFrame *src)
Copy only "metadata" fields from src to dst.
#define AV_LOG_ERROR
Something went wrong and cannot losslessly be recovered.
const VkFormat * av_vkfmt_from_pixfmt(enum AVPixelFormat p)
Returns the format of each image up to the number of planes for a given sw_format.
const struct LumaCoefficients * ff_get_luma_coefficients(enum AVColorSpace csp)
void ff_fill_rgb2yuv_table(const struct LumaCoefficients *coeffs, double rgb2yuv[3][3])
#define FF_FILTER_FLAG_HWFRAME_AWARE
The filter is aware of hardware frames, and any hardware frame context should not be automatically pr...
Various defines for YUV<->RGB conversion.
common internal API header
#define NULL_IF_CONFIG_SMALL(x)
Return NULL if CONFIG_SMALL is true, otherwise the argument without modification.
int av_pix_fmt_count_planes(enum AVPixelFormat pix_fmt)
enum AVPixelFormat av_get_pix_fmt(const char *name)
Return the pixel format corresponding to name.
@ AVCHROMA_LOC_TOPLEFT
ITU-R 601, SMPTE 274M 296M S314M(DV 4:1:1), mpeg2 4:2:2.
AVColorRange
Visual content value range.
@ AVCOL_RANGE_MPEG
Narrow or limited range content.
@ AVCOL_RANGE_UNSPECIFIED
@ AVCOL_RANGE_JPEG
Full range content.
@ AV_PIX_FMT_NV12
planar YUV 4:2:0, 12bpp, 1 plane for Y and 1 plane for the UV components, which are interleaved (firs...
@ AV_PIX_FMT_YUV420P
planar YUV 4:2:0, 12bpp, (1 Cr & Cb sample per 2x2 Y samples)
@ AV_PIX_FMT_YUV444P
planar YUV 4:4:4, 24bpp, (1 Cr & Cb sample per 1x1 Y samples)
int ff_scale_eval_dimensions(void *log_ctx, const char *w_expr, const char *h_expr, AVFilterLink *inlink, AVFilterLink *outlink, int *ret_w, int *ret_h)
Parse and evaluate string expressions for width and height.
AVFilterLink ** inputs
array of pointers to input links
void * priv
private data for use by the filter
A link between two filters.
int w
agreed upon image width
int h
agreed upon image height
AVFilterContext * src
source filter
AVFilterContext * dst
dest filter
A filter pad used for either input or output.
const char * name
Pad name.
const char * name
Filter name.
This structure describes decoded (raw) audio or video data.
uint8_t * data[AV_NUM_DATA_POINTERS]
pointer to the picture/channel planes.
enum AVColorRange out_range
VulkanFilterContext vkctx
VkDescriptorImageInfo output_images[3]
VkDescriptorBufferInfo params_desc
VkDescriptorImageInfo input_images[3]
static const char scale_bilinear[]
static const char write_nv12[]
static int scale_vulkan_filter_frame(AVFilterLink *link, AVFrame *in)
static const AVFilterPad scale_vulkan_outputs[]
static const char write_444[]
static int scale_vulkan_config_output(AVFilterLink *outlink)
static const AVFilterPad scale_vulkan_inputs[]
AVFILTER_DEFINE_CLASS(scale_vulkan)
AVFilter ff_vf_scale_vulkan
static const char rgb2yuv[]
static const char write_420[]
static av_cold int init_filter(AVFilterContext *ctx, AVFrame *in)
static void scale_vulkan_uninit(AVFilterContext *avctx)
static const AVOption scale_vulkan_options[]
static int process_frames(AVFilterContext *avctx, AVFrame *out_f, AVFrame *in_f)
AVFrame * ff_get_video_buffer(AVFilterLink *link, int w, int h)
Request a picture buffer with a specific set of permissions.
int ff_vk_unmap_buffers(AVFilterContext *avctx, FFVkBuffer *buf, int nb_buffers, int flush)
Unmaps the buffer from userspace.
VkSampler * ff_vk_init_sampler(AVFilterContext *avctx, int unnorm_coords, VkFilter filt)
Create a Vulkan sampler, will be auto-freed in ff_vk_filter_uninit()
int ff_vk_map_buffers(AVFilterContext *avctx, FFVkBuffer *buf, uint8_t *mem[], int nb_buffers, int invalidate)
Maps the buffer to userspace.
int ff_vk_add_exec_dep(AVFilterContext *avctx, FFVkExecContext *e, AVFrame *frame, VkPipelineStageFlagBits in_wait_dst_flag)
Adds a frame as a queue dependency.
int ff_vk_create_buf(AVFilterContext *avctx, FFVkBuffer *buf, size_t size, VkBufferUsageFlags usage, VkMemoryPropertyFlagBits flags)
Create a VkBuffer with the specified parameters.
int ff_vk_add_descriptor_set(AVFilterContext *avctx, VulkanPipeline *pl, SPIRVShader *shd, VulkanDescriptorSetBinding *desc, int num, int only_print_to_shader)
Adds a descriptor set to the shader and registers them in the pipeline.
const char * ff_vk_shader_rep_fmt(enum AVPixelFormat pixfmt)
Gets the glsl format string for a pixel format.
void ff_vk_discard_exec_deps(AVFilterContext *avctx, FFVkExecContext *e)
Discards all queue dependencies.
void ff_vk_set_compute_shader_sizes(AVFilterContext *avctx, SPIRVShader *shd, int local_size[3])
Writes the workgroup size for a shader.
int ff_vk_init_compute_pipeline(AVFilterContext *avctx, VulkanPipeline *pl)
Initializes a compute pipeline.
int ff_vk_filter_config_input(AVFilterLink *inlink)
int ff_vk_filter_config_output(AVFilterLink *outlink)
const VkComponentMapping ff_comp_identity_map
void ff_vk_filter_uninit(AVFilterContext *avctx)
SPIRVShader * ff_vk_init_shader(AVFilterContext *avctx, VulkanPipeline *pl, const char *name, VkShaderStageFlags stage)
Inits a shader for a specific pipeline.
int ff_vk_compile_shader(AVFilterContext *avctx, SPIRVShader *shd, const char *entrypoint)
Compiles the shader, entrypoint must be set to "main".
int ff_vk_create_exec_ctx(AVFilterContext *avctx, FFVkExecContext **ctx)
Init an execution context for command recording and queue submission.
VkCommandBuffer ff_vk_get_exec_buf(AVFilterContext *avctx, FFVkExecContext *e)
Gets the command buffer to use for this submission from the exe context.
void ff_vk_update_descriptor_set(AVFilterContext *avctx, VulkanPipeline *pl, int set_id)
Updates a descriptor set via the updaters defined.
void ff_vk_bind_pipeline_exec(AVFilterContext *avctx, FFVkExecContext *e, VulkanPipeline *pl)
Add a command to bind the completed pipeline and its descriptor sets.
int ff_vk_start_exec_recording(AVFilterContext *avctx, FFVkExecContext *e)
Begin recording to the command buffer.
int ff_vk_mt_is_np_rgb(enum AVPixelFormat pix_fmt)
Returns 1 if the image is any sort of supported RGB.
int ff_vk_init_pipeline_layout(AVFilterContext *avctx, VulkanPipeline *pl)
Initializes the pipeline layout after all shaders and descriptor sets have been finished.
void ff_vk_free_buf(AVFilterContext *avctx, FFVkBuffer *buf)
Frees a buffer.
int ff_vk_submit_exec_queue(AVFilterContext *avctx, FFVkExecContext *e)
Submits a command buffer to the queue for execution.
int ff_vk_filter_query_formats(AVFilterContext *avctx)
General lavfi IO functions.
int ff_vk_filter_init(AVFilterContext *avctx)
VulkanPipeline * ff_vk_create_pipeline(AVFilterContext *avctx)
Inits a pipeline.
int ff_vk_create_imageview(AVFilterContext *avctx, FFVkExecContext *e, VkImageView *v, VkImage img, VkFormat fmt, const VkComponentMapping map)
Create an imageview.
#define DUP_SAMPLER_ARRAY4(x)
#define GET_QUEUE_COUNT(hwctx, graph, comp, tx)