A Practical Guide to FFmpeg's AVFilter Module

The general workflow of audio/video development looks roughly like the pipeline below. The encoding and decoding stages are fairly fixed, and FFmpeg handles them well. The frame-processing stage (for both audio and video data) is far more varied: resizing video, normalizing audio volume, applying beauty filters in live streaming, compositing multiple streams, and so on all belong to this stage. This article introduces frame processing with FFmpeg's AVFilter module, which provides a clean abstraction for this kind of work. The filter graph concept in AVFilter is a natural fit for multi-stage filtering, and because the filter interface is standardized, adding custom filters later is straightforward. Most material online about AVFilter covers the ffmpeg command line; code-level examples are rare. I recently had to use AVFilter in a project, so I am writing up this summary in the hope that it helps others with the same need.

raw audio/video --> decode --> frame processing --> encode --> output audio/video

1. Key Structures and APIs

// Manages an entire filter system (the filter graph)
struct AVFilterGraph
{
    AVFilterContext **filters;
    unsigned nb_filters;
};

// Describes a filter itself: its capabilities, its pads and its callbacks
struct AVFilter
{
    const char *name;
    const AVFilterPad *inputs;
    const AVFilterPad *outputs;
};

// A filter instance; manages the filter's connections to the outside world
struct AVFilterContext
{
    const AVFilter *filter;
    char *name;

    AVFilterPad *input_pads;
    AVFilterLink **inputs;
    unsigned nb_inputs;

    AVFilterPad *output_pads;
    AVFilterLink **outputs;
    unsigned nb_outputs;

    struct AVFilterGraph *graph;
};

// Describes the connection between two filters
struct AVFilterLink
{
    AVFilterContext *src;
    AVFilterPad *srcpad;

    AVFilterContext *dst;
    AVFilterPad *dstpad;

    struct AVFilterGraph *graph;
};

// Describes an input/output pad of a filter
struct AVFilterPad
{
    const char *name;
    AVFrame *(*get_video_buffer)(AVFilterLink *link, int w, int h);
    AVFrame *(*get_audio_buffer)(AVFilterLink *link, int nb_samples);
    int (*filter_frame)(AVFilterLink *link, AVFrame *frame);
    int (*request_frame)(AVFilterLink *link);
};

// A linked list of labelled graph inputs/outputs, used by avfilter_graph_parse*()
struct AVFilterInOut
{
    char *name;
    AVFilterContext *filter_ctx;
    int pad_idx;
    struct AVFilterInOut *next;
};

The AVFilter module defines the AVFilter structure; each AVFilter is a node with a self-contained function. For example, the scale filter resizes images and the overlay filter superimposes one image onto another. Two special filters deserve particular attention: buffer and buffersink. The buffer filter is the source of a filter graph, i.e. the node into which the raw frames are fed, while the buffersink filter is the graph's output node, from which the processed frames are pulled.
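
As a quick illustration of the roles of these two filters, the typical usage is to push frames into the graph through the buffer source and pull the results back out of the buffersink. The following is only a minimal sketch, using the variable names from the example in section 3 and assuming the two contexts and the graph have already been created and configured; error handling is trimmed.

AVFrame *filtered = av_frame_alloc();

// push one raw/decoded frame into the graph through the "buffer" source
av_buffersrc_add_frame(bufferSrc_ctx, frame_in);

// pull whatever the graph has produced from the "buffersink" output
while (av_buffersink_get_frame(bufferSink_ctx, filtered) >= 0) {
    // ... use the filtered frame ...
    av_frame_unref(filtered);
}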

// Look up a filter registered in FFmpeg by name; avfilter_register_all()
// must be called beforehand to register the built-in filters
const AVFilter *avfilter_get_by_name(const char *name);

// Push a frame to be processed into the "buffer" source filter
int av_buffersrc_add_frame(AVFilterContext *ctx, AVFrame *frame);

// Pull a processed frame out of the "buffersink" sink filter
int av_buffersink_get_frame(AVFilterContext *ctx, AVFrame *frame);

// Allocate a filter graph
AVFilterGraph *avfilter_graph_alloc(void);

// Create a filter instance (AVFilterContext) and add it to the AVFilterGraph
int avfilter_graph_create_filter(AVFilterContext **filt_ctx, const AVFilter *filt,
                                 const char *name, const char *args, void *opaque,
                                 AVFilterGraph *graph_ctx);

// Link two filter nodes together
int avfilter_link(AVFilterContext *src, unsigned srcpad,
                  AVFilterContext *dst, unsigned dstpad);
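
The AVFilterInOut structure listed earlier is mainly used together with avfilter_graph_parse_ptr(), which builds the middle of a graph from a textual description instead of explicit avfilter_link() calls. Below is a rough sketch of that alternative, assuming a buffer source context and a buffersink context have already been created with avfilter_graph_create_filter() as above; error handling is omitted and the graph string is the one from section 2.

// Build everything between the buffer source and the buffersink from a
// filtergraph string instead of calling avfilter_link() by hand.
AVFilterInOut *outputs = avfilter_inout_alloc();   // feeds the parsed graph
AVFilterInOut *inputs  = avfilter_inout_alloc();   // consumes its output

outputs->name       = av_strdup("in");             // label for bufferSrc_ctx's output pad
outputs->filter_ctx = bufferSrc_ctx;
outputs->pad_idx    = 0;
outputs->next       = NULL;

inputs->name        = av_strdup("out");            // label for bufferSink_ctx's input pad
inputs->filter_ctx  = bufferSink_ctx;
inputs->pad_idx     = 0;
inputs->next        = NULL;

const char *descr =
    "split[main][tmp];[tmp]crop=iw:ih/2:0:0,vflip[flip];[main][flip]overlay=0:H/2";
int ret = avfilter_graph_parse_ptr(filter_graph, descr, &inputs, &outputs, NULL);

avfilter_inout_free(&inputs);
avfilter_inout_free(&outputs);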

2. The Overall AVFilter Workflow

Before writing any code with AVFilter, it helps to first draw the processing flow you want to build. I will use an example from the official FFmpeg filter documentation to illustrate.

                       [main]
input --> split ---------------------> overlay --> output
            |                             ^
            |[tmp]                  [flip]|
            +-----> crop --> vflip -------+

The processing flow of this example is shown above. The split filter first splits the input stream into two streams (main and tmp), which are then processed separately. The tmp stream is cropped by the crop filter and then flipped vertically by the vflip filter; the result is labelled flip. The main and flip streams are then fed into the overlay filter and composited. In the diagram, input is the buffer source filter mentioned above and output is the buffersink filter. Every node in the diagram is an AVFilterContext and every edge is an AVFilterLink, and all of them are managed by a single AVFilterGraph.

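For reference, the FFmpeg documentation expresses the same graph with the command-line filter syntax; the command below is my reading of that example and should produce the same result as the code in the next section:

ffmpeg -i INPUT -vf "split [main][tmp]; [tmp] crop=iw:ih/2:0:0, vflip [flip]; [main][flip] overlay=0:H/2" OUTPUT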

3. Example Implementation

extern "C" {
#include <libavcodec/avcodec.h>
#include <libavformat/avformat.h>
#include <libavfilter/avfiltergraph.h>
#include <libavfilter/buffersink.h>
#include <libavfilter/buffersrc.h>
#include <libavutil/opt.h>
#include <libavutil/imgutils.h>
}

int main(int argc, char* argv)
{
int ret = 0;

// input yuv
FILE* inFile = NULL;
const char* inFileName = "sintel_480x272_yuv420p.yuv";
fopen_s(&inFile, inFileName, "rb+");
if (!inFile) {
printf("Fail to open file\n");
return -1;
}

    int in_width = 480;
    int in_height = 272;

    // output yuv
    FILE* outFile = NULL;
    const char* outFileName = "out_crop_vfilter.yuv";
    fopen_s(&outFile, outFileName, "wb");
    if (!outFile) {
        printf("Fail to create file for output\n");
        return -1;
    }

    avfilter_register_all();

    AVFilterGraph* filter_graph = avfilter_graph_alloc();
    if (!filter_graph) {
        printf("Fail to create filter graph!\n");
        return -1;
    }

    // source filter
    char args[512];
    _snprintf_s(args, sizeof(args),
        "video_size=%dx%d:pix_fmt=%d:time_base=%d/%d:pixel_aspect=%d/%d",
        in_width, in_height, AV_PIX_FMT_YUV420P,
        1, 25, 1, 1);
    const AVFilter* bufferSrc = avfilter_get_by_name("buffer");
    AVFilterContext* bufferSrc_ctx;
    ret = avfilter_graph_create_filter(&bufferSrc_ctx, bufferSrc, "in", args, NULL, filter_graph);
    if (ret < 0) {
        printf("Fail to create filter bufferSrc\n");
        return -1;
    }

    // sink filter
    AVBufferSinkParams *bufferSink_params;
    AVFilterContext* bufferSink_ctx;
    const AVFilter* bufferSink = avfilter_get_by_name("buffersink");
    enum AVPixelFormat pix_fmts[] = { AV_PIX_FMT_YUV420P, AV_PIX_FMT_NONE };
    bufferSink_params = av_buffersink_params_alloc();
    bufferSink_params->pixel_fmts = pix_fmts;
    ret = avfilter_graph_create_filter(&bufferSink_ctx, bufferSink, "out", NULL, bufferSink_params, filter_graph);
    if (ret < 0) {
        printf("Fail to create filter sink filter\n");
        return -1;
    }

    // split filter
    const AVFilter *splitFilter = avfilter_get_by_name("split");
    AVFilterContext *splitFilter_ctx;
    ret = avfilter_graph_create_filter(&splitFilter_ctx, splitFilter, "split", "outputs=2", NULL, filter_graph);
    if (ret < 0) {
        printf("Fail to create split filter\n");
        return -1;
    }

    // crop filter: keep the top half of the picture
    const AVFilter *cropFilter = avfilter_get_by_name("crop");
    AVFilterContext *cropFilter_ctx;
    ret = avfilter_graph_create_filter(&cropFilter_ctx, cropFilter, "crop", "out_w=iw:out_h=ih/2:x=0:y=0", NULL, filter_graph);
    if (ret < 0) {
        printf("Fail to create crop filter\n");
        return -1;
    }

    // vflip filter
    const AVFilter *vflipFilter = avfilter_get_by_name("vflip");
    AVFilterContext *vflipFilter_ctx;
    ret = avfilter_graph_create_filter(&vflipFilter_ctx, vflipFilter, "vflip", NULL, NULL, filter_graph);
    if (ret < 0) {
        printf("Fail to create vflip filter\n");
        return -1;
    }

    // overlay filter: place the flipped half on the lower half of the frame,
    // equivalent to overlay=0:H/2 in the official example
    const AVFilter *overlayFilter = avfilter_get_by_name("overlay");
    AVFilterContext *overlayFilter_ctx;
    ret = avfilter_graph_create_filter(&overlayFilter_ctx, overlayFilter, "overlay", "x=0:y=H/2", NULL, filter_graph);
    if (ret < 0) {
        printf("Fail to create overlay filter\n");
        return -1;
    }

    // src filter to split filter
    ret = avfilter_link(bufferSrc_ctx, 0, splitFilter_ctx, 0);
    if (ret != 0) {
        printf("Fail to link src filter and split filter\n");
        return -1;
    }
    // split filter's first pad to overlay filter's main pad
    ret = avfilter_link(splitFilter_ctx, 0, overlayFilter_ctx, 0);
    if (ret != 0) {
        printf("Fail to link split filter and overlay filter main pad\n");
        return -1;
    }
    // split filter's second pad to crop filter
    ret = avfilter_link(splitFilter_ctx, 1, cropFilter_ctx, 0);
    if (ret != 0) {
        printf("Fail to link split filter's second pad and crop filter\n");
        return -1;
    }
    // crop filter to vflip filter
    ret = avfilter_link(cropFilter_ctx, 0, vflipFilter_ctx, 0);
    if (ret != 0) {
        printf("Fail to link crop filter and vflip filter\n");
        return -1;
    }
    // vflip filter to overlay filter's second pad
    ret = avfilter_link(vflipFilter_ctx, 0, overlayFilter_ctx, 1);
    if (ret != 0) {
        printf("Fail to link vflip filter and overlay filter's second pad\n");
        return -1;
    }
    // overlay filter to sink filter
    ret = avfilter_link(overlayFilter_ctx, 0, bufferSink_ctx, 0);
    if (ret != 0) {
        printf("Fail to link overlay filter and sink filter\n");
        return -1;
    }

    // check and configure the complete filter graph
    ret = avfilter_graph_config(filter_graph, NULL);
    if (ret < 0) {
        printf("Fail in filter graph\n");
        return -1;
    }

    // dump the graph description to a text file for inspection
    char *graph_str = avfilter_graph_dump(filter_graph, NULL);
    FILE* graphFile = NULL;
    fopen_s(&graphFile, "graphFile.txt", "w");
    fprintf(graphFile, "%s", graph_str);
    fclose(graphFile);
    av_free(graph_str);

    AVFrame *frame_in = av_frame_alloc();
    unsigned char *frame_buffer_in = (unsigned char *)av_malloc(av_image_get_buffer_size(AV_PIX_FMT_YUV420P, in_width, in_height, 1));
    av_image_fill_arrays(frame_in->data, frame_in->linesize, frame_buffer_in,
        AV_PIX_FMT_YUV420P, in_width, in_height, 1);

    AVFrame *frame_out = av_frame_alloc();
    unsigned char *frame_buffer_out = (unsigned char *)av_malloc(av_image_get_buffer_size(AV_PIX_FMT_YUV420P, in_width, in_height, 1));
    av_image_fill_arrays(frame_out->data, frame_out->linesize, frame_buffer_out,
        AV_PIX_FMT_YUV420P, in_width, in_height, 1);

    frame_in->width = in_width;
    frame_in->height = in_height;
    frame_in->format = AV_PIX_FMT_YUV420P;

    while (1) {

        if (fread(frame_buffer_in, 1, in_width*in_height * 3 / 2, inFile) != in_width*in_height * 3 / 2) {
            break;
        }
        //input Y,U,V
        frame_in->data[0] = frame_buffer_in;
        frame_in->data[1] = frame_buffer_in + in_width*in_height;
        frame_in->data[2] = frame_buffer_in + in_width*in_height * 5 / 4;

        if (av_buffersrc_add_frame(bufferSrc_ctx, frame_in) < 0) {
            printf("Error while add frame.\n");
            break;
        }

        /* pull filtered pictures from the filtergraph */
        ret = av_buffersink_get_frame(bufferSink_ctx, frame_out);
        if (ret < 0)
            break;

        //output Y,U,V
        if (frame_out->format == AV_PIX_FMT_YUV420P) {
            for (int i = 0; i < frame_out->height; i++) {
                fwrite(frame_out->data[0] + frame_out->linesize[0] * i, 1, frame_out->width, outFile);
            }
            for (int i = 0; i < frame_out->height / 2; i++) {
                fwrite(frame_out->data[1] + frame_out->linesize[1] * i, 1, frame_out->width / 2, outFile);
            }
            for (int i = 0; i < frame_out->height / 2; i++) {
                fwrite(frame_out->data[2] + frame_out->linesize[2] * i, 1, frame_out->width / 2, outFile);
            }
        }
        printf("Process 1 frame!\n");
        av_frame_unref(frame_out);
    }

    fclose(inFile);
    fclose(outFile);

    av_frame_free(&frame_in);
    av_frame_free(&frame_out);
    avfilter_graph_free(&filter_graph);
    return 0;
}
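
To check the result, the raw output can be played back with ffplay (assuming ffplay is on your path; adjust the size if you change the input file):

ffplay -f rawvideo -pixel_format yuv420p -video_size 480x272 out_crop_vfilter.yuv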

The complete code is available in the GitHub code repository.

4. Writing a Custom Filter

I have not tried this part in practice yet. Judging from the existing filters in the FFmpeg source tree and the document doc/writing_filters.txt that ships with the source, it should not be hard to do; I will fill in this part when I find the time.
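
For orientation only, a simple video filter inside the FFmpeg source tree roughly follows the skeleton below (based on the 3.x-era layout that matches the API used in this article). The filter name "myfilter" is illustrative; the pad-array sentinel and the registration steps (adding the filter to libavfilter/allfilters.c and the Makefile) differ between versions, so doc/writing_filters.txt remains the authoritative guide.

// Rough skeleton of a pass-through video filter, FFmpeg 3.x style layout.
#include "libavutil/internal.h"   // NULL_IF_CONFIG_SMALL
#include "avfilter.h"
#include "internal.h"             // ff_filter_frame

static int filter_frame(AVFilterLink *inlink, AVFrame *frame)
{
    AVFilterContext *ctx = inlink->dst;
    // ... modify the frame's pixel data here ...
    return ff_filter_frame(ctx->outputs[0], frame);   // pass it downstream
}

static const AVFilterPad myfilter_inputs[] = {
    {
        .name         = "default",
        .type         = AVMEDIA_TYPE_VIDEO,
        .filter_frame = filter_frame,
    },
    { NULL }
};

static const AVFilterPad myfilter_outputs[] = {
    {
        .name = "default",
        .type = AVMEDIA_TYPE_VIDEO,
    },
    { NULL }
};

AVFilter ff_vf_myfilter = {
    .name        = "myfilter",
    .description = NULL_IF_CONFIG_SMALL("Pass frames through unchanged"),
    .inputs      = myfilter_inputs,
    .outputs     = myfilter_outputs,
};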