flowchart TB
N --> F
F --read next frame--> L
subgraph filter
A[create filter endpoints] --> B[create filter graph]
B --> C[set endpoint parameters]
C --> D[parse filter command]
D --> E[validate filter graph]
E --> F[add filter]
direction TB
end
subgraph decode
direction TB
G[open file] --> H[find stream info]
H --> I[find video stream]
I --> J[find decoder]
J --> K[open decoder]
K --> L{read frame}
L --No--> M[release resources]
L --Yes--> N[decode frame]
end
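The decode branch of the flowchart (nodes G to K: open file, find stream info, find video stream, find and open the decoder) is not spelled out in the snippets below. A minimal sketch of that setup, assuming the variable names used later (fmtCtx, videoStreamIndex, videoCodecCtx, pkt, yuvFrame), a placeholder input path inputFile, and the usual libavformat/libavcodec headers:

AVFormatContext *fmtCtx = NULL;
AVCodecContext *videoCodecCtx = NULL;
int videoStreamIndex = -1;

// Open the input file and read its stream information.
if (avformat_open_input(&fmtCtx, inputFile, NULL, NULL) < 0 ||
    avformat_find_stream_info(fmtCtx, NULL) < 0) {
    printf("Cannot open input file.\n");
}
// Locate the video stream and its decoder.
videoStreamIndex = av_find_best_stream(fmtCtx, AVMEDIA_TYPE_VIDEO, -1, -1, NULL, 0);
if (videoStreamIndex < 0) {
    printf("Cannot find a video stream.\n");
}
const AVCodec *decoder =
    avcodec_find_decoder(fmtCtx->streams[videoStreamIndex]->codecpar->codec_id);
// Create a decoder context from the stream parameters and open it.
videoCodecCtx = avcodec_alloc_context3(decoder);
avcodec_parameters_to_context(videoCodecCtx, fmtCtx->streams[videoStreamIndex]->codecpar);
if (avcodec_open2(videoCodecCtx, decoder, NULL) < 0) {
    printf("Cannot open decoder.\n");
}
AVPacket *pkt = av_packet_alloc();
AVFrame *yuvFrame = av_frame_alloc();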
// Create an instance of the specified filter (here the "buffer" source) from the
// initialization string args, and register it in filterGraph under the name "in".
int ret = avfilter_graph_create_filter(&bufSrcCtx, bufSrc, "in", args, NULL, filterGraph);
if (ret < 0) {
    printf("Cannot create buffer source.\n");
}
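The call above assumes that filterGraph, bufSrc/bufSink and the initialization string args have already been prepared. A minimal sketch of that preparation, using the same names and the decoder/stream variables from the decode setup:

AVFilterGraph *filterGraph = avfilter_graph_alloc();
const AVFilter *bufSrc  = avfilter_get_by_name("buffer");
const AVFilter *bufSink = avfilter_get_by_name("buffersink");

// Describe the frames that will be fed into the graph: size, pixel format,
// time base and aspect ratio come from the opened decoder / video stream.
char args[512];
snprintf(args, sizeof(args),
         "video_size=%dx%d:pix_fmt=%d:time_base=%d/%d:pixel_aspect=%d/%d",
         videoCodecCtx->width, videoCodecCtx->height, videoCodecCtx->pix_fmt,
         fmtCtx->streams[videoStreamIndex]->time_base.num,
         fmtCtx->streams[videoStreamIndex]->time_base.den,
         videoCodecCtx->sample_aspect_ratio.num,
         videoCodecCtx->sample_aspect_ratio.den);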
/* buffer video sink: to terminate the filter chain. */
ret = avfilter_graph_create_filter(&bufSinkCtx, bufSink, "out", NULL, NULL, filterGraph);
if (ret < 0) {
    printf("Cannot create buffer sink.\n");
}
ret = av_opt_set_int_list(bufSinkCtx, "pix_fmts", pixel_fmts,
                          AV_PIX_FMT_NONE, AV_OPT_SEARCH_CHILDREN);
if (ret < 0) {
    av_log(NULL, AV_LOG_ERROR, "Cannot set output pixel format\n");
}
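av_opt_set_int_list() expects an AV_PIX_FMT_NONE-terminated array. The pixel_fmts list is not shown above; a typical definition (the format here is only an example) is:

// Pixel formats the buffer sink is allowed to output; terminated by AV_PIX_FMT_NONE.
enum AVPixelFormat pixel_fmts[] = { AV_PIX_FMT_YUV420P, AV_PIX_FMT_NONE };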
/* Endpoints for the filter graph. */
outFilter->name       = av_strdup("in");
outFilter->filter_ctx = bufSrcCtx;
outFilter->pad_idx    = 0;
outFilter->next       = NULL;
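Only the output endpoint (outFilter, tied to the buffer source) is shown above. The matching input endpoint and the "parse filter command" / "validate filter graph" steps from the flowchart (nodes D and E) would look roughly like the sketch below, assuming inFilter and outFilter were allocated with avfilter_inout_alloc() and filter_descr holds the filter command string (e.g. "scale=640:360"):

inFilter->name       = av_strdup("out");
inFilter->filter_ctx = bufSinkCtx;
inFilter->pad_idx    = 0;
inFilter->next       = NULL;

// Splice the parsed filter chain between the "in" and "out" endpoints,
// then check that the complete graph can be configured.
if ((ret = avfilter_graph_parse_ptr(filterGraph, filter_descr,
                                    &inFilter, &outFilter, NULL)) < 0) {
    printf("Cannot parse filter graph.\n");
}
if ((ret = avfilter_graph_config(filterGraph, NULL)) < 0) {
    printf("Cannot configure filter graph.\n");
}
avfilter_inout_free(&inFilter);
avfilter_inout_free(&outFilter);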
AVFrame *filterFrame = av_frame_alloc();
while (av_read_frame(fmtCtx, pkt) >= 0) {
    if (!runFlag) {
        break;
    }
    initFilter();   /* (re)build the filter graph before processing this packet */
    if (pkt->stream_index == videoStreamIndex) {
        if (avcodec_send_packet(videoCodecCtx, pkt) >= 0) {
            int ret;
            while (1) {
                ret = avcodec_receive_frame(videoCodecCtx, yuvFrame);
                if (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF) {
                    break;  /* no more decoded frames for this packet */
                } else if (ret < 0) {
                    fprintf(stderr, "Error during decoding\n");
                    exit(1);
                }
                /* push the decoded frame into the filtergraph */
                if (av_buffersrc_add_frame_flags(bufSrcCtx, yuvFrame, AV_BUFFERSRC_FLAG_KEEP_REF) < 0) {
                    av_log(NULL, AV_LOG_ERROR, "Error while feeding the filtergraph\n");
                    break;
                }
                // Pull the filtered frame back out of the filter graph.
                if (av_buffersink_get_frame(bufSinkCtx, filterFrame) < 0) {
                    printf("Cannot get frame from bufSinkCtx.\n");
                    continue;
                }
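                // Sketch of the rest of the loop (names assumed from the snippets
                // above): consume filterFrame here, e.g. convert and display it,
                // then drop the references so both frames can be reused.
                av_frame_unref(filterFrame);
                av_frame_unref(yuvFrame);
            }
        }
    }
    av_packet_unref(pkt);
}

// Read loop finished ("No" branch of node L): release resources (node M).
// A minimal sketch, assuming the variables created earlier in this post.
av_frame_free(&filterFrame);
av_frame_free(&yuvFrame);
av_packet_free(&pkt);
avfilter_graph_free(&filterGraph);
avcodec_free_context(&videoCodecCtx);
avformat_close_input(&fmtCtx);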