FFmpeg5开发入门教程27:捕获摄像头编码h264并推流

索引地址:系列教程索引地址

上一篇:FFmpeg5开发入门教程26:本地文件推流

本文内容包括:

  • 获取摄像头数据
  • 压缩摄像头视频为H264
  • H264数据RTSP推流

FFmpeg5开发入门教程14:Linux下摄像头捕获并编码为h264中包括获取和压缩的过程,FFmpeg5开发入门教程26:本地文件推流包含数据推流的过程。RTSP推流服务器可按照FFmpeg5开发入门教程25:搭建UDP/TCP/HTTP(S)/RTP/RTMP/RTSP推流服务器的方法搭建。

那么就是怎么把这两个合并。

先看一下流程图

flow

解码部分

从摄像头获取数据,

1
2
3
4
5
$ ffprobe /dev/video0            
...
Input #0, video4linux2,v4l2, from '/dev/video0':
Duration: N/A, start: 35167.891164, bitrate: 147456 kb/s
Stream #0:0: Video: rawvideo (YUY2 / 0x32595559), yuyv422, 1280x720, 147456 kb/s, 10 fps, 10 tbr, 1000k tbn

解码部分就是将原始的YUYV422解码为YUV420P,代码为

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
/////////////// Decoder setup ///////////////
// Open the camera through the v4l2 (video4linux2) input device.
const AVInputFormat *inFmt = av_find_input_format("v4l2");
if (!inFmt) {
    printf("can't find input format.\n");
    return -1; // was `break;` — inconsistent with the sibling error paths; NOTE(review): confirm this fragment is not inside a loop
}

// Enlarge the real-time capture buffer so frames are not dropped while we encode.
AVDictionary *options = NULL;
av_dict_set_int(&options, "rtbufsize", 18432000, 0);
// BUG FIX: the original built `options` but passed NULL here, so rtbufsize was silently ignored.
if (avformat_open_input(&inFmtCtx, "/dev/video0", inFmt, &options) < 0) {
    printf("Cannot open camera.\n");
    av_dict_free(&options);
    return -1;
}
av_dict_free(&options); // any entries not consumed by the demuxer are discarded here (was leaked before)

if (avformat_find_stream_info(inFmtCtx, NULL) < 0) {
    printf("Cannot find any stream in file.\n");
    return -1;
}

// Locate the first (and normally only) video stream exposed by the camera.
for (size_t i = 0; i < inFmtCtx->nb_streams; i++) {
    if (inFmtCtx->streams[i]->codecpar->codec_type == AVMEDIA_TYPE_VIDEO) {
        inVideoStreamIndex = i;
        break;
    }
}
if (inVideoStreamIndex == -1) {
    printf("Cannot find video stream in file.\n");
    return -1;
}

// Build a decoder context from the stream's parameters (rawvideo/yuyv422 for a typical webcam).
AVCodecParameters *inVideoCodecPara = inFmtCtx->streams[inVideoStreamIndex]->codecpar;
if (!(inCodec = avcodec_find_decoder(inVideoCodecPara->codec_id))) {
    printf("Cannot find valid video decoder.\n");
    return -1;
}
if (!(inCodecCtx = avcodec_alloc_context3(inCodec))) {
    printf("Cannot alloc valid decode codec context.\n");
    return -1;
}
if (avcodec_parameters_to_context(inCodecCtx, inVideoCodecPara) < 0) {
    printf("Cannot initialize parameters.\n");
    return -1;
}

if (avcodec_open2(inCodecCtx, inCodec, NULL) < 0) {
    printf("Cannot open codec.\n");
    return -1;
}

av_dump_format(inFmtCtx, 0, "/dev/video0", 0);

// Converter from the camera's native pixel format (yuyv422) to YUV420P, which H.264 expects.
img_ctx = sws_getContext(inCodecCtx->width,
                         inCodecCtx->height,
                         inCodecCtx->pix_fmt,
                         inCodecCtx->width,
                         inCodecCtx->height,
                         AV_PIX_FMT_YUV420P,
                         SWS_BICUBIC,
                         NULL, NULL, NULL);
if (!img_ctx) { // was unchecked
    printf("Cannot create swscale context.\n");
    return -1;
}

// Backing buffer for the converted YUV420P frame; alignment 1 matches av_image_fill_arrays below.
int numBytes = av_image_get_buffer_size(AV_PIX_FMT_YUV420P,
                                        inCodecCtx->width,
                                        inCodecCtx->height, 1);
uint8_t *out_buffer = av_malloc((size_t)numBytes);
if (!out_buffer) { // was unchecked
    printf("Cannot alloc frame buffer.\n");
    return -1;
}

// Point yuvFrame->data/linesize into out_buffer (ownership of out_buffer stays with us).
ret = av_image_fill_arrays(yuvFrame->data,
                           yuvFrame->linesize,
                           out_buffer,
                           AV_PIX_FMT_YUV420P,
                           inCodecCtx->width,
                           inCodecCtx->height,
                           1);
if (ret < 0) {
    printf("Fill arrays failed.\n");
    return -1;
}
/////////////// Decoder setup done ///////////////

就是之前一直使用的

编码部分

将解码后的YUV420P数据编码为h264

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
//////////////编码器部分开始/////////////////////
const char *outFile = "rtsp://192.168.1.31/test"; //输出URL
const char *ofmtName = "rtsp";//输出格式;

if(avformat_alloc_output_context2(&outFmtCtx,NULL,ofmtName,outFile)<0){
printf("Cannot alloc output file context.\n");
return -1;
}
outFmt = outFmtCtx->oformat;

//创建h264视频流,并设置参数
outVStream = avformat_new_stream(outFmtCtx,outCodec);
if(outVStream==NULL){
printf("create new video stream fialed.\n");
return -1;
}
outVStream->time_base.den=30;
outVStream->time_base.num=1;

//编码参数相关
AVCodecParameters *outCodecPara = outFmtCtx->streams[outVStream->index]->codecpar;
outCodecPara->codec_type=AVMEDIA_TYPE_VIDEO;
outCodecPara->codec_id = outFmt->video_codec;
outCodecPara->width = 480;
outCodecPara->height = 360;
outCodecPara->bit_rate = 110000;

//查找编码器
outCodec = avcodec_find_encoder(outFmt->video_codec);
if(outCodec==NULL){
printf("Cannot find any encoder.\n");
return -1;
}

//设置编码器内容
outCodecCtx = avcodec_alloc_context3(outCodec);
avcodec_parameters_to_context(outCodecCtx,outCodecPara);
if(outCodecCtx==NULL){
printf("Cannot alloc output codec content.\n");
return -1;
}
outCodecCtx->codec_id = outFmt->video_codec;
outCodecCtx->codec_type = AVMEDIA_TYPE_VIDEO;
outCodecCtx->pix_fmt = AV_PIX_FMT_YUV420P;
outCodecCtx->width = inCodecCtx->width;
outCodecCtx->height = inCodecCtx->height;
outCodecCtx->time_base.num=1;
outCodecCtx->time_base.den=30;
outCodecCtx->bit_rate=110000;
outCodecCtx->gop_size=10;

if(outCodecCtx->codec_id==AV_CODEC_ID_H264){
outCodecCtx->qmin=10;
outCodecCtx->qmax=51;
outCodecCtx->qcompress=(float)0.6;
}else if(outCodecCtx->codec_id==AV_CODEC_ID_MPEG2VIDEO){
outCodecCtx->max_b_frames=2;
}else if(outCodecCtx->codec_id==AV_CODEC_ID_MPEG1VIDEO){
outCodecCtx->mb_decision=2;
}

//打开编码器
AVDictionary *dict = NULL;
av_dict_set(&dict, "rtsp_transport", "tcp", 0);
av_dict_set(&dict, "vcodec", "h264", 0);
//av_dict_set(&dict, "f", "rtsp", 0);
if(avcodec_open2(outCodecCtx,outCodec,&dict)<0){
printf("Open encoder failed.\n");
return -1;
}

av_dump_format(outFmtCtx, 0, outFile, 1);

if (!(outFmtCtx->oformat->flags & AVFMT_NOFILE))
{
// 2.3 创建并初始化一个AVIOContext, 用以访问URL(outFilename)指定的资源
ret = avio_open(&outFmtCtx->pb, outFile, AVIO_FLAG_WRITE);
if (ret < 0)
{
printf("can't open output URL: %s\n", outFile);
break;
}
}
///////////////编码器部分结束////////////////////

这也是之前用过的代码,因为到此为止基本上没有改动教程14的代码。

推流部分

将编码后的h264推至RTSP地址

前两个部分类似于初始化,实际操作是在一个循环里面。

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
///////////////编解码部分//////////////////////
yuvFrame->format = outCodecCtx->pix_fmt;
yuvFrame->width = outCodecCtx->width;
yuvFrame->height = outCodecCtx->height;

ret = avformat_write_header(outFmtCtx,NULL);

int64_t startTime = av_gettime();

while(av_read_frame(inFmtCtx,inPkt)>=0){
if(inPkt->stream_index == inVideoStreamIndex){
if(avcodec_send_packet(inCodecCtx,inPkt)>=0){
while((ret=avcodec_receive_frame(inCodecCtx,srcFrame))>=0){
if (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF)
return -1;
else if (ret < 0) {
fprintf(stderr, "Error during decoding\n");
exit(1);
}
sws_scale(img_ctx,
(const uint8_t* const*)srcFrame->data,
srcFrame->linesize,
0,inCodecCtx->height,
yuvFrame->data,yuvFrame->linesize);

yuvFrame->pts=srcFrame->pts;
//encode
if(avcodec_send_frame(outCodecCtx,yuvFrame)>=0){
if(avcodec_receive_packet(outCodecCtx,outPkt)>=0){
printf("encoded one frame.\n");
//delay
// AVRational time_base = outFmtCtx->streams[inVideoStreamIndex]->time_base;
// AVRational time_base_q = {1, AV_TIME_BASE};
// int64_t pts_time = av_rescale_q(outPkt->dts, time_base, time_base_q);
// int64_t now_time = av_gettime() - startTime;
// printf("pts %ld\n",pts_time);
// printf("now %ld\n",now_time);
// if (pts_time > now_time)
// av_usleep(pts_time - now_time);

av_packet_rescale_ts(outPkt,
inFmtCtx->streams[inVideoStreamIndex]->time_base,
outFmtCtx->streams[0]->time_base);
outPkt->pos=-1;
av_interleaved_write_frame(outFmtCtx,outPkt);
av_packet_unref(outPkt);
}
}
//usleep(1000*24);
}
}
av_packet_unref(inPkt);
}
}

av_write_trailer(outFmtCtx);
////////////////编解码部分结束////////////////

运行程序进行解码编码推流

然后取流

1
ffplay rtsp://192.168.1.31/test

效果为

camera rtsp

总结

  • 明显感觉到延迟

完整代码在ffmpeg_Beginner中的27.video_push_encoded_camera_2cloud

下一篇:FFmpeg5开发入门教程28:保存视频流数据至本地(rtsp->mp4)


FFmpeg5开发入门教程27:捕获摄像头编码h264并推流
https://feater.top/ffmpeg/ffmpeg-encode-capature-camera-data-and-push-stream
作者
JackeyLea
发布于
2021年5月1日
许可协议