Pulling a stream from an RTSP server with ffmpeg and saving it to various file formats

phymat.nico · Published 2020-05-29 10:45:36

#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <unistd.h>
#include <libavformat/avformat.h>

#define USAGE   "rtsp2x -i <rtsp_url> -t [avi | flv | mp4] -n <frames_count>"
#define OPTS    "i:t:n:h"

static void print_usage()
{
    printf("Usage: %s\n", USAGE);
    return;
}
int main(int argc, char **argv)
{
    AVOutputFormat *ofmt = NULL;
    AVFormatContext *ifmt_ctx = NULL, *ofmt_ctx = NULL;
    AVPacket pkt;
    char in_filename[128] = {0}, out_filename[128] = {0};
    int ret, i;
    int video_index = -1;
    int frame_index = 0;
    int I_received = 0;          // set to 1 once the first key frame (I-frame) has been received

    int opt, frames_count = -1;

    while ((opt = getopt(argc, argv, OPTS)) != -1) {
        switch (opt) {
        case 'i':
            strcpy(in_filename, optarg);
            break;
        case 't':
            if (strcmp(optarg, "avi") == 0)
                strcpy(out_filename, "receive.avi");
            else if (strcmp(optarg, "flv") == 0)
                strcpy(out_filename, "receive.flv");
            else if (strcmp(optarg, "mp4") == 0)
                strcpy(out_filename, "receive.mp4");
            else {
                print_usage();
                return -1;
            }
            break;
        case 'n':
            frames_count = atoi(optarg);
            if (frames_count < 0) {
                print_usage();
                return -1;
            }
            printf("frames_count = %d\n", frames_count);
            break;
        case 'h':
        default:
            print_usage();
            return -1;
        }
    }

    if (strlen(in_filename) == 0 || strlen(out_filename) == 0 || frames_count < 0) {
        print_usage();
        return -1;
    }

    av_register_all();
    avformat_network_init();

    // Open the RTSP stream over TCP and set a maximum demuxing delay
    AVDictionary *avdic = NULL;
    char option_key[] = "rtsp_transport";
    char option_value[] = "tcp";
    av_dict_set(&avdic, option_key, option_value, 0);
    char option_key2[] = "max_delay";
    char option_value2[] = "5000000";
    av_dict_set(&avdic, option_key2, option_value2, 0);
    // Open the input stream
    if ((ret = avformat_open_input(&ifmt_ctx, in_filename, 0, &avdic)) < 0)
    {
        printf("Could not open input file.\n");
        goto end;
    }
    if ((ret = avformat_find_stream_info(ifmt_ctx, 0)) < 0)
    {
        printf("Failed to retrieve input stream information.\n");
        goto end;
    }

    // Find the index of the video stream
    for (i = 0; i < ifmt_ctx->nb_streams; i++)
    {
        if (ifmt_ctx->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO)
        {
            // This is the video stream; remember its index so that the video
            // can later be read from ifmt_ctx->streams[video_index]
            video_index = i;
            break;
        }
    }

    av_dump_format(ifmt_ctx,0,in_filename,0);

    // Allocate the output context
    avformat_alloc_output_context2(&ofmt_ctx, NULL, NULL, out_filename);
    
    if(!ofmt_ctx)
    {
        printf("Could not create output context\n");
        ret=AVERROR_UNKNOWN;
        goto end;
    }
    
    ofmt = ofmt_ctx->oformat;
    for (i = 0; i < ifmt_ctx->nb_streams; i++)
    {
        // Create an output stream for each input stream
        AVStream *in_stream = ifmt_ctx->streams[i];
        AVStream *out_stream = avformat_new_stream(ofmt_ctx, in_stream->codec->codec);
        if (!out_stream)
        {
            printf("Failed allocating output stream.\n");
            ret = AVERROR_UNKNOWN;
            goto end;
        }

        // Copy the codec context from the input stream to the output stream
        ret = avcodec_copy_context(out_stream->codec, in_stream->codec);
        if (ret < 0)
        {
            printf("Failed to copy context from input to output stream codec context.\n");
            goto end;
        }
        out_stream->codec->codec_tag = 0;

        if (ofmt_ctx->oformat->flags & AVFMT_GLOBALHEADER)
            out_stream->codec->flags |= CODEC_FLAG_GLOBAL_HEADER;
    }

    // Dump the output format --------------------
    av_dump_format(ofmt_ctx, 0, out_filename, 1);

    // Open the output file
    if (!(ofmt->flags & AVFMT_NOFILE))
    {
        ret = avio_open(&ofmt_ctx->pb, out_filename, AVIO_FLAG_WRITE);
        if (ret < 0)
        {
            printf("Could not open output file '%s'.\n", out_filename);
            goto end;
        }
    }

    // Write the file header
    ret = avformat_write_header(ofmt_ctx, NULL);
    if (ret < 0)
    {
        printf("Error occurred when writing the output file header.\n");
        goto end;
    }

    while (1)
    {
        AVStream *in_stream, *out_stream;

        // Read one packet from the input
        ret = av_read_frame(ifmt_ctx, &pkt);
        if (ret < 0)
            break;

        in_stream  = ifmt_ctx->streams[pkt.stream_index];
        out_stream = ofmt_ctx->streams[pkt.stream_index];

        // Copy the packet, rescaling PTS/DTS from the input to the output time base
        pkt.pts = av_rescale_q_rnd(pkt.pts, in_stream->time_base, out_stream->time_base, (enum AVRounding)(AV_ROUND_NEAR_INF|AV_ROUND_PASS_MINMAX));
        pkt.dts = av_rescale_q_rnd(pkt.dts, in_stream->time_base, out_stream->time_base, (enum AVRounding)(AV_ROUND_NEAR_INF|AV_ROUND_PASS_MINMAX));
        //printf("pts %d dts %d base %d\n", pkt.pts, pkt.dts, in_stream->time_base);
        pkt.duration = av_rescale_q(pkt.duration, in_stream->time_base, out_stream->time_base);
        pkt.pos = -1;

        // Not every packet read in this loop is a video packet; when a video
        // packet arrives we simply count it here
        if (pkt.stream_index == video_index)
        {
            // Discard everything until the first key frame (I-frame) has arrived
            if ((pkt.flags & AV_PKT_FLAG_KEY) && (I_received == 0))
                I_received = 1;
            if (I_received == 0)
            {
                av_packet_unref(&pkt);   // avoid leaking dropped packets
                continue;
            }

            printf("Receive %8d video frames from input URL\n", frame_index);
            frame_index++;
        } else {
            // Non-video packets (e.g. audio) are not written to the output
            av_packet_unref(&pkt);
            continue;
        }

        if (frame_index == frames_count)
        {
            av_packet_unref(&pkt);
            break;
        }

        // Write the packet to the output file
        ret = av_interleaved_write_frame(ofmt_ctx, &pkt);
        if (ret < 0)
        {
            /**
             * On a flaky network, packets may arrive out of order and the PTS sequence
             * becomes inconsistent, which makes av_interleaved_write_frame() fail with
             * -22 (AVERROR(EINVAL)). For now we simply drop these late packets.
             * If most packets carry no PTS at all, you would instead have to fill in the
             * timestamps yourself (for example the previous PTS plus one frame duration)
             * before writing - see the sketch after this listing.
             */
            if (ret == -22) {
                av_packet_unref(&pkt);
                continue;
            } else {
                printf("Error muxing packet. error code %d\n", ret);
                av_packet_unref(&pkt);
                break;
            }
        }

        //av_free_packet(&pkt); // deprecated in newer FFmpeg versions, replaced by av_packet_unref
        av_packet_unref(&pkt);
    }


    // Write the file trailer
    av_write_trailer(ofmt_ctx);

end:
    av_dict_free(&avdic);

    // Close input
    avformat_close_input(&ifmt_ctx);

    // Close output
    if (ofmt_ctx && !(ofmt->flags & AVFMT_NOFILE))
        avio_close(ofmt_ctx->pb);
    avformat_free_context(ofmt_ctx);

    if (ret < 0 && ret != AVERROR_EOF)
    {
        printf("Error occurred.\n");
        return -1;
    }

    return 0;
}
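
The comment in the error-handling branch above mentions back-filling timestamps by hand when the source delivers packets without a usable PTS. Below is a minimal sketch of that idea, not part of the original program: it assumes a roughly constant frame duration, uses the same FFmpeg headers as the listing, and the names fix_missing_pts, last_pts and frame_duration are purely illustrative. It would be called on each video packet before the av_rescale_q_rnd() calls.

// Sketch only (assumed helper, not in the original code): extrapolate a PTS for
// packets that arrive without one, using the previous packet's PTS plus a fixed
// frame duration expressed in the input stream's time base.
static void fix_missing_pts(AVPacket *pkt, int64_t *last_pts, int64_t frame_duration)
{
    if (pkt->pts == AV_NOPTS_VALUE)
        pkt->pts = (*last_pts == AV_NOPTS_VALUE) ? 0 : *last_pts + frame_duration;
    if (pkt->dts == AV_NOPTS_VALUE)
        pkt->dts = pkt->pts;        // assumes no B-frames, so decode order == presentation order
    *last_pts = pkt->pts;
}

A caller would keep last_pts across loop iterations (initialised to AV_NOPTS_VALUE) and could derive frame_duration from the stream's frame rate, for example av_rescale_q(1, av_inv_q(in_stream->r_frame_rate), in_stream->time_base).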
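
A possible way to build and run the program, assuming the FFmpeg development packages and pkg-config are installed (the RTSP URL below is only a placeholder):

gcc rtsp2x.c -o rtsp2x $(pkg-config --cflags --libs libavformat libavcodec libavutil)
./rtsp2x -i rtsp://192.168.1.10:554/stream1 -t mp4 -n 500

This saves roughly the first 500 video frames of the stream to receive.mp4; the output is video-only, since the listing discards non-video packets.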