Pushing a File to an RTMP Server with FFmpeg

Original article: http://blog.csdn.net/leixiaohua1020/article/details/39803457

The program takes a local media file in MOV/AVI/MKV/MP4/FLV or another container and pushes it out as a live stream over a streaming protocol (RTMP, HTTP, UDP, TCP, RTP, etc.).

Starting from this streamer, a few kinds of modification yield a whole family of other streamers. For example (a sketch of the second option follows this list):

* Change the input file to a network stream URL to get a re-streamer (stream relay).
* Change the input file to a callback function (reading from memory) to push video data that is already held in memory.
* Change the input file to a system capture device (via libavdevice) and add an encoding step to get a real-time streamer (live broadcast).
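
As an illustration of the second (memory/callback) option, the sketch below feeds the demuxer from a memory buffer through a custom AVIOContext read callback. It is only a minimal outline: the MemSource type and the helper names are made up for this example, the buffer size is arbitrary, and error handling is omitted.

// Hypothetical sketch: MemSource, mem_read() and open_from_memory() are
// illustrative names, not part of FFmpeg.
#include <string.h>
#include <libavformat/avformat.h>
#include <libavutil/mem.h>

typedef struct MemSource {
    const uint8_t *data;   // encoded media data already held in memory
    size_t size;
    size_t pos;
} MemSource;

// Read callback: libavformat calls this instead of reading a file.
static int mem_read(void *opaque, uint8_t *buf, int buf_size) {
    MemSource *src = (MemSource *)opaque;
    size_t left = src->size - src->pos;
    if (left == 0)
        return AVERROR_EOF;
    if ((size_t)buf_size > left)
        buf_size = (int)left;
    memcpy(buf, src->data + src->pos, buf_size);
    src->pos += buf_size;
    return buf_size;
}

// Attach the callback to the demuxer before avformat_open_input().
static int open_from_memory(AVFormatContext **ifmt_ctx, MemSource *src) {
    unsigned char *avio_buf = (unsigned char *)av_malloc(4096);
    AVIOContext *avio = avio_alloc_context(avio_buf, 4096, 0, src,
                                           mem_read, NULL, NULL);
    *ifmt_ctx = avformat_alloc_context();
    (*ifmt_ctx)->pb = avio;
    (*ifmt_ctx)->flags |= AVFMT_FLAG_CUSTOM_IO;
    // NULL filename: all input now comes from mem_read().
    return avformat_open_input(ifmt_ctx, NULL, NULL, NULL);
}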

Points to note

Container format

RTMP carries its payload in the FLV container, so when the output stream is specified its muxer must be set to "flv". The same applies to other streaming protocols, each of which needs a suitable container; for example, when pushing a stream over UDP the muxer can be set to "mpegts".
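
For instance, the container is chosen when the output AVFormatContext is allocated. A small sketch of the two cases follows (the output URLs are placeholders; the full program later in this post does the same thing for RTMP):

AVFormatContext *ofmt_ctx = NULL;

// RTMP output: RTMP carries FLV, so the muxer must be "flv".
avformat_alloc_output_context2(&ofmt_ctx, NULL, "flv",
                               "rtmp://localhost/publishlive/livestream");
// Alternatively, UDP output: push an MPEG-TS stream instead.
//avformat_alloc_output_context2(&ofmt_ctx, NULL, "mpegts",
//                               "udp://233.233.233.233:6666");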

Delay

The data has to be paced while it is being sent. Otherwise FFmpeg, which processes packets very quickly, would push the whole file out almost instantly, and the streaming server could not keep up. Packets therefore have to be sent at the video's actual frame rate. This streamer sleeps between video frames by calling av_usleep(), so the data goes out at the frame rate of the video. The relevant code is shown below:

//…  
int64_t start_time=av_gettime();  
while (1) {  
//…  
    //Important:Delay  
    if(pkt.stream_index==videoindex){  
        AVRational time_base=ifmt_ctx->streams[videoindex]->time_base;  
        AVRational time_base_q={1,AV_TIME_BASE};  
        int64_t pts_time = av_rescale_q(pkt.dts, time_base, time_base_q);  
        int64_t now_time = av_gettime() - start_time;  
        if (pts_time > now_time)  
            av_usleep(pts_time - now_time);  
    }  
//…  
}  
//… 

PTS/DTS issues

A raw elementary stream without a container (for example a raw H.264 stream) carries no PTS or DTS. When sending such data, the PTS, DTS, duration, and related fields of each AVPacket have to be computed and written manually:

//FIX:No PTS (Example: Raw H.264)  
//Simple Write PTS  
if(pkt.pts==AV_NOPTS_VALUE){  
    //Write PTS  
    AVRational time_base1=ifmt_ctx->streams[videoindex]->time_base;  
    //Duration between 2 frames (us)  
    int64_t calc_duration=(double)AV_TIME_BASE/av_q2d(ifmt_ctx->streams[videoindex]->r_frame_rate);  
    //Parameters  
    pkt.pts=(double)(frame_index*calc_duration)/(double)(av_q2d(time_base1)*AV_TIME_BASE);  
    pkt.dts=pkt.pts;  
    pkt.duration=(double)calc_duration/(double)(av_q2d(time_base1)*AV_TIME_BASE);  
} 

Sequence

(Figure: flow diagram of the streamer; image not preserved.)

Code

/** 
 * 最简单的基于FFmpeg的推流器(推送RTMP) 
 * Simplest FFmpeg Streamer (Send RTMP) 
 *  
 * 雷霄骅 Lei Xiaohua 
 * leixiaohua1020@126.com 
 * 中国传媒大学/数字电视技术 
 * Communication University of China / Digital TV Technology 
 * http://blog.csdn.net/leixiaohua1020 
 *  
 * 本例子实现了推送本地视频至流媒体服务器(以RTMP为例)。 
 * 是使用FFmpeg进行流媒体推送最简单的教程。 
 * 
 * This example stream local media files to streaming media  
 * server (Use RTMP as example).  
 * It's the simplest FFmpeg streamer. 
 *  
 */  

#include <stdio.h>  

#define __STDC_CONSTANT_MACROS  

#ifdef _WIN32  
//Windows  
extern "C"  
{  
#include "libavformat/avformat.h"  
#include "libavutil/mathematics.h"  
#include "libavutil/time.h"  
};  
#else  
//Linux...  
#ifdef __cplusplus  
extern "C"  
{  
#endif  
#include <libavformat/avformat.h>  
#include <libavutil/mathematics.h>  
#include <libavutil/time.h>  
#ifdef __cplusplus  
};  
#endif  
#endif  

int main(int argc, char* argv[])  
{  
    AVOutputFormat *ofmt = NULL;  
    //输入对应一个AVFormatContext,输出对应一个AVFormatContext  
    //(Input AVFormatContext and Output AVFormatContext)  
    AVFormatContext *ifmt_ctx = NULL, *ofmt_ctx = NULL;  
    AVPacket pkt;  
    const char *in_filename, *out_filename;  
    int ret, i;  
    int videoindex=-1;  
    int frame_index=0;  
    int64_t start_time=0;  
    //in_filename  = "cuc_ieschool.mov";  
    //in_filename  = "cuc_ieschool.mkv";  
    //in_filename  = "cuc_ieschool.ts";  
    //in_filename  = "cuc_ieschool.mp4";  
    //in_filename  = "cuc_ieschool.h264";  
    in_filename  = "cuc_ieschool.flv";//输入URL(Input file URL)  
    //in_filename  = "shanghai03_p.h264";  

    out_filename = "rtmp://localhost/publishlive/livestream";//输出 URL(Output URL)[RTMP]  
    //out_filename = "rtp://233.233.233.233:6666";//输出 URL(Output URL)[UDP]  

    av_register_all();  
    //Network  
    avformat_network_init();  
    //输入(Input)  
    if ((ret = avformat_open_input(&ifmt_ctx, in_filename, 0, 0)) < 0) {  
        printf( "Could not open input file.");  
        goto end;  
    }  
    if ((ret = avformat_find_stream_info(ifmt_ctx, 0)) < 0) {  
        printf( "Failed to retrieve input stream information");  
        goto end;  
    }  

    for(i=0; i<ifmt_ctx->nb_streams; i++)   
        if(ifmt_ctx->streams[i]->codec->codec_type==AVMEDIA_TYPE_VIDEO){  
            videoindex=i;  
            break;  
        }  

    av_dump_format(ifmt_ctx, 0, in_filename, 0);  

    //输出(Output)  

    avformat_alloc_output_context2(&ofmt_ctx, NULL, "flv", out_filename); //RTMP  
    //avformat_alloc_output_context2(&ofmt_ctx, NULL, "mpegts", out_filename);//UDP  

    if (!ofmt_ctx) {  
        printf( "Could not create output context\n");  
        ret = AVERROR_UNKNOWN;  
        goto end;  
    }  
    ofmt = ofmt_ctx->oformat;  
    for (i = 0; i < ifmt_ctx->nb_streams; i++) {  
        //根据输入流创建输出流(Create output AVStream according to input AVStream)  
        AVStream *in_stream = ifmt_ctx->streams[i];  
        AVStream *out_stream = avformat_new_stream(ofmt_ctx, in_stream->codec->codec);  
        if (!out_stream) {  
            printf( "Failed allocating output stream\n");  
            ret = AVERROR_UNKNOWN;  
            goto end;  
        }  
        //复制AVCodecContext的设置(Copy the settings of AVCodecContext)  
        ret = avcodec_copy_context(out_stream->codec, in_stream->codec);  
        if (ret < 0) {  
            printf( "Failed to copy context from input to output stream codec context\n");  
            goto end;  
        }  
        out_stream->codec->codec_tag = 0;  
        if (ofmt_ctx->oformat->flags & AVFMT_GLOBALHEADER)  
            out_stream->codec->flags |= CODEC_FLAG_GLOBAL_HEADER;  
    }  
    //Dump Format------------------  
    av_dump_format(ofmt_ctx, 0, out_filename, 1);  
    //打开输出URL(Open output URL)  
    if (!(ofmt->flags & AVFMT_NOFILE)) {  
        ret = avio_open(&ofmt_ctx->pb, out_filename, AVIO_FLAG_WRITE);  
        if (ret < 0) {  
            printf( "Could not open output URL '%s'", out_filename);  
            goto end;  
        }  
    }  
    //写文件头(Write file header)  
    ret = avformat_write_header(ofmt_ctx, NULL);  
    if (ret < 0) {  
        printf( "Error occurred when opening output URL\n");  
        goto end;  
    }  

    start_time=av_gettime();  
    while (1) {  
        AVStream *in_stream, *out_stream;  
        //获取一个AVPacket(Get an AVPacket)  
        ret = av_read_frame(ifmt_ctx, &pkt);  
        if (ret < 0)  
            break;  
        //FIX:No PTS (Example: Raw H.264)  
        //Simple Write PTS  
        if(pkt.pts==AV_NOPTS_VALUE){  
            //Write PTS  
            AVRational time_base1=ifmt_ctx->streams[videoindex]->time_base;  
            //Duration between 2 frames (us)  
            int64_t calc_duration=(double)AV_TIME_BASE/av_q2d(ifmt_ctx->streams[videoindex]->r_frame_rate);  
            //Parameters  
            pkt.pts=(double)(frame_index*calc_duration)/(double)(av_q2d(time_base1)*AV_TIME_BASE);  
            pkt.dts=pkt.pts;  
            pkt.duration=(double)calc_duration/(double)(av_q2d(time_base1)*AV_TIME_BASE);  
        }  
        //Important:Delay  
        if(pkt.stream_index==videoindex){  
            AVRational time_base=ifmt_ctx->streams[videoindex]->time_base;  
            AVRational time_base_q={1,AV_TIME_BASE};  
            int64_t pts_time = av_rescale_q(pkt.dts, time_base, time_base_q);  
            int64_t now_time = av_gettime() - start_time;  
            if (pts_time > now_time)  
                av_usleep(pts_time - now_time);  

        }  

        in_stream  = ifmt_ctx->streams[pkt.stream_index];  
        out_stream = ofmt_ctx->streams[pkt.stream_index];  
        /* copy packet */  
        //转换PTS/DTS(Convert PTS/DTS)  
        pkt.pts = av_rescale_q_rnd(pkt.pts, in_stream->time_base, out_stream->time_base, (AVRounding)(AV_ROUND_NEAR_INF|AV_ROUND_PASS_MINMAX));  
        pkt.dts = av_rescale_q_rnd(pkt.dts, in_stream->time_base, out_stream->time_base, (AVRounding)(AV_ROUND_NEAR_INF|AV_ROUND_PASS_MINMAX));  
        pkt.duration = av_rescale_q(pkt.duration, in_stream->time_base, out_stream->time_base);  
        pkt.pos = -1;  
        //Print to Screen  
        if(pkt.stream_index==videoindex){  
            printf("Send %8d video frames to output URL\n",frame_index);  
            frame_index++;  
        }  
        //ret = av_write_frame(ofmt_ctx, &pkt);  
        ret = av_interleaved_write_frame(ofmt_ctx, &pkt);  

        if (ret < 0) {  
            printf( "Error muxing packet\n");  
            break;  
        }  

        av_free_packet(&pkt);  

    }  
    //写文件尾(Write file trailer)  
    av_write_trailer(ofmt_ctx);  
end:  
    avformat_close_input(&ifmt_ctx);  
    /* close output */  
    if (ofmt_ctx && !(ofmt->flags & AVFMT_NOFILE))  
        avio_close(ofmt_ctx->pb);  
    avformat_free_context(ofmt_ctx);  
    if (ret < 0 && ret != AVERROR_EOF) {  
        printf( "Error occurred.\n");  
        return -1;  
    }  
    return 0;  
}