本文共 4780 字,大约阅读时间需要 15 分钟。
以下是一个优化后的技术文档版本:
本文将介绍如何使用 ffmpeg 实现视频流的推流:读取本地视频文件,并将其封装为 FLV 格式通过 RTMP 协议传输到流媒体服务器(输出地址由代码中的 out_filename 指定)。
#include#include #include #include #define __STDC_CONSTANT_MACROS extern "C"int main(int argc, char* argv[]) { AVOutputFormat* ofmt = NULL; AVFormatContext* ifmt_ctx = NULL; AVFormatContext* ofmt_ctx = NULL; AVPacket pkt; const char* in_filename = "./video/input.mp4"; const char* out_filename = "rtmp://localhost/dash/test"; // 初始化 ffmpeg 库 av_register_all(); avformat_network_init(); // 打开输入文件 if ((ret = avformat_open_input(&ifmt_ctx, in_filename, 0, 0)) < 0) { printf("Could not open input file."); goto end; } // 获取流信息 if ((ret = avformat_find_stream_info(ifmt_ctx, 0)) < 0) { printf("Failed to retrieve input stream information."); goto end; } // 确定视频流索引 videoindex = -1; for (i = 0; i < ifmt_ctx->nb_streams; i++) { if (ifmt_ctx->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO) { videoindex = i; break; } } // 输出子递交综述 av_dump_format(ifmt_ctx, 0, in_filename, 0); // 创建输出流上下文 if ((ret = avformat_alloc_output_context2(&ofmt_ctx, NULL, "flv", out_filename)) < 0) { printf("Could not create output context."); ret = AVERROR_UNKNOWN; goto end; } ofmt = ofmt_ctx->oformat; // 创建输出流 for (i = 0; i < ifmt_ctx->nb_streams; i++) { AVStream* in_stream = ifmt_ctx->streams[i]; AVStream* out_stream = avformat_new_stream(ofmt_ctx, in_stream->codec->codec); if (!out_stream) { printf("Failed allocating output stream."); ret = AVERROR_UNKNOWN; goto end; } // 复制编码上下文 ret = avcodec_copy_context(out_stream->codec, in_stream->codec); if (ret < 0) { printf("Failed to copy context from input to output stream codec context."); goto end; } out_stream->codec->codec_tag = 0; if (ofmt_ctx->oformat->flags & AVFMT_GLOBALHEADER) { out_stream->codec->flags |= CODEC_FLAGS_GLOBAL_HEADER; } } // 写出文件头信息 av_dump_format(ofmt_ctx, 0, out_filename, 1); // 打开输出 URL if (!(ofmt->flags & AVFMT_NOFILE)) { ret = avio_open(&ofmt_ctx->pb, out_filename, AVIO_WRITE); if (ret < 0) { printf("Could not open output URL '%s'", out_filename); goto end; } } // 写出文件头部 ret = avformat_write_header(ofmt_ctx, NULL); if (ret < 0) { 
printf("Error occurred when opening output URL."); goto end; } // 视频流时间戳处理 AVRational time_base = ifmt_ctx->streams[videoindex]->time_base; start_time = av_gettime(); while (1) { // 读取流数据 ret = av_read_frame(ifmt_ctx, &pkt); if (ret < 0) break; // 处理时间戳 if (pkt.pts == AV_NOPTS_VALUE) { AVRational time_base1 = ifmt_ctx->streams[videoindex]->time_base; int64_t calc_duration = (double) AV_TIME_BASE / av_q2d(time_base1); pkt.pts = (double)(frame_index * calc_duration) / av_q2d(time_base1) * AV_TIME_BASE; pkt.dts = pkt.pts; pkt.duration = calc_duration / av_q2d(time_base1) * AV_TIME_BASE; } // 视频流延迟处理 if (pkt.stream_index == videoindex) { AVRational time_base = ifmt_ctx->streams[videoindex]->time_base; int64_t pts_time = av_rescale_q(pkt.dts, time_base, {1, AV_TIME_BASE}); int64_t now_time = av_gettime() - start_time; if (pts_time > now_time) { av_usleep(pts_time - now_time); } } // 处理流数据 in_stream = ifmt_ctx->streams[pkt.stream_index]; out_stream = ofmt_ctx->streams[pkt.stream_index]; pkt.pts = av_rescale_q_rnd(pkt.pts, in_stream->time_base, out_stream->time_base, AV_ROUND_NEAR_INF | AV_ROUND_PASS_MINMAX); pkt.dts = av_rescale_q_rnd(pkt.dts, in_stream->time_base, out_stream->time_base, AV_ROUND_NEAR_INF | AV_ROUND_PASS_MINMAX); pkt.duration = av_rescale_q(pkt.duration, in_stream->time_base, out_stream->time_base); if (ret = av_interleaved_write_frame(ofmt_ctx, &pkt)) { frame_index++; } else { printf("Error muxing packet."); break; } av_free_packet(&pkt); } av_write_trailer(ofmt_ctx);end:avformat_close_input(&ifmt_ctx);if (ofmt_ctx && !(ofmt->flags & AVFMT_NOFILE)) { avio_close(ofmt_ctx->pb);}avformat_free_context(ofmt_ctx);if (ret < 0 && ret != AVERROR_EOF) { printf("Error occurred."); return -1;}return 0;
实际推流前,请确认 RTMP 服务器已启动,并且推流目标地址已在 out_filename 中正确配置。转载地址:http://qvzmz.baihongyu.com/