// NOTE(review): removed a scraped web-page artifact ("code pull finished, page will refresh") — it was not part of the source file and does not compile.
#include "ffmpegopt.h"
#include <QDebug>
#include"qtlog.h"
#include<QVariant>
#include<QMap>
#include<QJsonDocument>
#include<QDateTime>
#include<QFile>
FFmpegOpt::FFmpegOpt(QObject *parent) : QObject(parent)
{
    // Create the worker helpers used by the player.
    // NOTE(review): all but m_audio are created without a QObject parent and
    // are not deleted in the destructor — confirm their ownership elsewhere,
    // otherwise this leaks.
    m_videoDecod = new VideoDecode(nullptr);
    m_audioDec   = new AudioDec(nullptr);
    m_audio      = new AudioDecode(this);
    m_readPk     = new ReadPaket(nullptr);

    // Route decoded PCM notifications to the audio output object.
    connect(this, &FFmpegOpt::sig_audio, m_audio, &AudioDecode::s_audio);

    // Start with an empty packet queue.
    m_gList.clear();
}
QString FFmpegOpt::getAvConfig()
{
    // Report the build-time configuration string of the linked FFmpeg.
    const char *cfg = avcodec_configuration();
    return QString(cfg);
}
// Destructor: stops playback and closes the demuxer context.
// NOTE(review): the raw-new members created in the constructor
// (m_videoDecod, m_audioDec, m_readPk) are not deleted here — confirm they
// are owned/stopped elsewhere, otherwise this leaks and may leave worker
// threads running against a closed context.
FFmpegOpt::~FFmpegOpt()
{
clearFFmpeg();
}
void FFmpegOpt::clearFFmpeg()
{
    // Stop playback and close the input; avformat_close_input() also
    // resets m_afc to nullptr, so a later openVideo() starts clean.
    QMutexLocker locker(&m_mutex);
    m_isPlay = false;
    if (m_afc != nullptr) {
        avformat_close_input(&m_afc);
    }
}
QVariantMap FFmpegOpt::openVideo(QString strPath)
{
    // Open a media file or URL, open decoders for the audio/video streams,
    // then start the read/decode worker threads.
    // Returns a map whose "nRet" key holds the result of the last FFmpeg
    // open/probe call (negative on failure).
    clearFFmpeg();
    QVariantMap a;
    avformat_network_init();
    // Optional demuxer options, kept for reference:
    // AVDictionary* avdic = nullptr;
    // av_dict_set(&avdic, "buffer_size", "108000", 0);  // larger cache, raise for 1080p
    // av_dict_set(&avdic, "rtmp_transport", "udp", 0);  // open over udp; use "tcp" for tcp
    // av_dict_set(&avdic, "stimeout", "2000000", 0);    // connect timeout in microseconds
    // av_dict_set(&avdic, "max_delay", "500000", 0);    // maximum delay
    int nRet = avformat_open_input(&m_afc, strPath.toStdString().c_str(), 0, nullptr);
    a["nRet"] = nRet;
    if (nRet < 0)
    {
        qDLog() << QString::fromLocal8Bit("找不到视频文件");
        return a;
    }
    else {
        qDLog() << QString::fromLocal8Bit("视频打开成功");
    }
    nRet = avformat_find_stream_info(m_afc, nullptr);
    a["nRet"] = nRet; // fix: previously the caller still saw the stale success code on this failure
    if (nRet < 0)
    {
        qDLog() << "avformat_find_stream_info: faile" << nRet;
        return a;
    }
    // fix: reset the stream indices so indices from a previously opened file
    // cannot leak into this one when a stream type is missing.
    m_videoStream = -1;
    m_audioStream = -1;
    // Find and open a decoder for each stream (this configures the decoder
    // contexts stored inside m_afc).
    for (unsigned int i = 0; i < m_afc->nb_streams; i++) // fix: unsigned index, nb_streams is unsigned
    {
        // Per-stream decoder context (deprecated streams[i]->codec API,
        // consistent with the rest of this file).
        AVCodecContext *acc = m_afc->streams[i]->codec;
        if (acc->codec_type == AVMEDIA_TYPE_VIDEO) // video stream
        {
            AVCodec *codec = avcodec_find_decoder(acc->codec_id); // software decoder
            // AVCodec *codec = avcodec_find_decoder_by_name("h264_qsv"); // hw decode: h264_cuvid / h264_qsv
            if (!codec) {
                qDLog() << QString::fromLocal8Bit("没有该视频类型解码器");
                continue;
            }
            int ret = avcodec_open2(acc, codec, nullptr); // open the decoder
            if (ret != 0) {
                qDLog() << QString::fromLocal8Bit("打开视频解码器失败:") << getFFmpegError(ret);
                continue;
            }
            m_videoStream = i;
            qDLog() << QString::fromLocal8Bit("视频解码器打开成功") << m_videoStream;
        } else if (acc->codec_type == AVMEDIA_TYPE_AUDIO) {
            AVCodec *codec = avcodec_find_decoder(acc->codec_id); // audio decoder
            if (!codec) {
                qDLog() << QString::fromLocal8Bit("没有该音频类型解码器");
                continue;
            }
            int ret = avcodec_open2(acc, codec, nullptr);
            if (ret != 0) {
                qDLog() << QString::fromLocal8Bit("打开音频解码器失败:") << getFFmpegError(ret);
                continue;
            }
            m_audioStream = i;
            // Configure the audio output for S16 interleaved at the stream's rate.
            m_audio->setSample(acc->sample_rate,
                               8 * av_get_bytes_per_sample(AV_SAMPLE_FMT_S16),
                               av_get_channel_layout_nb_channels(acc->channel_layout));
            m_audio->Start();
            qDLog() << QString::fromLocal8Bit("音频解码器打开成功") << m_audioStream << ":" << acc->sample_fmt;
        }
    }
    // No playable video stream: report what we have without starting workers.
    if (m_videoStream < 0)
        return a;
    m_isPlay = true;
    m_readPk->startRead(this);
    m_videoDecod->startVideo(this);
    m_audioDec->startAudio(this);
    return a;
}
void FFmpegOpt::openUrlVideo(QString url)
{
    // Lazily create the network-stream player and forward its decoded
    // frames through our own sig_play signal, then start playback.
    if (m_netStream == nullptr) {
        m_netStream = new NetStreamVideo();
        connect(m_netStream, &NetStreamVideo::sig_imageFrame,
                this, &FFmpegOpt::sig_play);
    }
    m_netStream->openUrlVideo(url);
    m_netStream->playVideo(true);
}
QString FFmpegOpt::getFFmpegError(int ret)
{
    // Translate an FFmpeg error code into a human-readable message.
    char message[1024] = { 0 };
    av_strerror(ret, message, sizeof(message));
    return QString(message);
}
int FFmpegOpt::ReadFrame()
{
QMutexLocker lock(&m_mutex);
if(m_gList.size() > 256){
qDLog()<<m_gList.size();
return -2;
}
AVPacket pkt;
memset(&pkt, 0, sizeof(AVPacket));
if (!m_afc)
return 0;
int err = av_read_frame(m_afc, &pkt); //这样看来读取的一帧数据里面包含了所有的数据,音视频数据和字幕等数据其它数据
if (err < 0 )
{
qDLog()<<"av_read_frame error" <<err;
}else{
if(pkt.size>0 && (pkt.stream_index == m_audioStream || pkt.stream_index == m_videoStream))
m_gList.append(pkt);
}
return pkt.size;
}
bool FFmpegOpt::decodeAudio()
{
if(m_gList.size() == 0){
return false;
}
m_mutex.lock();
QList<AVPacket> curList;
for(auto it : m_gList){
if(it.stream_index == m_audioStream){
curList.append(it);
}
}
m_mutex.unlock();
if(curList.size() == 0)
return false;
AVPacket *pkt = nullptr;
static AVFrame *frame = nullptr;
if(!frame)
frame = av_frame_alloc();
for(auto it:curList){
pkt = ⁢
int re = avcodec_send_packet(m_afc->streams[pkt->stream_index]->codec, pkt);
if (re != 0){
return true;
}
re = 0;
while (re == 0) {
re = avcodec_receive_frame(m_afc->streams[pkt->stream_index]->codec, frame);
if (re != 0){
break;
}
static qint64 m_cur = QDateTime::currentDateTime().toMSecsSinceEpoch();
qint64 tm = QDateTime::currentDateTime().toMSecsSinceEpoch();
qint64 curPts = frame->pts * av_q2d(m_afc->streams[m_audioStream]->time_base)*1000;
static qint64 dif = curPts;
qint64 streamDur = curPts - dif;
qint64 dur = tm - m_cur;
if(qAbs(streamDur -dur)>20 && streamDur > dur ){
QThread::msleep(streamDur-dur);
}
ToPCM(frame);
}
}
m_mutex.lock();
for(int j = 0 ;j < curList.size(); j++){
for(int i = 0 ; i < m_gList.size() ; i++){
if(m_gList[i].stream_index ==m_audioStream && m_gList[i].pos== curList[j].pos){
m_gList.removeAt(i);
av_packet_unref(&curList[j]);
break;
}
}
}
m_mutex.unlock();
}
int FFmpegOpt::ToPCM(AVFrame *frame)
{
    // Resample a decoded audio frame to interleaved stereo S16 PCM (same
    // sample rate) and push the bytes to the audio output.
    // Returns the number of converted samples per channel, or 0 on failure.
    AVCodecContext *ctx = m_afc->streams[m_audioStream]->codec;
    SwrContext *swr = nullptr;
    if (m_aCtx == nullptr)
    {
        // Lazily create the resampler: source layout/format -> stereo S16.
        swr = swr_alloc_set_opts(nullptr,
                                 AV_CH_LAYOUT_STEREO,
                                 AV_SAMPLE_FMT_S16,
                                 ctx->sample_rate,
                                 ctx->channel_layout,
                                 ctx->sample_fmt,
                                 ctx->sample_rate,
                                 0, 0);
        if (swr == nullptr) { // fix: the null check was inverted — it logged "failure" on success and fell through to swr_init(nullptr) on real failure
            qDLog() << QString::fromLocal8Bit("swr_alloc_set_opts 失败:");
            return 0;
        }
        int ret = swr_init(swr);
        if (ret != 0) {
            qDLog() << QString::fromLocal8Bit("swr_init 失败:") << getFFmpegError(ret);
            swr_free(&swr); // fix: don't leak the half-initialised context
            return 0;
        }
        m_aCtx = (void*)swr; // cache for subsequent frames
    } else {
        swr = (SwrContext *)m_aCtx;
    }
    if (!swr) {
        qDLog() << "swr";
        return 0;
    }
    int dst_linesize;
    int dst_nb_samples, max_dst_nb_samples;
    int dst_nb_channels = frame->channels;
    uint8_t **desData = nullptr;
    // Input and output rates are equal, so this is nb_samples rounded up.
    max_dst_nb_samples = dst_nb_samples = av_rescale_rnd(frame->nb_samples, ctx->sample_rate, ctx->sample_rate, AV_ROUND_UP);
    int ret = av_samples_alloc_array_and_samples(&desData, &dst_linesize, dst_nb_channels,
                                                 max_dst_nb_samples, AV_SAMPLE_FMT_S16, 0);
    if (ret < 0) // fix: the allocation result was ignored; writing through desData after a failed alloc is UB
        return 0;
    int lens = swr_convert(swr, desData, dst_nb_samples,
                           (const uint8_t**)(frame->data), frame->nb_samples);
    int dst_bufsize = av_samples_get_buffer_size(&dst_nb_samples, frame->channels,
                                                 lens, AV_SAMPLE_FMT_S16, 1);
    if (lens > 0 && dst_bufsize > 0)
        m_audio->Write((const char*)desData[0], dst_bufsize);
    if (desData)
        av_freep(&desData[0]);
    av_freep(&desData);
    return lens;
}
bool FFmpegOpt::decodeVideo()
{
if(m_gList.size() == 0){
return false;
}
m_mutex.lock();
QList<AVPacket> curList;
for(auto it : m_gList){
if(it.stream_index == m_videoStream){
curList.append(it);
}
}
m_mutex.unlock();
if(curList.size() == 0){
return false;
}
static AVFrame *frame = nullptr; //指针传值
if(!frame)
frame = av_frame_alloc();
AVPacket *pkt = nullptr;
for(auto it:curList){
pkt = ⁢
int re = avcodec_send_packet(m_afc->streams[pkt->stream_index]->codec, pkt);
if (re != 0){
return true;
}
re = avcodec_receive_frame(m_afc->streams[pkt->stream_index]->codec, frame);
if (re != 0){
return true;
}
static qint64 m_cur = QDateTime::currentDateTime().toMSecsSinceEpoch();
qint64 tm = QDateTime::currentDateTime().toMSecsSinceEpoch();
qint64 curPts = frame->pts * av_q2d(m_afc->streams[m_videoStream]->time_base)*1000;
static qint64 dif = curPts;
qint64 streamDur = curPts - dif;
qint64 dur = tm - m_cur;
if(qAbs(streamDur -dur)>20 && streamDur > dur ){
QThread::msleep(streamDur-dur);
}
YuvToRGB(frame,1920,1080);
}
m_mutex.lock();
for(auto it:curList){
for(int i = 0 ; i < m_gList.size() ; i++){
if(m_gList[i].stream_index == m_videoStream && m_gList[i].pos== it.pos){
m_gList.removeAt(i);
av_packet_unref(&it);
break;
}
}
}
m_mutex.unlock();
}
bool FFmpegOpt::YuvToRGB(AVFrame *frame, int outweight, int outheight)
{
    // Convert a decoded frame to BGRA and emit it as a QImage via sig_play.
    // NOTE(review): outweight/outheight are unused — the conversion keeps the
    // source dimensions; the parameters are preserved for interface
    // compatibility.
    AVCodecContext *videoCtx = m_afc->streams[m_videoStream]->codec;
    m_cCtx = sws_getCachedContext(m_cCtx, videoCtx->width, videoCtx->height,
                                  videoCtx->pix_fmt,                 // source pixel format
                                  videoCtx->width, videoCtx->height, // destination size
                                  AV_PIX_FMT_BGRA,                   // destination pixel format
                                  SWS_BICUBIC,                       // scaling algorithm
                                  nullptr, nullptr, nullptr);
    if (!m_cCtx) {
        return false;
    }
    // Reusable destination image.
    // fix: the old code allocated the buffer once with the FIRST video's size
    // and kept using it forever — opening a larger video later overflowed the
    // heap buffer. Reallocate whenever the dimensions change.
    static QImage *imge = nullptr;
    static uchar *buf = nullptr;
    static int bufW = 0, bufH = 0;
    if (!imge || bufW != videoCtx->width || bufH != videoCtx->height) {
        delete imge;
        delete[] buf;
        bufW = videoCtx->width;
        bufH = videoCtx->height;
        buf = new uchar[size_t(bufW) * size_t(bufH) * 4];
        imge = new QImage(buf, bufW, bufH, QImage::Format_ARGB32);
    }
    char *pbuf = (char*)imge->bits();
    uint8_t *data[AV_NUM_DATA_POINTERS] = { 0 };
    data[0] = (uint8_t *)pbuf;
    int linesize[AV_NUM_DATA_POINTERS] = { 0 };
    linesize[0] = videoCtx->width * 4; // BGRA = 4 bytes per pixel
    // sws_scale returns the height of the converted slice.
    int h = sws_scale(m_cCtx, frame->data, frame->linesize, 0, videoCtx->height,
                      data, linesize);
    if (h > 0)
        emit sig_play(*imge);
    return true;
}
bool FFmpegOpt::isPlay()
{
    // Whether a stream is currently open and the worker threads were started.
    return m_isPlay;
}
void FFmpegOpt::stopPlay()
{
    // Stop playback: clears the play flag and closes the demuxer context.
    clearFFmpeg();
}
void FFmpegOpt::seekPos(double pos)
{
    // Seek to a fractional position (pos in [0,1]) of the video stream and
    // flush the decoder so stale frames are not displayed.
    QMutexLocker lock(&m_mutex);
    // fix: guard against seeking before a file is opened (m_afc was
    // dereferenced unconditionally).
    if (!m_afc || m_videoStream < 0)
        return;
    // fix: the target timestamp must be in the units of the stream we seek on;
    // the old code scaled stream 0's duration but sought on m_videoStream.
    int64_t timestamp = pos * m_afc->streams[m_videoStream]->duration;
    // fix: AVSEEK_FLAG_FRAME (frame-number units) contradicted the pts-based
    // timestamp above; seek backward to the nearest keyframe instead.
    av_seek_frame(m_afc, m_videoStream, timestamp, AVSEEK_FLAG_BACKWARD);
    avcodec_flush_buffers(m_afc->streams[m_videoStream]->codec);
}
// Record the playback position (in seconds) of the given packet.
// NOTE(review): the audio stream's time_base is applied regardless of
// pkt->stream_index — confirm callers only pass audio packets, otherwise the
// position is scaled with the wrong time base. Also assumes m_afc is open.
void FFmpegOpt::DecodeFrame(const AVPacket *pkt)
{
QMutexLocker lock(&m_mutex);
m_pos = pkt->pts * av_q2d(m_afc->streams[m_audioStream]->time_base);
}
// NOTE(review): removed scraped web-page boilerplate (content-moderation notice) — it was not part of the source file and does not compile.