#include "CVideoSource.h" #include #include #include extern "C" { #include "libavcodec/avcodec.h" #include "libavformat/avformat.h" #include "libavutil/frame.h" #include "libavcodec/bsf.h" }; #include "CSyncProc.h" #include "CVideoDataManager.h" #pragma comment(lib, "avcodec.lib") #pragma comment(lib, "avformat.lib") #pragma comment(lib, "avutil.lib") #pragma comment(lib, "avdevice.lib") #pragma comment(lib, "avfilter.lib") #pragma comment(lib, "postproc.lib") #pragma comment(lib, "swresample.lib") #pragma comment(lib, "swscale.lib") AVHWDeviceType CVideoSource::m_HardType = AV_HWDEVICE_TYPE_NONE; AVPixelFormat CVideoSource::m_pixelFormat = AV_PIX_FMT_NONE; bool CVideoSource::m_bUseHardDecode = true; CVideoSource::CVideoSource() { } CVideoSource::~CVideoSource() { Clear(); } bool CVideoSource::IsHardDecode() { //return false; if (!m_bUseHardDecode) { return false; } //已经检测过 if (m_HardType != AV_HWDEVICE_TYPE_NONE) { return true; } m_HardType = av_hwdevice_find_type_by_name("cuda"); if (m_HardType != AV_HWDEVICE_TYPE_NONE) { return true; } while ((m_HardType = av_hwdevice_iterate_types(m_HardType)) != AV_HWDEVICE_TYPE_NONE) { return true; } //m_HardType = AV_HWDEVICE_TYPE_VULKAN; /*AV_HWDEVICE_TYPE_VDPAU, AV_HWDEVICE_TYPE_CUDA, AV_HWDEVICE_TYPE_VAAPI, AV_HWDEVICE_TYPE_DXVA2, AV_HWDEVICE_TYPE_QSV, AV_HWDEVICE_TYPE_VIDEOTOOLBOX, AV_HWDEVICE_TYPE_D3D11VA, AV_HWDEVICE_TYPE_DRM, AV_HWDEVICE_TYPE_OPENCL, AV_HWDEVICE_TYPE_MEDIACODEC, AV_HWDEVICE_TYPE_VULKAN,*/ return false; } AVPixelFormat CVideoSource::GetpPixelFormat(AVCodecContext* ctx, const enum AVPixelFormat* fmts) { const enum AVPixelFormat* p; for (p = fmts; *p != AV_PIX_FMT_NONE; p++) { if (*p == m_pixelFormat) { return *p; } } return AV_PIX_FMT_NONE; } bool CVideoSource::Init(const char * szUrl, CVideoDataManager * pDataManager) { m_pDataManager = pDataManager; auto memberFunc1 = std::bind(&CVideoDataManager::AlignRecvData, m_pDataManager); CSyncProc::getInstance().AddFun(this, memberFunc1); int ret; AVDictionary* options = NULL; av_dict_set(&options, "rtsp_transport", "tcp", 0); av_dict_set(&options, "probesize", "10240", 0); if (avformat_open_input(&m_pFormatCtx, szUrl, NULL, &options) != 0) { return false; } if ((ret = avformat_find_stream_info(m_pFormatCtx, NULL)) < 0) { return false; } // 最后一个参数目前未定义,填写0 即可 // 找到指定流类型的流信息,并且初始化codec(如果codec没有值) AVCodec *decoder = nullptr; if ((ret = av_find_best_stream(m_pFormatCtx, AVMEDIA_TYPE_VIDEO, -1, -1, (const AVCodec **)&decoder, 0)) < 0) { return false; } m_nVideoIndex = ret; AVCodec* decoderVideo = nullptr; AVBufferRef* hw_device_ctx = NULL; if (IsHardDecode()) { // 根据解码器获取支持此解码方式的硬件加速计 /** 所有支持的硬件解码器保存在AVCodec的hw_configs变量中。对于硬件编码器来说又是单独的AVCodec */ if (m_pixelFormat == AV_PIX_FMT_NONE) { for (int i = 0;; i++) { const AVCodecHWConfig* hwcodec = avcodec_get_hw_config(decoder, i); if (hwcodec == NULL) break; // 可能一个解码器对应着多个硬件加速方式,所以这里将其挑选出来 if (hwcodec->methods & AV_CODEC_HW_CONFIG_METHOD_HW_DEVICE_CTX && hwcodec->device_type == m_HardType) { m_pixelFormat = hwcodec->pix_fmt; break; } } } if ((m_pCodecContextVideo = avcodec_alloc_context3(decoder)) == NULL) { return false; } AVStream* video_stream = m_pFormatCtx->streams[m_nVideoIndex]; // 给解码器赋值解码相关参数 if (avcodec_parameters_to_context(m_pCodecContextVideo, video_stream->codecpar) < 0) { return false; } // 配置获取硬件加速器像素格式的函数;该函数实际上就是将AVCodec中AVHWCodecConfig中的pix_fmt返回 //m_pCodecContext->get_format = &CFfmpegProc::GetpPixelFormat; // 创建硬件加速器的缓冲区 if (ret = av_hwdevice_ctx_create(&hw_device_ctx, m_HardType, NULL, NULL, 0) < 0) { return false; } 
        /* With software decoding a default frame buffer (for retrieving AVFrames) already exists;
         * hardware decoding needs an additional hardware frame buffer, hw_frames_ctx. If it is not
         * created manually, avcodec_send_packet() creates one internally, but the device reference
         * hw_device_ctx (an AVBufferRef) must be assigned manually.
         * See the DecodePacketSketch example at the end of this file for how frames come back. */
        // When hw_device_ctx is set, hardware decoding is used.
        m_pCodecContextVideo->hw_device_ctx = av_buffer_ref(hw_device_ctx);
        av_buffer_unref(&hw_device_ctx);
        // Initialize and open the decoder context.
        if (avcodec_open2(m_pCodecContextVideo, decoder, NULL) < 0)
        {
            return false;
        }
    }
    else
    {
        decoderVideo = (AVCodec*)avcodec_find_decoder(m_pFormatCtx->streams[m_nVideoIndex]->codecpar->codec_id);
        if (decoderVideo == nullptr)
        {
            return false;
        }
        m_pCodecContextVideo = avcodec_alloc_context3(decoderVideo);
        //AV_CODEC_ID_PCM_ALAW
        avcodec_parameters_to_context(m_pCodecContextVideo, m_pFormatCtx->streams[m_nVideoIndex]->codecpar);
        // Open the decoder.
        if (avcodec_open2(m_pCodecContextVideo, decoderVideo, NULL) < 0)
        {
            return false;
        }
    }
    m_nWidth = m_pFormatCtx->streams[m_nVideoIndex]->codecpar->width;
    m_nHeight = m_pFormatCtx->streams[m_nVideoIndex]->codecpar->height;
    m_eVideoCodec = m_pFormatCtx->streams[m_nVideoIndex]->codecpar->codec_id;
    InitAudio();
    _beginthreadex(nullptr, 0, &CVideoSource::RecvThread, this, 0, 0);
    return true;
}

void CVideoSource::Clear()
{
    if (m_pFormatCtx != nullptr)
    {
        avformat_close_input(&m_pFormatCtx);
        m_pFormatCtx = nullptr;
    }
    if (m_pCodecContextVideo != nullptr)
    {
        avcodec_free_context(&m_pCodecContextVideo);
    }
    if (m_pCodecContextAudio != nullptr)
    {
        avcodec_free_context(&m_pCodecContextAudio);
    }
}

unsigned __stdcall CVideoSource::RecvThread(void* param)
{
    CVideoSource* p = (CVideoSource*)param;
    p->RecvThreadProcessor();
    return 0;
}

void CVideoSource::RecvThreadProcessor()
{
    // Keep pulling packets from the stream.
    while (true)
    {
        RecvFrameSync();
    }
}

bool CVideoSource::InitAudio()
{
    int ret;
    AVCodec* decoderAudio = nullptr;
    if ((ret = av_find_best_stream(m_pFormatCtx, AVMEDIA_TYPE_AUDIO, -1, -1, (const AVCodec**)&decoderAudio, 0)) < 0)
    {
        return false;
    }
    m_nAudioIndex = ret;
    decoderAudio = (AVCodec*)avcodec_find_decoder(m_pFormatCtx->streams[m_nAudioIndex]->codecpar->codec_id);
    if (decoderAudio == nullptr)
    {
        return false;
    }
    m_pCodecContextAudio = avcodec_alloc_context3(decoderAudio);
    //AV_CODEC_ID_PCM_ALAW
    avcodec_parameters_to_context(m_pCodecContextAudio, m_pFormatCtx->streams[m_nAudioIndex]->codecpar);
    // Open the decoder.
    if (avcodec_open2(m_pCodecContextAudio, decoderAudio, NULL) < 0)
    {
        return false;
    }
    return true;
}

int CVideoSource::RecvFrameSync()
{
    int nRet = 0;
    AVPacket* packet = av_packet_alloc();
    nRet = av_read_frame(m_pFormatCtx, packet);
    if (nRet < 0)
    {
        av_packet_free(&packet);
        return RECV_FAILED;
    }
    if (packet->stream_index != m_nVideoIndex)
    {
        AudioProc(packet);
        av_packet_free(&packet);
        return RECV_SUCCESS_NOTPROC;
    }
    // The video packet is handed over to the data manager, which is expected to free it.
    m_pDataManager->AddRecvData(packet->pts, packet, packet->flags & AV_PKT_FLAG_KEY);
    //av_packet_free(&packet);
    return RECV_SUCCESS;
}

int CVideoSource::AudioProc(AVPacket* pktAudio)
{
    int nRet = 0;
    avcodec_send_packet(m_pCodecContextAudio, pktAudio);
    AVFrame* pFramAudio = av_frame_alloc();
    nRet = avcodec_receive_frame(m_pCodecContextAudio, pFramAudio);
    if (nRet < 0)
    {
        av_frame_free(&pFramAudio);
        return RECV_FAILED;
    }
    // Walk the interleaved signed 16-bit samples in data[0] and sum their magnitudes
    // (see ComputeS16Energy at the end of this file). Note that for packed formats
    // linesize[0] already spans all channels, so multiplying by channels may over-count.
    int32_t nSum = 0;
    for (int i = 0; i < pFramAudio->linesize[0] * pFramAudio->channels; i = i + 2)
    {
        int16_t word16 = 0;
        word16 = (int16_t)(pFramAudio->data[0][i + 1] << 8);
        word16 += (pFramAudio->data[0][i]);
        if (word16 < 0)
        {
            // abs() followed by the two's-complement negation restores the original
            // negative value; only its magnitude is accumulated below.
            word16 = abs(word16);
            word16 = ~word16 + 1;
            //word16 |= 0x8000;
        }
        //OutputDebugPrintf("VideoPlayer AudioProc word16 1--- %d", word16);
        //word16 = bit_reverse(word16);
        nSum += abs(word16);
    }
    av_frame_free(&pFramAudio);
    // Repeated measurements showed that an energy value above 80000 reliably indicates a sync signal.
    if (nSum < 80000)
    {
        return RECV_SUCCESS;
    }
    double pts = 0;
    pts = av_q2d(m_pCodecContextAudio->time_base) * pktAudio->pts;
    // If another audio sync signal appears within 3 seconds, treat it as the same signal and ignore it.
    if (pts - m_curAutdioPts < 3)
    {
        return RECV_SUCCESS;
    }
    m_curAutdioPts = pts;
    m_pDataManager->SetAutdioPts(m_curAutdioPts);
    CSyncProc::getInstance().UpdateTimeQueue(m_pDataManager, m_curAutdioPts);
    return RECV_SUCCESS;
}
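
/*
 * A minimal sketch, not part of the original class, of how the decoder context prepared
 * in Init() could be driven once packets are pulled back out of CVideoDataManager:
 * send the packet, receive a frame, and, when the frame lives in GPU memory, copy it to
 * system memory with av_hwframe_transfer_data(). The function name DecodePacketSketch is
 * hypothetical; installing the get_format callback (commented out in Init()) would also
 * require GetpPixelFormat to be declared static in CVideoSource.h.
 */
static bool DecodePacketSketch(AVCodecContext* codecCtx, AVPacket* packet, AVFrame* outFrame)
{
    if (avcodec_send_packet(codecCtx, packet) < 0)
    {
        return false;
    }
    AVFrame* frame = av_frame_alloc();
    if (avcodec_receive_frame(codecCtx, frame) < 0)
    {
        av_frame_free(&frame);
        return false;
    }
    if (frame->hw_frames_ctx != nullptr)
    {
        // Hardware frame (e.g. CUDA): transfer the pixel data to a system-memory frame.
        if (av_hwframe_transfer_data(outFrame, frame, 0) < 0)
        {
            av_frame_free(&frame);
            return false;
        }
    }
    else
    {
        // Software frame: hand the reference over directly.
        av_frame_move_ref(outFrame, frame);
    }
    av_frame_free(&frame);
    return true;
}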
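
/*
 * The loop in AudioProc() above computes the frame's energy as the sum of absolute sample
 * values of interleaved signed 16-bit little-endian PCM. The sketch below is not part of
 * the original class; it restates that computation as a standalone helper (the name
 * ComputeS16Energy is hypothetical) to make the sign handling explicit.
 */
static int64_t ComputeS16Energy(const uint8_t* data, int nbBytes)
{
    int64_t nSum = 0;
    for (int i = 0; i + 1 < nbBytes; i += 2)
    {
        // Reassemble the little-endian 16-bit sample and accumulate its magnitude.
        int16_t sample = (int16_t)((data[i + 1] << 8) | data[i]);
        nSum += std::abs((int)sample);
    }
    return nSum;
}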