/**
 * Video stream decoding engine
 */
#include "VideoDecodeEngine.h"

using namespace std;
using namespace cv;
using namespace ai_matrix;

VideoDecodeEngine::VideoDecodeEngine() {}

VideoDecodeEngine::~VideoDecodeEngine() {}

APP_ERROR VideoDecodeEngine::Init()
{
    auto vecDataSourceConfig = Config::getins()->getAllDataSourceConfig();
    if (vecDataSourceConfig.size() <= this->engineId_) {
        LogWarn << " -- " << engineName_ << "_" << engineId_ << " dataSource not set, Engine DeInit";
        return APP_ERR_OK;
    }
    dataSourceConfig_ = vecDataSourceConfig.at(engineId_);
    strPort0_ = engineName_ + "_" + std::to_string(engineId_) + "_0";
    LogInfo << "VideoDecodeEngine Init ok";
    return APP_ERR_OK;
}

APP_ERROR VideoDecodeEngine::DeInit()
{
    if (harddecoder_ != nullptr) {
        harddecoder_->hardDecoderDeInit();
        delete harddecoder_;
        harddecoder_ = nullptr;
    }
    LogInfo << "VideoDecodeEngine DeInit ok";
    return APP_ERR_OK;
}

APP_ERROR VideoDecodeEngine::Process()
{
    int iRet = APP_ERR_OK;
    uint32_t iSkipCount = 1;
    while (!isStop_) {
        // Receive one frame of encoded data from the previous engine
        std::shared_ptr<void> pVoidData0 = nullptr;
        inputQueMap_[strPort0_]->pop(pVoidData0);
        if (nullptr == pVoidData0) {
            std::this_thread::sleep_for(std::chrono::milliseconds(10));
            continue;
        }
        // The queue element type is assumed to be ProcessData (declared in the project headers)
        std::shared_ptr<ProcessData> pProcessData = std::static_pointer_cast<ProcessData>(pVoidData0);
        if (pProcessData->bIsEnd) {
            // Forward the end-of-stream marker downstream unchanged
            iRet = outputQueMap_[strPort0_]->push(std::static_pointer_cast<void>(pProcessData));
            if (iRet != APP_ERR_OK) {
                LogError << "push the decoded yuv420 frame data failed...";
            }
            continue;
        }

        // Lazily create the hardware decoder on the first frame
        if (harddecoder_ == nullptr) {
            harddecoder_ = new HardDecode;
            int iInitRet = harddecoder_->hardDecoderInit(pProcessData->dataSourceInfo.iWidth,
                                                         pProcessData->dataSourceInfo.iHeight,
                                                         pProcessData->dataSourceInfo.iRate,
                                                         pProcessData->pCodecParameters_);
            if (!iInitRet) {
                LogError << " hardware decoder init failed!";
                delete harddecoder_;
                harddecoder_ = nullptr;
                continue;
            }
        }

        // Allocate the output buffer for one YUV420 (I420) frame: width * height * 3 / 2 bytes
        unsigned int pYUV420MBuffer_Size = pProcessData->dataSourceInfo.iWidth * pProcessData->dataSourceInfo.iHeight * 3 / 2;
        void *pYUV420MBuffer = new uint8_t[pYUV420MBuffer_Size];
        // Manage the buffer with a shared_ptr; the custom deleter releases the byte array
        std::shared_ptr<void> pYUVData(pYUV420MBuffer, [](void *data) { delete[] static_cast<uint8_t *>(data); });

        // Point the packet at one complete H264/H265 frame ...
        harddecoder_->pPacket_->data = static_cast<uint8_t *>(pProcessData->sourceFrameData.pData.get());
        // ... and set the size of that frame
        harddecoder_->pPacket_->size = pProcessData->sourceFrameData.iSize;
        int iDecodeRet = harddecoder_->hardDecoder(pYUV420MBuffer, &pYUV420MBuffer_Size);
        if (iDecodeRet) {
            // Frame skipping: only every iSkipInterval-th decoded frame is processed
            if (iSkipCount++ % dataSourceConfig_.iSkipInterval != 0) {
                continue;
            }
            iSkipCount = 1;

            // Convert the decoded YUV (I420) frame to BGR
            cv::Mat matYUV(pProcessData->dataSourceInfo.iHeight * 3 / 2, pProcessData->dataSourceInfo.iWidth, CV_8UC1);
            memcpy(matYUV.data, static_cast<uint8_t *>(pYUVData.get()), pYUV420MBuffer_Size);
            cv::Mat matBGR(pProcessData->dataSourceInfo.iHeight, pProcessData->dataSourceInfo.iWidth, CV_8UC3);
            cv::cvtColor(matYUV, matBGR, cv::COLOR_YUV2BGR_I420);
            cv::resize(matBGR, matBGR, cv::Size(IMAGE_WIDTH, IMAGE_HEIGHT));

            // Copy the resized BGR frame into a buffer owned by the process data
            unsigned int iResizeSize = IMAGE_WIDTH * IMAGE_HEIGHT * 3;
            void *pResizeBGRBuffer = new uint8_t[iResizeSize];
            memcpy(pResizeBGRBuffer, matBGR.data, iResizeSize);
            pProcessData->sourceFrameData.pData.reset(pResizeBGRBuffer, [](void *data) { delete[] static_cast<uint8_t *>(data); });
            pProcessData->sourceFrameData.iSize = iResizeSize;
            pProcessData->dataSourceInfo.iWidth = IMAGE_WIDTH;
            pProcessData->dataSourceInfo.iHeight = IMAGE_HEIGHT;

            // Optional debug dump of the resized frame as JPEG:
            // std::vector<int> compression_params;
            // compression_params.push_back(cv::IMWRITE_JPEG_QUALITY);  // use JPEG
            // compression_params.push_back(100);                      // image quality
            // cv::imwrite("./jpg/" + std::to_string(pProcessData->sourceFrameData.i64TimeStamp) + ".jpg", matBGR, compression_params);

            iRet = outputQueMap_[strPort0_]->push(std::static_pointer_cast<void>(pProcessData), true);
            if (iRet != APP_ERR_OK) {
                // LogError << "push the decoded yuv420 frame data failed...";
            }
        } else {
            LogError << "dataSource: " << pProcessData->iDataSource << " hardware decode failed... return code: " << iDecodeRet;
        }
    }
    return APP_ERR_OK;
}
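
// ----------------------------------------------------------------------------
// Minimal usage sketch (illustrative only; it assumes the engine base class has
// already created the input/output queues keyed by strPort0_ and sets isStop_
// to end the loop -- these hooks are not shown in this file):
//
//     VideoDecodeEngine engine;                        // engineId_/engineName_ assigned by the framework
//     engine.Init();                                    // binds the data-source config for this engine id
//     std::thread worker(&VideoDecodeEngine::Process, &engine);
//     // ... upstream engine feeds H264/H265 frames into the input queue ...
//     worker.join();                                    // after isStop_ has been set
//     engine.DeInit();                                  // releases the HardDecode instance
// ----------------------------------------------------------------------------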