// Train_Identify/nvidia_ascend_engine/nvidia_engine/DataSourceEngine/VideoEngine.cpp
// 109 lines, 4.1 KiB, C++ — snapshot dated 2024-01-23 02:46:26 +00:00
#include "VideoEngine.h"
using namespace std;
using namespace cv;
using namespace ai_matrix;
// Default constructor: no state to initialize beyond member defaults.
VideoEngine::VideoEngine() = default;
// Destructor: resources are released in DeInit(); nothing owned here.
VideoEngine::~VideoEngine() = default;
/**
 * @brief Initialize the engine: build the output-port name, load this
 *        engine's camera configuration and set the working resolution.
 * @return APP_ERR_OK always.
 */
APP_ERROR VideoEngine::Init()
{
    LogInfo << "engineId_:" << engineId_ << " VideoEngine Init start";

    // Output port 0 name is "<engineName>_<engineId>_0".
    strPort0_ = engineName_ + "_" + std::to_string(engineId_) + "_0";

    // Fetch the camera parameters for this engine id from the config.
    dataSourceConfig_ = MyYaml::GetIns()->GetDataSourceConfigById(engineId_);

    width_ = IMAGE_WIDTH;
    height_ = IMAGE_HEIGHT;

    LogInfo << "engineId_:" << engineId_ << " VideoEngine Init ok";
    return APP_ERR_OK;
}
/**
 * @brief Tear down the engine. No resources are held directly by this
 *        class, so this only emits a shutdown log line.
 * @return APP_ERR_OK always.
 */
APP_ERROR VideoEngine::DeInit()
{
    LogInfo << "engineId_:" << engineId_ << " VideoEngine DeInit ok";
    return APP_ERR_OK;
}
/**
 * @brief Pull the camera's RTSP H.264 stream, decode frames to BGR through a
 *        GStreamer pipeline (nvv4l2decoder) and push each frame onto output
 *        port 0 as a FrameData.
 *
 * Retries the stream open once per second until it succeeds, then loops
 * until isStop_ is set.
 *
 * @return Status of the last queue push (APP_ERR_OK on clean shutdown).
 */
APP_ERROR VideoEngine::Process()
{
    int iRet = APP_ERR_OK;

    VideoCapture capture;
    /*****************************************************************************************
     GStreamer decoding.
     Candidate decoder plugins: 1. nvv4l2decoder  2. omxh264dec
     When decoding with nvv4l2decoder, both options below must be enabled for it
     to work:
       enable-max-performance=1
       enable-frame-type-reporting=1
    *****************************************************************************************/
    // Pull the RTSP stream from the camera (runtime pipeline string is unchanged).
    const std::string videoStreamAddress = std::string("rtspsrc location=") + dataSourceConfig_.strUrl.c_str() + " latency=10 ! \
rtph264depay ! h264parse ! nvv4l2decoder enable-max-performance=1 enable-frame-type-reporting=1 ! nvvidconv ! video/x-raw, format=(string)BGRx ! videoconvert ! appsink";

    // The camera may not be reachable yet: retry once per second until open.
    while (!capture.open(videoStreamAddress))
    {
        std::cerr << "Opening video stream or file failed!!!" << std::endl;
        std::cout << "Restart Opening video stream or file ..." << std::endl;
        sleep(1);
    }
    std::cout << "Opening video stream or file Success" << std::endl;

    // Named CAP_PROP_* constants instead of magic numbers 3 / 4.
    const int frameW = static_cast<int>(capture.get(cv::CAP_PROP_FRAME_WIDTH));
    const int frameH = static_cast<int>(capture.get(cv::CAP_PROP_FRAME_HEIGHT));
    std::cout << dataSourceConfig_.strUrl.c_str() << ";" << "frameW:" << frameW << " frameH:" << frameH << std::endl;

    while (!isStop_)
    {
        // BUGFIX: size the buffer from the ACTUAL stream dimensions. It was
        // previously width_*height_*3 while the cv::Mat was built with
        // frameH x frameW, so any mismatch made OpenCV write past the end of
        // the allocation (heap overflow).
        const unsigned int bgrBufferSize =
            static_cast<unsigned int>(frameW) * static_cast<unsigned int>(frameH) * 3u;

        std::shared_ptr<FrameData> pBGRFrameData = std::make_shared<FrameData>();
        uint8_t* pBGRBuffer = new uint8_t[bgrBufferSize];
        // BUGFIX: hand ownership to the smart pointer immediately, so the
        // buffer cannot leak on any early exit from this iteration.
        pBGRFrameData->pData.reset(pBGRBuffer,
                                   [](void* data) { delete[] static_cast<uint8_t*>(data); });

        // Wrap the buffer; read() decodes directly into it while the
        // dimensions/type match.
        // NOTE(review): if the stream resolution changes mid-run, read()
        // reallocates the Mat and detaches from pBGRBuffer, leaving the pushed
        // buffer stale — confirm the source guarantees a fixed resolution.
        cv::Mat frame(frameH, frameW, CV_8UC3, pBGRBuffer);
        if (!capture.read(frame))
        {
            // BUGFIX: the original called waitKey() (a no-op without a HighGUI
            // window) and then still pushed the uninitialized buffer
            // downstream. Skip this iteration instead.
            std::cerr << "no frame" << std::endl;
            continue;
        }

        // Package the decoded BGR frame and push it to the output queue.
        pBGRFrameData->iDataSource = engineId_;
        pBGRFrameData->iSize = bgrBufferSize;
        pBGRFrameData->i64TimeStamp = MyUtils::getins()->GetCurrentTimeMillis();
        iRet = outputQueMap_[strPort0_]->push(std::static_pointer_cast<void>(pBGRFrameData));
        if (iRet != APP_ERR_OK)
        {
            LogError << "push the bgr frame data failed...";
            std::cerr << "push the bgr frame data failed..." << std::endl;
        }
        // usleep(30*1000); // simulate ~30 fps when reading from a file
    }

    // BUGFIX: the original fell off the end of a non-void function (undefined
    // behavior). Report the status of the last push on shutdown.
    return iRet;
}