#include "TrainStep2InferenceEngine.h"
#include <opencv2/opencv.hpp>
#include "myqueue.h"
using namespace ai_matrix;
// Default-constructed; all real setup is deferred to Init().
TrainStep2InferenceEngine::TrainStep2InferenceEngine() = default;
// Model resources are released in DeInit(), not here.
TrainStep2InferenceEngine::~TrainStep2InferenceEngine() = default;
/**
 * @brief Initialize the engine: build the input-port key, load the step-2
 *        model configuration, verify the model file is readable, and
 *        initialize the underlying YOLOv5 clarity model.
 * @return APP_ERR_OK on success; APP_ERR_COMM_READ_FAIL if the model file
 *         is missing/unreadable; otherwise the error code from initModel().
 */
APP_ERROR TrainStep2InferenceEngine::Init()
{
    // Key used to locate this engine's input queue in inputQueMap_.
    strPort0_ = engineName_ + "_" + std::to_string(engineId_) + "_0";
    this->modelConfig_ = Config::getins()->getModelByTrainStep2Config();
    // Read model info: check the model file exists and is readable.
    int iFolderExist = access(modelConfig_.strModelPath.c_str(), R_OK);
    if (iFolderExist == -1)
    {
        LogError << "模型:" << modelConfig_.strModelPath << " 不存在!";
        // BUG FIX: was `return false;`, which implicitly converts to 0
        // (== APP_ERR_OK) and reported success despite the missing model.
        return APP_ERR_COMM_READ_FAIL;
    }
    class_num = this->modelConfig_.vecClass.size();
    score_threshold = this->modelConfig_.fScoreThreshold;
    int ret = initModel();
    if (ret != APP_ERR_OK)
    {
        LogError << "Failed to read model info, ret = " << ret;
        return ret;
    }
    LogInfo << "TrainStep2InferenceEngine Init ok";
    return APP_ERR_OK;
}
/**
 * @brief Fill in the model parameter struct from the engine's configured
 *        fields and initialize the YOLOv5 clarity inference model.
 * @return APP_ERR_OK on success, APP_ERR_COMM_READ_FAIL if the model
 *         backend fails to initialize.
 */
APP_ERROR TrainStep2InferenceEngine::initModel()
{
    // Clarity-model-specific parameters.
    modelinfo.yolov5ClearityModelParam.uiClassNum = class_num;
    modelinfo.yolov5ClearityModelParam.uiClearNum = clear_num;
    modelinfo.yolov5ClearityModelParam.uiDetSize = det_size;
    modelinfo.yolov5ClearityModelParam.fScoreThreshold = score_threshold;
    modelinfo.yolov5ClearityModelParam.fNmsThreshold = nms_threshold;
    // Common model I/O geometry and blob names.
    modelinfo.modelCommonInfo.uiModelWidth = model_width;
    modelinfo.modelCommonInfo.uiModelHeight = model_height;
    modelinfo.modelCommonInfo.uiInputSize = input_size;
    modelinfo.modelCommonInfo.uiOutputSize = output_size;
    modelinfo.modelCommonInfo.uiChannel = INPUT_CHANNEL;
    modelinfo.modelCommonInfo.uiBatchSize = batch_size;
    modelinfo.modelCommonInfo.strInputBlobName = INPUT_BLOB_NAME;
    modelinfo.modelCommonInfo.strOutputBlobName = OUTPUT_BLOB_NAME;
    string strModelName = "";
    int nRet = yolov5model.YoloV5ClearityInferenceInit(&modelinfo,
                                                       strModelName,
                                                       this->modelConfig_.strModelPath);
    if (nRet != 0)
    {
        // BUG FIX: the message previously named the wrong function
        // ("YoloV5ClassifyInferenceInit") and used LogInfo for an error path.
        LogError << "YoloV5ClearityInferenceInit nRet:" << nRet;
        return APP_ERR_COMM_READ_FAIL;
    }
    return APP_ERR_OK;
}
/**
 * @brief Tear down the engine: release the YOLOv5 clarity model's resources.
 * @return APP_ERR_OK always (the underlying deinit's status is not checked).
 */
APP_ERROR TrainStep2InferenceEngine::DeInit()
{
yolov5model.YoloV5ClearityInferenceDeinit();
LogInfo << "TrainStep2InferenceEngine DeInit ok";
return APP_ERR_OK;
}
APP_ERROR TrainStep2InferenceEngine::Process()
{
int iRet = APP_ERR_OK;
while (!isStop_)
{
std::shared_ptr<void> pVoidData0 = nullptr;
inputQueMap_[strPort0_]->pop(pVoidData0);
if (nullptr == pVoidData0)
{
usleep(1000); //1ms
continue;
}
std::shared_ptr<VStep2InputData> pVStep2InputData = std::static_pointer_cast<VStep2InputData>(pVoidData0);
std::shared_ptr<VStep2OutputData> pVStep2OutputData = std::make_shared<VStep2OutputData>();
pVStep2OutputData->iDataSource = pVStep2InputData->iDataSource;
pVStep2OutputData->strTrainDate = pVStep2InputData->strTrainDate;
pVStep2OutputData->strTrainTime = pVStep2InputData->strTrainTime;
pVStep2OutputData->strTrainName = pVStep2InputData->strTrainName;
pVStep2OutputData->iFrameId = pVStep2InputData->iFrameId;
pVStep2OutputData->bIsEnd = pVStep2InputData->bIsEnd;
// LogWarn << "-- 0 -->" << pVStep2InputData->vecSingleData.size();
for (int i = 0; i < pVStep2InputData->vecSingleData.size(); i++)
{
Step2ResultData step2ResultData;
step2ResultData.fLTX = pVStep2InputData->vecSingleData[i].fLTX;
step2ResultData.fLTY = pVStep2InputData->vecSingleData[i].fLTY;
step2ResultData.fRBX = pVStep2InputData->vecSingleData[i].fRBX;
step2ResultData.fRBY = pVStep2InputData->vecSingleData[i].fRBY;
step2ResultData.iClassId = pVStep2InputData->vecSingleData[i].iClassId;
step2ResultData.fScore = pVStep2InputData->vecSingleData[i].fScore;