diff --git a/app/train b/app/train
index 9ee789a..ed22817 100644
Binary files a/app/train and b/app/train differ
diff --git a/nvidia_ascend_engine/common/AppCommon.h b/nvidia_ascend_engine/common/AppCommon.h
index 926557e..2cdea93 100644
--- a/nvidia_ascend_engine/common/AppCommon.h
+++ b/nvidia_ascend_engine/common/AppCommon.h
@@ -516,6 +516,7 @@ typedef struct
     uint64_t i64TimeStamp = 0;                 // frame data timestamp
     std::shared_ptr pDecodeData = nullptr;
     int iDirection = 0;                        // travel direction (0 - unknown; 1 - left; 2 - right)
+    int nMonitorState = MONITOR_MODEL_INIT_STATE;
 } SaveImgData;
 
 // recognition processing data
diff --git a/nvidia_ascend_engine/common_engine/DataDealEngine/DataDealEngine.cpp b/nvidia_ascend_engine/common_engine/DataDealEngine/DataDealEngine.cpp
index 4ff3192..4692fcf 100644
--- a/nvidia_ascend_engine/common_engine/DataDealEngine/DataDealEngine.cpp
+++ b/nvidia_ascend_engine/common_engine/DataDealEngine/DataDealEngine.cpp
@@ -1,385 +1 @@
-#include "DataDealEngine.h"
-
-using namespace ai_matrix;
-
-extern std::atomic_uint64_t g_i64ReRunTimeStamp;
-extern std::atomic_uint32_t g_iReRunOrigFrameId;
-extern std::atomic_uint32_t g_iReRunFrameId;
-
-DataDealEngine::DataDealEngine() {}
-
-DataDealEngine::~DataDealEngine() {}
-
-APP_ERROR DataDealEngine::Init()
-{
-    strPort0_ = engineName_ + "_" + std::to_string(engineId_) + "_0";
-    strPort1_ = engineName_ + "_" + std::to_string(engineId_) + "_1";
-    strPort2_ = engineName_ + "_" + std::to_string(engineId_) + "_2";
-    strResultPath_ = MyYaml::GetIns()->GetPathValue("gc_result_path");
-    iPushDirection_ = MyYaml::GetIns()->GetIntValue("gc_push_direction");
-
-    dataSourceConfig_ = MyYaml::GetIns()->GetDataSourceConfigById(engineId_); // get camera parameters
-    iIntervalTime_ = (dataSourceConfig_.iSkipInterval - 1) * 40 * 1000;
-    mapUseDataSouceCfg_ = MyYaml::GetIns()->GetUseDataSourceConfig();
-
-    std::string delimiter(",");
-    for (auto iter = mapUseDataSouceCfg_.begin(); iter!=mapUseDataSouceCfg_.end(); iter++)
-    {
-        int iSourceId = iter->first;
-        std::vector vecSplit = MyUtils::getins()->split(iter->second.strTarget, delimiter);
-
-        std::vector vecPushPorts;
-        for (auto iter = vecSplit.begin(); iter != vecSplit.end(); iter++)
-        {
-            if (*iter == "NUM")
-            {
-                vecPushPorts.push_back(strPort0_);
-            }
-            else if (*iter == "CHKDATE")
-            {
-                vecPushPorts.push_back(strPort1_);
-            }
-            else if(*iter == "CONTAINER" || *iter == "CONTAINER_T")
-            {
-                vecPushPorts.push_back(strPort2_);
-            }
-        }
-        mapSourcePushPort_[iSourceId] = vecPushPorts;
-    }
-
-    InitParam();
-    LogInfo << "engineId_:" << engineId_ << " DataDealEngine Init ok";
-    return APP_ERR_OK;
-}
-
-APP_ERROR DataDealEngine::DeInit()
-{
-
-    LogInfo << "engineId_:" << engineId_ << " DataDealEngine DeInit ok";
-    return APP_ERR_OK;
-}
-
-/**
-* Parameter initialization (must be called when a train run ends)
-* inParam : N/A
-* outParam: N/A
-* return  : N/A
-*/
-void DataDealEngine::InitParam()
-{
-    iOrigDataNO_ = 1;
-    strDataDir_ = "";
-    moveData_.i64TimeStamp = 0;
-    moveData_.bHasTrain = false;
-    moveData_.bIsEnd = false;
-    iDataNO_ = 1;
-    strTrainDate_ = "";
-    strTrainName_ = "";
-    iDirection_ = DIRECTION_UNKNOWN;
-    bNotPushFlag_ = false;
-}
-
-/**
-* Decode image data
-* inParam : RawData &rawData : image data
-* outParam: std::shared_ptr pDecodedData : decoded data
-* return  : true (decode succeeded) / false (decode failed)
-*/
-bool DataDealEngine::GetJpegdOut(std::shared_ptr pDecodedData, RawData &rawData)
-{
-    return true;
-}
-
-/**
-* Read the image and its accompanying text file
-* inParam : N/A
-* outParam: N/A
-* return  : N/A
-*/
-bool DataDealEngine::ReadFileInfo(Json::Value &jvFrameInfo, RawData &rawData, std::string &strFileName, std::string &strImgName)
-{
-    // If the image or text file does not exist, sleep 10 ms and return so the caller can fetch again.
-    if (access(strFileName.c_str(), F_OK) != 0 || access(strImgName.c_str(), F_OK) != 0)
-    {
-        LogWarn << "txt:" << strFileName << " or image:" << strImgName << " no exist";
-        return false;
-    }
-
-    if (!MyUtils::getins()->ReadJsonInfo(jvFrameInfo, strFileName))
-    {
-        LogError << "Failed to read txt:" << strFileName;
-        return false;
-    }
-
-    // read the image content
-    // int iRet = ReadFile(strImgName, rawData);
-    // if (iRet != APP_ERR_OK)
-    // {
-    //     LogError << "Failed to read image:" << strImgName;
-    //     return false;
-    // }
-
-    return true;
-}
-
-/**
-* Push data to the queue; if the queue is full, sleep for a while and push again.
-* inParam : const std::string strPort : port to push to
-          : const std::shared_ptr &pProcessData : data to push
-* outParam: N/A
-* return  : N/A
-*/
-void DataDealEngine::PushData(const std::string &strPort, const std::shared_ptr &pProcessData)
-{
-    while (true)
-    {
-        int iRet = outputQueMap_[strPort]->push(std::static_pointer_cast(pProcessData));
-        if (iRet != 0)
-        {
-            LogDebug << "sourceid:" << pProcessData->iDataSource << " frameid:" << pProcessData->iFrameId << " push fail iRet:" << iRet;
-            if (iRet == 2)
-            {
-                usleep(10000); // 10ms
-                continue;
-            }
-        }
-        break;
-    }
-}
-
-/**
-* Build the processing data and push it
-* inParam : N/A
-* outParam: N/A
-* return  : N/A
-*/
-void DataDealEngine::MakeProcessData()
-{
-    int iRet = APP_ERR_OK;
-    uint64_t i64TimeStampTemp = 0;
-
-    bool bIsEndByStop = false;
-    uint64_t i64ReRunTimeStamp = 0;
-    uint32_t iReRunOrigFrameId = 0;
-    uint32_t iReRunFrameId = 0;
-    if (g_iReRunOrigFrameId != 0)
-    {
-        LogDebug << "g_i64ReRunTimeStamp:" << g_i64ReRunTimeStamp << " g_iReRunOrigFrameId:" << g_iReRunOrigFrameId
-                 << " g_iReRunFrameId:" << g_iReRunFrameId;
-        i64ReRunTimeStamp = g_i64ReRunTimeStamp;
-        iReRunOrigFrameId = g_iReRunOrigFrameId;
-        iReRunFrameId = g_iReRunFrameId;
-        g_i64ReRunTimeStamp = 0;
-        g_iReRunOrigFrameId = 0;
-        g_iReRunFrameId = 0;
-        bIsEndByStop = true;
-    }
-
-    for (auto iter = mapUseDataSouceCfg_.begin(); iter!=mapUseDataSouceCfg_.end(); iter++)
-    {
-        int iSourceId = iter->first;
-        char szCameraNo[5] = {0};
-        sprintf(szCameraNo, "%03d/", iSourceId + 1);
-        uint32_t iOrigFrameId = iOrigDataNO_ * dataSourceConfig_.iSkipInterval;
-        uint32_t iFrameId = iDataNO_ * dataSourceConfig_.iSkipInterval;
-        bool bIsEndFlag = (moveData_.bIsEnd && moveData_.iFrameId == iOrigFrameId);
-        if (bIsEndByStop)
-        {
-            bIsEndFlag = true;
-            iOrigFrameId = iReRunOrigFrameId;
-            iFrameId = iReRunFrameId;
-        }
-
-        LogInfo << "sourceid:" << iSourceId << " MakeProcessData origtime:" << moveData_.strTrainName << " iOrigFrameId:" << iOrigFrameId
-                << " time:" << strTrainName_ << " iFrameId:" << iFrameId << " bIsEndFlag:" << bIsEndFlag;
-
-        std::string strImgName = strDataDir_ + szCameraNo + std::to_string(iOrigFrameId);
-        strImgName += (iter->second.iRotate != 0) ?
"_rotate.jpg" : ".jpg"; - std::string strFileName = strDataDir_ + szCameraNo + std::to_string(iOrigFrameId) + ".txt"; - - //摄像头读取失败后重试30次。 - Json::Value jvFrameInfo; - RawData rawData; - bool bRet = false; - int iNoDataCnt = 0; - while (!isStop_ && iNoDataCnt < 30) - { - bRet = ReadFileInfo(jvFrameInfo, rawData, strFileName, strImgName); - if (bRet) - { - break; - } - usleep(500 * 1000); // 500ms - iNoDataCnt++; - } - - //组织数据 - std::shared_ptr pProcessData = std::make_shared(); - pProcessData->iDataSource = iSourceId; - pProcessData->iFrameId = iFrameId; - pProcessData->strPicFilePath = strImgName; - pProcessData->i64TimeStamp = i64TimeStampTemp; - pProcessData->strOrigTrainDate = moveData_.strTrainDate; - pProcessData->strOrigTrainName = moveData_.strTrainName; - pProcessData->iOrigFrameId = iOrigFrameId; - pProcessData->strTrainDate = strTrainDate_; - pProcessData->strTrainName = strTrainName_; - pProcessData->iStatus = TRAINSTATUS_RUN; - pProcessData->bIsEnd = bIsEndFlag; - pProcessData->iDataNO = iDataNO_; - pProcessData->nMonitorState = moveData_.nMonitorState; - - if (bRet) - { - i64TimeStampTemp = jvFrameInfo["timeStamp"].asUInt64(); - pProcessData->i64TimeStamp = i64TimeStampTemp; - pProcessData->iWidth = jvFrameInfo["width"].asInt(); - pProcessData->iHeight = jvFrameInfo["height"].asInt(); - pProcessData->iDirection = jvFrameInfo["direction"].asInt(); - pProcessData->iRate = jvFrameInfo["rate"].asInt(); - - cv::Mat cvframe = cv::imread(pProcessData->strPicFilePath); - int iBufferSize = pProcessData->iWidth * pProcessData->iHeight * 3; - void* pBGRBufferobj = nullptr; - pBGRBufferobj = new uint8_t[iBufferSize]; - memcpy(pBGRBufferobj, cvframe.data, iBufferSize); - pProcessData->pData.reset(pBGRBufferobj, [](void* data){if(data) {delete[] data; data = nullptr;}}); - pProcessData->iSize = iBufferSize; - } - - std::vector vecPushPorts = mapSourcePushPort_[iSourceId]; - for (int iPort = 0; iPort < vecPushPorts.size(); iPort++) - { - if (iPort == vecPushPorts.size() - 1) - { - //iRet = outputQueMap_[vecPushPorts[iPort]]->push(std::static_pointer_cast(pProcessData)); - PushData(vecPushPorts[iPort], pProcessData); - continue; - } - std::shared_ptr pNewProcessData = std::make_shared(); - *pNewProcessData = *pProcessData; - //iRet = outputQueMap_[vecPushPorts[iPort]]->push(std::static_pointer_cast(pNewProcessData)); - PushData(vecPushPorts[iPort], pNewProcessData); - } - - } - - iOrigDataNO_++; - iDataNO_++; - //每组处理数据需间隔一定时间 - usleep(iIntervalTime_); - - if (bIsEndByStop) - { - iDataNO_ = 1; - iOrigDataNO_ = iReRunOrigFrameId / dataSourceConfig_.iSkipInterval; - std::string strDateTime = MyUtils::getins()->GetDateTimeByMilliSeconds(i64ReRunTimeStamp); - strTrainDate_ = strDateTime.substr(0, 10); - strTrainName_ = strDateTime.substr(11, strDateTime.length()); - std::string strTrainPath = strResultPath_ + strTrainDate_ + "/" + strTrainName_ + "/"; - MyUtils::getins()->CreateDirPath(strTrainPath); - LogDebug << "rerun traindate:" << strTrainDate_ << " trainname:" << strTrainName_; - } -} - -APP_ERROR DataDealEngine::Process() -{ - int iRet = APP_ERR_OK; - //return APP_ERR_OK; - while (!isStop_) - { - //获取主摄像头检测的状态 - std::shared_ptr pVoidData0 = nullptr; - iRet = inputQueMap_[strPort0_]->pop(pVoidData0); - - if (nullptr != pVoidData0) - { - std::shared_ptr pMoveData = std::static_pointer_cast(pVoidData0); - - // queuwMoveData_.push(*pMoveData); - moveData_ = *pMoveData; - LogDebug << "traindate:" << moveData_.strTrainDate << " trainname:" << moveData_.strTrainName - << " MoveData frameid:" << 
moveData_.iFrameId << " IsEnd:" << moveData_.bIsEnd; - } - - // LogDebug << "【帧号】" << (iDataNO_ * dataSourceConfig_.iSkipInterval); - // if (queuwMoveData_.size() > 0 && (iDataNO_ * dataSourceConfig_.iSkipInterval) >= queuwMoveData_.front().iFrameId) - // { - // moveData_ = queuwMoveData_.front(); - // queuwMoveData_.pop(); - // LogDebug << "!!!--- moveDate 更新"; - // } - - if (!moveData_.bHasTrain) - { - usleep(1000); //1ms - continue; - } - - //第一个数据,休眠1s,等待图片存入本地 - if (iOrigDataNO_ == 1) - { - usleep(1000 * 1000); //1s - } - - if (strDataDir_.empty()) - { - strDataDir_ = strResultPath_ + moveData_.strTrainDate + "/" + moveData_.strTrainName + "/"; - strTrainDate_ = moveData_.strTrainDate; - strTrainName_ = moveData_.strTrainName; - } - - //如果设置了方向,则方向不对直接过滤,但结束帧不能过滤,需流转到后面Engine,保证后面处理正确。 - if (iDirection_ == DIRECTION_UNKNOWN) - { - std::string strFilePath = strResultPath_ + strTrainDate_ + "/" + strTrainName_ + "/" + "direction.txt"; - Json::Value jvDirectionInfo; - if (MyUtils::getins()->ReadJsonInfo(jvDirectionInfo, strFilePath, 0)) - { - iDirection_ = jvDirectionInfo["direction"].asInt(); - } - } - - if (!moveData_.bIsEnd) - { - if (iDirection_ == DIRECTION_UNKNOWN || iDirection_ == iPushDirection_ || iPushDirection_ == DIRECTION_UNKNOWN) - { - MakeProcessData(); - } - else - { - LogDebug << "traindate:" << strTrainDate_ << " trainname:" << strTrainName_ << " iOrigDataNO_:" << iOrigDataNO_ << " continue."; - bNotPushFlag_ = true; - usleep(iIntervalTime_); //每组处理数据需间隔一定时间 - continue; - } - } - else - { - if (bNotPushFlag_) - { - iOrigDataNO_ = moveData_.iFrameId / dataSourceConfig_.iSkipInterval; - iDataNO_ = moveData_.iFrameId / dataSourceConfig_.iSkipInterval; - } - - if (iOrigDataNO_ * dataSourceConfig_.iSkipInterval > moveData_.iFrameId) - { - LogDebug << "dealFrameid:" << iOrigDataNO_ * dataSourceConfig_.iSkipInterval << " moveFrameid:" << moveData_.iFrameId; - iOrigDataNO_ = moveData_.iFrameId / dataSourceConfig_.iSkipInterval; - MakeProcessData(); - } - else - { - while (!isStop_ && iOrigDataNO_ * dataSourceConfig_.iSkipInterval <= moveData_.iFrameId) - { - //继续处理 - MakeProcessData(); - } - } - - InitParam(); - } - } - return APP_ERR_OK; -} - +#include "DataDealEngine.h" using namespace ai_matrix; extern std::atomic_uint64_t g_i64ReRunTimeStamp; extern std::atomic_uint32_t g_iReRunOrigFrameId; extern std::atomic_uint32_t g_iReRunFrameId; DataDealEngine::DataDealEngine() {} DataDealEngine::~DataDealEngine() {} APP_ERROR DataDealEngine::Init() { strPort0_ = engineName_ + "_" + std::to_string(engineId_) + "_0"; strPort1_ = engineName_ + "_" + std::to_string(engineId_) + "_1"; strPort2_ = engineName_ + "_" + std::to_string(engineId_) + "_2"; strResultPath_ = MyYaml::GetIns()->GetPathValue("gc_result_path"); iPushDirection_ = MyYaml::GetIns()->GetIntValue("gc_push_direction"); dataSourceConfig_ = MyYaml::GetIns()->GetDataSourceConfigById(engineId_); //获取摄像机参数 iIntervalTime_ = (dataSourceConfig_.iSkipInterval - 1) * 40 * 1000; mapUseDataSouceCfg_ = MyYaml::GetIns()->GetUseDataSourceConfig(); std::string delimiter(","); for (auto iter = mapUseDataSouceCfg_.begin(); iter!=mapUseDataSouceCfg_.end(); iter++) { int iSourceId = iter->first; std::vector vecSplit = MyUtils::getins()->split(iter->second.strTarget, delimiter); std::vector vecPushPorts; for (auto iter = vecSplit.begin(); iter != vecSplit.end(); iter++) { if (*iter == "NUM") { vecPushPorts.push_back(strPort0_); } else if (*iter == "CHKDATE") { vecPushPorts.push_back(strPort1_); } else if(*iter == "CONTAINER" || *iter == 
"CONTAINER_T") { vecPushPorts.push_back(strPort2_); } } mapSourcePushPort_[iSourceId] = vecPushPorts; } InitParam(); LogInfo << "engineId_:" << engineId_ << " DataDealEngine Init ok"; return APP_ERR_OK; } APP_ERROR DataDealEngine::DeInit() { LogInfo << "engineId_:" << engineId_ << " DataDealEngine DeInit ok"; return APP_ERR_OK; } /** * 参数初始化(列车结束时需调用) * inParam : N/A * outParam: N/A * return : N/A */ void DataDealEngine::InitParam() { iOrigDataNO_ = 1; strDataDir_ = ""; moveData_.i64TimeStamp = 0; moveData_.bHasTrain = false; moveData_.bIsEnd = false; iDataNO_ = 1; strTrainDate_ = ""; strTrainName_ = ""; iDirection_ = DIRECTION_UNKNOWN; bNotPushFlag_ = false; } /** * 图片数据解码 * inParam : RawData &rawData :图片数据 * outParam: std::shared_ptr pDecodedData :解码数据 * return : true(编码成功)/false(编码失败) */ bool DataDealEngine::GetJpegdOut(std::shared_ptr pDecodedData, RawData &rawData) { return true; } /** * 读取图片和文本 * inParam : N/A * outParam: N/A * return : N/A */ bool DataDealEngine::ReadFileInfo(Json::Value &jvFrameInfo, RawData &rawData, std::string &strFileName, std::string &strImgName) { //图片或文件不存在时,休眠10ms直接return,重新获取。 if (access(strFileName.c_str(), F_OK) != 0 || access(strImgName.c_str(), F_OK) != 0) { LogWarn << "txt:" << strFileName << " or image:" << strImgName << " no exist"; return false; } if (!MyUtils::getins()->ReadJsonInfo(jvFrameInfo, strFileName)) { LogError << "Failed to read txt:" << strFileName; return false; } //读取图片内容 // int iRet = ReadFile(strImgName, rawData); // if (iRet != APP_ERR_OK) // { // LogError << "Failed to read image:" << strImgName; // return false; // } return true; } /** * push数据到队列,队列满时则休眠一段时间再push。 * inParam : const std::string strPort push的端口 : const std::shared_ptr &pProcessData push的数据 * outParam: N/A * return : N/A */ void DataDealEngine::PushData(const std::string &strPort, const std::shared_ptr &pProcessData) { while (true) { int iRet = outputQueMap_[strPort]->push(std::static_pointer_cast(pProcessData)); if (iRet != 0) { LogDebug << "sourceid:" << pProcessData->iDataSource << " frameid:" << pProcessData->iFrameId << " push fail iRet:" << iRet; if (iRet == 2) { usleep(10000); // 10ms continue; } } break; } } /** * 构造处理数据并push * inParam : N/A * outParam: N/A * return : N/A */ void DataDealEngine::MakeProcessData() { int iRet = APP_ERR_OK; uint64_t i64TimeStampTemp = 0; bool bIsEndByStop = false; uint64_t i64ReRunTimeStamp = 0; uint32_t iReRunOrigFrameId = 0; uint32_t iReRunFrameId = 0; if (g_iReRunOrigFrameId != 0) { LogDebug << "g_i64ReRunTimeStamp:" << g_i64ReRunTimeStamp << " g_iReRunOrigFrameId:" << g_iReRunOrigFrameId << " g_iReRunFrameId:" << g_iReRunFrameId; i64ReRunTimeStamp = g_i64ReRunTimeStamp; iReRunOrigFrameId = g_iReRunOrigFrameId; iReRunFrameId = g_iReRunFrameId; g_i64ReRunTimeStamp = 0; g_iReRunOrigFrameId = 0; g_iReRunFrameId = 0; bIsEndByStop = true; } for (auto iter = mapUseDataSouceCfg_.begin(); iter!=mapUseDataSouceCfg_.end(); iter++) { int iSourceId = iter->first; char szCameraNo[5] = {0}; sprintf(szCameraNo, "%03d/", iSourceId + 1); uint32_t iOrigFrameId = iOrigDataNO_ * dataSourceConfig_.iSkipInterval; uint32_t iFrameId = iDataNO_ * dataSourceConfig_.iSkipInterval; bool bIsEndFlag = (moveData_.bIsEnd && moveData_.iFrameId == iOrigFrameId); if (bIsEndByStop) { bIsEndFlag = true; iOrigFrameId = iReRunOrigFrameId; iFrameId = iReRunFrameId; } LogInfo << "sourceid:" << iSourceId << " MakeProcessData origtime:" << moveData_.strTrainName << " iOrigFrameId:" << iOrigFrameId << " time:" << strTrainName_ << " iFrameId:" << iFrameId << " 
bIsEndFlag:" << bIsEndFlag; std::string strImgName = strDataDir_ + szCameraNo + std::to_string(iOrigFrameId); strImgName += (iter->second.iRotate != 0) ? "_rotate.jpg" : ".jpg"; std::string strFileName = strDataDir_ + szCameraNo + std::to_string(iOrigFrameId) + ".txt"; //摄像头读取失败后重试30次。 Json::Value jvFrameInfo; RawData rawData; bool bRet = false; int iNoDataCnt = 0; while (!isStop_ && iNoDataCnt < 30) { bRet = ReadFileInfo(jvFrameInfo, rawData, strFileName, strImgName); if (bRet) { break; } usleep(500 * 1000); // 500ms iNoDataCnt++; } //组织数据 std::shared_ptr pProcessData = std::make_shared(); pProcessData->iDataSource = iSourceId; pProcessData->iFrameId = iFrameId; pProcessData->strPicFilePath = strImgName; pProcessData->i64TimeStamp = i64TimeStampTemp; pProcessData->strOrigTrainDate = moveData_.strTrainDate; pProcessData->strOrigTrainName = moveData_.strTrainName; pProcessData->iOrigFrameId = iOrigFrameId; pProcessData->strTrainDate = strTrainDate_; pProcessData->strTrainName = strTrainName_; pProcessData->iStatus = TRAINSTATUS_RUN; pProcessData->bIsEnd = bIsEndFlag; pProcessData->iDataNO = iDataNO_; if (bRet) { i64TimeStampTemp = jvFrameInfo["timeStamp"].asUInt64(); pProcessData->i64TimeStamp = i64TimeStampTemp; pProcessData->iWidth = jvFrameInfo["width"].asInt(); pProcessData->iHeight = jvFrameInfo["height"].asInt(); pProcessData->iDirection = jvFrameInfo["direction"].asInt(); pProcessData->iRate = jvFrameInfo["rate"].asInt(); pProcessData->nMonitorState = jvFrameInfo["moveType"].asInt(); cv::Mat cvframe = cv::imread(pProcessData->strPicFilePath); int iBufferSize = pProcessData->iWidth * pProcessData->iHeight * 3; void* pBGRBufferobj = nullptr; pBGRBufferobj = new uint8_t[iBufferSize]; memcpy(pBGRBufferobj, cvframe.data, iBufferSize); pProcessData->pData.reset(pBGRBufferobj, [](void* data){if(data) {delete[] data; data = nullptr;}}); pProcessData->iSize = iBufferSize; } std::vector vecPushPorts = mapSourcePushPort_[iSourceId]; for (int iPort = 0; iPort < vecPushPorts.size(); iPort++) { if (iPort == vecPushPorts.size() - 1) { //iRet = outputQueMap_[vecPushPorts[iPort]]->push(std::static_pointer_cast(pProcessData)); PushData(vecPushPorts[iPort], pProcessData); continue; } std::shared_ptr pNewProcessData = std::make_shared(); *pNewProcessData = *pProcessData; //iRet = outputQueMap_[vecPushPorts[iPort]]->push(std::static_pointer_cast(pNewProcessData)); PushData(vecPushPorts[iPort], pNewProcessData); } } iOrigDataNO_++; iDataNO_++; //每组处理数据需间隔一定时间 usleep(iIntervalTime_); if (bIsEndByStop) { iDataNO_ = 1; iOrigDataNO_ = iReRunOrigFrameId / dataSourceConfig_.iSkipInterval; std::string strDateTime = MyUtils::getins()->GetDateTimeByMilliSeconds(i64ReRunTimeStamp); strTrainDate_ = strDateTime.substr(0, 10); strTrainName_ = strDateTime.substr(11, strDateTime.length()); std::string strTrainPath = strResultPath_ + strTrainDate_ + "/" + strTrainName_ + "/"; MyUtils::getins()->CreateDirPath(strTrainPath); LogDebug << "rerun traindate:" << strTrainDate_ << " trainname:" << strTrainName_; } } APP_ERROR DataDealEngine::Process() { int iRet = APP_ERR_OK; //return APP_ERR_OK; while (!isStop_) { //获取主摄像头检测的状态 std::shared_ptr pVoidData0 = nullptr; iRet = inputQueMap_[strPort0_]->pop(pVoidData0); if (nullptr != pVoidData0) { std::shared_ptr pMoveData = std::static_pointer_cast(pVoidData0); // queuwMoveData_.push(*pMoveData); moveData_ = *pMoveData; LogDebug << "traindate:" << moveData_.strTrainDate << " trainname:" << moveData_.strTrainName << " MoveData frameid:" << moveData_.iFrameId << " IsEnd:" << 
moveData_.bIsEnd; } // LogDebug << "【帧号】" << (iDataNO_ * dataSourceConfig_.iSkipInterval); // if (queuwMoveData_.size() > 0 && (iDataNO_ * dataSourceConfig_.iSkipInterval) >= queuwMoveData_.front().iFrameId) // { // moveData_ = queuwMoveData_.front(); // queuwMoveData_.pop(); // LogDebug << "!!!--- moveDate 更新"; // } if (!moveData_.bHasTrain) { usleep(1000); //1ms continue; } //第一个数据,休眠1s,等待图片存入本地 if (iOrigDataNO_ == 1) { usleep(1000 * 1000); //1s } if (strDataDir_.empty()) { strDataDir_ = strResultPath_ + moveData_.strTrainDate + "/" + moveData_.strTrainName + "/"; strTrainDate_ = moveData_.strTrainDate; strTrainName_ = moveData_.strTrainName; } //如果设置了方向,则方向不对直接过滤,但结束帧不能过滤,需流转到后面Engine,保证后面处理正确。 if (iDirection_ == DIRECTION_UNKNOWN) { std::string strFilePath = strResultPath_ + strTrainDate_ + "/" + strTrainName_ + "/" + "direction.txt"; Json::Value jvDirectionInfo; if (MyUtils::getins()->ReadJsonInfo(jvDirectionInfo, strFilePath, 0)) { iDirection_ = jvDirectionInfo["direction"].asInt(); } } if (!moveData_.bIsEnd) { if (iDirection_ == DIRECTION_UNKNOWN || iDirection_ == iPushDirection_ || iPushDirection_ == DIRECTION_UNKNOWN) { MakeProcessData(); } else { LogDebug << "traindate:" << strTrainDate_ << " trainname:" << strTrainName_ << " iOrigDataNO_:" << iOrigDataNO_ << " continue."; bNotPushFlag_ = true; usleep(iIntervalTime_); //每组处理数据需间隔一定时间 continue; } } else { if (bNotPushFlag_) { iOrigDataNO_ = moveData_.iFrameId / dataSourceConfig_.iSkipInterval; iDataNO_ = moveData_.iFrameId / dataSourceConfig_.iSkipInterval; } if (iOrigDataNO_ * dataSourceConfig_.iSkipInterval > moveData_.iFrameId) { LogDebug << "dealFrameid:" << iOrigDataNO_ * dataSourceConfig_.iSkipInterval << " moveFrameid:" << moveData_.iFrameId; iOrigDataNO_ = moveData_.iFrameId / dataSourceConfig_.iSkipInterval; MakeProcessData(); } else { while (!isStop_ && iOrigDataNO_ * dataSourceConfig_.iSkipInterval <= moveData_.iFrameId) { //继续处理 MakeProcessData(); } } InitParam(); } } return APP_ERR_OK; } \ No newline at end of file diff --git a/nvidia_ascend_engine/common_engine/DataSourceEngine/CameraEngine.cpp b/nvidia_ascend_engine/common_engine/DataSourceEngine/CameraEngine.cpp index 5794b4f..34f313e 100644 --- a/nvidia_ascend_engine/common_engine/DataSourceEngine/CameraEngine.cpp +++ b/nvidia_ascend_engine/common_engine/DataSourceEngine/CameraEngine.cpp @@ -1,434 +1 @@ -#include "CameraEngine.h" -#include "myutils.h" - -using namespace ai_matrix; - -namespace -{ - const int LOW_THRESHOLD = 128; - const int MAX_THRESHOLD = 4096; - const uint16_t DELAY_TIME = 10000; -} - -CameraEngine::CameraEngine() {} - -CameraEngine::~CameraEngine() {} - -APP_ERROR CameraEngine::Init() -{ - bUseEngine_ = true; - bHwDecode_ = MyYaml::GetIns()->GetBoolValue("gc_hardware_decode"); - - dataSourceConfig_ = MyYaml::GetIns()->GetDataSourceConfigById(engineId_); //获取摄像机参数 - if (MyYaml::GetIns()->GetStringValue("gc_data_source") != "camera" || !dataSourceConfig_.bUse) - { - bUseEngine_ = false; - LogWarn << "engineId_:" << engineId_ << " not use engine"; - return APP_ERR_OK; - } - - strPort0_ = engineName_ + "_" + std::to_string(engineId_) + "_0"; - strPort1_ = engineName_ + "_" + std::to_string(engineId_) + "_1"; - nDelayTime = MyYaml::GetIns()->GetIntValue("gc_load_delay"); - - LogInfo << "engineId_:" << engineId_ << " CameraEngine Init ok"; - return APP_ERR_OK; -} - -APP_ERROR CameraEngine::DeInit() -{ - if (!bUseEngine_) - { - LogWarn << "engineId_:" << engineId_ << " not use engine"; - return APP_ERR_OK; - } - ResetCamera(); - LogInfo << 
"engineId_:" << engineId_ << " CameraEngine DeInit ok"; - return APP_ERR_OK; -} - -void CameraEngine::ResetCamera() -{ - if (pFormatCtx_ != nullptr) - { - // clear th cache of the queue - avformat_close_input(&pFormatCtx_); - pFormatCtx_ = nullptr; - } -} - -APP_ERROR CameraEngine::ConnectCamera() -{ - pFormatCtx_ = CreateFormatContext(); // create context - if (pFormatCtx_ == nullptr) - { - LogError << "engineId_:" << engineId_ << " pFormatCtx_ null!"; - return APP_ERR_COMM_FAILURE; - } - - //0-代表输入 - av_dump_format(pFormatCtx_, 0, dataSourceConfig_.strUrl.c_str(), 0); - - // get stream infomation - int iRet = APP_ERR_OK; - iRet = GetStreamInfo(); - if (iRet != APP_ERR_OK) - { - LogError << "engineId_:" << engineId_ << " Stream Info Check failed, iRet = " << iRet; - return APP_ERR_COMM_FAILURE; - } - - return APP_ERR_OK; -} - -APP_ERROR CameraEngine::GetStreamInfo() -{ - if (pFormatCtx_ != nullptr) - { - iVideoStream_ = -1; - iAudioStream_ = -1; - //frameInfo_.iFrameId = 0; //帧号从0开始 - - for (unsigned int i = 0; i < pFormatCtx_->nb_streams; i++) - { - AVStream *inStream = pFormatCtx_->streams[i]; - if (inStream->codecpar->codec_type == AVMEDIA_TYPE_VIDEO) - { - iVideoStream_ = i; - frameInfo_.iHeight = inStream->codecpar->height; - frameInfo_.iWidth = inStream->codecpar->width; - - //获取帧率,帧率的打印都在流中的两个成员.且应取平均帧率为先,为{x,0}或者{0,1}则取实时帧率 - if (inStream->avg_frame_rate.den == 0 || (inStream->avg_frame_rate.num == 0 && inStream->avg_frame_rate.den == 1)) - { - frameInfo_.iRate = inStream->r_frame_rate.num / inStream->r_frame_rate.den; - } - else - { - frameInfo_.iRate = inStream->avg_frame_rate.num / inStream->avg_frame_rate.den; - } - LogDebug << "engineId_:" << engineId_ << " width:" << frameInfo_.iWidth << " height:" << frameInfo_.iHeight - << " rate:" << frameInfo_.iRate << " iVideoStream_:" << iVideoStream_; - } - else if (inStream->codecpar->codec_type == AVMEDIA_TYPE_AUDIO) - { - iAudioStream_ = i; - LogDebug << "engineId_:" << engineId_ << " iAudioStream_:" << iAudioStream_; - } - } - - if (iVideoStream_ == -1) - { - LogError << "engineId_:" << engineId_ << " Didn't find a video stream!"; - return APP_ERR_COMM_FAILURE; - } - - if (frameInfo_.iHeight < LOW_THRESHOLD || frameInfo_.iWidth < LOW_THRESHOLD || - frameInfo_.iHeight > MAX_THRESHOLD || frameInfo_.iWidth > MAX_THRESHOLD) - { - LogError << "engineId_:" << engineId_ << " Size of frame is not supported in DVPP Video Decode!"; - return APP_ERR_COMM_FAILURE; - } - - AVCodecID codecId = pFormatCtx_->streams[iVideoStream_]->codecpar->codec_id; - if (codecId == AV_CODEC_ID_H264) - { - int profile = pFormatCtx_->streams[iVideoStream_]->codecpar->profile; - if (profile == FF_PROFILE_H264_BASELINE) - { - frameInfo_.format = H264_BASELINE_LEVEL; - } - else if (profile == FF_PROFILE_H264_MAIN) - { - frameInfo_.format = H264_MAIN_LEVEL; - } - else if (profile == FF_PROFILE_H264_HIGH) - { - frameInfo_.format = H264_HIGH_LEVEL; - } - else - { - LogError << "engineId_:" << engineId_ << " not support h264 profile"; - return APP_ERR_COMM_FAILURE; - } - } - else if (codecId == AV_CODEC_ID_H265) - { - int profile = pFormatCtx_->streams[iVideoStream_]->codecpar->profile; - if (profile == FF_PROFILE_HEVC_MAIN) - { - frameInfo_.format = H265_MAIN_LEVEL; - } - else - { - LogError << "engineId_:" << engineId_ << " not support h265 profile"; - return APP_ERR_COMM_FAILURE; - } - } - else - { - LogError << "engineId_:" << engineId_ << " Error unsupported format" << codecId; - return APP_ERR_COMM_FAILURE; - } - } - return APP_ERR_OK; -} - -AVFormatContext 
*CameraEngine::CreateFormatContext() -{ - // create message for stream pull - AVFormatContext *pFormatContext = nullptr; - AVDictionary *pOptions = nullptr; - - // formatContext = avformat_alloc_context(); - if (dataSourceConfig_.strUrl.find("rtsp:") != std::string::npos) // rtsp - { - av_dict_set(&pOptions, "rtsp_transport", "tcp", 0); // 指定其传输方式为TCP - // av_dict_set(&pOptions, "stimeout", "3000000", 0); // 设置超时3秒 - // av_dict_set(&pOptions, "rw_timeout", "3000", 0); //单位:ms - av_dict_set(&pOptions, "timeout", "3000000", 0); //设置超时时间为3秒 - - } - - //av_register_all(); //注册所有支持的格式(这里一定注册这些,否则会因为协议解析问题报错!!!) - //avcodec_register_all(); //注册编解码器 - //avformat_network_init(); //注册网格格式,如果为本地文件则可以去掉该代码 - - int iRet = avformat_open_input(&pFormatContext, dataSourceConfig_.strUrl.c_str(), nullptr, &pOptions); - if (nullptr != pOptions) - { - av_dict_free(&pOptions); - } - if (iRet != 0) - { - LogError << "engineId_:" << engineId_ << " Couldn't open input stream " << dataSourceConfig_.strUrl.c_str() << ", iRet=" << iRet; - return nullptr; - } - - // pFormatContext->flags |= AVFMT_FLAG_NONBLOCK; - // pFormatContext->pb->flags |= AVIO_FLAG_NONBLOCK; - // av_dict_set(&pFormatContext->interrupt_callback.callback, "timeout", "3000", 0); - // iRet = avio_open2(&pFormatContext->pb, dataSourceConfig_.strUrl.c_str(), AVIO_FLAG_READ, NULL, NULL) < 0; - // { - // // 处理错误 - // LogError << "engineId_:" << engineId_ << "avio_open2 iRet=" << iRet; - // return nullptr; - // } - - iRet = avformat_find_stream_info(pFormatContext, nullptr); - if (iRet != 0) - { - LogError << "engineId_:" << engineId_ << " Couldn't find stream information, iRet = " << iRet; - return nullptr; - } - return pFormatContext; -} - -//av_read_frame的中断回调函数 -// int CameraEngine::InterruptCallback(void *pData) -// { -// TimeoutContext* pTimeOutCtx = (TimeoutContext*)pData; -// LogDebug << "InterruptCallback i64Timeout:" << pTimeOutCtx->i64Timeout; -// return std::chrono::duration_cast( -// std::chrono::system_clock::now().time_since_epoch()) -// .count() >= pTimeOutCtx->i64Timeout -// ? 
AVERROR_EXIT -// : 0; -// } - -APP_ERROR CameraEngine::Process() -{ - int iRet = APP_ERR_OK; - if (!bUseEngine_) - { - LogWarn << "engineId_:" << engineId_ << " not use engine"; - return APP_ERR_OK; - } - - if (bHwDecode_) - { - iRet = ConnectCamera(); - if (iRet == APP_ERR_OK) - { - LogInfo << "engineId_:" << engineId_ << " Start the stream......"; - bReconnectFlag_ = false; - } - else - { - ResetCamera(); - bReconnectFlag_ = true; - } - - // Pull data cyclically - AVPacket pkt; - - while (!isStop_) - { - //重连相机 - if (bReconnectFlag_) - { - iRet = ConnectCamera(); - if (iRet == APP_ERR_OK) - { - LogInfo << "engineId_:" << engineId_ << " Start the stream......"; - bReconnectFlag_ = false; - } - else - { - outputQueMap_[strPort1_]->push(std::static_pointer_cast(std::make_shared("摄像头连接失败!"))); - ResetCamera(); - std::this_thread::sleep_for(std::chrono::seconds(3)); //3秒后重连 - continue; - } - } - - //设置av_read_frame中断函数 (中断函数中超过1s,则中断处理) - // TimeoutContext timeoutCtx = { std::chrono::duration_cast(std::chrono::system_clock::now().time_since_epoch()).count() + 1000 }; - // pFormatCtx_->interrupt_callback.callback = &CameraEngine::InterruptCallback; - // pFormatCtx_->interrupt_callback.opaque = &timeoutCtx; - - av_init_packet(&pkt); //init pkt - - iRet = av_read_frame(pFormatCtx_, &pkt); //需要一直读取,否则获取到的是历史数据 - if (iRet != 0) - { - outputQueMap_[strPort1_]->push(std::static_pointer_cast(std::make_shared("图像读取失败!"))); - LogError << "engineId_:" << engineId_ << " Read frame failed, reconnect iRet:" << iRet; - av_packet_unref(&pkt); - - //重连相机 - ResetCamera(); - - bReconnectFlag_ = true; - continue; - } - else if (pkt.stream_index == iVideoStream_) //只解码视频流 - { -// LogDebug << "iRet:" << iRet << " pkt.size:" << pkt.size; - if (pkt.size <= 0) - { - LogError << "engineId_:" << engineId_ << " Invalid pkt.size: " << pkt.size; - av_packet_unref(&pkt); - continue; - } - if (dataSourceConfig_.strUrl.find(".mp4") != std::string::npos) - { - const char szStartCode[4] = {0, 0, 0, 1}; - if (bIsAvc_ || memcmp(szStartCode, pkt.data, 4) != 0) - { // is avc1 code, have no start code of H264 - int iLen = 0; - uint8_t *p = pkt.data; - bIsAvc_ = true; - do - { // add start_code for each NAL, one frame may have multi NALs. 
- iLen = ntohl(*((long *)p)); - memcpy(p, szStartCode, 4); - p += 4; - p += iLen; - if (p >= pkt.data + pkt.size) - { - break; - } - } while (1); - } - } - - void* pH264Buffer = nullptr; - pH264Buffer = new uint8_t[pkt.size]; - memcpy(pH264Buffer, pkt.data, pkt.size); - //组织数据 - std::shared_ptr pProcessData = std::make_shared(); - pProcessData->iWidth = frameInfo_.iWidth; - pProcessData->iHeight = frameInfo_.iHeight; - pProcessData->iRate = frameInfo_.iRate; - pProcessData->i64TimeStamp = MyUtils::getins()->GetCurrentTimeMillis(); - pProcessData->iDataSource = engineId_; - pProcessData->iSize = pkt.size; - pProcessData->pData.reset(pH264Buffer, [](void* data){if(data) {delete[] data; data = nullptr;}}); //智能指针管理内存 - - //push端口0,视频解码 - iRet = outputQueMap_[strPort0_]->push(std::static_pointer_cast(pProcessData)); - if (iRet != APP_ERR_OK) - { - LogError << "engineId_:" << engineId_ << "push the h264 frame data failed..."; - } - } - else if (pkt.stream_index == iAudioStream_) - { - //音频流不处理。 - } - else - { - LogError << "engineId_:" << engineId_ << " stream err stream_index:" << pkt.stream_index; - } - av_packet_unref(&pkt); //unref - - if (dataSourceConfig_.strUrl.find("rtsp:") == std::string::npos) // 如果不是rtsp,定时发送 - { - usleep(DELAY_TIME); // delay 40ms - } - } - } - else - { - //从摄像头RTSP拉流 - const std::string videoStreamAddress = std::string("rtspsrc location=") + dataSourceConfig_.strUrl.c_str() + " latency=10 ! \ - rtph264depay ! h264parse ! nvv4l2decoder enable-max-performance=1 enable-frame-type-reporting=1 ! nvvidconv ! video/x-raw, format=(string)BGRx ! videoconvert ! appsink"; - - VideoCapture capture_video; - while(!capture_video.open(videoStreamAddress)){ //, cv::CAP_FFMPEG - LogInfo<<"Restart Opening video stream or file ..."< pBGRFrameData = std::make_shared(); - std::shared_ptr pProcessData = std::make_shared(); - if(!capture_video.read(frame)) { - capture_video.release(); - while(!capture_video.open(videoStreamAddress)){ //, cv::CAP_FFMPEG - LogInfo<<"Restart Opening video stream or file ..."<iDataSource = engineId_; - pBGRFrameData->iFrameId = nFrameid++; - pBGRFrameData->iSize = resizepBGRBuffer_Size; - pBGRFrameData->frameInfo.iWidth = IMAGE_WIDTH; - pBGRFrameData->frameInfo.iHeight = IMAGE_HEIGHT; - pBGRFrameData->frameInfo.iRate = frameRate; - pProcessData->pVoidData = std::static_pointer_cast(pBGRFrameData); - pProcessData->pData.reset(resizeBGRBufferobj, [](void* data){if(data) {delete[] data; data = nullptr;}}); - if (nFrameid >= 0xFFFFFFFF) {nFrameid = 0;} - pBGRFrameData->i64TimeStamp = MyUtils::getins()->GetCurrentTimeMillis(); - pProcessData->iWidth = pBGRFrameData->frameInfo.iWidth; - pProcessData->iHeight = pBGRFrameData->frameInfo.iHeight; - pProcessData->iHeight = pBGRFrameData->frameInfo.iRate; - iRet = outputQueMap_[strPort0_]->push(std::static_pointer_cast(pProcessData)); - } - - } - - return APP_ERR_OK; -} +#include "CameraEngine.h" #include "myutils.h" using namespace ai_matrix; namespace { const int LOW_THRESHOLD = 128; const int MAX_THRESHOLD = 4096; const uint16_t DELAY_TIME = 10000; } CameraEngine::CameraEngine() {} CameraEngine::~CameraEngine() {} APP_ERROR CameraEngine::Init() { bUseEngine_ = true; bHwDecode_ = MyYaml::GetIns()->GetBoolValue("gc_hardware_decode"); dataSourceConfig_ = MyYaml::GetIns()->GetDataSourceConfigById(engineId_); //获取摄像机参数 if (MyYaml::GetIns()->GetStringValue("gc_data_source") != "camera" || !dataSourceConfig_.bUse) { bUseEngine_ = false; LogWarn << "engineId_:" << engineId_ << " not use engine"; return APP_ERR_OK; } 
strPort0_ = engineName_ + "_" + std::to_string(engineId_) + "_0"; strPort1_ = engineName_ + "_" + std::to_string(engineId_) + "_1"; nDelayTime = MyYaml::GetIns()->GetIntValue("gc_load_delay"); LogInfo << "engineId_:" << engineId_ << " CameraEngine Init ok"; return APP_ERR_OK; } APP_ERROR CameraEngine::DeInit() { if (!bUseEngine_) { LogWarn << "engineId_:" << engineId_ << " not use engine"; return APP_ERR_OK; } ResetCamera(); LogInfo << "engineId_:" << engineId_ << " CameraEngine DeInit ok"; return APP_ERR_OK; } void CameraEngine::ResetCamera() { if (pFormatCtx_ != nullptr) { // clear th cache of the queue avformat_close_input(&pFormatCtx_); pFormatCtx_ = nullptr; } } APP_ERROR CameraEngine::ConnectCamera() { pFormatCtx_ = CreateFormatContext(); // create context if (pFormatCtx_ == nullptr) { LogError << "engineId_:" << engineId_ << " pFormatCtx_ null!"; return APP_ERR_COMM_FAILURE; } //0-代表输入 av_dump_format(pFormatCtx_, 0, dataSourceConfig_.strUrl.c_str(), 0); // get stream infomation int iRet = APP_ERR_OK; iRet = GetStreamInfo(); if (iRet != APP_ERR_OK) { LogError << "engineId_:" << engineId_ << " Stream Info Check failed, iRet = " << iRet; return APP_ERR_COMM_FAILURE; } return APP_ERR_OK; } APP_ERROR CameraEngine::GetStreamInfo() { if (pFormatCtx_ != nullptr) { iVideoStream_ = -1; iAudioStream_ = -1; //frameInfo_.iFrameId = 0; //帧号从0开始 for (unsigned int i = 0; i < pFormatCtx_->nb_streams; i++) { AVStream *inStream = pFormatCtx_->streams[i]; if (inStream->codecpar->codec_type == AVMEDIA_TYPE_VIDEO) { iVideoStream_ = i; frameInfo_.iHeight = inStream->codecpar->height; frameInfo_.iWidth = inStream->codecpar->width; //获取帧率,帧率的打印都在流中的两个成员.且应取平均帧率为先,为{x,0}或者{0,1}则取实时帧率 if (inStream->avg_frame_rate.den == 0 || (inStream->avg_frame_rate.num == 0 && inStream->avg_frame_rate.den == 1)) { frameInfo_.iRate = inStream->r_frame_rate.num / inStream->r_frame_rate.den; } else { frameInfo_.iRate = inStream->avg_frame_rate.num / inStream->avg_frame_rate.den; } LogDebug << "engineId_:" << engineId_ << " width:" << frameInfo_.iWidth << " height:" << frameInfo_.iHeight << " rate:" << frameInfo_.iRate << " iVideoStream_:" << iVideoStream_; } else if (inStream->codecpar->codec_type == AVMEDIA_TYPE_AUDIO) { iAudioStream_ = i; LogDebug << "engineId_:" << engineId_ << " iAudioStream_:" << iAudioStream_; } } if (iVideoStream_ == -1) { LogError << "engineId_:" << engineId_ << " Didn't find a video stream!"; return APP_ERR_COMM_FAILURE; } if (frameInfo_.iHeight < LOW_THRESHOLD || frameInfo_.iWidth < LOW_THRESHOLD || frameInfo_.iHeight > MAX_THRESHOLD || frameInfo_.iWidth > MAX_THRESHOLD) { LogError << "engineId_:" << engineId_ << " Size of frame is not supported in DVPP Video Decode!"; return APP_ERR_COMM_FAILURE; } AVCodecID codecId = pFormatCtx_->streams[iVideoStream_]->codecpar->codec_id; if (codecId == AV_CODEC_ID_H264) { int profile = pFormatCtx_->streams[iVideoStream_]->codecpar->profile; if (profile == FF_PROFILE_H264_BASELINE) { frameInfo_.format = H264_BASELINE_LEVEL; } else if (profile == FF_PROFILE_H264_MAIN) { frameInfo_.format = H264_MAIN_LEVEL; } else if (profile == FF_PROFILE_H264_HIGH) { frameInfo_.format = H264_HIGH_LEVEL; } else { LogError << "engineId_:" << engineId_ << " not support h264 profile"; return APP_ERR_COMM_FAILURE; } } else if (codecId == AV_CODEC_ID_H265) { int profile = pFormatCtx_->streams[iVideoStream_]->codecpar->profile; if (profile == FF_PROFILE_HEVC_MAIN) { frameInfo_.format = H265_MAIN_LEVEL; } else { LogError << "engineId_:" << engineId_ << " not support h265 profile"; return 
APP_ERR_COMM_FAILURE; } } else { LogError << "engineId_:" << engineId_ << " Error unsupported format" << codecId; return APP_ERR_COMM_FAILURE; } } return APP_ERR_OK; } AVFormatContext *CameraEngine::CreateFormatContext() { // create message for stream pull AVFormatContext *pFormatContext = nullptr; AVDictionary *pOptions = nullptr; // formatContext = avformat_alloc_context(); if (dataSourceConfig_.strUrl.find("rtsp:") != std::string::npos) // rtsp { av_dict_set(&pOptions, "rtsp_transport", "tcp", 0); // 指定其传输方式为TCP // av_dict_set(&pOptions, "stimeout", "3000000", 0); // 设置超时3秒 // av_dict_set(&pOptions, "rw_timeout", "3000", 0); //单位:ms av_dict_set(&pOptions, "timeout", "3000000", 0); //设置超时时间为3秒 } //av_register_all(); //注册所有支持的格式(这里一定注册这些,否则会因为协议解析问题报错!!!) //avcodec_register_all(); //注册编解码器 //avformat_network_init(); //注册网格格式,如果为本地文件则可以去掉该代码 int iRet = avformat_open_input(&pFormatContext, dataSourceConfig_.strUrl.c_str(), nullptr, &pOptions); if (nullptr != pOptions) { av_dict_free(&pOptions); } if (iRet != 0) { LogError << "engineId_:" << engineId_ << " Couldn't open input stream " << dataSourceConfig_.strUrl.c_str() << ", iRet=" << iRet; return nullptr; } // pFormatContext->flags |= AVFMT_FLAG_NONBLOCK; // pFormatContext->pb->flags |= AVIO_FLAG_NONBLOCK; // av_dict_set(&pFormatContext->interrupt_callback.callback, "timeout", "3000", 0); // iRet = avio_open2(&pFormatContext->pb, dataSourceConfig_.strUrl.c_str(), AVIO_FLAG_READ, NULL, NULL) < 0; // { // // 处理错误 // LogError << "engineId_:" << engineId_ << "avio_open2 iRet=" << iRet; // return nullptr; // } iRet = avformat_find_stream_info(pFormatContext, nullptr); if (iRet != 0) { LogError << "engineId_:" << engineId_ << " Couldn't find stream information, iRet = " << iRet; return nullptr; } return pFormatContext; } //av_read_frame的中断回调函数 // int CameraEngine::InterruptCallback(void *pData) // { // TimeoutContext* pTimeOutCtx = (TimeoutContext*)pData; // LogDebug << "InterruptCallback i64Timeout:" << pTimeOutCtx->i64Timeout; // return std::chrono::duration_cast( // std::chrono::system_clock::now().time_since_epoch()) // .count() >= pTimeOutCtx->i64Timeout // ? 
AVERROR_EXIT // : 0; // } APP_ERROR CameraEngine::Process() { int iRet = APP_ERR_OK; if (!bUseEngine_) { LogWarn << "engineId_:" << engineId_ << " not use engine"; return APP_ERR_OK; } if (bHwDecode_) { iRet = ConnectCamera(); if (iRet == APP_ERR_OK) { LogInfo << "engineId_:" << engineId_ << " Start the stream......"; bReconnectFlag_ = false; } else { ResetCamera(); bReconnectFlag_ = true; } // Pull data cyclically AVPacket pkt; while (!isStop_) { //重连相机 if (bReconnectFlag_) { iRet = ConnectCamera(); if (iRet == APP_ERR_OK) { LogInfo << "engineId_:" << engineId_ << " Start the stream......"; bReconnectFlag_ = false; } else { outputQueMap_[strPort1_]->push(std::static_pointer_cast(std::make_shared("摄像头连接失败!"))); ResetCamera(); std::this_thread::sleep_for(std::chrono::seconds(3)); //3秒后重连 continue; } } //设置av_read_frame中断函数 (中断函数中超过1s,则中断处理) // TimeoutContext timeoutCtx = { std::chrono::duration_cast(std::chrono::system_clock::now().time_since_epoch()).count() + 1000 }; // pFormatCtx_->interrupt_callback.callback = &CameraEngine::InterruptCallback; // pFormatCtx_->interrupt_callback.opaque = &timeoutCtx; av_init_packet(&pkt); //init pkt iRet = av_read_frame(pFormatCtx_, &pkt); //需要一直读取,否则获取到的是历史数据 if (iRet != 0) { outputQueMap_[strPort1_]->push(std::static_pointer_cast(std::make_shared("图像读取失败!"))); LogError << "engineId_:" << engineId_ << " Read frame failed, reconnect iRet:" << iRet; av_packet_unref(&pkt); //重连相机 ResetCamera(); bReconnectFlag_ = true; continue; } else if (pkt.stream_index == iVideoStream_) //只解码视频流 { // LogDebug << "iRet:" << iRet << " pkt.size:" << pkt.size; if (pkt.size <= 0) { LogError << "engineId_:" << engineId_ << " Invalid pkt.size: " << pkt.size; av_packet_unref(&pkt); continue; } if (dataSourceConfig_.strUrl.find(".mp4") != std::string::npos) { const char szStartCode[4] = {0, 0, 0, 1}; if (bIsAvc_ || memcmp(szStartCode, pkt.data, 4) != 0) { // is avc1 code, have no start code of H264 int iLen = 0; uint8_t *p = pkt.data; bIsAvc_ = true; do { // add start_code for each NAL, one frame may have multi NALs. iLen = ntohl(*((long *)p)); memcpy(p, szStartCode, 4); p += 4; p += iLen; if (p >= pkt.data + pkt.size) { break; } } while (1); } } void* pH264Buffer = nullptr; pH264Buffer = new uint8_t[pkt.size]; memcpy(pH264Buffer, pkt.data, pkt.size); //组织数据 std::shared_ptr pProcessData = std::make_shared(); pProcessData->iWidth = frameInfo_.iWidth; pProcessData->iHeight = frameInfo_.iHeight; pProcessData->iRate = frameInfo_.iRate; pProcessData->i64TimeStamp = MyUtils::getins()->GetCurrentTimeMillis(); pProcessData->iDataSource = engineId_; pProcessData->iSize = pkt.size; pProcessData->pData.reset(pH264Buffer, [](void* data){if(data) {delete[] data; data = nullptr;}}); //智能指针管理内存 //push端口0,视频解码 iRet = outputQueMap_[strPort0_]->push(std::static_pointer_cast(pProcessData)); if (iRet != APP_ERR_OK) { LogError << "engineId_:" << engineId_ << "push the h264 frame data failed..."; } } else if (pkt.stream_index == iAudioStream_) { //音频流不处理。 } else { LogError << "engineId_:" << engineId_ << " stream err stream_index:" << pkt.stream_index; } av_packet_unref(&pkt); //unref if (dataSourceConfig_.strUrl.find("rtsp:") == std::string::npos) // 如果不是rtsp,定时发送 { usleep(DELAY_TIME); // delay 40ms } } } else { //从摄像头RTSP拉流 const std::string videoStreamAddress = std::string("rtspsrc location=") + dataSourceConfig_.strUrl.c_str() + " latency=10 ! \ rtph264depay ! h264parse ! nvv4l2decoder enable-max-performance=1 enable-frame-type-reporting=1 ! nvvidconv ! video/x-raw, format=(string)BGRx ! 
videoconvert ! appsink"; VideoCapture capture_video; while(!capture_video.open(videoStreamAddress)){ //, cv::CAP_FFMPEG LogInfo<<"Restart Opening video stream or file ..."< pBGRFrameData = std::make_shared(); std::shared_ptr pProcessData = std::make_shared(); if(!capture_video.read(frame)) { capture_video.release(); while(!capture_video.open(videoStreamAddress)){ //, cv::CAP_FFMPEG LogInfo<<"Restart Opening video stream or file ..."<iDataSource = engineId_; pBGRFrameData->iFrameId = nFrameid++; pBGRFrameData->iSize = resizepBGRBuffer_Size; pBGRFrameData->frameInfo.iWidth = IMAGE_WIDTH; pBGRFrameData->frameInfo.iHeight = IMAGE_HEIGHT; pBGRFrameData->frameInfo.iRate = frameRate; pProcessData->pVoidData = std::static_pointer_cast(pBGRFrameData); pProcessData->pData.reset(resizeBGRBufferobj, [](void* data){if(data) {delete[] data; data = nullptr;}}); if (nFrameid >= 0xFFFFFFFF) {nFrameid = 0;} pBGRFrameData->i64TimeStamp = MyUtils::getins()->GetCurrentTimeMillis(); pProcessData->iWidth = pBGRFrameData->frameInfo.iWidth; pProcessData->iHeight = pBGRFrameData->frameInfo.iHeight; pProcessData->iHeight = pBGRFrameData->frameInfo.iRate; iRet = outputQueMap_[strPort0_]->push(std::static_pointer_cast(pProcessData)); } } return APP_ERR_OK; } \ No newline at end of file diff --git a/nvidia_ascend_engine/common_engine/DataUploadEngine/ResultToHttpSrvEngine.cpp b/nvidia_ascend_engine/common_engine/DataUploadEngine/ResultToHttpSrvEngine.cpp index f7fe5e0..05b03cb 100644 --- a/nvidia_ascend_engine/common_engine/DataUploadEngine/ResultToHttpSrvEngine.cpp +++ b/nvidia_ascend_engine/common_engine/DataUploadEngine/ResultToHttpSrvEngine.cpp @@ -1,445 +1 @@ -#include "ResultToHttpSrvEngine.h" -#include "myutils.h" - - -ResultToHttpSrvEngine::ResultToHttpSrvEngine() {} -ResultToHttpSrvEngine::~ResultToHttpSrvEngine() {} - -APP_ERROR ResultToHttpSrvEngine::Init() -{ - strPort0_ = engineName_ + "_" + std::to_string(engineId_) + "_0"; - strUsername_ = MyYaml::GetIns()->GetStringValue("username"); - strPassword_ = MyYaml::GetIns()->GetStringValue("password"); - strURL_ = MyYaml::GetIns()->GetStringValue("gc_http_url"); - strGetTokenURL_ = MyYaml::GetIns()->GetStringValue("gc_gettoken_url"); - strImageSrv_ = MyYaml::GetIns()->GetPathValue("gc_image_srv"); - strPoundNo_ = MyYaml::GetIns()->GetStringValue("atlas_poundno"); - strFailSavePath_ = MyYaml::GetIns()->GetPathValue("gc_result_path") + "httpfailcontent.csv"; - strFailSaveBakPath_ = MyYaml::GetIns()->GetPathValue("gc_result_path") + "httpfailcontent_bak.csv"; - LogInfo << "ResultToHttpSrvEngine Init ok"; - return APP_ERR_OK; -} - -APP_ERROR ResultToHttpSrvEngine::DeInit() -{ - LogDebug << "curl_easy_cleanup"; - curl_easy_cleanup(pCurl_); - - /* 这个处理移动到main中, 防止多线程调用 - LogDebug << "内存清理"; - curl_global_cleanup(); - */ - LogInfo << "ResultToHttpSrvEngine DeInit ok"; - return APP_ERR_OK; -} - -/** -* libcurl回调函数 -* inParam : void *pBuffer 回调内容地址 - : size_t size 回调单个数据大小 - : size_t nmemb 回调数据个数 -* outParam: std::string &strResp 返回内容 -* return : 回调数据大小 -*/ -size_t ResultToHttpSrvEngine::WriteCallBack(void *pBuffer, size_t size, size_t nmemb, std::string &strResp) -{ - size_t sizes = size * nmemb; - std::string strTemp((char*)pBuffer, sizes); - strResp += strTemp; - return sizes; -} - -/** -* 调用http接口获取token -* inParam : -* outParam: std::string &strBladeAuth 返回的token信息 -* return : true:成功; false:失败 -*/ -bool ResultToHttpSrvEngine::GetToken(std::string &strBladeAuth) -{ - //1. 
获得curl操作符 - if (nullptr == pCurl_) - { - LogDebug<<"pCurl_ is null, invoke curl_easy_init"; - pCurl_ = curl_easy_init(); - if (nullptr == pCurl_) - { - LogError << "curl_easy_init failed !"; - return false; - } - } - - //2. 设置head, 和表单 - //设置head信息 - struct curl_slist *pHeaderList = nullptr; - pHeaderList = curl_slist_append(pHeaderList, "Authorization:Basic Y2xpZW50X2VudGVycHJpc2U6Y2xpZW50X2VudGVycHJpc2Vfc2VjcmV0"); - - //设置表单信息 - curl_mime *pMultipart = curl_mime_init(pCurl_); - curl_mimepart *pPart = curl_mime_addpart(pMultipart); - curl_mime_name(pPart, "username"); - curl_mime_data(pPart, strUsername_.c_str(), CURL_ZERO_TERMINATED); - pPart = curl_mime_addpart(pMultipart); - curl_mime_name(pPart, "password"); - curl_mime_data(pPart, strPassword_.c_str(), CURL_ZERO_TERMINATED); - pPart = curl_mime_addpart(pMultipart); - curl_mime_name(pPart, "tenantId"); - curl_mime_data(pPart, "000000", CURL_ZERO_TERMINATED); - pPart = curl_mime_addpart(pMultipart); - curl_mime_name(pPart, "grant_type"); - curl_mime_data(pPart, "password", CURL_ZERO_TERMINATED); - - curl_easy_setopt(pCurl_, CURLOPT_CONNECTTIMEOUT, 1); //连接超时(1s连接不上服务器返回超时) - curl_easy_setopt(pCurl_, CURLOPT_URL, strGetTokenURL_.c_str()); //设置url - curl_easy_setopt(pCurl_, CURLOPT_HTTPHEADER, pHeaderList); //设置报文头 - curl_easy_setopt(pCurl_, CURLOPT_MIMEPOST, pMultipart); //设置表单 - //curl_easy_setopt(pCurl_, CURLOPT_POSTFIELDS, strBody.c_str()); //设置post内容 - //curl_easy_setopt(pCurl_, CURLOPT_POST, 1); //设置操作为POST(为非0表示post) - curl_easy_setopt(pCurl_, CURLOPT_WRITEFUNCTION, WriteCallBack); //设置回调函数 - std::string strResponse; - curl_easy_setopt(pCurl_, CURLOPT_WRITEDATA, &strResponse); //设置回调参数 - - //3. 执行http请求 - CURLcode res = curl_easy_perform(pCurl_); - curl_slist_free_all(pHeaderList); //清除headerlist - curl_mime_free(pMultipart); //清除curl_mime - curl_easy_reset(pCurl_); //重置curl - if (res != CURLE_OK) - { - LogError << " curl_easy_perform fail:" << curl_easy_strerror(res); - return false; - } - - //4. 执行成功解析响应内容 - Json::CharReaderBuilder readerBuilder; - std::shared_ptr reader(readerBuilder.newCharReader()); - Json::Value jvResponse; - JSONCPP_STRING errs; - if (!reader->parse(strResponse.data(), strResponse.data() + strResponse.size(), &jvResponse, &errs)) - { - LogError << " response content fail " << strResponse; - return false; - } - LogDebug << "GetToken resp:" << strResponse; - - strBladeAuth += jvResponse["token_type"].asString(); - strBladeAuth += " "; - strBladeAuth += jvResponse["access_token"].asString(); - return true; -} - -/** -* 列车信息提交http接口 -* inParam : Json::Value &jvRequest 提交内容 -* outParam: -* return : true:提交成功; false:提交失败 -*/ -bool ResultToHttpSrvEngine::ResultToHttpSrv(Json::Value &jvRequest) -{ - //获取token - std::string strBladeAuth("blade-auth:"); - if (!GetToken(strBladeAuth)) - { - LogError << "comeTime:" << jvRequest["comeTime"].asString() << " carriageOrder:" << jvRequest["carriageOrder"].asInt() << " GetToken fail "; - return false; - } - LogDebug << "strBladeAuth:" << strBladeAuth; - - //1. 获得curl操作符 - if (nullptr == pCurl_) - { - LogDebug<<"pCurl_ is null, invoke curl_easy_init"; - pCurl_ = curl_easy_init(); - if (nullptr == pCurl_) - { - LogError << "curl_easy_init failed !"; - return false; - } - } - - //2. 
设置http请求信息 - Json::StreamWriterBuilder jswBuilder; - std::string strRequest = Json::writeString(jswBuilder, jvRequest); - LogDebug << "to http:" << strRequest; - std::string strResponse; - struct curl_slist *pHeaderList = nullptr; - pHeaderList = curl_slist_append(pHeaderList, "Accept:application/json"); - pHeaderList = curl_slist_append(pHeaderList, "Content-Type:application/json"); - pHeaderList = curl_slist_append(pHeaderList, "charset:utf-8"); - pHeaderList = curl_slist_append(pHeaderList, strBladeAuth.c_str()); - curl_easy_setopt(pCurl_, CURLOPT_CONNECTTIMEOUT, 1); //连接超时(1s连接不上服务器返回超时) - curl_easy_setopt(pCurl_, CURLOPT_URL, strURL_.c_str()); //设置url - curl_easy_setopt(pCurl_, CURLOPT_HTTPHEADER, pHeaderList); //设置报文头 - curl_easy_setopt(pCurl_, CURLOPT_POSTFIELDS, strRequest.c_str()); //设置post内容 - curl_easy_setopt(pCurl_, CURLOPT_POST, 1); //设置操作为POST(为非0表示post) - curl_easy_setopt(pCurl_, CURLOPT_WRITEFUNCTION, WriteCallBack); //设置回调函数 - curl_easy_setopt(pCurl_, CURLOPT_WRITEDATA, &strResponse); //设置回调参数 - - //3. 执行http请求 - CURLcode res = curl_easy_perform(pCurl_); - curl_slist_free_all(pHeaderList); //清除headerlist - curl_easy_reset(pCurl_); //重置curl - if (res != CURLE_OK) - { - LogError << "comeTime:" << jvRequest["comeTime"].asString() << " carriageOrder:" << jvRequest["carriageOrder"].asInt() - << " curl_easy_perform fail:" << curl_easy_strerror(res); - return false; - } - - //4. 执行成功解析响应内容 - LogInfo << "http resp:" << strResponse; - Json::CharReaderBuilder readerBuilder; - std::shared_ptr reader(readerBuilder.newCharReader()); - Json::Value jvResponse; - JSONCPP_STRING errs; - if (!reader->parse(strResponse.data(), strResponse.data() + strResponse.size(), &jvResponse, &errs)) - { - LogError << "comeTime:" << jvRequest["comeTime"].asString() << " carriageOrder:" << jvRequest["carriageOrder"].asInt() - << " response content fail " << strResponse; - return false; - } - - if (!jvResponse["success"].asBool()) - { - LogError << "comeTime:" << jvRequest["comeTime"].asString() << " carriageOrder:" << jvRequest["carriageOrder"].asInt() - << " response fail"; - return false; - } - - LogInfo << "comeTime:" << jvRequest["comeTime"].asString() << " carriageOrder:" << jvRequest["carriageOrder"].asInt() - << " post success"; - return true; -} - -/** -* 处理上传失败的信息 -* inParam : N/A -* outParam: N/A -* return : N/A -*/ -void ResultToHttpSrvEngine::DealHttpFailInfo() -{ - //队列有待处理数据,则先不处理异常数据。 - if (inputQueMap_[strPort0_]->getSize() > 0) - { - LogDebug << "have new data to process"; - return; - } - - //文件不存在不处理 - if (access(strFailSavePath_.c_str(), F_OK) == -1) - { - LogDebug << "no exit file:" << strFailSavePath_; - return; - } - - bool bAllSucc = true; - std::ifstream inFile(strFailSavePath_.c_str(), std::ios::in); - if (!inFile.is_open()) - { - LogError << strFailSavePath_ << " open fail"; - return; - } - - int iDealCnt = 0; - std::string strLine; - while (getline(inFile, strLine)) - { - Json::CharReaderBuilder jsrBuilder; - std::shared_ptr reader(jsrBuilder.newCharReader()); - Json::Value jvRequest; - JSONCPP_STRING errs; - if (!reader->parse(strLine.data(), strLine.data() + strLine.size(), &jvRequest, &errs)) - { - LogError << "json parse fail content:" << strLine; - return; - } - - /* - 新数据到达后,异常数据还未开始处理,则直接关闭文件返回。先处理正常数据。 - 新数据到达后,异常数据处理中,则把异常未处理的数据全部当处理失败写入新文件中。先处理正常数据。 - */ - if (inputQueMap_[strPort0_]->getSize() > 0) - { - LogDebug << "Abnormal data processing, have new data to process"; - if (0 == iDealCnt) - { - LogDebug << "Abnormal data processing not start"; - inFile.close(); - 
return; - } - SaveHttpFailInfo(jvRequest, strFailSaveBakPath_); - bAllSucc = false; - continue; - } - - iDealCnt++; - - if (!ResultToHttpSrv(jvRequest)) - { - LogError << "re http post err:" << strLine; - //SaveHttpFailInfo(jvRequest, strFailSaveBakPath_); -// bAllSucc = false; - continue; - } - } - inFile.close(); - - if(bAllSucc) - { - //都处理成功,文件删除 - remove(strFailSavePath_.c_str()); - } - else - { - //部分处理成功,重命名后再次被处理 - rename(strFailSaveBakPath_.c_str(), strFailSavePath_.c_str()); - } -} - -/** -* 保存http上传失败的信息 -* inParam : Json::Value &jvRequest http失败信息 -* : std::string &strFilePath 保存路径 -* outParam: N/A -* return : true(成功);false(失败) -*/ -bool ResultToHttpSrvEngine::SaveHttpFailInfo(Json::Value &jvRequest, std::string &strFilePath) -{ - std::ofstream outFile; - outFile.open(strFilePath, std::ios::app); - if (!outFile.is_open()) - { - LogError << strFilePath << " open fail"; - return false; - } - - Json::StreamWriterBuilder jswBuilder; - jswBuilder["indentation"] = ""; - std::string strRequest = Json::writeString(jswBuilder, jvRequest); - - outFile << strRequest << std::endl; - outFile.close(); - return true; -} - - -APP_ERROR ResultToHttpSrvEngine::Process() -{ - int iRet = APP_ERR_OK; - if (0 == MyYaml::GetIns()->GetIntValue("gc_http_open")) - { - LogDebug << " gc_http_open value is 0"; - return APP_ERR_OK; - } - - while (!isStop_) - { - std::shared_ptr pVoidData0 = nullptr; - inputQueMap_[strPort0_]->pop(pVoidData0); - if (nullptr == pVoidData0) - { - usleep(1000); //1ms - - //无数据大于1分钟 - iNoDataCnt_++; - if (iNoDataCnt_ > (60 * 1000)) - { - DealHttpFailInfo(); - iNoDataCnt_ = 0; - } - continue; - } - iNoDataCnt_ = 0; - - std::shared_ptr pTrain = std::static_pointer_cast(pVoidData0); - - ai_matrix::DataSourceConfig dataSourceConfig = MyYaml::GetIns()->GetDataSourceConfigById(pTrain->trainNum.iDataSource); //获取摄像机参数 - char szCameraNo[4] = {0}; - sprintf(szCameraNo, "%03d", pTrain->trainNum.iDataSource + 1); - - char szNumImgPath[64] = {0}; //车号最优图片路径 - if (!pTrain->trainNum.strBestImg.empty()) - { - sprintf(szNumImgPath, "/%03d/%s", pTrain->trainNum.iDataSource + 1, pTrain->trainNum.strBestImg.c_str()); - } - - char szProImgPath[64] = {0}; //属性最优图片路径 - if (!pTrain->trainPro.strBestImg.empty()) - { - sprintf(szProImgPath, "/%03d/%s", pTrain->trainPro.iDataSource + 1, pTrain->trainPro.strBestImg.c_str()); - } - - // char szChkDateImgPath[64] = {0}; //定检期最优图片路径 - // if (!pTrain->chkDate.strBestImg.empty()) - // { - // sprintf(szChkDateImgPath, "%03d/%s", pTrain->chkDate.iDataSource + 1, pTrain->chkDate.strBestImg.c_str()); - // } - - // char szContainer1ImgPath[64] = {0}; //集装箱1最优图片路径 - // if (!pTrain->container1.strBestImg.empty()) - // { - // sprintf(szContainer1ImgPath, "%03d/%s", pTrain->container1.iDataSource + 1, pTrain->container1.strBestImg.c_str()); - // } - - // char szContainer2ImgPath[64] = {0}; //集装箱2最优图片路径 - // if (!pTrain->container2.strBestImg.empty()) - // { - // sprintf(szContainer2ImgPath, "%03d/%s", pTrain->container2.iDataSource + 1, pTrain->container2.strBestImg.c_str()); - // } - - std::string strTime = pTrain->strTrainName; - strTime = MyUtils::getins()->replace_all_distinct(strTime, std::string("-"), std::string(":")); - int iCategory = 0; - if (pTrain->trainNum.iTrainTypeId == 3) - { - iCategory = 0; - } - else if(pTrain->trainNum.iTrainTypeId == 2) - { - iCategory = 1; - } - else if (pTrain->trainNum.iTrainTypeId == 6) - { - iCategory = 2; - } - else if (pTrain->trainNum.iTrainTypeId == 0) - { - iCategory = 3; - } - - //组装post信息 - Json::Value jvRequest; - 
Json::Value jvSubObj; - jvSubObj["poundNo"] = strPoundNo_; // 股道号 - jvRequest["trainParams"] = jvSubObj; - jvRequest["carriageType"] = pTrain->trainNum.strTrainType; // 车型 - jvRequest["carriageNumber"] = pTrain->trainNum.strTrainNum; // 车厢号 - jvRequest["carriageOrder"] = pTrain->iCarXH; // 车节号 - jvRequest["cameraNumber"] = szCameraNo; // 摄像头编号 - jvRequest["carriageTareweight"] = pTrain->trainPro.strSelf; // 皮重 - jvRequest["carriageLoad"] = pTrain->trainPro.strLoad; // 载重 - jvRequest["carriageVolume"] = pTrain->trainPro.strVolume; // 容积 - jvRequest["carriageChangelength"] = pTrain->trainPro.strChange; // 换长 - jvRequest["proImageName"] = strImageSrv_ + pTrain->strTrainDate + "/" + pTrain->strTrainName + szProImgPath; // 属性图片 - jvRequest["numImageName"] = strImageSrv_ + pTrain->strTrainDate + "/" + pTrain->strTrainName + szNumImgPath; // 车号图片 - jvRequest["comeTime"] = pTrain->strTrainDate + " " + strTime; // 来车时间 - jvRequest["carriageCategory"] = iCategory; // 车厢类别:0敞车,1:漏洞矿车,2:平车,3:车头 - jvRequest["isTheLast"] = pTrain->bIsEnd ? 1 : 0; // 是否最后一节: 0:否,1:是 - jvRequest["startFrame"] = pTrain->iStartFrameId; //车厢开始帧 - jvRequest["endFrame"] = pTrain->iEndFrameId; //车厢结束帧 - jvRequest["skipFrame"] = dataSourceConfig.iSkipInterval; - jvRequest["collectTime"] = MyUtils::getins()->Stamp2Time(pTrain->i64EndTimeStamp, true);//车厢切分的时间 //跳帧 - if (!ResultToHttpSrv(jvRequest)) - { -// SaveHttpFailInfo(jvRequest, strFailSavePath_); - } - - //列车结束后再次处理失败的信息 - if (pTrain->bIsEnd) - { - DealHttpFailInfo(); - } - } - return APP_ERR_OK; -} +#include "ResultToHttpSrvEngine.h" #include "myutils.h" ResultToHttpSrvEngine::ResultToHttpSrvEngine() {} ResultToHttpSrvEngine::~ResultToHttpSrvEngine() {} APP_ERROR ResultToHttpSrvEngine::Init() { strPort0_ = engineName_ + "_" + std::to_string(engineId_) + "_0"; strUsername_ = MyYaml::GetIns()->GetStringValue("username"); strPassword_ = MyYaml::GetIns()->GetStringValue("password"); strURL_ = MyYaml::GetIns()->GetStringValue("gc_http_url"); strGetTokenURL_ = MyYaml::GetIns()->GetStringValue("gc_gettoken_url"); strImageSrv_ = MyYaml::GetIns()->GetPathValue("gc_image_srv"); strPoundNo_ = MyYaml::GetIns()->GetStringValue("atlas_poundno"); strFailSavePath_ = MyYaml::GetIns()->GetPathValue("gc_result_path") + "httpfailcontent.csv"; strFailSaveBakPath_ = MyYaml::GetIns()->GetPathValue("gc_result_path") + "httpfailcontent_bak.csv"; LogInfo << "ResultToHttpSrvEngine Init ok"; return APP_ERR_OK; } APP_ERROR ResultToHttpSrvEngine::DeInit() { LogDebug << "curl_easy_cleanup"; curl_easy_cleanup(pCurl_); /* 这个处理移动到main中, 防止多线程调用 LogDebug << "内存清理"; curl_global_cleanup(); */ LogInfo << "ResultToHttpSrvEngine DeInit ok"; return APP_ERR_OK; } /** * libcurl回调函数 * inParam : void *pBuffer 回调内容地址 : size_t size 回调单个数据大小 : size_t nmemb 回调数据个数 * outParam: std::string &strResp 返回内容 * return : 回调数据大小 */ size_t ResultToHttpSrvEngine::WriteCallBack(void *pBuffer, size_t size, size_t nmemb, std::string &strResp) { size_t sizes = size * nmemb; std::string strTemp((char*)pBuffer, sizes); strResp += strTemp; return sizes; } /** * 调用http接口获取token * inParam : * outParam: std::string &strBladeAuth 返回的token信息 * return : true:成功; false:失败 */ bool ResultToHttpSrvEngine::GetToken(std::string &strBladeAuth) { //1. 获得curl操作符 if (nullptr == pCurl_) { LogDebug<<"pCurl_ is null, invoke curl_easy_init"; pCurl_ = curl_easy_init(); if (nullptr == pCurl_) { LogError << "curl_easy_init failed !"; return false; } } //2. 
设置head, 和表单 //设置head信息 struct curl_slist *pHeaderList = nullptr; pHeaderList = curl_slist_append(pHeaderList, "Authorization:Basic Y2xpZW50X2VudGVycHJpc2U6Y2xpZW50X2VudGVycHJpc2Vfc2VjcmV0"); //设置表单信息 curl_mime *pMultipart = curl_mime_init(pCurl_); curl_mimepart *pPart = curl_mime_addpart(pMultipart); curl_mime_name(pPart, "username"); curl_mime_data(pPart, strUsername_.c_str(), CURL_ZERO_TERMINATED); pPart = curl_mime_addpart(pMultipart); curl_mime_name(pPart, "password"); curl_mime_data(pPart, strPassword_.c_str(), CURL_ZERO_TERMINATED); pPart = curl_mime_addpart(pMultipart); curl_mime_name(pPart, "tenantId"); curl_mime_data(pPart, "000000", CURL_ZERO_TERMINATED); pPart = curl_mime_addpart(pMultipart); curl_mime_name(pPart, "grant_type"); curl_mime_data(pPart, "password", CURL_ZERO_TERMINATED); curl_easy_setopt(pCurl_, CURLOPT_CONNECTTIMEOUT, 1); //连接超时(1s连接不上服务器返回超时) curl_easy_setopt(pCurl_, CURLOPT_URL, strGetTokenURL_.c_str()); //设置url curl_easy_setopt(pCurl_, CURLOPT_HTTPHEADER, pHeaderList); //设置报文头 curl_easy_setopt(pCurl_, CURLOPT_MIMEPOST, pMultipart); //设置表单 //curl_easy_setopt(pCurl_, CURLOPT_POSTFIELDS, strBody.c_str()); //设置post内容 //curl_easy_setopt(pCurl_, CURLOPT_POST, 1); //设置操作为POST(为非0表示post) curl_easy_setopt(pCurl_, CURLOPT_WRITEFUNCTION, WriteCallBack); //设置回调函数 std::string strResponse; curl_easy_setopt(pCurl_, CURLOPT_WRITEDATA, &strResponse); //设置回调参数 //3. 执行http请求 CURLcode res = curl_easy_perform(pCurl_); curl_slist_free_all(pHeaderList); //清除headerlist curl_mime_free(pMultipart); //清除curl_mime curl_easy_reset(pCurl_); //重置curl if (res != CURLE_OK) { LogError << " curl_easy_perform fail:" << curl_easy_strerror(res); return false; } //4. 执行成功解析响应内容 Json::CharReaderBuilder readerBuilder; std::shared_ptr reader(readerBuilder.newCharReader()); Json::Value jvResponse; JSONCPP_STRING errs; if (!reader->parse(strResponse.data(), strResponse.data() + strResponse.size(), &jvResponse, &errs)) { LogError << " response content fail " << strResponse; return false; } LogDebug << "GetToken resp:" << strResponse; strBladeAuth += jvResponse["token_type"].asString(); strBladeAuth += " "; strBladeAuth += jvResponse["access_token"].asString(); return true; } /** * 列车信息提交http接口 * inParam : Json::Value &jvRequest 提交内容 * outParam: * return : true:提交成功; false:提交失败 */ bool ResultToHttpSrvEngine::ResultToHttpSrv(Json::Value &jvRequest) { //获取token std::string strBladeAuth("blade-auth:"); if (!GetToken(strBladeAuth)) { LogError << "comeTime:" << jvRequest["comeTime"].asString() << " carriageOrder:" << jvRequest["carriageOrder"].asInt() << " GetToken fail "; return false; } LogDebug << "strBladeAuth:" << strBladeAuth; //1. 获得curl操作符 if (nullptr == pCurl_) { LogDebug<<"pCurl_ is null, invoke curl_easy_init"; pCurl_ = curl_easy_init(); if (nullptr == pCurl_) { LogError << "curl_easy_init failed !"; return false; } } //2. 
设置http请求信息 Json::StreamWriterBuilder jswBuilder; std::string strRequest = Json::writeString(jswBuilder, jvRequest); LogDebug << "to http:" << strRequest; std::string strResponse; struct curl_slist *pHeaderList = nullptr; pHeaderList = curl_slist_append(pHeaderList, "Accept:application/json"); pHeaderList = curl_slist_append(pHeaderList, "Content-Type:application/json"); pHeaderList = curl_slist_append(pHeaderList, "charset:utf-8"); pHeaderList = curl_slist_append(pHeaderList, strBladeAuth.c_str()); curl_easy_setopt(pCurl_, CURLOPT_CONNECTTIMEOUT, 1); //连接超时(1s连接不上服务器返回超时) curl_easy_setopt(pCurl_, CURLOPT_URL, strURL_.c_str()); //设置url curl_easy_setopt(pCurl_, CURLOPT_HTTPHEADER, pHeaderList); //设置报文头 curl_easy_setopt(pCurl_, CURLOPT_POSTFIELDS, strRequest.c_str()); //设置post内容 curl_easy_setopt(pCurl_, CURLOPT_POST, 1); //设置操作为POST(为非0表示post) curl_easy_setopt(pCurl_, CURLOPT_WRITEFUNCTION, WriteCallBack); //设置回调函数 curl_easy_setopt(pCurl_, CURLOPT_WRITEDATA, &strResponse); //设置回调参数 //3. 执行http请求 CURLcode res = curl_easy_perform(pCurl_); curl_slist_free_all(pHeaderList); //清除headerlist curl_easy_reset(pCurl_); //重置curl if (res != CURLE_OK) { LogError << "comeTime:" << jvRequest["comeTime"].asString() << " carriageOrder:" << jvRequest["carriageOrder"].asInt() << " curl_easy_perform fail:" << curl_easy_strerror(res); return false; } //4. 执行成功解析响应内容 LogInfo << "http resp:" << strResponse; Json::CharReaderBuilder readerBuilder; std::shared_ptr reader(readerBuilder.newCharReader()); Json::Value jvResponse; JSONCPP_STRING errs; if (!reader->parse(strResponse.data(), strResponse.data() + strResponse.size(), &jvResponse, &errs)) { LogError << "comeTime:" << jvRequest["comeTime"].asString() << " carriageOrder:" << jvRequest["carriageOrder"].asInt() << " response content fail " << strResponse; return false; } if (!jvResponse["success"].asBool()) { LogError << "comeTime:" << jvRequest["comeTime"].asString() << " carriageOrder:" << jvRequest["carriageOrder"].asInt() << " response fail"; return false; } LogInfo << "comeTime:" << jvRequest["comeTime"].asString() << " carriageOrder:" << jvRequest["carriageOrder"].asInt() << " post success"; return true; } /** * 处理上传失败的信息 * inParam : N/A * outParam: N/A * return : N/A */ void ResultToHttpSrvEngine::DealHttpFailInfo() { //队列有待处理数据,则先不处理异常数据。 if (inputQueMap_[strPort0_]->getSize() > 0) { LogDebug << "have new data to process"; return; } //文件不存在不处理 if (access(strFailSavePath_.c_str(), F_OK) == -1) { LogDebug << "no exit file:" << strFailSavePath_; return; } bool bAllSucc = true; std::ifstream inFile(strFailSavePath_.c_str(), std::ios::in); if (!inFile.is_open()) { LogError << strFailSavePath_ << " open fail"; return; } int iDealCnt = 0; std::string strLine; while (getline(inFile, strLine)) { Json::CharReaderBuilder jsrBuilder; std::shared_ptr reader(jsrBuilder.newCharReader()); Json::Value jvRequest; JSONCPP_STRING errs; if (!reader->parse(strLine.data(), strLine.data() + strLine.size(), &jvRequest, &errs)) { LogError << "json parse fail content:" << strLine; return; } /* 新数据到达后,异常数据还未开始处理,则直接关闭文件返回。先处理正常数据。 新数据到达后,异常数据处理中,则把异常未处理的数据全部当处理失败写入新文件中。先处理正常数据。 */ if (inputQueMap_[strPort0_]->getSize() > 0) { LogDebug << "Abnormal data processing, have new data to process"; if (0 == iDealCnt) { LogDebug << "Abnormal data processing not start"; inFile.close(); return; } SaveHttpFailInfo(jvRequest, strFailSaveBakPath_); bAllSucc = false; continue; } iDealCnt++; if (!ResultToHttpSrv(jvRequest)) { LogError << "re http post err:" << strLine; //SaveHttpFailInfo(jvRequest, 
strFailSaveBakPath_); // bAllSucc = false; continue; } } inFile.close(); if(bAllSucc) { //都处理成功,文件删除 remove(strFailSavePath_.c_str()); } else { //部分处理成功,重命名后再次被处理 rename(strFailSaveBakPath_.c_str(), strFailSavePath_.c_str()); } } /** * 保存http上传失败的信息 * inParam : Json::Value &jvRequest http失败信息 * : std::string &strFilePath 保存路径 * outParam: N/A * return : true(成功);false(失败) */ bool ResultToHttpSrvEngine::SaveHttpFailInfo(Json::Value &jvRequest, std::string &strFilePath) { std::ofstream outFile; outFile.open(strFilePath, std::ios::app); if (!outFile.is_open()) { LogError << strFilePath << " open fail"; return false; } Json::StreamWriterBuilder jswBuilder; jswBuilder["indentation"] = ""; std::string strRequest = Json::writeString(jswBuilder, jvRequest); outFile << strRequest << std::endl; outFile.close(); return true; } APP_ERROR ResultToHttpSrvEngine::Process() { int iRet = APP_ERR_OK; if (0 == MyYaml::GetIns()->GetIntValue("gc_http_open")) { LogDebug << " gc_http_open value is 0"; return APP_ERR_OK; } while (!isStop_) { std::shared_ptr pVoidData0 = nullptr; inputQueMap_[strPort0_]->pop(pVoidData0); if (nullptr == pVoidData0) { usleep(1000); //1ms //无数据大于1分钟 iNoDataCnt_++; if (iNoDataCnt_ > (60 * 1000)) { DealHttpFailInfo(); iNoDataCnt_ = 0; } continue; } iNoDataCnt_ = 0; std::shared_ptr pTrain = std::static_pointer_cast(pVoidData0); ai_matrix::DataSourceConfig dataSourceConfig = MyYaml::GetIns()->GetDataSourceConfigById(pTrain->trainNum.iDataSource); //获取摄像机参数 char szCameraNo[4] = {0}; sprintf(szCameraNo, "%03d", pTrain->trainNum.iDataSource + 1); char szNumImgPath[64] = {0}; //车号最优图片路径 if (!pTrain->trainNum.strBestImg.empty()) { sprintf(szNumImgPath, "/%03d/%s", pTrain->trainNum.iDataSource + 1, pTrain->trainNum.strBestImg.c_str()); } char szProImgPath[64] = {0}; //属性最优图片路径 if (!pTrain->trainPro.strBestImg.empty()) { sprintf(szProImgPath, "/%03d/%s", pTrain->trainPro.iDataSource + 1, pTrain->trainPro.strBestImg.c_str()); } // char szChkDateImgPath[64] = {0}; //定检期最优图片路径 // if (!pTrain->chkDate.strBestImg.empty()) // { // sprintf(szChkDateImgPath, "%03d/%s", pTrain->chkDate.iDataSource + 1, pTrain->chkDate.strBestImg.c_str()); // } // char szContainer1ImgPath[64] = {0}; //集装箱1最优图片路径 // if (!pTrain->container1.strBestImg.empty()) // { // sprintf(szContainer1ImgPath, "%03d/%s", pTrain->container1.iDataSource + 1, pTrain->container1.strBestImg.c_str()); // } // char szContainer2ImgPath[64] = {0}; //集装箱2最优图片路径 // if (!pTrain->container2.strBestImg.empty()) // { // sprintf(szContainer2ImgPath, "%03d/%s", pTrain->container2.iDataSource + 1, pTrain->container2.strBestImg.c_str()); // } std::string strTime = pTrain->strTrainName; strTime = MyUtils::getins()->replace_all_distinct(strTime, std::string("-"), std::string(":")); int iCategory = 0; if (pTrain->trainNum.iTrainTypeId == 3) { iCategory = 0; } else if(pTrain->trainNum.iTrainTypeId == 2) { iCategory = 1; } else if (pTrain->trainNum.iTrainTypeId == 6) { iCategory = 2; } else if (pTrain->trainNum.iTrainTypeId == 0) { iCategory = 3; } //组装post信息 Json::Value jvRequest; Json::Value jvSubObj; jvSubObj["poundNo"] = strPoundNo_; // 股道号 jvRequest["trainParams"] = jvSubObj; jvRequest["carriageType"] = pTrain->trainNum.strTrainType; // 车型 jvRequest["carriageNumber"] = pTrain->trainNum.strTrainNum; // 车厢号 jvRequest["carriageOrder"] = pTrain->iCarXH; // 车节号 jvRequest["cameraNumber"] = szCameraNo; // 摄像头编号 jvRequest["carriageTareweight"] = pTrain->trainPro.strSelf; // 皮重 jvRequest["carriageLoad"] = pTrain->trainPro.strLoad; // 载重 jvRequest["carriageVolume"] 
= pTrain->trainPro.strVolume; // 容积 jvRequest["carriageChangelength"] = pTrain->trainPro.strChange; // 换长 jvRequest["proImageName"] = strImageSrv_ + pTrain->strTrainDate + "/" + pTrain->strTrainName + szProImgPath; // 属性图片 jvRequest["numImageName"] = strImageSrv_ + pTrain->strTrainDate + "/" + pTrain->strTrainName + szNumImgPath; // 车号图片 jvRequest["comeTime"] = pTrain->strTrainDate + " " + strTime; // 来车时间 jvRequest["carriageCategory"] = iCategory; // 车厢类别:0敞车,1:漏洞矿车,2:平车,3:车头 jvRequest["isTheLast"] = pTrain->bIsEnd ? 1 : 0; // 是否最后一节: 0:否,1:是 jvRequest["startFrame"] = pTrain->iStartFrameId; //车厢开始帧 jvRequest["endFrame"] = pTrain->iEndFrameId; //车厢结束帧 jvRequest["skipFrame"] = dataSourceConfig.iSkipInterval; jvRequest["collectTime"] = MyUtils::getins()->Stamp2Time(pTrain->i64EndTimeStamp, true);//车厢切分的时间 //跳帧 if (!ResultToHttpSrv(jvRequest)) { // SaveHttpFailInfo(jvRequest, strFailSavePath_); } //列车结束后再次处理失败的信息 if (pTrain->bIsEnd) { DealHttpFailInfo(); } } return APP_ERR_OK; } \ No newline at end of file diff --git a/nvidia_ascend_engine/common_engine/DeleteExpiredFolderEngine/DeleteExpiredFolderEngine.cpp b/nvidia_ascend_engine/common_engine/DeleteExpiredFolderEngine/DeleteExpiredFolderEngine.cpp index c87ea3c..88c4083 100644 --- a/nvidia_ascend_engine/common_engine/DeleteExpiredFolderEngine/DeleteExpiredFolderEngine.cpp +++ b/nvidia_ascend_engine/common_engine/DeleteExpiredFolderEngine/DeleteExpiredFolderEngine.cpp @@ -1,187 +1 @@ -#include "DeleteExpiredFolderEngine.h" - -using namespace ai_matrix; - -DeleteExpiredFolderEngine::DeleteExpiredFolderEngine() {} - -DeleteExpiredFolderEngine::~DeleteExpiredFolderEngine() {} - -APP_ERROR DeleteExpiredFolderEngine::Init() -{ - iDaysNumber_ = MyYaml::GetIns()->GetIntValue("gc_days_for_result_expire_folder"); - strResultPath_ = MyYaml::GetIns()->GetPathValue("gc_result_path"); - - LogInfo << "DeleteExpiredFolderEngine Init ok"; - return APP_ERR_OK; -} - -APP_ERROR DeleteExpiredFolderEngine::DeInit() -{ - LogInfo << "DeleteExpiredFolderEngine DeInit ok"; - return APP_ERR_OK; -} - -APP_ERROR DeleteExpiredFolderEngine::Process() -{ - int iRet = APP_ERR_OK; - while (!isStop_) - { - std::string strTrainDate_temp = MyUtils::getins()->GetDate(); - - DeletePreviousFolder(strResultPath_, strTrainDate_temp, iDaysNumber_); - - usleep(1000*1000*3600*24); //每二十四小时执行一次 - } - - return APP_ERR_OK; -} - -void DeleteExpiredFolderEngine::DeletePreviousFolder(std::string path, const std::string &date, int n_days) -{ - // 1 computer date - std::string previous_date = getDateBeforeNDays(date, n_days); - if (!previous_date.empty()) - std::cout << "Date before " << n_days << " days from " << date << " is: " << previous_date << std::endl; - - - // 2 - std::vector subfolders; - GetSubfolderNames(path, subfolders); - // for (const auto &it : subfolders) - // std::cout << it.year << "." << it.month << "." 
<< it.day << std::endl; - - // 3 delete - if (path.back() != '/') - path += "/"; - Date reference_date = StrToDate(previous_date); // 给定的参考日期 - DeleteEarlierDatesFolder(path, subfolders, reference_date); -} - -// 获取某月有多少天 -int DeleteExpiredFolderEngine::DaysInMonth(int year, int month) -{ - int max_days[13] = {0, 31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31}; - if (month == 2 && ((year % 4 == 0 && year % 100 != 0) || year % 400 == 0)) - { - max_days[2] = 29; // 闰年2月有29天 - } - return max_days[month]; -} - -// 解析字符串为日期结构体 -Date DeleteExpiredFolderEngine::StrToDate(const std::string &date_str) -{ - std::istringstream iss(date_str); - int year, month, day; - char dash; - - if (!(iss >> year >> dash && dash == '-' && - iss >> month >> dash && dash == '-' && - iss >> day)) - { - LogError << ("Invalid date format") << ":" << date_str; - } - return {year, month, day}; -} - -// 减去指定天数 -void DeleteExpiredFolderEngine::SubtractDays(Date &date, int n_days) -{ - while (n_days > 0) - { - date.day--; - n_days--; - if (date.day == 0) - { - if (--date.month == 0) - { - --date.year; - date.month = 12; - } - int max_days = DaysInMonth(date.year, date.month); - date.day = max_days; - } - } -} - -// 格式化日期结构体为字符串 -std::string DeleteExpiredFolderEngine::DateToStr(const Date &date) -{ - std::ostringstream oss; - oss << date.year << "-" << std::setfill('0') << std::setw(2) << date.month << "-" << std::setw(2) << date.day; - return oss.str(); -} - -// 主要功能函数,接收一个日期字符串和一个整数n,返回n天前的日期字符串 -std::string DeleteExpiredFolderEngine::getDateBeforeNDays(const std::string &input_date, int n_days) -{ - try - { - Date date = StrToDate(input_date); - SubtractDays(date, n_days); - return DateToStr(date); - } - catch (const std::exception &e) - { - LogError << "Error: " << e.what(); - return ""; - } -} - -void DeleteExpiredFolderEngine::GetSubfolderNames(std::string &directory, std::vector &folder_names) -{ - if (directory.back() != '/') - directory += "/"; - DIR *dir; - struct dirent *ent; - if ((dir = opendir(directory.c_str())) != nullptr) - { - while ((ent = readdir(dir)) != nullptr) - { - // 排除"."和".." - if (ent->d_type == DT_DIR && ent->d_name[0] != '.' 
&& ent->d_name == "best") - { - folder_names.push_back(StrToDate(ent->d_name)); - } - } - closedir(dir); - } - else - { - LogError << "Unable to open directory: " << directory; - } -} - -void DeleteExpiredFolderEngine::DeleteFolder(const std::string directory) -{ - std::string command = "rm -rf " + directory; - int result = system(command.c_str()); - - if (result != 0) - std::cout << "Failed to remove directory recursively: " << directory << std::endl; - else - std::cout << "delete folder successfully : " << directory << std::endl; -} - -// 删除向量中小于指定日期的所有元素 -void DeleteExpiredFolderEngine::DeleteEarlierDatesFolder(std::string &path, std::vector &subfolders, const Date &reference_date) -{ - if (path.back() != '/') - path += "/"; - for (const Date &cur : subfolders) - { - // bool flag = false; - if (cur.year < reference_date.year) - { - DeleteFolder(path + DateToStr(cur)); - } - else if (cur.year == reference_date.year && cur.month < reference_date.month) - { - DeleteFolder(path + DateToStr(cur)); - } - else if (cur.year == reference_date.year && cur.month == reference_date.month && cur.day < reference_date.day) - { - DeleteFolder(path + DateToStr(cur)); - } - } -} \ No newline at end of file +#include "DeleteExpiredFolderEngine.h" using namespace ai_matrix; DeleteExpiredFolderEngine::DeleteExpiredFolderEngine() {} DeleteExpiredFolderEngine::~DeleteExpiredFolderEngine() {} APP_ERROR DeleteExpiredFolderEngine::Init() { iDaysNumber_ = MyYaml::GetIns()->GetIntValue("gc_days_for_result_expire_folder"); strResultPath_ = MyYaml::GetIns()->GetPathValue("gc_result_path"); LogInfo << "DeleteExpiredFolderEngine Init ok"; return APP_ERR_OK; } APP_ERROR DeleteExpiredFolderEngine::DeInit() { LogInfo << "DeleteExpiredFolderEngine DeInit ok"; return APP_ERR_OK; } APP_ERROR DeleteExpiredFolderEngine::Process() { int iRet = APP_ERR_OK; while (!isStop_) { std::string strTrainDate_temp = MyUtils::getins()->GetDate(); DeletePreviousFolder(strResultPath_, strTrainDate_temp, iDaysNumber_); usleep(1000*1000*3600*24); //每二十四小时执行一次 } return APP_ERR_OK; } void DeleteExpiredFolderEngine::DeletePreviousFolder(std::string path, const std::string &date, int n_days) { // 1 computer date std::string previous_date = getDateBeforeNDays(date, n_days); if (!previous_date.empty()) std::cout << "Date before " << n_days << " days from " << date << " is: " << previous_date << std::endl; // 2 std::vector subfolders; GetSubfolderNames(path, subfolders); // for (const auto &it : subfolders) // std::cout << it.year << "." << it.month << "." 
<< it.day << std::endl; // 3 delete if (path.back() != '/') path += "/"; Date reference_date = StrToDate(previous_date); // 给定的参考日期 DeleteEarlierDatesFolder(path, subfolders, reference_date); } // 获取某月有多少天 int DeleteExpiredFolderEngine::DaysInMonth(int year, int month) { int max_days[13] = {0, 31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31}; if (month == 2 && ((year % 4 == 0 && year % 100 != 0) || year % 400 == 0)) { max_days[2] = 29; // 闰年2月有29天 } return max_days[month]; } // 解析字符串为日期结构体 Date DeleteExpiredFolderEngine::StrToDate(const std::string &date_str) { std::istringstream iss(date_str); int year, month, day; char dash; if (!(iss >> year >> dash && dash == '-' && iss >> month >> dash && dash == '-' && iss >> day)) { LogError << ("Invalid date format") << ":" << date_str; } return {year, month, day}; } // 减去指定天数 void DeleteExpiredFolderEngine::SubtractDays(Date &date, int n_days) { while (n_days > 0) { date.day--; n_days--; if (date.day == 0) { if (--date.month == 0) { --date.year; date.month = 12; } int max_days = DaysInMonth(date.year, date.month); date.day = max_days; } } } // 格式化日期结构体为字符串 std::string DeleteExpiredFolderEngine::DateToStr(const Date &date) { std::ostringstream oss; oss << date.year << "-" << std::setfill('0') << std::setw(2) << date.month << "-" << std::setw(2) << date.day; return oss.str(); } // 主要功能函数,接收一个日期字符串和一个整数n,返回n天前的日期字符串 std::string DeleteExpiredFolderEngine::getDateBeforeNDays(const std::string &input_date, int n_days) { try { Date date = StrToDate(input_date); SubtractDays(date, n_days); return DateToStr(date); } catch (const std::exception &e) { LogError << "Error: " << e.what(); return ""; } } void DeleteExpiredFolderEngine::GetSubfolderNames(std::string &directory, std::vector &folder_names) { if (directory.back() != '/') directory += "/"; DIR *dir; struct dirent *ent; if ((dir = opendir(directory.c_str())) != nullptr) { while ((ent = readdir(dir)) != nullptr) { // 排除"."和".." if (ent->d_type == DT_DIR && ent->d_name[0] != '.' 
&& ent->d_name == "best") { folder_names.push_back(StrToDate(ent->d_name)); } } closedir(dir); } else { LogError << "Unable to open directory: " << directory; } } void DeleteExpiredFolderEngine::DeleteFolder(const std::string directory) { std::string command = "rm -rf " + directory; int result = system(command.c_str()); if (result != 0) std::cout << "Failed to remove directory recursively: " << directory << std::endl; else std::cout << "delete folder successfully : " << directory << std::endl; } // 删除向量中小于指定日期的所有元素 void DeleteExpiredFolderEngine::DeleteEarlierDatesFolder(std::string &path, std::vector &subfolders, const Date &reference_date) { if (path.back() != '/') path += "/"; for (const Date &cur : subfolders) { // bool flag = false; if (cur.year < reference_date.year) { DeleteFolder(path + DateToStr(cur)); } else if (cur.year == reference_date.year && cur.month < reference_date.month) { DeleteFolder(path + DateToStr(cur)); } else if (cur.year == reference_date.year && cur.month == reference_date.month && cur.day < reference_date.day) { DeleteFolder(path + DateToStr(cur)); } } } \ No newline at end of file diff --git a/nvidia_ascend_engine/common_engine/FilterEngine/FilterTrainStepOneEngine.cpp b/nvidia_ascend_engine/common_engine/FilterEngine/FilterTrainStepOneEngine.cpp index b2da381..a3fe3a0 100644 --- a/nvidia_ascend_engine/common_engine/FilterEngine/FilterTrainStepOneEngine.cpp +++ b/nvidia_ascend_engine/common_engine/FilterEngine/FilterTrainStepOneEngine.cpp @@ -1,1056 +1 @@ -#include "FilterTrainStepOneEngine.h" -#include "myutils.h" - -using namespace ai_matrix; - -namespace -{ - //按照x坐标排列 - bool CompareX(const PostSubData &a, const PostSubData &b) - { - return a.step1Location.fLTX < b.step1Location.fLTX; - } -} - -extern std::atomic_uint64_t g_i64ReRunTimeStamp; -extern std::atomic_uint32_t g_iReRunOrigFrameId; -extern std::atomic_uint32_t g_iReRunFrameId; -extern std::atomic_uint32_t g_iReRunOrigChkDateFid; -extern std::atomic_uint32_t g_iReRunOrigContainerFid; - -FilterTrainStepOneEngine::FilterTrainStepOneEngine() {} - -FilterTrainStepOneEngine::~FilterTrainStepOneEngine() {} - -APP_ERROR FilterTrainStepOneEngine::Init() -{ - strPort0_ = engineName_ + "_" + std::to_string(engineId_) + "_0"; - strResultPath_ = MyYaml::GetIns()->GetPathValue("gc_result_path"); - iChkStopPX_ = MyYaml::GetIns()->GetIntValue("gc_chkstop_px"); - iChkStopCount_ = MyYaml::GetIns()->GetIntValue("gc_chkstop_count"); - iPartitionFrameNum_ = MyYaml::GetIns()->GetIntValue("partition_frame_span"); - iPlitFrameSpanPX_ = MyYaml::GetIns()->GetIntValue("gc_split_frame_span_px"); - iPushDirection_ = MyYaml::GetIns()->GetIntValue("gc_push_direction"); - - //获取主摄像头信息 - mainCfg_ = MyYaml::GetIns()->GetDataSourceConfigById(0); - - std::map mapUseDataSouceCfg = MyYaml::GetIns()->GetUseDataSourceConfig(); - for (auto iter = mapUseDataSouceCfg.begin(); iter != mapUseDataSouceCfg.end(); iter++) - { - this->rightFirst_ = iter->second.iRightFirst; - this->leftFirst_ = iter->second.iLeftFirst; - if (iter->second.strTarget.find("NUM") != std::string::npos) - { - LogDebug << "DataSource:" << iter->first << " deal NUM"; - mapHeadContinueCnt_[iter->first] = 0; - mapProContinueCnt_[iter->first] = 0; - mapNumContinueCnt_[iter->first] = 0; - mapSpaceContinueCnt_[iter->first] = 0; - mapTrainSpaceContinueCnt_[iter->first] = 0; - mapDataSourceIsEnd_[iter->first] = false; - - PostData postDataTemp; - mapPostDataFrist_[iter->first] = postDataTemp; - std::map> mapStep1InfoTemp; //[key-目标框id, 目标框坐标集合] - mapMapStep1Info_[iter->first] 
= mapStep1InfoTemp; - } - } - - mapTargetStr_.insert(std::make_pair(NUM, "NUM")); - mapTargetStr_.insert(std::make_pair(PRO, "PRO")); - mapTargetStr_.insert(std::make_pair(HEAD, "HEAD")); - mapTargetStr_.insert(std::make_pair(SPACE, "SPACE"));//SPACE - mapTargetStr_.insert(std::make_pair(TRAINSPACE, "SPACE"));//SPACE - - InitParam(); - LogInfo << "FilterTrainStepOneEngine Init ok"; - return APP_ERR_OK; -} - -APP_ERROR FilterTrainStepOneEngine::DeInit() -{ - LogInfo << "FilterTrainStepOneEngine DeInit ok"; - return APP_ERR_OK; -} - - -/** -* 参数初始化(列车结束时需调用) -* inParam : N/A -* outParam: N/A -* return : N/A -*/ -void FilterTrainStepOneEngine::InitParam() -{ - mapProcessDataPre_.clear(); - for (auto iter = mapHeadContinueCnt_.begin(); iter != mapHeadContinueCnt_.end(); iter++) - { - iter->second = 0; - } - for (auto iter = mapProContinueCnt_.begin(); iter != mapProContinueCnt_.end(); iter++) - { - iter->second = 0; - } - for (auto iter = mapNumContinueCnt_.begin(); iter != mapNumContinueCnt_.end(); iter++) - { - iter->second = 0; - } - for (auto iter = mapSpaceContinueCnt_.begin(); iter != mapSpaceContinueCnt_.end(); iter++) - { - iter->second = 0; - } - for (auto iter = mapTrainSpaceContinueCnt_.begin(); iter != mapTrainSpaceContinueCnt_.end(); iter++) - { - iter->second = 0; - } - for (auto iter = mapDataSourceIsEnd_.begin(); iter != mapDataSourceIsEnd_.end(); iter++) - { - iter->second = false; - } - for (auto iter = mapPostDataFrist_.begin(); iter != mapPostDataFrist_.end(); iter++) - { - iter->second.vecPostSubData.clear(); - } - for (auto iter = mapMapStep1Info_.begin(); iter != mapMapStep1Info_.end(); iter++) - { - iter->second.clear(); - } - iDirection_ = DIRECTION_UNKNOWN; - iNotChgCount_ = 0; - - while (!stackBackInfo_.empty()) - { - stackBackInfo_.pop(); - } - while (!quePostData_.empty()) - { - quePostData_.pop(); - } - iTrainStatus_ = TRAINSTATUS_RUN; - - mapCalDirection_.clear(); -} - -/** -* push数据到队列,队列满时则休眠一段时间再push -* inParam : const std::string strPort push的端口 - : const std::shared_ptr &pProcessData push的数据 -* outParam: N/A -* return : N/A -*/ -void FilterTrainStepOneEngine::PushData(const std::string &strPort, const std::shared_ptr &pProcessData) -{ - while (true) - { - int iRet = outputQueMap_[strPort]->push(std::static_pointer_cast(pProcessData)); - if (iRet != 0) - { - LogDebug << "sourceid:" << pProcessData->iDataSource << " frameid:" << pProcessData->iFrameId << " push fail iRet:" << iRet; - if (iRet == 2) - { - usleep(10000); // 10ms - continue; - } - } - break; - } -} - -void FilterTrainStepOneEngine::AddBackInfo(std::shared_ptr pProcessData) -{ - std::shared_ptr pPostData = std::static_pointer_cast(pProcessData->pVoidData); - std::sort(pPostData->vecPostSubData.begin(), pPostData->vecPostSubData.end(), CompareX); - - std::string strAllClassType; - for (size_t i = 0; i < pPostData->vecPostSubData.size(); i++) - { - if (strAllClassType.find(mapTargetStr_[pPostData->vecPostSubData[i].iTargetType]) != std::string::npos) - { - continue; - } - strAllClassType += mapTargetStr_[pPostData->vecPostSubData[i].iTargetType]; - } - - if (strAllClassType.empty()) - { - return; - } - LogDebug << "frameId:" << pProcessData->iFrameId << " addbackinfo strAllClassType:" << strAllClassType; - - TrainBackInfo trainBackInfo; - trainBackInfo.processData = pProcessData; - trainBackInfo.strAllClassType = strAllClassType; - if (stackBackInfo_.empty()) - { - stackBackInfo_.push(trainBackInfo); - LogDebug << "frameId:" << pProcessData->iFrameId << " push strAllClassType:" << strAllClassType - 
<< " stacksize:" << stackBackInfo_.size(); - } - else - { - TrainBackInfo trainBackInfoTop = stackBackInfo_.top(); - // 2024年3月27日修改前 - // if (trainBackInfoTop.strAllClassType != trainBackInfo.strAllClassType) - // { - // stackBackInfo_.push(trainBackInfo); - // LogDebug << "frameId:" << pProcessData->iFrameId << " push strAllClassType:" << strAllClassType - // << " stacksize:" << stackBackInfo_.size(); - // } - - if (trainBackInfoTop.strAllClassType != trainBackInfo.strAllClassType) - { - if (iDirection_ == DIRECTION_RIGHT - && trainBackInfo.strAllClassType == "SPACE" - && (trainBackInfoTop.strAllClassType == "PROSPACE" || trainBackInfoTop.strAllClassType == "SPACEPRO")) - { - return; - } - if (iDirection_ == DIRECTION_RIGHT - && trainBackInfo.strAllClassType == "SPACE" - && (trainBackInfoTop.strAllClassType == "NUMSPACE" || trainBackInfoTop.strAllClassType == "SPACENUM")) - { - return; - } - - if (iDirection_ == DIRECTION_LEFT - && trainBackInfo.strAllClassType == "SPACE" - && (trainBackInfoTop.strAllClassType == "NUMSPACE" || trainBackInfoTop.strAllClassType == "SPACENUM")) - { - return; - } - stackBackInfo_.push(trainBackInfo); - LogDebug << "frameId:" << pProcessData->iFrameId << " push strAllClassType:" << strAllClassType - << " stacksize:" << stackBackInfo_.size(); - } - } -} - -bool FilterTrainStepOneEngine::IsEndDealBackInfo(std::shared_ptr pProcessData) -{ - if (stackBackInfo_.empty()) - { - return true; - } - - bool bPopFlag = false; - std::shared_ptr pPostData = std::static_pointer_cast(pProcessData->pVoidData); - if (pPostData->vecPostSubData.size() == 0) return false; - - /* - 处理倒车数据时,数据需设置为倒车,主要是保证这样的数据后面Engine不处理,防止切分车厢出错。 - - 类型不相等时,就pop,当pop后,还剩一个数据时,则表示已经回到了刚开始倒车的地方。(只剩一个数据的逻辑在上方) - - 处理最后一个时,不能只判断下类型相同就弹出。需要控制下位置。(要么类型相同位置合适,要么类型不相同) - 正向为向左行驶,则当前数据的位置尽量小于等于栈中最后一个元素的位置。 - 正向为向右行驶,则当前数据的位置尽量大于等于栈中最后一个元素的位置。 - */ - std::sort(pPostData->vecPostSubData.begin(), pPostData->vecPostSubData.end(), CompareX); - - std::string strAllClassType; - for (size_t i = 0; i < pPostData->vecPostSubData.size(); i++) - { - if (strAllClassType.find(mapTargetStr_[pPostData->vecPostSubData[i].iTargetType]) != std::string::npos) - { - continue; - } - strAllClassType += mapTargetStr_[pPostData->vecPostSubData[i].iTargetType]; - } - if (strAllClassType.empty()) - { - return false; - } - - if (stackBackInfo_.size() == 1) - { - TrainBackInfo trainBackInfoLast = stackBackInfo_.top(); - std::shared_ptr pPostDataBack = std::static_pointer_cast(trainBackInfoLast.processData->pVoidData); - std::sort(pPostDataBack->vecPostSubData.begin(), pPostDataBack->vecPostSubData.end(), CompareX); - - for (size_t i = 0; i < pPostDataBack->vecPostSubData.size(); i++) - { - int bFlag = -1; - for (size_t j = 0; j < pPostData->vecPostSubData.size(); j++) - { - if (pPostDataBack->vecPostSubData[i].iBigClassId == pPostData->vecPostSubData[j].iBigClassId) - { - if (pPostData->vecPostSubData[j].step1Location.fLTX < 1 || pPostDataBack->vecPostSubData[i].step1Location.fLTX < 1) - { - LogDebug << "大框X坐标小于1,判定为异常大框。过滤!!"; - break; - } - bFlag = (pPostDataBack->vecPostSubData[i].step1Location.fLTX <= pPostData->vecPostSubData[j].step1Location.fLTX) ? 
1 : 0; - LogDebug << "帧:" << pProcessData->iFrameId << " 倒车前帧:" << pPostDataBack->iFrameId << " 恢复到原位:" << bFlag - << " 当前框位置:" << pPostData->vecPostSubData[i].step1Location.fLTX - << " 倒车前位置:" << pPostDataBack->vecPostSubData[i].step1Location.fLTX << "方向:" << iDirection_; - } - } - - if ((iDirection_ == DIRECTION_LEFT && bFlag == 0) || - (iDirection_ == DIRECTION_RIGHT && bFlag == 1)) - { - bPopFlag = true; - break; - } - } - - if (bPopFlag) - { - LogDebug << "frameId:" << pProcessData->iFrameId << " 恢复倒车前的位置:" << bPopFlag; - stackBackInfo_.pop(); - } - } - else - { - TrainBackInfo trainBackInfoTop_bak = stackBackInfo_.top(); - stackBackInfo_.pop(); - TrainBackInfo trainBackInfoTop = stackBackInfo_.top(); - - if (trainBackInfoTop.strAllClassType != strAllClassType) - { - stackBackInfo_.push(trainBackInfoTop_bak); - LogDebug << "帧:" << pProcessData->iFrameId << " 倒车信息:" << stackBackInfo_.size() - << " 顶部倒车信息:" << trainBackInfoTop.strAllClassType << " 本次识别信息:" << strAllClassType; - } - else - { -// bPopFlag = true; - LogDebug << "帧:" << pProcessData->iFrameId << " 倒车信息:" << stackBackInfo_.size() - << " 顶部倒车信息:" << trainBackInfoTop.strAllClassType << " 本次识别信息:" << strAllClassType - << " 删除倒车信息:" << trainBackInfoTop_bak.strAllClassType; - } - - -// if(bPopFlag) -// { -// stackBackInfo_.pop(); -// bPopFlag = false; -// } - } - - return stackBackInfo_.empty() ? true : false; -} - -/** -* 校验火车是否停止 -* inParam : std::shared_ptr pProcessData :待处理数据 -* outParam: N/A -* return : true:停止; false:非停止 1(正常行驶) 2(停车) 3(倒车) -*/ -int FilterTrainStepOneEngine::GetTrainStatus(std::shared_ptr pProcessData) -{ - if (iDirection_ == DIRECTION_UNKNOWN) - { - LogDebug << " frameId:" << pProcessData->iFrameId << " 未判断出行车方向,暂定认为火车正常行驶中"; - return TRAINSTATUS_RUN; - } - - std::shared_ptr pPostData = std::static_pointer_cast(pProcessData->pVoidData); - pPostData->iFrameId = pProcessData->iFrameId; - - // 1. 无框时,返回之前的列车状态 - if (pPostData->vecPostSubData.size() == 0) - { - return iTrainStatus_; - } - - quePostData_.push(*pPostData.get()); - - if (quePostData_.size() < 3) - { - return TRAINSTATUS_RUN; - } - - PostData postDataFront = quePostData_.front(); - //iNotChgCount_大于0表示有可能停车,此时pop队列数据要多留存几个。用最开始的数据来判断是否真正停车,如果每次只用上上帧判断当列车超级慢时可能判断为停车。 - int iSizeTemp = iNotChgCount_ > 0 ? 
10 : 2; - while (quePostData_.size() > iSizeTemp) - { - quePostData_.pop(); - } - LogDebug << "frameId:" << pProcessData->iFrameId << " 判断运动状态队列 第一帧:" << postDataFront.iFrameId << " 队列size:" << quePostData_.size() << " iSizeTemp:" << iSizeTemp; - - bool bSameFlag = false; - int iDiffValue = iChkStopPX_; - for (size_t i = 0; i < pPostData->vecPostSubData.size(); i++) - { - PostSubData postSubDataBack = pPostData->vecPostSubData[i]; - for (size_t j = 0; j < postDataFront.vecPostSubData.size(); j++) - { - PostSubData postSubDataFront = postDataFront.vecPostSubData[j]; - /* - 使用iBigClassId,可能出现平车只有间隔大框,且间隔大框可以一会是平车间隔,一会是通用间隔。导致类别不一样 - 使用iTargetType,可能出现平车只有间隔大框,且间隔大框可以一会是平车间隔,一会是通用间隔。导致像素差判断不准。 - */ - if (postSubDataFront.iTargetType != postSubDataBack.iTargetType) - { - LogDebug << "判断前后帧识别的是否一致 上一个:" << postSubDataFront.iTargetType << " 当前:" << postSubDataBack.iTargetType; - continue; - } - - // if (postSubDataFront.iTargetType == SPACE && postSubDataFront.iBigClassId != postSubDataBack.iBigClassId) - // { - // iDiffValue = 50; - // } - - bSameFlag = true; - int iCenterBack = postSubDataBack.step1Location.fLTX + (postSubDataBack.step1Location.fRBX - postSubDataBack.step1Location.fLTX) / 2; - int iCenterFront = postSubDataFront.step1Location.fLTX + (postSubDataFront.step1Location.fRBX - postSubDataFront.step1Location.fLTX) / 2; - - //位置比较大于10个像素,则表示有移动。再判断时正向移动,还是倒车 - LogDebug << "frameId:" << pProcessData->iFrameId << " " << iCenterBack << "-" << iCenterFront - << "=" << abs(iCenterBack - iCenterFront) << " 预期判定移动的差值为iDiffValue:" << iDiffValue; - if (abs(iCenterBack - iCenterFront) > iDiffValue) - { - iNotChgCount_ = 0; - /* - iCenterBack > iCenterFront 表示向右行驶,且原方向为向左行驶 - iCenterBack < iCenterFront 表示向左行驶,且原方向为向右行驶 - 以上2种表示倒车。 - */ - if ((iCenterBack > iCenterFront && iDirection_ == DIRECTION_LEFT) || - (iCenterBack < iCenterFront && iDirection_ == DIRECTION_RIGHT)) - { - if (this->iPartitionFrameNum_ < (pProcessData->iFrameId - postDataFront.iFrameId) - && this->iPlitFrameSpanPX_ < abs(iCenterBack - iCenterFront)) - { - return TRAINSTATUS_RUN; - } - LogDebug << "frameId:" << pProcessData->iFrameId << " 检测到火车倒车"; - return TRAINSTATUS_BACK; - } - else - { - LogDebug << "frameId:" << pProcessData->iFrameId << " 正常行驶"; - return TRAINSTATUS_RUN; - } - } - /* - 小于10个像素表示可能停车,累计未变化次数。 - 累计变化次数超过10次,返回停车 - 累计变化次数未超过10次,返回之前行驶状态 - */ - else - { - iNotChgCount_++; - LogDebug << " frameId:" << pProcessData->iFrameId - << " 大框移动范围小 判断停车计数:" << iNotChgCount_ << "/" << iChkStopCount_; - if (iNotChgCount_ > iChkStopCount_) - { - LogDebug << "frameId:" << pProcessData->iFrameId << " 检测到火车停车"; - return TRAINSTATUS_STOP; - } - else - { -// LogDebug << "frameId:" << pProcessData->iFrameId << " iTrainStatus_:" << iTrainStatus_; - return iTrainStatus_; - } - } - } - } - - /* - 未找到相同的框,说明是老框消失掉了,新框出现了。 - 按新框出现的位置判断是向左行驶,还是向右行驶。 - */ - LogDebug << "frameId:" << pProcessData->iFrameId << " bSameFlag:" << bSameFlag; - if (!bSameFlag) - { - std::sort(pPostData->vecPostSubData.begin(), pPostData->vecPostSubData.end(), CompareX); - Step1Location step1Location = pPostData->vecPostSubData.front().step1Location; - if (iDirection_ == DIRECTION_LEFT) - { - step1Location = pPostData->vecPostSubData.back().step1Location; - } - LogDebug << "frameId:" << pProcessData->iFrameId << " fLTX:" << step1Location.fLTX << " fRBX:" << step1Location.fRBX; - - iNotChgCount_ = 0; - int iCenter = step1Location.fLTX + (step1Location.fRBX - step1Location.fLTX) / 2; - int iValue = pProcessData->iWidth / 2; - if ((iCenter > iValue && 
iDirection_ == DIRECTION_RIGHT) || - (iCenter < iValue && iDirection_ == DIRECTION_LEFT)) - { - /* - 针对有效帧较少时,和上上帧比较没有同类型大框,且当前帧已行驶到画面中心导致误判的情况, - 增加和上帧同类型大框的比较处理。 - */ - PostData postDataMiddle = quePostData_.front(); - for (size_t i = 0; i < pPostData->vecPostSubData.size(); i++) - { - PostSubData postSubDataBack = pPostData->vecPostSubData[i]; - for (size_t j = 0; j < postDataMiddle.vecPostSubData.size(); j++) - { - PostSubData postSubDataMiddle = postDataMiddle.vecPostSubData[j]; - if (postSubDataMiddle.iTargetType != postSubDataBack.iTargetType) - { - continue; - } - int iCenterBack = postSubDataBack.step1Location.fLTX + (postSubDataBack.step1Location.fRBX - postSubDataBack.step1Location.fLTX) / 2; - int iCenterMiddle = postSubDataMiddle.step1Location.fLTX + (postSubDataMiddle.step1Location.fRBX - postSubDataMiddle.step1Location.fLTX) / 2; - - // 位置比较大于10个像素,则表示有移动。再判断时正向移动,还是倒车 - LogDebug << "frameId:" << pProcessData->iFrameId << " " << iCenterBack << "-" << iCenterMiddle - << "=" << abs(iCenterBack - iCenterMiddle) << " middle cmp iDiffValue:" << iDiffValue; - if (abs(iCenterBack - iCenterMiddle) > iDiffValue) - { - if ((iCenterBack > iCenterMiddle && iDirection_ == DIRECTION_LEFT) || - (iCenterBack < iCenterMiddle && iDirection_ == DIRECTION_RIGHT)) - { - LogDebug << "frameId:" << pProcessData->iFrameId << " back2 back2"; - return TRAINSTATUS_BACK; - } - else - { - LogDebug << "frameId:" << pProcessData->iFrameId << " run"; - return TRAINSTATUS_RUN; - } - } - } - } - -// LogDebug << "frameId:" << pProcessData->iFrameId << " back2"; - return iTrainStatus_; - } - } - LogDebug << "frameId:" << pProcessData->iFrameId << " iNotChgCount_:" << iNotChgCount_ << " run run"; - return TRAINSTATUS_RUN; -} - -/** -* 设置行车方向 -* inParam : std::vector &vecLocation :大框坐标集合 - : std::shared_ptr pProcessData :待处理数据 -* outParam: N/A -* return : N/A -*/ -void FilterTrainStepOneEngine::SetDirection(std::vector &vecLocation, std::shared_ptr pProcessData) -{ - if (vecLocation.size() < 1) - { - return; - } - Step1Location slFront = vecLocation.front(); - Step1Location slBack = vecLocation.back(); - /* - 移动距离小于50个像素,则判断最后一个框的出现的位置 - */ - LogDebug << "sourceid:" << pProcessData->iDataSource << " frameid:" << pProcessData->iFrameId << " slFront:" << slFront.fLTX << " slBack:" << slBack.fLTX; - if (fabs(slBack.fLTX - slFront.fLTX) < 50) - { - float fTempX1 = pProcessData->iWidth / 5; //5分之1的X坐标 - float fTempX2 = pProcessData->iWidth / 5 * 4; //5分之4的X坐标 - - if (slBack.fLTX < fTempX1) - { - iDirection_ = DIRECTION_LEFT; - } - else if (slBack.fRBX > fTempX2) - { - iDirection_ = DIRECTION_RIGHT; - } - } - else - { - //行驶方向 左 - if ((slBack.fLTX - slFront.fLTX) < 0) - { - iDirection_ = DIRECTION_LEFT; - } - //行驶方向 右 - else if ((slBack.fLTX - slFront.fLTX) > 0) - { - iDirection_ = DIRECTION_RIGHT; - } - } - - //未判断出移动方向,清除数后重新计算。 - if (iDirection_ == DIRECTION_UNKNOWN) - { - auto iterMapStep1Info = mapMapStep1Info_.find(pProcessData->iDataSource); - iterMapStep1Info->second.clear(); - } - else - { - if (MyYaml::GetIns()->GetDataSourceConfigById(pProcessData->iDataSource).iLeftFirst != mainCfg_.iLeftFirst) - { - LogDebug << "before sourceid:" << pProcessData->iDataSource << "frameid:" << pProcessData->iFrameId << " direction:" << iDirection_; - iDirection_ = (iDirection_ == DIRECTION_LEFT) ? 
DIRECTION_RIGHT : DIRECTION_LEFT; - LogDebug << "after sourceid:" << pProcessData->iDataSource << "frameid:" << pProcessData->iFrameId << " direction:" << iDirection_; - } - } -} - -/** -* 计算行车方向新 -* inParam : std::shared_ptr pProcessData :待处理数据 -* outParam: N/A -* return : N/A -*/ -void FilterTrainStepOneEngine::CalculateDirectionNew(std::shared_ptr pProcessData) -{ - /* - 连续3帧同目标识别框信息 - 判断位置差异是否超过10px(判停车参数),且两两之间都是线性。如果符合则计算方向。 - 上述条件不符合则剔除第一个元素,再次累计连续3帧处理。 - */ - auto iterMap = mapCalDirection_.find(pProcessData->iDataSource); - if (iterMap == mapCalDirection_.end()) - { - std::map> mapTemp; - mapCalDirection_.insert(std::make_pair(pProcessData->iDataSource, mapTemp)); - iterMap = mapCalDirection_.find(pProcessData->iDataSource); - } - - std::shared_ptr pPostData = std::static_pointer_cast(pProcessData->pVoidData); - for (auto iter = pPostData->vecPostSubData.begin(); iter != pPostData->vecPostSubData.end(); iter++) - { - CalculateInfo calInfo; - calInfo.iFrameId = pProcessData->iFrameId; - calInfo.iBigClassId = iter->iCarXH; - calInfo.fCenterX = iter->step1Location.fLTX + (iter->step1Location.fRBX - iter->step1Location.fLTX) / 2; - - auto iterSubMap = iterMap->second.find(iter->iBigClassId); - if (iterSubMap == iterMap->second.end()) - { - std::vector vecTemp; - iterMap->second.insert(std::make_pair(iter->iBigClassId, vecTemp)); - iterSubMap = iterMap->second.find(iter->iBigClassId); - } - iterSubMap->second.emplace_back(calInfo); - - if (iterSubMap->second.size() > 2) - { - LogDebug << "souceid:" << pProcessData->iDataSource << " frameid:" << pProcessData->iFrameId - << " last:" << iterSubMap->second.at(2).iFrameId << " " << iterSubMap->second.at(2).fCenterX - << " mid:" << iterSubMap->second.at(1).iFrameId << " " << iterSubMap->second.at(1).fCenterX - << " pre:" << iterSubMap->second.at(0).iFrameId << " " << iterSubMap->second.at(0).fCenterX; - //如果帧号连续,且移动位置大于15px,则计算方向 - if (iterSubMap->second.at(2).iFrameId - iterSubMap->second.at(1).iFrameId != mainCfg_.iSkipInterval || - iterSubMap->second.at(1).iFrameId - iterSubMap->second.at(0).iFrameId != mainCfg_.iSkipInterval) - { - iterSubMap->second.erase(iterSubMap->second.begin()); - continue; - } - - int iLast = iterSubMap->second.at(2).fCenterX; - int iMid = iterSubMap->second.at(1).fCenterX; - int iPre = iterSubMap->second.at(0).fCenterX; - if (abs(iPre - iLast) <= iChkStopPX_) - { - iterSubMap->second.erase(iterSubMap->second.begin()); - continue; - } - if (iPre <= iMid && iMid <= iLast) - { - iDirection_ = DIRECTION_RIGHT; - } - else if (iPre >= iMid && iMid >= iLast) - { - iDirection_ = DIRECTION_LEFT; - } - else - { - iterSubMap->second.erase(iterSubMap->second.begin()); - continue; - } - - LogDebug << "souceid:" << pProcessData->iDataSource << " frameid:" << pProcessData->iFrameId << " iDirection_:" << iDirection_; - } - } - - //主相机的对侧相机需反向设置方向 - if (iDirection_ != DIRECTION_UNKNOWN && MyYaml::GetIns()->GetDataSourceConfigById(pProcessData->iDataSource).iLeftFirst != mainCfg_.iLeftFirst) - { - LogDebug << "before sourceid:" << pProcessData->iDataSource << "frameid:" << pProcessData->iFrameId << " direction:" << iDirection_; - iDirection_ = (iDirection_ == DIRECTION_LEFT) ? 
DIRECTION_RIGHT : DIRECTION_LEFT; - LogDebug << "after sourceid:" << pProcessData->iDataSource << "frameid:" << pProcessData->iFrameId << " direction:" << iDirection_; - } -} - -/** -* 计算行车方向 -* inParam : std::shared_ptr pProcessData :待处理数据 -* outParam: N/A -* return : N/A -*/ -void FilterTrainStepOneEngine::CalculateDirection(std::shared_ptr pProcessData) -{ - auto iterPostDataFrist = mapPostDataFrist_.find(pProcessData->iDataSource); - auto iterMapStep1Info = mapMapStep1Info_.find(pProcessData->iDataSource); - /* - 1帧暂时最多识别3个大框。[(车头、间隔、车号); (车号、间隔、属性)] - 因间隔大框不是很准确,暂时把间隔大框剔除后,计算方向。 - 剔除间隔后,大框的可能情况为(车头; 车号; 属性; 车头&车号; 车号&属性) - */ - std::shared_ptr pPostData = std::static_pointer_cast(pProcessData->pVoidData); - PostData postDataTemp = *pPostData; - for (auto iter = postDataTemp.vecPostSubData.begin(); iter != postDataTemp.vecPostSubData.end();) - { - if (iter->iTargetType == SPACE) - { - iter = postDataTemp.vecPostSubData.erase(iter); - continue; - } - auto iterMap = iterMapStep1Info->second.find(iter->iTargetType); - if (iterMap != iterMapStep1Info->second.end()) - { - iterMap->second.emplace_back(iter->step1Location); - } - else - { - std::vector vecTemp; - vecTemp.emplace_back(iter->step1Location); - iterMapStep1Info->second.insert(std::pair>(iter->iTargetType, vecTemp)); - } - - iter++; - } - - //记录第一次识别到大框的信息 - if (iterPostDataFrist->second.vecPostSubData.size() == 0) - { - iterPostDataFrist->second = postDataTemp; - LogDebug << "sourceid:" << pProcessData->iDataSource << " frameid:" << pProcessData->iFrameId - << " size:" << iterPostDataFrist->second.vecPostSubData.size(); - } - - //无框时,不计算方向 - if (iterPostDataFrist->second.vecPostSubData.size() == 0) - { - LogDebug << "sourceid:" << pProcessData->iDataSource << " frameid:" << pProcessData->iFrameId << " no find data"; - return; - } - //框的个数和类型没有变化时,不计算方向 - if (iterPostDataFrist->second.vecPostSubData.size() - postDataTemp.vecPostSubData.size() == 0) - { - std::sort(iterPostDataFrist->second.vecPostSubData.begin(), iterPostDataFrist->second.vecPostSubData.end(), CompareX); - std::sort(postDataTemp.vecPostSubData.begin(), postDataTemp.vecPostSubData.end(), CompareX); - bool bFlag = true; - for (int i = 0; i < iterPostDataFrist->second.vecPostSubData.size(); i++) - { - if (iterPostDataFrist->second.vecPostSubData[i].iTargetType != postDataTemp.vecPostSubData[i].iTargetType) - { - bFlag = false; - break; - } - } - if (bFlag) - { - LogDebug << "sourceid:" << pProcessData->iDataSource << " frameid:" << pProcessData->iFrameId << " datasize and datatype equal"; - return; - } - } - - auto iter = iterMapStep1Info->second.begin(); - int iMaxSize = iter->second.size(); - int iMaxSizeKey = iter->first; - int iMinSize = iter->second.size(); - int iMinSizeKey = iter->first; - while (iter != iterMapStep1Info->second.end()) - { - if (iter->second.size() > iMaxSize) - { - iMaxSize = iter->second.size(); - iMaxSizeKey = iter->first; - } - if (iter->second.size() < iMinSize) - { - iMinSize = iter->second.size(); - iMinSizeKey = iter->first; - } - iter++; - } - LogDebug << "sourceid:" << pProcessData->iDataSource << " frameid:" << pProcessData->iFrameId - << " iMaxSize:" << iMaxSize << " iMaxSizeKey:" << iMaxSizeKey - << " iMinSize:" << iMinSize << " iMinSizeKey:" << iMinSizeKey; - - /* - 第一次识别为1个框,后续变化为无框或2个框,使用框个数多的数据计算方向 - 第一次识别为2个框,后续变化为1个框,使用个数多的数据计算方向。 - 后续变化为0个框,使用个数少的数据计算方向。 - */ - if (iterPostDataFrist->second.vecPostSubData.size() == 1) - { - SetDirection(iterMapStep1Info->second[iMaxSizeKey], pProcessData); - } - else if 
(iterPostDataFrist->second.vecPostSubData.size() == 2) - { - if (postDataTemp.vecPostSubData.size() == 1) - { - SetDirection(iterMapStep1Info->second[iMinSizeKey], pProcessData); - } - else if (postDataTemp.vecPostSubData.size() == 0) - { - SetDirection(iterMapStep1Info->second[iMaxSizeKey], pProcessData); - } - } - LogDebug<< "sourceid:" << pProcessData->iDataSource << " frameid:" << pProcessData->iFrameId << " 行车方向:" << iDirection_; -} - -void FilterTrainStepOneEngine::sendComeTrain(const std::string strTrainDate, const std::string strTrainName, const int iDirection) { - std::string message = "{\"cometime\":\"" + strTrainDate + " " + strTrainName + "\",\"type\":\"1\",\"direction\":" + to_string(iDirection == iPushDirection_ ? 1:-1) + "}"; - LogWarn << message; - outputQueMap_[engineName_ + "_" + std::to_string(engineId_) + "_1"]->push(std::static_pointer_cast(std::make_shared(message))); -} - -/** -* 根据当前帧数据,处理上一帧数据 -* inParam : std::shared_ptr pProcessData :当前帧数据 -* outParam: N/A -* return : N/A -*/ -void FilterTrainStepOneEngine::DealProcessDataPre(std::shared_ptr pProcessData) -{ - /* - 目标框是否是连续识别,只识别到一帧的目标框认为误识别,过滤掉。 - 判断上一帧,当前帧 是否有框 - 上一帧有框,当前帧有框,说明连续识别,正常处理。 - 上一帧有框,当前帧无框,则非连续识别,过滤大框 - 上一帧无框,当前帧有框,则连续识别个数置零。 - 上一帧无框,当前帧无框,则连续识别个数置零。 - */ - auto iterProcessData = mapProcessDataPre_.find(pProcessData->iDataSource); - if (iterProcessData == mapProcessDataPre_.end() || nullptr == iterProcessData->second) - { - return; - } - - auto iterHeadContinueCnt = mapHeadContinueCnt_.find(pProcessData->iDataSource); - auto iterProContinueCnt = mapProContinueCnt_.find(pProcessData->iDataSource); - auto iterNumContinueCnt = mapNumContinueCnt_.find(pProcessData->iDataSource); - auto iterSpaceContinueCnt = mapSpaceContinueCnt_.find(pProcessData->iDataSource); - auto iterTranSpaceContinueCnt = mapTrainSpaceContinueCnt_.find(pProcessData->iDataSource); - - //获取当前帧识别情况 - std::shared_ptr pPostData = std::static_pointer_cast(pProcessData->pVoidData); - for (int i = 0; i < pPostData->vecPostSubData.size(); i++) - { - if (pPostData->vecPostSubData[i].iTargetType == HEAD) - { - iterHeadContinueCnt->second++; - } - else if (pPostData->vecPostSubData[i].iTargetType == PRO) - { - iterProContinueCnt->second++; - } - else if (pPostData->vecPostSubData[i].iTargetType == NUM) - { - iterNumContinueCnt->second++; - } - else if (pPostData->vecPostSubData[i].iTargetType == SPACE) - { - iterSpaceContinueCnt->second++; - } - else if (pPostData->vecPostSubData[i].iTargetType == TRAINSPACE) - { - iterTranSpaceContinueCnt->second++; - } - } - //LogDebug << "engineId:" << engineId_ << " frameId:" << pProcessData->iFrameId << " " << iProContinueCnt_ << " " << iNumContinueCnt_; - - //获取上一帧识别情况 - std::shared_ptr pPostDataPre = std::static_pointer_cast(iterProcessData->second->pVoidData); - bool bPreHaveHead = false; - bool bPreHavePro = false; - bool bPreHaveNum = false; - bool bPreHaveSpace = false; - bool bPreHaveTrainSpace = false; - for (int iPre = 0; iPre < pPostDataPre->vecPostSubData.size(); iPre++) - { - if (pPostDataPre->vecPostSubData[iPre].iTargetType == HEAD) - { - iterHeadContinueCnt->second++; - bPreHaveHead = true; - } - else if (pPostDataPre->vecPostSubData[iPre].iTargetType == PRO) - { - iterProContinueCnt->second++; - bPreHavePro = true; - } - else if (pPostDataPre->vecPostSubData[iPre].iTargetType == NUM) - { - iterNumContinueCnt->second++; - bPreHaveNum = true; - } - else if (pPostDataPre->vecPostSubData[iPre].iTargetType == SPACE) - { - iterSpaceContinueCnt->second++; - bPreHaveSpace = true; - } - else if 
(pPostDataPre->vecPostSubData[iPre].iTargetType == TRAINSPACE) - { - iterTranSpaceContinueCnt->second++; - bPreHaveTrainSpace = true; - } - } - - //前一帧无大框连续识别次数置零 - iterHeadContinueCnt->second = bPreHaveHead ? iterHeadContinueCnt->second : 0; - iterProContinueCnt->second = bPreHavePro ? iterProContinueCnt->second : 0; - iterNumContinueCnt->second = bPreHaveNum ? iterNumContinueCnt->second : 0; - iterSpaceContinueCnt->second = bPreHaveSpace ? iterSpaceContinueCnt->second : 0; - iterTranSpaceContinueCnt->second = bPreHaveTrainSpace ? iterTranSpaceContinueCnt->second : 0; - - //非连续识别的情况,认为误识别,剔除误识别的大框信息 - for (std::vector::iterator it = pPostDataPre->vecPostSubData.begin(); it != pPostDataPre->vecPostSubData.end();) - { - if (iterHeadContinueCnt->second < 2 && it->iTargetType == HEAD) - { - LogDebug << " frameId:" << iterProcessData->second->iFrameId << " Head 框因非连续识别而过滤"; - it = pPostDataPre->vecPostSubData.erase(it); - continue; - } - if (iterProContinueCnt->second < 2 && it->iTargetType == PRO) - { - LogDebug << " frameId:" << iterProcessData->second->iFrameId << " PRO 框因非连续识别而过滤"; - it = pPostDataPre->vecPostSubData.erase(it); - continue; - } - if (iterNumContinueCnt->second < 2 && it->iTargetType == NUM) - { - LogDebug << " frameId:" << iterProcessData->second->iFrameId << " NUM 框因非连续识别而过滤"; - it = pPostDataPre->vecPostSubData.erase(it); - continue; - } - if (iterSpaceContinueCnt->second < 2 && it->iTargetType == SPACE) - { - LogDebug << " frameId:" << iterProcessData->second->iFrameId << " SPACE 框因非连续识别而过滤"; - it = pPostDataPre->vecPostSubData.erase(it); - continue; - } - if (iterTranSpaceContinueCnt->second < 2 && it->iTargetType == TRAINSPACE) - { - LogDebug << " frameId:" << iterProcessData->second->iFrameId << " TRAINSPACE 框因非连续识别而过滤"; - it = pPostDataPre->vecPostSubData.erase(it); - continue; - } - it++; - } - - //判定行驶方向, 记录Direction文件信息 - if (iDirection_ == DIRECTION_UNKNOWN) - { - iDirection_ = iterProcessData->second->iDirection; - - LogInfo << "方向:" << (iDirection_ == DIRECTION_LEFT ? "左" : (iDirection_ == DIRECTION_RIGHT ? 
"右" : "未判断出来")); - if (iDirection_ == DIRECTION_UNKNOWN) - { - //CalculateDirection(iterProcessData->second); - CalculateDirectionNew(iterProcessData->second); - if (iDirection_ != DIRECTION_UNKNOWN) this->sendComeTrain(pProcessData->strTrainDate, pProcessData->strTrainName, iDirection_); - } - - if (iDirection_ != DIRECTION_UNKNOWN) - { - auto iterPostDataFrist = mapPostDataFrist_.find(pProcessData->iDataSource); - Json::Value jvDirectionInfo; - jvDirectionInfo["direction"] = iDirection_; - jvDirectionInfo["firstStep1Cnt"] = iterPostDataFrist->second.vecPostSubData.size(); - jvDirectionInfo["frameid"] = iterProcessData->second->iFrameId; - jvDirectionInfo["sourceid"] = iterProcessData->second->iDataSource; - std::string strFilePath = strResultPath_ + pProcessData->strTrainDate + "/" + pProcessData->strTrainName + "/" + "direction.txt"; - MyUtils::getins()->WriteJsonInfo(jvDirectionInfo, strFilePath); - } - } - - //主摄像头校验是否停车 - int iTrainStatusTemp = iTrainStatus_; - if (iterProcessData->second->iDataSource == 0) - { - iTrainStatus_ = GetTrainStatus(iterProcessData->second); - iTrainStatusTemp = iTrainStatus_; - - if (iTrainStatus_ == TRAINSTATUS_STOP) - { - //停车 - } - else if (iTrainStatus_ == TRAINSTATUS_BACK) - { - //倒车 - AddBackInfo(iterProcessData->second); - iTrainStatusTemp = TRAINSTATUS_STOP; - } - else if(iTrainStatus_ == TRAINSTATUS_RUN) - { - /* - 正向行驶需先把倒车产生的倒车数据处理完毕,即使车辆回到原倒车点,再开始识别行驶数据 - */ - if(!IsEndDealBackInfo(iterProcessData->second)) - { - iTrainStatusTemp = TRAINSTATUS_STOP; - } - } - } - LogDebug << "数据源:" << iterProcessData->second->iDataSource << " 帧:" << iterProcessData->second->iFrameId - << " 火车实时运行状态:" << iTrainStatus_ << "(0无车,1运行,2停车,3倒车) iTrainStatusTemp:" << iTrainStatusTemp; - iterProcessData->second->iStatus = iTrainStatusTemp; - - // this->sendComeTrain(pProcessData->strTrainDate, pProcessData->strTrainName, iDirection_); - - //上一帧,push端口0 - PushData(strPort0_, iterProcessData->second); -} - - -APP_ERROR FilterTrainStepOneEngine::Process() -{ - int iRet = APP_ERR_OK; - while (!isStop_) - { - std::shared_ptr pVoidData0 = nullptr; - inputQueMap_[strPort0_]->pop(pVoidData0); - if (nullptr == pVoidData0) - { - usleep(1000); //1ms - continue; - } - - std::shared_ptr pProcessData = std::static_pointer_cast(pVoidData0); - std::shared_ptr pPostData = std::static_pointer_cast(pProcessData->pVoidData); - - // //停止后再次行驶,把队列中多余的数据丢弃。 - // if (iReRunOrigFrameId_ != 0 && pProcessData->iOrigFrameId > iReRunOrigFrameId_ && !pProcessData->bIsEnd) - // { - // LogDebug << "reRunOrigFrameId:" << iReRunOrigFrameId_ << " origFrameId:" << pProcessData->iOrigFrameId; - // continue; - // } - - if (pProcessData->bIsEnd) - { - mapDataSourceIsEnd_[pProcessData->iDataSource] = pProcessData->bIsEnd; - } - - //1帧暂时最多识别4个大框。[(车头、车厢间隔、间隔、车号); (车号、车厢间隔、间隔、属性)] - if (pPostData->vecPostSubData.size() > 4) - { - LogError << "sourceid:" << pProcessData->iDataSource << " frameId:" << pProcessData->iFrameId << " vecpostsubdata size:" << pPostData->vecPostSubData.size(); - pPostData->vecPostSubData.clear(); - } - - //按指定识别区域过滤误识别信息--过滤逻辑迁移至TrainStepOneEngine中。 - - //根据当前帧数据,处理上一帧数据 - DealProcessDataPre(pProcessData); - mapProcessDataPre_[pProcessData->iDataSource] = pProcessData; - if (pProcessData->bIsEnd) - { - //结束帧,push端口0 - LogDebug << "sourceid:" << pProcessData->iDataSource << " frameid:" << pProcessData->iFrameId << " isEnd:" << pProcessData->bIsEnd; - PushData(strPort0_, pProcessData); - } - - //3. 
全部结束,初始化相关参数 - bool bAllEnd = true; - for (auto iter = mapDataSourceIsEnd_.begin(); iter != mapDataSourceIsEnd_.end(); iter++) - { - bAllEnd = bAllEnd && iter->second; - } - if (bAllEnd) - { - LogDebug << "sourceid:" << pProcessData->iDataSource << " frameid:" << pProcessData->iFrameId << " bAllEnd"; - InitParam(); - } - } - return APP_ERR_OK; -} +#include "FilterTrainStepOneEngine.h" #include "myutils.h" using namespace ai_matrix; namespace { //按照x坐标排列 bool CompareX(const PostSubData &a, const PostSubData &b) { return a.step1Location.fLTX < b.step1Location.fLTX; } } extern std::atomic_uint64_t g_i64ReRunTimeStamp; extern std::atomic_uint32_t g_iReRunOrigFrameId; extern std::atomic_uint32_t g_iReRunFrameId; extern std::atomic_uint32_t g_iReRunOrigChkDateFid; extern std::atomic_uint32_t g_iReRunOrigContainerFid; FilterTrainStepOneEngine::FilterTrainStepOneEngine() {} FilterTrainStepOneEngine::~FilterTrainStepOneEngine() {} APP_ERROR FilterTrainStepOneEngine::Init() { strPort0_ = engineName_ + "_" + std::to_string(engineId_) + "_0"; strResultPath_ = MyYaml::GetIns()->GetPathValue("gc_result_path"); iChkStopPX_ = MyYaml::GetIns()->GetIntValue("gc_chkstop_px"); iChkStopCount_ = MyYaml::GetIns()->GetIntValue("gc_chkstop_count"); iPartitionFrameNum_ = MyYaml::GetIns()->GetIntValue("partition_frame_span"); iPlitFrameSpanPX_ = MyYaml::GetIns()->GetIntValue("gc_split_frame_span_px"); iPushDirection_ = MyYaml::GetIns()->GetIntValue("gc_push_direction"); //获取主摄像头信息 mainCfg_ = MyYaml::GetIns()->GetDataSourceConfigById(0); std::map mapUseDataSouceCfg = MyYaml::GetIns()->GetUseDataSourceConfig(); for (auto iter = mapUseDataSouceCfg.begin(); iter != mapUseDataSouceCfg.end(); iter++) { this->rightFirst_ = iter->second.iRightFirst; this->leftFirst_ = iter->second.iLeftFirst; if (iter->second.strTarget.find("NUM") != std::string::npos) { LogDebug << "DataSource:" << iter->first << " deal NUM"; mapHeadContinueCnt_[iter->first] = 0; mapProContinueCnt_[iter->first] = 0; mapNumContinueCnt_[iter->first] = 0; mapSpaceContinueCnt_[iter->first] = 0; mapTrainSpaceContinueCnt_[iter->first] = 0; mapDataSourceIsEnd_[iter->first] = false; PostData postDataTemp; mapPostDataFrist_[iter->first] = postDataTemp; std::map> mapStep1InfoTemp; //[key-目标框id, 目标框坐标集合] mapMapStep1Info_[iter->first] = mapStep1InfoTemp; } } mapTargetStr_.insert(std::make_pair(NUM, "NUM")); mapTargetStr_.insert(std::make_pair(PRO, "PRO")); mapTargetStr_.insert(std::make_pair(HEAD, "HEAD")); mapTargetStr_.insert(std::make_pair(SPACE, "SPACE"));//SPACE mapTargetStr_.insert(std::make_pair(TRAINSPACE, "SPACE"));//SPACE InitParam(); LogInfo << "FilterTrainStepOneEngine Init ok"; return APP_ERR_OK; } APP_ERROR FilterTrainStepOneEngine::DeInit() { LogInfo << "FilterTrainStepOneEngine DeInit ok"; return APP_ERR_OK; } /** * 参数初始化(列车结束时需调用) * inParam : N/A * outParam: N/A * return : N/A */ void FilterTrainStepOneEngine::InitParam() { mapProcessDataPre_.clear(); for (auto iter = mapHeadContinueCnt_.begin(); iter != mapHeadContinueCnt_.end(); iter++) { iter->second = 0; } for (auto iter = mapProContinueCnt_.begin(); iter != mapProContinueCnt_.end(); iter++) { iter->second = 0; } for (auto iter = mapNumContinueCnt_.begin(); iter != mapNumContinueCnt_.end(); iter++) { iter->second = 0; } for (auto iter = mapSpaceContinueCnt_.begin(); iter != mapSpaceContinueCnt_.end(); iter++) { iter->second = 0; } for (auto iter = mapTrainSpaceContinueCnt_.begin(); iter != mapTrainSpaceContinueCnt_.end(); iter++) { iter->second = 0; } for (auto iter = mapDataSourceIsEnd_.begin(); 
iter != mapDataSourceIsEnd_.end(); iter++) { iter->second = false; } for (auto iter = mapPostDataFrist_.begin(); iter != mapPostDataFrist_.end(); iter++) { iter->second.vecPostSubData.clear(); } for (auto iter = mapMapStep1Info_.begin(); iter != mapMapStep1Info_.end(); iter++) { iter->second.clear(); } iDirection_ = DIRECTION_UNKNOWN; iNotChgCount_ = 0; while (!stackBackInfo_.empty()) { stackBackInfo_.pop(); } while (!quePostData_.empty()) { quePostData_.pop(); } iTrainStatus_ = TRAINSTATUS_RUN; mapCalDirection_.clear(); } /** * push数据到队列,队列满时则休眠一段时间再push * inParam : const std::string strPort push的端口 : const std::shared_ptr &pProcessData push的数据 * outParam: N/A * return : N/A */ void FilterTrainStepOneEngine::PushData(const std::string &strPort, const std::shared_ptr &pProcessData) { while (true) { int iRet = outputQueMap_[strPort]->push(std::static_pointer_cast(pProcessData)); if (iRet != 0) { LogDebug << "sourceid:" << pProcessData->iDataSource << " frameid:" << pProcessData->iFrameId << " push fail iRet:" << iRet; if (iRet == 2) { usleep(10000); // 10ms continue; } } break; } } void FilterTrainStepOneEngine::AddBackInfo(std::shared_ptr pProcessData) { std::shared_ptr pPostData = std::static_pointer_cast(pProcessData->pVoidData); std::sort(pPostData->vecPostSubData.begin(), pPostData->vecPostSubData.end(), CompareX); std::string strAllClassType; for (size_t i = 0; i < pPostData->vecPostSubData.size(); i++) { if (strAllClassType.find(mapTargetStr_[pPostData->vecPostSubData[i].iTargetType]) != std::string::npos) { continue; } strAllClassType += mapTargetStr_[pPostData->vecPostSubData[i].iTargetType]; } if (strAllClassType.empty()) { return; } LogDebug << "frameId:" << pProcessData->iFrameId << " addbackinfo strAllClassType:" << strAllClassType; TrainBackInfo trainBackInfo; trainBackInfo.processData = pProcessData; trainBackInfo.strAllClassType = strAllClassType; if (stackBackInfo_.empty()) { stackBackInfo_.push(trainBackInfo); LogDebug << "frameId:" << pProcessData->iFrameId << " push strAllClassType:" << strAllClassType << " stacksize:" << stackBackInfo_.size(); } else { TrainBackInfo trainBackInfoTop = stackBackInfo_.top(); // 2024年3月27日修改前 // if (trainBackInfoTop.strAllClassType != trainBackInfo.strAllClassType) // { // stackBackInfo_.push(trainBackInfo); // LogDebug << "frameId:" << pProcessData->iFrameId << " push strAllClassType:" << strAllClassType // << " stacksize:" << stackBackInfo_.size(); // } if (trainBackInfoTop.strAllClassType != trainBackInfo.strAllClassType) { if (iDirection_ == DIRECTION_RIGHT && trainBackInfo.strAllClassType == "SPACE" && (trainBackInfoTop.strAllClassType == "PROSPACE" || trainBackInfoTop.strAllClassType == "SPACEPRO")) { return; } if (iDirection_ == DIRECTION_RIGHT && trainBackInfo.strAllClassType == "SPACE" && (trainBackInfoTop.strAllClassType == "NUMSPACE" || trainBackInfoTop.strAllClassType == "SPACENUM")) { return; } if (iDirection_ == DIRECTION_LEFT && trainBackInfo.strAllClassType == "SPACE" && (trainBackInfoTop.strAllClassType == "NUMSPACE" || trainBackInfoTop.strAllClassType == "SPACENUM")) { return; } stackBackInfo_.push(trainBackInfo); LogDebug << "frameId:" << pProcessData->iFrameId << " push strAllClassType:" << strAllClassType << " stacksize:" << stackBackInfo_.size(); } } } bool FilterTrainStepOneEngine::IsEndDealBackInfo(std::shared_ptr pProcessData) { if (stackBackInfo_.empty()) { return true; } bool bPopFlag = false; std::shared_ptr pPostData = std::static_pointer_cast(pProcessData->pVoidData); if (pPostData->vecPostSubData.size() == 0) 
return false; /* 处理倒车数据时,数据需设置为倒车,主要是保证这样的数据后面Engine不处理,防止切分车厢出错。 类型不相等时,就pop,当pop后,还剩一个数据时,则表示已经回到了刚开始倒车的地方。(只剩一个数据的逻辑在上方) 处理最后一个时,不能只判断下类型相同就弹出。需要控制下位置。(要么类型相同位置合适,要么类型不相同) 正向为向左行驶,则当前数据的位置尽量小于等于栈中最后一个元素的位置。 正向为向右行驶,则当前数据的位置尽量大于等于栈中最后一个元素的位置。 */ std::sort(pPostData->vecPostSubData.begin(), pPostData->vecPostSubData.end(), CompareX); std::string strAllClassType; for (size_t i = 0; i < pPostData->vecPostSubData.size(); i++) { if (strAllClassType.find(mapTargetStr_[pPostData->vecPostSubData[i].iTargetType]) != std::string::npos) { continue; } strAllClassType += mapTargetStr_[pPostData->vecPostSubData[i].iTargetType]; } if (strAllClassType.empty()) { return false; } if (stackBackInfo_.size() == 1) { TrainBackInfo trainBackInfoLast = stackBackInfo_.top(); std::shared_ptr pPostDataBack = std::static_pointer_cast(trainBackInfoLast.processData->pVoidData); std::sort(pPostDataBack->vecPostSubData.begin(), pPostDataBack->vecPostSubData.end(), CompareX); for (size_t i = 0; i < pPostDataBack->vecPostSubData.size(); i++) { int bFlag = -1; for (size_t j = 0; j < pPostData->vecPostSubData.size(); j++) { if (pPostDataBack->vecPostSubData[i].iBigClassId == pPostData->vecPostSubData[j].iBigClassId) { if (pPostData->vecPostSubData[j].step1Location.fLTX < 1 || pPostDataBack->vecPostSubData[i].step1Location.fLTX < 1) { LogDebug << "大框X坐标小于1,判定为异常大框。过滤!!"; break; } bFlag = (pPostDataBack->vecPostSubData[i].step1Location.fLTX <= pPostData->vecPostSubData[j].step1Location.fLTX) ? 1 : 0; LogDebug << "帧:" << pProcessData->iFrameId << " 倒车前帧:" << pPostDataBack->iFrameId << " 恢复到原位:" << bFlag << " 当前框位置:" << pPostData->vecPostSubData[i].step1Location.fLTX << " 倒车前位置:" << pPostDataBack->vecPostSubData[i].step1Location.fLTX << "方向:" << iDirection_; } } if ((iDirection_ == DIRECTION_LEFT && bFlag == 0) || (iDirection_ == DIRECTION_RIGHT && bFlag == 1)) { bPopFlag = true; break; } } if (bPopFlag) { LogDebug << "frameId:" << pProcessData->iFrameId << " 恢复倒车前的位置:" << bPopFlag; stackBackInfo_.pop(); } } else { TrainBackInfo trainBackInfoTop_bak = stackBackInfo_.top(); stackBackInfo_.pop(); TrainBackInfo trainBackInfoTop = stackBackInfo_.top(); if (trainBackInfoTop.strAllClassType != strAllClassType) { stackBackInfo_.push(trainBackInfoTop_bak); LogDebug << "帧:" << pProcessData->iFrameId << " 倒车信息:" << stackBackInfo_.size() << " 顶部倒车信息:" << trainBackInfoTop.strAllClassType << " 本次识别信息:" << strAllClassType; } else { // bPopFlag = true; LogDebug << "帧:" << pProcessData->iFrameId << " 倒车信息:" << stackBackInfo_.size() << " 顶部倒车信息:" << trainBackInfoTop.strAllClassType << " 本次识别信息:" << strAllClassType << " 删除倒车信息:" << trainBackInfoTop_bak.strAllClassType; } // if(bPopFlag) // { // stackBackInfo_.pop(); // bPopFlag = false; // } } return stackBackInfo_.empty() ? true : false; } /** * 校验火车是否停止 * inParam : std::shared_ptr pProcessData :待处理数据 * outParam: N/A * return : true:停止; false:非停止 1(正常行驶) 2(停车) 3(倒车) */ int FilterTrainStepOneEngine::GetTrainStatus(std::shared_ptr pProcessData) { if (iDirection_ == DIRECTION_UNKNOWN) { LogDebug << " frameId:" << pProcessData->iFrameId << " 未判断出行车方向,暂定认为火车正常行驶中"; return TRAINSTATUS_RUN; } std::shared_ptr pPostData = std::static_pointer_cast(pProcessData->pVoidData); pPostData->iFrameId = pProcessData->iFrameId; // 1. 
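
/*
 * Illustrative sketch (not part of the patch): AddBackInfo / IsEndDealBackInfo
 * above behave like an undo stack keyed by the per-frame "class type" signature
 * string (e.g. "NUMSPACE"). The class below is a simplified, self-contained
 * model of that idea, not the project's actual types or pop conditions.
 */
#include <stack>
#include <string>

class BackTracker
{
public:
    // While the train is reversing: remember each newly seen frame signature.
    void OnBackwardFrame(const std::string &strSignature)
    {
        if (stack_.empty() || stack_.top() != strSignature)
        {
            stack_.push(strSignature);
        }
    }

    // While the train runs forward again: consume matching signatures.
    // Returns true once the reversed stretch has been fully re-driven.
    bool OnForwardFrame(const std::string &strSignature)
    {
        if (!stack_.empty() && stack_.top() == strSignature)
        {
            stack_.pop();
        }
        return stack_.empty();
    }

private:
    std::stack<std::string> stack_;
};
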
无框时,返回之前的列车状态 if (pPostData->vecPostSubData.size() == 0) { return iTrainStatus_; } quePostData_.push(*pPostData.get()); if (quePostData_.size() < 3) { return TRAINSTATUS_RUN; } PostData postDataFront = quePostData_.front(); //iNotChgCount_大于0表示有可能停车,此时pop队列数据要多留存几个。用最开始的数据来判断是否真正停车,如果每次只用上上帧判断当列车超级慢时可能判断为停车。 int iSizeTemp = iNotChgCount_ > 0 ? 10 : 2; while (quePostData_.size() > iSizeTemp) { quePostData_.pop(); } LogDebug << "frameId:" << pProcessData->iFrameId << " 判断运动状态队列 第一帧:" << postDataFront.iFrameId << " 队列size:" << quePostData_.size() << " iSizeTemp:" << iSizeTemp; bool bSameFlag = false; int iDiffValue = iChkStopPX_; for (size_t i = 0; i < pPostData->vecPostSubData.size(); i++) { PostSubData postSubDataBack = pPostData->vecPostSubData[i]; for (size_t j = 0; j < postDataFront.vecPostSubData.size(); j++) { PostSubData postSubDataFront = postDataFront.vecPostSubData[j]; /* 使用iBigClassId,可能出现平车只有间隔大框,且间隔大框可以一会是平车间隔,一会是通用间隔。导致类别不一样 使用iTargetType,可能出现平车只有间隔大框,且间隔大框可以一会是平车间隔,一会是通用间隔。导致像素差判断不准。 */ if (postSubDataFront.iTargetType != postSubDataBack.iTargetType) { LogDebug << "判断前后帧识别的是否一致 上一个:" << postSubDataFront.iTargetType << " 当前:" << postSubDataBack.iTargetType; continue; } // if (postSubDataFront.iTargetType == SPACE && postSubDataFront.iBigClassId != postSubDataBack.iBigClassId) // { // iDiffValue = 50; // } bSameFlag = true; int iCenterBack = postSubDataBack.step1Location.fLTX + (postSubDataBack.step1Location.fRBX - postSubDataBack.step1Location.fLTX) / 2; int iCenterFront = postSubDataFront.step1Location.fLTX + (postSubDataFront.step1Location.fRBX - postSubDataFront.step1Location.fLTX) / 2; //位置比较大于10个像素,则表示有移动。再判断时正向移动,还是倒车 LogDebug << "frameId:" << pProcessData->iFrameId << " " << iCenterBack << "-" << iCenterFront << "=" << abs(iCenterBack - iCenterFront) << " 预期判定移动的差值为iDiffValue:" << iDiffValue; if (abs(iCenterBack - iCenterFront) > iDiffValue) { iNotChgCount_ = 0; /* iCenterBack > iCenterFront 表示向右行驶,且原方向为向左行驶 iCenterBack < iCenterFront 表示向左行驶,且原方向为向右行驶 以上2种表示倒车。 */ if ((iCenterBack > iCenterFront && iDirection_ == DIRECTION_LEFT) || (iCenterBack < iCenterFront && iDirection_ == DIRECTION_RIGHT)) { if (this->iPartitionFrameNum_ < (pProcessData->iFrameId - postDataFront.iFrameId) && this->iPlitFrameSpanPX_ < abs(iCenterBack - iCenterFront)) { return TRAINSTATUS_RUN; } LogDebug << "frameId:" << pProcessData->iFrameId << " 检测到火车倒车"; return TRAINSTATUS_BACK; } else { LogDebug << "frameId:" << pProcessData->iFrameId << " 正常行驶"; return TRAINSTATUS_RUN; } } /* 小于10个像素表示可能停车,累计未变化次数。 累计变化次数超过10次,返回停车 累计变化次数未超过10次,返回之前行驶状态 */ else { iNotChgCount_++; LogDebug << " frameId:" << pProcessData->iFrameId << " 大框移动范围小 判断停车计数:" << iNotChgCount_ << "/" << iChkStopCount_; if (iNotChgCount_ > iChkStopCount_) { LogDebug << "frameId:" << pProcessData->iFrameId << " 检测到火车停车"; return TRAINSTATUS_STOP; } else { // LogDebug << "frameId:" << pProcessData->iFrameId << " iTrainStatus_:" << iTrainStatus_; return iTrainStatus_; } } } } /* 未找到相同的框,说明是老框消失掉了,新框出现了。 按新框出现的位置判断是向左行驶,还是向右行驶。 */ LogDebug << "frameId:" << pProcessData->iFrameId << " bSameFlag:" << bSameFlag; if (!bSameFlag) { std::sort(pPostData->vecPostSubData.begin(), pPostData->vecPostSubData.end(), CompareX); Step1Location step1Location = pPostData->vecPostSubData.front().step1Location; if (iDirection_ == DIRECTION_LEFT) { step1Location = pPostData->vecPostSubData.back().step1Location; } LogDebug << "frameId:" << pProcessData->iFrameId << " fLTX:" << step1Location.fLTX << " fRBX:" << step1Location.fRBX; iNotChgCount_ = 0; int iCenter = 
step1Location.fLTX + (step1Location.fRBX - step1Location.fLTX) / 2; int iValue = pProcessData->iWidth / 2; if ((iCenter > iValue && iDirection_ == DIRECTION_RIGHT) || (iCenter < iValue && iDirection_ == DIRECTION_LEFT)) { /* 针对有效帧较少时,和上上帧比较没有同类型大框,且当前帧已行驶到画面中心导致误判的情况, 增加和上帧同类型大框的比较处理。 */ PostData postDataMiddle = quePostData_.front(); for (size_t i = 0; i < pPostData->vecPostSubData.size(); i++) { PostSubData postSubDataBack = pPostData->vecPostSubData[i]; for (size_t j = 0; j < postDataMiddle.vecPostSubData.size(); j++) { PostSubData postSubDataMiddle = postDataMiddle.vecPostSubData[j]; if (postSubDataMiddle.iTargetType != postSubDataBack.iTargetType) { continue; } int iCenterBack = postSubDataBack.step1Location.fLTX + (postSubDataBack.step1Location.fRBX - postSubDataBack.step1Location.fLTX) / 2; int iCenterMiddle = postSubDataMiddle.step1Location.fLTX + (postSubDataMiddle.step1Location.fRBX - postSubDataMiddle.step1Location.fLTX) / 2; // 位置比较大于10个像素,则表示有移动。再判断时正向移动,还是倒车 LogDebug << "frameId:" << pProcessData->iFrameId << " " << iCenterBack << "-" << iCenterMiddle << "=" << abs(iCenterBack - iCenterMiddle) << " middle cmp iDiffValue:" << iDiffValue; if (abs(iCenterBack - iCenterMiddle) > iDiffValue) { if ((iCenterBack > iCenterMiddle && iDirection_ == DIRECTION_LEFT) || (iCenterBack < iCenterMiddle && iDirection_ == DIRECTION_RIGHT)) { LogDebug << "frameId:" << pProcessData->iFrameId << " back2 back2"; return TRAINSTATUS_BACK; } else { LogDebug << "frameId:" << pProcessData->iFrameId << " run"; return TRAINSTATUS_RUN; } } } } // LogDebug << "frameId:" << pProcessData->iFrameId << " back2"; return iTrainStatus_; } } LogDebug << "frameId:" << pProcessData->iFrameId << " iNotChgCount_:" << iNotChgCount_ << " run run"; return TRAINSTATUS_RUN; } /** * 设置行车方向 * inParam : std::vector &vecLocation :大框坐标集合 : std::shared_ptr pProcessData :待处理数据 * outParam: N/A * return : N/A */ void FilterTrainStepOneEngine::SetDirection(std::vector &vecLocation, std::shared_ptr pProcessData) { if (vecLocation.size() < 1) { return; } Step1Location slFront = vecLocation.front(); Step1Location slBack = vecLocation.back(); /* 移动距离小于50个像素,则判断最后一个框的出现的位置 */ LogDebug << "sourceid:" << pProcessData->iDataSource << " frameid:" << pProcessData->iFrameId << " slFront:" << slFront.fLTX << " slBack:" << slBack.fLTX; if (fabs(slBack.fLTX - slFront.fLTX) < 50) { float fTempX1 = pProcessData->iWidth / 5; //5分之1的X坐标 float fTempX2 = pProcessData->iWidth / 5 * 4; //5分之4的X坐标 if (slBack.fLTX < fTempX1) { iDirection_ = DIRECTION_LEFT; } else if (slBack.fRBX > fTempX2) { iDirection_ = DIRECTION_RIGHT; } } else { //行驶方向 左 if ((slBack.fLTX - slFront.fLTX) < 0) { iDirection_ = DIRECTION_LEFT; } //行驶方向 右 else if ((slBack.fLTX - slFront.fLTX) > 0) { iDirection_ = DIRECTION_RIGHT; } } //未判断出移动方向,清除数后重新计算。 if (iDirection_ == DIRECTION_UNKNOWN) { auto iterMapStep1Info = mapMapStep1Info_.find(pProcessData->iDataSource); iterMapStep1Info->second.clear(); } else { if (MyYaml::GetIns()->GetDataSourceConfigById(pProcessData->iDataSource).iLeftFirst != mainCfg_.iLeftFirst) { LogDebug << "before sourceid:" << pProcessData->iDataSource << "frameid:" << pProcessData->iFrameId << " direction:" << iDirection_; iDirection_ = (iDirection_ == DIRECTION_LEFT) ? 
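
/*
 * Illustrative sketch (not part of the patch): the core decision inside
 * GetTrainStatus above, reduced to a pure function. The enum and parameters
 * are simplified stand-ins for TRAINSTATUS_* and the gc_chkstop_px /
 * gc_chkstop_count configuration values used by this project.
 */
#include <cstdlib>

enum class Motion { Run, Stop, Back };

// centerNow / centerOld: x-centre of the same target type in the current frame
// and in an older queued frame; movingLeft: the direction already established.
inline Motion JudgeMotion(int centerNow, int centerOld, bool movingLeft,
                          int pixelThreshold, int stopCount,
                          int &noChangeCount, Motion previous)
{
    if (std::abs(centerNow - centerOld) > pixelThreshold)
    {
        noChangeCount = 0;
        // Movement against the established direction means the train is backing up.
        bool movedRight = (centerNow > centerOld);
        return (movedRight == movingLeft) ? Motion::Back : Motion::Run;
    }
    // Small displacement: only after enough consecutive quiet frames call it a
    // stop, otherwise keep reporting the previous state.
    return (++noChangeCount > stopCount) ? Motion::Stop : previous;
}
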
DIRECTION_RIGHT : DIRECTION_LEFT; LogDebug << "after sourceid:" << pProcessData->iDataSource << "frameid:" << pProcessData->iFrameId << " direction:" << iDirection_; } } } /** * 计算行车方向新 * inParam : std::shared_ptr pProcessData :待处理数据 * outParam: N/A * return : N/A */ void FilterTrainStepOneEngine::CalculateDirectionNew(std::shared_ptr pProcessData) { /* 连续3帧同目标识别框信息 判断位置差异是否超过10px(判停车参数),且两两之间都是线性。如果符合则计算方向。 上述条件不符合则剔除第一个元素,再次累计连续3帧处理。 */ auto iterMap = mapCalDirection_.find(pProcessData->iDataSource); if (iterMap == mapCalDirection_.end()) { std::map> mapTemp; mapCalDirection_.insert(std::make_pair(pProcessData->iDataSource, mapTemp)); iterMap = mapCalDirection_.find(pProcessData->iDataSource); } std::shared_ptr pPostData = std::static_pointer_cast(pProcessData->pVoidData); for (auto iter = pPostData->vecPostSubData.begin(); iter != pPostData->vecPostSubData.end(); iter++) { CalculateInfo calInfo; calInfo.iFrameId = pProcessData->iFrameId; calInfo.iBigClassId = iter->iCarXH; calInfo.fCenterX = iter->step1Location.fLTX + (iter->step1Location.fRBX - iter->step1Location.fLTX) / 2; auto iterSubMap = iterMap->second.find(iter->iBigClassId); if (iterSubMap == iterMap->second.end()) { std::vector vecTemp; iterMap->second.insert(std::make_pair(iter->iBigClassId, vecTemp)); iterSubMap = iterMap->second.find(iter->iBigClassId); } iterSubMap->second.emplace_back(calInfo); if (iterSubMap->second.size() > 2) { LogDebug << "souceid:" << pProcessData->iDataSource << " frameid:" << pProcessData->iFrameId << " last:" << iterSubMap->second.at(2).iFrameId << " " << iterSubMap->second.at(2).fCenterX << " mid:" << iterSubMap->second.at(1).iFrameId << " " << iterSubMap->second.at(1).fCenterX << " pre:" << iterSubMap->second.at(0).iFrameId << " " << iterSubMap->second.at(0).fCenterX; //如果帧号连续,且移动位置大于15px,则计算方向 if (iterSubMap->second.at(2).iFrameId - iterSubMap->second.at(1).iFrameId != mainCfg_.iSkipInterval || iterSubMap->second.at(1).iFrameId - iterSubMap->second.at(0).iFrameId != mainCfg_.iSkipInterval) { iterSubMap->second.erase(iterSubMap->second.begin()); continue; } int iLast = iterSubMap->second.at(2).fCenterX; int iMid = iterSubMap->second.at(1).fCenterX; int iPre = iterSubMap->second.at(0).fCenterX; if (abs(iPre - iLast) <= iChkStopPX_) { iterSubMap->second.erase(iterSubMap->second.begin()); continue; } if (iPre <= iMid && iMid <= iLast) { iDirection_ = DIRECTION_RIGHT; } else if (iPre >= iMid && iMid >= iLast) { iDirection_ = DIRECTION_LEFT; } else { iterSubMap->second.erase(iterSubMap->second.begin()); continue; } LogDebug << "souceid:" << pProcessData->iDataSource << " frameid:" << pProcessData->iFrameId << " iDirection_:" << iDirection_; } } //主相机的对侧相机需反向设置方向 if (iDirection_ != DIRECTION_UNKNOWN && MyYaml::GetIns()->GetDataSourceConfigById(pProcessData->iDataSource).iLeftFirst != mainCfg_.iLeftFirst) { LogDebug << "before sourceid:" << pProcessData->iDataSource << "frameid:" << pProcessData->iFrameId << " direction:" << iDirection_; iDirection_ = (iDirection_ == DIRECTION_LEFT) ? 
DIRECTION_RIGHT : DIRECTION_LEFT; LogDebug << "after sourceid:" << pProcessData->iDataSource << "frameid:" << pProcessData->iFrameId << " direction:" << iDirection_; } } /** * 计算行车方向 * inParam : std::shared_ptr pProcessData :待处理数据 * outParam: N/A * return : N/A */ void FilterTrainStepOneEngine::CalculateDirection(std::shared_ptr pProcessData) { auto iterPostDataFrist = mapPostDataFrist_.find(pProcessData->iDataSource); auto iterMapStep1Info = mapMapStep1Info_.find(pProcessData->iDataSource); /* 1帧暂时最多识别3个大框。[(车头、间隔、车号); (车号、间隔、属性)] 因间隔大框不是很准确,暂时把间隔大框剔除后,计算方向。 剔除间隔后,大框的可能情况为(车头; 车号; 属性; 车头&车号; 车号&属性) */ std::shared_ptr pPostData = std::static_pointer_cast(pProcessData->pVoidData); PostData postDataTemp = *pPostData; for (auto iter = postDataTemp.vecPostSubData.begin(); iter != postDataTemp.vecPostSubData.end();) { if (iter->iTargetType == SPACE) { iter = postDataTemp.vecPostSubData.erase(iter); continue; } auto iterMap = iterMapStep1Info->second.find(iter->iTargetType); if (iterMap != iterMapStep1Info->second.end()) { iterMap->second.emplace_back(iter->step1Location); } else { std::vector vecTemp; vecTemp.emplace_back(iter->step1Location); iterMapStep1Info->second.insert(std::pair>(iter->iTargetType, vecTemp)); } iter++; } //记录第一次识别到大框的信息 if (iterPostDataFrist->second.vecPostSubData.size() == 0) { iterPostDataFrist->second = postDataTemp; LogDebug << "sourceid:" << pProcessData->iDataSource << " frameid:" << pProcessData->iFrameId << " size:" << iterPostDataFrist->second.vecPostSubData.size(); } //无框时,不计算方向 if (iterPostDataFrist->second.vecPostSubData.size() == 0) { LogDebug << "sourceid:" << pProcessData->iDataSource << " frameid:" << pProcessData->iFrameId << " no find data"; return; } //框的个数和类型没有变化时,不计算方向 if (iterPostDataFrist->second.vecPostSubData.size() - postDataTemp.vecPostSubData.size() == 0) { std::sort(iterPostDataFrist->second.vecPostSubData.begin(), iterPostDataFrist->second.vecPostSubData.end(), CompareX); std::sort(postDataTemp.vecPostSubData.begin(), postDataTemp.vecPostSubData.end(), CompareX); bool bFlag = true; for (int i = 0; i < iterPostDataFrist->second.vecPostSubData.size(); i++) { if (iterPostDataFrist->second.vecPostSubData[i].iTargetType != postDataTemp.vecPostSubData[i].iTargetType) { bFlag = false; break; } } if (bFlag) { LogDebug << "sourceid:" << pProcessData->iDataSource << " frameid:" << pProcessData->iFrameId << " datasize and datatype equal"; return; } } auto iter = iterMapStep1Info->second.begin(); int iMaxSize = iter->second.size(); int iMaxSizeKey = iter->first; int iMinSize = iter->second.size(); int iMinSizeKey = iter->first; while (iter != iterMapStep1Info->second.end()) { if (iter->second.size() > iMaxSize) { iMaxSize = iter->second.size(); iMaxSizeKey = iter->first; } if (iter->second.size() < iMinSize) { iMinSize = iter->second.size(); iMinSizeKey = iter->first; } iter++; } LogDebug << "sourceid:" << pProcessData->iDataSource << " frameid:" << pProcessData->iFrameId << " iMaxSize:" << iMaxSize << " iMaxSizeKey:" << iMaxSizeKey << " iMinSize:" << iMinSize << " iMinSizeKey:" << iMinSizeKey; /* 第一次识别为1个框,后续变化为无框或2个框,使用框个数多的数据计算方向 第一次识别为2个框,后续变化为1个框,使用个数多的数据计算方向。 后续变化为0个框,使用个数少的数据计算方向。 */ if (iterPostDataFrist->second.vecPostSubData.size() == 1) { SetDirection(iterMapStep1Info->second[iMaxSizeKey], pProcessData); } else if (iterPostDataFrist->second.vecPostSubData.size() == 2) { if (postDataTemp.vecPostSubData.size() == 1) { SetDirection(iterMapStep1Info->second[iMinSizeKey], pProcessData); } else if (postDataTemp.vecPostSubData.size() == 0) { 
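
/*
 * Illustrative sketch (not part of the patch): the three-sample rule used by
 * CalculateDirectionNew above, applied to a plain vector of x-centres. Return
 * values follow the project's convention (0 unknown, 1 left, 2 right); the
 * "frames must be spaced by the skip interval" check is omitted for brevity.
 */
#include <cstdlib>
#include <vector>

inline int DirectionFromCenters(std::vector<int> &centers, int stopThresholdPx)
{
    while (centers.size() >= 3)
    {
        int pre = centers[0], mid = centers[1], last = centers[2];
        bool farEnough = std::abs(pre - last) > stopThresholdPx;
        bool monotonic = (pre <= mid && mid <= last) || (pre >= mid && mid >= last);
        if (farEnough && monotonic)
        {
            return (last > pre) ? 2 /*right*/ : 1 /*left*/;
        }
        // Too little movement or not monotonic: drop the oldest sample and
        // wait for the next frame.
        centers.erase(centers.begin());
    }
    return 0; /*unknown*/
}
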
SetDirection(iterMapStep1Info->second[iMaxSizeKey], pProcessData); } } LogDebug<< "sourceid:" << pProcessData->iDataSource << " frameid:" << pProcessData->iFrameId << " 行车方向:" << iDirection_; } void FilterTrainStepOneEngine::sendComeTrain(const std::string strTrainDate, const std::string strTrainName, const int iDirection) { std::string message = "{\"cometime\":\"" + strTrainDate + " " + strTrainName + "\",\"type\":\"1\",\"direction\":" + to_string(iDirection == iPushDirection_ ? 1:-1) + "}"; LogWarn << message; outputQueMap_[engineName_ + "_" + std::to_string(engineId_) + "_1"]->push(std::static_pointer_cast(std::make_shared(message))); } /** * 根据当前帧数据,处理上一帧数据 * inParam : std::shared_ptr pProcessData :当前帧数据 * outParam: N/A * return : N/A */ void FilterTrainStepOneEngine::DealProcessDataPre(std::shared_ptr pProcessData) { /* 目标框是否是连续识别,只识别到一帧的目标框认为误识别,过滤掉。 判断上一帧,当前帧 是否有框 上一帧有框,当前帧有框,说明连续识别,正常处理。 上一帧有框,当前帧无框,则非连续识别,过滤大框 上一帧无框,当前帧有框,则连续识别个数置零。 上一帧无框,当前帧无框,则连续识别个数置零。 */ auto iterProcessData = mapProcessDataPre_.find(pProcessData->iDataSource); if (iterProcessData == mapProcessDataPre_.end() || nullptr == iterProcessData->second) { return; } auto iterHeadContinueCnt = mapHeadContinueCnt_.find(pProcessData->iDataSource); auto iterProContinueCnt = mapProContinueCnt_.find(pProcessData->iDataSource); auto iterNumContinueCnt = mapNumContinueCnt_.find(pProcessData->iDataSource); auto iterSpaceContinueCnt = mapSpaceContinueCnt_.find(pProcessData->iDataSource); auto iterTranSpaceContinueCnt = mapTrainSpaceContinueCnt_.find(pProcessData->iDataSource); //获取当前帧识别情况 std::shared_ptr pPostData = std::static_pointer_cast(pProcessData->pVoidData); for (int i = 0; i < pPostData->vecPostSubData.size(); i++) { if (pPostData->vecPostSubData[i].iTargetType == HEAD) { iterHeadContinueCnt->second++; } else if (pPostData->vecPostSubData[i].iTargetType == PRO) { iterProContinueCnt->second++; } else if (pPostData->vecPostSubData[i].iTargetType == NUM) { iterNumContinueCnt->second++; } else if (pPostData->vecPostSubData[i].iTargetType == SPACE) { iterSpaceContinueCnt->second++; } else if (pPostData->vecPostSubData[i].iTargetType == TRAINSPACE) { iterTranSpaceContinueCnt->second++; } } //LogDebug << "engineId:" << engineId_ << " frameId:" << pProcessData->iFrameId << " " << iProContinueCnt_ << " " << iNumContinueCnt_; //获取上一帧识别情况 std::shared_ptr pPostDataPre = std::static_pointer_cast(iterProcessData->second->pVoidData); bool bPreHaveHead = false; bool bPreHavePro = false; bool bPreHaveNum = false; bool bPreHaveSpace = false; bool bPreHaveTrainSpace = false; for (int iPre = 0; iPre < pPostDataPre->vecPostSubData.size(); iPre++) { if (pPostDataPre->vecPostSubData[iPre].iTargetType == HEAD) { iterHeadContinueCnt->second++; bPreHaveHead = true; } else if (pPostDataPre->vecPostSubData[iPre].iTargetType == PRO) { iterProContinueCnt->second++; bPreHavePro = true; } else if (pPostDataPre->vecPostSubData[iPre].iTargetType == NUM) { iterNumContinueCnt->second++; bPreHaveNum = true; } else if (pPostDataPre->vecPostSubData[iPre].iTargetType == SPACE) { iterSpaceContinueCnt->second++; bPreHaveSpace = true; } else if (pPostDataPre->vecPostSubData[iPre].iTargetType == TRAINSPACE) { iterTranSpaceContinueCnt->second++; bPreHaveTrainSpace = true; } } //前一帧无大框连续识别次数置零 iterHeadContinueCnt->second = bPreHaveHead ? iterHeadContinueCnt->second : 0; iterProContinueCnt->second = bPreHavePro ? iterProContinueCnt->second : 0; iterNumContinueCnt->second = bPreHaveNum ? iterNumContinueCnt->second : 0; iterSpaceContinueCnt->second = bPreHaveSpace ? 
iterSpaceContinueCnt->second : 0; iterTranSpaceContinueCnt->second = bPreHaveTrainSpace ? iterTranSpaceContinueCnt->second : 0; //非连续识别的情况,认为误识别,剔除误识别的大框信息 for (std::vector::iterator it = pPostDataPre->vecPostSubData.begin(); it != pPostDataPre->vecPostSubData.end();) { if (iterHeadContinueCnt->second < 2 && it->iTargetType == HEAD) { LogDebug << " frameId:" << iterProcessData->second->iFrameId << " Head 框因非连续识别而过滤"; it = pPostDataPre->vecPostSubData.erase(it); continue; } if (iterProContinueCnt->second < 2 && it->iTargetType == PRO) { LogDebug << " frameId:" << iterProcessData->second->iFrameId << " PRO 框因非连续识别而过滤"; it = pPostDataPre->vecPostSubData.erase(it); continue; } if (iterNumContinueCnt->second < 2 && it->iTargetType == NUM) { LogDebug << " frameId:" << iterProcessData->second->iFrameId << " NUM 框因非连续识别而过滤"; it = pPostDataPre->vecPostSubData.erase(it); continue; } if (iterSpaceContinueCnt->second < 2 && it->iTargetType == SPACE) { LogDebug << " frameId:" << iterProcessData->second->iFrameId << " SPACE 框因非连续识别而过滤"; it = pPostDataPre->vecPostSubData.erase(it); continue; } if (iterTranSpaceContinueCnt->second < 2 && it->iTargetType == TRAINSPACE) { LogDebug << " frameId:" << iterProcessData->second->iFrameId << " TRAINSPACE 框因非连续识别而过滤"; it = pPostDataPre->vecPostSubData.erase(it); continue; } it++; } //判定行驶方向, 记录Direction文件信息 if (iDirection_ == DIRECTION_UNKNOWN) { iDirection_ = iterProcessData->second->iDirection; LogInfo << "方向:" << (iDirection_ == DIRECTION_LEFT ? "左" : (iDirection_ == DIRECTION_RIGHT ? "右" : "未判断出来")); if (iDirection_ == DIRECTION_UNKNOWN) { //CalculateDirection(iterProcessData->second); CalculateDirectionNew(iterProcessData->second); if (iDirection_ != DIRECTION_UNKNOWN) this->sendComeTrain(pProcessData->strTrainDate, pProcessData->strTrainName, iDirection_); } if (iDirection_ != DIRECTION_UNKNOWN) { auto iterPostDataFrist = mapPostDataFrist_.find(pProcessData->iDataSource); Json::Value jvDirectionInfo; jvDirectionInfo["direction"] = iDirection_; jvDirectionInfo["firstStep1Cnt"] = iterPostDataFrist->second.vecPostSubData.size(); jvDirectionInfo["frameid"] = iterProcessData->second->iFrameId; jvDirectionInfo["sourceid"] = iterProcessData->second->iDataSource; std::string strFilePath = strResultPath_ + pProcessData->strTrainDate + "/" + pProcessData->strTrainName + "/" + "direction.txt"; MyUtils::getins()->WriteJsonInfo(jvDirectionInfo, strFilePath); } } //主摄像头校验是否停车 int iTrainStatusTemp = iTrainStatus_; if (iterProcessData->second->iDataSource == 0) { iTrainStatus_ = GetTrainStatus(iterProcessData->second); iTrainStatusTemp = iTrainStatus_; if (iTrainStatus_ == TRAINSTATUS_STOP) { //停车 } else if (iTrainStatus_ == TRAINSTATUS_BACK) { //倒车 AddBackInfo(iterProcessData->second); iTrainStatusTemp = TRAINSTATUS_STOP; } else if(iTrainStatus_ == TRAINSTATUS_RUN) { /* 正向行驶需先把倒车产生的倒车数据处理完毕,即使车辆回到原倒车点,再开始识别行驶数据 */ if(!IsEndDealBackInfo(iterProcessData->second)) { iTrainStatusTemp = TRAINSTATUS_STOP; } } } LogDebug << "数据源:" << iterProcessData->second->iDataSource << " 帧:" << iterProcessData->second->iFrameId << " 火车实时运行状态:" << iTrainStatus_ << "(0无车,1运行,2停车,3倒车) iTrainStatusTemp:" << iTrainStatusTemp; iterProcessData->second->iStatus = iTrainStatusTemp; // this->sendComeTrain(pProcessData->strTrainDate, pProcessData->strTrainName, iDirection_); //上一帧,push端口0 PushData(strPort0_, iterProcessData->second); } APP_ERROR FilterTrainStepOneEngine::Process() { int iRet = APP_ERR_OK; while (!isStop_) { std::shared_ptr pVoidData0 = nullptr; inputQueMap_[strPort0_]->pop(pVoidData0); 
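
/*
 * Illustrative sketch (not part of the patch): the "must be seen in two
 * consecutive frames" rule applied by DealProcessDataPre above, using plain
 * ints as target types. Boxes whose type was not detected continuously are
 * dropped from the previous frame before it is pushed downstream.
 */
#include <map>
#include <vector>

inline void FilterNonContinuous(std::vector<int> &prevFrameTypes,
                                const std::vector<int> &curFrameTypes,
                                std::map<int, int> &continueCnt)
{
    // Count detections of each type in the current and the previous frame.
    for (int t : curFrameTypes) { continueCnt[t]++; }
    for (int t : prevFrameTypes) { continueCnt[t]++; }

    // A type missing from the previous frame restarts its streak.
    for (auto &kv : continueCnt)
    {
        bool inPrev = false;
        for (int t : prevFrameTypes) { if (t == kv.first) { inPrev = true; break; } }
        if (!inPrev) { kv.second = 0; }
    }

    // Previous-frame boxes whose streak is still < 2 are treated as noise.
    for (auto it = prevFrameTypes.begin(); it != prevFrameTypes.end();)
    {
        if (continueCnt[*it] < 2)
        {
            it = prevFrameTypes.erase(it);
        }
        else
        {
            ++it;
        }
    }
}
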
        if (nullptr == pVoidData0)
        {
            usleep(1000); //1ms
            continue;
        }

        std::shared_ptr pProcessData = std::static_pointer_cast(pVoidData0);
        std::shared_ptr pPostData = std::static_pointer_cast(pProcessData->pVoidData);

        // //停止后再次行驶,把队列中多余的数据丢弃。
        // if (iReRunOrigFrameId_ != 0 && pProcessData->iOrigFrameId > iReRunOrigFrameId_ && !pProcessData->bIsEnd)
        // {
        //     LogDebug << "reRunOrigFrameId:" << iReRunOrigFrameId_ << " origFrameId:" << pProcessData->iOrigFrameId;
        //     continue;
        // }

        if (pProcessData->bIsEnd)
        {
            mapDataSourceIsEnd_[pProcessData->iDataSource] = pProcessData->bIsEnd;
        }

        //1帧暂时最多识别4个大框。[(车头、车厢间隔、间隔、车号); (车号、车厢间隔、间隔、属性)]
        if (pPostData->vecPostSubData.size() > 4)
        {
            LogError << "sourceid:" << pProcessData->iDataSource << " frameId:" << pProcessData->iFrameId
                     << " vecpostsubdata size:" << pPostData->vecPostSubData.size();
            pPostData->vecPostSubData.clear();
        }

        //按指定识别区域过滤误识别信息--过滤逻辑迁移至TrainStepOneEngine中。

        //根据当前帧数据,处理上一帧数据
        DealProcessDataPre(pProcessData);
        mapProcessDataPre_[pProcessData->iDataSource] = pProcessData;
        if (pProcessData->bIsEnd)
        {
            //结束帧,push端口0
            LogDebug << "sourceid:" << pProcessData->iDataSource << " frameid:" << pProcessData->iFrameId << " isEnd:" << pProcessData->bIsEnd;
            PushData(strPort0_, pProcessData);
        }

        //3. 全部结束,初始化相关参数
        bool bAllEnd = true;
        for (auto iter = mapDataSourceIsEnd_.begin(); iter != mapDataSourceIsEnd_.end(); iter++)
        {
            bAllEnd = bAllEnd && iter->second;
        }
        if (bAllEnd)
        {
            LogDebug << "sourceid:" << pProcessData->iDataSource << " frameid:" << pProcessData->iFrameId << " bAllEnd";
            InitParam();
        }
    }
    return APP_ERR_OK;
}
\ No newline at end of file
diff --git a/nvidia_ascend_engine/common_engine/MergerEngine/MergerAllEngine.cpp b/nvidia_ascend_engine/common_engine/MergerEngine/MergerAllEngine.cpp
index f3048ec..9ec9201 100644
--- a/nvidia_ascend_engine/common_engine/MergerEngine/MergerAllEngine.cpp
+++ b/nvidia_ascend_engine/common_engine/MergerEngine/MergerAllEngine.cpp
@@ -113,6 +113,8 @@ void MergerAllEngine::PushData(std::shared_ptr pTrain)
              << "集装箱2: " << pTrain->container2.strContainerNo << "\n"
              << "集装箱2图片: " << pTrain->container2.strBestImg << "\n"
              << "集装箱2时间戳: " << pTrain->container2.i64TimeStamp << "\n"
+             << "车厢开始时间: " << MyUtils::getins()->Stamp2Time(pTrain->i64StartTimeStamp, true) << "\n"
+             << "车厢结束时间: " << MyUtils::getins()->Stamp2Time(pTrain->i64EndTimeStamp, true) << "\n"
              << " ---所有信息合并结果 END--- ";
     if (pTrain->bIsEnd)
     {
diff --git a/nvidia_ascend_engine/common_engine/SaveEngine/SaveCsvEngine.cpp b/nvidia_ascend_engine/common_engine/SaveEngine/SaveCsvEngine.cpp
index 5381376..232abfd 100644
--- a/nvidia_ascend_engine/common_engine/SaveEngine/SaveCsvEngine.cpp
+++ b/nvidia_ascend_engine/common_engine/SaveEngine/SaveCsvEngine.cpp
@@ -1,497 +1 @@
-#include "SaveCsvEngine.h"
-
-using namespace ai_matrix;
-
-SaveCsvEngine::SaveCsvEngine() {}
-
-SaveCsvEngine::~SaveCsvEngine() {}
-
-APP_ERROR SaveCsvEngine::Init()
-{
-    strPort0_ = engineName_ + "_" + std::to_string(engineId_) + "_0";
-    strPort1_ = engineName_ + "_" + std::to_string(engineId_) + "_1";
-    strPort2_ = engineName_ + "_" + std::to_string(engineId_) + "_2";
-    strResultPath_ = MyYaml::GetIns()->GetPathValue("gc_result_path");
-    strPoundNo_ = MyYaml::GetIns()->GetStringValue("atlas_poundno");
-
-    LogInfo << "SaveCsvEngine Init ok";
-    return APP_ERR_OK;
-}
-
-APP_ERROR SaveCsvEngine::DeInit()
-{
-    LogInfo << "SaveCsvEngine DeInit ok";
-    return APP_ERR_OK;
-}
-
-/**
-* 保存合并后车厢的最优结果到CSV中 (该文件支持其web导入)
-* inParam : std::shared_ptr pTrain :列车信息
-* outParam:
-* return : true/false
-*/
-bool
SaveCsvEngine::SaveMergerCsv(std::shared_ptr pTrain) -{ - //1. 创建保存路径 (固定路径/YYYY-MM-DD/hh-mm-ss/) - std::string strTrainPath = strResultPath_ + pTrain->strTrainDate + "/" + pTrain->strTrainName + "/"; - if (!MyUtils::getins()->CreateDirPath(strTrainPath)) - { - LogError << "iCarXH:" << pTrain->iCarXH << " train savecsv err"; - return false; - } - - //2. 保存csv - std::string strCsvName = pTrain->strTrainDate + pTrain->strTrainName + std::string(".csv"); - strCsvName = MyUtils::getins()->replace_all_distinct(strCsvName, std::string("-"), std::string("")); - std::string strCsvPath = strTrainPath + strCsvName; - - bool bIsExsit = false; - if (access(strCsvPath.c_str(), F_OK) != -1) - { - bIsExsit = true; - } - - try - { - // 写文件 - std::ofstream outFile; - outFile.open(strCsvPath, std::ios::app); // 打开模式可省略 - - if (!bIsExsit) - { - outFile << "poundno" << ',' - << "year" << ',' - << "time" << ',' - << "direction" << ',' - << "speed" << ',' - << "camerano" << ',' - << "skipInterval" << ',' - << "carxh" << ',' - << "type" << ',' - << "num" << ',' - << "load" << ',' - << "self" << ',' - << "volume" << ',' - << "volumesurface" << ',' - << "change" << ',' - << "numImgPath" << ',' - << "proImgPath" << ',' - << "videoStart" << ',' - << "videoEnd" << ',' - << "containerNo1" << ',' - << "containerNo2" << ',' - << "inspection" << ',' - << "inspectionImg" << ',' - << "containerImg_1" << ',' - << "containerImg_2" << ',' - << "startTime" << ',' - << "endTime" - << std::endl; - } - - std::string strTime = pTrain->strTrainName; - strTime = MyUtils::getins()->replace_all_distinct(strTime, std::string("-"), std::string(":")); - ai_matrix::DataSourceConfig dataSourceConfig = MyYaml::GetIns()->GetDataSourceConfigById(pTrain->trainNum.iDataSource); //获取摄像机参数 - - char szCameraNo[4] = {0}; - sprintf(szCameraNo, "%03d", pTrain->trainNum.iDataSource + 1); - - char szNumImgPath[64] = {0}; //车号最优图片路径 - if (!pTrain->trainNum.strBestImg.empty()) - { - sprintf(szNumImgPath, "%03d/%s", pTrain->trainNum.iDataSource + 1, pTrain->trainNum.strBestImg.c_str()); - } - - char szProImgPath[64] = {0}; //属性最优图片路径 - if (!pTrain->trainPro.strBestImg.empty()) - { - sprintf(szProImgPath, "%03d/%s", pTrain->trainPro.iDataSource + 1, pTrain->trainPro.strBestImg.c_str()); - } - - char szChkDateImgPath[64] = {0}; //定检期最优图片路径 - if (!pTrain->chkDate.strBestImg.empty()) - { - sprintf(szChkDateImgPath, "%03d/%s", pTrain->chkDate.iDataSource + 1, pTrain->chkDate.strBestImg.c_str()); - } - - char szContainer1ImgPath[64] = {0}; //集装箱1最优图片路径 - if (!pTrain->container1.strBestImg.empty()) - { - sprintf(szContainer1ImgPath, "%03d/%s", pTrain->container1.iDataSource + 1, pTrain->container1.strBestImg.c_str()); - } - - char szContainer2ImgPath[64] = {0}; //集装箱2最优图片路径 - if (!pTrain->container2.strBestImg.empty()) - { - sprintf(szContainer2ImgPath, "%03d/%s", pTrain->container2.iDataSource + 1, pTrain->container2.strBestImg.c_str()); - } - - outFile << strPoundNo_ << ',' - << pTrain->strTrainDate << ',' - << strTime << ',' - << pTrain->iDirection << ',' - << 0.0 << ',' - << szCameraNo << ',' - << dataSourceConfig.iSkipInterval << ',' - << pTrain->iCarXH << ',' - << pTrain->trainNum.strTrainType << ',' - << pTrain->trainNum.strTrainNum << ',' - << pTrain->trainPro.strLoad << ',' - << pTrain->trainPro.strSelf << ',' - << pTrain->trainPro.strVolume << ',' - << pTrain->trainPro.strVolumeSurface << ',' //容量记表 - << pTrain->trainPro.strChange << ',' - << szNumImgPath << ',' - << szProImgPath << ',' - << pTrain->iStartFrameId << ',' - << pTrain->iEndFrameId << ',' 
- << pTrain->container1.strContainerNo << ',' - << pTrain->container2.strContainerNo << ',' - << pTrain->chkDate.strChkDate1DeadLine << ',' - << szChkDateImgPath << ',' - << szContainer1ImgPath << ',' - << szContainer2ImgPath << ',' - << MyUtils::getins()->Stamp2Time(pTrain->i64StartTimeStamp, true) << ',' - << MyUtils::getins()->Stamp2Time(pTrain->i64EndTimeStamp, true) - << std::endl; - - outFile.close(); - } - catch (const std::exception &) - { - LogError << "strCsvPath:" << strCsvPath << " train savecsv fail!"; - return false; - } - return true; -} - -/** -* 保存车厢的最优结果到CSV中 -* inParam : std::shared_ptr pTrain :列车信息 -* outParam: -* return : true/false -*/ -bool SaveCsvEngine::SaveTrainCsv(std::shared_ptr pTrain) -{ - char szCameraNo[4] = {0}; - sprintf(szCameraNo, "%03d", pTrain->iDataSource + 1); - std::string strTrainPath = strResultPath_ + pTrain->strTrainDate + "/" + pTrain->strTrainName + "/" + - szCameraNo + "/"; - - //1. 创建保存路径 (固定路径/YYYY-MM-DD/hh-mm-ss/iDataSoure/) - if (!MyUtils::getins()->CreateDirPath(strTrainPath)) - { - LogError << "iCarXH:" << pTrain->iCarXH << " train savecsv err"; - return false; - } - - //2. 保存csv - std::string strCsvName = pTrain->strTrainDate + pTrain->strTrainName + "_" + szCameraNo + "_train.csv"; - strCsvName = MyUtils::getins()->replace_all_distinct(strCsvName, std::string("-"), std::string("")); - std::string strCsvPath = strTrainPath + strCsvName; - - bool bIsExsit = false; - if (access(strCsvPath.c_str(), F_OK) != -1) - { - bIsExsit = true; - } - - try - { - // 写文件 - std::ofstream outFile; - outFile.open(strCsvPath, std::ios::app); - - if (!bIsExsit) - { - outFile << "car_xh" << ',' - << "train_num_name" << ',' - << "train_pro_name" << ',' - << "t_type" << ',' - << "t_num" << ',' - << "load" << ',' - << "self" << ',' - << "volume" << ',' - << "change" << ',' - << "volumesurface" << ',' - << "num_ltx" << ',' - << "num_lty" << ',' - << "num_rbx" << ',' - << "num_rby" << ',' - << "pro_ltx" << ',' - << "pro_lty" << ',' - << "pro_rbx" << ',' - << "pro_rby" << ',' - << "start_frameId" << ',' - << "end_frameId" << ',' - << "start_timestamp" << ',' //记录车厢开始,结束帧时间戳,用于比对集装箱合并 - << "timestamp" << ',' - << "end_timestamp" << ',' - << "start_num" << ',' - << "num_timestamp" << ',' //记录车号,属性时间戳,用于比对定检期合并 - << "end_num" << ',' - << "start_pro" << ',' - << "pro_timestamp" << ',' - << "end_pro" << std::endl; - } - outFile << pTrain->iCarXH << ',' - << pTrain->trainNum.strBestImg << ',' - << pTrain->trainPro.strBestImg << ',' - << pTrain->trainNum.strTrainType << ',' - << pTrain->trainNum.strTrainNum << ',' - << pTrain->trainPro.strLoad << ',' - << pTrain->trainPro.strSelf << ',' - << pTrain->trainPro.strVolume << ',' - << pTrain->trainPro.strChange << ',' - << pTrain->trainPro.strVolumeSurface << ',' - << pTrain->trainNum.step1Location.fLTX << ',' - << pTrain->trainNum.step1Location.fLTY << ',' - << pTrain->trainNum.step1Location.fRBX << ',' - << pTrain->trainNum.step1Location.fRBY << ',' - << pTrain->trainPro.step1Location.fLTX << ',' - << pTrain->trainPro.step1Location.fLTY << ',' - << pTrain->trainPro.step1Location.fRBX << ',' - << pTrain->trainPro.step1Location.fRBY << ',' - << pTrain->iStartFrameId << ',' - << pTrain->iEndFrameId << ',' - << pTrain->i64StartTimeStamp << ',' - << pTrain->i64TimeStamp << ',' - << pTrain->i64EndTimeStamp << ',' - << pTrain->trainNum.i64StartTimeStamp << ',' - << pTrain->trainNum.i64TimeStamp << ',' - << pTrain->trainNum.i64EndTimeStamp << ',' - << pTrain->trainPro.i64StartTimeStamp << ',' - << pTrain->trainPro.i64TimeStamp << 
',' - << pTrain->trainPro.i64EndTimeStamp << std::endl; - - outFile.close(); - } - catch (const std::exception &) - { - LogError << "strCsvPath:" << strCsvPath << " train savecsv fail!"; - return false; - } - return true; -} - -/** -* 保存定检期的最优结果到CSV中 -* inParam : std::shared_ptr pChkDate :定检期信息 -* outParam: -* return : true/false -*/ -bool SaveCsvEngine::SaveChkDateCsv(std::shared_ptr pChkDate) -{ - if (pChkDate->strBestImg.empty()) - { - LogDebug << "datetime:" << pChkDate->strTrainDate << " " << pChkDate->strTrainName - << " carxh:" << pChkDate->iCarXH << " chkdate empty"; - return true; - } - //1. 创建保存路径 (固定路径/YYYY-MM-DD/hh-mm-ss/iDataSoure/) - char szCameraNo[4] = {0}; - sprintf(szCameraNo, "%03d", pChkDate->iDataSource + 1); - std::string strChkDatePath = strResultPath_ + pChkDate->strTrainDate + "/" + pChkDate->strTrainName + "/" + - szCameraNo + "/"; - - if (!MyUtils::getins()->CreateDirPath(strChkDatePath)) - { - LogError << "iCarXH:" << pChkDate->iCarXH << "chkdate savecsv err"; - return false; - } - - //2. 保存csv - std::string strCsvName = pChkDate->strTrainDate + pChkDate->strTrainName + "_" + szCameraNo + "_chkdate.csv"; - strCsvName = MyUtils::getins()->replace_all_distinct(strCsvName, std::string("-"), std::string("")); - std::string strCsvPath = strChkDatePath + strCsvName; - - bool bIsExsit = false; - if (access(strCsvPath.c_str(), F_OK) != -1) - { - bIsExsit = true; - } - - try - { - // 写文件 - std::ofstream outFile; - outFile.open(strCsvPath, std::ios::app); // 打开模式可省略 - - if (!bIsExsit) - { - outFile << "car_xh" << ',' - << "chkdate_name" << ',' - << "chkdate1" << ',' - << "chkdate2" << ',' - << "chkdate_ltx" << ',' - << "chkdate_lty" << ',' - << "chkdate_rbx" << ',' - << "chkdate_rby" << ',' - << "start_frameId" << ',' - << "end_frameId" << ',' - << "start_timestamp" << ',' - << "end_timestamp" << ',' - << "timestamp" << ',' - << "chkdate1deadline" << std::endl; - } - outFile << pChkDate->iCarXH << ',' - << pChkDate->strBestImg << ',' - << pChkDate->strChkDate1 << ',' - << pChkDate->strChkDate2 << ',' - << pChkDate->step1Location.fLTX << ',' - << pChkDate->step1Location.fLTY << ',' - << pChkDate->step1Location.fRBX << ',' - << pChkDate->step1Location.fRBY << ',' - << pChkDate->iStartFrameId << ',' - << pChkDate->iEndFrameId << ',' - << pChkDate->i64StartTimeStamp << ',' - << pChkDate->i64EndTimeStamp << ',' - << pChkDate->i64TimeStamp << ',' - << pChkDate->strChkDate1DeadLine << std::endl; - - outFile.close(); - } - catch (const std::exception &) - { - LogError << "strCsvPath:" << strCsvPath << " chkdate savecsv fail!"; - return false; - } - return true; -} - -/** -* 保存集装箱的最优结果到CSV中 -* inParam : std::shared_ptr pTrainContainer :集装箱信息 -* outParam: -* return : true/false -*/ -bool SaveCsvEngine::SaveContainerCsv(std::shared_ptr pTrainContainer) -{ - std::vector vecContainer; - vecContainer.emplace_back(pTrainContainer->container1); - vecContainer.emplace_back(pTrainContainer->container2); - for (auto iter = vecContainer.begin(); iter != vecContainer.end(); iter++) - { - if (iter->strContainerNo.empty()) - { - continue; - } - // 1. 创建保存路径 (固定路径/YYYY-MM-DD/hh-mm-ss/iDataSoure/) - char szCameraNo[4] = {0}; - sprintf(szCameraNo, "%03d", iter->iDataSource + 1); - std::string strContainerPath = strResultPath_ + iter->strTrainDate + "/" + iter->strTrainName + "/" + - szCameraNo + "/"; - - if (!MyUtils::getins()->CreateDirPath(strContainerPath)) - { - LogError << "ContainerNo:" << iter->strContainerNo << " container savecsv err"; - continue; - } - // 2. 
保存csv - std::string strCsvName = iter->strTrainDate + iter->strTrainName + "_" + szCameraNo + "_container.csv"; - strCsvName = MyUtils::getins()->replace_all_distinct(strCsvName, std::string("-"), std::string("")); - std::string strCsvPath = strContainerPath + strCsvName; - - bool bIsExsit = false; - if (access(strCsvPath.c_str(), F_OK) != -1) - { - bIsExsit = true; - } - - try - { - // 写文件 - std::ofstream outFile; - outFile.open(strCsvPath, std::ios::app); - - if (!bIsExsit) - { - outFile << "datasource" << ',' - << "container_name" << ',' - << "containerNo" << ',' - << "container_ltx" << ',' - << "container_lty" << ',' - << "container_rbx" << ',' - << "container_rby" << ',' - << "start_frameId" << ',' - << "end_frameId" << ',' - << "timestamp" << std::endl; - } - outFile << iter->iDataSource << ',' - << iter->strBestImg << ',' - << iter->strContainerNo << ',' - << iter->step1Location.fLTX << ',' - << iter->step1Location.fLTY << ',' - << iter->step1Location.fRBX << ',' - << iter->step1Location.fRBY << ',' - << iter->iStartFrameId << ',' - << iter->iEndFrameId << ',' - << iter->i64TimeStamp << std::endl; - - outFile.close(); - } - catch (const std::exception &) - { - LogError << "strCsvPath:" << strCsvPath << " container savecsv fail!"; - continue; - } - } - return true; -} - -APP_ERROR SaveCsvEngine::Process() -{ - int iRet = APP_ERR_OK; - while (!isStop_) - { - - bool bPopFlag = false; - //pop端口0 车厢信息 - std::shared_ptr pVoidData0 = nullptr; - iRet = inputQueMap_[strPort0_]->pop(pVoidData0); - if (nullptr != pVoidData0) - { - std::shared_ptr pTrain = std::static_pointer_cast(pVoidData0); - if (pTrain->bMergerFlag) - { - SaveMergerCsv(pTrain); - } - else - { - SaveTrainCsv(pTrain); - } - bPopFlag = true; - } - //pop端口1 定检期信息 - if (inputQueMap_.count(strPort1_) > 0) - { - std::shared_ptr pVoidData1 = nullptr; - inputQueMap_[strPort1_]->pop(pVoidData1); - if (nullptr != pVoidData1) - { - std::shared_ptr pChkDate = std::static_pointer_cast(pVoidData1); - SaveChkDateCsv(pChkDate); - bPopFlag = true; - } - } - //pop端口2 集装箱信息 - if (inputQueMap_.count(strPort2_) > 0) - { - std::shared_ptr pVoidData2 = nullptr; - inputQueMap_[strPort2_]->pop(pVoidData2); - if (nullptr != pVoidData2) - { - std::shared_ptr pTrainContainer = std::static_pointer_cast(pVoidData2); - SaveContainerCsv(pTrainContainer); - bPopFlag = true; - } - } - - if (!bPopFlag) - { - usleep(1000); - continue; - } - } - return APP_ERR_OK; -} +#include "SaveCsvEngine.h" using namespace ai_matrix; SaveCsvEngine::SaveCsvEngine() {} SaveCsvEngine::~SaveCsvEngine() {} APP_ERROR SaveCsvEngine::Init() { strPort0_ = engineName_ + "_" + std::to_string(engineId_) + "_0"; strPort1_ = engineName_ + "_" + std::to_string(engineId_) + "_1"; strPort2_ = engineName_ + "_" + std::to_string(engineId_) + "_2"; strResultPath_ = MyYaml::GetIns()->GetPathValue("gc_result_path"); strPoundNo_ = MyYaml::GetIns()->GetStringValue("atlas_poundno"); LogInfo << "SaveCsvEngine Init ok"; return APP_ERR_OK; } APP_ERROR SaveCsvEngine::DeInit() { LogInfo << "SaveCsvEngine DeInit ok"; return APP_ERR_OK; } /** * 保存合并后车厢的最优结果到CSV中 (该文件支持其web导入) * inParam : std::shared_ptr pTrain :列车信息 * outParam: * return : true/false */ bool SaveCsvEngine::SaveMergerCsv(std::shared_ptr pTrain) { //1. 创建保存路径 (固定路径/YYYY-MM-DD/hh-mm-ss/) std::string strTrainPath = strResultPath_ + pTrain->strTrainDate + "/" + pTrain->strTrainName + "/"; if (!MyUtils::getins()->CreateDirPath(strTrainPath)) { LogError << "iCarXH:" << pTrain->iCarXH << " train savecsv err"; return false; } //2. 
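
/*
 * Illustrative sketch (not part of the patch): the "write the header only when
 * the file is first created, then append one row" pattern shared by the
 * Save*Csv functions in this file, reduced to a free function.
 */
#include <fstream>
#include <string>
#include <vector>
#include <unistd.h>

inline bool AppendCsvRow(const std::string &strCsvPath,
                         const std::vector<std::string> &header,
                         const std::vector<std::string> &row)
{
    // access() tells us whether the header still has to be written.
    bool bExisted = (access(strCsvPath.c_str(), F_OK) != -1);

    std::ofstream outFile(strCsvPath, std::ios::app);
    if (!outFile.is_open())
    {
        return false;
    }
    auto writeLine = [&outFile](const std::vector<std::string> &cols) {
        for (size_t i = 0; i < cols.size(); i++)
        {
            outFile << cols[i] << (i + 1 < cols.size() ? "," : "");
        }
        outFile << std::endl;
    };
    if (!bExisted)
    {
        writeLine(header);
    }
    writeLine(row);
    return true;
}
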
保存csv std::string strCsvName = pTrain->strTrainDate + pTrain->strTrainName + std::string(".csv"); strCsvName = MyUtils::getins()->replace_all_distinct(strCsvName, std::string("-"), std::string("")); std::string strCsvPath = strTrainPath + strCsvName; bool bIsExsit = false; if (access(strCsvPath.c_str(), F_OK) != -1) { bIsExsit = true; } try { // 写文件 std::ofstream outFile; outFile.open(strCsvPath, std::ios::app); // 打开模式可省略 if (!bIsExsit) { outFile << "poundno" << ',' << "year" << ',' << "time" << ',' << "direction" << ',' << "speed" << ',' << "camerano" << ',' << "skipInterval" << ',' << "carxh" << ',' << "type" << ',' << "num" << ',' << "load" << ',' << "self" << ',' << "volume" << ',' << "volumesurface" << ',' << "change" << ',' << "numImgPath" << ',' << "proImgPath" << ',' << "videoStart" << ',' << "videoEnd" << ',' << "containerNo1" << ',' << "containerNo2" << ',' << "inspection" << ',' << "inspectionImg" << ',' << "containerImg_1" << ',' << "containerImg_2" << ',' << "startTime" << ',' << "endTime" << std::endl; } std::string strTime = pTrain->strTrainName; strTime = MyUtils::getins()->replace_all_distinct(strTime, std::string("-"), std::string(":")); ai_matrix::DataSourceConfig dataSourceConfig = MyYaml::GetIns()->GetDataSourceConfigById(pTrain->trainNum.iDataSource); //获取摄像机参数 char szCameraNo[4] = {0}; sprintf(szCameraNo, "%03d", pTrain->trainNum.iDataSource + 1); char szNumImgPath[64] = {0}; //车号最优图片路径 if (!pTrain->trainNum.strBestImg.empty()) { sprintf(szNumImgPath, "%03d/%s", pTrain->trainNum.iDataSource + 1, pTrain->trainNum.strBestImg.c_str()); } char szProImgPath[64] = {0}; //属性最优图片路径 if (!pTrain->trainPro.strBestImg.empty()) { sprintf(szProImgPath, "%03d/%s", pTrain->trainPro.iDataSource + 1, pTrain->trainPro.strBestImg.c_str()); } char szChkDateImgPath[64] = {0}; //定检期最优图片路径 if (!pTrain->chkDate.strBestImg.empty()) { sprintf(szChkDateImgPath, "%03d/%s", pTrain->chkDate.iDataSource + 1, pTrain->chkDate.strBestImg.c_str()); } char szContainer1ImgPath[64] = {0}; //集装箱1最优图片路径 if (!pTrain->container1.strBestImg.empty()) { sprintf(szContainer1ImgPath, "%03d/%s", pTrain->container1.iDataSource + 1, pTrain->container1.strBestImg.c_str()); } char szContainer2ImgPath[64] = {0}; //集装箱2最优图片路径 if (!pTrain->container2.strBestImg.empty()) { sprintf(szContainer2ImgPath, "%03d/%s", pTrain->container2.iDataSource + 1, pTrain->container2.strBestImg.c_str()); } outFile << strPoundNo_ << ',' << pTrain->strTrainDate << ',' << strTime << ',' << pTrain->iDirection << ',' << 0.0 << ',' << szCameraNo << ',' << dataSourceConfig.iSkipInterval << ',' << pTrain->iCarXH << ',' << pTrain->trainNum.strTrainType << ',' << pTrain->trainNum.strTrainNum << ',' << pTrain->trainPro.strLoad << ',' << pTrain->trainPro.strSelf << ',' << pTrain->trainPro.strVolume << ',' << pTrain->trainPro.strVolumeSurface << ',' //容量记表 << pTrain->trainPro.strChange << ',' << szNumImgPath << ',' << szProImgPath << ',' << pTrain->iStartFrameId << ',' << pTrain->iEndFrameId << ',' << pTrain->container1.strContainerNo << ',' << pTrain->container2.strContainerNo << ',' << pTrain->chkDate.strChkDate1DeadLine << ',' << szChkDateImgPath << ',' << szContainer1ImgPath << ',' << szContainer2ImgPath << ',' << MyUtils::getins()->Stamp2Time(pTrain->i64StartTimeStamp, true) << ',' << MyUtils::getins()->Stamp2Time(pTrain->i64EndTimeStamp, true) << std::endl; outFile.close(); } catch (const std::exception &) { LogError << "strCsvPath:" << strCsvPath << " train savecsv fail!"; return false; } return true; } /** * 保存车厢的最优结果到CSV中 * inParam : 
std::shared_ptr pTrain :列车信息 * outParam: * return : true/false */ bool SaveCsvEngine::SaveTrainCsv(std::shared_ptr pTrain) { char szCameraNo[4] = {0}; sprintf(szCameraNo, "%03d", pTrain->iDataSource + 1); std::string strTrainPath = strResultPath_ + pTrain->strTrainDate + "/" + pTrain->strTrainName + "/" + szCameraNo + "/"; //1. 创建保存路径 (固定路径/YYYY-MM-DD/hh-mm-ss/iDataSoure/) if (!MyUtils::getins()->CreateDirPath(strTrainPath)) { LogError << "iCarXH:" << pTrain->iCarXH << " train savecsv err"; return false; } //2. 保存csv std::string strCsvName = pTrain->strTrainDate + pTrain->strTrainName + "_" + szCameraNo + "_train.csv"; strCsvName = MyUtils::getins()->replace_all_distinct(strCsvName, std::string("-"), std::string("")); std::string strCsvPath = strTrainPath + strCsvName; bool bIsExsit = false; if (access(strCsvPath.c_str(), F_OK) != -1) { bIsExsit = true; } try { // 写文件 std::ofstream outFile; outFile.open(strCsvPath, std::ios::app); if (!bIsExsit) { outFile << "car_xh" << ',' << "train_num_name" << ',' << "train_pro_name" << ',' << "t_type" << ',' << "t_num" << ',' << "load" << ',' << "self" << ',' << "volume" << ',' << "change" << ',' << "volumesurface" << ',' << "num_ltx" << ',' << "num_lty" << ',' << "num_rbx" << ',' << "num_rby" << ',' << "pro_ltx" << ',' << "pro_lty" << ',' << "pro_rbx" << ',' << "pro_rby" << ',' << "start_frameId" << ',' << "end_frameId" << ',' << "start_timestamp" << ',' //记录车厢开始,结束帧时间戳,用于比对集装箱合并 << "timestamp" << ',' << "end_timestamp" << ',' << "start_num" << ',' << "num_timestamp" << ',' //记录车号,属性时间戳,用于比对定检期合并 << "end_num" << ',' << "start_pro" << ',' << "pro_timestamp" << ',' << "end_pro" << std::endl; } outFile << pTrain->iCarXH << ',' << pTrain->trainNum.strBestImg << ',' << pTrain->trainPro.strBestImg << ',' << pTrain->trainNum.strTrainType << ',' << pTrain->trainNum.strTrainNum << ',' << pTrain->trainPro.strLoad << ',' << pTrain->trainPro.strSelf << ',' << pTrain->trainPro.strVolume << ',' << pTrain->trainPro.strChange << ',' << pTrain->trainPro.strVolumeSurface << ',' << pTrain->trainNum.step1Location.fLTX << ',' << pTrain->trainNum.step1Location.fLTY << ',' << pTrain->trainNum.step1Location.fRBX << ',' << pTrain->trainNum.step1Location.fRBY << ',' << pTrain->trainPro.step1Location.fLTX << ',' << pTrain->trainPro.step1Location.fLTY << ',' << pTrain->trainPro.step1Location.fRBX << ',' << pTrain->trainPro.step1Location.fRBY << ',' << pTrain->iStartFrameId << ',' << pTrain->iEndFrameId << ',' << pTrain->i64StartTimeStamp << ',' << pTrain->i64TimeStamp << ',' << pTrain->i64EndTimeStamp << ',' << pTrain->trainNum.i64StartTimeStamp << ',' << pTrain->trainNum.i64TimeStamp << ',' << pTrain->trainNum.i64EndTimeStamp << ',' << pTrain->trainPro.i64StartTimeStamp << ',' << pTrain->trainPro.i64TimeStamp << ',' << pTrain->trainPro.i64EndTimeStamp << std::endl; outFile.close(); } catch (const std::exception &) { LogError << "strCsvPath:" << strCsvPath << " train savecsv fail!"; return false; } return true; } /** * 保存定检期的最优结果到CSV中 * inParam : std::shared_ptr pChkDate :定检期信息 * outParam: * return : true/false */ bool SaveCsvEngine::SaveChkDateCsv(std::shared_ptr pChkDate) { if (pChkDate->strBestImg.empty()) { LogDebug << "datetime:" << pChkDate->strTrainDate << " " << pChkDate->strTrainName << " carxh:" << pChkDate->iCarXH << " chkdate empty"; return true; } //1. 
创建保存路径 (固定路径/YYYY-MM-DD/hh-mm-ss/iDataSoure/) char szCameraNo[4] = {0}; sprintf(szCameraNo, "%03d", pChkDate->iDataSource + 1); std::string strChkDatePath = strResultPath_ + pChkDate->strTrainDate + "/" + pChkDate->strTrainName + "/" + szCameraNo + "/"; if (!MyUtils::getins()->CreateDirPath(strChkDatePath)) { LogError << "iCarXH:" << pChkDate->iCarXH << "chkdate savecsv err"; return false; } //2. 保存csv std::string strCsvName = pChkDate->strTrainDate + pChkDate->strTrainName + "_" + szCameraNo + "_chkdate.csv"; strCsvName = MyUtils::getins()->replace_all_distinct(strCsvName, std::string("-"), std::string("")); std::string strCsvPath = strChkDatePath + strCsvName; bool bIsExsit = false; if (access(strCsvPath.c_str(), F_OK) != -1) { bIsExsit = true; } try { // 写文件 std::ofstream outFile; outFile.open(strCsvPath, std::ios::app); // 打开模式可省略 if (!bIsExsit) { outFile << "car_xh" << ',' << "chkdate_name" << ',' << "chkdate1" << ',' << "chkdate2" << ',' << "chkdate_ltx" << ',' << "chkdate_lty" << ',' << "chkdate_rbx" << ',' << "chkdate_rby" << ',' << "start_frameId" << ',' << "end_frameId" << ',' << "start_timestamp" << ',' << "end_timestamp" << ',' << "timestamp" << ',' << "chkdate1deadline" << std::endl; } outFile << pChkDate->iCarXH << ',' << pChkDate->strBestImg << ',' << pChkDate->strChkDate1 << ',' << pChkDate->strChkDate2 << ',' << pChkDate->step1Location.fLTX << ',' << pChkDate->step1Location.fLTY << ',' << pChkDate->step1Location.fRBX << ',' << pChkDate->step1Location.fRBY << ',' << pChkDate->iStartFrameId << ',' << pChkDate->iEndFrameId << ',' << pChkDate->i64StartTimeStamp << ',' << pChkDate->i64EndTimeStamp << ',' << pChkDate->i64TimeStamp << ',' << pChkDate->strChkDate1DeadLine << std::endl; outFile.close(); } catch (const std::exception &) { LogError << "strCsvPath:" << strCsvPath << " chkdate savecsv fail!"; return false; } return true; } /** * 保存集装箱的最优结果到CSV中 * inParam : std::shared_ptr pTrainContainer :集装箱信息 * outParam: * return : true/false */ bool SaveCsvEngine::SaveContainerCsv(std::shared_ptr pTrainContainer) { std::vector vecContainer; vecContainer.emplace_back(pTrainContainer->container1); vecContainer.emplace_back(pTrainContainer->container2); for (auto iter = vecContainer.begin(); iter != vecContainer.end(); iter++) { if (iter->strContainerNo.empty()) { continue; } // 1. 创建保存路径 (固定路径/YYYY-MM-DD/hh-mm-ss/iDataSoure/) char szCameraNo[4] = {0}; sprintf(szCameraNo, "%03d", iter->iDataSource + 1); std::string strContainerPath = strResultPath_ + iter->strTrainDate + "/" + iter->strTrainName + "/" + szCameraNo + "/"; if (!MyUtils::getins()->CreateDirPath(strContainerPath)) { LogError << "ContainerNo:" << iter->strContainerNo << " container savecsv err"; continue; } // 2. 
保存csv std::string strCsvName = iter->strTrainDate + iter->strTrainName + "_" + szCameraNo + "_container.csv"; strCsvName = MyUtils::getins()->replace_all_distinct(strCsvName, std::string("-"), std::string("")); std::string strCsvPath = strContainerPath + strCsvName; bool bIsExsit = false; if (access(strCsvPath.c_str(), F_OK) != -1) { bIsExsit = true; } try { // 写文件 std::ofstream outFile; outFile.open(strCsvPath, std::ios::app); if (!bIsExsit) { outFile << "datasource" << ',' << "container_name" << ',' << "containerNo" << ',' << "container_ltx" << ',' << "container_lty" << ',' << "container_rbx" << ',' << "container_rby" << ',' << "start_frameId" << ',' << "end_frameId" << ',' << "timestamp" << std::endl; } outFile << iter->iDataSource << ',' << iter->strBestImg << ',' << iter->strContainerNo << ',' << iter->step1Location.fLTX << ',' << iter->step1Location.fLTY << ',' << iter->step1Location.fRBX << ',' << iter->step1Location.fRBY << ',' << iter->iStartFrameId << ',' << iter->iEndFrameId << ',' << iter->i64TimeStamp << std::endl; outFile.close(); } catch (const std::exception &) { LogError << "strCsvPath:" << strCsvPath << " container savecsv fail!"; continue; } } return true; } APP_ERROR SaveCsvEngine::Process() { int iRet = APP_ERR_OK; while (!isStop_) { bool bPopFlag = false; //pop端口0 车厢信息 std::shared_ptr pVoidData0 = nullptr; iRet = inputQueMap_[strPort0_]->pop(pVoidData0); if (nullptr != pVoidData0) { std::shared_ptr pTrain = std::static_pointer_cast(pVoidData0); if (pTrain->bMergerFlag) { SaveMergerCsv(pTrain); } else { SaveTrainCsv(pTrain); } bPopFlag = true; } //pop端口1 定检期信息 if (inputQueMap_.count(strPort1_) > 0) { std::shared_ptr pVoidData1 = nullptr; inputQueMap_[strPort1_]->pop(pVoidData1); if (nullptr != pVoidData1) { std::shared_ptr pChkDate = std::static_pointer_cast(pVoidData1); SaveChkDateCsv(pChkDate); bPopFlag = true; } } //pop端口2 集装箱信息 if (inputQueMap_.count(strPort2_) > 0) { std::shared_ptr pVoidData2 = nullptr; inputQueMap_[strPort2_]->pop(pVoidData2); if (nullptr != pVoidData2) { std::shared_ptr pTrainContainer = std::static_pointer_cast(pVoidData2); SaveContainerCsv(pTrainContainer); bPopFlag = true; } } if (!bPopFlag) { usleep(1000); continue; } } return APP_ERR_OK; } \ No newline at end of file diff --git a/nvidia_ascend_engine/common_engine/SaveEngine/SaveImgEngine.cpp b/nvidia_ascend_engine/common_engine/SaveEngine/SaveImgEngine.cpp index c9361c3..3569b64 100644 --- a/nvidia_ascend_engine/common_engine/SaveEngine/SaveImgEngine.cpp +++ b/nvidia_ascend_engine/common_engine/SaveEngine/SaveImgEngine.cpp @@ -154,6 +154,7 @@ APP_ERROR SaveImgEngine::Process() Json::Value jvFrameInfo; jvFrameInfo["timeStamp"] = pSaveImgData->i64TimeStamp; jvFrameInfo["status"] = iStatus; + jvFrameInfo["moveType"] = pSaveImgData->nMonitorState; jvFrameInfo["direction"] = pSaveImgData->iDirection; jvFrameInfo["width"] = iWidth; jvFrameInfo["height"] = iHeight; diff --git a/nvidia_ascend_engine/common_engine/SaveEngine/SaveStepOneResultEngine.cpp b/nvidia_ascend_engine/common_engine/SaveEngine/SaveStepOneResultEngine.cpp index 69916eb..4c98c1b 100644 --- a/nvidia_ascend_engine/common_engine/SaveEngine/SaveStepOneResultEngine.cpp +++ b/nvidia_ascend_engine/common_engine/SaveEngine/SaveStepOneResultEngine.cpp @@ -301,8 +301,8 @@ void SaveStepOneResultEngine::DealTrainSpaceInfo(std::shared_ptr pP if (!(bDealCenterFlag_ && !bIntervalFlag && (iCenterCur < (pProcessData->iWidth / 3 + 30)))) { vecParationInfo_.push_back(parationInfo); - } - } + } + } } else if (iDirection_ == DIRECTION_RIGHT) { @@ 
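SaveCsvEngine::Process above polls up to three input ports without blocking and only sleeps when every port came back empty. A stripped-down sketch of that idle/backoff loop; the queue semantics are assumed from the surrounding code (pop() returns immediately and leaves the pointer null on an empty queue), and Dispatch() is a hypothetical stand-in for the per-port SaveTrainCsv/SaveChkDateCsv/SaveContainerCsv calls:

    // Sketch only; inputQueMap_, strPort0_/1_/2_ and isStop_ come from the engine base class.
    while (!isStop_)
    {
        bool popped = false;
        for (const std::string &port : {strPort0_, strPort1_, strPort2_})
        {
            if (inputQueMap_.count(port) == 0)
            {
                continue;                        // this port is not wired for the current data source
            }
            std::shared_ptr<void> pData = nullptr;
            inputQueMap_[port]->pop(pData);      // non-blocking: nullptr when the queue is empty
            if (pData != nullptr)
            {
                Dispatch(port, pData);           // hypothetical helper for the per-port handlers
                popped = true;
            }
        }
        if (!popped)
        {
            usleep(1000);                        // 1 ms backoff so an idle loop does not spin a core
        }
    }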
-550,7 +550,7 @@ APP_ERROR SaveStepOneResultEngine::Process() { //车头没有属性,因此车头号也加入到属性中。保证向右行驶属性在前时最后2节的切分。 //车头只加入一次,防止一个车头2个车头号的场景。但有两个车头且没识别车头间隔则无法处理。 - if (!bHaveHeadFlag_) + if (!bHaveHeadFlag_) { bool bIntervalFlag = ((int)(pProcessData->iFrameId - headInfo_.iFrameId) > iSplitSpan_ && headInfo_.iFrameId != 0); @@ -585,7 +585,7 @@ APP_ERROR SaveStepOneResultEngine::Process() } else if (postSubData.iTargetType == CONTAINER) { - jvStep1Container.append(jvInfo); + jvStep1Container.append(jvInfo); } else if (postSubData.iTargetType == SPACE) { diff --git a/nvidia_ascend_engine/common_engine/SocketEngine/SocketEngine.cpp b/nvidia_ascend_engine/common_engine/SocketEngine/SocketEngine.cpp index 9577ba8..92d39f2 100644 --- a/nvidia_ascend_engine/common_engine/SocketEngine/SocketEngine.cpp +++ b/nvidia_ascend_engine/common_engine/SocketEngine/SocketEngine.cpp @@ -1,4 +1,4 @@ -#include "SocketEngine.h" +#include "SocketEngine.h" diff --git a/nvidia_ascend_engine/common_engine/TrainAnaEngine/TrainParationMgr.cpp b/nvidia_ascend_engine/common_engine/TrainAnaEngine/TrainParationMgr.cpp index 3caf1d2..98ba244 100644 --- a/nvidia_ascend_engine/common_engine/TrainAnaEngine/TrainParationMgr.cpp +++ b/nvidia_ascend_engine/common_engine/TrainAnaEngine/TrainParationMgr.cpp @@ -1,256 +1 @@ -#include "TrainParationMgr.h" - -using namespace ai_matrix; - -TrainParationMgr::TrainParationMgr() {} - -TrainParationMgr::~TrainParationMgr() {} - -APP_ERROR TrainParationMgr::Init() -{ - strPort0_ = engineName_ + "_" + std::to_string(engineId_) + "_0"; - strResultPath_ = MyYaml::GetIns()->GetPathValue("gc_result_path"); - //获取主摄像头信息 - mainCfg_ = MyYaml::GetIns()->GetDataSourceConfigById(0); - - nFrameRate = 25; - InitParam(); - nTailPixOffset = getTailPixOffset(); - LogInfo << "TrainParationMgr Init ok"; - return APP_ERR_OK; -} - -APP_ERROR TrainParationMgr::DeInit() -{ - LogInfo << "TrainParationMgr DeInit ok"; - return APP_ERR_OK; -} - -int TrainParationMgr::getTailPixOffset() -{ - LogInfo << "TrainParationMgr getTailPixOffset start"; - // 单位计算 - // 帧宽像素/米 - float fframewidth_meter = ((METHOD_BASE_WIDTH * 1.0) / (TRAIN_IN_CAMERA_WIDTH)); - // 车尾车钩像素位置 - float fOffsetPosistion = TRAIN_WIDTH * fframewidth_meter; - int nretOffset = (int)fOffsetPosistion; - return nretOffset; -} - -/** -* 参数初始化(列车结束时需调用) -* inParam : N/A -* outParam: N/A -* return : N/A -*/ -void TrainParationMgr::InitParam() -{ - -} - -/** -* 计算车钩移动的像素值 -* inParam : 行车速度(单位:米/秒) -* inParam : 宽度 -* inParam : 相机帧率(单位:帧/秒) -* outParam: N/A -* return : 间隔帧 -*/ -int TrainParationMgr::getCouplerOffsetPosition(float fspeed, int nframeindex) -{ - LogInfo << "TrainAnaEngine getCouplerOffsetPosition start"; - //单位换算 - // 米/秒 -> 米/帧 - // 米/帧 = 米/秒 * 秒/帧(即:1/帧率) - float fmeter_frame = fspeed / nFrameRate; - // 米/帧 -> 像素/帧 - // 像素/帧 = 米/帧 * 像素/米 - float fpix_frame = fmeter_frame * (METHOD_BASE_WIDTH / TRAIN_IN_CAMERA_WIDTH); - - int nretPixOffet = (int)fpix_frame; - nretPixOffet = nretPixOffet * nframeindex; - LogInfo << "TrainAnaEngine getCouplerOffsetPosition nretPixOffet:" << nretPixOffet; - LogInfo << "TrainAnaEngine getCouplerOffsetPosition end"; - return nretPixOffet; -} - -/** -* 计算车钩移动的像素值 -* inParam : 行车速度(单位:米/秒) -* inParam : 宽度 -* inParam : 相机帧率(单位:帧/秒) -* outParam: N/A -* return : 间隔帧 -*/ -int TrainParationMgr::getCouplerOffsetPix(float fspeed, int noffsetPix) -{ - LogInfo << "TrainAnaEngine getCouplerOffsetPix start"; - LogInfo << "TrainAnaEngine getCouplerOffsetPix fspeed:" << fspeed; -// LogInfo << "TrainAnaEngine getCouplerOffsetPix start:" << 
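getCouplerOffsetPosition converts the measured train speed into a per-frame pixel displacement and scales it by a frame count. Written out with explicit units below; METHOD_BASE_WIDTH is taken to be the frame width in pixels and TRAIN_IN_CAMERA_WIDTH the real-world width covered by one frame in metres, as in getTailPixOffset. The sketch promotes the constants to float before dividing, since an all-integer METHOD_BASE_WIDTH / TRAIN_IN_CAMERA_WIDTH would silently truncate:

    // Unit chain: m/s -> m/frame -> px/frame -> px over nFrames frames.
    int CouplerOffsetPx(float speedMps, int nFrames, int frameRate,
                        float frameWidthPx /* METHOD_BASE_WIDTH */,
                        float frameWidthM  /* TRAIN_IN_CAMERA_WIDTH */)
    {
        float metresPerFrame = speedMps / frameRate;          // m per frame
        float pxPerMetre     = frameWidthPx / frameWidthM;    // px per metre
        float pxPerFrame     = metresPerFrame * pxPerMetre;   // px per frame
        return static_cast<int>(pxPerFrame * nFrames);        // total pixel displacement
    }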
nframeindex; - //单位换算 - // 米/秒 -> 米/帧 - // 米/帧 = 米/秒 * 秒/帧(即:1/帧率) - float fmeter_frame = fspeed / nFrameRate; - LogInfo << "TrainAnaEngine getCouplerOffsetPix fmeter_frame:" << fmeter_frame; - // 米/帧 -> 像素/帧 - // 像素/帧 = 米/帧 * 像素/米 - float fpix_frame = fmeter_frame * (METHOD_BASE_WIDTH / TRAIN_IN_CAMERA_WIDTH); - LogInfo << "TrainAnaEngine getCouplerOffsetPix fpix_frame:" << fpix_frame; - - int nretPixOffet = (int)fpix_frame; - nretPixOffet = (noffsetPix - (METHOD_BASE_WIDTH / 2)) / nretPixOffet; - LogInfo << "TrainAnaEngine getCouplerOffsetPix nretPixOffet:" << nretPixOffet; - LogInfo << "TrainAnaEngine getCouplerOffsetPix end"; - return nretPixOffet; -} - -/** -* 计算车钩从中间到边缘的间隔帧 -* inParam : 行车速度(单位:米/秒) -* inParam : 宽度 -* inParam : 相机帧率(单位:帧/秒) -* outParam: N/A -* return : 间隔帧 -*/ -int TrainParationMgr::getOffsetFrame(float fspeed, int width, int nFrameRate) -{ - LogInfo << "TrainAnaEngine getOffsetFrame start"; - LogInfo << "TrainAnaEngine getOffsetFrame fspeed:" << fspeed; - LogInfo << "TrainAnaEngine getOffsetFrame width:" << width; - LogInfo << "TrainAnaEngine getOffsetFrame nFrameRate:" << nFrameRate; - //LogInfo << "TrainAnaEngine getOffsetFrame nLatestFrame:" << nLatestFrame; - //偏移值 = (中间到边缘的宽度(米) / 速度(米/秒)->时间(秒))* 帧率(帧/秒) - float ftmp = width * (float) nFrameRate; - LogInfo << "TrainAnaEngine getOffsetFrame start end:" << ftmp; - ftmp = ftmp / fspeed; - LogInfo << "TrainAnaEngine getOffsetFrame start end:" << ftmp; - int nRet = (int) ftmp; - LogInfo << "TrainAnaEngine getOffsetFrame start end:" << nRet; - return nRet; -} - - - -APP_ERROR TrainParationMgr::Process() -{ - int iRet = APP_ERR_OK; - while (!isStop_) - { - std::shared_ptr pVoidData0 = nullptr; - inputQueMap_[strPort0_]->pop(pVoidData0); - if (nullptr == pVoidData0) - { - usleep(1000); //1ms - continue; - } - - std::shared_ptr pPartionInfo = std::static_pointer_cast(pVoidData0); - - int nSize = lstPartInfo.size(); - int nPartionIndex = nSize - 1; - - //当然车厢通过的数量 - if (nSize == 0) { - PartionInfo stTempInfo; - stTempInfo.endframe = pPartionInfo->modelSpaceFrame; - stTempInfo.i64EndTimeStamp = pPartionInfo->i64EndTimeStamp; - stTempInfo.nindex = 1; - //第一节车厢开始帧为跳帧数,开始帧时间设置为来车时间 - stTempInfo.startframe = mainCfg_.iSkipInterval; - std::string strTemp = pPartionInfo->strTrainDate + " " + pPartionInfo->strTrainName; - stTempInfo.i64StartTimeStamp = MyUtils::getins()->GetParamTimeMilliSeconds(strTemp); - stTempInfo.fspeed = TRAIN_DEFAULT_SPEED; - stTempInfo.fLTX = (abs(pPartionInfo->fLTX - pPartionInfo->fRBX) / 2) + pPartionInfo->fLTX; - lstPartInfo.push_back(stTempInfo); - //lstPartInfo.push_back(stTempInfo); - nPartionIndex++; - } - lstPartInfo[nPartionIndex].i64EndTimeStamp = pPartionInfo->i64EndTimeStamp; - lstPartInfo[nPartionIndex].endframe = pPartionInfo->modelSpaceFrame; - // 根据开始帧时间戳和结束帧时间错 计算当节车厢的行车速度 - // 根据时间戳计算时间差 - - - float nTimePassed = (abs(lstPartInfo[nPartionIndex].i64EndTimeStamp - lstPartInfo[nPartionIndex].i64StartTimeStamp)) * 1.0; - //防止停车导致速度过小 - if(pPartionInfo->nStatus != TRAIN_PAUSE && nTimePassed <= 50000) { - lstPartInfo[nPartionIndex].fspeed = (TRAIN_WIDTH * 1000.0) /nTimePassed; - } else { - if (nPartionIndex >= 1){ - lstPartInfo[nPartionIndex].fspeed = lstPartInfo[nPartionIndex - 1].fspeed / 3; - } else { - lstPartInfo[nPartionIndex].fspeed = TRAIN_DEFAULT_SPEED / 10; - } - } - - // - //nSamePartionIgnoreCount = (nTimePassed / (3 * 5000)) * nFrameRate; - // 结束帧为当前帧再往后 (除以2的原因:中间为车钩,车钩后的车体宽度为整个镜头的宽度除以2) - lstPartInfo[nPartionIndex].bmodelconfirmed = true; - - /// write json info to file - - 
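Process estimates each carriage's speed from the start and end timestamps of its partition (one carriage is TRAIN_WIDTH metres long, timestamps are in milliseconds) and damps the estimate when the train is paused or the interval looks implausibly long. A sketch of that rule under those assumptions, with an extra guard for a zero-length interval:

    #include <cstdint>
    #include <cstdlib>

    // TRAIN_WIDTH and TRAIN_DEFAULT_SPEED are the macros used by TrainParationMgr.
    // Pass a non-positive previousSpeed when there is no previous carriage.
    float EstimateCarriageSpeed(int64_t startMs, int64_t endMs, bool paused, float previousSpeed)
    {
        const float elapsedMs = static_cast<float>(std::llabs(endMs - startMs));
        if (!paused && elapsedMs > 0.0f && elapsedMs <= 50000.0f)
        {
            return (TRAIN_WIDTH * 1000.0f) / elapsedMs;        // metres per second
        }
        // Paused or over-long interval: fall back rather than letting the speed collapse towards zero.
        return (previousSpeed > 0.0f) ? previousSpeed / 3.0f
                                      : TRAIN_DEFAULT_SPEED / 10.0f;
    }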
//先读取文本内容,追加新的信息后再写入 - //划分信息 JSON格式 - Json::Value jvPartionInfo; - //JSON保存路径 - std::string strFilePath; - - //检测到车厢划分信息 - strFilePath = strResultPath_ + pPartionInfo->strTrainDate + "/" + pPartionInfo->strTrainName + "/" - + std::to_string(nPartionIndex + 1) + ".txt"; - - LogInfo << "TrainAnaEngine Process lstPartInfo[nPartionIndex].startframe:" << lstPartInfo[nPartionIndex].startframe ; - LogInfo << "TrainAnaEngine Process lstPartInfo[nPartionIndex].endframe:" << lstPartInfo[nPartionIndex].endframe; - - PartionInfo stTempInfo; - // 开始记录新的一节车厢信息(从索引变成序号+1 ,新增一节车厢信息+1) - stTempInfo.nindex = nPartionIndex + 2; - // 上一节车厢的结束帧 - (偏移帧 = (镜头内的车体宽度/ (速度) -> 通过时间) * 帧/秒 ) 作为下一节车厢的开始帧 - int ntempOffsetFrame = lstPartInfo[nPartionIndex].endframe; - stTempInfo.startframe = ntempOffsetFrame; - stTempInfo.i64StartTimeStamp = pPartionInfo->i64EndTimeStamp; - // 初始化下一节的结束帧 - //stTempInfo.endframe = 0; - - lstPartInfo.push_back(stTempInfo); - - // 记录过车日期 - jvPartionInfo["trainDate"] = pPartionInfo->strTrainDate; - // 记录过车时间 - jvPartionInfo["trainName"] = pPartionInfo->strTrainName; - // 记录车厢节数 (索引从0开始 所以这里+1) - jvPartionInfo["trainNo"] = nPartionIndex + 1; - // 记录行车开始帧 - jvPartionInfo["startFrameId"] = lstPartInfo[nPartionIndex].startframe; - jvPartionInfo["startTimeStamp"] = lstPartInfo[nPartionIndex].i64StartTimeStamp; - // 记录行车结束帧 - jvPartionInfo["endFrameId"] = lstPartInfo[nPartionIndex].endframe; - jvPartionInfo["endTimeStamp"] = lstPartInfo[nPartionIndex].i64EndTimeStamp; - // 记录车厢是否完全通过 - jvPartionInfo["isEnd"] = pPartionInfo->bIsEnd; - - //是否是间隔模型切分的车厢 - jvPartionInfo["modelconfirmed"] = pPartionInfo->bmodelconfirmed; - - // 记录当前车厢的信息到JSON文件 - MyUtils::getins()->WriteJsonInfo(jvPartionInfo, strFilePath); - std::shared_ptr pTrainRange = std::make_shared(); - pTrainRange->strTrainDate = jvPartionInfo["trainDate"].asString(); - pTrainRange->strTrainName = jvPartionInfo["trainName"].asString(); - pTrainRange->iTrainIndex = jvPartionInfo["trainNo"].asInt(); - pTrainRange->iStartFrameId = jvPartionInfo["startFrameId"].asInt(); - pTrainRange->i64StartTimeStamp = jvPartionInfo["startTimeStamp"].asInt64(); - pTrainRange->iEndFrameId = jvPartionInfo["endFrameId"].asInt(); - pTrainRange->i64EndTimeStamp = jvPartionInfo["endTimeStamp"].asInt64(); - pTrainRange->bIsEnd = jvPartionInfo["isEnd"].asBool(); - pTrainRange->bmodelconfirmed = jvPartionInfo["modelconfirmed"].asBool(); - iRet = outputQueMap_[strPort0_]->push(std::static_pointer_cast(pTrainRange)); - - if (pPartionInfo->bIsEnd) { - lstPartInfo.clear(); - } - } - return APP_ERR_OK; -} +#include "TrainParationMgr.h" using namespace ai_matrix; TrainParationMgr::TrainParationMgr() {} TrainParationMgr::~TrainParationMgr() {} APP_ERROR TrainParationMgr::Init() { strPort0_ = engineName_ + "_" + std::to_string(engineId_) + "_0"; strResultPath_ = MyYaml::GetIns()->GetPathValue("gc_result_path"); //获取主摄像头信息 mainCfg_ = MyYaml::GetIns()->GetDataSourceConfigById(0); nFrameRate = 25; InitParam(); nTailPixOffset = getTailPixOffset(); LogInfo << "TrainParationMgr Init ok"; return APP_ERR_OK; } APP_ERROR TrainParationMgr::DeInit() { LogInfo << "TrainParationMgr DeInit ok"; return APP_ERR_OK; } int TrainParationMgr::getTailPixOffset() { LogInfo << "TrainParationMgr getTailPixOffset start"; // 单位计算 // 帧宽像素/米 float fframewidth_meter = ((METHOD_BASE_WIDTH * 1.0) / (TRAIN_IN_CAMERA_WIDTH)); // 车尾车钩像素位置 float fOffsetPosistion = TRAIN_WIDTH * fframewidth_meter; int nretOffset = (int)fOffsetPosistion; return nretOffset; } /** * 参数初始化(列车结束时需调用) * inParam : N/A * outParam: N/A 
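Each detected partition is also written to <gc_result_path>/<trainDate>/<trainName>/<index>.txt as a small JSON object, and the same fields are copied into the TrainRange message pushed downstream. A compact jsoncpp sketch of that record (field names as in the source; WriteJsonInfo is the project's own helper and is not reproduced here):

    #include <json/json.h>   // jsoncpp, as used elsewhere in the engines

    // Per-carriage partition record written by TrainParationMgr::Process.
    Json::Value MakePartitionRecord(const std::string &trainDate, const std::string &trainName,
                                    int carriageNo, int startFrame, int64_t startTs,
                                    int endFrame, int64_t endTs, bool isEnd, bool modelConfirmed)
    {
        Json::Value jv;
        jv["trainDate"]      = trainDate;
        jv["trainName"]      = trainName;
        jv["trainNo"]        = carriageNo;                       // 1-based carriage index
        jv["startFrameId"]   = startFrame;
        jv["startTimeStamp"] = static_cast<Json::Int64>(startTs);
        jv["endFrameId"]     = endFrame;
        jv["endTimeStamp"]   = static_cast<Json::Int64>(endTs);
        jv["isEnd"]          = isEnd;                            // carriage has fully passed the camera
        jv["modelconfirmed"] = modelConfirmed;                   // split confirmed by the interval model
        return jv;
    }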
* return : N/A */ void TrainParationMgr::InitParam() { } /** * 计算车钩移动的像素值 * inParam : 行车速度(单位:米/秒) * inParam : 宽度 * inParam : 相机帧率(单位:帧/秒) * outParam: N/A * return : 间隔帧 */ int TrainParationMgr::getCouplerOffsetPosition(float fspeed, int nframeindex) { LogInfo << "TrainAnaEngine getCouplerOffsetPosition start"; //单位换算 // 米/秒 -> 米/帧 // 米/帧 = 米/秒 * 秒/帧(即:1/帧率) float fmeter_frame = fspeed / nFrameRate; // 米/帧 -> 像素/帧 // 像素/帧 = 米/帧 * 像素/米 float fpix_frame = fmeter_frame * (METHOD_BASE_WIDTH / TRAIN_IN_CAMERA_WIDTH); int nretPixOffet = (int)fpix_frame; nretPixOffet = nretPixOffet * nframeindex; LogInfo << "TrainAnaEngine getCouplerOffsetPosition nretPixOffet:" << nretPixOffet; LogInfo << "TrainAnaEngine getCouplerOffsetPosition end"; return nretPixOffet; } /** * 计算车钩移动的像素值 * inParam : 行车速度(单位:米/秒) * inParam : 宽度 * inParam : 相机帧率(单位:帧/秒) * outParam: N/A * return : 间隔帧 */ int TrainParationMgr::getCouplerOffsetPix(float fspeed, int noffsetPix) { LogInfo << "TrainAnaEngine getCouplerOffsetPix start"; LogInfo << "TrainAnaEngine getCouplerOffsetPix fspeed:" << fspeed; // LogInfo << "TrainAnaEngine getCouplerOffsetPix start:" << nframeindex; //单位换算 // 米/秒 -> 米/帧 // 米/帧 = 米/秒 * 秒/帧(即:1/帧率) float fmeter_frame = fspeed / nFrameRate; LogInfo << "TrainAnaEngine getCouplerOffsetPix fmeter_frame:" << fmeter_frame; // 米/帧 -> 像素/帧 // 像素/帧 = 米/帧 * 像素/米 float fpix_frame = fmeter_frame * (METHOD_BASE_WIDTH / TRAIN_IN_CAMERA_WIDTH); LogInfo << "TrainAnaEngine getCouplerOffsetPix fpix_frame:" << fpix_frame; int nretPixOffet = (int)fpix_frame; nretPixOffet = (noffsetPix - (METHOD_BASE_WIDTH / 2)) / nretPixOffet; LogInfo << "TrainAnaEngine getCouplerOffsetPix nretPixOffet:" << nretPixOffet; LogInfo << "TrainAnaEngine getCouplerOffsetPix end"; return nretPixOffet; } /** * 计算车钩从中间到边缘的间隔帧 * inParam : 行车速度(单位:米/秒) * inParam : 宽度 * inParam : 相机帧率(单位:帧/秒) * outParam: N/A * return : 间隔帧 */ int TrainParationMgr::getOffsetFrame(float fspeed, int width, int nFrameRate) { LogInfo << "TrainAnaEngine getOffsetFrame start"; LogInfo << "TrainAnaEngine getOffsetFrame fspeed:" << fspeed; LogInfo << "TrainAnaEngine getOffsetFrame width:" << width; LogInfo << "TrainAnaEngine getOffsetFrame nFrameRate:" << nFrameRate; //LogInfo << "TrainAnaEngine getOffsetFrame nLatestFrame:" << nLatestFrame; //偏移值 = (中间到边缘的宽度(米) / 速度(米/秒)->时间(秒))* 帧率(帧/秒) float ftmp = width * (float) nFrameRate; LogInfo << "TrainAnaEngine getOffsetFrame start end:" << ftmp; ftmp = ftmp / fspeed; LogInfo << "TrainAnaEngine getOffsetFrame start end:" << ftmp; int nRet = (int) ftmp; LogInfo << "TrainAnaEngine getOffsetFrame start end:" << nRet; return nRet; } APP_ERROR TrainParationMgr::Process() { int iRet = APP_ERR_OK; while (!isStop_) { std::shared_ptr pVoidData0 = nullptr; inputQueMap_[strPort0_]->pop(pVoidData0); if (nullptr == pVoidData0) { usleep(1000); //1ms continue; } std::shared_ptr pPartionInfo = std::static_pointer_cast(pVoidData0); int nSize = lstPartInfo.size(); int nPartionIndex = nSize - 1; //当然车厢通过的数量 if (nSize == 0) { PartionInfo stTempInfo; stTempInfo.endframe = pPartionInfo->modelSpaceFrame; stTempInfo.i64EndTimeStamp = pPartionInfo->i64EndTimeStamp; stTempInfo.nindex = 1; //第一节车厢开始帧为跳帧数,开始帧时间设置为来车时间 stTempInfo.startframe = mainCfg_.iSkipInterval; std::string strTemp = pPartionInfo->strTrainDate + " " + pPartionInfo->strTrainName; stTempInfo.i64StartTimeStamp = MyUtils::getins()->GetParamTimeMilliSeconds(strTemp); stTempInfo.fspeed = TRAIN_DEFAULT_SPEED; stTempInfo.fLTX = (abs(pPartionInfo->fLTX - pPartionInfo->fRBX) / 2) + pPartionInfo->fLTX; 
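getCouplerOffsetPix goes the other way: given a coupler already detected at pixel noffsetPix, it estimates how many frames remain until the coupler reaches the frame centre by dividing the remaining pixel distance by the per-frame pixel displacement. The sketch below keeps the same arithmetic but adds a guard for the case where the per-frame displacement truncates to zero at very low speed, which in the original would divide by zero:

    // Frames until a coupler at couplerPx reaches the frame centre.
    // frameWidthPx = METHOD_BASE_WIDTH, frameWidthM = TRAIN_IN_CAMERA_WIDTH, rate = nFrameRate.
    int FramesToCentre(float speedMps, int couplerPx,
                       float frameWidthPx, float frameWidthM, int rate)
    {
        float pxPerFrame = (speedMps / rate) * (frameWidthPx / frameWidthM);
        int step = static_cast<int>(pxPerFrame);
        if (step <= 0)
        {
            step = 1;   // guard: near-zero speed must not lead to a division by zero
        }
        return (couplerPx - static_cast<int>(frameWidthPx) / 2) / step;
    }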
lstPartInfo.push_back(stTempInfo); //lstPartInfo.push_back(stTempInfo); nPartionIndex++; } lstPartInfo[nPartionIndex].i64EndTimeStamp = pPartionInfo->i64EndTimeStamp; lstPartInfo[nPartionIndex].endframe = pPartionInfo->modelSpaceFrame; // 根据开始帧时间戳和结束帧时间错 计算当节车厢的行车速度 // 根据时间戳计算时间差 float nTimePassed = (abs(lstPartInfo[nPartionIndex].i64EndTimeStamp - lstPartInfo[nPartionIndex].i64StartTimeStamp)) * 1.0; //防止停车导致速度过小 if(pPartionInfo->nStatus != TRAIN_PAUSE && nTimePassed <= 50000) { lstPartInfo[nPartionIndex].fspeed = (TRAIN_WIDTH * 1000.0) /nTimePassed; } else { if (nPartionIndex >= 1){ lstPartInfo[nPartionIndex].fspeed = lstPartInfo[nPartionIndex - 1].fspeed / 3; } else { lstPartInfo[nPartionIndex].fspeed = TRAIN_DEFAULT_SPEED / 10; } } // //nSamePartionIgnoreCount = (nTimePassed / (3 * 5000)) * nFrameRate; // 结束帧为当前帧再往后 (除以2的原因:中间为车钩,车钩后的车体宽度为整个镜头的宽度除以2) lstPartInfo[nPartionIndex].bmodelconfirmed = true; /// write json info to file //先读取文本内容,追加新的信息后再写入 //划分信息 JSON格式 Json::Value jvPartionInfo; //JSON保存路径 std::string strFilePath; //检测到车厢划分信息 strFilePath = strResultPath_ + pPartionInfo->strTrainDate + "/" + pPartionInfo->strTrainName + "/" + std::to_string(nPartionIndex + 1) + ".txt"; LogInfo << "TrainAnaEngine Process lstPartInfo[nPartionIndex].startframe:" << lstPartInfo[nPartionIndex].startframe ; LogInfo << "TrainAnaEngine Process lstPartInfo[nPartionIndex].endframe:" << lstPartInfo[nPartionIndex].endframe; PartionInfo stTempInfo; // 开始记录新的一节车厢信息(从索引变成序号+1 ,新增一节车厢信息+1) stTempInfo.nindex = nPartionIndex + 2; // 上一节车厢的结束帧 - (偏移帧 = (镜头内的车体宽度/ (速度) -> 通过时间) * 帧/秒 ) 作为下一节车厢的开始帧 int ntempOffsetFrame = lstPartInfo[nPartionIndex].endframe; stTempInfo.startframe = ntempOffsetFrame; stTempInfo.i64StartTimeStamp = pPartionInfo->i64EndTimeStamp; // 初始化下一节的结束帧 //stTempInfo.endframe = 0; lstPartInfo.push_back(stTempInfo); // 记录过车日期 jvPartionInfo["trainDate"] = pPartionInfo->strTrainDate; // 记录过车时间 jvPartionInfo["trainName"] = pPartionInfo->strTrainName; // 记录车厢节数 (索引从0开始 所以这里+1) jvPartionInfo["trainNo"] = nPartionIndex + 1; // 记录行车开始帧 jvPartionInfo["startFrameId"] = lstPartInfo[nPartionIndex].startframe; jvPartionInfo["startTimeStamp"] = lstPartInfo[nPartionIndex].i64StartTimeStamp; // 记录行车结束帧 jvPartionInfo["endFrameId"] = lstPartInfo[nPartionIndex].endframe; jvPartionInfo["endTimeStamp"] = lstPartInfo[nPartionIndex].i64EndTimeStamp; // 记录车厢是否完全通过 jvPartionInfo["isEnd"] = pPartionInfo->bIsEnd; //是否是间隔模型切分的车厢 jvPartionInfo["modelconfirmed"] = pPartionInfo->bmodelconfirmed; // 记录当前车厢的信息到JSON文件 MyUtils::getins()->WriteJsonInfo(jvPartionInfo, strFilePath); std::shared_ptr pTrainRange = std::make_shared(); pTrainRange->strTrainDate = jvPartionInfo["trainDate"].asString(); pTrainRange->strTrainName = jvPartionInfo["trainName"].asString(); pTrainRange->iTrainIndex = jvPartionInfo["trainNo"].asInt(); pTrainRange->iStartFrameId = jvPartionInfo["startFrameId"].asInt(); pTrainRange->i64StartTimeStamp = jvPartionInfo["startTimeStamp"].asInt64(); pTrainRange->iEndFrameId = jvPartionInfo["endFrameId"].asInt(); pTrainRange->i64EndTimeStamp = jvPartionInfo["endTimeStamp"].asInt64(); pTrainRange->bIsEnd = jvPartionInfo["isEnd"].asBool(); pTrainRange->bmodelconfirmed = jvPartionInfo["modelconfirmed"].asBool(); iRet = outputQueMap_[strPort0_]->push(std::static_pointer_cast(pTrainRange)); if (pPartionInfo->bIsEnd) { lstPartInfo.clear(); } } return APP_ERR_OK; } \ No newline at end of file diff --git a/nvidia_ascend_engine/nvidia_engine/DecodeEngine/HardH264FFmpegDecode.cpp 
b/nvidia_ascend_engine/nvidia_engine/DecodeEngine/HardH264FFmpegDecode.cpp index ac45241..b01d69a 100644 --- a/nvidia_ascend_engine/nvidia_engine/DecodeEngine/HardH264FFmpegDecode.cpp +++ b/nvidia_ascend_engine/nvidia_engine/DecodeEngine/HardH264FFmpegDecode.cpp @@ -2,54 +2,54 @@ using namespace std; -HardH264FFmpegDecode::HardH264FFmpegDecode() +HardH264FFmpegDecode::HardH264FFmpegDecode() { - ; + ; } -HardH264FFmpegDecode::~HardH264FFmpegDecode() +HardH264FFmpegDecode::~HardH264FFmpegDecode() { - ; + ; } int HardH264FFmpegDecode::HardH264FFmpegDecoderInit(unsigned int uiWidth, unsigned int uiHeight, unsigned int uiFrameRate) { - uiWidth_ = uiWidth; uiHeight_ = uiHeight; - uiFrameRate_ = uiFrameRate; - iFrameFinished_ = 0; + uiWidth_ = uiWidth; uiHeight_ = uiHeight; + uiFrameRate_ = uiFrameRate; + iFrameFinished_ = 0; - av_log_set_level(AV_LOG_ERROR); + av_log_set_level(AV_LOG_ERROR); + + // AVCodecID codec_id = AV_CODEC_ID_H264; //解码H264 + // pCodec_ = avcodec_find_decoder(codec_id); //获取解码器 - // AVCodecID codec_id = AV_CODEC_ID_H264; //解码H264 - // pCodec_ = avcodec_find_decoder(codec_id); //获取解码器 - - pCodec_ = avcodec_find_decoder_by_name(NVIDIA_H264_DECODER); + pCodec_ = avcodec_find_decoder_by_name(NVIDIA_H264_DECODER); if (!pCodec_) { fprintf(stderr, "Codec '%s' not found\n", pCodec_->long_name); exit(1); } - printf("Codec found with name %d(%s)\n", pCodec_->id, pCodec_->long_name); + printf("Codec found with name %d(%s)\n", pCodec_->id, pCodec_->long_name); - //创建上下文 - pCodecCtx_ = avcodec_alloc_context3(pCodec_); + //创建上下文 + pCodecCtx_ = avcodec_alloc_context3(pCodec_); if (!pCodecCtx_){ fprintf(stderr, "Could not allocate video codec context\n"); exit(1); } - - //创建解析器 - pCodecParserCtx_ = av_parser_init(pCodec_->id); - if (!pCodecParserCtx_){ + + //创建解析器 + pCodecParserCtx_ = av_parser_init(pCodec_->id); + if (!pCodecParserCtx_){ fprintf(stderr, "parser not found\n"); exit(1); - } - + } + //if(pCodec_->capabilities&CODEC_CAP_TRUNCATED) - // pCodecCtx_->flags|= CODEC_FLAG_TRUNCATED; - - //打开解码器 + // pCodecCtx_->flags|= CODEC_FLAG_TRUNCATED; + + //打开解码器 int ret = avcodec_open2(pCodecCtx_, pCodec_, nullptr); - if (ret < 0) { + if (ret < 0) { fprintf(stderr, "Could not open codec\n"); printf("avcodec_open2 ret is: %d\n",ret); exit(1); @@ -63,7 +63,7 @@ int HardH264FFmpegDecode::HardH264FFmpegDecoderInit(unsigned int uiWidth, unsign } // av_init_packet(pPacket_); - //分配frame + //分配frame pSrcFrame_ = av_frame_alloc(); if (!pSrcFrame_) { fprintf(stderr, "Could not allocate video src pFrame\n"); @@ -78,14 +78,14 @@ int HardH264FFmpegDecode::HardH264FFmpegDecoderInit(unsigned int uiWidth, unsign printf("after align down video_width: %d, video_height: %d\n", uiWidth_, uiHeight_); - //初始化解析器参数 - pCodecCtx_->time_base.num = 1; - pCodecCtx_->frame_number = 1; //每包一个视频帧 - pCodecCtx_->codec_type = AVMEDIA_TYPE_VIDEO; - pCodecCtx_->bit_rate = 0; - pCodecCtx_->time_base.den = uiFrameRate_;//帧率 - pCodecCtx_->width = uiWidth_; //视频宽 - pCodecCtx_->height = uiHeight_; //视频高 + //初始化解析器参数 + pCodecCtx_->time_base.num = 1; + pCodecCtx_->frame_number = 1; //每包一个视频帧 + pCodecCtx_->codec_type = AVMEDIA_TYPE_VIDEO; + pCodecCtx_->bit_rate = 0; + pCodecCtx_->time_base.den = uiFrameRate_;//帧率 + pCodecCtx_->width = uiWidth_; //视频宽 + pCodecCtx_->height = uiHeight_; //视频高 // pCodecCtx_->pix_fmt = AV_PIX_FMT_YUV420P; int bufferSize = av_image_get_buffer_size(AV_PIX_FMT_YUV420P, @@ -102,46 +102,46 @@ int HardH264FFmpegDecode::HardH264FFmpegDecoderInit(unsigned int uiWidth, unsign printf("pDstFrame_->linesize: %d, 
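HardH264FFmpegDecoderInit selects NVIDIA's "h264_cuvid" decoder by name. Two details of the error path above are worth flagging: the failure message prints pCodec_->long_name after the lookup has already returned null (a null dereference), and exit(1) from a library class takes the whole process down. A defensive variant is sketched below; the software-decoder fallback is an assumption for illustration, not something the original does:

    extern "C" {
    #include <libavcodec/avcodec.h>
    }
    #include <cstdio>

    // Prefer the CUVID decoder, optionally fall back to the stock software H.264 decoder.
    const AVCodec *FindH264Decoder(bool allowSoftwareFallback)
    {
        const AVCodec *codec = avcodec_find_decoder_by_name("h264_cuvid");
        if (codec == nullptr && allowSoftwareFallback)
        {
            codec = avcodec_find_decoder(AV_CODEC_ID_H264);
        }
        if (codec == nullptr)
        {
            std::fprintf(stderr, "no usable H.264 decoder found\n");  // do not touch codec here: it is null
            return nullptr;                                            // let the caller decide instead of exit(1)
        }
        std::printf("Codec found: %d (%s)\n", codec->id, codec->long_name);
        return codec;
    }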
bufferSize: %d\n", pDstFrame_->linesize, bufferSize); pSwsContext_ = sws_getContext(pCodecCtx_->width, pCodecCtx_->height, pCodecCtx_->pix_fmt, - pCodecCtx_->width, pCodecCtx_->height, AV_PIX_FMT_YUV420P, SWS_BICUBIC, nullptr, nullptr, nullptr); + pCodecCtx_->width, pCodecCtx_->height, AV_PIX_FMT_YUV420P, SWS_BICUBIC, nullptr, nullptr, nullptr); printf("pCodecCtx_->width: %d, pCodecCtx_->height: %d, pCodecCtx_->pix_fmt: %d\n", pCodecCtx_->width, pCodecCtx_->height, pCodecCtx_->pix_fmt); - return 0; + return 0; } int HardH264FFmpegDecode::HardH264FFmpegDecoderDeInit() { - if(pu8OutBuffer_){ + if(pu8OutBuffer_){ av_free(pu8OutBuffer_); pu8OutBuffer_ = nullptr; } - if(pSrcFrame_){ - av_frame_free(&pSrcFrame_); - pSrcFrame_ = nullptr; - } - if(pDstFrame_){ - av_frame_free(&pDstFrame_); - pDstFrame_ = nullptr; - } - if(pPacket_){ - av_packet_free(&pPacket_); + if(pSrcFrame_){ + av_frame_free(&pSrcFrame_); + pSrcFrame_ = nullptr; + } + if(pDstFrame_){ + av_frame_free(&pDstFrame_); + pDstFrame_ = nullptr; + } + if(pPacket_){ + av_packet_free(&pPacket_); pPacket_ = nullptr; } - if(pCodecParserCtx_){ - av_parser_close(pCodecParserCtx_); - pCodecParserCtx_ = nullptr; - } - if(pCodecCtx_){ - avcodec_close(pCodecCtx_); - av_free(pCodecCtx_); - pCodecCtx_ = nullptr; - } + if(pCodecParserCtx_){ + av_parser_close(pCodecParserCtx_); + pCodecParserCtx_ = nullptr; + } + if(pCodecCtx_){ + avcodec_close(pCodecCtx_); + av_free(pCodecCtx_); + pCodecCtx_ = nullptr; + } - if(pSwsContext_){ - sws_freeContext(pSwsContext_); - pSwsContext_ = nullptr; - } + if(pSwsContext_){ + sws_freeContext(pSwsContext_); + pSwsContext_ = nullptr; + } } int HardH264FFmpegDecode::HardH264FFmpegDecoderFilterGraph(AVFilterGraph *pGraph, AVFilterContext *pSourceCtx, AVFilterContext *pSinkCtx) @@ -149,7 +149,7 @@ int HardH264FFmpegDecode::HardH264FFmpegDecoderFilterGraph(AVFilterGraph *pGraph int ret; AVFilterInOut *pOutputs = nullptr, *pInputs = nullptr; if ((ret = avfilter_link(pSourceCtx, 0, pSinkCtx, 0)) >= 0){ - ret = avfilter_graph_config(pGraph, nullptr); + ret = avfilter_graph_config(pGraph, nullptr); } avfilter_inout_free(&pOutputs); @@ -168,14 +168,14 @@ int HardH264FFmpegDecode::HardH264FFmpegDecoderConfigureVideoFilters(AVFilterGra "video_size=%dx%d:pix_fmt=%d:time_base=1/1200000", iWidth, iHeight, iFormat); if ((ret = avfilter_graph_create_filter(&pFiltSrc, - avfilter_get_by_name("buffer"), "ffplay_buffer", BufferSrcArgs, - nullptr, pGraph)) < 0){ + avfilter_get_by_name("buffer"), "ffplay_buffer", BufferSrcArgs, + nullptr, pGraph)) < 0){ goto fail; } ret = avfilter_graph_create_filter(&pFiltDst, - avfilter_get_by_name("buffersink"), - "ffplay_buffersink", nullptr, nullptr, pGraph); + avfilter_get_by_name("buffersink"), + "ffplay_buffersink", nullptr, nullptr, pGraph); if (ret < 0){ goto fail; } @@ -190,14 +190,14 @@ int HardH264FFmpegDecode::HardH264FFmpegDecoderConfigureVideoFilters(AVFilterGra pDecoderFilterIn = pFiltSrc; pDecoderFilterOut = pFiltDst; - fail: +fail: return ret; } int HardH264FFmpegDecode::HardH264FFmpegDecoder(AVCodecContext *pDecCtx, AVFrame *pFrame, AVPacket *pPkt, void* pOutputData, unsigned int* puiOutputDataSize) { - int ret; - AVFilterGraph* pDecoderGraph = nullptr; + int ret; + AVFilterGraph* pDecoderGraph = nullptr; ret = avcodec_send_packet(pDecCtx, pPkt); //接收packet解码 if (ret < 0) { @@ -208,7 +208,7 @@ int HardH264FFmpegDecode::HardH264FFmpegDecoder(AVCodecContext *pDecCtx, AVFrame while (ret >= 0) { ret = avcodec_receive_frame(pDecCtx, pFrame); //解码 if (ret == AVERROR(EAGAIN) || ret == 
AVERROR_EOF){ - fprintf(stderr, "During decoding eof\n"); + fprintf(stderr, "During decoding eof\n"); return -1; } else if (ret < 0) { @@ -219,35 +219,35 @@ int HardH264FFmpegDecode::HardH264FFmpegDecoder(AVCodecContext *pDecCtx, AVFrame //printf("saving frame %3d\n", pDecCtx->frame_number); fflush(stdout); - AVFilterContext *pDecoderFilterIn = nullptr, *pDecoderFilterOut = nullptr; + AVFilterContext *pDecoderFilterIn = nullptr, *pDecoderFilterOut = nullptr; // pFrame->width = ALIGN_DOWN(pFrame->width, 32); // pFrame->height = ALIGN_DOWN(pFrame->height, 32); // printf("pFrame->width: %d\tpFrame->height: %d\n", pFrame->width, pFrame->height); - + pDecoderGraph = avfilter_graph_alloc(); HardH264FFmpegDecoderConfigureVideoFilters(pDecoderGraph, pDecoderFilterIn, pDecoderFilterOut, pFrame->width, pFrame->height, pFrame->format); - if (pFrame->format != AV_PIX_FMT_YUV420P){ + if (pFrame->format != AV_PIX_FMT_YUV420P){ DUMP_FRAME(pFrame); - ret = av_buffersrc_add_frame(pDecoderFilterIn, pFrame); + ret = av_buffersrc_add_frame(pDecoderFilterIn, pFrame); ret = av_buffersink_get_frame_flags(pDecoderFilterOut, pFrame, 0); DUMP_FRAME(pFrame); - - int iSize = pFrame->width * pFrame->height; - memcpy(pOutputData, pFrame->data[0], iSize); //Y - memcpy(pOutputData+iSize, pFrame->data[1], iSize/4); //U - memcpy(pOutputData+iSize+iSize/4, pFrame->data[2], iSize/4); //V - *puiOutputDataSize = iSize*3/2; - return iSize*3/2; - } - } - return 0; + + int iSize = pFrame->width * pFrame->height; + memcpy(pOutputData, pFrame->data[0], iSize); //Y + memcpy(pOutputData+iSize, pFrame->data[1], iSize/4); //U + memcpy(pOutputData+iSize+iSize/4, pFrame->data[2], iSize/4); //V + *puiOutputDataSize = iSize*3/2; + return iSize*3/2; + } + } + return 0; } int HardH264FFmpegDecode::HardH264FFmpegDecoderV2(AVCodecContext *pDecCtx, SwsContext *pSwsCtx, AVFrame *pSrcFrame, AVFrame *pDstFrame, AVPacket *pPkt, void* pOutputData,unsigned int* puiOutputDataSize) { - int ret; + int ret; ret = avcodec_send_packet(pDecCtx, pPkt); //接收packet解码 if (ret < 0) { @@ -258,7 +258,7 @@ int HardH264FFmpegDecode::HardH264FFmpegDecoderV2(AVCodecContext *pDecCtx, SwsCo while (ret >= 0) { ret = avcodec_receive_frame(pDecCtx, pSrcFrame); //解码 if (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF){ - fprintf(stderr, "During decoding eof\n"); + fprintf(stderr, "During decoding eof\n"); return -1; } else if (ret < 0) { @@ -266,7 +266,7 @@ int HardH264FFmpegDecode::HardH264FFmpegDecoderV2(AVCodecContext *pDecCtx, SwsCo exit(1); } - // pDecCtx->width = ALIGN_DOWN(pDecCtx->width, 32); + // pDecCtx->width = ALIGN_DOWN(pDecCtx->width, 32); // pDecCtx->height = ALIGN_DOWN(pDecCtx->height, 32); sws_scale(pSwsCtx, @@ -280,13 +280,13 @@ int HardH264FFmpegDecode::HardH264FFmpegDecoderV2(AVCodecContext *pDecCtx, SwsCo //printf("saving frame %3d\n", pDecCtx->frame_number); fflush(stdout); - int iSize = pDecCtx->width * pDecCtx->height; + int iSize = pDecCtx->width * pDecCtx->height; - memcpy(pOutputData, pDstFrame->data[0], iSize); //Y - memcpy(pOutputData+iSize, pDstFrame->data[1], iSize/4); //U - memcpy(pOutputData+iSize+iSize/4, pDstFrame->data[2], iSize/4); //V - *puiOutputDataSize = iSize*3/2; - return iSize*3/2; - } - return 0; + memcpy(pOutputData, pDstFrame->data[0], iSize); //Y + memcpy(pOutputData+iSize, pDstFrame->data[1], iSize/4); //U + memcpy(pOutputData+iSize+iSize/4, pDstFrame->data[2], iSize/4); //V + *puiOutputDataSize = iSize*3/2; + return iSize*3/2; + } + return 0; } \ No newline at end of file diff --git 
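After decoding (and, in the V2 path, after sws_scale to AV_PIX_FMT_YUV420P) the frame is flattened into a contiguous width*height*3/2 buffer: the full-resolution Y plane followed by the quarter-size U and V planes. The memcpy calls in the source assume each plane is tightly packed (linesize equal to the plane width); the sketch below copies row by row so it also stays correct when FFmpeg pads the stride:

    extern "C" {
    #include <libavutil/frame.h>
    }
    #include <cstring>

    // Pack an AV_PIX_FMT_YUV420P frame into a contiguous I420 buffer of size w*h*3/2.
    size_t PackYuv420(const AVFrame *frame, uint8_t *dst)
    {
        const int w = frame->width;
        const int h = frame->height;
        const int planeW[3] = { w, w / 2, w / 2 };
        const int planeH[3] = { h, h / 2, h / 2 };
        size_t off = 0;
        for (int p = 0; p < 3; ++p)
        {
            for (int row = 0; row < planeH[p]; ++row)
            {
                std::memcpy(dst + off, frame->data[p] + row * frame->linesize[p], planeW[p]);
                off += planeW[p];        // the destination buffer is tightly packed
            }
        }
        return off;                      // equals w * h * 3 / 2
    }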
a/nvidia_ascend_engine/nvidia_engine/DecodeEngine/HardH264FFmpegDecode.h b/nvidia_ascend_engine/nvidia_engine/DecodeEngine/HardH264FFmpegDecode.h index 7f27ac9..39f5085 100644 --- a/nvidia_ascend_engine/nvidia_engine/DecodeEngine/HardH264FFmpegDecode.h +++ b/nvidia_ascend_engine/nvidia_engine/DecodeEngine/HardH264FFmpegDecode.h @@ -56,7 +56,7 @@ extern "C" frame->linesize[2] \ );} -#define NVIDIA_H264_DECODER "h264_cuvid" +#define NVIDIA_H264_DECODER "h264_cuvid" // #define NVIDIA_H264_DECODER "h264_v4l2m2m" class HardH264FFmpegDecode @@ -69,21 +69,21 @@ public: int HardH264FFmpegDecoderDeInit(); int HardH264FFmpegDecoder(AVCodecContext *pDecCtx, AVFrame *pFrame, AVPacket *pPkt, void* pOutputData, unsigned int* puiOutputDataSize); int HardH264FFmpegDecoderV2(AVCodecContext *pDecCtx, SwsContext *pSwsCtx, AVFrame *pSrcFrame, AVFrame *pDstFrame, AVPacket *pPkt, void* pOutputData, unsigned int* puiOutputDataSize); - + const AVCodec *pCodec_ = nullptr; //解码器 AVCodecContext *pCodecCtx_ = nullptr; //上下文 - AVCodecParserContext *pCodecParserCtx_ = nullptr; //解析器上下文 - AVFrame *pSrcFrame_ = nullptr; - AVFrame *pDstFrame_ = nullptr; - AVPacket *pPacket_ = nullptr; - SwsContext *pSwsContext_ = nullptr; + AVCodecParserContext *pCodecParserCtx_ = nullptr; //解析器上下文 + AVFrame *pSrcFrame_ = nullptr; + AVFrame *pDstFrame_ = nullptr; + AVPacket *pPacket_ = nullptr; + SwsContext *pSwsContext_ = nullptr; - uint8_t *pu8OutBuffer_ = nullptr; + uint8_t *pu8OutBuffer_ = nullptr; private: int HardH264FFmpegDecoderFilterGraph(AVFilterGraph *pGraph, AVFilterContext *pSourceCtx, AVFilterContext *pSinkCtx); int HardH264FFmpegDecoderConfigureVideoFilters(AVFilterGraph *pGraph, AVFilterContext* &pDecoderFilterIn, AVFilterContext* &pDecoderFilterOut, const int iWidth, const int iHeight, const int iFormat); - + unsigned int uiWidth_, uiHeight_; int iFrameFinished_; diff --git a/nvidia_ascend_engine/nvidia_engine/DecodeEngine/VideoDecodeEngine.cpp b/nvidia_ascend_engine/nvidia_engine/DecodeEngine/VideoDecodeEngine.cpp index d2fe2b2..33dd5d6 100644 --- a/nvidia_ascend_engine/nvidia_engine/DecodeEngine/VideoDecodeEngine.cpp +++ b/nvidia_ascend_engine/nvidia_engine/DecodeEngine/VideoDecodeEngine.cpp @@ -1,165 +1 @@ -/** - * 视频流解码引擎 - * */ - -#include "VideoDecodeEngine.h" - -using namespace std; -using namespace cv; -using namespace ai_matrix; - -VideoDecodeEngine::VideoDecodeEngine() {} - -VideoDecodeEngine::~VideoDecodeEngine() {} - -APP_ERROR VideoDecodeEngine::Init() -{ - bUseEngine_ = true; - dataSourceConfig_ = MyYaml::GetIns()->GetDataSourceConfigById(engineId_); // 获取摄像机参数 - - strPort0_ = engineName_ + "_" + std::to_string(engineId_) + "_0"; - if (MyYaml::GetIns()->GetStringValue("gc_data_source") != "camera" || !dataSourceConfig_.bUse) - { - bUseEngine_ = false; - LogWarn << "engineId_:" << engineId_ << " not use engine"; - return APP_ERR_OK; - } - - LogInfo << "VideoDecodeEngine Init ok"; - return APP_ERR_OK; -} - -APP_ERROR VideoDecodeEngine::DeInit() -{ - if (!bUseEngine_) - { - LogWarn << "engineId_:" << engineId_ << " not use engine"; - return APP_ERR_OK; - } - - if (hard_h264_ffmpeg_decoder_ != nullptr) - { - hard_h264_ffmpeg_decoder_->HardH264FFmpegDecoderDeInit(); - delete hard_h264_ffmpeg_decoder_; - hard_h264_ffmpeg_decoder_ = nullptr; - } - LogInfo << "VideoDecodeEngine DeInit ok"; - return APP_ERR_OK; -} - -APP_ERROR VideoDecodeEngine::Process() -{ - if (!bUseEngine_) - { - LogWarn << "engineId_:" << engineId_ << " not use engine"; - return APP_ERR_OK; - } - - int iRet = APP_ERR_OK; - int iSkipCount = 1; - 
int iNoCameraDataCnt = 0; - while (!isStop_) - { - //从上一引擎接收图像数据 - std::shared_ptr pVoidData0 = nullptr; - inputQueMap_[strPort0_]->pop(pVoidData0); - if (nullptr == pVoidData0) - { - usleep(10*1000); //10ms - - // iNoCameraDataCnt++; - // if (iNoCameraDataCnt >= 1000) //10秒内收不到,认为相机断开 - // { - // LogError << "engineId:" << engineId_ << " 超过10秒获取到摄像头数据,疑似摄像头断开。计数:" << iNoCameraDataCnt; - // iNoCameraDataCnt = 0; - // //camera异常时,构造空的解码数据push,确保一直有数据流转到后面Engine - // std::shared_ptr pProcessData = std::make_shared(); - // pProcessData->iDataSource = engineId_; - // pProcessData->i64TimeStamp = MyUtils::getins()->GetCurrentTimeMillis(); - // pProcessData->iSize = 0; - // pProcessData->pData = nullptr; - // iRet = outputQueMap_[strPort0_]->push(std::static_pointer_cast(pProcessData)); - // } - - continue; - } - - iNoCameraDataCnt = 0; - std::shared_ptr pProcessData = std::static_pointer_cast(pVoidData0); - - //创建解码类 - if (hard_h264_ffmpeg_decoder_ == nullptr) - { - hard_h264_ffmpeg_decoder_ = new HardH264FFmpegDecode; - int iRet = hard_h264_ffmpeg_decoder_->HardH264FFmpegDecoderInit(pProcessData->iWidth, pProcessData->iHeight, pProcessData->iRate); - if (iRet != 0) - { - LogError << "engineId:" << engineId_ << " HardH264FFmpegDecoderInit Failed"; - if (hard_h264_ffmpeg_decoder_ != nullptr) - { - delete hard_h264_ffmpeg_decoder_; - hard_h264_ffmpeg_decoder_ = nullptr; - } - continue; - } - } - - //构造YUV420M数据 - unsigned int pYUV420MBuffer_Size = pProcessData->iWidth * pProcessData->iHeight * 3 / 2; - void *pYUV420MBuffer = nullptr; - pYUV420MBuffer = new uint8_t[pYUV420MBuffer_Size]; - std::shared_ptr pYUVData; - pYUVData.reset(pYUV420MBuffer, [](void *data){if(data) {delete[] data; data = nullptr;}}); //智能指针管理内存 - - hard_h264_ffmpeg_decoder_->pPacket_->data = static_cast(pProcessData->pData.get()); //这里填入一个指向完整H264数据帧的指针 - hard_h264_ffmpeg_decoder_->pPacket_->size = pProcessData->iSize; //这个填入H264数据帧的大小 - - // H264硬件解码 - // int iDecodeRet= hard_h264_ffmpeg_decoder_->HardH264FFmpegDecoderV2(hard_h264_ffmpeg_decoder_->pCodecCtx_, hard_h264_ffmpeg_decoder_->pFrame_, - // hard_h264_ffmpeg_decoder_->pPacket_, pYUV420MBuffer, &pYUV420MBuffer_Size); - - int iDecodeRet = hard_h264_ffmpeg_decoder_->HardH264FFmpegDecoderV2(hard_h264_ffmpeg_decoder_->pCodecCtx_, - hard_h264_ffmpeg_decoder_->pSwsContext_, - hard_h264_ffmpeg_decoder_->pSrcFrame_, - hard_h264_ffmpeg_decoder_->pDstFrame_, - hard_h264_ffmpeg_decoder_->pPacket_, - pYUV420MBuffer, - &pYUV420MBuffer_Size); - if (iDecodeRet > 0) - { - if (iSkipCount++ % dataSourceConfig_.iSkipInterval != 0) - { - continue; - } - iSkipCount = 1; - - //硬解码YUV转BGR - cv::Mat matYUV(pProcessData->iHeight * 3 / 2, pProcessData->iWidth, CV_8UC1); - memcpy(matYUV.data, static_cast(pYUVData.get()), pYUV420MBuffer_Size); - - cv::Mat matBGR(pProcessData->iHeight, pProcessData->iWidth, CV_8UC3); - cv::cvtColor(matYUV, matBGR, cv::COLOR_YUV2BGR_I420); - - cv::resize(matBGR, matBGR, cv::Size(IMAGE_WIDTH, IMAGE_HEIGHT)); - unsigned int iResizeSize = IMAGE_WIDTH * IMAGE_HEIGHT * 3; - void *pResizeBGRBuffer = nullptr; - pResizeBGRBuffer = new uint8_t[iResizeSize]; - memcpy(pResizeBGRBuffer, matBGR.data, iResizeSize); - pProcessData->pData.reset(pResizeBGRBuffer, [](void *data) {if(data) {delete[] data; data = nullptr;} }); - pProcessData->iSize = iResizeSize; - pProcessData->iWidth = IMAGE_WIDTH; - pProcessData->iHeight = IMAGE_HEIGHT; - - iRet = outputQueMap_[strPort0_]->push(std::static_pointer_cast(pProcessData)); - if (iRet != APP_ERR_OK) - { - LogError << "push the after 
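VideoDecodeEngine allocates the YUV target buffer with new uint8_t[] and immediately hands it to a shared pointer with a custom deleter, so downstream engines can keep the frame alive without worrying about who frees it. The same idiom in isolation (std::shared_ptr<void> as the element type is an assumption based on the static_pointer_cast calls around the queues):

    #include <cstdint>
    #include <memory>

    // Allocate a frame-sized scratch buffer whose lifetime is managed by shared_ptr.
    std::shared_ptr<void> MakeYuvBuffer(int width, int height)
    {
        const size_t size = static_cast<size_t>(width) * height * 3 / 2;   // YUV420: 1.5 bytes per pixel
        uint8_t *raw = new uint8_t[size];
        // The deleter must use delete[] because the buffer was created with new[].
        return std::shared_ptr<void>(raw, [](void *p) { delete[] static_cast<uint8_t *>(p); });
    }

Constructing the shared_ptr directly keeps the allocation and its deleter in one expression; resetting an empty pointer, as the engine does, is equivalent.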
hard h264 decode yuv420m frame data failed..."; - } - } - else - { - LogError << "engineId:" << engineId_ << " HardH264FFmpegDecoderV2 failed...iDecodeRet:" << iDecodeRet; - } - } -} - +/** * 视频流解码引擎 * */ #include "VideoDecodeEngine.h" using namespace std; using namespace cv; using namespace ai_matrix; VideoDecodeEngine::VideoDecodeEngine() {} VideoDecodeEngine::~VideoDecodeEngine() {} APP_ERROR VideoDecodeEngine::Init() { bUseEngine_ = true; dataSourceConfig_ = MyYaml::GetIns()->GetDataSourceConfigById(engineId_); // 获取摄像机参数 strPort0_ = engineName_ + "_" + std::to_string(engineId_) + "_0"; if (MyYaml::GetIns()->GetStringValue("gc_data_source") != "camera" || !dataSourceConfig_.bUse) { bUseEngine_ = false; LogWarn << "engineId_:" << engineId_ << " not use engine"; return APP_ERR_OK; } LogInfo << "VideoDecodeEngine Init ok"; return APP_ERR_OK; } APP_ERROR VideoDecodeEngine::DeInit() { if (!bUseEngine_) { LogWarn << "engineId_:" << engineId_ << " not use engine"; return APP_ERR_OK; } if (hard_h264_ffmpeg_decoder_ != nullptr) { hard_h264_ffmpeg_decoder_->HardH264FFmpegDecoderDeInit(); delete hard_h264_ffmpeg_decoder_; hard_h264_ffmpeg_decoder_ = nullptr; } LogInfo << "VideoDecodeEngine DeInit ok"; return APP_ERR_OK; } APP_ERROR VideoDecodeEngine::Process() { if (!bUseEngine_) { LogWarn << "engineId_:" << engineId_ << " not use engine"; return APP_ERR_OK; } int iRet = APP_ERR_OK; int iSkipCount = 1; int iNoCameraDataCnt = 0; while (!isStop_) { //从上一引擎接收图像数据 std::shared_ptr pVoidData0 = nullptr; inputQueMap_[strPort0_]->pop(pVoidData0); if (nullptr == pVoidData0) { usleep(10*1000); //10ms // iNoCameraDataCnt++; // if (iNoCameraDataCnt >= 1000) //10秒内收不到,认为相机断开 // { // LogError << "engineId:" << engineId_ << " 超过10秒获取到摄像头数据,疑似摄像头断开。计数:" << iNoCameraDataCnt; // iNoCameraDataCnt = 0; // //camera异常时,构造空的解码数据push,确保一直有数据流转到后面Engine // std::shared_ptr pProcessData = std::make_shared(); // pProcessData->iDataSource = engineId_; // pProcessData->i64TimeStamp = MyUtils::getins()->GetCurrentTimeMillis(); // pProcessData->iSize = 0; // pProcessData->pData = nullptr; // iRet = outputQueMap_[strPort0_]->push(std::static_pointer_cast(pProcessData)); // } continue; } iNoCameraDataCnt = 0; std::shared_ptr pProcessData = std::static_pointer_cast(pVoidData0); //创建解码类 if (hard_h264_ffmpeg_decoder_ == nullptr) { hard_h264_ffmpeg_decoder_ = new HardH264FFmpegDecode; int iRet = hard_h264_ffmpeg_decoder_->HardH264FFmpegDecoderInit(pProcessData->iWidth, pProcessData->iHeight, pProcessData->iRate); if (iRet != 0) { LogError << "engineId:" << engineId_ << " HardH264FFmpegDecoderInit Failed"; if (hard_h264_ffmpeg_decoder_ != nullptr) { delete hard_h264_ffmpeg_decoder_; hard_h264_ffmpeg_decoder_ = nullptr; } continue; } } //构造YUV420M数据 unsigned int pYUV420MBuffer_Size = pProcessData->iWidth * pProcessData->iHeight * 3 / 2; void *pYUV420MBuffer = nullptr; pYUV420MBuffer = new uint8_t[pYUV420MBuffer_Size]; std::shared_ptr pYUVData; pYUVData.reset(pYUV420MBuffer, [](void *data){if(data) {delete[] data; data = nullptr;}}); //智能指针管理内存 hard_h264_ffmpeg_decoder_->pPacket_->data = static_cast(pProcessData->pData.get()); //这里填入一个指向完整H264数据帧的指针 hard_h264_ffmpeg_decoder_->pPacket_->size = pProcessData->iSize; //这个填入H264数据帧的大小 // H264硬件解码 // int iDecodeRet= hard_h264_ffmpeg_decoder_->HardH264FFmpegDecoderV2(hard_h264_ffmpeg_decoder_->pCodecCtx_, hard_h264_ffmpeg_decoder_->pFrame_, // hard_h264_ffmpeg_decoder_->pPacket_, pYUV420MBuffer, &pYUV420MBuffer_Size); int iDecodeRet = 
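Once a packet decodes successfully, the packed I420 buffer is wrapped in a single-channel Mat of height*3/2 rows, converted to BGR with COLOR_YUV2BGR_I420, and resized to the fixed inference resolution before being pushed downstream. A self-contained sketch of that conversion (dstW/dstH stand in for IMAGE_WIDTH/IMAGE_HEIGHT):

    #include <opencv2/imgproc.hpp>
    #include <cstdint>
    #include <cstring>

    // Convert a packed I420 buffer (srcW*srcH*3/2 bytes) to a BGR image resized to dstW x dstH.
    cv::Mat I420ToBgrResized(const uint8_t *yuv, int srcW, int srcH, int dstW, int dstH)
    {
        // One-channel Mat laid out as the Y plane (srcH rows) followed by U and V (srcH/2 rows in total).
        cv::Mat matYUV(srcH * 3 / 2, srcW, CV_8UC1);
        std::memcpy(matYUV.data, yuv, static_cast<size_t>(srcW) * srcH * 3 / 2);

        cv::Mat matBGR;
        cv::cvtColor(matYUV, matBGR, cv::COLOR_YUV2BGR_I420);   // srcH x srcW, 3 channels
        cv::resize(matBGR, matBGR, cv::Size(dstW, dstH));        // match the downstream model input
        return matBGR;
    }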
hard_h264_ffmpeg_decoder_->HardH264FFmpegDecoderV2(hard_h264_ffmpeg_decoder_->pCodecCtx_, hard_h264_ffmpeg_decoder_->pSwsContext_, hard_h264_ffmpeg_decoder_->pSrcFrame_, hard_h264_ffmpeg_decoder_->pDstFrame_, hard_h264_ffmpeg_decoder_->pPacket_, pYUV420MBuffer, &pYUV420MBuffer_Size); if (iDecodeRet > 0) { if (iSkipCount++ % dataSourceConfig_.iSkipInterval != 0) { continue; } iSkipCount = 1; //硬解码YUV转BGR cv::Mat matYUV(pProcessData->iHeight * 3 / 2, pProcessData->iWidth, CV_8UC1); memcpy(matYUV.data, static_cast(pYUVData.get()), pYUV420MBuffer_Size); cv::Mat matBGR(pProcessData->iHeight, pProcessData->iWidth, CV_8UC3); cv::cvtColor(matYUV, matBGR, cv::COLOR_YUV2BGR_I420); cv::resize(matBGR, matBGR, cv::Size(IMAGE_WIDTH, IMAGE_HEIGHT)); unsigned int iResizeSize = IMAGE_WIDTH * IMAGE_HEIGHT * 3; void *pResizeBGRBuffer = nullptr; pResizeBGRBuffer = new uint8_t[iResizeSize]; memcpy(pResizeBGRBuffer, matBGR.data, iResizeSize); pProcessData->pData.reset(pResizeBGRBuffer, [](void *data) {if(data) {delete[] data; data = nullptr;} }); pProcessData->iSize = iResizeSize; pProcessData->iWidth = IMAGE_WIDTH; pProcessData->iHeight = IMAGE_HEIGHT; iRet = outputQueMap_[strPort0_]->push(std::static_pointer_cast(pProcessData)); if (iRet != APP_ERR_OK) { LogError << "push the after hard h264 decode yuv420m frame data failed..."; } } else { LogError << "engineId:" << engineId_ << " HardH264FFmpegDecoderV2 failed...iDecodeRet:" << iDecodeRet; } } } \ No newline at end of file diff --git a/nvidia_ascend_engine/nvidia_engine/MoveEngine/MoveEngine.cpp b/nvidia_ascend_engine/nvidia_engine/MoveEngine/MoveEngine.cpp index 8aa1571..1fb6f43 100644 --- a/nvidia_ascend_engine/nvidia_engine/MoveEngine/MoveEngine.cpp +++ b/nvidia_ascend_engine/nvidia_engine/MoveEngine/MoveEngine.cpp @@ -211,6 +211,7 @@ void MoveEngine::SingleDeviceProcess(std::shared_ptr pProcessData, pSaveImgData->bIsEnd = pProcessData->bIsEnd; pSaveImgData->bSaveToFtp = true; pSaveImgData->i64TimeStamp = pProcessData->i64TimeStamp; + pSaveImgData->nMonitorState = nType; outputQueMap_[strPort0_]->push(std::static_pointer_cast(pSaveImgData)); }
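The last two hunks close the loop on the nMonitorState field: MoveEngine stamps the detected state onto each SaveImgData it emits, and SaveImgEngine serialises it as "moveType" next to the existing status and direction fields in the per-frame JSON. For reference, a small jsoncpp sketch of that frame record with the field names used in the SaveImgEngine hunk:

    #include <json/json.h>

    // Per-frame metadata written by SaveImgEngine, now carrying the monitor state as "moveType".
    Json::Value MakeFrameInfo(int64_t timeStampMs, int status, int monitorState,
                              int direction, int width, int height)
    {
        Json::Value jv;
        jv["timeStamp"] = static_cast<Json::Int64>(timeStampMs);
        jv["status"]    = status;
        jv["moveType"]  = monitorState;   // nMonitorState propagated from MoveEngine
        jv["direction"] = direction;      // travel direction as set upstream
        jv["width"]     = width;
        jv["height"]    = height;
        return jv;
    }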