更新部分日志,以及倒车处理的逻辑

This commit is contained in:
Mr.V 2024-02-29 16:16:47 +08:00
parent f330b88229
commit f2bcc2e472
20 changed files with 336 additions and 659 deletions

View File

@ -653,7 +653,7 @@ namespace ai_matrix
}
if (!ifs.is_open())
{
LogWarn << "txt:" << strFilePath << " open fail";
// LogWarn << "txt:" << strFilePath << " open fail";
return false;
}

BIN
app/train Normal file

Binary file not shown.

View File

@ -190,8 +190,8 @@ void DataDealEngine::MakeProcessData()
iFrameId = iReRunFrameId;
}
LogInfo << "sourceid:" << iSourceId << " MakeProcessData origtime:" << moveData_.strTrainName << " iOrigFrameId:" << iOrigFrameId
<< " time:" << strTrainName_ << " iFrameId:" << iFrameId << " bIsEndFlag:" << bIsEndFlag;
// LogInfo << "sourceid:" << iSourceId << " MakeProcessData origtime:" << moveData_.strTrainName << " iOrigFrameId:" << iOrigFrameId
// << " time:" << strTrainName_ << " iFrameId:" << iFrameId << " bIsEndFlag:" << bIsEndFlag;
std::string strImgName = strDataDir_ + szCameraNo + std::to_string(iOrigFrameId);
strImgName += (iter->second.iRotate != 0) ? "_rotate.jpg" : ".jpg";

View File

@ -239,8 +239,8 @@ void DataDealTwoEngine::GetMainSplitInfo(Json::Value &jvMainSplit, std::shared_p
{
iValidType = pProcessData->iDirection == DIRECTION_LEFT ? VALID_LEFT : VALID_RIGHT;
}
LogDebug << "sourceid:" << pProcessData->iDataSource << " frameid:" << pProcessData->iFrameId
<< " trainIndex:" << pProcessData->iTrainIndex << " iSpaceType_:" << iSpaceType_
LogDebug << " frameid:" << pProcessData->iFrameId
<< " 车节:" << pProcessData->iTrainIndex << " iSpaceType_:" << iSpaceType_
<< " iSpaceX:" << iSpaceX << " iLastSpaceX_:" << iLastSpaceX_
<< " iLastSpaceFrameid_:" << iLastSpaceFrameid_ << " bIntervalFlag:" << bIntervalFlag;
iLastSpaceX_ = iSpaceX;
@ -352,11 +352,11 @@ void DataDealTwoEngine::GetValidTypeAndSplit(Json::Value &jvOneSplit, Json::Valu
GetSubSplitInfoByMain(jvOneSplit, pProcessData, jvFrameInfo);
}
LogDebug << "sourceid:" << pProcessData->iDataSource << " frameid:" << pProcessData->iFrameId
<< " trainIndex:" << pProcessData->iTrainIndex
<< " validType:" << jvOneSplit["validType"].asInt() << " splitX:" << jvOneSplit["splitX"].asInt()
<< " needNum:" << jvOneSplit["needNum"].asBool() << " needPro:" << jvOneSplit["needPro"].asBool()
<< " needChkDate:" << jvOneSplit["needChkDate"].asBool() << " needContainer:" << jvOneSplit["needContainer"].asBool();
// LogDebug << "sourceid:" << pProcessData->iDataSource << " frameid:" << pProcessData->iFrameId
// << " trainIndex:" << pProcessData->iTrainIndex
// << " validType:" << jvOneSplit["validType"].asInt() << " splitX:" << jvOneSplit["splitX"].asInt()
// << " needNum:" << jvOneSplit["needNum"].asBool() << " needPro:" << jvOneSplit["needPro"].asBool()
// << " needChkDate:" << jvOneSplit["needChkDate"].asBool() << " needContainer:" << jvOneSplit["needContainer"].asBool();
}
/**
@ -456,8 +456,8 @@ void DataDealTwoEngine::MakeProcessData(std::shared_ptr<TrainRange> pTrainRange)
sprintf(szCameraNo, "%03d/", iSourceId + 1);
bool bIsEndFlag = (pTrainRange->iEndFrameId == iFrameId);
LogInfo << "sourceid:" << iSourceId << " StepTwo MakeProcessData trainIndex:" << pTrainRange->iTrainIndex
<< " iFrameId:" << iFrameId << " bIsEndFlag:" << bIsEndFlag;
// LogInfo << "sourceid:" << iSourceId << " StepTwo MakeProcessData trainIndex:" << pTrainRange->iTrainIndex
// << " iFrameId:" << iFrameId << " bIsEndFlag:" << bIsEndFlag;
std::string strImgName = strDataDir + szCameraNo + std::to_string(iFrameId);
strImgName += (iter->second.iRotate != 0) ? "_rotate.jpg" : ".jpg";
std::string strFileName = strDataDir + szCameraNo + std::to_string(iFrameId) + ".txt";

View File

@ -7,7 +7,7 @@ namespace
{
const int LOW_THRESHOLD = 128;
const int MAX_THRESHOLD = 4096;
const uint16_t DELAY_TIME = 40000;
const uint16_t DELAY_TIME = 10000;
}
CameraEngine::CameraEngine() {}

View File

@ -276,8 +276,8 @@ void ResultToHttpSrvEngine::DealHttpFailInfo()
if (!ResultToHttpSrv(jvRequest))
{
LogError << "re http post err:" << strLine;
SaveHttpFailInfo(jvRequest, strFailSaveBakPath_);
bAllSucc = false;
//SaveHttpFailInfo(jvRequest, strFailSaveBakPath_);
// bAllSucc = false;
continue;
}
}
@ -429,7 +429,7 @@ APP_ERROR ResultToHttpSrvEngine::Process()
jvRequest["skipFrame"] = dataSourceConfig.iSkipInterval; //跳帧
if (!ResultToHttpSrv(jvRequest))
{
SaveHttpFailInfo(jvRequest, strFailSavePath_);
// SaveHttpFailInfo(jvRequest, strFailSavePath_);
}
//列车结束后再次处理失败的信息

View File

@ -0,0 +1,81 @@
#include "DeleteOldDataEngine.h"

#include <chrono>
#include <cstdlib>
#include <string>
#include <thread>
using namespace ai_matrix;
// Trivial special members — nothing to set up or tear down here.
DeleteOldDataEngine::DeleteOldDataEngine() = default;
DeleteOldDataEngine::~DeleteOldDataEngine() = default;
APP_ERROR DeleteOldDataEngine::Init()
{
    // Engine is unconditionally enabled; retention period is fixed at 10 days.
    // NOTE(review): hard-coded value — presumably should come from YAML config
    // like the other engines; verify before relying on it.
    bUseEngine_ = true;
    outTimeDay_ = 10;
    LogInfo << "DeleteOldDataEngine Init ok";
    return APP_ERR_OK;
}
APP_ERROR DeleteOldDataEngine::DeInit()
{
    // No resources are held by this engine; DeInit only reports shutdown.
    if (bUseEngine_)
    {
        LogInfo << "DeleteOldDataEngine DeInit ok";
    }
    else
    {
        LogWarn << "engineId_:" << engineId_ << " not use engine";
    }
    return APP_ERR_OK;
}
//void rm_dir( const char *path )
//{
// DIR *dir;
// struct dirent *dirp;
// struct stat buf;
// char *p = getcwd( NULL, 0 );
// if ( (dir = opendir( path ) ) == NULL )
// error_quit( "OpenDir" );
// change_path( path );
// while ( dirp = readdir( dir ) )
// {
// if ( (strcmp( dirp->d_name, "." ) == 0) || (strcmp( dirp->d_name, ".." ) == 0) )
// continue;
// if ( stat( dirp->d_name, &buf ) == -1 )
// error_quit( "stat" );
// if ( S_ISDIR( buf.st_mode ) )
// {
// rm_dir( dirp->d_name );
// /*if(rmdir(dirp->d_name)==-1)
// * error_quit("rmdir");
// * printf("rm %s Successed . . .\n",dirp->d_name);*/
// continue;
// }
// if ( remove( dirp->d_name ) == -1 )
// error_quit( "remove" );
// printf( "rm %s Successed . . .\n", dirp->d_name );
// }
// closedir( dir );
// change_path( p );
// if ( rmdir( path ) == -1 )
// error_quit( "rmdir" );
// printf( "rm %s Successed . . .\n", path );
//}
APP_ERROR DeleteOldDataEngine::Process()
{
    // Worker loop: periodically purges data files older than outTimeDay_ days.
    //
    // BUG FIX: the original loop body was the shell command written as a bare
    // string-literal expression statement — it executed nothing and busy-spun
    // at 100% CPU. The command is now actually run, and the loop sleeps
    // between sweeps while still honoring isStop_ promptly.
    if (!bUseEngine_)
    {
        LogWarn << "engineId_:" << engineId_ << " not use engine";
        return APP_ERR_OK;
    }
    while (!isStop_)
    {
        // Build the cleanup command from the configured retention period.
        // NOTE(review): "目录地址" is a placeholder inherited from the original
        // string — TODO: substitute the real data directory (e.g. from MyYaml).
        std::string strCmd = "find 目录地址 -type f -ctime +" + std::to_string(outTimeDay_) + " | xargs rm -f";
        int iRet = std::system(strCmd.c_str());
        if (iRet != 0)
        {
            LogWarn << "delete old data cmd fail ret:" << iRet << " cmd:" << strCmd;
        }
        // Sweep roughly once per hour; wake each second so a stop request
        // is noticed without waiting out the full interval.
        for (int i = 0; i < 3600 && !isStop_; ++i)
        {
            std::this_thread::sleep_for(std::chrono::seconds(1));
        }
    }
    return APP_ERR_OK;
}

View File

@ -0,0 +1,32 @@
/**
 * DeleteOldDataEngine: engine intended to periodically delete old data files.
 */
#ifndef DELETEOLDDATAENGINE_H
#define DELETEOLDDATAENGINE_H
#include "AppCommon.h"
#include "MyYaml.h"
#include "myutils.h"
#include "EngineBase.h"
#include "EngineFactory.h"
// Engine intended to periodically delete stale on-disk data so storage does
// not fill up over time (see DeleteOldDataEngine.cpp for the worker loop).
class DeleteOldDataEngine : public ai_matrix::EngineBase
{
public:
DeleteOldDataEngine();
~DeleteOldDataEngine();
// Enables the engine and fixes the retention period (currently 10 days).
APP_ERROR Init() override;
// Logs shutdown; no resources to release.
APP_ERROR DeInit() override;
// Worker loop meant to run the periodic cleanup until the engine is stopped.
APP_ERROR Process() override;
private:
bool bUseEngine_; // whether this engine instance is active
int outTimeDay_;  // retention period in days before files are considered old
};
ENGINE_REGIST(DeleteOldDataEngine)
#endif

View File

@ -55,8 +55,8 @@ APP_ERROR FilterTrainStepOneEngine::Init()
mapTargetStr_.insert(std::make_pair(NUM, "NUM"));
mapTargetStr_.insert(std::make_pair(PRO, "PRO"));
mapTargetStr_.insert(std::make_pair(HEAD, "HEAD"));
mapTargetStr_.insert(std::make_pair(SPACE, "SPACE"));
mapTargetStr_.insert(std::make_pair(TRAINSPACE, "SPACE"));
mapTargetStr_.insert(std::make_pair(SPACE, "SPACE"));//SPACE
mapTargetStr_.insert(std::make_pair(TRAINSPACE, "SPACE"));//SPACE
InitParam();
LogInfo << "FilterTrainStepOneEngine Init ok";
@ -166,11 +166,12 @@ void FilterTrainStepOneEngine::AddBackInfo(std::shared_ptr<ProcessData> pProcess
}
strAllClassType += mapTargetStr_[pPostData->vecPostSubData[i].iTargetType];
}
LogDebug << "frameId:" << pProcessData->iFrameId << " addbackinfo strAllClassType:" << strAllClassType;
if (strAllClassType.empty())
{
return;
}
LogDebug << "frameId:" << pProcessData->iFrameId << " addbackinfo strAllClassType:" << strAllClassType;
TrainBackInfo trainBackInfo;
trainBackInfo.processData = pProcessData;
@ -200,16 +201,19 @@ bool FilterTrainStepOneEngine::IsEndDealBackInfo(std::shared_ptr<ProcessData> pP
return true;
}
bool bPopFlag = false;
std::shared_ptr<PostData> pPostData = std::static_pointer_cast<PostData>(pProcessData->pVoidData);
if (pPostData->vecPostSubData.size() == 0) return false;
/*
Engine不处理
poppop后
poppop后
()
*/
std::shared_ptr<PostData> pPostData = std::static_pointer_cast<PostData>(pProcessData->pVoidData);
std::sort(pPostData->vecPostSubData.begin(), pPostData->vecPostSubData.end(), CompareX);
std::string strAllClassType;
@ -226,48 +230,59 @@ bool FilterTrainStepOneEngine::IsEndDealBackInfo(std::shared_ptr<ProcessData> pP
return false;
}
TrainBackInfo trainBackInfoTop = stackBackInfo_.top();
bool bPopFlag = false;
if (trainBackInfoTop.strAllClassType != strAllClassType)
{
bPopFlag = true;
}
LogDebug << "frameId:" << pProcessData->iFrameId << " stacksize:" << stackBackInfo_.size()
<< " topClassType:" << trainBackInfoTop.strAllClassType << " dealbackinfo strAllClassType:" << strAllClassType
<< " bPopFlag:" << bPopFlag;
if(bPopFlag)
{
stackBackInfo_.pop();
bPopFlag = false;
}
if (stackBackInfo_.size() == 1)
{
if (!bPopFlag)
{
TrainBackInfo trainBackInfoLast = stackBackInfo_.top();
std::shared_ptr<PostData> pPostDataBack = std::static_pointer_cast<PostData>(trainBackInfoLast.processData->pVoidData);
std::sort(pPostDataBack->vecPostSubData.begin(), pPostDataBack->vecPostSubData.end(), CompareX);
TrainBackInfo trainBackInfoLast = stackBackInfo_.top();
std::shared_ptr<PostData> pPostDataBack = std::static_pointer_cast<PostData>(trainBackInfoLast.processData->pVoidData);
std::sort(pPostDataBack->vecPostSubData.begin(), pPostDataBack->vecPostSubData.end(), CompareX);
for (size_t i = 0; i < pPostDataBack->vecPostSubData.size(); i++)
for (size_t i = 0; i < pPostDataBack->vecPostSubData.size(); i++)
{
bool bFlag = (pPostDataBack->vecPostSubData[i].step1Location.fLTX <= pPostData->vecPostSubData[i].step1Location.fLTX);
LogDebug << "帧:" << pProcessData->iFrameId << " 倒车前帧:" << pPostDataBack->iFrameId << " 恢复到原位:" << bFlag
<< " 当前框位置:" << pPostData->vecPostSubData[i].step1Location.fLTX
<< " 倒车前位置:" << pPostDataBack->vecPostSubData[i].step1Location.fLTX;
if ((iDirection_ == DIRECTION_LEFT && !bFlag) ||
(iDirection_ == DIRECTION_RIGHT && bFlag))
{
bool bFlag = (pPostDataBack->vecPostSubData[i].step1Location.fLTX <= pPostData->vecPostSubData[i].step1Location.fLTX);
LogDebug << "frameId:" << pProcessData->iFrameId << " stackFrameid:" << pPostDataBack->iFrameId << " bFlag:" << bFlag;
if ((iDirection_ == DIRECTION_LEFT && !bFlag) ||
(iDirection_ == DIRECTION_RIGHT && bFlag))
{
bPopFlag = true;
break;
}
bPopFlag = true;
break;
}
if (bPopFlag)
{
LogDebug << "frameId:" << pProcessData->iFrameId << " last one bPopFlag:" << bPopFlag;
stackBackInfo_.pop();
}
}
}
if (bPopFlag)
{
LogDebug << "frameId:" << pProcessData->iFrameId << " last one bPopFlag:" << bPopFlag;
stackBackInfo_.pop();
}
}
else
{
TrainBackInfo trainBackInfoTop_bak = stackBackInfo_.top();
stackBackInfo_.pop();
TrainBackInfo trainBackInfoTop = stackBackInfo_.top();
if (trainBackInfoTop.strAllClassType != strAllClassType)
{
stackBackInfo_.push(trainBackInfoTop_bak);
LogDebug << "帧:" << pProcessData->iFrameId << " 倒车信息:" << stackBackInfo_.size()
<< " 顶部倒车信息:" << trainBackInfoTop.strAllClassType << " 本次识别信息:" << strAllClassType;
}
else
{
// bPopFlag = true;
LogDebug << "帧:" << pProcessData->iFrameId << " 倒车信息:" << stackBackInfo_.size()
<< " 顶部倒车信息:" << trainBackInfoTop.strAllClassType << " 本次识别信息:" << strAllClassType
<< " 删除倒车信息:" << trainBackInfoTop_bak.strAllClassType;
}
// if(bPopFlag)
// {
// stackBackInfo_.pop();
// bPopFlag = false;
// }
}
return stackBackInfo_.empty() ? true : false;
}
@ -281,25 +296,23 @@ int FilterTrainStepOneEngine::GetTrainStatus(std::shared_ptr<ProcessData> pProce
{
if (iDirection_ == DIRECTION_UNKNOWN)
{
LogDebug << "sourceid:" << pProcessData->iDataSource << " frameId:" << pProcessData->iFrameId << " direction unknown trainStatus=1";
LogDebug << " frameId:" << pProcessData->iFrameId << " 未判断出行车方向,暂定认为火车正常行驶中";
return TRAINSTATUS_RUN;
}
std::shared_ptr<PostData> pPostData = std::static_pointer_cast<PostData>(pProcessData->pVoidData);
pPostData->iFrameId = pProcessData->iFrameId;
quePostData_.push(*pPostData.get());
// 1. 无框时,返回之前的列车状态
if (pPostData->vecPostSubData.size() == 0)
{
LogDebug << "sourceid:" << pProcessData->iDataSource << " frameId:" << pProcessData->iFrameId << " step1 no result trainStatus="<< iTrainStatus_;
quePostData_.pop();
return iTrainStatus_;
}
quePostData_.push(*pPostData.get());
if (quePostData_.size() < 3)
{
LogDebug << "sourceid:" << pProcessData->iDataSource << " frameId:" << pProcessData->iFrameId << " size < 3 trainStatus=1";
return TRAINSTATUS_RUN;
}
@ -310,7 +323,7 @@ int FilterTrainStepOneEngine::GetTrainStatus(std::shared_ptr<ProcessData> pProce
{
quePostData_.pop();
}
LogDebug << "queue front frameId:" << postDataFront.iFrameId << " queuesize:" << quePostData_.size() << " iSizeTemp:" << iSizeTemp;
LogDebug << "frameId:" << pProcessData->iFrameId << " 判断运动状态队列 第一帧:" << postDataFront.iFrameId << " 队列size:" << quePostData_.size() << " iSizeTemp:" << iSizeTemp;
bool bSameFlag = false;
int iDiffValue = iChkStopPX_;
@ -326,6 +339,7 @@ int FilterTrainStepOneEngine::GetTrainStatus(std::shared_ptr<ProcessData> pProce
*/
if (postSubDataFront.iTargetType != postSubDataBack.iTargetType)
{
LogDebug << "判断前后帧识别的是否一致 上一个:" << postSubDataFront.iTargetType << " 当前:" << postSubDataBack.iTargetType;
continue;
}
@ -340,7 +354,7 @@ int FilterTrainStepOneEngine::GetTrainStatus(std::shared_ptr<ProcessData> pProce
//位置比较大于10个像素则表示有移动。再判断时正向移动还是倒车
LogDebug << "frameId:" << pProcessData->iFrameId << " " << iCenterBack << "-" << iCenterFront
<< "=" << abs(iCenterBack - iCenterFront) << " iDiffValue:" << iDiffValue;
<< "=" << abs(iCenterBack - iCenterFront) << " 预期判定移动的差值为iDiffValue:" << iDiffValue;
if (abs(iCenterBack - iCenterFront) > iDiffValue)
{
iNotChgCount_ = 0;
@ -352,12 +366,12 @@ int FilterTrainStepOneEngine::GetTrainStatus(std::shared_ptr<ProcessData> pProce
if ((iCenterBack > iCenterFront && iDirection_ == DIRECTION_LEFT) ||
(iCenterBack < iCenterFront && iDirection_ == DIRECTION_RIGHT))
{
LogDebug << "frameId:" << pProcessData->iFrameId << " back1";
LogDebug << "frameId:" << pProcessData->iFrameId << " 检测到火车倒车";
return TRAINSTATUS_BACK;
}
else
{
LogDebug << "frameId:" << pProcessData->iFrameId << " run";
LogDebug << "frameId:" << pProcessData->iFrameId << " 正常行驶";
return TRAINSTATUS_RUN;
}
}
@ -369,15 +383,16 @@ int FilterTrainStepOneEngine::GetTrainStatus(std::shared_ptr<ProcessData> pProce
else
{
iNotChgCount_++;
LogDebug << " frameId:" << pProcessData->iFrameId << " no chg iNotChgCount:" << iNotChgCount_;
LogDebug << " frameId:" << pProcessData->iFrameId
<< " 大框移动范围小 判断停车计数:" << iNotChgCount_ << "/" << iChkStopCount_;
if (iNotChgCount_ > iChkStopCount_)
{
LogDebug << "frameId:" << pProcessData->iFrameId << " stop";
LogDebug << "frameId:" << pProcessData->iFrameId << " 检测到火车停车";
return TRAINSTATUS_STOP;
}
else
{
LogDebug << "frameId:" << pProcessData->iFrameId << " iTrainStatus_:" << iTrainStatus_;
// LogDebug << "frameId:" << pProcessData->iFrameId << " iTrainStatus_:" << iTrainStatus_;
return iTrainStatus_;
}
}
@ -443,8 +458,8 @@ int FilterTrainStepOneEngine::GetTrainStatus(std::shared_ptr<ProcessData> pProce
}
}
LogDebug << "frameId:" << pProcessData->iFrameId << " back2";
return TRAINSTATUS_BACK;
// LogDebug << "frameId:" << pProcessData->iFrameId << " back2";
return iTrainStatus_;
}
}
LogDebug << "frameId:" << pProcessData->iFrameId << " iNotChgCount_:" << iNotChgCount_ << " run run";
@ -830,31 +845,31 @@ void FilterTrainStepOneEngine::DealProcessDataPre(std::shared_ptr<ProcessData> p
{
if (iterHeadContinueCnt->second < 2 && it->iTargetType == HEAD)
{
LogError << "sourceid:" << iterProcessData->second->iDataSource << " frameId:" << iterProcessData->second->iFrameId << " Head wrong";
LogDebug << " frameId:" << iterProcessData->second->iFrameId << " Head 框因非连续识别而过滤";
it = pPostDataPre->vecPostSubData.erase(it);
continue;
}
if (iterProContinueCnt->second < 2 && it->iTargetType == PRO)
{
LogError << "sourceid:" << iterProcessData->second->iDataSource << " frameId:" << iterProcessData->second->iFrameId << " PRO wrong";
LogDebug << " frameId:" << iterProcessData->second->iFrameId << " PRO 框因非连续识别而过滤";
it = pPostDataPre->vecPostSubData.erase(it);
continue;
}
if (iterNumContinueCnt->second < 2 && it->iTargetType == NUM)
{
LogError << "sourceid:" << iterProcessData->second->iDataSource << " frameId:" << iterProcessData->second->iFrameId << " NUM wrong";
LogDebug << " frameId:" << iterProcessData->second->iFrameId << " NUM 框因非连续识别而过滤";
it = pPostDataPre->vecPostSubData.erase(it);
continue;
}
if (iterSpaceContinueCnt->second < 2 && it->iTargetType == SPACE)
{
LogError << "sourceid:" << iterProcessData->second->iDataSource << " frameId:" << iterProcessData->second->iFrameId << " SPACE wrong";
LogDebug << " frameId:" << iterProcessData->second->iFrameId << " SPACE 框因非连续识别而过滤";
it = pPostDataPre->vecPostSubData.erase(it);
continue;
}
if (iterTranSpaceContinueCnt->second < 2 && it->iTargetType == TRAINSPACE)
{
LogError << "sourceid:" << iterProcessData->second->iDataSource << " frameId:" << iterProcessData->second->iFrameId << " TRAINSPACE wrong";
LogDebug << " frameId:" << iterProcessData->second->iFrameId << " TRAINSPACE 框因非连续识别而过滤";
it = pPostDataPre->vecPostSubData.erase(it);
continue;
}

View File

@ -160,7 +160,7 @@ APP_ERROR SaveImgEngine::Process()
jvFrameInfo["rate"] = iRate;
jvFrameInfo["isEnd"] = pSaveImgData->bIsEnd;
MyUtils::getins()->WriteJsonInfo(jvFrameInfo, strTxtFilePath);
LogDebug << "engineId:" << engineId_ << " save success txt:" << strTxtFilePath;
// LogDebug << "engineId:" << engineId_ << " save success txt:" << strTxtFilePath;
}
}
return APP_ERR_OK;

View File

@ -119,10 +119,10 @@ void SaveStepOneResultEngine::DealCenterSpace(std::vector<PartionInfo> &vecParat
{
return;
}
LogDebug << "size:" << iVecSize << " frameId:" << pProcessData->iFrameId
<< " vecParationInfo[0].frameId:" << vecParationInfo.at(0).modelSpaceFrame
<< " vecParationInfo[size-1].frameId:" << vecParationInfo.at(iVecSize - 1).modelSpaceFrame
<< " isEnd:" << vecParationInfo.at(iVecSize - 1).bIsEnd;
LogDebug << "积累的车厢切分信息数:" << iVecSize << " :" << pProcessData->iFrameId
<< " 第一个车厢切分信息帧:" << vecParationInfo.at(0).modelSpaceFrame
<< " 最后一个车厢切分信息帧:" << vecParationInfo.at(iVecSize - 1).modelSpaceFrame
<< " 最后一个车厢切分信息是否为结束:" << vecParationInfo.at(iVecSize - 1).bIsEnd;
/*
(2023-02-28)
@ -134,9 +134,9 @@ void SaveStepOneResultEngine::DealCenterSpace(std::vector<PartionInfo> &vecParat
int iCenterXPre = vecParationInfo[i - 1].fLTX + (vecParationInfo[i - 1].fRBX - vecParationInfo[i - 1].fLTX) / 2;
int iCenterX = vecParationInfo[i].fLTX + (vecParationInfo[i].fRBX - vecParationInfo[i].fLTX) / 2;
bool bIntervalFlag = ((int)(vecParationInfo[i].modelSpaceFrame - vecParationInfo[i - 1].modelSpaceFrame)) > iSplitSpan_;
LogDebug << "frameidPre:" << vecParationInfo[i - 1].modelSpaceFrame << " iCenterXPre:" << iCenterXPre
<< " frameid:" << vecParationInfo[i].modelSpaceFrame << " iCenterX:" << iCenterX
<< " bIntervalFlag:" << bIntervalFlag << " i:" << i;
LogDebug << "上一帧ID:" << vecParationInfo[i - 1].modelSpaceFrame << " 上一帧间隔X轴中线:" << iCenterXPre
<< " 本帧ID:" << vecParationInfo[i].modelSpaceFrame << " 本帧间隔X轴中线:" << iCenterX
<< " 满足帧间隔:" << bIntervalFlag << " i:" << i;
if (iDirection_ == DIRECTION_LEFT && (iCenterXPre < iCenterX - iSplitSpanPX_) && bIntervalFlag)
{
vecSpacePos.push_back(i - 1);
@ -190,6 +190,8 @@ void SaveStepOneResultEngine::DealCenterSpace(std::vector<PartionInfo> &vecParat
pPartionInfo->startframe = dataSourceConfig_.iSkipInterval;
}
pPartionInfo->nStatus = ((pProcessData->iStatus == TRAINSTATUS_STOP) ? TRAIN_PAUSE : iDirection_);
LogWarn << "--------- 向Paration 发送数据 --------";
outputQueMap_[strPort0_]->push(std::static_pointer_cast<void>(pPartionInfo));
iPushSpaceFrameId_ = pPartionInfo->modelSpaceFrame;
@ -242,14 +244,18 @@ void SaveStepOneResultEngine::DealTrainSpaceInfo(std::shared_ptr<ProcessData> pP
{
iDirection_ = jvDirectionInfo["direction"].asInt();
}
else
{
LogWarn << "暂未检测出行车方向";
}
}
bool bIntervalFlag = ((int)(pProcessData->iFrameId - parationInfoLast_.modelSpaceFrame)) > iSplitSpan_;
int iCenterCur = jvStep1Space[0]["ltx"].asFloat() + (jvStep1Space[0]["rbx"].asFloat() - jvStep1Space[0]["ltx"].asFloat()) / 2;
int iCenterLast = parationInfoLast_.fLTX + (parationInfoLast_.fRBX - parationInfoLast_.fLTX) / 2;
LogDebug << "frameid:" << pProcessData->iFrameId << " centerCur:" << iCenterCur
<< " lastFrameid:" << parationInfoLast_.modelSpaceFrame << " centerLast:" << iCenterLast
<< " iDirection_:" << iDirection_ << " bIntervalFlag:" << bIntervalFlag << " bDealCenterFlag_:" << bDealCenterFlag_;
LogDebug << "当前帧:" << pProcessData->iFrameId << " 间隔框中心线:" << iCenterCur
<< " 上一帧:" << parationInfoLast_.modelSpaceFrame << " 间隔框中心线:" << iCenterLast
<< " 行车方向:" << iDirection_ << " 是否满足切分帧数:" << bIntervalFlag << " bDealCenterFlag_:" << bDealCenterFlag_;
if (iDirection_ == DIRECTION_UNKNOWN || iCenterLast == 0)
{
@ -418,7 +424,7 @@ void SaveStepOneResultEngine::SplitTrainByNumPro(std::shared_ptr<PartionInfo> &p
pPartionInfoNew->i64StartTimeStamp = i64TimeStampFirst_;
pPartionInfoNew->startframe = dataSourceConfig_.iSkipInterval;
}
pPartionInfoNew->nStatus = ((pProcessData->iStatus == TRAINSTATUS_STOP) ? TRAIN_PAUSE : iDirection_);
// pPartionInfoNew->nStatus = ((pProcessData->iStatus == TRAINSTATUS_STOP) ? TRAIN_PAUSE : iDirection_);
//构造一个间隔信息写入到切分帧中
char szCameraNo[5] = {0};
@ -437,6 +443,7 @@ void SaveStepOneResultEngine::SplitTrainByNumPro(std::shared_ptr<PartionInfo> &p
jvFrameInfo["step1Space"].append(jvOneSpace);
MyUtils::getins()->WriteJsonInfo(jvFrameInfo, strFilePath);
LogWarn << "--------- 向Paration 发送数据 --------";
outputQueMap_[strPort0_]->push(std::static_pointer_cast<void>(pPartionInfoNew));
iPushSpaceFrameId_ = pPartionInfoNew->modelSpaceFrame;
@ -681,6 +688,7 @@ APP_ERROR SaveStepOneResultEngine::Process()
//最后一节和倒数第二节之间的间隔未能识别时,此时也需要通过车号属性切分下。
SplitTrainByNumPro(pPartionInfo, pProcessData);
LogWarn << "--------- 向Paration 发送数据 --------";
outputQueMap_[strPort0_]->push(std::static_pointer_cast<void>(pPartionInfo));
iPushSpaceFrameId_ = pPartionInfo->modelSpaceFrame;

View File

@ -149,7 +149,6 @@ APP_ERROR TrainParationMgr::Process()
int nSize = lstPartInfo.size();
int nPartionIndex = nSize - 1;
int nPrePartionIndex = nPartionIndex;
//当然车厢通过的数量
if (nSize == 0) {
@ -166,37 +165,30 @@ APP_ERROR TrainParationMgr::Process()
lstPartInfo.push_back(stTempInfo);
//lstPartInfo.push_back(stTempInfo);
nPartionIndex++;
}
{
lstPartInfo[nPartionIndex].i64EndTimeStamp = pPartionInfo->i64EndTimeStamp;
lstPartInfo[nPartionIndex].endframe = pPartionInfo->modelSpaceFrame;
// 根据开始帧时间戳和结束帧时间错 计算当节车厢的行车速度
// LogInfo << "TrainAnaEngine checkPartion bPartion == true lstPartInfo[nPrePartionIndex].ftime" << abs(lstPartInfo[nPrePartionIndex].i64EndTimeStamp - lstPartInfo[nPrePartionIndex].i64StartTimeStamp);
// 根据时间戳计算时间差
LogInfo << "-测试-----测试-----测试------测试-";
float nTimePassed = (abs(lstPartInfo[nPartionIndex].i64EndTimeStamp - lstPartInfo[nPartionIndex].i64StartTimeStamp)) * 1.0;
//防止停车导致速度过小
if(pPartionInfo->nStatus != TRAIN_PAUSE && nTimePassed <= 50000) {
lstPartInfo[nPartionIndex].fspeed = (TRAIN_WIDTH * 1000.0) /nTimePassed;
} else {
if (nPartionIndex >= 1){
lstPartInfo[nPartionIndex].fspeed = lstPartInfo[nPartionIndex - 1].fspeed / 3;
} else {
lstPartInfo[nPartionIndex].fspeed = TRAIN_DEFAULT_SPEED / 10;
}
}
//
//nSamePartionIgnoreCount = (nTimePassed / (3 * 5000)) * nFrameRate;
// 结束帧为当前帧再往后 (除以2的原因中间为车钩车钩后的车体宽度为整个镜头的宽度除以2)
//lstPartInfo[nPrePartionIndex].endframe = pPartionInfo->modelSpaceFrame;
//LogInfo << "TrainAnaEngine checkPartion bPartion == true lstPartInfo[nPrePartionIndex].endframe" << lstPartInfo[nPrePartionIndex].endframe;
lstPartInfo[nPartionIndex].bmodelconfirmed = true;
}
LogInfo << "-测试3-----测试3-----测试3------测试3-";
lstPartInfo[nPartionIndex].i64EndTimeStamp = pPartionInfo->i64EndTimeStamp;
lstPartInfo[nPartionIndex].endframe = pPartionInfo->modelSpaceFrame;
// 根据开始帧时间戳和结束帧时间错 计算当节车厢的行车速度
// 根据时间戳计算时间差
float nTimePassed = (abs(lstPartInfo[nPartionIndex].i64EndTimeStamp - lstPartInfo[nPartionIndex].i64StartTimeStamp)) * 1.0;
//防止停车导致速度过小
if(pPartionInfo->nStatus != TRAIN_PAUSE && nTimePassed <= 50000) {
lstPartInfo[nPartionIndex].fspeed = (TRAIN_WIDTH * 1000.0) /nTimePassed;
} else {
if (nPartionIndex >= 1){
lstPartInfo[nPartionIndex].fspeed = lstPartInfo[nPartionIndex - 1].fspeed / 3;
} else {
lstPartInfo[nPartionIndex].fspeed = TRAIN_DEFAULT_SPEED / 10;
}
}
//
//nSamePartionIgnoreCount = (nTimePassed / (3 * 5000)) * nFrameRate;
// 结束帧为当前帧再往后 (除以2的原因中间为车钩车钩后的车体宽度为整个镜头的宽度除以2)
lstPartInfo[nPartionIndex].bmodelconfirmed = true;
/// write json info to file
//先读取文本内容,追加新的信息后再写入
@ -204,135 +196,57 @@ APP_ERROR TrainParationMgr::Process()
Json::Value jvPartionInfo;
//JSON保存路径
std::string strFilePath;
bool brightcome = false;
int nrightoffset = 0;
if (pPartionInfo->nStatus == 1) {
brightcome = true;
// nrightoffset = -1;
}
//检测到车厢划分信息
{
strFilePath = strResultPath_ + pPartionInfo->strTrainDate + "/" + pPartionInfo->strTrainName + "/"
+ std::to_string(nPartionIndex + 1) + ".txt";
LogInfo << "-测试2-----测试2-----测试2------测试2-";
// if (nPartionIndex == 0) {
// lstPartInfo[nPartionIndex].endframe = lstPartInfo[nPartionIndex].endframe - nrightoffset * (lstPartInfo[nPartionIndex].fLTX - METHOD_BASE_WIDTH) / 10;
// } else {
// lstPartInfo[nPartionIndex].endframe = lstPartInfo[nPartionIndex].endframe - nrightoffset * getCouplerOffsetPix(lstPartInfo[nPartionIndex].fspeed, lstPartInfo[nPartionIndex].endframe);
// }
//lstPartInfo[nPartionIndex].endframe = lstPartInfo[nPartionIndex].endframe + getOffsetFrame(lstPartInfo[nPartionIndex].fspeed, (TRAIN_IN_CAMERA_WIDTH / 2), nFrameRate);
strFilePath = strResultPath_ + pPartionInfo->strTrainDate + "/" + pPartionInfo->strTrainName + "/"
+ std::to_string(nPartionIndex + 1) + ".txt";
LogInfo << "TrainAnaEngine Process lstPartInfo[nPartionIndex].startframe" << lstPartInfo[nPartionIndex].startframe ;
LogInfo << "TrainAnaEngine Process lstPartInfo[nPartionIndex].endframe" << lstPartInfo[nPartionIndex].endframe;
// 首部车钩的偏移位置 (单位帧)
int headpos = 0;
// 尾部车钩的偏移位置 (单位帧)
int tailpos = (0 - nTailPixOffset);
PartionInfo stTempInfo;
// 开始记录新的一节车厢信息(从索引变成序号+1 ,新增一节车厢信息+1)
stTempInfo.nindex = nPartionIndex + 2;
// 上一节车厢的结束帧 - (偏移帧 = (镜头内的车体宽度/ (速度) -> 通过时间) * 帧/秒 ) 作为下一节车厢的开始帧
int ntempOffsetFrame = lstPartInfo[nPartionIndex].endframe;
stTempInfo.startframe = ntempOffsetFrame;
stTempInfo.i64StartTimeStamp = pPartionInfo->i64EndTimeStamp;
// 初始化下一节的结束帧
//stTempInfo.endframe = 0;
//if (nPartionIndex == 0)
{
headpos = METHOD_BASE_WIDTH / 2;
tailpos = tailpos + headpos;
}
// 是否位右侧来车
if (brightcome == true)
{
//brightcome = true;
// 右侧来车 首部车钩从画面最右侧开始
headpos = METHOD_BASE_WIDTH / 2;
// 右侧来车 尾部车钩从画面最右侧+车厢宽的像素值
tailpos = headpos + nTailPixOffset;
/*
if (nPartionIndex == 0)
{
headpos = METHOD_BASE_WIDTH / 2;
tailpos = tailpos - headpos;
}
*/
}
LogInfo << "TrainAnaEngine Process lstPartInfo[nPartionIndex].startframe" << lstPartInfo[nPartionIndex].startframe ;
LogInfo << "TrainAnaEngine Process lstPartInfo[nPartionIndex].endframe" << lstPartInfo[nPartionIndex].endframe;
//从当节车厢的开始帧到结束帧计算首部车钩和尾部车钩的偏移值
// for (int nplayframe = lstPartInfo[nPartionIndex].startframe; nplayframe <= lstPartInfo[nPartionIndex].endframe; nplayframe++)
// {
// Json::Value jvposInfo;
// // 当前车厢的第几几帧
// int noffsetindex = (nplayframe - lstPartInfo[nPartionIndex].startframe);
// // 根据车速计算车钩位置量(单位 像素)
// int noffsetpos = getCouplerOffsetPosition(lstPartInfo[nPartionIndex].fspeed, noffsetindex);
// // 初始化首部车钩偏移量(单位 像素)
// jvposInfo["headpos"] = -1;
// // 初始化尾部车钩偏移量(单位 像素)
// jvposInfo["tailpos"] = -1;
// if (brightcome == false) {
// // 左侧来车
// // 首部车钩和尾部车钩 每帧加 车钩偏移值
// jvposInfo["headpos"] = (headpos + noffsetpos);
// jvposInfo["tailpos"] = (tailpos + noffsetpos);
// } else {
// // 右侧来车
// // 首部车钩和尾部车钩 每帧减 车钩偏移值
// jvposInfo["headpos"] = (headpos - noffsetpos);
// jvposInfo["tailpos"] = (tailpos - noffsetpos);
// }
// //LogInfo << "TrainAnaEngine Process jvposInfo[headpos]" << jvposInfo["headpos"];
// // LogInfo << "TrainAnaEngine Process jvposInfo[tailpos]:" << jvposInfo["tailpos"];
// //LogInfo << "TrainAnaEngine Process jvPartionListInfo.append";
// jvPartionInfo[std::to_string(nplayframe)] = jvposInfo;
// }
PartionInfo stTempInfo;
// 开始记录新的一节车厢信息(从索引变成序号+1 ,新增一节车厢信息+1)
stTempInfo.nindex = nPartionIndex + 2;
// 上一节车厢的结束帧 - (偏移帧 = (镜头内的车体宽度/ (速度) -> 通过时间) * 帧/秒 ) 作为下一节车厢的开始帧
int ntempOffsetFrame = lstPartInfo[nPartionIndex].endframe;
//
//- (int)(((TRAIN_IN_CAMERA_WIDTH / 2) / lstPartInfo[nPartionIndex].fspeed) * nFrameRate);
//LogInfo << "TrainAnaEngine Process ntempOffsetFrame:" << ntempOffsetFrame;
stTempInfo.startframe = ntempOffsetFrame;
stTempInfo.i64StartTimeStamp = pPartionInfo->i64EndTimeStamp;
// 初始化下一节的结束帧
//stTempInfo.endframe = 0;
lstPartInfo.push_back(stTempInfo);
lstPartInfo.push_back(stTempInfo);
// 记录过车日期
jvPartionInfo["trainDate"] = pPartionInfo->strTrainDate;
// 记录过车时间
jvPartionInfo["trainName"] = pPartionInfo->strTrainName;
// 记录车厢节数 (索引从0开始 所以这里+1)
jvPartionInfo["trainNo"] = nPartionIndex + 1;
// 记录行车开始帧
jvPartionInfo["startFrameId"] = lstPartInfo[nPartionIndex].startframe;
jvPartionInfo["startTimeStamp"] = lstPartInfo[nPartionIndex].i64StartTimeStamp;
// 记录行车结束帧
jvPartionInfo["endFrameId"] = lstPartInfo[nPartionIndex].endframe;
jvPartionInfo["endTimeStamp"] = lstPartInfo[nPartionIndex].i64EndTimeStamp;
// 记录车厢是否完全通过
jvPartionInfo["isEnd"] = pPartionInfo->bIsEnd;
// 记录过车日期
jvPartionInfo["trainDate"] = pPartionInfo->strTrainDate;
// 记录过车时间
jvPartionInfo["trainName"] = pPartionInfo->strTrainName;
// 记录车厢节数 (索引从0开始 所以这里+1)
jvPartionInfo["trainNo"] = nPartionIndex + 1;
// 记录行车开始帧
jvPartionInfo["startFrameId"] = lstPartInfo[nPartionIndex].startframe;
jvPartionInfo["startTimeStamp"] = lstPartInfo[nPartionIndex].i64StartTimeStamp;
// 记录行车结束帧
jvPartionInfo["endFrameId"] = lstPartInfo[nPartionIndex].endframe;
jvPartionInfo["endTimeStamp"] = lstPartInfo[nPartionIndex].i64EndTimeStamp;
// 记录车厢是否完全通过
jvPartionInfo["isEnd"] = pPartionInfo->bIsEnd;
//是否是间隔模型切分的车厢
jvPartionInfo["modelconfirmed"] = pPartionInfo->bmodelconfirmed;
//是否是间隔模型切分的车厢
jvPartionInfo["modelconfirmed"] = pPartionInfo->bmodelconfirmed;
// 记录当前车厢的信息到JSON文件
MyUtils::getins()->WriteJsonInfo(jvPartionInfo, strFilePath);
std::shared_ptr<TrainRange> pTrainRange = std::make_shared<TrainRange>();
pTrainRange->strTrainDate = jvPartionInfo["trainDate"].asString();
pTrainRange->strTrainName = jvPartionInfo["trainName"].asString();
pTrainRange->iTrainIndex = jvPartionInfo["trainNo"].asInt();
pTrainRange->iStartFrameId = jvPartionInfo["startFrameId"].asInt();
pTrainRange->i64StartTimeStamp = jvPartionInfo["startTimeStamp"].asInt64();
pTrainRange->iEndFrameId = jvPartionInfo["endFrameId"].asInt();
pTrainRange->i64EndTimeStamp = jvPartionInfo["endTimeStamp"].asInt64();
pTrainRange->bIsEnd = jvPartionInfo["isEnd"].asBool();
pTrainRange->bmodelconfirmed = jvPartionInfo["modelconfirmed"].asBool();
iRet = outputQueMap_[strPort0_]->push(std::static_pointer_cast<void>(pTrainRange));
}
// 记录当前车厢的信息到JSON文件
MyUtils::getins()->WriteJsonInfo(jvPartionInfo, strFilePath);
std::shared_ptr<TrainRange> pTrainRange = std::make_shared<TrainRange>();
pTrainRange->strTrainDate = jvPartionInfo["trainDate"].asString();
pTrainRange->strTrainName = jvPartionInfo["trainName"].asString();
pTrainRange->iTrainIndex = jvPartionInfo["trainNo"].asInt();
pTrainRange->iStartFrameId = jvPartionInfo["startFrameId"].asInt();
pTrainRange->i64StartTimeStamp = jvPartionInfo["startTimeStamp"].asInt64();
pTrainRange->iEndFrameId = jvPartionInfo["endFrameId"].asInt();
pTrainRange->i64EndTimeStamp = jvPartionInfo["endTimeStamp"].asInt64();
pTrainRange->bIsEnd = jvPartionInfo["isEnd"].asBool();
pTrainRange->bmodelconfirmed = jvPartionInfo["modelconfirmed"].asBool();
iRet = outputQueMap_[strPort0_]->push(std::static_pointer_cast<void>(pTrainRange));
if (pPartionInfo->bIsEnd) {
lstPartInfo.clear();

View File

@ -101,7 +101,7 @@ void TransTrainEngine::InitParam()
*/
bool TransTrainEngine::AuthTransNum(int classId, const std::string &trainNum)
{
LogInfo << "classId:" << classId << " trainNum:" << trainNum;
// LogInfo << "classId:" << classId << " trainNum:" << trainNum;
switch (classId)
{
case TRAIN_HEAD: // 车头上的编号
@ -774,8 +774,8 @@ APP_ERROR TransTrainEngine::Process()
{
strTemp += vecClassNames_.at(it->second.at(j).iClassId);
}
LogDebug << "step2 char sourceid:" << pProcessData->iDataSource << " frameid:" << pProcessData->iFrameId
<< " bigclassId:" << postSubData.iBigClassId << " line:" << it->first << "," << strTemp;
// LogDebug << "step2 char sourceid:" << pProcessData->iDataSource << " frameid:" << pProcessData->iFrameId
// << " bigclassId:" << postSubData.iBigClassId << " line:" << it->first << "," << strTemp;
}
TransSubData transSubData;

View File

@ -1,146 +0,0 @@
#include "TestImgEngine.h"
#include <iostream>
#include <algorithm>
#include <string>
#include <stdio.h>
#include <stdarg.h>
#include <sys/time.h>
#include <string.h>
#include <vector>
#include <memory>
using namespace std;
using namespace ai_matrix;
// Default constructor: nothing to initialise here; all setup is deferred to Init().
TestImgEngine::TestImgEngine() {}
// Destructor: no owned resources; teardown (if any) happens in DeInit().
TestImgEngine::~TestImgEngine() {}
/// @brief Initialise the test-image engine: build the output-port key, load
///        the per-camera data-source configuration and set the default frame
///        geometry. No failure path exists.
/// @return APP_ERR_OK always.
APP_ERROR TestImgEngine::Init()
{
    // Downstream queues are keyed by the convention "<engineName>_<engineId>_0".
    strPort0_ = engineName_ + "_" + std::to_string(engineId_) + "_0";
    // Per-camera parameters come from the YAML configuration, indexed by engine id.
    dataSourceConfig_ = MyYaml::GetIns()->GetDataSourceConfigById(engineId_);
    width_ = IMAGE_WIDTH;
    height_ = IMAGE_HEIGHT;
    LogInfo << "engineId_:" << engineId_ << " TestImgEngine Init ok";
    return APP_ERR_OK;
}
/// @brief Tear the engine down. Init() allocates nothing, so this only logs
///        the shutdown for diagnostics.
/// @return APP_ERR_OK always.
APP_ERROR TestImgEngine::DeInit()
{
    LogInfo << "engineId_:" << engineId_ << " TestImgEngine DeInit ok";
    return APP_ERR_OK;
}
//测试jpeg解码时打开,并修改相应的yaml配置引擎间通信
#if 0
/// @brief Test producer loop: repeatedly reads a JPEG file from disk (path
///        taken from the "jpeg_image_file_name" YAML key) and pushes its raw
///        bytes downstream as a FrameData, at roughly 30 ms intervals.
/// @return APP_ERR_OK once the engine is asked to stop.
APP_ERROR TestImgEngine::Process()
{
    int iRet = APP_ERR_OK;
    while (!isStop_)
    {
        // The path is re-read every iteration so the file can be swapped at runtime.
        std::string jpeg_img_file_name = MyYaml::GetIns()->GetStringValue("jpeg_image_file_name");
        FILE *jpeg_fp = fopen(jpeg_img_file_name.c_str(), "r");
        if (!jpeg_fp)
        {
            // BUG FIX: the original fell through after a failed fopen and called
            // fseek/fread on a null FILE*, crashing when the file was missing.
            // Log and retry instead.
            std::cerr<<"Can not open "<<jpeg_img_file_name.c_str()<<std::endl;
            usleep(30 * 1000);
            continue;
        }
        // Determine the file size by seeking to the end.
        fseek(jpeg_fp, 0L, SEEK_END);
        unsigned int pJPEGBuffer_Size = ftell(jpeg_fp);
        fseek(jpeg_fp, 0L, SEEK_SET);
        uint8_t *pJPEGBuffer = new uint8_t[pJPEGBuffer_Size];
        // BUG FIX: the original ignored fread's return value; a short read would
        // have pushed a partially-filled buffer downstream.
        size_t readBytes = fread((char*)pJPEGBuffer, 1, pJPEGBuffer_Size, jpeg_fp);
        fclose(jpeg_fp);
        if (readBytes != pJPEGBuffer_Size)
        {
            delete[] pJPEGBuffer;
            usleep(30 * 1000);
            continue;
        }
        // Package the bytes and hand ownership to a shared_ptr with an array
        // deleter. BUG FIX: the original deleter did `delete[]` through void*,
        // which is undefined behaviour; cast back to the allocated type first.
        std::shared_ptr<FrameData> pJPEGFrameData = std::make_shared<FrameData>();
        pJPEGFrameData->iDataSource = engineId_;
        pJPEGFrameData->iSize = pJPEGBuffer_Size;
        pJPEGFrameData->pData.reset(pJPEGBuffer, [](void* data){ if (data) { delete[] static_cast<uint8_t*>(data); } });
        pJPEGFrameData->i64TimeStamp = MyUtils::getins()->GetCurrentTimeMillis();
        iRet = outputQueMap_[strPort0_]->push(std::static_pointer_cast<void>(pJPEGFrameData));
        if (iRet != APP_ERR_OK)
        {
            LogError << "push the jpeg image data failed...";
            std::cerr<<"push the jpeg image data failed..."<<std::endl;
        }
        usleep(30 * 1000);  // ~30 fps pacing for the test feed
    }
    // BUG FIX: the original flowed off the end of a non-void function (UB).
    return APP_ERR_OK;
}
#else
//测试H264编码或者jpeg编码打开,并修改相应的yaml配置引擎间通信
/// @brief Test producer loop: repeatedly reads a raw YUV420M image file from
///        disk (path from the "yuv420m_image_file_name" YAML key) and pushes
///        its bytes downstream as a FrameData, at roughly 30 ms intervals.
/// @return APP_ERR_OK once the engine is asked to stop.
APP_ERROR TestImgEngine::Process()
{
    int iRet = APP_ERR_OK;
    while (!isStop_)
    {
        // The path is re-read every iteration so the file can be swapped at runtime.
        std::string yuv420m_img_file_name = MyYaml::GetIns()->GetStringValue("yuv420m_image_file_name");
        FILE *yuv420m_fp = fopen(yuv420m_img_file_name.c_str(), "rb");
        if (!yuv420m_fp)
        {
            // BUG FIX: the original fell through after a failed fopen and called
            // fseek/fread on a null FILE*, crashing when the file was missing.
            // Log and retry instead.
            std::cerr<<"Can not open "<<yuv420m_img_file_name.c_str()<<std::endl;
            usleep(30 * 1000);
            continue;
        }
        // Determine the file size by seeking to the end.
        fseek(yuv420m_fp, 0L, SEEK_END);
        unsigned int pYUV420MBuffer_Size = ftell(yuv420m_fp);
        fseek(yuv420m_fp, 0L, SEEK_SET);
        uint8_t *pYUV420MBuffer = new uint8_t[pYUV420MBuffer_Size];
        // BUG FIX: the original ignored fread's return value; a short read would
        // have pushed a partially-filled buffer downstream.
        size_t readBytes = fread((char*)pYUV420MBuffer, 1, pYUV420MBuffer_Size, yuv420m_fp);
        fclose(yuv420m_fp);
        if (readBytes != pYUV420MBuffer_Size)
        {
            delete[] pYUV420MBuffer;
            usleep(30 * 1000);
            continue;
        }
        // Package the bytes and hand ownership to a shared_ptr with an array
        // deleter. BUG FIX: the original deleter did `delete[]` through void*,
        // which is undefined behaviour; cast back to the allocated type first.
        std::shared_ptr<FrameData> pYUV420MFrameData = std::make_shared<FrameData>();
        pYUV420MFrameData->iDataSource = engineId_;
        pYUV420MFrameData->iSize = pYUV420MBuffer_Size;
        pYUV420MFrameData->pData.reset(pYUV420MBuffer, [](void* data){ if (data) { delete[] static_cast<uint8_t*>(data); } });
        pYUV420MFrameData->i64TimeStamp = MyUtils::getins()->GetCurrentTimeMillis();
        iRet = outputQueMap_[strPort0_]->push(std::static_pointer_cast<void>(pYUV420MFrameData));
        if (iRet != APP_ERR_OK)
        {
            LogError << "push the yuv420m image data failed...";
            std::cerr<<"push the yuv420m image data failed..."<<std::endl;
        }
        usleep(30 * 1000);  // ~30 fps pacing for the test feed
    }
    // BUG FIX: the original flowed off the end of a non-void function (UB).
    return APP_ERR_OK;
}
#endif

View File

@ -1,45 +0,0 @@
//读取图像引擎(用于测试)
#ifndef _TEST_IMG_ENGINE_H
#define _TEST_IMG_ENGINE_H
#include <iostream>
#include <chrono>
#include <cmath>
#include <utility>
#include <thread>
#include <chrono>
#include <functional>
#include <atomic>
#include <time.h>
#include <unistd.h>
#include <queue>
#include <mutex>
#include <semaphore.h>
#include "EngineBase.h"
#include "EngineFactory.h"
#include "MyYaml.h"
#include "myutils.h"
#include "AppCommon.h"
// Test image-source engine: feeds the pipeline with an image file read from
// disk, simulating a camera input (used for testing the downstream engines).
class TestImgEngine : public ai_matrix::EngineBase
{
public:
TestImgEngine();
~TestImgEngine();
APP_ERROR Init() override;    // builds the output-port key and loads camera config
APP_ERROR DeInit() override;  // log-only teardown
APP_ERROR Process() override; // loop: read image file, push FrameData downstream
private:
ai_matrix::DataSourceConfig dataSourceConfig_; // per-camera parameters from YAML
std::string strPort0_;                         // output queue key "<engine>_<id>_0"
unsigned int width_, height_;                  // default frame geometry (IMAGE_WIDTH/HEIGHT)
};
#endif //END OF _TEST_IMG_ENGINE_H

View File

@ -1,109 +0,0 @@
#include "VideoEngine.h"
using namespace std;
using namespace cv;
using namespace ai_matrix;
// Default constructor: nothing to initialise here; all setup is deferred to Init().
VideoEngine::VideoEngine() {}
// Destructor: no owned resources; teardown (if any) happens in DeInit().
VideoEngine::~VideoEngine() {}
/// @brief Initialise the RTSP video engine: load the per-camera configuration
///        and default frame geometry. The stream itself is opened in Process().
/// @return APP_ERR_OK always.
APP_ERROR VideoEngine::Init()
{
    LogInfo << "engineId_:" << engineId_ << " VideoEngine Init start";
    // Downstream queues are keyed by the convention "<engineName>_<engineId>_0".
    strPort0_ = engineName_ + "_" + std::to_string(engineId_) + "_0";
    // Per-camera parameters (e.g. the RTSP URL) come from the YAML configuration.
    dataSourceConfig_ = MyYaml::GetIns()->GetDataSourceConfigById(engineId_);
    width_ = IMAGE_WIDTH;
    height_ = IMAGE_HEIGHT;
    LogInfo << "engineId_:" << engineId_ << " VideoEngine Init ok";
    return APP_ERR_OK;
}
/// @brief Tear the engine down. Init() allocates nothing, so this only logs
///        the shutdown for diagnostics.
/// @return APP_ERR_OK always.
APP_ERROR VideoEngine::DeInit()
{
    LogInfo << "engineId_:" << engineId_ << " VideoEngine DeInit ok";
    return APP_ERR_OK;
}
/// @brief Pull an RTSP stream via OpenCV + GStreamer (nvv4l2decoder hardware
///        H.264 decode, converted to BGR in the pipeline) and push each frame
///        downstream as a FrameData.
/// @return APP_ERR_OK once the engine is asked to stop.
APP_ERROR VideoEngine::Process()
{
    int iRet = APP_ERR_OK;
    // GStreamer pipeline: rtspsrc -> rtph264depay -> h264parse -> nvv4l2decoder
    // (hardware decode; requires enable-max-performance / enable-frame-type-reporting
    // to be set) -> nvvidconv -> BGRx -> videoconvert -> appsink.
    const std::string videoStreamAddress = std::string("rtspsrc location=") + dataSourceConfig_.strUrl.c_str() + " latency=10 ! \
rtph264depay ! h264parse ! nvv4l2decoder enable-max-performance=1 enable-frame-type-reporting=1 ! nvvidconv ! video/x-raw, format=(string)BGRx ! videoconvert ! appsink";
    cv::VideoCapture capture;
    // Retry until the stream opens; the camera may come up after this process.
    while (!capture.open(videoStreamAddress))
    {
        std::cerr<<"Opening video stream or file failed!!!" <<std::endl;
        std::cout<<"Restart Opening video stream or file ..."<<std::endl;
        sleep(1);
    }
    std::cout<<"Opening video stream or file Success"<<std::endl;
    const int frameW = capture.get(3);  // CAP_PROP_FRAME_WIDTH
    const int frameH = capture.get(4);  // CAP_PROP_FRAME_HEIGHT
    std::cout << dataSourceConfig_.strUrl.c_str() << ";"<< "frameW:" << frameW << " frameH:" << frameH << std::endl;
    while (!isStop_)
    {
        // BUG FIX: size the buffer from the stream's ACTUAL dimensions. The
        // original allocated width_*height_*3 but wrapped the Mat with
        // frameW x frameH, overflowing the heap whenever the camera resolution
        // exceeded the configured IMAGE_WIDTH/IMAGE_HEIGHT.
        const unsigned int pBGRBuffer_Size = (unsigned int)frameW * (unsigned int)frameH * 3u;
        uint8_t *pBGRBuffer = new uint8_t[pBGRBuffer_Size];
        // Wrap the buffer so capture.read() decodes directly into it (no extra copy).
        cv::Mat frame(frameH, frameW, CV_8UC3, pBGRBuffer);
        if (!capture.read(frame))
        {
            // BUG FIX: the original called cv::waitKey() here (which blocks
            // forever without a HighGUI window) and then pushed the
            // uninitialised buffer downstream anyway. Drop the frame and retry.
            std::cerr << "no frame" << std::endl;
            delete[] pBGRBuffer;
            usleep(30 * 1000);
            continue;
        }
        // Package the frame and hand ownership to a shared_ptr with an array
        // deleter. BUG FIX: the original deleter did `delete[]` through void*,
        // which is undefined behaviour; cast back to the allocated type first.
        std::shared_ptr<FrameData> pBGRFrameData = std::make_shared<FrameData>();
        pBGRFrameData->iDataSource = engineId_;
        pBGRFrameData->iSize = pBGRBuffer_Size;
        pBGRFrameData->pData.reset(pBGRBuffer, [](void* data){ if (data) { delete[] static_cast<uint8_t*>(data); } });
        pBGRFrameData->i64TimeStamp = MyUtils::getins()->GetCurrentTimeMillis();
        iRet = outputQueMap_[strPort0_]->push(std::static_pointer_cast<void>(pBGRFrameData));
        if (iRet != APP_ERR_OK)
        {
            LogError << "push the bgr frame data failed...";
            std::cerr<<"push the bgr frame data failed..."<<std::endl;
        }
    }
    // BUG FIX: the original flowed off the end of a non-void function (UB).
    return APP_ERR_OK;
}

View File

@ -1,69 +0,0 @@
//OpenCV RTSP拉流引擎(包含视频解码)
#ifndef _VIDEO_ENGINE_H
#define _VIDEO_ENGINE_H
#include <iostream>
#include <chrono>
#include <cmath>
#include <utility>
#include <thread>
#include <chrono>
#include <functional>
#include <atomic>
#include <time.h>
#include <sys/time.h>
#include <unistd.h>
#include <queue>
#include <mutex>
#include <semaphore.h>
#include <algorithm>
#include <string>
#include <stdio.h>
#include <stdarg.h>
#include <string.h>
#include <vector>
#include <memory>
#include <opencv2/opencv.hpp>
#include <opencv2/core/core.hpp>
#include <opencv2/highgui/highgui.hpp>
#include <opencv2/imgproc/imgproc.hpp>
#ifdef __cplusplus
extern "C"
{
#endif
#include <libavcodec/avcodec.h>
#include <libavutil/samplefmt.h>
#include <libavformat/avformat.h>
#ifdef __cplusplus
};
#endif
#include "EngineBase.h"
#include "EngineFactory.h"
#include "MyYaml.h"
#include "myutils.h"
#include "AppCommon.h"
#define RTSP_PULL_CAMERA_VIDEO_STREAM
// RTSP pull engine: opens a camera's RTSP stream through an OpenCV/GStreamer
// pipeline (hardware H.264 decode) and pushes decoded BGR frames downstream.
class VideoEngine : public ai_matrix::EngineBase
{
public:
VideoEngine();
~VideoEngine();
APP_ERROR Init() override;    // loads camera config and builds the output-port key
APP_ERROR DeInit() override;  // log-only teardown
APP_ERROR Process() override; // loop: open stream, read frames, push FrameData
private:
ai_matrix::DataSourceConfig dataSourceConfig_; // per-camera parameters from YAML (RTSP URL etc.)
std::string strPort0_;                         // output queue key "<engine>_<id>_0"
unsigned int width_, height_;                  // configured frame geometry (IMAGE_WIDTH/HEIGHT)
};
#endif //_VIDEO_ENGINE_H

View File

@ -111,13 +111,8 @@ APP_ERROR MoveEngine::ReadModelInfo()
model_width = jvModelInfo["model_width"].asInt();
model_height = jvModelInfo["model_height"].asInt();
//clear_num = jvModelInfo["clear"].isArray() ? jvModelInfo["clear"].size() : 0;
//class_num = jvModelInfo["class"].isArray() ? jvModelInfo["class"].size() : 0;
input_size = GET_INPUT_SIZE(model_width , model_height);
output_size = GET_OUTPUT_SIZE(model_width , model_height, clear_num , class_num);
// det_size = clear_num + class_num + 5;
// score_threshold = modelConfig_.fScoreThreshold;
// nms_threshold = modelConfig_.fNMSTreshold;
return APP_ERR_OK;
}
@ -282,7 +277,7 @@ APP_ERROR MoveEngine::Process()
nType = n;
}
}
LogDebug <<"模型得分 车头:"<< fReturnVal[0]<<" 无车:"<< fReturnVal[1]<<" 车尾:"<< fReturnVal[2]<<" 有车:"<< fReturnVal[3];
// LogDebug <<"模型得分 车头:"<< fReturnVal[0]<<" 无车:"<< fReturnVal[1]<<" 车尾:"<< fReturnVal[2]<<" 有车:"<< fReturnVal[3];
// LogInfo<<"来车当前状态:"<< (nType == 0 ? "有车头" : (nType == 1 ? "无车"));
switch (nType) {
case 0:
@ -315,7 +310,7 @@ APP_ERROR MoveEngine::Process()
if (bGetTrainExist == true)
{
iHasTrainNum_ = iHasTrainNum_ > 20 ? iHasTrainNum_ : iHasTrainNum_ + 1;
if (iHasTrainNum_ > 0) LogDebug << "当前有车, 计数:" << iHasTrainNum_;
// if (iHasTrainNum_ > 0) LogDebug << "当前有车, 计数:" << iHasTrainNum_;
}
else
{

View File

@ -194,7 +194,7 @@ void TrainStepOneEngine::FilterInvalidInfo(std::vector<stDetection> &vecRet, std
std::vector<stDetection> vecSpaceInfo;
for (auto it = vecRet.begin(); it != vecRet.end();)
{
LogDebug << "sourceid:" << pProcessData->iDataSource << " frameId:" << pProcessData->iFrameId
LogDebug << "frameId:" << pProcessData->iFrameId
<< " bigclassid:" << it->class_id << " ltx:" << it->bbox[0] << " lty:" << it->bbox[1]
<< " rbx:" << it->bbox[2] << " rby:" << it->bbox[3];
// 根据配置文件中 设置的识别范围,过滤掉无效数据
@ -203,8 +203,18 @@ void TrainStepOneEngine::FilterInvalidInfo(std::vector<stDetection> &vecRet, std
it->bbox[2] <= dataSourceCfg.fIdentifyAreasRBX &&
it->bbox[3] <= dataSourceCfg.fIdentifyAreasRBY))
{
LogWarn << "sourceid:" << pProcessData->iDataSource << " frameId:" << pProcessData->iFrameId
<< " bigclassid:" << it->class_id << " invalid areas";
LogDebug << "frameId:" << pProcessData->iFrameId
<< " bigclassid:" << it->class_id << " 超出识别区域-识别区域:("
<< dataSourceCfg.fIdentifyAreasLTX << "," << dataSourceCfg.fIdentifyAreasLTY << "),("
<< dataSourceCfg.fIdentifyAreasRBX << "," << dataSourceCfg.fIdentifyAreasRBY << ")";
it = vecRet.erase(it);
continue;
}
// 如果设置了不识别车头,则去掉车头标记的大框
if (!MyYaml::GetIns()->GetBoolValue("gc_train_heard_detect") && it->class_id == TRAIN_HEAD)
{
LogDebug << "frameId:" << pProcessData->iFrameId << " 过滤掉车头编号";
it = vecRet.erase(it);
continue;
}
@ -212,40 +222,31 @@ void TrainStepOneEngine::FilterInvalidInfo(std::vector<stDetection> &vecRet, std
// 去除车头车尾的间隔信息
if(pProcessData->nMonitorState == MONITOR_MODEL_TRAIN_HEAD )
{
LogWarn<<" pProcessData->nMonitorState:" << pProcessData->nMonitorState;
if(it->class_id != TRAIN_HEAD)
if(it->class_id != TRAIN_HEAD)
{
LogError << "sourceid:" << pProcessData->iDataSource << " frameId:" << pProcessData->iFrameId
<< " bigclassid:" << it->class_id << " pProcessData->nMonitorState:" << pProcessData->nMonitorState
<< " invalid";
LogDebug << " 帧号:" << pProcessData->iFrameId
<< " 大类:" << it->class_id << " 识别于车头位置,无效!";
it = vecRet.erase(it);
continue;
}
}
if (pProcessData->nMonitorState == MONITOR_MODEL_TRAIN_TAIL )
if (pProcessData->nMonitorState == MONITOR_MODEL_TRAIN_TAIL
&& ((it->class_id >= 9 && it->class_id <= 17 && it->class_id != 15) || it->class_id == 18))
{
LogWarn<<" pProcessData->nMonitorState:" << pProcessData->nMonitorState;
/*if(
(it->class_id <= U_TRAIN_SPACE)
&& (it->class_id >= C_TRAIN_SPACE)
&& (it->class_id != W_TRAIN_NUM)
)*/
{
LogError << "sourceid:" << pProcessData->iDataSource << " frameId:" << pProcessData->iFrameId
<< " bigclassid:" << it->class_id << " pProcessData->nMonitorState:" << pProcessData->nMonitorState
<< " invalid";
it = vecRet.erase(it);
continue;
}
LogDebug << " frameId:" << pProcessData->iFrameId
<< " bigclassid:" << it->class_id
<<" 识别于车尾部分,无效!";
it = vecRet.erase(it);
continue;
}
// 按大框高度剔除远股道识别的信息
int iClassHeight = it->bbox[3] - it->bbox[1];
if (dataSourceCfg.mapClassMinH.find(it->class_id) != dataSourceCfg.mapClassMinH.end() &&
iClassHeight < dataSourceCfg.mapClassMinH[it->class_id])
{
LogError << "sourceid:" << pProcessData->iDataSource << " frameId:" << pProcessData->iFrameId
LogDebug << " frameId:" << pProcessData->iFrameId
<< " bigclassid:" << it->class_id << " iClassHeight:" << iClassHeight
<< " minH:" << dataSourceCfg.mapClassMinH[it->class_id] << " invalid hegiht";
<< " minH:" << dataSourceCfg.mapClassMinH[it->class_id] << " 过滤疑似远股道识别";
it = vecRet.erase(it);
continue;
}
@ -256,7 +257,7 @@ void TrainStepOneEngine::FilterInvalidInfo(std::vector<stDetection> &vecRet, std
{
if (it->class_id != 1 && it->class_id != 6)
{
LogDebug << "sourceid:" << pProcessData->iDataSource << " frameId:" << pProcessData->iFrameId << " flat camera only deal 1 or 6";
LogDebug << " frameId:" << pProcessData->iFrameId << " flat camera only deal 1 or 6";
it = vecRet.erase(it);
continue;
}
@ -266,8 +267,8 @@ void TrainStepOneEngine::FilterInvalidInfo(std::vector<stDetection> &vecRet, std
if (((it->class_id >= 2 && it->class_id <= 6) || it->class_id == J_TRAIN_NUM || it->class_id == W_TRAIN_NUM) &&
(it->bbox[3] - it->bbox[1]) > (it->bbox[2] - it->bbox[0]))
{
LogWarn << "sourceid:" << pProcessData->iDataSource << " frameId:" << pProcessData->iFrameId
<< " bigclassid:" << it->class_id << " invalid data-- height > width ";
LogWarn << " frameId:" << pProcessData->iFrameId
<< " bigclassid:" << it->class_id << " 过滤 高度大于宽度的车号";
it = vecRet.erase(it);
continue;
}
@ -301,8 +302,8 @@ void TrainStepOneEngine::FilterInvalidInfo(std::vector<stDetection> &vecRet, std
{
if (it->bbox[3] < (pProcessData->iHeight * iSpaceMinRBXPer_ / 100))
{
LogWarn << "sourceid:" << pProcessData->iDataSource << " frameId:" << pProcessData->iFrameId
<< " bigclassid:" << it->class_id << " spaceinfo invalid fRBY:" << it->bbox[3];
LogWarn << " frameId:" << pProcessData->iFrameId
<< " bigclassid:" << it->class_id << " 过滤间隔过于靠下的间隔信息 fRBY:" << it->bbox[3];
it = vecRet.erase(it);
continue;
}
@ -321,9 +322,9 @@ void TrainStepOneEngine::FilterInvalidInfo(std::vector<stDetection> &vecRet, std
if (iHeight0 < iCenterY && iHeight1 < iCenterY) //非平车
{
if (!((vecRet[0].class_id >= 9 && vecRet[0].class_id <= 17 && vecRet[0].class_id != 15) || vecRet[0].class_id == U_TRAIN_SPACE) &&
!((vecRet[1].class_id >= 9 && vecRet[10].class_id <= 17 && vecRet[1].class_id != 15) || vecRet[1].class_id == U_TRAIN_SPACE))
!((vecRet[1].class_id >= 9 && vecRet[1].class_id <= 17 && vecRet[1].class_id != 15) || vecRet[1].class_id == U_TRAIN_SPACE))
{
LogError << "sourceid:" << pProcessData->iDataSource << " frameId:" << pProcessData->iFrameId << " no space";
LogDebug << " frameId:" << pProcessData->iFrameId << " no space";
vecRet.clear();
}
}
@ -461,10 +462,10 @@ APP_ERROR TrainStepOneEngine::Process()
SetTargetType(postSubData);
pPostData->vecPostSubData.emplace_back(postSubData);
LogDebug << "sourceid:" << pProcessData->iDataSource << " frameId:" << pProcessData->iFrameId
<< " --iClassId:" << singledata.iClassId << " iLine:" << singledata.iLine << " confidence=" << singledata.fScore
<< " lx=" << singledata.fLTX << " ly=" << singledata.fLTY << " rx=" << singledata.fRBX << " ry=" << singledata.fRBY
<< " clear:" << singledata.fClear;
// LogDebug << "数据源:" << pProcessData->iDataSource << " 帧:" << pProcessData->iFrameId
// << " --iClassId:" << singledata.iClassId << " iLine:" << singledata.iLine << " confidence=" << singledata.fScore
// << " lx=" << singledata.fLTX << " ly=" << singledata.fLTY << " rx=" << singledata.fRBX << " ry=" << singledata.fRBY
// << " clear:" << singledata.fClear;
}
}
}

View File

@ -193,7 +193,7 @@ APP_ERROR TrainStepTwoEngine::Process()
auto start = std::chrono::system_clock::now(); // 计时开始
yolov5model.YoloV5ClearityInferenceModel(step2_image, res);
auto end = std::chrono::system_clock::now();
LogInfo << "nopr2 inference time: " << std::chrono::duration_cast<std::chrono::milliseconds>(end - start).count() << "ms";
// LogInfo << "nopr2 inference time: " << std::chrono::duration_cast<std::chrono::milliseconds>(end - start).count() << "ms";
PostSubData postSubDataNew;
postSubDataNew.iTargetType = postsubdata.iTargetType;
@ -221,9 +221,9 @@ APP_ERROR TrainStepTwoEngine::Process()
postSubDataNew.vecSingleData.emplace_back(singledata);
LogDebug << "sourceid:" << pProcessData->iDataSource << " step2 after frameId:" << pProcessData->iFrameId
<< " --iClassId:" << singledata.iClassId << " iLine:" << singledata.iLine << " confidence=" << singledata.fScore
<< " lx=" << singledata.fLTX << " ly=" << singledata.fLTY << " rx=" << singledata.fRBX << " ry=" << singledata.fRBY;
// LogDebug << "sourceid:" << pProcessData->iDataSource << " step2 after frameId:" << pProcessData->iFrameId
// << " --iClassId:" << singledata.iClassId << " iLine:" << singledata.iLine << " confidence=" << singledata.fScore
// << " lx=" << singledata.fLTX << " ly=" << singledata.fLTY << " rx=" << singledata.fRBX << " ry=" << singledata.fRBY;
}
pPostData->vecPostSubData.emplace_back(postSubDataNew);
}