parent 665e6b62a7
commit 2e67e97508

@@ -41,8 +41,10 @@ set(SYS_USR_INCLUDE_DIR "/usr/include")
set(SYS_USR_LIB_DIR "/usr/lib")
set(SYS_USR_LOCAL_INCLUDE_DIR "/usr/local/include")
set(SYS_USR_LOCAL_LIB_DIR "/usr/local/lib")
# -- Used when building for x86 --
set(AARCH64_LINUX_INCLUDE_DIR "/usr/include/x86_64-linux-gnu")
set(AARCH64_LINUX_LIB_DIR "/usr/lib/x86_64-linux-gnu")
# -- Used when building for ARM --
#set(AARCH64_LINUX_INCLUDE_DIR "/usr/include/aarch64-linux-gnu")
#set(AARCH64_LINUX_LIB_DIR "/usr/lib/aarch64-linux-gnu")

@@ -386,6 +386,23 @@ namespace ai_matrix
        return std::string(szTmp);
    }

    // Convert a timestamp to a time string, millisecond precision
    std::string MyUtils::Stamp2Time(long long timestamp, bool has_msec)
    {
        int ms = timestamp % 1000;               // millisecond part
        time_t tick = (time_t)(timestamp/1000);  // convert to seconds
        struct tm tm;
        char s[40];
        tm = *localtime(&tick);
        strftime(s, sizeof(s), "%Y-%m-%d %H:%M:%S", &tm);
        std::string str(s);
        if (has_msec)
        {
            str = str + "." + std::to_string(ms);
        }
        return str;
    }

    /**
     * Get the number of milliseconds since 1970 for the current time
     * inParam : N/A

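The Stamp2Time above appends the millisecond remainder with std::to_string, so 7 ms renders as ".7" rather than ".007". A minimal zero-padded variant, shown only as an illustrative sketch and not part of this commit:

    #include <cstdio>
    #include <ctime>
    #include <string>

    // Same conversion as Stamp2Time, but always emits three millisecond digits.
    std::string Stamp2TimePadded(long long timestamp)
    {
        int ms = static_cast<int>(timestamp % 1000);          // millisecond part
        time_t tick = static_cast<time_t>(timestamp / 1000);  // seconds since epoch
        struct tm tmLocal = *localtime(&tick);
        char s[40];
        strftime(s, sizeof(s), "%Y-%m-%d %H:%M:%S", &tmLocal);
        char msBuf[8];
        snprintf(msBuf, sizeof(msBuf), ".%03d", ms);           // ".007" instead of ".7"
        return std::string(s) + msBuf;
    }
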
@@ -507,7 +524,7 @@ namespace ai_matrix
        return true;
    }

#ifdef ASCEND
#ifdef ASCEND
    /**
     * Copy data from Device to Host
     * inParam : void *pDeviceBuffer  device memory address

@@ -589,7 +606,7 @@ namespace ai_matrix
        }
        return true;
    }
#endif
#endif

    /**
     * Get the Beijing date-time corresponding to the given milliseconds

@@ -56,6 +56,9 @@ namespace ai_matrix
        // Get the current time
        std::string get_time();

        // Convert a timestamp to a time string, millisecond precision
        std::string Stamp2Time(long long timestamp, bool has_msec = false);

        // Create directories
        std::string create_dir_name(std::string root, std::string name);
        std::string create_dir_date_name_time(std::string root, std::string name);

@@ -99,13 +102,13 @@ namespace ai_matrix
        // Create a directory path
        bool CreateDirPath(std::string strDirPath);

#ifdef ASCEND
#ifdef ASCEND
        // Copy data from Device to Host
        bool MemcpyDeviceToHost(std::shared_ptr<void> *pHostData, const void *pDeviceBuffer, uint32_t iBufferSize);

        // Copy data from Host to Device
        bool MemcpyHostToDevice(std::shared_ptr<void> *pDeviceData, const void *pHostBuffer, uint32_t iBufferSize, bool bDvppFlag = true);
#endif
#endif

        // Get the date-time corresponding to the given milliseconds
        std::string GetDateTimeByMilliSeconds(uint64_t i64MilliSeconds, bool bFormatFlag = false);

@@ -113,6 +113,8 @@ model:
nms_threshold: 0.3

gc_http_open: 1
username: "guest_01"
password: "d55b0f642e817eea24725d2f2a31dd08" # 神东
gc_http_url: "http://192.168.2.211:20004/api/train-carriage/identification/video-save"
gc_gettoken_url: "http://192.168.2.211:20004/api/blade-auth/oauth/token"
gc_image_srv: "http://192.168.2.211:9010/"

@@ -190,14 +190,14 @@ void DataDealEngine::MakeProcessData()
        iFrameId = iReRunFrameId;
    }

    // LogInfo << "sourceid:" << iSourceId << " MakeProcessData origtime:" << moveData_.strTrainName << " iOrigFrameId:" << iOrigFrameId
    //         << " time:" << strTrainName_ << " iFrameId:" << iFrameId << " bIsEndFlag:" << bIsEndFlag;
    LogInfo << "sourceid:" << iSourceId << " MakeProcessData origtime:" << moveData_.strTrainName << " iOrigFrameId:" << iOrigFrameId
            << " time:" << strTrainName_ << " iFrameId:" << iFrameId << " bIsEndFlag:" << bIsEndFlag;

    std::string strImgName = strDataDir_ + szCameraNo + std::to_string(iOrigFrameId);
    strImgName += (iter->second.iRotate != 0) ? "_rotate.jpg" : ".jpg";
    std::string strFileName = strDataDir_ + szCameraNo + std::to_string(iOrigFrameId) + ".txt";

    // Retry 2000 times after a camera read failure.
    // Retry 30 times after a camera read failure.
    Json::Value jvFrameInfo;
    RawData rawData;
    bool bRet = false;

@@ -291,14 +291,25 @@ APP_ERROR DataDealEngine::Process()
    // Get the detection state from the main camera
    std::shared_ptr<void> pVoidData0 = nullptr;
    iRet = inputQueMap_[strPort0_]->pop(pVoidData0);

    if (nullptr != pVoidData0)
    {
        std::shared_ptr<MoveData> pMoveData = std::static_pointer_cast<MoveData>(pVoidData0);

        // queuwMoveData_.push(*pMoveData);
        moveData_ = *pMoveData;
        LogDebug << "traindate:" << moveData_.strTrainDate << " trainname:" << moveData_.strTrainName
                 << " MoveData frameid:" << moveData_.iFrameId << " IsEnd:" << moveData_.bIsEnd;
    }

    // LogDebug << "【帧号】" << (iDataNO_ * dataSourceConfig_.iSkipInterval);
    // if (queuwMoveData_.size() > 0 && (iDataNO_ * dataSourceConfig_.iSkipInterval) >= queuwMoveData_.front().iFrameId)
    // {
    //     moveData_ = queuwMoveData_.front();
    //     queuwMoveData_.pop();
    //     LogDebug << "!!!--- moveDate 更新";
    // }

    if (!moveData_.bHasTrain)
    {
        usleep(1000); //1ms

@@ -308,7 +319,7 @@ APP_ERROR DataDealEngine::Process()
    // For the first data item, sleep 1 s so the image can be written to local storage
    if (iOrigDataNO_ == 1)
    {
        usleep(1000000); //1s
        usleep(1000 * 1000); //1s
    }

    if (strDataDir_.empty())

@@ -8,6 +8,8 @@ ResultToHttpSrvEngine::~ResultToHttpSrvEngine() {}
APP_ERROR ResultToHttpSrvEngine::Init()
{
    strPort0_ = engineName_ + "_" + std::to_string(engineId_) + "_0";
    strUsername_ = MyYaml::GetIns()->GetStringValue("username");
    strPassword_ = MyYaml::GetIns()->GetStringValue("password");
    strURL_ = MyYaml::GetIns()->GetStringValue("gc_http_url");
    strGetTokenURL_ = MyYaml::GetIns()->GetStringValue("gc_gettoken_url");
    strImageSrv_ = MyYaml::GetIns()->GetPathValue("gc_image_srv");

@@ -76,10 +78,10 @@ bool ResultToHttpSrvEngine::GetToken(std::string &strBladeAuth)
    curl_mime *pMultipart = curl_mime_init(pCurl_);
    curl_mimepart *pPart = curl_mime_addpart(pMultipart);
    curl_mime_name(pPart, "username");
    curl_mime_data(pPart, "guest_01", CURL_ZERO_TERMINATED);
    curl_mime_data(pPart, strUsername_.c_str(), CURL_ZERO_TERMINATED);
    pPart = curl_mime_addpart(pMultipart);
    curl_mime_name(pPart, "password");
    curl_mime_data(pPart, "d55b0f642e817eea24725d2f2a31dd08", CURL_ZERO_TERMINATED);
    curl_mime_data(pPart, strPassword_.c_str(), CURL_ZERO_TERMINATED);
    pPart = curl_mime_addpart(pMultipart);
    curl_mime_name(pPart, "tenantId");
    curl_mime_data(pPart, "000000", CURL_ZERO_TERMINATED);

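The hunk above only swaps the hard-coded credentials for the yaml-driven members; a hedged sketch of the complete multipart token request around it, with the perform/cleanup steps assumed rather than taken from this commit:

    #include <curl/curl.h>
    #include <string>

    // Builds the blade-auth form fields and performs the POST; pCurl and the
    // credential strings correspond to pCurl_, strUsername_, strPassword_ above.
    bool PostTokenForm(CURL *pCurl, const std::string &strTokenUrl,
                       const std::string &strUser, const std::string &strPass)
    {
        curl_mime *pMultipart = curl_mime_init(pCurl);

        curl_mimepart *pPart = curl_mime_addpart(pMultipart);
        curl_mime_name(pPart, "username");
        curl_mime_data(pPart, strUser.c_str(), CURL_ZERO_TERMINATED);

        pPart = curl_mime_addpart(pMultipart);
        curl_mime_name(pPart, "password");
        curl_mime_data(pPart, strPass.c_str(), CURL_ZERO_TERMINATED);

        pPart = curl_mime_addpart(pMultipart);
        curl_mime_name(pPart, "tenantId");
        curl_mime_data(pPart, "000000", CURL_ZERO_TERMINATED);

        curl_easy_setopt(pCurl, CURLOPT_URL, strTokenUrl.c_str());
        curl_easy_setopt(pCurl, CURLOPT_MIMEPOST, pMultipart);
        CURLcode res = curl_easy_perform(pCurl);

        curl_mime_free(pMultipart);
        return res == CURLE_OK;
    }
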
@@ -426,7 +428,8 @@ APP_ERROR ResultToHttpSrvEngine::Process()
    jvRequest["isTheLast"] = pTrain->bIsEnd ? 1 : 0;    // last carriage or not: 0 = no, 1 = yes
    jvRequest["startFrame"] = pTrain->iStartFrameId;    // carriage start frame
    jvRequest["endFrame"] = pTrain->iEndFrameId;        // carriage end frame
    jvRequest["skipFrame"] = dataSourceConfig.iSkipInterval; // frame-skip interval
    jvRequest["skipFrame"] = dataSourceConfig.iSkipInterval;
    jvRequest["collectTime"] = MyUtils::getins()->Stamp2Time(pTrain->i64EndTimeStamp, true); // time when the carriage was split
    if (!ResultToHttpSrv(jvRequest))
    {
        // SaveHttpFailInfo(jvRequest, strFailSavePath_);

@@ -40,6 +40,8 @@ private:
    bool SaveHttpFailInfo(Json::Value &jvRequest, std::string &strFilePath);

    std::string strPort0_;
    std::string strUsername_;
    std::string strPassword_;
    std::string strURL_;
    std::string strGetTokenURL_;
    std::string strImageSrv_;

@@ -30,6 +30,7 @@ APP_ERROR FilterTrainStepOneEngine::Init()
    iChkStopCount_ = MyYaml::GetIns()->GetIntValue("gc_chkstop_count");
    iPartitionFrameNum_ = MyYaml::GetIns()->GetIntValue("partition_frame_span");
    iPlitFrameSpanPX_ = MyYaml::GetIns()->GetIntValue("gc_split_frame_span_px");
    iPushDirection_ = MyYaml::GetIns()->GetIntValue("gc_push_direction");

    // Get the main camera configuration
    mainCfg_ = MyYaml::GetIns()->GetDataSourceConfigById(0);

@@ -205,6 +206,13 @@ void FilterTrainStepOneEngine::AddBackInfo(std::shared_ptr<ProcessData> pProcess
    {
        return;
    }
    if (iDirection_ == DIRECTION_RIGHT
        && trainBackInfo.strAllClassType == "SPACE"
        && (trainBackInfoTop.strAllClassType == "NUMSPACE" || trainBackInfoTop.strAllClassType == "SPACENUM"))
    {
        return;
    }

    if (iDirection_ == DIRECTION_LEFT
        && trainBackInfo.strAllClassType == "SPACE"
        && (trainBackInfoTop.strAllClassType == "NUMSPACE" || trainBackInfoTop.strAllClassType == "SPACENUM"))

@@ -262,20 +270,34 @@ bool FilterTrainStepOneEngine::IsEndDealBackInfo(std::shared_ptr<ProcessData> pP

    for (size_t i = 0; i < pPostDataBack->vecPostSubData.size(); i++)
    {
        bool bFlag = (pPostDataBack->vecPostSubData[i].step1Location.fLTX <= pPostData->vecPostSubData[i].step1Location.fLTX);
        int bFlag = -1;
        for (size_t j = 0; j < pPostData->vecPostSubData.size(); j++)
        {
            if (pPostDataBack->vecPostSubData[i].iBigClassId == pPostData->vecPostSubData[j].iBigClassId)
            {
                if (pPostData->vecPostSubData[j].step1Location.fLTX < 1 || pPostDataBack->vecPostSubData[i].step1Location.fLTX < 1)
                {
                    LogDebug << "大框X坐标小于1,判定为异常大框。过滤!!";
                    break;
                }
                bFlag = (pPostDataBack->vecPostSubData[i].step1Location.fLTX <= pPostData->vecPostSubData[j].step1Location.fLTX) ? 1 : 0;
                LogDebug << "帧:" << pProcessData->iFrameId << " 倒车前帧:" << pPostDataBack->iFrameId << " 恢复到原位:" << bFlag
                         << " 当前框位置:" << pPostData->vecPostSubData[i].step1Location.fLTX
                         << " 倒车前位置:" << pPostDataBack->vecPostSubData[i].step1Location.fLTX;
                if ((iDirection_ == DIRECTION_LEFT && !bFlag) ||
                    (iDirection_ == DIRECTION_RIGHT && bFlag))
                         << " 倒车前位置:" << pPostDataBack->vecPostSubData[i].step1Location.fLTX << "方向:" << iDirection_;
            }
        }

        if ((iDirection_ == DIRECTION_LEFT && bFlag == 0) ||
            (iDirection_ == DIRECTION_RIGHT && bFlag == 1))
        {
            bPopFlag = true;
            break;
        }
    }

    if (bPopFlag)
    {
        LogDebug << "frameId:" << pProcessData->iFrameId << " last one bPopFlag:" << bPopFlag;
        LogDebug << "frameId:" << pProcessData->iFrameId << " 恢复倒车前的位置:" << bPopFlag;
        stackBackInfo_.pop();
    }
}

@@ -767,7 +789,8 @@ void FilterTrainStepOneEngine::CalculateDirection(std::shared_ptr<ProcessData> p
}

void FilterTrainStepOneEngine::sendComeTrain(const std::string strTrainDate, const std::string strTrainName, const int iDirection) {
    std::string message = "{\"cometime\":\"" + strTrainDate + " " + strTrainName + "\",\"type\":\"1\",\"direction\":\"" + to_string(iDirection) + "\"}";
    std::string message = "{\"cometime\":\"" + strTrainDate + " " + strTrainName + "\",\"type\":\"1\",\"direction\":" + to_string(iDirection == iPushDirection_ ? 1:-1) + "}";
    LogWarn << message;
    outputQueMap_[engineName_ + "_" + std::to_string(engineId_) + "_1"]->push(std::static_pointer_cast<void>(std::make_shared<std::string>(message)));
}

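sendComeTrain assembles the JSON by string concatenation, and the change turns "direction" from a quoted string into a bare 1/-1 number. A hedged sketch of the same message built with jsoncpp (Json::Value is already used elsewhere in this code base); the helper name is illustrative only:

    #include <json/json.h>
    #include <string>

    std::string BuildComeTrainMessage(const std::string &strTrainDate,
                                      const std::string &strTrainName,
                                      int iDirection, int iPushDirection)
    {
        Json::Value jv;
        jv["cometime"]  = strTrainDate + " " + strTrainName;
        jv["type"]      = "1";                                     // still a string field
        jv["direction"] = (iDirection == iPushDirection) ? 1 : -1; // now a numeric field
        Json::StreamWriterBuilder builder;
        builder["indentation"] = "";                               // compact single-line output
        return Json::writeString(builder, jv);
    }
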
@@ -915,6 +938,7 @@ void FilterTrainStepOneEngine::DealProcessDataPre(std::shared_ptr<ProcessData> p
    {
        //CalculateDirection(iterProcessData->second);
        CalculateDirectionNew(iterProcessData->second);
        if (iDirection_ != DIRECTION_UNKNOWN) this->sendComeTrain(pProcessData->strTrainDate, pProcessData->strTrainName, iDirection_);
    }

    if (iDirection_ != DIRECTION_UNKNOWN)

@@ -58,6 +58,7 @@ private:
    int iChkStopPX_;
    int iChkStopCount_;
    int iDirection_;         // travel direction
    int iPushDirection_;     // direction that should be recognized/pushed
    int rightFirst_;         // leading big-box class when the train moves right
    int leftFirst_;          // leading big-box class when the train moves left
    int iPartitionFrameNum_; // frame gap that counts as crossing to the next carriage

@@ -83,7 +83,10 @@ bool SaveCsvEngine::SaveMergerCsv(std::shared_ptr<Train> pTrain)
            << "inspection" << ','
            << "inspectionImg" << ','
            << "containerImg_1" << ','
            << "containerImg_2" << std::endl;
            << "containerImg_2" << ','
            << "startTime" << ','
            << "endTime"
            << std::endl;
    }

    std::string strTime = pTrain->strTrainName;

@@ -147,7 +150,10 @@ bool SaveCsvEngine::SaveMergerCsv(std::shared_ptr<Train> pTrain)
            << pTrain->chkDate.strChkDate1DeadLine << ','
            << szChkDateImgPath << ','
            << szContainer1ImgPath << ','
            << szContainer2ImgPath << std::endl;
            << szContainer2ImgPath << ','
            << MyUtils::getins()->Stamp2Time(pTrain->i64StartTimeStamp, true) << ','
            << MyUtils::getins()->Stamp2Time(pTrain->i64EndTimeStamp, true)
            << std::endl;

    outFile.close();
}

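With the two extra columns, each merged-CSV row now ends with the carriage start and end times in the Stamp2Time millisecond format. A purely illustrative example of the new tail of a row (all values invented for the example):

    ...,containerImg_1.jpg,containerImg_2.jpg,2024-01-01 12:00:00.123,2024-01-01 12:00:05.456
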
@@ -190,7 +190,7 @@ int HardH264FFmpegDecode::HardH264FFmpegDecoderConfigureVideoFilters(AVFilterGra
    pDecoderFilterIn = pFiltSrc;
    pDecoderFilterOut = pFiltDst;

fail:
fail:
    return ret;
}

@@ -245,7 +245,7 @@ int HardH264FFmpegDecode::HardH264FFmpegDecoder(AVCodecContext *pDecCtx, AVFrame
    return 0;
}

int HardH264FFmpegDecode::HardH264FFmpegDecoderV2(AVCodecContext *pDecCtx, AVFrame *pSrcFrame, AVFrame *pDstFrame, AVPacket *pPkt, void* pOutputData, unsigned int* puiOutputDataSize)
int HardH264FFmpegDecode::HardH264FFmpegDecoderV2(AVCodecContext *pDecCtx, SwsContext *pSwsCtx, AVFrame *pSrcFrame, AVFrame *pDstFrame, AVPacket *pPkt, void* pOutputData, unsigned int* puiOutputDataSize)
{
    int ret;

@@ -269,29 +269,24 @@ int HardH264FFmpegDecode::HardH264FFmpegDecoderV2(AVCodecContext *pDecCtx, AVFra
        // pDecCtx->width = ALIGN_DOWN(pDecCtx->width, 32);
        // pDecCtx->height = ALIGN_DOWN(pDecCtx->height, 32);

        // sws_scale(pSwsCtx,
        //           (const uint8_t *const *)pSrcFrame->data,
        //           pSrcFrame->linesize,
        //           0,
        //           pDecCtx->height,
        //           pDstFrame->data,
        //           pDstFrame->linesize);
        sws_scale(pSwsCtx,
                  (const uint8_t *const *)pSrcFrame->data,
                  pSrcFrame->linesize,
                  0,
                  pDecCtx->height,
                  pDstFrame->data,
                  pDstFrame->linesize);

        //printf("saving frame %3d\n", pDecCtx->frame_number);
        // fflush(stdout);
        fflush(stdout);

        // int iSize = pDecCtx->width * pDecCtx->height;
        //
        // memcpy(pOutputData, pDstFrame->data[0], iSize);                 //Y
        // memcpy(pOutputData+iSize, pDstFrame->data[1], iSize/4);         //U
        // memcpy(pOutputData+iSize+iSize/4, pDstFrame->data[2], iSize/4); //V
        // *puiOutputDataSize = iSize*3/2;
        // return iSize*3/2;
        memcpy(pOutputData, pSrcFrame->data[0], pSrcFrame->width * pSrcFrame->height); // Y
        memcpy(pOutputData + pSrcFrame->width * pSrcFrame->height, pSrcFrame->data[1], pSrcFrame->width * pSrcFrame->height / 4); // U
        memcpy(pOutputData + pSrcFrame->width * pSrcFrame->height + pSrcFrame->width * pSrcFrame->height / 4, pSrcFrame->data[2], pSrcFrame->width * pSrcFrame->height / 4); // V
        *puiOutputDataSize = pSrcFrame->width * pSrcFrame->height * 3 / 2;
        return pSrcFrame->width * pSrcFrame->height * 3 / 2;
        int iSize = pDecCtx->width * pDecCtx->height;

        memcpy(pOutputData, pDstFrame->data[0], iSize);                 //Y
        memcpy(pOutputData+iSize, pDstFrame->data[1], iSize/4);         //U
        memcpy(pOutputData+iSize+iSize/4, pDstFrame->data[2], iSize/4); //V
        *puiOutputDataSize = iSize*3/2;
        return iSize*3/2;
    }
    return 0;
}

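The uncommented sws_scale call assumes a ready SwsContext and an allocated destination frame. A hedged setup sketch for that pair; the destination pixel format, alignment, and scaler flags here are assumptions, not taken from this commit:

    extern "C" {
    #include <libavcodec/avcodec.h>
    #include <libswscale/swscale.h>
    }

    // Allocates a YUV420P frame at the decoder's resolution and a converter
    // from the decoder's native pixel format into it.
    static SwsContext *CreateYuv420Scaler(AVCodecContext *pDecCtx, AVFrame **ppDstFrame)
    {
        AVFrame *pDst = av_frame_alloc();
        pDst->format = AV_PIX_FMT_YUV420P;
        pDst->width  = pDecCtx->width;
        pDst->height = pDecCtx->height;
        av_frame_get_buffer(pDst, 32);   // 32-byte alignment (assumption)
        *ppDstFrame = pDst;

        return sws_getContext(pDecCtx->width, pDecCtx->height, pDecCtx->pix_fmt,
                              pDecCtx->width, pDecCtx->height, AV_PIX_FMT_YUV420P,
                              SWS_BILINEAR, nullptr, nullptr, nullptr);
    }
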
@@ -68,7 +68,7 @@ public:
    int HardH264FFmpegDecoderInit(unsigned int uiWidth, unsigned int uiHeight, unsigned int uiFrameRate = 30);
    int HardH264FFmpegDecoderDeInit();
    int HardH264FFmpegDecoder(AVCodecContext *pDecCtx, AVFrame *pFrame, AVPacket *pPkt, void* pOutputData, unsigned int* puiOutputDataSize);
    int HardH264FFmpegDecoderV2(AVCodecContext *pDecCtx, AVFrame *pSrcFrame, AVFrame *pDstFrame, AVPacket *pPkt, void* pOutputData, unsigned int* puiOutputDataSize);
    int HardH264FFmpegDecoderV2(AVCodecContext *pDecCtx, SwsContext *pSwsCtx, AVFrame *pSrcFrame, AVFrame *pDstFrame, AVPacket *pPkt, void* pOutputData, unsigned int* puiOutputDataSize);

    const AVCodec *pCodec_ = nullptr;      // decoder
    AVCodecContext *pCodecCtx_ = nullptr;  // decoder context

@@ -67,19 +67,19 @@ APP_ERROR VideoDecodeEngine::Process()
        {
            usleep(10*1000); //10ms

            // iNoCameraDataCnt++;
            // if (iNoCameraDataCnt >= 1000) // nothing received within 10 s: assume the camera is disconnected
            // {
            //     LogError << "engineId:" << engineId_ << " 超过10秒获取到摄像头数据,疑似摄像头断开。计数:" << iNoCameraDataCnt;
            //     iNoCameraDataCnt = 0;
            //     // when the camera fails, push an empty decoded item so data keeps flowing to the downstream engines
            //     std::shared_ptr<ProcessData> pProcessData = std::make_shared<ProcessData>();
            //     pProcessData->iDataSource = engineId_;
            //     pProcessData->i64TimeStamp = MyUtils::getins()->GetCurrentTimeMillis();
            //     pProcessData->iSize = 0;
            //     pProcessData->pData = nullptr;
            //     iRet = outputQueMap_[strPort0_]->push(std::static_pointer_cast<void>(pProcessData));
            // }
            // iNoCameraDataCnt++;
            // if (iNoCameraDataCnt >= 1000) // nothing received within 10 s: assume the camera is disconnected
            // {
            //     LogError << "engineId:" << engineId_ << " 超过10秒获取到摄像头数据,疑似摄像头断开。计数:" << iNoCameraDataCnt;
            //     iNoCameraDataCnt = 0;
            //     // when the camera fails, push an empty decoded item so data keeps flowing to the downstream engines
            //     std::shared_ptr<ProcessData> pProcessData = std::make_shared<ProcessData>();
            //     pProcessData->iDataSource = engineId_;
            //     pProcessData->i64TimeStamp = MyUtils::getins()->GetCurrentTimeMillis();
            //     pProcessData->iSize = 0;
            //     pProcessData->pData = nullptr;
            //     iRet = outputQueMap_[strPort0_]->push(std::static_pointer_cast<void>(pProcessData));
            // }

            continue;
        }

@@ -119,6 +119,7 @@ APP_ERROR VideoDecodeEngine::Process()
        //                     hard_h264_ffmpeg_decoder_->pPacket_, pYUV420MBuffer, &pYUV420MBuffer_Size);

        int iDecodeRet = hard_h264_ffmpeg_decoder_->HardH264FFmpegDecoderV2(hard_h264_ffmpeg_decoder_->pCodecCtx_,
                                                                            hard_h264_ffmpeg_decoder_->pSwsContext_,
                                                                            hard_h264_ffmpeg_decoder_->pSrcFrame_,
                                                                            hard_h264_ffmpeg_decoder_->pDstFrame_,
                                                                            hard_h264_ffmpeg_decoder_->pPacket_,

@@ -14,7 +14,7 @@ APP_ERROR TrainStepOneEngine::Init()
    bUseEngine_ = MyUtils::getins()->ChkIsHaveTarget("NUM");
    if (!bUseEngine_)
    {
        LogWarn << "engineId_:" << engineId_ << " not use engine";
        LogInfo << "engineId_:" << engineId_ << " not use engine";
        return APP_ERR_OK;
    }

@@ -98,7 +98,7 @@ APP_ERROR TrainStepOneEngine::InitModel()
    int nRet = yolov5model.YoloV5ClearityInferenceInit(&modelinfo, strModelName, modelConfig_.strOmPath);
    if (nRet != 0)
    {
        LogInfo << "YoloV5ClassifyInferenceInit nRet:" << nRet;
        LogError << "YoloV5ClassifyInferenceInit nRet:" << nRet;
        return APP_ERR_COMM_READ_FAIL;
    }
    return APP_ERR_OK;

@@ -147,7 +147,7 @@ APP_ERROR TrainStepOneEngine::DeInit()
{
    if (!bUseEngine_)
    {
        LogWarn << "engineId_:" << engineId_ << " not use engine";
        LogInfo << "engineId_:" << engineId_ << " not use engine";
        return APP_ERR_OK;
    }

@@ -170,7 +170,7 @@ void TrainStepOneEngine::PushData(const std::string &strPort, const std::shared_
    int iRet = outputQueMap_[strPort]->push(std::static_pointer_cast<void>(pProcessData));
    if (iRet != 0)
    {
        LogDebug << "sourceid:" << pProcessData->iDataSource << " frameid:" << pProcessData->iFrameId << " push fail iRet:" << iRet;
        LogError << " frameid:" << pProcessData->iFrameId << " push fail iRet:" << iRet;
        if (iRet == 2)
        {
            usleep(10000); // 10ms

@@ -219,7 +219,7 @@ void TrainStepOneEngine::FilterInvalidInfo(std::vector<stDetection> &vecRet, std
            continue;
        }

        // Remove spacing detections around the train head and tail
        // While at the train head, remove number detections that are not head numbers
        if(pProcessData->nMonitorState == MONITOR_MODEL_TRAIN_HEAD )
        {
            if(it->class_id != TRAIN_HEAD)

@@ -230,6 +230,20 @@ void TrainStepOneEngine::FilterInvalidInfo(std::vector<stDetection> &vecRet, std
                continue;
            }
        }

        // Remove head-number detections when not at the train head
        if (pProcessData->nMonitorState != MONITOR_MODEL_TRAIN_HEAD)
        {
            if (it->class_id == TRAIN_HEAD)
            {
                LogDebug << " 帧号:" << pProcessData->iFrameId
                         << " 大类:" << it->class_id << " 识别于非车头位置,无效!";
                it = vecRet.erase(it);
                continue;
            }
        }

        // Remove spacing detections at the train tail
        if (pProcessData->nMonitorState == MONITOR_MODEL_TRAIN_TAIL
            && ((it->class_id >= 9 && it->class_id <= 17 && it->class_id != 15) || it->class_id == 18))
        {

@@ -239,6 +253,17 @@ void TrainStepOneEngine::FilterInvalidInfo(std::vector<stDetection> &vecRet, std
            it = vecRet.erase(it);
            continue;
        }

        // Drop every big-box detection made while the model reports a no-train state
        if (pProcessData->nMonitorState == MONITOR_MODEL_NO_TRAIN)
        {
            LogDebug << " frameId:" << pProcessData->iFrameId
                     << " bigclassid:" << it->class_id
                     << " 识别于模型反馈的无车状态下,无效!";
            it = vecRet.erase(it);
            continue;
        }

        // Use big-box height to drop detections from distant tracks
        int iClassHeight = it->bbox[3] - it->bbox[1];
        if (dataSourceCfg.mapClassMinH.find(it->class_id) != dataSourceCfg.mapClassMinH.end() &&

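Each of these filters removes elements from vecRet while iterating, relying on erase() returning the iterator that follows the removed element. A hedged, generic skeleton of that idiom (the element type and predicate are placeholders, not taken from this commit):

    #include <vector>

    // Removes every element for which isInvalid(...) returns true, without
    // invalidating the loop iterator.
    template <typename T, typename Pred>
    void EraseIf(std::vector<T> &vec, Pred isInvalid)
    {
        for (auto it = vec.begin(); it != vec.end(); )
        {
            if (isInvalid(*it))
            {
                it = vec.erase(it);  // erase returns the next valid iterator
                continue;            // do not advance a second time
            }
            ++it;
        }
    }
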
@@ -388,7 +413,7 @@ APP_ERROR TrainStepOneEngine::Process()
{
    if (!bUseEngine_)
    {
        LogWarn << "engineId_:" << engineId_ << " not use engine";
        LogInfo << "engineId_:" << engineId_ << " not use engine";
        return APP_ERR_OK;
    }
    int iRet = APP_ERR_OK;

@@ -418,10 +443,7 @@ APP_ERROR TrainStepOneEngine::Process()

        // Run inference
        std::vector<stDetection> res;
        //auto start = std::chrono::system_clock::now(); // start timing
        yolov5model.YoloV5ClearityInferenceModel(img, res);
        //auto end = std::chrono::system_clock::now();
        //LogInfo << "nopr1 inference time: " << std::chrono::duration_cast<std::chrono::milliseconds>(end - start).count() << "ms";

        // Filter out invalid detections
        FilterInvalidInfo(res, pProcessData);
