优化倒车后的恢复判断。

增加上传接口的用户名密码配置。
This commit is contained in:
Mr.V 2024-05-22 17:25:51 +08:00
parent 665e6b62a7
commit 2e67e97508
14 changed files with 324 additions and 235 deletions

View File

@ -41,8 +41,10 @@ set(SYS_USR_INCLUDE_DIR "/usr/include")
set(SYS_USR_LIB_DIR "/usr/lib") set(SYS_USR_LIB_DIR "/usr/lib")
set(SYS_USR_LOCAL_INCLUDE_DIR "/usr/local/include") set(SYS_USR_LOCAL_INCLUDE_DIR "/usr/local/include")
set(SYS_USR_LOCAL_LIB_DIR "/usr/local/lib") set(SYS_USR_LOCAL_LIB_DIR "/usr/local/lib")
# -- X86使用 --
set(AARCH64_LINUX_INCLUDE_DIR "/usr/include/x86_64-linux-gnu") set(AARCH64_LINUX_INCLUDE_DIR "/usr/include/x86_64-linux-gnu")
set(AARCH64_LINUX_LIB_DIR "/usr/lib/x86_64-linux-gnu") set(AARCH64_LINUX_LIB_DIR "/usr/lib/x86_64-linux-gnu")
# -- ARM使用 --
#set(AARCH64_LINUX_INCLUDE_DIR "/usr/include/aarch64-linux-gnu") #set(AARCH64_LINUX_INCLUDE_DIR "/usr/include/aarch64-linux-gnu")
#set(AARCH64_LINUX_LIB_DIR "/usr/lib/aarch64-linux-gnu") #set(AARCH64_LINUX_LIB_DIR "/usr/lib/aarch64-linux-gnu")

View File

@ -129,37 +129,37 @@ namespace ai_matrix
return std::string(tmp); return std::string(tmp);
} }
std::string MyUtils::get_date() std::string MyUtils::get_date()
{ {
time_t timep = time(NULL); time_t timep = time(NULL);
struct tm *p = localtime(&timep); struct tm *p = localtime(&timep);
struct timeval tv; struct timeval tv;
gettimeofday(&tv, NULL); gettimeofday(&tv, NULL);
int msec = tv.tv_usec / 1000; int msec = tv.tv_usec / 1000;
char tmp[12] = { 0 }; char tmp[12] = { 0 };
sprintf(tmp, "%04d-%02d-%02d", 1900 + p->tm_year, 1 + p->tm_mon, p->tm_mday); sprintf(tmp, "%04d-%02d-%02d", 1900 + p->tm_year, 1 + p->tm_mon, p->tm_mday);
return std::string(tmp); return std::string(tmp);
} }
std::string MyUtils::get_time() std::string MyUtils::get_time()
{ {
time_t timep = time(NULL); time_t timep = time(NULL);
struct tm *p = localtime(&timep); struct tm *p = localtime(&timep);
struct timeval tv; struct timeval tv;
gettimeofday(&tv, NULL); gettimeofday(&tv, NULL);
int msec = tv.tv_usec / 1000; int msec = tv.tv_usec / 1000;
char tmp[10] = { 0 }; char tmp[10] = { 0 };
sprintf(tmp, "%02d-%02d-%02d", p->tm_hour, p->tm_min, p->tm_sec); sprintf(tmp, "%02d-%02d-%02d", p->tm_hour, p->tm_min, p->tm_sec);
return std::string(tmp); return std::string(tmp);
} }
std::string MyUtils::get_timestamp_log() std::string MyUtils::get_timestamp_log()
{ {
@ -310,12 +310,12 @@ namespace ai_matrix
return buffer.str(); return buffer.str();
} }
/** /**
* *
* @param filePath * @param filePath
* @param savePath * @param savePath
* @return * @return
*/ */
bool MyUtils::copyFile(std::string filePath, std::string savePath) bool MyUtils::copyFile(std::string filePath, std::string savePath)
{ {
FILE *fp, *sp; FILE *fp, *sp;
@ -338,14 +338,14 @@ namespace ai_matrix
} }
std::string& MyUtils::replace_all_distinct(std::string &str, const std::string &old_value, const std::string &new_value) std::string& MyUtils::replace_all_distinct(std::string &str, const std::string &old_value, const std::string &new_value)
{ {
for (std::string::size_type pos(0); pos != std::string::npos; pos += new_value.length()) { for (std::string::size_type pos(0); pos != std::string::npos; pos += new_value.length()) {
if ((pos = str.find(old_value, pos)) != std::string::npos) if ((pos = str.find(old_value, pos)) != std::string::npos)
str.replace(pos, old_value.length(), new_value); str.replace(pos, old_value.length(), new_value);
else break; else break;
} }
return str; return str;
} }
/** /**
* *
@ -386,6 +386,23 @@ namespace ai_matrix
return std::string(szTmp); return std::string(szTmp);
} }
// Convert a millisecond-resolution epoch timestamp to a local-time string.
// inParam  : timestamp - milliseconds since the Unix epoch
//            has_msec  - when true, append the millisecond part as ".mmm"
// return   : "YYYY-MM-DD HH:MM:SS" (plus ".mmm" when has_msec is true)
std::string MyUtils::Stamp2Time(long long timestamp, bool has_msec)
{
	int ms = (int)(timestamp % 1000);          //取毫秒 (millisecond remainder)
	time_t tick = (time_t)(timestamp / 1000);  //转换时间 (whole seconds)
	struct tm tm;
	// localtime() uses a shared static buffer and is not thread-safe;
	// this engine pipeline runs multiple threads, so use localtime_r.
	localtime_r(&tick, &tm);
	char s[40];
	strftime(s, sizeof(s), "%Y-%m-%d %H:%M:%S", &tm);
	std::string str(s);
	if (has_msec)
	{
		// Zero-pad to three digits: 7 ms must render ".007", not ".7",
		// otherwise consumers cannot distinguish 7 ms from 700 ms.
		char msbuf[8];
		snprintf(msbuf, sizeof(msbuf), "%03d", ms);
		str += ".";
		str += msbuf;
	}
	return str;
}
/** /**
* 1970 * 1970
* inParam : N/A * inParam : N/A
@ -507,7 +524,7 @@ namespace ai_matrix
return true; return true;
} }
#ifdef ASCEND #ifdef ASCEND
/** /**
* Device数据到Host * Device数据到Host
* inParam : void *pDeviceBuffer device内存地址 * inParam : void *pDeviceBuffer device内存地址
@ -589,7 +606,7 @@ namespace ai_matrix
} }
return true; return true;
} }
#endif #endif
/** /**
* *

View File

@ -51,10 +51,13 @@ namespace ai_matrix
//获取时间戳 //获取时间戳
std::string get_timestamp_file(); std::string get_timestamp_file();
std::string get_timestamp_log(); std::string get_timestamp_log();
//获取日期 //获取日期
std::string get_date(); std::string get_date();
//获取时间 //获取时间
std::string get_time(); std::string get_time();
//时间戳转化为时间 毫秒级
std::string Stamp2Time(long long timestamp, bool has_msec = false);
//创建文件夹 //创建文件夹
std::string create_dir_name(std::string root, std::string name); std::string create_dir_name(std::string root, std::string name);
@ -73,16 +76,16 @@ namespace ai_matrix
//bool 转 string //bool 转 string
std::string getStringFromBool(bool b); std::string getStringFromBool(bool b);
/** /**
* *
* @param filePath * @param filePath
* @param savePath * @param savePath
* @return * @return
*/ */
bool copyFile(std::string filePath, std::string savePath); bool copyFile(std::string filePath, std::string savePath);
//替换string中所有指定字符串 //替换string中所有指定字符串
std::string& replace_all_distinct(std::string &str, const std::string &old_value, const std::string &new_value); std::string& replace_all_distinct(std::string &str, const std::string &old_value, const std::string &new_value);
//获取北京当前日期 //获取北京当前日期
std::string GetDate(); std::string GetDate();
@ -99,13 +102,13 @@ namespace ai_matrix
//创建文件夹路径 //创建文件夹路径
bool CreateDirPath(std::string strDirPath); bool CreateDirPath(std::string strDirPath);
#ifdef ASCEND #ifdef ASCEND
//拷贝Device数据到Host //拷贝Device数据到Host
bool MemcpyDeviceToHost(std::shared_ptr<void> *pHostData, const void *pDeviceBuffer, uint32_t iBufferSize); bool MemcpyDeviceToHost(std::shared_ptr<void> *pHostData, const void *pDeviceBuffer, uint32_t iBufferSize);
//拷贝Host数据到Device //拷贝Host数据到Device
bool MemcpyHostToDevice(std::shared_ptr<void> *pDeviceData, const void *pHostBuffer, uint32_t iBufferSize, bool bDvppFlag = true); bool MemcpyHostToDevice(std::shared_ptr<void> *pDeviceData, const void *pHostBuffer, uint32_t iBufferSize, bool bDvppFlag = true);
#endif #endif
//获取指定毫秒数的对应的日期时间 //获取指定毫秒数的对应的日期时间
std::string GetDateTimeByMilliSeconds(uint64_t i64MilliSeconds, bool bFormatFlag = false); std::string GetDateTimeByMilliSeconds(uint64_t i64MilliSeconds, bool bFormatFlag = false);

View File

@ -113,6 +113,8 @@ model:
nms_threshold: 0.3 nms_threshold: 0.3
gc_http_open: 1 gc_http_open: 1
username: "guest_01"
password: "d55b0f642e817eea24725d2f2a31dd08" # 神东
gc_http_url: "http://192.168.2.211:20004/api/train-carriage/identification/video-save" gc_http_url: "http://192.168.2.211:20004/api/train-carriage/identification/video-save"
gc_gettoken_url: "http://192.168.2.211:20004/api/blade-auth/oauth/token" gc_gettoken_url: "http://192.168.2.211:20004/api/blade-auth/oauth/token"
gc_image_srv: "http://192.168.2.211:9010/" gc_image_srv: "http://192.168.2.211:9010/"

View File

@ -190,14 +190,14 @@ void DataDealEngine::MakeProcessData()
iFrameId = iReRunFrameId; iFrameId = iReRunFrameId;
} }
// LogInfo << "sourceid:" << iSourceId << " MakeProcessData origtime:" << moveData_.strTrainName << " iOrigFrameId:" << iOrigFrameId LogInfo << "sourceid:" << iSourceId << " MakeProcessData origtime:" << moveData_.strTrainName << " iOrigFrameId:" << iOrigFrameId
// << " time:" << strTrainName_ << " iFrameId:" << iFrameId << " bIsEndFlag:" << bIsEndFlag; << " time:" << strTrainName_ << " iFrameId:" << iFrameId << " bIsEndFlag:" << bIsEndFlag;
std::string strImgName = strDataDir_ + szCameraNo + std::to_string(iOrigFrameId); std::string strImgName = strDataDir_ + szCameraNo + std::to_string(iOrigFrameId);
strImgName += (iter->second.iRotate != 0) ? "_rotate.jpg" : ".jpg"; strImgName += (iter->second.iRotate != 0) ? "_rotate.jpg" : ".jpg";
std::string strFileName = strDataDir_ + szCameraNo + std::to_string(iOrigFrameId) + ".txt"; std::string strFileName = strDataDir_ + szCameraNo + std::to_string(iOrigFrameId) + ".txt";
//摄像头读取失败后重试2000次。 //摄像头读取失败后重试30次。
Json::Value jvFrameInfo; Json::Value jvFrameInfo;
RawData rawData; RawData rawData;
bool bRet = false; bool bRet = false;
@ -241,7 +241,7 @@ void DataDealEngine::MakeProcessData()
cv::Mat cvframe = cv::imread(pProcessData->strPicFilePath); cv::Mat cvframe = cv::imread(pProcessData->strPicFilePath);
int iBufferSize = pProcessData->iWidth * pProcessData->iHeight * 3; int iBufferSize = pProcessData->iWidth * pProcessData->iHeight * 3;
void* pBGRBufferobj = nullptr; void* pBGRBufferobj = nullptr;
pBGRBufferobj = new uint8_t[iBufferSize]; pBGRBufferobj = new uint8_t[iBufferSize];
memcpy(pBGRBufferobj, cvframe.data, iBufferSize); memcpy(pBGRBufferobj, cvframe.data, iBufferSize);
pProcessData->pData.reset(pBGRBufferobj, [](void* data){if(data) {delete[] data; data = nullptr;}}); pProcessData->pData.reset(pBGRBufferobj, [](void* data){if(data) {delete[] data; data = nullptr;}});
pProcessData->iSize = iBufferSize; pProcessData->iSize = iBufferSize;
@ -291,14 +291,25 @@ APP_ERROR DataDealEngine::Process()
//获取主摄像头检测的状态 //获取主摄像头检测的状态
std::shared_ptr<void> pVoidData0 = nullptr; std::shared_ptr<void> pVoidData0 = nullptr;
iRet = inputQueMap_[strPort0_]->pop(pVoidData0); iRet = inputQueMap_[strPort0_]->pop(pVoidData0);
if (nullptr != pVoidData0) if (nullptr != pVoidData0)
{ {
std::shared_ptr<MoveData> pMoveData = std::static_pointer_cast<MoveData>(pVoidData0); std::shared_ptr<MoveData> pMoveData = std::static_pointer_cast<MoveData>(pVoidData0);
// queuwMoveData_.push(*pMoveData);
moveData_ = *pMoveData; moveData_ = *pMoveData;
LogDebug << "traindate:" << moveData_.strTrainDate << " trainname:" << moveData_.strTrainName LogDebug << "traindate:" << moveData_.strTrainDate << " trainname:" << moveData_.strTrainName
<< " MoveData frameid:" << moveData_.iFrameId << " IsEnd:" << moveData_.bIsEnd; << " MoveData frameid:" << moveData_.iFrameId << " IsEnd:" << moveData_.bIsEnd;
} }
// LogDebug << "【帧号】" << (iDataNO_ * dataSourceConfig_.iSkipInterval);
// if (queuwMoveData_.size() > 0 && (iDataNO_ * dataSourceConfig_.iSkipInterval) >= queuwMoveData_.front().iFrameId)
// {
// moveData_ = queuwMoveData_.front();
// queuwMoveData_.pop();
// LogDebug << "!!!--- moveDate 更新";
// }
if (!moveData_.bHasTrain) if (!moveData_.bHasTrain)
{ {
usleep(1000); //1ms usleep(1000); //1ms
@ -308,7 +319,7 @@ APP_ERROR DataDealEngine::Process()
//第一个数据休眠1s等待图片存入本地 //第一个数据休眠1s等待图片存入本地
if (iOrigDataNO_ == 1) if (iOrigDataNO_ == 1)
{ {
usleep(1000000); //1s usleep(1000 * 1000); //1s
} }
if (strDataDir_.empty()) if (strDataDir_.empty())

View File

@ -8,6 +8,8 @@ ResultToHttpSrvEngine::~ResultToHttpSrvEngine() {}
APP_ERROR ResultToHttpSrvEngine::Init() APP_ERROR ResultToHttpSrvEngine::Init()
{ {
strPort0_ = engineName_ + "_" + std::to_string(engineId_) + "_0"; strPort0_ = engineName_ + "_" + std::to_string(engineId_) + "_0";
strUsername_ = MyYaml::GetIns()->GetStringValue("username");
strPassword_ = MyYaml::GetIns()->GetStringValue("password");
strURL_ = MyYaml::GetIns()->GetStringValue("gc_http_url"); strURL_ = MyYaml::GetIns()->GetStringValue("gc_http_url");
strGetTokenURL_ = MyYaml::GetIns()->GetStringValue("gc_gettoken_url"); strGetTokenURL_ = MyYaml::GetIns()->GetStringValue("gc_gettoken_url");
strImageSrv_ = MyYaml::GetIns()->GetPathValue("gc_image_srv"); strImageSrv_ = MyYaml::GetIns()->GetPathValue("gc_image_srv");
@ -76,11 +78,11 @@ bool ResultToHttpSrvEngine::GetToken(std::string &strBladeAuth)
curl_mime *pMultipart = curl_mime_init(pCurl_); curl_mime *pMultipart = curl_mime_init(pCurl_);
curl_mimepart *pPart = curl_mime_addpart(pMultipart); curl_mimepart *pPart = curl_mime_addpart(pMultipart);
curl_mime_name(pPart, "username"); curl_mime_name(pPart, "username");
curl_mime_data(pPart, "guest_01", CURL_ZERO_TERMINATED); curl_mime_data(pPart, strUsername_.c_str(), CURL_ZERO_TERMINATED);
pPart = curl_mime_addpart(pMultipart); pPart = curl_mime_addpart(pMultipart);
curl_mime_name(pPart, "password"); curl_mime_name(pPart, "password");
curl_mime_data(pPart, "d55b0f642e817eea24725d2f2a31dd08", CURL_ZERO_TERMINATED); curl_mime_data(pPart, strPassword_.c_str(), CURL_ZERO_TERMINATED);
pPart = curl_mime_addpart(pMultipart); pPart = curl_mime_addpart(pMultipart);
curl_mime_name(pPart, "tenantId"); curl_mime_name(pPart, "tenantId");
curl_mime_data(pPart, "000000", CURL_ZERO_TERMINATED); curl_mime_data(pPart, "000000", CURL_ZERO_TERMINATED);
pPart = curl_mime_addpart(pMultipart); pPart = curl_mime_addpart(pMultipart);
@ -426,7 +428,8 @@ APP_ERROR ResultToHttpSrvEngine::Process()
jvRequest["isTheLast"] = pTrain->bIsEnd ? 1 : 0; // 是否最后一节: 0:否,1:是 jvRequest["isTheLast"] = pTrain->bIsEnd ? 1 : 0; // 是否最后一节: 0:否,1:是
jvRequest["startFrame"] = pTrain->iStartFrameId; //车厢开始帧 jvRequest["startFrame"] = pTrain->iStartFrameId; //车厢开始帧
jvRequest["endFrame"] = pTrain->iEndFrameId; //车厢结束帧 jvRequest["endFrame"] = pTrain->iEndFrameId; //车厢结束帧
jvRequest["skipFrame"] = dataSourceConfig.iSkipInterval; //跳帧 jvRequest["skipFrame"] = dataSourceConfig.iSkipInterval;
jvRequest["collectTime"] = MyUtils::getins()->Stamp2Time(pTrain->i64EndTimeStamp, true);//车厢切分的时间 //跳帧
if (!ResultToHttpSrv(jvRequest)) if (!ResultToHttpSrv(jvRequest))
{ {
// SaveHttpFailInfo(jvRequest, strFailSavePath_); // SaveHttpFailInfo(jvRequest, strFailSavePath_);

View File

@ -40,6 +40,8 @@ private:
bool SaveHttpFailInfo(Json::Value &jvRequest, std::string &strFilePath); bool SaveHttpFailInfo(Json::Value &jvRequest, std::string &strFilePath);
std::string strPort0_; std::string strPort0_;
std::string strUsername_;
std::string strPassword_;
std::string strURL_; std::string strURL_;
std::string strGetTokenURL_; std::string strGetTokenURL_;
std::string strImageSrv_; std::string strImageSrv_;

View File

@ -30,6 +30,7 @@ APP_ERROR FilterTrainStepOneEngine::Init()
iChkStopCount_ = MyYaml::GetIns()->GetIntValue("gc_chkstop_count"); iChkStopCount_ = MyYaml::GetIns()->GetIntValue("gc_chkstop_count");
iPartitionFrameNum_ = MyYaml::GetIns()->GetIntValue("partition_frame_span"); iPartitionFrameNum_ = MyYaml::GetIns()->GetIntValue("partition_frame_span");
iPlitFrameSpanPX_ = MyYaml::GetIns()->GetIntValue("gc_split_frame_span_px"); iPlitFrameSpanPX_ = MyYaml::GetIns()->GetIntValue("gc_split_frame_span_px");
iPushDirection_ = MyYaml::GetIns()->GetIntValue("gc_push_direction");
//获取主摄像头信息 //获取主摄像头信息
mainCfg_ = MyYaml::GetIns()->GetDataSourceConfigById(0); mainCfg_ = MyYaml::GetIns()->GetDataSourceConfigById(0);
@ -205,6 +206,13 @@ void FilterTrainStepOneEngine::AddBackInfo(std::shared_ptr<ProcessData> pProcess
{ {
return; return;
} }
if (iDirection_ == DIRECTION_RIGHT
&& trainBackInfo.strAllClassType == "SPACE"
&& (trainBackInfoTop.strAllClassType == "NUMSPACE" || trainBackInfoTop.strAllClassType == "SPACENUM"))
{
return;
}
if (iDirection_ == DIRECTION_LEFT if (iDirection_ == DIRECTION_LEFT
&& trainBackInfo.strAllClassType == "SPACE" && trainBackInfo.strAllClassType == "SPACE"
&& (trainBackInfoTop.strAllClassType == "NUMSPACE" || trainBackInfoTop.strAllClassType == "SPACENUM")) && (trainBackInfoTop.strAllClassType == "NUMSPACE" || trainBackInfoTop.strAllClassType == "SPACENUM"))
@ -262,20 +270,34 @@ bool FilterTrainStepOneEngine::IsEndDealBackInfo(std::shared_ptr<ProcessData> pP
for (size_t i = 0; i < pPostDataBack->vecPostSubData.size(); i++) for (size_t i = 0; i < pPostDataBack->vecPostSubData.size(); i++)
{ {
bool bFlag = (pPostDataBack->vecPostSubData[i].step1Location.fLTX <= pPostData->vecPostSubData[i].step1Location.fLTX); int bFlag = -1;
LogDebug << "帧:" << pProcessData->iFrameId << " 倒车前帧:" << pPostDataBack->iFrameId << " 恢复到原位:" << bFlag for (size_t j = 0; j < pPostData->vecPostSubData.size(); j++)
<< " 当前框位置:" << pPostData->vecPostSubData[i].step1Location.fLTX {
<< " 倒车前位置:" << pPostDataBack->vecPostSubData[i].step1Location.fLTX; if (pPostDataBack->vecPostSubData[i].iBigClassId == pPostData->vecPostSubData[j].iBigClassId)
if ((iDirection_ == DIRECTION_LEFT && !bFlag) || {
(iDirection_ == DIRECTION_RIGHT && bFlag)) if (pPostData->vecPostSubData[j].step1Location.fLTX < 1 || pPostDataBack->vecPostSubData[i].step1Location.fLTX < 1)
{
LogDebug << "大框X坐标小于1判定为异常大框。过滤";
break;
}
bFlag = (pPostDataBack->vecPostSubData[i].step1Location.fLTX <= pPostData->vecPostSubData[j].step1Location.fLTX) ? 1 : 0;
LogDebug << "帧:" << pProcessData->iFrameId << " 倒车前帧:" << pPostDataBack->iFrameId << " 恢复到原位:" << bFlag
<< " 当前框位置:" << pPostData->vecPostSubData[i].step1Location.fLTX
<< " 倒车前位置:" << pPostDataBack->vecPostSubData[i].step1Location.fLTX << "方向:" << iDirection_;
}
}
if ((iDirection_ == DIRECTION_LEFT && bFlag == 0) ||
(iDirection_ == DIRECTION_RIGHT && bFlag == 1))
{ {
bPopFlag = true; bPopFlag = true;
break; break;
} }
} }
if (bPopFlag) if (bPopFlag)
{ {
LogDebug << "frameId:" << pProcessData->iFrameId << " last one bPopFlag:" << bPopFlag; LogDebug << "frameId:" << pProcessData->iFrameId << " 恢复倒车前的位置:" << bPopFlag;
stackBackInfo_.pop(); stackBackInfo_.pop();
} }
} }
@ -767,7 +789,8 @@ void FilterTrainStepOneEngine::CalculateDirection(std::shared_ptr<ProcessData> p
} }
void FilterTrainStepOneEngine::sendComeTrain(const std::string strTrainDate, const std::string strTrainName, const int iDirection) { void FilterTrainStepOneEngine::sendComeTrain(const std::string strTrainDate, const std::string strTrainName, const int iDirection) {
std::string message = "{\"cometime\":\"" + strTrainDate + " " + strTrainName + "\",\"type\":\"1\",\"direction\":\"" + to_string(iDirection) + "\"}"; std::string message = "{\"cometime\":\"" + strTrainDate + " " + strTrainName + "\",\"type\":\"1\",\"direction\":" + to_string(iDirection == iPushDirection_ ? 1:-1) + "}";
LogWarn << message;
outputQueMap_[engineName_ + "_" + std::to_string(engineId_) + "_1"]->push(std::static_pointer_cast<void>(std::make_shared<std::string>(message))); outputQueMap_[engineName_ + "_" + std::to_string(engineId_) + "_1"]->push(std::static_pointer_cast<void>(std::make_shared<std::string>(message)));
} }
@ -915,6 +938,7 @@ void FilterTrainStepOneEngine::DealProcessDataPre(std::shared_ptr<ProcessData> p
{ {
//CalculateDirection(iterProcessData->second); //CalculateDirection(iterProcessData->second);
CalculateDirectionNew(iterProcessData->second); CalculateDirectionNew(iterProcessData->second);
if (iDirection_ != DIRECTION_UNKNOWN) this->sendComeTrain(pProcessData->strTrainDate, pProcessData->strTrainName, iDirection_);
} }
if (iDirection_ != DIRECTION_UNKNOWN) if (iDirection_ != DIRECTION_UNKNOWN)

View File

@ -58,6 +58,7 @@ private:
int iChkStopPX_; int iChkStopPX_;
int iChkStopCount_; int iChkStopCount_;
int iDirection_; //方向 int iDirection_; //方向
int iPushDirection_; //需要识别的方向
int rightFirst_; // 向右行驶的在前大框类型 int rightFirst_; // 向右行驶的在前大框类型
int leftFirst_; // 向左行驶的在前大框类型 int leftFirst_; // 向左行驶的在前大框类型
int iPartitionFrameNum_; //满足跨车厢的帧间隔 int iPartitionFrameNum_; //满足跨车厢的帧间隔

View File

@ -83,7 +83,10 @@ bool SaveCsvEngine::SaveMergerCsv(std::shared_ptr<Train> pTrain)
<< "inspection" << ',' << "inspection" << ','
<< "inspectionImg" << ',' << "inspectionImg" << ','
<< "containerImg_1" << ',' << "containerImg_1" << ','
<< "containerImg_2" << std::endl; << "containerImg_2" << ','
<< "startTime" << ','
<< "endTime"
<< std::endl;
} }
std::string strTime = pTrain->strTrainName; std::string strTime = pTrain->strTrainName;
@ -147,7 +150,10 @@ bool SaveCsvEngine::SaveMergerCsv(std::shared_ptr<Train> pTrain)
<< pTrain->chkDate.strChkDate1DeadLine << ',' << pTrain->chkDate.strChkDate1DeadLine << ','
<< szChkDateImgPath << ',' << szChkDateImgPath << ','
<< szContainer1ImgPath << ',' << szContainer1ImgPath << ','
<< szContainer2ImgPath << std::endl; << szContainer2ImgPath << ','
<< MyUtils::getins()->Stamp2Time(pTrain->i64StartTimeStamp, true) << ','
<< MyUtils::getins()->Stamp2Time(pTrain->i64EndTimeStamp, true)
<< std::endl;
outFile.close(); outFile.close();
} }
@ -427,7 +433,7 @@ bool SaveCsvEngine::SaveContainerCsv(std::shared_ptr<TrainContainer> pTrainConta
catch (const std::exception &) catch (const std::exception &)
{ {
LogError << "strCsvPath:" << strCsvPath << " container savecsv fail!"; LogError << "strCsvPath:" << strCsvPath << " container savecsv fail!";
continue; continue;
} }
} }
return true; return true;

View File

@ -4,50 +4,50 @@ using namespace std;
HardH264FFmpegDecode::HardH264FFmpegDecode() HardH264FFmpegDecode::HardH264FFmpegDecode()
{ {
; ;
} }
HardH264FFmpegDecode::~HardH264FFmpegDecode() HardH264FFmpegDecode::~HardH264FFmpegDecode()
{ {
; ;
} }
int HardH264FFmpegDecode::HardH264FFmpegDecoderInit(unsigned int uiWidth, unsigned int uiHeight, unsigned int uiFrameRate) int HardH264FFmpegDecode::HardH264FFmpegDecoderInit(unsigned int uiWidth, unsigned int uiHeight, unsigned int uiFrameRate)
{ {
uiWidth_ = uiWidth; uiHeight_ = uiHeight; uiWidth_ = uiWidth; uiHeight_ = uiHeight;
uiFrameRate_ = uiFrameRate; uiFrameRate_ = uiFrameRate;
iFrameFinished_ = 0; iFrameFinished_ = 0;
av_log_set_level(AV_LOG_ERROR); av_log_set_level(AV_LOG_ERROR);
// AVCodecID codec_id = AV_CODEC_ID_H264; //解码H264 // AVCodecID codec_id = AV_CODEC_ID_H264; //解码H264
// pCodec_ = avcodec_find_decoder(codec_id); //获取解码器 // pCodec_ = avcodec_find_decoder(codec_id); //获取解码器
pCodec_ = avcodec_find_decoder_by_name(NVIDIA_H264_DECODER); pCodec_ = avcodec_find_decoder_by_name(NVIDIA_H264_DECODER);
if (!pCodec_) { if (!pCodec_) {
fprintf(stderr, "Codec '%s' not found\n", pCodec_->long_name); fprintf(stderr, "Codec '%s' not found\n", pCodec_->long_name);
exit(1); exit(1);
} }
printf("Codec found with name %d(%s)\n", pCodec_->id, pCodec_->long_name); printf("Codec found with name %d(%s)\n", pCodec_->id, pCodec_->long_name);
//创建上下文 //创建上下文
pCodecCtx_ = avcodec_alloc_context3(pCodec_); pCodecCtx_ = avcodec_alloc_context3(pCodec_);
if (!pCodecCtx_){ if (!pCodecCtx_){
fprintf(stderr, "Could not allocate video codec context\n"); fprintf(stderr, "Could not allocate video codec context\n");
exit(1); exit(1);
} }
//创建解析器 //创建解析器
pCodecParserCtx_ = av_parser_init(pCodec_->id); pCodecParserCtx_ = av_parser_init(pCodec_->id);
if (!pCodecParserCtx_){ if (!pCodecParserCtx_){
fprintf(stderr, "parser not found\n"); fprintf(stderr, "parser not found\n");
exit(1); exit(1);
} }
//if(pCodec_->capabilities&CODEC_CAP_TRUNCATED) //if(pCodec_->capabilities&CODEC_CAP_TRUNCATED)
// pCodecCtx_->flags|= CODEC_FLAG_TRUNCATED; // pCodecCtx_->flags|= CODEC_FLAG_TRUNCATED;
//打开解码器 //打开解码器
int ret = avcodec_open2(pCodecCtx_, pCodec_, nullptr); int ret = avcodec_open2(pCodecCtx_, pCodec_, nullptr);
if (ret < 0) { if (ret < 0) {
fprintf(stderr, "Could not open codec\n"); fprintf(stderr, "Could not open codec\n");
@ -63,7 +63,7 @@ int HardH264FFmpegDecode::HardH264FFmpegDecoderInit(unsigned int uiWidth, unsign
} }
// av_init_packet(pPacket_); // av_init_packet(pPacket_);
//分配frame //分配frame
pSrcFrame_ = av_frame_alloc(); pSrcFrame_ = av_frame_alloc();
if (!pSrcFrame_) { if (!pSrcFrame_) {
fprintf(stderr, "Could not allocate video src pFrame\n"); fprintf(stderr, "Could not allocate video src pFrame\n");
@ -78,14 +78,14 @@ int HardH264FFmpegDecode::HardH264FFmpegDecoderInit(unsigned int uiWidth, unsign
printf("after align down video_width: %d, video_height: %d\n", uiWidth_, uiHeight_); printf("after align down video_width: %d, video_height: %d\n", uiWidth_, uiHeight_);
//初始化解析器参数 //初始化解析器参数
pCodecCtx_->time_base.num = 1; pCodecCtx_->time_base.num = 1;
pCodecCtx_->frame_number = 1; //每包一个视频帧 pCodecCtx_->frame_number = 1; //每包一个视频帧
pCodecCtx_->codec_type = AVMEDIA_TYPE_VIDEO; pCodecCtx_->codec_type = AVMEDIA_TYPE_VIDEO;
pCodecCtx_->bit_rate = 0; pCodecCtx_->bit_rate = 0;
pCodecCtx_->time_base.den = uiFrameRate_;//帧率 pCodecCtx_->time_base.den = uiFrameRate_;//帧率
pCodecCtx_->width = uiWidth_; //视频宽 pCodecCtx_->width = uiWidth_; //视频宽
pCodecCtx_->height = uiHeight_; //视频高 pCodecCtx_->height = uiHeight_; //视频高
// pCodecCtx_->pix_fmt = AV_PIX_FMT_YUV420P; // pCodecCtx_->pix_fmt = AV_PIX_FMT_YUV420P;
int bufferSize = av_image_get_buffer_size(AV_PIX_FMT_YUV420P, int bufferSize = av_image_get_buffer_size(AV_PIX_FMT_YUV420P,
@ -102,46 +102,46 @@ int HardH264FFmpegDecode::HardH264FFmpegDecoderInit(unsigned int uiWidth, unsign
printf("pDstFrame_->linesize: %d, bufferSize: %d\n", pDstFrame_->linesize, bufferSize); printf("pDstFrame_->linesize: %d, bufferSize: %d\n", pDstFrame_->linesize, bufferSize);
pSwsContext_ = sws_getContext(pCodecCtx_->width, pCodecCtx_->height, pCodecCtx_->pix_fmt, pSwsContext_ = sws_getContext(pCodecCtx_->width, pCodecCtx_->height, pCodecCtx_->pix_fmt,
pCodecCtx_->width, pCodecCtx_->height, AV_PIX_FMT_YUV420P, SWS_BICUBIC, nullptr, nullptr, nullptr); pCodecCtx_->width, pCodecCtx_->height, AV_PIX_FMT_YUV420P, SWS_BICUBIC, nullptr, nullptr, nullptr);
printf("pCodecCtx_->width: %d, pCodecCtx_->height: %d, pCodecCtx_->pix_fmt: %d\n", pCodecCtx_->width, pCodecCtx_->height, pCodecCtx_->pix_fmt); printf("pCodecCtx_->width: %d, pCodecCtx_->height: %d, pCodecCtx_->pix_fmt: %d\n", pCodecCtx_->width, pCodecCtx_->height, pCodecCtx_->pix_fmt);
return 0; return 0;
} }
int HardH264FFmpegDecode::HardH264FFmpegDecoderDeInit() int HardH264FFmpegDecode::HardH264FFmpegDecoderDeInit()
{ {
if(pu8OutBuffer_){ if(pu8OutBuffer_){
av_free(pu8OutBuffer_); av_free(pu8OutBuffer_);
pu8OutBuffer_ = nullptr; pu8OutBuffer_ = nullptr;
} }
if(pSrcFrame_){ if(pSrcFrame_){
av_frame_free(&pSrcFrame_); av_frame_free(&pSrcFrame_);
pSrcFrame_ = nullptr; pSrcFrame_ = nullptr;
} }
if(pDstFrame_){ if(pDstFrame_){
av_frame_free(&pDstFrame_); av_frame_free(&pDstFrame_);
pDstFrame_ = nullptr; pDstFrame_ = nullptr;
} }
if(pPacket_){ if(pPacket_){
av_packet_free(&pPacket_); av_packet_free(&pPacket_);
pPacket_ = nullptr; pPacket_ = nullptr;
} }
if(pCodecParserCtx_){ if(pCodecParserCtx_){
av_parser_close(pCodecParserCtx_); av_parser_close(pCodecParserCtx_);
pCodecParserCtx_ = nullptr; pCodecParserCtx_ = nullptr;
} }
if(pCodecCtx_){ if(pCodecCtx_){
avcodec_close(pCodecCtx_); avcodec_close(pCodecCtx_);
av_free(pCodecCtx_); av_free(pCodecCtx_);
pCodecCtx_ = nullptr; pCodecCtx_ = nullptr;
} }
if(pSwsContext_){ if(pSwsContext_){
sws_freeContext(pSwsContext_); sws_freeContext(pSwsContext_);
pSwsContext_ = nullptr; pSwsContext_ = nullptr;
} }
} }
int HardH264FFmpegDecode::HardH264FFmpegDecoderFilterGraph(AVFilterGraph *pGraph, AVFilterContext *pSourceCtx, AVFilterContext *pSinkCtx) int HardH264FFmpegDecode::HardH264FFmpegDecoderFilterGraph(AVFilterGraph *pGraph, AVFilterContext *pSourceCtx, AVFilterContext *pSinkCtx)
@ -149,7 +149,7 @@ int HardH264FFmpegDecode::HardH264FFmpegDecoderFilterGraph(AVFilterGraph *pGraph
int ret; int ret;
AVFilterInOut *pOutputs = nullptr, *pInputs = nullptr; AVFilterInOut *pOutputs = nullptr, *pInputs = nullptr;
if ((ret = avfilter_link(pSourceCtx, 0, pSinkCtx, 0)) >= 0){ if ((ret = avfilter_link(pSourceCtx, 0, pSinkCtx, 0)) >= 0){
ret = avfilter_graph_config(pGraph, nullptr); ret = avfilter_graph_config(pGraph, nullptr);
} }
avfilter_inout_free(&pOutputs); avfilter_inout_free(&pOutputs);
@ -168,14 +168,14 @@ int HardH264FFmpegDecode::HardH264FFmpegDecoderConfigureVideoFilters(AVFilterGra
"video_size=%dx%d:pix_fmt=%d:time_base=1/1200000", "video_size=%dx%d:pix_fmt=%d:time_base=1/1200000",
iWidth, iHeight, iFormat); iWidth, iHeight, iFormat);
if ((ret = avfilter_graph_create_filter(&pFiltSrc, if ((ret = avfilter_graph_create_filter(&pFiltSrc,
avfilter_get_by_name("buffer"), "ffplay_buffer", BufferSrcArgs, avfilter_get_by_name("buffer"), "ffplay_buffer", BufferSrcArgs,
nullptr, pGraph)) < 0){ nullptr, pGraph)) < 0){
goto fail; goto fail;
} }
ret = avfilter_graph_create_filter(&pFiltDst, ret = avfilter_graph_create_filter(&pFiltDst,
avfilter_get_by_name("buffersink"), avfilter_get_by_name("buffersink"),
"ffplay_buffersink", nullptr, nullptr, pGraph); "ffplay_buffersink", nullptr, nullptr, pGraph);
if (ret < 0){ if (ret < 0){
goto fail; goto fail;
} }
@ -190,14 +190,14 @@ int HardH264FFmpegDecode::HardH264FFmpegDecoderConfigureVideoFilters(AVFilterGra
pDecoderFilterIn = pFiltSrc; pDecoderFilterIn = pFiltSrc;
pDecoderFilterOut = pFiltDst; pDecoderFilterOut = pFiltDst;
fail: fail:
return ret; return ret;
} }
int HardH264FFmpegDecode::HardH264FFmpegDecoder(AVCodecContext *pDecCtx, AVFrame *pFrame, AVPacket *pPkt, void* pOutputData, unsigned int* puiOutputDataSize) int HardH264FFmpegDecode::HardH264FFmpegDecoder(AVCodecContext *pDecCtx, AVFrame *pFrame, AVPacket *pPkt, void* pOutputData, unsigned int* puiOutputDataSize)
{ {
int ret; int ret;
AVFilterGraph* pDecoderGraph = nullptr; AVFilterGraph* pDecoderGraph = nullptr;
ret = avcodec_send_packet(pDecCtx, pPkt); //接收packet解码 ret = avcodec_send_packet(pDecCtx, pPkt); //接收packet解码
if (ret < 0) { if (ret < 0) {
@ -208,7 +208,7 @@ int HardH264FFmpegDecode::HardH264FFmpegDecoder(AVCodecContext *pDecCtx, AVFrame
while (ret >= 0) { while (ret >= 0) {
ret = avcodec_receive_frame(pDecCtx, pFrame); //解码 ret = avcodec_receive_frame(pDecCtx, pFrame); //解码
if (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF){ if (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF){
fprintf(stderr, "During decoding eof\n"); fprintf(stderr, "During decoding eof\n");
return -1; return -1;
} }
else if (ret < 0) { else if (ret < 0) {
@ -219,7 +219,7 @@ int HardH264FFmpegDecode::HardH264FFmpegDecoder(AVCodecContext *pDecCtx, AVFrame
//printf("saving frame %3d\n", pDecCtx->frame_number); //printf("saving frame %3d\n", pDecCtx->frame_number);
fflush(stdout); fflush(stdout);
AVFilterContext *pDecoderFilterIn = nullptr, *pDecoderFilterOut = nullptr; AVFilterContext *pDecoderFilterIn = nullptr, *pDecoderFilterOut = nullptr;
// pFrame->width = ALIGN_DOWN(pFrame->width, 32); // pFrame->width = ALIGN_DOWN(pFrame->width, 32);
// pFrame->height = ALIGN_DOWN(pFrame->height, 32); // pFrame->height = ALIGN_DOWN(pFrame->height, 32);
@ -228,26 +228,26 @@ int HardH264FFmpegDecode::HardH264FFmpegDecoder(AVCodecContext *pDecCtx, AVFrame
pDecoderGraph = avfilter_graph_alloc(); pDecoderGraph = avfilter_graph_alloc();
HardH264FFmpegDecoderConfigureVideoFilters(pDecoderGraph, pDecoderFilterIn, pDecoderFilterOut, pFrame->width, pFrame->height, pFrame->format); HardH264FFmpegDecoderConfigureVideoFilters(pDecoderGraph, pDecoderFilterIn, pDecoderFilterOut, pFrame->width, pFrame->height, pFrame->format);
if (pFrame->format != AV_PIX_FMT_YUV420P){ if (pFrame->format != AV_PIX_FMT_YUV420P){
DUMP_FRAME(pFrame); DUMP_FRAME(pFrame);
ret = av_buffersrc_add_frame(pDecoderFilterIn, pFrame); ret = av_buffersrc_add_frame(pDecoderFilterIn, pFrame);
ret = av_buffersink_get_frame_flags(pDecoderFilterOut, pFrame, 0); ret = av_buffersink_get_frame_flags(pDecoderFilterOut, pFrame, 0);
DUMP_FRAME(pFrame); DUMP_FRAME(pFrame);
int iSize = pFrame->width * pFrame->height; int iSize = pFrame->width * pFrame->height;
memcpy(pOutputData, pFrame->data[0], iSize); //Y memcpy(pOutputData, pFrame->data[0], iSize); //Y
memcpy(pOutputData+iSize, pFrame->data[1], iSize/4); //U memcpy(pOutputData+iSize, pFrame->data[1], iSize/4); //U
memcpy(pOutputData+iSize+iSize/4, pFrame->data[2], iSize/4); //V memcpy(pOutputData+iSize+iSize/4, pFrame->data[2], iSize/4); //V
*puiOutputDataSize = iSize*3/2; *puiOutputDataSize = iSize*3/2;
return iSize*3/2; return iSize*3/2;
} }
} }
return 0; return 0;
} }
int HardH264FFmpegDecode::HardH264FFmpegDecoderV2(AVCodecContext *pDecCtx, AVFrame *pSrcFrame, AVFrame *pDstFrame, AVPacket *pPkt, void* pOutputData,unsigned int* puiOutputDataSize) int HardH264FFmpegDecode::HardH264FFmpegDecoderV2(AVCodecContext *pDecCtx, SwsContext *pSwsCtx, AVFrame *pSrcFrame, AVFrame *pDstFrame, AVPacket *pPkt, void* pOutputData,unsigned int* puiOutputDataSize)
{ {
int ret; int ret;
ret = avcodec_send_packet(pDecCtx, pPkt); //接收packet解码 ret = avcodec_send_packet(pDecCtx, pPkt); //接收packet解码
if (ret < 0) { if (ret < 0) {
@ -258,7 +258,7 @@ int HardH264FFmpegDecode::HardH264FFmpegDecoderV2(AVCodecContext *pDecCtx, AVFra
while (ret >= 0) { while (ret >= 0) {
ret = avcodec_receive_frame(pDecCtx, pSrcFrame); //解码 ret = avcodec_receive_frame(pDecCtx, pSrcFrame); //解码
if (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF){ if (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF){
fprintf(stderr, "During decoding eof\n"); fprintf(stderr, "During decoding eof\n");
return -1; return -1;
} }
else if (ret < 0) { else if (ret < 0) {
@ -266,32 +266,27 @@ int HardH264FFmpegDecode::HardH264FFmpegDecoderV2(AVCodecContext *pDecCtx, AVFra
exit(1); exit(1);
} }
// pDecCtx->width = ALIGN_DOWN(pDecCtx->width, 32); // pDecCtx->width = ALIGN_DOWN(pDecCtx->width, 32);
// pDecCtx->height = ALIGN_DOWN(pDecCtx->height, 32); // pDecCtx->height = ALIGN_DOWN(pDecCtx->height, 32);
// sws_scale(pSwsCtx, sws_scale(pSwsCtx,
// (const uint8_t *const *)pSrcFrame->data, (const uint8_t *const *)pSrcFrame->data,
// pSrcFrame->linesize, pSrcFrame->linesize,
// 0, 0,
// pDecCtx->height, pDecCtx->height,
// pDstFrame->data, pDstFrame->data,
// pDstFrame->linesize); pDstFrame->linesize);
//printf("saving frame %3d\n", pDecCtx->frame_number); //printf("saving frame %3d\n", pDecCtx->frame_number);
// fflush(stdout); fflush(stdout);
// int iSize = pDecCtx->width * pDecCtx->height; int iSize = pDecCtx->width * pDecCtx->height;
//
// memcpy(pOutputData, pDstFrame->data[0], iSize); //Y memcpy(pOutputData, pDstFrame->data[0], iSize); //Y
// memcpy(pOutputData+iSize, pDstFrame->data[1], iSize/4); //U memcpy(pOutputData+iSize, pDstFrame->data[1], iSize/4); //U
// memcpy(pOutputData+iSize+iSize/4, pDstFrame->data[2], iSize/4); //V memcpy(pOutputData+iSize+iSize/4, pDstFrame->data[2], iSize/4); //V
// *puiOutputDataSize = iSize*3/2; *puiOutputDataSize = iSize*3/2;
// return iSize*3/2; return iSize*3/2;
memcpy(pOutputData, pSrcFrame->data[0], pSrcFrame->width * pSrcFrame->height); // Y }
memcpy(pOutputData + pSrcFrame->width * pSrcFrame->height, pSrcFrame->data[1], pSrcFrame->width * pSrcFrame->height / 4); // U return 0;
memcpy(pOutputData + pSrcFrame->width * pSrcFrame->height + pSrcFrame->width * pSrcFrame->height / 4, pSrcFrame->data[2], pSrcFrame->width * pSrcFrame->height / 4); // V
*puiOutputDataSize = pSrcFrame->width * pSrcFrame->height * 3 / 2;
return pSrcFrame->width * pSrcFrame->height * 3 / 2;
}
return 0;
} }

View File

@ -68,11 +68,11 @@ public:
int HardH264FFmpegDecoderInit(unsigned int uiWidth, unsigned int uiHeight, unsigned int uiFrameRate = 30); int HardH264FFmpegDecoderInit(unsigned int uiWidth, unsigned int uiHeight, unsigned int uiFrameRate = 30);
int HardH264FFmpegDecoderDeInit(); int HardH264FFmpegDecoderDeInit();
int HardH264FFmpegDecoder(AVCodecContext *pDecCtx, AVFrame *pFrame, AVPacket *pPkt, void* pOutputData, unsigned int* puiOutputDataSize); int HardH264FFmpegDecoder(AVCodecContext *pDecCtx, AVFrame *pFrame, AVPacket *pPkt, void* pOutputData, unsigned int* puiOutputDataSize);
int HardH264FFmpegDecoderV2(AVCodecContext *pDecCtx, AVFrame *pSrcFrame, AVFrame *pDstFrame, AVPacket *pPkt, void* pOutputData, unsigned int* puiOutputDataSize); int HardH264FFmpegDecoderV2(AVCodecContext *pDecCtx, SwsContext *pSwsCtx, AVFrame *pSrcFrame, AVFrame *pDstFrame, AVPacket *pPkt, void* pOutputData, unsigned int* puiOutputDataSize);
const AVCodec *pCodec_ = nullptr; //解码器 const AVCodec *pCodec_ = nullptr; //解码器
AVCodecContext *pCodecCtx_ = nullptr; //上下文 AVCodecContext *pCodecCtx_ = nullptr; //上下文
AVCodecParserContext *pCodecParserCtx_ = nullptr; //解析器上下文 AVCodecParserContext *pCodecParserCtx_ = nullptr; //解析器上下文
AVFrame *pSrcFrame_ = nullptr; AVFrame *pSrcFrame_ = nullptr;
AVFrame *pDstFrame_ = nullptr; AVFrame *pDstFrame_ = nullptr;
AVPacket *pPacket_ = nullptr; AVPacket *pPacket_ = nullptr;

View File

@ -56,7 +56,7 @@ APP_ERROR VideoDecodeEngine::Process()
} }
int iRet = APP_ERR_OK; int iRet = APP_ERR_OK;
int iSkipCount = 1; int iSkipCount = 1;
int iNoCameraDataCnt = 0; int iNoCameraDataCnt = 0;
while (!isStop_) while (!isStop_)
{ {
@ -67,19 +67,19 @@ APP_ERROR VideoDecodeEngine::Process()
{ {
usleep(10*1000); //10ms usleep(10*1000); //10ms
// iNoCameraDataCnt++; // iNoCameraDataCnt++;
// if (iNoCameraDataCnt >= 1000) //10秒内收不到认为相机断开 // if (iNoCameraDataCnt >= 1000) //10秒内收不到认为相机断开
// { // {
// LogError << "engineId:" << engineId_ << " 超过10秒获取到摄像头数据疑似摄像头断开。计数" << iNoCameraDataCnt; // LogError << "engineId:" << engineId_ << " 超过10秒获取到摄像头数据疑似摄像头断开。计数" << iNoCameraDataCnt;
// iNoCameraDataCnt = 0; // iNoCameraDataCnt = 0;
// //camera异常时构造空的解码数据push确保一直有数据流转到后面Engine // //camera异常时构造空的解码数据push确保一直有数据流转到后面Engine
// std::shared_ptr<ProcessData> pProcessData = std::make_shared<ProcessData>(); // std::shared_ptr<ProcessData> pProcessData = std::make_shared<ProcessData>();
// pProcessData->iDataSource = engineId_; // pProcessData->iDataSource = engineId_;
// pProcessData->i64TimeStamp = MyUtils::getins()->GetCurrentTimeMillis(); // pProcessData->i64TimeStamp = MyUtils::getins()->GetCurrentTimeMillis();
// pProcessData->iSize = 0; // pProcessData->iSize = 0;
// pProcessData->pData = nullptr; // pProcessData->pData = nullptr;
// iRet = outputQueMap_[strPort0_]->push(std::static_pointer_cast<void>(pProcessData)); // iRet = outputQueMap_[strPort0_]->push(std::static_pointer_cast<void>(pProcessData));
// } // }
continue; continue;
} }
@ -111,14 +111,15 @@ APP_ERROR VideoDecodeEngine::Process()
std::shared_ptr<void> pYUVData; std::shared_ptr<void> pYUVData;
pYUVData.reset(pYUV420MBuffer, [](void *data){if(data) {delete[] data; data = nullptr;}}); //智能指针管理内存 pYUVData.reset(pYUV420MBuffer, [](void *data){if(data) {delete[] data; data = nullptr;}}); //智能指针管理内存
hard_h264_ffmpeg_decoder_->pPacket_->data = static_cast<uint8_t *>(pProcessData->pData.get()); //这里填入一个指向完整H264数据帧的指针 hard_h264_ffmpeg_decoder_->pPacket_->data = static_cast<uint8_t *>(pProcessData->pData.get()); //这里填入一个指向完整H264数据帧的指针
hard_h264_ffmpeg_decoder_->pPacket_->size = pProcessData->iSize; //这个填入H264数据帧的大小 hard_h264_ffmpeg_decoder_->pPacket_->size = pProcessData->iSize; //这个填入H264数据帧的大小
// H264硬件解码 // H264硬件解码
// int iDecodeRet= hard_h264_ffmpeg_decoder_->HardH264FFmpegDecoderV2(hard_h264_ffmpeg_decoder_->pCodecCtx_, hard_h264_ffmpeg_decoder_->pFrame_, // int iDecodeRet= hard_h264_ffmpeg_decoder_->HardH264FFmpegDecoderV2(hard_h264_ffmpeg_decoder_->pCodecCtx_, hard_h264_ffmpeg_decoder_->pFrame_,
// hard_h264_ffmpeg_decoder_->pPacket_, pYUV420MBuffer, &pYUV420MBuffer_Size); // hard_h264_ffmpeg_decoder_->pPacket_, pYUV420MBuffer, &pYUV420MBuffer_Size);
int iDecodeRet = hard_h264_ffmpeg_decoder_->HardH264FFmpegDecoderV2(hard_h264_ffmpeg_decoder_->pCodecCtx_, int iDecodeRet = hard_h264_ffmpeg_decoder_->HardH264FFmpegDecoderV2(hard_h264_ffmpeg_decoder_->pCodecCtx_,
hard_h264_ffmpeg_decoder_->pSwsContext_,
hard_h264_ffmpeg_decoder_->pSrcFrame_, hard_h264_ffmpeg_decoder_->pSrcFrame_,
hard_h264_ffmpeg_decoder_->pDstFrame_, hard_h264_ffmpeg_decoder_->pDstFrame_,
hard_h264_ffmpeg_decoder_->pPacket_, hard_h264_ffmpeg_decoder_->pPacket_,

View File

@ -14,7 +14,7 @@ APP_ERROR TrainStepOneEngine::Init()
bUseEngine_ = MyUtils::getins()->ChkIsHaveTarget("NUM"); bUseEngine_ = MyUtils::getins()->ChkIsHaveTarget("NUM");
if (!bUseEngine_) if (!bUseEngine_)
{ {
LogWarn << "engineId_:" << engineId_ << " not use engine"; LogInfo << "engineId_:" << engineId_ << " not use engine";
return APP_ERR_OK; return APP_ERR_OK;
} }
@ -98,7 +98,7 @@ APP_ERROR TrainStepOneEngine::InitModel()
int nRet = yolov5model.YoloV5ClearityInferenceInit(&modelinfo, strModelName, modelConfig_.strOmPath); int nRet = yolov5model.YoloV5ClearityInferenceInit(&modelinfo, strModelName, modelConfig_.strOmPath);
if (nRet != 0) if (nRet != 0)
{ {
LogInfo << "YoloV5ClassifyInferenceInit nRet:" << nRet; LogError << "YoloV5ClassifyInferenceInit nRet:" << nRet;
return APP_ERR_COMM_READ_FAIL; return APP_ERR_COMM_READ_FAIL;
} }
return APP_ERR_OK; return APP_ERR_OK;
@ -147,7 +147,7 @@ APP_ERROR TrainStepOneEngine::DeInit()
{ {
if (!bUseEngine_) if (!bUseEngine_)
{ {
LogWarn << "engineId_:" << engineId_ << " not use engine"; LogInfo << "engineId_:" << engineId_ << " not use engine";
return APP_ERR_OK; return APP_ERR_OK;
} }
@ -170,7 +170,7 @@ void TrainStepOneEngine::PushData(const std::string &strPort, const std::shared_
int iRet = outputQueMap_[strPort]->push(std::static_pointer_cast<void>(pProcessData)); int iRet = outputQueMap_[strPort]->push(std::static_pointer_cast<void>(pProcessData));
if (iRet != 0) if (iRet != 0)
{ {
LogDebug << "sourceid:" << pProcessData->iDataSource << " frameid:" << pProcessData->iFrameId << " push fail iRet:" << iRet; LogError << " frameid:" << pProcessData->iFrameId << " push fail iRet:" << iRet;
if (iRet == 2) if (iRet == 2)
{ {
usleep(10000); // 10ms usleep(10000); // 10ms
@ -204,9 +204,9 @@ void TrainStepOneEngine::FilterInvalidInfo(std::vector<stDetection> &vecRet, std
it->bbox[3] <= dataSourceCfg.fIdentifyAreasRBY)) it->bbox[3] <= dataSourceCfg.fIdentifyAreasRBY))
{ {
LogDebug << "frameId:" << pProcessData->iFrameId LogDebug << "frameId:" << pProcessData->iFrameId
<< " bigclassid:" << it->class_id << " 超出识别区域-识别区域:(" << " bigclassid:" << it->class_id << " 超出识别区域-识别区域:("
<< dataSourceCfg.fIdentifyAreasLTX << "," << dataSourceCfg.fIdentifyAreasLTY << "),(" << dataSourceCfg.fIdentifyAreasLTX << "," << dataSourceCfg.fIdentifyAreasLTY << "),("
<< dataSourceCfg.fIdentifyAreasRBX << "," << dataSourceCfg.fIdentifyAreasRBY << ")"; << dataSourceCfg.fIdentifyAreasRBX << "," << dataSourceCfg.fIdentifyAreasRBY << ")";
it = vecRet.erase(it); it = vecRet.erase(it);
continue; continue;
} }
@ -219,19 +219,33 @@ void TrainStepOneEngine::FilterInvalidInfo(std::vector<stDetection> &vecRet, std
continue; continue;
} }
// 去除车头车尾的间隔信息 // 去除车头时的非车头编号信息
if(pProcessData->nMonitorState == MONITOR_MODEL_TRAIN_HEAD ) if(pProcessData->nMonitorState == MONITOR_MODEL_TRAIN_HEAD )
{ {
if(it->class_id != TRAIN_HEAD) if(it->class_id != TRAIN_HEAD)
{ {
LogDebug << " 帧号:" << pProcessData->iFrameId LogDebug << " 帧号:" << pProcessData->iFrameId
<< " 大类:" << it->class_id << " 识别于车头位置,无效!"; << " 大类:" << it->class_id << " 识别于车头位置,无效!";
it = vecRet.erase(it); it = vecRet.erase(it);
continue; continue;
} }
} }
// 去除车尾的车头编号信息
if (pProcessData->nMonitorState != MONITOR_MODEL_TRAIN_HEAD)
{
if (it->class_id == TRAIN_HEAD)
{
LogDebug << " 帧号:" << pProcessData->iFrameId
<< " 大类:" << it->class_id << " 识别于非车头位置,无效!";
it = vecRet.erase(it);
continue;
}
}
// 去除车尾的间隔信息
if (pProcessData->nMonitorState == MONITOR_MODEL_TRAIN_TAIL if (pProcessData->nMonitorState == MONITOR_MODEL_TRAIN_TAIL
&& ((it->class_id >= 9 && it->class_id <= 17 && it->class_id != 15) || it->class_id == 18)) && ((it->class_id >= 9 && it->class_id <= 17 && it->class_id != 15) || it->class_id == 18))
{ {
LogDebug << " frameId:" << pProcessData->iFrameId LogDebug << " frameId:" << pProcessData->iFrameId
<< " bigclassid:" << it->class_id << " bigclassid:" << it->class_id
@ -239,6 +253,17 @@ void TrainStepOneEngine::FilterInvalidInfo(std::vector<stDetection> &vecRet, std
it = vecRet.erase(it); it = vecRet.erase(it);
continue; continue;
} }
// 过滤掉识别于模型反馈无车状态下的所有大框信息
if (pProcessData->nMonitorState == MONITOR_MODEL_NO_TRAIN)
{
LogDebug << " frameId:" << pProcessData->iFrameId
<< " bigclassid:" << it->class_id
<<" 识别于模型反馈的无车状态下,无效!";
it = vecRet.erase(it);
continue;
}
// 按大框高度剔除远股道识别的信息 // 按大框高度剔除远股道识别的信息
int iClassHeight = it->bbox[3] - it->bbox[1]; int iClassHeight = it->bbox[3] - it->bbox[1];
if (dataSourceCfg.mapClassMinH.find(it->class_id) != dataSourceCfg.mapClassMinH.end() && if (dataSourceCfg.mapClassMinH.find(it->class_id) != dataSourceCfg.mapClassMinH.end() &&
@ -273,7 +298,7 @@ void TrainStepOneEngine::FilterInvalidInfo(std::vector<stDetection> &vecRet, std
continue; continue;
} }
if (((it->class_id >= 2 && it->class_id <= 6) || it->class_id == J_TRAIN_NUM || it->class_id == W_TRAIN_NUM) && if (((it->class_id >= 2 && it->class_id <= 6) || it->class_id == J_TRAIN_NUM || it->class_id == W_TRAIN_NUM) &&
(it->bbox[3] - it->bbox[1]) < MyYaml::GetIns()->GetIntValue("gc_num_frame_height")) (it->bbox[3] - it->bbox[1]) < MyYaml::GetIns()->GetIntValue("gc_num_frame_height"))
{ {
LogWarn << "疑似误识别到远股道车号,帧号:" << pProcessData->iFrameId LogWarn << "疑似误识别到远股道车号,帧号:" << pProcessData->iFrameId
<< "大框高度:" << (it->bbox[3] - it->bbox[1]); << "大框高度:" << (it->bbox[3] - it->bbox[1]);
@ -282,7 +307,7 @@ void TrainStepOneEngine::FilterInvalidInfo(std::vector<stDetection> &vecRet, std
} }
if ((it->class_id == 1 || it->class_id == TRAIN_PRO) if ((it->class_id == 1 || it->class_id == TRAIN_PRO)
&& (it->bbox[3] - it->bbox[1]) < MyYaml::GetIns()->GetIntValue("gc_pro_frame_height")) { && (it->bbox[3] - it->bbox[1]) < MyYaml::GetIns()->GetIntValue("gc_pro_frame_height")) {
LogWarn << "疑似误识别到远股道属性,帧号:" << pProcessData->iFrameId LogWarn << "疑似误识别到远股道属性,帧号:" << pProcessData->iFrameId
<< "大框高度:" << (it->bbox[3] - it->bbox[1]); << "大框高度:" << (it->bbox[3] - it->bbox[1]);
it = vecRet.erase(it); it = vecRet.erase(it);
@ -388,7 +413,7 @@ APP_ERROR TrainStepOneEngine::Process()
{ {
if (!bUseEngine_) if (!bUseEngine_)
{ {
LogWarn << "engineId_:" << engineId_ << " not use engine"; LogInfo << "engineId_:" << engineId_ << " not use engine";
return APP_ERR_OK; return APP_ERR_OK;
} }
int iRet = APP_ERR_OK; int iRet = APP_ERR_OK;
@ -409,7 +434,7 @@ APP_ERROR TrainStepOneEngine::Process()
pPostData->iModelType = MODELTYPE_NUM; pPostData->iModelType = MODELTYPE_NUM;
pPostData->nMonitorState = pProcessData->nMonitorState; //来车检测的四个分类 pPostData->nMonitorState = pProcessData->nMonitorState; //来车检测的四个分类
//获取图片 //获取图片
if (pProcessData->iStatus == TRAINSTATUS_RUN || pProcessData->bIsEnd) if (pProcessData->iStatus == TRAINSTATUS_RUN || pProcessData->bIsEnd)
{ {
if (pProcessData->pData != nullptr && pProcessData->iSize != 0) if (pProcessData->pData != nullptr && pProcessData->iSize != 0)
@ -418,10 +443,7 @@ APP_ERROR TrainStepOneEngine::Process()
//进行推理 //进行推理
std::vector<stDetection> res; std::vector<stDetection> res;
//auto start = std::chrono::system_clock::now(); //计时开始
yolov5model.YoloV5ClearityInferenceModel(img, res); yolov5model.YoloV5ClearityInferenceModel(img, res);
//auto end = std::chrono::system_clock::now();
//LogInfo << "nopr1 inference time: " << std::chrono::duration_cast<std::chrono::milliseconds>(end - start).count() << "ms";
//过滤无效信息 //过滤无效信息
FilterInvalidInfo(res, pProcessData); FilterInvalidInfo(res, pProcessData);