Merge pull request 'Test: update part of the logic' (#2) from Test into main

Reviewed-on: #2
This commit is contained in:
zhangwei 2024-05-22 09:29:56 +00:00
commit ac73334039
33 changed files with 912 additions and 961 deletions

.gitignore vendored Normal file
View File

@ -0,0 +1,7 @@
/build/
/app/
*.tgz
*.tar
*.log
*.o
*.out

View File

@ -20,6 +20,8 @@ find_package(OpenCV REQUIRED)
# message(STATUS "${OpenCV_LIBS}")
# message(STATUS "${OpenCV_INCLUDE_DIRS}")
find_package(CUDA REQUIRED)
#
set(CMAKE_SYSTEM_NAME Linux)
set(CMAKE_C_COMPILER "gcc")
@ -39,8 +41,12 @@ set(SYS_USR_INCLUDE_DIR "/usr/include")
set(SYS_USR_LIB_DIR "/usr/lib")
set(SYS_USR_LOCAL_INCLUDE_DIR "/usr/local/include")
set(SYS_USR_LOCAL_LIB_DIR "/usr/local/lib")
# -- for x86 builds --
set(AARCH64_LINUX_INCLUDE_DIR "/usr/include/x86_64-linux-gnu")
set(AARCH64_LINUX_LIB_DIR "/usr/lib/x86_64-linux-gnu")
# -- for ARM builds --
#set(AARCH64_LINUX_INCLUDE_DIR "/usr/include/aarch64-linux-gnu")
#set(AARCH64_LINUX_LIB_DIR "/usr/lib/aarch64-linux-gnu")
# opencv 3.2.0: /usr/lib/aarch64-linux-gnu, /usr/include/opencv2
@ -60,7 +66,7 @@ set(DRM_INCLUDE_DIR ${SYS_USR_INCLUDE_DIR}/libdrm) # DRM headers are under /usr/include/libdrm
set(TEGRA_LIB_DIR ${AARCH64_LINUX_LIB_DIR}/tegra) # tegra libraries are under /usr/lib/aarch64-linux-gnu/tegra
set(PCL_INCLUDE ${SYS_USR_LOCAL_INCLUDE_DIR}/pcl-1.7) #pcl
#set(PCL_INCLUDE ${SYS_USR_LOCAL_INCLUDE_DIR}/pcl-1.7) #pcl
# nvidia ascend common include
@ -96,13 +102,13 @@ include_directories(
${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/common_engine/DataUploadEngine
${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/common_engine/FilterEngine
${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/common_engine/MergerEngine
${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/common_engine/PixelFormatConvertEngine
${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/common_engine/SaveEngine
${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/common_engine/SelectBestEngine
${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/common_engine/TrainAnaEngine
${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/common_engine/TransEngine
${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/common_engine/DataDealEngine
${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/common_engine/SocketEngine
${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/common_engine/DeleteExpiredFolderEngine
#common tools rtsp_server include
${PROJECT_SOURCE_DIR}/nvidia_ascend_tools/common_tools/rtsp_server/3rdpart/md5
@ -133,7 +139,7 @@ include_directories(
${OpenCV_DIR}
${AARCH64_LINUX_INCLUDE_DIR}
${SYS_USR_LOCAL_INCLUDE_DIR}
${PCL_INCLUDE}
# ${PCL_INCLUDE}
)
@ -171,7 +177,6 @@ file(GLOB_RECURSE COMMON_SRCS_LISTS
#common engine src
${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/common/*.cpp
${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/common_engine/DataSourceEngine/*.cpp
${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/common_engine/PixelFormatConvertEngine/*.cpp
${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/common_engine/ControlEngine/*.cpp
${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/common_engine/DataUploadEngine/*.cpp
@ -182,6 +187,10 @@ file(GLOB_RECURSE COMMON_SRCS_LISTS
${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/common_engine/TransEngine/*.cpp
${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/common_engine/DataDealEngine/*.cpp
${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/common_engine/SocketEngine/*.cpp
${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/common_engine/SocketEngine/*.cpp
${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/common_engine/DataSourceEngine/*.cpp
${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/common_engine/SaveEngine/*.cpp
${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/common_engine/DeleteExpiredFolderEngine/*.cpp
#common tools rtsp_server src
${PROJECT_SOURCE_DIR}/nvidia_ascend_tools/common_tools/rtsp_server/net/*.cpp
@ -191,17 +200,6 @@ file(GLOB_RECURSE COMMON_SRCS_LISTS
file(GLOB_RECURSE SRCS_LISTS
#nvidia engine src
#nvidia engine include
${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/common_engine/ControlEngine/*.cpp
${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/common_engine/DataSourceEngine/*.cpp
${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/common_engine/DataUploadEngine/*.cpp
${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/common_engine/FilterEngine/*.cpp
${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/common_engine/MergerEngine/*.cpp
${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/common_engine/PixelFormatConvertEngine/*.cpp
${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/common_engine/SaveEngine/*.cpp
${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/common_engine/SelectBestEngine/*.cpp
${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/common_engine/TrainAnaEngine/*.cpp
${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/common_engine/TransEngine/*.cpp
${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/common_engine/DataDealEngine/*.cpp
${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/nvidia_engine/ChkDateStepOneEngine/*.cpp
${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/nvidia_engine/ChkDateStepTwoEngine/*.cpp
@ -227,9 +225,9 @@ target_link_libraries(${PROJECT_NAME} cudart cuda) #CUDA
target_link_libraries(${PROJECT_NAME}
${OpenCV_LIBS} #third party librarys
${PCL_LIBRARY_DIRS}
pcl_common pcl_io_ply pcl_keypoints pcl_registration pcl_segmentation pcl_features pcl_io pcl_octree #pcl
pcl_sample_consensus pcl_surface pcl_filters pcl_kdtree pcl_recognition pcl_search pcl_tracking
# ${PCL_LIBRARY_DIRS}
# pcl_common pcl_io_ply pcl_keypoints pcl_registration pcl_segmentation pcl_features pcl_io pcl_octree #pcl
# pcl_sample_consensus pcl_surface pcl_filters pcl_kdtree pcl_recognition pcl_search pcl_tracking
avformat avcodec avutil avfilter swresample swscale postproc #VideoCodecV2
yaml-cpp https_sn
jsoncpp curl boost_system boost_filesystem ssh2

View File

@ -386,6 +386,23 @@ namespace ai_matrix
return std::string(szTmp);
}
// Convert a millisecond timestamp into a formatted time string
std::string MyUtils::Stamp2Time(long long timestamp, bool has_msec)
{
int ms = timestamp % 1000; // millisecond part
time_t tick = (time_t)(timestamp/1000); // seconds since the epoch
struct tm tm;
char s[40];
tm = *localtime(&tick); // note: localtime uses a shared static buffer; localtime_r is the thread-safe variant
strftime(s, sizeof(s), "%Y-%m-%d %H:%M:%S", &tm);
std::string str(s);
if (has_msec)
{
str = str+ "." + std::to_string(ms);
}
return str;
}
/**
* Time elapsed since 1970 (Unix epoch)
* inParam : N/A
@ -653,7 +670,7 @@ namespace ai_matrix
}
if (!ifs.is_open())
{
LogWarn << "txt:" << strFilePath << " open fail";
// LogWarn << "txt:" << strFilePath << " open fail";
return false;
}
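For reference, a minimal standalone sketch of how the new Stamp2Time helper behaves for an epoch timestamp in milliseconds. This is an illustration only: the ai_matrix::MyUtils singleton wrapper is omitted, and the sample timestamp and printed value are made up.

```cpp
#include <cstdio>
#include <ctime>
#include <string>

// Same conversion as MyUtils::Stamp2Time above: split a millisecond epoch
// timestamp into seconds + milliseconds and format the seconds with strftime.
std::string Stamp2Time(long long timestamp, bool has_msec = false)
{
    int ms = timestamp % 1000;                 // millisecond part
    time_t tick = (time_t)(timestamp / 1000);  // seconds since the epoch
    struct tm tmBuf;
    localtime_r(&tick, &tmBuf);                // thread-safe variant of localtime
    char s[40];
    strftime(s, sizeof(s), "%Y-%m-%d %H:%M:%S", &tmBuf);
    std::string str(s);
    if (has_msec)
    {
        str = str + "." + std::to_string(ms);
    }
    return str;
}

int main()
{
    // Prints something like "2024-05-22 09:29:56.123", depending on the local time zone.
    printf("%s\n", Stamp2Time(1716370196123LL, true).c_str());
    return 0;
}
```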

View File

@ -56,6 +56,9 @@ namespace ai_matrix
//get the current time as a string
std::string get_time();
//convert a millisecond timestamp into a time string
std::string Stamp2Time(long long timestamp, bool has_msec = false);
//create a directory
std::string create_dir_name(std::string root, std::string name);
std::string create_dir_date_name_time(std::string root, std::string name);

BIN
app/train Normal file

Binary file not shown.

View File

@ -113,6 +113,8 @@ model:
nms_threshold: 0.3
gc_http_open: 1
username: "guest_01"
password: "d55b0f642e817eea24725d2f2a31dd08" # 神东
gc_http_url: "http://192.168.2.211:20004/api/train-carriage/identification/video-save"
gc_gettoken_url: "http://192.168.2.211:20004/api/blade-auth/oauth/token"
gc_image_srv: "http://192.168.2.211:9010/"
@ -179,3 +181,6 @@ gc_c_space_frame_width: 500
# whether to detect the train head (locomotive)
gc_train_heard_detect: true
# number of days after which result folders expire
gc_days_for_result_expire_folder: 3

View File

@ -55,6 +55,7 @@ engines:
DeviceStatusUpSerEngine: 0
#ResultToMySQLSrvEngine: 0
#DataToMinioSrvEngine: 0
DeleteExpiredFolderEngine: 0
#engine connections
connects:

View File

@ -197,7 +197,7 @@ void DataDealEngine::MakeProcessData()
strImgName += (iter->second.iRotate != 0) ? "_rotate.jpg" : ".jpg";
std::string strFileName = strDataDir_ + szCameraNo + std::to_string(iOrigFrameId) + ".txt";
//retry 2000 times after a camera read failure.
//retry 30 times after a camera read failure.
Json::Value jvFrameInfo;
RawData rawData;
bool bRet = false;
@ -291,14 +291,25 @@ APP_ERROR DataDealEngine::Process()
//fetch the detection status from the main camera
std::shared_ptr<void> pVoidData0 = nullptr;
iRet = inputQueMap_[strPort0_]->pop(pVoidData0);
if (nullptr != pVoidData0)
{
std::shared_ptr<MoveData> pMoveData = std::static_pointer_cast<MoveData>(pVoidData0);
// queuwMoveData_.push(*pMoveData);
moveData_ = *pMoveData;
LogDebug << "traindate:" << moveData_.strTrainDate << " trainname:" << moveData_.strTrainName
<< " MoveData frameid:" << moveData_.iFrameId << " IsEnd:" << moveData_.bIsEnd;
}
// LogDebug << "[frame no.]" << (iDataNO_ * dataSourceConfig_.iSkipInterval);
// if (queuwMoveData_.size() > 0 && (iDataNO_ * dataSourceConfig_.iSkipInterval) >= queuwMoveData_.front().iFrameId)
// {
// moveData_ = queuwMoveData_.front();
// queuwMoveData_.pop();
// LogDebug << "!!!--- moveData updated";
// }
if (!moveData_.bHasTrain)
{
usleep(1000); //1ms
@ -308,7 +319,7 @@ APP_ERROR DataDealEngine::Process()
//sleep 1 s on the first frame to wait for the image to be written locally
if (iOrigDataNO_ == 1)
{
usleep(1000000); //1s
usleep(1000 * 1000); //1s
}
if (strDataDir_.empty())

View File

@ -239,8 +239,8 @@ void DataDealTwoEngine::GetMainSplitInfo(Json::Value &jvMainSplit, std::shared_p
{
iValidType = pProcessData->iDirection == DIRECTION_LEFT ? VALID_LEFT : VALID_RIGHT;
}
LogDebug << "sourceid:" << pProcessData->iDataSource << " frameid:" << pProcessData->iFrameId
<< " trainIndex:" << pProcessData->iTrainIndex << " iSpaceType_:" << iSpaceType_
LogDebug << " frameid:" << pProcessData->iFrameId
<< " carriage:" << pProcessData->iTrainIndex << " iSpaceType_:" << iSpaceType_
<< " iSpaceX:" << iSpaceX << " iLastSpaceX_:" << iLastSpaceX_
<< " iLastSpaceFrameid_:" << iLastSpaceFrameid_ << " bIntervalFlag:" << bIntervalFlag;
iLastSpaceX_ = iSpaceX;
@ -352,11 +352,11 @@ void DataDealTwoEngine::GetValidTypeAndSplit(Json::Value &jvOneSplit, Json::Valu
GetSubSplitInfoByMain(jvOneSplit, pProcessData, jvFrameInfo);
}
LogDebug << "sourceid:" << pProcessData->iDataSource << " frameid:" << pProcessData->iFrameId
<< " trainIndex:" << pProcessData->iTrainIndex
<< " validType:" << jvOneSplit["validType"].asInt() << " splitX:" << jvOneSplit["splitX"].asInt()
<< " needNum:" << jvOneSplit["needNum"].asBool() << " needPro:" << jvOneSplit["needPro"].asBool()
<< " needChkDate:" << jvOneSplit["needChkDate"].asBool() << " needContainer:" << jvOneSplit["needContainer"].asBool();
// LogDebug << "sourceid:" << pProcessData->iDataSource << " frameid:" << pProcessData->iFrameId
// << " trainIndex:" << pProcessData->iTrainIndex
// << " validType:" << jvOneSplit["validType"].asInt() << " splitX:" << jvOneSplit["splitX"].asInt()
// << " needNum:" << jvOneSplit["needNum"].asBool() << " needPro:" << jvOneSplit["needPro"].asBool()
// << " needChkDate:" << jvOneSplit["needChkDate"].asBool() << " needContainer:" << jvOneSplit["needContainer"].asBool();
}
/**
@ -456,8 +456,8 @@ void DataDealTwoEngine::MakeProcessData(std::shared_ptr<TrainRange> pTrainRange)
sprintf(szCameraNo, "%03d/", iSourceId + 1);
bool bIsEndFlag = (pTrainRange->iEndFrameId == iFrameId);
LogInfo << "sourceid:" << iSourceId << " StepTwo MakeProcessData trainIndex:" << pTrainRange->iTrainIndex
<< " iFrameId:" << iFrameId << " bIsEndFlag:" << bIsEndFlag;
// LogInfo << "sourceid:" << iSourceId << " StepTwo MakeProcessData trainIndex:" << pTrainRange->iTrainIndex
// << " iFrameId:" << iFrameId << " bIsEndFlag:" << bIsEndFlag;
std::string strImgName = strDataDir + szCameraNo + std::to_string(iFrameId);
strImgName += (iter->second.iRotate != 0) ? "_rotate.jpg" : ".jpg";
std::string strFileName = strDataDir + szCameraNo + std::to_string(iFrameId) + ".txt";

View File

@ -7,7 +7,7 @@ namespace
{
const int LOW_THRESHOLD = 128;
const int MAX_THRESHOLD = 4096;
const uint16_t DELAY_TIME = 40000;
const uint16_t DELAY_TIME = 10000;
}
CameraEngine::CameraEngine() {}

View File

@ -8,6 +8,8 @@ ResultToHttpSrvEngine::~ResultToHttpSrvEngine() {}
APP_ERROR ResultToHttpSrvEngine::Init()
{
strPort0_ = engineName_ + "_" + std::to_string(engineId_) + "_0";
strUsername_ = MyYaml::GetIns()->GetStringValue("username");
strPassword_ = MyYaml::GetIns()->GetStringValue("password");
strURL_ = MyYaml::GetIns()->GetStringValue("gc_http_url");
strGetTokenURL_ = MyYaml::GetIns()->GetStringValue("gc_gettoken_url");
strImageSrv_ = MyYaml::GetIns()->GetPathValue("gc_image_srv");
@ -76,10 +78,10 @@ bool ResultToHttpSrvEngine::GetToken(std::string &strBladeAuth)
curl_mime *pMultipart = curl_mime_init(pCurl_);
curl_mimepart *pPart = curl_mime_addpart(pMultipart);
curl_mime_name(pPart, "username");
curl_mime_data(pPart, "guest_01", CURL_ZERO_TERMINATED);
curl_mime_data(pPart, strUsername_.c_str(), CURL_ZERO_TERMINATED);
pPart = curl_mime_addpart(pMultipart);
curl_mime_name(pPart, "password");
curl_mime_data(pPart, "d55b0f642e817eea24725d2f2a31dd08", CURL_ZERO_TERMINATED);
curl_mime_data(pPart, strPassword_.c_str(), CURL_ZERO_TERMINATED);
pPart = curl_mime_addpart(pMultipart);
curl_mime_name(pPart, "tenantId");
curl_mime_data(pPart, "000000", CURL_ZERO_TERMINATED);
@ -276,8 +278,8 @@ void ResultToHttpSrvEngine::DealHttpFailInfo()
if (!ResultToHttpSrv(jvRequest))
{
LogError << "re http post err:" << strLine;
SaveHttpFailInfo(jvRequest, strFailSaveBakPath_);
bAllSucc = false;
//SaveHttpFailInfo(jvRequest, strFailSaveBakPath_);
// bAllSucc = false;
continue;
}
}
@ -426,10 +428,11 @@ APP_ERROR ResultToHttpSrvEngine::Process()
jvRequest["isTheLast"] = pTrain->bIsEnd ? 1 : 0; // whether this is the last carriage: 0 = no, 1 = yes
jvRequest["startFrame"] = pTrain->iStartFrameId; //carriage start frame
jvRequest["endFrame"] = pTrain->iEndFrameId; //carriage end frame
jvRequest["skipFrame"] = dataSourceConfig.iSkipInterval; //skip-frame interval
jvRequest["skipFrame"] = dataSourceConfig.iSkipInterval;
jvRequest["collectTime"] = MyUtils::getins()->Stamp2Time(pTrain->i64EndTimeStamp, true);//time of the carriage split //skip-frame
if (!ResultToHttpSrv(jvRequest))
{
SaveHttpFailInfo(jvRequest, strFailSavePath_);
// SaveHttpFailInfo(jvRequest, strFailSavePath_);
}
//after the train ends, re-process the failed messages
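Since the token request above now takes its credentials from the YAML config rather than hard-coded literals, here is a minimal standalone sketch of the same multipart POST with libcurl, parameterized by username/password. The field names and tenantId value mirror the snippet above; the URL is whatever gc_gettoken_url points at, and error handling and response parsing are omitted.

```cpp
#include <curl/curl.h>
#include <string>

// Sketch: build a multipart form with the configured credentials and POST it
// to the auth endpoint. Returns true if the request itself succeeded.
bool RequestToken(const std::string &url, const std::string &username, const std::string &password)
{
    CURL *pCurl = curl_easy_init();
    if (!pCurl) return false;

    curl_mime *pMultipart = curl_mime_init(pCurl);
    curl_mimepart *pPart = curl_mime_addpart(pMultipart);
    curl_mime_name(pPart, "username");
    curl_mime_data(pPart, username.c_str(), CURL_ZERO_TERMINATED);
    pPart = curl_mime_addpart(pMultipart);
    curl_mime_name(pPart, "password");
    curl_mime_data(pPart, password.c_str(), CURL_ZERO_TERMINATED);
    pPart = curl_mime_addpart(pMultipart);
    curl_mime_name(pPart, "tenantId");
    curl_mime_data(pPart, "000000", CURL_ZERO_TERMINATED);

    curl_easy_setopt(pCurl, CURLOPT_URL, url.c_str());
    curl_easy_setopt(pCurl, CURLOPT_MIMEPOST, pMultipart);
    CURLcode res = curl_easy_perform(pCurl);

    curl_mime_free(pMultipart);
    curl_easy_cleanup(pCurl);
    return res == CURLE_OK;
}
```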

View File

@ -40,6 +40,8 @@ private:
bool SaveHttpFailInfo(Json::Value &jvRequest, std::string &strFilePath);
std::string strPort0_;
std::string strUsername_;
std::string strPassword_;
std::string strURL_;
std::string strGetTokenURL_;
std::string strImageSrv_;

View File

@ -0,0 +1,187 @@
#include "DeleteExpiredFolderEngine.h"
using namespace ai_matrix;
DeleteExpiredFolderEngine::DeleteExpiredFolderEngine() {}
DeleteExpiredFolderEngine::~DeleteExpiredFolderEngine() {}
APP_ERROR DeleteExpiredFolderEngine::Init()
{
iDaysNumber_ = MyYaml::GetIns()->GetIntValue("gc_days_for_result_expire_folder");
strResultPath_ = MyYaml::GetIns()->GetPathValue("gc_result_path");
LogInfo << "DeleteExpiredFolderEngine Init ok";
return APP_ERR_OK;
}
APP_ERROR DeleteExpiredFolderEngine::DeInit()
{
LogInfo << "DeleteExpiredFolderEngine DeInit ok";
return APP_ERR_OK;
}
APP_ERROR DeleteExpiredFolderEngine::Process()
{
int iRet = APP_ERR_OK;
while (!isStop_)
{
std::string strTrainDate_temp = MyUtils::getins()->GetDate();
DeletePreviousFolder(strResultPath_, strTrainDate_temp, iDaysNumber_);
sleep(24 * 60 * 60); // run once every 24 hours (usleep's microsecond argument would overflow at this interval)
}
return APP_ERR_OK;
}
void DeleteExpiredFolderEngine::DeletePreviousFolder(std::string path, const std::string &date, int n_days)
{
// 1. compute the cutoff date n_days earlier
std::string previous_date = getDateBeforeNDays(date, n_days);
if (!previous_date.empty())
std::cout << "Date before " << n_days << " days from " << date << " is: " << previous_date << std::endl;
// 2. collect the date-named subfolders under path
std::vector<Date> subfolders;
GetSubfolderNames(path, subfolders);
// for (const auto &it : subfolders)
// std::cout << it.year << "." << it.month << "." << it.day << std::endl;
// 3. delete the folders older than the cutoff
if (path.back() != '/')
path += "/";
Date reference_date = StrToDate(previous_date); // the given reference (cutoff) date
DeleteEarlierDatesFolder(path, subfolders, reference_date);
}
// number of days in a given month
int DeleteExpiredFolderEngine::DaysInMonth(int year, int month)
{
int max_days[13] = {0, 31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31};
if (month == 2 && ((year % 4 == 0 && year % 100 != 0) || year % 400 == 0))
{
max_days[2] = 29; // February has 29 days in a leap year
}
return max_days[month];
}
// parse a "YYYY-MM-DD" string into a Date struct
Date DeleteExpiredFolderEngine::StrToDate(const std::string &date_str)
{
std::istringstream iss(date_str);
int year, month, day;
char dash;
if (!(iss >> year >> dash && dash == '-' &&
iss >> month >> dash && dash == '-' &&
iss >> day))
{
LogError << ("Invalid date format") << ":" << date_str;
}
return {year, month, day};
}
// subtract the given number of days from a date
void DeleteExpiredFolderEngine::SubtractDays(Date &date, int n_days)
{
while (n_days > 0)
{
date.day--;
n_days--;
if (date.day == 0)
{
if (--date.month == 0)
{
--date.year;
date.month = 12;
}
int max_days = DaysInMonth(date.year, date.month);
date.day = max_days;
}
}
}
// format a Date struct as a "YYYY-MM-DD" string
std::string DeleteExpiredFolderEngine::DateToStr(const Date &date)
{
std::ostringstream oss;
oss << date.year << "-" << std::setfill('0') << std::setw(2) << date.month << "-" << std::setw(2) << date.day;
return oss.str();
}
// main helper: takes a date string and an integer n and returns the date string from n days earlier
std::string DeleteExpiredFolderEngine::getDateBeforeNDays(const std::string &input_date, int n_days)
{
try
{
Date date = StrToDate(input_date);
SubtractDays(date, n_days);
return DateToStr(date);
}
catch (const std::exception &e)
{
LogError << "Error: " << e.what();
return "";
}
}
void DeleteExpiredFolderEngine::GetSubfolderNames(std::string &directory, std::vector<Date> &folder_names)
{
if (directory.back() != '/')
directory += "/";
DIR *dir;
struct dirent *ent;
if ((dir = opendir(directory.c_str())) != nullptr)
{
while ((ent = readdir(dir)) != nullptr)
{
// skip "." and ".." and keep only the date-named folders (the "best" folder is skipped)
if (ent->d_type == DT_DIR && ent->d_name[0] != '.' && std::string(ent->d_name) != "best")
{
folder_names.push_back(StrToDate(ent->d_name));
}
}
closedir(dir);
}
else
{
LogError << "Unable to open directory: " << directory;
}
}
void DeleteExpiredFolderEngine::DeleteFolder(const std::string directory)
{
std::string command = "rm -rf " + directory;
int result = system(command.c_str());
if (result != 0)
std::cout << "Failed to remove directory recursively: " << directory << std::endl;
else
std::cout << "deleted folder successfully: " << directory << std::endl;
}
// delete every folder whose date is earlier than the reference date
void DeleteExpiredFolderEngine::DeleteEarlierDatesFolder(std::string &path, std::vector<Date> &subfolders, const Date &reference_date)
{
if (path.back() != '/')
path += "/";
for (const Date &cur : subfolders)
{
// bool flag = false;
if (cur.year < reference_date.year)
{
DeleteFolder(path + DateToStr(cur));
}
else if (cur.year == reference_date.year && cur.month < reference_date.month)
{
DeleteFolder(path + DateToStr(cur));
}
else if (cur.year == reference_date.year && cur.month == reference_date.month && cur.day < reference_date.day)
{
DeleteFolder(path + DateToStr(cur));
}
}
}
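To make the retention logic above easier to check in isolation, here is a small self-contained sketch of the same calendar rollback. The Date struct, DaysInMonth, and SubtractDays mirror the engine code; the sample start date is made up, and the 3-day window follows the gc_days_for_result_expire_folder default shown in the config.

```cpp
#include <iomanip>
#include <iostream>

struct Date { int year; int month; int day; };

// Days in a month, with the same leap-year rule as DaysInMonth above.
int DaysInMonth(int year, int month)
{
    int max_days[13] = {0, 31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31};
    if (month == 2 && ((year % 4 == 0 && year % 100 != 0) || year % 400 == 0))
        max_days[2] = 29; // February has 29 days in a leap year
    return max_days[month];
}

// Walk the date back one day at a time, borrowing from the previous month/year.
void SubtractDays(Date &date, int n_days)
{
    while (n_days-- > 0)
    {
        if (--date.day == 0)
        {
            if (--date.month == 0) { --date.year; date.month = 12; }
            date.day = DaysInMonth(date.year, date.month);
        }
    }
}

int main()
{
    Date date{2024, 3, 1};
    SubtractDays(date, 3); // a 3-day window crossing the Feb/Mar boundary of a leap year
    std::cout << date.year << "-" << std::setfill('0') << std::setw(2) << date.month
              << "-" << std::setw(2) << date.day << std::endl; // prints 2024-02-27
    return 0;
}
```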

View File

@ -0,0 +1,57 @@
/**
*
**/
#ifndef DELETEEXPIREDFOLDERENGINE_H
#define DELETEEXPIREDFOLDERENGINE_H
#include "AppCommon.h"
#include "EngineBase.h"
#include "EngineFactory.h"
#include "MyYaml.h"
#include "myutils.h"
// date struct
struct Date
{
int year;
int month;
int day;
};
class DeleteExpiredFolderEngine : public ai_matrix::EngineBase
{
public:
DeleteExpiredFolderEngine();
~DeleteExpiredFolderEngine();
APP_ERROR Init() override;
APP_ERROR DeInit() override;
APP_ERROR Process() override;
private:
// number of days in a given month
int DaysInMonth(int year, int month);
// parse a "YYYY-MM-DD" string into a Date struct
Date StrToDate(const std::string &date_str);
// subtract the given number of days from a date
void SubtractDays(Date &date, int n_days);
// format a Date struct as a "YYYY-MM-DD" string
std::string DateToStr(const Date &date);
// takes a date string and an integer n and returns the date string from n days earlier
std::string getDateBeforeNDays(const std::string &input_date, int n_days);
void GetSubfolderNames(std::string &directory, std::vector<Date> &folder_names);
void DeleteFolder(const std::string directory);
// delete every folder whose date is earlier than the reference date
void DeleteEarlierDatesFolder(std::string &path, std::vector<Date> &subfolders, const Date &reference_date);
void DeletePreviousFolder(std::string path, const std::string &date, int n_days);
private:
std::string strResultPath_;
int iDaysNumber_;
};
ENGINE_REGIST(DeleteExpiredFolderEngine)
#endif

View File

@ -28,6 +28,9 @@ APP_ERROR FilterTrainStepOneEngine::Init()
strResultPath_ = MyYaml::GetIns()->GetPathValue("gc_result_path");
iChkStopPX_ = MyYaml::GetIns()->GetIntValue("gc_chkstop_px");
iChkStopCount_ = MyYaml::GetIns()->GetIntValue("gc_chkstop_count");
iPartitionFrameNum_ = MyYaml::GetIns()->GetIntValue("partition_frame_span");
iPlitFrameSpanPX_ = MyYaml::GetIns()->GetIntValue("gc_split_frame_span_px");
iPushDirection_ = MyYaml::GetIns()->GetIntValue("gc_push_direction");
//fetch the main camera info
mainCfg_ = MyYaml::GetIns()->GetDataSourceConfigById(0);
@ -35,6 +38,8 @@ APP_ERROR FilterTrainStepOneEngine::Init()
std::map<int, ai_matrix::DataSourceConfig> mapUseDataSouceCfg = MyYaml::GetIns()->GetUseDataSourceConfig();
for (auto iter = mapUseDataSouceCfg.begin(); iter != mapUseDataSouceCfg.end(); iter++)
{
this->rightFirst_ = iter->second.iRightFirst;
this->leftFirst_ = iter->second.iLeftFirst;
if (iter->second.strTarget.find("NUM") != std::string::npos)
{
LogDebug << "DataSource:" << iter->first << " deal NUM";
@ -55,8 +60,8 @@ APP_ERROR FilterTrainStepOneEngine::Init()
mapTargetStr_.insert(std::make_pair(NUM, "NUM"));
mapTargetStr_.insert(std::make_pair(PRO, "PRO"));
mapTargetStr_.insert(std::make_pair(HEAD, "HEAD"));
mapTargetStr_.insert(std::make_pair(SPACE, "SPACE"));
mapTargetStr_.insert(std::make_pair(TRAINSPACE, "SPACE"));
mapTargetStr_.insert(std::make_pair(SPACE, "SPACE"));//SPACE
mapTargetStr_.insert(std::make_pair(TRAINSPACE, "SPACE"));//SPACE
InitParam();
LogInfo << "FilterTrainStepOneEngine Init ok";
@ -166,11 +171,12 @@ void FilterTrainStepOneEngine::AddBackInfo(std::shared_ptr<ProcessData> pProcess
}
strAllClassType += mapTargetStr_[pPostData->vecPostSubData[i].iTargetType];
}
LogDebug << "frameId:" << pProcessData->iFrameId << " addbackinfo strAllClassType:" << strAllClassType;
if (strAllClassType.empty())
{
return;
}
LogDebug << "frameId:" << pProcessData->iFrameId << " addbackinfo strAllClassType:" << strAllClassType;
TrainBackInfo trainBackInfo;
trainBackInfo.processData = pProcessData;
@ -184,8 +190,35 @@ void FilterTrainStepOneEngine::AddBackInfo(std::shared_ptr<ProcessData> pProcess
else
{
TrainBackInfo trainBackInfoTop = stackBackInfo_.top();
// before the 2024-03-27 change
// if (trainBackInfoTop.strAllClassType != trainBackInfo.strAllClassType)
// {
// stackBackInfo_.push(trainBackInfo);
// LogDebug << "frameId:" << pProcessData->iFrameId << " push strAllClassType:" << strAllClassType
// << " stacksize:" << stackBackInfo_.size();
// }
if (trainBackInfoTop.strAllClassType != trainBackInfo.strAllClassType)
{
if (iDirection_ == DIRECTION_RIGHT
&& trainBackInfo.strAllClassType == "SPACE"
&& (trainBackInfoTop.strAllClassType == "PROSPACE" || trainBackInfoTop.strAllClassType == "SPACEPRO"))
{
return;
}
if (iDirection_ == DIRECTION_RIGHT
&& trainBackInfo.strAllClassType == "SPACE"
&& (trainBackInfoTop.strAllClassType == "NUMSPACE" || trainBackInfoTop.strAllClassType == "SPACENUM"))
{
return;
}
if (iDirection_ == DIRECTION_LEFT
&& trainBackInfo.strAllClassType == "SPACE"
&& (trainBackInfoTop.strAllClassType == "NUMSPACE" || trainBackInfoTop.strAllClassType == "SPACENUM"))
{
return;
}
stackBackInfo_.push(trainBackInfo);
LogDebug << "frameId:" << pProcessData->iFrameId << " push strAllClassType:" << strAllClassType
<< " stacksize:" << stackBackInfo_.size();
@ -200,16 +233,19 @@ bool FilterTrainStepOneEngine::IsEndDealBackInfo(std::shared_ptr<ProcessData> pP
return true;
}
bool bPopFlag = false;
std::shared_ptr<PostData> pPostData = std::static_pointer_cast<PostData>(pProcessData->pVoidData);
if (pPostData->vecPostSubData.size() == 0) return false;
/*
Engine不处理
poppop后
poppop后
()
*/
std::shared_ptr<PostData> pPostData = std::static_pointer_cast<PostData>(pProcessData->pVoidData);
std::sort(pPostData->vecPostSubData.begin(), pPostData->vecPostSubData.end(), CompareX);
std::string strAllClassType;
@ -226,25 +262,7 @@ bool FilterTrainStepOneEngine::IsEndDealBackInfo(std::shared_ptr<ProcessData> pP
return false;
}
TrainBackInfo trainBackInfoTop = stackBackInfo_.top();
bool bPopFlag = false;
if (trainBackInfoTop.strAllClassType != strAllClassType)
{
bPopFlag = true;
}
LogDebug << "frameId:" << pProcessData->iFrameId << " stacksize:" << stackBackInfo_.size()
<< " topClassType:" << trainBackInfoTop.strAllClassType << " dealbackinfo strAllClassType:" << strAllClassType
<< " bPopFlag:" << bPopFlag;
if(bPopFlag)
{
stackBackInfo_.pop();
bPopFlag = false;
}
if (stackBackInfo_.size() == 1)
{
if (!bPopFlag)
{
TrainBackInfo trainBackInfoLast = stackBackInfo_.top();
std::shared_ptr<PostData> pPostDataBack = std::static_pointer_cast<PostData>(trainBackInfoLast.processData->pVoidData);
@ -252,22 +270,65 @@ bool FilterTrainStepOneEngine::IsEndDealBackInfo(std::shared_ptr<ProcessData> pP
for (size_t i = 0; i < pPostDataBack->vecPostSubData.size(); i++)
{
bool bFlag = (pPostDataBack->vecPostSubData[i].step1Location.fLTX <= pPostData->vecPostSubData[i].step1Location.fLTX);
LogDebug << "frameId:" << pProcessData->iFrameId << " stackFrameid:" << pPostDataBack->iFrameId << " bFlag:" << bFlag;
if ((iDirection_ == DIRECTION_LEFT && !bFlag) ||
(iDirection_ == DIRECTION_RIGHT && bFlag))
int bFlag = -1;
for (size_t j = 0; j < pPostData->vecPostSubData.size(); j++)
{
if (pPostDataBack->vecPostSubData[i].iBigClassId == pPostData->vecPostSubData[j].iBigClassId)
{
if (pPostData->vecPostSubData[j].step1Location.fLTX < 1 || pPostDataBack->vecPostSubData[i].step1Location.fLTX < 1)
{
LogDebug << "box X coordinate < 1, treated as an abnormal box; filtered";
break;
}
bFlag = (pPostDataBack->vecPostSubData[i].step1Location.fLTX <= pPostData->vecPostSubData[j].step1Location.fLTX) ? 1 : 0;
LogDebug << "frame:" << pProcessData->iFrameId << " frame before reversing:" << pPostDataBack->iFrameId << " restored to original position:" << bFlag
<< " current box position:" << pPostData->vecPostSubData[i].step1Location.fLTX
<< " position before reversing:" << pPostDataBack->vecPostSubData[i].step1Location.fLTX << " direction:" << iDirection_;
}
}
if ((iDirection_ == DIRECTION_LEFT && bFlag == 0) ||
(iDirection_ == DIRECTION_RIGHT && bFlag == 1))
{
bPopFlag = true;
break;
}
}
if (bPopFlag)
{
LogDebug << "frameId:" << pProcessData->iFrameId << " last one bPopFlag:" << bPopFlag;
LogDebug << "frameId:" << pProcessData->iFrameId << " restored to the pre-reversing position:" << bPopFlag;
stackBackInfo_.pop();
}
}
else
{
TrainBackInfo trainBackInfoTop_bak = stackBackInfo_.top();
stackBackInfo_.pop();
TrainBackInfo trainBackInfoTop = stackBackInfo_.top();
if (trainBackInfoTop.strAllClassType != strAllClassType)
{
stackBackInfo_.push(trainBackInfoTop_bak);
LogDebug << "frame:" << pProcessData->iFrameId << " reversing-stack size:" << stackBackInfo_.size()
<< " top reversing entry:" << trainBackInfoTop.strAllClassType << " current detection:" << strAllClassType;
}
else
{
// bPopFlag = true;
LogDebug << "frame:" << pProcessData->iFrameId << " reversing-stack size:" << stackBackInfo_.size()
<< " top reversing entry:" << trainBackInfoTop.strAllClassType << " current detection:" << strAllClassType
<< " dropped reversing entry:" << trainBackInfoTop_bak.strAllClassType;
}
// if(bPopFlag)
// {
// stackBackInfo_.pop();
// bPopFlag = false;
// }
}
return stackBackInfo_.empty() ? true : false;
}
@ -281,25 +342,23 @@ int FilterTrainStepOneEngine::GetTrainStatus(std::shared_ptr<ProcessData> pProce
{
if (iDirection_ == DIRECTION_UNKNOWN)
{
LogDebug << "sourceid:" << pProcessData->iDataSource << " frameId:" << pProcessData->iFrameId << " direction unknown trainStatus=1";
LogDebug << " frameId:" << pProcessData->iFrameId << " travel direction not yet determined; assuming the train is running normally";
return TRAINSTATUS_RUN;
}
std::shared_ptr<PostData> pPostData = std::static_pointer_cast<PostData>(pProcessData->pVoidData);
pPostData->iFrameId = pProcessData->iFrameId;
quePostData_.push(*pPostData.get());
// 1. no boxes: return the previous train status
if (pPostData->vecPostSubData.size() == 0)
{
LogDebug << "sourceid:" << pProcessData->iDataSource << " frameId:" << pProcessData->iFrameId << " step1 no result trainStatus="<< iTrainStatus_;
quePostData_.pop();
return iTrainStatus_;
}
quePostData_.push(*pPostData.get());
if (quePostData_.size() < 3)
{
LogDebug << "sourceid:" << pProcessData->iDataSource << " frameId:" << pProcessData->iFrameId << " size < 3 trainStatus=1";
return TRAINSTATUS_RUN;
}
@ -310,7 +369,7 @@ int FilterTrainStepOneEngine::GetTrainStatus(std::shared_ptr<ProcessData> pProce
{
quePostData_.pop();
}
LogDebug << "queue front frameId:" << postDataFront.iFrameId << " queuesize:" << quePostData_.size() << " iSizeTemp:" << iSizeTemp;
LogDebug << "frameId:" << pProcessData->iFrameId << " motion-status queue, first frame:" << postDataFront.iFrameId << " queue size:" << quePostData_.size() << " iSizeTemp:" << iSizeTemp;
bool bSameFlag = false;
int iDiffValue = iChkStopPX_;
@ -326,6 +385,7 @@ int FilterTrainStepOneEngine::GetTrainStatus(std::shared_ptr<ProcessData> pProce
*/
if (postSubDataFront.iTargetType != postSubDataBack.iTargetType)
{
LogDebug << "checking that consecutive frames detected the same target type; previous:" << postSubDataFront.iTargetType << " current:" << postSubDataBack.iTargetType;
continue;
}
@ -340,7 +400,7 @@ int FilterTrainStepOneEngine::GetTrainStatus(std::shared_ptr<ProcessData> pProce
//a position difference above 10 px indicates movement; then decide whether it is forward motion or reversing
LogDebug << "frameId:" << pProcessData->iFrameId << " " << iCenterBack << "-" << iCenterFront
<< "=" << abs(iCenterBack - iCenterFront) << " iDiffValue:" << iDiffValue;
<< "=" << abs(iCenterBack - iCenterFront) << " movement threshold iDiffValue:" << iDiffValue;
if (abs(iCenterBack - iCenterFront) > iDiffValue)
{
iNotChgCount_ = 0;
@ -352,12 +412,17 @@ int FilterTrainStepOneEngine::GetTrainStatus(std::shared_ptr<ProcessData> pProce
if ((iCenterBack > iCenterFront && iDirection_ == DIRECTION_LEFT) ||
(iCenterBack < iCenterFront && iDirection_ == DIRECTION_RIGHT))
{
LogDebug << "frameId:" << pProcessData->iFrameId << " back1";
if (this->iPartitionFrameNum_ < (pProcessData->iFrameId - postDataFront.iFrameId)
&& this->iPlitFrameSpanPX_ < abs(iCenterBack - iCenterFront))
{
return TRAINSTATUS_RUN;
}
LogDebug << "frameId:" << pProcessData->iFrameId << " train reversing detected";
return TRAINSTATUS_BACK;
}
else
{
LogDebug << "frameId:" << pProcessData->iFrameId << " run";
LogDebug << "frameId:" << pProcessData->iFrameId << " running normally";
return TRAINSTATUS_RUN;
}
}
@ -369,15 +434,16 @@ int FilterTrainStepOneEngine::GetTrainStatus(std::shared_ptr<ProcessData> pProce
else
{
iNotChgCount_++;
LogDebug << " frameId:" << pProcessData->iFrameId << " no chg iNotChgCount:" << iNotChgCount_;
LogDebug << " frameId:" << pProcessData->iFrameId
<< " box movement is small; stop-detection count:" << iNotChgCount_ << "/" << iChkStopCount_;
if (iNotChgCount_ > iChkStopCount_)
{
LogDebug << "frameId:" << pProcessData->iFrameId << " stop";
LogDebug << "frameId:" << pProcessData->iFrameId << " train stop detected";
return TRAINSTATUS_STOP;
}
else
{
LogDebug << "frameId:" << pProcessData->iFrameId << " iTrainStatus_:" << iTrainStatus_;
// LogDebug << "frameId:" << pProcessData->iFrameId << " iTrainStatus_:" << iTrainStatus_;
return iTrainStatus_;
}
}
@ -443,8 +509,8 @@ int FilterTrainStepOneEngine::GetTrainStatus(std::shared_ptr<ProcessData> pProce
}
}
LogDebug << "frameId:" << pProcessData->iFrameId << " back2";
return TRAINSTATUS_BACK;
// LogDebug << "frameId:" << pProcessData->iFrameId << " back2";
return iTrainStatus_;
}
}
LogDebug << "frameId:" << pProcessData->iFrameId << " iNotChgCount_:" << iNotChgCount_ << " run run";
@ -723,7 +789,8 @@ void FilterTrainStepOneEngine::CalculateDirection(std::shared_ptr<ProcessData> p
}
void FilterTrainStepOneEngine::sendComeTrain(const std::string strTrainDate, const std::string strTrainName, const int iDirection) {
std::string message = "{\"cometime\":\"" + strTrainDate + " " + strTrainName + "\",\"type\":\"1\",\"direction\":\"" + to_string(iDirection) + "\"}";
std::string message = "{\"cometime\":\"" + strTrainDate + " " + strTrainName + "\",\"type\":\"1\",\"direction\":" + to_string(iDirection == iPushDirection_ ? 1:-1) + "}";
LogWarn << message;
outputQueMap_[engineName_ + "_" + std::to_string(engineId_) + "_1"]->push(std::static_pointer_cast<void>(std::make_shared<std::string>(message)));
}
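For a quick illustration of the new notification format (direction is now emitted as a bare integer: 1 when the detected direction matches gc_push_direction, otherwise -1), here is a minimal sketch with made-up date and name values:

```cpp
#include <iostream>
#include <string>

int main()
{
    // Illustrative values only; strTrainDate/strTrainName come from the pipeline at runtime.
    std::string strTrainDate = "2024-05-22";
    std::string strTrainName = "09-29-56";
    int iDirection = 1, iPushDirection = 1;

    std::string message = "{\"cometime\":\"" + strTrainDate + " " + strTrainName +
                          "\",\"type\":\"1\",\"direction\":" +
                          std::to_string(iDirection == iPushDirection ? 1 : -1) + "}";

    // Prints: {"cometime":"2024-05-22 09-29-56","type":"1","direction":1}
    std::cout << message << std::endl;
    return 0;
}
```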
@ -830,31 +897,31 @@ void FilterTrainStepOneEngine::DealProcessDataPre(std::shared_ptr<ProcessData> p
{
if (iterHeadContinueCnt->second < 2 && it->iTargetType == HEAD)
{
LogError << "sourceid:" << iterProcessData->second->iDataSource << " frameId:" << iterProcessData->second->iFrameId << " Head wrong";
LogDebug << " frameId:" << iterProcessData->second->iFrameId << " Head box filtered: not detected in consecutive frames";
it = pPostDataPre->vecPostSubData.erase(it);
continue;
}
if (iterProContinueCnt->second < 2 && it->iTargetType == PRO)
{
LogError << "sourceid:" << iterProcessData->second->iDataSource << " frameId:" << iterProcessData->second->iFrameId << " PRO wrong";
LogDebug << " frameId:" << iterProcessData->second->iFrameId << " PRO box filtered: not detected in consecutive frames";
it = pPostDataPre->vecPostSubData.erase(it);
continue;
}
if (iterNumContinueCnt->second < 2 && it->iTargetType == NUM)
{
LogError << "sourceid:" << iterProcessData->second->iDataSource << " frameId:" << iterProcessData->second->iFrameId << " NUM wrong";
LogDebug << " frameId:" << iterProcessData->second->iFrameId << " NUM box filtered: not detected in consecutive frames";
it = pPostDataPre->vecPostSubData.erase(it);
continue;
}
if (iterSpaceContinueCnt->second < 2 && it->iTargetType == SPACE)
{
LogError << "sourceid:" << iterProcessData->second->iDataSource << " frameId:" << iterProcessData->second->iFrameId << " SPACE wrong";
LogDebug << " frameId:" << iterProcessData->second->iFrameId << " SPACE box filtered: not detected in consecutive frames";
it = pPostDataPre->vecPostSubData.erase(it);
continue;
}
if (iterTranSpaceContinueCnt->second < 2 && it->iTargetType == TRAINSPACE)
{
LogError << "sourceid:" << iterProcessData->second->iDataSource << " frameId:" << iterProcessData->second->iFrameId << " TRAINSPACE wrong";
LogDebug << " frameId:" << iterProcessData->second->iFrameId << " TRAINSPACE box filtered: not detected in consecutive frames";
it = pPostDataPre->vecPostSubData.erase(it);
continue;
}
@ -871,6 +938,7 @@ void FilterTrainStepOneEngine::DealProcessDataPre(std::shared_ptr<ProcessData> p
{
//CalculateDirection(iterProcessData->second);
CalculateDirectionNew(iterProcessData->second);
if (iDirection_ != DIRECTION_UNKNOWN) this->sendComeTrain(pProcessData->strTrainDate, pProcessData->strTrainName, iDirection_);
}
if (iDirection_ != DIRECTION_UNKNOWN)
@ -918,7 +986,7 @@ void FilterTrainStepOneEngine::DealProcessDataPre(std::shared_ptr<ProcessData> p
<< " real-time train status:" << iTrainStatus_ << " (0 no train, 1 running, 2 stopped, 3 reversing) iTrainStatusTemp:" << iTrainStatusTemp;
iterProcessData->second->iStatus = iTrainStatusTemp;
this->sendComeTrain(pProcessData->strTrainDate, pProcessData->strTrainName, iDirection_);
// this->sendComeTrain(pProcessData->strTrainDate, pProcessData->strTrainName, iDirection_);
//push the previous frame to port 0
PushData(strPort0_, iterProcessData->second);

View File

@ -58,6 +58,11 @@ private:
int iChkStopPX_;
int iChkStopCount_;
int iDirection_; //direction of travel
int iPushDirection_; //direction that should be reported
int rightFirst_; // leading box type when travelling right
int leftFirst_; // leading box type when travelling left
int iPartitionFrameNum_; //frame interval that counts as spanning carriages
int iPlitFrameSpanPX_; //max X span of the same box type between consecutive frames
std::map<int, PostData> mapPostDataFrist_; //[key: data source id, value: step-1 detection info]
std::map<int, std::map<int, std::vector<Step1Location>>> mapMapStep1Info_; //[key: data source id, value: [key: target type, value: detection boxes]]

View File

@ -83,7 +83,10 @@ bool SaveCsvEngine::SaveMergerCsv(std::shared_ptr<Train> pTrain)
<< "inspection" << ','
<< "inspectionImg" << ','
<< "containerImg_1" << ','
<< "containerImg_2" << std::endl;
<< "containerImg_2" << ','
<< "startTime" << ','
<< "endTime"
<< std::endl;
}
std::string strTime = pTrain->strTrainName;
@ -147,7 +150,10 @@ bool SaveCsvEngine::SaveMergerCsv(std::shared_ptr<Train> pTrain)
<< pTrain->chkDate.strChkDate1DeadLine << ','
<< szChkDateImgPath << ','
<< szContainer1ImgPath << ','
<< szContainer2ImgPath << std::endl;
<< szContainer2ImgPath << ','
<< MyUtils::getins()->Stamp2Time(pTrain->i64StartTimeStamp, true) << ','
<< MyUtils::getins()->Stamp2Time(pTrain->i64EndTimeStamp, true)
<< std::endl;
outFile.close();
}

View File

@ -160,7 +160,7 @@ APP_ERROR SaveImgEngine::Process()
jvFrameInfo["rate"] = iRate;
jvFrameInfo["isEnd"] = pSaveImgData->bIsEnd;
MyUtils::getins()->WriteJsonInfo(jvFrameInfo, strTxtFilePath);
LogDebug << "engineId:" << engineId_ << " save success txt:" << strTxtFilePath;
// LogDebug << "engineId:" << engineId_ << " save success txt:" << strTxtFilePath;
}
}
return APP_ERR_OK;

View File

@ -119,10 +119,10 @@ void SaveStepOneResultEngine::DealCenterSpace(std::vector<PartionInfo> &vecParat
{
return;
}
LogDebug << "size:" << iVecSize << " frameId:" << pProcessData->iFrameId
<< " vecParationInfo[0].frameId:" << vecParationInfo.at(0).modelSpaceFrame
<< " vecParationInfo[size-1].frameId:" << vecParationInfo.at(iVecSize - 1).modelSpaceFrame
<< " isEnd:" << vecParationInfo.at(iVecSize - 1).bIsEnd;
LogDebug << "accumulated carriage-split entries:" << iVecSize << " frame:" << pProcessData->iFrameId
<< " first split-entry frame:" << vecParationInfo.at(0).modelSpaceFrame
<< " last split-entry frame:" << vecParationInfo.at(iVecSize - 1).modelSpaceFrame
<< " last split-entry isEnd:" << vecParationInfo.at(iVecSize - 1).bIsEnd;
/*
(2023-02-28)
@ -134,9 +134,9 @@ void SaveStepOneResultEngine::DealCenterSpace(std::vector<PartionInfo> &vecParat
int iCenterXPre = vecParationInfo[i - 1].fLTX + (vecParationInfo[i - 1].fRBX - vecParationInfo[i - 1].fLTX) / 2;
int iCenterX = vecParationInfo[i].fLTX + (vecParationInfo[i].fRBX - vecParationInfo[i].fLTX) / 2;
bool bIntervalFlag = ((int)(vecParationInfo[i].modelSpaceFrame - vecParationInfo[i - 1].modelSpaceFrame)) > iSplitSpan_;
LogDebug << "frameidPre:" << vecParationInfo[i - 1].modelSpaceFrame << " iCenterXPre:" << iCenterXPre
<< " frameid:" << vecParationInfo[i].modelSpaceFrame << " iCenterX:" << iCenterX
<< " bIntervalFlag:" << bIntervalFlag << " i:" << i;
LogDebug << "previous frame id:" << vecParationInfo[i - 1].modelSpaceFrame << " previous gap-box center X:" << iCenterXPre
<< " current frame id:" << vecParationInfo[i].modelSpaceFrame << " current gap-box center X:" << iCenterX
<< " frame interval satisfied:" << bIntervalFlag << " i:" << i;
if (iDirection_ == DIRECTION_LEFT && (iCenterXPre < iCenterX - iSplitSpanPX_) && bIntervalFlag)
{
vecSpacePos.push_back(i - 1);
@ -190,6 +190,8 @@ void SaveStepOneResultEngine::DealCenterSpace(std::vector<PartionInfo> &vecParat
pPartionInfo->startframe = dataSourceConfig_.iSkipInterval;
}
pPartionInfo->nStatus = ((pProcessData->iStatus == TRAINSTATUS_STOP) ? TRAIN_PAUSE : iDirection_);
LogWarn << "--------- sending data to Paration --------";
outputQueMap_[strPort0_]->push(std::static_pointer_cast<void>(pPartionInfo));
iPushSpaceFrameId_ = pPartionInfo->modelSpaceFrame;
@ -242,14 +244,18 @@ void SaveStepOneResultEngine::DealTrainSpaceInfo(std::shared_ptr<ProcessData> pP
{
iDirection_ = jvDirectionInfo["direction"].asInt();
}
else
{
LogWarn << "travel direction not detected yet";
}
}
bool bIntervalFlag = ((int)(pProcessData->iFrameId - parationInfoLast_.modelSpaceFrame)) > iSplitSpan_;
int iCenterCur = jvStep1Space[0]["ltx"].asFloat() + (jvStep1Space[0]["rbx"].asFloat() - jvStep1Space[0]["ltx"].asFloat()) / 2;
int iCenterLast = parationInfoLast_.fLTX + (parationInfoLast_.fRBX - parationInfoLast_.fLTX) / 2;
LogDebug << "frameid:" << pProcessData->iFrameId << " centerCur:" << iCenterCur
<< " lastFrameid:" << parationInfoLast_.modelSpaceFrame << " centerLast:" << iCenterLast
<< " iDirection_:" << iDirection_ << " bIntervalFlag:" << bIntervalFlag << " bDealCenterFlag_:" << bDealCenterFlag_;
LogDebug << "current frame:" << pProcessData->iFrameId << " gap-box center:" << iCenterCur
<< " previous frame:" << parationInfoLast_.modelSpaceFrame << " gap-box center:" << iCenterLast
<< " direction:" << iDirection_ << " split-frame interval satisfied:" << bIntervalFlag << " bDealCenterFlag_:" << bDealCenterFlag_;
if (iDirection_ == DIRECTION_UNKNOWN || iCenterLast == 0)
{
@ -418,7 +424,7 @@ void SaveStepOneResultEngine::SplitTrainByNumPro(std::shared_ptr<PartionInfo> &p
pPartionInfoNew->i64StartTimeStamp = i64TimeStampFirst_;
pPartionInfoNew->startframe = dataSourceConfig_.iSkipInterval;
}
pPartionInfoNew->nStatus = ((pProcessData->iStatus == TRAINSTATUS_STOP) ? TRAIN_PAUSE : iDirection_);
// pPartionInfoNew->nStatus = ((pProcessData->iStatus == TRAINSTATUS_STOP) ? TRAIN_PAUSE : iDirection_);
//construct a gap entry and write it into the split frame's info file
char szCameraNo[5] = {0};
@ -437,6 +443,7 @@ void SaveStepOneResultEngine::SplitTrainByNumPro(std::shared_ptr<PartionInfo> &p
jvFrameInfo["step1Space"].append(jvOneSpace);
MyUtils::getins()->WriteJsonInfo(jvFrameInfo, strFilePath);
LogWarn << "--------- sending data to Paration --------";
outputQueMap_[strPort0_]->push(std::static_pointer_cast<void>(pPartionInfoNew));
iPushSpaceFrameId_ = pPartionInfoNew->modelSpaceFrame;
@ -681,6 +688,7 @@ APP_ERROR SaveStepOneResultEngine::Process()
//when the gap between the last and second-to-last carriages is not detected, also split by car number/attribute.
SplitTrainByNumPro(pPartionInfo, pProcessData);
LogWarn << "--------- sending data to Paration --------";
outputQueMap_[strPort0_]->push(std::static_pointer_cast<void>(pPartionInfo));
iPushSpaceFrameId_ = pPartionInfo->modelSpaceFrame;

View File

@ -212,6 +212,11 @@ std::string SelectBestEngine::GetBest(std::vector<TransInfo> &vecAllTransInfo, T
{
vecAllTransInfo = vecTransInfoTemp;
}
else
{
// carriages that are too dirty recognize poorly and are hard to fuse with the RFID results, hence this added else branch
return strValue;
}
//get the optimal length
int iBestLen = GetBestLength(vecAllTransInfo, iMaxLen);

View File

@ -149,7 +149,6 @@ APP_ERROR TrainParationMgr::Process()
int nSize = lstPartInfo.size();
int nPartionIndex = nSize - 1;
int nPrePartionIndex = nPartionIndex;
//number of carriages that have passed so far
if (nSize == 0) {
@ -167,13 +166,12 @@ APP_ERROR TrainParationMgr::Process()
//lstPartInfo.push_back(stTempInfo);
nPartionIndex++;
}
{
lstPartInfo[nPartionIndex].i64EndTimeStamp = pPartionInfo->i64EndTimeStamp;
lstPartInfo[nPartionIndex].endframe = pPartionInfo->modelSpaceFrame;
// compute this carriage's speed from the start-frame and end-frame timestamps
// LogInfo << "TrainAnaEngine checkPartion bPartion == true lstPartInfo[nPrePartionIndex].ftime" << abs(lstPartInfo[nPrePartionIndex].i64EndTimeStamp - lstPartInfo[nPrePartionIndex].i64StartTimeStamp);
// elapsed time computed from the timestamps
float nTimePassed = (abs(lstPartInfo[nPartionIndex].i64EndTimeStamp - lstPartInfo[nPartionIndex].i64StartTimeStamp)) * 1.0;
//avoid an artificially low speed when the train has stopped
if(pPartionInfo->nStatus != TRAIN_PAUSE && nTimePassed <= 50000) {
@ -189,11 +187,7 @@ APP_ERROR TrainParationMgr::Process()
//
//nSamePartionIgnoreCount = (nTimePassed / (3 * 5000)) * nFrameRate;
// the end frame extends past the current frame (divided by 2 because the coupler sits in the middle and the car body behind it spans half the camera width)
//lstPartInfo[nPrePartionIndex].endframe = pPartionInfo->modelSpaceFrame;
//LogInfo << "TrainAnaEngine checkPartion bPartion == true lstPartInfo[nPrePartionIndex].endframe" << lstPartInfo[nPrePartionIndex].endframe;
lstPartInfo[nPartionIndex].bmodelconfirmed = true;
}
/// write json info to file
@ -202,94 +196,19 @@ APP_ERROR TrainParationMgr::Process()
Json::Value jvPartionInfo;
//JSON save path
std::string strFilePath;
bool brightcome = false;
int nrightoffset = 0;
if (pPartionInfo->nStatus == 1) {
brightcome = true;
// nrightoffset = -1;
}
//carriage split info detected
{
// if (nPartionIndex == 0) {
// lstPartInfo[nPartionIndex].endframe = lstPartInfo[nPartionIndex].endframe - nrightoffset * (lstPartInfo[nPartionIndex].fLTX - METHOD_BASE_WIDTH) / 10;
// } else {
// lstPartInfo[nPartionIndex].endframe = lstPartInfo[nPartionIndex].endframe - nrightoffset * getCouplerOffsetPix(lstPartInfo[nPartionIndex].fspeed, lstPartInfo[nPartionIndex].endframe);
// }
//lstPartInfo[nPartionIndex].endframe = lstPartInfo[nPartionIndex].endframe + getOffsetFrame(lstPartInfo[nPartionIndex].fspeed, (TRAIN_IN_CAMERA_WIDTH / 2), nFrameRate);
strFilePath = strResultPath_ + pPartionInfo->strTrainDate + "/" + pPartionInfo->strTrainName + "/"
+ std::to_string(nPartionIndex + 1) + ".txt";
// offset of the leading coupler (in frames)
int headpos = 0;
// offset of the trailing coupler (in frames)
int tailpos = (0 - nTailPixOffset);
//if (nPartionIndex == 0)
{
headpos = METHOD_BASE_WIDTH / 2;
tailpos = tailpos + headpos;
}
// whether the train entered from the right
if (brightcome == true)
{
//brightcome = true;
// right-side arrival: the leading coupler starts at the far right of the frame
headpos = METHOD_BASE_WIDTH / 2;
// right-side arrival: the trailing coupler is at the far right plus the carriage width in pixels
tailpos = headpos + nTailPixOffset;
/*
if (nPartionIndex == 0)
{
headpos = METHOD_BASE_WIDTH / 2;
tailpos = tailpos - headpos;
}
*/
}
LogInfo << "TrainAnaEngine Process lstPartInfo[nPartionIndex].startframe" << lstPartInfo[nPartionIndex].startframe ;
LogInfo << "TrainAnaEngine Process lstPartInfo[nPartionIndex].endframe" << lstPartInfo[nPartionIndex].endframe;
//compute the leading and trailing coupler offsets from this carriage's start frame to its end frame
// for (int nplayframe = lstPartInfo[nPartionIndex].startframe; nplayframe <= lstPartInfo[nPartionIndex].endframe; nplayframe++)
// {
// Json::Value jvposInfo;
// // which frame of the current carriage this is
// int noffsetindex = (nplayframe - lstPartInfo[nPartionIndex].startframe);
// // coupler position offset computed from the train speed (in pixels)
// int noffsetpos = getCouplerOffsetPosition(lstPartInfo[nPartionIndex].fspeed, noffsetindex);
// // initialize the leading coupler offset (in pixels)
// jvposInfo["headpos"] = -1;
// // initialize the trailing coupler offset (in pixels)
// jvposInfo["tailpos"] = -1;
// if (brightcome == false) {
// // left-side arrival
// // add the coupler offset to the leading and trailing couplers each frame
// jvposInfo["headpos"] = (headpos + noffsetpos);
// jvposInfo["tailpos"] = (tailpos + noffsetpos);
// } else {
// // right-side arrival
// // subtract the coupler offset from the leading and trailing couplers each frame
// jvposInfo["headpos"] = (headpos - noffsetpos);
// jvposInfo["tailpos"] = (tailpos - noffsetpos);
// }
// //LogInfo << "TrainAnaEngine Process jvposInfo[headpos]" << jvposInfo["headpos"];
// // LogInfo << "TrainAnaEngine Process jvposInfo[tailpos]:" << jvposInfo["tailpos"];
// //LogInfo << "TrainAnaEngine Process jvPartionListInfo.append";
// jvPartionInfo[std::to_string(nplayframe)] = jvposInfo;
// }
PartionInfo stTempInfo;
// start recording the next carriage (the index becomes ordinal +1, plus the newly added carriage +1)
stTempInfo.nindex = nPartionIndex + 2;
// previous carriage's end frame - (offset frames = (car-body width in view / speed -> transit time) * frames/sec) is used as the next carriage's start frame
int ntempOffsetFrame = lstPartInfo[nPartionIndex].endframe;
//
//- (int)(((TRAIN_IN_CAMERA_WIDTH / 2) / lstPartInfo[nPartionIndex].fspeed) * nFrameRate);
//LogInfo << "TrainAnaEngine Process ntempOffsetFrame:" << ntempOffsetFrame;
stTempInfo.startframe = ntempOffsetFrame;
stTempInfo.i64StartTimeStamp = pPartionInfo->i64EndTimeStamp;
// initialize the next carriage's end frame
@ -328,7 +247,6 @@ APP_ERROR TrainParationMgr::Process()
pTrainRange->bIsEnd = jvPartionInfo["isEnd"].asBool();
pTrainRange->bmodelconfirmed = jvPartionInfo["modelconfirmed"].asBool();
iRet = outputQueMap_[strPort0_]->push(std::static_pointer_cast<void>(pTrainRange));
}
if (pPartionInfo->bIsEnd) {
lstPartInfo.clear();

View File

@ -101,7 +101,7 @@ void TransTrainEngine::InitParam()
*/
bool TransTrainEngine::AuthTransNum(int classId, const std::string &trainNum)
{
LogInfo << "classId:" << classId << " trainNum:" << trainNum;
// LogInfo << "classId:" << classId << " trainNum:" << trainNum;
switch (classId)
{
case TRAIN_HEAD: // 车头上的编号
@ -774,8 +774,8 @@ APP_ERROR TransTrainEngine::Process()
{
strTemp += vecClassNames_.at(it->second.at(j).iClassId);
}
LogDebug << "step2 char sourceid:" << pProcessData->iDataSource << " frameid:" << pProcessData->iFrameId
<< " bigclassId:" << postSubData.iBigClassId << " line:" << it->first << "," << strTemp;
// LogDebug << "step2 char sourceid:" << pProcessData->iDataSource << " frameid:" << pProcessData->iFrameId
// << " bigclassId:" << postSubData.iBigClassId << " line:" << it->first << "," << strTemp;
}
TransSubData transSubData;

View File

@ -1,146 +0,0 @@
#include "TestImgEngine.h"
#include <iostream>
#include <algorithm>
#include <string>
#include <stdio.h>
#include <stdarg.h>
#include <sys/time.h>
#include <string.h>
#include <vector>
#include <memory>
using namespace std;
using namespace ai_matrix;
TestImgEngine::TestImgEngine() {}
TestImgEngine::~TestImgEngine() {}
APP_ERROR TestImgEngine::Init()
{
strPort0_ = engineName_ + "_" + std::to_string(engineId_) + "_0";
dataSourceConfig_ = MyYaml::GetIns()->GetDataSourceConfigById(engineId_); //获取摄像机参数
width_ = IMAGE_WIDTH, height_ = IMAGE_HEIGHT;
LogInfo << "engineId_:" << engineId_ << " TestImgEngine Init ok";
return APP_ERR_OK;
}
APP_ERROR TestImgEngine::DeInit()
{
LogInfo << "engineId_:" << engineId_ << " TestImgEngine DeInit ok";
return APP_ERR_OK;
}
//enable when testing jpeg decoding, and adjust the corresponding yaml engine-connection config
#if 0
APP_ERROR TestImgEngine::Process()
{
int iRet = APP_ERR_OK;
uint64_t u64count_num = 0;
while (!isStop_)
{
// std::cout<<"Enter Read Image Thread "<<++u64count_num<<" Times!"<<std::endl;
// std::cout<<"Read Image Thread ID: "<<std::this_thread::get_id()<<std::endl;
//read the image
std::string jpeg_img_file_name = MyYaml::GetIns()->GetStringValue("jpeg_image_file_name");
//read a jpg image from a local file and build the jpeg data
void* pJPEGBuffer = nullptr;
FILE *jpeg_fp;
jpeg_fp = fopen(jpeg_img_file_name.c_str(), "r");
if (!jpeg_fp)
{
std::cerr<<"Can not open "<<jpeg_img_file_name.c_str()<<std::endl;
}
fseek(jpeg_fp, 0L, SEEK_END);
unsigned int pJPEGBuffer_Size = ftell(jpeg_fp);
// printf("the jpg image data len: %d\n", pJPEGBuffer_Size);
// std::cout<<"the jpg image data len: "<<pJPEGBuffer_Size<<std::endl;
fseek(jpeg_fp, 0L, SEEK_SET);
pJPEGBuffer = new uint8_t[pJPEGBuffer_Size];
fread((char*)pJPEGBuffer, 1, pJPEGBuffer_Size, jpeg_fp);
fclose(jpeg_fp);
std::shared_ptr<FrameData> pJPEGFrameData = std::make_shared<FrameData>();
//assemble the data and push it to the next engine
pJPEGFrameData->iDataSource = engineId_;
pJPEGFrameData->iSize = pJPEGBuffer_Size;
pJPEGFrameData->pData.reset(pJPEGBuffer, [](void* data){if(data) {delete[] data; data = nullptr;}}); //memory managed by the smart pointer
// pJPEGFrameData->pData.reset(pJPEGBuffer, Deleter); //memory managed by the smart pointer
pJPEGFrameData->i64TimeStamp = MyUtils::getins()->GetCurrentTimeMillis();
#if 1
iRet = outputQueMap_[strPort0_]->push(std::static_pointer_cast<void>(pJPEGFrameData));
if (iRet != APP_ERR_OK){
LogError << "push the jpeg image data failed...";
std::cerr<<"push the jpeg image data failed..."<<std::endl;
}else{
// std::cout<<"push the jpeg image data success!"<<std::endl;
}
#endif
usleep(30 * 1000);
}
}
#else
//enable when testing H264 or jpeg encoding, and adjust the corresponding yaml engine-connection config
APP_ERROR TestImgEngine::Process()
{
int iRet = APP_ERR_OK;
uint64_t u64count_num = 0;
while (!isStop_)
{
// std::cout<<"Enter Read Image Thread "<<++u64count_num<<" Times!"<<std::endl;
// std::cout<<"Read Image Thread ID: "<<std::this_thread::get_id()<<std::endl;
//read the image
std::string yuv420m_img_file_name = MyYaml::GetIns()->GetStringValue("yuv420m_image_file_name");
//read a yuv420m image from a local file and build the yuv420m data
void* pYUV420MBuffer = nullptr;
FILE *yuv420m_fp;
yuv420m_fp = fopen(yuv420m_img_file_name.c_str(), "rb");
if (!yuv420m_fp)
{
std::cerr<<"Can not open "<<yuv420m_img_file_name.c_str()<<std::endl;
}
fseek(yuv420m_fp, 0L, SEEK_END);
unsigned int pYUV420MBuffer_Size = ftell(yuv420m_fp);
// printf("test.yuv filesize = %d\n", pYUV420MBuffer_Size);
// std::cout<<"test.yuv filesize = "<<pYUV420MBuffer_Size<<std::endl;
fseek(yuv420m_fp, 0L, SEEK_SET);
pYUV420MBuffer = new uint8_t[pYUV420MBuffer_Size];
fread((char*)pYUV420MBuffer, 1, pYUV420MBuffer_Size, yuv420m_fp);
fclose(yuv420m_fp);
std::shared_ptr<FrameData> pYUV420MFrameData = std::make_shared<FrameData>();
//assemble the data and push it to the next engine
pYUV420MFrameData->iDataSource = engineId_;
pYUV420MFrameData->iSize = pYUV420MBuffer_Size;
pYUV420MFrameData->pData.reset(pYUV420MBuffer, [](void* data){if(data) {delete[] data; data = nullptr;}}); //memory managed by the smart pointer
// pYUV420MFrameData->pData.reset(pYUV420MBuffer, Deleter); //memory managed by the smart pointer
pYUV420MFrameData->i64TimeStamp = MyUtils::getins()->GetCurrentTimeMillis();
#if 1
iRet = outputQueMap_[strPort0_]->push(std::static_pointer_cast<void>(pYUV420MFrameData));
if (iRet != APP_ERR_OK){
LogError << "push the yuv420m image data failed...";
std::cerr<<"push the yuv420m image data failed..."<<std::endl;
}else{
// std::cout<<"push the yuv420m image data success!"<<std::endl;
}
#endif
usleep(30 * 1000);
}
}
#endif

View File

@ -1,45 +0,0 @@
//image-reading engine (for testing)
#ifndef _TEST_IMG_ENGINE_H
#define _TEST_IMG_ENGINE_H
#include <iostream>
#include <chrono>
#include <cmath>
#include <utility>
#include <thread>
#include <chrono>
#include <functional>
#include <atomic>
#include <time.h>
#include <unistd.h>
#include <queue>
#include <mutex>
#include <semaphore.h>
#include "EngineBase.h"
#include "EngineFactory.h"
#include "MyYaml.h"
#include "myutils.h"
#include "AppCommon.h"
class TestImgEngine : public ai_matrix::EngineBase
{
public:
TestImgEngine();
~TestImgEngine();
APP_ERROR Init() override;
APP_ERROR DeInit() override;
APP_ERROR Process() override;
private:
ai_matrix::DataSourceConfig dataSourceConfig_;
std::string strPort0_;
unsigned int width_, height_;
};
ENGINE_REGIST(TestImgEngine)
#endif //END OF _TEST_IMG_ENGINE_H

View File

@ -1,109 +0,0 @@
#include "VideoEngine.h"
using namespace std;
using namespace cv;
using namespace ai_matrix;
VideoEngine::VideoEngine() {}
VideoEngine::~VideoEngine() {}
APP_ERROR VideoEngine::Init()
{
LogInfo << "engineId_:" << engineId_ << " VideoEngine Init start";
strPort0_ = engineName_ + "_" + std::to_string(engineId_) + "_0";
dataSourceConfig_ = MyYaml::GetIns()->GetDataSourceConfigById(engineId_); //获取摄像机参数
width_ = IMAGE_WIDTH, height_ = IMAGE_HEIGHT;
LogInfo << "engineId_:" << engineId_ << " VideoEngine Init ok";
return APP_ERR_OK;
}
APP_ERROR VideoEngine::DeInit()
{
LogInfo << "engineId_:" << engineId_ << " VideoEngine DeInit ok";
return APP_ERR_OK;
}
APP_ERROR VideoEngine::Process()
{
int iRet = APP_ERR_OK;
uint64_t u64count_num = 0;
// if (MyYaml::GetIns()->GetStringValue("gc_data_source") != "camera")
// {
// LogDebug << "engineId_:" << engineId_ << " gc_data_source no camera";
// return iRet;
// }
VideoCapture capture;
/*****************************************************************************************
GStreamer decoding
decoder plugins: 1. nvv4l2decoder 2. omxh264dec
enable-max-performance and enable-frame-type-reporting are only available when decoding with nvv4l2decoder
enable-max-performance=1
enable-frame-type-reporting=1
*****************************************************************************************/
//pull the RTSP stream from the camera
const std::string videoStreamAddress = std::string("rtspsrc location=") + dataSourceConfig_.strUrl.c_str() + " latency=10 ! \
rtph264depay ! h264parse ! nvv4l2decoder enable-max-performance=1 enable-frame-type-reporting=1 ! nvvidconv ! video/x-raw, format=(string)BGRx ! videoconvert ! appsink";
// while(!capture.open(dataSourceConfig_.strUrl.c_str())){
while(!capture.open(videoStreamAddress)){
std::cerr<<"Opening video stream or file failed!!!" <<std::endl;
std::cout<<"Restart Opening video stream or file ..."<<std::endl;
sleep(1);
}
std::cout<<"Opening video stream or file Success"<<std::endl;
int frameW = capture.get(3);
int frameH = capture.get(4);
std::cout << dataSourceConfig_.strUrl.c_str() << ";"<< "frameW:" << frameW << " frameH:" << frameH << std::endl;
while (!isStop_)
{
std::cout << dataSourceConfig_.strUrl.c_str() << ";"<< "frameW:" << frameW << " frameH:" << frameH << std::endl;
// std::cout<<"Enter VideoEngine Thread "<<++u64count_num<<" Times!"<<std::endl;
// std::cout<<"VideoEngine Thread ID: "<<std::this_thread::get_id()<<std::endl;
//build the BGR buffer
void* pBGRBuffer = nullptr;
unsigned int pBGRBuffer_Size = width_*height_*3;
pBGRBuffer = new uint8_t[pBGRBuffer_Size];
std::shared_ptr<FrameData> pBGRFrameData = std::make_shared<FrameData>();
cv::Mat frame(frameH, frameW, CV_8UC3, pBGRBuffer);
// clock_t start, end;
// start = clock();
if(!capture.read(frame)) {
std::cerr << "no frame" << std::endl;
waitKey();
}
// end = clock();
// printf("read 1 frame time is %.8f ms\n", (double)(end-start)/CLOCKS_PER_SEC*1000);
//push the BGR data decoded from the H264 stream pulled over RTSP by OpenCV
//assemble the data
pBGRFrameData->iDataSource = engineId_;
pBGRFrameData->iSize = pBGRBuffer_Size;
pBGRFrameData->pData.reset(pBGRBuffer, [](void* data){if(data) {delete[] data; data = nullptr;}}); //memory managed by the smart pointer
// pBGRFrameData->pData.reset(pBGRBuffer, Deleter); //memory managed by the smart pointer
pBGRFrameData->i64TimeStamp = MyUtils::getins()->GetCurrentTimeMillis();
iRet = outputQueMap_[strPort0_]->push(std::static_pointer_cast<void>(pBGRFrameData));
if (iRet != APP_ERR_OK){
LogError << "push the bgr frame data failed...";
std::cerr<<"push the bgr frame data failed..."<<std::endl;
}else{
// std::cout<<"push the bgr frame data success!"<<std::endl;
}
// usleep(30*1000); //simulate 30 fps when reading from a file
}
}

View File

@ -1,69 +0,0 @@
//OpenCV RTSP pull engine (includes video decoding)
#ifndef _VIDEO_ENGINE_H
#define _VIDEO_ENGINE_H
#include <iostream>
#include <chrono>
#include <cmath>
#include <utility>
#include <thread>
#include <chrono>
#include <functional>
#include <atomic>
#include <time.h>
#include <sys/time.h>
#include <unistd.h>
#include <queue>
#include <mutex>
#include <semaphore.h>
#include <algorithm>
#include <string>
#include <stdio.h>
#include <stdarg.h>
#include <string.h>
#include <vector>
#include <memory>
#include <opencv2/opencv.hpp>
#include <opencv2/core/core.hpp>
#include <opencv2/highgui/highgui.hpp>
#include <opencv2/imgproc/imgproc.hpp>
#ifdef __cplusplus
extern "C"
{
#endif
#include <libavcodec/avcodec.h>
#include <libavutil/samplefmt.h>
#include <libavformat/avformat.h>
#ifdef __cplusplus
};
#endif
#include "EngineBase.h"
#include "EngineFactory.h"
#include "MyYaml.h"
#include "myutils.h"
#include "AppCommon.h"
#define RTSP_PULL_CAMERA_VIDEO_STREAM
class VideoEngine : public ai_matrix::EngineBase
{
public:
VideoEngine();
~VideoEngine();
APP_ERROR Init() override;
APP_ERROR DeInit() override;
APP_ERROR Process() override;
private:
ai_matrix::DataSourceConfig dataSourceConfig_;
std::string strPort0_;
unsigned int width_, height_;
};
ENGINE_REGIST(VideoEngine)
#endif //_VIDEO_ENGINE_H

View File

@ -67,19 +67,19 @@ APP_ERROR VideoDecodeEngine::Process()
{
usleep(10*1000); //10ms
iNoCameraDataCnt++;
if (iNoCameraDataCnt >= 1000) //no data within 10 s: assume the camera is disconnected
{
LogError << "engineId:" << engineId_ << " no camera data for more than 10 s, the camera may be disconnected; count:" << iNoCameraDataCnt;
iNoCameraDataCnt = 0;
//when the camera misbehaves, push an empty decoded frame so data keeps flowing to the downstream engines
std::shared_ptr<ProcessData> pProcessData = std::make_shared<ProcessData>();
pProcessData->iDataSource = engineId_;
pProcessData->i64TimeStamp = MyUtils::getins()->GetCurrentTimeMillis();
pProcessData->iSize = 0;
pProcessData->pData = nullptr;
iRet = outputQueMap_[strPort0_]->push(std::static_pointer_cast<void>(pProcessData));
}
// iNoCameraDataCnt++;
// if (iNoCameraDataCnt >= 1000) //no data within 10 s: assume the camera is disconnected
// {
// LogError << "engineId:" << engineId_ << " no camera data for more than 10 s, the camera may be disconnected; count:" << iNoCameraDataCnt;
// iNoCameraDataCnt = 0;
// //when the camera misbehaves, push an empty decoded frame so data keeps flowing to the downstream engines
// std::shared_ptr<ProcessData> pProcessData = std::make_shared<ProcessData>();
// pProcessData->iDataSource = engineId_;
// pProcessData->i64TimeStamp = MyUtils::getins()->GetCurrentTimeMillis();
// pProcessData->iSize = 0;
// pProcessData->pData = nullptr;
// iRet = outputQueMap_[strPort0_]->push(std::static_pointer_cast<void>(pProcessData));
// }
continue;
}
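The hunk above shows the camera-disconnect watchdog being commented out: count 10 ms polls with no input, and after roughly 10 seconds log the problem and push an empty frame so downstream engines keep receiving data. For reference, a standalone sketch of that counting pattern; ProcessDataStub and the std::queue stand in for the project's real data struct and output queue, and the sleep is shortened so the demo finishes quickly.

// Standalone sketch of the (now disabled) no-data watchdog. ProcessDataStub and the
// plain std::queue are stand-ins; the 1 ms sleep replaces the engine's 10 ms poll.
#include <chrono>
#include <iostream>
#include <memory>
#include <queue>
#include <thread>

struct ProcessDataStub { int iSize = 0; std::shared_ptr<void> pData; };

int main() {
    std::queue<std::shared_ptr<ProcessDataStub>> outputQue;
    int iNoCameraDataCnt = 0;
    for (int poll = 0; poll < 2200; ++poll) {
        bool gotFrame = false;                 // in the real engine: pop from the input queue
        if (!gotFrame) {
            std::this_thread::sleep_for(std::chrono::milliseconds(1));
            if (++iNoCameraDataCnt >= 1000) {  // 1000 empty polls in a row -> treat as disconnect
                std::cerr << "no camera data for a long stretch, camera may be disconnected" << std::endl;
                iNoCameraDataCnt = 0;
                // Push an empty placeholder so the pipeline behind this engine keeps moving.
                outputQue.push(std::make_shared<ProcessDataStub>());
            }
            continue;
        }
        iNoCameraDataCnt = 0;                  // reset whenever real data arrives
    }
    std::cout << "placeholder frames pushed: " << outputQue.size() << std::endl;
    return 0;
}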

View File

@ -111,13 +111,8 @@ APP_ERROR MoveEngine::ReadModelInfo()
model_width = jvModelInfo["model_width"].asInt();
model_height = jvModelInfo["model_height"].asInt();
//clear_num = jvModelInfo["clear"].isArray() ? jvModelInfo["clear"].size() : 0;
//class_num = jvModelInfo["class"].isArray() ? jvModelInfo["class"].size() : 0;
input_size = GET_INPUT_SIZE(model_width , model_height);
output_size = GET_OUTPUT_SIZE(model_width , model_height, clear_num , class_num);
// det_size = clear_num + class_num + 5;
// score_threshold = modelConfig_.fScoreThreshold;
// nms_threshold = modelConfig_.fNMSTreshold;
return APP_ERR_OK;
}
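ReadModelInfo above derives the input/output buffer sizes from the configured width and height via GET_INPUT_SIZE and GET_OUTPUT_SIZE. Those macros are not visible in this diff; for a YOLOv5-style detector they usually reduce to the formulas below, which is an assumption about this project, not its actual definitions.

// Assumed YOLOv5-style size formulas; the repo's real GET_INPUT_SIZE / GET_OUTPUT_SIZE
// macros may be defined differently.
#include <cstdio>

constexpr int InputSize(int w, int h) {
    return 3 * w * h;                                   // CHW float input, 3 channels
}

constexpr int OutputSize(int w, int h, int clear_num, int class_num) {
    // 3 anchors per cell at strides 8, 16 and 32; each box carries
    // x, y, w, h, objectness (5) plus the clarity and class scores.
    int boxes = 3 * ((w / 8) * (h / 8) + (w / 16) * (h / 16) + (w / 32) * (h / 32));
    return boxes * (5 + clear_num + class_num);
}

int main() {
    std::printf("input floats:  %d\n", InputSize(640, 640));
    std::printf("output floats: %d\n", OutputSize(640, 640, 1, 20));
    return 0;
}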
@ -146,8 +141,8 @@ void MoveEngine::InitParam()
}
void MoveEngine::sendComeTrain() {
// std::string message = "{\"cometime\":" + this->strTrainDate_ + " " + this->strTrainName_ + "\",\"type\":\"1\"}";
// outputQueMap_[engineName_ + "_" + std::to_string(engineId_) + "_1"]->push(std::static_pointer_cast<void>(std::make_shared<std::string>(message)));
std::string message = "{\"cometime\":\"" + this->strTrainDate_ + " " + this->strTrainName_ + "\",\"type\":\"1\"}";
outputQueMap_[engineName_ + "_" + std::to_string(engineId_) + "_1"]->push(std::static_pointer_cast<void>(std::make_shared<std::string>(message)));
}
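The sendComeTrain fix above adds the escaped quote that the hand-built cometime string was missing. One way to avoid that class of bug is to build the message with jsoncpp, which this engine already uses for the model info JSON; a small sketch, with the date and train name hard-coded purely for illustration.

// Sketch: building the cometime message with jsoncpp instead of string concatenation
// (values are hard-coded here; in the engine they come from strTrainDate_ / strTrainName_).
#include <iostream>
#include <json/json.h>

int main() {
    Json::Value msg;
    msg["cometime"] = "2024-05-22 K1234";   // date + train name; quoting is handled by the writer
    msg["type"] = "1";

    Json::StreamWriterBuilder builder;
    builder["indentation"] = "";            // compact, single-line output
    std::string message = Json::writeString(builder, msg);
    std::cout << message << std::endl;      // {"cometime":"2024-05-22 K1234","type":"1"}
    return 0;
}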
void MoveEngine::sendEndTrain() {
@ -274,6 +269,7 @@ APP_ERROR MoveEngine::Process()
memset(fReturnVal, 0x00, sizeof(fReturnVal));
yolov8model.YoloV8InferenceModelGetType(img, fReturnVal, STEP0_OUTPUT_ARRAY * sizeof(float));
// exit(0);
float fScore = 0.0f;
for(int n = 0; n < 4; n++){
@ -282,7 +278,7 @@ APP_ERROR MoveEngine::Process()
nType = n;
}
}
LogDebug <<"模型得分 车头:"<< fReturnVal[0]<<" 无车:"<< fReturnVal[1]<<" 车尾:"<< fReturnVal[2]<<" 有车:"<< fReturnVal[3];
// LogDebug <<"模型得分 车头:"<< fReturnVal[0]<<" 无车:"<< fReturnVal[1]<<" 车尾:"<< fReturnVal[2]<<" 有车:"<< fReturnVal[3];
// LogInfo<<"来车当前状态:"<< (nType == 0 ? "有车头" : (nType == 1 ? "无车"));
switch (nType) {
case 0:
@ -315,7 +311,7 @@ APP_ERROR MoveEngine::Process()
if (bGetTrainExist == true)
{
iHasTrainNum_ = iHasTrainNum_ > 20 ? iHasTrainNum_ : iHasTrainNum_ + 1;
if (iHasTrainNum_ > 0) LogDebug << "train currently present, count:" << iHasTrainNum_;
// if (iHasTrainNum_ > 0) LogDebug << "train currently present, count:" << iHasTrainNum_;
}
else
{
@ -337,7 +333,7 @@ APP_ERROR MoveEngine::Process()
LogDebug << "iStepInter_: " << iStepInter_ << " queSize:" << queProcessData_.size() << " continue";
continue;
}
// if (iStepInter_ != 1) this->sendComeTrain();
if (iStepInter_ != 1) this->sendComeTrain();
iStepInter_ = 1;
}
@ -347,13 +343,13 @@ APP_ERROR MoveEngine::Process()
if (iStepInter_ == 1)
{
iStepInter_ = 2;
this->sendEndTrain();
}
while (!queProcessData_.empty())
{
LogDebug << "while iStepInter_: " << iStepInter_ << " queSize:" << queProcessData_.size();
queProcessData_.pop();
}
this->sendEndTrain();
}
//recognition handling while a train is present
if (iStepInter_ != 0)
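Read together, the Process hunks above maintain a small arrival state in iStepInter_: when the model reports a train and the frame queue is deep enough, sendComeTrain fires once and the state moves to 1; when the train is gone, the queue is drained, sendEndTrain fires once and the state moves to 2. A compact sketch of that state machine, reconstructed from the diff (member names kept where visible, everything else assumed):

// Reconstructed sketch of the MoveEngine arrival state machine; names and thresholds
// are taken from the diff where visible, otherwise assumed.
#include <iostream>

class ArrivalStateSketch {
public:
    void OnFrame(bool trainPresent) {
        if (trainPresent) {
            if (iStepInter_ != 1) SendComeTrain();   // fire once when the train arrives
            iStepInter_ = 1;
        } else {
            if (iStepInter_ == 1) {                   // fire once when the train has left
                iStepInter_ = 2;
                SendEndTrain();
            }
        }
    }
private:
    void SendComeTrain() { std::cout << "come train" << std::endl; }
    void SendEndTrain()  { std::cout << "end train" << std::endl; }
    int iStepInter_ = 0;   // 0 = idle, 1 = train present, 2 = train just left
};

int main() {
    ArrivalStateSketch s;
    bool frames[] = {false, true, true, true, false, false, true, false};
    for (bool f : frames) s.OnFrame(f);
    return 0;
}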

View File

@ -14,7 +14,7 @@ APP_ERROR TrainStepOneEngine::Init()
bUseEngine_ = MyUtils::getins()->ChkIsHaveTarget("NUM");
if (!bUseEngine_)
{
LogWarn << "engineId_:" << engineId_ << " not use engine";
LogInfo << "engineId_:" << engineId_ << " not use engine";
return APP_ERR_OK;
}
@ -98,7 +98,7 @@ APP_ERROR TrainStepOneEngine::InitModel()
int nRet = yolov5model.YoloV5ClearityInferenceInit(&modelinfo, strModelName, modelConfig_.strOmPath);
if (nRet != 0)
{
LogInfo << "YoloV5ClassifyInferenceInit nRet:" << nRet;
LogError << "YoloV5ClassifyInferenceInit nRet:" << nRet;
return APP_ERR_COMM_READ_FAIL;
}
return APP_ERR_OK;
@ -147,7 +147,7 @@ APP_ERROR TrainStepOneEngine::DeInit()
{
if (!bUseEngine_)
{
LogWarn << "engineId_:" << engineId_ << " not use engine";
LogInfo << "engineId_:" << engineId_ << " not use engine";
return APP_ERR_OK;
}
@ -170,7 +170,7 @@ void TrainStepOneEngine::PushData(const std::string &strPort, const std::shared_
int iRet = outputQueMap_[strPort]->push(std::static_pointer_cast<void>(pProcessData));
if (iRet != 0)
{
LogDebug << "sourceid:" << pProcessData->iDataSource << " frameid:" << pProcessData->iFrameId << " push fail iRet:" << iRet;
LogError << " frameid:" << pProcessData->iFrameId << " push fail iRet:" << iRet;
if (iRet == 2)
{
usleep(10000); // 10ms
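PushData above retries the output queue with a short sleep when push returns 2, which, judging by the check, the project's queue uses to signal that it is full, rather than dropping the detection outright. A self-contained sketch of that back-off pattern; BoundedQueueSketch is illustrative and not the project's queue class.

// Sketch of the push-with-backoff pattern used by PushData: if the downstream queue is
// full, sleep briefly and retry instead of dropping the frame (the bounded queue here
// is illustrative, not the project's queue class).
#include <chrono>
#include <iostream>
#include <mutex>
#include <queue>
#include <thread>

template <typename T>
class BoundedQueueSketch {
public:
    explicit BoundedQueueSketch(size_t cap) : cap_(cap) {}
    // Returns 0 on success, 2 when the queue is full (mirrors the iRet == 2 check above).
    int TryPush(const T& v) {
        std::lock_guard<std::mutex> lk(m_);
        if (q_.size() >= cap_) return 2;
        q_.push(v);
        return 0;
    }
private:
    std::mutex m_;
    std::queue<T> q_;
    size_t cap_;
};

int main() {
    BoundedQueueSketch<int> que(2);
    for (int frame = 0; frame < 5; ++frame) {
        int iRet = que.TryPush(frame);
        if (iRet == 2) {                        // queue full: back off once, then retry
            std::cerr << "frame " << frame << " push fail, queue full, backing off" << std::endl;
            std::this_thread::sleep_for(std::chrono::milliseconds(10));
            iRet = que.TryPush(frame);
        }
        std::cout << "frame " << frame << (iRet == 0 ? " pushed" : " dropped") << std::endl;
    }
    return 0;
}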
@ -194,7 +194,7 @@ void TrainStepOneEngine::FilterInvalidInfo(std::vector<stDetection> &vecRet, std
std::vector<stDetection> vecSpaceInfo;
for (auto it = vecRet.begin(); it != vecRet.end();)
{
LogDebug << "sourceid:" << pProcessData->iDataSource << " frameId:" << pProcessData->iFrameId
LogDebug << "frameId:" << pProcessData->iFrameId
<< " bigclassid:" << it->class_id << " ltx:" << it->bbox[0] << " lty:" << it->bbox[1]
<< " rbx:" << it->bbox[2] << " rby:" << it->bbox[3];
// Filter out detections that fall outside the recognition area set in the config file
@ -203,49 +203,75 @@ void TrainStepOneEngine::FilterInvalidInfo(std::vector<stDetection> &vecRet, std
it->bbox[2] <= dataSourceCfg.fIdentifyAreasRBX &&
it->bbox[3] <= dataSourceCfg.fIdentifyAreasRBY))
{
LogWarn << "sourceid:" << pProcessData->iDataSource << " frameId:" << pProcessData->iFrameId
<< " bigclassid:" << it->class_id << " invalid areas";
LogDebug << "frameId:" << pProcessData->iFrameId
<< " bigclassid:" << it->class_id << " 超出识别区域-识别区域:("
<< dataSourceCfg.fIdentifyAreasLTX << "," << dataSourceCfg.fIdentifyAreasLTY << "),("
<< dataSourceCfg.fIdentifyAreasRBX << "," << dataSourceCfg.fIdentifyAreasRBY << ")";
it = vecRet.erase(it);
continue;
}
// Remove gap information at the head and tail of the train
// If head recognition is disabled in the config, drop the boxes labeled as train head
if (!MyYaml::GetIns()->GetBoolValue("gc_train_heard_detect") && it->class_id == TRAIN_HEAD)
{
LogDebug << "frameId:" << pProcessData->iFrameId << " 过滤掉车头编号";
it = vecRet.erase(it);
continue;
}
// While at the train head, remove labels that are not the head label
if(pProcessData->nMonitorState == MONITOR_MODEL_TRAIN_HEAD )
{
LogWarn<<" pProcessData->nMonitorState:" << pProcessData->nMonitorState;
if(it->class_id != TRAIN_HEAD)
{
LogError << "sourceid:" << pProcessData->iDataSource << " frameId:" << pProcessData->iFrameId
<< " bigclassid:" << it->class_id << " pProcessData->nMonitorState:" << pProcessData->nMonitorState
<< " invalid";
LogDebug << " 帧号:" << pProcessData->iFrameId
<< " 大类:" << it->class_id << " 识别于车头位置,无效!";
it = vecRet.erase(it);
continue;
}
}
if (pProcessData->nMonitorState == MONITOR_MODEL_TRAIN_TAIL )
// Remove train-head labels detected away from the train head
if (pProcessData->nMonitorState != MONITOR_MODEL_TRAIN_HEAD)
{
LogWarn<<" pProcessData->nMonitorState:" << pProcessData->nMonitorState;
/*if(
(it->class_id <= U_TRAIN_SPACE)
&& (it->class_id >= C_TRAIN_SPACE)
&& (it->class_id != W_TRAIN_NUM)
)*/
if (it->class_id == TRAIN_HEAD)
{
LogError << "sourceid:" << pProcessData->iDataSource << " frameId:" << pProcessData->iFrameId
<< " bigclassid:" << it->class_id << " pProcessData->nMonitorState:" << pProcessData->nMonitorState
<< " invalid";
LogDebug << " 帧号:" << pProcessData->iFrameId
<< " 大类:" << it->class_id << " 识别于非车头位置,无效!";
it = vecRet.erase(it);
continue;
}
}
// Remove gap information at the train tail
if (pProcessData->nMonitorState == MONITOR_MODEL_TRAIN_TAIL
&& ((it->class_id >= 9 && it->class_id <= 17 && it->class_id != 15) || it->class_id == 18))
{
LogDebug << " frameId:" << pProcessData->iFrameId
<< " bigclassid:" << it->class_id
<<" 识别于车尾部分,无效!";
it = vecRet.erase(it);
continue;
}
// Drop every box detected while the model reports the no-train state
if (pProcessData->nMonitorState == MONITOR_MODEL_NO_TRAIN)
{
LogDebug << " frameId:" << pProcessData->iFrameId
<< " bigclassid:" << it->class_id
<<" 识别于模型反馈的无车状态下,无效!";
it = vecRet.erase(it);
continue;
}
// Use box height to drop detections that belong to a far track
int iClassHeight = it->bbox[3] - it->bbox[1];
if (dataSourceCfg.mapClassMinH.find(it->class_id) != dataSourceCfg.mapClassMinH.end() &&
iClassHeight < dataSourceCfg.mapClassMinH[it->class_id])
{
LogError << "sourceid:" << pProcessData->iDataSource << " frameId:" << pProcessData->iFrameId
LogDebug << " frameId:" << pProcessData->iFrameId
<< " bigclassid:" << it->class_id << " iClassHeight:" << iClassHeight
<< " minH:" << dataSourceCfg.mapClassMinH[it->class_id] << " invalid hegiht";
<< " minH:" << dataSourceCfg.mapClassMinH[it->class_id] << " 过滤疑似远股道识别";
it = vecRet.erase(it);
continue;
}
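FilterInvalidInfo applies these per-detection rules with an erase-inside-loop pass. The two purely geometric checks, the configured recognition area and the per-class minimum box height, can be summarised as a single predicate; the sketch below uses simplified stand-in types rather than the project's stDetection and data-source config.

// Simplified sketch of the area / minimum-height filtering above
// (BoxSketch and AreaCfgSketch stand in for stDetection and the data-source config).
#include <iostream>
#include <map>
#include <vector>

struct BoxSketch { int class_id; float bbox[4]; };   // ltx, lty, rbx, rby
struct AreaCfgSketch {
    float ltx, lty, rbx, rby;                        // configured recognition area
    std::map<int, int> classMinH;                    // per-class minimum box height
};

static bool KeepBox(const BoxSketch& b, const AreaCfgSketch& cfg) {
    // Drop anything outside the configured recognition area.
    if (!(b.bbox[0] >= cfg.ltx && b.bbox[1] >= cfg.lty &&
          b.bbox[2] <= cfg.rbx && b.bbox[3] <= cfg.rby))
        return false;
    // Drop boxes that are too short for their class (likely a far track).
    auto it = cfg.classMinH.find(b.class_id);
    int height = static_cast<int>(b.bbox[3] - b.bbox[1]);
    if (it != cfg.classMinH.end() && height < it->second) return false;
    return true;
}

int main() {
    AreaCfgSketch cfg{0.f, 0.f, 1920.f, 1080.f, {{1, 80}}};
    std::vector<BoxSketch> boxes = {{1, {100, 100, 300, 150}},   // too short for class 1
                                    {1, {100, 100, 300, 400}}};  // kept
    for (auto it = boxes.begin(); it != boxes.end();)
        it = KeepBox(*it, cfg) ? it + 1 : boxes.erase(it);
    std::cout << "kept: " << boxes.size() << std::endl;
    return 0;
}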
@ -256,7 +282,7 @@ void TrainStepOneEngine::FilterInvalidInfo(std::vector<stDetection> &vecRet, std
{
if (it->class_id != 1 && it->class_id != 6)
{
LogDebug << "sourceid:" << pProcessData->iDataSource << " frameId:" << pProcessData->iFrameId << " flat camera only deal 1 or 6";
LogDebug << " frameId:" << pProcessData->iFrameId << " flat camera only deal 1 or 6";
it = vecRet.erase(it);
continue;
}
@ -266,8 +292,8 @@ void TrainStepOneEngine::FilterInvalidInfo(std::vector<stDetection> &vecRet, std
if (((it->class_id >= 2 && it->class_id <= 6) || it->class_id == J_TRAIN_NUM || it->class_id == W_TRAIN_NUM) &&
(it->bbox[3] - it->bbox[1]) > (it->bbox[2] - it->bbox[0]))
{
LogWarn << "sourceid:" << pProcessData->iDataSource << " frameId:" << pProcessData->iFrameId
<< " bigclassid:" << it->class_id << " invalid data-- height > width ";
LogWarn << " frameId:" << pProcessData->iFrameId
<< " bigclassid:" << it->class_id << " 过滤 高度大于宽度的车号";
it = vecRet.erase(it);
continue;
}
@ -301,8 +327,8 @@ void TrainStepOneEngine::FilterInvalidInfo(std::vector<stDetection> &vecRet, std
{
if (it->bbox[3] < (pProcessData->iHeight * iSpaceMinRBXPer_ / 100))
{
LogWarn << "sourceid:" << pProcessData->iDataSource << " frameId:" << pProcessData->iFrameId
<< " bigclassid:" << it->class_id << " spaceinfo invalid fRBY:" << it->bbox[3];
LogWarn << " frameId:" << pProcessData->iFrameId
<< " bigclassid:" << it->class_id << " 过滤间隔过于靠下的间隔信息 fRBY:" << it->bbox[3];
it = vecRet.erase(it);
continue;
}
@ -321,9 +347,9 @@ void TrainStepOneEngine::FilterInvalidInfo(std::vector<stDetection> &vecRet, std
if (iHeight0 < iCenterY && iHeight1 < iCenterY) //not a flat car
{
if (!((vecRet[0].class_id >= 9 && vecRet[0].class_id <= 17 && vecRet[0].class_id != 15) || vecRet[0].class_id == U_TRAIN_SPACE) &&
!((vecRet[1].class_id >= 9 && vecRet[10].class_id <= 17 && vecRet[1].class_id != 15) || vecRet[1].class_id == U_TRAIN_SPACE))
!((vecRet[1].class_id >= 9 && vecRet[1].class_id <= 17 && vecRet[1].class_id != 15) || vecRet[1].class_id == U_TRAIN_SPACE))
{
LogError << "sourceid:" << pProcessData->iDataSource << " frameId:" << pProcessData->iFrameId << " no space";
LogDebug << " frameId:" << pProcessData->iFrameId << " no space";
vecRet.clear();
}
}
@ -387,7 +413,7 @@ APP_ERROR TrainStepOneEngine::Process()
{
if (!bUseEngine_)
{
LogWarn << "engineId_:" << engineId_ << " not use engine";
LogInfo << "engineId_:" << engineId_ << " not use engine";
return APP_ERR_OK;
}
int iRet = APP_ERR_OK;
@ -417,10 +443,7 @@ APP_ERROR TrainStepOneEngine::Process()
//Run inference
std::vector<stDetection> res;
//auto start = std::chrono::system_clock::now(); //timing start
yolov5model.YoloV5ClearityInferenceModel(img, res);
//auto end = std::chrono::system_clock::now();
//LogInfo << "nopr1 inference time: " << std::chrono::duration_cast<std::chrono::milliseconds>(end - start).count() << "ms";
//Filter out invalid information
FilterInvalidInfo(res, pProcessData);
@ -461,10 +484,10 @@ APP_ERROR TrainStepOneEngine::Process()
SetTargetType(postSubData);
pPostData->vecPostSubData.emplace_back(postSubData);
LogDebug << "sourceid:" << pProcessData->iDataSource << " frameId:" << pProcessData->iFrameId
<< " --iClassId:" << singledata.iClassId << " iLine:" << singledata.iLine << " confidence=" << singledata.fScore
<< " lx=" << singledata.fLTX << " ly=" << singledata.fLTY << " rx=" << singledata.fRBX << " ry=" << singledata.fRBY
<< " clear:" << singledata.fClear;
// LogDebug << "数据源:" << pProcessData->iDataSource << " 帧:" << pProcessData->iFrameId
// << " --iClassId:" << singledata.iClassId << " iLine:" << singledata.iLine << " confidence=" << singledata.fScore
// << " lx=" << singledata.fLTX << " ly=" << singledata.fLTY << " rx=" << singledata.fRBX << " ry=" << singledata.fRBY
// << " clear:" << singledata.fClear;
}
}
}

View File

@ -193,7 +193,7 @@ APP_ERROR TrainStepTwoEngine::Process()
auto start = std::chrono::system_clock::now(); // timing start
yolov5model.YoloV5ClearityInferenceModel(step2_image, res);
auto end = std::chrono::system_clock::now();
LogInfo << "nopr2 inference time: " << std::chrono::duration_cast<std::chrono::milliseconds>(end - start).count() << "ms";
// LogInfo << "nopr2 inference time: " << std::chrono::duration_cast<std::chrono::milliseconds>(end - start).count() << "ms";
PostSubData postSubDataNew;
postSubDataNew.iTargetType = postsubdata.iTargetType;
@ -221,9 +221,9 @@ APP_ERROR TrainStepTwoEngine::Process()
postSubDataNew.vecSingleData.emplace_back(singledata);
LogDebug << "sourceid:" << pProcessData->iDataSource << " step2 after frameId:" << pProcessData->iFrameId
<< " --iClassId:" << singledata.iClassId << " iLine:" << singledata.iLine << " confidence=" << singledata.fScore
<< " lx=" << singledata.fLTX << " ly=" << singledata.fLTY << " rx=" << singledata.fRBX << " ry=" << singledata.fRBY;
// LogDebug << "sourceid:" << pProcessData->iDataSource << " step2 after frameId:" << pProcessData->iFrameId
// << " --iClassId:" << singledata.iClassId << " iLine:" << singledata.iLine << " confidence=" << singledata.fScore
// << " lx=" << singledata.fLTX << " ly=" << singledata.fLTY << " rx=" << singledata.fRBX << " ry=" << singledata.fRBY;
}
pPostData->vecPostSubData.emplace_back(postSubDataNew);
}
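The hunk above comments out the ad-hoc chrono start/end pair around the step-2 inference. If per-stage timing is still wanted occasionally, an RAII scope timer keeps each call site to one line and is easy to remove again; the sketch below is illustrative and not part of the project.

// Sketch of an RAII scope timer as a lighter-weight replacement for the ad-hoc
// chrono start/end pairs being commented out above (illustrative, not project code).
#include <chrono>
#include <iostream>
#include <string>
#include <thread>

class ScopeTimer {
public:
    explicit ScopeTimer(std::string label)
        : label_(std::move(label)), start_(std::chrono::steady_clock::now()) {}
    ~ScopeTimer() {
        auto ms = std::chrono::duration_cast<std::chrono::milliseconds>(
                      std::chrono::steady_clock::now() - start_).count();
        std::cout << label_ << " took " << ms << " ms" << std::endl;
    }
private:
    std::string label_;
    std::chrono::steady_clock::time_point start_;
};

int main() {
    {
        ScopeTimer t("nopr2 inference");                              // one line at the call site
        std::this_thread::sleep_for(std::chrono::milliseconds(25));   // stand-in for the model call
    }
    return 0;
}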

View File

@ -464,12 +464,12 @@ void yolov5_preprocess_kernel_img(
s2d.value[0] = scale;
s2d.value[1] = 0;
s2d.value[2] = 0; //paste at the top-left corner
// s2d.value[2] = -scale * src_width * 0.5 + dst_width * 0.5; //centred paste (letterbox)
// s2d.value[2] = 0; //paste at the top-left corner
s2d.value[2] = -scale * src_width * 0.5 + dst_width * 0.5; //centred paste (letterbox)
s2d.value[3] = 0;
s2d.value[4] = scale;
s2d.value[5] = 0; //paste at the top-left corner
// s2d.value[5] = -scale * src_height * 0.5 + dst_height * 0.5; //centred paste (letterbox)
// s2d.value[5] = 0; //paste at the top-left corner
s2d.value[5] = -scale * src_height * 0.5 + dst_height * 0.5; //centred paste (letterbox)
cv::Mat m2x3_s2d(2, 3, CV_32F, s2d.value);
cv::Mat m2x3_d2s(2, 3, CV_32F, d2s.value);
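The change above switches the preprocessing from a top-left paste to a centred letterbox: scale by min(dst_w/src_w, dst_h/src_h) and translate by (dst - scale*src)/2 on each axis so the scaled image sits in the middle of the network input, with d2s as the inverse transform for mapping detections back. A small host-side check of that arithmetic using OpenCV only; AffineSketch stands in for the kernel's AffineMatrix struct.

// Host-side check of the centred letterbox affine used in the CUDA preprocessing above
// (AffineSketch stands in for the kernel's AffineMatrix; only OpenCV is required).
#include <algorithm>
#include <iostream>
#include <opencv2/opencv.hpp>

struct AffineSketch { float value[6]; };

int main() {
    int src_width = 1920, src_height = 1080;
    int dst_width = 640,  dst_height = 640;

    float scale = std::min(dst_width / (float)src_width, dst_height / (float)src_height);

    AffineSketch s2d;                       // source -> destination (network input)
    s2d.value[0] = scale;  s2d.value[1] = 0;
    s2d.value[2] = -scale * src_width * 0.5f + dst_width * 0.5f;    // centre horizontally
    s2d.value[3] = 0;      s2d.value[4] = scale;
    s2d.value[5] = -scale * src_height * 0.5f + dst_height * 0.5f;  // centre vertically

    AffineSketch d2s;                       // destination -> source, for mapping boxes back
    cv::Mat m2x3_s2d(2, 3, CV_32F, s2d.value);
    cv::Mat m2x3_d2s(2, 3, CV_32F, d2s.value);
    cv::invertAffineTransform(m2x3_s2d, m2x3_d2s);

    // The source centre should land on the destination centre, i.e. (320, 320).
    float cx = s2d.value[0] * (src_width * 0.5f) + s2d.value[2];
    float cy = s2d.value[4] * (src_height * 0.5f) + s2d.value[5];
    std::cout << "centre maps to (" << cx << ", " << cy << ")" << std::endl;
    return 0;
}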