Compare commits

...

6 Commits

Author SHA1 Message Date
zhangwei ac73334039 Merge pull request 'Test更新一部分逻辑' (#2) from Test into main
Reviewed-on: #2
2024-05-22 09:29:56 +00:00
Mr.V 2e67e97508 优化倒车后的恢复判断。
增加上传接口的用户名密码配置。
2024-05-22 17:25:51 +08:00
Mr.V 665e6b62a7 1、更新车厢倒车恢复原位的判断逻辑
2、新增识别结果定期删除
3、arm于X86切换
4、对识别不符合规则的数据,不上传
5、来车检测状态反馈socket
2024-04-01 15:03:32 +08:00
Mr.V f2bcc2e472 更新部分日志,以及倒车处理的逻辑 2024-02-29 16:16:47 +08:00
Mr.V f330b88229 灰度图 中心贴图推理 2024-01-30 10:23:37 +08:00
Mr.V cabdbad267 忽略文件 2024-01-30 10:20:49 +08:00
33 changed files with 912 additions and 961 deletions

7
.gitignore vendored Normal file
View File

@ -0,0 +1,7 @@
/build/
/app/
*.tgz
*.tar
*.log
*.o
*.out

View File

@ -5,7 +5,7 @@ message("NVIDIA NX PLATFORM")
set(PROJECT_NAME train)
project(${PROJECT_NAME} VERSION 1.0)
add_definitions(-std=c++11)
add_definitions(-DAPI_EXPORTS)
@ -20,6 +20,8 @@ find_package(OpenCV REQUIRED)
# message(STATUS "${OpenCV_LIBS}")
# message(STATUS "${OpenCV_INCLUDE_DIRS}")
find_package(CUDA REQUIRED)
#
set(CMAKE_SYSTEM_NAME Linux)
set(CMAKE_C_COMPILER "gcc")
@ -39,15 +41,19 @@ set(SYS_USR_INCLUDE_DIR "/usr/include")
set(SYS_USR_LIB_DIR "/usr/lib")
set(SYS_USR_LOCAL_INCLUDE_DIR "/usr/local/include")
set(SYS_USR_LOCAL_LIB_DIR "/usr/local/lib")
# -- X86使 --
set(AARCH64_LINUX_INCLUDE_DIR "/usr/include/x86_64-linux-gnu")
set(AARCH64_LINUX_LIB_DIR "/usr/lib/x86_64-linux-gnu")
# -- ARM使 --
#set(AARCH64_LINUX_INCLUDE_DIR "/usr/include/aarch64-linux-gnu")
#set(AARCH64_LINUX_LIB_DIR "/usr/lib/aarch64-linux-gnu")
#opencv3.2.0/usr/lib/aarch64-linux-gnu /usr/include/opencv2
#opencv4.5.5/usr/local/lib /usr/local/include/opencv4
#使opencv4.5.5
set(OPENCV_INCLUDE_DIR ${SYS_USR_LOCAL_INCLUDE_DIR}/opencv4)
set(OPENCV_LIB_DIR ${SYS_USR_LOCAL_LIB_DIR})
set(OPENCV_INCLUDE_DIR ${SYS_USR_LOCAL_INCLUDE_DIR}/opencv4)
set(OPENCV_LIB_DIR ${SYS_USR_LOCAL_LIB_DIR})
set(CUDA_DIR "/usr/local/cuda-11.7")
set(CUDA_INCLUDE_DIR ${CUDA_DIR}/include)
@ -60,80 +66,80 @@ set(DRM_INCLUDE_DIR ${SYS_USR_INCLUDE_DIR}/libdrm) #DRM的头文件在/usr/incl
set(TEGRA_LIB_DIR ${AARCH64_LINUX_LIB_DIR}/tegra) #tegra/usr/lib/aarch64-linux-gnu/tegra
set(PCL_INCLUDE ${SYS_USR_LOCAL_INCLUDE_DIR}/pcl-1.7) #pcl
#set(PCL_INCLUDE ${SYS_USR_LOCAL_INCLUDE_DIR}/pcl-1.7) #pcl
# nvidia ascend common include
include_directories(
#ai_matrix include
${PROJECT_SOURCE_DIR}/ai_matrix
${PROJECT_SOURCE_DIR}/ai_matrix/framework
${PROJECT_SOURCE_DIR}/ai_matrix/myftp
${PROJECT_SOURCE_DIR}/ai_matrix/myhttp
${PROJECT_SOURCE_DIR}/ai_matrix/myJson
${PROJECT_SOURCE_DIR}/ai_matrix/myJson/json
${PROJECT_SOURCE_DIR}/ai_matrix/mylog
${PROJECT_SOURCE_DIR}/ai_matrix/pcl
${PROJECT_SOURCE_DIR}/ai_matrix/myqueue
${PROJECT_SOURCE_DIR}/ai_matrix/myshell
${PROJECT_SOURCE_DIR}/ai_matrix/myutils
#ai_matrix include
${PROJECT_SOURCE_DIR}/ai_matrix
${PROJECT_SOURCE_DIR}/ai_matrix/framework
${PROJECT_SOURCE_DIR}/ai_matrix/myftp
${PROJECT_SOURCE_DIR}/ai_matrix/myhttp
${PROJECT_SOURCE_DIR}/ai_matrix/myJson
${PROJECT_SOURCE_DIR}/ai_matrix/myJson/json
${PROJECT_SOURCE_DIR}/ai_matrix/mylog
${PROJECT_SOURCE_DIR}/ai_matrix/pcl
${PROJECT_SOURCE_DIR}/ai_matrix/myqueue
${PROJECT_SOURCE_DIR}/ai_matrix/myshell
${PROJECT_SOURCE_DIR}/ai_matrix/myutils
#nvidia ascend common cann include
${PROJECT_SOURCE_DIR}/nvidia_ascend_base/Base/BlockingQueue
${PROJECT_SOURCE_DIR}/nvidia_ascend_base/Base/CBase64
${PROJECT_SOURCE_DIR}/nvidia_ascend_base/Base/CommandParser
${PROJECT_SOURCE_DIR}/nvidia_ascend_base/Base/CommonDataType
${PROJECT_SOURCE_DIR}/nvidia_ascend_base/Base/ConfigParser
${PROJECT_SOURCE_DIR}/nvidia_ascend_base/Base/ErrorCode
${PROJECT_SOURCE_DIR}/nvidia_ascend_base/Base/FileManager
${PROJECT_SOURCE_DIR}/nvidia_ascend_base/Base/Log
${PROJECT_SOURCE_DIR}/nvidia_ascend_base/Base/
#nvidia ascend common cann include
${PROJECT_SOURCE_DIR}/nvidia_ascend_base/Base/BlockingQueue
${PROJECT_SOURCE_DIR}/nvidia_ascend_base/Base/CBase64
${PROJECT_SOURCE_DIR}/nvidia_ascend_base/Base/CommandParser
${PROJECT_SOURCE_DIR}/nvidia_ascend_base/Base/CommonDataType
${PROJECT_SOURCE_DIR}/nvidia_ascend_base/Base/ConfigParser
${PROJECT_SOURCE_DIR}/nvidia_ascend_base/Base/ErrorCode
${PROJECT_SOURCE_DIR}/nvidia_ascend_base/Base/FileManager
${PROJECT_SOURCE_DIR}/nvidia_ascend_base/Base/Log
${PROJECT_SOURCE_DIR}/nvidia_ascend_base/Base/
${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/common
#common engine include
${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/common_engine/ControlEngine
${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/common_engine/DataSourceEngine
${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/common_engine/DataUploadEngine
${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/common_engine/FilterEngine
${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/common_engine/MergerEngine
${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/common_engine/PixelFormatConvertEngine
${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/common_engine/SaveEngine
${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/common_engine/SelectBestEngine
${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/common_engine/TrainAnaEngine
${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/common_engine/TransEngine
${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/common_engine/DataDealEngine
${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/common_engine/SocketEngine
${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/common
#common engine include
${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/common_engine/ControlEngine
${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/common_engine/DataSourceEngine
${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/common_engine/DataUploadEngine
${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/common_engine/FilterEngine
${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/common_engine/MergerEngine
${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/common_engine/SaveEngine
${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/common_engine/SelectBestEngine
${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/common_engine/TrainAnaEngine
${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/common_engine/TransEngine
${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/common_engine/DataDealEngine
${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/common_engine/SocketEngine
${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/common_engine/DeleteExpiredFolderEngine
#common tools rtsp_server include
${PROJECT_SOURCE_DIR}/nvidia_ascend_tools/common_tools/rtsp_server/3rdpart/md5
${PROJECT_SOURCE_DIR}/nvidia_ascend_tools/common_tools/rtsp_server/net/
${PROJECT_SOURCE_DIR}/nvidia_ascend_tools/common_tools/rtsp_server/xop/
#common tools rtsp_server include
${PROJECT_SOURCE_DIR}/nvidia_ascend_tools/common_tools/rtsp_server/3rdpart/md5
${PROJECT_SOURCE_DIR}/nvidia_ascend_tools/common_tools/rtsp_server/net/
${PROJECT_SOURCE_DIR}/nvidia_ascend_tools/common_tools/rtsp_server/xop/
)
include_directories(
#nvidia engine include
${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/nvidia_engine/ChkDateStepOneEngine
${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/nvidia_engine/ChkDateStepTwoEngine
${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/nvidia_engine/ContainerEngine
${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/nvidia_engine/DecodeEngine
${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/nvidia_engine/MoveEngine
${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/nvidia_engine/MyYaml
${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/nvidia_engine/TrainStepOneEngine
${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/nvidia_engine/TrainStepTwoEngine
#nvidia engine include
#nvidia_tools yolov5 include
${PROJECT_SOURCE_DIR}/nvidia_ascend_tools/nvidia_tools/yolov5/include
${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/nvidia_engine/ChkDateStepOneEngine
${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/nvidia_engine/ChkDateStepTwoEngine
${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/nvidia_engine/ContainerEngine
${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/nvidia_engine/DecodeEngine
${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/nvidia_engine/MoveEngine
${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/nvidia_engine/MyYaml
${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/nvidia_engine/TrainStepOneEngine
${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/nvidia_engine/TrainStepTwoEngine
#third party include
${CUDA_INCLUDE_DIR}
${TENSORRT_INCLUDE_DIR}
${DRM_INCLUDE_DIR}
${OpenCV_DIR}
${AARCH64_LINUX_INCLUDE_DIR}
${SYS_USR_LOCAL_INCLUDE_DIR}
${PCL_INCLUDE}
#nvidia_tools yolov5 include
${PROJECT_SOURCE_DIR}/nvidia_ascend_tools/nvidia_tools/yolov5/include
#third party include
${CUDA_INCLUDE_DIR}
${TENSORRT_INCLUDE_DIR}
${DRM_INCLUDE_DIR}
${OpenCV_DIR}
${AARCH64_LINUX_INCLUDE_DIR}
${SYS_USR_LOCAL_INCLUDE_DIR}
# ${PCL_INCLUDE}
)
@ -144,7 +150,7 @@ link_directories(${SYS_USR_LOCAL_LIB_DIR}
${CUDA_LIB_DIR}
${TENSORRT_LIB_DIR}
${TEGRA_LIB_DIR}
)
)
#
#
@ -171,7 +177,6 @@ file(GLOB_RECURSE COMMON_SRCS_LISTS
#common engine src
${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/common/*.cpp
${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/common_engine/DataSourceEngine/*.cpp
${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/common_engine/PixelFormatConvertEngine/*.cpp
${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/common_engine/ControlEngine/*.cpp
${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/common_engine/DataUploadEngine/*.cpp
@ -182,26 +187,19 @@ file(GLOB_RECURSE COMMON_SRCS_LISTS
${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/common_engine/TransEngine/*.cpp
${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/common_engine/DataDealEngine/*.cpp
${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/common_engine/SocketEngine/*.cpp
${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/common_engine/SocketEngine/*.cpp
${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/common_engine/DataSourceEngine/*.cpp
${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/common_engine/SaveEngine/*.cpp
${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/common_engine/DeleteExpiredFolderEngine/*.cpp
#common tools rtsp_server src
${PROJECT_SOURCE_DIR}/nvidia_ascend_tools/common_tools/rtsp_server/net/*.cpp
${PROJECT_SOURCE_DIR}/nvidia_ascend_tools/common_tools/rtsp_server/xop/*.cpp
)
)
file(GLOB_RECURSE SRCS_LISTS
#nvidia engine src
#nvidia engine include
${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/common_engine/ControlEngine/*.cpp
${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/common_engine/DataSourceEngine/*.cpp
${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/common_engine/DataUploadEngine/*.cpp
${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/common_engine/FilterEngine/*.cpp
${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/common_engine/MergerEngine/*.cpp
${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/common_engine/PixelFormatConvertEngine/*.cpp
${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/common_engine/SaveEngine/*.cpp
${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/common_engine/SelectBestEngine/*.cpp
${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/common_engine/TrainAnaEngine/*.cpp
${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/common_engine/TransEngine/*.cpp
${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/common_engine/DataDealEngine/*.cpp
${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/nvidia_engine/ChkDateStepOneEngine/*.cpp
${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/nvidia_engine/ChkDateStepTwoEngine/*.cpp
@ -217,7 +215,7 @@ file(GLOB_RECURSE SRCS_LISTS
#nvidia tools yolov5 src
${PROJECT_SOURCE_DIR}/nvidia_ascend_tools/nvidia_tools/yolov5/src/*.cpp
${PROJECT_SOURCE_DIR}/nvidia_ascend_tools/nvidia_tools/yolov5/src/*.cu
)
)
cuda_add_executable(${PROJECT_NAME} ${COMMON_SRCS_LISTS} ${SRCS_LISTS})
@ -227,11 +225,11 @@ target_link_libraries(${PROJECT_NAME} cudart cuda) #CUDA
target_link_libraries(${PROJECT_NAME}
${OpenCV_LIBS} #third party librarys
${PCL_LIBRARY_DIRS}
pcl_common pcl_io_ply pcl_keypoints pcl_registration pcl_segmentation pcl_features pcl_io pcl_octree #pcl
pcl_sample_consensus pcl_surface pcl_filters pcl_kdtree pcl_recognition pcl_search pcl_tracking
# ${PCL_LIBRARY_DIRS}
# pcl_common pcl_io_ply pcl_keypoints pcl_registration pcl_segmentation pcl_features pcl_io pcl_octree #pcl
# pcl_sample_consensus pcl_surface pcl_filters pcl_kdtree pcl_recognition pcl_search pcl_tracking
avformat avcodec avutil avfilter swresample swscale postproc #VideoCodecV2
yaml-cpp https_sn
jsoncpp curl boost_system boost_filesystem ssh2
-Wl,-z,relro,-z,now,-z,noexecstack -pie -s
)
)

View File

@ -129,37 +129,37 @@ namespace ai_matrix
return std::string(tmp);
}
std::string MyUtils::get_date()
{
time_t timep = time(NULL);
struct tm *p = localtime(&timep);
std::string MyUtils::get_date()
{
time_t timep = time(NULL);
struct tm *p = localtime(&timep);
struct timeval tv;
gettimeofday(&tv, NULL);
struct timeval tv;
gettimeofday(&tv, NULL);
int msec = tv.tv_usec / 1000;
int msec = tv.tv_usec / 1000;
char tmp[12] = { 0 };
sprintf(tmp, "%04d-%02d-%02d", 1900 + p->tm_year, 1 + p->tm_mon, p->tm_mday);
char tmp[12] = { 0 };
sprintf(tmp, "%04d-%02d-%02d", 1900 + p->tm_year, 1 + p->tm_mon, p->tm_mday);
return std::string(tmp);
}
return std::string(tmp);
}
std::string MyUtils::get_time()
{
time_t timep = time(NULL);
struct tm *p = localtime(&timep);
std::string MyUtils::get_time()
{
time_t timep = time(NULL);
struct tm *p = localtime(&timep);
struct timeval tv;
gettimeofday(&tv, NULL);
struct timeval tv;
gettimeofday(&tv, NULL);
int msec = tv.tv_usec / 1000;
int msec = tv.tv_usec / 1000;
char tmp[10] = { 0 };
sprintf(tmp, "%02d-%02d-%02d", p->tm_hour, p->tm_min, p->tm_sec);
char tmp[10] = { 0 };
sprintf(tmp, "%02d-%02d-%02d", p->tm_hour, p->tm_min, p->tm_sec);
return std::string(tmp);
}
return std::string(tmp);
}
std::string MyUtils::get_timestamp_log()
{
@ -310,12 +310,12 @@ namespace ai_matrix
return buffer.str();
}
/**
*
* @param filePath
* @param savePath
* @return
*/
/**
*
* @param filePath
* @param savePath
* @return
*/
bool MyUtils::copyFile(std::string filePath, std::string savePath)
{
FILE *fp, *sp;
@ -338,14 +338,14 @@ namespace ai_matrix
}
std::string& MyUtils::replace_all_distinct(std::string &str, const std::string &old_value, const std::string &new_value)
{
for (std::string::size_type pos(0); pos != std::string::npos; pos += new_value.length()) {
if ((pos = str.find(old_value, pos)) != std::string::npos)
str.replace(pos, old_value.length(), new_value);
else break;
}
return str;
}
{
for (std::string::size_type pos(0); pos != std::string::npos; pos += new_value.length()) {
if ((pos = str.find(old_value, pos)) != std::string::npos)
str.replace(pos, old_value.length(), new_value);
else break;
}
return str;
}
/**
*
@ -386,6 +386,23 @@ namespace ai_matrix
return std::string(szTmp);
}
//时间戳转化为时间 毫秒级
std::string MyUtils::Stamp2Time(long long timestamp, bool has_msec)
{
    // Convert an epoch timestamp in MILLISECONDS to "YYYY-MM-DD HH:MM:SS",
    // optionally suffixed with ".mmm" when has_msec is true.
    int ms = timestamp % 1000;              // millisecond remainder
    time_t tick = (time_t)(timestamp / 1000); // whole seconds
    struct tm tm;
    char s[40];
    // NOTE(review): localtime() is not thread-safe (shared static buffer);
    // consider localtime_r if this is called from multiple threads — confirm.
    tm = *localtime(&tick);
    strftime(s, sizeof(s), "%Y-%m-%d %H:%M:%S", &tm);
    std::string str(s);
    if (has_msec)
    {
        // Fix: zero-pad milliseconds to 3 digits. The original used
        // std::to_string(ms), which rendered e.g. 5 ms as ".5" instead of ".005".
        char szMsec[8] = {0};
        sprintf(szMsec, "%03d", ms);
        str = str + "." + std::string(szMsec);
    }
    return str;
}
/**
* 1970
* inParam : N/A
@ -507,7 +524,7 @@ namespace ai_matrix
return true;
}
#ifdef ASCEND
#ifdef ASCEND
/**
* Device数据到Host
* inParam : void *pDeviceBuffer device内存地址
@ -589,8 +606,8 @@ namespace ai_matrix
}
return true;
}
#endif
#endif
/**
*
* inParam : uint64_t i64MilliSeconds
@ -653,7 +670,7 @@ namespace ai_matrix
}
if (!ifs.is_open())
{
LogWarn << "txt:" << strFilePath << " open fail";
// LogWarn << "txt:" << strFilePath << " open fail";
return false;
}
@ -735,7 +752,7 @@ namespace ai_matrix
}
//清空之前的结果
vecResult.clear();
vecResult.clear();
// 每个类别中,获取得分最高的框
for (auto iter = mapResult.begin(); iter != mapResult.end(); iter++)
{

View File

@ -51,10 +51,13 @@ namespace ai_matrix
//获取时间戳
std::string get_timestamp_file();
std::string get_timestamp_log();
//获取日期
std::string get_date();
//获取时间
std::string get_time();
//获取日期
std::string get_date();
//获取时间
std::string get_time();
//时间戳转化为时间 毫秒级
std::string Stamp2Time(long long timestamp, bool has_msec = false);
//创建文件夹
std::string create_dir_name(std::string root, std::string name);
@ -73,16 +76,16 @@ namespace ai_matrix
//bool 转 string
std::string getStringFromBool(bool b);
/**
*
* @param filePath
* @param savePath
* @return
*/
bool copyFile(std::string filePath, std::string savePath);
/**
*
* @param filePath
* @param savePath
* @return
*/
bool copyFile(std::string filePath, std::string savePath);
//替换string中所有指定字符串
std::string& replace_all_distinct(std::string &str, const std::string &old_value, const std::string &new_value);
//替换string中所有指定字符串
std::string& replace_all_distinct(std::string &str, const std::string &old_value, const std::string &new_value);
//获取北京当前日期
std::string GetDate();
@ -99,13 +102,13 @@ namespace ai_matrix
//创建文件夹路径
bool CreateDirPath(std::string strDirPath);
#ifdef ASCEND
#ifdef ASCEND
//拷贝Device数据到Host
bool MemcpyDeviceToHost(std::shared_ptr<void> *pHostData, const void *pDeviceBuffer, uint32_t iBufferSize);
//拷贝Host数据到Device
bool MemcpyHostToDevice(std::shared_ptr<void> *pDeviceData, const void *pHostBuffer, uint32_t iBufferSize, bool bDvppFlag = true);
#endif
#endif
//获取指定毫秒数的对应的日期时间
std::string GetDateTimeByMilliSeconds(uint64_t i64MilliSeconds, bool bFormatFlag = false);

BIN
app/train Normal file

Binary file not shown.

View File

@ -113,6 +113,8 @@ model:
nms_threshold: 0.3
gc_http_open: 1
username: "guest_01"
password: "d55b0f642e817eea24725d2f2a31dd08" # 神东
gc_http_url: "http://192.168.2.211:20004/api/train-carriage/identification/video-save"
gc_gettoken_url: "http://192.168.2.211:20004/api/blade-auth/oauth/token"
gc_image_srv: "http://192.168.2.211:9010/"
@ -179,3 +181,6 @@ gc_c_space_frame_width: 500
# 是否识别车头
gc_train_heard_detect: true
#过期文件夹天数
gc_days_for_result_expire_folder: 3

View File

@ -55,6 +55,7 @@ engines:
DeviceStatusUpSerEngine: 0
#ResultToMySQLSrvEngine: 0
#DataToMinioSrvEngine: 0
DeleteExpiredFolderEngine: 0
#engine连接
connects:

View File

@ -118,7 +118,7 @@ bool DataDealEngine::ReadFileInfo(Json::Value &jvFrameInfo, RawData &rawData, st
// LogError << "Failed to read image:" << strImgName;
// return false;
// }
return true;
}
@ -191,13 +191,13 @@ void DataDealEngine::MakeProcessData()
}
LogInfo << "sourceid:" << iSourceId << " MakeProcessData origtime:" << moveData_.strTrainName << " iOrigFrameId:" << iOrigFrameId
<< " time:" << strTrainName_ << " iFrameId:" << iFrameId << " bIsEndFlag:" << bIsEndFlag;
<< " time:" << strTrainName_ << " iFrameId:" << iFrameId << " bIsEndFlag:" << bIsEndFlag;
std::string strImgName = strDataDir_ + szCameraNo + std::to_string(iOrigFrameId);
strImgName += (iter->second.iRotate != 0) ? "_rotate.jpg" : ".jpg";
std::string strFileName = strDataDir_ + szCameraNo + std::to_string(iOrigFrameId) + ".txt";
//摄像头读取失败后重试2000次。
//摄像头读取失败后重试30次。
Json::Value jvFrameInfo;
RawData rawData;
bool bRet = false;
@ -228,7 +228,7 @@ void DataDealEngine::MakeProcessData()
pProcessData->bIsEnd = bIsEndFlag;
pProcessData->iDataNO = iDataNO_;
pProcessData->nMonitorState = moveData_.nMonitorState;
if (bRet)
{
i64TimeStampTemp = jvFrameInfo["timeStamp"].asUInt64();
@ -241,7 +241,7 @@ void DataDealEngine::MakeProcessData()
cv::Mat cvframe = cv::imread(pProcessData->strPicFilePath);
int iBufferSize = pProcessData->iWidth * pProcessData->iHeight * 3;
void* pBGRBufferobj = nullptr;
pBGRBufferobj = new uint8_t[iBufferSize];
pBGRBufferobj = new uint8_t[iBufferSize];
memcpy(pBGRBufferobj, cvframe.data, iBufferSize);
pProcessData->pData.reset(pBGRBufferobj, [](void* data){if(data) {delete[] data; data = nullptr;}});
pProcessData->iSize = iBufferSize;
@ -255,15 +255,15 @@ void DataDealEngine::MakeProcessData()
//iRet = outputQueMap_[vecPushPorts[iPort]]->push(std::static_pointer_cast<void>(pProcessData));
PushData(vecPushPorts[iPort], pProcessData);
continue;
}
}
std::shared_ptr<ProcessData> pNewProcessData = std::make_shared<ProcessData>();
*pNewProcessData = *pProcessData;
//iRet = outputQueMap_[vecPushPorts[iPort]]->push(std::static_pointer_cast<void>(pNewProcessData));
PushData(vecPushPorts[iPort], pNewProcessData);
}
}
iOrigDataNO_++;
iDataNO_++;
//每组处理数据需间隔一定时间
@ -291,14 +291,25 @@ APP_ERROR DataDealEngine::Process()
//获取主摄像头检测的状态
std::shared_ptr<void> pVoidData0 = nullptr;
iRet = inputQueMap_[strPort0_]->pop(pVoidData0);
if (nullptr != pVoidData0)
{
std::shared_ptr<MoveData> pMoveData = std::static_pointer_cast<MoveData>(pVoidData0);
// queuwMoveData_.push(*pMoveData);
moveData_ = *pMoveData;
LogDebug << "traindate:" << moveData_.strTrainDate << " trainname:" << moveData_.strTrainName
<< " MoveData frameid:" << moveData_.iFrameId << " IsEnd:" << moveData_.bIsEnd;
}
// LogDebug << "【帧号】" << (iDataNO_ * dataSourceConfig_.iSkipInterval);
// if (queuwMoveData_.size() > 0 && (iDataNO_ * dataSourceConfig_.iSkipInterval) >= queuwMoveData_.front().iFrameId)
// {
// moveData_ = queuwMoveData_.front();
// queuwMoveData_.pop();
// LogDebug << "!!!--- moveDate 更新";
// }
if (!moveData_.bHasTrain)
{
usleep(1000); //1ms
@ -308,7 +319,7 @@ APP_ERROR DataDealEngine::Process()
//第一个数据休眠1s等待图片存入本地
if (iOrigDataNO_ == 1)
{
usleep(1000000); //1s
usleep(1000 * 1000); //1s
}
if (strDataDir_.empty())

View File

@ -239,8 +239,8 @@ void DataDealTwoEngine::GetMainSplitInfo(Json::Value &jvMainSplit, std::shared_p
{
iValidType = pProcessData->iDirection == DIRECTION_LEFT ? VALID_LEFT : VALID_RIGHT;
}
LogDebug << "sourceid:" << pProcessData->iDataSource << " frameid:" << pProcessData->iFrameId
<< " trainIndex:" << pProcessData->iTrainIndex << " iSpaceType_:" << iSpaceType_
LogDebug << " frameid:" << pProcessData->iFrameId
<< " 车节:" << pProcessData->iTrainIndex << " iSpaceType_:" << iSpaceType_
<< " iSpaceX:" << iSpaceX << " iLastSpaceX_:" << iLastSpaceX_
<< " iLastSpaceFrameid_:" << iLastSpaceFrameid_ << " bIntervalFlag:" << bIntervalFlag;
iLastSpaceX_ = iSpaceX;
@ -352,11 +352,11 @@ void DataDealTwoEngine::GetValidTypeAndSplit(Json::Value &jvOneSplit, Json::Valu
GetSubSplitInfoByMain(jvOneSplit, pProcessData, jvFrameInfo);
}
LogDebug << "sourceid:" << pProcessData->iDataSource << " frameid:" << pProcessData->iFrameId
<< " trainIndex:" << pProcessData->iTrainIndex
<< " validType:" << jvOneSplit["validType"].asInt() << " splitX:" << jvOneSplit["splitX"].asInt()
<< " needNum:" << jvOneSplit["needNum"].asBool() << " needPro:" << jvOneSplit["needPro"].asBool()
<< " needChkDate:" << jvOneSplit["needChkDate"].asBool() << " needContainer:" << jvOneSplit["needContainer"].asBool();
// LogDebug << "sourceid:" << pProcessData->iDataSource << " frameid:" << pProcessData->iFrameId
// << " trainIndex:" << pProcessData->iTrainIndex
// << " validType:" << jvOneSplit["validType"].asInt() << " splitX:" << jvOneSplit["splitX"].asInt()
// << " needNum:" << jvOneSplit["needNum"].asBool() << " needPro:" << jvOneSplit["needPro"].asBool()
// << " needChkDate:" << jvOneSplit["needChkDate"].asBool() << " needContainer:" << jvOneSplit["needContainer"].asBool();
}
/**
@ -456,8 +456,8 @@ void DataDealTwoEngine::MakeProcessData(std::shared_ptr<TrainRange> pTrainRange)
sprintf(szCameraNo, "%03d/", iSourceId + 1);
bool bIsEndFlag = (pTrainRange->iEndFrameId == iFrameId);
LogInfo << "sourceid:" << iSourceId << " StepTwo MakeProcessData trainIndex:" << pTrainRange->iTrainIndex
<< " iFrameId:" << iFrameId << " bIsEndFlag:" << bIsEndFlag;
// LogInfo << "sourceid:" << iSourceId << " StepTwo MakeProcessData trainIndex:" << pTrainRange->iTrainIndex
// << " iFrameId:" << iFrameId << " bIsEndFlag:" << bIsEndFlag;
std::string strImgName = strDataDir + szCameraNo + std::to_string(iFrameId);
strImgName += (iter->second.iRotate != 0) ? "_rotate.jpg" : ".jpg";
std::string strFileName = strDataDir + szCameraNo + std::to_string(iFrameId) + ".txt";

View File

@ -7,7 +7,7 @@ namespace
{
const int LOW_THRESHOLD = 128;
const int MAX_THRESHOLD = 4096;
const uint16_t DELAY_TIME = 40000;
const uint16_t DELAY_TIME = 10000;
}
CameraEngine::CameraEngine() {}

View File

@ -8,6 +8,8 @@ ResultToHttpSrvEngine::~ResultToHttpSrvEngine() {}
APP_ERROR ResultToHttpSrvEngine::Init()
{
strPort0_ = engineName_ + "_" + std::to_string(engineId_) + "_0";
strUsername_ = MyYaml::GetIns()->GetStringValue("username");
strPassword_ = MyYaml::GetIns()->GetStringValue("password");
strURL_ = MyYaml::GetIns()->GetStringValue("gc_http_url");
strGetTokenURL_ = MyYaml::GetIns()->GetStringValue("gc_gettoken_url");
strImageSrv_ = MyYaml::GetIns()->GetPathValue("gc_image_srv");
@ -76,11 +78,11 @@ bool ResultToHttpSrvEngine::GetToken(std::string &strBladeAuth)
curl_mime *pMultipart = curl_mime_init(pCurl_);
curl_mimepart *pPart = curl_mime_addpart(pMultipart);
curl_mime_name(pPart, "username");
curl_mime_data(pPart, "guest_01", CURL_ZERO_TERMINATED);
curl_mime_data(pPart, strUsername_.c_str(), CURL_ZERO_TERMINATED);
pPart = curl_mime_addpart(pMultipart);
curl_mime_name(pPart, "password");
curl_mime_data(pPart, "d55b0f642e817eea24725d2f2a31dd08", CURL_ZERO_TERMINATED);
pPart = curl_mime_addpart(pMultipart);
curl_mime_data(pPart, strPassword_.c_str(), CURL_ZERO_TERMINATED);
pPart = curl_mime_addpart(pMultipart);
curl_mime_name(pPart, "tenantId");
curl_mime_data(pPart, "000000", CURL_ZERO_TERMINATED);
pPart = curl_mime_addpart(pMultipart);
@ -276,8 +278,8 @@ void ResultToHttpSrvEngine::DealHttpFailInfo()
if (!ResultToHttpSrv(jvRequest))
{
LogError << "re http post err:" << strLine;
SaveHttpFailInfo(jvRequest, strFailSaveBakPath_);
bAllSucc = false;
//SaveHttpFailInfo(jvRequest, strFailSaveBakPath_);
// bAllSucc = false;
continue;
}
}
@ -426,10 +428,11 @@ APP_ERROR ResultToHttpSrvEngine::Process()
jvRequest["isTheLast"] = pTrain->bIsEnd ? 1 : 0; // 是否最后一节: 0:否,1:是
jvRequest["startFrame"] = pTrain->iStartFrameId; //车厢开始帧
jvRequest["endFrame"] = pTrain->iEndFrameId; //车厢结束帧
jvRequest["skipFrame"] = dataSourceConfig.iSkipInterval; //跳帧
jvRequest["skipFrame"] = dataSourceConfig.iSkipInterval;
jvRequest["collectTime"] = MyUtils::getins()->Stamp2Time(pTrain->i64EndTimeStamp, true);//车厢切分的时间 //跳帧
if (!ResultToHttpSrv(jvRequest))
{
SaveHttpFailInfo(jvRequest, strFailSavePath_);
// SaveHttpFailInfo(jvRequest, strFailSavePath_);
}
//列车结束后再次处理失败的信息

View File

@ -40,6 +40,8 @@ private:
bool SaveHttpFailInfo(Json::Value &jvRequest, std::string &strFilePath);
std::string strPort0_;
std::string strUsername_;
std::string strPassword_;
std::string strURL_;
std::string strGetTokenURL_;
std::string strImageSrv_;

View File

@ -0,0 +1,187 @@
#include "DeleteExpiredFolderEngine.h"
using namespace ai_matrix;
// Default constructor: members are populated later in Init(), not here.
DeleteExpiredFolderEngine::DeleteExpiredFolderEngine() {}
// Destructor: the engine owns no resources that need explicit release.
DeleteExpiredFolderEngine::~DeleteExpiredFolderEngine() {}
// Read the retention configuration from the yaml config:
// - gc_days_for_result_expire_folder: keep result folders for this many days
// - gc_result_path: root directory containing the date-named result folders
APP_ERROR DeleteExpiredFolderEngine::Init()
{
iDaysNumber_ = MyYaml::GetIns()->GetIntValue("gc_days_for_result_expire_folder");
strResultPath_ = MyYaml::GetIns()->GetPathValue("gc_result_path");
LogInfo << "DeleteExpiredFolderEngine Init ok";
return APP_ERR_OK;
}
// Nothing to tear down; log for symmetry with Init().
APP_ERROR DeleteExpiredFolderEngine::DeInit()
{
LogInfo << "DeleteExpiredFolderEngine DeInit ok";
return APP_ERR_OK;
}
// Main engine loop: once per day, delete result folders older than
// iDaysNumber_ days (relative to today's date).
APP_ERROR DeleteExpiredFolderEngine::Process()
{
    while (!isStop_)
    {
        // One cleanup pass against today's date.
        std::string strTrainDate_temp = MyUtils::getins()->GetDate();
        DeletePreviousFolder(strResultPath_, strTrainDate_temp, iDaysNumber_);
        // Sleep ~24h in 1-second slices.
        // Fix: the original usleep(1000*1000*3600*24) overflows int arithmetic
        // and exceeds usleep()'s useconds_t range (POSIX caps it at 1,000,000us),
        // so the actual delay was undefined. Slicing the sleep also lets the
        // loop react to isStop_ promptly instead of blocking for a day.
        for (int iSec = 0; iSec < 3600 * 24 && !isStop_; iSec++)
        {
            usleep(1000 * 1000); // 1 second
        }
    }
    return APP_ERR_OK;
}
void DeleteExpiredFolderEngine::DeletePreviousFolder(std::string path, const std::string &date, int n_days)
{
// 1 computer date
std::string previous_date = getDateBeforeNDays(date, n_days);
if (!previous_date.empty())
std::cout << "Date before " << n_days << " days from " << date << " is: " << previous_date << std::endl;
// 2
std::vector<Date> subfolders;
GetSubfolderNames(path, subfolders);
// for (const auto &it : subfolders)
// std::cout << it.year << "." << it.month << "." << it.day << std::endl;
// 3 delete
if (path.back() != '/')
path += "/";
Date reference_date = StrToDate(previous_date); // 给定的参考日期
DeleteEarlierDatesFolder(path, subfolders, reference_date);
}
// Number of days in the given month (1-12), accounting for leap-year February.
int DeleteExpiredFolderEngine::DaysInMonth(int year, int month)
{
    static const int kDaysPerMonth[13] = {0, 31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31};
    const bool bLeapYear = (year % 4 == 0 && year % 100 != 0) || (year % 400 == 0);
    if (month == 2 && bLeapYear)
        return 29; // February has 29 days in a leap year
    return kDaysPerMonth[month];
}
// Parse a "YYYY-MM-DD" string into a Date struct.
// Returns {0, 0, 0} when the string does not match the expected format.
Date DeleteExpiredFolderEngine::StrToDate(const std::string &date_str)
{
    std::istringstream iss(date_str);
    // Fix: the original left year/month/day uninitialized and returned them
    // as-is on parse failure, which is undefined behavior. Initialize them so
    // a failed parse yields a deterministic {0,0,0} sentinel.
    int year = 0, month = 0, day = 0;
    char dash = '\0';
    if (!(iss >> year >> dash && dash == '-' &&
          iss >> month >> dash && dash == '-' &&
          iss >> day))
    {
        LogError << ("Invalid date format") << ":" << date_str;
        return {0, 0, 0};
    }
    return {year, month, day};
}
// Move `date` backwards by n_days, borrowing across month/year boundaries.
void DeleteExpiredFolderEngine::SubtractDays(Date &date, int n_days)
{
    for (; n_days > 0; --n_days)
    {
        if (--date.day == 0)
        {
            // Walked past the 1st: step into the previous month.
            if (--date.month == 0)
            {
                --date.year;
                date.month = 12; // December of the previous year
            }
            // Land on the last day of the new month.
            date.day = DaysInMonth(date.year, date.month);
        }
    }
}
// Format a Date as "YYYY-MM-DD" with zero-padded month and day
// (setfill persists, so the second setw(2) pads the day as well).
std::string DeleteExpiredFolderEngine::DateToStr(const Date &date)
{
std::ostringstream oss;
oss << date.year << "-" << std::setfill('0') << std::setw(2) << date.month << "-" << std::setw(2) << date.day;
return oss.str();
}
// 主要功能函数接收一个日期字符串和一个整数n返回n天前的日期字符串
std::string DeleteExpiredFolderEngine::getDateBeforeNDays(const std::string &input_date, int n_days)
{
try
{
Date date = StrToDate(input_date);
SubtractDays(date, n_days);
return DateToStr(date);
}
catch (const std::exception &e)
{
LogError << "Error: " << e.what();
return "";
}
}
// Collect the date-named subfolders of `directory` (parsed via StrToDate)
// into folder_names. Skips "."/".."/hidden entries and the "best" folder.
void DeleteExpiredFolderEngine::GetSubfolderNames(std::string &directory, std::vector<Date> &folder_names)
{
    if (directory.back() != '/')
        directory += "/";
    DIR *dir;
    struct dirent *ent;
    if ((dir = opendir(directory.c_str())) == nullptr)
    {
        LogError << "Unable to open directory: " << directory;
        return;
    }
    while ((ent = readdir(dir)) != nullptr)
    {
        if (ent->d_type != DT_DIR)
            continue;
        const std::string strName(ent->d_name);
        // Fix: the original tested `ent->d_name == "best"` — a char* pointer
        // comparison against a string literal that is effectively always
        // false, so NO folder was ever collected and the expiry cleanup was a
        // silent no-op. Presumably the intent was to EXCLUDE the non-date
        // "best" folder while collecting the date-named ones — TODO confirm
        // against the SaveEngine output layout.
        if (strName.empty() || strName[0] == '.' || strName == "best")
            continue;
        folder_names.push_back(StrToDate(strName));
    }
    closedir(dir);
}
// Recursively remove a directory tree by shelling out to `rm -rf`.
// SECURITY NOTE: system() runs through the shell; `directory` must only ever
// come from trusted config + internally generated date strings. The quoting
// below guards against spaces in the path, not against malicious input.
void DeleteExpiredFolderEngine::DeleteFolder(const std::string directory)
{
    // Fix: quote the path — the original broke on directories containing
    // spaces (rm would receive multiple arguments).
    std::string command = "rm -rf \"" + directory + "\"";
    int result = system(command.c_str());
    if (result != 0)
        std::cout << "Failed to remove directory recursively: " << directory << std::endl;
    else
        std::cout << "delete folder successfully : " << directory << std::endl;
}
// Delete every subfolder whose date is strictly earlier than reference_date.
// Folder paths are rebuilt as path + "YYYY-MM-DD" via DateToStr.
void DeleteExpiredFolderEngine::DeleteEarlierDatesFolder(std::string &path, std::vector<Date> &subfolders, const Date &reference_date)
{
    if (path.back() != '/')
        path += "/";
    for (const Date &cur : subfolders)
    {
        // Lexicographic (year, month, day) comparison against the cutoff.
        const bool bEarlier =
            cur.year < reference_date.year ||
            (cur.year == reference_date.year &&
             (cur.month < reference_date.month ||
              (cur.month == reference_date.month && cur.day < reference_date.day)));
        if (bEarlier)
            DeleteFolder(path + DateToStr(cur));
    }
}

View File

@ -0,0 +1,57 @@
/**
 * DeleteExpiredFolderEngine: background engine that periodically deletes
 * date-named result folders older than a configured number of days.
 **/
#ifndef DELETEEXPIREDFOLDERENGINE_H
#define DELETEEXPIREDFOLDERENGINE_H
#include "AppCommon.h"
#include "EngineBase.h"
#include "EngineFactory.h"
#include "MyYaml.h"
#include "myutils.h"
// Simple calendar date (no time-of-day component).
struct Date
{
int year;
int month;
int day;
};
class DeleteExpiredFolderEngine : public ai_matrix::EngineBase
{
public:
DeleteExpiredFolderEngine();
~DeleteExpiredFolderEngine();
// Read retention config (expiry days, result root path) from yaml.
APP_ERROR Init() override;
APP_ERROR DeInit() override;
// Daily cleanup loop.
APP_ERROR Process() override;
private:
// Number of days in the given month (handles leap-year February).
int DaysInMonth(int year, int month);
// Parse a "YYYY-MM-DD" string into a Date.
Date StrToDate(const std::string &date_str);
// Subtract n_days from `date` in place.
void SubtractDays(Date &date, int n_days);
// Format a Date as "YYYY-MM-DD" (zero-padded month/day).
std::string DateToStr(const Date &date);
// Return the date string n_days before input_date; "" on error.
std::string getDateBeforeNDays(const std::string &input_date, int n_days);
// Collect date-named subfolders of `directory` into folder_names.
void GetSubfolderNames(std::string &directory, std::vector<Date> &folder_names);
// Remove a directory tree (shells out to `rm -rf`).
void DeleteFolder(const std::string directory);
// Delete every subfolder dated earlier than reference_date.
void DeleteEarlierDatesFolder(std::string &path, std::vector<Date> &subfolders, const Date &reference_date);
// One full cleanup pass: compute cutoff, enumerate, delete.
void DeletePreviousFolder(std::string path, const std::string &date, int n_days);
private:
std::string strResultPath_; // root folder holding date-named result dirs
int iDaysNumber_;           // retention period in days
};
ENGINE_REGIST(DeleteExpiredFolderEngine)
#endif

View File

@ -28,6 +28,9 @@ APP_ERROR FilterTrainStepOneEngine::Init()
strResultPath_ = MyYaml::GetIns()->GetPathValue("gc_result_path");
iChkStopPX_ = MyYaml::GetIns()->GetIntValue("gc_chkstop_px");
iChkStopCount_ = MyYaml::GetIns()->GetIntValue("gc_chkstop_count");
iPartitionFrameNum_ = MyYaml::GetIns()->GetIntValue("partition_frame_span");
iPlitFrameSpanPX_ = MyYaml::GetIns()->GetIntValue("gc_split_frame_span_px");
iPushDirection_ = MyYaml::GetIns()->GetIntValue("gc_push_direction");
//获取主摄像头信息
mainCfg_ = MyYaml::GetIns()->GetDataSourceConfigById(0);
@ -35,6 +38,8 @@ APP_ERROR FilterTrainStepOneEngine::Init()
std::map<int, ai_matrix::DataSourceConfig> mapUseDataSouceCfg = MyYaml::GetIns()->GetUseDataSourceConfig();
for (auto iter = mapUseDataSouceCfg.begin(); iter != mapUseDataSouceCfg.end(); iter++)
{
this->rightFirst_ = iter->second.iRightFirst;
this->leftFirst_ = iter->second.iLeftFirst;
if (iter->second.strTarget.find("NUM") != std::string::npos)
{
LogDebug << "DataSource:" << iter->first << " deal NUM";
@ -55,8 +60,8 @@ APP_ERROR FilterTrainStepOneEngine::Init()
mapTargetStr_.insert(std::make_pair(NUM, "NUM"));
mapTargetStr_.insert(std::make_pair(PRO, "PRO"));
mapTargetStr_.insert(std::make_pair(HEAD, "HEAD"));
mapTargetStr_.insert(std::make_pair(SPACE, "SPACE"));
mapTargetStr_.insert(std::make_pair(TRAINSPACE, "SPACE"));
mapTargetStr_.insert(std::make_pair(SPACE, "SPACE"));//SPACE
mapTargetStr_.insert(std::make_pair(TRAINSPACE, "SPACE"));//SPACE
InitParam();
LogInfo << "FilterTrainStepOneEngine Init ok";
@ -113,7 +118,7 @@ void FilterTrainStepOneEngine::InitParam()
}
iDirection_ = DIRECTION_UNKNOWN;
iNotChgCount_ = 0;
while (!stackBackInfo_.empty())
{
stackBackInfo_.pop();
@ -166,11 +171,12 @@ void FilterTrainStepOneEngine::AddBackInfo(std::shared_ptr<ProcessData> pProcess
}
strAllClassType += mapTargetStr_[pPostData->vecPostSubData[i].iTargetType];
}
LogDebug << "frameId:" << pProcessData->iFrameId << " addbackinfo strAllClassType:" << strAllClassType;
if (strAllClassType.empty())
{
return;
}
LogDebug << "frameId:" << pProcessData->iFrameId << " addbackinfo strAllClassType:" << strAllClassType;
TrainBackInfo trainBackInfo;
trainBackInfo.processData = pProcessData;
@ -184,8 +190,35 @@ void FilterTrainStepOneEngine::AddBackInfo(std::shared_ptr<ProcessData> pProcess
else
{
TrainBackInfo trainBackInfoTop = stackBackInfo_.top();
// 2024年3月27日修改前
// if (trainBackInfoTop.strAllClassType != trainBackInfo.strAllClassType)
// {
// stackBackInfo_.push(trainBackInfo);
// LogDebug << "frameId:" << pProcessData->iFrameId << " push strAllClassType:" << strAllClassType
// << " stacksize:" << stackBackInfo_.size();
// }
if (trainBackInfoTop.strAllClassType != trainBackInfo.strAllClassType)
{
if (iDirection_ == DIRECTION_RIGHT
&& trainBackInfo.strAllClassType == "SPACE"
&& (trainBackInfoTop.strAllClassType == "PROSPACE" || trainBackInfoTop.strAllClassType == "SPACEPRO"))
{
return;
}
if (iDirection_ == DIRECTION_RIGHT
&& trainBackInfo.strAllClassType == "SPACE"
&& (trainBackInfoTop.strAllClassType == "NUMSPACE" || trainBackInfoTop.strAllClassType == "SPACENUM"))
{
return;
}
if (iDirection_ == DIRECTION_LEFT
&& trainBackInfo.strAllClassType == "SPACE"
&& (trainBackInfoTop.strAllClassType == "NUMSPACE" || trainBackInfoTop.strAllClassType == "SPACENUM"))
{
return;
}
stackBackInfo_.push(trainBackInfo);
LogDebug << "frameId:" << pProcessData->iFrameId << " push strAllClassType:" << strAllClassType
<< " stacksize:" << stackBackInfo_.size();
@ -200,16 +233,19 @@ bool FilterTrainStepOneEngine::IsEndDealBackInfo(std::shared_ptr<ProcessData> pP
return true;
}
bool bPopFlag = false;
std::shared_ptr<PostData> pPostData = std::static_pointer_cast<PostData>(pProcessData->pVoidData);
if (pPostData->vecPostSubData.size() == 0) return false;
/*
Engine不处理
poppop后
poppop后
()
*/
std::shared_ptr<PostData> pPostData = std::static_pointer_cast<PostData>(pProcessData->pVoidData);
std::sort(pPostData->vecPostSubData.begin(), pPostData->vecPostSubData.end(), CompareX);
std::string strAllClassType;
@ -226,48 +262,73 @@ bool FilterTrainStepOneEngine::IsEndDealBackInfo(std::shared_ptr<ProcessData> pP
return false;
}
TrainBackInfo trainBackInfoTop = stackBackInfo_.top();
bool bPopFlag = false;
if (trainBackInfoTop.strAllClassType != strAllClassType)
{
bPopFlag = true;
}
LogDebug << "frameId:" << pProcessData->iFrameId << " stacksize:" << stackBackInfo_.size()
<< " topClassType:" << trainBackInfoTop.strAllClassType << " dealbackinfo strAllClassType:" << strAllClassType
<< " bPopFlag:" << bPopFlag;
if(bPopFlag)
{
stackBackInfo_.pop();
bPopFlag = false;
}
if (stackBackInfo_.size() == 1)
{
if (!bPopFlag)
{
TrainBackInfo trainBackInfoLast = stackBackInfo_.top();
std::shared_ptr<PostData> pPostDataBack = std::static_pointer_cast<PostData>(trainBackInfoLast.processData->pVoidData);
std::sort(pPostDataBack->vecPostSubData.begin(), pPostDataBack->vecPostSubData.end(), CompareX);
TrainBackInfo trainBackInfoLast = stackBackInfo_.top();
std::shared_ptr<PostData> pPostDataBack = std::static_pointer_cast<PostData>(trainBackInfoLast.processData->pVoidData);
std::sort(pPostDataBack->vecPostSubData.begin(), pPostDataBack->vecPostSubData.end(), CompareX);
for (size_t i = 0; i < pPostDataBack->vecPostSubData.size(); i++)
for (size_t i = 0; i < pPostDataBack->vecPostSubData.size(); i++)
{
int bFlag = -1;
for (size_t j = 0; j < pPostData->vecPostSubData.size(); j++)
{
bool bFlag = (pPostDataBack->vecPostSubData[i].step1Location.fLTX <= pPostData->vecPostSubData[i].step1Location.fLTX);
LogDebug << "frameId:" << pProcessData->iFrameId << " stackFrameid:" << pPostDataBack->iFrameId << " bFlag:" << bFlag;
if ((iDirection_ == DIRECTION_LEFT && !bFlag) ||
(iDirection_ == DIRECTION_RIGHT && bFlag))
if (pPostDataBack->vecPostSubData[i].iBigClassId == pPostData->vecPostSubData[j].iBigClassId)
{
bPopFlag = true;
break;
if (pPostData->vecPostSubData[j].step1Location.fLTX < 1 || pPostDataBack->vecPostSubData[i].step1Location.fLTX < 1)
{
LogDebug << "大框X坐标小于1判定为异常大框。过滤";
break;
}
bFlag = (pPostDataBack->vecPostSubData[i].step1Location.fLTX <= pPostData->vecPostSubData[j].step1Location.fLTX) ? 1 : 0;
LogDebug << "帧:" << pProcessData->iFrameId << " 倒车前帧:" << pPostDataBack->iFrameId << " 恢复到原位:" << bFlag
<< " 当前框位置:" << pPostData->vecPostSubData[i].step1Location.fLTX
<< " 倒车前位置:" << pPostDataBack->vecPostSubData[i].step1Location.fLTX << "方向:" << iDirection_;
}
}
if (bPopFlag)
if ((iDirection_ == DIRECTION_LEFT && bFlag == 0) ||
(iDirection_ == DIRECTION_RIGHT && bFlag == 1))
{
LogDebug << "frameId:" << pProcessData->iFrameId << " last one bPopFlag:" << bPopFlag;
stackBackInfo_.pop();
bPopFlag = true;
break;
}
}
}
if (bPopFlag)
{
LogDebug << "frameId:" << pProcessData->iFrameId << " 恢复倒车前的位置:" << bPopFlag;
stackBackInfo_.pop();
}
}
else
{
TrainBackInfo trainBackInfoTop_bak = stackBackInfo_.top();
stackBackInfo_.pop();
TrainBackInfo trainBackInfoTop = stackBackInfo_.top();
if (trainBackInfoTop.strAllClassType != strAllClassType)
{
stackBackInfo_.push(trainBackInfoTop_bak);
LogDebug << "帧:" << pProcessData->iFrameId << " 倒车信息:" << stackBackInfo_.size()
<< " 顶部倒车信息:" << trainBackInfoTop.strAllClassType << " 本次识别信息:" << strAllClassType;
}
else
{
// bPopFlag = true;
LogDebug << "帧:" << pProcessData->iFrameId << " 倒车信息:" << stackBackInfo_.size()
<< " 顶部倒车信息:" << trainBackInfoTop.strAllClassType << " 本次识别信息:" << strAllClassType
<< " 删除倒车信息:" << trainBackInfoTop_bak.strAllClassType;
}
// if(bPopFlag)
// {
// stackBackInfo_.pop();
// bPopFlag = false;
// }
}
return stackBackInfo_.empty() ? true : false;
}
@ -275,31 +336,29 @@ bool FilterTrainStepOneEngine::IsEndDealBackInfo(std::shared_ptr<ProcessData> pP
*
* inParam : std::shared_ptr<ProcessData> pProcessData :
* outParam: N/A
* return : true:; false: 1() 2() 3()
* return : true:; false: 1() 2() 3()
*/
int FilterTrainStepOneEngine::GetTrainStatus(std::shared_ptr<ProcessData> pProcessData)
{
if (iDirection_ == DIRECTION_UNKNOWN)
{
LogDebug << "sourceid:" << pProcessData->iDataSource << " frameId:" << pProcessData->iFrameId << " direction unknown trainStatus=1";
LogDebug << " frameId:" << pProcessData->iFrameId << " 未判断出行车方向,暂定认为火车正常行驶中";
return TRAINSTATUS_RUN;
}
std::shared_ptr<PostData> pPostData = std::static_pointer_cast<PostData>(pProcessData->pVoidData);
pPostData->iFrameId = pProcessData->iFrameId;
quePostData_.push(*pPostData.get());
// 1. 无框时,返回之前的列车状态
if (pPostData->vecPostSubData.size() == 0)
{
LogDebug << "sourceid:" << pProcessData->iDataSource << " frameId:" << pProcessData->iFrameId << " step1 no result trainStatus="<< iTrainStatus_;
quePostData_.pop();
return iTrainStatus_;
}
quePostData_.push(*pPostData.get());
if (quePostData_.size() < 3)
{
LogDebug << "sourceid:" << pProcessData->iDataSource << " frameId:" << pProcessData->iFrameId << " size < 3 trainStatus=1";
return TRAINSTATUS_RUN;
}
@ -310,7 +369,7 @@ int FilterTrainStepOneEngine::GetTrainStatus(std::shared_ptr<ProcessData> pProce
{
quePostData_.pop();
}
LogDebug << "queue front frameId:" << postDataFront.iFrameId << " queuesize:" << quePostData_.size() << " iSizeTemp:" << iSizeTemp;
LogDebug << "frameId:" << pProcessData->iFrameId << " 判断运动状态队列 第一帧:" << postDataFront.iFrameId << " 队列size:" << quePostData_.size() << " iSizeTemp:" << iSizeTemp;
bool bSameFlag = false;
int iDiffValue = iChkStopPX_;
@ -326,6 +385,7 @@ int FilterTrainStepOneEngine::GetTrainStatus(std::shared_ptr<ProcessData> pProce
*/
if (postSubDataFront.iTargetType != postSubDataBack.iTargetType)
{
LogDebug << "判断前后帧识别的是否一致 上一个:" << postSubDataFront.iTargetType << " 当前:" << postSubDataBack.iTargetType;
continue;
}
@ -340,7 +400,7 @@ int FilterTrainStepOneEngine::GetTrainStatus(std::shared_ptr<ProcessData> pProce
//位置比较大于10个像素则表示有移动。再判断时正向移动还是倒车
LogDebug << "frameId:" << pProcessData->iFrameId << " " << iCenterBack << "-" << iCenterFront
<< "=" << abs(iCenterBack - iCenterFront) << " iDiffValue:" << iDiffValue;
<< "=" << abs(iCenterBack - iCenterFront) << " 预期判定移动的差值为iDiffValue:" << iDiffValue;
if (abs(iCenterBack - iCenterFront) > iDiffValue)
{
iNotChgCount_ = 0;
@ -352,32 +412,38 @@ int FilterTrainStepOneEngine::GetTrainStatus(std::shared_ptr<ProcessData> pProce
if ((iCenterBack > iCenterFront && iDirection_ == DIRECTION_LEFT) ||
(iCenterBack < iCenterFront && iDirection_ == DIRECTION_RIGHT))
{
LogDebug << "frameId:" << pProcessData->iFrameId << " back1";
if (this->iPartitionFrameNum_ < (pProcessData->iFrameId - postDataFront.iFrameId)
&& this->iPlitFrameSpanPX_ < abs(iCenterBack - iCenterFront))
{
return TRAINSTATUS_RUN;
}
LogDebug << "frameId:" << pProcessData->iFrameId << " 检测到火车倒车";
return TRAINSTATUS_BACK;
}
else
{
LogDebug << "frameId:" << pProcessData->iFrameId << " run";
LogDebug << "frameId:" << pProcessData->iFrameId << " 正常行驶";
return TRAINSTATUS_RUN;
}
}
/*
10
10
10
*/
/*
10
10
10
*/
else
{
iNotChgCount_++;
LogDebug << " frameId:" << pProcessData->iFrameId << " no chg iNotChgCount:" << iNotChgCount_;
LogDebug << " frameId:" << pProcessData->iFrameId
<< " 大框移动范围小 判断停车计数:" << iNotChgCount_ << "/" << iChkStopCount_;
if (iNotChgCount_ > iChkStopCount_)
{
LogDebug << "frameId:" << pProcessData->iFrameId << " stop";
LogDebug << "frameId:" << pProcessData->iFrameId << " 检测到火车停车";
return TRAINSTATUS_STOP;
}
else
{
LogDebug << "frameId:" << pProcessData->iFrameId << " iTrainStatus_:" << iTrainStatus_;
// LogDebug << "frameId:" << pProcessData->iFrameId << " iTrainStatus_:" << iTrainStatus_;
return iTrainStatus_;
}
}
@ -443,8 +509,8 @@ int FilterTrainStepOneEngine::GetTrainStatus(std::shared_ptr<ProcessData> pProce
}
}
LogDebug << "frameId:" << pProcessData->iFrameId << " back2";
return TRAINSTATUS_BACK;
// LogDebug << "frameId:" << pProcessData->iFrameId << " back2";
return iTrainStatus_;
}
}
LogDebug << "frameId:" << pProcessData->iFrameId << " iNotChgCount_:" << iNotChgCount_ << " run run";
@ -491,7 +557,7 @@ void FilterTrainStepOneEngine::SetDirection(std::vector<Step1Location> &vecLocat
{
iDirection_ = DIRECTION_LEFT;
}
//行驶方向 右
//行驶方向 右
else if ((slBack.fLTX - slFront.fLTX) > 0)
{
iDirection_ = DIRECTION_RIGHT;
@ -723,7 +789,8 @@ void FilterTrainStepOneEngine::CalculateDirection(std::shared_ptr<ProcessData> p
}
void FilterTrainStepOneEngine::sendComeTrain(const std::string strTrainDate, const std::string strTrainName, const int iDirection) {
std::string message = "{\"cometime\":\"" + strTrainDate + " " + strTrainName + "\",\"type\":\"1\",\"direction\":\"" + to_string(iDirection) + "\"}";
std::string message = "{\"cometime\":\"" + strTrainDate + " " + strTrainName + "\",\"type\":\"1\",\"direction\":" + to_string(iDirection == iPushDirection_ ? 1:-1) + "}";
LogWarn << message;
outputQueMap_[engineName_ + "_" + std::to_string(engineId_) + "_1"]->push(std::static_pointer_cast<void>(std::make_shared<std::string>(message)));
}
@ -830,31 +897,31 @@ void FilterTrainStepOneEngine::DealProcessDataPre(std::shared_ptr<ProcessData> p
{
if (iterHeadContinueCnt->second < 2 && it->iTargetType == HEAD)
{
LogError << "sourceid:" << iterProcessData->second->iDataSource << " frameId:" << iterProcessData->second->iFrameId << " Head wrong";
LogDebug << " frameId:" << iterProcessData->second->iFrameId << " Head 框因非连续识别而过滤";
it = pPostDataPre->vecPostSubData.erase(it);
continue;
}
if (iterProContinueCnt->second < 2 && it->iTargetType == PRO)
{
LogError << "sourceid:" << iterProcessData->second->iDataSource << " frameId:" << iterProcessData->second->iFrameId << " PRO wrong";
LogDebug << " frameId:" << iterProcessData->second->iFrameId << " PRO 框因非连续识别而过滤";
it = pPostDataPre->vecPostSubData.erase(it);
continue;
}
if (iterNumContinueCnt->second < 2 && it->iTargetType == NUM)
{
LogError << "sourceid:" << iterProcessData->second->iDataSource << " frameId:" << iterProcessData->second->iFrameId << " NUM wrong";
LogDebug << " frameId:" << iterProcessData->second->iFrameId << " NUM 框因非连续识别而过滤";
it = pPostDataPre->vecPostSubData.erase(it);
continue;
}
if (iterSpaceContinueCnt->second < 2 && it->iTargetType == SPACE)
{
LogError << "sourceid:" << iterProcessData->second->iDataSource << " frameId:" << iterProcessData->second->iFrameId << " SPACE wrong";
LogDebug << " frameId:" << iterProcessData->second->iFrameId << " SPACE 框因非连续识别而过滤";
it = pPostDataPre->vecPostSubData.erase(it);
continue;
}
if (iterTranSpaceContinueCnt->second < 2 && it->iTargetType == TRAINSPACE)
{
LogError << "sourceid:" << iterProcessData->second->iDataSource << " frameId:" << iterProcessData->second->iFrameId << " TRAINSPACE wrong";
LogDebug << " frameId:" << iterProcessData->second->iFrameId << " TRAINSPACE 框因非连续识别而过滤";
it = pPostDataPre->vecPostSubData.erase(it);
continue;
}
@ -871,6 +938,7 @@ void FilterTrainStepOneEngine::DealProcessDataPre(std::shared_ptr<ProcessData> p
{
//CalculateDirection(iterProcessData->second);
CalculateDirectionNew(iterProcessData->second);
if (iDirection_ != DIRECTION_UNKNOWN) this->sendComeTrain(pProcessData->strTrainDate, pProcessData->strTrainName, iDirection_);
}
if (iDirection_ != DIRECTION_UNKNOWN)
@ -901,7 +969,7 @@ void FilterTrainStepOneEngine::DealProcessDataPre(std::shared_ptr<ProcessData> p
{
//倒车
AddBackInfo(iterProcessData->second);
iTrainStatusTemp = TRAINSTATUS_STOP;
iTrainStatusTemp = TRAINSTATUS_STOP;
}
else if(iTrainStatus_ == TRAINSTATUS_RUN)
{
@ -918,7 +986,7 @@ void FilterTrainStepOneEngine::DealProcessDataPre(std::shared_ptr<ProcessData> p
<< " 火车实时运行状态:" << iTrainStatus_ << "(0无车1运行2停车3倒车) iTrainStatusTemp:" << iTrainStatusTemp;
iterProcessData->second->iStatus = iTrainStatusTemp;
this->sendComeTrain(pProcessData->strTrainDate, pProcessData->strTrainName, iDirection_);
// this->sendComeTrain(pProcessData->strTrainDate, pProcessData->strTrainName, iDirection_);
//上一帧push端口0
PushData(strPort0_, iterProcessData->second);

View File

@ -58,6 +58,11 @@ private:
int iChkStopPX_;
int iChkStopCount_;
int iDirection_; //方向
int iPushDirection_; //需要识别的方向
int rightFirst_; // 向右行驶的在前大框类型
int leftFirst_; // 向左行驶的在前大框类型
int iPartitionFrameNum_; //满足跨车厢的帧间隔
int iPlitFrameSpanPX_; //相连帧 同种大框的跨度最大值
std::map<int, PostData> mapPostDataFrist_; //[key-数据源id, value-第一步识别信息]
std::map<int, std::map<int, std::vector<Step1Location>>> mapMapStep1Info_; //[key-数据源id, value-[key-识别目标, value-识别框集合]]

View File

@ -83,7 +83,10 @@ bool SaveCsvEngine::SaveMergerCsv(std::shared_ptr<Train> pTrain)
<< "inspection" << ','
<< "inspectionImg" << ','
<< "containerImg_1" << ','
<< "containerImg_2" << std::endl;
<< "containerImg_2" << ','
<< "startTime" << ','
<< "endTime"
<< std::endl;
}
std::string strTime = pTrain->strTrainName;
@ -147,7 +150,10 @@ bool SaveCsvEngine::SaveMergerCsv(std::shared_ptr<Train> pTrain)
<< pTrain->chkDate.strChkDate1DeadLine << ','
<< szChkDateImgPath << ','
<< szContainer1ImgPath << ','
<< szContainer2ImgPath << std::endl;
<< szContainer2ImgPath << ','
<< MyUtils::getins()->Stamp2Time(pTrain->i64StartTimeStamp, true) << ','
<< MyUtils::getins()->Stamp2Time(pTrain->i64EndTimeStamp, true)
<< std::endl;
outFile.close();
}
@ -427,18 +433,18 @@ bool SaveCsvEngine::SaveContainerCsv(std::shared_ptr<TrainContainer> pTrainConta
catch (const std::exception &)
{
LogError << "strCsvPath:" << strCsvPath << " container savecsv fail!";
continue;
continue;
}
}
return true;
}
APP_ERROR SaveCsvEngine::Process()
{
{
int iRet = APP_ERR_OK;
while (!isStop_)
{
bool bPopFlag = false;
//pop端口0 车厢信息
std::shared_ptr<void> pVoidData0 = nullptr;

View File

@ -160,7 +160,7 @@ APP_ERROR SaveImgEngine::Process()
jvFrameInfo["rate"] = iRate;
jvFrameInfo["isEnd"] = pSaveImgData->bIsEnd;
MyUtils::getins()->WriteJsonInfo(jvFrameInfo, strTxtFilePath);
LogDebug << "engineId:" << engineId_ << " save success txt:" << strTxtFilePath;
// LogDebug << "engineId:" << engineId_ << " save success txt:" << strTxtFilePath;
}
}
return APP_ERR_OK;

View File

@ -119,10 +119,10 @@ void SaveStepOneResultEngine::DealCenterSpace(std::vector<PartionInfo> &vecParat
{
return;
}
LogDebug << "size:" << iVecSize << " frameId:" << pProcessData->iFrameId
<< " vecParationInfo[0].frameId:" << vecParationInfo.at(0).modelSpaceFrame
<< " vecParationInfo[size-1].frameId:" << vecParationInfo.at(iVecSize - 1).modelSpaceFrame
<< " isEnd:" << vecParationInfo.at(iVecSize - 1).bIsEnd;
LogDebug << "积累的车厢切分信息数:" << iVecSize << " :" << pProcessData->iFrameId
<< " 第一个车厢切分信息帧:" << vecParationInfo.at(0).modelSpaceFrame
<< " 最后一个车厢切分信息帧:" << vecParationInfo.at(iVecSize - 1).modelSpaceFrame
<< " 最后一个车厢切分信息是否为结束:" << vecParationInfo.at(iVecSize - 1).bIsEnd;
/*
(2023-02-28)
@ -134,9 +134,9 @@ void SaveStepOneResultEngine::DealCenterSpace(std::vector<PartionInfo> &vecParat
int iCenterXPre = vecParationInfo[i - 1].fLTX + (vecParationInfo[i - 1].fRBX - vecParationInfo[i - 1].fLTX) / 2;
int iCenterX = vecParationInfo[i].fLTX + (vecParationInfo[i].fRBX - vecParationInfo[i].fLTX) / 2;
bool bIntervalFlag = ((int)(vecParationInfo[i].modelSpaceFrame - vecParationInfo[i - 1].modelSpaceFrame)) > iSplitSpan_;
LogDebug << "frameidPre:" << vecParationInfo[i - 1].modelSpaceFrame << " iCenterXPre:" << iCenterXPre
<< " frameid:" << vecParationInfo[i].modelSpaceFrame << " iCenterX:" << iCenterX
<< " bIntervalFlag:" << bIntervalFlag << " i:" << i;
LogDebug << "上一帧ID:" << vecParationInfo[i - 1].modelSpaceFrame << " 上一帧间隔X轴中线:" << iCenterXPre
<< " 本帧ID:" << vecParationInfo[i].modelSpaceFrame << " 本帧间隔X轴中线:" << iCenterX
<< " 满足帧间隔:" << bIntervalFlag << " i:" << i;
if (iDirection_ == DIRECTION_LEFT && (iCenterXPre < iCenterX - iSplitSpanPX_) && bIntervalFlag)
{
vecSpacePos.push_back(i - 1);
@ -190,6 +190,8 @@ void SaveStepOneResultEngine::DealCenterSpace(std::vector<PartionInfo> &vecParat
pPartionInfo->startframe = dataSourceConfig_.iSkipInterval;
}
pPartionInfo->nStatus = ((pProcessData->iStatus == TRAINSTATUS_STOP) ? TRAIN_PAUSE : iDirection_);
LogWarn << "--------- 向Paration 发送数据 --------";
outputQueMap_[strPort0_]->push(std::static_pointer_cast<void>(pPartionInfo));
iPushSpaceFrameId_ = pPartionInfo->modelSpaceFrame;
@ -242,14 +244,18 @@ void SaveStepOneResultEngine::DealTrainSpaceInfo(std::shared_ptr<ProcessData> pP
{
iDirection_ = jvDirectionInfo["direction"].asInt();
}
else
{
LogWarn << "暂未检测出行车方向";
}
}
bool bIntervalFlag = ((int)(pProcessData->iFrameId - parationInfoLast_.modelSpaceFrame)) > iSplitSpan_;
int iCenterCur = jvStep1Space[0]["ltx"].asFloat() + (jvStep1Space[0]["rbx"].asFloat() - jvStep1Space[0]["ltx"].asFloat()) / 2;
int iCenterLast = parationInfoLast_.fLTX + (parationInfoLast_.fRBX - parationInfoLast_.fLTX) / 2;
LogDebug << "frameid:" << pProcessData->iFrameId << " centerCur:" << iCenterCur
<< " lastFrameid:" << parationInfoLast_.modelSpaceFrame << " centerLast:" << iCenterLast
<< " iDirection_:" << iDirection_ << " bIntervalFlag:" << bIntervalFlag << " bDealCenterFlag_:" << bDealCenterFlag_;
LogDebug << "当前帧:" << pProcessData->iFrameId << " 间隔框中心线:" << iCenterCur
<< " 上一帧:" << parationInfoLast_.modelSpaceFrame << " 间隔框中心线:" << iCenterLast
<< " 行车方向:" << iDirection_ << " 是否满足切分帧数:" << bIntervalFlag << " bDealCenterFlag_:" << bDealCenterFlag_;
if (iDirection_ == DIRECTION_UNKNOWN || iCenterLast == 0)
{
@ -295,8 +301,8 @@ void SaveStepOneResultEngine::DealTrainSpaceInfo(std::shared_ptr<ProcessData> pP
if (!(bDealCenterFlag_ && !bIntervalFlag && (iCenterCur < (pProcessData->iWidth / 3 + 30))))
{
vecParationInfo_.push_back(parationInfo);
}
}
}
}
}
else if (iDirection_ == DIRECTION_RIGHT)
{
@ -418,7 +424,7 @@ void SaveStepOneResultEngine::SplitTrainByNumPro(std::shared_ptr<PartionInfo> &p
pPartionInfoNew->i64StartTimeStamp = i64TimeStampFirst_;
pPartionInfoNew->startframe = dataSourceConfig_.iSkipInterval;
}
pPartionInfoNew->nStatus = ((pProcessData->iStatus == TRAINSTATUS_STOP) ? TRAIN_PAUSE : iDirection_);
// pPartionInfoNew->nStatus = ((pProcessData->iStatus == TRAINSTATUS_STOP) ? TRAIN_PAUSE : iDirection_);
//构造一个间隔信息写入到切分帧中
char szCameraNo[5] = {0};
@ -437,6 +443,7 @@ void SaveStepOneResultEngine::SplitTrainByNumPro(std::shared_ptr<PartionInfo> &p
jvFrameInfo["step1Space"].append(jvOneSpace);
MyUtils::getins()->WriteJsonInfo(jvFrameInfo, strFilePath);
LogWarn << "--------- 向Paration 发送数据 --------";
outputQueMap_[strPort0_]->push(std::static_pointer_cast<void>(pPartionInfoNew));
iPushSpaceFrameId_ = pPartionInfoNew->modelSpaceFrame;
@ -543,7 +550,7 @@ APP_ERROR SaveStepOneResultEngine::Process()
{
//车头没有属性因此车头号也加入到属性中。保证向右行驶属性在前时最后2节的切分。
//车头只加入一次防止一个车头2个车头号的场景。但有两个车头且没识别车头间隔则无法处理。
if (!bHaveHeadFlag_)
if (!bHaveHeadFlag_)
{
bool bIntervalFlag = ((int)(pProcessData->iFrameId - headInfo_.iFrameId) > iSplitSpan_ && headInfo_.iFrameId != 0);
@ -578,7 +585,7 @@ APP_ERROR SaveStepOneResultEngine::Process()
}
else if (postSubData.iTargetType == CONTAINER)
{
jvStep1Container.append(jvInfo);
jvStep1Container.append(jvInfo);
}
else if (postSubData.iTargetType == SPACE)
{
@ -681,6 +688,7 @@ APP_ERROR SaveStepOneResultEngine::Process()
//最后一节和倒数第二节之间的间隔未能识别时,此时也需要通过车号属性切分下。
SplitTrainByNumPro(pPartionInfo, pProcessData);
LogWarn << "--------- 向Paration 发送数据 --------";
outputQueMap_[strPort0_]->push(std::static_pointer_cast<void>(pPartionInfo));
iPushSpaceFrameId_ = pPartionInfo->modelSpaceFrame;

View File

@ -212,6 +212,11 @@ std::string SelectBestEngine::GetBest(std::vector<TransInfo> &vecAllTransInfo, T
{
vecAllTransInfo = vecTransInfoTemp;
}
else
{
// 此处因车厢太脏。识别效果很差难以与RFID识别结果融合所以增加eles
return strValue;
}
//获取最优长度
int iBestLen = GetBestLength(vecAllTransInfo, iMaxLen);

View File

@ -149,7 +149,6 @@ APP_ERROR TrainParationMgr::Process()
int nSize = lstPartInfo.size();
int nPartionIndex = nSize - 1;
int nPrePartionIndex = nPartionIndex;
//当然车厢通过的数量
if (nSize == 0) {
@ -166,35 +165,30 @@ APP_ERROR TrainParationMgr::Process()
lstPartInfo.push_back(stTempInfo);
//lstPartInfo.push_back(stTempInfo);
nPartionIndex++;
}
{
lstPartInfo[nPartionIndex].i64EndTimeStamp = pPartionInfo->i64EndTimeStamp;
lstPartInfo[nPartionIndex].endframe = pPartionInfo->modelSpaceFrame;
// 根据开始帧时间戳和结束帧时间错 计算当节车厢的行车速度
// LogInfo << "TrainAnaEngine checkPartion bPartion == true lstPartInfo[nPrePartionIndex].ftime" << abs(lstPartInfo[nPrePartionIndex].i64EndTimeStamp - lstPartInfo[nPrePartionIndex].i64StartTimeStamp);
// 根据时间戳计算时间差
float nTimePassed = (abs(lstPartInfo[nPartionIndex].i64EndTimeStamp - lstPartInfo[nPartionIndex].i64StartTimeStamp)) * 1.0;
//防止停车导致速度过小
if(pPartionInfo->nStatus != TRAIN_PAUSE && nTimePassed <= 50000) {
lstPartInfo[nPartionIndex].fspeed = (TRAIN_WIDTH * 1000.0) /nTimePassed;
}
lstPartInfo[nPartionIndex].i64EndTimeStamp = pPartionInfo->i64EndTimeStamp;
lstPartInfo[nPartionIndex].endframe = pPartionInfo->modelSpaceFrame;
// 根据开始帧时间戳和结束帧时间错 计算当节车厢的行车速度
// 根据时间戳计算时间差
float nTimePassed = (abs(lstPartInfo[nPartionIndex].i64EndTimeStamp - lstPartInfo[nPartionIndex].i64StartTimeStamp)) * 1.0;
//防止停车导致速度过小
if(pPartionInfo->nStatus != TRAIN_PAUSE && nTimePassed <= 50000) {
lstPartInfo[nPartionIndex].fspeed = (TRAIN_WIDTH * 1000.0) /nTimePassed;
} else {
if (nPartionIndex >= 1){
lstPartInfo[nPartionIndex].fspeed = lstPartInfo[nPartionIndex - 1].fspeed / 3;
} else {
if (nPartionIndex >= 1){
lstPartInfo[nPartionIndex].fspeed = lstPartInfo[nPartionIndex - 1].fspeed / 3;
} else {
lstPartInfo[nPartionIndex].fspeed = TRAIN_DEFAULT_SPEED / 10;
}
lstPartInfo[nPartionIndex].fspeed = TRAIN_DEFAULT_SPEED / 10;
}
//
//nSamePartionIgnoreCount = (nTimePassed / (3 * 5000)) * nFrameRate;
// 结束帧为当前帧再往后 (除以2的原因中间为车钩车钩后的车体宽度为整个镜头的宽度除以2)
//lstPartInfo[nPrePartionIndex].endframe = pPartionInfo->modelSpaceFrame;
//LogInfo << "TrainAnaEngine checkPartion bPartion == true lstPartInfo[nPrePartionIndex].endframe" << lstPartInfo[nPrePartionIndex].endframe;
lstPartInfo[nPartionIndex].bmodelconfirmed = true;
}
//
//nSamePartionIgnoreCount = (nTimePassed / (3 * 5000)) * nFrameRate;
// 结束帧为当前帧再往后 (除以2的原因中间为车钩车钩后的车体宽度为整个镜头的宽度除以2)
lstPartInfo[nPartionIndex].bmodelconfirmed = true;
/// write json info to file
//先读取文本内容,追加新的信息后再写入
@ -202,133 +196,57 @@ APP_ERROR TrainParationMgr::Process()
Json::Value jvPartionInfo;
//JSON保存路径
std::string strFilePath;
bool brightcome = false;
int nrightoffset = 0;
if (pPartionInfo->nStatus == 1) {
brightcome = true;
// nrightoffset = -1;
}
//检测到车厢划分信息
{
// if (nPartionIndex == 0) {
// lstPartInfo[nPartionIndex].endframe = lstPartInfo[nPartionIndex].endframe - nrightoffset * (lstPartInfo[nPartionIndex].fLTX - METHOD_BASE_WIDTH) / 10;
// } else {
// lstPartInfo[nPartionIndex].endframe = lstPartInfo[nPartionIndex].endframe - nrightoffset * getCouplerOffsetPix(lstPartInfo[nPartionIndex].fspeed, lstPartInfo[nPartionIndex].endframe);
// }
//lstPartInfo[nPartionIndex].endframe = lstPartInfo[nPartionIndex].endframe + getOffsetFrame(lstPartInfo[nPartionIndex].fspeed, (TRAIN_IN_CAMERA_WIDTH / 2), nFrameRate);
strFilePath = strResultPath_ + pPartionInfo->strTrainDate + "/" + pPartionInfo->strTrainName + "/"
+ std::to_string(nPartionIndex + 1) + ".txt";
strFilePath = strResultPath_ + pPartionInfo->strTrainDate + "/" + pPartionInfo->strTrainName + "/"
+ std::to_string(nPartionIndex + 1) + ".txt";
// 首部车钩的偏移位置 (单位帧)
int headpos = 0;
// 尾部车钩的偏移位置 (单位帧)
int tailpos = (0 - nTailPixOffset);
LogInfo << "TrainAnaEngine Process lstPartInfo[nPartionIndex].startframe" << lstPartInfo[nPartionIndex].startframe ;
LogInfo << "TrainAnaEngine Process lstPartInfo[nPartionIndex].endframe" << lstPartInfo[nPartionIndex].endframe;
//if (nPartionIndex == 0)
{
headpos = METHOD_BASE_WIDTH / 2;
tailpos = tailpos + headpos;
}
// 是否位右侧来车
if (brightcome == true)
{
//brightcome = true;
// 右侧来车 首部车钩从画面最右侧开始
headpos = METHOD_BASE_WIDTH / 2;
// 右侧来车 尾部车钩从画面最右侧+车厢宽的像素值
tailpos = headpos + nTailPixOffset;
/*
if (nPartionIndex == 0)
{
headpos = METHOD_BASE_WIDTH / 2;
tailpos = tailpos - headpos;
}
*/
}
LogInfo << "TrainAnaEngine Process lstPartInfo[nPartionIndex].startframe" << lstPartInfo[nPartionIndex].startframe ;
LogInfo << "TrainAnaEngine Process lstPartInfo[nPartionIndex].endframe" << lstPartInfo[nPartionIndex].endframe;
//从当节车厢的开始帧到结束帧计算首部车钩和尾部车钩的偏移值
// for (int nplayframe = lstPartInfo[nPartionIndex].startframe; nplayframe <= lstPartInfo[nPartionIndex].endframe; nplayframe++)
// {
// Json::Value jvposInfo;
// // 当前车厢的第几几帧
// int noffsetindex = (nplayframe - lstPartInfo[nPartionIndex].startframe);
// // 根据车速计算车钩位置量(单位 像素)
// int noffsetpos = getCouplerOffsetPosition(lstPartInfo[nPartionIndex].fspeed, noffsetindex);
// // 初始化首部车钩偏移量(单位 像素)
// jvposInfo["headpos"] = -1;
// // 初始化尾部车钩偏移量(单位 像素)
// jvposInfo["tailpos"] = -1;
// if (brightcome == false) {
// // 左侧来车
// // 首部车钩和尾部车钩 每帧加 车钩偏移值
// jvposInfo["headpos"] = (headpos + noffsetpos);
// jvposInfo["tailpos"] = (tailpos + noffsetpos);
// } else {
// // 右侧来车
// // 首部车钩和尾部车钩 每帧减 车钩偏移值
// jvposInfo["headpos"] = (headpos - noffsetpos);
// jvposInfo["tailpos"] = (tailpos - noffsetpos);
// }
// //LogInfo << "TrainAnaEngine Process jvposInfo[headpos]" << jvposInfo["headpos"];
// // LogInfo << "TrainAnaEngine Process jvposInfo[tailpos]:" << jvposInfo["tailpos"];
// //LogInfo << "TrainAnaEngine Process jvPartionListInfo.append";
// jvPartionInfo[std::to_string(nplayframe)] = jvposInfo;
// }
PartionInfo stTempInfo;
// 开始记录新的一节车厢信息(从索引变成序号+1 ,新增一节车厢信息+1)
stTempInfo.nindex = nPartionIndex + 2;
// 上一节车厢的结束帧 - (偏移帧 = (镜头内的车体宽度/ (速度) -> 通过时间) * 帧/秒 ) 作为下一节车厢的开始帧
int ntempOffsetFrame = lstPartInfo[nPartionIndex].endframe;
//
//- (int)(((TRAIN_IN_CAMERA_WIDTH / 2) / lstPartInfo[nPartionIndex].fspeed) * nFrameRate);
//LogInfo << "TrainAnaEngine Process ntempOffsetFrame:" << ntempOffsetFrame;
stTempInfo.startframe = ntempOffsetFrame;
stTempInfo.i64StartTimeStamp = pPartionInfo->i64EndTimeStamp;
// 初始化下一节的结束帧
//stTempInfo.endframe = 0;
PartionInfo stTempInfo;
// 开始记录新的一节车厢信息(从索引变成序号+1 ,新增一节车厢信息+1)
stTempInfo.nindex = nPartionIndex + 2;
// 上一节车厢的结束帧 - (偏移帧 = (镜头内的车体宽度/ (速度) -> 通过时间) * 帧/秒 ) 作为下一节车厢的开始帧
int ntempOffsetFrame = lstPartInfo[nPartionIndex].endframe;
stTempInfo.startframe = ntempOffsetFrame;
stTempInfo.i64StartTimeStamp = pPartionInfo->i64EndTimeStamp;
// 初始化下一节的结束帧
//stTempInfo.endframe = 0;
lstPartInfo.push_back(stTempInfo);
lstPartInfo.push_back(stTempInfo);
// 记录过车日期
jvPartionInfo["trainDate"] = pPartionInfo->strTrainDate;
// 记录过车时间
jvPartionInfo["trainName"] = pPartionInfo->strTrainName;
// 记录车厢节数 (索引从0开始 所以这里+1)
jvPartionInfo["trainNo"] = nPartionIndex + 1;
// 记录行车开始帧
jvPartionInfo["startFrameId"] = lstPartInfo[nPartionIndex].startframe;
jvPartionInfo["startTimeStamp"] = lstPartInfo[nPartionIndex].i64StartTimeStamp;
// 记录行车结束帧
jvPartionInfo["endFrameId"] = lstPartInfo[nPartionIndex].endframe;
jvPartionInfo["endTimeStamp"] = lstPartInfo[nPartionIndex].i64EndTimeStamp;
// 记录车厢是否完全通过
jvPartionInfo["isEnd"] = pPartionInfo->bIsEnd;
// 记录过车日期
jvPartionInfo["trainDate"] = pPartionInfo->strTrainDate;
// 记录过车时间
jvPartionInfo["trainName"] = pPartionInfo->strTrainName;
// 记录车厢节数 (索引从0开始 所以这里+1)
jvPartionInfo["trainNo"] = nPartionIndex + 1;
// 记录行车开始帧
jvPartionInfo["startFrameId"] = lstPartInfo[nPartionIndex].startframe;
jvPartionInfo["startTimeStamp"] = lstPartInfo[nPartionIndex].i64StartTimeStamp;
// 记录行车结束帧
jvPartionInfo["endFrameId"] = lstPartInfo[nPartionIndex].endframe;
jvPartionInfo["endTimeStamp"] = lstPartInfo[nPartionIndex].i64EndTimeStamp;
// 记录车厢是否完全通过
jvPartionInfo["isEnd"] = pPartionInfo->bIsEnd;
//是否是间隔模型切分的车厢
jvPartionInfo["modelconfirmed"] = pPartionInfo->bmodelconfirmed;
//是否是间隔模型切分的车厢
jvPartionInfo["modelconfirmed"] = pPartionInfo->bmodelconfirmed;
// 记录当前车厢的信息到JSON文件
MyUtils::getins()->WriteJsonInfo(jvPartionInfo, strFilePath);
std::shared_ptr<TrainRange> pTrainRange = std::make_shared<TrainRange>();
pTrainRange->strTrainDate = jvPartionInfo["trainDate"].asString();
pTrainRange->strTrainName = jvPartionInfo["trainName"].asString();
pTrainRange->iTrainIndex = jvPartionInfo["trainNo"].asInt();
pTrainRange->iStartFrameId = jvPartionInfo["startFrameId"].asInt();
pTrainRange->i64StartTimeStamp = jvPartionInfo["startTimeStamp"].asInt64();
pTrainRange->iEndFrameId = jvPartionInfo["endFrameId"].asInt();
pTrainRange->i64EndTimeStamp = jvPartionInfo["endTimeStamp"].asInt64();
pTrainRange->bIsEnd = jvPartionInfo["isEnd"].asBool();
pTrainRange->bmodelconfirmed = jvPartionInfo["modelconfirmed"].asBool();
iRet = outputQueMap_[strPort0_]->push(std::static_pointer_cast<void>(pTrainRange));
}
// 记录当前车厢的信息到JSON文件
MyUtils::getins()->WriteJsonInfo(jvPartionInfo, strFilePath);
std::shared_ptr<TrainRange> pTrainRange = std::make_shared<TrainRange>();
pTrainRange->strTrainDate = jvPartionInfo["trainDate"].asString();
pTrainRange->strTrainName = jvPartionInfo["trainName"].asString();
pTrainRange->iTrainIndex = jvPartionInfo["trainNo"].asInt();
pTrainRange->iStartFrameId = jvPartionInfo["startFrameId"].asInt();
pTrainRange->i64StartTimeStamp = jvPartionInfo["startTimeStamp"].asInt64();
pTrainRange->iEndFrameId = jvPartionInfo["endFrameId"].asInt();
pTrainRange->i64EndTimeStamp = jvPartionInfo["endTimeStamp"].asInt64();
pTrainRange->bIsEnd = jvPartionInfo["isEnd"].asBool();
pTrainRange->bmodelconfirmed = jvPartionInfo["modelconfirmed"].asBool();
iRet = outputQueMap_[strPort0_]->push(std::static_pointer_cast<void>(pTrainRange));
if (pPartionInfo->bIsEnd) {
lstPartInfo.clear();

View File

@ -101,7 +101,7 @@ void TransTrainEngine::InitParam()
*/
bool TransTrainEngine::AuthTransNum(int classId, const std::string &trainNum)
{
LogInfo << "classId:" << classId << " trainNum:" << trainNum;
// LogInfo << "classId:" << classId << " trainNum:" << trainNum;
switch (classId)
{
case TRAIN_HEAD: // 车头上的编号
@ -774,8 +774,8 @@ APP_ERROR TransTrainEngine::Process()
{
strTemp += vecClassNames_.at(it->second.at(j).iClassId);
}
LogDebug << "step2 char sourceid:" << pProcessData->iDataSource << " frameid:" << pProcessData->iFrameId
<< " bigclassId:" << postSubData.iBigClassId << " line:" << it->first << "," << strTemp;
// LogDebug << "step2 char sourceid:" << pProcessData->iDataSource << " frameid:" << pProcessData->iFrameId
// << " bigclassId:" << postSubData.iBigClassId << " line:" << it->first << "," << strTemp;
}
TransSubData transSubData;

View File

@ -1,146 +0,0 @@
#include "TestImgEngine.h"
#include <iostream>
#include <algorithm>
#include <string>
#include <stdio.h>
#include <stdarg.h>
#include <sys/time.h>
#include <string.h>
#include <vector>
#include <memory>
using namespace std;
using namespace ai_matrix;
TestImgEngine::TestImgEngine() {}
TestImgEngine::~TestImgEngine() {}
APP_ERROR TestImgEngine::Init()
{
strPort0_ = engineName_ + "_" + std::to_string(engineId_) + "_0";
dataSourceConfig_ = MyYaml::GetIns()->GetDataSourceConfigById(engineId_); //获取摄像机参数
width_ = IMAGE_WIDTH, height_ = IMAGE_HEIGHT;
LogInfo << "engineId_:" << engineId_ << " TestImgEngine Init ok";
return APP_ERR_OK;
}
APP_ERROR TestImgEngine::DeInit()
{
LogInfo << "engineId_:" << engineId_ << " TestImgEngine DeInit ok";
return APP_ERR_OK;
}
//测试jpeg解码时打开,并修改相应的yaml配置引擎间通信
#if 0
APP_ERROR TestImgEngine::Process()
{
int iRet = APP_ERR_OK;
uint64_t u64count_num = 0;
while (!isStop_)
{
// std::cout<<"Enter Read Image Thread "<<++u64count_num<<" Times!"<<std::endl;
// std::cout<<"Read Image Thread ID: "<<std::this_thread::get_id()<<std::endl;
//读取图像
std::string jpeg_img_file_name = MyYaml::GetIns()->GetStringValue("jpeg_image_file_name");
//从本地文件读取jpg图像并构造jpeg数据
void* pJPEGBuffer = nullptr;
FILE *jpeg_fp;
jpeg_fp = fopen(jpeg_img_file_name.c_str(), "r");
if (!jpeg_fp)
{
std::cerr<<"Can not open "<<jpeg_img_file_name.c_str()<<std::endl;
}
fseek(jpeg_fp, 0L, SEEK_END);
unsigned int pJPEGBuffer_Size = ftell(jpeg_fp);
// printf("the jpg image data len: %d\n", pJPEGBuffer_Size);
// std::cout<<"the jpg image data len: "<<pJPEGBuffer_Size<<std::endl;
fseek(jpeg_fp, 0L, SEEK_SET);
pJPEGBuffer = new uint8_t[pJPEGBuffer_Size];
fread((char*)pJPEGBuffer, 1, pJPEGBuffer_Size, jpeg_fp);
fclose(jpeg_fp);
std::shared_ptr<FrameData> pJPEGFrameData = std::make_shared<FrameData>();
//组织数据,压入下一引擎
pJPEGFrameData->iDataSource = engineId_;
pJPEGFrameData->iSize = pJPEGBuffer_Size;
pJPEGFrameData->pData.reset(pJPEGBuffer, [](void* data){if(data) {delete[] data; data = nullptr;}}); //智能指针管理内存
// pJPEGFrameData->pData.reset(pJPEGBuffer, Deleter); //智能指针管理内存
pJPEGFrameData->i64TimeStamp = MyUtils::getins()->GetCurrentTimeMillis();
#if 1
iRet = outputQueMap_[strPort0_]->push(std::static_pointer_cast<void>(pJPEGFrameData));
if (iRet != APP_ERR_OK){
LogError << "push the jpeg image data failed...";
std::cerr<<"push the jpeg image data failed..."<<std::endl;
}else{
// std::cout<<"push the jpeg image data success!"<<std::endl;
}
#endif
usleep(30 * 1000);
}
}
#else
//测试H264编码或者jpeg编码打开,并修改相应的yaml配置引擎间通信
APP_ERROR TestImgEngine::Process()
{
int iRet = APP_ERR_OK;
uint64_t u64count_num = 0;
while (!isStop_)
{
// std::cout<<"Enter Read Image Thread "<<++u64count_num<<" Times!"<<std::endl;
// std::cout<<"Read Image Thread ID: "<<std::this_thread::get_id()<<std::endl;
//读取图像
std::string yuv420m_img_file_name = MyYaml::GetIns()->GetStringValue("yuv420m_image_file_name");
//从本地文件读取yuv420m图像并构造yuv420m数据
void* pYUV420MBuffer = nullptr;
FILE *yuv420m_fp;
yuv420m_fp = fopen(yuv420m_img_file_name.c_str(), "rb");
if (!yuv420m_fp)
{
std::cerr<<"Can not open "<<yuv420m_img_file_name.c_str()<<std::endl;
}
fseek(yuv420m_fp, 0L, SEEK_END);
unsigned int pYUV420MBuffer_Size = ftell(yuv420m_fp);
// printf("test.yuv filesize = %d\n", pYUV420MBuffer_Size);
// std::cout<<"test.yuv filesize = "<<pYUV420MBuffer_Size<<std::endl;
fseek(yuv420m_fp, 0L, SEEK_SET);
pYUV420MBuffer = new uint8_t[pYUV420MBuffer_Size];
fread((char*)pYUV420MBuffer, 1, pYUV420MBuffer_Size, yuv420m_fp);
fclose(yuv420m_fp);
std::shared_ptr<FrameData> pYUV420MFrameData = std::make_shared<FrameData>();
//组织数据,压入下一引擎
pYUV420MFrameData->iDataSource = engineId_;
pYUV420MFrameData->iSize = pYUV420MBuffer_Size;
pYUV420MFrameData->pData.reset(pYUV420MBuffer, [](void* data){if(data) {delete[] data; data = nullptr;}}); //智能指针管理内存
// pYUV420MFrameData->pData.reset(pYUV420MBuffer, Deleter); //智能指针管理内存
pYUV420MFrameData->i64TimeStamp = MyUtils::getins()->GetCurrentTimeMillis();
#if 1
iRet = outputQueMap_[strPort0_]->push(std::static_pointer_cast<void>(pYUV420MFrameData));
if (iRet != APP_ERR_OK){
LogError << "push the yuv420m image data failed...";
std::cerr<<"push the yuv420m image data failed..."<<std::endl;
}else{
// std::cout<<"push the yuv420m image data success!"<<std::endl;
}
#endif
usleep(30 * 1000);
}
}
#endif

View File

@ -1,45 +0,0 @@
//读取图像引擎(用于测试)
#ifndef _TEST_IMG_ENGINE_H
#define _TEST_IMG_ENGINE_H
#include <iostream>
#include <chrono>
#include <cmath>
#include <utility>
#include <thread>
#include <chrono>
#include <functional>
#include <atomic>
#include <time.h>
#include <unistd.h>
#include <queue>
#include <mutex>
#include <semaphore.h>
#include "EngineBase.h"
#include "EngineFactory.h"
#include "MyYaml.h"
#include "myutils.h"
#include "AppCommon.h"
class TestImgEngine : public ai_matrix::EngineBase
{
public:
TestImgEngine();
~TestImgEngine();
APP_ERROR Init() override;
APP_ERROR DeInit() override;
APP_ERROR Process() override;
private:
ai_matrix::DataSourceConfig dataSourceConfig_;
std::string strPort0_;
unsigned int width_, height_;
};
ENGINE_REGIST(TestImgEngine)
#endif //END OF _TEST_IMG_ENGINE_H

View File

@ -1,109 +0,0 @@
#include "VideoEngine.h"
using namespace std;
using namespace cv;
using namespace ai_matrix;
VideoEngine::VideoEngine() {}
VideoEngine::~VideoEngine() {}
APP_ERROR VideoEngine::Init()
{
LogInfo << "engineId_:" << engineId_ << " VideoEngine Init start";
strPort0_ = engineName_ + "_" + std::to_string(engineId_) + "_0";
dataSourceConfig_ = MyYaml::GetIns()->GetDataSourceConfigById(engineId_); //获取摄像机参数
width_ = IMAGE_WIDTH, height_ = IMAGE_HEIGHT;
LogInfo << "engineId_:" << engineId_ << " VideoEngine Init ok";
return APP_ERR_OK;
}
APP_ERROR VideoEngine::DeInit()
{
LogInfo << "engineId_:" << engineId_ << " VideoEngine DeInit ok";
return APP_ERR_OK;
}
APP_ERROR VideoEngine::Process()
{
int iRet = APP_ERR_OK;
uint64_t u64count_num = 0;
// if (MyYaml::GetIns()->GetStringValue("gc_data_source") != "camera")
// {
// LogDebug << "engineId_:" << engineId_ << " gc_data_source no camera";
// return iRet;
// }
VideoCapture capture;
/*****************************************************************************************
Gstream解码
:1.nvv4l2decoder 2.omxh264dec
使nvv4l2decoder解码时enable-max-performance和enable-frame-type-reporting才可以使用
enable-max-performance=1
enable-frame-type-reporting=1 使
*****************************************************************************************/
//从摄像头RTSP拉流
const std::string videoStreamAddress = std::string("rtspsrc location=") + dataSourceConfig_.strUrl.c_str() + " latency=10 ! \
rtph264depay ! h264parse ! nvv4l2decoder enable-max-performance=1 enable-frame-type-reporting=1 ! nvvidconv ! video/x-raw, format=(string)BGRx ! videoconvert ! appsink";
// while(!capture.open(dataSourceConfig_.strUrl.c_str())){
while(!capture.open(videoStreamAddress)){
std::cerr<<"Opening video stream or file failed!!!" <<std::endl;
std::cout<<"Restart Opening video stream or file ..."<<std::endl;
sleep(1);
}
std::cout<<"Opening video stream or file Success"<<std::endl;
int frameW = capture.get(3);
int frameH = capture.get(4);
std::cout << dataSourceConfig_.strUrl.c_str() << ";"<< "frameW:" << frameW << " frameH:" << frameH << std::endl;
while (!isStop_)
{
std::cout << dataSourceConfig_.strUrl.c_str() << ";"<< "frameW:" << frameW << " frameH:" << frameH << std::endl;
// std::cout<<"Enter VideoEngine Thread "<<++u64count_num<<" Times!"<<std::endl;
// std::cout<<"VideoEngine Thread ID: "<<std::this_thread::get_id()<<std::endl;
//构造BGR数据
void* pBGRBuffer = nullptr;
unsigned int pBGRBuffer_Size = width_*height_*3;
pBGRBuffer = new uint8_t[pBGRBuffer_Size];
std::shared_ptr<FrameData> pBGRFrameData = std::make_shared<FrameData>();
cv::Mat frame(frameH, frameW, CV_8UC3, pBGRBuffer);
// clock_t start, end;
// start = clock();
if(!capture.read(frame)) {
std::cerr << "no frame" << std::endl;
waitKey();
}
// end = clock();
// printf("read 1 frame time is %.8f ms\n", (double)(end-start)/CLOCKS_PER_SEC*1000);
//压入OpenCV RTSP所拉的H264解码BRG后的数据
//组织数据
pBGRFrameData->iDataSource = engineId_;
pBGRFrameData->iSize = pBGRBuffer_Size;
pBGRFrameData->pData.reset(pBGRBuffer, [](void* data){if(data) {delete[] data; data = nullptr;}}); //智能指针管理内存
// pBGRFrameData->pData.reset(pBGRBuffer, Deleter); //智能指针管理内存
pBGRFrameData->i64TimeStamp = MyUtils::getins()->GetCurrentTimeMillis();
iRet = outputQueMap_[strPort0_]->push(std::static_pointer_cast<void>(pBGRFrameData));
if (iRet != APP_ERR_OK){
LogError << "push the bgr frame data failed...";
std::cerr<<"push the bgr frame data failed..."<<std::endl;
}else{
// std::cout<<"push the bgr frame data success!"<<std::endl;
}
// usleep(30*1000); //读取文件时模拟30帧
}
}

View File

@ -1,69 +0,0 @@
//OpenCV RTSP拉流引擎(包含视频解码)
#ifndef _VIDEO_ENGINE_H
#define _VIDEO_ENGINE_H
#include <iostream>
#include <chrono>
#include <cmath>
#include <utility>
#include <thread>
#include <chrono>
#include <functional>
#include <atomic>
#include <time.h>
#include <sys/time.h>
#include <unistd.h>
#include <queue>
#include <mutex>
#include <semaphore.h>
#include <algorithm>
#include <string>
#include <stdio.h>
#include <stdarg.h>
#include <string.h>
#include <vector>
#include <memory>
#include <opencv2/opencv.hpp>
#include <opencv2/core/core.hpp>
#include <opencv2/highgui/highgui.hpp>
#include <opencv2/imgproc/imgproc.hpp>
#ifdef __cplusplus
extern "C"
{
#endif
#include <libavcodec/avcodec.h>
#include <libavutil/samplefmt.h>
#include <libavformat/avformat.h>
#ifdef __cplusplus
};
#endif
#include "EngineBase.h"
#include "EngineFactory.h"
#include "MyYaml.h"
#include "myutils.h"
#include "AppCommon.h"
#define RTSP_PULL_CAMERA_VIDEO_STREAM
class VideoEngine : public ai_matrix::EngineBase
{
public:
VideoEngine();
~VideoEngine();
APP_ERROR Init() override;
APP_ERROR DeInit() override;
APP_ERROR Process() override;
private:
ai_matrix::DataSourceConfig dataSourceConfig_;
std::string strPort0_;
unsigned int width_, height_;
};
ENGINE_REGIST(VideoEngine)
#endif //_VIDEO_ENGINE_H

View File

@ -2,54 +2,54 @@
using namespace std;
HardH264FFmpegDecode::HardH264FFmpegDecode()
HardH264FFmpegDecode::HardH264FFmpegDecode()
{
;
;
}
HardH264FFmpegDecode::~HardH264FFmpegDecode()
HardH264FFmpegDecode::~HardH264FFmpegDecode()
{
;
;
}
int HardH264FFmpegDecode::HardH264FFmpegDecoderInit(unsigned int uiWidth, unsigned int uiHeight, unsigned int uiFrameRate)
{
uiWidth_ = uiWidth; uiHeight_ = uiHeight;
uiFrameRate_ = uiFrameRate;
iFrameFinished_ = 0;
uiWidth_ = uiWidth; uiHeight_ = uiHeight;
uiFrameRate_ = uiFrameRate;
iFrameFinished_ = 0;
av_log_set_level(AV_LOG_ERROR);
// AVCodecID codec_id = AV_CODEC_ID_H264; //解码H264
// pCodec_ = avcodec_find_decoder(codec_id); //获取解码器
av_log_set_level(AV_LOG_ERROR);
pCodec_ = avcodec_find_decoder_by_name(NVIDIA_H264_DECODER);
// AVCodecID codec_id = AV_CODEC_ID_H264; //解码H264
// pCodec_ = avcodec_find_decoder(codec_id); //获取解码器
pCodec_ = avcodec_find_decoder_by_name(NVIDIA_H264_DECODER);
if (!pCodec_) {
fprintf(stderr, "Codec '%s' not found\n", pCodec_->long_name);
exit(1);
}
printf("Codec found with name %d(%s)\n", pCodec_->id, pCodec_->long_name);
printf("Codec found with name %d(%s)\n", pCodec_->id, pCodec_->long_name);
//创建上下文
pCodecCtx_ = avcodec_alloc_context3(pCodec_);
//创建上下文
pCodecCtx_ = avcodec_alloc_context3(pCodec_);
if (!pCodecCtx_){
fprintf(stderr, "Could not allocate video codec context\n");
exit(1);
}
//创建解析器
pCodecParserCtx_ = av_parser_init(pCodec_->id);
if (!pCodecParserCtx_){
//创建解析器
pCodecParserCtx_ = av_parser_init(pCodec_->id);
if (!pCodecParserCtx_){
fprintf(stderr, "parser not found\n");
exit(1);
}
}
//if(pCodec_->capabilities&CODEC_CAP_TRUNCATED)
// pCodecCtx_->flags|= CODEC_FLAG_TRUNCATED;
//打开解码器
// pCodecCtx_->flags|= CODEC_FLAG_TRUNCATED;
//打开解码器
int ret = avcodec_open2(pCodecCtx_, pCodec_, nullptr);
if (ret < 0) {
if (ret < 0) {
fprintf(stderr, "Could not open codec\n");
printf("avcodec_open2 ret is: %d\n",ret);
exit(1);
@ -63,7 +63,7 @@ int HardH264FFmpegDecode::HardH264FFmpegDecoderInit(unsigned int uiWidth, unsign
}
// av_init_packet(pPacket_);
//分配frame
//分配frame
pSrcFrame_ = av_frame_alloc();
if (!pSrcFrame_) {
fprintf(stderr, "Could not allocate video src pFrame\n");
@ -78,14 +78,14 @@ int HardH264FFmpegDecode::HardH264FFmpegDecoderInit(unsigned int uiWidth, unsign
printf("after align down video_width: %d, video_height: %d\n", uiWidth_, uiHeight_);
//初始化解析器参数
pCodecCtx_->time_base.num = 1;
pCodecCtx_->frame_number = 1; //每包一个视频帧
pCodecCtx_->codec_type = AVMEDIA_TYPE_VIDEO;
pCodecCtx_->bit_rate = 0;
pCodecCtx_->time_base.den = uiFrameRate_;//帧率
pCodecCtx_->width = uiWidth_; //视频宽
pCodecCtx_->height = uiHeight_; //视频高
//初始化解析器参数
pCodecCtx_->time_base.num = 1;
pCodecCtx_->frame_number = 1; //每包一个视频帧
pCodecCtx_->codec_type = AVMEDIA_TYPE_VIDEO;
pCodecCtx_->bit_rate = 0;
pCodecCtx_->time_base.den = uiFrameRate_;//帧率
pCodecCtx_->width = uiWidth_; //视频宽
pCodecCtx_->height = uiHeight_; //视频高
// pCodecCtx_->pix_fmt = AV_PIX_FMT_YUV420P;
int bufferSize = av_image_get_buffer_size(AV_PIX_FMT_YUV420P,
@ -102,46 +102,46 @@ int HardH264FFmpegDecode::HardH264FFmpegDecoderInit(unsigned int uiWidth, unsign
printf("pDstFrame_->linesize: %d, bufferSize: %d\n", pDstFrame_->linesize, bufferSize);
pSwsContext_ = sws_getContext(pCodecCtx_->width, pCodecCtx_->height, pCodecCtx_->pix_fmt,
pCodecCtx_->width, pCodecCtx_->height, AV_PIX_FMT_YUV420P, SWS_BICUBIC, nullptr, nullptr, nullptr);
pCodecCtx_->width, pCodecCtx_->height, AV_PIX_FMT_YUV420P, SWS_BICUBIC, nullptr, nullptr, nullptr);
printf("pCodecCtx_->width: %d, pCodecCtx_->height: %d, pCodecCtx_->pix_fmt: %d\n", pCodecCtx_->width, pCodecCtx_->height, pCodecCtx_->pix_fmt);
return 0;
return 0;
}
int HardH264FFmpegDecode::HardH264FFmpegDecoderDeInit()
{
if(pu8OutBuffer_){
if(pu8OutBuffer_){
av_free(pu8OutBuffer_);
pu8OutBuffer_ = nullptr;
}
if(pSrcFrame_){
av_frame_free(&pSrcFrame_);
pSrcFrame_ = nullptr;
}
if(pDstFrame_){
av_frame_free(&pDstFrame_);
pDstFrame_ = nullptr;
}
if(pPacket_){
av_packet_free(&pPacket_);
if(pSrcFrame_){
av_frame_free(&pSrcFrame_);
pSrcFrame_ = nullptr;
}
if(pDstFrame_){
av_frame_free(&pDstFrame_);
pDstFrame_ = nullptr;
}
if(pPacket_){
av_packet_free(&pPacket_);
pPacket_ = nullptr;
}
if(pCodecParserCtx_){
av_parser_close(pCodecParserCtx_);
pCodecParserCtx_ = nullptr;
}
if(pCodecCtx_){
avcodec_close(pCodecCtx_);
av_free(pCodecCtx_);
pCodecCtx_ = nullptr;
}
if(pCodecParserCtx_){
av_parser_close(pCodecParserCtx_);
pCodecParserCtx_ = nullptr;
}
if(pCodecCtx_){
avcodec_close(pCodecCtx_);
av_free(pCodecCtx_);
pCodecCtx_ = nullptr;
}
if(pSwsContext_){
sws_freeContext(pSwsContext_);
pSwsContext_ = nullptr;
}
if(pSwsContext_){
sws_freeContext(pSwsContext_);
pSwsContext_ = nullptr;
}
}
int HardH264FFmpegDecode::HardH264FFmpegDecoderFilterGraph(AVFilterGraph *pGraph, AVFilterContext *pSourceCtx, AVFilterContext *pSinkCtx)
@ -149,7 +149,7 @@ int HardH264FFmpegDecode::HardH264FFmpegDecoderFilterGraph(AVFilterGraph *pGraph
int ret;
AVFilterInOut *pOutputs = nullptr, *pInputs = nullptr;
if ((ret = avfilter_link(pSourceCtx, 0, pSinkCtx, 0)) >= 0){
ret = avfilter_graph_config(pGraph, nullptr);
ret = avfilter_graph_config(pGraph, nullptr);
}
avfilter_inout_free(&pOutputs);
@ -168,14 +168,14 @@ int HardH264FFmpegDecode::HardH264FFmpegDecoderConfigureVideoFilters(AVFilterGra
"video_size=%dx%d:pix_fmt=%d:time_base=1/1200000",
iWidth, iHeight, iFormat);
if ((ret = avfilter_graph_create_filter(&pFiltSrc,
avfilter_get_by_name("buffer"), "ffplay_buffer", BufferSrcArgs,
nullptr, pGraph)) < 0){
avfilter_get_by_name("buffer"), "ffplay_buffer", BufferSrcArgs,
nullptr, pGraph)) < 0){
goto fail;
}
ret = avfilter_graph_create_filter(&pFiltDst,
avfilter_get_by_name("buffersink"),
"ffplay_buffersink", nullptr, nullptr, pGraph);
avfilter_get_by_name("buffersink"),
"ffplay_buffersink", nullptr, nullptr, pGraph);
if (ret < 0){
goto fail;
}
@ -190,14 +190,14 @@ int HardH264FFmpegDecode::HardH264FFmpegDecoderConfigureVideoFilters(AVFilterGra
pDecoderFilterIn = pFiltSrc;
pDecoderFilterOut = pFiltDst;
fail:
fail:
return ret;
}
int HardH264FFmpegDecode::HardH264FFmpegDecoder(AVCodecContext *pDecCtx, AVFrame *pFrame, AVPacket *pPkt, void* pOutputData, unsigned int* puiOutputDataSize)
{
int ret;
AVFilterGraph* pDecoderGraph = nullptr;
int ret;
AVFilterGraph* pDecoderGraph = nullptr;
ret = avcodec_send_packet(pDecCtx, pPkt); //接收packet解码
if (ret < 0) {
@ -208,7 +208,7 @@ int HardH264FFmpegDecode::HardH264FFmpegDecoder(AVCodecContext *pDecCtx, AVFrame
while (ret >= 0) {
ret = avcodec_receive_frame(pDecCtx, pFrame); //解码
if (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF){
fprintf(stderr, "During decoding eof\n");
fprintf(stderr, "During decoding eof\n");
return -1;
}
else if (ret < 0) {
@ -219,35 +219,35 @@ int HardH264FFmpegDecode::HardH264FFmpegDecoder(AVCodecContext *pDecCtx, AVFrame
//printf("saving frame %3d\n", pDecCtx->frame_number);
fflush(stdout);
AVFilterContext *pDecoderFilterIn = nullptr, *pDecoderFilterOut = nullptr;
AVFilterContext *pDecoderFilterIn = nullptr, *pDecoderFilterOut = nullptr;
// pFrame->width = ALIGN_DOWN(pFrame->width, 32);
// pFrame->height = ALIGN_DOWN(pFrame->height, 32);
// printf("pFrame->width: %d\tpFrame->height: %d\n", pFrame->width, pFrame->height);
pDecoderGraph = avfilter_graph_alloc();
HardH264FFmpegDecoderConfigureVideoFilters(pDecoderGraph, pDecoderFilterIn, pDecoderFilterOut, pFrame->width, pFrame->height, pFrame->format);
if (pFrame->format != AV_PIX_FMT_YUV420P){
if (pFrame->format != AV_PIX_FMT_YUV420P){
DUMP_FRAME(pFrame);
ret = av_buffersrc_add_frame(pDecoderFilterIn, pFrame);
ret = av_buffersrc_add_frame(pDecoderFilterIn, pFrame);
ret = av_buffersink_get_frame_flags(pDecoderFilterOut, pFrame, 0);
DUMP_FRAME(pFrame);
int iSize = pFrame->width * pFrame->height;
memcpy(pOutputData, pFrame->data[0], iSize); //Y
memcpy(pOutputData+iSize, pFrame->data[1], iSize/4); //U
memcpy(pOutputData+iSize+iSize/4, pFrame->data[2], iSize/4); //V
*puiOutputDataSize = iSize*3/2;
return iSize*3/2;
}
}
return 0;
int iSize = pFrame->width * pFrame->height;
memcpy(pOutputData, pFrame->data[0], iSize); //Y
memcpy(pOutputData+iSize, pFrame->data[1], iSize/4); //U
memcpy(pOutputData+iSize+iSize/4, pFrame->data[2], iSize/4); //V
*puiOutputDataSize = iSize*3/2;
return iSize*3/2;
}
}
return 0;
}
int HardH264FFmpegDecode::HardH264FFmpegDecoderV2(AVCodecContext *pDecCtx, SwsContext *pSwsCtx, AVFrame *pSrcFrame, AVFrame *pDstFrame, AVPacket *pPkt, void* pOutputData,unsigned int* puiOutputDataSize)
{
int ret;
int ret;
ret = avcodec_send_packet(pDecCtx, pPkt); //接收packet解码
if (ret < 0) {
@ -258,7 +258,7 @@ int HardH264FFmpegDecode::HardH264FFmpegDecoderV2(AVCodecContext *pDecCtx, SwsCo
while (ret >= 0) {
ret = avcodec_receive_frame(pDecCtx, pSrcFrame); //解码
if (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF){
fprintf(stderr, "During decoding eof\n");
fprintf(stderr, "During decoding eof\n");
return -1;
}
else if (ret < 0) {
@ -266,7 +266,7 @@ int HardH264FFmpegDecode::HardH264FFmpegDecoderV2(AVCodecContext *pDecCtx, SwsCo
exit(1);
}
// pDecCtx->width = ALIGN_DOWN(pDecCtx->width, 32);
// pDecCtx->width = ALIGN_DOWN(pDecCtx->width, 32);
// pDecCtx->height = ALIGN_DOWN(pDecCtx->height, 32);
sws_scale(pSwsCtx,
@ -280,13 +280,13 @@ int HardH264FFmpegDecode::HardH264FFmpegDecoderV2(AVCodecContext *pDecCtx, SwsCo
//printf("saving frame %3d\n", pDecCtx->frame_number);
fflush(stdout);
int iSize = pDecCtx->width * pDecCtx->height;
int iSize = pDecCtx->width * pDecCtx->height;
memcpy(pOutputData, pDstFrame->data[0], iSize); //Y
memcpy(pOutputData+iSize, pDstFrame->data[1], iSize/4); //U
memcpy(pOutputData+iSize+iSize/4, pDstFrame->data[2], iSize/4); //V
*puiOutputDataSize = iSize*3/2;
return iSize*3/2;
}
return 0;
memcpy(pOutputData, pDstFrame->data[0], iSize); //Y
memcpy(pOutputData+iSize, pDstFrame->data[1], iSize/4); //U
memcpy(pOutputData+iSize+iSize/4, pDstFrame->data[2], iSize/4); //V
*puiOutputDataSize = iSize*3/2;
return iSize*3/2;
}
return 0;
}

View File

@ -56,7 +56,7 @@ extern "C"
frame->linesize[2] \
);}
#define NVIDIA_H264_DECODER "h264_cuvid"
#define NVIDIA_H264_DECODER "h264_cuvid"
// #define NVIDIA_H264_DECODER "h264_v4l2m2m"
class HardH264FFmpegDecode
@ -69,21 +69,21 @@ public:
int HardH264FFmpegDecoderDeInit();
int HardH264FFmpegDecoder(AVCodecContext *pDecCtx, AVFrame *pFrame, AVPacket *pPkt, void* pOutputData, unsigned int* puiOutputDataSize);
int HardH264FFmpegDecoderV2(AVCodecContext *pDecCtx, SwsContext *pSwsCtx, AVFrame *pSrcFrame, AVFrame *pDstFrame, AVPacket *pPkt, void* pOutputData, unsigned int* puiOutputDataSize);
const AVCodec *pCodec_ = nullptr; //解码器
AVCodecContext *pCodecCtx_ = nullptr; //上下文
AVCodecParserContext *pCodecParserCtx_ = nullptr; //解析器上下文
AVFrame *pSrcFrame_ = nullptr;
AVFrame *pDstFrame_ = nullptr;
AVPacket *pPacket_ = nullptr;
SwsContext *pSwsContext_ = nullptr;
AVCodecParserContext *pCodecParserCtx_ = nullptr; //解析器上下文
AVFrame *pSrcFrame_ = nullptr;
AVFrame *pDstFrame_ = nullptr;
AVPacket *pPacket_ = nullptr;
SwsContext *pSwsContext_ = nullptr;
uint8_t *pu8OutBuffer_ = nullptr;
uint8_t *pu8OutBuffer_ = nullptr;
private:
int HardH264FFmpegDecoderFilterGraph(AVFilterGraph *pGraph, AVFilterContext *pSourceCtx, AVFilterContext *pSinkCtx);
int HardH264FFmpegDecoderConfigureVideoFilters(AVFilterGraph *pGraph, AVFilterContext* &pDecoderFilterIn, AVFilterContext* &pDecoderFilterOut, const int iWidth, const int iHeight, const int iFormat);
unsigned int uiWidth_, uiHeight_;
int iFrameFinished_;

View File

@ -56,7 +56,7 @@ APP_ERROR VideoDecodeEngine::Process()
}
int iRet = APP_ERR_OK;
int iSkipCount = 1;
int iSkipCount = 1;
int iNoCameraDataCnt = 0;
while (!isStop_)
{
@ -67,19 +67,19 @@ APP_ERROR VideoDecodeEngine::Process()
{
usleep(10*1000); //10ms
iNoCameraDataCnt++;
if (iNoCameraDataCnt >= 1000) //10秒内收不到认为相机断开
{
LogError << "engineId:" << engineId_ << " 超过10秒获取到摄像头数据疑似摄像头断开。计数" << iNoCameraDataCnt;
iNoCameraDataCnt = 0;
//camera异常时构造空的解码数据push确保一直有数据流转到后面Engine
std::shared_ptr<ProcessData> pProcessData = std::make_shared<ProcessData>();
pProcessData->iDataSource = engineId_;
pProcessData->i64TimeStamp = MyUtils::getins()->GetCurrentTimeMillis();
pProcessData->iSize = 0;
pProcessData->pData = nullptr;
iRet = outputQueMap_[strPort0_]->push(std::static_pointer_cast<void>(pProcessData));
}
// iNoCameraDataCnt++;
// if (iNoCameraDataCnt >= 1000) //10秒内收不到认为相机断开
// {
// LogError << "engineId:" << engineId_ << " 超过10秒获取到摄像头数据疑似摄像头断开。计数" << iNoCameraDataCnt;
// iNoCameraDataCnt = 0;
// //camera异常时构造空的解码数据push确保一直有数据流转到后面Engine
// std::shared_ptr<ProcessData> pProcessData = std::make_shared<ProcessData>();
// pProcessData->iDataSource = engineId_;
// pProcessData->i64TimeStamp = MyUtils::getins()->GetCurrentTimeMillis();
// pProcessData->iSize = 0;
// pProcessData->pData = nullptr;
// iRet = outputQueMap_[strPort0_]->push(std::static_pointer_cast<void>(pProcessData));
// }
continue;
}
@ -111,8 +111,8 @@ APP_ERROR VideoDecodeEngine::Process()
std::shared_ptr<void> pYUVData;
pYUVData.reset(pYUV420MBuffer, [](void *data){if(data) {delete[] data; data = nullptr;}}); //智能指针管理内存
hard_h264_ffmpeg_decoder_->pPacket_->data = static_cast<uint8_t *>(pProcessData->pData.get()); //这里填入一个指向完整H264数据帧的指针
hard_h264_ffmpeg_decoder_->pPacket_->size = pProcessData->iSize; //这个填入H264数据帧的大小
hard_h264_ffmpeg_decoder_->pPacket_->data = static_cast<uint8_t *>(pProcessData->pData.get()); //这里填入一个指向完整H264数据帧的指针
hard_h264_ffmpeg_decoder_->pPacket_->size = pProcessData->iSize; //这个填入H264数据帧的大小
// H264硬件解码
// int iDecodeRet= hard_h264_ffmpeg_decoder_->HardH264FFmpegDecoderV2(hard_h264_ffmpeg_decoder_->pCodecCtx_, hard_h264_ffmpeg_decoder_->pFrame_,

View File

@ -51,7 +51,7 @@ APP_ERROR MoveEngine::Init()
}
InitParam();
LogInfo << "MoveEngine Init ok";
return APP_ERR_OK;
}
@ -111,13 +111,8 @@ APP_ERROR MoveEngine::ReadModelInfo()
model_width = jvModelInfo["model_width"].asInt();
model_height = jvModelInfo["model_height"].asInt();
//clear_num = jvModelInfo["clear"].isArray() ? jvModelInfo["clear"].size() : 0;
//class_num = jvModelInfo["class"].isArray() ? jvModelInfo["class"].size() : 0;
input_size = GET_INPUT_SIZE(model_width , model_height);
output_size = GET_OUTPUT_SIZE(model_width , model_height, clear_num , class_num);
// det_size = clear_num + class_num + 5;
// score_threshold = modelConfig_.fScoreThreshold;
// nms_threshold = modelConfig_.fNMSTreshold;
return APP_ERR_OK;
}
@ -146,8 +141,8 @@ void MoveEngine::InitParam()
}
void MoveEngine::sendComeTrain() {
// std::string message = "{\"cometime\":" + this->strTrainDate_ + " " + this->strTrainName_ + "\",\"type\":\"1\"}";
// outputQueMap_[engineName_ + "_" + std::to_string(engineId_) + "_1"]->push(std::static_pointer_cast<void>(std::make_shared<std::string>(message)));
std::string message = "{\"cometime\":\"" + this->strTrainDate_ + " " + this->strTrainName_ + "\",\"type\":\"1\"}";
outputQueMap_[engineName_ + "_" + std::to_string(engineId_) + "_1"]->push(std::static_pointer_cast<void>(std::make_shared<std::string>(message)));
}
void MoveEngine::sendEndTrain() {
@ -197,7 +192,7 @@ void MoveEngine::SingleDeviceProcess(std::shared_ptr<ProcessData> pProcessData,
{
outputQueMap_[engineName_ + "_" + std::to_string(engineId_) + "_" + std::to_string(*iter)]->push(std::static_pointer_cast<void>(pMoveData));
}
//通知第一步开始识别
//通知第一步开始识别
outputQueMap_[engineName_ + "_" + std::to_string(engineId_) + "_5"]->push(std::static_pointer_cast<void>(pMoveData));
}
@ -274,6 +269,7 @@ APP_ERROR MoveEngine::Process()
memset(fReturnVal, 0x00, sizeof(fReturnVal));
yolov8model.YoloV8InferenceModelGetType(img, fReturnVal, STEP0_OUTPUT_ARRAY * sizeof(float));
// exit(0);
float fScore = 0.0f;
for(int n = 0; n < 4; n++){
@ -282,7 +278,7 @@ APP_ERROR MoveEngine::Process()
nType = n;
}
}
LogDebug <<"模型得分 车头:"<< fReturnVal[0]<<" 无车:"<< fReturnVal[1]<<" 车尾:"<< fReturnVal[2]<<" 有车:"<< fReturnVal[3];
// LogDebug <<"模型得分 车头:"<< fReturnVal[0]<<" 无车:"<< fReturnVal[1]<<" 车尾:"<< fReturnVal[2]<<" 有车:"<< fReturnVal[3];
// LogInfo<<"来车当前状态:"<< (nType == 0 ? "有车头" : (nType == 1 ? "无车"));
switch (nType) {
case 0:
@ -315,7 +311,7 @@ APP_ERROR MoveEngine::Process()
if (bGetTrainExist == true)
{
iHasTrainNum_ = iHasTrainNum_ > 20 ? iHasTrainNum_ : iHasTrainNum_ + 1;
if (iHasTrainNum_ > 0) LogDebug << "当前有车, 计数:" << iHasTrainNum_;
// if (iHasTrainNum_ > 0) LogDebug << "当前有车, 计数:" << iHasTrainNum_;
}
else
{
@ -334,33 +330,33 @@ APP_ERROR MoveEngine::Process()
{
queProcessData_.push(pProcessData);
LogDebug << "iStepInter_: " << iStepInter_ << " queSize:" << queProcessData_.size() << " continue";
LogDebug << "iStepInter_: " << iStepInter_ << " queSize:" << queProcessData_.size() << " continue";
continue;
}
// if (iStepInter_ != 1) this->sendComeTrain();
if (iStepInter_ != 1) this->sendComeTrain();
iStepInter_ = 1;
}
//无车停止识别
//无车停止识别
else
{
if (iStepInter_ == 1)
{
iStepInter_ = 2;
this->sendEndTrain();
}
while (!queProcessData_.empty())
{
LogDebug << "while iStepInter_: " << iStepInter_ << " queSize:" << queProcessData_.size();
LogDebug << "while iStepInter_: " << iStepInter_ << " queSize:" << queProcessData_.size();
queProcessData_.pop();
}
this->sendEndTrain();
}
//有车识别处理
if (iStepInter_ != 0)
{
while (!queProcessData_.empty())
{
LogDebug << "while2 iStepInter_: " << iStepInter_ << " queSize:" << queProcessData_.size();
LogDebug << "while2 iStepInter_: " << iStepInter_ << " queSize:" << queProcessData_.size();
std::shared_ptr<ProcessData> pProcessDataTemp = queProcessData_.front();
queProcessData_.pop();
pProcessDataTemp->iStatus = TRAINSTATUS_RUN;
@ -372,7 +368,7 @@ APP_ERROR MoveEngine::Process()
pProcessData->bIsEnd = ((iStepInter_ == 2) ? true : false); //动态检测无车,设置列车结束标识
SingleDeviceProcess(pProcessData, nType);
if (iStepInter_ == 2)
{
// this->sendEndTrain();

View File

@ -14,7 +14,7 @@ APP_ERROR TrainStepOneEngine::Init()
bUseEngine_ = MyUtils::getins()->ChkIsHaveTarget("NUM");
if (!bUseEngine_)
{
LogWarn << "engineId_:" << engineId_ << " not use engine";
LogInfo << "engineId_:" << engineId_ << " not use engine";
return APP_ERR_OK;
}
@ -98,7 +98,7 @@ APP_ERROR TrainStepOneEngine::InitModel()
int nRet = yolov5model.YoloV5ClearityInferenceInit(&modelinfo, strModelName, modelConfig_.strOmPath);
if (nRet != 0)
{
LogInfo << "YoloV5ClassifyInferenceInit nRet:" << nRet;
LogError << "YoloV5ClassifyInferenceInit nRet:" << nRet;
return APP_ERR_COMM_READ_FAIL;
}
return APP_ERR_OK;
@ -147,7 +147,7 @@ APP_ERROR TrainStepOneEngine::DeInit()
{
if (!bUseEngine_)
{
LogWarn << "engineId_:" << engineId_ << " not use engine";
LogInfo << "engineId_:" << engineId_ << " not use engine";
return APP_ERR_OK;
}
@ -170,7 +170,7 @@ void TrainStepOneEngine::PushData(const std::string &strPort, const std::shared_
int iRet = outputQueMap_[strPort]->push(std::static_pointer_cast<void>(pProcessData));
if (iRet != 0)
{
LogDebug << "sourceid:" << pProcessData->iDataSource << " frameid:" << pProcessData->iFrameId << " push fail iRet:" << iRet;
LogError << " frameid:" << pProcessData->iFrameId << " push fail iRet:" << iRet;
if (iRet == 2)
{
usleep(10000); // 10ms
@ -194,7 +194,7 @@ void TrainStepOneEngine::FilterInvalidInfo(std::vector<stDetection> &vecRet, std
std::vector<stDetection> vecSpaceInfo;
for (auto it = vecRet.begin(); it != vecRet.end();)
{
LogDebug << "sourceid:" << pProcessData->iDataSource << " frameId:" << pProcessData->iFrameId
LogDebug << "frameId:" << pProcessData->iFrameId
<< " bigclassid:" << it->class_id << " ltx:" << it->bbox[0] << " lty:" << it->bbox[1]
<< " rbx:" << it->bbox[2] << " rby:" << it->bbox[3];
// 根据配置文件中 设置的识别范围,过滤掉无效数据
@ -203,49 +203,75 @@ void TrainStepOneEngine::FilterInvalidInfo(std::vector<stDetection> &vecRet, std
it->bbox[2] <= dataSourceCfg.fIdentifyAreasRBX &&
it->bbox[3] <= dataSourceCfg.fIdentifyAreasRBY))
{
LogWarn << "sourceid:" << pProcessData->iDataSource << " frameId:" << pProcessData->iFrameId
<< " bigclassid:" << it->class_id << " invalid areas";
LogDebug << "frameId:" << pProcessData->iFrameId
<< " bigclassid:" << it->class_id << " 超出识别区域-识别区域:("
<< dataSourceCfg.fIdentifyAreasLTX << "," << dataSourceCfg.fIdentifyAreasLTY << "),("
<< dataSourceCfg.fIdentifyAreasRBX << "," << dataSourceCfg.fIdentifyAreasRBY << ")";
it = vecRet.erase(it);
continue;
}
// 去除车头车尾的间隔信息
// 如果设置了不识别车头,则去掉车头标记的大框
if (!MyYaml::GetIns()->GetBoolValue("gc_train_heard_detect") && it->class_id == TRAIN_HEAD)
{
LogDebug << "frameId:" << pProcessData->iFrameId << " 过滤掉车头编号";
it = vecRet.erase(it);
continue;
}
// 去除车头时的非车头编号信息
if(pProcessData->nMonitorState == MONITOR_MODEL_TRAIN_HEAD )
{
LogWarn<<" pProcessData->nMonitorState:" << pProcessData->nMonitorState;
if(it->class_id != TRAIN_HEAD)
if(it->class_id != TRAIN_HEAD)
{
LogError << "sourceid:" << pProcessData->iDataSource << " frameId:" << pProcessData->iFrameId
<< " bigclassid:" << it->class_id << " pProcessData->nMonitorState:" << pProcessData->nMonitorState
<< " invalid";
LogDebug << " 帧号:" << pProcessData->iFrameId
<< " 大类:" << it->class_id << " 识别于车头位置,无效!";
it = vecRet.erase(it);
continue;
}
}
if (pProcessData->nMonitorState == MONITOR_MODEL_TRAIN_TAIL )
// 去除车尾的车头编号信息
if (pProcessData->nMonitorState != MONITOR_MODEL_TRAIN_HEAD)
{
LogWarn<<" pProcessData->nMonitorState:" << pProcessData->nMonitorState;
/*if(
(it->class_id <= U_TRAIN_SPACE)
&& (it->class_id >= C_TRAIN_SPACE)
&& (it->class_id != W_TRAIN_NUM)
)*/
if (it->class_id == TRAIN_HEAD)
{
LogError << "sourceid:" << pProcessData->iDataSource << " frameId:" << pProcessData->iFrameId
<< " bigclassid:" << it->class_id << " pProcessData->nMonitorState:" << pProcessData->nMonitorState
<< " invalid";
LogDebug << " 帧号:" << pProcessData->iFrameId
<< " 大类:" << it->class_id << " 识别于非车头位置,无效!";
it = vecRet.erase(it);
continue;
}
}
// 去除车尾的间隔信息
if (pProcessData->nMonitorState == MONITOR_MODEL_TRAIN_TAIL
&& ((it->class_id >= 9 && it->class_id <= 17 && it->class_id != 15) || it->class_id == 18))
{
LogDebug << " frameId:" << pProcessData->iFrameId
<< " bigclassid:" << it->class_id
<<" 识别于车尾部分,无效!";
it = vecRet.erase(it);
continue;
}
// 过滤掉识别于模型反馈无车状态下的所有大框信息
if (pProcessData->nMonitorState == MONITOR_MODEL_NO_TRAIN)
{
LogDebug << " frameId:" << pProcessData->iFrameId
<< " bigclassid:" << it->class_id
<<" 识别于模型反馈的无车状态下,无效!";
it = vecRet.erase(it);
continue;
}
// 按大框高度剔除远股道识别的信息
int iClassHeight = it->bbox[3] - it->bbox[1];
if (dataSourceCfg.mapClassMinH.find(it->class_id) != dataSourceCfg.mapClassMinH.end() &&
iClassHeight < dataSourceCfg.mapClassMinH[it->class_id])
{
LogError << "sourceid:" << pProcessData->iDataSource << " frameId:" << pProcessData->iFrameId
LogDebug << " frameId:" << pProcessData->iFrameId
<< " bigclassid:" << it->class_id << " iClassHeight:" << iClassHeight
<< " minH:" << dataSourceCfg.mapClassMinH[it->class_id] << " invalid hegiht";
<< " minH:" << dataSourceCfg.mapClassMinH[it->class_id] << " 过滤疑似远股道识别";
it = vecRet.erase(it);
continue;
}
@ -256,7 +282,7 @@ void TrainStepOneEngine::FilterInvalidInfo(std::vector<stDetection> &vecRet, std
{
if (it->class_id != 1 && it->class_id != 6)
{
LogDebug << "sourceid:" << pProcessData->iDataSource << " frameId:" << pProcessData->iFrameId << " flat camera only deal 1 or 6";
LogDebug << " frameId:" << pProcessData->iFrameId << " flat camera only deal 1 or 6";
it = vecRet.erase(it);
continue;
}
@ -266,13 +292,13 @@ void TrainStepOneEngine::FilterInvalidInfo(std::vector<stDetection> &vecRet, std
if (((it->class_id >= 2 && it->class_id <= 6) || it->class_id == J_TRAIN_NUM || it->class_id == W_TRAIN_NUM) &&
(it->bbox[3] - it->bbox[1]) > (it->bbox[2] - it->bbox[0]))
{
LogWarn << "sourceid:" << pProcessData->iDataSource << " frameId:" << pProcessData->iFrameId
<< " bigclassid:" << it->class_id << " invalid data-- height > width ";
LogWarn << " frameId:" << pProcessData->iFrameId
<< " bigclassid:" << it->class_id << " 过滤 高度大于宽度的车号";
it = vecRet.erase(it);
continue;
}
if (((it->class_id >= 2 && it->class_id <= 6) || it->class_id == J_TRAIN_NUM || it->class_id == W_TRAIN_NUM) &&
(it->bbox[3] - it->bbox[1]) < MyYaml::GetIns()->GetIntValue("gc_num_frame_height"))
(it->bbox[3] - it->bbox[1]) < MyYaml::GetIns()->GetIntValue("gc_num_frame_height"))
{
LogWarn << "疑似误识别到远股道车号,帧号:" << pProcessData->iFrameId
<< "大框高度:" << (it->bbox[3] - it->bbox[1]);
@ -281,7 +307,7 @@ void TrainStepOneEngine::FilterInvalidInfo(std::vector<stDetection> &vecRet, std
}
if ((it->class_id == 1 || it->class_id == TRAIN_PRO)
&& (it->bbox[3] - it->bbox[1]) < MyYaml::GetIns()->GetIntValue("gc_pro_frame_height")) {
&& (it->bbox[3] - it->bbox[1]) < MyYaml::GetIns()->GetIntValue("gc_pro_frame_height")) {
LogWarn << "疑似误识别到远股道属性,帧号:" << pProcessData->iFrameId
<< "大框高度:" << (it->bbox[3] - it->bbox[1]);
it = vecRet.erase(it);
@ -301,8 +327,8 @@ void TrainStepOneEngine::FilterInvalidInfo(std::vector<stDetection> &vecRet, std
{
if (it->bbox[3] < (pProcessData->iHeight * iSpaceMinRBXPer_ / 100))
{
LogWarn << "sourceid:" << pProcessData->iDataSource << " frameId:" << pProcessData->iFrameId
<< " bigclassid:" << it->class_id << " spaceinfo invalid fRBY:" << it->bbox[3];
LogWarn << " frameId:" << pProcessData->iFrameId
<< " bigclassid:" << it->class_id << " 过滤间隔过于靠下的间隔信息 fRBY:" << it->bbox[3];
it = vecRet.erase(it);
continue;
}
@ -320,10 +346,10 @@ void TrainStepOneEngine::FilterInvalidInfo(std::vector<stDetection> &vecRet, std
int iCenterY = pProcessData->iHeight / 2;
if (iHeight0 < iCenterY && iHeight1 < iCenterY) //非平车
{
if (!((vecRet[0].class_id >= 9 && vecRet[0].class_id <= 17 && vecRet[0].class_id != 15) || vecRet[0].class_id == U_TRAIN_SPACE) &&
!((vecRet[1].class_id >= 9 && vecRet[10].class_id <= 17 && vecRet[1].class_id != 15) || vecRet[1].class_id == U_TRAIN_SPACE))
if (!((vecRet[0].class_id >= 9 && vecRet[0].class_id <= 17 && vecRet[0].class_id != 15) || vecRet[0].class_id == U_TRAIN_SPACE) &&
!((vecRet[1].class_id >= 9 && vecRet[1].class_id <= 17 && vecRet[1].class_id != 15) || vecRet[1].class_id == U_TRAIN_SPACE))
{
LogError << "sourceid:" << pProcessData->iDataSource << " frameId:" << pProcessData->iFrameId << " no space";
LogDebug << " frameId:" << pProcessData->iFrameId << " no space";
vecRet.clear();
}
}
@ -387,7 +413,7 @@ APP_ERROR TrainStepOneEngine::Process()
{
if (!bUseEngine_)
{
LogWarn << "engineId_:" << engineId_ << " not use engine";
LogInfo << "engineId_:" << engineId_ << " not use engine";
return APP_ERR_OK;
}
int iRet = APP_ERR_OK;
@ -408,7 +434,7 @@ APP_ERROR TrainStepOneEngine::Process()
pPostData->iModelType = MODELTYPE_NUM;
pPostData->nMonitorState = pProcessData->nMonitorState; //来车检测的四个分类
//获取图片
//获取图片
if (pProcessData->iStatus == TRAINSTATUS_RUN || pProcessData->bIsEnd)
{
if (pProcessData->pData != nullptr && pProcessData->iSize != 0)
@ -417,10 +443,7 @@ APP_ERROR TrainStepOneEngine::Process()
//进行推理
std::vector<stDetection> res;
//auto start = std::chrono::system_clock::now(); //计时开始
yolov5model.YoloV5ClearityInferenceModel(img, res);
//auto end = std::chrono::system_clock::now();
//LogInfo << "nopr1 inference time: " << std::chrono::duration_cast<std::chrono::milliseconds>(end - start).count() << "ms";
//过滤无效信息
FilterInvalidInfo(res, pProcessData);
@ -461,10 +484,10 @@ APP_ERROR TrainStepOneEngine::Process()
SetTargetType(postSubData);
pPostData->vecPostSubData.emplace_back(postSubData);
LogDebug << "sourceid:" << pProcessData->iDataSource << " frameId:" << pProcessData->iFrameId
<< " --iClassId:" << singledata.iClassId << " iLine:" << singledata.iLine << " confidence=" << singledata.fScore
<< " lx=" << singledata.fLTX << " ly=" << singledata.fLTY << " rx=" << singledata.fRBX << " ry=" << singledata.fRBY
<< " clear:" << singledata.fClear;
// LogDebug << "数据源:" << pProcessData->iDataSource << " 帧:" << pProcessData->iFrameId
// << " --iClassId:" << singledata.iClassId << " iLine:" << singledata.iLine << " confidence=" << singledata.fScore
// << " lx=" << singledata.fLTX << " ly=" << singledata.fLTY << " rx=" << singledata.fRBX << " ry=" << singledata.fRBY
// << " clear:" << singledata.fClear;
}
}
}

View File

@ -193,7 +193,7 @@ APP_ERROR TrainStepTwoEngine::Process()
auto start = std::chrono::system_clock::now(); // 计时开始
yolov5model.YoloV5ClearityInferenceModel(step2_image, res);
auto end = std::chrono::system_clock::now();
LogInfo << "nopr2 inference time: " << std::chrono::duration_cast<std::chrono::milliseconds>(end - start).count() << "ms";
// LogInfo << "nopr2 inference time: " << std::chrono::duration_cast<std::chrono::milliseconds>(end - start).count() << "ms";
PostSubData postSubDataNew;
postSubDataNew.iTargetType = postsubdata.iTargetType;
@ -221,9 +221,9 @@ APP_ERROR TrainStepTwoEngine::Process()
postSubDataNew.vecSingleData.emplace_back(singledata);
LogDebug << "sourceid:" << pProcessData->iDataSource << " step2 after frameId:" << pProcessData->iFrameId
<< " --iClassId:" << singledata.iClassId << " iLine:" << singledata.iLine << " confidence=" << singledata.fScore
<< " lx=" << singledata.fLTX << " ly=" << singledata.fLTY << " rx=" << singledata.fRBX << " ry=" << singledata.fRBY;
// LogDebug << "sourceid:" << pProcessData->iDataSource << " step2 after frameId:" << pProcessData->iFrameId
// << " --iClassId:" << singledata.iClassId << " iLine:" << singledata.iLine << " confidence=" << singledata.fScore
// << " lx=" << singledata.fLTX << " ly=" << singledata.fLTY << " rx=" << singledata.fRBX << " ry=" << singledata.fRBY;
}
pPostData->vecPostSubData.emplace_back(postSubDataNew);
}

View File

@ -464,12 +464,12 @@ void yolov5_preprocess_kernel_img(
s2d.value[0] = scale;
s2d.value[1] = 0;
s2d.value[2] = 0; //左上顶点贴图
// s2d.value[2] = -scale * src_width * 0.5 + dst_width * 0.5; //中心贴图
// s2d.value[2] = 0; //左上顶点贴图
s2d.value[2] = -scale * src_width * 0.5 + dst_width * 0.5; //中心贴图
s2d.value[3] = 0;
s2d.value[4] = scale;
s2d.value[5] = 0; //左上顶点贴图
// s2d.value[5] = -scale * src_height * 0.5 + dst_height * 0.5; //中心贴图
// s2d.value[5] = 0; //左上顶点贴图
s2d.value[5] = -scale * src_height * 0.5 + dst_height * 0.5; //中心贴图
cv::Mat m2x3_s2d(2, 3, CV_32F, s2d.value);
cv::Mat m2x3_d2s(2, 3, CV_32F, d2s.value);