diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..77b2158 --- /dev/null +++ b/.gitignore @@ -0,0 +1,7 @@ +/build/ +/app/ +*.tgz +*.tar +*.log +*.o +*.out diff --git a/CMakeLists.txt b/CMakeLists.txt index bc0fcf7..04908e9 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -5,7 +5,7 @@ message("NVIDIA NX PLATFORM") set(PROJECT_NAME train) project(${PROJECT_NAME} VERSION 1.0) - + add_definitions(-std=c++11) add_definitions(-DAPI_EXPORTS) @@ -20,6 +20,8 @@ find_package(OpenCV REQUIRED) # message(STATUS "${OpenCV_LIBS}") # message(STATUS "${OpenCV_INCLUDE_DIRS}") +find_package(CUDA REQUIRED) + # 设置编译工具 set(CMAKE_SYSTEM_NAME Linux) set(CMAKE_C_COMPILER "gcc") @@ -39,15 +41,19 @@ set(SYS_USR_INCLUDE_DIR "/usr/include") set(SYS_USR_LIB_DIR "/usr/lib") set(SYS_USR_LOCAL_INCLUDE_DIR "/usr/local/include") set(SYS_USR_LOCAL_LIB_DIR "/usr/local/lib") +# -- X86下使用 -- set(AARCH64_LINUX_INCLUDE_DIR "/usr/include/x86_64-linux-gnu") set(AARCH64_LINUX_LIB_DIR "/usr/lib/x86_64-linux-gnu") +# -- ARM下使用 -- +#set(AARCH64_LINUX_INCLUDE_DIR "/usr/include/aarch64-linux-gnu") +#set(AARCH64_LINUX_LIB_DIR "/usr/lib/aarch64-linux-gnu") #opencv3.2.0的库文件在/usr/lib/aarch64-linux-gnu下 头文件路径在/usr/include/opencv2 #opencv4.5.5的库文件在/usr/local/lib下 头文件路径在/usr/local/include/opencv4 #目前使用最新版opencv4.5.5 -set(OPENCV_INCLUDE_DIR ${SYS_USR_LOCAL_INCLUDE_DIR}/opencv4) -set(OPENCV_LIB_DIR ${SYS_USR_LOCAL_LIB_DIR}) +set(OPENCV_INCLUDE_DIR ${SYS_USR_LOCAL_INCLUDE_DIR}/opencv4) +set(OPENCV_LIB_DIR ${SYS_USR_LOCAL_LIB_DIR}) set(CUDA_DIR "/usr/local/cuda-11.7") set(CUDA_INCLUDE_DIR ${CUDA_DIR}/include) @@ -60,80 +66,80 @@ set(DRM_INCLUDE_DIR ${SYS_USR_INCLUDE_DIR}/libdrm) #DRM的头文件在/usr/incl set(TEGRA_LIB_DIR ${AARCH64_LINUX_LIB_DIR}/tegra) #tegra库文件路径/usr/lib/aarch64-linux-gnu/tegra -set(PCL_INCLUDE ${SYS_USR_LOCAL_INCLUDE_DIR}/pcl-1.7) #pcl头文件路径 +#set(PCL_INCLUDE ${SYS_USR_LOCAL_INCLUDE_DIR}/pcl-1.7) #pcl头文件路径 # nvidia ascend common include include_directories( - #ai_matrix include - 
${PROJECT_SOURCE_DIR}/ai_matrix - ${PROJECT_SOURCE_DIR}/ai_matrix/framework - ${PROJECT_SOURCE_DIR}/ai_matrix/myftp - ${PROJECT_SOURCE_DIR}/ai_matrix/myhttp - ${PROJECT_SOURCE_DIR}/ai_matrix/myJson - ${PROJECT_SOURCE_DIR}/ai_matrix/myJson/json - ${PROJECT_SOURCE_DIR}/ai_matrix/mylog - ${PROJECT_SOURCE_DIR}/ai_matrix/pcl - ${PROJECT_SOURCE_DIR}/ai_matrix/myqueue - ${PROJECT_SOURCE_DIR}/ai_matrix/myshell - ${PROJECT_SOURCE_DIR}/ai_matrix/myutils + #ai_matrix include + ${PROJECT_SOURCE_DIR}/ai_matrix + ${PROJECT_SOURCE_DIR}/ai_matrix/framework + ${PROJECT_SOURCE_DIR}/ai_matrix/myftp + ${PROJECT_SOURCE_DIR}/ai_matrix/myhttp + ${PROJECT_SOURCE_DIR}/ai_matrix/myJson + ${PROJECT_SOURCE_DIR}/ai_matrix/myJson/json + ${PROJECT_SOURCE_DIR}/ai_matrix/mylog + ${PROJECT_SOURCE_DIR}/ai_matrix/pcl + ${PROJECT_SOURCE_DIR}/ai_matrix/myqueue + ${PROJECT_SOURCE_DIR}/ai_matrix/myshell + ${PROJECT_SOURCE_DIR}/ai_matrix/myutils - #nvidia ascend common cann include - ${PROJECT_SOURCE_DIR}/nvidia_ascend_base/Base/BlockingQueue - ${PROJECT_SOURCE_DIR}/nvidia_ascend_base/Base/CBase64 - ${PROJECT_SOURCE_DIR}/nvidia_ascend_base/Base/CommandParser - ${PROJECT_SOURCE_DIR}/nvidia_ascend_base/Base/CommonDataType - ${PROJECT_SOURCE_DIR}/nvidia_ascend_base/Base/ConfigParser - ${PROJECT_SOURCE_DIR}/nvidia_ascend_base/Base/ErrorCode - ${PROJECT_SOURCE_DIR}/nvidia_ascend_base/Base/FileManager - ${PROJECT_SOURCE_DIR}/nvidia_ascend_base/Base/Log - ${PROJECT_SOURCE_DIR}/nvidia_ascend_base/Base/ + #nvidia ascend common cann include + ${PROJECT_SOURCE_DIR}/nvidia_ascend_base/Base/BlockingQueue + ${PROJECT_SOURCE_DIR}/nvidia_ascend_base/Base/CBase64 + ${PROJECT_SOURCE_DIR}/nvidia_ascend_base/Base/CommandParser + ${PROJECT_SOURCE_DIR}/nvidia_ascend_base/Base/CommonDataType + ${PROJECT_SOURCE_DIR}/nvidia_ascend_base/Base/ConfigParser + ${PROJECT_SOURCE_DIR}/nvidia_ascend_base/Base/ErrorCode + ${PROJECT_SOURCE_DIR}/nvidia_ascend_base/Base/FileManager + ${PROJECT_SOURCE_DIR}/nvidia_ascend_base/Base/Log + 
${PROJECT_SOURCE_DIR}/nvidia_ascend_base/Base/ - ${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/common - #common engine include - ${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/common_engine/ControlEngine - ${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/common_engine/DataSourceEngine - ${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/common_engine/DataUploadEngine - ${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/common_engine/FilterEngine - ${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/common_engine/MergerEngine - ${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/common_engine/PixelFormatConvertEngine - ${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/common_engine/SaveEngine - ${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/common_engine/SelectBestEngine - ${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/common_engine/TrainAnaEngine - ${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/common_engine/TransEngine - ${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/common_engine/DataDealEngine - ${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/common_engine/SocketEngine + ${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/common + #common engine include + ${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/common_engine/ControlEngine + ${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/common_engine/DataSourceEngine + ${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/common_engine/DataUploadEngine + ${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/common_engine/FilterEngine + ${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/common_engine/MergerEngine + ${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/common_engine/SaveEngine + ${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/common_engine/SelectBestEngine + ${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/common_engine/TrainAnaEngine + ${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/common_engine/TransEngine + ${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/common_engine/DataDealEngine + ${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/common_engine/SocketEngine + ${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/common_engine/DeleteExpiredFolderEngine - #common tools 
rtsp_server include - ${PROJECT_SOURCE_DIR}/nvidia_ascend_tools/common_tools/rtsp_server/3rdpart/md5 - ${PROJECT_SOURCE_DIR}/nvidia_ascend_tools/common_tools/rtsp_server/net/ - ${PROJECT_SOURCE_DIR}/nvidia_ascend_tools/common_tools/rtsp_server/xop/ + #common tools rtsp_server include + ${PROJECT_SOURCE_DIR}/nvidia_ascend_tools/common_tools/rtsp_server/3rdpart/md5 + ${PROJECT_SOURCE_DIR}/nvidia_ascend_tools/common_tools/rtsp_server/net/ + ${PROJECT_SOURCE_DIR}/nvidia_ascend_tools/common_tools/rtsp_server/xop/ ) include_directories( - #nvidia engine include - - ${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/nvidia_engine/ChkDateStepOneEngine - ${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/nvidia_engine/ChkDateStepTwoEngine - ${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/nvidia_engine/ContainerEngine - ${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/nvidia_engine/DecodeEngine - ${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/nvidia_engine/MoveEngine - ${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/nvidia_engine/MyYaml - ${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/nvidia_engine/TrainStepOneEngine - ${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/nvidia_engine/TrainStepTwoEngine + #nvidia engine include - #nvidia_tools yolov5 include - ${PROJECT_SOURCE_DIR}/nvidia_ascend_tools/nvidia_tools/yolov5/include + ${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/nvidia_engine/ChkDateStepOneEngine + ${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/nvidia_engine/ChkDateStepTwoEngine + ${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/nvidia_engine/ContainerEngine + ${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/nvidia_engine/DecodeEngine + ${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/nvidia_engine/MoveEngine + ${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/nvidia_engine/MyYaml + ${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/nvidia_engine/TrainStepOneEngine + ${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/nvidia_engine/TrainStepTwoEngine - #third party include - ${CUDA_INCLUDE_DIR} - ${TENSORRT_INCLUDE_DIR} - ${DRM_INCLUDE_DIR} - ${OpenCV_DIR} - 
${AARCH64_LINUX_INCLUDE_DIR} - ${SYS_USR_LOCAL_INCLUDE_DIR} - ${PCL_INCLUDE} + #nvidia_tools yolov5 include + ${PROJECT_SOURCE_DIR}/nvidia_ascend_tools/nvidia_tools/yolov5/include + + #third party include + ${CUDA_INCLUDE_DIR} + ${TENSORRT_INCLUDE_DIR} + ${DRM_INCLUDE_DIR} + ${OpenCV_DIR} + ${AARCH64_LINUX_INCLUDE_DIR} + ${SYS_USR_LOCAL_INCLUDE_DIR} +# ${PCL_INCLUDE} ) @@ -144,7 +150,7 @@ link_directories(${SYS_USR_LOCAL_LIB_DIR} ${CUDA_LIB_DIR} ${TENSORRT_LIB_DIR} ${TEGRA_LIB_DIR} - ) +) #源文件 #公共源文件 @@ -171,7 +177,6 @@ file(GLOB_RECURSE COMMON_SRCS_LISTS #common engine src ${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/common/*.cpp ${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/common_engine/DataSourceEngine/*.cpp - ${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/common_engine/PixelFormatConvertEngine/*.cpp ${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/common_engine/ControlEngine/*.cpp ${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/common_engine/DataUploadEngine/*.cpp @@ -182,26 +187,19 @@ file(GLOB_RECURSE COMMON_SRCS_LISTS ${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/common_engine/TransEngine/*.cpp ${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/common_engine/DataDealEngine/*.cpp ${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/common_engine/SocketEngine/*.cpp + ${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/common_engine/SocketEngine/*.cpp + ${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/common_engine/DataSourceEngine/*.cpp + ${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/common_engine/SaveEngine/*.cpp + ${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/common_engine/DeleteExpiredFolderEngine/*.cpp #common tools rtsp_server src ${PROJECT_SOURCE_DIR}/nvidia_ascend_tools/common_tools/rtsp_server/net/*.cpp ${PROJECT_SOURCE_DIR}/nvidia_ascend_tools/common_tools/rtsp_server/xop/*.cpp - ) +) file(GLOB_RECURSE SRCS_LISTS #nvidia engine src #nvidia engine include - ${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/common_engine/ControlEngine/*.cpp - 
${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/common_engine/DataSourceEngine/*.cpp - ${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/common_engine/DataUploadEngine/*.cpp - ${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/common_engine/FilterEngine/*.cpp - ${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/common_engine/MergerEngine/*.cpp - ${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/common_engine/PixelFormatConvertEngine/*.cpp - ${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/common_engine/SaveEngine/*.cpp - ${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/common_engine/SelectBestEngine/*.cpp - ${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/common_engine/TrainAnaEngine/*.cpp - ${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/common_engine/TransEngine/*.cpp - ${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/common_engine/DataDealEngine/*.cpp ${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/nvidia_engine/ChkDateStepOneEngine/*.cpp ${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/nvidia_engine/ChkDateStepTwoEngine/*.cpp @@ -217,7 +215,7 @@ file(GLOB_RECURSE SRCS_LISTS #nvidia tools yolov5 src ${PROJECT_SOURCE_DIR}/nvidia_ascend_tools/nvidia_tools/yolov5/src/*.cpp ${PROJECT_SOURCE_DIR}/nvidia_ascend_tools/nvidia_tools/yolov5/src/*.cu - ) +) cuda_add_executable(${PROJECT_NAME} ${COMMON_SRCS_LISTS} ${SRCS_LISTS}) @@ -227,11 +225,11 @@ target_link_libraries(${PROJECT_NAME} cudart cuda) #CUDA target_link_libraries(${PROJECT_NAME} ${OpenCV_LIBS} #third party librarys - ${PCL_LIBRARY_DIRS} - pcl_common pcl_io_ply pcl_keypoints pcl_registration pcl_segmentation pcl_features pcl_io pcl_octree #pcl - pcl_sample_consensus pcl_surface pcl_filters pcl_kdtree pcl_recognition pcl_search pcl_tracking +# ${PCL_LIBRARY_DIRS} +# pcl_common pcl_io_ply pcl_keypoints pcl_registration pcl_segmentation pcl_features pcl_io pcl_octree #pcl +# pcl_sample_consensus pcl_surface pcl_filters pcl_kdtree pcl_recognition pcl_search pcl_tracking avformat avcodec avutil avfilter swresample swscale postproc #VideoCodecV2 yaml-cpp https_sn jsoncpp curl boost_system 
boost_filesystem ssh2 -Wl,-z,relro,-z,now,-z,noexecstack -pie -s - ) \ No newline at end of file +) diff --git a/ai_matrix/myutils/myutils.cpp b/ai_matrix/myutils/myutils.cpp index 166a5f2..bcc12be 100644 --- a/ai_matrix/myutils/myutils.cpp +++ b/ai_matrix/myutils/myutils.cpp @@ -129,37 +129,37 @@ namespace ai_matrix return std::string(tmp); } - std::string MyUtils::get_date() - { - time_t timep = time(NULL); - struct tm *p = localtime(&timep); + std::string MyUtils::get_date() + { + time_t timep = time(NULL); + struct tm *p = localtime(&timep); - struct timeval tv; - gettimeofday(&tv, NULL); + struct timeval tv; + gettimeofday(&tv, NULL); - int msec = tv.tv_usec / 1000; + int msec = tv.tv_usec / 1000; - char tmp[12] = { 0 }; - sprintf(tmp, "%04d-%02d-%02d", 1900 + p->tm_year, 1 + p->tm_mon, p->tm_mday); + char tmp[12] = { 0 }; + sprintf(tmp, "%04d-%02d-%02d", 1900 + p->tm_year, 1 + p->tm_mon, p->tm_mday); - return std::string(tmp); - } + return std::string(tmp); + } - std::string MyUtils::get_time() - { - time_t timep = time(NULL); - struct tm *p = localtime(&timep); + std::string MyUtils::get_time() + { + time_t timep = time(NULL); + struct tm *p = localtime(&timep); - struct timeval tv; - gettimeofday(&tv, NULL); + struct timeval tv; + gettimeofday(&tv, NULL); - int msec = tv.tv_usec / 1000; + int msec = tv.tv_usec / 1000; - char tmp[10] = { 0 }; - sprintf(tmp, "%02d-%02d-%02d", p->tm_hour, p->tm_min, p->tm_sec); + char tmp[10] = { 0 }; + sprintf(tmp, "%02d-%02d-%02d", p->tm_hour, p->tm_min, p->tm_sec); - return std::string(tmp); - } + return std::string(tmp); + } std::string MyUtils::get_timestamp_log() { @@ -310,12 +310,12 @@ namespace ai_matrix return buffer.str(); } - /** - * 拷贝文件 - * @param filePath 源文件位置 - * @param savePath 将要拷贝的新位置 - * @return - */ + /** + * 拷贝文件 + * @param filePath 源文件位置 + * @param savePath 将要拷贝的新位置 + * @return + */ bool MyUtils::copyFile(std::string filePath, std::string savePath) { FILE *fp, *sp; @@ -338,14 +338,14 @@ namespace 
ai_matrix } std::string& MyUtils::replace_all_distinct(std::string &str, const std::string &old_value, const std::string &new_value) - { - for (std::string::size_type pos(0); pos != std::string::npos; pos += new_value.length()) { - if ((pos = str.find(old_value, pos)) != std::string::npos) - str.replace(pos, old_value.length(), new_value); - else break; - } - return str; - } + { + for (std::string::size_type pos(0); pos != std::string::npos; pos += new_value.length()) { + if ((pos = str.find(old_value, pos)) != std::string::npos) + str.replace(pos, old_value.length(), new_value); + else break; + } + return str; + } /** * 获取北京当前日期 @@ -386,6 +386,23 @@ namespace ai_matrix return std::string(szTmp); } + //时间戳转化为时间 毫秒级 + std::string MyUtils::Stamp2Time(long long timestamp, bool has_msec) + { + int ms = timestamp % 1000;//取毫秒 + time_t tick = (time_t)(timestamp/1000);//转换时间 + struct tm tm; + char s[40]; + tm = *localtime(&tick); + strftime(s, sizeof(s), "%Y-%m-%d %H:%M:%S", &tm); + std::string str(s); + if (has_msec) + { + str = str+ "." 
+ std::to_string(ms); + } + return str; + } + /** * 获取当前时间距1970年的毫秒数 * inParam : N/A @@ -507,7 +524,7 @@ namespace ai_matrix return true; } - #ifdef ASCEND +#ifdef ASCEND /** * 拷贝Device数据到Host * inParam : void *pDeviceBuffer device内存地址 @@ -589,8 +606,8 @@ namespace ai_matrix } return true; } - #endif - +#endif + /** * 获取指定毫秒数的对应的北京日期时间 * inParam : uint64_t i64MilliSeconds @@ -653,7 +670,7 @@ namespace ai_matrix } if (!ifs.is_open()) { - LogWarn << "txt:" << strFilePath << " open fail"; +// LogWarn << "txt:" << strFilePath << " open fail"; return false; } @@ -735,7 +752,7 @@ namespace ai_matrix } //清空之前的结果 - vecResult.clear(); + vecResult.clear(); // 每个类别中,获取得分最高的框 for (auto iter = mapResult.begin(); iter != mapResult.end(); iter++) { diff --git a/ai_matrix/myutils/myutils.h b/ai_matrix/myutils/myutils.h index b91e48e..d145748 100644 --- a/ai_matrix/myutils/myutils.h +++ b/ai_matrix/myutils/myutils.h @@ -51,10 +51,13 @@ namespace ai_matrix //获取时间戳 std::string get_timestamp_file(); std::string get_timestamp_log(); - //获取日期 - std::string get_date(); - //获取时间 - std::string get_time(); + //获取日期 + std::string get_date(); + //获取时间 + std::string get_time(); + + //时间戳转化为时间 毫秒级 + std::string Stamp2Time(long long timestamp, bool has_msec = false); //创建文件夹 std::string create_dir_name(std::string root, std::string name); @@ -73,16 +76,16 @@ namespace ai_matrix //bool 转 string std::string getStringFromBool(bool b); - /** - * 拷贝文件 - * @param filePath 源文件位置 - * @param savePath 将要拷贝的新位置 - * @return - */ - bool copyFile(std::string filePath, std::string savePath); + /** + * 拷贝文件 + * @param filePath 源文件位置 + * @param savePath 将要拷贝的新位置 + * @return + */ + bool copyFile(std::string filePath, std::string savePath); - //替换string中所有指定字符串 - std::string& replace_all_distinct(std::string &str, const std::string &old_value, const std::string &new_value); + //替换string中所有指定字符串 + std::string& replace_all_distinct(std::string &str, const std::string &old_value, const std::string &new_value); 
//获取北京当前日期 std::string GetDate(); @@ -99,13 +102,13 @@ namespace ai_matrix //创建文件夹路径 bool CreateDirPath(std::string strDirPath); - #ifdef ASCEND +#ifdef ASCEND //拷贝Device数据到Host bool MemcpyDeviceToHost(std::shared_ptr *pHostData, const void *pDeviceBuffer, uint32_t iBufferSize); //拷贝Host数据到Device bool MemcpyHostToDevice(std::shared_ptr *pDeviceData, const void *pHostBuffer, uint32_t iBufferSize, bool bDvppFlag = true); - #endif +#endif //获取指定毫秒数的对应的日期时间 std::string GetDateTimeByMilliSeconds(uint64_t i64MilliSeconds, bool bFormatFlag = false); diff --git a/app/train b/app/train new file mode 100644 index 0000000..9ee789a Binary files /dev/null and b/app/train differ diff --git a/config.yaml b/config.yaml index 8a1f8eb..b236405 100644 --- a/config.yaml +++ b/config.yaml @@ -113,6 +113,8 @@ model: nms_threshold: 0.3 gc_http_open: 1 +username: "guest_01" +password: "d55b0f642e817eea24725d2f2a31dd08" # 神东 gc_http_url: "http://192.168.2.211:20004/api/train-carriage/identification/video-save" gc_gettoken_url: "http://192.168.2.211:20004/api/blade-auth/oauth/token" gc_image_srv: "http://192.168.2.211:9010/" @@ -179,3 +181,6 @@ gc_c_space_frame_width: 500 # 是否识别车头 gc_train_heard_detect: true + +#过期文件夹天数 +gc_days_for_result_expire_folder: 3 diff --git a/matrix.yaml b/matrix.yaml index 6bbc336..0f9b81c 100644 --- a/matrix.yaml +++ b/matrix.yaml @@ -55,6 +55,7 @@ engines: DeviceStatusUpSerEngine: 0 #ResultToMySQLSrvEngine: 0 #DataToMinioSrvEngine: 0 + DeleteExpiredFolderEngine: 0 #engine连接 connects: diff --git a/nvidia_ascend_engine/common_engine/DataDealEngine/DataDealEngine.cpp b/nvidia_ascend_engine/common_engine/DataDealEngine/DataDealEngine.cpp index 5a75139..4ff3192 100644 --- a/nvidia_ascend_engine/common_engine/DataDealEngine/DataDealEngine.cpp +++ b/nvidia_ascend_engine/common_engine/DataDealEngine/DataDealEngine.cpp @@ -118,7 +118,7 @@ bool DataDealEngine::ReadFileInfo(Json::Value &jvFrameInfo, RawData &rawData, st // LogError << "Failed to read image:" << 
strImgName; // return false; // } - + return true; } @@ -191,13 +191,13 @@ void DataDealEngine::MakeProcessData() } LogInfo << "sourceid:" << iSourceId << " MakeProcessData origtime:" << moveData_.strTrainName << " iOrigFrameId:" << iOrigFrameId - << " time:" << strTrainName_ << " iFrameId:" << iFrameId << " bIsEndFlag:" << bIsEndFlag; + << " time:" << strTrainName_ << " iFrameId:" << iFrameId << " bIsEndFlag:" << bIsEndFlag; std::string strImgName = strDataDir_ + szCameraNo + std::to_string(iOrigFrameId); strImgName += (iter->second.iRotate != 0) ? "_rotate.jpg" : ".jpg"; std::string strFileName = strDataDir_ + szCameraNo + std::to_string(iOrigFrameId) + ".txt"; - //摄像头读取失败后重试2000次。 + //摄像头读取失败后重试30次。 Json::Value jvFrameInfo; RawData rawData; bool bRet = false; @@ -228,7 +228,7 @@ void DataDealEngine::MakeProcessData() pProcessData->bIsEnd = bIsEndFlag; pProcessData->iDataNO = iDataNO_; pProcessData->nMonitorState = moveData_.nMonitorState; - + if (bRet) { i64TimeStampTemp = jvFrameInfo["timeStamp"].asUInt64(); @@ -241,7 +241,7 @@ void DataDealEngine::MakeProcessData() cv::Mat cvframe = cv::imread(pProcessData->strPicFilePath); int iBufferSize = pProcessData->iWidth * pProcessData->iHeight * 3; void* pBGRBufferobj = nullptr; - pBGRBufferobj = new uint8_t[iBufferSize]; + pBGRBufferobj = new uint8_t[iBufferSize]; memcpy(pBGRBufferobj, cvframe.data, iBufferSize); pProcessData->pData.reset(pBGRBufferobj, [](void* data){if(data) {delete[] data; data = nullptr;}}); pProcessData->iSize = iBufferSize; @@ -255,15 +255,15 @@ void DataDealEngine::MakeProcessData() //iRet = outputQueMap_[vecPushPorts[iPort]]->push(std::static_pointer_cast(pProcessData)); PushData(vecPushPorts[iPort], pProcessData); continue; - } + } std::shared_ptr pNewProcessData = std::make_shared(); *pNewProcessData = *pProcessData; //iRet = outputQueMap_[vecPushPorts[iPort]]->push(std::static_pointer_cast(pNewProcessData)); PushData(vecPushPorts[iPort], pNewProcessData); } - + } - + iOrigDataNO_++; 
iDataNO_++; //每组处理数据需间隔一定时间 @@ -291,14 +291,25 @@ APP_ERROR DataDealEngine::Process() //获取主摄像头检测的状态 std::shared_ptr pVoidData0 = nullptr; iRet = inputQueMap_[strPort0_]->pop(pVoidData0); + if (nullptr != pVoidData0) { std::shared_ptr pMoveData = std::static_pointer_cast(pVoidData0); + + // queuwMoveData_.push(*pMoveData); moveData_ = *pMoveData; LogDebug << "traindate:" << moveData_.strTrainDate << " trainname:" << moveData_.strTrainName << " MoveData frameid:" << moveData_.iFrameId << " IsEnd:" << moveData_.bIsEnd; } + // LogDebug << "【帧号】" << (iDataNO_ * dataSourceConfig_.iSkipInterval); + // if (queuwMoveData_.size() > 0 && (iDataNO_ * dataSourceConfig_.iSkipInterval) >= queuwMoveData_.front().iFrameId) + // { + // moveData_ = queuwMoveData_.front(); + // queuwMoveData_.pop(); + // LogDebug << "!!!--- moveDate 更新"; + // } + if (!moveData_.bHasTrain) { usleep(1000); //1ms @@ -308,7 +319,7 @@ APP_ERROR DataDealEngine::Process() //第一个数据,休眠1s,等待图片存入本地 if (iOrigDataNO_ == 1) { - usleep(1000000); //1s + usleep(1000 * 1000); //1s } if (strDataDir_.empty()) diff --git a/nvidia_ascend_engine/common_engine/DataDealEngine/DataDealTwoEngine.cpp b/nvidia_ascend_engine/common_engine/DataDealEngine/DataDealTwoEngine.cpp index d94a1a7..0623e2b 100644 --- a/nvidia_ascend_engine/common_engine/DataDealEngine/DataDealTwoEngine.cpp +++ b/nvidia_ascend_engine/common_engine/DataDealEngine/DataDealTwoEngine.cpp @@ -239,8 +239,8 @@ void DataDealTwoEngine::GetMainSplitInfo(Json::Value &jvMainSplit, std::shared_p { iValidType = pProcessData->iDirection == DIRECTION_LEFT ? 
VALID_LEFT : VALID_RIGHT; } - LogDebug << "sourceid:" << pProcessData->iDataSource << " frameid:" << pProcessData->iFrameId - << " trainIndex:" << pProcessData->iTrainIndex << " iSpaceType_:" << iSpaceType_ + LogDebug << " frameid:" << pProcessData->iFrameId + << " 车节:" << pProcessData->iTrainIndex << " iSpaceType_:" << iSpaceType_ << " iSpaceX:" << iSpaceX << " iLastSpaceX_:" << iLastSpaceX_ << " iLastSpaceFrameid_:" << iLastSpaceFrameid_ << " bIntervalFlag:" << bIntervalFlag; iLastSpaceX_ = iSpaceX; @@ -352,11 +352,11 @@ void DataDealTwoEngine::GetValidTypeAndSplit(Json::Value &jvOneSplit, Json::Valu GetSubSplitInfoByMain(jvOneSplit, pProcessData, jvFrameInfo); } - LogDebug << "sourceid:" << pProcessData->iDataSource << " frameid:" << pProcessData->iFrameId - << " trainIndex:" << pProcessData->iTrainIndex - << " validType:" << jvOneSplit["validType"].asInt() << " splitX:" << jvOneSplit["splitX"].asInt() - << " needNum:" << jvOneSplit["needNum"].asBool() << " needPro:" << jvOneSplit["needPro"].asBool() - << " needChkDate:" << jvOneSplit["needChkDate"].asBool() << " needContainer:" << jvOneSplit["needContainer"].asBool(); +// LogDebug << "sourceid:" << pProcessData->iDataSource << " frameid:" << pProcessData->iFrameId +// << " trainIndex:" << pProcessData->iTrainIndex +// << " validType:" << jvOneSplit["validType"].asInt() << " splitX:" << jvOneSplit["splitX"].asInt() +// << " needNum:" << jvOneSplit["needNum"].asBool() << " needPro:" << jvOneSplit["needPro"].asBool() +// << " needChkDate:" << jvOneSplit["needChkDate"].asBool() << " needContainer:" << jvOneSplit["needContainer"].asBool(); } /** @@ -456,8 +456,8 @@ void DataDealTwoEngine::MakeProcessData(std::shared_ptr pTrainRange) sprintf(szCameraNo, "%03d/", iSourceId + 1); bool bIsEndFlag = (pTrainRange->iEndFrameId == iFrameId); - LogInfo << "sourceid:" << iSourceId << " StepTwo MakeProcessData trainIndex:" << pTrainRange->iTrainIndex - << " iFrameId:" << iFrameId << " bIsEndFlag:" << bIsEndFlag; +// LogInfo << 
"sourceid:" << iSourceId << " StepTwo MakeProcessData trainIndex:" << pTrainRange->iTrainIndex +// << " iFrameId:" << iFrameId << " bIsEndFlag:" << bIsEndFlag; std::string strImgName = strDataDir + szCameraNo + std::to_string(iFrameId); strImgName += (iter->second.iRotate != 0) ? "_rotate.jpg" : ".jpg"; std::string strFileName = strDataDir + szCameraNo + std::to_string(iFrameId) + ".txt"; diff --git a/nvidia_ascend_engine/common_engine/DataSourceEngine/CameraEngine.cpp b/nvidia_ascend_engine/common_engine/DataSourceEngine/CameraEngine.cpp index fba52d4..5794b4f 100644 --- a/nvidia_ascend_engine/common_engine/DataSourceEngine/CameraEngine.cpp +++ b/nvidia_ascend_engine/common_engine/DataSourceEngine/CameraEngine.cpp @@ -7,7 +7,7 @@ namespace { const int LOW_THRESHOLD = 128; const int MAX_THRESHOLD = 4096; - const uint16_t DELAY_TIME = 40000; + const uint16_t DELAY_TIME = 10000; } CameraEngine::CameraEngine() {} diff --git a/nvidia_ascend_engine/common_engine/DataUploadEngine/ResultToHttpSrvEngine.cpp b/nvidia_ascend_engine/common_engine/DataUploadEngine/ResultToHttpSrvEngine.cpp index de25ab0..f7fe5e0 100644 --- a/nvidia_ascend_engine/common_engine/DataUploadEngine/ResultToHttpSrvEngine.cpp +++ b/nvidia_ascend_engine/common_engine/DataUploadEngine/ResultToHttpSrvEngine.cpp @@ -8,6 +8,8 @@ ResultToHttpSrvEngine::~ResultToHttpSrvEngine() {} APP_ERROR ResultToHttpSrvEngine::Init() { strPort0_ = engineName_ + "_" + std::to_string(engineId_) + "_0"; + strUsername_ = MyYaml::GetIns()->GetStringValue("username"); + strPassword_ = MyYaml::GetIns()->GetStringValue("password"); strURL_ = MyYaml::GetIns()->GetStringValue("gc_http_url"); strGetTokenURL_ = MyYaml::GetIns()->GetStringValue("gc_gettoken_url"); strImageSrv_ = MyYaml::GetIns()->GetPathValue("gc_image_srv"); @@ -76,11 +78,11 @@ bool ResultToHttpSrvEngine::GetToken(std::string &strBladeAuth) curl_mime *pMultipart = curl_mime_init(pCurl_); curl_mimepart *pPart = curl_mime_addpart(pMultipart); curl_mime_name(pPart, 
"username"); - curl_mime_data(pPart, "guest_01", CURL_ZERO_TERMINATED); + curl_mime_data(pPart, strUsername_.c_str(), CURL_ZERO_TERMINATED); pPart = curl_mime_addpart(pMultipart); curl_mime_name(pPart, "password"); - curl_mime_data(pPart, "d55b0f642e817eea24725d2f2a31dd08", CURL_ZERO_TERMINATED); - pPart = curl_mime_addpart(pMultipart); + curl_mime_data(pPart, strPassword_.c_str(), CURL_ZERO_TERMINATED); + pPart = curl_mime_addpart(pMultipart); curl_mime_name(pPart, "tenantId"); curl_mime_data(pPart, "000000", CURL_ZERO_TERMINATED); pPart = curl_mime_addpart(pMultipart); @@ -276,8 +278,8 @@ void ResultToHttpSrvEngine::DealHttpFailInfo() if (!ResultToHttpSrv(jvRequest)) { LogError << "re http post err:" << strLine; - SaveHttpFailInfo(jvRequest, strFailSaveBakPath_); - bAllSucc = false; + //SaveHttpFailInfo(jvRequest, strFailSaveBakPath_); +// bAllSucc = false; continue; } } @@ -426,10 +428,11 @@ APP_ERROR ResultToHttpSrvEngine::Process() jvRequest["isTheLast"] = pTrain->bIsEnd ? 1 : 0; // 是否最后一节: 0:否,1:是 jvRequest["startFrame"] = pTrain->iStartFrameId; //车厢开始帧 jvRequest["endFrame"] = pTrain->iEndFrameId; //车厢结束帧 - jvRequest["skipFrame"] = dataSourceConfig.iSkipInterval; //跳帧 + jvRequest["skipFrame"] = dataSourceConfig.iSkipInterval; + jvRequest["collectTime"] = MyUtils::getins()->Stamp2Time(pTrain->i64EndTimeStamp, true);//车厢切分的时间 //跳帧 if (!ResultToHttpSrv(jvRequest)) { - SaveHttpFailInfo(jvRequest, strFailSavePath_); +// SaveHttpFailInfo(jvRequest, strFailSavePath_); } //列车结束后再次处理失败的信息 diff --git a/nvidia_ascend_engine/common_engine/DataUploadEngine/ResultToHttpSrvEngine.h b/nvidia_ascend_engine/common_engine/DataUploadEngine/ResultToHttpSrvEngine.h index 71251fb..512c143 100644 --- a/nvidia_ascend_engine/common_engine/DataUploadEngine/ResultToHttpSrvEngine.h +++ b/nvidia_ascend_engine/common_engine/DataUploadEngine/ResultToHttpSrvEngine.h @@ -40,6 +40,8 @@ private: bool SaveHttpFailInfo(Json::Value &jvRequest, std::string &strFilePath); std::string strPort0_; + 
std::string strUsername_; + std::string strPassword_; std::string strURL_; std::string strGetTokenURL_; std::string strImageSrv_; diff --git a/nvidia_ascend_engine/common_engine/DeleteExpiredFolderEngine/DeleteExpiredFolderEngine.cpp b/nvidia_ascend_engine/common_engine/DeleteExpiredFolderEngine/DeleteExpiredFolderEngine.cpp new file mode 100644 index 0000000..c87ea3c --- /dev/null +++ b/nvidia_ascend_engine/common_engine/DeleteExpiredFolderEngine/DeleteExpiredFolderEngine.cpp @@ -0,0 +1,187 @@ +#include "DeleteExpiredFolderEngine.h" + +using namespace ai_matrix; + +DeleteExpiredFolderEngine::DeleteExpiredFolderEngine() {} + +DeleteExpiredFolderEngine::~DeleteExpiredFolderEngine() {} + +APP_ERROR DeleteExpiredFolderEngine::Init() +{ + iDaysNumber_ = MyYaml::GetIns()->GetIntValue("gc_days_for_result_expire_folder"); + strResultPath_ = MyYaml::GetIns()->GetPathValue("gc_result_path"); + + LogInfo << "DeleteExpiredFolderEngine Init ok"; + return APP_ERR_OK; +} + +APP_ERROR DeleteExpiredFolderEngine::DeInit() +{ + LogInfo << "DeleteExpiredFolderEngine DeInit ok"; + return APP_ERR_OK; +} + +APP_ERROR DeleteExpiredFolderEngine::Process() +{ + int iRet = APP_ERR_OK; + while (!isStop_) + { + std::string strTrainDate_temp = MyUtils::getins()->GetDate(); + + DeletePreviousFolder(strResultPath_, strTrainDate_temp, iDaysNumber_); + + for (int i = 0; i < 24 * 3600 && !isStop_; ++i) usleep(1000 * 1000); //每二十四小时执行一次 (NOTE: usleep takes a 32-bit useconds_t, so 1000*1000*3600*24 overflowed; 1s polling also lets isStop_ interrupt the wait) + } + + return APP_ERR_OK; +} + +void DeleteExpiredFolderEngine::DeletePreviousFolder(std::string path, const std::string &date, int n_days) +{ + // 1 computer date + std::string previous_date = getDateBeforeNDays(date, n_days); + if (!previous_date.empty()) + std::cout << "Date before " << n_days << " days from " << date << " is: " << previous_date << std::endl; + + + // 2 + std::vector<Date> subfolders; + GetSubfolderNames(path, subfolders); + // for (const auto &it : subfolders) + // std::cout << it.year << "." << it.month << "." 
<< it.day << std::endl; + + // 3 delete + if (path.back() != '/') + path += "/"; + Date reference_date = StrToDate(previous_date); // 给定的参考日期 + DeleteEarlierDatesFolder(path, subfolders, reference_date); +} + +// 获取某月有多少天 +int DeleteExpiredFolderEngine::DaysInMonth(int year, int month) +{ + int max_days[13] = {0, 31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31}; + if (month == 2 && ((year % 4 == 0 && year % 100 != 0) || year % 400 == 0)) + { + max_days[2] = 29; // 闰年2月有29天 + } + return max_days[month]; +} + +// 解析字符串为日期结构体 +Date DeleteExpiredFolderEngine::StrToDate(const std::string &date_str) +{ + std::istringstream iss(date_str); + int year = 0, month = 0, day = 0; // zero-init: a failed parse below must not return uninitialized values + char dash; + + if (!(iss >> year >> dash && dash == '-' && + iss >> month >> dash && dash == '-' && + iss >> day)) + { + LogError << ("Invalid date format") << ":" << date_str; + } + return {year, month, day}; +} + +// 减去指定天数 +void DeleteExpiredFolderEngine::SubtractDays(Date &date, int n_days) +{ + while (n_days > 0) + { + date.day--; + n_days--; + if (date.day == 0) + { + if (--date.month == 0) + { + --date.year; + date.month = 12; + } + int max_days = DaysInMonth(date.year, date.month); + date.day = max_days; + } + } +} + +// 格式化日期结构体为字符串 +std::string DeleteExpiredFolderEngine::DateToStr(const Date &date) +{ + std::ostringstream oss; + oss << date.year << "-" << std::setfill('0') << std::setw(2) << date.month << "-" << std::setw(2) << date.day; + return oss.str(); +} + +// 主要功能函数,接收一个日期字符串和一个整数n,返回n天前的日期字符串 +std::string DeleteExpiredFolderEngine::getDateBeforeNDays(const std::string &input_date, int n_days) +{ + try + { + Date date = StrToDate(input_date); + SubtractDays(date, n_days); + return DateToStr(date); + } + catch (const std::exception &e) + { + LogError << "Error: " << e.what(); + return ""; + } +} + +void DeleteExpiredFolderEngine::GetSubfolderNames(std::string &directory, std::vector<Date> &folder_names) +{ + if (directory.back() != '/') + directory += "/"; + DIR *dir; + struct dirent *ent; + if ((dir = 
opendir(directory.c_str())) != nullptr) + { + while ((ent = readdir(dir)) != nullptr) + { + // 排除"."和".." + if (ent->d_type == DT_DIR && ent->d_name[0] != '.' && std::string(ent->d_name) != "best") // d_name is a char array: the former '== "best"' compared pointers and was always false, so nothing was ever collected + { + folder_names.push_back(StrToDate(ent->d_name)); + } + } + closedir(dir); + } + else + { + LogError << "Unable to open directory: " << directory; + } +} + +void DeleteExpiredFolderEngine::DeleteFolder(const std::string directory) +{ + std::string command = "rm -rf " + directory; + int result = system(command.c_str()); + + if (result != 0) + std::cout << "Failed to remove directory recursively: " << directory << std::endl; + else + std::cout << "delete folder successfully : " << directory << std::endl; +} + +// 删除向量中小于指定日期的所有元素 +void DeleteExpiredFolderEngine::DeleteEarlierDatesFolder(std::string &path, std::vector<Date> &subfolders, const Date &reference_date) +{ + if (path.back() != '/') + path += "/"; + for (const Date &cur : subfolders) + { + // bool flag = false; + if (cur.year < reference_date.year) + { + DeleteFolder(path + DateToStr(cur)); + } + else if (cur.year == reference_date.year && cur.month < reference_date.month) + { + DeleteFolder(path + DateToStr(cur)); + } + else if (cur.year == reference_date.year && cur.month == reference_date.month && cur.day < reference_date.day) + { + DeleteFolder(path + DateToStr(cur)); + } + } +} \ No newline at end of file diff --git a/nvidia_ascend_engine/common_engine/DeleteExpiredFolderEngine/DeleteExpiredFolderEngine.h b/nvidia_ascend_engine/common_engine/DeleteExpiredFolderEngine/DeleteExpiredFolderEngine.h new file mode 100644 index 0000000..8c808e0 --- /dev/null +++ b/nvidia_ascend_engine/common_engine/DeleteExpiredFolderEngine/DeleteExpiredFolderEngine.h @@ -0,0 +1,57 @@ +/** + * 过期数据删除引擎 + **/ + +#ifndef DELETEEXPIREDFOLDERENGINE_H +#define DELETEEXPIREDFOLDERENGINE_H + +#include "AppCommon.h" +#include "EngineBase.h" +#include "EngineFactory.h" +#include "MyYaml.h" +#include "myutils.h" + +// 定义日期结构体 +struct Date +{ + int year; + 
int month; + int day; +}; + +class DeleteExpiredFolderEngine : public ai_matrix::EngineBase +{ +public: + DeleteExpiredFolderEngine(); + ~DeleteExpiredFolderEngine(); + + APP_ERROR Init() override; + APP_ERROR DeInit() override; + APP_ERROR Process() override; + +private: + // 获取某月有多少天 + int DaysInMonth(int year, int month); + // 解析字符串为日期结构体 + Date StrToDate(const std::string &date_str); + // 减去指定天数 + void SubtractDays(Date &date, int n_days); + // 格式化日期结构体为字符串 + std::string DateToStr(const Date &date); + // 接收一个日期字符串和一个整数n,返回n天前的日期字符串 + std::string getDateBeforeNDays(const std::string &input_date, int n_days); + void GetSubfolderNames(std::string &directory, std::vector &folder_names); + void DeleteFolder(const std::string directory); + // 删除向量中小于指定日期的所有元素 + void DeleteEarlierDatesFolder(std::string &path, std::vector &subfolders, const Date &reference_date); + void DeletePreviousFolder(std::string path, const std::string &date, int n_days); + +private: + std::string strResultPath_; + + int iDaysNumber_; +}; + +ENGINE_REGIST(DeleteExpiredFolderEngine) + +#endif diff --git a/nvidia_ascend_engine/common_engine/FilterEngine/FilterTrainStepOneEngine.cpp b/nvidia_ascend_engine/common_engine/FilterEngine/FilterTrainStepOneEngine.cpp index e44dbdc..b2da381 100644 --- a/nvidia_ascend_engine/common_engine/FilterEngine/FilterTrainStepOneEngine.cpp +++ b/nvidia_ascend_engine/common_engine/FilterEngine/FilterTrainStepOneEngine.cpp @@ -28,6 +28,9 @@ APP_ERROR FilterTrainStepOneEngine::Init() strResultPath_ = MyYaml::GetIns()->GetPathValue("gc_result_path"); iChkStopPX_ = MyYaml::GetIns()->GetIntValue("gc_chkstop_px"); iChkStopCount_ = MyYaml::GetIns()->GetIntValue("gc_chkstop_count"); + iPartitionFrameNum_ = MyYaml::GetIns()->GetIntValue("partition_frame_span"); + iPlitFrameSpanPX_ = MyYaml::GetIns()->GetIntValue("gc_split_frame_span_px"); + iPushDirection_ = MyYaml::GetIns()->GetIntValue("gc_push_direction"); //获取主摄像头信息 mainCfg_ = MyYaml::GetIns()->GetDataSourceConfigById(0); 
@@ -35,6 +38,8 @@ APP_ERROR FilterTrainStepOneEngine::Init() std::map mapUseDataSouceCfg = MyYaml::GetIns()->GetUseDataSourceConfig(); for (auto iter = mapUseDataSouceCfg.begin(); iter != mapUseDataSouceCfg.end(); iter++) { + this->rightFirst_ = iter->second.iRightFirst; + this->leftFirst_ = iter->second.iLeftFirst; if (iter->second.strTarget.find("NUM") != std::string::npos) { LogDebug << "DataSource:" << iter->first << " deal NUM"; @@ -55,8 +60,8 @@ APP_ERROR FilterTrainStepOneEngine::Init() mapTargetStr_.insert(std::make_pair(NUM, "NUM")); mapTargetStr_.insert(std::make_pair(PRO, "PRO")); mapTargetStr_.insert(std::make_pair(HEAD, "HEAD")); - mapTargetStr_.insert(std::make_pair(SPACE, "SPACE")); - mapTargetStr_.insert(std::make_pair(TRAINSPACE, "SPACE")); + mapTargetStr_.insert(std::make_pair(SPACE, "SPACE"));//SPACE + mapTargetStr_.insert(std::make_pair(TRAINSPACE, "SPACE"));//SPACE InitParam(); LogInfo << "FilterTrainStepOneEngine Init ok"; @@ -113,7 +118,7 @@ void FilterTrainStepOneEngine::InitParam() } iDirection_ = DIRECTION_UNKNOWN; iNotChgCount_ = 0; - + while (!stackBackInfo_.empty()) { stackBackInfo_.pop(); @@ -166,11 +171,12 @@ void FilterTrainStepOneEngine::AddBackInfo(std::shared_ptr pProcess } strAllClassType += mapTargetStr_[pPostData->vecPostSubData[i].iTargetType]; } - LogDebug << "frameId:" << pProcessData->iFrameId << " addbackinfo strAllClassType:" << strAllClassType; + if (strAllClassType.empty()) { return; } + LogDebug << "frameId:" << pProcessData->iFrameId << " addbackinfo strAllClassType:" << strAllClassType; TrainBackInfo trainBackInfo; trainBackInfo.processData = pProcessData; @@ -184,8 +190,35 @@ void FilterTrainStepOneEngine::AddBackInfo(std::shared_ptr pProcess else { TrainBackInfo trainBackInfoTop = stackBackInfo_.top(); + // 2024年3月27日修改前 + // if (trainBackInfoTop.strAllClassType != trainBackInfo.strAllClassType) + // { + // stackBackInfo_.push(trainBackInfo); + // LogDebug << "frameId:" << pProcessData->iFrameId << " push 
strAllClassType:" << strAllClassType + // << " stacksize:" << stackBackInfo_.size(); + // } + if (trainBackInfoTop.strAllClassType != trainBackInfo.strAllClassType) { + if (iDirection_ == DIRECTION_RIGHT + && trainBackInfo.strAllClassType == "SPACE" + && (trainBackInfoTop.strAllClassType == "PROSPACE" || trainBackInfoTop.strAllClassType == "SPACEPRO")) + { + return; + } + if (iDirection_ == DIRECTION_RIGHT + && trainBackInfo.strAllClassType == "SPACE" + && (trainBackInfoTop.strAllClassType == "NUMSPACE" || trainBackInfoTop.strAllClassType == "SPACENUM")) + { + return; + } + + if (iDirection_ == DIRECTION_LEFT + && trainBackInfo.strAllClassType == "SPACE" + && (trainBackInfoTop.strAllClassType == "NUMSPACE" || trainBackInfoTop.strAllClassType == "SPACENUM")) + { + return; + } stackBackInfo_.push(trainBackInfo); LogDebug << "frameId:" << pProcessData->iFrameId << " push strAllClassType:" << strAllClassType << " stacksize:" << stackBackInfo_.size(); @@ -200,16 +233,19 @@ bool FilterTrainStepOneEngine::IsEndDealBackInfo(std::shared_ptr pP return true; } + bool bPopFlag = false; + std::shared_ptr pPostData = std::static_pointer_cast(pProcessData->pVoidData); + if (pPostData->vecPostSubData.size() == 0) return false; + /* 处理倒车数据时,数据需设置为倒车,主要是保证这样的数据后面Engine不处理,防止切分车厢出错。 - 类型不相等时,就pop,当pop后,还剩一个数据时,则表示已经回到了刚开始倒车的地方。 - + 类型不相等时,就pop,当pop后,还剩一个数据时,则表示已经回到了刚开始倒车的地方。(只剩一个数据的逻辑在上方) + 处理最后一个时,不能只判断下类型相同就弹出。需要控制下位置。(要么类型相同位置合适,要么类型不相同) 正向为向左行驶,则当前数据的位置尽量小于等于栈中最后一个元素的位置。 正向为向右行驶,则当前数据的位置尽量大于等于栈中最后一个元素的位置。 */ - std::shared_ptr pPostData = std::static_pointer_cast(pProcessData->pVoidData); std::sort(pPostData->vecPostSubData.begin(), pPostData->vecPostSubData.end(), CompareX); std::string strAllClassType; @@ -226,48 +262,73 @@ bool FilterTrainStepOneEngine::IsEndDealBackInfo(std::shared_ptr pP return false; } - TrainBackInfo trainBackInfoTop = stackBackInfo_.top(); - bool bPopFlag = false; - if (trainBackInfoTop.strAllClassType != strAllClassType) - { - bPopFlag = true; - } - 
LogDebug << "frameId:" << pProcessData->iFrameId << " stacksize:" << stackBackInfo_.size() - << " topClassType:" << trainBackInfoTop.strAllClassType << " dealbackinfo strAllClassType:" << strAllClassType - << " bPopFlag:" << bPopFlag; - - if(bPopFlag) - { - stackBackInfo_.pop(); - bPopFlag = false; - } - if (stackBackInfo_.size() == 1) { - if (!bPopFlag) - { - TrainBackInfo trainBackInfoLast = stackBackInfo_.top(); - std::shared_ptr pPostDataBack = std::static_pointer_cast(trainBackInfoLast.processData->pVoidData); - std::sort(pPostDataBack->vecPostSubData.begin(), pPostDataBack->vecPostSubData.end(), CompareX); + TrainBackInfo trainBackInfoLast = stackBackInfo_.top(); + std::shared_ptr pPostDataBack = std::static_pointer_cast(trainBackInfoLast.processData->pVoidData); + std::sort(pPostDataBack->vecPostSubData.begin(), pPostDataBack->vecPostSubData.end(), CompareX); - for (size_t i = 0; i < pPostDataBack->vecPostSubData.size(); i++) + for (size_t i = 0; i < pPostDataBack->vecPostSubData.size(); i++) + { + int bFlag = -1; + for (size_t j = 0; j < pPostData->vecPostSubData.size(); j++) { - bool bFlag = (pPostDataBack->vecPostSubData[i].step1Location.fLTX <= pPostData->vecPostSubData[i].step1Location.fLTX); - LogDebug << "frameId:" << pProcessData->iFrameId << " stackFrameid:" << pPostDataBack->iFrameId << " bFlag:" << bFlag; - if ((iDirection_ == DIRECTION_LEFT && !bFlag) || - (iDirection_ == DIRECTION_RIGHT && bFlag)) + if (pPostDataBack->vecPostSubData[i].iBigClassId == pPostData->vecPostSubData[j].iBigClassId) { - bPopFlag = true; - break; + if (pPostData->vecPostSubData[j].step1Location.fLTX < 1 || pPostDataBack->vecPostSubData[i].step1Location.fLTX < 1) + { + LogDebug << "大框X坐标小于1,判定为异常大框。过滤!!"; + break; + } + bFlag = (pPostDataBack->vecPostSubData[i].step1Location.fLTX <= pPostData->vecPostSubData[j].step1Location.fLTX) ? 
1 : 0; + LogDebug << "帧:" << pProcessData->iFrameId << " 倒车前帧:" << pPostDataBack->iFrameId << " 恢复到原位:" << bFlag + << " 当前框位置:" << pPostData->vecPostSubData[i].step1Location.fLTX + << " 倒车前位置:" << pPostDataBack->vecPostSubData[i].step1Location.fLTX << "方向:" << iDirection_; } } - if (bPopFlag) + + if ((iDirection_ == DIRECTION_LEFT && bFlag == 0) || + (iDirection_ == DIRECTION_RIGHT && bFlag == 1)) { - LogDebug << "frameId:" << pProcessData->iFrameId << " last one bPopFlag:" << bPopFlag; - stackBackInfo_.pop(); + bPopFlag = true; + break; } - } + } + + if (bPopFlag) + { + LogDebug << "frameId:" << pProcessData->iFrameId << " 恢复倒车前的位置:" << bPopFlag; + stackBackInfo_.pop(); + } } + else + { + TrainBackInfo trainBackInfoTop_bak = stackBackInfo_.top(); + stackBackInfo_.pop(); + TrainBackInfo trainBackInfoTop = stackBackInfo_.top(); + + if (trainBackInfoTop.strAllClassType != strAllClassType) + { + stackBackInfo_.push(trainBackInfoTop_bak); + LogDebug << "帧:" << pProcessData->iFrameId << " 倒车信息:" << stackBackInfo_.size() + << " 顶部倒车信息:" << trainBackInfoTop.strAllClassType << " 本次识别信息:" << strAllClassType; + } + else + { +// bPopFlag = true; + LogDebug << "帧:" << pProcessData->iFrameId << " 倒车信息:" << stackBackInfo_.size() + << " 顶部倒车信息:" << trainBackInfoTop.strAllClassType << " 本次识别信息:" << strAllClassType + << " 删除倒车信息:" << trainBackInfoTop_bak.strAllClassType; + } + + +// if(bPopFlag) +// { +// stackBackInfo_.pop(); +// bPopFlag = false; +// } + } + return stackBackInfo_.empty() ? 
true : false; } @@ -275,31 +336,29 @@ bool FilterTrainStepOneEngine::IsEndDealBackInfo(std::shared_ptr pP * 校验火车是否停止 * inParam : std::shared_ptr pProcessData :待处理数据 * outParam: N/A -* return : true:停止; false:非停止 1(正常行驶) 2(停车) 3(倒车) +* return : true:停止; false:非停止 1(正常行驶) 2(停车) 3(倒车) */ int FilterTrainStepOneEngine::GetTrainStatus(std::shared_ptr pProcessData) { if (iDirection_ == DIRECTION_UNKNOWN) { - LogDebug << "sourceid:" << pProcessData->iDataSource << " frameId:" << pProcessData->iFrameId << " direction unknown trainStatus=1"; + LogDebug << " frameId:" << pProcessData->iFrameId << " 未判断出行车方向,暂定认为火车正常行驶中"; return TRAINSTATUS_RUN; } std::shared_ptr pPostData = std::static_pointer_cast(pProcessData->pVoidData); pPostData->iFrameId = pProcessData->iFrameId; - quePostData_.push(*pPostData.get()); // 1. 无框时,返回之前的列车状态 if (pPostData->vecPostSubData.size() == 0) { - LogDebug << "sourceid:" << pProcessData->iDataSource << " frameId:" << pProcessData->iFrameId << " step1 no result trainStatus="<< iTrainStatus_; - quePostData_.pop(); return iTrainStatus_; } + quePostData_.push(*pPostData.get()); + if (quePostData_.size() < 3) { - LogDebug << "sourceid:" << pProcessData->iDataSource << " frameId:" << pProcessData->iFrameId << " size < 3 trainStatus=1"; return TRAINSTATUS_RUN; } @@ -310,7 +369,7 @@ int FilterTrainStepOneEngine::GetTrainStatus(std::shared_ptr pProce { quePostData_.pop(); } - LogDebug << "queue front frameId:" << postDataFront.iFrameId << " queuesize:" << quePostData_.size() << " iSizeTemp:" << iSizeTemp; + LogDebug << "frameId:" << pProcessData->iFrameId << " 判断运动状态队列 第一帧:" << postDataFront.iFrameId << " 队列size:" << quePostData_.size() << " iSizeTemp:" << iSizeTemp; bool bSameFlag = false; int iDiffValue = iChkStopPX_; @@ -326,6 +385,7 @@ int FilterTrainStepOneEngine::GetTrainStatus(std::shared_ptr pProce */ if (postSubDataFront.iTargetType != postSubDataBack.iTargetType) { + LogDebug << "判断前后帧识别的是否一致 上一个:" << postSubDataFront.iTargetType << " 当前:" << 
postSubDataBack.iTargetType; continue; } @@ -340,7 +400,7 @@ int FilterTrainStepOneEngine::GetTrainStatus(std::shared_ptr pProce //位置比较大于10个像素,则表示有移动。再判断时正向移动,还是倒车 LogDebug << "frameId:" << pProcessData->iFrameId << " " << iCenterBack << "-" << iCenterFront - << "=" << abs(iCenterBack - iCenterFront) << " iDiffValue:" << iDiffValue; + << "=" << abs(iCenterBack - iCenterFront) << " 预期判定移动的差值为iDiffValue:" << iDiffValue; if (abs(iCenterBack - iCenterFront) > iDiffValue) { iNotChgCount_ = 0; @@ -352,32 +412,38 @@ int FilterTrainStepOneEngine::GetTrainStatus(std::shared_ptr pProce if ((iCenterBack > iCenterFront && iDirection_ == DIRECTION_LEFT) || (iCenterBack < iCenterFront && iDirection_ == DIRECTION_RIGHT)) { - LogDebug << "frameId:" << pProcessData->iFrameId << " back1"; + if (this->iPartitionFrameNum_ < (pProcessData->iFrameId - postDataFront.iFrameId) + && this->iPlitFrameSpanPX_ < abs(iCenterBack - iCenterFront)) + { + return TRAINSTATUS_RUN; + } + LogDebug << "frameId:" << pProcessData->iFrameId << " 检测到火车倒车"; return TRAINSTATUS_BACK; } else { - LogDebug << "frameId:" << pProcessData->iFrameId << " run"; + LogDebug << "frameId:" << pProcessData->iFrameId << " 正常行驶"; return TRAINSTATUS_RUN; } } - /* - 小于10个像素表示可能停车,累计未变化次数。 - 累计变化次数超过10次,返回停车 - 累计变化次数未超过10次,返回之前行驶状态 - */ + /* + 小于10个像素表示可能停车,累计未变化次数。 + 累计变化次数超过10次,返回停车 + 累计变化次数未超过10次,返回之前行驶状态 + */ else { iNotChgCount_++; - LogDebug << " frameId:" << pProcessData->iFrameId << " no chg iNotChgCount:" << iNotChgCount_; + LogDebug << " frameId:" << pProcessData->iFrameId + << " 大框移动范围小 判断停车计数:" << iNotChgCount_ << "/" << iChkStopCount_; if (iNotChgCount_ > iChkStopCount_) { - LogDebug << "frameId:" << pProcessData->iFrameId << " stop"; + LogDebug << "frameId:" << pProcessData->iFrameId << " 检测到火车停车"; return TRAINSTATUS_STOP; } else { - LogDebug << "frameId:" << pProcessData->iFrameId << " iTrainStatus_:" << iTrainStatus_; +// LogDebug << "frameId:" << pProcessData->iFrameId << " iTrainStatus_:" << iTrainStatus_; 
return iTrainStatus_; } } @@ -443,8 +509,8 @@ int FilterTrainStepOneEngine::GetTrainStatus(std::shared_ptr pProce } } - LogDebug << "frameId:" << pProcessData->iFrameId << " back2"; - return TRAINSTATUS_BACK; +// LogDebug << "frameId:" << pProcessData->iFrameId << " back2"; + return iTrainStatus_; } } LogDebug << "frameId:" << pProcessData->iFrameId << " iNotChgCount_:" << iNotChgCount_ << " run run"; @@ -491,7 +557,7 @@ void FilterTrainStepOneEngine::SetDirection(std::vector &vecLocat { iDirection_ = DIRECTION_LEFT; } - //行驶方向 右 + //行驶方向 右 else if ((slBack.fLTX - slFront.fLTX) > 0) { iDirection_ = DIRECTION_RIGHT; @@ -723,7 +789,8 @@ void FilterTrainStepOneEngine::CalculateDirection(std::shared_ptr p } void FilterTrainStepOneEngine::sendComeTrain(const std::string strTrainDate, const std::string strTrainName, const int iDirection) { - std::string message = "{\"cometime\":\"" + strTrainDate + " " + strTrainName + "\",\"type\":\"1\",\"direction\":\"" + to_string(iDirection) + "\"}"; + std::string message = "{\"cometime\":\"" + strTrainDate + " " + strTrainName + "\",\"type\":\"1\",\"direction\":" + to_string(iDirection == iPushDirection_ ? 
1:-1) + "}"; + LogWarn << message; outputQueMap_[engineName_ + "_" + std::to_string(engineId_) + "_1"]->push(std::static_pointer_cast(std::make_shared(message))); } @@ -830,31 +897,31 @@ void FilterTrainStepOneEngine::DealProcessDataPre(std::shared_ptr p { if (iterHeadContinueCnt->second < 2 && it->iTargetType == HEAD) { - LogError << "sourceid:" << iterProcessData->second->iDataSource << " frameId:" << iterProcessData->second->iFrameId << " Head wrong"; + LogDebug << " frameId:" << iterProcessData->second->iFrameId << " Head 框因非连续识别而过滤"; it = pPostDataPre->vecPostSubData.erase(it); continue; } if (iterProContinueCnt->second < 2 && it->iTargetType == PRO) { - LogError << "sourceid:" << iterProcessData->second->iDataSource << " frameId:" << iterProcessData->second->iFrameId << " PRO wrong"; + LogDebug << " frameId:" << iterProcessData->second->iFrameId << " PRO 框因非连续识别而过滤"; it = pPostDataPre->vecPostSubData.erase(it); continue; } if (iterNumContinueCnt->second < 2 && it->iTargetType == NUM) { - LogError << "sourceid:" << iterProcessData->second->iDataSource << " frameId:" << iterProcessData->second->iFrameId << " NUM wrong"; + LogDebug << " frameId:" << iterProcessData->second->iFrameId << " NUM 框因非连续识别而过滤"; it = pPostDataPre->vecPostSubData.erase(it); continue; } if (iterSpaceContinueCnt->second < 2 && it->iTargetType == SPACE) { - LogError << "sourceid:" << iterProcessData->second->iDataSource << " frameId:" << iterProcessData->second->iFrameId << " SPACE wrong"; + LogDebug << " frameId:" << iterProcessData->second->iFrameId << " SPACE 框因非连续识别而过滤"; it = pPostDataPre->vecPostSubData.erase(it); continue; } if (iterTranSpaceContinueCnt->second < 2 && it->iTargetType == TRAINSPACE) { - LogError << "sourceid:" << iterProcessData->second->iDataSource << " frameId:" << iterProcessData->second->iFrameId << " TRAINSPACE wrong"; + LogDebug << " frameId:" << iterProcessData->second->iFrameId << " TRAINSPACE 框因非连续识别而过滤"; it = pPostDataPre->vecPostSubData.erase(it); continue; 
} @@ -871,6 +938,7 @@ void FilterTrainStepOneEngine::DealProcessDataPre(std::shared_ptr p { //CalculateDirection(iterProcessData->second); CalculateDirectionNew(iterProcessData->second); + if (iDirection_ != DIRECTION_UNKNOWN) this->sendComeTrain(pProcessData->strTrainDate, pProcessData->strTrainName, iDirection_); } if (iDirection_ != DIRECTION_UNKNOWN) @@ -901,7 +969,7 @@ void FilterTrainStepOneEngine::DealProcessDataPre(std::shared_ptr p { //倒车 AddBackInfo(iterProcessData->second); - iTrainStatusTemp = TRAINSTATUS_STOP; + iTrainStatusTemp = TRAINSTATUS_STOP; } else if(iTrainStatus_ == TRAINSTATUS_RUN) { @@ -918,7 +986,7 @@ void FilterTrainStepOneEngine::DealProcessDataPre(std::shared_ptr p << " 火车实时运行状态:" << iTrainStatus_ << "(0无车,1运行,2停车,3倒车) iTrainStatusTemp:" << iTrainStatusTemp; iterProcessData->second->iStatus = iTrainStatusTemp; - this->sendComeTrain(pProcessData->strTrainDate, pProcessData->strTrainName, iDirection_); + // this->sendComeTrain(pProcessData->strTrainDate, pProcessData->strTrainName, iDirection_); //上一帧,push端口0 PushData(strPort0_, iterProcessData->second); diff --git a/nvidia_ascend_engine/common_engine/FilterEngine/FilterTrainStepOneEngine.h b/nvidia_ascend_engine/common_engine/FilterEngine/FilterTrainStepOneEngine.h index fb0ac5b..a95f9c2 100644 --- a/nvidia_ascend_engine/common_engine/FilterEngine/FilterTrainStepOneEngine.h +++ b/nvidia_ascend_engine/common_engine/FilterEngine/FilterTrainStepOneEngine.h @@ -58,6 +58,11 @@ private: int iChkStopPX_; int iChkStopCount_; int iDirection_; //方向 + int iPushDirection_; //需要识别的方向 + int rightFirst_; // 向右行驶的在前大框类型 + int leftFirst_; // 向左行驶的在前大框类型 + int iPartitionFrameNum_; //满足跨车厢的帧间隔 + int iPlitFrameSpanPX_; //相连帧 同种大框的跨度最大值 std::map mapPostDataFrist_; //[key-数据源id, value-第一步识别信息] std::map>> mapMapStep1Info_; //[key-数据源id, value-[key-识别目标, value-识别框集合]] diff --git a/nvidia_ascend_engine/common_engine/SaveEngine/SaveCsvEngine.cpp b/nvidia_ascend_engine/common_engine/SaveEngine/SaveCsvEngine.cpp 
index a73af63..5381376 100644 --- a/nvidia_ascend_engine/common_engine/SaveEngine/SaveCsvEngine.cpp +++ b/nvidia_ascend_engine/common_engine/SaveEngine/SaveCsvEngine.cpp @@ -83,7 +83,10 @@ bool SaveCsvEngine::SaveMergerCsv(std::shared_ptr pTrain) << "inspection" << ',' << "inspectionImg" << ',' << "containerImg_1" << ',' - << "containerImg_2" << std::endl; + << "containerImg_2" << ',' + << "startTime" << ',' + << "endTime" + << std::endl; } std::string strTime = pTrain->strTrainName; @@ -147,7 +150,10 @@ bool SaveCsvEngine::SaveMergerCsv(std::shared_ptr pTrain) << pTrain->chkDate.strChkDate1DeadLine << ',' << szChkDateImgPath << ',' << szContainer1ImgPath << ',' - << szContainer2ImgPath << std::endl; + << szContainer2ImgPath << ',' + << MyUtils::getins()->Stamp2Time(pTrain->i64StartTimeStamp, true) << ',' + << MyUtils::getins()->Stamp2Time(pTrain->i64EndTimeStamp, true) + << std::endl; outFile.close(); } @@ -427,18 +433,18 @@ bool SaveCsvEngine::SaveContainerCsv(std::shared_ptr pTrainConta catch (const std::exception &) { LogError << "strCsvPath:" << strCsvPath << " container savecsv fail!"; - continue; + continue; } } return true; } APP_ERROR SaveCsvEngine::Process() -{ +{ int iRet = APP_ERR_OK; while (!isStop_) { - + bool bPopFlag = false; //pop端口0 车厢信息 std::shared_ptr pVoidData0 = nullptr; diff --git a/nvidia_ascend_engine/common_engine/SaveEngine/SaveImgEngine.cpp b/nvidia_ascend_engine/common_engine/SaveEngine/SaveImgEngine.cpp index 4dd30b0..c9361c3 100644 --- a/nvidia_ascend_engine/common_engine/SaveEngine/SaveImgEngine.cpp +++ b/nvidia_ascend_engine/common_engine/SaveEngine/SaveImgEngine.cpp @@ -160,7 +160,7 @@ APP_ERROR SaveImgEngine::Process() jvFrameInfo["rate"] = iRate; jvFrameInfo["isEnd"] = pSaveImgData->bIsEnd; MyUtils::getins()->WriteJsonInfo(jvFrameInfo, strTxtFilePath); - LogDebug << "engineId:" << engineId_ << " save success txt:" << strTxtFilePath; +// LogDebug << "engineId:" << engineId_ << " save success txt:" << strTxtFilePath; } } return 
APP_ERR_OK; diff --git a/nvidia_ascend_engine/common_engine/SaveEngine/SaveStepOneResultEngine.cpp b/nvidia_ascend_engine/common_engine/SaveEngine/SaveStepOneResultEngine.cpp index ffe8a9c..69916eb 100644 --- a/nvidia_ascend_engine/common_engine/SaveEngine/SaveStepOneResultEngine.cpp +++ b/nvidia_ascend_engine/common_engine/SaveEngine/SaveStepOneResultEngine.cpp @@ -119,10 +119,10 @@ void SaveStepOneResultEngine::DealCenterSpace(std::vector &vecParat { return; } - LogDebug << "size:" << iVecSize << " frameId:" << pProcessData->iFrameId - << " vecParationInfo[0].frameId:" << vecParationInfo.at(0).modelSpaceFrame - << " vecParationInfo[size-1].frameId:" << vecParationInfo.at(iVecSize - 1).modelSpaceFrame - << " isEnd:" << vecParationInfo.at(iVecSize - 1).bIsEnd; + LogDebug << "积累的车厢切分信息数:" << iVecSize << " 帧:" << pProcessData->iFrameId + << " 第一个车厢切分信息帧:" << vecParationInfo.at(0).modelSpaceFrame + << " 最后一个车厢切分信息帧:" << vecParationInfo.at(iVecSize - 1).modelSpaceFrame + << " 最后一个车厢切分信息是否为结束:" << vecParationInfo.at(iVecSize - 1).bIsEnd; /* 因停车后再行驶未能及时判断出为行驶状态,导致更新间隔信息,出现漏切分车厢(具体原因分析见正通2023-02-28的问题分析记录) @@ -134,9 +134,9 @@ void SaveStepOneResultEngine::DealCenterSpace(std::vector &vecParat int iCenterXPre = vecParationInfo[i - 1].fLTX + (vecParationInfo[i - 1].fRBX - vecParationInfo[i - 1].fLTX) / 2; int iCenterX = vecParationInfo[i].fLTX + (vecParationInfo[i].fRBX - vecParationInfo[i].fLTX) / 2; bool bIntervalFlag = ((int)(vecParationInfo[i].modelSpaceFrame - vecParationInfo[i - 1].modelSpaceFrame)) > iSplitSpan_; - LogDebug << "frameidPre:" << vecParationInfo[i - 1].modelSpaceFrame << " iCenterXPre:" << iCenterXPre - << " frameid:" << vecParationInfo[i].modelSpaceFrame << " iCenterX:" << iCenterX - << " bIntervalFlag:" << bIntervalFlag << " i:" << i; + LogDebug << "上一帧ID:" << vecParationInfo[i - 1].modelSpaceFrame << " 上一帧间隔X轴中线:" << iCenterXPre + << " 本帧ID:" << vecParationInfo[i].modelSpaceFrame << " 本帧间隔X轴中线:" << iCenterX + << " 满足帧间隔:" << bIntervalFlag << " i:" << 
i; if (iDirection_ == DIRECTION_LEFT && (iCenterXPre < iCenterX - iSplitSpanPX_) && bIntervalFlag) { vecSpacePos.push_back(i - 1); @@ -190,6 +190,8 @@ void SaveStepOneResultEngine::DealCenterSpace(std::vector &vecParat pPartionInfo->startframe = dataSourceConfig_.iSkipInterval; } pPartionInfo->nStatus = ((pProcessData->iStatus == TRAINSTATUS_STOP) ? TRAIN_PAUSE : iDirection_); + + LogWarn << "--------- 向Paration 发送数据 --------"; outputQueMap_[strPort0_]->push(std::static_pointer_cast(pPartionInfo)); iPushSpaceFrameId_ = pPartionInfo->modelSpaceFrame; @@ -242,14 +244,18 @@ void SaveStepOneResultEngine::DealTrainSpaceInfo(std::shared_ptr pP { iDirection_ = jvDirectionInfo["direction"].asInt(); } + else + { + LogWarn << "暂未检测出行车方向"; + } } bool bIntervalFlag = ((int)(pProcessData->iFrameId - parationInfoLast_.modelSpaceFrame)) > iSplitSpan_; int iCenterCur = jvStep1Space[0]["ltx"].asFloat() + (jvStep1Space[0]["rbx"].asFloat() - jvStep1Space[0]["ltx"].asFloat()) / 2; int iCenterLast = parationInfoLast_.fLTX + (parationInfoLast_.fRBX - parationInfoLast_.fLTX) / 2; - LogDebug << "frameid:" << pProcessData->iFrameId << " centerCur:" << iCenterCur - << " lastFrameid:" << parationInfoLast_.modelSpaceFrame << " centerLast:" << iCenterLast - << " iDirection_:" << iDirection_ << " bIntervalFlag:" << bIntervalFlag << " bDealCenterFlag_:" << bDealCenterFlag_; + LogDebug << "当前帧:" << pProcessData->iFrameId << " 间隔框中心线:" << iCenterCur + << " 上一帧:" << parationInfoLast_.modelSpaceFrame << " 间隔框中心线:" << iCenterLast + << " 行车方向:" << iDirection_ << " 是否满足切分帧数:" << bIntervalFlag << " bDealCenterFlag_:" << bDealCenterFlag_; if (iDirection_ == DIRECTION_UNKNOWN || iCenterLast == 0) { @@ -295,8 +301,8 @@ void SaveStepOneResultEngine::DealTrainSpaceInfo(std::shared_ptr pP if (!(bDealCenterFlag_ && !bIntervalFlag && (iCenterCur < (pProcessData->iWidth / 3 + 30)))) { vecParationInfo_.push_back(parationInfo); - } - } + } + } } else if (iDirection_ == DIRECTION_RIGHT) { @@ -418,7 +424,7 @@ void 
SaveStepOneResultEngine::SplitTrainByNumPro(std::shared_ptr &p pPartionInfoNew->i64StartTimeStamp = i64TimeStampFirst_; pPartionInfoNew->startframe = dataSourceConfig_.iSkipInterval; } - pPartionInfoNew->nStatus = ((pProcessData->iStatus == TRAINSTATUS_STOP) ? TRAIN_PAUSE : iDirection_); +// pPartionInfoNew->nStatus = ((pProcessData->iStatus == TRAINSTATUS_STOP) ? TRAIN_PAUSE : iDirection_); //构造一个间隔信息写入到切分帧中 char szCameraNo[5] = {0}; @@ -437,6 +443,7 @@ void SaveStepOneResultEngine::SplitTrainByNumPro(std::shared_ptr &p jvFrameInfo["step1Space"].append(jvOneSpace); MyUtils::getins()->WriteJsonInfo(jvFrameInfo, strFilePath); + LogWarn << "--------- 向Paration 发送数据 --------"; outputQueMap_[strPort0_]->push(std::static_pointer_cast(pPartionInfoNew)); iPushSpaceFrameId_ = pPartionInfoNew->modelSpaceFrame; @@ -543,7 +550,7 @@ APP_ERROR SaveStepOneResultEngine::Process() { //车头没有属性,因此车头号也加入到属性中。保证向右行驶属性在前时最后2节的切分。 //车头只加入一次,防止一个车头2个车头号的场景。但有两个车头且没识别车头间隔则无法处理。 - if (!bHaveHeadFlag_) + if (!bHaveHeadFlag_) { bool bIntervalFlag = ((int)(pProcessData->iFrameId - headInfo_.iFrameId) > iSplitSpan_ && headInfo_.iFrameId != 0); @@ -578,7 +585,7 @@ APP_ERROR SaveStepOneResultEngine::Process() } else if (postSubData.iTargetType == CONTAINER) { - jvStep1Container.append(jvInfo); + jvStep1Container.append(jvInfo); } else if (postSubData.iTargetType == SPACE) { @@ -681,6 +688,7 @@ APP_ERROR SaveStepOneResultEngine::Process() //最后一节和倒数第二节之间的间隔未能识别时,此时也需要通过车号属性切分下。 SplitTrainByNumPro(pPartionInfo, pProcessData); + LogWarn << "--------- 向Paration 发送数据 --------"; outputQueMap_[strPort0_]->push(std::static_pointer_cast(pPartionInfo)); iPushSpaceFrameId_ = pPartionInfo->modelSpaceFrame; diff --git a/nvidia_ascend_engine/common_engine/SelectBestEngine/SelectBestEngine.cpp b/nvidia_ascend_engine/common_engine/SelectBestEngine/SelectBestEngine.cpp index 89b8a84..085783c 100644 --- a/nvidia_ascend_engine/common_engine/SelectBestEngine/SelectBestEngine.cpp +++ 
b/nvidia_ascend_engine/common_engine/SelectBestEngine/SelectBestEngine.cpp @@ -212,6 +212,11 @@ std::string SelectBestEngine::GetBest(std::vector &vecAllTransInfo, T { vecAllTransInfo = vecTransInfoTemp; } + else + { + // 此处因车厢太脏。识别效果很差,难以与RFID识别结果融合,所以增加eles + return strValue; + } //获取最优长度 int iBestLen = GetBestLength(vecAllTransInfo, iMaxLen); diff --git a/nvidia_ascend_engine/common_engine/TrainAnaEngine/TrainParationMgr.cpp b/nvidia_ascend_engine/common_engine/TrainAnaEngine/TrainParationMgr.cpp index a3f3590..3caf1d2 100644 --- a/nvidia_ascend_engine/common_engine/TrainAnaEngine/TrainParationMgr.cpp +++ b/nvidia_ascend_engine/common_engine/TrainAnaEngine/TrainParationMgr.cpp @@ -149,7 +149,6 @@ APP_ERROR TrainParationMgr::Process() int nSize = lstPartInfo.size(); int nPartionIndex = nSize - 1; - int nPrePartionIndex = nPartionIndex; //当然车厢通过的数量 if (nSize == 0) { @@ -166,35 +165,30 @@ APP_ERROR TrainParationMgr::Process() lstPartInfo.push_back(stTempInfo); //lstPartInfo.push_back(stTempInfo); nPartionIndex++; - } - { - lstPartInfo[nPartionIndex].i64EndTimeStamp = pPartionInfo->i64EndTimeStamp; - lstPartInfo[nPartionIndex].endframe = pPartionInfo->modelSpaceFrame; - // 根据开始帧时间戳和结束帧时间错 计算当节车厢的行车速度 - // LogInfo << "TrainAnaEngine checkPartion bPartion == true lstPartInfo[nPrePartionIndex].ftime:" << abs(lstPartInfo[nPrePartionIndex].i64EndTimeStamp - lstPartInfo[nPrePartionIndex].i64StartTimeStamp); - // 根据时间戳计算时间差 - - float nTimePassed = (abs(lstPartInfo[nPartionIndex].i64EndTimeStamp - lstPartInfo[nPartionIndex].i64StartTimeStamp)) * 1.0; - //防止停车导致速度过小 - if(pPartionInfo->nStatus != TRAIN_PAUSE && nTimePassed <= 50000) { - lstPartInfo[nPartionIndex].fspeed = (TRAIN_WIDTH * 1000.0) /nTimePassed; + } + lstPartInfo[nPartionIndex].i64EndTimeStamp = pPartionInfo->i64EndTimeStamp; + lstPartInfo[nPartionIndex].endframe = pPartionInfo->modelSpaceFrame; + // 根据开始帧时间戳和结束帧时间错 计算当节车厢的行车速度 + // 根据时间戳计算时间差 + + + float nTimePassed = 
(abs(lstPartInfo[nPartionIndex].i64EndTimeStamp - lstPartInfo[nPartionIndex].i64StartTimeStamp)) * 1.0; + //防止停车导致速度过小 + if(pPartionInfo->nStatus != TRAIN_PAUSE && nTimePassed <= 50000) { + lstPartInfo[nPartionIndex].fspeed = (TRAIN_WIDTH * 1000.0) /nTimePassed; + } else { + if (nPartionIndex >= 1){ + lstPartInfo[nPartionIndex].fspeed = lstPartInfo[nPartionIndex - 1].fspeed / 3; } else { - if (nPartionIndex >= 1){ - lstPartInfo[nPartionIndex].fspeed = lstPartInfo[nPartionIndex - 1].fspeed / 3; - } else { - lstPartInfo[nPartionIndex].fspeed = TRAIN_DEFAULT_SPEED / 10; - } + lstPartInfo[nPartionIndex].fspeed = TRAIN_DEFAULT_SPEED / 10; } - - // - //nSamePartionIgnoreCount = (nTimePassed / (3 * 5000)) * nFrameRate; - // 结束帧为当前帧再往后 (除以2的原因:中间为车钩,车钩后的车体宽度为整个镜头的宽度除以2) - //lstPartInfo[nPrePartionIndex].endframe = pPartionInfo->modelSpaceFrame; - //LogInfo << "TrainAnaEngine checkPartion bPartion == true lstPartInfo[nPrePartionIndex].endframe" << lstPartInfo[nPrePartionIndex].endframe; - lstPartInfo[nPartionIndex].bmodelconfirmed = true; } - + // + //nSamePartionIgnoreCount = (nTimePassed / (3 * 5000)) * nFrameRate; + // 结束帧为当前帧再往后 (除以2的原因:中间为车钩,车钩后的车体宽度为整个镜头的宽度除以2) + lstPartInfo[nPartionIndex].bmodelconfirmed = true; + /// write json info to file //先读取文本内容,追加新的信息后再写入 @@ -202,133 +196,57 @@ APP_ERROR TrainParationMgr::Process() Json::Value jvPartionInfo; //JSON保存路径 std::string strFilePath; - bool brightcome = false; - int nrightoffset = 0; - - if (pPartionInfo->nStatus == 1) { - brightcome = true; - // nrightoffset = -1; - } //检测到车厢划分信息 - { - - // if (nPartionIndex == 0) { - // lstPartInfo[nPartionIndex].endframe = lstPartInfo[nPartionIndex].endframe - nrightoffset * (lstPartInfo[nPartionIndex].fLTX - METHOD_BASE_WIDTH) / 10; - // } else { - // lstPartInfo[nPartionIndex].endframe = lstPartInfo[nPartionIndex].endframe - nrightoffset * getCouplerOffsetPix(lstPartInfo[nPartionIndex].fspeed, lstPartInfo[nPartionIndex].endframe); - // } - //lstPartInfo[nPartionIndex].endframe = 
lstPartInfo[nPartionIndex].endframe + getOffsetFrame(lstPartInfo[nPartionIndex].fspeed, (TRAIN_IN_CAMERA_WIDTH / 2), nFrameRate); - strFilePath = strResultPath_ + pPartionInfo->strTrainDate + "/" + pPartionInfo->strTrainName + "/" - + std::to_string(nPartionIndex + 1) + ".txt"; + strFilePath = strResultPath_ + pPartionInfo->strTrainDate + "/" + pPartionInfo->strTrainName + "/" + + std::to_string(nPartionIndex + 1) + ".txt"; - // 首部车钩的偏移位置 (单位帧) - int headpos = 0; - // 尾部车钩的偏移位置 (单位帧) - int tailpos = (0 - nTailPixOffset); + LogInfo << "TrainAnaEngine Process lstPartInfo[nPartionIndex].startframe:" << lstPartInfo[nPartionIndex].startframe ; + LogInfo << "TrainAnaEngine Process lstPartInfo[nPartionIndex].endframe:" << lstPartInfo[nPartionIndex].endframe; - //if (nPartionIndex == 0) - { - headpos = METHOD_BASE_WIDTH / 2; - tailpos = tailpos + headpos; - } - // 是否位右侧来车 - - if (brightcome == true) - { - //brightcome = true; - // 右侧来车 首部车钩从画面最右侧开始 - headpos = METHOD_BASE_WIDTH / 2; - // 右侧来车 尾部车钩从画面最右侧+车厢宽的像素值 - tailpos = headpos + nTailPixOffset; - /* - if (nPartionIndex == 0) - { - headpos = METHOD_BASE_WIDTH / 2; - tailpos = tailpos - headpos; - } - */ - } - - LogInfo << "TrainAnaEngine Process lstPartInfo[nPartionIndex].startframe:" << lstPartInfo[nPartionIndex].startframe ; - LogInfo << "TrainAnaEngine Process lstPartInfo[nPartionIndex].endframe:" << lstPartInfo[nPartionIndex].endframe; - //从当节车厢的开始帧到结束帧计算首部车钩和尾部车钩的偏移值 - // for (int nplayframe = lstPartInfo[nPartionIndex].startframe; nplayframe <= lstPartInfo[nPartionIndex].endframe; nplayframe++) - // { - // Json::Value jvposInfo; - // // 当前车厢的第几几帧 - // int noffsetindex = (nplayframe - lstPartInfo[nPartionIndex].startframe); - // // 根据车速计算车钩位置量(单位 像素) - // int noffsetpos = getCouplerOffsetPosition(lstPartInfo[nPartionIndex].fspeed, noffsetindex); - // // 初始化首部车钩偏移量(单位 像素) - // jvposInfo["headpos"] = -1; - // // 初始化尾部车钩偏移量(单位 像素) - // jvposInfo["tailpos"] = -1; - - // if (brightcome == false) { - // // 左侧来车 - // // 
首部车钩和尾部车钩 每帧加 车钩偏移值 - // jvposInfo["headpos"] = (headpos + noffsetpos); - // jvposInfo["tailpos"] = (tailpos + noffsetpos); - // } else { - // // 右侧来车 - // // 首部车钩和尾部车钩 每帧减 车钩偏移值 - // jvposInfo["headpos"] = (headpos - noffsetpos); - // jvposInfo["tailpos"] = (tailpos - noffsetpos); - // } - // //LogInfo << "TrainAnaEngine Process jvposInfo[headpos]" << jvposInfo["headpos"]; - // // LogInfo << "TrainAnaEngine Process jvposInfo[tailpos]:" << jvposInfo["tailpos"]; - // //LogInfo << "TrainAnaEngine Process jvPartionListInfo.append"; - // jvPartionInfo[std::to_string(nplayframe)] = jvposInfo; - // } - - PartionInfo stTempInfo; - // 开始记录新的一节车厢信息(从索引变成序号+1 ,新增一节车厢信息+1) - stTempInfo.nindex = nPartionIndex + 2; - // 上一节车厢的结束帧 - (偏移帧 = (镜头内的车体宽度/ (速度) -> 通过时间) * 帧/秒 ) 作为下一节车厢的开始帧 - int ntempOffsetFrame = lstPartInfo[nPartionIndex].endframe; - // - //- (int)(((TRAIN_IN_CAMERA_WIDTH / 2) / lstPartInfo[nPartionIndex].fspeed) * nFrameRate); - //LogInfo << "TrainAnaEngine Process ntempOffsetFrame:" << ntempOffsetFrame; - stTempInfo.startframe = ntempOffsetFrame; - stTempInfo.i64StartTimeStamp = pPartionInfo->i64EndTimeStamp; - // 初始化下一节的结束帧 - //stTempInfo.endframe = 0; + PartionInfo stTempInfo; + // 开始记录新的一节车厢信息(从索引变成序号+1 ,新增一节车厢信息+1) + stTempInfo.nindex = nPartionIndex + 2; + // 上一节车厢的结束帧 - (偏移帧 = (镜头内的车体宽度/ (速度) -> 通过时间) * 帧/秒 ) 作为下一节车厢的开始帧 + int ntempOffsetFrame = lstPartInfo[nPartionIndex].endframe; + stTempInfo.startframe = ntempOffsetFrame; + stTempInfo.i64StartTimeStamp = pPartionInfo->i64EndTimeStamp; + // 初始化下一节的结束帧 + //stTempInfo.endframe = 0; - lstPartInfo.push_back(stTempInfo); + lstPartInfo.push_back(stTempInfo); - // 记录过车日期 - jvPartionInfo["trainDate"] = pPartionInfo->strTrainDate; - // 记录过车时间 - jvPartionInfo["trainName"] = pPartionInfo->strTrainName; - // 记录车厢节数 (索引从0开始 所以这里+1) - jvPartionInfo["trainNo"] = nPartionIndex + 1; - // 记录行车开始帧 - jvPartionInfo["startFrameId"] = lstPartInfo[nPartionIndex].startframe; - jvPartionInfo["startTimeStamp"] = 
lstPartInfo[nPartionIndex].i64StartTimeStamp; - // 记录行车结束帧 - jvPartionInfo["endFrameId"] = lstPartInfo[nPartionIndex].endframe; - jvPartionInfo["endTimeStamp"] = lstPartInfo[nPartionIndex].i64EndTimeStamp; - // 记录车厢是否完全通过 - jvPartionInfo["isEnd"] = pPartionInfo->bIsEnd; + // 记录过车日期 + jvPartionInfo["trainDate"] = pPartionInfo->strTrainDate; + // 记录过车时间 + jvPartionInfo["trainName"] = pPartionInfo->strTrainName; + // 记录车厢节数 (索引从0开始 所以这里+1) + jvPartionInfo["trainNo"] = nPartionIndex + 1; + // 记录行车开始帧 + jvPartionInfo["startFrameId"] = lstPartInfo[nPartionIndex].startframe; + jvPartionInfo["startTimeStamp"] = lstPartInfo[nPartionIndex].i64StartTimeStamp; + // 记录行车结束帧 + jvPartionInfo["endFrameId"] = lstPartInfo[nPartionIndex].endframe; + jvPartionInfo["endTimeStamp"] = lstPartInfo[nPartionIndex].i64EndTimeStamp; + // 记录车厢是否完全通过 + jvPartionInfo["isEnd"] = pPartionInfo->bIsEnd; - //是否是间隔模型切分的车厢 - jvPartionInfo["modelconfirmed"] = pPartionInfo->bmodelconfirmed; + //是否是间隔模型切分的车厢 + jvPartionInfo["modelconfirmed"] = pPartionInfo->bmodelconfirmed; - // 记录当前车厢的信息到JSON文件 - MyUtils::getins()->WriteJsonInfo(jvPartionInfo, strFilePath); - std::shared_ptr pTrainRange = std::make_shared(); - pTrainRange->strTrainDate = jvPartionInfo["trainDate"].asString(); - pTrainRange->strTrainName = jvPartionInfo["trainName"].asString(); - pTrainRange->iTrainIndex = jvPartionInfo["trainNo"].asInt(); - pTrainRange->iStartFrameId = jvPartionInfo["startFrameId"].asInt(); - pTrainRange->i64StartTimeStamp = jvPartionInfo["startTimeStamp"].asInt64(); - pTrainRange->iEndFrameId = jvPartionInfo["endFrameId"].asInt(); - pTrainRange->i64EndTimeStamp = jvPartionInfo["endTimeStamp"].asInt64(); - pTrainRange->bIsEnd = jvPartionInfo["isEnd"].asBool(); - pTrainRange->bmodelconfirmed = jvPartionInfo["modelconfirmed"].asBool(); - iRet = outputQueMap_[strPort0_]->push(std::static_pointer_cast(pTrainRange)); - } + // 记录当前车厢的信息到JSON文件 + MyUtils::getins()->WriteJsonInfo(jvPartionInfo, strFilePath); + std::shared_ptr 
pTrainRange = std::make_shared(); + pTrainRange->strTrainDate = jvPartionInfo["trainDate"].asString(); + pTrainRange->strTrainName = jvPartionInfo["trainName"].asString(); + pTrainRange->iTrainIndex = jvPartionInfo["trainNo"].asInt(); + pTrainRange->iStartFrameId = jvPartionInfo["startFrameId"].asInt(); + pTrainRange->i64StartTimeStamp = jvPartionInfo["startTimeStamp"].asInt64(); + pTrainRange->iEndFrameId = jvPartionInfo["endFrameId"].asInt(); + pTrainRange->i64EndTimeStamp = jvPartionInfo["endTimeStamp"].asInt64(); + pTrainRange->bIsEnd = jvPartionInfo["isEnd"].asBool(); + pTrainRange->bmodelconfirmed = jvPartionInfo["modelconfirmed"].asBool(); + iRet = outputQueMap_[strPort0_]->push(std::static_pointer_cast(pTrainRange)); if (pPartionInfo->bIsEnd) { lstPartInfo.clear(); diff --git a/nvidia_ascend_engine/common_engine/TransEngine/TransTrainEngine.cpp b/nvidia_ascend_engine/common_engine/TransEngine/TransTrainEngine.cpp index 16ee621..c66fa42 100644 --- a/nvidia_ascend_engine/common_engine/TransEngine/TransTrainEngine.cpp +++ b/nvidia_ascend_engine/common_engine/TransEngine/TransTrainEngine.cpp @@ -101,7 +101,7 @@ void TransTrainEngine::InitParam() */ bool TransTrainEngine::AuthTransNum(int classId, const std::string &trainNum) { - LogInfo << "classId:" << classId << " trainNum:" << trainNum; +// LogInfo << "classId:" << classId << " trainNum:" << trainNum; switch (classId) { case TRAIN_HEAD: // 车头上的编号 @@ -774,8 +774,8 @@ APP_ERROR TransTrainEngine::Process() { strTemp += vecClassNames_.at(it->second.at(j).iClassId); } - LogDebug << "step2 char sourceid:" << pProcessData->iDataSource << " frameid:" << pProcessData->iFrameId - << " bigclassId:" << postSubData.iBigClassId << " line:" << it->first << "," << strTemp; +// LogDebug << "step2 char sourceid:" << pProcessData->iDataSource << " frameid:" << pProcessData->iFrameId +// << " bigclassId:" << postSubData.iBigClassId << " line:" << it->first << "," << strTemp; } TransSubData transSubData; diff --git 
a/nvidia_ascend_engine/nvidia_engine/DataSourceEngine/TestImgEngine.cpp b/nvidia_ascend_engine/nvidia_engine/DataSourceEngine/TestImgEngine.cpp deleted file mode 100644 index a47d1b8..0000000 --- a/nvidia_ascend_engine/nvidia_engine/DataSourceEngine/TestImgEngine.cpp +++ /dev/null @@ -1,146 +0,0 @@ -#include "TestImgEngine.h" - -#include -#include -#include -#include -#include -#include -#include -#include -#include - -using namespace std; -using namespace ai_matrix; - -TestImgEngine::TestImgEngine() {} -TestImgEngine::~TestImgEngine() {} - - -APP_ERROR TestImgEngine::Init() -{ - strPort0_ = engineName_ + "_" + std::to_string(engineId_) + "_0"; - dataSourceConfig_ = MyYaml::GetIns()->GetDataSourceConfigById(engineId_); //获取摄像机参数 - - width_ = IMAGE_WIDTH, height_ = IMAGE_HEIGHT; - - LogInfo << "engineId_:" << engineId_ << " TestImgEngine Init ok"; - return APP_ERR_OK; -} - - -APP_ERROR TestImgEngine::DeInit() -{ - LogInfo << "engineId_:" << engineId_ << " TestImgEngine DeInit ok"; - return APP_ERR_OK; - -} - -//测试jpeg解码时打开,并修改相应的yaml配置引擎间通信 -#if 0 -APP_ERROR TestImgEngine::Process() -{ - int iRet = APP_ERR_OK; - uint64_t u64count_num = 0; - while (!isStop_) - { - // std::cout<<"Enter Read Image Thread "<<++u64count_num<<" Times!"<GetStringValue("jpeg_image_file_name"); - - //从本地文件读取jpg图像并构造jpeg数据 - void* pJPEGBuffer = nullptr; - - FILE *jpeg_fp; - jpeg_fp = fopen(jpeg_img_file_name.c_str(), "r"); - if (!jpeg_fp) - { - std::cerr<<"Can not open "< pJPEGFrameData = std::make_shared(); - - //组织数据,压入下一引擎 - pJPEGFrameData->iDataSource = engineId_; - pJPEGFrameData->iSize = pJPEGBuffer_Size; - pJPEGFrameData->pData.reset(pJPEGBuffer, [](void* data){if(data) {delete[] data; data = nullptr;}}); //智能指针管理内存 - // pJPEGFrameData->pData.reset(pJPEGBuffer, Deleter); //智能指针管理内存 - pJPEGFrameData->i64TimeStamp = MyUtils::getins()->GetCurrentTimeMillis(); - - #if 1 - iRet = outputQueMap_[strPort0_]->push(std::static_pointer_cast(pJPEGFrameData)); - if (iRet != APP_ERR_OK){ - LogError 
<< "push the jpeg image data failed..."; - std::cerr<<"push the jpeg image data failed..."<GetStringValue("yuv420m_image_file_name"); - - //从本地文件读取yuv420m图像并构造yuv420m数据 - void* pYUV420MBuffer = nullptr; - - FILE *yuv420m_fp; - yuv420m_fp = fopen(yuv420m_img_file_name.c_str(), "rb"); - if (!yuv420m_fp) - { - std::cerr<<"Can not open "< -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include - -#include "EngineBase.h" -#include "EngineFactory.h" -#include "MyYaml.h" -#include "myutils.h" -#include "AppCommon.h" - -class TestImgEngine : public ai_matrix::EngineBase -{ -public: - TestImgEngine(); - ~TestImgEngine(); - - APP_ERROR Init() override; - APP_ERROR DeInit() override; - APP_ERROR Process() override; - -private: - ai_matrix::DataSourceConfig dataSourceConfig_; - std::string strPort0_; - unsigned int width_, height_; -}; - - -ENGINE_REGIST(TestImgEngine) - - -#endif //END OF _TEST_IMG_ENGINE_H \ No newline at end of file diff --git a/nvidia_ascend_engine/nvidia_engine/DataSourceEngine/VideoEngine.cpp b/nvidia_ascend_engine/nvidia_engine/DataSourceEngine/VideoEngine.cpp deleted file mode 100644 index bfc6db1..0000000 --- a/nvidia_ascend_engine/nvidia_engine/DataSourceEngine/VideoEngine.cpp +++ /dev/null @@ -1,109 +0,0 @@ -#include "VideoEngine.h" - -using namespace std; -using namespace cv; -using namespace ai_matrix; - -VideoEngine::VideoEngine() {} -VideoEngine::~VideoEngine() {} - - -APP_ERROR VideoEngine::Init() -{ - LogInfo << "engineId_:" << engineId_ << " VideoEngine Init start"; - - strPort0_ = engineName_ + "_" + std::to_string(engineId_) + "_0"; - dataSourceConfig_ = MyYaml::GetIns()->GetDataSourceConfigById(engineId_); //获取摄像机参数 - - width_ = IMAGE_WIDTH, height_ = IMAGE_HEIGHT; - - LogInfo << "engineId_:" << engineId_ << " VideoEngine Init ok"; - return APP_ERR_OK; -} - - -APP_ERROR VideoEngine::DeInit() -{ - LogInfo << "engineId_:" << engineId_ << " VideoEngine DeInit ok"; - return 
APP_ERR_OK; -} - -APP_ERROR VideoEngine::Process() -{ - int iRet = APP_ERR_OK; - uint64_t u64count_num = 0; - - // if (MyYaml::GetIns()->GetStringValue("gc_data_source") != "camera") - // { - // LogDebug << "engineId_:" << engineId_ << " gc_data_source no camera"; - // return iRet; - // } - - VideoCapture capture; - - /***************************************************************************************** - Gstream解码 - 硬件解码方式:1.nvv4l2decoder 2.omxh264dec - 使用nvv4l2decoder解码时enable-max-performance和enable-frame-type-reporting才可以使用 - enable-max-performance=1 开启最大效率模式 - enable-frame-type-reporting=1 使能帧数据汇报模式 - *****************************************************************************************/ - - //从摄像头RTSP拉流 - const std::string videoStreamAddress = std::string("rtspsrc location=") + dataSourceConfig_.strUrl.c_str() + " latency=10 ! \ - rtph264depay ! h264parse ! nvv4l2decoder enable-max-performance=1 enable-frame-type-reporting=1 ! nvvidconv ! video/x-raw, format=(string)BGRx ! videoconvert ! appsink"; - - // while(!capture.open(dataSourceConfig_.strUrl.c_str())){ - while(!capture.open(videoStreamAddress)){ - std::cerr<<"Opening video stream or file failed!!!" 
< pBGRFrameData = std::make_shared(); - - cv::Mat frame(frameH, frameW, CV_8UC3, pBGRBuffer); - - // clock_t start, end; - // start = clock(); - if(!capture.read(frame)) { - std::cerr << "no frame" << std::endl; - waitKey(); - } - // end = clock(); - // printf("read 1 frame time is %.8f ms\n", (double)(end-start)/CLOCKS_PER_SEC*1000); - - //压入OpenCV RTSP所拉的H264解码BRG后的数据 - //组织数据 - pBGRFrameData->iDataSource = engineId_; - pBGRFrameData->iSize = pBGRBuffer_Size; - pBGRFrameData->pData.reset(pBGRBuffer, [](void* data){if(data) {delete[] data; data = nullptr;}}); //智能指针管理内存 - // pBGRFrameData->pData.reset(pBGRBuffer, Deleter); //智能指针管理内存 - pBGRFrameData->i64TimeStamp = MyUtils::getins()->GetCurrentTimeMillis(); - - iRet = outputQueMap_[strPort0_]->push(std::static_pointer_cast(pBGRFrameData)); - if (iRet != APP_ERR_OK){ - LogError << "push the bgr frame data failed..."; - std::cerr<<"push the bgr frame data failed..."< -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include - -#include -#include -#include -#include - -#ifdef __cplusplus -extern "C" -{ -#endif -#include -#include -#include -#ifdef __cplusplus -}; -#endif - -#include "EngineBase.h" -#include "EngineFactory.h" -#include "MyYaml.h" -#include "myutils.h" -#include "AppCommon.h" - -#define RTSP_PULL_CAMERA_VIDEO_STREAM - -class VideoEngine : public ai_matrix::EngineBase -{ -public: - VideoEngine(); - ~VideoEngine(); - - APP_ERROR Init() override; - APP_ERROR DeInit() override; - APP_ERROR Process() override; - -private: - ai_matrix::DataSourceConfig dataSourceConfig_; - std::string strPort0_; - unsigned int width_, height_; -}; - -ENGINE_REGIST(VideoEngine) - -#endif //_VIDEO_ENGINE_H \ No newline at end of file diff --git a/nvidia_ascend_engine/nvidia_engine/DecodeEngine/HardH264FFmpegDecode.cpp 
b/nvidia_ascend_engine/nvidia_engine/DecodeEngine/HardH264FFmpegDecode.cpp index b01d69a..ac45241 100644 --- a/nvidia_ascend_engine/nvidia_engine/DecodeEngine/HardH264FFmpegDecode.cpp +++ b/nvidia_ascend_engine/nvidia_engine/DecodeEngine/HardH264FFmpegDecode.cpp @@ -2,54 +2,54 @@ using namespace std; -HardH264FFmpegDecode::HardH264FFmpegDecode() +HardH264FFmpegDecode::HardH264FFmpegDecode() { - ; + ; } -HardH264FFmpegDecode::~HardH264FFmpegDecode() +HardH264FFmpegDecode::~HardH264FFmpegDecode() { - ; + ; } int HardH264FFmpegDecode::HardH264FFmpegDecoderInit(unsigned int uiWidth, unsigned int uiHeight, unsigned int uiFrameRate) { - uiWidth_ = uiWidth; uiHeight_ = uiHeight; - uiFrameRate_ = uiFrameRate; - iFrameFinished_ = 0; + uiWidth_ = uiWidth; uiHeight_ = uiHeight; + uiFrameRate_ = uiFrameRate; + iFrameFinished_ = 0; - av_log_set_level(AV_LOG_ERROR); - - // AVCodecID codec_id = AV_CODEC_ID_H264; //解码H264 - // pCodec_ = avcodec_find_decoder(codec_id); //获取解码器 + av_log_set_level(AV_LOG_ERROR); - pCodec_ = avcodec_find_decoder_by_name(NVIDIA_H264_DECODER); + // AVCodecID codec_id = AV_CODEC_ID_H264; //解码H264 + // pCodec_ = avcodec_find_decoder(codec_id); //获取解码器 + + pCodec_ = avcodec_find_decoder_by_name(NVIDIA_H264_DECODER); if (!pCodec_) { fprintf(stderr, "Codec '%s' not found\n", pCodec_->long_name); exit(1); } - printf("Codec found with name %d(%s)\n", pCodec_->id, pCodec_->long_name); + printf("Codec found with name %d(%s)\n", pCodec_->id, pCodec_->long_name); - //创建上下文 - pCodecCtx_ = avcodec_alloc_context3(pCodec_); + //创建上下文 + pCodecCtx_ = avcodec_alloc_context3(pCodec_); if (!pCodecCtx_){ fprintf(stderr, "Could not allocate video codec context\n"); exit(1); } - - //创建解析器 - pCodecParserCtx_ = av_parser_init(pCodec_->id); - if (!pCodecParserCtx_){ + + //创建解析器 + pCodecParserCtx_ = av_parser_init(pCodec_->id); + if (!pCodecParserCtx_){ fprintf(stderr, "parser not found\n"); exit(1); - } - + } + //if(pCodec_->capabilities&CODEC_CAP_TRUNCATED) - // 
pCodecCtx_->flags|= CODEC_FLAG_TRUNCATED; - - //打开解码器 + // pCodecCtx_->flags|= CODEC_FLAG_TRUNCATED; + + //打开解码器 int ret = avcodec_open2(pCodecCtx_, pCodec_, nullptr); - if (ret < 0) { + if (ret < 0) { fprintf(stderr, "Could not open codec\n"); printf("avcodec_open2 ret is: %d\n",ret); exit(1); @@ -63,7 +63,7 @@ int HardH264FFmpegDecode::HardH264FFmpegDecoderInit(unsigned int uiWidth, unsign } // av_init_packet(pPacket_); - //分配frame + //分配frame pSrcFrame_ = av_frame_alloc(); if (!pSrcFrame_) { fprintf(stderr, "Could not allocate video src pFrame\n"); @@ -78,14 +78,14 @@ int HardH264FFmpegDecode::HardH264FFmpegDecoderInit(unsigned int uiWidth, unsign printf("after align down video_width: %d, video_height: %d\n", uiWidth_, uiHeight_); - //初始化解析器参数 - pCodecCtx_->time_base.num = 1; - pCodecCtx_->frame_number = 1; //每包一个视频帧 - pCodecCtx_->codec_type = AVMEDIA_TYPE_VIDEO; - pCodecCtx_->bit_rate = 0; - pCodecCtx_->time_base.den = uiFrameRate_;//帧率 - pCodecCtx_->width = uiWidth_; //视频宽 - pCodecCtx_->height = uiHeight_; //视频高 + //初始化解析器参数 + pCodecCtx_->time_base.num = 1; + pCodecCtx_->frame_number = 1; //每包一个视频帧 + pCodecCtx_->codec_type = AVMEDIA_TYPE_VIDEO; + pCodecCtx_->bit_rate = 0; + pCodecCtx_->time_base.den = uiFrameRate_;//帧率 + pCodecCtx_->width = uiWidth_; //视频宽 + pCodecCtx_->height = uiHeight_; //视频高 // pCodecCtx_->pix_fmt = AV_PIX_FMT_YUV420P; int bufferSize = av_image_get_buffer_size(AV_PIX_FMT_YUV420P, @@ -102,46 +102,46 @@ int HardH264FFmpegDecode::HardH264FFmpegDecoderInit(unsigned int uiWidth, unsign printf("pDstFrame_->linesize: %d, bufferSize: %d\n", pDstFrame_->linesize, bufferSize); pSwsContext_ = sws_getContext(pCodecCtx_->width, pCodecCtx_->height, pCodecCtx_->pix_fmt, - pCodecCtx_->width, pCodecCtx_->height, AV_PIX_FMT_YUV420P, SWS_BICUBIC, nullptr, nullptr, nullptr); + pCodecCtx_->width, pCodecCtx_->height, AV_PIX_FMT_YUV420P, SWS_BICUBIC, nullptr, nullptr, nullptr); printf("pCodecCtx_->width: %d, pCodecCtx_->height: %d, pCodecCtx_->pix_fmt: %d\n", 
pCodecCtx_->width, pCodecCtx_->height, pCodecCtx_->pix_fmt); - return 0; + return 0; } int HardH264FFmpegDecode::HardH264FFmpegDecoderDeInit() { - if(pu8OutBuffer_){ + if(pu8OutBuffer_){ av_free(pu8OutBuffer_); pu8OutBuffer_ = nullptr; } - if(pSrcFrame_){ - av_frame_free(&pSrcFrame_); - pSrcFrame_ = nullptr; - } - if(pDstFrame_){ - av_frame_free(&pDstFrame_); - pDstFrame_ = nullptr; - } - if(pPacket_){ - av_packet_free(&pPacket_); + if(pSrcFrame_){ + av_frame_free(&pSrcFrame_); + pSrcFrame_ = nullptr; + } + if(pDstFrame_){ + av_frame_free(&pDstFrame_); + pDstFrame_ = nullptr; + } + if(pPacket_){ + av_packet_free(&pPacket_); pPacket_ = nullptr; } - if(pCodecParserCtx_){ - av_parser_close(pCodecParserCtx_); - pCodecParserCtx_ = nullptr; - } - if(pCodecCtx_){ - avcodec_close(pCodecCtx_); - av_free(pCodecCtx_); - pCodecCtx_ = nullptr; - } + if(pCodecParserCtx_){ + av_parser_close(pCodecParserCtx_); + pCodecParserCtx_ = nullptr; + } + if(pCodecCtx_){ + avcodec_close(pCodecCtx_); + av_free(pCodecCtx_); + pCodecCtx_ = nullptr; + } - if(pSwsContext_){ - sws_freeContext(pSwsContext_); - pSwsContext_ = nullptr; - } + if(pSwsContext_){ + sws_freeContext(pSwsContext_); + pSwsContext_ = nullptr; + } } int HardH264FFmpegDecode::HardH264FFmpegDecoderFilterGraph(AVFilterGraph *pGraph, AVFilterContext *pSourceCtx, AVFilterContext *pSinkCtx) @@ -149,7 +149,7 @@ int HardH264FFmpegDecode::HardH264FFmpegDecoderFilterGraph(AVFilterGraph *pGraph int ret; AVFilterInOut *pOutputs = nullptr, *pInputs = nullptr; if ((ret = avfilter_link(pSourceCtx, 0, pSinkCtx, 0)) >= 0){ - ret = avfilter_graph_config(pGraph, nullptr); + ret = avfilter_graph_config(pGraph, nullptr); } avfilter_inout_free(&pOutputs); @@ -168,14 +168,14 @@ int HardH264FFmpegDecode::HardH264FFmpegDecoderConfigureVideoFilters(AVFilterGra "video_size=%dx%d:pix_fmt=%d:time_base=1/1200000", iWidth, iHeight, iFormat); if ((ret = avfilter_graph_create_filter(&pFiltSrc, - avfilter_get_by_name("buffer"), "ffplay_buffer", BufferSrcArgs, 
- nullptr, pGraph)) < 0){ + avfilter_get_by_name("buffer"), "ffplay_buffer", BufferSrcArgs, + nullptr, pGraph)) < 0){ goto fail; } ret = avfilter_graph_create_filter(&pFiltDst, - avfilter_get_by_name("buffersink"), - "ffplay_buffersink", nullptr, nullptr, pGraph); + avfilter_get_by_name("buffersink"), + "ffplay_buffersink", nullptr, nullptr, pGraph); if (ret < 0){ goto fail; } @@ -190,14 +190,14 @@ int HardH264FFmpegDecode::HardH264FFmpegDecoderConfigureVideoFilters(AVFilterGra pDecoderFilterIn = pFiltSrc; pDecoderFilterOut = pFiltDst; -fail: + fail: return ret; } int HardH264FFmpegDecode::HardH264FFmpegDecoder(AVCodecContext *pDecCtx, AVFrame *pFrame, AVPacket *pPkt, void* pOutputData, unsigned int* puiOutputDataSize) { - int ret; - AVFilterGraph* pDecoderGraph = nullptr; + int ret; + AVFilterGraph* pDecoderGraph = nullptr; ret = avcodec_send_packet(pDecCtx, pPkt); //接收packet解码 if (ret < 0) { @@ -208,7 +208,7 @@ int HardH264FFmpegDecode::HardH264FFmpegDecoder(AVCodecContext *pDecCtx, AVFrame while (ret >= 0) { ret = avcodec_receive_frame(pDecCtx, pFrame); //解码 if (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF){ - fprintf(stderr, "During decoding eof\n"); + fprintf(stderr, "During decoding eof\n"); return -1; } else if (ret < 0) { @@ -219,35 +219,35 @@ int HardH264FFmpegDecode::HardH264FFmpegDecoder(AVCodecContext *pDecCtx, AVFrame //printf("saving frame %3d\n", pDecCtx->frame_number); fflush(stdout); - AVFilterContext *pDecoderFilterIn = nullptr, *pDecoderFilterOut = nullptr; + AVFilterContext *pDecoderFilterIn = nullptr, *pDecoderFilterOut = nullptr; // pFrame->width = ALIGN_DOWN(pFrame->width, 32); // pFrame->height = ALIGN_DOWN(pFrame->height, 32); // printf("pFrame->width: %d\tpFrame->height: %d\n", pFrame->width, pFrame->height); - + pDecoderGraph = avfilter_graph_alloc(); HardH264FFmpegDecoderConfigureVideoFilters(pDecoderGraph, pDecoderFilterIn, pDecoderFilterOut, pFrame->width, pFrame->height, pFrame->format); - if (pFrame->format != AV_PIX_FMT_YUV420P){ + 
if (pFrame->format != AV_PIX_FMT_YUV420P){ DUMP_FRAME(pFrame); - ret = av_buffersrc_add_frame(pDecoderFilterIn, pFrame); + ret = av_buffersrc_add_frame(pDecoderFilterIn, pFrame); ret = av_buffersink_get_frame_flags(pDecoderFilterOut, pFrame, 0); DUMP_FRAME(pFrame); - - int iSize = pFrame->width * pFrame->height; - memcpy(pOutputData, pFrame->data[0], iSize); //Y - memcpy(pOutputData+iSize, pFrame->data[1], iSize/4); //U - memcpy(pOutputData+iSize+iSize/4, pFrame->data[2], iSize/4); //V - *puiOutputDataSize = iSize*3/2; - return iSize*3/2; - } - } - return 0; + + int iSize = pFrame->width * pFrame->height; + memcpy(pOutputData, pFrame->data[0], iSize); //Y + memcpy(pOutputData+iSize, pFrame->data[1], iSize/4); //U + memcpy(pOutputData+iSize+iSize/4, pFrame->data[2], iSize/4); //V + *puiOutputDataSize = iSize*3/2; + return iSize*3/2; + } + } + return 0; } int HardH264FFmpegDecode::HardH264FFmpegDecoderV2(AVCodecContext *pDecCtx, SwsContext *pSwsCtx, AVFrame *pSrcFrame, AVFrame *pDstFrame, AVPacket *pPkt, void* pOutputData,unsigned int* puiOutputDataSize) { - int ret; + int ret; ret = avcodec_send_packet(pDecCtx, pPkt); //接收packet解码 if (ret < 0) { @@ -258,7 +258,7 @@ int HardH264FFmpegDecode::HardH264FFmpegDecoderV2(AVCodecContext *pDecCtx, SwsCo while (ret >= 0) { ret = avcodec_receive_frame(pDecCtx, pSrcFrame); //解码 if (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF){ - fprintf(stderr, "During decoding eof\n"); + fprintf(stderr, "During decoding eof\n"); return -1; } else if (ret < 0) { @@ -266,7 +266,7 @@ int HardH264FFmpegDecode::HardH264FFmpegDecoderV2(AVCodecContext *pDecCtx, SwsCo exit(1); } - // pDecCtx->width = ALIGN_DOWN(pDecCtx->width, 32); + // pDecCtx->width = ALIGN_DOWN(pDecCtx->width, 32); // pDecCtx->height = ALIGN_DOWN(pDecCtx->height, 32); sws_scale(pSwsCtx, @@ -280,13 +280,13 @@ int HardH264FFmpegDecode::HardH264FFmpegDecoderV2(AVCodecContext *pDecCtx, SwsCo //printf("saving frame %3d\n", pDecCtx->frame_number); fflush(stdout); - int iSize = 
pDecCtx->width * pDecCtx->height; + int iSize = pDecCtx->width * pDecCtx->height; - memcpy(pOutputData, pDstFrame->data[0], iSize); //Y - memcpy(pOutputData+iSize, pDstFrame->data[1], iSize/4); //U - memcpy(pOutputData+iSize+iSize/4, pDstFrame->data[2], iSize/4); //V - *puiOutputDataSize = iSize*3/2; - return iSize*3/2; - } - return 0; + memcpy(pOutputData, pDstFrame->data[0], iSize); //Y + memcpy(pOutputData+iSize, pDstFrame->data[1], iSize/4); //U + memcpy(pOutputData+iSize+iSize/4, pDstFrame->data[2], iSize/4); //V + *puiOutputDataSize = iSize*3/2; + return iSize*3/2; + } + return 0; } \ No newline at end of file diff --git a/nvidia_ascend_engine/nvidia_engine/DecodeEngine/HardH264FFmpegDecode.h b/nvidia_ascend_engine/nvidia_engine/DecodeEngine/HardH264FFmpegDecode.h index 39f5085..7f27ac9 100644 --- a/nvidia_ascend_engine/nvidia_engine/DecodeEngine/HardH264FFmpegDecode.h +++ b/nvidia_ascend_engine/nvidia_engine/DecodeEngine/HardH264FFmpegDecode.h @@ -56,7 +56,7 @@ extern "C" frame->linesize[2] \ );} -#define NVIDIA_H264_DECODER "h264_cuvid" +#define NVIDIA_H264_DECODER "h264_cuvid" // #define NVIDIA_H264_DECODER "h264_v4l2m2m" class HardH264FFmpegDecode @@ -69,21 +69,21 @@ public: int HardH264FFmpegDecoderDeInit(); int HardH264FFmpegDecoder(AVCodecContext *pDecCtx, AVFrame *pFrame, AVPacket *pPkt, void* pOutputData, unsigned int* puiOutputDataSize); int HardH264FFmpegDecoderV2(AVCodecContext *pDecCtx, SwsContext *pSwsCtx, AVFrame *pSrcFrame, AVFrame *pDstFrame, AVPacket *pPkt, void* pOutputData, unsigned int* puiOutputDataSize); - + const AVCodec *pCodec_ = nullptr; //解码器 AVCodecContext *pCodecCtx_ = nullptr; //上下文 - AVCodecParserContext *pCodecParserCtx_ = nullptr; //解析器上下文 - AVFrame *pSrcFrame_ = nullptr; - AVFrame *pDstFrame_ = nullptr; - AVPacket *pPacket_ = nullptr; - SwsContext *pSwsContext_ = nullptr; + AVCodecParserContext *pCodecParserCtx_ = nullptr; //解析器上下文 + AVFrame *pSrcFrame_ = nullptr; + AVFrame *pDstFrame_ = nullptr; + AVPacket *pPacket_ = 
nullptr; + SwsContext *pSwsContext_ = nullptr; - uint8_t *pu8OutBuffer_ = nullptr; + uint8_t *pu8OutBuffer_ = nullptr; private: int HardH264FFmpegDecoderFilterGraph(AVFilterGraph *pGraph, AVFilterContext *pSourceCtx, AVFilterContext *pSinkCtx); int HardH264FFmpegDecoderConfigureVideoFilters(AVFilterGraph *pGraph, AVFilterContext* &pDecoderFilterIn, AVFilterContext* &pDecoderFilterOut, const int iWidth, const int iHeight, const int iFormat); - + unsigned int uiWidth_, uiHeight_; int iFrameFinished_; diff --git a/nvidia_ascend_engine/nvidia_engine/DecodeEngine/VideoDecodeEngine.cpp b/nvidia_ascend_engine/nvidia_engine/DecodeEngine/VideoDecodeEngine.cpp index 79c3bf5..d2fe2b2 100644 --- a/nvidia_ascend_engine/nvidia_engine/DecodeEngine/VideoDecodeEngine.cpp +++ b/nvidia_ascend_engine/nvidia_engine/DecodeEngine/VideoDecodeEngine.cpp @@ -56,7 +56,7 @@ APP_ERROR VideoDecodeEngine::Process() } int iRet = APP_ERR_OK; - int iSkipCount = 1; + int iSkipCount = 1; int iNoCameraDataCnt = 0; while (!isStop_) { @@ -67,19 +67,19 @@ APP_ERROR VideoDecodeEngine::Process() { usleep(10*1000); //10ms - iNoCameraDataCnt++; - if (iNoCameraDataCnt >= 1000) //10秒内收不到,认为相机断开 - { - LogError << "engineId:" << engineId_ << " 超过10秒获取到摄像头数据,疑似摄像头断开。计数:" << iNoCameraDataCnt; - iNoCameraDataCnt = 0; - //camera异常时,构造空的解码数据push,确保一直有数据流转到后面Engine - std::shared_ptr pProcessData = std::make_shared(); - pProcessData->iDataSource = engineId_; - pProcessData->i64TimeStamp = MyUtils::getins()->GetCurrentTimeMillis(); - pProcessData->iSize = 0; - pProcessData->pData = nullptr; - iRet = outputQueMap_[strPort0_]->push(std::static_pointer_cast(pProcessData)); - } + // iNoCameraDataCnt++; + // if (iNoCameraDataCnt >= 1000) //10秒内收不到,认为相机断开 + // { + // LogError << "engineId:" << engineId_ << " 超过10秒获取到摄像头数据,疑似摄像头断开。计数:" << iNoCameraDataCnt; + // iNoCameraDataCnt = 0; + // //camera异常时,构造空的解码数据push,确保一直有数据流转到后面Engine + // std::shared_ptr pProcessData = std::make_shared(); + // pProcessData->iDataSource = 
engineId_; + // pProcessData->i64TimeStamp = MyUtils::getins()->GetCurrentTimeMillis(); + // pProcessData->iSize = 0; + // pProcessData->pData = nullptr; + // iRet = outputQueMap_[strPort0_]->push(std::static_pointer_cast(pProcessData)); + // } continue; } @@ -111,8 +111,8 @@ APP_ERROR VideoDecodeEngine::Process() std::shared_ptr pYUVData; pYUVData.reset(pYUV420MBuffer, [](void *data){if(data) {delete[] data; data = nullptr;}}); //智能指针管理内存 - hard_h264_ffmpeg_decoder_->pPacket_->data = static_cast(pProcessData->pData.get()); //这里填入一个指向完整H264数据帧的指针 - hard_h264_ffmpeg_decoder_->pPacket_->size = pProcessData->iSize; //这个填入H264数据帧的大小 + hard_h264_ffmpeg_decoder_->pPacket_->data = static_cast(pProcessData->pData.get()); //这里填入一个指向完整H264数据帧的指针 + hard_h264_ffmpeg_decoder_->pPacket_->size = pProcessData->iSize; //这个填入H264数据帧的大小 // H264硬件解码 // int iDecodeRet= hard_h264_ffmpeg_decoder_->HardH264FFmpegDecoderV2(hard_h264_ffmpeg_decoder_->pCodecCtx_, hard_h264_ffmpeg_decoder_->pFrame_, diff --git a/nvidia_ascend_engine/nvidia_engine/MoveEngine/MoveEngine.cpp b/nvidia_ascend_engine/nvidia_engine/MoveEngine/MoveEngine.cpp index 272c398..8aa1571 100644 --- a/nvidia_ascend_engine/nvidia_engine/MoveEngine/MoveEngine.cpp +++ b/nvidia_ascend_engine/nvidia_engine/MoveEngine/MoveEngine.cpp @@ -51,7 +51,7 @@ APP_ERROR MoveEngine::Init() } InitParam(); - + LogInfo << "MoveEngine Init ok"; return APP_ERR_OK; } @@ -111,13 +111,8 @@ APP_ERROR MoveEngine::ReadModelInfo() model_width = jvModelInfo["model_width"].asInt(); model_height = jvModelInfo["model_height"].asInt(); - //clear_num = jvModelInfo["clear"].isArray() ? jvModelInfo["clear"].size() : 0; - //class_num = jvModelInfo["class"].isArray() ? 
jvModelInfo["class"].size() : 0; input_size = GET_INPUT_SIZE(model_width , model_height); output_size = GET_OUTPUT_SIZE(model_width , model_height, clear_num , class_num); - // det_size = clear_num + class_num + 5; - // score_threshold = modelConfig_.fScoreThreshold; - // nms_threshold = modelConfig_.fNMSTreshold; return APP_ERR_OK; } @@ -146,8 +141,8 @@ void MoveEngine::InitParam() } void MoveEngine::sendComeTrain() { - // std::string message = "{\"cometime\":" + this->strTrainDate_ + " " + this->strTrainName_ + "\",\"type\":\"1\"}"; - // outputQueMap_[engineName_ + "_" + std::to_string(engineId_) + "_1"]->push(std::static_pointer_cast(std::make_shared(message))); + std::string message = "{\"cometime\":\"" + this->strTrainDate_ + " " + this->strTrainName_ + "\",\"type\":\"1\"}"; + outputQueMap_[engineName_ + "_" + std::to_string(engineId_) + "_1"]->push(std::static_pointer_cast(std::make_shared(message))); } void MoveEngine::sendEndTrain() { @@ -197,7 +192,7 @@ void MoveEngine::SingleDeviceProcess(std::shared_ptr pProcessData, { outputQueMap_[engineName_ + "_" + std::to_string(engineId_) + "_" + std::to_string(*iter)]->push(std::static_pointer_cast(pMoveData)); } - //通知第一步开始识别 + //通知第一步开始识别 outputQueMap_[engineName_ + "_" + std::to_string(engineId_) + "_5"]->push(std::static_pointer_cast(pMoveData)); } @@ -274,6 +269,7 @@ APP_ERROR MoveEngine::Process() memset(fReturnVal, 0x00, sizeof(fReturnVal)); yolov8model.YoloV8InferenceModelGetType(img, fReturnVal, STEP0_OUTPUT_ARRAY * sizeof(float)); + // exit(0); float fScore = 0.0f; for(int n = 0; n < 4; n++){ @@ -282,7 +278,7 @@ APP_ERROR MoveEngine::Process() nType = n; } } - LogDebug <<"模型得分 车头:"<< fReturnVal[0]<<" 无车:"<< fReturnVal[1]<<" 车尾:"<< fReturnVal[2]<<" 有车:"<< fReturnVal[3]; +// LogDebug <<"模型得分 车头:"<< fReturnVal[0]<<" 无车:"<< fReturnVal[1]<<" 车尾:"<< fReturnVal[2]<<" 有车:"<< fReturnVal[3]; // LogInfo<<"来车当前状态:"<< (nType == 0 ? "有车头" : (nType == 1 ? 
"无车")); switch (nType) { case 0: @@ -315,7 +311,7 @@ APP_ERROR MoveEngine::Process() if (bGetTrainExist == true) { iHasTrainNum_ = iHasTrainNum_ > 20 ? iHasTrainNum_ : iHasTrainNum_ + 1; - if (iHasTrainNum_ > 0) LogDebug << "当前有车, 计数:" << iHasTrainNum_; +// if (iHasTrainNum_ > 0) LogDebug << "当前有车, 计数:" << iHasTrainNum_; } else { @@ -334,33 +330,33 @@ APP_ERROR MoveEngine::Process() { queProcessData_.push(pProcessData); - LogDebug << "iStepInter_: " << iStepInter_ << " queSize:" << queProcessData_.size() << " continue"; + LogDebug << "iStepInter_: " << iStepInter_ << " queSize:" << queProcessData_.size() << " continue"; continue; } - // if (iStepInter_ != 1) this->sendComeTrain(); + if (iStepInter_ != 1) this->sendComeTrain(); iStepInter_ = 1; - + } - //无车停止识别 + //无车停止识别 else { if (iStepInter_ == 1) { iStepInter_ = 2; + this->sendEndTrain(); } while (!queProcessData_.empty()) { - LogDebug << "while iStepInter_: " << iStepInter_ << " queSize:" << queProcessData_.size(); + LogDebug << "while iStepInter_: " << iStepInter_ << " queSize:" << queProcessData_.size(); queProcessData_.pop(); } - this->sendEndTrain(); } //有车识别处理 if (iStepInter_ != 0) { while (!queProcessData_.empty()) { - LogDebug << "while2 iStepInter_: " << iStepInter_ << " queSize:" << queProcessData_.size(); + LogDebug << "while2 iStepInter_: " << iStepInter_ << " queSize:" << queProcessData_.size(); std::shared_ptr pProcessDataTemp = queProcessData_.front(); queProcessData_.pop(); pProcessDataTemp->iStatus = TRAINSTATUS_RUN; @@ -372,7 +368,7 @@ APP_ERROR MoveEngine::Process() pProcessData->bIsEnd = ((iStepInter_ == 2) ? 
true : false); //动态检测无车,设置列车结束标识 SingleDeviceProcess(pProcessData, nType); - + if (iStepInter_ == 2) { // this->sendEndTrain(); diff --git a/nvidia_ascend_engine/nvidia_engine/TrainStepOneEngine/TrainStepOneEngine.cpp b/nvidia_ascend_engine/nvidia_engine/TrainStepOneEngine/TrainStepOneEngine.cpp index 6837ca9..cfaee5a 100644 --- a/nvidia_ascend_engine/nvidia_engine/TrainStepOneEngine/TrainStepOneEngine.cpp +++ b/nvidia_ascend_engine/nvidia_engine/TrainStepOneEngine/TrainStepOneEngine.cpp @@ -14,7 +14,7 @@ APP_ERROR TrainStepOneEngine::Init() bUseEngine_ = MyUtils::getins()->ChkIsHaveTarget("NUM"); if (!bUseEngine_) { - LogWarn << "engineId_:" << engineId_ << " not use engine"; + LogInfo << "engineId_:" << engineId_ << " not use engine"; return APP_ERR_OK; } @@ -98,7 +98,7 @@ APP_ERROR TrainStepOneEngine::InitModel() int nRet = yolov5model.YoloV5ClearityInferenceInit(&modelinfo, strModelName, modelConfig_.strOmPath); if (nRet != 0) { - LogInfo << "YoloV5ClassifyInferenceInit nRet:" << nRet; + LogError << "YoloV5ClassifyInferenceInit nRet:" << nRet; return APP_ERR_COMM_READ_FAIL; } return APP_ERR_OK; @@ -147,7 +147,7 @@ APP_ERROR TrainStepOneEngine::DeInit() { if (!bUseEngine_) { - LogWarn << "engineId_:" << engineId_ << " not use engine"; + LogInfo << "engineId_:" << engineId_ << " not use engine"; return APP_ERR_OK; } @@ -170,7 +170,7 @@ void TrainStepOneEngine::PushData(const std::string &strPort, const std::shared_ int iRet = outputQueMap_[strPort]->push(std::static_pointer_cast(pProcessData)); if (iRet != 0) { - LogDebug << "sourceid:" << pProcessData->iDataSource << " frameid:" << pProcessData->iFrameId << " push fail iRet:" << iRet; + LogError << " frameid:" << pProcessData->iFrameId << " push fail iRet:" << iRet; if (iRet == 2) { usleep(10000); // 10ms @@ -194,7 +194,7 @@ void TrainStepOneEngine::FilterInvalidInfo(std::vector &vecRet, std std::vector vecSpaceInfo; for (auto it = vecRet.begin(); it != vecRet.end();) { - LogDebug << "sourceid:" << 
pProcessData->iDataSource << " frameId:" << pProcessData->iFrameId + LogDebug << "frameId:" << pProcessData->iFrameId << " bigclassid:" << it->class_id << " ltx:" << it->bbox[0] << " lty:" << it->bbox[1] << " rbx:" << it->bbox[2] << " rby:" << it->bbox[3]; // 根据配置文件中 设置的识别范围,过滤掉无效数据 @@ -203,49 +203,75 @@ void TrainStepOneEngine::FilterInvalidInfo(std::vector &vecRet, std it->bbox[2] <= dataSourceCfg.fIdentifyAreasRBX && it->bbox[3] <= dataSourceCfg.fIdentifyAreasRBY)) { - LogWarn << "sourceid:" << pProcessData->iDataSource << " frameId:" << pProcessData->iFrameId - << " bigclassid:" << it->class_id << " invalid areas"; + LogDebug << "frameId:" << pProcessData->iFrameId + << " bigclassid:" << it->class_id << " 超出识别区域-识别区域:(" + << dataSourceCfg.fIdentifyAreasLTX << "," << dataSourceCfg.fIdentifyAreasLTY << "),(" + << dataSourceCfg.fIdentifyAreasRBX << "," << dataSourceCfg.fIdentifyAreasRBY << ")"; it = vecRet.erase(it); continue; } - // 去除车头车尾的间隔信息 + // 如果设置了不识别车头,则去掉车头标记的大框 + if (!MyYaml::GetIns()->GetBoolValue("gc_train_heard_detect") && it->class_id == TRAIN_HEAD) + { + LogDebug << "frameId:" << pProcessData->iFrameId << " 过滤掉车头编号"; + it = vecRet.erase(it); + continue; + } + + // 去除车头时的非车头编号信息 if(pProcessData->nMonitorState == MONITOR_MODEL_TRAIN_HEAD ) { - LogWarn<<" pProcessData->nMonitorState:" << pProcessData->nMonitorState; - if(it->class_id != TRAIN_HEAD) + if(it->class_id != TRAIN_HEAD) { - LogError << "sourceid:" << pProcessData->iDataSource << " frameId:" << pProcessData->iFrameId - << " bigclassid:" << it->class_id << " pProcessData->nMonitorState:" << pProcessData->nMonitorState - << " invalid"; + LogDebug << " 帧号:" << pProcessData->iFrameId + << " 大类:" << it->class_id << " 识别于车头位置,无效!"; it = vecRet.erase(it); continue; } } - if (pProcessData->nMonitorState == MONITOR_MODEL_TRAIN_TAIL ) + + // 去除车尾的车头编号信息 + if (pProcessData->nMonitorState != MONITOR_MODEL_TRAIN_HEAD) { - LogWarn<<" pProcessData->nMonitorState:" << pProcessData->nMonitorState; - /*if( - 
(it->class_id <= U_TRAIN_SPACE) - && (it->class_id >= C_TRAIN_SPACE) - && (it->class_id != W_TRAIN_NUM) - )*/ + if (it->class_id == TRAIN_HEAD) { - LogError << "sourceid:" << pProcessData->iDataSource << " frameId:" << pProcessData->iFrameId - << " bigclassid:" << it->class_id << " pProcessData->nMonitorState:" << pProcessData->nMonitorState - << " invalid"; + LogDebug << " 帧号:" << pProcessData->iFrameId + << " 大类:" << it->class_id << " 识别于非车头位置,无效!"; it = vecRet.erase(it); continue; } } + + // 去除车尾的间隔信息 + if (pProcessData->nMonitorState == MONITOR_MODEL_TRAIN_TAIL + && ((it->class_id >= 9 && it->class_id <= 17 && it->class_id != 15) || it->class_id == 18)) + { + LogDebug << " frameId:" << pProcessData->iFrameId + << " bigclassid:" << it->class_id + <<" 识别于车尾部分,无效!"; + it = vecRet.erase(it); + continue; + } + + // 过滤掉识别于模型反馈无车状态下的所有大框信息 + if (pProcessData->nMonitorState == MONITOR_MODEL_NO_TRAIN) + { + LogDebug << " frameId:" << pProcessData->iFrameId + << " bigclassid:" << it->class_id + <<" 识别于模型反馈的无车状态下,无效!"; + it = vecRet.erase(it); + continue; + } + // 按大框高度剔除远股道识别的信息 int iClassHeight = it->bbox[3] - it->bbox[1]; if (dataSourceCfg.mapClassMinH.find(it->class_id) != dataSourceCfg.mapClassMinH.end() && iClassHeight < dataSourceCfg.mapClassMinH[it->class_id]) { - LogError << "sourceid:" << pProcessData->iDataSource << " frameId:" << pProcessData->iFrameId + LogDebug << " frameId:" << pProcessData->iFrameId << " bigclassid:" << it->class_id << " iClassHeight:" << iClassHeight - << " minH:" << dataSourceCfg.mapClassMinH[it->class_id] << " invalid hegiht"; + << " minH:" << dataSourceCfg.mapClassMinH[it->class_id] << " 过滤疑似远股道识别"; it = vecRet.erase(it); continue; } @@ -256,7 +282,7 @@ void TrainStepOneEngine::FilterInvalidInfo(std::vector &vecRet, std { if (it->class_id != 1 && it->class_id != 6) { - LogDebug << "sourceid:" << pProcessData->iDataSource << " frameId:" << pProcessData->iFrameId << " flat camera only deal 1 or 6"; + LogDebug << " frameId:" << 
pProcessData->iFrameId << " flat camera only deal 1 or 6"; it = vecRet.erase(it); continue; } @@ -266,13 +292,13 @@ void TrainStepOneEngine::FilterInvalidInfo(std::vector &vecRet, std if (((it->class_id >= 2 && it->class_id <= 6) || it->class_id == J_TRAIN_NUM || it->class_id == W_TRAIN_NUM) && (it->bbox[3] - it->bbox[1]) > (it->bbox[2] - it->bbox[0])) { - LogWarn << "sourceid:" << pProcessData->iDataSource << " frameId:" << pProcessData->iFrameId - << " bigclassid:" << it->class_id << " invalid data-- height > width "; + LogWarn << " frameId:" << pProcessData->iFrameId + << " bigclassid:" << it->class_id << " 过滤 高度大于宽度的车号"; it = vecRet.erase(it); continue; } if (((it->class_id >= 2 && it->class_id <= 6) || it->class_id == J_TRAIN_NUM || it->class_id == W_TRAIN_NUM) && - (it->bbox[3] - it->bbox[1]) < MyYaml::GetIns()->GetIntValue("gc_num_frame_height")) + (it->bbox[3] - it->bbox[1]) < MyYaml::GetIns()->GetIntValue("gc_num_frame_height")) { LogWarn << "疑似误识别到远股道车号,帧号:" << pProcessData->iFrameId << "大框高度:" << (it->bbox[3] - it->bbox[1]); @@ -281,7 +307,7 @@ void TrainStepOneEngine::FilterInvalidInfo(std::vector &vecRet, std } if ((it->class_id == 1 || it->class_id == TRAIN_PRO) - && (it->bbox[3] - it->bbox[1]) < MyYaml::GetIns()->GetIntValue("gc_pro_frame_height")) { + && (it->bbox[3] - it->bbox[1]) < MyYaml::GetIns()->GetIntValue("gc_pro_frame_height")) { LogWarn << "疑似误识别到远股道属性,帧号:" << pProcessData->iFrameId << "大框高度:" << (it->bbox[3] - it->bbox[1]); it = vecRet.erase(it); @@ -301,8 +327,8 @@ void TrainStepOneEngine::FilterInvalidInfo(std::vector &vecRet, std { if (it->bbox[3] < (pProcessData->iHeight * iSpaceMinRBXPer_ / 100)) { - LogWarn << "sourceid:" << pProcessData->iDataSource << " frameId:" << pProcessData->iFrameId - << " bigclassid:" << it->class_id << " spaceinfo invalid fRBY:" << it->bbox[3]; + LogWarn << " frameId:" << pProcessData->iFrameId + << " bigclassid:" << it->class_id << " 过滤间隔过于靠下的间隔信息 fRBY:" << it->bbox[3]; it = vecRet.erase(it); continue; } 
@@ -320,10 +346,10 @@ void TrainStepOneEngine::FilterInvalidInfo(std::vector &vecRet, std int iCenterY = pProcessData->iHeight / 2; if (iHeight0 < iCenterY && iHeight1 < iCenterY) //非平车 { - if (!((vecRet[0].class_id >= 9 && vecRet[0].class_id <= 17 && vecRet[0].class_id != 15) || vecRet[0].class_id == U_TRAIN_SPACE) && - !((vecRet[1].class_id >= 9 && vecRet[10].class_id <= 17 && vecRet[1].class_id != 15) || vecRet[1].class_id == U_TRAIN_SPACE)) + if (!((vecRet[0].class_id >= 9 && vecRet[0].class_id <= 17 && vecRet[0].class_id != 15) || vecRet[0].class_id == U_TRAIN_SPACE) && + !((vecRet[1].class_id >= 9 && vecRet[1].class_id <= 17 && vecRet[1].class_id != 15) || vecRet[1].class_id == U_TRAIN_SPACE)) { - LogError << "sourceid:" << pProcessData->iDataSource << " frameId:" << pProcessData->iFrameId << " no space"; + LogDebug << " frameId:" << pProcessData->iFrameId << " no space"; vecRet.clear(); } } @@ -387,7 +413,7 @@ APP_ERROR TrainStepOneEngine::Process() { if (!bUseEngine_) { - LogWarn << "engineId_:" << engineId_ << " not use engine"; + LogInfo << "engineId_:" << engineId_ << " not use engine"; return APP_ERR_OK; } int iRet = APP_ERR_OK; @@ -408,7 +434,7 @@ APP_ERROR TrainStepOneEngine::Process() pPostData->iModelType = MODELTYPE_NUM; pPostData->nMonitorState = pProcessData->nMonitorState; //来车检测的四个分类 - //获取图片 + //获取图片 if (pProcessData->iStatus == TRAINSTATUS_RUN || pProcessData->bIsEnd) { if (pProcessData->pData != nullptr && pProcessData->iSize != 0) @@ -417,10 +443,7 @@ APP_ERROR TrainStepOneEngine::Process() //进行推理 std::vector res; - //auto start = std::chrono::system_clock::now(); //计时开始 yolov5model.YoloV5ClearityInferenceModel(img, res); - //auto end = std::chrono::system_clock::now(); - //LogInfo << "nopr1 inference time: " << std::chrono::duration_cast(end - start).count() << "ms"; //过滤无效信息 FilterInvalidInfo(res, pProcessData); @@ -461,10 +484,10 @@ APP_ERROR TrainStepOneEngine::Process() SetTargetType(postSubData); 
pPostData->vecPostSubData.emplace_back(postSubData); - LogDebug << "sourceid:" << pProcessData->iDataSource << " frameId:" << pProcessData->iFrameId - << " --iClassId:" << singledata.iClassId << " iLine:" << singledata.iLine << " confidence=" << singledata.fScore - << " lx=" << singledata.fLTX << " ly=" << singledata.fLTY << " rx=" << singledata.fRBX << " ry=" << singledata.fRBY - << " clear:" << singledata.fClear; +// LogDebug << "数据源:" << pProcessData->iDataSource << " 帧:" << pProcessData->iFrameId +// << " --iClassId:" << singledata.iClassId << " iLine:" << singledata.iLine << " confidence=" << singledata.fScore +// << " lx=" << singledata.fLTX << " ly=" << singledata.fLTY << " rx=" << singledata.fRBX << " ry=" << singledata.fRBY +// << " clear:" << singledata.fClear; } } } diff --git a/nvidia_ascend_engine/nvidia_engine/TrainStepTwoEngine/TrainStepTwoEngine.cpp b/nvidia_ascend_engine/nvidia_engine/TrainStepTwoEngine/TrainStepTwoEngine.cpp index b62cb99..97775cc 100644 --- a/nvidia_ascend_engine/nvidia_engine/TrainStepTwoEngine/TrainStepTwoEngine.cpp +++ b/nvidia_ascend_engine/nvidia_engine/TrainStepTwoEngine/TrainStepTwoEngine.cpp @@ -193,7 +193,7 @@ APP_ERROR TrainStepTwoEngine::Process() auto start = std::chrono::system_clock::now(); // 计时开始 yolov5model.YoloV5ClearityInferenceModel(step2_image, res); auto end = std::chrono::system_clock::now(); - LogInfo << "nopr2 inference time: " << std::chrono::duration_cast(end - start).count() << "ms"; +// LogInfo << "nopr2 inference time: " << std::chrono::duration_cast(end - start).count() << "ms"; PostSubData postSubDataNew; postSubDataNew.iTargetType = postsubdata.iTargetType; @@ -221,9 +221,9 @@ APP_ERROR TrainStepTwoEngine::Process() postSubDataNew.vecSingleData.emplace_back(singledata); - LogDebug << "sourceid:" << pProcessData->iDataSource << " step2 after frameId:" << pProcessData->iFrameId - << " --iClassId:" << singledata.iClassId << " iLine:" << singledata.iLine << " confidence=" << singledata.fScore - << " 
lx=" << singledata.fLTX << " ly=" << singledata.fLTY << " rx=" << singledata.fRBX << " ry=" << singledata.fRBY; +// LogDebug << "sourceid:" << pProcessData->iDataSource << " step2 after frameId:" << pProcessData->iFrameId +// << " --iClassId:" << singledata.iClassId << " iLine:" << singledata.iLine << " confidence=" << singledata.fScore +// << " lx=" << singledata.fLTX << " ly=" << singledata.fLTY << " rx=" << singledata.fRBX << " ry=" << singledata.fRBY; } pPostData->vecPostSubData.emplace_back(postSubDataNew); } diff --git a/nvidia_ascend_tools/nvidia_tools/yolov5/src/preprocess.cu b/nvidia_ascend_tools/nvidia_tools/yolov5/src/preprocess.cu index 62561b0..a6289e1 100644 --- a/nvidia_ascend_tools/nvidia_tools/yolov5/src/preprocess.cu +++ b/nvidia_ascend_tools/nvidia_tools/yolov5/src/preprocess.cu @@ -464,12 +464,12 @@ void yolov5_preprocess_kernel_img( s2d.value[0] = scale; s2d.value[1] = 0; - s2d.value[2] = 0; //左上顶点贴图 - // s2d.value[2] = -scale * src_width * 0.5 + dst_width * 0.5; //中心贴图 +// s2d.value[2] = 0; //左上顶点贴图 + s2d.value[2] = -scale * src_width * 0.5 + dst_width * 0.5; //中心贴图 s2d.value[3] = 0; s2d.value[4] = scale; - s2d.value[5] = 0; //左上顶点贴图 - // s2d.value[5] = -scale * src_height * 0.5 + dst_height * 0.5; //中心贴图 +// s2d.value[5] = 0; //左上顶点贴图 + s2d.value[5] = -scale * src_height * 0.5 + dst_height * 0.5; //中心贴图 cv::Mat m2x3_s2d(2, 3, CV_32F, s2d.value); cv::Mat m2x3_d2s(2, 3, CV_32F, d2s.value);