Initial ARM version

This commit is contained in:
Mr.V 2024-06-19 14:41:40 +08:00
parent 9fadfe6a37
commit d02a50a6c2
31 changed files with 725 additions and 648 deletions

View File

@ -1,11 +1,11 @@
cmake_minimum_required(VERSION 3.5)
cmake_policy(SET CMP0074 NEW)
# cmake_policy(SET CMP0074 NEW)
message("NVIDIA NX PLATFORM")
set(PROJECT_NAME train)
project(${PROJECT_NAME} VERSION 1.0)
add_definitions(-std=c++11)
add_definitions(-DAPI_EXPORTS)
@ -13,19 +13,22 @@ set(CMAKE_CXX_STANDARD 11)
#set(CMAKE_BUILD_TYPE Debug)
#set(CMAKE_BUILD_TYPE Release)
#add_definitions("-Wall -g")
find_package(CUDA REQUIRED)
#opencv
find_package(OpenCV REQUIRED)
# message(STATUS "${OpenCV_LIBS}")
# message(STATUS "${OpenCV_INCLUDE_DIRS}")
find_package(CUDA REQUIRED)
#
set(CMAKE_SYSTEM_NAME Linux)
set(CMAKE_C_COMPILER "gcc")
set(CMAKE_CXX_COMPILER "g++")
# arm
# set(CMAKE_SYSTEM_NAME Linux)
# set(CMAKE_SYSTEM_PROCESSOR aarch64)
# set(CMAKE_C_COMPILER "aarch64-linux-gnu-gcc")
# set(CMAKE_CXX_COMPILER "aarch64-linux-gnu-g++")
#
@ -41,21 +44,20 @@ set(SYS_USR_INCLUDE_DIR "/usr/include")
set(SYS_USR_LIB_DIR "/usr/lib")
set(SYS_USR_LOCAL_INCLUDE_DIR "/usr/local/include")
set(SYS_USR_LOCAL_LIB_DIR "/usr/local/lib")
# -- for x86 use --
set(AARCH64_LINUX_INCLUDE_DIR "/usr/include/x86_64-linux-gnu")
set(AARCH64_LINUX_LIB_DIR "/usr/lib/x86_64-linux-gnu")
# -- for ARM use --
#set(AARCH64_LINUX_INCLUDE_DIR "/usr/include/aarch64-linux-gnu")
#set(AARCH64_LINUX_LIB_DIR "/usr/lib/aarch64-linux-gnu")
# set(AARCH64_LINUX_INCLUDE_DIR "/usr/include/x86_64-linux-gnu")
# set(AARCH64_LINUX_LIB_DIR "/usr/lib/x86_64-linux-gnu")
set(AARCH64_LINUX_INCLUDE_DIR "/usr/include/aarch64-linux-gnu")
set(AARCH64_LINUX_LIB_DIR "/usr/lib/aarch64-linux-gnu")
#opencv3.2.0: /usr/lib/aarch64-linux-gnu /usr/include/opencv2
#opencv4.5.5: /usr/local/lib /usr/local/include/opencv4
# use opencv4.5.5
set(OPENCV_INCLUDE_DIR ${SYS_USR_LOCAL_INCLUDE_DIR}/opencv4)
set(OPENCV_LIB_DIR ${SYS_USR_LOCAL_LIB_DIR})
set(CUDA_DIR "/usr/local/cuda-11.7")
# set(CUDA_DIR "/usr/local/cuda-11.7")
set(CUDA_DIR "/usr/local/cuda-10.2")
set(CUDA_INCLUDE_DIR ${CUDA_DIR}/include)
set(CUDA_LIB_DIR ${CUDA_DIR}/lib64)
@ -71,75 +73,75 @@ set(TEGRA_LIB_DIR ${AARCH64_LINUX_LIB_DIR}/tegra) #tegra library path /usr/li
# nvidia ascend common include
include_directories(
#ai_matrix include
${PROJECT_SOURCE_DIR}/ai_matrix
${PROJECT_SOURCE_DIR}/ai_matrix/framework
${PROJECT_SOURCE_DIR}/ai_matrix/myftp
${PROJECT_SOURCE_DIR}/ai_matrix/myhttp
${PROJECT_SOURCE_DIR}/ai_matrix/myJson
${PROJECT_SOURCE_DIR}/ai_matrix/myJson/json
${PROJECT_SOURCE_DIR}/ai_matrix/mylog
${PROJECT_SOURCE_DIR}/ai_matrix/pcl
${PROJECT_SOURCE_DIR}/ai_matrix/myqueue
${PROJECT_SOURCE_DIR}/ai_matrix/myshell
${PROJECT_SOURCE_DIR}/ai_matrix/myutils
#nvidia ascend common cann include
${PROJECT_SOURCE_DIR}/nvidia_ascend_base/Base/BlockingQueue
${PROJECT_SOURCE_DIR}/nvidia_ascend_base/Base/CBase64
${PROJECT_SOURCE_DIR}/nvidia_ascend_base/Base/CommandParser
${PROJECT_SOURCE_DIR}/nvidia_ascend_base/Base/CommonDataType
${PROJECT_SOURCE_DIR}/nvidia_ascend_base/Base/ConfigParser
${PROJECT_SOURCE_DIR}/nvidia_ascend_base/Base/ErrorCode
${PROJECT_SOURCE_DIR}/nvidia_ascend_base/Base/FileManager
${PROJECT_SOURCE_DIR}/nvidia_ascend_base/Base/Log
${PROJECT_SOURCE_DIR}/nvidia_ascend_base/Base/
${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/common
#common engine include
${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/common_engine/ControlEngine
${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/common_engine/DataSourceEngine
${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/common_engine/DataUploadEngine
${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/common_engine/FilterEngine
${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/common_engine/MergerEngine
${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/common_engine/SaveEngine
${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/common_engine/SelectBestEngine
${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/common_engine/TrainAnaEngine
${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/common_engine/TransEngine
${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/common_engine/DataDealEngine
${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/common_engine/SocketEngine
${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/common_engine/DeleteExpiredFolderEngine
#common tools rtsp_server include
${PROJECT_SOURCE_DIR}/nvidia_ascend_tools/common_tools/rtsp_server/3rdpart/md5
${PROJECT_SOURCE_DIR}/nvidia_ascend_tools/common_tools/rtsp_server/net/
${PROJECT_SOURCE_DIR}/nvidia_ascend_tools/common_tools/rtsp_server/xop/
)
include_directories(
#nvidia engine include
${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/nvidia_engine/ChkDateStepOneEngine
${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/nvidia_engine/ChkDateStepTwoEngine
${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/nvidia_engine/ContainerEngine
${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/nvidia_engine/DecodeEngine
${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/nvidia_engine/MoveEngine
${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/nvidia_engine/MyYaml
${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/nvidia_engine/TrainStepOneEngine
${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/nvidia_engine/TrainStepTwoEngine
#nvidia_tools yolov5 include
${PROJECT_SOURCE_DIR}/nvidia_ascend_tools/nvidia_tools/yolov5/include
#third party include
${CUDA_INCLUDE_DIR}
${TENSORRT_INCLUDE_DIR}
${DRM_INCLUDE_DIR}
${OpenCV_DIR}
${AARCH64_LINUX_INCLUDE_DIR}
${SYS_USR_LOCAL_INCLUDE_DIR}
# ${PCL_INCLUDE}
)
@ -150,7 +152,7 @@ link_directories(${SYS_USR_LOCAL_LIB_DIR}
${CUDA_LIB_DIR}
${TENSORRT_LIB_DIR}
${TEGRA_LIB_DIR}
)
#
@ -187,19 +189,26 @@ file(GLOB_RECURSE COMMON_SRCS_LISTS
${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/common_engine/TransEngine/*.cpp
${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/common_engine/DataDealEngine/*.cpp
${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/common_engine/SocketEngine/*.cpp
${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/common_engine/DataSourceEngine/*.cpp
${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/common_engine/SaveEngine/*.cpp
${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/common_engine/DeleteExpiredFolderEngine/*.cpp
#common tools rtsp_server src
${PROJECT_SOURCE_DIR}/nvidia_ascend_tools/common_tools/rtsp_server/net/*.cpp
${PROJECT_SOURCE_DIR}/nvidia_ascend_tools/common_tools/rtsp_server/xop/*.cpp
)
file(GLOB_RECURSE SRCS_LISTS
#nvidia engine src
#nvidia engine include
${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/common_engine/ControlEngine/*.cpp
${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/common_engine/DataSourceEngine/*.cpp
${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/common_engine/DataUploadEngine/*.cpp
${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/common_engine/FilterEngine/*.cpp
${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/common_engine/MergerEngine/*.cpp
${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/common_engine/SaveEngine/*.cpp
${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/common_engine/SelectBestEngine/*.cpp
${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/common_engine/TrainAnaEngine/*.cpp
${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/common_engine/TransEngine/*.cpp
${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/common_engine/DataDealEngine/*.cpp
${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/nvidia_engine/ChkDateStepOneEngine/*.cpp
${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/nvidia_engine/ChkDateStepTwoEngine/*.cpp
@ -215,9 +224,12 @@ file(GLOB_RECURSE SRCS_LISTS
#nvidia tools yolov5 src
${PROJECT_SOURCE_DIR}/nvidia_ascend_tools/nvidia_tools/yolov5/src/*.cpp
${PROJECT_SOURCE_DIR}/nvidia_ascend_tools/nvidia_tools/yolov5/src/*.cu
)
cuda_add_executable(${PROJECT_NAME} ${COMMON_SRCS_LISTS} ${SRCS_LISTS})
# add_executable(${PROJECT_NAME} ${COMMON_SRCS_LISTS} ${SRCS_LISTS})
target_link_libraries(${PROJECT_NAME} pthread) #other
target_link_libraries(${PROJECT_NAME} nvinfer nvonnxparser nvcaffe_parser nvinfer_plugin) #TensorRT
@ -229,7 +241,7 @@ target_link_libraries(${PROJECT_NAME}
# pcl_common pcl_io_ply pcl_keypoints pcl_registration pcl_segmentation pcl_features pcl_io pcl_octree #pcl
# pcl_sample_consensus pcl_surface pcl_filters pcl_kdtree pcl_recognition pcl_search pcl_tracking
avformat avcodec avutil avfilter swresample swscale postproc #VideoCodecV2
yaml-cpp https_sn
yaml-cpp
jsoncpp curl boost_system boost_filesystem ssh2
-Wl,-z,relro,-z,now,-z,noexecstack -pie -s
)

View File

@ -104,10 +104,11 @@ namespace ai_matrix
// check for duplicate engines
std::string engine_unique = engine_name + "_" + std::to_string(engine_id);
printf(engine_unique.c_str());
// printf(engine_unique.c_str());
auto iter = engine_map_.find(engine_unique);
if (iter != engine_map_.end())
{
LogWarn << "重复engine " << engine_unique;
continue;
}
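
For context, the guard above composes a unique key from name and id and skips registration when that key already exists. A minimal standalone sketch of the same check (the container and value types here are illustrative, not the project's actual engine map):

#include <cstdio>
#include <map>
#include <string>

// Returns false and skips registration when the composed key already exists,
// mirroring the 'continue' in the loop above.
bool RegisterEngine(std::map<std::string, int> &engine_map,
                    const std::string &engine_name, int engine_id)
{
    std::string engine_unique = engine_name + "_" + std::to_string(engine_id);
    if (engine_map.find(engine_unique) != engine_map.end())
    {
        std::printf("duplicate engine %s\n", engine_unique.c_str());
        return false;
    }
    engine_map[engine_unique] = engine_id;
    return true;
}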

View File

@ -55,7 +55,6 @@ namespace ai_matrix
std::string get_date();
// get the current time
std::string get_time();
// convert a timestamp to a time string (millisecond precision)
std::string Stamp2Time(long long timestamp, bool has_msec = false);
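
A minimal sketch of what a Stamp2Time-style helper with this signature can look like. The millisecond input unit and the output layout are assumptions; the project's actual implementation may differ:

#include <cstdio>
#include <ctime>
#include <string>

std::string Stamp2Time(long long timestamp, bool has_msec = false)
{
    std::time_t secs = static_cast<std::time_t>(timestamp / 1000); // input assumed to be milliseconds
    std::tm tmBuf{};
    localtime_r(&secs, &tmBuf); // POSIX; this project targets Linux
    char buf[32];
    std::strftime(buf, sizeof(buf), "%Y-%m-%d %H:%M:%S", &tmBuf);
    std::string result(buf);
    if (has_msec)
    {
        char ms[8];
        std::snprintf(ms, sizeof(ms), ".%03d", static_cast<int>(timestamp % 1000));
        result += ms;
    }
    return result;
}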

View File

@ -4,7 +4,7 @@ set -e
path_cur=$(cd `dirname $0`; pwd)
build_type="Release"
# output directory ceshi
# output directory
app_path="app"
# executable name
appname="train"
@ -39,7 +39,7 @@ function build(){
if [ "$1" == $app_path ]; then
cmake -DCMAKE_BUILD_TYPE="Release" ..
else
cmake -DCMAKE_BUILD_TYPE="Debug" ..
cmake -DCMAKE_BUILD_TYPE="Release" ..
fi
make -j4

View File

@ -11,7 +11,8 @@ gc_data_source: "camera" #[camera, images]
camera:
camera_0:
#url: "rtsp://admin:sgt12345@10.27.119.13:554/h264/ch1/main/av_stream"
url: "./videos/km70.mp4"
# url: "./videos/km70.mp4"
url: "./vedio/buertai2.mp4"
skipInterval: 3
target: "NUM"
use: true
@ -70,7 +71,8 @@ gc_best_path: "./result/best"
# model parameters (only open-top wagons considered)
model:
MoveEngine: # motion detection
om_path: "./model/step0/step0.FP16.engine"
# om_path: "./model/step0/step0.FP16.engine"
om_path: "./model/step0/step0.engine"
modelinfo_path: "./model/step0/retina_move_modelinfo.txt"
model_type: "retina" #(retina, yolov5)
score_threshold: 0.9
@ -82,6 +84,7 @@ model:
score_threshold: 0.6
nms_threshold: 0.3
TrainStepTwoEngine: # character recognition
# om_path: "./model/step2/step2.engine"
om_path: "./model/step2/step2.engine"
modelinfo_path: "./model/step2/yolov5_train_step2_modelinfo.txt"
model_type: "yolov5" #(retina, yolov5)
@ -112,20 +115,21 @@ model:
score_threshold: 0.7
nms_threshold: 0.3
gc_http_open: 1
username: "guest_01"
password: "d55b0f642e817eea24725d2f2a31dd08" # Shendong
gc_http_url: "http://192.168.2.211:20004/api/train-carriage/identification/video-save"
gc_gettoken_url: "http://192.168.2.211:20004/api/blade-auth/oauth/token"
gc_image_srv: "http://192.168.2.211:9010/"
gc_http_open: 0
# gc_http_url: "http://192.168.2.211:20004/api/train-carriage/identification/video-save"
# gc_gettoken_url: "http://192.168.2.211:20004/api/blade-auth/oauth/token"
# gc_image_srv: "http://192.168.2.211:9010/"
gc_http_url: "http://192.168.2.121:8081"
gc_gettoken_url: "http://192.168.0.121:20004/api/blade-auth/oauth/token"
gc_image_srv: "http://192.168.0.121:9010/"
gc_device_status_open: 1
gc_device_status_open: 0
gc_device_status_url: "http://192.168.2.211:20004/api/blade-train/deviceInfo/save"
rfid_ip: "10.27.200.39"
# socket_server server-side parameters
socket_server_open: 1
socket_server_open: 0
socket_server_port: 7000
socket_server_queue_len: 10
@ -182,5 +186,5 @@ gc_c_space_frame_width: 500
# whether to detect the locomotive
gc_train_heard_detect: true
# days before the folder expires
# days to keep recognition results
gc_days_for_result_expire_folder: 3
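
These keys are consumed from C++ through yaml-cpp, which the build links. A hedged read-side sketch; the file name is a placeholder and the project's real accessor layer (MyYaml) is not shown:

#include <yaml-cpp/yaml.h>
#include <iostream>
#include <string>

int main()
{
    // "config.yaml" is a placeholder; the project resolves its own config path
    YAML::Node cfg = YAML::LoadFile("config.yaml");
    int iHttpOpen = cfg["gc_http_open"].as<int>();
    std::string strUrl = cfg["camera"]["camera_0"]["url"].as<std::string>();
    int iExpireDays = cfg["gc_days_for_result_expire_folder"].as<int>();
    std::cout << iHttpOpen << " " << strUrl << " " << iExpireDays << std::endl;
    return 0;
}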

View File

@ -70,8 +70,8 @@ connects:
#ControlEngine_0_2: "PushEngine_0_0 1024" # real-time live streaming push
MoveEngine_0_0: "SaveImgEngine_0_0 1024" # save data during recognition
MoveEngine_0_1: "SocketEngine_0_0 1024" #
MoveEngine_0_5: "DataDealEngine_0_0 1024"
DataDealEngine_0_0: "TrainStepOneEngine_0_0"
TrainStepOneEngine_0_0: "FilterTrainStepOneEngine_0_0"
FilterTrainStepOneEngine_0_0: "SaveStepOneResultEngine_0_0 1024"
@ -89,6 +89,7 @@ connects:
FilterContainerStepOneEngine_0_0: "SaveStepOneResultEngine_0_0"
TrainParationMgr_0_0: "DataDealTwoEngine_0_0 1024"
DataDealTwoEngine_0_0: "TrainStepTwoEngine_0_0"
TrainStepTwoEngine_0_0: "TransTrainEngine_0_0"
TransTrainEngine_0_0: "SelectBestEngine_0_0"

View File

@ -36,8 +36,8 @@ namespace MatrixAILog
uint32_t Log::logLevel = LOG_LEVEL_INFO;
std::vector<std::string> Log::levelString{"[Debug]", "[Info ]", "[Warn ]", "[Error]", "[Fatal]"};
std::mutex Log::mutex;
std::string Log::logFile = "/home/nvidia/train/logs/log.log"; // default log file
std::string Log::logFileBak = "/home/nvidia/train/logs/log.log.bak";
std::string Log::logFile = "./logs/log.log"; // default log file
std::string Log::logFileBak = "./logs/log.log.bak";
std::string Log::logFileBakPath = "";
Log::Log(std::string file, std::string function, int line, uint32_t level)
@ -69,27 +69,26 @@ namespace MatrixAILog
{
return;
}
// dstFileSize >= FILE_SIZE
// "date_格式[yyyy-mm-dd HH:MM:SS:"改为"yyyymmmdd_HHMMSS"
if (!logFileBakPath.empty())
{
CreateDirRecursivelyByFile(logFileBakPath);
std::string strData;
for (int i = 0; i < date_.size(); i++)
{
if (date_[i] != '-' && date_[i] != ':' && date_[i] != '[')
{
if (date_[i] == ' ')
{
strData += "_";
continue;
}
strData += date_[i];
}
}
logFileBak = logFileBakPath + strData + "_bak.txt";
}
// if (!logFileBakPath.empty())
// {
// CreateDirRecursivelyByFile(logFileBakPath);
// std::string strData;
// for (int i = 0; i < date_.size(); i++)
// {
// if (date_[i] != '-' && date_[i] != ':' && date_[i] != '[')
// {
// if (date_[i] == ' ')
// {
// strData += "_";
// continue;
// }
// strData += date_[i];
// }
// }
// logFileBak = logFileBakPath + strData + "_bak.txt";
// }
if (access(logFileBak.c_str(), 0) == APP_ERR_OK)
{

View File

@ -516,6 +516,7 @@ typedef struct
uint64_t i64TimeStamp = 0; // frame data timestamp
std::shared_ptr<DecodedData> pDecodeData = nullptr;
int iDirection = 0; // travel direction (0 - unknown; 1 - left; 2 - right)
int nMonitorState = MONITOR_MODEL_INIT_STATE;
} SaveImgData;
// recognition processing data

View File

@ -118,7 +118,7 @@ bool DataDealEngine::ReadFileInfo(Json::Value &jvFrameInfo, RawData &rawData, st
// LogError << "Failed to read image:" << strImgName;
// return false;
// }
return true;
}
@ -190,14 +190,14 @@ void DataDealEngine::MakeProcessData()
iFrameId = iReRunFrameId;
}
LogInfo << "sourceid:" << iSourceId << " MakeProcessData origtime:" << moveData_.strTrainName << " iOrigFrameId:" << iOrigFrameId
<< " time:" << strTrainName_ << " iFrameId:" << iFrameId << " bIsEndFlag:" << bIsEndFlag;
// LogInfo << "sourceid:" << iSourceId << " MakeProcessData origtime:" << moveData_.strTrainName << " iOrigFrameId:" << iOrigFrameId
// << " time:" << strTrainName_ << " iFrameId:" << iFrameId << " bIsEndFlag:" << bIsEndFlag;
std::string strImgName = strDataDir_ + szCameraNo + std::to_string(iOrigFrameId);
strImgName += (iter->second.iRotate != 0) ? "_rotate.jpg" : ".jpg";
std::string strFileName = strDataDir_ + szCameraNo + std::to_string(iOrigFrameId) + ".txt";
// retry 30 times after a camera read failure.
// retry 2000 times after a camera read failure.
Json::Value jvFrameInfo;
RawData rawData;
bool bRet = false;
@ -227,7 +227,6 @@ void DataDealEngine::MakeProcessData()
pProcessData->iStatus = TRAINSTATUS_RUN;
pProcessData->bIsEnd = bIsEndFlag;
pProcessData->iDataNO = iDataNO_;
pProcessData->nMonitorState = moveData_.nMonitorState;
if (bRet)
{
@ -237,11 +236,12 @@ void DataDealEngine::MakeProcessData()
pProcessData->iHeight = jvFrameInfo["height"].asInt();
pProcessData->iDirection = jvFrameInfo["direction"].asInt();
pProcessData->iRate = jvFrameInfo["rate"].asInt();
pProcessData->nMonitorState = jvFrameInfo["moveType"].asInt();
cv::Mat cvframe = cv::imread(pProcessData->strPicFilePath);
int iBufferSize = pProcessData->iWidth * pProcessData->iHeight * 3;
void* pBGRBufferobj = nullptr;
pBGRBufferobj = new uint8_t[iBufferSize];
memcpy(pBGRBufferobj, cvframe.data, iBufferSize);
pProcessData->pData.reset(pBGRBufferobj, [](void* data){if(data) {delete[] data; data = nullptr;}});
pProcessData->iSize = iBufferSize;
@ -252,18 +252,18 @@ void DataDealEngine::MakeProcessData()
{
if (iPort == vecPushPorts.size() - 1)
{
//iRet = outputQueMap_[vecPushPorts[iPort]]->push(std::static_pointer_cast<void>(pProcessData));
PushData(vecPushPorts[iPort], pProcessData);
iRet = outputQueMap_[vecPushPorts[iPort]]->push(std::static_pointer_cast<void>(pProcessData), true);
// PushData(vecPushPorts[iPort], pProcessData);
continue;
}
}
std::shared_ptr<ProcessData> pNewProcessData = std::make_shared<ProcessData>();
*pNewProcessData = *pProcessData;
//iRet = outputQueMap_[vecPushPorts[iPort]]->push(std::static_pointer_cast<void>(pNewProcessData));
PushData(vecPushPorts[iPort], pNewProcessData);
iRet = outputQueMap_[vecPushPorts[iPort]]->push(std::static_pointer_cast<void>(pNewProcessData), true);
// PushData(vecPushPorts[iPort], pNewProcessData);
}
}
iOrigDataNO_++;
iDataNO_++;
// leave a fixed interval between each batch of processed data
@ -291,25 +291,14 @@ APP_ERROR DataDealEngine::Process()
// get the detection state from the main camera
std::shared_ptr<void> pVoidData0 = nullptr;
iRet = inputQueMap_[strPort0_]->pop(pVoidData0);
if (nullptr != pVoidData0)
{
std::shared_ptr<MoveData> pMoveData = std::static_pointer_cast<MoveData>(pVoidData0);
// queuwMoveData_.push(*pMoveData);
moveData_ = *pMoveData;
LogDebug << "traindate:" << moveData_.strTrainDate << " trainname:" << moveData_.strTrainName
<< " MoveData frameid:" << moveData_.iFrameId << " IsEnd:" << moveData_.bIsEnd;
}
// LogDebug << "【帧号】" << (iDataNO_ * dataSourceConfig_.iSkipInterval);
// if (queuwMoveData_.size() > 0 && (iDataNO_ * dataSourceConfig_.iSkipInterval) >= queuwMoveData_.front().iFrameId)
// {
// moveData_ = queuwMoveData_.front();
// queuwMoveData_.pop();
// LogDebug << "!!!--- moveDate 更新";
// }
if (!moveData_.bHasTrain)
{
usleep(1000); //1ms
@ -319,7 +308,7 @@ APP_ERROR DataDealEngine::Process()
// sleep 1s on the first record, waiting for images to be written to local disk
if (iOrigDataNO_ == 1)
{
usleep(1000 * 1000); //1s
usleep(1000000); //1s
}
if (strDataDir_.empty())

View File

@ -407,31 +407,6 @@ int DataDealTwoEngine::GetPostData(std::shared_ptr<ProcessData> pProcessData, Js
return pPostData->vecPostSubData.size();
}
/**
* push data onto the queue
* inParam : const std::string strPort  port to push to
: const std::shared_ptr<ProcessData> &pProcessData  data to push
* outParam: N/A
* return : N/A
*/
void DataDealTwoEngine::PushData(const std::string &strPort, const std::shared_ptr<ProcessData> &pProcessData)
{
while (true)
{
int iRet = outputQueMap_[strPort]->push(std::static_pointer_cast<void>(pProcessData));
if (iRet != 0)
{
LogDebug << "sourceid:" << pProcessData->iDataSource << " frameid:" << pProcessData->iFrameId << " push fail iRet:" << iRet;
if (iRet == 2)
{
usleep(10000); // 10ms
continue;
}
}
break;
}
}
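
The PushData wrapper removed above retried non-blocking pushes that failed with return code 2 (queue full), sleeping 10 ms between attempts; its call sites now use push(..., true), i.e. a blocking push. A sketch of a queue offering both behaviors, loosely modeled on the Base/BlockingQueue module (the class shape and everything beyond return code 2 are assumptions):

#include <condition_variable>
#include <deque>
#include <memory>
#include <mutex>

class BlockingQueue
{
public:
    explicit BlockingQueue(size_t cap) : cap_(cap) {}

    // block=false fails fast with 2 when full (callers retry, as PushData did);
    // block=true waits for a consumer to make room, as push(..., true) does.
    int push(std::shared_ptr<void> item, bool block = false)
    {
        std::unique_lock<std::mutex> lock(mtx_);
        if (!block && q_.size() >= cap_)
        {
            return 2; // queue full
        }
        cv_.wait(lock, [this] { return q_.size() < cap_; });
        q_.push_back(std::move(item));
        return 0;
    }

    std::shared_ptr<void> pop()
    {
        std::lock_guard<std::mutex> lock(mtx_);
        if (q_.empty())
        {
            return nullptr;
        }
        std::shared_ptr<void> item = q_.front();
        q_.pop_front();
        cv_.notify_one(); // wake one blocked producer
        return item;
    }

private:
    size_t cap_;
    std::deque<std::shared_ptr<void>> q_;
    std::mutex mtx_;
    std::condition_variable cv_;
};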
/**
* push
* inParam : N/A
@ -515,14 +490,14 @@ void DataDealTwoEngine::MakeProcessData(std::shared_ptr<TrainRange> pTrainRange)
{
if (iPort == vecPushPorts.size() - 1)
{
//iRet = outputQueMap_[vecPushPorts[iPort]]->push(std::static_pointer_cast<void>(pProcessData), true);
PushData(vecPushPorts[iPort], pProcessData);
iRet = outputQueMap_[vecPushPorts[iPort]]->push(std::static_pointer_cast<void>(pProcessData), true);
// PushData(vecPushPorts[iPort], pProcessData);
continue;
}
std::shared_ptr<ProcessData> pNewProcessData = std::make_shared<ProcessData>();
*pNewProcessData = *pProcessData;
//iRet = outputQueMap_[vecPushPorts[iPort]]->push(std::static_pointer_cast<void>(pNewProcessData), true);
PushData(vecPushPorts[iPort], pNewProcessData);
iRet = outputQueMap_[vecPushPorts[iPort]]->push(std::static_pointer_cast<void>(pNewProcessData), true);
// PushData(vecPushPorts[iPort], pNewProcessData);
}
}
@ -618,7 +593,7 @@ APP_ERROR DataDealTwoEngine::Process()
// process every frame of the current wagon
MakeProcessData(pTrainRange);
// push the aggregated result
iRet = outputQueMap_[engineName_ + "_" + std::to_string(engineId_) + "_9"]->push(std::static_pointer_cast<void>(pTrainRange));
iRet = outputQueMap_[engineName_ + "_" + std::to_string(engineId_) + "_9"]->push(std::static_pointer_cast<void>(pTrainRange), true);
iTrainIndex_++;
if (pTrainRange->bIsEnd)

View File

@ -3,25 +3,22 @@
using namespace ai_matrix;
namespace
{
namespace {
const int LOW_THRESHOLD = 128;
const int MAX_THRESHOLD = 4096;
const uint16_t DELAY_TIME = 10000;
const uint16_t DELAY_TIME = 20000;
}
CameraEngine::CameraEngine() {}
CameraEngine::~CameraEngine() {}
APP_ERROR CameraEngine::Init()
{
APP_ERROR CameraEngine::Init() {
bUseEngine_ = true;
bHwDecode_ = MyYaml::GetIns()->GetBoolValue("gc_hardware_decode");
dataSourceConfig_ = MyYaml::GetIns()->GetDataSourceConfigById(engineId_); //获取摄像机参数
if (MyYaml::GetIns()->GetStringValue("gc_data_source") != "camera" || !dataSourceConfig_.bUse)
{
if (MyYaml::GetIns()->GetStringValue("gc_data_source") != "camera" || !dataSourceConfig_.bUse) {
bUseEngine_ = false;
LogWarn << "engineId_:" << engineId_ << " not use engine";
return APP_ERR_OK;
@ -35,10 +32,8 @@ APP_ERROR CameraEngine::Init()
return APP_ERR_OK;
}
APP_ERROR CameraEngine::DeInit()
{
if (!bUseEngine_)
{
APP_ERROR CameraEngine::DeInit() {
if (!bUseEngine_) {
LogWarn << "engineId_:" << engineId_ << " not use engine";
return APP_ERR_OK;
}
@ -47,21 +42,17 @@ APP_ERROR CameraEngine::DeInit()
return APP_ERR_OK;
}
void CameraEngine::ResetCamera()
{
if (pFormatCtx_ != nullptr)
{
void CameraEngine::ResetCamera() {
if (pFormatCtx_ != nullptr) {
// clear the cache of the queue
avformat_close_input(&pFormatCtx_);
pFormatCtx_ = nullptr;
}
}
APP_ERROR CameraEngine::ConnectCamera()
{
APP_ERROR CameraEngine::ConnectCamera() {
pFormatCtx_ = CreateFormatContext(); // create context
if (pFormatCtx_ == nullptr)
{
if (pFormatCtx_ == nullptr) {
LogError << "engineId_:" << engineId_ << " pFormatCtx_ null!";
return APP_ERR_COMM_FAILURE;
}
@ -72,8 +63,7 @@ APP_ERROR CameraEngine::ConnectCamera()
// get stream information
int iRet = APP_ERR_OK;
iRet = GetStreamInfo();
if (iRet != APP_ERR_OK)
{
if (iRet != APP_ERR_OK) {
LogError << "engineId_:" << engineId_ << " Stream Info Check failed, iRet = " << iRet;
return APP_ERR_COMM_FAILURE;
}
@ -81,92 +71,68 @@ APP_ERROR CameraEngine::ConnectCamera()
return APP_ERR_OK;
}
APP_ERROR CameraEngine::GetStreamInfo()
{
if (pFormatCtx_ != nullptr)
{
APP_ERROR CameraEngine::GetStreamInfo() {
if (pFormatCtx_ != nullptr) {
iVideoStream_ = -1;
iAudioStream_ = -1;
//frameInfo_.iFrameId = 0; // frame ids start at 0
for (unsigned int i = 0; i < pFormatCtx_->nb_streams; i++)
{
for (unsigned int i = 0; i < pFormatCtx_->nb_streams; i++) {
AVStream *inStream = pFormatCtx_->streams[i];
if (inStream->codecpar->codec_type == AVMEDIA_TYPE_VIDEO)
{
if (inStream->codecpar->codec_type == AVMEDIA_TYPE_VIDEO) {
iVideoStream_ = i;
frameInfo_.iHeight = inStream->codecpar->height;
frameInfo_.iWidth = inStream->codecpar->width;
// get the frame rate from the two stream members; prefer the average frame rate, and fall back to the real-time rate when it is {x,0} or {0,1}
if (inStream->avg_frame_rate.den == 0 || (inStream->avg_frame_rate.num == 0 && inStream->avg_frame_rate.den == 1))
{
if (inStream->avg_frame_rate.den == 0 ||
(inStream->avg_frame_rate.num == 0 && inStream->avg_frame_rate.den == 1)) {
frameInfo_.iRate = inStream->r_frame_rate.num / inStream->r_frame_rate.den;
}
else
{
} else {
frameInfo_.iRate = inStream->avg_frame_rate.num / inStream->avg_frame_rate.den;
}
LogDebug << "engineId_:" << engineId_ << " width:" << frameInfo_.iWidth << " height:" << frameInfo_.iHeight
LogDebug << "engineId_:" << engineId_ << " width:" << frameInfo_.iWidth << " height:"
<< frameInfo_.iHeight
<< " rate:" << frameInfo_.iRate << " iVideoStream_:" << iVideoStream_;
}
else if (inStream->codecpar->codec_type == AVMEDIA_TYPE_AUDIO)
{
} else if (inStream->codecpar->codec_type == AVMEDIA_TYPE_AUDIO) {
iAudioStream_ = i;
LogDebug << "engineId_:" << engineId_ << " iAudioStream_:" << iAudioStream_;
}
}
if (iVideoStream_ == -1)
{
if (iVideoStream_ == -1) {
LogError << "engineId_:" << engineId_ << " Didn't find a video stream!";
return APP_ERR_COMM_FAILURE;
}
if (frameInfo_.iHeight < LOW_THRESHOLD || frameInfo_.iWidth < LOW_THRESHOLD ||
frameInfo_.iHeight > MAX_THRESHOLD || frameInfo_.iWidth > MAX_THRESHOLD)
{
frameInfo_.iHeight > MAX_THRESHOLD || frameInfo_.iWidth > MAX_THRESHOLD) {
LogError << "engineId_:" << engineId_ << " Size of frame is not supported in DVPP Video Decode!";
return APP_ERR_COMM_FAILURE;
}
AVCodecID codecId = pFormatCtx_->streams[iVideoStream_]->codecpar->codec_id;
if (codecId == AV_CODEC_ID_H264)
{
if (codecId == AV_CODEC_ID_H264) {
int profile = pFormatCtx_->streams[iVideoStream_]->codecpar->profile;
if (profile == FF_PROFILE_H264_BASELINE)
{
if (profile == FF_PROFILE_H264_BASELINE) {
frameInfo_.format = H264_BASELINE_LEVEL;
}
else if (profile == FF_PROFILE_H264_MAIN)
{
} else if (profile == FF_PROFILE_H264_MAIN) {
frameInfo_.format = H264_MAIN_LEVEL;
}
else if (profile == FF_PROFILE_H264_HIGH)
{
} else if (profile == FF_PROFILE_H264_HIGH) {
frameInfo_.format = H264_HIGH_LEVEL;
}
else
{
} else {
LogError << "engineId_:" << engineId_ << " not support h264 profile";
return APP_ERR_COMM_FAILURE;
}
}
else if (codecId == AV_CODEC_ID_H265)
{
} else if (codecId == AV_CODEC_ID_H265) {
int profile = pFormatCtx_->streams[iVideoStream_]->codecpar->profile;
if (profile == FF_PROFILE_HEVC_MAIN)
{
if (profile == FF_PROFILE_HEVC_MAIN) {
frameInfo_.format = H265_MAIN_LEVEL;
}
else
{
} else {
LogError << "engineId_:" << engineId_ << " not support h265 profile";
return APP_ERR_COMM_FAILURE;
}
}
else
{
} else {
LogError << "engineId_:" << engineId_ << " Error unsupported format" << codecId;
return APP_ERR_COMM_FAILURE;
}
@ -174,8 +140,7 @@ APP_ERROR CameraEngine::GetStreamInfo()
return APP_ERR_OK;
}
AVFormatContext *CameraEngine::CreateFormatContext()
{
AVFormatContext *CameraEngine::CreateFormatContext() {
// create message for stream pull
AVFormatContext *pFormatContext = nullptr;
AVDictionary *pOptions = nullptr;
@ -184,24 +149,21 @@ AVFormatContext *CameraEngine::CreateFormatContext()
if (dataSourceConfig_.strUrl.find("rtsp:") != std::string::npos) // rtsp
{
av_dict_set(&pOptions, "rtsp_transport", "tcp", 0); // 指定其传输方式为TCP
// av_dict_set(&pOptions, "stimeout", "3000000", 0); // 设置超时3秒
// av_dict_set(&pOptions, "rw_timeout", "3000", 0); //单位:ms
av_dict_set(&pOptions, "timeout", "3000000", 0); //设置超时时间为3秒
av_dict_set(&pOptions, "stimeout", "3000000", 0); // 设置超时3秒
}
//av_register_all(); //注册所有支持的格式(这里一定注册这些,否则会因为协议解析问题报错!!!)
//avcodec_register_all(); //注册编解码器
//avformat_network_init(); //注册网格格式,如果为本地文件则可以去掉该代码
av_register_all(); //注册所有支持的格式(这里一定注册这些,否则会因为协议解析问题报错!!!)
avcodec_register_all(); //注册编解码器
avformat_network_init(); //注册网格格式,如果为本地文件则可以去掉该代码
int iRet = avformat_open_input(&pFormatContext, dataSourceConfig_.strUrl.c_str(), nullptr, &pOptions);
if (nullptr != pOptions)
{
if (nullptr != pOptions) {
av_dict_free(&pOptions);
}
if (iRet != 0)
{
LogError << "engineId_:" << engineId_ << " Couldn't open input stream " << dataSourceConfig_.strUrl.c_str() << ", iRet=" << iRet;
if (iRet != 0) {
LogError << "engineId_:" << engineId_ << " Couldn't open input stream " << dataSourceConfig_.strUrl.c_str()
<< ", iRet=" << iRet;
return nullptr;
}
@ -216,8 +178,7 @@ AVFormatContext *CameraEngine::CreateFormatContext()
// }
iRet = avformat_find_stream_info(pFormatContext, nullptr);
if (iRet != 0)
{
if (iRet != 0) {
LogError << "engineId_:" << engineId_ << " Couldn't find stream information, iRet = " << iRet;
return nullptr;
}
@ -225,57 +186,44 @@ AVFormatContext *CameraEngine::CreateFormatContext()
}
// interrupt callback for av_read_frame
// int CameraEngine::InterruptCallback(void *pData)
// {
// TimeoutContext* pTimeOutCtx = (TimeoutContext*)pData;
// LogDebug << "InterruptCallback i64Timeout:" << pTimeOutCtx->i64Timeout;
// return std::chrono::duration_cast<std::chrono::milliseconds>(
// std::chrono::system_clock::now().time_since_epoch())
// .count() >= pTimeOutCtx->i64Timeout
// ? AVERROR_EXIT
// : 0;
// }
int CameraEngine::InterruptCallback(void *pData) {
TimeoutContext *pTimeOutCtx = (TimeoutContext *) pData;
LogDebug << "InterruptCallback i64Timeout:" << pTimeOutCtx->i64Timeout;
return std::chrono::duration_cast<std::chrono::milliseconds>(
std::chrono::system_clock::now().time_since_epoch())
.count() >= pTimeOutCtx->i64Timeout
? AVERROR_EXIT
: 0;
}
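
InterruptCallback is FFmpeg's standard escape hatch for blocking I/O: during av_read_frame the library polls interrupt_callback.callback and aborts with AVERROR_EXIT once it returns non-zero. A hedged sketch of arming such a deadline before each read, mirroring the usage further below (the helper and struct names here are illustrative):

extern "C" {
#include <libavformat/avformat.h>
}
#include <chrono>
#include <cstdint>

struct TimeoutCtx { int64_t i64Timeout = 0; }; // deadline in ms since epoch

static int OnInterrupt(void *pData)
{
    auto *pCtx = static_cast<TimeoutCtx *>(pData);
    int64_t now = std::chrono::duration_cast<std::chrono::milliseconds>(
        std::chrono::system_clock::now().time_since_epoch()).count();
    return now >= pCtx->i64Timeout ? AVERROR_EXIT : 0; // non-zero aborts the blocking call
}

static void ArmReadDeadline(AVFormatContext *pFmtCtx, TimeoutCtx &timeout, int64_t timeoutMs)
{
    timeout.i64Timeout = std::chrono::duration_cast<std::chrono::milliseconds>(
        std::chrono::system_clock::now().time_since_epoch()).count() + timeoutMs;
    pFmtCtx->interrupt_callback.callback = &OnInterrupt; // polled by FFmpeg during blocking I/O
    pFmtCtx->interrupt_callback.opaque = &timeout;
}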
APP_ERROR CameraEngine::Process()
{
int iRet = APP_ERR_OK;
if (!bUseEngine_)
{
APP_ERROR CameraEngine::Process() {
int iRet = APP_ERR_OK;
if (!bUseEngine_) {
LogWarn << "engineId_:" << engineId_ << " not use engine";
return APP_ERR_OK;
}
if (bHwDecode_)
{
if (bHwDecode_) {
iRet = ConnectCamera();
if (iRet == APP_ERR_OK)
{
if (iRet == APP_ERR_OK) {
LogInfo << "engineId_:" << engineId_ << " Start the stream......";
bReconnectFlag_ = false;
}
else
{
} else {
ResetCamera();
bReconnectFlag_ = true;
}
// Pull data cyclically
AVPacket pkt;
while (!isStop_)
{
while (!isStop_) {
// reconnect the camera
if (bReconnectFlag_)
{
if (bReconnectFlag_) {
iRet = ConnectCamera();
if (iRet == APP_ERR_OK)
{
if (iRet == APP_ERR_OK) {
LogInfo << "engineId_:" << engineId_ << " Start the stream......";
bReconnectFlag_ = false;
}
else
{
outputQueMap_[strPort1_]->push(std::static_pointer_cast<void>(std::make_shared<std::string>("摄像头连接失败!")));
} else {
// outputQueMap_[strPort1_]->push(std::static_pointer_cast<void>(std::make_shared<std::string>("摄像头连接失败!")));
ResetCamera();
std::this_thread::sleep_for(std::chrono::seconds(3)); //3秒后重连
continue;
@ -283,16 +231,24 @@ APP_ERROR CameraEngine::Process()
}
// set the av_read_frame interrupt callback (the callback aborts processing after 1s)
// TimeoutContext timeoutCtx = { std::chrono::duration_cast<std::chrono::milliseconds>(std::chrono::system_clock::now().time_since_epoch()).count() + 1000 };
// pFormatCtx_->interrupt_callback.callback = &CameraEngine::InterruptCallback;
// pFormatCtx_->interrupt_callback.opaque = &timeoutCtx;
TimeoutContext timeoutCtx = {std::chrono::duration_cast<std::chrono::milliseconds>(
std::chrono::system_clock::now().time_since_epoch()).count() + 1000};
pFormatCtx_->interrupt_callback.callback = InterruptCallback;
pFormatCtx_->interrupt_callback.opaque = &timeoutCtx;
av_init_packet(&pkt); //init pkt
iRet = av_read_frame(pFormatCtx_, &pkt); // must keep reading, otherwise stale data is returned
if (iRet != 0)
{
outputQueMap_[strPort1_]->push(std::static_pointer_cast<void>(std::make_shared<std::string>("图像读取失败!")));
// check whether we have read to the end of the stream
if (iRet == AVERROR_EOF) {
LogInfo << "CameraEngine--av_read_frame--end";
// break;
}
if (iRet != 0) {
outputQueMap_[strPort1_]->push(
std::static_pointer_cast<void>(std::make_shared<std::string>("图像读取失败!")));
LogError << "engineId_:" << engineId_ << " Read frame failed, reconnect iRet:" << iRet;
av_packet_unref(&pkt);
@ -301,64 +257,58 @@ APP_ERROR CameraEngine::Process()
bReconnectFlag_ = true;
continue;
}
else if (pkt.stream_index == iVideoStream_) // only decode the video stream
} else if (pkt.stream_index == iVideoStream_) // only decode the video stream
{
// LogDebug << "iRet:" << iRet << " pkt.size:" << pkt.size;
if (pkt.size <= 0)
{
if (pkt.size <= 0) {
LogError << "engineId_:" << engineId_ << " Invalid pkt.size: " << pkt.size;
av_packet_unref(&pkt);
continue;
}
if (dataSourceConfig_.strUrl.find(".mp4") != std::string::npos)
{
if (dataSourceConfig_.strUrl.find(".mp4") != std::string::npos) {
const char szStartCode[4] = {0, 0, 0, 1};
if (bIsAvc_ || memcmp(szStartCode, pkt.data, 4) != 0)
{ // is avc1 code, have no start code of H264
if (bIsAvc_ || memcmp(szStartCode, pkt.data, 4) != 0) { // is avc1 code, have no start code of H264
int iLen = 0;
uint8_t *p = pkt.data;
bIsAvc_ = true;
do
{ // add start_code for each NAL, one frame may have multi NALs.
iLen = ntohl(*((long *)p));
do { // add start_code for each NAL, one frame may have multi NALs.
iLen = ntohl(*((long *) p));
memcpy(p, szStartCode, 4);
p += 4;
p += iLen;
if (p >= pkt.data + pkt.size)
{
if (p >= pkt.data + pkt.size) {
break;
}
} while (1);
}
}
void* pH264Buffer = nullptr;
void *pH264Buffer = nullptr;
pH264Buffer = new uint8_t[pkt.size];
memcpy(pH264Buffer, pkt.data, pkt.size);
// assemble the data
std::shared_ptr<ProcessData> pProcessData = std::make_shared<ProcessData>();
std::shared_ptr <ProcessData> pProcessData = std::make_shared<ProcessData>();
pProcessData->iWidth = frameInfo_.iWidth;
pProcessData->iHeight = frameInfo_.iHeight;
pProcessData->iRate = frameInfo_.iRate;
pProcessData->i64TimeStamp = MyUtils::getins()->GetCurrentTimeMillis();
pProcessData->iDataSource = engineId_;
pProcessData->iSize = pkt.size;
pProcessData->pData.reset(pH264Buffer, [](void* data){if(data) {delete[] data; data = nullptr;}}); // memory managed by the smart pointer
pProcessData->pData.reset(pH264Buffer, [](void *data) {
if (data) {
delete[] data;
data = nullptr;
}
}); // memory managed by the smart pointer
// push to port 0 for video decoding
iRet = outputQueMap_[strPort0_]->push(std::static_pointer_cast<void>(pProcessData));
if (iRet != APP_ERR_OK)
{
iRet = outputQueMap_[strPort0_]->push(std::static_pointer_cast<void>(pProcessData), true);
if (iRet != APP_ERR_OK) {
LogError << "engineId_:" << engineId_ << "push the h264 frame data failed...";
}
}
else if (pkt.stream_index == iAudioStream_)
{
} else if (pkt.stream_index == iAudioStream_) {
// the audio stream is not processed.
}
else
{
} else {
LogError << "engineId_:" << engineId_ << " stream err stream_index:" << pkt.stream_index;
}
av_packet_unref(&pkt); //unref
@ -368,19 +318,17 @@ APP_ERROR CameraEngine::Process()
usleep(DELAY_TIME); // delay 40ms
}
}
}
else
{
} else {
// pull the RTSP stream from the camera
const std::string videoStreamAddress = std::string("rtspsrc location=") + dataSourceConfig_.strUrl.c_str() + " latency=10 ! \
rtph264depay ! h264parse ! nvv4l2decoder enable-max-performance=1 enable-frame-type-reporting=1 ! nvvidconv ! video/x-raw, format=(string)BGRx ! videoconvert ! appsink";
VideoCapture capture_video;
while(!capture_video.open(videoStreamAddress)){ //, cv::CAP_FFMPEG
LogInfo<<"Restart Opening video stream or file ..."<<std::endl;
while (!capture_video.open(videoStreamAddress)) { //, cv::CAP_FFMPEG
LogInfo << "Restart Opening video stream or file ..." << std::endl;
sleep(1);
}
LogInfo<<"Opening video stream or file Success:"<<engineId_;
LogInfo << "Opening video stream or file Success:" << engineId_;
int frameW = capture_video.get(3);
int frameH = capture_video.get(4);
@ -390,28 +338,27 @@ APP_ERROR CameraEngine::Process()
bool breadend = false;
cv::Mat frame(frameH, frameW, CV_8UC3);
while (!isStop_)
{
std::shared_ptr<FrameData> pBGRFrameData = std::make_shared<FrameData>();
std::shared_ptr<ProcessData> pProcessData = std::make_shared<ProcessData>();
if(!capture_video.read(frame)) {
while (!isStop_) {
std::shared_ptr <FrameData> pBGRFrameData = std::make_shared<FrameData>();
std::shared_ptr <ProcessData> pProcessData = std::make_shared<ProcessData>();
if (!capture_video.read(frame)) {
capture_video.release();
while(!capture_video.open(videoStreamAddress)){ //, cv::CAP_FFMPEG
LogInfo<<"Restart Opening video stream or file ..."<<std::endl;
while (!capture_video.open(videoStreamAddress)) { //, cv::CAP_FFMPEG
LogInfo << "Restart Opening video stream or file ..." << std::endl;
sleep(1);
}
continue;
}
unsigned int resizepBGRBuffer_Size = IMAGE_WIDTH*IMAGE_HEIGHT*3;
unsigned int resizepBGRBuffer_Size = IMAGE_WIDTH * IMAGE_HEIGHT * 3;
cv::Mat mtInImage, mtOutImage;
cv::resize(frame, mtInImage, cv::Size(IMAGE_WIDTH, IMAGE_HEIGHT));
cv::cvtColor(mtInImage, mtOutImage, cv::COLOR_BGR2RGB);
void* resizeBGRBufferobj = nullptr;
void *resizeBGRBufferobj = nullptr;
resizeBGRBufferobj = new uint8_t[resizepBGRBuffer_Size];
memcpy(resizeBGRBufferobj, mtOutImage.data, resizepBGRBuffer_Size);
pBGRFrameData->iDataSource = engineId_;
pBGRFrameData->iFrameId = nFrameid++;
pBGRFrameData->iSize = resizepBGRBuffer_Size;
@ -419,16 +366,21 @@ APP_ERROR CameraEngine::Process()
pBGRFrameData->frameInfo.iHeight = IMAGE_HEIGHT;
pBGRFrameData->frameInfo.iRate = frameRate;
pProcessData->pVoidData = std::static_pointer_cast<void>(pBGRFrameData);
pProcessData->pData.reset(resizeBGRBufferobj, [](void* data){if(data) {delete[] data; data = nullptr;}});
if (nFrameid >= 0xFFFFFFFF) {nFrameid = 0;}
pProcessData->pData.reset(resizeBGRBufferobj, [](void *data) {
if (data) {
delete[] data;
data = nullptr;
}
});
if (nFrameid >= 0xFFFFFFFF) { nFrameid = 0; }
pBGRFrameData->i64TimeStamp = MyUtils::getins()->GetCurrentTimeMillis();
pProcessData->iWidth = pBGRFrameData->frameInfo.iWidth;
pProcessData->iHeight = pBGRFrameData->frameInfo.iHeight;
pProcessData->iRate = pBGRFrameData->frameInfo.iRate;
iRet = outputQueMap_[strPort0_]->push(std::static_pointer_cast<void>(pProcessData));
iRet = outputQueMap_[strPort0_]->push(std::static_pointer_cast<void>(pProcessData), true);
}
}
return APP_ERR_OK;
}
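
Both decode paths above hand raw frame buffers to a std::shared_ptr<void> with a lambda deleter, so ownership travels with the data across queues. The pattern in isolation, as a minimal sketch (the function name is illustrative):

#include <cstdint>
#include <cstring>
#include <memory>

// Copy 'size' bytes into a heap buffer owned by a type-erased shared_ptr;
// the deleter restores the uint8_t type so delete[] matches new[].
std::shared_ptr<void> MakeOwnedCopy(const uint8_t *src, size_t size)
{
    uint8_t *buf = new uint8_t[size];
    std::memcpy(buf, src, size);
    return std::shared_ptr<void>(buf, [](void *data) {
        delete[] static_cast<uint8_t *>(data);
    });
}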

View File

@ -50,7 +50,7 @@ protected:
APP_ERROR GetStreamInfo();
APP_ERROR ConnectCamera(); //连接相机
void ResetCamera(); //复位相机连接
//static int InterruptCallback(void *pData);
static int InterruptCallback(void *pData);
private:
AVFormatContext *pFormatCtx_ = nullptr;

View File

@ -84,6 +84,7 @@ std::string ReadImgEngine::GetFileName(const std::string &strParam)
APP_ERROR ReadImgEngine::Process()
{
if (!bUseEngine_)
{
LogWarn << "engineId_:" << engineId_ << " not use engine";
@ -103,6 +104,7 @@ APP_ERROR ReadImgEngine::Process()
int iFileIndex = 0; // file index
while (!isStop_)
{
std::string strFilePath = vecFiles_.at(iFileIndex);
cv::Mat matBGR = cv::imread(strFilePath);
@ -150,7 +152,6 @@ APP_ERROR ReadImgEngine::Process()
//break; // publish only once
}
// simulate 25 frames per second
usleep(40000); //40ms
}

View File

@ -37,12 +37,12 @@ APP_ERROR ResultToHttpSrvEngine::DeInit()
* libcurl callback function
* inParam : void *pBuffer
: size_t size
: size_t nmemb
* outParam: std::string &strResp
* return :
*/
size_t ResultToHttpSrvEngine::WriteCallBack(void *pBuffer, size_t size, size_t nmemb, std::string &strResp)
{
size_t sizes = size * nmemb;
std::string strTemp((char*)pBuffer, sizes);
strResp += strTemp;
@ -51,8 +51,8 @@ size_t ResultToHttpSrvEngine::WriteCallBack(void *pBuffer, size_t size, size_t n
/**
* get a token from the http API
* inParam :
* outParam: std::string &strBladeAuth  token info
* return : true:; false:
*/
bool ResultToHttpSrvEngine::GetToken(std::string &strBladeAuth)
@ -131,7 +131,7 @@ bool ResultToHttpSrvEngine::GetToken(std::string &strBladeAuth)
/**
* post the result to the http API
* inParam : Json::Value &jvRequest
* outParam:
* return : true:; false:
*/
bool ResultToHttpSrvEngine::ResultToHttpSrv(Json::Value &jvRequest)
@ -169,7 +169,7 @@ bool ResultToHttpSrvEngine::ResultToHttpSrv(Json::Value &jvRequest)
pHeaderList = curl_slist_append(pHeaderList, strBladeAuth.c_str());
curl_easy_setopt(pCurl_, CURLOPT_CONNECTTIMEOUT, 1); // connect timeout (give up if the server is unreachable within 1s)
curl_easy_setopt(pCurl_, CURLOPT_URL, strURL_.c_str()); // set the url
curl_easy_setopt(pCurl_, CURLOPT_HTTPHEADER, pHeaderList); // set the request headers
curl_easy_setopt(pCurl_, CURLOPT_POSTFIELDS, strRequest.c_str()); // set the post body
curl_easy_setopt(pCurl_, CURLOPT_POST, 1); // use POST (non-zero means post)
curl_easy_setopt(pCurl_, CURLOPT_WRITEFUNCTION, WriteCallBack); // set the write callback
@ -194,19 +194,19 @@ bool ResultToHttpSrvEngine::ResultToHttpSrv(Json::Value &jvRequest)
JSONCPP_STRING errs;
if (!reader->parse(strResponse.data(), strResponse.data() + strResponse.size(), &jvResponse, &errs))
{
LogError << "comeTime:" << jvRequest["comeTime"].asString() << " carriageOrder:" << jvRequest["carriageOrder"].asInt()
LogError << "comeTime:" << jvRequest["comeTime"].asString() << " carriageOrder:" << jvRequest["carriageOrder"].asInt()
<< " response content fail " << strResponse;
return false;
}
if (!jvResponse["success"].asBool())
{
LogError << "comeTime:" << jvRequest["comeTime"].asString() << " carriageOrder:" << jvRequest["carriageOrder"].asInt()
LogError << "comeTime:" << jvRequest["comeTime"].asString() << " carriageOrder:" << jvRequest["carriageOrder"].asInt()
<< " response fail";
return false;
}
LogInfo << "comeTime:" << jvRequest["comeTime"].asString() << " carriageOrder:" << jvRequest["carriageOrder"].asInt()
LogInfo << "comeTime:" << jvRequest["comeTime"].asString() << " carriageOrder:" << jvRequest["carriageOrder"].asInt()
<< " post success";
return true;
}
@ -284,7 +284,7 @@ void ResultToHttpSrvEngine::DealHttpFailInfo()
}
}
inFile.close();
if(bAllSucc)
{
// all entries succeeded, delete the file
@ -340,7 +340,7 @@ APP_ERROR ResultToHttpSrvEngine::Process()
if (nullptr == pVoidData0)
{
usleep(1000); //1ms
// no data for more than 1 minute
iNoDataCnt_++;
if (iNoDataCnt_ > (60 * 1000))

View File

@ -41,7 +41,7 @@ void DeleteExpiredFolderEngine::DeletePreviousFolder(std::string path, const std
// 1. compute the date
std::string previous_date = getDateBeforeNDays(date, n_days);
if (!previous_date.empty())
std::cout << "Date before " << n_days << " days from " << date << " is: " << previous_date << std::endl;
LogDebug << "Date before " << n_days << " days from " << date << " is: " << previous_date;
// 2
@ -139,7 +139,7 @@ void DeleteExpiredFolderEngine::GetSubfolderNames(std::string &directory, std::v
while ((ent = readdir(dir)) != nullptr)
{
// exclude "." and ".."
if (ent->d_type == DT_DIR && ent->d_name[0] != '.' && ent->d_name == "best")
if (ent->d_type == DT_DIR && ent->d_name[0] != '.' && ent->d_name != "best")
{
folder_names.push_back(StrToDate(ent->d_name));
}
@ -158,9 +158,9 @@ void DeleteExpiredFolderEngine::DeleteFolder(const std::string directory)
int result = system(command.c_str());
if (result != 0)
std::cout << "Failed to remove directory recursively: " << directory << std::endl;
LogError << "Failed to remove directory recursively: " << directory;
else
std::cout << "delete folder successfully : " << directory << std::endl;
LogError << "delete folder successfully : " << directory;
}
// remove all elements in the vector earlier than the given date

View File

@ -988,8 +988,9 @@ void FilterTrainStepOneEngine::DealProcessDataPre(std::shared_ptr<ProcessData> p
// this->sendComeTrain(pProcessData->strTrainDate, pProcessData->strTrainName, iDirection_);
outputQueMap_[strPort0_]->push(std::static_pointer_cast<void>(iterProcessData->second), true);
// push the previous frame to port 0
PushData(strPort0_, iterProcessData->second);
// PushData(strPort0_, iterProcessData->second);
}
@ -1037,7 +1038,8 @@ APP_ERROR FilterTrainStepOneEngine::Process()
{
// push the end frame to port 0
LogDebug << "sourceid:" << pProcessData->iDataSource << " frameid:" << pProcessData->iFrameId << " isEnd:" << pProcessData->bIsEnd;
PushData(strPort0_, pProcessData);
// PushData(strPort0_, pProcessData);
outputQueMap_[strPort0_]->push(std::static_pointer_cast<void>(pProcessData), true);
}
// 3. everything finished, reset related parameters

View File

@ -58,7 +58,7 @@ private:
int iChkStopPX_;
int iChkStopCount_;
int iDirection_; // direction
int iPushDirection_; // direction that needs to be recognized
int rightFirst_; // leading large-box type when moving right
int leftFirst_; // leading large-box type when moving left
int iPartitionFrameNum_; // frame interval that qualifies as crossing wagons

View File

@ -113,6 +113,8 @@ void MergerAllEngine::PushData(std::shared_ptr<Train> pTrain)
<< "集装箱2: " << pTrain->container2.strContainerNo << "\n"
<< "集装箱2图片: " << pTrain->container2.strBestImg << "\n"
<< "集装箱2时间戳: " << pTrain->container2.i64TimeStamp << "\n"
<< "车厢开始时间: " << MyUtils::getins()->Stamp2Time(pTrain->i64StartTimeStamp, true) << "\n"
<< "车厢结束时间: " << MyUtils::getins()->Stamp2Time(pTrain->i64EndTimeStamp, true) << "\n"
<< " ---所有信息合并结果 END--- ";
if (pTrain->bIsEnd)
{

View File

@ -109,7 +109,7 @@ APP_ERROR LocalDataMoveEngine::Process()
pFtpData->strFtpFilePath = strImgPath;
pFtpData->strFtpFileName = strImgName;
pFtpData->bIsEnd = pProcessData->bIsEnd;
iRet = outputQueMap_[strPort0_]->push(std::static_pointer_cast<void>(pFtpData), false);
iRet = outputQueMap_[strPort0_]->push(std::static_pointer_cast<void>(pFtpData), true);
}
}
}

View File

@ -64,7 +64,7 @@ bool SaveCsvEngine::SaveMergerCsv(std::shared_ptr<Train> pTrain)
<< "time" << ','
<< "direction" << ','
<< "speed" << ','
<< "camerano" << ','
<< "typeId" << ','
<< "skipInterval" << ','
<< "carxh" << ','
<< "type" << ','
@ -131,7 +131,7 @@ bool SaveCsvEngine::SaveMergerCsv(std::shared_ptr<Train> pTrain)
<< strTime << ','
<< pTrain->iDirection << ','
<< 0.0 << ','
<< szCameraNo << ','
<< pTrain->trainNum.iTrainTypeId << ','
<< dataSourceConfig.iSkipInterval << ','
<< pTrain->iCarXH << ','
<< pTrain->trainNum.strTrainType << ','
@ -433,18 +433,18 @@ bool SaveCsvEngine::SaveContainerCsv(std::shared_ptr<TrainContainer> pTrainConta
catch (const std::exception &)
{
LogError << "strCsvPath:" << strCsvPath << " container savecsv fail!";
continue;
}
}
return true;
}
APP_ERROR SaveCsvEngine::Process()
{
int iRet = APP_ERR_OK;
while (!isStop_)
{
bool bPopFlag = false;
// pop port 0: wagon info
std::shared_ptr<void> pVoidData0 = nullptr;

View File

@ -154,6 +154,7 @@ APP_ERROR SaveImgEngine::Process()
Json::Value jvFrameInfo;
jvFrameInfo["timeStamp"] = pSaveImgData->i64TimeStamp;
jvFrameInfo["status"] = iStatus;
jvFrameInfo["moveType"] = pSaveImgData->nMonitorState;
jvFrameInfo["direction"] = pSaveImgData->iDirection;
jvFrameInfo["width"] = iWidth;
jvFrameInfo["height"] = iHeight;

View File

@ -192,7 +192,7 @@ void SaveStepOneResultEngine::DealCenterSpace(std::vector<PartionInfo> &vecParat
pPartionInfo->nStatus = ((pProcessData->iStatus == TRAINSTATUS_STOP) ? TRAIN_PAUSE : iDirection_);
LogWarn << "--------- 向Paration 发送数据 --------";
outputQueMap_[strPort0_]->push(std::static_pointer_cast<void>(pPartionInfo));
outputQueMap_[strPort0_]->push(std::static_pointer_cast<void>(pPartionInfo), true);
iPushSpaceFrameId_ = pPartionInfo->modelSpaceFrame;
bPushIsEnd_ = pPartionInfo->bIsEnd;
@ -444,7 +444,7 @@ void SaveStepOneResultEngine::SplitTrainByNumPro(std::shared_ptr<PartionInfo> &p
MyUtils::getins()->WriteJsonInfo(jvFrameInfo, strFilePath);
LogWarn << "--------- 向Paration 发送数据 --------";
outputQueMap_[strPort0_]->push(std::static_pointer_cast<void>(pPartionInfoNew));
outputQueMap_[strPort0_]->push(std::static_pointer_cast<void>(pPartionInfoNew), true);
iPushSpaceFrameId_ = pPartionInfoNew->modelSpaceFrame;
bPushIsEnd_ = pPartionInfoNew->bIsEnd;
@ -658,7 +658,7 @@ APP_ERROR SaveStepOneResultEngine::Process()
// after step one finishes, push to port 1 to copy image/text data and upload images.
if (jvFrameInfo["step1Finish"].asBool())
{
iRet = outputQueMap_[strPort1_]->push(std::static_pointer_cast<void>(pProcessData));
iRet = outputQueMap_[strPort1_]->push(std::static_pointer_cast<void>(pProcessData), true);
}
if (pProcessData->iDataSource == 0 && pPostData->iModelType == MODELTYPE_NUM)
@ -689,7 +689,7 @@ APP_ERROR SaveStepOneResultEngine::Process()
SplitTrainByNumPro(pPartionInfo, pProcessData);
LogWarn << "--------- 向Paration 发送数据 --------";
outputQueMap_[strPort0_]->push(std::static_pointer_cast<void>(pPartionInfo));
outputQueMap_[strPort0_]->push(std::static_pointer_cast<void>(pPartionInfo), true);
iPushSpaceFrameId_ = pPartionInfo->modelSpaceFrame;
bPushIsEnd_ = pPartionInfo->bIsEnd;

View File

@ -246,7 +246,7 @@ APP_ERROR TrainParationMgr::Process()
pTrainRange->i64EndTimeStamp = jvPartionInfo["endTimeStamp"].asInt64();
pTrainRange->bIsEnd = jvPartionInfo["isEnd"].asBool();
pTrainRange->bmodelconfirmed = jvPartionInfo["modelconfirmed"].asBool();
iRet = outputQueMap_[strPort0_]->push(std::static_pointer_cast<void>(pTrainRange));
iRet = outputQueMap_[strPort0_]->push(std::static_pointer_cast<void>(pTrainRange), true);
if (pPartionInfo->bIsEnd) {
lstPartInfo.clear();

View File

@ -1,147 +1,235 @@
#include "HardH264FFmpegDecode.h"
#include <iostream>
using namespace std;
HardH264FFmpegDecode::HardH264FFmpegDecode()
{
;
}
HardH264FFmpegDecode::~HardH264FFmpegDecode()
{
;
}
int HardH264FFmpegDecode::HardH264FFmpegDecoderInit(unsigned int uiWidth, unsigned int uiHeight, unsigned int uiFrameRate)
// int HardH264FFmpegDecode::HardH264FFmpegDecoderInit(unsigned int uiWidth, unsigned int uiHeight, unsigned int uiFrameRate)
// {
// uiWidth_ = uiWidth; uiHeight_ = uiHeight;
// uiFrameRate_ = uiFrameRate;
// iFrameFinished_ = 0;
// av_log_set_level(AV_LOG_ERROR);
// // AVCodecID codec_id = AV_CODEC_ID_H264; //解码H264
// // pCodec_ = avcodec_find_decoder(codec_id); //获取解码器
// pCodec_ = avcodec_find_decoder_by_name(NVIDIA_H264_DECODER);
// if (!pCodec_) {
// fprintf(stderr, "Codec '%s' not found\n", pCodec_->long_name);
// exit(1);
// }
// printf("Codec found with name %d(%s)\n", pCodec_->id, pCodec_->long_name);
// //创建上下文
// pCodecCtx_ = avcodec_alloc_context3(pCodec_);
// if (!pCodecCtx_){
// fprintf(stderr, "Could not allocate video codec context\n");
// exit(1);
// }
// //创建解析器
// pCodecParserCtx_ = av_parser_init(pCodec_->id);
// if (!pCodecParserCtx_){
// fprintf(stderr, "parser not found\n");
// exit(1);
// }
// //if(pCodec_->capabilities&CODEC_CAP_TRUNCATED)
// // pCodecCtx_->flags|= CODEC_FLAG_TRUNCATED;
// //打开解码器
// int ret = avcodec_open2(pCodecCtx_, pCodec_, nullptr);
// if (ret < 0) {
// fprintf(stderr, "Could not open codec\n");
// printf("avcodec_open2 ret is: %d\n",ret);
// exit(1);
// }
// //分配packet
// pPacket_ = av_packet_alloc();
// if (!pPacket_){
// fprintf(stderr, "Could not allocate video packet\n");
// exit(1);
// }
// // av_init_packet(pPacket_);
// //分配frame
// pSrcFrame_ = av_frame_alloc();
// if (!pSrcFrame_) {
// fprintf(stderr, "Could not allocate video src pFrame\n");
// exit(1);
// }
// pDstFrame_ = av_frame_alloc();
// if (!pDstFrame_) {
// fprintf(stderr, "Could not allocate video dst pFrame\n");
// exit(1);
// }
// printf("after align down video_width: %d, video_height: %d\n", uiWidth_, uiHeight_);
// //初始化解析器参数
// pCodecCtx_->time_base.num = 1;
// pCodecCtx_->frame_number = 1; //每包一个视频帧
// pCodecCtx_->codec_type = AVMEDIA_TYPE_VIDEO;
// pCodecCtx_->bit_rate = 0;
// pCodecCtx_->time_base.den = uiFrameRate_;//帧率
// pCodecCtx_->width = uiWidth_; //视频宽
// pCodecCtx_->height = uiHeight_; //视频高
// // pCodecCtx_->pix_fmt = AV_PIX_FMT_YUV420P;
// int bufferSize = av_image_get_buffer_size(AV_PIX_FMT_YUV420P,
// pCodecCtx_->width,
// pCodecCtx_->height, 1);
// pu8OutBuffer_ = (unsigned char *) av_malloc(bufferSize);
// av_image_fill_arrays(pDstFrame_->data,
// pDstFrame_->linesize,
// pu8OutBuffer_,
// AV_PIX_FMT_YUV420P,
// pCodecCtx_->width,
// pCodecCtx_->height, 1);
// printf("pDstFrame_->linesize: %d, bufferSize: %d\n", pDstFrame_->linesize, bufferSize);
// pSwsContext_ = sws_getContext(pCodecCtx_->width, pCodecCtx_->height, pCodecCtx_->pix_fmt,
// pCodecCtx_->width, pCodecCtx_->height, AV_PIX_FMT_YUV420P, SWS_BICUBIC, nullptr, nullptr, nullptr);
// printf("pCodecCtx_->width: %d, pCodecCtx_->height: %d, pCodecCtx_->pix_fmt: %d\n", pCodecCtx_->width, pCodecCtx_->height, pCodecCtx_->pix_fmt);
// return 0;
// }
int HardH264FFmpegDecode::HardH264FFmpegDecoderInit(unsigned int width, unsigned int height, unsigned int frame_rate)
{
uiWidth_ = uiWidth; uiHeight_ = uiHeight;
uiFrameRate_ = uiFrameRate;
iFrameFinished_ = 0;
width_= width;
height_= height;
frame_rate_= frame_rate;
frameFinished_= 0;
av_log_set_level(AV_LOG_ERROR);
avcodec_register_all(); // register codecs
avformat_network_init(); // init network support; can be removed for local files
// AVCodecID codec_id = AV_CODEC_ID_H264; // decode H264
// pCodec_ = avcodec_find_decoder(codec_id); // get the decoder
pCodec_ = avcodec_find_decoder_by_name(NVIDIA_H264_DECODER);
if (!pCodec_) {
fprintf(stderr, "Codec '%s' not found\n", pCodec_->long_name);
if (!pCodec_)
{
// fprintf(stderr, "Codec '%s' not found\n", pCodec_->long_name);
exit(1);
}
printf("Codec found with name %d(%s)\n", pCodec_->id, pCodec_->long_name);
// printf("Codec found with name %d(%s)\n", pCodec_->id, pCodec_->long_name);
// create the context
pCodecCtx_ = avcodec_alloc_context3(pCodec_);
if (!pCodecCtx_){
fprintf(stderr, "Could not allocate video codec context\n");
if (!pCodecCtx_)
{
// fprintf(stderr, "Could not allocate video codec context\n");
exit(1);
}
// create the parser
pCodecParserCtx_ = av_parser_init(pCodec_->id);
if (!pCodecParserCtx_){
fprintf(stderr, "parser not found\n");
if (!pCodecParserCtx_)
{
// fprintf(stderr, "parser not found\n");
exit(1);
}
// if(pCodec_->capabilities&CODEC_CAP_TRUNCATED)
//     pCodecCtx_->flags|= CODEC_FLAG_TRUNCATED;
// open the decoder
int ret = avcodec_open2(pCodecCtx_, pCodec_, nullptr);
if (ret < 0) {
fprintf(stderr, "Could not open codec\n");
printf("avcodec_open2 ret is: %d\n",ret);
if (ret < 0)
{
// fprintf(stderr, "Could not open codec\n");
// printf("avcodec_open2 ret is: %d\n",ret);
exit(1);
}
// allocate the packet
pPacket_ = av_packet_alloc();
if (!pPacket_){
fprintf(stderr, "Could not allocate video packet\n");
exit(1);
}
// av_init_packet(pPacket_);
// allocate the frames
pSrcFrame_ = av_frame_alloc();
if (!pSrcFrame_) {
fprintf(stderr, "Could not allocate video src pFrame\n");
exit(1);
}
pDstFrame_ = av_frame_alloc();
if (!pDstFrame_) {
fprintf(stderr, "Could not allocate video dst pFrame\n");
exit(1);
}
printf("after align down video_width: %d, video_height: %d\n", uiWidth_, uiHeight_);
// initialize parser parameters
pCodecCtx_->time_base.num = 1;
pCodecCtx_->frame_number = 1; // one video frame per packet
pCodecCtx_->codec_type = AVMEDIA_TYPE_VIDEO;
pCodecCtx_->bit_rate = 0;
pCodecCtx_->time_base.den = uiFrameRate_; // frame rate
pCodecCtx_->width = uiWidth_; // video width
pCodecCtx_->height = uiHeight_; // video height
// pCodecCtx_->pix_fmt = AV_PIX_FMT_YUV420P;
pCodecCtx_->frame_number = 1; // one video frame per packet
pCodecCtx_->codec_type = AVMEDIA_TYPE_VIDEO;
pCodecCtx_->bit_rate = 0;
pCodecCtx_->time_base.den = frame_rate_; // frame rate
pCodecCtx_->width = width_; // video width
pCodecCtx_->height = height_; // video height
int bufferSize = av_image_get_buffer_size(AV_PIX_FMT_YUV420P,
pCodecCtx_->width,
pCodecCtx_->height, 1);
pu8OutBuffer_ = (unsigned char *) av_malloc(bufferSize);
av_image_fill_arrays(pDstFrame_->data,
pDstFrame_->linesize,
pu8OutBuffer_,
AV_PIX_FMT_YUV420P,
pCodecCtx_->width,
pCodecCtx_->height, 1);
printf("pDstFrame_->linesize: %d, bufferSize: %d\n", pDstFrame_->linesize, bufferSize);
// allocate the frame
pFrame_= av_frame_alloc();
if (!pFrame_)
{
// fprintf(stderr, "Could not allocate video frame\n");
exit(1);
}
pSwsContext_ = sws_getContext(pCodecCtx_->width, pCodecCtx_->height, pCodecCtx_->pix_fmt,
pCodecCtx_->width, pCodecCtx_->height, AV_PIX_FMT_YUV420P, SWS_BICUBIC, nullptr, nullptr, nullptr);
printf("pCodecCtx_->width: %d, pCodecCtx_->height: %d, pCodecCtx_->pix_fmt: %d\n", pCodecCtx_->width, pCodecCtx_->height, pCodecCtx_->pix_fmt);
// allocate the packet
pPacket_ = av_packet_alloc();
if (!pPacket_)
{
// fprintf(stderr, "Could not allocate video packet\n");
exit(1);
}
// av_init_packet(pPacket_);
return 0;
}
int HardH264FFmpegDecode::HardH264FFmpegDecoderDeInit()
{
if(pu8OutBuffer_){
av_free(pu8OutBuffer_);
pu8OutBuffer_ = nullptr;
}
if(pSrcFrame_){
av_frame_free(&pSrcFrame_);
pSrcFrame_ = nullptr;
}
if(pDstFrame_){
av_frame_free(&pDstFrame_);
pDstFrame_ = nullptr;
}
if(pPacket_){
av_packet_free(&pPacket_);
if(pSrcFrame_){
av_frame_free(&pSrcFrame_);
pSrcFrame_ = nullptr;
}
if(pDstFrame_){
av_frame_free(&pDstFrame_);
pDstFrame_ = nullptr;
}
if(pPacket_){
av_packet_free(&pPacket_);
pPacket_ = nullptr;
}
if(pCodecParserCtx_){
av_parser_close(pCodecParserCtx_);
pCodecParserCtx_ = nullptr;
}
if(pCodecCtx_){
avcodec_close(pCodecCtx_);
av_free(pCodecCtx_);
pCodecCtx_ = nullptr;
}
if(pCodecParserCtx_){
av_parser_close(pCodecParserCtx_);
pCodecParserCtx_ = nullptr;
}
if(pCodecCtx_){
avcodec_close(pCodecCtx_);
av_free(pCodecCtx_);
pCodecCtx_ = nullptr;
}
if(pSwsContext_){
sws_freeContext(pSwsContext_);
pSwsContext_ = nullptr;
}
if(pSwsContext_){
sws_freeContext(pSwsContext_);
pSwsContext_ = nullptr;
}
}
int HardH264FFmpegDecode::HardH264FFmpegDecoderFilterGraph(AVFilterGraph *pGraph, AVFilterContext *pSourceCtx, AVFilterContext *pSinkCtx)
@@ -149,7 +237,7 @@ int HardH264FFmpegDecode::HardH264FFmpegDecoderFilterGraph(AVFilterGraph *pGraph
int ret;
AVFilterInOut *pOutputs = nullptr, *pInputs = nullptr;
if ((ret = avfilter_link(pSourceCtx, 0, pSinkCtx, 0)) >= 0){
        ret = avfilter_graph_config(pGraph, nullptr);
}
avfilter_inout_free(&pOutputs);
@@ -168,14 +256,14 @@ int HardH264FFmpegDecode::HardH264FFmpegDecoderConfigureVideoFilters(AVFilterGra
"video_size=%dx%d:pix_fmt=%d:time_base=1/1200000",
iWidth, iHeight, iFormat);
if ((ret = avfilter_graph_create_filter(&pFiltSrc,
avfilter_get_by_name("buffer"), "ffplay_buffer", BufferSrcArgs,
nullptr, pGraph)) < 0){
avfilter_get_by_name("buffer"), "ffplay_buffer", BufferSrcArgs,
nullptr, pGraph)) < 0){
goto fail;
}
ret = avfilter_graph_create_filter(&pFiltDst,
avfilter_get_by_name("buffersink"),
"ffplay_buffersink", nullptr, nullptr, pGraph);
avfilter_get_by_name("buffersink"),
"ffplay_buffersink", nullptr, nullptr, pGraph);
if (ret < 0){
goto fail;
}
@@ -190,14 +278,14 @@ int HardH264FFmpegDecode::HardH264FFmpegDecoderConfigureVideoFilters(AVFilterGra
pDecoderFilterIn = pFiltSrc;
pDecoderFilterOut = pFiltDst;
fail:
return ret;
}
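For reference, a worked example of the BufferSrcArgs string assembled above (a sketch; the numeric pix_fmt value depends on the FFmpeg build):

    char BufferSrcArgs[256];
    snprintf(BufferSrcArgs, sizeof(BufferSrcArgs),
             "video_size=%dx%d:pix_fmt=%d:time_base=1/1200000",
             1920, 1080, AV_PIX_FMT_NV12);
    // -> "video_size=1920x1080:pix_fmt=23:time_base=1/1200000" in recent FFmpeg builds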
int HardH264FFmpegDecode::HardH264FFmpegDecoder(AVCodecContext *pDecCtx, AVFrame *pFrame, AVPacket *pPkt, void* pOutputData, unsigned int* puiOutputDataSize)
{
    int ret;
    AVFilterGraph* pDecoderGraph = nullptr;
    ret = avcodec_send_packet(pDecCtx, pPkt); // feed the packet to the decoder
if (ret < 0) {
@@ -208,7 +296,7 @@ int HardH264FFmpegDecode::HardH264FFmpegDecoder(AVCodecContext *pDecCtx, AVFrame
while (ret >= 0) {
        ret = avcodec_receive_frame(pDecCtx, pFrame); // receive a decoded frame
if (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF){
fprintf(stderr, "During decoding eof\n");
fprintf(stderr, "During decoding eof\n");
return -1;
}
else if (ret < 0) {
@@ -219,74 +307,113 @@ int HardH264FFmpegDecode::HardH264FFmpegDecoder(AVCodecContext *pDecCtx, AVFrame
//printf("saving frame %3d\n", pDecCtx->frame_number);
fflush(stdout);
        AVFilterContext *pDecoderFilterIn = nullptr, *pDecoderFilterOut = nullptr;
// pFrame->width = ALIGN_DOWN(pFrame->width, 32);
// pFrame->height = ALIGN_DOWN(pFrame->height, 32);
// printf("pFrame->width: %d\tpFrame->height: %d\n", pFrame->width, pFrame->height);
pDecoderGraph = avfilter_graph_alloc();
HardH264FFmpegDecoderConfigureVideoFilters(pDecoderGraph, pDecoderFilterIn, pDecoderFilterOut, pFrame->width, pFrame->height, pFrame->format);
        if (pFrame->format != AV_PIX_FMT_YUV420P){
DUMP_FRAME(pFrame);
            ret = av_buffersrc_add_frame(pDecoderFilterIn, pFrame);
ret = av_buffersink_get_frame_flags(pDecoderFilterOut, pFrame, 0);
DUMP_FRAME(pFrame);
            int iSize = pFrame->width * pFrame->height;
            memcpy(pOutputData, pFrame->data[0], iSize);                                      // Y
            memcpy((uint8_t *)pOutputData + iSize, pFrame->data[1], iSize / 4);               // U
            memcpy((uint8_t *)pOutputData + iSize + iSize / 4, pFrame->data[2], iSize / 4);   // V
            *puiOutputDataSize = iSize * 3 / 2;
            return iSize * 3 / 2;
        }
    }
    return 0;
}
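The plane arithmetic above assumes tightly packed YUV420P output. As a concrete example, for a 1920x1080 frame:

    // iSize     = 1920 * 1080 = 2073600 bytes (Y plane)
    // iSize / 4 = 518400 bytes (U plane, half width x half height)
    // iSize / 4 = 518400 bytes (V plane)
    // total     = iSize * 3 / 2 = 3110400 bytes

Note also that pDecoderGraph is allocated on every call and never released; callers decoding long streams may want to free it with avfilter_graph_free() once the frame has been copied out.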
// int HardH264FFmpegDecode::HardH264FFmpegDecoderV2(AVCodecContext *pDecCtx, SwsContext *pSwsCtx, AVFrame *pSrcFrame, AVFrame *pDstFrame, AVPacket *pPkt, void* pOutputData,unsigned int* puiOutputDataSize)
// {
// std::cout << "HardH264FFmpegDecoderV2--in " << std::endl;
// int ret;
//     ret = avcodec_send_packet(pDecCtx, pPkt); // feed the packet to the decoder
// if (ret < 0) {
// fprintf(stderr, "Error sending a packet for decoding\n");
// exit(1);
// }
// std::cout << "HardH264FFmpegDecoderV2--in " << std::endl;
// while (ret >= 0) {
//         ret = avcodec_receive_frame(pDecCtx, pSrcFrame); // decode
// if (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF){
// fprintf(stderr, "During decoding eof\n");
// return -1;
// }
// else if (ret < 0) {
// fprintf(stderr, "Error during decoding\n");
// exit(1);
// }
// // pDecCtx->width = ALIGN_DOWN(pDecCtx->width, 32);
// // pDecCtx->height = ALIGN_DOWN(pDecCtx->height, 32);
// sws_scale(pSwsCtx,
// (const uint8_t *const *)pSrcFrame->data,
// pSrcFrame->linesize,
// 0,
// pDecCtx->height,
// pDstFrame->data,
// pDstFrame->linesize);
// //printf("saving frame %3d\n", pDecCtx->frame_number);
// fflush(stdout);
// int iSize = pDecCtx->width * pDecCtx->height;
// memcpy(pOutputData, pDstFrame->data[0], iSize); //Y
// memcpy(pOutputData+iSize, pDstFrame->data[1], iSize/4); //U
// memcpy(pOutputData+iSize+iSize/4, pDstFrame->data[2], iSize/4); //V
// *puiOutputDataSize = iSize*3/2;
// return iSize*3/2;
// }
// return 0;
// }
int HardH264FFmpegDecode::HardH264FFmpegDecoderV2(AVCodecContext* pDecCtx, AVFrame* pSrcFrame, AVPacket* pPkt, void* pOutputData, unsigned int* puiOutputDataSize)
{
    int ret;
    ret = avcodec_send_packet(pDecCtx, pPkt); // feed the packet to the decoder
    if (ret < 0)
    {
        // fprintf(stderr, "Error sending a packet for decoding\n");
        exit(1);
    }
    while (ret >= 0)
    {
        ret = avcodec_receive_frame(pDecCtx, pSrcFrame); // receive a decoded frame
        if (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF)
        {
            // fprintf(stderr, "During decoding eof\n");
            return -1;
        }
        else if (ret < 0)
        {
            // fprintf(stderr, "Error during decoding\n");
            exit(1);
        }
        // pDecCtx->width = ALIGN_DOWN(pDecCtx->width, 32);
        // pDecCtx->height = ALIGN_DOWN(pDecCtx->height, 32);
        // printf("saving frame %3d\n", pDecCtx->frame_number);
        // fflush(stdout);
        int iSize = pSrcFrame->width * pSrcFrame->height;
        memcpy(pOutputData, pSrcFrame->data[0], iSize);                                    // Y
        memcpy((uint8_t *)pOutputData + iSize, pSrcFrame->data[1], iSize / 4);             // U
        memcpy((uint8_t *)pOutputData + iSize + iSize / 4, pSrcFrame->data[2], iSize / 4); // V
        *puiOutputDataSize = iSize * 3 / 2;
        return iSize * 3 / 2;
}
return 0;
}
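The straight memcpy above assumes the decoder returns tightly packed planes (linesize[i] equal to the plane width). Hardware wrappers such as nvmpi often pad rows, so a defensive row-by-row copy may be safer; a sketch under that assumption (not the project's code):

    static void CopyPlane(uint8_t *dst, const uint8_t *src, int srcStride,
                          int width, int height)
    {
        for (int y = 0; y < height; ++y)
            memcpy(dst + (size_t)y * width, src + (size_t)y * srcStride, width); // drop row padding
    }
    // CopyPlane(out,               f->data[0], f->linesize[0], w,     h);     // Y
    // CopyPlane(out + w*h,         f->data[1], f->linesize[1], w / 2, h / 2); // U
    // CopyPlane(out + w*h + w*h/4, f->data[2], f->linesize[2], w / 2, h / 2); // V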


@@ -56,7 +56,8 @@ extern "C"
frame->linesize[2] \
);}
#define NVIDIA_H264_DECODER "h264_cuvid"
#define NVIDIA_H264_DECODER "h264_nvmpi"
// #define NVIDIA_H264_DECODER "h264_cuvid"
// #define NVIDIA_H264_DECODER "h264_v4l2m2m"
class HardH264FFmpegDecode
@@ -65,26 +66,31 @@ public:
HardH264FFmpegDecode();
~HardH264FFmpegDecode();
    int HardH264FFmpegDecoderInit(unsigned int width, unsigned int height, unsigned int frame_rate = 30);
int HardH264FFmpegDecoderDeInit();
int HardH264FFmpegDecoder(AVCodecContext *pDecCtx, AVFrame *pFrame, AVPacket *pPkt, void* pOutputData, unsigned int* puiOutputDataSize);
    // int HardH264FFmpegDecoderV2(AVCodecContext *pDecCtx, SwsContext *pSwsCtx, AVFrame *pSrcFrame, AVFrame *pDstFrame, AVPacket *pPkt, void* pOutputData, unsigned int* puiOutputDataSize);
    int HardH264FFmpegDecoderV2(AVCodecContext* pDecCtx, AVFrame* pSrcFrame, AVPacket* pPkt, void* pOutputData, unsigned int* puiOutputDataSize);
    const AVCodec *pCodec_ = nullptr;                 // decoder
    AVCodecContext *pCodecCtx_ = nullptr;             // codec context
    AVCodecParserContext *pCodecParserCtx_ = nullptr; // parser context
    AVFrame *pFrame_ = nullptr;
    AVFrame *pSrcFrame_ = nullptr;
    AVFrame *pDstFrame_ = nullptr;
    AVPacket *pPacket_ = nullptr;
    SwsContext *pSwsContext_ = nullptr;
    uint8_t *pu8OutBuffer_ = nullptr;
private:
int HardH264FFmpegDecoderFilterGraph(AVFilterGraph *pGraph, AVFilterContext *pSourceCtx, AVFilterContext *pSinkCtx);
int HardH264FFmpegDecoderConfigureVideoFilters(AVFilterGraph *pGraph, AVFilterContext* &pDecoderFilterIn, AVFilterContext* &pDecoderFilterOut, const int iWidth, const int iHeight, const int iFormat);
    unsigned int width_, height_;
    unsigned int frame_rate_;
    int frameFinished_;


@@ -56,109 +56,119 @@ APP_ERROR VideoDecodeEngine::Process()
}
int iRet = APP_ERR_OK;
    int iSkipCount = 1;
    int iNoCameraDataCnt = 0;
    while (!isStop_)
    {
        try
        {
            // receive image data from the upstream engine
            std::shared_ptr<void> pVoidData0 = nullptr;
            inputQueMap_[strPort0_]->pop(pVoidData0);
            if (nullptr == pVoidData0)
            {
                usleep(10 * 1000); // 10ms
                iNoCameraDataCnt++;
                if (iNoCameraDataCnt >= 1000) // no data within 10s: assume the camera is disconnected
                {
                    LogError << "engineId:" << engineId_ << " no camera data for over 10 seconds, camera may be disconnected. count:" << iNoCameraDataCnt;
                    iNoCameraDataCnt = 0;
                    // on camera failure, push an empty decoded-data packet so data keeps flowing to the downstream engines
                    std::shared_ptr<ProcessData> pProcessData = std::make_shared<ProcessData>();
                    pProcessData->iDataSource = engineId_;
                    pProcessData->i64TimeStamp = MyUtils::getins()->GetCurrentTimeMillis();
                    pProcessData->iSize = 0;
                    pProcessData->pData = nullptr;
                    iRet = outputQueMap_[strPort0_]->push(std::static_pointer_cast<void>(pProcessData));
                }
                continue;
            }
            iNoCameraDataCnt = 0;
            std::shared_ptr<ProcessData> pProcessData = std::static_pointer_cast<ProcessData>(pVoidData0);

            // create the decoder on first use
            if (hard_h264_ffmpeg_decoder_ == nullptr)
            {
                hard_h264_ffmpeg_decoder_ = new HardH264FFmpegDecode;
                int iRet = hard_h264_ffmpeg_decoder_->HardH264FFmpegDecoderInit(pProcessData->iWidth, pProcessData->iHeight, pProcessData->iRate);
                if (iRet != 0)
                {
                    LogError << "engineId:" << engineId_ << " HardH264FFmpegDecoderInit Failed";
                    if (hard_h264_ffmpeg_decoder_ != nullptr)
                    {
                        delete hard_h264_ffmpeg_decoder_;
                        hard_h264_ffmpeg_decoder_ = nullptr;
                    }
                    continue;
                }
            }

            // build the YUV420M output buffer
            unsigned int pYUV420MBuffer_Size = pProcessData->iWidth * pProcessData->iHeight * 3 / 2;
            void *pYUV420MBuffer = new uint8_t[pYUV420MBuffer_Size];
            std::shared_ptr<void> pYUVData;
            pYUVData.reset(pYUV420MBuffer, [](void *data){ if (data) { delete[] static_cast<uint8_t *>(data); } }); // smart pointer owns the buffer

            hard_h264_ffmpeg_decoder_->pPacket_->data = static_cast<uint8_t *>(pProcessData->pData.get()); // pointer to one complete H264 frame
            hard_h264_ffmpeg_decoder_->pPacket_->size = pProcessData->iSize;                               // size of the H264 frame

            // H264 hardware decode
            int iDecodeRet = hard_h264_ffmpeg_decoder_->HardH264FFmpegDecoderV2(hard_h264_ffmpeg_decoder_->pCodecCtx_,
                                                                                hard_h264_ffmpeg_decoder_->pFrame_,
                                                                                hard_h264_ffmpeg_decoder_->pPacket_,
                                                                                pYUV420MBuffer,
                                                                                &pYUV420MBuffer_Size);
            // int iDecodeRet = hard_h264_ffmpeg_decoder_->HardH264FFmpegDecoderV2(hard_h264_ffmpeg_decoder_->pCodecCtx_,
            //                                                                     hard_h264_ffmpeg_decoder_->pSwsContext_,
            //                                                                     hard_h264_ffmpeg_decoder_->pSrcFrame_,
            //                                                                     hard_h264_ffmpeg_decoder_->pDstFrame_,
            //                                                                     hard_h264_ffmpeg_decoder_->pPacket_,
            //                                                                     pYUV420MBuffer,
            //                                                                     &pYUV420MBuffer_Size);
            if (iDecodeRet > 0)
            {
                if (iSkipCount++ % dataSourceConfig_.iSkipInterval != 0)
                {
                    continue;
                }
                iSkipCount = 1;
                // convert the hardware-decoded YUV to BGR
                cv::Mat matYUV(pProcessData->iHeight * 3 / 2, pProcessData->iWidth, CV_8UC1);
                memcpy(matYUV.data, static_cast<uint8_t *>(pYUVData.get()), pYUV420MBuffer_Size);
                cv::Mat matBGR(pProcessData->iHeight, pProcessData->iWidth, CV_8UC3);
                cv::cvtColor(matYUV, matBGR, cv::COLOR_YUV2BGR_I420);
                cv::resize(matBGR, matBGR, cv::Size(IMAGE_WIDTH, IMAGE_HEIGHT));
                unsigned int iResizeSize = IMAGE_WIDTH * IMAGE_HEIGHT * 3;
                void *pResizeBGRBuffer = new uint8_t[iResizeSize];
                memcpy(pResizeBGRBuffer, matBGR.data, iResizeSize);
                pProcessData->pData.reset(pResizeBGRBuffer, [](void *data){ if (data) { delete[] static_cast<uint8_t *>(data); } });
                pProcessData->iSize = iResizeSize;
                pProcessData->iWidth = IMAGE_WIDTH;
                pProcessData->iHeight = IMAGE_HEIGHT;
                iRet = outputQueMap_[strPort0_]->push(std::static_pointer_cast<void>(pProcessData));
                if (iRet != APP_ERR_OK)
                {
                    LogError << "push the after hard h264 decode yuv420m frame data failed...";
                }
            }
            else
            {
                LogError << "engineId:" << engineId_ << " HardH264FFmpegDecoderV2 failed...iDecodeRet:" << iDecodeRet;
            }
        }
        catch (...)
        {
            LogError << "decode exception!!";
        }
}
}
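A small possible simplification of the loop above (a sketch, not part of this commit): cv::Mat can wrap the decoded buffer directly instead of copying into matYUV first, since the constructor that takes a data pointer does not copy:

    // wraps pYUVData's buffer; valid only while pYUVData is alive
    cv::Mat matYUV(pProcessData->iHeight * 3 / 2, pProcessData->iWidth, CV_8UC1,
                   static_cast<uint8_t *>(pYUVData.get()));
    cv::Mat matBGR;
    cv::cvtColor(matYUV, matBGR, cv::COLOR_YUV2BGR_I420); // cvtColor allocates its own output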


@@ -211,6 +211,7 @@ void MoveEngine::SingleDeviceProcess(std::shared_ptr<ProcessData> pProcessData,
pSaveImgData->bIsEnd = pProcessData->bIsEnd;
pSaveImgData->bSaveToFtp = true;
pSaveImgData->i64TimeStamp = pProcessData->i64TimeStamp;
pSaveImgData->nMonitorState = nType;
outputQueMap_[strPort0_]->push(std::static_pointer_cast<void>(pSaveImgData));
}
@@ -234,7 +235,7 @@ APP_ERROR MoveEngine::Process()
int nType = MONITOR_MODEL_INIT_STATE;
if (iQueueSize > 5)
{
LogDebug << "iQueueSize: " << iQueueSize;
// LogDebug << "iQueueSize: " << iQueueSize;
g_bNoDealStepTwoFlag = true;
}
else if (g_bNoDealStepTwoFlag)


@@ -14,7 +14,7 @@ APP_ERROR TrainStepOneEngine::Init()
bUseEngine_ = MyUtils::getins()->ChkIsHaveTarget("NUM");
if (!bUseEngine_)
{
LogInfo << "engineId_:" << engineId_ << " not use engine";
LogWarn << "engineId_:" << engineId_ << " not use engine";
return APP_ERR_OK;
}
@@ -98,7 +98,7 @@ APP_ERROR TrainStepOneEngine::InitModel()
int nRet = yolov5model.YoloV5ClearityInferenceInit(&modelinfo, strModelName, modelConfig_.strOmPath);
if (nRet != 0)
{
LogError << "YoloV5ClassifyInferenceInit nRet:" << nRet;
LogInfo << "YoloV5ClassifyInferenceInit nRet:" << nRet;
return APP_ERR_COMM_READ_FAIL;
}
return APP_ERR_OK;
@@ -147,7 +147,7 @@ APP_ERROR TrainStepOneEngine::DeInit()
{
if (!bUseEngine_)
{
LogInfo << "engineId_:" << engineId_ << " not use engine";
LogWarn << "engineId_:" << engineId_ << " not use engine";
return APP_ERR_OK;
}
@@ -170,7 +170,7 @@ void TrainStepOneEngine::PushData(const std::string &strPort, const std::shared_
int iRet = outputQueMap_[strPort]->push(std::static_pointer_cast<void>(pProcessData));
if (iRet != 0)
{
LogError << " frameid:" << pProcessData->iFrameId << " push fail iRet:" << iRet;
LogDebug << "sourceid:" << pProcessData->iDataSource << " frameid:" << pProcessData->iFrameId << " push fail iRet:" << iRet;
if (iRet == 2)
{
usleep(10000); // 10ms
@@ -204,14 +204,14 @@ void TrainStepOneEngine::FilterInvalidInfo(std::vector<stDetection> &vecRet, std
it->bbox[3] <= dataSourceCfg.fIdentifyAreasRBY))
{
LogDebug << "frameId:" << pProcessData->iFrameId
<< " bigclassid:" << it->class_id << " 超出识别区域-识别区域:("
<< dataSourceCfg.fIdentifyAreasLTX << "," << dataSourceCfg.fIdentifyAreasLTY << "),("
<< dataSourceCfg.fIdentifyAreasRBX << "," << dataSourceCfg.fIdentifyAreasRBY << ")";
<< " bigclassid:" << it->class_id << " 超出识别区域-识别区域:("
<< dataSourceCfg.fIdentifyAreasLTX << "," << dataSourceCfg.fIdentifyAreasLTY << "),("
<< dataSourceCfg.fIdentifyAreasRBX << "," << dataSourceCfg.fIdentifyAreasRBY << ")";
it = vecRet.erase(it);
continue;
}
            // if head detection is disabled, drop all train-head boxes
if (!MyYaml::GetIns()->GetBoolValue("gc_train_heard_detect") && it->class_id == TRAIN_HEAD)
{
LogDebug << "frameId:" << pProcessData->iFrameId << " 过滤掉车头编号";
@ -219,25 +219,13 @@ void TrainStepOneEngine::FilterInvalidInfo(std::vector<stDetection> &vecRet, std
continue;
}
            // filter non-head-number boxes detected in the train-head segment
            if (pProcessData->nMonitorState == MONITOR_MODEL_TRAIN_HEAD)
            {
                if (it->class_id != TRAIN_HEAD)
                {
                    LogDebug << " frameId:" << pProcessData->iFrameId
                             << " bigclassid:" << it->class_id << " detected at the train-head position, invalid!";
                    it = vecRet.erase(it);
                    continue;
                }
            }
@@ -245,7 +233,7 @@ void TrainStepOneEngine::FilterInvalidInfo(std::vector<stDetection> &vecRet, std
            // remove gap boxes at the train tail
if (pProcessData->nMonitorState == MONITOR_MODEL_TRAIN_TAIL
                && ((it->class_id >= 9 && it->class_id <= 17 && it->class_id != 15) || it->class_id == 18))
{
LogDebug << " frameId:" << pProcessData->iFrameId
<< " bigclassid:" << it->class_id
@@ -254,12 +242,12 @@ void TrainStepOneEngine::FilterInvalidInfo(std::vector<stDetection> &vecRet, std
continue;
}
            // remove all boxes reported in the no-train state
if (pProcessData->nMonitorState == MONITOR_MODEL_NO_TRAIN)
{
LogDebug << " frameId:" << pProcessData->iFrameId
<< " bigclassid:" << it->class_id
<<" 识别于模型反馈的无车状态,无效!";
<<" 识别于无车状态,无效!";
it = vecRet.erase(it);
continue;
}
@@ -298,7 +286,7 @@ void TrainStepOneEngine::FilterInvalidInfo(std::vector<stDetection> &vecRet, std
continue;
}
if (((it->class_id >= 2 && it->class_id <= 6) || it->class_id == J_TRAIN_NUM || it->class_id == W_TRAIN_NUM) &&
(it->bbox[3] - it->bbox[1]) < MyYaml::GetIns()->GetIntValue("gc_num_frame_height"))
(it->bbox[3] - it->bbox[1]) < MyYaml::GetIns()->GetIntValue("gc_num_frame_height"))
{
LogWarn << "疑似误识别到远股道车号,帧号:" << pProcessData->iFrameId
<< "大框高度:" << (it->bbox[3] - it->bbox[1]);
@@ -307,7 +295,7 @@ void TrainStepOneEngine::FilterInvalidInfo(std::vector<stDetection> &vecRet, std
}
if ((it->class_id == 1 || it->class_id == TRAIN_PRO)
&& (it->bbox[3] - it->bbox[1]) < MyYaml::GetIns()->GetIntValue("gc_pro_frame_height")) {
&& (it->bbox[3] - it->bbox[1]) < MyYaml::GetIns()->GetIntValue("gc_pro_frame_height")) {
LogWarn << "疑似误识别到远股道属性,帧号:" << pProcessData->iFrameId
<< "大框高度:" << (it->bbox[3] - it->bbox[1]);
it = vecRet.erase(it);
@@ -346,7 +334,7 @@ void TrainStepOneEngine::FilterInvalidInfo(std::vector<stDetection> &vecRet, std
int iCenterY = pProcessData->iHeight / 2;
        if (iHeight0 < iCenterY && iHeight1 < iCenterY) // not a flatcar
{
            if (!((vecRet[0].class_id >= 9 && vecRet[0].class_id <= 17 && vecRet[0].class_id != 15) || vecRet[0].class_id == U_TRAIN_SPACE) &&
!((vecRet[1].class_id >= 9 && vecRet[1].class_id <= 17 && vecRet[1].class_id != 15) || vecRet[1].class_id == U_TRAIN_SPACE))
{
LogDebug << " frameId:" << pProcessData->iFrameId << " no space";
@@ -413,7 +401,7 @@ APP_ERROR TrainStepOneEngine::Process()
{
if (!bUseEngine_)
{
LogInfo << "engineId_:" << engineId_ << " not use engine";
LogWarn << "engineId_:" << engineId_ << " not use engine";
return APP_ERR_OK;
}
int iRet = APP_ERR_OK;
@@ -434,7 +422,7 @@ APP_ERROR TrainStepOneEngine::Process()
pPostData->iModelType = MODELTYPE_NUM;
        pPostData->nMonitorState = pProcessData->nMonitorState; // the four states from arrival detection
        // fetch the image
if (pProcessData->iStatus == TRAINSTATUS_RUN || pProcessData->bIsEnd)
{
if (pProcessData->pData != nullptr && pProcessData->iSize != 0)
@@ -443,7 +431,10 @@ APP_ERROR TrainStepOneEngine::Process()
                // run inference
                std::vector<stDetection> res;
                // auto start = std::chrono::system_clock::now(); // timing start
yolov5model.YoloV5ClearityInferenceModel(img, res);
//auto end = std::chrono::system_clock::now();
//LogInfo << "nopr1 inference time: " << std::chrono::duration_cast<std::chrono::milliseconds>(end - start).count() << "ms";
                // filter out invalid detections
FilterInvalidInfo(res, pProcessData);
@@ -501,7 +492,8 @@ APP_ERROR TrainStepOneEngine::Process()
        // push the step-1 inference result on port 0
pProcessData->pVoidData = std::static_pointer_cast<void>(pPostData);
        iRet = outputQueMap_[strPort0_]->push(std::static_pointer_cast<void>(pProcessData), true);
        // PushData(strPort0_, pProcessData);
}
return APP_ERR_OK;
}
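The step engines hand typed payloads downstream through pProcessData->pVoidData, a std::shared_ptr<void>. A short sketch of the round-trip as used above: casting to void and back with std::static_pointer_cast preserves the original control block, so reference counting and the PostData destructor still work through the type-erased handle.

    // producer side (as in Process above):
    pProcessData->pVoidData = std::static_pointer_cast<void>(pPostData);
    // consumer side (downstream engine):
    std::shared_ptr<PostData> pPost =
        std::static_pointer_cast<PostData>(pProcessData->pVoidData);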


@@ -176,7 +176,7 @@ APP_ERROR TrainStepTwoEngine::Process()
{
std::shared_ptr<PostData> ppostbuff = std::static_pointer_cast<PostData>(pProcessData->pVoidData);
cv::Mat img(pProcessData->iHeight, pProcessData->iWidth, CV_8UC3, static_cast<uint8_t *>(pProcessData->pData.get())); //RGB
        for (size_t i = 0; i < ppostbuff->vecPostSubData.size(); i++)
{
PostSubData postsubdata = ppostbuff->vecPostSubData[i];
@@ -239,7 +239,8 @@ APP_ERROR TrainStepTwoEngine::Process()
        // push the step-1 inference result on port 0
pProcessData->pVoidData = std::static_pointer_cast<void>(pPostData);
        iRet = outputQueMap_[strPort0_]->push(std::static_pointer_cast<void>(pProcessData));
        // PushData(strPort0_, pProcessData);
}
return APP_ERR_OK;
}


@@ -432,12 +432,12 @@ void preprocess_kernel_img(
s2d.value[0] = scale;
s2d.value[1] = 0;
    s2d.value[2] = 0; // paste at the top-left corner
    // s2d.value[2] = -scale * src_width * 0.5 + dst_width * 0.5; // paste centered
s2d.value[3] = 0;
s2d.value[4] = scale;
    s2d.value[5] = 0; // paste at the top-left corner
    // s2d.value[5] = -scale * src_height * 0.5 + dst_height * 0.5; // paste centered
cv::Mat m2x3_s2d(2, 3, CV_32F, s2d.value);
cv::Mat m2x3_d2s(2, 3, CV_32F, d2s.value);
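A worked example of the two placements (illustrative numbers, not from this commit): letterboxing a 1920x1080 source into a 640x640 model input gives scale = min(640/1920.f, 640/1080.f) = 1/3, i.e. a 640x360 scaled image.

    // top-left paste:  s2d = [1/3, 0, 0;  0, 1/3, 0  ] -> 280 px of padding at the bottom
    // centered paste:  s2d = [1/3, 0, 0;  0, 1/3, 140] -> 140 px bands top and bottom
    //   (x offset: -1/3 * 1920 * 0.5 + 640 * 0.5 = 0; y offset: -1/3 * 1080 * 0.5 + 640 * 0.5 = 140)

Either way, detections must be mapped back to source coordinates with the inverse transform d2s, so the two variants stay consistent only if both matrices change together.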


@@ -186,6 +186,7 @@ int YoloV8Inference::YoloV8InferenceModelCommon(cv::Mat& frame, float& fResizeRa
size_t size_image_dst = pYoloV5ModelInfo_->modelCommonInfo.uiModelWidth * pYoloV5ModelInfo_->modelCommonInfo.uiModelHeight * pYoloV5ModelInfo_->modelCommonInfo.uiChannel;
    auto preprocess_start = std::chrono::system_clock::now(); // timing start
#ifdef ENABLE_CUDA_PREPROCESS
    memcpy(pu8ImgHost_, frame.data, size_image_src); // copy preprocess input to the HOST-side staging buffer
    CUDA_CHECK(cudaMemcpyAsync(pu8ImgDevice_, pu8ImgHost_, size_image_src, cudaMemcpyHostToDevice, *pImagePreprocessStream_)); // copy preprocess input to the DEVICE side
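For cudaMemcpyAsync to actually overlap with compute, the host staging buffer must be page-locked. The allocation of pu8ImgHost_ and pu8ImgDevice_ is not part of this hunk; a sketch of the usual setup (MAX_IMAGE_INPUT_SIZE is a hypothetical capacity constant):

    CUDA_CHECK(cudaMallocHost((void **)&pu8ImgHost_, MAX_IMAGE_INPUT_SIZE)); // pinned (page-locked) host buffer
    CUDA_CHECK(cudaMalloc((void **)&pu8ImgDevice_, MAX_IMAGE_INPUT_SIZE));   // device buffer
    // per frame:
    //   memcpy(pu8ImgHost_, frame.data, size_image_src);
    //   cudaMemcpyAsync(pu8ImgDevice_, pu8ImgHost_, size_image_src,
    //                   cudaMemcpyHostToDevice, stream);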