1. Add finer-grained corrections to the handling of reversing (backing up).

2. Configuration file changes.
This commit is contained in:
zhangwei 2024-08-12 11:34:13 +08:00
parent c1273c545a
commit 81be618326
47 changed files with 482 additions and 30630 deletions

View File

@ -1,11 +1,11 @@
cmake_minimum_required(VERSION 3.5)
# cmake_policy(SET CMP0074 NEW)
# cmake_policy(SET CMP0146 NEW)
message("NVIDIA NX PLATFORM")
set(PROJECT_NAME train)
project(${PROJECT_NAME} VERSION 1.0)
add_definitions(-std=c++11)
add_definitions(-DAPI_EXPORTS)
@ -100,18 +100,19 @@ include_directories(
${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/common
#common engine include
${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/common_engine/ControlEngine
# ${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/common_engine/ControlEngine
${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/common_engine/DataSourceEngine
${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/common_engine/DataUploadEngine
${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/common_engine/FilterEngine
${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/common_engine/MergerEngine
${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/common_engine/SaveEngine
${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/common_engine/SelectBestEngine
${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/common_engine/TrainAnaEngine
${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/common_engine/TransEngine
${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/common_engine/TrainAnaEngine
${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/common_engine/DataDealEngine
${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/common_engine/SocketEngine
${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/common_engine/DeleteExpiredFolderEngine
${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/common_engine/ApiEngine
#common tools rtsp_server include
${PROJECT_SOURCE_DIR}/nvidia_ascend_tools/common_tools/rtsp_server/3rdpart/md5
@ -180,17 +181,18 @@ file(GLOB_RECURSE COMMON_SRCS_LISTS
#common engine src
${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/common/*.cpp
${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/common_engine/DataSourceEngine/*.cpp
${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/common_engine/ControlEngine/*.cpp
# ${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/common_engine/ControlEngine/*.cpp
${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/common_engine/DataUploadEngine/*.cpp
${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/common_engine/FilterEngine/*.cpp
${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/common_engine/MergerEngine/*.cpp
${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/common_engine/SaveEngine/*.cpp
${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/common_engine/SelectBestEngine/*.cpp
${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/common_engine/TrainAnaEngine/*.cpp
${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/common_engine/TransEngine/*.cpp
${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/common_engine/TrainAnaEngine/*.cpp
${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/common_engine/DataDealEngine/*.cpp
${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/common_engine/SocketEngine/*.cpp
${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/common_engine/DeleteExpiredFolderEngine/*.cpp
${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/common_engine/ApiEngine/*.cpp
#common tools rtsp_server src
${PROJECT_SOURCE_DIR}/nvidia_ascend_tools/common_tools/rtsp_server/net/*.cpp
@ -199,18 +201,6 @@ file(GLOB_RECURSE COMMON_SRCS_LISTS
file(GLOB_RECURSE SRCS_LISTS
#nvidia engine src
#nvidia engine include
${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/common_engine/ControlEngine/*.cpp
${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/common_engine/DataSourceEngine/*.cpp
${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/common_engine/DataUploadEngine/*.cpp
${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/common_engine/FilterEngine/*.cpp
${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/common_engine/MergerEngine/*.cpp
${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/common_engine/SaveEngine/*.cpp
${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/common_engine/SelectBestEngine/*.cpp
${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/common_engine/TrainAnaEngine/*.cpp
${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/common_engine/TransEngine/*.cpp
${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/common_engine/DataDealEngine/*.cpp
${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/nvidia_engine/ChkDateStepOneEngine/*.cpp
${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/nvidia_engine/ChkDateStepTwoEngine/*.cpp
${PROJECT_SOURCE_DIR}/nvidia_ascend_engine/nvidia_engine/MyYaml/*.cpp

View File

@ -1,3 +1,2 @@
# Train_Identify
Train car number recognition

File diff suppressed because it is too large

View File

@ -104,11 +104,10 @@ namespace ai_matrix
//check whether there is a duplicate engine
std::string engine_unique = engine_name + "_" + std::to_string(engine_id);
// printf(engine_unique.c_str());
printf(engine_unique.c_str());
auto iter = engine_map_.find(engine_unique);
if (iter != engine_map_.end())
{
LogWarn << "重复engine " << engine_unique;
continue;
}

View File

@ -55,6 +55,7 @@ namespace ai_matrix
std::string get_date();
//get the time
std::string get_time();
//convert a timestamp to a time string (millisecond precision)
std::string Stamp2Time(long long timestamp, bool has_msec = false);
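For reference, a minimal sketch of what a millisecond-capable Stamp2Time could look like, based only on the declared signature above; this is an assumption, not necessarily the implementation in myutils:

#include <cstdio>
#include <ctime>
#include <string>

// Convert a millisecond Unix timestamp to "YYYY-MM-DD HH:MM:SS[.mmm]".
// Sketch only; the project implementation may differ.
std::string Stamp2Time(long long timestamp, bool has_msec = false)
{
    std::time_t sec = static_cast<std::time_t>(timestamp / 1000);
    int msec = static_cast<int>(timestamp % 1000);
    std::tm tmBuf{};
    localtime_r(&sec, &tmBuf);                                  // thread-safe local time
    char szTime[32] = {0};
    std::strftime(szTime, sizeof(szTime), "%Y-%m-%d %H:%M:%S", &tmBuf);
    std::string strTime(szTime);
    if (has_msec)
    {
        char szMsec[8] = {0};
        std::snprintf(szMsec, sizeof(szMsec), ".%03d", msec);  // append milliseconds
        strTime += szMsec;
    }
    return strTime;
}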

View File

@ -1,26 +1,19 @@
# ACL parameters
stop_delay_count: 64
stop_by_vari: 0
gc_acl_path: ./config/acl.json
gc_init_deviceid: "ALL" # e.g. 0; 0,1; 2,3; ALL
method_setting: ./modeltest/model
gc_init_deviceid: "ALL" # e.g. 0; 0,1; 2,3; ALL
# Recognition data source configuration
gc_data_source: "camera" #[camera, images]
camera:
camera_0:
#url: "rtsp://admin:sgt12345@10.27.119.13:554/h264/ch1/main/av_stream"
# url: "./videos/km70.mp4"
url: "./vedio/buertai2.mp4"
skipInterval: 3
target: "NUM"
use: true
direction: 0 # travel direction: 0 = auto-detect, 1 = leftward, 2 = rightward (paired with the "leading item" settings, e.g. a leftward train has the car number first, a rightward train has the attribute first)
left_first: 0 # 0 = leftward, number first; 1 = leftward, attribute first (when moving right: 2 = rightward, number first; 3 = rightward, attribute first)
right_first: 3 # (when moving left: 0 = leftward, number first; 1 = leftward, attribute first) 2 = rightward, number first; 3 = rightward, attribute first
identify_areas: "120, 0, 1800, 1080" # (ltx, lty, rbx, rby)
classid_minheight: "1:90, 2:120, 3:120, 9:240, 10:240, 18:120" # minimum large-box height per class id (to filter out detections from distant tracks)
#url: "rtsp://admin:sgt12345@10.27.119.13:554/h264/ch1/main/av_stream"
# url: "./videos/km70.mp4"
url: "./videos/06-29_96.mp4"
skipInterval: 3
target: "NUM"
use: true
direction: 0 # travel direction: 0 = auto-detect, 1 = leftward, 2 = rightward (paired with the "leading item" settings, e.g. a leftward train has the car number first, a rightward train has the attribute first)
left_first: 0 # 0 = leftward, number first; 1 = leftward, attribute first (when moving right: 2 = rightward, number first; 3 = rightward, attribute first)
right_first: 3 # (when moving left: 0 = leftward, number first; 1 = leftward, attribute first) 2 = rightward, number first; 3 = rightward, attribute first
identify_areas: "120, 0, 1800, 1080" # (ltx, lty, rbx, rby)
classid_minheight: "1:90, 2:120, 3:120, 9:240, 10:240, 18:120" # minimum large-box height per class id (to filter out detections from distant tracks)
images:
images_0:
url: "./images"
@ -116,8 +109,11 @@ model:
nms_threshold: 0.3
gc_http_open: 0
username: "guest_01"
password: "d55b0f642e817eea24725d2f2a31dd08" # Shendong
# gc_http_url: "http://192.168.2.211:20004/api/train-carriage/identification/video-save"
# gc_gettoken_url: "http://192.168.2.211:20004/api/blade-auth/oauth/token"
# gc_image_srv: "http://192.168.2.211:9010/"
username: ""
password: ""
gc_http_url: "http://192.168.2.121:8081"
gc_gettoken_url: "http://192.168.0.121:20004/api/blade-auth/oauth/token"
gc_image_srv: "http://192.168.0.121:9010/"
@ -158,7 +154,7 @@ gc_mysql_passwd: "123456"
gc_mysql_db: "test1"
gc_mysql_port: "http://192.168.2.115:9000"
gc_push_direction: 1 # (1: recognize trains moving left; 2: recognize trains moving right; 0: recognize both directions. Note: if the direction does not match, the server still recognizes normally but does not push the result to the web)
gc_push_direction: 2 # (1: recognize trains moving left; 2: recognize trains moving right; 0: recognize both directions. Note: if the direction does not match, the server still recognizes normally but does not push the result to the web)
gc_space_minrbx_imgpercent: 0 # the lowest point of a gap box should not be less than a given height in the frame (the value is a percentage of the frame) [mainly to filter out gap boxes on distant tracks; set to 0 if filtering is not needed]
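To make the direction codes above easier to read, here is a small illustrative C++ sketch; the enum and function names are hypothetical and not part of the project sources:

// Illustrative only: how the direction codes in this config are interpreted.
// direction: 0 = auto-detect, 1 = moving left, 2 = moving right (per the comments above).
enum class TrainDirection { Auto = 0, Left = 1, Right = 2 };

// gc_push_direction: 1 = push only left-moving trains to the web,
// 2 = push only right-moving trains, 0 = push both directions.
bool ShouldPushToWeb(int gcPushDirection, TrainDirection detected)
{
    if (gcPushDirection == 0)
        return true;                                   // both directions are pushed
    return static_cast<int>(detected) == gcPushDirection;
}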

View File

@ -24,7 +24,7 @@
#include <math.h>
#include <chrono>
#include <cmath>
#include <functional>
#include <functional>
#include <sys/types.h>
#include <sys/stat.h>
#include <unistd.h>
@ -459,10 +459,10 @@ typedef struct
std::shared_ptr<void> pSrcData; // Smart pointer to the data (source data). During inference the RGB data is copied: the source copy stays unchanged and is passed to the post-processing engine for drawing boxes, while the other copy is resized to 640x640 and normalized to 0-1
uint64_t i64TimeStamp = 0; // frame data timestamp
Step1Location step1Location; //step2 use
int iTargetType; // target class (0: car number; 1: attribute; 2: locomotive; 3: inspection-due date; 4: container)
int iTargetType; // target class (0: car number; 1: attribute; 2: locomotive; 3: inspection-due date; 4: container)
int iBigClassId = -1; // large-box id (used by step 2: 1 = attribute, 2/3/4/5/6 = number)
int iCarXH = 0; // carriage that the current large box belongs to
} InferenceData;
@ -491,7 +491,7 @@ typedef struct
std::shared_ptr<void> pData = nullptr; // Smart pointer of data
uint64_t i64TimeStamp = 0; // frame data timestamp
bool bHostMemory = false; // whether the decoded data resides in host-side memory
std::string strPicFilePath;
std::string strPicFilePath;
std::string strTrainDate; // train passage date (format YYYY-MM-DD)
std::string strTrainName; // train run name (format HH-MM-SS)
bool bIsEnd = false; // end-of-train flag
@ -756,7 +756,7 @@ typedef struct
std::string strTrainDate; // train passage date (format YYYY-MM-DD)
std::string strTrainName; // train run name (format HH-MM-SS)
std::string strContainerNo; // container number
std::string strBestImg; // best container image
std::string strBestImg; // best container image
float fScoreSum = 0; // total score of the best container image, used to pick the best frame
uint64_t i64TimeStamp = 0; // timestamp of the best container image
Step1Location step1Location; // large-box coordinates on the original image
@ -832,30 +832,30 @@ typedef struct imganalyse
{
// from file
///////// The settings below come from the csv file
int nType; // algorithm purpose: 0 = motion detection, 1 = carriage partitioning
int nAreaX1; // monitored region, start x
int nAreaY1; // monitored region, start y
int nAreaX2; // monitored region, end x
int nAreaY2; // monitored region, end y
int bOrLevel; // fusion level of the algorithm results: equal values are AND-ed, different values are OR-ed
bool bDiff; // whether "different" is the decision criterion: false = judge by the match being the same, true = judge by the match being different
bool bOn; // whether this region-matching check is enabled (whether the algorithm is active): false = disabled, true = enabled
double dSameThresholdVal; // threshold for judging a match as the same
double dDiffThresholdVal; // threshold for judging a match as different
int nPauseMaxCoumt; // maximum count for judging the train as stopped (exceeding it means stopped)
int npicimprovetype; // image enhancement (grayscale / histogram equalization / gamma transform / custom)
int templemethod; // template comparison mode
int histmethod; // histogram comparison mode
int specmethod; // custom comparison mode
int compmethod; // comparison type (template / histogram / custom comparison; multiple can be selected)
std::string baseImagePath; // path of the baseline image
int nType; // algorithm purpose: 0 = motion detection, 1 = carriage partitioning
int nAreaX1; // monitored region, start x
int nAreaY1; // monitored region, start y
int nAreaX2; // monitored region, end x
int nAreaY2; // monitored region, end y
int bOrLevel; // fusion level of the algorithm results: equal values are AND-ed, different values are OR-ed
bool bDiff; // whether "different" is the decision criterion: false = judge by the match being the same, true = judge by the match being different
bool bOn; // whether this region-matching check is enabled (whether the algorithm is active): false = disabled, true = enabled
double dSameThresholdVal; // threshold for judging a match as the same
double dDiffThresholdVal; // threshold for judging a match as different
int nPauseMaxCoumt; // maximum count for judging the train as stopped (exceeding it means stopped)
int npicimprovetype; // image enhancement (grayscale / histogram equalization / gamma transform / custom)
int templemethod; // template comparison mode
int histmethod; // histogram comparison mode
int specmethod; // custom comparison mode
int compmethod; // comparison type (template / histogram / custom comparison; multiple can be selected)
std::string baseImagePath; // path of the baseline image
///////// The settings below are runtime recognition variables
double dComparePoint; // comparison value
double dComparePoint; // comparison value
double dPreComparePoint; // comparison value of the previous frame
bool bChanged; // whether the comparison against the baseline image changed: true = changed, false = unchanged
bool bPreChanged; // previous frame's comparison result against the baseline image
bool bChanged; // whether the comparison against the baseline image changed: true = changed, false = unchanged
bool bPreChanged; // previous frame's comparison result against the baseline image
bool fluctuationFlag; // flag for the fluctuation between the maximum and minimum of the last 100 frames
int nSameCount; // count of frames where the comparison value did not change
int nSameCount; // count of frames where the comparison value did not change
int iIndex; // index of the configuration item
}AnalyseInfo;
@ -866,25 +866,25 @@ typedef struct trainpartion
{
std::string strTrainDate;
std::string strTrainName;
int nindex; // carriage sequence number
uint64_t i64StartTimeStamp; // timestamp of the first frame in which this carriage appears
uint64_t i64EndTimeStamp; // timestamp of the last frame in which this carriage appears
float fspeed; // speed of this carriage
int startframe; // number of the first frame in which this carriage appears
int endframe; // number of the last frame in which this carriage appears
int nindex; // carriage sequence number
uint64_t i64StartTimeStamp; // timestamp of the first frame in which this carriage appears
uint64_t i64EndTimeStamp; // timestamp of the last frame in which this carriage appears
float fspeed; // speed of this carriage
int startframe; // number of the first frame in which this carriage appears
int endframe; // number of the last frame in which this carriage appears
int modelSpaceFrame;
bool bfuncconfirmed; // whether algorithm-based partitioning is complete: true = yes, false = no
bool bmodelconfirmed; // whether confirmation from the first recognition result is complete: true = yes, false = no
float fLTX; // coupler start position, X
float fLTY; // coupler start position, Y
float fRBX; // coupler end position, X
float fRBY; // coupler end position, Y
bool bfuncconfirmed; // whether algorithm-based partitioning is complete: true = yes, false = no
bool bmodelconfirmed; // whether confirmation from the first recognition result is complete: true = yes, false = no
float fLTX; // coupler start position, X
float fLTY; // coupler start position, Y
float fRBX; // coupler end position, X
float fRBY; // coupler end position, Y
bool bIsEnd = false; // whether this is the last carriage
int nStatus; // direction (0: unknown, 1: moving left, 2: moving right)
int iRate; // frame rate
}PartionInfo;
typedef struct
typedef struct
{
std::shared_ptr<ProcessData> processData;
std::string strAllClassType;
@ -895,7 +895,8 @@ typedef struct
{
int iBigClassId = -1; // large-box id (used by step 2: 1 = attribute, 2/3/4/5/6 = number)
uint32_t iFrameId = 0; // frame number
float fCenterX; // X coordinate of the center of the step-1 detection target
float fCenterX; // X coordinate of the center of the step-1 detection target
float fTargetWidth; // width of the step-1 detection target along X
} CalculateInfo;
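As an illustration of how the new fCenterX/fTargetWidth fields could feed a finer reversing check: the actual correction mentioned in the commit message lives in the suppressed engine diffs, so the helper below is purely a hypothetical sketch that assumes the CalculateInfo struct above.

#include <cmath>
#include <vector>

// Sketch: infer travel direction from the drift of a target's center X
// across consecutive frames; a sign flip over time would indicate reversing.
// Hypothetical helper, not part of the project sources shown here.
int EstimateDirection(const std::vector<CalculateInfo> &track)
{
    if (track.size() < 2)
        return 0;                                       // 0: unknown
    float fDrift = track.back().fCenterX - track.front().fCenterX;
    float fHalfWidth = track.back().fTargetWidth / 2.0f;
    if (std::fabs(fDrift) < fHalfWidth)
        return 0;                                       // movement too small to judge
    return (fDrift < 0) ? 1 : 2;                        // 1: moving left, 2: moving right
}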

View File

@ -1,152 +0,0 @@
#include "ControlEngine.h"
using namespace ai_matrix;
ControlEngine::ControlEngine() {}
ControlEngine::~ControlEngine() {}
APP_ERROR ControlEngine::Init()
{
bUseEngine_ = true;
dataSourceConfig_ = MyYaml::GetIns()->GetDataSourceConfigById(engineId_); // get camera parameters
if (!dataSourceConfig_.bUse)
{
bUseEngine_ = false;
LogWarn << "engineId_:" << engineId_ << " not use engine";
return APP_ERR_OK;
}
bHwDecode_ = MyYaml::GetIns()->GetBoolValue("gc_hardware_decode"); // hardware decoding
strPort0_ = engineName_ + "_" + std::to_string(engineId_) + "_0";
strPort1_ = engineName_ + "_" + std::to_string(engineId_) + "_1";
strPort2_ = engineName_ + "_" + std::to_string(engineId_) + "_2";
bCommandFlag_ = (MyYaml::GetIns()->GetStringValue("gc_run_mode") == "command");
bCollectDataFlag_ = MyYaml::GetIns()->GetBoolValue("gc_collect_data_flag");
strCollectDataSavePath_ = MyYaml::GetIns()->GetPathValue("gc_collect_data_savepath");
bPushActualFlag_ = MyYaml::GetIns()->GetBoolValue("gc_push_actual_flag");
LogInfo << "ControlEngine Init ok";
return APP_ERR_OK;
}
APP_ERROR ControlEngine::DeInit()
{
if (!bUseEngine_)
{
LogWarn << "engineId_:" << engineId_ << " not use engine";
return APP_ERR_OK;
}
LogInfo << "ControlEngine DeInit ok";
return APP_ERR_OK;
}
/**
*
* inParam : N/A
* outParam: N/A
* return : N/A
*/
void ControlEngine::GetDetectState()
{
// 1. Continuous-recognition mode
if (!bCommandFlag_)
{
iStepInter_ = 1;
return;
}
// 2. Command-triggered recognition mode
int iRet = APP_ERR_OK;
std::shared_ptr<void> pVoidData1 = nullptr;
iRet = inputQueMap_[strPort1_]->pop(pVoidData1);
if (nullptr != pVoidData1)
{
std::shared_ptr<std::string> pstrCommand = std::static_pointer_cast<std::string>(pVoidData1);
LogDebug << "recv Command:" << *pstrCommand;
if ((*pstrCommand) == "start")
{
iStepInter_ = 1;
}
else if ((*pstrCommand) == "end")
{
if (iStepInter_ == 1)
{
iStepInter_ = 2;
}
else
{
LogError << "command:end is worng";
}
}
}
//no new command received: keep returning the result of the previous command
}
APP_ERROR ControlEngine::Process()
{
if (!bUseEngine_)
{
LogWarn << "engineId_:" << engineId_ << " not use engine";
return APP_ERR_OK;
}
int iRet = APP_ERR_OK;
while (!isStop_)
{
//get the recognition state
GetDetectState();
//pop decoded data from port 0
std::shared_ptr<void> pVoidData0 = nullptr;
inputQueMap_[strPort0_]->pop(pVoidData0);
if (nullptr == pVoidData0)
{
usleep(1000); //1ms
continue;
}
std::shared_ptr<ProcessData> pProcessData = std::static_pointer_cast<ProcessData>(pVoidData0);
//1. Recognition handling
if (iStepInter_ != 0)
{
pProcessData->iStatus = TRAINSTATUS_RUN;
pProcessData->bIsEnd = ((iStepInter_ == 2) ? true : false);
iRet = outputQueMap_[strPort0_]->push(std::static_pointer_cast<void>(pProcessData));
if (iStepInter_ == 2)
{
iStepInter_ = 0;
}
}
//2. Image collection
if (bCollectDataFlag_)
{
//assemble the data and push it to port 0 for image saving
std::shared_ptr<SaveImgData> pSaveImgData = std::make_shared<SaveImgData>();
pSaveImgData->iFrameId = pProcessData->iFrameId; // frame number
char szCameraNo[4] = {0};
sprintf(szCameraNo, "%03d", pProcessData->iDataSource + 1);
pSaveImgData->strImgPath = strCollectDataSavePath_ + szCameraNo;
pSaveImgData->strImgName = std::to_string(pSaveImgData->iFrameId);
pSaveImgData->strImgName += "_";
pSaveImgData->strImgName += std::to_string(pProcessData->i64TimeStamp);
pSaveImgData->strImgName += ".jpg";
iRet = outputQueMap_[strPort1_]->push(std::static_pointer_cast<void>(pSaveImgData));
continue;
}
//3. Live-stream push
if(bPushActualFlag_)
{
//send to the streaming engine
//iRet = outputQueMap_[strPort2_]->push(std::static_pointer_cast<void>(pProcessData));
}
}
return APP_ERR_OK;
}

View File

@ -1,44 +0,0 @@
/**
* Engine
* */
#ifndef CONTROLENGINE_H
#define CONTROLENGINE_H
#include "AppCommon.h"
#include "MyYaml.h"
#include "myutils.h"
#include "EngineBase.h"
#include "EngineFactory.h"
class ControlEngine : public ai_matrix::EngineBase
{
public:
ControlEngine();
~ControlEngine();
APP_ERROR Init() override;
APP_ERROR DeInit() override;
APP_ERROR Process() override;
private:
//get the detection state
void GetDetectState();
bool bUseEngine_;
ai_matrix::DataSourceConfig dataSourceConfig_;
std::string save_path_;
std::string strPort0_;
std::string strPort1_;
std::string strPort2_;
bool bCommandFlag_; // command-triggered recognition mode
bool bHwDecode_;
bool bCollectDataFlag_;
std::string strCollectDataSavePath_;
bool bPushActualFlag_; // whether to push a live stream in real time (for live viewing)
int iStepInter_ = 0; // (0: not recognizing; 1: start recognition; 2: end recognition)
};
ENGINE_REGIST(ControlEngine)
#endif

View File

@ -1,192 +0,0 @@
#include "SubControlEngine.h"
using namespace ai_matrix;
SubControlEngine::SubControlEngine() {}
SubControlEngine::~SubControlEngine() {}
APP_ERROR SubControlEngine::Init()
{
bUseEngine_ = true;
dataSourceConfig_ = MyYaml::GetIns()->GetDataSourceConfigById(engineId_); // get camera parameters
if (!dataSourceConfig_.bUse)
{
bUseEngine_ = false;
LogWarn << "engineId_:" << engineId_ << " not use engine";
return APP_ERR_OK;
}
bHwDecode_ = MyYaml::GetIns()->GetBoolValue("gc_hardware_decode"); // hardware decoding
strPort0_ = engineName_ + "_" + std::to_string(engineId_) + "_0";
strPort1_ = engineName_ + "_" + std::to_string(engineId_) + "_1";
strPort2_ = engineName_ + "_" + std::to_string(engineId_) + "_2";
bCollectDataFlag_ = MyYaml::GetIns()->GetBoolValue("gc_collect_data_flag");
bPushActualFlag_ = MyYaml::GetIns()->GetBoolValue("gc_push_actual_flag");
strCollectDataSavePath_ = MyYaml::GetIns()->GetPathValue("gc_collect_data_savepath");
strResultPath_ = MyYaml::GetIns()->GetPathValue("gc_result_path");
LogInfo << "SubControlEngine Init ok";
return APP_ERR_OK;
}
APP_ERROR SubControlEngine::DeInit()
{
if (!bUseEngine_)
{
LogWarn << "engineId_:" << engineId_ << " not use engine";
return APP_ERR_OK;
}
LogInfo << "SubControlEngine DeInit ok";
return APP_ERR_OK;
}
/**
* ()
* inParam : N/A
* outParam: N/A
* return : N/A
*/
void SubControlEngine::InitParam()
{
iPushDataNO_ = 1;
moveData_.i64TimeStamp = 0;
moveData_.bHasTrain = false;
moveData_.bIsEnd = false;
moveData_.strTrainDate = "";
moveData_.strTrainName = "";
moveData_.iFrameId = 1;
}
APP_ERROR SubControlEngine::Process()
{
if (!bUseEngine_)
{
LogWarn << "engineId_:" << engineId_ << " not use engine";
return APP_ERR_OK;
}
int iRet = APP_ERR_OK;
while (!isStop_)
{
//pop decoded data from port 0
std::shared_ptr<void> pVoidData0 = nullptr;
inputQueMap_[strPort0_]->pop(pVoidData0);
if (nullptr == pVoidData0)
{
usleep(1000); //1ms
continue;
}
//get the detection state from the main camera
std::shared_ptr<void> pVoidData1 = nullptr;
iRet = inputQueMap_[strPort1_]->pop(pVoidData1);
if (nullptr != pVoidData1)
{
std::shared_ptr<MoveData> pMoveData = std::static_pointer_cast<MoveData>(pVoidData1);
moveData_ = *pMoveData;
LogDebug << "engineId:" << engineId_ << " trainname:" << moveData_.strTrainName
<< " MoveData frameid:" << moveData_.iFrameId << " direction:" << moveData_.iDirection
<< " IsEnd:" << moveData_.bIsEnd;
}
std::shared_ptr<ProcessData> pProcessDataTemp = std::static_pointer_cast<ProcessData>(pVoidData0);
queueProcessData_.push(pProcessDataTemp);
//1. No train: discard surplus data (keep only 2 s of data, 50 items)
if (!moveData_.bHasTrain)
{
while (queueProcessData_.size() > 50)
{
queueProcessData_.pop();
}
continue;
}
//2. Train present: take decoded data for processing
if (queueProcessData_.empty())
{
continue;
}
std::shared_ptr<ProcessData> pProcessData = queueProcessData_.front();
queueProcessData_.pop();
bool bContinueFlag = false;
while (!moveData_.bIsEnd && pProcessData->i64TimeStamp < moveData_.i64TimeStamp) // find the data closest in time to the train arrival
{
if (queueProcessData_.empty())
{
bContinueFlag = true;
LogWarn << "engineId:" << engineId_ << " no fit data oldFrameTimeStamp:" << pProcessData->i64TimeStamp << " rePush.";
queueProcessData_.push(pProcessData);
break;
}
if (queueProcessData_.front()->i64TimeStamp > moveData_.i64TimeStamp)
{
if (moveData_.i64TimeStamp - pProcessData->i64TimeStamp > queueProcessData_.front()->i64TimeStamp - moveData_.i64TimeStamp)
{
LogDebug << "oldFrameTimeStamp: " << pProcessData->i64TimeStamp << " newFrameTimeStamp:" << queueProcessData_.front()->i64TimeStamp;
pProcessData = queueProcessData_.front();
queueProcessData_.pop();
}
break;
}
pProcessData = queueProcessData_.front();
queueProcessData_.pop();
}
if (bContinueFlag)
{
continue;
}
//3. Build the train-passage image-save data, reset the frame number, and push it for saving
std::shared_ptr<SaveImgData> pSaveImgData = std::make_shared<SaveImgData>();
pSaveImgData->iFrameId = iPushDataNO_ * dataSourceConfig_.iSkipInterval; // frame number
pSaveImgData->pData = pProcessData->pData;
pSaveImgData->iSize = pProcessData->iSize;
pSaveImgData->iWidth = pProcessData->iWidth;
pSaveImgData->iHeight = pProcessData->iHeight;
char szCameraNo[4] = {0};
sprintf(szCameraNo, "%03d", pProcessData->iDataSource + 1);
pSaveImgData->strImgPath = strResultPath_ + moveData_.strTrainDate + "/" + moveData_.strTrainName + "/" + szCameraNo;
pSaveImgData->strImgName = std::to_string(pSaveImgData->iFrameId) + ".jpg";
pSaveImgData->bIsEnd = moveData_.bIsEnd;
pSaveImgData->bSaveToFtp = true;
pSaveImgData->i64TimeStamp = pProcessData->i64TimeStamp;
pSaveImgData->iDirection = moveData_.iDirection;
outputQueMap_[strPort0_]->push(std::static_pointer_cast<void>(pSaveImgData));
iPushDataNO_++;
if ((moveData_.bIsEnd) && (moveData_.iFrameId <= pSaveImgData->iFrameId))
{
InitParam();
}
//4. Image collection
if (bCollectDataFlag_)
{
//assemble the data and push it to port 0 for image saving
std::shared_ptr<SaveImgData> pSaveImgData = std::make_shared<SaveImgData>();
pSaveImgData->iFrameId = pProcessData->iFrameId; // frame number
char szCameraNo[4] = {0};
sprintf(szCameraNo, "%03d", pProcessData->iDataSource + 1);
pSaveImgData->strImgPath = strCollectDataSavePath_ + szCameraNo;
pSaveImgData->strImgName = std::to_string(pSaveImgData->iFrameId);
pSaveImgData->strImgName += "_";
pSaveImgData->strImgName += std::to_string(pProcessData->i64TimeStamp);
pSaveImgData->strImgName += ".jpg";
iRet = outputQueMap_[strPort1_]->push(std::static_pointer_cast<void>(pSaveImgData));
}
//5. Live-stream push
if(bPushActualFlag_)
{
//send to the streaming engine
//iRet = outputQueMap_[strPort2_]->push(std::static_pointer_cast<void>(pProcessData));
}
}
return APP_ERR_OK;
}

View File

@ -1,48 +0,0 @@
/**
* Engine()
* */
#ifndef SUBCONTROLENGINE_H
#define SUBCONTROLENGINE_H
#include "AppCommon.h"
#include "MyYaml.h"
#include "myutils.h"
#include "EngineBase.h"
#include "EngineFactory.h"
class SubControlEngine : public ai_matrix::EngineBase
{
public:
SubControlEngine();
~SubControlEngine();
APP_ERROR Init() override;
APP_ERROR DeInit() override;
APP_ERROR Process() override;
private:
//parameter initialization
void InitParam();
bool bUseEngine_;
bool bHwDecode_;
ai_matrix::DataSourceConfig dataSourceConfig_;
std::string save_path_;
std::string strPort0_;
std::string strPort1_;
std::string strPort2_;
bool bCollectDataFlag_;
bool bPushActualFlag_; // whether to push a live stream in real time (for live viewing)
std::string strCollectDataSavePath_;
std::string strResultPath_;
uint32_t iPushDataNO_ = 1; // sequence number of the pushed data
MoveData moveData_;
std::queue<std::shared_ptr<ProcessData>> queueProcessData_;
};
ENGINE_REGIST(SubControlEngine)
#endif

File diff suppressed because one or more lines are too long

View File

@ -50,6 +50,7 @@ private:
std::map<int, std::vector<std::string>> mapSourcePushPort_;
MoveData moveData_;
std::queue<MoveData> queuwMoveData_;
std::string strDataDir_;
uint32_t iOrigDataNO_; // number of original train-passage data items

View File

@ -407,6 +407,31 @@ int DataDealTwoEngine::GetPostData(std::shared_ptr<ProcessData> pProcessData, Js
return pPostData->vecPostSubData.size();
}
/**
* push数据到队列push
* inParam : const std::string strPort push的端口
: const std::shared_ptr<ProcessData> &pProcessData push的数据
* outParam: N/A
* return : N/A
*/
void DataDealTwoEngine::PushData(const std::string &strPort, const std::shared_ptr<ProcessData> &pProcessData)
{
while (true)
{
int iRet = outputQueMap_[strPort]->push(std::static_pointer_cast<void>(pProcessData));
if (iRet != 0)
{
LogDebug << "sourceid:" << pProcessData->iDataSource << " frameid:" << pProcessData->iFrameId << " push fail iRet:" << iRet;
if (iRet == 2)
{
usleep(10000); // 10ms
continue;
}
}
break;
}
}
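A brief usage note for the PushData helper added above: the assumption here is that a push return value of 2 means the output queue is full, so the helper blocks and retries every 10 ms instead of dropping the frame. A minimal call-site sketch, mirroring how MakeProcessData uses it further down:

// Equivalent to the direct push it replaces, but never silently drops the frame:
//   iRet = outputQueMap_[vecPushPorts[iPort]]->push(std::static_pointer_cast<void>(pProcessData), true);
PushData(vecPushPorts[iPort], pProcessData); // retries while the queue reports it is full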
/**
* push
* inParam : N/A
@ -490,14 +515,14 @@ void DataDealTwoEngine::MakeProcessData(std::shared_ptr<TrainRange> pTrainRange)
{
if (iPort == vecPushPorts.size() - 1)
{
iRet = outputQueMap_[vecPushPorts[iPort]]->push(std::static_pointer_cast<void>(pProcessData), true);
// PushData(vecPushPorts[iPort], pProcessData);
//iRet = outputQueMap_[vecPushPorts[iPort]]->push(std::static_pointer_cast<void>(pProcessData), true);
PushData(vecPushPorts[iPort], pProcessData);
continue;
}
std::shared_ptr<ProcessData> pNewProcessData = std::make_shared<ProcessData>();
*pNewProcessData = *pProcessData;
iRet = outputQueMap_[vecPushPorts[iPort]]->push(std::static_pointer_cast<void>(pNewProcessData), true);
// PushData(vecPushPorts[iPort], pNewProcessData);
//iRet = outputQueMap_[vecPushPorts[iPort]]->push(std::static_pointer_cast<void>(pNewProcessData), true);
PushData(vecPushPorts[iPort], pNewProcessData);
}
}
@ -593,7 +618,7 @@ APP_ERROR DataDealTwoEngine::Process()
//process each frame of the current carriage
MakeProcessData(pTrainRange);
// push the aggregated results
iRet = outputQueMap_[engineName_ + "_" + std::to_string(engineId_) + "_9"]->push(std::static_pointer_cast<void>(pTrainRange), true);
iRet = outputQueMap_[engineName_ + "_" + std::to_string(engineId_) + "_9"]->push(std::static_pointer_cast<void>(pTrainRange));
iTrainIndex_++;
if (pTrainRange->bIsEnd)

File diff suppressed because one or more lines are too long

View File

@ -9,7 +9,7 @@
#include "AppCommon.h"
//tell the compiler to compile this block of code as C (rather than C++)
//tell the compiler to compile this block of code as C (rather than C++)
#ifdef __cplusplus
extern "C"
{
@ -50,7 +50,7 @@ protected:
APP_ERROR GetStreamInfo();
APP_ERROR ConnectCamera(); // connect to the camera
void ResetCamera(); // reset the camera connection
static int InterruptCallback(void *pData);
//static int InterruptCallback(void *pData);
private:
AVFormatContext *pFormatCtx_ = nullptr;
@ -65,6 +65,8 @@ private:
int nDelayTime;
bool bIsAvc_ = false;
int iAudioStream_ = -1;
bool bCameraError_ = false;
};
ENGINE_REGIST(CameraEngine)

View File

@ -1,160 +0,0 @@
#include "ReadImgEngine.h"
#include <algorithm>
#include <string>
#include <regex>
#include <boost/filesystem.hpp>
#include <sys/stat.h>
#include "myutils.h"
using namespace ai_matrix;
namespace
{
//sort by numeric file name
bool CompareFileName(const std::string &sParam1, const std::string &sParam2)
{
int iPos1 = sParam1.find(".");
int iPos11 = sParam1.find_last_of("/");
int iPos2 = sParam2.find(".");
int iPos22 = sParam2.find_last_of("/");
std::string sFileName1 = sParam1.substr(iPos11+1, iPos1-iPos11-1);
std::string sFileName2 = sParam2.substr(iPos22+1, iPos2-iPos22-1);
return (atoi(sFileName1.c_str()) < atoi(sFileName2.c_str()));
}
}
ReadImgEngine::ReadImgEngine() {}
ReadImgEngine::~ReadImgEngine() {}
APP_ERROR ReadImgEngine::Init()
{
bUseEngine_ = true;
dataSourceConfig_ = MyYaml::GetIns()->GetDataSourceConfigById(engineId_); // get camera parameters
if (MyYaml::GetIns()->GetStringValue("gc_data_source") != "images" || !dataSourceConfig_.bUse)
{
bUseEngine_ = false;
LogWarn << "engineId_:" << engineId_ << " not use engine";
return APP_ERR_OK;
}
strPort0_ = engineName_ + "_" + std::to_string(engineId_) + "_0";
LogInfo << "engineId_:" << engineId_ << " ReadImgEngine Init ok";
return APP_ERR_OK;
}
APP_ERROR ReadImgEngine::DeInit()
{
if (!bUseEngine_)
{
LogWarn << "engineId_:" << engineId_ << " not use engine";
return APP_ERR_OK;
}
LogInfo << "engineId_:" << engineId_ << " ReadImgEngine DeInit ok";
return APP_ERR_OK;
}
void ReadImgEngine::RecursionReadDir(const std::string &strDir)
{
boost::filesystem::directory_iterator end_itr;
for (boost::filesystem::directory_iterator itr(strDir); itr != end_itr; ++itr)
{
if (boost::filesystem::is_directory(*itr)) // directory
{
RecursionReadDir(itr->path().string());
}
else
{
std::string strExt = itr->path().extension().string();
if (strExt.compare(".jpg") == 0)
{
vecFiles_.push_back(itr->path().string()); // store the absolute path
}
}
}
}
std::string ReadImgEngine::GetFileName(const std::string &strParam)
{
int iPos = strParam.find(".");
int iPos1 = strParam.find_last_of("/");
return strParam.substr(iPos1 + 1, iPos - iPos1 - 1);
}
APP_ERROR ReadImgEngine::Process()
{
if (!bUseEngine_)
{
LogWarn << "engineId_:" << engineId_ << " not use engine";
return APP_ERR_OK;
}
int iRet = APP_ERR_OK;
RecursionReadDir(dataSourceConfig_.strUrl); // recursively read the directory
if (vecFiles_.size() == 0)
{
LogError << "engineId_:" << engineId_ << " no file";
return APP_ERR_COMM_FAILURE;
}
std::sort(vecFiles_.begin(), vecFiles_.end(), CompareFileName);
LogInfo << "engineId_:" << engineId_ << " vecFiles_ size: " << vecFiles_.size();
int iFileIndex = 0; // file index
while (!isStop_)
{
std::string strFilePath = vecFiles_.at(iFileIndex);
cv::Mat matBGR = cv::imread(strFilePath);
//assemble the data
std::shared_ptr<ProcessData> pProcessData = std::make_shared<ProcessData>();
pProcessData->iWidth = matBGR.cols;
pProcessData->iHeight = matBGR.rows;
pProcessData->iRate = 25;
pProcessData->i64TimeStamp = MyUtils::getins()->GetCurrentTimeMillis();
pProcessData->iDataSource = engineId_;
uint32_t iBGRSize = pProcessData->iWidth * pProcessData->iHeight * 3;
void *pBGRBuffer = nullptr;
pBGRBuffer = new uint8_t[iBGRSize];
memcpy(pBGRBuffer, matBGR.data, iBGRSize);
pProcessData->pData.reset(pBGRBuffer, [](void *data){if(data) {delete[] data; data = nullptr;} }); // memory managed by the smart pointer
pProcessData->iSize = iBGRSize;
// //read the file contents
// RawData rawData;
// iRet = ReadFile(strFilePath, rawData);
// if (iRet != APP_ERR_OK)
// {
// LogError << "engineId_:" << engineId_ << " Failed to read image on " << strFilePath << ", iRet = " << iRet << ".";
// return iRet;
// }
// //assemble the data
// std::shared_ptr<FrameData> pFrameData = std::make_shared<FrameData>();
// pFrameData->iDataSource = engineId_;
// pFrameData->iFrameId = iFileIndex + 1;
// pFrameData->iSize = rawData.lenOfByte;
// pFrameData->pData = rawData.data;
// pFrameData->i64TimeStamp = MyUtils::getins()->GetCurrentTimeMillis();
iRet = outputQueMap_[strPort0_]->push(std::static_pointer_cast<void>(pProcessData));
iFileIndex = (iFileIndex + 1) % vecFiles_.size();
if (iFileIndex == 0)
{
LogInfo << "engineId_:" << engineId_ << " read images finish ";
while (!isStop_)
{
usleep(100000); //100ms
}
//break; // publish only once
}
//simulate 25 frames per second
usleep(40000); //40ms
}
return APP_ERR_OK;
}

View File

@ -1,42 +0,0 @@
/**
*
* */
#ifndef READIMGENGINE_H
#define READIMGENGINE_H
#include "AppCommon.h"
#include "EngineBase.h"
#include "EngineFactory.h"
#include "MyYaml.h"
class ReadImgEngine : public ai_matrix::EngineBase
{
public:
ReadImgEngine();
~ReadImgEngine();
APP_ERROR Init() override;
APP_ERROR DeInit() override;
APP_ERROR Process() override;
protected:
void RecursionReadDir(const std::string &dir); // recursively read a directory
private:
std::string GetFileName(const std::string &strParam);
bool bUseEngine_;
ai_matrix::DataSourceConfig dataSourceConfig_;
std::vector<std::string> vecFiles_; // collection of file paths
uint64_t frameid_; // frame number
uint64_t timestamp_; // timestamp
std::string strPort0_;
};
ENGINE_REGIST(ReadImgEngine)
#endif

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

View File

@ -48,6 +48,7 @@ private:
private:
std::string strResultPath_;
std::string strLogBakPath_;
int iDaysNumber_;
};

File diff suppressed because one or more lines are too long

View File

@ -62,7 +62,7 @@ private:
int rightFirst_; // type of the leading large box when moving right
int leftFirst_; // type of the leading large box when moving left
int iPartitionFrameNum_; // frame interval required to treat detections as spanning carriages
int iPlitFrameSpanPX_; // maximum span of same-type large boxes across adjacent frames
int iPlitFrameSpanPX_; // maximum span of same-type large boxes across adjacent frames
std::map<int, PostData> mapPostDataFrist_; // [key: data-source id, value: step-1 recognition info]
std::map<int, std::map<int, std::vector<Step1Location>>> mapMapStep1Info_; // [key: data-source id, value: [key: recognition target, value: set of detection boxes]]

View File

@ -1,345 +0,0 @@
/*
* @Author:
* @Date: 2022-01-27 10:27:26
* @LastEditors: your name
* @LastEditTime: 2022-02-22 15:01:51
* @Description: gRPC interface
* @FilePath: \lirs\code\GrpcEngine\GrpcEngine.cpp
*
* Copyright © 2022 <Shandong Matrix Software Engineering Co., Ltd>
*/
#include "GrpcEngine.h"
using namespace ai_matrix;
GrpcEngine::GrpcEngine() {}
GrpcEngine::~GrpcEngine() {}
APP_ERROR GrpcEngine::Init()
{
strPort0_ = engineName_ + "_" + std::to_string(engineId_) + "_0";
LogInfo << "GrpcEngine Init ok";
return APP_ERR_OK;
}
APP_ERROR GrpcEngine::DeInit()
{
LogInfo << "GrpcEngine DeInit ok";
return APP_ERR_OK;
}
APP_ERROR GrpcEngine::Process()
{
int ret = APP_ERR_OK;
while (!isStop_)
{
usleep(1000);
}
return APP_ERR_OK;
}
/**
*
* */
/**
* -
* @description:
* @param {ServerContext} *context
* @param {Request} *request
* @param {Reply} *reply
* @return {*}
*/
Status GrpcEngine::Come(ServerContext *context, const Request *request, Reply *reply)
{
//verify the request header
if (request->head() != this->HEAD)
{
reply->set_error_msg(" head: \"" + std::to_string(request->head()) + "\" permission denied !");
return Status::OK;
}
//TODO(zhangwei): implement
std::shared_ptr<std::string> pstrCommand = std::make_shared<std::string>("start");
outputQueMap_[strPort0_]->push(std::static_pointer_cast<void>(pstrCommand));
return Status::OK;
}
/**
* -
* @description:
* @param {ServerContext} *context
* @param {Request} *request
* @param {Reply} *reply
* @return {*}
*/
Status GrpcEngine::Leave(ServerContext *context, const Request *request, Reply *reply)
{
//verify the request header
if (request->head() != this->HEAD)
{
reply->set_error_msg(" head: \"" + std::to_string(request->head()) + "\" permission denied !");
return Status::OK;
}
//TODO(zhangwei): implement
std::shared_ptr<std::string> pstrCommand = std::make_shared<std::string>("end");
outputQueMap_[strPort0_]->push(std::static_pointer_cast<void>(pstrCommand));
return Status::OK;
}
/**
* -
* @description:
* @param {ServerContext} *context
* @param {Request} *request
* @param {Reply} *reply
* @return {*}
*/
Status GrpcEngine::TemporaryStop(ServerContext *context, const Request *request, Reply *reply)
{
//verify the request header
if (request->head() != this->HEAD)
{
reply->set_error_msg(" head: \"" + std::to_string(request->head()) + "\" permission denied !");
return Status::OK;
}
//TODO(zhangwei): implement
return Status::OK;
}
/**
* -
* @description:
* @param {ServerContext} *context
* @param {Request} *request
* @param {Reply} *reply
* @return {*}
*/
Status GrpcEngine::Reversing(ServerContext *context, const Request *request, Reply *reply)
{
//verify the request header
if (request->head() != this->HEAD)
{
reply->set_error_msg(" head: \"" + std::to_string(request->head()) + "\" permission denied !");
return Status::OK;
}
//TODO(zhangwei): implement
std::shared_ptr<std::string> pstrCommand = std::make_shared<std::string>("back");
outputQueMap_[strPort0_]->push(std::static_pointer_cast<void>(pstrCommand));
return Status::OK;
}
/**
*
* @description:
* @param {ServerContext} *context
* @param {QueryWorkstationRequest} *request
* @param {QueryWorkstationReply} *reply
* @return {*}
*/
Status GrpcEngine::QueryWorkstation(ServerContext *context, const QueryWorkstationRequest *request, QueryWorkstationReply *reply)
{
//verify the request header
if (request->head() != this->HEAD)
{
reply->set_error_msg(" head: \"" + std::to_string(request->head()) + "\" permission denied !");
return Status::OK;
}
//TODO(zhangwei): implement; revisit later whether this needs to change
try
{
reply->set_name_no(MyYaml::GetIns()->GetStringValue("atlas_poundno"));
}
catch(const std::exception& e)
{
reply->set_error_msg("查询失败!配置参数读取异常!");
}
return Status::OK;
}
/**
*
* @description:
* @param {ServerContext} *context
* @param {UpdateWorkstationRequest} *request
* @param {UpdateWorkstationReply} *reply
* @return {*}
*/
Status GrpcEngine::UpdateWorkstation(ServerContext *context, const UpdateWorkstationRequest *request, UpdateWorkstationReply *reply)
{
//verify the request header
if (request->head() != this->HEAD)
{
reply->set_error_msg(" head: \"" + std::to_string(request->head()) + "\" permission denied !");
return Status::OK;
}
//TODO(zhangwei): implement
return Status::OK;
}
/**
*
* @description:
* @param {ServerContext} *context
* @param {QueryCameraRequest} *request
* @param {QueryCameraReply} *reply
* @return {*}
*/
Status GrpcEngine::QueryCamera(ServerContext *context, const QueryCameraRequest *request, QueryCameraReply *reply)
{
//verify the request header
if (request->head() != this->HEAD)
{
reply->set_error_msg(" head: \"" + std::to_string(request->head()) + "\" permission denied !");
return Status::OK;
}
//TODO(zhangwei): implement; revisit later whether this needs to change
ai_matrix::DataSourceConfig dataSourceConfig_;
dataSourceConfig_ = MyYaml::GetIns()->GetDataSourceConfigById(request->channel_id()); // get camera parameters
reply->set_channel_id(request->channel_id());
reply->set_use(dataSourceConfig_.bUse);
reply->set_url(dataSourceConfig_.strUrl);
reply->set_skipinterval(dataSourceConfig_.iSkipInterval);
reply->set_direction(dataSourceConfig_.iDirection);
reply->set_left_first(dataSourceConfig_.iLeftFirst);
reply->set_right_first(dataSourceConfig_.iRightFirst);
return Status::OK;
}
/**
*
* @description:
* @param {ServerContext} *context
* @param {UpdateCameraRequest} *request
* @param {UpdateCameraReply} *reply
* @return {*}
*/
Status GrpcEngine::UpdateCamera(ServerContext *context, const UpdateCameraRequest *request, UpdateCameraReply *reply)
{
//verify the request header
if (request->head() != this->HEAD)
{
reply->set_error_msg(" head: \"" + std::to_string(request->head()) + "\" permission denied !");
return Status::OK;
}
//TODO(zhangwei): implement
return Status::OK;
}
/**
* FTP的配置
* @description:
* @param {ServerContext} *context
* @param {QueryFTPRequest} *request
* @param {QueryFTPReply} *reply
* @return {*}
*/
Status GrpcEngine::QueryFTP(ServerContext *context, const QueryFTPRequest *request, QueryFTPReply *reply)
{
//verify the request header
if (request->head() != this->HEAD)
{
reply->set_error_msg(" head: \"" + std::to_string(request->head()) + "\" permission denied !");
return Status::OK;
}
//TODO(zhangwei): implement; revisit later whether changes are needed
try
{
reply->set_open(MyYaml::GetIns()->GetIntValue("gc_ftp_open"));
reply->set_username(MyYaml::GetIns()->GetStringValue("gc_ftp_username"));
reply->set_password(MyYaml::GetIns()->GetStringValue("gc_ftp_password"));
reply->set_ip(MyYaml::GetIns()->GetStringValue("gc_ftp_ip"));
reply->set_port(MyYaml::GetIns()->GetIntValue("gc_ftp_port"));
reply->set_image_path(MyYaml::GetIns()->GetStringValue("gc_ftp_image_path"));
reply->set_quit_time(MyYaml::GetIns()->GetIntValue("gc_ftp_quit_time"));
}
catch (const std::exception& e)
{
reply->set_error_msg("查询失败!配置参数读取异常!");
}
return Status::OK;
}
/**
* FTP的配置
* @description:
* @param {ServerContext} *context
* @param {UpdateFTPRequest} *request
* @param {UpdateFTPReply} *reply
* @return {*}
*/
Status GrpcEngine::UpdateFTP(ServerContext *context, const UpdateFTPRequest *request, UpdateFTPReply *reply)
{
//verify the request header
if (request->head() != this->HEAD)
{
reply->set_error_msg(" head: \"" + std::to_string(request->head()) + "\" permission denied !");
return Status::OK;
}
//TODO(zhangwei): implement
return Status::OK;
}
/**
*
* @description:
* @param {ServerContext} *context
* @param {Request} *request
* @param {Reply} *reply
* @return {*}
*/
Status GrpcEngine::RebootAI(ServerContext *context, const Request *request, Reply *reply)
{
LogInfo << "Restart >>> head:" << request->head();
//verify the request header
if (request->head() != this->HEAD)
{
reply->set_error_msg(" head: \"" + std::to_string(request->head()) + "\" permission denied !");
return Status::OK;
}
pthread_t id;
int ret = pthread_create(&id, NULL, system_restart, NULL);
if (ret != 0)
{
reply->set_error_msg("小站重启失败!");
}
return Status::OK;
}
// thread routine for the restart
void* GrpcEngine::system_restart(void* args)
{
sleep(2);
system("reboot");
return 0;
}

View File

@ -1,106 +0,0 @@
/*
* @Author: your name
* @Date: 2022-01-27 10:27:26
* @LastEditors: your name
* @LastEditTime: 2022-02-17 10:40:16
* @Description: file content
* @FilePath: \lirs\code\GrpcEngine\GrpcEngine.h
*
* Copyright © 2022 <Shandong Matrix Software Engineering Co., Ltd>
*/
/**
* gRPC microservice interface
* */
#ifndef GRPC_ENGINE_H
#define GRPC_ENGINE_H
#include <grpcpp/ext/proto_server_reflection_plugin.h>
#include <grpcpp/grpcpp.h>
#include <grpcpp/health_check_service_interface.h>
#include "grpcservice.grpc.pb.h"
#include "AppCommon.h"
using grpc::Server;
using grpc::ServerBuilder;
using grpc::ServerContext;
using grpc::Status;
using grpc::StatusCode;
using MatrixAi::Train;
using MatrixAi::Request;
using MatrixAi::Reply;
using MatrixAi::QueryWorkstationRequest;
using MatrixAi::QueryWorkstationReply;
using MatrixAi::UpdateWorkstationRequest;
using MatrixAi::UpdateWorkstationReply;
using MatrixAi::QueryCameraRequest;
using MatrixAi::QueryCameraReply;
using MatrixAi::UpdateCameraRequest;
using MatrixAi::UpdateCameraReply;
using MatrixAi::QueryFTPRequest;
using MatrixAi::QueryFTPReply;
using MatrixAi::UpdateFTPRequest;
using MatrixAi::UpdateFTPReply;
#include "EngineBase.h"
#include "EngineFactory.h"
#include "MyYaml.h"
class GrpcEngine : public ai_matrix::EngineBase, public Train::Service
{
public:
GrpcEngine();
~GrpcEngine();
APP_ERROR Init() override;
APP_ERROR DeInit() override;
APP_ERROR Process() override;
//recognition state control - train arriving
Status Come(ServerContext *context, const Request *request,Reply *reply);
//recognition state control - train departing
Status Leave(ServerContext *context, const Request *request, Reply *reply);
//recognition state control - temporary stop
Status TemporaryStop(ServerContext *context, const Request *request,Reply *reply);
//recognition state control - reversing
Status Reversing(ServerContext *context, const Request *request, Reply *reply);
//query the "workstation settings"
Status QueryWorkstation(ServerContext *context, const QueryWorkstationRequest *request, QueryWorkstationReply *reply);
//update the "workstation settings"
Status UpdateWorkstation(ServerContext *context, const UpdateWorkstationRequest *request, UpdateWorkstationReply *reply);
//query the camera configuration
Status QueryCamera(ServerContext *context, const QueryCameraRequest *request, QueryCameraReply *reply);
//update the camera configuration
Status UpdateCamera(ServerContext *context, const UpdateCameraRequest *request, UpdateCameraReply *reply);
//query the FTP configuration
Status QueryFTP(ServerContext *context, const QueryFTPRequest *request, QueryFTPReply *reply);
//update the FTP configuration
Status UpdateFTP(ServerContext *context, const UpdateFTPRequest *request, UpdateFTPReply *reply);
//restart the recognition program
Status RebootAI(ServerContext *context, const Request *request, Reply *reply);
private:
std::string strPort0_;
//command flag required for every interface call
const int HEAD = 0x7d;
// thread routine
static void* system_restart(void* args);
};
ENGINE_REGIST(GrpcEngine)
#endif

View File

@ -1,506 +0,0 @@
// Generated by the gRPC C++ plugin.
// If you make any local change, they will be lost.
// source: grpcservice.proto
#include "grpcservice.pb.h"
#include "grpcservice.grpc.pb.h"
#include <functional>
#include <grpcpp/impl/codegen/async_stream.h>
#include <grpcpp/impl/codegen/async_unary_call.h>
#include <grpcpp/impl/codegen/channel_interface.h>
#include <grpcpp/impl/codegen/client_unary_call.h>
#include <grpcpp/impl/codegen/client_callback.h>
#include <grpcpp/impl/codegen/message_allocator.h>
#include <grpcpp/impl/codegen/method_handler.h>
#include <grpcpp/impl/codegen/rpc_service_method.h>
#include <grpcpp/impl/codegen/server_callback.h>
#include <grpcpp/impl/codegen/server_callback_handlers.h>
#include <grpcpp/impl/codegen/server_context.h>
#include <grpcpp/impl/codegen/service_type.h>
#include <grpcpp/impl/codegen/sync_stream.h>
namespace MatrixAi {
static const char* Train_method_names[] = {
"/MatrixAi.Train/Come",
"/MatrixAi.Train/Leave",
"/MatrixAi.Train/TemporaryStop",
"/MatrixAi.Train/Reversing",
"/MatrixAi.Train/QueryWorkstation",
"/MatrixAi.Train/UpdateWorkstation",
"/MatrixAi.Train/QueryCamera",
"/MatrixAi.Train/UpdateCamera",
"/MatrixAi.Train/QueryFTP",
"/MatrixAi.Train/UpdateFTP",
"/MatrixAi.Train/RebootAI",
};
std::unique_ptr< Train::Stub> Train::NewStub(const std::shared_ptr< ::grpc::ChannelInterface>& channel, const ::grpc::StubOptions& options) {
(void)options;
std::unique_ptr< Train::Stub> stub(new Train::Stub(channel));
return stub;
}
Train::Stub::Stub(const std::shared_ptr< ::grpc::ChannelInterface>& channel)
: channel_(channel), rpcmethod_Come_(Train_method_names[0], ::grpc::internal::RpcMethod::NORMAL_RPC, channel)
, rpcmethod_Leave_(Train_method_names[1], ::grpc::internal::RpcMethod::NORMAL_RPC, channel)
, rpcmethod_TemporaryStop_(Train_method_names[2], ::grpc::internal::RpcMethod::NORMAL_RPC, channel)
, rpcmethod_Reversing_(Train_method_names[3], ::grpc::internal::RpcMethod::NORMAL_RPC, channel)
, rpcmethod_QueryWorkstation_(Train_method_names[4], ::grpc::internal::RpcMethod::NORMAL_RPC, channel)
, rpcmethod_UpdateWorkstation_(Train_method_names[5], ::grpc::internal::RpcMethod::NORMAL_RPC, channel)
, rpcmethod_QueryCamera_(Train_method_names[6], ::grpc::internal::RpcMethod::NORMAL_RPC, channel)
, rpcmethod_UpdateCamera_(Train_method_names[7], ::grpc::internal::RpcMethod::NORMAL_RPC, channel)
, rpcmethod_QueryFTP_(Train_method_names[8], ::grpc::internal::RpcMethod::NORMAL_RPC, channel)
, rpcmethod_UpdateFTP_(Train_method_names[9], ::grpc::internal::RpcMethod::NORMAL_RPC, channel)
, rpcmethod_RebootAI_(Train_method_names[10], ::grpc::internal::RpcMethod::NORMAL_RPC, channel)
{}
::grpc::Status Train::Stub::Come(::grpc::ClientContext* context, const ::MatrixAi::Request& request, ::MatrixAi::Reply* response) {
return ::grpc::internal::BlockingUnaryCall< ::MatrixAi::Request, ::MatrixAi::Reply, ::grpc::protobuf::MessageLite, ::grpc::protobuf::MessageLite>(channel_.get(), rpcmethod_Come_, context, request, response);
}
void Train::Stub::experimental_async::Come(::grpc::ClientContext* context, const ::MatrixAi::Request* request, ::MatrixAi::Reply* response, std::function<void(::grpc::Status)> f) {
::grpc::internal::CallbackUnaryCall< ::MatrixAi::Request, ::MatrixAi::Reply, ::grpc::protobuf::MessageLite, ::grpc::protobuf::MessageLite>(stub_->channel_.get(), stub_->rpcmethod_Come_, context, request, response, std::move(f));
}
void Train::Stub::experimental_async::Come(::grpc::ClientContext* context, const ::MatrixAi::Request* request, ::MatrixAi::Reply* response, ::grpc::experimental::ClientUnaryReactor* reactor) {
::grpc::internal::ClientCallbackUnaryFactory::Create< ::grpc::protobuf::MessageLite, ::grpc::protobuf::MessageLite>(stub_->channel_.get(), stub_->rpcmethod_Come_, context, request, response, reactor);
}
::grpc::ClientAsyncResponseReader< ::MatrixAi::Reply>* Train::Stub::PrepareAsyncComeRaw(::grpc::ClientContext* context, const ::MatrixAi::Request& request, ::grpc::CompletionQueue* cq) {
return ::grpc::internal::ClientAsyncResponseReaderHelper::Create< ::MatrixAi::Reply, ::MatrixAi::Request, ::grpc::protobuf::MessageLite, ::grpc::protobuf::MessageLite>(channel_.get(), cq, rpcmethod_Come_, context, request);
}
::grpc::ClientAsyncResponseReader< ::MatrixAi::Reply>* Train::Stub::AsyncComeRaw(::grpc::ClientContext* context, const ::MatrixAi::Request& request, ::grpc::CompletionQueue* cq) {
auto* result =
this->PrepareAsyncComeRaw(context, request, cq);
result->StartCall();
return result;
}
::grpc::Status Train::Stub::Leave(::grpc::ClientContext* context, const ::MatrixAi::Request& request, ::MatrixAi::Reply* response) {
return ::grpc::internal::BlockingUnaryCall< ::MatrixAi::Request, ::MatrixAi::Reply, ::grpc::protobuf::MessageLite, ::grpc::protobuf::MessageLite>(channel_.get(), rpcmethod_Leave_, context, request, response);
}
void Train::Stub::experimental_async::Leave(::grpc::ClientContext* context, const ::MatrixAi::Request* request, ::MatrixAi::Reply* response, std::function<void(::grpc::Status)> f) {
::grpc::internal::CallbackUnaryCall< ::MatrixAi::Request, ::MatrixAi::Reply, ::grpc::protobuf::MessageLite, ::grpc::protobuf::MessageLite>(stub_->channel_.get(), stub_->rpcmethod_Leave_, context, request, response, std::move(f));
}
void Train::Stub::experimental_async::Leave(::grpc::ClientContext* context, const ::MatrixAi::Request* request, ::MatrixAi::Reply* response, ::grpc::experimental::ClientUnaryReactor* reactor) {
::grpc::internal::ClientCallbackUnaryFactory::Create< ::grpc::protobuf::MessageLite, ::grpc::protobuf::MessageLite>(stub_->channel_.get(), stub_->rpcmethod_Leave_, context, request, response, reactor);
}
::grpc::ClientAsyncResponseReader< ::MatrixAi::Reply>* Train::Stub::PrepareAsyncLeaveRaw(::grpc::ClientContext* context, const ::MatrixAi::Request& request, ::grpc::CompletionQueue* cq) {
return ::grpc::internal::ClientAsyncResponseReaderHelper::Create< ::MatrixAi::Reply, ::MatrixAi::Request, ::grpc::protobuf::MessageLite, ::grpc::protobuf::MessageLite>(channel_.get(), cq, rpcmethod_Leave_, context, request);
}
::grpc::ClientAsyncResponseReader< ::MatrixAi::Reply>* Train::Stub::AsyncLeaveRaw(::grpc::ClientContext* context, const ::MatrixAi::Request& request, ::grpc::CompletionQueue* cq) {
auto* result =
this->PrepareAsyncLeaveRaw(context, request, cq);
result->StartCall();
return result;
}
::grpc::Status Train::Stub::TemporaryStop(::grpc::ClientContext* context, const ::MatrixAi::Request& request, ::MatrixAi::Reply* response) {
return ::grpc::internal::BlockingUnaryCall< ::MatrixAi::Request, ::MatrixAi::Reply, ::grpc::protobuf::MessageLite, ::grpc::protobuf::MessageLite>(channel_.get(), rpcmethod_TemporaryStop_, context, request, response);
}
void Train::Stub::experimental_async::TemporaryStop(::grpc::ClientContext* context, const ::MatrixAi::Request* request, ::MatrixAi::Reply* response, std::function<void(::grpc::Status)> f) {
::grpc::internal::CallbackUnaryCall< ::MatrixAi::Request, ::MatrixAi::Reply, ::grpc::protobuf::MessageLite, ::grpc::protobuf::MessageLite>(stub_->channel_.get(), stub_->rpcmethod_TemporaryStop_, context, request, response, std::move(f));
}
void Train::Stub::experimental_async::TemporaryStop(::grpc::ClientContext* context, const ::MatrixAi::Request* request, ::MatrixAi::Reply* response, ::grpc::experimental::ClientUnaryReactor* reactor) {
::grpc::internal::ClientCallbackUnaryFactory::Create< ::grpc::protobuf::MessageLite, ::grpc::protobuf::MessageLite>(stub_->channel_.get(), stub_->rpcmethod_TemporaryStop_, context, request, response, reactor);
}
::grpc::ClientAsyncResponseReader< ::MatrixAi::Reply>* Train::Stub::PrepareAsyncTemporaryStopRaw(::grpc::ClientContext* context, const ::MatrixAi::Request& request, ::grpc::CompletionQueue* cq) {
return ::grpc::internal::ClientAsyncResponseReaderHelper::Create< ::MatrixAi::Reply, ::MatrixAi::Request, ::grpc::protobuf::MessageLite, ::grpc::protobuf::MessageLite>(channel_.get(), cq, rpcmethod_TemporaryStop_, context, request);
}
::grpc::ClientAsyncResponseReader< ::MatrixAi::Reply>* Train::Stub::AsyncTemporaryStopRaw(::grpc::ClientContext* context, const ::MatrixAi::Request& request, ::grpc::CompletionQueue* cq) {
auto* result =
this->PrepareAsyncTemporaryStopRaw(context, request, cq);
result->StartCall();
return result;
}
::grpc::Status Train::Stub::Reversing(::grpc::ClientContext* context, const ::MatrixAi::Request& request, ::MatrixAi::Reply* response) {
return ::grpc::internal::BlockingUnaryCall< ::MatrixAi::Request, ::MatrixAi::Reply, ::grpc::protobuf::MessageLite, ::grpc::protobuf::MessageLite>(channel_.get(), rpcmethod_Reversing_, context, request, response);
}
void Train::Stub::experimental_async::Reversing(::grpc::ClientContext* context, const ::MatrixAi::Request* request, ::MatrixAi::Reply* response, std::function<void(::grpc::Status)> f) {
::grpc::internal::CallbackUnaryCall< ::MatrixAi::Request, ::MatrixAi::Reply, ::grpc::protobuf::MessageLite, ::grpc::protobuf::MessageLite>(stub_->channel_.get(), stub_->rpcmethod_Reversing_, context, request, response, std::move(f));
}
void Train::Stub::experimental_async::Reversing(::grpc::ClientContext* context, const ::MatrixAi::Request* request, ::MatrixAi::Reply* response, ::grpc::experimental::ClientUnaryReactor* reactor) {
::grpc::internal::ClientCallbackUnaryFactory::Create< ::grpc::protobuf::MessageLite, ::grpc::protobuf::MessageLite>(stub_->channel_.get(), stub_->rpcmethod_Reversing_, context, request, response, reactor);
}
::grpc::ClientAsyncResponseReader< ::MatrixAi::Reply>* Train::Stub::PrepareAsyncReversingRaw(::grpc::ClientContext* context, const ::MatrixAi::Request& request, ::grpc::CompletionQueue* cq) {
return ::grpc::internal::ClientAsyncResponseReaderHelper::Create< ::MatrixAi::Reply, ::MatrixAi::Request, ::grpc::protobuf::MessageLite, ::grpc::protobuf::MessageLite>(channel_.get(), cq, rpcmethod_Reversing_, context, request);
}
::grpc::ClientAsyncResponseReader< ::MatrixAi::Reply>* Train::Stub::AsyncReversingRaw(::grpc::ClientContext* context, const ::MatrixAi::Request& request, ::grpc::CompletionQueue* cq) {
auto* result =
this->PrepareAsyncReversingRaw(context, request, cq);
result->StartCall();
return result;
}
::grpc::Status Train::Stub::QueryWorkstation(::grpc::ClientContext* context, const ::MatrixAi::QueryWorkstationRequest& request, ::MatrixAi::QueryWorkstationReply* response) {
return ::grpc::internal::BlockingUnaryCall< ::MatrixAi::QueryWorkstationRequest, ::MatrixAi::QueryWorkstationReply, ::grpc::protobuf::MessageLite, ::grpc::protobuf::MessageLite>(channel_.get(), rpcmethod_QueryWorkstation_, context, request, response);
}
void Train::Stub::experimental_async::QueryWorkstation(::grpc::ClientContext* context, const ::MatrixAi::QueryWorkstationRequest* request, ::MatrixAi::QueryWorkstationReply* response, std::function<void(::grpc::Status)> f) {
::grpc::internal::CallbackUnaryCall< ::MatrixAi::QueryWorkstationRequest, ::MatrixAi::QueryWorkstationReply, ::grpc::protobuf::MessageLite, ::grpc::protobuf::MessageLite>(stub_->channel_.get(), stub_->rpcmethod_QueryWorkstation_, context, request, response, std::move(f));
}
void Train::Stub::experimental_async::QueryWorkstation(::grpc::ClientContext* context, const ::MatrixAi::QueryWorkstationRequest* request, ::MatrixAi::QueryWorkstationReply* response, ::grpc::experimental::ClientUnaryReactor* reactor) {
::grpc::internal::ClientCallbackUnaryFactory::Create< ::grpc::protobuf::MessageLite, ::grpc::protobuf::MessageLite>(stub_->channel_.get(), stub_->rpcmethod_QueryWorkstation_, context, request, response, reactor);
}
::grpc::ClientAsyncResponseReader< ::MatrixAi::QueryWorkstationReply>* Train::Stub::PrepareAsyncQueryWorkstationRaw(::grpc::ClientContext* context, const ::MatrixAi::QueryWorkstationRequest& request, ::grpc::CompletionQueue* cq) {
return ::grpc::internal::ClientAsyncResponseReaderHelper::Create< ::MatrixAi::QueryWorkstationReply, ::MatrixAi::QueryWorkstationRequest, ::grpc::protobuf::MessageLite, ::grpc::protobuf::MessageLite>(channel_.get(), cq, rpcmethod_QueryWorkstation_, context, request);
}
::grpc::ClientAsyncResponseReader< ::MatrixAi::QueryWorkstationReply>* Train::Stub::AsyncQueryWorkstationRaw(::grpc::ClientContext* context, const ::MatrixAi::QueryWorkstationRequest& request, ::grpc::CompletionQueue* cq) {
auto* result =
this->PrepareAsyncQueryWorkstationRaw(context, request, cq);
result->StartCall();
return result;
}
::grpc::Status Train::Stub::UpdateWorkstation(::grpc::ClientContext* context, const ::MatrixAi::UpdateWorkstationRequest& request, ::MatrixAi::UpdateWorkstationReply* response) {
return ::grpc::internal::BlockingUnaryCall< ::MatrixAi::UpdateWorkstationRequest, ::MatrixAi::UpdateWorkstationReply, ::grpc::protobuf::MessageLite, ::grpc::protobuf::MessageLite>(channel_.get(), rpcmethod_UpdateWorkstation_, context, request, response);
}
void Train::Stub::experimental_async::UpdateWorkstation(::grpc::ClientContext* context, const ::MatrixAi::UpdateWorkstationRequest* request, ::MatrixAi::UpdateWorkstationReply* response, std::function<void(::grpc::Status)> f) {
::grpc::internal::CallbackUnaryCall< ::MatrixAi::UpdateWorkstationRequest, ::MatrixAi::UpdateWorkstationReply, ::grpc::protobuf::MessageLite, ::grpc::protobuf::MessageLite>(stub_->channel_.get(), stub_->rpcmethod_UpdateWorkstation_, context, request, response, std::move(f));
}
void Train::Stub::experimental_async::UpdateWorkstation(::grpc::ClientContext* context, const ::MatrixAi::UpdateWorkstationRequest* request, ::MatrixAi::UpdateWorkstationReply* response, ::grpc::experimental::ClientUnaryReactor* reactor) {
::grpc::internal::ClientCallbackUnaryFactory::Create< ::grpc::protobuf::MessageLite, ::grpc::protobuf::MessageLite>(stub_->channel_.get(), stub_->rpcmethod_UpdateWorkstation_, context, request, response, reactor);
}
::grpc::ClientAsyncResponseReader< ::MatrixAi::UpdateWorkstationReply>* Train::Stub::PrepareAsyncUpdateWorkstationRaw(::grpc::ClientContext* context, const ::MatrixAi::UpdateWorkstationRequest& request, ::grpc::CompletionQueue* cq) {
return ::grpc::internal::ClientAsyncResponseReaderHelper::Create< ::MatrixAi::UpdateWorkstationReply, ::MatrixAi::UpdateWorkstationRequest, ::grpc::protobuf::MessageLite, ::grpc::protobuf::MessageLite>(channel_.get(), cq, rpcmethod_UpdateWorkstation_, context, request);
}
::grpc::ClientAsyncResponseReader< ::MatrixAi::UpdateWorkstationReply>* Train::Stub::AsyncUpdateWorkstationRaw(::grpc::ClientContext* context, const ::MatrixAi::UpdateWorkstationRequest& request, ::grpc::CompletionQueue* cq) {
auto* result =
this->PrepareAsyncUpdateWorkstationRaw(context, request, cq);
result->StartCall();
return result;
}
::grpc::Status Train::Stub::QueryCamera(::grpc::ClientContext* context, const ::MatrixAi::QueryCameraRequest& request, ::MatrixAi::QueryCameraReply* response) {
return ::grpc::internal::BlockingUnaryCall< ::MatrixAi::QueryCameraRequest, ::MatrixAi::QueryCameraReply, ::grpc::protobuf::MessageLite, ::grpc::protobuf::MessageLite>(channel_.get(), rpcmethod_QueryCamera_, context, request, response);
}
void Train::Stub::experimental_async::QueryCamera(::grpc::ClientContext* context, const ::MatrixAi::QueryCameraRequest* request, ::MatrixAi::QueryCameraReply* response, std::function<void(::grpc::Status)> f) {
::grpc::internal::CallbackUnaryCall< ::MatrixAi::QueryCameraRequest, ::MatrixAi::QueryCameraReply, ::grpc::protobuf::MessageLite, ::grpc::protobuf::MessageLite>(stub_->channel_.get(), stub_->rpcmethod_QueryCamera_, context, request, response, std::move(f));
}
void Train::Stub::experimental_async::QueryCamera(::grpc::ClientContext* context, const ::MatrixAi::QueryCameraRequest* request, ::MatrixAi::QueryCameraReply* response, ::grpc::experimental::ClientUnaryReactor* reactor) {
::grpc::internal::ClientCallbackUnaryFactory::Create< ::grpc::protobuf::MessageLite, ::grpc::protobuf::MessageLite>(stub_->channel_.get(), stub_->rpcmethod_QueryCamera_, context, request, response, reactor);
}
::grpc::ClientAsyncResponseReader< ::MatrixAi::QueryCameraReply>* Train::Stub::PrepareAsyncQueryCameraRaw(::grpc::ClientContext* context, const ::MatrixAi::QueryCameraRequest& request, ::grpc::CompletionQueue* cq) {
return ::grpc::internal::ClientAsyncResponseReaderHelper::Create< ::MatrixAi::QueryCameraReply, ::MatrixAi::QueryCameraRequest, ::grpc::protobuf::MessageLite, ::grpc::protobuf::MessageLite>(channel_.get(), cq, rpcmethod_QueryCamera_, context, request);
}
::grpc::ClientAsyncResponseReader< ::MatrixAi::QueryCameraReply>* Train::Stub::AsyncQueryCameraRaw(::grpc::ClientContext* context, const ::MatrixAi::QueryCameraRequest& request, ::grpc::CompletionQueue* cq) {
auto* result =
this->PrepareAsyncQueryCameraRaw(context, request, cq);
result->StartCall();
return result;
}
::grpc::Status Train::Stub::UpdateCamera(::grpc::ClientContext* context, const ::MatrixAi::UpdateCameraRequest& request, ::MatrixAi::UpdateCameraReply* response) {
return ::grpc::internal::BlockingUnaryCall< ::MatrixAi::UpdateCameraRequest, ::MatrixAi::UpdateCameraReply, ::grpc::protobuf::MessageLite, ::grpc::protobuf::MessageLite>(channel_.get(), rpcmethod_UpdateCamera_, context, request, response);
}
void Train::Stub::experimental_async::UpdateCamera(::grpc::ClientContext* context, const ::MatrixAi::UpdateCameraRequest* request, ::MatrixAi::UpdateCameraReply* response, std::function<void(::grpc::Status)> f) {
::grpc::internal::CallbackUnaryCall< ::MatrixAi::UpdateCameraRequest, ::MatrixAi::UpdateCameraReply, ::grpc::protobuf::MessageLite, ::grpc::protobuf::MessageLite>(stub_->channel_.get(), stub_->rpcmethod_UpdateCamera_, context, request, response, std::move(f));
}
void Train::Stub::experimental_async::UpdateCamera(::grpc::ClientContext* context, const ::MatrixAi::UpdateCameraRequest* request, ::MatrixAi::UpdateCameraReply* response, ::grpc::experimental::ClientUnaryReactor* reactor) {
::grpc::internal::ClientCallbackUnaryFactory::Create< ::grpc::protobuf::MessageLite, ::grpc::protobuf::MessageLite>(stub_->channel_.get(), stub_->rpcmethod_UpdateCamera_, context, request, response, reactor);
}
::grpc::ClientAsyncResponseReader< ::MatrixAi::UpdateCameraReply>* Train::Stub::PrepareAsyncUpdateCameraRaw(::grpc::ClientContext* context, const ::MatrixAi::UpdateCameraRequest& request, ::grpc::CompletionQueue* cq) {
return ::grpc::internal::ClientAsyncResponseReaderHelper::Create< ::MatrixAi::UpdateCameraReply, ::MatrixAi::UpdateCameraRequest, ::grpc::protobuf::MessageLite, ::grpc::protobuf::MessageLite>(channel_.get(), cq, rpcmethod_UpdateCamera_, context, request);
}
::grpc::ClientAsyncResponseReader< ::MatrixAi::UpdateCameraReply>* Train::Stub::AsyncUpdateCameraRaw(::grpc::ClientContext* context, const ::MatrixAi::UpdateCameraRequest& request, ::grpc::CompletionQueue* cq) {
auto* result =
this->PrepareAsyncUpdateCameraRaw(context, request, cq);
result->StartCall();
return result;
}
::grpc::Status Train::Stub::QueryFTP(::grpc::ClientContext* context, const ::MatrixAi::QueryFTPRequest& request, ::MatrixAi::QueryFTPReply* response) {
return ::grpc::internal::BlockingUnaryCall< ::MatrixAi::QueryFTPRequest, ::MatrixAi::QueryFTPReply, ::grpc::protobuf::MessageLite, ::grpc::protobuf::MessageLite>(channel_.get(), rpcmethod_QueryFTP_, context, request, response);
}
void Train::Stub::experimental_async::QueryFTP(::grpc::ClientContext* context, const ::MatrixAi::QueryFTPRequest* request, ::MatrixAi::QueryFTPReply* response, std::function<void(::grpc::Status)> f) {
::grpc::internal::CallbackUnaryCall< ::MatrixAi::QueryFTPRequest, ::MatrixAi::QueryFTPReply, ::grpc::protobuf::MessageLite, ::grpc::protobuf::MessageLite>(stub_->channel_.get(), stub_->rpcmethod_QueryFTP_, context, request, response, std::move(f));
}
void Train::Stub::experimental_async::QueryFTP(::grpc::ClientContext* context, const ::MatrixAi::QueryFTPRequest* request, ::MatrixAi::QueryFTPReply* response, ::grpc::experimental::ClientUnaryReactor* reactor) {
::grpc::internal::ClientCallbackUnaryFactory::Create< ::grpc::protobuf::MessageLite, ::grpc::protobuf::MessageLite>(stub_->channel_.get(), stub_->rpcmethod_QueryFTP_, context, request, response, reactor);
}
::grpc::ClientAsyncResponseReader< ::MatrixAi::QueryFTPReply>* Train::Stub::PrepareAsyncQueryFTPRaw(::grpc::ClientContext* context, const ::MatrixAi::QueryFTPRequest& request, ::grpc::CompletionQueue* cq) {
return ::grpc::internal::ClientAsyncResponseReaderHelper::Create< ::MatrixAi::QueryFTPReply, ::MatrixAi::QueryFTPRequest, ::grpc::protobuf::MessageLite, ::grpc::protobuf::MessageLite>(channel_.get(), cq, rpcmethod_QueryFTP_, context, request);
}
::grpc::ClientAsyncResponseReader< ::MatrixAi::QueryFTPReply>* Train::Stub::AsyncQueryFTPRaw(::grpc::ClientContext* context, const ::MatrixAi::QueryFTPRequest& request, ::grpc::CompletionQueue* cq) {
auto* result =
this->PrepareAsyncQueryFTPRaw(context, request, cq);
result->StartCall();
return result;
}
::grpc::Status Train::Stub::UpdateFTP(::grpc::ClientContext* context, const ::MatrixAi::UpdateFTPRequest& request, ::MatrixAi::UpdateFTPReply* response) {
return ::grpc::internal::BlockingUnaryCall< ::MatrixAi::UpdateFTPRequest, ::MatrixAi::UpdateFTPReply, ::grpc::protobuf::MessageLite, ::grpc::protobuf::MessageLite>(channel_.get(), rpcmethod_UpdateFTP_, context, request, response);
}
void Train::Stub::experimental_async::UpdateFTP(::grpc::ClientContext* context, const ::MatrixAi::UpdateFTPRequest* request, ::MatrixAi::UpdateFTPReply* response, std::function<void(::grpc::Status)> f) {
::grpc::internal::CallbackUnaryCall< ::MatrixAi::UpdateFTPRequest, ::MatrixAi::UpdateFTPReply, ::grpc::protobuf::MessageLite, ::grpc::protobuf::MessageLite>(stub_->channel_.get(), stub_->rpcmethod_UpdateFTP_, context, request, response, std::move(f));
}
void Train::Stub::experimental_async::UpdateFTP(::grpc::ClientContext* context, const ::MatrixAi::UpdateFTPRequest* request, ::MatrixAi::UpdateFTPReply* response, ::grpc::experimental::ClientUnaryReactor* reactor) {
::grpc::internal::ClientCallbackUnaryFactory::Create< ::grpc::protobuf::MessageLite, ::grpc::protobuf::MessageLite>(stub_->channel_.get(), stub_->rpcmethod_UpdateFTP_, context, request, response, reactor);
}
::grpc::ClientAsyncResponseReader< ::MatrixAi::UpdateFTPReply>* Train::Stub::PrepareAsyncUpdateFTPRaw(::grpc::ClientContext* context, const ::MatrixAi::UpdateFTPRequest& request, ::grpc::CompletionQueue* cq) {
return ::grpc::internal::ClientAsyncResponseReaderHelper::Create< ::MatrixAi::UpdateFTPReply, ::MatrixAi::UpdateFTPRequest, ::grpc::protobuf::MessageLite, ::grpc::protobuf::MessageLite>(channel_.get(), cq, rpcmethod_UpdateFTP_, context, request);
}
::grpc::ClientAsyncResponseReader< ::MatrixAi::UpdateFTPReply>* Train::Stub::AsyncUpdateFTPRaw(::grpc::ClientContext* context, const ::MatrixAi::UpdateFTPRequest& request, ::grpc::CompletionQueue* cq) {
auto* result =
this->PrepareAsyncUpdateFTPRaw(context, request, cq);
result->StartCall();
return result;
}
::grpc::Status Train::Stub::RebootAI(::grpc::ClientContext* context, const ::MatrixAi::Request& request, ::MatrixAi::Reply* response) {
return ::grpc::internal::BlockingUnaryCall< ::MatrixAi::Request, ::MatrixAi::Reply, ::grpc::protobuf::MessageLite, ::grpc::protobuf::MessageLite>(channel_.get(), rpcmethod_RebootAI_, context, request, response);
}
void Train::Stub::experimental_async::RebootAI(::grpc::ClientContext* context, const ::MatrixAi::Request* request, ::MatrixAi::Reply* response, std::function<void(::grpc::Status)> f) {
::grpc::internal::CallbackUnaryCall< ::MatrixAi::Request, ::MatrixAi::Reply, ::grpc::protobuf::MessageLite, ::grpc::protobuf::MessageLite>(stub_->channel_.get(), stub_->rpcmethod_RebootAI_, context, request, response, std::move(f));
}
void Train::Stub::experimental_async::RebootAI(::grpc::ClientContext* context, const ::MatrixAi::Request* request, ::MatrixAi::Reply* response, ::grpc::experimental::ClientUnaryReactor* reactor) {
::grpc::internal::ClientCallbackUnaryFactory::Create< ::grpc::protobuf::MessageLite, ::grpc::protobuf::MessageLite>(stub_->channel_.get(), stub_->rpcmethod_RebootAI_, context, request, response, reactor);
}
::grpc::ClientAsyncResponseReader< ::MatrixAi::Reply>* Train::Stub::PrepareAsyncRebootAIRaw(::grpc::ClientContext* context, const ::MatrixAi::Request& request, ::grpc::CompletionQueue* cq) {
return ::grpc::internal::ClientAsyncResponseReaderHelper::Create< ::MatrixAi::Reply, ::MatrixAi::Request, ::grpc::protobuf::MessageLite, ::grpc::protobuf::MessageLite>(channel_.get(), cq, rpcmethod_RebootAI_, context, request);
}
::grpc::ClientAsyncResponseReader< ::MatrixAi::Reply>* Train::Stub::AsyncRebootAIRaw(::grpc::ClientContext* context, const ::MatrixAi::Request& request, ::grpc::CompletionQueue* cq) {
auto* result =
this->PrepareAsyncRebootAIRaw(context, request, cq);
result->StartCall();
return result;
}
Train::Service::Service() {
AddMethod(new ::grpc::internal::RpcServiceMethod(
Train_method_names[0],
::grpc::internal::RpcMethod::NORMAL_RPC,
new ::grpc::internal::RpcMethodHandler< Train::Service, ::MatrixAi::Request, ::MatrixAi::Reply, ::grpc::protobuf::MessageLite, ::grpc::protobuf::MessageLite>(
[](Train::Service* service,
::grpc::ServerContext* ctx,
const ::MatrixAi::Request* req,
::MatrixAi::Reply* resp) {
return service->Come(ctx, req, resp);
}, this)));
AddMethod(new ::grpc::internal::RpcServiceMethod(
Train_method_names[1],
::grpc::internal::RpcMethod::NORMAL_RPC,
new ::grpc::internal::RpcMethodHandler< Train::Service, ::MatrixAi::Request, ::MatrixAi::Reply, ::grpc::protobuf::MessageLite, ::grpc::protobuf::MessageLite>(
[](Train::Service* service,
::grpc::ServerContext* ctx,
const ::MatrixAi::Request* req,
::MatrixAi::Reply* resp) {
return service->Leave(ctx, req, resp);
}, this)));
AddMethod(new ::grpc::internal::RpcServiceMethod(
Train_method_names[2],
::grpc::internal::RpcMethod::NORMAL_RPC,
new ::grpc::internal::RpcMethodHandler< Train::Service, ::MatrixAi::Request, ::MatrixAi::Reply, ::grpc::protobuf::MessageLite, ::grpc::protobuf::MessageLite>(
[](Train::Service* service,
::grpc::ServerContext* ctx,
const ::MatrixAi::Request* req,
::MatrixAi::Reply* resp) {
return service->TemporaryStop(ctx, req, resp);
}, this)));
AddMethod(new ::grpc::internal::RpcServiceMethod(
Train_method_names[3],
::grpc::internal::RpcMethod::NORMAL_RPC,
new ::grpc::internal::RpcMethodHandler< Train::Service, ::MatrixAi::Request, ::MatrixAi::Reply, ::grpc::protobuf::MessageLite, ::grpc::protobuf::MessageLite>(
[](Train::Service* service,
::grpc::ServerContext* ctx,
const ::MatrixAi::Request* req,
::MatrixAi::Reply* resp) {
return service->Reversing(ctx, req, resp);
}, this)));
AddMethod(new ::grpc::internal::RpcServiceMethod(
Train_method_names[4],
::grpc::internal::RpcMethod::NORMAL_RPC,
new ::grpc::internal::RpcMethodHandler< Train::Service, ::MatrixAi::QueryWorkstationRequest, ::MatrixAi::QueryWorkstationReply, ::grpc::protobuf::MessageLite, ::grpc::protobuf::MessageLite>(
[](Train::Service* service,
::grpc::ServerContext* ctx,
const ::MatrixAi::QueryWorkstationRequest* req,
::MatrixAi::QueryWorkstationReply* resp) {
return service->QueryWorkstation(ctx, req, resp);
}, this)));
AddMethod(new ::grpc::internal::RpcServiceMethod(
Train_method_names[5],
::grpc::internal::RpcMethod::NORMAL_RPC,
new ::grpc::internal::RpcMethodHandler< Train::Service, ::MatrixAi::UpdateWorkstationRequest, ::MatrixAi::UpdateWorkstationReply, ::grpc::protobuf::MessageLite, ::grpc::protobuf::MessageLite>(
[](Train::Service* service,
::grpc::ServerContext* ctx,
const ::MatrixAi::UpdateWorkstationRequest* req,
::MatrixAi::UpdateWorkstationReply* resp) {
return service->UpdateWorkstation(ctx, req, resp);
}, this)));
AddMethod(new ::grpc::internal::RpcServiceMethod(
Train_method_names[6],
::grpc::internal::RpcMethod::NORMAL_RPC,
new ::grpc::internal::RpcMethodHandler< Train::Service, ::MatrixAi::QueryCameraRequest, ::MatrixAi::QueryCameraReply, ::grpc::protobuf::MessageLite, ::grpc::protobuf::MessageLite>(
[](Train::Service* service,
::grpc::ServerContext* ctx,
const ::MatrixAi::QueryCameraRequest* req,
::MatrixAi::QueryCameraReply* resp) {
return service->QueryCamera(ctx, req, resp);
}, this)));
AddMethod(new ::grpc::internal::RpcServiceMethod(
Train_method_names[7],
::grpc::internal::RpcMethod::NORMAL_RPC,
new ::grpc::internal::RpcMethodHandler< Train::Service, ::MatrixAi::UpdateCameraRequest, ::MatrixAi::UpdateCameraReply, ::grpc::protobuf::MessageLite, ::grpc::protobuf::MessageLite>(
[](Train::Service* service,
::grpc::ServerContext* ctx,
const ::MatrixAi::UpdateCameraRequest* req,
::MatrixAi::UpdateCameraReply* resp) {
return service->UpdateCamera(ctx, req, resp);
}, this)));
AddMethod(new ::grpc::internal::RpcServiceMethod(
Train_method_names[8],
::grpc::internal::RpcMethod::NORMAL_RPC,
new ::grpc::internal::RpcMethodHandler< Train::Service, ::MatrixAi::QueryFTPRequest, ::MatrixAi::QueryFTPReply, ::grpc::protobuf::MessageLite, ::grpc::protobuf::MessageLite>(
[](Train::Service* service,
::grpc::ServerContext* ctx,
const ::MatrixAi::QueryFTPRequest* req,
::MatrixAi::QueryFTPReply* resp) {
return service->QueryFTP(ctx, req, resp);
}, this)));
AddMethod(new ::grpc::internal::RpcServiceMethod(
Train_method_names[9],
::grpc::internal::RpcMethod::NORMAL_RPC,
new ::grpc::internal::RpcMethodHandler< Train::Service, ::MatrixAi::UpdateFTPRequest, ::MatrixAi::UpdateFTPReply, ::grpc::protobuf::MessageLite, ::grpc::protobuf::MessageLite>(
[](Train::Service* service,
::grpc::ServerContext* ctx,
const ::MatrixAi::UpdateFTPRequest* req,
::MatrixAi::UpdateFTPReply* resp) {
return service->UpdateFTP(ctx, req, resp);
}, this)));
AddMethod(new ::grpc::internal::RpcServiceMethod(
Train_method_names[10],
::grpc::internal::RpcMethod::NORMAL_RPC,
new ::grpc::internal::RpcMethodHandler< Train::Service, ::MatrixAi::Request, ::MatrixAi::Reply, ::grpc::protobuf::MessageLite, ::grpc::protobuf::MessageLite>(
[](Train::Service* service,
::grpc::ServerContext* ctx,
const ::MatrixAi::Request* req,
::MatrixAi::Reply* resp) {
return service->RebootAI(ctx, req, resp);
}, this)));
}
Train::Service::~Service() {
}
::grpc::Status Train::Service::Come(::grpc::ServerContext* context, const ::MatrixAi::Request* request, ::MatrixAi::Reply* response) {
(void) context;
(void) request;
(void) response;
return ::grpc::Status(::grpc::StatusCode::UNIMPLEMENTED, "");
}
::grpc::Status Train::Service::Leave(::grpc::ServerContext* context, const ::MatrixAi::Request* request, ::MatrixAi::Reply* response) {
(void) context;
(void) request;
(void) response;
return ::grpc::Status(::grpc::StatusCode::UNIMPLEMENTED, "");
}
::grpc::Status Train::Service::TemporaryStop(::grpc::ServerContext* context, const ::MatrixAi::Request* request, ::MatrixAi::Reply* response) {
(void) context;
(void) request;
(void) response;
return ::grpc::Status(::grpc::StatusCode::UNIMPLEMENTED, "");
}
::grpc::Status Train::Service::Reversing(::grpc::ServerContext* context, const ::MatrixAi::Request* request, ::MatrixAi::Reply* response) {
(void) context;
(void) request;
(void) response;
return ::grpc::Status(::grpc::StatusCode::UNIMPLEMENTED, "");
}
::grpc::Status Train::Service::QueryWorkstation(::grpc::ServerContext* context, const ::MatrixAi::QueryWorkstationRequest* request, ::MatrixAi::QueryWorkstationReply* response) {
(void) context;
(void) request;
(void) response;
return ::grpc::Status(::grpc::StatusCode::UNIMPLEMENTED, "");
}
::grpc::Status Train::Service::UpdateWorkstation(::grpc::ServerContext* context, const ::MatrixAi::UpdateWorkstationRequest* request, ::MatrixAi::UpdateWorkstationReply* response) {
(void) context;
(void) request;
(void) response;
return ::grpc::Status(::grpc::StatusCode::UNIMPLEMENTED, "");
}
::grpc::Status Train::Service::QueryCamera(::grpc::ServerContext* context, const ::MatrixAi::QueryCameraRequest* request, ::MatrixAi::QueryCameraReply* response) {
(void) context;
(void) request;
(void) response;
return ::grpc::Status(::grpc::StatusCode::UNIMPLEMENTED, "");
}
::grpc::Status Train::Service::UpdateCamera(::grpc::ServerContext* context, const ::MatrixAi::UpdateCameraRequest* request, ::MatrixAi::UpdateCameraReply* response) {
(void) context;
(void) request;
(void) response;
return ::grpc::Status(::grpc::StatusCode::UNIMPLEMENTED, "");
}
::grpc::Status Train::Service::QueryFTP(::grpc::ServerContext* context, const ::MatrixAi::QueryFTPRequest* request, ::MatrixAi::QueryFTPReply* response) {
(void) context;
(void) request;
(void) response;
return ::grpc::Status(::grpc::StatusCode::UNIMPLEMENTED, "");
}
::grpc::Status Train::Service::UpdateFTP(::grpc::ServerContext* context, const ::MatrixAi::UpdateFTPRequest* request, ::MatrixAi::UpdateFTPReply* response) {
(void) context;
(void) request;
(void) response;
return ::grpc::Status(::grpc::StatusCode::UNIMPLEMENTED, "");
}
::grpc::Status Train::Service::RebootAI(::grpc::ServerContext* context, const ::MatrixAi::Request* request, ::MatrixAi::Reply* response) {
(void) context;
(void) request;
(void) response;
return ::grpc::Status(::grpc::StatusCode::UNIMPLEMENTED, "");
}
} // namespace MatrixAi
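For reference: every handler in the generated Train::Service base above returns UNIMPLEMENTED, so a concrete backend has to subclass it and override the RPCs it actually serves. The following is only a minimal usage sketch, not part of this commit; the generated header name (train.grpc.pb.h) and the listen address are assumptions.

#include <memory>
#include <grpcpp/grpcpp.h>
#include "train.grpc.pb.h"  // assumed name of the generated header

class TrainServiceImpl final : public MatrixAi::Train::Service {
    // Override one RPC so it no longer returns UNIMPLEMENTED.
    ::grpc::Status Reversing(::grpc::ServerContext* context,
                             const ::MatrixAi::Request* request,
                             ::MatrixAi::Reply* response) override {
        (void)context;
        (void)request;
        (void)response;  // fill in reply fields defined by the .proto here
        return ::grpc::Status::OK;
    }
};

int main() {
    TrainServiceImpl service;
    ::grpc::ServerBuilder builder;
    builder.AddListeningPort("0.0.0.0:50051", ::grpc::InsecureServerCredentials());  // assumed endpoint
    builder.RegisterService(&service);
    std::unique_ptr<::grpc::Server> server = builder.BuildAndStart();
    server->Wait();  // block until the server is shut down
    return 0;
}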

File diff suppressed because it is too large

View File

@ -149,7 +149,7 @@ void MergerAllEngine::PushData(std::shared_ptr<Train> pTrain)
pTrain->bMergerFlag = true;
std::shared_ptr<Train> pTrainToCsv = std::make_shared<Train>();
*pTrainToCsv = *pTrain;
if (bUploadFlag_)
{
queTrain_.push(pTrain);
@ -267,7 +267,7 @@ APP_ERROR MergerAllEngine::Process()
}
int iHaveDataCnt = QueueHaveDataCount();
//When the number of recognized target records equals the number of fetched records, merge them and push the result to the web
//When the number of recognized target records equals the number of fetched records, merge them and push the result to the web
if (iHaveDataCnt == iPopPortCnt_)
{
std::shared_ptr<Train> pTrain = nullptr;
@ -315,7 +315,7 @@ APP_ERROR MergerAllEngine::Process()
pTrain->container1 = pTrainContainer->container1;
pTrain->container2 = pTrainContainer->container2;
}
//When the locomotive leads, it is car 0; when the locomotive trails, it is the last car + 1
if(pTrain->trainNum.iTrainTypeId == 0 && iTrainIndex_ == 1)
{
@ -328,7 +328,9 @@ APP_ERROR MergerAllEngine::Process()
a problem that the web side cannot filter out
*/
if (!bHeadFrontFlag_ && pTrain->bIsEnd && pTrain->trainNum.iTrainTypeId == -1)
if (!bHeadFrontFlag_
&& pTrain->bIsEnd
&& pTrain->trainNum.iTrainTypeId == -1)
{
LogDebug << "cometime:" << pTrain->strTrainDate << " " << pTrain->strTrainName << " iCarXH:" << pTrain->iCarXH
<< " num:" << pTrain->trainNum.strTrainNum;
@ -346,6 +348,8 @@ APP_ERROR MergerAllEngine::Process()
}
}
// if (pTrain->trainPro.strLoad.size() > 2 && pTrain->trainPro.strLoad[0] != '1') pTrain->trainPro.strLoad = "";
PushData(pTrain);
//After the last car has been processed, reinitialize the parameters
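As the hunk above shows, MergerAllEngine only merges once every expected input port has produced a record for the current car (iHaveDataCnt == iPopPortCnt_). Below is a stand-alone sketch of that trigger, with hypothetical container types, since the real queue class is not shown in this diff.

#include <memory>
#include <queue>
#include <vector>

struct Train { bool bMergerFlag = false; };  // stand-in for the project's Train record

// Hypothetical restatement of the merge trigger: count the input queues that
// currently hold data and merge only when every expected port has delivered.
bool ReadyToMerge(const std::vector<std::queue<std::shared_ptr<Train>>>& inputQueues,
                  int iPopPortCnt)
{
    int iHaveDataCnt = 0;
    for (const auto& que : inputQueues)
    {
        if (!que.empty()) iHaveDataCnt++;
    }
    return iHaveDataCnt == iPopPortCnt;
}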

View File

@ -109,7 +109,7 @@ APP_ERROR LocalDataMoveEngine::Process()
pFtpData->strFtpFilePath = strImgPath;
pFtpData->strFtpFileName = strImgName;
pFtpData->bIsEnd = pProcessData->bIsEnd;
iRet = outputQueMap_[strPort0_]->push(std::static_pointer_cast<void>(pFtpData), true);
iRet = outputQueMap_[strPort0_]->push(std::static_pointer_cast<void>(pFtpData), false);
}
}
}

File diff suppressed because one or more lines are too long

View File

@ -45,10 +45,10 @@ APP_ERROR SaveImgEngine::Process()
LogWarn << "engineId_:" << engineId_ << " not use engine";
return APP_ERR_OK;
}
vector<int> compression_params;
compression_params.push_back(cv::IMWRITE_JPEG_QUALITY); //use JPEG encoding
compression_params.push_back(iPicQuality); //image quality
compression_params.push_back(cv::IMWRITE_JPEG_QUALITY); //use JPEG encoding
compression_params.push_back(iPicQuality); //image quality
iDirection_ = DIRECTION_UNKNOWN;
int iRet = APP_ERR_OK;
@ -61,7 +61,7 @@ APP_ERROR SaveImgEngine::Process()
usleep(1000);
continue;
}
std::shared_ptr<SaveImgData> pSaveImgData = std::static_pointer_cast<SaveImgData>(pvoidd);
//If a direction has been configured, frames with the wrong direction are filtered out directly, but end frames must not be filtered and need to flow on to the later Engines so that downstream processing stays correct.
@ -113,13 +113,13 @@ APP_ERROR SaveImgEngine::Process()
}
// 3. Save the image
if (pSaveImgData->pData != nullptr && pSaveImgData->iSize != 0)
{
cv::Mat matBGR(pSaveImgData->iHeight, pSaveImgData->iWidth, CV_8UC3, static_cast<uint8_t *>(pSaveImgData->pData.get())); //RGB
iWidth = pSaveImgData->iWidth;
iHeight = pSaveImgData->iHeight;
// cv::Mat mtOutImage;
// cv::cvtColor(cvimg, mtOutImage, cv::COLOR_RGB2BGR);

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

View File

@ -1,279 +1,279 @@
#include "SocketEngine.h"
#include "SocketEngine.h"
SocketEngine::SocketEngine()
{
isStop_ = false;
isStop_ = false;
}
SocketEngine::~SocketEngine() {}
APP_ERROR SocketEngine::Init()
{
strPort0_ = engineName_ + "_" + std::to_string(engineId_) + "_0";
strPort0_ = engineName_ + "_" + std::to_string(engineId_) + "_0";
this->socketOpenType_ = MyYaml::GetIns()->GetIntValue("socket_server_open");
this->socketPort_ = MyYaml::GetIns()->GetIntValue("socket_server_port");
this->socketQueueLen_ = MyYaml::GetIns()->GetIntValue("socket_server_queue_len");
this->socketOpenType_ = MyYaml::GetIns()->GetIntValue("socket_server_open");
this->socketPort_ = MyYaml::GetIns()->GetIntValue("socket_server_port");
this->socketQueueLen_ = MyYaml::GetIns()->GetIntValue("socket_server_queue_len");
MyShellInfo << "SocketEngine init ok";
return APP_ERR_OK;
MyShellInfo << "SocketEngine init ok";
return APP_ERR_OK;
}
APP_ERROR SocketEngine::DeInit()
{
for (int fd = 0; fd <= max_fd; ++fd)
{
if (FD_ISSET(fd, &master_set))
{
close(fd);
}
}
for (int fd = 0; fd <= max_fd; ++fd)
{
if (FD_ISSET(fd, &master_set))
{
close(fd);
}
}
MyShellInfo << "SocketEngine deinit ok";
return APP_ERR_OK;
MyShellInfo << "SocketEngine deinit ok";
return APP_ERR_OK;
}
APP_ERROR SocketEngine::Process()
{
int ret = APP_ERR_OK;
int ret = APP_ERR_OK;
if (this->socketOpenType_)
{
while (!isStop_)
{
if (!this->Socket_(this->socketPort_)) continue;
if (!this->Bind()) continue;
this->Listen(this->socketQueueLen_);
this->Run();
}
if (this->socketOpenType_)
{
while (!isStop_)
{
if (!this->Socket_(this->socketPort_)) continue;
if (!this->Bind()) continue;
this->Listen(this->socketQueueLen_);
this->Run();
}
}
else
{
while (!isStop_)
{
usleep(1000);
}
}
}
else
{
while (!isStop_)
{
usleep(1000);
}
}
return APP_ERR_OK;
return APP_ERR_OK;
}
bool SocketEngine::Socket_(int port)
{
bzero(&server_addr, sizeof(server_addr));
server_addr.sin_family = AF_INET;
server_addr.sin_addr.s_addr = htons(INADDR_ANY);
server_addr.sin_port = htons(port);
// create socket to listen
listen_fd = socket(PF_INET, SOCK_STREAM, 0);
if (listen_fd < 0)
{
LogError << "Create Scoket_Server Failed!";
return false;
}
int opt = 1;
setsockopt(listen_fd, SOL_SOCKET, SO_REUSEADDR, &opt, sizeof(opt));
return true;
bzero(&server_addr, sizeof(server_addr));
server_addr.sin_family = AF_INET;
server_addr.sin_addr.s_addr = htons(INADDR_ANY);
server_addr.sin_port = htons(port);
// create socket to listen
listen_fd = socket(PF_INET, SOCK_STREAM, 0);
if (listen_fd < 0)
{
LogError << "Create Scoket_Server Failed!";
return false;
}
int opt = 1;
setsockopt(listen_fd, SOL_SOCKET, SO_REUSEADDR, &opt, sizeof(opt));
return true;
}
bool SocketEngine::Bind()
{
if (-1 == (bind(listen_fd, (struct sockaddr*)&server_addr, sizeof(server_addr))))
{
LogError << "Scoket_Server Bind Failed!";
return false;
}
LogInfo << "Scoket_Server Bind Successfully.";
return true;
if (-1 == (bind(listen_fd, (struct sockaddr*)&server_addr, sizeof(server_addr))))
{
LogError << "Scoket_Server Bind Failed!";
return false;
}
LogInfo << "Scoket_Server Bind Successfully.";
return true;
}
bool SocketEngine::Listen(int queue_len)
{
if (-1 == listen(listen_fd, queue_len))
{
LogError << "Scoket_Server Listen Failed!";
return false;
}
LogInfo << "Scoket_Server Listen Successfully.";
return true;
if (-1 == listen(listen_fd, queue_len))
{
LogError << "Scoket_Server Listen Failed!";
return false;
}
LogInfo << "Scoket_Server Listen Successfully.";
return true;
}
bool SocketEngine::Accept()
{
struct sockaddr_in client_addr;
socklen_t client_addr_len = sizeof(client_addr);
struct sockaddr_in client_addr;
socklen_t client_addr_len = sizeof(client_addr);
int new_fd = accept(listen_fd, (struct sockaddr*)&client_addr, &client_addr_len);
if (new_fd < 0)
{
LogError << "Scoket_Server Accept Failed!";
return false;
}
int new_fd = accept(listen_fd, (struct sockaddr*)&client_addr, &client_addr_len);
if (new_fd < 0)
{
LogError << "Scoket_Server Accept Failed!";
return false;
}
std::string ip(inet_ntoa(client_addr.sin_addr)); // get the client IP
std::string ip(inet_ntoa(client_addr.sin_addr)); // get the client IP
LogInfo << ip << " new connection was accepted.";
LogInfo << ip << " new connection was accepted.";
mmap.insert(std::make_pair(new_fd, std::make_pair(ip, 0)));
mmap.insert(std::make_pair(new_fd, std::make_pair(ip, 0)));
// add the fd of the newly established connection to master_set
FD_SET(new_fd, &master_set);
if (new_fd > max_fd)
{
max_fd = new_fd;
}
return true;
// add the fd of the newly established connection to master_set
FD_SET(new_fd, &master_set);
if (new_fd > max_fd)
{
max_fd = new_fd;
}
return true;
}
bool SocketEngine::Recv(int nums)
{
for (int fd = 0; fd <= max_fd; ++fd)
{
if (FD_ISSET(fd, &working_set))
{
bool close_conn = false; // marks whether the current connection has been closed
for (int fd = 0; fd <= max_fd; ++fd)
{
if (FD_ISSET(fd, &working_set))
{
bool close_conn = false; // marks whether the current connection has been closed
char order_str[512] = { 0 };
char order_str[512] = { 0 };
int size_get = recv(fd, (char*)&order_str, sizeof(order_str) - 1, 0);
int size_get = recv(fd, (char*)&order_str, sizeof(order_str) - 1, 0);
if (size_get <= 0)
{
continue;
}
else
{
if (size_get <= 0)
{
continue;
}
else
{
bool isHeart_beat = (std::string(order_str) == "matrixai");
if (isHeart_beat)
{
mmap[fd].second = 0; // reset the count to 0 each time a heartbeat packet is received
//LogDebug << "Scoket_Server Received heart-beat from client.";
}
else
{
LogInfo << "Received message from client:" << std::string(order_str);
}
}
bool isHeart_beat = (std::string(order_str) == "matrixai");
if (isHeart_beat)
{
mmap[fd].second = 0; // reset the count to 0 each time a heartbeat packet is received
//LogDebug << "Scoket_Server Received heart-beat from client.";
}
else
{
LogInfo << "Received message from client:" << std::string(order_str);
}
}
if (close_conn) // this connection has a problem, close it
{
close(fd);
FD_CLR(fd, &master_set);
if (fd == max_fd) // max_fd needs to be updated
{
while (FD_ISSET(max_fd, &master_set) == false)
--max_fd;
}
}
}
}
if (close_conn) // this connection has a problem, close it
{
close(fd);
FD_CLR(fd, &master_set);
if (fd == max_fd) // max_fd needs to be updated
{
while (FD_ISSET(max_fd, &master_set) == false)
--max_fd;
}
}
}
}
return true;
return true;
}
bool SocketEngine::Run()
{
pthread_t id; // create the heartbeat-checking thread
int ret = pthread_create(&id, NULL, heart_handler, (void*)this);
if (ret != 0)
{
LogError << "Scoket_Server Can not create heart-beat checking thread.";
return false;
}
pthread_t id; // create the heartbeat-checking thread
int ret = pthread_create(&id, NULL, heart_handler, (void*)this);
if (ret != 0)
{
LogError << "Scoket_Server Can not create heart-beat checking thread.";
return false;
}
ret = pthread_create(&id, NULL, sendInfo, (void*)this);
if (ret != 0)
{
LogError << "Scoket_Server Can not create message send thread.";
return false;
}
ret = pthread_create(&id, NULL, sendInfo, (void*)this);
if (ret != 0)
{
LogError << "Scoket_Server Can not create message send thread.";
return false;
}
max_fd = listen_fd; // initialize max_fd
FD_ZERO(&master_set);
FD_SET(listen_fd, &master_set); // add the listening fd
max_fd = listen_fd; // initialize max_fd
FD_ZERO(&master_set);
FD_SET(listen_fd, &master_set); // add the listening fd
while (!isStop_)
{
FD_ZERO(&working_set);
memcpy(&working_set, &master_set, sizeof(master_set));
while (!isStop_)
{
FD_ZERO(&working_set);
memcpy(&working_set, &master_set, sizeof(master_set));
timeout.tv_sec = 30;
timeout.tv_usec = 0;
timeout.tv_sec = 30;
timeout.tv_usec = 0;
int nums = select(max_fd + 1, &working_set, NULL, NULL, &timeout);
/*if (nums < 0)
{
LogError << "Scoket_Server select() error!";
return false;
}*/
int nums = select(max_fd + 1, &working_set, NULL, NULL, &timeout);
/*if (nums < 0)
{
LogError << "Scoket_Server select() error!";
return false;
}*/
if (nums <= 0)
{
//cout << "select() is timeout!";
continue;
}
if (nums <= 0)
{
//cout << "select() is timeout!";
continue;
}
if (FD_ISSET(listen_fd, &working_set))
    Accept(); // a new client connection request arrived
else
    Recv(nums); // receive messages from the clients
if (FD_ISSET(listen_fd, &working_set))
    Accept(); // a new client connection request arrived
else
    Recv(nums); // receive messages from the clients
}
}
return true;
return true;
}
//parse the command
bool SocketEngine::getOrder(const std::string &recv, Json::Value &order)
{
Json::CharReaderBuilder readerBuilder;
std::shared_ptr<Json::CharReader> reader(readerBuilder.newCharReader());
Json::CharReaderBuilder readerBuilder;
std::shared_ptr<Json::CharReader> reader(readerBuilder.newCharReader());
JSONCPP_STRING errs;
if (!reader->parse(recv.data(), recv.data() + recv.size(), &order, &errs))
return false;
JSONCPP_STRING errs;
if (!reader->parse(recv.data(), recv.data() + recv.size(), &order, &errs))
return false;
if (order.isArray()) {
if (order.size() > 0) order = order[0];
return true;
}
return true;
if (order.isArray()) {
if (order.size() > 0) order = order[0];
return true;
}
return true;
}
std::string SocketEngine::getFeedBack(const std::string poundNo, const std::string type, const std::string info)
{
Json::Value feedBack;
Json::StreamWriterBuilder strbuild;
Json::Value feedBack;
Json::StreamWriterBuilder strbuild;
feedBack["poundNo"] = poundNo;
feedBack["type"] = type;
feedBack["info"] = info;
//
return Json::writeString(strbuild, feedBack);
feedBack["poundNo"] = poundNo;
feedBack["type"] = type;
feedBack["info"] = info;
//
return Json::writeString(strbuild, feedBack);
}
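getFeedBack above simply serializes three string fields with jsoncpp. The snippet below is a standalone illustration of the resulting payload, using made-up field values; jsoncpp's default StreamWriterBuilder writes the object with indentation, so the "indentation" setting is cleared here to get a compact single-line string.

#include <iostream>
#include <json/json.h>  // jsoncpp, the same library used above

int main() {
    Json::Value feedBack;
    feedBack["poundNo"] = "P001";   // made-up example values
    feedBack["type"] = "weigh";
    feedBack["info"] = "ok";
    Json::StreamWriterBuilder strbuild;
    strbuild["indentation"] = "";   // compact output
    // Prints: {"info":"ok","poundNo":"P001","type":"weigh"}
    std::cout << Json::writeString(strbuild, feedBack) << std::endl;
    return 0;
}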
void* SocketEngine::sendInfo(void* arg) {
SocketEngine* s = (SocketEngine*)arg;
while (!s->isStop_) {
//pop port 0
std::shared_ptr<void> pVoidData0 = nullptr;
s->inputQueMap_[s->strPort0_]->pop(pVoidData0);
if (nullptr == pVoidData0)
{
usleep(1000); //1ms
continue;
}
SocketEngine* s = (SocketEngine*)arg;
while (!s->isStop_) {
//pop port 0
std::shared_ptr<void> pVoidData0 = nullptr;
s->inputQueMap_[s->strPort0_]->pop(pVoidData0);
if (nullptr == pVoidData0)
{
usleep(1000); //1ms
continue;
}
std::shared_ptr<std::string> pMessage = std::static_pointer_cast<std::string>(pVoidData0);
std::shared_ptr<std::string> pMessage = std::static_pointer_cast<std::string>(pVoidData0);
SendAllClient(s->mmap, *pMessage);
}
SendAllClient(s->mmap, *pMessage);
}
}
@ -281,176 +281,176 @@ void* SocketEngine::sendInfo(void* arg) {
// thread function
void* SocketEngine::heart_handler(void* arg)
{
LogInfo << "Scoket_Server The heartbeat checking thread started.\n";
SocketEngine* s = (SocketEngine*)arg;
while (1)
{
std::map<int, std::pair<std::string, int> >::iterator it = s->mmap.begin();
for (; it != s->mmap.end(); )
{
if (it->second.second == 5) // no heartbeat packet received within sleep(3)*5, the client is judged to be offline
{
LogInfo << "The client " << it->second.first << " has be offline.\n";
LogInfo << "Scoket_Server The heartbeat checking thread started.\n";
SocketEngine* s = (SocketEngine*)arg;
while (1)
{
std::map<int, std::pair<std::string, int> >::iterator it = s->mmap.begin();
for (; it != s->mmap.end(); )
{
if (it->second.second == 5) // no heartbeat packet received within sleep(3)*5, the client is judged to be offline
{
LogInfo << "The client " << it->second.first << " has be offline.\n";
int fd = it->first;
close(fd); // close this connection
FD_CLR(fd, &s->master_set);
if (fd == s->max_fd) // max_fd needs to be updated
{
while (FD_ISSET(s->max_fd, &s->master_set) == false)
s->max_fd--;
}
int fd = it->first;
close(fd); // close this connection
FD_CLR(fd, &s->master_set);
if (fd == s->max_fd) // max_fd needs to be updated
{
while (FD_ISSET(s->max_fd, &s->master_set) == false)
s->max_fd--;
}
s->mmap.erase(it++); // remove this record from the map
}
else if (it->second.second < 5 && it->second.second >= 0)
{
it->second.second += 1;
++it;
}
else
{
++it;
}
}
sleep(3); // every three seconds
}
s->mmap.erase(it++); // remove this record from the map
}
else if (it->second.second < 5 && it->second.second >= 0)
{
it->second.second += 1;
++it;
}
else
{
++it;
}
}
sleep(1); // heartbeat check interval
}
}
bool SocketEngine::SendClient(std::map<int, std::pair<std::string, int> > mmap, const std::string ip, const std::string message) {
try
{
std::map<int, std::pair<std::string, int> >::iterator it = mmap.begin();
for (; it != mmap.end(); ++it)
{
if (it->second.first == ip) // iterate to find the radar IP
{
int fd = it->first;
try
{
std::map<int, std::pair<std::string, int> >::iterator it = mmap.begin();
for (; it != mmap.end(); ++it)
{
if (it->second.first == ip) // iterate to find the radar IP
{
int fd = it->first;
if (send(fd, message.c_str(), message.size(), 0) <= 0)
{
LogError << "Socker Server send message to IP:" << ip << " failed, message:" << message;
return false;
}
else
{
char clientFeedBack[256] = { 0 };
if (recv(fd, clientFeedBack, sizeof(clientFeedBack), 0) > 0)
{
Json::Value feedBack;
if (send(fd, message.c_str(), message.size(), 0) <= 0)
{
LogError << "Socker Server send message to IP:" << ip << " failed, message:" << message;
return false;
}
else
{
char clientFeedBack[256] = { 0 };
if (recv(fd, clientFeedBack, sizeof(clientFeedBack), 0) > 0)
{
Json::Value feedBack;
if (!getOrder(clientFeedBack, feedBack)) {
LogError << "Get Client IP:" << ip << " feekBack format is error : " << clientFeedBack;
return false;
}
if (!getOrder(clientFeedBack, feedBack)) {
LogError << "Get Client IP:" << ip << " feekBack format is error : " << clientFeedBack;
return false;
}
if (feedBack.get("success", "").asString() == "true")
{
return true;
}
else
{
LogError << "Client FeedBack Error: " << feedBack.get("error_msg", "").asString();
return false;
}
}
else {
if (feedBack.get("success", "").asString() == "true")
{
return true;
}
else
{
LogError << "Client FeedBack Error: " << feedBack.get("error_msg", "").asString();
return false;
}
}
else {
LogError << "Socker Server send message to IP:" << ip << " successful, But recv error!";
return false;
}
}
}
}
}
catch (const std::exception&)
{
LogError << "Socker Server send message to IP:" << ip << " failed, message:" << message;
return false;
}
LogError << "Socker Server send message to IP:" << ip << " failed, because no find connection";
return false;
LogError << "Socker Server send message to IP:" << ip << " successful, But recv error!";
return false;
}
}
}
}
}
catch (const std::exception&)
{
LogError << "Socker Server send message to IP:" << ip << " failed, message:" << message;
return false;
}
LogError << "Socker Server send message to IP:" << ip << " failed, because no find connection";
return false;
}
bool SocketEngine::SendAllClient(std::map<int, std::pair<std::string, int> > mmap, const std::string message) {
try
{
std::map<int, std::pair<std::string, int> >::iterator it = mmap.begin();
for (; it != mmap.end(); ++it)
{
int fd = it->first;
LogInfo << "Socket send IP: " << it->second.first << " msg:" << message;
if (send(fd, message.c_str(), message.size(), MSG_NOSIGNAL) <= 0)
{
LogError << "Socker Server send message to IP:" << it->second.first << " failed, message:" << message;
return false;
}
else
{
// char clientFeedBack[256] = { 0 };
// if (recv(fd, clientFeedBack, sizeof(clientFeedBack), 0) > 0)
// {
// Json::Value feedBack;
try
{
std::map<int, std::pair<std::string, int> >::iterator it = mmap.begin();
for (; it != mmap.end(); ++it)
{
int fd = it->first;
LogInfo << "Socket send IP: " << it->second.first << " msg:" << message;
if (send(fd, message.c_str(), message.size(), MSG_NOSIGNAL) <= 0)
{
LogError << "Socker Server send message to IP:" << it->second.first << " failed, message:" << message;
return false;
}
else
{
// char clientFeedBack[256] = { 0 };
// if (recv(fd, clientFeedBack, sizeof(clientFeedBack), 0) > 0)
// {
// Json::Value feedBack;
// if (!getOrder(clientFeedBack, feedBack)) {
// LogError << "Get Client IP: "<< it->second.first << " feekBack format is error : " << clientFeedBack;
// return false;
// }
// if (!getOrder(clientFeedBack, feedBack)) {
// LogError << "Get Client IP: "<< it->second.first << " feekBack format is error : " << clientFeedBack;
// return false;
// }
// if (feedBack.get("success", "").asString() == "true")
// {
// return true;
// }
// else
// {
// LogError << "Client FeedBack Error: " << feedBack.get("error_msg", "").asString();
// return false;
// }
// }
// else {
// if (feedBack.get("success", "").asString() == "true")
// {
// return true;
// }
// else
// {
// LogError << "Client FeedBack Error: " << feedBack.get("error_msg", "").asString();
// return false;
// }
// }
// else {
// LogError << "Socker Server send message to IP:" << it->second.first << " successful, But recv error!";
// return false;
// }
}
}
}
catch (const std::exception&)
{
LogError << "Socker Server send message to all client failed, message:" << message;
return false;
}
return false;
// LogError << "Socker Server send message to IP:" << it->second.first << " successful, But recv error!";
// return false;
// }
}
}
}
catch (const std::exception&)
{
LogError << "Socker Server send message to all client failed, message:" << message;
return false;
}
return false;
}
std::string SocketEngine::HexToStr(const std::string &str)
{
std::string hex = str;
long len = hex.length();
std::string newString;
for (long i = 0; i < len; i += 2)
{
std::string byte = hex.substr(i, 2);
char chr = (char)(int)strtol(byte.c_str(), NULL, 16);
newString.push_back(chr);
}
return newString;
std::string hex = str;
long len = hex.length();
std::string newString;
for (long i = 0; i < len; i += 2)
{
std::string byte = hex.substr(i, 2);
char chr = (char)(int)strtol(byte.c_str(), NULL, 16);
newString.push_back(chr);
}
return newString;
}
std::string SocketEngine::StrToHex(const std::string &str)
{
unsigned char c;
char buf[2];
std::string result = "";
std::stringstream ss;
ss << str;
while (ss.read((char*)(&c), sizeof(c)))
{
sprintf(buf, "%02x", c);
result += buf;
}
return result;
unsigned char c;
char buf[2];
std::string result = "";
std::stringstream ss;
ss << str;
while (ss.read((char*)(&c), sizeof(c)))
{
sprintf(buf, "%02x", c);
result += buf;
}
return result;
}
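The heart_handler above drops a client after five consecutive check intervals without the literal heartbeat token "matrixai", so any client of this SocketEngine has to send that token periodically. Below is a minimal client sketch using plain POSIX sockets; the address and port are placeholders (the real port comes from the socket_server_port key in the YAML config), not values taken from this commit.

#include <arpa/inet.h>
#include <netinet/in.h>
#include <string>
#include <sys/socket.h>
#include <unistd.h>

int main()
{
    int fd = socket(AF_INET, SOCK_STREAM, 0);
    if (fd < 0) return 1;

    sockaddr_in addr{};
    addr.sin_family = AF_INET;
    addr.sin_port = htons(9000);                      // placeholder port
    inet_pton(AF_INET, "127.0.0.1", &addr.sin_addr);  // placeholder server address
    if (connect(fd, reinterpret_cast<sockaddr*>(&addr), sizeof(addr)) < 0)
    {
        close(fd);
        return 1;
    }

    const std::string heartbeat = "matrixai";
    while (true)
    {
        // The server resets this client's miss counter whenever it receives the token.
        if (send(fd, heartbeat.c_str(), heartbeat.size(), MSG_NOSIGNAL) <= 0) break;
        sleep(1);  // stay well inside the server's five-miss timeout window
    }
    close(fd);
    return 0;
}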

View File

@ -1,118 +0,0 @@
#include "PostTrainAnaEngine.h"
using namespace ai_matrix;
PostTrainAnaEngine::PostTrainAnaEngine() {}
PostTrainAnaEngine::~PostTrainAnaEngine() {}
APP_ERROR PostTrainAnaEngine::Init()
{
strPort0_ = engineName_ + "_" + std::to_string(engineId_) + "_0";
strResultPath_ = MyYaml::GetIns()->GetPathValue("gc_result_path");
std::map<int, ai_matrix::DataSourceConfig> mapUseDataSouceCfg = MyYaml::GetIns()->GetUseDataSourceConfig();
for (auto iter = mapUseDataSouceCfg.begin(); iter != mapUseDataSouceCfg.end(); iter++)
{
//Port 0 is the main camera and pushes the image-save data; the other data sources in use must also be pushed an (arrival / end) notification
if (iter->first != 0)
{
setPushPort_.insert(iter->first);
}
else
{
dataSourceCfg_ = iter->second;
}
}
InitParam();
LogInfo << "PostTrainAnaEngine Init ok";
return APP_ERR_OK;
}
APP_ERROR PostTrainAnaEngine::DeInit()
{
LogInfo << "PostTrainAnaEngine DeInit ok";
return APP_ERR_OK;
}
/**
* ()
* inParam : N/A
* outParam: N/A
* return : N/A
*/
void PostTrainAnaEngine::InitParam()
{
iMoveDataNO_ = dataSourceCfg_.iSkipInterval;
}
APP_ERROR PostTrainAnaEngine::Process()
{
int iRet = APP_ERR_OK;
while (!isStop_)
{
std::shared_ptr<void> pVoidData0 = nullptr;
inputQueMap_[strPort0_]->pop(pVoidData0);
if (nullptr == pVoidData0)
{
usleep(1000); //1ms
continue;
}
std::shared_ptr<DecodedData> pDecodedData = std::static_pointer_cast<DecodedData>(pVoidData0);
if (pDecodedData->iStatus == TRAINSTATUS_NO && !pDecodedData->bIsEnd)
{
continue;
}
//Assemble the data and push it to the other ports (only 2 notifications: once when the train arrives and once when it ends)
if (iMoveDataNO_ == dataSourceCfg_.iSkipInterval || pDecodedData->bIsEnd)
{
LogDebug << "traindate:" << pDecodedData->strTrainDate << " trainname:" << pDecodedData->strTrainName
<< " frameid:" << iMoveDataNO_ << " isEnd:" << pDecodedData->bIsEnd;
uint32_t iFrameId = iMoveDataNO_ / dataSourceCfg_.iSkipInterval * dataSourceCfg_.iSkipInterval;
std::shared_ptr<MoveData> pMoveData = std::make_shared<MoveData>();
pMoveData->iFrameId = iFrameId; //current frame number
pMoveData->i64TimeStamp = pDecodedData->i64TimeStamp;
pMoveData->bHasTrain = true;
pMoveData->bIsEnd = pDecodedData->bIsEnd;
pMoveData->strTrainDate = pDecodedData->strTrainDate;
pMoveData->strTrainName = pDecodedData->strTrainName;
pMoveData->iDirection = pDecodedData->iDirection;
for (auto iter = setPushPort_.begin(); iter != setPushPort_.end(); iter++)
{
outputQueMap_[engineName_ + "_" + std::to_string(engineId_) + "_" + std::to_string(*iter)]->push(std::static_pointer_cast<void>(pMoveData));
}
outputQueMap_[engineName_ + "_" + std::to_string(engineId_) + "_5"]->push(std::static_pointer_cast<void>(pMoveData));
}
//Frame skipping: save the image
if (iMoveDataNO_ % dataSourceCfg_.iSkipInterval == 0 || pDecodedData->bIsEnd)
{
    // push to the port to save the image
    std::shared_ptr<SaveImgData> pSaveImgData = std::make_shared<SaveImgData>();
    pSaveImgData->pDecodeData = pDecodedData;
    pSaveImgData->iFrameId = iMoveDataNO_; //frame number
char szCameraNo[4] = {0};
sprintf(szCameraNo, "%03d", pDecodedData->iDataSource + 1);
pSaveImgData->strImgPath = strResultPath_ + pDecodedData->strTrainDate + "/" + pDecodedData->strTrainName + "/" + szCameraNo;
pSaveImgData->strImgName = std::to_string(pSaveImgData->iFrameId) + ".jpg";
pSaveImgData->bIsEnd = pDecodedData->bIsEnd;
pSaveImgData->bSaveToFtp = true;
pSaveImgData->i64TimeStamp = pDecodedData->i64TimeStamp;
pSaveImgData->iDirection = pDecodedData->iDirection;
iRet = outputQueMap_[strPort0_]->push(std::static_pointer_cast<void>(pSaveImgData));
}
iMoveDataNO_++;
//On the end frame, or when the train stops, the related information must be reinitialized
if (pDecodedData->bIsEnd)
{
InitParam();
}
}
return APP_ERR_OK;
}
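The (now removed) PostTrainAnaEngine above saved only every iSkipInterval-th frame plus the end frame; the selection rule it applied reduces to the small predicate below. This is a restatement for clarity under those assumptions, not code from the repository.

#include <cstdint>

// Save every iSkipInterval-th frame, and always save the end frame.
inline bool ShouldSaveFrame(uint32_t iMoveDataNO, int iSkipInterval, bool bIsEnd)
{
    return bIsEnd || (iSkipInterval > 0 && iMoveDataNO % iSkipInterval == 0);
}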

View File

@ -1,41 +0,0 @@
/**
*
* */
#ifndef POSTTRAINANAENGINE_H
#define POSTTRAINANAENGINE_H
#include "AppCommon.h"
#include "MyYaml.h"
#include "myutils.h"
#include "EngineBase.h"
#include "EngineFactory.h"
class PostTrainAnaEngine : public ai_matrix::EngineBase
{
public:
PostTrainAnaEngine();
~PostTrainAnaEngine();
APP_ERROR Init() override;
APP_ERROR DeInit() override;
APP_ERROR Process() override;
private:
//parameter initialization
void InitParam();
std::string strPort0_;
uint32_t iMoveDataNO_ = 1; //motion-detection data index
std::string strTrainData_;
std::string strTrainName_;
std::string strResultPath_;
std::set<int> setPushPort_;
ai_matrix::DataSourceConfig dataSourceCfg_;
};
ENGINE_REGIST(PostTrainAnaEngine)
#endif

View File

@ -1,107 +0,0 @@
/**
*
* */
#ifndef TRAINANAENGINE_H
#define TRAINANAENGINE_H
#include <algorithm>
#include "AppCommon.h"
#include "MyYaml.h"
#include "myutils.h"
#include "EngineBase.h"
#include "EngineFactory.h"
#include "math.h"
#include "opencv2/core/core.hpp"
#include "opencv2/highgui/highgui.hpp"
#include "opencv2/imgproc/imgproc.hpp"
#include "opencv2/imgcodecs/imgcodecs.hpp"
using namespace cv;
using namespace std;
typedef void* LPVOID;
class TrainAnaEngine : public ai_matrix::EngineBase
{
public:
TrainAnaEngine();
~TrainAnaEngine();
APP_ERROR Init() override;
APP_ERROR DeInit() override;
APP_ERROR Process() override;
private:
//////////////////////////////Variables//////////////////////////////////////////
//engine variables
std::string strPort0_;
std::string strPort1_;
ai_matrix::DataSourceConfig dataSourceConfig_;
std::string strResultPath_;
std::string strResultPath_for_test;
std::string strTrainData_;
std::string strTrainName_;
//partition-algorithm parameters and the live values computed by the algorithm
std::vector<AnalyseInfo> lstAction; //reference parameter set for the train-movement detection algorithm
std::vector<AnalyseInfo> lstPartion; //reference parameter set for the car-partition algorithm
std::vector<PartionInfo> lstPartInfo; //car-partition result of the current train
int nRightComeFlagR1; //state-change value of the rightmost detection region
int nRightComeFlagR2; //state-change value of the second-rightmost detection region
int nRightComeFlagR3; //state-change value of the third-rightmost detection region
//dynamic data
int nStatus; //train status
int nPreStatus; //train status of the previous frame
int nPicAreaChangeing; //image-region change status (left to right)
int nPrePicAreaChangeing; //image-region change status of the previous frame (left to right)
int nSamePartionIgnoreCount; //number of frames skipped after a coupler is detected
int nPartionPassFrameCount; //number of frames passed since a coupler was detected
int nTailPixOffset; //pixel offset of the tail coupler
float fdefaultspeed; //default train speed
cv::Mat cvFirstImg; //baseline image for comparison
uint64_t ncurtime;
//image related
long nLatestFrame; //largest frame number so far
int nFrameRate; //camera frame rate
unsigned int nRecIndex; //record index used for testing
//////////////////////////////Functions//////////////////////////////////////////
//basic functions
void InitParam(); //parameter initialization
void checkAction(cv::Mat baseimg, uint64_t i64TimeStamp); //train-movement detection
bool checkPartion(cv::Mat baseimg, uint64_t i64TimeStamp); //car-partition detection
//utility functions
void vformatStructAnalyseInfo(std::vector<std::string> elems, AnalyseInfo &info); //format the contents read from the csv file into the struct variable
void getsetting(string strFilePath); //read the csv file of algorithm parameters
double anapicbyHist(cv::Mat baseimg, cv::Mat tarpic, int method); //comparison based on the shape of the detected object
double anapicbyTemple(cv::Mat baseimg, cv::Mat tarpic, int method, cv::Point &pPos); //comparison based on the histogram
double anapicbySpec(cv::Mat baseimg);
void vResetPartion(); //reset the car-partition information
int getOffsetFrame(float fspeed, int nFrameRate, int width); //compute the frame count from the coupler position
int getCouplerOffsetPosition(float fspeed, int nframeindex); //compute the x coordinate of the coupler in the image (predicted value, not the actual one)
int getTailPixOffset(); //compute the x coordinate of the tail coupler in the image (predicted value, not the actual one)
float GetPointMaxReduceMin(float dComparePoint,int index); //difference between the maximum and minimum of dComparePoint over the last 100 frames
//image-processing functions
cv::Mat mtdecodeImageDatabyFile(string path); //read an image file and convert it to Mat format
cv::Mat mtdecodeImageDatabyBin(LPVOID lpimgdata, int imglen); //convert binary image data to Mat format
cv::Mat mtImproveImage(cv::Mat inImage, AnalyseInfo info, bool bcvread); //image enhancement: grayscale / histogram equalization / gamma transform / custom optimization according to the configuration
cv::Mat mtresizeImage(cv::Mat inImage, int width, int height); //resize the car image to a fixed size
cv::Mat mtareaImage(cv::Mat inImage, int x1, int y1, int x2, int y2); //extract the sensitive-region image data from the image
cv::Mat mtgrayImage(cv::Mat inImage, int mode); //convert the image to grayscale
cv::Mat mtequalizeImage(cv::Mat inImage); //histogram equalization of the image
cv::Mat mtGammaImage(cv::Mat inImage, float fgamma); //gamma transform of the image
cv::Mat mtspecImage(cv::Mat inImage); //custom image optimization
std::vector<double >getCompPoint(AnalyseInfo info, cv::Mat baseimg, cv::Mat tarpic); //get the image comparison values
};
ENGINE_REGIST(TrainAnaEngine)
#endif

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

View File

@ -1,137 +0,0 @@
#include "ImgDecodeEngine.h"
#include <algorithm>
#include <string>
#include <regex>
#include <sys/stat.h>
using namespace std;
using namespace cv;
using namespace ai_matrix;
ImgDecodeEngine::ImgDecodeEngine() {}
ImgDecodeEngine::~ImgDecodeEngine() {}
APP_ERROR ImgDecodeEngine::Init()
{
bUseEngine_ = true;
dataSourceConfig_ = MyYaml::GetIns()->GetDataSourceConfigById(engineId_); //get the camera parameters
if (MyYaml::GetIns()->GetStringValue("gc_data_source") != "images" || !dataSourceConfig_.bUse)
{
bUseEngine_ = false;
LogWarn << "engineId_:" << engineId_ << " not use engine";
return APP_ERR_OK;
}
strPort0_ = engineName_ + "_" + std::to_string(engineId_) + "_0";
width_ = IMAGE_WIDTH, height_ = IMAGE_HEIGHT;
LogInfo << "ImgDecodeEngine Init ok";
return APP_ERR_OK;
}
APP_ERROR ImgDecodeEngine::DeInit()
{
if (!bUseEngine_)
{
LogWarn << "engineId_:" << engineId_ << " not use engine";
return APP_ERR_OK;
}
LogInfo << "ImgDecodeEngine deinit ok";
return APP_ERR_OK;
}
APP_ERROR ImgDecodeEngine::Process()
{
if (!bUseEngine_)
{
LogWarn << "engineId_:" << engineId_ << " not use engine";
return APP_ERR_OK;
}
// #ifdef SAVE_BGR2RGB_FILE
// char rgb_stream[256] = {};
// sprintf(rgb_stream, "bgr2rgb_stream_%dx%d.rgb", width_, height_);
// FILE *rgb_stream_fp = fopen(rgb_stream, "ab+");
// #endif
uint64_t u64count_num = 0;
int iRet = APP_ERR_OK;
while (!isStop_)
{
std::shared_ptr<void> pVoidData0 = nullptr;
inputQueMap_[strPort0_]->pop(pVoidData0);
if (nullptr == pVoidData0)
{
usleep(1*1000); //n ms
continue;
}
//pass through to the next Engine
outputQueMap_[strPort0_]->push(pVoidData0);
// // std::cout<<"Enter BGR2RGBEngine Thread "<<++u64count_num<<" Times!"<<std::endl;
// // std::cout<<"BGR2RGBEngine Thread ID: "<<std::this_thread::get_id()<<std::endl;
// //received the BGR data
// std::shared_ptr<FrameData> pBGRFrameData = std::static_pointer_cast<FrameData>(pVoidData0);
// //construct the RGB data
// void* pRGBBuffer = nullptr;
// unsigned int pRGBBuffer_Size = width_*height_*3;
// pRGBBuffer = new uint8_t[pRGBBuffer_Size];
// std::shared_ptr<FrameData> pRGBFrameData = std::make_shared<FrameData>();
// //pixel format conversion
// cv::Mat BGRImage(height_, width_, CV_8UC3, static_cast<uint8_t *>(pBGRFrameData->pData.get())); //BGR
// cv::Mat RGBImage(height_, width_, CV_8UC3, pRGBBuffer); //RGB
// #ifdef OPENCV_CVTCOLOR_BGR2RGB_TIME_CONSUMING_TEST
// auto start = std::chrono::system_clock::now(); //timing start
// cv::cvtColor(BGRImage, RGBImage, cv::COLOR_BGR2RGB); //pixel format conversion, BGR to RGB
// auto end = std::chrono::system_clock::now(); //timing end
// std::cout << "frame width: "<<width_<<" frame height:"<<height_<<" opencv cvtColor BGR2RGB time: " << std::chrono::duration_cast<std::chrono::milliseconds>(end - start).count() << "ms" << std::endl;
// #else
// cv::cvtColor(BGRImage, RGBImage, cv::COLOR_BGR2RGB); //pixel format conversion, BGR to RGB
// #endif
// //push the data after pixel-format conversion
// //assemble the data
// pRGBFrameData->iDataSource = engineId_;
// pRGBFrameData->iSize = pRGBBuffer_Size;
// pRGBFrameData->pData.reset(pRGBBuffer, [](void* data){if(data) {delete[] data; data = nullptr;}}); //memory managed by the smart pointer
// // pFrameData->pData.reset(pRGBBuffer, Deleter); //memory managed by the smart pointer
// pRGBFrameData->i64TimeStamp = pBGRFrameData->i64TimeStamp;
// //enable this macro to save the converted RGB data
// #ifdef SAVE_BGR2RGB_FILE
// if (rgb_stream_fp)
// {
// fwrite(pRGBFrameData->pData.get(), 1, pRGBFrameData->iSize, rgb_stream_fp);
// fflush(rgb_stream_fp);
// fsync(fileno(rgb_stream_fp));
// }
// #endif
// std::cout<<"port0 push the rgb frame data!"<<std::endl;
// #if 1
// iRet = outputQueMap_[strPort0_]->push(std::static_pointer_cast<void>(pRGBFrameData));
// if (iRet != APP_ERR_OK){
// LogError << "push the rgb frame data failed...";
// // std::cerr<<"push the rgb frame data failed..."<<std::endl;
// }else{
// std::cout<<"push the rgb frame data success!"<<std::endl;
// }
// #endif
}
// #ifdef SAVE_BGR2RGB_FILE
// fclose(rgb_stream_fp);
// if(rgb_stream_fp){
// rgb_stream_fp = nullptr;
// }
// #endif
}

View File

@ -1,35 +0,0 @@
/**
*
* */
#ifndef IMGDECODEENGINE_H
#define IMGDECODEENGINE_H
#include "AppCommon.h"
#include "EngineBase.h"
#include "EngineFactory.h"
#include "MyYaml.h"
#include "myutils.h"
class ImgDecodeEngine : public ai_matrix::EngineBase
{
public:
ImgDecodeEngine();
~ImgDecodeEngine();
APP_ERROR Init() override;
APP_ERROR DeInit() override;
APP_ERROR Process() override;
private:
bool bUseEngine_;
std::string strPort0_;
ai_matrix::DataSourceConfig dataSourceConfig_;
unsigned int width_, height_;
};
ENGINE_REGIST(ImgDecodeEngine)
#endif

View File

@ -315,6 +315,20 @@ void TrainStepOneEngine::FilterInvalidInfo(std::vector<stDetection> &vecRet, std
continue;
}
if (it->class_id == K_TRAIN_NUM)
{
int iCenterY = pProcessData->iHeight / 2;
int iHeight0 = it->bbox[1] / 2 + it->bbox[3] / 2;
if (iHeight0 > iCenterY) {
LogWarn << "矿车编号大框在画面Y轴中线以下帧号:"
<< pProcessData->iFrameId
<< " 画面Y轴中心" << iCenterY
<< " 大框Y轴中心" << iHeight0 ;
it = vecRet.erase(it);
continue;
}
}
//The cameras at Bulianta are fairly close, and the gap is basically at the bottom of the frame, so gaps that sit relatively high in the frame are filtered out.
if ((it->class_id >= 9 && it->class_id <= 17 && it->class_id != 15) || it->class_id == U_TRAIN_SPACE)
{
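The check added in the hunk above removes K_TRAIN_NUM boxes whose vertical midpoint lies below the image centerline. Assuming bbox stores corner coordinates [x1, y1, x2, y2], which is what the arithmetic bbox[1]/2 + bbox[3]/2 suggests, the test can be read as the small helper below; this is a restatement for clarity, not code from the repository.

// Restated filter, assuming bbox = {x1, y1, x2, y2} in pixel coordinates.
// A wagon-number box whose vertical midpoint falls below the image centerline
// is treated as a false detection and dropped.
inline bool IsBelowImageCenterline(const float bbox[4], int iImageHeight)
{
    const float fBoxCenterY = bbox[1] / 2.0f + bbox[3] / 2.0f;  // (y1 + y2) / 2
    const float fImageCenterY = iImageHeight / 2.0f;
    return fBoxCenterY > fImageCenterY;
}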

View File

@ -239,7 +239,7 @@ APP_ERROR TrainStepTwoEngine::Process()
// push to port 0, step-1 inference
pProcessData->pVoidData = std::static_pointer_cast<void>(pPostData);
iRet = outputQueMap_[strPort0_]->push(std::static_pointer_cast<void>(pProcessData), true);
iRet = outputQueMap_[strPort0_]->push(std::static_pointer_cast<void>(pProcessData));
// PushData(strPort0_, pProcessData);
}
return APP_ERR_OK;