Train_Identify_arm/nvidia_ascend_engine/common_engine/DataSourceEngine/CameraEngine.cpp

#include "CameraEngine.h"
#include "myutils.h"
using namespace ai_matrix;

namespace {

const int LOW_THRESHOLD = 128;   // minimum frame width/height accepted for DVPP video decode
const int MAX_THRESHOLD = 4096;  // maximum frame width/height accepted for DVPP video decode

const uint16_t DELAY_TIME = 20000;  // per-packet delay in microseconds (20 ms) for non-RTSP sources

}  // namespace
CameraEngine::CameraEngine() {}
CameraEngine::~CameraEngine() {}

APP_ERROR CameraEngine::Init() {
    bUseEngine_ = true;
    bHwDecode_ = MyYaml::GetIns()->GetBoolValue("gc_hardware_decode");

    dataSourceConfig_ = MyYaml::GetIns()->GetDataSourceConfigById(engineId_); // fetch the camera parameters
    if (MyYaml::GetIns()->GetStringValue("gc_data_source") != "camera" || !dataSourceConfig_.bUse) {
        bUseEngine_ = false;
        LogWarn << "engineId_:" << engineId_ << " not use engine";
        return APP_ERR_OK;
    }
    strPort0_ = engineName_ + "_" + std::to_string(engineId_) + "_0";
    strPort1_ = engineName_ + "_" + std::to_string(engineId_) + "_1";
    nDelayTime = MyYaml::GetIns()->GetIntValue("gc_load_delay");
    LogInfo << "engineId_:" << engineId_ << " CameraEngine Init ok";
    return APP_ERR_OK;
}
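
// Illustrative only: the keys read in Init() (gc_data_source, gc_hardware_decode, gc_load_delay,
// plus the per-source bUse/strUrl fields returned by GetDataSourceConfigById) suggest a YAML layout
// roughly like the sketch below. The real schema is owned by MyYaml and may differ.
//   gc_data_source: camera
//   gc_hardware_decode: true
//   gc_load_delay: 0
//   data_sources:
//     - id: 0
//       use: true
//       url: rtsp://<camera-ip>:554/stream1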

APP_ERROR CameraEngine::DeInit() {
    if (!bUseEngine_) {
        LogWarn << "engineId_:" << engineId_ << " not use engine";
        return APP_ERR_OK;
    }
    ResetCamera();
    LogInfo << "engineId_:" << engineId_ << " CameraEngine DeInit ok";
    return APP_ERR_OK;
}

void CameraEngine::ResetCamera() {
    if (pFormatCtx_ != nullptr) {
        // close the input and release the cached stream context
        avformat_close_input(&pFormatCtx_);
        pFormatCtx_ = nullptr;  // avformat_close_input() already nulls the pointer; kept for clarity
    }
}

APP_ERROR CameraEngine::ConnectCamera() {
    pFormatCtx_ = CreateFormatContext(); // open the stream and create the demuxer context
    if (pFormatCtx_ == nullptr) {
        LogError << "engineId_:" << engineId_ << " pFormatCtx_ null!";
        return APP_ERR_COMM_FAILURE;
    }
    // last argument 0 marks this as an input (not an output) context
    av_dump_format(pFormatCtx_, 0, dataSourceConfig_.strUrl.c_str(), 0);
    // get stream information
    int iRet = APP_ERR_OK;
    iRet = GetStreamInfo();
    if (iRet != APP_ERR_OK) {
        LogError << "engineId_:" << engineId_ << " Stream Info Check failed, iRet = " << iRet;
        return APP_ERR_COMM_FAILURE;
    }
    return APP_ERR_OK;
}
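
// ConnectCamera() opens the URL via FFmpeg and validates the stream (resolution bounds,
// H.264/H.265 profile) for the downstream DVPP decoder. When it fails, Process() resets the
// context and retries every 3 seconds until the camera comes back.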

APP_ERROR CameraEngine::GetStreamInfo() {
    if (pFormatCtx_ != nullptr) {
        iVideoStream_ = -1;
        iAudioStream_ = -1;
        //frameInfo_.iFrameId = 0; // frame id starts at 0
        for (unsigned int i = 0; i < pFormatCtx_->nb_streams; i++) {
            AVStream *inStream = pFormatCtx_->streams[i];
            if (inStream->codecpar->codec_type == AVMEDIA_TYPE_VIDEO) {
                iVideoStream_ = i;
                frameInfo_.iHeight = inStream->codecpar->height;
                frameInfo_.iWidth = inStream->codecpar->width;
                // Prefer the average frame rate; if it is unset ({x,0} or {0,1}), fall back to the
                // real-time frame rate r_frame_rate.
                if (inStream->avg_frame_rate.den == 0 ||
                    (inStream->avg_frame_rate.num == 0 && inStream->avg_frame_rate.den == 1)) {
                    frameInfo_.iRate = inStream->r_frame_rate.num / inStream->r_frame_rate.den;
                } else {
                    frameInfo_.iRate = inStream->avg_frame_rate.num / inStream->avg_frame_rate.den;
                }
                LogDebug << "engineId_:" << engineId_ << " width:" << frameInfo_.iWidth << " height:"
                         << frameInfo_.iHeight
                         << " rate:" << frameInfo_.iRate << " iVideoStream_:" << iVideoStream_;
            } else if (inStream->codecpar->codec_type == AVMEDIA_TYPE_AUDIO) {
                iAudioStream_ = i;
                LogDebug << "engineId_:" << engineId_ << " iAudioStream_:" << iAudioStream_;
            }
        }

        if (iVideoStream_ == -1) {
            LogError << "engineId_:" << engineId_ << " Didn't find a video stream!";
            return APP_ERR_COMM_FAILURE;
        }
        if (frameInfo_.iHeight < LOW_THRESHOLD || frameInfo_.iWidth < LOW_THRESHOLD ||
            frameInfo_.iHeight > MAX_THRESHOLD || frameInfo_.iWidth > MAX_THRESHOLD) {
            LogError << "engineId_:" << engineId_ << " Size of frame is not supported in DVPP Video Decode!";
            return APP_ERR_COMM_FAILURE;
        }
        AVCodecID codecId = pFormatCtx_->streams[iVideoStream_]->codecpar->codec_id;
        if (codecId == AV_CODEC_ID_H264) {
            int profile = pFormatCtx_->streams[iVideoStream_]->codecpar->profile;
            if (profile == FF_PROFILE_H264_BASELINE) {
                frameInfo_.format = H264_BASELINE_LEVEL;
            } else if (profile == FF_PROFILE_H264_MAIN) {
                frameInfo_.format = H264_MAIN_LEVEL;
            } else if (profile == FF_PROFILE_H264_HIGH) {
                frameInfo_.format = H264_HIGH_LEVEL;
            } else {
                LogError << "engineId_:" << engineId_ << " not support h264 profile";
                return APP_ERR_COMM_FAILURE;
            }
        } else if (codecId == AV_CODEC_ID_H265) {
            int profile = pFormatCtx_->streams[iVideoStream_]->codecpar->profile;
            if (profile == FF_PROFILE_HEVC_MAIN) {
                frameInfo_.format = H265_MAIN_LEVEL;
            } else {
                LogError << "engineId_:" << engineId_ << " not support h265 profile";
                return APP_ERR_COMM_FAILURE;
            }
        } else {
            LogError << "engineId_:" << engineId_ << " Error unsupported format" << codecId;
            return APP_ERR_COMM_FAILURE;
        }
    }
    return APP_ERR_OK;
}
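
// A more defensive frame-rate computation (sketch only, not used by this engine): av_q2d()
// sidesteps the manual divide-by-zero checks above. The 25 fps fallback is an assumption.
//   AVRational r = inStream->avg_frame_rate;
//   if (r.num == 0 || r.den == 0) { r = inStream->r_frame_rate; }
//   frameInfo_.iRate = (r.num != 0 && r.den != 0) ? (int) lrint(av_q2d(r)) : 25;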

AVFormatContext *CameraEngine::CreateFormatContext() {
    // create the demuxer context used to pull the stream
    AVFormatContext *pFormatContext = nullptr;
    AVDictionary *pOptions = nullptr;
    // formatContext = avformat_alloc_context();
    if (dataSourceConfig_.strUrl.find("rtsp:") != std::string::npos) // rtsp
    {
        av_dict_set(&pOptions, "rtsp_transport", "tcp", 0); // pull RTSP over TCP
        av_dict_set(&pOptions, "stimeout", "3000000", 0);   // socket timeout in microseconds (3 s)
    }
    av_register_all();       // register all supported formats (required here, otherwise protocol parsing fails; deprecated and unnecessary on FFmpeg 4.0+)
    avcodec_register_all();  // register codecs
    avformat_network_init(); // initialise network protocols (can be dropped for local files)
    int iRet = avformat_open_input(&pFormatContext, dataSourceConfig_.strUrl.c_str(), nullptr, &pOptions);
    if (nullptr != pOptions) {
        av_dict_free(&pOptions);
    }
    if (iRet != 0) {
        LogError << "engineId_:" << engineId_ << " Couldn't open input stream " << dataSourceConfig_.strUrl.c_str()
                 << ", iRet=" << iRet;
        return nullptr;
    }
    // pFormatContext->flags |= AVFMT_FLAG_NONBLOCK;
    // pFormatContext->pb->flags |= AVIO_FLAG_NONBLOCK;
    // av_dict_set(&pFormatContext->interrupt_callback.callback, "timeout", "3000", 0);
    // iRet = avio_open2(&pFormatContext->pb, dataSourceConfig_.strUrl.c_str(), AVIO_FLAG_READ, NULL, NULL) < 0;
    // {
    //     // handle the error
    //     LogError << "engineId_:" << engineId_ << "avio_open2 iRet=" << iRet;
    //     return nullptr;
    // }
    iRet = avformat_find_stream_info(pFormatContext, nullptr);
    if (iRet < 0) { // avformat_find_stream_info() returns >= 0 on success
        LogError << "engineId_:" << engineId_ << " Couldn't find stream information, iRet = " << iRet;
        return nullptr;
    }
    return pFormatContext;
}
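
// Sketch (not wired in): "stimeout" only applies to the RTSP demuxer, so for other inputs the
// open/probe calls above can still block indefinitely. One assumed way to bound them is to
// pre-allocate the context and attach the same interrupt callback before avformat_open_input()
// (url/opts below are placeholders for the real arguments):
//   AVFormatContext *ctx = avformat_alloc_context();
//   TimeoutContext tctx = { /* deadline in ms since epoch */ };
//   ctx->interrupt_callback.callback = InterruptCallback;
//   ctx->interrupt_callback.opaque = &tctx;
//   avformat_open_input(&ctx, url, nullptr, &opts);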

// Interrupt callback for av_read_frame: returns AVERROR_EXIT once the stored deadline has passed.
int CameraEngine::InterruptCallback(void *pData) {
    TimeoutContext *pTimeOutCtx = (TimeoutContext *) pData;
    LogDebug << "InterruptCallback i64Timeout:" << pTimeOutCtx->i64Timeout;
    return std::chrono::duration_cast<std::chrono::milliseconds>(
               std::chrono::system_clock::now().time_since_epoch())
                   .count() >= pTimeOutCtx->i64Timeout
               ? AVERROR_EXIT
               : 0;
}
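
// How the timeout works: FFmpeg polls interrupt_callback.callback during blocking I/O; a nonzero
// return makes the pending call (here av_read_frame) abort with AVERROR_EXIT. Process() re-arms
// the deadline (now + 1000 ms) in a TimeoutContext before every read, so a stalled camera cannot
// block the loop for more than about a second.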

APP_ERROR CameraEngine::Process() {
    int iRet = APP_ERR_OK;
    if (!bUseEngine_) {
        LogWarn << "engineId_:" << engineId_ << " not use engine";
        return APP_ERR_OK;
    }
    if (bHwDecode_) {
        iRet = ConnectCamera();
        if (iRet == APP_ERR_OK) {
            LogInfo << "engineId_:" << engineId_ << " Start the stream......";
            bReconnectFlag_ = false;
        } else {
            ResetCamera();
            bReconnectFlag_ = true;
        }
        // Pull data cyclically
        AVPacket pkt;
        while (!isStop_) {
            // reconnect the camera if the previous connection was lost
            if (bReconnectFlag_) {
                iRet = ConnectCamera();
                if (iRet == APP_ERR_OK) {
                    LogInfo << "engineId_:" << engineId_ << " Start the stream......";
                    bReconnectFlag_ = false;
                } else {
                    // outputQueMap_[strPort1_]->push(std::static_pointer_cast<void>(std::make_shared<std::string>("摄像头连接失败!")));
                    ResetCamera();
                    std::this_thread::sleep_for(std::chrono::seconds(3)); // retry after 3 seconds
                    continue;
                }
            }
            // Arm the av_read_frame interrupt callback: abort the read if it blocks for more than 1 s.
            TimeoutContext timeoutCtx = {std::chrono::duration_cast<std::chrono::milliseconds>(
                std::chrono::system_clock::now().time_since_epoch()).count() + 1000};
            pFormatCtx_->interrupt_callback.callback = InterruptCallback;
            pFormatCtx_->interrupt_callback.opaque = &timeoutCtx;
            av_init_packet(&pkt); // init pkt
            iRet = av_read_frame(pFormatCtx_, &pkt); // keep reading continuously, otherwise stale buffered data is returned
            // check whether the end of the stream was reached
            if (iRet == AVERROR_EOF) {
                LogInfo << "CameraEngine--av_read_frame--end";
                // break;
            }
            if (iRet != 0) {
                outputQueMap_[strPort1_]->push(
                    std::static_pointer_cast<void>(std::make_shared<std::string>("图像读取失败!")));
                LogError << "engineId_:" << engineId_ << " Read frame failed, reconnect iRet:" << iRet;
                av_packet_unref(&pkt);
                // reconnect the camera
                ResetCamera();
                bReconnectFlag_ = true;
                continue;
            } else if (pkt.stream_index == iVideoStream_) // only the video stream is decoded
            {
                // LogDebug << "iRet:" << iRet << " pkt.size:" << pkt.size;
                if (pkt.size <= 0) {
                    LogError << "engineId_:" << engineId_ << " Invalid pkt.size: " << pkt.size;
                    av_packet_unref(&pkt);
                    continue;
                }
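
                // MP4 inputs typically carry H.264 in AVCC form ("avc1"): each NAL unit is prefixed
                // with a 4-byte big-endian length instead of the Annex-B start code 00 00 00 01 that
                // the downstream decoder expects. The block below rewrites each length field into a
                // start code in place (one packet may contain several NAL units).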
                if (dataSourceConfig_.strUrl.find(".mp4") != std::string::npos) {
                    const char szStartCode[4] = {0, 0, 0, 1};
                    if (bIsAvc_ || memcmp(szStartCode, pkt.data, 4) != 0) { // avc1 stream: no Annex-B start codes yet
                        int iLen = 0;
                        uint8_t *p = pkt.data;
                        bIsAvc_ = true;
                        do { // add a start code for each NAL; one frame may contain multiple NALs
                            iLen = ntohl(*((uint32_t *) p)); // 4-byte NAL length (was read through long, which is 8 bytes on LP64)
                            memcpy(p, szStartCode, 4);
                            p += 4;
                            p += iLen;
                            if (p >= pkt.data + pkt.size) {
                                break;
                            }
                        } while (1);
                    }
                }

                void *pH264Buffer = nullptr;
                pH264Buffer = new uint8_t[pkt.size];
                memcpy(pH264Buffer, pkt.data, pkt.size);
                // assemble the frame data
                std::shared_ptr<ProcessData> pProcessData = std::make_shared<ProcessData>();
                pProcessData->iWidth = frameInfo_.iWidth;
                pProcessData->iHeight = frameInfo_.iHeight;
                pProcessData->iRate = frameInfo_.iRate;
                pProcessData->i64TimeStamp = MyUtils::getins()->GetCurrentTimeMillis();
                pProcessData->iDataSource = engineId_;
                pProcessData->iSize = pkt.size;
                pProcessData->pData.reset(pH264Buffer, [](void *data) {
                    if (data) {
                        delete[] static_cast<uint8_t *>(data); // cast before delete[]: deleting through void* is undefined
                        data = nullptr;
                    }
                }); // the shared_ptr owns the buffer
                // push to port 0 for video decoding
                iRet = outputQueMap_[strPort0_]->push(std::static_pointer_cast<void>(pProcessData), true);
                if (iRet != APP_ERR_OK) {
                    LogError << "engineId_:" << engineId_ << "push the h264 frame data failed...";
                }
            } else if (pkt.stream_index == iAudioStream_) {
                // audio stream: ignored
            } else {
                LogError << "engineId_:" << engineId_ << " stream err stream_index:" << pkt.stream_index;
            }
            av_packet_unref(&pkt); // unref
            if (dataSourceConfig_.strUrl.find("rtsp:") == std::string::npos) // pace non-RTSP (file) input
            {
                usleep(DELAY_TIME); // delay 20 ms per packet (DELAY_TIME is in microseconds)
            }
        }
    } else {
        // Software path: pull the RTSP stream through a GStreamer pipeline, decode with
        // nvv4l2decoder, and hand BGRx frames to OpenCV via appsink.
        const std::string videoStreamAddress = std::string("rtspsrc location=") + dataSourceConfig_.strUrl.c_str() + " latency=10 ! \
rtph264depay ! h264parse ! nvv4l2decoder enable-max-performance=1 enable-frame-type-reporting=1 ! nvvidconv ! video/x-raw, format=(string)BGRx ! videoconvert ! appsink";
        VideoCapture capture_video;
        while (!capture_video.open(videoStreamAddress)) { //, cv::CAP_FFMPEG
            LogInfo << "Restart Opening video stream or file ..." << std::endl;
            sleep(1);
        }
        LogInfo << "Opening video stream or file Success:" << engineId_;
        int frameW = capture_video.get(3);    // CAP_PROP_FRAME_WIDTH
        int frameH = capture_video.get(4);    // CAP_PROP_FRAME_HEIGHT
        int frameRate = capture_video.get(5); // CAP_PROP_FPS
        usleep(nDelayTime * 1000 * 1000); // gc_load_delay seconds converted to microseconds
        int nFrameid = 0;
        bool breadend = false;
        cv::Mat frame(frameH, frameW, CV_8UC3);
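
        // The loop below reads decoded frames, resizes them to IMAGE_WIDTH x IMAGE_HEIGHT, converts
        // BGR to RGB, wraps the pixels in FrameData/ProcessData and pushes them to port 0. If a read
        // fails, the capture is reopened with the same pipeline until it succeeds.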
        while (!isStop_) {
            std::shared_ptr<FrameData> pBGRFrameData = std::make_shared<FrameData>();
            std::shared_ptr<ProcessData> pProcessData = std::make_shared<ProcessData>();
            if (!capture_video.read(frame)) {
                capture_video.release();
                while (!capture_video.open(videoStreamAddress)) { //, cv::CAP_FFMPEG
                    LogInfo << "Restart Opening video stream or file ..." << std::endl;
                    sleep(1);
                }
                continue;
            }
            unsigned int resizepBGRBuffer_Size = IMAGE_WIDTH * IMAGE_HEIGHT * 3;
            cv::Mat mtInImage, mtOutImage;
            cv::resize(frame, mtInImage, cv::Size(IMAGE_WIDTH, IMAGE_HEIGHT));
            cv::cvtColor(mtInImage, mtOutImage, cv::COLOR_BGR2RGB);
            void *resizeBGRBufferobj = nullptr;
            resizeBGRBufferobj = new uint8_t[resizepBGRBuffer_Size];
            memcpy(resizeBGRBufferobj, mtOutImage.data, resizepBGRBuffer_Size);

            pBGRFrameData->iDataSource = engineId_;
            pBGRFrameData->iFrameId = nFrameid++;
            pBGRFrameData->iSize = resizepBGRBuffer_Size;
            pBGRFrameData->frameInfo.iWidth = IMAGE_WIDTH;
            pBGRFrameData->frameInfo.iHeight = IMAGE_HEIGHT;
            pBGRFrameData->frameInfo.iRate = frameRate;
            pProcessData->pVoidData = std::static_pointer_cast<void>(pBGRFrameData);
            pProcessData->pData.reset(resizeBGRBufferobj, [](void *data) {
                if (data) {
                    delete[] static_cast<uint8_t *>(data); // cast before delete[]: deleting through void* is undefined
                    data = nullptr;
                }
            });
            if (nFrameid >= 0xFFFFFFFF) { nFrameid = 0; }
            pBGRFrameData->i64TimeStamp = MyUtils::getins()->GetCurrentTimeMillis();
            pProcessData->iWidth = pBGRFrameData->frameInfo.iWidth;
            pProcessData->iHeight = pBGRFrameData->frameInfo.iHeight;
            pProcessData->iRate = pBGRFrameData->frameInfo.iRate; // was mistakenly assigned to iHeight, which dropped the rate and overwrote the height
            iRet = outputQueMap_[strPort0_]->push(std::static_pointer_cast<void>(pProcessData), true);
        }
    }
    return APP_ERR_OK;
}