#include "CameraEngine.h"
|
||
#include "myutils.h"
|
||
|
||
using namespace ai_matrix;
|
||
|
||
namespace
|
||
{
|
||
const int LOW_THRESHOLD = 128;
|
||
const int MAX_THRESHOLD = 4096;
|
||
const uint16_t DELAY_TIME = 10000;
|
||
}
|
||
|
||
CameraEngine::CameraEngine() {}
|
||
|
||
CameraEngine::~CameraEngine() {}
|
||
|
||
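// Init() reads the engine configuration: whether hardware decode is enabled (gc_hardware_decode),
// the camera parameters for this engine id, the data source type (gc_data_source), and the load
// delay (gc_load_delay). It also builds the two output port names:
//   strPort0_ ("<engineName>_<engineId>_0") carries the video data (ProcessData),
//   strPort1_ ("<engineName>_<engineId>_1") carries status/error strings.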
APP_ERROR CameraEngine::Init()
{
    bUseEngine_ = true;
    bHwDecode_ = MyYaml::GetIns()->GetBoolValue("gc_hardware_decode");

    dataSourceConfig_ = MyYaml::GetIns()->GetDataSourceConfigById(engineId_); // fetch the camera parameters for this engine
    if (MyYaml::GetIns()->GetStringValue("gc_data_source") != "camera" || !dataSourceConfig_.bUse)
    {
        bUseEngine_ = false;
        LogWarn << "engineId_:" << engineId_ << " engine not in use";
        return APP_ERR_OK;
    }

    strPort0_ = engineName_ + "_" + std::to_string(engineId_) + "_0";
    strPort1_ = engineName_ + "_" + std::to_string(engineId_) + "_1";
    nDelayTime = MyYaml::GetIns()->GetIntValue("gc_load_delay");

    LogInfo << "engineId_:" << engineId_ << " CameraEngine Init ok";
    return APP_ERR_OK;
}

APP_ERROR CameraEngine::DeInit()
{
    if (!bUseEngine_)
    {
        LogWarn << "engineId_:" << engineId_ << " engine not in use";
        return APP_ERR_OK;
    }
    ResetCamera();
    LogInfo << "engineId_:" << engineId_ << " CameraEngine DeInit ok";
    return APP_ERR_OK;
}

void CameraEngine::ResetCamera()
{
    if (pFormatCtx_ != nullptr)
    {
        // close the input and release the demuxer context so the next connect starts clean
        avformat_close_input(&pFormatCtx_);
        pFormatCtx_ = nullptr;
    }
}

APP_ERROR CameraEngine::ConnectCamera()
{
    pFormatCtx_ = CreateFormatContext(); // create the demuxer context
    if (pFormatCtx_ == nullptr)
    {
        LogError << "engineId_:" << engineId_ << " pFormatCtx_ null!";
        return APP_ERR_COMM_FAILURE;
    }

    // the last argument 0 marks this as an input (not output) context
    av_dump_format(pFormatCtx_, 0, dataSourceConfig_.strUrl.c_str(), 0);

    // get stream information
    int iRet = GetStreamInfo();
    if (iRet != APP_ERR_OK)
    {
        LogError << "engineId_:" << engineId_ << " Stream Info Check failed, iRet = " << iRet;
        return APP_ERR_COMM_FAILURE;
    }

    return APP_ERR_OK;
}

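// GetStreamInfo() scans the demuxer's streams to locate the video (and audio) stream, records the
// frame width/height and frame rate, rejects resolutions outside the range supported by DVPP video
// decode, and maps the H.264/H.265 profile onto the frameInfo_.format value used downstream.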
APP_ERROR CameraEngine::GetStreamInfo()
{
    if (pFormatCtx_ != nullptr)
    {
        iVideoStream_ = -1;
        iAudioStream_ = -1;
        // frameInfo_.iFrameId = 0; // frame numbering starts at 0

        for (unsigned int i = 0; i < pFormatCtx_->nb_streams; i++)
        {
            AVStream *inStream = pFormatCtx_->streams[i];
            if (inStream->codecpar->codec_type == AVMEDIA_TYPE_VIDEO)
            {
                iVideoStream_ = i;
                frameInfo_.iHeight = inStream->codecpar->height;
                frameInfo_.iWidth = inStream->codecpar->width;

                // Get the frame rate: prefer the stream's average frame rate; if avg_frame_rate
                // is {x, 0} or {0, 1} (i.e. unknown), fall back to the real-time frame rate.
                if (inStream->avg_frame_rate.den == 0 || (inStream->avg_frame_rate.num == 0 && inStream->avg_frame_rate.den == 1))
                {
                    frameInfo_.iRate = inStream->r_frame_rate.num / inStream->r_frame_rate.den;
                }
                else
                {
                    frameInfo_.iRate = inStream->avg_frame_rate.num / inStream->avg_frame_rate.den;
                }
                LogDebug << "engineId_:" << engineId_ << " width:" << frameInfo_.iWidth << " height:" << frameInfo_.iHeight
                         << " rate:" << frameInfo_.iRate << " iVideoStream_:" << iVideoStream_;
            }
            else if (inStream->codecpar->codec_type == AVMEDIA_TYPE_AUDIO)
            {
                iAudioStream_ = i;
                LogDebug << "engineId_:" << engineId_ << " iAudioStream_:" << iAudioStream_;
            }
        }

        if (iVideoStream_ == -1)
        {
            LogError << "engineId_:" << engineId_ << " Didn't find a video stream!";
            return APP_ERR_COMM_FAILURE;
        }

        if (frameInfo_.iHeight < LOW_THRESHOLD || frameInfo_.iWidth < LOW_THRESHOLD ||
            frameInfo_.iHeight > MAX_THRESHOLD || frameInfo_.iWidth > MAX_THRESHOLD)
        {
            LogError << "engineId_:" << engineId_ << " Size of frame is not supported in DVPP Video Decode!";
            return APP_ERR_COMM_FAILURE;
        }

        AVCodecID codecId = pFormatCtx_->streams[iVideoStream_]->codecpar->codec_id;
        if (codecId == AV_CODEC_ID_H264)
        {
            int profile = pFormatCtx_->streams[iVideoStream_]->codecpar->profile;
            if (profile == FF_PROFILE_H264_BASELINE)
            {
                frameInfo_.format = H264_BASELINE_LEVEL;
            }
            else if (profile == FF_PROFILE_H264_MAIN)
            {
                frameInfo_.format = H264_MAIN_LEVEL;
            }
            else if (profile == FF_PROFILE_H264_HIGH)
            {
                frameInfo_.format = H264_HIGH_LEVEL;
            }
            else
            {
                LogError << "engineId_:" << engineId_ << " unsupported H264 profile: " << profile;
                return APP_ERR_COMM_FAILURE;
            }
        }
        else if (codecId == AV_CODEC_ID_H265)
        {
            int profile = pFormatCtx_->streams[iVideoStream_]->codecpar->profile;
            if (profile == FF_PROFILE_HEVC_MAIN)
            {
                frameInfo_.format = H265_MAIN_LEVEL;
            }
            else
            {
                LogError << "engineId_:" << engineId_ << " unsupported H265 profile: " << profile;
                return APP_ERR_COMM_FAILURE;
            }
        }
        else
        {
            LogError << "engineId_:" << engineId_ << " unsupported codec id: " << codecId;
            return APP_ERR_COMM_FAILURE;
        }
    }
    return APP_ERR_OK;
}

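// CreateFormatContext() opens the configured URL with avformat_open_input() and probes it with
// avformat_find_stream_info(). For RTSP sources it forces TCP transport and a 3 second timeout
// via the options dictionary. Returns nullptr on failure.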
AVFormatContext *CameraEngine::CreateFormatContext()
{
    // options used when pulling the stream
    AVFormatContext *pFormatContext = nullptr;
    AVDictionary *pOptions = nullptr;

    // formatContext = avformat_alloc_context();
    if (dataSourceConfig_.strUrl.find("rtsp:") != std::string::npos) // rtsp source
    {
        av_dict_set(&pOptions, "rtsp_transport", "tcp", 0); // use TCP as the RTSP transport
        // av_dict_set(&pOptions, "stimeout", "3000000", 0); // 3 second timeout
        // av_dict_set(&pOptions, "rw_timeout", "3000", 0);  // unit: ms
        av_dict_set(&pOptions, "timeout", "3000000", 0); // 3 second timeout (in microseconds)
    }

    // av_register_all();       // register all supported formats (needed on older FFmpeg, otherwise protocol parsing fails)
    // avcodec_register_all();  // register all codecs
    // avformat_network_init(); // initialize networking; can be omitted for local files

    int iRet = avformat_open_input(&pFormatContext, dataSourceConfig_.strUrl.c_str(), nullptr, &pOptions);
    if (nullptr != pOptions)
    {
        av_dict_free(&pOptions);
    }
    if (iRet != 0)
    {
        LogError << "engineId_:" << engineId_ << " Couldn't open input stream " << dataSourceConfig_.strUrl.c_str() << ", iRet=" << iRet;
        return nullptr;
    }

    // pFormatContext->flags |= AVFMT_FLAG_NONBLOCK;
    // pFormatContext->pb->flags |= AVIO_FLAG_NONBLOCK;
    // av_dict_set(&pFormatContext->interrupt_callback.callback, "timeout", "3000", 0);
    // iRet = avio_open2(&pFormatContext->pb, dataSourceConfig_.strUrl.c_str(), AVIO_FLAG_READ, NULL, NULL) < 0;
    // {
    //     // handle the error
    //     LogError << "engineId_:" << engineId_ << "avio_open2 iRet=" << iRet;
    //     return nullptr;
    // }

    iRet = avformat_find_stream_info(pFormatContext, nullptr);
    if (iRet != 0)
    {
        LogError << "engineId_:" << engineId_ << " Couldn't find stream information, iRet = " << iRet;
        return nullptr;
    }
    return pFormatContext;
}

// Interrupt callback for av_read_frame (returns AVERROR_EXIT once the deadline has passed)
// int CameraEngine::InterruptCallback(void *pData)
// {
//     TimeoutContext* pTimeOutCtx = (TimeoutContext*)pData;
//     LogDebug << "InterruptCallback i64Timeout:" << pTimeOutCtx->i64Timeout;
//     return std::chrono::duration_cast<std::chrono::milliseconds>(
//                std::chrono::system_clock::now().time_since_epoch())
//                    .count() >= pTimeOutCtx->i64Timeout
//                ? AVERROR_EXIT
//                : 0;
// }

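// Process() is the engine's main loop and has two paths:
//  - bHwDecode_ == true: demux the source with FFmpeg and push the raw H.264/H.265 packets
//    (as ProcessData) to port 0 for DVPP hardware decoding; connection and read failures are
//    reported on port 1 and followed by a reconnect.
//  - bHwDecode_ == false: pull and decode the stream through a GStreamer/OpenCV VideoCapture
//    pipeline instead, resize and convert each frame to RGB, and push the frames to port 0.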
APP_ERROR CameraEngine::Process()
{
    int iRet = APP_ERR_OK;
    if (!bUseEngine_)
    {
        LogWarn << "engineId_:" << engineId_ << " engine not in use";
        return APP_ERR_OK;
    }

    if (bHwDecode_)
    {
        iRet = ConnectCamera();
        if (iRet == APP_ERR_OK)
        {
            LogInfo << "engineId_:" << engineId_ << " Start the stream......";
            bReconnectFlag_ = false;
        }
        else
        {
            ResetCamera();
            bReconnectFlag_ = true;
        }

        // Pull data cyclically
        AVPacket pkt;

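        // Demux loop: keep reading packets from the camera; on read failure report the error on
        // port 1, drop the connection and retry, and forward only video packets to port 0.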
        while (!isStop_)
        {
            // reconnect the camera if the last connect or read failed
            if (bReconnectFlag_)
            {
                iRet = ConnectCamera();
                if (iRet == APP_ERR_OK)
                {
                    LogInfo << "engineId_:" << engineId_ << " Start the stream......";
                    bReconnectFlag_ = false;
                }
                else
                {
                    outputQueMap_[strPort1_]->push(std::static_pointer_cast<void>(std::make_shared<std::string>("Camera connection failed!")));
                    ResetCamera();
                    std::this_thread::sleep_for(std::chrono::seconds(3)); // retry after 3 seconds
                    continue;
                }
            }

            // Set the av_read_frame interrupt callback (abort the read once it exceeds 1 s)
            // TimeoutContext timeoutCtx = { std::chrono::duration_cast<std::chrono::milliseconds>(std::chrono::system_clock::now().time_since_epoch()).count() + 1000 };
            // pFormatCtx_->interrupt_callback.callback = &CameraEngine::InterruptCallback;
            // pFormatCtx_->interrupt_callback.opaque = &timeoutCtx;

            av_init_packet(&pkt); // init pkt

            iRet = av_read_frame(pFormatCtx_, &pkt); // keep reading continuously, otherwise stale data is returned
            if (iRet != 0)
            {
                outputQueMap_[strPort1_]->push(std::static_pointer_cast<void>(std::make_shared<std::string>("Failed to read frame!")));
                LogError << "engineId_:" << engineId_ << " Read frame failed, reconnect iRet:" << iRet;
                av_packet_unref(&pkt);

                // reconnect the camera
                ResetCamera();

                bReconnectFlag_ = true;
                continue;
            }
            else if (pkt.stream_index == iVideoStream_) // only the video stream is decoded
            {
                // LogDebug << "iRet:" << iRet << " pkt.size:" << pkt.size;
                if (pkt.size <= 0)
                {
                    LogError << "engineId_:" << engineId_ << " Invalid pkt.size: " << pkt.size;
                    av_packet_unref(&pkt);
                    continue;
                }
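                // MP4 containers typically carry H.264 in "avc1"/avcC form, where each NAL unit is
                // preceded by a 4-byte big-endian length instead of an Annex-B start code. The
                // decoder downstream expects Annex-B, so each length prefix is overwritten in place
                // with the 00 00 00 01 start code below.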
                if (dataSourceConfig_.strUrl.find(".mp4") != std::string::npos)
                {
                    const char szStartCode[4] = {0, 0, 0, 1};
                    if (bIsAvc_ || (pkt.size >= 4 && memcmp(szStartCode, pkt.data, 4) != 0))
                    { // avc1 packets carry no H264 start codes
                        int iLen = 0;
                        uint8_t *p = pkt.data;
                        bIsAvc_ = true;
                        do
                        { // add a start code for each NAL; one frame may contain multiple NALs
                            iLen = ntohl(*((uint32_t *)p)); // 4-byte big-endian NAL length
                            memcpy(p, szStartCode, 4);
                            p += 4;
                            p += iLen;
                            if (p >= pkt.data + pkt.size)
                            {
                                break;
                            }
                        } while (1);
                    }
                }

                void *pH264Buffer = new uint8_t[pkt.size];
                memcpy(pH264Buffer, pkt.data, pkt.size);
                // assemble the output data
                std::shared_ptr<ProcessData> pProcessData = std::make_shared<ProcessData>();
                pProcessData->iWidth = frameInfo_.iWidth;
                pProcessData->iHeight = frameInfo_.iHeight;
                pProcessData->iRate = frameInfo_.iRate;
                pProcessData->i64TimeStamp = MyUtils::getins()->GetCurrentTimeMillis();
                pProcessData->iDataSource = engineId_;
                pProcessData->iSize = pkt.size;
                // the shared_ptr owns the buffer; cast back to uint8_t* so delete[] matches the new[]
                pProcessData->pData.reset(pH264Buffer, [](void *data) { delete[] static_cast<uint8_t *>(data); });

                // push to port 0 for video decoding
                iRet = outputQueMap_[strPort0_]->push(std::static_pointer_cast<void>(pProcessData));
                if (iRet != APP_ERR_OK)
                {
                    LogError << "engineId_:" << engineId_ << " push the h264 frame data failed...";
                }
            }
            else if (pkt.stream_index == iAudioStream_)
            {
                // audio streams are not processed
            }
            else
            {
                LogError << "engineId_:" << engineId_ << " stream err stream_index:" << pkt.stream_index;
            }
            av_packet_unref(&pkt); // release the packet

            if (dataSourceConfig_.strUrl.find("rtsp:") == std::string::npos) // not an RTSP source (e.g. a local file): pace the reads
            {
                usleep(DELAY_TIME); // delay 10 ms between frames
            }
        }
    }
    else
    {
        // Pull the RTSP stream from the camera through a GStreamer pipeline and OpenCV VideoCapture
        const std::string videoStreamAddress = std::string("rtspsrc location=") + dataSourceConfig_.strUrl +
            " latency=10 ! rtph264depay ! h264parse ! nvv4l2decoder enable-max-performance=1 enable-frame-type-reporting=1"
            " ! nvvidconv ! video/x-raw, format=(string)BGRx ! videoconvert ! appsink";

        cv::VideoCapture capture_video;
        while (!capture_video.open(videoStreamAddress)) // , cv::CAP_FFMPEG
        {
            LogInfo << "Retrying to open the video stream or file ...";
            sleep(1);
        }
        LogInfo << "Opening video stream or file succeeded:" << engineId_;

        int frameW = capture_video.get(cv::CAP_PROP_FRAME_WIDTH);
        int frameH = capture_video.get(cv::CAP_PROP_FRAME_HEIGHT);
        int frameRate = capture_video.get(cv::CAP_PROP_FPS);
        usleep(nDelayTime * 1000 * 1000); // wait gc_load_delay seconds before publishing frames
        uint32_t nFrameid = 0;
        cv::Mat frame(frameH, frameW, CV_8UC3);

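        // Per-frame loop: read a BGR frame, resize it to IMAGE_WIDTH x IMAGE_HEIGHT, convert it to
        // RGB, wrap it in FrameData/ProcessData, and publish it on port 0; if the read fails,
        // reopen the capture and continue.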
        while (!isStop_)
        {
            std::shared_ptr<FrameData> pBGRFrameData = std::make_shared<FrameData>();
            std::shared_ptr<ProcessData> pProcessData = std::make_shared<ProcessData>();
            if (!capture_video.read(frame))
            {
                capture_video.release();
                while (!capture_video.open(videoStreamAddress)) // , cv::CAP_FFMPEG
                {
                    LogInfo << "Retrying to open the video stream or file ...";
                    sleep(1);
                }
                continue;
            }
            unsigned int resizepBGRBuffer_Size = IMAGE_WIDTH * IMAGE_HEIGHT * 3;

            cv::Mat mtInImage, mtOutImage;
            cv::resize(frame, mtInImage, cv::Size(IMAGE_WIDTH, IMAGE_HEIGHT));
            cv::cvtColor(mtInImage, mtOutImage, cv::COLOR_BGR2RGB);

            void *resizeBGRBufferobj = new uint8_t[resizepBGRBuffer_Size];
            memcpy(resizeBGRBufferobj, mtOutImage.data, resizepBGRBuffer_Size);

            pBGRFrameData->iDataSource = engineId_;
            pBGRFrameData->iFrameId = nFrameid++;
            pBGRFrameData->iSize = resizepBGRBuffer_Size;
            pBGRFrameData->frameInfo.iWidth = IMAGE_WIDTH;
            pBGRFrameData->frameInfo.iHeight = IMAGE_HEIGHT;
            pBGRFrameData->frameInfo.iRate = frameRate;
            pProcessData->pVoidData = std::static_pointer_cast<void>(pBGRFrameData);
            // the shared_ptr owns the buffer; cast back to uint8_t* so delete[] matches the new[]
            pProcessData->pData.reset(resizeBGRBufferobj, [](void *data) { delete[] static_cast<uint8_t *>(data); });
            if (nFrameid >= 0xFFFFFFFF) { nFrameid = 0; } // reset the frame id before it wraps
            pBGRFrameData->i64TimeStamp = MyUtils::getins()->GetCurrentTimeMillis();
            pProcessData->iWidth = pBGRFrameData->frameInfo.iWidth;
            pProcessData->iHeight = pBGRFrameData->frameInfo.iHeight;
            pProcessData->iRate = pBGRFrameData->frameInfo.iRate;
            iRet = outputQueMap_[strPort0_]->push(std::static_pointer_cast<void>(pProcessData));
        }

    }

    return APP_ERR_OK;
}