1. Add carriage start/end times to the recognition result.
2. After train-arrival detection, store the arrival state in a txt file; during model inference it is read back to determine the carriage state of each frame.
parent 2e67e97508
commit d72bd78b59
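The txt-based hand-off described in point 2 could look roughly like the sketch below. This is a minimal illustration only, assuming a hypothetical file path and helper names that do not appear anywhere in this diff: the detection side overwrites the file with the latest monitor state, and the inference side reads it per frame.

// Hypothetical sketch: file name and helper names are assumptions, not part of this commit.
#include <fstream>
#include <string>

static const std::string kStateFile = "/tmp/train_monitor_state.txt";   // assumed location

// Called by the arrival-detection side whenever the monitor state changes.
void WriteMonitorState(int nMonitorState)
{
    std::ofstream ofs(kStateFile, std::ios::trunc);   // keep only the latest state
    if (ofs.is_open())
        ofs << nMonitorState;
}

// Called by the inference side for each frame to decide the carriage state.
int ReadMonitorState(int nDefaultState)
{
    std::ifstream ifs(kStateFile);
    int nState = nDefaultState;
    if (ifs.is_open())
        ifs >> nState;                                // fall back to the default if the file is missing or empty
    return nState;
}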
@@ -516,6 +516,7 @@ typedef struct
     uint64_t i64TimeStamp = 0;                        // frame-data timestamp
     std::shared_ptr<DecodedData> pDecodeData = nullptr;
     int iDirection = 0;                               // travel direction (0 - unknown; 1 - left; 2 - right)
+    int nMonitorState = MONITOR_MODEL_INIT_STATE;
 } SaveImgData;
 
 // recognition processing data
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
@@ -113,6 +113,8 @@ void MergerAllEngine::PushData(std::shared_ptr<Train> pTrain)
         << "Container 2: " << pTrain->container2.strContainerNo << "\n"
         << "Container 2 image: " << pTrain->container2.strBestImg << "\n"
         << "Container 2 timestamp: " << pTrain->container2.i64TimeStamp << "\n"
+        << "Carriage start time: " << MyUtils::getins()->Stamp2Time(pTrain->i64StartTimeStamp, true) << "\n"
+        << "Carriage end time: " << MyUtils::getins()->Stamp2Time(pTrain->i64EndTimeStamp, true) << "\n"
         << " --- merged result of all information END --- ";
     if (pTrain->bIsEnd)
     {
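MyUtils::Stamp2Time itself is not shown in this diff. A rough stand-in, assuming the timestamps are millisecond epoch values and the second argument appends the millisecond part, might look like this; the name and behaviour are assumptions for illustration only.

// Assumed sketch of a timestamp-to-string helper; not the project's actual Stamp2Time.
#include <cstdint>
#include <cstdio>
#include <ctime>
#include <string>

std::string Stamp2TimeSketch(int64_t i64TimeStampMs, bool bWithMs)
{
    std::time_t tSec = static_cast<std::time_t>(i64TimeStampMs / 1000);
    std::tm tmLocal{};
    localtime_r(&tSec, &tmLocal);                       // convert to local calendar time

    char szBuf[32] = {0};
    std::strftime(szBuf, sizeof(szBuf), "%Y-%m-%d %H:%M:%S", &tmLocal);

    std::string strTime(szBuf);
    if (bWithMs)
    {
        char szMs[8] = {0};
        snprintf(szMs, sizeof(szMs), ".%03lld", static_cast<long long>(i64TimeStampMs % 1000));
        strTime += szMs;                                // append zero-padded milliseconds
    }
    return strTime;
}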
File diff suppressed because one or more lines are too long
@@ -154,6 +154,7 @@ APP_ERROR SaveImgEngine::Process()
         Json::Value jvFrameInfo;
         jvFrameInfo["timeStamp"] = pSaveImgData->i64TimeStamp;
         jvFrameInfo["status"] = iStatus;
+        jvFrameInfo["moveType"] = pSaveImgData->nMonitorState;
         jvFrameInfo["direction"] = pSaveImgData->iDirection;
         jvFrameInfo["width"] = iWidth;
         jvFrameInfo["height"] = iHeight;
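For reference, a self-contained jsoncpp snippet that builds the same per-frame record, with the new moveType field carrying the persisted monitor state. The values and the main() wrapper are made up for illustration; the surrounding SaveImgEngine code is not reproduced here.

// Illustrative only; values are placeholders, include path is the usual jsoncpp header.
#include <json/json.h>
#include <iostream>

int main()
{
    Json::Value jvFrameInfo;
    jvFrameInfo["timeStamp"] = Json::UInt64(1700000000000ULL);   // example frame timestamp
    jvFrameInfo["status"]    = 1;
    jvFrameInfo["moveType"]  = 2;                                // new field: monitor state for this frame
    jvFrameInfo["direction"] = 1;
    jvFrameInfo["width"]     = 1920;
    jvFrameInfo["height"]    = 1080;

    Json::StreamWriterBuilder builder;
    std::cout << Json::writeString(builder, jvFrameInfo) << std::endl;
    return 0;
}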
@@ -585,7 +585,7 @@ APP_ERROR SaveStepOneResultEngine::Process()
         }
         else if (postSubData.iTargetType == CONTAINER)
         {
             jvStep1Container.append(jvInfo);
         }
         else if (postSubData.iTargetType == SPACE)
         {
@@ -1,4 +1,4 @@
 #include "SocketEngine.h"
File diff suppressed because one or more lines are too long
@@ -4,50 +4,50 @@ using namespace std;
 
 HardH264FFmpegDecode::HardH264FFmpegDecode()
 {
     ;
 }
 
 HardH264FFmpegDecode::~HardH264FFmpegDecode()
 {
     ;
 }
 
 int HardH264FFmpegDecode::HardH264FFmpegDecoderInit(unsigned int uiWidth, unsigned int uiHeight, unsigned int uiFrameRate)
 {
     uiWidth_ = uiWidth; uiHeight_ = uiHeight;
     uiFrameRate_ = uiFrameRate;
     iFrameFinished_ = 0;
 
     av_log_set_level(AV_LOG_ERROR);
 
     // AVCodecID codec_id = AV_CODEC_ID_H264;        // decode H264
     // pCodec_ = avcodec_find_decoder(codec_id);     // get the decoder
 
     pCodec_ = avcodec_find_decoder_by_name(NVIDIA_H264_DECODER);
     if (!pCodec_) {
         fprintf(stderr, "Codec '%s' not found\n", pCodec_->long_name);
         exit(1);
     }
     printf("Codec found with name %d(%s)\n", pCodec_->id, pCodec_->long_name);
 
     // create the codec context
     pCodecCtx_ = avcodec_alloc_context3(pCodec_);
     if (!pCodecCtx_){
         fprintf(stderr, "Could not allocate video codec context\n");
         exit(1);
     }
 
     // create the parser
     pCodecParserCtx_ = av_parser_init(pCodec_->id);
     if (!pCodecParserCtx_){
         fprintf(stderr, "parser not found\n");
         exit(1);
     }
 
     //if(pCodec_->capabilities&CODEC_CAP_TRUNCATED)
     //    pCodecCtx_->flags|= CODEC_FLAG_TRUNCATED;
 
     // open the decoder
     int ret = avcodec_open2(pCodecCtx_, pCodec_, nullptr);
     if (ret < 0) {
         fprintf(stderr, "Could not open codec\n");
@@ -63,7 +63,7 @@ int HardH264FFmpegDecode::HardH264FFmpegDecoderInit(unsigned int uiWidth, unsign
     }
     // av_init_packet(pPacket_);
 
     // allocate the frame
     pSrcFrame_ = av_frame_alloc();
     if (!pSrcFrame_) {
         fprintf(stderr, "Could not allocate video src pFrame\n");
@@ -78,14 +78,14 @@ int HardH264FFmpegDecode::HardH264FFmpegDecoderInit(unsigned int uiWidth, unsign
 
     printf("after align down video_width: %d, video_height: %d\n", uiWidth_, uiHeight_);
 
     // initialize the codec/parser parameters
     pCodecCtx_->time_base.num = 1;
     pCodecCtx_->frame_number = 1;                 // one video frame per packet
     pCodecCtx_->codec_type = AVMEDIA_TYPE_VIDEO;
     pCodecCtx_->bit_rate = 0;
     pCodecCtx_->time_base.den = uiFrameRate_;     // frame rate
     pCodecCtx_->width = uiWidth_;                 // video width
     pCodecCtx_->height = uiHeight_;               // video height
     // pCodecCtx_->pix_fmt = AV_PIX_FMT_YUV420P;
 
     int bufferSize = av_image_get_buffer_size(AV_PIX_FMT_YUV420P,
@@ -102,46 +102,46 @@ int HardH264FFmpegDecode::HardH264FFmpegDecoderInit(unsigned int uiWidth, unsign
     printf("pDstFrame_->linesize: %d, bufferSize: %d\n", pDstFrame_->linesize, bufferSize);
 
     pSwsContext_ = sws_getContext(pCodecCtx_->width, pCodecCtx_->height, pCodecCtx_->pix_fmt,
             pCodecCtx_->width, pCodecCtx_->height, AV_PIX_FMT_YUV420P, SWS_BICUBIC, nullptr, nullptr, nullptr);
     printf("pCodecCtx_->width: %d, pCodecCtx_->height: %d, pCodecCtx_->pix_fmt: %d\n", pCodecCtx_->width, pCodecCtx_->height, pCodecCtx_->pix_fmt);
 
 
     return 0;
 }
 
 int HardH264FFmpegDecode::HardH264FFmpegDecoderDeInit()
 {
     if(pu8OutBuffer_){
         av_free(pu8OutBuffer_);
         pu8OutBuffer_ = nullptr;
     }
 
     if(pSrcFrame_){
         av_frame_free(&pSrcFrame_);
         pSrcFrame_ = nullptr;
     }
     if(pDstFrame_){
         av_frame_free(&pDstFrame_);
         pDstFrame_ = nullptr;
     }
     if(pPacket_){
         av_packet_free(&pPacket_);
         pPacket_ = nullptr;
     }
     if(pCodecParserCtx_){
         av_parser_close(pCodecParserCtx_);
         pCodecParserCtx_ = nullptr;
     }
     if(pCodecCtx_){
         avcodec_close(pCodecCtx_);
         av_free(pCodecCtx_);
         pCodecCtx_ = nullptr;
     }
 
     if(pSwsContext_){
         sws_freeContext(pSwsContext_);
         pSwsContext_ = nullptr;
     }
 }
 
 int HardH264FFmpegDecode::HardH264FFmpegDecoderFilterGraph(AVFilterGraph *pGraph, AVFilterContext *pSourceCtx, AVFilterContext *pSinkCtx)
@@ -149,7 +149,7 @@ int HardH264FFmpegDecode::HardH264FFmpegDecoderFilterGraph(AVFilterGraph *pGraph
     int ret;
     AVFilterInOut *pOutputs = nullptr, *pInputs = nullptr;
     if ((ret = avfilter_link(pSourceCtx, 0, pSinkCtx, 0)) >= 0){
         ret = avfilter_graph_config(pGraph, nullptr);
     }
 
     avfilter_inout_free(&pOutputs);
@@ -168,14 +168,14 @@ int HardH264FFmpegDecode::HardH264FFmpegDecoderConfigureVideoFilters(AVFilterGra
              "video_size=%dx%d:pix_fmt=%d:time_base=1/1200000",
              iWidth, iHeight, iFormat);
     if ((ret = avfilter_graph_create_filter(&pFiltSrc,
             avfilter_get_by_name("buffer"), "ffplay_buffer", BufferSrcArgs,
             nullptr, pGraph)) < 0){
         goto fail;
     }
 
     ret = avfilter_graph_create_filter(&pFiltDst,
             avfilter_get_by_name("buffersink"),
             "ffplay_buffersink", nullptr, nullptr, pGraph);
     if (ret < 0){
         goto fail;
     }
@@ -190,14 +190,14 @@ int HardH264FFmpegDecode::HardH264FFmpegDecoderConfigureVideoFilters(AVFilterGra
     pDecoderFilterIn = pFiltSrc;
     pDecoderFilterOut = pFiltDst;
 
 fail:
     return ret;
 }
 
 int HardH264FFmpegDecode::HardH264FFmpegDecoder(AVCodecContext *pDecCtx, AVFrame *pFrame, AVPacket *pPkt, void* pOutputData, unsigned int* puiOutputDataSize)
 {
     int ret;
     AVFilterGraph* pDecoderGraph = nullptr;
 
     ret = avcodec_send_packet(pDecCtx, pPkt);     // send the packet for decoding
     if (ret < 0) {
@@ -208,7 +208,7 @@ int HardH264FFmpegDecode::HardH264FFmpegDecoder(AVCodecContext *pDecCtx, AVFrame
     while (ret >= 0) {
         ret = avcodec_receive_frame(pDecCtx, pFrame);     // decode
         if (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF){
             fprintf(stderr, "During decoding eof\n");
             return -1;
         }
         else if (ret < 0) {
@@ -219,7 +219,7 @@ int HardH264FFmpegDecode::HardH264FFmpegDecoder(AVCodecContext *pDecCtx, AVFrame
         //printf("saving frame %3d\n", pDecCtx->frame_number);
         fflush(stdout);
 
         AVFilterContext *pDecoderFilterIn = nullptr, *pDecoderFilterOut = nullptr;
 
         // pFrame->width = ALIGN_DOWN(pFrame->width, 32);
         // pFrame->height = ALIGN_DOWN(pFrame->height, 32);
@@ -228,26 +228,26 @@ int HardH264FFmpegDecode::HardH264FFmpegDecoder(AVCodecContext *pDecCtx, AVFrame
         pDecoderGraph = avfilter_graph_alloc();
         HardH264FFmpegDecoderConfigureVideoFilters(pDecoderGraph, pDecoderFilterIn, pDecoderFilterOut, pFrame->width, pFrame->height, pFrame->format);
 
         if (pFrame->format != AV_PIX_FMT_YUV420P){
             DUMP_FRAME(pFrame);
             ret = av_buffersrc_add_frame(pDecoderFilterIn, pFrame);
             ret = av_buffersink_get_frame_flags(pDecoderFilterOut, pFrame, 0);
             DUMP_FRAME(pFrame);
 
             int iSize = pFrame->width * pFrame->height;
             memcpy(pOutputData, pFrame->data[0], iSize);                     // Y
             memcpy(pOutputData+iSize, pFrame->data[1], iSize/4);             // U
             memcpy(pOutputData+iSize+iSize/4, pFrame->data[2], iSize/4);     // V
             *puiOutputDataSize = iSize*3/2;
             return iSize*3/2;
         }
     }
     return 0;
 }
 
 int HardH264FFmpegDecode::HardH264FFmpegDecoderV2(AVCodecContext *pDecCtx, SwsContext *pSwsCtx, AVFrame *pSrcFrame, AVFrame *pDstFrame, AVPacket *pPkt, void* pOutputData,unsigned int* puiOutputDataSize)
 {
     int ret;
 
     ret = avcodec_send_packet(pDecCtx, pPkt);     // send the packet for decoding
     if (ret < 0) {
@@ -258,7 +258,7 @@ int HardH264FFmpegDecode::HardH264FFmpegDecoderV2(AVCodecContext *pDecCtx, SwsCo
     while (ret >= 0) {
         ret = avcodec_receive_frame(pDecCtx, pSrcFrame);     // decode
         if (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF){
             fprintf(stderr, "During decoding eof\n");
             return -1;
         }
         else if (ret < 0) {
@@ -266,7 +266,7 @@ int HardH264FFmpegDecode::HardH264FFmpegDecoderV2(AVCodecContext *pDecCtx, SwsCo
             exit(1);
         }
 
         // pDecCtx->width = ALIGN_DOWN(pDecCtx->width, 32);
         // pDecCtx->height = ALIGN_DOWN(pDecCtx->height, 32);
 
         sws_scale(pSwsCtx,
@@ -280,13 +280,13 @@ int HardH264FFmpegDecode::HardH264FFmpegDecoderV2(AVCodecContext *pDecCtx, SwsCo
         //printf("saving frame %3d\n", pDecCtx->frame_number);
         fflush(stdout);
 
         int iSize = pDecCtx->width * pDecCtx->height;
 
         memcpy(pOutputData, pDstFrame->data[0], iSize);                     // Y
         memcpy(pOutputData+iSize, pDstFrame->data[1], iSize/4);             // U
         memcpy(pOutputData+iSize+iSize/4, pDstFrame->data[2], iSize/4);     // V
         *puiOutputDataSize = iSize*3/2;
         return iSize*3/2;
     }
     return 0;
 }
@@ -72,7 +72,7 @@ public:
 
     const AVCodec *pCodec_ = nullptr;                     // decoder
     AVCodecContext *pCodecCtx_ = nullptr;                 // codec context
     AVCodecParserContext *pCodecParserCtx_ = nullptr;     // parser context
     AVFrame *pSrcFrame_ = nullptr;
     AVFrame *pDstFrame_ = nullptr;
     AVPacket *pPacket_ = nullptr;
File diff suppressed because one or more lines are too long
@@ -211,6 +211,7 @@ void MoveEngine::SingleDeviceProcess(std::shared_ptr<ProcessData> pProcessData,
     pSaveImgData->bIsEnd = pProcessData->bIsEnd;
     pSaveImgData->bSaveToFtp = true;
     pSaveImgData->i64TimeStamp = pProcessData->i64TimeStamp;
+    pSaveImgData->nMonitorState = nType;
     outputQueMap_[strPort0_]->push(std::static_pointer_cast<void>(pSaveImgData));
 }
 