From e038e2a9ffa65da2ccc1332bbf08013ba81a9fec Mon Sep 17 00:00:00 2001
From: Fancy code <258828110.@qq.com>
Date: Wed, 10 Jul 2024 22:40:51 +0800
Subject: [PATCH] Backup: add timestamp param.

---
 middleware/MediaManager/src/MediaHandle.cpp | 76 +++++++++++++--------
 middleware/MediaManager/src/MediaHandle.h   |  3 +
 middleware/MediaManager/src/RecordMp4.cpp   |  7 +-
 utils/MediaBase/include/MediaBase.h         |  8 ++-
 utils/MediaBase/src/FfmpegEncoder.cpp       |  2 +-
 utils/MediaBase/src/FfmpegMuxStream.cpp     | 16 ++---
 utils/MediaBase/src/FfmpegMuxStream.h       |  2 +-
 utils/MediaBase/src/FfmpegMuxStreamV2.cpp   |  8 +--
 utils/MediaBase/src/FfmpegMuxStreamV2.h     |  2 +-
 utils/MediaBase/src/FfmpegOutputStream.cpp  | 29 ++++----
 utils/MediaBase/src/FfmpegOutputStream.h    |  2 +-
 utils/MediaBase/src/IMediaBase.cpp          |  2 +-
 utils/MediaBase/src/IMediaBase.h            |  2 +-
 utils/MediaBase/src/MediaBase.cpp           |  4 +-
 14 files changed, 98 insertions(+), 65 deletions(-)

diff --git a/middleware/MediaManager/src/MediaHandle.cpp b/middleware/MediaManager/src/MediaHandle.cpp
index 8da4a6a..8fcaa2f 100644
--- a/middleware/MediaManager/src/MediaHandle.cpp
+++ b/middleware/MediaManager/src/MediaHandle.cpp
@@ -26,14 +26,15 @@
 #include
 #include
 #include
-one_frame_stream::one_frame_stream() : mType(FrameType::END), mData(nullptr), mLength(0)
+constexpr int MEDIA_TASK_NOT_START = 0;
+one_frame_stream::one_frame_stream() : mType(FrameType::END), mData(nullptr), mLength(0), mTimeStamp(0)
 {
 }
 one_frame_stream::~one_frame_stream()
 {
 }
 MediaHandle::MediaHandle(const MediaChannel &mediaChannel, const std::shared_ptr &cameraHal)
-    : mMediaChannel(mediaChannel), mCameraHal(cameraHal), mTaskRuning(false)
+    : mMediaChannel(mediaChannel), mCameraHal(cameraHal), mTaskRuning(false), mFirstFrameTimeStamp(MEDIA_TASK_NOT_START)
 {
 }
 void MediaHandle::Init(void)
@@ -121,11 +122,11 @@ void MediaHandle::StartTaskTimer(void)
 }
 void MediaHandle::TaskTimer(void)
 {
-    constexpr int TASK_TIMER = 1000 * 10;
+    constexpr int TASK_TIME_OUT = 1000 * 20;
     mTaskRuning = true;
     while (mTaskRuning) {
         std::unique_lock lock(mMutex);
-        mCvTaskHandle.wait_for(lock, std::chrono::milliseconds(TASK_TIMER), [&] {
+        mCvTaskHandle.wait_for(lock, std::chrono::milliseconds(TASK_TIME_OUT), [&] {
             return !mTaskRuning;
         });
         /**
@@ -142,6 +143,7 @@ void MediaHandle::TaskTimer(void)
     }
     mStreamHandle.reset();
     mMutex.lock();
+    mFirstFrameTimeStamp = MEDIA_TASK_NOT_START;
    auto runingTask = mCurrentTask.lock();
     if (mCurrentTask.expired()) {
         LogWarning("mCurrentTask is expired.\n");
@@ -180,19 +182,19 @@ void MediaHandle::HandleListFrame(void)
     int leftFrameCount = -1;
     do {
         OneFrameStream &frontFrame = mFrameList.front();
-        OneFrameStream handleIt;
-        handleIt.mData = frontFrame.mData;
-        handleIt.mLength = frontFrame.mLength;
-        handleIt.mType = frontFrame.mType;
+        // OneFrameStream handleIt;
+        // handleIt.mData = frontFrame.mData;
+        // handleIt.mLength = frontFrame.mLength;
+        // handleIt.mType = frontFrame.mType;
+        if (FrameType::VIDEO == frontFrame.mType) {
+            mStreamHandle->GetVideoStream(frontFrame.mData, frontFrame.mLength, frontFrame.mTimeStamp);
+        }
+        if (FrameType::AUDIO == frontFrame.mType) {
+            mStreamHandle->GetAudioStream(frontFrame.mData, frontFrame.mLength, frontFrame.mTimeStamp);
+        }
+        free(frontFrame.mData);
+        frontFrame.mData = nullptr;
         mFrameList.pop_front();
-        if (FrameType::VIDEO == handleIt.mType) {
-            mStreamHandle->GetVideoStream(handleIt.mData, handleIt.mLength, 0);
-        }
-        if (FrameType::AUDIO == handleIt.mType) {
-            mStreamHandle->GetAudioStream(handleIt.mData, handleIt.mLength, 0);
-        }
-        free(handleIt.mData);
-        handleIt.mData = nullptr;
         leftFrameCount = mFrameList.size();
     } while (leftFrameCount > 0);
 }
@@ -202,25 +204,45 @@ CameraTaskType MediaHandle::TaskTypeConvert(const MediaTaskType &type)
 }
 void MediaHandle::GetVideoStreamCallback(const void *stream, const int &length, const unsigned long long &timeStamp)
 {
-    std::unique_lock lock(mMutex);
-    // mStreamHandle->GetVideoStream(stream, length, timeStamp);
-    OneFrameStream addFrame;
-    addFrame.mData = malloc(length);
-    addFrame.mLength = length;
-    memcpy(addFrame.mData, stream, length);
-    addFrame.mType = FrameType::VIDEO;
-    mFrameList.push_back(addFrame);
-    mCvFrameHandle.notify_one();
+    GetAVStream(FrameType::VIDEO, stream, length, timeStamp);
+    // return;
+    // std::unique_lock lock(mMutex);
+    // // mStreamHandle->GetVideoStream(stream, length, timeStamp);
+    // OneFrameStream addFrame;
+    // addFrame.mData = malloc(length);
+    // addFrame.mLength = length;
+    // memcpy(addFrame.mData, stream, length);
+    // addFrame.mType = FrameType::VIDEO;
+    // mFrameList.push_back(addFrame);
+    // mCvFrameHandle.notify_one();
 }
 void MediaHandle::GetAudioStreamCallback(const void *stream, const int &length, const unsigned long long &timeStamp)
+{
+    GetAVStream(FrameType::AUDIO, stream, length, timeStamp);
+    // return;
+    // std::unique_lock lock(mMutex);
+    // // mStreamHandle->GetAudioStream(stream, length, timeStamp);
+    // OneFrameStream addFrame;
+    // addFrame.mData = malloc(length);
+    // addFrame.mLength = length;
+    // memcpy(addFrame.mData, stream, length);
+    // addFrame.mType = FrameType::AUDIO;
+    // mFrameList.push_back(addFrame);
+    // mCvFrameHandle.notify_one();
+}
+void MediaHandle::GetAVStream(const FrameType &type, const void *stream, const int &length,
+                              const unsigned long long &timeStamp)
 {
     std::unique_lock lock(mMutex);
-    // mStreamHandle->GetAudioStream(stream, length, timeStamp);
+    if (MEDIA_TASK_NOT_START == mFirstFrameTimeStamp) {
+        mFirstFrameTimeStamp = timeStamp;
+    }
     OneFrameStream addFrame;
     addFrame.mData = malloc(length);
     addFrame.mLength = length;
     memcpy(addFrame.mData, stream, length);
-    addFrame.mType = FrameType::AUDIO;
+    addFrame.mType = type;
+    addFrame.mTimeStamp = timeStamp - mFirstFrameTimeStamp;
     mFrameList.push_back(addFrame);
     mCvFrameHandle.notify_one();
 }
\ No newline at end of file
diff --git a/middleware/MediaManager/src/MediaHandle.h b/middleware/MediaManager/src/MediaHandle.h
index 04ec1fb..99e9eea 100644
--- a/middleware/MediaManager/src/MediaHandle.h
+++ b/middleware/MediaManager/src/MediaHandle.h
@@ -37,6 +37,7 @@ typedef struct one_frame_stream
     FrameType mType;
     void *mData;
     int mLength;
+    unsigned long long mTimeStamp;
 } OneFrameStream;
 class MediaHandle : public VMediaHandle, public std::enable_shared_from_this
 {
@@ -60,6 +61,7 @@ private:
     CameraTaskType TaskTypeConvert(const MediaTaskType &type);
     void GetVideoStreamCallback(const void *stream, const int &length, const unsigned long long &timeStamp);
     void GetAudioStreamCallback(const void *stream, const int &length, const unsigned long long &timeStamp);
+    void GetAVStream(const FrameType &type, const void *stream, const int &length, const unsigned long long &timeStamp);

 private:
     std::mutex mMutex;
@@ -73,5 +75,6 @@ private:
     std::thread mTaskTimerThread;
     std::thread mFrameHandleThread;
     std::list mFrameList;
+    unsigned long long mFirstFrameTimeStamp;
 };
 #endif
\ No newline at end of file
diff --git a/middleware/MediaManager/src/RecordMp4.cpp b/middleware/MediaManager/src/RecordMp4.cpp
index 625ac04..346ebb8 100644
--- a/middleware/MediaManager/src/RecordMp4.cpp
+++ b/middleware/MediaManager/src/RecordMp4.cpp
@@ -36,7 +36,8 @@ StatusCode RecordMp4::Init(void)
         return CreateStatusCode(STATUS_CODE_NOT_OK);
     }
     std::string videoPath = mRecordTask->GetTargetNameForSaving();
-    StatusCode code = IOpenOutputFile(mRecordMp4Object, videoPath.c_str());
+    OutputFileInfo fileInfo = {.mFileName = videoPath.c_str(), .mDuration_ms = 5000};
+    StatusCode code = IOpenOutputFile(mRecordMp4Object, &fileInfo);
     if (!IsCodeOK(code)) {
         LogError("OpenOutputFile failed.\n");
         ICloseOutputFile(mRecordMp4Object);
@@ -66,7 +67,7 @@ void RecordMp4::GetVideoStream(const void *stream, const unsigned int &length, c
 {
     std::lock_guard locker(mMutex);
     if (mRecordMp4Object) {
-        StreamInfo info = {.mType = STREAM_TYPE_VIDEO_H264};
+        StreamInfo info = {.mType = STREAM_TYPE_VIDEO_H264, .mTimeStamp = timeStamp};
         IGetStreamData(mRecordMp4Object, stream, length, info);
     }
 }
@@ -74,7 +75,7 @@ void RecordMp4::GetAudioStream(const void *stream, const unsigned int &length, c
 {
     std::lock_guard locker(mMutex);
     if (mRecordMp4Object) {
-        StreamInfo info = {.mType = STREAM_TYPE_AUDIO_G711A};
+        StreamInfo info = {.mType = STREAM_TYPE_AUDIO_G711A, .mTimeStamp = timeStamp};
         IGetStreamData(mRecordMp4Object, stream, length, info);
     }
 }
diff --git a/utils/MediaBase/include/MediaBase.h b/utils/MediaBase/include/MediaBase.h
index 9942307..b75481e 100644
--- a/utils/MediaBase/include/MediaBase.h
+++ b/utils/MediaBase/include/MediaBase.h
@@ -35,7 +35,13 @@ enum StreamType
 typedef struct stream_info
 {
     const StreamType mType;
+    const unsigned long long mTimeStamp;
 } StreamInfo;
+typedef struct output_file_info
+{
+    const char *mFileName;
+    const unsigned int mDuration_ms;
+} OutputFileInfo;
 typedef void (*ReadVideoFileCallback)(const void *, const unsigned int, void *);
 typedef void (*ReadAudioFileCallback)(const void *, const unsigned int, void *);
 void *ICreateMediaBase(const MediaHandleType type);
@@ -47,7 +53,7 @@ StatusCode ISetReadAudioCallback(void *object, ReadVideoFileCallback callback, v
 StatusCode IStartReadFile(void *object, const char *path);
 StatusCode IStopReadFile(void *object);

-StatusCode IOpenOutputFile(void *object, const char *fileName);
+StatusCode IOpenOutputFile(void *object, const OutputFileInfo *info);
 StatusCode ICloseOutputFile(void *object);
 void IGetStreamData(void *object, const void *data, const size_t size, const StreamInfo streamInfo);
diff --git a/utils/MediaBase/src/FfmpegEncoder.cpp b/utils/MediaBase/src/FfmpegEncoder.cpp
index 0ff0e91..0b2b1b7 100644
--- a/utils/MediaBase/src/FfmpegEncoder.cpp
+++ b/utils/MediaBase/src/FfmpegEncoder.cpp
@@ -41,7 +41,7 @@ extern "C" {
 #include
 constexpr long SOURCE_AUDIO_SAMPEL_RATE = 8000;
 #define STREAM_DURATION 10.0
-#define STREAM_FRAME_RATE 1200000 /* 25 images/s */
+#define STREAM_FRAME_RATE 25 /* 25 images/s */
 #define STREAM_PIX_FMT AV_PIX_FMT_YUV420P /* default pix_fmt */
 FfmpegEncoder::FfmpegEncoder(const enum AVCodecID &codecId)
     : mCodecId(codecId), mCodecCtx(nullptr), mCodec(nullptr), mFrame(nullptr), mTmpFrame(nullptr), mTmpPkt(nullptr),
diff --git a/utils/MediaBase/src/FfmpegMuxStream.cpp b/utils/MediaBase/src/FfmpegMuxStream.cpp
index 3d10f94..69dd46c 100644
--- a/utils/MediaBase/src/FfmpegMuxStream.cpp
+++ b/utils/MediaBase/src/FfmpegMuxStream.cpp
@@ -59,7 +59,7 @@ FfmpegMuxStream::FfmpegMuxStream()
     memset(&mVideoSt, 0, sizeof(mVideoSt));
     memset(&mAudioSt, 0, sizeof(mAudioSt));
 }
-StatusCode FfmpegMuxStream::OpenOutputFile(const std::string &fileName)
+StatusCode FfmpegMuxStream::OpenOutputFile(const OutputFileInfo &fileInfo)
 {
     if (!InitCodecVideo(AV_CODEC_ID_H264, &mCodecVideo, &mCodecVideoContext, &mFrameVideo)) {
         LogError("InitCodec failed\n");
@@ -72,10 +72,10 @@ StatusCode FfmpegMuxStream::OpenOutputFile(const std::string &fileName)
     int ret;
     AVFormatContext *oc = nullptr;
     int have_video = 0, have_audio = 0;
-    int encode_video = 0, encode_audio = 0;
+    // int encode_video = 0, encode_audio = 0;
     const AVCodec *audio_codec, *video_codec;
     AVDictionary *opt = nullptr;
-    avformat_alloc_output_context2(&oc, nullptr, "mp4", fileName.c_str());
+    avformat_alloc_output_context2(&oc, nullptr, "mp4", fileInfo.mFileName);
     if (!oc) {
         LogError("Could not deduce output format from file extension: using MPEG.\n");
         return CreateStatusCode(STATUS_CODE_NOT_OK);
     }
@@ -88,13 +88,13 @@ StatusCode FfmpegMuxStream::OpenOutputFile(const std::string &fileName)
         LogInfo("Add video stream\n");
         add_stream(&mVideoSt, oc, &video_codec, fmt->video_codec);
         have_video = 1;
-        encode_video = 1;
+        // encode_video = 1;
     }
     if (fmt->audio_codec != AV_CODEC_ID_NONE) {
         LogInfo("Add audio stream\n");
         add_stream(&mAudioSt, oc, &audio_codec, fmt->audio_codec);
         have_audio = 1;
-        encode_audio = 1;
+        // encode_audio = 1;
     }
     /* Now that all the parameters are set, we can open the audio and
      * video codecs and allocate the necessary encode buffers. */
     if (have_video) {
@@ -104,13 +104,13 @@ StatusCode FfmpegMuxStream::OpenOutputFile(const std::string &fileName)
     if (have_audio) {
         open_audio(oc, audio_codec, &mAudioSt, opt);
     }
-    av_dump_format(oc, 0, fileName.c_str(), 1);
+    av_dump_format(oc, 0, fileInfo.mFileName, 1);
     if (!(fmt->flags & AVFMT_NOFILE)) {
-        ret = avio_open(&oc->pb, fileName.c_str(), AVIO_FLAG_WRITE);
+        ret = avio_open(&oc->pb, fileInfo.mFileName, AVIO_FLAG_WRITE);
         if (ret < 0) {
             char error_str[AV_ERROR_MAX_STRING_SIZE] = {0};
             LogError("Could not open '%s': %s\n",
-                     fileName.c_str(),
+                     fileInfo.mFileName,
                      av_make_error_string(error_str, AV_ERROR_MAX_STRING_SIZE, ret));
             return CreateStatusCode(STATUS_CODE_NOT_OK);
         }
diff --git a/utils/MediaBase/src/FfmpegMuxStream.h b/utils/MediaBase/src/FfmpegMuxStream.h
index 308cabc..2c1623b 100644
--- a/utils/MediaBase/src/FfmpegMuxStream.h
+++ b/utils/MediaBase/src/FfmpegMuxStream.h
@@ -22,7 +22,7 @@ public:
     virtual ~FfmpegMuxStream() = default;

 public: // About combine file.
-    StatusCode OpenOutputFile(const std::string &fileName) override;
+    StatusCode OpenOutputFile(const OutputFileInfo &fileInfo) override;
     StatusCode CloseOutputFile(void) override;
     void GetStreamData(const void *data, const size_t &size, const StreamInfo &streamInfo) override;
diff --git a/utils/MediaBase/src/FfmpegMuxStreamV2.cpp b/utils/MediaBase/src/FfmpegMuxStreamV2.cpp
index bd9666a..4a09622 100644
--- a/utils/MediaBase/src/FfmpegMuxStreamV2.cpp
+++ b/utils/MediaBase/src/FfmpegMuxStreamV2.cpp
@@ -38,9 +38,9 @@ extern "C" {
 FfmpegMuxStreamV2::FfmpegMuxStreamV2() : mOutputFormat(nullptr), mOptions(nullptr), mFilesMuxing(false)
 {
 }
-StatusCode FfmpegMuxStreamV2::OpenOutputFile(const std::string &fileName)
+StatusCode FfmpegMuxStreamV2::OpenOutputFile(const OutputFileInfo &fileInfo)
 {
-    return OpenMuxOutputFile(fileName);
+    return OpenMuxOutputFile(fileInfo.mFileName);
 }
 StatusCode FfmpegMuxStreamV2::CloseOutputFile(void)
 {
@@ -90,10 +90,10 @@ void FfmpegMuxStreamV2::GetStreamData(const void *data, const size_t &size, cons
         }
     }
     if (streamInfo.mType == STREAM_TYPE_VIDEO_H264 && mVideoStream) {
-        mVideoStream->WriteSourceData(data, size);
+        mVideoStream->WriteSourceData(data, size, streamInfo.mTimeStamp);
     }
     if (streamInfo.mType == STREAM_TYPE_AUDIO_G711A && mAudioStream) {
-        mAudioStream->WriteSourceData(data, size);
+        mAudioStream->WriteSourceData(data, size, streamInfo.mTimeStamp);
     }
 }
 StatusCode inline FfmpegMuxStreamV2::OpenMuxOutputFile(const std::string &fileName)
diff --git a/utils/MediaBase/src/FfmpegMuxStreamV2.h b/utils/MediaBase/src/FfmpegMuxStreamV2.h
index 2cf715d..1050176 100644
--- a/utils/MediaBase/src/FfmpegMuxStreamV2.h
+++ b/utils/MediaBase/src/FfmpegMuxStreamV2.h
@@ -45,7 +45,7 @@ public:
     virtual ~FfmpegMuxStreamV2() = default;

 public: // About combine file.
-    StatusCode OpenOutputFile(const std::string &fileName) override;
+    StatusCode OpenOutputFile(const OutputFileInfo &fileInfo) override;
     StatusCode CloseOutputFile(void) override;
     void GetStreamData(const void *data, const size_t &size, const StreamInfo &streamInfo) override;
diff --git a/utils/MediaBase/src/FfmpegOutputStream.cpp b/utils/MediaBase/src/FfmpegOutputStream.cpp
index a1103ab..27d0d43 100644
--- a/utils/MediaBase/src/FfmpegOutputStream.cpp
+++ b/utils/MediaBase/src/FfmpegOutputStream.cpp
@@ -89,31 +89,32 @@ void FfmpegOutputStream::UnInit(void)
     }
     av_packet_free(&mTmpPkt);
 }
-void FfmpegOutputStream::WriteSourceData(const void *data, const size_t &size)
+void FfmpegOutputStream::WriteSourceData(const void *data, const size_t &size, const unsigned long long &pts)
 {
     if (mDecoder) {
         mDecoder->DecodeData(data, size, mDecodeCallback);
         return;
     }
-    AVPacket *mTmpPkt = av_packet_alloc();
+    AVPacket *tmpPkt = av_packet_alloc();
     static unsigned long long u64Interval = 0;
-    AVRational in_timebase = (AVRational){1, 15};
+    // AVRational in_timebase = (AVRational){1, 15};
     if (mEncodeCallback) {
-        mTmpPkt->data = (uint8_t *)data;
-        mTmpPkt->size = size;
-        mTmpPkt->stream_index = mStream->index;
-        mTmpPkt->duration = 0;
-        // mTmpPkt->pts = u64Interval * 1000; // convert to us
-        mTmpPkt->pts = av_rescale_q(u64Interval, (AVRational){1, 15}, mStream->time_base);
+        tmpPkt->data = (uint8_t *)data;
+        tmpPkt->size = size;
+        tmpPkt->stream_index = mStream->index;
+        constexpr int64_t ZERO_MEANS_UNKNOWN = 0;
+        tmpPkt->duration = ZERO_MEANS_UNKNOWN;
+        // tmpPkt->pts = u64Interval * 1000; // convert to us
+        tmpPkt->pts = av_rescale_q(u64Interval, (AVRational){1, 15}, mStream->time_base);
         u64Interval++;
-        mTmpPkt->dts = mTmpPkt->pts;
+        tmpPkt->dts = tmpPkt->pts;
         /* copy packet */
         // av_packet_rescale_ts(mTmpPkt, in_timebase, mStream->time_base);
-        mTmpPkt->pos = -1;
-        mEncodeCallback(mTmpPkt);
+        tmpPkt->pos = -1;
+        mEncodeCallback(tmpPkt);
     }
-    av_packet_unref(mTmpPkt);
-    av_packet_free(&mTmpPkt);
+    av_packet_unref(tmpPkt);
+    av_packet_free(&tmpPkt);
 }
 void FfmpegOutputStream::SetWriteSourceDataCallback(std::function callback)
 {
diff --git a/utils/MediaBase/src/FfmpegOutputStream.h b/utils/MediaBase/src/FfmpegOutputStream.h
index 58fa81e..d06e1c2 100644
--- a/utils/MediaBase/src/FfmpegOutputStream.h
+++ b/utils/MediaBase/src/FfmpegOutputStream.h
@@ -43,7 +43,7 @@ public:
     virtual ~FfmpegOutputStream() = default;
     bool Init(AVFormatContext *outputFormat);
     void UnInit(void);
-    void WriteSourceData(const void *data, const size_t &size);
+    void WriteSourceData(const void *data, const size_t &size, const unsigned long long &pts);
     void SetWriteSourceDataCallback(std::function callback);
     bool CheckStreamHeader(const void *data, const size_t &size);
diff --git a/utils/MediaBase/src/IMediaBase.cpp b/utils/MediaBase/src/IMediaBase.cpp
index ee6303e..408a4d9 100644
--- a/utils/MediaBase/src/IMediaBase.cpp
+++ b/utils/MediaBase/src/IMediaBase.cpp
@@ -41,7 +41,7 @@ StatusCode IMediaBase::SetReadAudioCallback(ReadVideoFileCallback callback, void
     LogWarning("STATUS_CODE_VIRTUAL_FUNCTION\n");
     return CreateStatusCode(STATUS_CODE_VIRTUAL_FUNCTION);
 }
-StatusCode IMediaBase::OpenOutputFile(const std::string &fileName)
+StatusCode IMediaBase::OpenOutputFile(const OutputFileInfo &fileInfo)
 {
     LogWarning("STATUS_CODE_VIRTUAL_FUNCTION\n");
     return CreateStatusCode(STATUS_CODE_VIRTUAL_FUNCTION);
 }
diff --git a/utils/MediaBase/src/IMediaBase.h b/utils/MediaBase/src/IMediaBase.h
index b327ec5..e04108b 100644
--- a/utils/MediaBase/src/IMediaBase.h
+++ b/utils/MediaBase/src/IMediaBase.h
@@ -29,7 +29,7 @@ public: // About read media file.
     virtual StatusCode SetReadAudioCallback(ReadVideoFileCallback callback, void *context);

 public: // About combine file.
-    virtual StatusCode OpenOutputFile(const std::string &fileName);
+    virtual StatusCode OpenOutputFile(const OutputFileInfo &fileInfo);
     virtual StatusCode CloseOutputFile(void);
     virtual void GetStreamData(const void *data, const size_t &size, const StreamInfo &streamInfo);
 };
diff --git a/utils/MediaBase/src/MediaBase.cpp b/utils/MediaBase/src/MediaBase.cpp
index 3083fda..50b322f 100644
--- a/utils/MediaBase/src/MediaBase.cpp
+++ b/utils/MediaBase/src/MediaBase.cpp
@@ -64,10 +64,10 @@ StatusCode IStopReadFile(void *object)
     }
     return CreateStatusCode(STATUS_CODE_OK);
 }
-StatusCode IOpenOutputFile(void *object, const char *fileName)
+StatusCode IOpenOutputFile(void *object, const OutputFileInfo *info)
 {
     if (ObjectCheck(object) == true) {
-        return (*(std::shared_ptr *)object)->OpenOutputFile(fileName);
+        return (*(std::shared_ptr *)object)->OpenOutputFile(*info);
     }
     return CreateStatusCode(STATUS_CODE_OK);
 }
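
Usage note (illustrative sketch, not part of the patch): after this change a caller opens the
muxer with an OutputFileInfo and passes a per-frame timestamp through StreamInfo, and
MediaHandle::GetAVStream rebases timestamps against the first frame so the muxer sees values
starting at 0. The minimal sketch below mirrors RecordMp4::Init()/GetVideoStream() above; the
handle is assumed to come from ICreateMediaBase(), and nowMs, frameData and frameLength are
hypothetical caller-supplied placeholders, not names defined by this patch.

    #include "MediaBase.h" // IOpenOutputFile / IGetStreamData / ICloseOutputFile; IsCodeOK() is the project's status helper

    // Sketch only: write one H.264 frame with a millisecond timestamp relative
    // to the first frame, then close the output file.
    void WriteOneFrameSketch(void *mediaObject, const void *frameData, unsigned int frameLength,
                             unsigned long long nowMs)
    {
        OutputFileInfo fileInfo = {.mFileName = "/tmp/record.mp4", .mDuration_ms = 5000};
        if (!IsCodeOK(IOpenOutputFile(mediaObject, &fileInfo))) {
            return;
        }
        static unsigned long long firstMs = nowMs; // same idea as mFirstFrameTimeStamp above
        StreamInfo info = {.mType = STREAM_TYPE_VIDEO_H264, .mTimeStamp = nowMs - firstMs};
        IGetStreamData(mediaObject, frameData, frameLength, info);
        ICloseOutputFile(mediaObject);
    }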