From 7d6164959ec532f49151b30ed4647a7436c73a53 Mon Sep 17 00:00:00 2001
From: Fancy code <258828110.@qq.com>
Date: Fri, 12 Jul 2024 19:32:44 +0800
Subject: [PATCH] Improve: CameraHalMock code; pass frame duration to read
 callbacks.

---
 middleware/MediaManager/src/MediaHandle.cpp |  2 +-
 middleware/MediaManager/src/RecordMp4.cpp   |  4 +--
 test/hal/tool/src/CameraHalMock.cpp         | 24 ++++++++++++----
 test/hal/tool/src/CameraHalMock.h           |  4 +--
 utils/MediaBase/include/MediaBase.h         | 15 ++++++----
 utils/MediaBase/src/FfmpegDecoder.cpp       |  4 +--
 utils/MediaBase/src/FfmpegEncoder.cpp       | 10 +++----
 utils/MediaBase/src/FfmpegMuxStreamV2.cpp   |  4 +--
 utils/MediaBase/src/FfmpegOutputStream.cpp  |  4 +--
 utils/MediaBase/src/FfmpegReadFile.cpp      | 32 +++++++++++++--------
 utils/MediaBase/src/FfmpegReadFile.h        |  2 +-
 11 files changed, 65 insertions(+), 40 deletions(-)

diff --git a/middleware/MediaManager/src/MediaHandle.cpp b/middleware/MediaManager/src/MediaHandle.cpp
index 23db7f3..0f08e99 100644
--- a/middleware/MediaManager/src/MediaHandle.cpp
+++ b/middleware/MediaManager/src/MediaHandle.cpp
@@ -226,7 +226,7 @@ void MediaHandle::GetAVStream(const FrameType &type, const void *stream, const i
         mFirstFrameTimeStamp = timeStamp;
     }
     OneFrameStream addFrame;
-    addFrame.mData = malloc(length);
+    addFrame.mData = malloc(length); // TODO: fix the memory leak detected here
     addFrame.mLength = length;
     memcpy(addFrame.mData, stream, length);
     addFrame.mType = type;
diff --git a/middleware/MediaManager/src/RecordMp4.cpp b/middleware/MediaManager/src/RecordMp4.cpp
index 346ebb8..0276b81 100644
--- a/middleware/MediaManager/src/RecordMp4.cpp
+++ b/middleware/MediaManager/src/RecordMp4.cpp
@@ -67,7 +67,7 @@ void RecordMp4::GetVideoStream(const void *stream, const unsigned int &length, c
 {
     std::lock_guard locker(mMutex);
     if (mRecordMp4Object) {
-        StreamInfo info = {.mType = STREAM_TYPE_VIDEO_H264, .mTimeStamp = timeStamp};
+        StreamInfo info = {.mType = STREAM_TYPE_VIDEO_H264, .mTimeStamp_us = timeStamp};
         IGetStreamData(mRecordMp4Object, stream, length, info);
     }
 }
@@ -75,7 +75,7 @@
 {
     std::lock_guard locker(mMutex);
     if (mRecordMp4Object) {
-        StreamInfo info = {.mType = STREAM_TYPE_AUDIO_G711A, .mTimeStamp = timeStamp};
+        StreamInfo info = {.mType = STREAM_TYPE_AUDIO_G711A, .mTimeStamp_us = timeStamp};
         IGetStreamData(mRecordMp4Object, stream, length, info);
     }
 }
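The TODO above flags that the buffer allocated with malloc() in MediaHandle::GetAVStream() is copied into the frame queue but never freed. A minimal sketch of a leak-free alternative, assuming the frame type may own its buffer (OwnedFrame, EnqueueFrame and gFrameQueue are illustrative names, not part of this patch):

    #include <cstring>
    #include <queue>
    #include <utility>
    #include <vector>

    // Hypothetical owning frame type: the std::vector releases its buffer
    // automatically, so no explicit free() is needed when a frame is dropped.
    struct OwnedFrame
    {
        std::vector<unsigned char> mData;
        unsigned long long mTimeStamp_us = 0;
    };

    static std::queue<OwnedFrame> gFrameQueue;

    // Copies one incoming stream buffer into an owned frame and queues it.
    static void EnqueueFrame(const void *stream, const unsigned int length, const unsigned long long timeStamp_us)
    {
        OwnedFrame frame;
        frame.mData.resize(length);
        std::memcpy(frame.mData.data(), stream, length);
        frame.mTimeStamp_us = timeStamp_us;
        gFrameQueue.push(std::move(frame));
    }
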
diff --git a/test/hal/tool/src/CameraHalMock.cpp b/test/hal/tool/src/CameraHalMock.cpp
index 3038d13..b3fe4a3 100644
--- a/test/hal/tool/src/CameraHalMock.cpp
+++ b/test/hal/tool/src/CameraHalMock.cpp
@@ -29,15 +29,21 @@ void CameraHalTest::Init(void)
 {
     CameraHal::Init();
     if (nullptr == mReadH264File) {
-        ReadVideoFileCallback videCallback = [](const void *stream, const unsigned int length, void *context) -> void {
-            ((CameraHalTest *)context)->ReadDataFromH264File(stream, length);
+        ReadVideoFileCallback videCallback = [](const void *stream,
+                                                const unsigned int length,
+                                                const unsigned long long duration_us,
+                                                void *context) -> void {
+            ((CameraHalTest *)context)->ReadDataFromH264File(stream, length, duration_us);
         };
         mReadH264File = ICreateMediaBase(MEDIA_HANDLE_TYPE_READ_H264);
         ISetReadVideoCallback(mReadH264File, videCallback, this);
     }
     if (nullptr == mReadG711aFile) {
-        ReadAudioFileCallback audioCallback = [](const void *stream, const unsigned int length, void *context) -> void {
-            ((CameraHalTest *)context)->ReadDataFromG711aFile(stream, length);
+        ReadAudioFileCallback audioCallback = [](const void *stream,
+                                                 const unsigned int length,
+                                                 const unsigned long long duration_us,
+                                                 void *context) -> void {
+            ((CameraHalTest *)context)->ReadDataFromG711aFile(stream, length, duration_us);
         };
         mReadG711aFile = ICreateMediaBase(MEDIA_HANDLE_TYPE_READ_G711A);
         ISetReadVideoCallback(mReadG711aFile, audioCallback, this);
@@ -129,19 +135,25 @@ void CameraHalTest::MockReportMediaStream(void)
         mTaskRuning = false;
     }
 }
-void CameraHalTest::ReadDataFromH264File(const void *stream, const unsigned int length)
+void CameraHalTest::ReadDataFromH264File(const void *stream, const unsigned int length,
+                                         const unsigned long long duration_us)
 {
     struct timespec ts;
     clock_gettime(CLOCK_REALTIME, &ts);
     long microseconds = ts.tv_sec * 1000000L + ts.tv_nsec / 1000;
     GetVideoStream(stream, length, microseconds);
+    // LogInfo("CameraHalTest::ReadDataFromH264File duration = %llu\n", microseconds);
+    std::this_thread::sleep_for(std::chrono::milliseconds(duration_us / 1000));
 }
-void CameraHalTest::ReadDataFromG711aFile(const void *stream, const unsigned int length)
+void CameraHalTest::ReadDataFromG711aFile(const void *stream, const unsigned int length,
+                                          const unsigned long long duration_us)
 {
     struct timespec ts;
     clock_gettime(CLOCK_REALTIME, &ts);
     long microseconds = ts.tv_sec * 1000000L + ts.tv_nsec / 1000;
     GetAudioStream(stream, length, microseconds);
+    // LogInfo("CameraHalTest::ReadDataFromG711aFile duration = %ld\n", microseconds);
+    std::this_thread::sleep_for(std::chrono::milliseconds((duration_us / 1000) + 43));
 }
 CameraHalMock::CameraHalMock(const CameraType &cameraType) : CameraHalTest(cameraType)
 {
diff --git a/test/hal/tool/src/CameraHalMock.h b/test/hal/tool/src/CameraHalMock.h
index 6a5badb..3b2d1e0 100644
--- a/test/hal/tool/src/CameraHalMock.h
+++ b/test/hal/tool/src/CameraHalMock.h
@@ -37,8 +37,8 @@ protected:
 
 private:
     void MockReportMediaStream(void);
-    void ReadDataFromH264File(const void *stream, const unsigned int length);
-    void ReadDataFromG711aFile(const void *stream, const unsigned int length);
+    void ReadDataFromH264File(const void *stream, const unsigned int length, const unsigned long long duration_us);
+    void ReadDataFromG711aFile(const void *stream, const unsigned int length, const unsigned long long duration_us);
 
 protected:
     const CameraType mCameraType;
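For reference, the mock above paces playback by sleeping for the packet duration after delivering each frame; converting duration_us to whole milliseconds drops the sub-millisecond remainder of every packet. A small sketch of the same pacing idea at microsecond resolution (DeliverPaced is an illustrative helper, not part of this patch; the 43 ms audio offset used above is intentionally not reproduced):

    #include <chrono>
    #include <thread>

    // Illustrative pacing helper: delivers one frame, then sleeps for the
    // frame's duration so a recorded file is replayed at roughly real-time speed.
    template <typename DeliverFn>
    void DeliverPaced(const void *stream, unsigned int length, unsigned long long duration_us, DeliverFn deliver)
    {
        deliver(stream, length);
        // Sleeping in microseconds avoids the truncation of duration_us / 1000.
        std::this_thread::sleep_for(std::chrono::microseconds(duration_us));
    }
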
diff --git a/utils/MediaBase/include/MediaBase.h b/utils/MediaBase/include/MediaBase.h
index b75481e..4d1b7ab 100644
--- a/utils/MediaBase/include/MediaBase.h
+++ b/utils/MediaBase/include/MediaBase.h
@@ -32,18 +32,23 @@ enum StreamType
     STREAM_TYPE_AUDIO_G711A,
     STREAM_TYPE_END
 };
-typedef struct stream_info
+/**
+ * @brief Per-packet stream information.
+ * NOTE: The timestamp is critical. Its time base must be {1, 1000000}, i.e. the
+ * unit is 1 us (microseconds).
+ */
+typedef struct StreamInfo
 {
-    const StreamType mType;
-    const unsigned long long mTimeStamp;
+    const StreamType mType;                 ///< Type of the stream.
+    const unsigned long long mTimeStamp_us; ///< Timestamp of the stream, in microseconds.
 } StreamInfo;
 typedef struct output_file_info
 {
     const char *mFileName;
     const unsigned int mDuration_ms;
 } OutputFileInfo;
-typedef void (*ReadVideoFileCallback)(const void *, const unsigned int, void *);
-typedef void (*ReadAudioFileCallback)(const void *, const unsigned int, void *);
+typedef void (*ReadVideoFileCallback)(const void *, const unsigned int, const unsigned long long, void *);
+typedef void (*ReadAudioFileCallback)(const void *, const unsigned int, const unsigned long long, void *);
 void *ICreateMediaBase(const MediaHandleType type);
 // StatusCode Init(void *object);
 // StatusCode UnInit(void *object);
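The NOTE added above is the key contract of the renamed field: callers must fill mTimeStamp_us with a timestamp whose time base is {1, 1000000}. A minimal caller-side sketch, mirroring the clock_gettime() pattern used by CameraHalTest (NowMicroseconds is an illustrative helper):

    #include <time.h>

    // Returns the current CLOCK_REALTIME time in microseconds, matching the
    // {1, 1000000} time base expected by StreamInfo::mTimeStamp_us.
    static unsigned long long NowMicroseconds(void)
    {
        struct timespec ts;
        clock_gettime(CLOCK_REALTIME, &ts);
        return (unsigned long long)ts.tv_sec * 1000000ULL + (unsigned long long)ts.tv_nsec / 1000ULL;
    }

    // Usage at a call site such as RecordMp4::GetVideoStream():
    //   StreamInfo info = {.mType = STREAM_TYPE_VIDEO_H264, .mTimeStamp_us = NowMicroseconds()};
    //   IGetStreamData(mRecordMp4Object, stream, length, info);
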
diff --git a/utils/MediaBase/src/FfmpegDecoder.cpp b/utils/MediaBase/src/FfmpegDecoder.cpp
index 5c1c890..f118f8f 100644
--- a/utils/MediaBase/src/FfmpegDecoder.cpp
+++ b/utils/MediaBase/src/FfmpegDecoder.cpp
@@ -136,7 +136,7 @@ void FfmpegDecoder::DecodeData(const void *data, const size_t &size, const unsig
         // mPacket->stream_index = 0;
         mPacket->pts = pts;
         mPacket->dts = mPacket->pts;
-        LogInfo("source data mPacket->pts:%d\n", mPacket->pts);
+        // LogInfo("source data mPacket->pts:%d\n", mPacket->pts);
         AVDecodeData(mPacket, callback);
         return;
     }
@@ -214,7 +214,7 @@ void inline FfmpegDecoder::AVDecodeData(AVPacket *pkt, std::function<void(AVFram
                 // fwrite(mFrame->data[ch] + data_size * i, 1, data_size, outfile);
                 // save_code_stream_file(mFrame->data[ch] + data_size * i, data_size);
                 // save_code_stream_file(mFrame->data[0], mFrame->linesize[0]);
-                LogInfo("decode frame pts = %llu, nb_samples = %d\n", mFrame->pts, mFrame->nb_samples);
+                // LogInfo("decode frame pts = %llu, nb_samples = %d\n", mFrame->pts, mFrame->nb_samples);
                 callback(mFrame);
             }
             break;
diff --git a/utils/MediaBase/src/FfmpegEncoder.cpp b/utils/MediaBase/src/FfmpegEncoder.cpp
index a40b048..a59f121 100644
--- a/utils/MediaBase/src/FfmpegEncoder.cpp
+++ b/utils/MediaBase/src/FfmpegEncoder.cpp
@@ -225,7 +225,7 @@ int FfmpegEncoder::EncodeData(AVFrame *frame, AVStream *stream, std::function<vo
     // LogInfo("Write stream->time_base.num: %d\n", stream->time_base.num);
     // LogInfo("Write stream->time_base.den: %d\n", stream->time_base.den);
     mTmpPkt->stream_index = stream->index;
-    LogInfo("aaaaaaaaaaaaaaaaaaaaaaa Write frame mTmpPkt->pts: %llu\n", mTmpPkt->pts);
+    // LogInfo("aaaaaaaaaaaaaaaaaaaaaaa Write frame mTmpPkt->pts: %llu\n", mTmpPkt->pts);
 
     if (callback) {
         // save_code_stream_file(mTmpPkt->data, mTmpPkt->size);
@@ -325,7 +325,7 @@ AVFrame *FfmpegEncoder::ConvertAudioFrame(AVFrame *decodeFrame, struct SwrContex
         LogError("decodeFrame is null\n");
         return nullptr;
     }
-    LogInfo("decodeFrame->pts = %d\n", decodeFrame->pts);
+    // LogInfo("decodeFrame->pts = %d\n", decodeFrame->pts);
     // decodeFrame->pts = next_pts;
     // next_pts += decodeFrame->nb_samples;
     int ret = 0;
@@ -355,9 +355,9 @@ AVFrame *FfmpegEncoder::ConvertAudioFrame(AVFrame *decodeFrame, struct SwrContex
         LogError("Error while converting\n");
         return nullptr;
     }
-    LogInfo("mCodecCtx->time_base.num = %d, mCodecCtx->time_base.den=%d\n",
-            mCodecCtx->time_base.num,
-            mCodecCtx->time_base.den);
+    // LogInfo("mCodecCtx->time_base.num = %d, mCodecCtx->time_base.den=%d\n",
+    //         mCodecCtx->time_base.num,
+    //         mCodecCtx->time_base.den);
     mFrame->pts = av_rescale_q(decodeFrame->pts, (AVRational){1, 1000000}, mCodecCtx->time_base);
     // LogInfo("decodeFrame->pts = %d\n", decodeFrame->pts);
     // LogInfo("mFrame->pts = %d\n", mFrame->pts);
diff --git a/utils/MediaBase/src/FfmpegMuxStreamV2.cpp b/utils/MediaBase/src/FfmpegMuxStreamV2.cpp
index 4a09622..4bedc62 100644
--- a/utils/MediaBase/src/FfmpegMuxStreamV2.cpp
+++ b/utils/MediaBase/src/FfmpegMuxStreamV2.cpp
@@ -90,10 +90,10 @@ void FfmpegMuxStreamV2::GetStreamData(const void *data, const size_t &size, cons
         }
     }
     if (streamInfo.mType == STREAM_TYPE_VIDEO_H264 && mVideoStream) {
-        mVideoStream->WriteSourceData(data, size, streamInfo.mTimeStamp);
+        mVideoStream->WriteSourceData(data, size, streamInfo.mTimeStamp_us);
     }
     if (streamInfo.mType == STREAM_TYPE_AUDIO_G711A && mAudioStream) {
-        mAudioStream->WriteSourceData(data, size, streamInfo.mTimeStamp);
+        mAudioStream->WriteSourceData(data, size, streamInfo.mTimeStamp_us);
     }
 }
 StatusCode inline FfmpegMuxStreamV2::OpenMuxOutputFile(const std::string &fileName)
diff --git a/utils/MediaBase/src/FfmpegOutputStream.cpp b/utils/MediaBase/src/FfmpegOutputStream.cpp
index fa5385c..cd34a41 100644
--- a/utils/MediaBase/src/FfmpegOutputStream.cpp
+++ b/utils/MediaBase/src/FfmpegOutputStream.cpp
@@ -110,8 +110,8 @@ void FfmpegOutputStream::WriteSourceData(const void *data, const size_t &size, c
     tmpPkt->duration = ZERO_MEANS_UNKNOWN;
     // tmpPkt->pts = u64Interval * 1000; // convert to us
     tmpPkt->pts = av_rescale_q(pts, (AVRational){1, 1000000}, mStream->time_base);
-    LogInfo("vvvvvvvvvvvvvvvvvvvvvvvvv num:%d, den:%d\n", mStream->time_base.num, mStream->time_base.den);
-    LogInfo("vvvvvvvvvvvvvvvvvvvvvvvvv pts:%llu, duration:%d\n", tmpPkt->pts, tmpPkt->duration);
+    // LogInfo("vvvvvvvvvvvvvvvvvvvvvvvvv num:%d, den:%d\n", mStream->time_base.num, mStream->time_base.den);
+    // LogInfo("vvvvvvvvvvvvvvvvvvvvvvvvv pts:%llu, duration:%d\n", tmpPkt->pts, tmpPkt->duration);
     // tmpPkt->pts = pts;
     u64Interval++;
     tmpPkt->dts = tmpPkt->pts;
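FfmpegOutputStream::WriteSourceData() above relies on av_rescale_q() to move the microsecond pts into the muxer stream's time base. A standalone sketch of that conversion, with an illustrative 1/90000 stream time base:

    extern "C" {
    #include <libavutil/mathematics.h>
    #include <libavutil/rational.h>
    }
    #include <cstdint>
    #include <cstdio>

    int main(void)
    {
        int64_t pts_us = 1500000;                // a pts of 1.5 s in time base {1, 1000000}
        AVRational microTimeBase = {1, 1000000}; // the unit used by StreamInfo::mTimeStamp_us
        AVRational streamTimeBase = {1, 90000};  // illustrative muxer stream time base
        int64_t pts_tb = av_rescale_q(pts_us, microTimeBase, streamTimeBase);
        std::printf("pts in stream time base: %lld\n", (long long)pts_tb); // prints 135000
        return 0;
    }
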
diff --git a/utils/MediaBase/src/FfmpegReadFile.cpp b/utils/MediaBase/src/FfmpegReadFile.cpp
index 8807916..8742577 100644
--- a/utils/MediaBase/src/FfmpegReadFile.cpp
+++ b/utils/MediaBase/src/FfmpegReadFile.cpp
@@ -108,7 +108,7 @@ void FfmpegReadFile::ReadFileThread(AVFormatContext *pFormatCtx, int mediaStream
         LogWarning("ReadVideoCallback is null.\n");
     }
     AVPacket packet;
-    unsigned int playTimeMs = 0;
+    unsigned long long playTime = 0;
     // av_new_packet(&packet, AV_INPUT_BUFFER_MIN_SIZE);
     while (av_read_frame(pFormatCtx, &packet) >= 0) {
         if (nullptr == mReadVideoCallback) {
@@ -121,21 +121,29 @@
         }
         // Checks whether the packet belongs to a video stream.
         if (packet.stream_index == mediaStreamIndex) {
-            playTimeMs = (packet.duration * pFormatCtx->streams[mediaStreamIndex]->time_base.num * 1000) /
-                         pFormatCtx->streams[mediaStreamIndex]->time_base.den;
+            playTime = (packet.duration * pFormatCtx->streams[mediaStreamIndex]->time_base.num) /
+                       pFormatCtx->streams[mediaStreamIndex]->time_base.den;
             // AVRational time_base = pFormatCtx->streams[mediaStreamIndex]->time_base;
             // int64_t duration_ms = av_rescale_q(packet.duration, time_base, {1, AV_TIME_BASE}) * 1000;
             // LogInfo("Frame data address: %p, length: %zu\n", packet.data, packet.size);
-            // LogInfo("Play time ms:%d\n", playTimeMs);
             // static unsigned long long timeAmout = 0;
-            // timeAmout += playTimeMs;
+            // timeAmout += playTime;
             // LogInfo("Time amout: %llu\n", timeAmout);
-            // LogInfo("time base: num = %d, den = %d\n",
+            // LogInfo("time base: num = %d, den = %d, duration = %d\n",
             //         pFormatCtx->streams[mediaStreamIndex]->time_base.num,
-            //         pFormatCtx->streams[mediaStreamIndex]->time_base.den);
+            //         pFormatCtx->streams[mediaStreamIndex]->time_base.den,
+            //         packet.duration);
             // LogInfo("pFormatCtx->bit_rate = %ld\n", pFormatCtx->bit_rate);
-            ReadFrame(&packet);
-            std::this_thread::sleep_for(std::chrono::milliseconds(playTimeMs));
+            playTime = (unsigned long long)(packet.duration * av_q2d(pFormatCtx->streams[mediaStreamIndex]->time_base) *
+                                            1000000);
+            // LogInfo("playTime time ms:%llu\n", playTime);
+            int64_t duration_us = av_rescale_q(
+                packet.duration, pFormatCtx->streams[mediaStreamIndex]->time_base, (AVRational){1, 1000000});
+            unsigned long long playTime_us =
+                av_rescale_q(playTime, pFormatCtx->streams[mediaStreamIndex]->time_base, (AVRational){1, 1000000});
+            // LogInfo("playTime_us time ms:%llu\n", playTime_us);
+            ReadFrame(&packet, duration_us);
+            // std::this_thread::sleep_for(std::chrono::milliseconds(playTime));
         }
         // Release the data packet.
         av_packet_unref(&packet);
@@ -144,12 +152,12 @@ void FfmpegReadFile::ReadFileThread(AVFormatContext *pFormatCtx, int mediaStream
     avformat_close_input(&pFormatCtx);
 }
 
-void inline FfmpegReadFile::ReadFrame(AVPacket *packet)
+void inline FfmpegReadFile::ReadFrame(AVPacket *packet, const unsigned int duration_us)
 {
     if (AVMEDIA_TYPE_VIDEO == mFFmpegMediaType) {
-        mReadVideoCallback(packet->data, packet->size, mReadVideoCallbackContext);
+        mReadVideoCallback(packet->data, packet->size, duration_us, mReadVideoCallbackContext);
     }
     else if (AVMEDIA_TYPE_AUDIO == mFFmpegMediaType) {
-        mReadVideoCallback(packet->data, packet->size, mReadVideoCallbackContext);
+        mReadVideoCallback(packet->data, packet->size, duration_us, mReadVideoCallbackContext);
     }
 }
\ No newline at end of file
diff --git a/utils/MediaBase/src/FfmpegReadFile.h b/utils/MediaBase/src/FfmpegReadFile.h
index b3f3b93..35df727 100644
--- a/utils/MediaBase/src/FfmpegReadFile.h
+++ b/utils/MediaBase/src/FfmpegReadFile.h
@@ -30,7 +30,7 @@ public: // About read media file.
 
 private:
     void ReadFileThread(AVFormatContext *pFormatCtx, int video_stream_index);
-    void ReadFrame(AVPacket *packet);
+    void ReadFrame(AVPacket *packet, const unsigned int duration_us);
 
 private:
     ReadVideoFileCallback mReadVideoCallback;
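In the FfmpegReadFile changes above, the value the callbacks actually consume is packet.duration rescaled from the stream time base to microseconds; the playTime and playTime_us intermediates are only referenced from commented-out logging. A condensed sketch of the conversion and callback hand-off (DeliverPacket and ReadFrameCallback are illustrative names, not the patch's API):

    extern "C" {
    #include <libavformat/avformat.h>
    #include <libavutil/mathematics.h>
    }

    // Illustrative reader-loop helper: rescale the packet duration to microseconds
    // and hand both the payload and the duration to the registered callback.
    typedef void (*ReadFrameCallback)(const void *, const unsigned int, const unsigned long long, void *);

    static void DeliverPacket(AVFormatContext *fmtCtx, int streamIndex, AVPacket *packet,
                              ReadFrameCallback callback, void *context)
    {
        AVRational micro = {1, 1000000};
        int64_t duration_us = av_rescale_q(packet->duration, fmtCtx->streams[streamIndex]->time_base, micro);
        callback(packet->data, packet->size, (unsigned long long)duration_us, context);
    }

Note that ReadFrame() in the patch takes the duration as unsigned int while the MediaBase.h callbacks take unsigned long long; the narrowing is harmless for realistic packet durations, but the two types could be unified.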