From 19aa5e15998389519d1cc24a12a490d229aaabf9 Mon Sep 17 00:00:00 2001
From: Fancy code <258828110.@qq.com>
Date: Thu, 11 Jul 2024 14:23:09 +0800
Subject: [PATCH] Backup.

---
 test/hal/tool/src/CameraHalMock.cpp        | 15 ++++++++++++---
 test/hal/tool/src/CameraHalMock.h          |  2 ++
 utils/MediaBase/src/FfmpegDecoder.cpp      | 18 ++++++++++++------
 utils/MediaBase/src/FfmpegDecoder.h        |  3 ++-
 utils/MediaBase/src/FfmpegEncoder.cpp      | 16 ++++++++++++----
 utils/MediaBase/src/FfmpegOutputStream.cpp | 12 +++++++++---
 utils/MediaBase/src/FfmpegReadFile.cpp     |  5 +++++
 7 files changed, 54 insertions(+), 17 deletions(-)

diff --git a/test/hal/tool/src/CameraHalMock.cpp b/test/hal/tool/src/CameraHalMock.cpp
index 8502c55..3038d13 100644
--- a/test/hal/tool/src/CameraHalMock.cpp
+++ b/test/hal/tool/src/CameraHalMock.cpp
@@ -18,8 +18,11 @@
 #include "MediaBase.h"
 #include "StatusCode.h"
 #include
+#include
+#include
 CameraHalTest::CameraHalTest(const CameraType &cameraType)
-    : mCameraType(cameraType), mReadH264File(nullptr), mReadG711aFile(nullptr), mTaskRuning(false)
+    : mCameraType(cameraType), mReadH264File(nullptr), mReadG711aFile(nullptr), mTaskRuning(false),
+      mVideoTimeStamp_us(0), mAudioTimeStamp_us(0)
 {
 }
 void CameraHalTest::Init(void)
@@ -128,11 +131,17 @@ void CameraHalTest::MockReportMediaStream(void)
 }
 void CameraHalTest::ReadDataFromH264File(const void *stream, const unsigned int length)
 {
-    GetVideoStream(stream, length, 0);
+    struct timespec ts;
+    clock_gettime(CLOCK_REALTIME, &ts);
+    long microseconds = ts.tv_sec * 1000000L + ts.tv_nsec / 1000;
+    GetVideoStream(stream, length, microseconds);
 }
 void CameraHalTest::ReadDataFromG711aFile(const void *stream, const unsigned int length)
 {
-    GetAudioStream(stream, length, 0);
+    struct timespec ts;
+    clock_gettime(CLOCK_REALTIME, &ts);
+    long microseconds = ts.tv_sec * 1000000L + ts.tv_nsec / 1000;
+    GetAudioStream(stream, length, microseconds);
 }
 CameraHalMock::CameraHalMock(const CameraType &cameraType) : CameraHalTest(cameraType)
 {
diff --git a/test/hal/tool/src/CameraHalMock.h b/test/hal/tool/src/CameraHalMock.h
index ea41a99..6a5badb 100644
--- a/test/hal/tool/src/CameraHalMock.h
+++ b/test/hal/tool/src/CameraHalMock.h
@@ -50,6 +50,8 @@ protected:
     std::condition_variable mCv;
    bool mTaskRuning;
     std::thread mTaskTimerThread;
+    unsigned long long mVideoTimeStamp_us;
+    unsigned long long mAudioTimeStamp_us;
 };
 class CameraHalMock : public CameraHalTest
 {
diff --git a/utils/MediaBase/src/FfmpegDecoder.cpp b/utils/MediaBase/src/FfmpegDecoder.cpp
index 5e2a97c..5c1c890 100644
--- a/utils/MediaBase/src/FfmpegDecoder.cpp
+++ b/utils/MediaBase/src/FfmpegDecoder.cpp
@@ -63,12 +63,15 @@ bool FfmpegDecoder::Init(void)
     /* check that the encoder supports s16 pcm input */
     mCodecCtx->sample_fmt = AV_SAMPLE_FMT_S16;
     if (!check_sample_fmt(mCodec, mCodecCtx->sample_fmt)) {
-        LogError("Encoder does not support sample format %s", av_get_sample_fmt_name(mCodecCtx->sample_fmt));
+        LogError("decoder does not support sample format %s", av_get_sample_fmt_name(mCodecCtx->sample_fmt));
         return false;
     }
     /* select other audio parameters supported by the encoder */
     mCodecCtx->sample_rate = select_sample_rate(mCodec);
+    LogInfo("decoder sample_rate:%d\n", mCodecCtx->sample_rate);
+    // const AVChannelLayout src = (AVChannelLayout)AV_CHANNEL_LAYOUT_MONO;
+    // av_channel_layout_copy(&mCodecCtx->ch_layout, &src);
     ret = select_channel_layout(mCodec, &(mCodecCtx->ch_layout));
     if (ret < 0) {
         LogError("Could not set channel layout\n");
@@ -96,8 +99,8 @@ bool FfmpegDecoder::Init(void)
         return false;
     }
     if (AVMEDIA_TYPE_AUDIO == mCodec->type) {
-        mFrame->nb_samples = mCodecCtx->frame_size;
-        mFrame->format = mCodecCtx->sample_fmt;
+        // mFrame->nb_samples = mCodecCtx->frame_size;
+        // mFrame->format = mCodecCtx->sample_fmt;
         ret = av_channel_layout_copy(&(mFrame->ch_layout), &(mCodecCtx->ch_layout));
         if (ret < 0) {
             LogError("Could not copy channel layout\n");
@@ -124,14 +127,16 @@ bool FfmpegDecoder::UnInit(void)
     }
     return true;
 }
-void FfmpegDecoder::DecodeData(const void *data, const size_t &size, std::function callback)
+void FfmpegDecoder::DecodeData(const void *data, const size_t &size, const unsigned long long &pts,
+                               std::function callback)
 {
     if (nullptr == mParser) {
         mPacket->data = (uint8_t *)data;
         mPacket->size = size;
         // mPacket->stream_index = 0;
-        mPacket->pts = AV_NOPTS_VALUE;
-        mPacket->dts = AV_NOPTS_VALUE;
+        mPacket->pts = pts;
+        mPacket->dts = mPacket->pts;
+        LogInfo("source data mPacket->pts:%lld\n", (long long)mPacket->pts);
         AVDecodeData(mPacket, callback);
         return;
     }
@@ -209,6 +214,7 @@ void inline FfmpegDecoder::AVDecodeData(AVPacket *pkt, std::function
                 // fwrite(mFrame->data[ch] + data_size * i, 1, data_size, outfile);
                 // save_code_stream_file(mFrame->data[ch] + data_size * i, data_size);
                 // save_code_stream_file(mFrame->data[0], mFrame->linesize[0]);
+                LogInfo("decode frame pts = %llu, nb_samples = %d\n", mFrame->pts, mFrame->nb_samples);
                 callback(mFrame);
             }
             break;
diff --git a/utils/MediaBase/src/FfmpegDecoder.h b/utils/MediaBase/src/FfmpegDecoder.h
index 3145704..5e03394 100644
--- a/utils/MediaBase/src/FfmpegDecoder.h
+++ b/utils/MediaBase/src/FfmpegDecoder.h
@@ -40,7 +40,8 @@ public:
     virtual ~FfmpegDecoder() = default;
     bool Init(void);
     bool UnInit(void);
-    void DecodeData(const void *data, const size_t &size, std::function callback);
+    void DecodeData(const void *data, const size_t &size, const unsigned long long &pts,
+                    std::function callback);

 private:
     void AVParseData(const void *data, const size_t &size, std::function callback);
diff --git a/utils/MediaBase/src/FfmpegEncoder.cpp b/utils/MediaBase/src/FfmpegEncoder.cpp
index 0b2b1b7..bb33f11 100644
--- a/utils/MediaBase/src/FfmpegEncoder.cpp
+++ b/utils/MediaBase/src/FfmpegEncoder.cpp
@@ -68,7 +68,7 @@ bool FfmpegEncoder::Init(int &outputFlags)
         LogError("Could not alloc an encoding context\n");
         return false;
     }
-    const AVChannelLayout src = (AVChannelLayout)AV_CHANNEL_LAYOUT_STEREO;
+    const AVChannelLayout src = (AVChannelLayout)AV_CHANNEL_LAYOUT_MONO;
     switch (mCodec->type) {
         case AVMEDIA_TYPE_AUDIO:
             mCodecCtx->sample_fmt = mCodec->sample_fmts ? mCodec->sample_fmts[0] : AV_SAMPLE_FMT_FLTP;
@@ -220,7 +220,12 @@ int FfmpegEncoder::EncodeData(AVFrame *frame, AVStream *stream, std::function
     av_packet_rescale_ts(mTmpPkt, mCodecCtx->time_base, stream->time_base);
+    // LogInfo("Write mCodecCtx->time_base.num: %d\n", mCodecCtx->time_base.num);
+    // LogInfo("Write mCodecCtx->time_base.den: %d\n", mCodecCtx->time_base.den);
+    // LogInfo("Write stream->time_base.num: %d\n", stream->time_base.num);
+    // LogInfo("Write stream->time_base.den: %d\n", stream->time_base.den);
     mTmpPkt->stream_index = stream->index;
+    LogInfo("Write frame mTmpPkt->pts: %llu\n", mTmpPkt->pts);

     if (callback) {
         // save_code_stream_file(mTmpPkt->data, mTmpPkt->size);
@@ -320,8 +325,9 @@ AVFrame *FfmpegEncoder::ConvertAudioFrame(AVFrame *decodeFrame, struct SwrContex
         LogError("decodeFrame is null\n");
         return nullptr;
     }
-    decodeFrame->pts = next_pts;
-    next_pts += decodeFrame->nb_samples;
+    LogInfo("decodeFrame->pts = %lld\n", (long long)decodeFrame->pts);
+    // decodeFrame->pts = next_pts;
+    // next_pts += decodeFrame->nb_samples;
     int ret = 0;
     int dst_nb_samples = 0;
     /* convert samples from native format to destination codec format, using the resampler */
@@ -350,7 +356,9 @@ AVFrame *FfmpegEncoder::ConvertAudioFrame(AVFrame *decodeFrame, struct SwrContex
         return nullptr;
     }

-    mFrame->pts = av_rescale_q(mSamplesCount, (AVRational){1, mCodecCtx->sample_rate}, mCodecCtx->time_base);
+    mFrame->pts = av_rescale_q(decodeFrame->pts, (AVRational){1, SOURCE_AUDIO_SAMPEL_RATE}, mCodecCtx->time_base);
+    LogInfo("decodeFrame->pts = %lld\n", (long long)decodeFrame->pts);
+    LogInfo("mFrame->pts = %lld\n", (long long)mFrame->pts);
     mSamplesCount += dst_nb_samples;
     return mFrame;
 }
diff --git a/utils/MediaBase/src/FfmpegOutputStream.cpp b/utils/MediaBase/src/FfmpegOutputStream.cpp
index 27d0d43..5718672 100644
--- a/utils/MediaBase/src/FfmpegOutputStream.cpp
+++ b/utils/MediaBase/src/FfmpegOutputStream.cpp
@@ -64,7 +64,11 @@ bool FfmpegOutputStream::Init(AVFormatContext *outputFormat)
         mEncoder->OpenEncoder(nullptr, mStream);
     }
     else {
-        mStream->time_base = (AVRational){1, 15};
+        /**
+         * @brief There is no need to set time_base here; ffmpeg corrects it automatically.
+         *
+         */
+        // mStream->time_base = (AVRational){1, 15};
         mStream->codecpar->codec_id = AV_CODEC_ID_H264;
         mStream->codecpar->codec_type = AVMEDIA_TYPE_VIDEO;
         mStream->codecpar->width = 1920;
@@ -92,7 +96,7 @@ void FfmpegOutputStream::UnInit(void)
 void FfmpegOutputStream::WriteSourceData(const void *data, const size_t &size, const unsigned long long &pts)
 {
     if (mDecoder) {
-        mDecoder->DecodeData(data, size, mDecodeCallback);
+        mDecoder->DecodeData(data, size, pts, mDecodeCallback);
         return;
     }
     AVPacket *tmpPkt = av_packet_alloc();
@@ -105,7 +109,9 @@ void FfmpegOutputStream::WriteSourceData(const void *data, const size_t &size, c
     constexpr int64_t ZERO_MEANS_UNKNOWN = 0;
     tmpPkt->duration = ZERO_MEANS_UNKNOWN;
     // tmpPkt->pts = u64Interval * 1000; // convert to us
-    tmpPkt->pts = av_rescale_q(u64Interval, (AVRational){1, 15}, mStream->time_base);
+    tmpPkt->pts = av_rescale_q(pts, (AVRational){1, 15}, mStream->time_base);
+    // LogInfo("pts:%llu, duration:%d\n", tmpPkt->pts, tmpPkt->duration);
+    // tmpPkt->pts = pts;
     u64Interval++;
     tmpPkt->dts = tmpPkt->pts;
     /* copy packet */
diff --git a/utils/MediaBase/src/FfmpegReadFile.cpp b/utils/MediaBase/src/FfmpegReadFile.cpp
index 0ced6c9..8807916 100644
--- a/utils/MediaBase/src/FfmpegReadFile.cpp
+++ b/utils/MediaBase/src/FfmpegReadFile.cpp
@@ -123,8 +123,13 @@ void FfmpegReadFile::ReadFileThread(AVFormatContext *pFormatCtx, int mediaStream
         if (packet.stream_index == mediaStreamIndex) {
             playTimeMs = (packet.duration * pFormatCtx->streams[mediaStreamIndex]->time_base.num * 1000) /
                          pFormatCtx->streams[mediaStreamIndex]->time_base.den;
+            // AVRational time_base = pFormatCtx->streams[mediaStreamIndex]->time_base;
+            // int64_t duration_ms = av_rescale_q(packet.duration, time_base, {1, AV_TIME_BASE}) * 1000;
             // LogInfo("Frame data address: %p, length: %zu\n", packet.data, packet.size);
             // LogInfo("Play time ms:%d\n", playTimeMs);
+            // static unsigned long long timeAmout = 0;
+            // timeAmout += playTimeMs;
+            // LogInfo("Time amout: %llu\n", timeAmout);
             // LogInfo("time base: num = %d, den = %d\n",
             //         pFormatCtx->streams[mediaStreamIndex]->time_base.num,
             //         pFormatCtx->streams[mediaStreamIndex]->time_base.den);
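
Note on the timestamp path introduced above: the camera mock now stamps each buffer with a wall-clock time in microseconds, and that pts is carried through DecodeData/WriteSourceData into the muxed packets. Below is a minimal, self-contained sketch of that conversion, not code from this patch: the 1/1000000 source time base and the 90 kHz stream time base are illustrative assumptions (the patch itself still rescales from (AVRational){1, 15}), and the function names are made up for the example.

// Sketch only: rescaling a microsecond capture timestamp into a stream time_base.
#include <cstdint>
#include <cstdio>
#include <ctime> // clock_gettime (POSIX)
extern "C" {
#include <libavutil/mathematics.h> // av_rescale_q
#include <libavutil/rational.h>    // AVRational
}

// Wall-clock capture time in microseconds, as done in CameraHalTest above.
static int64_t CaptureTimestampUs(void)
{
    struct timespec ts;
    clock_gettime(CLOCK_REALTIME, &ts);
    return static_cast<int64_t>(ts.tv_sec) * 1000000LL + ts.tv_nsec / 1000;
}

// Convert a microsecond pts into the muxer stream's time_base ticks.
static int64_t ToStreamPts(int64_t pts_us, AVRational stream_time_base)
{
    const AVRational microseconds = {1, 1000000};
    return av_rescale_q(pts_us, microseconds, stream_time_base);
}

int main(void)
{
    const int64_t pts_us = CaptureTimestampUs();
    const AVRational stream_tb = {1, 90000}; // assumed 90 kHz video time base
    std::printf("pts_us=%lld -> stream pts=%lld\n", (long long)pts_us,
                (long long)ToStreamPts(pts_us, stream_tb));
    return 0;
}

Build, for example, with: g++ sketch.cpp $(pkg-config --cflags --libs libavutil)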