Improve: fix muxing file duration handling and memory leaks.

This commit is contained in:
Fancy code 2024-07-12 22:58:55 +08:00
parent 7d6164959e
commit 59dd2bda6a
12 changed files with 141 additions and 56 deletions

View File

@ -122,8 +122,6 @@ typedef struct camera_task_param
unsigned int mVideoRecordingTimeMs; unsigned int mVideoRecordingTimeMs;
std::shared_ptr<VCameraTaskContext> mCtx; std::shared_ptr<VCameraTaskContext> mCtx;
} CameraTaskParam; } CameraTaskParam;
// using AudioStreamCallback = void (*)(const void *, const int, const unsigned long long);
// using VideoStreamCallback = void (*)(const void *, const int, const unsigned long long);
using AudioStreamCallback = std::function<void(const void *, const unsigned int &, const unsigned long long &)>; using AudioStreamCallback = std::function<void(const void *, const unsigned int &, const unsigned long long &)>;
using VideoStreamCallback = std::function<void(const void *, const unsigned int &, const unsigned long long &)>; using VideoStreamCallback = std::function<void(const void *, const unsigned int &, const unsigned long long &)>;
class VCameraHal class VCameraHal

View File

@ -27,14 +27,15 @@
#include <thread> #include <thread>
#include <vector> #include <vector>
constexpr int MEDIA_TASK_NOT_START = 0; constexpr int MEDIA_TASK_NOT_START = 0;
one_frame_stream::one_frame_stream() : mType(FrameType::END), mData(nullptr), mLength(0), mTimeStamp(0) one_frame_stream::one_frame_stream() : mType(FrameType::END), mData(nullptr), mLength(0), mTimeStamp_us(0)
{ {
} }
one_frame_stream::~one_frame_stream() one_frame_stream::~one_frame_stream()
{ {
} }
MediaHandle::MediaHandle(const MediaChannel &mediaChannel, const std::shared_ptr<VCameraHal> &cameraHal) MediaHandle::MediaHandle(const MediaChannel &mediaChannel, const std::shared_ptr<VCameraHal> &cameraHal)
: mMediaChannel(mediaChannel), mCameraHal(cameraHal), mTaskRuning(false), mFirstFrameTimeStamp(MEDIA_TASK_NOT_START) : mMediaChannel(mediaChannel), mCameraHal(cameraHal), mTaskRuning(false),
mFirstFrameTimeStamp_us(MEDIA_TASK_NOT_START)
{ {
} }
void MediaHandle::Init(void) void MediaHandle::Init(void)
@ -140,10 +141,11 @@ void MediaHandle::TaskTimer(void)
mStreamHandle->UnInit(); mStreamHandle->UnInit();
if (mCameraHal) { if (mCameraHal) {
mCameraHal->StopTask(); mCameraHal->StopTask();
ClearFrameList();
} }
mMutex.lock(); mMutex.lock();
mStreamHandle.reset(); mStreamHandle.reset();
mFirstFrameTimeStamp = MEDIA_TASK_NOT_START; mFirstFrameTimeStamp_us = MEDIA_TASK_NOT_START;
auto runingTask = mCurrentTask.lock(); auto runingTask = mCurrentTask.lock();
if (mCurrentTask.expired()) { if (mCurrentTask.expired()) {
LogWarning("mCurrentTask is expired.\n"); LogWarning("mCurrentTask is expired.\n");
@ -185,26 +187,25 @@ void MediaHandle::FrameHandle(void)
} }
} }
} }
void MediaHandle::HandleListFrame(void) void inline MediaHandle::HandleListFrame(void)
{ {
int leftFrameCount = -1; int leftFrameCount = -1;
do { do {
OneFrameStream &frontFrame = mFrameList.front(); OneFrameStream &frontFrame = mFrameList.front();
// OneFrameStream handleIt;
// handleIt.mData = frontFrame.mData;
// handleIt.mLength = frontFrame.mLength;
// handleIt.mType = frontFrame.mType;
if (FrameType::VIDEO == frontFrame.mType) { if (FrameType::VIDEO == frontFrame.mType) {
mStreamHandle->GetVideoStream(frontFrame.mData, frontFrame.mLength, frontFrame.mTimeStamp); mStreamHandle->GetVideoStream(frontFrame.mData, frontFrame.mLength, frontFrame.mTimeStamp_us);
} }
else if (FrameType::AUDIO == frontFrame.mType) { else if (FrameType::AUDIO == frontFrame.mType) {
mStreamHandle->GetAudioStream(frontFrame.mData, frontFrame.mLength, frontFrame.mTimeStamp); mStreamHandle->GetAudioStream(frontFrame.mData, frontFrame.mLength, frontFrame.mTimeStamp_us);
} }
free(frontFrame.mData); free(frontFrame.mData);
frontFrame.mData = nullptr; frontFrame.mData = nullptr;
mFrameList.pop_front(); mFrameList.pop_front();
leftFrameCount = mFrameList.size(); leftFrameCount = mFrameList.size();
} while (leftFrameCount > 0); } while (leftFrameCount > 0);
if (mStreamHandle->HandleFinished()) {
mTaskRuning = false;
}
} }
CameraTaskType MediaHandle::TaskTypeConvert(const MediaTaskType &type) CameraTaskType MediaHandle::TaskTypeConvert(const MediaTaskType &type)
{ {
@ -219,18 +220,29 @@ void MediaHandle::GetAudioStreamCallback(const void *stream, const int &length,
GetAVStream(FrameType::AUDIO, stream, length, timeStamp); GetAVStream(FrameType::AUDIO, stream, length, timeStamp);
} }
void MediaHandle::GetAVStream(const FrameType &type, const void *stream, const int &length, void MediaHandle::GetAVStream(const FrameType &type, const void *stream, const int &length,
const unsigned long long &timeStamp) const unsigned long long &timeStamp_us)
{ {
std::unique_lock<std::mutex> lock(mMutex); std::unique_lock<std::mutex> lock(mMutex);
if (MEDIA_TASK_NOT_START == mFirstFrameTimeStamp) { if (MEDIA_TASK_NOT_START == mFirstFrameTimeStamp_us) {
mFirstFrameTimeStamp = timeStamp; mFirstFrameTimeStamp_us = timeStamp_us;
} }
OneFrameStream addFrame; OneFrameStream addFrame;
addFrame.mData = malloc(length); // TODO: detected memory leaks addFrame.mData = malloc(length);
addFrame.mLength = length; addFrame.mLength = length;
memcpy(addFrame.mData, stream, length); memcpy(addFrame.mData, stream, length);
addFrame.mType = type; addFrame.mType = type;
addFrame.mTimeStamp = timeStamp - mFirstFrameTimeStamp; addFrame.mTimeStamp_us = timeStamp_us - mFirstFrameTimeStamp_us;
mFrameList.push_back(addFrame); mFrameList.push_back(addFrame);
mCvFrameHandle.notify_one(); mCvFrameHandle.notify_one();
} }
void MediaHandle::ClearFrameList(void)
{
std::unique_lock<std::mutex> lock(mMutex);
for (auto &frame : mFrameList) {
if (frame.mData) {
free(frame.mData);
frame.mData = nullptr;
}
}
mFrameList.clear();
}

View File

@ -37,7 +37,7 @@ typedef struct one_frame_stream
FrameType mType; FrameType mType;
void *mData; void *mData;
int mLength; int mLength;
unsigned long long mTimeStamp; unsigned long long mTimeStamp_us;
} OneFrameStream; } OneFrameStream;
class MediaHandle : public VMediaHandle, public std::enable_shared_from_this<MediaHandle> class MediaHandle : public VMediaHandle, public std::enable_shared_from_this<MediaHandle>
{ {
@ -61,7 +61,17 @@ private:
CameraTaskType TaskTypeConvert(const MediaTaskType &type); CameraTaskType TaskTypeConvert(const MediaTaskType &type);
void GetVideoStreamCallback(const void *stream, const int &length, const unsigned long long &timeStamp); void GetVideoStreamCallback(const void *stream, const int &length, const unsigned long long &timeStamp);
void GetAudioStreamCallback(const void *stream, const int &length, const unsigned long long &timeStamp); void GetAudioStreamCallback(const void *stream, const int &length, const unsigned long long &timeStamp);
void GetAVStream(const FrameType &type, const void *stream, const int &length, const unsigned long long &timeStamp); /**
* @brief
*
* @param type The type of media stream (video/audio).
* @param stream Data pointer of the media stream.
* @param length The length of the media stream data.
* @param timeStamp_us The unit of timestamp must be us.
*/
void GetAVStream(const FrameType &type, const void *stream, const int &length,
const unsigned long long &timeStamp_us);
void ClearFrameList(void);
private: private:
std::mutex mMutex; std::mutex mMutex;
@ -75,6 +85,6 @@ private:
std::thread mTaskTimerThread; std::thread mTaskTimerThread;
std::thread mFrameHandleThread; std::thread mFrameHandleThread;
std::list<OneFrameStream> mFrameList; std::list<OneFrameStream> mFrameList;
unsigned long long mFirstFrameTimeStamp; unsigned long long mFirstFrameTimeStamp_us;
}; };
#endif #endif

View File

@ -25,7 +25,8 @@
#include <string> #include <string>
#include <utility> #include <utility>
#include <vector> #include <vector>
RecordMp4::RecordMp4(std::shared_ptr<VMediaTask> &recordTask) : mRecordMp4Object(nullptr), mRecordTask(recordTask) RecordMp4::RecordMp4(std::shared_ptr<VMediaTask> &recordTask)
: mRecordMp4Object(nullptr), mRecordTask(recordTask), mIsRecordingFinished(OUTPUT_FILE_STATUS_END)
{ {
} }
StatusCode RecordMp4::Init(void) StatusCode RecordMp4::Init(void)
@ -36,7 +37,8 @@ StatusCode RecordMp4::Init(void)
return CreateStatusCode(STATUS_CODE_NOT_OK); return CreateStatusCode(STATUS_CODE_NOT_OK);
} }
std::string videoPath = mRecordTask->GetTargetNameForSaving(); std::string videoPath = mRecordTask->GetTargetNameForSaving();
OutputFileInfo fileInfo = {.mFileName = videoPath.c_str(), .mDuration_ms = 5000}; OutputFileInfo fileInfo = {
.mFileName = videoPath.c_str(), .mDuration_ms = 5000, .mFinished = &mIsRecordingFinished};
StatusCode code = IOpenOutputFile(mRecordMp4Object, &fileInfo); StatusCode code = IOpenOutputFile(mRecordMp4Object, &fileInfo);
if (!IsCodeOK(code)) { if (!IsCodeOK(code)) {
LogError("OpenOutputFile failed.\n"); LogError("OpenOutputFile failed.\n");
@ -85,3 +87,7 @@ StatusCode RecordMp4::GetAllFiles(std::vector<MediaTaskResponse> &files)
mTaskResponse.clear(); mTaskResponse.clear();
return CreateStatusCode(STATUS_CODE_OK); return CreateStatusCode(STATUS_CODE_OK);
} }
bool RecordMp4::HandleFinished(void)
{
return mIsRecordingFinished == OUTPUT_FILE_STATUS_FINISHED ? true : false;
}

View File

@ -30,11 +30,13 @@ public:
void GetVideoStream(const void *stream, const unsigned int &length, const unsigned long long &timeStamp) override; void GetVideoStream(const void *stream, const unsigned int &length, const unsigned long long &timeStamp) override;
void GetAudioStream(const void *stream, const unsigned int &length, const unsigned long long &timeStamp) override; void GetAudioStream(const void *stream, const unsigned int &length, const unsigned long long &timeStamp) override;
StatusCode GetAllFiles(std::vector<MediaTaskResponse> &files) override; StatusCode GetAllFiles(std::vector<MediaTaskResponse> &files) override;
bool HandleFinished(void) override;
private: private:
std::mutex mMutex; std::mutex mMutex;
void *mRecordMp4Object; void *mRecordMp4Object;
std::shared_ptr<VMediaTask> mRecordTask; std::shared_ptr<VMediaTask> mRecordTask;
std::vector<MediaTaskResponse> mTaskResponse; std::vector<MediaTaskResponse> mTaskResponse;
int mIsRecordingFinished;
}; };
#endif #endif

View File

@ -44,3 +44,8 @@ StatusCode VStreamHandle::GetAllFiles(std::vector<MediaTaskResponse> &files)
LogInfo("STATUS_CODE_VIRTUAL_FUNCTION\n"); LogInfo("STATUS_CODE_VIRTUAL_FUNCTION\n");
return CreateStatusCode(STATUS_CODE_VIRTUAL_FUNCTION); return CreateStatusCode(STATUS_CODE_VIRTUAL_FUNCTION);
} }
bool VStreamHandle::HandleFinished(void)
{
LogInfo("STATUS_CODE_VIRTUAL_FUNCTION\n");
return false;
}

View File

@ -28,5 +28,6 @@ public:
virtual void GetVideoStream(const void *stream, const unsigned int &length, const unsigned long long &timeStamp); virtual void GetVideoStream(const void *stream, const unsigned int &length, const unsigned long long &timeStamp);
virtual void GetAudioStream(const void *stream, const unsigned int &length, const unsigned long long &timeStamp); virtual void GetAudioStream(const void *stream, const unsigned int &length, const unsigned long long &timeStamp);
virtual StatusCode GetAllFiles(std::vector<MediaTaskResponse> &files); virtual StatusCode GetAllFiles(std::vector<MediaTaskResponse> &files);
virtual bool HandleFinished(void);
}; };
#endif #endif

View File

@ -42,10 +42,17 @@ typedef struct StreamInfo
const StreamType mType; ///< Type of the stream. const StreamType mType; ///< Type of the stream.
const unsigned long long mTimeStamp_us; ///< Timestamp of the stream. const unsigned long long mTimeStamp_us; ///< Timestamp of the stream.
} StreamInfo; } StreamInfo;
enum OutputFileStatus
{
OUTPUT_FILE_STATUS_MUXING = 0,
OUTPUT_FILE_STATUS_FINISHED,
OUTPUT_FILE_STATUS_END
};
typedef struct output_file_info typedef struct output_file_info
{ {
const char *mFileName; const char *mFileName; ///< Name of the output file. Must be an absolute path.
const unsigned int mDuration_ms; const unsigned int mDuration_ms; ///< Duration of the output file in milliseconds.
int *const mFinished; ///< Whether the output file is finished. See OutputFileStatus.
} OutputFileInfo; } OutputFileInfo;
typedef void (*ReadVideoFileCallback)(const void *, const unsigned int, const unsigned long long, void *); typedef void (*ReadVideoFileCallback)(const void *, const unsigned int, const unsigned long long, void *);
typedef void (*ReadAudioFileCallback)(const void *, const unsigned int, const unsigned long long, void *); typedef void (*ReadAudioFileCallback)(const void *, const unsigned int, const unsigned long long, void *);

View File

@ -21,7 +21,6 @@
#include <stdint.h> #include <stdint.h>
#include <stdio.h> #include <stdio.h>
#include <string.h> #include <string.h>
#include <string>
#ifdef __cplusplus #ifdef __cplusplus
extern "C" { extern "C" {
#endif #endif

View File

@ -35,11 +35,15 @@ extern "C" {
#include <memory> #include <memory>
// #include <mutex> // #include <mutex>
#include <string> #include <string>
FfmpegMuxStreamV2::FfmpegMuxStreamV2() : mOutputFormat(nullptr), mOptions(nullptr), mFilesMuxing(false) constexpr unsigned long long MUXING_NOT_START = 0;
FfmpegMuxStreamV2::FfmpegMuxStreamV2()
: mOutputFormat(nullptr), mOptions(nullptr), mFilesMuxing(false), mFileMuxingDuration_us(0),
mStartPts(MUXING_NOT_START), mMuxingFinised(false)
{ {
} }
StatusCode FfmpegMuxStreamV2::OpenOutputFile(const OutputFileInfo &fileInfo) StatusCode FfmpegMuxStreamV2::OpenOutputFile(const OutputFileInfo &fileInfo)
{ {
mOutputFileInfo = std::make_shared<OutputFileInfo>(fileInfo);
return OpenMuxOutputFile(fileInfo.mFileName); return OpenMuxOutputFile(fileInfo.mFileName);
} }
StatusCode FfmpegMuxStreamV2::CloseOutputFile(void) StatusCode FfmpegMuxStreamV2::CloseOutputFile(void)
@ -62,34 +66,29 @@ StatusCode FfmpegMuxStreamV2::CloseOutputFile(void)
} }
avformat_free_context(mOutputFormat); avformat_free_context(mOutputFormat);
fx_system("sync"); fx_system("sync");
mOutputFileInfo.reset();
return CreateStatusCode(STATUS_CODE_OK); return CreateStatusCode(STATUS_CODE_OK);
} }
void FfmpegMuxStreamV2::GetStreamData(const void *data, const size_t &size, const StreamInfo &streamInfo) void FfmpegMuxStreamV2::GetStreamData(const void *data, const size_t &size, const StreamInfo &streamInfo)
{ {
int ret = 0; if (mMuxingFinised) {
if (!mFilesMuxing) { /**
bool fileMuxing = false; * @brief Packaging has been completed according to the recording duration parameters, and the excess data
fileMuxing = mVideoStream->CheckStreamHeader(data, size); * frames will be discarded.
if (fileMuxing) { */
AVDictionary *opt = nullptr;
av_dict_set_int(&opt, "use_editlist", 0, 0);
/* Write the stream header, if any. */
ret = avformat_write_header(mOutputFormat, nullptr);
if (ret < 0) {
char error_str[AV_ERROR_MAX_STRING_SIZE] = {0};
LogError("Error occurred when opening output file: %s\n",
av_make_error_string(error_str, AV_ERROR_MAX_STRING_SIZE, ret));
return; return;
} }
mFilesMuxing = true; if (!MakeSureStreamHeanderOK(data, size)) {
av_dict_free(&opt);
}
else {
LogWarning("Stream header not found, skip this frame.\n");
return; return;
} }
}
if (streamInfo.mType == STREAM_TYPE_VIDEO_H264 && mVideoStream) { if (streamInfo.mType == STREAM_TYPE_VIDEO_H264 && mVideoStream) {
if (MUXING_NOT_START == mStartPts) {
mStartPts = streamInfo.mTimeStamp_us;
}
/**
* @brief Use the video's timestamp to count the playback duration of the packaged file.
*/
CalculatingDuration(streamInfo.mTimeStamp_us);
mVideoStream->WriteSourceData(data, size, streamInfo.mTimeStamp_us); mVideoStream->WriteSourceData(data, size, streamInfo.mTimeStamp_us);
} }
if (streamInfo.mType == STREAM_TYPE_AUDIO_G711A && mAudioStream) { if (streamInfo.mType == STREAM_TYPE_AUDIO_G711A && mAudioStream) {
@ -160,6 +159,44 @@ void FfmpegMuxStreamV2::GetAVPacketDataCallback(AVPacket *pkt)
av_make_error_string(error_str, AV_ERROR_MAX_STRING_SIZE, ret)); av_make_error_string(error_str, AV_ERROR_MAX_STRING_SIZE, ret));
} }
} }
void FfmpegMuxStreamV2::CalculatingDuration(const unsigned long long &pts_us)
{
mFileMuxingDuration_us = pts_us - mStartPts;
if (mFileMuxingDuration_us / 1000 >= mOutputFileInfo->mDuration_ms) {
LogInfo("Muxing file finished, duration: %lld ms\n", mFileMuxingDuration_us / 1000);
mMuxingFinised = true;
if (mOutputFileInfo && mOutputFileInfo->mFinished) {
*(mOutputFileInfo->mFinished) = static_cast<int>(OUTPUT_FILE_STATUS_FINISHED);
}
}
}
bool inline FfmpegMuxStreamV2::MakeSureStreamHeanderOK(const void *data, const size_t &size)
{
int ret = 0;
if (!mFilesMuxing) {
bool fileMuxing = false;
fileMuxing = mVideoStream->CheckStreamHeader(data, size);
if (fileMuxing) {
AVDictionary *opt = nullptr;
av_dict_set_int(&opt, "use_editlist", 0, 0);
/* Write the stream header, if any. */
ret = avformat_write_header(mOutputFormat, nullptr);
if (ret < 0) {
char error_str[AV_ERROR_MAX_STRING_SIZE] = {0};
LogError("Error occurred when opening output file: %s\n",
av_make_error_string(error_str, AV_ERROR_MAX_STRING_SIZE, ret));
return false;
}
mFilesMuxing = true;
av_dict_free(&opt);
}
else {
LogWarning("Stream header not found, skip this frame.\n");
return false;
}
}
return true;
}
std::shared_ptr<FfmpegOutputStream> FfmpegMuxStreamV2::AddStream(AVFormatContext *outputFormat, std::shared_ptr<FfmpegOutputStream> FfmpegMuxStreamV2::AddStream(AVFormatContext *outputFormat,
enum AVCodecID encodecId, enum AVCodecID decodecId) enum AVCodecID encodecId, enum AVCodecID decodecId)
{ {

View File

@ -52,6 +52,8 @@ public: // About combine file.
private: private:
StatusCode OpenMuxOutputFile(const std::string &fileName); StatusCode OpenMuxOutputFile(const std::string &fileName);
void GetAVPacketDataCallback(AVPacket *pkt); void GetAVPacketDataCallback(AVPacket *pkt);
void CalculatingDuration(const unsigned long long &pts_us);
bool MakeSureStreamHeanderOK(const void *data, const size_t &size);
private: private:
/** /**
@ -73,5 +75,9 @@ private:
std::shared_ptr<FfmpegOutputStream> mAudioStream; std::shared_ptr<FfmpegOutputStream> mAudioStream;
AVDictionary *mOptions; AVDictionary *mOptions;
bool mFilesMuxing; bool mFilesMuxing;
std::shared_ptr<OutputFileInfo> mOutputFileInfo;
unsigned long long mFileMuxingDuration_us;
unsigned long long mStartPts;
bool mMuxingFinised;
}; };
#endif #endif

View File

@ -27,12 +27,13 @@ extern "C" {
#include <libavformat/avformat.h> #include <libavformat/avformat.h>
#include <libavutil/avutil.h> #include <libavutil/avutil.h>
#include <libavutil/error.h> #include <libavutil/error.h>
#include <libavutil/mathematics.h>
#ifdef __cplusplus #ifdef __cplusplus
} }
#endif #endif
#include <chrono>
#include <math.h> #include <math.h>
#include <memory> #include <memory>
#include <stdint.h>
#include <thread> #include <thread>
FfmpegReadFile::FfmpegReadFile() FfmpegReadFile::FfmpegReadFile()
: mReadVideoCallback(nullptr), mReadVideoCallbackContext(nullptr), mReadAudioCallback(nullptr), : mReadVideoCallback(nullptr), mReadVideoCallbackContext(nullptr), mReadAudioCallback(nullptr),
@ -108,7 +109,7 @@ void FfmpegReadFile::ReadFileThread(AVFormatContext *pFormatCtx, int mediaStream
LogWarning("ReadVideoCallback is null.\n"); LogWarning("ReadVideoCallback is null.\n");
} }
AVPacket packet; AVPacket packet;
unsigned long long playTime = 0; // unsigned long long playTime = 0;
// av_new_packet(&packet, AV_INPUT_BUFFER_MIN_SIZE); // av_new_packet(&packet, AV_INPUT_BUFFER_MIN_SIZE);
while (av_read_frame(pFormatCtx, &packet) >= 0) { while (av_read_frame(pFormatCtx, &packet) >= 0) {
if (nullptr == mReadVideoCallback) { if (nullptr == mReadVideoCallback) {
@ -121,8 +122,8 @@ void FfmpegReadFile::ReadFileThread(AVFormatContext *pFormatCtx, int mediaStream
} }
// Checks whether the packet belongs to a video stream. // Checks whether the packet belongs to a video stream.
if (packet.stream_index == mediaStreamIndex) { if (packet.stream_index == mediaStreamIndex) {
playTime = (packet.duration * pFormatCtx->streams[mediaStreamIndex]->time_base.num) / // playTime = (packet.duration * pFormatCtx->streams[mediaStreamIndex]->time_base.num) /
pFormatCtx->streams[mediaStreamIndex]->time_base.den; // pFormatCtx->streams[mediaStreamIndex]->time_base.den;
// AVRational time_base = pFormatCtx->streams[mediaStreamIndex]->time_base; // AVRational time_base = pFormatCtx->streams[mediaStreamIndex]->time_base;
// int64_t duration_ms = av_rescale_q(packet.duration, time_base, {1, AV_TIME_BASE}) * 1000; // int64_t duration_ms = av_rescale_q(packet.duration, time_base, {1, AV_TIME_BASE}) * 1000;
// LogInfo("Frame data address: %p, length: %zu\n", packet.data, packet.size); // LogInfo("Frame data address: %p, length: %zu\n", packet.data, packet.size);
@ -134,13 +135,14 @@ void FfmpegReadFile::ReadFileThread(AVFormatContext *pFormatCtx, int mediaStream
// pFormatCtx->streams[mediaStreamIndex]->time_base.den, // pFormatCtx->streams[mediaStreamIndex]->time_base.den,
// packet.duration); // packet.duration);
// LogInfo("pFormatCtx->bit_rate = %ld\n", pFormatCtx->bit_rate); // LogInfo("pFormatCtx->bit_rate = %ld\n", pFormatCtx->bit_rate);
playTime = (unsigned long long)(packet.duration * av_q2d(pFormatCtx->streams[mediaStreamIndex]->time_base) * // playTime = (unsigned long long)(packet.duration *
1000000); // av_q2d(pFormatCtx->streams[mediaStreamIndex]->time_base) *
// 1000000);
// LogInfo("playTime time ms:%llu\n", playTime); // LogInfo("playTime time ms:%llu\n", playTime);
int64_t duration_us = av_rescale_q( int64_t duration_us = av_rescale_q(
packet.duration, pFormatCtx->streams[mediaStreamIndex]->time_base, (AVRational){1, 1000000}); packet.duration, pFormatCtx->streams[mediaStreamIndex]->time_base, (AVRational){1, 1000000});
unsigned long long playTime_us = // unsigned long long playTime_us =
av_rescale_q(playTime, pFormatCtx->streams[mediaStreamIndex]->time_base, (AVRational){1, 1000000}); // av_rescale_q(playTime, pFormatCtx->streams[mediaStreamIndex]->time_base, (AVRational){1, 1000000});
// LogInfo("playTime_us time ms:%llu\n", playTime_us); // LogInfo("playTime_us time ms:%llu\n", playTime_us);
ReadFrame(&packet, duration_us); ReadFrame(&packet, duration_us);
// std::this_thread::sleep_for(std::chrono::milliseconds(playTime)); // std::this_thread::sleep_for(std::chrono::milliseconds(playTime));