Improve: muxing-file duration handling, and fix memory leaks.

This commit is contained in:
Fancy code 2024-07-12 22:58:55 +08:00
parent 7d6164959e
commit 59dd2bda6a
12 changed files with 141 additions and 56 deletions

View File

@ -122,8 +122,6 @@ typedef struct camera_task_param
unsigned int mVideoRecordingTimeMs;
std::shared_ptr<VCameraTaskContext> mCtx;
} CameraTaskParam;
// using AudioStreamCallback = void (*)(const void *, const int, const unsigned long long);
// using VideoStreamCallback = void (*)(const void *, const int, const unsigned long long);
using AudioStreamCallback = std::function<void(const void *, const unsigned int &, const unsigned long long &)>;
using VideoStreamCallback = std::function<void(const void *, const unsigned int &, const unsigned long long &)>;
class VCameraHal

View File

@ -27,14 +27,15 @@
#include <thread>
#include <vector>
constexpr int MEDIA_TASK_NOT_START = 0;
one_frame_stream::one_frame_stream() : mType(FrameType::END), mData(nullptr), mLength(0), mTimeStamp(0)
one_frame_stream::one_frame_stream() : mType(FrameType::END), mData(nullptr), mLength(0), mTimeStamp_us(0)
{
}
one_frame_stream::~one_frame_stream()
{
}
MediaHandle::MediaHandle(const MediaChannel &mediaChannel, const std::shared_ptr<VCameraHal> &cameraHal)
: mMediaChannel(mediaChannel), mCameraHal(cameraHal), mTaskRuning(false), mFirstFrameTimeStamp(MEDIA_TASK_NOT_START)
: mMediaChannel(mediaChannel), mCameraHal(cameraHal), mTaskRuning(false),
mFirstFrameTimeStamp_us(MEDIA_TASK_NOT_START)
{
}
void MediaHandle::Init(void)
@ -140,10 +141,11 @@ void MediaHandle::TaskTimer(void)
mStreamHandle->UnInit();
if (mCameraHal) {
mCameraHal->StopTask();
ClearFrameList();
}
mMutex.lock();
mStreamHandle.reset();
mFirstFrameTimeStamp = MEDIA_TASK_NOT_START;
mFirstFrameTimeStamp_us = MEDIA_TASK_NOT_START;
auto runingTask = mCurrentTask.lock();
if (mCurrentTask.expired()) {
LogWarning("mCurrentTask is expired.\n");
@ -185,26 +187,25 @@ void MediaHandle::FrameHandle(void)
}
}
}
void MediaHandle::HandleListFrame(void)
void inline MediaHandle::HandleListFrame(void)
{
int leftFrameCount = -1;
do {
OneFrameStream &frontFrame = mFrameList.front();
// OneFrameStream handleIt;
// handleIt.mData = frontFrame.mData;
// handleIt.mLength = frontFrame.mLength;
// handleIt.mType = frontFrame.mType;
if (FrameType::VIDEO == frontFrame.mType) {
mStreamHandle->GetVideoStream(frontFrame.mData, frontFrame.mLength, frontFrame.mTimeStamp);
mStreamHandle->GetVideoStream(frontFrame.mData, frontFrame.mLength, frontFrame.mTimeStamp_us);
}
else if (FrameType::AUDIO == frontFrame.mType) {
mStreamHandle->GetAudioStream(frontFrame.mData, frontFrame.mLength, frontFrame.mTimeStamp);
mStreamHandle->GetAudioStream(frontFrame.mData, frontFrame.mLength, frontFrame.mTimeStamp_us);
}
free(frontFrame.mData);
frontFrame.mData = nullptr;
mFrameList.pop_front();
leftFrameCount = mFrameList.size();
} while (leftFrameCount > 0);
if (mStreamHandle->HandleFinished()) {
mTaskRuning = false;
}
}
CameraTaskType MediaHandle::TaskTypeConvert(const MediaTaskType &type)
{
@ -219,18 +220,29 @@ void MediaHandle::GetAudioStreamCallback(const void *stream, const int &length,
GetAVStream(FrameType::AUDIO, stream, length, timeStamp);
}
void MediaHandle::GetAVStream(const FrameType &type, const void *stream, const int &length,
const unsigned long long &timeStamp)
const unsigned long long &timeStamp_us)
{
std::unique_lock<std::mutex> lock(mMutex);
if (MEDIA_TASK_NOT_START == mFirstFrameTimeStamp) {
mFirstFrameTimeStamp = timeStamp;
if (MEDIA_TASK_NOT_START == mFirstFrameTimeStamp_us) {
mFirstFrameTimeStamp_us = timeStamp_us;
}
OneFrameStream addFrame;
addFrame.mData = malloc(length); // TODO: detected memory leaks
addFrame.mData = malloc(length);
addFrame.mLength = length;
memcpy(addFrame.mData, stream, length);
addFrame.mType = type;
addFrame.mTimeStamp = timeStamp - mFirstFrameTimeStamp;
addFrame.mTimeStamp_us = timeStamp_us - mFirstFrameTimeStamp_us;
mFrameList.push_back(addFrame);
mCvFrameHandle.notify_one();
}
void MediaHandle::ClearFrameList(void)
{
std::unique_lock<std::mutex> lock(mMutex);
for (auto &frame : mFrameList) {
if (frame.mData) {
free(frame.mData);
frame.mData = nullptr;
}
}
mFrameList.clear();
}

View File

@ -37,7 +37,7 @@ typedef struct one_frame_stream
FrameType mType;
void *mData;
int mLength;
unsigned long long mTimeStamp;
unsigned long long mTimeStamp_us;
} OneFrameStream;
class MediaHandle : public VMediaHandle, public std::enable_shared_from_this<MediaHandle>
{
@ -61,7 +61,17 @@ private:
CameraTaskType TaskTypeConvert(const MediaTaskType &type);
void GetVideoStreamCallback(const void *stream, const int &length, const unsigned long long &timeStamp);
void GetAudioStreamCallback(const void *stream, const int &length, const unsigned long long &timeStamp);
void GetAVStream(const FrameType &type, const void *stream, const int &length, const unsigned long long &timeStamp);
/**
* @brief
*
* @param type The type of media stream (video/audio).
* @param stream Data pointer of the media stream.
* @param length The length of the media stream data.
* @param timeStamp_us The unit of timestamp must be us.
*/
void GetAVStream(const FrameType &type, const void *stream, const int &length,
const unsigned long long &timeStamp_us);
void ClearFrameList(void);
private:
std::mutex mMutex;
@ -75,6 +85,6 @@ private:
std::thread mTaskTimerThread;
std::thread mFrameHandleThread;
std::list<OneFrameStream> mFrameList;
unsigned long long mFirstFrameTimeStamp;
unsigned long long mFirstFrameTimeStamp_us;
};
#endif

View File

@ -25,7 +25,8 @@
#include <string>
#include <utility>
#include <vector>
RecordMp4::RecordMp4(std::shared_ptr<VMediaTask> &recordTask) : mRecordMp4Object(nullptr), mRecordTask(recordTask)
RecordMp4::RecordMp4(std::shared_ptr<VMediaTask> &recordTask)
: mRecordMp4Object(nullptr), mRecordTask(recordTask), mIsRecordingFinished(OUTPUT_FILE_STATUS_END)
{
}
StatusCode RecordMp4::Init(void)
@ -36,7 +37,8 @@ StatusCode RecordMp4::Init(void)
return CreateStatusCode(STATUS_CODE_NOT_OK);
}
std::string videoPath = mRecordTask->GetTargetNameForSaving();
OutputFileInfo fileInfo = {.mFileName = videoPath.c_str(), .mDuration_ms = 5000};
OutputFileInfo fileInfo = {
.mFileName = videoPath.c_str(), .mDuration_ms = 5000, .mFinished = &mIsRecordingFinished};
StatusCode code = IOpenOutputFile(mRecordMp4Object, &fileInfo);
if (!IsCodeOK(code)) {
LogError("OpenOutputFile failed.\n");
@ -84,4 +86,8 @@ StatusCode RecordMp4::GetAllFiles(std::vector<MediaTaskResponse> &files)
files = std::move(mTaskResponse);
mTaskResponse.clear();
return CreateStatusCode(STATUS_CODE_OK);
}
/**
 * @brief Report whether the mp4 muxer has finished writing the output file.
 *        mIsRecordingFinished is updated by the muxer through the pointer
 *        handed over in OutputFileInfo::mFinished during Init().
 * @return true once the muxer flagged the file as finished, false otherwise.
 */
bool RecordMp4::HandleFinished(void)
{
    // Compare directly instead of the redundant "? true : false" ternary.
    return mIsRecordingFinished == OUTPUT_FILE_STATUS_FINISHED;
}

View File

@ -30,11 +30,13 @@ public:
void GetVideoStream(const void *stream, const unsigned int &length, const unsigned long long &timeStamp) override;
void GetAudioStream(const void *stream, const unsigned int &length, const unsigned long long &timeStamp) override;
StatusCode GetAllFiles(std::vector<MediaTaskResponse> &files) override;
bool HandleFinished(void) override;
private:
std::mutex mMutex;
void *mRecordMp4Object;
std::shared_ptr<VMediaTask> mRecordTask;
std::vector<MediaTaskResponse> mTaskResponse;
int mIsRecordingFinished;
};
#endif

View File

@ -43,4 +43,9 @@ StatusCode VStreamHandle::GetAllFiles(std::vector<MediaTaskResponse> &files)
{
LogInfo("STATUS_CODE_VIRTUAL_FUNCTION\n");
return CreateStatusCode(STATUS_CODE_VIRTUAL_FUNCTION);
}
/**
 * @brief Default implementation of the virtual completion hook; concrete
 *        stream handlers (e.g. RecordMp4) override it to report when their
 *        output is finished.
 * @return Always false: the base class never finishes on its own.
 */
bool VStreamHandle::HandleFinished(void)
{
    // Log marks an un-overridden virtual call, matching GetAllFiles above.
    LogInfo("STATUS_CODE_VIRTUAL_FUNCTION\n");
    return false;
}

View File

@ -28,5 +28,6 @@ public:
virtual void GetVideoStream(const void *stream, const unsigned int &length, const unsigned long long &timeStamp);
virtual void GetAudioStream(const void *stream, const unsigned int &length, const unsigned long long &timeStamp);
virtual StatusCode GetAllFiles(std::vector<MediaTaskResponse> &files);
virtual bool HandleFinished(void);
};
#endif

View File

@ -42,10 +42,17 @@ typedef struct StreamInfo
const StreamType mType; ///< Type of the stream.
const unsigned long long mTimeStamp_us; ///< Timestamp of the stream.
} StreamInfo;
enum OutputFileStatus
{
OUTPUT_FILE_STATUS_MUXING = 0,
OUTPUT_FILE_STATUS_FINISHED,
OUTPUT_FILE_STATUS_END
};
typedef struct output_file_info
{
const char *mFileName;
const unsigned int mDuration_ms;
const char *mFileName; ///< Name of the output file. Must be an absolute path.
const unsigned int mDuration_ms; ///< Duration of the output file in milliseconds.
int *const mFinished; ///< Whether the output file is finished. See OutputFileStatus.
} OutputFileInfo;
typedef void (*ReadVideoFileCallback)(const void *, const unsigned int, const unsigned long long, void *);
typedef void (*ReadAudioFileCallback)(const void *, const unsigned int, const unsigned long long, void *);

View File

@ -21,7 +21,6 @@
#include <stdint.h>
#include <stdio.h>
#include <string.h>
#include <string>
#ifdef __cplusplus
extern "C" {
#endif

View File

@ -35,11 +35,15 @@ extern "C" {
#include <memory>
// #include <mutex>
#include <string>
FfmpegMuxStreamV2::FfmpegMuxStreamV2() : mOutputFormat(nullptr), mOptions(nullptr), mFilesMuxing(false)
constexpr unsigned long long MUXING_NOT_START = 0;
FfmpegMuxStreamV2::FfmpegMuxStreamV2()
: mOutputFormat(nullptr), mOptions(nullptr), mFilesMuxing(false), mFileMuxingDuration_us(0),
mStartPts(MUXING_NOT_START), mMuxingFinised(false)
{
}
/**
 * @brief Snapshot the output parameters and open the mux output file.
 * @param fileInfo Output description; copied into mOutputFileInfo so that
 *        CalculatingDuration can read mDuration_ms / mFinished later.
 * @return Status code from OpenMuxOutputFile.
 *
 * NOTE(review): the copy stores fileInfo.mFileName as a raw const char *.
 * If the caller's backing buffer (e.g. a local std::string, as in
 * RecordMp4::Init) is destroyed after this call, mOutputFileInfo->mFileName
 * dangles — confirm it is never read again after OpenMuxOutputFile returns.
 */
StatusCode FfmpegMuxStreamV2::OpenOutputFile(const OutputFileInfo &fileInfo)
{
mOutputFileInfo = std::make_shared<OutputFileInfo>(fileInfo);
return OpenMuxOutputFile(fileInfo.mFileName);
}
StatusCode FfmpegMuxStreamV2::CloseOutputFile(void)
@ -62,34 +66,29 @@ StatusCode FfmpegMuxStreamV2::CloseOutputFile(void)
}
avformat_free_context(mOutputFormat);
fx_system("sync");
mOutputFileInfo.reset();
return CreateStatusCode(STATUS_CODE_OK);
}
void FfmpegMuxStreamV2::GetStreamData(const void *data, const size_t &size, const StreamInfo &streamInfo)
{
int ret = 0;
if (!mFilesMuxing) {
bool fileMuxing = false;
fileMuxing = mVideoStream->CheckStreamHeader(data, size);
if (fileMuxing) {
AVDictionary *opt = nullptr;
av_dict_set_int(&opt, "use_editlist", 0, 0);
/* Write the stream header, if any. */
ret = avformat_write_header(mOutputFormat, nullptr);
if (ret < 0) {
char error_str[AV_ERROR_MAX_STRING_SIZE] = {0};
LogError("Error occurred when opening output file: %s\n",
av_make_error_string(error_str, AV_ERROR_MAX_STRING_SIZE, ret));
return;
}
mFilesMuxing = true;
av_dict_free(&opt);
}
else {
LogWarning("Stream header not found, skip this frame.\n");
return;
}
if (mMuxingFinised) {
/**
* @brief Packaging has been completed according to the recording duration parameters, and the excess data
* frames will be discarded.
*/
return;
}
if (!MakeSureStreamHeanderOK(data, size)) {
return;
}
if (streamInfo.mType == STREAM_TYPE_VIDEO_H264 && mVideoStream) {
if (MUXING_NOT_START == mStartPts) {
mStartPts = streamInfo.mTimeStamp_us;
}
/**
* @brief Use the video's timestamp to count the playback duration of the packaged file.
*/
CalculatingDuration(streamInfo.mTimeStamp_us);
mVideoStream->WriteSourceData(data, size, streamInfo.mTimeStamp_us);
}
if (streamInfo.mType == STREAM_TYPE_AUDIO_G711A && mAudioStream) {
@ -160,6 +159,44 @@ void FfmpegMuxStreamV2::GetAVPacketDataCallback(AVPacket *pkt)
av_make_error_string(error_str, AV_ERROR_MAX_STRING_SIZE, ret));
}
}
/**
 * @brief Accumulate the muxed-file playback duration from the video pts and
 *        flag completion once the requested duration has been reached.
 * @param pts_us Current video timestamp in microseconds; mStartPts was
 *        latched from the first video frame in GetStreamData.
 */
void FfmpegMuxStreamV2::CalculatingDuration(const unsigned long long &pts_us)
{
    mFileMuxingDuration_us = pts_us - mStartPts;
    // Fix: guard the dereference. The original read
    // mOutputFileInfo->mDuration_ms unconditionally while null-checking
    // mOutputFileInfo only before the mFinished write below.
    if (!mOutputFileInfo) {
        return;
    }
    if (mFileMuxingDuration_us / 1000 >= mOutputFileInfo->mDuration_ms) {
        // Fix: %llu — mFileMuxingDuration_us is unsigned long long.
        LogInfo("Muxing file finished, duration: %llu ms\n", mFileMuxingDuration_us / 1000);
        mMuxingFinised = true;
        if (mOutputFileInfo->mFinished) {
            // Report completion back to the owner (see OutputFileStatus).
            *(mOutputFileInfo->mFinished) = static_cast<int>(OUTPUT_FILE_STATUS_FINISHED);
        }
    }
}
bool inline FfmpegMuxStreamV2::MakeSureStreamHeanderOK(const void *data, const size_t &size)
{
int ret = 0;
if (!mFilesMuxing) {
bool fileMuxing = false;
fileMuxing = mVideoStream->CheckStreamHeader(data, size);
if (fileMuxing) {
AVDictionary *opt = nullptr;
av_dict_set_int(&opt, "use_editlist", 0, 0);
/* Write the stream header, if any. */
ret = avformat_write_header(mOutputFormat, nullptr);
if (ret < 0) {
char error_str[AV_ERROR_MAX_STRING_SIZE] = {0};
LogError("Error occurred when opening output file: %s\n",
av_make_error_string(error_str, AV_ERROR_MAX_STRING_SIZE, ret));
return false;
}
mFilesMuxing = true;
av_dict_free(&opt);
}
else {
LogWarning("Stream header not found, skip this frame.\n");
return false;
}
}
return true;
}
std::shared_ptr<FfmpegOutputStream> FfmpegMuxStreamV2::AddStream(AVFormatContext *outputFormat,
enum AVCodecID encodecId, enum AVCodecID decodecId)
{

View File

@ -52,6 +52,8 @@ public: // About combine file.
private:
StatusCode OpenMuxOutputFile(const std::string &fileName);
void GetAVPacketDataCallback(AVPacket *pkt);
void CalculatingDuration(const unsigned long long &pts_us);
bool MakeSureStreamHeanderOK(const void *data, const size_t &size);
private:
/**
@ -73,5 +75,9 @@ private:
std::shared_ptr<FfmpegOutputStream> mAudioStream;
AVDictionary *mOptions;
bool mFilesMuxing;
std::shared_ptr<OutputFileInfo> mOutputFileInfo;
unsigned long long mFileMuxingDuration_us;
unsigned long long mStartPts;
bool mMuxingFinised;
};
#endif

View File

@ -27,12 +27,13 @@ extern "C" {
#include <libavformat/avformat.h>
#include <libavutil/avutil.h>
#include <libavutil/error.h>
#include <libavutil/mathematics.h>
#ifdef __cplusplus
}
#endif
#include <chrono>
#include <math.h>
#include <memory>
#include <stdint.h>
#include <thread>
FfmpegReadFile::FfmpegReadFile()
: mReadVideoCallback(nullptr), mReadVideoCallbackContext(nullptr), mReadAudioCallback(nullptr),
@ -108,7 +109,7 @@ void FfmpegReadFile::ReadFileThread(AVFormatContext *pFormatCtx, int mediaStream
LogWarning("ReadVideoCallback is null.\n");
}
AVPacket packet;
unsigned long long playTime = 0;
// unsigned long long playTime = 0;
// av_new_packet(&packet, AV_INPUT_BUFFER_MIN_SIZE);
while (av_read_frame(pFormatCtx, &packet) >= 0) {
if (nullptr == mReadVideoCallback) {
@ -121,8 +122,8 @@ void FfmpegReadFile::ReadFileThread(AVFormatContext *pFormatCtx, int mediaStream
}
// Checks whether the packet belongs to a video stream.
if (packet.stream_index == mediaStreamIndex) {
playTime = (packet.duration * pFormatCtx->streams[mediaStreamIndex]->time_base.num) /
pFormatCtx->streams[mediaStreamIndex]->time_base.den;
// playTime = (packet.duration * pFormatCtx->streams[mediaStreamIndex]->time_base.num) /
// pFormatCtx->streams[mediaStreamIndex]->time_base.den;
// AVRational time_base = pFormatCtx->streams[mediaStreamIndex]->time_base;
// int64_t duration_ms = av_rescale_q(packet.duration, time_base, {1, AV_TIME_BASE}) * 1000;
// LogInfo("Frame data address: %p, length: %zu\n", packet.data, packet.size);
@ -134,13 +135,14 @@ void FfmpegReadFile::ReadFileThread(AVFormatContext *pFormatCtx, int mediaStream
// pFormatCtx->streams[mediaStreamIndex]->time_base.den,
// packet.duration);
// LogInfo("pFormatCtx->bit_rate = %ld\n", pFormatCtx->bit_rate);
playTime = (unsigned long long)(packet.duration * av_q2d(pFormatCtx->streams[mediaStreamIndex]->time_base) *
1000000);
// playTime = (unsigned long long)(packet.duration *
// av_q2d(pFormatCtx->streams[mediaStreamIndex]->time_base) *
// 1000000);
// LogInfo("playTime time ms:%llu\n", playTime);
int64_t duration_us = av_rescale_q(
packet.duration, pFormatCtx->streams[mediaStreamIndex]->time_base, (AVRational){1, 1000000});
unsigned long long playTime_us =
av_rescale_q(playTime, pFormatCtx->streams[mediaStreamIndex]->time_base, (AVRational){1, 1000000});
// unsigned long long playTime_us =
// av_rescale_q(playTime, pFormatCtx->streams[mediaStreamIndex]->time_base, (AVRational){1, 1000000});
// LogInfo("playTime_us time ms:%llu\n", playTime_us);
ReadFrame(&packet, duration_us);
// std::this_thread::sleep_for(std::chrono::milliseconds(playTime));