Backup: add time stamp param.

This commit is contained in:
Fancy code 2024-07-10 22:40:51 +08:00
parent 42e1e62756
commit e038e2a9ff
14 changed files with 98 additions and 65 deletions

View File

@@ -26,14 +26,15 @@
 #include <string.h>
 #include <thread>
 #include <vector>
-one_frame_stream::one_frame_stream() : mType(FrameType::END), mData(nullptr), mLength(0)
+constexpr int MEDIA_TASK_NOT_START = 0;
+one_frame_stream::one_frame_stream() : mType(FrameType::END), mData(nullptr), mLength(0), mTimeStamp(0)
 {
 }
 one_frame_stream::~one_frame_stream()
 {
 }
 MediaHandle::MediaHandle(const MediaChannel &mediaChannel, const std::shared_ptr<VCameraHal> &cameraHal)
-    : mMediaChannel(mediaChannel), mCameraHal(cameraHal), mTaskRuning(false)
+    : mMediaChannel(mediaChannel), mCameraHal(cameraHal), mTaskRuning(false), mFirstFrameTimeStamp(MEDIA_TASK_NOT_START)
 {
 }
 void MediaHandle::Init(void)
@@ -121,11 +122,11 @@ void MediaHandle::StartTaskTimer(void)
 }
 void MediaHandle::TaskTimer(void)
 {
-    constexpr int TASK_TIMER = 1000 * 10;
+    constexpr int TASK_TIME_OUT = 1000 * 20;
     mTaskRuning = true;
     while (mTaskRuning) {
        std::unique_lock<std::mutex> lock(mMutex);
-        mCvTaskHandle.wait_for(lock, std::chrono::milliseconds(TASK_TIMER), [&] {
+        mCvTaskHandle.wait_for(lock, std::chrono::milliseconds(TASK_TIME_OUT), [&] {
            return !mTaskRuning;
        });
        /**
@@ -142,6 +143,7 @@ void MediaHandle::TaskTimer(void)
        }
        mStreamHandle.reset();
        mMutex.lock();
+        mFirstFrameTimeStamp = MEDIA_TASK_NOT_START;
        auto runingTask = mCurrentTask.lock();
        if (mCurrentTask.expired()) {
            LogWarning("mCurrentTask is expired.\n");
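TaskTimer() sleeps on mCvTaskHandle with a 20-second timeout and a predicate on mTaskRuning, so clearing the flag and notifying the condition variable wakes the loop immediately instead of waiting out the period; the added line also re-arms mFirstFrameTimeStamp for the next task. A self-contained sketch of the same interruptible-wait pattern (class and method names here are illustrative, not from the repo):

    #include <chrono>
    #include <condition_variable>
    #include <mutex>
    #include <thread>

    class PeriodicTask
    {
    public:
        void Start()
        {
            mRunning = true;
            mThread = std::thread([this] {
                constexpr int TASK_TIME_OUT = 1000 * 20; // same 20 s period as TaskTimer()
                std::unique_lock<std::mutex> lock(mMutex);
                while (mRunning) {
                    // Wakes every TASK_TIME_OUT ms, or immediately when Stop() clears mRunning.
                    mCv.wait_for(lock, std::chrono::milliseconds(TASK_TIME_OUT), [this] { return !mRunning; });
                    if (mRunning) {
                        DoWork();
                    }
                }
            });
        }
        void Stop()
        {
            {
                std::lock_guard<std::mutex> lock(mMutex);
                mRunning = false;
            }
            mCv.notify_one();
            if (mThread.joinable()) {
                mThread.join();
            }
        }
    private:
        void DoWork() {}
        std::mutex mMutex;
        std::condition_variable mCv;
        bool mRunning = false;
        std::thread mThread;
    };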
@@ -180,19 +182,19 @@ void MediaHandle::HandleListFrame(void)
     int leftFrameCount = -1;
     do {
        OneFrameStream &frontFrame = mFrameList.front();
-        OneFrameStream handleIt;
-        handleIt.mData = frontFrame.mData;
-        handleIt.mLength = frontFrame.mLength;
-        handleIt.mType = frontFrame.mType;
+        // OneFrameStream handleIt;
+        // handleIt.mData = frontFrame.mData;
+        // handleIt.mLength = frontFrame.mLength;
+        // handleIt.mType = frontFrame.mType;
+        if (FrameType::VIDEO == frontFrame.mType) {
+            mStreamHandle->GetVideoStream(frontFrame.mData, frontFrame.mLength, frontFrame.mTimeStamp);
+        }
+        if (FrameType::AUDIO == frontFrame.mType) {
+            mStreamHandle->GetAudioStream(frontFrame.mData, frontFrame.mLength, frontFrame.mTimeStamp);
+        }
+        free(frontFrame.mData);
+        frontFrame.mData = nullptr;
        mFrameList.pop_front();
-        if (FrameType::VIDEO == handleIt.mType) {
-            mStreamHandle->GetVideoStream(handleIt.mData, handleIt.mLength, 0);
-        }
-        if (FrameType::AUDIO == handleIt.mType) {
-            mStreamHandle->GetAudioStream(handleIt.mData, handleIt.mLength, 0);
-        }
-        free(handleIt.mData);
-        handleIt.mData = nullptr;
        leftFrameCount = mFrameList.size();
     } while (leftFrameCount > 0);
 }
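The handler now passes each queued frame to the stream handle together with the timestamp captured alongside it, and frees the malloc'd copy before popping the node, instead of shuttling the fields through a temporary. The same drain pattern in isolation (MiniFrame and FrameSink are illustrative stand-ins, not repo types):

    #include <cstdlib>
    #include <functional>
    #include <list>

    // Minimal sketch of the drain step above: hand the front frame to a sink
    // with its timestamp, release the copied payload, then pop the node.
    struct MiniFrame { int type; void *data; int len; unsigned long long ts; };
    using FrameSink = std::function<void(int, const void *, int, unsigned long long)>;

    void DrainFrames(std::list<MiniFrame> &frames, const FrameSink &sink)
    {
        while (!frames.empty()) {
            MiniFrame &front = frames.front();
            sink(front.type, front.data, front.len, front.ts);
            std::free(front.data);   // release the copy made in the capture callback
            front.data = nullptr;
            frames.pop_front();
        }
    }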
@@ -202,25 +204,45 @@ CameraTaskType MediaHandle::TaskTypeConvert(const MediaTaskType &type)
 }
 void MediaHandle::GetVideoStreamCallback(const void *stream, const int &length, const unsigned long long &timeStamp)
 {
-    std::unique_lock<std::mutex> lock(mMutex);
-    // mStreamHandle->GetVideoStream(stream, length, timeStamp);
-    OneFrameStream addFrame;
-    addFrame.mData = malloc(length);
-    addFrame.mLength = length;
-    memcpy(addFrame.mData, stream, length);
-    addFrame.mType = FrameType::VIDEO;
-    mFrameList.push_back(addFrame);
-    mCvFrameHandle.notify_one();
+    GetAVStream(FrameType::VIDEO, stream, length, timeStamp);
+    // return;
+    // std::unique_lock<std::mutex> lock(mMutex);
+    // // mStreamHandle->GetVideoStream(stream, length, timeStamp);
+    // OneFrameStream addFrame;
+    // addFrame.mData = malloc(length);
+    // addFrame.mLength = length;
+    // memcpy(addFrame.mData, stream, length);
+    // addFrame.mType = FrameType::VIDEO;
+    // mFrameList.push_back(addFrame);
+    // mCvFrameHandle.notify_one();
 }
 void MediaHandle::GetAudioStreamCallback(const void *stream, const int &length, const unsigned long long &timeStamp)
+{
+    GetAVStream(FrameType::AUDIO, stream, length, timeStamp);
+    // return;
+    // std::unique_lock<std::mutex> lock(mMutex);
+    // // mStreamHandle->GetAudioStream(stream, length, timeStamp);
+    // OneFrameStream addFrame;
+    // addFrame.mData = malloc(length);
+    // addFrame.mLength = length;
+    // memcpy(addFrame.mData, stream, length);
+    // addFrame.mType = FrameType::AUDIO;
+    // mFrameList.push_back(addFrame);
+    // mCvFrameHandle.notify_one();
+}
+void MediaHandle::GetAVStream(const FrameType &type, const void *stream, const int &length,
+                              const unsigned long long &timeStamp)
 {
     std::unique_lock<std::mutex> lock(mMutex);
-    // mStreamHandle->GetAudioStream(stream, length, timeStamp);
+    if (MEDIA_TASK_NOT_START == mFirstFrameTimeStamp) {
+        mFirstFrameTimeStamp = timeStamp;
+    }
     OneFrameStream addFrame;
     addFrame.mData = malloc(length);
     addFrame.mLength = length;
     memcpy(addFrame.mData, stream, length);
-    addFrame.mType = FrameType::AUDIO;
+    addFrame.mType = type;
+    addFrame.mTimeStamp = timeStamp - mFirstFrameTimeStamp;
     mFrameList.push_back(addFrame);
     mCvFrameHandle.notify_one();
 }
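GetAVStream latches the first frame's capture time in mFirstFrameTimeStamp and stores every frame's offset from it, so downstream consumers see timestamps that start at zero for each task. A minimal sketch of that normalization, assuming millisecond capture timestamps (names are illustrative):

    #include <cstdio>

    // Illustrative only: mirrors the mFirstFrameTimeStamp logic above.
    constexpr unsigned long long NOT_STARTED = 0;
    static unsigned long long gFirstTimeStamp = NOT_STARTED;

    unsigned long long NormalizeTimeStamp(unsigned long long captureTimeMs)
    {
        if (NOT_STARTED == gFirstTimeStamp) {
            gFirstTimeStamp = captureTimeMs;    // first frame defines time zero
        }
        return captureTimeMs - gFirstTimeStamp; // stream-relative timestamp
    }

    int main(void)
    {
        printf("%llu %llu %llu\n",
               NormalizeTimeStamp(1000), NormalizeTimeStamp(1040), NormalizeTimeStamp(1080));
        // prints: 0 40 80
        return 0;
    }

Note that this scheme, like the code above, uses 0 as the "not started" sentinel, so it assumes a genuine capture timestamp of 0 never occurs.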

View File

@@ -37,6 +37,7 @@ typedef struct one_frame_stream
     FrameType mType;
     void *mData;
     int mLength;
+    unsigned long long mTimeStamp;
 } OneFrameStream;
 class MediaHandle : public VMediaHandle, public std::enable_shared_from_this<MediaHandle>
 {
@@ -60,6 +61,7 @@ private:
     CameraTaskType TaskTypeConvert(const MediaTaskType &type);
     void GetVideoStreamCallback(const void *stream, const int &length, const unsigned long long &timeStamp);
     void GetAudioStreamCallback(const void *stream, const int &length, const unsigned long long &timeStamp);
+    void GetAVStream(const FrameType &type, const void *stream, const int &length, const unsigned long long &timeStamp);
 private:
     std::mutex mMutex;
@@ -73,5 +75,6 @@ private:
     std::thread mTaskTimerThread;
     std::thread mFrameHandleThread;
     std::list<OneFrameStream> mFrameList;
+    unsigned long long mFirstFrameTimeStamp;
 };
 #endif

View File

@@ -36,7 +36,8 @@ StatusCode RecordMp4::Init(void)
        return CreateStatusCode(STATUS_CODE_NOT_OK);
     }
     std::string videoPath = mRecordTask->GetTargetNameForSaving();
-    StatusCode code = IOpenOutputFile(mRecordMp4Object, videoPath.c_str());
+    OutputFileInfo fileInfo = {.mFileName = videoPath.c_str(), .mDuration_ms = 5000};
+    StatusCode code = IOpenOutputFile(mRecordMp4Object, &fileInfo);
     if (!IsCodeOK(code)) {
        LogError("OpenOutputFile failed.\n");
        ICloseOutputFile(mRecordMp4Object);
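The call site now builds an OutputFileInfo on the stack and passes its address. Because the struct stores only a raw const char *, the std::string backing mFileName has to outlive the IOpenOutputFile call, which holds here since videoPath and fileInfo share a scope. A minimal sketch of that constraint (placeholder path, same hard-coded 5000 ms duration as above):

    std::string videoPath = "/tmp/record_0001.mp4";   // placeholder path
    OutputFileInfo fileInfo = {.mFileName = videoPath.c_str(), .mDuration_ms = 5000};
    // videoPath must stay alive at least until IOpenOutputFile returns,
    // because fileInfo.mFileName points into its buffer.
    StatusCode code = IOpenOutputFile(mRecordMp4Object, &fileInfo);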
@@ -66,7 +67,7 @@ void RecordMp4::GetVideoStream(const void *stream, const unsigned int &length, c
 {
     std::lock_guard<std::mutex> locker(mMutex);
     if (mRecordMp4Object) {
-        StreamInfo info = {.mType = STREAM_TYPE_VIDEO_H264};
+        StreamInfo info = {.mType = STREAM_TYPE_VIDEO_H264, .mTimeStamp = timeStamp};
        IGetStreamData(mRecordMp4Object, stream, length, info);
     }
 }
@@ -74,7 +75,7 @@ void RecordMp4::GetAudioStream(const void *stream, const unsigned int &length, c
 {
     std::lock_guard<std::mutex> locker(mMutex);
     if (mRecordMp4Object) {
-        StreamInfo info = {.mType = STREAM_TYPE_AUDIO_G711A};
+        StreamInfo info = {.mType = STREAM_TYPE_AUDIO_G711A, .mTimeStamp = timeStamp};
        IGetStreamData(mRecordMp4Object, stream, length, info);
     }
 }

View File

@@ -35,7 +35,13 @@ enum StreamType
 typedef struct stream_info
 {
     const StreamType mType;
+    const unsigned long long mTimeStamp;
 } StreamInfo;
+typedef struct output_file_info
+{
+    const char *mFileName;
+    const unsigned int mDuration_ms;
+} OutputFileInfo;
 typedef void (*ReadVideoFileCallback)(const void *, const unsigned int, void *);
 typedef void (*ReadAudioFileCallback)(const void *, const unsigned int, void *);
 void *ICreateMediaBase(const MediaHandleType type);
@@ -47,7 +53,7 @@ StatusCode ISetReadAudioCallback(void *object, ReadVideoFileCallback callback, v
 StatusCode IStartReadFile(void *object, const char *path);
 StatusCode IStopReadFile(void *object);
-StatusCode IOpenOutputFile(void *object, const char *fileName);
+StatusCode IOpenOutputFile(void *object, const OutputFileInfo *info);
 StatusCode ICloseOutputFile(void *object);
 void IGetStreamData(void *object, const void *data, const size_t size, const StreamInfo streamInfo);
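Taken together, the new declarations change the calling convention for recording: the output is opened with an OutputFileInfo, and every data push carries a StreamInfo with a timestamp. A rough usage sketch under those declarations; the MediaHandleType enumerator, the buffer variables, and handle teardown are placeholders, not taken from this header:

    // Hypothetical caller of the updated C interface; placeholder names are marked.
    void *obj = ICreateMediaBase(MEDIA_HANDLE_MP4);            // enumerator name assumed
    OutputFileInfo fileInfo = {.mFileName = "/tmp/out.mp4", .mDuration_ms = 5000};
    if (IsCodeOK(IOpenOutputFile(obj, &fileInfo))) {
        StreamInfo video = {.mType = STREAM_TYPE_VIDEO_H264, .mTimeStamp = 0};
        IGetStreamData(obj, frameData, frameSize, video);      // frameData/frameSize: placeholder buffer
        ICloseOutputFile(obj);
    }
    // Handle teardown falls outside this header excerpt and is omitted here.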

View File

@@ -41,7 +41,7 @@ extern "C" {
 #include <functional>
 constexpr long SOURCE_AUDIO_SAMPEL_RATE = 8000;
 #define STREAM_DURATION 10.0
-#define STREAM_FRAME_RATE 1200000 /* 25 images/s */
+#define STREAM_FRAME_RATE 25 /* 25 images/s */
 #define STREAM_PIX_FMT AV_PIX_FMT_YUV420P /* default pix_fmt */
 FfmpegEncoder::FfmpegEncoder(const enum AVCodecID &codecId)
     : mCodecId(codecId), mCodecCtx(nullptr), mCodec(nullptr), mFrame(nullptr), mTmpFrame(nullptr), mTmpPkt(nullptr),
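The macro's value now matches its comment: 25 frames per second rather than 1200000. In FFmpeg such a constant typically feeds the encoder's frame rate and time base, roughly as follows (a sketch, not the repo's exact initialization; codecCtx is an AVCodecContext* configured elsewhere):

    // Typical way a frame-rate macro is consumed when configuring an encoder.
    codecCtx->framerate = (AVRational){STREAM_FRAME_RATE, 1};
    codecCtx->time_base = (AVRational){1, STREAM_FRAME_RATE}; // 1/25 s per frame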

View File

@@ -59,7 +59,7 @@ FfmpegMuxStream::FfmpegMuxStream()
     memset(&mVideoSt, 0, sizeof(mVideoSt));
     memset(&mAudioSt, 0, sizeof(mAudioSt));
 }
-StatusCode FfmpegMuxStream::OpenOutputFile(const std::string &fileName)
+StatusCode FfmpegMuxStream::OpenOutputFile(const OutputFileInfo &fileInfo)
 {
     if (!InitCodecVideo(AV_CODEC_ID_H264, &mCodecVideo, &mCodecVideoContext, &mFrameVideo)) {
        LogError("InitCodec failed\n");
@@ -72,10 +72,10 @@ StatusCode FfmpegMuxStream::OpenOutputFile(const std::string &fileName)
     int ret;
     AVFormatContext *oc = nullptr;
     int have_video = 0, have_audio = 0;
-    int encode_video = 0, encode_audio = 0;
+    // int encode_video = 0, encode_audio = 0;
     const AVCodec *audio_codec, *video_codec;
     AVDictionary *opt = nullptr;
-    avformat_alloc_output_context2(&oc, nullptr, "mp4", fileName.c_str());
+    avformat_alloc_output_context2(&oc, nullptr, "mp4", fileInfo.mFileName);
     if (!oc) {
        LogError("Could not deduce output format from file extension: using MPEG.\n");
        return CreateStatusCode(STATUS_CODE_NOT_OK);
@@ -88,13 +88,13 @@ StatusCode FfmpegMuxStream::OpenOutputFile(const std::string &fileName)
        LogInfo("Add video stream\n");
        add_stream(&mVideoSt, oc, &video_codec, fmt->video_codec);
        have_video = 1;
-        encode_video = 1;
+        // encode_video = 1;
     }
     if (fmt->audio_codec != AV_CODEC_ID_NONE) {
        LogInfo("Add audio stream\n");
        add_stream(&mAudioSt, oc, &audio_codec, fmt->audio_codec);
        have_audio = 1;
-        encode_audio = 1;
+        // encode_audio = 1;
     } /* Now that all the parameters are set, we can open the audio and
        * video codecs and allocate the necessary encode buffers. */
     if (have_video) {
@@ -104,13 +104,13 @@ StatusCode FfmpegMuxStream::OpenOutputFile(const std::string &fileName)
     if (have_audio) {
        open_audio(oc, audio_codec, &mAudioSt, opt);
     }
-    av_dump_format(oc, 0, fileName.c_str(), 1);
+    av_dump_format(oc, 0, fileInfo.mFileName, 1);
     if (!(fmt->flags & AVFMT_NOFILE)) {
-        ret = avio_open(&oc->pb, fileName.c_str(), AVIO_FLAG_WRITE);
+        ret = avio_open(&oc->pb, fileInfo.mFileName, AVIO_FLAG_WRITE);
        if (ret < 0) {
            char error_str[AV_ERROR_MAX_STRING_SIZE] = {0};
            LogError("Could not open '%s': %s\n",
-                     fileName.c_str(),
+                     fileInfo.mFileName,
                     av_make_error_string(error_str, AV_ERROR_MAX_STRING_SIZE, ret));
            return CreateStatusCode(STATUS_CODE_NOT_OK);
        }
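Apart from sourcing the name from fileInfo.mFileName, the open path is the stock FFmpeg muxer sequence. Condensed for reference, with error handling and the add_stream/open_video/open_audio steps elided; writing the header is required before packets can be muxed, though it falls outside this hunk:

    AVFormatContext *oc = nullptr;
    avformat_alloc_output_context2(&oc, nullptr, "mp4", fileInfo.mFileName);
    // ... add_stream() / open_video() / open_audio() as in the hunks above ...
    av_dump_format(oc, 0, fileInfo.mFileName, 1);
    if (!(oc->oformat->flags & AVFMT_NOFILE)) {
        avio_open(&oc->pb, fileInfo.mFileName, AVIO_FLAG_WRITE);
    }
    avformat_write_header(oc, &opt);   // opt is the AVDictionary* declared earlier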

View File

@@ -22,7 +22,7 @@ public:
     virtual ~FfmpegMuxStream() = default;
 public: // About combine file.
-    StatusCode OpenOutputFile(const std::string &fileName) override;
+    StatusCode OpenOutputFile(const OutputFileInfo &fileInfo) override;
     StatusCode CloseOutputFile(void) override;
     void GetStreamData(const void *data, const size_t &size, const StreamInfo &streamInfo) override;

View File

@@ -38,9 +38,9 @@ extern "C" {
 FfmpegMuxStreamV2::FfmpegMuxStreamV2() : mOutputFormat(nullptr), mOptions(nullptr), mFilesMuxing(false)
 {
 }
-StatusCode FfmpegMuxStreamV2::OpenOutputFile(const std::string &fileName)
+StatusCode FfmpegMuxStreamV2::OpenOutputFile(const OutputFileInfo &fileInfo)
 {
-    return OpenMuxOutputFile(fileName);
+    return OpenMuxOutputFile(fileInfo.mFileName);
 }
 StatusCode FfmpegMuxStreamV2::CloseOutputFile(void)
 {
@@ -90,10 +90,10 @@ void FfmpegMuxStreamV2::GetStreamData(const void *data, const size_t &size, cons
        }
     }
     if (streamInfo.mType == STREAM_TYPE_VIDEO_H264 && mVideoStream) {
-        mVideoStream->WriteSourceData(data, size);
+        mVideoStream->WriteSourceData(data, size, streamInfo.mTimeStamp);
     }
     if (streamInfo.mType == STREAM_TYPE_AUDIO_G711A && mAudioStream) {
-        mAudioStream->WriteSourceData(data, size);
+        mAudioStream->WriteSourceData(data, size, streamInfo.mTimeStamp);
     }
 }
 StatusCode inline FfmpegMuxStreamV2::OpenMuxOutputFile(const std::string &fileName)

View File

@@ -45,7 +45,7 @@ public:
     virtual ~FfmpegMuxStreamV2() = default;
 public: // About combine file.
-    StatusCode OpenOutputFile(const std::string &fileName) override;
+    StatusCode OpenOutputFile(const OutputFileInfo &fileInfo) override;
     StatusCode CloseOutputFile(void) override;
     void GetStreamData(const void *data, const size_t &size, const StreamInfo &streamInfo) override;

View File

@@ -89,31 +89,32 @@ void FfmpegOutputStream::UnInit(void)
     }
     av_packet_free(&mTmpPkt);
 }
-void FfmpegOutputStream::WriteSourceData(const void *data, const size_t &size)
+void FfmpegOutputStream::WriteSourceData(const void *data, const size_t &size, const unsigned long long &pts)
 {
     if (mDecoder) {
        mDecoder->DecodeData(data, size, mDecodeCallback);
        return;
     }
-    AVPacket *mTmpPkt = av_packet_alloc();
+    AVPacket *tmpPkt = av_packet_alloc();
     static unsigned long long u64Interval = 0;
-    AVRational in_timebase = (AVRational){1, 15};
+    // AVRational in_timebase = (AVRational){1, 15};
     if (mEncodeCallback) {
-        mTmpPkt->data = (uint8_t *)data;
-        mTmpPkt->size = size;
-        mTmpPkt->stream_index = mStream->index;
-        mTmpPkt->duration = 0;
-        // mTmpPkt->pts = u64Interval * 1000; // convert to us
-        mTmpPkt->pts = av_rescale_q(u64Interval, (AVRational){1, 15}, mStream->time_base);
+        tmpPkt->data = (uint8_t *)data;
+        tmpPkt->size = size;
+        tmpPkt->stream_index = mStream->index;
+        constexpr int64_t ZERO_MEANS_UNKNOWN = 0;
+        tmpPkt->duration = ZERO_MEANS_UNKNOWN;
+        // tmpPkt->pts = u64Interval * 1000; // convert to us
+        tmpPkt->pts = av_rescale_q(u64Interval, (AVRational){1, 15}, mStream->time_base);
        u64Interval++;
-        mTmpPkt->dts = mTmpPkt->pts;
+        tmpPkt->dts = tmpPkt->pts;
        /* copy packet */
        // av_packet_rescale_ts(mTmpPkt, in_timebase, mStream->time_base);
-        mTmpPkt->pos = -1;
-        mEncodeCallback(mTmpPkt);
+        tmpPkt->pos = -1;
+        mEncodeCallback(tmpPkt);
     }
-    av_packet_unref(mTmpPkt);
-    av_packet_free(&mTmpPkt);
+    av_packet_unref(tmpPkt);
+    av_packet_free(&tmpPkt);
 }
 void FfmpegOutputStream::SetWriteSourceDataCallback(std::function<void(AVPacket *pkt)> callback)
 {
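WriteSourceData now accepts a pts argument, but inside the function the packet timestamp is still synthesized from the u64Interval frame counter rescaled from a fixed 1/15 time base; the parameter is plumbed through without being consumed yet. If the caller's timestamp were, say, in milliseconds, using it directly would look roughly like this (a sketch, not what this commit does):

    // Hypothetical use of the new pts argument: treat it as milliseconds and
    // rescale it into the stream's time base instead of counting frames.
    tmpPkt->pts = av_rescale_q((int64_t)pts, (AVRational){1, 1000}, mStream->time_base);
    tmpPkt->dts = tmpPkt->pts;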

View File

@@ -43,7 +43,7 @@ public:
     virtual ~FfmpegOutputStream() = default;
     bool Init(AVFormatContext *outputFormat);
     void UnInit(void);
-    void WriteSourceData(const void *data, const size_t &size);
+    void WriteSourceData(const void *data, const size_t &size, const unsigned long long &pts);
     void SetWriteSourceDataCallback(std::function<void(AVPacket *pkt)> callback);
     bool CheckStreamHeader(const void *data, const size_t &size);

View File

@@ -41,7 +41,7 @@ StatusCode IMediaBase::SetReadAudioCallback(ReadVideoFileCallback callback, void
     LogWarning("STATUS_CODE_VIRTUAL_FUNCTION\n");
     return CreateStatusCode(STATUS_CODE_VIRTUAL_FUNCTION);
 }
-StatusCode IMediaBase::OpenOutputFile(const std::string &fileName)
+StatusCode IMediaBase::OpenOutputFile(const OutputFileInfo &fileInfo)
 {
     LogWarning("STATUS_CODE_VIRTUAL_FUNCTION\n");
     return CreateStatusCode(STATUS_CODE_VIRTUAL_FUNCTION);

View File

@@ -29,7 +29,7 @@ public: // About read media file.
     virtual StatusCode SetReadAudioCallback(ReadVideoFileCallback callback, void *context);
 public: // About combine file.
-    virtual StatusCode OpenOutputFile(const std::string &fileName);
+    virtual StatusCode OpenOutputFile(const OutputFileInfo &fileInfo);
     virtual StatusCode CloseOutputFile(void);
     virtual void GetStreamData(const void *data, const size_t &size, const StreamInfo &streamInfo);
 };

View File

@@ -64,10 +64,10 @@ StatusCode IStopReadFile(void *object)
     }
     return CreateStatusCode(STATUS_CODE_OK);
 }
-StatusCode IOpenOutputFile(void *object, const char *fileName)
+StatusCode IOpenOutputFile(void *object, const OutputFileInfo *info)
 {
     if (ObjectCheck(object) == true) {
-        return (*(std::shared_ptr<IMediaBase> *)object)->OpenOutputFile(fileName);
+        return (*(std::shared_ptr<IMediaBase> *)object)->OpenOutputFile(*info);
     }
     return CreateStatusCode(STATUS_CODE_OK);
 }
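The C wrapper treats the opaque handle as a pointer to a heap-allocated std::shared_ptr<IMediaBase> and forwards the dereferenced OutputFileInfo to the virtual OpenOutputFile. The same handle idiom in isolation (IThing and the function names are illustrative):

    #include <memory>

    struct IThing { virtual ~IThing() = default; virtual int Open(const char *name) = 0; };

    // C-style handle: heap-allocate a shared_ptr and hand its address out as void*.
    void *CreateHandle(std::shared_ptr<IThing> impl)
    {
        return new std::shared_ptr<IThing>(std::move(impl));
    }
    int OpenViaHandle(void *object, const char *name)
    {
        return (*(std::shared_ptr<IThing> *)object)->Open(name);
    }
    void DestroyHandle(void *object)
    {
        delete (std::shared_ptr<IThing> *)object;   // releases the shared_ptr
    }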