Backup: add a timestamp parameter to the A/V stream pipeline.

This commit is contained in:
Fancy code 2024-07-10 22:40:51 +08:00
parent 42e1e62756
commit e038e2a9ff
14 changed files with 98 additions and 65 deletions

View File

@ -26,14 +26,15 @@
#include <string.h>
#include <thread>
#include <vector>
// Sentinel value: mFirstFrameTimeStamp keeps this until the task's first frame arrives.
constexpr int MEDIA_TASK_NOT_START = 0;
// Default-construct an empty frame: END type, no payload, zero-length, zero timestamp.
// (The stale pre-change constructor line without mTimeStamp was diff residue and is removed.)
one_frame_stream::one_frame_stream() : mType(FrameType::END), mData(nullptr), mLength(0), mTimeStamp(0)
{
}
// Destructor intentionally releases nothing: mData is malloc'd in
// MediaHandle::GetAVStream and freed by the consumer (HandleListFrame).
// NOTE(review): ownership lives outside this struct — confirm no other path leaks mData.
one_frame_stream::~one_frame_stream()
{
}
// Bind the media channel and camera HAL. Worker threads start later (Init / StartTaskTimer).
// mFirstFrameTimeStamp begins at the MEDIA_TASK_NOT_START sentinel so the first incoming
// frame of a task re-bases the timestamp timeline (see GetAVStream).
// The stale pre-change init-list line without mFirstFrameTimeStamp was diff residue and is removed.
MediaHandle::MediaHandle(const MediaChannel &mediaChannel, const std::shared_ptr<VCameraHal> &cameraHal)
    : mMediaChannel(mediaChannel), mCameraHal(cameraHal), mTaskRuning(false), mFirstFrameTimeStamp(MEDIA_TASK_NOT_START)
{
}
void MediaHandle::Init(void)
@ -121,11 +122,11 @@ void MediaHandle::StartTaskTimer(void)
}
void MediaHandle::TaskTimer(void)
{
constexpr int TASK_TIMER = 1000 * 10;
constexpr int TASK_TIME_OUT = 1000 * 20;
mTaskRuning = true;
while (mTaskRuning) {
std::unique_lock<std::mutex> lock(mMutex);
mCvTaskHandle.wait_for(lock, std::chrono::milliseconds(TASK_TIMER), [&] {
mCvTaskHandle.wait_for(lock, std::chrono::milliseconds(TASK_TIME_OUT), [&] {
return !mTaskRuning;
});
/**
@ -142,6 +143,7 @@ void MediaHandle::TaskTimer(void)
}
mStreamHandle.reset();
mMutex.lock();
mFirstFrameTimeStamp = MEDIA_TASK_NOT_START;
auto runingTask = mCurrentTask.lock();
if (mCurrentTask.expired()) {
LogWarning("mCurrentTask is expired.\n");
@ -180,19 +182,19 @@ void MediaHandle::HandleListFrame(void)
int leftFrameCount = -1;
do {
OneFrameStream &frontFrame = mFrameList.front();
OneFrameStream handleIt;
handleIt.mData = frontFrame.mData;
handleIt.mLength = frontFrame.mLength;
handleIt.mType = frontFrame.mType;
// OneFrameStream handleIt;
// handleIt.mData = frontFrame.mData;
// handleIt.mLength = frontFrame.mLength;
// handleIt.mType = frontFrame.mType;
if (FrameType::VIDEO == frontFrame.mType) {
mStreamHandle->GetVideoStream(frontFrame.mData, frontFrame.mLength, frontFrame.mTimeStamp);
}
if (FrameType::AUDIO == frontFrame.mType) {
mStreamHandle->GetAudioStream(frontFrame.mData, frontFrame.mLength, frontFrame.mTimeStamp);
}
free(frontFrame.mData);
frontFrame.mData = nullptr;
mFrameList.pop_front();
if (FrameType::VIDEO == handleIt.mType) {
mStreamHandle->GetVideoStream(handleIt.mData, handleIt.mLength, 0);
}
if (FrameType::AUDIO == handleIt.mType) {
mStreamHandle->GetAudioStream(handleIt.mData, handleIt.mLength, 0);
}
free(handleIt.mData);
handleIt.mData = nullptr;
leftFrameCount = mFrameList.size();
} while (leftFrameCount > 0);
}
@ -202,25 +204,45 @@ CameraTaskType MediaHandle::TaskTypeConvert(const MediaTaskType &type)
}
/**
 * @brief HAL callback for one encoded video frame; copies it into the frame queue.
 *
 * All locking and the deep copy happen inside GetAVStream. Doing any of it here
 * as well — as the stale pre-refactor body did — locks mMutex and then calls
 * GetAVStream, which locks mMutex again (deadlock on a non-recursive mutex) and
 * enqueues the frame twice.
 */
void MediaHandle::GetVideoStreamCallback(const void *stream, const int &length, const unsigned long long &timeStamp)
{
    GetAVStream(FrameType::VIDEO, stream, length, timeStamp);
}
/**
 * @brief HAL callback for one encoded audio frame; mirrors GetVideoStreamCallback.
 *
 * Delegates to GetAVStream, which locks, deep-copies and enqueues the frame.
 * The large commented-out legacy block was dead code and has been removed.
 */
void MediaHandle::GetAudioStreamCallback(const void *stream, const int &length, const unsigned long long &timeStamp)
{
    GetAVStream(FrameType::AUDIO, stream, length, timeStamp);
}
/**
 * @brief Deep-copy one A/V frame into mFrameList and wake the handler thread.
 *
 * The first frame of a task latches mFirstFrameTimeStamp; every frame then
 * stores its timestamp relative to it, so the muxer sees a zero-based timeline.
 *
 * @param type      FrameType::VIDEO or FrameType::AUDIO.
 * @param stream    Encoded frame bytes (copied; caller keeps ownership).
 * @param length    Byte length of stream.
 * @param timeStamp Absolute capture timestamp from the camera HAL.
 */
void MediaHandle::GetAVStream(const FrameType &type, const void *stream, const int &length,
                              const unsigned long long &timeStamp)
{
    std::unique_lock<std::mutex> lock(mMutex);
    if (MEDIA_TASK_NOT_START == mFirstFrameTimeStamp) {
        mFirstFrameTimeStamp = timeStamp; // re-base the task's timeline on the first frame
    }
    OneFrameStream addFrame;
    addFrame.mData = malloc(length);
    if (nullptr == addFrame.mData) {
        return; // allocation failed: drop the frame rather than crash in memcpy
    }
    addFrame.mLength = length;
    memcpy(addFrame.mData, stream, length);
    addFrame.mType = type; // stale hard-coded FrameType::AUDIO assignment (diff residue) removed
    addFrame.mTimeStamp = timeStamp - mFirstFrameTimeStamp;
    mFrameList.push_back(addFrame);
    mCvFrameHandle.notify_one();
}

View File

@ -37,6 +37,7 @@ typedef struct one_frame_stream
FrameType mType;               // payload kind: VIDEO, AUDIO, or END sentinel
void *mData;                   // malloc'd payload; freed by the consumer (HandleListFrame)
int mLength;                   // byte length of mData
unsigned long long mTimeStamp; // capture time relative to the task's first frame — unit per HAL, TODO confirm
} OneFrameStream;
class MediaHandle : public VMediaHandle, public std::enable_shared_from_this<MediaHandle>
{
@ -60,6 +61,7 @@ private:
CameraTaskType TaskTypeConvert(const MediaTaskType &type);
void GetVideoStreamCallback(const void *stream, const int &length, const unsigned long long &timeStamp);
void GetAudioStreamCallback(const void *stream, const int &length, const unsigned long long &timeStamp);
void GetAVStream(const FrameType &type, const void *stream, const int &length, const unsigned long long &timeStamp);
private:
std::mutex mMutex;
@ -73,5 +75,6 @@ private:
std::thread mTaskTimerThread;
std::thread mFrameHandleThread;
std::list<OneFrameStream> mFrameList;
unsigned long long mFirstFrameTimeStamp;
};
#endif

View File

@ -36,7 +36,8 @@ StatusCode RecordMp4::Init(void)
return CreateStatusCode(STATUS_CODE_NOT_OK);
}
std::string videoPath = mRecordTask->GetTargetNameForSaving();
// The muxer API now takes a descriptor (name + per-file duration) instead of a bare path.
// The stale pre-change call taking videoPath.c_str() directly was diff residue (it also
// redeclared `code`) and is removed.
OutputFileInfo fileInfo = {.mFileName = videoPath.c_str(), .mDuration_ms = 5000};
StatusCode code = IOpenOutputFile(mRecordMp4Object, &fileInfo);
if (!IsCodeOK(code)) {
LogError("OpenOutputFile failed.\n");
ICloseOutputFile(mRecordMp4Object);
@ -66,7 +67,7 @@ void RecordMp4::GetVideoStream(const void *stream, const unsigned int &length, c
{
std::lock_guard<std::mutex> locker(mMutex);
    if (mRecordMp4Object) {
        // Forward the capture timestamp so the muxer can derive real pts.
        // The stale StreamInfo declaration without mTimeStamp (diff residue,
        // a redefinition of `info`) is removed.
        StreamInfo info = {.mType = STREAM_TYPE_VIDEO_H264, .mTimeStamp = timeStamp};
        IGetStreamData(mRecordMp4Object, stream, length, info);
    }
}
@ -74,7 +75,7 @@ void RecordMp4::GetAudioStream(const void *stream, const unsigned int &length, c
{
    std::lock_guard<std::mutex> locker(mMutex);
    if (mRecordMp4Object) {
        // G.711A audio path: pass the timestamp through just like the video path.
        // The stale StreamInfo declaration without mTimeStamp (diff residue,
        // a redefinition of `info`) is removed.
        StreamInfo info = {.mType = STREAM_TYPE_AUDIO_G711A, .mTimeStamp = timeStamp};
        IGetStreamData(mRecordMp4Object, stream, length, info);
    }
}

View File

@ -35,7 +35,13 @@ enum StreamType
// Per-packet metadata handed to IGetStreamData.
typedef struct stream_info
{
const StreamType mType;              // codec/stream kind of the payload
const unsigned long long mTimeStamp; // capture timestamp, relative to first frame — unit set by producer, TODO confirm
} StreamInfo;
// Descriptor passed to IOpenOutputFile describing the mux target.
typedef struct output_file_info
{
const char *mFileName;          // output path; borrowed pointer, must outlive the call
const unsigned int mDuration_ms; // per-file duration in ms — presumably for file splitting, TODO confirm usage
} OutputFileInfo;
typedef void (*ReadVideoFileCallback)(const void *, const unsigned int, void *);
typedef void (*ReadAudioFileCallback)(const void *, const unsigned int, void *);
void *ICreateMediaBase(const MediaHandleType type);
@ -47,7 +53,7 @@ StatusCode ISetReadAudioCallback(void *object, ReadVideoFileCallback callback, v
StatusCode IStartReadFile(void *object, const char *path);
StatusCode IStopReadFile(void *object);
// Open the mux output described by info on the media object.
// The stale declaration taking `const char *fileName` (pre-change diff residue,
// a conflicting declaration in C) is removed.
StatusCode IOpenOutputFile(void *object, const OutputFileInfo *info);
StatusCode ICloseOutputFile(void *object);
void IGetStreamData(void *object, const void *data, const size_t size, const StreamInfo streamInfo);

View File

@ -41,7 +41,7 @@ extern "C" {
#include <functional>
constexpr long SOURCE_AUDIO_SAMPEL_RATE = 8000; // NOTE(review): "SAMPEL" typo kept — renaming would break references elsewhere
#define STREAM_DURATION 10.0
// Stale duplicate definition with value 1200000 (diff residue causing a macro
// redefinition) removed; 25 fps is the value the commit intends.
#define STREAM_FRAME_RATE 25              /* 25 images/s */
#define STREAM_PIX_FMT AV_PIX_FMT_YUV420P /* default pix_fmt */
FfmpegEncoder::FfmpegEncoder(const enum AVCodecID &codecId)
: mCodecId(codecId), mCodecCtx(nullptr), mCodec(nullptr), mFrame(nullptr), mTmpFrame(nullptr), mTmpPkt(nullptr),

View File

@ -59,7 +59,7 @@ FfmpegMuxStream::FfmpegMuxStream()
memset(&mVideoSt, 0, sizeof(mVideoSt));
memset(&mAudioSt, 0, sizeof(mAudioSt));
}
StatusCode FfmpegMuxStream::OpenOutputFile(const std::string &fileName)
StatusCode FfmpegMuxStream::OpenOutputFile(const OutputFileInfo &fileInfo)
{
if (!InitCodecVideo(AV_CODEC_ID_H264, &mCodecVideo, &mCodecVideoContext, &mFrameVideo)) {
LogError("InitCodec failed\n");
@ -72,10 +72,10 @@ StatusCode FfmpegMuxStream::OpenOutputFile(const std::string &fileName)
int ret;
AVFormatContext *oc = nullptr;
int have_video = 0, have_audio = 0;
int encode_video = 0, encode_audio = 0;
// int encode_video = 0, encode_audio = 0;
const AVCodec *audio_codec, *video_codec;
AVDictionary *opt = nullptr;
avformat_alloc_output_context2(&oc, nullptr, "mp4", fileName.c_str());
avformat_alloc_output_context2(&oc, nullptr, "mp4", fileInfo.mFileName);
if (!oc) {
LogError("Could not deduce output format from file extension: using MPEG.\n");
return CreateStatusCode(STATUS_CODE_NOT_OK);
@ -88,13 +88,13 @@ StatusCode FfmpegMuxStream::OpenOutputFile(const std::string &fileName)
LogInfo("Add video stream\n");
add_stream(&mVideoSt, oc, &video_codec, fmt->video_codec);
have_video = 1;
encode_video = 1;
// encode_video = 1;
}
if (fmt->audio_codec != AV_CODEC_ID_NONE) {
LogInfo("Add audio stream\n");
add_stream(&mAudioSt, oc, &audio_codec, fmt->audio_codec);
have_audio = 1;
encode_audio = 1;
// encode_audio = 1;
} /* Now that all the parameters are set, we can open the audio and
* video codecs and allocate the necessary encode buffers. */
if (have_video) {
@ -104,13 +104,13 @@ StatusCode FfmpegMuxStream::OpenOutputFile(const std::string &fileName)
if (have_audio) {
open_audio(oc, audio_codec, &mAudioSt, opt);
}
av_dump_format(oc, 0, fileName.c_str(), 1);
av_dump_format(oc, 0, fileInfo.mFileName, 1);
if (!(fmt->flags & AVFMT_NOFILE)) {
ret = avio_open(&oc->pb, fileName.c_str(), AVIO_FLAG_WRITE);
ret = avio_open(&oc->pb, fileInfo.mFileName, AVIO_FLAG_WRITE);
if (ret < 0) {
char error_str[AV_ERROR_MAX_STRING_SIZE] = {0};
LogError("Could not open '%s': %s\n",
fileName.c_str(),
fileInfo.mFileName,
av_make_error_string(error_str, AV_ERROR_MAX_STRING_SIZE, ret));
return CreateStatusCode(STATUS_CODE_NOT_OK);
}

View File

@ -22,7 +22,7 @@ public:
virtual ~FfmpegMuxStream() = default;
public: // About combine file.
// Open the mux target described by fileInfo. The stale std::string overload
// declaration is removed: after the base signature changed, its `override`
// no longer matches anything and fails to compile.
StatusCode OpenOutputFile(const OutputFileInfo &fileInfo) override;
StatusCode CloseOutputFile(void) override;
void GetStreamData(const void *data, const size_t &size, const StreamInfo &streamInfo) override;

View File

@ -38,9 +38,9 @@ extern "C" {
// Initialize muxer state to idle; the output context and options are created
// later in OpenOutputFile.
FfmpegMuxStreamV2::FfmpegMuxStreamV2() : mOutputFormat(nullptr), mOptions(nullptr), mFilesMuxing(false)
{
}
/**
 * @brief Open the mux target described by fileInfo.
 *
 * Only the file name is consumed here; mDuration_ms is presumably for file
 * splitting elsewhere — TODO confirm. The stale std::string signature and its
 * `return OpenMuxOutputFile(fileName);` body line (diff residue producing a
 * duplicate definition) are removed.
 */
StatusCode FfmpegMuxStreamV2::OpenOutputFile(const OutputFileInfo &fileInfo)
{
    return OpenMuxOutputFile(fileInfo.mFileName);
}
StatusCode FfmpegMuxStreamV2::CloseOutputFile(void)
{
@ -90,10 +90,10 @@ void FfmpegMuxStreamV2::GetStreamData(const void *data, const size_t &size, cons
}
}
if (streamInfo.mType == STREAM_TYPE_VIDEO_H264 && mVideoStream) {
mVideoStream->WriteSourceData(data, size);
mVideoStream->WriteSourceData(data, size, streamInfo.mTimeStamp);
}
if (streamInfo.mType == STREAM_TYPE_AUDIO_G711A && mAudioStream) {
mAudioStream->WriteSourceData(data, size);
mAudioStream->WriteSourceData(data, size, streamInfo.mTimeStamp);
}
}
StatusCode inline FfmpegMuxStreamV2::OpenMuxOutputFile(const std::string &fileName)

View File

@ -45,7 +45,7 @@ public:
virtual ~FfmpegMuxStreamV2() = default;
public: // About combine file.
// Open the mux target described by fileInfo. The stale std::string overload
// declaration is removed: its `override` no longer matches the changed base
// signature and fails to compile.
StatusCode OpenOutputFile(const OutputFileInfo &fileInfo) override;
StatusCode CloseOutputFile(void) override;
void GetStreamData(const void *data, const size_t &size, const StreamInfo &streamInfo) override;

View File

@ -89,31 +89,32 @@ void FfmpegOutputStream::UnInit(void)
}
av_packet_free(&mTmpPkt);
}
void FfmpegOutputStream::WriteSourceData(const void *data, const size_t &size)
void FfmpegOutputStream::WriteSourceData(const void *data, const size_t &size, const unsigned long long &pts)
{
if (mDecoder) {
mDecoder->DecodeData(data, size, mDecodeCallback);
return;
}
AVPacket *mTmpPkt = av_packet_alloc();
AVPacket *tmpPkt = av_packet_alloc();
static unsigned long long u64Interval = 0;
AVRational in_timebase = (AVRational){1, 15};
// AVRational in_timebase = (AVRational){1, 15};
if (mEncodeCallback) {
mTmpPkt->data = (uint8_t *)data;
mTmpPkt->size = size;
mTmpPkt->stream_index = mStream->index;
mTmpPkt->duration = 0;
// mTmpPkt->pts = u64Interval * 1000; // ת<><D7AA><EFBFBD><EFBFBD> us
mTmpPkt->pts = av_rescale_q(u64Interval, (AVRational){1, 15}, mStream->time_base);
tmpPkt->data = (uint8_t *)data;
tmpPkt->size = size;
tmpPkt->stream_index = mStream->index;
constexpr int64_t ZERO_MEANS_UNKNOWN = 0;
tmpPkt->duration = ZERO_MEANS_UNKNOWN;
// tmpPkt->pts = u64Interval * 1000; // ת<><D7AA><EFBFBD><EFBFBD> us
tmpPkt->pts = av_rescale_q(u64Interval, (AVRational){1, 15}, mStream->time_base);
u64Interval++;
mTmpPkt->dts = mTmpPkt->pts;
tmpPkt->dts = tmpPkt->pts;
/* copy packet */
// av_packet_rescale_ts(mTmpPkt, in_timebase, mStream->time_base);
mTmpPkt->pos = -1;
mEncodeCallback(mTmpPkt);
tmpPkt->pos = -1;
mEncodeCallback(tmpPkt);
}
av_packet_unref(mTmpPkt);
av_packet_free(&mTmpPkt);
av_packet_unref(tmpPkt);
av_packet_free(&tmpPkt);
}
void FfmpegOutputStream::SetWriteSourceDataCallback(std::function<void(AVPacket *pkt)> callback)
{

View File

@ -43,7 +43,7 @@ public:
virtual ~FfmpegOutputStream() = default;
bool Init(AVFormatContext *outputFormat);
void UnInit(void);
// Feed one source frame (with its capture timestamp) into the output stream.
// The stale two-argument declaration is removed: it was pre-change diff
// residue declaring an overload that no longer has a definition.
void WriteSourceData(const void *data, const size_t &size, const unsigned long long &pts);
void SetWriteSourceDataCallback(std::function<void(AVPacket *pkt)> callback);
bool CheckStreamHeader(const void *data, const size_t &size);

View File

@ -41,7 +41,7 @@ StatusCode IMediaBase::SetReadAudioCallback(ReadVideoFileCallback callback, void
LogWarning("STATUS_CODE_VIRTUAL_FUNCTION\n");
return CreateStatusCode(STATUS_CODE_VIRTUAL_FUNCTION);
}
/// Base-class stub: derived muxers override this. Calling it directly only
/// logs a warning and reports STATUS_CODE_VIRTUAL_FUNCTION.
/// The stale std::string signature line (diff residue — two signatures sharing
/// one body) is removed.
StatusCode IMediaBase::OpenOutputFile(const OutputFileInfo &fileInfo)
{
    LogWarning("STATUS_CODE_VIRTUAL_FUNCTION\n");
    return CreateStatusCode(STATUS_CODE_VIRTUAL_FUNCTION);
}

View File

@ -29,7 +29,7 @@ public: // About read media file.
virtual StatusCode SetReadAudioCallback(ReadVideoFileCallback callback, void *context);
public: // About combine file.
// Open a mux output described by fileInfo; default implementation only warns.
// The stale std::string overload declaration (pre-change diff residue with no
// remaining definition) is removed.
virtual StatusCode OpenOutputFile(const OutputFileInfo &fileInfo);
virtual StatusCode CloseOutputFile(void);
virtual void GetStreamData(const void *data, const size_t &size, const StreamInfo &streamInfo);
};

View File

@ -64,10 +64,10 @@ StatusCode IStopReadFile(void *object)
}
return CreateStatusCode(STATUS_CODE_OK);
}
/**
 * @brief C wrapper: forward an open-output request to the underlying IMediaBase.
 *
 * The stale char* signature and its duplicate return line (diff residue —
 * duplicate definition) are removed.
 *
 * @param object Opaque handle from ICreateMediaBase (a std::shared_ptr<IMediaBase>*).
 * @param info   Output file descriptor; must not be null.
 */
StatusCode IOpenOutputFile(void *object, const OutputFileInfo *info)
{
    if (nullptr == info) {
        return CreateStatusCode(STATUS_CODE_NOT_OK); // guard the *info dereference below
    }
    if (ObjectCheck(object) == true) {
        return (*(std::shared_ptr<IMediaBase> *)object)->OpenOutputFile(*info);
    }
    return CreateStatusCode(STATUS_CODE_OK);
}