Improve: MediaManager.
parent 0ceb3acc2b
commit e7ce303ec3
@@ -19,11 +19,19 @@
#include "RecordMp4.h"
#include "StatusCode.h"
#include <chrono>
#include <cstdlib>
#include <functional>
#include <memory>
#include <mutex>
#include <string.h>
#include <thread>
#include <vector>
one_frame_stream::one_frame_stream() : mType(FrameType::END), mData(nullptr), mLength(0)
{
}
one_frame_stream::~one_frame_stream()
{
}
MediaHandle::MediaHandle(const MediaChannel &mediaChannel, const std::shared_ptr<VCameraHal> &cameraHal)
    : mMediaChannel(mediaChannel), mCameraHal(cameraHal), mTaskRuning(false)
{
@@ -42,10 +50,14 @@ void MediaHandle::Init(void)
void MediaHandle::UnInit(void)
{
    mTaskRuning = false;
    mCv.notify_one();
    mCvTaskHandle.notify_one();
    mCvFrameHandle.notify_one();
    if (mTaskTimerThread.joinable()) {
        mTaskTimerThread.join();
    }
    if (mFrameHandleThread.joinable()) {
        mFrameHandleThread.join();
    }
    if (mCameraHal) {
        /**
         * @brief Before releasing the class instance, it is necessary to call the UnInit function to ensure that the
@@ -83,6 +95,7 @@ StatusCode MediaHandle::ExecuteTask(std::shared_ptr<VMediaTask> &task)
    if (IsCodeOK(code)) {
        mCurrentTask = task;
        StartTaskTimer();
        StartFrameHandle();
    }
    else {
        LogError("Execute task failed.\n");
@@ -112,7 +125,7 @@ void MediaHandle::TaskTimer(void)
    mTaskRuning = true;
    while (mTaskRuning) {
        std::unique_lock<std::mutex> lock(mMutex);
        mCv.wait_for(lock, std::chrono::milliseconds(TASK_TIMER), [&] {
        mCvTaskHandle.wait_for(lock, std::chrono::milliseconds(TASK_TIMER), [&] {
            return !mTaskRuning;
        });
        /**
@@ -139,15 +152,75 @@ void MediaHandle::TaskTimer(void)
    mCurrentTask.reset();
    mMutex.unlock();
}
void MediaHandle::StartFrameHandle(void)
{
    auto taskTimerThread = [=](std::shared_ptr<MediaHandle> media) {
        LogInfo("StartFrameHandle start.\n");
        media->FrameHandle();
    };
    std::shared_ptr<MediaHandle> media = shared_from_this();
    mFrameHandleThread = std::thread(taskTimerThread, media);
}
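StartFrameHandle passes a shared_ptr obtained from shared_from_this() into the worker thread, so the MediaHandle instance stays alive for as long as FrameHandle runs. A minimal standalone sketch of that ownership pattern, assuming only the standard library (the Worker class and its printf body are illustrative, not part of this commit):

#include <cstdio>
#include <memory>
#include <thread>

class Worker : public std::enable_shared_from_this<Worker>
{
public:
    void Start()
    {
        // Capture a shared_ptr to ourselves so the object cannot be
        // destroyed while the worker thread is still using it.
        std::shared_ptr<Worker> self = shared_from_this();
        mThread = std::thread([self] { self->Loop(); });
    }
    void Join()
    {
        if (mThread.joinable()) {
            mThread.join();
        }
    }

private:
    void Loop() { std::printf("worker running\n"); }
    std::thread mThread;
};

int main()
{
    // shared_from_this() requires the object to already be owned by a shared_ptr.
    std::shared_ptr<Worker> worker = std::make_shared<Worker>();
    worker->Start();
    worker->Join();
    return 0;
}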
void MediaHandle::FrameHandle(void)
{
    constexpr int TASK_TIMER = 1000 * 1000;
    mTaskRuning = true;
    while (mTaskRuning) {
        std::unique_lock<std::mutex> lock(mMutex);
        mCvFrameHandle.wait_for(lock, std::chrono::milliseconds(TASK_TIMER), [&] {
            return !mTaskRuning || !mFrameList.empty();
        });
        if (mFrameList.size() > 0) {
            HandleListFrame();
        }
    }
}
void MediaHandle::HandleListFrame(void)
{
    int leftFrameCount = -1;
    do {
        OneFrameStream &frontFrame = mFrameList.front();
        OneFrameStream handleIt;
        handleIt.mData = frontFrame.mData;
        handleIt.mLength = frontFrame.mLength;
        handleIt.mType = frontFrame.mType;
        mFrameList.pop_front();
        if (FrameType::VIDEO == handleIt.mType) {
            mStreamHandle->GetVideoStream(handleIt.mData, handleIt.mLength, 0);
        }
        if (FrameType::AUDIO == handleIt.mType) {
            mStreamHandle->GetAudioStream(handleIt.mData, handleIt.mLength, 0);
        }
        free(handleIt.mData);
        handleIt.mData = nullptr;
        leftFrameCount = mFrameList.size();
    } while (leftFrameCount > 0);
}
CameraTaskType MediaHandle::TaskTypeConvert(const MediaTaskType &type)
{
    return CameraTaskType::END;
}
void MediaHandle::GetVideoStreamCallback(const void *stream, const int &length, const unsigned long long &timeStamp)
{
    mStreamHandle->GetVideoStream(stream, length, timeStamp);
    std::unique_lock<std::mutex> lock(mMutex);
    // mStreamHandle->GetVideoStream(stream, length, timeStamp);
    OneFrameStream addFrame;
    addFrame.mData = malloc(length);
    addFrame.mLength = length;
    memcpy(addFrame.mData, stream, length);
    addFrame.mType = FrameType::VIDEO;
    mFrameList.push_back(addFrame);
    mCvFrameHandle.notify_one();
}
void MediaHandle::GetAudioStreamCallback(const void *stream, const int &length, const unsigned long long &timeStamp)
{
    mStreamHandle->GetAudioStream(stream, length, timeStamp);
    std::unique_lock<std::mutex> lock(mMutex);
    // mStreamHandle->GetAudioStream(stream, length, timeStamp);
    OneFrameStream addFrame;
    addFrame.mData = malloc(length);
    addFrame.mLength = length;
    memcpy(addFrame.mData, stream, length);
    addFrame.mType = FrameType::AUDIO;
    mFrameList.push_back(addFrame);
    mCvFrameHandle.notify_one();
}
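These two callbacks now heap-copy each incoming buffer, append it to mFrameList under mMutex and wake mCvFrameHandle; FrameHandle and HandleListFrame then drain the list on the dedicated thread and free every copy. A compact standalone sketch of that producer/consumer hand-off, where DemoFrame, DemoQueue and the 1000 ms wake-up period are illustrative assumptions rather than code from this commit:

#include <chrono>
#include <condition_variable>
#include <cstdlib>
#include <cstring>
#include <list>
#include <mutex>
#include <thread>

struct DemoFrame {
    void *data = nullptr;
    int length = 0;
};

class DemoQueue
{
public:
    // Producer side: copy the caller's buffer so it stays valid after the
    // callback returns, then queue the copy and wake the worker.
    void Push(const void *stream, int length)
    {
        DemoFrame frame;
        frame.data = malloc(length);
        frame.length = length;
        memcpy(frame.data, stream, length);
        std::unique_lock<std::mutex> lock(mMutex);
        mList.push_back(frame);
        mCv.notify_one();
    }
    // Consumer side: wait for work or a stop request, drain the queue and
    // free every heap copy, mirroring the ownership rule in HandleListFrame.
    void Run()
    {
        while (true) {
            std::unique_lock<std::mutex> lock(mMutex);
            mCv.wait_for(lock, std::chrono::milliseconds(1000),
                         [&] { return !mRunning || !mList.empty(); });
            while (!mList.empty()) {
                DemoFrame frame = mList.front();
                mList.pop_front();
                // A real handler would forward frame.data / frame.length here.
                free(frame.data);
            }
            if (!mRunning) {
                break;
            }
        }
    }
    void Stop()
    {
        std::unique_lock<std::mutex> lock(mMutex);
        mRunning = false;
        mCv.notify_one();
    }

private:
    std::mutex mMutex;
    std::condition_variable mCv;
    std::list<DemoFrame> mList;
    bool mRunning = true;
};

int main()
{
    DemoQueue queue;
    std::thread worker([&queue] { queue.Run(); });
    const char sample[] = "frame";
    queue.Push(sample, static_cast<int>(sizeof(sample)));
    queue.Stop();
    worker.join();
    return 0;
}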
@@ -18,11 +18,26 @@
#include "IMediaManager.h"
#include "VStreamHandle.h"
#include <condition_variable>
#include <list>
#include <mutex>
#include <thread>
using std::placeholders::_1;
using std::placeholders::_2;
using std::placeholders::_3;
enum class FrameType
{
    VIDEO,
    AUDIO,
    END
};
typedef struct one_frame_stream
{
    one_frame_stream();
    ~one_frame_stream();
    FrameType mType;
    void *mData;
    int mLength;
} OneFrameStream;
class MediaHandle : public VMediaHandle, public std::enable_shared_from_this<MediaHandle>
{
public:
@@ -39,18 +54,24 @@ protected:
private:
    void StartTaskTimer(void);
    void TaskTimer(void);
    void StartFrameHandle(void);
    void FrameHandle(void);
    void HandleListFrame(void);
    CameraTaskType TaskTypeConvert(const MediaTaskType &type);
    void GetVideoStreamCallback(const void *stream, const int &length, const unsigned long long &timeStamp);
    void GetAudioStreamCallback(const void *stream, const int &length, const unsigned long long &timeStamp);

private:
    std::mutex mMutex;
    std::condition_variable mCv;
    std::condition_variable mCvTaskHandle;
    std::condition_variable mCvFrameHandle;
    const MediaChannel &mMediaChannel;
    std::shared_ptr<VCameraHal> mCameraHal;
    std::weak_ptr<VMediaTask> mCurrentTask;
    std::shared_ptr<VStreamHandle> mStreamHandle;
    bool mTaskRuning;
    std::thread mTaskTimerThread;
    std::thread mFrameHandleThread;
    std::list<OneFrameStream> mFrameList;
};
#endif
@@ -21,7 +21,9 @@
#include <cstdlib>
#include <cstring>
#include <memory>
#include <mutex>
#include <string>
#include <utility>
#include <vector>
RecordMp4::RecordMp4(std::shared_ptr<VMediaTask> &recordTask) : mRecordMp4Object(nullptr), mRecordTask(recordTask)
{
@@ -14,6 +14,7 @@
 */
#include "VStreamHandle.h"
#include "ILog.h"
#include "IMediaManager.h"
#include "StatusCode.h"
#include <vector>
StatusCode VStreamHandle::Init(void)
Binary file not shown.
@@ -132,10 +132,6 @@ void FfmpegDecoder::DecodeData(const void *data, const size_t &size, std::functi
        if (callback) {
            callback(mFrame);
        }
        // mFrame->pts = mAudioSt.next_pts;
        // mAudioSt.next_pts += mFrame->nb_samples;
        // ConvertAudioFrame(mFrame, mAudioSt.enc, &mAudioSt);
        // write_frame(mOc, mAudioSt.enc, mAudioSt.st, mAudioSt.frame, mAudioSt.tmp_pkt);
        break;
    }
    av_packet_unref(packet);
@@ -82,7 +82,6 @@ bool FfmpegEncoder::Init(int &outputFlags)
        }
        mCodecCtx->sample_rate = 8000;
        av_channel_layout_copy(&mCodecCtx->ch_layout, &src);
        // st->time_base = (AVRational){1, mCodecCtx->sample_rate};
        break;

    case AVMEDIA_TYPE_VIDEO:
@@ -96,7 +95,6 @@ bool FfmpegEncoder::Init(int &outputFlags)
         * of which frame timestamps are represented. For fixed-fps content,
         * timebase should be 1/framerate and timestamp increments should be
         * identical to 1. */
        // st->time_base = (AVRational){1, STREAM_FRAME_RATE};
        mCodecCtx->time_base = (AVRational){1, STREAM_FRAME_RATE};

        mCodecCtx->gop_size = 12; /* emit one intra frame every twelve frames at most */
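The comment above records the rule this hunk applies: for fixed-fps content the encoder time base is 1/framerate, so timestamps advance by one per frame and get rescaled into the stream's time base when packets are written out. A small arithmetic sketch of that conversion, where the TimeBase struct, the Rescale helper and the 25 fps / 90 kHz values are illustrative assumptions (av_packet_rescale_ts applies the same conversion to a packet's timestamps, rounding details aside):

#include <cstdint>
#include <cstdio>

struct TimeBase {
    int num;
    int den;
};

// Re-express a timestamp counted in src ticks (num/den seconds each)
// as a count of dst ticks.
int64_t Rescale(int64_t ts, TimeBase src, TimeBase dst)
{
    return ts * src.num * dst.den / (static_cast<int64_t>(src.den) * dst.num);
}

int main()
{
    const TimeBase codecTb{1, 25};     // 1/framerate for 25 fps content
    const TimeBase streamTb{1, 90000}; // common MPEG-TS style stream time base
    for (int64_t frame = 0; frame < 3; ++frame) {
        std::printf("frame %lld -> stream pts %lld\n",
                    static_cast<long long>(frame),
                    static_cast<long long>(Rescale(frame, codecTb, streamTb)));
    }
    return 0;
}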
@@ -202,19 +200,9 @@ int FfmpegEncoder::EncodeData(AVFrame *frame, AVStream *stream, std::function<vo
        av_packet_rescale_ts(mTmpPkt, mCodecCtx->time_base, stream->time_base);
        mTmpPkt->stream_index = stream->index;

        /* Write the compressed frame to the media file. */
        // log_packet(fmt_ctx, pkt);
        // ret = av_interleaved_write_frame(fmt_ctx, pkt);
        if (callback) {
            callback(mTmpPkt);
        }
        /* pkt is now blank (av_interleaved_write_frame() takes ownership of
         * its contents and resets pkt), so that no unreferencing is necessary.
         * This would be different if one used av_write_frame(). */
        // if (ret < 0) {
        //     fprintf(stderr, "Error while writing output packet: %s\n", av_err2str(ret));
        //     return AVERROR_EXIT;
        // }
    }

    return ret == AVERROR_EOF ? 1 : 0;