Improve: CameraHalMock code.
parent da5aa894c7
commit 7d6164959e
@@ -226,7 +226,7 @@ void MediaHandle::GetAVStream(const FrameType &type, const void *stream, const i
         mFirstFrameTimeStamp = timeStamp;
     }
     OneFrameStream addFrame;
-    addFrame.mData = malloc(length);
+    addFrame.mData = malloc(length); // TODO: detected memory leaks
     addFrame.mLength = length;
     memcpy(addFrame.mData, stream, length);
     addFrame.mType = type;
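
Review note: the new TODO is accurate — addFrame.mData is malloc'd and filled here, and nothing in this hunk releases it. A minimal sketch of the consumer side, assuming the frame is popped from a queue and drained elsewhere (the queue and drain names are hypothetical, not from this commit):

    // Hypothetical drain loop: pairs the malloc() above with a free().
    OneFrameStream frame = mFrameQueue.front();
    mFrameQueue.pop();
    DrainOneFrame(frame.mData, frame.mLength, frame.mType); // hypothetical consumer
    free(frame.mData);     // releases the buffer allocated in GetAVStream()
    frame.mData = nullptr; // guards against a dangling pointer on reuse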
@@ -67,7 +67,7 @@ void RecordMp4::GetVideoStream(const void *stream, const unsigned int &length, c
 {
     std::lock_guard<std::mutex> locker(mMutex);
     if (mRecordMp4Object) {
-        StreamInfo info = {.mType = STREAM_TYPE_VIDEO_H264, .mTimeStamp = timeStamp};
+        StreamInfo info = {.mType = STREAM_TYPE_VIDEO_H264, .mTimeStamp_us = timeStamp};
         IGetStreamData(mRecordMp4Object, stream, length, info);
     }
 }
@@ -75,7 +75,7 @@ void RecordMp4::GetAudioStream(const void *stream, const unsigned int &length, c
 {
     std::lock_guard<std::mutex> locker(mMutex);
     if (mRecordMp4Object) {
-        StreamInfo info = {.mType = STREAM_TYPE_AUDIO_G711A, .mTimeStamp = timeStamp};
+        StreamInfo info = {.mType = STREAM_TYPE_AUDIO_G711A, .mTimeStamp_us = timeStamp};
         IGetStreamData(mRecordMp4Object, stream, length, info);
     }
 }
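
Review note: renaming mTimeStamp to mTimeStamp_us bakes the unit into every call site, which is the cheapest way to enforce the {1,1000000} time base documented later in this commit. A caller-side sketch, assuming the producer holds a millisecond clock (timeStamp_ms is a hypothetical variable):

    // Sketch: convert to the mandatory 1 us time base before building the struct.
    // The const members force aggregate initialization; late assignment won't compile.
    unsigned long long timeStamp_us = timeStamp_ms * 1000ULL;
    StreamInfo info = {.mType = STREAM_TYPE_AUDIO_G711A, .mTimeStamp_us = timeStamp_us};
    IGetStreamData(mRecordMp4Object, stream, length, info);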
@@ -29,15 +29,21 @@ void CameraHalTest::Init(void)
 {
     CameraHal::Init();
     if (nullptr == mReadH264File) {
-        ReadVideoFileCallback videCallback = [](const void *stream, const unsigned int length, void *context) -> void {
-            ((CameraHalTest *)context)->ReadDataFromH264File(stream, length);
+        ReadVideoFileCallback videCallback = [](const void *stream,
+                                                const unsigned int length,
+                                                const unsigned long long duration_us,
+                                                void *context) -> void {
+            ((CameraHalTest *)context)->ReadDataFromH264File(stream, length, duration_us);
         };
         mReadH264File = ICreateMediaBase(MEDIA_HANDLE_TYPE_READ_H264);
         ISetReadVideoCallback(mReadH264File, videCallback, this);
     }
     if (nullptr == mReadG711aFile) {
-        ReadAudioFileCallback audioCallback = [](const void *stream, const unsigned int length, void *context) -> void {
-            ((CameraHalTest *)context)->ReadDataFromG711aFile(stream, length);
+        ReadAudioFileCallback audioCallback = [](const void *stream,
+                                                 const unsigned int length,
+                                                 const unsigned long long duration_us,
+                                                 void *context) -> void {
+            ((CameraHalTest *)context)->ReadDataFromG711aFile(stream, length, duration_us);
         };
         mReadG711aFile = ICreateMediaBase(MEDIA_HANDLE_TYPE_READ_G711A);
         ISetReadVideoCallback(mReadG711aFile, audioCallback, this);
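
Review note: both lambdas now receive the packet duration, and they stay capture-free, so they still convert to the plain function-pointer typedefs. Two small things may be worth a look: the identifier videCallback is presumably a typo for videoCallback, and the audio file is registered through ISetReadVideoCallback — if the API also offers an audio-specific setter, this could be a copy-paste leftover (it is unchanged by this commit, so possibly intentional). A sketch of what any other registrant must now match:

    // Sketch: the widened signature every registered callback must match.
    ReadVideoFileCallback cb = [](const void *stream,
                                  const unsigned int length,
                                  const unsigned long long duration_us,
                                  void *context) -> void {
        (void)stream;
        (void)context;
        LogInfo("packet: %u bytes, %llu us\n", length, duration_us);
    };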
@@ -129,19 +135,25 @@ void CameraHalTest::MockReportMediaStream(void)
         mTaskRuning = false;
     }
 }
-void CameraHalTest::ReadDataFromH264File(const void *stream, const unsigned int length)
+void CameraHalTest::ReadDataFromH264File(const void *stream, const unsigned int length,
+                                         const unsigned long long duration_us)
 {
     struct timespec ts;
     clock_gettime(CLOCK_REALTIME, &ts);
     long microseconds = ts.tv_sec * 1000000L + ts.tv_nsec / 1000;
     GetVideoStream(stream, length, microseconds);
+    // LogInfo("CameraHalTest::ReadDataFromH264File duration = %llu\n", microseconds);
+    std::this_thread::sleep_for(std::chrono::milliseconds(duration_us / 1000));
 }
-void CameraHalTest::ReadDataFromG711aFile(const void *stream, const unsigned int length)
+void CameraHalTest::ReadDataFromG711aFile(const void *stream, const unsigned int length,
+                                          const unsigned long long duration_us)
 {
     struct timespec ts;
     clock_gettime(CLOCK_REALTIME, &ts);
     long microseconds = ts.tv_sec * 1000000L + ts.tv_nsec / 1000;
     GetAudioStream(stream, length, microseconds);
+    // LogInfo("CameraHalTest::ReadDataFromG711aFile duration = %ld\n", microseconds);
+    std::this_thread::sleep_for(std::chrono::milliseconds((duration_us / 1000) + 43));
 }
 CameraHalMock::CameraHalMock(const CameraType &cameraType) : CameraHalTest(cameraType)
 {
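
Review note: sleeping for each packet's duration makes the mock feed data at roughly real-time speed. duration_us / 1000 silently drops sub-millisecond remainders, which accumulates as drift over a long file, and the +43 ms on the audio path looks like an empirically chosen pacing tweak the commit does not explain. A drift-free variant, as a sketch rather than a required change:

    // Sketch: sleep in microseconds to keep the sub-millisecond remainder.
    std::this_thread::sleep_for(std::chrono::microseconds(duration_us));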
@@ -37,8 +37,8 @@ protected:
 
 private:
     void MockReportMediaStream(void);
-    void ReadDataFromH264File(const void *stream, const unsigned int length);
-    void ReadDataFromG711aFile(const void *stream, const unsigned int length);
+    void ReadDataFromH264File(const void *stream, const unsigned int length, const unsigned long long duration_us);
+    void ReadDataFromG711aFile(const void *stream, const unsigned int length, const unsigned long long duration_us);
 
 protected:
     const CameraType mCameraType;
@@ -32,18 +32,23 @@ enum StreamType
     STREAM_TYPE_AUDIO_G711A,
     STREAM_TYPE_END
 };
-typedef struct stream_info
+/**
+ * @brief
+ * NOTE: The timestamp parameter is critical. The time base of the timestamp must be {1,1000000}, which means the unit
+ * is 1us.
+ */
+typedef struct StreamInfo
 {
-    const StreamType mType;
-    const unsigned long long mTimeStamp;
+    const StreamType mType;                 ///< Type of the stream.
+    const unsigned long long mTimeStamp_us; ///< Timestamp of the stream.
 } StreamInfo;
 typedef struct output_file_info
 {
     const char *mFileName;
     const unsigned int mDuration_ms;
 } OutputFileInfo;
-typedef void (*ReadVideoFileCallback)(const void *, const unsigned int, void *);
-typedef void (*ReadAudioFileCallback)(const void *, const unsigned int, void *);
+typedef void (*ReadVideoFileCallback)(const void *, const unsigned int, const unsigned long long, void *);
+typedef void (*ReadAudioFileCallback)(const void *, const unsigned int, const unsigned long long, void *);
 void *ICreateMediaBase(const MediaHandleType type);
 // StatusCode Init(void *object);
 // StatusCode UnInit(void *object);
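
Review note: the new doc block is the contract the rest of the commit enforces — every timestamp entering StreamInfo must already be in microseconds (a {1,1000000} time base). A producer-side sketch that satisfies it, mirroring what CameraHalTest does (assumes POSIX clock_gettime from time.h):

    // Sketch: a wall-clock timestamp in the required 1 us time base.
    struct timespec ts;
    clock_gettime(CLOCK_REALTIME, &ts);
    unsigned long long now_us =
        (unsigned long long)ts.tv_sec * 1000000ULL + (unsigned long long)(ts.tv_nsec / 1000);
    StreamInfo info = {.mType = STREAM_TYPE_VIDEO_H264, .mTimeStamp_us = now_us};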
@@ -136,7 +136,7 @@ void FfmpegDecoder::DecodeData(const void *data, const size_t &size, const unsig
     // mPacket->stream_index = 0;
     mPacket->pts = pts;
     mPacket->dts = mPacket->pts;
-    LogInfo("source data mPacket->pts:%d\n", mPacket->pts);
+    // LogInfo("source data mPacket->pts:%d\n", mPacket->pts);
     AVDecodeData(mPacket, callback);
     return;
 }
@@ -214,7 +214,7 @@ void inline FfmpegDecoder::AVDecodeData(AVPacket *pkt, std::function<void(AVFram
             // // fwrite(frame->data[ch] + data_size * i, 1, data_size, outfile);
             // save_code_stream_file(mFrame->data[ch] + data_size * i, data_size);
             // save_code_stream_file(mFrame->data[0], mFrame->linesize[0]);
-            LogInfo("decode frame pts = %llu, nb_samples = %d\n", mFrame->pts, mFrame->nb_samples);
+            // LogInfo("decode frame pts = %llu, nb_samples = %d\n", mFrame->pts, mFrame->nb_samples);
             callback(mFrame);
         }
         break;
@@ -225,7 +225,7 @@ int FfmpegEncoder::EncodeData(AVFrame *frame, AVStream *stream, std::function<vo
     // LogInfo("Write stream->time_base.num: %d\n", stream->time_base.num);
     // LogInfo("Write stream->time_base.den: %d\n", stream->time_base.den);
     mTmpPkt->stream_index = stream->index;
-    LogInfo("aaaaaaaaaaaaaaaaaaaaaaa Write frame mTmpPkt->pts: %llu\n", mTmpPkt->pts);
+    // LogInfo("aaaaaaaaaaaaaaaaaaaaaaa Write frame mTmpPkt->pts: %llu\n", mTmpPkt->pts);
 
     if (callback) {
         // save_code_stream_file(mTmpPkt->data, mTmpPkt->size);
@@ -325,7 +325,7 @@ AVFrame *FfmpegEncoder::ConvertAudioFrame(AVFrame *decodeFrame, struct SwrContex
         LogError("decodeFrame is null\n");
         return nullptr;
     }
-    LogInfo("decodeFrame->pts = %d\n", decodeFrame->pts);
+    // LogInfo("decodeFrame->pts = %d\n", decodeFrame->pts);
     // decodeFrame->pts = next_pts;
     // next_pts += decodeFrame->nb_samples;
     int ret = 0;
@@ -355,9 +355,9 @@ AVFrame *FfmpegEncoder::ConvertAudioFrame(AVFrame *decodeFrame, struct SwrContex
         LogError("Error while converting\n");
         return nullptr;
     }
-    LogInfo("mCodecCtx->time_base.num = %d, mCodecCtx->time_base.den=%d\n",
-            mCodecCtx->time_base.num,
-            mCodecCtx->time_base.den);
+    // LogInfo("mCodecCtx->time_base.num = %d, mCodecCtx->time_base.den=%d\n",
+    //         mCodecCtx->time_base.num,
+    //         mCodecCtx->time_base.den);
     mFrame->pts = av_rescale_q(decodeFrame->pts, (AVRational){1, 1000000}, mCodecCtx->time_base);
     // LogInfo("decodeFrame->pts = %d\n", decodeFrame->pts);
     // LogInfo("mFrame->pts = %d\n", mFrame->pts);
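
Review note: this av_rescale_q call is where the {1,1000000} contract pays off — the incoming pts is known to be in microseconds, so one rescale lands it in the encoder's time base. For reference, av_rescale_q(a, bq, cq) computes a * bq / cq with 64-bit-safe rounding; a worked example under an assumed 8 kHz audio codec time base:

    // Sketch: 125 us in a {1,1000000} base -> one sample tick at 8 kHz.
    // 125 * (1/1000000) / (1/8000) = 1
    int64_t tick = av_rescale_q(125, (AVRational){1, 1000000}, (AVRational){1, 8000});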
@@ -90,10 +90,10 @@ void FfmpegMuxStreamV2::GetStreamData(const void *data, const size_t &size, cons
         }
     }
     if (streamInfo.mType == STREAM_TYPE_VIDEO_H264 && mVideoStream) {
-        mVideoStream->WriteSourceData(data, size, streamInfo.mTimeStamp);
+        mVideoStream->WriteSourceData(data, size, streamInfo.mTimeStamp_us);
     }
     if (streamInfo.mType == STREAM_TYPE_AUDIO_G711A && mAudioStream) {
-        mAudioStream->WriteSourceData(data, size, streamInfo.mTimeStamp);
+        mAudioStream->WriteSourceData(data, size, streamInfo.mTimeStamp_us);
     }
 }
 StatusCode inline FfmpegMuxStreamV2::OpenMuxOutputFile(const std::string &fileName)
@@ -110,8 +110,8 @@ void FfmpegOutputStream::WriteSourceData(const void *data, const size_t &size, c
     tmpPkt->duration = ZERO_MEANS_UNKNOWN;
     // tmpPkt->pts = u64Interval * 1000; // Convert to us.
     tmpPkt->pts = av_rescale_q(pts, (AVRational){1, 1000000}, mStream->time_base);
-    LogInfo("vvvvvvvvvvvvvvvvvvvvvvvvv num:%d, den:%d\n", mStream->time_base.num, mStream->time_base.den);
-    LogInfo("vvvvvvvvvvvvvvvvvvvvvvvvv pts:%llu, duration:%d\n", tmpPkt->pts, tmpPkt->duration);
+    // LogInfo("vvvvvvvvvvvvvvvvvvvvvvvvv num:%d, den:%d\n", mStream->time_base.num, mStream->time_base.den);
+    // LogInfo("vvvvvvvvvvvvvvvvvvvvvvvvv pts:%llu, duration:%d\n", tmpPkt->pts, tmpPkt->duration);
     // tmpPkt->pts = pts;
     u64Interval++;
     tmpPkt->dts = tmpPkt->pts;
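
Review note: mirroring dts from pts is sound here as long as this pipeline's H.264 input carries no B-frames (and G.711A never reorders), so decode and presentation order coincide. For the video path, the same microsecond rescale at a typical 90 kHz stream time base (an assumption for illustration) works out as in this sketch:

    // Sketch: one ~30 fps frame (33333 us) in a {1,1000000} base
    // rescales to 33333 * 90000 / 1000000, about 3000 ticks at 90 kHz.
    int64_t ticks = av_rescale_q(33333, (AVRational){1, 1000000}, (AVRational){1, 90000});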
@@ -108,7 +108,7 @@ void FfmpegReadFile::ReadFileThread(AVFormatContext *pFormatCtx, int mediaStream
         LogWarning("ReadVideoCallback is null.\n");
     }
     AVPacket packet;
-    unsigned int playTimeMs = 0;
+    unsigned long long playTime = 0;
     // av_new_packet(&packet, AV_INPUT_BUFFER_MIN_SIZE);
     while (av_read_frame(pFormatCtx, &packet) >= 0) {
         if (nullptr == mReadVideoCallback) {
@@ -121,21 +121,29 @@ void FfmpegReadFile::ReadFileThread(AVFormatContext *pFormatCtx, int mediaStream
         }
         // Checks whether the packet belongs to a video stream.
        if (packet.stream_index == mediaStreamIndex) {
-            playTimeMs = (packet.duration * pFormatCtx->streams[mediaStreamIndex]->time_base.num * 1000) /
-                         pFormatCtx->streams[mediaStreamIndex]->time_base.den;
+            playTime = (packet.duration * pFormatCtx->streams[mediaStreamIndex]->time_base.num) /
+                       pFormatCtx->streams[mediaStreamIndex]->time_base.den;
             // AVRational time_base = pFormatCtx->streams[mediaStreamIndex]->time_base;
             // int64_t duration_ms = av_rescale_q(packet.duration, time_base, {1, AV_TIME_BASE}) * 1000;
             // LogInfo("Frame data address: %p, length: %zu\n", packet.data, packet.size);
-            // LogInfo("Play time ms:%d\n", playTimeMs);
             // static unsigned long long timeAmout = 0;
-            // timeAmout += playTimeMs;
+            // timeAmout += playTime;
             // LogInfo("Time amout: %llu\n", timeAmout);
-            // LogInfo("time base: num = %d, den = %d\n",
+            // LogInfo("time base: num = %d, den = %d, duration = %d\n",
             //         pFormatCtx->streams[mediaStreamIndex]->time_base.num,
-            //         pFormatCtx->streams[mediaStreamIndex]->time_base.den);
+            //         pFormatCtx->streams[mediaStreamIndex]->time_base.den,
+            //         packet.duration);
             // LogInfo("pFormatCtx->bit_rate = %ld\n", pFormatCtx->bit_rate);
-            ReadFrame(&packet);
-            std::this_thread::sleep_for(std::chrono::milliseconds(playTimeMs));
+            playTime = (unsigned long long)(packet.duration * av_q2d(pFormatCtx->streams[mediaStreamIndex]->time_base) *
+                                            1000000);
+            // LogInfo("playTime time ms:%llu\n", playTime);
+            int64_t duration_us = av_rescale_q(
+                packet.duration, pFormatCtx->streams[mediaStreamIndex]->time_base, (AVRational){1, 1000000});
+            unsigned long long playTime_us =
+                av_rescale_q(playTime, pFormatCtx->streams[mediaStreamIndex]->time_base, (AVRational){1, 1000000});
+            // LogInfo("playTime_us time ms:%llu\n", playTime_us);
+            ReadFrame(&packet, duration_us);
+            // std::this_thread::sleep_for(std::chrono::milliseconds(playTime));
         }
         // Release the data packet.
         av_packet_unref(&packet);
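
Review note: the packet duration is now converted three ways in this block — the integer num/den division into playTime, the av_q2d floating-point form, and the av_rescale_q form that actually feeds ReadFrame. They agree, but only duration_us is consumed; playTime_us additionally rescales a value that is already in microseconds, so it double-converts and, with its log commented out, is dead code. A sketch of the two equivalent single conversions, with st as a hypothetical shorthand for the stream:

    // Sketch: both lines turn packet.duration (stream time_base ticks) into us.
    AVStream *st = pFormatCtx->streams[mediaStreamIndex];
    int64_t us_exact = av_rescale_q(packet.duration, st->time_base, (AVRational){1, 1000000});
    int64_t us_float = (int64_t)(packet.duration * av_q2d(st->time_base) * 1000000);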
@@ -144,12 +152,12 @@ void FfmpegReadFile::ReadFileThread(AVFormatContext *pFormatCtx, int mediaStream
 
     avformat_close_input(&pFormatCtx);
 }
-void inline FfmpegReadFile::ReadFrame(AVPacket *packet)
+void inline FfmpegReadFile::ReadFrame(AVPacket *packet, const unsigned int duration_us)
 {
     if (AVMEDIA_TYPE_VIDEO == mFFmpegMediaType) {
-        mReadVideoCallback(packet->data, packet->size, mReadVideoCallbackContext);
+        mReadVideoCallback(packet->data, packet->size, duration_us, mReadVideoCallbackContext);
     }
     else if (AVMEDIA_TYPE_AUDIO == mFFmpegMediaType) {
-        mReadVideoCallback(packet->data, packet->size, mReadVideoCallbackContext);
+        mReadVideoCallback(packet->data, packet->size, duration_us, mReadVideoCallbackContext);
     }
 }
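
Review note: ReadFrame narrows the int64_t duration_us computed above to unsigned int, then the callback widens it back to unsigned long long. That is lossless for any packet shorter than UINT_MAX microseconds (about 71 minutes), but carrying unsigned long long end to end would remove the implicit conversions; a one-line sketch of the width-consistent signature:

    // Sketch: matches the ReadVideoFileCallback typedef exactly.
    void ReadFrame(AVPacket *packet, const unsigned long long duration_us);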
@@ -30,7 +30,7 @@ public: // About read media file.
 
 private:
     void ReadFileThread(AVFormatContext *pFormatCtx, int video_stream_index);
-    void ReadFrame(AVPacket *packet);
+    void ReadFrame(AVPacket *packet, const unsigned int duration_us);
 
 private:
     ReadVideoFileCallback mReadVideoCallback;