Merge remote-tracking branch 'ipc-sdk/master-develop' into without-testtool

Author: Fancy code
Date: 2024-07-06 20:26:23 +08:00
Commit: 7ba189be9b
15 changed files with 335 additions and 90 deletions

View File

@ -5,48 +5,39 @@ if(NOT NASM)
message("nasm not found. Now install.")
execute_process(COMMAND sudo apt install nasm WORKING_DIRECTORY ${EXTERNAL_SOURCE_PATH}/)
endif()
set(FFMPEG_COMMON_CONFIG "--enable-cross-compile --target-os=linux \
--cc=${CMAKE_C_COMPILER} \
--cxx=${CMAKE_CXX_COMPILER} \
--prefix=${FFMPEG_INSTALL_PATH} \
--enable-parsers --disable-decoders --enable-decoder=h264 --enable-libfdk-aac \
--disable-debug --enable-ffmpeg --enable-static --disable-stripping --disable-doc \
--enable-gpl --enable-nonfree --enable-version3 --enable-small \
--disable-mipsdsp --disable-mipsdspr2 \
--disable-encoders \
--disable-muxers --enable-muxer=mov --enable-muxer=mp4 --enable-encoder=mpeg4 \
--enable-decoder=aac --enable-encoder=aac --enable-decoder=pcm_alaw --enable-encoder=pcm_alaw \
--enable-demuxer=mov \
--disable-protocols --enable-protocol=file \
--disable-bsfs --enable-bsf=aac_adtstoasc --enable-bsf=h264_mp4toannexb --enable-bsf=hevc_mp4toannexb \
--disable-indevs --disable-outdevs \
--extra-libs=-lm \
--extra-cflags=\"-I${EXTERNAL_LIBS_OUTPUT_PATH}/fdk-aac/include\" \
--extra-ldflags=\"-L${EXTERNAL_LIBS_OUTPUT_PATH}/fdk-aac/lib\"")
if(${TARGET_PLATFORM} MATCHES ${DEFINE_LINUX})
set(CONFIGURE_COMMAND "--enable-cross-compile --target-os=linux --arch=x86_64 \
--cc=${CMAKE_C_COMPILER} \
--cxx=${CMAKE_CXX_COMPILER} \
--prefix=${FFMPEG_INSTALL_PATH} \
--enable-parsers --enable-decoder=h264 --enable-libfdk-aac \
--enable-ffmpeg --enable-static \
--enable-gpl --enable-nonfree --enable-version3 --enable-small \
--enable-muxer=mov --enable-muxer=mp4 \
--enable-decoder=aac --enable-decoder=pcm_alaw --enable-encoder=pcm_alaw \
--enable-demuxer=mov \
--enable-protocol=file --enable-bsf=aac_adtstoasc --enable-bsf=h264_mp4toannexb --enable-bsf=hevc_mp4toannexb \
--extra-libs=-lm \
--extra-cflags=\"-I${EXTERNAL_LIBS_OUTPUT_PATH}/fdk-aac/include\" \
--extra-ldflags=\"-L${EXTERNAL_LIBS_OUTPUT_PATH}/fdk-aac/lib\"")
set(CONFIGURE_COMMAND "${FFMPEG_COMMON_CONFIG} --arch=x86_64")
else()
set(CONFIGURE_COMMAND "--enable-cross-compile --target-os=linux --arch=arm64 \
--cc=${CMAKE_C_COMPILER} \
--cxx=${CMAKE_CXX_COMPILER} \
--prefix=${FFMPEG_INSTALL_PATH} \
--disable-asm --enable-parsers --disable-decoders --enable-decoder=h264 --enable-libfdk-aac \
--disable-debug --enable-ffmpeg --enable-shared --enable-static --disable-stripping --disable-doc \
--enable-gpl --enable-nonfree --enable-version3 --enable-small \
--disable-mipsdsp --disable-mipsdspr2 \
--disable-encoders \
--disable-muxers --enable-muxer=mov --enable-muxer=mp4 \
--disable-decoders --enable-decoder=aac \
--disable-filters \
--disable-demuxers --enable-demuxer=mov \
--disable-parsers \
--disable-protocols --enable-protocol=file \
--disable-bsfs --enable-bsf=aac_adtstoasc --enable-bsf=h264_mp4toannexb --enable-bsf=hevc_mp4toannexb \
--disable-indevs \
--disable-outdevs --disable-ffprobe --disable-ffmpeg --disable-ffplay --disable-debug \
--extra-libs=-lm \
--extra-cflags=\"-I${EXTERNAL_LIBS_OUTPUT_PATH}/fdk-aac/include\" \
--extra-ldflags=\"-L${EXTERNAL_LIBS_OUTPUT_PATH}/fdk-aac/lib\"")
set(CONFIGURE_COMMAND "${FFMPEG_COMMON_CONFIG} --arch=arm64 --disable-asm")
endif()
message("Compile ffmpeg comand : ${CONFIGURE_COMMAND}")
add_custom_command(
OUTPUT ${EXTERNAL_LIBS_OUTPUT_PATH}/fdk-aac/lib/libfdk-aac.a
COMMAND echo "Did not found fdk-aac lib in output_files, now compile fdk-aac."
COMMAND make fdk_aac
WORKING_DIRECTORY ${PLATFORM_PATH}/cmake-shell/
)
add_custom_target(
ffmpeg
DEPENDS fdk_aac
DEPENDS ${EXTERNAL_LIBS_OUTPUT_PATH}/fdk-aac/lib/libfdk-aac.a
COMMAND echo "Now compile ffmpeg, please wait..."
COMMAND test -f ${EXTERNAL_SOURCE_PATH}/ffmpeg/ffmpeg-6.1.1/Makefile || tar -xf ffmpeg_6.1.1.orig.tar.xz
COMMAND chmod 777 -R ffmpeg-6.1.1

View File

@ -19,11 +19,19 @@
#include "RecordMp4.h"
#include "StatusCode.h"
#include <chrono>
#include <cstdlib>
#include <functional>
#include <memory>
#include <mutex>
#include <string.h>
#include <thread>
#include <vector>
one_frame_stream::one_frame_stream() : mType(FrameType::END), mData(nullptr), mLength(0)
{
}
one_frame_stream::~one_frame_stream()
{
}
MediaHandle::MediaHandle(const MediaChannel &mediaChannel, const std::shared_ptr<VCameraHal> &cameraHal)
: mMediaChannel(mediaChannel), mCameraHal(cameraHal), mTaskRuning(false)
{
@ -42,10 +50,14 @@ void MediaHandle::Init(void)
void MediaHandle::UnInit(void)
{
mTaskRuning = false;
mCv.notify_one();
mCvTaskHandle.notify_one();
mCvFrameHandle.notify_one();
if (mTaskTimerThread.joinable()) {
mTaskTimerThread.join();
}
if (mFrameHandleThread.joinable()) {
mFrameHandleThread.join();
}
if (mCameraHal) {
/**
* @brief Before releasing the class instance, it is necessary to call the UnInit function to ensure that the
@ -83,6 +95,7 @@ StatusCode MediaHandle::ExecuteTask(std::shared_ptr<VMediaTask> &task)
if (IsCodeOK(code)) {
mCurrentTask = task;
StartTaskTimer();
StartFrameHandle();
}
else {
LogError("Execute task failed.\n");
@ -112,7 +125,7 @@ void MediaHandle::TaskTimer(void)
mTaskRuning = true;
while (mTaskRuning) {
std::unique_lock<std::mutex> lock(mMutex);
mCv.wait_for(lock, std::chrono::milliseconds(TASK_TIMER), [&] {
mCvTaskHandle.wait_for(lock, std::chrono::milliseconds(TASK_TIMER), [&] {
return !mTaskRuning;
});
/**
@ -139,15 +152,75 @@ void MediaHandle::TaskTimer(void)
mCurrentTask.reset();
mMutex.unlock();
}
void MediaHandle::StartFrameHandle(void)
{
auto frameHandleThread = [=](std::shared_ptr<MediaHandle> media) {
LogInfo("StartFrameHandle start.\n");
media->FrameHandle();
};
std::shared_ptr<MediaHandle> media = shared_from_this();
mFrameHandleThread = std::thread(frameHandleThread, media);
}
void MediaHandle::FrameHandle(void)
{
constexpr int TASK_TIMER = 1000 * 1000;
mTaskRuning = true;
while (mTaskRuning) {
std::unique_lock<std::mutex> lock(mMutex);
mCvFrameHandle.wait_for(lock, std::chrono::milliseconds(TASK_TIMER), [&] {
return !mTaskRuning || !mFrameList.empty();
});
if (mFrameList.size() > 0) {
HandleListFrame();
}
}
}
void MediaHandle::HandleListFrame(void)
{
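// The caller (FrameHandle) holds mMutex here, so the stream callbacks cannot
// modify mFrameList while it is being drained.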
int leftFrameCount = -1;
do {
OneFrameStream &frontFrame = mFrameList.front();
OneFrameStream handleIt;
handleIt.mData = frontFrame.mData;
handleIt.mLength = frontFrame.mLength;
handleIt.mType = frontFrame.mType;
mFrameList.pop_front();
if (FrameType::VIDEO == handleIt.mType) {
mStreamHandle->GetVideoStream(handleIt.mData, handleIt.mLength, 0);
}
if (FrameType::AUDIO == handleIt.mType) {
mStreamHandle->GetAudioStream(handleIt.mData, handleIt.mLength, 0);
}
free(handleIt.mData);
handleIt.mData = nullptr;
leftFrameCount = mFrameList.size();
} while (leftFrameCount > 0);
}
CameraTaskType MediaHandle::TaskTypeConvert(const MediaTaskType &type)
{
return CameraTaskType::END;
}
void MediaHandle::GetVideoStreamCallback(const void *stream, const int &length, const unsigned long long &timeStamp)
{
mStreamHandle->GetVideoStream(stream, length, timeStamp);
std::unique_lock<std::mutex> lock(mMutex);
// mStreamHandle->GetVideoStream(stream, length, timeStamp);
OneFrameStream addFrame;
addFrame.mData = malloc(length);
addFrame.mLength = length;
memcpy(addFrame.mData, stream, length);
addFrame.mType = FrameType::VIDEO;
mFrameList.push_back(addFrame);
mCvFrameHandle.notify_one();
}
void MediaHandle::GetAudioStreamCallback(const void *stream, const int &length, const unsigned long long &timeStamp)
{
mStreamHandle->GetAudioStream(stream, length, timeStamp);
std::unique_lock<std::mutex> lock(mMutex);
// mStreamHandle->GetAudioStream(stream, length, timeStamp);
OneFrameStream addFrame;
addFrame.mData = malloc(length);
addFrame.mLength = length;
memcpy(addFrame.mData, stream, length);
addFrame.mType = FrameType::AUDIO;
mFrameList.push_back(addFrame);
mCvFrameHandle.notify_one();
}
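
The two callbacks above only copy each incoming buffer into mFrameList and wake the worker; the actual stream handling happens on the FrameHandle thread. A minimal, self-contained sketch of that producer/consumer pattern, using hypothetical names (FrameQueue, Frame) rather than the classes in this commit:

#include <condition_variable>
#include <cstddef>
#include <list>
#include <mutex>
#include <vector>

// Illustrative sketch only, not code from this commit.
struct Frame {
    std::vector<unsigned char> data; // owns a copy of the callback buffer
};

class FrameQueue {
public:
    // Producer side, called from the stream callback: copy and enqueue.
    void Push(const void *data, size_t length)
    {
        std::unique_lock<std::mutex> lock(mMutex);
        Frame frame;
        const unsigned char *src = static_cast<const unsigned char *>(data);
        frame.data.assign(src, src + length);
        mList.push_back(std::move(frame));
        mCv.notify_one();
    }
    // Consumer side, run on a worker thread: drain until Stop() is called.
    void Run()
    {
        std::unique_lock<std::mutex> lock(mMutex);
        while (mRunning) {
            mCv.wait(lock, [this] { return !mRunning || !mList.empty(); });
            while (!mList.empty()) {
                Frame frame = std::move(mList.front());
                mList.pop_front();
                // Hand frame.data to the muxer here.
            }
        }
    }
    void Stop()
    {
        std::unique_lock<std::mutex> lock(mMutex);
        mRunning = false;
        mCv.notify_one();
    }

private:
    std::mutex mMutex;
    std::condition_variable mCv;
    std::list<Frame> mList;
    bool mRunning = true;
};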

View File

@ -18,11 +18,26 @@
#include "IMediaManager.h"
#include "VStreamHandle.h"
#include <condition_variable>
#include <list>
#include <mutex>
#include <thread>
using std::placeholders::_1;
using std::placeholders::_2;
using std::placeholders::_3;
enum class FrameType
{
VIDEO,
AUDIO,
END
};
typedef struct one_frame_stream
{
one_frame_stream();
~one_frame_stream();
FrameType mType;
void *mData;
int mLength;
} OneFrameStream;
class MediaHandle : public VMediaHandle, public std::enable_shared_from_this<MediaHandle>
{
public:
@ -39,18 +54,24 @@ protected:
private:
void StartTaskTimer(void);
void TaskTimer(void);
void StartFrameHandle(void);
void FrameHandle(void);
void HandleListFrame(void);
CameraTaskType TaskTypeConvert(const MediaTaskType &type);
void GetVideoStreamCallback(const void *stream, const int &length, const unsigned long long &timeStamp);
void GetAudioStreamCallback(const void *stream, const int &length, const unsigned long long &timeStamp);
private:
std::mutex mMutex;
std::condition_variable mCv;
std::condition_variable mCvTaskHandle;
std::condition_variable mCvFrameHandle;
const MediaChannel &mMediaChannel;
std::shared_ptr<VCameraHal> mCameraHal;
std::weak_ptr<VMediaTask> mCurrentTask;
std::shared_ptr<VStreamHandle> mStreamHandle;
bool mTaskRuning;
std::thread mTaskTimerThread;
std::thread mFrameHandleThread;
std::list<OneFrameStream> mFrameList;
};
#endif

View File

@ -21,7 +21,9 @@
#include <cstdlib>
#include <cstring>
#include <memory>
#include <mutex>
#include <string>
#include <utility>
#include <vector>
RecordMp4::RecordMp4(std::shared_ptr<VMediaTask> &recordTask) : mRecordMp4Object(nullptr), mRecordTask(recordTask)
{

View File

@ -14,6 +14,7 @@
*/
#include "VStreamHandle.h"
#include "ILog.h"
#include "IMediaManager.h"
#include "StatusCode.h"
#include <vector>
StatusCode VStreamHandle::Init(void)

View File

@ -19,7 +19,7 @@ aux_source_directory(./src SRC_FILES)
set(TARGET_NAME ConfigBase)
add_library(${TARGET_NAME} STATIC ${SRC_FILES})
target_link_libraries(${TARGET_NAME} StatusCode Log config)
target_link_libraries(${TARGET_NAME} StatusCode Log libconfig.a)
add_custom_target(
ConfigBase_code_check

View File

@ -33,8 +33,9 @@ extern "C" {
#include <cstdlib>
#include <errno.h>
#include <functional>
#include <stdint.h>
FfmpegDecoder::FfmpegDecoder(const enum AVCodecID &codecId)
: mCodecId(codecId), mCodec(nullptr), mCodecCtx(nullptr), mFrame(nullptr)
: mCodecId(codecId), mCodec(nullptr), mCodecCtx(nullptr), mFrame(nullptr), mPacket(nullptr), mParser(nullptr)
{
}
bool FfmpegDecoder::Init(void)
@ -44,7 +45,7 @@ bool FfmpegDecoder::Init(void)
mCodec = (AVCodec *)avcodec_find_decoder(mCodecId);
// mCodec = (AVCodec *)avcodec_find_decoder_by_name("libfdk_aac");
if (!(mCodec)) {
LogError("Codec not found\n");
LogError("decoder not found:%s\n", avcodec_get_name(mCodecId));
return false;
}
mCodecCtx = avcodec_alloc_context3((const AVCodec *)(mCodec));
@ -84,6 +85,16 @@ bool FfmpegDecoder::Init(void)
LogError("Could not allocate video frame\n");
return false;
}
mPacket = av_packet_alloc();
if (!mPacket) {
LogError("Could not allocate video frame\n");
return false;
}
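/* The parser reassembles complete frames from arbitrary input chunks so that
 * avcodec_send_packet() always receives whole packets (see AVParseData()). */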
mParser = av_parser_init(mCodec->id);
if (!mParser) {
LogError("mParser not found : %s\n", avcodec_get_name(mCodec->id));
return false;
}
if (AVMEDIA_TYPE_AUDIO == mCodec->type) {
mFrame->nb_samples = mCodecCtx->frame_size;
mFrame->format = mCodecCtx->sample_fmt;
@ -105,19 +116,63 @@ bool FfmpegDecoder::UnInit(void)
avcodec_free_context(&mCodecCtx);
mCodecCtx = nullptr;
}
av_packet_free(&mPacket);
mPacket = nullptr;
if (mParser) {
av_parser_close(mParser);
mParser = nullptr;
}
return true;
}
void FfmpegDecoder::DecodeData(const void *data, const size_t &size, std::function<void(AVFrame *frame)> callback)
{
AVPacket *packet = nullptr;
packet = av_packet_alloc();
packet->data = (unsigned char *)data;
packet->size = size;
int ret = avcodec_send_packet(mCodecCtx, packet);
if (nullptr == mParser) {
mPacket->data = (uint8_t *)data;
mPacket->size = size;
// mPacket->stream_index = 0;
mPacket->pts = AV_NOPTS_VALUE;
mPacket->dts = AV_NOPTS_VALUE;
AVDecodeData(mPacket, callback);
return;
}
AVParseData(data, size, callback);
}
void inline FfmpegDecoder::AVParseData(const void *data, const size_t &size,
std::function<void(AVFrame *frame)> callback)
{
if (nullptr == data) {
LogError("data is null\n");
return;
}
uint8_t *frameData = (uint8_t *)data;
size_t data_size = size;
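/* Feed the buffer to the parser piece by piece: av_parser_parse2() reports how
 * many input bytes it consumed and fills mPacket once a full frame is ready. */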
while (data_size > 0) {
int ret = av_parser_parse2(mParser,
mCodecCtx,
&mPacket->data,
&mPacket->size,
frameData,
data_size,
AV_NOPTS_VALUE,
AV_NOPTS_VALUE,
0);
if (ret < 0) {
LogError("av_parse_frame failed\n");
break;
}
frameData += ret;
data_size -= ret;
if (mPacket->size) {
AVDecodeData(mPacket, callback);
}
}
}
void inline FfmpegDecoder::AVDecodeData(AVPacket *pkt, std::function<void(AVFrame *frame)> callback)
{
int ret = avcodec_send_packet(mCodecCtx, pkt);
if (ret < 0) {
LogError("Error sending a packet for decoding\n");
av_packet_unref(packet);
av_packet_free(&packet);
av_packet_unref(pkt);
return;
}
while (ret >= 0) {
@ -132,14 +187,9 @@ void FfmpegDecoder::DecodeData(const void *data, const size_t &size, std::functi
if (callback) {
callback(mFrame);
}
// mFrame->pts = mAudioSt.next_pts;
// mAudioSt.next_pts += mFrame->nb_samples;
// ConvertAudioFrame(mFrame, mAudioSt.enc, &mAudioSt);
// write_frame(mOc, mAudioSt.enc, mAudioSt.st, mAudioSt.frame, mAudioSt.tmp_pkt);
break;
}
av_packet_unref(packet);
av_packet_free(&packet);
av_packet_unref(pkt);
}
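
For context, a sketch of how a caller might drive this decoder; it assumes the FfmpegDecoder interface from this diff is available (the header name below is assumed) and that the buffer comes from the demuxer or camera callback:

#include "FfmpegDecoder.h" // header name assumed for illustration
#include <cstddef>

// Hypothetical helper: decode one compressed buffer and handle every frame
// the decoder produces for it.
static void DecodeOneBuffer(const void *buffer, const size_t size)
{
    FfmpegDecoder decoder(AV_CODEC_ID_H264);
    if (!decoder.Init()) {
        return;
    }
    decoder.DecodeData(buffer, size, [](AVFrame *frame) {
        // Each fully decoded frame lands here, e.g. hand it to FfmpegEncoder.
    });
    decoder.UnInit();
}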
/* just pick the highest supported samplerate */
int FfmpegDecoder::select_sample_rate(const AVCodec *codec)

View File

@ -42,6 +42,10 @@ public:
bool UnInit(void);
void DecodeData(const void *data, const size_t &size, std::function<void(AVFrame *frame)> callback);
private:
void AVParseData(const void *data, const size_t &size, std::function<void(AVFrame *frame)> callback);
void AVDecodeData(AVPacket *pkt, std::function<void(AVFrame *frame)> callback);
private:
static int select_sample_rate(const AVCodec *codec);
static int select_channel_layout(const AVCodec *codec, AVChannelLayout *dst);
@ -52,5 +56,7 @@ private:
AVCodec *mCodec;
AVCodecContext *mCodecCtx;
AVFrame *mFrame;
AVPacket *mPacket;
AVCodecParserContext *mParser;
};
#endif

View File

@ -82,7 +82,6 @@ bool FfmpegEncoder::Init(int &outputFlags)
}
mCodecCtx->sample_rate = 8000;
av_channel_layout_copy(&mCodecCtx->ch_layout, &src);
// st->time_base = (AVRational){1, mCodecCtx->sample_rate};
break;
case AVMEDIA_TYPE_VIDEO:
@ -96,7 +95,6 @@ bool FfmpegEncoder::Init(int &outputFlags)
* of which frame timestamps are represented. For fixed-fps content,
* timebase should be 1/framerate and timestamp increments should be
* identical to 1. */
// st->time_base = (AVRational){1, STREAM_FRAME_RATE};
mCodecCtx->time_base = (AVRational){1, STREAM_FRAME_RATE};
mCodecCtx->gop_size = 12; /* emit one intra frame every twelve frames at most */
@ -202,19 +200,9 @@ int FfmpegEncoder::EncodeData(AVFrame *frame, AVStream *stream, std::function<vo
av_packet_rescale_ts(mTmpPkt, mCodecCtx->time_base, stream->time_base);
mTmpPkt->stream_index = stream->index;
/* Write the compressed frame to the media file. */
// log_packet(fmt_ctx, pkt);
// ret = av_interleaved_write_frame(fmt_ctx, pkt);
if (callback) {
callback(mTmpPkt);
}
/* pkt is now blank (av_interleaved_write_frame() takes ownership of
* its contents and resets pkt), so that no unreferencing is necessary.
* This would be different if one used av_write_frame(). */
// if (ret < 0) {
// fprintf(stderr, "Error while writing output packet: %s\n", av_err2str(ret));
// return AVERROR_EXIT;
// }
}
return ret == AVERROR_EOF ? 1 : 0;

View File

@ -34,7 +34,7 @@ extern "C" {
#include <memory>
// #include <mutex>
#include <string>
FfmpegMuxStreamV2::FfmpegMuxStreamV2() : mOutputFormat(nullptr), mOptions(nullptr)
FfmpegMuxStreamV2::FfmpegMuxStreamV2() : mOutputFormat(nullptr), mOptions(nullptr), mFilesMuxing(false)
{
}
StatusCode FfmpegMuxStreamV2::OpenOutputFile(const std::string &fileName)
@ -43,7 +43,7 @@ StatusCode FfmpegMuxStreamV2::OpenOutputFile(const std::string &fileName)
}
StatusCode FfmpegMuxStreamV2::CloseOutputFile(void)
{
if (mOutputFormat && mOutputFormat->pb) {
if (mOutputFormat && mOutputFormat->pb && mFilesMuxing) {
av_write_trailer(mOutputFormat);
}
if (mVideoStream) {
@ -64,6 +64,27 @@ StatusCode FfmpegMuxStreamV2::CloseOutputFile(void)
}
void FfmpegMuxStreamV2::GetStreamData(const void *data, const size_t &size, const StreamInfo &streamInfo)
{
int ret = 0;
if (!mFilesMuxing) {
bool fileMuxing = false;
fileMuxing = mVideoStream->CheckStreamHeader(data, size);
if (fileMuxing) {
/* Write the stream header, if any. */
ret = avformat_write_header(mOutputFormat, nullptr);
if (ret < 0) {
char error_str[AV_ERROR_MAX_STRING_SIZE] = {0};
LogError("Error occurred when opening output file: %s\n",
av_make_error_string(error_str, AV_ERROR_MAX_STRING_SIZE, ret));
return;
}
mFilesMuxing = true;
}
else {
LogWarning("Stream header not found, skip this frame.\n");
return;
}
}
LogInfo("Write frame size: %zu.\n", size);
if (streamInfo.mType == STREAM_TYPE_VIDEO_H264 && mVideoStream) {
mVideoStream->WriteSourceData(data, size);
}
@ -71,7 +92,7 @@ void FfmpegMuxStreamV2::GetStreamData(const void *data, const size_t &size, cons
mAudioStream->WriteSourceData(data, size);
}
}
StatusCode FfmpegMuxStreamV2::OpenMuxOutputFile(const std::string &fileName)
StatusCode inline FfmpegMuxStreamV2::OpenMuxOutputFile(const std::string &fileName)
{
AVDictionary *opt = nullptr;
int ret = 0;
@ -84,7 +105,8 @@ StatusCode FfmpegMuxStreamV2::OpenMuxOutputFile(const std::string &fileName)
/* Add the audio and video streams using the default format codecs
* and initialize the codecs. */
if (mOutputFormat->oformat->video_codec != AV_CODEC_ID_NONE) {
mVideoStream = AddStream(mOutputFormat, mOutputFormat->oformat->video_codec, AV_CODEC_ID_H264);
mVideoStream = AddStream(mOutputFormat, AV_CODEC_ID_NONE, AV_CODEC_ID_NONE);
// mVideoStream = AddStream(mOutputFormat, mOutputFormat->oformat->video_codec, AV_CODEC_ID_H264);
mVideoStream->SetWriteSourceDataCallback(
std::bind(&FfmpegMuxStreamV2::GetAVPacketDataCallback, this, std::placeholders::_1));
}
@ -104,6 +126,7 @@ StatusCode FfmpegMuxStreamV2::OpenMuxOutputFile(const std::string &fileName)
av_make_error_string(error_str, AV_ERROR_MAX_STRING_SIZE, ret));
}
}
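/* The stream header is no longer written here: GetStreamData() calls
 * avformat_write_header() once CheckStreamHeader() has found the in-band
 * SPS/PPS data, so the code below is not reached anymore. */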
return CreateStatusCode(STATUS_CODE_OK);
/* Write the stream header, if any. */
ret = avformat_write_header(mOutputFormat, &opt);
if (ret < 0) {
@ -112,6 +135,7 @@ StatusCode FfmpegMuxStreamV2::OpenMuxOutputFile(const std::string &fileName)
av_make_error_string(error_str, AV_ERROR_MAX_STRING_SIZE, ret));
return CreateStatusCode(STATUS_CODE_NOT_OK);
}
mFilesMuxing = true;
return CreateStatusCode(STATUS_CODE_OK);
}
void FfmpegMuxStreamV2::GetAVPacketDataCallback(AVPacket *pkt)

View File

@ -63,5 +63,6 @@ private:
std::shared_ptr<FfmpegOutputStream> mVideoStream;
std::shared_ptr<FfmpegOutputStream> mAudioStream;
AVDictionary *mOptions;
bool mFilesMuxing;
};
#endif

View File

@ -22,15 +22,21 @@ extern "C" {
#include <libavcodec/codec_id.h>
#include <libavcodec/packet.h>
#include <libavformat/avformat.h>
#include <libavutil/avutil.h>
#include <libavutil/frame.h>
#include <libavutil/mathematics.h>
#include <libavutil/mem.h>
#include <libavutil/pixfmt.h>
#ifdef __cplusplus
}
#endif
#include <cstddef>
#include <cstdint>
#include <functional>
#include <memory>
#include <string.h>
FfmpegOutputStream::FfmpegOutputStream(const AVCodecID &encodecId, const AVCodecID &dncodecId)
: mEncodecId(encodecId), mDeccodecId(dncodecId), mTmpPkt(nullptr), mStream(nullptr)
: mEncodecId(encodecId), mDeccodecId(dncodecId), mTmpPkt(nullptr), mStream(nullptr), mStreamHeaderWritten(false)
{
}
bool FfmpegOutputStream::Init(AVFormatContext *outputFormat)
@ -46,33 +52,112 @@ bool FfmpegOutputStream::Init(AVFormatContext *outputFormat)
LogError("Could not allocate stream\n");
return false;
}
mDecoder = std::make_shared<FfmpegDecoder>(mDeccodecId);
mDecoder->Init();
if (mDeccodecId != AV_CODEC_ID_NONE) {
mDecoder = std::make_shared<FfmpegDecoder>(mDeccodecId);
mDecoder->Init();
}
mStream->id = outputFormat->nb_streams - 1;
mEncoder = std::make_shared<FfmpegEncoder>(mEncodecId);
mEncoder->Init(outputFormat->flags);
mStream->time_base = mEncoder->GetTimeBase();
mEncoder->OpenEncoder(nullptr, mStream);
if (mEncodecId != AV_CODEC_ID_NONE) {
mEncoder = std::make_shared<FfmpegEncoder>(mEncodecId);
mEncoder->Init(outputFormat->flags);
mStream->time_base = mEncoder->GetTimeBase();
mEncoder->OpenEncoder(nullptr, mStream);
}
else {
mStream->time_base = (AVRational){1, 15};
// int ret = avcodec_parameters_copy(mStream->codecpar, in_codecpar);
// if (ret < 0) {
// LogError("Failed to copy codec parameters\n");
// return false;
// }
mStream->codecpar->codec_id = AV_CODEC_ID_H264;
mStream->codecpar->codec_type = AVMEDIA_TYPE_VIDEO;
mStream->codecpar->width = 1920;
mStream->codecpar->height = 2160;
// mStream->codecpar->bit_rate = 2073;
mStream->codecpar->format = AV_PIX_FMT_YUV420P;
mStream->codecpar->codec_tag = 0;
mStream->codecpar->extradata = nullptr;
mStream->codecpar->extradata_size = 0;
// mEncoder = std::make_shared<FfmpegEncoder>(AV_CODEC_ID_MPEG4);
// mEncoder->Init(outputFormat->flags);
// mStream->time_base = mEncoder->GetTimeBase();
// mEncoder->OpenEncoder(nullptr, mStream);
// mEncoder->UnInit();
// mEncoder.reset();
}
return true;
}
void FfmpegOutputStream::UnInit(void)
{
mEncoder->UnInit();
mDecoder->UnInit();
if (mEncoder) {
mEncoder->UnInit();
mEncoder.reset();
}
if (mDecoder) {
mDecoder->UnInit();
mDecoder.reset();
}
av_packet_free(&mTmpPkt);
}
void FfmpegOutputStream::WriteSourceData(const void *data, const size_t &size)
{
mDecoder->DecodeData(data, size, mDecodeCallback);
if (mDecoder) {
mDecoder->DecodeData(data, size, mDecodeCallback);
return;
}
AVPacket *packet = av_packet_alloc();
static unsigned long long u64Interval = 0;
AVRational in_timebase = (AVRational){1, 15};
if (mEncodeCallback) {
packet->data = (uint8_t *)data;
packet->size = size;
packet->stream_index = mStream->index;
packet->duration = 0;
// packet->pts = u64Interval * 1000; // convert to microseconds
packet->pts = av_rescale_q(u64Interval, (AVRational){1, 15}, mStream->time_base);
u64Interval++;
packet->dts = packet->pts;
/* copy packet */
// av_packet_rescale_ts(packet, in_timebase, mStream->time_base);
packet->pos = -1;
mEncodeCallback(packet);
}
av_packet_unref(packet);
av_packet_free(&packet);
}
void FfmpegOutputStream::SetWriteSourceDataCallback(std::function<void(AVPacket *pkt)> callback)
{
mEncodeCallback = callback;
}
bool FfmpegOutputStream::CheckStreamHeader(const void *data, const size_t &size)
{
if (mStreamHeaderWritten || mEncodecId != AV_CODEC_ID_NONE) {
return true;
}
char *pData = (char *)data;
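// Look for an Annex-B start code (00 00 00 01) followed by an IDR NAL unit
// (type 5); the bytes preceding it are the SPS/PPS header used as extradata.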
for (size_t i = 0; i + 4 < size; i++) {
if ((0x00 == pData[i]) && (0x00 == pData[i + 1]) && (0x00 == pData[i + 2]) && (0x01 == pData[i + 3]) &&
(0x5 == (pData[i + 4] & 0x1F))) {
uint8_t *extradata = (uint8_t *)av_mallocz(i + 1);
if (!extradata) {
LogError("Could not allocate extradata\n");
return false;
}
LogInfo("Found extradata\n");
memcpy(extradata, pData, i);
mStream->codecpar->extradata = extradata;
mStream->codecpar->extradata_size = i;
mStreamHeaderWritten = true;
return mStreamHeaderWritten;
}
}
return false;
}
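
The same start-code scan can be expressed as a small standalone helper; this is only an illustration of the check CheckStreamHeader performs, not code from the commit:

#include <cstddef>
#include <cstdint>

// Returns the offset of the first IDR NAL unit (type 5) in an Annex-B H.264
// buffer, or -1 if none is found. Everything before that offset is the
// SPS/PPS header that can be used as codec extradata.
static long FindFirstIdrNal(const uint8_t *data, size_t size)
{
    for (size_t i = 0; i + 4 < size; i++) {
        if (data[i] == 0x00 && data[i + 1] == 0x00 && data[i + 2] == 0x00 && data[i + 3] == 0x01 &&
            (data[i + 4] & 0x1F) == 0x05) {
            return (long)i;
        }
    }
    return -1;
}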
void FfmpegOutputStream::GetDecodeDataCallback(AVFrame *frame)
{
if (mEncoder) {
mEncoder->EncodeData(frame, mStream, mEncodeCallback);
}
}

View File

@ -45,10 +45,10 @@ public:
void UnInit(void);
void WriteSourceData(const void *data, const size_t &size);
void SetWriteSourceDataCallback(std::function<void(AVPacket *pkt)> callback);
bool CheckStreamHeader(const void *data, const size_t &size);
private:
void GetDecodeDataCallback(AVFrame *frame);
void GetEncodeDataCallback(AVPacket *pkt);
private:
const AVCodecID mEncodecId;
@ -59,5 +59,6 @@ private:
AVStream *mStream;
std::function<void(AVFrame *)> mDecodeCallback;
std::function<void(AVPacket *)> mEncodeCallback;
bool mStreamHeaderWritten;
};
#endif

View File

@ -22,6 +22,7 @@
#ifdef __cplusplus
extern "C" {
#endif
#include <libavcodec/codec_id.h>
#include <libavcodec/packet.h>
#include <libavformat/avformat.h>
#include <libavutil/avutil.h>
@ -52,7 +53,7 @@ StatusCode FfmpegReadFile::StartReadFile(const std::string &path)
return CreateStatusCode(STATUS_CODE_NOT_OK);
}
if (avformat_find_stream_info(pFormatCtx, nullptr) < 0) {
LogError("Couldn't find stream information.\n");
LogError("Couldn't find stream information.%s\n", path.c_str());
avformat_close_input(&pFormatCtx);
return CreateStatusCode(STATUS_CODE_NOT_OK);
}
@ -60,6 +61,7 @@ StatusCode FfmpegReadFile::StartReadFile(const std::string &path)
for (unsigned int i = 0; i < pFormatCtx->nb_streams; i++) {
if (pFormatCtx->streams[i]->codecpar->codec_type == mFFmpegMediaType) {
mediaStreamIndex = i;
LogInfo("Find stream index=%s.\n", avcodec_get_name(pFormatCtx->streams[i]->codecpar->codec_id));
break;
}
}

View File

@ -81,7 +81,7 @@ add_custom_target(
curl
DEPENDS openssl
COMMAND echo "Build curl. openssl path = ${EXTERNAL_SOURCE_PATH}"
COMMAND ./configure --without-zlib --prefix=${EXTERNAL_SOURCE_PATH}/curl --with-ssl=${EXTERNAL_SOURCE_PATH}/openssl/build ${CURL_HOST} CC=${CMAKE_C_COMPILER}
COMMAND ./configure --disable-shared --without-zlib --prefix=${EXTERNAL_SOURCE_PATH}/curl --with-ssl=${EXTERNAL_SOURCE_PATH}/openssl/build ${CURL_HOST} CC=${CMAKE_C_COMPILER}
COMMAND make
COMMAND cp ${EXTERNAL_SOURCE_PATH}/curl/curl-8.1.2/lib/.libs/lib*.a ${LIBS_OUTPUT_PATH}
COMMAND cp ${EXTERNAL_SOURCE_PATH}/openssl/build/lib/lib*.a ${LIBS_OUTPUT_PATH}