From 86a441f877735ff459c5543c4d526c5044c2dd29 Mon Sep 17 00:00:00 2001 From: Fancy code <258828110.@qq.com> Date: Fri, 21 Jun 2024 11:07:44 +0800 Subject: [PATCH] Backup ffmpeg mux h264 and g711a. --- external/CMakeLists.txt | 28 +- external/ffmpeg/README.md | 22 +- middleware/MediaManager/CMakeLists.txt | 6 +- .../MediaManager/include/IMediaManager.h | 6 + middleware/MediaManager/src/MediaHandle.cpp | 5 +- middleware/MediaManager/src/RecordMp4.cpp | 50 ++ middleware/MediaManager/src/RecordMp4.h | 32 ++ test/utils/CMakeLists.txt | 2 +- test/utils/LinuxApiMock/src/LinuxApiMock.cpp | 4 +- test/utils/LinuxApiMock/src/LinuxTestImpl.cpp | 4 +- test/utils/LinuxApiMock/src/WrapApi.cpp | 4 +- .../tool/src/McuProtocolTestTool.cpp | 4 +- .../tool/src/UartDeviceTestTool.cpp | 4 +- utils/CMakeLists.txt | 2 +- utils/MediaBase/include/MediaBase.h | 18 + utils/MediaBase/src/IMediaBase.cpp | 13 + utils/MediaBase/src/IMediaBase.h | 7 + utils/MediaBase/src/MediaBase.cpp | 21 + utils/MediaBase/src/MediaBaseImpl.cpp | 474 +++++++++++++++++- utils/MediaBase/src/MediaBaseImpl.h | 58 ++- utils/MediaBase/src/MediaBaseMakePtr.cpp | 1 + 21 files changed, 733 insertions(+), 32 deletions(-) create mode 100644 middleware/MediaManager/src/RecordMp4.cpp create mode 100644 middleware/MediaManager/src/RecordMp4.h diff --git a/external/CMakeLists.txt b/external/CMakeLists.txt index 3da84f1..cf1276f 100644 --- a/external/CMakeLists.txt +++ b/external/CMakeLists.txt @@ -3,19 +3,21 @@ add_subdirectory(sqlite3/sqlite-3430000) add_subdirectory(goahead-5.2.0) # ================= httpserver ================= # -find_program(M4 m4) -if(NOT M4) - message("m4 not found. Install before continuing.") - execute_process(COMMAND sudo apt-get install m4 - WORKING_DIRECTORY ${EXTERNAL_SOURCE_PATH}/gtest/) -endif() -find_program(RAGEL ragel) -if(NOT RAGEL) - message(FATAL_ERROR "ragel not found. Install before continuing.") - execute_process(COMMAND sudo apt-get install ragel - WORKING_DIRECTORY ${EXTERNAL_SOURCE_PATH}/gtest/) -endif() -add_subdirectory(httpserver.h-master/src) +# Do not delete this module, it is just disabled for compilation because it is not used. +# 不要删掉该模块,此处只是因为未使用而屏蔽掉编译。 +# find_program(M4 m4) +# if(NOT M4) +# message("m4 not found. Install before continuing.") +# execute_process(COMMAND sudo apt-get install m4 +# WORKING_DIRECTORY ${EXTERNAL_SOURCE_PATH}/) +# endif() +# find_program(RAGEL ragel) +# if(NOT RAGEL) +# message("ragel not found. Now install.") +# execute_process(COMMAND sudo apt-get install ragel +# WORKING_DIRECTORY ${EXTERNAL_SOURCE_PATH}/) +# endif() +# add_subdirectory(httpserver.h-master/src) # ================= httpserver end ================= # add_subdirectory(cJSON-1.7.17) diff --git a/external/ffmpeg/README.md b/external/ffmpeg/README.md index ec0614b..4201ba5 100644 --- a/external/ffmpeg/README.md +++ b/external/ffmpeg/README.md @@ -17,7 +17,7 @@ $ ffplay video.h264 ## 1.2. 问题记录 -### 1.2.1. avformat_open_input执行失败 +### 1.2.1. avformat_open_input 执行失败   在执行avformat_open_input时,返回-1094995529<0,错误 @@ -37,3 +37,23 @@ set(CONFIGURE_COMMAND "--enable-cross-compile --target-os=linux --arch=linux \ --enable-demuxer=mov \ --enable-protocol=file --enable-bsf=aac_adtstoasc --enable-bsf=h264_mp4toannexb --enable-bsf=hevc_mp4toannexb") ``` + +### 1.2.2. 
avformat_open_input 执行失败 + +  打开g711a文件时,提示无效数据。如下: +**Invalid data found when processing input** + +解决: +调用 avformat_open_input 函数时,指定输入文件的格式(第三个参数),g711a文件格式为:alaw。 + +```code +# //utils/MediaBase/src/MediaBaseImpl.cpp +const AVInputFormat *iformat = av_find_input_format(InputFormat(mType)); +AVFormatContext *pFormatCtx = nullptr; +if ((result = avformat_open_input(&pFormatCtx, path.c_str(), iformat, nullptr)) < 0) { + char error_str[AV_ERROR_MAX_STRING_SIZE] = {0}; + av_make_error_string(error_str, AV_ERROR_MAX_STRING_SIZE, result); + LogError("Couldn't open file: %s, result=%s\n", path.c_str(), error_str); + return CreateStatusCode(STATUS_CODE_NOT_OK); +} +``` diff --git a/middleware/MediaManager/CMakeLists.txt b/middleware/MediaManager/CMakeLists.txt index f1884ce..7f1d301 100644 --- a/middleware/MediaManager/CMakeLists.txt +++ b/middleware/MediaManager/CMakeLists.txt @@ -6,11 +6,9 @@ set(LIBRARY_OUTPUT_PATH ${LIBS_OUTPUT_PATH}) include_directories( ./src ./include - ${MIDDLEWARE_SOURCE_PATH}/McuAskBase/include + ${UTILS_SOURCE_PATH}/MediaBase/include ${UTILS_SOURCE_PATH}/StatusCode/include ${UTILS_SOURCE_PATH}/Log/include - ${UTILS_SOURCE_PATH}/McuProtocol/include - ${UTILS_SOURCE_PATH}/UartDevice/include ${HAL_SOURCE_PATH}/include ) #do not rely on any other library @@ -27,7 +25,7 @@ aux_source_directory(./src SRC_FILES) set(TARGET_NAME MediaManager) add_library(${TARGET_NAME} STATIC ${SRC_FILES}) -target_link_libraries(${TARGET_NAME} Hal StatusCode Log) +target_link_libraries(${TARGET_NAME} MediaBase Hal StatusCode Log) add_custom_target( MediaManager_code_check diff --git a/middleware/MediaManager/include/IMediaManager.h b/middleware/MediaManager/include/IMediaManager.h index 8f189f4..097c8c8 100644 --- a/middleware/MediaManager/include/IMediaManager.h +++ b/middleware/MediaManager/include/IMediaManager.h @@ -30,6 +30,12 @@ enum class MediaTaskType TAKE_PICTURE = 0, TAKE_VIDEO, TAKE_PICTURE_AND_VIDEO, + SAVE_STREAM_VIDEO, + SAVE_STREAM_AUDIO, + SAVE_STREAM_AUDIO_AND_VIDEO, + GET_STREAM_AUDIO, + GET_STREAM_VIDEO, + GET_STREAM_AUDIO_AND_VIDEO, MONITOR, END }; diff --git a/middleware/MediaManager/src/MediaHandle.cpp b/middleware/MediaManager/src/MediaHandle.cpp index 3435f19..7a83562 100644 --- a/middleware/MediaManager/src/MediaHandle.cpp +++ b/middleware/MediaManager/src/MediaHandle.cpp @@ -16,7 +16,7 @@ #include "IHalCpp.h" #include "ILog.h" #include "IMediaManager.h" -#include "SaveStream.h" +#include "RecordMp4.h" #include "StatusCode.h" #include #include @@ -66,7 +66,7 @@ StatusCode MediaHandle::ExecuteTask(std::shared_ptr &task) return CreateStatusCode(STATUS_CODE_NOT_OK); } } - mStreamHandle = std::make_shared(); + mStreamHandle = std::make_shared(); if (nullptr == mStreamHandle) { LogError("Create stream handle failed.\n"); return CreateStatusCode(STATUS_CODE_NOT_OK); @@ -115,6 +115,7 @@ void MediaHandle::TaskTimer(void) */ mTaskRuning = false; } + mStreamHandle->UnInit(); if (mCameraHal) { mCameraHal->StopTask(); } diff --git a/middleware/MediaManager/src/RecordMp4.cpp b/middleware/MediaManager/src/RecordMp4.cpp new file mode 100644 index 0000000..a280281 --- /dev/null +++ b/middleware/MediaManager/src/RecordMp4.cpp @@ -0,0 +1,50 @@ +/* + * Copyright (c) 2023 Fancy Code. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this mFileAudio except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +#include "RecordMp4.h" +#include "ILog.h" +#include "MediaBase.h" +#include +#include +#include +RecordMp4::RecordMp4() : mRecordMp4Object(nullptr) +{ +} +void RecordMp4::Init(void) +{ + mRecordMp4Object = ICreateMediaBase(MEDIA_HANDLE_TYPE_COMBINE_MP4); + if (nullptr == mRecordMp4Object) { + LogError("mRecordMp4Object is null.\n"); + return; + } + IOpenOutputFile(mRecordMp4Object, "./record.mp4"); +} +void RecordMp4::UnInit(void) +{ + if (mRecordMp4Object) { + ICloseOutputFile(mRecordMp4Object); + IMediaBaseFree(mRecordMp4Object); + mRecordMp4Object = nullptr; + } +} +void RecordMp4::GetVideoStream(const void *stream, const unsigned int &length, const unsigned long long &timeStamp) +{ + if (mRecordMp4Object) { + StreamInfo info = {.mType = STREAM_TYPE_END}; + IGetStreamData(mRecordMp4Object, stream, length, info); + } +} +void RecordMp4::GetAudioStream(const void *stream, const unsigned int &length, const unsigned long long &timeStamp) +{ +} \ No newline at end of file diff --git a/middleware/MediaManager/src/RecordMp4.h b/middleware/MediaManager/src/RecordMp4.h new file mode 100644 index 0000000..b8118ee --- /dev/null +++ b/middleware/MediaManager/src/RecordMp4.h @@ -0,0 +1,32 @@ +/* + * Copyright (c) 2023 Fancy Code. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +#ifndef RECORD_MP4_H +#define RECORD_MP4_H +#include "VStreamHandle.h" +#include +class RecordMp4 : public VStreamHandle +{ +public: + RecordMp4(); + virtual ~RecordMp4() = default; + void Init(void) override; + void UnInit(void) override; + void GetVideoStream(const void *stream, const unsigned int &length, const unsigned long long &timeStamp) override; + void GetAudioStream(const void *stream, const unsigned int &length, const unsigned long long &timeStamp) override; + +private: + void *mRecordMp4Object; +}; +#endif \ No newline at end of file diff --git a/test/utils/CMakeLists.txt b/test/utils/CMakeLists.txt index 5a21e7a..6e4acbc 100644 --- a/test/utils/CMakeLists.txt +++ b/test/utils/CMakeLists.txt @@ -6,7 +6,7 @@ add_subdirectory(WebServer) add_subdirectory(UartDevice) add_subdirectory(LinuxApiMock) add_subdirectory(McuProtocol) -add_subdirectory(FxHttpServer) +# add_subdirectory(FxHttpServer) add_subdirectory(TestManager) add_subdirectory(TcpModule) add_subdirectory(MediaBase) \ No newline at end of file diff --git a/test/utils/LinuxApiMock/src/LinuxApiMock.cpp b/test/utils/LinuxApiMock/src/LinuxApiMock.cpp index 2a3906f..e719089 100644 --- a/test/utils/LinuxApiMock/src/LinuxApiMock.cpp +++ b/test/utils/LinuxApiMock/src/LinuxApiMock.cpp @@ -15,7 +15,9 @@ #include "LinuxApiMock.h" #include "LinuxTestImpl.h" #include "WrapApi.h" -// #include +#if defined(__x86_64__) + #include +#endif #include #include #include diff --git a/test/utils/LinuxApiMock/src/LinuxTestImpl.cpp b/test/utils/LinuxApiMock/src/LinuxTestImpl.cpp index b3cf17a..4720b57 100644 --- a/test/utils/LinuxApiMock/src/LinuxTestImpl.cpp +++ b/test/utils/LinuxApiMock/src/LinuxTestImpl.cpp @@ -17,7 +17,9 @@ #include "ILog.h" #include "LinuxApiMock.h" #include "WrapApi.h" -// #include +#if defined(__x86_64__) + #include +#endif #include #include #include diff --git a/test/utils/LinuxApiMock/src/WrapApi.cpp b/test/utils/LinuxApiMock/src/WrapApi.cpp index 02ce3bb..b994ee5 100644 --- a/test/utils/LinuxApiMock/src/WrapApi.cpp +++ b/test/utils/LinuxApiMock/src/WrapApi.cpp @@ -14,7 +14,9 @@ */ #include "WrapApi.h" #include "LinuxApiMock.h" -// #include +#if defined(__x86_64__) + #include +#endif #include #include #include diff --git a/test/utils/McuProtocol/tool/src/McuProtocolTestTool.cpp b/test/utils/McuProtocol/tool/src/McuProtocolTestTool.cpp index f57cf1a..01ef1e1 100644 --- a/test/utils/McuProtocol/tool/src/McuProtocolTestTool.cpp +++ b/test/utils/McuProtocol/tool/src/McuProtocolTestTool.cpp @@ -21,7 +21,9 @@ #include "ProtocolMonitor.h" #include "UartDevice.h" #include "UartDeviceTestTool.h" -// #include +#if defined(__x86_64__) + #include +#endif #include #include #include diff --git a/test/utils/UartDevice/tool/src/UartDeviceTestTool.cpp b/test/utils/UartDevice/tool/src/UartDeviceTestTool.cpp index 1e98b30..6bf6add 100644 --- a/test/utils/UartDevice/tool/src/UartDeviceTestTool.cpp +++ b/test/utils/UartDevice/tool/src/UartDeviceTestTool.cpp @@ -17,7 +17,9 @@ #include "ILog.h" #include "LinuxApiMock.h" #include "UartDevice.h" -// #include +#if defined(__x86_64__) + #include +#endif #include #include #include diff --git a/utils/CMakeLists.txt b/utils/CMakeLists.txt index 04cdd65..7412102 100644 --- a/utils/CMakeLists.txt +++ b/utils/CMakeLists.txt @@ -13,7 +13,7 @@ add_subdirectory(ModBusCRC16) add_subdirectory(LedControl) add_subdirectory(KeyControl) add_subdirectory(MediaBase) -add_subdirectory(FxHttpServer) +# add_subdirectory(FxHttpServer) add_subdirectory(Servers) add_subdirectory(TcpModule) 
add_subdirectory(UpgradeBase) diff --git a/utils/MediaBase/include/MediaBase.h b/utils/MediaBase/include/MediaBase.h index a312042..9942307 100644 --- a/utils/MediaBase/include/MediaBase.h +++ b/utils/MediaBase/include/MediaBase.h @@ -15,6 +15,7 @@ #ifndef MEDIA_BASE_H #define MEDIA_BASE_H #include "StatusCode.h" +#include #ifdef __cplusplus extern "C" { #endif @@ -22,17 +23,34 @@ enum MediaHandleType { MEDIA_HANDLE_TYPE_READ_H264 = 0, MEDIA_HANDLE_TYPE_READ_G711A, + MEDIA_HANDLE_TYPE_COMBINE_MP4, MEDIA_HANDLE_TYPE_END }; +enum StreamType +{ + STREAM_TYPE_VIDEO_H264 = 0, + STREAM_TYPE_AUDIO_G711A, + STREAM_TYPE_END +}; +typedef struct stream_info +{ + const StreamType mType; +} StreamInfo; typedef void (*ReadVideoFileCallback)(const void *, const unsigned int, void *); typedef void (*ReadAudioFileCallback)(const void *, const unsigned int, void *); void *ICreateMediaBase(const MediaHandleType type); // StatusCode Init(void *object); // StatusCode UnInit(void *object); + StatusCode ISetReadVideoCallback(void *object, ReadVideoFileCallback callback, void *context); StatusCode ISetReadAudioCallback(void *object, ReadVideoFileCallback callback, void *context); StatusCode IStartReadFile(void *object, const char *path); StatusCode IStopReadFile(void *object); + +StatusCode IOpenOutputFile(void *object, const char *fileName); +StatusCode ICloseOutputFile(void *object); +void IGetStreamData(void *object, const void *data, const size_t size, const StreamInfo streamInfo); + void IMediaBaseFree(void *object); #ifdef __cplusplus } diff --git a/utils/MediaBase/src/IMediaBase.cpp b/utils/MediaBase/src/IMediaBase.cpp index cc719b2..ee6303e 100644 --- a/utils/MediaBase/src/IMediaBase.cpp +++ b/utils/MediaBase/src/IMediaBase.cpp @@ -41,6 +41,19 @@ StatusCode IMediaBase::SetReadAudioCallback(ReadVideoFileCallback callback, void LogWarning("STATUS_CODE_VIRTUAL_FUNCTION\n"); return CreateStatusCode(STATUS_CODE_VIRTUAL_FUNCTION); } +StatusCode IMediaBase::OpenOutputFile(const std::string &fileName) +{ + LogWarning("STATUS_CODE_VIRTUAL_FUNCTION\n"); + return CreateStatusCode(STATUS_CODE_VIRTUAL_FUNCTION); +} +StatusCode IMediaBase::CloseOutputFile(void) +{ + LogWarning("STATUS_CODE_VIRTUAL_FUNCTION\n"); + return CreateStatusCode(STATUS_CODE_VIRTUAL_FUNCTION); +} +void IMediaBase::GetStreamData(const void *data, const size_t &size, const StreamInfo &streamInfo) +{ +} static const char *MEDIA_BASE_NAME = "media_adapter"; const char *GetMediaBaseModuleName(void) { diff --git a/utils/MediaBase/src/IMediaBase.h b/utils/MediaBase/src/IMediaBase.h index aa45014..b327ec5 100644 --- a/utils/MediaBase/src/IMediaBase.h +++ b/utils/MediaBase/src/IMediaBase.h @@ -21,10 +21,17 @@ class IMediaBase public: IMediaBase() = default; virtual ~IMediaBase() = default; + +public: // About read media file. virtual StatusCode StartReadFile(const std::string &path); virtual StatusCode StopReadFile(void); virtual StatusCode SetReadVideoCallback(ReadVideoFileCallback callback, void *context); virtual StatusCode SetReadAudioCallback(ReadVideoFileCallback callback, void *context); + +public: // About combine file. 
+ virtual StatusCode OpenOutputFile(const std::string &fileName); + virtual StatusCode CloseOutputFile(void); + virtual void GetStreamData(const void *data, const size_t &size, const StreamInfo &streamInfo); }; typedef struct media_base_header { diff --git a/utils/MediaBase/src/MediaBase.cpp b/utils/MediaBase/src/MediaBase.cpp index 8b1c256..3083fda 100644 --- a/utils/MediaBase/src/MediaBase.cpp +++ b/utils/MediaBase/src/MediaBase.cpp @@ -16,6 +16,7 @@ #include "ILog.h" #include "IMediaBase.h" #include "StatusCode.h" +#include #include #include #include @@ -63,6 +64,26 @@ StatusCode IStopReadFile(void *object) } return CreateStatusCode(STATUS_CODE_OK); } +StatusCode IOpenOutputFile(void *object, const char *fileName) +{ + if (ObjectCheck(object) == true) { + return (*(std::shared_ptr *)object)->OpenOutputFile(fileName); + } + return CreateStatusCode(STATUS_CODE_OK); +} +StatusCode ICloseOutputFile(void *object) +{ + if (ObjectCheck(object) == true) { + return (*(std::shared_ptr *)object)->CloseOutputFile(); + } + return CreateStatusCode(STATUS_CODE_OK); +} +void IGetStreamData(void *object, const void *data, const size_t size, const StreamInfo streamInfo) +{ + if (ObjectCheck(object) == true) { + (*(std::shared_ptr *)object)->GetStreamData(data, size, streamInfo); + } +} void IMediaBaseFree(void *object) { if (ObjectCheck(object) == true) { diff --git a/utils/MediaBase/src/MediaBaseImpl.cpp b/utils/MediaBase/src/MediaBaseImpl.cpp index 13c322c..a39176a 100644 --- a/utils/MediaBase/src/MediaBaseImpl.cpp +++ b/utils/MediaBase/src/MediaBaseImpl.cpp @@ -16,23 +16,53 @@ #include "ILog.h" #include "MediaBase.h" #include "StatusCode.h" +#include #include +#include +#include #include #ifdef __cplusplus extern "C" { #endif // #include +#include +#include +#include #include #include +#include +// #include #include +#include +#include +#include +#include +// #include +// #include +#include +#include +#include +#include +#include // #include #ifdef __cplusplus } #endif +#include +#include +#include +#include +#include +#define STREAM_DURATION 10.0 +#define STREAM_FRAME_RATE 25 /* 25 images/s */ +#define STREAM_PIX_FMT AV_PIX_FMT_YUV420P /* default pix_fmt */ MediaBaseImpl::MediaBaseImpl(const MediaHandleType &type) - : mType(type), mReadVideoCallback(nullptr), mReadVideoCallbackContext(nullptr), mTaskRuning(false) + : mCodec(nullptr), mCodec_ctx(nullptr), mFrame(nullptr), mOc(nullptr), mType(type), mReadVideoCallback(nullptr), + mReadVideoCallbackContext(nullptr), mTaskRuning(false) { MediaTypeConvert(); + memset(&mVideoSt, 0, sizeof(mVideoSt)); + memset(&mAudioSt, 0, sizeof(mAudioSt)); } StatusCode MediaBaseImpl::StartReadFile(const std::string &path) { @@ -41,7 +71,7 @@ StatusCode MediaBaseImpl::StartReadFile(const std::string &path) const AVInputFormat *iformat = av_find_input_format(InputFormat(mType)); AVFormatContext *pFormatCtx = nullptr; if ((result = avformat_open_input(&pFormatCtx, path.c_str(), iformat, nullptr)) < 0) { - char error_str[AV_ERROR_MAX_STRING_SIZE]; + char error_str[AV_ERROR_MAX_STRING_SIZE] = {0}; av_make_error_string(error_str, AV_ERROR_MAX_STRING_SIZE, result); LogError("Couldn't open file: %s, result=%s\n", path.c_str(), error_str); return CreateStatusCode(STATUS_CODE_NOT_OK); @@ -91,6 +121,107 @@ StatusCode MediaBaseImpl::SetReadAudioCallback(ReadVideoFileCallback callback, v mReadAudioCallbackContext = context; return CreateStatusCode(STATUS_CODE_OK); } +StatusCode MediaBaseImpl::OpenOutputFile(const std::string &fileName) +{ + InitCodec(&mCodec, &mCodec_ctx, &mFrame); + 
int ret; + AVFormatContext *oc = nullptr; + int have_video = 0, have_audio = 0; + int encode_video = 0, encode_audio = 0; + const AVCodec *audio_codec, *video_codec; + AVDictionary *opt = nullptr; + avformat_alloc_output_context2(&oc, nullptr, "mp4", fileName.c_str()); + if (!oc) { + LogError("Could not deduce output format from file extension: using MPEG.\n"); + return CreateStatusCode(STATUS_CODE_NOT_OK); + } + mOc = oc; + const AVOutputFormat *fmt = oc->oformat; + /* Add the audio and video streams using the default format codecs + * and initialize the codecs. */ + if (fmt->video_codec != AV_CODEC_ID_NONE) { + add_stream(&mVideoSt, oc, &video_codec, fmt->video_codec); + have_video = 1; + encode_video = 1; + } + if (fmt->audio_codec != AV_CODEC_ID_NONE) { + add_stream(&mAudioSt, oc, &audio_codec, fmt->audio_codec); + have_audio = 1; + encode_audio = 1; + } /* Now that all the parameters are set, we can open the audio and + * video codecs and allocate the necessary encode buffers. */ + if (have_video) { + open_video(oc, video_codec, &mVideoSt, opt); + } + + if (have_audio) { + open_audio(oc, audio_codec, &mAudioSt, opt); + } + av_dump_format(oc, 0, fileName.c_str(), 1); + if (!(fmt->flags & AVFMT_NOFILE)) { + ret = avio_open(&oc->pb, fileName.c_str(), AVIO_FLAG_WRITE); + if (ret < 0) { + char error_str[AV_ERROR_MAX_STRING_SIZE] = {0}; + av_make_error_string(error_str, AV_ERROR_MAX_STRING_SIZE, ret); + LogInfo("Could not open '%s': %s\n", fileName.c_str(), error_str); + return CreateStatusCode(STATUS_CODE_NOT_OK); + } + } + /* Write the stream header, if any. */ + ret = avformat_write_header(oc, &opt); + if (ret < 0) { + char error_str[AV_ERROR_MAX_STRING_SIZE] = {0}; + av_make_error_string(error_str, AV_ERROR_MAX_STRING_SIZE, ret); + LogInfo("Error occurred when opening output file: %s\n", error_str); + return CreateStatusCode(STATUS_CODE_NOT_OK); + } + return CreateStatusCode(STATUS_CODE_OK); +} +StatusCode MediaBaseImpl::CloseOutputFile(void) +{ + av_write_trailer(mOc); + av_frame_free(&mFrame); + mFrame = nullptr; + avcodec_free_context(&mCodec_ctx); + + close_stream(mOc, &mVideoSt); + close_stream(mOc, &mAudioSt); + if (!(mOc->oformat->flags & AVFMT_NOFILE)) { + /* Close the output file. 
*/ + avio_closep(&mOc->pb); + } + avformat_free_context(mOc); + return CreateStatusCode(STATUS_CODE_OK); +} +void MediaBaseImpl::GetStreamData(const void *data, const size_t &size, const StreamInfo &streamInfo) +{ + AVPacket *packet = nullptr; + // av_init_packet(&packet); + packet = av_packet_alloc(); + packet->data = (unsigned char *)data; + packet->size = size; + int ret = avcodec_send_packet(mCodec_ctx, packet); + if (ret < 0) { + LogInfo("Error sending a packet for decoding\n"); + // av_packet_unref(packet); + av_packet_free(&packet); + return; + } + while (ret >= 0) { + ret = avcodec_receive_frame(mCodec_ctx, mFrame); + if (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF) { + break; + } + if (ret < 0) { + LogInfo("Error during decoding\n"); + break; + } + write_frame(mOc, mVideoSt.enc, mVideoSt.st, mFrame, mVideoSt.tmp_pkt); + break; + } + // av_packet_unref(packet); + av_packet_free(&packet); +} void MediaBaseImpl::InitFfmpeg(void) { } @@ -123,7 +254,7 @@ void MediaBaseImpl::ReadFileThread(AVFormatContext *pFormatCtx, int mediaStreamI playTimeMs = (packet.duration * pFormatCtx->streams[mediaStreamIndex]->time_base.num * 1000) / pFormatCtx->streams[mediaStreamIndex]->time_base.den; // LogInfo("Frame data address: %p, length: %zu\n", packet.data, packet.size); - LogInfo("Play time ms:%d\n", playTimeMs); + // LogInfo("Play time ms:%d\n", playTimeMs); ReadFrame(&packet); std::this_thread::sleep_for(std::chrono::milliseconds(playTimeMs)); } @@ -153,7 +284,7 @@ void MediaBaseImpl::MediaTypeConvert(void) mFFmpegMediaType = AVMEDIA_TYPE_AUDIO; break; default: - LogError("Unknown media type.\n"); + LogWarning("Unknown media type.\n"); break; } } @@ -170,4 +301,339 @@ const char *MediaBaseImpl::InputFormat(const MediaHandleType &type) LogError("Unknown media type.\n"); return nullptr; } +} +bool MediaBaseImpl::add_stream(OutputStream *ost, AVFormatContext *oc, const AVCodec **codec, enum AVCodecID codec_id) +{ + AVCodecContext *c; + int i; + + /* find the encoder */ + *codec = avcodec_find_encoder(codec_id); + if (!(*codec)) { + LogError("Could not find encoder for '%s'\n", avcodec_get_name(codec_id)); + return false; + } + + ost->tmp_pkt = av_packet_alloc(); + if (!ost->tmp_pkt) { + LogError("Could not allocate AVPacket\n"); + return false; + } + + ost->st = avformat_new_stream(oc, nullptr); + if (!ost->st) { + LogError("Could not allocate stream\n"); + return false; + } + ost->st->id = oc->nb_streams - 1; + c = avcodec_alloc_context3(*codec); + if (!c) { + LogError("Could not alloc an encoding context\n"); + return false; + } + ost->enc = c; + + const AVChannelLayout src = (AVChannelLayout)AV_CHANNEL_LAYOUT_STEREO; + switch ((*codec)->type) { + case AVMEDIA_TYPE_AUDIO: + c->sample_fmt = (*codec)->sample_fmts ? (*codec)->sample_fmts[0] : AV_SAMPLE_FMT_FLTP; + c->bit_rate = 64000; + c->sample_rate = 44100; + if ((*codec)->supported_samplerates) { + c->sample_rate = (*codec)->supported_samplerates[0]; + for (i = 0; (*codec)->supported_samplerates[i]; i++) { + if ((*codec)->supported_samplerates[i] == 44100) + c->sample_rate = 44100; + } + } + av_channel_layout_copy(&c->ch_layout, &src); + ost->st->time_base = (AVRational){1, c->sample_rate}; + break; + + case AVMEDIA_TYPE_VIDEO: + c->codec_id = codec_id; + + c->bit_rate = 400000; + /* Resolution must be a multiple of two. */ + c->width = 352; + c->height = 288; + /* timebase: This is the fundamental unit of time (in seconds) in terms + * of which frame timestamps are represented. 
For fixed-fps content, + * timebase should be 1/framerate and timestamp increments should be + * identical to 1. */ + ost->st->time_base = (AVRational){1, STREAM_FRAME_RATE}; + c->time_base = ost->st->time_base; + + c->gop_size = 12; /* emit one intra frame every twelve frames at most */ + c->pix_fmt = STREAM_PIX_FMT; + if (c->codec_id == AV_CODEC_ID_MPEG2VIDEO) { + /* just for testing, we also add B-frames */ + c->max_b_frames = 2; + } + if (c->codec_id == AV_CODEC_ID_MPEG1VIDEO) { + /* Needed to avoid using macroblocks in which some coeffs overflow. + * This does not happen with normal video, it just happens here as + * the motion of the chroma plane does not match the luma plane. */ + c->mb_decision = 2; + } + break; + + default: + break; + } + + /* Some formats want stream headers to be separate. */ + if (oc->oformat->flags & AVFMT_GLOBALHEADER) { + c->flags |= AV_CODEC_FLAG_GLOBAL_HEADER; + } + return true; +} +void MediaBaseImpl::close_stream(AVFormatContext *oc, OutputStream *ost) +{ + avcodec_free_context(&ost->enc); + av_frame_free(&ost->frame); + av_frame_free(&ost->tmp_frame); + av_packet_free(&ost->tmp_pkt); + sws_freeContext(ost->sws_ctx); + swr_free(&ost->swr_ctx); +} +bool MediaBaseImpl::open_video(AVFormatContext *oc, const AVCodec *codec, OutputStream *ost, AVDictionary *opt_arg) +{ + + int ret; + AVCodecContext *c = ost->enc; + AVDictionary *opt = nullptr; + + av_dict_copy(&opt, opt_arg, 0); + + /* open the codec */ + ret = avcodec_open2(c, codec, &opt); + av_dict_free(&opt); + if (ret < 0) { + char error_str[AV_ERROR_MAX_STRING_SIZE] = {0}; + av_make_error_string(error_str, AV_ERROR_MAX_STRING_SIZE, ret); + LogInfo("Could not open video codec: %s\n", error_str); + return false; + } + + /* allocate and init a re-usable frame */ + ost->frame = alloc_frame(c->pix_fmt, c->width, c->height); + if (!ost->frame) { + LogInfo("Could not allocate video frame\n"); + return false; + } + + /* If the output format is not YUV420P, then a temporary YUV420P + * picture is needed too. It is then converted to the required + * output format. 
*/ + ost->tmp_frame = nullptr; + if (c->pix_fmt != AV_PIX_FMT_YUV420P) { + ost->tmp_frame = alloc_frame(AV_PIX_FMT_YUV420P, c->width, c->height); + if (!ost->tmp_frame) { + LogInfo("Could not allocate temporary video frame\n"); + return false; + } + } + + /* copy the stream parameters to the muxer */ + ret = avcodec_parameters_from_context(ost->st->codecpar, c); + if (ret < 0) { + LogInfo("Could not copy the stream parameters\n"); + return false; + } + return true; +} +bool MediaBaseImpl::open_audio(AVFormatContext *oc, const AVCodec *codec, OutputStream *ost, AVDictionary *opt_arg) +{ + AVCodecContext *c; + int nb_samples; + int ret; + AVDictionary *opt = nullptr; + + c = ost->enc; + + /* open it */ + av_dict_copy(&opt, opt_arg, 0); + ret = avcodec_open2(c, codec, &opt); + av_dict_free(&opt); + if (ret < 0) { + char error_str[AV_ERROR_MAX_STRING_SIZE] = {0}; + av_make_error_string(error_str, AV_ERROR_MAX_STRING_SIZE, ret); + LogInfo("Could not open audio codec: %s\n", error_str); + return false; + } + + /* init signal generator */ + ost->t = 0; + ost->tincr = 2 * M_PI * 110.0 / c->sample_rate; + /* increment frequency by 110 Hz per second */ + ost->tincr2 = 2 * M_PI * 110.0 / c->sample_rate / c->sample_rate; + + if (c->codec->capabilities & AV_CODEC_CAP_VARIABLE_FRAME_SIZE) + nb_samples = 10000; + else + nb_samples = c->frame_size; + + ost->frame = alloc_audio_frame(c->sample_fmt, &c->ch_layout, c->sample_rate, nb_samples); + ost->tmp_frame = alloc_audio_frame(AV_SAMPLE_FMT_S16, &c->ch_layout, c->sample_rate, nb_samples); + + /* copy the stream parameters to the muxer */ + ret = avcodec_parameters_from_context(ost->st->codecpar, c); + if (ret < 0) { + LogInfo("Could not copy the stream parameters\n"); + return false; + } + + /* create resampler context */ + ost->swr_ctx = swr_alloc(); + if (!ost->swr_ctx) { + LogInfo("Could not allocate resampler context\n"); + return false; + } + + /* set options */ + av_opt_set_chlayout(ost->swr_ctx, "in_chlayout", &c->ch_layout, 0); + av_opt_set_int(ost->swr_ctx, "in_sample_rate", c->sample_rate, 0); + av_opt_set_sample_fmt(ost->swr_ctx, "in_sample_fmt", AV_SAMPLE_FMT_S16, 0); + av_opt_set_chlayout(ost->swr_ctx, "out_chlayout", &c->ch_layout, 0); + av_opt_set_int(ost->swr_ctx, "out_sample_rate", c->sample_rate, 0); + av_opt_set_sample_fmt(ost->swr_ctx, "out_sample_fmt", c->sample_fmt, 0); + + /* initialize the resampling context */ + if ((ret = swr_init(ost->swr_ctx)) < 0) { + LogInfo("Failed to initialize the resampling context\n"); + return false; + } + return true; +} +AVFrame *MediaBaseImpl::alloc_audio_frame(enum AVSampleFormat sample_fmt, const AVChannelLayout *channel_layout, + int sample_rate, int nb_samples) +{ + AVFrame *frame = av_frame_alloc(); + if (!frame) { + LogInfo("Error allocating an audio frame\n"); + return nullptr; + } + + frame->format = sample_fmt; + av_channel_layout_copy(&frame->ch_layout, channel_layout); + frame->sample_rate = sample_rate; + frame->nb_samples = nb_samples; + + if (nb_samples) { + if (av_frame_get_buffer(frame, 0) < 0) { + LogInfo("Error allocating an audio buffer\n"); + return nullptr; + } + } + + return frame; +} +AVFrame *MediaBaseImpl::alloc_frame(enum AVPixelFormat pix_fmt, int width, int height) +{ + AVFrame *frame; + int ret; + + frame = av_frame_alloc(); + if (!frame) + return nullptr; + + frame->format = pix_fmt; + frame->width = width; + frame->height = height; + + /* allocate the buffers for the frame data */ + ret = av_frame_get_buffer(frame, 0); + if (ret < 0) { + LogInfo("Could not allocate 
frame data.\n"); + return nullptr; + } + + return frame; +} +void MediaBaseImpl::InitCodec(AVCodec **codec, AVCodecContext **codec_ctx, AVFrame **frame) +{ + *codec = (AVCodec *)avcodec_find_decoder(AV_CODEC_ID_H264); + if (!codec) { + LogInfo("Codec not found\n"); + return; + } + *codec_ctx = avcodec_alloc_context3((const AVCodec *)codec); + if (!codec_ctx) { + LogInfo("Could not allocate codec context\n"); + return; + } + if (avcodec_open2(*codec_ctx, *codec, nullptr) < 0) { + LogInfo("Could not open codec\n"); + return; + } + *frame = av_frame_alloc(); + if (!frame) { + LogInfo("Could not allocate video frame\n"); + return; + } +} +int MediaBaseImpl::write_frame(AVFormatContext *fmt_ctx, AVCodecContext *c, AVStream *st, AVFrame *frame, AVPacket *pkt) +{ + int ret; + + // send the frame to the encoder + ret = avcodec_send_frame(c, frame); + if (ret < 0) { + char error_str[AV_ERROR_MAX_STRING_SIZE] = {0}; + av_make_error_string(error_str, AV_ERROR_MAX_STRING_SIZE, ret); + LogInfo("Error sending a frame to the encoder: %s\n", error_str); + return -1; + } + + while (ret >= 0) { + ret = avcodec_receive_packet(c, pkt); + if (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF) { + break; + } + if (ret < 0) { + char error_str[AV_ERROR_MAX_STRING_SIZE] = {0}; + av_make_error_string(error_str, AV_ERROR_MAX_STRING_SIZE, ret); + LogInfo("Error encoding a frame: %s\n", error_str); + return -1; + } + + /* rescale output packet timestamp values from codec to stream timebase */ + av_packet_rescale_ts(pkt, c->time_base, st->time_base); + pkt->stream_index = st->index; + + /* Write the compressed frame to the media file. */ + log_packet(fmt_ctx, pkt); + ret = av_interleaved_write_frame(fmt_ctx, pkt); + /* pkt is now blank (av_interleaved_write_frame() takes ownership of + * its contents and resets pkt), so that no unreferencing is necessary. + * This would be different if one used av_write_frame(). */ + if (ret < 0) { + char error_str[AV_ERROR_MAX_STRING_SIZE] = {0}; + av_make_error_string(error_str, AV_ERROR_MAX_STRING_SIZE, ret); + LogInfo("Error while writing output packet: %s\n", error_str); + return -1; + } + } + + return ret == AVERROR_EOF ? 
1 : 0; +} +void MediaBaseImpl::log_packet(const AVFormatContext *fmt_ctx, const AVPacket *pkt) +{ + char pts[AV_TS_MAX_STRING_SIZE] = {0}; + char dts[AV_TS_MAX_STRING_SIZE] = {0}; + char duration[AV_TS_MAX_STRING_SIZE] = {0}; + char pts_time[AV_TS_MAX_STRING_SIZE] = {0}; + char dts_time[AV_TS_MAX_STRING_SIZE] = {0}; + char duration_time[AV_TS_MAX_STRING_SIZE]; + AVRational *time_base = &fmt_ctx->streams[pkt->stream_index]->time_base; + av_ts_make_string(pts, pkt->pts); + printf("pts:%s pts_time:%s dts:%s dts_time:%s duration:%s duration_time:%s stream_index:%d\n", + av_ts_make_string(pts, pkt->pts), + av_ts_make_time_string(pts_time, pkt->pts, time_base), + av_ts_make_string(dts, pkt->dts), + av_ts_make_time_string(dts_time, pkt->dts, time_base), + av_ts_make_string(duration, pkt->duration), + av_ts_make_time_string(duration_time, pkt->duration, time_base), + pkt->stream_index); } \ No newline at end of file diff --git a/utils/MediaBase/src/MediaBaseImpl.h b/utils/MediaBase/src/MediaBaseImpl.h index 395bcc2..9177fed 100644 --- a/utils/MediaBase/src/MediaBaseImpl.h +++ b/utils/MediaBase/src/MediaBaseImpl.h @@ -19,31 +19,87 @@ extern "C" { #endif // #include +#include #include #include +#include #include +#include +#include +#include +#include +#include +#include +#include // #include #ifdef __cplusplus } #endif #include +// a wrapper around a single output AVStream +typedef struct OutputStream +{ + AVStream *st; + AVCodecContext *enc; + + /* pts of the next frame that will be generated */ + int64_t next_pts; + int samples_count; + + AVFrame *frame; + AVFrame *tmp_frame; + + AVPacket *tmp_pkt; + + float t, tincr, tincr2; + + struct SwsContext *sws_ctx; + struct SwrContext *swr_ctx; +} OutputStream; class MediaBaseImpl : public IMediaBase, public std::enable_shared_from_this { public: MediaBaseImpl(const MediaHandleType &type); virtual ~MediaBaseImpl() = default; + +public: // About read media file. StatusCode StartReadFile(const std::string &path) override; StatusCode StopReadFile(void) override; StatusCode SetReadVideoCallback(ReadVideoFileCallback callback, void *context) override; StatusCode SetReadAudioCallback(ReadVideoFileCallback callback, void *context) override; +public: // About combine file. 
+ StatusCode OpenOutputFile(const std::string &fileName) override; + StatusCode CloseOutputFile(void) override; + void GetStreamData(const void *data, const size_t &size, const StreamInfo &streamInfo) override; + private: void InitFfmpeg(void); void UnInitFfmpeg(void); void ReadFileThread(AVFormatContext *pFormatCtx, int video_stream_index); void ReadFrame(AVPacket *packet); void MediaTypeConvert(void); - const char *InputFormat(const MediaHandleType &type); + static const char *InputFormat(const MediaHandleType &type); + +private: + static bool add_stream(OutputStream *ost, AVFormatContext *oc, const AVCodec **codec, enum AVCodecID codec_id); + static void close_stream(AVFormatContext *oc, OutputStream *ost); + static bool open_video(AVFormatContext *oc, const AVCodec *codec, OutputStream *ost, AVDictionary *opt_arg); + static bool open_audio(AVFormatContext *oc, const AVCodec *codec, OutputStream *ost, AVDictionary *opt_arg); + static AVFrame *alloc_audio_frame(enum AVSampleFormat sample_fmt, const AVChannelLayout *channel_layout, + int sample_rate, int nb_samples); + static AVFrame *alloc_frame(enum AVPixelFormat pix_fmt, int width, int height); + static void InitCodec(AVCodec **codec, AVCodecContext **codec_ctx, AVFrame **frame); + static int write_frame(AVFormatContext *fmt_ctx, AVCodecContext *c, AVStream *st, AVFrame *frame, AVPacket *pkt); + static void log_packet(const AVFormatContext *fmt_ctx, const AVPacket *pkt); + +private: + AVCodec *mCodec; + AVCodecContext *mCodec_ctx; + AVFrame *mFrame; + AVFormatContext *mOc; + OutputStream mVideoSt; + OutputStream mAudioSt; private: const MediaHandleType mType; diff --git a/utils/MediaBase/src/MediaBaseMakePtr.cpp b/utils/MediaBase/src/MediaBaseMakePtr.cpp index 09eaa99..cadeeb3 100644 --- a/utils/MediaBase/src/MediaBaseMakePtr.cpp +++ b/utils/MediaBase/src/MediaBaseMakePtr.cpp @@ -15,6 +15,7 @@ #include "MediaBaseMakePtr.h" #include "ILog.h" #include "IMediaBase.h" +#include "MediaBase.h" #include "MediaBaseImpl.h" #include std::shared_ptr &MediaBaseMakePtr::GetInstance(std::shared_ptr *impl)
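
For reference, below is a minimal caller-side sketch of the new MP4 muxing interface declared in utils/MediaBase/include/MediaBase.h, mirroring how RecordMp4 drives it in this patch. The helper name `feed_h264_frame` and the `main()` scaffolding are illustrative only and are not part of the commit.

```cpp
// Hedged usage sketch of the MediaBase C interface added by this patch.
// Assumes MediaBase.h from utils/MediaBase/include; feed_h264_frame is a
// hypothetical helper, not code from the commit.
#include "MediaBase.h"

void feed_h264_frame(void *mp4, const void *frame, unsigned int length)
{
    // Tag the payload so the muxer knows which stream it belongs to.
    StreamInfo info = {.mType = STREAM_TYPE_VIDEO_H264};
    IGetStreamData(mp4, frame, length, info);
}

int main(void)
{
    void *mp4 = ICreateMediaBase(MEDIA_HANDLE_TYPE_COMBINE_MP4);
    if (nullptr == mp4) {
        return -1;
    }
    IOpenOutputFile(mp4, "./record.mp4"); // Creates the muxer and writes the MP4 header.
    // ... call feed_h264_frame() for every H.264 frame delivered by the camera ...
    ICloseOutputFile(mp4);                // Writes the trailer and closes the file.
    IMediaBaseFree(mp4);
    return 0;
}
```

Note that RecordMp4::GetVideoStream in this patch currently fills StreamInfo with STREAM_TYPE_END; STREAM_TYPE_VIDEO_H264 is used above only to show the tagging the new StreamType enum appears intended for.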