Improve: MediaBase module.
parent 1b7ff22112
commit 35f8b4093b
@@ -48,7 +48,7 @@ StatusCode RecordMp4::Init(void)
         return CreateStatusCode(STATUS_CODE_NOT_OK);
     }
     if (OUTPUT_FILE_NAME_MAX >= thumbnailPath.size()) {
-        memcpy(fileInfo.mFileName, thumbnailPath.c_str(), thumbnailPath.size());
+        memcpy(fileInfo.mThumbnailFileName, thumbnailPath.c_str(), thumbnailPath.size());
     }
     else {
         LogError("ThumbnailPath is too long.\n");

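The fix above writes the thumbnail path into fileInfo.mThumbnailFileName instead of fileInfo.mFileName. As a side note, the guard-then-memcpy pattern only stays safe if the destination is zero-initialized or also receives a terminator; a minimal sketch of that idea, with hypothetical buffer sizes and field names standing in for the real RecordMp4 definitions:

#include <cstddef>
#include <cstring>
#include <string>

// Hypothetical stand-ins for the real RecordMp4 definitions.
constexpr std::size_t OUTPUT_FILE_NAME_MAX = 128;
struct FileInfo {
    char mFileName[OUTPUT_FILE_NAME_MAX + 1];          // recording file path
    char mThumbnailFileName[OUTPUT_FILE_NAME_MAX + 1]; // thumbnail file path
};

// Copy a std::string path into the fixed-size field only when it fits,
// including the terminating '\0' from c_str().
bool SetThumbnailPath(FileInfo &fileInfo, const std::string &thumbnailPath)
{
    if (thumbnailPath.size() > OUTPUT_FILE_NAME_MAX) {
        return false; // caller logs "ThumbnailPath is too long."
    }
    std::memcpy(fileInfo.mThumbnailFileName, thumbnailPath.c_str(), thumbnailPath.size() + 1);
    return true;
}
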
@@ -25,6 +25,7 @@ extern "C" {
 #include <libavutil/channel_layout.h>
 #include <libavutil/error.h>
 #include <libavutil/frame.h>
 #include <libavutil/pixfmt.h>
 #include <libavutil/samplefmt.h>
 #ifdef __cplusplus
 }
@@ -35,7 +36,13 @@ extern "C" {
 #include <functional>
 #include <stdint.h>
 FfmpegDecoder::FfmpegDecoder(const enum AVCodecID &codecId)
-    : mCodecId(codecId), mCodec(nullptr), mCodecCtx(nullptr), mFrame(nullptr), mPacket(nullptr), mParser(nullptr)
+    : mCodecId(codecId), mCodec(nullptr), mCodecCtx(nullptr), mFrame(nullptr), mPacket(nullptr), mParser(nullptr),
+      mVideoWidth(DECODER_UNSUPORTED), mVideoHeight(DECODER_UNSUPORTED)
 {
 }
+FfmpegDecoder::FfmpegDecoder(const enum AVCodecID &codecId, const int &width, const int &height)
+    : mCodecId(codecId), mCodec(nullptr), mCodecCtx(nullptr), mFrame(nullptr), mPacket(nullptr), mParser(nullptr),
+      mVideoWidth(width), mVideoHeight(height)
+{
+}
 bool FfmpegDecoder::Init(void)
@@ -80,8 +87,8 @@ bool FfmpegDecoder::Init(void)
     }
     else {
         mCodecCtx->pix_fmt = AV_PIX_FMT_YUV420P;
-        mCodecCtx->width = 1920;
-        mCodecCtx->height = 2610;
+        mCodecCtx->width = mVideoWidth;
+        mCodecCtx->height = mVideoHeight;
     }
     if ((ret = avcodec_open2(mCodecCtx, mCodec, nullptr)) < 0) {
         char error_str[AV_ERROR_MAX_STRING_SIZE] = {0};
@@ -124,10 +131,6 @@ bool FfmpegDecoder::UnInit(void)
         av_parser_close(mParser);
         mParser = nullptr;
     }
-    // if (mCodecCtx) {
-    // // if (mCodecId != AV_CODEC_ID_H264) {
-    // // }
-    // }
     avcodec_free_context(&mCodecCtx);
     mCodecCtx = nullptr;
     if (mFrame) {
@@ -142,7 +145,6 @@ void FfmpegDecoder::DecodeData(const void *data, const size_t &size, const unsig
     if (nullptr == mParser) {
         mPacket->data = (uint8_t *)data;
         mPacket->size = size;
-        // mPacket->stream_index = 0;
         mPacket->pts = pts;
         mPacket->dts = mPacket->pts;
         // LogInfo("source data mPacket->pts:%d\n", mPacket->pts);
@@ -233,8 +235,8 @@ void inline FfmpegDecoder::AVDecodeData(AVPacket *pkt, std::function<void(AVFram
     // for (ch = 0; ch < mCodecCtx->ch_layout.nb_channels; ch++)
     // // fwrite(frame->data[ch] + data_size * i, 1, data_size, outfile);
     // save_code_stream_file(mFrame->data[ch] + data_size * i, data_size);
     // save_code_stream_file(mFrame->data[0], mFrame->linesize[0]);
     // if (mCodecId == AV_CODEC_ID_H264) {
     // // save_code_stream_file(mFrame->data[0], mFrame->linesize[0]);
     // pgm_save(mFrame->data[0], mFrame->linesize[0], mFrame->width, mFrame->height, "./test.yuv");
     // }
     // LogInfo("decode frame pts = %llu, nb_samples = %d\n", mFrame->pts, mFrame->nb_samples);

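For context on the Init() change above: when a raw stream has no container headers or parser output to announce its geometry, the dimensions have to be pushed into the codec context before avcodec_open2(). A minimal standalone sketch of that pattern; the function name, the YUV420P default, and the error handling are illustrative and not taken from the module:

extern "C" {
#include <libavcodec/avcodec.h>
}

// Illustrative only: open a raw video decoder with caller-supplied dimensions.
static AVCodecContext *OpenVideoDecoder(enum AVCodecID codecId, int width, int height)
{
    const AVCodec *codec = avcodec_find_decoder(codecId);
    if (!codec) {
        return nullptr;
    }
    AVCodecContext *ctx = avcodec_alloc_context3(codec);
    if (!ctx) {
        return nullptr;
    }
    // Without container headers or a parser, the decoder cannot infer the
    // frame geometry, so it has to be provided up front.
    ctx->width = width;
    ctx->height = height;
    ctx->pix_fmt = AV_PIX_FMT_YUV420P;
    if (avcodec_open2(ctx, codec, nullptr) < 0) {
        avcodec_free_context(&ctx);
        return nullptr;
    }
    return ctx;
}
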
@@ -33,10 +33,18 @@ extern "C" {
 }
 #endif
 #include <functional>
+constexpr int DECODER_UNSUPORTED = 0;
 class FfmpegDecoder
 {
 public:
-    FfmpegDecoder(const enum AVCodecID &codecId);
+    FfmpegDecoder(const enum AVCodecID &codecId); // TODO: should be deleted
+    /**
+     * @brief When decoding a video stream, you need to use this constructor to provide the required parameters.
+     * @param codecId Video stream format
+     * @param width Video width
+     * @param height Video height
+     */
+    FfmpegDecoder(const enum AVCodecID &codecId, const int &width, const int &height);
     virtual ~FfmpegDecoder() = default;
     bool Init(void);
     bool UnInit(void);
@@ -59,5 +67,7 @@ private:
     AVFrame *mFrame;
     AVPacket *mPacket;
     AVCodecParserContext *mParser;
+    const int mVideoWidth;
+    const int mVideoHeight;
 };
 #endif

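A short usage sketch for the new two-argument constructor declared above; the codec ID and 1920x1080 dimensions are example values, and the decode call is elided because its exact signature is not part of this diff:

#include "FfmpegDecoder.h"

void MakeH264Decoder(void)
{
    // Example values: decoding an H.264 elementary stream of known size.
    FfmpegDecoder decoder(AV_CODEC_ID_H264, 1920, 1080);
    if (!decoder.Init()) {
        return; // handle failure
    }
    // ... feed encoded data through the class's decode entry point ...
    decoder.UnInit();
}
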
@@ -45,7 +45,12 @@ constexpr long SOURCE_AUDIO_SAMPEL_RATE = 8000;
 #define STREAM_PIX_FMT AV_PIX_FMT_YUV420P /* default pix_fmt */
 FfmpegEncoder::FfmpegEncoder(const enum AVCodecID &codecId)
     : mCodecId(codecId), mCodecCtx(nullptr), mCodec(nullptr), mFrame(nullptr), mTmpFrame(nullptr), mTmpPkt(nullptr),
-      mSamplesCount(0), mSwrCtx(nullptr), next_pts(0)
+      mSamplesCount(0), mSwrCtx(nullptr), next_pts(0), mVideoWidth(ENCODER_UNSUPORTED), mVideoHeight(ENCODER_UNSUPORTED)
 {
 }
+FfmpegEncoder::FfmpegEncoder(const enum AVCodecID &codecId, const int &width, const int &height)
+    : mCodecId(codecId), mCodecCtx(nullptr), mCodec(nullptr), mFrame(nullptr), mTmpFrame(nullptr), mTmpPkt(nullptr),
+      mSamplesCount(0), mSwrCtx(nullptr), next_pts(0), mVideoWidth(width), mVideoHeight(height)
+{
+}
 bool FfmpegEncoder::Init(int &outputFlags)
@@ -94,8 +99,8 @@ bool FfmpegEncoder::Init(int &outputFlags)
 
     mCodecCtx->bit_rate = 300000;
     /* Resolution must be a multiple of two. */
-    mCodecCtx->width = 640;
-    mCodecCtx->height = 480;
+    mCodecCtx->width = mVideoWidth;
+    mCodecCtx->height = mVideoHeight;
     /* timebase: This is the fundamental unit of time (in seconds) in terms
      * of which frame timestamps are represented. For fixed-fps content,
      * timebase should be 1/framerate and timestamp increments should be
@@ -170,23 +175,23 @@ bool FfmpegEncoder::OpenEncoder(AVDictionary *optArg, AVStream *stream)
         return false;
     }
 }
-static void save_code_stream_file(const void *data, const size_t &size)
-{
-    char OutPath[16];
-    const void *pData = data;
-    FILE *file = NULL;
-    LogInfo("save_code_stream_file: %d\n", size);
-    sprintf(OutPath, "./test.jpg");
-    file = fopen(OutPath, "a+");
+// static void save_code_stream_file(const void *data, const size_t &size)
+// {
+// char OutPath[16];
+// const void *pData = data;
+// FILE *file = NULL;
+// LogInfo("save_code_stream_file: %d\n", size);
+// sprintf(OutPath, "./test.jpg");
+// file = fopen(OutPath, "a+");
 
-    if (file) { // TODO: Don't open very time.
-        fwrite(pData, 1, size, file);
-        fflush(file);
-    }
+// if (file) { // TODO: Don't open very time.
+// fwrite(pData, 1, size, file);
+// fflush(file);
+// }
 
-    if (file)
-        fclose(file);
-}
+// if (file)
+// fclose(file);
+// }
 int FfmpegEncoder::EncodeData(AVFrame *frame, AVStream *stream, std::function<void(AVPacket *pkt)> callback)
 {
     int ret = 0;
@@ -225,12 +230,12 @@ int FfmpegEncoder::EncodeData(AVFrame *frame, AVStream *stream, std::function<vo
         // LogInfo("Write stream->time_base.num: %d\n", stream->time_base.num);
         // LogInfo("Write stream->time_base.den: %d\n", stream->time_base.den);
         mTmpPkt->stream_index = stream->index;
-        // LogInfo("aaaaaaaaaaaaaaaaaaaaaaa Write frame mTmpPkt->pts: %llu\n", mTmpPkt->pts);
+        // LogInfo(" Write frame mTmpPkt->pts: %llu\n", mTmpPkt->pts);
 
         if (callback) {
-            if (mCodecId == AV_CODEC_ID_MJPEG) {
-                save_code_stream_file(mTmpPkt->data, mTmpPkt->size);
-            }
+            // if (mCodecId == AV_CODEC_ID_MJPEG) {
+            //     save_code_stream_file(mTmpPkt->data, mTmpPkt->size);
+            // }
             callback(mTmpPkt);
         }
     }

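Since mCodecCtx->width/height now come straight from the caller, the "multiple of two" requirement noted above falls on the caller as well. A tiny hypothetical helper that could round odd dimensions down before they reach the encoder:

// Hypothetical helper, not part of the module: the comment in Init() notes
// that the encoder resolution must be a multiple of two, so odd
// caller-supplied dimensions could be rounded down first.
static inline int EvenDimension(int value)
{
    return value - (value % 2);
}

// e.g. EvenDimension(1921) == 1920, EvenDimension(1080) == 1080
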
@@ -33,10 +33,18 @@ extern "C" {
 }
 #endif
 #include <functional>
+constexpr int ENCODER_UNSUPORTED = 0;
 class FfmpegEncoder
 {
 public:
     FfmpegEncoder(const enum AVCodecID &codecId);
+    /**
+     * @brief When encoding a video stream, you need to use this constructor to provide the required parameters.
+     * @param codecId Video stream format.
+     * @param width Video width.
+     * @param height Video height.
+     */
+    FfmpegEncoder(const enum AVCodecID &codecId, const int &width, const int &height);
     virtual ~FfmpegEncoder() = default;
     bool Init(int &outputFlags);
     void UnInit(void);
@@ -64,5 +72,7 @@ private:
     int mSamplesCount;
     struct SwrContext *mSwrCtx;
     int64_t next_pts;
+    const int mVideoWidth;
+    const int mVideoHeight;
 };
 #endif

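A brief usage sketch of the two-argument constructor declared above, mirroring the 640x480 MJPEG thumbnail encoder set up in FfmpegThumbnail.cpp later in this commit; the flags variable is whatever the surrounding muxer provides:

#include "FfmpegEncoder.h"

void MakeThumbnailEncoder(int &outputFlags)
{
    FfmpegEncoder encoder(AV_CODEC_ID_MJPEG, 640, 480); // example values
    if (!encoder.Init(outputFlags)) {
        return; // handle failure
    }
    // ... feed frames to the encoder ...
    encoder.UnInit();
}
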
@@ -116,7 +116,8 @@ StatusCode inline FfmpegMuxStreamV2::OpenMuxOutputFile(const std::string &fileNa
     /* Add the audio and video streams using the default format codecs
      * and initialize the codecs. */
     if (mOutputFormat->oformat->video_codec != AV_CODEC_ID_NONE) {
-        mVideoStream = AddStream(mOutputFormat, AV_CODEC_ID_NONE, AV_CODEC_ID_NONE);
+        const std::string thumbnailFileName = mOutputFileInfo->mThumbnailFileName;
+        mVideoStream = AddStream(mOutputFormat, AV_CODEC_ID_NONE, AV_CODEC_ID_NONE, thumbnailFileName);
         // mVideoStream = AddStream(mOutputFormat, mOutputFormat->oformat->video_codec, AV_CODEC_ID_H264);
         mVideoStream->SetWriteSourceDataCallback(
             std::bind(&FfmpegMuxStreamV2::GetAVPacketDataCallback, this, std::placeholders::_1));
@@ -206,9 +207,15 @@ bool inline FfmpegMuxStreamV2::MakeSureStreamHeanderOK(const void *data, const s
     return true;
 }
 std::shared_ptr<FfmpegOutputStream> FfmpegMuxStreamV2::AddStream(AVFormatContext *outputFormat,
-                                                                 enum AVCodecID encodecId, enum AVCodecID decodecId)
+                                                                 enum AVCodecID encodecId, enum AVCodecID decodecId,
+                                                                 const std::string &thumbnailFile)
 {
     auto stream = std::make_shared<FfmpegOutputStream>(encodecId, decodecId);
+    if (thumbnailFile.empty()) {
         stream->Init(outputFormat);
+    }
+    else {
+        stream->Init(outputFormat, thumbnailFile);
+    }
     return stream;
 }

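The empty-string check above is what makes the extra parameter optional in practice. A self-contained sketch of the same dispatch idea; names and paths here are illustrative, not the module's API:

#include <iostream>
#include <string>

// A minimal sketch of the AddStream() dispatch pattern: an optional thumbnail
// path defaults to an empty string, and the empty check selects which Init()
// overload runs.
static void InitStream(const std::string &thumbnailFile = "")
{
    if (thumbnailFile.empty()) {
        std::cout << "Init(outputFormat)\n";
    }
    else {
        std::cout << "Init(outputFormat, \"" << thumbnailFile << "\")\n";
    }
}

int main()
{
    InitStream();                        // audio or no-thumbnail case
    InitStream("/tmp/record_thumb.jpg"); // video stream with a thumbnail
    return 0;
}
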
@@ -67,7 +67,8 @@ private:
      * @return std::shared_ptr<FfmpegOutputStream>
      */
     static std::shared_ptr<FfmpegOutputStream> AddStream(AVFormatContext *outputFormat, enum AVCodecID encodecId,
-                                                          enum AVCodecID decodecId);
+                                                          enum AVCodecID decodecId,
+                                                          const std::string &thumbnailFile = "");
 
 private:
     std::mutex mMutex;

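One detail worth noting about the declaration above: the default value (= "") belongs only in the header; the out-of-line definition shown earlier in this commit must not repeat it. A minimal illustration with placeholder names:

#include <iostream>
#include <string>

struct Demo {
    static void AddStream(const std::string &thumbnailFile = ""); // declaration carries the default
};

void Demo::AddStream(const std::string &thumbnailFile) // definition omits "= \"\""
{
    std::cout << (thumbnailFile.empty() ? std::string("(default: empty)") : thumbnailFile) << "\n";
}

int main()
{
    Demo::AddStream();             // uses the default ""
    Demo::AddStream("thumb.jpeg"); // explicit path
    return 0;
}
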
@@ -17,6 +17,7 @@
 #include "FfmpegEncoder.h"
+#include "FfmpegThumbnail.h"
 #include "ILog.h"
 #include <stdio.h>
 #ifdef __cplusplus
 extern "C" {
 #endif
@@ -33,9 +34,11 @@ extern "C" {
 #endif
 #include <cstddef>
 #include <cstdint>
 #include <cstdlib>
 #include <functional>
 #include <memory>
 #include <string.h>
 #include <string>
 #include <thread>
 FfmpegOutputStream::FfmpegOutputStream(const AVCodecID &encodecId, const AVCodecID &dncodecId)
     : mEncodecId(encodecId), mDeccodecId(dncodecId), mTmpPkt(nullptr), mStream(nullptr), mStreamHeaderWritten(false),
@@ -84,6 +87,11 @@ bool FfmpegOutputStream::Init(AVFormatContext *outputFormat)
     }
     return true;
 }
+bool FfmpegOutputStream::Init(AVFormatContext *outputFormat, const std::string &thumbnailFile)
+{
+    mThumbnailFileName = thumbnailFile;
+    return Init(outputFormat);
+}
 void FfmpegOutputStream::UnInit(void)
 {
     if (mEncoder) {
@@ -173,6 +181,10 @@ void FfmpegOutputStream::GetDecodeDataCallback(AVFrame *frame)
 }
 void FfmpegOutputStream::CreateThumbnailFile(const void *frame, const size_t &size)
 {
+    if (mThumbnailFileName.empty()) {
+        LogError("mThumbnailFileName is empty.\n");
+        return;
+    }
     auto thumbnailThread =
         [](std::shared_ptr<FfmpegOutputStream> output, const void *frameData, const size_t dataSize) {
             LogInfo("CreateThumbnailFile start.\n");
@@ -185,7 +197,7 @@ void FfmpegOutputStream::CreateThumbnailFileThread(const void *frame, const size
 {
     FfmpegThumbnail thumbnail(AV_CODEC_ID_MJPEG, AV_CODEC_ID_H264);
     thumbnail.Init();
-    thumbnail.CreateThumbnail("./sssss.jpeg", frame, size);
+    thumbnail.CreateThumbnail(mThumbnailFileName, frame, size);
     thumbnail.UnInit();
-    LogInfo("CreateThumbnailFile end eeeeeeeeeeeeeeeeeeeeeeee.\n");
+    LogInfo("CreateThumbnailFile end.\n");
 }

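CreateThumbnailFile() hands the frame to a worker thread through a lambda that receives a std::shared_ptr to the stream object. A condensed sketch of that lifetime pattern, with illustrative class and member names; it assumes the object is already owned by a std::shared_ptr, as FfmpegOutputStream instances created through AddStream are:

#include <cstring>
#include <memory>
#include <thread>
#include <vector>

class ThumbnailMaker : public std::enable_shared_from_this<ThumbnailMaker>
{
public:
    void CreateThumbnailAsync(const void *frame, std::size_t size)
    {
        // Copy the bytes first: the caller's buffer may be reused once we return.
        std::vector<unsigned char> copy(size);
        std::memcpy(copy.data(), frame, size);
        // The shared_ptr capture keeps this object alive until the task finishes.
        mWorker = std::thread(
            [self = shared_from_this(), data = std::move(copy)]() {
                self->WriteThumbnail(data.data(), data.size());
            });
    }
    ~ThumbnailMaker()
    {
        if (mWorker.joinable()) {
            mWorker.join(); // make sure the file is written before destruction
        }
    }

private:
    void WriteThumbnail(const unsigned char *, std::size_t) { /* encode + save */ }
    std::thread mWorker;
};
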
@@ -43,6 +43,14 @@ public:
     FfmpegOutputStream(const AVCodecID &encodecId, const AVCodecID &dncodecId);
     virtual ~FfmpegOutputStream() = default;
     bool Init(AVFormatContext *outputFormat);
+    /**
+     * @brief If you need to convert stream frames into thumbnails, use this function.
+     * @param outputFormat
+     * @param thumbnailFile
+     * @return true
+     * @return false
+     */
+    bool Init(AVFormatContext *outputFormat, const std::string &thumbnailFile);
     void UnInit(void);
     void WriteSourceData(const void *data, const size_t &size, const unsigned long long &pts);
     void SetWriteSourceDataCallback(std::function<void(AVPacket *pkt)> callback);
@@ -70,5 +78,6 @@ private:
     bool mStreamHeaderWritten;
     std::thread mThumbnailThread;
     char *mH264Data2Jpeg;
+    std::string mThumbnailFileName;
 };
 #endif

@@ -13,38 +13,43 @@
  * limitations under the License.
  */
 #include "FfmpegThumbnail.h"
 #include "FfmpegDecoder.h"
 #include "FfmpegEncoder.h"
 #include "ILog.h"
 #include "LinuxApi.h"
 #include <stdio.h>
 #ifdef __cplusplus
 extern "C" {
 #endif
 #include <libavcodec/avcodec.h>
 #include <libavcodec/codec_id.h>
 #include <libavcodec/packet.h>
 #include <libavformat/avformat.h>
 #include <libavutil/avassert.h>
 #include <libavformat/avio.h>
 #include <libavutil/avutil.h>
 #include <libavutil/channel_layout.h>
 #include <libavutil/dict.h>
 #include <libavutil/error.h>
 #include <libavutil/frame.h>
 #include <libavutil/imgutils.h>
 #include <libavutil/mathematics.h>
 #include <libavutil/opt.h>
 #include <libavutil/timestamp.h>
 #include <libswresample/swresample.h>
 #include <libavutil/mem.h>
 #include <libavutil/pixfmt.h>
 #include <libswscale/swscale.h>
 #ifdef __cplusplus
 }
 #endif
 #include <cstdlib>
 #include <functional>
 #include <memory>
 #include <stdint.h>
 #include <string>
 FfmpegThumbnail::FfmpegThumbnail(const AVCodecID &encodecId, const AVCodecID &dncodecId)
     : mOutputFormat(nullptr), mStream(nullptr), sws_ctx(nullptr)
 {
-    mDecoder = std::make_shared<FfmpegDecoder>(dncodecId);
-    mEncoder = std::make_shared<FfmpegEncoder>(encodecId);
+    mDecoder = std::make_shared<FfmpegDecoder>(dncodecId, 1920, 2160);
+    mEncoder = std::make_shared<FfmpegEncoder>(encodecId, 640, 480);
 }
 void FfmpegThumbnail::Init(void)
 {
     LogInfo("FfmpegThumbnail Init\n");
     mDecodeCallback = std::bind(&FfmpegThumbnail::GetDecodeDataCallback, this, std::placeholders::_1);
     mEncodeCallback = std::bind(&FfmpegThumbnail::GetEncodeDataCallback, this, std::placeholders::_1);
 }
 void FfmpegThumbnail::UnInit(void)
 {
@@ -75,10 +80,12 @@ void FfmpegThumbnail::UnInit(void)
 }
 bool FfmpegThumbnail::CreateThumbnail(const std::string &outputFile, const void *data, const size_t &size)
 {
-    if (!mDecoder || !mDecodeCallback) {
+    if (!mDecoder) {
         LogError("CreateThumbnail mDecoder && mDecodeCallback\n");
         return true;
     }
+    mDecodeCallback = std::bind(&FfmpegThumbnail::GetDecodeDataCallback, this, std::placeholders::_1);
+    mEncodeCallback = std::bind(&FfmpegThumbnail::GetEncodeDataCallback, this, std::placeholders::_1, outputFile);
     AVDictionary *opt = nullptr;
     int ret = 0;
     /* allocate the output media context */
@@ -97,32 +104,8 @@ bool FfmpegThumbnail::CreateThumbnail(const std::string &outputFile, const void
     }
     mStream->id = mOutputFormat->nb_streams - 1;
     LogInfo("Create video stream\n");
-    // char *pData = (char *)data;
-    // for (size_t i = 0; i < size; i++) {
-    // if ((0x00 == pData[i]) && (0x00 == pData[i + 1]) && (0x00 == pData[i + 2]) && (0x01 == pData[i + 3]) &&
-    // (0x5 == (pData[i + 4] & 0x1F))) {
-    // uint8_t *extradata = (uint8_t *)av_mallocz(i + 1);
-    // if (!extradata) {
-    // LogError("Could not allocate extradata\n");
-    // return false;
-    // }
-    // LogInfo("Found extradata\n");
-    // memcpy(extradata, pData, i);
-    // mStream->codecpar->extradata = extradata;
-    // mStream->codecpar->extradata_size = i;
-    // }
-    // }
     }
     av_dump_format(mOutputFormat, 0, outputFile.c_str(), 1);
     /* open the output file, if needed */
-    // if (!(mOutputFormat->oformat->flags & AVFMT_NOFILE)) {
-    // ret = avio_open(&mOutputFormat->pb, outputFile.c_str(), AVIO_FLAG_WRITE);
-    // if (ret < 0) {
-    // char error_str[AV_ERROR_MAX_STRING_SIZE] = {0};
-    // LogError("Could not open '%s': %s\n",
-    // outputFile.c_str(),
-    // av_make_error_string(error_str, AV_ERROR_MAX_STRING_SIZE, ret));
-    // }
     LogInfo("Open output file\n");
-    // }
     av_dict_set_int(&opt, "use_editlist", 0, 0);
@@ -136,7 +119,6 @@ bool FfmpegThumbnail::CreateThumbnail(const std::string &outputFile, const void
         return false;
     }
     av_dict_free(&opt);
-    // return true;
     mDecoder->Init();
     mEncoder->Init(mOutputFormat->flags);
     mStream->time_base = mEncoder->GetTimeBase();
@@ -170,9 +152,9 @@ void FfmpegThumbnail::GetDecodeDataCallback(AVFrame *frame)
                              output_frame->height,
                              AV_PIX_FMT_YUV420P,
                              SWS_BILINEAR,
-                             NULL,
-                             NULL,
-                             NULL);
+                             nullptr,
+                             nullptr,
+                             nullptr);
     // Convert the pixel format
     sws_scale(sws_ctx, frame->data, frame->linesize, 0, frame->height, output_frame->data, output_frame->linesize);
 
@@ -184,10 +166,10 @@ void FfmpegThumbnail::GetDecodeDataCallback(AVFrame *frame)
     av_free(jpeg_buf);
     return;
 }
-void FfmpegThumbnail::GetEncodeDataCallback(AVPacket *pkt)
+void FfmpegThumbnail::GetEncodeDataCallback(AVPacket *pkt, const std::string &fileName)
 {
+    LogInfo("GetEncodeDataCallback, save thumbnail file %s\n", fileName.c_str());
     return;
-    LogInfo("ggggggggggggggggggggggggggggggggggggggg GetEncodeDataCallback %d\n", pkt->size);
     int ret = 0;
     ret = av_interleaved_write_frame(mOutputFormat, pkt);
     /* pkt is now blank (av_interleaved_write_frame() takes ownership of
@@ -199,3 +181,24 @@ void FfmpegThumbnail::GetEncodeDataCallback(AVPacket *pkt)
                  av_make_error_string(error_str, AV_ERROR_MAX_STRING_SIZE, ret));
     }
 }
+bool FfmpegThumbnail::SaveThumbnailFile(const std::string &fileName, const void *data, const size_t &size)
+{
+    FILE *file = nullptr;
+    LogInfo("SaveThumbnailFile:%s\n", fileName.c_str());
+    file = fopen(fileName.c_str(), "a+");
+
+    if (file) {
+        fwrite(data, 1, size, file);
+        fflush(file);
+    }
+    else {
+        return false;
+    }
+
+    if (file) {
+        fclose(file);
+        file = nullptr;
+    }
+    fx_system_v2("sync");
+    return true;
+}

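The decode callback shown above converts each decoded frame to YUV420P at the thumbnail size before the MJPEG encode. A condensed, self-contained sketch of that sws_getContext()/sws_scale() step; the helper name and error handling are illustrative, not the module's code:

extern "C" {
#include <libavutil/frame.h>
#include <libavutil/pixfmt.h>
#include <libswscale/swscale.h>
}

// Rescale and convert a decoded frame to YUV420P at the thumbnail size.
static AVFrame *RescaleForThumbnail(const AVFrame *src, int dstWidth, int dstHeight)
{
    AVFrame *dst = av_frame_alloc();
    if (!dst) {
        return nullptr;
    }
    dst->format = AV_PIX_FMT_YUV420P;
    dst->width = dstWidth;
    dst->height = dstHeight;
    if (av_frame_get_buffer(dst, 0) < 0) {
        av_frame_free(&dst);
        return nullptr;
    }
    SwsContext *sws = sws_getContext(src->width, src->height, static_cast<AVPixelFormat>(src->format),
                                     dstWidth, dstHeight, AV_PIX_FMT_YUV420P,
                                     SWS_BILINEAR, nullptr, nullptr, nullptr);
    if (!sws) {
        av_frame_free(&dst);
        return nullptr;
    }
    sws_scale(sws, src->data, src->linesize, 0, src->height, dst->data, dst->linesize);
    sws_freeContext(sws);
    return dst;
}
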
@@ -47,7 +47,10 @@ public:
 
 private:
     void GetDecodeDataCallback(AVFrame *frame);
-    void GetEncodeDataCallback(AVPacket *pkt);
+    void GetEncodeDataCallback(AVPacket *pkt, const std::string &fileName);
 
+private:
+    static bool SaveThumbnailFile(const std::string &fileName, const void *data, const size_t &size);
+
 private:
     std::function<void(AVFrame *)> mDecodeCallback;

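Putting the pieces together, the calling sequence used by FfmpegOutputStream::CreateThumbnailFileThread() earlier in this commit looks like this; the output path below is an example value:

#include "FfmpegThumbnail.h"
#include <cstddef>

void WriteThumbnail(const void *keyFrame, const std::size_t &size)
{
    FfmpegThumbnail thumbnail(AV_CODEC_ID_MJPEG, AV_CODEC_ID_H264);
    thumbnail.Init();
    thumbnail.CreateThumbnail("/tmp/record_thumb.jpeg", keyFrame, size);
    thumbnail.UnInit();
}
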